diff --git a/src/controllers/admin.controller.ts b/src/controllers/admin.controller.ts index b975a8e..0f0cfe5 100644 --- a/src/controllers/admin.controller.ts +++ b/src/controllers/admin.controller.ts @@ -8,7 +8,7 @@ import logger from '@utils/logger'; import { initializeHolidaysCache, clearWorkingHoursCache } from '@utils/tatTimeUtils'; import { clearConfigCache } from '@services/configReader.service'; import { User, UserRole } from '@models/User'; -import { sanitizeHtml } from '@utils/sanitizer'; +import { sanitizeHtml, sanitizeObject, isHtmlEmpty } from '@utils/sanitizer'; /** * Get all holidays (with optional year filter) @@ -125,7 +125,9 @@ export const createHoliday = async (req: Request, res: Response): Promise logger.error('[Admin] Error creating holiday:', error); res.status(500).json({ success: false, - error: error.message || 'Failed to create holiday' + message: 'Failed to create holiday', + error: error.message, + details: error.errors // Sequelize validation errors are usually in .errors }); } }; @@ -172,7 +174,9 @@ export const updateHoliday = async (req: Request, res: Response): Promise logger.error('[Admin] Error updating holiday:', error); res.status(500).json({ success: false, - error: error.message || 'Failed to update holiday' + message: 'Failed to update holiday', + error: error.message, + details: error.errors }); } }; @@ -403,10 +407,18 @@ export const updateConfiguration = async (req: Request, res: Response): Promise< return; } - // Sanitize config value if it's likely to be rendered as HTML - // We can be selective or just sanitize all strings for safety - if (typeof configValue === 'string') { - configValue = sanitizeHtml(configValue); + // Sanitize config value using unified sanitizeObject + // This will handle strings, numbers, and nested objects consistently + const sanitizedObj = sanitizeObject({ [configKey]: configValue }); + configValue = sanitizedObj[configKey]; + + // If it's a string, ensure it's not effectively empty after 
sanitization + if (typeof configValue === 'string' && isHtmlEmpty(configValue)) { + res.status(400).json({ + success: false, + error: 'Config value is required and must contain valid content' + }); + return; } // Update configuration @@ -631,7 +643,7 @@ export const putForm16Config = async (req: Request, res: Response): Promise; + const body = sanitizeObject(req.body as Record); const normalizeEmail = (e: unknown) => String(e ?? '').trim().toLowerCase(); const submissionViewerEmails = Array.isArray(body.submissionViewerEmails) ? body.submissionViewerEmails.map(normalizeEmail).filter(Boolean) diff --git a/src/controllers/conclusion.controller.ts b/src/controllers/conclusion.controller.ts index fb90b86..9bb91dd 100644 --- a/src/controllers/conclusion.controller.ts +++ b/src/controllers/conclusion.controller.ts @@ -1,5 +1,6 @@ import { Request, Response } from 'express'; import { WorkflowRequest, ApprovalLevel, WorkNote, Document, Activity, ConclusionRemark } from '@models/index'; +import { isHtmlEmpty } from '../utils/sanitizer'; import { aiService } from '@services/ai.service'; import { activityService } from '@services/activity.service'; import logger from '@utils/logger'; @@ -227,8 +228,8 @@ export class ConclusionController { const { finalRemark } = req.body; const userId = (req as any).user?.userId; - if (!finalRemark || typeof finalRemark !== 'string') { - return res.status(400).json({ error: 'Final remark is required' }); + if (isHtmlEmpty(finalRemark)) { + return res.status(400).json({ error: 'A valid final remark is required. Please ensure the remark contains valid content.' 
}); } // Fetch request diff --git a/src/controllers/dealerClaim.controller.ts b/src/controllers/dealerClaim.controller.ts index f72c102..f6df725 100644 --- a/src/controllers/dealerClaim.controller.ts +++ b/src/controllers/dealerClaim.controller.ts @@ -12,11 +12,12 @@ import { sapIntegrationService } from '../services/sapIntegration.service'; import fs from 'fs'; import path from 'path'; import crypto from 'crypto'; -import { WorkflowRequest } from '../models/WorkflowRequest'; import { DealerClaimDetails } from '../models/DealerClaimDetails'; import { ClaimInvoice } from '../models/ClaimInvoice'; import { ClaimInvoiceItem } from '../models/ClaimInvoiceItem'; import { ActivityType } from '../models/ActivityType'; +import { Participant } from '../models/Participant'; +import { sanitizeObject, sanitizePermissive } from '../utils/sanitizer'; export class DealerClaimController { private dealerClaimService = new DealerClaimService(); @@ -50,8 +51,25 @@ export class DealerClaimController { } = req.body; // Validation - if (!activityName || !activityType || !dealerCode || !dealerName || !location || !requestDescription) { - return ResponseHandler.error(res, 'Missing required fields', 400); + const requiredFields = [ + { key: 'activityName', label: 'Activity Name' }, + { key: 'activityType', label: 'Activity Type' }, + { key: 'dealerCode', label: 'Dealer Code' }, + { key: 'dealerName', label: 'Dealer Name' }, + { key: 'location', label: 'Location' }, + { key: 'requestDescription', label: 'Request Description' }, + ]; + + const missingFields = requiredFields + .filter(field => !req.body[field.key]) + .map(field => field.label); + + if (missingFields.length > 0) { + return ResponseHandler.error( + res, + `Required fields are missing or contain invalid content: ${missingFields.join(', ')}`, + 400 + ); } const claimRequest = await this.dealerClaimService.createClaimRequest(userId, { @@ -76,9 +94,16 @@ export class DealerClaimController { message: 'Claim request created 
successfully' }, 'Claim request created'); } catch (error: any) { - // Handle approver validation errors - if (error.message && error.message.includes('Approver')) { - logger.warn('[DealerClaimController] Approver validation error:', { message: error.message }); + // Handle validation and business logic errors + const isValidationError = error.message && ( + error.message.includes('Approver') || + error.message.includes('Valid content is required') || + error.message.includes('invalid script') || + error.message.includes('empty input detected') + ); + + if (isValidationError) { + logger.warn('[DealerClaimController] Validation error:', { message: error.message }); return ResponseHandler.error(res, error.message, 400); } @@ -173,6 +198,8 @@ export class DealerClaimController { if (typeof costBreakup === 'string') { try { parsedCostBreakup = JSON.parse(costBreakup); + // Sanitize cost items + parsedCostBreakup = sanitizeObject(parsedCostBreakup); } catch (parseError) { logger.error('[DealerClaimController] Failed to parse costBreakup JSON:', parseError); return ResponseHandler.error(res, 'Invalid costBreakup format. Expected JSON array.', 400); @@ -232,7 +259,7 @@ export class DealerClaimController { timelineMode: timelineMode || 'date', expectedCompletionDate: expectedCompletionDate ? new Date(expectedCompletionDate) : undefined, expectedCompletionDays: expectedCompletionDays ? parseInt(expectedCompletionDays) : undefined, - dealerComments: dealerComments || '', + dealerComments: dealerComments ? sanitizePermissive(dealerComments) : '', }); return ResponseHandler.success(res, { message: 'Proposal submitted successfully' }, 'Proposal submitted'); @@ -264,6 +291,8 @@ export class DealerClaimController { if (closedExpenses) { try { parsedClosedExpenses = typeof closedExpenses === 'string' ? 
JSON.parse(closedExpenses) : closedExpenses; + // Sanitize expenses + parsedClosedExpenses = sanitizeObject(parsedClosedExpenses); } catch (e) { logger.warn('[DealerClaimController] Failed to parse closedExpenses JSON:', e); parsedClosedExpenses = []; @@ -547,7 +576,7 @@ export class DealerClaimController { totalClosedExpenses: totalClosedExpenses ? parseFloat(totalClosedExpenses) : 0, invoicesReceipts: invoicesReceipts.length > 0 ? invoicesReceipts : undefined, attendanceSheet: attendanceSheet || undefined, - completionDescription: completionDescription || undefined, + completionDescription: completionDescription ? sanitizePermissive(completionDescription) : undefined, }); return ResponseHandler.success(res, { message: 'Completion documents submitted successfully' }, 'Completion submitted'); @@ -784,6 +813,20 @@ export class DealerClaimController { return ResponseHandler.error(res, 'Invalid workflow request', 400); } + // Authorization Check + const userRole = (req as any).user?.role; + const userId = (req as any).user?.userId; + + if (userRole !== 'ADMIN' && userRole !== 'MANAGEMENT') { + const participant = await Participant.findOne({ + where: { requestId, userId, isActive: true } + }); + + if (!participant) { + return ResponseHandler.error(res, 'Access denied. 
You are not a participant in this workflow.', 403); + } + } + const { ClaimInvoice } = await import('../models/ClaimInvoice'); let invoice = await ClaimInvoice.findOne({ where: { requestId } }); @@ -1005,6 +1048,24 @@ export class DealerClaimController { const requestId = (workflow as any).requestId || (workflow as any).request_id; const requestNumber = (workflow as any).requestNumber || (workflow as any).request_number; + if (!requestId) { + return ResponseHandler.error(res, 'Invalid workflow request', 400); + } + + // Authorization Check + const userRole = (req as any).user?.role; + const userId = (req as any).user?.userId; + + if (userRole !== 'ADMIN' && userRole !== 'MANAGEMENT') { + const participant = await Participant.findOne({ + where: { requestId, userId, isActive: true } + }); + + if (!participant) { + return ResponseHandler.error(res, 'Access denied. You are not a participant in this workflow.', 403); + } + } + // Fetch related data logger.info(`[DealerClaimController] Preparing CSV for requestId: ${requestId}`); const [invoice, items, claimDetails, internalOrder] = await Promise.all([ diff --git a/src/controllers/document.controller.ts b/src/controllers/document.controller.ts index 0357291..8c6f1ea 100644 --- a/src/controllers/document.controller.ts +++ b/src/controllers/document.controller.ts @@ -7,6 +7,8 @@ import { User } from '@models/User'; import { WorkflowRequest } from '@models/WorkflowRequest'; import { Participant } from '@models/Participant'; import { ApprovalLevel } from '@models/ApprovalLevel'; +import { WorkNote } from '@models/WorkNote'; +import { WorkNoteAttachment } from '@models/WorkNoteAttachment'; import { Op } from 'sequelize'; import { ResponseHandler } from '@utils/responseHandler'; import { activityService } from '@services/activity.service'; @@ -17,6 +19,9 @@ import type { AuthenticatedRequest } from '../types/express'; import { getRequestMetadata } from '@utils/requestUtils'; import { getConfigNumber, getConfigValue } from 
'@services/configReader.service'; import { logDocumentEvent, logWithContext } from '@utils/logger'; +import { UPLOAD_DIR } from '../config/storage'; +import { Storage } from '@google-cloud/storage'; +import logger from '@utils/logger'; export class DocumentController { async upload(req: AuthenticatedRequest, res: Response): Promise { @@ -517,6 +522,196 @@ export class DocumentController { ResponseHandler.error(res, 'Upload failed', 500, message); } } + + /** + * Helper function to create proper Content-Disposition header + */ + private createContentDisposition(disposition: 'inline' | 'attachment', filename: string): string { + const cleanFilename = filename + .replace(/[<>:"|?*\x00-\x1F\x7F]/g, '_') + .replace(/\\/g, '_') + .trim(); + + const hasNonASCII = /[^\x00-\x7F]/.test(filename); + + if (hasNonASCII) { + const encodedFilename = encodeURIComponent(filename); + return `${disposition}; filename="${cleanFilename}"; filename*=UTF-8''${encodedFilename}`; + } else { + return `${disposition}; filename="${cleanFilename}"`; + } + } + + /** + * Preview or Download a standard workflow document + */ + async getWorkflowDocument(req: AuthenticatedRequest, res: Response, mode: 'preview' | 'download'): Promise { + try { + const { documentId } = req.params; + const userRole = req.user?.role; + const userId = req.user?.userId; + + const document = await Document.findOne({ where: { documentId } }); + if (!document) { + ResponseHandler.error(res, 'Document not found', 404); + return; + } + + // Authorization Check + if (userRole !== 'ADMIN' && userRole !== 'MANAGEMENT') { + const participant = await Participant.findOne({ + where: { requestId: document.requestId, userId, isActive: true } + }); + + if (!participant) { + ResponseHandler.error(res, 'Access denied. You are not a participant in this workflow.', 403); + return; + } + + const canAccess = mode === 'download' ? 
participant.canDownloadDocuments : participant.canViewDocuments; + if (!canAccess) { + ResponseHandler.error(res, `Access denied. You do not have permission to ${mode} documents in this workflow.`, 403); + return; + } + } + + return this.serveFile(res, { + storageUrl: (document as any).storageUrl || (document as any).storage_url, + filePath: (document as any).filePath || (document as any).file_path, + fileName: (document as any).originalFileName || (document as any).original_file_name || (document as any).fileName, + mimeType: (document as any).mimeType || (document as any).mime_type, + mode + }); + } catch (error) { + logger.error(`[DocumentController] Error getting workflow document:`, error); + ResponseHandler.error(res, 'Failed to access document', 500); + } + } + + /** + * Preview or Download a work note attachment + */ + async getWorkNoteAttachment(req: AuthenticatedRequest, res: Response, mode: 'preview' | 'download'): Promise { + try { + const { attachmentId } = req.params; + const userRole = req.user?.role; + const userId = req.user?.userId; + + const attachment = await WorkNoteAttachment.findOne({ where: { attachmentId } }); + if (!attachment) { + ResponseHandler.error(res, 'Attachment not found', 404); + return; + } + + const note = await WorkNote.findOne({ where: { noteId: attachment.noteId } }); + if (!note) { + ResponseHandler.error(res, 'Associated work note not found', 404); + return; + } + + // Authorization Check (Work note attachments follow general document permissions) + if (userRole !== 'ADMIN' && userRole !== 'MANAGEMENT') { + const participant = await Participant.findOne({ + where: { requestId: note.requestId, userId, isActive: true } + }); + + if (!participant) { + ResponseHandler.error(res, 'Access denied. You are not a participant in this workflow.', 403); + return; + } + + const canAccess = mode === 'download' ? 
participant.canDownloadDocuments : participant.canViewDocuments; + if (!canAccess) { + ResponseHandler.error(res, `Access denied. You do not have permission to ${mode} documentation in this workflow.`, 403); + return; + } + } + + return this.serveFile(res, { + storageUrl: (attachment as any).storageUrl || (attachment as any).storage_url, + filePath: (attachment as any).filePath || (attachment as any).file_path, + fileName: (attachment as any).fileName || (attachment as any).file_name, + mimeType: (attachment as any).fileType || (attachment as any).file_type, + mode + }); + } catch (error) { + logger.error(`[DocumentController] Error getting work note attachment:`, error); + ResponseHandler.error(res, 'Failed to access attachment', 500); + } + } + + /** + * Common logic to serve files from GCS or local storage + */ + private async serveFile(res: Response, options: { + storageUrl?: string, + filePath?: string, + fileName: string, + mimeType?: string, + mode: 'preview' | 'download' + }): Promise { + const { storageUrl, filePath, fileName, mimeType, mode } = options; + const isGcsUrl = storageUrl && (storageUrl.startsWith('https://storage.googleapis.com') || storageUrl.startsWith('gs://')); + + // Set CORS and basic headers + res.setHeader('Access-Control-Expose-Headers', 'Content-Type, Content-Disposition'); + const dispositionType = mode === 'download' ? 'attachment' : (mimeType?.includes('pdf') || mimeType?.includes('image') ? 
'inline' : 'attachment'); + res.setHeader('Content-Disposition', this.createContentDisposition(dispositionType, fileName)); + res.contentType(mimeType || 'application/octet-stream'); + + if (isGcsUrl) { + res.redirect(storageUrl!); + return; + } + + // Stream from GCS if filePath is a GCS path + if (!storageUrl && filePath && (filePath.startsWith('requests/') || filePath.startsWith('worknotes/'))) { + try { + const keyFilePath = process.env.GCP_KEY_FILE || ''; + const bucketName = process.env.GCP_BUCKET_NAME || ''; + const resolvedKeyPath = path.isAbsolute(keyFilePath) ? keyFilePath : path.resolve(process.cwd(), keyFilePath); + + const storage = new Storage({ + projectId: process.env.GCP_PROJECT_ID || '', + keyFilename: resolvedKeyPath, + }); + + const bucket = storage.bucket(bucketName); + const file = bucket.file(filePath); + + const [exists] = await file.exists(); + if (!exists) { + ResponseHandler.error(res, 'File not found in storage', 404); + return; + } + + file.createReadStream() + .on('error', (err) => { + logger.error('[DocumentController] GCS Stream Error:', err); + if (!res.headersSent) ResponseHandler.error(res, 'Streaming failed', 500); + }) + .pipe(res); + return; + } catch (err) { + logger.error('[DocumentController] GCS Access Error:', err); + ResponseHandler.error(res, 'Failed to access cloud storage', 500); + return; + } + } + + // Local file handling + const absolutePath = filePath && !path.isAbsolute(filePath) ? 
path.join(UPLOAD_DIR, filePath) : filePath; + if (absolutePath && fs.existsSync(absolutePath)) { + res.sendFile(absolutePath, (err) => { + if (err && !res.headersSent) ResponseHandler.error(res, 'Failed to send file', 500); + }); + return; + } + + ResponseHandler.error(res, 'File not found on server', 404); + } } +export const documentController = new DocumentController(); + diff --git a/src/controllers/workflow.controller.ts b/src/controllers/workflow.controller.ts index a88c940..32f7fb7 100644 --- a/src/controllers/workflow.controller.ts +++ b/src/controllers/workflow.controller.ts @@ -15,6 +15,7 @@ import { getRequestMetadata } from '@utils/requestUtils'; import { enrichApprovalLevels, enrichSpectators, validateInitiator, validateDealerUser } from '@services/userEnrichment.service'; import { DealerClaimService } from '@services/dealerClaim.service'; import { canViewForm16Submission } from '@services/form16Permission.service'; +import { sanitizeObject, isHtmlEmpty } from '@utils/sanitizer'; import logger from '@utils/logger'; const workflowService = new WorkflowService(); @@ -139,7 +140,7 @@ export class WorkflowController { } catch (error) { const errorMessage = error instanceof Error ? error.message : 'Unknown error'; logger.error('[WorkflowController] Failed to create workflow:', error); - ResponseHandler.error(res, 'Failed to create workflow', 400, errorMessage); + ResponseHandler.error(res, errorMessage, 400); } } @@ -161,10 +162,24 @@ export class WorkflowController { let parsed; try { parsed = JSON.parse(raw); + // Explicitly sanitize the parsed object since multipart bypasses global middleware + parsed = sanitizeObject(parsed); } catch (parseError) { ResponseHandler.error(res, 'Invalid JSON in payload', 400, parseError instanceof Error ? 
parseError.message : 'JSON parse error'); return; } + + // Explicitly check for empty content after sanitization for non-drafts + if (parsed.isDraft !== true) { + if (!parsed.title || !parsed.title.trim()) { + ResponseHandler.error(res, 'A valid title is required. Please ensure the title contains valid content.', 400); + return; + } + if (isHtmlEmpty(parsed.description)) { + ResponseHandler.error(res, 'A valid description is required. Please ensure the description contains valid content.', 400); + return; + } + } // Transform frontend format to backend format BEFORE validation // Map 'approvers' -> 'approvalLevels' for backward compatibility @@ -450,7 +465,7 @@ export class WorkflowController { userId: req.user?.userId, filesCount: (req as any).files?.length || 0, }); - ResponseHandler.error(res, 'Failed to create workflow', 400, errorMessage); + ResponseHandler.error(res, errorMessage, 400); } } diff --git a/src/middlewares/sanitization.middleware.ts b/src/middlewares/sanitization.middleware.ts index c8fc13a..78e11ee 100644 --- a/src/middlewares/sanitization.middleware.ts +++ b/src/middlewares/sanitization.middleware.ts @@ -2,133 +2,15 @@ * Sanitization Middleware * Sanitizes string inputs in req.body and req.query to prevent stored XSS. * - * Uses TWO strategies: - * 1. STRICT — strips ALL HTML tags (for normal text fields like names, emails, titles) - * 2. PERMISSIVE — allows safe formatting tags (for rich text fields like description, message, comments) + * Uses the unified sanitizeObject utility from @utils/sanitizer. * * This middleware runs AFTER body parsing and BEFORE route handlers. * File upload routes (multipart) are skipped — those are handled - * by the malwareScan middleware pipeline. + * by the malwareScan middleware pipeline (but can be manually sanitized in controllers). */ import { Request, Response, NextFunction } from 'express'; -import sanitizeHtml from 'sanitize-html'; - -/** - * Fields that intentionally store HTML from rich text editors. 
- * These get PERMISSIVE sanitization (safe formatting tags allowed). - * All other string fields get STRICT sanitization (all tags stripped). - */ -const RICH_TEXT_FIELDS = new Set([ - 'description', - 'requestDescription', - 'message', - 'content', - 'comments', - 'rejectionReason', - 'pauseReason', - 'conclusionRemark', - 'aiGeneratedRemark', - 'finalRemark', - 'closingRemarks', - 'effectiveFinalRemark', - 'keyDiscussionPoints', - 'keyPoints', - 'remarksText', - 'remark', - 'remarks', - 'feedback', - 'note', - 'notes', - 'skipReason', -]); - -// Strict config: zero allowed tags, zero allowed attributes -const strictSanitizeConfig: sanitizeHtml.IOptions = { - allowedTags: [], - allowedAttributes: {}, - allowedIframeHostnames: [], - disallowedTagsMode: 'discard', - nonTextTags: ['script', 'style', 'iframe', 'embed', 'object'], -}; - -// Permissive config: allow safe formatting tags from rich text editors -// Blocks dangerous elements (script, iframe, object, embed, form, input) -const permissiveSanitizeConfig: sanitizeHtml.IOptions = { - allowedTags: [ - // Text formatting - 'p', 'br', 'b', 'i', 'u', 'em', 'strong', 's', 'strike', 'del', 'sub', 'sup', 'mark', 'small', - // Headings - 'h1', 'h2', 'h3', 'h4', 'h5', 'h6', - // Lists - 'ul', 'ol', 'li', - // Block elements - 'blockquote', 'pre', 'code', 'hr', 'div', 'span', - // Tables - 'table', 'thead', 'tbody', 'tfoot', 'tr', 'th', 'td', 'caption', 'colgroup', 'col', - // Links (href checked below) - 'a', - // Images (src checked below) - 'img', - ], - allowedAttributes: { - 'a': ['href', 'title', 'target', 'rel'], - 'img': ['src', 'alt', 'title', 'width', 'height'], - 'td': ['colspan', 'rowspan', 'style'], - 'th': ['colspan', 'rowspan', 'style'], - 'span': ['class', 'style'], - 'div': ['class', 'style'], - 'pre': ['class', 'style'], - 'code': ['class', 'style'], - 'p': ['class', 'style'], - 'h1': ['class', 'style'], - 'h2': ['class', 'style'], - 'h3': ['class', 'style'], - 'h4': ['class', 'style'], - 'h5': 
['class', 'style'], - 'h6': ['class', 'style'], - 'ul': ['class', 'style'], - 'ol': ['class', 'style', 'start', 'type'], - 'li': ['class', 'style'], - 'blockquote': ['class', 'style'], - 'table': ['class', 'style'], - }, - allowedSchemes: ['http', 'https', 'mailto'], - allowedIframeHostnames: [], - disallowedTagsMode: 'discard', - nonTextTags: ['script', 'style', 'iframe', 'embed', 'object', 'applet', 'form', 'input', 'textarea', 'select', 'button'], -}; - -/** - * Recursively sanitize all string values in an object or array - * Uses the field key to decide strict vs permissive sanitization - */ -function sanitizeValue(value: any, fieldKey?: string): any { - if (typeof value === 'string') { - const isRichTextField = fieldKey && RICH_TEXT_FIELDS.has(fieldKey); - const config = isRichTextField ? permissiveSanitizeConfig : strictSanitizeConfig; - return sanitizeHtml(value, config); - } - if (Array.isArray(value)) { - return value.map((item) => sanitizeValue(item, fieldKey)); - } - if (value !== null && typeof value === 'object') { - return sanitizeObject(value); - } - return value; -} - -/** - * Sanitize all string properties of an object (recursively) - * Passes the key name to sanitizeValue so it can choose the right config - */ -function sanitizeObject(obj: Record): Record { - const sanitized: Record = {}; - for (const key of Object.keys(obj)) { - sanitized[key] = sanitizeValue(obj[key], key); - } - return sanitized; -} +import { sanitizeObject, sanitizeStrict } from '@utils/sanitizer'; /** * Express middleware that sanitizes req.body and req.query @@ -137,6 +19,7 @@ function sanitizeObject(obj: Record): Record { export const sanitizationMiddleware = (req: Request, _res: Response, next: NextFunction): void => { try { // Skip multipart requests — file uploads are sanitized by the malware scan pipeline + // Note: Multipart payloads should be manually sanitized in the controller if used. 
const contentType = req.headers['content-type'] || ''; if (contentType.includes('multipart/form-data')) { return next(); @@ -153,7 +36,7 @@ export const sanitizationMiddleware = (req: Request, _res: Response, next: NextF for (const key of Object.keys(req.query)) { const val = req.query[key]; if (typeof val === 'string') { - strictQuery[key] = sanitizeHtml(val, strictSanitizeConfig); + strictQuery[key] = sanitizeStrict(val); } else { strictQuery[key] = val; } diff --git a/src/middlewares/validate.middleware.ts b/src/middlewares/validate.middleware.ts index 4769df7..a99b916 100644 --- a/src/middlewares/validate.middleware.ts +++ b/src/middlewares/validate.middleware.ts @@ -13,12 +13,8 @@ export const validateRequest = (schema: ZodSchema) => { next(); } catch (error) { if (error instanceof ZodError) { - const errorMessages = error.errors.map(err => ({ - field: err.path.join('.'), - message: err.message, - })); - - ResponseHandler.validationError(res, 'Validation failed', errorMessages); + const errorMessage = error.errors.map(err => err.message).join(', '); + ResponseHandler.validationError(res, 'Validation failed', errorMessage); } else { const errorMessage = error instanceof Error ? error.message : 'Unknown error'; ResponseHandler.error(res, 'Validation error', 400, errorMessage); @@ -34,12 +30,8 @@ export const validateBody = (schema: ZodSchema) => { next(); } catch (error) { if (error instanceof ZodError) { - const errorMessages = error.errors.map(err => ({ - field: err.path.join('.'), - message: err.message, - })); - - ResponseHandler.validationError(res, 'Request body validation failed', errorMessages); + const errorMessage = error.errors.map(err => err.message).join(', '); + ResponseHandler.validationError(res, 'Request body validation failed', errorMessage); } else { const errorMessage = error instanceof Error ? 
error.message : 'Unknown error'; ResponseHandler.error(res, 'Validation error', 400, errorMessage); @@ -55,12 +47,8 @@ export const validateQuery = (schema: ZodSchema) => { next(); } catch (error) { if (error instanceof ZodError) { - const errorMessages = error.errors.map(err => ({ - field: err.path.join('.'), - message: err.message, - })); - - ResponseHandler.validationError(res, 'Query parameters validation failed', errorMessages); + const errorMessage = error.errors.map(err => err.message).join(', '); + ResponseHandler.validationError(res, 'Query parameters validation failed', errorMessage); } else { const errorMessage = error instanceof Error ? error.message : 'Unknown error'; ResponseHandler.error(res, 'Validation error', 400, errorMessage); @@ -76,12 +64,8 @@ export const validateParams = (schema: ZodSchema) => { next(); } catch (error) { if (error instanceof ZodError) { - const errorMessages = error.errors.map(err => ({ - field: err.path.join('.'), - message: err.message, - })); - - ResponseHandler.validationError(res, 'URL parameters validation failed', errorMessages); + const errorMessage = error.errors.map(err => err.message).join(', '); + ResponseHandler.validationError(res, 'URL parameters validation failed', errorMessage); } else { const errorMessage = error instanceof Error ? error.message : 'Unknown error'; ResponseHandler.error(res, 'Validation error', 400, errorMessage); diff --git a/src/migrations/20260316-update-holiday-type-enum.ts b/src/migrations/20260316-update-holiday-type-enum.ts new file mode 100644 index 0000000..9c384b6 --- /dev/null +++ b/src/migrations/20260316-update-holiday-type-enum.ts @@ -0,0 +1,38 @@ +import { QueryInterface } from 'sequelize'; + +/** + * Migration to ensure 'ORGANIZATIONAL' exists in the holiday_type enum + * and set 'ORGANIZATIONAL' as the default value for the holiday_type column. + */ +export async function up(queryInterface: QueryInterface): Promise { + // 1.
Add 'ORGANIZATIONAL' to the enum_holidays_holiday_type enum type if it doesn't exist + // PostgreSQL doesn't support IF NOT EXISTS for ALTER TYPE ADD VALUE, + // so we check if it exists first using a PL/pgSQL block + await queryInterface.sequelize.query(` + DO $$ + BEGIN + IF NOT EXISTS ( + SELECT 1 FROM pg_enum + WHERE enumlabel = 'ORGANIZATIONAL' + AND enumtypid = (SELECT oid FROM pg_type WHERE typname = 'enum_holidays_holiday_type') + ) THEN + ALTER TYPE enum_holidays_holiday_type ADD VALUE 'ORGANIZATIONAL'; + END IF; + END$$; + `); + + // 2. Set 'ORGANIZATIONAL' as the default value for the holiday_type column + await queryInterface.sequelize.query(` + ALTER TABLE "holidays" ALTER COLUMN "holiday_type" SET DEFAULT 'ORGANIZATIONAL'; + `); +} + +export async function down(queryInterface: QueryInterface): Promise { + // PostgreSQL doesn't support removing enum values directly. + // We can revert the default value back to 'NATIONAL' if needed. + await queryInterface.sequelize.query(` + ALTER TABLE "holidays" ALTER COLUMN "holiday_type" SET DEFAULT 'NATIONAL'; + `); + + console.log('[Migration] Note: Cannot remove enum values in PostgreSQL. ORGANIZATIONAL will remain in enum_holidays_holiday_type.'); +} diff --git a/src/routes/admin.routes.ts b/src/routes/admin.routes.ts index ab678d9..7851457 100644 --- a/src/routes/admin.routes.ts +++ b/src/routes/admin.routes.ts @@ -15,6 +15,7 @@ import { createActivityTypeSchema, updateActivityTypeSchema, activityTypeParamsSchema, + updateForm16ConfigSchema, } from '../validators/admin.validator'; import { getAllHolidays, @@ -136,7 +137,7 @@ router.get('/form16-config', getForm16Config); * @body { submissionViewerEmails?, twentySixAsViewerEmails?, reminderEnabled?, reminderDays?
} * @access Admin */ -router.put('/form16-config', putForm16Config); +router.put('/form16-config', validateBody(updateForm16ConfigSchema), putForm16Config); // ==================== User Role Management Routes (RBAC) ==================== diff --git a/src/routes/workflow.routes.ts b/src/routes/workflow.routes.ts index 7b9f61e..92b8261 100644 --- a/src/routes/workflow.routes.ts +++ b/src/routes/workflow.routes.ts @@ -18,9 +18,8 @@ import { notificationService } from '../services/notification.service'; import { Activity } from '@models/Activity'; import { WorkflowService } from '../services/workflow.service'; import { WorkNoteController } from '../controllers/worknote.controller'; -import { workNoteService } from '../services/worknote.service'; +import { documentController } from '../controllers/document.controller'; import { pauseController } from '../controllers/pause.controller'; -import logger from '@utils/logger'; const router = Router(); @@ -232,505 +231,25 @@ router.post('/:id/work-notes', // Preview workflow document router.get('/documents/:documentId/preview', authenticateToken, - asyncHandler(async (req: any, res: Response) => { - const { documentId } = req.params; - const { Document } = require('@models/Document'); - const { gcsStorageService } = require('../services/gcsStorage.service'); - const fs = require('fs'); - - const document = await Document.findOne({ where: { documentId } }); - if (!document) { - res.status(404).json({ success: false, error: 'Document not found' }); - return; - } - - const storageUrl = (document as any).storageUrl || (document as any).storage_url; - const filePath = (document as any).filePath || (document as any).file_path; - const fileName = (document as any).originalFileName || (document as any).original_file_name || (document as any).fileName; - const fileType = (document as any).mimeType || (document as any).mime_type; - - // Check if it's a GCS URL - const isGcsUrl = storageUrl && 
(storageUrl.startsWith('https://storage.googleapis.com') || storageUrl.startsWith('gs://')); - - if (isGcsUrl) { - // Redirect to GCS public URL or use signed URL for private files - res.redirect(storageUrl); - return; - } - - // If storageUrl is null but filePath indicates GCS storage, stream file directly from GCS - if (!storageUrl && filePath && filePath.startsWith('requests/')) { - try { - // Use the existing GCS storage service instance - if (!gcsStorageService.isConfigured()) { - throw new Error('GCS not configured'); - } - - // Access the storage instance from the service - const { Storage } = require('@google-cloud/storage'); - const keyFilePath = process.env.GCP_KEY_FILE || ''; - const bucketName = process.env.GCP_BUCKET_NAME || ''; - const path = require('path'); - const resolvedKeyPath = path.isAbsolute(keyFilePath) - ? keyFilePath - : path.resolve(process.cwd(), keyFilePath); - - const storage = new Storage({ - projectId: process.env.GCP_PROJECT_ID || '', - keyFilename: resolvedKeyPath, - }); - - const bucket = storage.bucket(bucketName); - const file = bucket.file(filePath); - - // Check if file exists - const [exists] = await file.exists(); - if (!exists) { - res.status(404).json({ success: false, error: 'File not found in GCS' }); - return; - } - - // Get file metadata for content type - const [metadata] = await file.getMetadata(); - const contentType = metadata.contentType || fileType || 'application/octet-stream'; - - // Set CORS headers - const origin = req.headers.origin; - if (origin) { - res.setHeader('Access-Control-Allow-Origin', origin); - res.setHeader('Access-Control-Allow-Credentials', 'true'); - } - res.setHeader('Access-Control-Expose-Headers', 'Content-Type, Content-Disposition'); - res.setHeader('Content-Type', contentType); - - // For images and PDFs, allow inline viewing - const isPreviewable = fileType && (fileType.includes('image') || fileType.includes('pdf')); - const disposition = isPreviewable ? 
'inline' : 'attachment'; - res.setHeader('Content-Disposition', createContentDisposition(disposition, fileName)); - - // Stream file from GCS to response - file.createReadStream() - .on('error', (streamError: Error) => { - const logger = require('../utils/logger').default; - logger.error('[Workflow] Failed to stream file from GCS', { - documentId, - filePath, - error: streamError.message, - }); - if (!res.headersSent) { - res.status(500).json({ - success: false, - error: 'Failed to stream file from storage' - }); - } - }) - .pipe(res); - return; - } catch (gcsError) { - const logger = require('../utils/logger').default; - logger.error('[Workflow] Failed to access GCS file for preview', { - documentId, - filePath, - error: gcsError instanceof Error ? gcsError.message : 'Unknown error', - }); - res.status(500).json({ - success: false, - error: 'Failed to access file. Please try again.' - }); - return; - } - } - - // Local file handling - check if storageUrl is a local path (starts with /uploads/) - if (storageUrl && storageUrl.startsWith('/uploads/')) { - // Extract relative path from storageUrl (remove /uploads/ prefix) - const relativePath = storageUrl.replace(/^\/uploads\//, ''); - const absolutePath = path.join(UPLOAD_DIR, relativePath); - - // Check if file exists - if (!fs.existsSync(absolutePath)) { - res.status(404).json({ success: false, error: 'File not found on server' }); - return; - } - - // Set CORS headers to allow blob URL creation when served from same origin - const origin = req.headers.origin; - if (origin) { - res.setHeader('Access-Control-Allow-Origin', origin); - res.setHeader('Access-Control-Allow-Credentials', 'true'); - } - res.setHeader('Access-Control-Expose-Headers', 'Content-Type, Content-Disposition'); - - // Set appropriate content type - res.contentType(fileType || 'application/octet-stream'); - - // For images and PDFs, allow inline viewing - const isPreviewable = fileType && (fileType.includes('image') || fileType.includes('pdf')); - 
if (isPreviewable) { - res.setHeader('Content-Disposition', `inline; filename="${fileName}"`); - } else { - res.setHeader('Content-Disposition', `attachment; filename="${fileName}"`); - } - - res.sendFile(absolutePath, (err) => { - if (err && !res.headersSent) { - res.status(500).json({ success: false, error: 'Failed to serve file' }); - } - }); - return; - } - - // Legacy local file handling (absolute path stored in filePath) - // Resolve relative path if needed - const absolutePath = filePath && !path.isAbsolute(filePath) - ? path.join(UPLOAD_DIR, filePath) - : filePath; - - if (!absolutePath || !fs.existsSync(absolutePath)) { - res.status(404).json({ success: false, error: 'File not found on server' }); - return; - } - - // Set CORS headers to allow blob URL creation when served from same origin - const origin = req.headers.origin; - if (origin) { - res.setHeader('Access-Control-Allow-Origin', origin); - res.setHeader('Access-Control-Allow-Credentials', 'true'); - } - res.setHeader('Access-Control-Expose-Headers', 'Content-Type, Content-Disposition'); - - // Set appropriate content type - res.contentType(fileType || 'application/octet-stream'); - - // For images and PDFs, allow inline viewing - const isPreviewable = fileType && (fileType.includes('image') || fileType.includes('pdf')); - if (isPreviewable) { - res.setHeader('Content-Disposition', `inline; filename="${fileName}"`); - } else { - res.setHeader('Content-Disposition', `attachment; filename="${fileName}"`); - } - - res.sendFile(absolutePath, (err) => { - if (err && !res.headersSent) { - res.status(500).json({ success: false, error: 'Failed to serve file' }); - } - }); - }) + asyncHandler((req: any, res: Response) => documentController.getWorkflowDocument(req, res, 'preview')) ); // Download workflow document router.get('/documents/:documentId/download', authenticateToken, - asyncHandler(async (req: any, res: Response) => { - const { documentId } = req.params; - const { Document } = 
require('@models/Document'); - const { gcsStorageService } = require('../services/gcsStorage.service'); - const fs = require('fs'); - - const document = await Document.findOne({ where: { documentId } }); - if (!document) { - res.status(404).json({ success: false, error: 'Document not found' }); - return; - } - - const storageUrl = (document as any).storageUrl || (document as any).storage_url; - const filePath = (document as any).filePath || (document as any).file_path; - const fileName = (document as any).originalFileName || (document as any).original_file_name || (document as any).fileName; - - // Check if it's a GCS URL - const isGcsUrl = storageUrl && (storageUrl.startsWith('https://storage.googleapis.com') || storageUrl.startsWith('gs://')); - - if (isGcsUrl) { - // Redirect to GCS public URL for download - res.redirect(storageUrl); - return; - } - - // If storageUrl is null but filePath indicates GCS storage, stream file directly from GCS - if (!storageUrl && filePath && filePath.startsWith('requests/')) { - try { - // Use the existing GCS storage service instance - if (!gcsStorageService.isConfigured()) { - throw new Error('GCS not configured'); - } - - // Access the storage instance from the service - const { Storage } = require('@google-cloud/storage'); - const keyFilePath = process.env.GCP_KEY_FILE || ''; - const bucketName = process.env.GCP_BUCKET_NAME || ''; - const path = require('path'); - const resolvedKeyPath = path.isAbsolute(keyFilePath) - ? 
keyFilePath - : path.resolve(process.cwd(), keyFilePath); - - const storage = new Storage({ - projectId: process.env.GCP_PROJECT_ID || '', - keyFilename: resolvedKeyPath, - }); - - const bucket = storage.bucket(bucketName); - const file = bucket.file(filePath); - - // Check if file exists - const [exists] = await file.exists(); - if (!exists) { - res.status(404).json({ success: false, error: 'File not found in GCS' }); - return; - } - - // Get file metadata for content type - const [metadata] = await file.getMetadata(); - const contentType = metadata.contentType || (document as any).mimeType || (document as any).mime_type || 'application/octet-stream'; - - // Set CORS headers - const origin = req.headers.origin; - if (origin) { - res.setHeader('Access-Control-Allow-Origin', origin); - res.setHeader('Access-Control-Allow-Credentials', 'true'); - } - res.setHeader('Access-Control-Expose-Headers', 'Content-Type, Content-Disposition'); - - // Set headers for download - res.setHeader('Content-Type', contentType); - res.setHeader('Content-Disposition', createContentDisposition('attachment', fileName)); - - // Stream file from GCS to response - file.createReadStream() - .on('error', (streamError: Error) => { - const logger = require('../utils/logger').default; - logger.error('[Workflow] Failed to stream file from GCS for download', { - documentId, - filePath, - error: streamError.message, - }); - if (!res.headersSent) { - res.status(500).json({ - success: false, - error: 'Failed to stream file from storage' - }); - } - }) - .pipe(res); - return; - } catch (gcsError) { - const logger = require('../utils/logger').default; - logger.error('[Workflow] Failed to access GCS file for download', { - documentId, - filePath, - error: gcsError instanceof Error ? gcsError.message : 'Unknown error', - }); - res.status(500).json({ - success: false, - error: 'Failed to access file. Please try again.' 
- }); - return; - } - } - - // Local file handling - check if storageUrl is a local path (starts with /uploads/) - if (storageUrl && storageUrl.startsWith('/uploads/')) { - // Extract relative path from storageUrl (remove /uploads/ prefix) - const relativePath = storageUrl.replace(/^\/uploads\//, ''); - const absolutePath = path.join(UPLOAD_DIR, relativePath); - - // Check if file exists - if (!fs.existsSync(absolutePath)) { - res.status(404).json({ success: false, error: 'File not found on server' }); - return; - } - - // Set CORS headers - const origin = req.headers.origin; - if (origin) { - res.setHeader('Access-Control-Allow-Origin', origin); - res.setHeader('Access-Control-Allow-Credentials', 'true'); - } - res.setHeader('Access-Control-Expose-Headers', 'Content-Type, Content-Disposition'); - - // Set headers for download - const fileTypeForDownload = (document as any).mimeType || (document as any).mime_type || 'application/octet-stream'; - res.setHeader('Content-Type', fileTypeForDownload); - res.setHeader('Content-Disposition', createContentDisposition('attachment', fileName)); - - res.download(absolutePath, fileName, (err) => { - if (err && !res.headersSent) { - res.status(500).json({ success: false, error: 'Failed to download file' }); - } - }); - return; - } - - // Legacy local file handling (absolute path stored in filePath) - // Resolve relative path if needed - const absolutePath = filePath && !path.isAbsolute(filePath) - ? 
path.join(UPLOAD_DIR, filePath) - : filePath; - - if (!absolutePath || !fs.existsSync(absolutePath)) { - res.status(404).json({ success: false, error: 'File not found on server' }); - return; - } - - res.download(absolutePath, fileName, (err) => { - if (err && !res.headersSent) { - res.status(500).json({ success: false, error: 'Failed to download file' }); - } - }); - }) + asyncHandler((req: any, res: Response) => documentController.getWorkflowDocument(req, res, 'download')) ); -// Preview work note attachment (serves file for inline viewing) +// Preview work note attachment router.get('/work-notes/attachments/:attachmentId/preview', authenticateToken, - asyncHandler(async (req: any, res: Response) => { - const { attachmentId } = req.params; - const fileInfo = await workNoteService.downloadAttachment(attachmentId); - const fs = require('fs'); - - // Check if it's a GCS URL - if (fileInfo.isGcsUrl && fileInfo.storageUrl) { - // Redirect to GCS public URL - res.redirect(fileInfo.storageUrl); - return; - } - - // Local file handling - check if storageUrl is a local path (starts with /uploads/) - if (fileInfo.storageUrl && fileInfo.storageUrl.startsWith('/uploads/')) { - // Extract relative path from storageUrl (remove /uploads/ prefix) - const relativePath = fileInfo.storageUrl.replace(/^\/uploads\//, ''); - const absolutePath = path.join(UPLOAD_DIR, relativePath); - - // Check if file exists - if (!fs.existsSync(absolutePath)) { - res.status(404).json({ success: false, error: 'File not found' }); - return; - } - - // Set CORS headers to allow blob URL creation when served from same origin - const origin = req.headers.origin; - if (origin) { - res.setHeader('Access-Control-Allow-Origin', origin); - res.setHeader('Access-Control-Allow-Credentials', 'true'); - } - res.setHeader('Access-Control-Expose-Headers', 'Content-Type, Content-Disposition'); - - // Set appropriate content type - res.contentType(fileInfo.fileType || 'application/octet-stream'); - - // For images 
and PDFs, allow inline viewing - const isPreviewable = fileInfo.fileType && (fileInfo.fileType.includes('image') || fileInfo.fileType.includes('pdf')); - if (isPreviewable) { - res.setHeader('Content-Disposition', `inline; filename="${fileInfo.fileName}"`); - } else { - res.setHeader('Content-Disposition', `attachment; filename="${fileInfo.fileName}"`); - } - - res.sendFile(absolutePath, (err) => { - if (err && !res.headersSent) { - res.status(500).json({ success: false, error: 'Failed to serve file' }); - } - }); - return; - } - - // Legacy local file handling (absolute path stored in filePath) - // Resolve relative path if needed - const absolutePath = fileInfo.filePath && !path.isAbsolute(fileInfo.filePath) - ? path.join(UPLOAD_DIR, fileInfo.filePath) - : fileInfo.filePath; - - if (!absolutePath || !fs.existsSync(absolutePath)) { - res.status(404).json({ success: false, error: 'File not found' }); - return; - } - - // Set CORS headers to allow blob URL creation when served from same origin - const origin = req.headers.origin; - if (origin) { - res.setHeader('Access-Control-Allow-Origin', origin); - res.setHeader('Access-Control-Allow-Credentials', 'true'); - } - res.setHeader('Access-Control-Expose-Headers', 'Content-Type, Content-Disposition'); - - // Set appropriate content type - res.contentType(fileInfo.fileType || 'application/octet-stream'); - - // For images and PDFs, allow inline viewing - const isPreviewable = fileInfo.fileType && (fileInfo.fileType.includes('image') || fileInfo.fileType.includes('pdf')); - if (isPreviewable) { - res.setHeader('Content-Disposition', `inline; filename="${fileInfo.fileName}"`); - } else { - res.setHeader('Content-Disposition', `attachment; filename="${fileInfo.fileName}"`); - } - - res.sendFile(absolutePath, (err) => { - if (err && !res.headersSent) { - res.status(500).json({ success: false, error: 'Failed to serve file' }); - } - }); - }) + asyncHandler((req: any, res: Response) => 
documentController.getWorkNoteAttachment(req, res, 'preview')) ); // Download work note attachment router.get('/work-notes/attachments/:attachmentId/download', authenticateToken, - asyncHandler(async (req: any, res: Response) => { - const { attachmentId } = req.params; - const fileInfo = await workNoteService.downloadAttachment(attachmentId); - const fs = require('fs'); - - // Check if it's a GCS URL - if (fileInfo.isGcsUrl && fileInfo.storageUrl) { - // Redirect to GCS public URL for download - res.redirect(fileInfo.storageUrl); - return; - } - - // Local file handling - check if storageUrl is a local path (starts with /uploads/) - if (fileInfo.storageUrl && fileInfo.storageUrl.startsWith('/uploads/')) { - // Extract relative path from storageUrl (remove /uploads/ prefix) - const relativePath = fileInfo.storageUrl.replace(/^\/uploads\//, ''); - const absolutePath = path.join(UPLOAD_DIR, relativePath); - - // Check if file exists - if (!fs.existsSync(absolutePath)) { - res.status(404).json({ success: false, error: 'File not found' }); - return; - } - - // Set CORS headers - const origin = req.headers.origin; - if (origin) { - res.setHeader('Access-Control-Allow-Origin', origin); - res.setHeader('Access-Control-Allow-Credentials', 'true'); - } - res.setHeader('Access-Control-Expose-Headers', 'Content-Type, Content-Disposition'); - - res.download(absolutePath, fileInfo.fileName, (err) => { - if (err && !res.headersSent) { - res.status(500).json({ success: false, error: 'Failed to download file' }); - } - }); - return; - } - - // Legacy local file handling (absolute path stored in filePath) - // Resolve relative path if needed - const absolutePath = fileInfo.filePath && !path.isAbsolute(fileInfo.filePath) - ? 
path.join(UPLOAD_DIR, fileInfo.filePath) - : fileInfo.filePath; - - if (!absolutePath || !fs.existsSync(absolutePath)) { - res.status(404).json({ success: false, error: 'File not found' }); - return; - } - - res.download(absolutePath, fileInfo.fileName, (err) => { - if (err && !res.headersSent) { - res.status(500).json({ success: false, error: 'Failed to download file' }); - } - }); - }) + asyncHandler((req: any, res: Response) => documentController.getWorkNoteAttachment(req, res, 'download')) ); // Add participant routes diff --git a/src/scripts/auto-setup.ts b/src/scripts/auto-setup.ts index 5ad747e..df3322d 100644 --- a/src/scripts/auto-setup.ts +++ b/src/scripts/auto-setup.ts @@ -174,6 +174,7 @@ async function runMigrations(): Promise { const m57 = require('../migrations/20260225100001-add-form16-archived-at'); const m58 = require('../migrations/20260303100001-drop-form16a-number-unique'); const m59 = require('../migrations/20260309-add-wfm-push-fields'); + const m60 = require('../migrations/20260316-update-holiday-type-enum'); const migrations = [ { name: '2025103000-create-users', module: m0 }, @@ -240,6 +241,7 @@ async function runMigrations(): Promise { { name: '20260225100001-add-form16-archived-at', module: m57 }, { name: '20260303100001-drop-form16a-number-unique', module: m58 }, { name: '20260309-add-wfm-push-fields', module: m59 }, + { name: '20260316-update-holiday-type-enum', module: m60 }, ]; // Dynamically import sequelize after secrets are loaded diff --git a/src/scripts/migrate.ts b/src/scripts/migrate.ts index e388362..ac4f6d2 100644 --- a/src/scripts/migrate.ts +++ b/src/scripts/migrate.ts @@ -64,6 +64,7 @@ import * as m56 from '../migrations/20260225000001-create-form16-non-submitted-n import * as m57 from '../migrations/20260225100001-add-form16-archived-at'; import * as m58 from '../migrations/20260303100001-drop-form16a-number-unique'; import * as m59 from '../migrations/20260309-add-wfm-push-fields'; +import * as m60 from 
'../migrations/20260316-update-holiday-type-enum'; interface Migration { name: string; @@ -135,6 +136,7 @@ const migrations: Migration[] = [ { name: '20260225100001-add-form16-archived-at', module: m57 }, { name: '20260303100001-drop-form16a-number-unique', module: m58 }, { name: '20260309-add-wfm-push-fields', module: m59 }, + { name: '20260316-update-holiday-type-enum', module: m60 }, ]; diff --git a/src/services/dealerClaim.service.ts b/src/services/dealerClaim.service.ts index 5dad842..3ff7639 100644 --- a/src/services/dealerClaim.service.ts +++ b/src/services/dealerClaim.service.ts @@ -32,6 +32,7 @@ import { activityService } from './activity.service'; import { UserService } from './user.service'; import { dmsIntegrationService } from './dmsIntegration.service'; import { validateDealerUser } from './userEnrichment.service'; +import { sanitizeStrict, sanitizePermissive, isHtmlEmpty } from '../utils/sanitizer'; // findDealerLocally removed (duplicate) @@ -100,7 +101,26 @@ export class DealerClaimService { }>; } ): Promise { + const transaction = await sequelize.transaction(); try { + // 1. Sanitize user inputs + const sanitizedName = sanitizeStrict(claimData.activityName); + const sanitizedLocation = sanitizeStrict(claimData.location); + const sanitizedDesc = sanitizePermissive(claimData.requestDescription); + + // Verify that sanitization didn't empty the required fields + if (!sanitizedName) { + throw new Error('Activity Name is required and must contain valid text'); + } + if (!sanitizedLocation) { + throw new Error('Location is required and must contain valid text'); + } + if (isHtmlEmpty(sanitizedDesc)) { + throw new Error('Description is required and must contain valid text'); + } + + // 2. Map and validate dealer user + const dealerCode = claimData.dealerCode; // 0. 
Validate Dealer User (jobTitle='Dealer' and employeeId=dealerCode) logger.info(`[DealerClaimService] Validating dealer for code: ${claimData.dealerCode}`); const dealerUser = await validateDealerUser(claimData.dealerCode); @@ -229,49 +249,49 @@ export class DealerClaimService { } } - const workflowService = this.getWorkflowService(); - const workflowRequest = await workflowService.createWorkflow(userId, { - templateType: 'DEALER CLAIM' as any, + const workflow = await this.getWorkflowService().createWorkflow(userId, { + templateType: 'DEALER CLAIM', workflowType: 'CLAIM_MANAGEMENT', - title: `${claimData.activityName} - Claim Request`, - description: claimData.requestDescription, - priority: Priority.STANDARD, + title: `Dealer Claim: ${sanitizedName} (${dealerCode})`, + description: sanitizedDesc, + priority: (claimData as any).priority || Priority.STANDARD, approvalLevels: transformedLevels, participants: transformedParticipants, isDraft: false - } as any); + } as any, { transaction, ipAddress: null, userAgent: 'System/DealerClaimService' }); // Create claim details - await DealerClaimDetails.create({ - requestId: workflowRequest.requestId, - activityName: claimData.activityName, + const claimDetails = await DealerClaimDetails.create({ + requestId: workflow.requestId, + activityName: sanitizedName, activityType: claimData.activityType, - dealerCode: claimData.dealerCode, + dealerCode: dealerCode, dealerName: claimData.dealerName, dealerEmail: claimData.dealerEmail, dealerPhone: claimData.dealerPhone, dealerAddress: claimData.dealerAddress, activityDate: claimData.activityDate, - location: claimData.location, + location: sanitizedLocation, periodStartDate: claimData.periodStartDate, periodEndDate: claimData.periodEndDate, - }); + } as any, { transaction }); // Initialize budget tracking with initial estimated budget (if provided) await ClaimBudgetTracking.upsert({ - requestId: workflowRequest.requestId, + requestId: workflow.requestId, initialEstimatedBudget: 
claimData.estimatedBudget, budgetStatus: BudgetStatus.DRAFT, currency: 'INR', - }); + }, { transaction }); - // Redundant level creation removed - handled by workflowService.createWorkflow + // 3. Commit transaction + await transaction.commit(); - // Redundant TAT scheduling removed - handled by workflowService.createWorkflow - - logger.info(`[DealerClaimService] Created claim request: ${workflowRequest.requestNumber}`); - return workflowRequest; + logger.info(`[DealerClaimService] Created claim request: ${workflow.requestNumber}`); + return workflow; } catch (error: any) { + // Rollback transaction on error + if (transaction) await transaction.rollback(); // Log detailed error information for debugging const errorDetails: any = { message: error.message, diff --git a/src/services/workflow.service.ts b/src/services/workflow.service.ts index b34c5d9..2fb22c5 100644 --- a/src/services/workflow.service.ts +++ b/src/services/workflow.service.ts @@ -18,7 +18,7 @@ import { notificationService } from './notification.service'; import { activityService } from './activity.service'; import { tatSchedulerService } from './tatScheduler.service'; import { emitToRequestRoom } from '../realtime/socket'; -import { sanitizeHtml } from '@utils/sanitizer'; +import { sanitizeStrict, sanitizePermissive, isHtmlEmpty } from '../utils/sanitizer'; import { canViewForm16Submission } from './form16Permission.service'; export class WorkflowService { @@ -918,9 +918,9 @@ export class WorkflowService { const submissionIds = form16Rows.map((r: any) => r.id); const creditNotes = submissionIds.length ? 
await Form16CreditNote.findAll({ - where: { submissionId: submissionIds }, - attributes: ['submissionId', 'creditNoteNumber'], - }) + where: { submissionId: submissionIds }, + attributes: ['submissionId', 'creditNoteNumber'], + }) : []; const cnBySubId = new Map(); for (const c of creditNotes as any[]) { @@ -2684,30 +2684,46 @@ export class WorkflowService { } }; } - async createWorkflow(initiatorId: string, workflowData: CreateWorkflowRequest, requestMetadata?: { ipAddress?: string | null; userAgent?: string | null }): Promise { + async createWorkflow( + initiatorId: string, + workflowData: CreateWorkflowRequest, + requestMetadata?: { ipAddress?: string | null; userAgent?: string | null; transaction?: any } + ): Promise { try { const requestNumber = await generateRequestNumber(); const totalTatHours = workflowData.approvalLevels.reduce((sum, level) => sum + level.tatHours, 0); const isDraftRequested = workflowData.isDraft === true; const initialStatus = isDraftRequested ? WorkflowStatus.DRAFT : WorkflowStatus.PENDING; const now = new Date(); + const transaction = requestMetadata?.transaction; + + const sanitizedTitle = workflowData.title ? sanitizeStrict(workflowData.title) : workflowData.title; + const sanitizedDescription = workflowData.description ? sanitizePermissive(workflowData.description) : workflowData.description; + + if (!isDraftRequested && !sanitizedTitle) { + throw new Error('A valid title is required. Please ensure the title contains valid content.'); + } + + if (!isDraftRequested && (!sanitizedDescription || isHtmlEmpty(sanitizedDescription))) { + throw new Error('A valid description is required. Please ensure the description contains valid content.'); + } const workflow = await WorkflowRequest.create({ requestNumber, initiatorId, templateType: workflowData.templateType, workflowType: workflowData.workflowType || 'NON_TEMPLATIZED', - title: workflowData.title ? 
sanitizeHtml(workflowData.title) : workflowData.title, - description: workflowData.description ? sanitizeHtml(workflowData.description) : workflowData.description, + title: sanitizedTitle, + description: sanitizedDescription, priority: workflowData.priority, + status: initialStatus, currentLevel: 1, totalLevels: workflowData.approvalLevels.length, totalTatHours, - status: initialStatus, isDraft: isDraftRequested, isDeleted: false, submissionDate: isDraftRequested ? undefined : now - }); + }, { transaction }); // Create approval levels if skipCreation is false if (!workflowData.skipCreation) { @@ -2726,7 +2742,7 @@ export class WorkflowService { remainingHours: levelData.tatHours, tatPercentageUsed: 0, isFinalApprover: levelData.isFinalApprover || false - }); + }, { transaction }); } } @@ -2771,7 +2787,7 @@ export class WorkflowService { notificationEnabled: participantData.notificationEnabled ?? true, addedBy: initiatorId, isActive: true - }); + }, { transaction }); } } @@ -3462,10 +3478,10 @@ export class WorkflowService { const previousRequestIdSet = new Set(previousRequestIds); const previousWorkflows = previousRequestIds.length > 0 ? await WorkflowRequest.findAll({ - where: { requestId: { [Op.in]: previousRequestIds } }, - attributes: ['requestId', 'requestNumber'], - raw: true, - }) as any[] + where: { requestId: { [Op.in]: previousRequestIds } }, + attributes: ['requestId', 'requestNumber'], + raw: true, + }) as any[] : []; const requestNumberByRequestId = new Map(); for (const w of previousWorkflows) { @@ -3476,10 +3492,10 @@ export class WorkflowService { const submissionIdsPrev = previousRows.map((r) => r.id); const creditNotesPrev = submissionIdsPrev.length > 0 ? 
await Form16CreditNote.findAll({ - where: { submissionId: submissionIdsPrev }, - attributes: ['submissionId', 'creditNoteNumber', 'issueDate'], - raw: true, - }) as any[] + where: { submissionId: submissionIdsPrev }, + attributes: ['submissionId', 'creditNoteNumber', 'issueDate'], + raw: true, + }) as any[] : []; const cnBySubId = new Map(); for (const c of creditNotesPrev) { @@ -3540,10 +3556,13 @@ export class WorkflowService { try { // Sanitize title and description if provided if (updateData.title) { - updateData.title = sanitizeHtml(updateData.title); + updateData.title = sanitizeStrict(updateData.title); } if (updateData.description) { - updateData.description = sanitizeHtml(updateData.description); + updateData.description = sanitizePermissive(updateData.description); + } + if (updateData.conclusionRemark) { + updateData.conclusionRemark = sanitizePermissive(updateData.conclusionRemark); } const workflow = await this.findWorkflowByIdentifier(requestId); diff --git a/src/services/worknote.service.ts b/src/services/worknote.service.ts index 1ef9ea3..1111143 100644 --- a/src/services/worknote.service.ts +++ b/src/services/worknote.service.ts @@ -12,7 +12,7 @@ import { gcsStorageService } from './gcsStorage.service'; import logger from '@utils/logger'; import fs from 'fs'; import path from 'path'; -import { sanitizeHtml } from '@utils/sanitizer'; +import { sanitizeStrict, sanitizePermissive, isHtmlEmpty } from '../utils/sanitizer'; export class WorkNoteService { async list(requestId: string) { @@ -81,12 +81,17 @@ export class WorkNoteService { async create(requestId: string, user: { userId: string; name?: string; role?: string }, payload: { message: string; isPriority?: boolean; parentNoteId?: string | null; mentionedUsers?: string[] | null; }, files?: Array<{ path?: string | null; buffer?: Buffer; originalname: string; mimetype: string; size: number }>, requestMetadata?: { ipAddress?: string | null; userAgent?: string | null }): Promise { 
logger.info('[WorkNote] Creating note:', { requestId, user, messageLength: payload.message?.length }); + const sanitizedMessage = sanitizeStrict(payload.message); + if (isHtmlEmpty(sanitizedMessage)) { + throw new Error('Message is required and must contain valid text'); + } + const note = await WorkNote.create({ requestId, userId: user.userId, userName: user.name || null, userRole: user.role || null, // Store participant type (INITIATOR/APPROVER/SPECTATOR) - message: sanitizeHtml(payload.message), + message: sanitizedMessage, isPriority: !!payload.isPriority, parentNoteId: payload.parentNoteId || null, mentionedUsers: payload.mentionedUsers || null, diff --git a/src/utils/logger.ts b/src/utils/logger.ts index 2fd0ec7..da260e3 100644 --- a/src/utils/logger.ts +++ b/src/utils/logger.ts @@ -180,6 +180,7 @@ const sanitizeError = (error: any): object => { name: error.name, message: error.message, stack: error.stack, + errors: (error as any).errors, ...(error as any).statusCode && { statusCode: (error as any).statusCode }, }; } diff --git a/src/utils/sanitizer.ts b/src/utils/sanitizer.ts index e65facb..b3d318d 100644 --- a/src/utils/sanitizer.ts +++ b/src/utils/sanitizer.ts @@ -10,6 +10,15 @@ import { FilterXSS, whiteList } from 'xss'; export const sanitizeHtml = (html: string): string => { if (!html) return ''; + // Decode basic entities to prevent bypasses (e.g. 
&lt;script&gt;)
+  // and ensuring we catch them during tag stripping
+  const decoded = html
+    .replace(/&lt;/g, '<')
+    .replace(/&gt;/g, '>')
+    .replace(/&quot;/g, '"')
+    .replace(/&#39;/g, "'")
+    .replace(/&amp;/g, '&');
+
   // Custom options can be added here if we need to allow specific tags or attributes
   // For now, using default options which are quite secure
   // Custom options to restrict allowed tags
@@ -70,29 +79,79 @@ export const sanitizeHtml = (html: string): string => {
   };
 
   const xssFilter = new FilterXSS(options);
-  return xssFilter.process(html);
+  return xssFilter.process(decoded);
+};
+
+/**
+ * STRICT Sanitization: Removes ALL HTML tags.
+ * Use for titles, names, IDs, etc.
+ *
+ * @param text The raw string to sanitize
+ * @returns The sanitized string with all tags removed
+ */
+export const sanitizeStrict = (text: string): string => {
+  if (!text) return '';
+
+  // Decode basic entities to prevent bypasses
+  const decoded = text
+    .replace(/&lt;/g, '<')
+    .replace(/&gt;/g, '>')
+    .replace(/&quot;/g, '"')
+    .replace(/&#39;/g, "'")
+    .replace(/&amp;/g, '&');
+
+  const options = {
+    whiteList: {}, // No tags allowed
+    stripIgnoreTag: true,
+    stripIgnoreTagBody: ['script', 'style', 'iframe', 'object', 'embed']
+  };
+
+  const xssFilter = new FilterXSS(options);
+  return xssFilter.process(decoded);
+};
+
+/**
+ * PERMISSIVE Sanitization: Allows safe formatting tags.
+ * Use for rich-text fields (descriptions, comments).
+ * Alias for the existing sanitizeHtml.
+ *
+ * @param html The raw HTML string to sanitize
+ * @returns The sanitized HTML string
+ */
+export const sanitizePermissive = (html: string): string => {
+  return sanitizeHtml(html);
 };
 
 /**
  * Sanitizes an object by recursively sanitizing all string properties.
  * Useful for sanitizing request bodies or complex nested structures.
  *
+ * Uses field names to decide between strict and permissive sanitization.
+ *
+ * @param obj The object to sanitize
+ * @param richTextFields Set of keys that should use permissive sanitization
+ * @returns The sanitized object
  */
-export const sanitizeObject = <T>(obj: T): T => {
+export const sanitizeObject = <T>(obj: T, richTextFields: Set<string> = new Set([
+  'description', 'requestDescription', 'comments', 'remarks', 'message', 'finalRemark',
+  'aiGeneratedRemark', 'remark', 'closingRemarks', 'alertSubmitForm16Template',
+  'reminderNotificationTemplate', 'templateRe', 'templateDealers', 'template'
+])): T => {
   if (!obj || typeof obj !== 'object') return obj;
 
   if (Array.isArray(obj)) {
-    return obj.map(item => sanitizeObject(item)) as any;
+    return obj.map(item => sanitizeObject(item, richTextFields)) as any;
   }
 
   const sanitized: any = {};
   for (const [key, value] of Object.entries(obj)) {
     if (typeof value === 'string') {
-      sanitized[key] = sanitizeHtml(value);
+      const isRichText = richTextFields.has(key);
+      const sanitizedValue = isRichText ? sanitizePermissive(value) : sanitizeStrict(value);
+      // If it's rich text and became effectively empty, keep it empty string for validation
+      sanitized[key] = (isRichText && isHtmlEmpty(sanitizedValue)) ? '' : sanitizedValue;
     } else if (typeof value === 'object' && value !== null) {
-      sanitized[key] = sanitizeObject(value);
+      sanitized[key] = sanitizeObject(value, richTextFields);
     } else {
       sanitized[key] = value;
     }
@@ -100,3 +159,30 @@ export const sanitizeObject = <T>(obj: T): T => {
 
   return sanitized as T;
 };
+
+/**
+ * Checks if HTML content is effectively empty (i.e., only whitespace or empty tags).
+ * Useful for validating required rich-text fields after sanitization.
+ *
+ * @param html The sanitized HTML string to check
+ * @returns True if the HTML has no meaningful content (no text, no images)
+ */
+export const isHtmlEmpty = (html: string | null | undefined): boolean => {
+  if (!html) return true;
+
+  // First, check if there are any images (which are valid content but have no text)
+  if (html.includes('<img')) return false;
+
+  // Strip all tags, leaving only the raw text content
+  const textOnly = html.replace(/<[^>]*>/g, '').trim();
+
+  // Check for common non-breaking space and other entities that might stay after tag stripping
+  // NOTE(review): entity spellings reconstructed from garbled extraction — confirm against original
+  const textWithoutEntities = textOnly
+    .replace(/&nbsp;/g, '')
+    .replace(/&zwj;/g, '')
+    .replace(/&zwnj;/g, '')
+    .replace(/&shy;/g, '')
+    .trim();
+
+  return textWithoutEntities === '';
+};
diff --git a/src/validators/admin.validator.ts b/src/validators/admin.validator.ts
index 9579ab5..8ad0737 100644
--- a/src/validators/admin.validator.ts
+++ b/src/validators/admin.validator.ts
@@ -6,7 +6,7 @@ export const createHolidaySchema = z.object({
   holidayDate: z.string().min(1, 'Holiday date is required'),
   holidayName: z.string().min(1, 'Holiday name is required').max(255, 'Holiday name too long'),
   description: z.string().max(1000, 'Description too long').optional(),
-  holidayType: z.enum(['NATIONAL', 'REGIONAL', 'COMPANY', 'OPTIONAL']).optional(),
+  holidayType: z.enum(['NATIONAL', 'REGIONAL', 'ORGANIZATIONAL', 'OPTIONAL']).optional(),
   isRecurring: z.boolean().optional(),
 });
 
@@ -65,3 +65,36 @@ export const updateActivityTypeSchema = createActivityTypeSchema.partial();
 
 export const activityTypeParamsSchema = z.object({
   activityTypeId: z.string().uuid('Invalid activity type ID'),
 });
+
+// ── Form 16 Configuration Schemas ──
+
+const notificationItemSchema = z.object({
+  enabled: z.boolean(),
+  template: z.string().optional(),
+});
+
+const notification26AsSchema = z.object({
+  enabled: z.boolean(),
+  templateRe: z.string().optional(),
+  templateDealers: z.string().optional(),
+});
+
+export const updateForm16ConfigSchema = z.object({
+  submissionViewerEmails: z.array(z.string().email()).optional(),
+  twentySixAsViewerEmails: 
z.array(z.string().email()).optional(),
+  reminderEnabled: z.boolean().optional(),
+  reminderDays: z.number().int().min(0).optional(),
+  notification26AsDataAdded: notification26AsSchema.optional(),
+  notificationForm16SuccessCreditNote: notificationItemSchema.optional(),
+  notificationForm16Unsuccessful: notificationItemSchema.optional(),
+  alertSubmitForm16Enabled: z.boolean().optional(),
+  alertSubmitForm16FrequencyDays: z.number().int().min(0).optional(),
+  alertSubmitForm16FrequencyHours: z.number().int().min(0).optional(),
+  alertSubmitForm16RunAtTime: z.string().regex(/^(\d{1,2}:\d{2})?$/, 'Time must be in HH:mm format').optional(),
+  alertSubmitForm16Template: z.string().optional(),
+  reminderNotificationEnabled: z.boolean().optional(),
+  reminderFrequencyDays: z.number().int().min(0).optional(),
+  reminderFrequencyHours: z.number().int().min(0).optional(),
+  reminderRunAtTime: z.string().regex(/^(\d{1,2}:\d{2})?$/, 'Time must be in HH:mm format').optional(),
+  reminderNotificationTemplate: z.string().optional(),
+});
diff --git a/verify-object.ts b/verify-object.ts
new file mode 100644
index 0000000..4078ef0
--- /dev/null
+++ b/verify-object.ts
@@ -0,0 +1,27 @@
+
+import { sanitizeObject } from './src/utils/sanitizer';
+
+const runTest = (name: string, input: any) => {
+  const output = sanitizeObject(input);
+  console.log(`[${name}] Input: ${JSON.stringify(input)}`);
+  console.log(`[${name}] Output: ${JSON.stringify(output)}`);
+  console.log(`[${name}] Description Empty String: ${output.description === ''}`);
+  console.log('---');
+};
+
+console.log('--- SYSTEM SANITIZER OBJECT TEST ---');
+
+// Case 1: Script only (should become empty string)
+runTest('Script Only', { description: "<script>alert(1)</script>" });
+
+// Case 2: Encoded script only (should become empty string)
+runTest('Encoded Script', { description: "&lt;script&gt;alert(1)&lt;/script&gt;" });
+
+// Case 3: Script inside valid tags (should become empty string if result is just empty tags)
+runTest('Script in tags', { 
description: "<p><script>alert(1)</script></p>" });
+
+// Case 4: Valid text (should stay same)
+runTest('Valid Text', { description: "<p>Hello World</p>" });
+
+// Case 5: Empty tags/spaces (should become empty string)
+runTest('Empty Tags', { description: "<p>&nbsp;</p>" });