Compare commits

...

2 Commits

22 changed files with 621 additions and 717 deletions

View File

@ -1 +1 @@
import{a as s}from"./index-DK9CP9m9.js";import"./radix-vendor-CYvDqP9X.js";import"./charts-vendor-BVfwAPj-.js";import"./utils-vendor-BTBPSQfW.js";import"./ui-vendor-CxsBWvVP.js";import"./socket-vendor-TjCxX7sJ.js";import"./redux-vendor-tbZCm13o.js";import"./router-vendor-BATWUvr6.js";async function m(n){return(await s.post(`/conclusions/${n}/generate`)).data.data}async function f(n,t){return(await s.post(`/conclusions/${n}/finalize`,{finalRemark:t})).data.data}async function d(n){var t;try{return(await s.get(`/conclusions/${n}`)).data.data}catch(o){if(((t=o.response)==null?void 0:t.status)===404)return null;throw o}}export{f as finalizeConclusion,m as generateConclusion,d as getConclusion}; import{a as s}from"./index-DsQZmIYq.js";import"./radix-vendor-CYvDqP9X.js";import"./charts-vendor-BVfwAPj-.js";import"./utils-vendor-BTBPSQfW.js";import"./ui-vendor-CxsBWvVP.js";import"./socket-vendor-TjCxX7sJ.js";import"./redux-vendor-tbZCm13o.js";import"./router-vendor-BATWUvr6.js";async function m(n){return(await s.post(`/conclusions/${n}/generate`)).data.data}async function f(n,t){return(await s.post(`/conclusions/${n}/finalize`,{finalRemark:t})).data.data}async function d(n){var t;try{return(await s.get(`/conclusions/${n}`)).data.data}catch(o){if(((t=o.response)==null?void 0:t.status)===404)return null;throw o}}export{f as finalizeConclusion,m as generateConclusion,d as getConclusion};

File diff suppressed because one or more lines are too long

View File

@ -13,7 +13,7 @@
<!-- Preload essential fonts and icons --> <!-- Preload essential fonts and icons -->
<link rel="preconnect" href="https://fonts.googleapis.com"> <link rel="preconnect" href="https://fonts.googleapis.com">
<link rel="preconnect" href="https://fonts.gstatic.com" crossorigin> <link rel="preconnect" href="https://fonts.gstatic.com" crossorigin>
<script type="module" crossorigin src="/assets/index-DK9CP9m9.js"></script> <script type="module" crossorigin src="/assets/index-DsQZmIYq.js"></script>
<link rel="modulepreload" crossorigin href="/assets/charts-vendor-BVfwAPj-.js"> <link rel="modulepreload" crossorigin href="/assets/charts-vendor-BVfwAPj-.js">
<link rel="modulepreload" crossorigin href="/assets/radix-vendor-CYvDqP9X.js"> <link rel="modulepreload" crossorigin href="/assets/radix-vendor-CYvDqP9X.js">
<link rel="modulepreload" crossorigin href="/assets/utils-vendor-BTBPSQfW.js"> <link rel="modulepreload" crossorigin href="/assets/utils-vendor-BTBPSQfW.js">

View File

@ -8,7 +8,7 @@ import logger from '@utils/logger';
import { initializeHolidaysCache, clearWorkingHoursCache } from '@utils/tatTimeUtils'; import { initializeHolidaysCache, clearWorkingHoursCache } from '@utils/tatTimeUtils';
import { clearConfigCache } from '@services/configReader.service'; import { clearConfigCache } from '@services/configReader.service';
import { User, UserRole } from '@models/User'; import { User, UserRole } from '@models/User';
import { sanitizeHtml } from '@utils/sanitizer'; import { sanitizeHtml, sanitizeObject, isHtmlEmpty } from '@utils/sanitizer';
/** /**
* Get all holidays (with optional year filter) * Get all holidays (with optional year filter)
@ -125,7 +125,9 @@ export const createHoliday = async (req: Request, res: Response): Promise<void>
logger.error('[Admin] Error creating holiday:', error); logger.error('[Admin] Error creating holiday:', error);
res.status(500).json({ res.status(500).json({
success: false, success: false,
error: error.message || 'Failed to create holiday' message: 'Failed to create holiday',
error: error.message,
details: error.errors // Sequelize validation errors are usually in .errors
}); });
} }
}; };
@ -172,7 +174,9 @@ export const updateHoliday = async (req: Request, res: Response): Promise<void>
logger.error('[Admin] Error updating holiday:', error); logger.error('[Admin] Error updating holiday:', error);
res.status(500).json({ res.status(500).json({
success: false, success: false,
error: error.message || 'Failed to update holiday' message: 'Failed to update holiday',
error: error.message,
details: error.errors
}); });
} }
}; };
@ -403,10 +407,18 @@ export const updateConfiguration = async (req: Request, res: Response): Promise<
return; return;
} }
// Sanitize config value if it's likely to be rendered as HTML // Sanitize config value using unified sanitizeObject
// We can be selective or just sanitize all strings for safety // This will handle strings, numbers, and nested objects consistently
if (typeof configValue === 'string') { const sanitizedObj = sanitizeObject({ [configKey]: configValue });
configValue = sanitizeHtml(configValue); configValue = sanitizedObj[configKey];
// If it's a string, ensure it's not effectively empty after sanitization
if (typeof configValue === 'string' && isHtmlEmpty(configValue)) {
res.status(400).json({
success: false,
error: 'Config value is required and must contain valid content'
});
return;
} }
// Update configuration // Update configuration
@ -631,7 +643,7 @@ export const putForm16Config = async (req: Request, res: Response): Promise<void
res.status(401).json({ success: false, error: 'User not authenticated' }); res.status(401).json({ success: false, error: 'User not authenticated' });
return; return;
} }
const body = req.body as Record<string, unknown>; const body = sanitizeObject(req.body as Record<string, unknown>);
const normalizeEmail = (e: unknown) => String(e ?? '').trim().toLowerCase(); const normalizeEmail = (e: unknown) => String(e ?? '').trim().toLowerCase();
const submissionViewerEmails = Array.isArray(body.submissionViewerEmails) const submissionViewerEmails = Array.isArray(body.submissionViewerEmails)
? body.submissionViewerEmails.map(normalizeEmail).filter(Boolean) ? body.submissionViewerEmails.map(normalizeEmail).filter(Boolean)

View File

@ -1,5 +1,6 @@
import { Request, Response } from 'express'; import { Request, Response } from 'express';
import { WorkflowRequest, ApprovalLevel, WorkNote, Document, Activity, ConclusionRemark } from '@models/index'; import { WorkflowRequest, ApprovalLevel, WorkNote, Document, Activity, ConclusionRemark } from '@models/index';
import { isHtmlEmpty } from '../utils/sanitizer';
import { aiService } from '@services/ai.service'; import { aiService } from '@services/ai.service';
import { activityService } from '@services/activity.service'; import { activityService } from '@services/activity.service';
import logger from '@utils/logger'; import logger from '@utils/logger';
@ -227,8 +228,8 @@ export class ConclusionController {
const { finalRemark } = req.body; const { finalRemark } = req.body;
const userId = (req as any).user?.userId; const userId = (req as any).user?.userId;
if (!finalRemark || typeof finalRemark !== 'string') { if (isHtmlEmpty(finalRemark)) {
return res.status(400).json({ error: 'Final remark is required' }); return res.status(400).json({ error: 'A valid final remark is required. Please ensure the remark contains valid content.' });
} }
// Fetch request // Fetch request

View File

@ -12,11 +12,12 @@ import { sapIntegrationService } from '../services/sapIntegration.service';
import fs from 'fs'; import fs from 'fs';
import path from 'path'; import path from 'path';
import crypto from 'crypto'; import crypto from 'crypto';
import { WorkflowRequest } from '../models/WorkflowRequest';
import { DealerClaimDetails } from '../models/DealerClaimDetails'; import { DealerClaimDetails } from '../models/DealerClaimDetails';
import { ClaimInvoice } from '../models/ClaimInvoice'; import { ClaimInvoice } from '../models/ClaimInvoice';
import { ClaimInvoiceItem } from '../models/ClaimInvoiceItem'; import { ClaimInvoiceItem } from '../models/ClaimInvoiceItem';
import { ActivityType } from '../models/ActivityType'; import { ActivityType } from '../models/ActivityType';
import { Participant } from '../models/Participant';
import { sanitizeObject, sanitizePermissive } from '../utils/sanitizer';
export class DealerClaimController { export class DealerClaimController {
private dealerClaimService = new DealerClaimService(); private dealerClaimService = new DealerClaimService();
@ -50,8 +51,25 @@ export class DealerClaimController {
} = req.body; } = req.body;
// Validation // Validation
if (!activityName || !activityType || !dealerCode || !dealerName || !location || !requestDescription) { const requiredFields = [
return ResponseHandler.error(res, 'Missing required fields', 400); { key: 'activityName', label: 'Activity Name' },
{ key: 'activityType', label: 'Activity Type' },
{ key: 'dealerCode', label: 'Dealer Code' },
{ key: 'dealerName', label: 'Dealer Name' },
{ key: 'location', label: 'Location' },
{ key: 'requestDescription', label: 'Request Description' },
];
const missingFields = requiredFields
.filter(field => !req.body[field.key])
.map(field => field.label);
if (missingFields.length > 0) {
return ResponseHandler.error(
res,
`Required fields are missing or contain invalid content: ${missingFields.join(', ')}`,
400
);
} }
const claimRequest = await this.dealerClaimService.createClaimRequest(userId, { const claimRequest = await this.dealerClaimService.createClaimRequest(userId, {
@ -76,9 +94,16 @@ export class DealerClaimController {
message: 'Claim request created successfully' message: 'Claim request created successfully'
}, 'Claim request created'); }, 'Claim request created');
} catch (error: any) { } catch (error: any) {
// Handle approver validation errors // Handle validation and business logic errors
if (error.message && error.message.includes('Approver')) { const isValidationError = error.message && (
logger.warn('[DealerClaimController] Approver validation error:', { message: error.message }); error.message.includes('Approver') ||
error.message.includes('Valid content is required') ||
error.message.includes('invalid script') ||
error.message.includes('empty input detected')
);
if (isValidationError) {
logger.warn('[DealerClaimController] Validation error:', { message: error.message });
return ResponseHandler.error(res, error.message, 400); return ResponseHandler.error(res, error.message, 400);
} }
@ -173,6 +198,8 @@ export class DealerClaimController {
if (typeof costBreakup === 'string') { if (typeof costBreakup === 'string') {
try { try {
parsedCostBreakup = JSON.parse(costBreakup); parsedCostBreakup = JSON.parse(costBreakup);
// Sanitize cost items
parsedCostBreakup = sanitizeObject(parsedCostBreakup);
} catch (parseError) { } catch (parseError) {
logger.error('[DealerClaimController] Failed to parse costBreakup JSON:', parseError); logger.error('[DealerClaimController] Failed to parse costBreakup JSON:', parseError);
return ResponseHandler.error(res, 'Invalid costBreakup format. Expected JSON array.', 400); return ResponseHandler.error(res, 'Invalid costBreakup format. Expected JSON array.', 400);
@ -232,7 +259,7 @@ export class DealerClaimController {
timelineMode: timelineMode || 'date', timelineMode: timelineMode || 'date',
expectedCompletionDate: expectedCompletionDate ? new Date(expectedCompletionDate) : undefined, expectedCompletionDate: expectedCompletionDate ? new Date(expectedCompletionDate) : undefined,
expectedCompletionDays: expectedCompletionDays ? parseInt(expectedCompletionDays) : undefined, expectedCompletionDays: expectedCompletionDays ? parseInt(expectedCompletionDays) : undefined,
dealerComments: dealerComments || '', dealerComments: dealerComments ? sanitizePermissive(dealerComments) : '',
}); });
return ResponseHandler.success(res, { message: 'Proposal submitted successfully' }, 'Proposal submitted'); return ResponseHandler.success(res, { message: 'Proposal submitted successfully' }, 'Proposal submitted');
@ -264,6 +291,8 @@ export class DealerClaimController {
if (closedExpenses) { if (closedExpenses) {
try { try {
parsedClosedExpenses = typeof closedExpenses === 'string' ? JSON.parse(closedExpenses) : closedExpenses; parsedClosedExpenses = typeof closedExpenses === 'string' ? JSON.parse(closedExpenses) : closedExpenses;
// Sanitize expenses
parsedClosedExpenses = sanitizeObject(parsedClosedExpenses);
} catch (e) { } catch (e) {
logger.warn('[DealerClaimController] Failed to parse closedExpenses JSON:', e); logger.warn('[DealerClaimController] Failed to parse closedExpenses JSON:', e);
parsedClosedExpenses = []; parsedClosedExpenses = [];
@ -547,7 +576,7 @@ export class DealerClaimController {
totalClosedExpenses: totalClosedExpenses ? parseFloat(totalClosedExpenses) : 0, totalClosedExpenses: totalClosedExpenses ? parseFloat(totalClosedExpenses) : 0,
invoicesReceipts: invoicesReceipts.length > 0 ? invoicesReceipts : undefined, invoicesReceipts: invoicesReceipts.length > 0 ? invoicesReceipts : undefined,
attendanceSheet: attendanceSheet || undefined, attendanceSheet: attendanceSheet || undefined,
completionDescription: completionDescription || undefined, completionDescription: completionDescription ? sanitizePermissive(completionDescription) : undefined,
}); });
return ResponseHandler.success(res, { message: 'Completion documents submitted successfully' }, 'Completion submitted'); return ResponseHandler.success(res, { message: 'Completion documents submitted successfully' }, 'Completion submitted');
@ -784,6 +813,20 @@ export class DealerClaimController {
return ResponseHandler.error(res, 'Invalid workflow request', 400); return ResponseHandler.error(res, 'Invalid workflow request', 400);
} }
// Authorization Check
const userRole = (req as any).user?.role;
const userId = (req as any).user?.userId;
if (userRole !== 'ADMIN' && userRole !== 'MANAGEMENT') {
const participant = await Participant.findOne({
where: { requestId, userId, isActive: true }
});
if (!participant) {
return ResponseHandler.error(res, 'Access denied. You are not a participant in this workflow.', 403);
}
}
const { ClaimInvoice } = await import('../models/ClaimInvoice'); const { ClaimInvoice } = await import('../models/ClaimInvoice');
let invoice = await ClaimInvoice.findOne({ where: { requestId } }); let invoice = await ClaimInvoice.findOne({ where: { requestId } });
@ -1005,6 +1048,24 @@ export class DealerClaimController {
const requestId = (workflow as any).requestId || (workflow as any).request_id; const requestId = (workflow as any).requestId || (workflow as any).request_id;
const requestNumber = (workflow as any).requestNumber || (workflow as any).request_number; const requestNumber = (workflow as any).requestNumber || (workflow as any).request_number;
if (!requestId) {
return ResponseHandler.error(res, 'Invalid workflow request', 400);
}
// Authorization Check
const userRole = (req as any).user?.role;
const userId = (req as any).user?.userId;
if (userRole !== 'ADMIN' && userRole !== 'MANAGEMENT') {
const participant = await Participant.findOne({
where: { requestId, userId, isActive: true }
});
if (!participant) {
return ResponseHandler.error(res, 'Access denied. You are not a participant in this workflow.', 403);
}
}
// Fetch related data // Fetch related data
logger.info(`[DealerClaimController] Preparing CSV for requestId: ${requestId}`); logger.info(`[DealerClaimController] Preparing CSV for requestId: ${requestId}`);
const [invoice, items, claimDetails, internalOrder] = await Promise.all([ const [invoice, items, claimDetails, internalOrder] = await Promise.all([

View File

@ -7,6 +7,8 @@ import { User } from '@models/User';
import { WorkflowRequest } from '@models/WorkflowRequest'; import { WorkflowRequest } from '@models/WorkflowRequest';
import { Participant } from '@models/Participant'; import { Participant } from '@models/Participant';
import { ApprovalLevel } from '@models/ApprovalLevel'; import { ApprovalLevel } from '@models/ApprovalLevel';
import { WorkNote } from '@models/WorkNote';
import { WorkNoteAttachment } from '@models/WorkNoteAttachment';
import { Op } from 'sequelize'; import { Op } from 'sequelize';
import { ResponseHandler } from '@utils/responseHandler'; import { ResponseHandler } from '@utils/responseHandler';
import { activityService } from '@services/activity.service'; import { activityService } from '@services/activity.service';
@ -17,6 +19,9 @@ import type { AuthenticatedRequest } from '../types/express';
import { getRequestMetadata } from '@utils/requestUtils'; import { getRequestMetadata } from '@utils/requestUtils';
import { getConfigNumber, getConfigValue } from '@services/configReader.service'; import { getConfigNumber, getConfigValue } from '@services/configReader.service';
import { logDocumentEvent, logWithContext } from '@utils/logger'; import { logDocumentEvent, logWithContext } from '@utils/logger';
import { UPLOAD_DIR } from '../config/storage';
import { Storage } from '@google-cloud/storage';
import logger from '@utils/logger';
export class DocumentController { export class DocumentController {
async upload(req: AuthenticatedRequest, res: Response): Promise<void> { async upload(req: AuthenticatedRequest, res: Response): Promise<void> {
@ -517,6 +522,196 @@ export class DocumentController {
ResponseHandler.error(res, 'Upload failed', 500, message); ResponseHandler.error(res, 'Upload failed', 500, message);
} }
} }
/**
* Helper function to create proper Content-Disposition header
*/
private createContentDisposition(disposition: 'inline' | 'attachment', filename: string): string {
const cleanFilename = filename
.replace(/[<>:"|?*\x00-\x1F\x7F]/g, '_')
.replace(/\\/g, '_')
.trim();
const hasNonASCII = /[^\x00-\x7F]/.test(filename);
if (hasNonASCII) {
const encodedFilename = encodeURIComponent(filename);
return `${disposition}; filename="${cleanFilename}"; filename*=UTF-8''${encodedFilename}`;
} else {
return `${disposition}; filename="${cleanFilename}"`;
}
}
/**
* Preview or Download a standard workflow document
*/
async getWorkflowDocument(req: AuthenticatedRequest, res: Response, mode: 'preview' | 'download'): Promise<void> {
try {
const { documentId } = req.params;
const userRole = req.user?.role;
const userId = req.user?.userId;
const document = await Document.findOne({ where: { documentId } });
if (!document) {
ResponseHandler.error(res, 'Document not found', 404);
return;
}
// Authorization Check
if (userRole !== 'ADMIN' && userRole !== 'MANAGEMENT') {
const participant = await Participant.findOne({
where: { requestId: document.requestId, userId, isActive: true }
});
if (!participant) {
ResponseHandler.error(res, 'Access denied. You are not a participant in this workflow.', 403);
return;
}
const canAccess = mode === 'download' ? participant.canDownloadDocuments : participant.canViewDocuments;
if (!canAccess) {
ResponseHandler.error(res, `Access denied. You do not have permission to ${mode} documents in this workflow.`, 403);
return;
}
}
return this.serveFile(res, {
storageUrl: (document as any).storageUrl || (document as any).storage_url,
filePath: (document as any).filePath || (document as any).file_path,
fileName: (document as any).originalFileName || (document as any).original_file_name || (document as any).fileName,
mimeType: (document as any).mimeType || (document as any).mime_type,
mode
});
} catch (error) {
logger.error(`[DocumentController] Error getting workflow document:`, error);
ResponseHandler.error(res, 'Failed to access document', 500);
}
}
/**
* Preview or Download a work note attachment
*/
async getWorkNoteAttachment(req: AuthenticatedRequest, res: Response, mode: 'preview' | 'download'): Promise<void> {
try {
const { attachmentId } = req.params;
const userRole = req.user?.role;
const userId = req.user?.userId;
const attachment = await WorkNoteAttachment.findOne({ where: { attachmentId } });
if (!attachment) {
ResponseHandler.error(res, 'Attachment not found', 404);
return;
}
const note = await WorkNote.findOne({ where: { noteId: attachment.noteId } });
if (!note) {
ResponseHandler.error(res, 'Associated work note not found', 404);
return;
}
// Authorization Check (Work note attachments follow general document permissions)
if (userRole !== 'ADMIN' && userRole !== 'MANAGEMENT') {
const participant = await Participant.findOne({
where: { requestId: note.requestId, userId, isActive: true }
});
if (!participant) {
ResponseHandler.error(res, 'Access denied. You are not a participant in this workflow.', 403);
return;
}
const canAccess = mode === 'download' ? participant.canDownloadDocuments : participant.canViewDocuments;
if (!canAccess) {
ResponseHandler.error(res, `Access denied. You do not have permission to ${mode} documentation in this workflow.`, 403);
return;
}
}
return this.serveFile(res, {
storageUrl: (attachment as any).storageUrl || (attachment as any).storage_url,
filePath: (attachment as any).filePath || (attachment as any).file_path,
fileName: (attachment as any).fileName || (attachment as any).file_name,
mimeType: (attachment as any).fileType || (attachment as any).file_type,
mode
});
} catch (error) {
logger.error(`[DocumentController] Error getting work note attachment:`, error);
ResponseHandler.error(res, 'Failed to access attachment', 500);
}
}
/**
* Common logic to serve files from GCS or local storage
*/
private async serveFile(res: Response, options: {
storageUrl?: string,
filePath?: string,
fileName: string,
mimeType?: string,
mode: 'preview' | 'download'
}): Promise<void> {
const { storageUrl, filePath, fileName, mimeType, mode } = options;
const isGcsUrl = storageUrl && (storageUrl.startsWith('https://storage.googleapis.com') || storageUrl.startsWith('gs://'));
// Set CORS and basic headers
res.setHeader('Access-Control-Expose-Headers', 'Content-Type, Content-Disposition');
const dispositionType = mode === 'download' ? 'attachment' : (mimeType?.includes('pdf') || mimeType?.includes('image') ? 'inline' : 'attachment');
res.setHeader('Content-Disposition', this.createContentDisposition(dispositionType, fileName));
res.contentType(mimeType || 'application/octet-stream');
if (isGcsUrl) {
res.redirect(storageUrl!);
return;
}
// Stream from GCS if filePath is a GCS path
if (!storageUrl && filePath && (filePath.startsWith('requests/') || filePath.startsWith('worknotes/'))) {
try {
const keyFilePath = process.env.GCP_KEY_FILE || '';
const bucketName = process.env.GCP_BUCKET_NAME || '';
const resolvedKeyPath = path.isAbsolute(keyFilePath) ? keyFilePath : path.resolve(process.cwd(), keyFilePath);
const storage = new Storage({
projectId: process.env.GCP_PROJECT_ID || '',
keyFilename: resolvedKeyPath,
});
const bucket = storage.bucket(bucketName);
const file = bucket.file(filePath);
const [exists] = await file.exists();
if (!exists) {
ResponseHandler.error(res, 'File not found in storage', 404);
return;
}
file.createReadStream()
.on('error', (err) => {
logger.error('[DocumentController] GCS Stream Error:', err);
if (!res.headersSent) ResponseHandler.error(res, 'Streaming failed', 500);
})
.pipe(res);
return;
} catch (err) {
logger.error('[DocumentController] GCS Access Error:', err);
ResponseHandler.error(res, 'Failed to access cloud storage', 500);
return;
}
}
// Local file handling
const absolutePath = filePath && !path.isAbsolute(filePath) ? path.join(UPLOAD_DIR, filePath) : filePath;
if (absolutePath && fs.existsSync(absolutePath)) {
res.sendFile(absolutePath, (err) => {
if (err && !res.headersSent) ResponseHandler.error(res, 'Failed to send file', 500);
});
return;
}
ResponseHandler.error(res, 'File not found on server', 404);
}
} }
export const documentController = new DocumentController();

View File

@ -15,6 +15,7 @@ import { getRequestMetadata } from '@utils/requestUtils';
import { enrichApprovalLevels, enrichSpectators, validateInitiator, validateDealerUser } from '@services/userEnrichment.service'; import { enrichApprovalLevels, enrichSpectators, validateInitiator, validateDealerUser } from '@services/userEnrichment.service';
import { DealerClaimService } from '@services/dealerClaim.service'; import { DealerClaimService } from '@services/dealerClaim.service';
import { canViewForm16Submission } from '@services/form16Permission.service'; import { canViewForm16Submission } from '@services/form16Permission.service';
import { sanitizeObject, isHtmlEmpty } from '@utils/sanitizer';
import logger from '@utils/logger'; import logger from '@utils/logger';
const workflowService = new WorkflowService(); const workflowService = new WorkflowService();
@ -139,7 +140,7 @@ export class WorkflowController {
} catch (error) { } catch (error) {
const errorMessage = error instanceof Error ? error.message : 'Unknown error'; const errorMessage = error instanceof Error ? error.message : 'Unknown error';
logger.error('[WorkflowController] Failed to create workflow:', error); logger.error('[WorkflowController] Failed to create workflow:', error);
ResponseHandler.error(res, 'Failed to create workflow', 400, errorMessage); ResponseHandler.error(res, errorMessage, 400);
} }
} }
@ -161,11 +162,25 @@ export class WorkflowController {
let parsed; let parsed;
try { try {
parsed = JSON.parse(raw); parsed = JSON.parse(raw);
// Explicitly sanitize the parsed object since multipart bypasses global middleware
parsed = sanitizeObject(parsed);
} catch (parseError) { } catch (parseError) {
ResponseHandler.error(res, 'Invalid JSON in payload', 400, parseError instanceof Error ? parseError.message : 'JSON parse error'); ResponseHandler.error(res, 'Invalid JSON in payload', 400, parseError instanceof Error ? parseError.message : 'JSON parse error');
return; return;
} }
// Explicitly check for empty content after sanitization for non-drafts
if (parsed.isDraft !== true) {
if (!parsed.title || !parsed.title.trim()) {
ResponseHandler.error(res, 'A valid title is required. Please ensure the title contains valid content.', 400);
return;
}
if (isHtmlEmpty(parsed.description)) {
ResponseHandler.error(res, 'A valid description is required. Please ensure the description contains valid content.', 400);
return;
}
}
// Transform frontend format to backend format BEFORE validation // Transform frontend format to backend format BEFORE validation
// Map 'approvers' -> 'approvalLevels' for backward compatibility // Map 'approvers' -> 'approvalLevels' for backward compatibility
if (!parsed.approvalLevels && parsed.approvers) { if (!parsed.approvalLevels && parsed.approvers) {
@ -450,7 +465,7 @@ export class WorkflowController {
userId: req.user?.userId, userId: req.user?.userId,
filesCount: (req as any).files?.length || 0, filesCount: (req as any).files?.length || 0,
}); });
ResponseHandler.error(res, 'Failed to create workflow', 400, errorMessage); ResponseHandler.error(res, errorMessage, 400);
} }
} }

View File

@ -2,133 +2,15 @@
* Sanitization Middleware * Sanitization Middleware
* Sanitizes string inputs in req.body and req.query to prevent stored XSS. * Sanitizes string inputs in req.body and req.query to prevent stored XSS.
* *
* Uses TWO strategies: * Uses the unified sanitizeObject utility from @utils/sanitizer.
* 1. STRICT strips ALL HTML tags (for normal text fields like names, emails, titles)
* 2. PERMISSIVE allows safe formatting tags (for rich text fields like description, message, comments)
* *
* This middleware runs AFTER body parsing and BEFORE route handlers. * This middleware runs AFTER body parsing and BEFORE route handlers.
* File upload routes (multipart) are skipped those are handled * File upload routes (multipart) are skipped those are handled
* by the malwareScan middleware pipeline. * by the malwareScan middleware pipeline (but can be manually sanitized in controllers).
*/ */
import { Request, Response, NextFunction } from 'express'; import { Request, Response, NextFunction } from 'express';
import sanitizeHtml from 'sanitize-html'; import { sanitizeObject, sanitizeStrict } from '@utils/sanitizer';
/**
* Fields that intentionally store HTML from rich text editors.
* These get PERMISSIVE sanitization (safe formatting tags allowed).
* All other string fields get STRICT sanitization (all tags stripped).
*/
const RICH_TEXT_FIELDS = new Set([
'description',
'requestDescription',
'message',
'content',
'comments',
'rejectionReason',
'pauseReason',
'conclusionRemark',
'aiGeneratedRemark',
'finalRemark',
'closingRemarks',
'effectiveFinalRemark',
'keyDiscussionPoints',
'keyPoints',
'remarksText',
'remark',
'remarks',
'feedback',
'note',
'notes',
'skipReason',
]);
// Strict config: zero allowed tags, zero allowed attributes
const strictSanitizeConfig: sanitizeHtml.IOptions = {
allowedTags: [],
allowedAttributes: {},
allowedIframeHostnames: [],
disallowedTagsMode: 'discard',
nonTextTags: ['script', 'style', 'iframe', 'embed', 'object'],
};
// Permissive config: allow safe formatting tags from rich text editors
// Blocks dangerous elements (script, iframe, object, embed, form, input)
const permissiveSanitizeConfig: sanitizeHtml.IOptions = {
allowedTags: [
// Text formatting
'p', 'br', 'b', 'i', 'u', 'em', 'strong', 's', 'strike', 'del', 'sub', 'sup', 'mark', 'small',
// Headings
'h1', 'h2', 'h3', 'h4', 'h5', 'h6',
// Lists
'ul', 'ol', 'li',
// Block elements
'blockquote', 'pre', 'code', 'hr', 'div', 'span',
// Tables
'table', 'thead', 'tbody', 'tfoot', 'tr', 'th', 'td', 'caption', 'colgroup', 'col',
// Links (href checked below)
'a',
// Images (src checked below)
'img',
],
allowedAttributes: {
'a': ['href', 'title', 'target', 'rel'],
'img': ['src', 'alt', 'title', 'width', 'height'],
'td': ['colspan', 'rowspan', 'style'],
'th': ['colspan', 'rowspan', 'style'],
'span': ['class', 'style'],
'div': ['class', 'style'],
'pre': ['class', 'style'],
'code': ['class', 'style'],
'p': ['class', 'style'],
'h1': ['class', 'style'],
'h2': ['class', 'style'],
'h3': ['class', 'style'],
'h4': ['class', 'style'],
'h5': ['class', 'style'],
'h6': ['class', 'style'],
'ul': ['class', 'style'],
'ol': ['class', 'style', 'start', 'type'],
'li': ['class', 'style'],
'blockquote': ['class', 'style'],
'table': ['class', 'style'],
},
allowedSchemes: ['http', 'https', 'mailto'],
allowedIframeHostnames: [],
disallowedTagsMode: 'discard',
nonTextTags: ['script', 'style', 'iframe', 'embed', 'object', 'applet', 'form', 'input', 'textarea', 'select', 'button'],
};
/**
* Recursively sanitize all string values in an object or array
* Uses the field key to decide strict vs permissive sanitization
*/
function sanitizeValue(value: any, fieldKey?: string): any {
if (typeof value === 'string') {
const isRichTextField = fieldKey && RICH_TEXT_FIELDS.has(fieldKey);
const config = isRichTextField ? permissiveSanitizeConfig : strictSanitizeConfig;
return sanitizeHtml(value, config);
}
if (Array.isArray(value)) {
return value.map((item) => sanitizeValue(item, fieldKey));
}
if (value !== null && typeof value === 'object') {
return sanitizeObject(value);
}
return value;
}
/**
* Sanitize all string properties of an object (recursively)
* Passes the key name to sanitizeValue so it can choose the right config
*/
function sanitizeObject(obj: Record<string, any>): Record<string, any> {
const sanitized: Record<string, any> = {};
for (const key of Object.keys(obj)) {
sanitized[key] = sanitizeValue(obj[key], key);
}
return sanitized;
}
/** /**
* Express middleware that sanitizes req.body and req.query * Express middleware that sanitizes req.body and req.query
@ -137,6 +19,7 @@ function sanitizeObject(obj: Record<string, any>): Record<string, any> {
export const sanitizationMiddleware = (req: Request, _res: Response, next: NextFunction): void => { export const sanitizationMiddleware = (req: Request, _res: Response, next: NextFunction): void => {
try { try {
// Skip multipart requests — file uploads are sanitized by the malware scan pipeline // Skip multipart requests — file uploads are sanitized by the malware scan pipeline
// Note: Multipart payloads should be manually sanitized in the controller if used.
const contentType = req.headers['content-type'] || ''; const contentType = req.headers['content-type'] || '';
if (contentType.includes('multipart/form-data')) { if (contentType.includes('multipart/form-data')) {
return next(); return next();
@ -153,7 +36,7 @@ export const sanitizationMiddleware = (req: Request, _res: Response, next: NextF
for (const key of Object.keys(req.query)) { for (const key of Object.keys(req.query)) {
const val = req.query[key]; const val = req.query[key];
if (typeof val === 'string') { if (typeof val === 'string') {
strictQuery[key] = sanitizeHtml(val, strictSanitizeConfig); strictQuery[key] = sanitizeStrict(val);
} else { } else {
strictQuery[key] = val; strictQuery[key] = val;
} }

View File

@ -13,12 +13,8 @@ export const validateRequest = (schema: ZodSchema) => {
next(); next();
} catch (error) { } catch (error) {
if (error instanceof ZodError) { if (error instanceof ZodError) {
const errorMessages = error.errors.map(err => ({ const errorMessage = error.errors.map(err => err.message).join(', ');
field: err.path.join('.'), ResponseHandler.validationError(res, 'Validation failed', errorMessage);
message: err.message,
}));
ResponseHandler.validationError(res, 'Validation failed', errorMessages);
} else { } else {
const errorMessage = error instanceof Error ? error.message : 'Unknown error'; const errorMessage = error instanceof Error ? error.message : 'Unknown error';
ResponseHandler.error(res, 'Validation error', 400, errorMessage); ResponseHandler.error(res, 'Validation error', 400, errorMessage);
@ -34,12 +30,8 @@ export const validateBody = (schema: ZodSchema) => {
next(); next();
} catch (error) { } catch (error) {
if (error instanceof ZodError) { if (error instanceof ZodError) {
const errorMessages = error.errors.map(err => ({ const errorMessage = error.errors.map(err => err.message).join(', ');
field: err.path.join('.'), ResponseHandler.validationError(res, 'Request body validation failed', errorMessage);
message: err.message,
}));
ResponseHandler.validationError(res, 'Request body validation failed', errorMessages);
} else { } else {
const errorMessage = error instanceof Error ? error.message : 'Unknown error'; const errorMessage = error instanceof Error ? error.message : 'Unknown error';
ResponseHandler.error(res, 'Validation error', 400, errorMessage); ResponseHandler.error(res, 'Validation error', 400, errorMessage);
@ -55,12 +47,8 @@ export const validateQuery = (schema: ZodSchema) => {
next(); next();
} catch (error) { } catch (error) {
if (error instanceof ZodError) { if (error instanceof ZodError) {
const errorMessages = error.errors.map(err => ({ const errorMessage = error.errors.map(err => err.message).join(', ');
field: err.path.join('.'), ResponseHandler.validationError(res, 'Query parameters validation failed', errorMessage);
message: err.message,
}));
ResponseHandler.validationError(res, 'Query parameters validation failed', errorMessages);
} else { } else {
const errorMessage = error instanceof Error ? error.message : 'Unknown error'; const errorMessage = error instanceof Error ? error.message : 'Unknown error';
ResponseHandler.error(res, 'Validation error', 400, errorMessage); ResponseHandler.error(res, 'Validation error', 400, errorMessage);
@ -76,12 +64,8 @@ export const validateParams = (schema: ZodSchema) => {
next(); next();
} catch (error) { } catch (error) {
if (error instanceof ZodError) { if (error instanceof ZodError) {
const errorMessages = error.errors.map(err => ({ const errorMessage = error.errors.map(err => err.message).join(', ');
field: err.path.join('.'), ResponseHandler.validationError(res, 'URL parameters validation failed', errorMessage);
message: err.message,
}));
ResponseHandler.validationError(res, 'URL parameters validation failed', errorMessages);
} else { } else {
const errorMessage = error instanceof Error ? error.message : 'Unknown error'; const errorMessage = error instanceof Error ? error.message : 'Unknown error';
ResponseHandler.error(res, 'Validation error', 400, errorMessage); ResponseHandler.error(res, 'Validation error', 400, errorMessage);

View File

@ -0,0 +1,38 @@
import { QueryInterface } from 'sequelize';
/**
 * Migration to ensure 'ORGANIZATIONAL' exists in the holiday_type enum
 * and set 'ORGANIZATIONAL' as the default value for the holiday_type column.
 * (The previous version of this doc comment incorrectly said 'NATIONAL'
 * becomes the default; the SQL below sets 'ORGANIZATIONAL'.)
 */
export async function up(queryInterface: QueryInterface): Promise<void> {
  // 1. Add 'ORGANIZATIONAL' to the enum_holidays_holiday_type enum type if it doesn't exist.
  //    PostgreSQL doesn't support IF NOT EXISTS for ALTER TYPE ADD VALUE,
  //    so we check pg_enum first inside a PL/pgSQL DO block.
  await queryInterface.sequelize.query(`
    DO $$
    BEGIN
      IF NOT EXISTS (
        SELECT 1 FROM pg_enum
        WHERE enumlabel = 'ORGANIZATIONAL'
        AND enumtypid = (SELECT oid FROM pg_type WHERE typname = 'enum_holidays_holiday_type')
      ) THEN
        ALTER TYPE enum_holidays_holiday_type ADD VALUE 'ORGANIZATIONAL';
      END IF;
    END$$;
  `);

  // 2. Make 'ORGANIZATIONAL' the column default for newly inserted holiday rows.
  await queryInterface.sequelize.query(`
    ALTER TABLE "holidays" ALTER COLUMN "holiday_type" SET DEFAULT 'ORGANIZATIONAL';
  `);
}
/**
 * Migration rollback: restore 'NATIONAL' as the default value for
 * holidays.holiday_type.
 *
 * Note: the 'ORGANIZATIONAL' enum label itself cannot be removed —
 * PostgreSQL does not support dropping values from an enum type.
 */
export async function down(queryInterface: QueryInterface): Promise<void> {
  // Revert the column default back to 'NATIONAL' (the pre-migration value).
  // The previous inline comment incorrectly said 'ORGANIZATIONAL' here.
  await queryInterface.sequelize.query(`
    ALTER TABLE "holidays" ALTER COLUMN "holiday_type" SET DEFAULT 'NATIONAL';
  `);
  console.log('[Migration] Note: Cannot remove enum values in PostgreSQL. ORGANIZATIONAL will remain in enum_holidays_holiday_type.');
}

View File

@ -15,6 +15,7 @@ import {
createActivityTypeSchema, createActivityTypeSchema,
updateActivityTypeSchema, updateActivityTypeSchema,
activityTypeParamsSchema, activityTypeParamsSchema,
updateForm16ConfigSchema,
} from '../validators/admin.validator'; } from '../validators/admin.validator';
import { import {
getAllHolidays, getAllHolidays,
@ -136,7 +137,7 @@ router.get('/form16-config', getForm16Config);
* @body { submissionViewerEmails?, twentySixAsViewerEmails?, reminderEnabled?, reminderDays? } * @body { submissionViewerEmails?, twentySixAsViewerEmails?, reminderEnabled?, reminderDays? }
* @access Admin * @access Admin
*/ */
router.put('/form16-config', putForm16Config); router.put('/form16-config', validateBody(updateForm16ConfigSchema), putForm16Config);
// ==================== User Role Management Routes (RBAC) ==================== // ==================== User Role Management Routes (RBAC) ====================

View File

@ -18,9 +18,8 @@ import { notificationService } from '../services/notification.service';
import { Activity } from '@models/Activity'; import { Activity } from '@models/Activity';
import { WorkflowService } from '../services/workflow.service'; import { WorkflowService } from '../services/workflow.service';
import { WorkNoteController } from '../controllers/worknote.controller'; import { WorkNoteController } from '../controllers/worknote.controller';
import { workNoteService } from '../services/worknote.service'; import { documentController } from '../controllers/document.controller';
import { pauseController } from '../controllers/pause.controller'; import { pauseController } from '../controllers/pause.controller';
import logger from '@utils/logger';
const router = Router(); const router = Router();
@ -232,505 +231,25 @@ router.post('/:id/work-notes',
// Preview workflow document // Preview workflow document
router.get('/documents/:documentId/preview', router.get('/documents/:documentId/preview',
authenticateToken, authenticateToken,
asyncHandler(async (req: any, res: Response) => { asyncHandler((req: any, res: Response) => documentController.getWorkflowDocument(req, res, 'preview'))
const { documentId } = req.params;
const { Document } = require('@models/Document');
const { gcsStorageService } = require('../services/gcsStorage.service');
const fs = require('fs');
const document = await Document.findOne({ where: { documentId } });
if (!document) {
res.status(404).json({ success: false, error: 'Document not found' });
return;
}
const storageUrl = (document as any).storageUrl || (document as any).storage_url;
const filePath = (document as any).filePath || (document as any).file_path;
const fileName = (document as any).originalFileName || (document as any).original_file_name || (document as any).fileName;
const fileType = (document as any).mimeType || (document as any).mime_type;
// Check if it's a GCS URL
const isGcsUrl = storageUrl && (storageUrl.startsWith('https://storage.googleapis.com') || storageUrl.startsWith('gs://'));
if (isGcsUrl) {
// Redirect to GCS public URL or use signed URL for private files
res.redirect(storageUrl);
return;
}
// If storageUrl is null but filePath indicates GCS storage, stream file directly from GCS
if (!storageUrl && filePath && filePath.startsWith('requests/')) {
try {
// Use the existing GCS storage service instance
if (!gcsStorageService.isConfigured()) {
throw new Error('GCS not configured');
}
// Access the storage instance from the service
const { Storage } = require('@google-cloud/storage');
const keyFilePath = process.env.GCP_KEY_FILE || '';
const bucketName = process.env.GCP_BUCKET_NAME || '';
const path = require('path');
const resolvedKeyPath = path.isAbsolute(keyFilePath)
? keyFilePath
: path.resolve(process.cwd(), keyFilePath);
const storage = new Storage({
projectId: process.env.GCP_PROJECT_ID || '',
keyFilename: resolvedKeyPath,
});
const bucket = storage.bucket(bucketName);
const file = bucket.file(filePath);
// Check if file exists
const [exists] = await file.exists();
if (!exists) {
res.status(404).json({ success: false, error: 'File not found in GCS' });
return;
}
// Get file metadata for content type
const [metadata] = await file.getMetadata();
const contentType = metadata.contentType || fileType || 'application/octet-stream';
// Set CORS headers
const origin = req.headers.origin;
if (origin) {
res.setHeader('Access-Control-Allow-Origin', origin);
res.setHeader('Access-Control-Allow-Credentials', 'true');
}
res.setHeader('Access-Control-Expose-Headers', 'Content-Type, Content-Disposition');
res.setHeader('Content-Type', contentType);
// For images and PDFs, allow inline viewing
const isPreviewable = fileType && (fileType.includes('image') || fileType.includes('pdf'));
const disposition = isPreviewable ? 'inline' : 'attachment';
res.setHeader('Content-Disposition', createContentDisposition(disposition, fileName));
// Stream file from GCS to response
file.createReadStream()
.on('error', (streamError: Error) => {
const logger = require('../utils/logger').default;
logger.error('[Workflow] Failed to stream file from GCS', {
documentId,
filePath,
error: streamError.message,
});
if (!res.headersSent) {
res.status(500).json({
success: false,
error: 'Failed to stream file from storage'
});
}
})
.pipe(res);
return;
} catch (gcsError) {
const logger = require('../utils/logger').default;
logger.error('[Workflow] Failed to access GCS file for preview', {
documentId,
filePath,
error: gcsError instanceof Error ? gcsError.message : 'Unknown error',
});
res.status(500).json({
success: false,
error: 'Failed to access file. Please try again.'
});
return;
}
}
// Local file handling - check if storageUrl is a local path (starts with /uploads/)
if (storageUrl && storageUrl.startsWith('/uploads/')) {
// Extract relative path from storageUrl (remove /uploads/ prefix)
const relativePath = storageUrl.replace(/^\/uploads\//, '');
const absolutePath = path.join(UPLOAD_DIR, relativePath);
// Check if file exists
if (!fs.existsSync(absolutePath)) {
res.status(404).json({ success: false, error: 'File not found on server' });
return;
}
// Set CORS headers to allow blob URL creation when served from same origin
const origin = req.headers.origin;
if (origin) {
res.setHeader('Access-Control-Allow-Origin', origin);
res.setHeader('Access-Control-Allow-Credentials', 'true');
}
res.setHeader('Access-Control-Expose-Headers', 'Content-Type, Content-Disposition');
// Set appropriate content type
res.contentType(fileType || 'application/octet-stream');
// For images and PDFs, allow inline viewing
const isPreviewable = fileType && (fileType.includes('image') || fileType.includes('pdf'));
if (isPreviewable) {
res.setHeader('Content-Disposition', `inline; filename="${fileName}"`);
} else {
res.setHeader('Content-Disposition', `attachment; filename="${fileName}"`);
}
res.sendFile(absolutePath, (err) => {
if (err && !res.headersSent) {
res.status(500).json({ success: false, error: 'Failed to serve file' });
}
});
return;
}
// Legacy local file handling (absolute path stored in filePath)
// Resolve relative path if needed
const absolutePath = filePath && !path.isAbsolute(filePath)
? path.join(UPLOAD_DIR, filePath)
: filePath;
if (!absolutePath || !fs.existsSync(absolutePath)) {
res.status(404).json({ success: false, error: 'File not found on server' });
return;
}
// Set CORS headers to allow blob URL creation when served from same origin
const origin = req.headers.origin;
if (origin) {
res.setHeader('Access-Control-Allow-Origin', origin);
res.setHeader('Access-Control-Allow-Credentials', 'true');
}
res.setHeader('Access-Control-Expose-Headers', 'Content-Type, Content-Disposition');
// Set appropriate content type
res.contentType(fileType || 'application/octet-stream');
// For images and PDFs, allow inline viewing
const isPreviewable = fileType && (fileType.includes('image') || fileType.includes('pdf'));
if (isPreviewable) {
res.setHeader('Content-Disposition', `inline; filename="${fileName}"`);
} else {
res.setHeader('Content-Disposition', `attachment; filename="${fileName}"`);
}
res.sendFile(absolutePath, (err) => {
if (err && !res.headersSent) {
res.status(500).json({ success: false, error: 'Failed to serve file' });
}
});
})
); );
// Download workflow document // Download workflow document
router.get('/documents/:documentId/download', router.get('/documents/:documentId/download',
authenticateToken, authenticateToken,
asyncHandler(async (req: any, res: Response) => { asyncHandler((req: any, res: Response) => documentController.getWorkflowDocument(req, res, 'download'))
const { documentId } = req.params;
const { Document } = require('@models/Document');
const { gcsStorageService } = require('../services/gcsStorage.service');
const fs = require('fs');
const document = await Document.findOne({ where: { documentId } });
if (!document) {
res.status(404).json({ success: false, error: 'Document not found' });
return;
}
const storageUrl = (document as any).storageUrl || (document as any).storage_url;
const filePath = (document as any).filePath || (document as any).file_path;
const fileName = (document as any).originalFileName || (document as any).original_file_name || (document as any).fileName;
// Check if it's a GCS URL
const isGcsUrl = storageUrl && (storageUrl.startsWith('https://storage.googleapis.com') || storageUrl.startsWith('gs://'));
if (isGcsUrl) {
// Redirect to GCS public URL for download
res.redirect(storageUrl);
return;
}
// If storageUrl is null but filePath indicates GCS storage, stream file directly from GCS
if (!storageUrl && filePath && filePath.startsWith('requests/')) {
try {
// Use the existing GCS storage service instance
if (!gcsStorageService.isConfigured()) {
throw new Error('GCS not configured');
}
// Access the storage instance from the service
const { Storage } = require('@google-cloud/storage');
const keyFilePath = process.env.GCP_KEY_FILE || '';
const bucketName = process.env.GCP_BUCKET_NAME || '';
const path = require('path');
const resolvedKeyPath = path.isAbsolute(keyFilePath)
? keyFilePath
: path.resolve(process.cwd(), keyFilePath);
const storage = new Storage({
projectId: process.env.GCP_PROJECT_ID || '',
keyFilename: resolvedKeyPath,
});
const bucket = storage.bucket(bucketName);
const file = bucket.file(filePath);
// Check if file exists
const [exists] = await file.exists();
if (!exists) {
res.status(404).json({ success: false, error: 'File not found in GCS' });
return;
}
// Get file metadata for content type
const [metadata] = await file.getMetadata();
const contentType = metadata.contentType || (document as any).mimeType || (document as any).mime_type || 'application/octet-stream';
// Set CORS headers
const origin = req.headers.origin;
if (origin) {
res.setHeader('Access-Control-Allow-Origin', origin);
res.setHeader('Access-Control-Allow-Credentials', 'true');
}
res.setHeader('Access-Control-Expose-Headers', 'Content-Type, Content-Disposition');
// Set headers for download
res.setHeader('Content-Type', contentType);
res.setHeader('Content-Disposition', createContentDisposition('attachment', fileName));
// Stream file from GCS to response
file.createReadStream()
.on('error', (streamError: Error) => {
const logger = require('../utils/logger').default;
logger.error('[Workflow] Failed to stream file from GCS for download', {
documentId,
filePath,
error: streamError.message,
});
if (!res.headersSent) {
res.status(500).json({
success: false,
error: 'Failed to stream file from storage'
});
}
})
.pipe(res);
return;
} catch (gcsError) {
const logger = require('../utils/logger').default;
logger.error('[Workflow] Failed to access GCS file for download', {
documentId,
filePath,
error: gcsError instanceof Error ? gcsError.message : 'Unknown error',
});
res.status(500).json({
success: false,
error: 'Failed to access file. Please try again.'
});
return;
}
}
// Local file handling - check if storageUrl is a local path (starts with /uploads/)
if (storageUrl && storageUrl.startsWith('/uploads/')) {
// Extract relative path from storageUrl (remove /uploads/ prefix)
const relativePath = storageUrl.replace(/^\/uploads\//, '');
const absolutePath = path.join(UPLOAD_DIR, relativePath);
// Check if file exists
if (!fs.existsSync(absolutePath)) {
res.status(404).json({ success: false, error: 'File not found on server' });
return;
}
// Set CORS headers
const origin = req.headers.origin;
if (origin) {
res.setHeader('Access-Control-Allow-Origin', origin);
res.setHeader('Access-Control-Allow-Credentials', 'true');
}
res.setHeader('Access-Control-Expose-Headers', 'Content-Type, Content-Disposition');
// Set headers for download
const fileTypeForDownload = (document as any).mimeType || (document as any).mime_type || 'application/octet-stream';
res.setHeader('Content-Type', fileTypeForDownload);
res.setHeader('Content-Disposition', createContentDisposition('attachment', fileName));
res.download(absolutePath, fileName, (err) => {
if (err && !res.headersSent) {
res.status(500).json({ success: false, error: 'Failed to download file' });
}
});
return;
}
// Legacy local file handling (absolute path stored in filePath)
// Resolve relative path if needed
const absolutePath = filePath && !path.isAbsolute(filePath)
? path.join(UPLOAD_DIR, filePath)
: filePath;
if (!absolutePath || !fs.existsSync(absolutePath)) {
res.status(404).json({ success: false, error: 'File not found on server' });
return;
}
res.download(absolutePath, fileName, (err) => {
if (err && !res.headersSent) {
res.status(500).json({ success: false, error: 'Failed to download file' });
}
});
})
); );
// Preview work note attachment (serves file for inline viewing) // Preview work note attachment
router.get('/work-notes/attachments/:attachmentId/preview', router.get('/work-notes/attachments/:attachmentId/preview',
authenticateToken, authenticateToken,
asyncHandler(async (req: any, res: Response) => { asyncHandler((req: any, res: Response) => documentController.getWorkNoteAttachment(req, res, 'preview'))
const { attachmentId } = req.params;
const fileInfo = await workNoteService.downloadAttachment(attachmentId);
const fs = require('fs');
// Check if it's a GCS URL
if (fileInfo.isGcsUrl && fileInfo.storageUrl) {
// Redirect to GCS public URL
res.redirect(fileInfo.storageUrl);
return;
}
// Local file handling - check if storageUrl is a local path (starts with /uploads/)
if (fileInfo.storageUrl && fileInfo.storageUrl.startsWith('/uploads/')) {
// Extract relative path from storageUrl (remove /uploads/ prefix)
const relativePath = fileInfo.storageUrl.replace(/^\/uploads\//, '');
const absolutePath = path.join(UPLOAD_DIR, relativePath);
// Check if file exists
if (!fs.existsSync(absolutePath)) {
res.status(404).json({ success: false, error: 'File not found' });
return;
}
// Set CORS headers to allow blob URL creation when served from same origin
const origin = req.headers.origin;
if (origin) {
res.setHeader('Access-Control-Allow-Origin', origin);
res.setHeader('Access-Control-Allow-Credentials', 'true');
}
res.setHeader('Access-Control-Expose-Headers', 'Content-Type, Content-Disposition');
// Set appropriate content type
res.contentType(fileInfo.fileType || 'application/octet-stream');
// For images and PDFs, allow inline viewing
const isPreviewable = fileInfo.fileType && (fileInfo.fileType.includes('image') || fileInfo.fileType.includes('pdf'));
if (isPreviewable) {
res.setHeader('Content-Disposition', `inline; filename="${fileInfo.fileName}"`);
} else {
res.setHeader('Content-Disposition', `attachment; filename="${fileInfo.fileName}"`);
}
res.sendFile(absolutePath, (err) => {
if (err && !res.headersSent) {
res.status(500).json({ success: false, error: 'Failed to serve file' });
}
});
return;
}
// Legacy local file handling (absolute path stored in filePath)
// Resolve relative path if needed
const absolutePath = fileInfo.filePath && !path.isAbsolute(fileInfo.filePath)
? path.join(UPLOAD_DIR, fileInfo.filePath)
: fileInfo.filePath;
if (!absolutePath || !fs.existsSync(absolutePath)) {
res.status(404).json({ success: false, error: 'File not found' });
return;
}
// Set CORS headers to allow blob URL creation when served from same origin
const origin = req.headers.origin;
if (origin) {
res.setHeader('Access-Control-Allow-Origin', origin);
res.setHeader('Access-Control-Allow-Credentials', 'true');
}
res.setHeader('Access-Control-Expose-Headers', 'Content-Type, Content-Disposition');
// Set appropriate content type
res.contentType(fileInfo.fileType || 'application/octet-stream');
// For images and PDFs, allow inline viewing
const isPreviewable = fileInfo.fileType && (fileInfo.fileType.includes('image') || fileInfo.fileType.includes('pdf'));
if (isPreviewable) {
res.setHeader('Content-Disposition', `inline; filename="${fileInfo.fileName}"`);
} else {
res.setHeader('Content-Disposition', `attachment; filename="${fileInfo.fileName}"`);
}
res.sendFile(absolutePath, (err) => {
if (err && !res.headersSent) {
res.status(500).json({ success: false, error: 'Failed to serve file' });
}
});
})
); );
// Download work note attachment // Download work note attachment
router.get('/work-notes/attachments/:attachmentId/download', router.get('/work-notes/attachments/:attachmentId/download',
authenticateToken, authenticateToken,
asyncHandler(async (req: any, res: Response) => { asyncHandler((req: any, res: Response) => documentController.getWorkNoteAttachment(req, res, 'download'))
const { attachmentId } = req.params;
const fileInfo = await workNoteService.downloadAttachment(attachmentId);
const fs = require('fs');
// Check if it's a GCS URL
if (fileInfo.isGcsUrl && fileInfo.storageUrl) {
// Redirect to GCS public URL for download
res.redirect(fileInfo.storageUrl);
return;
}
// Local file handling - check if storageUrl is a local path (starts with /uploads/)
if (fileInfo.storageUrl && fileInfo.storageUrl.startsWith('/uploads/')) {
// Extract relative path from storageUrl (remove /uploads/ prefix)
const relativePath = fileInfo.storageUrl.replace(/^\/uploads\//, '');
const absolutePath = path.join(UPLOAD_DIR, relativePath);
// Check if file exists
if (!fs.existsSync(absolutePath)) {
res.status(404).json({ success: false, error: 'File not found' });
return;
}
// Set CORS headers
const origin = req.headers.origin;
if (origin) {
res.setHeader('Access-Control-Allow-Origin', origin);
res.setHeader('Access-Control-Allow-Credentials', 'true');
}
res.setHeader('Access-Control-Expose-Headers', 'Content-Type, Content-Disposition');
res.download(absolutePath, fileInfo.fileName, (err) => {
if (err && !res.headersSent) {
res.status(500).json({ success: false, error: 'Failed to download file' });
}
});
return;
}
// Legacy local file handling (absolute path stored in filePath)
// Resolve relative path if needed
const absolutePath = fileInfo.filePath && !path.isAbsolute(fileInfo.filePath)
? path.join(UPLOAD_DIR, fileInfo.filePath)
: fileInfo.filePath;
if (!absolutePath || !fs.existsSync(absolutePath)) {
res.status(404).json({ success: false, error: 'File not found' });
return;
}
res.download(absolutePath, fileInfo.fileName, (err) => {
if (err && !res.headersSent) {
res.status(500).json({ success: false, error: 'Failed to download file' });
}
});
})
); );
// Add participant routes // Add participant routes

View File

@ -174,6 +174,7 @@ async function runMigrations(): Promise<void> {
const m57 = require('../migrations/20260225100001-add-form16-archived-at'); const m57 = require('../migrations/20260225100001-add-form16-archived-at');
const m58 = require('../migrations/20260303100001-drop-form16a-number-unique'); const m58 = require('../migrations/20260303100001-drop-form16a-number-unique');
const m59 = require('../migrations/20260309-add-wfm-push-fields'); const m59 = require('../migrations/20260309-add-wfm-push-fields');
const m60 = require('../migrations/20260316-update-holiday-type-enum');
const migrations = [ const migrations = [
{ name: '2025103000-create-users', module: m0 }, { name: '2025103000-create-users', module: m0 },
@ -240,6 +241,7 @@ async function runMigrations(): Promise<void> {
{ name: '20260225100001-add-form16-archived-at', module: m57 }, { name: '20260225100001-add-form16-archived-at', module: m57 },
{ name: '20260303100001-drop-form16a-number-unique', module: m58 }, { name: '20260303100001-drop-form16a-number-unique', module: m58 },
{ name: '20260309-add-wfm-push-fields', module: m59 }, { name: '20260309-add-wfm-push-fields', module: m59 },
{ name: '20260316-update-holiday-type-enum', module: m60 },
]; ];
// Dynamically import sequelize after secrets are loaded // Dynamically import sequelize after secrets are loaded

View File

@ -64,6 +64,7 @@ import * as m56 from '../migrations/20260225000001-create-form16-non-submitted-n
import * as m57 from '../migrations/20260225100001-add-form16-archived-at'; import * as m57 from '../migrations/20260225100001-add-form16-archived-at';
import * as m58 from '../migrations/20260303100001-drop-form16a-number-unique'; import * as m58 from '../migrations/20260303100001-drop-form16a-number-unique';
import * as m59 from '../migrations/20260309-add-wfm-push-fields'; import * as m59 from '../migrations/20260309-add-wfm-push-fields';
import * as m60 from '../migrations/20260316-update-holiday-type-enum';
interface Migration { interface Migration {
name: string; name: string;
@ -135,6 +136,7 @@ const migrations: Migration[] = [
{ name: '20260225100001-add-form16-archived-at', module: m57 }, { name: '20260225100001-add-form16-archived-at', module: m57 },
{ name: '20260303100001-drop-form16a-number-unique', module: m58 }, { name: '20260303100001-drop-form16a-number-unique', module: m58 },
{ name: '20260309-add-wfm-push-fields', module: m59 }, { name: '20260309-add-wfm-push-fields', module: m59 },
{ name: '20260316-update-holiday-type-enum', module: m60 },
]; ];

View File

@ -32,6 +32,7 @@ import { activityService } from './activity.service';
import { UserService } from './user.service'; import { UserService } from './user.service';
import { dmsIntegrationService } from './dmsIntegration.service'; import { dmsIntegrationService } from './dmsIntegration.service';
import { validateDealerUser } from './userEnrichment.service'; import { validateDealerUser } from './userEnrichment.service';
import { sanitizeStrict, sanitizePermissive, isHtmlEmpty } from '../utils/sanitizer';
// findDealerLocally removed (duplicate) // findDealerLocally removed (duplicate)
@ -100,7 +101,26 @@ export class DealerClaimService {
}>; }>;
} }
): Promise<WorkflowRequest> { ): Promise<WorkflowRequest> {
const transaction = await sequelize.transaction();
try { try {
// 1. Sanitize user inputs
const sanitizedName = sanitizeStrict(claimData.activityName);
const sanitizedLocation = sanitizeStrict(claimData.location);
const sanitizedDesc = sanitizePermissive(claimData.requestDescription);
// Verify that sanitization didn't empty the required fields
if (!sanitizedName) {
throw new Error('Activity Name is required and must contain valid text');
}
if (!sanitizedLocation) {
throw new Error('Location is required and must contain valid text');
}
if (isHtmlEmpty(sanitizedDesc)) {
throw new Error('Description is required and must contain valid text');
}
// 2. Map and validate dealer user
const dealerCode = claimData.dealerCode;
// 0. Validate Dealer User (jobTitle='Dealer' and employeeId=dealerCode) // 0. Validate Dealer User (jobTitle='Dealer' and employeeId=dealerCode)
logger.info(`[DealerClaimService] Validating dealer for code: ${claimData.dealerCode}`); logger.info(`[DealerClaimService] Validating dealer for code: ${claimData.dealerCode}`);
const dealerUser = await validateDealerUser(claimData.dealerCode); const dealerUser = await validateDealerUser(claimData.dealerCode);
@ -229,49 +249,49 @@ export class DealerClaimService {
} }
} }
const workflowService = this.getWorkflowService(); const workflow = await this.getWorkflowService().createWorkflow(userId, {
const workflowRequest = await workflowService.createWorkflow(userId, { templateType: 'DEALER CLAIM',
templateType: 'DEALER CLAIM' as any,
workflowType: 'CLAIM_MANAGEMENT', workflowType: 'CLAIM_MANAGEMENT',
title: `${claimData.activityName} - Claim Request`, title: `Dealer Claim: ${sanitizedName} (${dealerCode})`,
description: claimData.requestDescription, description: sanitizedDesc,
priority: Priority.STANDARD, priority: (claimData as any).priority || Priority.STANDARD,
approvalLevels: transformedLevels, approvalLevels: transformedLevels,
participants: transformedParticipants, participants: transformedParticipants,
isDraft: false isDraft: false
} as any); } as any, { transaction, ipAddress: null, userAgent: 'System/DealerClaimService' });
// Create claim details // Create claim details
await DealerClaimDetails.create({ const claimDetails = await DealerClaimDetails.create({
requestId: workflowRequest.requestId, requestId: workflow.requestId,
activityName: claimData.activityName, activityName: sanitizedName,
activityType: claimData.activityType, activityType: claimData.activityType,
dealerCode: claimData.dealerCode, dealerCode: dealerCode,
dealerName: claimData.dealerName, dealerName: claimData.dealerName,
dealerEmail: claimData.dealerEmail, dealerEmail: claimData.dealerEmail,
dealerPhone: claimData.dealerPhone, dealerPhone: claimData.dealerPhone,
dealerAddress: claimData.dealerAddress, dealerAddress: claimData.dealerAddress,
activityDate: claimData.activityDate, activityDate: claimData.activityDate,
location: claimData.location, location: sanitizedLocation,
periodStartDate: claimData.periodStartDate, periodStartDate: claimData.periodStartDate,
periodEndDate: claimData.periodEndDate, periodEndDate: claimData.periodEndDate,
}); } as any, { transaction });
// Initialize budget tracking with initial estimated budget (if provided) // Initialize budget tracking with initial estimated budget (if provided)
await ClaimBudgetTracking.upsert({ await ClaimBudgetTracking.upsert({
requestId: workflowRequest.requestId, requestId: workflow.requestId,
initialEstimatedBudget: claimData.estimatedBudget, initialEstimatedBudget: claimData.estimatedBudget,
budgetStatus: BudgetStatus.DRAFT, budgetStatus: BudgetStatus.DRAFT,
currency: 'INR', currency: 'INR',
}); }, { transaction });
// Redundant level creation removed - handled by workflowService.createWorkflow // 3. Commit transaction
await transaction.commit();
// Redundant TAT scheduling removed - handled by workflowService.createWorkflow logger.info(`[DealerClaimService] Created claim request: ${workflow.requestNumber}`);
return workflow;
logger.info(`[DealerClaimService] Created claim request: ${workflowRequest.requestNumber}`);
return workflowRequest;
} catch (error: any) { } catch (error: any) {
// Rollback transaction on error
if (transaction) await transaction.rollback();
// Log detailed error information for debugging // Log detailed error information for debugging
const errorDetails: any = { const errorDetails: any = {
message: error.message, message: error.message,

View File

@ -18,7 +18,7 @@ import { notificationService } from './notification.service';
import { activityService } from './activity.service'; import { activityService } from './activity.service';
import { tatSchedulerService } from './tatScheduler.service'; import { tatSchedulerService } from './tatScheduler.service';
import { emitToRequestRoom } from '../realtime/socket'; import { emitToRequestRoom } from '../realtime/socket';
import { sanitizeHtml } from '@utils/sanitizer'; import { sanitizeStrict, sanitizePermissive, isHtmlEmpty } from '../utils/sanitizer';
import { canViewForm16Submission } from './form16Permission.service'; import { canViewForm16Submission } from './form16Permission.service';
export class WorkflowService { export class WorkflowService {
@ -918,9 +918,9 @@ export class WorkflowService {
const submissionIds = form16Rows.map((r: any) => r.id); const submissionIds = form16Rows.map((r: any) => r.id);
const creditNotes = submissionIds.length const creditNotes = submissionIds.length
? await Form16CreditNote.findAll({ ? await Form16CreditNote.findAll({
where: { submissionId: submissionIds }, where: { submissionId: submissionIds },
attributes: ['submissionId', 'creditNoteNumber'], attributes: ['submissionId', 'creditNoteNumber'],
}) })
: []; : [];
const cnBySubId = new Map<number, string>(); const cnBySubId = new Map<number, string>();
for (const c of creditNotes as any[]) { for (const c of creditNotes as any[]) {
@ -2684,30 +2684,46 @@ export class WorkflowService {
} }
}; };
} }
async createWorkflow(initiatorId: string, workflowData: CreateWorkflowRequest, requestMetadata?: { ipAddress?: string | null; userAgent?: string | null }): Promise<WorkflowRequest> { async createWorkflow(
initiatorId: string,
workflowData: CreateWorkflowRequest,
requestMetadata?: { ipAddress?: string | null; userAgent?: string | null; transaction?: any }
): Promise<WorkflowRequest> {
try { try {
const requestNumber = await generateRequestNumber(); const requestNumber = await generateRequestNumber();
const totalTatHours = workflowData.approvalLevels.reduce((sum, level) => sum + level.tatHours, 0); const totalTatHours = workflowData.approvalLevels.reduce((sum, level) => sum + level.tatHours, 0);
const isDraftRequested = workflowData.isDraft === true; const isDraftRequested = workflowData.isDraft === true;
const initialStatus = isDraftRequested ? WorkflowStatus.DRAFT : WorkflowStatus.PENDING; const initialStatus = isDraftRequested ? WorkflowStatus.DRAFT : WorkflowStatus.PENDING;
const now = new Date(); const now = new Date();
const transaction = requestMetadata?.transaction;
const sanitizedTitle = workflowData.title ? sanitizeStrict(workflowData.title) : workflowData.title;
const sanitizedDescription = workflowData.description ? sanitizePermissive(workflowData.description) : workflowData.description;
if (!isDraftRequested && !sanitizedTitle) {
throw new Error('A valid title is required. Please ensure the title contains valid content.');
}
if (!isDraftRequested && (!sanitizedDescription || isHtmlEmpty(sanitizedDescription))) {
throw new Error('A valid description is required. Please ensure the description contains valid content.');
}
const workflow = await WorkflowRequest.create({ const workflow = await WorkflowRequest.create({
requestNumber, requestNumber,
initiatorId, initiatorId,
templateType: workflowData.templateType, templateType: workflowData.templateType,
workflowType: workflowData.workflowType || 'NON_TEMPLATIZED', workflowType: workflowData.workflowType || 'NON_TEMPLATIZED',
title: workflowData.title ? sanitizeHtml(workflowData.title) : workflowData.title, title: sanitizedTitle,
description: workflowData.description ? sanitizeHtml(workflowData.description) : workflowData.description, description: sanitizedDescription,
priority: workflowData.priority, priority: workflowData.priority,
status: initialStatus,
currentLevel: 1, currentLevel: 1,
totalLevels: workflowData.approvalLevels.length, totalLevels: workflowData.approvalLevels.length,
totalTatHours, totalTatHours,
status: initialStatus,
isDraft: isDraftRequested, isDraft: isDraftRequested,
isDeleted: false, isDeleted: false,
submissionDate: isDraftRequested ? undefined : now submissionDate: isDraftRequested ? undefined : now
}); }, { transaction });
// Create approval levels if skipCreation is false // Create approval levels if skipCreation is false
if (!workflowData.skipCreation) { if (!workflowData.skipCreation) {
@ -2726,7 +2742,7 @@ export class WorkflowService {
remainingHours: levelData.tatHours, remainingHours: levelData.tatHours,
tatPercentageUsed: 0, tatPercentageUsed: 0,
isFinalApprover: levelData.isFinalApprover || false isFinalApprover: levelData.isFinalApprover || false
}); }, { transaction });
} }
} }
@ -2771,7 +2787,7 @@ export class WorkflowService {
notificationEnabled: participantData.notificationEnabled ?? true, notificationEnabled: participantData.notificationEnabled ?? true,
addedBy: initiatorId, addedBy: initiatorId,
isActive: true isActive: true
}); }, { transaction });
} }
} }
@ -3462,10 +3478,10 @@ export class WorkflowService {
const previousRequestIdSet = new Set(previousRequestIds); const previousRequestIdSet = new Set(previousRequestIds);
const previousWorkflows = previousRequestIds.length > 0 const previousWorkflows = previousRequestIds.length > 0
? await WorkflowRequest.findAll({ ? await WorkflowRequest.findAll({
where: { requestId: { [Op.in]: previousRequestIds } }, where: { requestId: { [Op.in]: previousRequestIds } },
attributes: ['requestId', 'requestNumber'], attributes: ['requestId', 'requestNumber'],
raw: true, raw: true,
}) as any[] }) as any[]
: []; : [];
const requestNumberByRequestId = new Map<string, string>(); const requestNumberByRequestId = new Map<string, string>();
for (const w of previousWorkflows) { for (const w of previousWorkflows) {
@ -3476,10 +3492,10 @@ export class WorkflowService {
const submissionIdsPrev = previousRows.map((r) => r.id); const submissionIdsPrev = previousRows.map((r) => r.id);
const creditNotesPrev = submissionIdsPrev.length > 0 const creditNotesPrev = submissionIdsPrev.length > 0
? await Form16CreditNote.findAll({ ? await Form16CreditNote.findAll({
where: { submissionId: submissionIdsPrev }, where: { submissionId: submissionIdsPrev },
attributes: ['submissionId', 'creditNoteNumber', 'issueDate'], attributes: ['submissionId', 'creditNoteNumber', 'issueDate'],
raw: true, raw: true,
}) as any[] }) as any[]
: []; : [];
const cnBySubId = new Map<number, { creditNoteNumber: string; issueDate?: string }>(); const cnBySubId = new Map<number, { creditNoteNumber: string; issueDate?: string }>();
for (const c of creditNotesPrev) { for (const c of creditNotesPrev) {
@ -3540,10 +3556,13 @@ export class WorkflowService {
try { try {
// Sanitize title and description if provided // Sanitize title and description if provided
if (updateData.title) { if (updateData.title) {
updateData.title = sanitizeHtml(updateData.title); updateData.title = sanitizeStrict(updateData.title);
} }
if (updateData.description) { if (updateData.description) {
updateData.description = sanitizeHtml(updateData.description); updateData.description = sanitizePermissive(updateData.description);
}
if (updateData.conclusionRemark) {
updateData.conclusionRemark = sanitizePermissive(updateData.conclusionRemark);
} }
const workflow = await this.findWorkflowByIdentifier(requestId); const workflow = await this.findWorkflowByIdentifier(requestId);

View File

@ -12,7 +12,7 @@ import { gcsStorageService } from './gcsStorage.service';
import logger from '@utils/logger'; import logger from '@utils/logger';
import fs from 'fs'; import fs from 'fs';
import path from 'path'; import path from 'path';
import { sanitizeHtml } from '@utils/sanitizer'; import { sanitizeStrict, sanitizePermissive, isHtmlEmpty } from '../utils/sanitizer';
export class WorkNoteService { export class WorkNoteService {
async list(requestId: string) { async list(requestId: string) {
@ -81,12 +81,17 @@ export class WorkNoteService {
async create(requestId: string, user: { userId: string; name?: string; role?: string }, payload: { message: string; isPriority?: boolean; parentNoteId?: string | null; mentionedUsers?: string[] | null; }, files?: Array<{ path?: string | null; buffer?: Buffer; originalname: string; mimetype: string; size: number }>, requestMetadata?: { ipAddress?: string | null; userAgent?: string | null }): Promise<any> { async create(requestId: string, user: { userId: string; name?: string; role?: string }, payload: { message: string; isPriority?: boolean; parentNoteId?: string | null; mentionedUsers?: string[] | null; }, files?: Array<{ path?: string | null; buffer?: Buffer; originalname: string; mimetype: string; size: number }>, requestMetadata?: { ipAddress?: string | null; userAgent?: string | null }): Promise<any> {
logger.info('[WorkNote] Creating note:', { requestId, user, messageLength: payload.message?.length }); logger.info('[WorkNote] Creating note:', { requestId, user, messageLength: payload.message?.length });
const sanitizedMessage = sanitizeStrict(payload.message);
if (isHtmlEmpty(sanitizedMessage)) {
throw new Error('Message is required and must contain valid text');
}
const note = await WorkNote.create({ const note = await WorkNote.create({
requestId, requestId,
userId: user.userId, userId: user.userId,
userName: user.name || null, userName: user.name || null,
userRole: user.role || null, // Store participant type (INITIATOR/APPROVER/SPECTATOR) userRole: user.role || null, // Store participant type (INITIATOR/APPROVER/SPECTATOR)
message: sanitizeHtml(payload.message), message: sanitizedMessage,
isPriority: !!payload.isPriority, isPriority: !!payload.isPriority,
parentNoteId: payload.parentNoteId || null, parentNoteId: payload.parentNoteId || null,
mentionedUsers: payload.mentionedUsers || null, mentionedUsers: payload.mentionedUsers || null,

View File

@ -180,6 +180,7 @@ const sanitizeError = (error: any): object => {
name: error.name, name: error.name,
message: error.message, message: error.message,
stack: error.stack, stack: error.stack,
errors: (error as any).errors,
...(error as any).statusCode && { statusCode: (error as any).statusCode }, ...(error as any).statusCode && { statusCode: (error as any).statusCode },
}; };
} }

View File

@ -10,6 +10,15 @@ import { FilterXSS, whiteList } from 'xss';
export const sanitizeHtml = (html: string): string => { export const sanitizeHtml = (html: string): string => {
if (!html) return ''; if (!html) return '';
// Decode basic entities to prevent bypasses (e.g. &lt;script&gt;)
// and ensuring we catch them during tag stripping
const decoded = html
.replace(/&lt;/g, '<')
.replace(/&gt;/g, '>')
.replace(/&quot;/g, '"')
.replace(/&#39;/g, "'")
.replace(/&amp;/g, '&');
// Custom options can be added here if we need to allow specific tags or attributes // Custom options can be added here if we need to allow specific tags or attributes
// For now, using default options which are quite secure // For now, using default options which are quite secure
// Custom options to restrict allowed tags // Custom options to restrict allowed tags
@ -70,29 +79,79 @@ export const sanitizeHtml = (html: string): string => {
}; };
const xssFilter = new FilterXSS(options); const xssFilter = new FilterXSS(options);
return xssFilter.process(html); return xssFilter.process(decoded);
};
/**
* STRICT Sanitization: Removes ALL HTML tags.
* Use for titles, names, IDs, etc.
*
* @param text The raw string to sanitize
* @returns The sanitized string with all tags removed
*/
export const sanitizeStrict = (text: string): string => {
if (!text) return '';
// Decode basic entities to prevent bypasses
const decoded = text
.replace(/&lt;/g, '<')
.replace(/&gt;/g, '>')
.replace(/&quot;/g, '"')
.replace(/&#39;/g, "'")
.replace(/&amp;/g, '&');
const options = {
whiteList: {}, // No tags allowed
stripIgnoreTag: true,
stripIgnoreTagBody: ['script', 'style', 'iframe', 'object', 'embed']
};
const xssFilter = new FilterXSS(options);
return xssFilter.process(decoded);
};
/**
* PERMISSIVE Sanitization: Allows safe formatting tags.
* Use for rich-text fields (descriptions, comments).
* Alias for the existing sanitizeHtml.
*
* @param html The raw HTML string to sanitize
* @returns The sanitized HTML string
*/
export const sanitizePermissive = (html: string): string => {
return sanitizeHtml(html);
}; };
/** /**
* Sanitizes an object by recursively sanitizing all string properties. * Sanitizes an object by recursively sanitizing all string properties.
* Useful for sanitizing request bodies or complex nested structures. * Useful for sanitizing request bodies or complex nested structures.
* *
* Uses field names to decide between strict and permissive sanitization.
*
* @param obj The object to sanitize * @param obj The object to sanitize
* @param richTextFields Set of keys that should use permissive sanitization
* @returns The sanitized object * @returns The sanitized object
*/ */
export const sanitizeObject = <T>(obj: T): T => { export const sanitizeObject = <T>(obj: T, richTextFields: Set<string> = new Set([
'description', 'requestDescription', 'comments', 'remarks', 'message', 'finalRemark',
'aiGeneratedRemark', 'remark', 'closingRemarks', 'alertSubmitForm16Template',
'reminderNotificationTemplate', 'templateRe', 'templateDealers', 'template'
])): T => {
if (!obj || typeof obj !== 'object') return obj; if (!obj || typeof obj !== 'object') return obj;
if (Array.isArray(obj)) { if (Array.isArray(obj)) {
return obj.map(item => sanitizeObject(item)) as any; return obj.map(item => sanitizeObject(item, richTextFields)) as any;
} }
const sanitized: any = {}; const sanitized: any = {};
for (const [key, value] of Object.entries(obj)) { for (const [key, value] of Object.entries(obj)) {
if (typeof value === 'string') { if (typeof value === 'string') {
sanitized[key] = sanitizeHtml(value); const isRichText = richTextFields.has(key);
const sanitizedValue = isRichText ? sanitizePermissive(value) : sanitizeStrict(value);
// If it's rich text and became effectively empty, keep it empty string for validation
sanitized[key] = (isRichText && isHtmlEmpty(sanitizedValue)) ? '' : sanitizedValue;
} else if (typeof value === 'object' && value !== null) { } else if (typeof value === 'object' && value !== null) {
sanitized[key] = sanitizeObject(value); sanitized[key] = sanitizeObject(value, richTextFields);
} else { } else {
sanitized[key] = value; sanitized[key] = value;
} }
@ -100,3 +159,30 @@ export const sanitizeObject = <T>(obj: T): T => {
return sanitized as T; return sanitized as T;
}; };
/**
* Checks if HTML content is effectively empty (i.e., only whitespace or empty tags).
* Useful for validating required rich-text fields after sanitization.
*
* @param html The sanitized HTML string to check
* @returns True if the HTML has no meaningful content (no text, no images)
*/
export const isHtmlEmpty = (html: string | null | undefined): boolean => {
if (!html) return true;
// First, check if there are any images (which are valid content but have no text)
if (html.includes('<img')) return false;
// Strip all HTML tags and trim
const textOnly = html.replace(/<[^>]*>/g, '').trim();
// Check for common non-breaking space and other entities that might stay after tag stripping
const textWithoutEntities = textOnly
.replace(/&nbsp;/g, '')
.replace(/&zwj;/g, '')
.replace(/&zwnj;/g, '')
.replace(/&shy;/g, '')
.trim();
return textWithoutEntities === '';
};

View File

@ -6,7 +6,7 @@ export const createHolidaySchema = z.object({
holidayDate: z.string().min(1, 'Holiday date is required'), holidayDate: z.string().min(1, 'Holiday date is required'),
holidayName: z.string().min(1, 'Holiday name is required').max(255, 'Holiday name too long'), holidayName: z.string().min(1, 'Holiday name is required').max(255, 'Holiday name too long'),
description: z.string().max(1000, 'Description too long').optional(), description: z.string().max(1000, 'Description too long').optional(),
holidayType: z.enum(['NATIONAL', 'REGIONAL', 'COMPANY', 'OPTIONAL']).optional(), holidayType: z.enum(['NATIONAL', 'REGIONAL', 'ORGANIZATIONAL', 'OPTIONAL']).optional(),
isRecurring: z.boolean().optional(), isRecurring: z.boolean().optional(),
}); });
@ -65,3 +65,36 @@ export const updateActivityTypeSchema = createActivityTypeSchema.partial();
export const activityTypeParamsSchema = z.object({ export const activityTypeParamsSchema = z.object({
activityTypeId: z.string().uuid('Invalid activity type ID'), activityTypeId: z.string().uuid('Invalid activity type ID'),
}); });
// ── Form 16 Configuration Schemas ──
const notificationItemSchema = z.object({
enabled: z.boolean(),
template: z.string().optional(),
});
const notification26AsSchema = z.object({
enabled: z.boolean(),
templateRe: z.string().optional(),
templateDealers: z.string().optional(),
});
export const updateForm16ConfigSchema = z.object({
submissionViewerEmails: z.array(z.string().email()).optional(),
twentySixAsViewerEmails: z.array(z.string().email()).optional(),
reminderEnabled: z.boolean().optional(),
reminderDays: z.number().int().min(0).optional(),
notification26AsDataAdded: notification26AsSchema.optional(),
notificationForm16SuccessCreditNote: notificationItemSchema.optional(),
notificationForm16Unsuccessful: notificationItemSchema.optional(),
alertSubmitForm16Enabled: z.boolean().optional(),
alertSubmitForm16FrequencyDays: z.number().int().min(0).optional(),
alertSubmitForm16FrequencyHours: z.number().int().min(0).optional(),
alertSubmitForm16RunAtTime: z.string().regex(/^(\d{1,2}:\d{2})?$/, 'Time must be in HH:mm format').optional(),
alertSubmitForm16Template: z.string().optional(),
reminderNotificationEnabled: z.boolean().optional(),
reminderFrequencyDays: z.number().int().min(0).optional(),
reminderFrequencyHours: z.number().int().min(0).optional(),
reminderRunAtTime: z.string().regex(/^(\d{1,2}:\d{2})?$/, 'Time must be in HH:mm format').optional(),
reminderNotificationTemplate: z.string().optional(),
});

27
verify-object.ts Normal file
View File

@ -0,0 +1,27 @@
import { sanitizeObject } from './src/utils/sanitizer';
const runTest = (name: string, input: any) => {
const output = sanitizeObject(input);
console.log(`[${name}] Input: ${JSON.stringify(input)}`);
console.log(`[${name}] Output: ${JSON.stringify(output)}`);
console.log(`[${name}] Description Empty String: ${output.description === ''}`);
console.log('---');
};
console.log('--- SYSTEM SANITIZER OBJECT TEST ---');
// Case 1: Script only (should become empty string)
runTest('Script Only', { description: "<script>alert(1)</script>" });
// Case 2: Encoded script only (should become empty string)
runTest('Encoded Script', { description: "&lt;script&gt;alert(1)&lt;/script&gt;" });
// Case 3: Script inside valid tags (should become empty string if result is just empty tags)
runTest('Script in tags', { description: "<p><script>alert(1)</script></p>" });
// Case 4: Valid text (should stay same)
runTest('Valid Text', { description: "<p>Hello World</p>" });
// Case 5: Empty tags/spaces (should become empty string)
runTest('Empty Tags', { description: "<p> &nbsp; </p>" });