dealer claim multi-iteration implementation started

This commit is contained in:
laxmanhalaki 2026-01-13 19:18:39 +05:30
parent f89514eb2b
commit e3bda6df15
12 changed files with 2068 additions and 387 deletions

View File

@@ -250,6 +250,7 @@ export class DealerClaimController {
numberOfParticipants,
closedExpenses,
totalClosedExpenses,
completionDescription,
} = req.body;
// Parse closedExpenses if it's a JSON string
@@ -540,6 +541,7 @@ export class DealerClaimController {
totalClosedExpenses: totalClosedExpenses ? parseFloat(totalClosedExpenses) : 0,
invoicesReceipts: invoicesReceipts.length > 0 ? invoicesReceipts : undefined,
attendanceSheet: attendanceSheet || undefined,
completionDescription: completionDescription || undefined,
});
return ResponseHandler.success(res, { message: 'Completion documents submitted successfully' }, 'Completion submitted');
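For orientation, a hedged sketch of what a completion submission carrying the new completionDescription field might look like from a client. The endpoint path and request id are assumptions; only the field names come from the destructuring above (closedExpenses is parsed from a JSON string on the server, files travel as multipart parts).

```ts
// Sketch only: endpoint path, request id and expense values are illustrative.
async function submitCompletion(requestId: string): Promise<void> {
  const form = new FormData();
  form.append('numberOfParticipants', '42');
  form.append('completionDescription', 'Event completed; all invoices attached.');
  form.append('totalClosedExpenses', '125000');
  // Sent as a JSON string, matching the "Parse closedExpenses if it's a JSON string" step above
  form.append('closedExpenses', JSON.stringify([
    { description: 'Venue', amount: 75000 },
    { description: 'Catering', amount: 50000 },
  ]));
  // invoicesReceipts / attendanceSheet files would be appended here as Blob/File parts.

  await fetch(`/api/dealer-claims/${requestId}/completion`, { method: 'POST', body: form });
}
```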

View File

@@ -13,18 +13,20 @@ import path from 'path';
import crypto from 'crypto';
import { getRequestMetadata } from '@utils/requestUtils';
import { enrichApprovalLevels, enrichSpectators, validateInitiator } from '@services/userEnrichment.service';
import { DealerClaimService } from '@services/dealerClaim.service';
import logger from '@utils/logger';
const workflowService = new WorkflowService();
const dealerClaimService = new DealerClaimService();
export class WorkflowController {
async createWorkflow(req: AuthenticatedRequest, res: Response): Promise<void> {
try {
const validatedData = validateCreateWorkflow(req.body);
// Validate initiator exists
await validateInitiator(req.user.userId);
// Handle frontend format: map 'approvers' -> 'approvalLevels' for backward compatibility
let approvalLevels = validatedData.approvalLevels || [];
if (!approvalLevels.length && (req.body as any).approvers) {
@@ -36,38 +38,38 @@ export class WorkflowController {
isFinalApprover: index === approvers.length - 1,
}));
}
// Normalize approval levels: map approverEmail -> email for backward compatibility
const normalizedApprovalLevels = approvalLevels.map((level: any) => ({
...level,
email: level.email || level.approverEmail, // Support both formats
}));
// Enrich approval levels with user data (auto-lookup from AD if not in DB)
logger.info(`[WorkflowController] Enriching ${normalizedApprovalLevels.length} approval levels`);
const enrichedApprovalLevels = await enrichApprovalLevels(normalizedApprovalLevels as any);
// Enrich spectators if provided
// Normalize spectators: map userEmail -> email for backward compatibility
// Filter participants to only include SPECTATOR type (exclude INITIATOR and APPROVER)
const allParticipants = validatedData.spectators || validatedData.participants || [];
const spectators = allParticipants.filter((p: any) =>
!p.participantType || p.participantType === 'SPECTATOR'
);
const normalizedSpectators = spectators.map((spec: any) => ({
...spec,
email: spec.email || spec.userEmail, // Support both formats
})).filter((spec: any) => spec.email); // Only include entries with email
const enrichedSpectators = normalizedSpectators.length > 0
? await enrichSpectators(normalizedSpectators as any)
: [];
// Build complete participants array automatically
// This includes: INITIATOR + all APPROVERs + all SPECTATORs
const initiator = await User.findByPk(req.user.userId);
const initiatorEmail = (initiator as any).email;
const initiatorName = (initiator as any).displayName || (initiator as any).email;
const autoGeneratedParticipants = [
// Add initiator
{
@@ -94,7 +96,7 @@ export class WorkflowController {
// Add all spectators
...enrichedSpectators,
];
// Convert string literal priority to enum
const workflowData = {
...validatedData,
@@ -102,13 +104,13 @@ export class WorkflowController {
approvalLevels: enrichedApprovalLevels,
participants: autoGeneratedParticipants,
};
const requestMeta = getRequestMetadata(req);
const workflow = await workflowService.createWorkflow(req.user.userId, workflowData, {
ipAddress: requestMeta.ipAddress,
userAgent: requestMeta.userAgent
});
ResponseHandler.success(res, workflow, 'Workflow created successfully', 201);
} catch (error) {
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
@@ -131,7 +133,7 @@ export class WorkflowController {
ResponseHandler.error(res, 'payload is required', 400);
return;
}
let parsed;
try {
parsed = JSON.parse(raw);
@@ -139,7 +141,7 @@ export class WorkflowController {
ResponseHandler.error(res, 'Invalid JSON in payload', 400, parseError instanceof Error ? parseError.message : 'JSON parse error');
return;
}
// Transform frontend format to backend format BEFORE validation
// Map 'approvers' -> 'approvalLevels' for backward compatibility
if (!parsed.approvalLevels && parsed.approvers) {
@@ -151,57 +153,57 @@ export class WorkflowController {
isFinalApprover: index === approvers.length - 1,
}));
}
let validated;
try {
validated = validateCreateWorkflow(parsed);
} catch (validationError: any) {
// Zod validation errors provide detailed information
const errorMessage = validationError?.errors
? validationError.errors.map((e: any) => `${e.path.join('.')}: ${e.message}`).join('; ')
: (validationError instanceof Error ? validationError.message : 'Validation failed');
logger.error(`[WorkflowController] Validation failed:`, errorMessage);
ResponseHandler.error(res, 'Validation failed', 400, errorMessage);
return;
}
// Validate initiator exists
await validateInitiator(userId);
// Use the approval levels from validation (already transformed above)
let approvalLevels = validated.approvalLevels || [];
// Normalize approval levels: map approverEmail -> email for backward compatibility
const normalizedApprovalLevels = approvalLevels.map((level: any) => ({
...level,
email: level.email || level.approverEmail, // Support both formats
}));
// Enrich approval levels with user data (auto-lookup from AD if not in DB)
logger.info(`[WorkflowController] Enriching ${normalizedApprovalLevels.length} approval levels`);
const enrichedApprovalLevels = await enrichApprovalLevels(normalizedApprovalLevels as any);
// Enrich spectators if provided
// Normalize spectators: map userEmail -> email for backward compatibility
// Filter participants to only include SPECTATOR type (exclude INITIATOR and APPROVER)
const allParticipants = validated.spectators || validated.participants || [];
const spectators = allParticipants.filter((p: any) =>
!p.participantType || p.participantType === 'SPECTATOR'
);
const normalizedSpectators = spectators.map((spec: any) => ({
...spec,
email: spec.email || spec.userEmail, // Support both formats
})).filter((spec: any) => spec.email); // Only include entries with email
const enrichedSpectators = normalizedSpectators.length > 0
? await enrichSpectators(normalizedSpectators as any)
: [];
// Build complete participants array automatically
// This includes: INITIATOR + all APPROVERs + all SPECTATORs
const initiator = await User.findByPk(userId);
const initiatorEmail = (initiator as any).email;
const initiatorName = (initiator as any).displayName || (initiator as any).email;
const autoGeneratedParticipants = [
// Add initiator
{
@@ -228,9 +230,9 @@ export class WorkflowController {
// Add all spectators
...enrichedSpectators,
];
const workflowData = {
...validated,
priority: validated.priority as Priority,
approvalLevels: enrichedApprovalLevels,
participants: autoGeneratedParticipants,
@@ -250,13 +252,13 @@ export class WorkflowController {
const { activityService } = require('../services/activity.service');
const user = await User.findByPk(userId);
const uploaderName = (user as any)?.displayName || (user as any)?.email || 'User';
for (const file of files) {
// Get file buffer - multer.memoryStorage provides buffer, not path
const fileBuffer = (file as any).buffer || (file.path ? fs.readFileSync(file.path) : Buffer.from(''));
const checksum = crypto.createHash('sha256').update(fileBuffer).digest('hex');
const extension = path.extname(file.originalname).replace('.', '').toLowerCase();
// Upload with automatic fallback to local storage
const requestNumber = (workflow as any).requestNumber || (workflow as any).request_number;
const uploadResult = await gcsStorageService.uploadFileWithFallback({
@@ -266,10 +268,10 @@ export class WorkflowController {
requestNumber: requestNumber,
fileType: 'documents'
});
const storageUrl = uploadResult.storageUrl;
const gcsFilePath = uploadResult.filePath;
// Clean up local temporary file if it exists (from multer disk storage)
if (file.path && fs.existsSync(file.path)) {
try {
@@ -283,20 +285,20 @@ export class WorkflowController {
const MAX_FILE_NAME_LENGTH = 255;
const originalFileName = file.originalname;
let truncatedOriginalFileName = originalFileName;
if (originalFileName.length > MAX_FILE_NAME_LENGTH) {
// Preserve file extension when truncating
const ext = path.extname(originalFileName);
const nameWithoutExt = path.basename(originalFileName, ext);
const maxNameLength = MAX_FILE_NAME_LENGTH - ext.length;
if (maxNameLength > 0) {
truncatedOriginalFileName = nameWithoutExt.substring(0, maxNameLength) + ext;
} else {
// If extension itself is too long, just use the extension
truncatedOriginalFileName = ext.substring(0, MAX_FILE_NAME_LENGTH);
}
logger.warn('[Workflow] File name truncated to fit database column', {
originalLength: originalFileName.length,
truncatedLength: truncatedOriginalFileName.length,
@@ -308,18 +310,18 @@ export class WorkflowController {
// Generate fileName (basename of the generated file name in GCS)
const generatedFileName = path.basename(gcsFilePath);
let truncatedFileName = generatedFileName;
if (generatedFileName.length > MAX_FILE_NAME_LENGTH) {
const ext = path.extname(generatedFileName);
const nameWithoutExt = path.basename(generatedFileName, ext);
const maxNameLength = MAX_FILE_NAME_LENGTH - ext.length;
if (maxNameLength > 0) {
truncatedFileName = nameWithoutExt.substring(0, maxNameLength) + ext;
} else {
truncatedFileName = ext.substring(0, MAX_FILE_NAME_LENGTH);
}
logger.warn('[Workflow] Generated file name truncated', {
originalLength: generatedFileName.length,
truncatedLength: truncatedFileName.length,
@@ -346,7 +348,7 @@ export class WorkflowController {
storageUrl: finalStorageUrl ? 'present' : 'null (too long)',
requestId: workflow.requestId
});
try {
const doc = await Document.create({
requestId: workflow.requestId,
@@ -387,7 +389,7 @@ export class WorkflowController {
// Re-throw to be caught by outer catch block
throw docError;
}
// Log document upload activity
const requestMeta = getRequestMetadata(req);
activityService.log({
@@ -422,7 +424,7 @@ export class WorkflowController {
try {
const { id } = req.params;
const workflow = await workflowService.getWorkflowById(id);
if (!workflow) {
ResponseHandler.notFound(res, 'Workflow not found');
return;
@@ -439,19 +441,19 @@ export class WorkflowController {
try {
const { id } = req.params as any;
const userId = req.user?.userId;
if (!userId) {
ResponseHandler.error(res, 'Authentication required', 401);
return;
}
// Check if user has access to this request
const accessCheck = await workflowService.checkUserRequestAccess(userId, id);
if (!accessCheck.hasAccess) {
ResponseHandler.error(res, accessCheck.reason || 'Access denied', 403);
return;
}
const result = await workflowService.getWorkflowDetails(id);
if (!result) {
ResponseHandler.notFound(res, 'Workflow not found');
@@ -468,7 +470,7 @@ export class WorkflowController {
try {
const page = Math.max(parseInt(String(req.query.page || '1'), 10), 1);
const limit = Math.min(Math.max(parseInt(String(req.query.limit || '20'), 10), 1), 100);
// Extract filter parameters
const filters = {
search: req.query.search as string | undefined,
@@ -484,7 +486,7 @@ export class WorkflowController {
startDate: req.query.startDate as string | undefined,
endDate: req.query.endDate as string | undefined,
};
const result = await workflowService.listWorkflows(page, limit, filters);
ResponseHandler.success(res, result, 'Workflows fetched');
} catch (error) {
@@ -498,7 +500,7 @@ export class WorkflowController {
const userId = (req as any).user?.userId || (req as any).user?.id || (req as any).auth?.userId;
const page = Math.max(parseInt(String(req.query.page || '1'), 10), 1);
const limit = Math.min(Math.max(parseInt(String(req.query.limit || '20'), 10), 1), 100);
// Extract filter parameters (same as listWorkflows)
const search = req.query.search as string | undefined;
const status = req.query.status as string | undefined;
@@ -511,9 +513,9 @@ export class WorkflowController {
const dateRange = req.query.dateRange as string | undefined;
const startDate = req.query.startDate as string | undefined;
const endDate = req.query.endDate as string | undefined;
const filters = { search, status, priority, department, initiator, approver, approverType, slaCompliance, dateRange, startDate, endDate };
const result = await workflowService.listMyRequests(userId, page, limit, filters);
ResponseHandler.success(res, result, 'My requests fetched');
} catch (error) {
@@ -531,7 +533,7 @@ export class WorkflowController {
const userId = (req as any).user?.userId || (req as any).user?.id || (req as any).auth?.userId;
const page = Math.max(parseInt(String(req.query.page || '1'), 10), 1);
const limit = Math.min(Math.max(parseInt(String(req.query.limit || '20'), 10), 1), 100);
// Extract filter parameters (same as listWorkflows)
const search = req.query.search as string | undefined;
const status = req.query.status as string | undefined;
@@ -545,9 +547,9 @@ export class WorkflowController {
const dateRange = req.query.dateRange as string | undefined;
const startDate = req.query.startDate as string | undefined;
const endDate = req.query.endDate as string | undefined;
const filters = { search, status, priority, templateType, department, initiator, approver, approverType, slaCompliance, dateRange, startDate, endDate };
const result = await workflowService.listParticipantRequests(userId, page, limit, filters);
ResponseHandler.success(res, result, 'Participant requests fetched');
} catch (error) {
@@ -564,7 +566,7 @@ export class WorkflowController {
const userId = (req as any).user?.userId || (req as any).user?.id || (req as any).auth?.userId;
const page = Math.max(parseInt(String(req.query.page || '1'), 10), 1);
const limit = Math.min(Math.max(parseInt(String(req.query.limit || '20'), 10), 1), 100);
// Extract filter parameters
const search = req.query.search as string | undefined;
const status = req.query.status as string | undefined;
@@ -575,9 +577,9 @@ export class WorkflowController {
const dateRange = req.query.dateRange as string | undefined;
const startDate = req.query.startDate as string | undefined;
const endDate = req.query.endDate as string | undefined;
const filters = { search, status, priority, templateType, department, slaCompliance, dateRange, startDate, endDate };
const result = await workflowService.listMyInitiatedRequests(userId, page, limit, filters);
ResponseHandler.success(res, result, 'My initiated requests fetched');
} catch (error) {
@@ -591,7 +593,7 @@ export class WorkflowController {
const userId = (req as any).user?.userId || (req as any).user?.id || (req as any).auth?.userId;
const page = Math.max(parseInt(String(req.query.page || '1'), 10), 1);
const limit = Math.min(Math.max(parseInt(String(req.query.limit || '20'), 10), 1), 100);
// Extract filter parameters
const filters = {
search: req.query.search as string | undefined,
@@ -599,11 +601,11 @@ export class WorkflowController {
priority: req.query.priority as string | undefined,
templateType: req.query.templateType as string | undefined
};
// Extract sorting parameters
const sortBy = req.query.sortBy as string | undefined;
const sortOrder = (req.query.sortOrder as string | undefined) || 'desc';
const result = await workflowService.listOpenForMe(userId, page, limit, filters, sortBy, sortOrder);
ResponseHandler.success(res, result, 'Open requests for user fetched');
} catch (error) {
@@ -617,7 +619,7 @@ export class WorkflowController {
const userId = (req as any).user?.userId || (req as any).user?.id || (req as any).auth?.userId;
const page = Math.max(parseInt(String(req.query.page || '1'), 10), 1);
const limit = Math.min(Math.max(parseInt(String(req.query.limit || '20'), 10), 1), 100);
// Extract filter parameters
const filters = {
search: req.query.search as string | undefined,
@@ -625,11 +627,11 @@ export class WorkflowController {
priority: req.query.priority as string | undefined,
templateType: req.query.templateType as string | undefined
};
// Extract sorting parameters
const sortBy = req.query.sortBy as string | undefined;
const sortOrder = (req.query.sortOrder as string | undefined) || 'desc';
const result = await workflowService.listClosedByMe(userId, page, limit, filters, sortBy, sortOrder);
ResponseHandler.success(res, result, 'Closed requests by user fetched');
} catch (error) {
@@ -648,9 +650,9 @@ export class WorkflowController {
// Map string literal to enum value explicitly
updateData.priority = validatedData.priority === 'EXPRESS' ? Priority.EXPRESS : Priority.STANDARD;
}
const workflow = await workflowService.updateWorkflow(id, updateData);
if (!workflow) {
ResponseHandler.notFound(res, 'Workflow not found');
return;
@@ -720,7 +722,7 @@ export class WorkflowController {
const fileBuffer = (file as any).buffer || (file.path ? fs.readFileSync(file.path) : Buffer.from(''));
const checksum = crypto.createHash('sha256').update(fileBuffer).digest('hex');
const extension = path.extname(file.originalname).replace('.', '').toLowerCase();
// Upload with automatic fallback to local storage
const requestNumber = (workflow as any).requestNumber || (workflow as any).request_number;
const uploadResult = await gcsStorageService.uploadFileWithFallback({
@@ -730,10 +732,10 @@ export class WorkflowController {
requestNumber: requestNumber,
fileType: 'documents'
});
const storageUrl = uploadResult.storageUrl;
const gcsFilePath = uploadResult.filePath;
// Clean up local temporary file if it exists (from multer disk storage)
if (file.path && fs.existsSync(file.path)) {
try {
@@ -747,20 +749,20 @@ export class WorkflowController {
const MAX_FILE_NAME_LENGTH = 255;
const originalFileName = file.originalname;
let truncatedOriginalFileName = originalFileName;
if (originalFileName.length > MAX_FILE_NAME_LENGTH) {
// Preserve file extension when truncating
const ext = path.extname(originalFileName);
const nameWithoutExt = path.basename(originalFileName, ext);
const maxNameLength = MAX_FILE_NAME_LENGTH - ext.length;
if (maxNameLength > 0) {
truncatedOriginalFileName = nameWithoutExt.substring(0, maxNameLength) + ext;
} else {
// If extension itself is too long, just use the extension
truncatedOriginalFileName = ext.substring(0, MAX_FILE_NAME_LENGTH);
}
logger.warn('[Workflow] File name truncated to fit database column', {
originalLength: originalFileName.length,
truncatedLength: truncatedOriginalFileName.length,
@@ -772,18 +774,18 @@ export class WorkflowController {
// Generate fileName (basename of the generated file name in GCS)
const generatedFileName = path.basename(gcsFilePath);
let truncatedFileName = generatedFileName;
if (generatedFileName.length > MAX_FILE_NAME_LENGTH) {
const ext = path.extname(generatedFileName);
const nameWithoutExt = path.basename(generatedFileName, ext);
const maxNameLength = MAX_FILE_NAME_LENGTH - ext.length;
if (maxNameLength > 0) {
truncatedFileName = nameWithoutExt.substring(0, maxNameLength) + ext;
} else {
truncatedFileName = ext.substring(0, MAX_FILE_NAME_LENGTH);
}
logger.warn('[Workflow] Generated file name truncated', {
originalLength: generatedFileName.length,
truncatedLength: truncatedFileName.length,
@@ -810,7 +812,7 @@ export class WorkflowController {
storageUrl: finalStorageUrl ? 'present' : 'null (too long)',
requestId: actualRequestId
});
try {
const doc = await Document.create({
requestId: actualRequestId,
@@ -874,7 +876,7 @@ export class WorkflowController {
try {
const { id } = req.params;
const workflow = await workflowService.submitWorkflow(id);
if (!workflow) {
ResponseHandler.notFound(res, 'Workflow not found');
return;
@@ -886,4 +888,54 @@ export class WorkflowController {
ResponseHandler.error(res, 'Failed to submit workflow', 400, errorMessage);
}
}
async handleInitiatorAction(req: Request, res: Response): Promise<void> {
try {
const { id } = req.params;
const { action, ...data } = req.body;
const userId = req.user?.userId;
if (!userId) {
ResponseHandler.unauthorized(res, 'User ID missing from request');
return;
}
await dealerClaimService.handleInitiatorAction(id, userId, action as any, data);
ResponseHandler.success(res, null, `Action ${action} performed successfully`);
} catch (error) {
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
logger.error('[WorkflowController] handleInitiatorAction failed', {
error: errorMessage,
requestId: req.params.id,
userId: req.user?.userId,
action: req.body.action
});
ResponseHandler.error(res, 'Failed to perform initiator action', 400, errorMessage);
}
}
async getHistory(req: Request, res: Response): Promise<void> {
try {
const { id } = req.params;
// Resolve requestId UUID from identifier (could be requestNumber or UUID)
const workflowService = new WorkflowService();
const wf = await (workflowService as any).findWorkflowByIdentifier(id);
if (!wf) {
ResponseHandler.notFound(res, 'Workflow not found');
return;
}
const requestId = wf.getDataValue('requestId');
const history = await dealerClaimService.getHistory(requestId);
ResponseHandler.success(res, history, 'Revision history fetched successfully');
} catch (error) {
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
logger.error('[WorkflowController] getHistory failed', {
error: errorMessage,
requestId: req.params.id
});
ResponseHandler.error(res, 'Failed to fetch revision history', 400, errorMessage);
}
}
}
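The two new handlers above still need to be exposed on a route; this commit's routing changes are not shown here, so the following is only a hedged sketch of how they could be wired up. The paths are assumptions, and an authentication middleware that populates req.user is expected to run upstream.

```ts
// Hypothetical wiring for handleInitiatorAction and getHistory; the paths and the
// controller import path are assumptions, not part of this diff.
import { Router } from 'express';
import { WorkflowController } from '@controllers/workflow.controller'; // path assumed

const router = Router();
const controller = new WorkflowController();

// Initiator actions on a dealer-claim request (e.g. resubmitting after a revision request)
router.post('/workflows/:id/initiator-action', (req, res) => controller.handleInitiatorAction(req as any, res));

// Per-level revision history backed by the new dealer_claim_history table
router.get('/workflows/:id/history', (req, res) => controller.getHistory(req as any, res));

export default router;
```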

View File

@@ -0,0 +1,136 @@
import { QueryInterface, DataTypes } from 'sequelize';
export const up = async (queryInterface: QueryInterface) => {
// 1. Drop the old dealer_claim_history table if it exists
const tables = await queryInterface.showAllTables();
if (tables.includes('dealer_claim_history')) {
await queryInterface.dropTable('dealer_claim_history');
}
// 2. Create or update the enum type for snapshot_type
// Check if enum exists, if not create it, if yes update it
try {
await queryInterface.sequelize.query(`
DO $$
BEGIN
IF NOT EXISTS (SELECT 1 FROM pg_type WHERE typname = 'enum_dealer_claim_history_snapshot_type') THEN
CREATE TYPE enum_dealer_claim_history_snapshot_type AS ENUM ('PROPOSAL', 'COMPLETION', 'INTERNAL_ORDER', 'WORKFLOW', 'APPROVE');
ELSE
-- Check if APPROVE exists in the enum
IF NOT EXISTS (
SELECT 1 FROM pg_enum
WHERE enumlabel = 'APPROVE'
AND enumtypid = (SELECT oid FROM pg_type WHERE typname = 'enum_dealer_claim_history_snapshot_type')
) THEN
ALTER TYPE enum_dealer_claim_history_snapshot_type ADD VALUE 'APPROVE';
END IF;
END IF;
END $$;
`);
} catch (error) {
// If enum creation fails, try to continue (might already exist)
console.warn('Enum creation/update warning:', error);
}
// 3. Create new simplified level-based dealer_claim_history table
await queryInterface.createTable('dealer_claim_history', {
history_id: {
type: DataTypes.UUID,
defaultValue: DataTypes.UUIDV4,
primaryKey: true
},
request_id: {
type: DataTypes.UUID,
allowNull: false,
references: {
model: 'workflow_requests',
key: 'request_id'
},
onUpdate: 'CASCADE',
onDelete: 'CASCADE'
},
approval_level_id: {
type: DataTypes.UUID,
allowNull: true, // Nullable for workflow-level snapshots
references: {
model: 'approval_levels',
key: 'level_id'
},
onUpdate: 'CASCADE',
onDelete: 'SET NULL'
},
level_number: {
type: DataTypes.INTEGER,
allowNull: true, // Nullable for workflow-level snapshots
comment: 'Level number for easier querying (e.g., 1=Dealer, 3=Dept Lead, 4/5=Completion)'
},
level_name: {
type: DataTypes.STRING(255),
allowNull: true, // Nullable for workflow-level snapshots
comment: 'Level name for consistent matching (e.g., "Dealer Proposal Submission", "Department Lead Approval")'
},
version: {
type: DataTypes.INTEGER,
allowNull: false,
comment: 'Version number for this specific level (starts at 1 per level)'
},
snapshot_type: {
type: DataTypes.ENUM('PROPOSAL', 'COMPLETION', 'INTERNAL_ORDER', 'WORKFLOW', 'APPROVE'),
allowNull: false,
comment: 'Type of snapshot: PROPOSAL (Step 1), COMPLETION (Step 4/5), INTERNAL_ORDER (Step 3), WORKFLOW (general), APPROVE (approver actions with comments)'
},
snapshot_data: {
type: DataTypes.JSONB,
allowNull: false,
comment: 'JSON object containing all snapshot data specific to this level and type. Structure varies by snapshot_type.'
},
change_reason: {
type: DataTypes.TEXT,
allowNull: true,
comment: 'Reason for this version change (e.g., "Revision Requested: ...")'
},
changed_by: {
type: DataTypes.UUID,
allowNull: false,
references: {
model: 'users',
key: 'user_id'
}
},
created_at: {
type: DataTypes.DATE,
allowNull: false,
defaultValue: DataTypes.NOW
}
});
// Add indexes for efficient querying
await queryInterface.addIndex('dealer_claim_history', ['request_id', 'level_number', 'version'], {
name: 'idx_history_request_level_version'
});
await queryInterface.addIndex('dealer_claim_history', ['approval_level_id', 'version'], {
name: 'idx_history_level_version'
});
await queryInterface.addIndex('dealer_claim_history', ['request_id', 'snapshot_type'], {
name: 'idx_history_request_type'
});
await queryInterface.addIndex('dealer_claim_history', ['snapshot_type', 'level_number'], {
name: 'idx_history_type_level'
});
await queryInterface.addIndex('dealer_claim_history', ['request_id', 'level_name'], {
name: 'idx_history_request_level_name'
});
await queryInterface.addIndex('dealer_claim_history', ['level_name', 'snapshot_type'], {
name: 'idx_history_level_name_type'
});
// Index for JSONB queries on snapshot_data
await queryInterface.addIndex('dealer_claim_history', ['snapshot_type'], {
name: 'idx_history_snapshot_type',
using: 'BTREE'
});
};
export const down = async (queryInterface: QueryInterface) => {
// Drop the new table
await queryInterface.dropTable('dealer_claim_history');
};
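Since the version column is documented as starting at 1 per level, callers have to derive the next number per request/level before inserting a snapshot. A minimal sketch of that calculation follows; the import path and the surrounding service are assumptions, only the column semantics come from the migration above.

```ts
// Sketch: compute the next per-level version, then persist a PROPOSAL snapshot.
import { DealerClaimHistory, SnapshotType } from '@models/DealerClaimHistory'; // path assumed

export async function saveProposalSnapshot(
  requestId: string,
  levelNumber: number,
  changedBy: string,
  snapshotData: object,
  changeReason?: string
): Promise<void> {
  // Highest existing version for this request/level; null when this is the first snapshot
  const latest = (await DealerClaimHistory.max('version', {
    where: { requestId, levelNumber },
  })) as number | null;
  const version = (typeof latest === 'number' ? latest : 0) + 1;

  await DealerClaimHistory.create({
    requestId,
    levelNumber,
    version,
    snapshotType: SnapshotType.PROPOSAL,
    snapshotData,
    changedBy,
    changeReason,
  });
}
```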

View File

@@ -0,0 +1,190 @@
import { DataTypes, Model, Optional } from 'sequelize';
import { sequelize } from '@config/database';
import { WorkflowRequest } from './WorkflowRequest';
import { ApprovalLevel } from './ApprovalLevel';
import { User } from './User';
export enum SnapshotType {
PROPOSAL = 'PROPOSAL',
COMPLETION = 'COMPLETION',
INTERNAL_ORDER = 'INTERNAL_ORDER',
WORKFLOW = 'WORKFLOW',
APPROVE = 'APPROVE'
}
// Type definitions for snapshot data structures
export interface ProposalSnapshotData {
documentUrl?: string;
totalBudget?: number;
comments?: string;
expectedCompletionDate?: string;
costItems?: Array<{
description: string;
amount: number;
order: number;
}>;
}
export interface CompletionSnapshotData {
documentUrl?: string;
totalExpenses?: number;
comments?: string;
expenses?: Array<{
description: string;
amount: number;
}>;
}
export interface IOSnapshotData {
ioNumber?: string;
blockedAmount?: number;
availableBalance?: number;
remainingBalance?: number;
sapDocumentNumber?: string;
}
export interface WorkflowSnapshotData {
status?: string;
currentLevel?: number;
}
export interface ApprovalSnapshotData {
action: 'APPROVE' | 'REJECT';
comments?: string;
rejectionReason?: string;
approverName?: string;
approverEmail?: string;
levelName?: string;
}
interface DealerClaimHistoryAttributes {
historyId: string;
requestId: string;
approvalLevelId?: string;
levelNumber?: number;
levelName?: string;
version: number;
snapshotType: SnapshotType;
snapshotData: ProposalSnapshotData | CompletionSnapshotData | IOSnapshotData | WorkflowSnapshotData | ApprovalSnapshotData | any;
changeReason?: string;
changedBy: string;
createdAt: Date;
}
interface DealerClaimHistoryCreationAttributes extends Optional<DealerClaimHistoryAttributes, 'historyId' | 'approvalLevelId' | 'levelNumber' | 'levelName' | 'changeReason' | 'createdAt'> { }
class DealerClaimHistory extends Model<DealerClaimHistoryAttributes, DealerClaimHistoryCreationAttributes> implements DealerClaimHistoryAttributes {
public historyId!: string;
public requestId!: string;
public approvalLevelId?: string;
public levelNumber?: number;
public levelName?: string;
public version!: number;
public snapshotType!: SnapshotType;
public snapshotData!: ProposalSnapshotData | CompletionSnapshotData | IOSnapshotData | WorkflowSnapshotData | any;
public changeReason?: string;
public changedBy!: string;
public createdAt!: Date;
}
DealerClaimHistory.init(
{
historyId: {
type: DataTypes.UUID,
defaultValue: DataTypes.UUIDV4,
primaryKey: true,
field: 'history_id'
},
requestId: {
type: DataTypes.UUID,
allowNull: false,
field: 'request_id',
references: {
model: 'workflow_requests',
key: 'request_id'
}
},
approvalLevelId: {
type: DataTypes.UUID,
allowNull: true,
field: 'approval_level_id',
references: {
model: 'approval_levels',
key: 'level_id'
}
},
levelNumber: {
type: DataTypes.INTEGER,
allowNull: true,
field: 'level_number'
},
levelName: {
type: DataTypes.STRING(255),
allowNull: true,
field: 'level_name'
},
version: {
type: DataTypes.INTEGER,
allowNull: false
},
snapshotType: {
type: DataTypes.ENUM('PROPOSAL', 'COMPLETION', 'INTERNAL_ORDER', 'WORKFLOW', 'APPROVE'),
allowNull: false,
field: 'snapshot_type'
},
snapshotData: {
type: DataTypes.JSONB,
allowNull: false,
field: 'snapshot_data'
},
changeReason: {
type: DataTypes.TEXT,
allowNull: true,
field: 'change_reason'
},
changedBy: {
type: DataTypes.UUID,
allowNull: false,
field: 'changed_by',
references: {
model: 'users',
key: 'user_id'
}
},
createdAt: {
type: DataTypes.DATE,
allowNull: false,
defaultValue: DataTypes.NOW,
field: 'created_at'
}
},
{
sequelize,
modelName: 'DealerClaimHistory',
tableName: 'dealer_claim_history',
timestamps: false,
indexes: [
{
fields: ['request_id', 'level_number', 'version'],
name: 'idx_history_request_level_version'
},
{
fields: ['approval_level_id', 'version'],
name: 'idx_history_level_version'
},
{
fields: ['request_id', 'snapshot_type'],
name: 'idx_history_request_type'
},
{
fields: ['snapshot_type', 'level_number'],
name: 'idx_history_type_level'
}
]
}
);
DealerClaimHistory.belongsTo(WorkflowRequest, { foreignKey: 'requestId' });
DealerClaimHistory.belongsTo(ApprovalLevel, { foreignKey: 'approvalLevelId' });
DealerClaimHistory.belongsTo(User, { as: 'changer', foreignKey: 'changedBy' });
export { DealerClaimHistory };
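As a usage illustration, reading back the most recent snapshot of a given type for one level; the relative import path is an assumption, and the ordering relies on the per-level version numbering described in the migration.

```ts
// Hedged sketch: fetch the latest snapshot of one type for a specific level.
import { DealerClaimHistory, SnapshotType } from './DealerClaimHistory'; // path assumed

export async function getLatestSnapshot(
  requestId: string,
  levelNumber: number,
  snapshotType: SnapshotType
): Promise<DealerClaimHistory | null> {
  return DealerClaimHistory.findOne({
    where: { requestId, levelNumber, snapshotType },
    order: [['version', 'DESC']], // highest per-level version wins
  });
}
```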

View File

@@ -29,11 +29,12 @@ interface WorkflowRequestAttributes {
pauseReason?: string;
pauseResumeDate?: Date;
pauseTatSnapshot?: any;
version: number;
createdAt: Date;
updatedAt: Date;
}
interface WorkflowRequestCreationAttributes extends Optional<WorkflowRequestAttributes, 'requestId' | 'submissionDate' | 'closureDate' | 'conclusionRemark' | 'aiGeneratedConclusion' | 'isPaused' | 'pausedAt' | 'pausedBy' | 'pauseReason' | 'pauseResumeDate' | 'pauseTatSnapshot' | 'createdAt' | 'updatedAt'> {}
interface WorkflowRequestCreationAttributes extends Optional<WorkflowRequestAttributes, 'requestId' | 'submissionDate' | 'closureDate' | 'conclusionRemark' | 'aiGeneratedConclusion' | 'isPaused' | 'pausedAt' | 'pausedBy' | 'pauseReason' | 'pauseResumeDate' | 'pauseTatSnapshot' | 'version' | 'createdAt' | 'updatedAt'> { }
class WorkflowRequest extends Model<WorkflowRequestAttributes, WorkflowRequestCreationAttributes> implements WorkflowRequestAttributes {
public requestId!: string;
@@ -61,6 +62,7 @@ class WorkflowRequest extends Model<WorkflowRequestAttributes, WorkflowRequestCr
public pauseReason?: string;
public pauseResumeDate?: Date;
public pauseTatSnapshot?: any;
public version!: number;
public createdAt!: Date;
public updatedAt!: Date;
@@ -211,6 +213,11 @@ WorkflowRequest.init(
allowNull: true,
field: 'pause_tat_snapshot'
},
version: {
type: DataTypes.INTEGER,
defaultValue: 1,
allowNull: false
},
createdAt: {
type: DataTypes.DATE,
allowNull: false,

View File

@@ -25,6 +25,7 @@ import { InternalOrder } from './InternalOrder';
import { ClaimBudgetTracking } from './ClaimBudgetTracking';
import { Dealer } from './Dealer';
import { ActivityType } from './ActivityType';
import { DealerClaimHistory } from './DealerClaimHistory';
// Define associations
const defineAssociations = () => {
@@ -137,6 +138,13 @@ const defineAssociations = () => {
sourceKey: 'requestId'
});
// DealerClaimHistory associations
WorkflowRequest.hasMany(DealerClaimHistory, {
as: 'history',
foreignKey: 'requestId',
sourceKey: 'requestId'
});
// Note: belongsTo associations are defined in individual model files to avoid duplicate alias conflicts
// Only hasMany associations from WorkflowRequest are defined here since they're one-way
};
@ -170,7 +178,8 @@ export {
InternalOrder,
ClaimBudgetTracking,
Dealer,
ActivityType
ActivityType,
DealerClaimHistory
};
// Export default sequelize instance
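With the new hasMany association registered above, a workflow can be loaded together with its dealer-claim history. A hedged example follows; the '@models/index' import path and the ordering are assumptions.

```ts
// Eager-load the new 'history' association, ordered by level and per-level version.
import { WorkflowRequest, DealerClaimHistory } from '@models/index'; // path assumed

export async function getWorkflowWithHistory(requestId: string) {
  return WorkflowRequest.findOne({
    where: { requestId },
    include: [{
      model: DealerClaimHistory,
      as: 'history',
      separate: true, // run as its own query so the ordering below applies cleanly
      order: [['levelNumber', 'ASC'], ['version', 'ASC']],
    }],
  });
}
```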

View File

@@ -33,11 +33,11 @@ function createContentDisposition(disposition: 'inline' | 'attachment', filename
.replace(/[<>:"|?*\x00-\x1F\x7F]/g, '_') // Only replace truly problematic chars
.replace(/\\/g, '_') // Replace backslashes
.trim();
// For ASCII-only filenames, use simple format (browsers prefer this)
// Only use filename* for non-ASCII characters
const hasNonASCII = /[^\x00-\x7F]/.test(filename);
if (hasNonASCII) {
// Use RFC 5987 encoding for non-ASCII characters
const encodedFilename = encodeURIComponent(filename);
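For context on the ASCII vs. RFC 5987 branch in this hunk, a tiny self-contained sketch of the two header shapes; the exact fallback createContentDisposition emits for non-ASCII names is not fully visible in this diff, so treat the second branch as illustrative.

```ts
// Illustration only: simple filename for ASCII names, RFC 5987 filename* otherwise.
function contentDispositionSketch(disposition: 'inline' | 'attachment', filename: string): string {
  const hasNonASCII = /[^\x00-\x7F]/.test(filename);
  if (!hasNonASCII) {
    // Simple form, preferred by browsers for plain ASCII names
    return `${disposition}; filename="${filename}"`;
  }
  // RFC 5987 form: percent-encoded UTF-8 with an explicit charset
  return `${disposition}; filename*=UTF-8''${encodeURIComponent(filename)}`;
}

// contentDispositionSketch('inline', 'report.pdf')
//   -> 'inline; filename="report.pdf"'
// contentDispositionSketch('attachment', '請求書.pdf')
//   -> "attachment; filename*=UTF-8''%E8%AB%8B%E6%B1%82%E6%9B%B8.pdf"
```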
@@ -229,27 +229,27 @@ router.get('/documents/:documentId/preview',
const { Document } = require('@models/Document');
const { gcsStorageService } = require('../services/gcsStorage.service');
const fs = require('fs');
const document = await Document.findOne({ where: { documentId } });
if (!document) {
res.status(404).json({ success: false, error: 'Document not found' });
return;
}
const storageUrl = (document as any).storageUrl || (document as any).storage_url;
const filePath = (document as any).filePath || (document as any).file_path;
const fileName = (document as any).originalFileName || (document as any).original_file_name || (document as any).fileName;
const fileType = (document as any).mimeType || (document as any).mime_type;
// Check if it's a GCS URL
const isGcsUrl = storageUrl && (storageUrl.startsWith('https://storage.googleapis.com') || storageUrl.startsWith('gs://'));
if (isGcsUrl) {
// Redirect to GCS public URL or use signed URL for private files
res.redirect(storageUrl);
return;
}
// If storageUrl is null but filePath indicates GCS storage, stream file directly from GCS
if (!storageUrl && filePath && filePath.startsWith('requests/')) {
try {
@@ -257,7 +257,7 @@ router.get('/documents/:documentId/preview',
if (!gcsStorageService.isConfigured()) {
throw new Error('GCS not configured');
}
// Access the storage instance from the service
const { Storage } = require('@google-cloud/storage');
const keyFilePath = process.env.GCP_KEY_FILE || '';
@@ -266,26 +266,26 @@ router.get('/documents/:documentId/preview',
const resolvedKeyPath = path.isAbsolute(keyFilePath)
? keyFilePath
: path.resolve(process.cwd(), keyFilePath);
const storage = new Storage({
projectId: process.env.GCP_PROJECT_ID || '',
keyFilename: resolvedKeyPath,
});
const bucket = storage.bucket(bucketName);
const file = bucket.file(filePath);
// Check if file exists
const [exists] = await file.exists();
if (!exists) {
res.status(404).json({ success: false, error: 'File not found in GCS' });
return;
}
// Get file metadata for content type
const [metadata] = await file.getMetadata();
const contentType = metadata.contentType || fileType || 'application/octet-stream';
// Set CORS headers
const origin = req.headers.origin;
if (origin) {
@@ -294,12 +294,12 @@ router.get('/documents/:documentId/preview',
}
res.setHeader('Access-Control-Expose-Headers', 'Content-Type, Content-Disposition');
res.setHeader('Content-Type', contentType);
// For images and PDFs, allow inline viewing
const isPreviewable = fileType && (fileType.includes('image') || fileType.includes('pdf'));
const disposition = isPreviewable ? 'inline' : 'attachment';
res.setHeader('Content-Disposition', createContentDisposition(disposition, fileName));
// Stream file from GCS to response // Stream file from GCS to response
file.createReadStream() file.createReadStream()
.on('error', (streamError: Error) => { .on('error', (streamError: Error) => {
@ -310,9 +310,9 @@ router.get('/documents/:documentId/preview',
error: streamError.message, error: streamError.message,
}); });
if (!res.headersSent) { if (!res.headersSent) {
res.status(500).json({ res.status(500).json({
success: false, success: false,
error: 'Failed to stream file from storage' error: 'Failed to stream file from storage'
}); });
} }
}) })
@ -325,26 +325,26 @@ router.get('/documents/:documentId/preview',
filePath, filePath,
error: gcsError instanceof Error ? gcsError.message : 'Unknown error', error: gcsError instanceof Error ? gcsError.message : 'Unknown error',
}); });
res.status(500).json({ res.status(500).json({
success: false, success: false,
error: 'Failed to access file. Please try again.' error: 'Failed to access file. Please try again.'
}); });
return; return;
} }
} }
// Local file handling - check if storageUrl is a local path (starts with /uploads/) // Local file handling - check if storageUrl is a local path (starts with /uploads/)
if (storageUrl && storageUrl.startsWith('/uploads/')) { if (storageUrl && storageUrl.startsWith('/uploads/')) {
// Extract relative path from storageUrl (remove /uploads/ prefix) // Extract relative path from storageUrl (remove /uploads/ prefix)
const relativePath = storageUrl.replace(/^\/uploads\//, ''); const relativePath = storageUrl.replace(/^\/uploads\//, '');
const absolutePath = path.join(UPLOAD_DIR, relativePath); const absolutePath = path.join(UPLOAD_DIR, relativePath);
// Check if file exists // Check if file exists
if (!fs.existsSync(absolutePath)) { if (!fs.existsSync(absolutePath)) {
res.status(404).json({ success: false, error: 'File not found on server' }); res.status(404).json({ success: false, error: 'File not found on server' });
return; return;
} }
// Set CORS headers to allow blob URL creation when served from same origin // Set CORS headers to allow blob URL creation when served from same origin
const origin = req.headers.origin; const origin = req.headers.origin;
if (origin) { if (origin) {
@ -352,10 +352,10 @@ router.get('/documents/:documentId/preview',
res.setHeader('Access-Control-Allow-Credentials', 'true'); res.setHeader('Access-Control-Allow-Credentials', 'true');
} }
res.setHeader('Access-Control-Expose-Headers', 'Content-Type, Content-Disposition'); res.setHeader('Access-Control-Expose-Headers', 'Content-Type, Content-Disposition');
// Set appropriate content type // Set appropriate content type
res.contentType(fileType || 'application/octet-stream'); res.contentType(fileType || 'application/octet-stream');
// For images and PDFs, allow inline viewing // For images and PDFs, allow inline viewing
const isPreviewable = fileType && (fileType.includes('image') || fileType.includes('pdf')); const isPreviewable = fileType && (fileType.includes('image') || fileType.includes('pdf'));
if (isPreviewable) { if (isPreviewable) {
@ -363,7 +363,7 @@ router.get('/documents/:documentId/preview',
} else { } else {
res.setHeader('Content-Disposition', `attachment; filename="${fileName}"`); res.setHeader('Content-Disposition', `attachment; filename="${fileName}"`);
} }
res.sendFile(absolutePath, (err) => { res.sendFile(absolutePath, (err) => {
if (err && !res.headersSent) { if (err && !res.headersSent) {
res.status(500).json({ success: false, error: 'Failed to serve file' }); res.status(500).json({ success: false, error: 'Failed to serve file' });
@ -371,18 +371,18 @@ router.get('/documents/:documentId/preview',
}); });
return; return;
} }
// Legacy local file handling (absolute path stored in filePath) // Legacy local file handling (absolute path stored in filePath)
// Resolve relative path if needed // Resolve relative path if needed
const absolutePath = filePath && !path.isAbsolute(filePath) const absolutePath = filePath && !path.isAbsolute(filePath)
? path.join(UPLOAD_DIR, filePath) ? path.join(UPLOAD_DIR, filePath)
: filePath; : filePath;
if (!absolutePath || !fs.existsSync(absolutePath)) { if (!absolutePath || !fs.existsSync(absolutePath)) {
res.status(404).json({ success: false, error: 'File not found on server' }); res.status(404).json({ success: false, error: 'File not found on server' });
return; return;
} }
// Set CORS headers to allow blob URL creation when served from same origin // Set CORS headers to allow blob URL creation when served from same origin
const origin = req.headers.origin; const origin = req.headers.origin;
if (origin) { if (origin) {
@ -390,10 +390,10 @@ router.get('/documents/:documentId/preview',
res.setHeader('Access-Control-Allow-Credentials', 'true'); res.setHeader('Access-Control-Allow-Credentials', 'true');
} }
res.setHeader('Access-Control-Expose-Headers', 'Content-Type, Content-Disposition'); res.setHeader('Access-Control-Expose-Headers', 'Content-Type, Content-Disposition');
// Set appropriate content type // Set appropriate content type
res.contentType(fileType || 'application/octet-stream'); res.contentType(fileType || 'application/octet-stream');
// For images and PDFs, allow inline viewing // For images and PDFs, allow inline viewing
const isPreviewable = fileType && (fileType.includes('image') || fileType.includes('pdf')); const isPreviewable = fileType && (fileType.includes('image') || fileType.includes('pdf'));
if (isPreviewable) { if (isPreviewable) {
@ -401,7 +401,7 @@ router.get('/documents/:documentId/preview',
} else { } else {
res.setHeader('Content-Disposition', `attachment; filename="${fileName}"`); res.setHeader('Content-Disposition', `attachment; filename="${fileName}"`);
} }
res.sendFile(absolutePath, (err) => { res.sendFile(absolutePath, (err) => {
if (err && !res.headersSent) { if (err && !res.headersSent) {
res.status(500).json({ success: false, error: 'Failed to serve file' }); res.status(500).json({ success: false, error: 'Failed to serve file' });
@ -418,26 +418,26 @@ router.get('/documents/:documentId/download',
const { Document } = require('@models/Document');
const { gcsStorageService } = require('../services/gcsStorage.service');
const fs = require('fs');
const document = await Document.findOne({ where: { documentId } });
if (!document) {
res.status(404).json({ success: false, error: 'Document not found' });
return;
}
const storageUrl = (document as any).storageUrl || (document as any).storage_url;
const filePath = (document as any).filePath || (document as any).file_path;
const fileName = (document as any).originalFileName || (document as any).original_file_name || (document as any).fileName;
// Check if it's a GCS URL
const isGcsUrl = storageUrl && (storageUrl.startsWith('https://storage.googleapis.com') || storageUrl.startsWith('gs://'));
if (isGcsUrl) {
// Redirect to GCS public URL for download
res.redirect(storageUrl);
return;
}
// If storageUrl is null but filePath indicates GCS storage, stream file directly from GCS
if (!storageUrl && filePath && filePath.startsWith('requests/')) {
try {
@@ -445,7 +445,7 @@ router.get('/documents/:documentId/download',
if (!gcsStorageService.isConfigured()) {
throw new Error('GCS not configured');
}
// Access the storage instance from the service
const { Storage } = require('@google-cloud/storage');
const keyFilePath = process.env.GCP_KEY_FILE || '';
@@ -454,26 +454,26 @@ router.get('/documents/:documentId/download',
const resolvedKeyPath = path.isAbsolute(keyFilePath)
? keyFilePath
: path.resolve(process.cwd(), keyFilePath);
const storage = new Storage({
projectId: process.env.GCP_PROJECT_ID || '',
keyFilename: resolvedKeyPath,
});
const bucket = storage.bucket(bucketName);
const file = bucket.file(filePath);
// Check if file exists
const [exists] = await file.exists();
if (!exists) {
res.status(404).json({ success: false, error: 'File not found in GCS' });
return;
}
// Get file metadata for content type
const [metadata] = await file.getMetadata();
const contentType = metadata.contentType || (document as any).mimeType || (document as any).mime_type || 'application/octet-stream';
// Set CORS headers
const origin = req.headers.origin;
if (origin) {
@@ -481,11 +481,11 @@ router.get('/documents/:documentId/download',
res.setHeader('Access-Control-Allow-Credentials', 'true');
}
res.setHeader('Access-Control-Expose-Headers', 'Content-Type, Content-Disposition');
// Set headers for download
res.setHeader('Content-Type', contentType);
res.setHeader('Content-Disposition', createContentDisposition('attachment', fileName));
// Stream file from GCS to response
file.createReadStream()
.on('error', (streamError: Error) => {
@@ -496,9 +496,9 @@ router.get('/documents/:documentId/download',
error: streamError.message,
});
if (!res.headersSent) {
res.status(500).json({
success: false,
error: 'Failed to stream file from storage'
});
}
})
@@ -511,26 +511,26 @@ router.get('/documents/:documentId/download',
filePath,
error: gcsError instanceof Error ? gcsError.message : 'Unknown error',
});
res.status(500).json({
success: false,
error: 'Failed to access file. Please try again.'
});
return;
}
}
// Local file handling - check if storageUrl is a local path (starts with /uploads/)
if (storageUrl && storageUrl.startsWith('/uploads/')) {
// Extract relative path from storageUrl (remove /uploads/ prefix)
const relativePath = storageUrl.replace(/^\/uploads\//, '');
const absolutePath = path.join(UPLOAD_DIR, relativePath);
// Check if file exists
if (!fs.existsSync(absolutePath)) {
res.status(404).json({ success: false, error: 'File not found on server' });
return;
}
// Set CORS headers
const origin = req.headers.origin;
if (origin) {
@@ -538,12 +538,12 @@ router.get('/documents/:documentId/download',
res.setHeader('Access-Control-Allow-Credentials', 'true');
}
res.setHeader('Access-Control-Expose-Headers', 'Content-Type, Content-Disposition');
// Set headers for download
const fileTypeForDownload = (document as any).mimeType || (document as any).mime_type || 'application/octet-stream';
res.setHeader('Content-Type', fileTypeForDownload);
res.setHeader('Content-Disposition', createContentDisposition('attachment', fileName));
res.download(absolutePath, fileName, (err) => {
if (err && !res.headersSent) {
res.status(500).json({ success: false, error: 'Failed to download file' });
@@ -551,18 +551,18 @@ router.get('/documents/:documentId/download',
});
return;
}
// Legacy local file handling (absolute path stored in filePath)
// Resolve relative path if needed
const absolutePath = filePath && !path.isAbsolute(filePath)
? path.join(UPLOAD_DIR, filePath)
: filePath;
if (!absolutePath || !fs.existsSync(absolutePath)) {
res.status(404).json({ success: false, error: 'File not found on server' });
return;
}
res.download(absolutePath, fileName, (err) => {
if (err && !res.headersSent) {
res.status(500).json({ success: false, error: 'Failed to download file' });
@@ -578,26 +578,26 @@ router.get('/work-notes/attachments/:attachmentId/preview',
const { attachmentId } = req.params;
const fileInfo = await workNoteService.downloadAttachment(attachmentId);
const fs = require('fs');
// Check if it's a GCS URL
if (fileInfo.isGcsUrl && fileInfo.storageUrl) {
// Redirect to GCS public URL
res.redirect(fileInfo.storageUrl);
return;
}
// Local file handling - check if storageUrl is a local path (starts with /uploads/)
if (fileInfo.storageUrl && fileInfo.storageUrl.startsWith('/uploads/')) {
// Extract relative path from storageUrl (remove /uploads/ prefix)
const relativePath = fileInfo.storageUrl.replace(/^\/uploads\//, '');
const absolutePath = path.join(UPLOAD_DIR, relativePath);
// Check if file exists
if (!fs.existsSync(absolutePath)) {
res.status(404).json({ success: false, error: 'File not found' });
return;
}
// Set CORS headers to allow blob URL creation when served from same origin
const origin = req.headers.origin;
if (origin) {
@@ -605,10 +605,10 @@ router.get('/work-notes/attachments/:attachmentId/preview',
res.setHeader('Access-Control-Allow-Credentials', 'true');
}
res.setHeader('Access-Control-Expose-Headers', 'Content-Type, Content-Disposition');
// Set appropriate content type
res.contentType(fileInfo.fileType || 'application/octet-stream');
// For images and PDFs, allow inline viewing
const isPreviewable = fileInfo.fileType && (fileInfo.fileType.includes('image') || fileInfo.fileType.includes('pdf'));
if (isPreviewable) {
@@ -616,7 +616,7 @@ router.get('/work-notes/attachments/:attachmentId/preview',
} else {
res.setHeader('Content-Disposition', `attachment; filename="${fileInfo.fileName}"`);
}
res.sendFile(absolutePath, (err) => {
if (err && !res.headersSent) {
res.status(500).json({ success: false, error: 'Failed to serve file' });
@@ -624,18 +624,18 @@ router.get('/work-notes/attachments/:attachmentId/preview',
});
return;
}
// Legacy local file handling (absolute path stored in filePath)
// Resolve relative path if needed
const absolutePath = fileInfo.filePath && !path.isAbsolute(fileInfo.filePath)
? path.join(UPLOAD_DIR, fileInfo.filePath)
: fileInfo.filePath;
if (!absolutePath || !fs.existsSync(absolutePath)) {
res.status(404).json({ success: false, error: 'File not found' });
return;
}
// Set CORS headers to allow blob URL creation when served from same origin
const origin = req.headers.origin;
if (origin) {
@@ -643,10 +643,10 @@ router.get('/work-notes/attachments/:attachmentId/preview',
res.setHeader('Access-Control-Allow-Credentials', 'true');
}
res.setHeader('Access-Control-Expose-Headers', 'Content-Type, Content-Disposition');
// Set appropriate content type
res.contentType(fileInfo.fileType || 'application/octet-stream');
// For images and PDFs, allow inline viewing
const isPreviewable = fileInfo.fileType && (fileInfo.fileType.includes('image') || fileInfo.fileType.includes('pdf'));
if (isPreviewable) {
@@ -654,7 +654,7 @@ router.get('/work-notes/attachments/:attachmentId/preview',
} else {
res.setHeader('Content-Disposition', `attachment; filename="${fileInfo.fileName}"`);
}
res.sendFile(absolutePath, (err) => {
if (err && !res.headersSent) {
res.status(500).json({ success: false, error: 'Failed to serve file' });
@@ -670,26 +670,26 @@ router.get('/work-notes/attachments/:attachmentId/download',
const { attachmentId } = req.params;
const fileInfo = await workNoteService.downloadAttachment(attachmentId);
const fs = require('fs');
// Check if it's a GCS URL
if (fileInfo.isGcsUrl && fileInfo.storageUrl) {
// Redirect to GCS public URL for download
res.redirect(fileInfo.storageUrl);
return;
}
// Local file handling - check if storageUrl is a local path (starts with /uploads/)
if (fileInfo.storageUrl && fileInfo.storageUrl.startsWith('/uploads/')) {
// Extract relative path from storageUrl (remove /uploads/ prefix)
const relativePath = fileInfo.storageUrl.replace(/^\/uploads\//, '');
const absolutePath = path.join(UPLOAD_DIR, relativePath);
// Check if file exists
if (!fs.existsSync(absolutePath)) {
res.status(404).json({ success: false, error: 'File not found' });
return;
}
// Set CORS headers
const origin = req.headers.origin;
if (origin) {
@@ -697,7 +697,7 @@ router.get('/work-notes/attachments/:attachmentId/download',
res.setHeader('Access-Control-Allow-Credentials', 'true');
}
res.setHeader('Access-Control-Expose-Headers', 'Content-Type, Content-Disposition');
res.download(absolutePath, fileInfo.fileName, (err) => {
if (err && !res.headersSent) {
res.status(500).json({ success: false, error: 'Failed to download file' });
@@ -705,18 +705,18 @@ router.get('/work-notes/attachments/:attachmentId/download',
});
return;
}
// Legacy local file handling (absolute path stored in filePath)
// Resolve relative path if needed
const absolutePath = fileInfo.filePath && !path.isAbsolute(fileInfo.filePath)
? path.join(UPLOAD_DIR, fileInfo.filePath)
: fileInfo.filePath;
if (!absolutePath || !fs.existsSync(absolutePath)) {
res.status(404).json({ success: false, error: 'File not found' });
return;
}
res.download(absolutePath, fileInfo.fileName, (err) => {
if (err && !res.headersSent) {
res.status(500).json({ success: false, error: 'Failed to download file' });
@@ -738,7 +738,7 @@ router.post('/:id/participants/approver',
}
const requestId: string = wf.getDataValue('requestId');
const { email } = req.body;
if (!email) {
res.status(400).json({ success: false, error: 'Email is required' });
return;
@@ -761,7 +761,7 @@ router.post('/:id/participants/spectator',
}
const requestId: string = wf.getDataValue('requestId');
const { email } = req.body;
if (!email) {
res.status(400).json({ success: false, error: 'Email is required' });
return;
@@ -794,11 +794,11 @@ router.post('/:id/approvals/:levelId/skip',
reason || '',
req.user?.userId
);
res.status(200).json({
success: true,
message: 'Approver skipped successfully',
data: result
});
})
);
@@ -819,9 +819,9 @@ router.post('/:id/approvers/at-level',
const { email, tatHours, level } = req.body;
if (!email || !tatHours || !level) {
res.status(400).json({
success: false,
error: 'Email, tatHours, and level are required'
});
return;
}
@@ -833,11 +833,11 @@ router.post('/:id/approvers/at-level',
Number(level),
req.user?.userId
);
res.status(201).json({
success: true,
message: 'Approver added successfully',
data: result
});
})
);
@@ -874,4 +874,19 @@ router.get('/:id/pause',
asyncHandler(pauseController.getPauseDetails.bind(pauseController))
);
// Initiator actions for rejected/returned requests
router.post('/:id/initiator-action',
authenticateToken,
requireParticipantTypes(['INITIATOR']),
validateParams(workflowParamsSchema),
asyncHandler(workflowController.handleInitiatorAction.bind(workflowController))
);
// Get revision history
router.get('/:id/history',
authenticateToken,
validateParams(workflowParamsSchema),
asyncHandler(workflowController.getHistory.bind(workflowController))
);
export default router;
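
The preview and download handlers shown above all resolve a stored file through the same fallback chain. Below is a condensed sketch of that order, assuming the same Document fields used in those handlers (storageUrl, filePath, originalFileName, mimeType). The helper name serveStoredFile is illustrative, the Storage client here relies on default credentials rather than the GCP_PROJECT_ID/GCP_KEY_FILE wiring in the real routes, and the CORS and Content-Disposition handling is omitted for brevity:

// Illustrative sketch only - not part of this commit.
import { Response } from 'express';
import path from 'path';
import fs from 'fs';
import { Storage } from '@google-cloud/storage';

// Assumption: local uploads live under this directory, as the /uploads/ branches imply.
const UPLOAD_DIR = process.env.UPLOAD_DIR || path.resolve(process.cwd(), 'uploads');

export async function serveStoredFile(
  doc: { storageUrl?: string | null; filePath?: string | null; fileName: string; mimeType?: string },
  res: Response,
  bucketName: string
): Promise<void> {
  const { storageUrl, filePath, fileName } = doc;

  // 1. Document already carries a full GCS URL: redirect the client to it.
  if (storageUrl && (storageUrl.startsWith('https://storage.googleapis.com') || storageUrl.startsWith('gs://'))) {
    res.redirect(storageUrl);
    return;
  }

  // 2. No URL, but the object key marks it as GCS-stored: stream it through the API.
  if (!storageUrl && filePath && filePath.startsWith('requests/')) {
    const file = new Storage().bucket(bucketName).file(filePath);
    const [exists] = await file.exists();
    if (!exists) {
      res.status(404).json({ success: false, error: 'File not found in GCS' });
      return;
    }
    const [metadata] = await file.getMetadata();
    res.setHeader('Content-Type', String(metadata.contentType || doc.mimeType || 'application/octet-stream'));
    file.createReadStream().pipe(res);
    return;
  }

  // 3. storageUrl points at the local /uploads mount: resolve it under UPLOAD_DIR.
  if (storageUrl && storageUrl.startsWith('/uploads/')) {
    const absolutePath = path.join(UPLOAD_DIR, storageUrl.replace(/^\/uploads\//, ''));
    if (!fs.existsSync(absolutePath)) {
      res.status(404).json({ success: false, error: 'File not found on server' });
      return;
    }
    res.sendFile(absolutePath);
    return;
  }

  // 4. Legacy records: filePath holds a (possibly relative) filesystem path.
  const legacyPath = filePath && !path.isAbsolute(filePath) ? path.join(UPLOAD_DIR, filePath) : filePath;
  if (!legacyPath || !fs.existsSync(legacyPath)) {
    res.status(404).json({ success: false, error: 'File not found on server' });
    return;
  }
  res.download(legacyPath, fileName);
}

The GCS branches come first, so local disk is only consulted for records that predate GCS storage, which matches the "legacy local file handling" comments in the handlers above.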

View File

@@ -135,6 +135,7 @@ async function runMigrations(): Promise<void> {
const m40 = require('../migrations/20251218-fix-claim-invoice-credit-note-columns');
const m41 = require('../migrations/20250120-create-dealers-table');
const m42 = require('../migrations/20250125-create-activity-types');
const m43 = require('../migrations/20260113-redesign-dealer-claim-history');
const migrations = [
{ name: '2025103000-create-users', module: m0 },
@@ -182,6 +183,7 @@ async function runMigrations(): Promise<void> {
{ name: '20251218-fix-claim-invoice-credit-note-columns', module: m40 },
{ name: '20250120-create-dealers-table', module: m41 },
{ name: '20250125-create-activity-types', module: m42 },
{ name: '20260113-redesign-dealer-claim-history', module: m43 },
];
const queryInterface = sequelize.getQueryInterface();

View File

@@ -45,6 +45,7 @@ import * as m39 from '../migrations/20251214-create-dealer-completion-expenses';
import * as m40 from '../migrations/20251218-fix-claim-invoice-credit-note-columns';
import * as m41 from '../migrations/20250120-create-dealers-table';
import * as m42 from '../migrations/20250125-create-activity-types';
import * as m43 from '../migrations/20260113-redesign-dealer-claim-history';
interface Migration {
name: string;
@@ -56,7 +57,7 @@ interface Migration {
const migrations: Migration[] = [
// 1. FIRST: Create base tables with no dependencies
{ name: '2025103000-create-users', module: m0 }, // ← MUST BE FIRST
// 2. Tables that depend on users
{ name: '2025103001-create-workflow-requests', module: m1 },
{ name: '2025103002-create-approval-levels', module: m2 },
@@ -66,7 +67,7 @@ const migrations: Migration[] = [
{ name: '20251031_02_create_activities', module: m6 },
{ name: '20251031_03_create_work_notes', module: m7 },
{ name: '20251031_04_create_work_note_attachments', module: m8 },
// 3. Table modifications and additional features
{ name: '20251104-add-tat-alert-fields', module: m9 },
{ name: '20251104-create-tat-alerts', module: m10 },
@@ -104,6 +105,7 @@ const migrations: Migration[] = [
{ name: '20251218-fix-claim-invoice-credit-note-columns', module: m40 },
{ name: '20250120-create-dealers-table', module: m41 },
{ name: '20250125-create-activity-types', module: m42 },
{ name: '20260113-redesign-dealer-claim-history', module: m43 },
];
/**
@@ -112,7 +114,7 @@ const migrations: Migration[] = [
async function ensureMigrationsTable(queryInterface: QueryInterface): Promise<void> {
try {
const tables = await queryInterface.showAllTables();
if (!tables.includes('migrations')) {
await queryInterface.sequelize.query(`
CREATE TABLE migrations (
@@ -164,28 +166,28 @@ async function markMigrationExecuted(name: string): Promise<void> {
async function run() {
try {
await sequelize.authenticate();
const queryInterface = sequelize.getQueryInterface();
// Ensure migrations tracking table exists
await ensureMigrationsTable(queryInterface);
// Get already executed migrations
const executedMigrations = await getExecutedMigrations();
// Find pending migrations
const pendingMigrations = migrations.filter(
m => !executedMigrations.includes(m.name)
);
if (pendingMigrations.length === 0) {
console.log('✅ Migrations up-to-date');
process.exit(0);
return;
}
console.log(`🔄 Running ${pendingMigrations.length} migration(s)...`);
// Run each pending migration
for (const migration of pendingMigrations) {
try {
@@ -197,7 +199,7 @@ async function run() {
throw error;
}
}
console.log(`✅ Applied ${pendingMigrations.length} migration(s)`);
process.exit(0);
} catch (err: any) {
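
Both runners now register 20260113-redesign-dealer-claim-history, whose own changes sit in the suppressed diff below, so its contents are not visible here. To plug into these runners, a migration module presumably just exports up/down functions over a Sequelize QueryInterface; the table and column below are invented purely for illustration:

// Hypothetical shape only - the real 20260113-redesign-dealer-claim-history
// migration is not shown in this commit view.
import { QueryInterface, DataTypes } from 'sequelize';

export async function up(queryInterface: QueryInterface): Promise<void> {
  // Example of the kind of change a history redesign might make:
  // add a JSONB snapshot column to a (hypothetical) history table.
  await queryInterface.addColumn('dealer_claim_history', 'workflow_snapshot', {
    type: DataTypes.JSONB,
    allowNull: true,
  });
}

export async function down(queryInterface: QueryInterface): Promise<void> {
  // Reverse the change so the runner can roll the migration back.
  await queryInterface.removeColumn('dealer_claim_history', 'workflow_snapshot');
}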

File diff suppressed because it is too large

View File

@@ -26,14 +26,18 @@ import { DealerClaimService } from './dealerClaim.service';
import { emitToRequestRoom } from '../realtime/socket';
export class DealerClaimApprovalService {
// Use lazy initialization to avoid circular dependency
private getDealerClaimService(): DealerClaimService {
return new DealerClaimService();
}
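A note on the lazy getter just added: dealerClaim.service presumably imports this approval service in turn, so constructing DealerClaimService in a field initializer could run while that module is still mid-load under the require cycle; deferring construction to first use sidesteps that. If construction ever becomes expensive, a memoized variant of the same idea would look like this (a sketch only, not what the commit does):
// Illustrative variation: cache the instance so repeated history calls
// within one approval do not re-construct the service each time.
private dealerClaimServiceInstance?: DealerClaimService;
private getDealerClaimServiceMemoized(): DealerClaimService {
  this.dealerClaimServiceInstance = this.dealerClaimServiceInstance ?? new DealerClaimService();
  return this.dealerClaimServiceInstance;
}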
/**
* Approve a level in a dealer claim workflow
* Handles dealer claim-specific logic including dynamic approvers and activity creation
*/
async approveLevel(
levelId: string,
action: ApprovalAction,
userId: string,
requestMetadata?: { ipAddress?: string | null; userAgent?: string | null }
): Promise<ApprovalLevel | null> {
try {
@@ -43,14 +47,14 @@ export class DealerClaimApprovalService {
// Get workflow to determine priority for working hours calculation
const wf = await WorkflowRequest.findByPk(level.requestId);
if (!wf) return null;
// Verify this is a claim management workflow
const workflowType = (wf as any)?.workflowType;
if (workflowType !== 'CLAIM_MANAGEMENT') {
logger.warn(`[DealerClaimApproval] Attempted to use DealerClaimApprovalService for non-claim-management workflow ${level.requestId}. Workflow type: ${workflowType}`);
throw new Error('DealerClaimApprovalService can only be used for CLAIM_MANAGEMENT workflows');
}
const priority = ((wf as any)?.priority || 'standard').toString().toLowerCase();
const isPaused = (wf as any).isPaused || (level as any).isPaused;
@@ -67,14 +71,14 @@ export class DealerClaimApprovalService {
}
const now = new Date();
// Calculate elapsed hours using working hours logic (with pause handling)
const isPausedLevel = (level as any).isPaused;
const wasResumed = !isPausedLevel &&
(level as any).pauseElapsedHours !== null &&
(level as any).pauseElapsedHours !== undefined &&
(level as any).pauseResumeDate !== null;
const pauseInfo = isPausedLevel ? {
// Level is currently paused - return frozen elapsed hours at pause time
isPaused: true,
@@ -102,6 +106,34 @@ export class DealerClaimApprovalService {
return await this.handleRejection(level, action, userId, requestMetadata, elapsedHours, tatPercentage, now);
}
// Save approval history BEFORE updating level
await this.getDealerClaimService().saveApprovalHistory(
level.requestId,
level.levelId,
level.levelNumber,
'APPROVE',
action.comments || '',
undefined,
userId
);
// Capture workflow snapshot for approval action (before moving to next level)
// This captures the approval action itself, including initiator evaluation
const levelName = (level.levelName || '').toLowerCase();
const isInitiatorEvaluation = levelName.includes('requestor') || levelName.includes('evaluation');
const approvalMessage = isInitiatorEvaluation
? `Initiator evaluated and approved (level ${level.levelNumber})`
: `Approved level ${level.levelNumber}`;
await this.getDealerClaimService().saveWorkflowHistory(
level.requestId,
approvalMessage,
userId,
level.levelId,
level.levelNumber,
level.levelName || undefined
);
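The two history helpers called here belong to DealerClaimService, whose diff is the suppressed one above. From these call sites their signatures are presumably close to the following; parameter names are inferred, and the sixth saveApprovalHistory argument (passed as undefined above) is only a guess:
// Shape inferred from the call sites in this hunk - not the actual implementation.
export interface DealerClaimHistoryWriter {
  saveApprovalHistory(
    requestId: string,
    levelId: string,
    levelNumber: number,
    action: string,                    // 'APPROVE' here; other paths likely pass other values
    comments: string,
    attachments: unknown | undefined,  // passed as undefined above; exact meaning not visible here
    actionByUserId: string
  ): Promise<void>;
  saveWorkflowHistory(
    requestId: string,
    message: string,
    actionByUserId: string,
    levelId?: string,
    levelNumber?: number,
    levelName?: string
  ): Promise<void>;
}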
// Update level status and elapsed time for approval
await level.update({
status: ApprovalStatus.APPROVED,
@@ -122,8 +154,8 @@ export class DealerClaimApprovalService {
if (isFinalApprover) {
// Final approval - close workflow
await WorkflowRequest.update(
{
status: WorkflowStatus.APPROVED,
closureDate: now,
currentLevel: level.levelNumber || 0
},
@@ -155,33 +187,33 @@ export class DealerClaimApprovalService {
logger.warn(`[DealerClaimApproval] Cannot advance workflow ${level.requestId} - workflow is paused`);
throw new Error('Cannot advance workflow - workflow is currently paused. Please resume the workflow first.');
}
// Find the next PENDING level (supports dynamically added approvers)
// Strategy: First try sequential, then find next PENDING level if sequential doesn't exist
const currentLevelNumber = level.levelNumber || 0;
logger.info(`[DealerClaimApproval] Finding next level after level ${currentLevelNumber} for request ${level.requestId}`);
// First, try sequential approach
let nextLevel = await ApprovalLevel.findOne({
where: {
requestId: level.requestId,
levelNumber: currentLevelNumber + 1
}
});
// If sequential level doesn't exist, search for next PENDING level
// This handles cases where additional approvers are added dynamically between steps
if (!nextLevel) {
logger.info(`[DealerClaimApproval] Sequential level ${currentLevelNumber + 1} not found, searching for next PENDING level (dynamic approvers)`);
nextLevel = await ApprovalLevel.findOne({
where: {
requestId: level.requestId,
levelNumber: { [Op.gt]: currentLevelNumber },
status: ApprovalStatus.PENDING
},
order: [['levelNumber', 'ASC']]
});
if (nextLevel) {
logger.info(`[DealerClaimApproval] Using fallback level ${nextLevel.levelNumber} (${(nextLevel as any).levelName || 'unnamed'})`);
}
@@ -195,9 +227,9 @@ export class DealerClaimApprovalService {
logger.warn(`[DealerClaimApproval] Sequential level ${currentLevelNumber + 1} exists but status is ${nextLevel.status}, expected PENDING. Proceeding with sequential level.`);
}
}
const nextLevelNumber = nextLevel ? (nextLevel.levelNumber || 0) : null;
if (nextLevel) {
logger.info(`[DealerClaimApproval] Found next level: ${nextLevelNumber} (${(nextLevel as any).levelName || 'unnamed'}), approver: ${(nextLevel as any).approverName || (nextLevel as any).approverEmail || 'unknown'}, status: ${nextLevel.status}`);
} else {
@@ -210,18 +242,18 @@ export class DealerClaimApprovalService {
logger.warn(`[DealerClaimApproval] Cannot activate next level ${nextLevelNumber} - level is paused`);
throw new Error('Cannot activate next level - the next approval level is currently paused. Please resume it first.');
}
// Activate next level
await nextLevel.update({
status: ApprovalStatus.IN_PROGRESS,
levelStartTime: now,
tatStartTime: now
});
// Schedule TAT jobs for the next level
try {
const workflowPriority = (wf as any)?.priority || 'STANDARD';
await tatSchedulerService.scheduleTatJobs(
level.requestId,
(nextLevel as any).levelId,
@@ -235,29 +267,40 @@ export class DealerClaimApprovalService {
logger.error(`[DealerClaimApproval] Failed to schedule TAT jobs for next level:`, tatError);
// Don't fail the approval if TAT scheduling fails
}
// Update workflow current level
if (nextLevelNumber !== null) {
await WorkflowRequest.update(
{ currentLevel: nextLevelNumber },
{ where: { requestId: level.requestId } }
);
// Capture workflow snapshot when moving to next level
// Include both the approved level and the next level in the message
await this.getDealerClaimService().saveWorkflowHistory(
level.requestId,
`Level ${level.levelNumber} approved, moved to next level (${nextLevelNumber})`,
userId,
nextLevel?.levelId || undefined, // Store next level's ID since we're moving to it
nextLevelNumber || undefined,
(nextLevel as any)?.levelName || undefined
);
logger.info(`[DealerClaimApproval] Approved level ${level.levelNumber}. Activated next level ${nextLevelNumber} for workflow ${level.requestId}`); logger.info(`[DealerClaimApproval] Approved level ${level.levelNumber}. Activated next level ${nextLevelNumber} for workflow ${level.requestId}`);
} }
// Handle dealer claim-specific step processing // Handle dealer claim-specific step processing
const currentLevelName = (level.levelName || '').toLowerCase(); const currentLevelName = (level.levelName || '').toLowerCase();
// Check by levelName first, use levelNumber only as fallback if levelName is missing // Check by levelName first, use levelNumber only as fallback if levelName is missing
// This handles cases where additional approvers shift step numbers // This handles cases where additional approvers shift step numbers
const hasLevelNameForDeptLead = level.levelName && level.levelName.trim() !== ''; const hasLevelNameForDeptLead = level.levelName && level.levelName.trim() !== '';
const isDeptLeadApproval = hasLevelNameForDeptLead const isDeptLeadApproval = hasLevelNameForDeptLead
? currentLevelName.includes('department lead') ? currentLevelName.includes('department lead')
: (level.levelNumber === 3); // Only use levelNumber if levelName is missing : (level.levelNumber === 3); // Only use levelNumber if levelName is missing
const isRequestorClaimApproval = hasLevelNameForDeptLead const isRequestorClaimApproval = hasLevelNameForDeptLead
? (currentLevelName.includes('requestor') && (currentLevelName.includes('claim') || currentLevelName.includes('approval'))) ? (currentLevelName.includes('requestor') && (currentLevelName.includes('claim') || currentLevelName.includes('approval')))
: (level.levelNumber === 5); // Only use levelNumber if levelName is missing : (level.levelNumber === 5); // Only use levelNumber if levelName is missing
if (isDeptLeadApproval) { if (isDeptLeadApproval) {
// Activity Creation is now an activity log only - process it automatically // Activity Creation is now an activity log only - process it automatically
logger.info(`[DealerClaimApproval] Department Lead approved. Processing Activity Creation as activity log.`); logger.info(`[DealerClaimApproval] Department Lead approved. Processing Activity Creation as activity log.`);
@ -273,7 +316,7 @@ export class DealerClaimApprovalService {
// E-Invoice Generation is now an activity log only - will be logged when invoice is generated via DMS webhook // E-Invoice Generation is now an activity log only - will be logged when invoice is generated via DMS webhook
logger.info(`[DealerClaimApproval] Requestor Claim Approval approved. E-Invoice generation will be logged as activity when DMS webhook is received.`); logger.info(`[DealerClaimApproval] Requestor Claim Approval approved. E-Invoice generation will be logged as activity when DMS webhook is received.`);
} }
// Log approval activity // Log approval activity
activityService.log({ activityService.log({
requestId: level.requestId, requestId: level.requestId,
@ -285,7 +328,7 @@ export class DealerClaimApprovalService {
ipAddress: requestMetadata?.ipAddress || undefined, ipAddress: requestMetadata?.ipAddress || undefined,
userAgent: requestMetadata?.userAgent || undefined userAgent: requestMetadata?.userAgent || undefined
}); });
// Notify initiator about the approval // Notify initiator about the approval
// BUT skip this if it's a dealer proposal or dealer completion step - those have special notifications below // BUT skip this if it's a dealer proposal or dealer completion step - those have special notifications below
// Priority: levelName check first, then levelNumber only if levelName is missing // Priority: levelName check first, then levelNumber only if levelName is missing
@ -297,11 +340,11 @@ export class DealerClaimApprovalService {
const isDealerCompletionApproval = hasLevelNameForApproval const isDealerCompletionApproval = hasLevelNameForApproval
? (levelNameForApproval.includes('dealer') && (levelNameForApproval.includes('completion') || levelNameForApproval.includes('documents'))) ? (levelNameForApproval.includes('dealer') && (levelNameForApproval.includes('completion') || levelNameForApproval.includes('documents')))
: (level.levelNumber === 5); // Only use levelNumber if levelName is missing : (level.levelNumber === 5); // Only use levelNumber if levelName is missing
// Skip sending approval notification to initiator if they are the approver // Skip sending approval notification to initiator if they are the approver
// (they don't need to be notified that they approved their own request) // (they don't need to be notified that they approved their own request)
const isApproverInitiator = level.approverId && (wf as any).initiatorId && level.approverId === (wf as any).initiatorId; const isApproverInitiator = level.approverId && (wf as any).initiatorId && level.approverId === (wf as any).initiatorId;
if (wf && !isDealerProposalApproval && !isDealerCompletionApproval && !isApproverInitiator) { if (wf && !isDealerProposalApproval && !isDealerCompletionApproval && !isApproverInitiator) {
await notificationService.sendToUsers([(wf as any).initiatorId], { await notificationService.sendToUsers([(wf as any).initiatorId], {
title: `Request Approved - Level ${level.levelNumber}`, title: `Request Approved - Level ${level.levelNumber}`,
@ -315,23 +358,23 @@ export class DealerClaimApprovalService {
} else if (isApproverInitiator) { } else if (isApproverInitiator) {
logger.info(`[DealerClaimApproval] Skipping approval notification to initiator - they are the approver`); logger.info(`[DealerClaimApproval] Skipping approval notification to initiator - they are the approver`);
} }
// Notify next approver - ALWAYS send notification when there's a next level // Notify next approver - ALWAYS send notification when there's a next level
if (wf && nextLevel) { if (wf && nextLevel) {
const nextApproverId = (nextLevel as any).approverId; const nextApproverId = (nextLevel as any).approverId;
const nextApproverEmail = (nextLevel as any).approverEmail || ''; const nextApproverEmail = (nextLevel as any).approverEmail || '';
const nextApproverName = (nextLevel as any).approverName || nextApproverEmail || 'approver'; const nextApproverName = (nextLevel as any).approverName || nextApproverEmail || 'approver';
// Check if it's an auto-step or system process // Check if it's an auto-step or system process
const isAutoStep = nextApproverEmail === 'system@royalenfield.com' const isAutoStep = nextApproverEmail === 'system@royalenfield.com'
|| (nextLevel as any).approverName === 'System Auto-Process' || (nextLevel as any).approverName === 'System Auto-Process'
|| nextApproverId === 'system'; || nextApproverId === 'system';
const isSystemEmail = nextApproverEmail.toLowerCase() === 'system@royalenfield.com' const isSystemEmail = nextApproverEmail.toLowerCase() === 'system@royalenfield.com'
|| nextApproverEmail.toLowerCase().includes('system'); || nextApproverEmail.toLowerCase().includes('system');
const isSystemName = nextApproverName.toLowerCase() === 'system auto-process' const isSystemName = nextApproverName.toLowerCase() === 'system auto-process'
|| nextApproverName.toLowerCase().includes('system'); || nextApproverName.toLowerCase().includes('system');
// Notify initiator when dealer submits documents (Dealer Proposal or Dealer Completion Documents) // Notify initiator when dealer submits documents (Dealer Proposal or Dealer Completion Documents)
// Check this BEFORE sending assignment notification to avoid duplicates // Check this BEFORE sending assignment notification to avoid duplicates
// Priority: levelName check first, then levelNumber only if levelName is missing // Priority: levelName check first, then levelNumber only if levelName is missing
@ -343,19 +386,19 @@ export class DealerClaimApprovalService {
const isDealerCompletionApproval = hasLevelNameForNotification const isDealerCompletionApproval = hasLevelNameForNotification
? (levelNameForNotification.includes('dealer') && (levelNameForNotification.includes('completion') || levelNameForNotification.includes('documents'))) ? (levelNameForNotification.includes('dealer') && (levelNameForNotification.includes('completion') || levelNameForNotification.includes('documents')))
: (level.levelNumber === 5); // Only use levelNumber if levelName is missing : (level.levelNumber === 5); // Only use levelNumber if levelName is missing
// Check if next approver is the initiator (to avoid duplicate notifications) // Check if next approver is the initiator (to avoid duplicate notifications)
const isNextApproverInitiator = nextApproverId && (wf as any).initiatorId && nextApproverId === (wf as any).initiatorId; const isNextApproverInitiator = nextApproverId && (wf as any).initiatorId && nextApproverId === (wf as any).initiatorId;
if (isDealerProposalApproval && (wf as any).initiatorId) { if (isDealerProposalApproval && (wf as any).initiatorId) {
// Get dealer and proposal data for the email template // Get dealer and proposal data for the email template
const { DealerClaimDetails } = await import('@models/DealerClaimDetails'); const { DealerClaimDetails } = await import('@models/DealerClaimDetails');
const { DealerProposalDetails } = await import('@models/DealerProposalDetails'); const { DealerProposalDetails } = await import('@models/DealerProposalDetails');
const { DealerProposalCostItem } = await import('@models/DealerProposalCostItem'); const { DealerProposalCostItem } = await import('@models/DealerProposalCostItem');
const claimDetails = await DealerClaimDetails.findOne({ where: { requestId: level.requestId } }); const claimDetails = await DealerClaimDetails.findOne({ where: { requestId: level.requestId } });
const proposalDetails = await DealerProposalDetails.findOne({ where: { requestId: level.requestId } }); const proposalDetails = await DealerProposalDetails.findOne({ where: { requestId: level.requestId } });
// Get cost items if proposal exists // Get cost items if proposal exists
let costBreakup: any[] = []; let costBreakup: any[] = [];
if (proposalDetails) { if (proposalDetails) {
@ -371,7 +414,7 @@ export class DealerClaimApprovalService {
})); }));
} }
} }
// Get dealer user // Get dealer user
const dealerUser = level.approverId ? await User.findByPk(level.approverId) : null; const dealerUser = level.approverId ? await User.findByPk(level.approverId) : null;
const dealerData = dealerUser ? dealerUser.toJSON() : { const dealerData = dealerUser ? dealerUser.toJSON() : {
@ -379,15 +422,15 @@ export class DealerClaimApprovalService {
email: level.approverEmail || '', email: level.approverEmail || '',
displayName: level.approverName || level.approverEmail || 'Dealer' displayName: level.approverName || level.approverEmail || 'Dealer'
}; };
// Get next approver (could be Step 2 - Requestor Evaluation, or an additional approver if one was added between Step 1 and Step 2) // Get next approver (could be Step 2 - Requestor Evaluation, or an additional approver if one was added between Step 1 and Step 2)
// The nextLevel is already found above using dynamic logic that handles additional approvers correctly // The nextLevel is already found above using dynamic logic that handles additional approvers correctly
const nextApproverData = nextLevel ? await User.findByPk((nextLevel as any).approverId) : null; const nextApproverData = nextLevel ? await User.findByPk((nextLevel as any).approverId) : null;
// Check if next approver is an additional approver (handles cases where additional approvers are added between Step 1 and Step 2) // Check if next approver is an additional approver (handles cases where additional approvers are added between Step 1 and Step 2)
const nextLevelName = nextLevel ? ((nextLevel as any).levelName || '').toLowerCase() : ''; const nextLevelName = nextLevel ? ((nextLevel as any).levelName || '').toLowerCase() : '';
const isNextAdditionalApprover = nextLevelName.includes('additional approver'); const isNextAdditionalApprover = nextLevelName.includes('additional approver');
// Send proposal submitted notification with proper type and metadata // Send proposal submitted notification with proper type and metadata
// This will use the dealerProposalSubmitted template, not the multi-level approval template // This will use the dealerProposalSubmitted template, not the multi-level approval template
await notificationService.sendToUsers([(wf as any).initiatorId], { await notificationService.sendToUsers([(wf as any).initiatorId], {
@ -416,17 +459,17 @@ export class DealerClaimApprovalService {
activityType: claimDetails ? (claimDetails as any).activityType : undefined activityType: claimDetails ? (claimDetails as any).activityType : undefined
} }
}); });
logger.info(`[DealerClaimApproval] Sent proposal_submitted notification to initiator for Dealer Proposal Submission. Next approver: ${isNextApproverInitiator ? 'Initiator (self)' : (isNextAdditionalApprover ? 'Additional Approver' : 'Step 2 (Requestor Evaluation)')}`); logger.info(`[DealerClaimApproval] Sent proposal_submitted notification to initiator for Dealer Proposal Submission. Next approver: ${isNextApproverInitiator ? 'Initiator (self)' : (isNextAdditionalApprover ? 'Additional Approver' : 'Step 2 (Requestor Evaluation)')}`);
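A small sketch of how the label in the log line above is derived; the function name is illustrative and does not exist in the service.

// Illustrative only: mirrors the ternary used in the proposal_submitted log message above.
function describeNextApprover(isNextApproverInitiator: boolean, isNextAdditionalApprover: boolean): string {
  if (isNextApproverInitiator) return 'Initiator (self)';
  if (isNextAdditionalApprover) return 'Additional Approver';
  return 'Step 2 (Requestor Evaluation)';
}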
} else if (isDealerCompletionApproval && (wf as any).initiatorId) { } else if (isDealerCompletionApproval && (wf as any).initiatorId) {
// Get dealer and completion data for the email template // Get dealer and completion data for the email template
const { DealerClaimDetails } = await import('@models/DealerClaimDetails'); const { DealerClaimDetails } = await import('@models/DealerClaimDetails');
const { DealerCompletionDetails } = await import('@models/DealerCompletionDetails'); const { DealerCompletionDetails } = await import('@models/DealerCompletionDetails');
const { DealerCompletionExpense } = await import('@models/DealerCompletionExpense'); const { DealerCompletionExpense } = await import('@models/DealerCompletionExpense');
const claimDetails = await DealerClaimDetails.findOne({ where: { requestId: level.requestId } }); const claimDetails = await DealerClaimDetails.findOne({ where: { requestId: level.requestId } });
const completionDetails = await DealerCompletionDetails.findOne({ where: { requestId: level.requestId } }); const completionDetails = await DealerCompletionDetails.findOne({ where: { requestId: level.requestId } });
// Get expense items if completion exists // Get expense items if completion exists
let closedExpenses: any[] = []; let closedExpenses: any[] = [];
if (completionDetails) { if (completionDetails) {
@ -439,7 +482,7 @@ export class DealerClaimApprovalService {
amount: Number(item.amount) || 0 amount: Number(item.amount) || 0
})); }));
} }
// Get dealer user // Get dealer user
const dealerUser = level.approverId ? await User.findByPk(level.approverId) : null; const dealerUser = level.approverId ? await User.findByPk(level.approverId) : null;
const dealerData = dealerUser ? dealerUser.toJSON() : { const dealerData = dealerUser ? dealerUser.toJSON() : {
@ -447,17 +490,17 @@ export class DealerClaimApprovalService {
email: level.approverEmail || '', email: level.approverEmail || '',
displayName: level.approverName || level.approverEmail || 'Dealer' displayName: level.approverName || level.approverEmail || 'Dealer'
}; };
// Get next approver (could be Step 5 - Requestor Claim Approval, or an additional approver if one was added between Step 4 and Step 5) // Get next approver (could be Step 5 - Requestor Claim Approval, or an additional approver if one was added between Step 4 and Step 5)
const nextApproverData = nextLevel ? await User.findByPk((nextLevel as any).approverId) : null; const nextApproverData = nextLevel ? await User.findByPk((nextLevel as any).approverId) : null;
// Check if next approver is an additional approver (handles cases where additional approvers are added between Step 4 and Step 5) // Check if next approver is an additional approver (handles cases where additional approvers are added between Step 4 and Step 5)
const nextLevelName = nextLevel ? ((nextLevel as any).levelName || '').toLowerCase() : ''; const nextLevelName = nextLevel ? ((nextLevel as any).levelName || '').toLowerCase() : '';
const isNextAdditionalApprover = nextLevelName.includes('additional approver'); const isNextAdditionalApprover = nextLevelName.includes('additional approver');
// Check if next approver is the initiator (to show appropriate message in email) // Check if next approver is the initiator (to show appropriate message in email)
const isNextApproverInitiator = nextApproverData && (wf as any).initiatorId && nextApproverData.userId === (wf as any).initiatorId; const isNextApproverInitiator = nextApproverData && (wf as any).initiatorId && nextApproverData.userId === (wf as any).initiatorId;
// Send completion submitted notification with proper type and metadata // Send completion submitted notification with proper type and metadata
// This will use the completionDocumentsSubmitted template, not the multi-level approval template // This will use the completionDocumentsSubmitted template, not the multi-level approval template
await notificationService.sendToUsers([(wf as any).initiatorId], { await notificationService.sendToUsers([(wf as any).initiatorId], {
@ -484,10 +527,10 @@ export class DealerClaimApprovalService {
nextApproverId: nextApproverData ? nextApproverData.userId : undefined nextApproverId: nextApproverData ? nextApproverData.userId : undefined
} }
}); });
logger.info(`[DealerClaimApproval] Sent completion_submitted notification to initiator for Dealer Completion Documents. Next approver: ${isNextAdditionalApprover ? 'Additional Approver' : 'Step 5 (Requestor Claim Approval)'}`); logger.info(`[DealerClaimApproval] Sent completion_submitted notification to initiator for Dealer Completion Documents. Next approver: ${isNextAdditionalApprover ? 'Additional Approver' : 'Step 5 (Requestor Claim Approval)'}`);
} }
// Only send assignment notification to next approver if: // Only send assignment notification to next approver if:
// 1. It's NOT a dealer proposal/completion step (those have special notifications above) // 1. It's NOT a dealer proposal/completion step (those have special notifications above)
// 2. Next approver is NOT the initiator (to avoid duplicate notifications) // 2. Next approver is NOT the initiator (to avoid duplicate notifications)
@ -496,8 +539,8 @@ export class DealerClaimApprovalService {
if (!isAutoStep && !isSystemEmail && !isSystemName && nextApproverId && nextApproverId !== 'system') { if (!isAutoStep && !isSystemEmail && !isSystemName && nextApproverId && nextApproverId !== 'system') {
try { try {
logger.info(`[DealerClaimApproval] Sending assignment notification to next approver: ${nextApproverName} (${nextApproverId}) at level ${nextLevelNumber} for request ${(wf as any).requestNumber}`); logger.info(`[DealerClaimApproval] Sending assignment notification to next approver: ${nextApproverName} (${nextApproverId}) at level ${nextLevelNumber} for request ${(wf as any).requestNumber}`);
await notificationService.sendToUsers([ nextApproverId ], { await notificationService.sendToUsers([nextApproverId], {
title: `Action required: ${(wf as any).requestNumber}`, title: `Action required: ${(wf as any).requestNumber}`,
body: `${(wf as any).title}`, body: `${(wf as any).title}`,
requestNumber: (wf as any).requestNumber, requestNumber: (wf as any).requestNumber,
@ -541,15 +584,15 @@ export class DealerClaimApprovalService {
// No next level found but not final approver - this shouldn't happen // No next level found but not final approver - this shouldn't happen
logger.warn(`[DealerClaimApproval] No next level found for workflow ${level.requestId} after approving level ${level.levelNumber}`); logger.warn(`[DealerClaimApproval] No next level found for workflow ${level.requestId} after approving level ${level.levelNumber}`);
await WorkflowRequest.update( await WorkflowRequest.update(
{ {
status: WorkflowStatus.APPROVED, status: WorkflowStatus.APPROVED,
closureDate: now, closureDate: now,
currentLevel: level.levelNumber || 0 currentLevel: level.levelNumber || 0
}, },
{ where: { requestId: level.requestId } } { where: { requestId: level.requestId } }
); );
if (wf) { if (wf) {
await notificationService.sendToUsers([ (wf as any).initiatorId ], { await notificationService.sendToUsers([(wf as any).initiatorId], {
title: `Approved: ${(wf as any).requestNumber}`, title: `Approved: ${(wf as any).requestNumber}`,
body: `${(wf as any).title}`, body: `${(wf as any).title}`,
requestNumber: (wf as any).requestNumber, requestNumber: (wf as any).requestNumber,
@ -570,9 +613,9 @@ export class DealerClaimApprovalService {
levelNumber: level.levelNumber, levelNumber: level.levelNumber,
timestamp: now.toISOString() timestamp: now.toISOString()
}); });
logger.info(`[DealerClaimApproval] Approval level ${levelId} ${action.action.toLowerCase()}ed and socket event emitted`); logger.info(`[DealerClaimApproval] Approval level ${levelId} ${action.action.toLowerCase()}ed and socket event emitted`);
return level; return level;
} catch (error) { } catch (error) {
logger.error('[DealerClaimApproval] Error approving level:', error); logger.error('[DealerClaimApproval] Error approving level:', error);
@ -596,6 +639,125 @@ export class DealerClaimApprovalService {
const wf = await WorkflowRequest.findByPk(level.requestId); const wf = await WorkflowRequest.findByPk(level.requestId);
if (!wf) return null; if (!wf) return null;
// Check if this is the Department Lead approval step (Step 3)
// Robust check: match level name variations ('Department Lead' / 'Dept Lead'), case-insensitively
const levelName = (level.levelName || '').toLowerCase();
const isDeptLeadResult =
levelName.includes('department lead') ||
levelName.includes('dept lead');
if (isDeptLeadResult) {
logger.info(`[DealerClaimApproval] Department Lead rejected request ${level.requestId}. Circling back to initiator.`);
// Save approval history (rejection) BEFORE updating level
await this.getDealerClaimService().saveApprovalHistory(
level.requestId,
level.levelId,
level.levelNumber,
'REJECT',
action.comments || '',
action.rejectionReason || undefined,
userId
);
// Update level status to REJECTED (here this signifies a return to the initiator at this level, not a terminal rejection)
await level.update({
status: ApprovalStatus.REJECTED,
actionDate: rejectionNow,
levelEndTime: rejectionNow,
elapsedHours: elapsedHours || 0,
tatPercentageUsed: tatPercentage || 0,
comments: action.comments || action.rejectionReason || undefined
});
// Create or activate initiator action level
const initiatorLevel = await this.getDealerClaimService().createOrActivateInitiatorLevel(
level.requestId,
(wf as any).initiatorId
);
// Update workflow status to REJECTED but DO NOT set closureDate
// Set currentLevel to initiator level if created
const newCurrentLevel = initiatorLevel ? initiatorLevel.levelNumber : wf.currentLevel;
await WorkflowRequest.update(
{
status: WorkflowStatus.REJECTED,
currentLevel: newCurrentLevel
},
{ where: { requestId: level.requestId } }
);
// Capture workflow snapshot when moving back to initiator
// Include the rejected level information in the message
await this.getDealerClaimService().saveWorkflowHistory(
level.requestId,
`Department Lead rejected (level ${level.levelNumber}) and moved back to initiator (level ${newCurrentLevel})`,
userId,
level.levelId, // Store the rejected level's ID
level.levelNumber, // Store the rejected level's number
level.levelName || undefined // Store the rejected level's name
);
// Log activity
activityService.log({
requestId: level.requestId,
type: 'rejection',
user: { userId: level.approverId, name: level.approverName },
timestamp: rejectionNow.toISOString(),
action: 'Returned to Initiator',
details: `Request returned to initiator by ${level.approverName || level.approverEmail}. Reason: ${action.rejectionReason || action.comments || 'No reason provided'}`,
ipAddress: requestMetadata?.ipAddress || undefined,
userAgent: requestMetadata?.userAgent || undefined
});
// Notify ONLY the initiator
await notificationService.sendToUsers([(wf as any).initiatorId], {
title: `Action Required: Request Returned - ${(wf as any).requestNumber}`,
body: `Your request "${(wf as any).title}" has been returned to you by the Department Lead for revision/discussion. Reason: ${action.rejectionReason || action.comments || 'No reason provided'}`,
requestNumber: (wf as any).requestNumber,
requestId: level.requestId,
url: `/request/${(wf as any).requestNumber}`,
type: 'rejection',
priority: 'HIGH',
actionRequired: true
});
// Emit real-time update
emitToRequestRoom(level.requestId, 'request:updated', {
requestId: level.requestId,
requestNumber: (wf as any)?.requestNumber,
action: 'RETURN',
levelNumber: level.levelNumber,
timestamp: rejectionNow.toISOString()
});
return level;
}
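The branch above returns the request to the initiator; the default path below closes the workflow permanently. A hedged sketch of the difference in the WorkflowRequest update each path performs (field names match the updates in this method; the helper itself is illustrative):

// Illustrative comparison of the two WorkflowRequest updates performed in this method.
// A Department Lead rejection leaves closureDate unset so the request stays open for revision;
// the default path below sets closureDate and closes the workflow.
type RejectionOutcome = { status: 'REJECTED'; currentLevel?: number; closureDate?: Date };

function rejectionOutcome(isDeptLeadResult: boolean, initiatorLevelNumber: number, now: Date): RejectionOutcome {
  return isDeptLeadResult
    ? { status: 'REJECTED', currentLevel: initiatorLevelNumber } // returned to initiator, workflow stays open
    : { status: 'REJECTED', closureDate: now };                  // terminal rejection, workflow closed
}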
// Default terminal rejection logic for other steps
logger.info(`[DealerClaimApproval] Standard rejection for request ${level.requestId} by level ${level.levelNumber}`);
// Save approval history (rejection) BEFORE updating level
await this.getDealerClaimService().saveApprovalHistory(
level.requestId,
level.levelId,
level.levelNumber,
'REJECT',
action.comments || '',
action.rejectionReason || undefined,
userId
);
// Capture workflow snapshot for terminal rejection action
await this.getDealerClaimService().saveWorkflowHistory(
level.requestId,
`Level ${level.levelNumber} rejected (terminal rejection)`,
userId,
level.levelId,
level.levelNumber,
level.levelName || undefined
);
// Update level status // Update level status
await level.update({ await level.update({
status: ApprovalStatus.REJECTED, status: ApprovalStatus.REJECTED,
@ -608,8 +770,8 @@ export class DealerClaimApprovalService {
// Close workflow // Close workflow
await WorkflowRequest.update( await WorkflowRequest.update(
{ {
status: WorkflowStatus.REJECTED, status: WorkflowStatus.REJECTED,
closureDate: rejectionNow closureDate: rejectionNow
}, },
{ where: { requestId: level.requestId } } { where: { requestId: level.requestId } }
@ -688,15 +850,15 @@ export class DealerClaimApprovalService {
} }
const now = new Date(); const now = new Date();
// Calculate elapsed hours // Calculate elapsed hours
const priority = ((wf as any)?.priority || 'standard').toString().toLowerCase(); const priority = ((wf as any)?.priority || 'standard').toString().toLowerCase();
const isPausedLevel = (level as any).isPaused; const isPausedLevel = (level as any).isPaused;
const wasResumed = !isPausedLevel && const wasResumed = !isPausedLevel &&
(level as any).pauseElapsedHours !== null && (level as any).pauseElapsedHours !== null &&
(level as any).pauseElapsedHours !== undefined && (level as any).pauseElapsedHours !== undefined &&
(level as any).pauseResumeDate !== null; (level as any).pauseResumeDate !== null;
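A minimal sketch of the pause-state classification used by the flags above; the field names come from the level record read here, while the helper and its return labels are assumptions for illustration.

// Illustrative only: classifies a level's pause state the same way the flags above do.
interface PauseFields { isPaused?: boolean; pauseElapsedHours?: number | null; pauseResumeDate?: Date | null; }

function pauseState(level: PauseFields): 'paused' | 'resumed' | 'never-paused' {
  if (level.isPaused) return 'paused'; // elapsed hours stay frozen at the value captured when the level was paused
  const wasResumed = level.pauseElapsedHours !== null &&
    level.pauseElapsedHours !== undefined &&
    level.pauseResumeDate != null;
  return wasResumed ? 'resumed' : 'never-paused';
}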
const pauseInfo = isPausedLevel ? { const pauseInfo = isPausedLevel ? {
// Level is currently paused - return frozen elapsed hours at pause time // Level is currently paused - return frozen elapsed hours at pause time
isPaused: true, isPaused: true,
View File
@ -21,16 +21,24 @@ interface UploadResult {
class GCSStorageService { class GCSStorageService {
private storage: Storage | null = null; private storage: Storage | null = null;
private bucketName: string; private bucketName: string = '';
private projectId: string; private projectId: string = '';
constructor() { constructor() {
// Check if Google Secret Manager should be used
const useGoogleSecretManager = process.env.USE_GOOGLE_SECRET_MANAGER === 'true';
if (!useGoogleSecretManager) {
logger.info('[GCS] USE_GOOGLE_SECRET_MANAGER is not enabled. Will use local storage fallback.');
return;
}
this.projectId = process.env.GCP_PROJECT_ID || ''; this.projectId = process.env.GCP_PROJECT_ID || '';
this.bucketName = process.env.GCP_BUCKET_NAME || ''; this.bucketName = process.env.GCP_BUCKET_NAME || '';
const keyFilePath = process.env.GCP_KEY_FILE || ''; const keyFilePath = process.env.GCP_KEY_FILE || '';
if (!this.projectId || !this.bucketName || !keyFilePath) { if (!this.projectId || !this.bucketName || !keyFilePath) {
logger.warn('[GCS] GCP configuration missing. File uploads will fail.'); logger.warn('[GCS] GCP configuration missing. File uploads will use local storage fallback.');
return; return;
} }
@ -41,7 +49,7 @@ class GCSStorageService {
: path.resolve(process.cwd(), keyFilePath); : path.resolve(process.cwd(), keyFilePath);
if (!fs.existsSync(resolvedKeyPath)) { if (!fs.existsSync(resolvedKeyPath)) {
logger.error(`[GCS] Key file not found at: ${resolvedKeyPath}`); logger.error(`[GCS] Key file not found at: ${resolvedKeyPath}. Will use local storage fallback.`);
return; return;
} }
@ -55,7 +63,7 @@ class GCSStorageService {
bucketName: this.bucketName, bucketName: this.bucketName,
}); });
} catch (error) { } catch (error) {
logger.error('[GCS] Failed to initialize:', error); logger.error('[GCS] Failed to initialize. Will use local storage fallback:', error);
} }
} }
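A hedged sketch of the initialization gates this constructor now applies before creating a Storage client; the helper is illustrative, while the env var names and path resolution match the code above. Any failed gate leaves storage === null and the service falls back to local storage.

// Illustrative only: the sequence of gates checked by the constructor above.
import * as fs from 'fs';
import * as path from 'path';

function shouldInitGcs(): boolean {
  if (process.env.USE_GOOGLE_SECRET_MANAGER !== 'true') return false;   // feature gate added in this commit
  const projectId = process.env.GCP_PROJECT_ID || '';
  const bucketName = process.env.GCP_BUCKET_NAME || '';
  const keyFilePath = process.env.GCP_KEY_FILE || '';
  if (!projectId || !bucketName || !keyFilePath) return false;          // configuration missing
  const resolvedKeyPath = path.isAbsolute(keyFilePath) ? keyFilePath : path.resolve(process.cwd(), keyFilePath);
  return fs.existsSync(resolvedKeyPath);                                // key file must exist on disk
}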
@ -341,6 +349,12 @@ class GCSStorageService {
* Check if GCS is properly configured * Check if GCS is properly configured
*/ */
isConfigured(): boolean { isConfigured(): boolean {
// Check if Google Secret Manager is enabled
const useGoogleSecretManager = process.env.USE_GOOGLE_SECRET_MANAGER === 'true';
if (!useGoogleSecretManager) {
return false;
}
return this.storage !== null && this.bucketName !== '' && this.projectId !== ''; return this.storage !== null && this.bucketName !== '' && this.projectId !== '';
} }
} }
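A usage sketch, assuming a caller that picks a storage backend based on isConfigured(). The caller, the 'uploads' directory, and the placeholder GCS destination are assumptions that only illustrate the local-fallback intent stated in the log messages above.

// Hypothetical caller: writes to a local uploads directory when GCS is not configured.
import { promises as fsp } from 'fs';
import * as path from 'path';

async function storeUpload(gcs: { isConfigured(): boolean }, data: Buffer, fileName: string): Promise<string> {
  if (gcs.isConfigured()) {
    // Real code would call the service's GCS upload method here.
    return `gcs://${fileName}`; // placeholder destination string
  }
  const localPath = path.join(process.cwd(), 'uploads', fileName);
  await fsp.mkdir(path.dirname(localPath), { recursive: true });
  await fsp.writeFile(localPath, data);
  return localPath;
}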