Compare commits

...

5 Commits

229 changed files with 30846 additions and 32394 deletions

View File

@@ -0,0 +1,114 @@
import logger from '@utils/logger';
// Special UUID for system events (login, etc.) - well-known UUID: 00000000-0000-0000-0000-000000000001
export const SYSTEM_EVENT_REQUEST_ID = '00000000-0000-0000-0000-000000000001';
// One audit-trail entry, cached in memory and persisted to the Activity table by ActivityService.log().
export type ActivityEntry = {
// Workflow request this entry belongs to; use SYSTEM_EVENT_REQUEST_ID for request-less system events (e.g. login).
requestId: string;
// Event discriminator; drives category/severity inference in ActivityService when no explicit override is given.
type: 'created' | 'submitted' | 'assignment' | 'approval' | 'rejection' | 'status_change' | 'comment' | 'reminder' | 'document_added' | 'sla_warning' | 'ai_conclusion_generated' | 'summary_generated' | 'closed' | 'login' | 'paused' | 'resumed' | 'pause_retriggered';
// Acting user, if any; when absent the entry is persisted with isSystemEvent = true.
user?: { userId: string; name?: string; email?: string };
// Event time as a string; callers pass new Date().toISOString() (ISO-8601).
timestamp: string;
// Short human-readable action label, e.g. 'Approved'.
action: string;
// Longer human-readable description; persisted as activityDescription.
details: string;
// Arbitrary extra payload stored alongside the activity row. NOTE(review): consider tightening from `any`.
metadata?: any;
// Client IP from the originating HTTP request, when captured.
ipAddress?: string;
// Client user agent from the originating HTTP request, when captured.
userAgent?: string;
// Optional category override; otherwise inferred from `type` by ActivityService.
category?: string;
// Optional severity override; otherwise inferred from `type` by ActivityService.
severity?: string;
};
/**
 * Records workflow activity entries in an in-memory per-request cache and
 * persists them to the Activity database table.
 *
 * Persistence failures are logged and swallowed so that activity logging
 * never breaks the calling workflow operation.
 */
class ActivityService {
  /** In-memory cache of entries keyed by requestId (lost on process restart). */
  private byRequest: Map<string, ActivityEntry[]> = new Map();

  /**
   * Activity type -> category lookup.
   * Hoisted to a static constant so the map is built once instead of being
   * re-created on every inferCategory() call.
   */
  private static readonly CATEGORY_MAP: Record<string, string> = {
    created: 'WORKFLOW',
    submitted: 'WORKFLOW',
    approval: 'WORKFLOW',
    rejection: 'WORKFLOW',
    status_change: 'WORKFLOW',
    assignment: 'WORKFLOW',
    comment: 'COLLABORATION',
    document_added: 'DOCUMENT',
    sla_warning: 'SYSTEM',
    reminder: 'SYSTEM',
    ai_conclusion_generated: 'SYSTEM',
    // Fix: 'summary_generated' is a declared ActivityEntry type but was
    // missing from the map, so summary events fell through to 'OTHER'.
    // Categorize it like its sibling system event 'ai_conclusion_generated'.
    summary_generated: 'SYSTEM',
    closed: 'WORKFLOW',
    login: 'AUTHENTICATION',
    paused: 'WORKFLOW',
    resumed: 'WORKFLOW',
    pause_retriggered: 'WORKFLOW'
  };

  /** Activity type -> severity lookup; hoisted for the same reason. */
  private static readonly SEVERITY_MAP: Record<string, string> = {
    rejection: 'WARNING',
    sla_warning: 'WARNING',
    paused: 'WARNING',
    approval: 'INFO',
    closed: 'INFO',
    status_change: 'INFO',
    login: 'INFO',
    created: 'INFO',
    submitted: 'INFO',
    comment: 'INFO',
    document_added: 'INFO',
    assignment: 'INFO',
    reminder: 'INFO',
    ai_conclusion_generated: 'INFO',
    summary_generated: 'INFO',
    resumed: 'INFO',
    pause_retriggered: 'INFO'
  };

  /** Maps an activity type to its audit category; unknown types become 'OTHER'. */
  private inferCategory(type: string): string {
    return ActivityService.CATEGORY_MAP[type] || 'OTHER';
  }

  /** Maps an activity type to its severity; unknown types default to 'INFO'. */
  private inferSeverity(type: string): string {
    return ActivityService.SEVERITY_MAP[type] || 'INFO';
  }

  /**
   * Appends the entry to the in-memory cache and persists it to the database.
   *
   * @param entry Activity to record; category/severity are inferred from
   *              entry.type when not explicitly provided.
   * @returns Resolves once persistence has been attempted. Never rejects:
   *          database errors are logged and swallowed.
   */
  async log(entry: ActivityEntry): Promise<void> {
    const list = this.byRequest.get(entry.requestId) || [];
    list.push(entry);
    this.byRequest.set(entry.requestId, list);
    // Persist to database
    try {
      // Lazy require to avoid eager model loading (and potential circular imports).
      const { Activity } = require('@models/Activity');
      const userName = entry.user?.name || entry.user?.email || null;
      const activityData = {
        requestId: entry.requestId,
        userId: entry.user?.userId || null,
        userName: userName,
        activityType: entry.type,
        activityDescription: entry.details,
        activityCategory: entry.category || this.inferCategory(entry.type),
        severity: entry.severity || this.inferSeverity(entry.type),
        metadata: entry.metadata || null,
        // Entries without a user are treated as system-generated events.
        isSystemEvent: !entry.user,
        ipAddress: entry.ipAddress || null, // Database accepts null
        userAgent: entry.userAgent || null, // Database accepts null
      };
      logger.info(`[Activity] Creating activity:`, {
        requestId: entry.requestId,
        userName,
        userId: entry.user?.userId,
        type: entry.type,
        // Mask the IP in logs; only indicate presence/absence.
        ipAddress: entry.ipAddress ? '***' : null
      });
      await Activity.create(activityData);
      logger.info(`[Activity] Successfully logged activity for request ${entry.requestId} by user: ${userName}`);
    } catch (error: unknown) {
      logger.error('[Activity] Failed to persist activity:', error);
    }
  }

  /**
   * Returns the in-memory entries recorded for a request during this process
   * lifetime (does not read from the database).
   */
  get(requestId: string): ActivityEntry[] {
    return this.byRequest.get(requestId) || [];
  }
}

/** Shared singleton used across services. */
export const activityService = new ActivityService();

View File

@@ -0,0 +1,897 @@
import { ApprovalLevel } from '@models/ApprovalLevel';
import { WorkflowRequest } from '@models/WorkflowRequest';
import { Participant } from '@models/Participant';
import { TatAlert } from '@models/TatAlert';
import { ApprovalAction } from '../types/approval.types';
import { ApprovalStatus, WorkflowStatus } from '../types/common.types';
import { calculateTATPercentage } from '@utils/helpers';
import { calculateElapsedWorkingHours } from '@utils/tatTimeUtils';
import logger, { logWorkflowEvent, logAIEvent } from '@utils/logger';
import { Op } from 'sequelize';
import { notificationService } from './notification.service';
import { activityService } from './activity.service';
import { tatSchedulerService } from './tatScheduler.service';
import { emitToRequestRoom } from '../realtime/socket';
// Note: DealerClaimService import removed - dealer claim approvals are handled by DealerClaimApprovalService
export class ApprovalService {
async approveLevel(levelId: string, action: ApprovalAction, _userId: string, requestMetadata?: { ipAddress?: string | null; userAgent?: string | null }): Promise<ApprovalLevel | null> {
try {
const level = await ApprovalLevel.findByPk(levelId);
if (!level) return null;
// Get workflow to determine priority for working hours calculation
const wf = await WorkflowRequest.findByPk(level.requestId);
if (!wf) return null;
// Verify this is NOT a claim management workflow (should use DealerClaimApprovalService)
const workflowType = (wf as any)?.workflowType;
if (workflowType === 'CLAIM_MANAGEMENT') {
logger.error(`[Approval] Attempted to use ApprovalService for CLAIM_MANAGEMENT workflow ${level.requestId}. Use DealerClaimApprovalService instead.`);
throw new Error('ApprovalService cannot be used for CLAIM_MANAGEMENT workflows. Use DealerClaimApprovalService instead.');
}
const priority = ((wf as any)?.priority || 'standard').toString().toLowerCase();
const isPaused = (wf as any).isPaused || (level as any).isPaused;
// If paused, resume automatically when approving/rejecting (requirement 3.6)
if (isPaused) {
const { pauseService } = await import('./pause.service');
try {
await pauseService.resumeWorkflow(level.requestId, _userId);
logger.info(`[Approval] Auto-resumed paused workflow ${level.requestId} when ${action.action === 'APPROVE' ? 'approving' : 'rejecting'}`);
} catch (pauseError) {
logger.warn(`[Approval] Failed to auto-resume paused workflow:`, pauseError);
// Continue with approval/rejection even if resume fails
}
}
const now = new Date();
// Calculate elapsed hours using working hours logic (with pause handling)
// Case 1: Level is currently paused (isPaused = true)
// Case 2: Level was paused and resumed (isPaused = false but pauseElapsedHours and pauseResumeDate exist)
const isPausedLevel = (level as any).isPaused;
const wasResumed = !isPausedLevel &&
(level as any).pauseElapsedHours !== null &&
(level as any).pauseElapsedHours !== undefined &&
(level as any).pauseResumeDate !== null;
const pauseInfo = isPausedLevel ? {
// Level is currently paused - return frozen elapsed hours at pause time
isPaused: true,
pausedAt: (level as any).pausedAt,
pauseElapsedHours: (level as any).pauseElapsedHours,
pauseResumeDate: (level as any).pauseResumeDate
} : wasResumed ? {
// Level was paused but has been resumed - add pre-pause elapsed hours + time since resume
isPaused: false,
pausedAt: null,
pauseElapsedHours: Number((level as any).pauseElapsedHours), // Pre-pause elapsed hours
pauseResumeDate: (level as any).pauseResumeDate // Actual resume timestamp
} : undefined;
const elapsedHours = await calculateElapsedWorkingHours(
level.levelStartTime || level.createdAt,
now,
priority,
pauseInfo
);
const tatPercentage = calculateTATPercentage(elapsedHours, level.tatHours);
const updateData = {
status: action.action === 'APPROVE' ? ApprovalStatus.APPROVED : ApprovalStatus.REJECTED,
actionDate: now,
levelEndTime: now,
elapsedHours,
tatPercentageUsed: tatPercentage,
comments: action.comments,
rejectionReason: action.rejectionReason
};
const updatedLevel = await level.update(updateData);
// Cancel TAT jobs for the current level since it's been actioned
try {
await tatSchedulerService.cancelTatJobs(level.requestId, level.levelId);
logger.info(`[Approval] TAT jobs cancelled for level ${level.levelId}`);
} catch (tatError) {
logger.error(`[Approval] Failed to cancel TAT jobs:`, tatError);
// Don't fail the approval if TAT cancellation fails
}
// Update TAT alerts for this level to mark completion status
try {
const wasOnTime = elapsedHours <= level.tatHours;
await TatAlert.update(
{
wasCompletedOnTime: wasOnTime,
completionTime: now
},
{
where: { levelId: level.levelId }
}
);
logger.info(`[Approval] TAT alerts updated for level ${level.levelId} - Completed ${wasOnTime ? 'on time' : 'late'}`);
} catch (tatAlertError) {
logger.error(`[Approval] Failed to update TAT alerts:`, tatAlertError);
// Don't fail the approval if TAT alert update fails
}
// Handle approval - move to next level or close workflow (wf already loaded above)
if (action.action === 'APPROVE') {
// Check if this is final approval: either isFinalApprover flag is set OR all levels are approved
// This handles cases where additional approvers are added after initial approval
const allLevels = await ApprovalLevel.findAll({
where: { requestId: level.requestId },
order: [['levelNumber', 'ASC']]
});
const approvedLevelsCount = allLevels.filter((l: any) => l.status === 'APPROVED').length;
const totalLevels = allLevels.length;
const isAllLevelsApproved = approvedLevelsCount === totalLevels;
const isFinalApproval = level.isFinalApprover || isAllLevelsApproved;
if (isFinalApproval) {
// Final approver - close workflow as APPROVED
await WorkflowRequest.update(
{
status: WorkflowStatus.APPROVED,
closureDate: now,
currentLevel: (level.levelNumber || 0) + 1
},
{ where: { requestId: level.requestId } }
);
logWorkflowEvent('approved', level.requestId, {
level: level.levelNumber,
isFinalApproval: true,
status: 'APPROVED',
detectedBy: level.isFinalApprover ? 'isFinalApprover flag' : 'all levels approved check'
});
// Log final approval activity first (so it's included in AI context)
activityService.log({
requestId: level.requestId,
type: 'approval',
user: { userId: level.approverId, name: level.approverName },
timestamp: new Date().toISOString(),
action: 'Approved',
details: `Request approved and finalized by ${level.approverName || level.approverEmail}. Awaiting conclusion remark from initiator.`,
ipAddress: requestMetadata?.ipAddress || undefined,
userAgent: requestMetadata?.userAgent || undefined
});
// Generate AI conclusion remark ASYNCHRONOUSLY (don't wait)
// This runs in the background without blocking the approval response
(async () => {
try {
const { aiService } = await import('./ai.service');
const { ConclusionRemark } = await import('@models/index');
const { ApprovalLevel } = await import('@models/ApprovalLevel');
const { WorkNote } = await import('@models/WorkNote');
const { Document } = await import('@models/Document');
const { Activity } = await import('@models/Activity');
const { getConfigValue } = await import('./configReader.service');
// Check if AI features and remark generation are enabled in admin config
const aiEnabled = (await getConfigValue('AI_ENABLED', 'true'))?.toLowerCase() === 'true';
const remarkGenerationEnabled = (await getConfigValue('AI_REMARK_GENERATION_ENABLED', 'true'))?.toLowerCase() === 'true';
if (aiEnabled && remarkGenerationEnabled && aiService.isAvailable()) {
logAIEvent('request', {
requestId: level.requestId,
action: 'conclusion_generation_started',
});
// Gather context for AI generation
const approvalLevels = await ApprovalLevel.findAll({
where: { requestId: level.requestId },
order: [['levelNumber', 'ASC']]
});
const workNotes = await WorkNote.findAll({
where: { requestId: level.requestId },
order: [['createdAt', 'ASC']],
limit: 20
});
const documents = await Document.findAll({
where: { requestId: level.requestId },
order: [['uploadedAt', 'DESC']]
});
const activities = await Activity.findAll({
where: { requestId: level.requestId },
order: [['createdAt', 'ASC']],
limit: 50
});
// Build context object
const context = {
requestTitle: (wf as any).title,
requestDescription: (wf as any).description,
requestNumber: (wf as any).requestNumber,
priority: (wf as any).priority,
approvalFlow: approvalLevels.map((l: any) => {
const tatPercentage = l.tatPercentageUsed !== undefined && l.tatPercentageUsed !== null
? Number(l.tatPercentageUsed)
: (l.elapsedHours && l.tatHours ? (Number(l.elapsedHours) / Number(l.tatHours)) * 100 : 0);
return {
levelNumber: l.levelNumber,
approverName: l.approverName,
status: l.status,
comments: l.comments,
actionDate: l.actionDate,
tatHours: Number(l.tatHours || 0),
elapsedHours: Number(l.elapsedHours || 0),
tatPercentageUsed: tatPercentage
};
}),
workNotes: workNotes.map((note: any) => ({
userName: note.userName,
message: note.message,
createdAt: note.createdAt
})),
documents: documents.map((doc: any) => ({
fileName: doc.originalFileName || doc.fileName,
uploadedBy: doc.uploadedBy,
uploadedAt: doc.uploadedAt
})),
activities: activities.map((activity: any) => ({
type: activity.activityType,
action: activity.activityDescription,
details: activity.activityDescription,
timestamp: activity.createdAt
}))
};
const aiResult = await aiService.generateConclusionRemark(context);
// Check if conclusion already exists (e.g., from previous final approval before additional approver was added)
const existingConclusion = await ConclusionRemark.findOne({
where: { requestId: level.requestId }
});
if (existingConclusion) {
// Update existing conclusion with new AI-generated remark (regenerated with updated context)
await existingConclusion.update({
aiGeneratedRemark: aiResult.remark,
aiModelUsed: aiResult.provider,
aiConfidenceScore: aiResult.confidence,
// Preserve finalRemark if it was already finalized
// Only reset if it wasn't finalized yet
finalRemark: (existingConclusion as any).finalizedAt ? (existingConclusion as any).finalRemark : null,
editedBy: null,
isEdited: false,
editCount: 0,
approvalSummary: {
totalLevels: approvalLevels.length,
approvedLevels: approvalLevels.filter((l: any) => l.status === 'APPROVED').length,
averageTatUsage: approvalLevels.reduce((sum: number, l: any) =>
sum + Number(l.tatPercentageUsed || 0), 0) / (approvalLevels.length || 1)
},
documentSummary: {
totalDocuments: documents.length,
documentNames: documents.map((d: any) => d.originalFileName || d.fileName)
},
keyDiscussionPoints: aiResult.keyPoints,
generatedAt: new Date(),
// Preserve finalizedAt if it was already finalized
finalizedAt: (existingConclusion as any).finalizedAt || null
} as any);
logger.info(`[Approval] Updated existing AI conclusion for request ${level.requestId} with regenerated content (includes new approver)`);
} else {
// Create new conclusion
await ConclusionRemark.create({
requestId: level.requestId,
aiGeneratedRemark: aiResult.remark,
aiModelUsed: aiResult.provider,
aiConfidenceScore: aiResult.confidence,
finalRemark: null,
editedBy: null,
isEdited: false,
editCount: 0,
approvalSummary: {
totalLevels: approvalLevels.length,
approvedLevels: approvalLevels.filter((l: any) => l.status === 'APPROVED').length,
averageTatUsage: approvalLevels.reduce((sum: number, l: any) =>
sum + Number(l.tatPercentageUsed || 0), 0) / (approvalLevels.length || 1)
},
documentSummary: {
totalDocuments: documents.length,
documentNames: documents.map((d: any) => d.originalFileName || d.fileName)
},
keyDiscussionPoints: aiResult.keyPoints,
generatedAt: new Date(),
finalizedAt: null
} as any);
}
logAIEvent('response', {
requestId: level.requestId,
action: 'conclusion_generation_completed',
});
// Log activity
activityService.log({
requestId: level.requestId,
type: 'ai_conclusion_generated',
user: { userId: null as any, name: 'System' }, // Use null instead of 'system' for UUID field
timestamp: new Date().toISOString(),
action: 'AI Conclusion Generated',
details: 'AI-powered conclusion remark generated for review by initiator',
ipAddress: undefined, // System-generated, no IP
userAgent: undefined // System-generated, no user agent
});
} else {
// Log why AI generation was skipped
if (!aiEnabled) {
logger.info(`[Approval] AI features disabled in admin config, skipping conclusion generation for ${level.requestId}`);
} else if (!remarkGenerationEnabled) {
logger.info(`[Approval] AI remark generation disabled in admin config, skipping for ${level.requestId}`);
} else if (!aiService.isAvailable()) {
logger.warn(`[Approval] AI service unavailable for ${level.requestId}, skipping conclusion generation`);
}
}
// Auto-generate RequestSummary after final approval (system-level generation)
// This makes the summary immediately available when user views the approved request
try {
const { summaryService } = await import('./summary.service');
const summary = await summaryService.createSummary(level.requestId, 'system', {
isSystemGeneration: true
});
logger.info(`[Approval] ✅ Auto-generated summary ${(summary as any).summaryId} for approved request ${level.requestId}`);
// Log summary generation activity
activityService.log({
requestId: level.requestId,
type: 'summary_generated',
user: { userId: null as any, name: 'System' }, // Use null instead of 'system' for UUID field
timestamp: new Date().toISOString(),
action: 'Summary Auto-Generated',
details: 'Request summary auto-generated after final approval',
ipAddress: undefined,
userAgent: undefined
});
} catch (summaryError: any) {
// Log but don't fail - initiator can regenerate later
logger.error(`[Approval] Failed to auto-generate summary for ${level.requestId}:`, summaryError.message);
}
} catch (aiError) {
logAIEvent('error', {
requestId: level.requestId,
action: 'conclusion_generation_failed',
error: aiError,
});
// Silent failure - initiator can write manually
// Still try to generate summary even if AI conclusion failed
try {
const { summaryService } = await import('./summary.service');
const summary = await summaryService.createSummary(level.requestId, 'system', {
isSystemGeneration: true
});
logger.info(`[Approval] ✅ Auto-generated summary ${(summary as any).summaryId} for approved request ${level.requestId} (without AI conclusion)`);
} catch (summaryError: any) {
logger.error(`[Approval] Failed to auto-generate summary for ${level.requestId}:`, summaryError.message);
}
}
})().catch(err => {
// Catch any unhandled promise rejections
logger.error(`[Approval] Unhandled error in background AI generation:`, err);
});
// Notify initiator and all participants (including spectators) about approval
// Spectators are CC'd for transparency, similar to email CC
if (wf) {
const participants = await Participant.findAll({
where: { requestId: level.requestId }
});
const targetUserIds = new Set<string>();
targetUserIds.add((wf as any).initiatorId);
for (const p of participants as any[]) {
targetUserIds.add(p.userId); // Includes spectators
}
// Send notification to initiator about final approval (triggers email)
const initiatorId = (wf as any).initiatorId;
await notificationService.sendToUsers([initiatorId], {
title: `Request Approved - All Approvals Complete`,
body: `Your request "${(wf as any).title}" has been fully approved by all approvers. Please review and finalize the conclusion remark to close the request.`,
requestNumber: (wf as any).requestNumber,
requestId: level.requestId,
url: `/request/${(wf as any).requestNumber}`,
type: 'approval',
priority: 'HIGH',
actionRequired: true
});
// Send notification to all participants/spectators (for transparency, no action required)
const participantUserIds = Array.from(targetUserIds).filter(id => id !== initiatorId);
if (participantUserIds.length > 0) {
await notificationService.sendToUsers(participantUserIds, {
title: `Request Approved`,
body: `Request "${(wf as any).title}" has been fully approved. The initiator will finalize the conclusion remark to close the request.`,
requestNumber: (wf as any).requestNumber,
requestId: level.requestId,
url: `/request/${(wf as any).requestNumber}`,
type: 'approval_pending_closure',
priority: 'MEDIUM',
actionRequired: false
});
}
logger.info(`[Approval] ✅ Final approval complete for ${level.requestId}. Initiator and ${participants.length} participant(s) notified.`);
}
} else {
// Not final - move to next level
// Check if workflow is paused - if so, don't advance
if ((wf as any).isPaused || (wf as any).status === 'PAUSED') {
logger.warn(`[Approval] Cannot advance workflow ${level.requestId} - workflow is paused`);
throw new Error('Cannot advance workflow - workflow is currently paused. Please resume the workflow first.');
}
// Find the next PENDING level
// Custom workflows use strict sequential ordering (levelNumber + 1) to maintain intended order
// This ensures custom workflows work predictably and don't skip levels
const currentLevelNumber = level.levelNumber || 0;
logger.info(`[Approval] Finding next level after level ${currentLevelNumber} for request ${level.requestId} (Custom workflow)`);
// Use strict sequential approach for custom workflows
const nextLevel = await ApprovalLevel.findOne({
where: {
requestId: level.requestId,
levelNumber: currentLevelNumber + 1
}
});
if (!nextLevel) {
logger.info(`[Approval] Sequential level ${currentLevelNumber + 1} not found for custom workflow - this may be the final approval`);
} else if (nextLevel.status !== ApprovalStatus.PENDING) {
// Sequential level exists but not PENDING - log warning but proceed
logger.warn(`[Approval] Sequential level ${currentLevelNumber + 1} exists but status is ${nextLevel.status}, expected PENDING. Proceeding with sequential level to maintain workflow order.`);
}
const nextLevelNumber = nextLevel ? (nextLevel.levelNumber || 0) : null;
if (nextLevel) {
logger.info(`[Approval] Found next level: ${nextLevelNumber} (${(nextLevel as any).levelName || 'unnamed'}), approver: ${(nextLevel as any).approverName || (nextLevel as any).approverEmail || 'unknown'}, status: ${nextLevel.status}`);
} else {
logger.info(`[Approval] No next level found after level ${currentLevelNumber} - this may be the final approval`);
}
if (nextLevel) {
// Check if next level is paused - if so, don't activate it
if ((nextLevel as any).isPaused || (nextLevel as any).status === 'PAUSED') {
logger.warn(`[Approval] Cannot activate next level ${nextLevelNumber} - level is paused`);
throw new Error('Cannot activate next level - the next approval level is currently paused. Please resume it first.');
}
// Activate next level
await nextLevel.update({
status: ApprovalStatus.IN_PROGRESS,
levelStartTime: now,
tatStartTime: now
});
// Schedule TAT jobs for the next level
try {
// Get workflow priority for TAT calculation
const workflowPriority = (wf as any)?.priority || 'STANDARD';
await tatSchedulerService.scheduleTatJobs(
level.requestId,
(nextLevel as any).levelId,
(nextLevel as any).approverId,
Number((nextLevel as any).tatHours),
now,
workflowPriority // Pass workflow priority (EXPRESS = 24/7, STANDARD = working hours)
);
logger.info(`[Approval] TAT jobs scheduled for next level ${nextLevelNumber} (Priority: ${workflowPriority})`);
} catch (tatError) {
logger.error(`[Approval] Failed to schedule TAT jobs for next level:`, tatError);
// Don't fail the approval if TAT scheduling fails
}
// Update workflow current level (only if nextLevelNumber is not null)
if (nextLevelNumber !== null) {
await WorkflowRequest.update(
{ currentLevel: nextLevelNumber },
{ where: { requestId: level.requestId } }
);
logger.info(`Approved level ${level.levelNumber}. Activated next level ${nextLevelNumber} for workflow ${level.requestId}`);
} else {
logger.warn(`Approved level ${level.levelNumber} but no next level found - workflow may be complete`);
}
// Note: Dealer claim-specific logic (Activity Creation, E-Invoice) is handled by DealerClaimApprovalService
// This service is for custom workflows only
// Log approval activity
activityService.log({
requestId: level.requestId,
type: 'approval',
user: { userId: level.approverId, name: level.approverName },
timestamp: new Date().toISOString(),
action: 'Approved',
details: `Request approved and forwarded to ${(nextLevel as any).approverName || (nextLevel as any).approverEmail} by ${level.approverName || level.approverEmail}`,
ipAddress: requestMetadata?.ipAddress || undefined,
userAgent: requestMetadata?.userAgent || undefined
});
// Notify initiator about the approval (triggers email for regular workflows)
if (wf) {
await notificationService.sendToUsers([(wf as any).initiatorId], {
title: `Request Approved - Level ${level.levelNumber}`,
body: `Your request "${(wf as any).title}" has been approved by ${level.approverName || level.approverEmail} and forwarded to the next approver.`,
requestNumber: (wf as any).requestNumber,
requestId: level.requestId,
url: `/request/${(wf as any).requestNumber}`,
type: 'approval',
priority: 'MEDIUM'
});
}
// Notify next approver
if (wf && nextLevel) {
// Check if it's an auto-step by checking approverEmail or levelName
// Note: Activity Creation, E-Invoice Generation, and Credit Note Confirmation are now activity logs only, not approval steps
// These steps are processed automatically and should NOT trigger notifications
const isAutoStep = (nextLevel as any).approverEmail === 'system@royalenfield.com'
|| (nextLevel as any).approverName === 'System Auto-Process'
|| (nextLevel as any).approverId === 'system';
// IMPORTANT: Skip notifications and assignment logging for system/auto-steps
// System steps are any step with system@royalenfield.com
// Only send notifications to real users, NOT system processes
if (!isAutoStep && (nextLevel as any).approverId && (nextLevel as any).approverId !== 'system') {
// Additional checks: ensure approverEmail and approverName are not system-related
// This prevents notifications to system accounts even if they pass other checks
const approverEmail = (nextLevel as any).approverEmail || '';
const approverName = (nextLevel as any).approverName || '';
const isSystemEmail = approverEmail.toLowerCase() === 'system@royalenfield.com'
|| approverEmail.toLowerCase().includes('system');
const isSystemName = approverName.toLowerCase() === 'system auto-process'
|| approverName.toLowerCase().includes('system');
// EXCLUDE all system-related steps from notifications
// Only send notifications to real users, NOT system processes
if (!isSystemEmail && !isSystemName) {
// Send notification to next approver (only for real users, not system processes)
// This will send both in-app and email notifications
const nextApproverId = (nextLevel as any).approverId;
const nextApproverName = (nextLevel as any).approverName || (nextLevel as any).approverEmail || 'approver';
logger.info(`[Approval] Sending assignment notification to next approver: ${nextApproverName} (${nextApproverId}) at level ${nextLevelNumber} for request ${(wf as any).requestNumber}`);
await notificationService.sendToUsers([ nextApproverId ], {
title: `Action required: ${(wf as any).requestNumber}`,
body: `${(wf as any).title}`,
requestNumber: (wf as any).requestNumber,
requestId: (wf as any).requestId,
url: `/request/${(wf as any).requestNumber}`,
type: 'assignment',
priority: 'HIGH',
actionRequired: true
});
logger.info(`[Approval] Assignment notification sent successfully to ${nextApproverName} for level ${nextLevelNumber}`);
// Log assignment activity for the next approver
activityService.log({
requestId: level.requestId,
type: 'assignment',
user: { userId: level.approverId, name: level.approverName },
timestamp: new Date().toISOString(),
action: 'Assigned to approver',
details: `Request assigned to ${nextApproverName} for ${(nextLevel as any).levelName || `level ${nextLevelNumber}`}`,
ipAddress: requestMetadata?.ipAddress || undefined,
userAgent: requestMetadata?.userAgent || undefined
});
} else {
logger.info(`[Approval] Skipping notification for system process: ${approverEmail} at level ${nextLevelNumber}`);
}
} else {
logger.info(`[Approval] Skipping notification for auto-step at level ${nextLevelNumber}`);
}
// Note: Dealer-specific notifications (proposal/completion submissions) are handled by DealerClaimApprovalService
}
} else {
// No next level found but not final approver - this shouldn't happen
logger.warn(`No next level found for workflow ${level.requestId} after approving level ${level.levelNumber}`);
// Use current level number since there's no next level (workflow is complete)
await WorkflowRequest.update(
{
status: WorkflowStatus.APPROVED,
closureDate: now,
currentLevel: level.levelNumber || 0
},
{ where: { requestId: level.requestId } }
);
if (wf) {
await notificationService.sendToUsers([ (wf as any).initiatorId ], {
title: `Approved: ${(wf as any).requestNumber}`,
body: `${(wf as any).title}`,
requestNumber: (wf as any).requestNumber,
url: `/request/${(wf as any).requestNumber}`
});
activityService.log({
requestId: level.requestId,
type: 'approval',
user: { userId: level.approverId, name: level.approverName },
timestamp: new Date().toISOString(),
action: 'Approved',
details: `Request approved and finalized by ${level.approverName || level.approverEmail}`,
ipAddress: requestMetadata?.ipAddress || undefined,
userAgent: requestMetadata?.userAgent || undefined
});
}
}
}
} else if (action.action === 'REJECT') {
// Rejection - mark workflow as REJECTED (closure will happen when initiator finalizes conclusion)
await WorkflowRequest.update(
{
status: WorkflowStatus.REJECTED
// Note: closureDate will be set when initiator finalizes the conclusion
},
{ where: { requestId: level.requestId } }
);
// Mark all pending levels as skipped
await ApprovalLevel.update(
{
status: ApprovalStatus.SKIPPED,
levelEndTime: now
},
{
where: {
requestId: level.requestId,
status: ApprovalStatus.PENDING,
levelNumber: { [Op.gt]: level.levelNumber }
}
}
);
logWorkflowEvent('rejected', level.requestId, {
level: level.levelNumber,
status: 'REJECTED',
message: 'Awaiting closure from initiator',
});
// Log rejection activity first (so it's included in AI context)
if (wf) {
activityService.log({
requestId: level.requestId,
type: 'rejection',
user: { userId: level.approverId, name: level.approverName },
timestamp: new Date().toISOString(),
action: 'Rejected',
details: `Request rejected by ${level.approverName || level.approverEmail}. Reason: ${action.rejectionReason || action.comments || 'No reason provided'}. Awaiting closure from initiator.`,
ipAddress: requestMetadata?.ipAddress || undefined,
userAgent: requestMetadata?.userAgent || undefined
});
}
// Notify initiator and all participants
if (wf) {
const participants = await Participant.findAll({ where: { requestId: level.requestId } });
const targetUserIds = new Set<string>();
targetUserIds.add((wf as any).initiatorId);
for (const p of participants as any[]) {
targetUserIds.add(p.userId);
}
// Send notification to initiator with type 'rejection' to trigger email
await notificationService.sendToUsers([(wf as any).initiatorId], {
title: `Rejected: ${(wf as any).requestNumber}`,
body: `${(wf as any).title}`,
requestNumber: (wf as any).requestNumber,
requestId: level.requestId,
url: `/request/${(wf as any).requestNumber}`,
type: 'rejection',
priority: 'HIGH',
metadata: {
rejectionReason: action.rejectionReason || action.comments || 'No reason provided'
}
});
// Send notification to other participants (spectators) for transparency (no email, just in-app)
const participantUserIds = Array.from(targetUserIds).filter(id => id !== (wf as any).initiatorId);
if (participantUserIds.length > 0) {
await notificationService.sendToUsers(participantUserIds, {
title: `Rejected: ${(wf as any).requestNumber}`,
body: `Request "${(wf as any).title}" has been rejected.`,
requestNumber: (wf as any).requestNumber,
requestId: level.requestId,
url: `/request/${(wf as any).requestNumber}`,
type: 'status_change', // Use status_change to avoid triggering emails for participants
priority: 'MEDIUM'
});
}
}
// Generate AI conclusion remark ASYNCHRONOUSLY for rejected requests (similar to approved)
// This runs in the background without blocking the rejection response
(async () => {
try {
const { aiService } = await import('./ai.service');
const { ConclusionRemark } = await import('@models/index');
const { ApprovalLevel } = await import('@models/ApprovalLevel');
const { WorkNote } = await import('@models/WorkNote');
const { Document } = await import('@models/Document');
const { Activity } = await import('@models/Activity');
const { getConfigValue } = await import('./configReader.service');
// Check if AI features and remark generation are enabled in admin config
const aiEnabled = (await getConfigValue('AI_ENABLED', 'true'))?.toLowerCase() === 'true';
const remarkGenerationEnabled = (await getConfigValue('AI_REMARK_GENERATION_ENABLED', 'true'))?.toLowerCase() === 'true';
if (!aiEnabled || !remarkGenerationEnabled) {
logger.info(`[Approval] AI conclusion generation skipped for rejected request ${level.requestId} (AI disabled)`);
return;
}
// Check if AI service is available
const { aiService: aiSvc } = await import('./ai.service');
if (!aiSvc.isAvailable()) {
logger.warn(`[Approval] AI service unavailable for rejected request ${level.requestId}`);
return;
}
// Gather context for AI generation (similar to approved flow)
const approvalLevels = await ApprovalLevel.findAll({
where: { requestId: level.requestId },
order: [['levelNumber', 'ASC']]
});
const workNotes = await WorkNote.findAll({
where: { requestId: level.requestId },
order: [['createdAt', 'ASC']],
limit: 20
});
const documents = await Document.findAll({
where: { requestId: level.requestId },
order: [['uploadedAt', 'DESC']]
});
const activities = await Activity.findAll({
where: { requestId: level.requestId },
order: [['createdAt', 'ASC']],
limit: 50
});
// Build context object (include rejection reason)
const context = {
requestTitle: (wf as any).title,
requestDescription: (wf as any).description,
requestNumber: (wf as any).requestNumber,
priority: (wf as any).priority,
rejectionReason: action.rejectionReason || action.comments || 'No reason provided',
rejectedBy: level.approverName || level.approverEmail,
approvalFlow: approvalLevels.map((l: any) => {
const tatPercentage = l.tatPercentageUsed !== undefined && l.tatPercentageUsed !== null
? Number(l.tatPercentageUsed)
: (l.elapsedHours && l.tatHours ? (Number(l.elapsedHours) / Number(l.tatHours)) * 100 : 0);
return {
levelNumber: l.levelNumber,
approverName: l.approverName,
status: l.status,
comments: l.comments,
actionDate: l.actionDate,
tatHours: Number(l.tatHours || 0),
elapsedHours: Number(l.elapsedHours || 0),
tatPercentageUsed: tatPercentage
};
}),
workNotes: workNotes.map((note: any) => ({
userName: note.userName,
message: note.message,
createdAt: note.createdAt
})),
documents: documents.map((doc: any) => ({
fileName: doc.originalFileName || doc.fileName,
uploadedBy: doc.uploadedBy,
uploadedAt: doc.uploadedAt
})),
activities: activities.map((activity: any) => ({
type: activity.activityType,
action: activity.activityDescription,
details: activity.activityDescription,
timestamp: activity.createdAt
}))
};
logger.info(`[Approval] Generating AI conclusion for rejected request ${level.requestId}...`);
// Generate AI conclusion (will adapt to rejection context)
const aiResult = await aiSvc.generateConclusionRemark(context);
// Create or update conclusion remark
let conclusionInstance = await ConclusionRemark.findOne({ where: { requestId: level.requestId } });
const conclusionData = {
aiGeneratedRemark: aiResult.remark,
aiModelUsed: aiResult.provider,
aiConfidenceScore: aiResult.confidence,
approvalSummary: {
totalLevels: approvalLevels.length,
rejectedLevel: level.levelNumber,
rejectedBy: level.approverName || level.approverEmail,
rejectionReason: action.rejectionReason || action.comments
},
documentSummary: {
totalDocuments: documents.length,
documentNames: documents.map((d: any) => d.originalFileName || d.fileName)
},
keyDiscussionPoints: aiResult.keyPoints,
generatedAt: new Date()
};
if (conclusionInstance) {
await conclusionInstance.update(conclusionData as any);
logger.info(`[Approval] ✅ AI conclusion updated for rejected request ${level.requestId}`);
} else {
await ConclusionRemark.create({
requestId: level.requestId,
...conclusionData,
finalRemark: null,
editedBy: null,
isEdited: false,
editCount: 0,
finalizedAt: null
} as any);
logger.info(`[Approval] ✅ AI conclusion generated for rejected request ${level.requestId}`);
}
} catch (error: any) {
logger.error(`[Approval] Failed to generate AI conclusion for rejected request ${level.requestId}:`, error);
// Don't fail the rejection if AI generation fails
}
})();
}
logger.info(`Approval level ${levelId} ${action.action.toLowerCase()}ed`);
// Emit real-time update to all users viewing this request
emitToRequestRoom(level.requestId, 'request:updated', {
requestId: level.requestId,
requestNumber: (wf as any)?.requestNumber,
action: action.action,
levelNumber: level.levelNumber,
timestamp: now.toISOString()
});
return updatedLevel;
} catch (error) {
logger.error(`Failed to ${action.action.toLowerCase()} level ${levelId}:`, error);
throw new Error(`Failed to ${action.action.toLowerCase()} level`);
}
}
/**
 * Fetch the level currently awaiting action for a request: the PENDING
 * approval level with the lowest levelNumber, or null when nothing is pending.
 * @throws Error when the lookup fails (the original error is logged first).
 */
async getCurrentApprovalLevel(requestId: string): Promise<ApprovalLevel | null> {
  try {
    const pendingLevel = await ApprovalLevel.findOne({
      where: { requestId, status: ApprovalStatus.PENDING },
      order: [['levelNumber', 'ASC']]
    });
    return pendingLevel;
  } catch (error) {
    logger.error(`Failed to get current approval level for ${requestId}:`, error);
    throw new Error('Failed to get current approval level');
  }
}
/**
 * Fetch every approval level for a request, ordered by levelNumber ascending.
 * @throws Error when the lookup fails (the original error is logged first).
 */
async getApprovalLevels(requestId: string): Promise<ApprovalLevel[]> {
  try {
    const levels = await ApprovalLevel.findAll({
      where: { requestId },
      order: [['levelNumber', 'ASC']]
    });
    return levels;
  } catch (error) {
    logger.error(`Failed to get approval levels for ${requestId}:`, error);
    throw new Error('Failed to get approval levels');
  }
}
}

View File

@ -0,0 +1,160 @@
/**
* Configuration Reader Service
* Reads admin configurations from database for use in backend logic
*/
import { sequelize } from '@config/database';
import { QueryTypes } from 'sequelize';
import logger from '@utils/logger';
// In-memory cache of config_key -> config_value rows, shared by every reader
// in this module. Note there is a single module-wide expiry timestamp, not a
// per-key one: the whole cache is considered valid or invalid as a unit.
// clearConfigCache() resets both; preloadConfigurations() fills them eagerly.
let configCache: Map<string, string> = new Map();
let cacheExpiry: Date | null = null;
const CACHE_DURATION_MS = 5 * 60 * 1000; // 5 minutes
// Substrings that mark a config key as sensitive (matched case-insensitively).
const SENSITIVE_CONFIG_PATTERNS = [
  'API_KEY', 'SECRET', 'PASSWORD', 'TOKEN', 'CREDENTIAL',
  'PRIVATE', 'AUTH', 'KEY', 'VAPID'
];
/**
 * Determine whether a configuration key holds sensitive data whose value
 * must not be written to the logs in plain text.
 */
function isSensitiveConfig(configKey: string): boolean {
  const normalizedKey = configKey.toUpperCase();
  for (const pattern of SENSITIVE_CONFIG_PATTERNS) {
    if (normalizedKey.includes(pattern)) {
      return true;
    }
  }
  return false;
}
/**
 * Mask a sensitive value for log output.
 * Empty/falsy values and values of 8 characters or fewer are fully redacted;
 * longer values keep only their first 4 and last 2 characters.
 */
function maskSensitiveValue(value: string): string {
  const tooShortToMask = !value || value.length <= 8;
  if (tooShortToMask) {
    return '***REDACTED***';
  }
  const head = value.slice(0, 4);
  const tail = value.slice(-2);
  return `${head}****${tail}`;
}
/**
 * Get a configuration value from the database, with in-memory caching.
 *
 * All values share one cache window of CACHE_DURATION_MS. Once the window
 * expires the whole map is flushed and repopulated on demand. The expiry is
 * started by the first load and is NOT extended by later loads — previously
 * every DB read pushed the shared expiry forward, so a frequently-read key
 * could keep every other cached key stale indefinitely.
 *
 * @param configKey    admin_configurations.config_key to look up
 * @param defaultValue returned when the key is missing or the query fails
 */
export async function getConfigValue(configKey: string, defaultValue: string = ''): Promise<string> {
  try {
    const now = new Date();
    // Flush the whole cache once the shared window has expired so no entry
    // outlives CACHE_DURATION_MS.
    if (cacheExpiry && now >= cacheExpiry) {
      configCache.clear();
      cacheExpiry = null;
    }
    // Check cache first
    if (configCache.has(configKey) && cacheExpiry && now < cacheExpiry) {
      return configCache.get(configKey)!;
    }
    // Query database
    const result = await sequelize.query(`
      SELECT config_value
      FROM admin_configurations
      WHERE config_key = :configKey
      LIMIT 1
    `, {
      replacements: { configKey },
      type: QueryTypes.SELECT
    });
    if (result && result.length > 0) {
      const value = (result[0] as any).config_value;
      configCache.set(configKey, value);
      // Start the cache window only if it is not already running; extending
      // it on every load is what allowed unbounded staleness.
      if (!cacheExpiry) {
        cacheExpiry = new Date(Date.now() + CACHE_DURATION_MS);
      }
      // Mask sensitive values in logs for security
      const logValue = isSensitiveConfig(configKey) ? maskSensitiveValue(value) : value;
      logger.info(`[ConfigReader] Loaded config '${configKey}' = '${logValue}' from database (cached for 5min)`);
      return value;
    }
    // Mask sensitive default values in logs for security
    const logDefault = isSensitiveConfig(configKey) ? maskSensitiveValue(defaultValue) : defaultValue;
    logger.warn(`[ConfigReader] Config key '${configKey}' not found, using default: ${logDefault}`);
    return defaultValue;
  } catch (error) {
    logger.error(`[ConfigReader] Error reading config '${configKey}':`, error);
    return defaultValue;
  }
}
/**
 * Get a numeric configuration value.
 *
 * Falls back to defaultValue only when the stored value is not a parseable
 * number. The previous `parseFloat(value) || defaultValue` form also
 * discarded a legitimately stored 0 (0 is falsy) and silently returned the
 * default instead.
 *
 * @param configKey    admin_configurations.config_key to look up
 * @param defaultValue used when the key is missing or the value is non-numeric
 */
export async function getConfigNumber(configKey: string, defaultValue: number): Promise<number> {
  const value = await getConfigValue(configKey, String(defaultValue));
  const parsed = parseFloat(value);
  return Number.isNaN(parsed) ? defaultValue : parsed;
}
/**
 * Get a boolean configuration value.
 *
 * Accepts 'true' or '1' as true, case-insensitively and ignoring surrounding
 * whitespace, so values saved as 'TRUE' or 'True ' from the admin UI behave
 * as expected. Anything else (including a missing key whose default is
 * false) yields false.
 *
 * @param configKey    admin_configurations.config_key to look up
 * @param defaultValue used when the key is missing
 */
export async function getConfigBoolean(configKey: string, defaultValue: boolean): Promise<boolean> {
  const value = await getConfigValue(configKey, String(defaultValue));
  const normalized = value.trim().toLowerCase();
  return normalized === 'true' || normalized === '1';
}
/**
 * Read the two TAT reminder thresholds (percent of TAT consumed) from admin
 * config. Defaults: 50 for the first reminder, 75 for the second.
 */
export async function getTatThresholds(): Promise<{ first: number; second: number }> {
  const firstThreshold = await getConfigNumber('TAT_REMINDER_THRESHOLD_1', 50);
  const secondThreshold = await getConfigNumber('TAT_REMINDER_THRESHOLD_2', 75);
  return { first: firstThreshold, second: secondThreshold };
}
/**
 * Read the configured working-day window (24h clock) from admin config.
 * Defaults to 9:00–18:00 when the keys are absent.
 */
export async function getWorkingHours(): Promise<{ startHour: number; endHour: number }> {
  const start = await getConfigNumber('WORK_START_HOUR', 9);
  const end = await getConfigNumber('WORK_END_HOUR', 18);
  return { startHour: start, endHour: end };
}
/**
 * Drop every cached configuration value and reset the shared cache expiry.
 * Call after admin configurations change so the next read hits the database.
 */
export function clearConfigCache(): void {
  cacheExpiry = null;
  configCache.clear();
  logger.info('[ConfigReader] Configuration cache cleared');
}
/**
 * Warm the in-memory cache with every row of admin_configurations and start
 * the shared cache window. Errors are logged and swallowed — preloading is an
 * optimization only; getConfigValue falls back to per-key loads.
 */
export async function preloadConfigurations(): Promise<void> {
  try {
    const rows = await sequelize.query(`
      SELECT config_key, config_value
      FROM admin_configurations
    `, { type: QueryTypes.SELECT });
    for (const row of rows as any[]) {
      configCache.set(row.config_key, row.config_value);
    }
    cacheExpiry = new Date(Date.now() + CACHE_DURATION_MS);
    logger.info(`[ConfigReader] Preloaded ${rows.length} configurations into cache`);
  } catch (error) {
    logger.error('[ConfigReader] Error preloading configurations:', error);
  }
}
/**
 * Get Vertex AI configuration flags from admin config.
 * Currently only exposes whether AI features are enabled (AI_ENABLED,
 * defaulting to true).
 */
export async function getVertexAIConfig(): Promise<{
  enabled: boolean;
}> {
  return { enabled: await getConfigBoolean('AI_ENABLED', true) };
}

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,967 @@
/**
* Dealer Claim Approval Service
*
* Dedicated approval service for dealer claim workflows (CLAIM_MANAGEMENT).
* Handles dealer claim-specific logic including:
* - Dynamic approver support (additional approvers added between steps)
* - Activity Creation processing
* - Dealer-specific notifications
*
* This service is separate from ApprovalService to prevent conflicts with custom workflows.
*/
import { ApprovalLevel } from '@models/ApprovalLevel';
import { WorkflowRequest } from '@models/WorkflowRequest';
import { User } from '@models/User';
import { ApprovalAction } from '../types/approval.types';
import { ApprovalStatus, WorkflowStatus } from '../types/common.types';
import { calculateTATPercentage } from '@utils/helpers';
import { calculateElapsedWorkingHours } from '@utils/tatTimeUtils';
import logger from '@utils/logger';
import { Op } from 'sequelize';
import { notificationMongoService } from './notification.mongo.service';
import { activityService } from './activity.service';
import { tatSchedulerService } from './tatScheduler.service';
import { DealerClaimService } from './dealerClaim.service';
import { emitToRequestRoom } from '../realtime/socket';
export class DealerClaimApprovalService {
// Lazily construct a fresh DealerClaimService on each use so this module
// does not need DealerClaimService at class-initialization time (avoids the
// circular import between the two services).
private getDealerClaimService(): DealerClaimService {
  const service = new DealerClaimService();
  return service;
}
/**
* Approve a level in a dealer claim workflow
* Handles dealer claim-specific logic including dynamic approvers and activity creation
*/
async approveLevel(
levelId: string,
action: ApprovalAction,
userId: string,
requestMetadata?: { ipAddress?: string | null; userAgent?: string | null }
): Promise<ApprovalLevel | null> {
try {
const level = await ApprovalLevel.findByPk(levelId);
if (!level) return null;
// Get workflow to determine priority for working hours calculation
const wf = await WorkflowRequest.findByPk(level.requestId);
if (!wf) return null;
// Verify this is a claim management workflow
const workflowType = (wf as any)?.workflowType;
if (workflowType !== 'CLAIM_MANAGEMENT') {
logger.warn(`[DealerClaimApproval] Attempted to use DealerClaimApprovalService for non-claim-management workflow ${level.requestId}. Workflow type: ${workflowType}`);
throw new Error('DealerClaimApprovalService can only be used for CLAIM_MANAGEMENT workflows');
}
const priority = ((wf as any)?.priority || 'standard').toString().toLowerCase();
const isPaused = (wf as any).isPaused || (level as any).isPaused;
// If paused, resume automatically when approving/rejecting
if (isPaused) {
const { pauseService } = await import('./pause.service');
try {
await pauseService.resumeWorkflow(level.requestId, userId);
logger.info(`[DealerClaimApproval] Auto-resumed paused workflow ${level.requestId} when ${action.action === 'APPROVE' ? 'approving' : 'rejecting'}`);
} catch (pauseError) {
logger.warn(`[DealerClaimApproval] Failed to auto-resume paused workflow:`, pauseError);
// Continue with approval/rejection even if resume fails
}
}
const now = new Date();
// Calculate elapsed hours using working hours logic (with pause handling)
const isPausedLevel = (level as any).isPaused;
const wasResumed = !isPausedLevel &&
(level as any).pauseElapsedHours !== null &&
(level as any).pauseElapsedHours !== undefined &&
(level as any).pauseResumeDate !== null;
const pauseInfo = isPausedLevel ? {
// Level is currently paused - return frozen elapsed hours at pause time
isPaused: true,
pausedAt: (level as any).pausedAt,
pauseElapsedHours: (level as any).pauseElapsedHours,
pauseResumeDate: (level as any).pauseResumeDate
} : wasResumed ? {
// Level was paused but has been resumed - add pre-pause elapsed hours + time since resume
isPaused: false,
pausedAt: null,
pauseElapsedHours: Number((level as any).pauseElapsedHours), // Pre-pause elapsed hours
pauseResumeDate: (level as any).pauseResumeDate // Actual resume timestamp
} : undefined;
const elapsedHours = await calculateElapsedWorkingHours(
(level as any).levelStartTime || (level as any).tatStartTime || now,
now,
priority,
pauseInfo
);
const tatPercentage = calculateTATPercentage(elapsedHours, level.tatHours);
// Handle rejection
if (action.action === 'REJECT') {
return await this.handleRejection(level, action, userId, requestMetadata, elapsedHours, tatPercentage, now);
}
logger.info(`[DealerClaimApproval] Approving level ${levelId} with action:`, JSON.stringify(action));
// Robust comment extraction
const approvalComment = action.comments || (action as any).comment || '';
// Update level status and elapsed time for approval FIRST
// Only save snapshot if the update succeeds
await level.update({
status: ApprovalStatus.APPROVED,
actionDate: now,
levelEndTime: now,
elapsedHours: elapsedHours,
tatPercentageUsed: tatPercentage,
comments: approvalComment || undefined
});
// Check if this is a dealer submission (proposal or completion) - these have their own snapshot types
const levelName = (level.levelName || '').toLowerCase();
const isDealerSubmission = levelName.includes('dealer proposal') || levelName.includes('dealer completion');
// Only save APPROVE snapshot for actual approver actions (not dealer submissions)
// Dealer submissions use PROPOSAL/COMPLETION snapshot types instead
if (!isDealerSubmission) {
try {
await this.getDealerClaimService().saveApprovalHistory(
level.requestId,
level.levelId,
level.levelNumber,
'APPROVE',
approvalComment,
undefined,
userId
);
} catch (snapshotError) {
// Log error but don't fail the approval - snapshot is for audit, not critical
logger.error(`[DealerClaimApproval] Failed to save approval history snapshot (non-critical):`, snapshotError);
}
}
// Note: We don't save workflow history for approval actions
// The approval history (saveApprovalHistory) is sufficient and includes comments
// Workflow movement information is included in the APPROVE snapshot's changeReason
// Check if this is the final approver
const allLevels = await ApprovalLevel.findAll({
where: { requestId: level.requestId }
});
const approvedCount = allLevels.filter((l: any) => l.status === ApprovalStatus.APPROVED).length;
const isFinalApprover = approvedCount === allLevels.length;
if (isFinalApprover) {
// Final approval - close workflow
await WorkflowRequest.update(
{
status: WorkflowStatus.APPROVED,
closureDate: now,
currentLevel: level.levelNumber || 0
},
{ where: { requestId: level.requestId } }
);
// Notify all participants
const participants = await import('@models/Participant').then(m => m.Participant.findAll({
where: { requestId: level.requestId, isActive: true }
}));
if (participants && participants.length > 0) {
const participantIds = participants.map((p: any) => p.userId).filter(Boolean);
await notificationService.sendToUsers(participantIds, {
title: `Request Approved: ${(wf as any).requestNumber}`,
body: `${(wf as any).title}`,
requestNumber: (wf as any).requestNumber,
requestId: level.requestId,
url: `/request/${(wf as any).requestNumber}`,
type: 'approval',
priority: 'MEDIUM'
});
logger.info(`[DealerClaimApproval] Final approval complete. ${participants.length} participant(s) notified.`);
}
} else {
// Not final - move to next level
// Check if workflow is paused - if so, don't advance
if ((wf as any).isPaused || (wf as any).status === 'PAUSED') {
logger.warn(`[DealerClaimApproval] Cannot advance workflow ${level.requestId} - workflow is paused`);
throw new Error('Cannot advance workflow - workflow is currently paused. Please resume the workflow first.');
}
// Find the next PENDING level (supports dynamically added approvers)
// Strategy: First try sequential, then find next PENDING level if sequential doesn't exist
const currentLevelNumber = level.levelNumber || 0;
logger.info(`[DealerClaimApproval] Finding next level after level ${currentLevelNumber} for request ${level.requestId}`);
// First, try sequential approach
let nextLevel = await ApprovalLevel.findOne({
where: {
requestId: level.requestId,
levelNumber: currentLevelNumber + 1
}
});
// If sequential level doesn't exist, search for next PENDING level
// This handles cases where additional approvers are added dynamically between steps
if (!nextLevel) {
logger.info(`[DealerClaimApproval] Sequential level ${currentLevelNumber + 1} not found, searching for next PENDING level (dynamic approvers)`);
nextLevel = await ApprovalLevel.findOne({
where: {
requestId: level.requestId,
levelNumber: { [Op.gt]: currentLevelNumber },
status: ApprovalStatus.PENDING
},
order: [['levelNumber', 'ASC']]
});
if (nextLevel) {
logger.info(`[DealerClaimApproval] Using fallback level ${nextLevel.levelNumber} (${(nextLevel as any).levelName || 'unnamed'})`);
}
} else if (nextLevel.status !== ApprovalStatus.PENDING) {
// Sequential level exists but not PENDING - check if it's already approved/rejected
if (nextLevel.status === ApprovalStatus.APPROVED || nextLevel.status === ApprovalStatus.REJECTED) {
logger.warn(`[DealerClaimApproval] Sequential level ${currentLevelNumber + 1} already ${nextLevel.status}. Skipping activation.`);
nextLevel = null; // Don't activate an already completed level
} else {
// Level exists but in unexpected status - log warning but proceed
logger.warn(`[DealerClaimApproval] Sequential level ${currentLevelNumber + 1} exists but status is ${nextLevel.status}, expected PENDING. Proceeding with sequential level.`);
}
}
const nextLevelNumber = nextLevel ? (nextLevel.levelNumber || 0) : null;
if (nextLevel) {
logger.info(`[DealerClaimApproval] Found next level: ${nextLevelNumber} (${(nextLevel as any).levelName || 'unnamed'}), approver: ${(nextLevel as any).approverName || (nextLevel as any).approverEmail || 'unknown'}, status: ${nextLevel.status}`);
} else {
logger.info(`[DealerClaimApproval] No next level found after level ${currentLevelNumber} - this may be the final approval`);
}
if (nextLevel) {
// Check if next level is paused - if so, don't activate it
if ((nextLevel as any).isPaused || (nextLevel as any).status === 'PAUSED') {
logger.warn(`[DealerClaimApproval] Cannot activate next level ${nextLevelNumber} - level is paused`);
throw new Error('Cannot activate next level - the next approval level is currently paused. Please resume it first.');
}
// Activate next level
await nextLevel.update({
status: ApprovalStatus.IN_PROGRESS,
levelStartTime: now,
tatStartTime: now
});
// Schedule TAT jobs for the next level
try {
const workflowPriority = (wf as any)?.priority || 'STANDARD';
await tatSchedulerService.scheduleTatJobs(
level.requestId,
(nextLevel as any).levelId,
(nextLevel as any).approverId,
Number((nextLevel as any).tatHours),
now,
workflowPriority
);
logger.info(`[DealerClaimApproval] TAT jobs scheduled for next level ${nextLevelNumber} (Priority: ${workflowPriority})`);
} catch (tatError) {
logger.error(`[DealerClaimApproval] Failed to schedule TAT jobs for next level:`, tatError);
// Don't fail the approval if TAT scheduling fails
}
// Update workflow current level
if (nextLevelNumber !== null) {
await WorkflowRequest.update(
{ currentLevel: nextLevelNumber },
{ where: { requestId: level.requestId } }
);
// Update the APPROVE snapshot's changeReason to include movement information
// This ensures the approval snapshot shows both the approval and the movement
// We don't create a separate WORKFLOW snapshot for approvals - only APPROVE snapshot
try {
const { DealerClaimHistory } = await import('@models/DealerClaimHistory');
const { SnapshotType } = await import('@models/DealerClaimHistory');
const approvalHistory = await DealerClaimHistory.findOne({
where: {
requestId: level.requestId,
approvalLevelId: level.levelId,
snapshotType: SnapshotType.APPROVE
},
order: [['createdAt', 'DESC']]
});
if (approvalHistory) {
// Use the robust approvalComment from outer scope
const updatedChangeReason = approvalComment
? `Approved by ${level.approverName || level.approverEmail}, moved to next level (${nextLevelNumber}). Comment: ${approvalComment}`
: `Approved by ${level.approverName || level.approverEmail}, moved to next level (${nextLevelNumber})`;
await approvalHistory.update({
changeReason: updatedChangeReason
});
}
} catch (updateError) {
// Log error but don't fail - this is just updating the changeReason for better display
logger.warn(`[DealerClaimApproval] Failed to update approval history changeReason (non-critical):`, updateError);
}
logger.info(`[DealerClaimApproval] Approved level ${level.levelNumber}. Activated next level ${nextLevelNumber} for workflow ${level.requestId}`);
}
// Handle dealer claim-specific step processing
const currentLevelName = (level.levelName || '').toLowerCase();
// Check by levelName first, use levelNumber only as fallback if levelName is missing
// This handles cases where additional approvers shift step numbers
const hasLevelName = level.levelName && level.levelName.trim() !== '';
const isDeptLeadApproval = hasLevelName
? currentLevelName.includes('department lead')
: (level.levelNumber === 3); // Only use levelNumber if levelName is missing
const isRequestorClaimApproval = hasLevelName
? (currentLevelName.includes('requestor') && (currentLevelName.includes('claim') || currentLevelName.includes('approval')))
: (level.levelNumber === 5); // Only use levelNumber if levelName is missing
if (isDeptLeadApproval) {
// Activity Creation is now an activity log only - process it automatically
logger.info(`[DealerClaimApproval] Department Lead approved. Processing Activity Creation as activity log.`);
try {
const dealerClaimService = new DealerClaimService();
await dealerClaimService.processActivityCreation(level.requestId);
logger.info(`[DealerClaimApproval] Activity Creation activity logged for request ${level.requestId}`);
} catch (activityError) {
logger.error(`[DealerClaimApproval] Error processing Activity Creation activity for request ${level.requestId}:`, activityError);
// Don't fail the Department Lead approval if Activity Creation logging fails
}
} else if (isRequestorClaimApproval) {
// Step 6 (System - E-Invoice Generation) is now an activity log only - process it automatically
logger.info(`[DealerClaimApproval] Requestor Claim Approval approved. Triggering DMS push for E-Invoice generation.`);
try {
// Lazy load DealerClaimService to avoid circular dependency issues during method execution
const dealerClaimService = this.getDealerClaimService();
await dealerClaimService.updateEInvoiceDetails(level.requestId);
logger.info(`[DealerClaimApproval] DMS push initiated for request ${level.requestId}`);
} catch (dmsError) {
logger.error(`[DealerClaimApproval] Error initiating DMS push for request ${level.requestId}:`, dmsError);
// Don't fail the Requestor Claim Approval if DMS push fails
}
}
// Log approval activity
activityService.log({
requestId: level.requestId,
type: 'approval',
user: { userId: level.approverId, name: level.approverName },
timestamp: new Date().toISOString(),
action: 'Approved',
details: `Request approved and forwarded to ${(nextLevel as any).approverName || (nextLevel as any).approverEmail} by ${level.approverName || level.approverEmail}`,
ipAddress: requestMetadata?.ipAddress || undefined,
userAgent: requestMetadata?.userAgent || undefined
});
// Notify initiator about the approval
// BUT skip this if it's a dealer proposal or dealer completion step - those have special notifications below
// Priority: levelName check first, then levelNumber only if levelName is missing
const hasLevelNameForApproval = level.levelName && level.levelName.trim() !== '';
const levelNameForApproval = hasLevelNameForApproval && level.levelName ? level.levelName.toLowerCase() : '';
const isDealerProposalApproval = hasLevelNameForApproval
? (levelNameForApproval.includes('dealer') && levelNameForApproval.includes('proposal'))
: (level.levelNumber === 1); // Only use levelNumber if levelName is missing
const isDealerCompletionApproval = hasLevelNameForApproval
? (levelNameForApproval.includes('dealer') && (levelNameForApproval.includes('completion') || levelNameForApproval.includes('documents')))
: (level.levelNumber === 5); // Only use levelNumber if levelName is missing
// Skip sending approval notification to initiator if they are the approver
// (they don't need to be notified that they approved their own request)
const isApproverInitiator = level.approverId && (wf as any).initiatorId && level.approverId === (wf as any).initiatorId;
if (wf && !isDealerProposalApproval && !isDealerCompletionApproval && !isApproverInitiator) {
await notificationService.sendToUsers([(wf as any).initiatorId], {
title: `Request Approved - Level ${level.levelNumber}`,
body: `Your request "${(wf as any).title}" has been approved by ${level.approverName || level.approverEmail} and forwarded to the next approver.`,
requestNumber: (wf as any).requestNumber,
requestId: level.requestId,
url: `/request/${(wf as any).requestNumber}`,
type: 'approval',
priority: 'MEDIUM'
});
} else if (isApproverInitiator) {
logger.info(`[DealerClaimApproval] Skipping approval notification to initiator - they are the approver`);
}
// Notify next approver - ALWAYS send notification when there's a next level
if (wf && nextLevel) {
const nextApproverId = (nextLevel as any).approverId;
const nextApproverEmail = (nextLevel as any).approverEmail || '';
const nextApproverName = (nextLevel as any).approverName || nextApproverEmail || 'approver';
// Check if it's an auto-step or system process
const isAutoStep = nextApproverEmail === 'system@royalenfield.com'
|| (nextLevel as any).approverName === 'System Auto-Process'
|| nextApproverId === 'system';
const isSystemEmail = nextApproverEmail.toLowerCase() === 'system@royalenfield.com'
|| nextApproverEmail.toLowerCase().includes('system');
const isSystemName = nextApproverName.toLowerCase() === 'system auto-process'
|| nextApproverName.toLowerCase().includes('system');
// Notify initiator when dealer submits documents (Dealer Proposal or Dealer Completion Documents)
// Check this BEFORE sending assignment notification to avoid duplicates
// Priority: levelName check first, then levelNumber only if levelName is missing
const hasLevelNameForNotification = level.levelName && level.levelName.trim() !== '';
const levelNameForNotification = hasLevelNameForNotification && level.levelName ? level.levelName.toLowerCase() : '';
const isDealerProposalApproval = hasLevelNameForNotification
? (levelNameForNotification.includes('dealer') && levelNameForNotification.includes('proposal'))
: (level.levelNumber === 1); // Only use levelNumber if levelName is missing
const isDealerCompletionApproval = hasLevelNameForNotification
? (levelNameForNotification.includes('dealer') && (levelNameForNotification.includes('completion') || levelNameForNotification.includes('documents')))
: (level.levelNumber === 5); // Only use levelNumber if levelName is missing
// Check if next approver is the initiator (to avoid duplicate notifications)
const isNextApproverInitiator = nextApproverId && (wf as any).initiatorId && nextApproverId === (wf as any).initiatorId;
if (isDealerProposalApproval && (wf as any).initiatorId) {
// Get dealer and proposal data for the email template
const { DealerClaimDetails } = await import('@models/DealerClaimDetails');
const { DealerProposalDetails } = await import('@models/DealerProposalDetails');
const { DealerProposalCostItem } = await import('@models/DealerProposalCostItem');
const claimDetails = await DealerClaimDetails.findOne({ where: { requestId: level.requestId } });
const proposalDetails = await DealerProposalDetails.findOne({ where: { requestId: level.requestId } });
// Get cost items if proposal exists
let costBreakup: any[] = [];
if (proposalDetails) {
const proposalId = (proposalDetails as any).proposalId || (proposalDetails as any).proposal_id;
if (proposalId) {
const costItems = await DealerProposalCostItem.findAll({
where: { proposalId },
order: [['itemOrder', 'ASC']]
});
costBreakup = costItems.map((item: any) => ({
description: item.itemDescription || item.description,
amount: Number(item.amount) || 0
}));
}
}
// Get dealer user
const dealerUser = level.approverId ? await User.findByPk(level.approverId) : null;
const dealerData = dealerUser ? dealerUser.toJSON() : {
userId: level.approverId,
email: level.approverEmail || '',
displayName: level.approverName || level.approverEmail || 'Dealer'
};
// Get next approver (could be Step 2 - Requestor Evaluation, or an additional approver if one was added between Step 1 and Step 2)
// The nextLevel is already found above using dynamic logic that handles additional approvers correctly
const nextApproverData = nextLevel ? await User.findByPk((nextLevel as any).approverId) : null;
// Check if next approver is an additional approver (handles cases where additional approvers are added between Step 1 and Step 2)
const nextLevelName = nextLevel ? ((nextLevel as any).levelName || '').toLowerCase() : '';
const isNextAdditionalApprover = nextLevelName.includes('additional approver');
// Send proposal submitted notification with proper type and metadata
// This will use the dealerProposalSubmitted template, not the multi-level approval template
await notificationService.sendToUsers([(wf as any).initiatorId], {
title: 'Proposal Submitted',
body: `Dealer ${dealerData.displayName || dealerData.email} has submitted a proposal for your claim request "${(wf as any).title}".`,
requestNumber: (wf as any).requestNumber,
requestId: (wf as any).requestId,
url: `/request/${(wf as any).requestNumber}`,
type: 'proposal_submitted',
priority: 'MEDIUM',
actionRequired: false,
metadata: {
dealerData: dealerData,
proposalData: {
totalEstimatedBudget: proposalDetails ? (proposalDetails as any).totalEstimatedBudget : 0,
expectedCompletionDate: proposalDetails ? (proposalDetails as any).expectedCompletionDate : undefined,
dealerComments: proposalDetails ? (proposalDetails as any).dealerComments : undefined,
costBreakup: costBreakup,
submittedAt: proposalDetails ? (proposalDetails as any).submittedAt : new Date(),
nextApproverIsAdditional: isNextAdditionalApprover,
nextApproverIsInitiator: isNextApproverInitiator
},
nextApproverId: nextApproverData ? nextApproverData.userId : undefined,
// Add activity information from claimDetails
activityName: claimDetails ? (claimDetails as any).activityName : undefined,
activityType: claimDetails ? (claimDetails as any).activityType : undefined
}
});
logger.info(`[DealerClaimApproval] Sent proposal_submitted notification to initiator for Dealer Proposal Submission. Next approver: ${isNextApproverInitiator ? 'Initiator (self)' : (isNextAdditionalApprover ? 'Additional Approver' : 'Step 2 (Requestor Evaluation)')}`);
} else if (isDealerCompletionApproval && (wf as any).initiatorId) {
// Get dealer and completion data for the email template
const { DealerClaimDetails } = await import('@models/DealerClaimDetails');
const { DealerCompletionDetails } = await import('@models/DealerCompletionDetails');
const { DealerCompletionExpense } = await import('@models/DealerCompletionExpense');
const claimDetails = await DealerClaimDetails.findOne({ where: { requestId: level.requestId } });
const completionDetails = await DealerCompletionDetails.findOne({ where: { requestId: level.requestId } });
// Get expense items if completion exists
let closedExpenses: any[] = [];
if (completionDetails) {
const expenses = await DealerCompletionExpense.findAll({
where: { requestId: level.requestId },
order: [['createdAt', 'ASC']]
});
closedExpenses = expenses.map((item: any) => ({
description: item.description || '',
amount: Number(item.amount) || 0
}));
}
// Get dealer user
const dealerUser = level.approverId ? await User.findByPk(level.approverId) : null;
const dealerData = dealerUser ? dealerUser.toJSON() : {
userId: level.approverId,
email: level.approverEmail || '',
displayName: level.approverName || level.approverEmail || 'Dealer'
};
// Get next approver (could be Step 5 - Requestor Claim Approval, or an additional approver if one was added between Step 4 and Step 5)
const nextApproverData = nextLevel ? await User.findByPk((nextLevel as any).approverId) : null;
// Check if next approver is an additional approver (handles cases where additional approvers are added between Step 4 and Step 5)
const nextLevelName = nextLevel ? ((nextLevel as any).levelName || '').toLowerCase() : '';
const isNextAdditionalApprover = nextLevelName.includes('additional approver');
// Check if next approver is the initiator (to show appropriate message in email)
const isNextApproverInitiator = nextApproverData && (wf as any).initiatorId && nextApproverData.userId === (wf as any).initiatorId;
// Send completion submitted notification with proper type and metadata
// This will use the completionDocumentsSubmitted template, not the multi-level approval template
await notificationService.sendToUsers([(wf as any).initiatorId], {
title: 'Completion Documents Submitted',
body: `Dealer ${dealerData.displayName || dealerData.email} has submitted completion documents for your claim request "${(wf as any).title}".`,
requestNumber: (wf as any).requestNumber,
requestId: (wf as any).requestId,
url: `/request/${(wf as any).requestNumber}`,
type: 'completion_submitted',
priority: 'MEDIUM',
actionRequired: false,
metadata: {
dealerData: dealerData,
completionData: {
activityCompletionDate: completionDetails ? (completionDetails as any).activityCompletionDate : undefined,
numberOfParticipants: completionDetails ? (completionDetails as any).numberOfParticipants : undefined,
totalClosedExpenses: completionDetails ? (completionDetails as any).totalClosedExpenses : 0,
closedExpenses: closedExpenses,
documentsCount: undefined, // Documents count can be retrieved from documents table if needed
submittedAt: completionDetails ? (completionDetails as any).submittedAt : new Date(),
nextApproverIsAdditional: isNextAdditionalApprover,
nextApproverIsInitiator: isNextApproverInitiator
},
nextApproverId: nextApproverData ? nextApproverData.userId : undefined
}
});
logger.info(`[DealerClaimApproval] Sent completion_submitted notification to initiator for Dealer Completion Documents. Next approver: ${isNextAdditionalApprover ? 'Additional Approver' : 'Step 5 (Requestor Claim Approval)'}`);
}
// Only send assignment notification to next approver if:
// 1. It's NOT a dealer proposal/completion step (those have special notifications above)
// 2. Next approver is NOT the initiator (to avoid duplicate notifications)
// 3. It's not a system/auto step
if (!isDealerProposalApproval && !isDealerCompletionApproval && !isNextApproverInitiator) {
if (!isAutoStep && !isSystemEmail && !isSystemName && nextApproverId && nextApproverId !== 'system') {
try {
logger.info(`[DealerClaimApproval] Sending assignment notification to next approver: ${nextApproverName} (${nextApproverId}) at level ${nextLevelNumber} for request ${(wf as any).requestNumber}`);
await notificationService.sendToUsers([nextApproverId], {
title: `Action required: ${(wf as any).requestNumber}`,
body: `${(wf as any).title}`,
requestNumber: (wf as any).requestNumber,
requestId: (wf as any).requestId,
url: `/request/${(wf as any).requestNumber}`,
type: 'assignment',
priority: 'HIGH',
actionRequired: true
});
logger.info(`[DealerClaimApproval] ✅ Assignment notification sent successfully to ${nextApproverName} (${nextApproverId}) for level ${nextLevelNumber}`);
// Log assignment activity for the next approver
await activityService.log({
requestId: level.requestId,
type: 'assignment',
user: { userId: level.approverId, name: level.approverName },
timestamp: new Date().toISOString(),
action: 'Assigned to approver',
details: `Request assigned to ${nextApproverName} for ${(nextLevel as any).levelName || `level ${nextLevelNumber}`}`,
ipAddress: requestMetadata?.ipAddress || undefined,
userAgent: requestMetadata?.userAgent || undefined
});
} catch (notifError) {
logger.error(`[DealerClaimApproval] ❌ Failed to send notification to next approver ${nextApproverId} at level ${nextLevelNumber}:`, notifError);
// Don't throw - continue with workflow even if notification fails
}
} else {
logger.info(`[DealerClaimApproval] ⚠️ Skipping notification for system/auto-step: ${nextApproverEmail} (${nextApproverId}) at level ${nextLevelNumber}`);
}
} else {
if (isDealerProposalApproval || isDealerCompletionApproval) {
logger.info(`[DealerClaimApproval] ⚠️ Skipping assignment notification - dealer-specific notification already sent`);
}
if (isNextApproverInitiator) {
logger.info(`[DealerClaimApproval] ⚠️ Skipping assignment notification - next approver is the initiator (already notified)`);
}
}
}
} else {
// No next level found but not final approver - this shouldn't happen
logger.warn(`[DealerClaimApproval] No next level found for workflow ${level.requestId} after approving level ${level.levelNumber}`);
await WorkflowRequest.update(
{
status: WorkflowStatus.APPROVED,
closureDate: now,
currentLevel: level.levelNumber || 0
},
{ where: { requestId: level.requestId } }
);
if (wf) {
await notificationService.sendToUsers([(wf as any).initiatorId], {
title: `Approved: ${(wf as any).requestNumber}`,
body: `${(wf as any).title}`,
requestNumber: (wf as any).requestNumber,
requestId: level.requestId,
url: `/request/${(wf as any).requestNumber}`,
type: 'approval',
priority: 'MEDIUM'
});
}
}
}
// Emit real-time update to all users viewing this request
emitToRequestRoom(level.requestId, 'request:updated', {
requestId: level.requestId,
requestNumber: (wf as any)?.requestNumber,
action: action.action,
levelNumber: level.levelNumber,
timestamp: now.toISOString()
});
logger.info(`[DealerClaimApproval] Approval level ${levelId} ${action.action.toLowerCase()}ed and socket event emitted`);
return level;
} catch (error) {
logger.error('[DealerClaimApproval] Error approving level:', error);
throw error;
}
}
/**
 * Handle rejection (internal method called from approveLevel).
 *
 * Default rejection semantics: the request is returned to the immediately
 * previous approval step when one exists; otherwise (first step) the
 * rejection is terminal and the workflow is closed as REJECTED.
 *
 * @param level           Approval level being rejected.
 * @param action          Action payload carrying comments / rejection reason.
 * @param userId          ID of the user performing the rejection.
 * @param requestMetadata Optional request context (IP, user agent) for audit logging.
 * @param elapsedHours    Pre-computed elapsed working hours for this level.
 * @param tatPercentage   Pre-computed TAT percentage used for this level.
 * @param now             Timestamp used for all writes (defaults to new Date()).
 * @returns The updated level, or null when the workflow no longer exists.
 */
private async handleRejection(
  level: ApprovalLevel,
  action: ApprovalAction,
  userId: string,
  requestMetadata?: { ipAddress?: string | null; userAgent?: string | null },
  elapsedHours?: number,
  tatPercentage?: number,
  now?: Date
): Promise<ApprovalLevel | null> {
  const rejectionNow = now || new Date();
  const wf = await WorkflowRequest.findByPk(level.requestId);
  if (!wf) return null;
  logger.info(`[DealerClaimApproval] Rejection for request ${level.requestId} by level ${level.levelNumber}. Finding previous step to return to.`);
  // Save approval history (rejection) BEFORE updating the level so the
  // history row reflects the state at the moment of rejection.
  await this.getDealerClaimService().saveApprovalHistory(
    level.requestId,
    level.levelId,
    level.levelNumber,
    'REJECT',
    action.comments || '',
    action.rejectionReason || undefined,
    userId
  );
  // Load all levels to locate the immediately previous approval step.
  const allLevels = await ApprovalLevel.findAll({
    where: { requestId: level.requestId },
    order: [['levelNumber', 'ASC']]
  });
  const currentLevelNumber = level.levelNumber || 0;
  const previousLevels = allLevels.filter(l => l.levelNumber < currentLevelNumber && l.levelNumber > 0);
  const previousLevel = previousLevels[previousLevels.length - 1];
  // If returning to a previous step, reset this level to PENDING (clear its
  // action details so it can be acted upon again later). If there is no
  // previous step the rejection is terminal and the level is REJECTED.
  const newStatus = previousLevel ? ApprovalStatus.PENDING : ApprovalStatus.REJECTED;
  await level.update({
    status: newStatus,
    actionDate: previousLevel ? null : rejectionNow,
    levelEndTime: previousLevel ? null : rejectionNow,
    elapsedHours: previousLevel ? 0 : (elapsedHours || 0),
    tatPercentageUsed: previousLevel ? 0 : (tatPercentage || 0),
    comments: previousLevel ? null : (action.comments || action.rejectionReason || undefined)
  } as any);
  if (!previousLevel) {
    // Terminal rejection: this is the first step, so close the workflow.
    logger.info(`[DealerClaimApproval] No previous level found. This is the first step. Closing workflow.`);
    // Close workflow FIRST
    await WorkflowRequest.update(
      {
        status: WorkflowStatus.REJECTED,
        closureDate: rejectionNow
      },
      { where: { requestId: level.requestId } }
    );
    // Capture workflow snapshot AFTER the workflow is closed successfully.
    // (A duplicate pre-close snapshot call was removed here; the history is
    // recorded exactly once.)
    try {
      await this.getDealerClaimService().saveWorkflowHistory(
        level.requestId,
        `Level ${level.levelNumber} rejected (terminal rejection - no previous step)`,
        userId,
        level.levelId,
        level.levelNumber,
        level.levelName || undefined
      );
    } catch (snapshotError) {
      // Log error but don't fail the rejection - snapshot is for audit, not critical
      logger.error(`[DealerClaimApproval] Failed to save workflow history snapshot (non-critical):`, snapshotError);
    }
    // Log rejection activity (terminal rejection)
    await activityService.log({
      requestId: level.requestId,
      type: 'rejection',
      user: { userId: level.approverId, name: level.approverName },
      timestamp: rejectionNow.toISOString(),
      action: 'Rejected',
      details: `Request rejected by ${level.approverName || level.approverEmail}. Reason: ${action.rejectionReason || action.comments || 'No reason provided'}`,
      ipAddress: requestMetadata?.ipAddress || undefined,
      userAgent: requestMetadata?.userAgent || undefined
    });
    // Notify initiator and all active participants (workflow is closed).
    const participants = await import('@models/Participant').then(m => m.Participant.findAll({
      where: { requestId: level.requestId, isActive: true }
    }));
    const userIdsToNotify = [(wf as any).initiatorId];
    if (participants && participants.length > 0) {
      participants.forEach((p: any) => {
        if (p.userId && p.userId !== (wf as any).initiatorId) {
          userIdsToNotify.push(p.userId);
        }
      });
    }
    await notificationService.sendToUsers(userIdsToNotify, {
      title: `Request Rejected: ${(wf as any).requestNumber}`,
      body: `${(wf as any).title} - Rejected by ${level.approverName || level.approverEmail}. Reason: ${action.rejectionReason || action.comments || 'No reason provided'}`,
      requestNumber: (wf as any).requestNumber,
      requestId: level.requestId,
      url: `/request/${(wf as any).requestNumber}`,
      type: 'rejection',
      priority: 'HIGH'
    });
  } else {
    // Return to previous step
    logger.info(`[DealerClaimApproval] Returning to previous level ${previousLevel.levelNumber} (${previousLevel.levelName || 'unnamed'})`);
    // Reset previous level to IN_PROGRESS so it can be acted upon again
    await previousLevel.update({
      status: ApprovalStatus.IN_PROGRESS,
      levelStartTime: rejectionNow,
      tatStartTime: rejectionNow,
      actionDate: undefined,
      levelEndTime: undefined,
      comments: undefined,
      elapsedHours: 0,
      tatPercentageUsed: 0
    });
    // Keep the workflow active for rework and point currentLevel back at the
    // previous step.
    await WorkflowRequest.update(
      {
        status: WorkflowStatus.PENDING,
        currentLevel: previousLevel.levelNumber
      },
      { where: { requestId: level.requestId } }
    );
    // Log rejection activity (returned to previous step)
    await activityService.log({
      requestId: level.requestId,
      type: 'rejection',
      user: { userId: level.approverId, name: level.approverName },
      timestamp: rejectionNow.toISOString(),
      action: 'Returned to Previous Step',
      details: `Request rejected by ${level.approverName || level.approverEmail} and returned to level ${previousLevel.levelNumber}. Reason: ${action.rejectionReason || action.comments || 'No reason provided'}`,
      ipAddress: requestMetadata?.ipAddress || undefined,
      userAgent: requestMetadata?.userAgent || undefined
    });
    // Notify the approver of the previous level
    if (previousLevel.approverId) {
      await notificationService.sendToUsers([previousLevel.approverId], {
        title: `Request Returned: ${(wf as any).requestNumber}`,
        body: `Request "${(wf as any).title}" has been returned to your level for revision. Reason: ${action.rejectionReason || action.comments || 'No reason provided'}`,
        requestNumber: (wf as any).requestNumber,
        requestId: level.requestId,
        url: `/request/${(wf as any).requestNumber}`,
        type: 'assignment',
        priority: 'HIGH',
        actionRequired: true
      });
    }
    // Notify initiator when request is returned (not closed)
    await notificationService.sendToUsers([(wf as any).initiatorId], {
      title: `Request Returned: ${(wf as any).requestNumber}`,
      body: `Request "${(wf as any).title}" has been returned to level ${previousLevel.levelNumber} for revision. Reason: ${action.rejectionReason || action.comments || 'No reason provided'}`,
      requestNumber: (wf as any).requestNumber,
      requestId: level.requestId,
      url: `/request/${(wf as any).requestNumber}`,
      type: 'rejection',
      priority: 'HIGH',
      actionRequired: true
    });
  }
  // Emit real-time update to all users viewing this request
  emitToRequestRoom(level.requestId, 'request:updated', {
    requestId: level.requestId,
    requestNumber: (wf as any)?.requestNumber,
    action: 'REJECT',
    levelNumber: level.levelNumber,
    timestamp: rejectionNow.toISOString()
  });
  return level;
}
/**
 * Reject a level in a dealer claim workflow (legacy method - kept for backward compatibility).
 *
 * Validates that the workflow is a CLAIM_MANAGEMENT workflow, reconstructs
 * the level's pause state, computes elapsed working hours / TAT usage, and
 * delegates to handleRejection.
 *
 * @param levelId         Primary key of the approval level to reject.
 * @param reason          Rejection reason supplied by the approver.
 * @param comments        Free-form comments supplied by the approver.
 * @param userId          ID of the user performing the rejection.
 * @param requestMetadata Optional request context (IP, user agent) for audit logging.
 * @returns The updated level, or null when the level/workflow cannot be found.
 * @throws Error when invoked against a non-CLAIM_MANAGEMENT workflow.
 */
async rejectLevel(
  levelId: string,
  reason: string,
  comments: string,
  userId: string,
  requestMetadata?: { ipAddress?: string | null; userAgent?: string | null }
): Promise<ApprovalLevel | null> {
  try {
    const level = await ApprovalLevel.findByPk(levelId);
    if (!level) return null;

    const wf = await WorkflowRequest.findByPk(level.requestId);
    if (!wf) return null;

    // Guard: this service is exclusive to claim management workflows.
    const workflowType = (wf as any)?.workflowType;
    if (workflowType !== 'CLAIM_MANAGEMENT') {
      logger.warn(`[DealerClaimApproval] Attempted to use DealerClaimApprovalService for non-claim-management workflow ${level.requestId}. Workflow type: ${workflowType}`);
      throw new Error('DealerClaimApprovalService can only be used for CLAIM_MANAGEMENT workflows');
    }

    const now = new Date();
    const priority = ((wf as any)?.priority || 'standard').toString().toLowerCase();

    // Reconstruct pause state for the elapsed-time calculation.
    const currentlyPaused = (level as any).isPaused;
    const resumedAfterPause = !currentlyPaused &&
      (level as any).pauseElapsedHours !== null &&
      (level as any).pauseElapsedHours !== undefined &&
      (level as any).pauseResumeDate !== null;

    let pauseState: any = undefined;
    if (currentlyPaused) {
      // Level is currently paused - report frozen elapsed hours at pause time.
      pauseState = {
        isPaused: true,
        pausedAt: (level as any).pausedAt,
        pauseElapsedHours: (level as any).pauseElapsedHours,
        pauseResumeDate: (level as any).pauseResumeDate
      };
    } else if (resumedAfterPause) {
      // Level was paused and resumed - combine pre-pause elapsed hours with
      // the time accrued since the actual resume timestamp.
      pauseState = {
        isPaused: false,
        pausedAt: null,
        pauseElapsedHours: Number((level as any).pauseElapsedHours),
        pauseResumeDate: (level as any).pauseResumeDate
      };
    }

    const elapsedHours = await calculateElapsedWorkingHours(
      (level as any).levelStartTime || (level as any).tatStartTime || now,
      now,
      priority,
      pauseState
    );
    const tatPercentage = calculateTATPercentage(elapsedHours, level.tatHours);

    // Delegate the actual rejection/return-to-previous-step logic.
    return await this.handleRejection(
      level,
      { action: 'REJECT', comments: comments || reason, rejectionReason: reason || comments },
      userId,
      requestMetadata,
      elapsedHours,
      tatPercentage,
      now
    );
  } catch (error) {
    logger.error('[DealerClaimApproval] Error rejecting level:', error);
    throw error;
  }
}
/**
 * Get current approval level for a request.
 *
 * @param requestId Workflow request identifier.
 * @returns The ApprovalLevel matching the workflow's currentLevel, or null
 *          when the request does not exist or has no active level.
 */
async getCurrentApprovalLevel(requestId: string): Promise<ApprovalLevel | null> {
  const request = await WorkflowRequest.findByPk(requestId);
  if (!request) return null;

  const activeLevelNumber = (request as any).currentLevel;
  if (!activeLevelNumber) return null;

  const match = await ApprovalLevel.findOne({
    where: { requestId, levelNumber: activeLevelNumber }
  });
  return match;
}
/**
 * Get all approval levels for a request, ordered by level number ascending.
 *
 * @param requestId Workflow request identifier.
 * @returns Every ApprovalLevel row for the request (may be empty).
 */
async getApprovalLevels(requestId: string): Promise<ApprovalLevel[]> {
  const levels = await ApprovalLevel.findAll({
    where: { requestId },
    order: [['levelNumber', 'ASC']]
  });
  return levels;
}
}

View File

@ -0,0 +1,535 @@
import { Request } from 'express';
import { ClaimInvoice } from '../models/ClaimInvoice';
import { ClaimCreditNote } from '../models/ClaimCreditNote';
import { WorkflowRequest } from '../models/WorkflowRequest';
import { ApprovalLevel } from '../models/ApprovalLevel';
import { DealerClaimDetails } from '../models/DealerClaimDetails';
import { User } from '../models/User';
import { ApprovalService } from './approval.service';
import logger from '../utils/logger';
import crypto from 'crypto';
import { activityService } from './activity.service';
import { notificationService } from './notification.service';
/**
 * DMS Webhook Service
 * Handles processing of webhook callbacks from DMS system
 */
export class DMSWebhookService {
  private webhookSecret: string;
  private approvalService: ApprovalService;

  constructor() {
    this.webhookSecret = process.env.DMS_WEBHOOK_SECRET || '';
    this.approvalService = new ApprovalService();
  }

  /**
   * Validate webhook signature for security.
   * DMS should send an HMAC-SHA256 hex signature of the raw JSON body in the
   * `x-dms-signature` header.
   *
   * @param req Incoming Express request carrying the webhook payload.
   * @returns true when the signature is valid (or validation is disabled).
   */
  async validateWebhookSignature(req: Request): Promise<boolean> {
    // If webhook secret is not configured, skip validation (for development)
    if (!this.webhookSecret) {
      logger.warn('[DMSWebhook] Webhook secret not configured, skipping signature validation');
      return true;
    }
    try {
      const signature = req.headers['x-dms-signature'] as string;
      if (!signature) {
        logger.warn('[DMSWebhook] Missing webhook signature in header');
        return false;
      }
      // Create HMAC hash of the request body
      const body = JSON.stringify(req.body);
      const expectedSignature = crypto
        .createHmac('sha256', this.webhookSecret)
        .update(body)
        .digest('hex');
      const signatureBuffer = Buffer.from(signature);
      const expectedBuffer = Buffer.from(expectedSignature);
      // timingSafeEqual throws RangeError when buffer lengths differ, so a
      // malformed (attacker-controlled) signature length must be rejected
      // explicitly instead of surfacing as a caught error.
      if (signatureBuffer.length !== expectedBuffer.length) {
        logger.warn('[DMSWebhook] Invalid webhook signature');
        return false;
      }
      // Compare signatures (use constant-time comparison to prevent timing attacks)
      const isValid = crypto.timingSafeEqual(signatureBuffer, expectedBuffer);
      if (!isValid) {
        logger.warn('[DMSWebhook] Invalid webhook signature');
      }
      return isValid;
    } catch (error) {
      logger.error('[DMSWebhook] Error validating webhook signature:', error);
      return false;
    }
  }

  /**
   * Process invoice generation webhook from DMS.
   * Creates the invoice record if it does not exist, otherwise updates it
   * with the DMS response data, then logs the e-invoice activity.
   *
   * @param payload Raw webhook payload (snake_case fields from DMS).
   * @returns Result object with the invoice number on success or an error message.
   */
  async processInvoiceWebhook(payload: any): Promise<{
    success: boolean;
    invoiceNumber?: string;
    error?: string;
  }> {
    try {
      // Validate required fields
      const requiredFields = ['request_number', 'document_no', 'document_type'];
      for (const field of requiredFields) {
        if (!payload[field]) {
          return {
            success: false,
            error: `Missing required field: ${field}`,
          };
        }
      }
      // Find workflow request by request number
      const request = await WorkflowRequest.findOne({
        where: {
          requestNumber: payload.request_number,
        },
      });
      if (!request) {
        return {
          success: false,
          error: `Request not found: ${payload.request_number}`,
        };
      }
      // Find or create invoice record
      let invoice = await ClaimInvoice.findOne({
        where: { requestId: request.requestId },
      });
      // Create invoice if it doesn't exist (new flow: webhook creates invoice)
      if (!invoice) {
        logger.info('[DMSWebhook] Invoice record not found, creating new invoice from webhook', {
          requestNumber: payload.request_number,
        });
        invoice = await ClaimInvoice.create({
          requestId: request.requestId,
          invoiceNumber: payload.document_no,
          dmsNumber: payload.document_no,
          invoiceDate: payload.document_date ? new Date(payload.document_date) : new Date(),
          // ?? (not ||) so a legitimate zero total is not clobbered by the fallback
          amount: payload.total_amount ?? payload.claim_amount,
          status: 'GENERATED',
          generatedAt: new Date(),
          invoiceFilePath: payload.invoice_file_path || null,
          errorMessage: payload.error_message || null,
          description: this.buildInvoiceDescription(payload),
        });
        logger.info('[DMSWebhook] Invoice created successfully from webhook', {
          requestNumber: payload.request_number,
          invoiceNumber: payload.document_no,
        });
      } else {
        // Update existing invoice with DMS response data
        await invoice.update({
          invoiceNumber: payload.document_no,
          dmsNumber: payload.document_no, // DMS document number
          invoiceDate: payload.document_date ? new Date(payload.document_date) : new Date(),
          // ?? (not ||) so a legitimate zero total is not clobbered by the fallback
          amount: payload.total_amount ?? payload.claim_amount,
          status: 'GENERATED',
          generatedAt: new Date(),
          invoiceFilePath: payload.invoice_file_path || null,
          errorMessage: payload.error_message || null,
          // Store additional DMS data in description or separate fields if needed
          description: this.buildInvoiceDescription(payload),
        });
        logger.info('[DMSWebhook] Invoice updated successfully', {
          requestNumber: payload.request_number,
          invoiceNumber: payload.document_no,
          irnNo: payload.irn_no,
        });
      }
      // Record the e-invoice generation as an activity (not an approval step)
      await this.logEInvoiceGenerationActivity(request.requestId, payload.request_number);
      return {
        success: true,
        invoiceNumber: payload.document_no,
      };
    } catch (error) {
      const errorMessage = error instanceof Error ? error.message : 'Unknown error';
      logger.error('[DMSWebhook] Error processing invoice webhook:', error);
      return {
        success: false,
        error: errorMessage,
      };
    }
  }

  /**
   * Process credit note generation webhook from DMS.
   * Creates the credit note record if it does not exist (optionally linked to
   * an existing invoice), otherwise updates it, then logs activity and
   * notifies the initiator/dealer.
   *
   * @param payload Raw webhook payload (snake_case fields from DMS).
   * @returns Result object with the credit note number on success or an error message.
   */
  async processCreditNoteWebhook(payload: any): Promise<{
    success: boolean;
    creditNoteNumber?: string;
    error?: string;
  }> {
    try {
      // Validate required fields
      const requiredFields = ['request_number', 'document_no', 'document_type'];
      for (const field of requiredFields) {
        if (!payload[field]) {
          return {
            success: false,
            error: `Missing required field: ${field}`,
          };
        }
      }
      // Find workflow request by request number
      const request = await WorkflowRequest.findOne({
        where: {
          requestNumber: payload.request_number,
        },
      });
      if (!request) {
        return {
          success: false,
          error: `Request not found: ${payload.request_number}`,
        };
      }
      // Find invoice to link credit note (optional - credit note can exist without invoice)
      const invoice = await ClaimInvoice.findOne({
        where: { requestId: request.requestId },
      });
      // Find or create credit note record
      let creditNote = await ClaimCreditNote.findOne({
        where: { requestId: request.requestId },
      });
      // Create credit note if it doesn't exist (new flow: webhook creates credit note)
      if (!creditNote) {
        logger.info('[DMSWebhook] Credit note record not found, creating new credit note from webhook', {
          requestNumber: payload.request_number,
          hasInvoice: !!invoice,
        });
        creditNote = await ClaimCreditNote.create({
          requestId: request.requestId,
          invoiceId: invoice?.invoiceId || undefined, // Allow undefined if no invoice exists
          creditNoteNumber: payload.document_no,
          creditNoteDate: payload.document_date ? new Date(payload.document_date) : new Date(),
          // ?? (not ||) so a legitimate zero amount is not clobbered by the fallback
          creditNoteAmount: payload.total_amount ?? payload.credit_amount,
          sapDocumentNumber: payload.sap_credit_note_no || null,
          status: 'CONFIRMED',
          confirmedAt: new Date(),
          creditNoteFilePath: payload.credit_note_file_path || null,
          errorMessage: payload.error_message || null,
          description: this.buildCreditNoteDescription(payload),
        });
        logger.info('[DMSWebhook] Credit note created successfully from webhook', {
          requestNumber: payload.request_number,
          creditNoteNumber: payload.document_no,
          hasInvoice: !!invoice,
        });
        // Log activity and notify initiator
        await this.logCreditNoteCreationActivity(
          request.requestId,
          payload.request_number,
          payload.document_no,
          creditNote.creditNoteAmount ?? payload.total_amount ?? payload.credit_amount
        );
      } else {
        // Update existing credit note with DMS response data
        await creditNote.update({
          invoiceId: invoice?.invoiceId || creditNote.invoiceId, // Preserve existing invoiceId if no invoice found
          creditNoteNumber: payload.document_no,
          creditNoteDate: payload.document_date ? new Date(payload.document_date) : new Date(),
          // ?? (not ||) so a legitimate zero amount is not clobbered by the fallback
          creditNoteAmount: payload.total_amount ?? payload.credit_amount,
          sapDocumentNumber: payload.sap_credit_note_no || null,
          status: 'CONFIRMED',
          confirmedAt: new Date(),
          creditNoteFilePath: payload.credit_note_file_path || null,
          errorMessage: payload.error_message || null,
          description: this.buildCreditNoteDescription(payload),
        });
        logger.info('[DMSWebhook] Credit note updated successfully', {
          requestNumber: payload.request_number,
          creditNoteNumber: payload.document_no,
          sapCreditNoteNo: payload.sap_credit_note_no,
          irnNo: payload.irn_no,
          hasInvoice: !!invoice,
        });
        // Log activity and notify initiator for updated credit note
        await this.logCreditNoteCreationActivity(
          request.requestId,
          payload.request_number,
          payload.document_no,
          creditNote.creditNoteAmount ?? payload.total_amount ?? payload.credit_amount
        );
      }
      return {
        success: true,
        creditNoteNumber: payload.document_no,
      };
    } catch (error) {
      const errorMessage = error instanceof Error ? error.message : 'Unknown error';
      logger.error('[DMSWebhook] Error processing credit note webhook:', error);
      return {
        success: false,
        error: errorMessage,
      };
    }
  }

  /**
   * Build invoice description from DMS payload.
   * Concatenates IRN, item code, HSN/SAC, and GST figures with ' | '.
   */
  private buildInvoiceDescription(payload: any): string {
    const parts: string[] = [];
    if (payload.irn_no) {
      parts.push(`IRN: ${payload.irn_no}`);
    }
    if (payload.item_code_no) {
      parts.push(`Item Code: ${payload.item_code_no}`);
    }
    if (payload.hsn_sac_code) {
      parts.push(`HSN/SAC: ${payload.hsn_sac_code}`);
    }
    if (payload.cgst_amount || payload.sgst_amount || payload.igst_amount) {
      parts.push(`GST - CGST: ${payload.cgst_amount || 0}, SGST: ${payload.sgst_amount || 0}, IGST: ${payload.igst_amount || 0}`);
    }
    return parts.length > 0 ? parts.join(' | ') : '';
  }

  /**
   * Build credit note description from DMS payload.
   * Concatenates IRN, SAP CN, credit type, item code, HSN/SAC, and GST
   * figures with ' | '.
   */
  private buildCreditNoteDescription(payload: any): string {
    const parts: string[] = [];
    if (payload.irn_no) {
      parts.push(`IRN: ${payload.irn_no}`);
    }
    if (payload.sap_credit_note_no) {
      parts.push(`SAP CN: ${payload.sap_credit_note_no}`);
    }
    if (payload.credit_type) {
      parts.push(`Credit Type: ${payload.credit_type}`);
    }
    if (payload.item_code_no) {
      parts.push(`Item Code: ${payload.item_code_no}`);
    }
    if (payload.hsn_sac_code) {
      parts.push(`HSN/SAC: ${payload.hsn_sac_code}`);
    }
    if (payload.cgst_amount || payload.sgst_amount || payload.igst_amount) {
      parts.push(`GST - CGST: ${payload.cgst_amount || 0}, SGST: ${payload.sgst_amount || 0}, IGST: ${payload.igst_amount || 0}`);
    }
    return parts.length > 0 ? parts.join(' | ') : '';
  }

  /**
   * Log Credit Note Creation as activity and notify initiator (and the
   * dealer, when the dealer email resolves to a system user).
   * This is called after credit note is created/updated from DMS webhook.
   * Never throws: the credit note write is the primary goal, so activity or
   * notification failures are only logged.
   */
  private async logCreditNoteCreationActivity(
    requestId: string,
    requestNumber: string,
    creditNoteNumber: string,
    creditNoteAmount: number
  ): Promise<void> {
    try {
      // Check if this is a claim management workflow
      const request = await WorkflowRequest.findByPk(requestId);
      if (!request) {
        logger.warn('[DMSWebhook] Request not found for credit note activity logging', { requestId });
        return;
      }
      const workflowType = (request as any).workflowType;
      if (workflowType !== 'CLAIM_MANAGEMENT') {
        logger.info('[DMSWebhook] Not a claim management workflow, skipping credit note activity logging', {
          requestId,
          workflowType,
        });
        return;
      }
      const initiatorId = (request as any).initiatorId;
      if (!initiatorId) {
        logger.warn('[DMSWebhook] Initiator ID not found for credit note notification', { requestId });
        return;
      }
      // Log activity
      await activityService.log({
        requestId,
        type: 'status_change',
        user: undefined, // System event (no user means it's a system event)
        timestamp: new Date().toISOString(),
        action: 'Credit Note Generated',
        details: `Credit note generated from DMS. Credit Note Number: ${creditNoteNumber}. Credit Note Amount: ₹${creditNoteAmount || 0}. Request: ${requestNumber}`,
        category: 'credit_note',
        severity: 'INFO',
      });
      logger.info('[DMSWebhook] Credit note activity logged successfully', {
        requestId,
        requestNumber,
        creditNoteNumber,
      });
      // Get dealer information from claim details
      const claimDetails = await DealerClaimDetails.findOne({
        where: { requestId }
      });
      let dealerUserId: string | null = null;
      if (claimDetails?.dealerEmail) {
        const dealerUser = await User.findOne({
          where: { email: claimDetails.dealerEmail.toLowerCase() },
          attributes: ['userId'],
        });
        dealerUserId = dealerUser?.userId || null;
        if (dealerUserId) {
          logger.info('[DMSWebhook] Found dealer user for notification', {
            requestId,
            dealerEmail: claimDetails.dealerEmail,
            dealerUserId,
          });
        } else {
          logger.warn('[DMSWebhook] Dealer email found but user not found in system', {
            requestId,
            dealerEmail: claimDetails.dealerEmail,
          });
        }
      } else {
        logger.info('[DMSWebhook] No dealer email found in claim details', { requestId });
      }
      // Send notification to initiator
      await notificationService.sendToUsers([initiatorId], {
        title: 'Credit Note Generated',
        body: `Credit note ${creditNoteNumber} has been generated for request ${requestNumber}. Amount: ₹${creditNoteAmount || 0}`,
        requestId,
        requestNumber,
        url: `/request/${requestNumber}`,
        type: 'status_change',
        priority: 'MEDIUM',
        actionRequired: false,
        metadata: {
          creditNoteNumber,
          creditNoteAmount,
          source: 'dms_webhook',
        },
      });
      logger.info('[DMSWebhook] Credit note notification sent to initiator', {
        requestId,
        requestNumber,
        initiatorId,
        creditNoteNumber,
      });
      // Send notification to dealer if dealer user exists
      if (dealerUserId) {
        await notificationService.sendToUsers([dealerUserId], {
          title: 'Credit Note Generated',
          body: `Credit note ${creditNoteNumber} has been generated for your claim request ${requestNumber}. Amount: ₹${creditNoteAmount || 0}`,
          requestId,
          requestNumber,
          url: `/request/${requestNumber}`,
          type: 'status_change',
          priority: 'MEDIUM',
          actionRequired: false,
          metadata: {
            creditNoteNumber,
            creditNoteAmount,
            source: 'dms_webhook',
            recipient: 'dealer',
          },
        });
        logger.info('[DMSWebhook] Credit note notification sent to dealer', {
          requestId,
          requestNumber,
          dealerUserId,
          dealerEmail: claimDetails?.dealerEmail,
          creditNoteNumber,
        });
      }
    } catch (error) {
      const errorMessage = error instanceof Error ? error.message : 'Unknown error';
      logger.error('[DMSWebhook] Error logging credit note activity:', {
        requestId,
        requestNumber,
        error: errorMessage,
      });
      // Don't throw error - webhook processing should continue even if activity/notification fails
      // The credit note is already created/updated, which is the primary goal
    }
  }

  /**
   * Log E-Invoice Generation as activity (no longer an approval step).
   * This is called after invoice is created/updated from DMS webhook.
   * Never throws: the invoice write is the primary goal, so activity-logging
   * failures are only logged.
   */
  private async logEInvoiceGenerationActivity(requestId: string, requestNumber: string): Promise<void> {
    try {
      // Check if this is a claim management workflow
      const request = await WorkflowRequest.findByPk(requestId);
      if (!request) {
        logger.warn('[DMSWebhook] Request not found for Step 7 auto-approval', { requestId });
        return;
      }
      const workflowType = (request as any).workflowType;
      if (workflowType !== 'CLAIM_MANAGEMENT') {
        logger.info('[DMSWebhook] Not a claim management workflow, skipping Step 7 auto-approval', {
          requestId,
          workflowType,
        });
        return;
      }
      // E-Invoice Generation is now an activity log only, not an approval step
      // Log the activity using the dealerClaimService
      const { DealerClaimService } = await import('./dealerClaim.service');
      const dealerClaimService = new DealerClaimService();
      const invoice = await ClaimInvoice.findOne({ where: { requestId } });
      const invoiceNumber = invoice?.invoiceNumber || 'N/A';
      await dealerClaimService.logEInvoiceGenerationActivity(requestId, invoiceNumber);
      logger.info('[DMSWebhook] E-Invoice Generation activity logged successfully', {
        requestId,
        requestNumber,
        invoiceNumber,
      });
    } catch (error) {
      const errorMessage = error instanceof Error ? error.message : 'Unknown error';
      logger.error('[DMSWebhook] Error logging E-Invoice Generation activity:', {
        requestId,
        requestNumber,
        error: errorMessage,
      });
      // Don't throw error - webhook processing should continue even if activity logging fails
      // The invoice is already created/updated, which is the primary goal
    }
  }
}

View File

@ -0,0 +1,221 @@
import { Holiday, HolidayType } from '@models/Holiday';
import { Op } from 'sequelize';
import logger from '@utils/logger';
import dayjs from 'dayjs';
/**
 * Service for querying and maintaining company holidays.
 *
 * Read helpers (`getHolidaysInRange`, `isHoliday`, `isWorkingDay`,
 * `getAllActiveHolidays`, `getHolidayCalendar`) are best-effort: on a
 * database error they log and return a safe fallback instead of throwing.
 * Write helpers (`createHoliday`, `updateHoliday`, `deleteHoliday`) log
 * and rethrow so callers can surface the failure.
 */
export class HolidayService {
  /**
   * Get all active holiday dates (YYYY-MM-DD strings) within a date range, inclusive.
   * Returns an empty array on query failure.
   */
  async getHolidaysInRange(startDate: Date | string, endDate: Date | string): Promise<string[]> {
    try {
      const from = dayjs(startDate).format('YYYY-MM-DD');
      const to = dayjs(endDate).format('YYYY-MM-DD');
      const rows = await Holiday.findAll({
        where: {
          holidayDate: {
            [Op.between]: [from, to]
          },
          isActive: true
        },
        attributes: ['holidayDate'],
        raw: true
      });
      // Raw rows may come back camelCase or snake_case depending on model config.
      return rows.map((row: any) => row.holidayDate || row.holiday_date);
    } catch (error) {
      logger.error('[Holiday Service] Error fetching holidays:', error);
      return [];
    }
  }

  /**
   * Check whether a specific date is an active holiday.
   * Returns false on query failure.
   */
  async isHoliday(date: Date | string): Promise<boolean> {
    try {
      const dateStr = dayjs(date).format('YYYY-MM-DD');
      const match = await Holiday.findOne({
        where: {
          holidayDate: dateStr,
          isActive: true
        }
      });
      return match !== null;
    } catch (error) {
      logger.error('[Holiday Service] Error checking holiday:', error);
      return false;
    }
  }

  /**
   * Check whether a date is a working day: not a weekend (Sat/Sun) and not a holiday.
   */
  async isWorkingDay(date: Date | string): Promise<boolean> {
    // dayjs: 0 = Sunday, 6 = Saturday
    const weekday = dayjs(date).day();
    if (weekday === 0 || weekday === 6) {
      return false;
    }
    return !(await this.isHoliday(date));
  }

  /**
   * Create a new holiday record (always created active).
   * Rethrows on failure after logging.
   */
  async createHoliday(holidayData: {
    holidayDate: string;
    holidayName: string;
    description?: string;
    holidayType?: HolidayType;
    isRecurring?: boolean;
    recurrenceRule?: string;
    appliesToDepartments?: string[];
    appliesToLocations?: string[];
    createdBy: string;
  }): Promise<Holiday> {
    try {
      const created = await Holiday.create({
        ...holidayData,
        isActive: true
      } as any);
      logger.info(`[Holiday Service] Holiday created: ${holidayData.holidayName} on ${holidayData.holidayDate}`);
      return created;
    } catch (error) {
      logger.error('[Holiday Service] Error creating holiday:', error);
      throw error;
    }
  }

  /**
   * Apply partial updates to an existing holiday.
   * Throws 'Holiday not found' if the id does not exist; rethrows DB errors.
   */
  async updateHoliday(holidayId: string, updates: any, updatedBy: string): Promise<Holiday | null> {
    try {
      const existing = await Holiday.findByPk(holidayId);
      if (!existing) {
        throw new Error('Holiday not found');
      }
      await existing.update({
        ...updates,
        updatedBy,
        updatedAt: new Date()
      });
      logger.info(`[Holiday Service] Holiday updated: ${holidayId}`);
      return existing;
    } catch (error) {
      logger.error('[Holiday Service] Error updating holiday:', error);
      throw error;
    }
  }

  /**
   * Soft-delete a holiday by marking it inactive.
   * Returns true even if the id matched no rows; rethrows DB errors.
   */
  async deleteHoliday(holidayId: string): Promise<boolean> {
    try {
      await Holiday.update(
        { isActive: false },
        { where: { holidayId } }
      );
      logger.info(`[Holiday Service] Holiday deactivated: ${holidayId}`);
      return true;
    } catch (error) {
      logger.error('[Holiday Service] Error deleting holiday:', error);
      throw error;
    }
  }

  /**
   * Get all active holidays, optionally restricted to a calendar year,
   * ordered by date ascending. Returns an empty array on query failure.
   */
  async getAllActiveHolidays(year?: number): Promise<Holiday[]> {
    try {
      const whereClause: any = { isActive: true };
      if (year) {
        whereClause.holidayDate = {
          [Op.between]: [`${year}-01-01`, `${year}-12-31`]
        };
      }
      return await Holiday.findAll({
        where: whereClause,
        order: [['holidayDate', 'ASC']]
      });
    } catch (error) {
      logger.error('[Holiday Service] Error fetching holidays:', error);
      return [];
    }
  }

  /**
   * Get a year's active holidays shaped for calendar display.
   * Returns an empty array on query failure.
   */
  async getHolidayCalendar(year: number): Promise<any[]> {
    try {
      const rows = await Holiday.findAll({
        where: {
          holidayDate: {
            [Op.between]: [`${year}-01-01`, `${year}-12-31`]
          },
          isActive: true
        },
        order: [['holidayDate', 'ASC']]
      });
      // Tolerate both camelCase and snake_case attribute names on the model rows.
      return rows.map((row: any) => ({
        date: row.holidayDate || row.holiday_date,
        name: row.holidayName || row.holiday_name,
        description: row.description,
        type: row.holidayType || row.holiday_type,
        isRecurring: row.isRecurring || row.is_recurring
      }));
    } catch (error) {
      logger.error('[Holiday Service] Error fetching holiday calendar:', error);
      return [];
    }
  }

  /**
   * Import multiple holidays sequentially (bulk upload).
   * Individual failures are logged and counted; the import continues.
   */
  async bulkImportHolidays(holidays: any[], createdBy: string): Promise<{ success: number; failed: number }> {
    const tally = { success: 0, failed: 0 };
    for (const entry of holidays) {
      try {
        await this.createHoliday({
          ...entry,
          createdBy
        });
        tally.success++;
      } catch (error) {
        tally.failed++;
        logger.error(`[Holiday Service] Failed to import holiday: ${entry.holidayName}`, error);
      }
    }
    logger.info(`[Holiday Service] Bulk import complete: ${tally.success} success, ${tally.failed} failed`);
    return tally;
  }
}

export const holidayService = new HolidayService();

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,764 @@
import { WorkflowRequest } from '@models/WorkflowRequest';
import { ApprovalLevel } from '@models/ApprovalLevel';
import { User } from '@models/User';
import { ApprovalStatus, WorkflowStatus } from '../types/common.types';
import { Op } from 'sequelize';
import logger from '@utils/logger';
import { tatSchedulerService } from './tatScheduler.service';
import { calculateElapsedWorkingHours } from '@utils/tatTimeUtils';
import { notificationService } from './notification.service';
import { activityService } from './activity.service';
import dayjs from 'dayjs';
import { emitToRequestRoom } from '../realtime/socket';
/**
 * Service that pauses and resumes workflow approval levels.
 *
 * Pausing snapshots the elapsed TAT (working hours) for the current level,
 * cancels its TAT alert jobs, and schedules a delayed auto-resume job.
 * Resuming restores the workflow's pre-pause status, restarts the level's
 * TAT clock from "now" with only the remaining hours, and reschedules any
 * alert thresholds that were not already sent.
 *
 * NOTE(review): pause/resume accounting relies on `pauseElapsedHours`
 * accumulating across repeated pause cycles and on `pauseResumeDate` being
 * repurposed (scheduled resume date while paused, actual resume time after
 * resume). Treat the field semantics here as load-bearing.
 */
export class PauseService {
  /**
   * Pause a workflow at a specific approval level
   * @param requestId - The workflow request ID
   * @param levelId - The approval level ID to pause (optional, pauses current level if not provided)
   * @param userId - The user ID who is pausing
   * @param reason - Reason for pausing
   * @param resumeDate - Date when workflow should auto-resume (max 1 month from now)
   */
  async pauseWorkflow(
    requestId: string,
    levelId: string | null,
    userId: string,
    reason: string,
    resumeDate: Date
  ): Promise<{ workflow: WorkflowRequest; level: ApprovalLevel | null }> {
    try {
      // Validate resume date (max 1 month from now)
      const now = new Date();
      const maxResumeDate = dayjs(now).add(1, 'month').toDate();
      if (resumeDate > maxResumeDate) {
        throw new Error('Resume date cannot be more than 1 month from now');
      }
      if (resumeDate <= now) {
        throw new Error('Resume date must be in the future');
      }
      // Get workflow
      const workflow = await WorkflowRequest.findByPk(requestId);
      if (!workflow) {
        throw new Error('Workflow not found');
      }
      // Check if already paused
      if ((workflow as any).isPaused) {
        throw new Error('Workflow is already paused');
      }
      // Get current approval level
      let level: ApprovalLevel | null = null;
      if (levelId) {
        level = await ApprovalLevel.findByPk(levelId);
        if (!level || (level as any).requestId !== requestId) {
          throw new Error('Approval level not found or does not belong to this workflow');
        }
      } else {
        // Get current active level
        level = await ApprovalLevel.findOne({
          where: {
            requestId,
            status: { [Op.in]: [ApprovalStatus.PENDING, ApprovalStatus.IN_PROGRESS] }
          },
          order: [['levelNumber', 'ASC']]
        });
      }
      if (!level) {
        throw new Error('No active approval level found to pause');
      }
      // Verify user is either the approver for this level OR the initiator
      const isApprover = (level as any).approverId === userId;
      const isInitiator = (workflow as any).initiatorId === userId;
      if (!isApprover && !isInitiator) {
        throw new Error('Only the assigned approver or the initiator can pause this workflow');
      }
      // Check if level is already paused
      if ((level as any).isPaused) {
        throw new Error('This approval level is already paused');
      }
      // Calculate elapsed hours before pause
      const priority = ((workflow as any).priority || 'STANDARD').toString().toLowerCase();
      // Check if this level was previously paused and resumed
      // If so, we need to account for the previous pauseElapsedHours
      // IMPORTANT: Convert to number to avoid string concatenation (DB returns DECIMAL as string)
      const previousPauseElapsedHours = Number((level as any).pauseElapsedHours || 0);
      const previousResumeDate = (level as any).pauseResumeDate;
      const originalTatStartTime = (level as any).pauseTatStartTime || (level as any).levelStartTime || (level as any).tatStartTime || (level as any).createdAt;
      let elapsedHours: number;
      let levelStartTimeForCalculation: Date;
      if (previousPauseElapsedHours > 0 && previousResumeDate) {
        // This is a second (or subsequent) pause
        // Calculate: previous elapsed hours + time from resume to now
        levelStartTimeForCalculation = previousResumeDate; // Start from last resume time
        const timeSinceResume = await calculateElapsedWorkingHours(levelStartTimeForCalculation, now, priority);
        elapsedHours = previousPauseElapsedHours + Number(timeSinceResume);
        logger.info(`[Pause] Second pause detected - Previous elapsed: ${previousPauseElapsedHours}h, Since resume: ${timeSinceResume}h, Total: ${elapsedHours}h`);
      } else {
        // First pause - calculate from original start time
        levelStartTimeForCalculation = originalTatStartTime;
        elapsedHours = await calculateElapsedWorkingHours(levelStartTimeForCalculation, now, priority);
      }
      // Store TAT snapshot
      const tatSnapshot = {
        levelId: (level as any).levelId,
        levelNumber: (level as any).levelNumber,
        elapsedHours: Number(elapsedHours),
        remainingHours: Math.max(0, Number((level as any).tatHours) - elapsedHours),
        tatPercentageUsed: (Number((level as any).tatHours) > 0
          ? Math.min(100, Math.round((elapsedHours / Number((level as any).tatHours)) * 100))
          : 0),
        pausedAt: now.toISOString(),
        originalTatStartTime: originalTatStartTime // Always use the original start time, not the resume time
      };
      // Update approval level with pause information
      await level.update({
        isPaused: true,
        pausedAt: now,
        pausedBy: userId,
        pauseReason: reason,
        pauseResumeDate: resumeDate,
        pauseTatStartTime: originalTatStartTime, // Always preserve the original start time
        pauseElapsedHours: elapsedHours,
        status: ApprovalStatus.PAUSED
      });
      // Update workflow with pause information
      // Store the current status before pausing so we can restore it on resume
      const currentWorkflowStatus = (workflow as any).status;
      const currentLevel = (workflow as any).currentLevel || (level as any).levelNumber;
      await workflow.update({
        isPaused: true,
        pausedAt: now,
        pausedBy: userId,
        pauseReason: reason,
        pauseResumeDate: resumeDate,
        pauseTatSnapshot: {
          ...tatSnapshot,
          previousStatus: currentWorkflowStatus, // Store previous status for resume
          previousCurrentLevel: currentLevel // Store current level to prevent advancement
        },
        status: WorkflowStatus.PAUSED
        // Note: We do NOT update currentLevel here - it should stay at the paused level
      });
      // Cancel TAT jobs for this level
      await tatSchedulerService.cancelTatJobs(requestId, (level as any).levelId);
      // Get user details for notifications
      const user = await User.findByPk(userId);
      const userName = (user as any)?.displayName || (user as any)?.email || 'User';
      // Get initiator
      // NOTE(review): initiatorName is computed but never used below — candidate for cleanup.
      const initiator = await User.findByPk((workflow as any).initiatorId);
      const initiatorName = (initiator as any)?.displayName || (initiator as any)?.email || 'User';
      // Send notifications
      const requestNumber = (workflow as any).requestNumber;
      const title = (workflow as any).title;
      // Notify initiator only if someone else (approver) paused the request
      // Skip notification if initiator paused their own request
      if (!isInitiator) {
        await notificationService.sendToUsers([(workflow as any).initiatorId], {
          title: 'Workflow Paused',
          body: `Your request "${title}" has been paused by ${userName}. Reason: ${reason}. Will resume on ${dayjs(resumeDate).format('MMM DD, YYYY')}.`,
          requestId,
          requestNumber,
          url: `/request/${requestNumber}`,
          type: 'workflow_paused',
          priority: 'HIGH',
          actionRequired: false,
          metadata: {
            pauseReason: reason,
            resumeDate: resumeDate.toISOString(),
            pausedBy: userId
          }
        });
      }
      // Notify the user who paused (confirmation) - no email for self-action
      await notificationService.sendToUsers([userId], {
        title: 'Workflow Paused Successfully',
        body: `You have paused request "${title}". It will automatically resume on ${dayjs(resumeDate).format('MMM DD, YYYY')}.`,
        requestId,
        requestNumber,
        url: `/request/${requestNumber}`,
        type: 'status_change', // Use status_change to avoid email for self-action
        priority: 'MEDIUM',
        actionRequired: false
      });
      // If initiator paused, notify the current approver
      if (isInitiator && (level as any).approverId) {
        const approver = await User.findByPk((level as any).approverId);
        const approverUserId = (level as any).approverId;
        await notificationService.sendToUsers([approverUserId], {
          title: 'Workflow Paused by Initiator',
          body: `Request "${title}" has been paused by the initiator (${userName}). Reason: ${reason}. Will resume on ${dayjs(resumeDate).format('MMM DD, YYYY')}.`,
          requestId,
          requestNumber,
          url: `/request/${requestNumber}`,
          type: 'workflow_paused',
          priority: 'HIGH',
          actionRequired: false,
          metadata: {
            pauseReason: reason,
            resumeDate: resumeDate.toISOString(),
            pausedBy: userId
          }
        });
      }
      // Log activity
      await activityService.log({
        requestId,
        type: 'paused',
        user: { userId, name: userName },
        timestamp: now.toISOString(),
        action: 'Workflow Paused',
        details: `Workflow paused by ${userName} at level ${(level as any).levelNumber}. Reason: ${reason}. Will resume on ${dayjs(resumeDate).format('MMM DD, YYYY')}.`,
        metadata: {
          levelId: (level as any).levelId,
          levelNumber: (level as any).levelNumber,
          resumeDate: resumeDate.toISOString()
        }
      });
      logger.info(`[Pause] Workflow ${requestId} paused at level ${(level as any).levelNumber} by ${userId}`);
      // Schedule dedicated auto-resume job for this workflow
      try {
        const { pauseResumeQueue } = require('../queues/pauseResumeQueue');
        if (pauseResumeQueue && resumeDate) {
          const delay = resumeDate.getTime() - now.getTime();
          if (delay > 0) {
            // Deterministic job id so the job can be found/cancelled on manual resume.
            const jobId = `resume-${requestId}-${(level as any).levelId}`;
            await pauseResumeQueue.add(
              'auto-resume-workflow',
              {
                type: 'auto-resume-workflow',
                requestId,
                levelId: (level as any).levelId,
                scheduledResumeDate: resumeDate.toISOString()
              },
              {
                jobId,
                delay, // Exact delay in milliseconds until resume time
                removeOnComplete: true,
                removeOnFail: false
              }
            );
            logger.info(`[Pause] Scheduled dedicated auto-resume job ${jobId} for ${resumeDate.toISOString()} (delay: ${Math.round(delay / 1000 / 60)} minutes)`);
          } else {
            logger.warn(`[Pause] Resume date ${resumeDate.toISOString()} is in the past, skipping job scheduling`);
          }
        }
      } catch (queueError) {
        logger.warn(`[Pause] Could not schedule dedicated auto-resume job:`, queueError);
        // Continue with pause even if job scheduling fails (hourly check will handle it as fallback)
      }
      // Emit real-time update to all users viewing this request
      emitToRequestRoom(requestId, 'request:updated', {
        requestId,
        requestNumber: (workflow as any).requestNumber,
        action: 'PAUSE',
        levelNumber: (level as any).levelNumber,
        timestamp: now.toISOString()
      });
      return { workflow, level };
    } catch (error: any) {
      logger.error(`[Pause] Failed to pause workflow:`, error);
      throw error;
    }
  }
  /**
   * Resume a paused workflow
   * @param requestId - The workflow request ID
   * @param userId - The user ID who is resuming (optional, for manual resume)
   * @param notes - Optional notes for the resume action
   */
  async resumeWorkflow(requestId: string, userId?: string, notes?: string): Promise<{ workflow: WorkflowRequest; level: ApprovalLevel | null }> {
    try {
      const now = new Date();
      // Get workflow
      const workflow = await WorkflowRequest.findByPk(requestId);
      if (!workflow) {
        throw new Error('Workflow not found');
      }
      // Check if paused
      if (!(workflow as any).isPaused) {
        throw new Error('Workflow is not paused');
      }
      // Get paused level
      const level = await ApprovalLevel.findOne({
        where: {
          requestId,
          isPaused: true
        },
        order: [['levelNumber', 'ASC']]
      });
      if (!level) {
        throw new Error('Paused approval level not found');
      }
      // Verify user has permission (if manual resume)
      // Both initiator and current approver can resume the workflow
      if (userId) {
        const isApprover = (level as any).approverId === userId;
        const isInitiator = (workflow as any).initiatorId === userId;
        if (!isApprover && !isInitiator) {
          throw new Error('Only the assigned approver or the initiator can resume this workflow');
        }
      }
      // Calculate remaining TAT from resume time
      const priority = ((workflow as any).priority || 'STANDARD').toString().toLowerCase();
      const pauseElapsedHours = Number((level as any).pauseElapsedHours || 0);
      const tatHours = Number((level as any).tatHours);
      const remainingHours = Math.max(0, tatHours - pauseElapsedHours);
      // Get which alerts have already been sent (to avoid re-sending on resume)
      const tat50AlertSent = (level as any).tat50AlertSent || false;
      const tat75AlertSent = (level as any).tat75AlertSent || false;
      const tatBreached = (level as any).tatBreached || false;
      // Update approval level - resume TAT
      // IMPORTANT: Keep pauseElapsedHours and store resumedAt (pauseResumeDate repurposed)
      // This allows SLA calculation to correctly add pre-pause elapsed time
      await level.update({
        isPaused: false,
        pausedAt: null as any,
        pausedBy: null as any,
        pauseReason: null as any,
        pauseResumeDate: now, // Store actual resume time (repurposed from scheduled resume date)
        // pauseTatStartTime: null as any, // Keep original TAT start time for reference
        // pauseElapsedHours is intentionally NOT cleared - needed for SLA calculations
        status: ApprovalStatus.IN_PROGRESS,
        tatStartTime: now, // Reset TAT start time to now for new elapsed calculation
        levelStartTime: now // This is the new start time from resume
      });
      // Cancel any scheduled auto-resume job (if exists)
      try {
        const { pauseResumeQueue } = require('../queues/pauseResumeQueue');
        if (pauseResumeQueue) {
          // Try to remove job by specific ID pattern first (more efficient)
          const jobId = `resume-${requestId}-${(level as any).levelId}`;
          try {
            const specificJob = await pauseResumeQueue.getJob(jobId);
            if (specificJob) {
              await specificJob.remove();
              logger.info(`[Pause] Cancelled scheduled auto-resume job ${jobId} for workflow ${requestId}`);
            }
          } catch (err) {
            // Job might not exist, which is fine
          }
          // Also check for any other jobs for this request (fallback for old jobs)
          const scheduledJobs = await pauseResumeQueue.getJobs(['delayed', 'waiting']);
          const otherJobs = scheduledJobs.filter((job: any) =>
            job.data.requestId === requestId && job.id !== jobId
          );
          for (const job of otherJobs) {
            await job.remove();
            logger.info(`[Pause] Cancelled legacy auto-resume job ${job.id} for workflow ${requestId}`);
          }
        }
      } catch (queueError) {
        logger.warn(`[Pause] Could not cancel scheduled auto-resume job:`, queueError);
        // Continue with resume even if job cancellation fails
      }
      // Update workflow - restore previous status or default to PENDING
      const pauseSnapshot = (workflow as any).pauseTatSnapshot || {};
      const previousStatus = pauseSnapshot.previousStatus || WorkflowStatus.PENDING;
      await workflow.update({
        isPaused: false,
        pausedAt: null as any,
        pausedBy: null as any,
        pauseReason: null as any,
        pauseResumeDate: null as any,
        pauseTatSnapshot: null as any,
        status: previousStatus // Restore previous status (PENDING or IN_PROGRESS)
      });
      // Reschedule TAT jobs from resume time - only for alerts that haven't been sent yet
      if (remainingHours > 0) {
        // Calculate which thresholds are still pending based on remaining time
        const percentageUsedAtPause = tatHours > 0 ? (pauseElapsedHours / tatHours) * 100 : 0;
        // Only schedule jobs for thresholds that:
        // 1. Haven't been sent yet
        // 2. Haven't been passed yet (based on percentage used at pause)
        await tatSchedulerService.scheduleTatJobsOnResume(
          requestId,
          (level as any).levelId,
          (level as any).approverId,
          remainingHours, // Remaining TAT hours
          now, // Start from now
          priority as any,
          {
            // Pass which alerts were already sent
            tat50AlertSent: tat50AlertSent,
            tat75AlertSent: tat75AlertSent,
            tatBreached: tatBreached,
            // Pass percentage used at pause to determine which thresholds are still relevant
            percentageUsedAtPause: percentageUsedAtPause
          }
        );
      }
      // Get user details
      const resumeUser = userId ? await User.findByPk(userId) : null;
      const resumeUserName = resumeUser
        ? ((resumeUser as any)?.displayName || (resumeUser as any)?.email || 'User')
        : 'System (Auto-resume)';
      // Get initiator and paused by user
      // NOTE(review): initiatorName and pausedByName are computed but never used below — candidates for cleanup.
      const initiator = await User.findByPk((workflow as any).initiatorId);
      const initiatorName = (initiator as any)?.displayName || (initiator as any)?.email || 'User';
      const pausedByUser = (workflow as any).pausedBy
        ? await User.findByPk((workflow as any).pausedBy)
        : null;
      const pausedByName = pausedByUser
        ? ((pausedByUser as any)?.displayName || (pausedByUser as any)?.email || 'User')
        : 'Unknown';
      const requestNumber = (workflow as any).requestNumber;
      const title = (workflow as any).title;
      const initiatorId = (workflow as any).initiatorId;
      const approverId = (level as any).approverId;
      const isResumedByInitiator = userId === initiatorId;
      const isResumedByApprover = userId === approverId;
      // Calculate pause duration
      const pausedAt = (level as any).pausedAt || (workflow as any).pausedAt;
      const pauseDurationMs = pausedAt ? now.getTime() - new Date(pausedAt).getTime() : 0;
      const pauseDurationHours = Math.round((pauseDurationMs / (1000 * 60 * 60)) * 100) / 100; // Round to 2 decimal places
      const pauseDuration = pauseDurationHours > 0 ? `${pauseDurationHours} hours` : 'less than 1 hour';
      // Notify initiator only if someone else resumed (or auto-resume)
      // Skip if initiator resumed their own request
      if (!isResumedByInitiator) {
        await notificationService.sendToUsers([initiatorId], {
          title: 'Workflow Resumed',
          body: `Your request "${title}" has been resumed ${userId ? `by ${resumeUserName}` : 'automatically'}.`,
          requestId,
          requestNumber,
          url: `/request/${requestNumber}`,
          type: 'workflow_resumed',
          priority: 'HIGH',
          actionRequired: false,
          metadata: {
            resumedBy: userId ? { userId, name: resumeUserName } : null,
            pauseDuration: pauseDuration
          }
        });
      }
      // Notify approver only if someone else resumed (or auto-resume)
      // Skip if approver resumed the request themselves
      if (!isResumedByApprover && approverId) {
        await notificationService.sendToUsers([approverId], {
          title: 'Workflow Resumed',
          body: `Request "${title}" has been resumed ${userId ? `by ${resumeUserName}` : 'automatically'}. Please continue with your review.`,
          requestId,
          requestNumber,
          url: `/request/${requestNumber}`,
          type: 'workflow_resumed',
          priority: 'HIGH',
          actionRequired: true,
          metadata: {
            resumedBy: userId ? { userId, name: resumeUserName } : null,
            pauseDuration: pauseDuration
          }
        });
      }
      // Send confirmation to the user who resumed (if manual resume) - no email for self-action
      if (userId) {
        await notificationService.sendToUsers([userId], {
          title: 'Workflow Resumed Successfully',
          body: `You have resumed request "${title}". ${isResumedByApprover ? 'Please continue with your review.' : ''}`,
          requestId,
          requestNumber,
          url: `/request/${requestNumber}`,
          type: 'status_change', // Use status_change to avoid email for self-action
          priority: 'MEDIUM',
          actionRequired: isResumedByApprover
        });
      }
      // Log activity with notes
      const resumeDetails = notes
        ? `Workflow resumed ${userId ? `by ${resumeUserName}` : 'automatically'} at level ${(level as any).levelNumber}. Notes: ${notes}`
        : `Workflow resumed ${userId ? `by ${resumeUserName}` : 'automatically'} at level ${(level as any).levelNumber}.`;
      await activityService.log({
        requestId,
        type: 'resumed',
        user: userId ? { userId, name: resumeUserName } : undefined,
        timestamp: now.toISOString(),
        action: 'Workflow Resumed',
        details: resumeDetails,
        metadata: {
          levelId: (level as any).levelId,
          levelNumber: (level as any).levelNumber,
          wasAutoResume: !userId,
          notes: notes || null
        }
      });
      logger.info(`[Pause] Workflow ${requestId} resumed ${userId ? `by ${userId}` : 'automatically'}`);
      // Emit real-time update to all users viewing this request
      emitToRequestRoom(requestId, 'request:updated', {
        requestId,
        requestNumber: (workflow as any).requestNumber,
        action: 'RESUME',
        levelNumber: (level as any).levelNumber,
        timestamp: now.toISOString()
      });
      return { workflow, level };
    } catch (error: any) {
      logger.error(`[Pause] Failed to resume workflow:`, error);
      throw error;
    }
  }
  /**
   * Cancel pause (for retrigger scenario - initiator requests approver to resume)
   * This sends a notification to the approver who paused it
   * Does NOT itself resume the workflow — it only asks the pausing approver to.
   * @param requestId - The workflow request ID
   * @param userId - The initiator user ID
   */
  async retriggerPause(requestId: string, userId: string): Promise<void> {
    try {
      const workflow = await WorkflowRequest.findByPk(requestId);
      if (!workflow) {
        throw new Error('Workflow not found');
      }
      if (!(workflow as any).isPaused) {
        throw new Error('Workflow is not paused');
      }
      // Verify user is initiator
      if ((workflow as any).initiatorId !== userId) {
        throw new Error('Only the initiator can retrigger a pause');
      }
      const pausedBy = (workflow as any).pausedBy;
      if (!pausedBy) {
        throw new Error('Cannot retrigger - no approver found who paused this workflow');
      }
      // Get user details
      const initiator = await User.findByPk(userId);
      const initiatorName = (initiator as any)?.displayName || (initiator as any)?.email || 'User';
      // Get approver details (who paused the workflow)
      const approver = await User.findByPk(pausedBy);
      const approverName = (approver as any)?.displayName || (approver as any)?.email || 'Approver';
      const requestNumber = (workflow as any).requestNumber;
      const title = (workflow as any).title;
      // Notify approver who paused it
      await notificationService.sendToUsers([pausedBy], {
        title: 'Pause Retrigger Request',
        body: `${initiatorName} is requesting you to cancel the pause and resume work on request "${title}".`,
        requestId,
        requestNumber,
        url: `/request/${requestNumber}`,
        type: 'pause_retrigger_request',
        priority: 'HIGH',
        actionRequired: true
      });
      // Log activity with approver name
      await activityService.log({
        requestId,
        type: 'pause_retriggered',
        user: { userId, name: initiatorName },
        timestamp: new Date().toISOString(),
        action: 'Pause Retrigger Requested',
        details: `${initiatorName} requested ${approverName} to cancel the pause and resume work.`,
        metadata: {
          pausedBy,
          approverName
        }
      });
      logger.info(`[Pause] Pause retrigger requested for workflow ${requestId} by initiator ${userId}`);
    } catch (error: any) {
      logger.error(`[Pause] Failed to retrigger pause:`, error);
      throw error;
    }
  }
  /**
   * Get pause details for a workflow
   * @returns null when the workflow is not paused; otherwise a summary of
   *          who paused it, when, why, and at which approval level.
   * @throws when the workflow does not exist or the lookup fails.
   */
  async getPauseDetails(requestId: string): Promise<any> {
    try {
      const workflow = await WorkflowRequest.findByPk(requestId);
      if (!workflow) {
        throw new Error('Workflow not found');
      }
      if (!(workflow as any).isPaused) {
        return null;
      }
      const level = await ApprovalLevel.findOne({
        where: {
          requestId,
          isPaused: true
        }
      });
      const pausedByUser = (workflow as any).pausedBy
        ? await User.findByPk((workflow as any).pausedBy, { attributes: ['userId', 'email', 'displayName'] })
        : null;
      return {
        isPaused: true,
        pausedAt: (workflow as any).pausedAt,
        pausedBy: pausedByUser ? {
          userId: (pausedByUser as any).userId,
          email: (pausedByUser as any).email,
          name: (pausedByUser as any).displayName || (pausedByUser as any).email
        } : null,
        pauseReason: (workflow as any).pauseReason,
        pauseResumeDate: (workflow as any).pauseResumeDate,
        level: level ? {
          levelId: (level as any).levelId,
          levelNumber: (level as any).levelNumber,
          approverName: (level as any).approverName
        } : null
      };
    } catch (error: any) {
      logger.error(`[Pause] Failed to get pause details:`, error);
      throw error;
    }
  }
  /**
   * Check and auto-resume paused workflows whose resume date has passed
   * This is called by a scheduled job
   * @returns the number of workflows successfully resumed in this pass.
   */
  async checkAndResumePausedWorkflows(): Promise<number> {
    try {
      const now = new Date();
      // Find all paused workflows where resume date has passed
      // Handle backward compatibility: workflow_type column may not exist in old environments
      let pausedWorkflows: WorkflowRequest[];
      try {
        pausedWorkflows = await WorkflowRequest.findAll({
          where: {
            isPaused: true,
            pauseResumeDate: {
              [Op.lte]: now
            }
          }
        });
      } catch (error: any) {
        // If error is due to missing workflow_type column, use raw query
        if (error.message?.includes('workflow_type') || (error.message?.includes('column') && error.message?.includes('does not exist'))) {
          logger.warn('[Pause] workflow_type column not found, using raw query for backward compatibility');
          const { sequelize } = await import('../config/database');
          const { QueryTypes } = await import('sequelize');
          const results = await sequelize.query(`
            SELECT request_id, is_paused, pause_resume_date
            FROM workflow_requests
            WHERE is_paused = true
            AND pause_resume_date <= :now
          `, {
            replacements: { now },
            type: QueryTypes.SELECT
          });
          // Convert to WorkflowRequest-like objects
          // results is an array of objects from SELECT query
          // NOTE(review): these plain objects are not model instances; resumeWorkflow
          // re-fetches by requestId, so only requestId needs to be accurate here.
          pausedWorkflows = (results as any[]).map((r: any) => ({
            requestId: r.request_id,
            isPaused: r.is_paused,
            pauseResumeDate: r.pause_resume_date
          })) as any;
        } else {
          throw error; // Re-throw if it's a different error
        }
      }
      let resumedCount = 0;
      for (const workflow of pausedWorkflows) {
        try {
          await this.resumeWorkflow((workflow as any).requestId);
          resumedCount++;
        } catch (error: any) {
          logger.error(`[Pause] Failed to auto-resume workflow ${(workflow as any).requestId}:`, error);
          // Continue with other workflows
        }
      }
      if (resumedCount > 0) {
        logger.info(`[Pause] Auto-resumed ${resumedCount} workflow(s)`);
      }
      return resumedCount;
    } catch (error: any) {
      logger.error(`[Pause] Failed to check and resume paused workflows:`, error);
      throw error;
    }
  }
  /**
   * Get all paused workflows (for admin/reporting)
   * Ordered by most recently paused first.
   */
  async getPausedWorkflows(): Promise<WorkflowRequest[]> {
    try {
      return await WorkflowRequest.findAll({
        where: {
          isPaused: true
        },
        order: [['pausedAt', 'DESC']]
      });
    } catch (error: any) {
      logger.error(`[Pause] Failed to get paused workflows:`, error);
      throw error;
    }
  }
}
export const pauseService = new PauseService();

View File

@ -0,0 +1,383 @@
import { tatQueue } from '../queues/tatQueue';
import { calculateDelay, addWorkingHours, addWorkingHoursExpress } from '@utils/tatTimeUtils';
import { getTatThresholds } from './configReader.service';
import dayjs from 'dayjs';
import logger, { logTATEvent } from '@utils/logger';
import { Priority } from '../types/common.types';
/**
 * Schedules, reschedules and cancels the delayed BullMQ jobs that drive
 * TAT (turnaround-time) notifications for workflow approval levels.
 *
 * Each approval level gets up to three jobs: 'threshold1' (first warning,
 * e.g. 50%), 'threshold2' (second warning, e.g. 75%) and 'breach' (100%).
 * Threshold percentages are read from database configuration on every call
 * via getTatThresholds(). Every public method degrades to a logged no-op
 * when the Redis-backed tatQueue is unavailable.
 */
export class TatSchedulerService {
  /**
   * Schedule TAT notification jobs for an approval level
   * @param requestId - The workflow request ID
   * @param levelId - The approval level ID
   * @param approverId - The approver user ID
   * @param tatDurationHours - TAT duration in hours
   * @param startTime - Optional start time (defaults to now)
   * @param priority - Request priority (EXPRESS = 24/7, STANDARD = working hours only)
   */
  async scheduleTatJobs(
    requestId: string,
    levelId: string,
    approverId: string,
    tatDurationHours: number,
    startTime?: Date,
    priority: Priority = Priority.STANDARD
  ): Promise<void> {
    try {
      // Check if tatQueue is available
      if (!tatQueue) {
        logger.warn(`[TAT Scheduler] TAT queue not available (Redis not connected). Skipping TAT job scheduling.`);
        return;
      }
      const now = startTime || new Date();
      // Handle both enum and string (case-insensitive) priority values
      const priorityStr = typeof priority === 'string' ? priority.toUpperCase() : priority;
      const isExpress = priorityStr === Priority.EXPRESS || priorityStr === 'EXPRESS';
      // Get current thresholds from database configuration
      const thresholds = await getTatThresholds();
      // Calculate milestone times using configured thresholds
      // EXPRESS mode: 24/7 calculation (includes holidays, weekends, non-working hours)
      // STANDARD mode: Working hours only (excludes holidays, weekends, non-working hours)
      let threshold1Time: Date;
      let threshold2Time: Date;
      let breachTime: Date;
      if (isExpress) {
        // EXPRESS: All calendar days (Mon-Sun, including weekends/holidays) but working hours only (9 AM - 6 PM)
        const t1 = await addWorkingHoursExpress(now, tatDurationHours * (thresholds.first / 100));
        const t2 = await addWorkingHoursExpress(now, tatDurationHours * (thresholds.second / 100));
        const tBreach = await addWorkingHoursExpress(now, tatDurationHours);
        threshold1Time = t1.toDate();
        threshold2Time = t2.toDate();
        breachTime = tBreach.toDate();
      } else {
        // STANDARD: Working days only (Mon-Fri), working hours (9 AM - 6 PM), excludes holidays
        const t1 = await addWorkingHours(now, tatDurationHours * (thresholds.first / 100));
        const t2 = await addWorkingHours(now, tatDurationHours * (thresholds.second / 100));
        const tBreach = await addWorkingHours(now, tatDurationHours);
        threshold1Time = t1.toDate();
        threshold2Time = t2.toDate();
        breachTime = tBreach.toDate();
      }
      logger.info(`[TAT Scheduler] Scheduling TAT jobs - Request: ${requestId}, Priority: ${priority}, TAT: ${tatDurationHours}h`);
      // One job descriptor per milestone; delay is ms-from-now to the target time.
      const jobs = [
        {
          type: 'threshold1' as const,
          threshold: thresholds.first,
          delay: calculateDelay(threshold1Time),
          targetTime: threshold1Time
        },
        {
          type: 'threshold2' as const,
          threshold: thresholds.second,
          delay: calculateDelay(threshold2Time),
          targetTime: threshold2Time
        },
        {
          type: 'breach' as const,
          threshold: 100,
          delay: calculateDelay(breachTime),
          targetTime: breachTime
        }
      ];
      // Check if test mode enabled (1 hour = 1 minute)
      const isTestMode = process.env.TAT_TEST_MODE === 'true';
      // Check if times collide (working hours calculation issue)
      const uniqueTimes = new Set(jobs.map(j => j.targetTime.getTime()));
      const hasCollision = uniqueTimes.size < jobs.length;
      let jobIndex = 0;
      for (const job of jobs) {
        // A negative delay means the milestone is already in the past; skip it.
        if (job.delay < 0) {
          logger.error(`[TAT Scheduler] Skipping ${job.type} - time in past`);
          continue;
        }
        let spacedDelay: number;
        if (isTestMode) {
          // Test mode: times are already in minutes (tatTimeUtils converts hours to minutes)
          // Just ensure they have minimum spacing for BullMQ reliability
          spacedDelay = Math.max(job.delay, 5000) + (jobIndex * 5000);
        } else if (hasCollision) {
          // Production with collision: add 5-minute spacing
          spacedDelay = job.delay + (jobIndex * 300000);
        } else {
          // Production without collision: use calculated delays
          spacedDelay = job.delay;
        }
        // Deterministic id per (type, request, level) — the same id is used by
        // cancelTatJobs() and scheduleTatJobsOnResume().
        // NOTE(review): BullMQ ignores an add() whose jobId already exists —
        // confirm callers cancel existing jobs before rescheduling a level.
        const jobId = `tat-${job.type}-${requestId}-${levelId}`;
        await tatQueue.add(
          job.type,
          {
            type: job.type,
            threshold: job.threshold,
            requestId,
            levelId,
            approverId
          },
          {
            delay: spacedDelay,
            jobId: jobId,
            removeOnComplete: {
              age: 3600, // Keep for 1 hour for debugging
              count: 1000
            },
            removeOnFail: false
          }
        );
        jobIndex++;
      }
      // NOTE(review): emitted at 'warning' level even though scheduling is a
      // routine event — confirm this is intentional.
      logTATEvent('warning', requestId, {
        level: parseInt(levelId.split('-').pop() || '1'),
        tatHours: tatDurationHours,
        priority,
        message: 'TAT jobs scheduled',
      });
    } catch (error) {
      logger.error(`[TAT Scheduler] Failed to schedule TAT jobs:`, error);
      throw error;
    }
  }
  /**
   * Schedule TAT jobs on resume - only schedules jobs for alerts that haven't been sent yet
   * @param requestId - The workflow request ID
   * @param levelId - The approval level ID
   * @param approverId - The approver user ID
   * @param remainingTatHours - Remaining TAT duration in hours (from resume point)
   * @param startTime - Resume start time
   * @param priority - Request priority
   * @param alertStatus - Object indicating which alerts have already been sent and percentage used at pause
   */
  async scheduleTatJobsOnResume(
    requestId: string,
    levelId: string,
    approverId: string,
    remainingTatHours: number,
    startTime: Date,
    priority: Priority = Priority.STANDARD,
    alertStatus: {
      tat50AlertSent: boolean;
      tat75AlertSent: boolean;
      tatBreached: boolean;
      percentageUsedAtPause: number;
    }
  ): Promise<void> {
    try {
      if (!tatQueue) {
        logger.warn(`[TAT Scheduler] TAT queue not available (Redis not connected). Skipping TAT job scheduling on resume.`);
        return;
      }
      const now = startTime;
      // Handle both enum and string (case-insensitive) priority values
      const priorityStr = typeof priority === 'string' ? priority.toUpperCase() : priority;
      const isExpress = priorityStr === Priority.EXPRESS || priorityStr === 'EXPRESS';
      // Get current thresholds from database configuration
      const thresholds = await getTatThresholds();
      // Calculate original TAT from remaining + elapsed
      // Example: If 35 min used (58.33%) and 25 min remaining, original TAT = 60 min
      // NOTE(review): divides by (100 - percentageUsedAtPause); a value of
      // exactly 100 would yield Infinity. The threshold branches below are
      // skipped in that case, but confirm upstream never passes 100.
      const elapsedHours = alertStatus.percentageUsedAtPause > 0
        ? (remainingTatHours * alertStatus.percentageUsedAtPause) / (100 - alertStatus.percentageUsedAtPause)
        : 0;
      const originalTatHours = elapsedHours + remainingTatHours;
      logger.info(`[TAT Scheduler] Resuming TAT scheduling - Request: ${requestId}, Remaining: ${(remainingTatHours * 60).toFixed(1)} min, Priority: ${isExpress ? 'EXPRESS' : 'STANDARD'}`);
      // Jobs to schedule - only include those that haven't been sent and haven't been passed
      const jobsToSchedule: Array<{
        type: 'threshold1' | 'threshold2' | 'breach';
        threshold: number;
        alreadySent: boolean;
        alreadyPassed: boolean;
        hoursFromNow: number;
      }> = [];
      // Threshold 1 (e.g., 50%)
      // Skip if: already sent OR already passed the threshold
      if (!alertStatus.tat50AlertSent && alertStatus.percentageUsedAtPause < thresholds.first) {
        // Calculate: How many hours from NOW until we reach this threshold?
        // Formula: (thresholdHours - elapsedHours)
        // thresholdHours = originalTatHours * (threshold/100)
        const thresholdHours = originalTatHours * (thresholds.first / 100);
        const hoursFromNow = thresholdHours - elapsedHours;
        if (hoursFromNow > 0) {
          jobsToSchedule.push({
            type: 'threshold1',
            threshold: thresholds.first,
            alreadySent: false,
            alreadyPassed: false,
            hoursFromNow: hoursFromNow
          });
        }
      }
      // Threshold 2 (e.g., 75%)
      if (!alertStatus.tat75AlertSent && alertStatus.percentageUsedAtPause < thresholds.second) {
        const thresholdHours = originalTatHours * (thresholds.second / 100);
        const hoursFromNow = thresholdHours - elapsedHours;
        if (hoursFromNow > 0) {
          jobsToSchedule.push({
            type: 'threshold2',
            threshold: thresholds.second,
            alreadySent: false,
            alreadyPassed: false,
            hoursFromNow: hoursFromNow
          });
        }
      }
      // Breach (100%)
      if (!alertStatus.tatBreached) {
        // Breach is always scheduled for the end of remaining TAT
        jobsToSchedule.push({
          type: 'breach',
          threshold: 100,
          alreadySent: false,
          alreadyPassed: false,
          hoursFromNow: remainingTatHours
        });
      }
      if (jobsToSchedule.length === 0) {
        logger.info(`[TAT Scheduler] No TAT jobs to schedule (all alerts already sent)`);
        return;
      }
      // Calculate actual times and schedule jobs
      for (const job of jobsToSchedule) {
        let targetTime: Date;
        if (isExpress) {
          targetTime = (await addWorkingHoursExpress(now, job.hoursFromNow)).toDate();
        } else {
          targetTime = (await addWorkingHours(now, job.hoursFromNow)).toDate();
        }
        const delay = calculateDelay(targetTime);
        if (delay < 0) {
          logger.warn(`[TAT Scheduler] Skipping ${job.type} - calculated time is in past`);
          continue;
        }
        // Same deterministic id scheme as scheduleTatJobs(), so cancellation
        // works identically for resumed jobs.
        const jobId = `tat-${job.type}-${requestId}-${levelId}`;
        await tatQueue.add(
          job.type,
          {
            type: job.type,
            threshold: job.threshold,
            requestId,
            levelId,
            approverId
          },
          {
            delay: delay,
            jobId: jobId,
            removeOnComplete: {
              age: 3600,
              count: 1000
            },
            removeOnFail: false
          }
        );
        logger.info(`[TAT Scheduler] ✓ Scheduled ${job.type} (${job.threshold}%) for ${dayjs(targetTime).format('YYYY-MM-DD HH:mm')}`);
      }
      logger.info(`[TAT Scheduler] ✅ ${jobsToSchedule.length} TAT job(s) scheduled for request ${requestId}`);
    } catch (error) {
      logger.error(`[TAT Scheduler] Failed to schedule TAT jobs on resume:`, error);
      throw error;
    }
  }
  /**
   * Cancel TAT jobs for a specific approval level
   * Useful when an approver acts before TAT expires
   * Never throws: cancellation failure must not break the workflow.
   * @param requestId - The workflow request ID
   * @param levelId - The approval level ID
   */
  async cancelTatJobs(requestId: string, levelId: string): Promise<void> {
    try {
      // Check if tatQueue is available
      if (!tatQueue) {
        logger.warn(`[TAT Scheduler] TAT queue not available. Skipping job cancellation.`);
        return;
      }
      // Use generic job names that don't depend on threshold percentages
      const jobIds = [
        `tat-threshold1-${requestId}-${levelId}`,
        `tat-threshold2-${requestId}-${levelId}`,
        `tat-breach-${requestId}-${levelId}`
      ];
      for (const jobId of jobIds) {
        try {
          const job = await tatQueue.getJob(jobId);
          if (job) {
            await job.remove();
            logger.info(`[TAT Scheduler] Cancelled job ${jobId}`);
          }
        } catch (error) {
          // Job might not exist, which is fine
          logger.debug(`[TAT Scheduler] Job ${jobId} not found (may have already been processed)`);
        }
      }
      logger.info(`[TAT Scheduler] ✅ TAT jobs cancelled for level ${levelId}`);
    } catch (error) {
      logger.error(`[TAT Scheduler] Failed to cancel TAT jobs:`, error);
      // Don't throw - cancellation failure shouldn't break the workflow
    }
  }
  /**
   * Cancel all TAT jobs for a workflow request
   * Scans delayed/waiting jobs and removes those whose payload matches the
   * request. Never throws: cancellation failure must not break the workflow.
   * @param requestId - The workflow request ID
   */
  async cancelAllTatJobsForRequest(requestId: string): Promise<void> {
    try {
      // Check if tatQueue is available
      if (!tatQueue) {
        logger.warn(`[TAT Scheduler] TAT queue not available. Skipping job cancellation.`);
        return;
      }
      const jobs = await tatQueue.getJobs(['delayed', 'waiting']);
      const requestJobs = jobs.filter(job => job.data.requestId === requestId);
      for (const job of requestJobs) {
        await job.remove();
        logger.info(`[TAT Scheduler] Cancelled job ${job.id}`);
      }
      logger.info(`[TAT Scheduler] ✅ All TAT jobs cancelled for request ${requestId}`);
    } catch (error) {
      logger.error(`[TAT Scheduler] Failed to cancel all TAT jobs:`, error);
      // Don't throw - cancellation failure shouldn't break the workflow
    }
  }
}
export const tatSchedulerService = new TatSchedulerService();

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,446 @@
import { Op } from 'sequelize';
import { WorkNote } from '@models/WorkNote';
import { WorkNoteAttachment } from '@models/WorkNoteAttachment';
import { Participant } from '@models/Participant';
import { WorkflowRequest } from '@models/WorkflowRequest';
import { User } from '@models/User';
import { ApprovalLevel } from '@models/ApprovalLevel';
import { activityService } from './activity.service';
import { notificationService } from './notification.service';
import { emailNotificationService } from './emailNotification.service';
import { gcsStorageService } from './gcsStorage.service';
import logger from '@utils/logger';
import fs from 'fs';
import path from 'path';
/**
 * Work-note (collaboration comment) service for workflow requests.
 *
 * Handles listing notes with their attachments, creating notes (including
 * attachment upload to GCS with local fallback, multi-channel notifications
 * to initiator/approver/spectators, activity logging, realtime emit and
 * mention notifications) and resolving attachment download locations.
 */
export class WorkNoteService {
  /**
   * List all work notes for a request, oldest first, each enriched with its
   * attachments. Field values are normalized from either camelCase or
   * snake_case keys, whichever the model returned.
   */
  async list(requestId: string) {
    const notes = await WorkNote.findAll({
      where: { requestId },
      order: [['created_at' as any, 'ASC']]
    });
    // Load attachments for each note
    const enriched = await Promise.all(notes.map(async (note) => {
      const noteId = (note as any).noteId;
      const attachments = await WorkNoteAttachment.findAll({
        where: { noteId }
      });
      const noteData = (note as any).toJSON();
      const mappedAttachments = attachments.map((a: any) => {
        const attData = typeof a.toJSON === 'function' ? a.toJSON() : a;
        // NOTE(review): `||` fallbacks treat false/0/'' as missing; for the
        // boolean isDownloadable a stored `false` falls through to the
        // snake_case key — harmless only if both spellings carry the same value.
        return {
          attachmentId: attData.attachmentId || attData.attachment_id,
          fileName: attData.fileName || attData.file_name,
          fileType: attData.fileType || attData.file_type,
          fileSize: attData.fileSize || attData.file_size,
          filePath: attData.filePath || attData.file_path,
          storageUrl: attData.storageUrl || attData.storage_url,
          isDownloadable: attData.isDownloadable || attData.is_downloadable,
          uploadedAt: attData.uploadedAt || attData.uploaded_at
        };
      });
      return {
        noteId: noteData.noteId || noteData.note_id,
        requestId: noteData.requestId || noteData.request_id,
        userId: noteData.userId || noteData.user_id,
        userName: noteData.userName || noteData.user_name,
        userRole: noteData.userRole || noteData.user_role,
        message: noteData.message,
        isPriority: noteData.isPriority || noteData.is_priority,
        hasAttachment: noteData.hasAttachment || noteData.has_attachment,
        createdAt: noteData.createdAt || noteData.created_at,
        updatedAt: noteData.updatedAt || noteData.updated_at,
        attachments: mappedAttachments
      };
    }));
    return enriched;
  }
  /**
   * Resolve a user's participant type (e.g. INITIATOR/APPROVER/SPECTATOR)
   * for a request. Falls back to the literal string 'Participant' when the
   * user is not a participant or the lookup fails.
   */
  async getUserRole(requestId: string, userId: string): Promise<string> {
    try {
      const participant = await Participant.findOne({
        where: { requestId, userId }
      });
      if (participant) {
        const type = (participant as any).participantType || (participant as any).participant_type;
        return type ? type.toString() : 'Participant';
      }
      return 'Participant';
    } catch (error) {
      logger.error('[WorkNote] Error fetching user role:', error);
      return 'Participant';
    }
  }
  /**
   * Create a work note, upload any attachments (GCS with local fallback),
   * notify the relevant parties about added documents, log the activity,
   * emit a realtime event and notify mentioned users.
   *
   * @param requestId - Target workflow request
   * @param user - Acting user (id, display name, participant role)
   * @param payload - Note message plus priority/reply/mention metadata
   * @param files - Optional multer-style file objects (disk path or buffer)
   * @param requestMetadata - Optional IP / user-agent for the audit trail
   * @returns The created note spread together with its mapped attachments.
   *          NOTE(review): spreading a Sequelize instance copies instance
   *          internals (e.g. dataValues) rather than flat attributes —
   *          callers likely expect note.toJSON(); confirm consumers.
   */
  async create(requestId: string, user: { userId: string; name?: string; role?: string }, payload: { message: string; isPriority?: boolean; parentNoteId?: string | null; mentionedUsers?: string[] | null; }, files?: Array<{ path?: string | null; buffer?: Buffer; originalname: string; mimetype: string; size: number }>, requestMetadata?: { ipAddress?: string | null; userAgent?: string | null }): Promise<any> {
    logger.info('[WorkNote] Creating note:', { requestId, user, messageLength: payload.message?.length });
    const note = await WorkNote.create({
      requestId,
      userId: user.userId,
      userName: user.name || null,
      userRole: user.role || null, // Store participant type (INITIATOR/APPROVER/SPECTATOR)
      message: payload.message,
      isPriority: !!payload.isPriority,
      parentNoteId: payload.parentNoteId || null,
      mentionedUsers: payload.mentionedUsers || null,
      hasAttachment: files && files.length > 0 ? true : false
    } as any);
    logger.info('[WorkNote] Created note:', {
      noteId: (note as any).noteId,
      userId: (note as any).userId,
      userName: (note as any).userName,
      userRole: (note as any).userRole
    });
    const attachments = [];
    if (files && files.length) {
      // Get request number for folder structure
      const workflow = await WorkflowRequest.findOne({ where: { requestId } });
      const requestNumber = workflow ? ((workflow as any).requestNumber || (workflow as any).request_number) : null;
      for (const f of files) {
        // Read file buffer if path exists, otherwise use provided buffer
        const fileBuffer = f.buffer || (f.path ? fs.readFileSync(f.path) : Buffer.from(''));
        // Upload with automatic fallback to local storage
        // If requestNumber is not available, use a default structure
        const effectiveRequestNumber = requestNumber || 'UNKNOWN';
        const uploadResult = await gcsStorageService.uploadFileWithFallback({
          buffer: fileBuffer,
          originalName: f.originalname,
          mimeType: f.mimetype,
          requestNumber: effectiveRequestNumber,
          fileType: 'attachments'
        });
        const storageUrl = uploadResult.storageUrl;
        const gcsFilePath = uploadResult.filePath;
        // Clean up local temporary file if it exists (from multer disk storage)
        if (f.path && fs.existsSync(f.path)) {
          try {
            fs.unlinkSync(f.path);
          } catch (unlinkError) {
            logger.warn('[WorkNote] Failed to delete local temporary file:', unlinkError);
          }
        }
        const attachment = await WorkNoteAttachment.create({
          noteId: (note as any).noteId,
          fileName: f.originalname,
          fileType: f.mimetype,
          fileSize: f.size,
          filePath: gcsFilePath, // Store GCS path or local path
          storageUrl: storageUrl, // Store GCS URL or local URL
          isDownloadable: true
        } as any);
        attachments.push({
          attachmentId: (attachment as any).attachmentId,
          fileName: (attachment as any).fileName,
          fileType: (attachment as any).fileType,
          fileSize: (attachment as any).fileSize,
          filePath: (attachment as any).filePath,
          storageUrl: (attachment as any).storageUrl,
          isDownloadable: (attachment as any).isDownloadable
        });
      }
      // Send notifications for additional document added via work notes
      if (attachments.length > 0) {
        try {
          const workflow = await WorkflowRequest.findOne({ where: { requestId } });
          if (workflow) {
            const initiatorId = (workflow as any).initiatorId || (workflow as any).initiator_id;
            const isInitiator = user.userId === initiatorId;
            // Get all participants (spectators)
            const spectators = await Participant.findAll({
              where: {
                requestId,
                participantType: 'SPECTATOR'
              },
              include: [{
                model: User,
                as: 'user',
                attributes: ['userId', 'email', 'displayName']
              }]
            });
            // Get current approver (pending or in-progress approval level)
            const currentApprovalLevel = await ApprovalLevel.findOne({
              where: {
                requestId,
                status: { [Op.in]: ['PENDING', 'IN_PROGRESS'] }
              },
              order: [['levelNumber', 'ASC']],
              include: [{
                model: User,
                as: 'approver',
                attributes: ['userId', 'email', 'displayName']
              }]
            });
            // Determine who to notify based on who uploaded
            // (uploader themselves is always excluded below)
            const recipientsToNotify: Array<{ userId: string; email: string; displayName: string }> = [];
            if (isInitiator) {
              // Initiator added → notify spectators and current approver
              spectators.forEach((spectator: any) => {
                const spectatorUser = spectator.user || spectator.User;
                if (spectatorUser && spectatorUser.userId !== user.userId) {
                  recipientsToNotify.push({
                    userId: spectatorUser.userId,
                    email: spectatorUser.email,
                    displayName: spectatorUser.displayName || spectatorUser.email
                  });
                }
              });
              if (currentApprovalLevel) {
                const approverUser = (currentApprovalLevel as any).approver || (currentApprovalLevel as any).Approver;
                if (approverUser && approverUser.userId !== user.userId) {
                  recipientsToNotify.push({
                    userId: approverUser.userId,
                    email: approverUser.email,
                    displayName: approverUser.displayName || approverUser.email
                  });
                }
              }
            } else {
              // Check if uploader is a spectator
              const uploaderParticipant = await Participant.findOne({
                where: {
                  requestId,
                  userId: user.userId,
                  participantType: 'SPECTATOR'
                }
              });
              if (uploaderParticipant) {
                // Spectator added → notify initiator and current approver
                const initiator = await User.findByPk(initiatorId);
                if (initiator) {
                  const initiatorData = initiator.toJSON();
                  if (initiatorData.userId !== user.userId) {
                    recipientsToNotify.push({
                      userId: initiatorData.userId,
                      email: initiatorData.email,
                      displayName: initiatorData.displayName || initiatorData.email
                    });
                  }
                }
                if (currentApprovalLevel) {
                  const approverUser = (currentApprovalLevel as any).approver || (currentApprovalLevel as any).Approver;
                  if (approverUser && approverUser.userId !== user.userId) {
                    recipientsToNotify.push({
                      userId: approverUser.userId,
                      email: approverUser.email,
                      displayName: approverUser.displayName || approverUser.email
                    });
                  }
                }
              } else {
                // Approver added → notify initiator and spectators
                const initiator = await User.findByPk(initiatorId);
                if (initiator) {
                  const initiatorData = initiator.toJSON();
                  if (initiatorData.userId !== user.userId) {
                    recipientsToNotify.push({
                      userId: initiatorData.userId,
                      email: initiatorData.email,
                      displayName: initiatorData.displayName || initiatorData.email
                    });
                  }
                }
                spectators.forEach((spectator: any) => {
                  const spectatorUser = spectator.user || spectator.User;
                  if (spectatorUser && spectatorUser.userId !== user.userId) {
                    recipientsToNotify.push({
                      userId: spectatorUser.userId,
                      email: spectatorUser.email,
                      displayName: spectatorUser.displayName || spectatorUser.email
                    });
                  }
                });
              }
            }
            // Send notifications (email, in-app, and web-push)
            const requestNumber = (workflow as any).requestNumber || requestId;
            const requestData = {
              requestNumber: requestNumber,
              requestId: requestId,
              title: (workflow as any).title || 'Request'
            };
            // Prepare user IDs for in-app and web-push notifications
            const recipientUserIds = recipientsToNotify.map(r => r.userId);
            // Send in-app and web-push notifications for each attachment
            if (recipientUserIds.length > 0 && attachments.length > 0) {
              try {
                for (const attachment of attachments) {
                  await notificationService.sendToUsers(
                    recipientUserIds,
                    {
                      title: 'Additional Document Added',
                      body: `${user.name || 'User'} added "${attachment.fileName}" to ${requestNumber}`,
                      requestId,
                      requestNumber,
                      url: `/request/${requestNumber}`,
                      type: 'document_added',
                      priority: 'MEDIUM',
                      actionRequired: false,
                      metadata: {
                        documentName: attachment.fileName,
                        fileSize: attachment.fileSize,
                        addedByName: user.name || 'User',
                        source: 'Work Notes'
                      }
                    }
                  );
                }
                logger.info('[WorkNote] In-app and web-push notifications sent for additional documents', {
                  requestId,
                  attachmentsCount: attachments.length,
                  recipientsCount: recipientUserIds.length
                });
              } catch (notifyError) {
                logger.error('[WorkNote] Failed to send in-app/web-push notifications for additional documents:', notifyError);
              }
            }
            // Send email notifications for each attachment
            // (sequential on purpose; emails are sent one recipient at a time)
            for (const attachment of attachments) {
              for (const recipient of recipientsToNotify) {
                await emailNotificationService.sendAdditionalDocumentAdded(
                  requestData,
                  recipient,
                  {
                    documentName: attachment.fileName,
                    fileSize: attachment.fileSize,
                    addedByName: user.name || 'User',
                    source: 'Work Notes'
                  }
                );
              }
            }
            logger.info('[WorkNote] Additional document notifications sent', {
              requestId,
              attachmentsCount: attachments.length,
              recipientsCount: recipientsToNotify.length,
              isInitiator
            });
          }
        } catch (notifyError) {
          // Don't fail work note creation if notifications fail
          logger.error('[WorkNote] Failed to send additional document notifications:', notifyError);
        }
      }
    }
    // Log activity for work note (fire-and-forget; not awaited)
    activityService.log({
      requestId,
      type: 'comment',
      user: { userId: user.userId, name: user.name || 'User' },
      timestamp: new Date().toISOString(),
      action: 'Work Note Added',
      details: `${user.name || 'User'} added a work note: ${payload.message.substring(0, 100)}${payload.message.length > 100 ? '...' : ''}`,
      ipAddress: requestMetadata?.ipAddress || undefined,
      userAgent: requestMetadata?.userAgent || undefined
    });
    try {
      // Optional realtime emit (if socket layer is initialized)
      const { emitToRequestRoom } = require('../realtime/socket');
      if (emitToRequestRoom) {
        // Emit note with all fields explicitly (to ensure camelCase fields are sent)
        const noteData = {
          noteId: (note as any).noteId,
          requestId: (note as any).requestId,
          userId: (note as any).userId,
          userName: (note as any).userName,
          userRole: (note as any).userRole, // Include participant role
          message: (note as any).message,
          createdAt: (note as any).createdAt,
          hasAttachment: (note as any).hasAttachment,
          attachments: attachments // Include attachments
        };
        emitToRequestRoom(requestId, 'worknote:new', { note: noteData });
      }
    } catch (e) { logger.warn('Realtime emit failed (not initialized)'); }
    // Send notifications to mentioned users
    if (payload.mentionedUsers && Array.isArray(payload.mentionedUsers) && payload.mentionedUsers.length > 0) {
      try {
        // Get workflow details for request number and title
        const workflow = await WorkflowRequest.findOne({ where: { requestId } });
        const requestNumber = (workflow as any)?.requestNumber || requestId;
        const requestTitle = (workflow as any)?.title || 'Request';
        logger.info(`[WorkNote] Sending mention notifications to ${payload.mentionedUsers.length} users`);
        await notificationService.sendToUsers(
          payload.mentionedUsers,
          {
            title: '💬 Mentioned in Work Note',
            body: `${user.name || 'Someone'} mentioned you in ${requestNumber}: "${payload.message.substring(0, 50)}${payload.message.length > 50 ? '...' : ''}"`,
            requestId,
            requestNumber,
            url: `/request/${requestNumber}`,
            type: 'mention'
          }
        );
        logger.info(`[WorkNote] Mention notifications sent successfully`);
      } catch (notifyError) {
        logger.error('[WorkNote] Failed to send mention notifications:', notifyError);
        // Don't fail the work note creation if notifications fail
      }
    }
    return { ...note, attachments };
  }
  /**
   * Resolve download metadata for a work-note attachment.
   *
   * @param attachmentId - Primary key of the attachment row
   * @returns File path, storage URL, name, MIME type and whether the URL
   *          points at Google Cloud Storage (https://storage.googleapis.com
   *          or gs:// scheme).
   * @throws Error('Attachment not found') when no row matches.
   */
  async downloadAttachment(attachmentId: string) {
    const attachment = await WorkNoteAttachment.findOne({
      where: { attachmentId }
    });
    if (!attachment) {
      throw new Error('Attachment not found');
    }
    const storageUrl = (attachment as any).storageUrl || (attachment as any).storage_url;
    const filePath = (attachment as any).filePath || (attachment as any).file_path;
    const fileName = (attachment as any).fileName || (attachment as any).file_name;
    const fileType = (attachment as any).fileType || (attachment as any).file_type;
    // Check if it's a GCS URL
    const isGcsUrl = storageUrl && (storageUrl.startsWith('https://storage.googleapis.com') || storageUrl.startsWith('gs://'));
    return {
      filePath: filePath,
      storageUrl: storageUrl,
      fileName: fileName,
      fileType: fileType,
      isGcsUrl: isGcsUrl
    };
  }
}
export const workNoteService = new WorkNoteService();

View File

@ -382,4 +382,222 @@ report_cache {
%% 7. Multi-channel notifications (in-app, email, SMS, push)
%% 8. TAT thresholds: 50%, 80%, 100%
%% 9. Max approval levels: 10
%% 10. Max file size: 10 MB
%% 10. Max file size: 10 MB
erDiagram
workflow_requests ||--|| dealer_claim_details : "has_claim_details"
workflow_requests ||--o{ dealer_claim_history : "has_claim_history"
workflow_requests ||--|| dealer_proposal_details : "has_proposal"
workflow_requests ||--|| dealer_completion_details : "has_completion"
workflow_requests ||--|| claim_budget_tracking : "tracks_budget"
workflow_requests ||--|| internal_orders : "has_io"
workflow_requests ||--o{ claim_invoices : "has_invoices"
workflow_requests ||--o{ claim_credit_notes : "has_credit_notes"
workflow_requests ||--o{ tat_alerts : "triggers_alerts"
workflow_requests ||--|| request_summaries : "has_summary"
dealer_proposal_details ||--o{ dealer_proposal_cost_items : "has_items"
dealer_completion_details ||--o{ dealer_completion_expenses : "has_expenses"
claim_invoices ||--o{ claim_credit_notes : "has_credit_notes"
request_summaries ||--o{ shared_summaries : "shared_as"
users ||--o{ shared_summaries : "shares"
users ||--o{ subscriptions : "has_subscription"
users ||--o{ holidays : "creates"
users ||--o{ activity_types : "creates"
dealers {
uuid dealer_id PK
varchar sales_code
varchar service_code
varchar dealer_name
varchar region
varchar state
varchar city
varchar location
boolean is_active
timestamp created_at
timestamp updated_at
}
dealer_claim_details {
uuid claim_id PK
uuid request_id FK
varchar activity_name
varchar activity_type
varchar dealer_code
varchar dealer_name
date activity_date
date period_start_date
date period_end_date
timestamp created_at
timestamp updated_at
}
dealer_claim_history {
uuid history_id PK
uuid request_id FK
uuid approval_level_id FK
integer version
enum snapshot_type
jsonb snapshot_data
text change_reason
uuid changed_by FK
timestamp created_at
}
dealer_proposal_details {
uuid proposal_id PK
uuid request_id FK
varchar proposal_document_path
decimal total_estimated_budget
date expected_completion_date
text dealer_comments
timestamp submitted_at
timestamp created_at
timestamp updated_at
}
dealer_proposal_cost_items {
uuid cost_item_id PK
uuid proposal_id FK
uuid request_id FK
varchar item_description
decimal amount
integer item_order
timestamp created_at
timestamp updated_at
}
dealer_completion_details {
uuid completion_id PK
uuid request_id FK
date activity_completion_date
integer number_of_participants
decimal total_closed_expenses
timestamp submitted_at
timestamp created_at
timestamp updated_at
}
dealer_completion_expenses {
uuid expense_id PK
uuid completion_id FK
uuid request_id FK
varchar description
decimal amount
timestamp created_at
timestamp updated_at
}
claim_budget_tracking {
uuid budget_id PK
uuid request_id FK
decimal initial_estimated_budget
decimal proposal_estimated_budget
decimal approved_budget
decimal io_blocked_amount
decimal closed_expenses
decimal final_claim_amount
decimal credit_note_amount
enum budget_status
timestamp created_at
timestamp updated_at
}
claim_invoices {
uuid invoice_id PK
uuid request_id FK
varchar invoice_number
date invoice_date
decimal amount
varchar status
timestamp created_at
timestamp updated_at
}
claim_credit_notes {
uuid credit_note_id PK
uuid request_id FK
uuid invoice_id FK
varchar credit_note_number
decimal credit_note_amount
varchar status
timestamp created_at
timestamp updated_at
}
internal_orders {
uuid io_id PK
uuid request_id FK
varchar io_number
decimal io_available_balance
decimal io_blocked_amount
enum status
timestamp created_at
timestamp updated_at
}
holidays {
uuid holiday_id PK
date holiday_date
varchar holiday_name
enum holiday_type
boolean is_active
uuid created_by FK
timestamp created_at
timestamp updated_at
}
activity_types {
uuid activity_type_id PK
varchar title
varchar item_code
varchar taxation_type
boolean is_active
uuid created_by FK
timestamp created_at
timestamp updated_at
}
tat_alerts {
uuid alert_id PK
uuid request_id FK
uuid level_id FK
uuid approver_id FK
enum alert_type
boolean is_breached
timestamp alert_sent_at
timestamp created_at
}
request_summaries {
uuid summary_id PK
uuid request_id FK
uuid initiator_id FK
varchar title
text description
text closing_remarks
boolean is_ai_generated
timestamp created_at
timestamp updated_at
}
shared_summaries {
uuid shared_summary_id PK
uuid summary_id FK
uuid shared_by FK
uuid shared_with FK
boolean is_read
timestamp shared_at
timestamp created_at
}
subscriptions {
uuid subscription_id PK
uuid user_id FK
varchar endpoint
varchar p256dh
varchar auth
timestamp created_at
}

43
debug-finalize.ts Normal file
View File

@ -0,0 +1,43 @@
import mongoose from 'mongoose';
import dotenv from 'dotenv';
import dns from 'dns';
import { WorkflowRequestModel } from './src/models/mongoose/WorkflowRequest.schema';
dotenv.config();
/**
 * Debug utility: connects to MongoDB and prints the last 10 workflow
 * requests that were finalized (have a conclusion remark) or closed.
 * Exits with code 0 on success and 1 on any failure.
 */
async function check() {
  try {
    const uri = process.env.MONGO_URI || process.env.MONGODB_URL;
    if (!uri) {
      console.error('MONGO_URI not found in .env');
      process.exit(1);
    }
    // mongodb+srv URIs need SRV/TXT DNS lookups; pin well-known public
    // resolvers so flaky local DNS does not break the connection.
    if (uri.startsWith('mongodb+srv://')) {
      dns.setServers(['8.8.8.8', '8.8.4.4', '1.1.1.1', '1.0.0.1']);
    }
    await mongoose.connect(uri);
    console.log('✅ Connected to MongoDB');
    // Finalized (conclusion remark present) OR closed, newest first.
    const finalizedOrClosed = await WorkflowRequestModel
      .find({
        $or: [
          { conclusionRemark: { $exists: true, $ne: null } },
          { workflowState: 'CLOSED' }
        ]
      })
      .sort({ updatedAt: -1 })
      .limit(10);
    console.log('Results (Last 10 finalized/closed):');
    for (const r of finalizedOrClosed) {
      console.log(`- REQ: ${r.requestNumber}, Status: ${r.status}, State: ${r.workflowState}, HasRemark: ${!!r.conclusionRemark}`);
    }
    process.exit(0);
  } catch (error) {
    console.error('Check failed:', error);
    process.exit(1);
  }
}
check();

310
docs/DATABASE_SCHEMA.md Normal file
View File

@ -0,0 +1,310 @@
# Database Schema Documentation
## 1. Overview
This document provides a detailed reference for the backend database schema of the Royal Enfield Workflow Management System.
**Database System:** PostgreSQL 16.x
**Schema Conventions:**
* **Primary Keys:** UUID (v4) for all tables.
* **Naming:** snake_case for tables and columns.
* **Audit Columns:** Most tables include `created_at`, `updated_at`, `created_by`, `updated_by`.
* **Soft Deletes:** `is_deleted` flag used on critical entities.
## 2. Architecture Diagrams (A4 Optimized)
### 2.1. Core Workflow Architecture
Focuses on the request lifecycle, approval chains, and direct interactions.
```mermaid
erDiagram
users ||--o{ workflow_requests : "initiates"
users ||--o{ approval_levels : "approves"
users ||--o{ participants : "collaborates"
workflow_requests ||--|{ approval_levels : "has_steps"
workflow_requests ||--o{ participants : "has_users"
workflow_requests ||--o{ documents : "contains"
workflow_requests ||--o{ work_notes : "discussions"
workflow_requests ||--o{ activities : "audit_trail"
workflow_templates ||--o{ workflow_requests : "spawns"
workflow_requests ||--|| conclusion_remarks : "finalizes"
workflow_requests {
uuid request_id PK
varchar request_number
enum status
integer current_level
}
approval_levels {
uuid level_id PK
integer level_number
enum status
uuid approver_id FK
}
```
### 2.2. Business Domain Data
Focuses on the specific data payloads (Dealers, Finance, Claims) attached to requests.
```mermaid
erDiagram
workflow_requests ||--o{ dealers : "context"
workflow_requests ||--|| dealer_claim_details : "claim_data"
workflow_requests ||--|| dealer_proposal_details : "proposal"
workflow_requests ||--|| dealer_completion_details : "evidence"
workflow_requests ||--o{ dealer_claim_history : "versions"
workflow_requests ||--|| claim_budget_tracking : "financials"
workflow_requests ||--|| internal_orders : "sap_ref"
workflow_requests ||--o{ claim_invoices : "billing"
claim_invoices ||--o{ claim_credit_notes : "adjustments"
dealer_claim_details {
uuid claim_id PK
varchar activity_type
}
claim_budget_tracking {
decimal approved_budget
decimal final_claim_amount
}
```
### 2.3. System Support Services
Focuses on cross-cutting concerns like logging, notifications, and monitoring.
```mermaid
erDiagram
users ||--o{ notifications : "receives"
users ||--o{ system_settings : "configures"
users ||--o{ audit_logs : "actions"
workflow_requests ||--o{ notifications : "triggers"
workflow_requests ||--o{ tat_tracking : "monitors_sla"
workflow_requests ||--o{ tat_alerts : "sla_breaches"
workflow_requests ||--o{ request_summaries : "ai_summary"
workflow_requests ||--o{ report_cache : "reporting"
notifications ||--o{ email_logs : "outbound"
notifications ||--o{ sms_logs : "outbound"
tat_tracking {
decimal total_tat_hours
boolean threshold_breached
}
```
## 3. Schema Modules
### 3.1. User & Authentication Module
Manages user identities, sessions, and system-wide configurations.
```mermaid
erDiagram
users ||--o{ user_sessions : "has"
users ||--o{ subscriptions : "has_device"
users ||--o{ system_settings : "modifies"
users {
uuid user_id PK
varchar employee_id
varchar email
varchar display_name
enum role
boolean is_active
}
user_sessions {
uuid session_id PK
uuid user_id FK
varchar session_token
timestamp expires_at
}
subscriptions {
uuid subscription_id PK
uuid user_id FK
varchar endpoint
}
```
#### Tables
**`users`**
Core user registry, synced with Okta/HRMS.
* `user_id` (PK): Unique UUID.
* `employee_id` (Unique): HR system ID.
* `email` (Unique): Official email address.
* `role`: RBAC role (USER, ADMIN, etc.).
* `is_active`: Soft delete/account link status.
**`user_sessions`**
Active JWT sessions for invalidation/tracking.
* `session_token`: The JWT access token.
* `refresh_token`: For renewing access tokens.
* `device_type`: Web/Mobile classification.
**`system_settings`**
Dynamic configuration (e.g., global TAT thresholds).
* `setting_key` (Unique): Config identifier name.
* `setting_value`: The value (text/json).
---
### 3.2. Workflow Engine Module
The core engine driving request lifecycles, approvals, and tracking.
```mermaid
erDiagram
workflow_requests ||--|{ approval_levels : "steps"
workflow_requests ||--o{ activities : "events"
workflow_requests ||--|{ participants : "access"
workflow_templates ||--o{ workflow_requests : "spawns"
workflow_requests {
uuid request_id PK
varchar request_number
enum status
uuid initiator_id FK
}
approval_levels {
uuid level_id PK
uuid request_id FK
integer level_number
enum status
uuid approver_id FK
}
```
#### Tables
**`workflow_requests`**
The central entity representing a business process instance.
* `request_number`: Human-readable ID (e.g., REQ-2024-001).
* `current_level`: Pointer to the active approval step.
* `status`: DRAFT, PENDING, APPROVED, REJECTED, CLOSED.
**`approval_levels`**
Defines the sequence of approvers for a request.
* `level_number`: Sequence index (1, 2, 3...).
* `approver_id`: User responsible for this step.
* `tat_hours`: SLA for this specific step.
* `status`: PENDING, APPROVED, REJECTED.
**`participants`**
Users with visibility/access to the request (spectators, contributors).
* `participant_type`: SPECTATOR, CONTRIBUTOR.
* `can_comment`, `can_view_documents`: Granular permissions.
**`activities`**
Audit trail of all actions performed on a request.
* `activity_type`: CREATED, APPROVED, COMMENTED, FILE_UPLOADED.
* `metadata`: JSON payload with specific details of the event.
**`workflow_templates`**
Blueprints for creating new requests.
* `approval_levels_config`: JSON defining the default approver chain structure.
---
### 3.3. Dealer Management Module
Stores specific data related to dealer claims, onboardings, and performance.
```mermaid
erDiagram
workflow_requests ||--|| dealer_claim_details : "details"
workflow_requests ||--|| dealer_proposal_details : "proposal"
workflow_requests ||--|| dealer_completion_details : "completion"
workflow_requests ||--o{ dealer_claim_history : "versions"
workflow_requests ||--o{ dealers : "related_to"
dealers {
uuid dealer_id PK
varchar dealer_name
varchar sales_code
}
```
#### Tables
**`dealers`**
Master data for dealerships.
* `sales_code`, `service_code`: Dealer unique identifiers.
* `dealer_name`, `region`, `city`: Location details.
**`dealer_claim_details`**
Specific attributes for a Dealer Claim request.
* `activity_name`, `activity_type`: Marketing/Sales activity details.
* `period_start_date`, `period_end_date`: Duration of the claim activity.
**`dealer_proposal_details`**
Stores the initial proposal data for a claim.
* `total_estimated_budget`: The proposed validation amount.
* `proposal_document_url`: Link to the uploaded proposal PDF/Doc.
**`dealer_claim_history`**
Snapshots of the claim data at various approval stages.
* `snapshot_data`: JSON dump of the claim state.
* `version`: Incremental version number.
---
### 3.4. Financial Module
Manages budgeting, internal orders, and invoicing.
```mermaid
erDiagram
workflow_requests ||--|| claim_budget_tracking : "budget"
workflow_requests ||--|| internal_orders : "io"
workflow_requests ||--o{ claim_invoices : "invoices"
claim_invoices ||--o{ claim_credit_notes : "credit_notes"
```
#### Tables
**`claim_budget_tracking`**
Central ledger for a request's financial lifecycle.
* `initial_estimated_budget`: Original requested amount.
* `approved_budget`: Validated amount after approvals.
* `io_blocked_amount`: Amount reserved in SAP.
* `final_claim_amount`: Actual payout amount.
**`internal_orders`**
SAP Internal Order references.
* `io_number`: The IO code from SAP.
* `io_available_balance`, `io_blocked_amount`: Balance tracking.
**`claim_invoices`**
Invoices submitted against the claim.
* `invoice_number`: Vendor invoice ID.
* `amount`: Invoice value.
* `dms_number`: Document Management System reference.
**`claim_credit_notes`**
Adjustments/Returns linked to invoices.
* `credit_note_amount`: Value to be deducted/adjusted.
---
### 3.5. Ancillary Modules
Support functions like notifications, tracking, and logs.
#### Tables
**`notifications`**
User alerts.
* `is_read`: Read status.
* `action_url`: Deep link to the relevant request.
**`tat_tracking`**
Turnaround Time monitoring.
* `tracking_type`: REQUEST (overall) or LEVEL (step-specific).
* `total_tat_hours`: The allowed time.
* `elapsed_hours`: Time consumed so far.
* `breached_flags`: `threshold_50_breached`, etc.
**`tat_alerts`**
Logs of TAT breach notifications sent.
* `alert_type`: TAT_50, TAT_75, TAT_100.
* `is_breached`: Confirmed breach status.
**`request_summaries`**
AI or manually generated summaries of complex requests.
* `is_ai_generated`: Origin flag.
* `description`, `closing_remarks`: Narrative text.

View File

@ -24,12 +24,19 @@ erDiagram
workflow_requests ||--|| claim_invoices : claim_invoice
workflow_requests ||--|| claim_credit_notes : claim_credit_note
work_notes ||--o{ work_note_attachments : has
notifications ||--o{ email_logs : sends
notifications ||--o{ sms_logs : sends
workflow_requests ||--o{ report_cache : caches
workflow_requests ||--o{ audit_logs : audits
workflow_requests ||--o{ workflow_templates : templates
users ||--o{ system_settings : updates
workflow_requests ||--o{ dealer_claim_history : has_history
workflow_requests ||--o{ tat_alerts : triggers
workflow_requests ||--|| request_summaries : summarizes
request_summaries ||--o{ shared_summaries : shared_as
users ||--o{ shared_summaries : shares
users ||--o{ subscriptions : has_device
users ||--o{ holidays : manages
users ||--o{ activity_types : manages
users {
uuid user_id PK
@ -286,46 +293,7 @@ erDiagram
varchar logout_reason
}
email_logs {
uuid email_log_id PK
uuid request_id FK
uuid notification_id FK
varchar recipient_email
uuid recipient_user_id FK
text[] cc_emails
text[] bcc_emails
varchar subject
text body
varchar email_type
varchar status
integer send_attempts
timestamp sent_at
timestamp failed_at
text failure_reason
timestamp opened_at
timestamp clicked_at
timestamp created_at
}
sms_logs {
uuid sms_log_id PK
uuid request_id FK
uuid notification_id FK
varchar recipient_phone
uuid recipient_user_id FK
text message
varchar sms_type
varchar status
integer send_attempts
timestamp sent_at
timestamp delivered_at
timestamp failed_at
text failure_reason
varchar sms_provider
varchar sms_provider_message_id
decimal cost
timestamp created_at
}
system_settings {
uuid setting_id PK
@ -505,3 +473,94 @@ erDiagram
timestamp updated_at
}
dealers {
uuid dealer_id PK
varchar sales_code
varchar service_code
varchar dealer_name
varchar region
varchar state
varchar city
varchar location
boolean is_active
timestamp created_at
timestamp updated_at
}
dealer_claim_history {
uuid history_id PK
uuid request_id FK
uuid approval_level_id FK
integer version
enum snapshot_type
jsonb snapshot_data
text change_reason
uuid changed_by FK
timestamp created_at
}
holidays {
uuid holiday_id PK
date holiday_date
varchar holiday_name
enum holiday_type
boolean is_active
uuid created_by FK
timestamp created_at
timestamp updated_at
}
activity_types {
uuid activity_type_id PK
varchar title
varchar item_code
varchar taxation_type
boolean is_active
uuid created_by FK
timestamp created_at
timestamp updated_at
}
tat_alerts {
uuid alert_id PK
uuid request_id FK
uuid level_id FK
uuid approver_id FK
enum alert_type
boolean is_breached
timestamp alert_sent_at
timestamp created_at
}
request_summaries {
uuid summary_id PK
uuid request_id FK
uuid initiator_id FK
varchar title
text description
text closing_remarks
boolean is_ai_generated
timestamp created_at
timestamp updated_at
}
shared_summaries {
uuid shared_summary_id PK
uuid summary_id FK
uuid shared_by FK
uuid shared_with FK
boolean is_read
timestamp shared_at
timestamp created_at
}
subscriptions {
uuid subscription_id PK
uuid user_id FK
varchar endpoint
varchar p256dh
varchar auth
timestamp created_at
}

View File

@ -0,0 +1,41 @@
# MongoDB Atlas v8.0 Readiness Update
**Date**: 2026-02-05
**Project**: Royal Enfield Workflow Management System
**Subject**: Technical Audit and Readiness for MongoDB v8.0 Upgrade
## Executive Summary
Following a comprehensive technical audit of the Workflow Management System backend, we have confirmed that the application layer is fully compatible with MongoDB Atlas v8.0. The current stack (Node.js 22, Mongoose 9) is optimized for the v8 engine, and the codebase has been verified to be free of any deprecated legacy features.
## 💻 Tech Stack Compatibility
| Component | Version | Readiness Status |
| :--- | :--- | :--- |
| **Node.js Runtime** | v22.x | Fully Compatible |
| **Mongoose ODM** | v9.1.5 | Native v8.0 Support |
| **Connection Driver** | MongoDB Node.js Driver v6+ equivalent | Verified |
## 🔍 Codebase Audit Results
### 1. Feature Deprecation Check
We have verified that the following legacy features, removed in v8.0, are **not used** in our codebase:
- **Map-Reduce**: All reporting and KPI logic has been migrated to the modern Aggregation Pipeline.
- **Legacy Group Command**: Using `$group` within aggregation pipelines instead.
- **$where Operator**: All dynamic queries have been refactored to use `$expr` or standard filters to improve performance and security.
- **geoHaystack Indexes**: Not utilized in the project.
### 2. Connection Strategy
Our connection logic is designed for resilient SRV connectivity:
- Implements DNS resolution workarounds for reliable Atlas SRV lookups.
- Configured with robust timeout and selection parameters.
## 🚀 Post-Upgrade Optimization Roadmap
Once the cluster is upgraded to v8.0, the application team recommends the following optimizations:
1. **Atlas Search Integration**: Migrate full-text search requirements from standard regex to Lucene-based Atlas Search.
2. **Encryption**: Evaluate **Queryable Encryption** for enhanced protection of sensitive workflow data.
3. **Performance Advisor**: Review Atlas Performance Advisor recommendations for any new compound index opportunities enabled by the v8 engine's improved query optimizer.
## ✅ Conclusion
The application is **ready for upgrade**. No blockers have been identified in the current production codebase.

View File

@ -0,0 +1,61 @@
# Implementation Plan: Status Ambiguity Refinement
This document outlines the specific code changes required to implement the **Dual-Key Status Architecture**.
## 1. Goal
Decouple the business outcome (Approved/Rejected) from the lifecycle state (Open/Closed/Draft) to ensure transparency in finalized requests.
## 2. Schema Changes
### `WorkflowRequest.schema.ts`
- **Update `status` Enum**: Remove `CLOSED` and `CANCELLED`.
- **Add `workflowState`**:
- Type: `String`
- Enum: `['DRAFT', 'OPEN', 'CLOSED']`
- Default: `'DRAFT'`
- Index: `true`
## 3. Logic Updates
### A. Workflow Creation (`WorkflowService.createWorkflow`)
- Initialize `status: 'DRAFT'`.
- Initialize `workflowState: 'DRAFT'`.
- Set `isDraft: true`.
### B. Workflow Submission (`WorkflowService.submitRequest`)
- Update `status: 'PENDING'`.
- Update `workflowState: 'OPEN'`.
- Set `isDraft: false`.
### C. Approval/Rejection (`WorkflowService`)
- When approved at a level: Keep `status` as `IN_PROGRESS` or set to `APPROVED` if final.
- When rejected: Set `status` to `REJECTED`.
- **Crucial**: The `workflowState` remains `OPEN` during these actions.
### D. Finalization (`ConclusionController.finalizeConclusion`)
- **Current Behavior**: Sets `status = 'CLOSED'`.
- **New Behavior**:
- Sets `workflowState = 'CLOSED'`.
- **Does NOT** change `status`. The `status` will remain `APPROVED` or `REJECTED`.
- Sets `closureDate = new Date()`.
### E. Pause Logic (`PauseMongoService`)
- Set `status = 'PAUSED'`.
- Set `isPaused = true`.
- Keep `workflowState = 'OPEN'`.
## 4. Dashboard & KPI Updates (`DashboardMongoService`)
### `getRequestStats`
- Update the aggregation pipeline to group by `workflowState`.
- `OPEN` category will now include all requests where `workflowState == 'OPEN'`.
- `CLOSED` category will now include all requests where `workflowState == 'CLOSED'`.
- This ensures that a "Closed" count on the dashboard includes both Approved and Rejected requests that have been finalized.
### `getTATEfficiency`
- Update match criteria to `workflowState: 'CLOSED'` instead of `status: 'CLOSED'`.
## 5. Filter Alignment (`listWorkflowsInternal`)
- Update the status filter to handle the new field mapping.
- If user filters by `status: 'CLOSED'`, the query will target `workflowState: 'CLOSED'`.
- If user filters by `status: 'APPROVED'`, the query will target `status: 'APPROVED'`.

View File

@ -0,0 +1,113 @@
# Why PostgreSQL Wins for "Royal Enfield Workflow"
## Executive Summary
For "Royal Enfield Workflow", **PostgreSQL is superior to MongoDB**.
The decision rests on **Reporting Speed** and **Deep Filtering capabilities**. Your workflow requires filtering by *Relationships* (Approvers, Departments), not just static data.
---
## 1. Complex Workflow Filters (The "My Tasks" Problem)
Users need specific views like "Requests waiting for me" or "Paused requests".
### A. "Requests Open For Me" (The Join Filter)
*Scenario: Show all requests where **I am the current approver**.*
#### PostgreSQL (Simple SQL `JOIN`)
Index usage is perfect. The database jumps straight to the few rows in `approval_levels` assigned to you.
```sql
SELECT r.id, r.status, r.created_at
FROM workflow_requests r
JOIN approval_levels al ON r.id = al.request_id
WHERE al.approver_id = 'USER_UUID_123'
AND al.status = 'PENDING'
ORDER BY r.created_at DESC;
```
#### MongoDB (Array Query + Sort Issue)
You must index inside an array. If you sort by "Date", Mongo often cannot use the index effectively for both the *array match* and the *sort*, leading to slow scans.
```javascript
db.requests.find({
"approvers": {
$elemMatch: {
userId: "USER_UUID_123",
status: "PENDING"
}
}
}).sort({ createdAt: -1 });
// WARNING: Performance degrades heavily if user has many historical requests
```
### B. "Paused & Resumed" History
*Scenario: Show requests that were previously Paused but are now Active (requires checking history).*
#### PostgreSQL (Audit Log Join)
You query the history table directly without loading the main request data until the match is found.
```sql
SELECT DISTINCT r.*
FROM workflow_requests r
JOIN audit_logs log ON r.id = log.request_id
WHERE log.action = 'PAUSED'
AND r.status = 'IN_PROGRESS';
```
#### MongoDB (The "Lookup" or "Bloat" Trade-off)
**Option 1: Lookups (Slow)**
You have to join the separate `audit_logs` collection for every request.
```javascript
db.requests.aggregate([
{ $match: { status: "IN_PROGRESS" } },
{
$lookup: {
from: "audit_logs",
localField: "_id",
foreignField: "requestId",
as: "history"
}
},
{ $match: { "history.action": "PAUSED" } }
]);
```
**Option 2: Embedding (Bloated)**
You store every log inside the Request document.
* *Result*: Your generic `db.requests.find({})` becomes 10x slower because it's dragging megabytes of history logs across the network for every result.
## 2. The Filter Nightmare: "Deep Filtering"
Users expect to slice-and-dice data freely. *Example: "Show requests initiated by users in the 'Sales' Department".*
* **Postgres (Cross-Table Filter)**:
```sql
SELECT * FROM workflow_requests r
JOIN users u ON r.initiator_id = u.id
WHERE u.department = 'Sales'
```
* **Result**: Instant. SQL simply filters the `users` table first (using an index on `department`) and then grabs the matching requests.
* **MongoDB (The "Lookup" Trap)**:
* `Department` is stored on the **User** document, not the Request.
* To filter Requests by "Department", you must `$lookup` (join) the User collection for *every single request* before you can filter them.
* *Alternative*: Copy `department` into every Request document.
* *Maintenance Cost*: If a user transfers from 'Sales' to 'Marketing', you must run a script to update all their historical requests, or your reports will be wrong.
## 3. Dashboard: The "Aggregation" Bottleneck
Your dashboard provides real-time insights (e.g., "Approver Efficiency," "TAT per Region").
* **Window Functions (SQL Superpower)**:
* *Requirement*: Rank dealers by "Average Approval Time" compared to their peers.
* *Postgres*: `RANK() OVER (PARTITION BY region ORDER BY avg_tat)` runs natively and instantly.
* *MongoDB*: Requires complex Aggregation Pipelines (`$setWindowFields`) that are memory-intensive and harder to optimize.
## 4. Audit & Compliance
* **Postgres**: Foreign Key constraints prevent "Orphaned Logs." You cannot delete a User if they are referenced in an Audit Log. This guarantees **legal traceability**.
* **MongoDB**: No constraints. Deleting a user can leave "Ghost Logs" (Referencing a null ID), breaking compliance reports.
## Summary Verdict
| Feature | PostgreSQL | MongoDB |
| :--- | :--- | :--- |
| **"Open For Me"** | **Simple Join** | **Complex Array Indexing** |
| **Dept/Region Filters** | **Simple Join** | **Slow Lookup** or **Duplicated Data** |
| **Ad-Hoc Reports** | **Flexible** | **Rigid** (Needs Indexes) |
| **Audit Compliance** | **Guaranteed** | **Risk of Orphaned Data** |
**Recommendation**: Stick with PostgreSQL.
The "Relational" nature of your reporting (Connecting Requests -> Users -> Departments -> Regions) is exactly what SQL was built to solve efficiently.

View File

@ -0,0 +1,55 @@
# Dual-Key Status Architecture
This document defines the status management system for the Royal Enfield Workflow application. It uses a "Dual-Key" approach to resolve ambiguity between request lifecycles and business outcomes.
## 1. Core Concepts
| Key | Purpose | Possible Values |
| :--- | :--- | :--- |
| **`status`** | **Business Outcome**. Tells you *what* happened or the current granular action. | `DRAFT`, `PENDING`, `IN_PROGRESS`, `APPROVED`, `REJECTED`, `PAUSED` |
| **`workflowState`** | **Lifecycle State**. Tells you *where* the request is in its journey. | `DRAFT`, `OPEN`, `CLOSED` |
---
## 2. Status Mapping Table
The `workflowState` is automatically derived from the `status` and the finalization event (Conclusion Remark).
| Primary Status | Finalized? | workflowState | Description |
| :--- | :--- | :--- | :--- |
| `DRAFT` | No | `DRAFT` | Request is being prepared by the initiator. |
| `PENDING` | No | `OPEN` | Waiting for first level activation or system processing. |
| `IN_PROGRESS` | No | `OPEN` | Actively moving through approval levels. |
| `PAUSED` | No | `OPEN` | Temporarily frozen; `isPaused` flag is `true`. |
| `APPROVED` | No | `OPEN` | All levels approved, but initiator hasn't written the final conclusion. |
| `REJECTED` | No | `OPEN` | Rejected by an approver, but initiator hasn't acknowledged/finalized. |
| **`APPROVED`** | **Yes** | **`CLOSED`** | **Final state: Approved and Archived.** |
| **`REJECTED`** | **Yes** | **`CLOSED`** | **Final state: Rejected and Archived.** |
---
## 3. Ambiguity Resolution (The "Why")
Previously, the system changed `status` to `CLOSED` after finalization, which destroyed the information about whether the request was Approved or Rejected.
**Corrected Behavior:**
- **Outcome remains visible**: A finalized request will now keep its `status` as `APPROVED` or `REJECTED`.
- **Filtering made easy**: Dashboard charts use `workflowState: 'CLOSED'` to count all finished work, while list filters use `status: 'APPROVED'` to find specific results.
---
## 4. Technical Implementation Notes
### Schema Changes
- **`WorkflowRequest`**: Added `workflowState` (String, Indexed).
- **`status` Enum**: Removed `CLOSED` (deprecated) and `CANCELLED`.
### Transition Logic
1. **Approval/Rejection**: Updates `status` to `APPROVED` or `REJECTED`. `workflowState` remains `OPEN`.
2. **Finalization (Conclusion)**: Triggered by initiator. Updates `workflowState` to `CLOSED`. **Does NOT change `status`.**
3. **Pause**: Set `status` to `PAUSED` and `isPaused: true`. `workflowState` stays `OPEN`.
### Impacted Services
- `DashboardMongoService`: Uses `workflowState` for Facet/KPI counts.
- `WorkflowService`: Filter logic updated to respect both keys.
- `ConclusionController`: `finalizeConclusion` logic updated to toggle `workflowState`.

159
docs/SYSTEM_ARCHITECTURE.md Normal file
View File

@ -0,0 +1,159 @@
# Royal Enfield Workflow Management System - Technical Architecture Definition
## 1. Platform Overview
The Royal Enfield (RE) Workflow Management System is a resilient, horizontally scalable infrastructure designed to orchestrate complex internal business processes. It utilizes a decoupled, service-oriented architecture leveraging **Node.js (TypeScript)**, **MongoDB Atlas (v8)**, and **Google Cloud Storage (GCS)** to ensure high availability and performance across enterprise workflows.
This document focuses exclusively on the core platform infrastructure and custom workflow engine, excluding legacy dealer claim modules.
---
## 2. Global Architecture & Ingress
### A. High-Level System Architecture
```mermaid
graph TD
User((User / Client))
subgraph "Public Interface"
Nginx[Nginx Reverse Proxy]
end
subgraph "Application Layer (Node.js)"
Auth[Auth Middleware]
Core[Workflow Service]
Dynamic[Ad-hoc Logic]
AI[Vertex AI Service]
TAT[TAT Worker / BullMQ]
end
subgraph "Persistence & Infrastructure"
Atlas[(MongoDB Atlas v8)]
GCS_Bucket[GCS Bucket - Artifacts]
GSM[Google Secret Manager]
Redis[(Redis Cache)]
end
User --> Nginx
Nginx --> Auth
Auth --> Core
Core --> Dynamic
Core --> Atlas
Core --> GCS_Bucket
Core --> AI
TAT --> Redis
TAT --> Atlas
Core --> GSM
```
### B. Professional Entrance: Nginx Proxy
All incoming traffic is managed by **Nginx**, acting as the "Deployed Server" facade.
- **SSL Termination**: Encrypts traffic at the edge.
- **Micro-caching**: Caches static metadata to reduce load on Node.js.
- **Proxying**: Strategically routes `/api` to the backend and serves the production React bundle for root requests.
### C. Stateless Authentication (JWT + RBAC)
The platform follows a stateless security model:
1. **JWT Validation**: `auth.middleware.ts` verifies signatures using secrets managed by **Google Secret Manager (GSM)**.
2. **Context Enrichment**: User identity is synchronized from the `users` collection in MongoDB Atlas.
3. **Granular RBAC**: Access is governed by roles (`ADMIN`, `MANAGEMENT`, `USER`) and dynamic participant checks.
---
## 3. Background Processing & SLA Management (BullMQ)
At the heart of the platform's performance is the **Asynchronous Task Engine** powered by **BullMQ** and **Redis**.
### A. TAT (Turnaround Time) Tracking Logic
Turnaround time is monitored per-level using a highly accurate calculation engine that accounts for:
- **Business Days/Hours**: Weekend and holiday filtering via `tatTimeUtils.ts`.
- **Priority Multipliers**: Scaling TAT for `STANDARD` vs `EXPRESS` requests.
- **Pause Impact**: Snapshot-based SLA halting during business-case pauses.
### B. TAT Worker Flow (Redis Backed)
```mermaid
graph TD
Trigger[Request Assignment] --> Queue[tatQueue - BullMQ]
Queue --> Redis[(Redis Cache)]
Redis --> Worker[tatWorker.ts]
Worker --> Processor[tatProcessor.mongo.ts]
Processor --> Check{Threshold Reached?}
Check -->|50/75%| Notify[Reminder Notification]
Check -->|100%| Breach[Breach Alert + Escalation]
```
---
## 4. Multi-Channel Notification Dispatch Engine
The system ensures critical workflow events (Approvals, Breaches, Comments) reach users through three distinct synchronous and asynchronous channels.
### A. Channel Orchestration
Managed by `notification.service.ts`, the engine handles:
1. **Real-time (Socket.io)**: Immediate UI updates via room-based events.
2. **Web Push (Vapid)**: Browser-level push notifications for offline users.
3. **Enterprise Email**: Specialized services like `emailNotification.service.ts` dispatch templated HTML emails.
### B. Notification Lifecycle
```mermaid
sequenceDiagram
participant S as Service Layer
participant N as Notification Service
participant DB as MongoDB (NotificationModel)
participant SK as Socket.io
participant E as Email Service
S->>N: Trigger Event (e.g. "Assignment")
N->>DB: Persist Notification Record (Audit)
N->>SK: broadcast(user:id, "notification:new")
N->>E: dispatchAsync(EmailTemplate)
DB-->>S: Success
```
---
## 5. Cloud-Native Storage & Assets (GCS)
The architecture treats **Google Cloud Storage (GCS)** as a first-class citizen for both operational and deployment data.
### A. Deployment Artifact Architecture
- **Static Site Hosting**: GCS stores the compiled frontend artifacts.
- **Production Secrets**: `Google Secret Manager` ensures that no production passwords or API keys reside in the codebase.
### B. Scalable Document Storage
- **Decoupling**: Binaries are never stored in the database. MongoDB only stores the URI.
- **Privacy Mode**: Documents are retrieved via **Signed URLs** with a configurable TTL.
- **Structure**: `requests/{requestNumber}/documents/`
---
## 6. Real-time Collaboration (Socket.io)
Collaborative features like "Who else is viewing this request?" and "Instant Alerts" are powered by a persistent WebSocket layer.
- **Presence Tracking**: A `Map<requestId, Set<userId>>` tracks online users per workflow request.
- **Room Logic**: Users join specific "Rooms" based on their current active request view.
- **Bi-directional Sync**: Frontend emits `presence:join` when entering a request page.
---
## 7. Intelligent Monitoring & Observability
The platform includes a dedicated monitoring stack for "Day 2" operations.
- **Metrics (Prometheus)**: Scrapes the `/metrics` endpoint provided by our Prometheus middleware.
- **Log Aggregation (Grafana Loki)**: `promtail` ships container logs to Loki for centralized debugging.
- **Alerting**: **Alertmanager** triggers PagerDuty/Email alerts for critical system failures.
```mermaid
graph LR
App[RE Backend] -->|Prometheus| P[Prometheus DB]
App -->|Logs| L[Loki]
P --> G[Grafana Dashboards]
L --> G
```
---
## 8. Dynamic Workflow Flexibility
The "Custom Workflow" module provides logic for ad-hoc adjustments:
1. **Skip Approver**: Bypasses a level while maintaining a forced audit reason.
2. **Ad-hoc Insertion**: Inserts an approver level mid-flight, dynamically recalculating the downstream chain.

View File

@ -0,0 +1,108 @@
# Analysis: Dealer Claim & Unified Request Architecture
This document analyzes the current architecture and proposes an efficient approach to unify Dealer Claims and Custom Requests while supporting specialized data capture and versioning.
## Current State
Both **Custom Requests** and **Dealer Claims** are already "unified" at the base level:
- **Primary Collection**: `workflow_requests` stores the core data (id, requestNumber, initiator, status, currentLevel).
- **Secondary Collection**: `dealer_claims` stores the business-specific metadata (proposal, expenses, invoices, etc.) and is linked via `requestId`.
This architecture naturally supports showing both in the same list.
## Proposed Efficient Approach
To make these two paths truly "inline" and handle specialized steps efficiently, we recommend a **Metadata-Driven Activity System**.
### 1. Unified Listing
The UI should continue to use the existing `listWorkflows` endpoints. The backend already returns `templateType`, which the frontend can use to decide which icon or detail view to render.
### 2. Specialized Step Identification (Dual-Tag System)
To handle dynamic level shifts and accurately recognize the purpose of each step, we use two categories of tags on each `ApprovalLevel`.
#### Category A: Action Tags (`stepAction`)
Defines **what** special behavior is required in this step.
- `DEALER_PROPOSAL`: Show proposal submission form.
- `EXPENSE_CAPTURE`: Show expense document upload form.
- `PROPOSAL_EVALUATION`: Show evaluation tools for the initiator/manager.
- `NONE`: Standard approve/reject UI.
#### Category B: Persona Tags (`stepPersona`)
Defines **who** is acting in this step (role-based logic).
- `INITIATOR`: Used when the initiator acts as an approver (e.g., evaluating a dealer proposal).
- `DEPARTMENT_LEAD`: Standard leadership approval.
- `ADDITIONAL_APPROVER`: Distinguishes steps added manually from the template.
#### How it works together:
| Level | Level Name | `stepAction` | `stepPersona` | UI Behavior |
| :--- | :--- | :--- | :--- | :--- |
| **1** | Dealer Proposal | `DEALER_PROPOSAL` | `DEALER` | Full Proposal Form |
| **2** | Initiator Review | `PROPOSAL_EVALUATION` | `INITIATOR` | Inline evaluation checklist |
| **3** | Lead Approval | `NONE` | `DEPARTMENT_LEAD` | Simple Approve/Reject |
| **3b** | Extra Check | `NONE` | `ADDITIONAL_APPROVER` | Manual Approval UI |
- **Dynamic Insertions**: If `Extra Check` is added, the following levels shift, but their `stepAction` tags remain, so the UI NEVER breaks.
- **Resubmission**: Rejection logic targets the latest completed level with `stepAction: 'DEALER_PROPOSAL'`.
### 3. Versioning & Iterations
The user's requirement to track previous proposals during resubmission is handled via the **Snapshotted Revisions** pattern:
- **The Main Store**: `DealerClaim.proposal` and `DealerClaim.completion` always hold the **active/latest** values.
- **The Revision Store**: `DealerClaim.revisions[]` acts as an append-only audit trail.
**Resubmission Flow:**
1. Request is rejected at Level 2/3/5.
2. Workflow moves back to Level 1 or 4 (Dealer).
3. Dealer edits the data.
4. **On Submit**:
- Backend takes the *current* `proposal` or `completion` data.
- Pushes it into `revisions` with a timestamp and `triggeredBy: 'SYSTEM_VERSION_SNAPSHOT'`.
- Overwrites the main object with the *new* data.
- Advances the workflow.
### 4. KPI & Deep Filtering Strategy (Hybrid Approach)
To support complex KPIs and high-performance filtering across thousands of requests, we use a **Referential Flat Pattern**:
- **Workflow Index (Speed)**: `WorkflowRequest` remains light. It handles high-frequency queries like "My Pending Tasks" or "Recent Activity".
- **Business Index (Depth)**: `DealerClaim` holds the "Deep Data". We apply Mongoose/MongoDB indexes on fields like:
- `dealer.region`, `dealer.state` (for Geospatial/Regional KPIs).
- `budgetTracking.utilizedBudget` (for Financial KPIs).
- `completion.expenses.category` (for operational analysis).
**The "Hybrid" Advantage:**
1. **Performance**: We don't bloat the main `workflow_requests` collection with hundreds of dealer-specific fields. This keeps "Total Request" counts and general listing extremely fast.
2. **Scalability**: For deep filters (e.g., "Show all claims in South Region with expenses > 50k"), we query the `dealer_claims` collection first to get the `requestId`s, then fetch the workflow status. This is much faster than a massive `$lookup` on a single bloated collection.
3. **Clean KPIs**: KPIs like "Budget vs Actual" are calculated directly from `DealerClaim` without interfering with generic workflow TAT metrics.
### 5. Ad-Hoc & Additional Approver Handling
When a user manually adds an approver (Ad-hoc) to a Dealer Claim or Custom Flow:
- **Tag Assignment**: The new level is automatically tagged with `stepAction: 'NONE'` and `stepPersona: 'ADDITIONAL_APPROVER'`.
- **UI Consistency**: The frontend sees `stepAction: 'NONE'` and renders the standard approval interface (comments + buttons).
- **Rejection Intelligence**:
- If an *Additional Approver* rejects, the system looks back for the nearest **anchor step** (e.g., `stepAction: 'DEALER_PROPOSAL'`).
- This prevents the workflow from getting "stuck" between two manually added levels if the business rule requires a return to the initiator or dealer.
### 6. Impact on Custom Flows & Compatibility
**Zero Breaking Changes**:
- Existing Custom Flows will default to `stepAction: 'NONE'`. The UI behavior remains identical to the current state.
- The `WorkflowRequest` collection structure is not being modified; we are only adding two optional metadata fields to the `ApprovalLevel` sub-documents.
**Future-Proofing**:
- Custom Flows can now "unlock" specialized steps (like `PROPOSAL_EVALUATION`) simply by updating their template metadata, without any backend code changes.
### 7. Implementation Strategy
| Feature | Custom Request Path | Dealer Claim Path |
| :--- | :--- | :--- |
| **Listing** | Unified `listWorkflows` | Unified `listWorkflows` |
| **Details View** | Standard UI | Enhanced UI (tabs for Expenses/Proposal) |
| **Logic** | Generic `approveRequest` | `approveRequest` + `DealerClaimService` hook |
| **Versioning** | Activity Logs only | Snapshotted Revisions for re-submissions |
---
### Key Advantage
This approach avoids creating "two separate systems". It treats a Dealer Claim as a "Custom Request with a specific metadata payload". The UI remains cohesive, and the backend logic for TAT, notifications, and status transitions stays shared.

49
fix-imports.ps1 Normal file
View File

@ -0,0 +1,49 @@
# Fix all simple imports to use MongoDB services.
#
# Rewrites import paths in a fixed set of TypeScript files so they point at
# the *.mongo.* service modules, then renames the matching service instances.
#
# Import paths are matched LITERALLY (regex-escaped before use, so '.' is not
# a wildcard); instance renames deliberately stay regex so \b word boundaries
# leave names like 'activityServiceFactory' untouched.
# [ordered] keeps iteration deterministic — a plain hashtable enumerates its
# keys in an unspecified order.
$importReplacements = [ordered]@{
    'from ''@services/activity.service'''       = 'from ''@services/activity.mongo.service'''
    'from ''../services/activity.service'''     = 'from ''../services/activity.mongo.service'''
    'from ''@services/notification.service'''   = 'from ''@services/notification.mongo.service'''
    'from ''../services/notification.service''' = 'from ''../services/notification.mongo.service'''
    'from ''@services/configReader.service'''   = 'from ''@services/configReader.mongo.service'''
    'from ''../services/configReader.service''' = 'from ''../services/configReader.mongo.service'''
    'from ''./configReader.service'''           = 'from ''./configReader.mongo.service'''
    'from ''../services/holiday.service'''      = 'from ''../services/holiday.mongo.service'''
    'from ''../services/workflow.service'''     = 'from ''../services/workflow.service.mongo'''
    'from ''../services/worknote.service'''     = 'from ''../services/worknote.mongo.service'''
}

# Service instance renames — regex on purpose (\b = word boundary).
$regexReplacements = [ordered]@{
    '\bactivityService\b'     = 'activityMongoService'
    '\bnotificationService\b' = 'notificationMongoService'
    '\bholidayService\b'      = 'holidayMongoService'
    '\bworkNoteService\b'     = 'workNoteMongoService'
}

$files = @(
    'src/controllers/conclusion.controller.ts',
    'src/controllers/document.controller.ts',
    'src/controllers/notification.controller.ts',
    'src/controllers/tat.controller.ts',
    'src/routes/workflow.routes.ts',
    'src/emailtemplates/emailPreferences.helper.ts',
    'src/routes/debug.routes.ts',
    'src/services/ai.service.ts',
    'src/utils/tatTimeUtils.ts'
)

foreach ($file in $files) {
    if (Test-Path $file) {
        # -Raw reads the whole file as a single string so multi-line
        # replacements and original line endings are preserved.
        $content = Get-Content $file -Raw
        foreach ($key in $importReplacements.Keys) {
            # Escape so the import path is matched literally, not as regex.
            $content = $content -replace [regex]::Escape($key), $importReplacements[$key]
        }
        foreach ($key in $regexReplacements.Keys) {
            $content = $content -replace $key, $regexReplacements[$key]
        }
        # NOTE(review): Set-Content uses the platform default encoding;
        # add -Encoding UTF8 if sources contain non-ASCII — confirm first.
        Set-Content $file $content -NoNewline
        Write-Host "✓ Updated: $file"
    } else {
        Write-Host "✗ Not found: $file"
    }
}
Write-Host "`n✅ Import replacements complete!"

940
package-lock.json generated

File diff suppressed because it is too large Load Diff

View File

@ -16,10 +16,9 @@
"type-check": "tsc --noEmit",
"clean": "rm -rf dist",
"setup": "ts-node -r tsconfig-paths/register src/scripts/auto-setup.ts",
"migrate": "ts-node -r tsconfig-paths/register src/scripts/migrate.ts",
"seed:config": "ts-node -r tsconfig-paths/register src/scripts/seed-admin-config.ts",
"seed:test-dealer": "ts-node -r tsconfig-paths/register src/scripts/seed-test-dealer.ts",
"cleanup:dealer-claims": "ts-node -r tsconfig-paths/register src/scripts/cleanup-dealer-claims.ts"
"seed:config": "ts-node -r tsconfig-paths/register src/scripts/seed-admin-configs.ts",
"reset:mongo": "ts-node -r tsconfig-paths/register src/scripts/reset-mongo-db.ts",
"seed:test-dealer": "ts-node -r tsconfig-paths/register src/scripts/seed-test-dealer.mongo.ts"
},
"dependencies": {
"@google-cloud/secret-manager": "^6.1.1",
@ -40,6 +39,7 @@
"helmet": "^8.0.0",
"ioredis": "^5.8.2",
"jsonwebtoken": "^9.0.2",
"mongoose": "^9.1.5",
"morgan": "^1.10.0",
"multer": "^1.4.5-lts.1",
"node-cron": "^3.0.3",
@ -47,10 +47,7 @@
"openai": "^6.8.1",
"passport": "^0.7.0",
"passport-jwt": "^4.0.1",
"pg": "^8.13.1",
"pg-hstore": "^2.3.4",
"prom-client": "^15.1.3",
"sequelize": "^6.37.5",
"socket.io": "^4.8.1",
"uuid": "^8.3.2",
"web-push": "^3.6.7",
@ -65,12 +62,12 @@
"@types/express": "^5.0.0",
"@types/jest": "^29.5.14",
"@types/jsonwebtoken": "^9.0.7",
"@types/mongoose": "^5.11.96",
"@types/morgan": "^1.9.9",
"@types/multer": "^1.4.12",
"@types/node": "^22.19.1",
"@types/passport": "^1.0.16",
"@types/passport-jwt": "^4.0.1",
"@types/pg": "^8.15.6",
"@types/supertest": "^6.0.2",
"@types/web-push": "^3.6.4",
"@typescript-eslint/eslint-plugin": "^8.19.1",
@ -79,7 +76,6 @@
"jest": "^29.7.0",
"nodemon": "^3.1.9",
"prettier": "^3.4.2",
"sequelize-cli": "^6.6.2",
"supertest": "^7.0.0",
"ts-jest": "^29.2.5",
"ts-node": "^10.9.2",

View File

@ -5,7 +5,7 @@ import dotenv from 'dotenv';
import cookieParser from 'cookie-parser';
import { UserService } from './services/user.service';
import { SSOUserData } from './types/auth.types';
import { sequelize } from './config/database';
import { corsMiddleware } from './middlewares/cors.middleware';
import { metricsMiddleware, createMetricsRouter } from './middlewares/metrics.middleware';
import routes from './routes/index';
@ -21,13 +21,10 @@ dotenv.config();
const app: express.Application = express();
const userService = new UserService();
// Initialize database connection
// Database initialization
const initializeDatabase = async () => {
try {
await sequelize.authenticate();
} catch (error) {
console.error('❌ Database connection failed:', error);
}
// MongoDB is connected via server.ts or separate config
// No Sequelize initialization needed
};
// Initialize database

View File

@ -1,28 +1,56 @@
import { Sequelize } from 'sequelize';
import mongoose from 'mongoose';
import dotenv from 'dotenv';
import logger from '../utils/logger';
import dns from 'dns';
dotenv.config();
const sequelize = new Sequelize({
host: process.env.DB_HOST || 'localhost',
port: parseInt(process.env.DB_PORT || '5432', 10),
database: process.env.DB_NAME || 're_workflow_db',
username: process.env.DB_USER || 'postgres',
password: process.env.DB_PASSWORD || 'postgres',
dialect: 'postgres',
logging: false, // Disable SQL query logging for cleaner console output
pool: {
min: parseInt(process.env.DB_POOL_MIN || '2', 10),
max: parseInt(process.env.DB_POOL_MAX || '10', 10),
acquire: 30000,
idle: 10000,
},
dialectOptions: {
ssl: process.env.DB_SSL === 'true' ? {
require: true,
rejectUnauthorized: false,
} : false,
},
});
/**
 * Connects Mongoose to MongoDB.
 *
 * The URI is taken from MONGO_URI, then MONGODB_URL, falling back to a local
 * `re_workflow_db` instance. For Atlas `mongodb+srv://` URIs, public DNS
 * servers are configured BEFORE connecting to work around `querySrv
 * ECONNREFUSED` failures seen on some Windows/VPN setups.
 *
 * Fix: removed a dangling `export { sequelize };` left over from the deleted
 * Sequelize config — `export` inside a function body is a syntax error and
 * `sequelize` no longer exists in this module.
 *
 * @throws re-throws the connection error so server startup is aborted.
 */
export const connectMongoDB = async () => {
  try {
    const mongoUri =
      process.env.MONGO_URI ||
      process.env.MONGODB_URL ||
      'mongodb://localhost:27017/re_workflow_db';

    // Workaround for querySrv ECONNREFUSED in specific network environments
    // (e.g. some Windows setups/VPNs). DNS servers must be set before any
    // connection attempt so SRV resolution uses them.
    if (mongoUri.startsWith('mongodb+srv://')) {
      logger.info('[Database] Detected Atlas SRV URI, configuring DNS resolution...');
      try {
        // Public resolvers fix SRV lookup failures on some Windows setups.
        dns.setServers(['8.8.8.8', '8.8.4.4', '1.1.1.1', '1.0.0.1']);
        logger.info('[Database] DNS servers configured: Google DNS (8.8.8.8, 8.8.4.4) and Cloudflare DNS (1.1.1.1, 1.0.0.1)');
        // Small delay to ensure the DNS settings take effect.
        await new Promise(resolve => setTimeout(resolve, 100));
      } catch (dnsErr) {
        logger.warn('[Database] Failed to set public DNS servers:', dnsErr);
      }
    }

    logger.info('[Database] Connecting to MongoDB...');
    await mongoose.connect(mongoUri, {
      serverSelectionTimeoutMS: 10000, // 10s (up from default) to tolerate slow SRV resolution
      socketTimeoutMS: 45000,
    });
    logger.info('✅ MongoDB Connected Successfully');
  } catch (error: any) {
    logger.error('❌ MongoDB Connection Error:', error.message);
    if (error.stack) {
      logger.error('Stack trace:', error.stack);
    }

    // Provide helpful hints for the common DNS/network failure modes.
    if (error.message.includes('querySrv ECONNREFUSED') || error.message.includes('ENOTFOUND')) {
      logger.error('');
      logger.error('🔍 DNS Resolution Failed. Possible solutions:');
      logger.error('   1. Check your internet connection');
      logger.error('   2. Verify the MongoDB Atlas cluster is running');
      logger.error('   3. Try disabling VPN if you\'re using one');
      logger.error('   4. Check Windows Firewall settings');
      logger.error('   5. Verify your MongoDB Atlas connection string is correct');
      logger.error('');
    }

    throw error; // Re-throw to stop server startup
  }
};

export { mongoose };

View File

@ -9,7 +9,7 @@ export const SYSTEM_CONFIG = {
APP_NAME: 'Royal Enfield Workflow Management',
APP_VERSION: '1.2.0',
APP_ENV: process.env.NODE_ENV || 'development',
// Working Hours Configuration
WORKING_HOURS: {
START_HOUR: parseInt(process.env.WORK_START_HOUR || '9', 10),
@ -18,23 +18,23 @@ export const SYSTEM_CONFIG = {
END_DAY: 5, // Friday
TIMEZONE: process.env.TZ || 'Asia/Kolkata',
},
// TAT (Turnaround Time) Settings
TAT: {
// Notification thresholds (percentage)
THRESHOLD_50_PERCENT: 50,
THRESHOLD_75_PERCENT: 75,
THRESHOLD_100_PERCENT: 100,
// Test mode for faster testing
TEST_MODE: process.env.TAT_TEST_MODE === 'true',
TEST_TIME_MULTIPLIER: process.env.TAT_TEST_MODE === 'true' ? 1/60 : 1, // 1 hour = 1 minute in test mode
TEST_TIME_MULTIPLIER: process.env.TAT_TEST_MODE === 'true' ? 1 / 60 : 1, // 1 hour = 1 minute in test mode
// Default TAT values by priority (in hours)
DEFAULT_EXPRESS_TAT: parseInt(process.env.DEFAULT_EXPRESS_TAT || '24', 10),
DEFAULT_STANDARD_TAT: parseInt(process.env.DEFAULT_STANDARD_TAT || '72', 10),
},
// File Upload Limits
UPLOAD: {
MAX_FILE_SIZE_MB: parseInt(process.env.MAX_FILE_SIZE_MB || '10', 10),
@ -42,7 +42,7 @@ export const SYSTEM_CONFIG = {
ALLOWED_FILE_TYPES: ['pdf', 'doc', 'docx', 'xls', 'xlsx', 'ppt', 'pptx', 'jpg', 'jpeg', 'png', 'gif', 'txt'],
MAX_FILES_PER_REQUEST: parseInt(process.env.MAX_FILES_PER_REQUEST || '10', 10),
},
// Workflow Limits
WORKFLOW: {
MAX_APPROVAL_LEVELS: parseInt(process.env.MAX_APPROVAL_LEVELS || '10', 10),
@ -50,7 +50,7 @@ export const SYSTEM_CONFIG = {
MAX_SPECTATORS: parseInt(process.env.MAX_SPECTATORS || '20', 10),
MIN_APPROVAL_LEVELS: 1,
},
// Work Notes Configuration
WORK_NOTES: {
MAX_MESSAGE_LENGTH: parseInt(process.env.MAX_MESSAGE_LENGTH || '2000', 10),
@ -58,20 +58,20 @@ export const SYSTEM_CONFIG = {
ENABLE_REACTIONS: process.env.ENABLE_REACTIONS !== 'false',
ENABLE_MENTIONS: process.env.ENABLE_MENTIONS !== 'false',
},
// Pagination
PAGINATION: {
DEFAULT_PAGE_SIZE: parseInt(process.env.DEFAULT_PAGE_SIZE || '20', 10),
MAX_PAGE_SIZE: parseInt(process.env.MAX_PAGE_SIZE || '100', 10),
},
// Session & Security
SECURITY: {
SESSION_TIMEOUT_MINUTES: parseInt(process.env.SESSION_TIMEOUT_MINUTES || '480', 10), // 8 hours
JWT_EXPIRY: process.env.JWT_EXPIRY || '8h',
ENABLE_2FA: process.env.ENABLE_2FA === 'true',
},
// Notification Settings
NOTIFICATIONS: {
ENABLE_EMAIL: process.env.ENABLE_EMAIL_NOTIFICATIONS !== 'false',
@ -79,7 +79,7 @@ export const SYSTEM_CONFIG = {
ENABLE_IN_APP: true, // Always enabled
BATCH_DELAY_MS: parseInt(process.env.NOTIFICATION_BATCH_DELAY || '5000', 10),
},
// Feature Flags
FEATURES: {
ENABLE_AI_CONCLUSION: process.env.ENABLE_AI_CONCLUSION !== 'false',
@ -87,7 +87,7 @@ export const SYSTEM_CONFIG = {
ENABLE_ANALYTICS: process.env.ENABLE_ANALYTICS !== 'false',
ENABLE_EXPORT: process.env.ENABLE_EXPORT !== 'false',
},
// Redis & Queue
REDIS: {
URL: process.env.REDIS_URL || 'redis://localhost:6379',
@ -95,7 +95,7 @@ export const SYSTEM_CONFIG = {
RATE_LIMIT_MAX: parseInt(process.env.RATE_LIMIT_MAX || '10', 10),
RATE_LIMIT_DURATION: parseInt(process.env.RATE_LIMIT_DURATION || '1000', 10),
},
// UI Preferences (can be overridden per user in future)
UI: {
DEFAULT_THEME: 'light',
@ -147,16 +147,16 @@ export async function getPublicConfig() {
// Get configuration from database first (always try to read from DB)
const { getConfigValue } = require('../services/configReader.service');
// Get AI configuration from admin settings (database)
const aiEnabled = (await getConfigValue('AI_ENABLED', 'true'))?.toLowerCase() === 'true';
const remarkGenerationEnabled = (await getConfigValue('AI_REMARK_GENERATION_ENABLED', 'true'))?.toLowerCase() === 'true';
const aiEnabled = String(await getConfigValue('AI_ENABLED', 'true')).toLowerCase() === 'true';
const remarkGenerationEnabled = String(await getConfigValue('AI_REMARK_GENERATION_ENABLED', 'true')).toLowerCase() === 'true';
const maxRemarkLength = parseInt(await getConfigValue('AI_MAX_REMARK_LENGTH', '2000') || '2000', 10);
// Try to get AI service status (gracefully handle if not available)
try {
const { aiService } = require('../services/ai.service');
return {
...baseConfig,
ai: {

View File

@ -1,13 +1,14 @@
import { Request, Response } from 'express';
import { Holiday, HolidayType } from '@models/Holiday';
import { holidayService } from '@services/holiday.service';
import { activityTypeService } from '@services/activityType.service';
import { sequelize } from '@config/database';
import { QueryTypes, Op } from 'sequelize';
import logger from '@utils/logger';
import { initializeHolidaysCache, clearWorkingHoursCache } from '@utils/tatTimeUtils';
import { clearConfigCache } from '@services/configReader.service';
import { User, UserRole } from '@models/User';
import { HolidayModel as Holiday, HolidayType } from '../models/mongoose/Holiday.schema';
import { holidayMongoService as holidayService } from '../services/holiday.service';
import { activityTypeService } from '../services/activityType.service';
import { adminConfigMongoService } from '../services/adminConfig.service';
import logger from '../utils/logger';
import dayjs from 'dayjs';
import { initializeHolidaysCache, clearWorkingHoursCache } from '../utils/tatTimeUtils';
import { clearConfigCache } from '../services/configReader.service';
import { UserModel as User, IUser } from '../models/mongoose/User.schema';
import { UserRole } from '../types/user.types';
/**
* Get all holidays (with optional year filter)
@ -19,10 +20,13 @@ export const getAllHolidays = async (req: Request, res: Response): Promise<void>
const holidays = await holidayService.getAllActiveHolidays(yearNum);
// Format response to match legacy structure
const formattedHolidays = holidays.map(mapToLegacyHoliday);
res.json({
success: true,
data: holidays,
count: holidays.length
data: formattedHolidays,
count: formattedHolidays.length
});
} catch (error) {
logger.error('[Admin] Error fetching holidays:', error);
@ -49,13 +53,17 @@ export const getHolidayCalendar = async (req: Request, res: Response): Promise<v
return;
}
const calendar = await holidayService.getHolidayCalendar(yearNum);
// Use getAllActiveHolidays to get full docs, then filter by year in memory or update service
// Service has getHolidayCalendar(year) which returns partial objects.
// Better to use getAllActiveHolidays(year) and map ourselves.
const holidays = await holidayService.getAllActiveHolidays(yearNum);
const formattedHolidays = holidays.map(mapToLegacyHoliday);
res.json({
success: true,
year: yearNum,
holidays: calendar,
count: calendar.length
holidays: formattedHolidays,
count: formattedHolidays.length
});
} catch (error) {
logger.error('[Admin] Error fetching holiday calendar:', error);
@ -103,22 +111,26 @@ export const createHoliday = async (req: Request, res: Response): Promise<void>
const holiday = await holidayService.createHoliday({
holidayDate,
holidayName,
description,
holidayType: holidayType || HolidayType.ORGANIZATIONAL,
isRecurring: isRecurring || false,
recurrenceRule,
holidayType: (holidayType as any) || HolidayType.ORGANIZATIONAL,
year: new Date(holidayDate).getFullYear(),
appliesToDepartments,
appliesToLocations,
description,
isRecurring,
recurrenceRule,
createdBy: userId
});
// Reload holidays cache
await initializeHolidaysCache();
// Format response to match legacy structure
const legacyResponse = mapToLegacyHoliday(holiday);
res.status(201).json({
success: true,
message: 'Holiday created successfully',
data: holiday
data: [legacyResponse] // Returning array as requested
});
} catch (error: any) {
logger.error('[Admin] Error creating holiday:', error);
@ -129,6 +141,28 @@ export const createHoliday = async (req: Request, res: Response): Promise<void>
}
};
/**
 * Helper to map a Mongoose Holiday document to the legacy JSON format.
 *
 * Produces the flat holiday shape the legacy frontend expects, including the
 * duplicated snake_case created_at/updated_at aliases. Empty department /
 * location arrays are normalised to null.
 */
const mapToLegacyHoliday = (holiday: any) => {
  const departments = holiday.appliesToDepartments;
  const locations = holiday.appliesToLocations;

  return {
    holidayId: holiday._id,
    holidayDate: dayjs(holiday.holidayDate).format('YYYY-MM-DD'),
    holidayName: holiday.holidayName,
    description: holiday.description || null,
    isRecurring: holiday.isRecurring || false,
    recurrenceRule: holiday.recurrenceRule || null,
    holidayType: holiday.holidayType,
    // Missing flag means active; an explicit false (or null) is passed through.
    isActive: holiday.isActive === undefined ? true : holiday.isActive,
    appliesToDepartments: departments?.length ? departments : null,
    appliesToLocations: locations?.length ? locations : null,
    createdBy: holiday.createdBy || null,
    updatedBy: holiday.updatedBy || null,
    createdAt: holiday.createdAt,
    updatedAt: holiday.updatedAt,
    // Legacy snake_case aliases kept for backward compatibility.
    created_at: holiday.createdAt,
    updated_at: holiday.updatedAt
  };
};
/**
* Update a holiday
*/
@ -146,7 +180,7 @@ export const updateHoliday = async (req: Request, res: Response): Promise<void>
const { holidayId } = req.params;
const updates = req.body;
const holiday = await holidayService.updateHoliday(holidayId, updates, userId);
const holiday = await holidayService.updateHoliday(holidayId, updates);
if (!holiday) {
res.status(404).json({
@ -162,7 +196,7 @@ export const updateHoliday = async (req: Request, res: Response): Promise<void>
res.json({
success: true,
message: 'Holiday updated successfully',
data: holiday
data: [mapToLegacyHoliday(holiday)] // Returning array for consistency
});
} catch (error: any) {
logger.error('[Admin] Error updating holiday:', error);
@ -222,7 +256,7 @@ export const bulkImportHolidays = async (req: Request, res: Response): Promise<v
return;
}
const result = await holidayService.bulkImportHolidays(holidays, userId);
const result = await holidayService.bulkImportHolidays(holidays);
// Reload holidays cache
await initializeHolidaysCache();
@ -259,35 +293,7 @@ export const getPublicConfigurations = async (req: Request, res: Response): Prom
return;
}
let whereClause = '';
if (category) {
whereClause = `WHERE config_category = '${category}' AND is_sensitive = false`;
} else {
whereClause = `WHERE config_category IN ('DOCUMENT_POLICY', 'TAT_SETTINGS', 'WORKFLOW_SHARING', 'SYSTEM_SETTINGS') AND is_sensitive = false`;
}
const rawConfigurations = await sequelize.query(`
SELECT
config_key,
config_category,
config_value,
value_type,
display_name,
description
FROM admin_configurations
${whereClause}
ORDER BY config_category, sort_order
`, { type: QueryTypes.SELECT });
// Map snake_case to camelCase for frontend
const configurations = (rawConfigurations as any[]).map((config: any) => ({
configKey: config.config_key,
configCategory: config.config_category,
configValue: config.config_value,
valueType: config.value_type,
displayName: config.display_name,
description: config.description
}));
const configurations = await adminConfigMongoService.getPublicConfigurations(category as string);
res.json({
success: true,
@ -310,55 +316,7 @@ export const getAllConfigurations = async (req: Request, res: Response): Promise
try {
const { category } = req.query;
let whereClause = '';
if (category) {
whereClause = `WHERE config_category = '${category}'`;
}
const rawConfigurations = await sequelize.query(`
SELECT
config_id,
config_key,
config_category,
config_value,
value_type,
display_name,
description,
default_value,
is_editable,
is_sensitive,
validation_rules,
ui_component,
options,
sort_order,
requires_restart,
last_modified_at,
last_modified_by
FROM admin_configurations
${whereClause}
ORDER BY config_category, sort_order
`, { type: QueryTypes.SELECT });
// Map snake_case to camelCase for frontend
const configurations = (rawConfigurations as any[]).map((config: any) => ({
configId: config.config_id,
configKey: config.config_key,
configCategory: config.config_category,
configValue: config.config_value,
valueType: config.value_type,
displayName: config.display_name,
description: config.description,
defaultValue: config.default_value,
isEditable: config.is_editable,
isSensitive: config.is_sensitive || false,
validationRules: config.validation_rules,
uiComponent: config.ui_component,
options: config.options,
sortOrder: config.sort_order,
requiresRestart: config.requires_restart || false,
lastModifiedAt: config.last_modified_at,
lastModifiedBy: config.last_modified_by
}));
const configurations = await adminConfigMongoService.getAllConfigurations(category as string);
res.json({
success: true,
@ -400,22 +358,9 @@ export const updateConfiguration = async (req: Request, res: Response): Promise<
}
// Update configuration
const result = await sequelize.query(`
UPDATE admin_configurations
SET
config_value = :configValue,
last_modified_by = :userId,
last_modified_at = NOW(),
updated_at = NOW()
WHERE config_key = :configKey
AND is_editable = true
RETURNING *
`, {
replacements: { configValue, userId, configKey },
type: QueryTypes.UPDATE
});
const config = await adminConfigMongoService.updateConfig(configKey, configValue, userId);
if (!result || (result[1] as any) === 0) {
if (!config) {
res.status(404).json({
success: false,
error: 'Configuration not found or not editable'
@ -425,14 +370,14 @@ export const updateConfiguration = async (req: Request, res: Response): Promise<
// Clear config cache so new values are used immediately
clearConfigCache();
// If working hours config was updated, also clear working hours cache
const workingHoursKeys = ['WORK_START_HOUR', 'WORK_END_HOUR', 'WORK_START_DAY', 'WORK_END_DAY'];
if (workingHoursKeys.includes(configKey)) {
await clearWorkingHoursCache();
logger.info(`[Admin] Working hours configuration '${configKey}' updated - cache cleared and reloaded`);
}
// If AI config was updated, reinitialize AI service
const aiConfigKeys = ['AI_ENABLED'];
if (aiConfigKeys.includes(configKey)) {
@ -467,19 +412,19 @@ export const resetConfiguration = async (req: Request, res: Response): Promise<v
try {
const { configKey } = req.params;
await sequelize.query(`
UPDATE admin_configurations
SET config_value = default_value,
updated_at = NOW()
WHERE config_key = :configKey
`, {
replacements: { configKey },
type: QueryTypes.UPDATE
});
const config = await adminConfigMongoService.resetConfig(configKey);
if (!config) {
res.status(404).json({
success: false,
error: 'Configuration not found'
});
return;
}
// Clear config cache so reset values are used immediately
clearConfigCache();
// If working hours config was reset, also clear working hours cache
const workingHoursKeys = ['WORK_START_HOUR', 'WORK_END_HOUR', 'WORK_START_DAY', 'WORK_END_DAY'];
if (workingHoursKeys.includes(configKey)) {
@ -521,7 +466,7 @@ export const updateUserRole = async (req: Request, res: Response): Promise<void>
try {
const { userId } = req.params;
const { role } = req.body;
// Validate role
const validRoles: UserRole[] = ['USER', 'MANAGEMENT', 'ADMIN'];
if (!role || !validRoles.includes(role)) {
@ -531,9 +476,9 @@ export const updateUserRole = async (req: Request, res: Response): Promise<void>
});
return;
}
// Find user
const user = await User.findByPk(userId);
const user = await User.findOne({ userId });
if (!user) {
res.status(404).json({
success: false,
@ -541,10 +486,10 @@ export const updateUserRole = async (req: Request, res: Response): Promise<void>
});
return;
}
// Store old role for logging
const oldRole = user.role;
// Prevent self-demotion from ADMIN (safety check)
const adminUser = req.user;
if (adminUser?.userId === userId && role !== 'ADMIN') {
@ -554,13 +499,13 @@ export const updateUserRole = async (req: Request, res: Response): Promise<void>
});
return;
}
// Update role
user.role = role;
await user.save();
logger.info(`✅ User role updated by ${adminUser?.email}: ${user.email} - ${oldRole}${role}`);
res.json({
success: true,
message: `User role updated from ${oldRole} to ${role}`,
@ -597,17 +542,17 @@ export const updateUserRole = async (req: Request, res: Response): Promise<void>
export const getUsersByRole = async (req: Request, res: Response): Promise<void> => {
try {
const { role, page = '1', limit = '10' } = req.query;
const pageNum = parseInt(page as string) || 1;
const limitNum = Math.min(parseInt(limit as string) || 10, 100); // Max 100 per page
const offset = (pageNum - 1) * limitNum;
const whereClause: any = { isActive: true };
// Handle role filtering
if (role && role !== 'ALL' && role !== 'ELEVATED') {
const validRoles: UserRole[] = ['USER', 'MANAGEMENT', 'ADMIN'];
if (!validRoles.includes(role as UserRole)) {
const validRoles: string[] = ['USER', 'MANAGEMENT', 'ADMIN'];
if (!validRoles.includes(role as string)) {
res.status(400).json({
success: false,
error: 'Invalid role. Must be USER, MANAGEMENT, ADMIN, ALL, or ELEVATED'
@ -617,63 +562,34 @@ export const getUsersByRole = async (req: Request, res: Response): Promise<void>
whereClause.role = role;
} else if (role === 'ELEVATED' || !role) {
// Default: Show only ADMIN and MANAGEMENT (elevated users)
whereClause.role = { [Op.in]: ['ADMIN', 'MANAGEMENT'] };
whereClause.role = { $in: ['ADMIN', 'MANAGEMENT'] };
}
// If role === 'ALL', don't filter by role (show all users)
// Get total count for pagination
const totalUsers = await User.count({ where: whereClause });
const totalUsers = await User.countDocuments(whereClause);
const totalPages = Math.ceil(totalUsers / limitNum);
// Get paginated users
const users = await User.findAll({
where: whereClause,
attributes: [
'userId',
'email',
'displayName',
'firstName',
'lastName',
'department',
'designation',
'role',
'manager',
'postalAddress',
'lastLogin',
'createdAt'
],
order: [
['role', 'ASC'], // ADMIN first, then MANAGEMENT, then USER
['displayName', 'ASC']
],
limit: limitNum,
offset: offset
});
const users = await User.find(whereClause)
.select('userId email displayName firstName lastName department designation role manager postalAddress lastLogin createdAt')
.sort({ role: 1, displayName: 1 })
.skip(offset)
.limit(limitNum);
// Get role summary (across all users, not just current page)
const roleStats = await sequelize.query(`
SELECT
role,
COUNT(*) as count
FROM users
WHERE is_active = true
GROUP BY role
ORDER BY
CASE role
WHEN 'ADMIN' THEN 1
WHEN 'MANAGEMENT' THEN 2
WHEN 'USER' THEN 3
END
`, {
type: QueryTypes.SELECT
});
const roleStatsRaw = await User.aggregate([
{ $match: { isActive: true } },
{ $group: { _id: '$role', count: { $sum: 1 } } },
{ $sort: { _id: 1 } }
]);
const summary = {
ADMIN: parseInt((roleStats.find((s: any) => s.role === 'ADMIN') as any)?.count || '0'),
MANAGEMENT: parseInt((roleStats.find((s: any) => s.role === 'MANAGEMENT') as any)?.count || '0'),
USER: parseInt((roleStats.find((s: any) => s.role === 'USER') as any)?.count || '0')
ADMIN: roleStatsRaw.find((s: any) => s._id === 'ADMIN')?.count || 0,
MANAGEMENT: roleStatsRaw.find((s: any) => s._id === 'MANAGEMENT')?.count || 0,
USER: roleStatsRaw.find((s: any) => s._id === 'USER')?.count || 0
};
res.json({
success: true,
data: {
@ -708,29 +624,31 @@ export const getUsersByRole = async (req: Request, res: Response): Promise<void>
*/
export const getRoleStatistics = async (req: Request, res: Response): Promise<void> => {
try {
const stats = await sequelize.query(`
SELECT
role,
COUNT(*) as count,
COUNT(CASE WHEN is_active = true THEN 1 END) as active_count,
COUNT(CASE WHEN is_active = false THEN 1 END) as inactive_count
FROM users
GROUP BY role
ORDER BY
CASE role
WHEN 'ADMIN' THEN 1
WHEN 'MANAGEMENT' THEN 2
WHEN 'USER' THEN 3
END
`, {
type: QueryTypes.SELECT
});
const stats = await User.aggregate([
{
$group: {
_id: '$role',
count: { $sum: 1 },
activeCount: { $sum: { $cond: ['$isActive', 1, 0] } },
inactiveCount: { $sum: { $cond: ['$isActive', 0, 1] } }
}
},
{ $sort: { _id: 1 } }
]);
// Format for frontend
const formattedStats = stats.map((stat: any) => ({
role: stat._id,
count: stat.count,
active_count: stat.activeCount,
inactive_count: stat.inactiveCount
}));
res.json({
success: true,
data: {
statistics: stats,
total: stats.reduce((sum: number, stat: any) => sum + parseInt(stat.count), 0)
statistics: formattedStats,
total: formattedStats.reduce((sum: number, stat: any) => sum + stat.count, 0)
}
});
} catch (error) {
@ -777,12 +695,12 @@ export const assignRoleByEmail = async (req: Request, res: Response): Promise<vo
logger.info(`[Admin] Assigning role ${role} to ${email} by user ${currentUserId}`);
// First, check if user already exists in our database
let user = await User.findOne({ where: { email } });
let user: IUser | null = await User.findOne({ email });
if (!user) {
// User doesn't exist, need to fetch from Okta and create
logger.info(`[Admin] User ${email} not found in database, fetching from Okta...`);
// Import UserService to fetch full profile from Okta
const { UserService } = await import('@services/user.service');
const userService = new UserService();
@ -790,7 +708,7 @@ export const assignRoleByEmail = async (req: Request, res: Response): Promise<vo
try {
// Fetch full user profile from Okta Users API (includes manager, jobTitle, etc.)
const oktaUserData = await userService.fetchAndExtractOktaUserByEmail(email);
if (!oktaUserData) {
res.status(404).json({
success: false,
@ -800,12 +718,11 @@ export const assignRoleByEmail = async (req: Request, res: Response): Promise<vo
}
// Create user in our database via centralized userService with all fields including manager
const ensured = await userService.createOrUpdateUser({
user = (await userService.createOrUpdateUser({
...oktaUserData,
role, // Set the assigned role
role: role as any, // Set the assigned role
isActive: true, // Ensure user is active
});
user = ensured;
})) as IUser;
logger.info(`[Admin] Created new user ${email} with role ${role} (manager: ${oktaUserData.manager || 'N/A'})`);
} catch (oktaError: any) {
@ -836,30 +753,39 @@ export const assignRoleByEmail = async (req: Request, res: Response): Promise<vo
try {
// Fetch full user profile from Okta Users API to sync manager and other fields
const oktaUserData = await userService.fetchAndExtractOktaUserByEmail(email);
if (oktaUserData) {
// Sync all fields from Okta including the new role using centralized method
const updated = await userService.createOrUpdateUser({
user = (await userService.createOrUpdateUser({
...oktaUserData, // Includes all fields: manager, jobTitle, postalAddress, etc.
role, // Set the new role
role: role as any, // Set the new role
isActive: true, // Ensure user is active
});
user = updated;
})) as IUser;
logger.info(`[Admin] Synced user ${email} from Okta (manager: ${oktaUserData.manager || 'N/A'}) and updated role from ${previousRole} to ${role}`);
} else {
// Okta user not found, just update role
await user.update({ role });
user.role = role as any;
await user.save();
logger.info(`[Admin] Updated user ${email} role from ${previousRole} to ${role} (Okta data not available)`);
}
} catch (oktaError: any) {
// If Okta fetch fails, just update the role
logger.warn(`[Admin] Failed to fetch Okta data for ${email}, updating role only:`, oktaError.message);
await user.update({ role });
user.role = role as any;
await user.save();
logger.info(`[Admin] Updated user ${email} role from ${previousRole} to ${role} (Okta sync failed)`);
}
}
if (!user) {
res.status(500).json({
success: false,
error: 'Failed to create or update user'
});
return;
}
res.json({
success: true,
message: `Successfully assigned ${role} role to ${user.displayName || email}`,
@ -1049,4 +975,3 @@ export const deleteActivityType = async (req: Request, res: Response): Promise<v
});
}
};

View File

@ -1,30 +1,30 @@
import { Request, Response } from 'express';
import { ApprovalService } from '@services/approval.service';
import { DealerClaimApprovalService } from '@services/dealerClaimApproval.service';
import { ApprovalLevel } from '@models/ApprovalLevel';
import { WorkflowRequest } from '@models/WorkflowRequest';
import { DealerClaimApprovalMongoService } from '@services/dealerClaimApproval.service';
import { ApprovalLevelModel as ApprovalLevel } from '../models/mongoose/ApprovalLevel.schema';
import { WorkflowRequestModel as WorkflowRequest } from '../models/mongoose/WorkflowRequest.schema';
import { validateApprovalAction } from '@validators/approval.validator';
import { ResponseHandler } from '@utils/responseHandler';
import type { AuthenticatedRequest } from '../types/express';
import { getRequestMetadata } from '@utils/requestUtils';
const approvalService = new ApprovalService();
const dealerClaimApprovalService = new DealerClaimApprovalService();
const dealerClaimApprovalService = new DealerClaimApprovalMongoService();
export class ApprovalController {
async approveLevel(req: AuthenticatedRequest, res: Response): Promise<void> {
try {
const { levelId } = req.params;
const validatedData = validateApprovalAction(req.body);
// Determine which service to use based on workflow type
const level = await ApprovalLevel.findByPk(levelId);
const level = await ApprovalLevel.findOne({ levelId });
if (!level) {
ResponseHandler.notFound(res, 'Approval level not found');
return;
}
const workflow = await WorkflowRequest.findByPk(level.requestId);
const workflow = await WorkflowRequest.findOne({ requestId: level.requestId });
if (!workflow) {
ResponseHandler.notFound(res, 'Workflow not found');
return;
@ -32,15 +32,15 @@ export class ApprovalController {
const workflowType = (workflow as any)?.workflowType;
const requestMeta = getRequestMetadata(req);
// Route to appropriate service based on workflow type
let approvedLevel: any;
if (workflowType === 'CLAIM_MANAGEMENT') {
// Use DealerClaimApprovalService for claim management workflows
approvedLevel = await dealerClaimApprovalService.approveLevel(
levelId,
validatedData,
req.user.userId,
levelId,
validatedData,
req.user.userId,
{
ipAddress: requestMeta.ipAddress,
userAgent: requestMeta.userAgent
@ -49,16 +49,16 @@ export class ApprovalController {
} else {
// Use ApprovalService for custom workflows
approvedLevel = await approvalService.approveLevel(
levelId,
validatedData,
req.user.userId,
levelId,
validatedData,
req.user.userId,
{
ipAddress: requestMeta.ipAddress,
userAgent: requestMeta.userAgent
}
);
}
if (!approvedLevel) {
ResponseHandler.notFound(res, 'Approval level not found');
return;
@ -74,16 +74,18 @@ export class ApprovalController {
async getCurrentApprovalLevel(req: Request, res: Response): Promise<void> {
try {
const { id } = req.params;
// Determine which service to use based on workflow type
const workflow = await WorkflowRequest.findByPk(id);
// Determine which service to use based on workflow type (handle both requestId and requestNumber)
const workflow = await WorkflowRequest.findOne({
$or: [{ requestId: id }, { requestNumber: id }]
});
if (!workflow) {
ResponseHandler.notFound(res, 'Workflow not found');
return;
}
const workflowType = (workflow as any)?.workflowType;
// Route to appropriate service based on workflow type
let level: any;
if (workflowType === 'CLAIM_MANAGEMENT') {
@ -91,7 +93,7 @@ export class ApprovalController {
} else {
level = await approvalService.getCurrentApprovalLevel(id);
}
ResponseHandler.success(res, level, 'Current approval level retrieved successfully');
} catch (error) {
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
@ -102,16 +104,18 @@ export class ApprovalController {
async getApprovalLevels(req: Request, res: Response): Promise<void> {
try {
const { id } = req.params;
// Determine which service to use based on workflow type
const workflow = await WorkflowRequest.findByPk(id);
// Determine which service to use based on workflow type (handle both requestId and requestNumber)
const workflow = await WorkflowRequest.findOne({
$or: [{ requestId: id }, { requestNumber: id }]
});
if (!workflow) {
ResponseHandler.notFound(res, 'Workflow not found');
return;
}
const workflowType = (workflow as any)?.workflowType;
// Route to appropriate service based on workflow type
let levels: any[];
if (workflowType === 'CLAIM_MANAGEMENT') {
@ -119,7 +123,7 @@ export class ApprovalController {
} else {
levels = await approvalService.getApprovalLevels(id);
}
ResponseHandler.success(res, levels, 'Approval levels retrieved successfully');
} catch (error) {
const errorMessage = error instanceof Error ? error.message : 'Unknown error';

View File

@ -4,7 +4,7 @@ import { validateSSOCallback, validateRefreshToken, validateTokenExchange, valid
import { ResponseHandler } from '../utils/responseHandler';
import type { AuthenticatedRequest } from '../types/express';
import logger from '../utils/logger';
import { activityService, SYSTEM_EVENT_REQUEST_ID } from '../services/activity.service';
import { activityMongoService as activityService, SYSTEM_EVENT_REQUEST_ID } from '../services/activity.service';
import { getRequestMetadata } from '../utils/requestUtils';
export class AuthController {
@ -22,18 +22,18 @@ export class AuthController {
try {
// Validate request body
const validatedData = validateSSOCallback(req.body);
const result = await this.authService.handleSSOCallback(validatedData as any);
// Log login activity
const requestMeta = getRequestMetadata(req);
await activityService.log({
requestId: SYSTEM_EVENT_REQUEST_ID, // Special UUID for system events
type: 'login',
user: {
userId: result.user.userId,
user: {
userId: result.user.userId,
name: result.user.displayName || result.user.email,
email: result.user.email
email: result.user.email
},
timestamp: new Date().toISOString(),
action: 'User Login',
@ -49,7 +49,7 @@ export class AuthController {
category: 'AUTHENTICATION',
severity: 'INFO'
});
ResponseHandler.success(res, {
user: result.user,
accessToken: result.accessToken,
@ -69,7 +69,7 @@ export class AuthController {
async getCurrentUser(req: AuthenticatedRequest, res: Response): Promise<void> {
try {
const user = await this.authService.getUserProfile(req.user.userId);
if (!user) {
ResponseHandler.notFound(res, 'User not found');
return;
@ -109,7 +109,7 @@ export class AuthController {
try {
// Try to get refresh token from request body first, then from cookies
let refreshToken: string | undefined;
if (req.body?.refreshToken) {
const validated = validateRefreshToken(req.body);
refreshToken = validated.refreshToken;
@ -117,19 +117,19 @@ export class AuthController {
// Fallback to cookie if available (requires cookie-parser middleware)
refreshToken = (req as any).cookies.refreshToken;
}
if (!refreshToken) {
res.status(400).json({
success: false,
res.status(400).json({
success: false,
error: 'Refresh token is required in request body or cookies',
message: 'Request body validation failed',
timestamp: new Date().toISOString()
});
return;
}
const newAccessToken = await this.authService.refreshAccessToken(refreshToken);
// Set new access token in cookie if using cookie-based auth
const isProduction = process.env.NODE_ENV === 'production';
const cookieOptions = {
@ -138,9 +138,9 @@ export class AuthController {
sameSite: isProduction ? 'none' as const : 'lax' as const, // 'none' for cross-domain in production
maxAge: 24 * 60 * 60 * 1000, // 24 hours
};
res.cookie('accessToken', newAccessToken, cookieOptions);
// SECURITY: In production, don't return token in response body
// Token is securely stored in httpOnly cookie
if (isProduction) {
@ -173,21 +173,21 @@ export class AuthController {
state: req.body?.state ? 'PRESENT' : 'MISSING',
},
});
const { code, redirectUri } = validateTokenExchange(req.body);
logger.info('Tanflow token exchange validation passed', { redirectUri });
const result = await this.authService.exchangeTanflowCodeForTokens(code, redirectUri);
// Log login activity
const requestMeta = getRequestMetadata(req);
await activityService.log({
requestId: SYSTEM_EVENT_REQUEST_ID,
type: 'login',
user: {
userId: result.user.userId,
user: {
userId: result.user.userId,
name: result.user.displayName || result.user.email,
email: result.user.email
email: result.user.email
},
timestamp: new Date().toISOString(),
action: 'User Login',
@ -203,7 +203,7 @@ export class AuthController {
category: 'AUTHENTICATION',
severity: 'INFO'
});
// Set tokens in httpOnly cookies (production) or return in body (development)
const isProduction = process.env.NODE_ENV === 'production';
const cookieOptions = {
@ -213,10 +213,10 @@ export class AuthController {
maxAge: 24 * 60 * 60 * 1000, // 24 hours
path: '/',
};
res.cookie('accessToken', result.accessToken, cookieOptions);
res.cookie('refreshToken', result.refreshToken, cookieOptions);
// In production, don't return tokens in response body (security)
// In development, include tokens for cross-port setup
if (isProduction) {
@ -246,14 +246,14 @@ export class AuthController {
async refreshTanflowToken(req: Request, res: Response): Promise<void> {
try {
const refreshToken = req.body?.refreshToken;
if (!refreshToken) {
ResponseHandler.error(res, 'Refresh token is required', 400, 'Refresh token is required in request body');
return;
}
const newAccessToken = await this.authService.refreshTanflowToken(refreshToken);
// Set new access token in cookie
const isProduction = process.env.NODE_ENV === 'production';
const cookieOptions = {
@ -263,9 +263,9 @@ export class AuthController {
maxAge: 24 * 60 * 60 * 1000,
path: '/',
};
res.cookie('accessToken', newAccessToken, cookieOptions);
if (isProduction) {
ResponseHandler.success(res, {
message: 'Token refreshed successfully'
@ -290,11 +290,11 @@ export class AuthController {
*/
async logout(req: Request, res: Response): Promise<void> {
const isProduction = process.env.NODE_ENV === 'production';
// Helper function to clear cookies with all possible option combinations
const clearCookiesCompletely = () => {
const cookieNames = ['accessToken', 'refreshToken'];
// Get the EXACT options used when setting cookies (from exchangeToken)
// These MUST match exactly: httpOnly, secure, sameSite, path
const cookieOptions = {
@ -371,7 +371,7 @@ export class AuthController {
// User might be null if token was invalid/expired
const userId = req.user?.userId || 'unknown';
const email = req.user?.email || 'unknown';
logger.info('User logout initiated', {
userId,
email,
@ -393,14 +393,14 @@ export class AuthController {
} catch (error) {
logger.error('Logout failed:', error);
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
// Even on error, try to clear cookies as last resort
try {
clearCookiesCompletely();
} catch (cookieError) {
logger.error('Error clearing cookies in catch block:', cookieError);
}
ResponseHandler.error(res, 'Logout failed', 500, errorMessage);
}
}
@ -439,18 +439,18 @@ export class AuthController {
});
const { username, password } = validatePasswordLogin(req.body);
const result = await this.authService.authenticateWithPassword(username, password);
// Log login activity
const requestMeta = getRequestMetadata(req);
await activityService.log({
requestId: SYSTEM_EVENT_REQUEST_ID,
type: 'login',
user: {
userId: result.user.userId,
user: {
userId: result.user.userId,
name: result.user.displayName || result.user.email,
email: result.user.email
email: result.user.email
},
timestamp: new Date().toISOString(),
action: 'User Login',
@ -466,7 +466,7 @@ export class AuthController {
category: 'AUTHENTICATION',
severity: 'INFO'
});
// Set cookies for web clients
const isProduction = process.env.NODE_ENV === 'production';
const cookieOptions = {
@ -477,12 +477,12 @@ export class AuthController {
};
res.cookie('accessToken', result.accessToken, cookieOptions);
const refreshCookieOptions = {
...cookieOptions,
maxAge: 7 * 24 * 60 * 60 * 1000, // 7 days
};
res.cookie('refreshToken', result.refreshToken, refreshCookieOptions);
logger.info('Password login successful', {
@ -516,21 +516,21 @@ export class AuthController {
},
headers: req.headers,
});
const { code, redirectUri } = validateTokenExchange(req.body);
logger.info('Token exchange validation passed', { redirectUri });
const result = await this.authService.exchangeCodeForTokens(code, redirectUri);
// Log login activity
const requestMeta = getRequestMetadata(req);
await activityService.log({
requestId: SYSTEM_EVENT_REQUEST_ID, // Special UUID for system events
type: 'login',
user: {
userId: result.user.userId,
user: {
userId: result.user.userId,
name: result.user.displayName || result.user.email,
email: result.user.email
email: result.user.email
},
timestamp: new Date().toISOString(),
action: 'User Login',
@ -546,7 +546,7 @@ export class AuthController {
category: 'AUTHENTICATION',
severity: 'INFO'
});
// Set cookies with httpOnly flag for security
const isProduction = process.env.NODE_ENV === 'production';
const cookieOptions = {
@ -557,24 +557,24 @@ export class AuthController {
};
res.cookie('accessToken', result.accessToken, cookieOptions);
const refreshCookieOptions = {
...cookieOptions,
maxAge: 7 * 24 * 60 * 60 * 1000, // 7 days for refresh token
};
res.cookie('refreshToken', result.refreshToken, refreshCookieOptions);
// Ensure Content-Type is set to JSON
res.setHeader('Content-Type', 'application/json');
logger.info('Sending token exchange response', {
hasUser: !!result.user,
hasAccessToken: !!result.accessToken,
hasRefreshToken: !!result.refreshToken,
isProduction,
});
// SECURITY: In production, don't return tokens in response body
// Tokens are securely stored in httpOnly cookies
if (isProduction) {

View File

@ -1,9 +1,10 @@
import { Request, Response } from 'express';
import { WorkflowRequest, ApprovalLevel, WorkNote, Document, Activity, ConclusionRemark } from '@models/index';
import { aiService } from '@services/ai.service';
import { activityService } from '@services/activity.service';
import logger from '@utils/logger';
import { getRequestMetadata } from '@utils/requestUtils';
import { WorkflowRequest, ApprovalLevel, WorkNote, Document, Activity, ConclusionRemark, User } from '../models'; // Fixed imports
import { aiService } from '../services/ai.service';
import { conclusionMongoService } from '../services/conclusion.service';
import { activityMongoService as activityService } from '../services/activity.service';
import logger from '../utils/logger';
import { getRequestMetadata } from '../utils/requestUtils';
export class ConclusionController {
/**
@ -15,20 +16,17 @@ export class ConclusionController {
const { requestId } = req.params;
const userId = (req as any).user?.userId;
// Fetch request with all related data
const request = await WorkflowRequest.findOne({
where: { requestId },
include: [
{ association: 'initiator', attributes: ['userId', 'displayName', 'email'] }
]
});
// Fetch request
// Mongoose doesn't support 'include' directly like Sequelize.
// We'll fetch the request first.
const request = await WorkflowRequest.findOne({ requestId });
if (!request) {
return res.status(404).json({ error: 'Request not found' });
}
// Check if user is the initiator
if ((request as any).initiatorId !== userId) {
// Check if user is the initiator (compare userId strings)
if ((request as any).initiator.userId !== userId) {
return res.status(403).json({ error: 'Only the initiator can generate conclusion remarks' });
}
@ -41,19 +39,19 @@ export class ConclusionController {
const { getConfigValue } = await import('../services/configReader.service');
const aiEnabled = (await getConfigValue('AI_ENABLED', 'true'))?.toLowerCase() === 'true';
const remarkGenerationEnabled = (await getConfigValue('AI_REMARK_GENERATION_ENABLED', 'true'))?.toLowerCase() === 'true';
if (!aiEnabled) {
logger.warn(`[Conclusion] AI features disabled in admin config for request ${requestId}`);
return res.status(400).json({
return res.status(400).json({
error: 'AI features disabled',
message: 'AI features are currently disabled by administrator. Please write the conclusion manually.',
canContinueManually: true
});
}
if (!remarkGenerationEnabled) {
logger.warn(`[Conclusion] AI remark generation disabled in admin config for request ${requestId}`);
return res.status(400).json({
return res.status(400).json({
error: 'AI remark generation disabled',
message: 'AI-powered conclusion generation is currently disabled by administrator. Please write the conclusion manually.',
canContinueManually: true
@ -63,125 +61,31 @@ export class ConclusionController {
// Check if AI service is available
if (!aiService.isAvailable()) {
logger.warn(`[Conclusion] AI service unavailable for request ${requestId}`);
return res.status(503).json({
return res.status(503).json({
error: 'AI service not available',
message: 'AI features are currently unavailable. Please verify Vertex AI configuration and service account credentials, or write the conclusion manually.',
canContinueManually: true
});
}
// Gather context for AI generation
const approvalLevels = await ApprovalLevel.findAll({
where: { requestId },
order: [['levelNumber', 'ASC']]
});
const workNotes = await WorkNote.findAll({
where: { requestId },
order: [['createdAt', 'ASC']],
limit: 20 // Last 20 work notes - keep full context for better conclusions
});
const documents = await Document.findAll({
where: { requestId },
order: [['uploadedAt', 'DESC']]
});
const activities = await Activity.findAll({
where: { requestId },
order: [['createdAt', 'ASC']],
limit: 50 // Last 50 activities - keep full context for better conclusions
});
// Build context object
const context = {
requestTitle: (request as any).title,
requestDescription: (request as any).description,
requestNumber: (request as any).requestNumber,
priority: (request as any).priority,
approvalFlow: approvalLevels.map((level: any) => {
const tatPercentage = level.tatPercentageUsed !== undefined && level.tatPercentageUsed !== null
? Number(level.tatPercentageUsed)
: (level.elapsedHours && level.tatHours ? (Number(level.elapsedHours) / Number(level.tatHours)) * 100 : 0);
return {
levelNumber: level.levelNumber,
approverName: level.approverName,
status: level.status,
comments: level.comments,
actionDate: level.actionDate,
tatHours: Number(level.tatHours || 0),
elapsedHours: Number(level.elapsedHours || 0),
tatPercentageUsed: tatPercentage
};
}),
workNotes: workNotes.map((note: any) => ({
userName: note.userName,
message: note.message,
createdAt: note.createdAt
})),
documents: documents.map((doc: any) => ({
fileName: doc.originalFileName || doc.fileName,
uploadedBy: doc.uploadedBy,
uploadedAt: doc.uploadedAt
})),
activities: activities.map((activity: any) => ({
type: activity.activityType,
action: activity.activityDescription,
details: activity.activityDescription,
timestamp: activity.createdAt
}))
};
logger.info(`[Conclusion] Generating AI remark for request ${requestId}...`);
// Generate AI conclusion
const aiResult = await aiService.generateConclusionRemark(context);
// Use the service to generate and save (consistent with automatic trigger)
const conclusionInstance = await conclusionMongoService.generateAndSaveAIConclusion(requestId);
// Check if conclusion already exists
let conclusionInstance = await ConclusionRemark.findOne({ where: { requestId } });
const conclusionData = {
aiGeneratedRemark: aiResult.remark,
aiModelUsed: aiResult.provider,
aiConfidenceScore: aiResult.confidence,
approvalSummary: {
totalLevels: approvalLevels.length,
approvedLevels: approvalLevels.filter((l: any) => l.status === 'APPROVED').length,
averageTatUsage: approvalLevels.reduce((sum: number, l: any) =>
sum + Number(l.tatPercentageUsed || 0), 0) / (approvalLevels.length || 1)
},
documentSummary: {
totalDocuments: documents.length,
documentNames: documents.map((d: any) => d.originalFileName || d.fileName)
},
keyDiscussionPoints: aiResult.keyPoints,
generatedAt: new Date()
};
if (conclusionInstance) {
// Update existing conclusion (allow regeneration)
await conclusionInstance.update(conclusionData as any);
logger.info(`[Conclusion] ✅ AI conclusion regenerated for request ${requestId}`);
} else {
// Create new conclusion
conclusionInstance = await ConclusionRemark.create({
requestId,
...conclusionData,
finalRemark: null,
editedBy: null,
isEdited: false,
editCount: 0,
finalizedAt: null
} as any);
logger.info(`[Conclusion] ✅ AI conclusion generated for request ${requestId}`);
if (!conclusionInstance) {
return res.status(500).json({ error: 'Failed to generate conclusion' });
}
// Fetch initiator details manually for logging
const initiator = await User.findOne({ userId: (request as any).initiatorId });
// Log activity
const requestMeta = getRequestMetadata(req);
await activityService.log({
requestId,
type: 'ai_conclusion_generated',
user: { userId, name: (request as any).initiator?.displayName || 'Initiator' },
user: { userId, name: initiator?.displayName || 'Initiator' },
timestamp: new Date().toISOString(),
action: 'AI Conclusion Generated',
details: 'AI-powered conclusion remark generated for review',
@ -192,23 +96,23 @@ export class ConclusionController {
return res.status(200).json({
message: 'Conclusion generated successfully',
data: {
conclusionId: (conclusionInstance as any).conclusionId,
aiGeneratedRemark: aiResult.remark,
keyDiscussionPoints: aiResult.keyPoints,
confidence: aiResult.confidence,
provider: aiResult.provider,
generatedAt: new Date()
conclusionId: (conclusionInstance as any).conclusionId || (conclusionInstance as any)._id,
aiGeneratedRemark: conclusionInstance.aiGeneratedRemark,
keyDiscussionPoints: conclusionInstance.keyDiscussionPoints,
confidence: conclusionInstance.aiConfidenceScore,
provider: conclusionInstance.aiModelUsed,
generatedAt: conclusionInstance.generatedAt
}
});
} catch (error: any) {
logger.error('[Conclusion] Error generating conclusion:', error);
// Provide helpful error messages
const isConfigError = error.message?.includes('not configured') ||
error.message?.includes('not available') ||
error.message?.includes('not initialized');
return res.status(isConfigError ? 503 : 500).json({
const isConfigError = error.message?.includes('not configured') ||
error.message?.includes('not available') ||
error.message?.includes('not initialized');
return res.status(isConfigError ? 503 : 500).json({
error: isConfigError ? 'AI service not configured' : 'Failed to generate conclusion',
message: error.message || 'An unexpected error occurred',
canContinueManually: true // User can still write manual conclusion
@ -231,31 +135,32 @@ export class ConclusionController {
}
// Fetch request
const request = await WorkflowRequest.findOne({ where: { requestId } });
const request = await WorkflowRequest.findOne({ requestId });
if (!request) {
return res.status(404).json({ error: 'Request not found' });
}
// Check if user is the initiator
if ((request as any).initiatorId !== userId) {
if ((request as any).initiator.userId !== userId) {
return res.status(403).json({ error: 'Only the initiator can update conclusion remarks' });
}
// Find conclusion
const conclusion = await ConclusionRemark.findOne({ where: { requestId } });
const conclusion = await ConclusionRemark.findOne({ requestId });
if (!conclusion) {
return res.status(404).json({ error: 'Conclusion not found. Generate it first.' });
}
// Update conclusion
const wasEdited = (conclusion as any).aiGeneratedRemark !== finalRemark;
await conclusion.update({
finalRemark: finalRemark,
editedBy: userId,
isEdited: wasEdited,
editCount: wasEdited ? (conclusion as any).editCount + 1 : (conclusion as any).editCount
} as any);
conclusion.finalRemark = finalRemark;
conclusion.editedBy = userId;
conclusion.isEdited = wasEdited;
if (wasEdited) {
conclusion.editCount = ((conclusion as any).editCount || 0) + 1;
}
await conclusion.save();
logger.info(`[Conclusion] Updated conclusion for request ${requestId} (edited: ${wasEdited})`);
@ -284,19 +189,17 @@ export class ConclusionController {
}
// Fetch request
const request = await WorkflowRequest.findOne({
where: { requestId },
include: [
{ association: 'initiator', attributes: ['userId', 'displayName', 'email'] }
]
});
const request = await WorkflowRequest.findOne({ requestId });
if (!request) {
return res.status(404).json({ error: 'Request not found' });
}
// Fetch initiator manually
const initiator = await User.findOne({ userId: (request as any).initiator.userId });
// Check if user is the initiator
if ((request as any).initiatorId !== userId) {
if ((request as any).initiator.userId !== userId) {
return res.status(403).json({ error: 'Only the initiator can finalize conclusion remarks' });
}
@ -306,15 +209,15 @@ export class ConclusionController {
}
// Find or create conclusion
let conclusion = await ConclusionRemark.findOne({ where: { requestId } });
let conclusion = await ConclusionRemark.findOne({ requestId });
if (!conclusion) {
// Create if doesn't exist (manual conclusion without AI)
conclusion = await ConclusionRemark.create({
requestId,
aiGeneratedRemark: null,
aiModelUsed: null,
aiConfidenceScore: null,
aiGeneratedRemark: undefined,
aiModelUsed: undefined,
aiConfidenceScore: undefined,
finalRemark: finalRemark,
editedBy: userId,
isEdited: false,
@ -322,28 +225,28 @@ export class ConclusionController {
approvalSummary: {},
documentSummary: {},
keyDiscussionPoints: [],
generatedAt: null,
generatedAt: undefined,
finalizedAt: new Date()
} as any);
});
} else {
// Update existing conclusion
const wasEdited = (conclusion as any).aiGeneratedRemark !== finalRemark;
await conclusion.update({
finalRemark: finalRemark,
editedBy: userId,
isEdited: wasEdited,
editCount: wasEdited ? (conclusion as any).editCount + 1 : (conclusion as any).editCount,
finalizedAt: new Date()
} as any);
conclusion.finalRemark = finalRemark;
conclusion.editedBy = userId;
conclusion.isEdited = wasEdited;
if (wasEdited) {
conclusion.editCount = ((conclusion as any).editCount || 0) + 1;
}
conclusion.finalizedAt = new Date();
await conclusion.save();
}
// Update request status to CLOSED
await request.update({
status: 'CLOSED',
conclusionRemark: finalRemark,
closureDate: new Date()
} as any);
// Update request workflowState to CLOSED (keep granular status as APPROVED/REJECTED)
request.workflowState = 'CLOSED';
(request as any).conclusionRemark = finalRemark;
(request as any).closureDate = new Date();
await request.save();
logger.info(`[Conclusion] ✅ Request ${requestId} finalized and closed`);
@ -351,7 +254,7 @@ export class ConclusionController {
// Since the initiator is finalizing, this should always succeed
let summaryId = null;
try {
const { summaryService } = await import('@services/summary.service');
const { summaryService } = await import('../services/summary.service');
const userRole = (req as any).user?.role || (req as any).auth?.role;
const summary = await summaryService.createSummary(requestId, userId, { userRole });
summaryId = (summary as any).summaryId;
@ -367,10 +270,10 @@ export class ConclusionController {
await activityService.log({
requestId,
type: 'closed',
user: { userId, name: (request as any).initiator?.displayName || 'Initiator' },
user: { userId, name: initiator?.displayName || 'Initiator' },
timestamp: new Date().toISOString(),
action: 'Request Closed',
details: `Request closed with conclusion remark by ${(request as any).initiator?.displayName}`,
details: `Request closed with conclusion remark by ${initiator?.displayName}`,
ipAddress: requestMeta.ipAddress,
userAgent: requestMeta.userAgent
});
@ -378,7 +281,7 @@ export class ConclusionController {
return res.status(200).json({
message: 'Request finalized and closed successfully',
data: {
conclusionId: (conclusion as any).conclusionId,
conclusionId: (conclusion as any).conclusionId || (conclusion as any)._id,
requestNumber: (request as any).requestNumber,
status: 'CLOSED',
finalRemark: finalRemark,
@ -400,20 +303,31 @@ export class ConclusionController {
try {
const { requestId } = req.params;
const conclusion = await ConclusionRemark.findOne({
where: { requestId },
include: [
{ association: 'editor', attributes: ['userId', 'displayName', 'email'] }
]
});
const conclusion = await ConclusionRemark.findOne({ requestId });
if (!conclusion) {
return res.status(404).json({ error: 'Conclusion not found' });
}
// Manually fetch editor if needed
let editor = null;
if (conclusion.editedBy) {
editor = await User.findOne({ userId: conclusion.editedBy });
}
// Append editor info to result if needed, or just return conclusion
const result = (conclusion as any).toJSON ? (conclusion as any).toJSON() : conclusion;
if (editor) {
result.editor = {
userId: editor.userId,
displayName: editor.displayName,
email: editor.email
};
}
return res.status(200).json({
message: 'Conclusion retrieved successfully',
data: conclusion
data: result
});
} catch (error: any) {
logger.error('[Conclusion] Error getting conclusion:', error);
@ -423,4 +337,3 @@ export class ConclusionController {
}
export const conclusionController = new ConclusionController();

View File

@ -1,12 +1,12 @@
import { Request, Response } from 'express';
import { DashboardService } from '../services/dashboard.service';
import { DashboardMongoService, dashboardMongoService } from '../services/dashboard.service';
import logger from '@utils/logger';
export class DashboardController {
private dashboardService: DashboardService;
private dashboardService: DashboardMongoService = dashboardMongoService;
constructor() {
this.dashboardService = new DashboardService();
// Service is now injected via import singleton
}
/**
@ -19,9 +19,9 @@ export class DashboardController {
const startDate = req.query.startDate as string | undefined;
const endDate = req.query.endDate as string | undefined;
const viewAsUser = req.query.viewAsUser === 'true'; // For admin to view as normal user
const kpis = await this.dashboardService.getKPIs(userId, dateRange, startDate, endDate, viewAsUser);
res.json({
success: true,
data: kpis
@ -53,13 +53,14 @@ export class DashboardController {
const approverType = req.query.approverType as 'current' | 'any' | undefined;
const search = req.query.search as string | undefined;
const slaCompliance = req.query.slaCompliance as string | undefined;
const lifecycle = req.query.lifecycle as string | undefined;
const viewAsUser = req.query.viewAsUser === 'true'; // When true, treat admin as normal user
const stats = await this.dashboardService.getRequestStats(
userId,
dateRange,
startDate,
endDate,
userId,
dateRange,
startDate,
endDate,
status,
priority,
templateType,
@ -69,9 +70,10 @@ export class DashboardController {
approverType,
search,
slaCompliance,
viewAsUser
viewAsUser,
lifecycle
);
res.json({
success: true,
data: stats
@ -94,9 +96,9 @@ export class DashboardController {
const dateRange = req.query.dateRange as string | undefined;
const startDate = req.query.startDate as string | undefined;
const endDate = req.query.endDate as string | undefined;
const efficiency = await this.dashboardService.getTATEfficiency(userId, dateRange, startDate, endDate);
res.json({
success: true,
data: efficiency
@ -119,9 +121,9 @@ export class DashboardController {
const dateRange = req.query.dateRange as string | undefined;
const startDate = req.query.startDate as string | undefined;
const endDate = req.query.endDate as string | undefined;
const load = await this.dashboardService.getApproverLoad(userId, dateRange, startDate, endDate);
res.json({
success: true,
data: load
@ -144,9 +146,9 @@ export class DashboardController {
const dateRange = req.query.dateRange as string | undefined;
const startDate = req.query.startDate as string | undefined;
const endDate = req.query.endDate as string | undefined;
const engagement = await this.dashboardService.getEngagementStats(userId, dateRange, startDate, endDate);
res.json({
success: true,
data: engagement
@ -169,9 +171,9 @@ export class DashboardController {
const dateRange = req.query.dateRange as string | undefined;
const startDate = req.query.startDate as string | undefined;
const endDate = req.query.endDate as string | undefined;
const insights = await this.dashboardService.getAIInsights(userId, dateRange, startDate, endDate);
res.json({
success: true,
data: insights
@ -194,9 +196,9 @@ export class DashboardController {
const dateRange = req.query.dateRange as string | undefined;
const startDate = req.query.startDate as string | undefined;
const endDate = req.query.endDate as string | undefined;
const utilization = await this.dashboardService.getAIRemarkUtilization(userId, dateRange, startDate, endDate);
res.json({
success: true,
data: utilization
@ -223,9 +225,9 @@ export class DashboardController {
const limit = Number(req.query.limit || 10);
const priority = req.query.priority as string | undefined;
const slaCompliance = req.query.slaCompliance as string | undefined;
const result = await this.dashboardService.getApproverPerformance(userId, dateRange, page, limit, startDate, endDate, priority, slaCompliance);
res.json({
success: true,
data: result.performance,
@ -254,9 +256,9 @@ export class DashboardController {
const page = Number(req.query.page || 1);
const limit = Number(req.query.limit || 10);
const viewAsUser = req.query.viewAsUser === 'true'; // For admin to view as normal user
const result = await this.dashboardService.getRecentActivity(userId, page, limit, viewAsUser);
res.json({
success: true,
data: result.activities,
@ -285,9 +287,9 @@ export class DashboardController {
const page = Number(req.query.page || 1);
const limit = Number(req.query.limit || 10);
const viewAsUser = req.query.viewAsUser === 'true'; // For admin to view as normal user
const result = await this.dashboardService.getCriticalRequests(userId, page, limit, viewAsUser);
res.json({
success: true,
data: result.criticalRequests,
@ -316,9 +318,9 @@ export class DashboardController {
const page = Number(req.query.page || 1);
const limit = Number(req.query.limit || 10);
const viewAsUser = req.query.viewAsUser === 'true'; // For admin to view as normal user
const result = await this.dashboardService.getUpcomingDeadlines(userId, page, limit, viewAsUser);
res.json({
success: true,
data: result.deadlines,
@ -347,9 +349,9 @@ export class DashboardController {
const dateRange = req.query.dateRange as string | undefined;
const startDate = req.query.startDate as string | undefined;
const endDate = req.query.endDate as string | undefined;
const stats = await this.dashboardService.getDepartmentStats(userId, dateRange, startDate, endDate);
res.json({
success: true,
data: stats
@ -372,9 +374,9 @@ export class DashboardController {
const dateRange = req.query.dateRange as string | undefined;
const startDate = req.query.startDate as string | undefined;
const endDate = req.query.endDate as string | undefined;
const distribution = await this.dashboardService.getPriorityDistribution(userId, dateRange, startDate, endDate);
res.json({
success: true,
data: distribution
@ -399,9 +401,9 @@ export class DashboardController {
const dateRange = req.query.dateRange as string | undefined;
const startDate = req.query.startDate as string | undefined;
const endDate = req.query.endDate as string | undefined;
const result = await this.dashboardService.getLifecycleReport(userId, page, limit, dateRange, startDate, endDate);
res.json({
success: true,
data: result.lifecycleData,
@ -436,11 +438,11 @@ export class DashboardController {
const filterType = req.query.filterType as string | undefined;
const filterCategory = req.query.filterCategory as string | undefined;
const filterSeverity = req.query.filterSeverity as string | undefined;
const result = await this.dashboardService.getActivityLogReport(
userId,
page,
limit,
userId,
page,
limit,
dateRange,
filterUserId,
filterType,
@ -449,7 +451,7 @@ export class DashboardController {
startDate,
endDate
);
res.json({
success: true,
data: result.activities,
@ -514,7 +516,7 @@ export class DashboardController {
const dateRange = req.query.dateRange as string | undefined;
const startDate = req.query.startDate as string | undefined;
const endDate = req.query.endDate as string | undefined;
const result = await this.dashboardService.getWorkflowAgingReport(
userId,
threshold,
@ -524,7 +526,7 @@ export class DashboardController {
startDate,
endDate
);
res.json({
success: true,
data: result.agingData,
@ -556,7 +558,7 @@ export class DashboardController {
const endDate = req.query.endDate as string | undefined;
const priority = req.query.priority as string | undefined;
const slaCompliance = req.query.slaCompliance as string | undefined;
if (!approverId) {
res.status(400).json({
success: false,
@ -564,7 +566,7 @@ export class DashboardController {
});
return;
}
const stats = await this.dashboardService.getSingleApproverStats(
userId,
approverId,
@ -574,7 +576,7 @@ export class DashboardController {
priority,
slaCompliance
);
res.json({
success: true,
data: stats
@ -604,7 +606,7 @@ export class DashboardController {
const priority = req.query.priority as string | undefined;
const slaCompliance = req.query.slaCompliance as string | undefined;
const search = req.query.search as string | undefined;
if (!approverId) {
res.status(400).json({
success: false,
@ -612,7 +614,7 @@ export class DashboardController {
});
return;
}
const result = await this.dashboardService.getRequestsByApprover(
userId,
approverId,
@ -626,7 +628,7 @@ export class DashboardController {
slaCompliance,
search
);
res.json({
success: true,
data: result.requests,
@ -646,4 +648,3 @@ export class DashboardController {
}
}
}

View File

@ -1,11 +1,10 @@
import { Request, Response } from 'express';
import type { AuthenticatedRequest } from '../types/express';
import { DealerClaimService } from '../services/dealerClaim.service';
import { DealerClaimMongoService } from '../services/dealerClaim.service';
import { ResponseHandler } from '../utils/responseHandler';
import logger from '../utils/logger';
import { gcsStorageService } from '../services/gcsStorage.service';
import { Document } from '../models/Document';
import { InternalOrder } from '../models/InternalOrder';
import { Document, InternalOrder, WorkflowRequest } from '../models'; // Fixed imports
import { constants } from '../config/constants';
import { sapIntegrationService } from '../services/sapIntegration.service';
import fs from 'fs';
@ -13,7 +12,7 @@ import path from 'path';
import crypto from 'crypto';
export class DealerClaimController {
private dealerClaimService = new DealerClaimService();
private dealerClaimService = new DealerClaimMongoService();
/**
* Create a new dealer claim request
@ -75,7 +74,7 @@ export class DealerClaimController {
logger.warn('[DealerClaimController] Approver validation error:', { message: error.message });
return ResponseHandler.error(res, error.message, 400);
}
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
logger.error('[DealerClaimController] Error creating claim request:', error);
return ResponseHandler.error(res, 'Failed to create claim request', 500, errorMessage);
@ -121,11 +120,11 @@ export class DealerClaimController {
return uuidRegex.test(id);
};
const { WorkflowRequest } = await import('../models/WorkflowRequest');
// Use WorkflowRequest from imports (Mongoose model)
if (isUuid(identifier)) {
return await WorkflowRequest.findByPk(identifier);
return await WorkflowRequest.findOne({ requestId: identifier });
} else {
return await WorkflowRequest.findOne({ where: { requestNumber: identifier } });
return await WorkflowRequest.findOne({ requestNumber: identifier });
}
}
@ -301,7 +300,7 @@ export class DealerClaimController {
try {
const fileBuffer = file.buffer || (file.path ? fs.readFileSync(file.path) : Buffer.from(''));
const checksum = crypto.createHash('sha256').update(fileBuffer).digest('hex');
const uploadResult = await gcsStorageService.uploadFileWithFallback({
buffer: fileBuffer,
originalName: file.originalname,
@ -312,8 +311,9 @@ export class DealerClaimController {
const extension = path.extname(file.originalname).replace('.', '').toLowerCase();
// Save to documents table
// Save to documents table (Mongoose)
const doc = await Document.create({
documentId: crypto.randomUUID(), // Generate UUID if model requires it and doesn't auto-gen
requestId,
uploadedBy: userId,
fileName: path.basename(file.filename || file.originalname),
@ -332,10 +332,11 @@ export class DealerClaimController {
parentDocumentId: null as any,
isDeleted: false,
downloadCount: 0,
} as any);
uploadedAt: new Date()
});
completionDocuments.push({
documentId: doc.documentId,
documentId: (doc as any).documentId,
name: file.originalname,
url: uploadResult.storageUrl,
size: file.size,
@ -360,7 +361,7 @@ export class DealerClaimController {
try {
const fileBuffer = file.buffer || (file.path ? fs.readFileSync(file.path) : Buffer.from(''));
const checksum = crypto.createHash('sha256').update(fileBuffer).digest('hex');
const uploadResult = await gcsStorageService.uploadFileWithFallback({
buffer: fileBuffer,
originalName: file.originalname,
@ -373,6 +374,7 @@ export class DealerClaimController {
// Save to documents table
const doc = await Document.create({
documentId: crypto.randomUUID(),
requestId,
uploadedBy: userId,
fileName: path.basename(file.filename || file.originalname),
@ -391,10 +393,11 @@ export class DealerClaimController {
parentDocumentId: null as any,
isDeleted: false,
downloadCount: 0,
} as any);
uploadedAt: new Date()
});
activityPhotos.push({
documentId: doc.documentId,
documentId: (doc as any).documentId,
name: file.originalname,
url: uploadResult.storageUrl,
size: file.size,
@ -420,7 +423,7 @@ export class DealerClaimController {
try {
const fileBuffer = file.buffer || (file.path ? fs.readFileSync(file.path) : Buffer.from(''));
const checksum = crypto.createHash('sha256').update(fileBuffer).digest('hex');
const uploadResult = await gcsStorageService.uploadFileWithFallback({
buffer: fileBuffer,
originalName: file.originalname,
@ -433,6 +436,7 @@ export class DealerClaimController {
// Save to documents table
const doc = await Document.create({
documentId: crypto.randomUUID(), // UUID gen
requestId,
uploadedBy: userId,
fileName: path.basename(file.filename || file.originalname),
@ -451,10 +455,11 @@ export class DealerClaimController {
parentDocumentId: null as any,
isDeleted: false,
downloadCount: 0,
} as any);
uploadedAt: new Date()
});
invoicesReceipts.push({
documentId: doc.documentId,
documentId: (doc as any).documentId,
name: file.originalname,
url: uploadResult.storageUrl,
size: file.size,
@ -480,7 +485,7 @@ export class DealerClaimController {
try {
const fileBuffer = attendanceSheetFile.buffer || (attendanceSheetFile.path ? fs.readFileSync(attendanceSheetFile.path) : Buffer.from(''));
const checksum = crypto.createHash('sha256').update(fileBuffer).digest('hex');
const uploadResult = await gcsStorageService.uploadFileWithFallback({
buffer: fileBuffer,
originalName: attendanceSheetFile.originalname,
@ -493,6 +498,7 @@ export class DealerClaimController {
// Save to documents table
const doc = await Document.create({
documentId: crypto.randomUUID(), // UUID gen
requestId,
uploadedBy: userId,
fileName: path.basename(attendanceSheetFile.filename || attendanceSheetFile.originalname),
@ -511,10 +517,11 @@ export class DealerClaimController {
parentDocumentId: null as any,
isDeleted: false,
downloadCount: 0,
} as any);
uploadedAt: new Date()
});
attendanceSheet = {
documentId: doc.documentId,
documentId: (doc as any).documentId,
name: attendanceSheetFile.originalname,
url: uploadResult.storageUrl,
size: attendanceSheetFile.size,
@ -561,18 +568,18 @@ export class DealerClaimController {
async validateIO(req: AuthenticatedRequest, res: Response): Promise<void> {
try {
const { ioNumber } = req.query;
if (!ioNumber || typeof ioNumber !== 'string') {
return ResponseHandler.error(res, 'IO number is required', 400);
}
// Fetch IO details from SAP (will return mock data until SAP is integrated)
const ioValidation = await sapIntegrationService.validateIONumber(ioNumber.trim());
if (!ioValidation.isValid) {
return ResponseHandler.error(res, ioValidation.error || 'Invalid IO number', 400);
}
return ResponseHandler.success(res, {
ioNumber: ioValidation.ioNumber,
availableBalance: ioValidation.availableBalance,
@ -623,7 +630,7 @@ export class DealerClaimController {
}
const blockAmount = blockedAmount ? parseFloat(blockedAmount) : 0;
// Log received data for debugging
logger.info('[DealerClaimController] updateIODetails received:', {
requestId,
@ -633,7 +640,7 @@ export class DealerClaimController {
receivedBlockedAmount: blockedAmount, // Original value from request
userId,
});
// Store in database when blocking amount > 0 OR when ioNumber and ioRemark are provided (for Step 3 approval)
if (blockAmount > 0) {
if (availableBalance === undefined) {
@ -649,9 +656,9 @@ export class DealerClaimController {
blockedAmount: blockAmount,
// remainingBalance will be calculated by the service from SAP's response
};
logger.info('[DealerClaimController] Calling updateIODetails service with:', ioData);
await this.dealerClaimService.updateIODetails(
requestId,
ioData,
@ -659,8 +666,8 @@ export class DealerClaimController {
);
// Fetch and return the updated IO details from database
const updatedIO = await InternalOrder.findOne({ where: { requestId } });
const updatedIO = await InternalOrder.findOne({ requestId });
if (updatedIO) {
return ResponseHandler.success(res, {
message: 'IO blocked successfully in SAP',
@ -803,125 +810,4 @@ export class DealerClaimController {
return ResponseHandler.error(res, 'Failed to update credit note details', 500, errorMessage);
}
}
/**
 * Send credit note to dealer and auto-approve Step 8.
 * POST /api/v1/dealer-claims/:requestId/credit-note/send
 * Accepts either UUID or requestNumber as the :requestId path parameter.
 *
 * Resolves the identifier to the workflow's canonical UUID before delegating
 * to the service layer; all outcomes are written to `res` via ResponseHandler
 * (this handler never rethrows to Express).
 *
 * @param req - authenticated request; `params.requestId` is a UUID or requestNumber
 * @param res - Express response used for success/error payloads
 */
async sendCreditNoteToDealer(
  req: AuthenticatedRequest,
  res: Response
): Promise<void> {
  try {
    const identifier = req.params.requestId; // Can be UUID or requestNumber
    const userId = req.user?.userId;
    if (!userId) {
      return ResponseHandler.error(res, 'Unauthorized', 401);
    }
    // Find workflow to get actual UUID
    const workflow = await this.findWorkflowByIdentifier(identifier);
    if (!workflow) {
      return ResponseHandler.error(res, 'Workflow request not found', 404);
    }
    // Tolerate both camelCase and snake_case field names on the returned record
    const requestId = (workflow as any).requestId || (workflow as any).request_id;
    if (!requestId) {
      return ResponseHandler.error(res, 'Invalid workflow request', 400);
    }
    // Service performs the send + Step 8 auto-approval as one operation
    await this.dealerClaimService.sendCreditNoteToDealer(requestId, userId);
    return ResponseHandler.success(res, { message: 'Credit note sent to dealer and Step 8 approved successfully' }, 'Credit note sent');
  } catch (error) {
    const errorMessage = error instanceof Error ? error.message : 'Unknown error';
    logger.error('[DealerClaimController] Error sending credit note to dealer:', error);
    return ResponseHandler.error(res, 'Failed to send credit note to dealer', 500, errorMessage);
  }
}
/**
 * Test SAP Budget Blocking (for testing/debugging).
 * POST /api/v1/dealer-claims/test/sap-block
 *
 * This endpoint allows direct testing of SAP budget blocking without creating a full request.
 * Flow: validate inputs -> validate IO number against SAP -> block the budget ->
 * return both the raw SAP response and locally derived figures.
 *
 * @param req - authenticated request; body carries `ioNumber`, `amount`, optional `requestNumber`
 * @param res - Express response; all outcomes (including errors) are sent via ResponseHandler
 */
async testSapBudgetBlock(req: AuthenticatedRequest, res: Response): Promise<void> {
  try {
    const userId = req.user?.userId;
    if (!userId) {
      return ResponseHandler.error(res, 'Unauthorized', 401);
    }
    const { ioNumber, amount, requestNumber } = req.body;
    // Validation
    if (!ioNumber || !amount) {
      return ResponseHandler.error(res, 'Missing required fields: ioNumber and amount are required', 400);
    }
    // amount arrives as string/number from the JSON body; coerce and reject non-positive values
    const blockAmount = parseFloat(amount);
    if (isNaN(blockAmount) || blockAmount <= 0) {
      return ResponseHandler.error(res, 'Amount must be a positive number', 400);
    }
    logger.info(`[DealerClaimController] Testing SAP budget block:`, {
      ioNumber,
      amount: blockAmount,
      requestNumber: requestNumber || 'TEST-REQUEST',
      userId
    });
    // First validate IO number
    const ioValidation = await sapIntegrationService.validateIONumber(ioNumber);
    if (!ioValidation.isValid) {
      return ResponseHandler.error(res, `Invalid IO number: ${ioValidation.error || 'IO number not found in SAP'}`, 400);
    }
    logger.info(`[DealerClaimController] IO validation successful:`, {
      ioNumber,
      availableBalance: ioValidation.availableBalance
    });
    // Block budget in SAP — a synthetic request number is generated when none is supplied
    const testRequestNumber = requestNumber || `TEST-${Date.now()}`;
    const blockResult = await sapIntegrationService.blockBudget(
      ioNumber,
      blockAmount,
      testRequestNumber,
      `Test budget block for ${testRequestNumber}`
    );
    if (!blockResult.success) {
      return ResponseHandler.error(res, `Failed to block budget in SAP: ${blockResult.error}`, 500);
    }
    // Return detailed response
    // NOTE(review): calculatedRemainingBalance assumes both availableBalance and
    // blockedAmount are defined numbers on success — confirm against the SAP service contract.
    return ResponseHandler.success(res, {
      message: 'SAP budget block test successful',
      ioNumber,
      requestedAmount: blockAmount,
      availableBalance: ioValidation.availableBalance,
      sapResponse: {
        success: blockResult.success,
        blockedAmount: blockResult.blockedAmount,
        remainingBalance: blockResult.remainingBalance,
        sapDocumentNumber: blockResult.blockId || null,
        error: blockResult.error || null
      },
      calculatedRemainingBalance: ioValidation.availableBalance - blockResult.blockedAmount,
      validation: {
        isValid: ioValidation.isValid,
        availableBalance: ioValidation.availableBalance,
        error: ioValidation.error || null
      }
    }, 'SAP budget block test completed');
  } catch (error: any) {
    logger.error('[DealerClaimController] Error testing SAP budget block:', error);
    return ResponseHandler.error(res, error.message || 'Failed to test SAP budget block', 500);
  }
}
}

View File

@ -1,18 +1,18 @@
import { Request, Response } from 'express';
import crypto from 'crypto';
import path from 'path';
import fs from 'fs';
import { Document } from '@models/Document';
import { User } from '@models/User';
import { WorkflowRequest } from '@models/WorkflowRequest';
import { Participant } from '@models/Participant';
import { ApprovalLevel } from '@models/ApprovalLevel';
import { Op } from 'sequelize';
import { DocumentModel } from '../models/mongoose/Document.schema';
import { UserModel } from '../models/mongoose/User.schema';
import { WorkflowRequestModel as WorkflowRequest } from '../models/mongoose/WorkflowRequest.schema';
import { ParticipantModel as Participant } from '../models/mongoose/Participant.schema';
import { ApprovalLevelModel as ApprovalLevel } from '../models/mongoose/ApprovalLevel.schema';
import { ResponseHandler } from '@utils/responseHandler';
import { activityService } from '@services/activity.service';
import { activityMongoService as activityService } from '@services/activity.service';
import { gcsStorageService } from '@services/gcsStorage.service';
import { emailNotificationService } from '@services/emailNotification.service';
import { notificationService } from '@services/notification.service';
import { notificationMongoService as notificationService } from '@services/notification.service';
import type { AuthenticatedRequest } from '../types/express';
import { getRequestMetadata } from '@utils/requestUtils';
import { getConfigNumber, getConfigValue } from '@services/configReader.service';
@ -28,9 +28,18 @@ export class DocumentController {
}
// Extract requestId from body (multer should parse form fields)
// Try both req.body and req.body.requestId for compatibility
const identifier = String((req.body?.requestId || req.body?.request_id || '').trim());
console.log('[DEBUG] Document upload attempt:', {
identifier,
bodyKeys: Object.keys(req.body || {}),
bodyRequestId: req.body?.requestId,
bodyRequest_id: req.body?.request_id,
userId: req.user?.userId
});
if (!identifier || identifier === 'undefined' || identifier === 'null') {
console.log('[DEBUG] RequestId missing or invalid');
logWithContext('error', 'RequestId missing or invalid in document upload', {
body: req.body,
bodyKeys: Object.keys(req.body || {}),
@ -46,19 +55,45 @@ export class DocumentController {
return uuidRegex.test(id);
};
// Get workflow request - handle both UUID (requestId) and requestNumber
let workflowRequest: WorkflowRequest | null = null;
if (isUuid(identifier)) {
workflowRequest = await WorkflowRequest.findByPk(identifier);
// Helper to check if identifier is MongoDB ObjectId
const isObjectId = (id: string): boolean => {
return /^[0-9a-f]{24}$/i.test(id);
};
// Get workflow request - handle UUID (requestId), requestNumber, or MongoDB ObjectId (_id)
let workflowRequest: any = null;
const identifierIsUuid = isUuid(identifier);
const identifierIsObjectId = isObjectId(identifier);
console.log('[DEBUG] Looking up workflow request:', {
identifier,
identifierIsUuid,
identifierIsObjectId,
lookupField: identifierIsUuid ? 'requestId' : identifierIsObjectId ? '_id' : 'requestNumber'
});
if (identifierIsUuid) {
workflowRequest = await WorkflowRequest.findOne({ requestId: identifier });
} else if (identifierIsObjectId) {
workflowRequest = await WorkflowRequest.findById(identifier);
} else {
workflowRequest = await WorkflowRequest.findOne({ where: { requestNumber: identifier } });
workflowRequest = await WorkflowRequest.findOne({ requestNumber: identifier });
}
console.log('[DEBUG] Workflow lookup result:', {
found: !!workflowRequest,
requestId: workflowRequest?.requestId,
requestNumber: workflowRequest?.requestNumber,
_id: workflowRequest?._id?.toString()
});
if (!workflowRequest) {
logWithContext('error', 'Workflow request not found for document upload', {
identifier,
isUuid: isUuid(identifier),
userId: req.user?.userId
isUuid: identifierIsUuid,
isObjectId: identifierIsObjectId,
userId: req.user?.userId,
attemptedLookup: identifierIsUuid ? 'requestId' : identifierIsObjectId ? '_id' : 'requestNumber'
});
ResponseHandler.error(res, 'Workflow request not found', 404);
return;
@ -67,11 +102,10 @@ export class DocumentController {
// Get the actual requestId (UUID) and requestNumber
const requestId = (workflowRequest as any).requestId || (workflowRequest as any).request_id;
const requestNumber = (workflowRequest as any).requestNumber || (workflowRequest as any).request_number;
if (!requestNumber) {
logWithContext('error', 'Request number not found for workflow', {
requestId,
workflowRequest: JSON.stringify(workflowRequest.toJSON()),
userId: req.user?.userId
});
ResponseHandler.error(res, 'Request number not found for workflow', 500);
@ -84,28 +118,28 @@ export class DocumentController {
return;
}
// Validate file size against database configuration
// Validate file size
const maxFileSizeMB = await getConfigNumber('MAX_FILE_SIZE_MB', 10);
const maxFileSizeBytes = maxFileSizeMB * 1024 * 1024;
if (file.size > maxFileSizeBytes) {
ResponseHandler.error(
res,
`File size exceeds the maximum allowed size of ${maxFileSizeMB}MB. Current size: ${(file.size / (1024 * 1024)).toFixed(2)}MB`,
`File size exceeds the maximum allowed size of ${maxFileSizeMB} MB.Current size: ${(file.size / (1024 * 1024)).toFixed(2)} MB`,
400
);
return;
}
// Validate file type against database configuration
// Validate file type
const allowedFileTypesStr = await getConfigValue('ALLOWED_FILE_TYPES', 'pdf,doc,docx,xls,xlsx,ppt,pptx,jpg,jpeg,png,gif');
const allowedFileTypes = allowedFileTypesStr.split(',').map(ext => ext.trim().toLowerCase());
const allowedFileTypes = allowedFileTypesStr.split(',').map((ext: string) => ext.trim().toLowerCase());
const fileExtension = path.extname(file.originalname).replace('.', '').toLowerCase();
if (!allowedFileTypes.includes(fileExtension)) {
ResponseHandler.error(
res,
`File type "${fileExtension}" is not allowed. Allowed types: ${allowedFileTypes.join(', ')}`,
`File type "${fileExtension}" is not allowed.Allowed types: ${allowedFileTypes.join(', ')} `,
400
);
return;
@ -117,7 +151,7 @@ export class DocumentController {
const extension = path.extname(file.originalname).replace('.', '').toLowerCase();
const category = (req.body?.category as string) || 'OTHER';
// Upload with automatic fallback to local storage
// Upload file
const uploadResult = await gcsStorageService.uploadFileWithFallback({
buffer: fileBuffer,
originalName: file.originalname,
@ -125,11 +159,11 @@ export class DocumentController {
requestNumber: requestNumber,
fileType: 'documents'
});
const storageUrl = uploadResult.storageUrl;
const gcsFilePath = uploadResult.filePath;
// Clean up local temporary file if it exists (from multer disk storage)
// Clean up local temp file
if (file.path && fs.existsSync(file.path)) {
try {
fs.unlinkSync(file.path);
@ -138,134 +172,30 @@ export class DocumentController {
}
}
// Check if storageUrl exceeds database column limit (500 chars)
// GCS signed URLs can be very long (500-1000+ chars)
const MAX_STORAGE_URL_LENGTH = 500;
let finalStorageUrl = storageUrl;
if (storageUrl && storageUrl.length > MAX_STORAGE_URL_LENGTH) {
logWithContext('warn', 'Storage URL exceeds database column limit, truncating', {
originalLength: storageUrl.length,
maxLength: MAX_STORAGE_URL_LENGTH,
urlPrefix: storageUrl.substring(0, 100),
});
// For signed URLs, we can't truncate as it will break the URL
// Instead, store null and generate signed URLs on-demand when needed
// The filePath is sufficient to generate a new signed URL later
finalStorageUrl = null as any;
logWithContext('info', 'Storing null storageUrl - will generate signed URL on-demand', {
filePath: gcsFilePath,
reason: 'Signed URL too long for database column',
});
}
// Truncate file names if they exceed database column limits (255 chars)
const MAX_FILE_NAME_LENGTH = 255;
const originalFileName = file.originalname;
let truncatedOriginalFileName = originalFileName;
if (originalFileName.length > MAX_FILE_NAME_LENGTH) {
// Preserve file extension when truncating
const ext = path.extname(originalFileName);
const nameWithoutExt = path.basename(originalFileName, ext);
const maxNameLength = MAX_FILE_NAME_LENGTH - ext.length;
if (maxNameLength > 0) {
truncatedOriginalFileName = nameWithoutExt.substring(0, maxNameLength) + ext;
} else {
// If extension itself is too long, just use the extension
truncatedOriginalFileName = ext.substring(0, MAX_FILE_NAME_LENGTH);
}
logWithContext('warn', 'File name truncated to fit database column', {
originalLength: originalFileName.length,
truncatedLength: truncatedOriginalFileName.length,
originalName: originalFileName.substring(0, 100) + '...',
truncatedName: truncatedOriginalFileName,
});
}
// Generate fileName (basename of the generated file name in GCS)
const generatedFileName = path.basename(gcsFilePath);
let truncatedFileName = generatedFileName;
if (generatedFileName.length > MAX_FILE_NAME_LENGTH) {
const ext = path.extname(generatedFileName);
const nameWithoutExt = path.basename(generatedFileName, ext);
const maxNameLength = MAX_FILE_NAME_LENGTH - ext.length;
if (maxNameLength > 0) {
truncatedFileName = nameWithoutExt.substring(0, maxNameLength) + ext;
} else {
truncatedFileName = ext.substring(0, MAX_FILE_NAME_LENGTH);
}
logWithContext('warn', 'Generated file name truncated', {
originalLength: generatedFileName.length,
truncatedLength: truncatedFileName.length,
});
}
// Prepare document data
const documentData = {
documentId: require('crypto').randomUUID(),
requestId,
uploadedBy: userId,
fileName: truncatedFileName,
originalFileName: truncatedOriginalFileName,
fileName: path.basename(gcsFilePath).substring(0, 255),
originalFileName: file.originalname.substring(0, 255),
fileType: extension,
fileExtension: extension,
fileSize: file.size,
filePath: gcsFilePath, // Store GCS path or local path
storageUrl: finalStorageUrl, // Store GCS URL or local URL (null if too long)
filePath: gcsFilePath,
storageUrl: (storageUrl && storageUrl.length < 500) ? storageUrl : undefined,
mimeType: file.mimetype,
checksum,
isGoogleDoc: false,
googleDocUrl: null as any,
category,
category: category as any,
version: 1,
parentDocumentId: null as any,
isDeleted: false,
downloadCount: 0,
};
logWithContext('info', 'Creating document record', {
requestId,
userId,
fileName: file.originalname,
filePath: gcsFilePath,
storageUrl: storageUrl,
documentData: JSON.stringify(documentData, null, 2),
});
const doc = await (DocumentModel as any).create(documentData);
let doc;
try {
doc = await Document.create(documentData as any);
logWithContext('info', 'Document record created successfully', {
documentId: doc.documentId,
requestId,
fileName: file.originalname,
});
} catch (createError) {
const createErrorMessage = createError instanceof Error ? createError.message : 'Unknown error';
const createErrorStack = createError instanceof Error ? createError.stack : undefined;
// Check if it's a Sequelize validation error
const sequelizeError = (createError as any)?.errors || (createError as any)?.parent;
logWithContext('error', 'Document.create() failed', {
error: createErrorMessage,
stack: createErrorStack,
sequelizeErrors: sequelizeError,
requestId,
userId,
fileName: file.originalname,
filePath: gcsFilePath,
storageUrl: storageUrl,
documentData: JSON.stringify(documentData, null, 2),
});
throw createError; // Re-throw to be caught by outer catch block
}
// Log document upload event
logDocumentEvent('uploaded', doc.documentId, {
requestId,
// Log event
logDocumentEvent('uploaded', (doc as any).documentId, {
requestId: workflowRequest.requestId, // Standardized to UUID
userId,
fileName: file.originalname,
fileType: extension,
@ -274,249 +204,128 @@ export class DocumentController {
});
// Get user details for activity logging
const user = await User.findByPk(userId);
const uploaderName = (user as any)?.displayName || (user as any)?.email || 'User';
// Log activity for document upload
const uploader = await UserModel.findOne({ userId });
const uploaderName = uploader?.displayName || uploader?.email || 'User';
// Log activity
const requestMeta = getRequestMetadata(req);
await activityService.log({
requestId,
requestId: workflowRequest.requestId, // Standardized to UUID
type: 'document_added',
user: { userId, name: uploaderName },
timestamp: new Date().toISOString(),
action: 'Document Added',
details: `Added ${file.originalname} as supporting document by ${uploaderName}`,
metadata: {
fileName: file.originalname,
fileSize: file.size,
details: `Added ${file.originalname} as supporting document by ${uploaderName} `,
metadata: {
fileName: file.originalname,
fileSize: file.size,
fileType: extension,
category
category
},
ipAddress: requestMeta.ipAddress,
userAgent: requestMeta.userAgent
});
// Send notifications for additional document added
// Send notifications
try {
const initiatorId = (workflowRequest as any).initiatorId || (workflowRequest as any).initiator_id;
const isInitiator = userId === initiatorId;
// Get all participants (spectators)
const spectators = await Participant.findAll({
where: {
requestId,
participantType: 'SPECTATOR'
},
include: [{
model: User,
as: 'user',
attributes: ['userId', 'email', 'displayName']
}]
// Get participants
const participants = await Participant.find({
requestId: workflowRequest.requestId, // Standardized to UUID
participantType: 'SPECTATOR'
});
// Get current approver (pending or in-progress approval level)
const currentApprovalLevel = await ApprovalLevel.findOne({
where: {
requestId,
status: { [Op.in]: ['PENDING', 'IN_PROGRESS'] }
},
order: [['levelNumber', 'ASC']],
include: [{
model: User,
as: 'approver',
attributes: ['userId', 'email', 'displayName']
}]
});
// Get current approver
const currentLevel = await ApprovalLevel.findOne({
requestId: requestId,
status: { $in: ['PENDING', 'IN_PROGRESS'] }
}).sort({ levelNumber: 1 });
logWithContext('info', 'Current approver lookup for document notification', {
requestId,
currentApprovalLevelFound: !!currentApprovalLevel,
approverUserId: currentApprovalLevel ? ((currentApprovalLevel as any).approver || (currentApprovalLevel as any).Approver)?.userId : null,
isInitiator
});
// Determine who to notify based on who uploaded
const recipientsToNotify: Array<{ userId: string; email: string; displayName: string }> = [];
if (isInitiator) {
// Initiator added → notify spectators and current approver
spectators.forEach((spectator: any) => {
const spectatorUser = spectator.user || spectator.User;
if (spectatorUser && spectatorUser.userId !== userId) {
recipientsToNotify.push({
userId: spectatorUser.userId,
email: spectatorUser.email,
displayName: spectatorUser.displayName || spectatorUser.email
});
}
});
if (currentApprovalLevel) {
const approverUser = (currentApprovalLevel as any).approver || (currentApprovalLevel as any).Approver;
if (approverUser && approverUser.userId !== userId) {
recipientsToNotify.push({
userId: approverUser.userId,
email: approverUser.email,
displayName: approverUser.displayName || approverUser.email
});
}
}
} else {
// Check if uploader is a spectator
const uploaderParticipant = await Participant.findOne({
where: {
requestId,
userId,
participantType: 'SPECTATOR'
}
});
if (uploaderParticipant) {
// Spectator added → notify initiator and current approver
const initiator = await User.findByPk(initiatorId);
if (initiator) {
const initiatorData = initiator.toJSON();
if (initiatorData.userId !== userId) {
recipientsToNotify.push({
userId: initiatorData.userId,
email: initiatorData.email,
displayName: initiatorData.displayName || initiatorData.email
});
}
}
if (currentApprovalLevel) {
const approverUser = (currentApprovalLevel as any).approver || (currentApprovalLevel as any).Approver;
if (approverUser && approverUser.userId !== userId) {
recipientsToNotify.push({
userId: approverUser.userId,
email: approverUser.email,
displayName: approverUser.displayName || approverUser.email
});
}
}
} else {
// Approver added → notify initiator and spectators
const initiator = await User.findByPk(initiatorId);
if (initiator) {
const initiatorData = initiator.toJSON();
if (initiatorData.userId !== userId) {
recipientsToNotify.push({
userId: initiatorData.userId,
email: initiatorData.email,
displayName: initiatorData.displayName || initiatorData.email
});
}
}
spectators.forEach((spectator: any) => {
const spectatorUser = spectator.user || spectator.User;
if (spectatorUser && spectatorUser.userId !== userId) {
recipientsToNotify.push({
userId: spectatorUser.userId,
email: spectatorUser.email,
displayName: spectatorUser.displayName || spectatorUser.email
});
}
// Add initiator if they are not the uploader
if (!isInitiator) {
const initiator = await UserModel.findOne({ userId: initiatorId });
if (initiator) {
recipientsToNotify.push({
userId: initiator.userId,
email: initiator.email,
displayName: initiator.displayName || initiator.email
});
}
}
// Send notifications (email, in-app, and web-push)
const requestData = {
requestNumber: requestNumber,
requestId: requestId,
title: (workflowRequest as any).title || 'Request'
};
// Add current approver if not the uploader
if (currentLevel?.approver?.userId && currentLevel.approver.userId !== userId) {
const approver = await UserModel.findOne({ userId: currentLevel.approver.userId });
if (approver) {
recipientsToNotify.push({
userId: approver.userId,
email: approver.email,
displayName: approver.displayName || approver.email
});
}
}
// Prepare user IDs for in-app and web-push notifications
const recipientUserIds = recipientsToNotify.map(r => r.userId);
// Add spectators
for (const p of participants) {
if (p.userId !== userId && !recipientsToNotify.some(r => r.userId === p.userId)) {
const spectator = await UserModel.findOne({ userId: p.userId });
if (spectator) {
recipientsToNotify.push({
userId: spectator.userId,
email: spectator.email,
displayName: spectator.displayName || spectator.email
});
}
}
}
// Send in-app and web-push notifications
if (recipientUserIds.length > 0) {
try {
await notificationService.sendToUsers(
recipientUserIds,
{
title: 'Additional Document Added',
body: `${uploaderName} added "${file.originalname}" to ${requestNumber}`,
requestId,
requestNumber,
url: `/request/${requestNumber}`,
type: 'document_added',
priority: 'MEDIUM',
actionRequired: false,
metadata: {
documentName: file.originalname,
fileSize: file.size,
addedByName: uploaderName,
source: 'Documents Tab'
}
}
);
logWithContext('info', 'In-app and web-push notifications sent for additional document', {
requestId,
// Send notifications
if (recipientsToNotify.length > 0) {
const recipientIds = recipientsToNotify.map(r => r.userId);
await notificationService.sendToUsers(recipientIds, {
title: 'Additional Document Added',
body: `${uploaderName} added "${file.originalname}" to ${requestNumber} `,
requestId,
requestNumber,
url: `/ request / ${requestNumber} `,
type: 'document_added',
priority: 'MEDIUM',
actionRequired: false,
metadata: {
documentName: file.originalname,
recipientsCount: recipientUserIds.length
});
} catch (notifyError) {
logWithContext('error', 'Failed to send in-app/web-push notifications for additional document', {
requestId,
error: notifyError instanceof Error ? notifyError.message : 'Unknown error'
});
}
}
addedByName: uploaderName
}
});
// Send email notifications
for (const recipient of recipientsToNotify) {
await emailNotificationService.sendAdditionalDocumentAdded(
requestData,
recipient,
{
const requestData = {
requestNumber,
requestId,
title: (workflowRequest as any).title || 'Request'
};
for (const recipient of recipientsToNotify) {
await emailNotificationService.sendAdditionalDocumentAdded(requestData, recipient, {
documentName: file.originalname,
fileSize: file.size,
addedByName: uploaderName,
source: 'Documents Tab'
}
);
});
}
}
logWithContext('info', 'Additional document notifications sent', {
requestId,
documentName: file.originalname,
recipientsCount: recipientsToNotify.length,
isInitiator
});
} catch (notifyError) {
// Don't fail document upload if notifications fail
logWithContext('error', 'Failed to send additional document notifications', {
requestId,
error: notifyError instanceof Error ? notifyError.message : 'Unknown error'
});
logWithContext('error', 'Failed to send document notifications', { error: notifyError });
}
ResponseHandler.success(res, doc, 'File uploaded', 201);
} catch (error) {
const message = error instanceof Error ? error.message : 'Unknown error';
const errorStack = error instanceof Error ? error.stack : undefined;
logWithContext('error', 'Document upload failed', {
userId: req.user?.userId,
requestId: req.body?.requestId || req.body?.request_id,
body: req.body,
bodyKeys: Object.keys(req.body || {}),
file: req.file ? {
originalname: req.file.originalname,
size: req.file.size,
mimetype: req.file.mimetype,
hasBuffer: !!req.file.buffer,
hasPath: !!req.file.path
} : 'No file',
error: message,
stack: errorStack
});
logWithContext('error', 'Document upload failed', { error: message });
ResponseHandler.error(res, 'Upload failed', 500, message);
}
}
}

View File

@ -1,8 +1,8 @@
import { Request, Response } from 'express';
import { Notification } from '@models/Notification';
import { Op } from 'sequelize';
import logger from '@utils/logger';
import { notificationService } from '@services/notification.service';
import mongoose from 'mongoose';
import { NotificationModel as Notification } from '../models/mongoose/Notification.schema';
import logger from '../utils/logger';
import { notificationMongoService as notificationService } from '../services/notification.service';
export class NotificationController {
/**
@ -25,12 +25,12 @@ export class NotificationController {
const offset = (Number(page) - 1) * Number(limit);
const { rows, count } = await Notification.findAndCountAll({
where,
order: [['createdAt', 'DESC']],
limit: Number(limit),
offset
});
const rows = await Notification.find(where)
.sort({ createdAt: -1 })
.limit(Number(limit))
.skip(offset);
const count = await Notification.countDocuments(where);
res.json({
success: true,
@ -42,7 +42,7 @@ export class NotificationController {
total: count,
totalPages: Math.ceil(count / Number(limit))
},
unreadCount: unreadOnly === 'true' ? count : await Notification.count({ where: { userId, isRead: false } })
unreadCount: unreadOnly === 'true' ? count : await Notification.countDocuments({ userId, isRead: false })
}
});
} catch (error: any) {
@ -63,8 +63,8 @@ export class NotificationController {
return;
}
const count = await Notification.count({
where: { userId, isRead: false }
const count = await Notification.countDocuments({
userId, isRead: false
});
res.json({
@ -90,8 +90,13 @@ export class NotificationController {
return;
}
if (!mongoose.Types.ObjectId.isValid(notificationId)) {
res.status(400).json({ success: false, message: 'Invalid notification ID' });
return;
}
const notification = await Notification.findOne({
where: { notificationId, userId }
_id: notificationId, userId
});
if (!notification) {
@ -99,10 +104,10 @@ export class NotificationController {
return;
}
await notification.update({
isRead: true,
readAt: new Date()
});
notification.isRead = true;
notification.metadata = notification.metadata || {};
notification.metadata.readAt = new Date();
await notification.save();
res.json({
success: true,
@ -127,9 +132,9 @@ export class NotificationController {
return;
}
await Notification.update(
{ isRead: true, readAt: new Date() },
{ where: { userId, isRead: false } }
await Notification.updateMany(
{ userId, isRead: false },
{ $set: { isRead: true } }
);
res.json({
@ -155,10 +160,17 @@ export class NotificationController {
return;
}
const deleted = await Notification.destroy({
where: { notificationId, userId }
if (!mongoose.Types.ObjectId.isValid(notificationId)) {
res.status(400).json({ success: false, message: 'Invalid notification ID' });
return;
}
const result = await Notification.deleteOne({
_id: notificationId, userId
});
const deleted = result.deletedCount;
if (deleted === 0) {
res.status(404).json({ success: false, message: 'Notification not found' });
return;
@ -201,4 +213,3 @@ export class NotificationController {
}
}
}

View File

@ -1,12 +1,14 @@
import { Response } from 'express';
import { pauseService } from '@services/pause.service';
import { pauseMongoService } from '@services/pause.service';
import { workflowServiceMongo } from '@services/workflow.service';
import { ResponseHandler } from '@utils/responseHandler';
import type { AuthenticatedRequest } from '../types/express';
import { z } from 'zod';
// Validation schemas
// In MongoDB, levelId could be a string (ObjectId)
const pauseWorkflowSchema = z.object({
levelId: z.string().uuid().optional().nullable(),
levelId: z.string().optional().nullable(),
reason: z.string().min(1, 'Reason is required').max(1000, 'Reason must be less than 1000 characters'),
resumeDate: z.string().datetime().or(z.date())
});
@ -26,18 +28,25 @@ export class PauseController {
const userId = req.user?.userId;
if (!userId) {
ResponseHandler.error(res, 'Unauthorized', 401);
ResponseHandler.unauthorized(res, 'Unauthorized');
return;
}
// Resolve requestId (UUID)
const requestId = await workflowServiceMongo.resolveRequestId(id);
if (!requestId) {
ResponseHandler.notFound(res, 'Request not found');
return;
}
// Validate request body
const validated = pauseWorkflowSchema.parse(req.body);
const resumeDate = validated.resumeDate instanceof Date
? validated.resumeDate
const resumeDate = validated.resumeDate instanceof Date
? validated.resumeDate
: new Date(validated.resumeDate);
const result = await pauseService.pauseWorkflow(
id,
const result = await pauseMongoService.pauseWorkflow(
requestId,
validated.levelId || null,
userId,
validated.reason,
@ -68,14 +77,21 @@ export class PauseController {
const userId = req.user?.userId;
if (!userId) {
ResponseHandler.error(res, 'Unauthorized', 401);
ResponseHandler.unauthorized(res, 'Unauthorized');
return;
}
// Resolve requestId (UUID)
const requestId = await workflowServiceMongo.resolveRequestId(id);
if (!requestId) {
ResponseHandler.notFound(res, 'Request not found');
return;
}
// Validate request body (notes is optional)
const validated = resumeWorkflowSchema.parse(req.body || {});
const result = await pauseService.resumeWorkflow(id, userId, validated.notes);
const result = await pauseMongoService.resumeWorkflow(requestId, userId, validated.notes);
ResponseHandler.success(res, {
workflow: result.workflow,
@ -101,11 +117,18 @@ export class PauseController {
const userId = req.user?.userId;
if (!userId) {
ResponseHandler.error(res, 'Unauthorized', 401);
ResponseHandler.unauthorized(res, 'Unauthorized');
return;
}
await pauseService.retriggerPause(id, userId);
// Resolve requestId (UUID)
const requestId = await workflowServiceMongo.resolveRequestId(id);
if (!requestId) {
ResponseHandler.notFound(res, 'Request not found');
return;
}
await pauseMongoService.retriggerPause(requestId, userId);
ResponseHandler.success(res, null, 'Pause retrigger request sent successfully', 200);
} catch (error: any) {
@ -122,7 +145,14 @@ export class PauseController {
try {
const { id } = req.params;
const pauseDetails = await pauseService.getPauseDetails(id);
// Resolve requestId (UUID)
const requestId = await workflowServiceMongo.resolveRequestId(id);
if (!requestId) {
ResponseHandler.notFound(res, 'Request not found');
return;
}
const pauseDetails = await pauseMongoService.getPauseDetails(requestId);
if (!pauseDetails) {
ResponseHandler.success(res, { isPaused: false }, 'Workflow is not paused', 200);
@ -138,4 +168,3 @@ export class PauseController {
}
export const pauseController = new PauseController();

View File

@ -1,13 +1,11 @@
import { Request, Response } from 'express';
import { TatAlert } from '@models/TatAlert';
import { ApprovalLevel } from '@models/ApprovalLevel';
import { User } from '@models/User';
import { WorkflowRequest } from '@models/WorkflowRequest';
import logger from '@utils/logger';
import { sequelize } from '@config/database';
import { QueryTypes } from 'sequelize';
import { activityService } from '@services/activity.service';
import { getRequestMetadata } from '@utils/requestUtils';
import { TatAlertModel as TatAlert } from '../models/mongoose/TatAlert.schema';
import { ApprovalLevelModel as ApprovalLevel } from '../models/mongoose/ApprovalLevel.schema';
import { UserModel } from '../models/mongoose/User.schema';
import { WorkflowRequestModel as WorkflowRequest } from '../models/mongoose/WorkflowRequest.schema';
import logger from '../utils/logger';
import { activityMongoService as activityService } from '../services/activity.service';
import { getRequestMetadata } from '../utils/requestUtils';
import type { AuthenticatedRequest } from '../types/express';
/**
@ -16,27 +14,36 @@ import type { AuthenticatedRequest } from '../types/express';
export const getTatAlertsByRequest = async (req: Request, res: Response) => {
try {
const { requestId } = req.params;
const alerts = await TatAlert.findAll({
where: { requestId },
include: [
{
model: ApprovalLevel,
as: 'level',
attributes: ['levelNumber', 'levelName', 'approverName', 'status']
},
{
model: User,
as: 'approver',
attributes: ['userId', 'displayName', 'email', 'department']
const alerts = await TatAlert.find({ requestId })
.sort({ alertSentAt: 1 })
.lean();
// Enrich with level info manually since we can't easily populate across collections if not using ObjectIds strictly for references in Mongoose style (using strings here)
// Or we can query ApprovalLevel
const enrichedAlerts = await Promise.all(alerts.map(async (alert: any) => {
// Fetch level info
const level = await ApprovalLevel.findOne({ levelId: alert.levelId }).select('levelNumber levelName approverName status').lean(); // Use findOne with levelId (string)
const alertData = { ...alert, level };
if (alert.approverId) {
const approver = await UserModel.findOne({ userId: alert.approverId }).select('userId displayName email department').lean();
if (approver) {
alertData.approver = {
userId: approver.userId,
displayName: approver.displayName,
email: approver.email,
department: approver.department
};
}
],
order: [['alertSentAt', 'ASC']]
});
}
return alertData;
}));
res.json({
success: true,
data: alerts
data: enrichedAlerts
});
} catch (error) {
logger.error('[TAT Controller] Error fetching TAT alerts:', error);
@ -53,12 +60,10 @@ export const getTatAlertsByRequest = async (req: Request, res: Response) => {
export const getTatAlertsByLevel = async (req: Request, res: Response) => {
try {
const { levelId } = req.params;
const alerts = await TatAlert.findAll({
where: { levelId },
order: [['alertSentAt', 'ASC']]
});
const alerts = await TatAlert.find({ levelId })
.sort({ alertSentAt: 1 });
res.json({
success: true,
data: alerts
@ -78,32 +83,62 @@ export const getTatAlertsByLevel = async (req: Request, res: Response) => {
export const getTatComplianceSummary = async (req: Request, res: Response) => {
try {
const { startDate, endDate } = req.query;
let dateFilter = '';
const matchStage: any = {};
if (startDate && endDate) {
dateFilter = `AND alert_sent_at BETWEEN '${startDate}' AND '${endDate}'`;
matchStage.alertSentAt = {
$gte: new Date(startDate as string),
$lte: new Date(endDate as string)
};
}
const summary = await sequelize.query(`
SELECT
COUNT(*) as total_alerts,
COUNT(CASE WHEN alert_type = 'TAT_50' THEN 1 END) as alerts_50,
COUNT(CASE WHEN alert_type = 'TAT_75' THEN 1 END) as alerts_75,
COUNT(CASE WHEN alert_type = 'TAT_100' THEN 1 END) as breaches,
COUNT(CASE WHEN was_completed_on_time = true THEN 1 END) as completed_on_time,
COUNT(CASE WHEN was_completed_on_time = false THEN 1 END) as completed_late,
ROUND(
COUNT(CASE WHEN was_completed_on_time = true THEN 1 END) * 100.0 /
NULLIF(COUNT(CASE WHEN was_completed_on_time IS NOT NULL THEN 1 END), 0),
2
) as compliance_percentage
FROM tat_alerts
WHERE 1=1 ${dateFilter}
`, { type: QueryTypes.SELECT });
const summary = await TatAlert.aggregate([
{ $match: matchStage },
{
$group: {
_id: null,
total_alerts: { $sum: 1 },
alerts_50: { $sum: { $cond: [{ $eq: ['$alertType', 'TAT_50'] }, 1, 0] } },
alerts_75: { $sum: { $cond: [{ $eq: ['$alertType', 'TAT_75'] }, 1, 0] } },
breaches: { $sum: { $cond: [{ $eq: ['$alertType', 'TAT_100'] }, 1, 0] } },
completed_on_time: { $sum: { $cond: [{ $eq: ['$wasCompletedOnTime', true] }, 1, 0] } },
completed_late: { $sum: { $cond: [{ $eq: ['$wasCompletedOnTime', false] }, 1, 0] } },
completed_total: {
$sum: { $cond: [{ $ne: ['$wasCompletedOnTime', null] }, 1, 0] }
}
}
},
{
$project: {
_id: 0,
total_alerts: 1,
alerts_50: 1,
alerts_75: 1,
breaches: 1,
completed_on_time: 1,
completed_late: 1,
compliance_percentage: {
$cond: [
{ $eq: ['$completed_total', 0] },
0,
{ $round: [{ $multiply: [{ $divide: ['$completed_on_time', '$completed_total'] }, 100] }, 2] }
]
}
}
}
]);
res.json({
success: true,
data: summary[0] || {}
data: summary[0] || {
total_alerts: 0,
alerts_50: 0,
alerts_75: 0,
breaches: 0,
completed_on_time: 0,
completed_late: 0,
compliance_percentage: 0
}
});
} catch (error) {
logger.error('[TAT Controller] Error fetching TAT compliance summary:', error);
@ -119,33 +154,57 @@ export const getTatComplianceSummary = async (req: Request, res: Response) => {
*/
export const getTatBreachReport = async (req: Request, res: Response) => {
try {
const breaches = await sequelize.query(`
SELECT
ta.alert_id,
ta.request_id,
w.request_number,
w.title as request_title,
w.priority,
al.level_number,
al.approver_name,
ta.tat_hours_allocated,
ta.tat_hours_elapsed,
ta.alert_sent_at,
ta.completion_time,
ta.was_completed_on_time,
CASE
WHEN ta.completion_time IS NULL THEN 'Still Pending'
WHEN ta.was_completed_on_time = false THEN 'Completed Late'
ELSE 'Completed On Time'
END as completion_status
FROM tat_alerts ta
JOIN workflow_requests w ON ta.request_id = w.request_id
JOIN approval_levels al ON ta.level_id = al.level_id
WHERE ta.is_breached = true
ORDER BY ta.alert_sent_at DESC
LIMIT 100
`, { type: QueryTypes.SELECT });
const breaches = await TatAlert.aggregate([
{ $match: { isBreached: true } },
{ $sort: { alertSentAt: -1 } },
{ $limit: 100 },
// Lookup WorkflowRequest
{
$lookup: {
from: 'workflow_requests',
localField: 'requestId',
foreignField: 'requestId',
as: 'request'
}
},
{ $unwind: { path: '$request', preserveNullAndEmptyArrays: true } },
// Lookup ApprovalLevel
{
$lookup: {
from: 'approval_levels',
localField: 'levelId',
foreignField: 'levelId',
as: 'level'
}
},
{ $unwind: { path: '$level', preserveNullAndEmptyArrays: true } },
{
$project: {
alert_id: '$_id',
request_id: '$requestId',
request_number: '$request.requestNumber',
request_title: '$request.title',
priority: '$request.priority',
level_number: '$level.levelNumber',
approver_name: '$level.approverName',
tat_hours_allocated: '$tatHoursAllocated',
tat_hours_elapsed: '$tatHoursElapsed',
alert_sent_at: '$alertSentAt',
completion_time: '$completionTime',
was_completed_on_time: '$wasCompletedOnTime',
completion_status: {
$switch: {
branches: [
{ case: { $eq: ['$completionTime', null] }, then: 'Still Pending' },
{ case: { $eq: ['$wasCompletedOnTime', false] }, then: 'Completed Late' }
],
default: 'Completed On Time'
}
}
}
}
]);
res.json({
success: true,
data: breaches
@ -184,7 +243,9 @@ export const updateBreachReason = async (req: Request, res: Response) => {
}
// Get the approval level to verify permissions
const level = await ApprovalLevel.findByPk(levelId);
// Note: levelId in params likely refers to the level document UUID
const level = await ApprovalLevel.findOne({ levelId }); // Use findOne with levelId custom ID
if (!level) {
return res.status(404).json({
success: false,
@ -193,7 +254,7 @@ export const updateBreachReason = async (req: Request, res: Response) => {
}
// Get user to check role
const user = await User.findByPk(userId);
const user = await UserModel.findOne({ userId });
if (!user) {
return res.status(404).json({
success: false,
@ -201,13 +262,13 @@ export const updateBreachReason = async (req: Request, res: Response) => {
});
}
const userRole = (user as any).role;
const approverId = (level as any).approverId;
const userRole = user.role;
const approverId = (level as any).approverId || (level.approver ? level.approver.userId : null);
// Check permissions: ADMIN, MANAGEMENT, or the approver
const hasPermission =
userRole === 'ADMIN' ||
userRole === 'MANAGEMENT' ||
const hasPermission =
userRole === 'ADMIN' ||
userRole === 'MANAGEMENT' ||
approverId === userId;
if (!hasPermission) {
@ -218,28 +279,25 @@ export const updateBreachReason = async (req: Request, res: Response) => {
}
// Get user details for activity logging
const userDisplayName = (user as any).displayName || (user as any).email || 'Unknown User';
const userDisplayName = user.displayName || user.email || 'Unknown User';
const isUpdate = !!(level as any).breachReason; // Check if this is an update or first time
const levelNumber = (level as any).levelNumber;
const approverName = (level as any).approverName || 'Unknown Approver';
const approverName = (level as any).approverName || (level.approver ? level.approver.name : 'Unknown Approver');
// Update breach reason directly in approval_levels table
await level.update({
breachReason: breachReason.trim()
});
// Reload to get updated data
await level.reload();
// Update breach reason directly in approval_levels
// Mongoose update
(level as any).breachReason = breachReason.trim();
await level.save();
// Log activity for the request
const userRoleLabel = userRole === 'ADMIN' ? 'Admin' : userRole === 'MANAGEMENT' ? 'Management' : 'Approver';
await activityService.log({
requestId: level.requestId,
type: 'comment', // Using comment type for breach reason entry
user: {
userId: userId,
user: {
userId: userId,
name: userDisplayName,
email: (user as any).email
email: user.email
},
timestamp: new Date().toISOString(),
action: isUpdate ? 'Updated TAT breach reason' : 'Added TAT breach reason',
@ -280,29 +338,53 @@ export const updateBreachReason = async (req: Request, res: Response) => {
export const getApproverTatPerformance = async (req: Request, res: Response) => {
try {
const { approverId } = req.params;
const performance = await sequelize.query(`
SELECT
COUNT(DISTINCT ta.level_id) as total_approvals,
COUNT(CASE WHEN ta.alert_type = 'TAT_50' THEN 1 END) as alerts_50_received,
COUNT(CASE WHEN ta.alert_type = 'TAT_75' THEN 1 END) as alerts_75_received,
COUNT(CASE WHEN ta.is_breached = true THEN 1 END) as breaches,
AVG(ta.tat_hours_elapsed) as avg_hours_taken,
ROUND(
COUNT(CASE WHEN ta.was_completed_on_time = true THEN 1 END) * 100.0 /
NULLIF(COUNT(CASE WHEN ta.was_completed_on_time IS NOT NULL THEN 1 END), 0),
2
) as compliance_rate
FROM tat_alerts ta
WHERE ta.approver_id = :approverId
`, {
replacements: { approverId },
type: QueryTypes.SELECT
});
const performance = await TatAlert.aggregate([
{ $match: { approverId: approverId } },
{
$group: {
_id: null,
total_approvals: { $addToSet: '$levelId' }, // Count distinct levels? Or count alerts? Query said count distinct level_id.
alerts_50_received: { $sum: { $cond: [{ $eq: ['$alertType', 'TAT_50'] }, 1, 0] } },
alerts_75_received: { $sum: { $cond: [{ $eq: ['$alertType', 'TAT_75'] }, 1, 0] } },
breaches: { $sum: { $cond: [{ $eq: ['$isBreached', true] }, 1, 0] } },
min_hours: { $min: '$tatHoursElapsed' }, // Helper to ensure avg works if field exists
tatHoursElapsedSum: { $sum: '$tatHoursElapsed' },
tatHoursElapsedCount: { $sum: 1 },
completed_on_time: { $sum: { $cond: [{ $eq: ['$wasCompletedOnTime', true] }, 1, 0] } },
completed_total: { $sum: { $cond: [{ $ne: ['$wasCompletedOnTime', null] }, 1, 0] } }
}
},
{
$project: {
_id: 0,
total_approvals: { $size: '$total_approvals' },
alerts_50_received: 1,
alerts_75_received: 1,
breaches: 1,
avg_hours_taken: { $divide: ['$tatHoursElapsedSum', '$tatHoursElapsedCount'] },
compliance_rate: {
$cond: [
{ $eq: ['$completed_total', 0] },
0,
{ $round: [{ $multiply: [{ $divide: ['$completed_on_time', '$completed_total'] }, 100] }, 2] }
]
}
}
}
]);
res.json({
success: true,
data: performance[0] || {}
data: performance[0] || {
total_approvals: 0,
alerts_50_received: 0,
alerts_75_received: 0,
breaches: 0,
avg_hours_taken: 0,
compliance_rate: 0
}
});
} catch (error) {
logger.error('[TAT Controller] Error fetching approver TAT performance:', error);
@ -312,4 +394,3 @@ export const getApproverTatPerformance = async (req: Request, res: Response) =>
});
}
};

View File

@ -158,6 +158,7 @@ export class TemplateController {
templateName,
templateDescription,
templateCategory,
workflowType, // Added
approvalLevelsConfig,
defaultTatHours,
formStepsConfig,
@ -174,9 +175,10 @@ export class TemplateController {
} = req.body;
const template = await this.templateService.updateTemplate(templateId, userId, {
templateName: templateName || name,
templateDescription: templateDescription || description,
templateCategory: templateCategory || category,
name: templateName || name,
description: templateDescription || description,
department: templateCategory || category,
workflowType,
approvalLevelsConfig: approvalLevelsConfig || approvers,
defaultTatHours: (defaultTatHours || suggestedSLA) ? parseFloat(defaultTatHours || suggestedSLA) : undefined,
formStepsConfig,

View File

@ -1,5 +1,5 @@
import { Request, Response } from 'express';
import { User } from '@models/User';
import { UserModel } from '../models/mongoose/User.schema';
import { updateNotificationPreferencesSchema } from '@validators/userPreference.validator';
import logger from '@utils/logger';
@ -10,14 +10,7 @@ export const getNotificationPreferences = async (req: Request, res: Response): P
try {
const userId = req.user!.userId;
const user = await User.findByPk(userId, {
attributes: [
'userId',
'emailNotificationsEnabled',
'pushNotificationsEnabled',
'inAppNotificationsEnabled'
]
});
const user = await UserModel.findOne({ userId });
if (!user) {
res.status(404).json({
@ -32,9 +25,9 @@ export const getNotificationPreferences = async (req: Request, res: Response): P
res.json({
success: true,
data: {
emailNotificationsEnabled: user.emailNotificationsEnabled,
pushNotificationsEnabled: user.pushNotificationsEnabled,
inAppNotificationsEnabled: user.inAppNotificationsEnabled
emailNotificationsEnabled: user.notifications?.email ?? true,
pushNotificationsEnabled: user.notifications?.push ?? true,
inAppNotificationsEnabled: user.notifications?.inApp ?? true
}
});
} catch (error: any) {
@ -57,7 +50,7 @@ export const updateNotificationPreferences = async (req: Request, res: Response)
// Validate request body
const validated = updateNotificationPreferencesSchema.parse(req.body);
const user = await User.findByPk(userId);
const user = await UserModel.findOne({ userId });
if (!user) {
res.status(404).json({
@ -67,29 +60,32 @@ export const updateNotificationPreferences = async (req: Request, res: Response)
return;
}
// Update only provided fields
const updateData: any = {};
// Update only provided fields in nested notifications object
if (!user.notifications) {
user.notifications = { email: true, push: true, inApp: true };
}
if (validated.emailNotificationsEnabled !== undefined) {
updateData.emailNotificationsEnabled = validated.emailNotificationsEnabled;
user.notifications.email = validated.emailNotificationsEnabled;
}
if (validated.pushNotificationsEnabled !== undefined) {
updateData.pushNotificationsEnabled = validated.pushNotificationsEnabled;
user.notifications.push = validated.pushNotificationsEnabled;
}
if (validated.inAppNotificationsEnabled !== undefined) {
updateData.inAppNotificationsEnabled = validated.inAppNotificationsEnabled;
user.notifications.inApp = validated.inAppNotificationsEnabled;
}
await user.update(updateData);
await user.save();
logger.info(`[UserPreference] Updated notification preferences for user ${userId}:`, updateData);
logger.info(`[UserPreference] Updated notification preferences for user ${userId}`);
res.json({
success: true,
message: 'Notification preferences updated successfully',
data: {
emailNotificationsEnabled: user.emailNotificationsEnabled,
pushNotificationsEnabled: user.pushNotificationsEnabled,
inAppNotificationsEnabled: user.inAppNotificationsEnabled
emailNotificationsEnabled: user.notifications.email,
pushNotificationsEnabled: user.notifications.push,
inAppNotificationsEnabled: user.notifications.inApp
}
});
} catch (error: any) {
@ -110,4 +106,3 @@ export const updateNotificationPreferences = async (req: Request, res: Response)
});
}
};

View File

@ -1,23 +1,23 @@
import { Request, Response } from 'express';
import { WorkflowService } from '@services/workflow.service';
import { workflowServiceMongo } from '@services/workflow.service';
import { validateCreateWorkflow, validateUpdateWorkflow } from '@validators/workflow.validator';
import { ResponseHandler } from '@utils/responseHandler';
import type { AuthenticatedRequest } from '../types/express';
import { Priority } from '../types/common.types';
import type { UpdateWorkflowRequest } from '../types/workflow.types';
import { Document } from '@models/Document';
import { User } from '@models/User';
import { DocumentModel } from '../models/mongoose/Document.schema';
import { UserModel } from '../models/mongoose/User.schema';
import { gcsStorageService } from '@services/gcsStorage.service';
import fs from 'fs';
import path from 'path';
import crypto from 'crypto';
import { getRequestMetadata } from '@utils/requestUtils';
import { enrichApprovalLevels, enrichSpectators, validateInitiator } from '@services/userEnrichment.service';
import { DealerClaimService } from '@services/dealerClaim.service';
import { DealerClaimMongoService } from '@services/dealerClaim.service';
import { activityMongoService as activityService } from '@services/activity.service';
import logger from '@utils/logger';
const workflowService = new WorkflowService();
const dealerClaimService = new DealerClaimService();
const dealerClaimService = new DealerClaimMongoService();
export class WorkflowController {
async createWorkflow(req: AuthenticatedRequest, res: Response): Promise<void> {
@ -66,9 +66,9 @@ export class WorkflowController {
// Build complete participants array automatically
// This includes: INITIATOR + all APPROVERs + all SPECTATORs
const initiator = await User.findByPk(req.user.userId);
const initiatorEmail = (initiator as any).email;
const initiatorName = (initiator as any).displayName || (initiator as any).email;
const initiator = await UserModel.findOne({ userId: req.user.userId });
const initiatorEmail = (initiator as any)?.email;
const initiatorName = (initiator as any)?.displayName || (initiator as any)?.email;
const autoGeneratedParticipants = [
// Add initiator
@ -100,17 +100,31 @@ export class WorkflowController {
// Convert string literal priority to enum
const workflowData = {
...validatedData,
initiatorEmail,
initiatorName,
priority: validatedData.priority as Priority,
approvalLevels: enrichedApprovalLevels,
participants: autoGeneratedParticipants,
};
const requestMeta = getRequestMetadata(req);
const workflow = await workflowService.createWorkflow(req.user.userId, workflowData, {
const workflow = await workflowServiceMongo.createWorkflow(req.user.userId, workflowData, {
ipAddress: requestMeta.ipAddress,
userAgent: requestMeta.userAgent
});
// Handle auto-submit unless isDraft is true
const isDraftRequested = req.body.isDraft === true; // Default to false if not explicitly true
if (!isDraftRequested) {
logger.info(`[WorkflowController] Auto-submitting workflow ${workflow.requestNumber}`);
await workflowServiceMongo.submitWorkflow(workflow.requestId);
// Refetch to get updated status/state
const updatedWorkflow = await workflowServiceMongo.getWorkflowById(workflow.requestId);
ResponseHandler.success(res, updatedWorkflow, 'Workflow created and submitted successfully', 201);
return;
}
ResponseHandler.success(res, workflow, 'Workflow created successfully', 201);
} catch (error) {
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
@ -200,9 +214,9 @@ export class WorkflowController {
// Build complete participants array automatically
// This includes: INITIATOR + all APPROVERs + all SPECTATORs
const initiator = await User.findByPk(userId);
const initiatorEmail = (initiator as any).email;
const initiatorName = (initiator as any).displayName || (initiator as any).email;
const initiator = await UserModel.findOne({ userId: userId });
const initiatorEmail = (initiator as any)?.email;
const initiatorName = (initiator as any)?.displayName || (initiator as any)?.email || initiatorEmail;
const autoGeneratedParticipants = [
// Add initiator
@ -233,13 +247,15 @@ export class WorkflowController {
const workflowData = {
...validated,
initiatorEmail,
initiatorName,
priority: validated.priority as Priority,
approvalLevels: enrichedApprovalLevels,
participants: autoGeneratedParticipants,
} as any;
const requestMeta = getRequestMetadata(req);
const workflow = await workflowService.createWorkflow(userId, workflowData, {
const workflow = await workflowServiceMongo.createWorkflow(userId, workflowData, {
ipAddress: requestMeta.ipAddress,
userAgent: requestMeta.userAgent
});
@ -249,8 +265,7 @@ export class WorkflowController {
const category = (req.body?.category as string) || 'OTHER';
const docs: any[] = [];
if (files && files.length > 0) {
const { activityService } = require('../services/activity.service');
const user = await User.findByPk(userId);
const user = await UserModel.findOne({ userId });
const uploaderName = (user as any)?.displayName || (user as any)?.email || 'User';
for (const file of files) {
@ -346,12 +361,13 @@ export class WorkflowController {
fileName: truncatedOriginalFileName,
filePath: gcsFilePath,
storageUrl: finalStorageUrl ? 'present' : 'null (too long)',
requestId: workflow.requestId
requestId: workflow.requestNumber
});
try {
const doc = await Document.create({
requestId: workflow.requestId,
const doc = await DocumentModel.create({
documentId: require('crypto').randomUUID(),
requestId: workflow.requestId, // Standardized to UUID
uploadedBy: userId,
fileName: truncatedFileName,
originalFileName: truncatedOriginalFileName,
@ -362,14 +378,10 @@ export class WorkflowController {
storageUrl: finalStorageUrl, // Store GCS URL or local URL (null if too long)
mimeType: file.mimetype,
checksum,
isGoogleDoc: false,
googleDocUrl: null as any,
category: category || 'OTHER',
category: (category || 'OTHER') as any,
version: 1,
parentDocumentId: null as any,
isDeleted: false,
downloadCount: 0,
} as any);
});
docs.push(doc);
logger.info('[Workflow] Document record created successfully', {
documentId: doc.documentId,
@ -382,7 +394,7 @@ export class WorkflowController {
error: docErrorMessage,
stack: docErrorStack,
fileName: file.originalname,
requestId: workflow.requestId,
requestId: workflow.requestNumber,
filePath: gcsFilePath,
storageUrl: storageUrl,
});
@ -393,7 +405,7 @@ export class WorkflowController {
// Log document upload activity
const requestMeta = getRequestMetadata(req);
activityService.log({
requestId: workflow.requestId,
requestId: workflow.requestId, // Use UUID
type: 'document_added',
user: { userId, name: uploaderName },
timestamp: new Date().toISOString(),
@ -406,7 +418,24 @@ export class WorkflowController {
}
}
ResponseHandler.success(res, { requestId: workflow.requestId, documents: docs }, 'Workflow created with documents', 201);
// Handle auto-submit unless isDraft is true
const isDraftRequested = parsed.isDraft === true; // Default to false if not explicitly true
if (!isDraftRequested) {
logger.info(`[WorkflowController] Auto-submitting multipart workflow ${workflow.requestNumber}`);
await workflowServiceMongo.submitWorkflow(workflow.requestId);
// Get updated workflow to return complete state
const updatedWorkflow = await workflowServiceMongo.getWorkflowById(workflow.requestId);
ResponseHandler.success(res, {
requestId: workflow.requestNumber,
status: updatedWorkflow.status,
workflowState: updatedWorkflow.workflowState,
documents: docs
}, 'Workflow created and submitted with documents', 201);
return;
}
ResponseHandler.success(res, { requestId: workflow.requestNumber, documents: docs }, 'Workflow created with documents', 201);
} catch (error) {
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
const errorStack = error instanceof Error ? error.stack : undefined;
@ -423,7 +452,7 @@ export class WorkflowController {
async getWorkflow(req: Request, res: Response): Promise<void> {
try {
const { id } = req.params;
const workflow = await workflowService.getWorkflowById(id);
const workflow = await workflowServiceMongo.getWorkflowById(id);
if (!workflow) {
ResponseHandler.notFound(res, 'Workflow not found');
@ -448,13 +477,13 @@ export class WorkflowController {
}
// Check if user has access to this request
const accessCheck = await workflowService.checkUserRequestAccess(userId, id);
const accessCheck = await workflowServiceMongo.checkUserRequestAccess(userId, id);
if (!accessCheck.hasAccess) {
ResponseHandler.error(res, accessCheck.reason || 'Access denied', 403);
return;
}
const result = await workflowService.getWorkflowDetails(id);
const result = await workflowServiceMongo.getWorkflowDetails(id);
if (!result) {
ResponseHandler.notFound(res, 'Workflow not found');
return;
@ -479,15 +508,17 @@ export class WorkflowController {
templateType: req.query.templateType as string | undefined,
department: req.query.department as string | undefined,
initiator: req.query.initiator as string | undefined,
approver: req.query.approver as string | undefined,
approverName: req.query.approver as string | undefined, // Mapping 'approver' to 'approverName' for Mongo deep filter
approverType: req.query.approverType as 'current' | 'any' | undefined,
slaCompliance: req.query.slaCompliance as string | undefined,
dateRange: req.query.dateRange as string | undefined,
startDate: req.query.startDate as string | undefined,
endDate: req.query.endDate as string | undefined,
lifecycle: req.query.lifecycle as string | undefined,
};
const result = await workflowService.listWorkflows(page, limit, filters);
// USE MONGODB SERVICE FOR LISTING
const result = await workflowServiceMongo.listWorkflows(page, limit, filters);
ResponseHandler.success(res, result, 'Workflows fetched');
} catch (error) {
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
@ -513,10 +544,11 @@ export class WorkflowController {
const dateRange = req.query.dateRange as string | undefined;
const startDate = req.query.startDate as string | undefined;
const endDate = req.query.endDate as string | undefined;
const lifecycle = req.query.lifecycle as string | undefined;
const filters = { search, status, priority, department, initiator, approver, approverType, slaCompliance, dateRange, startDate, endDate };
const filters = { search, status, priority, department, initiator, approverName: approver, approverType, slaCompliance, dateRange, startDate, endDate, lifecycle };
const result = await workflowService.listMyRequests(userId, page, limit, filters);
const result = await workflowServiceMongo.listMyRequests(userId, page, limit, filters);
ResponseHandler.success(res, result, 'My requests fetched');
} catch (error) {
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
@ -547,10 +579,11 @@ export class WorkflowController {
const dateRange = req.query.dateRange as string | undefined;
const startDate = req.query.startDate as string | undefined;
const endDate = req.query.endDate as string | undefined;
const lifecycle = req.query.lifecycle as string | undefined;
const filters = { search, status, priority, templateType, department, initiator, approver, approverType, slaCompliance, dateRange, startDate, endDate };
const filters = { search, status, priority, templateType, department, initiator, approverName: approver, approverType, slaCompliance, dateRange, startDate, endDate, lifecycle };
const result = await workflowService.listParticipantRequests(userId, page, limit, filters);
const result = await workflowServiceMongo.listParticipantRequests(userId, page, limit, filters);
ResponseHandler.success(res, result, 'Participant requests fetched');
} catch (error) {
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
@ -577,10 +610,11 @@ export class WorkflowController {
const dateRange = req.query.dateRange as string | undefined;
const startDate = req.query.startDate as string | undefined;
const endDate = req.query.endDate as string | undefined;
const lifecycle = req.query.lifecycle as string | undefined;
const filters = { search, status, priority, templateType, department, slaCompliance, dateRange, startDate, endDate };
const filters = { search, status, priority, templateType, department, slaCompliance, dateRange, startDate, endDate, lifecycle };
const result = await workflowService.listMyInitiatedRequests(userId, page, limit, filters);
const result = await workflowServiceMongo.listMyInitiatedRequests(userId, page, limit, filters);
ResponseHandler.success(res, result, 'My initiated requests fetched');
} catch (error) {
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
@ -606,7 +640,7 @@ export class WorkflowController {
const sortBy = req.query.sortBy as string | undefined;
const sortOrder = (req.query.sortOrder as string | undefined) || 'desc';
const result = await workflowService.listOpenForMe(userId, page, limit, filters, sortBy, sortOrder);
const result = await workflowServiceMongo.listOpenForMe(userId, page, limit, filters, sortBy, sortOrder);
ResponseHandler.success(res, result, 'Open requests for user fetched');
} catch (error) {
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
@ -632,7 +666,7 @@ export class WorkflowController {
const sortBy = req.query.sortBy as string | undefined;
const sortOrder = (req.query.sortOrder as string | undefined) || 'desc';
const result = await workflowService.listClosedByMe(userId, page, limit, filters, sortBy, sortOrder);
const result = await workflowServiceMongo.listClosedByMe(userId, page, limit, filters, sortBy, sortOrder);
ResponseHandler.success(res, result, 'Closed requests by user fetched');
} catch (error) {
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
@ -651,13 +685,26 @@ export class WorkflowController {
updateData.priority = validatedData.priority === 'EXPRESS' ? Priority.EXPRESS : Priority.STANDARD;
}
const workflow = await workflowService.updateWorkflow(id, updateData);
const workflow = await workflowServiceMongo.updateWorkflow(id, updateData);
if (!workflow) {
ResponseHandler.notFound(res, 'Workflow not found');
return;
}
// Handle auto-submit if isDraft is explicitly false or missing
// If it's already submitted, submitWorkflow will throw or handle gracefully
const isDraftRequested = req.body.isDraft === true; // Default to false
if (!isDraftRequested && (workflow as any).isDraft) {
logger.info(`[WorkflowController] Auto-submitting workflow ${workflow.requestNumber} after update`);
await workflowServiceMongo.submitWorkflow(workflow.requestId);
// Refetch updated workflow
const updatedWorkflow = await workflowServiceMongo.getWorkflowById(workflow.requestId);
ResponseHandler.success(res, updatedWorkflow, 'Workflow updated and submitted successfully');
return;
}
ResponseHandler.success(res, workflow, 'Workflow updated successfully');
} catch (error) {
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
@ -690,7 +737,7 @@ export class WorkflowController {
// Update workflow
let workflow;
try {
workflow = await workflowService.updateWorkflow(id, updateData);
workflow = await workflowServiceMongo.updateWorkflow(id, updateData);
if (!workflow) {
ResponseHandler.notFound(res, 'Workflow not found');
return;
@ -814,7 +861,8 @@ export class WorkflowController {
});
try {
const doc = await Document.create({
const doc = await DocumentModel.create({
documentId: require('crypto').randomUUID(),
requestId: actualRequestId,
uploadedBy: userId,
fileName: truncatedFileName,
@ -826,14 +874,10 @@ export class WorkflowController {
storageUrl: finalStorageUrl, // Store GCS URL or local URL (null if too long)
mimeType: file.mimetype,
checksum,
isGoogleDoc: false,
googleDocUrl: null as any,
category: category || 'OTHER',
category: (category || 'OTHER') as any,
version: 1,
parentDocumentId: null as any,
isDeleted: false,
downloadCount: 0,
} as any);
});
docs.push(doc);
logger.info('[Workflow] Document record created successfully', {
documentId: doc.documentId,
@ -856,6 +900,18 @@ export class WorkflowController {
}
}
// Handle auto-submit if isDraft is false
const isDraftRequested = parsed.isDraft === true; // Default to false
if (!isDraftRequested && (workflow as any).isDraft) {
logger.info(`[WorkflowController] Auto-submitting multipart workflow ${workflow.requestNumber} after update`);
await workflowServiceMongo.submitWorkflow(workflow.requestId);
// Return updated workflow
const updatedWorkflow = await workflowServiceMongo.getWorkflowById(workflow.requestId);
ResponseHandler.success(res, updatedWorkflow, 'Workflow updated and submitted successfully');
return;
}
ResponseHandler.success(res, { workflow, newDocuments: docs }, 'Workflow updated with documents', 200);
} catch (error) {
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
@ -875,7 +931,7 @@ export class WorkflowController {
async submitWorkflow(req: Request, res: Response): Promise<void> {
try {
const { id } = req.params;
const workflow = await workflowService.submitWorkflow(id);
const workflow = await workflowServiceMongo.submitWorkflow(id);
if (!workflow) {
ResponseHandler.notFound(res, 'Workflow not found');
@ -918,14 +974,12 @@ export class WorkflowController {
try {
const { id } = req.params;
// Resolve requestId UUID from identifier (could be requestNumber or UUID)
const workflowService = new WorkflowService();
const wf = await (workflowService as any).findWorkflowByIdentifier(id);
if (!wf) {
// Resolve requestId from identifier (could be requestNumber or UUID)
const requestId = await workflowServiceMongo.resolveRequestId(id);
if (!requestId) {
ResponseHandler.notFound(res, 'Workflow not found');
return;
}
const requestId = wf.getDataValue('requestId');
const history = await dealerClaimService.getHistory(requestId);
ResponseHandler.success(res, history, 'Revision history fetched successfully');

View File

@ -1,5 +1,5 @@
import { Request, Response } from 'express';
import { WorkflowTemplate } from '../models';
import { WorkflowTemplateModel as WorkflowTemplate } from '../models/mongoose/WorkflowTemplate.schema';
import logger from '../utils/logger';
export const createTemplate = async (req: Request, res: Response) => {
@ -36,10 +36,8 @@ export const createTemplate = async (req: Request, res: Response) => {
export const getTemplates = async (req: Request, res: Response) => {
try {
const templates = await WorkflowTemplate.findAll({
where: { isActive: true },
order: [['createdAt', 'DESC']]
});
const templates = await WorkflowTemplate.find({ isActive: true })
.sort({ createdAt: -1 });
res.status(200).json({
success: true,
@ -69,7 +67,7 @@ export const updateTemplate = async (req: Request, res: Response) => {
if (suggestedSLA) updates.defaultTatHours = suggestedSLA;
if (isActive !== undefined) updates.isActive = isActive;
const template = await WorkflowTemplate.findByPk(id);
const template = await WorkflowTemplate.findByIdAndUpdate(id, updates, { new: true });
if (!template) {
return res.status(404).json({
@ -78,8 +76,6 @@ export const updateTemplate = async (req: Request, res: Response) => {
});
}
await template.update(updates);
return res.status(200).json({
success: true,
message: 'Workflow template updated successfully',
@ -98,7 +94,7 @@ export const updateTemplate = async (req: Request, res: Response) => {
export const deleteTemplate = async (req: Request, res: Response) => {
try {
const { id } = req.params;
const template = await WorkflowTemplate.findByPk(id);
const template = await WorkflowTemplate.findById(id);
if (!template) {
return res.status(404).json({
@ -107,13 +103,8 @@ export const deleteTemplate = async (req: Request, res: Response) => {
});
}
// Hard delete or Soft delete based on preference.
// Since we have isActive flag, let's use that (Soft Delete) or just destroy if it's unused.
// For now, let's do a hard delete to match the expectation of "Delete" in the UI
// unless there are FK constraints (which sequelize handles).
// Actually, safer to Soft Delete by setting isActive = false if we want history,
// but user asked for Delete. Let's do destroy.
await template.destroy();
// Hard delete
await template.deleteOne();
return res.status(200).json({
success: true,

View File

@ -1,70 +1,93 @@
import type { Request, Response } from 'express';
import { workNoteService } from '../services/worknote.service';
import { WorkflowService } from '../services/workflow.service';
import { getRequestMetadata } from '@utils/requestUtils';
import type { Response } from 'express';
import { workNoteMongoService } from '../services/worknote.service';
import { workflowServiceMongo } from '../services/workflow.service';
import { ResponseHandler } from '@utils/responseHandler';
import { AuthenticatedRequest } from '../types/express';
import { ParticipantModel } from '../models/mongoose/Participant.schema';
export class WorkNoteController {
private workflowService = new WorkflowService();
/**
* List notes for a request
*/
async list(req: AuthenticatedRequest, res: Response): Promise<void> {
try {
const identifier = req.params.id; // Could be requestNumber or UUID
const requestId = await workflowServiceMongo.resolveRequestId(identifier);
async list(req: any, res: Response): Promise<void> {
const wf = await (this.workflowService as any).findWorkflowByIdentifier(req.params.id);
if (!wf) { res.status(404).json({ success: false, error: 'Not found' }); return; }
const requestId: string = wf.getDataValue('requestId');
const rows = await workNoteService.list(requestId);
res.json({ success: true, data: rows });
if (!requestId) {
ResponseHandler.notFound(res, 'Request not found');
return;
}
const rows = await workNoteMongoService.list(requestId);
ResponseHandler.success(res, rows, 'Work notes retrieved');
} catch (error) {
ResponseHandler.error(res, 'Failed to list work notes', 500);
}
}
async create(req: any, res: Response): Promise<void> {
const wf = await (this.workflowService as any).findWorkflowByIdentifier(req.params.id);
if (!wf) { res.status(404).json({ success: false, error: 'Not found' }); return; }
const requestId: string = wf.getDataValue('requestId');
// Get user's participant info (includes userName and role)
const { Participant } = require('@models/Participant');
const participant = await Participant.findOne({
where: { requestId, userId: req.user?.userId }
});
let userName = req.user?.email || 'Unknown User';
let userRole = 'SPECTATOR';
if (participant) {
userName = (participant as any).userName || (participant as any).user_name || req.user?.email || 'Unknown User';
userRole = (participant as any).participantType || (participant as any).participant_type || 'SPECTATOR';
/**
* Create a new work note
*/
async create(req: AuthenticatedRequest, res: Response): Promise<void> {
try {
const identifier = req.params.id; // Could be requestNumber or UUID
const requestId = await workflowServiceMongo.resolveRequestId(identifier);
if (!requestId) {
ResponseHandler.notFound(res, 'Request not found');
return;
}
// Get user's participant info from Mongo using UUID
const participant = await ParticipantModel.findOne({
requestId: requestId,
userId: req.user.userId
});
let userName = req.user.email || 'Unknown User';
let userRole = 'SPECTATOR';
if (participant) {
userName = participant.userName || req.user.email || 'Unknown User';
userRole = participant.participantType || 'SPECTATOR';
}
const user = {
userId: req.user.userId,
name: userName,
role: userRole
};
const payload = req.body?.payload ? JSON.parse(req.body.payload) : (req.body || {});
// Map files
const files = (req.files as any[])?.map(f => ({
buffer: f.buffer,
path: f.path || null,
originalname: f.originalname,
mimetype: f.mimetype,
size: f.size
})) || [];
const workNotePayload = {
message: payload.message,
type: payload.type || 'COMMENT',
isVisibleToDealer: payload.isVisibleToDealer || false,
mentionedUsers: payload.mentions || []
};
const note = await workNoteMongoService.create(
requestId,
user,
workNotePayload,
files
);
ResponseHandler.success(res, note, 'Work note created', 201);
} catch (error) {
const msg = error instanceof Error ? error.message : 'Unknown error';
ResponseHandler.error(res, 'Failed to create work note', 500, msg);
}
const user = {
userId: req.user?.userId,
name: userName,
role: userRole
};
const payload = req.body?.payload ? JSON.parse(req.body.payload) : (req.body || {});
// Map files with buffer for GCS upload (multer.memoryStorage provides buffer, not path)
const files = (req.files as any[])?.map(f => ({
buffer: f.buffer,
path: f.path || null, // May not exist with memory storage
originalname: f.originalname,
mimetype: f.mimetype,
size: f.size
})) || [];
// Extract mentions from payload (sent by frontend)
const mentions = payload.mentions || [];
const workNotePayload = {
message: payload.message,
isPriority: payload.isPriority,
parentNoteId: payload.parentNoteId,
mentionedUsers: mentions // Pass mentioned user IDs to service
};
const requestMeta = getRequestMetadata(req);
const note = await workNoteService.create(requestId, user, workNotePayload, files, {
ipAddress: requestMeta.ipAddress,
userAgent: requestMeta.userAgent
});
res.status(201).json({ success: true, data: note });
}
}

View File

@ -5,7 +5,8 @@
* Logic: Email only sent if BOTH admin AND user have it enabled
*/
import { User } from '@models/User';
import { SYSTEM_CONFIG } from '../config/system.config';
import { getConfigValue } from '../services/configReader.service';
import logger from '../utils/logger';
@ -49,7 +50,7 @@ export async function shouldSendEmail(
try {
// Step 1: Check admin-level configuration (System Config)
const adminEmailEnabled = await isAdminEmailEnabled(emailType);
if (!adminEmailEnabled) {
logger.info(`[Email] Admin disabled emails for ${emailType} - skipping`);
return false;
@ -57,7 +58,7 @@ export async function shouldSendEmail(
// Step 2: Check user-level preferences
const userEmailEnabled = await isUserEmailEnabled(userId, emailType);
if (!userEmailEnabled) {
logger.info(`[Email] User ${userId} disabled emails for ${emailType} - skipping`);
return false;
@ -82,28 +83,28 @@ async function isAdminEmailEnabled(emailType: EmailNotificationType): Promise<bo
try {
// Step 1: Check database configuration (admin panel setting)
const dbConfigValue = await getConfigValue('ENABLE_EMAIL_NOTIFICATIONS', '');
if (dbConfigValue) {
// Parse database value (it's stored as string 'true' or 'false')
const dbEnabled = dbConfigValue.toLowerCase() === 'true';
const dbEnabled = String(dbConfigValue).toLowerCase() === 'true';
if (!dbEnabled) {
logger.info('[Email] Admin has disabled email notifications globally (from database config)');
return false;
}
logger.debug('[Email] Email notifications enabled (from database config)');
return true;
}
// Step 2: Fall back to environment variable if database config not found
const envEnabled = SYSTEM_CONFIG.NOTIFICATIONS.ENABLE_EMAIL;
if (!envEnabled) {
logger.info('[Email] Admin has disabled email notifications globally (from environment variable)');
return false;
}
logger.debug('[Email] Email notifications enabled (from environment variable)');
return true;
} catch (error) {
@ -119,19 +120,19 @@ async function isAdminEmailEnabled(emailType: EmailNotificationType): Promise<bo
*/
async function isUserEmailEnabled(userId: string, emailType: EmailNotificationType): Promise<boolean> {
try {
const { UserModel } = await import('../models/mongoose/User.schema');
// Fetch user and check emailNotificationsEnabled field
const user = await User.findByPk(userId, {
attributes: ['userId', 'emailNotificationsEnabled']
});
const user = await UserModel.findOne({ userId });
if (!user) {
logger.warn(`[Email] User ${userId} not found - defaulting to enabled`);
return true;
}
// Check user's global email notification setting
const enabled = (user as any).emailNotificationsEnabled !== false;
// Check user's global email notification setting (Mongoose uses nested 'notifications.email')
// Fallback to true if undefined
const enabled = user.notifications?.email !== false;
if (!enabled) {
logger.info(`[Email] User ${userId} has disabled email notifications globally`);
}
@ -154,24 +155,23 @@ export async function shouldSendInAppNotification(
try {
// Check admin config first (if SystemConfig model exists)
const adminEnabled = await isAdminInAppEnabled(notificationType);
if (!adminEnabled) {
return false;
}
const { UserModel } = await import('../models/mongoose/User.schema');
// Fetch user and check inAppNotificationsEnabled field
const user = await User.findByPk(userId, {
attributes: ['userId', 'inAppNotificationsEnabled']
});
const user = await UserModel.findOne({ userId });
if (!user) {
logger.warn(`[Notification] User ${userId} not found - defaulting to enabled`);
return true;
}
// Check user's global in-app notification setting
const enabled = (user as any).inAppNotificationsEnabled !== false;
// Check user's global in-app notification setting (Mongoose uses nested 'notifications.inApp')
const enabled = user.notifications?.inApp !== false;
if (!enabled) {
logger.info(`[Notification] User ${userId} has disabled in-app notifications globally`);
}
@ -191,20 +191,20 @@ async function isAdminInAppEnabled(notificationType: string): Promise<boolean> {
try {
// Step 1: Check database configuration (admin panel setting)
const dbConfigValue = await getConfigValue('ENABLE_IN_APP_NOTIFICATIONS', '');
if (dbConfigValue) {
// Parse database value (it's stored as string 'true' or 'false')
const dbEnabled = dbConfigValue.toLowerCase() === 'true';
const dbEnabled = String(dbConfigValue).toLowerCase() === 'true';
if (!dbEnabled) {
logger.info('[Notification] Admin has disabled in-app notifications globally (from database config)');
return false;
}
logger.debug('[Notification] In-app notifications enabled (from database config)');
return true;
}
// Step 2: Fall back to environment variable if database config not found
const envValue = process.env.ENABLE_IN_APP_NOTIFICATIONS;
if (envValue !== undefined) {
@ -216,15 +216,15 @@ async function isAdminInAppEnabled(notificationType: string): Promise<boolean> {
logger.debug('[Notification] In-app notifications enabled (from environment variable)');
return true;
}
// Step 3: Final fallback to system config (defaults to true)
const adminInAppEnabled = SYSTEM_CONFIG.NOTIFICATIONS.ENABLE_IN_APP;
if (!adminInAppEnabled) {
logger.info('[Notification] Admin has disabled in-app notifications globally (from system config)');
return false;
}
logger.debug('[Notification] In-app notifications enabled (from system config default)');
return true;
} catch (error) {
@ -282,4 +282,3 @@ export async function shouldSendEmailWithOverride(
// Non-critical emails - check both admin and user preferences
return await shouldSendEmail(userId, emailType);
}

View File

@ -91,6 +91,7 @@ export interface WorkflowPausedData extends BaseEmailData {
pausedTime: string;
resumeDate: string;
pauseReason: string;
isApprover?: boolean;
}
export interface WorkflowResumedData extends BaseEmailData {

View File

@ -1,6 +1,6 @@
import { Request, Response, NextFunction } from 'express';
import jwt from 'jsonwebtoken';
import { User } from '../models/User';
import { UserModel } from '../models/mongoose/User.schema';
import { ssoConfig } from '../config/sso';
import { ResponseHandler } from '../utils/responseHandler';
@ -35,10 +35,10 @@ export const authenticateToken = async (
// Verify JWT token
const decoded = jwt.verify(token, ssoConfig.jwtSecret) as JwtPayload;
// Fetch user from database to ensure they still exist and are active
const user = await User.findByPk(decoded.userId);
const user = await UserModel.findOne({ userId: decoded.userId });
if (!user || !user.isActive) {
ResponseHandler.unauthorized(res, 'User not found or inactive');
return;
@ -88,8 +88,8 @@ export const optionalAuth = async (
if (token) {
const decoded = jwt.verify(token, ssoConfig.jwtSecret) as JwtPayload;
const user = await User.findByPk(decoded.userId);
const user = await UserModel.findOne({ userId: decoded.userId });
if (user && user.isActive) {
req.user = {
userId: user.userId,
@ -99,7 +99,7 @@ export const optionalAuth = async (
};
}
}
next();
} catch (error) {
// For optional auth, we don't throw errors, just continue without user

View File

@ -1,7 +1,7 @@
import { Request, Response, NextFunction } from 'express';
import { Participant } from '@models/Participant';
import { WorkflowRequest } from '@models/WorkflowRequest';
import { Op } from 'sequelize';
import { WorkflowRequestModel } from '../models/mongoose/WorkflowRequest.schema';
import { ParticipantModel } from '../models/mongoose/Participant.schema';
import { ApprovalLevelModel } from '../models/mongoose/ApprovalLevel.schema';
type AllowedType = 'INITIATOR' | 'APPROVER' | 'SPECTATOR';
@ -12,14 +12,11 @@ function isUuid(identifier: string): boolean {
}
// Helper to find workflow by either requestId or requestNumber
async function findWorkflowByIdentifier(identifier: string): Promise<WorkflowRequest | null> {
if (isUuid(identifier)) {
return await WorkflowRequest.findByPk(identifier);
} else {
return await WorkflowRequest.findOne({
where: { requestNumber: identifier }
});
}
/**
 * Resolve a workflow document from either of its two public identifiers:
 * the UUID `requestId` or the human-readable `requestNumber`.
 *
 * @param identifier - UUID string or request number (e.g. "REQ-2024-0001").
 * @returns The matching workflow document, or null when none exists.
 */
async function findWorkflowByIdentifier(identifier: string): Promise<any | null> {
  if (isUuid(identifier)) {
    return await WorkflowRequestModel.findOne({ requestId: identifier });
  }
  return await WorkflowRequestModel.findOne({ requestNumber: identifier });
}
export function requireParticipantTypes(allowed: AllowedType[]) {
@ -36,24 +33,22 @@ export function requireParticipantTypes(allowed: AllowedType[]) {
if (!workflow) {
return res.status(404).json({ success: false, error: 'Workflow not found' });
}
const actualRequestId = (workflow as any).requestId;
const actualRequestId = workflow.requestId;
// Check initiator
if (allowed.includes('INITIATOR')) {
if ((workflow as any).initiatorId === userId) {
if (workflow.initiator?.userId === userId) {
return next();
}
}
// Check participants table for SPECTATOR
if (allowed.includes('SPECTATOR')) {
const participant = await Participant.findOne({
where: {
requestId: actualRequestId,
userId,
participantType: 'SPECTATOR',
isActive: true
},
const participant = await ParticipantModel.findOne({
requestId: actualRequestId,
userId,
participantType: 'SPECTATOR',
isActive: true
});
if (participant) {
return next();
@ -63,26 +58,21 @@ export function requireParticipantTypes(allowed: AllowedType[]) {
// For APPROVER role, check ApprovalLevel table
// This is the primary source of truth for approvers
if (allowed.includes('APPROVER')) {
const { ApprovalLevel } = await import('@models/ApprovalLevel');
const approvalLevel = await ApprovalLevel.findOne({
where: {
requestId: actualRequestId,
approverId: userId,
status: { [Op.in]: ['PENDING', 'IN_PROGRESS'] as any }
}
const approvalLevel = await ApprovalLevelModel.findOne({
requestId: actualRequestId,
'approver.userId': userId,
status: { $in: ['PENDING', 'IN_PROGRESS'] }
});
if (approvalLevel) {
return next();
}
// Fallback: also check Participants table (some approvers might be added there)
const participant = await Participant.findOne({
where: {
requestId: actualRequestId,
userId,
participantType: 'APPROVER',
isActive: true
},
const participant = await ParticipantModel.findOne({
requestId: actualRequestId,
userId,
participantType: 'APPROVER',
isActive: true
});
if (participant) {
return next();

View File

@ -49,4 +49,4 @@ export const corsMiddleware = cors({
allowedHeaders: ['Content-Type', 'Authorization', 'X-Requested-With', 'Accept'],
exposedHeaders: ['X-Total-Count', 'X-Page-Count'],
optionsSuccessStatus: 200,
});
});

View File

@ -1,92 +0,0 @@
import { QueryInterface, QueryTypes } from 'sequelize';
/**
* Migration to add AI model configuration entries
* Adds CLAUDE_MODEL, OPENAI_MODEL, and GEMINI_MODEL to admin_configurations
*
* This migration is idempotent - it will only insert if the configs don't exist
*/
export async function up(queryInterface: QueryInterface): Promise<void> {
  // Insert AI model configurations if they don't exist
  // Single multi-row INSERT; the trailing ON CONFLICT (config_key) DO NOTHING
  // clause is what makes this migration idempotent (safe to re-run).
  // NOTE(review): gen_random_uuid() requires PostgreSQL 13+ (or the pgcrypto
  // extension on older versions) — confirm the target database supports it.
  await queryInterface.sequelize.query(`
    INSERT INTO admin_configurations (
      config_id, config_key, config_category, config_value, value_type,
      display_name, description, default_value, is_editable, is_sensitive,
      validation_rules, ui_component, options, sort_order, requires_restart,
      last_modified_by, last_modified_at, created_at, updated_at
    ) VALUES
    (
      gen_random_uuid(),
      'CLAUDE_MODEL',
      'AI_CONFIGURATION',
      'claude-sonnet-4-20250514',
      'STRING',
      'Claude Model',
      'Claude (Anthropic) model to use for AI generation',
      'claude-sonnet-4-20250514',
      true,
      false,
      '{}'::jsonb,
      'input',
      NULL,
      27,
      false,
      NULL,
      NULL,
      NOW(),
      NOW()
    ),
    (
      gen_random_uuid(),
      'OPENAI_MODEL',
      'AI_CONFIGURATION',
      'gpt-4o',
      'STRING',
      'OpenAI Model',
      'OpenAI model to use for AI generation',
      'gpt-4o',
      true,
      false,
      '{}'::jsonb,
      'input',
      NULL,
      28,
      false,
      NULL,
      NULL,
      NOW(),
      NOW()
    ),
    (
      gen_random_uuid(),
      'GEMINI_MODEL',
      'AI_CONFIGURATION',
      'gemini-2.0-flash-lite',
      'STRING',
      'Gemini Model',
      'Gemini (Google) model to use for AI generation',
      'gemini-2.0-flash-lite',
      true,
      false,
      '{}'::jsonb,
      'input',
      NULL,
      29,
      false,
      NULL,
      NULL,
      NOW(),
      NOW()
    )
    ON CONFLICT (config_key) DO NOTHING
  `, { type: QueryTypes.INSERT });
}
export async function down(queryInterface: QueryInterface): Promise<void> {
  // Roll back `up` by deleting the three AI model configuration rows.
  const deleteSql = `
    DELETE FROM admin_configurations
    WHERE config_key IN ('CLAUDE_MODEL', 'OPENAI_MODEL', 'GEMINI_MODEL')
  `;
  await queryInterface.sequelize.query(deleteSql, { type: QueryTypes.DELETE });
}

View File

@ -1,322 +0,0 @@
import { QueryInterface, DataTypes } from 'sequelize';
import { Sequelize } from 'sequelize';
/**
 * Creates the `dealers` table (schema mirrors the dealer master sample data)
 * plus non-unique lookup indexes on the most commonly filtered columns.
 *
 * Improvement over the original: the ~45 near-identical
 * `{ type, allowNull: true, comment }` column literals and the ten identical
 * addIndex calls are generated through a small helper / loop, which removes
 * roughly 200 lines of copy-paste while producing the exact same DDL in the
 * exact same order.
 */
export async function up(queryInterface: QueryInterface): Promise<void> {
  // Ensure uuid-ossp extension is enabled (required for uuid_generate_v4())
  await queryInterface.sequelize.query('CREATE EXTENSION IF NOT EXISTS "uuid-ossp"');

  // Shorthand for the nullable-with-comment columns that dominate this table.
  const nullable = (type: object, comment: string) => ({
    type,
    allowNull: true,
    comment
  });

  // Create dealers table with all fields from sample data.
  // Many "date"-ish columns are deliberately TEXT to avoid format validation
  // on messy imported data (see the per-column comments, kept verbatim).
  await queryInterface.createTable('dealers', {
    dealer_id: {
      type: DataTypes.UUID,
      primaryKey: true,
      defaultValue: Sequelize.literal('uuid_generate_v4()')
    },
    sales_code: nullable(DataTypes.STRING(50), 'Sales Code'),
    service_code: nullable(DataTypes.STRING(50), 'Service Code'),
    gear_code: nullable(DataTypes.STRING(50), 'Gear Code'),
    gma_code: nullable(DataTypes.STRING(50), 'GMA CODE'),
    region: nullable(DataTypes.STRING(50), 'Region'),
    dealership: nullable(DataTypes.STRING(255), 'Dealership name'),
    state: nullable(DataTypes.STRING(100), 'State'),
    district: nullable(DataTypes.STRING(100), 'District'),
    city: nullable(DataTypes.STRING(100), 'City'),
    location: nullable(DataTypes.STRING(255), 'Location'),
    city_category_pst: nullable(DataTypes.STRING(50), 'City category (PST)'),
    layout_format: nullable(DataTypes.STRING(50), 'Layout format'),
    tier_city_category: nullable(DataTypes.STRING(100), 'TIER City Category'),
    on_boarding_charges: nullable(DataTypes.TEXT, 'On Boarding Charges (stored as text to allow text values)'),
    date: nullable(DataTypes.TEXT, 'DATE (stored as text to avoid format validation)'),
    single_format_month_year: nullable(DataTypes.TEXT, 'Single Format of Month/Year (stored as text)'),
    domain_id: nullable(DataTypes.STRING(255), 'Domain Id'),
    replacement: nullable(DataTypes.TEXT, 'Replacement (stored as text to allow longer values)'),
    termination_resignation_status: nullable(DataTypes.STRING(255), 'Termination / Resignation under Proposal or Evaluation'),
    date_of_termination_resignation: nullable(DataTypes.TEXT, 'Date Of termination/ resignation (stored as text to avoid format validation)'),
    last_date_of_operations: nullable(DataTypes.TEXT, 'Last date of operations (stored as text to avoid format validation)'),
    old_codes: nullable(DataTypes.STRING(255), 'Old Codes'),
    branch_details: nullable(DataTypes.TEXT, 'Branch Details'),
    dealer_principal_name: nullable(DataTypes.STRING(255), 'Dealer Principal Name'),
    dealer_principal_email_id: nullable(DataTypes.STRING(255), 'Dealer Principal Email Id'),
    dp_contact_number: nullable(DataTypes.TEXT, 'DP CONTACT NUMBER (stored as text to allow multiple numbers)'),
    dp_contacts: nullable(DataTypes.TEXT, 'DP CONTACTS (stored as text to allow multiple contacts)'),
    showroom_address: nullable(DataTypes.TEXT, 'Showroom Address'),
    showroom_pincode: nullable(DataTypes.STRING(10), 'Showroom Pincode'),
    workshop_address: nullable(DataTypes.TEXT, 'Workshop Address'),
    workshop_pincode: nullable(DataTypes.STRING(10), 'Workshop Pincode'),
    location_district: nullable(DataTypes.STRING(100), 'Location / District'),
    state_workshop: nullable(DataTypes.STRING(100), 'State (for workshop)'),
    no_of_studios: {
      ...nullable(DataTypes.INTEGER, 'No Of Studios'),
      defaultValue: 0
    },
    website_update: nullable(DataTypes.TEXT, 'Website update (stored as text to allow longer values)'),
    gst: nullable(DataTypes.STRING(50), 'GST'),
    pan: nullable(DataTypes.STRING(50), 'PAN'),
    firm_type: nullable(DataTypes.STRING(100), 'Firm Type'),
    prop_managing_partners_directors: nullable(DataTypes.STRING(255), 'Prop. / Managing Partners / Managing Directors'),
    total_prop_partners_directors: nullable(DataTypes.STRING(255), 'Total Prop. / Partners / Directors'),
    docs_folder_link: nullable(DataTypes.TEXT, 'DOCS Folder Link'),
    workshop_gma_codes: nullable(DataTypes.STRING(255), 'Workshop GMA Codes'),
    existing_new: nullable(DataTypes.STRING(50), 'Existing / New'),
    dlrcode: nullable(DataTypes.STRING(50), 'dlrcode'),
    is_active: {
      type: DataTypes.BOOLEAN,
      allowNull: false,
      defaultValue: true,
      comment: 'Whether the dealer is currently active'
    },
    created_at: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: Sequelize.literal('CURRENT_TIMESTAMP')
    },
    updated_at: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: Sequelize.literal('CURRENT_TIMESTAMP')
    }
  });

  // Non-unique lookup indexes (idx_dealers_<column>), created in the same
  // order as the original migration so existing databases match exactly.
  const indexedColumns = [
    'sales_code', 'service_code', 'gma_code', 'domain_id', 'region',
    'state', 'city', 'district', 'dlrcode', 'is_active'
  ];
  for (const column of indexedColumns) {
    await queryInterface.addIndex('dealers', [column], {
      name: `idx_dealers_${column}`,
      unique: false
    });
  }
}
export async function down(queryInterface: QueryInterface): Promise<void> {
  // Drop the lookup indexes first, in the order the original rollback used.
  const indexNames = [
    'idx_dealers_sales_code',
    'idx_dealers_service_code',
    'idx_dealers_gma_code',
    'idx_dealers_domain_id',
    'idx_dealers_region',
    'idx_dealers_state',
    'idx_dealers_city',
    'idx_dealers_district',
    'idx_dealers_dlrcode',
    'idx_dealers_is_active'
  ];
  for (const indexName of indexNames) {
    await queryInterface.removeIndex('dealers', indexName);
  }
  // Then remove the table itself.
  await queryInterface.dropTable('dealers');
}

View File

@ -1,92 +0,0 @@
import { QueryInterface, DataTypes } from 'sequelize';
/**
* Migration to create request_summaries table
* Stores comprehensive summaries of closed workflow requests
*/
export async function up(queryInterface: QueryInterface): Promise<void> {
  // One summary row per workflow request (request_id carries a unique
  // constraint below), owned by the request's initiator.
  await queryInterface.createTable('request_summaries', {
    summary_id: {
      type: DataTypes.UUID,
      defaultValue: DataTypes.UUIDV4,
      primaryKey: true,
      allowNull: false
    },
    // Owning workflow request; the summary row is deleted with the request.
    request_id: {
      type: DataTypes.UUID,
      allowNull: false,
      references: {
        model: 'workflow_requests',
        key: 'request_id'
      },
      onUpdate: 'CASCADE',
      onDelete: 'CASCADE',
      unique: true // One summary per request
    },
    // User who initiated the request; row is removed if that user is deleted.
    initiator_id: {
      type: DataTypes.UUID,
      allowNull: false,
      references: {
        model: 'users',
        key: 'user_id'
      },
      onUpdate: 'CASCADE',
      onDelete: 'CASCADE'
    },
    title: {
      type: DataTypes.STRING(500),
      allowNull: false
    },
    description: {
      type: DataTypes.TEXT,
      allowNull: true
    },
    closing_remarks: {
      type: DataTypes.TEXT,
      allowNull: true
    },
    // Presumably flags summaries produced by the AI generator — confirm
    // against the service code that writes this table.
    is_ai_generated: {
      type: DataTypes.BOOLEAN,
      allowNull: false,
      defaultValue: false
    },
    // Optional link to the AI conclusion; nulled (not deleted) if the
    // referenced conclusion row disappears.
    conclusion_id: {
      type: DataTypes.UUID,
      allowNull: true,
      references: {
        model: 'conclusion_remarks',
        key: 'conclusion_id'
      },
      onUpdate: 'CASCADE',
      onDelete: 'SET NULL'
    },
    created_at: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW
    },
    updated_at: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW
    }
  });
  // Create indexes
  await queryInterface.addIndex('request_summaries', ['request_id'], {
    name: 'idx_request_summaries_request_id'
  });
  await queryInterface.addIndex('request_summaries', ['initiator_id'], {
    name: 'idx_request_summaries_initiator_id'
  });
  await queryInterface.addIndex('request_summaries', ['created_at'], {
    name: 'idx_request_summaries_created_at'
  });
}
export async function down(queryInterface: QueryInterface): Promise<void> {
  // Indexes are removed automatically along with the table.
  const table = 'request_summaries';
  await queryInterface.dropTable(table);
}

View File

@ -1,99 +0,0 @@
import { QueryInterface, DataTypes } from 'sequelize';
/**
* Migration to create shared_summaries table
* Stores sharing relationships for request summaries
*/
export async function up(queryInterface: QueryInterface): Promise<void> {
  // Join table recording who shared which summary with whom, plus simple
  // read-tracking fields (viewed_at / is_read).
  await queryInterface.createTable('shared_summaries', {
    shared_summary_id: {
      type: DataTypes.UUID,
      defaultValue: DataTypes.UUIDV4,
      primaryKey: true,
      allowNull: false
    },
    // Summary being shared; share rows disappear with the summary.
    summary_id: {
      type: DataTypes.UUID,
      allowNull: false,
      references: {
        model: 'request_summaries',
        key: 'summary_id'
      },
      onUpdate: 'CASCADE',
      onDelete: 'CASCADE'
    },
    // User performing the share.
    shared_by: {
      type: DataTypes.UUID,
      allowNull: false,
      references: {
        model: 'users',
        key: 'user_id'
      },
      onUpdate: 'CASCADE',
      onDelete: 'CASCADE'
    },
    // Recipient of the share.
    shared_with: {
      type: DataTypes.UUID,
      allowNull: false,
      references: {
        model: 'users',
        key: 'user_id'
      },
      onUpdate: 'CASCADE',
      onDelete: 'CASCADE'
    },
    shared_at: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW
    },
    viewed_at: {
      type: DataTypes.DATE,
      allowNull: true
    },
    is_read: {
      type: DataTypes.BOOLEAN,
      allowNull: false,
      defaultValue: false
    },
    created_at: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW
    },
    updated_at: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW
    }
  });
  // Create unique constraint to prevent duplicate shares
  // (a summary can be shared with a given user at most once).
  await queryInterface.addConstraint('shared_summaries', {
    fields: ['summary_id', 'shared_with'],
    type: 'unique',
    name: 'uk_shared_summary'
  });
  // Create indexes
  await queryInterface.addIndex('shared_summaries', ['summary_id'], {
    name: 'idx_shared_summaries_summary_id'
  });
  await queryInterface.addIndex('shared_summaries', ['shared_by'], {
    name: 'idx_shared_summaries_shared_by'
  });
  await queryInterface.addIndex('shared_summaries', ['shared_with'], {
    name: 'idx_shared_summaries_shared_with'
  });
  await queryInterface.addIndex('shared_summaries', ['shared_at'], {
    name: 'idx_shared_summaries_shared_at'
  });
}
export async function down(queryInterface: QueryInterface): Promise<void> {
  // Constraint and indexes are dropped together with the table.
  const table = 'shared_summaries';
  await queryInterface.dropTable(table);
}

View File

@ -1,34 +0,0 @@
import { QueryInterface } from 'sequelize';
/**
* Migration: Update Request Number Format
*
* This migration documents the change in request number format from:
* - Old: REQ-YYYY-NNNNN (e.g., REQ-2025-12345)
* - New: REQ-YYYY-MM-XXXX (e.g., REQ-2025-11-0001)
*
* The counter now resets every month automatically.
*
* No schema changes are required as the request_number column (VARCHAR(20))
* is already sufficient for the new format (16 characters).
*
* Existing request numbers will remain unchanged.
* New requests will use the new format starting from this migration.
*/
export async function up(queryInterface: QueryInterface): Promise<void> {
  // Documentation-only migration: the new REQ-YYYY-MM-XXXX format is produced
  // by generateRequestNumber() in helpers.ts, and the existing request_number
  // column is already wide enough — so there is no DDL to run, only a record
  // of the change in the migration log.
  const notes = [
    '[Migration] Request number format updated to REQ-YYYY-MM-XXXX',
    '[Migration] Counter will reset automatically each month'
  ];
  for (const note of notes) {
    console.log(note);
  }
}
export async function down(queryInterface: QueryInterface): Promise<void> {
  // Nothing to undo at the schema level; reverting means changing
  // generateRequestNumber() in helpers.ts back to the old format.
  const note =
    '[Migration] Request number format can be reverted by updating generateRequestNumber() function';
  console.log(note);
}

View File

@ -1,83 +0,0 @@
import { QueryInterface, DataTypes } from 'sequelize';
/**
* Migration to create activity_types table for claim management activity types
* Admin can manage activity types similar to holiday management
*/
export async function up(queryInterface: QueryInterface): Promise<void> {
  // Lookup table of claim activity types, maintained by admins (title is
  // unique; item_code / taxation_type / sap_ref_no are optional metadata).
  await queryInterface.createTable('activity_types', {
    activity_type_id: {
      type: DataTypes.UUID,
      defaultValue: DataTypes.UUIDV4,
      primaryKey: true
    },
    title: {
      type: DataTypes.STRING(200),
      allowNull: false,
      unique: true,
      comment: 'Activity type title/name (e.g., "Riders Mania Claims", "Legal Claims Reimbursement")'
    },
    item_code: {
      type: DataTypes.STRING(100),
      allowNull: true,
      defaultValue: null,
      comment: 'Optional item code for the activity type'
    },
    taxation_type: {
      type: DataTypes.STRING(100),
      allowNull: true,
      defaultValue: null,
      comment: 'Optional taxation type for the activity'
    },
    sap_ref_no: {
      type: DataTypes.STRING(100),
      allowNull: true,
      defaultValue: null,
      comment: 'Optional SAP reference number'
    },
    is_active: {
      type: DataTypes.BOOLEAN,
      defaultValue: true,
      comment: 'Whether this activity type is currently active/available for selection'
    },
    created_by: {
      type: DataTypes.UUID,
      allowNull: false,
      references: {
        model: 'users',
        key: 'user_id'
      },
      comment: 'Admin user who created this activity type'
    },
    updated_by: {
      type: DataTypes.UUID,
      allowNull: true,
      references: {
        model: 'users',
        key: 'user_id'
      },
      comment: 'Admin user who last updated this activity type'
    },
    created_at: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW
    },
    updated_at: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW
    }
  });
  // Indexes for performance
  // Raw SQL (rather than addIndex) so IF NOT EXISTS keeps re-runs safe.
  await queryInterface.sequelize.query('CREATE UNIQUE INDEX IF NOT EXISTS "activity_types_title_unique" ON "activity_types" ("title");');
  await queryInterface.sequelize.query('CREATE INDEX IF NOT EXISTS "activity_types_is_active" ON "activity_types" ("is_active");');
  await queryInterface.sequelize.query('CREATE INDEX IF NOT EXISTS "activity_types_item_code" ON "activity_types" ("item_code");');
  await queryInterface.sequelize.query('CREATE INDEX IF NOT EXISTS "activity_types_created_by" ON "activity_types" ("created_by");');
}
export async function down(queryInterface: QueryInterface): Promise<void> {
  // Dropping the table also removes the indexes created in `up`.
  const table = 'activity_types';
  await queryInterface.dropTable(table);
}

View File

@ -1,73 +0,0 @@
import { QueryInterface, DataTypes } from 'sequelize';
/**
 * Adds pause/resume bookkeeping columns to approval_levels, plus the two
 * indexes used by pause-state queries and the auto-resume job.
 *
 * Note: The 'PAUSED' enum value is added in a separate migration
 * (20250126-add-paused-to-enum.ts).
 */
export async function up(queryInterface: QueryInterface): Promise<void> {
  type ColumnSpec = Parameters<QueryInterface['addColumn']>[2];
  // Column definitions, applied one at a time in this exact order.
  const pauseColumns: Array<[string, ColumnSpec]> = [
    ['is_paused', { type: DataTypes.BOOLEAN, allowNull: false, defaultValue: false }],
    ['paused_at', { type: DataTypes.DATE, allowNull: true }],
    [
      'paused_by',
      {
        type: DataTypes.UUID,
        allowNull: true,
        references: { model: 'users', key: 'user_id' }
      }
    ],
    ['pause_reason', { type: DataTypes.TEXT, allowNull: true }],
    ['pause_resume_date', { type: DataTypes.DATE, allowNull: true }],
    [
      'pause_tat_start_time',
      { type: DataTypes.DATE, allowNull: true, comment: 'Original TAT start time before pause' }
    ],
    [
      'pause_elapsed_hours',
      { type: DataTypes.DECIMAL(10, 2), allowNull: true, comment: 'Elapsed hours at pause time' }
    ]
  ];
  for (const [columnName, spec] of pauseColumns) {
    await queryInterface.addColumn('approval_levels', columnName, spec);
  }
  // Index on is_paused for faster pause-state queries.
  await queryInterface.sequelize.query(
    'CREATE INDEX IF NOT EXISTS "approval_levels_is_paused" ON "approval_levels" ("is_paused");'
  );
  // Partial index on pause_resume_date for the auto-resume job.
  await queryInterface.sequelize.query(
    'CREATE INDEX IF NOT EXISTS "approval_levels_pause_resume_date" ON "approval_levels" ("pause_resume_date") WHERE "is_paused" = true;'
  );
}
export async function down(queryInterface: QueryInterface): Promise<void> {
  // Remove the pause columns in reverse order of their creation.
  const pauseColumns = [
    'pause_elapsed_hours',
    'pause_tat_start_time',
    'pause_resume_date',
    'pause_reason',
    'paused_by',
    'paused_at',
    'is_paused'
  ];
  for (const columnName of pauseColumns) {
    await queryInterface.removeColumn('approval_levels', columnName);
  }
  // PostgreSQL cannot drop enum values, so the 'PAUSED' member added by the
  // companion enum migration intentionally survives this rollback; fully
  // reverting it would require recreating the enum type.
}

View File

@ -1,59 +0,0 @@
import { QueryInterface, DataTypes } from 'sequelize';
/**
 * Adds pause bookkeeping columns to workflow_requests, plus the two indexes
 * used by pause-state queries and the auto-resume job.
 */
export async function up(queryInterface: QueryInterface): Promise<void> {
  type ColumnSpec = Parameters<QueryInterface['addColumn']>[2];
  // Column definitions, applied one at a time in this exact order.
  const pauseColumns: Array<[string, ColumnSpec]> = [
    ['is_paused', { type: DataTypes.BOOLEAN, allowNull: false, defaultValue: false }],
    ['paused_at', { type: DataTypes.DATE, allowNull: true }],
    [
      'paused_by',
      {
        type: DataTypes.UUID,
        allowNull: true,
        references: { model: 'users', key: 'user_id' }
      }
    ],
    ['pause_reason', { type: DataTypes.TEXT, allowNull: true }],
    ['pause_resume_date', { type: DataTypes.DATE, allowNull: true }],
    ['pause_tat_snapshot', { type: DataTypes.JSONB, allowNull: true }]
  ];
  for (const [columnName, spec] of pauseColumns) {
    await queryInterface.addColumn('workflow_requests', columnName, spec);
  }
  // Index on is_paused for faster pause-state queries.
  await queryInterface.sequelize.query(
    'CREATE INDEX IF NOT EXISTS "workflow_requests_is_paused" ON "workflow_requests" ("is_paused");'
  );
  // Partial index on pause_resume_date for the auto-resume job.
  await queryInterface.sequelize.query(
    'CREATE INDEX IF NOT EXISTS "workflow_requests_pause_resume_date" ON "workflow_requests" ("pause_resume_date") WHERE "is_paused" = true;'
  );
}
export async function down(queryInterface: QueryInterface): Promise<void> {
  // Remove the pause columns in reverse order of their creation.
  const pauseColumns = [
    'pause_tat_snapshot',
    'pause_resume_date',
    'pause_reason',
    'paused_by',
    'paused_at',
    'is_paused'
  ];
  for (const columnName of pauseColumns) {
    await queryInterface.removeColumn('workflow_requests', columnName);
  }
}

View File

@ -1,35 +0,0 @@
import { QueryInterface } from 'sequelize';
/**
* Migration to add 'PAUSED' value to enum_approval_status enum type
* This is required for the pause workflow feature
*/
export async function up(queryInterface: QueryInterface): Promise<void> {
  // Add 'PAUSED' to the enum_approval_status enum type
  // PostgreSQL doesn't support IF NOT EXISTS for ALTER TYPE ADD VALUE,
  // so we check if it exists first
  // The DO block consults pg_enum/pg_type, which makes this migration
  // idempotent (safe to re-run).
  await queryInterface.sequelize.query(`
    DO $$
    BEGIN
      IF NOT EXISTS (
        SELECT 1 FROM pg_enum
        WHERE enumlabel = 'PAUSED'
        AND enumtypid = (SELECT oid FROM pg_type WHERE typname = 'enum_approval_status')
      ) THEN
        ALTER TYPE enum_approval_status ADD VALUE 'PAUSED';
      END IF;
    END$$;
  `);
}
export async function down(queryInterface: QueryInterface): Promise<void> {
  // PostgreSQL cannot drop a value from an existing enum type. A real
  // rollback would mean creating a new enum without 'PAUSED', repointing
  // every dependent column, and dropping the old type — complex and risky —
  // so the value is deliberately left in place and we only log that fact.
  const note =
    '[Migration] Note: Cannot remove enum values in PostgreSQL. PAUSED will remain in enum_approval_status.';
  console.log(note);
}

View File

@ -1,35 +0,0 @@
import { QueryInterface } from 'sequelize';
/**
* Migration to add 'PAUSED' value to enum_workflow_status enum type
* This allows workflows to have a PAUSED status in addition to the isPaused boolean flag
*/
export async function up(queryInterface: QueryInterface): Promise<void> {
  // Add 'PAUSED' to the enum_workflow_status enum type
  // PostgreSQL doesn't support IF NOT EXISTS for ALTER TYPE ADD VALUE,
  // so we check if it exists first
  // The DO block consults pg_enum/pg_type, which makes this migration
  // idempotent (safe to re-run).
  await queryInterface.sequelize.query(`
    DO $$
    BEGIN
      IF NOT EXISTS (
        SELECT 1 FROM pg_enum
        WHERE enumlabel = 'PAUSED'
        AND enumtypid = (SELECT oid FROM pg_type WHERE typname = 'enum_workflow_status')
      ) THEN
        ALTER TYPE enum_workflow_status ADD VALUE 'PAUSED';
      END IF;
    END$$;
  `);
}
export async function down(queryInterface: QueryInterface): Promise<void> {
  // PostgreSQL cannot drop a value from an existing enum type. A real
  // rollback would mean creating a new enum without 'PAUSED', repointing
  // every dependent column, and dropping the old type — complex and risky —
  // so the value is deliberately left in place and we only log that fact.
  const note =
    '[Migration] Note: Cannot remove enum values in PostgreSQL. PAUSED will remain in enum_workflow_status.';
  console.log(note);
}

View File

@ -1,24 +0,0 @@
import { QueryInterface } from 'sequelize';
/**
* Migration to update any workflow requests with IN_PROGRESS status to PENDING
* Since IN_PROGRESS is essentially the same as PENDING for workflow requests
*/
export async function up(queryInterface: QueryInterface): Promise<void> {
  // IN_PROGRESS and PENDING are equivalent for workflow requests, so any
  // lingering IN_PROGRESS rows are normalised to PENDING.
  const normalizeSql = `
    UPDATE workflow_requests
    SET status = 'PENDING'
    WHERE status = 'IN_PROGRESS';
  `;
  await queryInterface.sequelize.query(normalizeSql);
  console.log('[Migration] Updated IN_PROGRESS workflow requests to PENDING');
}
export async function down(queryInterface: QueryInterface): Promise<void> {
  // One-way migration: after `up` there is no record of which rows were
  // originally IN_PROGRESS versus PENDING, so nothing can be restored.
  const note =
    '[Migration] Cannot rollback - IN_PROGRESS to PENDING migration is one-way';
  console.log(note);
}

View File

@ -1,199 +0,0 @@
import { QueryInterface, QueryTypes } from 'sequelize';
/**
* Migration to migrate from multi-provider AI to Vertex AI Gemini
*
* Removes:
* - AI_PROVIDER
* - CLAUDE_API_KEY, OPENAI_API_KEY, GEMINI_API_KEY
* - CLAUDE_MODEL, OPENAI_MODEL, GEMINI_MODEL
* - VERTEX_AI_MODEL (moved to environment variable only)
* - VERTEX_AI_LOCATION (moved to environment variable only)
*
* Note: Both VERTEX_AI_MODEL and VERTEX_AI_LOCATION are now configured via
* environment variables only (not in admin settings).
*
* This migration is idempotent - it will only delete configs that exist.
*/
export async function up(queryInterface: QueryInterface): Promise<void> {
  // Delete every legacy multi-provider AI setting; Vertex AI is configured
  // purely through environment variables from this point on. Idempotent:
  // deleting rows that are already gone is a no-op.
  const removeLegacyConfigSql = `
    DELETE FROM admin_configurations
    WHERE config_key IN (
      'AI_PROVIDER',
      'CLAUDE_API_KEY',
      'OPENAI_API_KEY',
      'GEMINI_API_KEY',
      'CLAUDE_MODEL',
      'OPENAI_MODEL',
      'GEMINI_MODEL',
      'VERTEX_AI_MODEL',
      'VERTEX_AI_LOCATION'
    )
  `;
  await queryInterface.sequelize.query(removeLegacyConfigSql, { type: QueryTypes.DELETE });
}
/**
 * Rollback: re-creates the legacy multi-provider AI configuration rows
 * (provider selector, per-provider API keys and model names) that `up`
 * deleted. ON CONFLICT (config_key) DO NOTHING keeps this idempotent.
 *
 * Fix over the original: the old version logged
 * "Down migration skipped - AI configs are now environment-only" and then
 * proceeded to run the restore INSERT anyway — the log contradicted the
 * actual behavior. The restore is kept; the log now tells the truth.
 *
 * Note: VERTEX_AI_MODEL / VERTEX_AI_LOCATION are intentionally NOT restored;
 * those remain environment-variable-only settings.
 */
export async function down(queryInterface: QueryInterface): Promise<void> {
  console.log('[Migration] Restoring legacy AI provider configurations (rollback)');
  // Restore old configurations (for rollback)
  await queryInterface.sequelize.query(`
    INSERT INTO admin_configurations (
      config_id, config_key, config_category, config_value, value_type,
      display_name, description, default_value, is_editable, is_sensitive,
      validation_rules, ui_component, options, sort_order, requires_restart,
      last_modified_by, last_modified_at, created_at, updated_at
    ) VALUES
    (
      gen_random_uuid(),
      'AI_PROVIDER',
      'AI_CONFIGURATION',
      'claude',
      'STRING',
      'AI Provider',
      'Active AI provider for conclusion generation (claude, openai, or gemini)',
      'claude',
      true,
      false,
      '{"enum": ["claude", "openai", "gemini"], "required": true}'::jsonb,
      'select',
      '["claude", "openai", "gemini"]'::jsonb,
      22,
      false,
      NULL,
      NULL,
      NOW(),
      NOW()
    ),
    (
      gen_random_uuid(),
      'CLAUDE_API_KEY',
      'AI_CONFIGURATION',
      '',
      'STRING',
      'Claude API Key',
      'API key for Claude (Anthropic) - Get from console.anthropic.com',
      '',
      true,
      true,
      '{"pattern": "^sk-ant-", "minLength": 40}'::jsonb,
      'input',
      NULL,
      23,
      false,
      NULL,
      NULL,
      NOW(),
      NOW()
    ),
    (
      gen_random_uuid(),
      'OPENAI_API_KEY',
      'AI_CONFIGURATION',
      '',
      'STRING',
      'OpenAI API Key',
      'API key for OpenAI (GPT-4) - Get from platform.openai.com',
      '',
      true,
      true,
      '{"pattern": "^sk-", "minLength": 40}'::jsonb,
      'input',
      NULL,
      24,
      false,
      NULL,
      NULL,
      NOW(),
      NOW()
    ),
    (
      gen_random_uuid(),
      'GEMINI_API_KEY',
      'AI_CONFIGURATION',
      '',
      'STRING',
      'Gemini API Key',
      'API key for Gemini (Google) - Get from ai.google.dev',
      '',
      true,
      true,
      '{"minLength": 20}'::jsonb,
      'input',
      NULL,
      25,
      false,
      NULL,
      NULL,
      NOW(),
      NOW()
    ),
    (
      gen_random_uuid(),
      'CLAUDE_MODEL',
      'AI_CONFIGURATION',
      'claude-sonnet-4-20250514',
      'STRING',
      'Claude Model',
      'Claude (Anthropic) model to use for AI generation',
      'claude-sonnet-4-20250514',
      true,
      false,
      '{}'::jsonb,
      'input',
      NULL,
      27,
      false,
      NULL,
      NULL,
      NOW(),
      NOW()
    ),
    (
      gen_random_uuid(),
      'OPENAI_MODEL',
      'AI_CONFIGURATION',
      'gpt-4o',
      'STRING',
      'OpenAI Model',
      'OpenAI model to use for AI generation',
      'gpt-4o',
      true,
      false,
      '{}'::jsonb,
      'input',
      NULL,
      28,
      false,
      NULL,
      NULL,
      NOW(),
      NOW()
    ),
    (
      gen_random_uuid(),
      'GEMINI_MODEL',
      'AI_CONFIGURATION',
      'gemini-2.0-flash-lite',
      'STRING',
      'Gemini Model',
      'Gemini (Google) model to use for AI generation',
      'gemini-2.0-flash-lite',
      true,
      false,
      '{}'::jsonb,
      'input',
      NULL,
      29,
      false,
      NULL,
      NULL,
      NOW(),
      NOW()
    )
    ON CONFLICT (config_key) DO NOTHING
  `, { type: QueryTypes.INSERT });
}

View File

@ -1,237 +0,0 @@
import { QueryInterface, DataTypes } from 'sequelize';
/**
* Migration: Create users table
*
* Purpose: Create the main users table with all fields including RBAC and SSO fields
*
* This must run FIRST before other tables that reference users
*
* Includes:
* - Basic user information (email, name, etc.)
* - SSO/Okta fields (manager, job_title, etc.)
* - RBAC role system (USER, MANAGEMENT, ADMIN)
* - Location and AD group information
*
* Created: 2025-11-12 (Updated for fresh setup)
*/
export async function up(queryInterface: QueryInterface): Promise<void> {
  console.log('📋 Creating users table with RBAC and extended SSO fields...');
  try {
    // Step 1: Create ENUM type for roles
    // NOTE(review): CREATE TYPE has no IF NOT EXISTS guard, so a re-run
    // against an existing database will fail here — confirm this migration
    // only ever runs on a fresh setup.
    // NOTE(review): the role column below uses DataTypes.ENUM, which
    // typically makes sequelize generate its own enum type (enum_users_role);
    // this hand-made user_role_enum may end up unused — verify.
    console.log('  ✓ Creating user_role_enum...');
    await queryInterface.sequelize.query(`
      CREATE TYPE user_role_enum AS ENUM ('USER', 'MANAGEMENT', 'ADMIN');
    `);
    // Step 2: Create users table
    console.log('  ✓ Creating users table...');
    await queryInterface.createTable('users', {
      user_id: {
        type: DataTypes.UUID,
        primaryKey: true,
        defaultValue: DataTypes.UUIDV4,
        field: 'user_id',
        comment: 'Primary key - UUID'
      },
      employee_id: {
        type: DataTypes.STRING(50),
        allowNull: true,
        field: 'employee_id',
        comment: 'HR System Employee ID (optional) - some users may not have'
      },
      okta_sub: {
        type: DataTypes.STRING(100),
        allowNull: false,
        unique: true,
        field: 'okta_sub',
        comment: 'Okta user subject identifier - unique identifier from SSO'
      },
      email: {
        type: DataTypes.STRING(255),
        allowNull: false,
        unique: true,
        field: 'email',
        comment: 'Primary email address - unique and required'
      },
      first_name: {
        type: DataTypes.STRING(100),
        allowNull: true,
        defaultValue: '',
        field: 'first_name',
        comment: 'First name from SSO (optional)'
      },
      last_name: {
        type: DataTypes.STRING(100),
        allowNull: true,
        defaultValue: '',
        field: 'last_name',
        comment: 'Last name from SSO (optional)'
      },
      display_name: {
        type: DataTypes.STRING(200),
        allowNull: true,
        defaultValue: '',
        field: 'display_name',
        comment: 'Full display name for UI'
      },
      department: {
        type: DataTypes.STRING(100),
        allowNull: true,
        comment: 'Department/Division from SSO'
      },
      designation: {
        type: DataTypes.STRING(100),
        allowNull: true,
        comment: 'Job designation/position'
      },
      phone: {
        type: DataTypes.STRING(20),
        allowNull: true,
        comment: 'Office phone number'
      },
      // ============ Extended SSO/Okta Fields ============
      manager: {
        type: DataTypes.STRING(200),
        allowNull: true,
        comment: 'Reporting manager name from SSO/AD'
      },
      second_email: {
        type: DataTypes.STRING(255),
        allowNull: true,
        field: 'second_email',
        comment: 'Alternate email address from SSO'
      },
      job_title: {
        type: DataTypes.TEXT,
        allowNull: true,
        field: 'job_title',
        comment: 'Detailed job title/description from SSO'
      },
      employee_number: {
        type: DataTypes.STRING(50),
        allowNull: true,
        field: 'employee_number',
        comment: 'HR system employee number from SSO (e.g., "00020330")'
      },
      postal_address: {
        type: DataTypes.STRING(500),
        allowNull: true,
        field: 'postal_address',
        comment: 'Work location/office address from SSO'
      },
      mobile_phone: {
        type: DataTypes.STRING(20),
        allowNull: true,
        field: 'mobile_phone',
        comment: 'Mobile contact number from SSO'
      },
      ad_groups: {
        type: DataTypes.JSONB,
        allowNull: true,
        field: 'ad_groups',
        comment: 'Active Directory group memberships from SSO (memberOf array)'
      },
      // ============ System Fields ============
      location: {
        type: DataTypes.JSONB,
        allowNull: true,
        comment: 'JSON object: {city, state, country, office, timezone}'
      },
      is_active: {
        type: DataTypes.BOOLEAN,
        defaultValue: true,
        field: 'is_active',
        comment: 'Account status - true=active, false=disabled'
      },
      role: {
        type: DataTypes.ENUM('USER', 'MANAGEMENT', 'ADMIN'),
        allowNull: false,
        defaultValue: 'USER',
        comment: 'RBAC role: USER (default), MANAGEMENT (read all), ADMIN (full access)'
      },
      last_login: {
        type: DataTypes.DATE,
        allowNull: true,
        field: 'last_login',
        comment: 'Last successful login timestamp'
      },
      created_at: {
        type: DataTypes.DATE,
        allowNull: false,
        defaultValue: DataTypes.NOW,
        field: 'created_at'
      },
      updated_at: {
        type: DataTypes.DATE,
        allowNull: false,
        defaultValue: DataTypes.NOW,
        field: 'updated_at'
      }
    });
    // Step 3: Create indexes
    // Unique indexes enforce the email/okta_sub uniqueness; the rest are
    // plain lookup indexes on frequently filtered columns.
    console.log('  ✓ Creating indexes...');
    await queryInterface.addIndex('users', ['email'], {
      name: 'users_email_idx',
      unique: true
    });
    await queryInterface.addIndex('users', ['okta_sub'], {
      name: 'users_okta_sub_idx',
      unique: true
    });
    await queryInterface.addIndex('users', ['employee_id'], {
      name: 'users_employee_id_idx'
    });
    await queryInterface.addIndex('users', ['department'], {
      name: 'idx_users_department'
    });
    await queryInterface.addIndex('users', ['is_active'], {
      name: 'idx_users_is_active'
    });
    await queryInterface.addIndex('users', ['role'], {
      name: 'idx_users_role'
    });
    await queryInterface.addIndex('users', ['manager'], {
      name: 'idx_users_manager'
    });
    await queryInterface.addIndex('users', ['postal_address'], {
      name: 'idx_users_postal_address'
    });
    // GIN indexes for JSONB fields
    await queryInterface.sequelize.query(`
      CREATE INDEX idx_users_location ON users USING gin(location jsonb_path_ops);
      CREATE INDEX idx_users_ad_groups ON users USING gin(ad_groups);
    `);
    console.log('✅ Users table created successfully with all indexes!');
  } catch (error) {
    // Surface the failure with context, then rethrow so the migration
    // runner records the migration as failed.
    console.error('❌ Failed to create users table:', error);
    throw error;
  }
}
/**
 * Rolls back the users-table migration: drops the table (which also removes
 * its indexes, including the raw-SQL GIN indexes created in up()) and the
 * ENUM type backing the `role` column.
 */
export async function down(queryInterface: QueryInterface): Promise<void> {
  console.log('📋 Dropping users table...');
  await queryInterface.dropTable('users');
  // Sequelize auto-names the ENUM created for the `role` column
  // "enum_users_role" (enum_<table>_<column> — same convention the
  // admin_configurations/holidays migrations rely on). The previously dropped
  // "user_role_enum" never matched a type created by up(), so the ENUM leaked
  // on rollback. Drop the correct name, and keep the old one defensively in
  // case a database was patched by hand.
  await queryInterface.sequelize.query(`
    DROP TYPE IF EXISTS "enum_users_role";
    DROP TYPE IF EXISTS user_role_enum;
  `);
  console.log('✅ Users table dropped!');
}

View File

@ -1,51 +0,0 @@
import { QueryInterface, DataTypes } from 'sequelize';
/**
 * Creates the workflow_requests table (one row per approval request).
 * The two Postgres ENUM types are created first because the priority/status
 * columns reference them by name.
 */
export async function up(queryInterface: QueryInterface): Promise<void> {
  // Enums
  // Guarded DO-blocks (checking pg_type) make re-running this migration safe:
  // plain CREATE TYPE would fail if the type already exists.
  await queryInterface.sequelize.query(`DO $$
BEGIN
IF NOT EXISTS (SELECT 1 FROM pg_type WHERE typname = 'enum_priority') THEN
CREATE TYPE enum_priority AS ENUM ('STANDARD','EXPRESS');
END IF;
END$$;`);
  await queryInterface.sequelize.query(`DO $$
BEGIN
IF NOT EXISTS (SELECT 1 FROM pg_type WHERE typname = 'enum_workflow_status') THEN
CREATE TYPE enum_workflow_status AS ENUM ('DRAFT','PENDING','IN_PROGRESS','APPROVED','REJECTED','CLOSED');
END IF;
END$$;`);
  await queryInterface.createTable('workflow_requests', {
    request_id: { type: DataTypes.UUID, primaryKey: true, defaultValue: DataTypes.UUIDV4 },
    request_number: { type: DataTypes.STRING(20), allowNull: false, unique: true },
    // FK to users — the requester who opened the workflow.
    initiator_id: { type: DataTypes.UUID, allowNull: false, references: { model: 'users', key: 'user_id' } },
    template_type: { type: DataTypes.STRING(20), allowNull: false, defaultValue: 'CUSTOM' },
    title: { type: DataTypes.STRING(500), allowNull: false },
    description: { type: DataTypes.TEXT, allowNull: false },
    // Raw type-name strings (cast to any) so the columns use the ENUMs created
    // above instead of letting Sequelize generate enum_<table>_<column> types.
    priority: { type: 'enum_priority' as any, allowNull: false, defaultValue: 'STANDARD' },
    status: { type: 'enum_workflow_status' as any, allowNull: false, defaultValue: 'DRAFT' },
    // Position in the approval chain: current_level out of total_levels.
    current_level: { type: DataTypes.INTEGER, allowNull: false, defaultValue: 1 },
    total_levels: { type: DataTypes.INTEGER, allowNull: false, defaultValue: 1 },
    total_tat_hours: { type: DataTypes.DECIMAL(10,2), allowNull: false, defaultValue: 0 },
    submission_date: { type: DataTypes.DATE, allowNull: true },
    closure_date: { type: DataTypes.DATE, allowNull: true },
    conclusion_remark: { type: DataTypes.TEXT, allowNull: true },
    ai_generated_conclusion: { type: DataTypes.TEXT, allowNull: true },
    is_draft: { type: DataTypes.BOOLEAN, allowNull: false, defaultValue: true },
    // Soft-delete flag; reporting views in this project filter on is_deleted = false.
    is_deleted: { type: DataTypes.BOOLEAN, allowNull: false, defaultValue: false },
    created_at: { type: DataTypes.DATE, allowNull: false, defaultValue: DataTypes.NOW },
    updated_at: { type: DataTypes.DATE, allowNull: false, defaultValue: DataTypes.NOW },
  });
  // Raw CREATE INDEX IF NOT EXISTS keeps index creation idempotent too.
  await queryInterface.sequelize.query('CREATE INDEX IF NOT EXISTS "workflow_requests_initiator_id" ON "workflow_requests" ("initiator_id");');
  await queryInterface.sequelize.query('CREATE INDEX IF NOT EXISTS "workflow_requests_status" ON "workflow_requests" ("status");');
  await queryInterface.sequelize.query('CREATE INDEX IF NOT EXISTS "workflow_requests_created_at" ON "workflow_requests" ("created_at");');
}
/** Rolls back up(): drops the table first, then the ENUM types it referenced. */
export async function down(queryInterface: QueryInterface): Promise<void> {
  await queryInterface.dropTable('workflow_requests');
  for (const typeName of ['enum_workflow_status', 'enum_priority']) {
    await queryInterface.sequelize.query(`DROP TYPE IF EXISTS ${typeName};`);
  }
}

View File

@ -1,53 +0,0 @@
import { QueryInterface, DataTypes } from 'sequelize';
/**
 * Creates the approval_levels table: one row per (request, level) in the
 * approval chain, carrying the approver, the allotted TAT, and live progress
 * counters. The status ENUM is created first, idempotently.
 */
export async function up(queryInterface: QueryInterface): Promise<void> {
  await queryInterface.sequelize.query(`DO $$
BEGIN
IF NOT EXISTS (SELECT 1 FROM pg_type WHERE typname = 'enum_approval_status') THEN
CREATE TYPE enum_approval_status AS ENUM ('PENDING','IN_PROGRESS','APPROVED','REJECTED','SKIPPED');
END IF;
END$$;`);
  await queryInterface.createTable('approval_levels', {
    level_id: { type: DataTypes.UUID, primaryKey: true, defaultValue: DataTypes.UUIDV4 },
    request_id: { type: DataTypes.UUID, allowNull: false, references: { model: 'workflow_requests', key: 'request_id' } },
    level_number: { type: DataTypes.INTEGER, allowNull: false },
    level_name: { type: DataTypes.STRING(100), allowNull: true },
    approver_id: { type: DataTypes.UUID, allowNull: false, references: { model: 'users', key: 'user_id' } },
    // Approver email/name are denormalized here alongside the FK.
    approver_email: { type: DataTypes.STRING(255), allowNull: false },
    approver_name: { type: DataTypes.STRING(200), allowNull: false },
    tat_hours: { type: DataTypes.DECIMAL(10,2), allowNull: false },
    tat_days: { type: DataTypes.INTEGER, allowNull: false },
    // Raw type name so the column uses the ENUM created above.
    status: { type: 'enum_approval_status' as any, allowNull: false, defaultValue: 'PENDING' },
    level_start_time: { type: DataTypes.DATE, allowNull: true },
    level_end_time: { type: DataTypes.DATE, allowNull: true },
    action_date: { type: DataTypes.DATE, allowNull: true },
    comments: { type: DataTypes.TEXT, allowNull: true },
    rejection_reason: { type: DataTypes.TEXT, allowNull: true },
    is_final_approver: { type: DataTypes.BOOLEAN, allowNull: false, defaultValue: false },
    // Progress counters (updated elsewhere; default to 0 at creation).
    elapsed_hours: { type: DataTypes.DECIMAL(10,2), allowNull: false, defaultValue: 0 },
    remaining_hours: { type: DataTypes.DECIMAL(10,2), allowNull: false, defaultValue: 0 },
    tat_percentage_used: { type: DataTypes.DECIMAL(5,2), allowNull: false, defaultValue: 0 },
    created_at: { type: DataTypes.DATE, allowNull: false, defaultValue: DataTypes.NOW },
    updated_at: { type: DataTypes.DATE, allowNull: false, defaultValue: DataTypes.NOW },
  });
  await queryInterface.sequelize.query('CREATE INDEX IF NOT EXISTS "approval_levels_request_id" ON "approval_levels" ("request_id");');
  await queryInterface.sequelize.query('CREATE INDEX IF NOT EXISTS "approval_levels_approver_id" ON "approval_levels" ("approver_id");');
  await queryInterface.sequelize.query('CREATE INDEX IF NOT EXISTS "approval_levels_status" ON "approval_levels" ("status");');
  // One row per (request, level): guarded ALTER TABLE adds the unique
  // constraint only if it is not already present (idempotent re-run).
  await queryInterface.sequelize.query(`DO $$
BEGIN
IF NOT EXISTS (
SELECT 1 FROM pg_constraint WHERE conname = 'uq_approval_levels_request_level'
) THEN
ALTER TABLE "approval_levels" ADD CONSTRAINT "uq_approval_levels_request_level" UNIQUE ("request_id", "level_number");
END IF;
END$$;`);
}
/** Rolls back up(): table first, then the ENUM its status column referenced. */
export async function down(queryInterface: QueryInterface): Promise<void> {
  const { sequelize } = queryInterface;
  await queryInterface.dropTable('approval_levels');
  await sequelize.query('DROP TYPE IF EXISTS enum_approval_status;');
}

View File

@ -1,44 +0,0 @@
import { QueryInterface, DataTypes } from 'sequelize';
/**
 * Creates the participants table: users attached to a request with a role
 * (SPECTATOR/INITIATOR/APPROVER/CONSULTATION) and per-user permission flags.
 * The participant-type ENUM is created first, idempotently.
 */
export async function up(queryInterface: QueryInterface): Promise<void> {
  await queryInterface.sequelize.query(`DO $$
BEGIN
IF NOT EXISTS (SELECT 1 FROM pg_type WHERE typname = 'enum_participant_type') THEN
CREATE TYPE enum_participant_type AS ENUM ('SPECTATOR','INITIATOR','APPROVER','CONSULTATION');
END IF;
END$$;`);
  await queryInterface.createTable('participants', {
    participant_id: { type: DataTypes.UUID, primaryKey: true, defaultValue: DataTypes.UUIDV4 },
    request_id: { type: DataTypes.UUID, allowNull: false, references: { model: 'workflow_requests', key: 'request_id' } },
    user_id: { type: DataTypes.UUID, allowNull: false, references: { model: 'users', key: 'user_id' } },
    // Email/name denormalized alongside the user FK.
    user_email: { type: DataTypes.STRING(255), allowNull: false },
    user_name: { type: DataTypes.STRING(200), allowNull: false },
    // Raw type name so the column uses the ENUM created above.
    participant_type: { type: 'enum_participant_type' as any, allowNull: false },
    // Per-participant capability flags (download defaults to off).
    can_comment: { type: DataTypes.BOOLEAN, allowNull: false, defaultValue: true },
    can_view_documents: { type: DataTypes.BOOLEAN, allowNull: false, defaultValue: true },
    can_download_documents: { type: DataTypes.BOOLEAN, allowNull: false, defaultValue: false },
    notification_enabled: { type: DataTypes.BOOLEAN, allowNull: false, defaultValue: true },
    added_by: { type: DataTypes.UUID, allowNull: false, references: { model: 'users', key: 'user_id' } },
    added_at: { type: DataTypes.DATE, allowNull: false, defaultValue: DataTypes.NOW },
    is_active: { type: DataTypes.BOOLEAN, allowNull: false, defaultValue: true },
  });
  await queryInterface.sequelize.query('CREATE INDEX IF NOT EXISTS "participants_request_id" ON "participants" ("request_id");');
  await queryInterface.sequelize.query('CREATE INDEX IF NOT EXISTS "participants_user_id" ON "participants" ("user_id");');
  // A user may appear at most once per request; guarded for idempotent re-run.
  await queryInterface.sequelize.query(`DO $$
BEGIN
IF NOT EXISTS (
SELECT 1 FROM pg_constraint WHERE conname = 'uq_participants_request_user'
) THEN
ALTER TABLE "participants" ADD CONSTRAINT "uq_participants_request_user" UNIQUE ("request_id", "user_id");
END IF;
END$$;`);
}
/** Rolls back up(): table first, then the participant-type ENUM. */
export async function down(queryInterface: QueryInterface): Promise<void> {
  const { sequelize } = queryInterface;
  await queryInterface.dropTable('participants');
  await sequelize.query('DROP TYPE IF EXISTS enum_participant_type;');
}

View File

@ -1,44 +0,0 @@
import { QueryInterface, DataTypes } from 'sequelize';
/**
 * Creates the documents table: file metadata for uploads attached to a
 * request, with versioning via parent_document_id and a category ENUM
 * (created first, idempotently).
 */
export async function up(queryInterface: QueryInterface): Promise<void> {
  await queryInterface.sequelize.query(`DO $$
BEGIN
IF NOT EXISTS (SELECT 1 FROM pg_type WHERE typname = 'enum_document_category') THEN
CREATE TYPE enum_document_category AS ENUM ('SUPPORTING','APPROVAL','REFERENCE','FINAL','OTHER','COMPLETION_DOC','ACTIVITY_PHOTO');
END IF;
END$$;`);
  await queryInterface.createTable('documents', {
    document_id: { type: DataTypes.UUID, primaryKey: true, defaultValue: DataTypes.UUIDV4 },
    request_id: { type: DataTypes.UUID, allowNull: false, references: { model: 'workflow_requests', key: 'request_id' } },
    uploaded_by: { type: DataTypes.UUID, allowNull: false, references: { model: 'users', key: 'user_id' } },
    // Stored name vs. the name the user uploaded with.
    file_name: { type: DataTypes.STRING(255), allowNull: false },
    original_file_name: { type: DataTypes.STRING(255), allowNull: false },
    file_type: { type: DataTypes.STRING(100), allowNull: false },
    file_extension: { type: DataTypes.STRING(10), allowNull: false },
    file_size: { type: DataTypes.BIGINT, allowNull: false },
    file_path: { type: DataTypes.STRING(500), allowNull: false },
    storage_url: { type: DataTypes.STRING(500), allowNull: true },
    mime_type: { type: DataTypes.STRING(100), allowNull: false },
    // 64 chars — sized for a hex SHA-256 digest. TODO confirm algorithm.
    checksum: { type: DataTypes.STRING(64), allowNull: false },
    is_google_doc: { type: DataTypes.BOOLEAN, allowNull: false, defaultValue: false },
    google_doc_url: { type: DataTypes.STRING(500), allowNull: true },
    // Raw type name so the column uses the ENUM created above.
    category: { type: 'enum_document_category' as any, allowNull: false, defaultValue: 'OTHER' },
    // Versioning: later versions point at their predecessor (self-FK).
    version: { type: DataTypes.INTEGER, allowNull: false, defaultValue: 1 },
    parent_document_id: { type: DataTypes.UUID, allowNull: true, references: { model: 'documents', key: 'document_id' } },
    is_deleted: { type: DataTypes.BOOLEAN, allowNull: false, defaultValue: false },
    download_count: { type: DataTypes.INTEGER, allowNull: false, defaultValue: 0 },
    uploaded_at: { type: DataTypes.DATE, allowNull: false, defaultValue: DataTypes.NOW },
  });
  await queryInterface.sequelize.query('CREATE INDEX IF NOT EXISTS "documents_request_id" ON "documents" ("request_id");');
  await queryInterface.sequelize.query('CREATE INDEX IF NOT EXISTS "documents_uploaded_by" ON "documents" ("uploaded_by");');
  await queryInterface.sequelize.query('CREATE INDEX IF NOT EXISTS "documents_category" ON "documents" ("category");');
}
/** Rolls back up(): table first, then the document-category ENUM. */
export async function down(queryInterface: QueryInterface): Promise<void> {
  const { sequelize } = queryInterface;
  await queryInterface.dropTable('documents');
  await sequelize.query('DROP TYPE IF EXISTS enum_document_category;');
}

View File

@ -1,21 +0,0 @@
import { QueryInterface, DataTypes } from 'sequelize';
module.exports = {
  // Creates the subscriptions table — one row per browser notification
  // endpoint (endpoint/p256dh/auth look like Web Push fields; confirm with the
  // service that writes them). `endpoint` is unique across all users.
  up: async (queryInterface: QueryInterface) => {
    const subscriptionColumns = {
      subscription_id: { type: DataTypes.UUID, primaryKey: true, defaultValue: DataTypes.UUIDV4, allowNull: false },
      user_id: { type: DataTypes.UUID, allowNull: false },
      endpoint: { type: DataTypes.STRING(1000), allowNull: false, unique: true },
      p256dh: { type: DataTypes.STRING(255), allowNull: false },
      auth: { type: DataTypes.STRING(255), allowNull: false },
      user_agent: { type: DataTypes.STRING(500), allowNull: true },
      created_at: { type: DataTypes.DATE, allowNull: false, defaultValue: DataTypes.NOW }
    };
    await queryInterface.createTable('subscriptions', subscriptionColumns);
    // Idempotent index for per-user lookups.
    await queryInterface.sequelize.query('CREATE INDEX IF NOT EXISTS "subscriptions_user_id" ON "subscriptions" ("user_id");');
  },
  // Rollback simply drops the table (no ENUMs were created).
  down: async (queryInterface: QueryInterface) => {
    await queryInterface.dropTable('subscriptions');
  }
};

View File

@ -1,29 +0,0 @@
import { QueryInterface, DataTypes } from 'sequelize';
module.exports = {
  // Creates the activities table: an append-only audit log of events on a
  // request (type/description/category/severity plus actor and client info).
  up: async (queryInterface: QueryInterface) => {
    await queryInterface.createTable('activities', {
      activity_id: { type: DataTypes.UUID, primaryKey: true, defaultValue: DataTypes.UUIDV4, allowNull: false },
      // Plain UUIDs — no FK constraints declared on this table.
      request_id: { type: DataTypes.UUID, allowNull: false },
      // Nullable actor: presumably null for system-generated events — verify
      // against the activity logger.
      user_id: { type: DataTypes.UUID, allowNull: true },
      user_name: { type: DataTypes.STRING(255), allowNull: true },
      activity_type: { type: DataTypes.STRING(100), allowNull: false },
      activity_description: { type: DataTypes.TEXT, allowNull: false },
      activity_category: { type: DataTypes.STRING(100), allowNull: true },
      severity: { type: DataTypes.STRING(50), allowNull: true },
      // Free-form structured payload for the event.
      metadata: { type: DataTypes.JSONB, allowNull: true },
      is_system_event: { type: DataTypes.BOOLEAN, allowNull: true },
      // Client context captured at log time.
      ip_address: { type: DataTypes.STRING(100), allowNull: true },
      user_agent: { type: DataTypes.TEXT, allowNull: true },
      created_at: { type: DataTypes.DATE, allowNull: false, defaultValue: DataTypes.NOW }
    });
    // Idempotent indexes for the common query paths: by request, by time, by type.
    await queryInterface.sequelize.query('CREATE INDEX IF NOT EXISTS "activities_request_id" ON "activities" ("request_id");');
    await queryInterface.sequelize.query('CREATE INDEX IF NOT EXISTS "activities_created_at" ON "activities" ("created_at");');
    await queryInterface.sequelize.query('CREATE INDEX IF NOT EXISTS "activities_activity_type" ON "activities" ("activity_type");');
  },
  // Rollback: drop the table (no ENUM types to clean up).
  down: async (queryInterface: QueryInterface) => {
    await queryInterface.dropTable('activities');
  }
};

View File

@ -1,32 +0,0 @@
import { QueryInterface, DataTypes } from 'sequelize';
module.exports = {
  // Creates the work_notes table: threaded discussion messages on a request,
  // with mentions, reactions, and soft-delete/edit flags.
  up: async (queryInterface: QueryInterface) => {
    await queryInterface.createTable('work_notes', {
      note_id: { type: DataTypes.UUID, primaryKey: true, defaultValue: DataTypes.UUIDV4, allowNull: false },
      // Plain UUIDs — no FK constraints declared on this table.
      request_id: { type: DataTypes.UUID, allowNull: false },
      user_id: { type: DataTypes.UUID, allowNull: false },
      user_name: { type: DataTypes.STRING(255), allowNull: true },
      user_role: { type: DataTypes.STRING(50), allowNull: true },
      message: { type: DataTypes.TEXT, allowNull: false },
      message_type: { type: DataTypes.STRING(50), allowNull: true },
      is_priority: { type: DataTypes.BOOLEAN, allowNull: true },
      has_attachment: { type: DataTypes.BOOLEAN, allowNull: true },
      // Self-reference for threading: replies point at their parent note.
      parent_note_id: { type: DataTypes.UUID, allowNull: true },
      mentioned_users: { type: DataTypes.ARRAY(DataTypes.UUID), allowNull: true },
      // Free-form reaction payload (e.g. emoji -> users; confirm shape with writer).
      reactions: { type: DataTypes.JSONB, allowNull: true },
      is_edited: { type: DataTypes.BOOLEAN, allowNull: true },
      is_deleted: { type: DataTypes.BOOLEAN, allowNull: true },
      created_at: { type: DataTypes.DATE, allowNull: false, defaultValue: DataTypes.NOW },
      updated_at: { type: DataTypes.DATE, allowNull: false, defaultValue: DataTypes.NOW }
    });
    // Idempotent indexes for lookups by request, author, and time.
    await queryInterface.sequelize.query('CREATE INDEX IF NOT EXISTS "work_notes_request_id" ON "work_notes" ("request_id");');
    await queryInterface.sequelize.query('CREATE INDEX IF NOT EXISTS "work_notes_user_id" ON "work_notes" ("user_id");');
    await queryInterface.sequelize.query('CREATE INDEX IF NOT EXISTS "work_notes_created_at" ON "work_notes" ("created_at");');
  },
  // Rollback: drop the table (no ENUM types to clean up).
  down: async (queryInterface: QueryInterface) => {
    await queryInterface.dropTable('work_notes');
  }
};

View File

@ -1,25 +0,0 @@
import { QueryInterface, DataTypes } from 'sequelize';
module.exports = {
  // Creates the work_note_attachments table: file metadata for attachments on
  // work notes. note_id is a plain UUID (no FK constraint declared here).
  up: async (queryInterface: QueryInterface) => {
    const attachmentColumns = {
      attachment_id: { type: DataTypes.UUID, primaryKey: true, defaultValue: DataTypes.UUIDV4, allowNull: false },
      note_id: { type: DataTypes.UUID, allowNull: false },
      file_name: { type: DataTypes.STRING(255), allowNull: false },
      file_type: { type: DataTypes.STRING(100), allowNull: false },
      file_size: { type: DataTypes.BIGINT, allowNull: false },
      file_path: { type: DataTypes.STRING(500), allowNull: false },
      storage_url: { type: DataTypes.STRING(500), allowNull: true },
      is_downloadable: { type: DataTypes.BOOLEAN, allowNull: true },
      download_count: { type: DataTypes.INTEGER, allowNull: true, defaultValue: 0 },
      uploaded_at: { type: DataTypes.DATE, allowNull: false, defaultValue: DataTypes.NOW }
    };
    await queryInterface.createTable('work_note_attachments', attachmentColumns);
    // Idempotent indexes for lookups by note and by upload time.
    await queryInterface.sequelize.query('CREATE INDEX IF NOT EXISTS "work_note_attachments_note_id" ON "work_note_attachments" ("note_id");');
    await queryInterface.sequelize.query('CREATE INDEX IF NOT EXISTS "work_note_attachments_uploaded_at" ON "work_note_attachments" ("uploaded_at");');
  },
  // Rollback simply drops the table.
  down: async (queryInterface: QueryInterface) => {
    await queryInterface.dropTable('work_note_attachments');
  }
};

View File

@ -1,49 +0,0 @@
import { QueryInterface, DataTypes } from 'sequelize';
/**
 * Migration to add TAT (turn-around-time) alert tracking fields to the
 * approval_levels table. The three boolean flags record whether the 50%/75%
 * notifications were sent and whether the TAT was breached; tat_start_time is
 * the timestamp TAT elapsed time is measured from.
 *
 * Each column is added only if it does not already exist, so the migration is
 * safe to re-run against a partially migrated database.
 */
export async function up(queryInterface: QueryInterface): Promise<void> {
  // The attribute-options type accepted by addColumn's third parameter.
  type ColumnSpec = Parameters<QueryInterface['addColumn']>[2];
  // Fresh object per column so no spec instance is shared.
  const unsentFlag = (): ColumnSpec => ({
    type: DataTypes.BOOLEAN,
    allowNull: false,
    defaultValue: false
  });
  // Data-driven replacement for four copy-pasted check-then-add stanzas.
  const columns: Record<string, ColumnSpec> = {
    tat50_alert_sent: unsentFlag(),
    tat75_alert_sent: unsentFlag(),
    tat_breached: unsentFlag(),
    tat_start_time: { type: DataTypes.DATE, allowNull: true }
  };
  const tableDescription = await queryInterface.describeTable('approval_levels');
  for (const [name, spec] of Object.entries(columns)) {
    if (!tableDescription[name]) {
      await queryInterface.addColumn('approval_levels', name, spec);
    }
  }
}
/**
 * Rolls back the TAT alert tracking columns from approval_levels.
 *
 * Mirrors up()'s existence checks: only columns actually present are removed,
 * so rollback no longer fails on a database where some columns were never
 * added (up() skips columns that already exist, and removeColumn on a missing
 * column would otherwise abort the whole rollback midway).
 */
export async function down(queryInterface: QueryInterface): Promise<void> {
  const tableDescription = await queryInterface.describeTable('approval_levels');
  const tatColumns = [
    'tat50_alert_sent',
    'tat75_alert_sent',
    'tat_breached',
    'tat_start_time'
  ];
  for (const name of tatColumns) {
    if (tableDescription[name]) {
      await queryInterface.removeColumn('approval_levels', name);
    }
  }
}

View File

@ -1,134 +0,0 @@
import { QueryInterface, DataTypes } from 'sequelize';
/**
 * Migration to create admin_configurations table.
 * Stores system-wide configuration settings as key/value rows, each carrying
 * its own type, validation rules, and admin-UI rendering hints.
 *
 * Note: the two DataTypes.ENUM columns cause Sequelize to create the Postgres
 * types enum_admin_configurations_config_category and
 * enum_admin_configurations_value_type, which down() drops explicitly.
 */
export async function up(queryInterface: QueryInterface): Promise<void> {
  await queryInterface.createTable('admin_configurations', {
    config_id: {
      type: DataTypes.UUID,
      defaultValue: DataTypes.UUIDV4,
      primaryKey: true
    },
    config_key: {
      type: DataTypes.STRING(100),
      allowNull: false,
      unique: true,
      comment: 'Unique configuration key (e.g., "DEFAULT_TAT_EXPRESS", "MAX_FILE_SIZE")'
    },
    config_category: {
      type: DataTypes.ENUM(
        'TAT_SETTINGS',
        'NOTIFICATION_RULES',
        'DOCUMENT_POLICY',
        'USER_ROLES',
        'DASHBOARD_LAYOUT',
        'AI_CONFIGURATION',
        'WORKFLOW_SHARING',
        'SYSTEM_SETTINGS'
      ),
      allowNull: false,
      comment: 'Category of the configuration'
    },
    config_value: {
      type: DataTypes.TEXT,
      allowNull: false,
      comment: 'Configuration value (can be JSON string for complex values)'
    },
    // How to interpret config_value (stored as TEXT regardless).
    value_type: {
      type: DataTypes.ENUM('STRING', 'NUMBER', 'BOOLEAN', 'JSON', 'ARRAY'),
      defaultValue: 'STRING',
      comment: 'Data type of the value'
    },
    display_name: {
      type: DataTypes.STRING(200),
      allowNull: false,
      comment: 'Human-readable name for UI display'
    },
    description: {
      type: DataTypes.TEXT,
      allowNull: true,
      comment: 'Description of what this configuration does'
    },
    default_value: {
      type: DataTypes.TEXT,
      allowNull: true,
      comment: 'Default value if reset'
    },
    is_editable: {
      type: DataTypes.BOOLEAN,
      defaultValue: true,
      comment: 'Whether this config can be edited by admin'
    },
    is_sensitive: {
      type: DataTypes.BOOLEAN,
      defaultValue: false,
      comment: 'Whether this contains sensitive data (e.g., API keys)'
    },
    validation_rules: {
      type: DataTypes.JSONB,
      defaultValue: {},
      comment: 'Validation rules (min, max, regex, etc.)'
    },
    // Admin-UI rendering hints.
    ui_component: {
      type: DataTypes.STRING(50),
      allowNull: true,
      comment: 'UI component type (input, select, toggle, slider, etc.)'
    },
    options: {
      type: DataTypes.JSONB,
      allowNull: true,
      comment: 'Options for select/radio inputs'
    },
    sort_order: {
      type: DataTypes.INTEGER,
      defaultValue: 0,
      comment: 'Display order in admin panel'
    },
    requires_restart: {
      type: DataTypes.BOOLEAN,
      defaultValue: false,
      comment: 'Whether changing this requires server restart'
    },
    // Audit trail: who changed this last, and when.
    last_modified_by: {
      type: DataTypes.UUID,
      allowNull: true,
      references: {
        model: 'users',
        key: 'user_id'
      },
      comment: 'Admin who last modified this'
    },
    last_modified_at: {
      type: DataTypes.DATE,
      allowNull: true,
      comment: 'When this was last modified'
    },
    created_at: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW
    },
    updated_at: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW
    }
  });
  // Indexes (with IF NOT EXISTS)
  await queryInterface.sequelize.query('CREATE INDEX IF NOT EXISTS "admin_configurations_config_category" ON "admin_configurations" ("config_category");');
  await queryInterface.sequelize.query('CREATE INDEX IF NOT EXISTS "admin_configurations_is_editable" ON "admin_configurations" ("is_editable");');
  await queryInterface.sequelize.query('CREATE INDEX IF NOT EXISTS "admin_configurations_sort_order" ON "admin_configurations" ("sort_order");');
  // Admin config table created
}
/**
 * Rolls back up(): drops the table, then the two ENUM types Sequelize
 * auto-created for the config_category and value_type columns.
 */
export async function down(queryInterface: QueryInterface): Promise<void> {
  const { sequelize } = queryInterface;
  await queryInterface.dropTable('admin_configurations');
  for (const enumType of [
    'enum_admin_configurations_config_category',
    'enum_admin_configurations_value_type'
  ]) {
    await sequelize.query(`DROP TYPE IF EXISTS "${enumType}";`);
  }
}

View File

@ -1,106 +0,0 @@
import { QueryInterface, DataTypes } from 'sequelize';
/**
 * Migration to create holidays table for organization holiday calendar.
 * Holidays are excluded from working days in TAT calculations for STANDARD
 * priority. Rows can be scoped to departments/locations and may recur
 * annually via an RRULE.
 *
 * Note: the DataTypes.ENUM column causes Sequelize to create the Postgres
 * type enum_holidays_holiday_type, which down() drops explicitly.
 */
export async function up(queryInterface: QueryInterface): Promise<void> {
  await queryInterface.createTable('holidays', {
    holiday_id: {
      type: DataTypes.UUID,
      defaultValue: DataTypes.UUIDV4,
      primaryKey: true
    },
    // DATEONLY + unique: at most one holiday row per calendar date.
    holiday_date: {
      type: DataTypes.DATEONLY,
      allowNull: false,
      unique: true,
      comment: 'The date of the holiday (YYYY-MM-DD)'
    },
    holiday_name: {
      type: DataTypes.STRING(200),
      allowNull: false,
      comment: 'Name/title of the holiday (e.g., "Diwali", "Republic Day")'
    },
    description: {
      type: DataTypes.TEXT,
      allowNull: true,
      comment: 'Optional description or notes about the holiday'
    },
    is_recurring: {
      type: DataTypes.BOOLEAN,
      defaultValue: false,
      comment: 'Whether this holiday recurs annually (e.g., Independence Day)'
    },
    recurrence_rule: {
      type: DataTypes.STRING(100),
      allowNull: true,
      comment: 'RRULE for recurring holidays (e.g., "FREQ=YEARLY;BYMONTH=8;BYMONTHDAY=15")'
    },
    holiday_type: {
      type: DataTypes.ENUM('NATIONAL', 'REGIONAL', 'ORGANIZATIONAL', 'OPTIONAL'),
      defaultValue: 'ORGANIZATIONAL',
      comment: 'Type of holiday'
    },
    is_active: {
      type: DataTypes.BOOLEAN,
      defaultValue: true,
      comment: 'Whether this holiday is currently active/applicable'
    },
    // Scoping arrays: null means "applies everywhere".
    applies_to_departments: {
      type: DataTypes.ARRAY(DataTypes.STRING),
      allowNull: true,
      defaultValue: null,
      comment: 'If null, applies to all departments. Otherwise, specific departments only'
    },
    applies_to_locations: {
      type: DataTypes.ARRAY(DataTypes.STRING),
      allowNull: true,
      defaultValue: null,
      comment: 'If null, applies to all locations. Otherwise, specific locations only'
    },
    // Audit trail: creator is required, updater optional.
    created_by: {
      type: DataTypes.UUID,
      allowNull: false,
      references: {
        model: 'users',
        key: 'user_id'
      },
      comment: 'Admin user who created this holiday'
    },
    updated_by: {
      type: DataTypes.UUID,
      allowNull: true,
      references: {
        model: 'users',
        key: 'user_id'
      },
      comment: 'Admin user who last updated this holiday'
    },
    created_at: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW
    },
    updated_at: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW
    }
  });
  // Indexes for performance (with IF NOT EXISTS)
  await queryInterface.sequelize.query('CREATE INDEX IF NOT EXISTS "holidays_holiday_date" ON "holidays" ("holiday_date");');
  await queryInterface.sequelize.query('CREATE INDEX IF NOT EXISTS "holidays_is_active" ON "holidays" ("is_active");');
  await queryInterface.sequelize.query('CREATE INDEX IF NOT EXISTS "holidays_holiday_type" ON "holidays" ("holiday_type");');
  await queryInterface.sequelize.query('CREATE INDEX IF NOT EXISTS "holidays_created_by" ON "holidays" ("created_by");');
  // Holidays table created
}
/**
 * Rolls back up(): drops the table, then the ENUM type Sequelize auto-created
 * for the holiday_type column.
 */
export async function down(queryInterface: QueryInterface): Promise<void> {
  const { sequelize } = queryInterface;
  await queryInterface.dropTable('holidays');
  await sequelize.query('DROP TYPE IF EXISTS "enum_holidays_holiday_type";');
}

View File

@ -1,266 +0,0 @@
import { QueryInterface } from 'sequelize';
/**
* Migration to create database views for KPI reporting
* These views pre-aggregate data for faster reporting queries
*/
export async function up(queryInterface: QueryInterface): Promise<void> {
// 1. Request Volume & Status Summary View
await queryInterface.sequelize.query(`
CREATE OR REPLACE VIEW vw_request_volume_summary AS
SELECT
w.request_id,
w.request_number,
w.title,
w.status,
w.priority,
w.template_type,
w.submission_date,
w.closure_date,
w.created_at,
u.user_id as initiator_id,
u.display_name as initiator_name,
u.department as initiator_department,
EXTRACT(EPOCH FROM (COALESCE(w.closure_date, NOW()) - w.submission_date)) / 3600 as cycle_time_hours,
EXTRACT(EPOCH FROM (NOW() - w.submission_date)) / 3600 as age_hours,
w.current_level,
w.total_levels,
w.total_tat_hours,
CASE
WHEN w.status IN ('APPROVED', 'REJECTED', 'CLOSED') THEN 'COMPLETED'
WHEN w.status = 'DRAFT' THEN 'DRAFT'
ELSE 'IN_PROGRESS'
END as status_category
FROM workflow_requests w
LEFT JOIN users u ON w.initiator_id = u.user_id
WHERE w.is_deleted = false;
`);
// 2. TAT Compliance View
await queryInterface.sequelize.query(`
CREATE OR REPLACE VIEW vw_tat_compliance AS
SELECT
al.level_id,
al.request_id,
w.request_number,
w.priority,
w.status as request_status,
al.level_number,
al.approver_id,
al.approver_name,
u.department as approver_department,
al.status as level_status,
al.tat_hours as allocated_hours,
al.elapsed_hours,
al.remaining_hours,
al.tat_percentage_used,
al.level_start_time,
al.level_end_time,
al.action_date,
al.tat50_alert_sent,
al.tat75_alert_sent,
al.tat_breached,
CASE
WHEN al.status IN ('APPROVED', 'REJECTED') AND al.elapsed_hours <= al.tat_hours THEN true
WHEN al.status IN ('APPROVED', 'REJECTED') AND al.elapsed_hours > al.tat_hours THEN false
WHEN al.status IN ('PENDING', 'IN_PROGRESS') AND al.tat_percentage_used >= 100 THEN false
ELSE null
END as completed_within_tat,
CASE
WHEN al.tat_percentage_used < 50 THEN 'ON_TRACK'
WHEN al.tat_percentage_used < 75 THEN 'AT_RISK'
WHEN al.tat_percentage_used < 100 THEN 'CRITICAL'
ELSE 'BREACHED'
END as tat_status,
CASE
WHEN al.status IN ('APPROVED', 'REJECTED') THEN
al.tat_hours - al.elapsed_hours
ELSE 0
END as time_saved_hours
FROM approval_levels al
JOIN workflow_requests w ON al.request_id = w.request_id
LEFT JOIN users u ON al.approver_id = u.user_id
WHERE w.is_deleted = false;
`);
// 3. Approver Performance View
await queryInterface.sequelize.query(`
CREATE OR REPLACE VIEW vw_approver_performance AS
SELECT
al.approver_id,
u.display_name as approver_name,
u.department,
u.designation,
COUNT(*) as total_assignments,
COUNT(CASE WHEN al.status = 'PENDING' THEN 1 END) as pending_count,
COUNT(CASE WHEN al.status = 'IN_PROGRESS' THEN 1 END) as in_progress_count,
COUNT(CASE WHEN al.status = 'APPROVED' THEN 1 END) as approved_count,
COUNT(CASE WHEN al.status = 'REJECTED' THEN 1 END) as rejected_count,
AVG(CASE WHEN al.status IN ('APPROVED', 'REJECTED') THEN al.elapsed_hours END) as avg_response_time_hours,
SUM(CASE WHEN al.elapsed_hours <= al.tat_hours AND al.status IN ('APPROVED', 'REJECTED') THEN 1 ELSE 0 END)::FLOAT /
NULLIF(COUNT(CASE WHEN al.status IN ('APPROVED', 'REJECTED') THEN 1 END), 0) * 100 as tat_compliance_percentage,
COUNT(CASE WHEN al.tat_breached = true THEN 1 END) as breaches_count,
MIN(CASE WHEN al.status = 'PENDING' OR al.status = 'IN_PROGRESS' THEN
EXTRACT(EPOCH FROM (NOW() - al.level_start_time)) / 3600
END) as oldest_pending_hours
FROM approval_levels al
JOIN users u ON al.approver_id = u.user_id
JOIN workflow_requests w ON al.request_id = w.request_id
WHERE w.is_deleted = false
GROUP BY al.approver_id, u.display_name, u.department, u.designation;
`);
// 4. TAT Alerts Summary View
await queryInterface.sequelize.query(`
CREATE OR REPLACE VIEW vw_tat_alerts_summary AS
SELECT
ta.alert_id,
ta.request_id,
w.request_number,
w.title as request_title,
w.priority,
ta.level_id,
al.level_number,
ta.approver_id,
ta.alert_type,
ta.threshold_percentage,
ta.tat_hours_allocated,
ta.tat_hours_elapsed,
ta.tat_hours_remaining,
ta.alert_sent_at,
ta.expected_completion_time,
ta.is_breached,
ta.was_completed_on_time,
ta.completion_time,
al.status as level_status,
EXTRACT(EPOCH FROM (ta.alert_sent_at - ta.level_start_time)) / 3600 as hours_before_alert,
CASE
WHEN ta.completion_time IS NOT NULL THEN
EXTRACT(EPOCH FROM (ta.completion_time - ta.alert_sent_at)) / 3600
ELSE NULL
END as response_time_after_alert_hours,
ta.metadata
FROM tat_alerts ta
JOIN workflow_requests w ON ta.request_id = w.request_id
JOIN approval_levels al ON ta.level_id = al.level_id
WHERE w.is_deleted = false
ORDER BY ta.alert_sent_at DESC;
`);
// 5. Department-wise Workflow Summary View
await queryInterface.sequelize.query(`
CREATE OR REPLACE VIEW vw_department_summary AS
SELECT
u.department,
COUNT(DISTINCT w.request_id) as total_requests,
COUNT(DISTINCT CASE WHEN w.status = 'DRAFT' THEN w.request_id END) as draft_requests,
COUNT(DISTINCT CASE WHEN w.status IN ('PENDING', 'IN_PROGRESS') THEN w.request_id END) as open_requests,
COUNT(DISTINCT CASE WHEN w.status = 'APPROVED' THEN w.request_id END) as approved_requests,
COUNT(DISTINCT CASE WHEN w.status = 'REJECTED' THEN w.request_id END) as rejected_requests,
AVG(CASE WHEN w.closure_date IS NOT NULL THEN
EXTRACT(EPOCH FROM (w.closure_date - w.submission_date)) / 3600
END) as avg_cycle_time_hours,
COUNT(DISTINCT CASE WHEN w.priority = 'EXPRESS' THEN w.request_id END) as express_priority_count,
COUNT(DISTINCT CASE WHEN w.priority = 'STANDARD' THEN w.request_id END) as standard_priority_count
FROM users u
LEFT JOIN workflow_requests w ON u.user_id = w.initiator_id AND w.is_deleted = false
WHERE u.department IS NOT NULL
GROUP BY u.department;
`);
// 6. Daily/Weekly KPI Metrics View
await queryInterface.sequelize.query(`
CREATE OR REPLACE VIEW vw_daily_kpi_metrics AS
SELECT
DATE(w.created_at) as date,
COUNT(*) as requests_created,
COUNT(CASE WHEN w.submission_date IS NOT NULL AND DATE(w.submission_date) = DATE(w.created_at) THEN 1 END) as requests_submitted,
COUNT(CASE WHEN w.closure_date IS NOT NULL AND DATE(w.closure_date) = DATE(w.created_at) THEN 1 END) as requests_closed,
COUNT(CASE WHEN w.status = 'APPROVED' AND DATE(w.closure_date) = DATE(w.created_at) THEN 1 END) as requests_approved,
COUNT(CASE WHEN w.status = 'REJECTED' AND DATE(w.closure_date) = DATE(w.created_at) THEN 1 END) as requests_rejected,
AVG(CASE WHEN w.closure_date IS NOT NULL AND DATE(w.closure_date) = DATE(w.created_at) THEN
EXTRACT(EPOCH FROM (w.closure_date - w.submission_date)) / 3600
END) as avg_completion_time_hours
FROM workflow_requests w
WHERE w.is_deleted = false
GROUP BY DATE(w.created_at)
ORDER BY DATE(w.created_at) DESC;
`);
// 7. Workflow Aging Report View
await queryInterface.sequelize.query(`
CREATE OR REPLACE VIEW vw_workflow_aging AS
SELECT
w.request_id,
w.request_number,
w.title,
w.status,
w.priority,
w.current_level,
w.total_levels,
w.submission_date,
EXTRACT(EPOCH FROM (NOW() - w.submission_date)) / (3600 * 24) as age_days,
CASE
WHEN EXTRACT(EPOCH FROM (NOW() - w.submission_date)) / (3600 * 24) < 3 THEN 'FRESH'
WHEN EXTRACT(EPOCH FROM (NOW() - w.submission_date)) / (3600 * 24) < 7 THEN 'NORMAL'
WHEN EXTRACT(EPOCH FROM (NOW() - w.submission_date)) / (3600 * 24) < 14 THEN 'AGING'
ELSE 'CRITICAL'
END as age_category,
al.approver_name as current_approver,
al.level_start_time as current_level_start,
EXTRACT(EPOCH FROM (NOW() - al.level_start_time)) / 3600 as current_level_age_hours,
al.tat_hours as current_level_tat_hours,
al.tat_percentage_used as current_level_tat_used
FROM workflow_requests w
LEFT JOIN approval_levels al ON w.request_id = al.request_id
AND al.level_number = w.current_level
AND al.status IN ('PENDING', 'IN_PROGRESS')
WHERE w.status IN ('PENDING', 'IN_PROGRESS')
AND w.is_deleted = false
ORDER BY age_days DESC;
`);
// 8. Engagement & Quality Metrics View
await queryInterface.sequelize.query(`
CREATE OR REPLACE VIEW vw_engagement_metrics AS
SELECT
w.request_id,
w.request_number,
w.title,
w.status,
COUNT(DISTINCT wn.note_id) as work_notes_count,
COUNT(DISTINCT d.document_id) as documents_count,
COUNT(DISTINCT p.participant_id) as spectators_count,
COUNT(DISTINCT al.approver_id) as approvers_count,
MAX(wn.created_at) as last_comment_date,
MAX(d.uploaded_at) as last_document_date,
CASE
WHEN COUNT(DISTINCT wn.note_id) > 10 THEN 'HIGH'
WHEN COUNT(DISTINCT wn.note_id) > 5 THEN 'MEDIUM'
ELSE 'LOW'
END as engagement_level
FROM workflow_requests w
LEFT JOIN work_notes wn ON w.request_id = wn.request_id AND wn.is_deleted = false
LEFT JOIN documents d ON w.request_id = d.request_id AND d.is_deleted = false
LEFT JOIN participants p ON w.request_id = p.request_id AND p.participant_type = 'SPECTATOR'
LEFT JOIN approval_levels al ON w.request_id = al.request_id
WHERE w.is_deleted = false
GROUP BY w.request_id, w.request_number, w.title, w.status;
`);
// KPI views created
}
/**
 * Rollback: remove every KPI reporting view created by `up`.
 *
 * Views are dropped in reverse creation order; `IF EXISTS` keeps the
 * rollback safe even when some views were never created.
 */
export async function down(queryInterface: QueryInterface): Promise<void> {
  const kpiViews = [
    'vw_engagement_metrics',
    'vw_workflow_aging',
    'vw_daily_kpi_metrics',
    'vw_department_summary',
    'vw_tat_alerts_summary',
    'vw_approver_performance',
    'vw_tat_compliance',
    'vw_request_volume_summary'
  ];
  for (const view of kpiViews) {
    await queryInterface.sequelize.query(`DROP VIEW IF EXISTS ${view};`);
  }
}

View File

@ -1,134 +0,0 @@
import { QueryInterface, DataTypes } from 'sequelize';
/**
* Migration to create TAT alerts/reminders table
* Stores all TAT-related notifications sent (50%, 75%, 100%)
*/
// Creates the tat_alerts audit table: one row per TAT notification sent
// (50% / 75% / 100% thresholds) for an approval level, plus lookup indexes.
export async function up(queryInterface: QueryInterface): Promise<void> {
  await queryInterface.createTable('tat_alerts', {
    alert_id: {
      type: DataTypes.UUID,
      defaultValue: DataTypes.UUIDV4,
      primaryKey: true
    },
    // Workflow request this alert belongs to.
    request_id: {
      type: DataTypes.UUID,
      allowNull: false,
      references: {
        model: 'workflow_requests',
        key: 'request_id'
      }
    },
    // Approval level whose TAT clock triggered the alert.
    level_id: {
      type: DataTypes.UUID,
      allowNull: false,
      references: {
        model: 'approval_levels',
        key: 'level_id'
      }
    },
    // Approver who was notified.
    approver_id: {
      type: DataTypes.UUID,
      allowNull: false,
      references: {
        model: 'users',
        key: 'user_id'
      }
    },
    alert_type: {
      type: DataTypes.ENUM('TAT_50', 'TAT_75', 'TAT_100'),
      allowNull: false
    },
    threshold_percentage: {
      type: DataTypes.INTEGER,
      allowNull: false,
      comment: '50, 75, or 100'
    },
    tat_hours_allocated: {
      type: DataTypes.DECIMAL(10, 2),
      allowNull: false,
      comment: 'Total TAT hours for this level'
    },
    tat_hours_elapsed: {
      type: DataTypes.DECIMAL(10, 2),
      allowNull: false,
      comment: 'Hours elapsed when alert was sent'
    },
    tat_hours_remaining: {
      type: DataTypes.DECIMAL(10, 2),
      allowNull: false,
      comment: 'Hours remaining when alert was sent'
    },
    level_start_time: {
      type: DataTypes.DATE,
      allowNull: false,
      comment: 'When the approval level started'
    },
    alert_sent_at: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW,
      comment: 'When the alert was sent'
    },
    expected_completion_time: {
      type: DataTypes.DATE,
      allowNull: false,
      comment: 'When the level should be completed'
    },
    alert_message: {
      type: DataTypes.TEXT,
      allowNull: false,
      comment: 'The notification message sent'
    },
    notification_sent: {
      type: DataTypes.BOOLEAN,
      defaultValue: true,
      comment: 'Whether notification was successfully sent'
    },
    notification_channels: {
      type: DataTypes.ARRAY(DataTypes.STRING),
      defaultValue: [],
      comment: 'push, email, sms'
    },
    is_breached: {
      type: DataTypes.BOOLEAN,
      defaultValue: false,
      comment: 'Whether this was a breach alert (100%)'
    },
    // Filled in retroactively when the level completes (see KPI views).
    was_completed_on_time: {
      type: DataTypes.BOOLEAN,
      allowNull: true,
      comment: 'Set when level is completed - was it on time?'
    },
    completion_time: {
      type: DataTypes.DATE,
      allowNull: true,
      comment: 'When the level was actually completed'
    },
    metadata: {
      type: DataTypes.JSONB,
      defaultValue: {},
      comment: 'Additional context (priority, request title, etc.)'
    },
    created_at: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW
    }
  });
  // Indexes for performance (with IF NOT EXISTS check). Raw SQL is used
  // because queryInterface.addIndex has no IF NOT EXISTS option.
  await queryInterface.sequelize.query('CREATE INDEX IF NOT EXISTS "tat_alerts_request_id" ON "tat_alerts" ("request_id");');
  await queryInterface.sequelize.query('CREATE INDEX IF NOT EXISTS "tat_alerts_level_id" ON "tat_alerts" ("level_id");');
  await queryInterface.sequelize.query('CREATE INDEX IF NOT EXISTS "tat_alerts_approver_id" ON "tat_alerts" ("approver_id");');
  await queryInterface.sequelize.query('CREATE INDEX IF NOT EXISTS "tat_alerts_alert_type" ON "tat_alerts" ("alert_type");');
  await queryInterface.sequelize.query('CREATE INDEX IF NOT EXISTS "tat_alerts_alert_sent_at" ON "tat_alerts" ("alert_sent_at");');
  await queryInterface.sequelize.query('CREATE INDEX IF NOT EXISTS "tat_alerts_is_breached" ON "tat_alerts" ("is_breached");');
  await queryInterface.sequelize.query('CREATE INDEX IF NOT EXISTS "tat_alerts_was_completed_on_time" ON "tat_alerts" ("was_completed_on_time");');
}
/**
 * Rollback: drop the tat_alerts table, then clean up the Postgres enum
 * type that Sequelize auto-created for the alert_type column.
 */
export async function down(queryInterface: QueryInterface): Promise<void> {
  // Table first (it uses the enum), then the now-orphaned enum type.
  await queryInterface.dropTable('tat_alerts');
  const dropEnumSql = 'DROP TYPE IF EXISTS "enum_tat_alerts_alert_type";';
  await queryInterface.sequelize.query(dropEnumSql);
}

View File

@ -1,97 +0,0 @@
import { QueryInterface, DataTypes } from 'sequelize';
/**
* Migration: Add skip-related fields to approval_levels table
* Purpose: Track approvers who were skipped by initiator
* Date: 2025-11-05
*/
/**
 * Adds the initiator-skip tracking columns to approval_levels, plus a
 * partial index on skipped rows. Every step is guarded so the migration
 * can run against databases in any intermediate state.
 */
export async function up(queryInterface: QueryInterface): Promise<void> {
  // Bail out when the base table has not been created yet.
  const tables = await queryInterface.showAllTables();
  if (!tables.includes('approval_levels')) {
    return;
  }
  const existingColumns = await queryInterface.describeTable('approval_levels');
  // Column definitions, applied in order and only when missing.
  const skipColumns = [
    {
      name: 'is_skipped',
      definition: {
        type: DataTypes.BOOLEAN,
        allowNull: false,
        defaultValue: false,
        comment: 'Indicates if this approver was skipped by initiator'
      }
    },
    {
      name: 'skipped_at',
      definition: {
        type: DataTypes.DATE,
        allowNull: true,
        comment: 'Timestamp when approver was skipped'
      }
    },
    {
      name: 'skipped_by',
      definition: {
        type: DataTypes.UUID,
        allowNull: true,
        references: {
          model: 'users',
          key: 'user_id'
        },
        onUpdate: 'CASCADE',
        onDelete: 'SET NULL',
        comment: 'User ID who skipped this approver'
      }
    },
    {
      name: 'skip_reason',
      definition: {
        type: DataTypes.TEXT,
        allowNull: true,
        comment: 'Reason for skipping this approver'
      }
    }
  ];
  for (const { name, definition } of skipColumns) {
    if (!existingColumns[name]) {
      await queryInterface.addColumn('approval_levels', name, definition);
    }
  }
  // Partial index over skipped rows only; tolerate a pre-existing index.
  try {
    const indexes: any[] = await queryInterface.showIndex('approval_levels') as any[];
    const alreadyIndexed = Array.isArray(indexes) && indexes.some((idx: any) => idx.name === 'idx_approval_levels_skipped');
    if (!alreadyIndexed) {
      await queryInterface.addIndex('approval_levels', ['is_skipped'], {
        name: 'idx_approval_levels_skipped',
        where: {
          is_skipped: true
        }
      });
    }
  } catch (error) {
    // Index already exists — nothing to do.
  }
}
/**
 * Rollback for the skip-tracking fields.
 *
 * Mirrors the guards in `up`: the table may not exist (up() no-oped), and
 * individual columns/index may be missing, so each removal is conditional
 * instead of failing the whole rollback on the first absent object.
 */
export async function down(queryInterface: QueryInterface): Promise<void> {
  const tables = await queryInterface.showAllTables();
  if (!tables.includes('approval_levels')) {
    // up() never ran against this database; nothing to undo.
    return;
  }
  // Remove the index first (it depends on is_skipped); ignore if absent.
  try {
    await queryInterface.removeIndex('approval_levels', 'idx_approval_levels_skipped');
  } catch (error) {
    // Index was never created — safe to continue.
  }
  // Remove columns in reverse creation order, skipping any that are missing.
  const tableDescription = await queryInterface.describeTable('approval_levels');
  for (const column of ['skip_reason', 'skipped_by', 'skipped_at', 'is_skipped']) {
    if (tableDescription[column]) {
      await queryInterface.removeColumn('approval_levels', column);
    }
  }
}

View File

@ -1,76 +0,0 @@
import { QueryInterface } from 'sequelize';
/**
* Migration: Convert tat_days to GENERATED STORED column
*
* This ensures tat_days is auto-calculated from tat_hours across all environments.
* Production already has this as a generated column, this migration makes other environments consistent.
*/
/**
 * Ensures approval_levels.tat_days is a GENERATED STORED column derived
 * from tat_hours (CEIL(tat_hours / 24.0)), matching production. No-ops
 * when the column is already generated.
 */
export async function up(queryInterface: QueryInterface): Promise<void> {
  // Inspect pg catalogs: attgenerated = 's' marks a STORED generated column.
  const rows = await queryInterface.sequelize.query(`
    SELECT
      a.attname as column_name,
      a.attgenerated as is_generated
    FROM pg_attribute a
    JOIN pg_class c ON a.attrelid = c.oid
    WHERE c.relname = 'approval_levels'
      AND a.attname = 'tat_days'
      AND NOT a.attisdropped;
  `, { type: 'SELECT' });
  const tatDaysColumn = rows[0] as any;
  if (tatDaysColumn?.is_generated === 's') {
    // Already a GENERATED column — nothing to convert.
    return;
  }
  // Step 1: drop the existing regular column.
  await queryInterface.sequelize.query(`
    ALTER TABLE approval_levels DROP COLUMN IF EXISTS tat_days;
  `);
  // Step 2: re-add it as GENERATED ALWAYS ... STORED, rounding up to days.
  await queryInterface.sequelize.query(`
    ALTER TABLE approval_levels
    ADD COLUMN tat_days INTEGER
    GENERATED ALWAYS AS (CAST(CEIL(tat_hours / 24.0) AS INTEGER)) STORED;
  `);
}
/**
 * Rollback: revert tat_days to a plain, manually-maintained INTEGER column,
 * backfilled with the same CEIL(tat_hours / 24.0) rule the generated column
 * used, then restored to NOT NULL.
 */
export async function down(queryInterface: QueryInterface): Promise<void> {
  const run = (sql: string) => queryInterface.sequelize.query(sql);
  // 1. Drop the generated column.
  await run(`
    ALTER TABLE approval_levels DROP COLUMN IF EXISTS tat_days;
  `);
  // 2. Re-create it as an ordinary nullable column.
  await run(`
    ALTER TABLE approval_levels
    ADD COLUMN tat_days INTEGER;
  `);
  // 3. Backfill existing rows from tat_hours.
  await run(`
    UPDATE approval_levels
    SET tat_days = CAST(CEIL(tat_hours / 24.0) AS INTEGER)
    WHERE tat_days IS NULL;
  `);
  // 4. Restore the NOT NULL constraint once every row is populated.
  await run(`
    ALTER TABLE approval_levels
    ALTER COLUMN tat_days SET NOT NULL;
  `);
}

View File

@ -1,109 +0,0 @@
import { QueryInterface, DataTypes } from 'sequelize';
/**
* Migration to create conclusion_remarks table
* Stores AI-generated and finalized conclusion remarks for workflow requests
*/
// Creates conclusion_remarks: one row per workflow request holding the
// AI-generated closing remark and (optionally) its human-finalized version.
export async function up(queryInterface: QueryInterface): Promise<void> {
  await queryInterface.createTable('conclusion_remarks', {
    conclusion_id: {
      type: DataTypes.UUID,
      defaultValue: DataTypes.UUIDV4,
      primaryKey: true,
      allowNull: false
    },
    request_id: {
      type: DataTypes.UUID,
      allowNull: false,
      references: {
        model: 'workflow_requests',
        key: 'request_id'
      },
      onUpdate: 'CASCADE',
      onDelete: 'CASCADE',
      unique: true // One conclusion per request
    },
    // Raw AI output before any human editing.
    ai_generated_remark: {
      type: DataTypes.TEXT,
      allowNull: true
    },
    ai_model_used: {
      type: DataTypes.STRING(100),
      allowNull: true
    },
    ai_confidence_score: {
      type: DataTypes.DECIMAL(5, 2),
      allowNull: true
    },
    // The remark as finalized (possibly edited) by a human.
    final_remark: {
      type: DataTypes.TEXT,
      allowNull: true
    },
    edited_by: {
      type: DataTypes.UUID,
      allowNull: true,
      references: {
        model: 'users',
        key: 'user_id'
      },
      onUpdate: 'CASCADE',
      onDelete: 'SET NULL'
    },
    is_edited: {
      type: DataTypes.BOOLEAN,
      allowNull: false,
      defaultValue: false
    },
    edit_count: {
      type: DataTypes.INTEGER,
      allowNull: false,
      defaultValue: 0
    },
    // Structured JSON context captured at generation time.
    approval_summary: {
      type: DataTypes.JSONB,
      allowNull: true
    },
    document_summary: {
      type: DataTypes.JSONB,
      allowNull: true
    },
    key_discussion_points: {
      type: DataTypes.ARRAY(DataTypes.TEXT),
      allowNull: false,
      defaultValue: []
    },
    generated_at: {
      type: DataTypes.DATE,
      allowNull: true
    },
    finalized_at: {
      type: DataTypes.DATE,
      allowNull: true
    },
    created_at: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW
    },
    updated_at: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW
    }
  });
  // Add index on request_id for faster lookups
  await queryInterface.addIndex('conclusion_remarks', ['request_id'], {
    name: 'idx_conclusion_remarks_request_id'
  });
  // Add index on finalized_at for KPI queries
  await queryInterface.addIndex('conclusion_remarks', ['finalized_at'], {
    name: 'idx_conclusion_remarks_finalized_at'
  });
}
// Rollback: drop conclusion_remarks (its indexes are dropped with the table).
export async function down(queryInterface: QueryInterface): Promise<void> {
  await queryInterface.dropTable('conclusion_remarks');
}

View File

@ -1,137 +0,0 @@
import { QueryInterface, DataTypes } from 'sequelize';
// Creates the notifications table (per-user in-app notifications with
// delivery tracking) and its custom priority enum type.
export async function up(queryInterface: QueryInterface): Promise<void> {
  // Create priority enum type. The DO block swallows duplicate_object so
  // the migration is re-runnable against a database that already has it.
  await queryInterface.sequelize.query(`
    DO $$ BEGIN
      CREATE TYPE notification_priority_enum AS ENUM ('LOW', 'MEDIUM', 'HIGH', 'URGENT');
    EXCEPTION
      WHEN duplicate_object THEN null;
    END $$;
  `);
  // Create notifications table
  await queryInterface.createTable('notifications', {
    notification_id: {
      type: DataTypes.UUID,
      defaultValue: DataTypes.UUIDV4,
      primaryKey: true
    },
    // Recipient; notifications are deleted with the user.
    user_id: {
      type: DataTypes.UUID,
      allowNull: false,
      references: {
        model: 'users',
        key: 'user_id'
      },
      onUpdate: 'CASCADE',
      onDelete: 'CASCADE'
    },
    // Optional link to a workflow request; kept (nulled) if the request goes.
    request_id: {
      type: DataTypes.UUID,
      allowNull: true,
      references: {
        model: 'workflow_requests',
        key: 'request_id'
      },
      onUpdate: 'CASCADE',
      onDelete: 'SET NULL'
    },
    notification_type: {
      type: DataTypes.STRING(50),
      allowNull: false
    },
    title: {
      type: DataTypes.STRING(255),
      allowNull: false
    },
    message: {
      type: DataTypes.TEXT,
      allowNull: false
    },
    is_read: {
      type: DataTypes.BOOLEAN,
      defaultValue: false,
      allowNull: false
    },
    // Raw type name string: the enum was created above, outside Sequelize.
    priority: {
      type: 'notification_priority_enum',
      defaultValue: 'MEDIUM',
      allowNull: false
    },
    action_url: {
      type: DataTypes.STRING(500),
      allowNull: true
    },
    action_required: {
      type: DataTypes.BOOLEAN,
      defaultValue: false,
      allowNull: false
    },
    metadata: {
      type: DataTypes.JSONB,
      allowNull: true,
      defaultValue: {}
    },
    // Delivery bookkeeping per channel.
    sent_via: {
      type: DataTypes.ARRAY(DataTypes.STRING),
      defaultValue: [],
      allowNull: false
    },
    email_sent: {
      type: DataTypes.BOOLEAN,
      defaultValue: false,
      allowNull: false
    },
    sms_sent: {
      type: DataTypes.BOOLEAN,
      defaultValue: false,
      allowNull: false
    },
    push_sent: {
      type: DataTypes.BOOLEAN,
      defaultValue: false,
      allowNull: false
    },
    read_at: {
      type: DataTypes.DATE,
      allowNull: true
    },
    expires_at: {
      type: DataTypes.DATE,
      allowNull: true
    },
    created_at: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW
    }
  });
  // Create indexes for better query performance
  await queryInterface.addIndex('notifications', ['user_id'], {
    name: 'idx_notifications_user_id'
  });
  // Composite index serves the common "unread for user" query.
  await queryInterface.addIndex('notifications', ['user_id', 'is_read'], {
    name: 'idx_notifications_user_unread'
  });
  await queryInterface.addIndex('notifications', ['request_id'], {
    name: 'idx_notifications_request_id'
  });
  await queryInterface.addIndex('notifications', ['created_at'], {
    name: 'idx_notifications_created_at'
  });
  await queryInterface.addIndex('notifications', ['notification_type'], {
    name: 'idx_notifications_type'
  });
}
/**
 * Rollback: drop the notifications table, then the custom priority enum
 * type that the table's priority column referenced.
 */
export async function down(queryInterface: QueryInterface): Promise<void> {
  // Dependent table goes first, then the enum type it used.
  await queryInterface.dropTable('notifications');
  const dropEnumSql = 'DROP TYPE IF EXISTS notification_priority_enum;';
  await queryInterface.sequelize.query(dropEnumSql);
}

View File

@ -1,49 +0,0 @@
import { QueryInterface, DataTypes } from 'sequelize';
/**
* Migration: Add breach_reason column to approval_levels table
* Purpose: Store TAT breach reason directly in approval_levels table
* Date: 2025-11-18
*/
/**
 * Adds the nullable breach_reason TEXT column to approval_levels.
 * Guard clauses make the migration a no-op when the table is missing or
 * the column already exists.
 */
export async function up(queryInterface: QueryInterface): Promise<void> {
  // The base table may not exist yet in a fresh database.
  const allTables = await queryInterface.showAllTables();
  if (!allTables.includes('approval_levels')) {
    return;
  }
  const columns = await queryInterface.describeTable('approval_levels');
  if (columns.breach_reason) {
    console.log(' breach_reason column already exists, skipping');
    return;
  }
  await queryInterface.addColumn('approval_levels', 'breach_reason', {
    type: DataTypes.TEXT,
    allowNull: true,
    comment: 'Reason for TAT breach - can contain paragraph-length text'
  });
  console.log('✅ Added breach_reason column to approval_levels table');
}
/**
 * Rollback: drop breach_reason from approval_levels when both the table
 * and the column are present; otherwise do nothing.
 */
export async function down(queryInterface: QueryInterface): Promise<void> {
  const allTables = await queryInterface.showAllTables();
  if (!allTables.includes('approval_levels')) {
    return;
  }
  const columns = await queryInterface.describeTable('approval_levels');
  if (!columns.breach_reason) {
    return;
  }
  await queryInterface.removeColumn('approval_levels', 'breach_reason');
  console.log('✅ Removed breach_reason column from approval_levels table');
}

View File

@ -1,94 +0,0 @@
import { QueryInterface, QueryTypes } from 'sequelize';
/**
* Migration to add AI model configuration entries
* Adds CLAUDE_MODEL, OPENAI_MODEL, and GEMINI_MODEL to admin_configurations
*
* This migration is idempotent - it will only insert if the configs don't exist.
* For existing databases, this ensures the new model configuration fields are available.
* For fresh databases, the seed scripts will handle the initial population.
*/
// Seeds three AI model configuration rows into admin_configurations.
// Idempotent via ON CONFLICT (config_key) DO NOTHING — assumes a unique
// constraint on config_key (TODO confirm against the table migration).
export async function up(queryInterface: QueryInterface): Promise<void> {
  // Insert AI model configurations if they don't exist
  await queryInterface.sequelize.query(`
    INSERT INTO admin_configurations (
      config_id, config_key, config_category, config_value, value_type,
      display_name, description, default_value, is_editable, is_sensitive,
      validation_rules, ui_component, options, sort_order, requires_restart,
      last_modified_by, last_modified_at, created_at, updated_at
    ) VALUES
    (
      gen_random_uuid(),
      'CLAUDE_MODEL',
      'AI_CONFIGURATION',
      'claude-sonnet-4-20250514',
      'STRING',
      'Claude Model',
      'Claude (Anthropic) model to use for AI generation',
      'claude-sonnet-4-20250514',
      true,
      false,
      '{}'::jsonb,
      'input',
      NULL,
      27,
      false,
      NULL,
      NULL,
      NOW(),
      NOW()
    ),
    (
      gen_random_uuid(),
      'OPENAI_MODEL',
      'AI_CONFIGURATION',
      'gpt-4o',
      'STRING',
      'OpenAI Model',
      'OpenAI model to use for AI generation',
      'gpt-4o',
      true,
      false,
      '{}'::jsonb,
      'input',
      NULL,
      28,
      false,
      NULL,
      NULL,
      NOW(),
      NOW()
    ),
    (
      gen_random_uuid(),
      'GEMINI_MODEL',
      'AI_CONFIGURATION',
      'gemini-2.0-flash-lite',
      'STRING',
      'Gemini Model',
      'Gemini (Google) model to use for AI generation',
      'gemini-2.0-flash-lite',
      true,
      false,
      '{}'::jsonb,
      'input',
      NULL,
      29,
      false,
      NULL,
      NULL,
      NOW(),
      NOW()
    )
    ON CONFLICT (config_key) DO NOTHING
  `, { type: QueryTypes.INSERT });
}
/**
 * Rollback: delete the three seeded AI model configuration rows.
 */
export async function down(queryInterface: QueryInterface): Promise<void> {
  const removeSql = `
    DELETE FROM admin_configurations
    WHERE config_key IN ('CLAUDE_MODEL', 'OPENAI_MODEL', 'GEMINI_MODEL')
  `;
  await queryInterface.sequelize.query(removeSql, { type: QueryTypes.DELETE });
}

View File

@ -1,53 +0,0 @@
import { QueryInterface, DataTypes } from 'sequelize';
module.exports = {
async up(queryInterface: QueryInterface): Promise<void> {
// Add notification preference columns to users table
await queryInterface.addColumn('users', 'email_notifications_enabled', {
type: DataTypes.BOOLEAN,
allowNull: false,
defaultValue: true,
comment: 'User preference for receiving email notifications'
});
await queryInterface.addColumn('users', 'push_notifications_enabled', {
type: DataTypes.BOOLEAN,
allowNull: false,
defaultValue: true,
comment: 'User preference for receiving push notifications'
});
await queryInterface.addColumn('users', 'in_app_notifications_enabled', {
type: DataTypes.BOOLEAN,
allowNull: false,
defaultValue: true,
comment: 'User preference for receiving in-app notifications'
});
// Add indexes for faster queries
await queryInterface.addIndex('users', ['email_notifications_enabled'], {
name: 'idx_users_email_notifications_enabled'
});
await queryInterface.addIndex('users', ['push_notifications_enabled'], {
name: 'idx_users_push_notifications_enabled'
});
await queryInterface.addIndex('users', ['in_app_notifications_enabled'], {
name: 'idx_users_in_app_notifications_enabled'
});
},
async down(queryInterface: QueryInterface): Promise<void> {
// Remove indexes first
await queryInterface.removeIndex('users', 'idx_users_in_app_notifications_enabled');
await queryInterface.removeIndex('users', 'idx_users_push_notifications_enabled');
await queryInterface.removeIndex('users', 'idx_users_email_notifications_enabled');
// Remove columns
await queryInterface.removeColumn('users', 'in_app_notifications_enabled');
await queryInterface.removeColumn('users', 'push_notifications_enabled');
await queryInterface.removeColumn('users', 'email_notifications_enabled');
}
};

View File

@ -1,54 +0,0 @@
import { QueryInterface } from 'sequelize';
/**
* Add foreign key constraint for template_id after workflow_templates table exists
* This should run after both:
* - 20251210-enhance-workflow-templates (creates workflow_templates table)
* - 20251210-add-workflow-type-support (adds template_id column)
*/
/**
 * Adds the workflow_requests.template_id -> workflow_templates FK.
 * Guard clauses replace the original nesting: no-op when the referenced
 * table is absent or the constraint already exists.
 */
export async function up(queryInterface: QueryInterface): Promise<void> {
  // The referenced table is created by a separate migration; without it the
  // constraint cannot be added, so probe information_schema first.
  const [templateTables] = await queryInterface.sequelize.query(`
    SELECT table_name
    FROM information_schema.tables
    WHERE table_schema = 'public'
      AND table_name = 'workflow_templates';
  `);
  if (templateTables.length === 0) {
    return;
  }
  // Skip when the FK is already in place (re-run safety).
  const [existingConstraints] = await queryInterface.sequelize.query(`
    SELECT constraint_name
    FROM information_schema.table_constraints
    WHERE table_schema = 'public'
      AND table_name = 'workflow_requests'
      AND constraint_name = 'workflow_requests_template_id_fkey';
  `);
  if (existingConstraints.length > 0) {
    return;
  }
  await queryInterface.sequelize.query(`
    ALTER TABLE workflow_requests
    ADD CONSTRAINT workflow_requests_template_id_fkey
    FOREIGN KEY (template_id)
    REFERENCES workflow_templates(template_id)
    ON UPDATE CASCADE
    ON DELETE SET NULL;
  `);
}
/**
 * Rollback: drop the template_id foreign key if present. DROP CONSTRAINT
 * IF EXISTS already tolerates a missing constraint; the catch covers other
 * failures (e.g. the table itself being absent).
 */
export async function down(queryInterface: QueryInterface): Promise<void> {
  const dropConstraintSql = `
    ALTER TABLE workflow_requests
    DROP CONSTRAINT IF EXISTS workflow_requests_template_id_fkey;
  `;
  try {
    await queryInterface.sequelize.query(dropConstraintSql);
  } catch (error) {
    console.log('Note: Foreign key constraint may not exist');
  }
}

View File

@ -1,116 +0,0 @@
import { QueryInterface, DataTypes } from 'sequelize';
/**
 * Adds workflow_type and template_id columns (plus indexes) to
 * workflow_requests and backfills older rows.
 *
 * Fixes over the original: the unused destructured `result` from the
 * backfill query is removed, and the four near-identical column/index
 * blocks are factored into local helpers while keeping the same log
 * output and error tolerance.
 */
export async function up(queryInterface: QueryInterface): Promise<void> {
  // Adds a column only when absent; tolerates a race where it was created
  // manually between the describe and the add (re-check before rethrowing).
  const ensureColumn = async (name: string, definition: any): Promise<void> => {
    const described = await queryInterface.describeTable('workflow_requests');
    if (described[name]) {
      console.log(`Note: ${name} column already exists, skipping`);
      return;
    }
    try {
      await queryInterface.addColumn('workflow_requests', name, definition);
      console.log(`✅ Added ${name} column`);
    } catch (error: any) {
      const recheck = await queryInterface.describeTable('workflow_requests');
      if (!recheck[name]) {
        throw error; // Column genuinely failed to appear.
      }
      console.log(`Note: ${name} column already exists (may have been added manually)`);
    }
  };
  // Creates an index only when its column exists; duplicate-index errors
  // are downgraded to notes, anything else is logged but not fatal.
  const ensureIndex = async (column: string, indexName: string): Promise<void> => {
    const described = await queryInterface.describeTable('workflow_requests');
    if (!described[column]) {
      return;
    }
    try {
      await queryInterface.addIndex('workflow_requests', [column], { name: indexName });
      console.log(`✅ Created ${column} index`);
    } catch (error: any) {
      if (error.message?.includes('already exists') || error.message?.includes('duplicate')) {
        console.log(`Note: ${column} index already exists`);
      } else {
        console.log(`Note: Could not create ${column} index:`, error.message);
      }
    }
  };
  try {
    await ensureColumn('workflow_type', {
      type: DataTypes.STRING(50),
      allowNull: true,
      defaultValue: 'NON_TEMPLATIZED'
    });
    // FK to workflow_templates is added by a later migration once that
    // table exists; here the column is just a bare UUID.
    await ensureColumn('template_id', {
      type: DataTypes.UUID,
      allowNull: true
    });
    await ensureIndex('workflow_type', 'idx_workflow_requests_workflow_type');
    await ensureIndex('template_id', 'idx_workflow_requests_template_id');
    // Backfill: rows created before the column existed carry NULL.
    const described = await queryInterface.describeTable('workflow_requests');
    if (described.workflow_type) {
      try {
        await queryInterface.sequelize.query(`
          UPDATE workflow_requests
          SET workflow_type = 'NON_TEMPLATIZED'
          WHERE workflow_type IS NULL;
        `);
        console.log('✅ Updated existing records with workflow_type');
      } catch (error: any) {
        console.log('Note: Could not update existing records:', error.message);
      }
    }
  } catch (error: any) {
    console.error('Migration error:', error.message);
    throw error;
  }
}
/**
 * Rollback for workflow_type / template_id support.
 *
 * Fix: the original removals were unguarded and failed on a partially
 * applied migration; removals are now conditional, matching the
 * idempotent style of up().
 */
export async function down(queryInterface: QueryInterface): Promise<void> {
  // Indexes first (they depend on the columns); ignore absent ones.
  for (const indexName of ['idx_workflow_requests_template_id', 'idx_workflow_requests_workflow_type']) {
    try {
      await queryInterface.removeIndex('workflow_requests', indexName);
    } catch (error) {
      // Index was never created — nothing to undo.
    }
  }
  // Then drop the columns, skipping any that are already gone.
  const described = await queryInterface.describeTable('workflow_requests');
  for (const column of ['template_id', 'workflow_type']) {
    if (described[column]) {
      await queryInterface.removeColumn('workflow_requests', column);
    }
  }
}

View File

@ -1,214 +0,0 @@
import { QueryInterface, DataTypes } from 'sequelize';
// Creates the three dealer-workflow detail tables, each 1:1 with a
// workflow_requests row (unique FK, cascade delete): claim details,
// proposal details (step 1), and completion details (step 5).
export async function up(queryInterface: QueryInterface): Promise<void> {
  // 1. Create dealer_claim_details table
  await queryInterface.createTable('dealer_claim_details', {
    claim_id: {
      type: DataTypes.UUID,
      primaryKey: true,
      defaultValue: DataTypes.UUIDV4
    },
    // 1:1 with the workflow request; rows die with the request.
    request_id: {
      type: DataTypes.UUID,
      allowNull: false,
      unique: true,
      references: {
        model: 'workflow_requests',
        key: 'request_id'
      },
      onDelete: 'CASCADE',
      onUpdate: 'CASCADE'
    },
    activity_name: {
      type: DataTypes.STRING(500),
      allowNull: false
    },
    activity_type: {
      type: DataTypes.STRING(100),
      allowNull: false
    },
    dealer_code: {
      type: DataTypes.STRING(50),
      allowNull: false
    },
    dealer_name: {
      type: DataTypes.STRING(200),
      allowNull: false
    },
    dealer_email: {
      type: DataTypes.STRING(255),
      allowNull: true
    },
    dealer_phone: {
      type: DataTypes.STRING(20),
      allowNull: true
    },
    dealer_address: {
      type: DataTypes.TEXT,
      allowNull: true
    },
    activity_date: {
      type: DataTypes.DATEONLY,
      allowNull: true
    },
    location: {
      type: DataTypes.STRING(255),
      allowNull: true
    },
    period_start_date: {
      type: DataTypes.DATEONLY,
      allowNull: true
    },
    period_end_date: {
      type: DataTypes.DATEONLY,
      allowNull: true
    },
    created_at: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW
    },
    updated_at: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW
    }
  });
  // Create indexes
  await queryInterface.addIndex('dealer_claim_details', ['request_id'], {
    name: 'idx_dealer_claim_details_request_id',
    unique: true
  });
  await queryInterface.addIndex('dealer_claim_details', ['dealer_code'], {
    name: 'idx_dealer_claim_details_dealer_code'
  });
  await queryInterface.addIndex('dealer_claim_details', ['activity_type'], {
    name: 'idx_dealer_claim_details_activity_type'
  });
  // 2. Create dealer_proposal_details table (Step 1: Dealer Proposal)
  await queryInterface.createTable('dealer_proposal_details', {
    proposal_id: {
      type: DataTypes.UUID,
      primaryKey: true,
      defaultValue: DataTypes.UUIDV4
    },
    request_id: {
      type: DataTypes.UUID,
      allowNull: false,
      unique: true,
      references: {
        model: 'workflow_requests',
        key: 'request_id'
      },
      onDelete: 'CASCADE',
      onUpdate: 'CASCADE'
    },
    proposal_document_path: {
      type: DataTypes.STRING(500),
      allowNull: true
    },
    proposal_document_url: {
      type: DataTypes.STRING(500),
      allowNull: true
    },
    total_estimated_budget: {
      type: DataTypes.DECIMAL(15, 2),
      allowNull: true
    },
    // Either a target date or a day count, selected by timeline_mode.
    timeline_mode: {
      type: DataTypes.STRING(10),
      allowNull: true
    },
    expected_completion_date: {
      type: DataTypes.DATEONLY,
      allowNull: true
    },
    expected_completion_days: {
      type: DataTypes.INTEGER,
      allowNull: true
    },
    dealer_comments: {
      type: DataTypes.TEXT,
      allowNull: true
    },
    submitted_at: {
      type: DataTypes.DATE,
      allowNull: true
    },
    created_at: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW
    },
    updated_at: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW
    }
  });
  await queryInterface.addIndex('dealer_proposal_details', ['request_id'], {
    name: 'idx_dealer_proposal_details_request_id',
    unique: true
  });
  // 3. Create dealer_completion_details table (Step 5: Dealer Completion)
  await queryInterface.createTable('dealer_completion_details', {
    completion_id: {
      type: DataTypes.UUID,
      primaryKey: true,
      defaultValue: DataTypes.UUIDV4
    },
    request_id: {
      type: DataTypes.UUID,
      allowNull: false,
      unique: true,
      references: {
        model: 'workflow_requests',
        key: 'request_id'
      },
      onDelete: 'CASCADE',
      onUpdate: 'CASCADE'
    },
    activity_completion_date: {
      type: DataTypes.DATEONLY,
      allowNull: false
    },
    number_of_participants: {
      type: DataTypes.INTEGER,
      allowNull: true
    },
    total_closed_expenses: {
      type: DataTypes.DECIMAL(15, 2),
      allowNull: true
    },
    submitted_at: {
      type: DataTypes.DATE,
      allowNull: true
    },
    created_at: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW
    },
    updated_at: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW
    }
  });
  await queryInterface.addIndex('dealer_completion_details', ['request_id'], {
    name: 'idx_dealer_completion_details_request_id',
    unique: true
  });
}
/**
 * Reverts the dealer claim schema: drops the three tables created by `up`,
 * in reverse creation order so dependents go before their targets.
 */
export async function down(queryInterface: QueryInterface): Promise<void> {
  const tablesInDropOrder = [
    'dealer_completion_details',
    'dealer_proposal_details',
    'dealer_claim_details'
  ];
  for (const tableName of tablesInDropOrder) {
    await queryInterface.dropTable(tableName);
  }
}

View File

@ -1,194 +0,0 @@
import { QueryInterface, DataTypes } from 'sequelize';
/**
* Migration: Create dealer_proposal_cost_items table
*
* Purpose: Separate table for cost breakups to enable better querying, reporting, and data integrity
* This replaces the JSONB costBreakup field in dealer_proposal_details
*
* Benefits:
* - Better querying and filtering
* - Easier to update individual cost items
* - Better for analytics and reporting
* - Maintains referential integrity
*/
/**
 * Creates the dealer_proposal_cost_items table (replacing the JSONB
 * `cost_breakup` field on dealer_proposal_details) and back-fills it from any
 * existing JSONB data.
 *
 * Benefits of the dedicated table:
 * - Better querying and filtering
 * - Easier to update individual cost items
 * - Better for analytics and reporting
 * - Maintains referential integrity
 */
export async function up(queryInterface: QueryInterface): Promise<void> {
  // Idempotency guard: only create the table if it is not already present.
  const [tables] = await queryInterface.sequelize.query(`
    SELECT table_name
    FROM information_schema.tables
    WHERE table_schema = 'public'
    AND table_name = 'dealer_proposal_cost_items';
  `);
  if (tables.length === 0) {
    await queryInterface.createTable('dealer_proposal_cost_items', {
      cost_item_id: {
        type: DataTypes.UUID,
        primaryKey: true,
        defaultValue: DataTypes.UUIDV4,
        field: 'cost_item_id'
      },
      proposal_id: {
        type: DataTypes.UUID,
        allowNull: false,
        field: 'proposal_id',
        references: {
          model: 'dealer_proposal_details',
          key: 'proposal_id'
        },
        onDelete: 'CASCADE',
        onUpdate: 'CASCADE'
      },
      request_id: {
        type: DataTypes.UUID,
        allowNull: false,
        field: 'request_id',
        references: {
          model: 'workflow_requests',
          key: 'request_id'
        },
        onDelete: 'CASCADE',
        onUpdate: 'CASCADE',
        comment: 'Denormalized for easier querying without joins'
      },
      item_description: {
        type: DataTypes.STRING(500),
        allowNull: false,
        field: 'item_description'
      },
      amount: {
        type: DataTypes.DECIMAL(15, 2),
        allowNull: false,
        field: 'amount',
        comment: 'Cost amount in INR'
      },
      item_order: {
        type: DataTypes.INTEGER,
        allowNull: false,
        defaultValue: 0,
        field: 'item_order',
        comment: 'Order of item in the cost breakdown list'
      },
      created_at: {
        type: DataTypes.DATE,
        allowNull: false,
        defaultValue: DataTypes.NOW,
        field: 'created_at'
      },
      updated_at: {
        type: DataTypes.DATE,
        allowNull: false,
        defaultValue: DataTypes.NOW,
        field: 'updated_at'
      }
    });
    // Create indexes for better query performance
    await queryInterface.addIndex('dealer_proposal_cost_items', ['proposal_id'], {
      name: 'idx_proposal_cost_items_proposal_id'
    });
    await queryInterface.addIndex('dealer_proposal_cost_items', ['request_id'], {
      name: 'idx_proposal_cost_items_request_id'
    });
    await queryInterface.addIndex('dealer_proposal_cost_items', ['proposal_id', 'item_order'], {
      name: 'idx_proposal_cost_items_proposal_order'
    });
    console.log('✅ Created dealer_proposal_cost_items table');
  } else {
    console.log('Note: dealer_proposal_cost_items table already exists');
  }
  // Migrate existing JSONB costBreakup data to the new table
  try {
    const [existingProposals] = await queryInterface.sequelize.query(`
      SELECT proposal_id, request_id, cost_breakup
      FROM dealer_proposal_details
      WHERE cost_breakup IS NOT NULL
      AND cost_breakup::text != 'null'
      AND cost_breakup::text != '[]';
    `);
    if (Array.isArray(existingProposals) && existingProposals.length > 0) {
      console.log(`📦 Migrating ${existingProposals.length} existing proposal(s) with cost breakups...`);
      for (const proposal of existingProposals as any[]) {
        const proposalId = proposal.proposal_id;
        const requestId = proposal.request_id;
        let costBreakup = proposal.cost_breakup;
        // Parse JSONB if the driver returned it as a string
        if (typeof costBreakup === 'string') {
          try {
            costBreakup = JSON.parse(costBreakup);
          } catch (e) {
            console.warn(`⚠️ Failed to parse costBreakup for proposal ${proposalId}:`, e);
            continue;
          }
        }
        // Ensure it's an array before iterating
        if (!Array.isArray(costBreakup)) {
          console.warn(`⚠️ costBreakup is not an array for proposal ${proposalId}`);
          continue;
        }
        // Insert cost items one by one, preserving their original order.
        for (let i = 0; i < costBreakup.length; i++) {
          const item = costBreakup[i];
          if (item && item.description && item.amount !== undefined) {
            // BUG FIX: `defaultValue: DataTypes.UUIDV4` is applied by the
            // Sequelize model layer only — createTable emits no SQL DEFAULT
            // for it — so a raw INSERT must supply the primary key itself.
            // gen_random_uuid() is built into PostgreSQL 13+; on older
            // versions it is provided by the pgcrypto extension.
            await queryInterface.sequelize.query(`
              INSERT INTO dealer_proposal_cost_items
              (cost_item_id, proposal_id, request_id, item_description, amount, item_order, created_at, updated_at)
              VALUES (gen_random_uuid(), :proposalId, :requestId, :description, :amount, :order, NOW(), NOW())
              ON CONFLICT DO NOTHING;
            `, {
              replacements: {
                proposalId,
                requestId,
                description: item.description,
                amount: item.amount,
                order: i
              }
            });
          }
        }
      }
      console.log('✅ Migrated existing cost breakups to new table');
    }
  } catch (error: any) {
    console.warn('⚠️ Could not migrate existing cost breakups:', error.message);
    // Don't fail the migration if migration of existing data fails
  }
}
/**
 * Rolls back the cost-items migration: removes the supporting indexes
 * (tolerating their absence) and then drops the table itself.
 */
export async function down(queryInterface: QueryInterface): Promise<void> {
  const indexNames = [
    'idx_proposal_cost_items_proposal_order',
    'idx_proposal_cost_items_request_id',
    'idx_proposal_cost_items_proposal_id'
  ];
  for (const indexName of indexNames) {
    try {
      await queryInterface.removeIndex('dealer_proposal_cost_items', indexName);
    } catch {
      // Index may never have been created; ignore and continue.
    }
  }
  await queryInterface.dropTable('dealer_proposal_cost_items');
  console.log('✅ Dropped dealer_proposal_cost_items table');
}

View File

@ -1,174 +0,0 @@
import { QueryInterface, DataTypes } from 'sequelize';
/**
 * Ensures the workflow_templates table exists with the full column set:
 * creates it from scratch when absent, otherwise adds any columns introduced
 * after the table was first deployed (each guarded by a live-schema check so
 * the migration can be re-run safely).
 */
export async function up(queryInterface: QueryInterface): Promise<void> {
  const [tables] = await queryInterface.sequelize.query(`
    SELECT table_name
    FROM information_schema.tables
    WHERE table_schema = 'public'
    AND table_name = 'workflow_templates';
  `);
  // Shared shape for the nullable JSONB configuration columns.
  const jsonbColumn = { type: DataTypes.JSONB, allowNull: true };

  if (tables.length === 0) {
    // Fresh install: build the complete table in one shot.
    await queryInterface.createTable('workflow_templates', {
      template_id: { type: DataTypes.UUID, primaryKey: true, defaultValue: DataTypes.UUIDV4 },
      template_name: { type: DataTypes.STRING(200), allowNull: false },
      template_code: { type: DataTypes.STRING(50), allowNull: true, unique: true },
      template_description: { type: DataTypes.TEXT, allowNull: true },
      template_category: { type: DataTypes.STRING(100), allowNull: true },
      workflow_type: { type: DataTypes.STRING(50), allowNull: true },
      approval_levels_config: jsonbColumn,
      default_tat_hours: { type: DataTypes.DECIMAL(10, 2), allowNull: true, defaultValue: 24 },
      form_steps_config: jsonbColumn,
      user_field_mappings: jsonbColumn,
      dynamic_approver_config: jsonbColumn,
      is_active: { type: DataTypes.BOOLEAN, allowNull: false, defaultValue: true },
      is_system_template: { type: DataTypes.BOOLEAN, allowNull: false, defaultValue: false },
      usage_count: { type: DataTypes.INTEGER, allowNull: false, defaultValue: 0 },
      created_by: {
        type: DataTypes.UUID,
        allowNull: true,
        references: { model: 'users', key: 'user_id' }
      },
      created_at: { type: DataTypes.DATE, allowNull: false, defaultValue: DataTypes.NOW },
      updated_at: { type: DataTypes.DATE, allowNull: false, defaultValue: DataTypes.NOW }
    });
    await queryInterface.addIndex('workflow_templates', ['template_code'], {
      name: 'idx_workflow_templates_template_code',
      unique: true
    });
    await queryInterface.addIndex('workflow_templates', ['workflow_type'], {
      name: 'idx_workflow_templates_workflow_type'
    });
    await queryInterface.addIndex('workflow_templates', ['is_active'], {
      name: 'idx_workflow_templates_is_active'
    });
  } else {
    // Existing install: top up with the columns added after the original deploy.
    const existingColumns = await queryInterface.describeTable('workflow_templates');
    if (!existingColumns.form_steps_config) {
      await queryInterface.addColumn('workflow_templates', 'form_steps_config', jsonbColumn);
    }
    if (!existingColumns.user_field_mappings) {
      await queryInterface.addColumn('workflow_templates', 'user_field_mappings', jsonbColumn);
    }
    if (!existingColumns.dynamic_approver_config) {
      await queryInterface.addColumn('workflow_templates', 'dynamic_approver_config', jsonbColumn);
    }
    if (!existingColumns.workflow_type) {
      await queryInterface.addColumn('workflow_templates', 'workflow_type', {
        type: DataTypes.STRING(50),
        allowNull: true
      });
    }
    if (!existingColumns.is_system_template) {
      await queryInterface.addColumn('workflow_templates', 'is_system_template', {
        type: DataTypes.BOOLEAN,
        allowNull: false,
        defaultValue: false
      });
    }
  }
}
/**
 * Removes the columns this migration may have added, each guarded by a
 * live-schema check so the rollback also succeeds after a partial apply.
 * NOTE(review): if `up` created the whole table, this down does not drop it —
 * presumably intentional, but asymmetric; confirm before relying on it.
 */
export async function down(queryInterface: QueryInterface): Promise<void> {
  const existingColumns = await queryInterface.describeTable('workflow_templates');
  const removableColumns = [
    'dynamic_approver_config',
    'user_field_mappings',
    'form_steps_config',
    'workflow_type',
    'is_system_template'
  ];
  for (const columnName of removableColumns) {
    if (existingColumns[columnName]) {
      await queryInterface.removeColumn('workflow_templates', columnName);
    }
  }
}

View File

@ -1,197 +0,0 @@
import { QueryInterface, DataTypes } from 'sequelize';
/**
 * Creates claim_budget_tracking: one row per workflow request recording every
 * budget figure captured across the claim lifecycle (proposal, approval, IO
 * blocking, completion, final claim, credit note) plus variance and audit
 * metadata.
 */
export async function up(queryInterface: QueryInterface): Promise<void> {
  // Shorthand builders for the column shapes this table repeats many times.
  const money = (comment: string) => ({
    type: DataTypes.DECIMAL(15, 2),
    allowNull: true,
    comment
  });
  const when = (comment: string) => ({
    type: DataTypes.DATE,
    allowNull: true,
    comment
  });
  const userRef = (comment: string) => ({
    type: DataTypes.UUID,
    allowNull: true,
    references: { model: 'users', key: 'user_id' },
    onDelete: 'SET NULL',
    onUpdate: 'CASCADE',
    comment
  });
  const auditTimestamp = {
    type: DataTypes.DATE,
    allowNull: false,
    defaultValue: DataTypes.NOW
  };

  await queryInterface.createTable('claim_budget_tracking', {
    budget_id: {
      type: DataTypes.UUID,
      primaryKey: true,
      defaultValue: DataTypes.UUIDV4
    },
    request_id: {
      type: DataTypes.UUID,
      allowNull: false,
      unique: true,
      references: { model: 'workflow_requests', key: 'request_id' },
      onDelete: 'CASCADE',
      onUpdate: 'CASCADE'
    },
    // Captured at claim creation.
    initial_estimated_budget: money('Initial estimated budget when claim was created'),
    // Step 1: dealer proposal.
    proposal_estimated_budget: money('Total estimated budget from dealer proposal'),
    proposal_submitted_at: when('When dealer submitted proposal'),
    // Step 2: requestor evaluation.
    approved_budget: money('Budget approved by requestor in Step 2'),
    approved_at: when('When budget was approved by requestor'),
    approved_by: userRef('User who approved the budget'),
    // Step 3: department lead blocks the budget in the internal order.
    io_blocked_amount: money('Amount blocked in IO (from internal_orders table)'),
    io_blocked_at: when('When budget was blocked in IO'),
    // Step 5: dealer completion.
    closed_expenses: money('Total closed expenses from completion documents'),
    closed_expenses_submitted_at: when('When completion expenses were submitted'),
    // Step 6: requestor claim approval.
    final_claim_amount: money('Final claim amount approved/modified by requestor in Step 6'),
    final_claim_amount_approved_at: when('When final claim amount was approved'),
    final_claim_amount_approved_by: userRef('User who approved final claim amount'),
    // Step 8: finance issues the credit note.
    credit_note_amount: money('Credit note amount issued by finance'),
    credit_note_issued_at: when('When credit note was issued'),
    budget_status: {
      type: DataTypes.ENUM('DRAFT', 'PROPOSED', 'APPROVED', 'BLOCKED', 'CLOSED', 'SETTLED'),
      defaultValue: 'DRAFT',
      allowNull: false,
      comment: 'Current status of budget lifecycle'
    },
    currency: {
      type: DataTypes.STRING(3),
      defaultValue: 'INR',
      allowNull: false,
      comment: 'Currency code (INR, USD, etc.)'
    },
    // Variance between approved budget and closed expenses.
    variance_amount: money('Difference between approved and closed expenses (closed - approved)'),
    variance_percentage: {
      type: DataTypes.DECIMAL(5, 2),
      allowNull: true,
      comment: 'Variance as percentage of approved budget'
    },
    // Audit trail.
    last_modified_by: userRef('Last user who modified budget'),
    last_modified_at: when('When budget was last modified'),
    modification_reason: {
      type: DataTypes.TEXT,
      allowNull: true,
      comment: 'Reason for budget modification'
    },
    created_at: auditTimestamp,
    updated_at: auditTimestamp
  });

  await queryInterface.addIndex('claim_budget_tracking', ['request_id'], {
    name: 'idx_claim_budget_tracking_request_id',
    unique: true
  });
  await queryInterface.addIndex('claim_budget_tracking', ['budget_status'], {
    name: 'idx_claim_budget_tracking_status'
  });
  await queryInterface.addIndex('claim_budget_tracking', ['approved_by'], {
    name: 'idx_claim_budget_tracking_approved_by'
  });
  await queryInterface.addIndex('claim_budget_tracking', ['final_claim_amount_approved_by'], {
    name: 'idx_claim_budget_tracking_final_approved_by'
  });
}
/**
 * Reverts the budget-tracking migration. On Postgres, `dropTable` does not
 * remove the ENUM type Sequelize created for the budget_status column, which
 * makes a subsequent `up` fail with "type already exists" — so the type is
 * dropped explicitly as well.
 */
export async function down(queryInterface: QueryInterface): Promise<void> {
  await queryInterface.dropTable('claim_budget_tracking');
  await queryInterface.sequelize.query(
    'DROP TYPE IF EXISTS "enum_claim_budget_tracking_budget_status";'
  );
}

View File

@ -1,95 +0,0 @@
import { QueryInterface, DataTypes } from 'sequelize';
/**
 * Creates internal_orders: per-request Internal Order (IO) records tracking
 * SAP blocking details, balances, and lifecycle status.
 */
export async function up(queryInterface: QueryInterface): Promise<void> {
  // Shared shapes for the repeated column definitions.
  const amount = { type: DataTypes.DECIMAL(15, 2), allowNull: true };
  const auditTimestamp = { type: DataTypes.DATE, allowNull: false, defaultValue: DataTypes.NOW };

  await queryInterface.createTable('internal_orders', {
    io_id: { type: DataTypes.UUID, primaryKey: true, defaultValue: DataTypes.UUIDV4 },
    request_id: {
      type: DataTypes.UUID,
      allowNull: false,
      references: { model: 'workflow_requests', key: 'request_id' },
      onDelete: 'CASCADE',
      onUpdate: 'CASCADE'
    },
    io_number: { type: DataTypes.STRING(50), allowNull: false },
    io_remark: { type: DataTypes.TEXT, allowNull: true },
    io_available_balance: amount,
    io_blocked_amount: amount,
    io_remaining_balance: amount,
    organized_by: {
      type: DataTypes.UUID,
      allowNull: true,
      references: { model: 'users', key: 'user_id' },
      onDelete: 'SET NULL',
      onUpdate: 'CASCADE'
    },
    organized_at: { type: DataTypes.DATE, allowNull: true },
    sap_document_number: { type: DataTypes.STRING(100), allowNull: true },
    status: {
      type: DataTypes.ENUM('PENDING', 'BLOCKED', 'RELEASED', 'CANCELLED'),
      defaultValue: 'PENDING',
      allowNull: false
    },
    created_at: auditTimestamp,
    updated_at: auditTimestamp
  });

  // Lookup indexes plus the one-IO-per-request uniqueness guarantee.
  await queryInterface.addIndex('internal_orders', ['io_number'], {
    name: 'idx_internal_orders_io_number'
  });
  await queryInterface.addIndex('internal_orders', ['organized_by'], {
    name: 'idx_internal_orders_organized_by'
  });
  await queryInterface.addIndex('internal_orders', ['request_id'], {
    name: 'idx_internal_orders_request_id_unique',
    unique: true
  });
}
/**
 * Reverts the internal-orders migration. On Postgres, `dropTable` leaves the
 * ENUM type created for the status column behind, which breaks re-running
 * `up` — so the type is dropped explicitly as well.
 */
export async function down(queryInterface: QueryInterface): Promise<void> {
  await queryInterface.dropTable('internal_orders');
  await queryInterface.sequelize.query(
    'DROP TYPE IF EXISTS "enum_internal_orders_status";'
  );
}

View File

@ -1,162 +0,0 @@
import { QueryInterface, DataTypes } from 'sequelize';
/**
 * Creates the claim_invoices and claim_credit_notes tables (one row each per
 * workflow request, enforced by a unique request_id) with their lookup
 * indexes. Credit notes optionally reference the invoice they offset.
 */
export async function up(queryInterface: QueryInterface): Promise<void> {
  // Shorthand for the many optional columns; keeps literal types via generics.
  const nullable = <T>(type: T) => ({ type, allowNull: true });
  const auditTimestamp = { type: DataTypes.DATE, allowNull: false, defaultValue: DataTypes.NOW };
  const requestRef = {
    type: DataTypes.UUID,
    allowNull: false,
    unique: true, // one row per request (adjust later if multiples needed)
    references: { model: 'workflow_requests', key: 'request_id' },
    onDelete: 'CASCADE',
    onUpdate: 'CASCADE',
  };

  await queryInterface.createTable('claim_invoices', {
    invoice_id: { type: DataTypes.UUID, primaryKey: true, defaultValue: DataTypes.UUIDV4 },
    request_id: requestRef,
    invoice_number: nullable(DataTypes.STRING(100)),
    invoice_date: nullable(DataTypes.DATEONLY),
    invoice_amount: nullable(DataTypes.DECIMAL(15, 2)),
    dms_number: nullable(DataTypes.STRING(100)),
    invoice_file_path: nullable(DataTypes.STRING(500)),
    // e.g., PENDING, GENERATED, SENT, FAILED, CANCELLED
    generation_status: nullable(DataTypes.STRING(50)),
    error_message: nullable(DataTypes.TEXT),
    generated_at: nullable(DataTypes.DATE),
    description: nullable(DataTypes.TEXT),
    created_at: auditTimestamp,
    updated_at: auditTimestamp,
  });
  await queryInterface.addIndex('claim_invoices', ['request_id'], { name: 'idx_claim_invoices_request_id', unique: true });
  await queryInterface.addIndex('claim_invoices', ['invoice_number'], { name: 'idx_claim_invoices_invoice_number' });
  await queryInterface.addIndex('claim_invoices', ['dms_number'], { name: 'idx_claim_invoices_dms_number' });
  await queryInterface.addIndex('claim_invoices', ['generation_status'], { name: 'idx_claim_invoices_status' });

  await queryInterface.createTable('claim_credit_notes', {
    credit_note_id: { type: DataTypes.UUID, primaryKey: true, defaultValue: DataTypes.UUIDV4 },
    request_id: requestRef,
    invoice_id: {
      type: DataTypes.UUID,
      allowNull: true,
      references: { model: 'claim_invoices', key: 'invoice_id' },
      onDelete: 'SET NULL',
      onUpdate: 'CASCADE',
    },
    credit_note_number: nullable(DataTypes.STRING(100)),
    credit_note_date: nullable(DataTypes.DATEONLY),
    credit_amount: nullable(DataTypes.DECIMAL(15, 2)),
    sap_document_number: nullable(DataTypes.STRING(100)),
    credit_note_file_path: nullable(DataTypes.STRING(500)),
    // e.g., PENDING, GENERATED, CONFIRMED, FAILED, CANCELLED
    confirmation_status: nullable(DataTypes.STRING(50)),
    error_message: nullable(DataTypes.TEXT),
    confirmed_by: {
      type: DataTypes.UUID,
      allowNull: true,
      references: { model: 'users', key: 'user_id' },
      onDelete: 'SET NULL',
      onUpdate: 'CASCADE',
    },
    confirmed_at: nullable(DataTypes.DATE),
    reason: nullable(DataTypes.TEXT),
    description: nullable(DataTypes.TEXT),
    created_at: auditTimestamp,
    updated_at: auditTimestamp,
  });
  await queryInterface.addIndex('claim_credit_notes', ['request_id'], { name: 'idx_claim_credit_notes_request_id', unique: true });
  await queryInterface.addIndex('claim_credit_notes', ['invoice_id'], { name: 'idx_claim_credit_notes_invoice_id' });
  await queryInterface.addIndex('claim_credit_notes', ['credit_note_number'], { name: 'idx_claim_credit_notes_number' });
  await queryInterface.addIndex('claim_credit_notes', ['sap_document_number'], { name: 'idx_claim_credit_notes_sap_doc' });
  await queryInterface.addIndex('claim_credit_notes', ['confirmation_status'], { name: 'idx_claim_credit_notes_status' });
}
/**
 * Drops both claim billing tables; credit notes go first because they hold a
 * foreign key into claim_invoices.
 */
export async function down(queryInterface: QueryInterface): Promise<void> {
  for (const tableName of ['claim_credit_notes', 'claim_invoices']) {
    await queryInterface.dropTable(tableName);
  }
}

View File

@ -1,68 +0,0 @@
import { QueryInterface, DataTypes } from 'sequelize';
/**
* Helper function to check if a column exists in a table
*/
/**
 * Checks whether `columnName` exists on `tableName`.
 * A missing table is reported as a missing column rather than an error.
 */
async function columnExists(
  queryInterface: QueryInterface,
  tableName: string,
  columnName: string
): Promise<boolean> {
  let description: Record<string, unknown>;
  try {
    description = await queryInterface.describeTable(tableName);
  } catch {
    return false; // table itself is absent
  }
  return columnName in description;
}
/**
 * Drops legacy invoice/credit-note columns from dealer_claim_details; that
 * data now lives in dedicated tables. Each column's existence is checked
 * first so the migration also succeeds on databases created without them.
 */
export async function up(queryInterface: QueryInterface): Promise<void> {
  const legacyColumns = [
    'dms_number',
    'e_invoice_number',
    'e_invoice_date',
    'credit_note_number',
    'credit_note_date',
    'credit_note_amount',
  ];
  for (const columnName of legacyColumns) {
    if (await columnExists(queryInterface, 'dealer_claim_details', columnName)) {
      await queryInterface.removeColumn('dealer_claim_details', columnName);
      console.log(` ✅ Removed column: ${columnName}`);
    } else {
      console.log(` ⏭️ Column ${columnName} does not exist, skipping...`);
    }
  }
}
export async function down(queryInterface: QueryInterface): Promise<void> {
await queryInterface.addColumn('dealer_claim_details', 'dms_number', {
type: DataTypes.STRING(100),
allowNull: true,
});
await queryInterface.addColumn('dealer_claim_details', 'e_invoice_number', {
type: DataTypes.STRING(100),
allowNull: true,
});
await queryInterface.addColumn('dealer_claim_details', 'e_invoice_date', {
type: DataTypes.DATEONLY,
allowNull: true,
});
await queryInterface.addColumn('dealer_claim_details', 'credit_note_number', {
type: DataTypes.STRING(100),
allowNull: true,
});
await queryInterface.addColumn('dealer_claim_details', 'credit_note_date', {
type: DataTypes.DATEONLY,
allowNull: true,
});
await queryInterface.addColumn('dealer_claim_details', 'credit_note_amount', {
type: DataTypes.DECIMAL(15, 2),
allowNull: true,
});
}

View File

@ -1,55 +0,0 @@
import { QueryInterface, DataTypes } from 'sequelize';
/**
 * Creates dealer_completion_expenses: individual expense line items reported
 * at claim completion, linked to the workflow request and (optionally) the
 * completion record they belong to.
 */
export async function up(queryInterface: QueryInterface): Promise<void> {
  await queryInterface.createTable('dealer_completion_expenses', {
    expense_id: { type: DataTypes.UUID, primaryKey: true, defaultValue: DataTypes.UUIDV4 },
    request_id: {
      type: DataTypes.UUID,
      allowNull: false,
      references: { model: 'workflow_requests', key: 'request_id' },
      onDelete: 'CASCADE',
      onUpdate: 'CASCADE',
    },
    completion_id: {
      type: DataTypes.UUID,
      allowNull: true,
      references: { model: 'dealer_completion_details', key: 'completion_id' },
      onDelete: 'CASCADE',
      onUpdate: 'CASCADE',
    },
    description: { type: DataTypes.STRING(500), allowNull: false },
    amount: { type: DataTypes.DECIMAL(15, 2), allowNull: false },
    created_at: { type: DataTypes.DATE, allowNull: false, defaultValue: DataTypes.NOW },
    updated_at: { type: DataTypes.DATE, allowNull: false, defaultValue: DataTypes.NOW },
  });
  // One lookup index per foreign key.
  for (const column of ['request_id', 'completion_id']) {
    await queryInterface.addIndex('dealer_completion_expenses', [column], {
      name: `idx_dealer_completion_expenses_${column}`,
    });
  }
}
/** Reverts the expense-items migration by dropping the table (its indexes go with it). */
export async function down(queryInterface: QueryInterface): Promise<void> {
  await queryInterface.dropTable('dealer_completion_expenses');
}

View File

@ -1,240 +0,0 @@
import { QueryInterface, DataTypes } from 'sequelize';
/**
* Helper function to check if a column exists in a table
*/
/**
 * True when `tableName` has a column named `columnName`; false when the
 * column — or the whole table — is missing.
 */
async function columnExists(
  queryInterface: QueryInterface,
  tableName: string,
  columnName: string
): Promise<boolean> {
  try {
    const columns = await queryInterface.describeTable(tableName);
    return Object.prototype.hasOwnProperty.call(columns, columnName);
  } catch {
    // describeTable throws when the table does not exist.
    return false;
  }
}
/**
* Migration: Fix column names in claim_invoices and claim_credit_notes tables
*
* This migration handles the case where tables were created with old column names
* and need to be updated to match the new schema.
*/
/**
 * Aligns claim_invoices and claim_credit_notes with the current schema:
 * renames legacy columns (amount → invoice_amount, status →
 * generation_status / confirmation_status, credit_note_amount →
 * credit_amount) and adds any columns still missing. Idempotent — every
 * operation is guarded by a live-schema check.
 *
 * Refactored from ~190 lines of copy-pasted rename-or-add blocks into two
 * local helpers; the order of operations and every log message are unchanged.
 */
export async function up(queryInterface: QueryInterface): Promise<void> {
  // Column-definition type accepted by queryInterface.addColumn.
  type ColumnDef = Parameters<QueryInterface['addColumn']>[2];

  // True when `tableName` exists in the public schema.
  const tableExists = async (tableName: string): Promise<boolean> => {
    const [rows] = await queryInterface.sequelize.query(
      `SELECT table_name
       FROM information_schema.tables
       WHERE table_schema = 'public'
       AND table_name = :tableName;`,
      { replacements: { tableName } }
    );
    return rows.length > 0;
  };

  // Rename `oldName` to `newName` when only the old column exists; add
  // `newName` when neither exists; otherwise report it as already present.
  const renameOrAdd = async (
    table: string,
    oldName: string,
    newName: string,
    definition: ColumnDef
  ): Promise<void> => {
    const hasOld = await columnExists(queryInterface, table, oldName);
    const hasNew = await columnExists(queryInterface, table, newName);
    if (hasOld && !hasNew) {
      await queryInterface.renameColumn(table, oldName, newName);
      console.log(`✅ Renamed ${table}.${oldName} to ${newName}`);
    } else if (!hasOld && !hasNew) {
      await queryInterface.addColumn(table, newName, definition);
      console.log(`✅ Added ${newName} column to ${table}`);
    } else {
      console.log(`✅ ${newName} column already exists in ${table}`);
    }
  };

  // Add `name` to `table` only when it is missing.
  const ensureColumn = async (
    table: string,
    name: string,
    definition: ColumnDef
  ): Promise<void> => {
    if (!(await columnExists(queryInterface, table, name))) {
      await queryInterface.addColumn(table, name, definition);
      console.log(`✅ Added ${name} column to ${table}`);
    }
  };

  try {
    const hasInvoices = await tableExists('claim_invoices');
    const hasCreditNotes = await tableExists('claim_credit_notes');

    if (hasInvoices) {
      await renameOrAdd('claim_invoices', 'amount', 'invoice_amount',
        { type: DataTypes.DECIMAL(15, 2), allowNull: true });
      await renameOrAdd('claim_invoices', 'status', 'generation_status',
        { type: DataTypes.STRING(50), allowNull: true });
    }

    if (hasCreditNotes) {
      await renameOrAdd('claim_credit_notes', 'credit_note_amount', 'credit_amount',
        { type: DataTypes.DECIMAL(15, 2), allowNull: true });
      await renameOrAdd('claim_credit_notes', 'status', 'confirmation_status',
        { type: DataTypes.STRING(50), allowNull: true });
      await ensureColumn('claim_credit_notes', 'invoice_id', {
        type: DataTypes.UUID,
        allowNull: true,
        references: { model: 'claim_invoices', key: 'invoice_id' },
        onDelete: 'SET NULL',
        onUpdate: 'CASCADE',
      });
      await ensureColumn('claim_credit_notes', 'sap_document_number',
        { type: DataTypes.STRING(100), allowNull: true });
      await ensureColumn('claim_credit_notes', 'credit_note_file_path',
        { type: DataTypes.STRING(500), allowNull: true });
      await ensureColumn('claim_credit_notes', 'confirmed_by', {
        type: DataTypes.UUID,
        allowNull: true,
        references: { model: 'users', key: 'user_id' },
        onDelete: 'SET NULL',
        onUpdate: 'CASCADE',
      });
      await ensureColumn('claim_credit_notes', 'confirmed_at',
        { type: DataTypes.DATE, allowNull: true });
    }

    if (hasInvoices) {
      await ensureColumn('claim_invoices', 'invoice_file_path',
        { type: DataTypes.STRING(500), allowNull: true });
      await ensureColumn('claim_invoices', 'error_message',
        { type: DataTypes.TEXT, allowNull: true });
      await ensureColumn('claim_invoices', 'generated_at',
        { type: DataTypes.DATE, allowNull: true });
    }

    if (hasCreditNotes) {
      await ensureColumn('claim_credit_notes', 'error_message',
        { type: DataTypes.TEXT, allowNull: true });
    }
  } catch (error: any) {
    console.error('Migration error:', error.message);
    throw error;
  }
}
/**
 * Intentional no-op. The `up` migration is idempotent and the added columns
 * are kept in place: reversing the schema change is riskier than retaining it.
 */
export async function down(_queryInterface: QueryInterface): Promise<void> {
  console.log('Note: Down migration not implemented - keeping new column names');
}

View File

@ -1,134 +0,0 @@
import { QueryInterface, DataTypes } from 'sequelize';
/**
 * Rebuilds the level-based `dealer_claim_history` table.
 *
 * Step 1 drops and recreates the snapshot_type enum from scratch so that every
 * value (notably APPROVE) is guaranteed to exist when the table is recreated.
 * The table itself is expected to have been dropped manually beforehand.
 */
export const up = async (queryInterface: QueryInterface) => {
  const table = 'dealer_claim_history';

  // 1. Recreate the enum type; abort the migration on failure.
  try {
    await queryInterface.sequelize.query(`
      DO $$
      BEGIN
        -- Drop enum if it exists (cascade will handle any dependencies)
        IF EXISTS (SELECT 1 FROM pg_type WHERE typname = 'enum_dealer_claim_history_snapshot_type') THEN
          DROP TYPE IF EXISTS enum_dealer_claim_history_snapshot_type CASCADE;
        END IF;
        -- Create enum with all values including APPROVE
        CREATE TYPE enum_dealer_claim_history_snapshot_type AS ENUM ('PROPOSAL', 'COMPLETION', 'INTERNAL_ORDER', 'WORKFLOW', 'APPROVE');
      END $$;
    `);
  } catch (error) {
    console.error('Enum creation error:', error);
    throw error;
  }

  // 2. Create the simplified, per-level snapshot table.
  await queryInterface.createTable(table, {
    history_id: {
      type: DataTypes.UUID,
      defaultValue: DataTypes.UUIDV4,
      primaryKey: true
    },
    request_id: {
      type: DataTypes.UUID,
      allowNull: false,
      references: { model: 'workflow_requests', key: 'request_id' },
      onUpdate: 'CASCADE',
      onDelete: 'CASCADE'
    },
    approval_level_id: {
      type: DataTypes.UUID,
      allowNull: true, // Nullable for workflow-level snapshots
      references: { model: 'approval_levels', key: 'level_id' },
      onUpdate: 'CASCADE',
      onDelete: 'SET NULL'
    },
    level_number: {
      type: DataTypes.INTEGER,
      allowNull: true, // Nullable for workflow-level snapshots
      comment: 'Level number for easier querying (e.g., 1=Dealer, 3=Dept Lead, 4/5=Completion)'
    },
    level_name: {
      type: DataTypes.STRING(255),
      allowNull: true, // Nullable for workflow-level snapshots
      comment: 'Level name for consistent matching (e.g., "Dealer Proposal Submission", "Department Lead Approval")'
    },
    version: {
      type: DataTypes.INTEGER,
      allowNull: false,
      comment: 'Version number for this specific level (starts at 1 per level)'
    },
    snapshot_type: {
      type: DataTypes.ENUM('PROPOSAL', 'COMPLETION', 'INTERNAL_ORDER', 'WORKFLOW', 'APPROVE'),
      allowNull: false,
      comment: 'Type of snapshot: PROPOSAL (Step 1), COMPLETION (Step 4/5), INTERNAL_ORDER (Step 3), WORKFLOW (general), APPROVE (approver actions with comments)'
    },
    snapshot_data: {
      type: DataTypes.JSONB,
      allowNull: false,
      comment: 'JSON object containing all snapshot data specific to this level and type. Structure varies by snapshot_type.'
    },
    change_reason: {
      type: DataTypes.TEXT,
      allowNull: true,
      comment: 'Reason for this version change (e.g., "Revision Requested: ...")'
    },
    changed_by: {
      type: DataTypes.UUID,
      allowNull: false,
      references: { model: 'users', key: 'user_id' }
    },
    created_at: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW
    }
  });

  // 3. Secondary indexes for the common query patterns (by request/level,
  //    by snapshot type, and by level name). Created in declaration order.
  const indexes: Array<[string[], { name: string; using?: string }]> = [
    [['request_id', 'level_number', 'version'], { name: 'idx_history_request_level_version' }],
    [['approval_level_id', 'version'], { name: 'idx_history_level_version' }],
    [['request_id', 'snapshot_type'], { name: 'idx_history_request_type' }],
    [['snapshot_type', 'level_number'], { name: 'idx_history_type_level' }],
    [['request_id', 'level_name'], { name: 'idx_history_request_level_name' }],
    [['level_name', 'snapshot_type'], { name: 'idx_history_level_name_type' }],
    // Plain BTREE index used when filtering JSONB snapshot_data queries by type.
    [['snapshot_type'], { name: 'idx_history_snapshot_type', using: 'BTREE' }]
  ];
  for (const [fields, options] of indexes) {
    await queryInterface.addIndex(table, fields, options);
  }
};
/**
 * Reverse of the history-table rebuild. The table itself is dropped manually;
 * only the enum type is cleaned up here, and a failure is non-fatal (warned).
 */
export const down = async (queryInterface: QueryInterface) => {
  await queryInterface.sequelize
    .query(`
      DROP TYPE IF EXISTS enum_dealer_claim_history_snapshot_type CASCADE;
    `)
    .catch((error) => console.warn('Enum drop warning:', error));
};

View File

@ -1,115 +0,0 @@
import { QueryInterface, DataTypes } from 'sequelize';
/**
 * Fix-forward schema alignment for `workflow_templates`: renames legacy
 * columns to their new names and adds any missing columns. Every step is
 * guarded against the current table description, so re-running is safe.
 */
export async function up(queryInterface: QueryInterface): Promise<void> {
  try {
    const tableDescription = await queryInterface.describeTable('workflow_templates');

    // Legacy -> new column names. A rename only fires when the old column
    // still exists and the new one does not.
    const renames: Array<[string, string]> = [
      ['id', 'template_id'],
      ['name', 'template_name'],
      ['description', 'template_description'],
      ['category', 'template_category'],
      ['suggested_sla', 'default_tat_hours']
    ];
    for (const [oldName, newName] of renames) {
      if (tableDescription[oldName] && !tableDescription[newName]) {
        console.log(`Renaming ${oldName} to ${newName}...`);
        await queryInterface.renameColumn('workflow_templates', oldName, newName);
      }
    }

    // Columns the new schema requires; each is added only when absent.
    const newColumns: Array<[string, Parameters<QueryInterface['addColumn']>[2]]> = [
      ['template_code', { type: DataTypes.STRING(50), allowNull: true, unique: true }],
      ['workflow_type', { type: DataTypes.STRING(50), allowNull: true }],
      ['approval_levels_config', { type: DataTypes.JSONB, allowNull: true }],
      ['form_steps_config', { type: DataTypes.JSONB, allowNull: true }],
      ['user_field_mappings', { type: DataTypes.JSONB, allowNull: true }],
      ['dynamic_approver_config', { type: DataTypes.JSONB, allowNull: true }],
      ['is_system_template', { type: DataTypes.BOOLEAN, allowNull: false, defaultValue: false }],
      ['usage_count', { type: DataTypes.INTEGER, allowNull: false, defaultValue: 0 }]
    ];
    for (const [columnName, definition] of newColumns) {
      if (!tableDescription[columnName]) {
        console.log(`Adding ${columnName} column...`);
        await queryInterface.addColumn('workflow_templates', columnName, definition);
      }
    }

    console.log('✅ Schema validation/fix complete');
  } catch (error) {
    console.error('Error in schema fix migration:', error);
    throw error;
  }
}
/**
 * Intentional no-op: this is a fix-forward migration, and reverting the
 * renames/added columns is considered too complex and risky to automate.
 */
export async function down(_queryInterface: QueryInterface): Promise<void> {}

View File

@ -1,120 +0,0 @@
import { DataTypes, Model, Optional } from 'sequelize';
import { sequelize } from '@config/database';
/**
 * One row of the request audit trail (`activities` table).
 * Attribute names are camelCase; the DB columns are snake_case via the
 * `field` mappings in `Activity.init` below.
 */
interface ActivityAttributes {
  activityId: string;
  requestId: string;
  userId?: string | null; // null for system-generated events (see isSystemEvent)
  userName?: string | null; // denormalized display name captured at log time
  activityType: string; // activity_type
  activityDescription: string; // activity_description
  activityCategory?: string | null;
  severity?: string | null;
  metadata?: object | null; // arbitrary JSONB payload
  isSystemEvent?: boolean | null;
  ipAddress?: string | null;
  userAgent?: string | null;
  createdAt: Date;
}

// Only DB-defaulted fields (PK, created_at) may be omitted on creation.
interface ActivityCreationAttributes extends Optional<ActivityAttributes, 'activityId' | 'createdAt'> {}

/**
 * Sequelize model for the `activities` audit-log table. Append-only in
 * practice: `timestamps: false` below and only `created_at` is stored.
 */
class Activity extends Model<ActivityAttributes, ActivityCreationAttributes> implements ActivityAttributes {
  public activityId!: string;
  public requestId!: string;
  public userId!: string | null;
  public userName!: string | null;
  public activityType!: string;
  public activityDescription!: string;
  public activityCategory!: string | null;
  public severity!: string | null;
  public metadata!: object | null;
  public isSystemEvent!: boolean | null;
  public ipAddress!: string | null;
  public userAgent!: string | null;
  public createdAt!: Date;
}

Activity.init(
  {
    activityId: {
      type: DataTypes.UUID,
      defaultValue: DataTypes.UUIDV4,
      primaryKey: true,
      field: 'activity_id'
    },
    requestId: {
      // NOTE(review): no `references` declared here — FK integrity to
      // workflow_requests (if any) is enforced at the DB/migration level.
      type: DataTypes.UUID,
      allowNull: false,
      field: 'request_id'
    },
    userId: {
      type: DataTypes.UUID,
      allowNull: true,
      field: 'user_id'
    },
    userName: {
      type: DataTypes.STRING(255),
      allowNull: true,
      field: 'user_name'
    },
    activityType: {
      type: DataTypes.STRING(100),
      allowNull: false,
      field: 'activity_type'
    },
    activityDescription: {
      type: DataTypes.TEXT,
      allowNull: false,
      field: 'activity_description'
    },
    activityCategory: {
      type: DataTypes.STRING(100),
      allowNull: true,
      field: 'activity_category'
    },
    severity: {
      type: DataTypes.STRING(50),
      allowNull: true
    },
    metadata: {
      type: DataTypes.JSONB,
      allowNull: true
    },
    isSystemEvent: {
      type: DataTypes.BOOLEAN,
      allowNull: true,
      field: 'is_system_event'
    },
    ipAddress: {
      type: DataTypes.STRING(100),
      allowNull: true,
      field: 'ip_address'
    },
    userAgent: {
      type: DataTypes.TEXT,
      allowNull: true,
      field: 'user_agent'
    },
    createdAt: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW,
      field: 'created_at'
    }
  },
  {
    sequelize,
    modelName: 'Activity',
    tableName: 'activities',
    // No updated_at column: rows are written once, so timestamps are disabled
    // and created_at is managed explicitly above.
    timestamps: false,
    indexes: [
      { fields: ['request_id'] },
      { fields: ['created_at'] }
    ]
  }
);

export { Activity };

View File

@ -1,127 +0,0 @@
import { DataTypes, Model, Optional } from 'sequelize';
import { sequelize } from '@config/database';
import { User } from './User';
/**
 * Master-data record describing an activity type (`activity_types` table),
 * with optional SAP/taxation reference fields.
 */
interface ActivityTypeAttributes {
  activityTypeId: string;
  title: string; // unique human-readable name (enforced below)
  itemCode?: string;
  taxationType?: string;
  sapRefNo?: string;
  isActive: boolean; // enable/disable flag (soft delete)
  createdBy: string; // user_id of the creator
  updatedBy?: string; // user_id of the last editor
  createdAt: Date;
  updatedAt: Date;
}

// Everything with a DB default or nullable column may be omitted on creation.
interface ActivityTypeCreationAttributes extends Optional<ActivityTypeAttributes, 'activityTypeId' | 'itemCode' | 'taxationType' | 'sapRefNo' | 'isActive' | 'updatedBy' | 'createdAt' | 'updatedAt'> {}

/**
 * Sequelize model for `activity_types`. Associated to `User` twice below:
 * `creator` (created_by) and `updater` (updated_by).
 */
class ActivityType extends Model<ActivityTypeAttributes, ActivityTypeCreationAttributes> implements ActivityTypeAttributes {
  public activityTypeId!: string;
  public title!: string;
  public itemCode?: string;
  public taxationType?: string;
  public sapRefNo?: string;
  public isActive!: boolean;
  public createdBy!: string;
  public updatedBy?: string;
  public createdAt!: Date;
  public updatedAt!: Date;
  // Associations
  public creator?: User;
  public updater?: User;
}

ActivityType.init(
  {
    activityTypeId: {
      type: DataTypes.UUID,
      defaultValue: DataTypes.UUIDV4,
      primaryKey: true,
      field: 'activity_type_id'
    },
    title: {
      type: DataTypes.STRING(200),
      allowNull: false,
      unique: true, // titles must be unique (also indexed below)
      field: 'title'
    },
    itemCode: {
      type: DataTypes.STRING(100),
      allowNull: true,
      defaultValue: null,
      field: 'item_code'
    },
    taxationType: {
      type: DataTypes.STRING(100),
      allowNull: true,
      defaultValue: null,
      field: 'taxation_type'
    },
    sapRefNo: {
      type: DataTypes.STRING(100),
      allowNull: true,
      defaultValue: null,
      field: 'sap_ref_no'
    },
    isActive: {
      type: DataTypes.BOOLEAN,
      defaultValue: true, // new types are active by default
      field: 'is_active'
    },
    createdBy: {
      // NOTE(review): no `references` declared — FK to users is presumably
      // enforced by migrations; verify if integrity matters here.
      type: DataTypes.UUID,
      allowNull: false,
      field: 'created_by'
    },
    updatedBy: {
      type: DataTypes.UUID,
      allowNull: true,
      field: 'updated_by'
    },
    createdAt: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW,
      field: 'created_at'
    },
    updatedAt: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW,
      field: 'updated_at'
    }
  },
  {
    sequelize,
    modelName: 'ActivityType',
    tableName: 'activity_types',
    timestamps: true,
    createdAt: 'created_at',
    updatedAt: 'updated_at',
    indexes: [
      { fields: ['title'], unique: true },
      { fields: ['is_active'] },
      { fields: ['item_code'] },
      { fields: ['created_by'] }
    ]
  }
);

// Associations: both point at User, distinguished by alias/foreign key.
ActivityType.belongsTo(User, {
  as: 'creator',
  foreignKey: 'createdBy',
  targetKey: 'userId'
});
ActivityType.belongsTo(User, {
  as: 'updater',
  foreignKey: 'updatedBy',
  targetKey: 'userId'
});

export { ActivityType };

View File

@ -1,307 +0,0 @@
import { DataTypes, Model, Optional } from 'sequelize';
import { sequelize } from '@config/database';
import { User } from './User';
import { WorkflowRequest } from './WorkflowRequest';
import { ApprovalStatus } from '../types/common.types';
/**
 * One approver step of a workflow request (`approval_levels` table).
 * Tracks the approver assignment, the level's TAT (turn-around-time) budget
 * and consumption, alerting flags, and pause/resume bookkeeping.
 */
interface ApprovalLevelAttributes {
  levelId: string;
  requestId: string;
  levelNumber: number; // 1-based ordering; unique per request (see indexes)
  levelName?: string;
  approverId: string;
  approverEmail: string; // denormalized from users at assignment time
  approverName: string; // denormalized from users at assignment time
  tatHours: number; // allotted TAT budget for this level
  tatDays: number; // DB-generated from tat_hours — never write it (see init)
  status: ApprovalStatus;
  levelStartTime?: Date;
  levelEndTime?: Date;
  actionDate?: Date; // when the approver acted
  comments?: string;
  rejectionReason?: string;
  breachReason?: string;
  isFinalApprover: boolean;
  // TAT consumption tracking
  elapsedHours: number;
  remainingHours: number;
  tatPercentageUsed: number;
  tat50AlertSent: boolean; // 50%-consumed alert already dispatched
  tat75AlertSent: boolean; // 75%-consumed alert already dispatched
  tatBreached: boolean;
  tatStartTime?: Date;
  // Pause/resume bookkeeping
  isPaused: boolean;
  pausedAt?: Date;
  pausedBy?: string;
  pauseReason?: string;
  pauseResumeDate?: Date;
  pauseTatStartTime?: Date;
  pauseElapsedHours?: number; // hours consumed before the pause took effect
  createdAt: Date;
  updatedAt: Date;
}

// Fields with DB defaults, generated values, or nullable columns are optional at creation.
interface ApprovalLevelCreationAttributes extends Optional<ApprovalLevelAttributes, 'levelId' | 'levelName' | 'levelStartTime' | 'levelEndTime' | 'actionDate' | 'comments' | 'rejectionReason' | 'breachReason' | 'tat50AlertSent' | 'tat75AlertSent' | 'tatBreached' | 'tatStartTime' | 'tatDays' | 'isPaused' | 'pausedAt' | 'pausedBy' | 'pauseReason' | 'pauseResumeDate' | 'pauseTatStartTime' | 'pauseElapsedHours' | 'createdAt' | 'updatedAt'> {}

/**
 * Sequelize model for `approval_levels`. Associated below to the parent
 * WorkflowRequest (`request`) and the assigned User (`approver`).
 */
class ApprovalLevel extends Model<ApprovalLevelAttributes, ApprovalLevelCreationAttributes> implements ApprovalLevelAttributes {
  public levelId!: string;
  public requestId!: string;
  public levelNumber!: number;
  public levelName?: string;
  public approverId!: string;
  public approverEmail!: string;
  public approverName!: string;
  public tatHours!: number;
  public tatDays!: number;
  public status!: ApprovalStatus;
  public levelStartTime?: Date;
  public levelEndTime?: Date;
  public actionDate?: Date;
  public comments?: string;
  public rejectionReason?: string;
  public breachReason?: string;
  public isFinalApprover!: boolean;
  public elapsedHours!: number;
  public remainingHours!: number;
  public tatPercentageUsed!: number;
  public tat50AlertSent!: boolean;
  public tat75AlertSent!: boolean;
  public tatBreached!: boolean;
  public tatStartTime?: Date;
  public isPaused!: boolean;
  public pausedAt?: Date;
  public pausedBy?: string;
  public pauseReason?: string;
  public pauseResumeDate?: Date;
  public pauseTatStartTime?: Date;
  public pauseElapsedHours?: number;
  public createdAt!: Date;
  public updatedAt!: Date;
  // Associations
  public request?: WorkflowRequest;
  public approver?: User;
}

ApprovalLevel.init(
  {
    levelId: {
      type: DataTypes.UUID,
      defaultValue: DataTypes.UUIDV4,
      primaryKey: true,
      field: 'level_id'
    },
    requestId: {
      type: DataTypes.UUID,
      allowNull: false,
      field: 'request_id',
      references: {
        model: 'workflow_requests',
        key: 'request_id'
      }
    },
    levelNumber: {
      type: DataTypes.INTEGER,
      allowNull: false,
      field: 'level_number'
    },
    levelName: {
      type: DataTypes.STRING(100),
      allowNull: true,
      field: 'level_name'
    },
    approverId: {
      type: DataTypes.UUID,
      allowNull: false,
      field: 'approver_id',
      references: {
        model: 'users',
        key: 'user_id'
      }
    },
    approverEmail: {
      type: DataTypes.STRING(255),
      allowNull: false,
      field: 'approver_email'
    },
    approverName: {
      type: DataTypes.STRING(200),
      allowNull: false,
      field: 'approver_name'
    },
    tatHours: {
      type: DataTypes.DECIMAL(10, 2),
      allowNull: false,
      field: 'tat_hours'
    },
    tatDays: {
      type: DataTypes.INTEGER,
      allowNull: true,
      field: 'tat_days'
      // This is a GENERATED STORED column in production DB (calculated as CEIL(tat_hours / 24.0))
      // Database will auto-calculate this value - do NOT pass it during INSERT/UPDATE operations
    },
    status: {
      type: DataTypes.ENUM('PENDING', 'IN_PROGRESS', 'APPROVED', 'REJECTED', 'SKIPPED', 'PAUSED'),
      defaultValue: 'PENDING'
    },
    levelStartTime: {
      type: DataTypes.DATE,
      allowNull: true,
      field: 'level_start_time'
    },
    levelEndTime: {
      type: DataTypes.DATE,
      allowNull: true,
      field: 'level_end_time'
    },
    actionDate: {
      type: DataTypes.DATE,
      allowNull: true,
      field: 'action_date'
    },
    comments: {
      type: DataTypes.TEXT,
      allowNull: true
    },
    rejectionReason: {
      type: DataTypes.TEXT,
      allowNull: true,
      field: 'rejection_reason'
    },
    breachReason: {
      type: DataTypes.TEXT,
      allowNull: true,
      field: 'breach_reason',
      comment: 'Reason for TAT breach - can contain paragraph-length text'
    },
    isFinalApprover: {
      type: DataTypes.BOOLEAN,
      defaultValue: false,
      field: 'is_final_approver'
    },
    elapsedHours: {
      type: DataTypes.DECIMAL(10, 2),
      defaultValue: 0,
      field: 'elapsed_hours'
    },
    remainingHours: {
      type: DataTypes.DECIMAL(10, 2),
      defaultValue: 0,
      field: 'remaining_hours'
    },
    tatPercentageUsed: {
      type: DataTypes.DECIMAL(5, 2),
      defaultValue: 0,
      field: 'tat_percentage_used'
    },
    tat50AlertSent: {
      type: DataTypes.BOOLEAN,
      defaultValue: false,
      field: 'tat50_alert_sent'
    },
    tat75AlertSent: {
      type: DataTypes.BOOLEAN,
      defaultValue: false,
      field: 'tat75_alert_sent'
    },
    tatBreached: {
      type: DataTypes.BOOLEAN,
      defaultValue: false,
      field: 'tat_breached'
    },
    tatStartTime: {
      type: DataTypes.DATE,
      allowNull: true,
      field: 'tat_start_time'
    },
    isPaused: {
      type: DataTypes.BOOLEAN,
      defaultValue: false,
      field: 'is_paused'
    },
    pausedAt: {
      type: DataTypes.DATE,
      allowNull: true,
      field: 'paused_at'
    },
    pausedBy: {
      type: DataTypes.UUID,
      allowNull: true,
      field: 'paused_by',
      references: {
        model: 'users',
        key: 'user_id'
      }
    },
    pauseReason: {
      type: DataTypes.TEXT,
      allowNull: true,
      field: 'pause_reason'
    },
    pauseResumeDate: {
      type: DataTypes.DATE,
      allowNull: true,
      field: 'pause_resume_date'
    },
    pauseTatStartTime: {
      type: DataTypes.DATE,
      allowNull: true,
      field: 'pause_tat_start_time'
    },
    pauseElapsedHours: {
      type: DataTypes.DECIMAL(10, 2),
      allowNull: true,
      field: 'pause_elapsed_hours'
    },
    createdAt: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW,
      field: 'created_at'
    },
    updatedAt: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW,
      field: 'updated_at'
    }
  },
  {
    sequelize,
    modelName: 'ApprovalLevel',
    tableName: 'approval_levels',
    timestamps: true,
    createdAt: 'created_at',
    updatedAt: 'updated_at',
    indexes: [
      {
        fields: ['request_id']
      },
      {
        fields: ['approver_id']
      },
      {
        fields: ['status']
      },
      {
        // A request can have each level number at most once.
        unique: true,
        fields: ['request_id', 'level_number']
      }
    ]
  }
);

// Associations
ApprovalLevel.belongsTo(WorkflowRequest, {
  as: 'request',
  foreignKey: 'requestId',
  targetKey: 'requestId'
});
ApprovalLevel.belongsTo(User, {
  as: 'approver',
  foreignKey: 'approverId',
  targetKey: 'userId'
});

export { ApprovalLevel };

View File

@ -1,295 +0,0 @@
import { DataTypes, Model, Optional } from 'sequelize';
import { sequelize } from '@config/database';
import { WorkflowRequest } from './WorkflowRequest';
import { User } from './User';
/**
 * Lifecycle state of a claim's budget record. Mirrors the DB enum on
 * claim_budget_tracking.budget_status (default DRAFT — see init below).
 */
export enum BudgetStatus {
  DRAFT = 'DRAFT',
  PROPOSED = 'PROPOSED',
  APPROVED = 'APPROVED',
  BLOCKED = 'BLOCKED',
  CLOSED = 'CLOSED',
  SETTLED = 'SETTLED'
}

/**
 * Budget figures captured at each stage of a claim's life
 * (`claim_budget_tracking` table, one row per workflow request —
 * request_id is unique).
 */
interface ClaimBudgetTrackingAttributes {
  budgetId: string;
  requestId: string;
  // Initial Budget
  initialEstimatedBudget?: number;
  // Proposal Budget
  proposalEstimatedBudget?: number;
  proposalSubmittedAt?: Date;
  // Approved Budget
  approvedBudget?: number;
  approvedAt?: Date;
  approvedBy?: string;
  // IO Blocked Budget
  ioBlockedAmount?: number;
  ioBlockedAt?: Date;
  // Closed Expenses
  closedExpenses?: number;
  closedExpensesSubmittedAt?: Date;
  // Final Claim Amount
  finalClaimAmount?: number;
  finalClaimAmountApprovedAt?: Date;
  finalClaimAmountApprovedBy?: string;
  // Credit Note
  creditNoteAmount?: number;
  creditNoteIssuedAt?: Date;
  // Status & Metadata
  budgetStatus: BudgetStatus;
  currency: string; // ISO 4217 code, 3 chars; defaults to 'INR' below
  varianceAmount?: number;
  variancePercentage?: number;
  // Audit
  lastModifiedBy?: string;
  lastModifiedAt?: Date;
  modificationReason?: string;
  createdAt: Date;
  updatedAt: Date;
}

// Almost everything is optional at creation; only requestId is required
// (budgetStatus/currency fall back to their DB defaults).
interface ClaimBudgetTrackingCreationAttributes extends Optional<ClaimBudgetTrackingAttributes, 'budgetId' | 'initialEstimatedBudget' | 'proposalEstimatedBudget' | 'proposalSubmittedAt' | 'approvedBudget' | 'approvedAt' | 'approvedBy' | 'ioBlockedAmount' | 'ioBlockedAt' | 'closedExpenses' | 'closedExpensesSubmittedAt' | 'finalClaimAmount' | 'finalClaimAmountApprovedAt' | 'finalClaimAmountApprovedBy' | 'creditNoteAmount' | 'creditNoteIssuedAt' | 'varianceAmount' | 'variancePercentage' | 'lastModifiedBy' | 'lastModifiedAt' | 'modificationReason' | 'budgetStatus' | 'currency' | 'createdAt' | 'updatedAt'> {}

/**
 * Sequelize model for `claim_budget_tracking`. Associated below to the
 * WorkflowRequest and to User three times (approver, finalApprover,
 * lastModifier) via distinct foreign keys.
 */
class ClaimBudgetTracking extends Model<ClaimBudgetTrackingAttributes, ClaimBudgetTrackingCreationAttributes> implements ClaimBudgetTrackingAttributes {
  public budgetId!: string;
  public requestId!: string;
  public initialEstimatedBudget?: number;
  public proposalEstimatedBudget?: number;
  public proposalSubmittedAt?: Date;
  public approvedBudget?: number;
  public approvedAt?: Date;
  public approvedBy?: string;
  public ioBlockedAmount?: number;
  public ioBlockedAt?: Date;
  public closedExpenses?: number;
  public closedExpensesSubmittedAt?: Date;
  public finalClaimAmount?: number;
  public finalClaimAmountApprovedAt?: Date;
  public finalClaimAmountApprovedBy?: string;
  public creditNoteAmount?: number;
  public creditNoteIssuedAt?: Date;
  public budgetStatus!: BudgetStatus;
  public currency!: string;
  public varianceAmount?: number;
  public variancePercentage?: number;
  public lastModifiedBy?: string;
  public lastModifiedAt?: Date;
  public modificationReason?: string;
  public createdAt!: Date;
  public updatedAt!: Date;
  // Associations
  public request?: WorkflowRequest;
  public approver?: User;
  public finalApprover?: User;
  public lastModifier?: User;
}

ClaimBudgetTracking.init(
  {
    budgetId: {
      type: DataTypes.UUID,
      defaultValue: DataTypes.UUIDV4,
      primaryKey: true,
      field: 'budget_id'
    },
    requestId: {
      type: DataTypes.UUID,
      allowNull: false,
      unique: true, // one budget-tracking row per workflow request
      field: 'request_id',
      references: {
        model: 'workflow_requests',
        key: 'request_id'
      }
    },
    initialEstimatedBudget: {
      type: DataTypes.DECIMAL(15, 2),
      allowNull: true,
      field: 'initial_estimated_budget'
    },
    proposalEstimatedBudget: {
      type: DataTypes.DECIMAL(15, 2),
      allowNull: true,
      field: 'proposal_estimated_budget'
    },
    proposalSubmittedAt: {
      type: DataTypes.DATE,
      allowNull: true,
      field: 'proposal_submitted_at'
    },
    approvedBudget: {
      type: DataTypes.DECIMAL(15, 2),
      allowNull: true,
      field: 'approved_budget'
    },
    approvedAt: {
      type: DataTypes.DATE,
      allowNull: true,
      field: 'approved_at'
    },
    approvedBy: {
      type: DataTypes.UUID,
      allowNull: true,
      field: 'approved_by',
      references: {
        model: 'users',
        key: 'user_id'
      }
    },
    ioBlockedAmount: {
      type: DataTypes.DECIMAL(15, 2),
      allowNull: true,
      field: 'io_blocked_amount'
    },
    ioBlockedAt: {
      type: DataTypes.DATE,
      allowNull: true,
      field: 'io_blocked_at'
    },
    closedExpenses: {
      type: DataTypes.DECIMAL(15, 2),
      allowNull: true,
      field: 'closed_expenses'
    },
    closedExpensesSubmittedAt: {
      type: DataTypes.DATE,
      allowNull: true,
      field: 'closed_expenses_submitted_at'
    },
    finalClaimAmount: {
      type: DataTypes.DECIMAL(15, 2),
      allowNull: true,
      field: 'final_claim_amount'
    },
    finalClaimAmountApprovedAt: {
      type: DataTypes.DATE,
      allowNull: true,
      field: 'final_claim_amount_approved_at'
    },
    finalClaimAmountApprovedBy: {
      type: DataTypes.UUID,
      allowNull: true,
      field: 'final_claim_amount_approved_by',
      references: {
        model: 'users',
        key: 'user_id'
      }
    },
    creditNoteAmount: {
      type: DataTypes.DECIMAL(15, 2),
      allowNull: true,
      field: 'credit_note_amount'
    },
    creditNoteIssuedAt: {
      type: DataTypes.DATE,
      allowNull: true,
      field: 'credit_note_issued_at'
    },
    budgetStatus: {
      type: DataTypes.ENUM('DRAFT', 'PROPOSED', 'APPROVED', 'BLOCKED', 'CLOSED', 'SETTLED'),
      defaultValue: 'DRAFT',
      allowNull: false,
      field: 'budget_status'
    },
    currency: {
      type: DataTypes.STRING(3),
      defaultValue: 'INR',
      allowNull: false
    },
    varianceAmount: {
      type: DataTypes.DECIMAL(15, 2),
      allowNull: true,
      field: 'variance_amount'
    },
    variancePercentage: {
      type: DataTypes.DECIMAL(5, 2),
      allowNull: true,
      field: 'variance_percentage'
    },
    lastModifiedBy: {
      type: DataTypes.UUID,
      allowNull: true,
      field: 'last_modified_by',
      references: {
        model: 'users',
        key: 'user_id'
      }
    },
    lastModifiedAt: {
      type: DataTypes.DATE,
      allowNull: true,
      field: 'last_modified_at'
    },
    modificationReason: {
      type: DataTypes.TEXT,
      allowNull: true,
      field: 'modification_reason'
    },
    createdAt: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW,
      field: 'created_at'
    },
    updatedAt: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW,
      field: 'updated_at'
    }
  },
  {
    sequelize,
    modelName: 'ClaimBudgetTracking',
    tableName: 'claim_budget_tracking',
    timestamps: true,
    createdAt: 'created_at',
    updatedAt: 'updated_at',
    indexes: [
      {
        fields: ['request_id'],
        unique: true
      },
      {
        fields: ['budget_status']
      },
      {
        fields: ['approved_by']
      },
      {
        fields: ['final_claim_amount_approved_by']
      }
    ]
  }
);

// Associations
ClaimBudgetTracking.belongsTo(WorkflowRequest, {
  as: 'request',
  foreignKey: 'requestId',
  targetKey: 'requestId'
});
ClaimBudgetTracking.belongsTo(User, {
  as: 'approver',
  foreignKey: 'approvedBy',
  targetKey: 'userId'
});
ClaimBudgetTracking.belongsTo(User, {
  as: 'finalApprover',
  foreignKey: 'finalClaimAmountApprovedBy',
  targetKey: 'userId'
});
ClaimBudgetTracking.belongsTo(User, {
  as: 'lastModifier',
  foreignKey: 'lastModifiedBy',
  targetKey: 'userId'
});

export { ClaimBudgetTracking };

View File

@ -1,193 +0,0 @@
import { DataTypes, Model, Optional } from 'sequelize';
import { sequelize } from '@config/database';
import { WorkflowRequest } from './WorkflowRequest';
import { ClaimInvoice } from './ClaimInvoice';
/**
 * A credit note issued against a claim (`claim_credit_notes` table).
 * One per workflow request (request_id is unique), optionally linked to
 * the claim invoice it offsets.
 *
 * NOTE: two attribute names diverge from their DB columns (see field
 * mappings below): `creditNoteAmount` -> credit_amount and
 * `status` -> confirmation_status.
 */
interface ClaimCreditNoteAttributes {
  creditNoteId: string;
  requestId: string;
  invoiceId?: string;
  creditNoteNumber?: string;
  creditNoteDate?: Date; // stored as DATEONLY (date without time)
  creditNoteAmount?: number;
  sapDocumentNumber?: string;
  creditNoteFilePath?: string; // path to the generated credit-note file
  status?: string; // maps to confirmation_status
  errorMessage?: string; // populated when generation/processing fails
  confirmedBy?: string; // user_id of the confirming user
  confirmedAt?: Date;
  reason?: string;
  description?: string;
  createdAt: Date;
  updatedAt: Date;
}

// Only requestId is mandatory at creation; everything else is nullable or defaulted.
interface ClaimCreditNoteCreationAttributes extends Optional<ClaimCreditNoteAttributes, 'creditNoteId' | 'invoiceId' | 'creditNoteNumber' | 'creditNoteDate' | 'creditNoteAmount' | 'sapDocumentNumber' | 'creditNoteFilePath' | 'status' | 'errorMessage' | 'confirmedBy' | 'confirmedAt' | 'reason' | 'description' | 'createdAt' | 'updatedAt'> {}

/**
 * Sequelize model for `claim_credit_notes`. Associations registered below:
 * WorkflowRequest hasOne ClaimCreditNote, and ClaimInvoice hasMany.
 */
class ClaimCreditNote extends Model<ClaimCreditNoteAttributes, ClaimCreditNoteCreationAttributes> implements ClaimCreditNoteAttributes {
  public creditNoteId!: string;
  public requestId!: string;
  public invoiceId?: string;
  public creditNoteNumber?: string;
  public creditNoteDate?: Date;
  public creditNoteAmount?: number;
  public sapDocumentNumber?: string;
  public creditNoteFilePath?: string;
  public status?: string;
  public errorMessage?: string;
  public confirmedBy?: string;
  public confirmedAt?: Date;
  public reason?: string;
  public description?: string;
  public createdAt!: Date;
  public updatedAt!: Date;
}

ClaimCreditNote.init(
  {
    creditNoteId: {
      type: DataTypes.UUID,
      defaultValue: DataTypes.UUIDV4,
      primaryKey: true,
      field: 'credit_note_id',
    },
    requestId: {
      type: DataTypes.UUID,
      allowNull: false,
      unique: true, // at most one credit note per request
      field: 'request_id',
      references: {
        model: 'workflow_requests',
        key: 'request_id',
      },
      onDelete: 'CASCADE',
      onUpdate: 'CASCADE',
    },
    invoiceId: {
      type: DataTypes.UUID,
      allowNull: true,
      field: 'invoice_id',
      references: {
        model: 'claim_invoices',
        key: 'invoice_id',
      },
      onDelete: 'SET NULL', // keep the credit note if its invoice is deleted
      onUpdate: 'CASCADE',
    },
    creditNoteNumber: {
      type: DataTypes.STRING(100),
      allowNull: true,
      field: 'credit_note_number',
    },
    creditNoteDate: {
      type: DataTypes.DATEONLY,
      allowNull: true,
      field: 'credit_note_date',
    },
    creditNoteAmount: {
      type: DataTypes.DECIMAL(15, 2),
      allowNull: true,
      // Column name intentionally differs from the attribute name.
      field: 'credit_amount',
    },
    sapDocumentNumber: {
      type: DataTypes.STRING(100),
      allowNull: true,
      field: 'sap_document_number',
    },
    creditNoteFilePath: {
      type: DataTypes.STRING(500),
      allowNull: true,
      field: 'credit_note_file_path',
    },
    status: {
      type: DataTypes.STRING(50),
      allowNull: true,
      // Column name intentionally differs from the attribute name.
      field: 'confirmation_status',
    },
    errorMessage: {
      type: DataTypes.TEXT,
      allowNull: true,
      field: 'error_message',
    },
    confirmedBy: {
      type: DataTypes.UUID,
      allowNull: true,
      field: 'confirmed_by',
      references: {
        model: 'users',
        key: 'user_id',
      },
      onDelete: 'SET NULL',
      onUpdate: 'CASCADE',
    },
    confirmedAt: {
      type: DataTypes.DATE,
      allowNull: true,
      field: 'confirmed_at',
    },
    reason: {
      type: DataTypes.TEXT,
      allowNull: true,
      field: 'reason',
    },
    description: {
      type: DataTypes.TEXT,
      allowNull: true,
      field: 'description',
    },
    createdAt: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW,
      field: 'created_at',
    },
    updatedAt: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW,
      field: 'updated_at',
    },
  },
  {
    sequelize,
    modelName: 'ClaimCreditNote',
    tableName: 'claim_credit_notes',
    timestamps: true,
    createdAt: 'created_at',
    updatedAt: 'updated_at',
    indexes: [
      { unique: true, fields: ['request_id'], name: 'idx_claim_credit_notes_request_id' },
      { fields: ['invoice_id'], name: 'idx_claim_credit_notes_invoice_id' },
      { fields: ['credit_note_number'], name: 'idx_claim_credit_notes_number' },
      { fields: ['sap_document_number'], name: 'idx_claim_credit_notes_sap_doc' },
      { fields: ['confirmation_status'], name: 'idx_claim_credit_notes_status' },
    ],
  }
);

// Associations: request 1–1 credit note; invoice 1–many credit notes.
WorkflowRequest.hasOne(ClaimCreditNote, {
  as: 'claimCreditNote',
  foreignKey: 'requestId',
  sourceKey: 'requestId',
});
ClaimCreditNote.belongsTo(WorkflowRequest, {
  as: 'workflowRequest',
  foreignKey: 'requestId',
  targetKey: 'requestId',
});
ClaimCreditNote.belongsTo(ClaimInvoice, {
  as: 'claimInvoice',
  foreignKey: 'invoiceId',
  targetKey: 'invoiceId',
});
ClaimInvoice.hasMany(ClaimCreditNote, {
  as: 'creditNotes',
  foreignKey: 'invoiceId',
  sourceKey: 'invoiceId',
});

export { ClaimCreditNote };

Some files were not shown because too many files have changed in this diff Show More