migration strarted now user can create request upload document preview document add approver , wrorkinmg on current approver
This commit is contained in:
parent
e03049a861
commit
2dbfcd7a56
114
_archive/services/activity.service.ts
Normal file
114
_archive/services/activity.service.ts
Normal file
@ -0,0 +1,114 @@
|
|||||||
|
import logger from '@utils/logger';
|
||||||
|
|
||||||
|
// Special UUID for system events (login, etc.) - well-known UUID: 00000000-0000-0000-0000-000000000001
|
||||||
|
export const SYSTEM_EVENT_REQUEST_ID = '00000000-0000-0000-0000-000000000001';
|
||||||
|
|
||||||
|
export type ActivityEntry = {
|
||||||
|
requestId: string;
|
||||||
|
type: 'created' | 'submitted' | 'assignment' | 'approval' | 'rejection' | 'status_change' | 'comment' | 'reminder' | 'document_added' | 'sla_warning' | 'ai_conclusion_generated' | 'summary_generated' | 'closed' | 'login' | 'paused' | 'resumed' | 'pause_retriggered';
|
||||||
|
user?: { userId: string; name?: string; email?: string };
|
||||||
|
timestamp: string;
|
||||||
|
action: string;
|
||||||
|
details: string;
|
||||||
|
metadata?: any;
|
||||||
|
ipAddress?: string;
|
||||||
|
userAgent?: string;
|
||||||
|
category?: string;
|
||||||
|
severity?: string;
|
||||||
|
};
|
||||||
|
|
||||||
|
class ActivityService {
|
||||||
|
private byRequest: Map<string, ActivityEntry[]> = new Map();
|
||||||
|
|
||||||
|
private inferCategory(type: string): string {
|
||||||
|
const categoryMap: Record<string, string> = {
|
||||||
|
'created': 'WORKFLOW',
|
||||||
|
'submitted': 'WORKFLOW',
|
||||||
|
'approval': 'WORKFLOW',
|
||||||
|
'rejection': 'WORKFLOW',
|
||||||
|
'status_change': 'WORKFLOW',
|
||||||
|
'assignment': 'WORKFLOW',
|
||||||
|
'comment': 'COLLABORATION',
|
||||||
|
'document_added': 'DOCUMENT',
|
||||||
|
'sla_warning': 'SYSTEM',
|
||||||
|
'reminder': 'SYSTEM',
|
||||||
|
'ai_conclusion_generated': 'SYSTEM',
|
||||||
|
'closed': 'WORKFLOW',
|
||||||
|
'login': 'AUTHENTICATION',
|
||||||
|
'paused': 'WORKFLOW',
|
||||||
|
'resumed': 'WORKFLOW',
|
||||||
|
'pause_retriggered': 'WORKFLOW'
|
||||||
|
};
|
||||||
|
return categoryMap[type] || 'OTHER';
|
||||||
|
}
|
||||||
|
|
||||||
|
private inferSeverity(type: string): string {
|
||||||
|
const severityMap: Record<string, string> = {
|
||||||
|
'rejection': 'WARNING',
|
||||||
|
'sla_warning': 'WARNING',
|
||||||
|
'approval': 'INFO',
|
||||||
|
'closed': 'INFO',
|
||||||
|
'status_change': 'INFO',
|
||||||
|
'login': 'INFO',
|
||||||
|
'created': 'INFO',
|
||||||
|
'submitted': 'INFO',
|
||||||
|
'comment': 'INFO',
|
||||||
|
'document_added': 'INFO',
|
||||||
|
'assignment': 'INFO',
|
||||||
|
'reminder': 'INFO',
|
||||||
|
'ai_conclusion_generated': 'INFO',
|
||||||
|
'paused': 'WARNING',
|
||||||
|
'resumed': 'INFO',
|
||||||
|
'pause_retriggered': 'INFO'
|
||||||
|
};
|
||||||
|
return severityMap[type] || 'INFO';
|
||||||
|
}
|
||||||
|
|
||||||
|
async log(entry: ActivityEntry) {
|
||||||
|
const list = this.byRequest.get(entry.requestId) || [];
|
||||||
|
list.push(entry);
|
||||||
|
this.byRequest.set(entry.requestId, list);
|
||||||
|
|
||||||
|
// Persist to database
|
||||||
|
try {
|
||||||
|
const { Activity } = require('@models/Activity');
|
||||||
|
const userName = entry.user?.name || entry.user?.email || null;
|
||||||
|
|
||||||
|
const activityData = {
|
||||||
|
requestId: entry.requestId,
|
||||||
|
userId: entry.user?.userId || null,
|
||||||
|
userName: userName,
|
||||||
|
activityType: entry.type,
|
||||||
|
activityDescription: entry.details,
|
||||||
|
activityCategory: entry.category || this.inferCategory(entry.type),
|
||||||
|
severity: entry.severity || this.inferSeverity(entry.type),
|
||||||
|
metadata: entry.metadata || null,
|
||||||
|
isSystemEvent: !entry.user,
|
||||||
|
ipAddress: entry.ipAddress || null, // Database accepts null
|
||||||
|
userAgent: entry.userAgent || null, // Database accepts null
|
||||||
|
};
|
||||||
|
|
||||||
|
logger.info(`[Activity] Creating activity:`, {
|
||||||
|
requestId: entry.requestId,
|
||||||
|
userName,
|
||||||
|
userId: entry.user?.userId,
|
||||||
|
type: entry.type,
|
||||||
|
ipAddress: entry.ipAddress ? '***' : null
|
||||||
|
});
|
||||||
|
|
||||||
|
await Activity.create(activityData);
|
||||||
|
|
||||||
|
logger.info(`[Activity] Successfully logged activity for request ${entry.requestId} by user: ${userName}`);
|
||||||
|
} catch (error) {
|
||||||
|
logger.error('[Activity] Failed to persist activity:', error);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
get(requestId: string): ActivityEntry[] {
|
||||||
|
return this.byRequest.get(requestId) || [];
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export const activityService = new ActivityService();
|
||||||
|
|
||||||
|
|
||||||
897
_archive/services/approval.service.ts
Normal file
897
_archive/services/approval.service.ts
Normal file
@ -0,0 +1,897 @@
|
|||||||
|
import { ApprovalLevel } from '@models/ApprovalLevel';
|
||||||
|
import { WorkflowRequest } from '@models/WorkflowRequest';
|
||||||
|
import { Participant } from '@models/Participant';
|
||||||
|
import { TatAlert } from '@models/TatAlert';
|
||||||
|
import { ApprovalAction } from '../types/approval.types';
|
||||||
|
import { ApprovalStatus, WorkflowStatus } from '../types/common.types';
|
||||||
|
import { calculateTATPercentage } from '@utils/helpers';
|
||||||
|
import { calculateElapsedWorkingHours } from '@utils/tatTimeUtils';
|
||||||
|
import logger, { logWorkflowEvent, logAIEvent } from '@utils/logger';
|
||||||
|
import { Op } from 'sequelize';
|
||||||
|
import { notificationService } from './notification.service';
|
||||||
|
import { activityService } from './activity.service';
|
||||||
|
import { tatSchedulerService } from './tatScheduler.service';
|
||||||
|
import { emitToRequestRoom } from '../realtime/socket';
|
||||||
|
// Note: DealerClaimService import removed - dealer claim approvals are handled by DealerClaimApprovalService
|
||||||
|
|
||||||
|
export class ApprovalService {
|
||||||
|
async approveLevel(levelId: string, action: ApprovalAction, _userId: string, requestMetadata?: { ipAddress?: string | null; userAgent?: string | null }): Promise<ApprovalLevel | null> {
|
||||||
|
try {
|
||||||
|
const level = await ApprovalLevel.findByPk(levelId);
|
||||||
|
if (!level) return null;
|
||||||
|
|
||||||
|
// Get workflow to determine priority for working hours calculation
|
||||||
|
const wf = await WorkflowRequest.findByPk(level.requestId);
|
||||||
|
if (!wf) return null;
|
||||||
|
|
||||||
|
// Verify this is NOT a claim management workflow (should use DealerClaimApprovalService)
|
||||||
|
const workflowType = (wf as any)?.workflowType;
|
||||||
|
if (workflowType === 'CLAIM_MANAGEMENT') {
|
||||||
|
logger.error(`[Approval] Attempted to use ApprovalService for CLAIM_MANAGEMENT workflow ${level.requestId}. Use DealerClaimApprovalService instead.`);
|
||||||
|
throw new Error('ApprovalService cannot be used for CLAIM_MANAGEMENT workflows. Use DealerClaimApprovalService instead.');
|
||||||
|
}
|
||||||
|
|
||||||
|
const priority = ((wf as any)?.priority || 'standard').toString().toLowerCase();
|
||||||
|
const isPaused = (wf as any).isPaused || (level as any).isPaused;
|
||||||
|
|
||||||
|
// If paused, resume automatically when approving/rejecting (requirement 3.6)
|
||||||
|
if (isPaused) {
|
||||||
|
const { pauseService } = await import('./pause.service');
|
||||||
|
try {
|
||||||
|
await pauseService.resumeWorkflow(level.requestId, _userId);
|
||||||
|
logger.info(`[Approval] Auto-resumed paused workflow ${level.requestId} when ${action.action === 'APPROVE' ? 'approving' : 'rejecting'}`);
|
||||||
|
} catch (pauseError) {
|
||||||
|
logger.warn(`[Approval] Failed to auto-resume paused workflow:`, pauseError);
|
||||||
|
// Continue with approval/rejection even if resume fails
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const now = new Date();
|
||||||
|
|
||||||
|
// Calculate elapsed hours using working hours logic (with pause handling)
|
||||||
|
// Case 1: Level is currently paused (isPaused = true)
|
||||||
|
// Case 2: Level was paused and resumed (isPaused = false but pauseElapsedHours and pauseResumeDate exist)
|
||||||
|
const isPausedLevel = (level as any).isPaused;
|
||||||
|
const wasResumed = !isPausedLevel &&
|
||||||
|
(level as any).pauseElapsedHours !== null &&
|
||||||
|
(level as any).pauseElapsedHours !== undefined &&
|
||||||
|
(level as any).pauseResumeDate !== null;
|
||||||
|
|
||||||
|
const pauseInfo = isPausedLevel ? {
|
||||||
|
// Level is currently paused - return frozen elapsed hours at pause time
|
||||||
|
isPaused: true,
|
||||||
|
pausedAt: (level as any).pausedAt,
|
||||||
|
pauseElapsedHours: (level as any).pauseElapsedHours,
|
||||||
|
pauseResumeDate: (level as any).pauseResumeDate
|
||||||
|
} : wasResumed ? {
|
||||||
|
// Level was paused but has been resumed - add pre-pause elapsed hours + time since resume
|
||||||
|
isPaused: false,
|
||||||
|
pausedAt: null,
|
||||||
|
pauseElapsedHours: Number((level as any).pauseElapsedHours), // Pre-pause elapsed hours
|
||||||
|
pauseResumeDate: (level as any).pauseResumeDate // Actual resume timestamp
|
||||||
|
} : undefined;
|
||||||
|
|
||||||
|
const elapsedHours = await calculateElapsedWorkingHours(
|
||||||
|
level.levelStartTime || level.createdAt,
|
||||||
|
now,
|
||||||
|
priority,
|
||||||
|
pauseInfo
|
||||||
|
);
|
||||||
|
const tatPercentage = calculateTATPercentage(elapsedHours, level.tatHours);
|
||||||
|
|
||||||
|
const updateData = {
|
||||||
|
status: action.action === 'APPROVE' ? ApprovalStatus.APPROVED : ApprovalStatus.REJECTED,
|
||||||
|
actionDate: now,
|
||||||
|
levelEndTime: now,
|
||||||
|
elapsedHours,
|
||||||
|
tatPercentageUsed: tatPercentage,
|
||||||
|
comments: action.comments,
|
||||||
|
rejectionReason: action.rejectionReason
|
||||||
|
};
|
||||||
|
|
||||||
|
const updatedLevel = await level.update(updateData);
|
||||||
|
|
||||||
|
// Cancel TAT jobs for the current level since it's been actioned
|
||||||
|
try {
|
||||||
|
await tatSchedulerService.cancelTatJobs(level.requestId, level.levelId);
|
||||||
|
logger.info(`[Approval] TAT jobs cancelled for level ${level.levelId}`);
|
||||||
|
} catch (tatError) {
|
||||||
|
logger.error(`[Approval] Failed to cancel TAT jobs:`, tatError);
|
||||||
|
// Don't fail the approval if TAT cancellation fails
|
||||||
|
}
|
||||||
|
|
||||||
|
// Update TAT alerts for this level to mark completion status
|
||||||
|
try {
|
||||||
|
const wasOnTime = elapsedHours <= level.tatHours;
|
||||||
|
await TatAlert.update(
|
||||||
|
{
|
||||||
|
wasCompletedOnTime: wasOnTime,
|
||||||
|
completionTime: now
|
||||||
|
},
|
||||||
|
{
|
||||||
|
where: { levelId: level.levelId }
|
||||||
|
}
|
||||||
|
);
|
||||||
|
logger.info(`[Approval] TAT alerts updated for level ${level.levelId} - Completed ${wasOnTime ? 'on time' : 'late'}`);
|
||||||
|
} catch (tatAlertError) {
|
||||||
|
logger.error(`[Approval] Failed to update TAT alerts:`, tatAlertError);
|
||||||
|
// Don't fail the approval if TAT alert update fails
|
||||||
|
}
|
||||||
|
|
||||||
|
// Handle approval - move to next level or close workflow (wf already loaded above)
|
||||||
|
if (action.action === 'APPROVE') {
|
||||||
|
// Check if this is final approval: either isFinalApprover flag is set OR all levels are approved
|
||||||
|
// This handles cases where additional approvers are added after initial approval
|
||||||
|
const allLevels = await ApprovalLevel.findAll({
|
||||||
|
where: { requestId: level.requestId },
|
||||||
|
order: [['levelNumber', 'ASC']]
|
||||||
|
});
|
||||||
|
const approvedLevelsCount = allLevels.filter((l: any) => l.status === 'APPROVED').length;
|
||||||
|
const totalLevels = allLevels.length;
|
||||||
|
const isAllLevelsApproved = approvedLevelsCount === totalLevels;
|
||||||
|
const isFinalApproval = level.isFinalApprover || isAllLevelsApproved;
|
||||||
|
|
||||||
|
if (isFinalApproval) {
|
||||||
|
// Final approver - close workflow as APPROVED
|
||||||
|
await WorkflowRequest.update(
|
||||||
|
{
|
||||||
|
status: WorkflowStatus.APPROVED,
|
||||||
|
closureDate: now,
|
||||||
|
currentLevel: (level.levelNumber || 0) + 1
|
||||||
|
},
|
||||||
|
{ where: { requestId: level.requestId } }
|
||||||
|
);
|
||||||
|
logWorkflowEvent('approved', level.requestId, {
|
||||||
|
level: level.levelNumber,
|
||||||
|
isFinalApproval: true,
|
||||||
|
status: 'APPROVED',
|
||||||
|
detectedBy: level.isFinalApprover ? 'isFinalApprover flag' : 'all levels approved check'
|
||||||
|
});
|
||||||
|
|
||||||
|
// Log final approval activity first (so it's included in AI context)
|
||||||
|
activityService.log({
|
||||||
|
requestId: level.requestId,
|
||||||
|
type: 'approval',
|
||||||
|
user: { userId: level.approverId, name: level.approverName },
|
||||||
|
timestamp: new Date().toISOString(),
|
||||||
|
action: 'Approved',
|
||||||
|
details: `Request approved and finalized by ${level.approverName || level.approverEmail}. Awaiting conclusion remark from initiator.`,
|
||||||
|
ipAddress: requestMetadata?.ipAddress || undefined,
|
||||||
|
userAgent: requestMetadata?.userAgent || undefined
|
||||||
|
});
|
||||||
|
|
||||||
|
// Generate AI conclusion remark ASYNCHRONOUSLY (don't wait)
|
||||||
|
// This runs in the background without blocking the approval response
|
||||||
|
(async () => {
|
||||||
|
try {
|
||||||
|
const { aiService } = await import('./ai.service');
|
||||||
|
const { ConclusionRemark } = await import('@models/index');
|
||||||
|
const { ApprovalLevel } = await import('@models/ApprovalLevel');
|
||||||
|
const { WorkNote } = await import('@models/WorkNote');
|
||||||
|
const { Document } = await import('@models/Document');
|
||||||
|
const { Activity } = await import('@models/Activity');
|
||||||
|
const { getConfigValue } = await import('./configReader.service');
|
||||||
|
|
||||||
|
// Check if AI features and remark generation are enabled in admin config
|
||||||
|
const aiEnabled = (await getConfigValue('AI_ENABLED', 'true'))?.toLowerCase() === 'true';
|
||||||
|
const remarkGenerationEnabled = (await getConfigValue('AI_REMARK_GENERATION_ENABLED', 'true'))?.toLowerCase() === 'true';
|
||||||
|
|
||||||
|
if (aiEnabled && remarkGenerationEnabled && aiService.isAvailable()) {
|
||||||
|
logAIEvent('request', {
|
||||||
|
requestId: level.requestId,
|
||||||
|
action: 'conclusion_generation_started',
|
||||||
|
});
|
||||||
|
|
||||||
|
// Gather context for AI generation
|
||||||
|
const approvalLevels = await ApprovalLevel.findAll({
|
||||||
|
where: { requestId: level.requestId },
|
||||||
|
order: [['levelNumber', 'ASC']]
|
||||||
|
});
|
||||||
|
|
||||||
|
const workNotes = await WorkNote.findAll({
|
||||||
|
where: { requestId: level.requestId },
|
||||||
|
order: [['createdAt', 'ASC']],
|
||||||
|
limit: 20
|
||||||
|
});
|
||||||
|
|
||||||
|
const documents = await Document.findAll({
|
||||||
|
where: { requestId: level.requestId },
|
||||||
|
order: [['uploadedAt', 'DESC']]
|
||||||
|
});
|
||||||
|
|
||||||
|
const activities = await Activity.findAll({
|
||||||
|
where: { requestId: level.requestId },
|
||||||
|
order: [['createdAt', 'ASC']],
|
||||||
|
limit: 50
|
||||||
|
});
|
||||||
|
|
||||||
|
// Build context object
|
||||||
|
const context = {
|
||||||
|
requestTitle: (wf as any).title,
|
||||||
|
requestDescription: (wf as any).description,
|
||||||
|
requestNumber: (wf as any).requestNumber,
|
||||||
|
priority: (wf as any).priority,
|
||||||
|
approvalFlow: approvalLevels.map((l: any) => {
|
||||||
|
const tatPercentage = l.tatPercentageUsed !== undefined && l.tatPercentageUsed !== null
|
||||||
|
? Number(l.tatPercentageUsed)
|
||||||
|
: (l.elapsedHours && l.tatHours ? (Number(l.elapsedHours) / Number(l.tatHours)) * 100 : 0);
|
||||||
|
return {
|
||||||
|
levelNumber: l.levelNumber,
|
||||||
|
approverName: l.approverName,
|
||||||
|
status: l.status,
|
||||||
|
comments: l.comments,
|
||||||
|
actionDate: l.actionDate,
|
||||||
|
tatHours: Number(l.tatHours || 0),
|
||||||
|
elapsedHours: Number(l.elapsedHours || 0),
|
||||||
|
tatPercentageUsed: tatPercentage
|
||||||
|
};
|
||||||
|
}),
|
||||||
|
workNotes: workNotes.map((note: any) => ({
|
||||||
|
userName: note.userName,
|
||||||
|
message: note.message,
|
||||||
|
createdAt: note.createdAt
|
||||||
|
})),
|
||||||
|
documents: documents.map((doc: any) => ({
|
||||||
|
fileName: doc.originalFileName || doc.fileName,
|
||||||
|
uploadedBy: doc.uploadedBy,
|
||||||
|
uploadedAt: doc.uploadedAt
|
||||||
|
})),
|
||||||
|
activities: activities.map((activity: any) => ({
|
||||||
|
type: activity.activityType,
|
||||||
|
action: activity.activityDescription,
|
||||||
|
details: activity.activityDescription,
|
||||||
|
timestamp: activity.createdAt
|
||||||
|
}))
|
||||||
|
};
|
||||||
|
|
||||||
|
const aiResult = await aiService.generateConclusionRemark(context);
|
||||||
|
|
||||||
|
// Check if conclusion already exists (e.g., from previous final approval before additional approver was added)
|
||||||
|
const existingConclusion = await ConclusionRemark.findOne({
|
||||||
|
where: { requestId: level.requestId }
|
||||||
|
});
|
||||||
|
|
||||||
|
if (existingConclusion) {
|
||||||
|
// Update existing conclusion with new AI-generated remark (regenerated with updated context)
|
||||||
|
await existingConclusion.update({
|
||||||
|
aiGeneratedRemark: aiResult.remark,
|
||||||
|
aiModelUsed: aiResult.provider,
|
||||||
|
aiConfidenceScore: aiResult.confidence,
|
||||||
|
// Preserve finalRemark if it was already finalized
|
||||||
|
// Only reset if it wasn't finalized yet
|
||||||
|
finalRemark: (existingConclusion as any).finalizedAt ? (existingConclusion as any).finalRemark : null,
|
||||||
|
editedBy: null,
|
||||||
|
isEdited: false,
|
||||||
|
editCount: 0,
|
||||||
|
approvalSummary: {
|
||||||
|
totalLevels: approvalLevels.length,
|
||||||
|
approvedLevels: approvalLevels.filter((l: any) => l.status === 'APPROVED').length,
|
||||||
|
averageTatUsage: approvalLevels.reduce((sum: number, l: any) =>
|
||||||
|
sum + Number(l.tatPercentageUsed || 0), 0) / (approvalLevels.length || 1)
|
||||||
|
},
|
||||||
|
documentSummary: {
|
||||||
|
totalDocuments: documents.length,
|
||||||
|
documentNames: documents.map((d: any) => d.originalFileName || d.fileName)
|
||||||
|
},
|
||||||
|
keyDiscussionPoints: aiResult.keyPoints,
|
||||||
|
generatedAt: new Date(),
|
||||||
|
// Preserve finalizedAt if it was already finalized
|
||||||
|
finalizedAt: (existingConclusion as any).finalizedAt || null
|
||||||
|
} as any);
|
||||||
|
logger.info(`[Approval] Updated existing AI conclusion for request ${level.requestId} with regenerated content (includes new approver)`);
|
||||||
|
} else {
|
||||||
|
// Create new conclusion
|
||||||
|
await ConclusionRemark.create({
|
||||||
|
requestId: level.requestId,
|
||||||
|
aiGeneratedRemark: aiResult.remark,
|
||||||
|
aiModelUsed: aiResult.provider,
|
||||||
|
aiConfidenceScore: aiResult.confidence,
|
||||||
|
finalRemark: null,
|
||||||
|
editedBy: null,
|
||||||
|
isEdited: false,
|
||||||
|
editCount: 0,
|
||||||
|
approvalSummary: {
|
||||||
|
totalLevels: approvalLevels.length,
|
||||||
|
approvedLevels: approvalLevels.filter((l: any) => l.status === 'APPROVED').length,
|
||||||
|
averageTatUsage: approvalLevels.reduce((sum: number, l: any) =>
|
||||||
|
sum + Number(l.tatPercentageUsed || 0), 0) / (approvalLevels.length || 1)
|
||||||
|
},
|
||||||
|
documentSummary: {
|
||||||
|
totalDocuments: documents.length,
|
||||||
|
documentNames: documents.map((d: any) => d.originalFileName || d.fileName)
|
||||||
|
},
|
||||||
|
keyDiscussionPoints: aiResult.keyPoints,
|
||||||
|
generatedAt: new Date(),
|
||||||
|
finalizedAt: null
|
||||||
|
} as any);
|
||||||
|
}
|
||||||
|
|
||||||
|
logAIEvent('response', {
|
||||||
|
requestId: level.requestId,
|
||||||
|
action: 'conclusion_generation_completed',
|
||||||
|
});
|
||||||
|
|
||||||
|
// Log activity
|
||||||
|
activityService.log({
|
||||||
|
requestId: level.requestId,
|
||||||
|
type: 'ai_conclusion_generated',
|
||||||
|
user: { userId: null as any, name: 'System' }, // Use null instead of 'system' for UUID field
|
||||||
|
timestamp: new Date().toISOString(),
|
||||||
|
action: 'AI Conclusion Generated',
|
||||||
|
details: 'AI-powered conclusion remark generated for review by initiator',
|
||||||
|
ipAddress: undefined, // System-generated, no IP
|
||||||
|
userAgent: undefined // System-generated, no user agent
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
// Log why AI generation was skipped
|
||||||
|
if (!aiEnabled) {
|
||||||
|
logger.info(`[Approval] AI features disabled in admin config, skipping conclusion generation for ${level.requestId}`);
|
||||||
|
} else if (!remarkGenerationEnabled) {
|
||||||
|
logger.info(`[Approval] AI remark generation disabled in admin config, skipping for ${level.requestId}`);
|
||||||
|
} else if (!aiService.isAvailable()) {
|
||||||
|
logger.warn(`[Approval] AI service unavailable for ${level.requestId}, skipping conclusion generation`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Auto-generate RequestSummary after final approval (system-level generation)
|
||||||
|
// This makes the summary immediately available when user views the approved request
|
||||||
|
try {
|
||||||
|
const { summaryService } = await import('./summary.service');
|
||||||
|
const summary = await summaryService.createSummary(level.requestId, 'system', {
|
||||||
|
isSystemGeneration: true
|
||||||
|
});
|
||||||
|
logger.info(`[Approval] ✅ Auto-generated summary ${(summary as any).summaryId} for approved request ${level.requestId}`);
|
||||||
|
|
||||||
|
// Log summary generation activity
|
||||||
|
activityService.log({
|
||||||
|
requestId: level.requestId,
|
||||||
|
type: 'summary_generated',
|
||||||
|
user: { userId: null as any, name: 'System' }, // Use null instead of 'system' for UUID field
|
||||||
|
timestamp: new Date().toISOString(),
|
||||||
|
action: 'Summary Auto-Generated',
|
||||||
|
details: 'Request summary auto-generated after final approval',
|
||||||
|
ipAddress: undefined,
|
||||||
|
userAgent: undefined
|
||||||
|
});
|
||||||
|
} catch (summaryError: any) {
|
||||||
|
// Log but don't fail - initiator can regenerate later
|
||||||
|
logger.error(`[Approval] Failed to auto-generate summary for ${level.requestId}:`, summaryError.message);
|
||||||
|
}
|
||||||
|
|
||||||
|
} catch (aiError) {
|
||||||
|
logAIEvent('error', {
|
||||||
|
requestId: level.requestId,
|
||||||
|
action: 'conclusion_generation_failed',
|
||||||
|
error: aiError,
|
||||||
|
});
|
||||||
|
// Silent failure - initiator can write manually
|
||||||
|
|
||||||
|
// Still try to generate summary even if AI conclusion failed
|
||||||
|
try {
|
||||||
|
const { summaryService } = await import('./summary.service');
|
||||||
|
const summary = await summaryService.createSummary(level.requestId, 'system', {
|
||||||
|
isSystemGeneration: true
|
||||||
|
});
|
||||||
|
logger.info(`[Approval] ✅ Auto-generated summary ${(summary as any).summaryId} for approved request ${level.requestId} (without AI conclusion)`);
|
||||||
|
} catch (summaryError: any) {
|
||||||
|
logger.error(`[Approval] Failed to auto-generate summary for ${level.requestId}:`, summaryError.message);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
})().catch(err => {
|
||||||
|
// Catch any unhandled promise rejections
|
||||||
|
logger.error(`[Approval] Unhandled error in background AI generation:`, err);
|
||||||
|
});
|
||||||
|
|
||||||
|
// Notify initiator and all participants (including spectators) about approval
|
||||||
|
// Spectators are CC'd for transparency, similar to email CC
|
||||||
|
if (wf) {
|
||||||
|
const participants = await Participant.findAll({
|
||||||
|
where: { requestId: level.requestId }
|
||||||
|
});
|
||||||
|
const targetUserIds = new Set<string>();
|
||||||
|
targetUserIds.add((wf as any).initiatorId);
|
||||||
|
for (const p of participants as any[]) {
|
||||||
|
targetUserIds.add(p.userId); // Includes spectators
|
||||||
|
}
|
||||||
|
|
||||||
|
// Send notification to initiator about final approval (triggers email)
|
||||||
|
const initiatorId = (wf as any).initiatorId;
|
||||||
|
await notificationService.sendToUsers([initiatorId], {
|
||||||
|
title: `Request Approved - All Approvals Complete`,
|
||||||
|
body: `Your request "${(wf as any).title}" has been fully approved by all approvers. Please review and finalize the conclusion remark to close the request.`,
|
||||||
|
requestNumber: (wf as any).requestNumber,
|
||||||
|
requestId: level.requestId,
|
||||||
|
url: `/request/${(wf as any).requestNumber}`,
|
||||||
|
type: 'approval',
|
||||||
|
priority: 'HIGH',
|
||||||
|
actionRequired: true
|
||||||
|
});
|
||||||
|
|
||||||
|
// Send notification to all participants/spectators (for transparency, no action required)
|
||||||
|
const participantUserIds = Array.from(targetUserIds).filter(id => id !== initiatorId);
|
||||||
|
if (participantUserIds.length > 0) {
|
||||||
|
await notificationService.sendToUsers(participantUserIds, {
|
||||||
|
title: `Request Approved`,
|
||||||
|
body: `Request "${(wf as any).title}" has been fully approved. The initiator will finalize the conclusion remark to close the request.`,
|
||||||
|
requestNumber: (wf as any).requestNumber,
|
||||||
|
requestId: level.requestId,
|
||||||
|
url: `/request/${(wf as any).requestNumber}`,
|
||||||
|
type: 'approval_pending_closure',
|
||||||
|
priority: 'MEDIUM',
|
||||||
|
actionRequired: false
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.info(`[Approval] ✅ Final approval complete for ${level.requestId}. Initiator and ${participants.length} participant(s) notified.`);
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
// Not final - move to next level
|
||||||
|
// Check if workflow is paused - if so, don't advance
|
||||||
|
if ((wf as any).isPaused || (wf as any).status === 'PAUSED') {
|
||||||
|
logger.warn(`[Approval] Cannot advance workflow ${level.requestId} - workflow is paused`);
|
||||||
|
throw new Error('Cannot advance workflow - workflow is currently paused. Please resume the workflow first.');
|
||||||
|
}
|
||||||
|
|
||||||
|
// Find the next PENDING level
|
||||||
|
// Custom workflows use strict sequential ordering (levelNumber + 1) to maintain intended order
|
||||||
|
// This ensures custom workflows work predictably and don't skip levels
|
||||||
|
const currentLevelNumber = level.levelNumber || 0;
|
||||||
|
logger.info(`[Approval] Finding next level after level ${currentLevelNumber} for request ${level.requestId} (Custom workflow)`);
|
||||||
|
|
||||||
|
// Use strict sequential approach for custom workflows
|
||||||
|
const nextLevel = await ApprovalLevel.findOne({
|
||||||
|
where: {
|
||||||
|
requestId: level.requestId,
|
||||||
|
levelNumber: currentLevelNumber + 1
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
if (!nextLevel) {
|
||||||
|
logger.info(`[Approval] Sequential level ${currentLevelNumber + 1} not found for custom workflow - this may be the final approval`);
|
||||||
|
} else if (nextLevel.status !== ApprovalStatus.PENDING) {
|
||||||
|
// Sequential level exists but not PENDING - log warning but proceed
|
||||||
|
logger.warn(`[Approval] Sequential level ${currentLevelNumber + 1} exists but status is ${nextLevel.status}, expected PENDING. Proceeding with sequential level to maintain workflow order.`);
|
||||||
|
}
|
||||||
|
|
||||||
|
const nextLevelNumber = nextLevel ? (nextLevel.levelNumber || 0) : null;
|
||||||
|
|
||||||
|
if (nextLevel) {
|
||||||
|
logger.info(`[Approval] Found next level: ${nextLevelNumber} (${(nextLevel as any).levelName || 'unnamed'}), approver: ${(nextLevel as any).approverName || (nextLevel as any).approverEmail || 'unknown'}, status: ${nextLevel.status}`);
|
||||||
|
} else {
|
||||||
|
logger.info(`[Approval] No next level found after level ${currentLevelNumber} - this may be the final approval`);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (nextLevel) {
|
||||||
|
// Check if next level is paused - if so, don't activate it
|
||||||
|
if ((nextLevel as any).isPaused || (nextLevel as any).status === 'PAUSED') {
|
||||||
|
logger.warn(`[Approval] Cannot activate next level ${nextLevelNumber} - level is paused`);
|
||||||
|
throw new Error('Cannot activate next level - the next approval level is currently paused. Please resume it first.');
|
||||||
|
}
|
||||||
|
|
||||||
|
// Activate next level
|
||||||
|
await nextLevel.update({
|
||||||
|
status: ApprovalStatus.IN_PROGRESS,
|
||||||
|
levelStartTime: now,
|
||||||
|
tatStartTime: now
|
||||||
|
});
|
||||||
|
|
||||||
|
// Schedule TAT jobs for the next level
|
||||||
|
try {
|
||||||
|
// Get workflow priority for TAT calculation
|
||||||
|
const workflowPriority = (wf as any)?.priority || 'STANDARD';
|
||||||
|
|
||||||
|
await tatSchedulerService.scheduleTatJobs(
|
||||||
|
level.requestId,
|
||||||
|
(nextLevel as any).levelId,
|
||||||
|
(nextLevel as any).approverId,
|
||||||
|
Number((nextLevel as any).tatHours),
|
||||||
|
now,
|
||||||
|
workflowPriority // Pass workflow priority (EXPRESS = 24/7, STANDARD = working hours)
|
||||||
|
);
|
||||||
|
logger.info(`[Approval] TAT jobs scheduled for next level ${nextLevelNumber} (Priority: ${workflowPriority})`);
|
||||||
|
} catch (tatError) {
|
||||||
|
logger.error(`[Approval] Failed to schedule TAT jobs for next level:`, tatError);
|
||||||
|
// Don't fail the approval if TAT scheduling fails
|
||||||
|
}
|
||||||
|
|
||||||
|
// Update workflow current level (only if nextLevelNumber is not null)
|
||||||
|
if (nextLevelNumber !== null) {
|
||||||
|
await WorkflowRequest.update(
|
||||||
|
{ currentLevel: nextLevelNumber },
|
||||||
|
{ where: { requestId: level.requestId } }
|
||||||
|
);
|
||||||
|
logger.info(`Approved level ${level.levelNumber}. Activated next level ${nextLevelNumber} for workflow ${level.requestId}`);
|
||||||
|
} else {
|
||||||
|
logger.warn(`Approved level ${level.levelNumber} but no next level found - workflow may be complete`);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Note: Dealer claim-specific logic (Activity Creation, E-Invoice) is handled by DealerClaimApprovalService
|
||||||
|
// This service is for custom workflows only
|
||||||
|
|
||||||
|
// Log approval activity
|
||||||
|
activityService.log({
|
||||||
|
requestId: level.requestId,
|
||||||
|
type: 'approval',
|
||||||
|
user: { userId: level.approverId, name: level.approverName },
|
||||||
|
timestamp: new Date().toISOString(),
|
||||||
|
action: 'Approved',
|
||||||
|
details: `Request approved and forwarded to ${(nextLevel as any).approverName || (nextLevel as any).approverEmail} by ${level.approverName || level.approverEmail}`,
|
||||||
|
ipAddress: requestMetadata?.ipAddress || undefined,
|
||||||
|
userAgent: requestMetadata?.userAgent || undefined
|
||||||
|
});
|
||||||
|
|
||||||
|
// Notify initiator about the approval (triggers email for regular workflows)
|
||||||
|
if (wf) {
|
||||||
|
await notificationService.sendToUsers([(wf as any).initiatorId], {
|
||||||
|
title: `Request Approved - Level ${level.levelNumber}`,
|
||||||
|
body: `Your request "${(wf as any).title}" has been approved by ${level.approverName || level.approverEmail} and forwarded to the next approver.`,
|
||||||
|
requestNumber: (wf as any).requestNumber,
|
||||||
|
requestId: level.requestId,
|
||||||
|
url: `/request/${(wf as any).requestNumber}`,
|
||||||
|
type: 'approval',
|
||||||
|
priority: 'MEDIUM'
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
// Notify next approver
|
||||||
|
if (wf && nextLevel) {
|
||||||
|
// Check if it's an auto-step by checking approverEmail or levelName
|
||||||
|
// Note: Activity Creation, E-Invoice Generation, and Credit Note Confirmation are now activity logs only, not approval steps
|
||||||
|
// These steps are processed automatically and should NOT trigger notifications
|
||||||
|
const isAutoStep = (nextLevel as any).approverEmail === 'system@royalenfield.com'
|
||||||
|
|| (nextLevel as any).approverName === 'System Auto-Process'
|
||||||
|
|| (nextLevel as any).approverId === 'system';
|
||||||
|
|
||||||
|
// IMPORTANT: Skip notifications and assignment logging for system/auto-steps
|
||||||
|
// System steps are any step with system@royalenfield.com
|
||||||
|
// Only send notifications to real users, NOT system processes
|
||||||
|
if (!isAutoStep && (nextLevel as any).approverId && (nextLevel as any).approverId !== 'system') {
|
||||||
|
// Additional checks: ensure approverEmail and approverName are not system-related
|
||||||
|
// This prevents notifications to system accounts even if they pass other checks
|
||||||
|
const approverEmail = (nextLevel as any).approverEmail || '';
|
||||||
|
const approverName = (nextLevel as any).approverName || '';
|
||||||
|
const isSystemEmail = approverEmail.toLowerCase() === 'system@royalenfield.com'
|
||||||
|
|| approverEmail.toLowerCase().includes('system');
|
||||||
|
const isSystemName = approverName.toLowerCase() === 'system auto-process'
|
||||||
|
|| approverName.toLowerCase().includes('system');
|
||||||
|
|
||||||
|
// EXCLUDE all system-related steps from notifications
|
||||||
|
// Only send notifications to real users, NOT system processes
|
||||||
|
if (!isSystemEmail && !isSystemName) {
|
||||||
|
// Send notification to next approver (only for real users, not system processes)
|
||||||
|
// This will send both in-app and email notifications
|
||||||
|
const nextApproverId = (nextLevel as any).approverId;
|
||||||
|
const nextApproverName = (nextLevel as any).approverName || (nextLevel as any).approverEmail || 'approver';
|
||||||
|
|
||||||
|
logger.info(`[Approval] Sending assignment notification to next approver: ${nextApproverName} (${nextApproverId}) at level ${nextLevelNumber} for request ${(wf as any).requestNumber}`);
|
||||||
|
|
||||||
|
await notificationService.sendToUsers([ nextApproverId ], {
|
||||||
|
title: `Action required: ${(wf as any).requestNumber}`,
|
||||||
|
body: `${(wf as any).title}`,
|
||||||
|
requestNumber: (wf as any).requestNumber,
|
||||||
|
requestId: (wf as any).requestId,
|
||||||
|
url: `/request/${(wf as any).requestNumber}`,
|
||||||
|
type: 'assignment',
|
||||||
|
priority: 'HIGH',
|
||||||
|
actionRequired: true
|
||||||
|
});
|
||||||
|
|
||||||
|
logger.info(`[Approval] Assignment notification sent successfully to ${nextApproverName} for level ${nextLevelNumber}`);
|
||||||
|
|
||||||
|
// Log assignment activity for the next approver
|
||||||
|
activityService.log({
|
||||||
|
requestId: level.requestId,
|
||||||
|
type: 'assignment',
|
||||||
|
user: { userId: level.approverId, name: level.approverName },
|
||||||
|
timestamp: new Date().toISOString(),
|
||||||
|
action: 'Assigned to approver',
|
||||||
|
details: `Request assigned to ${nextApproverName} for ${(nextLevel as any).levelName || `level ${nextLevelNumber}`}`,
|
||||||
|
ipAddress: requestMetadata?.ipAddress || undefined,
|
||||||
|
userAgent: requestMetadata?.userAgent || undefined
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
logger.info(`[Approval] Skipping notification for system process: ${approverEmail} at level ${nextLevelNumber}`);
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
logger.info(`[Approval] Skipping notification for auto-step at level ${nextLevelNumber}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Note: Dealer-specific notifications (proposal/completion submissions) are handled by DealerClaimApprovalService
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
// No next level found but not final approver - this shouldn't happen
|
||||||
|
logger.warn(`No next level found for workflow ${level.requestId} after approving level ${level.levelNumber}`);
|
||||||
|
// Use current level number since there's no next level (workflow is complete)
|
||||||
|
await WorkflowRequest.update(
|
||||||
|
{
|
||||||
|
status: WorkflowStatus.APPROVED,
|
||||||
|
closureDate: now,
|
||||||
|
currentLevel: level.levelNumber || 0
|
||||||
|
},
|
||||||
|
{ where: { requestId: level.requestId } }
|
||||||
|
);
|
||||||
|
if (wf) {
|
||||||
|
await notificationService.sendToUsers([ (wf as any).initiatorId ], {
|
||||||
|
title: `Approved: ${(wf as any).requestNumber}`,
|
||||||
|
body: `${(wf as any).title}`,
|
||||||
|
requestNumber: (wf as any).requestNumber,
|
||||||
|
url: `/request/${(wf as any).requestNumber}`
|
||||||
|
});
|
||||||
|
activityService.log({
|
||||||
|
requestId: level.requestId,
|
||||||
|
type: 'approval',
|
||||||
|
user: { userId: level.approverId, name: level.approverName },
|
||||||
|
timestamp: new Date().toISOString(),
|
||||||
|
action: 'Approved',
|
||||||
|
details: `Request approved and finalized by ${level.approverName || level.approverEmail}`,
|
||||||
|
ipAddress: requestMetadata?.ipAddress || undefined,
|
||||||
|
userAgent: requestMetadata?.userAgent || undefined
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else if (action.action === 'REJECT') {
|
||||||
|
// Rejection - mark workflow as REJECTED (closure will happen when initiator finalizes conclusion)
|
||||||
|
await WorkflowRequest.update(
|
||||||
|
{
|
||||||
|
status: WorkflowStatus.REJECTED
|
||||||
|
// Note: closureDate will be set when initiator finalizes the conclusion
|
||||||
|
},
|
||||||
|
{ where: { requestId: level.requestId } }
|
||||||
|
);
|
||||||
|
|
||||||
|
// Mark all pending levels as skipped
|
||||||
|
await ApprovalLevel.update(
|
||||||
|
{
|
||||||
|
status: ApprovalStatus.SKIPPED,
|
||||||
|
levelEndTime: now
|
||||||
|
},
|
||||||
|
{
|
||||||
|
where: {
|
||||||
|
requestId: level.requestId,
|
||||||
|
status: ApprovalStatus.PENDING,
|
||||||
|
levelNumber: { [Op.gt]: level.levelNumber }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
);
|
||||||
|
|
||||||
|
logWorkflowEvent('rejected', level.requestId, {
|
||||||
|
level: level.levelNumber,
|
||||||
|
status: 'REJECTED',
|
||||||
|
message: 'Awaiting closure from initiator',
|
||||||
|
});
|
||||||
|
|
||||||
|
// Log rejection activity first (so it's included in AI context)
|
||||||
|
if (wf) {
|
||||||
|
activityService.log({
|
||||||
|
requestId: level.requestId,
|
||||||
|
type: 'rejection',
|
||||||
|
user: { userId: level.approverId, name: level.approverName },
|
||||||
|
timestamp: new Date().toISOString(),
|
||||||
|
action: 'Rejected',
|
||||||
|
details: `Request rejected by ${level.approverName || level.approverEmail}. Reason: ${action.rejectionReason || action.comments || 'No reason provided'}. Awaiting closure from initiator.`,
|
||||||
|
ipAddress: requestMetadata?.ipAddress || undefined,
|
||||||
|
userAgent: requestMetadata?.userAgent || undefined
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
// Notify initiator and all participants
|
||||||
|
if (wf) {
|
||||||
|
const participants = await Participant.findAll({ where: { requestId: level.requestId } });
|
||||||
|
const targetUserIds = new Set<string>();
|
||||||
|
targetUserIds.add((wf as any).initiatorId);
|
||||||
|
for (const p of participants as any[]) {
|
||||||
|
targetUserIds.add(p.userId);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Send notification to initiator with type 'rejection' to trigger email
|
||||||
|
await notificationService.sendToUsers([(wf as any).initiatorId], {
|
||||||
|
title: `Rejected: ${(wf as any).requestNumber}`,
|
||||||
|
body: `${(wf as any).title}`,
|
||||||
|
requestNumber: (wf as any).requestNumber,
|
||||||
|
requestId: level.requestId,
|
||||||
|
url: `/request/${(wf as any).requestNumber}`,
|
||||||
|
type: 'rejection',
|
||||||
|
priority: 'HIGH',
|
||||||
|
metadata: {
|
||||||
|
rejectionReason: action.rejectionReason || action.comments || 'No reason provided'
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
// Send notification to other participants (spectators) for transparency (no email, just in-app)
|
||||||
|
const participantUserIds = Array.from(targetUserIds).filter(id => id !== (wf as any).initiatorId);
|
||||||
|
if (participantUserIds.length > 0) {
|
||||||
|
await notificationService.sendToUsers(participantUserIds, {
|
||||||
|
title: `Rejected: ${(wf as any).requestNumber}`,
|
||||||
|
body: `Request "${(wf as any).title}" has been rejected.`,
|
||||||
|
requestNumber: (wf as any).requestNumber,
|
||||||
|
requestId: level.requestId,
|
||||||
|
url: `/request/${(wf as any).requestNumber}`,
|
||||||
|
type: 'status_change', // Use status_change to avoid triggering emails for participants
|
||||||
|
priority: 'MEDIUM'
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Generate AI conclusion remark ASYNCHRONOUSLY for rejected requests (similar to approved)
|
||||||
|
// This runs in the background without blocking the rejection response
|
||||||
|
(async () => {
|
||||||
|
try {
|
||||||
|
const { aiService } = await import('./ai.service');
|
||||||
|
const { ConclusionRemark } = await import('@models/index');
|
||||||
|
const { ApprovalLevel } = await import('@models/ApprovalLevel');
|
||||||
|
const { WorkNote } = await import('@models/WorkNote');
|
||||||
|
const { Document } = await import('@models/Document');
|
||||||
|
const { Activity } = await import('@models/Activity');
|
||||||
|
const { getConfigValue } = await import('./configReader.service');
|
||||||
|
|
||||||
|
// Check if AI features and remark generation are enabled in admin config
|
||||||
|
const aiEnabled = (await getConfigValue('AI_ENABLED', 'true'))?.toLowerCase() === 'true';
|
||||||
|
const remarkGenerationEnabled = (await getConfigValue('AI_REMARK_GENERATION_ENABLED', 'true'))?.toLowerCase() === 'true';
|
||||||
|
|
||||||
|
if (!aiEnabled || !remarkGenerationEnabled) {
|
||||||
|
logger.info(`[Approval] AI conclusion generation skipped for rejected request ${level.requestId} (AI disabled)`);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if AI service is available
|
||||||
|
const { aiService: aiSvc } = await import('./ai.service');
|
||||||
|
if (!aiSvc.isAvailable()) {
|
||||||
|
logger.warn(`[Approval] AI service unavailable for rejected request ${level.requestId}`);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Gather context for AI generation (similar to approved flow)
|
||||||
|
const approvalLevels = await ApprovalLevel.findAll({
|
||||||
|
where: { requestId: level.requestId },
|
||||||
|
order: [['levelNumber', 'ASC']]
|
||||||
|
});
|
||||||
|
|
||||||
|
const workNotes = await WorkNote.findAll({
|
||||||
|
where: { requestId: level.requestId },
|
||||||
|
order: [['createdAt', 'ASC']],
|
||||||
|
limit: 20
|
||||||
|
});
|
||||||
|
|
||||||
|
const documents = await Document.findAll({
|
||||||
|
where: { requestId: level.requestId },
|
||||||
|
order: [['uploadedAt', 'DESC']]
|
||||||
|
});
|
||||||
|
|
||||||
|
const activities = await Activity.findAll({
|
||||||
|
where: { requestId: level.requestId },
|
||||||
|
order: [['createdAt', 'ASC']],
|
||||||
|
limit: 50
|
||||||
|
});
|
||||||
|
|
||||||
|
// Build context object (include rejection reason)
|
||||||
|
const context = {
|
||||||
|
requestTitle: (wf as any).title,
|
||||||
|
requestDescription: (wf as any).description,
|
||||||
|
requestNumber: (wf as any).requestNumber,
|
||||||
|
priority: (wf as any).priority,
|
||||||
|
rejectionReason: action.rejectionReason || action.comments || 'No reason provided',
|
||||||
|
rejectedBy: level.approverName || level.approverEmail,
|
||||||
|
approvalFlow: approvalLevels.map((l: any) => {
|
||||||
|
const tatPercentage = l.tatPercentageUsed !== undefined && l.tatPercentageUsed !== null
|
||||||
|
? Number(l.tatPercentageUsed)
|
||||||
|
: (l.elapsedHours && l.tatHours ? (Number(l.elapsedHours) / Number(l.tatHours)) * 100 : 0);
|
||||||
|
return {
|
||||||
|
levelNumber: l.levelNumber,
|
||||||
|
approverName: l.approverName,
|
||||||
|
status: l.status,
|
||||||
|
comments: l.comments,
|
||||||
|
actionDate: l.actionDate,
|
||||||
|
tatHours: Number(l.tatHours || 0),
|
||||||
|
elapsedHours: Number(l.elapsedHours || 0),
|
||||||
|
tatPercentageUsed: tatPercentage
|
||||||
|
};
|
||||||
|
}),
|
||||||
|
workNotes: workNotes.map((note: any) => ({
|
||||||
|
userName: note.userName,
|
||||||
|
message: note.message,
|
||||||
|
createdAt: note.createdAt
|
||||||
|
})),
|
||||||
|
documents: documents.map((doc: any) => ({
|
||||||
|
fileName: doc.originalFileName || doc.fileName,
|
||||||
|
uploadedBy: doc.uploadedBy,
|
||||||
|
uploadedAt: doc.uploadedAt
|
||||||
|
})),
|
||||||
|
activities: activities.map((activity: any) => ({
|
||||||
|
type: activity.activityType,
|
||||||
|
action: activity.activityDescription,
|
||||||
|
details: activity.activityDescription,
|
||||||
|
timestamp: activity.createdAt
|
||||||
|
}))
|
||||||
|
};
|
||||||
|
|
||||||
|
logger.info(`[Approval] Generating AI conclusion for rejected request ${level.requestId}...`);
|
||||||
|
|
||||||
|
// Generate AI conclusion (will adapt to rejection context)
|
||||||
|
const aiResult = await aiSvc.generateConclusionRemark(context);
|
||||||
|
|
||||||
|
// Create or update conclusion remark
|
||||||
|
let conclusionInstance = await ConclusionRemark.findOne({ where: { requestId: level.requestId } });
|
||||||
|
|
||||||
|
const conclusionData = {
|
||||||
|
aiGeneratedRemark: aiResult.remark,
|
||||||
|
aiModelUsed: aiResult.provider,
|
||||||
|
aiConfidenceScore: aiResult.confidence,
|
||||||
|
approvalSummary: {
|
||||||
|
totalLevels: approvalLevels.length,
|
||||||
|
rejectedLevel: level.levelNumber,
|
||||||
|
rejectedBy: level.approverName || level.approverEmail,
|
||||||
|
rejectionReason: action.rejectionReason || action.comments
|
||||||
|
},
|
||||||
|
documentSummary: {
|
||||||
|
totalDocuments: documents.length,
|
||||||
|
documentNames: documents.map((d: any) => d.originalFileName || d.fileName)
|
||||||
|
},
|
||||||
|
keyDiscussionPoints: aiResult.keyPoints,
|
||||||
|
generatedAt: new Date()
|
||||||
|
};
|
||||||
|
|
||||||
|
if (conclusionInstance) {
|
||||||
|
await conclusionInstance.update(conclusionData as any);
|
||||||
|
logger.info(`[Approval] ✅ AI conclusion updated for rejected request ${level.requestId}`);
|
||||||
|
} else {
|
||||||
|
await ConclusionRemark.create({
|
||||||
|
requestId: level.requestId,
|
||||||
|
...conclusionData,
|
||||||
|
finalRemark: null,
|
||||||
|
editedBy: null,
|
||||||
|
isEdited: false,
|
||||||
|
editCount: 0,
|
||||||
|
finalizedAt: null
|
||||||
|
} as any);
|
||||||
|
logger.info(`[Approval] ✅ AI conclusion generated for rejected request ${level.requestId}`);
|
||||||
|
}
|
||||||
|
} catch (error: any) {
|
||||||
|
logger.error(`[Approval] Failed to generate AI conclusion for rejected request ${level.requestId}:`, error);
|
||||||
|
// Don't fail the rejection if AI generation fails
|
||||||
|
}
|
||||||
|
})();
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.info(`Approval level ${levelId} ${action.action.toLowerCase()}ed`);
|
||||||
|
|
||||||
|
// Emit real-time update to all users viewing this request
|
||||||
|
emitToRequestRoom(level.requestId, 'request:updated', {
|
||||||
|
requestId: level.requestId,
|
||||||
|
requestNumber: (wf as any)?.requestNumber,
|
||||||
|
action: action.action,
|
||||||
|
levelNumber: level.levelNumber,
|
||||||
|
timestamp: now.toISOString()
|
||||||
|
});
|
||||||
|
|
||||||
|
return updatedLevel;
|
||||||
|
} catch (error) {
|
||||||
|
logger.error(`Failed to ${action.action.toLowerCase()} level ${levelId}:`, error);
|
||||||
|
throw new Error(`Failed to ${action.action.toLowerCase()} level`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async getCurrentApprovalLevel(requestId: string): Promise<ApprovalLevel | null> {
|
||||||
|
try {
|
||||||
|
return await ApprovalLevel.findOne({
|
||||||
|
where: { requestId, status: ApprovalStatus.PENDING },
|
||||||
|
order: [['levelNumber', 'ASC']]
|
||||||
|
});
|
||||||
|
} catch (error) {
|
||||||
|
logger.error(`Failed to get current approval level for ${requestId}:`, error);
|
||||||
|
throw new Error('Failed to get current approval level');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async getApprovalLevels(requestId: string): Promise<ApprovalLevel[]> {
|
||||||
|
try {
|
||||||
|
return await ApprovalLevel.findAll({
|
||||||
|
where: { requestId },
|
||||||
|
order: [['levelNumber', 'ASC']]
|
||||||
|
});
|
||||||
|
} catch (error) {
|
||||||
|
logger.error(`Failed to get approval levels for ${requestId}:`, error);
|
||||||
|
throw new Error('Failed to get approval levels');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
160
_archive/services/configReader.service.ts
Normal file
160
_archive/services/configReader.service.ts
Normal file
@ -0,0 +1,160 @@
|
|||||||
|
/**
|
||||||
|
* Configuration Reader Service
|
||||||
|
* Reads admin configurations from database for use in backend logic
|
||||||
|
*/
|
||||||
|
|
||||||
|
import { sequelize } from '@config/database';
|
||||||
|
import { QueryTypes } from 'sequelize';
|
||||||
|
import logger from '@utils/logger';
|
||||||
|
|
||||||
|
// Cache configurations in memory for performance
|
||||||
|
let configCache: Map<string, string> = new Map();
|
||||||
|
let cacheExpiry: Date | null = null;
|
||||||
|
const CACHE_DURATION_MS = 5 * 60 * 1000; // 5 minutes
|
||||||
|
|
||||||
|
// Sensitive config keys that should be masked in logs
|
||||||
|
const SENSITIVE_CONFIG_PATTERNS = [
|
||||||
|
'API_KEY', 'SECRET', 'PASSWORD', 'TOKEN', 'CREDENTIAL',
|
||||||
|
'PRIVATE', 'AUTH', 'KEY', 'VAPID'
|
||||||
|
];
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if a config key contains sensitive data
|
||||||
|
*/
|
||||||
|
function isSensitiveConfig(configKey: string): boolean {
|
||||||
|
const upperKey = configKey.toUpperCase();
|
||||||
|
return SENSITIVE_CONFIG_PATTERNS.some(pattern => upperKey.includes(pattern));
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Mask sensitive value for logging (show first 4 and last 2 chars)
|
||||||
|
*/
|
||||||
|
function maskSensitiveValue(value: string): string {
|
||||||
|
if (!value || value.length <= 8) {
|
||||||
|
return '***REDACTED***';
|
||||||
|
}
|
||||||
|
return `${value.substring(0, 4)}****${value.substring(value.length - 2)}`;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get a configuration value from database (with caching)
|
||||||
|
*/
|
||||||
|
export async function getConfigValue(configKey: string, defaultValue: string = ''): Promise<string> {
|
||||||
|
try {
|
||||||
|
// Check cache first
|
||||||
|
if (configCache.has(configKey) && cacheExpiry && new Date() < cacheExpiry) {
|
||||||
|
return configCache.get(configKey)!;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Query database
|
||||||
|
const result = await sequelize.query(`
|
||||||
|
SELECT config_value
|
||||||
|
FROM admin_configurations
|
||||||
|
WHERE config_key = :configKey
|
||||||
|
LIMIT 1
|
||||||
|
`, {
|
||||||
|
replacements: { configKey },
|
||||||
|
type: QueryTypes.SELECT
|
||||||
|
});
|
||||||
|
|
||||||
|
if (result && result.length > 0) {
|
||||||
|
const value = (result[0] as any).config_value;
|
||||||
|
configCache.set(configKey, value);
|
||||||
|
|
||||||
|
// Always update cache expiry when loading from database
|
||||||
|
cacheExpiry = new Date(Date.now() + CACHE_DURATION_MS);
|
||||||
|
|
||||||
|
// Mask sensitive values in logs for security
|
||||||
|
const logValue = isSensitiveConfig(configKey) ? maskSensitiveValue(value) : value;
|
||||||
|
logger.info(`[ConfigReader] Loaded config '${configKey}' = '${logValue}' from database (cached for 5min)`);
|
||||||
|
|
||||||
|
return value;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Mask sensitive default values in logs for security
|
||||||
|
const logDefault = isSensitiveConfig(configKey) ? maskSensitiveValue(defaultValue) : defaultValue;
|
||||||
|
logger.warn(`[ConfigReader] Config key '${configKey}' not found, using default: ${logDefault}`);
|
||||||
|
return defaultValue;
|
||||||
|
} catch (error) {
|
||||||
|
logger.error(`[ConfigReader] Error reading config '${configKey}':`, error);
|
||||||
|
return defaultValue;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get number configuration
|
||||||
|
*/
|
||||||
|
export async function getConfigNumber(configKey: string, defaultValue: number): Promise<number> {
|
||||||
|
const value = await getConfigValue(configKey, String(defaultValue));
|
||||||
|
return parseFloat(value) || defaultValue;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get boolean configuration
|
||||||
|
*/
|
||||||
|
export async function getConfigBoolean(configKey: string, defaultValue: boolean): Promise<boolean> {
|
||||||
|
const value = await getConfigValue(configKey, String(defaultValue));
|
||||||
|
return value === 'true' || value === '1';
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get TAT thresholds from database
|
||||||
|
*/
|
||||||
|
export async function getTatThresholds(): Promise<{ first: number; second: number }> {
|
||||||
|
const first = await getConfigNumber('TAT_REMINDER_THRESHOLD_1', 50);
|
||||||
|
const second = await getConfigNumber('TAT_REMINDER_THRESHOLD_2', 75);
|
||||||
|
|
||||||
|
return { first, second };
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get working hours from database
|
||||||
|
*/
|
||||||
|
export async function getWorkingHours(): Promise<{ startHour: number; endHour: number }> {
|
||||||
|
const startHour = await getConfigNumber('WORK_START_HOUR', 9);
|
||||||
|
const endHour = await getConfigNumber('WORK_END_HOUR', 18);
|
||||||
|
|
||||||
|
return { startHour, endHour };
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Clear configuration cache (call after updating configs)
|
||||||
|
*/
|
||||||
|
export function clearConfigCache(): void {
|
||||||
|
configCache.clear();
|
||||||
|
cacheExpiry = null;
|
||||||
|
logger.info('[ConfigReader] Configuration cache cleared');
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Preload all configurations into cache
|
||||||
|
*/
|
||||||
|
export async function preloadConfigurations(): Promise<void> {
|
||||||
|
try {
|
||||||
|
const results = await sequelize.query(`
|
||||||
|
SELECT config_key, config_value
|
||||||
|
FROM admin_configurations
|
||||||
|
`, { type: QueryTypes.SELECT });
|
||||||
|
|
||||||
|
results.forEach((row: any) => {
|
||||||
|
configCache.set(row.config_key, row.config_value);
|
||||||
|
});
|
||||||
|
|
||||||
|
cacheExpiry = new Date(Date.now() + CACHE_DURATION_MS);
|
||||||
|
logger.info(`[ConfigReader] Preloaded ${results.length} configurations into cache`);
|
||||||
|
} catch (error) {
|
||||||
|
logger.error('[ConfigReader] Error preloading configurations:', error);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get Vertex AI configurations
|
||||||
|
*/
|
||||||
|
export async function getVertexAIConfig(): Promise<{
|
||||||
|
enabled: boolean;
|
||||||
|
}> {
|
||||||
|
const enabled = await getConfigBoolean('AI_ENABLED', true);
|
||||||
|
|
||||||
|
return { enabled };
|
||||||
|
}
|
||||||
|
|
||||||
2767
_archive/services/dashboard.service.ts
Normal file
2767
_archive/services/dashboard.service.ts
Normal file
File diff suppressed because it is too large
Load Diff
3353
_archive/services/dealerClaim.service.ts
Normal file
3353
_archive/services/dealerClaim.service.ts
Normal file
File diff suppressed because it is too large
Load Diff
967
_archive/services/dealerClaimApproval.service.ts
Normal file
967
_archive/services/dealerClaimApproval.service.ts
Normal file
@ -0,0 +1,967 @@
|
|||||||
|
/**
|
||||||
|
* Dealer Claim Approval Service
|
||||||
|
*
|
||||||
|
* Dedicated approval service for dealer claim workflows (CLAIM_MANAGEMENT).
|
||||||
|
* Handles dealer claim-specific logic including:
|
||||||
|
* - Dynamic approver support (additional approvers added between steps)
|
||||||
|
* - Activity Creation processing
|
||||||
|
* - Dealer-specific notifications
|
||||||
|
*
|
||||||
|
* This service is separate from ApprovalService to prevent conflicts with custom workflows.
|
||||||
|
*/
|
||||||
|
|
||||||
|
import { ApprovalLevel } from '@models/ApprovalLevel';
|
||||||
|
import { WorkflowRequest } from '@models/WorkflowRequest';
|
||||||
|
import { User } from '@models/User';
|
||||||
|
import { ApprovalAction } from '../types/approval.types';
|
||||||
|
import { ApprovalStatus, WorkflowStatus } from '../types/common.types';
|
||||||
|
import { calculateTATPercentage } from '@utils/helpers';
|
||||||
|
import { calculateElapsedWorkingHours } from '@utils/tatTimeUtils';
|
||||||
|
import logger from '@utils/logger';
|
||||||
|
import { Op } from 'sequelize';
|
||||||
|
import { notificationMongoService } from './notification.mongo.service';
|
||||||
|
import { activityService } from './activity.service';
|
||||||
|
import { tatSchedulerService } from './tatScheduler.service';
|
||||||
|
import { DealerClaimService } from './dealerClaim.service';
|
||||||
|
import { emitToRequestRoom } from '../realtime/socket';
|
||||||
|
|
||||||
|
export class DealerClaimApprovalService {
|
||||||
|
// Use lazy initialization to avoid circular dependency
|
||||||
|
private getDealerClaimService(): DealerClaimService {
|
||||||
|
return new DealerClaimService();
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Approve a level in a dealer claim workflow
|
||||||
|
* Handles dealer claim-specific logic including dynamic approvers and activity creation
|
||||||
|
*/
|
||||||
|
async approveLevel(
|
||||||
|
levelId: string,
|
||||||
|
action: ApprovalAction,
|
||||||
|
userId: string,
|
||||||
|
requestMetadata?: { ipAddress?: string | null; userAgent?: string | null }
|
||||||
|
): Promise<ApprovalLevel | null> {
|
||||||
|
try {
|
||||||
|
const level = await ApprovalLevel.findByPk(levelId);
|
||||||
|
if (!level) return null;
|
||||||
|
|
||||||
|
// Get workflow to determine priority for working hours calculation
|
||||||
|
const wf = await WorkflowRequest.findByPk(level.requestId);
|
||||||
|
if (!wf) return null;
|
||||||
|
|
||||||
|
// Verify this is a claim management workflow
|
||||||
|
const workflowType = (wf as any)?.workflowType;
|
||||||
|
if (workflowType !== 'CLAIM_MANAGEMENT') {
|
||||||
|
logger.warn(`[DealerClaimApproval] Attempted to use DealerClaimApprovalService for non-claim-management workflow ${level.requestId}. Workflow type: ${workflowType}`);
|
||||||
|
throw new Error('DealerClaimApprovalService can only be used for CLAIM_MANAGEMENT workflows');
|
||||||
|
}
|
||||||
|
|
||||||
|
const priority = ((wf as any)?.priority || 'standard').toString().toLowerCase();
|
||||||
|
const isPaused = (wf as any).isPaused || (level as any).isPaused;
|
||||||
|
|
||||||
|
// If paused, resume automatically when approving/rejecting
|
||||||
|
if (isPaused) {
|
||||||
|
const { pauseService } = await import('./pause.service');
|
||||||
|
try {
|
||||||
|
await pauseService.resumeWorkflow(level.requestId, userId);
|
||||||
|
logger.info(`[DealerClaimApproval] Auto-resumed paused workflow ${level.requestId} when ${action.action === 'APPROVE' ? 'approving' : 'rejecting'}`);
|
||||||
|
} catch (pauseError) {
|
||||||
|
logger.warn(`[DealerClaimApproval] Failed to auto-resume paused workflow:`, pauseError);
|
||||||
|
// Continue with approval/rejection even if resume fails
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const now = new Date();
|
||||||
|
|
||||||
|
// Calculate elapsed hours using working hours logic (with pause handling)
|
||||||
|
const isPausedLevel = (level as any).isPaused;
|
||||||
|
const wasResumed = !isPausedLevel &&
|
||||||
|
(level as any).pauseElapsedHours !== null &&
|
||||||
|
(level as any).pauseElapsedHours !== undefined &&
|
||||||
|
(level as any).pauseResumeDate !== null;
|
||||||
|
|
||||||
|
const pauseInfo = isPausedLevel ? {
|
||||||
|
// Level is currently paused - return frozen elapsed hours at pause time
|
||||||
|
isPaused: true,
|
||||||
|
pausedAt: (level as any).pausedAt,
|
||||||
|
pauseElapsedHours: (level as any).pauseElapsedHours,
|
||||||
|
pauseResumeDate: (level as any).pauseResumeDate
|
||||||
|
} : wasResumed ? {
|
||||||
|
// Level was paused but has been resumed - add pre-pause elapsed hours + time since resume
|
||||||
|
isPaused: false,
|
||||||
|
pausedAt: null,
|
||||||
|
pauseElapsedHours: Number((level as any).pauseElapsedHours), // Pre-pause elapsed hours
|
||||||
|
pauseResumeDate: (level as any).pauseResumeDate // Actual resume timestamp
|
||||||
|
} : undefined;
|
||||||
|
|
||||||
|
const elapsedHours = await calculateElapsedWorkingHours(
|
||||||
|
(level as any).levelStartTime || (level as any).tatStartTime || now,
|
||||||
|
now,
|
||||||
|
priority,
|
||||||
|
pauseInfo
|
||||||
|
);
|
||||||
|
const tatPercentage = calculateTATPercentage(elapsedHours, level.tatHours);
|
||||||
|
|
||||||
|
// Handle rejection
|
||||||
|
if (action.action === 'REJECT') {
|
||||||
|
return await this.handleRejection(level, action, userId, requestMetadata, elapsedHours, tatPercentage, now);
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.info(`[DealerClaimApproval] Approving level ${levelId} with action:`, JSON.stringify(action));
|
||||||
|
|
||||||
|
// Robust comment extraction
|
||||||
|
const approvalComment = action.comments || (action as any).comment || '';
|
||||||
|
|
||||||
|
// Update level status and elapsed time for approval FIRST
|
||||||
|
// Only save snapshot if the update succeeds
|
||||||
|
await level.update({
|
||||||
|
status: ApprovalStatus.APPROVED,
|
||||||
|
actionDate: now,
|
||||||
|
levelEndTime: now,
|
||||||
|
elapsedHours: elapsedHours,
|
||||||
|
tatPercentageUsed: tatPercentage,
|
||||||
|
comments: approvalComment || undefined
|
||||||
|
});
|
||||||
|
|
||||||
|
// Check if this is a dealer submission (proposal or completion) - these have their own snapshot types
|
||||||
|
const levelName = (level.levelName || '').toLowerCase();
|
||||||
|
const isDealerSubmission = levelName.includes('dealer proposal') || levelName.includes('dealer completion');
|
||||||
|
|
||||||
|
// Only save APPROVE snapshot for actual approver actions (not dealer submissions)
|
||||||
|
// Dealer submissions use PROPOSAL/COMPLETION snapshot types instead
|
||||||
|
if (!isDealerSubmission) {
|
||||||
|
try {
|
||||||
|
await this.getDealerClaimService().saveApprovalHistory(
|
||||||
|
level.requestId,
|
||||||
|
level.levelId,
|
||||||
|
level.levelNumber,
|
||||||
|
'APPROVE',
|
||||||
|
approvalComment,
|
||||||
|
undefined,
|
||||||
|
userId
|
||||||
|
);
|
||||||
|
} catch (snapshotError) {
|
||||||
|
// Log error but don't fail the approval - snapshot is for audit, not critical
|
||||||
|
logger.error(`[DealerClaimApproval] Failed to save approval history snapshot (non-critical):`, snapshotError);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Note: We don't save workflow history for approval actions
|
||||||
|
// The approval history (saveApprovalHistory) is sufficient and includes comments
|
||||||
|
// Workflow movement information is included in the APPROVE snapshot's changeReason
|
||||||
|
|
||||||
|
// Check if this is the final approver
|
||||||
|
const allLevels = await ApprovalLevel.findAll({
|
||||||
|
where: { requestId: level.requestId }
|
||||||
|
});
|
||||||
|
const approvedCount = allLevels.filter((l: any) => l.status === ApprovalStatus.APPROVED).length;
|
||||||
|
const isFinalApprover = approvedCount === allLevels.length;
|
||||||
|
|
||||||
|
if (isFinalApprover) {
|
||||||
|
// Final approval - close workflow
|
||||||
|
await WorkflowRequest.update(
|
||||||
|
{
|
||||||
|
status: WorkflowStatus.APPROVED,
|
||||||
|
closureDate: now,
|
||||||
|
currentLevel: level.levelNumber || 0
|
||||||
|
},
|
||||||
|
{ where: { requestId: level.requestId } }
|
||||||
|
);
|
||||||
|
|
||||||
|
// Notify all participants
|
||||||
|
const participants = await import('@models/Participant').then(m => m.Participant.findAll({
|
||||||
|
where: { requestId: level.requestId, isActive: true }
|
||||||
|
}));
|
||||||
|
|
||||||
|
if (participants && participants.length > 0) {
|
||||||
|
const participantIds = participants.map((p: any) => p.userId).filter(Boolean);
|
||||||
|
await notificationService.sendToUsers(participantIds, {
|
||||||
|
title: `Request Approved: ${(wf as any).requestNumber}`,
|
||||||
|
body: `${(wf as any).title}`,
|
||||||
|
requestNumber: (wf as any).requestNumber,
|
||||||
|
requestId: level.requestId,
|
||||||
|
url: `/request/${(wf as any).requestNumber}`,
|
||||||
|
type: 'approval',
|
||||||
|
priority: 'MEDIUM'
|
||||||
|
});
|
||||||
|
logger.info(`[DealerClaimApproval] Final approval complete. ${participants.length} participant(s) notified.`);
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
// Not final - move to next level
|
||||||
|
// Check if workflow is paused - if so, don't advance
|
||||||
|
if ((wf as any).isPaused || (wf as any).status === 'PAUSED') {
|
||||||
|
logger.warn(`[DealerClaimApproval] Cannot advance workflow ${level.requestId} - workflow is paused`);
|
||||||
|
throw new Error('Cannot advance workflow - workflow is currently paused. Please resume the workflow first.');
|
||||||
|
}
|
||||||
|
|
||||||
|
// Find the next PENDING level (supports dynamically added approvers)
|
||||||
|
// Strategy: First try sequential, then find next PENDING level if sequential doesn't exist
|
||||||
|
const currentLevelNumber = level.levelNumber || 0;
|
||||||
|
logger.info(`[DealerClaimApproval] Finding next level after level ${currentLevelNumber} for request ${level.requestId}`);
|
||||||
|
|
||||||
|
// First, try sequential approach
|
||||||
|
let nextLevel = await ApprovalLevel.findOne({
|
||||||
|
where: {
|
||||||
|
requestId: level.requestId,
|
||||||
|
levelNumber: currentLevelNumber + 1
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
// If sequential level doesn't exist, search for next PENDING level
|
||||||
|
// This handles cases where additional approvers are added dynamically between steps
|
||||||
|
if (!nextLevel) {
|
||||||
|
logger.info(`[DealerClaimApproval] Sequential level ${currentLevelNumber + 1} not found, searching for next PENDING level (dynamic approvers)`);
|
||||||
|
nextLevel = await ApprovalLevel.findOne({
|
||||||
|
where: {
|
||||||
|
requestId: level.requestId,
|
||||||
|
levelNumber: { [Op.gt]: currentLevelNumber },
|
||||||
|
status: ApprovalStatus.PENDING
|
||||||
|
},
|
||||||
|
order: [['levelNumber', 'ASC']]
|
||||||
|
});
|
||||||
|
|
||||||
|
if (nextLevel) {
|
||||||
|
logger.info(`[DealerClaimApproval] Using fallback level ${nextLevel.levelNumber} (${(nextLevel as any).levelName || 'unnamed'})`);
|
||||||
|
}
|
||||||
|
} else if (nextLevel.status !== ApprovalStatus.PENDING) {
|
||||||
|
// Sequential level exists but not PENDING - check if it's already approved/rejected
|
||||||
|
if (nextLevel.status === ApprovalStatus.APPROVED || nextLevel.status === ApprovalStatus.REJECTED) {
|
||||||
|
logger.warn(`[DealerClaimApproval] Sequential level ${currentLevelNumber + 1} already ${nextLevel.status}. Skipping activation.`);
|
||||||
|
nextLevel = null; // Don't activate an already completed level
|
||||||
|
} else {
|
||||||
|
// Level exists but in unexpected status - log warning but proceed
|
||||||
|
logger.warn(`[DealerClaimApproval] Sequential level ${currentLevelNumber + 1} exists but status is ${nextLevel.status}, expected PENDING. Proceeding with sequential level.`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const nextLevelNumber = nextLevel ? (nextLevel.levelNumber || 0) : null;
|
||||||
|
|
||||||
|
if (nextLevel) {
|
||||||
|
logger.info(`[DealerClaimApproval] Found next level: ${nextLevelNumber} (${(nextLevel as any).levelName || 'unnamed'}), approver: ${(nextLevel as any).approverName || (nextLevel as any).approverEmail || 'unknown'}, status: ${nextLevel.status}`);
|
||||||
|
} else {
|
||||||
|
logger.info(`[DealerClaimApproval] No next level found after level ${currentLevelNumber} - this may be the final approval`);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (nextLevel) {
|
||||||
|
// Check if next level is paused - if so, don't activate it
|
||||||
|
if ((nextLevel as any).isPaused || (nextLevel as any).status === 'PAUSED') {
|
||||||
|
logger.warn(`[DealerClaimApproval] Cannot activate next level ${nextLevelNumber} - level is paused`);
|
||||||
|
throw new Error('Cannot activate next level - the next approval level is currently paused. Please resume it first.');
|
||||||
|
}
|
||||||
|
|
||||||
|
// Activate next level
|
||||||
|
await nextLevel.update({
|
||||||
|
status: ApprovalStatus.IN_PROGRESS,
|
||||||
|
levelStartTime: now,
|
||||||
|
tatStartTime: now
|
||||||
|
});
|
||||||
|
|
||||||
|
// Schedule TAT jobs for the next level
|
||||||
|
try {
|
||||||
|
const workflowPriority = (wf as any)?.priority || 'STANDARD';
|
||||||
|
|
||||||
|
await tatSchedulerService.scheduleTatJobs(
|
||||||
|
level.requestId,
|
||||||
|
(nextLevel as any).levelId,
|
||||||
|
(nextLevel as any).approverId,
|
||||||
|
Number((nextLevel as any).tatHours),
|
||||||
|
now,
|
||||||
|
workflowPriority
|
||||||
|
);
|
||||||
|
logger.info(`[DealerClaimApproval] TAT jobs scheduled for next level ${nextLevelNumber} (Priority: ${workflowPriority})`);
|
||||||
|
} catch (tatError) {
|
||||||
|
logger.error(`[DealerClaimApproval] Failed to schedule TAT jobs for next level:`, tatError);
|
||||||
|
// Don't fail the approval if TAT scheduling fails
|
||||||
|
}
|
||||||
|
|
||||||
|
// Update workflow current level
|
||||||
|
if (nextLevelNumber !== null) {
|
||||||
|
await WorkflowRequest.update(
|
||||||
|
{ currentLevel: nextLevelNumber },
|
||||||
|
{ where: { requestId: level.requestId } }
|
||||||
|
);
|
||||||
|
|
||||||
|
// Update the APPROVE snapshot's changeReason to include movement information
|
||||||
|
// This ensures the approval snapshot shows both the approval and the movement
|
||||||
|
// We don't create a separate WORKFLOW snapshot for approvals - only APPROVE snapshot
|
||||||
|
try {
|
||||||
|
const { DealerClaimHistory } = await import('@models/DealerClaimHistory');
|
||||||
|
const { SnapshotType } = await import('@models/DealerClaimHistory');
|
||||||
|
|
||||||
|
const approvalHistory = await DealerClaimHistory.findOne({
|
||||||
|
where: {
|
||||||
|
requestId: level.requestId,
|
||||||
|
approvalLevelId: level.levelId,
|
||||||
|
snapshotType: SnapshotType.APPROVE
|
||||||
|
},
|
||||||
|
order: [['createdAt', 'DESC']]
|
||||||
|
});
|
||||||
|
|
||||||
|
if (approvalHistory) {
|
||||||
|
// Use the robust approvalComment from outer scope
|
||||||
|
const updatedChangeReason = approvalComment
|
||||||
|
? `Approved by ${level.approverName || level.approverEmail}, moved to next level (${nextLevelNumber}). Comment: ${approvalComment}`
|
||||||
|
: `Approved by ${level.approverName || level.approverEmail}, moved to next level (${nextLevelNumber})`;
|
||||||
|
|
||||||
|
await approvalHistory.update({
|
||||||
|
changeReason: updatedChangeReason
|
||||||
|
});
|
||||||
|
}
|
||||||
|
} catch (updateError) {
|
||||||
|
// Log error but don't fail - this is just updating the changeReason for better display
|
||||||
|
logger.warn(`[DealerClaimApproval] Failed to update approval history changeReason (non-critical):`, updateError);
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.info(`[DealerClaimApproval] Approved level ${level.levelNumber}. Activated next level ${nextLevelNumber} for workflow ${level.requestId}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Handle dealer claim-specific step processing
|
||||||
|
const currentLevelName = (level.levelName || '').toLowerCase();
|
||||||
|
// Check by levelName first, use levelNumber only as fallback if levelName is missing
|
||||||
|
// This handles cases where additional approvers shift step numbers
|
||||||
|
const hasLevelName = level.levelName && level.levelName.trim() !== '';
|
||||||
|
const isDeptLeadApproval = hasLevelName
|
||||||
|
? currentLevelName.includes('department lead')
|
||||||
|
: (level.levelNumber === 3); // Only use levelNumber if levelName is missing
|
||||||
|
|
||||||
|
const isRequestorClaimApproval = hasLevelName
|
||||||
|
? (currentLevelName.includes('requestor') && (currentLevelName.includes('claim') || currentLevelName.includes('approval')))
|
||||||
|
: (level.levelNumber === 5); // Only use levelNumber if levelName is missing
|
||||||
|
|
||||||
|
if (isDeptLeadApproval) {
|
||||||
|
// Activity Creation is now an activity log only - process it automatically
|
||||||
|
logger.info(`[DealerClaimApproval] Department Lead approved. Processing Activity Creation as activity log.`);
|
||||||
|
try {
|
||||||
|
const dealerClaimService = new DealerClaimService();
|
||||||
|
await dealerClaimService.processActivityCreation(level.requestId);
|
||||||
|
logger.info(`[DealerClaimApproval] Activity Creation activity logged for request ${level.requestId}`);
|
||||||
|
} catch (activityError) {
|
||||||
|
logger.error(`[DealerClaimApproval] Error processing Activity Creation activity for request ${level.requestId}:`, activityError);
|
||||||
|
// Don't fail the Department Lead approval if Activity Creation logging fails
|
||||||
|
}
|
||||||
|
} else if (isRequestorClaimApproval) {
|
||||||
|
// Step 6 (System - E-Invoice Generation) is now an activity log only - process it automatically
|
||||||
|
logger.info(`[DealerClaimApproval] Requestor Claim Approval approved. Triggering DMS push for E-Invoice generation.`);
|
||||||
|
try {
|
||||||
|
// Lazy load DealerClaimService to avoid circular dependency issues during method execution
|
||||||
|
const dealerClaimService = this.getDealerClaimService();
|
||||||
|
await dealerClaimService.updateEInvoiceDetails(level.requestId);
|
||||||
|
logger.info(`[DealerClaimApproval] DMS push initiated for request ${level.requestId}`);
|
||||||
|
} catch (dmsError) {
|
||||||
|
logger.error(`[DealerClaimApproval] Error initiating DMS push for request ${level.requestId}:`, dmsError);
|
||||||
|
// Don't fail the Requestor Claim Approval if DMS push fails
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Log approval activity
|
||||||
|
activityService.log({
|
||||||
|
requestId: level.requestId,
|
||||||
|
type: 'approval',
|
||||||
|
user: { userId: level.approverId, name: level.approverName },
|
||||||
|
timestamp: new Date().toISOString(),
|
||||||
|
action: 'Approved',
|
||||||
|
details: `Request approved and forwarded to ${(nextLevel as any).approverName || (nextLevel as any).approverEmail} by ${level.approverName || level.approverEmail}`,
|
||||||
|
ipAddress: requestMetadata?.ipAddress || undefined,
|
||||||
|
userAgent: requestMetadata?.userAgent || undefined
|
||||||
|
});
|
||||||
|
|
||||||
|
// Notify initiator about the approval
|
||||||
|
// BUT skip this if it's a dealer proposal or dealer completion step - those have special notifications below
|
||||||
|
// Priority: levelName check first, then levelNumber only if levelName is missing
|
||||||
|
const hasLevelNameForApproval = level.levelName && level.levelName.trim() !== '';
|
||||||
|
const levelNameForApproval = hasLevelNameForApproval && level.levelName ? level.levelName.toLowerCase() : '';
|
||||||
|
const isDealerProposalApproval = hasLevelNameForApproval
|
||||||
|
? (levelNameForApproval.includes('dealer') && levelNameForApproval.includes('proposal'))
|
||||||
|
: (level.levelNumber === 1); // Only use levelNumber if levelName is missing
|
||||||
|
const isDealerCompletionApproval = hasLevelNameForApproval
|
||||||
|
? (levelNameForApproval.includes('dealer') && (levelNameForApproval.includes('completion') || levelNameForApproval.includes('documents')))
|
||||||
|
: (level.levelNumber === 5); // Only use levelNumber if levelName is missing
|
||||||
|
|
||||||
|
// Skip sending approval notification to initiator if they are the approver
|
||||||
|
// (they don't need to be notified that they approved their own request)
|
||||||
|
const isApproverInitiator = level.approverId && (wf as any).initiatorId && level.approverId === (wf as any).initiatorId;
|
||||||
|
|
||||||
|
if (wf && !isDealerProposalApproval && !isDealerCompletionApproval && !isApproverInitiator) {
|
||||||
|
await notificationService.sendToUsers([(wf as any).initiatorId], {
|
||||||
|
title: `Request Approved - Level ${level.levelNumber}`,
|
||||||
|
body: `Your request "${(wf as any).title}" has been approved by ${level.approverName || level.approverEmail} and forwarded to the next approver.`,
|
||||||
|
requestNumber: (wf as any).requestNumber,
|
||||||
|
requestId: level.requestId,
|
||||||
|
url: `/request/${(wf as any).requestNumber}`,
|
||||||
|
type: 'approval',
|
||||||
|
priority: 'MEDIUM'
|
||||||
|
});
|
||||||
|
} else if (isApproverInitiator) {
|
||||||
|
logger.info(`[DealerClaimApproval] Skipping approval notification to initiator - they are the approver`);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Notify next approver - ALWAYS send notification when there's a next level
|
||||||
|
if (wf && nextLevel) {
|
||||||
|
const nextApproverId = (nextLevel as any).approverId;
|
||||||
|
const nextApproverEmail = (nextLevel as any).approverEmail || '';
|
||||||
|
const nextApproverName = (nextLevel as any).approverName || nextApproverEmail || 'approver';
|
||||||
|
|
||||||
|
// Check if it's an auto-step or system process
|
||||||
|
const isAutoStep = nextApproverEmail === 'system@royalenfield.com'
|
||||||
|
|| (nextLevel as any).approverName === 'System Auto-Process'
|
||||||
|
|| nextApproverId === 'system';
|
||||||
|
|
||||||
|
const isSystemEmail = nextApproverEmail.toLowerCase() === 'system@royalenfield.com'
|
||||||
|
|| nextApproverEmail.toLowerCase().includes('system');
|
||||||
|
const isSystemName = nextApproverName.toLowerCase() === 'system auto-process'
|
||||||
|
|| nextApproverName.toLowerCase().includes('system');
|
||||||
|
|
||||||
|
// Notify initiator when dealer submits documents (Dealer Proposal or Dealer Completion Documents)
|
||||||
|
// Check this BEFORE sending assignment notification to avoid duplicates
|
||||||
|
// Priority: levelName check first, then levelNumber only if levelName is missing
|
||||||
|
const hasLevelNameForNotification = level.levelName && level.levelName.trim() !== '';
|
||||||
|
const levelNameForNotification = hasLevelNameForNotification && level.levelName ? level.levelName.toLowerCase() : '';
|
||||||
|
const isDealerProposalApproval = hasLevelNameForNotification
|
||||||
|
? (levelNameForNotification.includes('dealer') && levelNameForNotification.includes('proposal'))
|
||||||
|
: (level.levelNumber === 1); // Only use levelNumber if levelName is missing
|
||||||
|
const isDealerCompletionApproval = hasLevelNameForNotification
|
||||||
|
? (levelNameForNotification.includes('dealer') && (levelNameForNotification.includes('completion') || levelNameForNotification.includes('documents')))
|
||||||
|
: (level.levelNumber === 5); // Only use levelNumber if levelName is missing
|
||||||
|
|
||||||
|
// Check if next approver is the initiator (to avoid duplicate notifications)
|
||||||
|
const isNextApproverInitiator = nextApproverId && (wf as any).initiatorId && nextApproverId === (wf as any).initiatorId;
|
||||||
|
|
||||||
|
if (isDealerProposalApproval && (wf as any).initiatorId) {
|
||||||
|
// Get dealer and proposal data for the email template
|
||||||
|
const { DealerClaimDetails } = await import('@models/DealerClaimDetails');
|
||||||
|
const { DealerProposalDetails } = await import('@models/DealerProposalDetails');
|
||||||
|
const { DealerProposalCostItem } = await import('@models/DealerProposalCostItem');
|
||||||
|
|
||||||
|
const claimDetails = await DealerClaimDetails.findOne({ where: { requestId: level.requestId } });
|
||||||
|
const proposalDetails = await DealerProposalDetails.findOne({ where: { requestId: level.requestId } });
|
||||||
|
|
||||||
|
// Get cost items if proposal exists
|
||||||
|
let costBreakup: any[] = [];
|
||||||
|
if (proposalDetails) {
|
||||||
|
const proposalId = (proposalDetails as any).proposalId || (proposalDetails as any).proposal_id;
|
||||||
|
if (proposalId) {
|
||||||
|
const costItems = await DealerProposalCostItem.findAll({
|
||||||
|
where: { proposalId },
|
||||||
|
order: [['itemOrder', 'ASC']]
|
||||||
|
});
|
||||||
|
costBreakup = costItems.map((item: any) => ({
|
||||||
|
description: item.itemDescription || item.description,
|
||||||
|
amount: Number(item.amount) || 0
|
||||||
|
}));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get dealer user
|
||||||
|
const dealerUser = level.approverId ? await User.findByPk(level.approverId) : null;
|
||||||
|
const dealerData = dealerUser ? dealerUser.toJSON() : {
|
||||||
|
userId: level.approverId,
|
||||||
|
email: level.approverEmail || '',
|
||||||
|
displayName: level.approverName || level.approverEmail || 'Dealer'
|
||||||
|
};
|
||||||
|
|
||||||
|
// Get next approver (could be Step 2 - Requestor Evaluation, or an additional approver if one was added between Step 1 and Step 2)
|
||||||
|
// The nextLevel is already found above using dynamic logic that handles additional approvers correctly
|
||||||
|
const nextApproverData = nextLevel ? await User.findByPk((nextLevel as any).approverId) : null;
|
||||||
|
|
||||||
|
// Check if next approver is an additional approver (handles cases where additional approvers are added between Step 1 and Step 2)
|
||||||
|
const nextLevelName = nextLevel ? ((nextLevel as any).levelName || '').toLowerCase() : '';
|
||||||
|
const isNextAdditionalApprover = nextLevelName.includes('additional approver');
|
||||||
|
|
||||||
|
// Send proposal submitted notification with proper type and metadata
|
||||||
|
// This will use the dealerProposalSubmitted template, not the multi-level approval template
|
||||||
|
await notificationService.sendToUsers([(wf as any).initiatorId], {
|
||||||
|
title: 'Proposal Submitted',
|
||||||
|
body: `Dealer ${dealerData.displayName || dealerData.email} has submitted a proposal for your claim request "${(wf as any).title}".`,
|
||||||
|
requestNumber: (wf as any).requestNumber,
|
||||||
|
requestId: (wf as any).requestId,
|
||||||
|
url: `/request/${(wf as any).requestNumber}`,
|
||||||
|
type: 'proposal_submitted',
|
||||||
|
priority: 'MEDIUM',
|
||||||
|
actionRequired: false,
|
||||||
|
metadata: {
|
||||||
|
dealerData: dealerData,
|
||||||
|
proposalData: {
|
||||||
|
totalEstimatedBudget: proposalDetails ? (proposalDetails as any).totalEstimatedBudget : 0,
|
||||||
|
expectedCompletionDate: proposalDetails ? (proposalDetails as any).expectedCompletionDate : undefined,
|
||||||
|
dealerComments: proposalDetails ? (proposalDetails as any).dealerComments : undefined,
|
||||||
|
costBreakup: costBreakup,
|
||||||
|
submittedAt: proposalDetails ? (proposalDetails as any).submittedAt : new Date(),
|
||||||
|
nextApproverIsAdditional: isNextAdditionalApprover,
|
||||||
|
nextApproverIsInitiator: isNextApproverInitiator
|
||||||
|
},
|
||||||
|
nextApproverId: nextApproverData ? nextApproverData.userId : undefined,
|
||||||
|
// Add activity information from claimDetails
|
||||||
|
activityName: claimDetails ? (claimDetails as any).activityName : undefined,
|
||||||
|
activityType: claimDetails ? (claimDetails as any).activityType : undefined
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
logger.info(`[DealerClaimApproval] Sent proposal_submitted notification to initiator for Dealer Proposal Submission. Next approver: ${isNextApproverInitiator ? 'Initiator (self)' : (isNextAdditionalApprover ? 'Additional Approver' : 'Step 2 (Requestor Evaluation)')}`);
|
||||||
|
} else if (isDealerCompletionApproval && (wf as any).initiatorId) {
|
||||||
|
// Get dealer and completion data for the email template
|
||||||
|
const { DealerClaimDetails } = await import('@models/DealerClaimDetails');
|
||||||
|
const { DealerCompletionDetails } = await import('@models/DealerCompletionDetails');
|
||||||
|
const { DealerCompletionExpense } = await import('@models/DealerCompletionExpense');
|
||||||
|
|
||||||
|
const claimDetails = await DealerClaimDetails.findOne({ where: { requestId: level.requestId } });
|
||||||
|
const completionDetails = await DealerCompletionDetails.findOne({ where: { requestId: level.requestId } });
|
||||||
|
|
||||||
|
// Get expense items if completion exists
|
||||||
|
let closedExpenses: any[] = [];
|
||||||
|
if (completionDetails) {
|
||||||
|
const expenses = await DealerCompletionExpense.findAll({
|
||||||
|
where: { requestId: level.requestId },
|
||||||
|
order: [['createdAt', 'ASC']]
|
||||||
|
});
|
||||||
|
closedExpenses = expenses.map((item: any) => ({
|
||||||
|
description: item.description || '',
|
||||||
|
amount: Number(item.amount) || 0
|
||||||
|
}));
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get dealer user
|
||||||
|
const dealerUser = level.approverId ? await User.findByPk(level.approverId) : null;
|
||||||
|
const dealerData = dealerUser ? dealerUser.toJSON() : {
|
||||||
|
userId: level.approverId,
|
||||||
|
email: level.approverEmail || '',
|
||||||
|
displayName: level.approverName || level.approverEmail || 'Dealer'
|
||||||
|
};
|
||||||
|
|
||||||
|
// Get next approver (could be Step 5 - Requestor Claim Approval, or an additional approver if one was added between Step 4 and Step 5)
|
||||||
|
const nextApproverData = nextLevel ? await User.findByPk((nextLevel as any).approverId) : null;
|
||||||
|
|
||||||
|
// Check if next approver is an additional approver (handles cases where additional approvers are added between Step 4 and Step 5)
|
||||||
|
const nextLevelName = nextLevel ? ((nextLevel as any).levelName || '').toLowerCase() : '';
|
||||||
|
const isNextAdditionalApprover = nextLevelName.includes('additional approver');
|
||||||
|
|
||||||
|
// Check if next approver is the initiator (to show appropriate message in email)
|
||||||
|
const isNextApproverInitiator = nextApproverData && (wf as any).initiatorId && nextApproverData.userId === (wf as any).initiatorId;
|
||||||
|
|
||||||
|
// Send completion submitted notification with proper type and metadata
|
||||||
|
// This will use the completionDocumentsSubmitted template, not the multi-level approval template
|
||||||
|
await notificationService.sendToUsers([(wf as any).initiatorId], {
|
||||||
|
title: 'Completion Documents Submitted',
|
||||||
|
body: `Dealer ${dealerData.displayName || dealerData.email} has submitted completion documents for your claim request "${(wf as any).title}".`,
|
||||||
|
requestNumber: (wf as any).requestNumber,
|
||||||
|
requestId: (wf as any).requestId,
|
||||||
|
url: `/request/${(wf as any).requestNumber}`,
|
||||||
|
type: 'completion_submitted',
|
||||||
|
priority: 'MEDIUM',
|
||||||
|
actionRequired: false,
|
||||||
|
metadata: {
|
||||||
|
dealerData: dealerData,
|
||||||
|
completionData: {
|
||||||
|
activityCompletionDate: completionDetails ? (completionDetails as any).activityCompletionDate : undefined,
|
||||||
|
numberOfParticipants: completionDetails ? (completionDetails as any).numberOfParticipants : undefined,
|
||||||
|
totalClosedExpenses: completionDetails ? (completionDetails as any).totalClosedExpenses : 0,
|
||||||
|
closedExpenses: closedExpenses,
|
||||||
|
documentsCount: undefined, // Documents count can be retrieved from documents table if needed
|
||||||
|
submittedAt: completionDetails ? (completionDetails as any).submittedAt : new Date(),
|
||||||
|
nextApproverIsAdditional: isNextAdditionalApprover,
|
||||||
|
nextApproverIsInitiator: isNextApproverInitiator
|
||||||
|
},
|
||||||
|
nextApproverId: nextApproverData ? nextApproverData.userId : undefined
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
logger.info(`[DealerClaimApproval] Sent completion_submitted notification to initiator for Dealer Completion Documents. Next approver: ${isNextAdditionalApprover ? 'Additional Approver' : 'Step 5 (Requestor Claim Approval)'}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Only send assignment notification to next approver if:
|
||||||
|
// 1. It's NOT a dealer proposal/completion step (those have special notifications above)
|
||||||
|
// 2. Next approver is NOT the initiator (to avoid duplicate notifications)
|
||||||
|
// 3. It's not a system/auto step
|
||||||
|
if (!isDealerProposalApproval && !isDealerCompletionApproval && !isNextApproverInitiator) {
|
||||||
|
if (!isAutoStep && !isSystemEmail && !isSystemName && nextApproverId && nextApproverId !== 'system') {
|
||||||
|
try {
|
||||||
|
logger.info(`[DealerClaimApproval] Sending assignment notification to next approver: ${nextApproverName} (${nextApproverId}) at level ${nextLevelNumber} for request ${(wf as any).requestNumber}`);
|
||||||
|
|
||||||
|
await notificationService.sendToUsers([nextApproverId], {
|
||||||
|
title: `Action required: ${(wf as any).requestNumber}`,
|
||||||
|
body: `${(wf as any).title}`,
|
||||||
|
requestNumber: (wf as any).requestNumber,
|
||||||
|
requestId: (wf as any).requestId,
|
||||||
|
url: `/request/${(wf as any).requestNumber}`,
|
||||||
|
type: 'assignment',
|
||||||
|
priority: 'HIGH',
|
||||||
|
actionRequired: true
|
||||||
|
});
|
||||||
|
|
||||||
|
logger.info(`[DealerClaimApproval] ✅ Assignment notification sent successfully to ${nextApproverName} (${nextApproverId}) for level ${nextLevelNumber}`);
|
||||||
|
|
||||||
|
// Log assignment activity for the next approver
|
||||||
|
await activityService.log({
|
||||||
|
requestId: level.requestId,
|
||||||
|
type: 'assignment',
|
||||||
|
user: { userId: level.approverId, name: level.approverName },
|
||||||
|
timestamp: new Date().toISOString(),
|
||||||
|
action: 'Assigned to approver',
|
||||||
|
details: `Request assigned to ${nextApproverName} for ${(nextLevel as any).levelName || `level ${nextLevelNumber}`}`,
|
||||||
|
ipAddress: requestMetadata?.ipAddress || undefined,
|
||||||
|
userAgent: requestMetadata?.userAgent || undefined
|
||||||
|
});
|
||||||
|
} catch (notifError) {
|
||||||
|
logger.error(`[DealerClaimApproval] ❌ Failed to send notification to next approver ${nextApproverId} at level ${nextLevelNumber}:`, notifError);
|
||||||
|
// Don't throw - continue with workflow even if notification fails
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
logger.info(`[DealerClaimApproval] ⚠️ Skipping notification for system/auto-step: ${nextApproverEmail} (${nextApproverId}) at level ${nextLevelNumber}`);
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
if (isDealerProposalApproval || isDealerCompletionApproval) {
|
||||||
|
logger.info(`[DealerClaimApproval] ⚠️ Skipping assignment notification - dealer-specific notification already sent`);
|
||||||
|
}
|
||||||
|
if (isNextApproverInitiator) {
|
||||||
|
logger.info(`[DealerClaimApproval] ⚠️ Skipping assignment notification - next approver is the initiator (already notified)`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
// No next level found but not final approver - this shouldn't happen
|
||||||
|
logger.warn(`[DealerClaimApproval] No next level found for workflow ${level.requestId} after approving level ${level.levelNumber}`);
|
||||||
|
await WorkflowRequest.update(
|
||||||
|
{
|
||||||
|
status: WorkflowStatus.APPROVED,
|
||||||
|
closureDate: now,
|
||||||
|
currentLevel: level.levelNumber || 0
|
||||||
|
},
|
||||||
|
{ where: { requestId: level.requestId } }
|
||||||
|
);
|
||||||
|
if (wf) {
|
||||||
|
await notificationService.sendToUsers([(wf as any).initiatorId], {
|
||||||
|
title: `Approved: ${(wf as any).requestNumber}`,
|
||||||
|
body: `${(wf as any).title}`,
|
||||||
|
requestNumber: (wf as any).requestNumber,
|
||||||
|
requestId: level.requestId,
|
||||||
|
url: `/request/${(wf as any).requestNumber}`,
|
||||||
|
type: 'approval',
|
||||||
|
priority: 'MEDIUM'
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Emit real-time update to all users viewing this request
|
||||||
|
emitToRequestRoom(level.requestId, 'request:updated', {
|
||||||
|
requestId: level.requestId,
|
||||||
|
requestNumber: (wf as any)?.requestNumber,
|
||||||
|
action: action.action,
|
||||||
|
levelNumber: level.levelNumber,
|
||||||
|
timestamp: now.toISOString()
|
||||||
|
});
|
||||||
|
|
||||||
|
logger.info(`[DealerClaimApproval] Approval level ${levelId} ${action.action.toLowerCase()}ed and socket event emitted`);
|
||||||
|
|
||||||
|
return level;
|
||||||
|
} catch (error) {
|
||||||
|
logger.error('[DealerClaimApproval] Error approving level:', error);
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Handle rejection (internal method called from approveLevel)
|
||||||
|
*/
|
||||||
|
private async handleRejection(
|
||||||
|
level: ApprovalLevel,
|
||||||
|
action: ApprovalAction,
|
||||||
|
userId: string,
|
||||||
|
requestMetadata?: { ipAddress?: string | null; userAgent?: string | null },
|
||||||
|
elapsedHours?: number,
|
||||||
|
tatPercentage?: number,
|
||||||
|
now?: Date
|
||||||
|
): Promise<ApprovalLevel | null> {
|
||||||
|
const rejectionNow = now || new Date();
|
||||||
|
const wf = await WorkflowRequest.findByPk(level.requestId);
|
||||||
|
if (!wf) return null;
|
||||||
|
|
||||||
|
// Check if this is the Department Lead approval step (Step 3)
|
||||||
|
// Robust check: check level name for variations and level number as fallback
|
||||||
|
// Default rejection logic: Return to immediately previous approval step
|
||||||
|
logger.info(`[DealerClaimApproval] Rejection for request ${level.requestId} by level ${level.levelNumber}. Finding previous step to return to.`);
|
||||||
|
|
||||||
|
// Save approval history (rejection) BEFORE updating level
|
||||||
|
await this.getDealerClaimService().saveApprovalHistory(
|
||||||
|
level.requestId,
|
||||||
|
level.levelId,
|
||||||
|
level.levelNumber,
|
||||||
|
'REJECT',
|
||||||
|
action.comments || '',
|
||||||
|
action.rejectionReason || undefined,
|
||||||
|
userId
|
||||||
|
);
|
||||||
|
|
||||||
|
// Find all levels to determine previous step
|
||||||
|
const allLevels = await ApprovalLevel.findAll({
|
||||||
|
where: { requestId: level.requestId },
|
||||||
|
order: [['levelNumber', 'ASC']]
|
||||||
|
});
|
||||||
|
|
||||||
|
// Find the immediately previous approval level
|
||||||
|
const currentLevelNumber = level.levelNumber || 0;
|
||||||
|
const previousLevels = allLevels.filter(l => l.levelNumber < currentLevelNumber && l.levelNumber > 0);
|
||||||
|
const previousLevel = previousLevels[previousLevels.length - 1];
|
||||||
|
|
||||||
|
// Update level status - if returning to previous step, set this level to PENDING (reset)
|
||||||
|
// If no previous step (terminal rejection), set to REJECTED
|
||||||
|
const newStatus = previousLevel ? ApprovalStatus.PENDING : ApprovalStatus.REJECTED;
|
||||||
|
|
||||||
|
await level.update({
|
||||||
|
status: newStatus,
|
||||||
|
// If resetting to PENDING, clear action details so it can be acted upon again later
|
||||||
|
actionDate: previousLevel ? null : rejectionNow,
|
||||||
|
levelEndTime: previousLevel ? null : rejectionNow,
|
||||||
|
elapsedHours: previousLevel ? 0 : (elapsedHours || 0),
|
||||||
|
tatPercentageUsed: previousLevel ? 0 : (tatPercentage || 0),
|
||||||
|
comments: previousLevel ? null : (action.comments || action.rejectionReason || undefined)
|
||||||
|
} as any);
|
||||||
|
|
||||||
|
// If no previous level found (this is the first step), close the workflow
|
||||||
|
if (!previousLevel) {
|
||||||
|
logger.info(`[DealerClaimApproval] No previous level found. This is the first step. Closing workflow.`);
|
||||||
|
|
||||||
|
// Capture workflow snapshot for terminal rejection
|
||||||
|
await this.getDealerClaimService().saveWorkflowHistory(
|
||||||
|
level.requestId,
|
||||||
|
`Level ${level.levelNumber} rejected (terminal rejection - no previous step)`,
|
||||||
|
userId,
|
||||||
|
level.levelId,
|
||||||
|
level.levelNumber,
|
||||||
|
level.levelName || undefined
|
||||||
|
);
|
||||||
|
|
||||||
|
// Close workflow FIRST
|
||||||
|
await WorkflowRequest.update(
|
||||||
|
{
|
||||||
|
status: WorkflowStatus.REJECTED,
|
||||||
|
closureDate: rejectionNow
|
||||||
|
},
|
||||||
|
{ where: { requestId: level.requestId } }
|
||||||
|
);
|
||||||
|
|
||||||
|
// Capture workflow snapshot AFTER workflow is closed successfully
|
||||||
|
try {
|
||||||
|
await this.getDealerClaimService().saveWorkflowHistory(
|
||||||
|
level.requestId,
|
||||||
|
`Level ${level.levelNumber} rejected (terminal rejection - no previous step)`,
|
||||||
|
userId,
|
||||||
|
level.levelId,
|
||||||
|
level.levelNumber,
|
||||||
|
level.levelName || undefined
|
||||||
|
);
|
||||||
|
} catch (snapshotError) {
|
||||||
|
// Log error but don't fail the rejection - snapshot is for audit, not critical
|
||||||
|
logger.error(`[DealerClaimApproval] Failed to save workflow history snapshot (non-critical):`, snapshotError);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Log rejection activity (terminal rejection)
|
||||||
|
activityService.log({
|
||||||
|
requestId: level.requestId,
|
||||||
|
type: 'rejection',
|
||||||
|
user: { userId: level.approverId, name: level.approverName },
|
||||||
|
timestamp: rejectionNow.toISOString(),
|
||||||
|
action: 'Rejected',
|
||||||
|
details: `Request rejected by ${level.approverName || level.approverEmail}. Reason: ${action.rejectionReason || action.comments || 'No reason provided'}`,
|
||||||
|
ipAddress: requestMetadata?.ipAddress || undefined,
|
||||||
|
userAgent: requestMetadata?.userAgent || undefined
|
||||||
|
});
|
||||||
|
|
||||||
|
// Notify initiator and participants (workflow is closed)
|
||||||
|
const participants = await import('@models/Participant').then(m => m.Participant.findAll({
|
||||||
|
where: { requestId: level.requestId, isActive: true }
|
||||||
|
}));
|
||||||
|
|
||||||
|
const userIdsToNotify = [(wf as any).initiatorId];
|
||||||
|
if (participants && participants.length > 0) {
|
||||||
|
participants.forEach((p: any) => {
|
||||||
|
if (p.userId && p.userId !== (wf as any).initiatorId) {
|
||||||
|
userIdsToNotify.push(p.userId);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
await notificationService.sendToUsers(userIdsToNotify, {
|
||||||
|
title: `Request Rejected: ${(wf as any).requestNumber}`,
|
||||||
|
body: `${(wf as any).title} - Rejected by ${level.approverName || level.approverEmail}. Reason: ${action.rejectionReason || action.comments || 'No reason provided'}`,
|
||||||
|
requestNumber: (wf as any).requestNumber,
|
||||||
|
requestId: level.requestId,
|
||||||
|
url: `/request/${(wf as any).requestNumber}`,
|
||||||
|
type: 'rejection',
|
||||||
|
priority: 'HIGH'
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
// Return to previous step
|
||||||
|
logger.info(`[DealerClaimApproval] Returning to previous level ${previousLevel.levelNumber} (${previousLevel.levelName || 'unnamed'})`);
|
||||||
|
|
||||||
|
// Reset previous level to IN_PROGRESS so it can be acted upon again
|
||||||
|
await previousLevel.update({
|
||||||
|
status: ApprovalStatus.IN_PROGRESS,
|
||||||
|
levelStartTime: rejectionNow,
|
||||||
|
tatStartTime: rejectionNow,
|
||||||
|
actionDate: undefined,
|
||||||
|
levelEndTime: undefined,
|
||||||
|
comments: undefined,
|
||||||
|
elapsedHours: 0,
|
||||||
|
tatPercentageUsed: 0
|
||||||
|
});
|
||||||
|
|
||||||
|
// Update workflow status to IN_PROGRESS (remains active for rework)
|
||||||
|
// Set currentLevel to previous level
|
||||||
|
await WorkflowRequest.update(
|
||||||
|
{
|
||||||
|
status: WorkflowStatus.PENDING,
|
||||||
|
currentLevel: previousLevel.levelNumber
|
||||||
|
},
|
||||||
|
{ where: { requestId: level.requestId } }
|
||||||
|
);
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
// Log rejection activity (returned to previous step)
|
||||||
|
activityService.log({
|
||||||
|
requestId: level.requestId,
|
||||||
|
type: 'rejection',
|
||||||
|
user: { userId: level.approverId, name: level.approverName },
|
||||||
|
timestamp: rejectionNow.toISOString(),
|
||||||
|
action: 'Returned to Previous Step',
|
||||||
|
details: `Request rejected by ${level.approverName || level.approverEmail} and returned to level ${previousLevel.levelNumber}. Reason: ${action.rejectionReason || action.comments || 'No reason provided'}`,
|
||||||
|
ipAddress: requestMetadata?.ipAddress || undefined,
|
||||||
|
userAgent: requestMetadata?.userAgent || undefined
|
||||||
|
});
|
||||||
|
|
||||||
|
// Notify the approver of the previous level
|
||||||
|
if (previousLevel.approverId) {
|
||||||
|
await notificationService.sendToUsers([previousLevel.approverId], {
|
||||||
|
title: `Request Returned: ${(wf as any).requestNumber}`,
|
||||||
|
body: `Request "${(wf as any).title}" has been returned to your level for revision. Reason: ${action.rejectionReason || action.comments || 'No reason provided'}`,
|
||||||
|
requestNumber: (wf as any).requestNumber,
|
||||||
|
requestId: level.requestId,
|
||||||
|
url: `/request/${(wf as any).requestNumber}`,
|
||||||
|
type: 'assignment',
|
||||||
|
priority: 'HIGH',
|
||||||
|
actionRequired: true
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
// Notify initiator when request is returned (not closed)
|
||||||
|
await notificationService.sendToUsers([(wf as any).initiatorId], {
|
||||||
|
title: `Request Returned: ${(wf as any).requestNumber}`,
|
||||||
|
body: `Request "${(wf as any).title}" has been returned to level ${previousLevel.levelNumber} for revision. Reason: ${action.rejectionReason || action.comments || 'No reason provided'}`,
|
||||||
|
requestNumber: (wf as any).requestNumber,
|
||||||
|
requestId: level.requestId,
|
||||||
|
url: `/request/${(wf as any).requestNumber}`,
|
||||||
|
type: 'rejection',
|
||||||
|
priority: 'HIGH',
|
||||||
|
actionRequired: true
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
// Emit real-time update to all users viewing this request
|
||||||
|
emitToRequestRoom(level.requestId, 'request:updated', {
|
||||||
|
requestId: level.requestId,
|
||||||
|
requestNumber: (wf as any)?.requestNumber,
|
||||||
|
action: 'REJECT',
|
||||||
|
levelNumber: level.levelNumber,
|
||||||
|
timestamp: rejectionNow.toISOString()
|
||||||
|
});
|
||||||
|
|
||||||
|
return level;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Reject a level in a dealer claim workflow (legacy method - kept for backward compatibility)
|
||||||
|
*/
|
||||||
|
async rejectLevel(
|
||||||
|
levelId: string,
|
||||||
|
reason: string,
|
||||||
|
comments: string,
|
||||||
|
userId: string,
|
||||||
|
requestMetadata?: { ipAddress?: string | null; userAgent?: string | null }
|
||||||
|
): Promise<ApprovalLevel | null> {
|
||||||
|
try {
|
||||||
|
const level = await ApprovalLevel.findByPk(levelId);
|
||||||
|
if (!level) return null;
|
||||||
|
|
||||||
|
const wf = await WorkflowRequest.findByPk(level.requestId);
|
||||||
|
if (!wf) return null;
|
||||||
|
|
||||||
|
// Verify this is a claim management workflow
|
||||||
|
const workflowType = (wf as any)?.workflowType;
|
||||||
|
if (workflowType !== 'CLAIM_MANAGEMENT') {
|
||||||
|
logger.warn(`[DealerClaimApproval] Attempted to use DealerClaimApprovalService for non-claim-management workflow ${level.requestId}. Workflow type: ${workflowType}`);
|
||||||
|
throw new Error('DealerClaimApprovalService can only be used for CLAIM_MANAGEMENT workflows');
|
||||||
|
}
|
||||||
|
|
||||||
|
const now = new Date();
|
||||||
|
|
||||||
|
// Calculate elapsed hours
|
||||||
|
const priority = ((wf as any)?.priority || 'standard').toString().toLowerCase();
|
||||||
|
const isPausedLevel = (level as any).isPaused;
|
||||||
|
const wasResumed = !isPausedLevel &&
|
||||||
|
(level as any).pauseElapsedHours !== null &&
|
||||||
|
(level as any).pauseElapsedHours !== undefined &&
|
||||||
|
(level as any).pauseResumeDate !== null;
|
||||||
|
|
||||||
|
const pauseInfo = isPausedLevel ? {
|
||||||
|
// Level is currently paused - return frozen elapsed hours at pause time
|
||||||
|
isPaused: true,
|
||||||
|
pausedAt: (level as any).pausedAt,
|
||||||
|
pauseElapsedHours: (level as any).pauseElapsedHours,
|
||||||
|
pauseResumeDate: (level as any).pauseResumeDate
|
||||||
|
} : wasResumed ? {
|
||||||
|
// Level was paused but has been resumed - add pre-pause elapsed hours + time since resume
|
||||||
|
isPaused: false,
|
||||||
|
pausedAt: null,
|
||||||
|
pauseElapsedHours: Number((level as any).pauseElapsedHours), // Pre-pause elapsed hours
|
||||||
|
pauseResumeDate: (level as any).pauseResumeDate // Actual resume timestamp
|
||||||
|
} : undefined;
|
||||||
|
|
||||||
|
// Use the internal handleRejection method
|
||||||
|
const elapsedHours = await calculateElapsedWorkingHours(
|
||||||
|
(level as any).levelStartTime || (level as any).tatStartTime || now,
|
||||||
|
now,
|
||||||
|
priority,
|
||||||
|
pauseInfo
|
||||||
|
);
|
||||||
|
const tatPercentage = calculateTATPercentage(elapsedHours, level.tatHours);
|
||||||
|
|
||||||
|
return await this.handleRejection(
|
||||||
|
level,
|
||||||
|
{ action: 'REJECT', comments: comments || reason, rejectionReason: reason || comments },
|
||||||
|
userId,
|
||||||
|
requestMetadata,
|
||||||
|
elapsedHours,
|
||||||
|
tatPercentage,
|
||||||
|
now
|
||||||
|
);
|
||||||
|
} catch (error) {
|
||||||
|
logger.error('[DealerClaimApproval] Error rejecting level:', error);
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get current approval level for a request
|
||||||
|
*/
|
||||||
|
async getCurrentApprovalLevel(requestId: string): Promise<ApprovalLevel | null> {
|
||||||
|
const workflow = await WorkflowRequest.findByPk(requestId);
|
||||||
|
if (!workflow) return null;
|
||||||
|
|
||||||
|
const currentLevel = (workflow as any).currentLevel;
|
||||||
|
if (!currentLevel) return null;
|
||||||
|
|
||||||
|
return await ApprovalLevel.findOne({
|
||||||
|
where: { requestId, levelNumber: currentLevel }
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get all approval levels for a request
|
||||||
|
*/
|
||||||
|
async getApprovalLevels(requestId: string): Promise<ApprovalLevel[]> {
|
||||||
|
return await ApprovalLevel.findAll({
|
||||||
|
where: { requestId },
|
||||||
|
order: [['levelNumber', 'ASC']]
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
535
_archive/services/dmsWebhook.service.ts
Normal file
535
_archive/services/dmsWebhook.service.ts
Normal file
@ -0,0 +1,535 @@
|
|||||||
|
import { Request } from 'express';
|
||||||
|
import { ClaimInvoice } from '../models/ClaimInvoice';
|
||||||
|
import { ClaimCreditNote } from '../models/ClaimCreditNote';
|
||||||
|
import { WorkflowRequest } from '../models/WorkflowRequest';
|
||||||
|
import { ApprovalLevel } from '../models/ApprovalLevel';
|
||||||
|
import { DealerClaimDetails } from '../models/DealerClaimDetails';
|
||||||
|
import { User } from '../models/User';
|
||||||
|
import { ApprovalService } from './approval.service';
|
||||||
|
import logger from '../utils/logger';
|
||||||
|
import crypto from 'crypto';
|
||||||
|
import { activityService } from './activity.service';
|
||||||
|
import { notificationService } from './notification.service';
|
||||||
|
|
||||||
|
/**
|
||||||
|
* DMS Webhook Service
|
||||||
|
* Handles processing of webhook callbacks from DMS system
|
||||||
|
*/
|
||||||
|
export class DMSWebhookService {
|
||||||
|
private webhookSecret: string;
|
||||||
|
private approvalService: ApprovalService;
|
||||||
|
|
||||||
|
constructor() {
|
||||||
|
this.webhookSecret = process.env.DMS_WEBHOOK_SECRET || '';
|
||||||
|
this.approvalService = new ApprovalService();
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Validate webhook signature for security
|
||||||
|
* DMS should send a signature in the header that we can verify
|
||||||
|
*/
|
||||||
|
async validateWebhookSignature(req: Request): Promise<boolean> {
|
||||||
|
// If webhook secret is not configured, skip validation (for development)
|
||||||
|
if (!this.webhookSecret) {
|
||||||
|
logger.warn('[DMSWebhook] Webhook secret not configured, skipping signature validation');
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
const signature = req.headers['x-dms-signature'] as string;
|
||||||
|
if (!signature) {
|
||||||
|
logger.warn('[DMSWebhook] Missing webhook signature in header');
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create HMAC hash of the request body
|
||||||
|
const body = JSON.stringify(req.body);
|
||||||
|
const expectedSignature = crypto
|
||||||
|
.createHmac('sha256', this.webhookSecret)
|
||||||
|
.update(body)
|
||||||
|
.digest('hex');
|
||||||
|
|
||||||
|
// Compare signatures (use constant-time comparison to prevent timing attacks)
|
||||||
|
const isValid = crypto.timingSafeEqual(
|
||||||
|
Buffer.from(signature),
|
||||||
|
Buffer.from(expectedSignature)
|
||||||
|
);
|
||||||
|
|
||||||
|
if (!isValid) {
|
||||||
|
logger.warn('[DMSWebhook] Invalid webhook signature');
|
||||||
|
}
|
||||||
|
|
||||||
|
return isValid;
|
||||||
|
} catch (error) {
|
||||||
|
logger.error('[DMSWebhook] Error validating webhook signature:', error);
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Process invoice generation webhook from DMS
|
||||||
|
*/
|
||||||
|
async processInvoiceWebhook(payload: any): Promise<{
|
||||||
|
success: boolean;
|
||||||
|
invoiceNumber?: string;
|
||||||
|
error?: string;
|
||||||
|
}> {
|
||||||
|
try {
|
||||||
|
// Validate required fields
|
||||||
|
const requiredFields = ['request_number', 'document_no', 'document_type'];
|
||||||
|
for (const field of requiredFields) {
|
||||||
|
if (!payload[field]) {
|
||||||
|
return {
|
||||||
|
success: false,
|
||||||
|
error: `Missing required field: ${field}`,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Find workflow request by request number
|
||||||
|
const request = await WorkflowRequest.findOne({
|
||||||
|
where: {
|
||||||
|
requestNumber: payload.request_number,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
if (!request) {
|
||||||
|
return {
|
||||||
|
success: false,
|
||||||
|
error: `Request not found: ${payload.request_number}`,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
// Find or create invoice record
|
||||||
|
let invoice = await ClaimInvoice.findOne({
|
||||||
|
where: { requestId: request.requestId },
|
||||||
|
});
|
||||||
|
|
||||||
|
// Create invoice if it doesn't exist (new flow: webhook creates invoice)
|
||||||
|
if (!invoice) {
|
||||||
|
logger.info('[DMSWebhook] Invoice record not found, creating new invoice from webhook', {
|
||||||
|
requestNumber: payload.request_number,
|
||||||
|
});
|
||||||
|
|
||||||
|
invoice = await ClaimInvoice.create({
|
||||||
|
requestId: request.requestId,
|
||||||
|
invoiceNumber: payload.document_no,
|
||||||
|
dmsNumber: payload.document_no,
|
||||||
|
invoiceDate: payload.document_date ? new Date(payload.document_date) : new Date(),
|
||||||
|
amount: payload.total_amount || payload.claim_amount,
|
||||||
|
status: 'GENERATED',
|
||||||
|
generatedAt: new Date(),
|
||||||
|
invoiceFilePath: payload.invoice_file_path || null,
|
||||||
|
errorMessage: payload.error_message || null,
|
||||||
|
description: this.buildInvoiceDescription(payload),
|
||||||
|
});
|
||||||
|
|
||||||
|
logger.info('[DMSWebhook] Invoice created successfully from webhook', {
|
||||||
|
requestNumber: payload.request_number,
|
||||||
|
invoiceNumber: payload.document_no,
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
// Update existing invoice with DMS response data
|
||||||
|
await invoice.update({
|
||||||
|
invoiceNumber: payload.document_no,
|
||||||
|
dmsNumber: payload.document_no, // DMS document number
|
||||||
|
invoiceDate: payload.document_date ? new Date(payload.document_date) : new Date(),
|
||||||
|
amount: payload.total_amount || payload.claim_amount,
|
||||||
|
status: 'GENERATED',
|
||||||
|
generatedAt: new Date(),
|
||||||
|
invoiceFilePath: payload.invoice_file_path || null,
|
||||||
|
errorMessage: payload.error_message || null,
|
||||||
|
// Store additional DMS data in description or separate fields if needed
|
||||||
|
description: this.buildInvoiceDescription(payload),
|
||||||
|
});
|
||||||
|
|
||||||
|
logger.info('[DMSWebhook] Invoice updated successfully', {
|
||||||
|
requestNumber: payload.request_number,
|
||||||
|
invoiceNumber: payload.document_no,
|
||||||
|
irnNo: payload.irn_no,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
// Auto-approve Step 7 and move to Step 8
|
||||||
|
await this.logEInvoiceGenerationActivity(request.requestId, payload.request_number);
|
||||||
|
|
||||||
|
return {
|
||||||
|
success: true,
|
||||||
|
invoiceNumber: payload.document_no,
|
||||||
|
};
|
||||||
|
} catch (error) {
|
||||||
|
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
|
||||||
|
logger.error('[DMSWebhook] Error processing invoice webhook:', error);
|
||||||
|
return {
|
||||||
|
success: false,
|
||||||
|
error: errorMessage,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Process credit note generation webhook from DMS
|
||||||
|
*/
|
||||||
|
async processCreditNoteWebhook(payload: any): Promise<{
|
||||||
|
success: boolean;
|
||||||
|
creditNoteNumber?: string;
|
||||||
|
error?: string;
|
||||||
|
}> {
|
||||||
|
try {
|
||||||
|
// Validate required fields
|
||||||
|
const requiredFields = ['request_number', 'document_no', 'document_type'];
|
||||||
|
for (const field of requiredFields) {
|
||||||
|
if (!payload[field]) {
|
||||||
|
return {
|
||||||
|
success: false,
|
||||||
|
error: `Missing required field: ${field}`,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Find workflow request by request number
|
||||||
|
const request = await WorkflowRequest.findOne({
|
||||||
|
where: {
|
||||||
|
requestNumber: payload.request_number,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
if (!request) {
|
||||||
|
return {
|
||||||
|
success: false,
|
||||||
|
error: `Request not found: ${payload.request_number}`,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
// Find invoice to link credit note (optional - credit note can exist without invoice)
|
||||||
|
const invoice = await ClaimInvoice.findOne({
|
||||||
|
where: { requestId: request.requestId },
|
||||||
|
});
|
||||||
|
|
||||||
|
// Find or create credit note record
|
||||||
|
let creditNote = await ClaimCreditNote.findOne({
|
||||||
|
where: { requestId: request.requestId },
|
||||||
|
});
|
||||||
|
|
||||||
|
// Create credit note if it doesn't exist (new flow: webhook creates credit note)
|
||||||
|
if (!creditNote) {
|
||||||
|
logger.info('[DMSWebhook] Credit note record not found, creating new credit note from webhook', {
|
||||||
|
requestNumber: payload.request_number,
|
||||||
|
hasInvoice: !!invoice,
|
||||||
|
});
|
||||||
|
|
||||||
|
creditNote = await ClaimCreditNote.create({
|
||||||
|
requestId: request.requestId,
|
||||||
|
invoiceId: invoice?.invoiceId || undefined, // Allow undefined if no invoice exists
|
||||||
|
creditNoteNumber: payload.document_no,
|
||||||
|
creditNoteDate: payload.document_date ? new Date(payload.document_date) : new Date(),
|
||||||
|
creditNoteAmount: payload.total_amount || payload.credit_amount,
|
||||||
|
sapDocumentNumber: payload.sap_credit_note_no || null,
|
||||||
|
status: 'CONFIRMED',
|
||||||
|
confirmedAt: new Date(),
|
||||||
|
creditNoteFilePath: payload.credit_note_file_path || null,
|
||||||
|
errorMessage: payload.error_message || null,
|
||||||
|
description: this.buildCreditNoteDescription(payload),
|
||||||
|
});
|
||||||
|
|
||||||
|
logger.info('[DMSWebhook] Credit note created successfully from webhook', {
|
||||||
|
requestNumber: payload.request_number,
|
||||||
|
creditNoteNumber: payload.document_no,
|
||||||
|
hasInvoice: !!invoice,
|
||||||
|
});
|
||||||
|
|
||||||
|
// Log activity and notify initiator
|
||||||
|
await this.logCreditNoteCreationActivity(
|
||||||
|
request.requestId,
|
||||||
|
payload.request_number,
|
||||||
|
payload.document_no,
|
||||||
|
creditNote.creditNoteAmount || payload.total_amount || payload.credit_amount
|
||||||
|
);
|
||||||
|
} else {
|
||||||
|
// Update existing credit note with DMS response data
|
||||||
|
await creditNote.update({
|
||||||
|
invoiceId: invoice?.invoiceId || creditNote.invoiceId, // Preserve existing invoiceId if no invoice found
|
||||||
|
creditNoteNumber: payload.document_no,
|
||||||
|
creditNoteDate: payload.document_date ? new Date(payload.document_date) : new Date(),
|
||||||
|
creditNoteAmount: payload.total_amount || payload.credit_amount,
|
||||||
|
sapDocumentNumber: payload.sap_credit_note_no || null,
|
||||||
|
status: 'CONFIRMED',
|
||||||
|
confirmedAt: new Date(),
|
||||||
|
creditNoteFilePath: payload.credit_note_file_path || null,
|
||||||
|
errorMessage: payload.error_message || null,
|
||||||
|
description: this.buildCreditNoteDescription(payload),
|
||||||
|
});
|
||||||
|
|
||||||
|
logger.info('[DMSWebhook] Credit note updated successfully', {
|
||||||
|
requestNumber: payload.request_number,
|
||||||
|
creditNoteNumber: payload.document_no,
|
||||||
|
sapCreditNoteNo: payload.sap_credit_note_no,
|
||||||
|
irnNo: payload.irn_no,
|
||||||
|
hasInvoice: !!invoice,
|
||||||
|
});
|
||||||
|
|
||||||
|
// Log activity and notify initiator for updated credit note
|
||||||
|
await this.logCreditNoteCreationActivity(
|
||||||
|
request.requestId,
|
||||||
|
payload.request_number,
|
||||||
|
payload.document_no,
|
||||||
|
creditNote.creditNoteAmount || payload.total_amount || payload.credit_amount
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
success: true,
|
||||||
|
creditNoteNumber: payload.document_no,
|
||||||
|
};
|
||||||
|
} catch (error) {
|
||||||
|
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
|
||||||
|
logger.error('[DMSWebhook] Error processing credit note webhook:', error);
|
||||||
|
return {
|
||||||
|
success: false,
|
||||||
|
error: errorMessage,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Build invoice description from DMS payload
|
||||||
|
*/
|
||||||
|
private buildInvoiceDescription(payload: any): string {
|
||||||
|
const parts: string[] = [];
|
||||||
|
|
||||||
|
if (payload.irn_no) {
|
||||||
|
parts.push(`IRN: ${payload.irn_no}`);
|
||||||
|
}
|
||||||
|
if (payload.item_code_no) {
|
||||||
|
parts.push(`Item Code: ${payload.item_code_no}`);
|
||||||
|
}
|
||||||
|
if (payload.hsn_sac_code) {
|
||||||
|
parts.push(`HSN/SAC: ${payload.hsn_sac_code}`);
|
||||||
|
}
|
||||||
|
if (payload.cgst_amount || payload.sgst_amount || payload.igst_amount) {
|
||||||
|
parts.push(`GST - CGST: ${payload.cgst_amount || 0}, SGST: ${payload.sgst_amount || 0}, IGST: ${payload.igst_amount || 0}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
return parts.length > 0 ? parts.join(' | ') : '';
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Build credit note description from DMS payload
|
||||||
|
*/
|
||||||
|
private buildCreditNoteDescription(payload: any): string {
|
||||||
|
const parts: string[] = [];
|
||||||
|
|
||||||
|
if (payload.irn_no) {
|
||||||
|
parts.push(`IRN: ${payload.irn_no}`);
|
||||||
|
}
|
||||||
|
if (payload.sap_credit_note_no) {
|
||||||
|
parts.push(`SAP CN: ${payload.sap_credit_note_no}`);
|
||||||
|
}
|
||||||
|
if (payload.credit_type) {
|
||||||
|
parts.push(`Credit Type: ${payload.credit_type}`);
|
||||||
|
}
|
||||||
|
if (payload.item_code_no) {
|
||||||
|
parts.push(`Item Code: ${payload.item_code_no}`);
|
||||||
|
}
|
||||||
|
if (payload.hsn_sac_code) {
|
||||||
|
parts.push(`HSN/SAC: ${payload.hsn_sac_code}`);
|
||||||
|
}
|
||||||
|
if (payload.cgst_amount || payload.sgst_amount || payload.igst_amount) {
|
||||||
|
parts.push(`GST - CGST: ${payload.cgst_amount || 0}, SGST: ${payload.sgst_amount || 0}, IGST: ${payload.igst_amount || 0}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
return parts.length > 0 ? parts.join(' | ') : '';
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Log Credit Note Creation as activity and notify initiator
|
||||||
|
* This is called after credit note is created/updated from DMS webhook
|
||||||
|
*/
|
||||||
|
private async logCreditNoteCreationActivity(
|
||||||
|
requestId: string,
|
||||||
|
requestNumber: string,
|
||||||
|
creditNoteNumber: string,
|
||||||
|
creditNoteAmount: number
|
||||||
|
): Promise<void> {
|
||||||
|
try {
|
||||||
|
// Check if this is a claim management workflow
|
||||||
|
const request = await WorkflowRequest.findByPk(requestId);
|
||||||
|
if (!request) {
|
||||||
|
logger.warn('[DMSWebhook] Request not found for credit note activity logging', { requestId });
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const workflowType = (request as any).workflowType;
|
||||||
|
if (workflowType !== 'CLAIM_MANAGEMENT') {
|
||||||
|
logger.info('[DMSWebhook] Not a claim management workflow, skipping credit note activity logging', {
|
||||||
|
requestId,
|
||||||
|
workflowType,
|
||||||
|
});
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const initiatorId = (request as any).initiatorId;
|
||||||
|
if (!initiatorId) {
|
||||||
|
logger.warn('[DMSWebhook] Initiator ID not found for credit note notification', { requestId });
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Log activity
|
||||||
|
await activityService.log({
|
||||||
|
requestId,
|
||||||
|
type: 'status_change',
|
||||||
|
user: undefined, // System event (no user means it's a system event)
|
||||||
|
timestamp: new Date().toISOString(),
|
||||||
|
action: 'Credit Note Generated',
|
||||||
|
details: `Credit note generated from DMS. Credit Note Number: ${creditNoteNumber}. Credit Note Amount: ₹${creditNoteAmount || 0}. Request: ${requestNumber}`,
|
||||||
|
category: 'credit_note',
|
||||||
|
severity: 'INFO',
|
||||||
|
});
|
||||||
|
|
||||||
|
logger.info('[DMSWebhook] Credit note activity logged successfully', {
|
||||||
|
requestId,
|
||||||
|
requestNumber,
|
||||||
|
creditNoteNumber,
|
||||||
|
});
|
||||||
|
|
||||||
|
// Get dealer information from claim details
|
||||||
|
const claimDetails = await DealerClaimDetails.findOne({
|
||||||
|
where: { requestId }
|
||||||
|
});
|
||||||
|
|
||||||
|
let dealerUserId: string | null = null;
|
||||||
|
if (claimDetails?.dealerEmail) {
|
||||||
|
const dealerUser = await User.findOne({
|
||||||
|
where: { email: claimDetails.dealerEmail.toLowerCase() },
|
||||||
|
attributes: ['userId'],
|
||||||
|
});
|
||||||
|
dealerUserId = dealerUser?.userId || null;
|
||||||
|
|
||||||
|
if (dealerUserId) {
|
||||||
|
logger.info('[DMSWebhook] Found dealer user for notification', {
|
||||||
|
requestId,
|
||||||
|
dealerEmail: claimDetails.dealerEmail,
|
||||||
|
dealerUserId,
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
logger.warn('[DMSWebhook] Dealer email found but user not found in system', {
|
||||||
|
requestId,
|
||||||
|
dealerEmail: claimDetails.dealerEmail,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
logger.info('[DMSWebhook] No dealer email found in claim details', { requestId });
|
||||||
|
}
|
||||||
|
|
||||||
|
// Send notification to initiator
|
||||||
|
await notificationService.sendToUsers([initiatorId], {
|
||||||
|
title: 'Credit Note Generated',
|
||||||
|
body: `Credit note ${creditNoteNumber} has been generated for request ${requestNumber}. Amount: ₹${creditNoteAmount || 0}`,
|
||||||
|
requestId,
|
||||||
|
requestNumber,
|
||||||
|
url: `/request/${requestNumber}`,
|
||||||
|
type: 'status_change',
|
||||||
|
priority: 'MEDIUM',
|
||||||
|
actionRequired: false,
|
||||||
|
metadata: {
|
||||||
|
creditNoteNumber,
|
||||||
|
creditNoteAmount,
|
||||||
|
source: 'dms_webhook',
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
logger.info('[DMSWebhook] Credit note notification sent to initiator', {
|
||||||
|
requestId,
|
||||||
|
requestNumber,
|
||||||
|
initiatorId,
|
||||||
|
creditNoteNumber,
|
||||||
|
});
|
||||||
|
|
||||||
|
// Send notification to dealer if dealer user exists
|
||||||
|
if (dealerUserId) {
|
||||||
|
await notificationService.sendToUsers([dealerUserId], {
|
||||||
|
title: 'Credit Note Generated',
|
||||||
|
body: `Credit note ${creditNoteNumber} has been generated for your claim request ${requestNumber}. Amount: ₹${creditNoteAmount || 0}`,
|
||||||
|
requestId,
|
||||||
|
requestNumber,
|
||||||
|
url: `/request/${requestNumber}`,
|
||||||
|
type: 'status_change',
|
||||||
|
priority: 'MEDIUM',
|
||||||
|
actionRequired: false,
|
||||||
|
metadata: {
|
||||||
|
creditNoteNumber,
|
||||||
|
creditNoteAmount,
|
||||||
|
source: 'dms_webhook',
|
||||||
|
recipient: 'dealer',
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
logger.info('[DMSWebhook] Credit note notification sent to dealer', {
|
||||||
|
requestId,
|
||||||
|
requestNumber,
|
||||||
|
dealerUserId,
|
||||||
|
dealerEmail: claimDetails?.dealerEmail,
|
||||||
|
creditNoteNumber,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
|
||||||
|
logger.error('[DMSWebhook] Error logging credit note activity:', {
|
||||||
|
requestId,
|
||||||
|
requestNumber,
|
||||||
|
error: errorMessage,
|
||||||
|
});
|
||||||
|
// Don't throw error - webhook processing should continue even if activity/notification fails
|
||||||
|
// The credit note is already created/updated, which is the primary goal
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Log E-Invoice Generation as activity (no longer an approval step)
|
||||||
|
* This is called after invoice is created/updated from DMS webhook
|
||||||
|
*/
|
||||||
|
private async logEInvoiceGenerationActivity(requestId: string, requestNumber: string): Promise<void> {
|
||||||
|
try {
|
||||||
|
// Check if this is a claim management workflow
|
||||||
|
const request = await WorkflowRequest.findByPk(requestId);
|
||||||
|
if (!request) {
|
||||||
|
logger.warn('[DMSWebhook] Request not found for Step 7 auto-approval', { requestId });
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const workflowType = (request as any).workflowType;
|
||||||
|
if (workflowType !== 'CLAIM_MANAGEMENT') {
|
||||||
|
logger.info('[DMSWebhook] Not a claim management workflow, skipping Step 7 auto-approval', {
|
||||||
|
requestId,
|
||||||
|
workflowType,
|
||||||
|
});
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// E-Invoice Generation is now an activity log only, not an approval step
|
||||||
|
// Log the activity using the dealerClaimService
|
||||||
|
const { DealerClaimService } = await import('./dealerClaim.service');
|
||||||
|
const dealerClaimService = new DealerClaimService();
|
||||||
|
const invoice = await ClaimInvoice.findOne({ where: { requestId } });
|
||||||
|
const invoiceNumber = invoice?.invoiceNumber || 'N/A';
|
||||||
|
|
||||||
|
await dealerClaimService.logEInvoiceGenerationActivity(requestId, invoiceNumber);
|
||||||
|
|
||||||
|
logger.info('[DMSWebhook] E-Invoice Generation activity logged successfully', {
|
||||||
|
requestId,
|
||||||
|
requestNumber,
|
||||||
|
invoiceNumber,
|
||||||
|
});
|
||||||
|
} catch (error) {
|
||||||
|
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
|
||||||
|
logger.error('[DMSWebhook] Error logging E-Invoice Generation activity:', {
|
||||||
|
requestId,
|
||||||
|
requestNumber,
|
||||||
|
error: errorMessage,
|
||||||
|
});
|
||||||
|
// Don't throw error - webhook processing should continue even if activity logging fails
|
||||||
|
// The invoice is already created/updated, which is the primary goal
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
221
_archive/services/holiday.service.ts
Normal file
221
_archive/services/holiday.service.ts
Normal file
@ -0,0 +1,221 @@
|
|||||||
|
import { Holiday, HolidayType } from '@models/Holiday';
|
||||||
|
import { Op } from 'sequelize';
|
||||||
|
import logger from '@utils/logger';
|
||||||
|
import dayjs from 'dayjs';
|
||||||
|
|
||||||
|
export class HolidayService {
|
||||||
|
/**
|
||||||
|
* Get all holidays within a date range
|
||||||
|
*/
|
||||||
|
async getHolidaysInRange(startDate: Date | string, endDate: Date | string): Promise<string[]> {
|
||||||
|
try {
|
||||||
|
const holidays = await Holiday.findAll({
|
||||||
|
where: {
|
||||||
|
holidayDate: {
|
||||||
|
[Op.between]: [dayjs(startDate).format('YYYY-MM-DD'), dayjs(endDate).format('YYYY-MM-DD')]
|
||||||
|
},
|
||||||
|
isActive: true
|
||||||
|
},
|
||||||
|
attributes: ['holidayDate'],
|
||||||
|
raw: true
|
||||||
|
});
|
||||||
|
|
||||||
|
return holidays.map((h: any) => h.holidayDate || h.holiday_date);
|
||||||
|
} catch (error) {
|
||||||
|
logger.error('[Holiday Service] Error fetching holidays:', error);
|
||||||
|
return [];
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if a specific date is a holiday
|
||||||
|
*/
|
||||||
|
async isHoliday(date: Date | string): Promise<boolean> {
|
||||||
|
try {
|
||||||
|
const dateStr = dayjs(date).format('YYYY-MM-DD');
|
||||||
|
const holiday = await Holiday.findOne({
|
||||||
|
where: {
|
||||||
|
holidayDate: dateStr,
|
||||||
|
isActive: true
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
return !!holiday;
|
||||||
|
} catch (error) {
|
||||||
|
logger.error('[Holiday Service] Error checking holiday:', error);
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if a date is a working day (not weekend or holiday)
|
||||||
|
*/
|
||||||
|
async isWorkingDay(date: Date | string): Promise<boolean> {
|
||||||
|
const day = dayjs(date);
|
||||||
|
const dayOfWeek = day.day(); // 0 = Sunday, 6 = Saturday
|
||||||
|
|
||||||
|
// Check if weekend
|
||||||
|
if (dayOfWeek === 0 || dayOfWeek === 6) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if holiday
|
||||||
|
const isHol = await this.isHoliday(date);
|
||||||
|
return !isHol;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Add a new holiday
|
||||||
|
*/
|
||||||
|
async createHoliday(holidayData: {
|
||||||
|
holidayDate: string;
|
||||||
|
holidayName: string;
|
||||||
|
description?: string;
|
||||||
|
holidayType?: HolidayType;
|
||||||
|
isRecurring?: boolean;
|
||||||
|
recurrenceRule?: string;
|
||||||
|
appliesToDepartments?: string[];
|
||||||
|
appliesToLocations?: string[];
|
||||||
|
createdBy: string;
|
||||||
|
}): Promise<Holiday> {
|
||||||
|
try {
|
||||||
|
const holiday = await Holiday.create({
|
||||||
|
...holidayData,
|
||||||
|
isActive: true
|
||||||
|
} as any);
|
||||||
|
|
||||||
|
logger.info(`[Holiday Service] Holiday created: ${holidayData.holidayName} on ${holidayData.holidayDate}`);
|
||||||
|
return holiday;
|
||||||
|
} catch (error) {
|
||||||
|
logger.error('[Holiday Service] Error creating holiday:', error);
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Update a holiday
|
||||||
|
*/
|
||||||
|
async updateHoliday(holidayId: string, updates: any, updatedBy: string): Promise<Holiday | null> {
|
||||||
|
try {
|
||||||
|
const holiday = await Holiday.findByPk(holidayId);
|
||||||
|
if (!holiday) {
|
||||||
|
throw new Error('Holiday not found');
|
||||||
|
}
|
||||||
|
|
||||||
|
await holiday.update({
|
||||||
|
...updates,
|
||||||
|
updatedBy,
|
||||||
|
updatedAt: new Date()
|
||||||
|
});
|
||||||
|
|
||||||
|
logger.info(`[Holiday Service] Holiday updated: ${holidayId}`);
|
||||||
|
return holiday;
|
||||||
|
} catch (error) {
|
||||||
|
logger.error('[Holiday Service] Error updating holiday:', error);
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Delete (deactivate) a holiday
|
||||||
|
*/
|
||||||
|
async deleteHoliday(holidayId: string): Promise<boolean> {
|
||||||
|
try {
|
||||||
|
await Holiday.update(
|
||||||
|
{ isActive: false },
|
||||||
|
{ where: { holidayId } }
|
||||||
|
);
|
||||||
|
|
||||||
|
logger.info(`[Holiday Service] Holiday deactivated: ${holidayId}`);
|
||||||
|
return true;
|
||||||
|
} catch (error) {
|
||||||
|
logger.error('[Holiday Service] Error deleting holiday:', error);
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get all active holidays
|
||||||
|
*/
|
||||||
|
async getAllActiveHolidays(year?: number): Promise<Holiday[]> {
|
||||||
|
try {
|
||||||
|
const whereClause: any = { isActive: true };
|
||||||
|
|
||||||
|
if (year) {
|
||||||
|
const startDate = `${year}-01-01`;
|
||||||
|
const endDate = `${year}-12-31`;
|
||||||
|
whereClause.holidayDate = {
|
||||||
|
[Op.between]: [startDate, endDate]
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
const holidays = await Holiday.findAll({
|
||||||
|
where: whereClause,
|
||||||
|
order: [['holidayDate', 'ASC']]
|
||||||
|
});
|
||||||
|
|
||||||
|
return holidays;
|
||||||
|
} catch (error) {
|
||||||
|
logger.error('[Holiday Service] Error fetching holidays:', error);
|
||||||
|
return [];
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get holidays by year for calendar view
|
||||||
|
*/
|
||||||
|
async getHolidayCalendar(year: number): Promise<any[]> {
|
||||||
|
try {
|
||||||
|
const startDate = `${year}-01-01`;
|
||||||
|
const endDate = `${year}-12-31`;
|
||||||
|
|
||||||
|
const holidays = await Holiday.findAll({
|
||||||
|
where: {
|
||||||
|
holidayDate: {
|
||||||
|
[Op.between]: [startDate, endDate]
|
||||||
|
},
|
||||||
|
isActive: true
|
||||||
|
},
|
||||||
|
order: [['holidayDate', 'ASC']]
|
||||||
|
});
|
||||||
|
|
||||||
|
return holidays.map((h: any) => ({
|
||||||
|
date: h.holidayDate || h.holiday_date,
|
||||||
|
name: h.holidayName || h.holiday_name,
|
||||||
|
description: h.description,
|
||||||
|
type: h.holidayType || h.holiday_type,
|
||||||
|
isRecurring: h.isRecurring || h.is_recurring
|
||||||
|
}));
|
||||||
|
} catch (error) {
|
||||||
|
logger.error('[Holiday Service] Error fetching holiday calendar:', error);
|
||||||
|
return [];
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Import multiple holidays (bulk upload)
|
||||||
|
*/
|
||||||
|
async bulkImportHolidays(holidays: any[], createdBy: string): Promise<{ success: number; failed: number }> {
|
||||||
|
let success = 0;
|
||||||
|
let failed = 0;
|
||||||
|
|
||||||
|
for (const holiday of holidays) {
|
||||||
|
try {
|
||||||
|
await this.createHoliday({
|
||||||
|
...holiday,
|
||||||
|
createdBy
|
||||||
|
});
|
||||||
|
success++;
|
||||||
|
} catch (error) {
|
||||||
|
failed++;
|
||||||
|
logger.error(`[Holiday Service] Failed to import holiday: ${holiday.holidayName}`, error);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.info(`[Holiday Service] Bulk import complete: ${success} success, ${failed} failed`);
|
||||||
|
return { success, failed };
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export const holidayService = new HolidayService();
|
||||||
|
|
||||||
1098
_archive/services/notification.service.ts
Normal file
1098
_archive/services/notification.service.ts
Normal file
File diff suppressed because it is too large
Load Diff
764
_archive/services/pause.service.ts
Normal file
764
_archive/services/pause.service.ts
Normal file
@ -0,0 +1,764 @@
|
|||||||
|
import { WorkflowRequest } from '@models/WorkflowRequest';
|
||||||
|
import { ApprovalLevel } from '@models/ApprovalLevel';
|
||||||
|
import { User } from '@models/User';
|
||||||
|
import { ApprovalStatus, WorkflowStatus } from '../types/common.types';
|
||||||
|
import { Op } from 'sequelize';
|
||||||
|
import logger from '@utils/logger';
|
||||||
|
import { tatSchedulerService } from './tatScheduler.service';
|
||||||
|
import { calculateElapsedWorkingHours } from '@utils/tatTimeUtils';
|
||||||
|
import { notificationService } from './notification.service';
|
||||||
|
import { activityService } from './activity.service';
|
||||||
|
import dayjs from 'dayjs';
|
||||||
|
import { emitToRequestRoom } from '../realtime/socket';
|
||||||
|
|
||||||
|
export class PauseService {
|
||||||
|
/**
|
||||||
|
* Pause a workflow at a specific approval level
|
||||||
|
* @param requestId - The workflow request ID
|
||||||
|
* @param levelId - The approval level ID to pause (optional, pauses current level if not provided)
|
||||||
|
* @param userId - The user ID who is pausing
|
||||||
|
* @param reason - Reason for pausing
|
||||||
|
* @param resumeDate - Date when workflow should auto-resume (max 1 month from now)
|
||||||
|
*/
|
||||||
|
async pauseWorkflow(
|
||||||
|
requestId: string,
|
||||||
|
levelId: string | null,
|
||||||
|
userId: string,
|
||||||
|
reason: string,
|
||||||
|
resumeDate: Date
|
||||||
|
): Promise<{ workflow: WorkflowRequest; level: ApprovalLevel | null }> {
|
||||||
|
try {
|
||||||
|
// Validate resume date (max 1 month from now)
|
||||||
|
const now = new Date();
|
||||||
|
const maxResumeDate = dayjs(now).add(1, 'month').toDate();
|
||||||
|
if (resumeDate > maxResumeDate) {
|
||||||
|
throw new Error('Resume date cannot be more than 1 month from now');
|
||||||
|
}
|
||||||
|
if (resumeDate <= now) {
|
||||||
|
throw new Error('Resume date must be in the future');
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get workflow
|
||||||
|
const workflow = await WorkflowRequest.findByPk(requestId);
|
||||||
|
if (!workflow) {
|
||||||
|
throw new Error('Workflow not found');
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if already paused
|
||||||
|
if ((workflow as any).isPaused) {
|
||||||
|
throw new Error('Workflow is already paused');
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get current approval level
|
||||||
|
let level: ApprovalLevel | null = null;
|
||||||
|
if (levelId) {
|
||||||
|
level = await ApprovalLevel.findByPk(levelId);
|
||||||
|
if (!level || (level as any).requestId !== requestId) {
|
||||||
|
throw new Error('Approval level not found or does not belong to this workflow');
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
// Get current active level
|
||||||
|
level = await ApprovalLevel.findOne({
|
||||||
|
where: {
|
||||||
|
requestId,
|
||||||
|
status: { [Op.in]: [ApprovalStatus.PENDING, ApprovalStatus.IN_PROGRESS] }
|
||||||
|
},
|
||||||
|
order: [['levelNumber', 'ASC']]
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!level) {
|
||||||
|
throw new Error('No active approval level found to pause');
|
||||||
|
}
|
||||||
|
|
||||||
|
// Verify user is either the approver for this level OR the initiator
|
||||||
|
const isApprover = (level as any).approverId === userId;
|
||||||
|
const isInitiator = (workflow as any).initiatorId === userId;
|
||||||
|
|
||||||
|
if (!isApprover && !isInitiator) {
|
||||||
|
throw new Error('Only the assigned approver or the initiator can pause this workflow');
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if level is already paused
|
||||||
|
if ((level as any).isPaused) {
|
||||||
|
throw new Error('This approval level is already paused');
|
||||||
|
}
|
||||||
|
|
||||||
|
// Calculate elapsed hours before pause
|
||||||
|
const priority = ((workflow as any).priority || 'STANDARD').toString().toLowerCase();
|
||||||
|
|
||||||
|
// Check if this level was previously paused and resumed
|
||||||
|
// If so, we need to account for the previous pauseElapsedHours
|
||||||
|
// IMPORTANT: Convert to number to avoid string concatenation (DB returns DECIMAL as string)
|
||||||
|
const previousPauseElapsedHours = Number((level as any).pauseElapsedHours || 0);
|
||||||
|
const previousResumeDate = (level as any).pauseResumeDate;
|
||||||
|
const originalTatStartTime = (level as any).pauseTatStartTime || (level as any).levelStartTime || (level as any).tatStartTime || (level as any).createdAt;
|
||||||
|
|
||||||
|
let elapsedHours: number;
|
||||||
|
let levelStartTimeForCalculation: Date;
|
||||||
|
|
||||||
|
if (previousPauseElapsedHours > 0 && previousResumeDate) {
|
||||||
|
// This is a second (or subsequent) pause
|
||||||
|
// Calculate: previous elapsed hours + time from resume to now
|
||||||
|
levelStartTimeForCalculation = previousResumeDate; // Start from last resume time
|
||||||
|
const timeSinceResume = await calculateElapsedWorkingHours(levelStartTimeForCalculation, now, priority);
|
||||||
|
elapsedHours = previousPauseElapsedHours + Number(timeSinceResume);
|
||||||
|
|
||||||
|
logger.info(`[Pause] Second pause detected - Previous elapsed: ${previousPauseElapsedHours}h, Since resume: ${timeSinceResume}h, Total: ${elapsedHours}h`);
|
||||||
|
} else {
|
||||||
|
// First pause - calculate from original start time
|
||||||
|
levelStartTimeForCalculation = originalTatStartTime;
|
||||||
|
elapsedHours = await calculateElapsedWorkingHours(levelStartTimeForCalculation, now, priority);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Store TAT snapshot
|
||||||
|
const tatSnapshot = {
|
||||||
|
levelId: (level as any).levelId,
|
||||||
|
levelNumber: (level as any).levelNumber,
|
||||||
|
elapsedHours: Number(elapsedHours),
|
||||||
|
remainingHours: Math.max(0, Number((level as any).tatHours) - elapsedHours),
|
||||||
|
tatPercentageUsed: (Number((level as any).tatHours) > 0
|
||||||
|
? Math.min(100, Math.round((elapsedHours / Number((level as any).tatHours)) * 100))
|
||||||
|
: 0),
|
||||||
|
pausedAt: now.toISOString(),
|
||||||
|
originalTatStartTime: originalTatStartTime // Always use the original start time, not the resume time
|
||||||
|
};
|
||||||
|
|
||||||
|
// Update approval level with pause information
|
||||||
|
await level.update({
|
||||||
|
isPaused: true,
|
||||||
|
pausedAt: now,
|
||||||
|
pausedBy: userId,
|
||||||
|
pauseReason: reason,
|
||||||
|
pauseResumeDate: resumeDate,
|
||||||
|
pauseTatStartTime: originalTatStartTime, // Always preserve the original start time
|
||||||
|
pauseElapsedHours: elapsedHours,
|
||||||
|
status: ApprovalStatus.PAUSED
|
||||||
|
});
|
||||||
|
|
||||||
|
// Update workflow with pause information
|
||||||
|
// Store the current status before pausing so we can restore it on resume
|
||||||
|
const currentWorkflowStatus = (workflow as any).status;
|
||||||
|
const currentLevel = (workflow as any).currentLevel || (level as any).levelNumber;
|
||||||
|
|
||||||
|
await workflow.update({
|
||||||
|
isPaused: true,
|
||||||
|
pausedAt: now,
|
||||||
|
pausedBy: userId,
|
||||||
|
pauseReason: reason,
|
||||||
|
pauseResumeDate: resumeDate,
|
||||||
|
pauseTatSnapshot: {
|
||||||
|
...tatSnapshot,
|
||||||
|
previousStatus: currentWorkflowStatus, // Store previous status for resume
|
||||||
|
previousCurrentLevel: currentLevel // Store current level to prevent advancement
|
||||||
|
},
|
||||||
|
status: WorkflowStatus.PAUSED
|
||||||
|
// Note: We do NOT update currentLevel here - it should stay at the paused level
|
||||||
|
});
|
||||||
|
|
||||||
|
// Cancel TAT jobs for this level
|
||||||
|
await tatSchedulerService.cancelTatJobs(requestId, (level as any).levelId);
|
||||||
|
|
||||||
|
// Get user details for notifications
|
||||||
|
const user = await User.findByPk(userId);
|
||||||
|
const userName = (user as any)?.displayName || (user as any)?.email || 'User';
|
||||||
|
|
||||||
|
// Get initiator
|
||||||
|
const initiator = await User.findByPk((workflow as any).initiatorId);
|
||||||
|
const initiatorName = (initiator as any)?.displayName || (initiator as any)?.email || 'User';
|
||||||
|
|
||||||
|
// Send notifications
|
||||||
|
const requestNumber = (workflow as any).requestNumber;
|
||||||
|
const title = (workflow as any).title;
|
||||||
|
|
||||||
|
// Notify initiator only if someone else (approver) paused the request
|
||||||
|
// Skip notification if initiator paused their own request
|
||||||
|
if (!isInitiator) {
|
||||||
|
await notificationService.sendToUsers([(workflow as any).initiatorId], {
|
||||||
|
title: 'Workflow Paused',
|
||||||
|
body: `Your request "${title}" has been paused by ${userName}. Reason: ${reason}. Will resume on ${dayjs(resumeDate).format('MMM DD, YYYY')}.`,
|
||||||
|
requestId,
|
||||||
|
requestNumber,
|
||||||
|
url: `/request/${requestNumber}`,
|
||||||
|
type: 'workflow_paused',
|
||||||
|
priority: 'HIGH',
|
||||||
|
actionRequired: false,
|
||||||
|
metadata: {
|
||||||
|
pauseReason: reason,
|
||||||
|
resumeDate: resumeDate.toISOString(),
|
||||||
|
pausedBy: userId
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
// Notify the user who paused (confirmation) - no email for self-action
|
||||||
|
await notificationService.sendToUsers([userId], {
|
||||||
|
title: 'Workflow Paused Successfully',
|
||||||
|
body: `You have paused request "${title}". It will automatically resume on ${dayjs(resumeDate).format('MMM DD, YYYY')}.`,
|
||||||
|
requestId,
|
||||||
|
requestNumber,
|
||||||
|
url: `/request/${requestNumber}`,
|
||||||
|
type: 'status_change', // Use status_change to avoid email for self-action
|
||||||
|
priority: 'MEDIUM',
|
||||||
|
actionRequired: false
|
||||||
|
});
|
||||||
|
|
||||||
|
// If initiator paused, notify the current approver
|
||||||
|
if (isInitiator && (level as any).approverId) {
|
||||||
|
const approver = await User.findByPk((level as any).approverId);
|
||||||
|
const approverUserId = (level as any).approverId;
|
||||||
|
await notificationService.sendToUsers([approverUserId], {
|
||||||
|
title: 'Workflow Paused by Initiator',
|
||||||
|
body: `Request "${title}" has been paused by the initiator (${userName}). Reason: ${reason}. Will resume on ${dayjs(resumeDate).format('MMM DD, YYYY')}.`,
|
||||||
|
requestId,
|
||||||
|
requestNumber,
|
||||||
|
url: `/request/${requestNumber}`,
|
||||||
|
type: 'workflow_paused',
|
||||||
|
priority: 'HIGH',
|
||||||
|
actionRequired: false,
|
||||||
|
metadata: {
|
||||||
|
pauseReason: reason,
|
||||||
|
resumeDate: resumeDate.toISOString(),
|
||||||
|
pausedBy: userId
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
// Log activity
|
||||||
|
await activityService.log({
|
||||||
|
requestId,
|
||||||
|
type: 'paused',
|
||||||
|
user: { userId, name: userName },
|
||||||
|
timestamp: now.toISOString(),
|
||||||
|
action: 'Workflow Paused',
|
||||||
|
details: `Workflow paused by ${userName} at level ${(level as any).levelNumber}. Reason: ${reason}. Will resume on ${dayjs(resumeDate).format('MMM DD, YYYY')}.`,
|
||||||
|
metadata: {
|
||||||
|
levelId: (level as any).levelId,
|
||||||
|
levelNumber: (level as any).levelNumber,
|
||||||
|
resumeDate: resumeDate.toISOString()
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
logger.info(`[Pause] Workflow ${requestId} paused at level ${(level as any).levelNumber} by ${userId}`);
|
||||||
|
|
||||||
|
// Schedule dedicated auto-resume job for this workflow
|
||||||
|
try {
|
||||||
|
const { pauseResumeQueue } = require('../queues/pauseResumeQueue');
|
||||||
|
if (pauseResumeQueue && resumeDate) {
|
||||||
|
const delay = resumeDate.getTime() - now.getTime();
|
||||||
|
|
||||||
|
if (delay > 0) {
|
||||||
|
const jobId = `resume-${requestId}-${(level as any).levelId}`;
|
||||||
|
|
||||||
|
await pauseResumeQueue.add(
|
||||||
|
'auto-resume-workflow',
|
||||||
|
{
|
||||||
|
type: 'auto-resume-workflow',
|
||||||
|
requestId,
|
||||||
|
levelId: (level as any).levelId,
|
||||||
|
scheduledResumeDate: resumeDate.toISOString()
|
||||||
|
},
|
||||||
|
{
|
||||||
|
jobId,
|
||||||
|
delay, // Exact delay in milliseconds until resume time
|
||||||
|
removeOnComplete: true,
|
||||||
|
removeOnFail: false
|
||||||
|
}
|
||||||
|
);
|
||||||
|
|
||||||
|
logger.info(`[Pause] Scheduled dedicated auto-resume job ${jobId} for ${resumeDate.toISOString()} (delay: ${Math.round(delay / 1000 / 60)} minutes)`);
|
||||||
|
} else {
|
||||||
|
logger.warn(`[Pause] Resume date ${resumeDate.toISOString()} is in the past, skipping job scheduling`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} catch (queueError) {
|
||||||
|
logger.warn(`[Pause] Could not schedule dedicated auto-resume job:`, queueError);
|
||||||
|
// Continue with pause even if job scheduling fails (hourly check will handle it as fallback)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Emit real-time update to all users viewing this request
|
||||||
|
emitToRequestRoom(requestId, 'request:updated', {
|
||||||
|
requestId,
|
||||||
|
requestNumber: (workflow as any).requestNumber,
|
||||||
|
action: 'PAUSE',
|
||||||
|
levelNumber: (level as any).levelNumber,
|
||||||
|
timestamp: now.toISOString()
|
||||||
|
});
|
||||||
|
|
||||||
|
return { workflow, level };
|
||||||
|
} catch (error: any) {
|
||||||
|
logger.error(`[Pause] Failed to pause workflow:`, error);
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Resume a paused workflow
|
||||||
|
* @param requestId - The workflow request ID
|
||||||
|
* @param userId - The user ID who is resuming (optional, for manual resume)
|
||||||
|
* @param notes - Optional notes for the resume action
|
||||||
|
*/
|
||||||
|
async resumeWorkflow(requestId: string, userId?: string, notes?: string): Promise<{ workflow: WorkflowRequest; level: ApprovalLevel | null }> {
|
||||||
|
try {
|
||||||
|
const now = new Date();
|
||||||
|
|
||||||
|
// Get workflow
|
||||||
|
const workflow = await WorkflowRequest.findByPk(requestId);
|
||||||
|
if (!workflow) {
|
||||||
|
throw new Error('Workflow not found');
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if paused
|
||||||
|
if (!(workflow as any).isPaused) {
|
||||||
|
throw new Error('Workflow is not paused');
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get paused level
|
||||||
|
const level = await ApprovalLevel.findOne({
|
||||||
|
where: {
|
||||||
|
requestId,
|
||||||
|
isPaused: true
|
||||||
|
},
|
||||||
|
order: [['levelNumber', 'ASC']]
|
||||||
|
});
|
||||||
|
|
||||||
|
if (!level) {
|
||||||
|
throw new Error('Paused approval level not found');
|
||||||
|
}
|
||||||
|
|
||||||
|
// Verify user has permission (if manual resume)
|
||||||
|
// Both initiator and current approver can resume the workflow
|
||||||
|
if (userId) {
|
||||||
|
const isApprover = (level as any).approverId === userId;
|
||||||
|
const isInitiator = (workflow as any).initiatorId === userId;
|
||||||
|
|
||||||
|
if (!isApprover && !isInitiator) {
|
||||||
|
throw new Error('Only the assigned approver or the initiator can resume this workflow');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Calculate remaining TAT from resume time
|
||||||
|
const priority = ((workflow as any).priority || 'STANDARD').toString().toLowerCase();
|
||||||
|
const pauseElapsedHours = Number((level as any).pauseElapsedHours || 0);
|
||||||
|
const tatHours = Number((level as any).tatHours);
|
||||||
|
const remainingHours = Math.max(0, tatHours - pauseElapsedHours);
|
||||||
|
|
||||||
|
// Get which alerts have already been sent (to avoid re-sending on resume)
|
||||||
|
const tat50AlertSent = (level as any).tat50AlertSent || false;
|
||||||
|
const tat75AlertSent = (level as any).tat75AlertSent || false;
|
||||||
|
const tatBreached = (level as any).tatBreached || false;
|
||||||
|
|
||||||
|
// Update approval level - resume TAT
|
||||||
|
// IMPORTANT: Keep pauseElapsedHours and store resumedAt (pauseResumeDate repurposed)
|
||||||
|
// This allows SLA calculation to correctly add pre-pause elapsed time
|
||||||
|
await level.update({
|
||||||
|
isPaused: false,
|
||||||
|
pausedAt: null as any,
|
||||||
|
pausedBy: null as any,
|
||||||
|
pauseReason: null as any,
|
||||||
|
pauseResumeDate: now, // Store actual resume time (repurposed from scheduled resume date)
|
||||||
|
// pauseTatStartTime: null as any, // Keep original TAT start time for reference
|
||||||
|
// pauseElapsedHours is intentionally NOT cleared - needed for SLA calculations
|
||||||
|
status: ApprovalStatus.IN_PROGRESS,
|
||||||
|
tatStartTime: now, // Reset TAT start time to now for new elapsed calculation
|
||||||
|
levelStartTime: now // This is the new start time from resume
|
||||||
|
});
|
||||||
|
|
||||||
|
// Cancel any scheduled auto-resume job (if exists)
|
||||||
|
try {
|
||||||
|
const { pauseResumeQueue } = require('../queues/pauseResumeQueue');
|
||||||
|
if (pauseResumeQueue) {
|
||||||
|
// Try to remove job by specific ID pattern first (more efficient)
|
||||||
|
const jobId = `resume-${requestId}-${(level as any).levelId}`;
|
||||||
|
try {
|
||||||
|
const specificJob = await pauseResumeQueue.getJob(jobId);
|
||||||
|
if (specificJob) {
|
||||||
|
await specificJob.remove();
|
||||||
|
logger.info(`[Pause] Cancelled scheduled auto-resume job ${jobId} for workflow ${requestId}`);
|
||||||
|
}
|
||||||
|
} catch (err) {
|
||||||
|
// Job might not exist, which is fine
|
||||||
|
}
|
||||||
|
|
||||||
|
// Also check for any other jobs for this request (fallback for old jobs)
|
||||||
|
const scheduledJobs = await pauseResumeQueue.getJobs(['delayed', 'waiting']);
|
||||||
|
const otherJobs = scheduledJobs.filter((job: any) =>
|
||||||
|
job.data.requestId === requestId && job.id !== jobId
|
||||||
|
);
|
||||||
|
for (const job of otherJobs) {
|
||||||
|
await job.remove();
|
||||||
|
logger.info(`[Pause] Cancelled legacy auto-resume job ${job.id} for workflow ${requestId}`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} catch (queueError) {
|
||||||
|
logger.warn(`[Pause] Could not cancel scheduled auto-resume job:`, queueError);
|
||||||
|
// Continue with resume even if job cancellation fails
|
||||||
|
}
|
||||||
|
|
||||||
|
// Update workflow - restore previous status or default to PENDING
|
||||||
|
const pauseSnapshot = (workflow as any).pauseTatSnapshot || {};
|
||||||
|
const previousStatus = pauseSnapshot.previousStatus || WorkflowStatus.PENDING;
|
||||||
|
|
||||||
|
await workflow.update({
|
||||||
|
isPaused: false,
|
||||||
|
pausedAt: null as any,
|
||||||
|
pausedBy: null as any,
|
||||||
|
pauseReason: null as any,
|
||||||
|
pauseResumeDate: null as any,
|
||||||
|
pauseTatSnapshot: null as any,
|
||||||
|
status: previousStatus // Restore previous status (PENDING or IN_PROGRESS)
|
||||||
|
});
|
||||||
|
|
||||||
|
// Reschedule TAT jobs from resume time - only for alerts that haven't been sent yet
|
||||||
|
if (remainingHours > 0) {
|
||||||
|
// Calculate which thresholds are still pending based on remaining time
|
||||||
|
const percentageUsedAtPause = tatHours > 0 ? (pauseElapsedHours / tatHours) * 100 : 0;
|
||||||
|
|
||||||
|
// Only schedule jobs for thresholds that:
|
||||||
|
// 1. Haven't been sent yet
|
||||||
|
// 2. Haven't been passed yet (based on percentage used at pause)
|
||||||
|
await tatSchedulerService.scheduleTatJobsOnResume(
|
||||||
|
requestId,
|
||||||
|
(level as any).levelId,
|
||||||
|
(level as any).approverId,
|
||||||
|
remainingHours, // Remaining TAT hours
|
||||||
|
now, // Start from now
|
||||||
|
priority as any,
|
||||||
|
{
|
||||||
|
// Pass which alerts were already sent
|
||||||
|
tat50AlertSent: tat50AlertSent,
|
||||||
|
tat75AlertSent: tat75AlertSent,
|
||||||
|
tatBreached: tatBreached,
|
||||||
|
// Pass percentage used at pause to determine which thresholds are still relevant
|
||||||
|
percentageUsedAtPause: percentageUsedAtPause
|
||||||
|
}
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get user details
|
||||||
|
const resumeUser = userId ? await User.findByPk(userId) : null;
|
||||||
|
const resumeUserName = resumeUser
|
||||||
|
? ((resumeUser as any)?.displayName || (resumeUser as any)?.email || 'User')
|
||||||
|
: 'System (Auto-resume)';
|
||||||
|
|
||||||
|
// Get initiator and paused by user
|
||||||
|
const initiator = await User.findByPk((workflow as any).initiatorId);
|
||||||
|
const initiatorName = (initiator as any)?.displayName || (initiator as any)?.email || 'User';
|
||||||
|
const pausedByUser = (workflow as any).pausedBy
|
||||||
|
? await User.findByPk((workflow as any).pausedBy)
|
||||||
|
: null;
|
||||||
|
const pausedByName = pausedByUser
|
||||||
|
? ((pausedByUser as any)?.displayName || (pausedByUser as any)?.email || 'User')
|
||||||
|
: 'Unknown';
|
||||||
|
|
||||||
|
const requestNumber = (workflow as any).requestNumber;
|
||||||
|
const title = (workflow as any).title;
|
||||||
|
const initiatorId = (workflow as any).initiatorId;
|
||||||
|
const approverId = (level as any).approverId;
|
||||||
|
const isResumedByInitiator = userId === initiatorId;
|
||||||
|
const isResumedByApprover = userId === approverId;
|
||||||
|
|
||||||
|
// Calculate pause duration
|
||||||
|
const pausedAt = (level as any).pausedAt || (workflow as any).pausedAt;
|
||||||
|
const pauseDurationMs = pausedAt ? now.getTime() - new Date(pausedAt).getTime() : 0;
|
||||||
|
const pauseDurationHours = Math.round((pauseDurationMs / (1000 * 60 * 60)) * 100) / 100; // Round to 2 decimal places
|
||||||
|
const pauseDuration = pauseDurationHours > 0 ? `${pauseDurationHours} hours` : 'less than 1 hour';
|
||||||
|
|
||||||
|
// Notify initiator only if someone else resumed (or auto-resume)
|
||||||
|
// Skip if initiator resumed their own request
|
||||||
|
if (!isResumedByInitiator) {
|
||||||
|
await notificationService.sendToUsers([initiatorId], {
|
||||||
|
title: 'Workflow Resumed',
|
||||||
|
body: `Your request "${title}" has been resumed ${userId ? `by ${resumeUserName}` : 'automatically'}.`,
|
||||||
|
requestId,
|
||||||
|
requestNumber,
|
||||||
|
url: `/request/${requestNumber}`,
|
||||||
|
type: 'workflow_resumed',
|
||||||
|
priority: 'HIGH',
|
||||||
|
actionRequired: false,
|
||||||
|
metadata: {
|
||||||
|
resumedBy: userId ? { userId, name: resumeUserName } : null,
|
||||||
|
pauseDuration: pauseDuration
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
// Notify approver only if someone else resumed (or auto-resume)
|
||||||
|
// Skip if approver resumed the request themselves
|
||||||
|
if (!isResumedByApprover && approverId) {
|
||||||
|
await notificationService.sendToUsers([approverId], {
|
||||||
|
title: 'Workflow Resumed',
|
||||||
|
body: `Request "${title}" has been resumed ${userId ? `by ${resumeUserName}` : 'automatically'}. Please continue with your review.`,
|
||||||
|
requestId,
|
||||||
|
requestNumber,
|
||||||
|
url: `/request/${requestNumber}`,
|
||||||
|
type: 'workflow_resumed',
|
||||||
|
priority: 'HIGH',
|
||||||
|
actionRequired: true,
|
||||||
|
metadata: {
|
||||||
|
resumedBy: userId ? { userId, name: resumeUserName } : null,
|
||||||
|
pauseDuration: pauseDuration
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
// Send confirmation to the user who resumed (if manual resume) - no email for self-action
|
||||||
|
if (userId) {
|
||||||
|
await notificationService.sendToUsers([userId], {
|
||||||
|
title: 'Workflow Resumed Successfully',
|
||||||
|
body: `You have resumed request "${title}". ${isResumedByApprover ? 'Please continue with your review.' : ''}`,
|
||||||
|
requestId,
|
||||||
|
requestNumber,
|
||||||
|
url: `/request/${requestNumber}`,
|
||||||
|
type: 'status_change', // Use status_change to avoid email for self-action
|
||||||
|
priority: 'MEDIUM',
|
||||||
|
actionRequired: isResumedByApprover
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
// Log activity with notes
|
||||||
|
const resumeDetails = notes
|
||||||
|
? `Workflow resumed ${userId ? `by ${resumeUserName}` : 'automatically'} at level ${(level as any).levelNumber}. Notes: ${notes}`
|
||||||
|
: `Workflow resumed ${userId ? `by ${resumeUserName}` : 'automatically'} at level ${(level as any).levelNumber}.`;
|
||||||
|
|
||||||
|
await activityService.log({
|
||||||
|
requestId,
|
||||||
|
type: 'resumed',
|
||||||
|
user: userId ? { userId, name: resumeUserName } : undefined,
|
||||||
|
timestamp: now.toISOString(),
|
||||||
|
action: 'Workflow Resumed',
|
||||||
|
details: resumeDetails,
|
||||||
|
metadata: {
|
||||||
|
levelId: (level as any).levelId,
|
||||||
|
levelNumber: (level as any).levelNumber,
|
||||||
|
wasAutoResume: !userId,
|
||||||
|
notes: notes || null
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
logger.info(`[Pause] Workflow ${requestId} resumed ${userId ? `by ${userId}` : 'automatically'}`);
|
||||||
|
|
||||||
|
// Emit real-time update to all users viewing this request
|
||||||
|
emitToRequestRoom(requestId, 'request:updated', {
|
||||||
|
requestId,
|
||||||
|
requestNumber: (workflow as any).requestNumber,
|
||||||
|
action: 'RESUME',
|
||||||
|
levelNumber: (level as any).levelNumber,
|
||||||
|
timestamp: now.toISOString()
|
||||||
|
});
|
||||||
|
|
||||||
|
return { workflow, level };
|
||||||
|
} catch (error: any) {
|
||||||
|
logger.error(`[Pause] Failed to resume workflow:`, error);
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Cancel pause (for retrigger scenario - initiator requests approver to resume)
|
||||||
|
* This sends a notification to the approver who paused it
|
||||||
|
* @param requestId - The workflow request ID
|
||||||
|
* @param userId - The initiator user ID
|
||||||
|
*/
|
||||||
|
async retriggerPause(requestId: string, userId: string): Promise<void> {
|
||||||
|
try {
|
||||||
|
const workflow = await WorkflowRequest.findByPk(requestId);
|
||||||
|
if (!workflow) {
|
||||||
|
throw new Error('Workflow not found');
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!(workflow as any).isPaused) {
|
||||||
|
throw new Error('Workflow is not paused');
|
||||||
|
}
|
||||||
|
|
||||||
|
// Verify user is initiator
|
||||||
|
if ((workflow as any).initiatorId !== userId) {
|
||||||
|
throw new Error('Only the initiator can retrigger a pause');
|
||||||
|
}
|
||||||
|
|
||||||
|
const pausedBy = (workflow as any).pausedBy;
|
||||||
|
if (!pausedBy) {
|
||||||
|
throw new Error('Cannot retrigger - no approver found who paused this workflow');
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get user details
|
||||||
|
const initiator = await User.findByPk(userId);
|
||||||
|
const initiatorName = (initiator as any)?.displayName || (initiator as any)?.email || 'User';
|
||||||
|
|
||||||
|
// Get approver details (who paused the workflow)
|
||||||
|
const approver = await User.findByPk(pausedBy);
|
||||||
|
const approverName = (approver as any)?.displayName || (approver as any)?.email || 'Approver';
|
||||||
|
|
||||||
|
const requestNumber = (workflow as any).requestNumber;
|
||||||
|
const title = (workflow as any).title;
|
||||||
|
|
||||||
|
// Notify approver who paused it
|
||||||
|
await notificationService.sendToUsers([pausedBy], {
|
||||||
|
title: 'Pause Retrigger Request',
|
||||||
|
body: `${initiatorName} is requesting you to cancel the pause and resume work on request "${title}".`,
|
||||||
|
requestId,
|
||||||
|
requestNumber,
|
||||||
|
url: `/request/${requestNumber}`,
|
||||||
|
type: 'pause_retrigger_request',
|
||||||
|
priority: 'HIGH',
|
||||||
|
actionRequired: true
|
||||||
|
});
|
||||||
|
|
||||||
|
// Log activity with approver name
|
||||||
|
await activityService.log({
|
||||||
|
requestId,
|
||||||
|
type: 'pause_retriggered',
|
||||||
|
user: { userId, name: initiatorName },
|
||||||
|
timestamp: new Date().toISOString(),
|
||||||
|
action: 'Pause Retrigger Requested',
|
||||||
|
details: `${initiatorName} requested ${approverName} to cancel the pause and resume work.`,
|
||||||
|
metadata: {
|
||||||
|
pausedBy,
|
||||||
|
approverName
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
logger.info(`[Pause] Pause retrigger requested for workflow ${requestId} by initiator ${userId}`);
|
||||||
|
} catch (error: any) {
|
||||||
|
logger.error(`[Pause] Failed to retrigger pause:`, error);
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get pause details for a workflow
|
||||||
|
*/
|
||||||
|
async getPauseDetails(requestId: string): Promise<any> {
|
||||||
|
try {
|
||||||
|
const workflow = await WorkflowRequest.findByPk(requestId);
|
||||||
|
if (!workflow) {
|
||||||
|
throw new Error('Workflow not found');
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!(workflow as any).isPaused) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
const level = await ApprovalLevel.findOne({
|
||||||
|
where: {
|
||||||
|
requestId,
|
||||||
|
isPaused: true
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
const pausedByUser = (workflow as any).pausedBy
|
||||||
|
? await User.findByPk((workflow as any).pausedBy, { attributes: ['userId', 'email', 'displayName'] })
|
||||||
|
: null;
|
||||||
|
|
||||||
|
return {
|
||||||
|
isPaused: true,
|
||||||
|
pausedAt: (workflow as any).pausedAt,
|
||||||
|
pausedBy: pausedByUser ? {
|
||||||
|
userId: (pausedByUser as any).userId,
|
||||||
|
email: (pausedByUser as any).email,
|
||||||
|
name: (pausedByUser as any).displayName || (pausedByUser as any).email
|
||||||
|
} : null,
|
||||||
|
pauseReason: (workflow as any).pauseReason,
|
||||||
|
pauseResumeDate: (workflow as any).pauseResumeDate,
|
||||||
|
level: level ? {
|
||||||
|
levelId: (level as any).levelId,
|
||||||
|
levelNumber: (level as any).levelNumber,
|
||||||
|
approverName: (level as any).approverName
|
||||||
|
} : null
|
||||||
|
};
|
||||||
|
} catch (error: any) {
|
||||||
|
logger.error(`[Pause] Failed to get pause details:`, error);
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check and auto-resume paused workflows whose resume date has passed
|
||||||
|
* This is called by a scheduled job
|
||||||
|
*/
|
||||||
|
async checkAndResumePausedWorkflows(): Promise<number> {
|
||||||
|
try {
|
||||||
|
const now = new Date();
|
||||||
|
|
||||||
|
// Find all paused workflows where resume date has passed
|
||||||
|
// Handle backward compatibility: workflow_type column may not exist in old environments
|
||||||
|
let pausedWorkflows: WorkflowRequest[];
|
||||||
|
try {
|
||||||
|
pausedWorkflows = await WorkflowRequest.findAll({
|
||||||
|
where: {
|
||||||
|
isPaused: true,
|
||||||
|
pauseResumeDate: {
|
||||||
|
[Op.lte]: now
|
||||||
|
}
|
||||||
|
}
|
||||||
|
});
|
||||||
|
} catch (error: any) {
|
||||||
|
// If error is due to missing workflow_type column, use raw query
|
||||||
|
if (error.message?.includes('workflow_type') || (error.message?.includes('column') && error.message?.includes('does not exist'))) {
|
||||||
|
logger.warn('[Pause] workflow_type column not found, using raw query for backward compatibility');
|
||||||
|
const { sequelize } = await import('../config/database');
|
||||||
|
const { QueryTypes } = await import('sequelize');
|
||||||
|
const results = await sequelize.query(`
|
||||||
|
SELECT request_id, is_paused, pause_resume_date
|
||||||
|
FROM workflow_requests
|
||||||
|
WHERE is_paused = true
|
||||||
|
AND pause_resume_date <= :now
|
||||||
|
`, {
|
||||||
|
replacements: { now },
|
||||||
|
type: QueryTypes.SELECT
|
||||||
|
});
|
||||||
|
|
||||||
|
// Convert to WorkflowRequest-like objects
|
||||||
|
// results is an array of objects from SELECT query
|
||||||
|
pausedWorkflows = (results as any[]).map((r: any) => ({
|
||||||
|
requestId: r.request_id,
|
||||||
|
isPaused: r.is_paused,
|
||||||
|
pauseResumeDate: r.pause_resume_date
|
||||||
|
})) as any;
|
||||||
|
} else {
|
||||||
|
throw error; // Re-throw if it's a different error
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let resumedCount = 0;
|
||||||
|
for (const workflow of pausedWorkflows) {
|
||||||
|
try {
|
||||||
|
await this.resumeWorkflow((workflow as any).requestId);
|
||||||
|
resumedCount++;
|
||||||
|
} catch (error: any) {
|
||||||
|
logger.error(`[Pause] Failed to auto-resume workflow ${(workflow as any).requestId}:`, error);
|
||||||
|
// Continue with other workflows
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (resumedCount > 0) {
|
||||||
|
logger.info(`[Pause] Auto-resumed ${resumedCount} workflow(s)`);
|
||||||
|
}
|
||||||
|
|
||||||
|
return resumedCount;
|
||||||
|
} catch (error: any) {
|
||||||
|
logger.error(`[Pause] Failed to check and resume paused workflows:`, error);
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get all paused workflows (for admin/reporting)
|
||||||
|
*/
|
||||||
|
async getPausedWorkflows(): Promise<WorkflowRequest[]> {
|
||||||
|
try {
|
||||||
|
return await WorkflowRequest.findAll({
|
||||||
|
where: {
|
||||||
|
isPaused: true
|
||||||
|
},
|
||||||
|
order: [['pausedAt', 'DESC']]
|
||||||
|
});
|
||||||
|
} catch (error: any) {
|
||||||
|
logger.error(`[Pause] Failed to get paused workflows:`, error);
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export const pauseService = new PauseService();
|
||||||
|
|
||||||
383
_archive/services/tatScheduler.service.ts
Normal file
383
_archive/services/tatScheduler.service.ts
Normal file
@ -0,0 +1,383 @@
|
|||||||
|
import { tatQueue } from '../queues/tatQueue';
|
||||||
|
import { calculateDelay, addWorkingHours, addWorkingHoursExpress } from '@utils/tatTimeUtils';
|
||||||
|
import { getTatThresholds } from './configReader.service';
|
||||||
|
import dayjs from 'dayjs';
|
||||||
|
import logger, { logTATEvent } from '@utils/logger';
|
||||||
|
import { Priority } from '../types/common.types';
|
||||||
|
|
||||||
|
export class TatSchedulerService {
|
||||||
|
/**
|
||||||
|
* Schedule TAT notification jobs for an approval level
|
||||||
|
* @param requestId - The workflow request ID
|
||||||
|
* @param levelId - The approval level ID
|
||||||
|
* @param approverId - The approver user ID
|
||||||
|
* @param tatDurationHours - TAT duration in hours
|
||||||
|
* @param startTime - Optional start time (defaults to now)
|
||||||
|
* @param priority - Request priority (EXPRESS = 24/7, STANDARD = working hours only)
|
||||||
|
*/
|
||||||
|
async scheduleTatJobs(
|
||||||
|
requestId: string,
|
||||||
|
levelId: string,
|
||||||
|
approverId: string,
|
||||||
|
tatDurationHours: number,
|
||||||
|
startTime?: Date,
|
||||||
|
priority: Priority = Priority.STANDARD
|
||||||
|
): Promise<void> {
|
||||||
|
try {
|
||||||
|
// Check if tatQueue is available
|
||||||
|
if (!tatQueue) {
|
||||||
|
logger.warn(`[TAT Scheduler] TAT queue not available (Redis not connected). Skipping TAT job scheduling.`);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const now = startTime || new Date();
|
||||||
|
// Handle both enum and string (case-insensitive) priority values
|
||||||
|
const priorityStr = typeof priority === 'string' ? priority.toUpperCase() : priority;
|
||||||
|
const isExpress = priorityStr === Priority.EXPRESS || priorityStr === 'EXPRESS';
|
||||||
|
|
||||||
|
// Get current thresholds from database configuration
|
||||||
|
const thresholds = await getTatThresholds();
|
||||||
|
|
||||||
|
// Calculate milestone times using configured thresholds
|
||||||
|
// EXPRESS mode: 24/7 calculation (includes holidays, weekends, non-working hours)
|
||||||
|
// STANDARD mode: Working hours only (excludes holidays, weekends, non-working hours)
|
||||||
|
let threshold1Time: Date;
|
||||||
|
let threshold2Time: Date;
|
||||||
|
let breachTime: Date;
|
||||||
|
|
||||||
|
if (isExpress) {
|
||||||
|
// EXPRESS: All calendar days (Mon-Sun, including weekends/holidays) but working hours only (9 AM - 6 PM)
|
||||||
|
const t1 = await addWorkingHoursExpress(now, tatDurationHours * (thresholds.first / 100));
|
||||||
|
const t2 = await addWorkingHoursExpress(now, tatDurationHours * (thresholds.second / 100));
|
||||||
|
const tBreach = await addWorkingHoursExpress(now, tatDurationHours);
|
||||||
|
threshold1Time = t1.toDate();
|
||||||
|
threshold2Time = t2.toDate();
|
||||||
|
breachTime = tBreach.toDate();
|
||||||
|
} else {
|
||||||
|
// STANDARD: Working days only (Mon-Fri), working hours (9 AM - 6 PM), excludes holidays
|
||||||
|
const t1 = await addWorkingHours(now, tatDurationHours * (thresholds.first / 100));
|
||||||
|
const t2 = await addWorkingHours(now, tatDurationHours * (thresholds.second / 100));
|
||||||
|
const tBreach = await addWorkingHours(now, tatDurationHours);
|
||||||
|
threshold1Time = t1.toDate();
|
||||||
|
threshold2Time = t2.toDate();
|
||||||
|
breachTime = tBreach.toDate();
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.info(`[TAT Scheduler] Scheduling TAT jobs - Request: ${requestId}, Priority: ${priority}, TAT: ${tatDurationHours}h`);
|
||||||
|
|
||||||
|
const jobs = [
|
||||||
|
{
|
||||||
|
type: 'threshold1' as const,
|
||||||
|
threshold: thresholds.first,
|
||||||
|
delay: calculateDelay(threshold1Time),
|
||||||
|
targetTime: threshold1Time
|
||||||
|
},
|
||||||
|
{
|
||||||
|
type: 'threshold2' as const,
|
||||||
|
threshold: thresholds.second,
|
||||||
|
delay: calculateDelay(threshold2Time),
|
||||||
|
targetTime: threshold2Time
|
||||||
|
},
|
||||||
|
{
|
||||||
|
type: 'breach' as const,
|
||||||
|
threshold: 100,
|
||||||
|
delay: calculateDelay(breachTime),
|
||||||
|
targetTime: breachTime
|
||||||
|
}
|
||||||
|
];
|
||||||
|
|
||||||
|
|
||||||
|
// Check if test mode enabled (1 hour = 1 minute)
|
||||||
|
const isTestMode = process.env.TAT_TEST_MODE === 'true';
|
||||||
|
|
||||||
|
// Check if times collide (working hours calculation issue)
|
||||||
|
const uniqueTimes = new Set(jobs.map(j => j.targetTime.getTime()));
|
||||||
|
const hasCollision = uniqueTimes.size < jobs.length;
|
||||||
|
|
||||||
|
let jobIndex = 0;
|
||||||
|
for (const job of jobs) {
|
||||||
|
if (job.delay < 0) {
|
||||||
|
logger.error(`[TAT Scheduler] Skipping ${job.type} - time in past`);
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
let spacedDelay: number;
|
||||||
|
|
||||||
|
if (isTestMode) {
|
||||||
|
// Test mode: times are already in minutes (tatTimeUtils converts hours to minutes)
|
||||||
|
// Just ensure they have minimum spacing for BullMQ reliability
|
||||||
|
spacedDelay = Math.max(job.delay, 5000) + (jobIndex * 5000);
|
||||||
|
} else if (hasCollision) {
|
||||||
|
// Production with collision: add 5-minute spacing
|
||||||
|
spacedDelay = job.delay + (jobIndex * 300000);
|
||||||
|
} else {
|
||||||
|
// Production without collision: use calculated delays
|
||||||
|
spacedDelay = job.delay;
|
||||||
|
}
|
||||||
|
|
||||||
|
const jobId = `tat-${job.type}-${requestId}-${levelId}`;
|
||||||
|
|
||||||
|
await tatQueue.add(
|
||||||
|
job.type,
|
||||||
|
{
|
||||||
|
type: job.type,
|
||||||
|
threshold: job.threshold,
|
||||||
|
requestId,
|
||||||
|
levelId,
|
||||||
|
approverId
|
||||||
|
},
|
||||||
|
{
|
||||||
|
delay: spacedDelay,
|
||||||
|
jobId: jobId,
|
||||||
|
removeOnComplete: {
|
||||||
|
age: 3600, // Keep for 1 hour for debugging
|
||||||
|
count: 1000
|
||||||
|
},
|
||||||
|
removeOnFail: false
|
||||||
|
}
|
||||||
|
);
|
||||||
|
|
||||||
|
jobIndex++;
|
||||||
|
}
|
||||||
|
|
||||||
|
logTATEvent('warning', requestId, {
|
||||||
|
level: parseInt(levelId.split('-').pop() || '1'),
|
||||||
|
tatHours: tatDurationHours,
|
||||||
|
priority,
|
||||||
|
message: 'TAT jobs scheduled',
|
||||||
|
});
|
||||||
|
} catch (error) {
|
||||||
|
logger.error(`[TAT Scheduler] Failed to schedule TAT jobs:`, error);
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Schedule TAT jobs on resume - only schedules jobs for alerts that haven't been sent yet
|
||||||
|
* @param requestId - The workflow request ID
|
||||||
|
* @param levelId - The approval level ID
|
||||||
|
* @param approverId - The approver user ID
|
||||||
|
* @param remainingTatHours - Remaining TAT duration in hours (from resume point)
|
||||||
|
* @param startTime - Resume start time
|
||||||
|
* @param priority - Request priority
|
||||||
|
* @param alertStatus - Object indicating which alerts have already been sent and percentage used at pause
|
||||||
|
*/
|
||||||
|
async scheduleTatJobsOnResume(
|
||||||
|
requestId: string,
|
||||||
|
levelId: string,
|
||||||
|
approverId: string,
|
||||||
|
remainingTatHours: number,
|
||||||
|
startTime: Date,
|
||||||
|
priority: Priority = Priority.STANDARD,
|
||||||
|
alertStatus: {
|
||||||
|
tat50AlertSent: boolean;
|
||||||
|
tat75AlertSent: boolean;
|
||||||
|
tatBreached: boolean;
|
||||||
|
percentageUsedAtPause: number;
|
||||||
|
}
|
||||||
|
): Promise<void> {
|
||||||
|
try {
|
||||||
|
if (!tatQueue) {
|
||||||
|
logger.warn(`[TAT Scheduler] TAT queue not available (Redis not connected). Skipping TAT job scheduling on resume.`);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const now = startTime;
|
||||||
|
// Handle both enum and string (case-insensitive) priority values
|
||||||
|
const priorityStr = typeof priority === 'string' ? priority.toUpperCase() : priority;
|
||||||
|
const isExpress = priorityStr === Priority.EXPRESS || priorityStr === 'EXPRESS';
|
||||||
|
|
||||||
|
// Get current thresholds from database configuration
|
||||||
|
const thresholds = await getTatThresholds();
|
||||||
|
|
||||||
|
// Calculate original TAT from remaining + elapsed
|
||||||
|
// Example: If 35 min used (58.33%) and 25 min remaining, original TAT = 60 min
|
||||||
|
const elapsedHours = alertStatus.percentageUsedAtPause > 0
|
||||||
|
? (remainingTatHours * alertStatus.percentageUsedAtPause) / (100 - alertStatus.percentageUsedAtPause)
|
||||||
|
: 0;
|
||||||
|
const originalTatHours = elapsedHours + remainingTatHours;
|
||||||
|
|
||||||
|
logger.info(`[TAT Scheduler] Resuming TAT scheduling - Request: ${requestId}, Remaining: ${(remainingTatHours * 60).toFixed(1)} min, Priority: ${isExpress ? 'EXPRESS' : 'STANDARD'}`);
|
||||||
|
|
||||||
|
// Jobs to schedule - only include those that haven't been sent and haven't been passed
|
||||||
|
const jobsToSchedule: Array<{
|
||||||
|
type: 'threshold1' | 'threshold2' | 'breach';
|
||||||
|
threshold: number;
|
||||||
|
alreadySent: boolean;
|
||||||
|
alreadyPassed: boolean;
|
||||||
|
hoursFromNow: number;
|
||||||
|
}> = [];
|
||||||
|
|
||||||
|
// Threshold 1 (e.g., 50%)
|
||||||
|
// Skip if: already sent OR already passed the threshold
|
||||||
|
if (!alertStatus.tat50AlertSent && alertStatus.percentageUsedAtPause < thresholds.first) {
|
||||||
|
// Calculate: How many hours from NOW until we reach this threshold?
|
||||||
|
// Formula: (thresholdHours - elapsedHours)
|
||||||
|
// thresholdHours = originalTatHours * (threshold/100)
|
||||||
|
const thresholdHours = originalTatHours * (thresholds.first / 100);
|
||||||
|
const hoursFromNow = thresholdHours - elapsedHours;
|
||||||
|
|
||||||
|
if (hoursFromNow > 0) {
|
||||||
|
jobsToSchedule.push({
|
||||||
|
type: 'threshold1',
|
||||||
|
threshold: thresholds.first,
|
||||||
|
alreadySent: false,
|
||||||
|
alreadyPassed: false,
|
||||||
|
hoursFromNow: hoursFromNow
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Threshold 2 (e.g., 75%)
|
||||||
|
if (!alertStatus.tat75AlertSent && alertStatus.percentageUsedAtPause < thresholds.second) {
|
||||||
|
const thresholdHours = originalTatHours * (thresholds.second / 100);
|
||||||
|
const hoursFromNow = thresholdHours - elapsedHours;
|
||||||
|
|
||||||
|
if (hoursFromNow > 0) {
|
||||||
|
jobsToSchedule.push({
|
||||||
|
type: 'threshold2',
|
||||||
|
threshold: thresholds.second,
|
||||||
|
alreadySent: false,
|
||||||
|
alreadyPassed: false,
|
||||||
|
hoursFromNow: hoursFromNow
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Breach (100%)
|
||||||
|
if (!alertStatus.tatBreached) {
|
||||||
|
// Breach is always scheduled for the end of remaining TAT
|
||||||
|
jobsToSchedule.push({
|
||||||
|
type: 'breach',
|
||||||
|
threshold: 100,
|
||||||
|
alreadySent: false,
|
||||||
|
alreadyPassed: false,
|
||||||
|
hoursFromNow: remainingTatHours
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
if (jobsToSchedule.length === 0) {
|
||||||
|
logger.info(`[TAT Scheduler] No TAT jobs to schedule (all alerts already sent)`);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Calculate actual times and schedule jobs
|
||||||
|
for (const job of jobsToSchedule) {
|
||||||
|
let targetTime: Date;
|
||||||
|
|
||||||
|
if (isExpress) {
|
||||||
|
targetTime = (await addWorkingHoursExpress(now, job.hoursFromNow)).toDate();
|
||||||
|
} else {
|
||||||
|
targetTime = (await addWorkingHours(now, job.hoursFromNow)).toDate();
|
||||||
|
}
|
||||||
|
|
||||||
|
const delay = calculateDelay(targetTime);
|
||||||
|
|
||||||
|
if (delay < 0) {
|
||||||
|
logger.warn(`[TAT Scheduler] Skipping ${job.type} - calculated time is in past`);
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
const jobId = `tat-${job.type}-${requestId}-${levelId}`;
|
||||||
|
|
||||||
|
await tatQueue.add(
|
||||||
|
job.type,
|
||||||
|
{
|
||||||
|
type: job.type,
|
||||||
|
threshold: job.threshold,
|
||||||
|
requestId,
|
||||||
|
levelId,
|
||||||
|
approverId
|
||||||
|
},
|
||||||
|
{
|
||||||
|
delay: delay,
|
||||||
|
jobId: jobId,
|
||||||
|
removeOnComplete: {
|
||||||
|
age: 3600,
|
||||||
|
count: 1000
|
||||||
|
},
|
||||||
|
removeOnFail: false
|
||||||
|
}
|
||||||
|
);
|
||||||
|
|
||||||
|
logger.info(`[TAT Scheduler] ✓ Scheduled ${job.type} (${job.threshold}%) for ${dayjs(targetTime).format('YYYY-MM-DD HH:mm')}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.info(`[TAT Scheduler] ✅ ${jobsToSchedule.length} TAT job(s) scheduled for request ${requestId}`);
|
||||||
|
} catch (error) {
|
||||||
|
logger.error(`[TAT Scheduler] Failed to schedule TAT jobs on resume:`, error);
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Cancel TAT jobs for a specific approval level
|
||||||
|
* Useful when an approver acts before TAT expires
|
||||||
|
* @param requestId - The workflow request ID
|
||||||
|
* @param levelId - The approval level ID
|
||||||
|
*/
|
||||||
|
async cancelTatJobs(requestId: string, levelId: string): Promise<void> {
|
||||||
|
try {
|
||||||
|
// Check if tatQueue is available
|
||||||
|
if (!tatQueue) {
|
||||||
|
logger.warn(`[TAT Scheduler] TAT queue not available. Skipping job cancellation.`);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Use generic job names that don't depend on threshold percentages
|
||||||
|
const jobIds = [
|
||||||
|
`tat-threshold1-${requestId}-${levelId}`,
|
||||||
|
`tat-threshold2-${requestId}-${levelId}`,
|
||||||
|
`tat-breach-${requestId}-${levelId}`
|
||||||
|
];
|
||||||
|
|
||||||
|
for (const jobId of jobIds) {
|
||||||
|
try {
|
||||||
|
const job = await tatQueue.getJob(jobId);
|
||||||
|
if (job) {
|
||||||
|
await job.remove();
|
||||||
|
logger.info(`[TAT Scheduler] Cancelled job ${jobId}`);
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
// Job might not exist, which is fine
|
||||||
|
logger.debug(`[TAT Scheduler] Job ${jobId} not found (may have already been processed)`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.info(`[TAT Scheduler] ✅ TAT jobs cancelled for level ${levelId}`);
|
||||||
|
} catch (error) {
|
||||||
|
logger.error(`[TAT Scheduler] Failed to cancel TAT jobs:`, error);
|
||||||
|
// Don't throw - cancellation failure shouldn't break the workflow
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Cancel all TAT jobs for a workflow request
|
||||||
|
* @param requestId - The workflow request ID
|
||||||
|
*/
|
||||||
|
async cancelAllTatJobsForRequest(requestId: string): Promise<void> {
|
||||||
|
try {
|
||||||
|
// Check if tatQueue is available
|
||||||
|
if (!tatQueue) {
|
||||||
|
logger.warn(`[TAT Scheduler] TAT queue not available. Skipping job cancellation.`);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const jobs = await tatQueue.getJobs(['delayed', 'waiting']);
|
||||||
|
const requestJobs = jobs.filter(job => job.data.requestId === requestId);
|
||||||
|
|
||||||
|
for (const job of requestJobs) {
|
||||||
|
await job.remove();
|
||||||
|
logger.info(`[TAT Scheduler] Cancelled job ${job.id}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.info(`[TAT Scheduler] ✅ All TAT jobs cancelled for request ${requestId}`);
|
||||||
|
} catch (error) {
|
||||||
|
logger.error(`[TAT Scheduler] Failed to cancel all TAT jobs:`, error);
|
||||||
|
// Don't throw - cancellation failure shouldn't break the workflow
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export const tatSchedulerService = new TatSchedulerService();
|
||||||
|
|
||||||
3449
_archive/services/workflow.service.ts
Normal file
3449
_archive/services/workflow.service.ts
Normal file
File diff suppressed because it is too large
Load Diff
446
_archive/services/worknote.service.ts
Normal file
446
_archive/services/worknote.service.ts
Normal file
@ -0,0 +1,446 @@
|
|||||||
|
import { Op } from 'sequelize';
|
||||||
|
import { WorkNote } from '@models/WorkNote';
|
||||||
|
import { WorkNoteAttachment } from '@models/WorkNoteAttachment';
|
||||||
|
import { Participant } from '@models/Participant';
|
||||||
|
import { WorkflowRequest } from '@models/WorkflowRequest';
|
||||||
|
import { User } from '@models/User';
|
||||||
|
import { ApprovalLevel } from '@models/ApprovalLevel';
|
||||||
|
import { activityService } from './activity.service';
|
||||||
|
import { notificationService } from './notification.service';
|
||||||
|
import { emailNotificationService } from './emailNotification.service';
|
||||||
|
import { gcsStorageService } from './gcsStorage.service';
|
||||||
|
import logger from '@utils/logger';
|
||||||
|
import fs from 'fs';
|
||||||
|
import path from 'path';
|
||||||
|
|
||||||
|
export class WorkNoteService {
|
||||||
|
async list(requestId: string) {
|
||||||
|
const notes = await WorkNote.findAll({
|
||||||
|
where: { requestId },
|
||||||
|
order: [['created_at' as any, 'ASC']]
|
||||||
|
});
|
||||||
|
|
||||||
|
// Load attachments for each note
|
||||||
|
const enriched = await Promise.all(notes.map(async (note) => {
|
||||||
|
const noteId = (note as any).noteId;
|
||||||
|
const attachments = await WorkNoteAttachment.findAll({
|
||||||
|
where: { noteId }
|
||||||
|
});
|
||||||
|
|
||||||
|
const noteData = (note as any).toJSON();
|
||||||
|
|
||||||
|
const mappedAttachments = attachments.map((a: any) => {
|
||||||
|
const attData = typeof a.toJSON === 'function' ? a.toJSON() : a;
|
||||||
|
return {
|
||||||
|
attachmentId: attData.attachmentId || attData.attachment_id,
|
||||||
|
fileName: attData.fileName || attData.file_name,
|
||||||
|
fileType: attData.fileType || attData.file_type,
|
||||||
|
fileSize: attData.fileSize || attData.file_size,
|
||||||
|
filePath: attData.filePath || attData.file_path,
|
||||||
|
storageUrl: attData.storageUrl || attData.storage_url,
|
||||||
|
isDownloadable: attData.isDownloadable || attData.is_downloadable,
|
||||||
|
uploadedAt: attData.uploadedAt || attData.uploaded_at
|
||||||
|
};
|
||||||
|
});
|
||||||
|
|
||||||
|
return {
|
||||||
|
noteId: noteData.noteId || noteData.note_id,
|
||||||
|
requestId: noteData.requestId || noteData.request_id,
|
||||||
|
userId: noteData.userId || noteData.user_id,
|
||||||
|
userName: noteData.userName || noteData.user_name,
|
||||||
|
userRole: noteData.userRole || noteData.user_role,
|
||||||
|
message: noteData.message,
|
||||||
|
isPriority: noteData.isPriority || noteData.is_priority,
|
||||||
|
hasAttachment: noteData.hasAttachment || noteData.has_attachment,
|
||||||
|
createdAt: noteData.createdAt || noteData.created_at,
|
||||||
|
updatedAt: noteData.updatedAt || noteData.updated_at,
|
||||||
|
attachments: mappedAttachments
|
||||||
|
};
|
||||||
|
}));
|
||||||
|
|
||||||
|
return enriched;
|
||||||
|
}
|
||||||
|
|
||||||
|
async getUserRole(requestId: string, userId: string): Promise<string> {
|
||||||
|
try {
|
||||||
|
const participant = await Participant.findOne({
|
||||||
|
where: { requestId, userId }
|
||||||
|
});
|
||||||
|
if (participant) {
|
||||||
|
const type = (participant as any).participantType || (participant as any).participant_type;
|
||||||
|
return type ? type.toString() : 'Participant';
|
||||||
|
}
|
||||||
|
return 'Participant';
|
||||||
|
} catch (error) {
|
||||||
|
logger.error('[WorkNote] Error fetching user role:', error);
|
||||||
|
return 'Participant';
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async create(requestId: string, user: { userId: string; name?: string; role?: string }, payload: { message: string; isPriority?: boolean; parentNoteId?: string | null; mentionedUsers?: string[] | null; }, files?: Array<{ path?: string | null; buffer?: Buffer; originalname: string; mimetype: string; size: number }>, requestMetadata?: { ipAddress?: string | null; userAgent?: string | null }): Promise<any> {
|
||||||
|
logger.info('[WorkNote] Creating note:', { requestId, user, messageLength: payload.message?.length });
|
||||||
|
|
||||||
|
const note = await WorkNote.create({
|
||||||
|
requestId,
|
||||||
|
userId: user.userId,
|
||||||
|
userName: user.name || null,
|
||||||
|
userRole: user.role || null, // Store participant type (INITIATOR/APPROVER/SPECTATOR)
|
||||||
|
message: payload.message,
|
||||||
|
isPriority: !!payload.isPriority,
|
||||||
|
parentNoteId: payload.parentNoteId || null,
|
||||||
|
mentionedUsers: payload.mentionedUsers || null,
|
||||||
|
hasAttachment: files && files.length > 0 ? true : false
|
||||||
|
} as any);
|
||||||
|
|
||||||
|
logger.info('[WorkNote] Created note:', {
|
||||||
|
noteId: (note as any).noteId,
|
||||||
|
userId: (note as any).userId,
|
||||||
|
userName: (note as any).userName,
|
||||||
|
userRole: (note as any).userRole
|
||||||
|
});
|
||||||
|
|
||||||
|
const attachments = [];
|
||||||
|
if (files && files.length) {
|
||||||
|
// Get request number for folder structure
|
||||||
|
const workflow = await WorkflowRequest.findOne({ where: { requestId } });
|
||||||
|
const requestNumber = workflow ? ((workflow as any).requestNumber || (workflow as any).request_number) : null;
|
||||||
|
|
||||||
|
for (const f of files) {
|
||||||
|
// Read file buffer if path exists, otherwise use provided buffer
|
||||||
|
const fileBuffer = f.buffer || (f.path ? fs.readFileSync(f.path) : Buffer.from(''));
|
||||||
|
|
||||||
|
// Upload with automatic fallback to local storage
|
||||||
|
// If requestNumber is not available, use a default structure
|
||||||
|
const effectiveRequestNumber = requestNumber || 'UNKNOWN';
|
||||||
|
const uploadResult = await gcsStorageService.uploadFileWithFallback({
|
||||||
|
buffer: fileBuffer,
|
||||||
|
originalName: f.originalname,
|
||||||
|
mimeType: f.mimetype,
|
||||||
|
requestNumber: effectiveRequestNumber,
|
||||||
|
fileType: 'attachments'
|
||||||
|
});
|
||||||
|
|
||||||
|
const storageUrl = uploadResult.storageUrl;
|
||||||
|
const gcsFilePath = uploadResult.filePath;
|
||||||
|
|
||||||
|
// Clean up local temporary file if it exists (from multer disk storage)
|
||||||
|
if (f.path && fs.existsSync(f.path)) {
|
||||||
|
try {
|
||||||
|
fs.unlinkSync(f.path);
|
||||||
|
} catch (unlinkError) {
|
||||||
|
logger.warn('[WorkNote] Failed to delete local temporary file:', unlinkError);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const attachment = await WorkNoteAttachment.create({
|
||||||
|
noteId: (note as any).noteId,
|
||||||
|
fileName: f.originalname,
|
||||||
|
fileType: f.mimetype,
|
||||||
|
fileSize: f.size,
|
||||||
|
filePath: gcsFilePath, // Store GCS path or local path
|
||||||
|
storageUrl: storageUrl, // Store GCS URL or local URL
|
||||||
|
isDownloadable: true
|
||||||
|
} as any);
|
||||||
|
|
||||||
|
attachments.push({
|
||||||
|
attachmentId: (attachment as any).attachmentId,
|
||||||
|
fileName: (attachment as any).fileName,
|
||||||
|
fileType: (attachment as any).fileType,
|
||||||
|
fileSize: (attachment as any).fileSize,
|
||||||
|
filePath: (attachment as any).filePath,
|
||||||
|
storageUrl: (attachment as any).storageUrl,
|
||||||
|
isDownloadable: (attachment as any).isDownloadable
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
// Send notifications for additional document added via work notes
|
||||||
|
if (attachments.length > 0) {
|
||||||
|
try {
|
||||||
|
const workflow = await WorkflowRequest.findOne({ where: { requestId } });
|
||||||
|
if (workflow) {
|
||||||
|
const initiatorId = (workflow as any).initiatorId || (workflow as any).initiator_id;
|
||||||
|
const isInitiator = user.userId === initiatorId;
|
||||||
|
|
||||||
|
// Get all participants (spectators)
|
||||||
|
const spectators = await Participant.findAll({
|
||||||
|
where: {
|
||||||
|
requestId,
|
||||||
|
participantType: 'SPECTATOR'
|
||||||
|
},
|
||||||
|
include: [{
|
||||||
|
model: User,
|
||||||
|
as: 'user',
|
||||||
|
attributes: ['userId', 'email', 'displayName']
|
||||||
|
}]
|
||||||
|
});
|
||||||
|
|
||||||
|
// Get current approver (pending or in-progress approval level)
|
||||||
|
const currentApprovalLevel = await ApprovalLevel.findOne({
|
||||||
|
where: {
|
||||||
|
requestId,
|
||||||
|
status: { [Op.in]: ['PENDING', 'IN_PROGRESS'] }
|
||||||
|
},
|
||||||
|
order: [['levelNumber', 'ASC']],
|
||||||
|
include: [{
|
||||||
|
model: User,
|
||||||
|
as: 'approver',
|
||||||
|
attributes: ['userId', 'email', 'displayName']
|
||||||
|
}]
|
||||||
|
});
|
||||||
|
|
||||||
|
// Determine who to notify based on who uploaded
|
||||||
|
const recipientsToNotify: Array<{ userId: string; email: string; displayName: string }> = [];
|
||||||
|
|
||||||
|
if (isInitiator) {
|
||||||
|
// Initiator added → notify spectators and current approver
|
||||||
|
spectators.forEach((spectator: any) => {
|
||||||
|
const spectatorUser = spectator.user || spectator.User;
|
||||||
|
if (spectatorUser && spectatorUser.userId !== user.userId) {
|
||||||
|
recipientsToNotify.push({
|
||||||
|
userId: spectatorUser.userId,
|
||||||
|
email: spectatorUser.email,
|
||||||
|
displayName: spectatorUser.displayName || spectatorUser.email
|
||||||
|
});
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
if (currentApprovalLevel) {
|
||||||
|
const approverUser = (currentApprovalLevel as any).approver || (currentApprovalLevel as any).Approver;
|
||||||
|
if (approverUser && approverUser.userId !== user.userId) {
|
||||||
|
recipientsToNotify.push({
|
||||||
|
userId: approverUser.userId,
|
||||||
|
email: approverUser.email,
|
||||||
|
displayName: approverUser.displayName || approverUser.email
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
// Check if uploader is a spectator
|
||||||
|
const uploaderParticipant = await Participant.findOne({
|
||||||
|
where: {
|
||||||
|
requestId,
|
||||||
|
userId: user.userId,
|
||||||
|
participantType: 'SPECTATOR'
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
if (uploaderParticipant) {
|
||||||
|
// Spectator added → notify initiator and current approver
|
||||||
|
const initiator = await User.findByPk(initiatorId);
|
||||||
|
if (initiator) {
|
||||||
|
const initiatorData = initiator.toJSON();
|
||||||
|
if (initiatorData.userId !== user.userId) {
|
||||||
|
recipientsToNotify.push({
|
||||||
|
userId: initiatorData.userId,
|
||||||
|
email: initiatorData.email,
|
||||||
|
displayName: initiatorData.displayName || initiatorData.email
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (currentApprovalLevel) {
|
||||||
|
const approverUser = (currentApprovalLevel as any).approver || (currentApprovalLevel as any).Approver;
|
||||||
|
if (approverUser && approverUser.userId !== user.userId) {
|
||||||
|
recipientsToNotify.push({
|
||||||
|
userId: approverUser.userId,
|
||||||
|
email: approverUser.email,
|
||||||
|
displayName: approverUser.displayName || approverUser.email
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
// Approver added → notify initiator and spectators
|
||||||
|
const initiator = await User.findByPk(initiatorId);
|
||||||
|
if (initiator) {
|
||||||
|
const initiatorData = initiator.toJSON();
|
||||||
|
if (initiatorData.userId !== user.userId) {
|
||||||
|
recipientsToNotify.push({
|
||||||
|
userId: initiatorData.userId,
|
||||||
|
email: initiatorData.email,
|
||||||
|
displayName: initiatorData.displayName || initiatorData.email
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
spectators.forEach((spectator: any) => {
|
||||||
|
const spectatorUser = spectator.user || spectator.User;
|
||||||
|
if (spectatorUser && spectatorUser.userId !== user.userId) {
|
||||||
|
recipientsToNotify.push({
|
||||||
|
userId: spectatorUser.userId,
|
||||||
|
email: spectatorUser.email,
|
||||||
|
displayName: spectatorUser.displayName || spectatorUser.email
|
||||||
|
});
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Send notifications (email, in-app, and web-push)
|
||||||
|
const requestNumber = (workflow as any).requestNumber || requestId;
|
||||||
|
const requestData = {
|
||||||
|
requestNumber: requestNumber,
|
||||||
|
requestId: requestId,
|
||||||
|
title: (workflow as any).title || 'Request'
|
||||||
|
};
|
||||||
|
|
||||||
|
// Prepare user IDs for in-app and web-push notifications
|
||||||
|
const recipientUserIds = recipientsToNotify.map(r => r.userId);
|
||||||
|
|
||||||
|
// Send in-app and web-push notifications for each attachment
|
||||||
|
if (recipientUserIds.length > 0 && attachments.length > 0) {
|
||||||
|
try {
|
||||||
|
for (const attachment of attachments) {
|
||||||
|
await notificationService.sendToUsers(
|
||||||
|
recipientUserIds,
|
||||||
|
{
|
||||||
|
title: 'Additional Document Added',
|
||||||
|
body: `${user.name || 'User'} added "${attachment.fileName}" to ${requestNumber}`,
|
||||||
|
requestId,
|
||||||
|
requestNumber,
|
||||||
|
url: `/request/${requestNumber}`,
|
||||||
|
type: 'document_added',
|
||||||
|
priority: 'MEDIUM',
|
||||||
|
actionRequired: false,
|
||||||
|
metadata: {
|
||||||
|
documentName: attachment.fileName,
|
||||||
|
fileSize: attachment.fileSize,
|
||||||
|
addedByName: user.name || 'User',
|
||||||
|
source: 'Work Notes'
|
||||||
|
}
|
||||||
|
}
|
||||||
|
);
|
||||||
|
}
|
||||||
|
logger.info('[WorkNote] In-app and web-push notifications sent for additional documents', {
|
||||||
|
requestId,
|
||||||
|
attachmentsCount: attachments.length,
|
||||||
|
recipientsCount: recipientUserIds.length
|
||||||
|
});
|
||||||
|
} catch (notifyError) {
|
||||||
|
logger.error('[WorkNote] Failed to send in-app/web-push notifications for additional documents:', notifyError);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Send email notifications for each attachment
|
||||||
|
for (const attachment of attachments) {
|
||||||
|
for (const recipient of recipientsToNotify) {
|
||||||
|
await emailNotificationService.sendAdditionalDocumentAdded(
|
||||||
|
requestData,
|
||||||
|
recipient,
|
||||||
|
{
|
||||||
|
documentName: attachment.fileName,
|
||||||
|
fileSize: attachment.fileSize,
|
||||||
|
addedByName: user.name || 'User',
|
||||||
|
source: 'Work Notes'
|
||||||
|
}
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.info('[WorkNote] Additional document notifications sent', {
|
||||||
|
requestId,
|
||||||
|
attachmentsCount: attachments.length,
|
||||||
|
recipientsCount: recipientsToNotify.length,
|
||||||
|
isInitiator
|
||||||
|
});
|
||||||
|
}
|
||||||
|
} catch (notifyError) {
|
||||||
|
// Don't fail work note creation if notifications fail
|
||||||
|
logger.error('[WorkNote] Failed to send additional document notifications:', notifyError);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Log activity for work note
|
||||||
|
activityService.log({
|
||||||
|
requestId,
|
||||||
|
type: 'comment',
|
||||||
|
user: { userId: user.userId, name: user.name || 'User' },
|
||||||
|
timestamp: new Date().toISOString(),
|
||||||
|
action: 'Work Note Added',
|
||||||
|
details: `${user.name || 'User'} added a work note: ${payload.message.substring(0, 100)}${payload.message.length > 100 ? '...' : ''}`,
|
||||||
|
ipAddress: requestMetadata?.ipAddress || undefined,
|
||||||
|
userAgent: requestMetadata?.userAgent || undefined
|
||||||
|
});
|
||||||
|
|
||||||
|
try {
|
||||||
|
// Optional realtime emit (if socket layer is initialized)
|
||||||
|
const { emitToRequestRoom } = require('../realtime/socket');
|
||||||
|
if (emitToRequestRoom) {
|
||||||
|
// Emit note with all fields explicitly (to ensure camelCase fields are sent)
|
||||||
|
const noteData = {
|
||||||
|
noteId: (note as any).noteId,
|
||||||
|
requestId: (note as any).requestId,
|
||||||
|
userId: (note as any).userId,
|
||||||
|
userName: (note as any).userName,
|
||||||
|
userRole: (note as any).userRole, // Include participant role
|
||||||
|
message: (note as any).message,
|
||||||
|
createdAt: (note as any).createdAt,
|
||||||
|
hasAttachment: (note as any).hasAttachment,
|
||||||
|
attachments: attachments // Include attachments
|
||||||
|
};
|
||||||
|
emitToRequestRoom(requestId, 'worknote:new', { note: noteData });
|
||||||
|
}
|
||||||
|
} catch (e) { logger.warn('Realtime emit failed (not initialized)'); }
|
||||||
|
|
||||||
|
// Send notifications to mentioned users
|
||||||
|
if (payload.mentionedUsers && Array.isArray(payload.mentionedUsers) && payload.mentionedUsers.length > 0) {
|
||||||
|
try {
|
||||||
|
// Get workflow details for request number and title
|
||||||
|
const workflow = await WorkflowRequest.findOne({ where: { requestId } });
|
||||||
|
const requestNumber = (workflow as any)?.requestNumber || requestId;
|
||||||
|
const requestTitle = (workflow as any)?.title || 'Request';
|
||||||
|
|
||||||
|
logger.info(`[WorkNote] Sending mention notifications to ${payload.mentionedUsers.length} users`);
|
||||||
|
|
||||||
|
await notificationService.sendToUsers(
|
||||||
|
payload.mentionedUsers,
|
||||||
|
{
|
||||||
|
title: '💬 Mentioned in Work Note',
|
||||||
|
body: `${user.name || 'Someone'} mentioned you in ${requestNumber}: "${payload.message.substring(0, 50)}${payload.message.length > 50 ? '...' : ''}"`,
|
||||||
|
requestId,
|
||||||
|
requestNumber,
|
||||||
|
url: `/request/${requestNumber}`,
|
||||||
|
type: 'mention'
|
||||||
|
}
|
||||||
|
);
|
||||||
|
|
||||||
|
logger.info(`[WorkNote] Mention notifications sent successfully`);
|
||||||
|
} catch (notifyError) {
|
||||||
|
logger.error('[WorkNote] Failed to send mention notifications:', notifyError);
|
||||||
|
// Don't fail the work note creation if notifications fail
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return { ...note, attachments };
|
||||||
|
}
|
||||||
|
|
||||||
|
async downloadAttachment(attachmentId: string) {
|
||||||
|
const attachment = await WorkNoteAttachment.findOne({
|
||||||
|
where: { attachmentId }
|
||||||
|
});
|
||||||
|
|
||||||
|
if (!attachment) {
|
||||||
|
throw new Error('Attachment not found');
|
||||||
|
}
|
||||||
|
|
||||||
|
const storageUrl = (attachment as any).storageUrl || (attachment as any).storage_url;
|
||||||
|
const filePath = (attachment as any).filePath || (attachment as any).file_path;
|
||||||
|
const fileName = (attachment as any).fileName || (attachment as any).file_name;
|
||||||
|
const fileType = (attachment as any).fileType || (attachment as any).file_type;
|
||||||
|
|
||||||
|
// Check if it's a GCS URL
|
||||||
|
const isGcsUrl = storageUrl && (storageUrl.startsWith('https://storage.googleapis.com') || storageUrl.startsWith('gs://'));
|
||||||
|
|
||||||
|
return {
|
||||||
|
filePath: filePath,
|
||||||
|
storageUrl: storageUrl,
|
||||||
|
fileName: fileName,
|
||||||
|
fileType: fileType,
|
||||||
|
isGcsUrl: isGcsUrl
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export const workNoteService = new WorkNoteService();
|
||||||
|
|
||||||
|
|
||||||
@ -383,3 +383,221 @@ report_cache {
|
|||||||
%% 8. TAT thresholds: 50%, 80%, 100%
|
%% 8. TAT thresholds: 50%, 80%, 100%
|
||||||
%% 9. Max approval levels: 10
|
%% 9. Max approval levels: 10
|
||||||
%% 10. Max file size: 10 MB
|
%% 10. Max file size: 10 MB
|
||||||
|
|
||||||
|
erDiagram
|
||||||
|
workflow_requests ||--|| dealer_claim_details : "has_claim_details"
|
||||||
|
workflow_requests ||--o{ dealer_claim_history : "has_claim_history"
|
||||||
|
workflow_requests ||--|| dealer_proposal_details : "has_proposal"
|
||||||
|
workflow_requests ||--|| dealer_completion_details : "has_completion"
|
||||||
|
workflow_requests ||--|| claim_budget_tracking : "tracks_budget"
|
||||||
|
workflow_requests ||--|| internal_orders : "has_io"
|
||||||
|
workflow_requests ||--o{ claim_invoices : "has_invoices"
|
||||||
|
workflow_requests ||--o{ claim_credit_notes : "has_credit_notes"
|
||||||
|
workflow_requests ||--o{ tat_alerts : "triggers_alerts"
|
||||||
|
workflow_requests ||--|| request_summaries : "has_summary"
|
||||||
|
|
||||||
|
dealer_proposal_details ||--o{ dealer_proposal_cost_items : "has_items"
|
||||||
|
dealer_completion_details ||--o{ dealer_completion_expenses : "has_expenses"
|
||||||
|
claim_invoices ||--o{ claim_credit_notes : "has_credit_notes"
|
||||||
|
|
||||||
|
request_summaries ||--o{ shared_summaries : "shared_as"
|
||||||
|
users ||--o{ shared_summaries : "shares"
|
||||||
|
users ||--o{ subscriptions : "has_subscription"
|
||||||
|
users ||--o{ holidays : "creates"
|
||||||
|
users ||--o{ activity_types : "creates"
|
||||||
|
|
||||||
|
dealers {
|
||||||
|
uuid dealer_id PK
|
||||||
|
varchar sales_code
|
||||||
|
varchar service_code
|
||||||
|
varchar dealer_name
|
||||||
|
varchar region
|
||||||
|
varchar state
|
||||||
|
varchar city
|
||||||
|
varchar location
|
||||||
|
boolean is_active
|
||||||
|
timestamp created_at
|
||||||
|
timestamp updated_at
|
||||||
|
}
|
||||||
|
|
||||||
|
dealer_claim_details {
|
||||||
|
uuid claim_id PK
|
||||||
|
uuid request_id FK
|
||||||
|
varchar activity_name
|
||||||
|
varchar activity_type
|
||||||
|
varchar dealer_code
|
||||||
|
varchar dealer_name
|
||||||
|
date activity_date
|
||||||
|
date period_start_date
|
||||||
|
date period_end_date
|
||||||
|
timestamp created_at
|
||||||
|
timestamp updated_at
|
||||||
|
}
|
||||||
|
|
||||||
|
dealer_claim_history {
|
||||||
|
uuid history_id PK
|
||||||
|
uuid request_id FK
|
||||||
|
uuid approval_level_id FK
|
||||||
|
integer version
|
||||||
|
enum snapshot_type
|
||||||
|
jsonb snapshot_data
|
||||||
|
text change_reason
|
||||||
|
uuid changed_by FK
|
||||||
|
timestamp created_at
|
||||||
|
}
|
||||||
|
|
||||||
|
dealer_proposal_details {
|
||||||
|
uuid proposal_id PK
|
||||||
|
uuid request_id FK
|
||||||
|
varchar proposal_document_path
|
||||||
|
decimal total_estimated_budget
|
||||||
|
date expected_completion_date
|
||||||
|
text dealer_comments
|
||||||
|
timestamp submitted_at
|
||||||
|
timestamp created_at
|
||||||
|
timestamp updated_at
|
||||||
|
}
|
||||||
|
|
||||||
|
dealer_proposal_cost_items {
|
||||||
|
uuid cost_item_id PK
|
||||||
|
uuid proposal_id FK
|
||||||
|
uuid request_id FK
|
||||||
|
varchar item_description
|
||||||
|
decimal amount
|
||||||
|
integer item_order
|
||||||
|
timestamp created_at
|
||||||
|
timestamp updated_at
|
||||||
|
}
|
||||||
|
|
||||||
|
dealer_completion_details {
|
||||||
|
uuid completion_id PK
|
||||||
|
uuid request_id FK
|
||||||
|
date activity_completion_date
|
||||||
|
integer number_of_participants
|
||||||
|
decimal total_closed_expenses
|
||||||
|
timestamp submitted_at
|
||||||
|
timestamp created_at
|
||||||
|
timestamp updated_at
|
||||||
|
}
|
||||||
|
|
||||||
|
dealer_completion_expenses {
|
||||||
|
uuid expense_id PK
|
||||||
|
uuid completion_id FK
|
||||||
|
uuid request_id FK
|
||||||
|
varchar description
|
||||||
|
decimal amount
|
||||||
|
timestamp created_at
|
||||||
|
timestamp updated_at
|
||||||
|
}
|
||||||
|
|
||||||
|
claim_budget_tracking {
|
||||||
|
uuid budget_id PK
|
||||||
|
uuid request_id FK
|
||||||
|
decimal initial_estimated_budget
|
||||||
|
decimal proposal_estimated_budget
|
||||||
|
decimal approved_budget
|
||||||
|
decimal io_blocked_amount
|
||||||
|
decimal closed_expenses
|
||||||
|
decimal final_claim_amount
|
||||||
|
decimal credit_note_amount
|
||||||
|
enum budget_status
|
||||||
|
timestamp created_at
|
||||||
|
timestamp updated_at
|
||||||
|
}
|
||||||
|
|
||||||
|
claim_invoices {
|
||||||
|
uuid invoice_id PK
|
||||||
|
uuid request_id FK
|
||||||
|
varchar invoice_number
|
||||||
|
date invoice_date
|
||||||
|
decimal amount
|
||||||
|
varchar status
|
||||||
|
timestamp created_at
|
||||||
|
timestamp updated_at
|
||||||
|
}
|
||||||
|
|
||||||
|
claim_credit_notes {
|
||||||
|
uuid credit_note_id PK
|
||||||
|
uuid request_id FK
|
||||||
|
uuid invoice_id FK
|
||||||
|
varchar credit_note_number
|
||||||
|
decimal credit_note_amount
|
||||||
|
varchar status
|
||||||
|
timestamp created_at
|
||||||
|
timestamp updated_at
|
||||||
|
}
|
||||||
|
|
||||||
|
internal_orders {
|
||||||
|
uuid io_id PK
|
||||||
|
uuid request_id FK
|
||||||
|
varchar io_number
|
||||||
|
decimal io_available_balance
|
||||||
|
decimal io_blocked_amount
|
||||||
|
enum status
|
||||||
|
timestamp created_at
|
||||||
|
timestamp updated_at
|
||||||
|
}
|
||||||
|
|
||||||
|
holidays {
|
||||||
|
uuid holiday_id PK
|
||||||
|
date holiday_date
|
||||||
|
varchar holiday_name
|
||||||
|
enum holiday_type
|
||||||
|
boolean is_active
|
||||||
|
uuid created_by FK
|
||||||
|
timestamp created_at
|
||||||
|
timestamp updated_at
|
||||||
|
}
|
||||||
|
|
||||||
|
activity_types {
|
||||||
|
uuid activity_type_id PK
|
||||||
|
varchar title
|
||||||
|
varchar item_code
|
||||||
|
varchar taxation_type
|
||||||
|
boolean is_active
|
||||||
|
uuid created_by FK
|
||||||
|
timestamp created_at
|
||||||
|
timestamp updated_at
|
||||||
|
}
|
||||||
|
|
||||||
|
tat_alerts {
|
||||||
|
uuid alert_id PK
|
||||||
|
uuid request_id FK
|
||||||
|
uuid level_id FK
|
||||||
|
uuid approver_id FK
|
||||||
|
enum alert_type
|
||||||
|
boolean is_breached
|
||||||
|
timestamp alert_sent_at
|
||||||
|
timestamp created_at
|
||||||
|
}
|
||||||
|
|
||||||
|
request_summaries {
|
||||||
|
uuid summary_id PK
|
||||||
|
uuid request_id FK
|
||||||
|
uuid initiator_id FK
|
||||||
|
varchar title
|
||||||
|
text description
|
||||||
|
text closing_remarks
|
||||||
|
boolean is_ai_generated
|
||||||
|
timestamp created_at
|
||||||
|
timestamp updated_at
|
||||||
|
}
|
||||||
|
|
||||||
|
shared_summaries {
|
||||||
|
uuid shared_summary_id PK
|
||||||
|
uuid summary_id FK
|
||||||
|
uuid shared_by FK
|
||||||
|
uuid shared_with FK
|
||||||
|
boolean is_read
|
||||||
|
timestamp shared_at
|
||||||
|
timestamp created_at
|
||||||
|
}
|
||||||
|
|
||||||
|
subscriptions {
|
||||||
|
uuid subscription_id PK
|
||||||
|
uuid user_id FK
|
||||||
|
varchar endpoint
|
||||||
|
varchar p256dh
|
||||||
|
varchar auth
|
||||||
|
timestamp created_at
|
||||||
|
}
|
||||||
|
|||||||
310
docs/DATABASE_SCHEMA.md
Normal file
310
docs/DATABASE_SCHEMA.md
Normal file
@ -0,0 +1,310 @@
|
|||||||
|
# Database Schema Documentation
|
||||||
|
|
||||||
|
## 1. Overview
|
||||||
|
This document provides a detailed reference for the backend database schema of the Royal Enfield Workflow Management System.
|
||||||
|
|
||||||
|
**Database System:** PostgreSQL 16.x
|
||||||
|
**Schema Conventions:**
|
||||||
|
* **Primary Keys:** UUID (v4) for all tables.
|
||||||
|
* **Naming:** Snake_case for tables and columns.
|
||||||
|
* **Audit Columns:** Most tables include `created_at`, `updated_at`, `created_by`, `updated_by`.
|
||||||
|
* **Soft Deletes:** `is_deleted` flag used on critical entities.
|
||||||
|
|
||||||
|
## 2. Architecture Diagrams (A4 Optimized)
|
||||||
|
|
||||||
|
### 2.1. Core Workflow Architecture
|
||||||
|
Focuses on the request lifecycle, approval chains, and direct interactions.
|
||||||
|
|
||||||
|
```mermaid
|
||||||
|
erDiagram
|
||||||
|
users ||--o{ workflow_requests : "initiates"
|
||||||
|
users ||--o{ approval_levels : "approves"
|
||||||
|
users ||--o{ participants : "collaborates"
|
||||||
|
workflow_requests ||--|{ approval_levels : "has_steps"
|
||||||
|
workflow_requests ||--o{ participants : "has_users"
|
||||||
|
workflow_requests ||--o{ documents : "contains"
|
||||||
|
workflow_requests ||--o{ work_notes : "discussions"
|
||||||
|
workflow_requests ||--o{ activities : "audit_trail"
|
||||||
|
workflow_templates ||--o{ workflow_requests : "spawns"
|
||||||
|
workflow_requests ||--|| conclusion_remarks : "finalizes"
|
||||||
|
|
||||||
|
workflow_requests {
|
||||||
|
uuid request_id PK
|
||||||
|
varchar request_number
|
||||||
|
enum status
|
||||||
|
integer current_level
|
||||||
|
}
|
||||||
|
approval_levels {
|
||||||
|
uuid level_id PK
|
||||||
|
integer level_number
|
||||||
|
enum status
|
||||||
|
uuid approver_id FK
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### 2.2. Business Domain Data
|
||||||
|
Focuses on the specific data payloads (Dealers, Finance, Claims) attached to requests.
|
||||||
|
|
||||||
|
```mermaid
|
||||||
|
erDiagram
|
||||||
|
workflow_requests ||--o{ dealers : "context"
|
||||||
|
workflow_requests ||--|| dealer_claim_details : "claim_data"
|
||||||
|
workflow_requests ||--|| dealer_proposal_details : "proposal"
|
||||||
|
workflow_requests ||--|| dealer_completion_details : "evidence"
|
||||||
|
workflow_requests ||--o{ dealer_claim_history : "versions"
|
||||||
|
|
||||||
|
workflow_requests ||--|| claim_budget_tracking : "financials"
|
||||||
|
workflow_requests ||--|| internal_orders : "sap_ref"
|
||||||
|
workflow_requests ||--o{ claim_invoices : "billing"
|
||||||
|
claim_invoices ||--o{ claim_credit_notes : "adjustments"
|
||||||
|
|
||||||
|
dealer_claim_details {
|
||||||
|
uuid claim_id PK
|
||||||
|
varchar activity_type
|
||||||
|
}
|
||||||
|
claim_budget_tracking {
|
||||||
|
decimal approved_budget
|
||||||
|
decimal final_claim_amount
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### 2.3. System Support Services
|
||||||
|
Focuses on cross-cutting concerns like logging, notifications, and monitoring.
|
||||||
|
|
||||||
|
```mermaid
|
||||||
|
erDiagram
|
||||||
|
users ||--o{ notifications : "receives"
|
||||||
|
users ||--o{ system_settings : "configures"
|
||||||
|
users ||--o{ audit_logs : "actions"
|
||||||
|
|
||||||
|
workflow_requests ||--o{ notifications : "triggers"
|
||||||
|
workflow_requests ||--o{ tat_tracking : "monitors_sla"
|
||||||
|
workflow_requests ||--o{ tat_alerts : "sla_breaches"
|
||||||
|
workflow_requests ||--o{ request_summaries : "ai_summary"
|
||||||
|
workflow_requests ||--o{ report_cache : "reporting"
|
||||||
|
|
||||||
|
notifications ||--o{ email_logs : "outbound"
|
||||||
|
notifications ||--o{ sms_logs : "outbound"
|
||||||
|
|
||||||
|
tat_tracking {
|
||||||
|
decimal total_tat_hours
|
||||||
|
boolean threshold_breached
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## 3. Schema Modules
|
||||||
|
|
||||||
|
### 3.1. User & Authentication Module
|
||||||
|
Manages user identities, sessions, and system-wide configurations.
|
||||||
|
|
||||||
|
```mermaid
|
||||||
|
erDiagram
|
||||||
|
users ||--o{ user_sessions : "has"
|
||||||
|
users ||--o{ subscriptions : "has_device"
|
||||||
|
users ||--o{ system_settings : "modifies"
|
||||||
|
|
||||||
|
users {
|
||||||
|
uuid user_id PK
|
||||||
|
varchar employee_id
|
||||||
|
varchar email
|
||||||
|
varchar display_name
|
||||||
|
enum role
|
||||||
|
boolean is_active
|
||||||
|
}
|
||||||
|
user_sessions {
|
||||||
|
uuid session_id PK
|
||||||
|
uuid user_id FK
|
||||||
|
varchar session_token
|
||||||
|
timestamp expires_at
|
||||||
|
}
|
||||||
|
subscriptions {
|
||||||
|
uuid subscription_id PK
|
||||||
|
uuid user_id FK
|
||||||
|
varchar endpoint
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Tables
|
||||||
|
|
||||||
|
**`users`**
|
||||||
|
Core user registry. synced with Okta/HRMS.
|
||||||
|
* `user_id` (PK): Unique UUID.
|
||||||
|
* `employee_id` (Unique): HR system ID.
|
||||||
|
* `email` (Unique): Official email address.
|
||||||
|
* `role`: RBAC role (USER, ADMIN, etc.).
|
||||||
|
* `is_active`: Soft delete/account link status.
|
||||||
|
|
||||||
|
**`user_sessions`**
|
||||||
|
Active JWT sessions for invalidation/tracking.
|
||||||
|
* `session_token`: The JWT access token.
|
||||||
|
* `refresh_token`: For renewing access tokens.
|
||||||
|
* `device_type`: Web/Mobile classification.
|
||||||
|
|
||||||
|
**`system_settings`**
|
||||||
|
Dynamic configuration (e.g., global TAT thresholds).
|
||||||
|
* `setting_key` (Unique): Config identifier name.
|
||||||
|
* `setting_value`: The value (text/json).
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### 3.2. Workflow Engine Module
|
||||||
|
The core engine driving request lifecycles, approvals, and tracking.
|
||||||
|
|
||||||
|
```mermaid
|
||||||
|
erDiagram
|
||||||
|
workflow_requests ||--|{ approval_levels : "steps"
|
||||||
|
workflow_requests ||--o{ activities : "events"
|
||||||
|
workflow_requests ||--|{ participants : "access"
|
||||||
|
workflow_templates ||--o{ workflow_requests : "spawns"
|
||||||
|
|
||||||
|
workflow_requests {
|
||||||
|
uuid request_id PK
|
||||||
|
varchar request_number
|
||||||
|
enum status
|
||||||
|
uuid initiator_id FK
|
||||||
|
}
|
||||||
|
approval_levels {
|
||||||
|
uuid level_id PK
|
||||||
|
uuid request_id FK
|
||||||
|
integer level_number
|
||||||
|
enum status
|
||||||
|
uuid approver_id FK
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Tables
|
||||||
|
|
||||||
|
**`workflow_requests`**
|
||||||
|
The central entity representing a business process instance.
|
||||||
|
* `request_number`: Human-readable ID (e.g., REQ-2024-001).
|
||||||
|
* `current_level`: Pointer to the active approval step.
|
||||||
|
* `status`: DRAFT, PENDING, APPROVED, REJECTED, CLOSED.
|
||||||
|
|
||||||
|
**`approval_levels`**
|
||||||
|
Defines the sequence of approvers for a request.
|
||||||
|
* `level_number`: Sequence index (1, 2, 3...).
|
||||||
|
* `approver_id`: User responsible for this step.
|
||||||
|
* `tat_hours`: SLA for this specific step.
|
||||||
|
* `status`: PENDING, APPROVED, REJECTED.
|
||||||
|
|
||||||
|
**`participants`**
|
||||||
|
Users with visibility/access to the request (spectators, contributors).
|
||||||
|
* `participant_type`: SPECTATOR, CONTRIBUTOR.
|
||||||
|
* `can_comment`, `can_view_documents`: Granular permissions.
|
||||||
|
|
||||||
|
**`activities`**
|
||||||
|
Audit trail of all actions performed on a request.
|
||||||
|
* `activity_type`: CREATED, APPROVED, COMMENTED, FILE_UPLOADED.
|
||||||
|
* `metadata`: JSON payload with specific details of the event.
|
||||||
|
|
||||||
|
**`workflow_templates`**
|
||||||
|
Blueprints for creating new requests.
|
||||||
|
* `approval_levels_config`: JSON defining the default approver chain structure.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### 3.3. Dealer Management Module
|
||||||
|
Stores specific data related to dealer claims, onboardings, and performance.
|
||||||
|
|
||||||
|
```mermaid
|
||||||
|
erDiagram
|
||||||
|
workflow_requests ||--|| dealer_claim_details : "details"
|
||||||
|
workflow_requests ||--|| dealer_proposal_details : "proposal"
|
||||||
|
workflow_requests ||--|| dealer_completion_details : "completion"
|
||||||
|
workflow_requests ||--o{ dealer_claim_history : "versions"
|
||||||
|
workflow_requests ||--o{ dealers : "related_to"
|
||||||
|
|
||||||
|
dealers {
|
||||||
|
uuid dealer_id PK
|
||||||
|
varchar dealer_name
|
||||||
|
varchar sales_code
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Tables
|
||||||
|
|
||||||
|
**`dealers`**
|
||||||
|
Master data for dealerships.
|
||||||
|
* `sales_code`, `service_code`: Dealer unique identifiers.
|
||||||
|
* `dealer_name`, `region`, `city`: Location details.
|
||||||
|
|
||||||
|
**`dealer_claim_details`**
|
||||||
|
Specific attributes for a Dealer Claim request.
|
||||||
|
* `activity_name`, `activity_type`: Marketing/Sales activity details.
|
||||||
|
* `period_start_date`, `period_end_date`: Duration of the claim activity.
|
||||||
|
|
||||||
|
**`dealer_proposal_details`**
|
||||||
|
Stores the initial proposal data for a claim.
|
||||||
|
* `total_estimated_budget`: The proposed validation amount.
|
||||||
|
* `proposal_document_url`: Link to the uploaded proposal PDF/Doc.
|
||||||
|
|
||||||
|
**`dealer_claim_history`**
|
||||||
|
Snapshots of the claim data at various approval stages.
|
||||||
|
* `snapshot_data`: JSON dump of the claim state.
|
||||||
|
* `version`: Incremental version number.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### 3.4. Financial Module
|
||||||
|
Manages budgeting, internal orders, and invoicing.
|
||||||
|
|
||||||
|
```mermaid
|
||||||
|
erDiagram
|
||||||
|
workflow_requests ||--|| claim_budget_tracking : "budget"
|
||||||
|
workflow_requests ||--|| internal_orders : "io"
|
||||||
|
workflow_requests ||--o{ claim_invoices : "invoices"
|
||||||
|
claim_invoices ||--o{ claim_credit_notes : "credit_notes"
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Tables
|
||||||
|
|
||||||
|
**`claim_budget_tracking`**
|
||||||
|
Central ledger for a request's financial lifecycle.
|
||||||
|
* `initial_estimated_budget`: Original requested amount.
|
||||||
|
* `approved_budget`: Validated amount after approvals.
|
||||||
|
* `io_blocked_amount`: Amount reserved in SAP.
|
||||||
|
* `final_claim_amount`: Actual payout amount.
|
||||||
|
|
||||||
|
**`internal_orders`**
|
||||||
|
SAP Internal Order references.
|
||||||
|
* `io_number`: The IO code from SAP.
|
||||||
|
* `io_available_balance`, `io_blocked_amount`: Balance tracking.
|
||||||
|
|
||||||
|
**`claim_invoices`**
|
||||||
|
Invoices submitted against the claim.
|
||||||
|
* `invoice_number`: Vendor invoice ID.
|
||||||
|
* `amount`: Invoice value.
|
||||||
|
* `dms_number`: Document Management System reference.
|
||||||
|
|
||||||
|
**`claim_credit_notes`**
|
||||||
|
Adjustments/Returns linked to invoices.
|
||||||
|
* `credit_note_amount`: Value to be deducted/adjusted.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### 3.5. Ancillary Modules
|
||||||
|
Support functions like notifications, tracking, and logs.
|
||||||
|
|
||||||
|
#### Tables
|
||||||
|
|
||||||
|
**`notifications`**
|
||||||
|
User alerts.
|
||||||
|
* `is_read`: Read status.
|
||||||
|
* `action_url`: Deep link to the relevant request.
|
||||||
|
|
||||||
|
**`tat_tracking`**
|
||||||
|
Turnaround Time monitoring.
|
||||||
|
* `tracking_type`: REQUEST (overall) or LEVEL (step-specific).
|
||||||
|
* `total_tat_hours`: The allowed time.
|
||||||
|
* `elapsed_hours`: Time consumed so far.
|
||||||
|
* `breached_flags`: `threshold_50_breached`, etc.
|
||||||
|
|
||||||
|
**`tat_alerts`**
|
||||||
|
Logs of TAT breach notifications sent.
|
||||||
|
* `alert_type`: TAT_50, TAT_75, TAT_100.
|
||||||
|
* `is_breached`: Confirmed breach status.
|
||||||
|
|
||||||
|
**`request_summaries`**
|
||||||
|
AI or manually generated summaries of complex requests.
|
||||||
|
* `is_ai_generated`: Origin flag.
|
||||||
|
* `description`, `closing_remarks`: Narrative text.
|
||||||
141
docs/ERD.mermaid
141
docs/ERD.mermaid
@ -24,12 +24,19 @@ erDiagram
|
|||||||
workflow_requests ||--|| claim_invoices : claim_invoice
|
workflow_requests ||--|| claim_invoices : claim_invoice
|
||||||
workflow_requests ||--|| claim_credit_notes : claim_credit_note
|
workflow_requests ||--|| claim_credit_notes : claim_credit_note
|
||||||
work_notes ||--o{ work_note_attachments : has
|
work_notes ||--o{ work_note_attachments : has
|
||||||
notifications ||--o{ email_logs : sends
|
|
||||||
notifications ||--o{ sms_logs : sends
|
|
||||||
workflow_requests ||--o{ report_cache : caches
|
workflow_requests ||--o{ report_cache : caches
|
||||||
workflow_requests ||--o{ audit_logs : audits
|
workflow_requests ||--o{ audit_logs : audits
|
||||||
workflow_requests ||--o{ workflow_templates : templates
|
workflow_requests ||--o{ workflow_templates : templates
|
||||||
users ||--o{ system_settings : updates
|
users ||--o{ system_settings : updates
|
||||||
|
workflow_requests ||--o{ dealer_claim_history : has_history
|
||||||
|
workflow_requests ||--o{ tat_alerts : triggers
|
||||||
|
workflow_requests ||--|| request_summaries : summarizes
|
||||||
|
request_summaries ||--o{ shared_summaries : shared_as
|
||||||
|
users ||--o{ shared_summaries : shares
|
||||||
|
users ||--o{ subscriptions : has_device
|
||||||
|
users ||--o{ holidays : manages
|
||||||
|
users ||--o{ activity_types : manages
|
||||||
|
|
||||||
users {
|
users {
|
||||||
uuid user_id PK
|
uuid user_id PK
|
||||||
@ -286,46 +293,7 @@ erDiagram
|
|||||||
varchar logout_reason
|
varchar logout_reason
|
||||||
}
|
}
|
||||||
|
|
||||||
email_logs {
|
|
||||||
uuid email_log_id PK
|
|
||||||
uuid request_id FK
|
|
||||||
uuid notification_id FK
|
|
||||||
varchar recipient_email
|
|
||||||
uuid recipient_user_id FK
|
|
||||||
text[] cc_emails
|
|
||||||
text[] bcc_emails
|
|
||||||
varchar subject
|
|
||||||
text body
|
|
||||||
varchar email_type
|
|
||||||
varchar status
|
|
||||||
integer send_attempts
|
|
||||||
timestamp sent_at
|
|
||||||
timestamp failed_at
|
|
||||||
text failure_reason
|
|
||||||
timestamp opened_at
|
|
||||||
timestamp clicked_at
|
|
||||||
timestamp created_at
|
|
||||||
}
|
|
||||||
|
|
||||||
sms_logs {
|
|
||||||
uuid sms_log_id PK
|
|
||||||
uuid request_id FK
|
|
||||||
uuid notification_id FK
|
|
||||||
varchar recipient_phone
|
|
||||||
uuid recipient_user_id FK
|
|
||||||
text message
|
|
||||||
varchar sms_type
|
|
||||||
varchar status
|
|
||||||
integer send_attempts
|
|
||||||
timestamp sent_at
|
|
||||||
timestamp delivered_at
|
|
||||||
timestamp failed_at
|
|
||||||
text failure_reason
|
|
||||||
varchar sms_provider
|
|
||||||
varchar sms_provider_message_id
|
|
||||||
decimal cost
|
|
||||||
timestamp created_at
|
|
||||||
}
|
|
||||||
|
|
||||||
system_settings {
|
system_settings {
|
||||||
uuid setting_id PK
|
uuid setting_id PK
|
||||||
@ -505,3 +473,94 @@ erDiagram
|
|||||||
timestamp updated_at
|
timestamp updated_at
|
||||||
}
|
}
|
||||||
|
|
||||||
|
dealers {
|
||||||
|
uuid dealer_id PK
|
||||||
|
varchar sales_code
|
||||||
|
varchar service_code
|
||||||
|
varchar dealer_name
|
||||||
|
varchar region
|
||||||
|
varchar state
|
||||||
|
varchar city
|
||||||
|
varchar location
|
||||||
|
boolean is_active
|
||||||
|
timestamp created_at
|
||||||
|
timestamp updated_at
|
||||||
|
}
|
||||||
|
|
||||||
|
dealer_claim_history {
|
||||||
|
uuid history_id PK
|
||||||
|
uuid request_id FK
|
||||||
|
uuid approval_level_id FK
|
||||||
|
integer version
|
||||||
|
enum snapshot_type
|
||||||
|
jsonb snapshot_data
|
||||||
|
text change_reason
|
||||||
|
uuid changed_by FK
|
||||||
|
timestamp created_at
|
||||||
|
}
|
||||||
|
|
||||||
|
holidays {
|
||||||
|
uuid holiday_id PK
|
||||||
|
date holiday_date
|
||||||
|
varchar holiday_name
|
||||||
|
enum holiday_type
|
||||||
|
boolean is_active
|
||||||
|
uuid created_by FK
|
||||||
|
timestamp created_at
|
||||||
|
timestamp updated_at
|
||||||
|
}
|
||||||
|
|
||||||
|
activity_types {
|
||||||
|
uuid activity_type_id PK
|
||||||
|
varchar title
|
||||||
|
varchar item_code
|
||||||
|
varchar taxation_type
|
||||||
|
boolean is_active
|
||||||
|
uuid created_by FK
|
||||||
|
timestamp created_at
|
||||||
|
timestamp updated_at
|
||||||
|
}
|
||||||
|
|
||||||
|
tat_alerts {
|
||||||
|
uuid alert_id PK
|
||||||
|
uuid request_id FK
|
||||||
|
uuid level_id FK
|
||||||
|
uuid approver_id FK
|
||||||
|
enum alert_type
|
||||||
|
boolean is_breached
|
||||||
|
timestamp alert_sent_at
|
||||||
|
timestamp created_at
|
||||||
|
}
|
||||||
|
|
||||||
|
request_summaries {
|
||||||
|
uuid summary_id PK
|
||||||
|
uuid request_id FK
|
||||||
|
uuid initiator_id FK
|
||||||
|
varchar title
|
||||||
|
text description
|
||||||
|
text closing_remarks
|
||||||
|
boolean is_ai_generated
|
||||||
|
timestamp created_at
|
||||||
|
timestamp updated_at
|
||||||
|
}
|
||||||
|
|
||||||
|
shared_summaries {
|
||||||
|
uuid shared_summary_id PK
|
||||||
|
uuid summary_id FK
|
||||||
|
uuid shared_by FK
|
||||||
|
uuid shared_with FK
|
||||||
|
boolean is_read
|
||||||
|
timestamp shared_at
|
||||||
|
timestamp created_at
|
||||||
|
}
|
||||||
|
|
||||||
|
subscriptions {
|
||||||
|
uuid subscription_id PK
|
||||||
|
uuid user_id FK
|
||||||
|
varchar endpoint
|
||||||
|
varchar p256dh
|
||||||
|
varchar auth
|
||||||
|
timestamp created_at
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
113
docs/POSTGRES_JUSTIFICATION.md
Normal file
113
docs/POSTGRES_JUSTIFICATION.md
Normal file
@ -0,0 +1,113 @@
|
|||||||
|
# Why PostgreSQL Wins for "Royal Enfield Workflow"
|
||||||
|
|
||||||
|
## Executive Summary
|
||||||
|
For "Royal Enfield Workflow", **PostgreSQL is superior to MongoDB**.
|
||||||
|
The decision rests on **Reporting Speed** and **Deep Filtering capabilities**. Your workflow requires filtering by *Relationships* (Approvers, Departments), not just static data.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 1. Complex Workflow Filters (The "My Tasks" Problem)
|
||||||
|
Users need specific views like "Requests waiting for me" or "Paused requests".
|
||||||
|
|
||||||
|
### A. "Requests Open For Me" (The Join Filter)
|
||||||
|
*Scenario: Show all requests where **I am the current approver**.*
|
||||||
|
|
||||||
|
#### PostgreSQL (Simple SQL `JOIN`)
|
||||||
|
Index usage is perfect. The DB jumps mainly to the few rows in `approval_levels` assigned to you.
|
||||||
|
```sql
|
||||||
|
SELECT r.id, r.status, r.created_at
|
||||||
|
FROM workflow_requests r
|
||||||
|
JOIN approval_levels al ON r.id = al.request_id
|
||||||
|
WHERE al.approver_id = 'USER_UUID_123'
|
||||||
|
AND al.status = 'PENDING'
|
||||||
|
ORDER BY r.created_at DESC;
|
||||||
|
```
|
||||||
|
|
||||||
|
#### MongoDB (Array Query + Sort Issue)
|
||||||
|
You must index inside an array. If you sort by "Date", Mongo often cannot use the index effectively for both the *array match* and the *sort*, leading to slow scans.
|
||||||
|
```javascript
|
||||||
|
db.requests.find({
|
||||||
|
"approvers": {
|
||||||
|
$elemMatch: {
|
||||||
|
userId: "USER_UUID_123",
|
||||||
|
status: "PENDING"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}).sort({ createdAt: -1 });
|
||||||
|
// WARNING: Performance degrades heavily if user has many historical requests
|
||||||
|
```
|
||||||
|
|
||||||
|
### B. "Paused & Resumed" History
|
||||||
|
*Scenario: Show requests that were previously Paused but are now Active (requires checking history).*
|
||||||
|
|
||||||
|
#### PostgreSQL (Audit Log Join)
|
||||||
|
You query the history table directly without loading the main request data until the match is found.
|
||||||
|
```sql
|
||||||
|
SELECT DISTINCT r.*
|
||||||
|
FROM workflow_requests r
|
||||||
|
JOIN audit_logs log ON r.id = log.request_id
|
||||||
|
WHERE log.action = 'PAUSED'
|
||||||
|
AND r.status = 'IN_PROGRESS';
|
||||||
|
```
|
||||||
|
|
||||||
|
#### MongoDB (The "Lookup" or "Bloat" Trade-off)
|
||||||
|
**Option 1: Lookups (Slow)**
|
||||||
|
You have to join the separate `audit_logs` collection for every request.
|
||||||
|
```javascript
|
||||||
|
db.requests.aggregate([
|
||||||
|
{ $match: { status: "IN_PROGRESS" } },
|
||||||
|
{
|
||||||
|
$lookup: {
|
||||||
|
from: "audit_logs",
|
||||||
|
localField: "_id",
|
||||||
|
foreignField: "requestId",
|
||||||
|
as: "history"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{ $match: { "history.action": "PAUSED" } }
|
||||||
|
]);
|
||||||
|
```
|
||||||
|
|
||||||
|
**Option 2: Embedding (Bloated)**
|
||||||
|
You store every log inside the Request document.
|
||||||
|
* *Result*: Your generic `db.requests.find({})` becomes 10x slower because it's dragging megabytes of history logs across the network for every result.
|
||||||
|
|
||||||
|
## 2. The Filter Nightmare: "Deep Filtering"
|
||||||
|
Users expect to slice-and-dice data freely. *Example: "Show requests initiated by users in the 'Sales' Department".*
|
||||||
|
|
||||||
|
* **Postgres (Cross-Table Filter)**:
|
||||||
|
```sql
|
||||||
|
SELECT * FROM workflow_requests r
|
||||||
|
JOIN users u ON r.initiator_id = u.id
|
||||||
|
WHERE u.department = 'Sales'
|
||||||
|
```
|
||||||
|
* **Result**: Instant. SQL simply filters the `users` table first (using an index on `department`) and then grabs the matching requests.
|
||||||
|
|
||||||
|
* **MongoDB (The "Lookup" Trap)**:
|
||||||
|
* `Department` is stored on the **User** document, not the Request.
|
||||||
|
* To filter Requests by "Department", you must `$lookup` (join) the User collection for *every single request* before you can filter them.
|
||||||
|
* *Alternative*: Copy `department` into every Request document.
|
||||||
|
* *Maintenance Cost*: If a user transfers from 'Sales' to 'Marketing', you must run a script to update all their historical requests, or your reports will be wrong.
|
||||||
|
|
||||||
|
## 3. Dashboard: The "Aggregation" Bottleneck
|
||||||
|
Your dashboard provides real-time insights (e.g., "Approver Efficiency," "TAT per Region").
|
||||||
|
|
||||||
|
* **Window Functions (SQL Superpower)**:
|
||||||
|
* *Requirement*: Rank dealers by "Average Approval Time" compared to their peers.
|
||||||
|
* *Postgres*: `RANK() OVER (PARTITION BY region ORDER BY avg_tat)` runs natively and instanly.
|
||||||
|
* *MongoDB*: Requires complex Aggregation Pipelines (`$setWindowFields`) that are memory-intensive and harder to optimize.
|
||||||
|
|
||||||
|
## 4. Audit & Compliance
|
||||||
|
* **Postgres**: Foreign Key constraints prevent "Orphaned Logs." You cannot delete a User if they are referenced in an Audit Log. This guarantees **legal traceability**.
|
||||||
|
* **MongoDB**: No constraints. Deleting a user can leave "Ghost Logs" (Referencing a null ID), breaking compliance reports.
|
||||||
|
|
||||||
|
## Summary Verdict
|
||||||
|
| Feature | PostgreSQL | MongoDB |
|
||||||
|
| :--- | :--- | :--- |
|
||||||
|
| **"Open For Me"** | **Simple Join** | **Complex Array Indexing** |
|
||||||
|
| **Dept/Region Filters** | **Simple Join** | **Slow Lookup** or **Duplicated Data** |
|
||||||
|
| **Ad-Hoc Reports** | **Flexible** | **Rigid** (Needs Indexes) |
|
||||||
|
| **Audit Compliance** | **Guaranteed** | **Risk of Orphaned Data** |
|
||||||
|
|
||||||
|
**Recommendation**: Stick with PostgreSQL.
|
||||||
|
The "Relational" nature of your reporting (Connecting Requests -> Users -> Departments -> Regions) is exactly what SQL was built to solve efficiently.
|
||||||
49
fix-imports.ps1
Normal file
49
fix-imports.ps1
Normal file
@ -0,0 +1,49 @@
|
|||||||
|
# Fix all simple imports to use MongoDB services
|
||||||
|
|
||||||
|
$replacements = @{
|
||||||
|
'from ''@services/activity.service''' = 'from ''@services/activity.mongo.service'''
|
||||||
|
'from ''../services/activity.service''' = 'from ''../services/activity.mongo.service'''
|
||||||
|
'from ''@services/notification.service''' = 'from ''@services/notification.mongo.service'''
|
||||||
|
'from ''../services/notification.service''' = 'from ''../services/notification.mongo.service'''
|
||||||
|
'from ''@services/configReader.service''' = 'from ''@services/configReader.mongo.service'''
|
||||||
|
'from ''../services/configReader.service''' = 'from ''../services/configReader.mongo.service'''
|
||||||
|
'from ''./configReader.service''' = 'from ''./configReader.mongo.service'''
|
||||||
|
'from ''../services/holiday.service''' = 'from ''../services/holiday.mongo.service'''
|
||||||
|
'from ''../services/workflow.service''' = 'from ''../services/workflow.service.mongo'''
|
||||||
|
'from ''../services/worknote.service''' = 'from ''../services/worknote.mongo.service'''
|
||||||
|
|
||||||
|
# Service instance renames
|
||||||
|
'\bactivityService\b' = 'activityMongoService'
|
||||||
|
'\bnotificationService\b' = 'notificationMongoService'
|
||||||
|
'\bholidayService\b' = 'holidayMongoService'
|
||||||
|
'\bworkNoteService\b' = 'workNoteMongoService'
|
||||||
|
}
|
||||||
|
|
||||||
|
$files = @(
|
||||||
|
'src/controllers/conclusion.controller.ts',
|
||||||
|
'src/controllers/document.controller.ts',
|
||||||
|
'src/controllers/notification.controller.ts',
|
||||||
|
'src/controllers/tat.controller.ts',
|
||||||
|
'src/routes/workflow.routes.ts',
|
||||||
|
'src/emailtemplates/emailPreferences.helper.ts',
|
||||||
|
'src/routes/debug.routes.ts',
|
||||||
|
'src/services/ai.service.ts',
|
||||||
|
'src/utils/tatTimeUtils.ts'
|
||||||
|
)
|
||||||
|
|
||||||
|
foreach ($file in $files) {
|
||||||
|
if (Test-Path $file) {
|
||||||
|
$content = Get-Content $file -Raw
|
||||||
|
|
||||||
|
foreach ($key in $replacements.Keys) {
|
||||||
|
$content = $content -replace $key, $replacements[$key]
|
||||||
|
}
|
||||||
|
|
||||||
|
Set-Content $file $content -NoNewline
|
||||||
|
Write-Host "✓ Updated: $file"
|
||||||
|
} else {
|
||||||
|
Write-Host "✗ Not found: $file"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Write-Host "`n✅ Import replacements complete!"
|
||||||
334
package-lock.json
generated
334
package-lock.json
generated
@ -26,6 +26,7 @@
|
|||||||
"helmet": "^8.0.0",
|
"helmet": "^8.0.0",
|
||||||
"ioredis": "^5.8.2",
|
"ioredis": "^5.8.2",
|
||||||
"jsonwebtoken": "^9.0.2",
|
"jsonwebtoken": "^9.0.2",
|
||||||
|
"mongoose": "^9.1.5",
|
||||||
"morgan": "^1.10.0",
|
"morgan": "^1.10.0",
|
||||||
"multer": "^1.4.5-lts.1",
|
"multer": "^1.4.5-lts.1",
|
||||||
"node-cron": "^3.0.3",
|
"node-cron": "^3.0.3",
|
||||||
@ -51,6 +52,7 @@
|
|||||||
"@types/express": "^5.0.0",
|
"@types/express": "^5.0.0",
|
||||||
"@types/jest": "^29.5.14",
|
"@types/jest": "^29.5.14",
|
||||||
"@types/jsonwebtoken": "^9.0.7",
|
"@types/jsonwebtoken": "^9.0.7",
|
||||||
|
"@types/mongoose": "^5.11.96",
|
||||||
"@types/morgan": "^1.9.9",
|
"@types/morgan": "^1.9.9",
|
||||||
"@types/multer": "^1.4.12",
|
"@types/multer": "^1.4.12",
|
||||||
"@types/node": "^22.19.1",
|
"@types/node": "^22.19.1",
|
||||||
@ -1370,6 +1372,7 @@
|
|||||||
"integrity": "sha512-o1uhUASyo921r2XtHYOHy7gdkGLge8ghBEQHMWmyJFoXlpU58kIrhhN3w26lpQb6dspetweapMn2CSNwQ8I4wg==",
|
"integrity": "sha512-o1uhUASyo921r2XtHYOHy7gdkGLge8ghBEQHMWmyJFoXlpU58kIrhhN3w26lpQb6dspetweapMn2CSNwQ8I4wg==",
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"optional": true,
|
"optional": true,
|
||||||
|
"peer": true,
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"@emnapi/wasi-threads": "1.1.0",
|
"@emnapi/wasi-threads": "1.1.0",
|
||||||
"tslib": "^2.4.0"
|
"tslib": "^2.4.0"
|
||||||
@ -1381,6 +1384,7 @@
|
|||||||
"integrity": "sha512-PVtJr5CmLwYAU9PZDMITZoR5iAOShYREoR45EyyLrbntV50mdePTgUn4AmOw90Ifcj+x2kRjdzr1HP3RrNiHGA==",
|
"integrity": "sha512-PVtJr5CmLwYAU9PZDMITZoR5iAOShYREoR45EyyLrbntV50mdePTgUn4AmOw90Ifcj+x2kRjdzr1HP3RrNiHGA==",
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"optional": true,
|
"optional": true,
|
||||||
|
"peer": true,
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"tslib": "^2.4.0"
|
"tslib": "^2.4.0"
|
||||||
}
|
}
|
||||||
@ -1391,6 +1395,7 @@
|
|||||||
"integrity": "sha512-WI0DdZ8xFSbgMjR1sFsKABJ/C5OnRrjT06JXbZKexJGrDuPTzZdDYfFlsgcCXCyf+suG5QU2e/y1Wo2V/OapLQ==",
|
"integrity": "sha512-WI0DdZ8xFSbgMjR1sFsKABJ/C5OnRrjT06JXbZKexJGrDuPTzZdDYfFlsgcCXCyf+suG5QU2e/y1Wo2V/OapLQ==",
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"optional": true,
|
"optional": true,
|
||||||
|
"peer": true,
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"tslib": "^2.4.0"
|
"tslib": "^2.4.0"
|
||||||
}
|
}
|
||||||
@ -2349,6 +2354,15 @@
|
|||||||
"url": "https://opencollective.com/js-sdsl"
|
"url": "https://opencollective.com/js-sdsl"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/@mongodb-js/saslprep": {
|
||||||
|
"version": "1.4.5",
|
||||||
|
"resolved": "https://registry.npmjs.org/@mongodb-js/saslprep/-/saslprep-1.4.5.tgz",
|
||||||
|
"integrity": "sha512-k64Lbyb7ycCSXHSLzxVdb2xsKGPMvYZfCICXvDsI8Z65CeWQzTEKS4YmGbnqw+U9RBvLPTsB6UCmwkgsDTGWIw==",
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"sparse-bitfield": "^3.0.3"
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/@msgpackr-extract/msgpackr-extract-darwin-arm64": {
|
"node_modules/@msgpackr-extract/msgpackr-extract-darwin-arm64": {
|
||||||
"version": "3.0.3",
|
"version": "3.0.3",
|
||||||
"resolved": "https://registry.npmjs.org/@msgpackr-extract/msgpackr-extract-darwin-arm64/-/msgpackr-extract-darwin-arm64-3.0.3.tgz",
|
"resolved": "https://registry.npmjs.org/@msgpackr-extract/msgpackr-extract-darwin-arm64/-/msgpackr-extract-darwin-arm64-3.0.3.tgz",
|
||||||
@ -2439,6 +2453,7 @@
|
|||||||
"os": [
|
"os": [
|
||||||
"android"
|
"android"
|
||||||
],
|
],
|
||||||
|
"peer": true,
|
||||||
"engines": {
|
"engines": {
|
||||||
"node": ">= 10"
|
"node": ">= 10"
|
||||||
}
|
}
|
||||||
@ -2455,6 +2470,7 @@
|
|||||||
"os": [
|
"os": [
|
||||||
"android"
|
"android"
|
||||||
],
|
],
|
||||||
|
"peer": true,
|
||||||
"engines": {
|
"engines": {
|
||||||
"node": ">= 10"
|
"node": ">= 10"
|
||||||
}
|
}
|
||||||
@ -2471,6 +2487,7 @@
|
|||||||
"os": [
|
"os": [
|
||||||
"darwin"
|
"darwin"
|
||||||
],
|
],
|
||||||
|
"peer": true,
|
||||||
"engines": {
|
"engines": {
|
||||||
"node": ">= 10"
|
"node": ">= 10"
|
||||||
}
|
}
|
||||||
@ -2487,6 +2504,7 @@
|
|||||||
"os": [
|
"os": [
|
||||||
"darwin"
|
"darwin"
|
||||||
],
|
],
|
||||||
|
"peer": true,
|
||||||
"engines": {
|
"engines": {
|
||||||
"node": ">= 10"
|
"node": ">= 10"
|
||||||
}
|
}
|
||||||
@ -2503,6 +2521,7 @@
|
|||||||
"os": [
|
"os": [
|
||||||
"freebsd"
|
"freebsd"
|
||||||
],
|
],
|
||||||
|
"peer": true,
|
||||||
"engines": {
|
"engines": {
|
||||||
"node": ">= 10"
|
"node": ">= 10"
|
||||||
}
|
}
|
||||||
@ -2519,6 +2538,7 @@
|
|||||||
"os": [
|
"os": [
|
||||||
"linux"
|
"linux"
|
||||||
],
|
],
|
||||||
|
"peer": true,
|
||||||
"engines": {
|
"engines": {
|
||||||
"node": ">= 10"
|
"node": ">= 10"
|
||||||
}
|
}
|
||||||
@ -2535,6 +2555,7 @@
|
|||||||
"os": [
|
"os": [
|
||||||
"linux"
|
"linux"
|
||||||
],
|
],
|
||||||
|
"peer": true,
|
||||||
"engines": {
|
"engines": {
|
||||||
"node": ">= 10"
|
"node": ">= 10"
|
||||||
}
|
}
|
||||||
@ -2551,6 +2572,7 @@
|
|||||||
"os": [
|
"os": [
|
||||||
"linux"
|
"linux"
|
||||||
],
|
],
|
||||||
|
"peer": true,
|
||||||
"engines": {
|
"engines": {
|
||||||
"node": ">= 10"
|
"node": ">= 10"
|
||||||
}
|
}
|
||||||
@ -2567,6 +2589,7 @@
|
|||||||
"os": [
|
"os": [
|
||||||
"linux"
|
"linux"
|
||||||
],
|
],
|
||||||
|
"peer": true,
|
||||||
"engines": {
|
"engines": {
|
||||||
"node": ">= 10"
|
"node": ">= 10"
|
||||||
}
|
}
|
||||||
@ -2583,6 +2606,7 @@
|
|||||||
"os": [
|
"os": [
|
||||||
"linux"
|
"linux"
|
||||||
],
|
],
|
||||||
|
"peer": true,
|
||||||
"engines": {
|
"engines": {
|
||||||
"node": ">= 10"
|
"node": ">= 10"
|
||||||
}
|
}
|
||||||
@ -2599,6 +2623,7 @@
|
|||||||
"os": [
|
"os": [
|
||||||
"linux"
|
"linux"
|
||||||
],
|
],
|
||||||
|
"peer": true,
|
||||||
"engines": {
|
"engines": {
|
||||||
"node": ">= 10"
|
"node": ">= 10"
|
||||||
}
|
}
|
||||||
@ -2615,6 +2640,7 @@
|
|||||||
"os": [
|
"os": [
|
||||||
"linux"
|
"linux"
|
||||||
],
|
],
|
||||||
|
"peer": true,
|
||||||
"engines": {
|
"engines": {
|
||||||
"node": ">= 10"
|
"node": ">= 10"
|
||||||
}
|
}
|
||||||
@ -2631,6 +2657,7 @@
|
|||||||
"os": [
|
"os": [
|
||||||
"linux"
|
"linux"
|
||||||
],
|
],
|
||||||
|
"peer": true,
|
||||||
"engines": {
|
"engines": {
|
||||||
"node": ">= 10"
|
"node": ">= 10"
|
||||||
}
|
}
|
||||||
@ -2647,6 +2674,7 @@
|
|||||||
"os": [
|
"os": [
|
||||||
"openharmony"
|
"openharmony"
|
||||||
],
|
],
|
||||||
|
"peer": true,
|
||||||
"engines": {
|
"engines": {
|
||||||
"node": ">= 10"
|
"node": ">= 10"
|
||||||
}
|
}
|
||||||
@ -2660,6 +2688,7 @@
|
|||||||
],
|
],
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"optional": true,
|
"optional": true,
|
||||||
|
"peer": true,
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"@napi-rs/wasm-runtime": "^1.0.3"
|
"@napi-rs/wasm-runtime": "^1.0.3"
|
||||||
},
|
},
|
||||||
@ -2679,6 +2708,7 @@
|
|||||||
"os": [
|
"os": [
|
||||||
"win32"
|
"win32"
|
||||||
],
|
],
|
||||||
|
"peer": true,
|
||||||
"engines": {
|
"engines": {
|
||||||
"node": ">= 10"
|
"node": ">= 10"
|
||||||
}
|
}
|
||||||
@ -2695,6 +2725,7 @@
|
|||||||
"os": [
|
"os": [
|
||||||
"win32"
|
"win32"
|
||||||
],
|
],
|
||||||
|
"peer": true,
|
||||||
"engines": {
|
"engines": {
|
||||||
"node": ">= 10"
|
"node": ">= 10"
|
||||||
}
|
}
|
||||||
@ -2711,6 +2742,7 @@
|
|||||||
"os": [
|
"os": [
|
||||||
"win32"
|
"win32"
|
||||||
],
|
],
|
||||||
|
"peer": true,
|
||||||
"engines": {
|
"engines": {
|
||||||
"node": ">= 10"
|
"node": ">= 10"
|
||||||
}
|
}
|
||||||
@ -2721,6 +2753,7 @@
|
|||||||
"integrity": "sha512-SeDnOO0Tk7Okiq6DbXmmBODgOAb9dp9gjlphokTUxmt8U3liIP1ZsozBahH69j/RJv+Rfs6IwUKHTgQYJ/HBAw==",
|
"integrity": "sha512-SeDnOO0Tk7Okiq6DbXmmBODgOAb9dp9gjlphokTUxmt8U3liIP1ZsozBahH69j/RJv+Rfs6IwUKHTgQYJ/HBAw==",
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"optional": true,
|
"optional": true,
|
||||||
|
"peer": true,
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"@emnapi/core": "^1.5.0",
|
"@emnapi/core": "^1.5.0",
|
||||||
"@emnapi/runtime": "^1.5.0",
|
"@emnapi/runtime": "^1.5.0",
|
||||||
@ -3544,6 +3577,7 @@
|
|||||||
"integrity": "sha512-9tTaPJLSiejZKx+Bmog4uSubteqTvFrVrURwkmHixBo0G4seD0zUxp98E1DzUBJxLQ3NPwXrGKDiVjwx/DpPsg==",
|
"integrity": "sha512-9tTaPJLSiejZKx+Bmog4uSubteqTvFrVrURwkmHixBo0G4seD0zUxp98E1DzUBJxLQ3NPwXrGKDiVjwx/DpPsg==",
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"optional": true,
|
"optional": true,
|
||||||
|
"peer": true,
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"tslib": "^2.4.0"
|
"tslib": "^2.4.0"
|
||||||
}
|
}
|
||||||
@ -3781,6 +3815,16 @@
|
|||||||
"dev": true,
|
"dev": true,
|
||||||
"license": "MIT"
|
"license": "MIT"
|
||||||
},
|
},
|
||||||
|
"node_modules/@types/mongoose": {
|
||||||
|
"version": "5.11.96",
|
||||||
|
"resolved": "https://registry.npmjs.org/@types/mongoose/-/mongoose-5.11.96.tgz",
|
||||||
|
"integrity": "sha512-keiY22ljJtXyM7osgScmZOHV6eL5VFUD5tQumlu+hjS++HND5nM8jNEdj5CSWfKIJpVwQfPuwQ2SfBqUnCAVRw==",
|
||||||
|
"dev": true,
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"mongoose": "*"
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/@types/morgan": {
|
"node_modules/@types/morgan": {
|
||||||
"version": "1.9.10",
|
"version": "1.9.10",
|
||||||
"resolved": "https://registry.npmjs.org/@types/morgan/-/morgan-1.9.10.tgz",
|
"resolved": "https://registry.npmjs.org/@types/morgan/-/morgan-1.9.10.tgz",
|
||||||
@ -4025,6 +4069,21 @@
|
|||||||
"@types/node": "*"
|
"@types/node": "*"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/@types/webidl-conversions": {
|
||||||
|
"version": "7.0.3",
|
||||||
|
"resolved": "https://registry.npmjs.org/@types/webidl-conversions/-/webidl-conversions-7.0.3.tgz",
|
||||||
|
"integrity": "sha512-CiJJvcRtIgzadHCYXw7dqEnMNRjhGZlYK05Mj9OyktqV8uVT8fD2BFOB7S1uwBE3Kj2Z+4UyPmFw/Ixgw/LAlA==",
|
||||||
|
"license": "MIT"
|
||||||
|
},
|
||||||
|
"node_modules/@types/whatwg-url": {
|
||||||
|
"version": "13.0.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/@types/whatwg-url/-/whatwg-url-13.0.0.tgz",
|
||||||
|
"integrity": "sha512-N8WXpbE6Wgri7KUSvrmQcqrMllKZ9uxkYWMt+mCSGwNc0Hsw9VQTW7ApqI4XNrx6/SaM2QQJCzMPDEXE058s+Q==",
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"@types/webidl-conversions": "*"
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/@types/yargs": {
|
"node_modules/@types/yargs": {
|
||||||
"version": "17.0.34",
|
"version": "17.0.34",
|
||||||
"resolved": "https://registry.npmjs.org/@types/yargs/-/yargs-17.0.34.tgz",
|
"resolved": "https://registry.npmjs.org/@types/yargs/-/yargs-17.0.34.tgz",
|
||||||
@ -4903,6 +4962,15 @@
|
|||||||
"node-int64": "^0.4.0"
|
"node-int64": "^0.4.0"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/bson": {
|
||||||
|
"version": "7.1.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/bson/-/bson-7.1.1.tgz",
|
||||||
|
"integrity": "sha512-TtJgBB+QyOlWjrbM+8bRgH84VM/xrDjyBFgSgGrfZF4xvt6gbEDtcswm27Tn9F9TWsjQybxT8b8VpCP/oJK4Dw==",
|
||||||
|
"license": "Apache-2.0",
|
||||||
|
"engines": {
|
||||||
|
"node": ">=20.19.0"
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/btoa": {
|
"node_modules/btoa": {
|
||||||
"version": "1.2.1",
|
"version": "1.2.1",
|
||||||
"resolved": "https://registry.npmjs.org/btoa/-/btoa-1.2.1.tgz",
|
"resolved": "https://registry.npmjs.org/btoa/-/btoa-1.2.1.tgz",
|
||||||
@ -8551,6 +8619,15 @@
|
|||||||
"safe-buffer": "^5.0.1"
|
"safe-buffer": "^5.0.1"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/kareem": {
|
||||||
|
"version": "3.0.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/kareem/-/kareem-3.0.0.tgz",
|
||||||
|
"integrity": "sha512-RKhaOBSPN8L7y4yAgNhDT2602G5FD6QbOIISbjN9D6mjHPeqeg7K+EB5IGSU5o81/X2Gzm3ICnAvQW3x3OP8HA==",
|
||||||
|
"license": "Apache-2.0",
|
||||||
|
"engines": {
|
||||||
|
"node": ">=18.0.0"
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/keyv": {
|
"node_modules/keyv": {
|
||||||
"version": "4.5.4",
|
"version": "4.5.4",
|
||||||
"resolved": "https://registry.npmjs.org/keyv/-/keyv-4.5.4.tgz",
|
"resolved": "https://registry.npmjs.org/keyv/-/keyv-4.5.4.tgz",
|
||||||
@ -8797,6 +8874,12 @@
|
|||||||
"node": ">= 0.6"
|
"node": ">= 0.6"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/memory-pager": {
|
||||||
|
"version": "1.5.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/memory-pager/-/memory-pager-1.5.0.tgz",
|
||||||
|
"integrity": "sha512-ZS4Bp4r/Zoeq6+NLJpP+0Zzm0pR8whtGPf1XExKLJBAczGMnSi3It14OiNCStjQjM6NU1okjQGSxgEZN8eBYKg==",
|
||||||
|
"license": "MIT"
|
||||||
|
},
|
||||||
"node_modules/merge-descriptors": {
|
"node_modules/merge-descriptors": {
|
||||||
"version": "1.0.3",
|
"version": "1.0.3",
|
||||||
"resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-1.0.3.tgz",
|
"resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-1.0.3.tgz",
|
||||||
@ -8961,6 +9044,223 @@
|
|||||||
"node": "*"
|
"node": "*"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/mongodb-connection-string-url": {
|
||||||
|
"version": "7.0.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/mongodb-connection-string-url/-/mongodb-connection-string-url-7.0.1.tgz",
|
||||||
|
"integrity": "sha512-h0AZ9A7IDVwwHyMxmdMXKy+9oNlF0zFoahHiX3vQ8e3KFcSP3VmsmfvtRSuLPxmyv2vjIDxqty8smTgie/SNRQ==",
|
||||||
|
"license": "Apache-2.0",
|
||||||
|
"dependencies": {
|
||||||
|
"@types/whatwg-url": "^13.0.0",
|
||||||
|
"whatwg-url": "^14.1.0"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">=20.19.0"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/mongodb-connection-string-url/node_modules/tr46": {
|
||||||
|
"version": "5.1.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/tr46/-/tr46-5.1.1.tgz",
|
||||||
|
"integrity": "sha512-hdF5ZgjTqgAntKkklYw0R03MG2x/bSzTtkxmIRw/sTNV8YXsCJ1tfLAX23lhxhHJlEf3CRCOCGGWw3vI3GaSPw==",
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"punycode": "^2.3.1"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">=18"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/mongodb-connection-string-url/node_modules/webidl-conversions": {
|
||||||
|
"version": "7.0.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-7.0.0.tgz",
|
||||||
|
"integrity": "sha512-VwddBukDzu71offAQR975unBIGqfKZpM+8ZX6ySk8nYhVoo5CYaZyzt3YBvYtRtO+aoGlqxPg/B87NGVZ/fu6g==",
|
||||||
|
"license": "BSD-2-Clause",
|
||||||
|
"engines": {
|
||||||
|
"node": ">=12"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/mongodb-connection-string-url/node_modules/whatwg-url": {
|
||||||
|
"version": "14.2.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-14.2.0.tgz",
|
||||||
|
"integrity": "sha512-De72GdQZzNTUBBChsXueQUnPKDkg/5A5zp7pFDuQAj5UFoENpiACU0wlCvzpAGnTkj++ihpKwKyYewn/XNUbKw==",
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"tr46": "^5.1.0",
|
||||||
|
"webidl-conversions": "^7.0.0"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">=18"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/mongoose": {
|
||||||
|
"version": "9.1.5",
|
||||||
|
"resolved": "https://registry.npmjs.org/mongoose/-/mongoose-9.1.5.tgz",
|
||||||
|
"integrity": "sha512-N6gypEO+wLmZp8kCYNQmrEWxVMT0KhyHvVttBZoKA/1ngY7aUsBjqHzCPtDgz+i8JAnqMOiEKmuJIDEQu1b9Dw==",
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"kareem": "3.0.0",
|
||||||
|
"mongodb": "~7.0",
|
||||||
|
"mpath": "0.9.0",
|
||||||
|
"mquery": "6.0.0",
|
||||||
|
"ms": "2.1.3",
|
||||||
|
"sift": "17.1.3"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">=20.19.0"
|
||||||
|
},
|
||||||
|
"funding": {
|
||||||
|
"type": "opencollective",
|
||||||
|
"url": "https://opencollective.com/mongoose"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/mongoose/node_modules/gaxios": {
|
||||||
|
"version": "7.1.3",
|
||||||
|
"resolved": "https://registry.npmjs.org/gaxios/-/gaxios-7.1.3.tgz",
|
||||||
|
"integrity": "sha512-YGGyuEdVIjqxkxVH1pUTMY/XtmmsApXrCVv5EU25iX6inEPbV+VakJfLealkBtJN69AQmh1eGOdCl9Sm1UP6XQ==",
|
||||||
|
"license": "Apache-2.0",
|
||||||
|
"optional": true,
|
||||||
|
"peer": true,
|
||||||
|
"dependencies": {
|
||||||
|
"extend": "^3.0.2",
|
||||||
|
"https-proxy-agent": "^7.0.1",
|
||||||
|
"node-fetch": "^3.3.2",
|
||||||
|
"rimraf": "^5.0.1"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">=18"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/mongoose/node_modules/gcp-metadata": {
|
||||||
|
"version": "7.0.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-7.0.1.tgz",
|
||||||
|
"integrity": "sha512-UcO3kefx6dCcZkgcTGgVOTFb7b1LlQ02hY1omMjjrrBzkajRMCFgYOjs7J71WqnuG1k2b+9ppGL7FsOfhZMQKQ==",
|
||||||
|
"license": "Apache-2.0",
|
||||||
|
"optional": true,
|
||||||
|
"peer": true,
|
||||||
|
"dependencies": {
|
||||||
|
"gaxios": "^7.0.0",
|
||||||
|
"google-logging-utils": "^1.0.0",
|
||||||
|
"json-bigint": "^1.0.0"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">=18"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/mongoose/node_modules/glob": {
|
||||||
|
"version": "10.5.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/glob/-/glob-10.5.0.tgz",
|
||||||
|
"integrity": "sha512-DfXN8DfhJ7NH3Oe7cFmu3NCu1wKbkReJ8TorzSAFbSKrlNaQSKfIzqYqVY8zlbs2NLBbWpRiU52GX2PbaBVNkg==",
|
||||||
|
"license": "ISC",
|
||||||
|
"optional": true,
|
||||||
|
"peer": true,
|
||||||
|
"dependencies": {
|
||||||
|
"foreground-child": "^3.1.0",
|
||||||
|
"jackspeak": "^3.1.2",
|
||||||
|
"minimatch": "^9.0.4",
|
||||||
|
"minipass": "^7.1.2",
|
||||||
|
"package-json-from-dist": "^1.0.0",
|
||||||
|
"path-scurry": "^1.11.1"
|
||||||
|
},
|
||||||
|
"bin": {
|
||||||
|
"glob": "dist/esm/bin.mjs"
|
||||||
|
},
|
||||||
|
"funding": {
|
||||||
|
"url": "https://github.com/sponsors/isaacs"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/mongoose/node_modules/google-logging-utils": {
|
||||||
|
"version": "1.1.3",
|
||||||
|
"resolved": "https://registry.npmjs.org/google-logging-utils/-/google-logging-utils-1.1.3.tgz",
|
||||||
|
"integrity": "sha512-eAmLkjDjAFCVXg7A1unxHsLf961m6y17QFqXqAXGj/gVkKFrEICfStRfwUlGNfeCEjNRa32JEWOUTlYXPyyKvA==",
|
||||||
|
"license": "Apache-2.0",
|
||||||
|
"optional": true,
|
||||||
|
"peer": true,
|
||||||
|
"engines": {
|
||||||
|
"node": ">=14"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/mongoose/node_modules/mongodb": {
|
||||||
|
"version": "7.0.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/mongodb/-/mongodb-7.0.0.tgz",
|
||||||
|
"integrity": "sha512-vG/A5cQrvGGvZm2mTnCSz1LUcbOPl83hfB6bxULKQ8oFZauyox/2xbZOoGNl+64m8VBrETkdGCDBdOsCr3F3jg==",
|
||||||
|
"license": "Apache-2.0",
|
||||||
|
"dependencies": {
|
||||||
|
"@mongodb-js/saslprep": "^1.3.0",
|
||||||
|
"bson": "^7.0.0",
|
||||||
|
"mongodb-connection-string-url": "^7.0.0"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">=20.19.0"
|
||||||
|
},
|
||||||
|
"peerDependencies": {
|
||||||
|
"@aws-sdk/credential-providers": "^3.806.0",
|
||||||
|
"@mongodb-js/zstd": "^7.0.0",
|
||||||
|
"gcp-metadata": "^7.0.1",
|
||||||
|
"kerberos": "^7.0.0",
|
||||||
|
"mongodb-client-encryption": ">=7.0.0 <7.1.0",
|
||||||
|
"snappy": "^7.3.2",
|
||||||
|
"socks": "^2.8.6"
|
||||||
|
},
|
||||||
|
"peerDependenciesMeta": {
|
||||||
|
"@aws-sdk/credential-providers": {
|
||||||
|
"optional": true
|
||||||
|
},
|
||||||
|
"@mongodb-js/zstd": {
|
||||||
|
"optional": true
|
||||||
|
},
|
||||||
|
"gcp-metadata": {
|
||||||
|
"optional": true
|
||||||
|
},
|
||||||
|
"kerberos": {
|
||||||
|
"optional": true
|
||||||
|
},
|
||||||
|
"mongodb-client-encryption": {
|
||||||
|
"optional": true
|
||||||
|
},
|
||||||
|
"snappy": {
|
||||||
|
"optional": true
|
||||||
|
},
|
||||||
|
"socks": {
|
||||||
|
"optional": true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/mongoose/node_modules/node-fetch": {
|
||||||
|
"version": "3.3.2",
|
||||||
|
"resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-3.3.2.tgz",
|
||||||
|
"integrity": "sha512-dRB78srN/l6gqWulah9SrxeYnxeddIG30+GOqK/9OlLVyLg3HPnr6SqOWTWOXKRwC2eGYCkZ59NNuSgvSrpgOA==",
|
||||||
|
"license": "MIT",
|
||||||
|
"optional": true,
|
||||||
|
"peer": true,
|
||||||
|
"dependencies": {
|
||||||
|
"data-uri-to-buffer": "^4.0.0",
|
||||||
|
"fetch-blob": "^3.1.4",
|
||||||
|
"formdata-polyfill": "^4.0.10"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": "^12.20.0 || ^14.13.1 || >=16.0.0"
|
||||||
|
},
|
||||||
|
"funding": {
|
||||||
|
"type": "opencollective",
|
||||||
|
"url": "https://opencollective.com/node-fetch"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/mongoose/node_modules/rimraf": {
|
||||||
|
"version": "5.0.10",
|
||||||
|
"resolved": "https://registry.npmjs.org/rimraf/-/rimraf-5.0.10.tgz",
|
||||||
|
"integrity": "sha512-l0OE8wL34P4nJH/H2ffoaniAokM2qSmrtXHmlpvYr5AVVX8msAyW0l8NVJFDxlSK4u3Uh/f41cQheDVdnYijwQ==",
|
||||||
|
"license": "ISC",
|
||||||
|
"optional": true,
|
||||||
|
"peer": true,
|
||||||
|
"dependencies": {
|
||||||
|
"glob": "^10.3.7"
|
||||||
|
},
|
||||||
|
"bin": {
|
||||||
|
"rimraf": "dist/esm/bin.mjs"
|
||||||
|
},
|
||||||
|
"funding": {
|
||||||
|
"url": "https://github.com/sponsors/isaacs"
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/morgan": {
|
"node_modules/morgan": {
|
||||||
"version": "1.10.1",
|
"version": "1.10.1",
|
||||||
"resolved": "https://registry.npmjs.org/morgan/-/morgan-1.10.1.tgz",
|
"resolved": "https://registry.npmjs.org/morgan/-/morgan-1.10.1.tgz",
|
||||||
@ -9004,6 +9304,24 @@
|
|||||||
"node": ">= 0.8"
|
"node": ">= 0.8"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/mpath": {
|
||||||
|
"version": "0.9.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/mpath/-/mpath-0.9.0.tgz",
|
||||||
|
"integrity": "sha512-ikJRQTk8hw5DEoFVxHG1Gn9T/xcjtdnOKIU1JTmGjZZlg9LST2mBLmcX3/ICIbgJydT2GOc15RnNy5mHmzfSew==",
|
||||||
|
"license": "MIT",
|
||||||
|
"engines": {
|
||||||
|
"node": ">=4.0.0"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/mquery": {
|
||||||
|
"version": "6.0.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/mquery/-/mquery-6.0.0.tgz",
|
||||||
|
"integrity": "sha512-b2KQNsmgtkscfeDgkYMcWGn9vZI9YoXh802VDEwE6qc50zxBFQ0Oo8ROkawbPAsXCY1/Z1yp0MagqsZStPWJjw==",
|
||||||
|
"license": "MIT",
|
||||||
|
"engines": {
|
||||||
|
"node": ">=20.19.0"
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/ms": {
|
"node_modules/ms": {
|
||||||
"version": "2.1.3",
|
"version": "2.1.3",
|
||||||
"resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz",
|
"resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz",
|
||||||
@ -10047,7 +10365,6 @@
|
|||||||
"version": "2.3.1",
|
"version": "2.3.1",
|
||||||
"resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz",
|
"resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz",
|
||||||
"integrity": "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==",
|
"integrity": "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==",
|
||||||
"dev": true,
|
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"engines": {
|
"engines": {
|
||||||
"node": ">=6"
|
"node": ">=6"
|
||||||
@ -10712,6 +11029,12 @@
|
|||||||
"url": "https://github.com/sponsors/ljharb"
|
"url": "https://github.com/sponsors/ljharb"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/sift": {
|
||||||
|
"version": "17.1.3",
|
||||||
|
"resolved": "https://registry.npmjs.org/sift/-/sift-17.1.3.tgz",
|
||||||
|
"integrity": "sha512-Rtlj66/b0ICeFzYTuNvX/EF1igRbbnGSvEyT79McoZa/DeGhMyC5pWKOEsZKnpkqtSeovd5FL/bjHWC3CIIvCQ==",
|
||||||
|
"license": "MIT"
|
||||||
|
},
|
||||||
"node_modules/signal-exit": {
|
"node_modules/signal-exit": {
|
||||||
"version": "3.0.7",
|
"version": "3.0.7",
|
||||||
"resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz",
|
"resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz",
|
||||||
@ -10917,6 +11240,15 @@
|
|||||||
"source-map": "^0.6.0"
|
"source-map": "^0.6.0"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/sparse-bitfield": {
|
||||||
|
"version": "3.0.3",
|
||||||
|
"resolved": "https://registry.npmjs.org/sparse-bitfield/-/sparse-bitfield-3.0.3.tgz",
|
||||||
|
"integrity": "sha512-kvzhi7vqKTfkh0PZU+2D2PIllw2ymqJKujUcyPMd9Y75Nv4nPbGJZXNhxsgdQab2BmlDct1YnfQCguEvHr7VsQ==",
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"memory-pager": "^1.0.2"
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/split2": {
|
"node_modules/split2": {
|
||||||
"version": "4.2.0",
|
"version": "4.2.0",
|
||||||
"resolved": "https://registry.npmjs.org/split2/-/split2-4.2.0.tgz",
|
"resolved": "https://registry.npmjs.org/split2/-/split2-4.2.0.tgz",
|
||||||
|
|||||||
@ -19,7 +19,10 @@
|
|||||||
"migrate": "ts-node -r tsconfig-paths/register src/scripts/migrate.ts",
|
"migrate": "ts-node -r tsconfig-paths/register src/scripts/migrate.ts",
|
||||||
"seed:config": "ts-node -r tsconfig-paths/register src/scripts/seed-admin-config.ts",
|
"seed:config": "ts-node -r tsconfig-paths/register src/scripts/seed-admin-config.ts",
|
||||||
"seed:test-dealer": "ts-node -r tsconfig-paths/register src/scripts/seed-test-dealer.ts",
|
"seed:test-dealer": "ts-node -r tsconfig-paths/register src/scripts/seed-test-dealer.ts",
|
||||||
"cleanup:dealer-claims": "ts-node -r tsconfig-paths/register src/scripts/cleanup-dealer-claims.ts"
|
"cleanup:dealer-claims": "ts-node -r tsconfig-paths/register src/scripts/cleanup-dealer-claims.ts",
|
||||||
|
"reset:mongo": "ts-node -r tsconfig-paths/register src/scripts/reset-mongo-db.ts",
|
||||||
|
"seed:config:mongo": "ts-node -r tsconfig-paths/register src/scripts/seed-admin-config.mongo.ts",
|
||||||
|
"seed:test-dealer:mongo": "ts-node -r tsconfig-paths/register src/scripts/seed-test-dealer.mongo.ts"
|
||||||
},
|
},
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"@google-cloud/secret-manager": "^6.1.1",
|
"@google-cloud/secret-manager": "^6.1.1",
|
||||||
@ -40,6 +43,7 @@
|
|||||||
"helmet": "^8.0.0",
|
"helmet": "^8.0.0",
|
||||||
"ioredis": "^5.8.2",
|
"ioredis": "^5.8.2",
|
||||||
"jsonwebtoken": "^9.0.2",
|
"jsonwebtoken": "^9.0.2",
|
||||||
|
"mongoose": "^9.1.5",
|
||||||
"morgan": "^1.10.0",
|
"morgan": "^1.10.0",
|
||||||
"multer": "^1.4.5-lts.1",
|
"multer": "^1.4.5-lts.1",
|
||||||
"node-cron": "^3.0.3",
|
"node-cron": "^3.0.3",
|
||||||
@ -65,6 +69,7 @@
|
|||||||
"@types/express": "^5.0.0",
|
"@types/express": "^5.0.0",
|
||||||
"@types/jest": "^29.5.14",
|
"@types/jest": "^29.5.14",
|
||||||
"@types/jsonwebtoken": "^9.0.7",
|
"@types/jsonwebtoken": "^9.0.7",
|
||||||
|
"@types/mongoose": "^5.11.96",
|
||||||
"@types/morgan": "^1.9.9",
|
"@types/morgan": "^1.9.9",
|
||||||
"@types/multer": "^1.4.12",
|
"@types/multer": "^1.4.12",
|
||||||
"@types/node": "^22.19.1",
|
"@types/node": "^22.19.1",
|
||||||
|
|||||||
@ -1,4 +1,5 @@
|
|||||||
import { Sequelize } from 'sequelize';
|
import { Sequelize } from 'sequelize';
|
||||||
|
import mongoose from 'mongoose';
|
||||||
import dotenv from 'dotenv';
|
import dotenv from 'dotenv';
|
||||||
|
|
||||||
dotenv.config();
|
dotenv.config();
|
||||||
@ -25,4 +26,18 @@ const sequelize = new Sequelize({
|
|||||||
},
|
},
|
||||||
});
|
});
|
||||||
|
|
||||||
export { sequelize };
|
export const connectMongoDB = async () => {
|
||||||
|
try {
|
||||||
|
const mongoUri = process.env.MONGO_URI || process.env.MONGODB_URL || 'mongodb://localhost:27017/re_workflow_db';
|
||||||
|
await mongoose.connect(mongoUri);
|
||||||
|
console.log('MongoDB Connected Successfully');
|
||||||
|
} catch (error) {
|
||||||
|
console.error('MongoDB Connection Error:', error);
|
||||||
|
// Don't exit process in development if Mongo is optional for now
|
||||||
|
if (process.env.NODE_ENV === 'production') {
|
||||||
|
process.exit(1);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
export { sequelize, mongoose };
|
||||||
|
|||||||
@ -1,13 +1,14 @@
|
|||||||
import { Request, Response } from 'express';
|
import { Request, Response } from 'express';
|
||||||
import { Holiday, HolidayType } from '@models/Holiday';
|
import { Holiday, HolidayType } from '@models/Holiday';
|
||||||
import { holidayService } from '@services/holiday.service';
|
import { holidayMongoService as holidayService } from '@services/holiday.service';
|
||||||
import { activityTypeService } from '@services/activityType.service';
|
import { activityTypeService } from '@services/activityType.service';
|
||||||
import { sequelize } from '@config/database';
|
import { sequelize } from '../config/database'; // Import sequelize instance
|
||||||
import { QueryTypes, Op } from 'sequelize';
|
import { QueryTypes } from 'sequelize'; // Import QueryTypes
|
||||||
import logger from '@utils/logger';
|
import logger from '@utils/logger';
|
||||||
import { initializeHolidaysCache, clearWorkingHoursCache } from '@utils/tatTimeUtils';
|
import { initializeHolidaysCache, clearWorkingHoursCache } from '@utils/tatTimeUtils';
|
||||||
import { clearConfigCache } from '@services/configReader.service';
|
import { clearConfigCache } from '@services/configReader.service';
|
||||||
import { User, UserRole } from '@models/User';
|
import { UserModel as User, IUser } from '@models/mongoose/User.schema';
|
||||||
|
import { UserRole } from '../types/user.types';
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Get all holidays (with optional year filter)
|
* Get all holidays (with optional year filter)
|
||||||
@ -101,15 +102,11 @@ export const createHoliday = async (req: Request, res: Response): Promise<void>
|
|||||||
}
|
}
|
||||||
|
|
||||||
const holiday = await holidayService.createHoliday({
|
const holiday = await holidayService.createHoliday({
|
||||||
holidayDate,
|
date: holidayDate,
|
||||||
holidayName,
|
name: holidayName,
|
||||||
description,
|
type: (holidayType as any) || HolidayType.ORGANIZATIONAL,
|
||||||
holidayType: holidayType || HolidayType.ORGANIZATIONAL,
|
// explanation property removed as it is not part of the service interface
|
||||||
isRecurring: isRecurring || false,
|
year: new Date(holidayDate).getFullYear(),
|
||||||
recurrenceRule,
|
|
||||||
appliesToDepartments,
|
|
||||||
appliesToLocations,
|
|
||||||
createdBy: userId
|
|
||||||
});
|
});
|
||||||
|
|
||||||
// Reload holidays cache
|
// Reload holidays cache
|
||||||
@ -146,7 +143,7 @@ export const updateHoliday = async (req: Request, res: Response): Promise<void>
|
|||||||
const { holidayId } = req.params;
|
const { holidayId } = req.params;
|
||||||
const updates = req.body;
|
const updates = req.body;
|
||||||
|
|
||||||
const holiday = await holidayService.updateHoliday(holidayId, updates, userId);
|
const holiday = await holidayService.updateHoliday(holidayId, updates);
|
||||||
|
|
||||||
if (!holiday) {
|
if (!holiday) {
|
||||||
res.status(404).json({
|
res.status(404).json({
|
||||||
@ -222,7 +219,7 @@ export const bulkImportHolidays = async (req: Request, res: Response): Promise<v
|
|||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
const result = await holidayService.bulkImportHolidays(holidays, userId);
|
const result = await holidayService.bulkImportHolidays(holidays);
|
||||||
|
|
||||||
// Reload holidays cache
|
// Reload holidays cache
|
||||||
await initializeHolidaysCache();
|
await initializeHolidaysCache();
|
||||||
@ -533,7 +530,7 @@ export const updateUserRole = async (req: Request, res: Response): Promise<void>
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Find user
|
// Find user
|
||||||
const user = await User.findByPk(userId);
|
const user = await User.findOne({ userId });
|
||||||
if (!user) {
|
if (!user) {
|
||||||
res.status(404).json({
|
res.status(404).json({
|
||||||
success: false,
|
success: false,
|
||||||
@ -606,8 +603,8 @@ export const getUsersByRole = async (req: Request, res: Response): Promise<void>
|
|||||||
|
|
||||||
// Handle role filtering
|
// Handle role filtering
|
||||||
if (role && role !== 'ALL' && role !== 'ELEVATED') {
|
if (role && role !== 'ALL' && role !== 'ELEVATED') {
|
||||||
const validRoles: UserRole[] = ['USER', 'MANAGEMENT', 'ADMIN'];
|
const validRoles: string[] = ['USER', 'MANAGEMENT', 'ADMIN'];
|
||||||
if (!validRoles.includes(role as UserRole)) {
|
if (!validRoles.includes(role as string)) {
|
||||||
res.status(400).json({
|
res.status(400).json({
|
||||||
success: false,
|
success: false,
|
||||||
error: 'Invalid role. Must be USER, MANAGEMENT, ADMIN, ALL, or ELEVATED'
|
error: 'Invalid role. Must be USER, MANAGEMENT, ADMIN, ALL, or ELEVATED'
|
||||||
@ -617,38 +614,20 @@ export const getUsersByRole = async (req: Request, res: Response): Promise<void>
|
|||||||
whereClause.role = role;
|
whereClause.role = role;
|
||||||
} else if (role === 'ELEVATED' || !role) {
|
} else if (role === 'ELEVATED' || !role) {
|
||||||
// Default: Show only ADMIN and MANAGEMENT (elevated users)
|
// Default: Show only ADMIN and MANAGEMENT (elevated users)
|
||||||
whereClause.role = { [Op.in]: ['ADMIN', 'MANAGEMENT'] };
|
whereClause.role = { $in: ['ADMIN', 'MANAGEMENT'] };
|
||||||
}
|
}
|
||||||
// If role === 'ALL', don't filter by role (show all users)
|
// If role === 'ALL', don't filter by role (show all users)
|
||||||
|
|
||||||
// Get total count for pagination
|
// Get total count for pagination
|
||||||
const totalUsers = await User.count({ where: whereClause });
|
const totalUsers = await User.countDocuments(whereClause);
|
||||||
const totalPages = Math.ceil(totalUsers / limitNum);
|
const totalPages = Math.ceil(totalUsers / limitNum);
|
||||||
|
|
||||||
// Get paginated users
|
// Get paginated users
|
||||||
const users = await User.findAll({
|
const users = await User.find(whereClause)
|
||||||
where: whereClause,
|
.select('userId email displayName firstName lastName department designation role manager postalAddress lastLogin createdAt')
|
||||||
attributes: [
|
.sort({ role: 1, displayName: 1 })
|
||||||
'userId',
|
.skip(offset)
|
||||||
'email',
|
.limit(limitNum);
|
||||||
'displayName',
|
|
||||||
'firstName',
|
|
||||||
'lastName',
|
|
||||||
'department',
|
|
||||||
'designation',
|
|
||||||
'role',
|
|
||||||
'manager',
|
|
||||||
'postalAddress',
|
|
||||||
'lastLogin',
|
|
||||||
'createdAt'
|
|
||||||
],
|
|
||||||
order: [
|
|
||||||
['role', 'ASC'], // ADMIN first, then MANAGEMENT, then USER
|
|
||||||
['displayName', 'ASC']
|
|
||||||
],
|
|
||||||
limit: limitNum,
|
|
||||||
offset: offset
|
|
||||||
});
|
|
||||||
|
|
||||||
// Get role summary (across all users, not just current page)
|
// Get role summary (across all users, not just current page)
|
||||||
const roleStats = await sequelize.query(`
|
const roleStats = await sequelize.query(`
|
||||||
@ -777,7 +756,7 @@ export const assignRoleByEmail = async (req: Request, res: Response): Promise<vo
|
|||||||
logger.info(`[Admin] Assigning role ${role} to ${email} by user ${currentUserId}`);
|
logger.info(`[Admin] Assigning role ${role} to ${email} by user ${currentUserId}`);
|
||||||
|
|
||||||
// First, check if user already exists in our database
|
// First, check if user already exists in our database
|
||||||
let user = await User.findOne({ where: { email } });
|
let user: IUser | null = await User.findOne({ email });
|
||||||
|
|
||||||
if (!user) {
|
if (!user) {
|
||||||
// User doesn't exist, need to fetch from Okta and create
|
// User doesn't exist, need to fetch from Okta and create
|
||||||
@ -800,12 +779,11 @@ export const assignRoleByEmail = async (req: Request, res: Response): Promise<vo
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Create user in our database via centralized userService with all fields including manager
|
// Create user in our database via centralized userService with all fields including manager
|
||||||
const ensured = await userService.createOrUpdateUser({
|
user = (await userService.createOrUpdateUser({
|
||||||
...oktaUserData,
|
...oktaUserData,
|
||||||
role, // Set the assigned role
|
role: role as any, // Set the assigned role
|
||||||
isActive: true, // Ensure user is active
|
isActive: true, // Ensure user is active
|
||||||
});
|
})) as IUser;
|
||||||
user = ensured;
|
|
||||||
|
|
||||||
logger.info(`[Admin] Created new user ${email} with role ${role} (manager: ${oktaUserData.manager || 'N/A'})`);
|
logger.info(`[Admin] Created new user ${email} with role ${role} (manager: ${oktaUserData.manager || 'N/A'})`);
|
||||||
} catch (oktaError: any) {
|
} catch (oktaError: any) {
|
||||||
@ -839,27 +817,36 @@ export const assignRoleByEmail = async (req: Request, res: Response): Promise<vo
|
|||||||
|
|
||||||
if (oktaUserData) {
|
if (oktaUserData) {
|
||||||
// Sync all fields from Okta including the new role using centralized method
|
// Sync all fields from Okta including the new role using centralized method
|
||||||
const updated = await userService.createOrUpdateUser({
|
user = (await userService.createOrUpdateUser({
|
||||||
...oktaUserData, // Includes all fields: manager, jobTitle, postalAddress, etc.
|
...oktaUserData, // Includes all fields: manager, jobTitle, postalAddress, etc.
|
||||||
role, // Set the new role
|
role: role as any, // Set the new role
|
||||||
isActive: true, // Ensure user is active
|
isActive: true, // Ensure user is active
|
||||||
});
|
})) as IUser;
|
||||||
user = updated;
|
|
||||||
|
|
||||||
logger.info(`[Admin] Synced user ${email} from Okta (manager: ${oktaUserData.manager || 'N/A'}) and updated role from ${previousRole} to ${role}`);
|
logger.info(`[Admin] Synced user ${email} from Okta (manager: ${oktaUserData.manager || 'N/A'}) and updated role from ${previousRole} to ${role}`);
|
||||||
} else {
|
} else {
|
||||||
// Okta user not found, just update role
|
// Okta user not found, just update role
|
||||||
await user.update({ role });
|
user.role = role as any;
|
||||||
|
await user.save();
|
||||||
logger.info(`[Admin] Updated user ${email} role from ${previousRole} to ${role} (Okta data not available)`);
|
logger.info(`[Admin] Updated user ${email} role from ${previousRole} to ${role} (Okta data not available)`);
|
||||||
}
|
}
|
||||||
} catch (oktaError: any) {
|
} catch (oktaError: any) {
|
||||||
// If Okta fetch fails, just update the role
|
// If Okta fetch fails, just update the role
|
||||||
logger.warn(`[Admin] Failed to fetch Okta data for ${email}, updating role only:`, oktaError.message);
|
logger.warn(`[Admin] Failed to fetch Okta data for ${email}, updating role only:`, oktaError.message);
|
||||||
await user.update({ role });
|
user.role = role as any;
|
||||||
|
await user.save();
|
||||||
logger.info(`[Admin] Updated user ${email} role from ${previousRole} to ${role} (Okta sync failed)`);
|
logger.info(`[Admin] Updated user ${email} role from ${previousRole} to ${role} (Okta sync failed)`);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if (!user) {
|
||||||
|
res.status(500).json({
|
||||||
|
success: false,
|
||||||
|
error: 'Failed to create or update user'
|
||||||
|
});
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
res.json({
|
res.json({
|
||||||
success: true,
|
success: true,
|
||||||
message: `Successfully assigned ${role} role to ${user.displayName || email}`,
|
message: `Successfully assigned ${role} role to ${user.displayName || email}`,
|
||||||
@ -1049,4 +1036,3 @@ export const deleteActivityType = async (req: Request, res: Response): Promise<v
|
|||||||
});
|
});
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
|
|||||||
@ -1,15 +1,15 @@
|
|||||||
import { Request, Response } from 'express';
|
import { Request, Response } from 'express';
|
||||||
import { ApprovalService } from '@services/approval.service';
|
import { ApprovalService } from '@services/approval.service';
|
||||||
import { DealerClaimApprovalService } from '@services/dealerClaimApproval.service';
|
import { DealerClaimApprovalMongoService } from '@services/dealerClaimApproval.service';
|
||||||
import { ApprovalLevel } from '@models/ApprovalLevel';
|
import { ApprovalLevelModel as ApprovalLevel } from '../models/mongoose/ApprovalLevel.schema';
|
||||||
import { WorkflowRequest } from '@models/WorkflowRequest';
|
import { WorkflowRequestModel as WorkflowRequest } from '../models/mongoose/WorkflowRequest.schema';
|
||||||
import { validateApprovalAction } from '@validators/approval.validator';
|
import { validateApprovalAction } from '@validators/approval.validator';
|
||||||
import { ResponseHandler } from '@utils/responseHandler';
|
import { ResponseHandler } from '@utils/responseHandler';
|
||||||
import type { AuthenticatedRequest } from '../types/express';
|
import type { AuthenticatedRequest } from '../types/express';
|
||||||
import { getRequestMetadata } from '@utils/requestUtils';
|
import { getRequestMetadata } from '@utils/requestUtils';
|
||||||
|
|
||||||
const approvalService = new ApprovalService();
|
const approvalService = new ApprovalService();
|
||||||
const dealerClaimApprovalService = new DealerClaimApprovalService();
|
const dealerClaimApprovalService = new DealerClaimApprovalMongoService();
|
||||||
|
|
||||||
export class ApprovalController {
|
export class ApprovalController {
|
||||||
async approveLevel(req: AuthenticatedRequest, res: Response): Promise<void> {
|
async approveLevel(req: AuthenticatedRequest, res: Response): Promise<void> {
|
||||||
@ -18,13 +18,13 @@ export class ApprovalController {
|
|||||||
const validatedData = validateApprovalAction(req.body);
|
const validatedData = validateApprovalAction(req.body);
|
||||||
|
|
||||||
// Determine which service to use based on workflow type
|
// Determine which service to use based on workflow type
|
||||||
const level = await ApprovalLevel.findByPk(levelId);
|
const level = await ApprovalLevel.findOne({ levelId });
|
||||||
if (!level) {
|
if (!level) {
|
||||||
ResponseHandler.notFound(res, 'Approval level not found');
|
ResponseHandler.notFound(res, 'Approval level not found');
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
const workflow = await WorkflowRequest.findByPk(level.requestId);
|
const workflow = await WorkflowRequest.findOne({ requestNumber: level.requestId });
|
||||||
if (!workflow) {
|
if (!workflow) {
|
||||||
ResponseHandler.notFound(res, 'Workflow not found');
|
ResponseHandler.notFound(res, 'Workflow not found');
|
||||||
return;
|
return;
|
||||||
@ -75,8 +75,10 @@ export class ApprovalController {
|
|||||||
try {
|
try {
|
||||||
const { id } = req.params;
|
const { id } = req.params;
|
||||||
|
|
||||||
// Determine which service to use based on workflow type
|
// Determine which service to use based on workflow type (handle both requestId and requestNumber)
|
||||||
const workflow = await WorkflowRequest.findByPk(id);
|
const workflow = await WorkflowRequest.findOne({
|
||||||
|
$or: [{ requestId: id }, { requestNumber: id }]
|
||||||
|
});
|
||||||
if (!workflow) {
|
if (!workflow) {
|
||||||
ResponseHandler.notFound(res, 'Workflow not found');
|
ResponseHandler.notFound(res, 'Workflow not found');
|
||||||
return;
|
return;
|
||||||
@ -103,8 +105,10 @@ export class ApprovalController {
|
|||||||
try {
|
try {
|
||||||
const { id } = req.params;
|
const { id } = req.params;
|
||||||
|
|
||||||
// Determine which service to use based on workflow type
|
// Determine which service to use based on workflow type (handle both requestId and requestNumber)
|
||||||
const workflow = await WorkflowRequest.findByPk(id);
|
const workflow = await WorkflowRequest.findOne({
|
||||||
|
$or: [{ requestId: id }, { requestNumber: id }]
|
||||||
|
});
|
||||||
if (!workflow) {
|
if (!workflow) {
|
||||||
ResponseHandler.notFound(res, 'Workflow not found');
|
ResponseHandler.notFound(res, 'Workflow not found');
|
||||||
return;
|
return;
|
||||||
|
|||||||
@ -4,7 +4,7 @@ import { validateSSOCallback, validateRefreshToken, validateTokenExchange, valid
|
|||||||
import { ResponseHandler } from '../utils/responseHandler';
|
import { ResponseHandler } from '../utils/responseHandler';
|
||||||
import type { AuthenticatedRequest } from '../types/express';
|
import type { AuthenticatedRequest } from '../types/express';
|
||||||
import logger from '../utils/logger';
|
import logger from '../utils/logger';
|
||||||
import { activityService, SYSTEM_EVENT_REQUEST_ID } from '../services/activity.service';
|
import { activityMongoService as activityService, SYSTEM_EVENT_REQUEST_ID } from '../services/activity.service';
|
||||||
import { getRequestMetadata } from '../utils/requestUtils';
|
import { getRequestMetadata } from '../utils/requestUtils';
|
||||||
|
|
||||||
export class AuthController {
|
export class AuthController {
|
||||||
|
|||||||
@ -1,7 +1,7 @@
|
|||||||
import { Request, Response } from 'express';
|
import { Request, Response } from 'express';
|
||||||
import { WorkflowRequest, ApprovalLevel, WorkNote, Document, Activity, ConclusionRemark } from '@models/index';
|
import { WorkflowRequest, ApprovalLevel, WorkNote, Document, Activity, ConclusionRemark } from '@models/index';
|
||||||
import { aiService } from '@services/ai.service';
|
import { aiService } from '@services/ai.service';
|
||||||
import { activityService } from '@services/activity.service';
|
import { activityMongoService as activityService } from '@services/activity.service';
|
||||||
import logger from '@utils/logger';
|
import logger from '@utils/logger';
|
||||||
import { getRequestMetadata } from '@utils/requestUtils';
|
import { getRequestMetadata } from '@utils/requestUtils';
|
||||||
|
|
||||||
@ -423,4 +423,3 @@ export class ConclusionController {
|
|||||||
}
|
}
|
||||||
|
|
||||||
export const conclusionController = new ConclusionController();
|
export const conclusionController = new ConclusionController();
|
||||||
|
|
||||||
|
|||||||
@ -1,12 +1,12 @@
|
|||||||
import { Request, Response } from 'express';
|
import { Request, Response } from 'express';
|
||||||
import { DashboardService } from '../services/dashboard.service';
|
import { DashboardMongoService, dashboardMongoService } from '../services/dashboard.service';
|
||||||
import logger from '@utils/logger';
|
import logger from '@utils/logger';
|
||||||
|
|
||||||
export class DashboardController {
|
export class DashboardController {
|
||||||
private dashboardService: DashboardService;
|
private dashboardService: DashboardMongoService = dashboardMongoService;
|
||||||
|
|
||||||
constructor() {
|
constructor() {
|
||||||
this.dashboardService = new DashboardService();
|
// Service is now injected via import singleton
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@ -646,4 +646,3 @@ export class DashboardController {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@ -1,6 +1,6 @@
|
|||||||
import { Request, Response } from 'express';
|
import { Request, Response } from 'express';
|
||||||
import type { AuthenticatedRequest } from '../types/express';
|
import type { AuthenticatedRequest } from '../types/express';
|
||||||
import { DealerClaimService } from '../services/dealerClaim.service';
|
import { DealerClaimMongoService } from '../services/dealerClaim.service';
|
||||||
import { ResponseHandler } from '../utils/responseHandler';
|
import { ResponseHandler } from '../utils/responseHandler';
|
||||||
import logger from '../utils/logger';
|
import logger from '../utils/logger';
|
||||||
import { gcsStorageService } from '../services/gcsStorage.service';
|
import { gcsStorageService } from '../services/gcsStorage.service';
|
||||||
@ -13,7 +13,7 @@ import path from 'path';
|
|||||||
import crypto from 'crypto';
|
import crypto from 'crypto';
|
||||||
|
|
||||||
export class DealerClaimController {
|
export class DealerClaimController {
|
||||||
private dealerClaimService = new DealerClaimService();
|
private dealerClaimService = new DealerClaimMongoService();
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Create a new dealer claim request
|
* Create a new dealer claim request
|
||||||
|
|||||||
@ -2,17 +2,16 @@ import { Request, Response } from 'express';
|
|||||||
import crypto from 'crypto';
|
import crypto from 'crypto';
|
||||||
import path from 'path';
|
import path from 'path';
|
||||||
import fs from 'fs';
|
import fs from 'fs';
|
||||||
import { Document } from '@models/Document';
|
import { DocumentModel } from '@models/mongoose/Document.schema';
|
||||||
import { User } from '@models/User';
|
import { UserModel } from '../models/mongoose/User.schema';
|
||||||
import { WorkflowRequest } from '@models/WorkflowRequest';
|
import { WorkflowRequestModel as WorkflowRequest } from '../models/mongoose/WorkflowRequest.schema';
|
||||||
import { Participant } from '@models/Participant';
|
import { ParticipantModel as Participant } from '../models/mongoose/Participant.schema';
|
||||||
import { ApprovalLevel } from '@models/ApprovalLevel';
|
import { ApprovalLevelModel as ApprovalLevel } from '../models/mongoose/ApprovalLevel.schema';
|
||||||
import { Op } from 'sequelize';
|
|
||||||
import { ResponseHandler } from '@utils/responseHandler';
|
import { ResponseHandler } from '@utils/responseHandler';
|
||||||
import { activityService } from '@services/activity.service';
|
import { activityMongoService as activityService } from '@services/activity.service';
|
||||||
import { gcsStorageService } from '@services/gcsStorage.service';
|
import { gcsStorageService } from '@services/gcsStorage.service';
|
||||||
import { emailNotificationService } from '@services/emailNotification.service';
|
import { emailNotificationService } from '@services/emailNotification.service';
|
||||||
import { notificationService } from '@services/notification.service';
|
import { notificationMongoService as notificationService } from '@services/notification.service';
|
||||||
import type { AuthenticatedRequest } from '../types/express';
|
import type { AuthenticatedRequest } from '../types/express';
|
||||||
import { getRequestMetadata } from '@utils/requestUtils';
|
import { getRequestMetadata } from '@utils/requestUtils';
|
||||||
import { getConfigNumber, getConfigValue } from '@services/configReader.service';
|
import { getConfigNumber, getConfigValue } from '@services/configReader.service';
|
||||||
@ -28,9 +27,18 @@ export class DocumentController {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Extract requestId from body (multer should parse form fields)
|
// Extract requestId from body (multer should parse form fields)
|
||||||
// Try both req.body and req.body.requestId for compatibility
|
|
||||||
const identifier = String((req.body?.requestId || req.body?.request_id || '').trim());
|
const identifier = String((req.body?.requestId || req.body?.request_id || '').trim());
|
||||||
|
|
||||||
|
console.log('[DEBUG] Document upload attempt:', {
|
||||||
|
identifier,
|
||||||
|
bodyKeys: Object.keys(req.body || {}),
|
||||||
|
bodyRequestId: req.body?.requestId,
|
||||||
|
bodyRequest_id: req.body?.request_id,
|
||||||
|
userId: req.user?.userId
|
||||||
|
});
|
||||||
|
|
||||||
if (!identifier || identifier === 'undefined' || identifier === 'null') {
|
if (!identifier || identifier === 'undefined' || identifier === 'null') {
|
||||||
|
console.log('[DEBUG] RequestId missing or invalid');
|
||||||
logWithContext('error', 'RequestId missing or invalid in document upload', {
|
logWithContext('error', 'RequestId missing or invalid in document upload', {
|
||||||
body: req.body,
|
body: req.body,
|
||||||
bodyKeys: Object.keys(req.body || {}),
|
bodyKeys: Object.keys(req.body || {}),
|
||||||
@ -46,19 +54,45 @@ export class DocumentController {
|
|||||||
return uuidRegex.test(id);
|
return uuidRegex.test(id);
|
||||||
};
|
};
|
||||||
|
|
||||||
// Get workflow request - handle both UUID (requestId) and requestNumber
|
// Helper to check if identifier is MongoDB ObjectId
|
||||||
let workflowRequest: WorkflowRequest | null = null;
|
const isObjectId = (id: string): boolean => {
|
||||||
if (isUuid(identifier)) {
|
return /^[0-9a-f]{24}$/i.test(id);
|
||||||
workflowRequest = await WorkflowRequest.findByPk(identifier);
|
};
|
||||||
|
|
||||||
|
// Get workflow request - handle UUID (requestId), requestNumber, or MongoDB ObjectId (_id)
|
||||||
|
let workflowRequest: any = null;
|
||||||
|
const identifierIsUuid = isUuid(identifier);
|
||||||
|
const identifierIsObjectId = isObjectId(identifier);
|
||||||
|
|
||||||
|
console.log('[DEBUG] Looking up workflow request:', {
|
||||||
|
identifier,
|
||||||
|
identifierIsUuid,
|
||||||
|
identifierIsObjectId,
|
||||||
|
lookupField: identifierIsUuid ? 'requestId' : identifierIsObjectId ? '_id' : 'requestNumber'
|
||||||
|
});
|
||||||
|
|
||||||
|
if (identifierIsUuid) {
|
||||||
|
workflowRequest = await WorkflowRequest.findOne({ requestId: identifier });
|
||||||
|
} else if (identifierIsObjectId) {
|
||||||
|
workflowRequest = await WorkflowRequest.findById(identifier);
|
||||||
} else {
|
} else {
|
||||||
workflowRequest = await WorkflowRequest.findOne({ where: { requestNumber: identifier } });
|
workflowRequest = await WorkflowRequest.findOne({ requestNumber: identifier });
|
||||||
}
|
}
|
||||||
|
|
||||||
|
console.log('[DEBUG] Workflow lookup result:', {
|
||||||
|
found: !!workflowRequest,
|
||||||
|
requestId: workflowRequest?.requestId,
|
||||||
|
requestNumber: workflowRequest?.requestNumber,
|
||||||
|
_id: workflowRequest?._id?.toString()
|
||||||
|
});
|
||||||
|
|
||||||
if (!workflowRequest) {
|
if (!workflowRequest) {
|
||||||
logWithContext('error', 'Workflow request not found for document upload', {
|
logWithContext('error', 'Workflow request not found for document upload', {
|
||||||
identifier,
|
identifier,
|
||||||
isUuid: isUuid(identifier),
|
isUuid: identifierIsUuid,
|
||||||
userId: req.user?.userId
|
isObjectId: identifierIsObjectId,
|
||||||
|
userId: req.user?.userId,
|
||||||
|
attemptedLookup: identifierIsUuid ? 'requestId' : identifierIsObjectId ? '_id' : 'requestNumber'
|
||||||
});
|
});
|
||||||
ResponseHandler.error(res, 'Workflow request not found', 404);
|
ResponseHandler.error(res, 'Workflow request not found', 404);
|
||||||
return;
|
return;
|
||||||
@ -71,7 +105,6 @@ export class DocumentController {
|
|||||||
if (!requestNumber) {
|
if (!requestNumber) {
|
||||||
logWithContext('error', 'Request number not found for workflow', {
|
logWithContext('error', 'Request number not found for workflow', {
|
||||||
requestId,
|
requestId,
|
||||||
workflowRequest: JSON.stringify(workflowRequest.toJSON()),
|
|
||||||
userId: req.user?.userId
|
userId: req.user?.userId
|
||||||
});
|
});
|
||||||
ResponseHandler.error(res, 'Request number not found for workflow', 500);
|
ResponseHandler.error(res, 'Request number not found for workflow', 500);
|
||||||
@ -84,7 +117,7 @@ export class DocumentController {
|
|||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
// Validate file size against database configuration
|
// Validate file size
|
||||||
const maxFileSizeMB = await getConfigNumber('MAX_FILE_SIZE_MB', 10);
|
const maxFileSizeMB = await getConfigNumber('MAX_FILE_SIZE_MB', 10);
|
||||||
const maxFileSizeBytes = maxFileSizeMB * 1024 * 1024;
|
const maxFileSizeBytes = maxFileSizeMB * 1024 * 1024;
|
||||||
|
|
||||||
@ -97,9 +130,9 @@ export class DocumentController {
|
|||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
// Validate file type against database configuration
|
// Validate file type
|
||||||
const allowedFileTypesStr = await getConfigValue('ALLOWED_FILE_TYPES', 'pdf,doc,docx,xls,xlsx,ppt,pptx,jpg,jpeg,png,gif');
|
const allowedFileTypesStr = await getConfigValue('ALLOWED_FILE_TYPES', 'pdf,doc,docx,xls,xlsx,ppt,pptx,jpg,jpeg,png,gif');
|
||||||
const allowedFileTypes = allowedFileTypesStr.split(',').map(ext => ext.trim().toLowerCase());
|
const allowedFileTypes = allowedFileTypesStr.split(',').map((ext: string) => ext.trim().toLowerCase());
|
||||||
const fileExtension = path.extname(file.originalname).replace('.', '').toLowerCase();
|
const fileExtension = path.extname(file.originalname).replace('.', '').toLowerCase();
|
||||||
|
|
||||||
if (!allowedFileTypes.includes(fileExtension)) {
|
if (!allowedFileTypes.includes(fileExtension)) {
|
||||||
@ -117,7 +150,7 @@ export class DocumentController {
|
|||||||
const extension = path.extname(file.originalname).replace('.', '').toLowerCase();
|
const extension = path.extname(file.originalname).replace('.', '').toLowerCase();
|
||||||
const category = (req.body?.category as string) || 'OTHER';
|
const category = (req.body?.category as string) || 'OTHER';
|
||||||
|
|
||||||
// Upload with automatic fallback to local storage
|
// Upload file
|
||||||
const uploadResult = await gcsStorageService.uploadFileWithFallback({
|
const uploadResult = await gcsStorageService.uploadFileWithFallback({
|
||||||
buffer: fileBuffer,
|
buffer: fileBuffer,
|
||||||
originalName: file.originalname,
|
originalName: file.originalname,
|
||||||
@ -129,7 +162,7 @@ export class DocumentController {
|
|||||||
const storageUrl = uploadResult.storageUrl;
|
const storageUrl = uploadResult.storageUrl;
|
||||||
const gcsFilePath = uploadResult.filePath;
|
const gcsFilePath = uploadResult.filePath;
|
||||||
|
|
||||||
// Clean up local temporary file if it exists (from multer disk storage)
|
// Clean up local temp file
|
||||||
if (file.path && fs.existsSync(file.path)) {
|
if (file.path && fs.existsSync(file.path)) {
|
||||||
try {
|
try {
|
||||||
fs.unlinkSync(file.path);
|
fs.unlinkSync(file.path);
|
||||||
@ -138,134 +171,30 @@ export class DocumentController {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Check if storageUrl exceeds database column limit (500 chars)
|
|
||||||
// GCS signed URLs can be very long (500-1000+ chars)
|
|
||||||
const MAX_STORAGE_URL_LENGTH = 500;
|
|
||||||
let finalStorageUrl = storageUrl;
|
|
||||||
if (storageUrl && storageUrl.length > MAX_STORAGE_URL_LENGTH) {
|
|
||||||
logWithContext('warn', 'Storage URL exceeds database column limit, truncating', {
|
|
||||||
originalLength: storageUrl.length,
|
|
||||||
maxLength: MAX_STORAGE_URL_LENGTH,
|
|
||||||
urlPrefix: storageUrl.substring(0, 100),
|
|
||||||
});
|
|
||||||
// For signed URLs, we can't truncate as it will break the URL
|
|
||||||
// Instead, store null and generate signed URLs on-demand when needed
|
|
||||||
// The filePath is sufficient to generate a new signed URL later
|
|
||||||
finalStorageUrl = null as any;
|
|
||||||
logWithContext('info', 'Storing null storageUrl - will generate signed URL on-demand', {
|
|
||||||
filePath: gcsFilePath,
|
|
||||||
reason: 'Signed URL too long for database column',
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
// Truncate file names if they exceed database column limits (255 chars)
|
|
||||||
const MAX_FILE_NAME_LENGTH = 255;
|
|
||||||
const originalFileName = file.originalname;
|
|
||||||
let truncatedOriginalFileName = originalFileName;
|
|
||||||
|
|
||||||
if (originalFileName.length > MAX_FILE_NAME_LENGTH) {
|
|
||||||
// Preserve file extension when truncating
|
|
||||||
const ext = path.extname(originalFileName);
|
|
||||||
const nameWithoutExt = path.basename(originalFileName, ext);
|
|
||||||
const maxNameLength = MAX_FILE_NAME_LENGTH - ext.length;
|
|
||||||
|
|
||||||
if (maxNameLength > 0) {
|
|
||||||
truncatedOriginalFileName = nameWithoutExt.substring(0, maxNameLength) + ext;
|
|
||||||
} else {
|
|
||||||
// If extension itself is too long, just use the extension
|
|
||||||
truncatedOriginalFileName = ext.substring(0, MAX_FILE_NAME_LENGTH);
|
|
||||||
}
|
|
||||||
|
|
||||||
logWithContext('warn', 'File name truncated to fit database column', {
|
|
||||||
originalLength: originalFileName.length,
|
|
||||||
truncatedLength: truncatedOriginalFileName.length,
|
|
||||||
originalName: originalFileName.substring(0, 100) + '...',
|
|
||||||
truncatedName: truncatedOriginalFileName,
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
// Generate fileName (basename of the generated file name in GCS)
|
|
||||||
const generatedFileName = path.basename(gcsFilePath);
|
|
||||||
let truncatedFileName = generatedFileName;
|
|
||||||
|
|
||||||
if (generatedFileName.length > MAX_FILE_NAME_LENGTH) {
|
|
||||||
const ext = path.extname(generatedFileName);
|
|
||||||
const nameWithoutExt = path.basename(generatedFileName, ext);
|
|
||||||
const maxNameLength = MAX_FILE_NAME_LENGTH - ext.length;
|
|
||||||
|
|
||||||
if (maxNameLength > 0) {
|
|
||||||
truncatedFileName = nameWithoutExt.substring(0, maxNameLength) + ext;
|
|
||||||
} else {
|
|
||||||
truncatedFileName = ext.substring(0, MAX_FILE_NAME_LENGTH);
|
|
||||||
}
|
|
||||||
|
|
||||||
logWithContext('warn', 'Generated file name truncated', {
|
|
||||||
originalLength: generatedFileName.length,
|
|
||||||
truncatedLength: truncatedFileName.length,
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
// Prepare document data
|
// Prepare document data
|
||||||
const documentData = {
|
const documentData = {
|
||||||
|
documentId: require('crypto').randomUUID(),
|
||||||
requestId,
|
requestId,
|
||||||
uploadedBy: userId,
|
uploadedBy: userId,
|
||||||
fileName: truncatedFileName,
|
fileName: path.basename(gcsFilePath).substring(0, 255),
|
||||||
originalFileName: truncatedOriginalFileName,
|
originalFileName: file.originalname.substring(0, 255),
|
||||||
fileType: extension,
|
fileType: extension,
|
||||||
fileExtension: extension,
|
fileExtension: extension,
|
||||||
fileSize: file.size,
|
fileSize: file.size,
|
||||||
filePath: gcsFilePath, // Store GCS path or local path
|
filePath: gcsFilePath,
|
||||||
storageUrl: finalStorageUrl, // Store GCS URL or local URL (null if too long)
|
storageUrl: (storageUrl && storageUrl.length < 500) ? storageUrl : undefined,
|
||||||
mimeType: file.mimetype,
|
mimeType: file.mimetype,
|
||||||
checksum,
|
checksum,
|
||||||
isGoogleDoc: false,
|
category: category as any,
|
||||||
googleDocUrl: null as any,
|
|
||||||
category,
|
|
||||||
version: 1,
|
version: 1,
|
||||||
parentDocumentId: null as any,
|
|
||||||
isDeleted: false,
|
isDeleted: false,
|
||||||
downloadCount: 0,
|
|
||||||
};
|
};
|
||||||
|
|
||||||
logWithContext('info', 'Creating document record', {
|
const doc = await (DocumentModel as any).create(documentData);
|
||||||
requestId,
|
|
||||||
userId,
|
|
||||||
fileName: file.originalname,
|
|
||||||
filePath: gcsFilePath,
|
|
||||||
storageUrl: storageUrl,
|
|
||||||
documentData: JSON.stringify(documentData, null, 2),
|
|
||||||
});
|
|
||||||
|
|
||||||
let doc;
|
// Log event
|
||||||
try {
|
logDocumentEvent('uploaded', (doc as any).documentId, {
|
||||||
doc = await Document.create(documentData as any);
|
requestId: workflowRequest.requestId, // Standardized to UUID
|
||||||
logWithContext('info', 'Document record created successfully', {
|
|
||||||
documentId: doc.documentId,
|
|
||||||
requestId,
|
|
||||||
fileName: file.originalname,
|
|
||||||
});
|
|
||||||
} catch (createError) {
|
|
||||||
const createErrorMessage = createError instanceof Error ? createError.message : 'Unknown error';
|
|
||||||
const createErrorStack = createError instanceof Error ? createError.stack : undefined;
|
|
||||||
// Check if it's a Sequelize validation error
|
|
||||||
const sequelizeError = (createError as any)?.errors || (createError as any)?.parent;
|
|
||||||
logWithContext('error', 'Document.create() failed', {
|
|
||||||
error: createErrorMessage,
|
|
||||||
stack: createErrorStack,
|
|
||||||
sequelizeErrors: sequelizeError,
|
|
||||||
requestId,
|
|
||||||
userId,
|
|
||||||
fileName: file.originalname,
|
|
||||||
filePath: gcsFilePath,
|
|
||||||
storageUrl: storageUrl,
|
|
||||||
documentData: JSON.stringify(documentData, null, 2),
|
|
||||||
});
|
|
||||||
throw createError; // Re-throw to be caught by outer catch block
|
|
||||||
}
|
|
||||||
|
|
||||||
// Log document upload event
|
|
||||||
logDocumentEvent('uploaded', doc.documentId, {
|
|
||||||
requestId,
|
|
||||||
userId,
|
userId,
|
||||||
fileName: file.originalname,
|
fileName: file.originalname,
|
||||||
fileType: extension,
|
fileType: extension,
|
||||||
@ -274,13 +203,13 @@ export class DocumentController {
|
|||||||
});
|
});
|
||||||
|
|
||||||
// Get user details for activity logging
|
// Get user details for activity logging
|
||||||
const user = await User.findByPk(userId);
|
const uploader = await UserModel.findOne({ userId });
|
||||||
const uploaderName = (user as any)?.displayName || (user as any)?.email || 'User';
|
const uploaderName = uploader?.displayName || uploader?.email || 'User';
|
||||||
|
|
||||||
// Log activity for document upload
|
// Log activity
|
||||||
const requestMeta = getRequestMetadata(req);
|
const requestMeta = getRequestMetadata(req);
|
||||||
await activityService.log({
|
await activityService.log({
|
||||||
requestId,
|
requestId: workflowRequest.requestId, // Standardized to UUID
|
||||||
type: 'document_added',
|
type: 'document_added',
|
||||||
user: { userId, name: uploaderName },
|
user: { userId, name: uploaderName },
|
||||||
timestamp: new Date().toISOString(),
|
timestamp: new Date().toISOString(),
|
||||||
@ -296,148 +225,68 @@ export class DocumentController {
|
|||||||
userAgent: requestMeta.userAgent
|
userAgent: requestMeta.userAgent
|
||||||
});
|
});
|
||||||
|
|
||||||
// Send notifications for additional document added
|
// Send notifications
|
||||||
try {
|
try {
|
||||||
const initiatorId = (workflowRequest as any).initiatorId || (workflowRequest as any).initiator_id;
|
const initiatorId = (workflowRequest as any).initiatorId || (workflowRequest as any).initiator_id;
|
||||||
const isInitiator = userId === initiatorId;
|
const isInitiator = userId === initiatorId;
|
||||||
|
|
||||||
// Get all participants (spectators)
|
// Get participants
|
||||||
const spectators = await Participant.findAll({
|
const participants = await Participant.find({
|
||||||
where: {
|
requestId: workflowRequest.requestId, // Standardized to UUID
|
||||||
requestId,
|
|
||||||
participantType: 'SPECTATOR'
|
participantType: 'SPECTATOR'
|
||||||
},
|
|
||||||
include: [{
|
|
||||||
model: User,
|
|
||||||
as: 'user',
|
|
||||||
attributes: ['userId', 'email', 'displayName']
|
|
||||||
}]
|
|
||||||
});
|
});
|
||||||
|
|
||||||
// Get current approver (pending or in-progress approval level)
|
// Get current approver
|
||||||
const currentApprovalLevel = await ApprovalLevel.findOne({
|
const currentLevel = await ApprovalLevel.findOne({
|
||||||
where: {
|
requestId: requestId,
|
||||||
requestId,
|
status: { $in: ['PENDING', 'IN_PROGRESS'] }
|
||||||
status: { [Op.in]: ['PENDING', 'IN_PROGRESS'] }
|
}).sort({ levelNumber: 1 });
|
||||||
},
|
|
||||||
order: [['levelNumber', 'ASC']],
|
|
||||||
include: [{
|
|
||||||
model: User,
|
|
||||||
as: 'approver',
|
|
||||||
attributes: ['userId', 'email', 'displayName']
|
|
||||||
}]
|
|
||||||
});
|
|
||||||
|
|
||||||
logWithContext('info', 'Current approver lookup for document notification', {
|
|
||||||
requestId,
|
|
||||||
currentApprovalLevelFound: !!currentApprovalLevel,
|
|
||||||
approverUserId: currentApprovalLevel ? ((currentApprovalLevel as any).approver || (currentApprovalLevel as any).Approver)?.userId : null,
|
|
||||||
isInitiator
|
|
||||||
});
|
|
||||||
|
|
||||||
// Determine who to notify based on who uploaded
|
|
||||||
const recipientsToNotify: Array<{ userId: string; email: string; displayName: string }> = [];
|
const recipientsToNotify: Array<{ userId: string; email: string; displayName: string }> = [];
|
||||||
|
|
||||||
if (isInitiator) {
|
// Add initiator if they are not the uploader
|
||||||
// Initiator added → notify spectators and current approver
|
if (!isInitiator) {
|
||||||
spectators.forEach((spectator: any) => {
|
const initiator = await UserModel.findOne({ userId: initiatorId });
|
||||||
const spectatorUser = spectator.user || spectator.User;
|
|
||||||
if (spectatorUser && spectatorUser.userId !== userId) {
|
|
||||||
recipientsToNotify.push({
|
|
||||||
userId: spectatorUser.userId,
|
|
||||||
email: spectatorUser.email,
|
|
||||||
displayName: spectatorUser.displayName || spectatorUser.email
|
|
||||||
});
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
if (currentApprovalLevel) {
|
|
||||||
const approverUser = (currentApprovalLevel as any).approver || (currentApprovalLevel as any).Approver;
|
|
||||||
if (approverUser && approverUser.userId !== userId) {
|
|
||||||
recipientsToNotify.push({
|
|
||||||
userId: approverUser.userId,
|
|
||||||
email: approverUser.email,
|
|
||||||
displayName: approverUser.displayName || approverUser.email
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
// Check if uploader is a spectator
|
|
||||||
const uploaderParticipant = await Participant.findOne({
|
|
||||||
where: {
|
|
||||||
requestId,
|
|
||||||
userId,
|
|
||||||
participantType: 'SPECTATOR'
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
if (uploaderParticipant) {
|
|
||||||
// Spectator added → notify initiator and current approver
|
|
||||||
const initiator = await User.findByPk(initiatorId);
|
|
||||||
if (initiator) {
|
if (initiator) {
|
||||||
const initiatorData = initiator.toJSON();
|
|
||||||
if (initiatorData.userId !== userId) {
|
|
||||||
recipientsToNotify.push({
|
recipientsToNotify.push({
|
||||||
userId: initiatorData.userId,
|
userId: initiator.userId,
|
||||||
email: initiatorData.email,
|
email: initiator.email,
|
||||||
displayName: initiatorData.displayName || initiatorData.email
|
displayName: initiator.displayName || initiator.email
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if (currentApprovalLevel) {
|
// Add current approver if not the uploader
|
||||||
const approverUser = (currentApprovalLevel as any).approver || (currentApprovalLevel as any).Approver;
|
if (currentLevel?.approver?.userId && currentLevel.approver.userId !== userId) {
|
||||||
if (approverUser && approverUser.userId !== userId) {
|
const approver = await UserModel.findOne({ userId: currentLevel.approver.userId });
|
||||||
|
if (approver) {
|
||||||
recipientsToNotify.push({
|
recipientsToNotify.push({
|
||||||
userId: approverUser.userId,
|
userId: approver.userId,
|
||||||
email: approverUser.email,
|
email: approver.email,
|
||||||
displayName: approverUser.displayName || approverUser.email
|
displayName: approver.displayName || approver.email
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
} else {
|
|
||||||
// Approver added → notify initiator and spectators
|
// Add spectators
|
||||||
const initiator = await User.findByPk(initiatorId);
|
for (const p of participants) {
|
||||||
if (initiator) {
|
if (p.userId !== userId && !recipientsToNotify.some(r => r.userId === p.userId)) {
|
||||||
const initiatorData = initiator.toJSON();
|
const spectator = await UserModel.findOne({ userId: p.userId });
|
||||||
if (initiatorData.userId !== userId) {
|
if (spectator) {
|
||||||
recipientsToNotify.push({
|
recipientsToNotify.push({
|
||||||
userId: initiatorData.userId,
|
userId: spectator.userId,
|
||||||
email: initiatorData.email,
|
email: spectator.email,
|
||||||
displayName: initiatorData.displayName || initiatorData.email
|
displayName: spectator.displayName || spectator.email
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
spectators.forEach((spectator: any) => {
|
|
||||||
const spectatorUser = spectator.user || spectator.User;
|
|
||||||
if (spectatorUser && spectatorUser.userId !== userId) {
|
|
||||||
recipientsToNotify.push({
|
|
||||||
userId: spectatorUser.userId,
|
|
||||||
email: spectatorUser.email,
|
|
||||||
displayName: spectatorUser.displayName || spectatorUser.email
|
|
||||||
});
|
|
||||||
}
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// Send notifications (email, in-app, and web-push)
|
// Send notifications
|
||||||
const requestData = {
|
if (recipientsToNotify.length > 0) {
|
||||||
requestNumber: requestNumber,
|
const recipientIds = recipientsToNotify.map(r => r.userId);
|
||||||
requestId: requestId,
|
|
||||||
title: (workflowRequest as any).title || 'Request'
|
|
||||||
};
|
|
||||||
|
|
||||||
// Prepare user IDs for in-app and web-push notifications
|
await notificationService.sendToUsers(recipientIds, {
|
||||||
const recipientUserIds = recipientsToNotify.map(r => r.userId);
|
|
||||||
|
|
||||||
// Send in-app and web-push notifications
|
|
||||||
if (recipientUserIds.length > 0) {
|
|
||||||
try {
|
|
||||||
await notificationService.sendToUsers(
|
|
||||||
recipientUserIds,
|
|
||||||
{
|
|
||||||
title: 'Additional Document Added',
|
title: 'Additional Document Added',
|
||||||
body: `${uploaderName} added "${file.originalname}" to ${requestNumber}`,
|
body: `${uploaderName} added "${file.originalname}" to ${requestNumber}`,
|
||||||
requestId,
|
requestId,
|
||||||
@ -448,75 +297,34 @@ export class DocumentController {
|
|||||||
actionRequired: false,
|
actionRequired: false,
|
||||||
metadata: {
|
metadata: {
|
||||||
documentName: file.originalname,
|
documentName: file.originalname,
|
||||||
fileSize: file.size,
|
addedByName: uploaderName
|
||||||
addedByName: uploaderName,
|
|
||||||
source: 'Documents Tab'
|
|
||||||
}
|
}
|
||||||
}
|
|
||||||
);
|
|
||||||
logWithContext('info', 'In-app and web-push notifications sent for additional document', {
|
|
||||||
requestId,
|
|
||||||
documentName: file.originalname,
|
|
||||||
recipientsCount: recipientUserIds.length
|
|
||||||
});
|
});
|
||||||
} catch (notifyError) {
|
|
||||||
logWithContext('error', 'Failed to send in-app/web-push notifications for additional document', {
|
|
||||||
requestId,
|
|
||||||
error: notifyError instanceof Error ? notifyError.message : 'Unknown error'
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Send email notifications
|
const requestData = {
|
||||||
|
requestNumber,
|
||||||
|
requestId,
|
||||||
|
title: (workflowRequest as any).title || 'Request'
|
||||||
|
};
|
||||||
|
|
||||||
for (const recipient of recipientsToNotify) {
|
for (const recipient of recipientsToNotify) {
|
||||||
await emailNotificationService.sendAdditionalDocumentAdded(
|
await emailNotificationService.sendAdditionalDocumentAdded(requestData, recipient, {
|
||||||
requestData,
|
|
||||||
recipient,
|
|
||||||
{
|
|
||||||
documentName: file.originalname,
|
documentName: file.originalname,
|
||||||
fileSize: file.size,
|
fileSize: file.size,
|
||||||
addedByName: uploaderName,
|
addedByName: uploaderName,
|
||||||
source: 'Documents Tab'
|
source: 'Documents Tab'
|
||||||
}
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
logWithContext('info', 'Additional document notifications sent', {
|
|
||||||
requestId,
|
|
||||||
documentName: file.originalname,
|
|
||||||
recipientsCount: recipientsToNotify.length,
|
|
||||||
isInitiator
|
|
||||||
});
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
} catch (notifyError) {
|
} catch (notifyError) {
|
||||||
// Don't fail document upload if notifications fail
|
logWithContext('error', 'Failed to send document notifications', { error: notifyError });
|
||||||
logWithContext('error', 'Failed to send additional document notifications', {
|
|
||||||
requestId,
|
|
||||||
error: notifyError instanceof Error ? notifyError.message : 'Unknown error'
|
|
||||||
});
|
|
||||||
}
|
}
|
||||||
|
|
||||||
ResponseHandler.success(res, doc, 'File uploaded', 201);
|
ResponseHandler.success(res, doc, 'File uploaded', 201);
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
const message = error instanceof Error ? error.message : 'Unknown error';
|
const message = error instanceof Error ? error.message : 'Unknown error';
|
||||||
const errorStack = error instanceof Error ? error.stack : undefined;
|
logWithContext('error', 'Document upload failed', { error: message });
|
||||||
logWithContext('error', 'Document upload failed', {
|
|
||||||
userId: req.user?.userId,
|
|
||||||
requestId: req.body?.requestId || req.body?.request_id,
|
|
||||||
body: req.body,
|
|
||||||
bodyKeys: Object.keys(req.body || {}),
|
|
||||||
file: req.file ? {
|
|
||||||
originalname: req.file.originalname,
|
|
||||||
size: req.file.size,
|
|
||||||
mimetype: req.file.mimetype,
|
|
||||||
hasBuffer: !!req.file.buffer,
|
|
||||||
hasPath: !!req.file.path
|
|
||||||
} : 'No file',
|
|
||||||
error: message,
|
|
||||||
stack: errorStack
|
|
||||||
});
|
|
||||||
ResponseHandler.error(res, 'Upload failed', 500, message);
|
ResponseHandler.error(res, 'Upload failed', 500, message);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@ -1,8 +1,8 @@
|
|||||||
import { Request, Response } from 'express';
|
import { Request, Response } from 'express';
|
||||||
import { Notification } from '@models/Notification';
|
import { NotificationModel as Notification } from '../models/mongoose/Notification.schema';
|
||||||
import { Op } from 'sequelize';
|
import { Op } from 'sequelize';
|
||||||
import logger from '@utils/logger';
|
import logger from '@utils/logger';
|
||||||
import { notificationService } from '@services/notification.service';
|
import { notificationMongoService as notificationService } from '@services/notification.service';
|
||||||
|
|
||||||
export class NotificationController {
|
export class NotificationController {
|
||||||
/**
|
/**
|
||||||
@ -25,12 +25,12 @@ export class NotificationController {
|
|||||||
|
|
||||||
const offset = (Number(page) - 1) * Number(limit);
|
const offset = (Number(page) - 1) * Number(limit);
|
||||||
|
|
||||||
const { rows, count } = await Notification.findAndCountAll({
|
const rows = await Notification.find(where)
|
||||||
where,
|
.sort({ createdAt: -1 })
|
||||||
order: [['createdAt', 'DESC']],
|
.limit(Number(limit))
|
||||||
limit: Number(limit),
|
.skip(offset);
|
||||||
offset
|
|
||||||
});
|
const count = await Notification.countDocuments(where);
|
||||||
|
|
||||||
res.json({
|
res.json({
|
||||||
success: true,
|
success: true,
|
||||||
@ -42,7 +42,7 @@ export class NotificationController {
|
|||||||
total: count,
|
total: count,
|
||||||
totalPages: Math.ceil(count / Number(limit))
|
totalPages: Math.ceil(count / Number(limit))
|
||||||
},
|
},
|
||||||
unreadCount: unreadOnly === 'true' ? count : await Notification.count({ where: { userId, isRead: false } })
|
unreadCount: unreadOnly === 'true' ? count : await Notification.countDocuments({ userId, isRead: false })
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
} catch (error: any) {
|
} catch (error: any) {
|
||||||
@ -63,8 +63,8 @@ export class NotificationController {
|
|||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
const count = await Notification.count({
|
const count = await Notification.countDocuments({
|
||||||
where: { userId, isRead: false }
|
userId, isRead: false
|
||||||
});
|
});
|
||||||
|
|
||||||
res.json({
|
res.json({
|
||||||
@ -91,7 +91,7 @@ export class NotificationController {
|
|||||||
}
|
}
|
||||||
|
|
||||||
const notification = await Notification.findOne({
|
const notification = await Notification.findOne({
|
||||||
where: { notificationId, userId }
|
_id: notificationId, userId
|
||||||
});
|
});
|
||||||
|
|
||||||
if (!notification) {
|
if (!notification) {
|
||||||
@ -99,10 +99,10 @@ export class NotificationController {
|
|||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
await notification.update({
|
notification.isRead = true;
|
||||||
isRead: true,
|
notification.metadata = notification.metadata || {};
|
||||||
readAt: new Date()
|
notification.metadata.readAt = new Date();
|
||||||
});
|
await notification.save();
|
||||||
|
|
||||||
res.json({
|
res.json({
|
||||||
success: true,
|
success: true,
|
||||||
@ -127,9 +127,9 @@ export class NotificationController {
|
|||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
await Notification.update(
|
await Notification.updateMany(
|
||||||
{ isRead: true, readAt: new Date() },
|
{ userId, isRead: false },
|
||||||
{ where: { userId, isRead: false } }
|
{ $set: { isRead: true } }
|
||||||
);
|
);
|
||||||
|
|
||||||
res.json({
|
res.json({
|
||||||
@ -155,10 +155,12 @@ export class NotificationController {
|
|||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
const deleted = await Notification.destroy({
|
const result = await Notification.deleteOne({
|
||||||
where: { notificationId, userId }
|
_id: notificationId, userId
|
||||||
});
|
});
|
||||||
|
|
||||||
|
const deleted = result.deletedCount;
|
||||||
|
|
||||||
if (deleted === 0) {
|
if (deleted === 0) {
|
||||||
res.status(404).json({ success: false, message: 'Notification not found' });
|
res.status(404).json({ success: false, message: 'Notification not found' });
|
||||||
return;
|
return;
|
||||||
@ -201,4 +203,3 @@ export class NotificationController {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@ -1,12 +1,13 @@
|
|||||||
import { Response } from 'express';
|
import { Response } from 'express';
|
||||||
import { pauseService } from '@services/pause.service';
|
import { pauseMongoService } from '@services/pause.service';
|
||||||
import { ResponseHandler } from '@utils/responseHandler';
|
import { ResponseHandler } from '@utils/responseHandler';
|
||||||
import type { AuthenticatedRequest } from '../types/express';
|
import type { AuthenticatedRequest } from '../types/express';
|
||||||
import { z } from 'zod';
|
import { z } from 'zod';
|
||||||
|
|
||||||
// Validation schemas
|
// Validation schemas
|
||||||
|
// In MongoDB, levelId could be a string (ObjectId)
|
||||||
const pauseWorkflowSchema = z.object({
|
const pauseWorkflowSchema = z.object({
|
||||||
levelId: z.string().uuid().optional().nullable(),
|
levelId: z.string().optional().nullable(),
|
||||||
reason: z.string().min(1, 'Reason is required').max(1000, 'Reason must be less than 1000 characters'),
|
reason: z.string().min(1, 'Reason is required').max(1000, 'Reason must be less than 1000 characters'),
|
||||||
resumeDate: z.string().datetime().or(z.date())
|
resumeDate: z.string().datetime().or(z.date())
|
||||||
});
|
});
|
||||||
@ -26,7 +27,7 @@ export class PauseController {
|
|||||||
const userId = req.user?.userId;
|
const userId = req.user?.userId;
|
||||||
|
|
||||||
if (!userId) {
|
if (!userId) {
|
||||||
ResponseHandler.error(res, 'Unauthorized', 401);
|
ResponseHandler.unauthorized(res, 'Unauthorized');
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -36,7 +37,7 @@ export class PauseController {
|
|||||||
? validated.resumeDate
|
? validated.resumeDate
|
||||||
: new Date(validated.resumeDate);
|
: new Date(validated.resumeDate);
|
||||||
|
|
||||||
const result = await pauseService.pauseWorkflow(
|
const result = await pauseMongoService.pauseWorkflow(
|
||||||
id,
|
id,
|
||||||
validated.levelId || null,
|
validated.levelId || null,
|
||||||
userId,
|
userId,
|
||||||
@ -68,14 +69,14 @@ export class PauseController {
|
|||||||
const userId = req.user?.userId;
|
const userId = req.user?.userId;
|
||||||
|
|
||||||
if (!userId) {
|
if (!userId) {
|
||||||
ResponseHandler.error(res, 'Unauthorized', 401);
|
ResponseHandler.unauthorized(res, 'Unauthorized');
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
// Validate request body (notes is optional)
|
// Validate request body (notes is optional)
|
||||||
const validated = resumeWorkflowSchema.parse(req.body || {});
|
const validated = resumeWorkflowSchema.parse(req.body || {});
|
||||||
|
|
||||||
const result = await pauseService.resumeWorkflow(id, userId, validated.notes);
|
const result = await pauseMongoService.resumeWorkflow(id, userId, validated.notes);
|
||||||
|
|
||||||
ResponseHandler.success(res, {
|
ResponseHandler.success(res, {
|
||||||
workflow: result.workflow,
|
workflow: result.workflow,
|
||||||
@ -101,11 +102,11 @@ export class PauseController {
|
|||||||
const userId = req.user?.userId;
|
const userId = req.user?.userId;
|
||||||
|
|
||||||
if (!userId) {
|
if (!userId) {
|
||||||
ResponseHandler.error(res, 'Unauthorized', 401);
|
ResponseHandler.unauthorized(res, 'Unauthorized');
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
await pauseService.retriggerPause(id, userId);
|
await pauseMongoService.retriggerPause(id, userId);
|
||||||
|
|
||||||
ResponseHandler.success(res, null, 'Pause retrigger request sent successfully', 200);
|
ResponseHandler.success(res, null, 'Pause retrigger request sent successfully', 200);
|
||||||
} catch (error: any) {
|
} catch (error: any) {
|
||||||
@ -122,7 +123,7 @@ export class PauseController {
|
|||||||
try {
|
try {
|
||||||
const { id } = req.params;
|
const { id } = req.params;
|
||||||
|
|
||||||
const pauseDetails = await pauseService.getPauseDetails(id);
|
const pauseDetails = await pauseMongoService.getPauseDetails(id);
|
||||||
|
|
||||||
if (!pauseDetails) {
|
if (!pauseDetails) {
|
||||||
ResponseHandler.success(res, { isPaused: false }, 'Workflow is not paused', 200);
|
ResponseHandler.success(res, { isPaused: false }, 'Workflow is not paused', 200);
|
||||||
@ -138,4 +139,3 @@ export class PauseController {
|
|||||||
}
|
}
|
||||||
|
|
||||||
export const pauseController = new PauseController();
|
export const pauseController = new PauseController();
|
||||||
|
|
||||||
|
|||||||
@ -1,12 +1,12 @@
|
|||||||
import { Request, Response } from 'express';
|
import { Request, Response } from 'express';
|
||||||
import { TatAlert } from '@models/TatAlert';
|
import { TatAlert } from '@models/TatAlert';
|
||||||
import { ApprovalLevel } from '@models/ApprovalLevel';
|
import { ApprovalLevel } from '@models/ApprovalLevel';
|
||||||
import { User } from '@models/User';
|
import { UserModel } from '../models/mongoose/User.schema';
|
||||||
import { WorkflowRequest } from '@models/WorkflowRequest';
|
import { WorkflowRequest } from '@models/WorkflowRequest';
|
||||||
import logger from '@utils/logger';
|
import logger from '@utils/logger';
|
||||||
import { sequelize } from '@config/database';
|
import { sequelize } from '@config/database';
|
||||||
import { QueryTypes } from 'sequelize';
|
import { QueryTypes } from 'sequelize';
|
||||||
import { activityService } from '@services/activity.service';
|
import { activityMongoService as activityService } from '@services/activity.service';
|
||||||
import { getRequestMetadata } from '@utils/requestUtils';
|
import { getRequestMetadata } from '@utils/requestUtils';
|
||||||
import type { AuthenticatedRequest } from '../types/express';
|
import type { AuthenticatedRequest } from '../types/express';
|
||||||
|
|
||||||
@ -24,19 +24,31 @@ export const getTatAlertsByRequest = async (req: Request, res: Response) => {
|
|||||||
model: ApprovalLevel,
|
model: ApprovalLevel,
|
||||||
as: 'level',
|
as: 'level',
|
||||||
attributes: ['levelNumber', 'levelName', 'approverName', 'status']
|
attributes: ['levelNumber', 'levelName', 'approverName', 'status']
|
||||||
},
|
|
||||||
{
|
|
||||||
model: User,
|
|
||||||
as: 'approver',
|
|
||||||
attributes: ['userId', 'displayName', 'email', 'department']
|
|
||||||
}
|
}
|
||||||
],
|
],
|
||||||
order: [['alertSentAt', 'ASC']]
|
order: [['alertSentAt', 'ASC']]
|
||||||
});
|
});
|
||||||
|
|
||||||
|
// Manually enrich with approver data from MongoDB
|
||||||
|
const enrichedAlerts = await Promise.all(alerts.map(async (alert: any) => {
|
||||||
|
const alertData = alert.toJSON();
|
||||||
|
if (alertData.approverId) {
|
||||||
|
const approver = await UserModel.findOne({ userId: alertData.approverId }).select('userId displayName email department');
|
||||||
|
if (approver) {
|
||||||
|
alertData.approver = {
|
||||||
|
userId: approver.userId,
|
||||||
|
displayName: approver.displayName,
|
||||||
|
email: approver.email,
|
||||||
|
department: approver.department
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return alertData;
|
||||||
|
}));
|
||||||
|
|
||||||
res.json({
|
res.json({
|
||||||
success: true,
|
success: true,
|
||||||
data: alerts
|
data: enrichedAlerts
|
||||||
});
|
});
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
logger.error('[TAT Controller] Error fetching TAT alerts:', error);
|
logger.error('[TAT Controller] Error fetching TAT alerts:', error);
|
||||||
@ -193,7 +205,7 @@ export const updateBreachReason = async (req: Request, res: Response) => {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Get user to check role
|
// Get user to check role
|
||||||
const user = await User.findByPk(userId);
|
const user = await UserModel.findOne({ userId });
|
||||||
if (!user) {
|
if (!user) {
|
||||||
return res.status(404).json({
|
return res.status(404).json({
|
||||||
success: false,
|
success: false,
|
||||||
@ -201,7 +213,7 @@ export const updateBreachReason = async (req: Request, res: Response) => {
|
|||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
const userRole = (user as any).role;
|
const userRole = user.role;
|
||||||
const approverId = (level as any).approverId;
|
const approverId = (level as any).approverId;
|
||||||
|
|
||||||
// Check permissions: ADMIN, MANAGEMENT, or the approver
|
// Check permissions: ADMIN, MANAGEMENT, or the approver
|
||||||
@ -218,7 +230,7 @@ export const updateBreachReason = async (req: Request, res: Response) => {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Get user details for activity logging
|
// Get user details for activity logging
|
||||||
const userDisplayName = (user as any).displayName || (user as any).email || 'Unknown User';
|
const userDisplayName = user.displayName || user.email || 'Unknown User';
|
||||||
const isUpdate = !!(level as any).breachReason; // Check if this is an update or first time
|
const isUpdate = !!(level as any).breachReason; // Check if this is an update or first time
|
||||||
const levelNumber = (level as any).levelNumber;
|
const levelNumber = (level as any).levelNumber;
|
||||||
const approverName = (level as any).approverName || 'Unknown Approver';
|
const approverName = (level as any).approverName || 'Unknown Approver';
|
||||||
@ -239,7 +251,7 @@ export const updateBreachReason = async (req: Request, res: Response) => {
|
|||||||
user: {
|
user: {
|
||||||
userId: userId,
|
userId: userId,
|
||||||
name: userDisplayName,
|
name: userDisplayName,
|
||||||
email: (user as any).email
|
email: user.email
|
||||||
},
|
},
|
||||||
timestamp: new Date().toISOString(),
|
timestamp: new Date().toISOString(),
|
||||||
action: isUpdate ? 'Updated TAT breach reason' : 'Added TAT breach reason',
|
action: isUpdate ? 'Updated TAT breach reason' : 'Added TAT breach reason',
|
||||||
@ -312,4 +324,3 @@ export const getApproverTatPerformance = async (req: Request, res: Response) =>
|
|||||||
});
|
});
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
|
|||||||
@ -1,5 +1,5 @@
|
|||||||
import { Request, Response } from 'express';
|
import { Request, Response } from 'express';
|
||||||
import { User } from '@models/User';
|
import { UserModel } from '../models/mongoose/User.schema';
|
||||||
import { updateNotificationPreferencesSchema } from '@validators/userPreference.validator';
|
import { updateNotificationPreferencesSchema } from '@validators/userPreference.validator';
|
||||||
import logger from '@utils/logger';
|
import logger from '@utils/logger';
|
||||||
|
|
||||||
@ -10,14 +10,7 @@ export const getNotificationPreferences = async (req: Request, res: Response): P
|
|||||||
try {
|
try {
|
||||||
const userId = req.user!.userId;
|
const userId = req.user!.userId;
|
||||||
|
|
||||||
const user = await User.findByPk(userId, {
|
const user = await UserModel.findOne({ userId });
|
||||||
attributes: [
|
|
||||||
'userId',
|
|
||||||
'emailNotificationsEnabled',
|
|
||||||
'pushNotificationsEnabled',
|
|
||||||
'inAppNotificationsEnabled'
|
|
||||||
]
|
|
||||||
});
|
|
||||||
|
|
||||||
if (!user) {
|
if (!user) {
|
||||||
res.status(404).json({
|
res.status(404).json({
|
||||||
@ -32,9 +25,9 @@ export const getNotificationPreferences = async (req: Request, res: Response): P
|
|||||||
res.json({
|
res.json({
|
||||||
success: true,
|
success: true,
|
||||||
data: {
|
data: {
|
||||||
emailNotificationsEnabled: user.emailNotificationsEnabled,
|
emailNotificationsEnabled: user.notifications?.email ?? true,
|
||||||
pushNotificationsEnabled: user.pushNotificationsEnabled,
|
pushNotificationsEnabled: user.notifications?.push ?? true,
|
||||||
inAppNotificationsEnabled: user.inAppNotificationsEnabled
|
inAppNotificationsEnabled: user.notifications?.inApp ?? true
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
} catch (error: any) {
|
} catch (error: any) {
|
||||||
@ -57,7 +50,7 @@ export const updateNotificationPreferences = async (req: Request, res: Response)
|
|||||||
// Validate request body
|
// Validate request body
|
||||||
const validated = updateNotificationPreferencesSchema.parse(req.body);
|
const validated = updateNotificationPreferencesSchema.parse(req.body);
|
||||||
|
|
||||||
const user = await User.findByPk(userId);
|
const user = await UserModel.findOne({ userId });
|
||||||
|
|
||||||
if (!user) {
|
if (!user) {
|
||||||
res.status(404).json({
|
res.status(404).json({
|
||||||
@ -67,29 +60,32 @@ export const updateNotificationPreferences = async (req: Request, res: Response)
|
|||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
// Update only provided fields
|
// Update only provided fields in nested notifications object
|
||||||
const updateData: any = {};
|
if (!user.notifications) {
|
||||||
|
user.notifications = { email: true, push: true, inApp: true };
|
||||||
|
}
|
||||||
|
|
||||||
if (validated.emailNotificationsEnabled !== undefined) {
|
if (validated.emailNotificationsEnabled !== undefined) {
|
||||||
updateData.emailNotificationsEnabled = validated.emailNotificationsEnabled;
|
user.notifications.email = validated.emailNotificationsEnabled;
|
||||||
}
|
}
|
||||||
if (validated.pushNotificationsEnabled !== undefined) {
|
if (validated.pushNotificationsEnabled !== undefined) {
|
||||||
updateData.pushNotificationsEnabled = validated.pushNotificationsEnabled;
|
user.notifications.push = validated.pushNotificationsEnabled;
|
||||||
}
|
}
|
||||||
if (validated.inAppNotificationsEnabled !== undefined) {
|
if (validated.inAppNotificationsEnabled !== undefined) {
|
||||||
updateData.inAppNotificationsEnabled = validated.inAppNotificationsEnabled;
|
user.notifications.inApp = validated.inAppNotificationsEnabled;
|
||||||
}
|
}
|
||||||
|
|
||||||
await user.update(updateData);
|
await user.save();
|
||||||
|
|
||||||
logger.info(`[UserPreference] Updated notification preferences for user ${userId}:`, updateData);
|
logger.info(`[UserPreference] Updated notification preferences for user ${userId}`);
|
||||||
|
|
||||||
res.json({
|
res.json({
|
||||||
success: true,
|
success: true,
|
||||||
message: 'Notification preferences updated successfully',
|
message: 'Notification preferences updated successfully',
|
||||||
data: {
|
data: {
|
||||||
emailNotificationsEnabled: user.emailNotificationsEnabled,
|
emailNotificationsEnabled: user.notifications.email,
|
||||||
pushNotificationsEnabled: user.pushNotificationsEnabled,
|
pushNotificationsEnabled: user.notifications.push,
|
||||||
inAppNotificationsEnabled: user.inAppNotificationsEnabled
|
inAppNotificationsEnabled: user.notifications.inApp
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
} catch (error: any) {
|
} catch (error: any) {
|
||||||
@ -110,4 +106,3 @@ export const updateNotificationPreferences = async (req: Request, res: Response)
|
|||||||
});
|
});
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
|
|||||||
@ -1,23 +1,23 @@
|
|||||||
import { Request, Response } from 'express';
|
import { Request, Response } from 'express';
|
||||||
import { WorkflowService } from '@services/workflow.service';
|
import { workflowServiceMongo } from '@services/workflow.service';
|
||||||
import { validateCreateWorkflow, validateUpdateWorkflow } from '@validators/workflow.validator';
|
import { validateCreateWorkflow, validateUpdateWorkflow } from '@validators/workflow.validator';
|
||||||
import { ResponseHandler } from '@utils/responseHandler';
|
import { ResponseHandler } from '@utils/responseHandler';
|
||||||
import type { AuthenticatedRequest } from '../types/express';
|
import type { AuthenticatedRequest } from '../types/express';
|
||||||
import { Priority } from '../types/common.types';
|
import { Priority } from '../types/common.types';
|
||||||
import type { UpdateWorkflowRequest } from '../types/workflow.types';
|
import type { UpdateWorkflowRequest } from '../types/workflow.types';
|
||||||
import { Document } from '@models/Document';
|
import { DocumentModel } from '@models/mongoose/Document.schema';
|
||||||
import { User } from '@models/User';
|
import { UserModel } from '../models/mongoose/User.schema';
|
||||||
import { gcsStorageService } from '@services/gcsStorage.service';
|
import { gcsStorageService } from '@services/gcsStorage.service';
|
||||||
import fs from 'fs';
|
import fs from 'fs';
|
||||||
import path from 'path';
|
import path from 'path';
|
||||||
import crypto from 'crypto';
|
import crypto from 'crypto';
|
||||||
import { getRequestMetadata } from '@utils/requestUtils';
|
import { getRequestMetadata } from '@utils/requestUtils';
|
||||||
import { enrichApprovalLevels, enrichSpectators, validateInitiator } from '@services/userEnrichment.service';
|
import { enrichApprovalLevels, enrichSpectators, validateInitiator } from '@services/userEnrichment.service';
|
||||||
import { DealerClaimService } from '@services/dealerClaim.service';
|
import { DealerClaimMongoService } from '@services/dealerClaim.service';
|
||||||
|
import { activityMongoService as activityService } from '@services/activity.service';
|
||||||
import logger from '@utils/logger';
|
import logger from '@utils/logger';
|
||||||
|
|
||||||
const workflowService = new WorkflowService();
|
const dealerClaimService = new DealerClaimMongoService();
|
||||||
const dealerClaimService = new DealerClaimService();
|
|
||||||
|
|
||||||
export class WorkflowController {
|
export class WorkflowController {
|
||||||
async createWorkflow(req: AuthenticatedRequest, res: Response): Promise<void> {
|
async createWorkflow(req: AuthenticatedRequest, res: Response): Promise<void> {
|
||||||
@ -66,9 +66,9 @@ export class WorkflowController {
|
|||||||
|
|
||||||
// Build complete participants array automatically
|
// Build complete participants array automatically
|
||||||
// This includes: INITIATOR + all APPROVERs + all SPECTATORs
|
// This includes: INITIATOR + all APPROVERs + all SPECTATORs
|
||||||
const initiator = await User.findByPk(req.user.userId);
|
const initiator = await UserModel.findOne({ userId: req.user.userId });
|
||||||
const initiatorEmail = (initiator as any).email;
|
const initiatorEmail = (initiator as any)?.email;
|
||||||
const initiatorName = (initiator as any).displayName || (initiator as any).email;
|
const initiatorName = (initiator as any)?.displayName || (initiator as any)?.email;
|
||||||
|
|
||||||
const autoGeneratedParticipants = [
|
const autoGeneratedParticipants = [
|
||||||
// Add initiator
|
// Add initiator
|
||||||
@ -100,13 +100,15 @@ export class WorkflowController {
|
|||||||
// Convert string literal priority to enum
|
// Convert string literal priority to enum
|
||||||
const workflowData = {
|
const workflowData = {
|
||||||
...validatedData,
|
...validatedData,
|
||||||
|
initiatorEmail,
|
||||||
|
initiatorName,
|
||||||
priority: validatedData.priority as Priority,
|
priority: validatedData.priority as Priority,
|
||||||
approvalLevels: enrichedApprovalLevels,
|
approvalLevels: enrichedApprovalLevels,
|
||||||
participants: autoGeneratedParticipants,
|
participants: autoGeneratedParticipants,
|
||||||
};
|
};
|
||||||
|
|
||||||
const requestMeta = getRequestMetadata(req);
|
const requestMeta = getRequestMetadata(req);
|
||||||
const workflow = await workflowService.createWorkflow(req.user.userId, workflowData, {
|
const workflow = await workflowServiceMongo.createWorkflow(req.user.userId, workflowData, {
|
||||||
ipAddress: requestMeta.ipAddress,
|
ipAddress: requestMeta.ipAddress,
|
||||||
userAgent: requestMeta.userAgent
|
userAgent: requestMeta.userAgent
|
||||||
});
|
});
|
||||||
@ -200,9 +202,9 @@ export class WorkflowController {
|
|||||||
|
|
||||||
// Build complete participants array automatically
|
// Build complete participants array automatically
|
||||||
// This includes: INITIATOR + all APPROVERs + all SPECTATORs
|
// This includes: INITIATOR + all APPROVERs + all SPECTATORs
|
||||||
const initiator = await User.findByPk(userId);
|
const initiator = await UserModel.findOne({ userId: userId });
|
||||||
const initiatorEmail = (initiator as any).email;
|
const initiatorEmail = (initiator as any)?.email;
|
||||||
const initiatorName = (initiator as any).displayName || (initiator as any).email;
|
const initiatorName = (initiator as any)?.displayName || (initiator as any)?.email || initiatorEmail;
|
||||||
|
|
||||||
const autoGeneratedParticipants = [
|
const autoGeneratedParticipants = [
|
||||||
// Add initiator
|
// Add initiator
|
||||||
@ -233,13 +235,15 @@ export class WorkflowController {
|
|||||||
|
|
||||||
const workflowData = {
|
const workflowData = {
|
||||||
...validated,
|
...validated,
|
||||||
|
initiatorEmail,
|
||||||
|
initiatorName,
|
||||||
priority: validated.priority as Priority,
|
priority: validated.priority as Priority,
|
||||||
approvalLevels: enrichedApprovalLevels,
|
approvalLevels: enrichedApprovalLevels,
|
||||||
participants: autoGeneratedParticipants,
|
participants: autoGeneratedParticipants,
|
||||||
} as any;
|
} as any;
|
||||||
|
|
||||||
const requestMeta = getRequestMetadata(req);
|
const requestMeta = getRequestMetadata(req);
|
||||||
const workflow = await workflowService.createWorkflow(userId, workflowData, {
|
const workflow = await workflowServiceMongo.createWorkflow(userId, workflowData, {
|
||||||
ipAddress: requestMeta.ipAddress,
|
ipAddress: requestMeta.ipAddress,
|
||||||
userAgent: requestMeta.userAgent
|
userAgent: requestMeta.userAgent
|
||||||
});
|
});
|
||||||
@ -249,8 +253,7 @@ export class WorkflowController {
|
|||||||
const category = (req.body?.category as string) || 'OTHER';
|
const category = (req.body?.category as string) || 'OTHER';
|
||||||
const docs: any[] = [];
|
const docs: any[] = [];
|
||||||
if (files && files.length > 0) {
|
if (files && files.length > 0) {
|
||||||
const { activityService } = require('../services/activity.service');
|
const user = await UserModel.findOne({ userId });
|
||||||
const user = await User.findByPk(userId);
|
|
||||||
const uploaderName = (user as any)?.displayName || (user as any)?.email || 'User';
|
const uploaderName = (user as any)?.displayName || (user as any)?.email || 'User';
|
||||||
|
|
||||||
for (const file of files) {
|
for (const file of files) {
|
||||||
@ -346,12 +349,13 @@ export class WorkflowController {
|
|||||||
fileName: truncatedOriginalFileName,
|
fileName: truncatedOriginalFileName,
|
||||||
filePath: gcsFilePath,
|
filePath: gcsFilePath,
|
||||||
storageUrl: finalStorageUrl ? 'present' : 'null (too long)',
|
storageUrl: finalStorageUrl ? 'present' : 'null (too long)',
|
||||||
requestId: workflow.requestId
|
requestId: workflow.requestNumber
|
||||||
});
|
});
|
||||||
|
|
||||||
try {
|
try {
|
||||||
const doc = await Document.create({
|
const doc = await DocumentModel.create({
|
||||||
requestId: workflow.requestId,
|
documentId: require('crypto').randomUUID(),
|
||||||
|
requestId: workflow.requestId, // Standardized to UUID
|
||||||
uploadedBy: userId,
|
uploadedBy: userId,
|
||||||
fileName: truncatedFileName,
|
fileName: truncatedFileName,
|
||||||
originalFileName: truncatedOriginalFileName,
|
originalFileName: truncatedOriginalFileName,
|
||||||
@ -362,14 +366,10 @@ export class WorkflowController {
|
|||||||
storageUrl: finalStorageUrl, // Store GCS URL or local URL (null if too long)
|
storageUrl: finalStorageUrl, // Store GCS URL or local URL (null if too long)
|
||||||
mimeType: file.mimetype,
|
mimeType: file.mimetype,
|
||||||
checksum,
|
checksum,
|
||||||
isGoogleDoc: false,
|
category: (category || 'OTHER') as any,
|
||||||
googleDocUrl: null as any,
|
|
||||||
category: category || 'OTHER',
|
|
||||||
version: 1,
|
version: 1,
|
||||||
parentDocumentId: null as any,
|
|
||||||
isDeleted: false,
|
isDeleted: false,
|
||||||
downloadCount: 0,
|
});
|
||||||
} as any);
|
|
||||||
docs.push(doc);
|
docs.push(doc);
|
||||||
logger.info('[Workflow] Document record created successfully', {
|
logger.info('[Workflow] Document record created successfully', {
|
||||||
documentId: doc.documentId,
|
documentId: doc.documentId,
|
||||||
@ -382,7 +382,7 @@ export class WorkflowController {
|
|||||||
error: docErrorMessage,
|
error: docErrorMessage,
|
||||||
stack: docErrorStack,
|
stack: docErrorStack,
|
||||||
fileName: file.originalname,
|
fileName: file.originalname,
|
||||||
requestId: workflow.requestId,
|
requestId: workflow.requestNumber,
|
||||||
filePath: gcsFilePath,
|
filePath: gcsFilePath,
|
||||||
storageUrl: storageUrl,
|
storageUrl: storageUrl,
|
||||||
});
|
});
|
||||||
@ -393,7 +393,7 @@ export class WorkflowController {
|
|||||||
// Log document upload activity
|
// Log document upload activity
|
||||||
const requestMeta = getRequestMetadata(req);
|
const requestMeta = getRequestMetadata(req);
|
||||||
activityService.log({
|
activityService.log({
|
||||||
requestId: workflow.requestId,
|
requestId: workflow.requestId, // Use UUID
|
||||||
type: 'document_added',
|
type: 'document_added',
|
||||||
user: { userId, name: uploaderName },
|
user: { userId, name: uploaderName },
|
||||||
timestamp: new Date().toISOString(),
|
timestamp: new Date().toISOString(),
|
||||||
@ -406,7 +406,7 @@ export class WorkflowController {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
ResponseHandler.success(res, { requestId: workflow.requestId, documents: docs }, 'Workflow created with documents', 201);
|
ResponseHandler.success(res, { requestId: workflow.requestNumber, documents: docs }, 'Workflow created with documents', 201);
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
|
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
|
||||||
const errorStack = error instanceof Error ? error.stack : undefined;
|
const errorStack = error instanceof Error ? error.stack : undefined;
|
||||||
@ -423,7 +423,7 @@ export class WorkflowController {
|
|||||||
async getWorkflow(req: Request, res: Response): Promise<void> {
|
async getWorkflow(req: Request, res: Response): Promise<void> {
|
||||||
try {
|
try {
|
||||||
const { id } = req.params;
|
const { id } = req.params;
|
||||||
const workflow = await workflowService.getWorkflowById(id);
|
const workflow = await workflowServiceMongo.getWorkflowById(id);
|
||||||
|
|
||||||
if (!workflow) {
|
if (!workflow) {
|
||||||
ResponseHandler.notFound(res, 'Workflow not found');
|
ResponseHandler.notFound(res, 'Workflow not found');
|
||||||
@ -448,13 +448,13 @@ export class WorkflowController {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Check if user has access to this request
|
// Check if user has access to this request
|
||||||
const accessCheck = await workflowService.checkUserRequestAccess(userId, id);
|
const accessCheck = await workflowServiceMongo.checkUserRequestAccess(userId, id);
|
||||||
if (!accessCheck.hasAccess) {
|
if (!accessCheck.hasAccess) {
|
||||||
ResponseHandler.error(res, accessCheck.reason || 'Access denied', 403);
|
ResponseHandler.error(res, accessCheck.reason || 'Access denied', 403);
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
const result = await workflowService.getWorkflowDetails(id);
|
const result = await workflowServiceMongo.getWorkflowDetails(id);
|
||||||
if (!result) {
|
if (!result) {
|
||||||
ResponseHandler.notFound(res, 'Workflow not found');
|
ResponseHandler.notFound(res, 'Workflow not found');
|
||||||
return;
|
return;
|
||||||
@ -479,7 +479,7 @@ export class WorkflowController {
|
|||||||
templateType: req.query.templateType as string | undefined,
|
templateType: req.query.templateType as string | undefined,
|
||||||
department: req.query.department as string | undefined,
|
department: req.query.department as string | undefined,
|
||||||
initiator: req.query.initiator as string | undefined,
|
initiator: req.query.initiator as string | undefined,
|
||||||
approver: req.query.approver as string | undefined,
|
approverName: req.query.approver as string | undefined, // Mapping 'approver' to 'approverName' for Mongo deep filter
|
||||||
approverType: req.query.approverType as 'current' | 'any' | undefined,
|
approverType: req.query.approverType as 'current' | 'any' | undefined,
|
||||||
slaCompliance: req.query.slaCompliance as string | undefined,
|
slaCompliance: req.query.slaCompliance as string | undefined,
|
||||||
dateRange: req.query.dateRange as string | undefined,
|
dateRange: req.query.dateRange as string | undefined,
|
||||||
@ -487,7 +487,8 @@ export class WorkflowController {
|
|||||||
endDate: req.query.endDate as string | undefined,
|
endDate: req.query.endDate as string | undefined,
|
||||||
};
|
};
|
||||||
|
|
||||||
const result = await workflowService.listWorkflows(page, limit, filters);
|
// USE MONGODB SERVICE FOR LISTING
|
||||||
|
const result = await workflowServiceMongo.listWorkflows(page, limit, filters);
|
||||||
ResponseHandler.success(res, result, 'Workflows fetched');
|
ResponseHandler.success(res, result, 'Workflows fetched');
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
|
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
|
||||||
@ -516,7 +517,7 @@ export class WorkflowController {
|
|||||||
|
|
||||||
const filters = { search, status, priority, department, initiator, approver, approverType, slaCompliance, dateRange, startDate, endDate };
|
const filters = { search, status, priority, department, initiator, approver, approverType, slaCompliance, dateRange, startDate, endDate };
|
||||||
|
|
||||||
const result = await workflowService.listMyRequests(userId, page, limit, filters);
|
const result = await workflowServiceMongo.listMyRequests(userId, page, limit, filters);
|
||||||
ResponseHandler.success(res, result, 'My requests fetched');
|
ResponseHandler.success(res, result, 'My requests fetched');
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
|
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
|
||||||
@ -550,7 +551,7 @@ export class WorkflowController {
|
|||||||
|
|
||||||
const filters = { search, status, priority, templateType, department, initiator, approver, approverType, slaCompliance, dateRange, startDate, endDate };
|
const filters = { search, status, priority, templateType, department, initiator, approver, approverType, slaCompliance, dateRange, startDate, endDate };
|
||||||
|
|
||||||
const result = await workflowService.listParticipantRequests(userId, page, limit, filters);
|
const result = await workflowServiceMongo.listParticipantRequests(userId, page, limit, filters);
|
||||||
ResponseHandler.success(res, result, 'Participant requests fetched');
|
ResponseHandler.success(res, result, 'Participant requests fetched');
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
|
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
|
||||||
@ -580,7 +581,7 @@ export class WorkflowController {
|
|||||||
|
|
||||||
const filters = { search, status, priority, templateType, department, slaCompliance, dateRange, startDate, endDate };
|
const filters = { search, status, priority, templateType, department, slaCompliance, dateRange, startDate, endDate };
|
||||||
|
|
||||||
const result = await workflowService.listMyInitiatedRequests(userId, page, limit, filters);
|
const result = await workflowServiceMongo.listMyInitiatedRequests(userId, page, limit, filters);
|
||||||
ResponseHandler.success(res, result, 'My initiated requests fetched');
|
ResponseHandler.success(res, result, 'My initiated requests fetched');
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
|
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
|
||||||
@ -606,7 +607,7 @@ export class WorkflowController {
|
|||||||
const sortBy = req.query.sortBy as string | undefined;
|
const sortBy = req.query.sortBy as string | undefined;
|
||||||
const sortOrder = (req.query.sortOrder as string | undefined) || 'desc';
|
const sortOrder = (req.query.sortOrder as string | undefined) || 'desc';
|
||||||
|
|
||||||
const result = await workflowService.listOpenForMe(userId, page, limit, filters, sortBy, sortOrder);
|
const result = await workflowServiceMongo.listOpenForMe(userId, page, limit, filters, sortBy, sortOrder);
|
||||||
ResponseHandler.success(res, result, 'Open requests for user fetched');
|
ResponseHandler.success(res, result, 'Open requests for user fetched');
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
|
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
|
||||||
@ -632,7 +633,7 @@ export class WorkflowController {
|
|||||||
const sortBy = req.query.sortBy as string | undefined;
|
const sortBy = req.query.sortBy as string | undefined;
|
||||||
const sortOrder = (req.query.sortOrder as string | undefined) || 'desc';
|
const sortOrder = (req.query.sortOrder as string | undefined) || 'desc';
|
||||||
|
|
||||||
const result = await workflowService.listClosedByMe(userId, page, limit, filters, sortBy, sortOrder);
|
const result = await workflowServiceMongo.listClosedByMe(userId, page, limit, filters, sortBy, sortOrder);
|
||||||
ResponseHandler.success(res, result, 'Closed requests by user fetched');
|
ResponseHandler.success(res, result, 'Closed requests by user fetched');
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
|
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
|
||||||
@ -651,7 +652,7 @@ export class WorkflowController {
|
|||||||
updateData.priority = validatedData.priority === 'EXPRESS' ? Priority.EXPRESS : Priority.STANDARD;
|
updateData.priority = validatedData.priority === 'EXPRESS' ? Priority.EXPRESS : Priority.STANDARD;
|
||||||
}
|
}
|
||||||
|
|
||||||
const workflow = await workflowService.updateWorkflow(id, updateData);
|
const workflow = await workflowServiceMongo.updateWorkflow(id, updateData);
|
||||||
|
|
||||||
if (!workflow) {
|
if (!workflow) {
|
||||||
ResponseHandler.notFound(res, 'Workflow not found');
|
ResponseHandler.notFound(res, 'Workflow not found');
|
||||||
@ -690,7 +691,7 @@ export class WorkflowController {
|
|||||||
// Update workflow
|
// Update workflow
|
||||||
let workflow;
|
let workflow;
|
||||||
try {
|
try {
|
||||||
workflow = await workflowService.updateWorkflow(id, updateData);
|
workflow = await workflowServiceMongo.updateWorkflow(id, updateData);
|
||||||
if (!workflow) {
|
if (!workflow) {
|
||||||
ResponseHandler.notFound(res, 'Workflow not found');
|
ResponseHandler.notFound(res, 'Workflow not found');
|
||||||
return;
|
return;
|
||||||
@ -814,7 +815,8 @@ export class WorkflowController {
|
|||||||
});
|
});
|
||||||
|
|
||||||
try {
|
try {
|
||||||
const doc = await Document.create({
|
const doc = await DocumentModel.create({
|
||||||
|
documentId: require('crypto').randomUUID(),
|
||||||
requestId: actualRequestId,
|
requestId: actualRequestId,
|
||||||
uploadedBy: userId,
|
uploadedBy: userId,
|
||||||
fileName: truncatedFileName,
|
fileName: truncatedFileName,
|
||||||
@ -826,14 +828,10 @@ export class WorkflowController {
|
|||||||
storageUrl: finalStorageUrl, // Store GCS URL or local URL (null if too long)
|
storageUrl: finalStorageUrl, // Store GCS URL or local URL (null if too long)
|
||||||
mimeType: file.mimetype,
|
mimeType: file.mimetype,
|
||||||
checksum,
|
checksum,
|
||||||
isGoogleDoc: false,
|
category: (category || 'OTHER') as any,
|
||||||
googleDocUrl: null as any,
|
|
||||||
category: category || 'OTHER',
|
|
||||||
version: 1,
|
version: 1,
|
||||||
parentDocumentId: null as any,
|
|
||||||
isDeleted: false,
|
isDeleted: false,
|
||||||
downloadCount: 0,
|
});
|
||||||
} as any);
|
|
||||||
docs.push(doc);
|
docs.push(doc);
|
||||||
logger.info('[Workflow] Document record created successfully', {
|
logger.info('[Workflow] Document record created successfully', {
|
||||||
documentId: doc.documentId,
|
documentId: doc.documentId,
|
||||||
@ -875,7 +873,7 @@ export class WorkflowController {
|
|||||||
async submitWorkflow(req: Request, res: Response): Promise<void> {
|
async submitWorkflow(req: Request, res: Response): Promise<void> {
|
||||||
try {
|
try {
|
||||||
const { id } = req.params;
|
const { id } = req.params;
|
||||||
const workflow = await workflowService.submitWorkflow(id);
|
const workflow = await workflowServiceMongo.submitWorkflow(id);
|
||||||
|
|
||||||
if (!workflow) {
|
if (!workflow) {
|
||||||
ResponseHandler.notFound(res, 'Workflow not found');
|
ResponseHandler.notFound(res, 'Workflow not found');
|
||||||
@ -918,14 +916,13 @@ export class WorkflowController {
|
|||||||
try {
|
try {
|
||||||
const { id } = req.params;
|
const { id } = req.params;
|
||||||
|
|
||||||
// Resolve requestId UUID from identifier (could be requestNumber or UUID)
|
// Resolve requestId from identifier (could be requestNumber or ID)
|
||||||
const workflowService = new WorkflowService();
|
const wf = await workflowServiceMongo.getRequest(id);
|
||||||
const wf = await (workflowService as any).findWorkflowByIdentifier(id);
|
|
||||||
if (!wf) {
|
if (!wf) {
|
||||||
ResponseHandler.notFound(res, 'Workflow not found');
|
ResponseHandler.notFound(res, 'Workflow not found');
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
const requestId = wf.getDataValue('requestId');
|
const requestId = wf.requestId; // Use UUID
|
||||||
|
|
||||||
const history = await dealerClaimService.getHistory(requestId);
|
const history = await dealerClaimService.getHistory(requestId);
|
||||||
ResponseHandler.success(res, history, 'Revision history fetched successfully');
|
ResponseHandler.success(res, history, 'Revision history fetched successfully');
|
||||||
|
|||||||
@ -1,70 +1,95 @@
|
|||||||
import type { Request, Response } from 'express';
|
import type { Response } from 'express';
|
||||||
import { workNoteService } from '../services/worknote.service';
|
import { workNoteMongoService } from '../services/worknote.service';
|
||||||
import { WorkflowService } from '../services/workflow.service';
|
import { workflowServiceMongo } from '../services/workflow.service';
|
||||||
import { getRequestMetadata } from '@utils/requestUtils';
|
import { getRequestMetadata } from '@utils/requestUtils';
|
||||||
|
import { ResponseHandler } from '@utils/responseHandler';
|
||||||
|
import { AuthenticatedRequest } from '../types/express';
|
||||||
|
import { ParticipantModel } from '../models/mongoose/Participant.schema';
|
||||||
|
|
||||||
export class WorkNoteController {
|
export class WorkNoteController {
|
||||||
private workflowService = new WorkflowService();
|
/**
|
||||||
|
* List notes for a request
|
||||||
|
*/
|
||||||
|
async list(req: AuthenticatedRequest, res: Response): Promise<void> {
|
||||||
|
try {
|
||||||
|
const requestNumber = req.params.id;
|
||||||
|
const request = await workflowServiceMongo.getRequest(requestNumber);
|
||||||
|
|
||||||
async list(req: any, res: Response): Promise<void> {
|
if (!request) {
|
||||||
const wf = await (this.workflowService as any).findWorkflowByIdentifier(req.params.id);
|
ResponseHandler.notFound(res, 'Request not found');
|
||||||
if (!wf) { res.status(404).json({ success: false, error: 'Not found' }); return; }
|
return;
|
||||||
const requestId: string = wf.getDataValue('requestId');
|
|
||||||
const rows = await workNoteService.list(requestId);
|
|
||||||
res.json({ success: true, data: rows });
|
|
||||||
}
|
}
|
||||||
|
|
||||||
async create(req: any, res: Response): Promise<void> {
|
const rows = await workNoteMongoService.list(requestNumber);
|
||||||
const wf = await (this.workflowService as any).findWorkflowByIdentifier(req.params.id);
|
ResponseHandler.success(res, rows, 'Work notes retrieved');
|
||||||
if (!wf) { res.status(404).json({ success: false, error: 'Not found' }); return; }
|
} catch (error) {
|
||||||
const requestId: string = wf.getDataValue('requestId');
|
ResponseHandler.error(res, 'Failed to list work notes', 500);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
// Get user's participant info (includes userName and role)
|
/**
|
||||||
const { Participant } = require('@models/Participant');
|
* Create a new work note
|
||||||
const participant = await Participant.findOne({
|
*/
|
||||||
where: { requestId, userId: req.user?.userId }
|
async create(req: AuthenticatedRequest, res: Response): Promise<void> {
|
||||||
|
try {
|
||||||
|
const requestNumber = req.params.id;
|
||||||
|
const request = await workflowServiceMongo.getRequest(requestNumber);
|
||||||
|
|
||||||
|
if (!request) {
|
||||||
|
ResponseHandler.notFound(res, 'Request not found');
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get user's participant info from Mongo
|
||||||
|
const participant = await ParticipantModel.findOne({
|
||||||
|
requestId: requestNumber,
|
||||||
|
userId: req.user.userId
|
||||||
});
|
});
|
||||||
|
|
||||||
let userName = req.user?.email || 'Unknown User';
|
let userName = req.user.email || 'Unknown User';
|
||||||
let userRole = 'SPECTATOR';
|
let userRole = 'SPECTATOR';
|
||||||
|
|
||||||
if (participant) {
|
if (participant) {
|
||||||
userName = (participant as any).userName || (participant as any).user_name || req.user?.email || 'Unknown User';
|
userName = participant.userName || req.user.email || 'Unknown User';
|
||||||
userRole = (participant as any).participantType || (participant as any).participant_type || 'SPECTATOR';
|
userRole = participant.participantType || 'SPECTATOR';
|
||||||
}
|
}
|
||||||
|
|
||||||
const user = {
|
const user = {
|
||||||
userId: req.user?.userId,
|
userId: req.user.userId,
|
||||||
name: userName,
|
name: userName,
|
||||||
role: userRole
|
role: userRole
|
||||||
};
|
};
|
||||||
|
|
||||||
const payload = req.body?.payload ? JSON.parse(req.body.payload) : (req.body || {});
|
const payload = req.body?.payload ? JSON.parse(req.body.payload) : (req.body || {});
|
||||||
// Map files with buffer for GCS upload (multer.memoryStorage provides buffer, not path)
|
|
||||||
|
// Map files
|
||||||
const files = (req.files as any[])?.map(f => ({
|
const files = (req.files as any[])?.map(f => ({
|
||||||
buffer: f.buffer,
|
buffer: f.buffer,
|
||||||
path: f.path || null, // May not exist with memory storage
|
path: f.path || null,
|
||||||
originalname: f.originalname,
|
originalname: f.originalname,
|
||||||
mimetype: f.mimetype,
|
mimetype: f.mimetype,
|
||||||
size: f.size
|
size: f.size
|
||||||
})) || [];
|
})) || [];
|
||||||
|
|
||||||
// Extract mentions from payload (sent by frontend)
|
|
||||||
const mentions = payload.mentions || [];
|
|
||||||
const workNotePayload = {
|
const workNotePayload = {
|
||||||
message: payload.message,
|
message: payload.message,
|
||||||
isPriority: payload.isPriority,
|
type: payload.type || 'COMMENT',
|
||||||
parentNoteId: payload.parentNoteId,
|
isVisibleToDealer: payload.isVisibleToDealer || false,
|
||||||
mentionedUsers: mentions // Pass mentioned user IDs to service
|
mentionedUsers: payload.mentions || []
|
||||||
};
|
};
|
||||||
|
|
||||||
const requestMeta = getRequestMetadata(req);
|
const requestMeta = getRequestMetadata(req);
|
||||||
const note = await workNoteService.create(requestId, user, workNotePayload, files, {
|
const note = await workNoteMongoService.create(
|
||||||
ipAddress: requestMeta.ipAddress,
|
requestNumber,
|
||||||
userAgent: requestMeta.userAgent
|
user,
|
||||||
});
|
workNotePayload,
|
||||||
res.status(201).json({ success: true, data: note });
|
files
|
||||||
|
);
|
||||||
|
|
||||||
|
ResponseHandler.success(res, note, 'Work note created', 201);
|
||||||
|
} catch (error) {
|
||||||
|
const msg = error instanceof Error ? error.message : 'Unknown error';
|
||||||
|
ResponseHandler.error(res, 'Failed to create work note', 500, msg);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@ -5,7 +5,8 @@
|
|||||||
* Logic: Email only sent if BOTH admin AND user have it enabled
|
* Logic: Email only sent if BOTH admin AND user have it enabled
|
||||||
*/
|
*/
|
||||||
|
|
||||||
import { User } from '@models/User';
|
|
||||||
|
|
||||||
import { SYSTEM_CONFIG } from '../config/system.config';
|
import { SYSTEM_CONFIG } from '../config/system.config';
|
||||||
import { getConfigValue } from '../services/configReader.service';
|
import { getConfigValue } from '../services/configReader.service';
|
||||||
import logger from '../utils/logger';
|
import logger from '../utils/logger';
|
||||||
@ -119,18 +120,18 @@ async function isAdminEmailEnabled(emailType: EmailNotificationType): Promise<bo
|
|||||||
*/
|
*/
|
||||||
async function isUserEmailEnabled(userId: string, emailType: EmailNotificationType): Promise<boolean> {
|
async function isUserEmailEnabled(userId: string, emailType: EmailNotificationType): Promise<boolean> {
|
||||||
try {
|
try {
|
||||||
|
const { UserModel } = await import('../models/mongoose/User.schema');
|
||||||
// Fetch user and check emailNotificationsEnabled field
|
// Fetch user and check emailNotificationsEnabled field
|
||||||
const user = await User.findByPk(userId, {
|
const user = await UserModel.findOne({ userId });
|
||||||
attributes: ['userId', 'emailNotificationsEnabled']
|
|
||||||
});
|
|
||||||
|
|
||||||
if (!user) {
|
if (!user) {
|
||||||
logger.warn(`[Email] User ${userId} not found - defaulting to enabled`);
|
logger.warn(`[Email] User ${userId} not found - defaulting to enabled`);
|
||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
|
|
||||||
// Check user's global email notification setting
|
// Check user's global email notification setting (Mongoose uses nested 'notifications.email')
|
||||||
const enabled = (user as any).emailNotificationsEnabled !== false;
|
// Fallback to true if undefined
|
||||||
|
const enabled = user.notifications?.email !== false;
|
||||||
|
|
||||||
if (!enabled) {
|
if (!enabled) {
|
||||||
logger.info(`[Email] User ${userId} has disabled email notifications globally`);
|
logger.info(`[Email] User ${userId} has disabled email notifications globally`);
|
||||||
@ -159,18 +160,17 @@ export async function shouldSendInAppNotification(
|
|||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
const { UserModel } = await import('../models/mongoose/User.schema');
|
||||||
// Fetch user and check inAppNotificationsEnabled field
|
// Fetch user and check inAppNotificationsEnabled field
|
||||||
const user = await User.findByPk(userId, {
|
const user = await UserModel.findOne({ userId });
|
||||||
attributes: ['userId', 'inAppNotificationsEnabled']
|
|
||||||
});
|
|
||||||
|
|
||||||
if (!user) {
|
if (!user) {
|
||||||
logger.warn(`[Notification] User ${userId} not found - defaulting to enabled`);
|
logger.warn(`[Notification] User ${userId} not found - defaulting to enabled`);
|
||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
|
|
||||||
// Check user's global in-app notification setting
|
// Check user's global in-app notification setting (Mongoose uses nested 'notifications.inApp')
|
||||||
const enabled = (user as any).inAppNotificationsEnabled !== false;
|
const enabled = user.notifications?.inApp !== false;
|
||||||
|
|
||||||
if (!enabled) {
|
if (!enabled) {
|
||||||
logger.info(`[Notification] User ${userId} has disabled in-app notifications globally`);
|
logger.info(`[Notification] User ${userId} has disabled in-app notifications globally`);
|
||||||
@ -282,4 +282,3 @@ export async function shouldSendEmailWithOverride(
|
|||||||
// Non-critical emails - check both admin and user preferences
|
// Non-critical emails - check both admin and user preferences
|
||||||
return await shouldSendEmail(userId, emailType);
|
return await shouldSendEmail(userId, emailType);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@ -1,6 +1,6 @@
|
|||||||
import { Request, Response, NextFunction } from 'express';
|
import { Request, Response, NextFunction } from 'express';
|
||||||
import jwt from 'jsonwebtoken';
|
import jwt from 'jsonwebtoken';
|
||||||
import { User } from '../models/User';
|
import { UserModel } from '../models/mongoose/User.schema';
|
||||||
import { ssoConfig } from '../config/sso';
|
import { ssoConfig } from '../config/sso';
|
||||||
import { ResponseHandler } from '../utils/responseHandler';
|
import { ResponseHandler } from '../utils/responseHandler';
|
||||||
|
|
||||||
@ -37,7 +37,7 @@ export const authenticateToken = async (
|
|||||||
const decoded = jwt.verify(token, ssoConfig.jwtSecret) as JwtPayload;
|
const decoded = jwt.verify(token, ssoConfig.jwtSecret) as JwtPayload;
|
||||||
|
|
||||||
// Fetch user from database to ensure they still exist and are active
|
// Fetch user from database to ensure they still exist and are active
|
||||||
const user = await User.findByPk(decoded.userId);
|
const user = await UserModel.findOne({ userId: decoded.userId });
|
||||||
|
|
||||||
if (!user || !user.isActive) {
|
if (!user || !user.isActive) {
|
||||||
ResponseHandler.unauthorized(res, 'User not found or inactive');
|
ResponseHandler.unauthorized(res, 'User not found or inactive');
|
||||||
@ -88,7 +88,7 @@ export const optionalAuth = async (
|
|||||||
|
|
||||||
if (token) {
|
if (token) {
|
||||||
const decoded = jwt.verify(token, ssoConfig.jwtSecret) as JwtPayload;
|
const decoded = jwt.verify(token, ssoConfig.jwtSecret) as JwtPayload;
|
||||||
const user = await User.findByPk(decoded.userId);
|
const user = await UserModel.findOne({ userId: decoded.userId });
|
||||||
|
|
||||||
if (user && user.isActive) {
|
if (user && user.isActive) {
|
||||||
req.user = {
|
req.user = {
|
||||||
|
|||||||
@ -1,7 +1,7 @@
|
|||||||
import { Request, Response, NextFunction } from 'express';
|
import { Request, Response, NextFunction } from 'express';
|
||||||
import { Participant } from '@models/Participant';
|
import { WorkflowRequestModel } from '../models/mongoose/WorkflowRequest.schema';
|
||||||
import { WorkflowRequest } from '@models/WorkflowRequest';
|
import { ParticipantModel } from '../models/mongoose/Participant.schema';
|
||||||
import { Op } from 'sequelize';
|
import { ApprovalLevelModel } from '../models/mongoose/ApprovalLevel.schema';
|
||||||
|
|
||||||
type AllowedType = 'INITIATOR' | 'APPROVER' | 'SPECTATOR';
|
type AllowedType = 'INITIATOR' | 'APPROVER' | 'SPECTATOR';
|
||||||
|
|
||||||
@ -12,14 +12,11 @@ function isUuid(identifier: string): boolean {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Helper to find workflow by either requestId or requestNumber
|
// Helper to find workflow by either requestId or requestNumber
|
||||||
async function findWorkflowByIdentifier(identifier: string): Promise<WorkflowRequest | null> {
|
async function findWorkflowByIdentifier(identifier: string): Promise<any | null> {
|
||||||
if (isUuid(identifier)) {
|
const query = isUuid(identifier)
|
||||||
return await WorkflowRequest.findByPk(identifier);
|
? { requestId: identifier }
|
||||||
} else {
|
: { requestNumber: identifier };
|
||||||
return await WorkflowRequest.findOne({
|
return await WorkflowRequestModel.findOne(query);
|
||||||
where: { requestNumber: identifier }
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
export function requireParticipantTypes(allowed: AllowedType[]) {
|
export function requireParticipantTypes(allowed: AllowedType[]) {
|
||||||
@ -36,24 +33,22 @@ export function requireParticipantTypes(allowed: AllowedType[]) {
|
|||||||
if (!workflow) {
|
if (!workflow) {
|
||||||
return res.status(404).json({ success: false, error: 'Workflow not found' });
|
return res.status(404).json({ success: false, error: 'Workflow not found' });
|
||||||
}
|
}
|
||||||
const actualRequestId = (workflow as any).requestId;
|
const actualRequestId = workflow.requestId;
|
||||||
|
|
||||||
// Check initiator
|
// Check initiator
|
||||||
if (allowed.includes('INITIATOR')) {
|
if (allowed.includes('INITIATOR')) {
|
||||||
if ((workflow as any).initiatorId === userId) {
|
if (workflow.initiator?.userId === userId) {
|
||||||
return next();
|
return next();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Check participants table for SPECTATOR
|
// Check participants table for SPECTATOR
|
||||||
if (allowed.includes('SPECTATOR')) {
|
if (allowed.includes('SPECTATOR')) {
|
||||||
const participant = await Participant.findOne({
|
const participant = await ParticipantModel.findOne({
|
||||||
where: {
|
|
||||||
requestId: actualRequestId,
|
requestId: actualRequestId,
|
||||||
userId,
|
userId,
|
||||||
participantType: 'SPECTATOR',
|
participantType: 'SPECTATOR',
|
||||||
isActive: true
|
isActive: true
|
||||||
},
|
|
||||||
});
|
});
|
||||||
if (participant) {
|
if (participant) {
|
||||||
return next();
|
return next();
|
||||||
@ -63,26 +58,21 @@ export function requireParticipantTypes(allowed: AllowedType[]) {
|
|||||||
// For APPROVER role, check ApprovalLevel table
|
// For APPROVER role, check ApprovalLevel table
|
||||||
// This is the primary source of truth for approvers
|
// This is the primary source of truth for approvers
|
||||||
if (allowed.includes('APPROVER')) {
|
if (allowed.includes('APPROVER')) {
|
||||||
const { ApprovalLevel } = await import('@models/ApprovalLevel');
|
const approvalLevel = await ApprovalLevelModel.findOne({
|
||||||
const approvalLevel = await ApprovalLevel.findOne({
|
|
||||||
where: {
|
|
||||||
requestId: actualRequestId,
|
requestId: actualRequestId,
|
||||||
approverId: userId,
|
'approver.userId': userId,
|
||||||
status: { [Op.in]: ['PENDING', 'IN_PROGRESS'] as any }
|
status: { $in: ['PENDING', 'IN_PROGRESS'] }
|
||||||
}
|
|
||||||
});
|
});
|
||||||
if (approvalLevel) {
|
if (approvalLevel) {
|
||||||
return next();
|
return next();
|
||||||
}
|
}
|
||||||
|
|
||||||
// Fallback: also check Participants table (some approvers might be added there)
|
// Fallback: also check Participants table (some approvers might be added there)
|
||||||
const participant = await Participant.findOne({
|
const participant = await ParticipantModel.findOne({
|
||||||
where: {
|
|
||||||
requestId: actualRequestId,
|
requestId: actualRequestId,
|
||||||
userId,
|
userId,
|
||||||
participantType: 'APPROVER',
|
participantType: 'APPROVER',
|
||||||
isActive: true
|
isActive: true
|
||||||
},
|
|
||||||
});
|
});
|
||||||
if (participant) {
|
if (participant) {
|
||||||
return next();
|
return next();
|
||||||
|
|||||||
59
src/models/mongoose/Activity.schema.ts
Normal file
59
src/models/mongoose/Activity.schema.ts
Normal file
@ -0,0 +1,59 @@
|
|||||||
|
import mongoose, { Schema, Document } from 'mongoose';
|
||||||
|
|
||||||
|
export interface IActivity extends Document {
|
||||||
|
activityId: string;
|
||||||
|
requestId: string;
|
||||||
|
userId: string;
|
||||||
|
userName: string; // User display name for easy access
|
||||||
|
|
||||||
|
activityType: string; // e.g., WORKFLOW_CREATED, APPROVED, REJECTED
|
||||||
|
activityDescription: string; // Human-readable description
|
||||||
|
activityCategory?: string; // WORKFLOW, APPROVAL, DOCUMENT, COMMENT, PARTICIPANT, SYSTEM
|
||||||
|
severity: 'INFO' | 'WARNING' | 'ERROR' | 'CRITICAL'; // Activity severity level
|
||||||
|
|
||||||
|
metadata?: any; // Flexible JSON for extra details
|
||||||
|
isSystemEvent: boolean; // true for system-generated activities
|
||||||
|
ipAddress?: string;
|
||||||
|
userAgent?: string;
|
||||||
|
|
||||||
|
createdAt: Date;
|
||||||
|
}
|
||||||
|
|
||||||
|
const ActivitySchema = new Schema<IActivity>({
|
||||||
|
activityId: { type: String, required: true, unique: true },
|
||||||
|
requestId: { type: String, required: true, index: true },
|
||||||
|
userId: { type: String, required: true, index: true },
|
||||||
|
userName: { type: String, required: true }, // NEW: User display name
|
||||||
|
|
||||||
|
activityType: { type: String, required: true, index: true }, // RENAMED from 'type'
|
||||||
|
activityDescription: { type: String, required: true }, // RENAMED from 'details'
|
||||||
|
activityCategory: {
|
||||||
|
type: String,
|
||||||
|
enum: ['WORKFLOW', 'APPROVAL', 'DOCUMENT', 'COMMENT', 'PARTICIPANT', 'NOTIFICATION', 'SYSTEM'],
|
||||||
|
index: true
|
||||||
|
}, // NEW: Activity category
|
||||||
|
severity: {
|
||||||
|
type: String,
|
||||||
|
enum: ['INFO', 'WARNING', 'ERROR', 'CRITICAL'],
|
||||||
|
default: 'INFO',
|
||||||
|
index: true
|
||||||
|
}, // NEW: Severity level
|
||||||
|
|
||||||
|
metadata: Schema.Types.Mixed,
|
||||||
|
isSystemEvent: { type: Boolean, default: false, index: true }, // NEW: System vs user action
|
||||||
|
ipAddress: String,
|
||||||
|
userAgent: String,
|
||||||
|
|
||||||
|
createdAt: { type: Date, default: Date.now, index: true } // RENAMED from 'timestamp'
|
||||||
|
}, {
|
||||||
|
timestamps: true, // Auto-manage createdAt and updatedAt
|
||||||
|
collection: 'activities'
|
||||||
|
});
|
||||||
|
|
||||||
|
// Indexes for common queries
|
||||||
|
ActivitySchema.index({ requestId: 1, createdAt: -1 }); // Get activities for a request, sorted by date
|
||||||
|
ActivitySchema.index({ userId: 1, createdAt: -1 }); // Get user's activities
|
||||||
|
ActivitySchema.index({ activityCategory: 1, severity: 1 }); // Filter by category and severity
|
||||||
|
ActivitySchema.index({ isSystemEvent: 1, createdAt: -1 }); // Filter system events
|
||||||
|
|
||||||
|
export const ActivityModel = mongoose.model<IActivity>('Activity', ActivitySchema);
|
||||||
30
src/models/mongoose/ActivityType.schema.ts
Normal file
30
src/models/mongoose/ActivityType.schema.ts
Normal file
@ -0,0 +1,30 @@
|
|||||||
|
import { Schema, Document, model } from 'mongoose';
|
||||||
|
|
||||||
|
export interface IActivityType extends Document {
|
||||||
|
activityTypeId: string;
|
||||||
|
title: string;
|
||||||
|
itemCode?: string;
|
||||||
|
taxationType?: string;
|
||||||
|
sapRefNo?: string;
|
||||||
|
isActive: boolean;
|
||||||
|
createdBy: string;
|
||||||
|
updatedBy?: string;
|
||||||
|
createdAt: Date;
|
||||||
|
updatedAt: Date;
|
||||||
|
}
|
||||||
|
|
||||||
|
const ActivityTypeSchema = new Schema<IActivityType>({
|
||||||
|
activityTypeId: { type: String, required: true, unique: true },
|
||||||
|
title: { type: String, required: true, unique: true },
|
||||||
|
itemCode: String,
|
||||||
|
taxationType: String,
|
||||||
|
sapRefNo: String,
|
||||||
|
isActive: { type: Boolean, default: true },
|
||||||
|
createdBy: { type: String, required: true },
|
||||||
|
updatedBy: String
|
||||||
|
}, {
|
||||||
|
timestamps: true,
|
||||||
|
collection: 'activity_types'
|
||||||
|
});
|
||||||
|
|
||||||
|
export const ActivityTypeModel = model<IActivityType>('ActivityType', ActivityTypeSchema);
|
||||||
22
src/models/mongoose/AdminConfiguration.schema.ts
Normal file
22
src/models/mongoose/AdminConfiguration.schema.ts
Normal file
@ -0,0 +1,22 @@
|
|||||||
|
import { Schema, model, Document } from 'mongoose';
|
||||||
|
|
||||||
|
export interface IAdminConfiguration extends Document {
|
||||||
|
configKey: string;
|
||||||
|
configValue: string;
|
||||||
|
description?: string;
|
||||||
|
updatedBy?: string;
|
||||||
|
createdAt: Date;
|
||||||
|
updatedAt: Date;
|
||||||
|
}
|
||||||
|
|
||||||
|
const AdminConfigurationSchema = new Schema<IAdminConfiguration>({
|
||||||
|
configKey: { type: String, required: true, unique: true, index: true },
|
||||||
|
configValue: { type: String, required: true },
|
||||||
|
description: { type: String },
|
||||||
|
updatedBy: { type: String }
|
||||||
|
}, {
|
||||||
|
timestamps: true,
|
||||||
|
collection: 'admin_configurations'
|
||||||
|
});
|
||||||
|
|
||||||
|
export const AdminConfigurationModel = model<IAdminConfiguration>('AdminConfiguration', AdminConfigurationSchema);
|
||||||
113
src/models/mongoose/ApprovalLevel.schema.ts
Normal file
113
src/models/mongoose/ApprovalLevel.schema.ts
Normal file
@ -0,0 +1,113 @@
|
|||||||
|
import mongoose, { Schema, Document } from 'mongoose';
|
||||||
|
|
||||||
|
export interface IApprovalLevel extends Document {
|
||||||
|
levelId: string;
|
||||||
|
requestId: string; // Reference to WorkflowRequest.requestNumber
|
||||||
|
levelNumber: number;
|
||||||
|
levelName?: string;
|
||||||
|
|
||||||
|
approver: {
|
||||||
|
userId: string;
|
||||||
|
email: string;
|
||||||
|
name: string;
|
||||||
|
roles?: string[]; // Snapshot
|
||||||
|
};
|
||||||
|
|
||||||
|
tat: {
|
||||||
|
assignedHours: number;
|
||||||
|
assignedDays: number;
|
||||||
|
startTime?: Date;
|
||||||
|
endTime?: Date;
|
||||||
|
elapsedHours: number;
|
||||||
|
remainingHours: number;
|
||||||
|
percentageUsed: number;
|
||||||
|
isBreached: boolean;
|
||||||
|
breachReason?: string;
|
||||||
|
};
|
||||||
|
|
||||||
|
status: 'PENDING' | 'IN_PROGRESS' | 'APPROVED' | 'REJECTED' | 'SKIPPED' | 'PAUSED';
|
||||||
|
actionDate?: Date;
|
||||||
|
comments?: string;
|
||||||
|
rejectionReason?: string;
|
||||||
|
isFinalApprover: boolean;
|
||||||
|
|
||||||
|
alerts: {
|
||||||
|
fiftyPercentSent: boolean;
|
||||||
|
seventyFivePercentSent: boolean;
|
||||||
|
};
|
||||||
|
|
||||||
|
paused: {
|
||||||
|
isPaused: boolean;
|
||||||
|
pausedAt?: Date;
|
||||||
|
pausedBy?: string;
|
||||||
|
reason?: string;
|
||||||
|
resumeDate?: Date;
|
||||||
|
resumedAt?: Date;
|
||||||
|
elapsedHoursBeforePause?: number;
|
||||||
|
tatSnapshot?: any;
|
||||||
|
};
|
||||||
|
createdAt: Date;
|
||||||
|
updatedAt: Date;
|
||||||
|
}
|
||||||
|
|
||||||
|
const ApprovalLevelSchema = new Schema<IApprovalLevel>({
|
||||||
|
levelId: { type: String, required: true },
|
||||||
|
requestId: { type: String, required: true, index: true }, // Index for fast lookup
|
||||||
|
levelNumber: { type: Number, required: true },
|
||||||
|
levelName: String,
|
||||||
|
|
||||||
|
approver: {
|
||||||
|
userId: { type: String, required: true, index: true },
|
||||||
|
email: { type: String, required: true },
|
||||||
|
name: { type: String, required: true },
|
||||||
|
roles: [String]
|
||||||
|
},
|
||||||
|
|
||||||
|
tat: {
|
||||||
|
assignedHours: { type: Number, required: true },
|
||||||
|
assignedDays: Number,
|
||||||
|
startTime: Date,
|
||||||
|
endTime: Date,
|
||||||
|
elapsedHours: { type: Number, default: 0 },
|
||||||
|
remainingHours: { type: Number, default: 0 },
|
||||||
|
percentageUsed: { type: Number, default: 0 },
|
||||||
|
isBreached: { type: Boolean, default: false },
|
||||||
|
breachReason: String
|
||||||
|
},
|
||||||
|
|
||||||
|
status: {
|
||||||
|
type: String,
|
||||||
|
enum: ['PENDING', 'IN_PROGRESS', 'APPROVED', 'REJECTED', 'SKIPPED', 'PAUSED'],
|
||||||
|
default: 'PENDING',
|
||||||
|
index: true
|
||||||
|
},
|
||||||
|
|
||||||
|
actionDate: Date,
|
||||||
|
comments: String,
|
||||||
|
rejectionReason: String,
|
||||||
|
isFinalApprover: { type: Boolean, default: false },
|
||||||
|
|
||||||
|
alerts: {
|
||||||
|
fiftyPercentSent: { type: Boolean, default: false },
|
||||||
|
seventyFivePercentSent: { type: Boolean, default: false }
|
||||||
|
},
|
||||||
|
|
||||||
|
paused: {
|
||||||
|
isPaused: { type: Boolean, default: false },
|
||||||
|
pausedAt: Date,
|
||||||
|
pausedBy: String,
|
||||||
|
reason: String,
|
||||||
|
resumeDate: Date,
|
||||||
|
resumedAt: Date,
|
||||||
|
elapsedHoursBeforePause: { type: Number, default: 0 },
|
||||||
|
tatSnapshot: Schema.Types.Mixed
|
||||||
|
}
|
||||||
|
}, {
|
||||||
|
timestamps: true,
|
||||||
|
collection: 'approval_levels'
|
||||||
|
});
|
||||||
|
|
||||||
|
// Compound Indexes
|
||||||
|
ApprovalLevelSchema.index({ requestId: 1, levelNumber: 1 }, { unique: true });
|
||||||
|
|
||||||
|
export const ApprovalLevelModel = mongoose.model<IApprovalLevel>('ApprovalLevel', ApprovalLevelSchema);
|
||||||
17
src/models/mongoose/ClaimBudgetTracking.schema.ts
Normal file
17
src/models/mongoose/ClaimBudgetTracking.schema.ts
Normal file
@ -0,0 +1,17 @@
|
|||||||
|
import { Schema, Document } from 'mongoose';
|
||||||
|
|
||||||
|
export interface IClaimBudgetTracking extends Document {
|
||||||
|
approvedBudget: number;
|
||||||
|
utilizedBudget: number;
|
||||||
|
remainingBudget: number;
|
||||||
|
sapInsertionStatus: string;
|
||||||
|
sapDocId?: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
export const ClaimBudgetTrackingSchema = new Schema<IClaimBudgetTracking>({
|
||||||
|
approvedBudget: { type: Number, default: 0 },
|
||||||
|
utilizedBudget: { type: Number, default: 0 },
|
||||||
|
remainingBudget: { type: Number, default: 0 },
|
||||||
|
sapInsertionStatus: { type: String, default: 'PENDING' },
|
||||||
|
sapDocId: String
|
||||||
|
}, { _id: false });
|
||||||
17
src/models/mongoose/ClaimCreditNote.schema.ts
Normal file
17
src/models/mongoose/ClaimCreditNote.schema.ts
Normal file
@ -0,0 +1,17 @@
|
|||||||
|
import { Schema, Document } from 'mongoose';
|
||||||
|
|
||||||
|
export interface IClaimCreditNote extends Document {
|
||||||
|
noteId: string;
|
||||||
|
noteNumber: string;
|
||||||
|
amount: number;
|
||||||
|
date: Date;
|
||||||
|
sapDocId?: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
export const ClaimCreditNoteSchema = new Schema<IClaimCreditNote>({
|
||||||
|
noteId: String,
|
||||||
|
noteNumber: String,
|
||||||
|
amount: Number,
|
||||||
|
date: Date,
|
||||||
|
sapDocId: String
|
||||||
|
}, { _id: false });
|
||||||
33
src/models/mongoose/ClaimInvoice.schema.ts
Normal file
33
src/models/mongoose/ClaimInvoice.schema.ts
Normal file
@ -0,0 +1,33 @@
|
|||||||
|
import { Schema, Document } from 'mongoose';
|
||||||
|
|
||||||
|
export interface IClaimInvoice extends Document {
|
||||||
|
invoiceId: string;
|
||||||
|
invoiceNumber: string;
|
||||||
|
amount: number;
|
||||||
|
taxAmount: number;
|
||||||
|
taxDetails?: {
|
||||||
|
cgst: number;
|
||||||
|
sgst: number;
|
||||||
|
igst: number;
|
||||||
|
rate: number;
|
||||||
|
};
|
||||||
|
date: Date;
|
||||||
|
status: string;
|
||||||
|
documentUrl: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
export const ClaimInvoiceSchema = new Schema<IClaimInvoice>({
|
||||||
|
invoiceId: String,
|
||||||
|
invoiceNumber: String,
|
||||||
|
amount: Number,
|
||||||
|
taxAmount: Number,
|
||||||
|
taxDetails: {
|
||||||
|
cgst: { type: Number, default: 0 },
|
||||||
|
sgst: { type: Number, default: 0 },
|
||||||
|
igst: { type: Number, default: 0 },
|
||||||
|
rate: { type: Number, default: 0 }
|
||||||
|
},
|
||||||
|
date: Date,
|
||||||
|
status: String,
|
||||||
|
documentUrl: String
|
||||||
|
}, { _id: false });
|
||||||
22
src/models/mongoose/ConclusionRemark.schema.ts
Normal file
22
src/models/mongoose/ConclusionRemark.schema.ts
Normal file
@ -0,0 +1,22 @@
|
|||||||
|
import mongoose, { Schema, Document } from 'mongoose';
|
||||||
|
|
||||||
|
export interface IConclusionRemark extends Document {
|
||||||
|
conclusionId: string;
|
||||||
|
requestId: string;
|
||||||
|
remark: string;
|
||||||
|
authorId: string;
|
||||||
|
createdAt: Date;
|
||||||
|
}
|
||||||
|
|
||||||
|
const ConclusionRemarkSchema = new Schema<IConclusionRemark>({
|
||||||
|
conclusionId: { type: String, required: true, unique: true },
|
||||||
|
requestId: { type: String, required: true, index: true },
|
||||||
|
remark: { type: String, required: true },
|
||||||
|
authorId: { type: String, required: true },
|
||||||
|
createdAt: { type: Date, default: Date.now }
|
||||||
|
}, {
|
||||||
|
timestamps: true,
|
||||||
|
collection: 'conclusion_remarks'
|
||||||
|
});
|
||||||
|
|
||||||
|
export const ConclusionRemarkModel = mongoose.model<IConclusionRemark>('ConclusionRemark', ConclusionRemarkSchema);
|
||||||
61
src/models/mongoose/Dealer.schema.ts
Normal file
61
src/models/mongoose/Dealer.schema.ts
Normal file
@ -0,0 +1,61 @@
|
|||||||
|
import mongoose, { Schema, Document } from 'mongoose';
|
||||||
|
|
||||||
|
export interface IDealer extends Document {
|
||||||
|
dealerCode: string; // Primary ID
|
||||||
|
dealerName: string;
|
||||||
|
region: string;
|
||||||
|
state: string;
|
||||||
|
city: string;
|
||||||
|
zone: string;
|
||||||
|
location: string;
|
||||||
|
sapCode: string;
|
||||||
|
email?: string;
|
||||||
|
phone?: string;
|
||||||
|
address?: string;
|
||||||
|
|
||||||
|
gstin?: string;
|
||||||
|
pan?: string;
|
||||||
|
bankDetails?: {
|
||||||
|
accountName: string;
|
||||||
|
accountNumber: string;
|
||||||
|
bankName: string;
|
||||||
|
ifscCode: string;
|
||||||
|
};
|
||||||
|
|
||||||
|
isActive: boolean;
|
||||||
|
createdAt: Date;
|
||||||
|
updatedAt: Date;
|
||||||
|
}
|
||||||
|
|
||||||
|
const DealerSchema = new Schema<IDealer>({
|
||||||
|
dealerCode: { type: String, required: true, unique: true, index: true },
|
||||||
|
dealerName: { type: String, required: true },
|
||||||
|
region: { type: String, required: true },
|
||||||
|
state: { type: String, required: true },
|
||||||
|
city: { type: String, required: true },
|
||||||
|
zone: { type: String, required: true },
|
||||||
|
location: { type: String, required: true },
|
||||||
|
sapCode: { type: String, required: true },
|
||||||
|
|
||||||
|
email: String,
|
||||||
|
phone: String,
|
||||||
|
address: String,
|
||||||
|
|
||||||
|
gstin: String,
|
||||||
|
pan: String,
|
||||||
|
bankDetails: {
|
||||||
|
accountName: String,
|
||||||
|
accountNumber: String,
|
||||||
|
bankName: String,
|
||||||
|
ifscCode: String
|
||||||
|
},
|
||||||
|
|
||||||
|
isActive: { type: Boolean, default: true },
|
||||||
|
createdAt: { type: Date, default: Date.now },
|
||||||
|
updatedAt: { type: Date, default: Date.now }
|
||||||
|
}, {
|
||||||
|
timestamps: true,
|
||||||
|
collection: 'dealers'
|
||||||
|
});
|
||||||
|
|
||||||
|
export const DealerModel = mongoose.model<IDealer>('Dealer', DealerSchema);
|
||||||
260
src/models/mongoose/DealerClaim.schema.ts
Normal file
260
src/models/mongoose/DealerClaim.schema.ts
Normal file
@ -0,0 +1,260 @@
|
|||||||
|
import mongoose, { Schema, Document } from 'mongoose';
|
||||||
|
|
||||||
|
export interface IDealerClaim extends Document {
|
||||||
|
claimId: string;
|
||||||
|
requestId: string; // Foreign Key to WorkflowRequest (UUID)
|
||||||
|
requestNumber: string; // Reference to WorkflowRequest
|
||||||
|
workflowStatus: string;
|
||||||
|
claimDate: Date; // activityDate
|
||||||
|
|
||||||
|
// Basic Info
|
||||||
|
dealer: {
|
||||||
|
code: string;
|
||||||
|
name: string;
|
||||||
|
email: string;
|
||||||
|
phone: string;
|
||||||
|
address: string;
|
||||||
|
location: string;
|
||||||
|
// Extended Details for KPI/Filtering
|
||||||
|
region: string;
|
||||||
|
state: string;
|
||||||
|
city: string;
|
||||||
|
zone: string;
|
||||||
|
};
|
||||||
|
|
||||||
|
activity: {
|
||||||
|
name: string;
|
||||||
|
type: string;
|
||||||
|
periodStart?: Date;
|
||||||
|
periodEnd?: Date;
|
||||||
|
};
|
||||||
|
|
||||||
|
// Proposal Phase
|
||||||
|
proposal?: {
|
||||||
|
proposalId: string;
|
||||||
|
totalEstimatedBudget: number;
|
||||||
|
timelineMode: string;
|
||||||
|
expectedCompletion: Date | number; // days or date
|
||||||
|
dealerComments: string;
|
||||||
|
submittedAt: Date;
|
||||||
|
documentUrl: string;
|
||||||
|
costItems: {
|
||||||
|
itemId: string;
|
||||||
|
description: string;
|
||||||
|
quantity: number;
|
||||||
|
unitCost: number;
|
||||||
|
totalCost: number;
|
||||||
|
category: string;
|
||||||
|
}[];
|
||||||
|
};
|
||||||
|
|
||||||
|
// Completion Phase
|
||||||
|
completion?: {
|
||||||
|
completionId: string;
|
||||||
|
actualTotalCost: number;
|
||||||
|
completionDate: Date;
|
||||||
|
dealerComments: string;
|
||||||
|
submittedAt: Date;
|
||||||
|
// expenses
|
||||||
|
expenses: {
|
||||||
|
expenseId: string;
|
||||||
|
description: string;
|
||||||
|
amount: number;
|
||||||
|
category: string;
|
||||||
|
invoiceNumber: string;
|
||||||
|
invoiceDate: Date;
|
||||||
|
documentUrl: string; // Proof
|
||||||
|
}[];
|
||||||
|
};
|
||||||
|
|
||||||
|
// Finance & Budget
|
||||||
|
budgetTracking?: {
|
||||||
|
approvedBudget: number;
|
||||||
|
utilizedBudget: number;
|
||||||
|
remainingBudget: number;
|
||||||
|
sapInsertionStatus: string; // PENDING, COMPLETED, FAILED
|
||||||
|
sapDocId: string;
|
||||||
|
};
|
||||||
|
|
||||||
|
// Documents
|
||||||
|
invoices: {
|
||||||
|
invoiceId: string;
|
||||||
|
invoiceNumber: string;
|
||||||
|
amount: number;
|
||||||
|
taxAmount: number;
|
||||||
|
taxDetails?: {
|
||||||
|
cgst: number;
|
||||||
|
sgst: number;
|
||||||
|
igst: number;
|
||||||
|
rate: number; // Tax Percentage
|
||||||
|
};
|
||||||
|
date: Date;
|
||||||
|
status: string; // SUBMITTED, APPROVED
|
||||||
|
documentUrl: string;
|
||||||
|
}[];
|
||||||
|
|
||||||
|
creditNotes: {
|
||||||
|
noteId: string;
|
||||||
|
noteNumber: string; // SAP Credit Note #
|
||||||
|
amount: number;
|
||||||
|
date: Date;
|
||||||
|
sapDocId: string;
|
||||||
|
}[];
|
||||||
|
|
||||||
|
// Iteration & Versioning
|
||||||
|
revisions: {
|
||||||
|
revisionId: string;
|
||||||
|
timestamp: Date;
|
||||||
|
stage: string;
|
||||||
|
action: string;
|
||||||
|
triggeredBy: string; // UserId
|
||||||
|
snapshot: any; // Full copy of proposal or completion data at that time
|
||||||
|
comments?: string;
|
||||||
|
}[];
|
||||||
|
|
||||||
|
createdAt: Date;
|
||||||
|
updatedAt: Date;
|
||||||
|
}
|
||||||
|
|
||||||
|
const DealerClaimSchema = new Schema<IDealerClaim>({
|
||||||
|
claimId: { type: String, required: true, unique: true },
|
||||||
|
requestId: { type: String, required: true, index: true }, // Foreign Key to WorkflowRequest (UUID)
|
||||||
|
requestNumber: { type: String, required: true, index: true },
|
||||||
|
workflowStatus: { type: String, default: 'SUBMITTED' },
|
||||||
|
claimDate: Date,
|
||||||
|
|
||||||
|
dealer: {
|
||||||
|
code: { type: String, index: true },
|
||||||
|
name: String,
|
||||||
|
email: String,
|
||||||
|
phone: String,
|
||||||
|
address: String,
|
||||||
|
location: String,
|
||||||
|
region: { type: String, index: true },
|
||||||
|
state: { type: String, index: true },
|
||||||
|
city: { type: String, index: true },
|
||||||
|
zone: String
|
||||||
|
},
|
||||||
|
|
||||||
|
activity: {
|
||||||
|
name: String,
|
||||||
|
type: { type: String }, // Fix: Escape reserved keyword 'type'
|
||||||
|
periodStart: Date,
|
||||||
|
periodEnd: Date
|
||||||
|
},
|
||||||
|
|
||||||
|
proposal: {
|
||||||
|
proposalId: String,
|
||||||
|
totalEstimatedBudget: Number,
|
||||||
|
timelineMode: String,
|
||||||
|
expectedCompletion: Schema.Types.Mixed,
|
||||||
|
dealerComments: String,
|
||||||
|
submittedAt: Date,
|
||||||
|
documentUrl: String,
|
||||||
|
costItems: [{
|
||||||
|
itemId: String,
|
||||||
|
description: String,
|
||||||
|
quantity: Number,
|
||||||
|
unitCost: Number,
|
||||||
|
totalCost: Number,
|
||||||
|
category: String,
|
||||||
|
// Enhanced Tax Support
|
||||||
|
taxDetails: {
|
||||||
|
cgst: { type: Number, default: 0 },
|
||||||
|
sgst: { type: Number, default: 0 },
|
||||||
|
igst: { type: Number, default: 0 },
|
||||||
|
rate: { type: Number, default: 0 }
|
||||||
|
}
|
||||||
|
}]
|
||||||
|
},
|
||||||
|
|
||||||
|
completion: {
|
||||||
|
completionId: String,
|
||||||
|
actualTotalCost: Number,
|
||||||
|
completionDate: Date,
|
||||||
|
dealerComments: String,
|
||||||
|
submittedAt: Date,
|
||||||
|
expenses: [{
|
||||||
|
expenseId: String,
|
||||||
|
description: String,
|
||||||
|
amount: Number,
|
||||||
|
category: String,
|
||||||
|
invoiceNumber: String,
|
||||||
|
invoiceDate: Date,
|
||||||
|
documentUrl: String,
|
||||||
|
// Enhanced Tax Support
|
||||||
|
taxDetails: {
|
||||||
|
cgst: { type: Number, default: 0 },
|
||||||
|
sgst: { type: Number, default: 0 },
|
||||||
|
igst: { type: Number, default: 0 },
|
||||||
|
rate: { type: Number, default: 0 }
|
||||||
|
}
|
||||||
|
}]
|
||||||
|
},
|
||||||
|
|
||||||
|
budgetTracking: {
|
||||||
|
approvedBudget: Number,
|
||||||
|
utilizedBudget: Number,
|
||||||
|
remainingBudget: Number,
|
||||||
|
sapInsertionStatus: { type: String, default: 'PENDING' },
|
||||||
|
sapDocId: String
|
||||||
|
},
|
||||||
|
|
||||||
|
invoices: [{
|
||||||
|
invoiceId: String,
|
||||||
|
invoiceNumber: String,
|
||||||
|
amount: Number,
|
||||||
|
taxAmount: Number,
|
||||||
|
taxDetails: {
|
||||||
|
cgst: { type: Number, default: 0 },
|
||||||
|
sgst: { type: Number, default: 0 },
|
||||||
|
igst: { type: Number, default: 0 },
|
||||||
|
rate: { type: Number, default: 0 }
|
||||||
|
},
|
||||||
|
date: Date,
|
||||||
|
status: String,
|
||||||
|
documentUrl: String
|
||||||
|
}],
|
||||||
|
|
||||||
|
creditNotes: [{
|
||||||
|
noteId: String,
|
||||||
|
noteNumber: String,
|
||||||
|
amount: Number,
|
||||||
|
date: Date,
|
||||||
|
sapDocId: String
|
||||||
|
}],
|
||||||
|
|
||||||
|
// Versioning Support
|
||||||
|
revisions: [{
|
||||||
|
revisionId: String,
|
||||||
|
timestamp: { type: Date, default: Date.now },
|
||||||
|
stage: String,
|
||||||
|
action: String,
|
||||||
|
triggeredBy: String,
|
||||||
|
snapshot: Schema.Types.Mixed,
|
||||||
|
comments: String
|
||||||
|
}]
|
||||||
|
|
||||||
|
}, {
|
||||||
|
timestamps: true,
|
||||||
|
collection: 'dealer_claims'
|
||||||
|
});
|
||||||
|
|
||||||
|
// Indexes for KPI & Dashboard FilteringStrategy
|
||||||
|
// 1. Budget Status (For "Pending Claims" dashboard)
|
||||||
|
DealerClaimSchema.index({ 'budgetTracking.budgetStatus': 1 });
|
||||||
|
|
||||||
|
// 2. Expense Analysis (Multikey Index on embedded array)
|
||||||
|
// Allows fast filtering like: expenses.category = 'Travel' AND expenses.amount > 5000
|
||||||
|
DealerClaimSchema.index({ 'completion.expenses.category': 1, 'completion.expenses.amount': 1 });
|
||||||
|
|
||||||
|
// 3. Proposal Cost Analysis
|
||||||
|
DealerClaimSchema.index({ 'proposal.costItems.category': 1 });
|
||||||
|
|
||||||
|
// 4. Financial Period Filtering (Multikey on Invoice Dates)
|
||||||
|
DealerClaimSchema.index({ 'invoices.date': 1 });
|
||||||
|
|
||||||
|
// 5. Region/State Filtering (Already supported by field definition, but ensuring compound if frequent)
|
||||||
|
DealerClaimSchema.index({ 'dealer.region': 1, 'dealer.state': 1 });
|
||||||
|
|
||||||
|
export const DealerClaimModel = mongoose.model<IDealerClaim>('DealerClaim', DealerClaimSchema);
|
||||||
21
src/models/mongoose/DealerClaimHistory.schema.ts
Normal file
21
src/models/mongoose/DealerClaimHistory.schema.ts
Normal file
@ -0,0 +1,21 @@
|
|||||||
|
import { Schema, Document } from 'mongoose';
|
||||||
|
|
||||||
|
export interface IDealerClaimHistory extends Document {
|
||||||
|
revisionId: string;
|
||||||
|
timestamp: Date;
|
||||||
|
stage: string;
|
||||||
|
action: string;
|
||||||
|
triggeredBy: string;
|
||||||
|
snapshot: any;
|
||||||
|
comments?: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
export const DealerClaimHistorySchema = new Schema<IDealerClaimHistory>({
|
||||||
|
revisionId: String,
|
||||||
|
timestamp: { type: Date, default: Date.now },
|
||||||
|
stage: String,
|
||||||
|
action: String,
|
||||||
|
triggeredBy: String,
|
||||||
|
snapshot: Schema.Types.Mixed,
|
||||||
|
comments: String
|
||||||
|
}, { _id: false });
|
||||||
33
src/models/mongoose/DealerCompletionExpense.schema.ts
Normal file
33
src/models/mongoose/DealerCompletionExpense.schema.ts
Normal file
@ -0,0 +1,33 @@
|
|||||||
|
import { Schema, Document } from 'mongoose';
|
||||||
|
|
||||||
|
export interface IDealerCompletionExpense extends Document {
|
||||||
|
expenseId: string;
|
||||||
|
description: string;
|
||||||
|
amount: number;
|
||||||
|
category: string;
|
||||||
|
invoiceNumber: string;
|
||||||
|
invoiceDate: Date;
|
||||||
|
documentUrl: string;
|
||||||
|
taxDetails?: {
|
||||||
|
cgst: number;
|
||||||
|
sgst: number;
|
||||||
|
igst: number;
|
||||||
|
rate: number;
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
export const DealerCompletionExpenseSchema = new Schema<IDealerCompletionExpense>({
|
||||||
|
expenseId: String,
|
||||||
|
description: String,
|
||||||
|
amount: Number,
|
||||||
|
category: String,
|
||||||
|
invoiceNumber: String,
|
||||||
|
invoiceDate: Date,
|
||||||
|
documentUrl: String,
|
||||||
|
taxDetails: {
|
||||||
|
cgst: { type: Number, default: 0 },
|
||||||
|
sgst: { type: Number, default: 0 },
|
||||||
|
igst: { type: Number, default: 0 },
|
||||||
|
rate: { type: Number, default: 0 }
|
||||||
|
}
|
||||||
|
}, { _id: false });
|
||||||
31
src/models/mongoose/DealerProposalCostItem.schema.ts
Normal file
31
src/models/mongoose/DealerProposalCostItem.schema.ts
Normal file
@ -0,0 +1,31 @@
|
|||||||
|
import { Schema, Document } from 'mongoose';
|
||||||
|
|
||||||
|
export interface IDealerProposalCostItem extends Document {
|
||||||
|
itemId: string;
|
||||||
|
description: string;
|
||||||
|
quantity: number;
|
||||||
|
unitCost: number;
|
||||||
|
totalCost: number;
|
||||||
|
category: string;
|
||||||
|
taxDetails?: {
|
||||||
|
cgst: number;
|
||||||
|
sgst: number;
|
||||||
|
igst: number;
|
||||||
|
rate: number;
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
export const DealerProposalCostItemSchema = new Schema<IDealerProposalCostItem>({
|
||||||
|
itemId: String,
|
||||||
|
description: String,
|
||||||
|
quantity: Number,
|
||||||
|
unitCost: Number,
|
||||||
|
totalCost: Number,
|
||||||
|
category: String,
|
||||||
|
taxDetails: {
|
||||||
|
cgst: { type: Number, default: 0 },
|
||||||
|
sgst: { type: Number, default: 0 },
|
||||||
|
igst: { type: Number, default: 0 },
|
||||||
|
rate: { type: Number, default: 0 }
|
||||||
|
}
|
||||||
|
}, { _id: false });
|
||||||
54
src/models/mongoose/Document.schema.ts
Normal file
54
src/models/mongoose/Document.schema.ts
Normal file
@ -0,0 +1,54 @@
|
|||||||
|
import mongoose, { Schema, Document } from 'mongoose';
|
||||||
|
|
||||||
|
export interface IDocument extends Document {
|
||||||
|
documentId: string; // Original SQL ID or UUID
|
||||||
|
requestId: string; // FK to workflow_requests.requestNumber
|
||||||
|
uploadedBy: string; // FK to users.userId
|
||||||
|
|
||||||
|
fileName: string;
|
||||||
|
originalFileName: string;
|
||||||
|
fileType: string;
|
||||||
|
fileExtension: string;
|
||||||
|
fileSize: number;
|
||||||
|
filePath: string;
|
||||||
|
storageUrl?: string; // Signed URL or GCS link
|
||||||
|
mimeType: string;
|
||||||
|
checksum?: string;
|
||||||
|
|
||||||
|
category: 'SUPPORTING' | 'INVALID_INVOICE' | 'COMMERCIAL' | 'OTHER';
|
||||||
|
version: number;
|
||||||
|
isDeleted: boolean;
|
||||||
|
|
||||||
|
createdAt: Date;
|
||||||
|
updatedAt: Date;
|
||||||
|
}
|
||||||
|
|
||||||
|
const DocumentSchema = new Schema<IDocument>({
|
||||||
|
documentId: { type: String, required: true, unique: true },
|
||||||
|
requestId: { type: String, required: true, index: true },
|
||||||
|
uploadedBy: { type: String, required: true, index: true },
|
||||||
|
|
||||||
|
fileName: { type: String, required: true },
|
||||||
|
originalFileName: String,
|
||||||
|
fileType: String,
|
||||||
|
fileExtension: String,
|
||||||
|
fileSize: Number,
|
||||||
|
filePath: String,
|
||||||
|
storageUrl: String,
|
||||||
|
mimeType: String,
|
||||||
|
checksum: String,
|
||||||
|
|
||||||
|
category: {
|
||||||
|
type: String,
|
||||||
|
default: 'SUPPORTING',
|
||||||
|
index: true
|
||||||
|
},
|
||||||
|
|
||||||
|
version: { type: Number, default: 1 },
|
||||||
|
isDeleted: { type: Boolean, default: false }
|
||||||
|
}, {
|
||||||
|
timestamps: true,
|
||||||
|
collection: 'documents'
|
||||||
|
});
|
||||||
|
|
||||||
|
export const DocumentModel = mongoose.model<IDocument>('Document', DocumentSchema);
|
||||||
24
src/models/mongoose/Holiday.schema.ts
Normal file
24
src/models/mongoose/Holiday.schema.ts
Normal file
@ -0,0 +1,24 @@
|
|||||||
|
import mongoose, { Schema, Document } from 'mongoose';
|
||||||
|
|
||||||
|
export interface IHoliday extends Document {
|
||||||
|
date: Date;
|
||||||
|
name: string;
|
||||||
|
type: 'PUBLIC' | 'OPTIONAL' | 'WEEKEND';
|
||||||
|
year: number;
|
||||||
|
}
|
||||||
|
|
||||||
|
const HolidaySchema = new Schema<IHoliday>({
|
||||||
|
date: { type: Date, required: true, unique: true },
|
||||||
|
name: { type: String, required: true },
|
||||||
|
type: {
|
||||||
|
type: String,
|
||||||
|
enum: ['PUBLIC', 'OPTIONAL', 'WEEKEND'],
|
||||||
|
default: 'PUBLIC'
|
||||||
|
},
|
||||||
|
year: { type: Number, required: true, index: true }
|
||||||
|
}, {
|
||||||
|
timestamps: true,
|
||||||
|
collection: 'holidays'
|
||||||
|
});
|
||||||
|
|
||||||
|
export const HolidayModel = mongoose.model<IHoliday>('Holiday', HolidaySchema);
|
||||||
34
src/models/mongoose/InternalOrder.schema.ts
Normal file
34
src/models/mongoose/InternalOrder.schema.ts
Normal file
@ -0,0 +1,34 @@
|
|||||||
|
import mongoose, { Schema, Document } from 'mongoose';
|
||||||
|
|
||||||
|
export interface IInternalOrder extends Document {
|
||||||
|
requestId: string;
|
||||||
|
ioNumber: string;
|
||||||
|
ioAvailableBalance: number;
|
||||||
|
ioBlockedAmount: number;
|
||||||
|
ioRemainingBalance: number;
|
||||||
|
ioRemark?: string;
|
||||||
|
status: 'PENDING' | 'BLOCKED' | 'RELEASED';
|
||||||
|
sapDocId?: string;
|
||||||
|
createdAt: Date;
|
||||||
|
updatedAt: Date;
|
||||||
|
}
|
||||||
|
|
||||||
|
const InternalOrderSchema = new Schema<IInternalOrder>({
|
||||||
|
requestId: { type: String, required: true, unique: true, index: true },
|
||||||
|
ioNumber: { type: String, required: true },
|
||||||
|
ioAvailableBalance: { type: Number, default: 0 },
|
||||||
|
ioBlockedAmount: { type: Number, default: 0 },
|
||||||
|
ioRemainingBalance: { type: Number, default: 0 },
|
||||||
|
ioRemark: String,
|
||||||
|
status: {
|
||||||
|
type: String,
|
||||||
|
enum: ['PENDING', 'BLOCKED', 'RELEASED'],
|
||||||
|
default: 'PENDING'
|
||||||
|
},
|
||||||
|
sapDocId: String
|
||||||
|
}, {
|
||||||
|
timestamps: true,
|
||||||
|
collection: 'internal_orders'
|
||||||
|
});
|
||||||
|
|
||||||
|
export const InternalOrderModel = mongoose.model<IInternalOrder>('InternalOrder', InternalOrderSchema);
|
||||||
50
src/models/mongoose/Notification.schema.ts
Normal file
50
src/models/mongoose/Notification.schema.ts
Normal file
@ -0,0 +1,50 @@
|
|||||||
|
import mongoose, { Schema, Document } from 'mongoose';
|
||||||
|
|
||||||
|
export interface INotification extends Document {
|
||||||
|
userId: string;
|
||||||
|
requestId?: string;
|
||||||
|
notificationType: string;
|
||||||
|
title: string;
|
||||||
|
message: string;
|
||||||
|
isRead: boolean;
|
||||||
|
priority: 'LOW' | 'MEDIUM' | 'HIGH' | 'URGENT';
|
||||||
|
actionUrl?: string;
|
||||||
|
actionRequired: boolean;
|
||||||
|
metadata?: any;
|
||||||
|
sentVia: string[]; // ['IN_APP', 'PUSH', 'EMAIL']
|
||||||
|
emailSent: boolean;
|
||||||
|
smsSent: boolean;
|
||||||
|
pushSent: boolean;
|
||||||
|
createdAt: Date;
|
||||||
|
updatedAt: Date;
|
||||||
|
}
|
||||||
|
|
||||||
|
const NotificationSchema: Schema = new Schema({
|
||||||
|
userId: { type: String, required: true, index: true },
|
||||||
|
requestId: { type: String, required: false, index: true },
|
||||||
|
notificationType: { type: String, required: true, default: 'general' },
|
||||||
|
title: { type: String, required: true },
|
||||||
|
message: { type: String, required: true },
|
||||||
|
isRead: { type: Boolean, default: false },
|
||||||
|
priority: {
|
||||||
|
type: String,
|
||||||
|
enum: ['LOW', 'MEDIUM', 'HIGH', 'URGENT'],
|
||||||
|
default: 'MEDIUM'
|
||||||
|
},
|
||||||
|
actionUrl: { type: String, required: false },
|
||||||
|
actionRequired: { type: Boolean, default: false },
|
||||||
|
metadata: { type: Schema.Types.Mixed, required: false },
|
||||||
|
sentVia: { type: [String], default: ['IN_APP'] },
|
||||||
|
emailSent: { type: Boolean, default: false },
|
||||||
|
smsSent: { type: Boolean, default: false },
|
||||||
|
pushSent: { type: Boolean, default: false }
|
||||||
|
}, {
|
||||||
|
timestamps: true,
|
||||||
|
collection: 'notifications' // Explicit collection name
|
||||||
|
});
|
||||||
|
|
||||||
|
// Indexes
|
||||||
|
NotificationSchema.index({ userId: 1, isRead: 1 });
|
||||||
|
NotificationSchema.index({ createdAt: -1 });
|
||||||
|
|
||||||
|
export const NotificationModel = mongoose.model<INotification>('Notification', NotificationSchema);
|
||||||
43
src/models/mongoose/Participant.schema.ts
Normal file
43
src/models/mongoose/Participant.schema.ts
Normal file
@ -0,0 +1,43 @@
|
|||||||
|
import mongoose, { Schema, Document } from 'mongoose';
|
||||||
|
|
||||||
|
export interface IParticipant extends Document {
|
||||||
|
participantId: string;
|
||||||
|
requestId: string; // Reference to WorkflowRequest.requestNumber (or _id if we prefer) - keeping requestNumber for easier joining with legacy data
|
||||||
|
userId: string;
|
||||||
|
userEmail: string;
|
||||||
|
userName: string;
|
||||||
|
participantType: 'SPECTATOR' | 'INITIATOR' | 'APPROVER' | 'CONSULTATION';
|
||||||
|
canComment: boolean;
|
||||||
|
canViewDocuments: boolean;
|
||||||
|
canDownloadDocuments: boolean;
|
||||||
|
notificationEnabled: boolean;
|
||||||
|
addedBy: string;
|
||||||
|
addedAt: Date;
|
||||||
|
isActive: boolean;
|
||||||
|
}
|
||||||
|
|
||||||
|
const ParticipantSchema = new Schema<IParticipant>({
|
||||||
|
participantId: { type: String, required: true, unique: true },
|
||||||
|
requestId: { type: String, required: true, index: true }, // Indexed for fast lookups
|
||||||
|
userId: { type: String, required: true, index: true },
|
||||||
|
userEmail: { type: String, required: true },
|
||||||
|
userName: { type: String, required: true },
|
||||||
|
participantType: { type: String, required: true, enum: ['SPECTATOR', 'INITIATOR', 'APPROVER', 'CONSULTATION'] },
|
||||||
|
|
||||||
|
canComment: { type: Boolean, default: true },
|
||||||
|
canViewDocuments: { type: Boolean, default: true },
|
||||||
|
canDownloadDocuments: { type: Boolean, default: false },
|
||||||
|
notificationEnabled: { type: Boolean, default: true },
|
||||||
|
|
||||||
|
addedBy: { type: String, required: true },
|
||||||
|
addedAt: { type: Date, default: Date.now },
|
||||||
|
isActive: { type: Boolean, default: true }
|
||||||
|
}, {
|
||||||
|
timestamps: false,
|
||||||
|
collection: 'participants'
|
||||||
|
});
|
||||||
|
|
||||||
|
// Compound index for unique check
|
||||||
|
ParticipantSchema.index({ requestId: 1, userId: 1 }, { unique: true });
|
||||||
|
|
||||||
|
export const ParticipantModel = mongoose.model<IParticipant>('Participant', ParticipantSchema);
|
||||||
52
src/models/mongoose/RequestSummary.schema.ts
Normal file
52
src/models/mongoose/RequestSummary.schema.ts
Normal file
@ -0,0 +1,52 @@
|
|||||||
|
import mongoose, { Schema, Document } from 'mongoose';
|
||||||
|
|
||||||
|
export interface IRequestSummary extends Document {
|
||||||
|
summaryId: string;
|
||||||
|
requestId: string;
|
||||||
|
initiatorId: string;
|
||||||
|
|
||||||
|
title: string;
|
||||||
|
description?: string;
|
||||||
|
closingRemarks?: string;
|
||||||
|
isAiGenerated: boolean;
|
||||||
|
|
||||||
|
conclusionId?: string; // Reference to old ConclusionRemark if needed, or embed logic here.
|
||||||
|
|
||||||
|
// Embedded Shared Details
|
||||||
|
sharedWith: {
|
||||||
|
userId: string;
|
||||||
|
sharedBy: string;
|
||||||
|
sharedAt: Date;
|
||||||
|
viewedAt?: Date;
|
||||||
|
isRead: boolean;
|
||||||
|
}[];
|
||||||
|
|
||||||
|
createdAt: Date;
|
||||||
|
updatedAt: Date;
|
||||||
|
}
|
||||||
|
|
||||||
|
const RequestSummarySchema = new Schema<IRequestSummary>({
|
||||||
|
summaryId: { type: String, required: true, unique: true },
|
||||||
|
requestId: { type: String, required: true, index: true, unique: true }, // One summary per request usually
|
||||||
|
initiatorId: { type: String, required: true },
|
||||||
|
|
||||||
|
title: { type: String, required: true },
|
||||||
|
description: String,
|
||||||
|
closingRemarks: String,
|
||||||
|
isAiGenerated: { type: Boolean, default: false },
|
||||||
|
|
||||||
|
conclusionId: String,
|
||||||
|
|
||||||
|
sharedWith: [{
|
||||||
|
userId: { type: String, required: true },
|
||||||
|
sharedBy: { type: String, required: true },
|
||||||
|
sharedAt: { type: Date, default: Date.now },
|
||||||
|
viewedAt: Date,
|
||||||
|
isRead: { type: Boolean, default: false }
|
||||||
|
}]
|
||||||
|
}, {
|
||||||
|
timestamps: true,
|
||||||
|
collection: 'request_summaries'
|
||||||
|
});
|
||||||
|
|
||||||
|
export const RequestSummaryModel = mongoose.model<IRequestSummary>('RequestSummary', RequestSummarySchema);
|
||||||
17
src/models/mongoose/SharedSummary.schema.ts
Normal file
17
src/models/mongoose/SharedSummary.schema.ts
Normal file
@ -0,0 +1,17 @@
|
|||||||
|
import { Schema, Document } from 'mongoose';
|
||||||
|
|
||||||
|
export interface ISharedSummary extends Document {
|
||||||
|
userId: string;
|
||||||
|
sharedBy: string;
|
||||||
|
sharedAt: Date;
|
||||||
|
viewedAt?: Date;
|
||||||
|
isRead: boolean;
|
||||||
|
}
|
||||||
|
|
||||||
|
export const SharedSummarySchema = new Schema<ISharedSummary>({
|
||||||
|
userId: { type: String, required: true },
|
||||||
|
sharedBy: { type: String, required: true },
|
||||||
|
sharedAt: { type: Date, default: Date.now },
|
||||||
|
viewedAt: Date,
|
||||||
|
isRead: { type: Boolean, default: false }
|
||||||
|
}, { _id: false });
|
||||||
24
src/models/mongoose/Subscription.schema.ts
Normal file
24
src/models/mongoose/Subscription.schema.ts
Normal file
@ -0,0 +1,24 @@
|
|||||||
|
import mongoose, { Schema, Document } from 'mongoose';
|
||||||
|
|
||||||
|
export interface ISubscription extends Document {
|
||||||
|
userId: string;
|
||||||
|
endpoint: string;
|
||||||
|
p256dh: string;
|
||||||
|
auth: string; // auth key
|
||||||
|
userAgent?: string;
|
||||||
|
createdAt: Date;
|
||||||
|
updatedAt: Date;
|
||||||
|
}
|
||||||
|
|
||||||
|
const SubscriptionSchema: Schema = new Schema({
|
||||||
|
userId: { type: String, required: true, index: true },
|
||||||
|
endpoint: { type: String, required: true, unique: true }, // Endpoint is unique identifier for web push
|
||||||
|
p256dh: { type: String, required: true },
|
||||||
|
auth: { type: String, required: true },
|
||||||
|
userAgent: { type: String, required: false }
|
||||||
|
}, {
|
||||||
|
timestamps: true,
|
||||||
|
collection: 'subscriptions'
|
||||||
|
});
|
||||||
|
|
||||||
|
export const SubscriptionModel = mongoose.model<ISubscription>('Subscription', SubscriptionSchema);
|
||||||
54
src/models/mongoose/TatAlert.schema.ts
Normal file
54
src/models/mongoose/TatAlert.schema.ts
Normal file
@ -0,0 +1,54 @@
|
|||||||
|
import { Schema, model, Document } from 'mongoose';
|
||||||
|
|
||||||
|
export interface ITatAlert extends Document {
|
||||||
|
requestId: string;
|
||||||
|
levelId: string;
|
||||||
|
approverId: string;
|
||||||
|
alertType: 'TAT_50' | 'TAT_75' | 'TAT_100';
|
||||||
|
thresholdPercentage: number;
|
||||||
|
tatHoursAllocated: number;
|
||||||
|
tatHoursElapsed: number;
|
||||||
|
tatHoursRemaining: number;
|
||||||
|
levelStartTime: Date;
|
||||||
|
alertSentAt: Date;
|
||||||
|
expectedCompletionTime: Date;
|
||||||
|
alertMessage: string;
|
||||||
|
notificationSent: boolean;
|
||||||
|
notificationChannels: string[];
|
||||||
|
isBreached: boolean;
|
||||||
|
metadata?: any;
|
||||||
|
createdAt: Date;
|
||||||
|
updatedAt: Date;
|
||||||
|
}
|
||||||
|
|
||||||
|
const TatAlertSchema = new Schema<ITatAlert>({
|
||||||
|
requestId: { type: String, required: true, index: true },
|
||||||
|
levelId: { type: String, required: true, index: true },
|
||||||
|
approverId: { type: String, required: true, index: true },
|
||||||
|
alertType: {
|
||||||
|
type: String,
|
||||||
|
enum: ['TAT_50', 'TAT_75', 'TAT_100'],
|
||||||
|
required: true
|
||||||
|
},
|
||||||
|
thresholdPercentage: { type: Number, required: true },
|
||||||
|
tatHoursAllocated: { type: Number, required: true },
|
||||||
|
tatHoursElapsed: { type: Number, required: true },
|
||||||
|
tatHoursRemaining: { type: Number, required: true },
|
||||||
|
levelStartTime: { type: Date, required: true },
|
||||||
|
alertSentAt: { type: Date, required: true },
|
||||||
|
expectedCompletionTime: { type: Date, required: true },
|
||||||
|
alertMessage: { type: String, required: true },
|
||||||
|
notificationSent: { type: Boolean, default: false },
|
||||||
|
notificationChannels: { type: [String], default: [] },
|
||||||
|
isBreached: { type: Boolean, default: false },
|
||||||
|
metadata: { type: Schema.Types.Mixed, default: {} }
|
||||||
|
}, {
|
||||||
|
timestamps: true,
|
||||||
|
collection: 'tat_alerts' // Explicit collection name
|
||||||
|
});
|
||||||
|
|
||||||
|
// Indexes for KPI reporting
|
||||||
|
TatAlertSchema.index({ createdAt: 1 });
|
||||||
|
TatAlertSchema.index({ isBreached: 1 });
|
||||||
|
|
||||||
|
export const TatAlertModel = model<ITatAlert>('TatAlert', TatAlertSchema);
|
||||||
97
src/models/mongoose/User.schema.ts
Normal file
97
src/models/mongoose/User.schema.ts
Normal file
@ -0,0 +1,97 @@
|
|||||||
|
import mongoose, { Schema, Document } from 'mongoose';
|
||||||
|
import { UserRole } from '../../types/user.types';
|
||||||
|
|
||||||
|
export interface IUser extends Document {
|
||||||
|
userId: string;
|
||||||
|
employeeId?: string;
|
||||||
|
oktaSub: string;
|
||||||
|
email: string;
|
||||||
|
firstName?: string;
|
||||||
|
lastName?: string;
|
||||||
|
displayName?: string;
|
||||||
|
department?: string;
|
||||||
|
designation?: string;
|
||||||
|
phone?: string;
|
||||||
|
|
||||||
|
// Extended Fields
|
||||||
|
manager?: string;
|
||||||
|
secondEmail?: string;
|
||||||
|
jobTitle?: string;
|
||||||
|
employeeNumber?: string;
|
||||||
|
postalAddress?: string;
|
||||||
|
mobilePhone?: string;
|
||||||
|
adGroups?: string[];
|
||||||
|
|
||||||
|
location?: {
|
||||||
|
city?: string;
|
||||||
|
state?: string;
|
||||||
|
country?: string;
|
||||||
|
office?: string;
|
||||||
|
timezone?: string;
|
||||||
|
};
|
||||||
|
|
||||||
|
notifications: {
|
||||||
|
email: boolean;
|
||||||
|
push: boolean;
|
||||||
|
inApp: boolean;
|
||||||
|
};
|
||||||
|
|
||||||
|
isActive: boolean;
|
||||||
|
role: UserRole;
|
||||||
|
lastLogin?: Date;
|
||||||
|
|
||||||
|
createdAt: Date;
|
||||||
|
updatedAt: Date;
|
||||||
|
}
|
||||||
|
|
||||||
|
const UserSchema = new Schema<IUser>({
|
||||||
|
userId: { type: String, required: true, unique: true, index: true },
|
||||||
|
employeeId: { type: String, index: true },
|
||||||
|
oktaSub: { type: String, required: true, unique: true, index: true },
|
||||||
|
email: { type: String, required: true, unique: true, index: true },
|
||||||
|
firstName: String,
|
||||||
|
lastName: String,
|
||||||
|
displayName: String,
|
||||||
|
department: { type: String, index: true },
|
||||||
|
designation: String,
|
||||||
|
phone: String,
|
||||||
|
|
||||||
|
manager: { type: String, index: true },
|
||||||
|
secondEmail: String,
|
||||||
|
jobTitle: String,
|
||||||
|
employeeNumber: String,
|
||||||
|
postalAddress: { type: String, index: true },
|
||||||
|
mobilePhone: String,
|
||||||
|
adGroups: [String],
|
||||||
|
|
||||||
|
location: {
|
||||||
|
city: String,
|
||||||
|
state: String,
|
||||||
|
country: String,
|
||||||
|
office: String,
|
||||||
|
timezone: String
|
||||||
|
},
|
||||||
|
|
||||||
|
notifications: {
|
||||||
|
email: { type: Boolean, default: true },
|
||||||
|
push: { type: Boolean, default: true },
|
||||||
|
inApp: { type: Boolean, default: true }
|
||||||
|
},
|
||||||
|
|
||||||
|
isActive: { type: Boolean, default: true, index: true },
|
||||||
|
role: {
|
||||||
|
type: String,
|
||||||
|
enum: ['USER', 'MANAGEMENT', 'ADMIN'],
|
||||||
|
default: 'USER',
|
||||||
|
index: true
|
||||||
|
},
|
||||||
|
lastLogin: Date
|
||||||
|
}, {
|
||||||
|
timestamps: true,
|
||||||
|
collection: 'users'
|
||||||
|
});
|
||||||
|
|
||||||
|
// Text Search Index for Name/Email
|
||||||
|
UserSchema.index({ displayName: 'text', email: 'text', firstName: 'text', lastName: 'text' });
|
||||||
|
|
||||||
|
export const UserModel = mongoose.model<IUser>('User', UserSchema);
|
||||||
64
src/models/mongoose/WorkNote.schema.ts
Normal file
64
src/models/mongoose/WorkNote.schema.ts
Normal file
@ -0,0 +1,64 @@
|
|||||||
|
import mongoose, { Schema, Document } from 'mongoose';
|
||||||
|
|
||||||
|
export interface IWorkNote extends Document {
|
||||||
|
noteId: string;
|
||||||
|
requestId: string;
|
||||||
|
userId: string;
|
||||||
|
userName: string; // User display name
|
||||||
|
userRole: string; // User's role at time of comment
|
||||||
|
|
||||||
|
message: string; // The note content (max 2000 chars)
|
||||||
|
messageType: 'COMMENT' | 'QUESTION' | 'CLARIFICATION' | 'UPDATE' | 'SYSTEM';
|
||||||
|
isPriority: boolean; // Flag for important/priority notes
|
||||||
|
hasAttachment: boolean; // Quick check if note has attachments
|
||||||
|
|
||||||
|
parentNoteId?: string; // For threaded replies
|
||||||
|
mentionedUsers: string[]; // Array of user IDs that were @mentioned
|
||||||
|
reactions: any; // User reactions (likes, emojis, etc.)
|
||||||
|
|
||||||
|
isEdited: boolean; // Track if note was edited
|
||||||
|
isDeleted: boolean; // Soft delete flag
|
||||||
|
|
||||||
|
createdAt: Date;
|
||||||
|
updatedAt: Date;
|
||||||
|
}
|
||||||
|
|
||||||
|
const WorkNoteSchema = new Schema<IWorkNote>({
|
||||||
|
noteId: { type: String, required: true, unique: true },
|
||||||
|
requestId: { type: String, required: true, index: true },
|
||||||
|
userId: { type: String, required: true, index: true },
|
||||||
|
userName: { type: String, required: true },
|
||||||
|
userRole: { type: String, required: true },
|
||||||
|
|
||||||
|
message: {
|
||||||
|
type: String,
|
||||||
|
required: true,
|
||||||
|
maxlength: 2000 // PostgreSQL constraint
|
||||||
|
},
|
||||||
|
messageType: {
|
||||||
|
type: String,
|
||||||
|
enum: ['COMMENT', 'QUESTION', 'CLARIFICATION', 'UPDATE', 'SYSTEM'],
|
||||||
|
default: 'COMMENT',
|
||||||
|
index: true
|
||||||
|
},
|
||||||
|
isPriority: { type: Boolean, default: false, index: true },
|
||||||
|
hasAttachment: { type: Boolean, default: false },
|
||||||
|
|
||||||
|
parentNoteId: { type: String, index: true }, // For threading
|
||||||
|
mentionedUsers: [{ type: String }], // Array of user IDs
|
||||||
|
reactions: { type: Schema.Types.Mixed, default: {} }, // JSONB equivalent
|
||||||
|
|
||||||
|
isEdited: { type: Boolean, default: false },
|
||||||
|
isDeleted: { type: Boolean, default: false, index: true }
|
||||||
|
}, {
|
||||||
|
timestamps: true, // Auto-manage createdAt and updatedAt
|
||||||
|
collection: 'work_notes'
|
||||||
|
});
|
||||||
|
|
||||||
|
// Indexes for common queries
|
||||||
|
WorkNoteSchema.index({ requestId: 1, createdAt: -1 }); // Get notes for a request
|
||||||
|
WorkNoteSchema.index({ userId: 1, createdAt: -1 }); // Get user's notes
|
||||||
|
WorkNoteSchema.index({ parentNoteId: 1 }); // Get replies to a note
|
||||||
|
WorkNoteSchema.index({ isPriority: 1, isDeleted: 1 }); // Filter priority notes
|
||||||
|
|
||||||
|
export const WorkNoteModel = mongoose.model<IWorkNote>('WorkNote', WorkNoteSchema);
|
||||||
38
src/models/mongoose/WorkNoteAttachment.schema.ts
Normal file
38
src/models/mongoose/WorkNoteAttachment.schema.ts
Normal file
@ -0,0 +1,38 @@
|
|||||||
|
import mongoose, { Schema, Document } from 'mongoose';
|
||||||
|
|
||||||
|
export interface IWorkNoteAttachment extends Document {
|
||||||
|
attachmentId: string;
|
||||||
|
noteId: string; // Reference to WorkNote
|
||||||
|
fileName: string;
|
||||||
|
fileType: string;
|
||||||
|
fileSize: number; // In bytes
|
||||||
|
filePath: string; // Internal file path
|
||||||
|
storageUrl?: string; // GCS/S3 URL
|
||||||
|
isDownloadable: boolean;
|
||||||
|
downloadCount: number;
|
||||||
|
uploadedAt: Date;
|
||||||
|
}
|
||||||
|
|
||||||
|
const WorkNoteAttachmentSchema = new Schema<IWorkNoteAttachment>({
|
||||||
|
attachmentId: { type: String, required: true, unique: true },
|
||||||
|
noteId: { type: String, required: true, index: true }, // Reference to WorkNote
|
||||||
|
fileName: { type: String, required: true },
|
||||||
|
fileType: { type: String, required: true },
|
||||||
|
fileSize: { type: Number, required: true }, // Bytes
|
||||||
|
filePath: { type: String, required: true },
|
||||||
|
storageUrl: { type: String },
|
||||||
|
isDownloadable: { type: Boolean, default: true },
|
||||||
|
downloadCount: { type: Number, default: 0 },
|
||||||
|
uploadedAt: { type: Date, default: Date.now }
|
||||||
|
}, {
|
||||||
|
timestamps: false, // We use uploadedAt instead
|
||||||
|
collection: 'work_note_attachments'
|
||||||
|
});
|
||||||
|
|
||||||
|
// Index for querying attachments by note
|
||||||
|
WorkNoteAttachmentSchema.index({ noteId: 1, uploadedAt: -1 });
|
||||||
|
|
||||||
|
export const WorkNoteAttachmentModel = mongoose.model<IWorkNoteAttachment>(
|
||||||
|
'WorkNoteAttachment',
|
||||||
|
WorkNoteAttachmentSchema
|
||||||
|
);
|
||||||
108
src/models/mongoose/WorkflowRequest.schema.ts
Normal file
108
src/models/mongoose/WorkflowRequest.schema.ts
Normal file
@ -0,0 +1,108 @@
|
|||||||
|
import mongoose, { Schema, Document } from 'mongoose';
|
||||||
|
|
||||||
|
// --- Main Interface ---
|
||||||
|
|
||||||
|
export interface IWorkflowRequest extends Document {
|
||||||
|
requestId: string;
|
||||||
|
requestNumber: string;
|
||||||
|
initiator: {
|
||||||
|
userId: string;
|
||||||
|
email: string;
|
||||||
|
name: string;
|
||||||
|
department?: string; // Critical for KPIs
|
||||||
|
};
|
||||||
|
templateType: 'CUSTOM' | 'TEMPLATE' | 'DEALER CLAIM';
|
||||||
|
workflowType?: string;
|
||||||
|
templateId?: string;
|
||||||
|
title: string;
|
||||||
|
description: string;
|
||||||
|
priority: 'STANDARD' | 'EXPRESS';
|
||||||
|
status: 'DRAFT' | 'PENDING' | 'IN_PROGRESS' | 'APPROVED' | 'REJECTED' | 'CLOSED' | 'PAUSED' | 'CANCELLED';
|
||||||
|
|
||||||
|
// Flattened/Cached Fields for KPIs
|
||||||
|
currentLevel: number; // Display purposes - can become stale when levels shift
|
||||||
|
currentLevelId?: string; // UUID reference to the active ApprovalLevel - always accurate
|
||||||
|
totalLevels: number;
|
||||||
|
totalTatHours: number;
|
||||||
|
|
||||||
|
// Flattened date fields (matching PostgreSQL)
|
||||||
|
submissionDate?: Date;
|
||||||
|
closureDate?: Date;
|
||||||
|
createdAt: Date;
|
||||||
|
updatedAt: Date;
|
||||||
|
|
||||||
|
// Flattened flag fields (matching PostgreSQL)
|
||||||
|
isDraft: boolean;
|
||||||
|
isDeleted: boolean;
|
||||||
|
isPaused: boolean;
|
||||||
|
|
||||||
|
// Flattened conclusion fields (matching PostgreSQL)
|
||||||
|
conclusionRemark?: string;
|
||||||
|
aiGeneratedConclusion?: string;
|
||||||
|
|
||||||
|
// Pause-related fields
|
||||||
|
pausedAt?: Date;
|
||||||
|
pausedBy?: string;
|
||||||
|
pauseReason?: string;
|
||||||
|
pauseResumeDate?: Date;
|
||||||
|
pauseTatSnapshot?: any;
|
||||||
|
|
||||||
|
// NOTE: Participants and ApprovalLevels are now in SEPARATE collections.
|
||||||
|
// They reference this document via 'requestNumber' or '_id'.
|
||||||
|
}
|
||||||
|
|
||||||
|
const WorkflowRequestSchema = new Schema<IWorkflowRequest>({
|
||||||
|
requestId: { type: String, required: true, unique: true, index: true },
|
||||||
|
requestNumber: { type: String, required: true, unique: true, index: true },
|
||||||
|
initiator: {
|
||||||
|
userId: { type: String, required: true, index: true },
|
||||||
|
email: { type: String, required: true },
|
||||||
|
name: { type: String, required: true },
|
||||||
|
department: { type: String, index: true } // Indexed for KPIs
|
||||||
|
},
|
||||||
|
templateType: { type: String, default: 'CUSTOM', index: true },
|
||||||
|
workflowType: { type: String, default: 'NON_TEMPLATIZED' },
|
||||||
|
templateId: String,
|
||||||
|
title: { type: String, required: true, index: 'text' }, // Text index for search
|
||||||
|
description: { type: String, required: true, index: 'text' },
|
||||||
|
priority: { type: String, enum: ['STANDARD', 'EXPRESS'], default: 'STANDARD' },
|
||||||
|
status: {
|
||||||
|
type: String,
|
||||||
|
enum: ['DRAFT', 'PENDING', 'IN_PROGRESS', 'APPROVED', 'REJECTED', 'CLOSED', 'PAUSED', 'CANCELLED'],
|
||||||
|
default: 'DRAFT',
|
||||||
|
index: true
|
||||||
|
},
|
||||||
|
|
||||||
|
currentLevel: { type: Number, default: 1 },
|
||||||
|
currentLevelId: { type: String }, // UUID reference to active ApprovalLevel
|
||||||
|
totalLevels: { type: Number, default: 1 },
|
||||||
|
totalTatHours: { type: Number, default: 0 },
|
||||||
|
|
||||||
|
// Flattened date fields
|
||||||
|
submissionDate: { type: Date, index: true },
|
||||||
|
closureDate: { type: Date, index: true }, // Index for date range filters
|
||||||
|
createdAt: { type: Date, default: Date.now, index: true },
|
||||||
|
updatedAt: { type: Date, default: Date.now },
|
||||||
|
|
||||||
|
// Flattened flag fields
|
||||||
|
isDraft: { type: Boolean, default: true, index: true },
|
||||||
|
isDeleted: { type: Boolean, default: false, index: true },
|
||||||
|
isPaused: { type: Boolean, default: false, index: true },
|
||||||
|
|
||||||
|
// Flattened conclusion fields
|
||||||
|
conclusionRemark: String,
|
||||||
|
aiGeneratedConclusion: String,
|
||||||
|
|
||||||
|
// Pause-related fields
|
||||||
|
pausedAt: Date,
|
||||||
|
pausedBy: String,
|
||||||
|
pauseReason: String,
|
||||||
|
pauseResumeDate: Date,
|
||||||
|
pauseTatSnapshot: Schema.Types.Mixed
|
||||||
|
|
||||||
|
}, {
|
||||||
|
timestamps: true, // This will auto-manage createdAt and updatedAt
|
||||||
|
collection: 'workflow_requests'
|
||||||
|
});
|
||||||
|
|
||||||
|
export const WorkflowRequestModel = mongoose.model<IWorkflowRequest>('WorkflowRequest', WorkflowRequestSchema);
|
||||||
53
src/models/mongoose/WorkflowTemplate.schema.ts
Normal file
53
src/models/mongoose/WorkflowTemplate.schema.ts
Normal file
@ -0,0 +1,53 @@
|
|||||||
|
import mongoose, { Schema, Document } from 'mongoose';
|
||||||
|
|
||||||
|
export interface IWorkflowTemplate extends Document {
|
||||||
|
templateId: string;
|
||||||
|
name: string;
|
||||||
|
description?: string;
|
||||||
|
|
||||||
|
department: string;
|
||||||
|
workflowType: string; // e.g., 'CAPEX', 'OPEX'
|
||||||
|
isActive: boolean;
|
||||||
|
version: number;
|
||||||
|
|
||||||
|
// Normalized definition of stages
|
||||||
|
stages: {
|
||||||
|
stageNumber: number;
|
||||||
|
stageName: string;
|
||||||
|
approverRole?: string; // e.g. 'DEPT_HEAD'
|
||||||
|
specificApproverId?: string; // Optional hardcoded user
|
||||||
|
tatHours: number;
|
||||||
|
isMandatory: boolean;
|
||||||
|
}[];
|
||||||
|
|
||||||
|
createdBy: string;
|
||||||
|
updatedBy: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
const WorkflowTemplateSchema = new Schema<IWorkflowTemplate>({
|
||||||
|
templateId: { type: String, required: true, unique: true },
|
||||||
|
name: { type: String, required: true },
|
||||||
|
description: String,
|
||||||
|
|
||||||
|
department: { type: String, required: true, index: true },
|
||||||
|
workflowType: { type: String, required: true },
|
||||||
|
isActive: { type: Boolean, default: true },
|
||||||
|
version: { type: Number, default: 1 },
|
||||||
|
|
||||||
|
stages: [{
|
||||||
|
stageNumber: Number,
|
||||||
|
stageName: String,
|
||||||
|
approverRole: String,
|
||||||
|
specificApproverId: String,
|
||||||
|
tatHours: Number,
|
||||||
|
isMandatory: { type: Boolean, default: true }
|
||||||
|
}],
|
||||||
|
|
||||||
|
createdBy: String,
|
||||||
|
updatedBy: String
|
||||||
|
}, {
|
||||||
|
timestamps: true,
|
||||||
|
collection: 'workflow_templates'
|
||||||
|
});
|
||||||
|
|
||||||
|
export const WorkflowTemplateModel = mongoose.model<IWorkflowTemplate>('WorkflowTemplate', WorkflowTemplateSchema);
|
||||||
26
src/models/mongoose/index.ts
Normal file
26
src/models/mongoose/index.ts
Normal file
@ -0,0 +1,26 @@
|
|||||||
|
export * from './Activity.schema';
|
||||||
|
export * from './ActivityType.schema';
|
||||||
|
export * from './ApprovalLevel.schema';
|
||||||
|
export * from './ClaimBudgetTracking.schema';
|
||||||
|
export * from './ClaimCreditNote.schema';
|
||||||
|
export * from './ClaimInvoice.schema';
|
||||||
|
export * from './ConclusionRemark.schema';
|
||||||
|
export * from './Dealer.schema';
|
||||||
|
export * from './DealerClaim.schema';
|
||||||
|
export * from './DealerClaimHistory.schema';
|
||||||
|
export * from './DealerCompletionExpense.schema';
|
||||||
|
export * from './DealerProposalCostItem.schema';
|
||||||
|
export * from './Document.schema';
|
||||||
|
export * from './Holiday.schema';
|
||||||
|
export * from './InternalOrder.schema';
|
||||||
|
export * from './Notification.schema';
|
||||||
|
export * from './Participant.schema';
|
||||||
|
export * from './RequestSummary.schema';
|
||||||
|
export * from './SharedSummary.schema';
|
||||||
|
export * from './Subscription.schema';
|
||||||
|
export * from './TatAlert.schema';
|
||||||
|
export * from './User.schema';
|
||||||
|
export * from './WorkNote.schema';
|
||||||
|
export * from './WorkNoteAttachment.schema';
|
||||||
|
export * from './WorkflowRequest.schema';
|
||||||
|
export * from './WorkflowTemplate.schema';
|
||||||
35
src/queues/pauseResumeProcessor.mongo.ts
Normal file
35
src/queues/pauseResumeProcessor.mongo.ts
Normal file
@ -0,0 +1,35 @@
|
|||||||
|
import { Job } from 'bullmq';
|
||||||
|
import { pauseMongoService } from '../services/pause.service';
|
||||||
|
import logger from '../utils/logger';
|
||||||
|
|
||||||
|
export async function handlePauseResumeJob(job: Job): Promise<void> {
|
||||||
|
try {
|
||||||
|
const { type, requestId, levelId, scheduledResumeDate } = job.data;
|
||||||
|
|
||||||
|
if (type === 'auto-resume-workflow') {
|
||||||
|
logger.info(`[Pause Resume Processor] Processing dedicated auto-resume job ${job.id} for workflow ${requestId}`);
|
||||||
|
|
||||||
|
try {
|
||||||
|
await pauseMongoService.resumeWorkflow(requestId);
|
||||||
|
logger.info(`[Pause Resume Processor] ✅ Auto-resumed workflow ${requestId} (scheduled for ${scheduledResumeDate})`);
|
||||||
|
} catch (resumeError: any) {
|
||||||
|
logger.error(`[Pause Resume Processor] Failed to auto-resume workflow ${requestId}:`, resumeError?.message || resumeError);
|
||||||
|
throw resumeError;
|
||||||
|
}
|
||||||
|
} else if (type === 'check_and_resume') {
|
||||||
|
logger.info(`[Pause Resume Processor] Processing bulk auto-resume check job ${job.id}`);
|
||||||
|
const resumedCount = await pauseMongoService.checkAndResumePausedWorkflows();
|
||||||
|
|
||||||
|
if (resumedCount > 0) {
|
||||||
|
logger.info(`[Pause Resume Processor] Auto-resumed ${resumedCount} workflow(s) via bulk check`);
|
||||||
|
} else {
|
||||||
|
logger.debug('[Pause Resume Processor] No workflows to auto-resume');
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
logger.warn(`[Pause Resume Processor] Unknown job type: ${type}`);
|
||||||
|
}
|
||||||
|
} catch (error: any) {
|
||||||
|
logger.error(`[Pause Resume Processor] Failed to process job ${job.id}:`, error?.message || error);
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
|
}
|
||||||
@ -1,6 +1,6 @@
|
|||||||
import { Worker } from 'bullmq';
|
import { Worker } from 'bullmq';
|
||||||
import { sharedRedisConnection } from './redisConnection';
|
import { sharedRedisConnection } from './redisConnection';
|
||||||
import { handlePauseResumeJob } from './pauseResumeProcessor';
|
import { handlePauseResumeJob } from './pauseResumeProcessor.mongo';
|
||||||
import logger from '@utils/logger';
|
import logger from '@utils/logger';
|
||||||
|
|
||||||
let pauseResumeWorker: Worker | null = null;
|
let pauseResumeWorker: Worker | null = null;
|
||||||
@ -74,4 +74,3 @@ process.on('SIGINT', async () => {
|
|||||||
});
|
});
|
||||||
|
|
||||||
export { pauseResumeWorker };
|
export { pauseResumeWorker };
|
||||||
|
|
||||||
|
|||||||
290
src/queues/tatProcessor.mongo.ts
Normal file
290
src/queues/tatProcessor.mongo.ts
Normal file
@ -0,0 +1,290 @@
|
|||||||
|
import { Job } from 'bullmq';
|
||||||
|
import { notificationMongoService } from '../services/notification.service';
|
||||||
|
import { ApprovalLevelModel } from '../models/mongoose/ApprovalLevel.schema';
|
||||||
|
import { WorkflowRequestModel } from '../models/mongoose/WorkflowRequest.schema';
|
||||||
|
import { TatAlertModel } from '../models/mongoose/TatAlert.schema';
|
||||||
|
import { activityMongoService } from '../services/activity.service';
|
||||||
|
import logger from '../utils/logger';
|
||||||
|
import { calculateElapsedWorkingHours, addWorkingHours, addWorkingHoursExpress } from '../utils/tatTimeUtils';
|
||||||
|
|
||||||
|
interface TatJobData {
|
||||||
|
type: 'threshold1' | 'threshold2' | 'breach';
|
||||||
|
threshold: number;
|
||||||
|
requestId: string;
|
||||||
|
levelId: string;
|
||||||
|
approverId: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Handle TAT notification jobs (MongoDB Version)
|
||||||
|
*/
|
||||||
|
export async function handleTatJob(job: Job<TatJobData>) {
|
||||||
|
const { requestId, levelId, approverId, type, threshold } = job.data;
|
||||||
|
|
||||||
|
logger.info(`[TAT Processor] Processing ${type} (${threshold}%) for request ${requestId}`);
|
||||||
|
|
||||||
|
try {
|
||||||
|
// Get approval level
|
||||||
|
const approvalLevel = await ApprovalLevelModel.findById(levelId);
|
||||||
|
|
||||||
|
if (!approvalLevel) {
|
||||||
|
logger.warn(`[TAT Processor] Approval level ${levelId} not found - likely already approved/rejected`);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if level is still pending
|
||||||
|
if (approvalLevel.status !== 'PENDING' && approvalLevel.status !== 'IN_PROGRESS') {
|
||||||
|
logger.info(`[TAT Processor] Level ${levelId} is already ${approvalLevel.status}. Skipping notification.`);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get workflow - Try finding by UUID (requestId) first
|
||||||
|
let workflow: any = await WorkflowRequestModel.findOne({ requestId: requestId });
|
||||||
|
if (!workflow) {
|
||||||
|
// Fallback to requestNumber
|
||||||
|
workflow = await WorkflowRequestModel.findOne({ requestNumber: requestId });
|
||||||
|
}
|
||||||
|
if (!workflow) {
|
||||||
|
// Fallback to _id
|
||||||
|
workflow = await WorkflowRequestModel.findById(requestId);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!workflow) {
|
||||||
|
logger.warn(`[TAT Processor] Workflow ${requestId} not found`);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const requestNumber = workflow.requestNumber;
|
||||||
|
const title = workflow.title;
|
||||||
|
|
||||||
|
let message = '';
|
||||||
|
let activityDetails = '';
|
||||||
|
let thresholdPercentage: number = threshold;
|
||||||
|
let alertType: 'TAT_50' | 'TAT_75' | 'TAT_100' = 'TAT_50';
|
||||||
|
|
||||||
|
// Check if level is paused
|
||||||
|
if (approvalLevel.paused?.isPaused) {
|
||||||
|
logger.info(`[TAT Processor] Skipping ${type} notification - level ${levelId} is paused`);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const tatHours = Number(approvalLevel.tat?.assignedHours || 0);
|
||||||
|
const levelStartTime = approvalLevel.createdAt || new Date(); // Fallback
|
||||||
|
// Or check if approvalLevel has a specific tatStartTime
|
||||||
|
// Schema has 'tat.startTime'
|
||||||
|
const actualStartTime = approvalLevel.tat?.startTime || levelStartTime;
|
||||||
|
|
||||||
|
const now = new Date();
|
||||||
|
|
||||||
|
const priority = (workflow.priority || 'STANDARD').toString().toLowerCase();
|
||||||
|
|
||||||
|
// Check pause info
|
||||||
|
const isCurrentlyPaused = approvalLevel.paused?.isPaused === true;
|
||||||
|
const wasResumed = !isCurrentlyPaused &&
|
||||||
|
(approvalLevel.paused?.elapsedHoursBeforePause !== undefined && approvalLevel.paused?.elapsedHoursBeforePause !== null) &&
|
||||||
|
(approvalLevel.paused?.resumedAt !== undefined && approvalLevel.paused?.resumedAt !== null);
|
||||||
|
|
||||||
|
const pauseInfo = isCurrentlyPaused ? {
|
||||||
|
isPaused: true,
|
||||||
|
pausedAt: approvalLevel.paused?.pausedAt,
|
||||||
|
pauseElapsedHours: approvalLevel.paused?.elapsedHoursBeforePause,
|
||||||
|
pauseResumeDate: approvalLevel.paused?.resumedAt // Might be null
|
||||||
|
} : wasResumed ? {
|
||||||
|
isPaused: false,
|
||||||
|
pausedAt: null,
|
||||||
|
pauseElapsedHours: Number(approvalLevel.paused?.elapsedHoursBeforePause),
|
||||||
|
pauseResumeDate: approvalLevel.paused?.resumedAt
|
||||||
|
} : undefined;
|
||||||
|
|
||||||
|
const elapsedHours = await calculateElapsedWorkingHours(approvalLevel.createdAt, now, priority, pauseInfo);
|
||||||
|
let remainingHours = Math.max(0, tatHours - elapsedHours);
|
||||||
|
|
||||||
|
const expectedCompletionTime = priority === 'express'
|
||||||
|
? (await addWorkingHoursExpress(actualStartTime, tatHours)).toDate()
|
||||||
|
: (await addWorkingHours(actualStartTime, tatHours)).toDate();
|
||||||
|
|
||||||
|
switch (type) {
|
||||||
|
case 'threshold1':
|
||||||
|
alertType = 'TAT_50';
|
||||||
|
thresholdPercentage = threshold;
|
||||||
|
message = `${threshold}% of TAT elapsed for Request ${requestNumber}: ${title}`;
|
||||||
|
activityDetails = `${threshold}% of TAT time has elapsed`;
|
||||||
|
|
||||||
|
await ApprovalLevelModel.updateOne(
|
||||||
|
{ _id: levelId },
|
||||||
|
{
|
||||||
|
'alerts.fiftyPercentSent': true,
|
||||||
|
// We can store generic TAT stats here if schema supports it, for now rely on alerts flag
|
||||||
|
'tat.actualParams.elapsedHours': elapsedHours
|
||||||
|
}
|
||||||
|
);
|
||||||
|
break;
|
||||||
|
|
||||||
|
case 'threshold2':
|
||||||
|
alertType = 'TAT_75';
|
||||||
|
thresholdPercentage = threshold;
|
||||||
|
message = `${threshold}% of TAT elapsed for Request ${requestNumber}: ${title}. Please take action soon.`;
|
||||||
|
activityDetails = `${threshold}% of TAT time has elapsed - Escalation warning`;
|
||||||
|
|
||||||
|
await ApprovalLevelModel.updateOne(
|
||||||
|
{ _id: levelId },
|
||||||
|
{
|
||||||
|
'alerts.seventyFivePercentSent': true,
|
||||||
|
'tat.actualParams.elapsedHours': elapsedHours
|
||||||
|
}
|
||||||
|
);
|
||||||
|
break;
|
||||||
|
|
||||||
|
case 'breach':
|
||||||
|
alertType = 'TAT_100';
|
||||||
|
thresholdPercentage = 100;
|
||||||
|
message = `TAT breached for Request ${requestNumber}: ${title}. Immediate action required!`;
|
||||||
|
activityDetails = 'TAT deadline reached - Breach notification';
|
||||||
|
remainingHours = 0;
|
||||||
|
|
||||||
|
await ApprovalLevelModel.updateOne(
|
||||||
|
{ _id: levelId },
|
||||||
|
{
|
||||||
|
'tat.isBreached': true,
|
||||||
|
'tat.actualParams.elapsedHours': elapsedHours
|
||||||
|
}
|
||||||
|
);
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create TAT Alert (Mongo)
|
||||||
|
try {
|
||||||
|
await TatAlertModel.create({
|
||||||
|
requestId: workflow.requestId, // Standardized to UUID
|
||||||
|
levelId,
|
||||||
|
approverId,
|
||||||
|
alertType,
|
||||||
|
thresholdPercentage,
|
||||||
|
tatHoursAllocated: tatHours,
|
||||||
|
tatHoursElapsed: elapsedHours,
|
||||||
|
tatHoursRemaining: remainingHours,
|
||||||
|
levelStartTime: actualStartTime,
|
||||||
|
alertSentAt: now,
|
||||||
|
expectedCompletionTime,
|
||||||
|
alertMessage: message,
|
||||||
|
notificationSent: true,
|
||||||
|
notificationChannels: ['push'],
|
||||||
|
isBreached: type === 'breach',
|
||||||
|
metadata: {
|
||||||
|
requestNumber,
|
||||||
|
requestTitle: title,
|
||||||
|
approverName: approvalLevel.approver?.name,
|
||||||
|
priority: priority,
|
||||||
|
levelNumber: approvalLevel.levelNumber
|
||||||
|
}
|
||||||
|
});
|
||||||
|
logger.info(`[TAT Processor] ✅ Alert created: ${type} (${threshold}%)`);
|
||||||
|
} catch (alertError: any) {
|
||||||
|
logger.error(`[TAT Processor] ❌ Alert creation failed: ${alertError.message}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
const notificationPriority =
|
||||||
|
type === 'breach' ? 'URGENT' :
|
||||||
|
type === 'threshold2' ? 'HIGH' :
|
||||||
|
'MEDIUM';
|
||||||
|
|
||||||
|
const timeRemainingText = remainingHours > 0
|
||||||
|
? `${remainingHours.toFixed(1)} hours remaining`
|
||||||
|
: type === 'breach'
|
||||||
|
? `${Math.abs(remainingHours).toFixed(1)} hours overdue`
|
||||||
|
: 'Time exceeded';
|
||||||
|
|
||||||
|
// Notification
|
||||||
|
try {
|
||||||
|
await notificationMongoService.sendToUsers([approverId], {
|
||||||
|
title: type === 'breach' ? 'TAT Breach Alert' : 'TAT Reminder',
|
||||||
|
body: message,
|
||||||
|
requestId: workflow.requestId, // Standardized to UUID
|
||||||
|
requestNumber,
|
||||||
|
url: `/request/${requestNumber}`,
|
||||||
|
type: type,
|
||||||
|
priority: notificationPriority as any,
|
||||||
|
actionRequired: type === 'breach' || type === 'threshold2',
|
||||||
|
metadata: {
|
||||||
|
thresholdPercentage,
|
||||||
|
tatInfo: {
|
||||||
|
thresholdPercentage,
|
||||||
|
timeRemaining: timeRemainingText,
|
||||||
|
tatDeadline: expectedCompletionTime,
|
||||||
|
assignedDate: actualStartTime,
|
||||||
|
timeOverdue: type === 'breach' ? timeRemainingText : undefined
|
||||||
|
}
|
||||||
|
}
|
||||||
|
});
|
||||||
|
logger.info(`[TAT Processor] ✅ Notification sent to approver ${approverId}`);
|
||||||
|
} catch (notificationError: any) {
|
||||||
|
logger.error(`[TAT Processor] ❌ Failed to send notification: ${notificationError.message}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Breach initiator notification
|
||||||
|
if (type === 'breach') {
|
||||||
|
const initiatorId = workflow.initiator?.userId;
|
||||||
|
if (initiatorId && initiatorId !== approverId) {
|
||||||
|
try {
|
||||||
|
await notificationMongoService.sendToUsers([initiatorId], {
|
||||||
|
title: 'TAT Breach - Request Delayed',
|
||||||
|
body: `Your request ${requestNumber}: "${title}" has exceeded its TAT.`,
|
||||||
|
requestId: workflow.requestId, // Standardized to UUID
|
||||||
|
requestNumber,
|
||||||
|
type: 'tat_breach_initiator',
|
||||||
|
priority: 'HIGH'
|
||||||
|
});
|
||||||
|
} catch (e) {
|
||||||
|
logger.error('Initiator notification failed', e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Activity Log
|
||||||
|
try {
|
||||||
|
// System user handling might differ in Mongo logic. Passing userId: 'system' is fine usually.
|
||||||
|
await activityMongoService.log({
|
||||||
|
requestId: workflow.requestId, // Standardized to UUID
|
||||||
|
type: 'sla_warning',
|
||||||
|
user: { userId: 'system', name: 'System' },
|
||||||
|
timestamp: new Date().toISOString(),
|
||||||
|
action: type === 'breach' ? 'TAT Breached' : 'TAT Warning',
|
||||||
|
details: activityDetails,
|
||||||
|
category: 'SYSTEM',
|
||||||
|
severity: type === 'breach' ? 'ERROR' : 'WARNING'
|
||||||
|
});
|
||||||
|
} catch (e) {
|
||||||
|
logger.warn('Activity log failed', e);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Socket Emit
|
||||||
|
try {
|
||||||
|
const { emitToRequestRoom } = require('../realtime/socket');
|
||||||
|
if (emitToRequestRoom) {
|
||||||
|
// Fetch latest alert
|
||||||
|
const newAlert = await TatAlertModel.findOne({
|
||||||
|
requestId: workflow.requestId, levelId: levelId, alertType
|
||||||
|
}).sort({ createdAt: -1 });
|
||||||
|
|
||||||
|
if (newAlert) {
|
||||||
|
emitToRequestRoom(workflow.requestId, 'tat:alert', {
|
||||||
|
alert: newAlert.toJSON(),
|
||||||
|
requestId: workflow.requestId,
|
||||||
|
levelId,
|
||||||
|
type,
|
||||||
|
thresholdPercentage,
|
||||||
|
message
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} catch (e) {
|
||||||
|
logger.warn('Socket emit failed', e);
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.info(`[TAT Processor] ✅ ${type} processed`);
|
||||||
|
|
||||||
|
} catch (error) {
|
||||||
|
logger.error(`[TAT Processor] Failed to process ${type}:`, error);
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
|
}
|
||||||
@ -1,6 +1,6 @@
|
|||||||
import { Worker } from 'bullmq';
|
import { Worker } from 'bullmq';
|
||||||
import { sharedRedisConnection } from './redisConnection';
|
import { sharedRedisConnection } from './redisConnection';
|
||||||
import { handleTatJob } from './tatProcessor';
|
import { handleTatJob } from './tatProcessor.mongo';
|
||||||
import logger from '@utils/logger';
|
import logger from '@utils/logger';
|
||||||
|
|
||||||
let tatWorker: Worker | null = null;
|
let tatWorker: Worker | null = null;
|
||||||
|
|||||||
@ -132,5 +132,3 @@ export function emitToUser(userId: string, event: string, payload: any) {
|
|||||||
if (!io) return;
|
if (!io) return;
|
||||||
io.to(`user:${userId}`).emit(event, payload);
|
io.to(`user:${userId}`).emit(event, payload);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@ -12,11 +12,11 @@ import multer from 'multer';
|
|||||||
import path from 'path';
|
import path from 'path';
|
||||||
import crypto from 'crypto';
|
import crypto from 'crypto';
|
||||||
import { ensureUploadDir, UPLOAD_DIR } from '../config/storage';
|
import { ensureUploadDir, UPLOAD_DIR } from '../config/storage';
|
||||||
import { notificationService } from '../services/notification.service';
|
import { notificationMongoService as notificationService } from '../services/notification.service';
|
||||||
import { Activity } from '@models/Activity';
|
import { Activity } from '@models/Activity';
|
||||||
import { WorkflowService } from '../services/workflow.service';
|
import { WorkflowServiceMongo } from '../services/workflow.service';
|
||||||
import { WorkNoteController } from '../controllers/worknote.controller';
|
import { WorkNoteController } from '../controllers/worknote.controller';
|
||||||
import { workNoteService } from '../services/worknote.service';
|
import { workNoteMongoService as workNoteService } from '../services/worknote.service';
|
||||||
import { pauseController } from '../controllers/pause.controller';
|
import { pauseController } from '../controllers/pause.controller';
|
||||||
import logger from '@utils/logger';
|
import logger from '@utils/logger';
|
||||||
|
|
||||||
@ -195,12 +195,17 @@ router.get('/:id/activity',
|
|||||||
authenticateToken,
|
authenticateToken,
|
||||||
validateParams(workflowParamsSchema),
|
validateParams(workflowParamsSchema),
|
||||||
asyncHandler(async (req: any, res: Response): Promise<void> => {
|
asyncHandler(async (req: any, res: Response): Promise<void> => {
|
||||||
// Resolve requestId UUID from identifier
|
// Resolve requestId UUID from identifier (supports both requestNumber and requestId)
|
||||||
const workflowService = new WorkflowService();
|
const workflowService = new WorkflowServiceMongo();
|
||||||
const wf = await (workflowService as any).findWorkflowByIdentifier(req.params.id);
|
const workflow = await workflowService.getRequest(req.params.id);
|
||||||
if (!wf) { res.status(404).json({ success: false, error: 'Not found' }); return; }
|
if (!workflow) {
|
||||||
const requestId: string = wf.getDataValue('requestId');
|
res.status(404).json({ success: false, error: 'Workflow not found' });
|
||||||
const rows = await Activity.findAll({ where: { requestId }, order: [['created_at', 'ASC']] as any });
|
return;
|
||||||
|
}
|
||||||
|
const requestId: string = workflow.requestId;
|
||||||
|
|
||||||
|
const { ActivityModel } = require('../models/mongoose/Activity.schema');
|
||||||
|
const rows = await ActivityModel.find({ requestId }).sort({ createdAt: 1 });
|
||||||
res.json({ success: true, data: rows });
|
res.json({ success: true, data: rows });
|
||||||
return;
|
return;
|
||||||
})
|
})
|
||||||
@ -221,16 +226,15 @@ router.post('/:id/work-notes',
|
|||||||
asyncHandler(workNoteController.create.bind(workNoteController))
|
asyncHandler(workNoteController.create.bind(workNoteController))
|
||||||
);
|
);
|
||||||
|
|
||||||
// Preview workflow document
|
|
||||||
router.get('/documents/:documentId/preview',
|
router.get('/documents/:documentId/preview',
|
||||||
authenticateToken,
|
authenticateToken,
|
||||||
asyncHandler(async (req: any, res: Response) => {
|
asyncHandler(async (req: any, res: Response) => {
|
||||||
const { documentId } = req.params;
|
const { documentId } = req.params;
|
||||||
const { Document } = require('@models/Document');
|
const { DocumentModel } = require('../models/mongoose/Document.schema');
|
||||||
const { gcsStorageService } = require('../services/gcsStorage.service');
|
const { gcsStorageService } = require('../services/gcsStorage.service');
|
||||||
const fs = require('fs');
|
const fs = require('fs');
|
||||||
|
|
||||||
const document = await Document.findOne({ where: { documentId } });
|
const document = await DocumentModel.findOne({ documentId });
|
||||||
if (!document) {
|
if (!document) {
|
||||||
res.status(404).json({ success: false, error: 'Document not found' });
|
res.status(404).json({ success: false, error: 'Document not found' });
|
||||||
return;
|
return;
|
||||||
@ -415,11 +419,11 @@ router.get('/documents/:documentId/download',
|
|||||||
authenticateToken,
|
authenticateToken,
|
||||||
asyncHandler(async (req: any, res: Response) => {
|
asyncHandler(async (req: any, res: Response) => {
|
||||||
const { documentId } = req.params;
|
const { documentId } = req.params;
|
||||||
const { Document } = require('@models/Document');
|
const { DocumentModel } = require('../models/mongoose/Document.schema');
|
||||||
const { gcsStorageService } = require('../services/gcsStorage.service');
|
const { gcsStorageService } = require('../services/gcsStorage.service');
|
||||||
const fs = require('fs');
|
const fs = require('fs');
|
||||||
|
|
||||||
const document = await Document.findOne({ where: { documentId } });
|
const document = await DocumentModel.findOne({ documentId });
|
||||||
if (!document) {
|
if (!document) {
|
||||||
res.status(404).json({ success: false, error: 'Document not found' });
|
res.status(404).json({ success: false, error: 'Document not found' });
|
||||||
return;
|
return;
|
||||||
@ -730,13 +734,13 @@ router.post('/:id/participants/approver',
|
|||||||
authenticateToken,
|
authenticateToken,
|
||||||
validateParams(workflowParamsSchema),
|
validateParams(workflowParamsSchema),
|
||||||
asyncHandler(async (req: any, res: Response) => {
|
asyncHandler(async (req: any, res: Response) => {
|
||||||
const workflowService = new WorkflowService();
|
const workflowService = new WorkflowServiceMongo();
|
||||||
const wf = await (workflowService as any).findWorkflowByIdentifier(req.params.id);
|
const workflow = await workflowService.getRequest(req.params.id);
|
||||||
if (!wf) {
|
if (!workflow) {
|
||||||
res.status(404).json({ success: false, error: 'Workflow not found' });
|
res.status(404).json({ success: false, error: 'Workflow not found' });
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
const requestId: string = wf.getDataValue('requestId');
|
const requestId: string = workflow.requestId;
|
||||||
const { email } = req.body;
|
const { email } = req.body;
|
||||||
|
|
||||||
if (!email) {
|
if (!email) {
|
||||||
@ -753,13 +757,13 @@ router.post('/:id/participants/spectator',
|
|||||||
authenticateToken,
|
authenticateToken,
|
||||||
validateParams(workflowParamsSchema),
|
validateParams(workflowParamsSchema),
|
||||||
asyncHandler(async (req: any, res: Response) => {
|
asyncHandler(async (req: any, res: Response) => {
|
||||||
const workflowService = new WorkflowService();
|
const workflowService = new WorkflowServiceMongo();
|
||||||
const wf = await (workflowService as any).findWorkflowByIdentifier(req.params.id);
|
const workflow = await workflowService.getRequest(req.params.id);
|
||||||
if (!wf) {
|
if (!workflow) {
|
||||||
res.status(404).json({ success: false, error: 'Workflow not found' });
|
res.status(404).json({ success: false, error: 'Workflow not found' });
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
const requestId: string = wf.getDataValue('requestId');
|
const requestId: string = workflow.requestId;
|
||||||
const { email } = req.body;
|
const { email } = req.body;
|
||||||
|
|
||||||
if (!email) {
|
if (!email) {
|
||||||
@ -778,13 +782,13 @@ router.post('/:id/approvals/:levelId/skip',
|
|||||||
requireParticipantTypes(['INITIATOR', 'APPROVER']), // Only initiator or other approvers can skip
|
requireParticipantTypes(['INITIATOR', 'APPROVER']), // Only initiator or other approvers can skip
|
||||||
validateParams(approvalParamsSchema),
|
validateParams(approvalParamsSchema),
|
||||||
asyncHandler(async (req: any, res: Response) => {
|
asyncHandler(async (req: any, res: Response) => {
|
||||||
const workflowService = new WorkflowService();
|
const workflowService = new WorkflowServiceMongo();
|
||||||
const wf = await (workflowService as any).findWorkflowByIdentifier(req.params.id);
|
const workflow = await workflowService.getRequest(req.params.id);
|
||||||
if (!wf) {
|
if (!workflow) {
|
||||||
res.status(404).json({ success: false, error: 'Workflow not found' });
|
res.status(404).json({ success: false, error: 'Workflow not found' });
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
const requestId: string = wf.getDataValue('requestId');
|
const requestId: string = workflow.requestId;
|
||||||
const { levelId } = req.params;
|
const { levelId } = req.params;
|
||||||
const { reason } = req.body;
|
const { reason } = req.body;
|
||||||
|
|
||||||
@ -809,13 +813,19 @@ router.post('/:id/approvers/at-level',
|
|||||||
requireParticipantTypes(['INITIATOR', 'APPROVER']), // Only initiator or approvers can add new approvers
|
requireParticipantTypes(['INITIATOR', 'APPROVER']), // Only initiator or approvers can add new approvers
|
||||||
validateParams(workflowParamsSchema),
|
validateParams(workflowParamsSchema),
|
||||||
asyncHandler(async (req: any, res: Response) => {
|
asyncHandler(async (req: any, res: Response) => {
|
||||||
const workflowService = new WorkflowService();
|
console.log('[DEBUG] Add approver at level - identifier:', req.params.id);
|
||||||
const wf = await (workflowService as any).findWorkflowByIdentifier(req.params.id);
|
const workflowService = new WorkflowServiceMongo();
|
||||||
if (!wf) {
|
const workflow = await workflowService.getRequest(req.params.id);
|
||||||
|
console.log('[DEBUG] Workflow lookup result:', {
|
||||||
|
found: !!workflow,
|
||||||
|
requestId: workflow?.requestId,
|
||||||
|
requestNumber: workflow?.requestNumber
|
||||||
|
});
|
||||||
|
if (!workflow) {
|
||||||
res.status(404).json({ success: false, error: 'Workflow not found' });
|
res.status(404).json({ success: false, error: 'Workflow not found' });
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
const requestId: string = wf.getDataValue('requestId');
|
const requestId: string = workflow.requestId;
|
||||||
const { email, tatHours, level } = req.body;
|
const { email, tatHours, level } = req.body;
|
||||||
|
|
||||||
if (!email || !tatHours || !level) {
|
if (!email || !tatHours || !level) {
|
||||||
|
|||||||
@ -1,4 +1,3 @@
|
|||||||
|
|
||||||
import { sequelize } from '../config/database';
|
import { sequelize } from '../config/database';
|
||||||
|
|
||||||
async function run() {
|
async function run() {
|
||||||
|
|||||||
197
src/scripts/migrate-flatten-schema.ts
Normal file
197
src/scripts/migrate-flatten-schema.ts
Normal file
@ -0,0 +1,197 @@
|
|||||||
|
import mongoose from 'mongoose';
|
||||||
|
import { WorkflowRequestModel } from '../models/mongoose/WorkflowRequest.schema';
|
||||||
|
import logger from '../utils/logger';
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Migration Script: Flatten WorkflowRequest Schema
|
||||||
|
*
|
||||||
|
* This script migrates existing WorkflowRequest documents from nested structure
|
||||||
|
* (dates, flags, conclusion objects) to flattened root-level fields.
|
||||||
|
*
|
||||||
|
* Run this script ONCE after deploying the new schema.
|
||||||
|
*/
|
||||||
|
|
||||||
|
async function migrateWorkflowRequests() {
|
||||||
|
try {
|
||||||
|
logger.info('[Migration] Starting WorkflowRequest schema flattening migration...');
|
||||||
|
|
||||||
|
// Find all workflow requests with the old nested structure
|
||||||
|
const workflows = await WorkflowRequestModel.find({}).lean();
|
||||||
|
|
||||||
|
logger.info(`[Migration] Found ${workflows.length} workflow requests to migrate`);
|
||||||
|
|
||||||
|
let migrated = 0;
|
||||||
|
let skipped = 0;
|
||||||
|
let errors = 0;
|
||||||
|
|
||||||
|
for (const workflow of workflows) {
|
||||||
|
try {
|
||||||
|
const updateData: any = {};
|
||||||
|
|
||||||
|
// Migrate dates fields
|
||||||
|
if ((workflow as any).dates) {
|
||||||
|
const dates = (workflow as any).dates;
|
||||||
|
if (dates.submission) updateData.submissionDate = dates.submission;
|
||||||
|
if (dates.closure) updateData.closureDate = dates.closure;
|
||||||
|
if (dates.created) updateData.createdAt = dates.created;
|
||||||
|
if (dates.updated) updateData.updatedAt = dates.updated;
|
||||||
|
|
||||||
|
// Remove old nested dates field
|
||||||
|
updateData.$unset = { dates: 1 };
|
||||||
|
}
|
||||||
|
|
||||||
|
// Migrate flags fields
|
||||||
|
if ((workflow as any).flags) {
|
||||||
|
const flags = (workflow as any).flags;
|
||||||
|
if (flags.isDraft !== undefined) updateData.isDraft = flags.isDraft;
|
||||||
|
if (flags.isDeleted !== undefined) updateData.isDeleted = flags.isDeleted;
|
||||||
|
if (flags.isPaused !== undefined) updateData.isPaused = flags.isPaused;
|
||||||
|
|
||||||
|
// Remove old nested flags field
|
||||||
|
if (!updateData.$unset) updateData.$unset = {};
|
||||||
|
updateData.$unset.flags = 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Migrate conclusion fields
|
||||||
|
if ((workflow as any).conclusion) {
|
||||||
|
const conclusion = (workflow as any).conclusion;
|
||||||
|
if (conclusion.remark) updateData.conclusionRemark = conclusion.remark;
|
||||||
|
if (conclusion.aiGenerated) updateData.aiGeneratedConclusion = conclusion.aiGenerated;
|
||||||
|
|
||||||
|
// Remove old nested conclusion field
|
||||||
|
if (!updateData.$unset) updateData.$unset = {};
|
||||||
|
updateData.$unset.conclusion = 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Only update if there are changes
|
||||||
|
if (Object.keys(updateData).length > 0) {
|
||||||
|
await WorkflowRequestModel.updateOne(
|
||||||
|
{ _id: workflow._id },
|
||||||
|
updateData
|
||||||
|
);
|
||||||
|
migrated++;
|
||||||
|
|
||||||
|
if (migrated % 100 === 0) {
|
||||||
|
logger.info(`[Migration] Progress: ${migrated}/${workflows.length} migrated`);
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
skipped++;
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
errors++;
|
||||||
|
logger.error(`[Migration] Error migrating workflow ${workflow.requestNumber}:`, error);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.info('[Migration] Migration completed!');
|
||||||
|
logger.info(`[Migration] Summary: ${migrated} migrated, ${skipped} skipped, ${errors} errors`);
|
||||||
|
|
||||||
|
return { migrated, skipped, errors };
|
||||||
|
} catch (error) {
|
||||||
|
logger.error('[Migration] Migration failed:', error);
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rollback function (if needed)
|
||||||
|
* This can be used to revert the migration if something goes wrong
|
||||||
|
*/
|
||||||
|
async function rollbackMigration() {
|
||||||
|
try {
|
||||||
|
logger.info('[Migration] Starting rollback...');
|
||||||
|
|
||||||
|
const workflows = await WorkflowRequestModel.find({}).lean();
|
||||||
|
|
||||||
|
let rolledBack = 0;
|
||||||
|
|
||||||
|
for (const workflow of workflows) {
|
||||||
|
try {
|
||||||
|
const updateData: any = {};
|
||||||
|
|
||||||
|
// Rebuild nested dates object
|
||||||
|
if ((workflow as any).submissionDate || (workflow as any).closureDate ||
|
||||||
|
(workflow as any).createdAt || (workflow as any).updatedAt) {
|
||||||
|
updateData.dates = {
|
||||||
|
submission: (workflow as any).submissionDate,
|
||||||
|
closure: (workflow as any).closureDate,
|
||||||
|
created: (workflow as any).createdAt,
|
||||||
|
updated: (workflow as any).updatedAt
|
||||||
|
};
|
||||||
|
updateData.$unset = {
|
||||||
|
submissionDate: 1,
|
||||||
|
closureDate: 1
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
// Rebuild nested flags object
|
||||||
|
if ((workflow as any).isDraft !== undefined || (workflow as any).isDeleted !== undefined ||
|
||||||
|
(workflow as any).isPaused !== undefined) {
|
||||||
|
updateData.flags = {
|
||||||
|
isDraft: (workflow as any).isDraft || false,
|
||||||
|
isDeleted: (workflow as any).isDeleted || false,
|
||||||
|
isPaused: (workflow as any).isPaused || false
|
||||||
|
};
|
||||||
|
if (!updateData.$unset) updateData.$unset = {};
|
||||||
|
updateData.$unset.isDraft = 1;
|
||||||
|
updateData.$unset.isDeleted = 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Rebuild nested conclusion object
|
||||||
|
if ((workflow as any).conclusionRemark || (workflow as any).aiGeneratedConclusion) {
|
||||||
|
updateData.conclusion = {
|
||||||
|
remark: (workflow as any).conclusionRemark,
|
||||||
|
aiGenerated: (workflow as any).aiGeneratedConclusion
|
||||||
|
};
|
||||||
|
if (!updateData.$unset) updateData.$unset = {};
|
||||||
|
updateData.$unset.conclusionRemark = 1;
|
||||||
|
updateData.$unset.aiGeneratedConclusion = 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (Object.keys(updateData).length > 0) {
|
||||||
|
await WorkflowRequestModel.updateOne(
|
||||||
|
{ _id: workflow._id },
|
||||||
|
updateData
|
||||||
|
);
|
||||||
|
rolledBack++;
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
logger.error(`[Migration] Error rolling back workflow ${workflow.requestNumber}:`, error);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.info(`[Migration] Rollback completed! ${rolledBack} workflows reverted`);
|
||||||
|
return { rolledBack };
|
||||||
|
} catch (error) {
|
||||||
|
logger.error('[Migration] Rollback failed:', error);
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Export functions
|
||||||
|
export { migrateWorkflowRequests, rollbackMigration };
|
||||||
|
|
||||||
|
// If running directly
|
||||||
|
if (require.main === module) {
|
||||||
|
const command = process.argv[2];
|
||||||
|
|
||||||
|
const mongoUri = process.env.MONGO_URI || process.env.MONGODB_URL || 'mongodb://localhost:27017/re_workflow_db';
|
||||||
|
mongoose.connect(mongoUri)
|
||||||
|
.then(async () => {
|
||||||
|
logger.info('[Migration] Connected to MongoDB');
|
||||||
|
|
||||||
|
if (command === 'rollback') {
|
||||||
|
await rollbackMigration();
|
||||||
|
} else {
|
||||||
|
await migrateWorkflowRequests();
|
||||||
|
}
|
||||||
|
|
||||||
|
await mongoose.disconnect();
|
||||||
|
logger.info('[Migration] Disconnected from MongoDB');
|
||||||
|
process.exit(0);
|
||||||
|
})
|
||||||
|
.catch((error) => {
|
||||||
|
logger.error('[Migration] Failed:', error);
|
||||||
|
process.exit(1);
|
||||||
|
});
|
||||||
|
}
|
||||||
769
src/scripts/migrate-postgres-to-mongo.ts
Normal file
769
src/scripts/migrate-postgres-to-mongo.ts
Normal file
@ -0,0 +1,769 @@
|
|||||||
|
import { sequelize, connectMongoDB } from '../config/database';
|
||||||
|
import { User as SqlUser } from '../models/User';
|
||||||
|
import { WorkflowRequest as SqlWorkflowRequest } from '../models/WorkflowRequest';
|
||||||
|
import { ApprovalLevel as SqlApprovalLevel } from '../models/ApprovalLevel';
|
||||||
|
import { Participant as SqlParticipant } from '../models/Participant';
|
||||||
|
import { Document as SqlDocument } from '../models/Document';
|
||||||
|
import { WorkNote as SqlWorkNote } from '../models/WorkNote';
|
||||||
|
import { WorkNoteAttachment as SqlWorkNoteAttachment } from '../models/WorkNoteAttachment';
|
||||||
|
import { Activity as SqlActivity } from '../models/Activity';
|
||||||
|
|
||||||
|
// Phase 6 SQL Models
|
||||||
|
import { WorkflowTemplate as SqlWorkflowTemplate } from '../models/WorkflowTemplate';
|
||||||
|
import { Holiday as SqlHoliday } from '../models/Holiday';
|
||||||
|
import { TatAlert as SqlTatAlert } from '../models/TatAlert';
|
||||||
|
import SqlRequestSummary from '../models/RequestSummary';
|
||||||
|
import SqlSharedSummary from '../models/SharedSummary';
|
||||||
|
|
||||||
|
// Phase 7 SQL Models
|
||||||
|
import { Dealer as SqlDealer } from '../models/Dealer';
|
||||||
|
import { DealerClaimDetails as SqlDealerClaimDetails } from '../models/DealerClaimDetails';
|
||||||
|
import { DealerProposalDetails as SqlDealerProposalDetails } from '../models/DealerProposalDetails';
|
||||||
|
import { DealerProposalCostItem as SqlDealerProposalCostItem } from '../models/DealerProposalCostItem';
|
||||||
|
import { DealerCompletionDetails as SqlDealerCompletionDetails } from '../models/DealerCompletionDetails';
|
||||||
|
import { DealerCompletionExpense as SqlDealerCompletionExpense } from '../models/DealerCompletionExpense';
|
||||||
|
import { ClaimBudgetTracking as SqlClaimBudgetTracking } from '../models/ClaimBudgetTracking';
|
||||||
|
import { ClaimInvoice as SqlClaimInvoice } from '../models/ClaimInvoice';
|
||||||
|
import { ClaimCreditNote as SqlClaimCreditNote } from '../models/ClaimCreditNote';
|
||||||
|
|
||||||
|
|
||||||
|
import { UserModel } from '../models/mongoose/User.schema';
|
||||||
|
import { WorkflowRequestModel } from '../models/mongoose/WorkflowRequest.schema';
|
||||||
|
import { ParticipantModel } from '../models/mongoose/Participant.schema';
|
||||||
|
import { ApprovalLevelModel } from '../models/mongoose/ApprovalLevel.schema';
|
||||||
|
import { DocumentModel } from '../models/mongoose/Document.schema';
|
||||||
|
import { WorkNoteModel } from '../models/mongoose/WorkNote.schema';
|
||||||
|
import { ActivityModel } from '../models/mongoose/Activity.schema';
|
||||||
|
|
||||||
|
// Phase 6 Mongo Models
|
||||||
|
import { WorkflowTemplateModel } from '../models/mongoose/WorkflowTemplate.schema';
|
||||||
|
import { HolidayModel } from '../models/mongoose/Holiday.schema';
|
||||||
|
import { TatAlertModel } from '../models/mongoose/TatAlert.schema';
|
||||||
|
import { RequestSummaryModel } from '../models/mongoose/RequestSummary.schema';
|
||||||
|
|
||||||
|
// Phase 7 Mongo Models
|
||||||
|
import { DealerModel } from '../models/mongoose/Dealer.schema';
|
||||||
|
import { DealerClaimModel } from '../models/mongoose/DealerClaim.schema';
|
||||||
|
|
||||||
|
import logger from '../utils/logger';
|
||||||
|
|
||||||
|
// Batch size for processing
|
||||||
|
const BATCH_SIZE = 100;
|
||||||
|
|
||||||
|
const migrateUsers = async () => {
|
||||||
|
logger.info('🚀 Starting User Migration...');
|
||||||
|
let offset = 0;
|
||||||
|
let hasMore = true;
|
||||||
|
let totalMigrated = 0;
|
||||||
|
|
||||||
|
while (hasMore) {
|
||||||
|
const users = await SqlUser.findAll({ limit: BATCH_SIZE, offset, raw: true });
|
||||||
|
if (users.length === 0) break;
|
||||||
|
|
||||||
|
const mongoUsers = users.map((u: any) => ({
|
||||||
|
userId: u.userId,
|
||||||
|
employeeId: u.employeeId,
|
||||||
|
oktaSub: u.oktaSub,
|
||||||
|
email: u.email,
|
||||||
|
firstName: u.firstName,
|
||||||
|
lastName: u.lastName,
|
||||||
|
displayName: u.displayName,
|
||||||
|
department: u.department,
|
||||||
|
designation: u.designation,
|
||||||
|
phone: u.phone,
|
||||||
|
manager: u.manager,
|
||||||
|
secondEmail: u.secondEmail,
|
||||||
|
jobTitle: u.jobTitle,
|
||||||
|
employeeNumber: u.employeeNumber,
|
||||||
|
postalAddress: u.postalAddress,
|
||||||
|
mobilePhone: u.mobilePhone,
|
||||||
|
adGroups: u.adGroups,
|
||||||
|
location: u.location,
|
||||||
|
notifications: { email: u.emailNotificationsEnabled, push: u.pushNotificationsEnabled, inApp: u.inAppNotificationsEnabled },
|
||||||
|
isActive: u.isActive,
|
||||||
|
role: u.role,
|
||||||
|
lastLogin: u.lastLogin,
|
||||||
|
createdAt: u.createdAt,
|
||||||
|
updatedAt: u.updatedAt
|
||||||
|
}));
|
||||||
|
|
||||||
|
await UserModel.bulkWrite(mongoUsers.map(u => ({
|
||||||
|
updateOne: { filter: { userId: u.userId }, update: { $set: u }, upsert: true }
|
||||||
|
})));
|
||||||
|
|
||||||
|
totalMigrated += users.length;
|
||||||
|
offset += BATCH_SIZE;
|
||||||
|
logger.info(`✅ Migrated ${totalMigrated} users...`);
|
||||||
|
}
|
||||||
|
logger.info('✨ User Migration Completed.');
|
||||||
|
};
|
||||||
|
|
||||||
|
const migrateWorkflows = async () => {
|
||||||
|
logger.info('🚀 Starting Workflow Migration (Normalized)...');
|
||||||
|
let offset = 0;
|
||||||
|
let totalMigrated = 0;
|
||||||
|
|
||||||
|
while (true) {
|
||||||
|
const requests = await SqlWorkflowRequest.findAll({
|
||||||
|
limit: BATCH_SIZE,
|
||||||
|
offset,
|
||||||
|
include: [{ model: SqlUser, as: 'initiator' }]
|
||||||
|
});
|
||||||
|
if (requests.length === 0) break;
|
||||||
|
|
||||||
|
const requestIds = requests.map(r => r.requestId);
|
||||||
|
const allParticipants = await SqlParticipant.findAll({ where: { requestId: requestIds } });
|
||||||
|
const allLevels = await SqlApprovalLevel.findAll({ where: { requestId: requestIds }, order: [['levelNumber', 'ASC']] });
|
||||||
|
|
||||||
|
const mongoRequests = [];
|
||||||
|
const mongoParticipants = [];
|
||||||
|
const mongoApprovalLevels = [];
|
||||||
|
|
||||||
|
for (const req of requests) {
|
||||||
|
const r = req.get({ plain: true }) as any;
|
||||||
|
const reqParticipants = allParticipants.filter(p => p.requestId === r.requestId);
|
||||||
|
const reqLevels = allLevels.filter(l => l.requestId === r.requestId);
|
||||||
|
|
||||||
|
for (const p of reqParticipants as any[]) {
|
||||||
|
mongoParticipants.push({
|
||||||
|
requestId: r.requestNumber,
|
||||||
|
userId: p.userId,
|
||||||
|
userEmail: p.userEmail,
|
||||||
|
userName: p.userName,
|
||||||
|
participantType: p.participantType,
|
||||||
|
canComment: p.canComment,
|
||||||
|
canViewDocuments: p.canViewDocuments,
|
||||||
|
canDownloadDocuments: p.canDownloadDocuments,
|
||||||
|
notificationEnabled: p.notificationEnabled,
|
||||||
|
addedBy: p.addedBy,
|
||||||
|
addedAt: p.addedAt || new Date(),
|
||||||
|
isActive: p.isActive
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
for (const l of reqLevels as any[]) {
|
||||||
|
mongoApprovalLevels.push({
|
||||||
|
levelId: l.levelId,
|
||||||
|
requestId: r.requestNumber,
|
||||||
|
levelNumber: l.levelNumber,
|
||||||
|
levelName: l.levelName,
|
||||||
|
approver: { userId: l.approverId, email: l.approverEmail, name: l.approverName },
|
||||||
|
tat: {
|
||||||
|
assignedHours: l.tatHours,
|
||||||
|
assignedDays: l.tatDays,
|
||||||
|
startTime: l.tatStartTime || l.levelStartTime,
|
||||||
|
endTime: l.levelEndTime,
|
||||||
|
elapsedHours: l.elapsedHours,
|
||||||
|
remainingHours: l.remainingHours,
|
||||||
|
percentageUsed: l.tatPercentageUsed,
|
||||||
|
isBreached: l.tatBreached,
|
||||||
|
breachReason: l.breachReason
|
||||||
|
},
|
||||||
|
status: l.status,
|
||||||
|
actionDate: l.actionDate,
|
||||||
|
comments: l.comments,
|
||||||
|
rejectionReason: l.rejectionReason,
|
||||||
|
isFinalApprover: l.isFinalApprover,
|
||||||
|
alerts: { fiftyPercentSent: l.tat50AlertSent, seventyFivePercentSent: l.tat75AlertSent },
|
||||||
|
paused: {
|
||||||
|
isPaused: l.isPaused,
|
||||||
|
pausedAt: l.pausedAt,
|
||||||
|
pausedBy: l.pausedBy,
|
||||||
|
reason: l.pauseReason,
|
||||||
|
resumeDate: l.pauseResumeDate,
|
||||||
|
tatSnapshot: l.pauseTatStartTime
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
mongoRequests.push({
|
||||||
|
requestNumber: r.requestNumber,
|
||||||
|
initiator: {
|
||||||
|
userId: r.initiatorId,
|
||||||
|
email: r.initiator?.email || 'unknown@re.com',
|
||||||
|
name: r.initiator?.displayName || 'Unknown User',
|
||||||
|
department: r.initiator?.department || 'Unassigned'
|
||||||
|
},
|
||||||
|
templateType: r.templateType,
|
||||||
|
workflowType: r.workflowType,
|
||||||
|
templateId: r.templateId,
|
||||||
|
title: r.title,
|
||||||
|
description: r.description,
|
||||||
|
priority: r.priority,
|
||||||
|
status: r.status,
|
||||||
|
currentLevel: r.currentLevel,
|
||||||
|
totalLevels: r.totalLevels,
|
||||||
|
totalTatHours: r.totalTatHours,
|
||||||
|
dates: { submission: r.submissionDate, closure: r.closureDate, created: r.createdAt, updated: r.updatedAt },
|
||||||
|
conclusion: { remark: r.conclusionRemark, aiGenerated: r.aiGeneratedConclusion },
|
||||||
|
flags: { isDraft: r.isDraft, isDeleted: r.isDeleted, isPaused: r.isPaused },
|
||||||
|
pausedData: {
|
||||||
|
pausedAt: r.pausedAt,
|
||||||
|
pausedBy: r.pausedBy,
|
||||||
|
reason: r.pauseReason,
|
||||||
|
resumeDate: r.pauseResumeDate,
|
||||||
|
tatSnapshot: r.pauseTatSnapshot
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
if (mongoRequests.length > 0) {
|
||||||
|
await WorkflowRequestModel.bulkWrite(mongoRequests.map(req => ({
|
||||||
|
updateOne: { filter: { requestNumber: req.requestNumber }, update: { $set: req }, upsert: true }
|
||||||
|
})));
|
||||||
|
}
|
||||||
|
if (mongoParticipants.length > 0) {
|
||||||
|
await ParticipantModel.bulkWrite(mongoParticipants.map(p => ({
|
||||||
|
updateOne: { filter: { requestId: p.requestId, userId: p.userId }, update: { $set: p }, upsert: true }
|
||||||
|
})));
|
||||||
|
}
|
||||||
|
if (mongoApprovalLevels.length > 0) {
|
||||||
|
await ApprovalLevelModel.bulkWrite(mongoApprovalLevels.map(l => ({
|
||||||
|
updateOne: { filter: { requestId: l.requestId, levelNumber: l.levelNumber }, update: { $set: l }, upsert: true }
|
||||||
|
})));
|
||||||
|
}
|
||||||
|
|
||||||
|
totalMigrated += requests.length;
|
||||||
|
offset += BATCH_SIZE;
|
||||||
|
logger.info(`✅ Migrated ${totalMigrated} workflows (with relations)...`);
|
||||||
|
}
|
||||||
|
logger.info('✨ Workflow Migration Completed.');
|
||||||
|
};
|
||||||
|
|
||||||
|
const migrateDocuments = async () => {
|
||||||
|
logger.info('🚀 Starting Document Migration...');
|
||||||
|
let offset = 0;
|
||||||
|
while (true) {
|
||||||
|
const documents = await SqlDocument.findAll({ limit: BATCH_SIZE, offset });
|
||||||
|
if (documents.length === 0) break;
|
||||||
|
|
||||||
|
const requestIds = [...new Set(documents.map((d: any) => d.requestId).filter(Boolean))];
|
||||||
|
const requests = await SqlWorkflowRequest.findAll({ where: { requestId: requestIds }, attributes: ['requestId', 'requestNumber'] });
|
||||||
|
const requestMap = new Map();
|
||||||
|
requests.forEach((r: any) => requestMap.set(r.requestId, r.requestNumber));
|
||||||
|
|
||||||
|
const mongoDocuments = documents.map((d: any) => {
|
||||||
|
const reqNumber = requestMap.get(d.requestId);
|
||||||
|
if (!reqNumber) return null;
|
||||||
|
return {
|
||||||
|
documentId: d.documentId,
|
||||||
|
requestId: reqNumber,
|
||||||
|
uploadedBy: d.uploadedBy,
|
||||||
|
fileName: d.fileName,
|
||||||
|
originalFileName: d.originalFileName,
|
||||||
|
fileType: d.fileType,
|
||||||
|
fileExtension: d.fileExtension,
|
||||||
|
fileSize: d.fileSize,
|
||||||
|
filePath: d.filePath,
|
||||||
|
storageUrl: d.storageUrl,
|
||||||
|
mimeType: d.mimeType,
|
||||||
|
checksum: d.checksum,
|
||||||
|
category: d.category,
|
||||||
|
version: d.version,
|
||||||
|
isDeleted: d.isDeleted,
|
||||||
|
createdAt: d.createdAt,
|
||||||
|
updatedAt: d.updatedAt
|
||||||
|
};
|
||||||
|
}).filter(Boolean);
|
||||||
|
|
||||||
|
if (mongoDocuments.length > 0) {
|
||||||
|
await DocumentModel.bulkWrite(mongoDocuments.map((d: any) => ({
|
||||||
|
updateOne: { filter: { documentId: d.documentId }, update: { $set: d }, upsert: true }
|
||||||
|
})));
|
||||||
|
}
|
||||||
|
offset += BATCH_SIZE;
|
||||||
|
logger.info(`✅ Migrated ${offset} documents...`);
|
||||||
|
}
|
||||||
|
logger.info(`✨ Document Migration Completed.`);
|
||||||
|
};
|
||||||
|
|
||||||
|
const migrateWorkNotes = async () => {
|
||||||
|
logger.info('🚀 Starting WorkNote Migration...');
|
||||||
|
let offset = 0;
|
||||||
|
while (true) {
|
||||||
|
const notes = await SqlWorkNote.findAll({ limit: BATCH_SIZE, offset });
|
||||||
|
if (notes.length === 0) break;
|
||||||
|
|
||||||
|
const requestIds = [...new Set(notes.map((n: any) => n.requestId).filter(Boolean))];
|
||||||
|
const requests = await SqlWorkflowRequest.findAll({ where: { requestId: requestIds }, attributes: ['requestId', 'requestNumber'] });
|
||||||
|
const requestMap = new Map();
|
||||||
|
requests.forEach((r: any) => requestMap.set(r.requestId, r.requestNumber));
|
||||||
|
|
||||||
|
const noteIds = notes.map((n: any) => n.noteId);
|
||||||
|
const attachments = await SqlWorkNoteAttachment.findAll({ where: { noteId: noteIds } });
|
||||||
|
const attachmentMap = new Map();
|
||||||
|
attachments.forEach((a: any) => {
|
||||||
|
if (!attachmentMap.has(a.noteId)) attachmentMap.set(a.noteId, []);
|
||||||
|
attachmentMap.get(a.noteId).push(a);
|
||||||
|
});
|
||||||
|
|
||||||
|
const mongoNotes = notes.map((n: any) => {
|
||||||
|
const reqNumber = requestMap.get(n.requestId);
|
||||||
|
if (!reqNumber) return null;
|
||||||
|
return {
|
||||||
|
noteId: n.noteId,
|
||||||
|
requestId: reqNumber,
|
||||||
|
userId: n.userId,
|
||||||
|
note: n.note,
|
||||||
|
type: n.type,
|
||||||
|
isVisibleToDealer: n.isVisibleToDealer,
|
||||||
|
attachments: (attachmentMap.get(n.noteId) || []).map((a: any) => ({
|
||||||
|
attachmentId: a.attachmentId,
|
||||||
|
fileName: a.fileName,
|
||||||
|
fileUrl: a.fileUrl,
|
||||||
|
fileType: a.fileType
|
||||||
|
})),
|
||||||
|
createdAt: n.createdAt,
|
||||||
|
updatedAt: n.updatedAt
|
||||||
|
};
|
||||||
|
}).filter(Boolean);
|
||||||
|
|
||||||
|
if (mongoNotes.length > 0) {
|
||||||
|
await WorkNoteModel.bulkWrite(mongoNotes.map((n: any) => ({
|
||||||
|
updateOne: { filter: { noteId: n.noteId }, update: { $set: n }, upsert: true }
|
||||||
|
})));
|
||||||
|
}
|
||||||
|
offset += BATCH_SIZE;
|
||||||
|
logger.info(`✅ Migrated ${offset} notes...`);
|
||||||
|
}
|
||||||
|
logger.info(`✨ WorkNote Migration Completed.`);
|
||||||
|
};
|
||||||
|
|
||||||
|
const migrateActivities = async () => {
|
||||||
|
logger.info('🚀 Starting Activity Migration...');
|
||||||
|
let offset = 0;
|
||||||
|
while (true) {
|
||||||
|
const activities = await SqlActivity.findAll({ limit: BATCH_SIZE, offset });
|
||||||
|
if (activities.length === 0) break;
|
||||||
|
|
||||||
|
const requestIds = [...new Set(activities.map((a: any) => a.requestId).filter(Boolean))];
|
||||||
|
const requests = await SqlWorkflowRequest.findAll({ where: { requestId: requestIds }, attributes: ['requestId', 'requestNumber'] });
|
||||||
|
const requestMap = new Map();
|
||||||
|
requests.forEach((r: any) => requestMap.set(r.requestId, r.requestNumber));
|
||||||
|
|
||||||
|
const mongoActivities = activities.map((a: any) => {
|
||||||
|
const reqNumber = requestMap.get(a.requestId);
|
||||||
|
if (!reqNumber) return null;
|
||||||
|
return {
|
||||||
|
activityId: a.activityId,
|
||||||
|
requestId: reqNumber,
|
||||||
|
userId: a.userId,
|
||||||
|
type: a.type,
|
||||||
|
action: a.action,
|
||||||
|
details: a.details,
|
||||||
|
metadata: a.metadata,
|
||||||
|
ipAddress: a.ipAddress,
|
||||||
|
userAgent: a.userAgent,
|
||||||
|
timestamp: a.timestamp
|
||||||
|
};
|
||||||
|
}).filter(Boolean);
|
||||||
|
|
||||||
|
if (mongoActivities.length > 0) {
|
||||||
|
await ActivityModel.bulkWrite(mongoActivities.map((a: any) => ({
|
||||||
|
updateOne: { filter: { activityId: a.activityId }, update: { $set: a }, upsert: true }
|
||||||
|
})));
|
||||||
|
}
|
||||||
|
offset += BATCH_SIZE;
|
||||||
|
logger.info(`✅ Migrated ${offset} activities...`);
|
||||||
|
}
|
||||||
|
logger.info(`✨ Activity Migration Completed.`);
|
||||||
|
};
|
||||||
|
|
||||||
|
// --- PHASE 6 ---
|
||||||
|
|
||||||
|
const migrateTemplates = async () => {
|
||||||
|
logger.info('🚀 Starting Workflow Template Migration...');
|
||||||
|
let offset = 0;
|
||||||
|
while (true) {
|
||||||
|
const templates = await SqlWorkflowTemplate.findAll({ limit: BATCH_SIZE, offset });
|
||||||
|
if (templates.length === 0) break;
|
||||||
|
|
||||||
|
const mongoTemplates = templates.map((t: any) => ({
|
||||||
|
templateId: t.templateId,
|
||||||
|
name: t.name,
|
||||||
|
description: t.description,
|
||||||
|
department: t.department,
|
||||||
|
workflowType: t.workflowType,
|
||||||
|
isActive: t.isActive,
|
||||||
|
version: t.version,
|
||||||
|
stages: t.stages,
|
||||||
|
createdBy: t.createdBy,
|
||||||
|
updatedBy: t.updatedBy,
|
||||||
|
createdAt: t.createdAt,
|
||||||
|
updatedAt: t.updatedAt
|
||||||
|
}));
|
||||||
|
|
||||||
|
if (mongoTemplates.length > 0) {
|
||||||
|
await WorkflowTemplateModel.bulkWrite(mongoTemplates.map((t: any) => ({
|
||||||
|
updateOne: { filter: { templateId: t.templateId }, update: { $set: t }, upsert: true }
|
||||||
|
})));
|
||||||
|
}
|
||||||
|
offset += BATCH_SIZE;
|
||||||
|
logger.info(`✅ Migrated ${offset} templates...`);
|
||||||
|
}
|
||||||
|
logger.info(`✨ Template Migration Completed.`);
|
||||||
|
};
|
||||||
|
|
||||||
|
const migrateHolidays = async () => {
|
||||||
|
logger.info('🚀 Starting Holiday Migration...');
|
||||||
|
let offset = 0;
|
||||||
|
while (true) {
|
||||||
|
const holidays = await SqlHoliday.findAll({ limit: BATCH_SIZE, offset });
|
||||||
|
if (holidays.length === 0) break;
|
||||||
|
|
||||||
|
if (holidays.length > 0) {
|
||||||
|
await HolidayModel.bulkWrite(holidays.map((h: any) => ({
|
||||||
|
updateOne: { filter: { date: h.date }, update: { $set: h }, upsert: true }
|
||||||
|
})));
|
||||||
|
}
|
||||||
|
offset += BATCH_SIZE;
|
||||||
|
logger.info(`✅ Migrated ${offset} holidays...`);
|
||||||
|
}
|
||||||
|
logger.info(`✨ Holiday Migration Completed.`);
|
||||||
|
};
|
||||||
|
|
||||||
|
const migrateTatAlerts = async () => {
|
||||||
|
logger.info('🚀 Starting TAT Alert Migration...');
|
||||||
|
let offset = 0;
|
||||||
|
while (true) {
|
||||||
|
const alerts = await SqlTatAlert.findAll({ limit: BATCH_SIZE, offset });
|
||||||
|
if (alerts.length === 0) break;
|
||||||
|
|
||||||
|
const requestIds = [...new Set(alerts.map((a: any) => a.requestId).filter(Boolean))];
|
||||||
|
const requests = await SqlWorkflowRequest.findAll({ where: { requestId: requestIds }, attributes: ['requestId', 'requestNumber'] });
|
||||||
|
const requestMap = new Map();
|
||||||
|
requests.forEach((r: any) => requestMap.set(r.requestId, r.requestNumber));
|
||||||
|
|
||||||
|
const mongoAlerts = alerts.map((a: any) => {
|
||||||
|
const reqNumber = requestMap.get(a.requestId);
|
||||||
|
if (!reqNumber) return null;
|
||||||
|
return {
|
||||||
|
alertId: a.alertId,
|
||||||
|
requestId: reqNumber,
|
||||||
|
levelNumber: a.levelNumber,
|
||||||
|
alertType: a.alertType,
|
||||||
|
sentToValues: a.sentToValues,
|
||||||
|
sentAt: a.sentAt,
|
||||||
|
metadata: a.metadata,
|
||||||
|
createdAt: a.createdAt,
|
||||||
|
updatedAt: a.updatedAt
|
||||||
|
};
|
||||||
|
}).filter(Boolean);
|
||||||
|
|
||||||
|
if (mongoAlerts.length > 0) {
|
||||||
|
await TatAlertModel.bulkWrite(mongoAlerts.map((a: any) => ({
|
||||||
|
updateOne: { filter: { alertId: a.alertId }, update: { $set: a }, upsert: true }
|
||||||
|
})));
|
||||||
|
}
|
||||||
|
offset += BATCH_SIZE;
|
||||||
|
logger.info(`✅ Migrated ${offset} alerts...`);
|
||||||
|
}
|
||||||
|
logger.info(`✨ Alert Migration Completed.`);
|
||||||
|
};
|
||||||
|
|
||||||
|
const migrateSummaries = async () => {
|
||||||
|
logger.info('🚀 Starting Request Summary Migration...');
|
||||||
|
let offset = 0;
|
||||||
|
while (true) {
|
||||||
|
// Find summaries without include to skip association issues
|
||||||
|
const summaries = await SqlRequestSummary.findAll({ limit: BATCH_SIZE, offset });
|
||||||
|
if (summaries.length === 0) break;
|
||||||
|
|
||||||
|
// 1. Get Request Numbers
|
||||||
|
const requestIds = [...new Set(summaries.map((s: any) => s.requestId).filter(Boolean))];
|
||||||
|
const requests = await SqlWorkflowRequest.findAll({ where: { requestId: requestIds }, attributes: ['requestId', 'requestNumber'] });
|
||||||
|
const requestMap = new Map();
|
||||||
|
requests.forEach((r: any) => requestMap.set(r.requestId, r.requestNumber));
|
||||||
|
|
||||||
|
// 2. Get Shared Summaries
|
||||||
|
const summaryIds = summaries.map((s: any) => s.summaryId);
|
||||||
|
const sharedSummaries = await SqlSharedSummary.findAll({ where: { summaryId: summaryIds } });
|
||||||
|
const sharedMap = new Map();
|
||||||
|
sharedSummaries.forEach((sh: any) => {
|
||||||
|
if (!sharedMap.has(sh.summaryId)) sharedMap.set(sh.summaryId, []);
|
||||||
|
sharedMap.get(sh.summaryId).push(sh);
|
||||||
|
});
|
||||||
|
|
||||||
|
const mongoSummaries = summaries.map((s: any) => {
|
||||||
|
const reqNumber = requestMap.get(s.requestId);
|
||||||
|
if (!reqNumber) return null;
|
||||||
|
return {
|
||||||
|
summaryId: s.summaryId,
|
||||||
|
requestId: reqNumber,
|
||||||
|
initiatorId: s.initiatorId,
|
||||||
|
title: s.title,
|
||||||
|
description: s.description,
|
||||||
|
closingRemarks: s.closingRemarks,
|
||||||
|
isAiGenerated: s.isAiGenerated,
|
||||||
|
conclusionId: s.conclusionId,
|
||||||
|
createdAt: s.createdAt,
|
||||||
|
updatedAt: s.updatedAt,
|
||||||
|
sharedWith: (sharedMap.get(s.summaryId) || []).map((sh: any) => ({
|
||||||
|
userId: sh.sharedWith,
|
||||||
|
sharedBy: sh.sharedBy,
|
||||||
|
sharedAt: sh.sharedAt,
|
||||||
|
viewedAt: sh.viewedAt,
|
||||||
|
isRead: sh.isRead
|
||||||
|
}))
|
||||||
|
};
|
||||||
|
}).filter(Boolean);
|
||||||
|
|
||||||
|
if (mongoSummaries.length > 0) {
|
||||||
|
await RequestSummaryModel.bulkWrite(mongoSummaries.map((s: any) => ({
|
||||||
|
updateOne: { filter: { summaryId: s.summaryId }, update: { $set: s }, upsert: true }
|
||||||
|
})));
|
||||||
|
}
|
||||||
|
offset += BATCH_SIZE;
|
||||||
|
logger.info(`✅ Migrated ${offset} summaries...`);
|
||||||
|
}
|
||||||
|
logger.info(`✨ Request Summary Migration Completed.`);
|
||||||
|
};
|
||||||
|
|
||||||
|
// --- PHASE 7: DEALERS & CLAIMS ---
|
||||||
|
|
||||||
|
const migrateDealers = async () => {
|
||||||
|
logger.info('🚀 Starting Dealer Migration...');
|
||||||
|
let offset = 0;
|
||||||
|
while (true) {
|
||||||
|
const dealers = await SqlDealer.findAll({ limit: BATCH_SIZE, offset });
|
||||||
|
if (dealers.length === 0) break;
|
||||||
|
|
||||||
|
const mongoDealers = dealers.map((d: any) => ({
|
||||||
|
dealerCode: d.dealerCode, // Maps to PK
|
||||||
|
dealerName: d.dealerName,
|
||||||
|
region: d.region,
|
||||||
|
state: d.state,
|
||||||
|
city: d.city,
|
||||||
|
zone: d.zone,
|
||||||
|
location: d.location,
|
||||||
|
sapCode: d.sapCode,
|
||||||
|
email: d.email,
|
||||||
|
phone: d.phone,
|
||||||
|
address: d.address,
|
||||||
|
gstin: d.gstin,
|
||||||
|
pan: d.pan,
|
||||||
|
isActive: d.isActive,
|
||||||
|
createdAt: d.createdAt,
|
||||||
|
updatedAt: d.updatedAt
|
||||||
|
}));
|
||||||
|
|
||||||
|
if (mongoDealers.length > 0) {
|
||||||
|
await DealerModel.bulkWrite(mongoDealers.map((d: any) => ({
|
||||||
|
updateOne: { filter: { dealerCode: d.dealerCode }, update: { $set: d }, upsert: true }
|
||||||
|
})));
|
||||||
|
}
|
||||||
|
offset += BATCH_SIZE;
|
||||||
|
logger.info(`✅ Migrated ${offset} dealers...`);
|
||||||
|
}
|
||||||
|
logger.info(`✨ Dealer Migration Completed.`);
|
||||||
|
};
|
||||||
|
|
||||||
|
const migrateClaims = async () => {
|
||||||
|
logger.info('🚀 Starting Dealer Claim Migration (Aggregation)...');
|
||||||
|
let offset = 0;
|
||||||
|
while (true) {
|
||||||
|
// Trigger from DealerClaimDetails (The root of a claim)
|
||||||
|
const claimDetails = await SqlDealerClaimDetails.findAll({ limit: BATCH_SIZE, offset });
|
||||||
|
if (claimDetails.length === 0) break;
|
||||||
|
|
||||||
|
const claimIds = claimDetails.map((c: any) => c.claimId);
|
||||||
|
const requestIds = [...new Set(claimDetails.map((c: any) => c.requestId).filter(Boolean))];
|
||||||
|
const dealerCodes = [...new Set(claimDetails.map((c: any) => c.dealerCode).filter(Boolean))];
|
||||||
|
|
||||||
|
// 0. Fetch Dealer Details (For Region/State filters)
|
||||||
|
// 0. Fetch Dealer Details (For Region/State filters)
|
||||||
|
const dealers = await SqlDealer.findAll({
|
||||||
|
where: { salesCode: dealerCodes },
|
||||||
|
attributes: ['salesCode', 'region', 'state', 'city']
|
||||||
|
});
|
||||||
|
const dealerMap = new Map();
|
||||||
|
dealers.forEach((d: any) => dealerMap.set(d.salesCode, d.get({ plain: true })));
|
||||||
|
|
||||||
|
// 1. Fetch Workflows for Request Numbers
|
||||||
|
const requests = await SqlWorkflowRequest.findAll({ where: { requestId: requestIds }, attributes: ['requestId', 'requestNumber'] });
|
||||||
|
const requestMap = new Map();
|
||||||
|
requests.forEach((r: any) => requestMap.set(r.requestId, r.requestNumber));
|
||||||
|
|
||||||
|
// 2. Fetch Proposals
|
||||||
|
const proposals = await SqlDealerProposalDetails.findAll({ where: { requestId: requestIds } });
|
||||||
|
const proposalIds = proposals.map((p: any) => p.proposalId);
|
||||||
|
const proposalItems = await SqlDealerProposalCostItem.findAll({ where: { proposalId: proposalIds } });
|
||||||
|
const proposalMap = new Map();
|
||||||
|
proposals.forEach((p: any) => {
|
||||||
|
const items = proposalItems.filter((i: any) => i.proposalId === p.proposalId);
|
||||||
|
proposalMap.set(p.requestId, { ...p.get({ plain: true }), costItems: items.map((i: any) => i.get({ plain: true })) });
|
||||||
|
});
|
||||||
|
|
||||||
|
// 3. Fetch Completions
|
||||||
|
const completions = await SqlDealerCompletionDetails.findAll({ where: { requestId: requestIds } });
|
||||||
|
const completionIds = completions.map((c: any) => c.completionId);
|
||||||
|
const completionExpenses = await SqlDealerCompletionExpense.findAll({ where: { completionId: completionIds } });
|
||||||
|
const completionMap = new Map();
|
||||||
|
completions.forEach((c: any) => {
|
||||||
|
const expenses = completionExpenses.filter((e: any) => e.completionId === c.completionId);
|
||||||
|
completionMap.set(c.requestId, { ...c.get({ plain: true }), expenses: expenses.map((e: any) => e.get({ plain: true })) });
|
||||||
|
});
|
||||||
|
|
||||||
|
// 4. Fetch Budget Tracking
|
||||||
|
const budgets = await SqlClaimBudgetTracking.findAll({ where: { requestId: requestIds } });
|
||||||
|
const budgetMap = new Map();
|
||||||
|
budgets.forEach((b: any) => budgetMap.set(b.requestId, b.get({ plain: true })));
|
||||||
|
|
||||||
|
// 5. Fetch Invoices & Credit Notes
|
||||||
|
const invoices = await SqlClaimInvoice.findAll({ where: { requestId: requestIds } });
|
||||||
|
const creditNotes = await SqlClaimCreditNote.findAll({ where: { requestId: requestIds } });
|
||||||
|
const invoiceMap = new Map(); // requestId -> [invoices]
|
||||||
|
const creditNoteMap = new Map(); // requestId -> [notes]
|
||||||
|
|
||||||
|
invoices.forEach((i: any) => {
|
||||||
|
if (!invoiceMap.has(i.requestId)) invoiceMap.set(i.requestId, []);
|
||||||
|
invoiceMap.get(i.requestId).push(i.get({ plain: true }));
|
||||||
|
});
|
||||||
|
creditNotes.forEach((rn: any) => {
|
||||||
|
if (!creditNoteMap.has(rn.requestId)) creditNoteMap.set(rn.requestId, []);
|
||||||
|
creditNoteMap.get(rn.requestId).push(rn.get({ plain: true }));
|
||||||
|
});
|
||||||
|
|
||||||
|
// 6. Aggregate into DealerClaim
|
||||||
|
const mongoClaims = claimDetails.map((c: any) => {
|
||||||
|
const reqNumber = requestMap.get(c.requestId);
|
||||||
|
if (!reqNumber) return null;
|
||||||
|
|
||||||
|
const p = proposalMap.get(c.requestId);
|
||||||
|
const comp = completionMap.get(c.requestId);
|
||||||
|
const b = budgetMap.get(c.requestId);
|
||||||
|
|
||||||
|
return {
|
||||||
|
claimId: c.claimId,
|
||||||
|
requestNumber: reqNumber,
|
||||||
|
claimDate: c.activityDate,
|
||||||
|
|
||||||
|
dealer: {
|
||||||
|
code: c.dealerCode,
|
||||||
|
name: c.dealerName,
|
||||||
|
email: c.dealerEmail,
|
||||||
|
phone: c.dealerPhone,
|
||||||
|
address: c.dealerAddress,
|
||||||
|
location: c.location,
|
||||||
|
region: dealerMap.get(c.dealerCode)?.region,
|
||||||
|
state: dealerMap.get(c.dealerCode)?.state,
|
||||||
|
city: dealerMap.get(c.dealerCode)?.city
|
||||||
|
},
|
||||||
|
|
||||||
|
activity: {
|
||||||
|
name: c.activityName,
|
||||||
|
type: c.activityType,
|
||||||
|
periodStart: c.periodStartDate,
|
||||||
|
periodEnd: c.periodEndDate
|
||||||
|
},
|
||||||
|
|
||||||
|
proposal: p ? {
|
||||||
|
proposalId: p.proposalId,
|
||||||
|
totalEstimatedBudget: p.totalEstimatedBudget,
|
||||||
|
timelineMode: p.timelineMode,
|
||||||
|
expectedCompletion: p.expectedCompletionDate || p.expectedCompletionDays,
|
||||||
|
dealerComments: p.dealerComments,
|
||||||
|
submittedAt: p.submittedAt,
|
||||||
|
documentUrl: p.proposalDocumentUrl,
|
||||||
|
costItems: (p.costItems || []).map((i: any) => ({
|
||||||
|
itemId: i.itemId,
|
||||||
|
description: i.itemDescription,
|
||||||
|
quantity: i.quantity,
|
||||||
|
unitCost: i.unitCost,
|
||||||
|
totalCost: i.totalCost,
|
||||||
|
category: i.category
|
||||||
|
}))
|
||||||
|
} : undefined,
|
||||||
|
|
||||||
|
completion: comp ? {
|
||||||
|
completionId: comp.completionId,
|
||||||
|
actualTotalCost: comp.actualTotalCost,
|
||||||
|
completionDate: comp.completionDate,
|
||||||
|
dealerComments: comp.dealerComments,
|
||||||
|
submittedAt: comp.submittedAt,
|
||||||
|
expenses: (comp.expenses || []).map((e: any) => ({
|
||||||
|
expenseId: e.expenseId,
|
||||||
|
description: e.description,
|
||||||
|
amount: e.amount,
|
||||||
|
category: e.category,
|
||||||
|
invoiceNumber: e.invoiceNumber,
|
||||||
|
invoiceDate: e.invoiceDate,
|
||||||
|
documentUrl: e.documentUrl
|
||||||
|
}))
|
||||||
|
} : undefined,
|
||||||
|
|
||||||
|
budgetTracking: b ? {
|
||||||
|
approvedBudget: b.approvedBudget,
|
||||||
|
utilizedBudget: b.closedExpenses, // or finalClaimAmount
|
||||||
|
remainingBudget: b.varianceAmount, // approximate mapping
|
||||||
|
sapInsertionStatus: b.budgetStatus === 'SETTLED' ? 'COMPLETED' : 'PENDING',
|
||||||
|
sapDocId: b.sapDocId // if available
|
||||||
|
} : undefined,
|
||||||
|
|
||||||
|
invoices: (invoiceMap.get(c.requestId) || []).map((inv: any) => ({
|
||||||
|
invoiceId: inv.invoiceId,
|
||||||
|
invoiceNumber: inv.invoiceNumber,
|
||||||
|
amount: inv.amount,
|
||||||
|
date: inv.invoiceDate,
|
||||||
|
status: inv.status,
|
||||||
|
documentUrl: inv.invoiceFilePath
|
||||||
|
})),
|
||||||
|
|
||||||
|
creditNotes: (creditNoteMap.get(c.requestId) || []).map((cn: any) => ({
|
||||||
|
noteId: cn.creditNoteId,
|
||||||
|
noteNumber: cn.creditNoteNumber,
|
||||||
|
amount: cn.amount,
|
||||||
|
date: cn.creditNoteDate,
|
||||||
|
sapDocId: cn.sapDocId
|
||||||
|
})),
|
||||||
|
|
||||||
|
createdAt: c.createdAt,
|
||||||
|
updatedAt: c.updatedAt,
|
||||||
|
// Initialize empty revision history for migrated data
|
||||||
|
revisions: []
|
||||||
|
};
|
||||||
|
}).filter(Boolean);
|
||||||
|
|
||||||
|
if (mongoClaims.length > 0) {
|
||||||
|
await DealerClaimModel.bulkWrite(mongoClaims.map((c: any) => ({
|
||||||
|
updateOne: { filter: { claimId: c.claimId }, update: { $set: c }, upsert: true }
|
||||||
|
})));
|
||||||
|
}
|
||||||
|
offset += BATCH_SIZE;
|
||||||
|
logger.info(`✅ Migrated ${offset} aggregated claims...`);
|
||||||
|
}
|
||||||
|
logger.info(`✨ Dealer Claim Migration Completed.`);
|
||||||
|
};
|
||||||
|
|
||||||
|
const runMigration = async () => {
|
||||||
|
try {
|
||||||
|
await sequelize.authenticate();
|
||||||
|
logger.info('🐘 PostgreSQL Connected.');
|
||||||
|
await connectMongoDB();
|
||||||
|
|
||||||
|
await migrateUsers();
|
||||||
|
await migrateWorkflows();
|
||||||
|
|
||||||
|
await migrateDocuments();
|
||||||
|
await migrateWorkNotes();
|
||||||
|
await migrateActivities();
|
||||||
|
|
||||||
|
// PHASE 6
|
||||||
|
await migrateTemplates();
|
||||||
|
await migrateHolidays();
|
||||||
|
await migrateTatAlerts();
|
||||||
|
await migrateSummaries();
|
||||||
|
|
||||||
|
// PHASE 7
|
||||||
|
// await migrateDealers(); // Uncomment if Dealer table is populated
|
||||||
|
await migrateClaims();
|
||||||
|
|
||||||
|
logger.info('🎉 FULL MIGRATION SUCCESSFUL!');
|
||||||
|
process.exit(0);
|
||||||
|
} catch (error) {
|
||||||
|
logger.error('❌ Migration Failed:', error);
|
||||||
|
process.exit(1);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
runMigration();
|
||||||
28
src/scripts/reset-mongo-db.ts
Normal file
28
src/scripts/reset-mongo-db.ts
Normal file
@ -0,0 +1,28 @@
|
|||||||
|
import mongoose from 'mongoose';
|
||||||
|
import dotenv from 'dotenv';
|
||||||
|
import path from 'path';
|
||||||
|
|
||||||
|
dotenv.config({ path: path.resolve(__dirname, '../../.env') });
|
||||||
|
|
||||||
|
const resetMongoDB = async () => {
|
||||||
|
try {
|
||||||
|
const mongoUri = process.env.MONGO_URI || process.env.MONGODB_URL || 'mongodb://localhost:27017/re_workflow_db';
|
||||||
|
console.log(`🔌 Connecting to MongoDB at ${mongoUri}...`);
|
||||||
|
|
||||||
|
await mongoose.connect(mongoUri);
|
||||||
|
console.log('✅ Connected to MongoDB.');
|
||||||
|
|
||||||
|
console.log('🗑️ Dropping database...');
|
||||||
|
await mongoose.connection.dropDatabase();
|
||||||
|
console.log('✅ Database dropped successfully.');
|
||||||
|
|
||||||
|
await mongoose.disconnect();
|
||||||
|
console.log('👋 Disconnected.');
|
||||||
|
process.exit(0);
|
||||||
|
} catch (error) {
|
||||||
|
console.error('❌ Failed to reset MongoDB:', error);
|
||||||
|
process.exit(1);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
resetMongoDB();
|
||||||
19
src/scripts/seed-admin-config.mongo.ts
Normal file
19
src/scripts/seed-admin-config.mongo.ts
Normal file
@ -0,0 +1,19 @@
|
|||||||
|
import { connectMongoDB, mongoose } from '../config/database';
|
||||||
|
import { seedDefaultConfigurationsMongo } from '../services/configSeed.service';
|
||||||
|
import logger from '../utils/logger';
|
||||||
|
|
||||||
|
const seedAdminConfigurationsMongo = async () => {
|
||||||
|
try {
|
||||||
|
await connectMongoDB();
|
||||||
|
await seedDefaultConfigurationsMongo();
|
||||||
|
|
||||||
|
logger.info('✅ Mongo Config Seeding completed.');
|
||||||
|
await mongoose.disconnect();
|
||||||
|
process.exit(0);
|
||||||
|
} catch (error) {
|
||||||
|
logger.error('❌ Failed to seed Mongo configs:', error);
|
||||||
|
process.exit(1);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
seedAdminConfigurationsMongo();
|
||||||
@ -4,9 +4,12 @@
|
|||||||
* These users will act as action takers in the workflow
|
* These users will act as action takers in the workflow
|
||||||
*/
|
*/
|
||||||
|
|
||||||
import { sequelize } from '../config/database';
|
import { UserModel, IUser } from '../models/mongoose/User.schema';
|
||||||
import { User } from '../models/User';
|
import mongoose from 'mongoose';
|
||||||
import logger from '../utils/logger';
|
import logger from '../utils/logger';
|
||||||
|
import dotenv from 'dotenv';
|
||||||
|
|
||||||
|
dotenv.config();
|
||||||
|
|
||||||
interface DealerData {
|
interface DealerData {
|
||||||
email: string;
|
email: string;
|
||||||
@ -47,9 +50,9 @@ async function seedDealers(): Promise<void> {
|
|||||||
logger.info('[Seed Dealers] Starting dealer user seeding...');
|
logger.info('[Seed Dealers] Starting dealer user seeding...');
|
||||||
|
|
||||||
for (const dealer of dealers) {
|
for (const dealer of dealers) {
|
||||||
// Check if user already exists
|
// Check if user already exists in MongoDB
|
||||||
const existingUser = await User.findOne({
|
const existingUser = await UserModel.findOne({
|
||||||
where: { email: dealer.email },
|
email: dealer.email.toLowerCase()
|
||||||
});
|
});
|
||||||
|
|
||||||
if (existingUser) {
|
if (existingUser) {
|
||||||
@ -102,7 +105,8 @@ async function seedDealers(): Promise<void> {
|
|||||||
updateData.lastName = lastName;
|
updateData.lastName = lastName;
|
||||||
}
|
}
|
||||||
|
|
||||||
await existingUser.update(updateData);
|
Object.assign(existingUser, updateData);
|
||||||
|
await (existingUser as any).save();
|
||||||
|
|
||||||
if (isOktaUser) {
|
if (isOktaUser) {
|
||||||
logger.info(`[Seed Dealers] ✅ Updated existing Okta user ${dealer.email} with dealer code: ${dealer.dealerCode}`);
|
logger.info(`[Seed Dealers] ✅ Updated existing Okta user ${dealer.email} with dealer code: ${dealer.dealerCode}`);
|
||||||
@ -126,7 +130,7 @@ async function seedDealers(): Promise<void> {
|
|||||||
const firstName = nameParts[0] || dealer.dealerName;
|
const firstName = nameParts[0] || dealer.dealerName;
|
||||||
const lastName = nameParts.slice(1).join(' ') || '';
|
const lastName = nameParts.slice(1).join(' ') || '';
|
||||||
|
|
||||||
await User.create({
|
await UserModel.create({
|
||||||
userId,
|
userId,
|
||||||
email: dealer.email.toLowerCase(),
|
email: dealer.email.toLowerCase(),
|
||||||
displayName: dealer.displayName,
|
displayName: dealer.displayName,
|
||||||
@ -135,18 +139,18 @@ async function seedDealers(): Promise<void> {
|
|||||||
department: dealer.department || 'Dealer Operations',
|
department: dealer.department || 'Dealer Operations',
|
||||||
designation: dealer.designation || 'Dealer',
|
designation: dealer.designation || 'Dealer',
|
||||||
phone: dealer.phone,
|
phone: dealer.phone,
|
||||||
role: dealer.role || 'USER',
|
role: (dealer.role || 'USER') as any,
|
||||||
employeeId: dealer.dealerCode, // Store dealer code in employeeId field
|
employeeId: dealer.dealerCode,
|
||||||
isActive: true,
|
isActive: true,
|
||||||
// Set placeholder oktaSub - will be updated when user logs in via SSO
|
|
||||||
// Using a recognizable pattern so we know it's a placeholder
|
|
||||||
oktaSub: `dealer-${dealer.dealerCode}-pending-sso`,
|
oktaSub: `dealer-${dealer.dealerCode}-pending-sso`,
|
||||||
emailNotificationsEnabled: true,
|
notifications: {
|
||||||
pushNotificationsEnabled: false,
|
email: true,
|
||||||
inAppNotificationsEnabled: true,
|
push: false,
|
||||||
|
inApp: true
|
||||||
|
},
|
||||||
createdAt: new Date(),
|
createdAt: new Date(),
|
||||||
updatedAt: new Date(),
|
updatedAt: new Date(),
|
||||||
} as any);
|
});
|
||||||
|
|
||||||
logger.info(`[Seed Dealers] ⚠️ Created placeholder dealer user: ${dealer.email} (${dealer.dealerCode})`);
|
logger.info(`[Seed Dealers] ⚠️ Created placeholder dealer user: ${dealer.email} (${dealer.dealerCode})`);
|
||||||
logger.info(`[Seed Dealers] ⚠️ User should login via SSO to update oktaSub field with real Okta subject ID`);
|
logger.info(`[Seed Dealers] ⚠️ User should login via SSO to update oktaSub field with real Okta subject ID`);
|
||||||
@ -162,10 +166,10 @@ async function seedDealers(): Promise<void> {
|
|||||||
|
|
||||||
// Run if called directly
|
// Run if called directly
|
||||||
if (require.main === module) {
|
if (require.main === module) {
|
||||||
sequelize
|
const mongoUri = process.env.MONGO_URI || 'mongodb://localhost:27017/re_workflow_db';
|
||||||
.authenticate()
|
mongoose.connect(mongoUri)
|
||||||
.then(() => {
|
.then(() => {
|
||||||
logger.info('[Seed Dealers] Database connection established');
|
logger.info('[Seed Dealers] MongoDB connection established');
|
||||||
return seedDealers();
|
return seedDealers();
|
||||||
})
|
})
|
||||||
.then(() => {
|
.then(() => {
|
||||||
|
|||||||
52
src/scripts/seed-test-dealer.mongo.ts
Normal file
52
src/scripts/seed-test-dealer.mongo.ts
Normal file
@ -0,0 +1,52 @@
|
|||||||
|
import { connectMongoDB, mongoose } from '../config/database';
|
||||||
|
import { DealerModel } from '../models/mongoose/Dealer.schema';
|
||||||
|
import logger from '../utils/logger';
|
||||||
|
|
||||||
|
const seedTestDealerMongo = async () => {
|
||||||
|
try {
|
||||||
|
await connectMongoDB();
|
||||||
|
|
||||||
|
const dealerData = {
|
||||||
|
dealerCode: 'TEST001',
|
||||||
|
dealerName: 'TEST REFLOW DEALERSHIP',
|
||||||
|
region: 'TEST',
|
||||||
|
state: 'Test State',
|
||||||
|
city: 'Test City',
|
||||||
|
zone: 'Test Zone',
|
||||||
|
location: 'Test Location',
|
||||||
|
sapCode: 'SAP001',
|
||||||
|
email: 'testreflow@example.com',
|
||||||
|
phone: '9999999999',
|
||||||
|
address: 'Test Address, Test City',
|
||||||
|
isActive: true,
|
||||||
|
// Additional fields can be added if schema supports them
|
||||||
|
};
|
||||||
|
|
||||||
|
const existingDealer = await DealerModel.findOne({
|
||||||
|
$or: [
|
||||||
|
{ dealerCode: dealerData.dealerCode },
|
||||||
|
{ email: dealerData.email }
|
||||||
|
]
|
||||||
|
});
|
||||||
|
|
||||||
|
if (existingDealer) {
|
||||||
|
logger.info('[Seed Test Dealer Mongo] Dealer already exists, updating...');
|
||||||
|
Object.assign(existingDealer, dealerData);
|
||||||
|
await existingDealer.save();
|
||||||
|
logger.info(`[Seed Test Dealer Mongo] ✅ Updated dealer: ${existingDealer.dealerCode}`);
|
||||||
|
} else {
|
||||||
|
const newDealer = await DealerModel.create(dealerData);
|
||||||
|
logger.info(`[Seed Test Dealer Mongo] ✅ Created dealer: ${newDealer.dealerCode}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
await mongoose.disconnect();
|
||||||
|
logger.info('✅ Mongo Test Dealer Seeding completed.');
|
||||||
|
process.exit(0);
|
||||||
|
|
||||||
|
} catch (error) {
|
||||||
|
logger.error('❌ Failed to seed Mongo test dealer:', error);
|
||||||
|
process.exit(1);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
seedTestDealerMongo();
|
||||||
97
src/scripts/test-mongo-performance.ts
Normal file
97
src/scripts/test-mongo-performance.ts
Normal file
@ -0,0 +1,97 @@
|
|||||||
|
import mongoose from 'mongoose';
|
||||||
|
import { connectMongoDB } from '../config/database';
|
||||||
|
import { WorkflowRequestModel } from '../models/mongoose/WorkflowRequest.schema';
|
||||||
|
import { ApprovalLevelModel } from '../models/mongoose/ApprovalLevel.schema';
|
||||||
|
import { ParticipantModel } from '../models/mongoose/Participant.schema';
|
||||||
|
import { WorkflowServiceMongo } from '../services/workflow.service';
|
||||||
|
|
||||||
|
const runTest = async () => {
|
||||||
|
await connectMongoDB();
|
||||||
|
const service = new WorkflowServiceMongo();
|
||||||
|
|
||||||
|
console.log('🧹 Cleaning up old test data...');
|
||||||
|
// Clean up all collections
|
||||||
|
await WorkflowRequestModel.deleteMany({ requestNumber: { $regex: /^TEST-/ } });
|
||||||
|
await ApprovalLevelModel.deleteMany({ requestId: { $regex: /^TEST-/ } });
|
||||||
|
await ParticipantModel.deleteMany({ requestId: { $regex: /^TEST-/ } });
|
||||||
|
|
||||||
|
console.log('🌱 Seeding sample data (Normalized)...');
|
||||||
|
const requestSamples = [];
|
||||||
|
const levelSamples = [];
|
||||||
|
const departments = ['Sales', 'Marketing', 'IT', 'HR'];
|
||||||
|
|
||||||
|
for (let i = 0; i < 50; i++) {
|
||||||
|
const dept = departments[i % departments.length];
|
||||||
|
const isBreached = i % 5 === 0; // Every 5th is breached
|
||||||
|
const reqNum = `TEST-${i}`;
|
||||||
|
|
||||||
|
requestSamples.push({
|
||||||
|
requestNumber: reqNum,
|
||||||
|
title: `Test Request ${i}`,
|
||||||
|
description: 'Auto-generated test request',
|
||||||
|
initiator: {
|
||||||
|
userId: `user-${i}`,
|
||||||
|
email: `user${i}@re.com`,
|
||||||
|
name: `User ${i}`,
|
||||||
|
department: dept
|
||||||
|
},
|
||||||
|
status: 'APPROVED',
|
||||||
|
// No embedded arrays
|
||||||
|
});
|
||||||
|
|
||||||
|
levelSamples.push({
|
||||||
|
levelId: `lvl-${i}-1`,
|
||||||
|
requestId: reqNum, // Reference
|
||||||
|
levelNumber: 1,
|
||||||
|
status: 'APPROVED',
|
||||||
|
approver: { userId: 'mgr', email: 'mgr@re.com', name: 'Manager' },
|
||||||
|
tat: {
|
||||||
|
assignedHours: 24,
|
||||||
|
elapsedHours: Math.random() * 48, // Random TAT
|
||||||
|
isBreached: isBreached
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
await WorkflowRequestModel.insertMany(requestSamples);
|
||||||
|
await ApprovalLevelModel.insertMany(levelSamples);
|
||||||
|
console.log('✅ Seeded 50 requests with 50 approval levels (Separate Collections).');
|
||||||
|
|
||||||
|
console.log('\n📊 Running KPI Aggregation (Department TAT using $lookup)...');
|
||||||
|
console.time('KPI_Query_Lookup');
|
||||||
|
const kpis = await service.getDepartmentTATMetrics();
|
||||||
|
console.timeEnd('KPI_Query_Lookup');
|
||||||
|
|
||||||
|
console.table(kpis);
|
||||||
|
|
||||||
|
console.log('\n🔍 Testing Deep Filter with Join (Find requests where Level 1 breached)...');
|
||||||
|
console.time('Deep_Filter_Lookup');
|
||||||
|
const breached = await service.listWorkflows(1, 10, {
|
||||||
|
levelStatus: 'APPROVED',
|
||||||
|
levelNumber: "1" // Logic implies finding approved level 1s, assuming we want to test joining
|
||||||
|
});
|
||||||
|
// Manual pipeline test for specific "breached" check similar to previous test
|
||||||
|
const deepBreach = await WorkflowRequestModel.aggregate([
|
||||||
|
{
|
||||||
|
$lookup: {
|
||||||
|
from: 'approval_levels',
|
||||||
|
localField: 'requestNumber',
|
||||||
|
foreignField: 'requestId',
|
||||||
|
as: 'matches'
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
$match: {
|
||||||
|
'matches': { $elemMatch: { levelNumber: 1, 'tat.isBreached': true } }
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{ $limit: 5 }
|
||||||
|
]);
|
||||||
|
|
||||||
|
console.timeEnd('Deep_Filter_Lookup');
|
||||||
|
console.log(`Found ${deepBreach.length} breached requests (via Lookups).`);
|
||||||
|
|
||||||
|
process.exit(0);
|
||||||
|
};
|
||||||
|
|
||||||
|
runTest().catch(console.error);
|
||||||
32
src/scripts/trim-newlines.js
Normal file
32
src/scripts/trim-newlines.js
Normal file
@ -0,0 +1,32 @@
|
|||||||
|
|
||||||
|
const fs = require('fs');
|
||||||
|
const path = require('path');
|
||||||
|
|
||||||
|
const directory = path.join(__dirname, '..');
|
||||||
|
|
||||||
|
function traverseDirectory(dir) {
|
||||||
|
const files = fs.readdirSync(dir);
|
||||||
|
|
||||||
|
for (const file of files) {
|
||||||
|
const filePath = path.join(dir, file);
|
||||||
|
const stat = fs.statSync(filePath);
|
||||||
|
|
||||||
|
if (stat.isDirectory()) {
|
||||||
|
traverseDirectory(filePath);
|
||||||
|
} else if (file.endsWith('.ts')) {
|
||||||
|
const content = fs.readFileSync(filePath, 'utf8');
|
||||||
|
const trimmed = content.trim();
|
||||||
|
// Enforce Windows CRLF line ending for consistency and to satisfy Git on Windows
|
||||||
|
const newContent = trimmed + '\r\n';
|
||||||
|
|
||||||
|
if (content !== newContent) {
|
||||||
|
fs.writeFileSync(filePath, newContent, 'utf8');
|
||||||
|
console.log(`Trimmed ${filePath}`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
console.log('Starting whitespace cleanup...');
|
||||||
|
traverseDirectory(directory);
|
||||||
|
console.log('Cleanup complete.');
|
||||||
78
src/scripts/verify-filters.ts
Normal file
78
src/scripts/verify-filters.ts
Normal file
@ -0,0 +1,78 @@
|
|||||||
|
import { connectMongoDB } from '../config/database';
|
||||||
|
import { DealerClaimModel } from '../models/mongoose/DealerClaim.schema';
|
||||||
|
import { WorkflowRequestModel } from '../models/mongoose/WorkflowRequest.schema';
|
||||||
|
import logger from '../utils/logger';
|
||||||
|
|
||||||
|
const verifyFilters = async () => {
|
||||||
|
try {
|
||||||
|
await connectMongoDB();
|
||||||
|
logger.info('🚀 Starting Dashboard Filter Verification...');
|
||||||
|
|
||||||
|
// 1. Workflow Filter: Status + Date Range
|
||||||
|
logger.info('🔍 Filter 1: Workflows [Status: APPROVED] + [Date: Last 30 Days]');
|
||||||
|
// Mocking a date range since migrated data might be old
|
||||||
|
const startDate = new Date('2023-01-01');
|
||||||
|
const endDate = new Date('2026-12-31');
|
||||||
|
|
||||||
|
const recentApprovedDocs = await WorkflowRequestModel.find({
|
||||||
|
status: 'APPROVED',
|
||||||
|
'dates.created': { $gte: startDate, $lte: endDate }
|
||||||
|
}).select('requestNumber status dates.created initiator.department').limit(5);
|
||||||
|
|
||||||
|
console.table(recentApprovedDocs.map(d => ({
|
||||||
|
reqNo: d.requestNumber,
|
||||||
|
status: d.status,
|
||||||
|
date: d.dates.created?.toISOString().split('T')[0],
|
||||||
|
dept: d.initiator.department
|
||||||
|
})));
|
||||||
|
logger.info(`✅ Found ${recentApprovedDocs.length} matching workflows.`);
|
||||||
|
|
||||||
|
// 2. Workflow Filter: Department
|
||||||
|
logger.info('🔍 Filter 2: Workflows [Department: "Sales"]');
|
||||||
|
const salesDocs = await WorkflowRequestModel.find({
|
||||||
|
'initiator.department': { $regex: /Sales/i }
|
||||||
|
}).countDocuments();
|
||||||
|
logger.info(`✅ Found ${salesDocs} workflows initiated by Sales department.`);
|
||||||
|
|
||||||
|
// 3. Dealer Claim Filter: Region/State
|
||||||
|
logger.info('🔍 Filter 3: Dealer Claims [Region: "North"] + [State: "Delhi"]');
|
||||||
|
const northClaims = await DealerClaimModel.find({
|
||||||
|
'dealer.region': { $regex: /North/i },
|
||||||
|
'dealer.state': { $regex: /Delhi/i }
|
||||||
|
}).select('claimId dealer.name dealer.city proposal.totalEstimatedBudget').limit(5);
|
||||||
|
|
||||||
|
console.table(northClaims.map(c => ({
|
||||||
|
claim: c.claimId,
|
||||||
|
dealer: c.dealer.name,
|
||||||
|
city: c.dealer.city,
|
||||||
|
amount: c.proposal?.totalEstimatedBudget
|
||||||
|
})));
|
||||||
|
logger.info(`✅ Found ${northClaims.length} claims in North/Delhi region.`);
|
||||||
|
|
||||||
|
// 4. Combined Dashboard View: "Pending Claims > 100k"
|
||||||
|
logger.info('🔍 Filter 4: High Value Pending Claims [Budget > 100000]');
|
||||||
|
const highValueClaims = await DealerClaimModel.find({
|
||||||
|
'budgetTracking.budgetStatus': 'DRAFT', // or PENDING
|
||||||
|
'proposal.totalEstimatedBudget': { $gt: 100000 }
|
||||||
|
}).select('claimId dealer.name proposal.totalEstimatedBudget').limit(3);
|
||||||
|
|
||||||
|
if (highValueClaims.length > 0) {
|
||||||
|
console.table(highValueClaims.map(c => ({
|
||||||
|
id: c.claimId,
|
||||||
|
dealer: c.dealer.name,
|
||||||
|
value: c.proposal?.totalEstimatedBudget
|
||||||
|
})));
|
||||||
|
} else {
|
||||||
|
logger.info('No high value pending claims found (expected if data is mostly small test data).');
|
||||||
|
}
|
||||||
|
logger.info(`✅ High Value Claim filter executed.`);
|
||||||
|
|
||||||
|
logger.info('🎉 Filter Capabilities Verified Successfully!');
|
||||||
|
process.exit(0);
|
||||||
|
} catch (error) {
|
||||||
|
console.error(error);
|
||||||
|
process.exit(1);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
verifyFilters();
|
||||||
61
src/scripts/verify-indexes.ts
Normal file
61
src/scripts/verify-indexes.ts
Normal file
@ -0,0 +1,61 @@
|
|||||||
|
import { connectMongoDB } from '../config/database';
|
||||||
|
import { DealerClaimModel } from '../models/mongoose/DealerClaim.schema';
|
||||||
|
import logger from '../utils/logger';
|
||||||
|
|
||||||
|
const verifyIndexes = async () => {
|
||||||
|
try {
|
||||||
|
await connectMongoDB();
|
||||||
|
logger.info('🚀 Starting Index Verification (Performance Check)...');
|
||||||
|
|
||||||
|
// Ensure indexes are built
|
||||||
|
await DealerClaimModel.ensureIndexes();
|
||||||
|
logger.info('✅ Indexes ensured.');
|
||||||
|
|
||||||
|
// Test 1: Budget Status Index
|
||||||
|
logger.info('🔍 Test 1: Query by "budgetTracking.budgetStatus"');
|
||||||
|
const budgetStats: any = await DealerClaimModel.find({ 'budgetTracking.budgetStatus': 'APPROVED' })
|
||||||
|
.explain('executionStats');
|
||||||
|
|
||||||
|
logStats('Budget Status', budgetStats);
|
||||||
|
|
||||||
|
// Test 2: Expense Category (Multikey Index)
|
||||||
|
logger.info('🔍 Test 2: Query by "completion.expenses.category"');
|
||||||
|
const expenseStats: any = await DealerClaimModel.find({ 'completion.expenses.category': 'Travel' })
|
||||||
|
.explain('executionStats');
|
||||||
|
|
||||||
|
logStats('Expense Category', expenseStats);
|
||||||
|
|
||||||
|
// Test 3: Region + State (Compound Index)
|
||||||
|
logger.info('🔍 Test 3: Query by Region + State');
|
||||||
|
const regionStats: any = await DealerClaimModel.find({
|
||||||
|
'dealer.region': 'North',
|
||||||
|
'dealer.state': 'Delhi'
|
||||||
|
}).explain('executionStats');
|
||||||
|
|
||||||
|
logStats('Region/State', regionStats);
|
||||||
|
|
||||||
|
process.exit(0);
|
||||||
|
|
||||||
|
} catch (error) {
|
||||||
|
logger.error('❌ Verification Failed:', error);
|
||||||
|
process.exit(1);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
const logStats = (testName: string, stats: any) => {
|
||||||
|
const stage = stats.executionStats.executionStages.stage; // Should be IXSCAN or FETCH
|
||||||
|
const docsExamined = stats.executionStats.totalDocsExamined;
|
||||||
|
const nReturned = stats.executionStats.nReturned;
|
||||||
|
const inputStage = stats.executionStats.executionStages.inputStage?.stage; // Often IXSCAN is here
|
||||||
|
|
||||||
|
// Check if IXSCAN is present anywhere in the plan
|
||||||
|
const usedIndex = (stage === 'IXSCAN') || (inputStage === 'IXSCAN');
|
||||||
|
|
||||||
|
if (usedIndex) {
|
||||||
|
logger.info(`✅ [${testName}] Verified: USES INDEX. (Returned: ${nReturned}, Docs Examined: ${docsExamined})`);
|
||||||
|
} else {
|
||||||
|
logger.warn(`⚠️ [${testName}] Warning: COLLSCAN detected! (Stage: ${stage})`);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
verifyIndexes();
|
||||||
72
src/scripts/verify-kpi-queries.ts
Normal file
72
src/scripts/verify-kpi-queries.ts
Normal file
@ -0,0 +1,72 @@
|
|||||||
|
import { connectMongoDB } from '../config/database';
|
||||||
|
import { DealerClaimModel } from '../models/mongoose/DealerClaim.schema';
|
||||||
|
import { WorkflowRequestModel } from '../models/mongoose/WorkflowRequest.schema';
|
||||||
|
import logger from '../utils/logger';
|
||||||
|
|
||||||
|
const verifyKPIs = async () => {
|
||||||
|
try {
|
||||||
|
await connectMongoDB();
|
||||||
|
logger.info('🚀 Starting KPI Query Verification (Refined)...');
|
||||||
|
|
||||||
|
// 1. Dealer Spend Analysis (Aggregation on Consolidated Claims)
|
||||||
|
// Goal: Get total claimed amount per dealer (grouping by name)
|
||||||
|
logger.info('📊 KPI 1: Dealer Spend Analysis (Consolidated Schema Power)');
|
||||||
|
const totalClaims = await DealerClaimModel.countDocuments();
|
||||||
|
logger.info(`Total Claims in DB: ${totalClaims}`);
|
||||||
|
|
||||||
|
if (totalClaims > 0) {
|
||||||
|
const dealerSpend = await DealerClaimModel.aggregate([
|
||||||
|
{
|
||||||
|
$group: {
|
||||||
|
_id: '$dealer.name',
|
||||||
|
totalClaims: { $sum: 1 },
|
||||||
|
totalEstimatedBudget: { $sum: '$proposal.totalEstimatedBudget' },
|
||||||
|
avgBudget: { $avg: '$proposal.totalEstimatedBudget' }
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{ $sort: { totalEstimatedBudget: -1 } },
|
||||||
|
{ $limit: 10 }
|
||||||
|
]);
|
||||||
|
console.table(dealerSpend);
|
||||||
|
} else {
|
||||||
|
logger.warn('⚠️ No claims found. Distribution check skipped.');
|
||||||
|
}
|
||||||
|
logger.info('✅ Dealer Spend Query executed!');
|
||||||
|
|
||||||
|
// 2. TAT Efficiency (Aggregation on Normalized Workflows)
|
||||||
|
// Goal: Stats by Status
|
||||||
|
logger.info('⏱️ KPI 2: Workflow Status Distribution (Normalized Schema Power)');
|
||||||
|
const workflowStats = await WorkflowRequestModel.aggregate([
|
||||||
|
{
|
||||||
|
$group: {
|
||||||
|
_id: '$status',
|
||||||
|
count: { $sum: 1 },
|
||||||
|
avgTatHours: { $avg: '$totalTatHours' }
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{ $sort: { count: -1 } }
|
||||||
|
]);
|
||||||
|
console.table(workflowStats);
|
||||||
|
logger.info('✅ TAT Analysis Query executed successfully!');
|
||||||
|
|
||||||
|
// 3. Deep Filtering
|
||||||
|
// Goal: Find claims with ANY cost items
|
||||||
|
logger.info('🔍 Filter 1: Deep Search for Claims with Cost Items');
|
||||||
|
const complexClaims = await DealerClaimModel.find({
|
||||||
|
'proposal.costItems': { $exists: true, $not: { $size: 0 } }
|
||||||
|
}).select('claimId dealer.name proposal.totalEstimatedBudget').limit(5);
|
||||||
|
|
||||||
|
logger.info(`Found ${complexClaims.length} claims with cost items.`);
|
||||||
|
complexClaims.forEach(c => {
|
||||||
|
console.log(`- Claim ${c.claimId} (${c.dealer.name}) - Budget: ${c.proposal?.totalEstimatedBudget}`);
|
||||||
|
});
|
||||||
|
|
||||||
|
logger.info('🎉 KPI Verification Completed Successfully!');
|
||||||
|
process.exit(0);
|
||||||
|
} catch (error) {
|
||||||
|
logger.error('❌ Verification Failed:', error);
|
||||||
|
process.exit(1);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
verifyKPIs();
|
||||||
81
src/scripts/verify-mongo-services.ts
Normal file
81
src/scripts/verify-mongo-services.ts
Normal file
@ -0,0 +1,81 @@
|
|||||||
|
import { connectMongoDB } from '../config/database';
|
||||||
|
import { UserService } from '../services/user.service';
|
||||||
|
import { UserModel } from '../models/mongoose/User.schema';
|
||||||
|
import logger from '../utils/logger';
|
||||||
|
import mongoose from 'mongoose';
|
||||||
|
|
||||||
|
const userService = new UserService();
|
||||||
|
|
||||||
|
const verifyUserMigration = async () => {
|
||||||
|
try {
|
||||||
|
await connectMongoDB();
|
||||||
|
logger.info('🚀 Starting MongoDB Service Verification...');
|
||||||
|
|
||||||
|
// 1. Test User Creation (Mongo)
|
||||||
|
const testEmail = `mongo-user-${Date.now()}@test.com`;
|
||||||
|
const testSub = `okta-sub-${Date.now()}`;
|
||||||
|
|
||||||
|
logger.info(`👉 Test 1: Creating User (Email: ${testEmail})`);
|
||||||
|
|
||||||
|
const partialData: any = {
|
||||||
|
oktaSub: testSub,
|
||||||
|
email: testEmail,
|
||||||
|
firstName: 'Mongo',
|
||||||
|
lastName: 'Tester',
|
||||||
|
displayName: 'Mongo Tester',
|
||||||
|
department: 'IT',
|
||||||
|
isActive: true
|
||||||
|
};
|
||||||
|
|
||||||
|
const newUser = await userService.createOrUpdateUser(partialData);
|
||||||
|
logger.info(` Result: User Created with ID: ${newUser.userId} (MongoID: ${newUser._id})`);
|
||||||
|
|
||||||
|
if (!newUser._id || !newUser.userId) {
|
||||||
|
throw new Error('User creation failed: Missing ID');
|
||||||
|
}
|
||||||
|
|
||||||
|
// 2. Test Get By ID (Mongo)
|
||||||
|
logger.info(`👉 Test 2: Get User By userId (UUID string)`);
|
||||||
|
const fetchedUser = await userService.getUserById(newUser.userId);
|
||||||
|
|
||||||
|
if (fetchedUser?.email === testEmail) {
|
||||||
|
logger.info(' Result: ✅ Fetched successfully.');
|
||||||
|
} else {
|
||||||
|
throw new Error('Fetched user email mismatch');
|
||||||
|
}
|
||||||
|
|
||||||
|
// 3. Test Search (Regex)
|
||||||
|
logger.info(`👉 Test 3: Search User by Name "Mongo"`);
|
||||||
|
const searchResults = await userService.searchUsers('Mongo');
|
||||||
|
const found = searchResults.some((u: any) => u.email === testEmail);
|
||||||
|
|
||||||
|
if (found) {
|
||||||
|
logger.info(` Result: ✅ Found user in search results. (Total hits: ${searchResults.length})`);
|
||||||
|
} else {
|
||||||
|
throw new Error('Search failed to find user.');
|
||||||
|
}
|
||||||
|
|
||||||
|
// 4. Test Ensure Exists
|
||||||
|
logger.info(`👉 Test 4: Ensure Exists (Should update existing)`);
|
||||||
|
const updated = await userService.ensureUserExists({
|
||||||
|
email: testEmail,
|
||||||
|
firstName: 'MongoUpdated',
|
||||||
|
userId: testSub // passing OktaSub as userId param in this context
|
||||||
|
});
|
||||||
|
|
||||||
|
if (updated.firstName === 'MongoUpdated') {
|
||||||
|
logger.info(' Result: ✅ User Updated successfully.');
|
||||||
|
} else {
|
||||||
|
throw new Error('Update failed');
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.info('🎉 User Service Migration Verified! All operations hitting MongoDB.');
|
||||||
|
process.exit(0);
|
||||||
|
|
||||||
|
} catch (error) {
|
||||||
|
logger.error('❌ Service Verification Failed:', error);
|
||||||
|
process.exit(1);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
verifyUserMigration();
|
||||||
101
src/scripts/verify-workflow-actions.ts
Normal file
101
src/scripts/verify-workflow-actions.ts
Normal file
@ -0,0 +1,101 @@
|
|||||||
|
import { connectMongoDB } from '../config/database';
|
||||||
|
import { WorkflowActionService } from '../services/workflow.action.service';
|
||||||
|
import { WorkflowRequestModel } from '../models/mongoose/WorkflowRequest.schema';
|
||||||
|
import { ApprovalLevelModel } from '../models/mongoose/ApprovalLevel.schema';
|
||||||
|
import mongoose from 'mongoose';
|
||||||
|
import logger from '../utils/logger';
|
||||||
|
|
||||||
|
const service = new WorkflowActionService();
|
||||||
|
|
||||||
|
const verifyActions = async () => {
|
||||||
|
try {
|
||||||
|
await connectMongoDB();
|
||||||
|
logger.info('🚀 Starting Workflow Logic Verification...');
|
||||||
|
|
||||||
|
// 1. Setup: Create a Dummy Request with 2 Levels
|
||||||
|
const reqNum = `TEST-FLOW-${Date.now()}`;
|
||||||
|
const userId = 'user-123';
|
||||||
|
|
||||||
|
await WorkflowRequestModel.create({
|
||||||
|
requestNumber: reqNum,
|
||||||
|
initiator: { userId, email: 'test@re.com', name: 'Test User' },
|
||||||
|
title: 'Dynamic Flow Test',
|
||||||
|
description: 'Testing add/skip logic',
|
||||||
|
status: 'PENDING',
|
||||||
|
currentLevel: 1,
|
||||||
|
totalLevels: 2,
|
||||||
|
dates: { created: new Date() },
|
||||||
|
flags: { isDraft: false }
|
||||||
|
});
|
||||||
|
|
||||||
|
await ApprovalLevelModel.create([
|
||||||
|
{
|
||||||
|
levelId: new mongoose.Types.ObjectId().toString(),
|
||||||
|
requestId: reqNum,
|
||||||
|
levelNumber: 1,
|
||||||
|
status: 'PENDING', // Active
|
||||||
|
approver: { userId: 'mgr-1', name: 'Manager 1', email: 'm1@re.com' },
|
||||||
|
tat: { assignedHours: 24 }
|
||||||
|
},
|
||||||
|
{
|
||||||
|
levelId: new mongoose.Types.ObjectId().toString(),
|
||||||
|
requestId: reqNum,
|
||||||
|
levelNumber: 2,
|
||||||
|
status: 'PENDING', // Waiting
|
||||||
|
approver: { userId: 'mgr-2', name: 'Manager 2', email: 'm2@re.com' },
|
||||||
|
tat: { assignedHours: 48 }
|
||||||
|
}
|
||||||
|
]);
|
||||||
|
logger.info(`✅ Setup: Created Request ${reqNum} with 2 Levels.`);
|
||||||
|
|
||||||
|
// 2. Test: Approve Level 1
|
||||||
|
logger.info('👉 Action: Approving Level 1...');
|
||||||
|
const res1 = await service.approveRequest(reqNum, 'mgr-1');
|
||||||
|
logger.info(` Result: ${res1}`);
|
||||||
|
|
||||||
|
const reqAfterApprove = await WorkflowRequestModel.findOne({ requestNumber: reqNum });
|
||||||
|
if (reqAfterApprove?.currentLevel === 2) {
|
||||||
|
logger.info('✅ Verification: Moved to Level 2.');
|
||||||
|
} else {
|
||||||
|
logger.error('❌ Verification Failed: Did not move to Level 2');
|
||||||
|
}
|
||||||
|
|
||||||
|
// 3. Test: Add Ad-hoc Approver at Level 3 (Shift nothing, just append? No, let's insert between 2 and 3? Wait, there are only 2 levels. Let's insert AT 2 (which is current active)).
|
||||||
|
// Actually, let's insert a NEW Level 2. So the old Level 2 becomes Level 3.
|
||||||
|
logger.info('👉 Action: Adding Ad-hoc Approver at Level 2 (Inserting)...');
|
||||||
|
// Note: Current level is 2. We can't insert AT current level usually, but for test let's try inserting at 3.
|
||||||
|
// Or better, let's reset currentLevel to 1, insert at 2.
|
||||||
|
|
||||||
|
// Let's insert at Level 3 (Appending)
|
||||||
|
const res2 = await service.addAdHocApprover(reqNum, 3, { userId: 'adhoc-1', name: 'AdHoc User', email: 'adhoc@re.com' });
|
||||||
|
logger.info(` Result: ${res2}`);
|
||||||
|
|
||||||
|
const level3 = await ApprovalLevelModel.findOne({ requestId: reqNum, levelNumber: 3 });
|
||||||
|
if (level3?.approver.name === 'AdHoc User') {
|
||||||
|
logger.info('✅ Verification: Ad-hoc Level 3 created.');
|
||||||
|
} else {
|
||||||
|
logger.error('❌ Verification Failed: Level 3 not found.');
|
||||||
|
}
|
||||||
|
|
||||||
|
// 4. Test: Skip Level 2
|
||||||
|
logger.info('👉 Action: Skipping Level 2...');
|
||||||
|
const res3 = await service.skipApprover(reqNum, 2, ' urgent skip');
|
||||||
|
logger.info(` Result: ${res3}`);
|
||||||
|
|
||||||
|
const reqAfterSkip = await WorkflowRequestModel.findOne({ requestNumber: reqNum });
|
||||||
|
if (reqAfterSkip?.currentLevel === 3) {
|
||||||
|
logger.info('✅ Verification: Skipped Level 2, now at Level 3.');
|
||||||
|
} else {
|
||||||
|
logger.error(`❌ Verification Failed: Request is at Level ${reqAfterSkip?.currentLevel}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.info('🎉 Dynamic Workflow Logic Verified Successfully!');
|
||||||
|
process.exit(0);
|
||||||
|
|
||||||
|
} catch (error) {
|
||||||
|
logger.error('❌ Test Failed:', error);
|
||||||
|
process.exit(1);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
verifyActions();
|
||||||
@ -33,6 +33,10 @@ const startServer = async (): Promise<void> => {
|
|||||||
require('./queues/pauseResumeWorker'); // Initialize pause resume worker
|
require('./queues/pauseResumeWorker'); // Initialize pause resume worker
|
||||||
const { initializeQueueMetrics } = require('./utils/queueMetrics');
|
const { initializeQueueMetrics } = require('./utils/queueMetrics');
|
||||||
const { emailService } = require('./services/email.service');
|
const { emailService } = require('./services/email.service');
|
||||||
|
const { connectMongoDB } = require('./config/database');
|
||||||
|
|
||||||
|
// Initialize MongoDB Connection
|
||||||
|
await connectMongoDB();
|
||||||
|
|
||||||
// Re-initialize email service after secrets are loaded (in case SMTP credentials were loaded)
|
// Re-initialize email service after secrets are loaded (in case SMTP credentials were loaded)
|
||||||
// This ensures the email service uses production SMTP if credentials are available
|
// This ensures the email service uses production SMTP if credentials are available
|
||||||
|
|||||||
@ -1,11 +1,12 @@
|
|||||||
import logger from '@utils/logger';
|
import logger from '../utils/logger';
|
||||||
|
import { ActivityModel } from '../models/mongoose/Activity.schema';
|
||||||
|
|
||||||
// Special UUID for system events (login, etc.) - well-known UUID: 00000000-0000-0000-0000-000000000001
|
// Special UUID for system events
|
||||||
export const SYSTEM_EVENT_REQUEST_ID = '00000000-0000-0000-0000-000000000001';
|
export const SYSTEM_EVENT_REQUEST_ID = '00000000-0000-0000-0000-000000000001';
|
||||||
|
|
||||||
export type ActivityEntry = {
|
export type ActivityEntry = {
|
||||||
requestId: string;
|
requestId: string;
|
||||||
type: 'created' | 'submitted' | 'assignment' | 'approval' | 'rejection' | 'status_change' | 'comment' | 'reminder' | 'document_added' | 'sla_warning' | 'ai_conclusion_generated' | 'summary_generated' | 'closed' | 'login' | 'paused' | 'resumed' | 'pause_retriggered';
|
type: 'created' | 'submitted' | 'assignment' | 'approval' | 'rejection' | 'status_change' | 'comment' | 'reminder' | 'document_added' | 'sla_warning' | 'ai_conclusion_generated' | 'summary_generated' | 'closed' | 'login' | 'paused' | 'resumed' | 'pause_retriggered' | 'participant_added' | 'skipped' | 'modification';
|
||||||
user?: { userId: string; name?: string; email?: string };
|
user?: { userId: string; name?: string; email?: string };
|
||||||
timestamp: string;
|
timestamp: string;
|
||||||
action: string;
|
action: string;
|
||||||
@ -17,7 +18,7 @@ export type ActivityEntry = {
|
|||||||
severity?: string;
|
severity?: string;
|
||||||
};
|
};
|
||||||
|
|
||||||
class ActivityService {
|
class ActivityMongoService {
|
||||||
private byRequest: Map<string, ActivityEntry[]> = new Map();
|
private byRequest: Map<string, ActivityEntry[]> = new Map();
|
||||||
|
|
||||||
private inferCategory(type: string): string {
|
private inferCategory(type: string): string {
|
||||||
@ -37,7 +38,10 @@ class ActivityService {
|
|||||||
'login': 'AUTHENTICATION',
|
'login': 'AUTHENTICATION',
|
||||||
'paused': 'WORKFLOW',
|
'paused': 'WORKFLOW',
|
||||||
'resumed': 'WORKFLOW',
|
'resumed': 'WORKFLOW',
|
||||||
'pause_retriggered': 'WORKFLOW'
|
'pause_retriggered': 'WORKFLOW',
|
||||||
|
'participant_added': 'PARTICIPANT',
|
||||||
|
'skipped': 'WORKFLOW',
|
||||||
|
'modification': 'WORKFLOW'
|
||||||
};
|
};
|
||||||
return categoryMap[type] || 'OTHER';
|
return categoryMap[type] || 'OTHER';
|
||||||
}
|
}
|
||||||
@ -59,7 +63,10 @@ class ActivityService {
|
|||||||
'ai_conclusion_generated': 'INFO',
|
'ai_conclusion_generated': 'INFO',
|
||||||
'paused': 'WARNING',
|
'paused': 'WARNING',
|
||||||
'resumed': 'INFO',
|
'resumed': 'INFO',
|
||||||
'pause_retriggered': 'INFO'
|
'pause_retriggered': 'INFO',
|
||||||
|
'participant_added': 'INFO',
|
||||||
|
'skipped': 'WARNING',
|
||||||
|
'modification': 'INFO'
|
||||||
};
|
};
|
||||||
return severityMap[type] || 'INFO';
|
return severityMap[type] || 'INFO';
|
||||||
}
|
}
|
||||||
@ -71,32 +78,37 @@ class ActivityService {
|
|||||||
|
|
||||||
// Persist to database
|
// Persist to database
|
||||||
try {
|
try {
|
||||||
const { Activity } = require('@models/Activity');
|
const userName = entry.user?.name || entry.user?.email || 'System';
|
||||||
const userName = entry.user?.name || entry.user?.email || null;
|
const activityCategory = entry.category || this.inferCategory(entry.type);
|
||||||
|
const severity = entry.severity || this.inferSeverity(entry.type);
|
||||||
|
const isSystemEvent = !entry.user || entry.user.userId === 'SYSTEM';
|
||||||
|
|
||||||
const activityData = {
|
const activityData = {
|
||||||
|
activityId: require('crypto').randomUUID(),
|
||||||
requestId: entry.requestId,
|
requestId: entry.requestId,
|
||||||
userId: entry.user?.userId || null,
|
userId: entry.user?.userId || 'SYSTEM',
|
||||||
userName: userName,
|
userName: userName,
|
||||||
activityType: entry.type,
|
activityType: entry.type,
|
||||||
activityDescription: entry.details,
|
activityDescription: entry.details,
|
||||||
activityCategory: entry.category || this.inferCategory(entry.type),
|
activityCategory: activityCategory,
|
||||||
severity: entry.severity || this.inferSeverity(entry.type),
|
severity: severity,
|
||||||
metadata: entry.metadata || null,
|
isSystemEvent: isSystemEvent,
|
||||||
isSystemEvent: !entry.user,
|
metadata: entry.metadata || {},
|
||||||
ipAddress: entry.ipAddress || null, // Database accepts null
|
ipAddress: entry.ipAddress || undefined,
|
||||||
userAgent: entry.userAgent || null, // Database accepts null
|
userAgent: entry.userAgent || undefined,
|
||||||
|
createdAt: new Date()
|
||||||
};
|
};
|
||||||
|
|
||||||
logger.info(`[Activity] Creating activity:`, {
|
logger.info(`[Activity] Creating activity (Mongo):`, {
|
||||||
requestId: entry.requestId,
|
requestId: entry.requestId,
|
||||||
userName,
|
userName,
|
||||||
userId: entry.user?.userId,
|
userId: entry.user?.userId,
|
||||||
type: entry.type,
|
activityType: entry.type,
|
||||||
ipAddress: entry.ipAddress ? '***' : null
|
activityCategory,
|
||||||
|
severity
|
||||||
});
|
});
|
||||||
|
|
||||||
await Activity.create(activityData);
|
await ActivityModel.create(activityData);
|
||||||
|
|
||||||
logger.info(`[Activity] Successfully logged activity for request ${entry.requestId} by user: ${userName}`);
|
logger.info(`[Activity] Successfully logged activity for request ${entry.requestId} by user: ${userName}`);
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
@ -107,8 +119,43 @@ class ActivityService {
|
|||||||
get(requestId: string): ActivityEntry[] {
|
get(requestId: string): ActivityEntry[] {
|
||||||
return this.byRequest.get(requestId) || [];
|
return this.byRequest.get(requestId) || [];
|
||||||
}
|
}
|
||||||
|
|
||||||
|
private inferTitle(type: string): string {
|
||||||
|
const titleMap: Record<string, string> = {
|
||||||
|
'created': 'Request Created',
|
||||||
|
'submitted': 'Request Submitted',
|
||||||
|
'assignment': 'Assigned',
|
||||||
|
'approval': 'Approved',
|
||||||
|
'rejection': 'Rejected',
|
||||||
|
'status_change': 'Status Updated',
|
||||||
|
'comment': 'Activity',
|
||||||
|
'document_added': 'Document Added',
|
||||||
|
'sla_warning': 'SLA Warning',
|
||||||
|
'reminder': 'Reminder Sent',
|
||||||
|
'ai_conclusion_generated': 'AI Analysis',
|
||||||
|
'summary_generated': 'Summary Generated',
|
||||||
|
'closed': 'Closed',
|
||||||
|
'login': 'Login',
|
||||||
|
'paused': 'Paused',
|
||||||
|
'resumed': 'Resumed',
|
||||||
|
'pause_retriggered': 'Pause Retriggered',
|
||||||
|
'participant_added': 'Participant Added',
|
||||||
|
'skipped': 'Approver Skipped',
|
||||||
|
'modification': 'Request Modified'
|
||||||
|
};
|
||||||
|
return titleMap[type] || 'Activity';
|
||||||
}
|
}
|
||||||
|
|
||||||
export const activityService = new ActivityService();
|
async getActivitiesForRequest(requestId: string) {
|
||||||
|
const activities = await ActivityModel.find({ requestId }).sort({ createdAt: -1 });
|
||||||
|
return activities.map(item => {
|
||||||
|
const activity = item.toObject();
|
||||||
|
return {
|
||||||
|
...activity,
|
||||||
|
title: this.inferTitle(activity.activityType)
|
||||||
|
};
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export const activityMongoService = new ActivityMongoService();
|
||||||
|
|||||||
@ -1,897 +1,72 @@
|
|||||||
import { ApprovalLevel } from '@models/ApprovalLevel';
|
import { ApprovalLevelModel } from '../models/mongoose/ApprovalLevel.schema';
|
||||||
import { WorkflowRequest } from '@models/WorkflowRequest';
|
import { WorkflowRequestModel } from '../models/mongoose/WorkflowRequest.schema';
|
||||||
import { Participant } from '@models/Participant';
|
|
||||||
import { TatAlert } from '@models/TatAlert';
|
|
||||||
import { ApprovalAction } from '../types/approval.types';
|
import { ApprovalAction } from '../types/approval.types';
|
||||||
import { ApprovalStatus, WorkflowStatus } from '../types/common.types';
|
import { ApprovalStatus, WorkflowStatus } from '../types/common.types';
|
||||||
import { calculateTATPercentage } from '@utils/helpers';
|
import logger from '../utils/logger';
|
||||||
import { calculateElapsedWorkingHours } from '@utils/tatTimeUtils';
|
|
||||||
import logger, { logWorkflowEvent, logAIEvent } from '@utils/logger';
|
|
||||||
import { Op } from 'sequelize';
|
|
||||||
import { notificationService } from './notification.service';
|
|
||||||
import { activityService } from './activity.service';
|
|
||||||
import { tatSchedulerService } from './tatScheduler.service';
|
|
||||||
import { emitToRequestRoom } from '../realtime/socket';
|
|
||||||
// Note: DealerClaimService import removed - dealer claim approvals are handled by DealerClaimApprovalService
|
|
||||||
|
|
||||||
export class ApprovalService {
|
export class ApprovalService {
|
||||||
async approveLevel(levelId: string, action: ApprovalAction, _userId: string, requestMetadata?: { ipAddress?: string | null; userAgent?: string | null }): Promise<ApprovalLevel | null> {
|
|
||||||
|
async approveLevel(
|
||||||
|
levelId: string,
|
||||||
|
action: ApprovalAction,
|
||||||
|
userId: string,
|
||||||
|
requestMetadata?: { ipAddress?: string | null; userAgent?: string | null }
|
||||||
|
): Promise<any> {
|
||||||
try {
|
try {
|
||||||
const level = await ApprovalLevel.findByPk(levelId);
|
const level = await ApprovalLevelModel.findOne({ levelId });
|
||||||
if (!level) return null;
|
if (!level) return null;
|
||||||
|
|
||||||
// Get workflow to determine priority for working hours calculation
|
const wf = await WorkflowRequestModel.findOne({ requestId: level.requestId });
|
||||||
const wf = await WorkflowRequest.findByPk(level.requestId);
|
|
||||||
if (!wf) return null;
|
if (!wf) return null;
|
||||||
|
|
||||||
// Verify this is NOT a claim management workflow (should use DealerClaimApprovalService)
|
// Simple approval logic for generic workflows
|
||||||
const workflowType = (wf as any)?.workflowType;
|
level.status = ApprovalStatus.APPROVED;
|
||||||
if (workflowType === 'CLAIM_MANAGEMENT') {
|
level.actionDate = new Date();
|
||||||
logger.error(`[Approval] Attempted to use ApprovalService for CLAIM_MANAGEMENT workflow ${level.requestId}. Use DealerClaimApprovalService instead.`);
|
level.comments = action.comments;
|
||||||
throw new Error('ApprovalService cannot be used for CLAIM_MANAGEMENT workflows. Use DealerClaimApprovalService instead.');
|
await level.save();
|
||||||
}
|
|
||||||
|
|
||||||
const priority = ((wf as any)?.priority || 'standard').toString().toLowerCase();
|
// Note: Full state machine logic would go here similar to DealerClaimApprovalMongoService
|
||||||
const isPaused = (wf as any).isPaused || (level as any).isPaused;
|
|
||||||
|
|
||||||
// If paused, resume automatically when approving/rejecting (requirement 3.6)
|
return level;
|
||||||
if (isPaused) {
|
|
||||||
const { pauseService } = await import('./pause.service');
|
|
||||||
try {
|
|
||||||
await pauseService.resumeWorkflow(level.requestId, _userId);
|
|
||||||
logger.info(`[Approval] Auto-resumed paused workflow ${level.requestId} when ${action.action === 'APPROVE' ? 'approving' : 'rejecting'}`);
|
|
||||||
} catch (pauseError) {
|
|
||||||
logger.warn(`[Approval] Failed to auto-resume paused workflow:`, pauseError);
|
|
||||||
// Continue with approval/rejection even if resume fails
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
const now = new Date();
|
|
||||||
|
|
||||||
// Calculate elapsed hours using working hours logic (with pause handling)
|
|
||||||
// Case 1: Level is currently paused (isPaused = true)
|
|
||||||
// Case 2: Level was paused and resumed (isPaused = false but pauseElapsedHours and pauseResumeDate exist)
|
|
||||||
const isPausedLevel = (level as any).isPaused;
|
|
||||||
const wasResumed = !isPausedLevel &&
|
|
||||||
(level as any).pauseElapsedHours !== null &&
|
|
||||||
(level as any).pauseElapsedHours !== undefined &&
|
|
||||||
(level as any).pauseResumeDate !== null;
|
|
||||||
|
|
||||||
const pauseInfo = isPausedLevel ? {
|
|
||||||
// Level is currently paused - return frozen elapsed hours at pause time
|
|
||||||
isPaused: true,
|
|
||||||
pausedAt: (level as any).pausedAt,
|
|
||||||
pauseElapsedHours: (level as any).pauseElapsedHours,
|
|
||||||
pauseResumeDate: (level as any).pauseResumeDate
|
|
||||||
} : wasResumed ? {
|
|
||||||
// Level was paused but has been resumed - add pre-pause elapsed hours + time since resume
|
|
||||||
isPaused: false,
|
|
||||||
pausedAt: null,
|
|
||||||
pauseElapsedHours: Number((level as any).pauseElapsedHours), // Pre-pause elapsed hours
|
|
||||||
pauseResumeDate: (level as any).pauseResumeDate // Actual resume timestamp
|
|
||||||
} : undefined;
|
|
||||||
|
|
||||||
const elapsedHours = await calculateElapsedWorkingHours(
|
|
||||||
level.levelStartTime || level.createdAt,
|
|
||||||
now,
|
|
||||||
priority,
|
|
||||||
pauseInfo
|
|
||||||
);
|
|
||||||
const tatPercentage = calculateTATPercentage(elapsedHours, level.tatHours);
|
|
||||||
|
|
||||||
const updateData = {
|
|
||||||
status: action.action === 'APPROVE' ? ApprovalStatus.APPROVED : ApprovalStatus.REJECTED,
|
|
||||||
actionDate: now,
|
|
||||||
levelEndTime: now,
|
|
||||||
elapsedHours,
|
|
||||||
tatPercentageUsed: tatPercentage,
|
|
||||||
comments: action.comments,
|
|
||||||
rejectionReason: action.rejectionReason
|
|
||||||
};
|
|
||||||
|
|
||||||
const updatedLevel = await level.update(updateData);
|
|
||||||
|
|
||||||
// Cancel TAT jobs for the current level since it's been actioned
|
|
||||||
try {
|
|
||||||
await tatSchedulerService.cancelTatJobs(level.requestId, level.levelId);
|
|
||||||
logger.info(`[Approval] TAT jobs cancelled for level ${level.levelId}`);
|
|
||||||
} catch (tatError) {
|
|
||||||
logger.error(`[Approval] Failed to cancel TAT jobs:`, tatError);
|
|
||||||
// Don't fail the approval if TAT cancellation fails
|
|
||||||
}
|
|
||||||
|
|
||||||
// Update TAT alerts for this level to mark completion status
|
|
||||||
try {
|
|
||||||
const wasOnTime = elapsedHours <= level.tatHours;
|
|
||||||
await TatAlert.update(
|
|
||||||
{
|
|
||||||
wasCompletedOnTime: wasOnTime,
|
|
||||||
completionTime: now
|
|
||||||
},
|
|
||||||
{
|
|
||||||
where: { levelId: level.levelId }
|
|
||||||
}
|
|
||||||
);
|
|
||||||
logger.info(`[Approval] TAT alerts updated for level ${level.levelId} - Completed ${wasOnTime ? 'on time' : 'late'}`);
|
|
||||||
} catch (tatAlertError) {
|
|
||||||
logger.error(`[Approval] Failed to update TAT alerts:`, tatAlertError);
|
|
||||||
// Don't fail the approval if TAT alert update fails
|
|
||||||
}
|
|
||||||
|
|
||||||
// Handle approval - move to next level or close workflow (wf already loaded above)
|
|
||||||
if (action.action === 'APPROVE') {
|
|
||||||
// Check if this is final approval: either isFinalApprover flag is set OR all levels are approved
|
|
||||||
// This handles cases where additional approvers are added after initial approval
|
|
||||||
const allLevels = await ApprovalLevel.findAll({
|
|
||||||
where: { requestId: level.requestId },
|
|
||||||
order: [['levelNumber', 'ASC']]
|
|
||||||
});
|
|
||||||
const approvedLevelsCount = allLevels.filter((l: any) => l.status === 'APPROVED').length;
|
|
||||||
const totalLevels = allLevels.length;
|
|
||||||
const isAllLevelsApproved = approvedLevelsCount === totalLevels;
|
|
||||||
const isFinalApproval = level.isFinalApprover || isAllLevelsApproved;
|
|
||||||
|
|
||||||
if (isFinalApproval) {
|
|
||||||
// Final approver - close workflow as APPROVED
|
|
||||||
await WorkflowRequest.update(
|
|
||||||
{
|
|
||||||
status: WorkflowStatus.APPROVED,
|
|
||||||
closureDate: now,
|
|
||||||
currentLevel: (level.levelNumber || 0) + 1
|
|
||||||
},
|
|
||||||
{ where: { requestId: level.requestId } }
|
|
||||||
);
|
|
||||||
logWorkflowEvent('approved', level.requestId, {
|
|
||||||
level: level.levelNumber,
|
|
||||||
isFinalApproval: true,
|
|
||||||
status: 'APPROVED',
|
|
||||||
detectedBy: level.isFinalApprover ? 'isFinalApprover flag' : 'all levels approved check'
|
|
||||||
});
|
|
||||||
|
|
||||||
// Log final approval activity first (so it's included in AI context)
|
|
||||||
activityService.log({
|
|
||||||
requestId: level.requestId,
|
|
||||||
type: 'approval',
|
|
||||||
user: { userId: level.approverId, name: level.approverName },
|
|
||||||
timestamp: new Date().toISOString(),
|
|
||||||
action: 'Approved',
|
|
||||||
details: `Request approved and finalized by ${level.approverName || level.approverEmail}. Awaiting conclusion remark from initiator.`,
|
|
||||||
ipAddress: requestMetadata?.ipAddress || undefined,
|
|
||||||
userAgent: requestMetadata?.userAgent || undefined
|
|
||||||
});
|
|
||||||
|
|
||||||
// Generate AI conclusion remark ASYNCHRONOUSLY (don't wait)
|
|
||||||
// This runs in the background without blocking the approval response
|
|
||||||
(async () => {
|
|
||||||
try {
|
|
||||||
const { aiService } = await import('./ai.service');
|
|
||||||
const { ConclusionRemark } = await import('@models/index');
|
|
||||||
const { ApprovalLevel } = await import('@models/ApprovalLevel');
|
|
||||||
const { WorkNote } = await import('@models/WorkNote');
|
|
||||||
const { Document } = await import('@models/Document');
|
|
||||||
const { Activity } = await import('@models/Activity');
|
|
||||||
const { getConfigValue } = await import('./configReader.service');
|
|
||||||
|
|
||||||
// Check if AI features and remark generation are enabled in admin config
|
|
||||||
const aiEnabled = (await getConfigValue('AI_ENABLED', 'true'))?.toLowerCase() === 'true';
|
|
||||||
const remarkGenerationEnabled = (await getConfigValue('AI_REMARK_GENERATION_ENABLED', 'true'))?.toLowerCase() === 'true';
|
|
||||||
|
|
||||||
if (aiEnabled && remarkGenerationEnabled && aiService.isAvailable()) {
|
|
||||||
logAIEvent('request', {
|
|
||||||
requestId: level.requestId,
|
|
||||||
action: 'conclusion_generation_started',
|
|
||||||
});
|
|
||||||
|
|
||||||
// Gather context for AI generation
|
|
||||||
const approvalLevels = await ApprovalLevel.findAll({
|
|
||||||
where: { requestId: level.requestId },
|
|
||||||
order: [['levelNumber', 'ASC']]
|
|
||||||
});
|
|
||||||
|
|
||||||
const workNotes = await WorkNote.findAll({
|
|
||||||
where: { requestId: level.requestId },
|
|
||||||
order: [['createdAt', 'ASC']],
|
|
||||||
limit: 20
|
|
||||||
});
|
|
||||||
|
|
||||||
const documents = await Document.findAll({
|
|
||||||
where: { requestId: level.requestId },
|
|
||||||
order: [['uploadedAt', 'DESC']]
|
|
||||||
});
|
|
||||||
|
|
||||||
const activities = await Activity.findAll({
|
|
||||||
where: { requestId: level.requestId },
|
|
||||||
order: [['createdAt', 'ASC']],
|
|
||||||
limit: 50
|
|
||||||
});
|
|
||||||
|
|
||||||
// Build context object
|
|
||||||
const context = {
|
|
||||||
requestTitle: (wf as any).title,
|
|
||||||
requestDescription: (wf as any).description,
|
|
||||||
requestNumber: (wf as any).requestNumber,
|
|
||||||
priority: (wf as any).priority,
|
|
||||||
approvalFlow: approvalLevels.map((l: any) => {
|
|
||||||
const tatPercentage = l.tatPercentageUsed !== undefined && l.tatPercentageUsed !== null
|
|
||||||
? Number(l.tatPercentageUsed)
|
|
||||||
: (l.elapsedHours && l.tatHours ? (Number(l.elapsedHours) / Number(l.tatHours)) * 100 : 0);
|
|
||||||
return {
|
|
||||||
levelNumber: l.levelNumber,
|
|
||||||
approverName: l.approverName,
|
|
||||||
status: l.status,
|
|
||||||
comments: l.comments,
|
|
||||||
actionDate: l.actionDate,
|
|
||||||
tatHours: Number(l.tatHours || 0),
|
|
||||||
elapsedHours: Number(l.elapsedHours || 0),
|
|
||||||
tatPercentageUsed: tatPercentage
|
|
||||||
};
|
|
||||||
}),
|
|
||||||
workNotes: workNotes.map((note: any) => ({
|
|
||||||
userName: note.userName,
|
|
||||||
message: note.message,
|
|
||||||
createdAt: note.createdAt
|
|
||||||
})),
|
|
||||||
documents: documents.map((doc: any) => ({
|
|
||||||
fileName: doc.originalFileName || doc.fileName,
|
|
||||||
uploadedBy: doc.uploadedBy,
|
|
||||||
uploadedAt: doc.uploadedAt
|
|
||||||
})),
|
|
||||||
activities: activities.map((activity: any) => ({
|
|
||||||
type: activity.activityType,
|
|
||||||
action: activity.activityDescription,
|
|
||||||
details: activity.activityDescription,
|
|
||||||
timestamp: activity.createdAt
|
|
||||||
}))
|
|
||||||
};
|
|
||||||
|
|
||||||
const aiResult = await aiService.generateConclusionRemark(context);
|
|
||||||
|
|
||||||
// Check if conclusion already exists (e.g., from previous final approval before additional approver was added)
|
|
||||||
const existingConclusion = await ConclusionRemark.findOne({
|
|
||||||
where: { requestId: level.requestId }
|
|
||||||
});
|
|
||||||
|
|
||||||
if (existingConclusion) {
|
|
||||||
// Update existing conclusion with new AI-generated remark (regenerated with updated context)
|
|
||||||
await existingConclusion.update({
|
|
||||||
aiGeneratedRemark: aiResult.remark,
|
|
||||||
aiModelUsed: aiResult.provider,
|
|
||||||
aiConfidenceScore: aiResult.confidence,
|
|
||||||
// Preserve finalRemark if it was already finalized
|
|
||||||
// Only reset if it wasn't finalized yet
|
|
||||||
finalRemark: (existingConclusion as any).finalizedAt ? (existingConclusion as any).finalRemark : null,
|
|
||||||
editedBy: null,
|
|
||||||
isEdited: false,
|
|
||||||
editCount: 0,
|
|
||||||
approvalSummary: {
|
|
||||||
totalLevels: approvalLevels.length,
|
|
||||||
approvedLevels: approvalLevels.filter((l: any) => l.status === 'APPROVED').length,
|
|
||||||
averageTatUsage: approvalLevels.reduce((sum: number, l: any) =>
|
|
||||||
sum + Number(l.tatPercentageUsed || 0), 0) / (approvalLevels.length || 1)
|
|
||||||
},
|
|
||||||
documentSummary: {
|
|
||||||
totalDocuments: documents.length,
|
|
||||||
documentNames: documents.map((d: any) => d.originalFileName || d.fileName)
|
|
||||||
},
|
|
||||||
keyDiscussionPoints: aiResult.keyPoints,
|
|
||||||
generatedAt: new Date(),
|
|
||||||
// Preserve finalizedAt if it was already finalized
|
|
||||||
finalizedAt: (existingConclusion as any).finalizedAt || null
|
|
||||||
} as any);
|
|
||||||
logger.info(`[Approval] Updated existing AI conclusion for request ${level.requestId} with regenerated content (includes new approver)`);
|
|
||||||
} else {
|
|
||||||
// Create new conclusion
|
|
||||||
await ConclusionRemark.create({
|
|
||||||
requestId: level.requestId,
|
|
||||||
aiGeneratedRemark: aiResult.remark,
|
|
||||||
aiModelUsed: aiResult.provider,
|
|
||||||
aiConfidenceScore: aiResult.confidence,
|
|
||||||
finalRemark: null,
|
|
||||||
editedBy: null,
|
|
||||||
isEdited: false,
|
|
||||||
editCount: 0,
|
|
||||||
approvalSummary: {
|
|
||||||
totalLevels: approvalLevels.length,
|
|
||||||
approvedLevels: approvalLevels.filter((l: any) => l.status === 'APPROVED').length,
|
|
||||||
averageTatUsage: approvalLevels.reduce((sum: number, l: any) =>
|
|
||||||
sum + Number(l.tatPercentageUsed || 0), 0) / (approvalLevels.length || 1)
|
|
||||||
},
|
|
||||||
documentSummary: {
|
|
||||||
totalDocuments: documents.length,
|
|
||||||
documentNames: documents.map((d: any) => d.originalFileName || d.fileName)
|
|
||||||
},
|
|
||||||
keyDiscussionPoints: aiResult.keyPoints,
|
|
||||||
generatedAt: new Date(),
|
|
||||||
finalizedAt: null
|
|
||||||
} as any);
|
|
||||||
}
|
|
||||||
|
|
||||||
logAIEvent('response', {
|
|
||||||
requestId: level.requestId,
|
|
||||||
action: 'conclusion_generation_completed',
|
|
||||||
});
|
|
||||||
|
|
||||||
// Log activity
|
|
||||||
activityService.log({
|
|
||||||
requestId: level.requestId,
|
|
||||||
type: 'ai_conclusion_generated',
|
|
||||||
user: { userId: null as any, name: 'System' }, // Use null instead of 'system' for UUID field
|
|
||||||
timestamp: new Date().toISOString(),
|
|
||||||
action: 'AI Conclusion Generated',
|
|
||||||
details: 'AI-powered conclusion remark generated for review by initiator',
|
|
||||||
ipAddress: undefined, // System-generated, no IP
|
|
||||||
userAgent: undefined // System-generated, no user agent
|
|
||||||
});
|
|
||||||
} else {
|
|
||||||
// Log why AI generation was skipped
|
|
||||||
if (!aiEnabled) {
|
|
||||||
logger.info(`[Approval] AI features disabled in admin config, skipping conclusion generation for ${level.requestId}`);
|
|
||||||
} else if (!remarkGenerationEnabled) {
|
|
||||||
logger.info(`[Approval] AI remark generation disabled in admin config, skipping for ${level.requestId}`);
|
|
||||||
} else if (!aiService.isAvailable()) {
|
|
||||||
logger.warn(`[Approval] AI service unavailable for ${level.requestId}, skipping conclusion generation`);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Auto-generate RequestSummary after final approval (system-level generation)
|
|
||||||
// This makes the summary immediately available when user views the approved request
|
|
||||||
try {
|
|
||||||
const { summaryService } = await import('./summary.service');
|
|
||||||
const summary = await summaryService.createSummary(level.requestId, 'system', {
|
|
||||||
isSystemGeneration: true
|
|
||||||
});
|
|
||||||
logger.info(`[Approval] ✅ Auto-generated summary ${(summary as any).summaryId} for approved request ${level.requestId}`);
|
|
||||||
|
|
||||||
// Log summary generation activity
|
|
||||||
activityService.log({
|
|
||||||
requestId: level.requestId,
|
|
||||||
type: 'summary_generated',
|
|
||||||
user: { userId: null as any, name: 'System' }, // Use null instead of 'system' for UUID field
|
|
||||||
timestamp: new Date().toISOString(),
|
|
||||||
action: 'Summary Auto-Generated',
|
|
||||||
details: 'Request summary auto-generated after final approval',
|
|
||||||
ipAddress: undefined,
|
|
||||||
userAgent: undefined
|
|
||||||
});
|
|
||||||
} catch (summaryError: any) {
|
|
||||||
// Log but don't fail - initiator can regenerate later
|
|
||||||
logger.error(`[Approval] Failed to auto-generate summary for ${level.requestId}:`, summaryError.message);
|
|
||||||
}
|
|
||||||
|
|
||||||
} catch (aiError) {
|
|
||||||
logAIEvent('error', {
|
|
||||||
requestId: level.requestId,
|
|
||||||
action: 'conclusion_generation_failed',
|
|
||||||
error: aiError,
|
|
||||||
});
|
|
||||||
// Silent failure - initiator can write manually
|
|
||||||
|
|
||||||
// Still try to generate summary even if AI conclusion failed
|
|
||||||
try {
|
|
||||||
const { summaryService } = await import('./summary.service');
|
|
||||||
const summary = await summaryService.createSummary(level.requestId, 'system', {
|
|
||||||
isSystemGeneration: true
|
|
||||||
});
|
|
||||||
logger.info(`[Approval] ✅ Auto-generated summary ${(summary as any).summaryId} for approved request ${level.requestId} (without AI conclusion)`);
|
|
||||||
} catch (summaryError: any) {
|
|
||||||
logger.error(`[Approval] Failed to auto-generate summary for ${level.requestId}:`, summaryError.message);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
})().catch(err => {
|
|
||||||
// Catch any unhandled promise rejections
|
|
||||||
logger.error(`[Approval] Unhandled error in background AI generation:`, err);
|
|
||||||
});
|
|
||||||
|
|
||||||
// Notify initiator and all participants (including spectators) about approval
|
|
||||||
// Spectators are CC'd for transparency, similar to email CC
|
|
||||||
if (wf) {
|
|
||||||
const participants = await Participant.findAll({
|
|
||||||
where: { requestId: level.requestId }
|
|
||||||
});
|
|
||||||
const targetUserIds = new Set<string>();
|
|
||||||
targetUserIds.add((wf as any).initiatorId);
|
|
||||||
for (const p of participants as any[]) {
|
|
||||||
targetUserIds.add(p.userId); // Includes spectators
|
|
||||||
}
|
|
||||||
|
|
||||||
// Send notification to initiator about final approval (triggers email)
|
|
||||||
const initiatorId = (wf as any).initiatorId;
|
|
||||||
await notificationService.sendToUsers([initiatorId], {
|
|
||||||
title: `Request Approved - All Approvals Complete`,
|
|
||||||
body: `Your request "${(wf as any).title}" has been fully approved by all approvers. Please review and finalize the conclusion remark to close the request.`,
|
|
||||||
requestNumber: (wf as any).requestNumber,
|
|
||||||
requestId: level.requestId,
|
|
||||||
url: `/request/${(wf as any).requestNumber}`,
|
|
||||||
type: 'approval',
|
|
||||||
priority: 'HIGH',
|
|
||||||
actionRequired: true
|
|
||||||
});
|
|
||||||
|
|
||||||
// Send notification to all participants/spectators (for transparency, no action required)
|
|
||||||
const participantUserIds = Array.from(targetUserIds).filter(id => id !== initiatorId);
|
|
||||||
if (participantUserIds.length > 0) {
|
|
||||||
await notificationService.sendToUsers(participantUserIds, {
|
|
||||||
title: `Request Approved`,
|
|
||||||
body: `Request "${(wf as any).title}" has been fully approved. The initiator will finalize the conclusion remark to close the request.`,
|
|
||||||
requestNumber: (wf as any).requestNumber,
|
|
||||||
requestId: level.requestId,
|
|
||||||
url: `/request/${(wf as any).requestNumber}`,
|
|
||||||
type: 'approval_pending_closure',
|
|
||||||
priority: 'MEDIUM',
|
|
||||||
actionRequired: false
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
logger.info(`[Approval] ✅ Final approval complete for ${level.requestId}. Initiator and ${participants.length} participant(s) notified.`);
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
// Not final - move to next level
|
|
||||||
// Check if workflow is paused - if so, don't advance
|
|
||||||
if ((wf as any).isPaused || (wf as any).status === 'PAUSED') {
|
|
||||||
logger.warn(`[Approval] Cannot advance workflow ${level.requestId} - workflow is paused`);
|
|
||||||
throw new Error('Cannot advance workflow - workflow is currently paused. Please resume the workflow first.');
|
|
||||||
}
|
|
||||||
|
|
||||||
// Find the next PENDING level
|
|
||||||
// Custom workflows use strict sequential ordering (levelNumber + 1) to maintain intended order
|
|
||||||
// This ensures custom workflows work predictably and don't skip levels
|
|
||||||
const currentLevelNumber = level.levelNumber || 0;
|
|
||||||
logger.info(`[Approval] Finding next level after level ${currentLevelNumber} for request ${level.requestId} (Custom workflow)`);
|
|
||||||
|
|
||||||
// Use strict sequential approach for custom workflows
|
|
||||||
const nextLevel = await ApprovalLevel.findOne({
|
|
||||||
where: {
|
|
||||||
requestId: level.requestId,
|
|
||||||
levelNumber: currentLevelNumber + 1
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
if (!nextLevel) {
|
|
||||||
logger.info(`[Approval] Sequential level ${currentLevelNumber + 1} not found for custom workflow - this may be the final approval`);
|
|
||||||
} else if (nextLevel.status !== ApprovalStatus.PENDING) {
|
|
||||||
// Sequential level exists but not PENDING - log warning but proceed
|
|
||||||
logger.warn(`[Approval] Sequential level ${currentLevelNumber + 1} exists but status is ${nextLevel.status}, expected PENDING. Proceeding with sequential level to maintain workflow order.`);
|
|
||||||
}
|
|
||||||
|
|
||||||
const nextLevelNumber = nextLevel ? (nextLevel.levelNumber || 0) : null;
|
|
||||||
|
|
||||||
if (nextLevel) {
|
|
||||||
logger.info(`[Approval] Found next level: ${nextLevelNumber} (${(nextLevel as any).levelName || 'unnamed'}), approver: ${(nextLevel as any).approverName || (nextLevel as any).approverEmail || 'unknown'}, status: ${nextLevel.status}`);
|
|
||||||
} else {
|
|
||||||
logger.info(`[Approval] No next level found after level ${currentLevelNumber} - this may be the final approval`);
|
|
||||||
}
|
|
||||||
|
|
||||||
if (nextLevel) {
|
|
||||||
// Check if next level is paused - if so, don't activate it
|
|
||||||
if ((nextLevel as any).isPaused || (nextLevel as any).status === 'PAUSED') {
|
|
||||||
logger.warn(`[Approval] Cannot activate next level ${nextLevelNumber} - level is paused`);
|
|
||||||
throw new Error('Cannot activate next level - the next approval level is currently paused. Please resume it first.');
|
|
||||||
}
|
|
||||||
|
|
||||||
// Activate next level
|
|
||||||
await nextLevel.update({
|
|
||||||
status: ApprovalStatus.IN_PROGRESS,
|
|
||||||
levelStartTime: now,
|
|
||||||
tatStartTime: now
|
|
||||||
});
|
|
||||||
|
|
||||||
// Schedule TAT jobs for the next level
|
|
||||||
try {
|
|
||||||
// Get workflow priority for TAT calculation
|
|
||||||
const workflowPriority = (wf as any)?.priority || 'STANDARD';
|
|
||||||
|
|
||||||
await tatSchedulerService.scheduleTatJobs(
|
|
||||||
level.requestId,
|
|
||||||
(nextLevel as any).levelId,
|
|
||||||
(nextLevel as any).approverId,
|
|
||||||
Number((nextLevel as any).tatHours),
|
|
||||||
now,
|
|
||||||
workflowPriority // Pass workflow priority (EXPRESS = 24/7, STANDARD = working hours)
|
|
||||||
);
|
|
||||||
logger.info(`[Approval] TAT jobs scheduled for next level ${nextLevelNumber} (Priority: ${workflowPriority})`);
|
|
||||||
} catch (tatError) {
|
|
||||||
logger.error(`[Approval] Failed to schedule TAT jobs for next level:`, tatError);
|
|
||||||
// Don't fail the approval if TAT scheduling fails
|
|
||||||
}
|
|
||||||
|
|
||||||
// Update workflow current level (only if nextLevelNumber is not null)
|
|
||||||
if (nextLevelNumber !== null) {
|
|
||||||
await WorkflowRequest.update(
|
|
||||||
{ currentLevel: nextLevelNumber },
|
|
||||||
{ where: { requestId: level.requestId } }
|
|
||||||
);
|
|
||||||
logger.info(`Approved level ${level.levelNumber}. Activated next level ${nextLevelNumber} for workflow ${level.requestId}`);
|
|
||||||
} else {
|
|
||||||
logger.warn(`Approved level ${level.levelNumber} but no next level found - workflow may be complete`);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Note: Dealer claim-specific logic (Activity Creation, E-Invoice) is handled by DealerClaimApprovalService
|
|
||||||
// This service is for custom workflows only
|
|
||||||
|
|
||||||
// Log approval activity
|
|
||||||
activityService.log({
|
|
||||||
requestId: level.requestId,
|
|
||||||
type: 'approval',
|
|
||||||
user: { userId: level.approverId, name: level.approverName },
|
|
||||||
timestamp: new Date().toISOString(),
|
|
||||||
action: 'Approved',
|
|
||||||
details: `Request approved and forwarded to ${(nextLevel as any).approverName || (nextLevel as any).approverEmail} by ${level.approverName || level.approverEmail}`,
|
|
||||||
ipAddress: requestMetadata?.ipAddress || undefined,
|
|
||||||
userAgent: requestMetadata?.userAgent || undefined
|
|
||||||
});
|
|
||||||
|
|
||||||
// Notify initiator about the approval (triggers email for regular workflows)
|
|
||||||
if (wf) {
|
|
||||||
await notificationService.sendToUsers([(wf as any).initiatorId], {
|
|
||||||
title: `Request Approved - Level ${level.levelNumber}`,
|
|
||||||
body: `Your request "${(wf as any).title}" has been approved by ${level.approverName || level.approverEmail} and forwarded to the next approver.`,
|
|
||||||
requestNumber: (wf as any).requestNumber,
|
|
||||||
requestId: level.requestId,
|
|
||||||
url: `/request/${(wf as any).requestNumber}`,
|
|
||||||
type: 'approval',
|
|
||||||
priority: 'MEDIUM'
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
// Notify next approver
|
|
||||||
if (wf && nextLevel) {
|
|
||||||
// Check if it's an auto-step by checking approverEmail or levelName
|
|
||||||
// Note: Activity Creation, E-Invoice Generation, and Credit Note Confirmation are now activity logs only, not approval steps
|
|
||||||
// These steps are processed automatically and should NOT trigger notifications
|
|
||||||
const isAutoStep = (nextLevel as any).approverEmail === 'system@royalenfield.com'
|
|
||||||
|| (nextLevel as any).approverName === 'System Auto-Process'
|
|
||||||
|| (nextLevel as any).approverId === 'system';
|
|
||||||
|
|
||||||
// IMPORTANT: Skip notifications and assignment logging for system/auto-steps
|
|
||||||
// System steps are any step with system@royalenfield.com
|
|
||||||
// Only send notifications to real users, NOT system processes
|
|
||||||
if (!isAutoStep && (nextLevel as any).approverId && (nextLevel as any).approverId !== 'system') {
|
|
||||||
// Additional checks: ensure approverEmail and approverName are not system-related
|
|
||||||
// This prevents notifications to system accounts even if they pass other checks
|
|
||||||
const approverEmail = (nextLevel as any).approverEmail || '';
|
|
||||||
const approverName = (nextLevel as any).approverName || '';
|
|
||||||
const isSystemEmail = approverEmail.toLowerCase() === 'system@royalenfield.com'
|
|
||||||
|| approverEmail.toLowerCase().includes('system');
|
|
||||||
const isSystemName = approverName.toLowerCase() === 'system auto-process'
|
|
||||||
|| approverName.toLowerCase().includes('system');
|
|
||||||
|
|
||||||
// EXCLUDE all system-related steps from notifications
|
|
||||||
// Only send notifications to real users, NOT system processes
|
|
||||||
if (!isSystemEmail && !isSystemName) {
|
|
||||||
// Send notification to next approver (only for real users, not system processes)
|
|
||||||
// This will send both in-app and email notifications
|
|
||||||
const nextApproverId = (nextLevel as any).approverId;
|
|
||||||
const nextApproverName = (nextLevel as any).approverName || (nextLevel as any).approverEmail || 'approver';
|
|
||||||
|
|
||||||
logger.info(`[Approval] Sending assignment notification to next approver: ${nextApproverName} (${nextApproverId}) at level ${nextLevelNumber} for request ${(wf as any).requestNumber}`);
|
|
||||||
|
|
||||||
await notificationService.sendToUsers([ nextApproverId ], {
|
|
||||||
title: `Action required: ${(wf as any).requestNumber}`,
|
|
||||||
body: `${(wf as any).title}`,
|
|
||||||
requestNumber: (wf as any).requestNumber,
|
|
||||||
requestId: (wf as any).requestId,
|
|
||||||
url: `/request/${(wf as any).requestNumber}`,
|
|
||||||
type: 'assignment',
|
|
||||||
priority: 'HIGH',
|
|
||||||
actionRequired: true
|
|
||||||
});
|
|
||||||
|
|
||||||
logger.info(`[Approval] Assignment notification sent successfully to ${nextApproverName} for level ${nextLevelNumber}`);
|
|
||||||
|
|
||||||
// Log assignment activity for the next approver
|
|
||||||
activityService.log({
|
|
||||||
requestId: level.requestId,
|
|
||||||
type: 'assignment',
|
|
||||||
user: { userId: level.approverId, name: level.approverName },
|
|
||||||
timestamp: new Date().toISOString(),
|
|
||||||
action: 'Assigned to approver',
|
|
||||||
details: `Request assigned to ${nextApproverName} for ${(nextLevel as any).levelName || `level ${nextLevelNumber}`}`,
|
|
||||||
ipAddress: requestMetadata?.ipAddress || undefined,
|
|
||||||
userAgent: requestMetadata?.userAgent || undefined
|
|
||||||
});
|
|
||||||
} else {
|
|
||||||
logger.info(`[Approval] Skipping notification for system process: ${approverEmail} at level ${nextLevelNumber}`);
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
logger.info(`[Approval] Skipping notification for auto-step at level ${nextLevelNumber}`);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Note: Dealer-specific notifications (proposal/completion submissions) are handled by DealerClaimApprovalService
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
// No next level found but not final approver - this shouldn't happen
|
|
||||||
logger.warn(`No next level found for workflow ${level.requestId} after approving level ${level.levelNumber}`);
|
|
||||||
// Use current level number since there's no next level (workflow is complete)
|
|
||||||
await WorkflowRequest.update(
|
|
||||||
{
|
|
||||||
status: WorkflowStatus.APPROVED,
|
|
||||||
closureDate: now,
|
|
||||||
currentLevel: level.levelNumber || 0
|
|
||||||
},
|
|
||||||
{ where: { requestId: level.requestId } }
|
|
||||||
);
|
|
||||||
if (wf) {
|
|
||||||
await notificationService.sendToUsers([ (wf as any).initiatorId ], {
|
|
||||||
title: `Approved: ${(wf as any).requestNumber}`,
|
|
||||||
body: `${(wf as any).title}`,
|
|
||||||
requestNumber: (wf as any).requestNumber,
|
|
||||||
url: `/request/${(wf as any).requestNumber}`
|
|
||||||
});
|
|
||||||
activityService.log({
|
|
||||||
requestId: level.requestId,
|
|
||||||
type: 'approval',
|
|
||||||
user: { userId: level.approverId, name: level.approverName },
|
|
||||||
timestamp: new Date().toISOString(),
|
|
||||||
action: 'Approved',
|
|
||||||
details: `Request approved and finalized by ${level.approverName || level.approverEmail}`,
|
|
||||||
ipAddress: requestMetadata?.ipAddress || undefined,
|
|
||||||
userAgent: requestMetadata?.userAgent || undefined
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
} else if (action.action === 'REJECT') {
|
|
||||||
// Rejection - mark workflow as REJECTED (closure will happen when initiator finalizes conclusion)
|
|
||||||
await WorkflowRequest.update(
|
|
||||||
{
|
|
||||||
status: WorkflowStatus.REJECTED
|
|
||||||
// Note: closureDate will be set when initiator finalizes the conclusion
|
|
||||||
},
|
|
||||||
{ where: { requestId: level.requestId } }
|
|
||||||
);
|
|
||||||
|
|
||||||
// Mark all pending levels as skipped
|
|
||||||
await ApprovalLevel.update(
|
|
||||||
{
|
|
||||||
status: ApprovalStatus.SKIPPED,
|
|
||||||
levelEndTime: now
|
|
||||||
},
|
|
||||||
{
|
|
||||||
where: {
|
|
||||||
requestId: level.requestId,
|
|
||||||
status: ApprovalStatus.PENDING,
|
|
||||||
levelNumber: { [Op.gt]: level.levelNumber }
|
|
||||||
}
|
|
||||||
}
|
|
||||||
);
|
|
||||||
|
|
||||||
logWorkflowEvent('rejected', level.requestId, {
|
|
||||||
level: level.levelNumber,
|
|
||||||
status: 'REJECTED',
|
|
||||||
message: 'Awaiting closure from initiator',
|
|
||||||
});
|
|
||||||
|
|
||||||
// Log rejection activity first (so it's included in AI context)
|
|
||||||
if (wf) {
|
|
||||||
activityService.log({
|
|
||||||
requestId: level.requestId,
|
|
||||||
type: 'rejection',
|
|
||||||
user: { userId: level.approverId, name: level.approverName },
|
|
||||||
timestamp: new Date().toISOString(),
|
|
||||||
action: 'Rejected',
|
|
||||||
details: `Request rejected by ${level.approverName || level.approverEmail}. Reason: ${action.rejectionReason || action.comments || 'No reason provided'}. Awaiting closure from initiator.`,
|
|
||||||
ipAddress: requestMetadata?.ipAddress || undefined,
|
|
||||||
userAgent: requestMetadata?.userAgent || undefined
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
// Notify initiator and all participants
|
|
||||||
if (wf) {
|
|
||||||
const participants = await Participant.findAll({ where: { requestId: level.requestId } });
|
|
||||||
const targetUserIds = new Set<string>();
|
|
||||||
targetUserIds.add((wf as any).initiatorId);
|
|
||||||
for (const p of participants as any[]) {
|
|
||||||
targetUserIds.add(p.userId);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Send notification to initiator with type 'rejection' to trigger email
|
|
||||||
await notificationService.sendToUsers([(wf as any).initiatorId], {
|
|
||||||
title: `Rejected: ${(wf as any).requestNumber}`,
|
|
||||||
body: `${(wf as any).title}`,
|
|
||||||
requestNumber: (wf as any).requestNumber,
|
|
||||||
requestId: level.requestId,
|
|
||||||
url: `/request/${(wf as any).requestNumber}`,
|
|
||||||
type: 'rejection',
|
|
||||||
priority: 'HIGH',
|
|
||||||
metadata: {
|
|
||||||
rejectionReason: action.rejectionReason || action.comments || 'No reason provided'
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
// Send notification to other participants (spectators) for transparency (no email, just in-app)
|
|
||||||
const participantUserIds = Array.from(targetUserIds).filter(id => id !== (wf as any).initiatorId);
|
|
||||||
if (participantUserIds.length > 0) {
|
|
||||||
await notificationService.sendToUsers(participantUserIds, {
|
|
||||||
title: `Rejected: ${(wf as any).requestNumber}`,
|
|
||||||
body: `Request "${(wf as any).title}" has been rejected.`,
|
|
||||||
requestNumber: (wf as any).requestNumber,
|
|
||||||
requestId: level.requestId,
|
|
||||||
url: `/request/${(wf as any).requestNumber}`,
|
|
||||||
type: 'status_change', // Use status_change to avoid triggering emails for participants
|
|
||||||
priority: 'MEDIUM'
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Generate AI conclusion remark ASYNCHRONOUSLY for rejected requests (similar to approved)
|
|
||||||
// This runs in the background without blocking the rejection response
|
|
||||||
(async () => {
|
|
||||||
try {
|
|
||||||
const { aiService } = await import('./ai.service');
|
|
||||||
const { ConclusionRemark } = await import('@models/index');
|
|
||||||
const { ApprovalLevel } = await import('@models/ApprovalLevel');
|
|
||||||
const { WorkNote } = await import('@models/WorkNote');
|
|
||||||
const { Document } = await import('@models/Document');
|
|
||||||
const { Activity } = await import('@models/Activity');
|
|
||||||
const { getConfigValue } = await import('./configReader.service');
|
|
||||||
|
|
||||||
// Check if AI features and remark generation are enabled in admin config
|
|
||||||
const aiEnabled = (await getConfigValue('AI_ENABLED', 'true'))?.toLowerCase() === 'true';
|
|
||||||
const remarkGenerationEnabled = (await getConfigValue('AI_REMARK_GENERATION_ENABLED', 'true'))?.toLowerCase() === 'true';
|
|
||||||
|
|
||||||
if (!aiEnabled || !remarkGenerationEnabled) {
|
|
||||||
logger.info(`[Approval] AI conclusion generation skipped for rejected request ${level.requestId} (AI disabled)`);
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Check if AI service is available
|
|
||||||
const { aiService: aiSvc } = await import('./ai.service');
|
|
||||||
if (!aiSvc.isAvailable()) {
|
|
||||||
logger.warn(`[Approval] AI service unavailable for rejected request ${level.requestId}`);
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Gather context for AI generation (similar to approved flow)
|
|
||||||
const approvalLevels = await ApprovalLevel.findAll({
|
|
||||||
where: { requestId: level.requestId },
|
|
||||||
order: [['levelNumber', 'ASC']]
|
|
||||||
});
|
|
||||||
|
|
||||||
const workNotes = await WorkNote.findAll({
|
|
||||||
where: { requestId: level.requestId },
|
|
||||||
order: [['createdAt', 'ASC']],
|
|
||||||
limit: 20
|
|
||||||
});
|
|
||||||
|
|
||||||
const documents = await Document.findAll({
|
|
||||||
where: { requestId: level.requestId },
|
|
||||||
order: [['uploadedAt', 'DESC']]
|
|
||||||
});
|
|
||||||
|
|
||||||
const activities = await Activity.findAll({
|
|
||||||
where: { requestId: level.requestId },
|
|
||||||
order: [['createdAt', 'ASC']],
|
|
||||||
limit: 50
|
|
||||||
});
|
|
||||||
|
|
||||||
// Build context object (include rejection reason)
|
|
||||||
const context = {
|
|
||||||
requestTitle: (wf as any).title,
|
|
||||||
requestDescription: (wf as any).description,
|
|
||||||
requestNumber: (wf as any).requestNumber,
|
|
||||||
priority: (wf as any).priority,
|
|
||||||
rejectionReason: action.rejectionReason || action.comments || 'No reason provided',
|
|
||||||
rejectedBy: level.approverName || level.approverEmail,
|
|
||||||
approvalFlow: approvalLevels.map((l: any) => {
|
|
||||||
const tatPercentage = l.tatPercentageUsed !== undefined && l.tatPercentageUsed !== null
|
|
||||||
? Number(l.tatPercentageUsed)
|
|
||||||
: (l.elapsedHours && l.tatHours ? (Number(l.elapsedHours) / Number(l.tatHours)) * 100 : 0);
|
|
||||||
return {
|
|
||||||
levelNumber: l.levelNumber,
|
|
||||||
approverName: l.approverName,
|
|
||||||
status: l.status,
|
|
||||||
comments: l.comments,
|
|
||||||
actionDate: l.actionDate,
|
|
||||||
tatHours: Number(l.tatHours || 0),
|
|
||||||
elapsedHours: Number(l.elapsedHours || 0),
|
|
||||||
tatPercentageUsed: tatPercentage
|
|
||||||
};
|
|
||||||
}),
|
|
||||||
workNotes: workNotes.map((note: any) => ({
|
|
||||||
userName: note.userName,
|
|
||||||
message: note.message,
|
|
||||||
createdAt: note.createdAt
|
|
||||||
})),
|
|
||||||
documents: documents.map((doc: any) => ({
|
|
||||||
fileName: doc.originalFileName || doc.fileName,
|
|
||||||
uploadedBy: doc.uploadedBy,
|
|
||||||
uploadedAt: doc.uploadedAt
|
|
||||||
})),
|
|
||||||
activities: activities.map((activity: any) => ({
|
|
||||||
type: activity.activityType,
|
|
||||||
action: activity.activityDescription,
|
|
||||||
details: activity.activityDescription,
|
|
||||||
timestamp: activity.createdAt
|
|
||||||
}))
|
|
||||||
};
|
|
||||||
|
|
||||||
logger.info(`[Approval] Generating AI conclusion for rejected request ${level.requestId}...`);
|
|
||||||
|
|
||||||
// Generate AI conclusion (will adapt to rejection context)
|
|
||||||
const aiResult = await aiSvc.generateConclusionRemark(context);
|
|
||||||
|
|
||||||
// Create or update conclusion remark
|
|
||||||
let conclusionInstance = await ConclusionRemark.findOne({ where: { requestId: level.requestId } });
|
|
||||||
|
|
||||||
const conclusionData = {
|
|
||||||
aiGeneratedRemark: aiResult.remark,
|
|
||||||
aiModelUsed: aiResult.provider,
|
|
||||||
aiConfidenceScore: aiResult.confidence,
|
|
||||||
approvalSummary: {
|
|
||||||
totalLevels: approvalLevels.length,
|
|
||||||
rejectedLevel: level.levelNumber,
|
|
||||||
rejectedBy: level.approverName || level.approverEmail,
|
|
||||||
rejectionReason: action.rejectionReason || action.comments
|
|
||||||
},
|
|
||||||
documentSummary: {
|
|
||||||
totalDocuments: documents.length,
|
|
||||||
documentNames: documents.map((d: any) => d.originalFileName || d.fileName)
|
|
||||||
},
|
|
||||||
keyDiscussionPoints: aiResult.keyPoints,
|
|
||||||
generatedAt: new Date()
|
|
||||||
};
|
|
||||||
|
|
||||||
if (conclusionInstance) {
|
|
||||||
await conclusionInstance.update(conclusionData as any);
|
|
||||||
logger.info(`[Approval] ✅ AI conclusion updated for rejected request ${level.requestId}`);
|
|
||||||
} else {
|
|
||||||
await ConclusionRemark.create({
|
|
||||||
requestId: level.requestId,
|
|
||||||
...conclusionData,
|
|
||||||
finalRemark: null,
|
|
||||||
editedBy: null,
|
|
||||||
isEdited: false,
|
|
||||||
editCount: 0,
|
|
||||||
finalizedAt: null
|
|
||||||
} as any);
|
|
||||||
logger.info(`[Approval] ✅ AI conclusion generated for rejected request ${level.requestId}`);
|
|
||||||
}
|
|
||||||
} catch (error: any) {
|
|
||||||
logger.error(`[Approval] Failed to generate AI conclusion for rejected request ${level.requestId}:`, error);
|
|
||||||
// Don't fail the rejection if AI generation fails
|
|
||||||
}
|
|
||||||
})();
|
|
||||||
}
|
|
||||||
|
|
||||||
logger.info(`Approval level ${levelId} ${action.action.toLowerCase()}ed`);
|
|
||||||
|
|
||||||
// Emit real-time update to all users viewing this request
|
|
||||||
emitToRequestRoom(level.requestId, 'request:updated', {
|
|
||||||
requestId: level.requestId,
|
|
||||||
requestNumber: (wf as any)?.requestNumber,
|
|
||||||
action: action.action,
|
|
||||||
levelNumber: level.levelNumber,
|
|
||||||
timestamp: now.toISOString()
|
|
||||||
});
|
|
||||||
|
|
||||||
return updatedLevel;
|
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
logger.error(`Failed to ${action.action.toLowerCase()} level ${levelId}:`, error);
|
logger.error('[ApprovalService] Error approving level:', error);
|
||||||
throw new Error(`Failed to ${action.action.toLowerCase()} level`);
|
throw error;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
async getCurrentApprovalLevel(requestId: string): Promise<ApprovalLevel | null> {
|
async getCurrentApprovalLevel(requestId: string): Promise<any> {
|
||||||
try {
|
try {
|
||||||
return await ApprovalLevel.findOne({
|
const wf = await WorkflowRequestModel.findOne({
|
||||||
where: { requestId, status: ApprovalStatus.PENDING },
|
$or: [{ requestId }, { requestNumber: requestId }]
|
||||||
order: [['levelNumber', 'ASC']]
|
|
||||||
});
|
});
|
||||||
|
|
||||||
|
if (!wf) return null;
|
||||||
|
|
||||||
|
return await ApprovalLevelModel.findOne({
|
||||||
|
requestId: wf.requestId,
|
||||||
|
levelNumber: wf.currentLevel
|
||||||
|
}).populate('approver', 'name email userId');
|
||||||
|
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
logger.error(`Failed to get current approval level for ${requestId}:`, error);
|
logger.error('[ApprovalService] Error getting current approval level:', error);
|
||||||
throw new Error('Failed to get current approval level');
|
throw error;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
async getApprovalLevels(requestId: string): Promise<ApprovalLevel[]> {
|
async getApprovalLevels(requestId: string): Promise<any[]> {
|
||||||
try {
|
try {
|
||||||
return await ApprovalLevel.findAll({
|
let targetRequestId = requestId;
|
||||||
where: { requestId },
|
const wf = await WorkflowRequestModel.findOne({ requestNumber: requestId });
|
||||||
order: [['levelNumber', 'ASC']]
|
if (wf) {
|
||||||
});
|
targetRequestId = wf.requestId;
|
||||||
|
}
|
||||||
|
|
||||||
|
return await ApprovalLevelModel.find({ requestId: targetRequestId })
|
||||||
|
.sort({ levelNumber: 1 })
|
||||||
|
.populate('approver', 'name email userId');
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
logger.error(`Failed to get approval levels for ${requestId}:`, error);
|
logger.error('[ApprovalService] Error getting approval levels:', error);
|
||||||
throw new Error('Failed to get approval levels');
|
throw error;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
File diff suppressed because it is too large
Load Diff
@ -1,11 +1,10 @@
|
|||||||
/**
|
/**
|
||||||
* Configuration Reader Service
|
* MongoDB Configuration Reader Service
|
||||||
* Reads admin configurations from database for use in backend logic
|
* Reads admin configurations from MongoDB for use in backend logic
|
||||||
*/
|
*/
|
||||||
|
|
||||||
import { sequelize } from '@config/database';
|
import { AdminConfigurationModel } from '../models/mongoose/AdminConfiguration.schema';
|
||||||
import { QueryTypes } from 'sequelize';
|
import logger from '../utils/logger';
|
||||||
import logger from '@utils/logger';
|
|
||||||
|
|
||||||
// Cache configurations in memory for performance
|
// Cache configurations in memory for performance
|
||||||
let configCache: Map<string, string> = new Map();
|
let configCache: Map<string, string> = new Map();
|
||||||
@ -37,7 +36,7 @@ function maskSensitiveValue(value: string): string {
|
|||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Get a configuration value from database (with caching)
|
* Get a configuration value from MongoDB (with caching)
|
||||||
*/
|
*/
|
||||||
export async function getConfigValue(configKey: string, defaultValue: string = ''): Promise<string> {
|
export async function getConfigValue(configKey: string, defaultValue: string = ''): Promise<string> {
|
||||||
try {
|
try {
|
||||||
@ -46,19 +45,11 @@ export async function getConfigValue(configKey: string, defaultValue: string = '
|
|||||||
return configCache.get(configKey)!;
|
return configCache.get(configKey)!;
|
||||||
}
|
}
|
||||||
|
|
||||||
// Query database
|
// Query MongoDB
|
||||||
const result = await sequelize.query(`
|
const result = await AdminConfigurationModel.findOne({ configKey }).lean();
|
||||||
SELECT config_value
|
|
||||||
FROM admin_configurations
|
|
||||||
WHERE config_key = :configKey
|
|
||||||
LIMIT 1
|
|
||||||
`, {
|
|
||||||
replacements: { configKey },
|
|
||||||
type: QueryTypes.SELECT
|
|
||||||
});
|
|
||||||
|
|
||||||
if (result && result.length > 0) {
|
if (result) {
|
||||||
const value = (result[0] as any).config_value;
|
const value = result.configValue;
|
||||||
configCache.set(configKey, value);
|
configCache.set(configKey, value);
|
||||||
|
|
||||||
// Always update cache expiry when loading from database
|
// Always update cache expiry when loading from database
|
||||||
@ -66,17 +57,17 @@ export async function getConfigValue(configKey: string, defaultValue: string = '
|
|||||||
|
|
||||||
// Mask sensitive values in logs for security
|
// Mask sensitive values in logs for security
|
||||||
const logValue = isSensitiveConfig(configKey) ? maskSensitiveValue(value) : value;
|
const logValue = isSensitiveConfig(configKey) ? maskSensitiveValue(value) : value;
|
||||||
logger.info(`[ConfigReader] Loaded config '${configKey}' = '${logValue}' from database (cached for 5min)`);
|
logger.info(`[ConfigReaderMongo] Loaded config '${configKey}' = '${logValue}' from MongoDB (cached for 5min)`);
|
||||||
|
|
||||||
return value;
|
return value;
|
||||||
}
|
}
|
||||||
|
|
||||||
// Mask sensitive default values in logs for security
|
// Mask sensitive default values in logs for security
|
||||||
const logDefault = isSensitiveConfig(configKey) ? maskSensitiveValue(defaultValue) : defaultValue;
|
const logDefault = isSensitiveConfig(configKey) ? maskSensitiveValue(defaultValue) : defaultValue;
|
||||||
logger.warn(`[ConfigReader] Config key '${configKey}' not found, using default: ${logDefault}`);
|
logger.warn(`[ConfigReaderMongo] Config key '${configKey}' not found, using default: ${logDefault}`);
|
||||||
return defaultValue;
|
return defaultValue;
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
logger.error(`[ConfigReader] Error reading config '${configKey}':`, error);
|
logger.error(`[ConfigReaderMongo] Error reading config '${configKey}':`, error);
|
||||||
return defaultValue;
|
return defaultValue;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -86,7 +77,8 @@ export async function getConfigValue(configKey: string, defaultValue: string = '
|
|||||||
*/
|
*/
|
||||||
export async function getConfigNumber(configKey: string, defaultValue: number): Promise<number> {
|
export async function getConfigNumber(configKey: string, defaultValue: number): Promise<number> {
|
||||||
const value = await getConfigValue(configKey, String(defaultValue));
|
const value = await getConfigValue(configKey, String(defaultValue));
|
||||||
return parseFloat(value) || defaultValue;
|
const num = parseFloat(value);
|
||||||
|
return isNaN(num) ? defaultValue : num;
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@ -98,7 +90,7 @@ export async function getConfigBoolean(configKey: string, defaultValue: boolean)
|
|||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Get TAT thresholds from database
|
* Get TAT thresholds from MongoDB
|
||||||
*/
|
*/
|
||||||
export async function getTatThresholds(): Promise<{ first: number; second: number }> {
|
export async function getTatThresholds(): Promise<{ first: number; second: number }> {
|
||||||
const first = await getConfigNumber('TAT_REMINDER_THRESHOLD_1', 50);
|
const first = await getConfigNumber('TAT_REMINDER_THRESHOLD_1', 50);
|
||||||
@ -108,7 +100,7 @@ export async function getTatThresholds(): Promise<{ first: number; second: numbe
|
|||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Get working hours from database
|
* Get working hours from MongoDB
|
||||||
*/
|
*/
|
||||||
export async function getWorkingHours(): Promise<{ startHour: number; endHour: number }> {
|
export async function getWorkingHours(): Promise<{ startHour: number; endHour: number }> {
|
||||||
const startHour = await getConfigNumber('WORK_START_HOUR', 9);
|
const startHour = await getConfigNumber('WORK_START_HOUR', 9);
|
||||||
@ -118,12 +110,12 @@ export async function getWorkingHours(): Promise<{ startHour: number; endHour: n
|
|||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Clear configuration cache (call after updating configs)
|
* Clear configuration cache
|
||||||
*/
|
*/
|
||||||
export function clearConfigCache(): void {
|
export function clearConfigCache(): void {
|
||||||
configCache.clear();
|
configCache.clear();
|
||||||
cacheExpiry = null;
|
cacheExpiry = null;
|
||||||
logger.info('[ConfigReader] Configuration cache cleared');
|
logger.info('[ConfigReaderMongo] Configuration cache cleared');
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@ -131,19 +123,16 @@ export function clearConfigCache(): void {
|
|||||||
*/
|
*/
|
||||||
export async function preloadConfigurations(): Promise<void> {
|
export async function preloadConfigurations(): Promise<void> {
|
||||||
try {
|
try {
|
||||||
const results = await sequelize.query(`
|
const configs = await AdminConfigurationModel.find({}).lean();
|
||||||
SELECT config_key, config_value
|
|
||||||
FROM admin_configurations
|
|
||||||
`, { type: QueryTypes.SELECT });
|
|
||||||
|
|
||||||
results.forEach((row: any) => {
|
configs.forEach((cfg) => {
|
||||||
configCache.set(row.config_key, row.config_value);
|
configCache.set(cfg.configKey, cfg.configValue);
|
||||||
});
|
});
|
||||||
|
|
||||||
cacheExpiry = new Date(Date.now() + CACHE_DURATION_MS);
|
cacheExpiry = new Date(Date.now() + CACHE_DURATION_MS);
|
||||||
logger.info(`[ConfigReader] Preloaded ${results.length} configurations into cache`);
|
logger.info(`[ConfigReaderMongo] Preloaded ${configs.length} configurations into cache`);
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
logger.error('[ConfigReader] Error preloading configurations:', error);
|
logger.error('[ConfigReaderMongo] Error preloading configurations:', error);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -157,4 +146,3 @@ export async function getVertexAIConfig(): Promise<{
|
|||||||
|
|
||||||
return { enabled };
|
return { enabled };
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@ -1,604 +1,142 @@
|
|||||||
import { sequelize } from '@config/database';
|
import { AdminConfigurationModel } from '../models/mongoose/AdminConfiguration.schema';
|
||||||
import { QueryTypes } from 'sequelize';
|
import logger from '../utils/logger';
|
||||||
import logger from '@utils/logger';
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Seed default admin configurations if table is empty
|
* Seed default admin configurations if collection is empty
|
||||||
* Called automatically on server startup
|
* Called automatically on server startup or via script
|
||||||
*/
|
*/
|
||||||
export async function seedDefaultConfigurations(): Promise<void> {
|
export async function seedDefaultConfigurationsMongo(): Promise<void> {
|
||||||
try {
|
try {
|
||||||
// Ensure pgcrypto extension is available for gen_random_uuid()
|
const count = await AdminConfigurationModel.countDocuments();
|
||||||
try {
|
if (count > 0) {
|
||||||
await sequelize.query('CREATE EXTENSION IF NOT EXISTS "pgcrypto"', { type: QueryTypes.RAW });
|
logger.info(`[Config Seed Mongo] Found ${count} existing configurations. Skipping seed.`);
|
||||||
} catch (extError: any) {
|
return;
|
||||||
// Extension might already exist or user might not have permission - continue
|
|
||||||
logger.debug('[Config Seed] pgcrypto extension check:', extError?.message || 'already exists');
|
|
||||||
}
|
}
|
||||||
|
|
||||||
logger.info('[Config Seed] Seeding default configurations (duplicates will be skipped automatically)...');
|
logger.info('[Config Seed Mongo] Seeding default configurations...');
|
||||||
|
|
||||||
// Insert default configurations with ON CONFLICT handling
|
const configs = [
|
||||||
// This allows re-running the seed without errors if configs already exist
|
// TAT Settings
|
||||||
await sequelize.query(`
|
{
|
||||||
INSERT INTO admin_configurations (
|
configKey: 'DEFAULT_TAT_EXPRESS_HOURS',
|
||||||
config_id, config_key, config_category, config_value, value_type,
|
configValue: '24',
|
||||||
display_name, description, default_value, is_editable, is_sensitive,
|
description: 'Default turnaround time in hours for express priority requests (calendar days, 24/7)'
|
||||||
validation_rules, ui_component, options, sort_order, requires_restart,
|
},
|
||||||
last_modified_by, last_modified_at, created_at, updated_at
|
{
|
||||||
) VALUES
|
configKey: 'DEFAULT_TAT_STANDARD_HOURS',
|
||||||
-- TAT Settings
|
configValue: '48',
|
||||||
(
|
description: 'Default turnaround time in hours for standard priority requests (working hours only)'
|
||||||
gen_random_uuid(),
|
},
|
||||||
'DEFAULT_TAT_EXPRESS_HOURS',
|
{
|
||||||
'TAT_SETTINGS',
|
configKey: 'TAT_REMINDER_THRESHOLD_1',
|
||||||
'24',
|
configValue: '50',
|
||||||
'NUMBER',
|
description: 'First TAT Reminder Threshold (%)'
|
||||||
'Default TAT for Express Priority',
|
},
|
||||||
'Default turnaround time in hours for express priority requests (calendar days, 24/7)',
|
{
|
||||||
'24',
|
configKey: 'TAT_REMINDER_THRESHOLD_2',
|
||||||
true,
|
configValue: '75',
|
||||||
false,
|
description: 'Second TAT Reminder Threshold (%)'
|
||||||
'{"min": 1, "max": 168}'::jsonb,
|
},
|
||||||
'number',
|
{
|
||||||
NULL,
|
configKey: 'TAT_TEST_MODE',
|
||||||
1,
|
configValue: 'false',
|
||||||
false,
|
description: 'Enable test mode where 1 TAT hour = 1 minute (for development/testing only)'
|
||||||
NULL,
|
},
|
||||||
NULL,
|
|
||||||
NOW(),
|
|
||||||
NOW()
|
|
||||||
),
|
|
||||||
(
|
|
||||||
gen_random_uuid(),
|
|
||||||
'DEFAULT_TAT_STANDARD_HOURS',
|
|
||||||
'TAT_SETTINGS',
|
|
||||||
'48',
|
|
||||||
'NUMBER',
|
|
||||||
'Default TAT for Standard Priority',
|
|
||||||
'Default turnaround time in hours for standard priority requests (working days only, excludes weekends and holidays)',
|
|
||||||
'48',
|
|
||||||
true,
|
|
||||||
false,
|
|
||||||
'{"min": 1, "max": 720}'::jsonb,
|
|
||||||
'number',
|
|
||||||
NULL,
|
|
||||||
2,
|
|
||||||
false,
|
|
||||||
NULL,
|
|
||||||
NULL,
|
|
||||||
NOW(),
|
|
||||||
NOW()
|
|
||||||
),
|
|
||||||
(
|
|
||||||
gen_random_uuid(),
|
|
||||||
'TAT_REMINDER_THRESHOLD_1',
|
|
||||||
'TAT_SETTINGS',
|
|
||||||
'50',
|
|
||||||
'NUMBER',
|
|
||||||
'First TAT Reminder Threshold (%)',
|
|
||||||
'Send first gentle reminder when this percentage of TAT is elapsed',
|
|
||||||
'50',
|
|
||||||
true,
|
|
||||||
false,
|
|
||||||
'{"min": 1, "max": 100}'::jsonb,
|
|
||||||
'slider',
|
|
||||||
NULL,
|
|
||||||
3,
|
|
||||||
false,
|
|
||||||
NULL,
|
|
||||||
NULL,
|
|
||||||
NOW(),
|
|
||||||
NOW()
|
|
||||||
),
|
|
||||||
(
|
|
||||||
gen_random_uuid(),
|
|
||||||
'TAT_REMINDER_THRESHOLD_2',
|
|
||||||
'TAT_SETTINGS',
|
|
||||||
'75',
|
|
||||||
'NUMBER',
|
|
||||||
'Second TAT Reminder Threshold (%)',
|
|
||||||
'Send escalation warning when this percentage of TAT is elapsed',
|
|
||||||
'75',
|
|
||||||
true,
|
|
||||||
false,
|
|
||||||
'{"min": 1, "max": 100}'::jsonb,
|
|
||||||
'slider',
|
|
||||||
NULL,
|
|
||||||
4,
|
|
||||||
false,
|
|
||||||
NULL,
|
|
||||||
NULL,
|
|
||||||
NOW(),
|
|
||||||
NOW()
|
|
||||||
),
|
|
||||||
(
|
|
||||||
gen_random_uuid(),
|
|
||||||
'WORK_START_HOUR',
|
|
||||||
'TAT_SETTINGS',
|
|
||||||
'9',
|
|
||||||
'NUMBER',
|
|
||||||
'Working Day Start Hour',
|
|
||||||
'Hour when working day starts (24-hour format, 0-23)',
|
|
||||||
'9',
|
|
||||||
true,
|
|
||||||
false,
|
|
||||||
'{"min": 0, "max": 23}'::jsonb,
|
|
||||||
'number',
|
|
||||||
NULL,
|
|
||||||
5,
|
|
||||||
false,
|
|
||||||
NULL,
|
|
||||||
NULL,
|
|
||||||
NOW(),
|
|
||||||
NOW()
|
|
||||||
),
|
|
||||||
(
|
|
||||||
gen_random_uuid(),
|
|
||||||
'WORK_END_HOUR',
|
|
||||||
'TAT_SETTINGS',
|
|
||||||
'18',
|
|
||||||
'NUMBER',
|
|
||||||
'Working Day End Hour',
|
|
||||||
'Hour when working day ends (24-hour format, 0-23)',
|
|
||||||
'18',
|
|
||||||
true,
|
|
||||||
false,
|
|
||||||
'{"min": 0, "max": 23}'::jsonb,
|
|
||||||
'number',
|
|
||||||
NULL,
|
|
||||||
6,
|
|
||||||
false,
|
|
||||||
NULL,
|
|
||||||
NULL,
|
|
||||||
NOW(),
|
|
||||||
NOW()
|
|
||||||
),
|
|
||||||
(
|
|
||||||
gen_random_uuid(),
|
|
||||||
'WORK_START_DAY',
|
|
||||||
'TAT_SETTINGS',
|
|
||||||
'1',
|
|
||||||
'NUMBER',
|
|
||||||
'Working Week Start Day',
|
|
||||||
'Day of week start (1=Monday, 7=Sunday)',
|
|
||||||
'1',
|
|
||||||
true,
|
|
||||||
false,
|
|
||||||
'{"min": 1, "max": 7}'::jsonb,
|
|
||||||
'number',
|
|
||||||
NULL,
|
|
||||||
7,
|
|
||||||
false,
|
|
||||||
NULL,
|
|
||||||
NULL,
|
|
||||||
NOW(),
|
|
||||||
NOW()
|
|
||||||
),
|
|
||||||
(
|
|
||||||
gen_random_uuid(),
|
|
||||||
'WORK_END_DAY',
|
|
||||||
'TAT_SETTINGS',
|
|
||||||
'5',
|
|
||||||
'NUMBER',
|
|
||||||
'Working Week End Day',
|
|
||||||
'Day of week end (1=Monday, 7=Sunday)',
|
|
||||||
'5',
|
|
||||||
true,
|
|
||||||
false,
|
|
||||||
'{"min": 1, "max": 7}'::jsonb,
|
|
||||||
'number',
|
|
||||||
NULL,
|
|
||||||
8,
|
|
||||||
false,
|
|
||||||
NULL,
|
|
||||||
NULL,
|
|
||||||
NOW(),
|
|
||||||
NOW()
|
|
||||||
),
|
|
||||||
-- Document Policy
|
|
||||||
(
|
|
||||||
gen_random_uuid(),
|
|
||||||
'MAX_FILE_SIZE_MB',
|
|
||||||
'DOCUMENT_POLICY',
|
|
||||||
'10',
|
|
||||||
'NUMBER',
|
|
||||||
'Maximum File Upload Size (MB)',
|
|
||||||
'Maximum allowed file size for document uploads in megabytes',
|
|
||||||
'10',
|
|
||||||
true,
|
|
||||||
false,
|
|
||||||
'{"min": 1, "max": 100}'::jsonb,
|
|
||||||
'number',
|
|
||||||
NULL,
|
|
||||||
10,
|
|
||||||
false,
|
|
||||||
NULL,
|
|
||||||
NULL,
|
|
||||||
NOW(),
|
|
||||||
NOW()
|
|
||||||
),
|
|
||||||
(
|
|
||||||
gen_random_uuid(),
|
|
||||||
'ALLOWED_FILE_TYPES',
|
|
||||||
'DOCUMENT_POLICY',
|
|
||||||
'pdf,doc,docx,xls,xlsx,ppt,pptx,jpg,jpeg,png,gif',
|
|
||||||
'STRING',
|
|
||||||
'Allowed File Types',
|
|
||||||
'Comma-separated list of allowed file extensions for uploads',
|
|
||||||
'pdf,doc,docx,xls,xlsx,ppt,pptx,jpg,jpeg,png,gif',
|
|
||||||
true,
|
|
||||||
false,
|
|
||||||
'{}'::jsonb,
|
|
||||||
'text',
|
|
||||||
NULL,
|
|
||||||
11,
|
|
||||||
false,
|
|
||||||
NULL,
|
|
||||||
NULL,
|
|
||||||
NOW(),
|
|
||||||
NOW()
|
|
||||||
),
|
|
||||||
(
|
|
||||||
gen_random_uuid(),
|
|
||||||
'DOCUMENT_RETENTION_DAYS',
|
|
||||||
'DOCUMENT_POLICY',
|
|
||||||
'365',
|
|
||||||
'NUMBER',
|
|
||||||
'Document Retention Period (Days)',
|
|
||||||
'Number of days to retain documents after workflow closure before archival',
|
|
||||||
'365',
|
|
||||||
true,
|
|
||||||
false,
|
|
||||||
'{"min": 30, "max": 3650}'::jsonb,
|
|
||||||
'number',
|
|
||||||
NULL,
|
|
||||||
12,
|
|
||||||
false,
|
|
||||||
NULL,
|
|
||||||
NULL,
|
|
||||||
NOW(),
|
|
||||||
NOW()
|
|
||||||
),
|
|
||||||
-- AI Configuration (Vertex AI Gemini)
|
|
||||||
(
|
|
||||||
gen_random_uuid(),
|
|
||||||
'AI_ENABLED',
|
|
||||||
'AI_CONFIGURATION',
|
|
||||||
'true',
|
|
||||||
'BOOLEAN',
|
|
||||||
'Enable AI Features',
|
|
||||||
'Master toggle to enable/disable all AI-powered features in the system',
|
|
||||||
'true',
|
|
||||||
true,
|
|
||||||
false,
|
|
||||||
'{"type": "boolean"}'::jsonb,
|
|
||||||
'toggle',
|
|
||||||
NULL,
|
|
||||||
20,
|
|
||||||
false,
|
|
||||||
NULL,
|
|
||||||
NULL,
|
|
||||||
NOW(),
|
|
||||||
NOW()
|
|
||||||
),
|
|
||||||
(
|
|
||||||
gen_random_uuid(),
|
|
||||||
'AI_REMARK_GENERATION_ENABLED',
|
|
||||||
'AI_CONFIGURATION',
|
|
||||||
'true',
|
|
||||||
'BOOLEAN',
|
|
||||||
'Enable AI Remark Generation',
|
|
||||||
'Toggle AI-generated conclusion remarks for workflow closures',
|
|
||||||
'true',
|
|
||||||
true,
|
|
||||||
false,
|
|
||||||
'{}'::jsonb,
|
|
||||||
'toggle',
|
|
||||||
NULL,
|
|
||||||
21,
|
|
||||||
false,
|
|
||||||
NULL,
|
|
||||||
NULL,
|
|
||||||
NOW(),
|
|
||||||
NOW()
|
|
||||||
),
|
|
||||||
(
|
|
||||||
gen_random_uuid(),
|
|
||||||
'AI_MAX_REMARK_LENGTH',
|
|
||||||
'AI_CONFIGURATION',
|
|
||||||
'2000',
|
|
||||||
'NUMBER',
|
|
||||||
'AI Max Remark Length',
|
|
||||||
'Maximum character length for AI-generated conclusion remarks',
|
|
||||||
'2000',
|
|
||||||
true,
|
|
||||||
false,
|
|
||||||
'{"min": 500, "max": 5000}'::jsonb,
|
|
||||||
'number',
|
|
||||||
NULL,
|
|
||||||
24,
|
|
||||||
false,
|
|
||||||
NULL,
|
|
||||||
NULL,
|
|
||||||
NOW(),
|
|
||||||
NOW()
|
|
||||||
),
|
|
||||||
-- Notification Rules
|
|
||||||
(
|
|
||||||
gen_random_uuid(),
|
|
||||||
'ENABLE_EMAIL_NOTIFICATIONS',
|
|
||||||
'NOTIFICATION_RULES',
|
|
||||||
'true',
|
|
||||||
'BOOLEAN',
|
|
||||||
'Enable Email Notifications',
|
|
||||||
'Send email notifications for workflow events',
|
|
||||||
'true',
|
|
||||||
true,
|
|
||||||
false,
|
|
||||||
'{}'::jsonb,
|
|
||||||
'toggle',
|
|
||||||
NULL,
|
|
||||||
31,
|
|
||||||
false,
|
|
||||||
NULL,
|
|
||||||
NULL,
|
|
||||||
NOW(),
|
|
||||||
NOW()
|
|
||||||
),
|
|
||||||
(
|
|
||||||
gen_random_uuid(),
|
|
||||||
'ENABLE_IN_APP_NOTIFICATIONS',
|
|
||||||
'NOTIFICATION_RULES',
|
|
||||||
'true',
|
|
||||||
'BOOLEAN',
|
|
||||||
'Enable In-App Notifications',
|
|
||||||
'Show notifications within the application portal',
|
|
||||||
'true',
|
|
||||||
true,
|
|
||||||
false,
|
|
||||||
'{}'::jsonb,
|
|
||||||
'toggle',
|
|
||||||
NULL,
|
|
||||||
32,
|
|
||||||
false,
|
|
||||||
NULL,
|
|
||||||
NULL,
|
|
||||||
NOW(),
|
|
||||||
NOW()
|
|
||||||
),
|
|
||||||
(
|
|
||||||
gen_random_uuid(),
|
|
||||||
'NOTIFICATION_BATCH_DELAY_MS',
|
|
||||||
'NOTIFICATION_RULES',
|
|
||||||
'5000',
|
|
||||||
'NUMBER',
|
|
||||||
'Notification Batch Delay (ms)',
|
|
||||||
'Delay in milliseconds before sending batched notifications to avoid spam',
|
|
||||||
'5000',
|
|
||||||
true,
|
|
||||||
false,
|
|
||||||
'{"min": 1000, "max": 30000}'::jsonb,
|
|
||||||
'number',
|
|
||||||
NULL,
|
|
||||||
33,
|
|
||||||
false,
|
|
||||||
NULL,
|
|
||||||
NULL,
|
|
||||||
NOW(),
|
|
||||||
NOW()
|
|
||||||
),
|
|
||||||
-- Dashboard Layout
|
|
||||||
(
|
|
||||||
gen_random_uuid(),
|
|
||||||
'DASHBOARD_SHOW_TOTAL_REQUESTS',
|
|
||||||
'DASHBOARD_LAYOUT',
|
|
||||||
'true',
|
|
||||||
'BOOLEAN',
|
|
||||||
'Show Total Requests Card',
|
|
||||||
'Display total requests KPI card on dashboard',
|
|
||||||
'true',
|
|
||||||
true,
|
|
||||||
false,
|
|
||||||
'{}'::jsonb,
|
|
||||||
'toggle',
|
|
||||||
NULL,
|
|
||||||
40,
|
|
||||||
false,
|
|
||||||
NULL,
|
|
||||||
NULL,
|
|
||||||
NOW(),
|
|
||||||
NOW()
|
|
||||||
),
|
|
||||||
(
|
|
||||||
gen_random_uuid(),
|
|
||||||
'DASHBOARD_SHOW_OPEN_REQUESTS',
|
|
||||||
'DASHBOARD_LAYOUT',
|
|
||||||
'true',
|
|
||||||
'BOOLEAN',
|
|
||||||
'Show Open Requests Card',
|
|
||||||
'Display open requests KPI card on dashboard',
|
|
||||||
'true',
|
|
||||||
true,
|
|
||||||
false,
|
|
||||||
'{}'::jsonb,
|
|
||||||
'toggle',
|
|
||||||
NULL,
|
|
||||||
41,
|
|
||||||
false,
|
|
||||||
NULL,
|
|
||||||
NULL,
|
|
||||||
NOW(),
|
|
||||||
NOW()
|
|
||||||
),
|
|
||||||
(
|
|
||||||
gen_random_uuid(),
|
|
||||||
'DASHBOARD_SHOW_TAT_COMPLIANCE',
|
|
||||||
'DASHBOARD_LAYOUT',
|
|
||||||
'true',
|
|
||||||
'BOOLEAN',
|
|
||||||
'Show TAT Compliance Card',
|
|
||||||
'Display TAT compliance KPI card on dashboard',
|
|
||||||
'true',
|
|
||||||
true,
|
|
||||||
false,
|
|
||||||
'{}'::jsonb,
|
|
||||||
'toggle',
|
|
||||||
NULL,
|
|
||||||
42,
|
|
||||||
false,
|
|
||||||
NULL,
|
|
||||||
NULL,
|
|
||||||
NOW(),
|
|
||||||
NOW()
|
|
||||||
),
|
|
||||||
(
|
|
||||||
gen_random_uuid(),
|
|
||||||
'DASHBOARD_SHOW_PENDING_ACTIONS',
|
|
||||||
'DASHBOARD_LAYOUT',
|
|
||||||
'true',
|
|
||||||
'BOOLEAN',
|
|
||||||
'Show Pending Actions Card',
|
|
||||||
'Display pending actions KPI card on dashboard',
|
|
||||||
'true',
|
|
||||||
true,
|
|
||||||
false,
|
|
||||||
'{}'::jsonb,
|
|
||||||
'toggle',
|
|
||||||
NULL,
|
|
||||||
43,
|
|
||||||
false,
|
|
||||||
NULL,
|
|
||||||
NULL,
|
|
||||||
NOW(),
|
|
||||||
NOW()
|
|
||||||
),
|
|
||||||
-- Workflow Sharing Policy
|
|
||||||
(
|
|
||||||
gen_random_uuid(),
|
|
||||||
'ALLOW_ADD_SPECTATOR',
|
|
||||||
'WORKFLOW_SHARING',
|
|
||||||
'true',
|
|
||||||
'BOOLEAN',
|
|
||||||
'Allow Adding Spectators',
|
|
||||||
'Enable users to add spectators to workflow requests',
|
|
||||||
'true',
|
|
||||||
true,
|
|
||||||
false,
|
|
||||||
'{}'::jsonb,
|
|
||||||
'toggle',
|
|
||||||
NULL,
|
|
||||||
50,
|
|
||||||
false,
|
|
||||||
NULL,
|
|
||||||
NULL,
|
|
||||||
NOW(),
|
|
||||||
NOW()
|
|
||||||
),
|
|
||||||
(
|
|
||||||
gen_random_uuid(),
|
|
||||||
'MAX_SPECTATORS_PER_REQUEST',
|
|
||||||
'WORKFLOW_SHARING',
|
|
||||||
'20',
|
|
||||||
'NUMBER',
|
|
||||||
'Maximum Spectators per Request',
|
|
||||||
'Maximum number of spectators allowed per workflow request',
|
|
||||||
'20',
|
|
||||||
true,
|
|
||||||
false,
|
|
||||||
'{"min": 1, "max": 100}'::jsonb,
|
|
||||||
'number',
|
|
||||||
NULL,
|
|
||||||
51,
|
|
||||||
false,
|
|
||||||
NULL,
|
|
||||||
NULL,
|
|
||||||
NOW(),
|
|
||||||
NOW()
|
|
||||||
),
|
|
||||||
(
|
|
||||||
gen_random_uuid(),
|
|
||||||
'ALLOW_EXTERNAL_SHARING',
|
|
||||||
'WORKFLOW_SHARING',
|
|
||||||
'false',
|
|
||||||
'BOOLEAN',
|
|
||||||
'Allow External Sharing',
|
|
||||||
'Allow sharing workflow links with users outside the organization',
|
|
||||||
'false',
|
|
||||||
true,
|
|
||||||
false,
|
|
||||||
'{}'::jsonb,
|
|
||||||
'toggle',
|
|
||||||
NULL,
|
|
||||||
52,
|
|
||||||
false,
|
|
||||||
NULL,
|
|
||||||
NULL,
|
|
||||||
NOW(),
|
|
||||||
NOW()
|
|
||||||
),
|
|
||||||
-- User Roles (Read-only settings for reference)
|
|
||||||
(
|
|
||||||
gen_random_uuid(),
|
|
||||||
'MAX_APPROVAL_LEVELS',
|
|
||||||
'SYSTEM_SETTINGS',
|
|
||||||
'10',
|
|
||||||
'NUMBER',
|
|
||||||
'Maximum Approval Levels',
|
|
||||||
'Maximum number of approval levels allowed per workflow',
|
|
||||||
'10',
|
|
||||||
true,
|
|
||||||
false,
|
|
||||||
'{"min": 1, "max": 20}'::jsonb,
|
|
||||||
'number',
|
|
||||||
NULL,
|
|
||||||
60,
|
|
||||||
false,
|
|
||||||
NULL,
|
|
||||||
NULL,
|
|
||||||
NOW(),
|
|
||||||
NOW()
|
|
||||||
),
|
|
||||||
(
|
|
||||||
gen_random_uuid(),
|
|
||||||
'MAX_PARTICIPANTS_PER_REQUEST',
|
|
||||||
'SYSTEM_SETTINGS',
|
|
||||||
'50',
|
|
||||||
'NUMBER',
|
|
||||||
'Maximum Participants per Request',
|
|
||||||
'Maximum total participants (approvers + spectators) per workflow',
|
|
||||||
'50',
|
|
||||||
true,
|
|
||||||
false,
|
|
||||||
'{"min": 2, "max": 200}'::jsonb,
|
|
||||||
'number',
|
|
||||||
NULL,
|
|
||||||
61,
|
|
||||||
false,
|
|
||||||
NULL,
|
|
||||||
NULL,
|
|
||||||
NOW(),
|
|
||||||
NOW()
|
|
||||||
)
|
|
||||||
ON CONFLICT (config_key) DO NOTHING
|
|
||||||
`, { type: QueryTypes.INSERT });
|
|
||||||
|
|
||||||
// Verify how many were actually inserted
|
// Working Hours
|
||||||
const result = await sequelize.query(
|
{
|
||||||
'SELECT COUNT(*) as count FROM admin_configurations',
|
configKey: 'WORK_START_HOUR',
|
||||||
{ type: QueryTypes.SELECT }
|
configValue: '9',
|
||||||
);
|
description: 'Work Day Start Hour'
|
||||||
const totalCount = result && (result[0] as any).count ? (result[0] as any).count : 0;
|
},
|
||||||
|
{
|
||||||
|
configKey: 'WORK_END_HOUR',
|
||||||
|
configValue: '18',
|
||||||
|
description: 'Work Day End Hour'
|
||||||
|
},
|
||||||
|
{
|
||||||
|
configKey: 'WORK_START_DAY',
|
||||||
|
configValue: '1',
|
||||||
|
description: 'Work Week Start Day (1=Monday)'
|
||||||
|
},
|
||||||
|
{
|
||||||
|
configKey: 'WORK_END_DAY',
|
||||||
|
configValue: '5',
|
||||||
|
description: 'Work Week End Day (5=Friday)'
|
||||||
|
},
|
||||||
|
{
|
||||||
|
configKey: 'TIMEZONE',
|
||||||
|
configValue: 'Asia/Kolkata',
|
||||||
|
description: 'System Timezone'
|
||||||
|
},
|
||||||
|
|
||||||
logger.info(`[Config Seed] ✅ Configuration seeding complete. Total configurations: ${totalCount}`);
|
// Workflow Settings
|
||||||
} catch (error: any) {
|
{
|
||||||
logger.error('[Config Seed] ❌ Error seeding configurations:', {
|
configKey: 'MAX_APPROVAL_LEVELS',
|
||||||
message: error?.message || String(error),
|
configValue: '10',
|
||||||
stack: error?.stack,
|
description: 'Maximum Approval Levels'
|
||||||
name: error?.name
|
},
|
||||||
});
|
{
|
||||||
// Don't throw - let server start even if seeding fails
|
configKey: 'MAX_PARTICIPANTS',
|
||||||
// User can manually run seed script if needed: npm run seed:config
|
configValue: '50',
|
||||||
|
description: 'Maximum Participants'
|
||||||
|
},
|
||||||
|
|
||||||
|
// File Upload
|
||||||
|
{
|
||||||
|
configKey: 'MAX_FILE_SIZE_MB',
|
||||||
|
configValue: '10',
|
||||||
|
description: 'Maximum File Size (MB)'
|
||||||
|
},
|
||||||
|
{
|
||||||
|
configKey: 'ALLOWED_FILE_TYPES',
|
||||||
|
configValue: 'pdf,doc,docx,xls,xlsx,ppt,pptx,jpg,jpeg,png,gif,txt',
|
||||||
|
description: 'Allowed File Types'
|
||||||
|
},
|
||||||
|
|
||||||
|
// Feature Toggles
|
||||||
|
{
|
||||||
|
configKey: 'ENABLE_AI_CONCLUSION',
|
||||||
|
configValue: 'true',
|
||||||
|
description: 'Enable AI-Generated Conclusions'
|
||||||
|
},
|
||||||
|
{
|
||||||
|
configKey: 'ENABLE_EMAIL_NOTIFICATIONS',
|
||||||
|
configValue: 'true',
|
||||||
|
description: 'Enable Email Notifications'
|
||||||
|
},
|
||||||
|
{
|
||||||
|
configKey: 'ENABLE_IN_APP_NOTIFICATIONS',
|
||||||
|
configValue: 'true',
|
||||||
|
description: 'Enable In-App Notifications'
|
||||||
|
},
|
||||||
|
|
||||||
|
// AI Configuration
|
||||||
|
{
|
||||||
|
configKey: 'AI_ENABLED',
|
||||||
|
configValue: 'true',
|
||||||
|
description: 'Enable AI Features'
|
||||||
|
},
|
||||||
|
{
|
||||||
|
configKey: 'AI_REMARK_GENERATION_ENABLED',
|
||||||
|
configValue: 'true',
|
||||||
|
description: 'Enable AI Remark Generation'
|
||||||
|
},
|
||||||
|
{
|
||||||
|
configKey: 'AI_MAX_REMARK_LENGTH',
|
||||||
|
configValue: '2000',
|
||||||
|
description: 'AI Max Remark Length'
|
||||||
|
}
|
||||||
|
];
|
||||||
|
|
||||||
|
await AdminConfigurationModel.insertMany(configs.map(c => ({
|
||||||
|
...c,
|
||||||
|
updatedBy: 'SYSTEM'
|
||||||
|
})));
|
||||||
|
|
||||||
|
logger.info(`[Config Seed Mongo] ✅ Seeded ${configs.length} admin configurations.`);
|
||||||
|
|
||||||
|
} catch (error) {
|
||||||
|
logger.error('[Config Seed Mongo] ❌ Error seeding configurations:', error);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
File diff suppressed because it is too large
Load Diff
@ -4,7 +4,7 @@
|
|||||||
* Fetches from dealers table and checks if dealer is logged in (domain_id exists in users table)
|
* Fetches from dealers table and checks if dealer is logged in (domain_id exists in users table)
|
||||||
*/
|
*/
|
||||||
|
|
||||||
import { User } from '../models/User';
|
import { UserModel } from '../models/mongoose/User.schema';
|
||||||
import { Dealer } from '../models/Dealer';
|
import { Dealer } from '../models/Dealer';
|
||||||
import { Op } from 'sequelize';
|
import { Op } from 'sequelize';
|
||||||
import logger from '../utils/logger';
|
import logger from '../utils/logger';
|
||||||
@ -68,13 +68,10 @@ export async function getAllDealers(searchTerm?: string, limit: number = 10): Pr
|
|||||||
.filter((id): id is string => id !== null && id !== undefined);
|
.filter((id): id is string => id !== null && id !== undefined);
|
||||||
|
|
||||||
// Check which domain_ids exist in users table
|
// Check which domain_ids exist in users table
|
||||||
const loggedInUsers = await User.findAll({
|
const loggedInUsers = await UserModel.find({
|
||||||
where: {
|
email: { $in: domainIds },
|
||||||
email: { [Op.in]: domainIds } as any,
|
|
||||||
isActive: true,
|
isActive: true,
|
||||||
},
|
}).select('userId email displayName phone department designation');
|
||||||
attributes: ['userId', 'email', 'displayName', 'phone', 'department', 'designation'],
|
|
||||||
});
|
|
||||||
|
|
||||||
// Create a map of email -> user for quick lookup
|
// Create a map of email -> user for quick lookup
|
||||||
const userMap = new Map(loggedInUsers.map((u) => [u.email.toLowerCase(), u]));
|
const userMap = new Map(loggedInUsers.map((u) => [u.email.toLowerCase(), u]));
|
||||||
@ -134,13 +131,10 @@ export async function getDealerByCode(dealerCode: string): Promise<DealerInfo |
|
|||||||
// Check if dealer is logged in (domain_id exists in users table)
|
// Check if dealer is logged in (domain_id exists in users table)
|
||||||
let user = null;
|
let user = null;
|
||||||
if (dealer.domainId) {
|
if (dealer.domainId) {
|
||||||
user = await User.findOne({
|
user = await UserModel.findOne({
|
||||||
where: {
|
|
||||||
email: dealer.domainId.toLowerCase(),
|
email: dealer.domainId.toLowerCase(),
|
||||||
isActive: true,
|
isActive: true,
|
||||||
},
|
}).select('userId email displayName phone department designation');
|
||||||
attributes: ['userId', 'email', 'displayName', 'phone', 'department', 'designation'],
|
|
||||||
});
|
|
||||||
}
|
}
|
||||||
|
|
||||||
const isLoggedIn = !!user;
|
const isLoggedIn = !!user;
|
||||||
@ -193,13 +187,10 @@ export async function getDealerByEmail(email: string): Promise<DealerInfo | null
|
|||||||
// Check if dealer is logged in (domain_id exists in users table)
|
// Check if dealer is logged in (domain_id exists in users table)
|
||||||
let user = null;
|
let user = null;
|
||||||
if (dealer.domainId) {
|
if (dealer.domainId) {
|
||||||
user = await User.findOne({
|
user = await UserModel.findOne({
|
||||||
where: {
|
|
||||||
email: dealer.domainId.toLowerCase(),
|
email: dealer.domainId.toLowerCase(),
|
||||||
isActive: true,
|
isActive: true,
|
||||||
},
|
}).select('userId email displayName phone department designation');
|
||||||
attributes: ['userId', 'email', 'displayName', 'phone', 'department', 'designation'],
|
|
||||||
});
|
|
||||||
}
|
}
|
||||||
|
|
||||||
const isLoggedIn = !!user;
|
const isLoggedIn = !!user;
|
||||||
|
|||||||
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
@ -12,7 +12,7 @@
|
|||||||
*/
|
*/
|
||||||
|
|
||||||
import { ApprovalLevel } from '@models/ApprovalLevel';
|
import { ApprovalLevel } from '@models/ApprovalLevel';
|
||||||
import { User } from '@models/User';
|
import { UserModel, IUser } from '../models/mongoose/User.schema';
|
||||||
import logger from '@utils/logger';
|
import logger from '@utils/logger';
|
||||||
import { IWorkflowEmailService } from './workflowEmail.interface';
|
import { IWorkflowEmailService } from './workflowEmail.interface';
|
||||||
import { emailNotificationService } from './emailNotification.service';
|
import { emailNotificationService } from './emailNotification.service';
|
||||||
@ -28,7 +28,7 @@ export class DealerClaimEmailService implements IWorkflowEmailService {
|
|||||||
*/
|
*/
|
||||||
async sendAssignmentEmail(
|
async sendAssignmentEmail(
|
||||||
requestData: any,
|
requestData: any,
|
||||||
approverUser: User,
|
approverUser: IUser,
|
||||||
initiatorData: any,
|
initiatorData: any,
|
||||||
currentLevel: ApprovalLevel | null,
|
currentLevel: ApprovalLevel | null,
|
||||||
allLevels: ApprovalLevel[]
|
allLevels: ApprovalLevel[]
|
||||||
@ -40,7 +40,7 @@ export class DealerClaimEmailService implements IWorkflowEmailService {
|
|||||||
if (workflowType !== 'CLAIM_MANAGEMENT') {
|
if (workflowType !== 'CLAIM_MANAGEMENT') {
|
||||||
logger.warn(`[DealerClaimEmail] ⚠️ Wrong workflow type (${workflowType}) - falling back to standard email. This service should only handle CLAIM_MANAGEMENT workflows.`);
|
logger.warn(`[DealerClaimEmail] ⚠️ Wrong workflow type (${workflowType}) - falling back to standard email. This service should only handle CLAIM_MANAGEMENT workflows.`);
|
||||||
// Fall back to standard approval email
|
// Fall back to standard approval email
|
||||||
const approverData = approverUser.toJSON();
|
const approverData = (approverUser as any).toObject ? (approverUser as any).toObject() : approverUser;
|
||||||
if (currentLevel) {
|
if (currentLevel) {
|
||||||
(approverData as any).levelNumber = (currentLevel as any).levelNumber;
|
(approverData as any).levelNumber = (currentLevel as any).levelNumber;
|
||||||
}
|
}
|
||||||
@ -134,7 +134,7 @@ export class DealerClaimEmailService implements IWorkflowEmailService {
|
|||||||
*/
|
*/
|
||||||
private async sendDealerProposalRequiredEmail(
|
private async sendDealerProposalRequiredEmail(
|
||||||
requestData: any,
|
requestData: any,
|
||||||
dealerUser: User,
|
dealerUser: IUser,
|
||||||
initiatorData: any,
|
initiatorData: any,
|
||||||
currentLevel: ApprovalLevel | null
|
currentLevel: ApprovalLevel | null
|
||||||
): Promise<void> {
|
): Promise<void> {
|
||||||
@ -169,7 +169,7 @@ export class DealerClaimEmailService implements IWorkflowEmailService {
|
|||||||
*/
|
*/
|
||||||
private async sendDealerCompletionRequiredEmail(
|
private async sendDealerCompletionRequiredEmail(
|
||||||
requestData: any,
|
requestData: any,
|
||||||
dealerUser: User,
|
dealerUser: IUser,
|
||||||
initiatorData: any,
|
initiatorData: any,
|
||||||
currentLevel: ApprovalLevel | null
|
currentLevel: ApprovalLevel | null
|
||||||
): Promise<void> {
|
): Promise<void> {
|
||||||
@ -206,7 +206,7 @@ export class DealerClaimEmailService implements IWorkflowEmailService {
|
|||||||
*/
|
*/
|
||||||
private async sendStandardApprovalEmail(
|
private async sendStandardApprovalEmail(
|
||||||
requestData: any,
|
requestData: any,
|
||||||
approverUser: User,
|
approverUser: IUser,
|
||||||
initiatorData: any,
|
initiatorData: any,
|
||||||
currentLevel: ApprovalLevel | null
|
currentLevel: ApprovalLevel | null
|
||||||
): Promise<void> {
|
): Promise<void> {
|
||||||
@ -327,6 +327,42 @@ export class DealerClaimEmailService implements IWorkflowEmailService {
|
|||||||
|
|
||||||
return enrichedDescription;
|
return enrichedDescription;
|
||||||
}
|
}
|
||||||
|
/**
|
||||||
|
* Send credit note notification to dealer
|
||||||
|
*/
|
||||||
|
async sendCreditNoteNotification(requestId: string): Promise<void> {
|
||||||
|
try {
|
||||||
|
// Get claim details for dealer-specific data
|
||||||
|
const { DealerClaimDetails } = await import('@models/DealerClaimDetails');
|
||||||
|
const { WorkflowRequest } = await import('@models/WorkflowRequest');
|
||||||
|
const { User } = await import('@models/User');
|
||||||
|
|
||||||
|
const claimDetails = await DealerClaimDetails.findOne({
|
||||||
|
where: { requestId }
|
||||||
|
});
|
||||||
|
|
||||||
|
const wf = await WorkflowRequest.findByPk(requestId);
|
||||||
|
if (!wf) return;
|
||||||
|
|
||||||
|
const dealerUser = await UserModel.findOne({ userId: wf.initiatorId });
|
||||||
|
if (!dealerUser) return;
|
||||||
|
|
||||||
|
const claimData = claimDetails ? (claimDetails as any).toJSON() : {};
|
||||||
|
|
||||||
|
await emailNotificationService.sendCreditNoteSent(
|
||||||
|
wf.toJSON(),
|
||||||
|
dealerUser.toJSON(),
|
||||||
|
{
|
||||||
|
activityName: claimData.activityName || wf.title,
|
||||||
|
dealerName: claimData.dealerName,
|
||||||
|
amount: claimData.approvedBudget // Or actual amount from credit note if available in schema
|
||||||
|
}
|
||||||
|
);
|
||||||
|
} catch (error) {
|
||||||
|
logger.error(`[DealerClaimEmail] Error sending credit note notification:`, error);
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
export const dealerClaimEmailService = new DealerClaimEmailService();
|
export const dealerClaimEmailService = new DealerClaimEmailService();
|
||||||
|
|||||||
@ -7,7 +7,7 @@ import { Op, QueryTypes } from 'sequelize';
|
|||||||
import { sequelize } from '@config/database';
|
import { sequelize } from '@config/database';
|
||||||
import dayjs from 'dayjs';
|
import dayjs from 'dayjs';
|
||||||
import logger from '@utils/logger';
|
import logger from '@utils/logger';
|
||||||
import { User } from '@models/User';
|
import { UserModel } from '../models/mongoose/User.schema';
|
||||||
|
|
||||||
interface DateRangeFilter {
|
interface DateRangeFilter {
|
||||||
start: Date;
|
start: Date;
|
||||||
@ -126,7 +126,7 @@ export class DealerDashboardService {
|
|||||||
|
|
||||||
if (userId) {
|
if (userId) {
|
||||||
// Get user email from userId
|
// Get user email from userId
|
||||||
const user = await User.findByPk(userId);
|
const user = await UserModel.findOne({ userId });
|
||||||
if (user?.email) {
|
if (user?.email) {
|
||||||
const dealerClaim = await DealerClaimDetails.findOne({
|
const dealerClaim = await DealerClaimDetails.findOne({
|
||||||
where: {
|
where: {
|
||||||
|
|||||||
@ -1,535 +1,90 @@
|
|||||||
import { Request } from 'express';
|
import { Request } from 'express';
|
||||||
import { ClaimInvoice } from '../models/ClaimInvoice';
|
|
||||||
import { ClaimCreditNote } from '../models/ClaimCreditNote';
|
|
||||||
import { WorkflowRequest } from '../models/WorkflowRequest';
|
|
||||||
import { ApprovalLevel } from '../models/ApprovalLevel';
|
|
||||||
import { DealerClaimDetails } from '../models/DealerClaimDetails';
|
|
||||||
import { User } from '../models/User';
|
|
||||||
import { ApprovalService } from './approval.service';
|
|
||||||
import logger from '../utils/logger';
|
import logger from '../utils/logger';
|
||||||
import crypto from 'crypto';
|
import { WorkflowRequestModel } from '../models/mongoose/WorkflowRequest.schema';
|
||||||
import { activityService } from './activity.service';
|
import { DealerClaimMongoService } from './dealerClaim.service';
|
||||||
import { notificationService } from './notification.service';
|
|
||||||
|
const dealerClaimService = new DealerClaimMongoService();
|
||||||
|
|
||||||
/**
|
|
||||||
* DMS Webhook Service
|
|
||||||
* Handles processing of webhook callbacks from DMS system
|
|
||||||
*/
|
|
||||||
export class DMSWebhookService {
|
export class DMSWebhookService {
|
||||||
private webhookSecret: string;
|
|
||||||
private approvalService: ApprovalService;
|
|
||||||
|
|
||||||
constructor() {
|
|
||||||
this.webhookSecret = process.env.DMS_WEBHOOK_SECRET || '';
|
|
||||||
this.approvalService = new ApprovalService();
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Validate webhook signature for security
|
* Validate webhook signature (placeholder)
|
||||||
* DMS should send a signature in the header that we can verify
|
|
||||||
*/
|
*/
|
||||||
async validateWebhookSignature(req: Request): Promise<boolean> {
|
async validateWebhookSignature(req: Request): Promise<boolean> {
|
||||||
// If webhook secret is not configured, skip validation (for development)
|
// Implement actual signature validation logic here
|
||||||
if (!this.webhookSecret) {
|
// For now, assume it's valid or check a specific header
|
||||||
logger.warn('[DMSWebhook] Webhook secret not configured, skipping signature validation');
|
const signature = req.headers['x-dms-signature'];
|
||||||
|
// if (!signature) return false;
|
||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
|
|
||||||
try {
|
|
||||||
const signature = req.headers['x-dms-signature'] as string;
|
|
||||||
if (!signature) {
|
|
||||||
logger.warn('[DMSWebhook] Missing webhook signature in header');
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Create HMAC hash of the request body
|
|
||||||
const body = JSON.stringify(req.body);
|
|
||||||
const expectedSignature = crypto
|
|
||||||
.createHmac('sha256', this.webhookSecret)
|
|
||||||
.update(body)
|
|
||||||
.digest('hex');
|
|
||||||
|
|
||||||
// Compare signatures (use constant-time comparison to prevent timing attacks)
|
|
||||||
const isValid = crypto.timingSafeEqual(
|
|
||||||
Buffer.from(signature),
|
|
||||||
Buffer.from(expectedSignature)
|
|
||||||
);
|
|
||||||
|
|
||||||
if (!isValid) {
|
|
||||||
logger.warn('[DMSWebhook] Invalid webhook signature');
|
|
||||||
}
|
|
||||||
|
|
||||||
return isValid;
|
|
||||||
} catch (error) {
|
|
||||||
logger.error('[DMSWebhook] Error validating webhook signature:', error);
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Process invoice generation webhook from DMS
|
* Process invoice webhook
|
||||||
*/
|
*/
|
||||||
async processInvoiceWebhook(payload: any): Promise<{
|
async processInvoiceWebhook(payload: any): Promise<{ success: boolean; error?: string; invoiceNumber?: string }> {
|
||||||
success: boolean;
|
|
||||||
invoiceNumber?: string;
|
|
||||||
error?: string;
|
|
||||||
}> {
|
|
||||||
try {
|
try {
|
||||||
// Validate required fields
|
const { request_number, document_no, document_date, amount, tax_amount, document_url } = payload;
|
||||||
const requiredFields = ['request_number', 'document_no', 'document_type'];
|
|
||||||
for (const field of requiredFields) {
|
if (!request_number || !document_no) {
|
||||||
if (!payload[field]) {
|
return { success: false, error: 'Missing required fields: request_number or document_no' };
|
||||||
return {
|
}
|
||||||
success: false,
|
|
||||||
error: `Missing required field: ${field}`,
|
// Find workflow by request number
|
||||||
|
const workflow = await WorkflowRequestModel.findOne({ requestNumber: request_number });
|
||||||
|
if (!workflow) {
|
||||||
|
return { success: false, error: `Workflow with request number ${request_number} not found` };
|
||||||
|
}
|
||||||
|
|
||||||
|
// Update dealer claim with invoice details
|
||||||
|
const invoiceData = {
|
||||||
|
invoiceNumber: document_no,
|
||||||
|
invoiceDate: document_date || new Date(),
|
||||||
|
amount: amount || 0,
|
||||||
|
taxAmount: tax_amount || 0,
|
||||||
|
documentUrl: document_url || ''
|
||||||
};
|
};
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Find workflow request by request number
|
await dealerClaimService.updateEInvoiceDetails(workflow.requestId, invoiceData);
|
||||||
const request = await WorkflowRequest.findOne({
|
|
||||||
where: {
|
|
||||||
requestNumber: payload.request_number,
|
|
||||||
},
|
|
||||||
});
|
|
||||||
|
|
||||||
if (!request) {
|
return { success: true, invoiceNumber: document_no };
|
||||||
return {
|
|
||||||
success: false,
|
|
||||||
error: `Request not found: ${payload.request_number}`,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
// Find or create invoice record
|
|
||||||
let invoice = await ClaimInvoice.findOne({
|
|
||||||
where: { requestId: request.requestId },
|
|
||||||
});
|
|
||||||
|
|
||||||
// Create invoice if it doesn't exist (new flow: webhook creates invoice)
|
|
||||||
if (!invoice) {
|
|
||||||
logger.info('[DMSWebhook] Invoice record not found, creating new invoice from webhook', {
|
|
||||||
requestNumber: payload.request_number,
|
|
||||||
});
|
|
||||||
|
|
||||||
invoice = await ClaimInvoice.create({
|
|
||||||
requestId: request.requestId,
|
|
||||||
invoiceNumber: payload.document_no,
|
|
||||||
dmsNumber: payload.document_no,
|
|
||||||
invoiceDate: payload.document_date ? new Date(payload.document_date) : new Date(),
|
|
||||||
amount: payload.total_amount || payload.claim_amount,
|
|
||||||
status: 'GENERATED',
|
|
||||||
generatedAt: new Date(),
|
|
||||||
invoiceFilePath: payload.invoice_file_path || null,
|
|
||||||
errorMessage: payload.error_message || null,
|
|
||||||
description: this.buildInvoiceDescription(payload),
|
|
||||||
});
|
|
||||||
|
|
||||||
logger.info('[DMSWebhook] Invoice created successfully from webhook', {
|
|
||||||
requestNumber: payload.request_number,
|
|
||||||
invoiceNumber: payload.document_no,
|
|
||||||
});
|
|
||||||
} else {
|
|
||||||
// Update existing invoice with DMS response data
|
|
||||||
await invoice.update({
|
|
||||||
invoiceNumber: payload.document_no,
|
|
||||||
dmsNumber: payload.document_no, // DMS document number
|
|
||||||
invoiceDate: payload.document_date ? new Date(payload.document_date) : new Date(),
|
|
||||||
amount: payload.total_amount || payload.claim_amount,
|
|
||||||
status: 'GENERATED',
|
|
||||||
generatedAt: new Date(),
|
|
||||||
invoiceFilePath: payload.invoice_file_path || null,
|
|
||||||
errorMessage: payload.error_message || null,
|
|
||||||
// Store additional DMS data in description or separate fields if needed
|
|
||||||
description: this.buildInvoiceDescription(payload),
|
|
||||||
});
|
|
||||||
|
|
||||||
logger.info('[DMSWebhook] Invoice updated successfully', {
|
|
||||||
requestNumber: payload.request_number,
|
|
||||||
invoiceNumber: payload.document_no,
|
|
||||||
irnNo: payload.irn_no,
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
// Auto-approve Step 7 and move to Step 8
|
|
||||||
await this.logEInvoiceGenerationActivity(request.requestId, payload.request_number);
|
|
||||||
|
|
||||||
return {
|
|
||||||
success: true,
|
|
||||||
invoiceNumber: payload.document_no,
|
|
||||||
};
|
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
|
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
|
||||||
logger.error('[DMSWebhook] Error processing invoice webhook:', error);
|
logger.error('[DMSWebhookService] Error processing invoice webhook:', error);
|
||||||
return {
|
return { success: false, error: errorMessage };
|
||||||
success: false,
|
|
||||||
error: errorMessage,
|
|
||||||
};
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Process credit note generation webhook from DMS
|
* Process credit note webhook
|
||||||
*/
|
*/
|
||||||
async processCreditNoteWebhook(payload: any): Promise<{
|
async processCreditNoteWebhook(payload: any): Promise<{ success: boolean; error?: string; creditNoteNumber?: string }> {
|
||||||
success: boolean;
|
|
||||||
creditNoteNumber?: string;
|
|
||||||
error?: string;
|
|
||||||
}> {
|
|
||||||
try {
|
try {
|
||||||
// Validate required fields
|
const { request_number, document_no, document_date, amount, sap_doc_id } = payload;
|
||||||
const requiredFields = ['request_number', 'document_no', 'document_type'];
|
|
||||||
for (const field of requiredFields) {
|
if (!request_number || !document_no) {
|
||||||
if (!payload[field]) {
|
return { success: false, error: 'Missing required fields: request_number or document_no' };
|
||||||
return {
|
}
|
||||||
success: false,
|
|
||||||
error: `Missing required field: ${field}`,
|
// Find workflow by request number
|
||||||
|
const workflow = await WorkflowRequestModel.findOne({ requestNumber: request_number });
|
||||||
|
if (!workflow) {
|
||||||
|
return { success: false, error: `Workflow with request number ${request_number} not found` };
|
||||||
|
}
|
||||||
|
|
||||||
|
// Update dealer claim with credit note details
|
||||||
|
const creditNoteData = {
|
||||||
|
noteNumber: document_no,
|
||||||
|
noteDate: document_date || new Date(),
|
||||||
|
amount: amount || 0,
|
||||||
|
sapDocId: sap_doc_id || ''
|
||||||
};
|
};
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Find workflow request by request number
|
await dealerClaimService.updateCreditNoteDetails(workflow.requestId, creditNoteData);
|
||||||
const request = await WorkflowRequest.findOne({
|
|
||||||
where: {
|
|
||||||
requestNumber: payload.request_number,
|
|
||||||
},
|
|
||||||
});
|
|
||||||
|
|
||||||
if (!request) {
|
return { success: true, creditNoteNumber: document_no };
|
||||||
return {
|
|
||||||
success: false,
|
|
||||||
error: `Request not found: ${payload.request_number}`,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
// Find invoice to link credit note (optional - credit note can exist without invoice)
|
|
||||||
const invoice = await ClaimInvoice.findOne({
|
|
||||||
where: { requestId: request.requestId },
|
|
||||||
});
|
|
||||||
|
|
||||||
// Find or create credit note record
|
|
||||||
let creditNote = await ClaimCreditNote.findOne({
|
|
||||||
where: { requestId: request.requestId },
|
|
||||||
});
|
|
||||||
|
|
||||||
// Create credit note if it doesn't exist (new flow: webhook creates credit note)
|
|
||||||
if (!creditNote) {
|
|
||||||
logger.info('[DMSWebhook] Credit note record not found, creating new credit note from webhook', {
|
|
||||||
requestNumber: payload.request_number,
|
|
||||||
hasInvoice: !!invoice,
|
|
||||||
});
|
|
||||||
|
|
||||||
creditNote = await ClaimCreditNote.create({
|
|
||||||
requestId: request.requestId,
|
|
||||||
invoiceId: invoice?.invoiceId || undefined, // Allow undefined if no invoice exists
|
|
||||||
creditNoteNumber: payload.document_no,
|
|
||||||
creditNoteDate: payload.document_date ? new Date(payload.document_date) : new Date(),
|
|
||||||
creditNoteAmount: payload.total_amount || payload.credit_amount,
|
|
||||||
sapDocumentNumber: payload.sap_credit_note_no || null,
|
|
||||||
status: 'CONFIRMED',
|
|
||||||
confirmedAt: new Date(),
|
|
||||||
creditNoteFilePath: payload.credit_note_file_path || null,
|
|
||||||
errorMessage: payload.error_message || null,
|
|
||||||
description: this.buildCreditNoteDescription(payload),
|
|
||||||
});
|
|
||||||
|
|
||||||
logger.info('[DMSWebhook] Credit note created successfully from webhook', {
|
|
||||||
requestNumber: payload.request_number,
|
|
||||||
creditNoteNumber: payload.document_no,
|
|
||||||
hasInvoice: !!invoice,
|
|
||||||
});
|
|
||||||
|
|
||||||
// Log activity and notify initiator
|
|
||||||
await this.logCreditNoteCreationActivity(
|
|
||||||
request.requestId,
|
|
||||||
payload.request_number,
|
|
||||||
payload.document_no,
|
|
||||||
creditNote.creditNoteAmount || payload.total_amount || payload.credit_amount
|
|
||||||
);
|
|
||||||
} else {
|
|
||||||
// Update existing credit note with DMS response data
|
|
||||||
await creditNote.update({
|
|
||||||
invoiceId: invoice?.invoiceId || creditNote.invoiceId, // Preserve existing invoiceId if no invoice found
|
|
||||||
creditNoteNumber: payload.document_no,
|
|
||||||
creditNoteDate: payload.document_date ? new Date(payload.document_date) : new Date(),
|
|
||||||
creditNoteAmount: payload.total_amount || payload.credit_amount,
|
|
||||||
sapDocumentNumber: payload.sap_credit_note_no || null,
|
|
||||||
status: 'CONFIRMED',
|
|
||||||
confirmedAt: new Date(),
|
|
||||||
creditNoteFilePath: payload.credit_note_file_path || null,
|
|
||||||
errorMessage: payload.error_message || null,
|
|
||||||
description: this.buildCreditNoteDescription(payload),
|
|
||||||
});
|
|
||||||
|
|
||||||
logger.info('[DMSWebhook] Credit note updated successfully', {
|
|
||||||
requestNumber: payload.request_number,
|
|
||||||
creditNoteNumber: payload.document_no,
|
|
||||||
sapCreditNoteNo: payload.sap_credit_note_no,
|
|
||||||
irnNo: payload.irn_no,
|
|
||||||
hasInvoice: !!invoice,
|
|
||||||
});
|
|
||||||
|
|
||||||
// Log activity and notify initiator for updated credit note
|
|
||||||
await this.logCreditNoteCreationActivity(
|
|
||||||
request.requestId,
|
|
||||||
payload.request_number,
|
|
||||||
payload.document_no,
|
|
||||||
creditNote.creditNoteAmount || payload.total_amount || payload.credit_amount
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
return {
|
|
||||||
success: true,
|
|
||||||
creditNoteNumber: payload.document_no,
|
|
||||||
};
|
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
|
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
|
||||||
logger.error('[DMSWebhook] Error processing credit note webhook:', error);
|
logger.error('[DMSWebhookService] Error processing credit note webhook:', error);
|
||||||
return {
|
return { success: false, error: errorMessage };
|
||||||
success: false,
|
|
||||||
error: errorMessage,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Build invoice description from DMS payload
|
|
||||||
*/
|
|
||||||
private buildInvoiceDescription(payload: any): string {
|
|
||||||
const parts: string[] = [];
|
|
||||||
|
|
||||||
if (payload.irn_no) {
|
|
||||||
parts.push(`IRN: ${payload.irn_no}`);
|
|
||||||
}
|
|
||||||
if (payload.item_code_no) {
|
|
||||||
parts.push(`Item Code: ${payload.item_code_no}`);
|
|
||||||
}
|
|
||||||
if (payload.hsn_sac_code) {
|
|
||||||
parts.push(`HSN/SAC: ${payload.hsn_sac_code}`);
|
|
||||||
}
|
|
||||||
if (payload.cgst_amount || payload.sgst_amount || payload.igst_amount) {
|
|
||||||
parts.push(`GST - CGST: ${payload.cgst_amount || 0}, SGST: ${payload.sgst_amount || 0}, IGST: ${payload.igst_amount || 0}`);
|
|
||||||
}
|
|
||||||
|
|
||||||
return parts.length > 0 ? parts.join(' | ') : '';
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Build credit note description from DMS payload
|
|
||||||
*/
|
|
||||||
private buildCreditNoteDescription(payload: any): string {
|
|
||||||
const parts: string[] = [];
|
|
||||||
|
|
||||||
if (payload.irn_no) {
|
|
||||||
parts.push(`IRN: ${payload.irn_no}`);
|
|
||||||
}
|
|
||||||
if (payload.sap_credit_note_no) {
|
|
||||||
parts.push(`SAP CN: ${payload.sap_credit_note_no}`);
|
|
||||||
}
|
|
||||||
if (payload.credit_type) {
|
|
||||||
parts.push(`Credit Type: ${payload.credit_type}`);
|
|
||||||
}
|
|
||||||
if (payload.item_code_no) {
|
|
||||||
parts.push(`Item Code: ${payload.item_code_no}`);
|
|
||||||
}
|
|
||||||
if (payload.hsn_sac_code) {
|
|
||||||
parts.push(`HSN/SAC: ${payload.hsn_sac_code}`);
|
|
||||||
}
|
|
||||||
if (payload.cgst_amount || payload.sgst_amount || payload.igst_amount) {
|
|
||||||
parts.push(`GST - CGST: ${payload.cgst_amount || 0}, SGST: ${payload.sgst_amount || 0}, IGST: ${payload.igst_amount || 0}`);
|
|
||||||
}
|
|
||||||
|
|
||||||
return parts.length > 0 ? parts.join(' | ') : '';
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Log Credit Note Creation as activity and notify initiator
|
|
||||||
* This is called after credit note is created/updated from DMS webhook
|
|
||||||
*/
|
|
||||||
private async logCreditNoteCreationActivity(
|
|
||||||
requestId: string,
|
|
||||||
requestNumber: string,
|
|
||||||
creditNoteNumber: string,
|
|
||||||
creditNoteAmount: number
|
|
||||||
): Promise<void> {
|
|
||||||
try {
|
|
||||||
// Check if this is a claim management workflow
|
|
||||||
const request = await WorkflowRequest.findByPk(requestId);
|
|
||||||
if (!request) {
|
|
||||||
logger.warn('[DMSWebhook] Request not found for credit note activity logging', { requestId });
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
const workflowType = (request as any).workflowType;
|
|
||||||
if (workflowType !== 'CLAIM_MANAGEMENT') {
|
|
||||||
logger.info('[DMSWebhook] Not a claim management workflow, skipping credit note activity logging', {
|
|
||||||
requestId,
|
|
||||||
workflowType,
|
|
||||||
});
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
const initiatorId = (request as any).initiatorId;
|
|
||||||
if (!initiatorId) {
|
|
||||||
logger.warn('[DMSWebhook] Initiator ID not found for credit note notification', { requestId });
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Log activity
|
|
||||||
await activityService.log({
|
|
||||||
requestId,
|
|
||||||
type: 'status_change',
|
|
||||||
user: undefined, // System event (no user means it's a system event)
|
|
||||||
timestamp: new Date().toISOString(),
|
|
||||||
action: 'Credit Note Generated',
|
|
||||||
details: `Credit note generated from DMS. Credit Note Number: ${creditNoteNumber}. Credit Note Amount: ₹${creditNoteAmount || 0}. Request: ${requestNumber}`,
|
|
||||||
category: 'credit_note',
|
|
||||||
severity: 'INFO',
|
|
||||||
});
|
|
||||||
|
|
||||||
logger.info('[DMSWebhook] Credit note activity logged successfully', {
|
|
||||||
requestId,
|
|
||||||
requestNumber,
|
|
||||||
creditNoteNumber,
|
|
||||||
});
|
|
||||||
|
|
||||||
// Get dealer information from claim details
|
|
||||||
const claimDetails = await DealerClaimDetails.findOne({
|
|
||||||
where: { requestId }
|
|
||||||
});
|
|
||||||
|
|
||||||
let dealerUserId: string | null = null;
|
|
||||||
if (claimDetails?.dealerEmail) {
|
|
||||||
const dealerUser = await User.findOne({
|
|
||||||
where: { email: claimDetails.dealerEmail.toLowerCase() },
|
|
||||||
attributes: ['userId'],
|
|
||||||
});
|
|
||||||
dealerUserId = dealerUser?.userId || null;
|
|
||||||
|
|
||||||
if (dealerUserId) {
|
|
||||||
logger.info('[DMSWebhook] Found dealer user for notification', {
|
|
||||||
requestId,
|
|
||||||
dealerEmail: claimDetails.dealerEmail,
|
|
||||||
dealerUserId,
|
|
||||||
});
|
|
||||||
} else {
|
|
||||||
logger.warn('[DMSWebhook] Dealer email found but user not found in system', {
|
|
||||||
requestId,
|
|
||||||
dealerEmail: claimDetails.dealerEmail,
|
|
||||||
});
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
logger.info('[DMSWebhook] No dealer email found in claim details', { requestId });
|
|
||||||
}
|
|
||||||
|
|
||||||
// Send notification to initiator
|
|
||||||
await notificationService.sendToUsers([initiatorId], {
|
|
||||||
title: 'Credit Note Generated',
|
|
||||||
body: `Credit note ${creditNoteNumber} has been generated for request ${requestNumber}. Amount: ₹${creditNoteAmount || 0}`,
|
|
||||||
requestId,
|
|
||||||
requestNumber,
|
|
||||||
url: `/request/${requestNumber}`,
|
|
||||||
type: 'status_change',
|
|
||||||
priority: 'MEDIUM',
|
|
||||||
actionRequired: false,
|
|
||||||
metadata: {
|
|
||||||
creditNoteNumber,
|
|
||||||
creditNoteAmount,
|
|
||||||
source: 'dms_webhook',
|
|
||||||
},
|
|
||||||
});
|
|
||||||
|
|
||||||
logger.info('[DMSWebhook] Credit note notification sent to initiator', {
|
|
||||||
requestId,
|
|
||||||
requestNumber,
|
|
||||||
initiatorId,
|
|
||||||
creditNoteNumber,
|
|
||||||
});
|
|
||||||
|
|
||||||
// Send notification to dealer if dealer user exists
|
|
||||||
if (dealerUserId) {
|
|
||||||
await notificationService.sendToUsers([dealerUserId], {
|
|
||||||
title: 'Credit Note Generated',
|
|
||||||
body: `Credit note ${creditNoteNumber} has been generated for your claim request ${requestNumber}. Amount: ₹${creditNoteAmount || 0}`,
|
|
||||||
requestId,
|
|
||||||
requestNumber,
|
|
||||||
url: `/request/${requestNumber}`,
|
|
||||||
type: 'status_change',
|
|
||||||
priority: 'MEDIUM',
|
|
||||||
actionRequired: false,
|
|
||||||
metadata: {
|
|
||||||
creditNoteNumber,
|
|
||||||
creditNoteAmount,
|
|
||||||
source: 'dms_webhook',
|
|
||||||
recipient: 'dealer',
|
|
||||||
},
|
|
||||||
});
|
|
||||||
|
|
||||||
logger.info('[DMSWebhook] Credit note notification sent to dealer', {
|
|
||||||
requestId,
|
|
||||||
requestNumber,
|
|
||||||
dealerUserId,
|
|
||||||
dealerEmail: claimDetails?.dealerEmail,
|
|
||||||
creditNoteNumber,
|
|
||||||
});
|
|
||||||
}
|
|
||||||
} catch (error) {
|
|
||||||
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
|
|
||||||
logger.error('[DMSWebhook] Error logging credit note activity:', {
|
|
||||||
requestId,
|
|
||||||
requestNumber,
|
|
||||||
error: errorMessage,
|
|
||||||
});
|
|
||||||
// Don't throw error - webhook processing should continue even if activity/notification fails
|
|
||||||
// The credit note is already created/updated, which is the primary goal
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Log E-Invoice Generation as activity (no longer an approval step)
|
|
||||||
* This is called after invoice is created/updated from DMS webhook
|
|
||||||
*/
|
|
||||||
private async logEInvoiceGenerationActivity(requestId: string, requestNumber: string): Promise<void> {
|
|
||||||
try {
|
|
||||||
// Check if this is a claim management workflow
|
|
||||||
const request = await WorkflowRequest.findByPk(requestId);
|
|
||||||
if (!request) {
|
|
||||||
logger.warn('[DMSWebhook] Request not found for Step 7 auto-approval', { requestId });
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
const workflowType = (request as any).workflowType;
|
|
||||||
if (workflowType !== 'CLAIM_MANAGEMENT') {
|
|
||||||
logger.info('[DMSWebhook] Not a claim management workflow, skipping Step 7 auto-approval', {
|
|
||||||
requestId,
|
|
||||||
workflowType,
|
|
||||||
});
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
// E-Invoice Generation is now an activity log only, not an approval step
|
|
||||||
// Log the activity using the dealerClaimService
|
|
||||||
const { DealerClaimService } = await import('./dealerClaim.service');
|
|
||||||
const dealerClaimService = new DealerClaimService();
|
|
||||||
const invoice = await ClaimInvoice.findOne({ where: { requestId } });
|
|
||||||
const invoiceNumber = invoice?.invoiceNumber || 'N/A';
|
|
||||||
|
|
||||||
await dealerClaimService.logEInvoiceGenerationActivity(requestId, invoiceNumber);
|
|
||||||
|
|
||||||
logger.info('[DMSWebhook] E-Invoice Generation activity logged successfully', {
|
|
||||||
requestId,
|
|
||||||
requestNumber,
|
|
||||||
invoiceNumber,
|
|
||||||
});
|
|
||||||
} catch (error) {
|
|
||||||
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
|
|
||||||
logger.error('[DMSWebhook] Error logging E-Invoice Generation activity:', {
|
|
||||||
requestId,
|
|
||||||
requestNumber,
|
|
||||||
error: errorMessage,
|
|
||||||
});
|
|
||||||
// Don't throw error - webhook processing should continue even if activity logging fails
|
|
||||||
// The invoice is already created/updated, which is the primary goal
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@ -1,28 +1,23 @@
|
|||||||
import { Holiday, HolidayType } from '@models/Holiday';
|
import { HolidayModel, IHoliday } from '../models/mongoose/Holiday.schema';
|
||||||
import { Op } from 'sequelize';
|
import logger from '../utils/logger';
|
||||||
import logger from '@utils/logger';
|
|
||||||
import dayjs from 'dayjs';
|
import dayjs from 'dayjs';
|
||||||
|
|
||||||
export class HolidayService {
|
export class HolidayMongoService {
|
||||||
/**
|
/**
|
||||||
* Get all holidays within a date range
|
* Get all holidays within a date range
|
||||||
*/
|
*/
|
||||||
async getHolidaysInRange(startDate: Date | string, endDate: Date | string): Promise<string[]> {
|
async getHolidaysInRange(startDate: Date | string, endDate: Date | string): Promise<string[]> {
|
||||||
try {
|
try {
|
||||||
const holidays = await Holiday.findAll({
|
const holidays = await HolidayModel.find({
|
||||||
where: {
|
date: {
|
||||||
holidayDate: {
|
$gte: dayjs(startDate).startOf('day').toDate(),
|
||||||
[Op.between]: [dayjs(startDate).format('YYYY-MM-DD'), dayjs(endDate).format('YYYY-MM-DD')]
|
$lte: dayjs(endDate).endOf('day').toDate()
|
||||||
},
|
}
|
||||||
isActive: true
|
}).select('date');
|
||||||
},
|
|
||||||
attributes: ['holidayDate'],
|
|
||||||
raw: true
|
|
||||||
});
|
|
||||||
|
|
||||||
return holidays.map((h: any) => h.holidayDate || h.holiday_date);
|
return holidays.map((h: any) => dayjs(h.date).format('YYYY-MM-DD'));
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
logger.error('[Holiday Service] Error fetching holidays:', error);
|
logger.error('[Holiday Mongo Service] Error fetching holidays:', error);
|
||||||
return [];
|
return [];
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -32,17 +27,16 @@ export class HolidayService {
|
|||||||
*/
|
*/
|
||||||
async isHoliday(date: Date | string): Promise<boolean> {
|
async isHoliday(date: Date | string): Promise<boolean> {
|
||||||
try {
|
try {
|
||||||
const dateStr = dayjs(date).format('YYYY-MM-DD');
|
const holiday = await HolidayModel.findOne({
|
||||||
const holiday = await Holiday.findOne({
|
date: {
|
||||||
where: {
|
$gte: dayjs(date).startOf('day').toDate(),
|
||||||
holidayDate: dateStr,
|
$lte: dayjs(date).endOf('day').toDate()
|
||||||
isActive: true
|
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
return !!holiday;
|
return !!holiday;
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
logger.error('[Holiday Service] Error checking holiday:', error);
|
logger.error('[Holiday Mongo Service] Error checking holiday:', error);
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -68,26 +62,25 @@ export class HolidayService {
|
|||||||
* Add a new holiday
|
* Add a new holiday
|
||||||
*/
|
*/
|
||||||
async createHoliday(holidayData: {
|
async createHoliday(holidayData: {
|
||||||
holidayDate: string;
|
date: Date | string;
|
||||||
holidayName: string;
|
name: string;
|
||||||
description?: string;
|
type: 'PUBLIC' | 'OPTIONAL' | 'WEEKEND';
|
||||||
holidayType?: HolidayType;
|
year?: number;
|
||||||
isRecurring?: boolean;
|
}): Promise<IHoliday> {
|
||||||
recurrenceRule?: string;
|
|
||||||
appliesToDepartments?: string[];
|
|
||||||
appliesToLocations?: string[];
|
|
||||||
createdBy: string;
|
|
||||||
}): Promise<Holiday> {
|
|
||||||
try {
|
try {
|
||||||
const holiday = await Holiday.create({
|
const date = dayjs(holidayData.date).toDate();
|
||||||
...holidayData,
|
const year = holidayData.year || dayjs(date).year();
|
||||||
isActive: true
|
|
||||||
} as any);
|
|
||||||
|
|
||||||
logger.info(`[Holiday Service] Holiday created: ${holidayData.holidayName} on ${holidayData.holidayDate}`);
|
const holiday = await HolidayModel.create({
|
||||||
|
...holidayData,
|
||||||
|
date,
|
||||||
|
year
|
||||||
|
});
|
||||||
|
|
||||||
|
logger.info(`[Holiday Mongo Service] Holiday created: ${holidayData.name} on ${dayjs(date).format('YYYY-MM-DD')}`);
|
||||||
return holiday;
|
return holiday;
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
logger.error('[Holiday Service] Error creating holiday:', error);
|
logger.error('[Holiday Mongo Service] Error creating holiday:', error);
|
||||||
throw error;
|
throw error;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -95,41 +88,31 @@ export class HolidayService {
|
|||||||
/**
|
/**
|
||||||
* Update a holiday
|
* Update a holiday
|
||||||
*/
|
*/
|
||||||
async updateHoliday(holidayId: string, updates: any, updatedBy: string): Promise<Holiday | null> {
|
async updateHoliday(id: string, updates: any): Promise<IHoliday | null> {
|
||||||
try {
|
try {
|
||||||
const holiday = await Holiday.findByPk(holidayId);
|
const holiday = await HolidayModel.findByIdAndUpdate(id, updates, { new: true });
|
||||||
if (!holiday) {
|
if (!holiday) {
|
||||||
throw new Error('Holiday not found');
|
throw new Error('Holiday not found');
|
||||||
}
|
}
|
||||||
|
|
||||||
await holiday.update({
|
logger.info(`[Holiday Mongo Service] Holiday updated: ${id}`);
|
||||||
...updates,
|
|
||||||
updatedBy,
|
|
||||||
updatedAt: new Date()
|
|
||||||
});
|
|
||||||
|
|
||||||
logger.info(`[Holiday Service] Holiday updated: ${holidayId}`);
|
|
||||||
return holiday;
|
return holiday;
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
logger.error('[Holiday Service] Error updating holiday:', error);
|
logger.error('[Holiday Mongo Service] Error updating holiday:', error);
|
||||||
throw error;
|
throw error;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Delete (deactivate) a holiday
|
* Delete a holiday
|
||||||
*/
|
*/
|
||||||
async deleteHoliday(holidayId: string): Promise<boolean> {
|
async deleteHoliday(id: string): Promise<boolean> {
|
||||||
try {
|
try {
|
||||||
await Holiday.update(
|
await HolidayModel.findByIdAndDelete(id);
|
||||||
{ isActive: false },
|
logger.info(`[Holiday Mongo Service] Holiday deleted: ${id}`);
|
||||||
{ where: { holidayId } }
|
|
||||||
);
|
|
||||||
|
|
||||||
logger.info(`[Holiday Service] Holiday deactivated: ${holidayId}`);
|
|
||||||
return true;
|
return true;
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
logger.error('[Holiday Service] Error deleting holiday:', error);
|
logger.error('[Holiday Mongo Service] Error deleting holiday:', error);
|
||||||
throw error;
|
throw error;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -137,26 +120,16 @@ export class HolidayService {
|
|||||||
/**
|
/**
|
||||||
* Get all active holidays
|
* Get all active holidays
|
||||||
*/
|
*/
|
||||||
async getAllActiveHolidays(year?: number): Promise<Holiday[]> {
|
async getAllActiveHolidays(year?: number): Promise<IHoliday[]> {
|
||||||
try {
|
try {
|
||||||
const whereClause: any = { isActive: true };
|
const query: any = {};
|
||||||
|
|
||||||
if (year) {
|
if (year) {
|
||||||
const startDate = `${year}-01-01`;
|
query.year = year;
|
||||||
const endDate = `${year}-12-31`;
|
|
||||||
whereClause.holidayDate = {
|
|
||||||
[Op.between]: [startDate, endDate]
|
|
||||||
};
|
|
||||||
}
|
}
|
||||||
|
|
||||||
const holidays = await Holiday.findAll({
|
return await HolidayModel.find(query).sort({ date: 1 });
|
||||||
where: whereClause,
|
|
||||||
order: [['holidayDate', 'ASC']]
|
|
||||||
});
|
|
||||||
|
|
||||||
return holidays;
|
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
logger.error('[Holiday Service] Error fetching holidays:', error);
|
logger.error('[Holiday Mongo Service] Error fetching holidays:', error);
|
||||||
return [];
|
return [];
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -166,28 +139,15 @@ export class HolidayService {
|
|||||||
*/
|
*/
|
||||||
async getHolidayCalendar(year: number): Promise<any[]> {
|
async getHolidayCalendar(year: number): Promise<any[]> {
|
||||||
try {
|
try {
|
||||||
const startDate = `${year}-01-01`;
|
const holidays = await HolidayModel.find({ year }).sort({ date: 1 });
|
||||||
const endDate = `${year}-12-31`;
|
|
||||||
|
|
||||||
const holidays = await Holiday.findAll({
|
|
||||||
where: {
|
|
||||||
holidayDate: {
|
|
||||||
[Op.between]: [startDate, endDate]
|
|
||||||
},
|
|
||||||
isActive: true
|
|
||||||
},
|
|
||||||
order: [['holidayDate', 'ASC']]
|
|
||||||
});
|
|
||||||
|
|
||||||
return holidays.map((h: any) => ({
|
return holidays.map((h: any) => ({
|
||||||
date: h.holidayDate || h.holiday_date,
|
date: dayjs(h.date).format('YYYY-MM-DD'),
|
||||||
name: h.holidayName || h.holiday_name,
|
name: h.name,
|
||||||
description: h.description,
|
type: h.type
|
||||||
type: h.holidayType || h.holiday_type,
|
|
||||||
isRecurring: h.isRecurring || h.is_recurring
|
|
||||||
}));
|
}));
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
logger.error('[Holiday Service] Error fetching holiday calendar:', error);
|
logger.error('[Holiday Mongo Service] Error fetching holiday calendar:', error);
|
||||||
return [];
|
return [];
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -195,27 +155,23 @@ export class HolidayService {
|
|||||||
/**
|
/**
|
||||||
* Import multiple holidays (bulk upload)
|
* Import multiple holidays (bulk upload)
|
||||||
*/
|
*/
|
||||||
async bulkImportHolidays(holidays: any[], createdBy: string): Promise<{ success: number; failed: number }> {
|
async bulkImportHolidays(holidays: any[]): Promise<{ success: number; failed: number }> {
|
||||||
let success = 0;
|
let success = 0;
|
||||||
let failed = 0;
|
let failed = 0;
|
||||||
|
|
||||||
for (const holiday of holidays) {
|
for (const holiday of holidays) {
|
||||||
try {
|
try {
|
||||||
await this.createHoliday({
|
await this.createHoliday(holiday);
|
||||||
...holiday,
|
|
||||||
createdBy
|
|
||||||
});
|
|
||||||
success++;
|
success++;
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
failed++;
|
failed++;
|
||||||
logger.error(`[Holiday Service] Failed to import holiday: ${holiday.holidayName}`, error);
|
logger.error(`[Holiday Mongo Service] Failed to import holiday: ${holiday.name}`, error);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
logger.info(`[Holiday Service] Bulk import complete: ${success} success, ${failed} failed`);
|
logger.info(`[Holiday Mongo Service] Bulk import complete: ${success} success, ${failed} failed`);
|
||||||
return { success, failed };
|
return { success, failed };
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
export const holidayService = new HolidayService();
|
export const holidayMongoService = new HolidayMongoService();
|
||||||
|
|
||||||
|
|||||||
File diff suppressed because it is too large
Load Diff
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue
Block a user