Compare commits: main...mongo_migr (2 commits)

Commits: c9a0305d44, 2dbfcd7a56
_archive/services/activity.service.ts (new file, 114 lines)
@@ -0,0 +1,114 @@
import logger from '@utils/logger';

// Special UUID for system events (login, etc.) - well-known UUID: 00000000-0000-0000-0000-000000000001
export const SYSTEM_EVENT_REQUEST_ID = '00000000-0000-0000-0000-000000000001';

export type ActivityEntry = {
  requestId: string;
  type: 'created' | 'submitted' | 'assignment' | 'approval' | 'rejection' | 'status_change' | 'comment' | 'reminder' | 'document_added' | 'sla_warning' | 'ai_conclusion_generated' | 'summary_generated' | 'closed' | 'login' | 'paused' | 'resumed' | 'pause_retriggered';
  user?: { userId: string; name?: string; email?: string };
  timestamp: string;
  action: string;
  details: string;
  metadata?: any;
  ipAddress?: string;
  userAgent?: string;
  category?: string;
  severity?: string;
};

class ActivityService {
  private byRequest: Map<string, ActivityEntry[]> = new Map();

  private inferCategory(type: string): string {
    const categoryMap: Record<string, string> = {
      'created': 'WORKFLOW',
      'submitted': 'WORKFLOW',
      'approval': 'WORKFLOW',
      'rejection': 'WORKFLOW',
      'status_change': 'WORKFLOW',
      'assignment': 'WORKFLOW',
      'comment': 'COLLABORATION',
      'document_added': 'DOCUMENT',
      'sla_warning': 'SYSTEM',
      'reminder': 'SYSTEM',
      'ai_conclusion_generated': 'SYSTEM',
      'closed': 'WORKFLOW',
      'login': 'AUTHENTICATION',
      'paused': 'WORKFLOW',
      'resumed': 'WORKFLOW',
      'pause_retriggered': 'WORKFLOW'
    };
    return categoryMap[type] || 'OTHER';
  }

  private inferSeverity(type: string): string {
    const severityMap: Record<string, string> = {
      'rejection': 'WARNING',
      'sla_warning': 'WARNING',
      'approval': 'INFO',
      'closed': 'INFO',
      'status_change': 'INFO',
      'login': 'INFO',
      'created': 'INFO',
      'submitted': 'INFO',
      'comment': 'INFO',
      'document_added': 'INFO',
      'assignment': 'INFO',
      'reminder': 'INFO',
      'ai_conclusion_generated': 'INFO',
      'paused': 'WARNING',
      'resumed': 'INFO',
      'pause_retriggered': 'INFO'
    };
    return severityMap[type] || 'INFO';
  }

  async log(entry: ActivityEntry) {
    const list = this.byRequest.get(entry.requestId) || [];
    list.push(entry);
    this.byRequest.set(entry.requestId, list);

    // Persist to database
    try {
      const { Activity } = require('@models/Activity');
      const userName = entry.user?.name || entry.user?.email || null;

      const activityData = {
        requestId: entry.requestId,
        userId: entry.user?.userId || null,
        userName: userName,
        activityType: entry.type,
        activityDescription: entry.details,
        activityCategory: entry.category || this.inferCategory(entry.type),
        severity: entry.severity || this.inferSeverity(entry.type),
        metadata: entry.metadata || null,
        isSystemEvent: !entry.user,
        ipAddress: entry.ipAddress || null, // Database accepts null
        userAgent: entry.userAgent || null, // Database accepts null
      };

      logger.info(`[Activity] Creating activity:`, {
        requestId: entry.requestId,
        userName,
        userId: entry.user?.userId,
        type: entry.type,
        ipAddress: entry.ipAddress ? '***' : null
      });

      await Activity.create(activityData);

      logger.info(`[Activity] Successfully logged activity for request ${entry.requestId} by user: ${userName}`);
    } catch (error) {
      logger.error('[Activity] Failed to persist activity:', error);
    }
  }

  get(requestId: string): ActivityEntry[] {
    return this.byRequest.get(requestId) || [];
  }
}

export const activityService = new ActivityService();
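For orientation, a minimal usage sketch of the service above (not part of the diff; the request and user IDs are hypothetical placeholders):

    import { activityService } from './activity.service';

    // Hypothetical IDs, for illustration only.
    await activityService.log({
      requestId: '00000000-0000-0000-0000-0000000000aa',
      type: 'comment',
      user: { userId: '00000000-0000-0000-0000-0000000000bb', name: 'Jane Doe' },
      timestamp: new Date().toISOString(),
      action: 'Commented',
      details: 'Added a work note'
      // category and severity are optional; log() infers 'COLLABORATION'
      // and 'INFO' for a 'comment' entry when persisting.
    });

    // In-memory read-back of everything logged for this request.
    const entries = activityService.get('00000000-0000-0000-0000-0000000000aa');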
_archive/services/approval.service.ts (new file, 897 lines)
@@ -0,0 +1,897 @@
import { ApprovalLevel } from '@models/ApprovalLevel';
import { WorkflowRequest } from '@models/WorkflowRequest';
import { Participant } from '@models/Participant';
import { TatAlert } from '@models/TatAlert';
import { ApprovalAction } from '../types/approval.types';
import { ApprovalStatus, WorkflowStatus } from '../types/common.types';
import { calculateTATPercentage } from '@utils/helpers';
import { calculateElapsedWorkingHours } from '@utils/tatTimeUtils';
import logger, { logWorkflowEvent, logAIEvent } from '@utils/logger';
import { Op } from 'sequelize';
import { notificationService } from './notification.service';
import { activityService } from './activity.service';
import { tatSchedulerService } from './tatScheduler.service';
import { emitToRequestRoom } from '../realtime/socket';
// Note: DealerClaimService import removed - dealer claim approvals are handled by DealerClaimApprovalService

export class ApprovalService {
  async approveLevel(levelId: string, action: ApprovalAction, _userId: string, requestMetadata?: { ipAddress?: string | null; userAgent?: string | null }): Promise<ApprovalLevel | null> {
    try {
      const level = await ApprovalLevel.findByPk(levelId);
      if (!level) return null;

      // Get workflow to determine priority for working hours calculation
      const wf = await WorkflowRequest.findByPk(level.requestId);
      if (!wf) return null;

      // Verify this is NOT a claim management workflow (should use DealerClaimApprovalService)
      const workflowType = (wf as any)?.workflowType;
      if (workflowType === 'CLAIM_MANAGEMENT') {
        logger.error(`[Approval] Attempted to use ApprovalService for CLAIM_MANAGEMENT workflow ${level.requestId}. Use DealerClaimApprovalService instead.`);
        throw new Error('ApprovalService cannot be used for CLAIM_MANAGEMENT workflows. Use DealerClaimApprovalService instead.');
      }

      const priority = ((wf as any)?.priority || 'standard').toString().toLowerCase();
      const isPaused = (wf as any).isPaused || (level as any).isPaused;

      // If paused, resume automatically when approving/rejecting (requirement 3.6)
      if (isPaused) {
        const { pauseService } = await import('./pause.service');
        try {
          await pauseService.resumeWorkflow(level.requestId, _userId);
          logger.info(`[Approval] Auto-resumed paused workflow ${level.requestId} when ${action.action === 'APPROVE' ? 'approving' : 'rejecting'}`);
        } catch (pauseError) {
          logger.warn(`[Approval] Failed to auto-resume paused workflow:`, pauseError);
          // Continue with approval/rejection even if resume fails
        }
      }

      const now = new Date();

      // Calculate elapsed hours using working hours logic (with pause handling)
      // Case 1: Level is currently paused (isPaused = true)
      // Case 2: Level was paused and resumed (isPaused = false but pauseElapsedHours and pauseResumeDate exist)
      const isPausedLevel = (level as any).isPaused;
      const wasResumed = !isPausedLevel &&
        (level as any).pauseElapsedHours !== null &&
        (level as any).pauseElapsedHours !== undefined &&
        (level as any).pauseResumeDate !== null;

      const pauseInfo = isPausedLevel ? {
        // Level is currently paused - return frozen elapsed hours at pause time
        isPaused: true,
        pausedAt: (level as any).pausedAt,
        pauseElapsedHours: (level as any).pauseElapsedHours,
        pauseResumeDate: (level as any).pauseResumeDate
      } : wasResumed ? {
        // Level was paused but has been resumed - add pre-pause elapsed hours + time since resume
        isPaused: false,
        pausedAt: null,
        pauseElapsedHours: Number((level as any).pauseElapsedHours), // Pre-pause elapsed hours
        pauseResumeDate: (level as any).pauseResumeDate // Actual resume timestamp
      } : undefined;

      const elapsedHours = await calculateElapsedWorkingHours(
        level.levelStartTime || level.createdAt,
        now,
        priority,
        pauseInfo
      );
      const tatPercentage = calculateTATPercentage(elapsedHours, level.tatHours);

      const updateData = {
        status: action.action === 'APPROVE' ? ApprovalStatus.APPROVED : ApprovalStatus.REJECTED,
        actionDate: now,
        levelEndTime: now,
        elapsedHours,
        tatPercentageUsed: tatPercentage,
        comments: action.comments,
        rejectionReason: action.rejectionReason
      };

      const updatedLevel = await level.update(updateData);

      // Cancel TAT jobs for the current level since it's been actioned
      try {
        await tatSchedulerService.cancelTatJobs(level.requestId, level.levelId);
        logger.info(`[Approval] TAT jobs cancelled for level ${level.levelId}`);
      } catch (tatError) {
        logger.error(`[Approval] Failed to cancel TAT jobs:`, tatError);
        // Don't fail the approval if TAT cancellation fails
      }

      // Update TAT alerts for this level to mark completion status
      try {
        const wasOnTime = elapsedHours <= level.tatHours;
        await TatAlert.update(
          {
            wasCompletedOnTime: wasOnTime,
            completionTime: now
          },
          {
            where: { levelId: level.levelId }
          }
        );
        logger.info(`[Approval] TAT alerts updated for level ${level.levelId} - Completed ${wasOnTime ? 'on time' : 'late'}`);
      } catch (tatAlertError) {
        logger.error(`[Approval] Failed to update TAT alerts:`, tatAlertError);
        // Don't fail the approval if TAT alert update fails
      }

      // Handle approval - move to next level or close workflow (wf already loaded above)
      if (action.action === 'APPROVE') {
        // Check if this is final approval: either isFinalApprover flag is set OR all levels are approved
        // This handles cases where additional approvers are added after initial approval
        const allLevels = await ApprovalLevel.findAll({
          where: { requestId: level.requestId },
          order: [['levelNumber', 'ASC']]
        });
        const approvedLevelsCount = allLevels.filter((l: any) => l.status === 'APPROVED').length;
        const totalLevels = allLevels.length;
        const isAllLevelsApproved = approvedLevelsCount === totalLevels;
        const isFinalApproval = level.isFinalApprover || isAllLevelsApproved;

        if (isFinalApproval) {
          // Final approver - close workflow as APPROVED
          await WorkflowRequest.update(
            {
              status: WorkflowStatus.APPROVED,
              closureDate: now,
              currentLevel: (level.levelNumber || 0) + 1
            },
            { where: { requestId: level.requestId } }
          );
          logWorkflowEvent('approved', level.requestId, {
            level: level.levelNumber,
            isFinalApproval: true,
            status: 'APPROVED',
            detectedBy: level.isFinalApprover ? 'isFinalApprover flag' : 'all levels approved check'
          });

          // Log final approval activity first (so it's included in AI context)
          activityService.log({
            requestId: level.requestId,
            type: 'approval',
            user: { userId: level.approverId, name: level.approverName },
            timestamp: new Date().toISOString(),
            action: 'Approved',
            details: `Request approved and finalized by ${level.approverName || level.approverEmail}. Awaiting conclusion remark from initiator.`,
            ipAddress: requestMetadata?.ipAddress || undefined,
            userAgent: requestMetadata?.userAgent || undefined
          });

          // Generate AI conclusion remark ASYNCHRONOUSLY (don't wait)
          // This runs in the background without blocking the approval response
          (async () => {
            try {
              const { aiService } = await import('./ai.service');
              const { ConclusionRemark } = await import('@models/index');
              const { ApprovalLevel } = await import('@models/ApprovalLevel');
              const { WorkNote } = await import('@models/WorkNote');
              const { Document } = await import('@models/Document');
              const { Activity } = await import('@models/Activity');
              const { getConfigValue } = await import('./configReader.service');

              // Check if AI features and remark generation are enabled in admin config
              const aiEnabled = (await getConfigValue('AI_ENABLED', 'true'))?.toLowerCase() === 'true';
              const remarkGenerationEnabled = (await getConfigValue('AI_REMARK_GENERATION_ENABLED', 'true'))?.toLowerCase() === 'true';

              if (aiEnabled && remarkGenerationEnabled && aiService.isAvailable()) {
                logAIEvent('request', {
                  requestId: level.requestId,
                  action: 'conclusion_generation_started',
                });

                // Gather context for AI generation
                const approvalLevels = await ApprovalLevel.findAll({
                  where: { requestId: level.requestId },
                  order: [['levelNumber', 'ASC']]
                });

                const workNotes = await WorkNote.findAll({
                  where: { requestId: level.requestId },
                  order: [['createdAt', 'ASC']],
                  limit: 20
                });

                const documents = await Document.findAll({
                  where: { requestId: level.requestId },
                  order: [['uploadedAt', 'DESC']]
                });

                const activities = await Activity.findAll({
                  where: { requestId: level.requestId },
                  order: [['createdAt', 'ASC']],
                  limit: 50
                });

                // Build context object
                const context = {
                  requestTitle: (wf as any).title,
                  requestDescription: (wf as any).description,
                  requestNumber: (wf as any).requestNumber,
                  priority: (wf as any).priority,
                  approvalFlow: approvalLevels.map((l: any) => {
                    const tatPercentage = l.tatPercentageUsed !== undefined && l.tatPercentageUsed !== null
                      ? Number(l.tatPercentageUsed)
                      : (l.elapsedHours && l.tatHours ? (Number(l.elapsedHours) / Number(l.tatHours)) * 100 : 0);
                    return {
                      levelNumber: l.levelNumber,
                      approverName: l.approverName,
                      status: l.status,
                      comments: l.comments,
                      actionDate: l.actionDate,
                      tatHours: Number(l.tatHours || 0),
                      elapsedHours: Number(l.elapsedHours || 0),
                      tatPercentageUsed: tatPercentage
                    };
                  }),
                  workNotes: workNotes.map((note: any) => ({
                    userName: note.userName,
                    message: note.message,
                    createdAt: note.createdAt
                  })),
                  documents: documents.map((doc: any) => ({
                    fileName: doc.originalFileName || doc.fileName,
                    uploadedBy: doc.uploadedBy,
                    uploadedAt: doc.uploadedAt
                  })),
                  activities: activities.map((activity: any) => ({
                    type: activity.activityType,
                    action: activity.activityDescription,
                    details: activity.activityDescription,
                    timestamp: activity.createdAt
                  }))
                };

                const aiResult = await aiService.generateConclusionRemark(context);

                // Check if conclusion already exists (e.g., from previous final approval before additional approver was added)
                const existingConclusion = await ConclusionRemark.findOne({
                  where: { requestId: level.requestId }
                });

                if (existingConclusion) {
                  // Update existing conclusion with new AI-generated remark (regenerated with updated context)
                  await existingConclusion.update({
                    aiGeneratedRemark: aiResult.remark,
                    aiModelUsed: aiResult.provider,
                    aiConfidenceScore: aiResult.confidence,
                    // Preserve finalRemark if it was already finalized
                    // Only reset if it wasn't finalized yet
                    finalRemark: (existingConclusion as any).finalizedAt ? (existingConclusion as any).finalRemark : null,
                    editedBy: null,
                    isEdited: false,
                    editCount: 0,
                    approvalSummary: {
                      totalLevels: approvalLevels.length,
                      approvedLevels: approvalLevels.filter((l: any) => l.status === 'APPROVED').length,
                      averageTatUsage: approvalLevels.reduce((sum: number, l: any) =>
                        sum + Number(l.tatPercentageUsed || 0), 0) / (approvalLevels.length || 1)
                    },
                    documentSummary: {
                      totalDocuments: documents.length,
                      documentNames: documents.map((d: any) => d.originalFileName || d.fileName)
                    },
                    keyDiscussionPoints: aiResult.keyPoints,
                    generatedAt: new Date(),
                    // Preserve finalizedAt if it was already finalized
                    finalizedAt: (existingConclusion as any).finalizedAt || null
                  } as any);
                  logger.info(`[Approval] Updated existing AI conclusion for request ${level.requestId} with regenerated content (includes new approver)`);
                } else {
                  // Create new conclusion
                  await ConclusionRemark.create({
                    requestId: level.requestId,
                    aiGeneratedRemark: aiResult.remark,
                    aiModelUsed: aiResult.provider,
                    aiConfidenceScore: aiResult.confidence,
                    finalRemark: null,
                    editedBy: null,
                    isEdited: false,
                    editCount: 0,
                    approvalSummary: {
                      totalLevels: approvalLevels.length,
                      approvedLevels: approvalLevels.filter((l: any) => l.status === 'APPROVED').length,
                      averageTatUsage: approvalLevels.reduce((sum: number, l: any) =>
                        sum + Number(l.tatPercentageUsed || 0), 0) / (approvalLevels.length || 1)
                    },
                    documentSummary: {
                      totalDocuments: documents.length,
                      documentNames: documents.map((d: any) => d.originalFileName || d.fileName)
                    },
                    keyDiscussionPoints: aiResult.keyPoints,
                    generatedAt: new Date(),
                    finalizedAt: null
                  } as any);
                }

                logAIEvent('response', {
                  requestId: level.requestId,
                  action: 'conclusion_generation_completed',
                });

                // Log activity
                activityService.log({
                  requestId: level.requestId,
                  type: 'ai_conclusion_generated',
                  user: { userId: null as any, name: 'System' }, // Use null instead of 'system' for UUID field
                  timestamp: new Date().toISOString(),
                  action: 'AI Conclusion Generated',
                  details: 'AI-powered conclusion remark generated for review by initiator',
                  ipAddress: undefined, // System-generated, no IP
                  userAgent: undefined // System-generated, no user agent
                });
              } else {
                // Log why AI generation was skipped
                if (!aiEnabled) {
                  logger.info(`[Approval] AI features disabled in admin config, skipping conclusion generation for ${level.requestId}`);
                } else if (!remarkGenerationEnabled) {
                  logger.info(`[Approval] AI remark generation disabled in admin config, skipping for ${level.requestId}`);
                } else if (!aiService.isAvailable()) {
                  logger.warn(`[Approval] AI service unavailable for ${level.requestId}, skipping conclusion generation`);
                }
              }

              // Auto-generate RequestSummary after final approval (system-level generation)
              // This makes the summary immediately available when user views the approved request
              try {
                const { summaryService } = await import('./summary.service');
                const summary = await summaryService.createSummary(level.requestId, 'system', {
                  isSystemGeneration: true
                });
                logger.info(`[Approval] ✅ Auto-generated summary ${(summary as any).summaryId} for approved request ${level.requestId}`);

                // Log summary generation activity
                activityService.log({
                  requestId: level.requestId,
                  type: 'summary_generated',
                  user: { userId: null as any, name: 'System' }, // Use null instead of 'system' for UUID field
                  timestamp: new Date().toISOString(),
                  action: 'Summary Auto-Generated',
                  details: 'Request summary auto-generated after final approval',
                  ipAddress: undefined,
                  userAgent: undefined
                });
              } catch (summaryError: any) {
                // Log but don't fail - initiator can regenerate later
                logger.error(`[Approval] Failed to auto-generate summary for ${level.requestId}:`, summaryError.message);
              }

            } catch (aiError) {
              logAIEvent('error', {
                requestId: level.requestId,
                action: 'conclusion_generation_failed',
                error: aiError,
              });
              // Silent failure - initiator can write manually

              // Still try to generate summary even if AI conclusion failed
              try {
                const { summaryService } = await import('./summary.service');
                const summary = await summaryService.createSummary(level.requestId, 'system', {
                  isSystemGeneration: true
                });
                logger.info(`[Approval] ✅ Auto-generated summary ${(summary as any).summaryId} for approved request ${level.requestId} (without AI conclusion)`);
              } catch (summaryError: any) {
                logger.error(`[Approval] Failed to auto-generate summary for ${level.requestId}:`, summaryError.message);
              }
            }
          })().catch(err => {
            // Catch any unhandled promise rejections
            logger.error(`[Approval] Unhandled error in background AI generation:`, err);
          });

          // Notify initiator and all participants (including spectators) about approval
          // Spectators are CC'd for transparency, similar to email CC
          if (wf) {
            const participants = await Participant.findAll({
              where: { requestId: level.requestId }
            });
            const targetUserIds = new Set<string>();
            targetUserIds.add((wf as any).initiatorId);
            for (const p of participants as any[]) {
              targetUserIds.add(p.userId); // Includes spectators
            }

            // Send notification to initiator about final approval (triggers email)
            const initiatorId = (wf as any).initiatorId;
            await notificationService.sendToUsers([initiatorId], {
              title: `Request Approved - All Approvals Complete`,
              body: `Your request "${(wf as any).title}" has been fully approved by all approvers. Please review and finalize the conclusion remark to close the request.`,
              requestNumber: (wf as any).requestNumber,
              requestId: level.requestId,
              url: `/request/${(wf as any).requestNumber}`,
              type: 'approval',
              priority: 'HIGH',
              actionRequired: true
            });

            // Send notification to all participants/spectators (for transparency, no action required)
            const participantUserIds = Array.from(targetUserIds).filter(id => id !== initiatorId);
            if (participantUserIds.length > 0) {
              await notificationService.sendToUsers(participantUserIds, {
                title: `Request Approved`,
                body: `Request "${(wf as any).title}" has been fully approved. The initiator will finalize the conclusion remark to close the request.`,
                requestNumber: (wf as any).requestNumber,
                requestId: level.requestId,
                url: `/request/${(wf as any).requestNumber}`,
                type: 'approval_pending_closure',
                priority: 'MEDIUM',
                actionRequired: false
              });
            }

            logger.info(`[Approval] ✅ Final approval complete for ${level.requestId}. Initiator and ${participants.length} participant(s) notified.`);
          }
        } else {
          // Not final - move to next level
          // Check if workflow is paused - if so, don't advance
          if ((wf as any).isPaused || (wf as any).status === 'PAUSED') {
            logger.warn(`[Approval] Cannot advance workflow ${level.requestId} - workflow is paused`);
            throw new Error('Cannot advance workflow - workflow is currently paused. Please resume the workflow first.');
          }

          // Find the next PENDING level
          // Custom workflows use strict sequential ordering (levelNumber + 1) to maintain intended order
          // This ensures custom workflows work predictably and don't skip levels
          const currentLevelNumber = level.levelNumber || 0;
          logger.info(`[Approval] Finding next level after level ${currentLevelNumber} for request ${level.requestId} (Custom workflow)`);

          // Use strict sequential approach for custom workflows
          const nextLevel = await ApprovalLevel.findOne({
            where: {
              requestId: level.requestId,
              levelNumber: currentLevelNumber + 1
            }
          });

          if (!nextLevel) {
            logger.info(`[Approval] Sequential level ${currentLevelNumber + 1} not found for custom workflow - this may be the final approval`);
          } else if (nextLevel.status !== ApprovalStatus.PENDING) {
            // Sequential level exists but not PENDING - log warning but proceed
            logger.warn(`[Approval] Sequential level ${currentLevelNumber + 1} exists but status is ${nextLevel.status}, expected PENDING. Proceeding with sequential level to maintain workflow order.`);
          }

          const nextLevelNumber = nextLevel ? (nextLevel.levelNumber || 0) : null;

          if (nextLevel) {
            logger.info(`[Approval] Found next level: ${nextLevelNumber} (${(nextLevel as any).levelName || 'unnamed'}), approver: ${(nextLevel as any).approverName || (nextLevel as any).approverEmail || 'unknown'}, status: ${nextLevel.status}`);
          } else {
            logger.info(`[Approval] No next level found after level ${currentLevelNumber} - this may be the final approval`);
          }

          if (nextLevel) {
            // Check if next level is paused - if so, don't activate it
            if ((nextLevel as any).isPaused || (nextLevel as any).status === 'PAUSED') {
              logger.warn(`[Approval] Cannot activate next level ${nextLevelNumber} - level is paused`);
              throw new Error('Cannot activate next level - the next approval level is currently paused. Please resume it first.');
            }

            // Activate next level
            await nextLevel.update({
              status: ApprovalStatus.IN_PROGRESS,
              levelStartTime: now,
              tatStartTime: now
            });

            // Schedule TAT jobs for the next level
            try {
              // Get workflow priority for TAT calculation
              const workflowPriority = (wf as any)?.priority || 'STANDARD';

              await tatSchedulerService.scheduleTatJobs(
                level.requestId,
                (nextLevel as any).levelId,
                (nextLevel as any).approverId,
                Number((nextLevel as any).tatHours),
                now,
                workflowPriority // Pass workflow priority (EXPRESS = 24/7, STANDARD = working hours)
              );
              logger.info(`[Approval] TAT jobs scheduled for next level ${nextLevelNumber} (Priority: ${workflowPriority})`);
            } catch (tatError) {
              logger.error(`[Approval] Failed to schedule TAT jobs for next level:`, tatError);
              // Don't fail the approval if TAT scheduling fails
            }

            // Update workflow current level (only if nextLevelNumber is not null)
            if (nextLevelNumber !== null) {
              await WorkflowRequest.update(
                { currentLevel: nextLevelNumber },
                { where: { requestId: level.requestId } }
              );
              logger.info(`Approved level ${level.levelNumber}. Activated next level ${nextLevelNumber} for workflow ${level.requestId}`);
            } else {
              logger.warn(`Approved level ${level.levelNumber} but no next level found - workflow may be complete`);
            }

            // Note: Dealer claim-specific logic (Activity Creation, E-Invoice) is handled by DealerClaimApprovalService
            // This service is for custom workflows only

            // Log approval activity
            activityService.log({
              requestId: level.requestId,
              type: 'approval',
              user: { userId: level.approverId, name: level.approverName },
              timestamp: new Date().toISOString(),
              action: 'Approved',
              details: `Request approved and forwarded to ${(nextLevel as any).approverName || (nextLevel as any).approverEmail} by ${level.approverName || level.approverEmail}`,
              ipAddress: requestMetadata?.ipAddress || undefined,
              userAgent: requestMetadata?.userAgent || undefined
            });

            // Notify initiator about the approval (triggers email for regular workflows)
            if (wf) {
              await notificationService.sendToUsers([(wf as any).initiatorId], {
                title: `Request Approved - Level ${level.levelNumber}`,
                body: `Your request "${(wf as any).title}" has been approved by ${level.approverName || level.approverEmail} and forwarded to the next approver.`,
                requestNumber: (wf as any).requestNumber,
                requestId: level.requestId,
                url: `/request/${(wf as any).requestNumber}`,
                type: 'approval',
                priority: 'MEDIUM'
              });
            }

            // Notify next approver
            if (wf && nextLevel) {
              // Check if it's an auto-step by checking approverEmail or levelName
              // Note: Activity Creation, E-Invoice Generation, and Credit Note Confirmation are now activity logs only, not approval steps
              // These steps are processed automatically and should NOT trigger notifications
              const isAutoStep = (nextLevel as any).approverEmail === 'system@royalenfield.com'
                || (nextLevel as any).approverName === 'System Auto-Process'
                || (nextLevel as any).approverId === 'system';

              // IMPORTANT: Skip notifications and assignment logging for system/auto-steps
              // System steps are any step with system@royalenfield.com
              // Only send notifications to real users, NOT system processes
              if (!isAutoStep && (nextLevel as any).approverId && (nextLevel as any).approverId !== 'system') {
                // Additional checks: ensure approverEmail and approverName are not system-related
                // This prevents notifications to system accounts even if they pass other checks
                const approverEmail = (nextLevel as any).approverEmail || '';
                const approverName = (nextLevel as any).approverName || '';
                const isSystemEmail = approverEmail.toLowerCase() === 'system@royalenfield.com'
                  || approverEmail.toLowerCase().includes('system');
                const isSystemName = approverName.toLowerCase() === 'system auto-process'
                  || approverName.toLowerCase().includes('system');

                // EXCLUDE all system-related steps from notifications
                // Only send notifications to real users, NOT system processes
                if (!isSystemEmail && !isSystemName) {
                  // Send notification to next approver (only for real users, not system processes)
                  // This will send both in-app and email notifications
                  const nextApproverId = (nextLevel as any).approverId;
                  const nextApproverName = (nextLevel as any).approverName || (nextLevel as any).approverEmail || 'approver';

                  logger.info(`[Approval] Sending assignment notification to next approver: ${nextApproverName} (${nextApproverId}) at level ${nextLevelNumber} for request ${(wf as any).requestNumber}`);

                  await notificationService.sendToUsers([ nextApproverId ], {
                    title: `Action required: ${(wf as any).requestNumber}`,
                    body: `${(wf as any).title}`,
                    requestNumber: (wf as any).requestNumber,
                    requestId: (wf as any).requestId,
                    url: `/request/${(wf as any).requestNumber}`,
                    type: 'assignment',
                    priority: 'HIGH',
                    actionRequired: true
                  });

                  logger.info(`[Approval] Assignment notification sent successfully to ${nextApproverName} for level ${nextLevelNumber}`);

                  // Log assignment activity for the next approver
                  activityService.log({
                    requestId: level.requestId,
                    type: 'assignment',
                    user: { userId: level.approverId, name: level.approverName },
                    timestamp: new Date().toISOString(),
                    action: 'Assigned to approver',
                    details: `Request assigned to ${nextApproverName} for ${(nextLevel as any).levelName || `level ${nextLevelNumber}`}`,
                    ipAddress: requestMetadata?.ipAddress || undefined,
                    userAgent: requestMetadata?.userAgent || undefined
                  });
                } else {
                  logger.info(`[Approval] Skipping notification for system process: ${approverEmail} at level ${nextLevelNumber}`);
                }
              } else {
                logger.info(`[Approval] Skipping notification for auto-step at level ${nextLevelNumber}`);
              }

              // Note: Dealer-specific notifications (proposal/completion submissions) are handled by DealerClaimApprovalService
            }
          } else {
            // No next level found but not final approver - this shouldn't happen
            logger.warn(`No next level found for workflow ${level.requestId} after approving level ${level.levelNumber}`);
            // Use current level number since there's no next level (workflow is complete)
            await WorkflowRequest.update(
              {
                status: WorkflowStatus.APPROVED,
                closureDate: now,
                currentLevel: level.levelNumber || 0
              },
              { where: { requestId: level.requestId } }
            );
            if (wf) {
              await notificationService.sendToUsers([ (wf as any).initiatorId ], {
                title: `Approved: ${(wf as any).requestNumber}`,
                body: `${(wf as any).title}`,
                requestNumber: (wf as any).requestNumber,
                url: `/request/${(wf as any).requestNumber}`
              });
              activityService.log({
                requestId: level.requestId,
                type: 'approval',
                user: { userId: level.approverId, name: level.approverName },
                timestamp: new Date().toISOString(),
                action: 'Approved',
                details: `Request approved and finalized by ${level.approverName || level.approverEmail}`,
                ipAddress: requestMetadata?.ipAddress || undefined,
                userAgent: requestMetadata?.userAgent || undefined
              });
            }
          }
        }
      } else if (action.action === 'REJECT') {
        // Rejection - mark workflow as REJECTED (closure will happen when initiator finalizes conclusion)
        await WorkflowRequest.update(
          {
            status: WorkflowStatus.REJECTED
            // Note: closureDate will be set when initiator finalizes the conclusion
          },
          { where: { requestId: level.requestId } }
        );

        // Mark all pending levels as skipped
        await ApprovalLevel.update(
          {
            status: ApprovalStatus.SKIPPED,
            levelEndTime: now
          },
          {
            where: {
              requestId: level.requestId,
              status: ApprovalStatus.PENDING,
              levelNumber: { [Op.gt]: level.levelNumber }
            }
          }
        );

        logWorkflowEvent('rejected', level.requestId, {
          level: level.levelNumber,
          status: 'REJECTED',
          message: 'Awaiting closure from initiator',
        });

        // Log rejection activity first (so it's included in AI context)
        if (wf) {
          activityService.log({
            requestId: level.requestId,
            type: 'rejection',
            user: { userId: level.approverId, name: level.approverName },
            timestamp: new Date().toISOString(),
            action: 'Rejected',
            details: `Request rejected by ${level.approverName || level.approverEmail}. Reason: ${action.rejectionReason || action.comments || 'No reason provided'}. Awaiting closure from initiator.`,
            ipAddress: requestMetadata?.ipAddress || undefined,
            userAgent: requestMetadata?.userAgent || undefined
          });
        }

        // Notify initiator and all participants
        if (wf) {
          const participants = await Participant.findAll({ where: { requestId: level.requestId } });
          const targetUserIds = new Set<string>();
          targetUserIds.add((wf as any).initiatorId);
          for (const p of participants as any[]) {
            targetUserIds.add(p.userId);
          }

          // Send notification to initiator with type 'rejection' to trigger email
          await notificationService.sendToUsers([(wf as any).initiatorId], {
            title: `Rejected: ${(wf as any).requestNumber}`,
            body: `${(wf as any).title}`,
            requestNumber: (wf as any).requestNumber,
            requestId: level.requestId,
            url: `/request/${(wf as any).requestNumber}`,
            type: 'rejection',
            priority: 'HIGH',
            metadata: {
              rejectionReason: action.rejectionReason || action.comments || 'No reason provided'
            }
          });

          // Send notification to other participants (spectators) for transparency (no email, just in-app)
          const participantUserIds = Array.from(targetUserIds).filter(id => id !== (wf as any).initiatorId);
          if (participantUserIds.length > 0) {
            await notificationService.sendToUsers(participantUserIds, {
              title: `Rejected: ${(wf as any).requestNumber}`,
              body: `Request "${(wf as any).title}" has been rejected.`,
              requestNumber: (wf as any).requestNumber,
              requestId: level.requestId,
              url: `/request/${(wf as any).requestNumber}`,
              type: 'status_change', // Use status_change to avoid triggering emails for participants
              priority: 'MEDIUM'
            });
          }
        }

        // Generate AI conclusion remark ASYNCHRONOUSLY for rejected requests (similar to approved)
        // This runs in the background without blocking the rejection response
        (async () => {
          try {
            const { aiService } = await import('./ai.service');
            const { ConclusionRemark } = await import('@models/index');
            const { ApprovalLevel } = await import('@models/ApprovalLevel');
            const { WorkNote } = await import('@models/WorkNote');
            const { Document } = await import('@models/Document');
            const { Activity } = await import('@models/Activity');
            const { getConfigValue } = await import('./configReader.service');

            // Check if AI features and remark generation are enabled in admin config
            const aiEnabled = (await getConfigValue('AI_ENABLED', 'true'))?.toLowerCase() === 'true';
            const remarkGenerationEnabled = (await getConfigValue('AI_REMARK_GENERATION_ENABLED', 'true'))?.toLowerCase() === 'true';

            if (!aiEnabled || !remarkGenerationEnabled) {
              logger.info(`[Approval] AI conclusion generation skipped for rejected request ${level.requestId} (AI disabled)`);
              return;
            }

            // Check if AI service is available
            const { aiService: aiSvc } = await import('./ai.service');
            if (!aiSvc.isAvailable()) {
              logger.warn(`[Approval] AI service unavailable for rejected request ${level.requestId}`);
              return;
            }

            // Gather context for AI generation (similar to approved flow)
            const approvalLevels = await ApprovalLevel.findAll({
              where: { requestId: level.requestId },
              order: [['levelNumber', 'ASC']]
            });

            const workNotes = await WorkNote.findAll({
              where: { requestId: level.requestId },
              order: [['createdAt', 'ASC']],
              limit: 20
            });

            const documents = await Document.findAll({
              where: { requestId: level.requestId },
              order: [['uploadedAt', 'DESC']]
            });

            const activities = await Activity.findAll({
              where: { requestId: level.requestId },
              order: [['createdAt', 'ASC']],
              limit: 50
            });

            // Build context object (include rejection reason)
            const context = {
              requestTitle: (wf as any).title,
              requestDescription: (wf as any).description,
              requestNumber: (wf as any).requestNumber,
              priority: (wf as any).priority,
              rejectionReason: action.rejectionReason || action.comments || 'No reason provided',
              rejectedBy: level.approverName || level.approverEmail,
              approvalFlow: approvalLevels.map((l: any) => {
                const tatPercentage = l.tatPercentageUsed !== undefined && l.tatPercentageUsed !== null
                  ? Number(l.tatPercentageUsed)
                  : (l.elapsedHours && l.tatHours ? (Number(l.elapsedHours) / Number(l.tatHours)) * 100 : 0);
                return {
                  levelNumber: l.levelNumber,
                  approverName: l.approverName,
                  status: l.status,
                  comments: l.comments,
                  actionDate: l.actionDate,
                  tatHours: Number(l.tatHours || 0),
                  elapsedHours: Number(l.elapsedHours || 0),
                  tatPercentageUsed: tatPercentage
                };
              }),
              workNotes: workNotes.map((note: any) => ({
                userName: note.userName,
                message: note.message,
                createdAt: note.createdAt
              })),
              documents: documents.map((doc: any) => ({
                fileName: doc.originalFileName || doc.fileName,
                uploadedBy: doc.uploadedBy,
                uploadedAt: doc.uploadedAt
              })),
              activities: activities.map((activity: any) => ({
                type: activity.activityType,
                action: activity.activityDescription,
                details: activity.activityDescription,
                timestamp: activity.createdAt
              }))
            };

            logger.info(`[Approval] Generating AI conclusion for rejected request ${level.requestId}...`);

            // Generate AI conclusion (will adapt to rejection context)
            const aiResult = await aiSvc.generateConclusionRemark(context);

            // Create or update conclusion remark
            let conclusionInstance = await ConclusionRemark.findOne({ where: { requestId: level.requestId } });

            const conclusionData = {
              aiGeneratedRemark: aiResult.remark,
              aiModelUsed: aiResult.provider,
              aiConfidenceScore: aiResult.confidence,
              approvalSummary: {
                totalLevels: approvalLevels.length,
                rejectedLevel: level.levelNumber,
                rejectedBy: level.approverName || level.approverEmail,
                rejectionReason: action.rejectionReason || action.comments
              },
              documentSummary: {
                totalDocuments: documents.length,
                documentNames: documents.map((d: any) => d.originalFileName || d.fileName)
              },
              keyDiscussionPoints: aiResult.keyPoints,
              generatedAt: new Date()
            };

            if (conclusionInstance) {
              await conclusionInstance.update(conclusionData as any);
              logger.info(`[Approval] ✅ AI conclusion updated for rejected request ${level.requestId}`);
            } else {
              await ConclusionRemark.create({
                requestId: level.requestId,
                ...conclusionData,
                finalRemark: null,
                editedBy: null,
                isEdited: false,
                editCount: 0,
                finalizedAt: null
              } as any);
              logger.info(`[Approval] ✅ AI conclusion generated for rejected request ${level.requestId}`);
            }
          } catch (error: any) {
            logger.error(`[Approval] Failed to generate AI conclusion for rejected request ${level.requestId}:`, error);
            // Don't fail the rejection if AI generation fails
          }
        })();
      }

      logger.info(`Approval level ${levelId} ${action.action.toLowerCase()}ed`);

      // Emit real-time update to all users viewing this request
      emitToRequestRoom(level.requestId, 'request:updated', {
        requestId: level.requestId,
        requestNumber: (wf as any)?.requestNumber,
        action: action.action,
        levelNumber: level.levelNumber,
        timestamp: now.toISOString()
      });

      return updatedLevel;
    } catch (error) {
      logger.error(`Failed to ${action.action.toLowerCase()} level ${levelId}:`, error);
      throw new Error(`Failed to ${action.action.toLowerCase()} level`);
    }
  }

  async getCurrentApprovalLevel(requestId: string): Promise<ApprovalLevel | null> {
    try {
      return await ApprovalLevel.findOne({
        where: { requestId, status: ApprovalStatus.PENDING },
        order: [['levelNumber', 'ASC']]
      });
    } catch (error) {
      logger.error(`Failed to get current approval level for ${requestId}:`, error);
      throw new Error('Failed to get current approval level');
    }
  }

  async getApprovalLevels(requestId: string): Promise<ApprovalLevel[]> {
    try {
      return await ApprovalLevel.findAll({
        where: { requestId },
        order: [['levelNumber', 'ASC']]
      });
    } catch (error) {
      logger.error(`Failed to get approval levels for ${requestId}:`, error);
      throw new Error('Failed to get approval levels');
    }
  }
}
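As a usage sketch, a controller might invoke this service as below. The Express route, the auth middleware populating req.user, and the singleton instantiation are assumptions for illustration; the diff itself only exports the class:

    import { Request, Response } from 'express';
    import { ApprovalService } from './approval.service';

    const approvalService = new ApprovalService();

    // Hypothetical handler for POST /levels/:levelId/action
    export async function actOnLevel(req: Request, res: Response) {
      const { action, comments, rejectionReason } = req.body; // 'APPROVE' | 'REJECT'

      const updated = await approvalService.approveLevel(
        req.params.levelId,
        { action, comments, rejectionReason },     // assumed ApprovalAction shape, per its usage above
        (req as any).user?.userId,                 // assumed auth middleware
        { ipAddress: req.ip, userAgent: req.get('user-agent') || null }
      );

      if (!updated) return res.status(404).json({ error: 'Approval level not found' });
      return res.json(updated);
    }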
_archive/services/configReader.service.ts (new file, 160 lines)
@@ -0,0 +1,160 @@
/**
 * Configuration Reader Service
 * Reads admin configurations from database for use in backend logic
 */

import { sequelize } from '@config/database';
import { QueryTypes } from 'sequelize';
import logger from '@utils/logger';

// Cache configurations in memory for performance
let configCache: Map<string, string> = new Map();
let cacheExpiry: Date | null = null;
const CACHE_DURATION_MS = 5 * 60 * 1000; // 5 minutes

// Sensitive config keys that should be masked in logs
const SENSITIVE_CONFIG_PATTERNS = [
  'API_KEY', 'SECRET', 'PASSWORD', 'TOKEN', 'CREDENTIAL',
  'PRIVATE', 'AUTH', 'KEY', 'VAPID'
];

/**
 * Check if a config key contains sensitive data
 */
function isSensitiveConfig(configKey: string): boolean {
  const upperKey = configKey.toUpperCase();
  return SENSITIVE_CONFIG_PATTERNS.some(pattern => upperKey.includes(pattern));
}

/**
 * Mask sensitive value for logging (show first 4 and last 2 chars)
 */
function maskSensitiveValue(value: string): string {
  if (!value || value.length <= 8) {
    return '***REDACTED***';
  }
  return `${value.substring(0, 4)}****${value.substring(value.length - 2)}`;
}

/**
 * Get a configuration value from database (with caching)
 */
export async function getConfigValue(configKey: string, defaultValue: string = ''): Promise<string> {
  try {
    // Check cache first
    if (configCache.has(configKey) && cacheExpiry && new Date() < cacheExpiry) {
      return configCache.get(configKey)!;
    }

    // Query database
    const result = await sequelize.query(`
      SELECT config_value
      FROM admin_configurations
      WHERE config_key = :configKey
      LIMIT 1
    `, {
      replacements: { configKey },
      type: QueryTypes.SELECT
    });

    if (result && result.length > 0) {
      const value = (result[0] as any).config_value;
      configCache.set(configKey, value);

      // Always update cache expiry when loading from database
      cacheExpiry = new Date(Date.now() + CACHE_DURATION_MS);

      // Mask sensitive values in logs for security
      const logValue = isSensitiveConfig(configKey) ? maskSensitiveValue(value) : value;
      logger.info(`[ConfigReader] Loaded config '${configKey}' = '${logValue}' from database (cached for 5min)`);

      return value;
    }

    // Mask sensitive default values in logs for security
    const logDefault = isSensitiveConfig(configKey) ? maskSensitiveValue(defaultValue) : defaultValue;
    logger.warn(`[ConfigReader] Config key '${configKey}' not found, using default: ${logDefault}`);
    return defaultValue;
  } catch (error) {
    logger.error(`[ConfigReader] Error reading config '${configKey}':`, error);
    return defaultValue;
  }
}

/**
 * Get number configuration
 */
export async function getConfigNumber(configKey: string, defaultValue: number): Promise<number> {
  const value = await getConfigValue(configKey, String(defaultValue));
  return parseFloat(value) || defaultValue;
}

/**
 * Get boolean configuration
 */
export async function getConfigBoolean(configKey: string, defaultValue: boolean): Promise<boolean> {
  const value = await getConfigValue(configKey, String(defaultValue));
  return value === 'true' || value === '1';
}

/**
 * Get TAT thresholds from database
 */
export async function getTatThresholds(): Promise<{ first: number; second: number }> {
  const first = await getConfigNumber('TAT_REMINDER_THRESHOLD_1', 50);
  const second = await getConfigNumber('TAT_REMINDER_THRESHOLD_2', 75);

  return { first, second };
}

/**
 * Get working hours from database
 */
export async function getWorkingHours(): Promise<{ startHour: number; endHour: number }> {
  const startHour = await getConfigNumber('WORK_START_HOUR', 9);
  const endHour = await getConfigNumber('WORK_END_HOUR', 18);

  return { startHour, endHour };
}

/**
 * Clear configuration cache (call after updating configs)
 */
export function clearConfigCache(): void {
  configCache.clear();
  cacheExpiry = null;
  logger.info('[ConfigReader] Configuration cache cleared');
}

/**
 * Preload all configurations into cache
 */
export async function preloadConfigurations(): Promise<void> {
  try {
    const results = await sequelize.query(`
      SELECT config_key, config_value
      FROM admin_configurations
    `, { type: QueryTypes.SELECT });

    results.forEach((row: any) => {
      configCache.set(row.config_key, row.config_value);
    });

    cacheExpiry = new Date(Date.now() + CACHE_DURATION_MS);
    logger.info(`[ConfigReader] Preloaded ${results.length} configurations into cache`);
  } catch (error) {
    logger.error('[ConfigReader] Error preloading configurations:', error);
  }
}

/**
 * Get Vertex AI configurations
 */
export async function getVertexAIConfig(): Promise<{
  enabled: boolean;
}> {
  const enabled = await getConfigBoolean('AI_ENABLED', true);

  return { enabled };
}
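A short sketch of how these helpers might be wired into server startup (the bootstrap function is illustrative; only the exported helpers and the config keys shown in this file are taken from the diff):

    import {
      preloadConfigurations,
      getConfigBoolean,
      getTatThresholds,
      clearConfigCache
    } from './configReader.service';

    async function bootstrap() {
      // Warm the cache once; subsequent reads are served from memory for 5 minutes.
      await preloadConfigurations();

      const aiEnabled = await getConfigBoolean('AI_ENABLED', true);
      const { first, second } = await getTatThresholds(); // defaults: 50 and 75
      // ...start HTTP server, schedulers, etc., using these values.
    }

    // After an admin edits admin_configurations rows, drop the cache so the
    // next read reloads fresh values from the database.
    clearConfigCache();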
_archive/services/dashboard.service.ts (new file, 2767 lines; diff suppressed because it is too large)
_archive/services/dealerClaim.service.ts (new file, 3353 lines; diff suppressed because it is too large)
_archive/services/dealerClaimApproval.service.ts (new file, 967 lines)
@@ -0,0 +1,967 @@
/**
 * Dealer Claim Approval Service
 *
 * Dedicated approval service for dealer claim workflows (CLAIM_MANAGEMENT).
 * Handles dealer claim-specific logic including:
 * - Dynamic approver support (additional approvers added between steps)
 * - Activity Creation processing
 * - Dealer-specific notifications
 *
 * This service is separate from ApprovalService to prevent conflicts with custom workflows.
 */

import { ApprovalLevel } from '@models/ApprovalLevel';
import { WorkflowRequest } from '@models/WorkflowRequest';
import { User } from '@models/User';
import { ApprovalAction } from '../types/approval.types';
import { ApprovalStatus, WorkflowStatus } from '../types/common.types';
import { calculateTATPercentage } from '@utils/helpers';
import { calculateElapsedWorkingHours } from '@utils/tatTimeUtils';
import logger from '@utils/logger';
import { Op } from 'sequelize';
// Aliased so the notificationService call sites below resolve to the Mongo-backed service.
import { notificationMongoService as notificationService } from './notification.mongo.service';
import { activityService } from './activity.service';
import { tatSchedulerService } from './tatScheduler.service';
import { DealerClaimService } from './dealerClaim.service';
import { emitToRequestRoom } from '../realtime/socket';

export class DealerClaimApprovalService {
  // Use lazy initialization to avoid circular dependency
  private getDealerClaimService(): DealerClaimService {
    return new DealerClaimService();
  }

  /**
   * Approve a level in a dealer claim workflow
   * Handles dealer claim-specific logic including dynamic approvers and activity creation
   */
  async approveLevel(
    levelId: string,
    action: ApprovalAction,
    userId: string,
    requestMetadata?: { ipAddress?: string | null; userAgent?: string | null }
  ): Promise<ApprovalLevel | null> {
    try {
      const level = await ApprovalLevel.findByPk(levelId);
      if (!level) return null;

      // Get workflow to determine priority for working hours calculation
      const wf = await WorkflowRequest.findByPk(level.requestId);
      if (!wf) return null;

      // Verify this is a claim management workflow
      const workflowType = (wf as any)?.workflowType;
      if (workflowType !== 'CLAIM_MANAGEMENT') {
        logger.warn(`[DealerClaimApproval] Attempted to use DealerClaimApprovalService for non-claim-management workflow ${level.requestId}. Workflow type: ${workflowType}`);
        throw new Error('DealerClaimApprovalService can only be used for CLAIM_MANAGEMENT workflows');
      }

      const priority = ((wf as any)?.priority || 'standard').toString().toLowerCase();
      const isPaused = (wf as any).isPaused || (level as any).isPaused;

      // If paused, resume automatically when approving/rejecting
      if (isPaused) {
        const { pauseService } = await import('./pause.service');
        try {
          await pauseService.resumeWorkflow(level.requestId, userId);
          logger.info(`[DealerClaimApproval] Auto-resumed paused workflow ${level.requestId} when ${action.action === 'APPROVE' ? 'approving' : 'rejecting'}`);
        } catch (pauseError) {
          logger.warn(`[DealerClaimApproval] Failed to auto-resume paused workflow:`, pauseError);
          // Continue with approval/rejection even if resume fails
        }
      }

      const now = new Date();

      // Calculate elapsed hours using working hours logic (with pause handling)
      const isPausedLevel = (level as any).isPaused;
      const wasResumed = !isPausedLevel &&
        (level as any).pauseElapsedHours !== null &&
        (level as any).pauseElapsedHours !== undefined &&
        (level as any).pauseResumeDate !== null;

      const pauseInfo = isPausedLevel ? {
        // Level is currently paused - return frozen elapsed hours at pause time
        isPaused: true,
        pausedAt: (level as any).pausedAt,
        pauseElapsedHours: (level as any).pauseElapsedHours,
        pauseResumeDate: (level as any).pauseResumeDate
      } : wasResumed ? {
        // Level was paused but has been resumed - add pre-pause elapsed hours + time since resume
        isPaused: false,
        pausedAt: null,
        pauseElapsedHours: Number((level as any).pauseElapsedHours), // Pre-pause elapsed hours
        pauseResumeDate: (level as any).pauseResumeDate // Actual resume timestamp
      } : undefined;

      const elapsedHours = await calculateElapsedWorkingHours(
        (level as any).levelStartTime || (level as any).tatStartTime || now,
        now,
        priority,
        pauseInfo
      );
      const tatPercentage = calculateTATPercentage(elapsedHours, level.tatHours);
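      // Illustrative arithmetic (assuming calculateTATPercentage returns elapsed/allowed * 100):
      // a level with tatHours = 24 approved after 18 elapsed working hours yields
      // tatPercentageUsed = 75. For a resumed level, elapsedHours is pauseElapsedHours
      // (pre-pause) plus the working hours between pauseResumeDate and now.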

      // Handle rejection
      if (action.action === 'REJECT') {
        return await this.handleRejection(level, action, userId, requestMetadata, elapsedHours, tatPercentage, now);
      }

      logger.info(`[DealerClaimApproval] Approving level ${levelId} with action:`, JSON.stringify(action));

      // Robust comment extraction
      const approvalComment = action.comments || (action as any).comment || '';

      // Update level status and elapsed time for approval FIRST
      // Only save snapshot if the update succeeds
      await level.update({
        status: ApprovalStatus.APPROVED,
        actionDate: now,
        levelEndTime: now,
        elapsedHours: elapsedHours,
        tatPercentageUsed: tatPercentage,
        comments: approvalComment || undefined
      });

      // Check if this is a dealer submission (proposal or completion) - these have their own snapshot types
      const levelName = (level.levelName || '').toLowerCase();
      const isDealerSubmission = levelName.includes('dealer proposal') || levelName.includes('dealer completion');

      // Only save APPROVE snapshot for actual approver actions (not dealer submissions)
      // Dealer submissions use PROPOSAL/COMPLETION snapshot types instead
      if (!isDealerSubmission) {
        try {
          await this.getDealerClaimService().saveApprovalHistory(
            level.requestId,
            level.levelId,
            level.levelNumber,
            'APPROVE',
            approvalComment,
            undefined,
            userId
          );
        } catch (snapshotError) {
          // Log error but don't fail the approval - snapshot is for audit, not critical
          logger.error(`[DealerClaimApproval] Failed to save approval history snapshot (non-critical):`, snapshotError);
        }
      }

      // Note: We don't save workflow history for approval actions
      // The approval history (saveApprovalHistory) is sufficient and includes comments
      // Workflow movement information is included in the APPROVE snapshot's changeReason

      // Check if this is the final approver
      const allLevels = await ApprovalLevel.findAll({
        where: { requestId: level.requestId }
      });
      const approvedCount = allLevels.filter((l: any) => l.status === ApprovalStatus.APPROVED).length;
      const isFinalApprover = approvedCount === allLevels.length;

      if (isFinalApprover) {
        // Final approval - close workflow
        await WorkflowRequest.update(
          {
            status: WorkflowStatus.APPROVED,
            closureDate: now,
            currentLevel: level.levelNumber || 0
          },
          { where: { requestId: level.requestId } }
        );

        // Notify all participants
        const participants = await import('@models/Participant').then(m => m.Participant.findAll({
          where: { requestId: level.requestId, isActive: true }
        }));

        if (participants && participants.length > 0) {
          const participantIds = participants.map((p: any) => p.userId).filter(Boolean);
          await notificationService.sendToUsers(participantIds, {
            title: `Request Approved: ${(wf as any).requestNumber}`,
            body: `${(wf as any).title}`,
            requestNumber: (wf as any).requestNumber,
            requestId: level.requestId,
            url: `/request/${(wf as any).requestNumber}`,
            type: 'approval',
            priority: 'MEDIUM'
          });
          logger.info(`[DealerClaimApproval] Final approval complete. ${participants.length} participant(s) notified.`);
        }
      } else {
        // Not final - move to next level
        // Check if workflow is paused - if so, don't advance
        if ((wf as any).isPaused || (wf as any).status === 'PAUSED') {
          logger.warn(`[DealerClaimApproval] Cannot advance workflow ${level.requestId} - workflow is paused`);
          throw new Error('Cannot advance workflow - workflow is currently paused. Please resume the workflow first.');
        }

        // Find the next PENDING level (supports dynamically added approvers)
        // Strategy: First try sequential, then find next PENDING level if sequential doesn't exist
        const currentLevelNumber = level.levelNumber || 0;
        logger.info(`[DealerClaimApproval] Finding next level after level ${currentLevelNumber} for request ${level.requestId}`);

        // First, try sequential approach
        let nextLevel = await ApprovalLevel.findOne({
          where: {
            requestId: level.requestId,
            levelNumber: currentLevelNumber + 1
          }
        });

        // If sequential level doesn't exist, search for next PENDING level
        // This handles cases where additional approvers are added dynamically between steps
        if (!nextLevel) {
          logger.info(`[DealerClaimApproval] Sequential level ${currentLevelNumber + 1} not found, searching for next PENDING level (dynamic approvers)`);
          nextLevel = await ApprovalLevel.findOne({
            where: {
              requestId: level.requestId,
              levelNumber: { [Op.gt]: currentLevelNumber },
              status: ApprovalStatus.PENDING
            },
            order: [['levelNumber', 'ASC']]
          });

          if (nextLevel) {
            logger.info(`[DealerClaimApproval] Using fallback level ${nextLevel.levelNumber} (${(nextLevel as any).levelName || 'unnamed'})`);
          }
        } else if (nextLevel.status !== ApprovalStatus.PENDING) {
          // Sequential level exists but not PENDING - check if it's already approved/rejected
          if (nextLevel.status === ApprovalStatus.APPROVED || nextLevel.status === ApprovalStatus.REJECTED) {
            logger.warn(`[DealerClaimApproval] Sequential level ${currentLevelNumber + 1} already ${nextLevel.status}. Skipping activation.`);
            nextLevel = null; // Don't activate an already completed level
          } else {
            // Level exists but in unexpected status - log warning but proceed
            logger.warn(`[DealerClaimApproval] Sequential level ${currentLevelNumber + 1} exists but status is ${nextLevel.status}, expected PENDING. Proceeding with sequential level.`);
          }
        }
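        // Illustrative walk-through (hypothetical data): with levels 1, 2, 3 and 5 on a request
        // (5 added later as an additional approver), approving level 3 finds no sequential
        // level 4, so the Op.gt fallback selects level 5 — the lowest PENDING level above 3.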

        const nextLevelNumber = nextLevel ? (nextLevel.levelNumber || 0) : null;

        if (nextLevel) {
          logger.info(`[DealerClaimApproval] Found next level: ${nextLevelNumber} (${(nextLevel as any).levelName || 'unnamed'}), approver: ${(nextLevel as any).approverName || (nextLevel as any).approverEmail || 'unknown'}, status: ${nextLevel.status}`);
        } else {
          logger.info(`[DealerClaimApproval] No next level found after level ${currentLevelNumber} - this may be the final approval`);
        }

        if (nextLevel) {
          // Check if next level is paused - if so, don't activate it
          if ((nextLevel as any).isPaused || (nextLevel as any).status === 'PAUSED') {
            logger.warn(`[DealerClaimApproval] Cannot activate next level ${nextLevelNumber} - level is paused`);
            throw new Error('Cannot activate next level - the next approval level is currently paused. Please resume it first.');
          }

          // Activate next level
          await nextLevel.update({
            status: ApprovalStatus.IN_PROGRESS,
            levelStartTime: now,
            tatStartTime: now
          });

          // Schedule TAT jobs for the next level
          try {
            const workflowPriority = (wf as any)?.priority || 'STANDARD';

            await tatSchedulerService.scheduleTatJobs(
              level.requestId,
              (nextLevel as any).levelId,
              (nextLevel as any).approverId,
              Number((nextLevel as any).tatHours),
              now,
              workflowPriority
            );
            logger.info(`[DealerClaimApproval] TAT jobs scheduled for next level ${nextLevelNumber} (Priority: ${workflowPriority})`);
          } catch (tatError) {
            logger.error(`[DealerClaimApproval] Failed to schedule TAT jobs for next level:`, tatError);
            // Don't fail the approval if TAT scheduling fails
          }

          // Update workflow current level
          if (nextLevelNumber !== null) {
            await WorkflowRequest.update(
              { currentLevel: nextLevelNumber },
              { where: { requestId: level.requestId } }
            );

            // Update the APPROVE snapshot's changeReason to include movement information
            // This ensures the approval snapshot shows both the approval and the movement
            // We don't create a separate WORKFLOW snapshot for approvals - only APPROVE snapshot
            try {
              const { DealerClaimHistory } = await import('@models/DealerClaimHistory');
              const { SnapshotType } = await import('@models/DealerClaimHistory');

              const approvalHistory = await DealerClaimHistory.findOne({
                where: {
                  requestId: level.requestId,
                  approvalLevelId: level.levelId,
                  snapshotType: SnapshotType.APPROVE
                },
                order: [['createdAt', 'DESC']]
              });

              if (approvalHistory) {
                // Use the robust approvalComment from outer scope
                const updatedChangeReason = approvalComment
                  ? `Approved by ${level.approverName || level.approverEmail}, moved to next level (${nextLevelNumber}). Comment: ${approvalComment}`
                  : `Approved by ${level.approverName || level.approverEmail}, moved to next level (${nextLevelNumber})`;

                await approvalHistory.update({
                  changeReason: updatedChangeReason
                });
              }
            } catch (updateError) {
              // Log error but don't fail - this is just updating the changeReason for better display
              logger.warn(`[DealerClaimApproval] Failed to update approval history changeReason (non-critical):`, updateError);
            }

            logger.info(`[DealerClaimApproval] Approved level ${level.levelNumber}. Activated next level ${nextLevelNumber} for workflow ${level.requestId}`);
          }

          // Handle dealer claim-specific step processing
          const currentLevelName = (level.levelName || '').toLowerCase();
          // Check by levelName first, use levelNumber only as fallback if levelName is missing
          // This handles cases where additional approvers shift step numbers
          const hasLevelName = level.levelName && level.levelName.trim() !== '';
          const isDeptLeadApproval = hasLevelName
            ? currentLevelName.includes('department lead')
            : (level.levelNumber === 3); // Only use levelNumber if levelName is missing

          const isRequestorClaimApproval = hasLevelName
            ? (currentLevelName.includes('requestor') && (currentLevelName.includes('claim') || currentLevelName.includes('approval')))
            : (level.levelNumber === 5); // Only use levelNumber if levelName is missing

          if (isDeptLeadApproval) {
            // Activity Creation is now an activity log only - process it automatically
            logger.info(`[DealerClaimApproval] Department Lead approved. Processing Activity Creation as activity log.`);
            try {
              const dealerClaimService = new DealerClaimService();
              await dealerClaimService.processActivityCreation(level.requestId);
              logger.info(`[DealerClaimApproval] Activity Creation activity logged for request ${level.requestId}`);
            } catch (activityError) {
              logger.error(`[DealerClaimApproval] Error processing Activity Creation activity for request ${level.requestId}:`, activityError);
              // Don't fail the Department Lead approval if Activity Creation logging fails
            }
          } else if (isRequestorClaimApproval) {
            // Step 6 (System - E-Invoice Generation) is now an activity log only - process it automatically
            logger.info(`[DealerClaimApproval] Requestor Claim Approval approved. Triggering DMS push for E-Invoice generation.`);
            try {
              // Lazy load DealerClaimService to avoid circular dependency issues during method execution
              const dealerClaimService = this.getDealerClaimService();
              await dealerClaimService.updateEInvoiceDetails(level.requestId);
              logger.info(`[DealerClaimApproval] DMS push initiated for request ${level.requestId}`);
            } catch (dmsError) {
              logger.error(`[DealerClaimApproval] Error initiating DMS push for request ${level.requestId}:`, dmsError);
              // Don't fail the Requestor Claim Approval if DMS push fails
            }
          }

          // Log approval activity
          activityService.log({
            requestId: level.requestId,
            type: 'approval',
            user: { userId: level.approverId, name: level.approverName },
            timestamp: new Date().toISOString(),
            action: 'Approved',
            details: `Request approved and forwarded to ${(nextLevel as any).approverName || (nextLevel as any).approverEmail} by ${level.approverName || level.approverEmail}`,
            ipAddress: requestMetadata?.ipAddress || undefined,
            userAgent: requestMetadata?.userAgent || undefined
          });

          // Notify initiator about the approval
          // BUT skip this if it's a dealer proposal or dealer completion step - those have special notifications below
          // Priority: levelName check first, then levelNumber only if levelName is missing
          const hasLevelNameForApproval = level.levelName && level.levelName.trim() !== '';
          const levelNameForApproval = hasLevelNameForApproval && level.levelName ? level.levelName.toLowerCase() : '';
          const isDealerProposalApproval = hasLevelNameForApproval
            ? (levelNameForApproval.includes('dealer') && levelNameForApproval.includes('proposal'))
            : (level.levelNumber === 1); // Only use levelNumber if levelName is missing
          const isDealerCompletionApproval = hasLevelNameForApproval
            ? (levelNameForApproval.includes('dealer') && (levelNameForApproval.includes('completion') || levelNameForApproval.includes('documents')))
            : (level.levelNumber === 5); // Only use levelNumber if levelName is missing

          // Skip sending approval notification to initiator if they are the approver
          // (they don't need to be notified that they approved their own request)
          const isApproverInitiator = level.approverId && (wf as any).initiatorId && level.approverId === (wf as any).initiatorId;

          if (wf && !isDealerProposalApproval && !isDealerCompletionApproval && !isApproverInitiator) {
            await notificationService.sendToUsers([(wf as any).initiatorId], {
              title: `Request Approved - Level ${level.levelNumber}`,
              body: `Your request "${(wf as any).title}" has been approved by ${level.approverName || level.approverEmail} and forwarded to the next approver.`,
              requestNumber: (wf as any).requestNumber,
              requestId: level.requestId,
              url: `/request/${(wf as any).requestNumber}`,
              type: 'approval',
              priority: 'MEDIUM'
            });
          } else if (isApproverInitiator) {
            logger.info(`[DealerClaimApproval] Skipping approval notification to initiator - they are the approver`);
          }

          // Notify next approver - ALWAYS send notification when there's a next level
          if (wf && nextLevel) {
            const nextApproverId = (nextLevel as any).approverId;
            const nextApproverEmail = (nextLevel as any).approverEmail || '';
            const nextApproverName = (nextLevel as any).approverName || nextApproverEmail || 'approver';

            // Check if it's an auto-step or system process
            const isAutoStep = nextApproverEmail === 'system@royalenfield.com'
              || (nextLevel as any).approverName === 'System Auto-Process'
              || nextApproverId === 'system';

            const isSystemEmail = nextApproverEmail.toLowerCase() === 'system@royalenfield.com'
              || nextApproverEmail.toLowerCase().includes('system');
            const isSystemName = nextApproverName.toLowerCase() === 'system auto-process'
              || nextApproverName.toLowerCase().includes('system');

            // Notify initiator when dealer submits documents (Dealer Proposal or Dealer Completion Documents)
            // Check this BEFORE sending assignment notification to avoid duplicates
            // Priority: levelName check first, then levelNumber only if levelName is missing
            const hasLevelNameForNotification = level.levelName && level.levelName.trim() !== '';
            const levelNameForNotification = hasLevelNameForNotification && level.levelName ? level.levelName.toLowerCase() : '';
            const isDealerProposalApproval = hasLevelNameForNotification
              ? (levelNameForNotification.includes('dealer') && levelNameForNotification.includes('proposal'))
              : (level.levelNumber === 1); // Only use levelNumber if levelName is missing
            const isDealerCompletionApproval = hasLevelNameForNotification
              ? (levelNameForNotification.includes('dealer') && (levelNameForNotification.includes('completion') || levelNameForNotification.includes('documents')))
              : (level.levelNumber === 5); // Only use levelNumber if levelName is missing

            // Check if next approver is the initiator (to avoid duplicate notifications)
            const isNextApproverInitiator = nextApproverId && (wf as any).initiatorId && nextApproverId === (wf as any).initiatorId;

            if (isDealerProposalApproval && (wf as any).initiatorId) {
              // Get dealer and proposal data for the email template
              const { DealerClaimDetails } = await import('@models/DealerClaimDetails');
              const { DealerProposalDetails } = await import('@models/DealerProposalDetails');
              const { DealerProposalCostItem } = await import('@models/DealerProposalCostItem');

              const claimDetails = await DealerClaimDetails.findOne({ where: { requestId: level.requestId } });
              const proposalDetails = await DealerProposalDetails.findOne({ where: { requestId: level.requestId } });

              // Get cost items if proposal exists
              let costBreakup: any[] = [];
              if (proposalDetails) {
                const proposalId = (proposalDetails as any).proposalId || (proposalDetails as any).proposal_id;
                if (proposalId) {
                  const costItems = await DealerProposalCostItem.findAll({
                    where: { proposalId },
                    order: [['itemOrder', 'ASC']]
                  });
                  costBreakup = costItems.map((item: any) => ({
                    description: item.itemDescription || item.description,
                    amount: Number(item.amount) || 0
                  }));
                }
              }

              // Get dealer user
              const dealerUser = level.approverId ? await User.findByPk(level.approverId) : null;
              const dealerData = dealerUser ? dealerUser.toJSON() : {
                userId: level.approverId,
                email: level.approverEmail || '',
                displayName: level.approverName || level.approverEmail || 'Dealer'
              };

              // Get next approver (could be Step 2 - Requestor Evaluation, or an additional approver if one was added between Step 1 and Step 2)
              // The nextLevel is already found above using dynamic logic that handles additional approvers correctly
              const nextApproverData = nextLevel ? await User.findByPk((nextLevel as any).approverId) : null;

              // Check if next approver is an additional approver (handles cases where additional approvers are added between Step 1 and Step 2)
              const nextLevelName = nextLevel ? ((nextLevel as any).levelName || '').toLowerCase() : '';
              const isNextAdditionalApprover = nextLevelName.includes('additional approver');

              // Send proposal submitted notification with proper type and metadata
              // This will use the dealerProposalSubmitted template, not the multi-level approval template
              await notificationService.sendToUsers([(wf as any).initiatorId], {
                title: 'Proposal Submitted',
                body: `Dealer ${dealerData.displayName || dealerData.email} has submitted a proposal for your claim request "${(wf as any).title}".`,
                requestNumber: (wf as any).requestNumber,
                requestId: (wf as any).requestId,
                url: `/request/${(wf as any).requestNumber}`,
                type: 'proposal_submitted',
                priority: 'MEDIUM',
                actionRequired: false,
                metadata: {
                  dealerData: dealerData,
                  proposalData: {
                    totalEstimatedBudget: proposalDetails ? (proposalDetails as any).totalEstimatedBudget : 0,
                    expectedCompletionDate: proposalDetails ? (proposalDetails as any).expectedCompletionDate : undefined,
                    dealerComments: proposalDetails ? (proposalDetails as any).dealerComments : undefined,
                    costBreakup: costBreakup,
                    submittedAt: proposalDetails ? (proposalDetails as any).submittedAt : new Date(),
                    nextApproverIsAdditional: isNextAdditionalApprover,
                    nextApproverIsInitiator: isNextApproverInitiator
                  },
                  nextApproverId: nextApproverData ? nextApproverData.userId : undefined,
                  // Add activity information from claimDetails
                  activityName: claimDetails ? (claimDetails as any).activityName : undefined,
                  activityType: claimDetails ? (claimDetails as any).activityType : undefined
                }
              });

              logger.info(`[DealerClaimApproval] Sent proposal_submitted notification to initiator for Dealer Proposal Submission. Next approver: ${isNextApproverInitiator ? 'Initiator (self)' : (isNextAdditionalApprover ? 'Additional Approver' : 'Step 2 (Requestor Evaluation)')}`);
            } else if (isDealerCompletionApproval && (wf as any).initiatorId) {
              // Get dealer and completion data for the email template
              const { DealerClaimDetails } = await import('@models/DealerClaimDetails');
              const { DealerCompletionDetails } = await import('@models/DealerCompletionDetails');
              const { DealerCompletionExpense } = await import('@models/DealerCompletionExpense');

              const claimDetails = await DealerClaimDetails.findOne({ where: { requestId: level.requestId } });
              const completionDetails = await DealerCompletionDetails.findOne({ where: { requestId: level.requestId } });

              // Get expense items if completion exists
              let closedExpenses: any[] = [];
              if (completionDetails) {
                const expenses = await DealerCompletionExpense.findAll({
                  where: { requestId: level.requestId },
                  order: [['createdAt', 'ASC']]
                });
                closedExpenses = expenses.map((item: any) => ({
                  description: item.description || '',
                  amount: Number(item.amount) || 0
                }));
              }

              // Get dealer user
              const dealerUser = level.approverId ? await User.findByPk(level.approverId) : null;
              const dealerData = dealerUser ? dealerUser.toJSON() : {
                userId: level.approverId,
                email: level.approverEmail || '',
                displayName: level.approverName || level.approverEmail || 'Dealer'
              };

              // Get next approver (could be Step 5 - Requestor Claim Approval, or an additional approver if one was added between Step 4 and Step 5)
              const nextApproverData = nextLevel ? await User.findByPk((nextLevel as any).approverId) : null;

              // Check if next approver is an additional approver (handles cases where additional approvers are added between Step 4 and Step 5)
              const nextLevelName = nextLevel ? ((nextLevel as any).levelName || '').toLowerCase() : '';
              const isNextAdditionalApprover = nextLevelName.includes('additional approver');

              // Check if next approver is the initiator (to show appropriate message in email)
              const isNextApproverInitiator = nextApproverData && (wf as any).initiatorId && nextApproverData.userId === (wf as any).initiatorId;

              // Send completion submitted notification with proper type and metadata
              // This will use the completionDocumentsSubmitted template, not the multi-level approval template
              await notificationService.sendToUsers([(wf as any).initiatorId], {
                title: 'Completion Documents Submitted',
                body: `Dealer ${dealerData.displayName || dealerData.email} has submitted completion documents for your claim request "${(wf as any).title}".`,
                requestNumber: (wf as any).requestNumber,
                requestId: (wf as any).requestId,
                url: `/request/${(wf as any).requestNumber}`,
                type: 'completion_submitted',
                priority: 'MEDIUM',
                actionRequired: false,
                metadata: {
                  dealerData: dealerData,
                  completionData: {
                    activityCompletionDate: completionDetails ? (completionDetails as any).activityCompletionDate : undefined,
                    numberOfParticipants: completionDetails ? (completionDetails as any).numberOfParticipants : undefined,
                    totalClosedExpenses: completionDetails ? (completionDetails as any).totalClosedExpenses : 0,
                    closedExpenses: closedExpenses,
                    documentsCount: undefined, // Documents count can be retrieved from documents table if needed
                    submittedAt: completionDetails ? (completionDetails as any).submittedAt : new Date(),
                    nextApproverIsAdditional: isNextAdditionalApprover,
                    nextApproverIsInitiator: isNextApproverInitiator
                  },
                  nextApproverId: nextApproverData ? nextApproverData.userId : undefined
                }
              });

              logger.info(`[DealerClaimApproval] Sent completion_submitted notification to initiator for Dealer Completion Documents. Next approver: ${isNextAdditionalApprover ? 'Additional Approver' : 'Step 5 (Requestor Claim Approval)'}`);
            }

            // Only send assignment notification to next approver if:
            // 1. It's NOT a dealer proposal/completion step (those have special notifications above)
            // 2. Next approver is NOT the initiator (to avoid duplicate notifications)
            // 3. It's not a system/auto step
            if (!isDealerProposalApproval && !isDealerCompletionApproval && !isNextApproverInitiator) {
              if (!isAutoStep && !isSystemEmail && !isSystemName && nextApproverId && nextApproverId !== 'system') {
                try {
                  logger.info(`[DealerClaimApproval] Sending assignment notification to next approver: ${nextApproverName} (${nextApproverId}) at level ${nextLevelNumber} for request ${(wf as any).requestNumber}`);

                  await notificationService.sendToUsers([nextApproverId], {
                    title: `Action required: ${(wf as any).requestNumber}`,
                    body: `${(wf as any).title}`,
                    requestNumber: (wf as any).requestNumber,
                    requestId: (wf as any).requestId,
                    url: `/request/${(wf as any).requestNumber}`,
                    type: 'assignment',
                    priority: 'HIGH',
                    actionRequired: true
                  });

                  logger.info(`[DealerClaimApproval] ✅ Assignment notification sent successfully to ${nextApproverName} (${nextApproverId}) for level ${nextLevelNumber}`);

                  // Log assignment activity for the next approver
                  await activityService.log({
                    requestId: level.requestId,
                    type: 'assignment',
                    user: { userId: level.approverId, name: level.approverName },
                    timestamp: new Date().toISOString(),
                    action: 'Assigned to approver',
                    details: `Request assigned to ${nextApproverName} for ${(nextLevel as any).levelName || `level ${nextLevelNumber}`}`,
                    ipAddress: requestMetadata?.ipAddress || undefined,
                    userAgent: requestMetadata?.userAgent || undefined
                  });
                } catch (notifError) {
                  logger.error(`[DealerClaimApproval] ❌ Failed to send notification to next approver ${nextApproverId} at level ${nextLevelNumber}:`, notifError);
                  // Don't throw - continue with workflow even if notification fails
                }
              } else {
                logger.info(`[DealerClaimApproval] ⚠️ Skipping notification for system/auto-step: ${nextApproverEmail} (${nextApproverId}) at level ${nextLevelNumber}`);
              }
            } else {
              if (isDealerProposalApproval || isDealerCompletionApproval) {
                logger.info(`[DealerClaimApproval] ⚠️ Skipping assignment notification - dealer-specific notification already sent`);
              }
              if (isNextApproverInitiator) {
                logger.info(`[DealerClaimApproval] ⚠️ Skipping assignment notification - next approver is the initiator (already notified)`);
              }
            }
          }
        } else {
          // No next level found but not final approver - this shouldn't happen
          logger.warn(`[DealerClaimApproval] No next level found for workflow ${level.requestId} after approving level ${level.levelNumber}`);
          await WorkflowRequest.update(
            {
              status: WorkflowStatus.APPROVED,
              closureDate: now,
              currentLevel: level.levelNumber || 0
            },
            { where: { requestId: level.requestId } }
          );
          if (wf) {
            await notificationService.sendToUsers([(wf as any).initiatorId], {
              title: `Approved: ${(wf as any).requestNumber}`,
              body: `${(wf as any).title}`,
              requestNumber: (wf as any).requestNumber,
              requestId: level.requestId,
              url: `/request/${(wf as any).requestNumber}`,
              type: 'approval',
              priority: 'MEDIUM'
            });
          }
        }
      }

      // Emit real-time update to all users viewing this request
      emitToRequestRoom(level.requestId, 'request:updated', {
        requestId: level.requestId,
        requestNumber: (wf as any)?.requestNumber,
        action: action.action,
        levelNumber: level.levelNumber,
        timestamp: now.toISOString()
      });

      logger.info(`[DealerClaimApproval] Approval level ${levelId} processed (${action.action}) and socket event emitted`);

      return level;
    } catch (error) {
      logger.error('[DealerClaimApproval] Error approving level:', error);
      throw error;
    }
  }

  /**
   * Handle rejection (internal method called from approveLevel)
   */
  private async handleRejection(
    level: ApprovalLevel,
    action: ApprovalAction,
    userId: string,
    requestMetadata?: { ipAddress?: string | null; userAgent?: string | null },
    elapsedHours?: number,
    tatPercentage?: number,
    now?: Date
  ): Promise<ApprovalLevel | null> {
    const rejectionNow = now || new Date();
    const wf = await WorkflowRequest.findByPk(level.requestId);
    if (!wf) return null;

    // Default rejection logic: return the request to the immediately previous approval step
    logger.info(`[DealerClaimApproval] Rejection for request ${level.requestId} by level ${level.levelNumber}. Finding previous step to return to.`);

    // Save approval history (rejection) BEFORE updating level
    await this.getDealerClaimService().saveApprovalHistory(
      level.requestId,
      level.levelId,
      level.levelNumber,
      'REJECT',
      action.comments || '',
      action.rejectionReason || undefined,
      userId
    );

    // Find all levels to determine previous step
    const allLevels = await ApprovalLevel.findAll({
      where: { requestId: level.requestId },
      order: [['levelNumber', 'ASC']]
    });

    // Find the immediately previous approval level
    const currentLevelNumber = level.levelNumber || 0;
    const previousLevels = allLevels.filter(l => l.levelNumber < currentLevelNumber && l.levelNumber > 0);
    const previousLevel = previousLevels[previousLevels.length - 1];
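    // Illustrative (hypothetical data): levels 1-4 with level 3 rejecting → previousLevels
    // is [1, 2] and previousLevel is 2, so the request is reworked at level 2. A rejection
    // at level 1 finds no previous level and is treated as terminal below.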

    // Update level status - if returning to previous step, set this level to PENDING (reset)
    // If no previous step (terminal rejection), set to REJECTED
    const newStatus = previousLevel ? ApprovalStatus.PENDING : ApprovalStatus.REJECTED;

    await level.update({
      status: newStatus,
      // If resetting to PENDING, clear action details so it can be acted upon again later
      actionDate: previousLevel ? null : rejectionNow,
      levelEndTime: previousLevel ? null : rejectionNow,
      elapsedHours: previousLevel ? 0 : (elapsedHours || 0),
      tatPercentageUsed: previousLevel ? 0 : (tatPercentage || 0),
      comments: previousLevel ? null : (action.comments || action.rejectionReason || undefined)
    } as any);

    // If no previous level found (this is the first step), close the workflow
    if (!previousLevel) {
      logger.info(`[DealerClaimApproval] No previous level found. This is the first step. Closing workflow.`);

      // Close workflow FIRST
      await WorkflowRequest.update(
        {
          status: WorkflowStatus.REJECTED,
          closureDate: rejectionNow
        },
        { where: { requestId: level.requestId } }
      );

      // Capture workflow snapshot AFTER workflow is closed successfully
      try {
        await this.getDealerClaimService().saveWorkflowHistory(
          level.requestId,
          `Level ${level.levelNumber} rejected (terminal rejection - no previous step)`,
          userId,
          level.levelId,
          level.levelNumber,
          level.levelName || undefined
        );
      } catch (snapshotError) {
        // Log error but don't fail the rejection - snapshot is for audit, not critical
        logger.error(`[DealerClaimApproval] Failed to save workflow history snapshot (non-critical):`, snapshotError);
      }

      // Log rejection activity (terminal rejection)
      activityService.log({
        requestId: level.requestId,
        type: 'rejection',
        user: { userId: level.approverId, name: level.approverName },
        timestamp: rejectionNow.toISOString(),
        action: 'Rejected',
        details: `Request rejected by ${level.approverName || level.approverEmail}. Reason: ${action.rejectionReason || action.comments || 'No reason provided'}`,
        ipAddress: requestMetadata?.ipAddress || undefined,
        userAgent: requestMetadata?.userAgent || undefined
      });

      // Notify initiator and participants (workflow is closed)
      const participants = await import('@models/Participant').then(m => m.Participant.findAll({
        where: { requestId: level.requestId, isActive: true }
      }));

      const userIdsToNotify = [(wf as any).initiatorId];
      if (participants && participants.length > 0) {
        participants.forEach((p: any) => {
          if (p.userId && p.userId !== (wf as any).initiatorId) {
            userIdsToNotify.push(p.userId);
          }
        });
      }

      await notificationService.sendToUsers(userIdsToNotify, {
        title: `Request Rejected: ${(wf as any).requestNumber}`,
        body: `${(wf as any).title} - Rejected by ${level.approverName || level.approverEmail}. Reason: ${action.rejectionReason || action.comments || 'No reason provided'}`,
        requestNumber: (wf as any).requestNumber,
        requestId: level.requestId,
        url: `/request/${(wf as any).requestNumber}`,
        type: 'rejection',
        priority: 'HIGH'
      });
    } else {
      // Return to previous step
      logger.info(`[DealerClaimApproval] Returning to previous level ${previousLevel.levelNumber} (${previousLevel.levelName || 'unnamed'})`);

      // Reset previous level to IN_PROGRESS so it can be acted upon again
      await previousLevel.update({
        status: ApprovalStatus.IN_PROGRESS,
        levelStartTime: rejectionNow,
        tatStartTime: rejectionNow,
        actionDate: undefined,
        levelEndTime: undefined,
        comments: undefined,
        elapsedHours: 0,
        tatPercentageUsed: 0
      });

      // Update workflow status to PENDING (remains active for rework)
      // Set currentLevel to previous level
      await WorkflowRequest.update(
        {
          status: WorkflowStatus.PENDING,
          currentLevel: previousLevel.levelNumber
        },
        { where: { requestId: level.requestId } }
      );

      // Log rejection activity (returned to previous step)
      activityService.log({
        requestId: level.requestId,
        type: 'rejection',
        user: { userId: level.approverId, name: level.approverName },
        timestamp: rejectionNow.toISOString(),
        action: 'Returned to Previous Step',
        details: `Request rejected by ${level.approverName || level.approverEmail} and returned to level ${previousLevel.levelNumber}. Reason: ${action.rejectionReason || action.comments || 'No reason provided'}`,
        ipAddress: requestMetadata?.ipAddress || undefined,
        userAgent: requestMetadata?.userAgent || undefined
      });

      // Notify the approver of the previous level
      if (previousLevel.approverId) {
        await notificationService.sendToUsers([previousLevel.approverId], {
          title: `Request Returned: ${(wf as any).requestNumber}`,
          body: `Request "${(wf as any).title}" has been returned to your level for revision. Reason: ${action.rejectionReason || action.comments || 'No reason provided'}`,
          requestNumber: (wf as any).requestNumber,
          requestId: level.requestId,
          url: `/request/${(wf as any).requestNumber}`,
          type: 'assignment',
          priority: 'HIGH',
          actionRequired: true
        });
      }

      // Notify initiator when request is returned (not closed)
      await notificationService.sendToUsers([(wf as any).initiatorId], {
        title: `Request Returned: ${(wf as any).requestNumber}`,
        body: `Request "${(wf as any).title}" has been returned to level ${previousLevel.levelNumber} for revision. Reason: ${action.rejectionReason || action.comments || 'No reason provided'}`,
        requestNumber: (wf as any).requestNumber,
        requestId: level.requestId,
        url: `/request/${(wf as any).requestNumber}`,
        type: 'rejection',
        priority: 'HIGH',
        actionRequired: true
      });
    }

    // Emit real-time update to all users viewing this request
    emitToRequestRoom(level.requestId, 'request:updated', {
      requestId: level.requestId,
      requestNumber: (wf as any)?.requestNumber,
      action: 'REJECT',
      levelNumber: level.levelNumber,
      timestamp: rejectionNow.toISOString()
    });

    return level;
  }

  /**
   * Reject a level in a dealer claim workflow (legacy method - kept for backward compatibility)
   */
  async rejectLevel(
    levelId: string,
    reason: string,
    comments: string,
    userId: string,
    requestMetadata?: { ipAddress?: string | null; userAgent?: string | null }
  ): Promise<ApprovalLevel | null> {
    try {
      const level = await ApprovalLevel.findByPk(levelId);
      if (!level) return null;

      const wf = await WorkflowRequest.findByPk(level.requestId);
      if (!wf) return null;

      // Verify this is a claim management workflow
      const workflowType = (wf as any)?.workflowType;
      if (workflowType !== 'CLAIM_MANAGEMENT') {
        logger.warn(`[DealerClaimApproval] Attempted to use DealerClaimApprovalService for non-claim-management workflow ${level.requestId}. Workflow type: ${workflowType}`);
        throw new Error('DealerClaimApprovalService can only be used for CLAIM_MANAGEMENT workflows');
      }

      const now = new Date();

      // Calculate elapsed hours
      const priority = ((wf as any)?.priority || 'standard').toString().toLowerCase();
      const isPausedLevel = (level as any).isPaused;
      const wasResumed = !isPausedLevel &&
        (level as any).pauseElapsedHours !== null &&
        (level as any).pauseElapsedHours !== undefined &&
        (level as any).pauseResumeDate !== null;

      const pauseInfo = isPausedLevel ? {
        // Level is currently paused - return frozen elapsed hours at pause time
        isPaused: true,
        pausedAt: (level as any).pausedAt,
        pauseElapsedHours: (level as any).pauseElapsedHours,
        pauseResumeDate: (level as any).pauseResumeDate
      } : wasResumed ? {
        // Level was paused but has been resumed - add pre-pause elapsed hours + time since resume
        isPaused: false,
        pausedAt: null,
        pauseElapsedHours: Number((level as any).pauseElapsedHours), // Pre-pause elapsed hours
        pauseResumeDate: (level as any).pauseResumeDate // Actual resume timestamp
      } : undefined;

      const elapsedHours = await calculateElapsedWorkingHours(
        (level as any).levelStartTime || (level as any).tatStartTime || now,
        now,
        priority,
        pauseInfo
      );
      const tatPercentage = calculateTATPercentage(elapsedHours, level.tatHours);

      // Delegate to the internal handleRejection method
      return await this.handleRejection(
        level,
        { action: 'REJECT', comments: comments || reason, rejectionReason: reason || comments },
        userId,
        requestMetadata,
        elapsedHours,
        tatPercentage,
        now
      );
    } catch (error) {
      logger.error('[DealerClaimApproval] Error rejecting level:', error);
      throw error;
    }
  }

  /**
   * Get current approval level for a request
   */
  async getCurrentApprovalLevel(requestId: string): Promise<ApprovalLevel | null> {
    const workflow = await WorkflowRequest.findByPk(requestId);
    if (!workflow) return null;

    const currentLevel = (workflow as any).currentLevel;
    if (!currentLevel) return null;

    return await ApprovalLevel.findOne({
      where: { requestId, levelNumber: currentLevel }
    });
  }

  /**
   * Get all approval levels for a request
   */
  async getApprovalLevels(requestId: string): Promise<ApprovalLevel[]> {
    return await ApprovalLevel.findAll({
      where: { requestId },
      order: [['levelNumber', 'ASC']]
    });
  }
}
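
// Usage sketch (illustrative only — the controller wiring below is hypothetical):
//
//   const approvalService = new DealerClaimApprovalService();
//   // approve the current level on behalf of the acting approver
//   await approvalService.approveLevel(levelId, { action: 'APPROVE', comments: 'Looks good' }, userId,
//     { ipAddress: req.ip, userAgent: req.get('user-agent') });
//   // or reject, which returns the request to the previous step (or closes it at step 1)
//   await approvalService.rejectLevel(levelId, 'Budget exceeds limit', 'Please revise the cost breakup', userId);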

535	_archive/services/dmsWebhook.service.ts	Normal file
@@ -0,0 +1,535 @@
import { Request } from 'express';
import { ClaimInvoice } from '../models/ClaimInvoice';
import { ClaimCreditNote } from '../models/ClaimCreditNote';
import { WorkflowRequest } from '../models/WorkflowRequest';
import { ApprovalLevel } from '../models/ApprovalLevel';
import { DealerClaimDetails } from '../models/DealerClaimDetails';
import { User } from '../models/User';
import { ApprovalService } from './approval.service';
import logger from '../utils/logger';
import crypto from 'crypto';
import { activityService } from './activity.service';
import { notificationService } from './notification.service';

/**
 * DMS Webhook Service
 * Handles processing of webhook callbacks from the DMS system
 */
export class DMSWebhookService {
  private webhookSecret: string;
  private approvalService: ApprovalService;

  constructor() {
    this.webhookSecret = process.env.DMS_WEBHOOK_SECRET || '';
    this.approvalService = new ApprovalService();
  }

  /**
   * Validate webhook signature for security
   * DMS should send a signature in the header that we can verify
   */
  async validateWebhookSignature(req: Request): Promise<boolean> {
    // If webhook secret is not configured, skip validation (for development)
    if (!this.webhookSecret) {
      logger.warn('[DMSWebhook] Webhook secret not configured, skipping signature validation');
      return true;
    }

    try {
      const signature = req.headers['x-dms-signature'] as string;
      if (!signature) {
        logger.warn('[DMSWebhook] Missing webhook signature in header');
        return false;
      }

      // Create HMAC hash of the request body
      const body = JSON.stringify(req.body);
      const expectedSignature = crypto
        .createHmac('sha256', this.webhookSecret)
        .update(body)
        .digest('hex');

      // Compare signatures (use constant-time comparison to prevent timing attacks).
      // timingSafeEqual throws if the buffers differ in length, so reject mismatched lengths first.
      const signatureBuffer = Buffer.from(signature);
      const expectedBuffer = Buffer.from(expectedSignature);
      const isValid = signatureBuffer.length === expectedBuffer.length
        && crypto.timingSafeEqual(signatureBuffer, expectedBuffer);

      if (!isValid) {
        logger.warn('[DMSWebhook] Invalid webhook signature');
      }

      return isValid;
    } catch (error) {
      logger.error('[DMSWebhook] Error validating webhook signature:', error);
      return false;
    }
  }
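
  // Sender-side sketch (illustrative — shows how a caller such as DMS would have to sign the
  // payload for the verification above to pass; the endpoint URL is hypothetical):
  //
  //   const payload = { request_number: 'REQ-123', document_no: 'INV-001', document_type: 'INVOICE' };
  //   const body = JSON.stringify(payload);
  //   const signature = crypto.createHmac('sha256', process.env.DMS_WEBHOOK_SECRET!).update(body).digest('hex');
  //   await fetch('https://example.com/webhooks/dms/invoice', {
  //     method: 'POST',
  //     headers: { 'content-type': 'application/json', 'x-dms-signature': signature },
  //     body
  //   });
  //
  // Note: the verifier re-serializes req.body with JSON.stringify, so the signature must be
  // computed over an identically serialized body (or the server should verify the raw body).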

  /**
   * Process invoice generation webhook from DMS
   */
  async processInvoiceWebhook(payload: any): Promise<{
    success: boolean;
    invoiceNumber?: string;
    error?: string;
  }> {
    try {
      // Validate required fields
      const requiredFields = ['request_number', 'document_no', 'document_type'];
      for (const field of requiredFields) {
        if (!payload[field]) {
          return {
            success: false,
            error: `Missing required field: ${field}`,
          };
        }
      }

      // Find workflow request by request number
      const request = await WorkflowRequest.findOne({
        where: {
          requestNumber: payload.request_number,
        },
      });

      if (!request) {
        return {
          success: false,
          error: `Request not found: ${payload.request_number}`,
        };
      }

      // Find or create invoice record
      let invoice = await ClaimInvoice.findOne({
        where: { requestId: request.requestId },
      });

      // Create invoice if it doesn't exist (new flow: webhook creates invoice)
      if (!invoice) {
        logger.info('[DMSWebhook] Invoice record not found, creating new invoice from webhook', {
          requestNumber: payload.request_number,
        });

        invoice = await ClaimInvoice.create({
          requestId: request.requestId,
          invoiceNumber: payload.document_no,
          dmsNumber: payload.document_no,
          invoiceDate: payload.document_date ? new Date(payload.document_date) : new Date(),
          amount: payload.total_amount || payload.claim_amount,
          status: 'GENERATED',
          generatedAt: new Date(),
          invoiceFilePath: payload.invoice_file_path || null,
          errorMessage: payload.error_message || null,
          description: this.buildInvoiceDescription(payload),
        });

        logger.info('[DMSWebhook] Invoice created successfully from webhook', {
          requestNumber: payload.request_number,
          invoiceNumber: payload.document_no,
        });
      } else {
        // Update existing invoice with DMS response data
        await invoice.update({
          invoiceNumber: payload.document_no,
          dmsNumber: payload.document_no, // DMS document number
          invoiceDate: payload.document_date ? new Date(payload.document_date) : new Date(),
          amount: payload.total_amount || payload.claim_amount,
          status: 'GENERATED',
          generatedAt: new Date(),
          invoiceFilePath: payload.invoice_file_path || null,
          errorMessage: payload.error_message || null,
          // Store additional DMS data in description or separate fields if needed
          description: this.buildInvoiceDescription(payload),
        });

        logger.info('[DMSWebhook] Invoice updated successfully', {
          requestNumber: payload.request_number,
          invoiceNumber: payload.document_no,
          irnNo: payload.irn_no,
        });
      }

      // Log the e-invoice generation as an activity on the request
      await this.logEInvoiceGenerationActivity(request.requestId, payload.request_number);

      return {
        success: true,
        invoiceNumber: payload.document_no,
      };
    } catch (error) {
      const errorMessage = error instanceof Error ? error.message : 'Unknown error';
      logger.error('[DMSWebhook] Error processing invoice webhook:', error);
      return {
        success: false,
        error: errorMessage,
      };
    }
  }
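
  // Example payload this handler expects (illustrative values; the field names come from the
  // reads above, everything else about the shape is an assumption):
  //
  //   {
  //     "request_number": "REQ-2024-0042",
  //     "document_no": "INV-000123",
  //     "document_type": "INVOICE",
  //     "document_date": "2024-06-01",
  //     "total_amount": 150000,
  //     "irn_no": "a1b2c3...",
  //     "invoice_file_path": "/dms/invoices/INV-000123.pdf",
  //     "error_message": null
  //   }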
|
||||||
|
|
||||||
|
  /**
   * Process credit note generation webhook from DMS
   */
  async processCreditNoteWebhook(payload: any): Promise<{
    success: boolean;
    creditNoteNumber?: string;
    error?: string;
  }> {
    try {
      // Validate required fields
      const requiredFields = ['request_number', 'document_no', 'document_type'];
      for (const field of requiredFields) {
        if (!payload[field]) {
          return {
            success: false,
            error: `Missing required field: ${field}`,
          };
        }
      }

      // Find workflow request by request number
      const request = await WorkflowRequest.findOne({
        where: {
          requestNumber: payload.request_number,
        },
      });

      if (!request) {
        return {
          success: false,
          error: `Request not found: ${payload.request_number}`,
        };
      }

      // Find invoice to link credit note (optional - credit note can exist without invoice)
      const invoice = await ClaimInvoice.findOne({
        where: { requestId: request.requestId },
      });

      // Find or create credit note record
      let creditNote = await ClaimCreditNote.findOne({
        where: { requestId: request.requestId },
      });

      // Create credit note if it doesn't exist (new flow: webhook creates credit note)
      if (!creditNote) {
        logger.info('[DMSWebhook] Credit note record not found, creating new credit note from webhook', {
          requestNumber: payload.request_number,
          hasInvoice: !!invoice,
        });

        creditNote = await ClaimCreditNote.create({
          requestId: request.requestId,
          invoiceId: invoice?.invoiceId || undefined, // Allow undefined if no invoice exists
          creditNoteNumber: payload.document_no,
          creditNoteDate: payload.document_date ? new Date(payload.document_date) : new Date(),
          creditNoteAmount: payload.total_amount || payload.credit_amount,
          sapDocumentNumber: payload.sap_credit_note_no || null,
          status: 'CONFIRMED',
          confirmedAt: new Date(),
          creditNoteFilePath: payload.credit_note_file_path || null,
          errorMessage: payload.error_message || null,
          description: this.buildCreditNoteDescription(payload),
        });

        logger.info('[DMSWebhook] Credit note created successfully from webhook', {
          requestNumber: payload.request_number,
          creditNoteNumber: payload.document_no,
          hasInvoice: !!invoice,
        });

        // Log activity and notify initiator
        await this.logCreditNoteCreationActivity(
          request.requestId,
          payload.request_number,
          payload.document_no,
          creditNote.creditNoteAmount || payload.total_amount || payload.credit_amount
        );
      } else {
        // Update existing credit note with DMS response data
        await creditNote.update({
          invoiceId: invoice?.invoiceId || creditNote.invoiceId, // Preserve existing invoiceId if no invoice found
          creditNoteNumber: payload.document_no,
          creditNoteDate: payload.document_date ? new Date(payload.document_date) : new Date(),
          creditNoteAmount: payload.total_amount || payload.credit_amount,
          sapDocumentNumber: payload.sap_credit_note_no || null,
          status: 'CONFIRMED',
          confirmedAt: new Date(),
          creditNoteFilePath: payload.credit_note_file_path || null,
          errorMessage: payload.error_message || null,
          description: this.buildCreditNoteDescription(payload),
        });

        logger.info('[DMSWebhook] Credit note updated successfully', {
          requestNumber: payload.request_number,
          creditNoteNumber: payload.document_no,
          sapCreditNoteNo: payload.sap_credit_note_no,
          irnNo: payload.irn_no,
          hasInvoice: !!invoice,
        });

        // Log activity and notify initiator for updated credit note
        await this.logCreditNoteCreationActivity(
          request.requestId,
          payload.request_number,
          payload.document_no,
          creditNote.creditNoteAmount || payload.total_amount || payload.credit_amount
        );
      }

      return {
        success: true,
        creditNoteNumber: payload.document_no,
      };
    } catch (error) {
      const errorMessage = error instanceof Error ? error.message : 'Unknown error';
      logger.error('[DMSWebhook] Error processing credit note webhook:', error);
      return {
        success: false,
        error: errorMessage,
      };
    }
  }
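
  // Illustrative only — the minimal field set this credit-note handler validates
  // and reads, with invented values; request_number, document_no and document_type
  // are the required fields checked above:
  //   {
  //     "request_number": "REQ-2024-0001",
  //     "document_no": "CN-000045",
  //     "document_type": "CREDIT_NOTE",
  //     "document_date": "2024-02-01",
  //     "total_amount": 59000,              // falls back to credit_amount when missing
  //     "sap_credit_note_no": "SAP-CN-778", // stored as sapDocumentNumber
  //     "irn_no": "IRN-EXAMPLE-002"
  //   }
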
  /**
   * Build invoice description from DMS payload
   */
  private buildInvoiceDescription(payload: any): string {
    const parts: string[] = [];

    if (payload.irn_no) {
      parts.push(`IRN: ${payload.irn_no}`);
    }
    if (payload.item_code_no) {
      parts.push(`Item Code: ${payload.item_code_no}`);
    }
    if (payload.hsn_sac_code) {
      parts.push(`HSN/SAC: ${payload.hsn_sac_code}`);
    }
    if (payload.cgst_amount || payload.sgst_amount || payload.igst_amount) {
      parts.push(`GST - CGST: ${payload.cgst_amount || 0}, SGST: ${payload.sgst_amount || 0}, IGST: ${payload.igst_amount || 0}`);
    }

    return parts.length > 0 ? parts.join(' | ') : '';
  }
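
  // For example (invented values), a payload with irn_no '123', item_code_no 'A1'
  // and cgst_amount 900 would yield:
  //   "IRN: 123 | Item Code: A1 | GST - CGST: 900, SGST: 0, IGST: 0"
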
  /**
   * Build credit note description from DMS payload
   */
  private buildCreditNoteDescription(payload: any): string {
    const parts: string[] = [];

    if (payload.irn_no) {
      parts.push(`IRN: ${payload.irn_no}`);
    }
    if (payload.sap_credit_note_no) {
      parts.push(`SAP CN: ${payload.sap_credit_note_no}`);
    }
    if (payload.credit_type) {
      parts.push(`Credit Type: ${payload.credit_type}`);
    }
    if (payload.item_code_no) {
      parts.push(`Item Code: ${payload.item_code_no}`);
    }
    if (payload.hsn_sac_code) {
      parts.push(`HSN/SAC: ${payload.hsn_sac_code}`);
    }
    if (payload.cgst_amount || payload.sgst_amount || payload.igst_amount) {
      parts.push(`GST - CGST: ${payload.cgst_amount || 0}, SGST: ${payload.sgst_amount || 0}, IGST: ${payload.igst_amount || 0}`);
    }

    return parts.length > 0 ? parts.join(' | ') : '';
  }

  /**
   * Log Credit Note Creation as activity and notify initiator
   * This is called after credit note is created/updated from DMS webhook
   */
  private async logCreditNoteCreationActivity(
    requestId: string,
    requestNumber: string,
    creditNoteNumber: string,
    creditNoteAmount: number
  ): Promise<void> {
    try {
      // Check if this is a claim management workflow
      const request = await WorkflowRequest.findByPk(requestId);
      if (!request) {
        logger.warn('[DMSWebhook] Request not found for credit note activity logging', { requestId });
        return;
      }

      const workflowType = (request as any).workflowType;
      if (workflowType !== 'CLAIM_MANAGEMENT') {
        logger.info('[DMSWebhook] Not a claim management workflow, skipping credit note activity logging', {
          requestId,
          workflowType,
        });
        return;
      }

      const initiatorId = (request as any).initiatorId;
      if (!initiatorId) {
        logger.warn('[DMSWebhook] Initiator ID not found for credit note notification', { requestId });
        return;
      }

      // Log activity
      await activityService.log({
        requestId,
        type: 'status_change',
        user: undefined, // System event (no user means it's a system event)
        timestamp: new Date().toISOString(),
        action: 'Credit Note Generated',
        details: `Credit note generated from DMS. Credit Note Number: ${creditNoteNumber}. Credit Note Amount: ₹${creditNoteAmount || 0}. Request: ${requestNumber}`,
        category: 'credit_note',
        severity: 'INFO',
      });

      logger.info('[DMSWebhook] Credit note activity logged successfully', {
        requestId,
        requestNumber,
        creditNoteNumber,
      });

      // Get dealer information from claim details
      const claimDetails = await DealerClaimDetails.findOne({
        where: { requestId }
      });

      let dealerUserId: string | null = null;
      if (claimDetails?.dealerEmail) {
        const dealerUser = await User.findOne({
          where: { email: claimDetails.dealerEmail.toLowerCase() },
          attributes: ['userId'],
        });
        dealerUserId = dealerUser?.userId || null;

        if (dealerUserId) {
          logger.info('[DMSWebhook] Found dealer user for notification', {
            requestId,
            dealerEmail: claimDetails.dealerEmail,
            dealerUserId,
          });
        } else {
          logger.warn('[DMSWebhook] Dealer email found but user not found in system', {
            requestId,
            dealerEmail: claimDetails.dealerEmail,
          });
        }
      } else {
        logger.info('[DMSWebhook] No dealer email found in claim details', { requestId });
      }

      // Send notification to initiator
      await notificationService.sendToUsers([initiatorId], {
        title: 'Credit Note Generated',
        body: `Credit note ${creditNoteNumber} has been generated for request ${requestNumber}. Amount: ₹${creditNoteAmount || 0}`,
        requestId,
        requestNumber,
        url: `/request/${requestNumber}`,
        type: 'status_change',
        priority: 'MEDIUM',
        actionRequired: false,
        metadata: {
          creditNoteNumber,
          creditNoteAmount,
          source: 'dms_webhook',
        },
      });

      logger.info('[DMSWebhook] Credit note notification sent to initiator', {
        requestId,
        requestNumber,
        initiatorId,
        creditNoteNumber,
      });

      // Send notification to dealer if dealer user exists
      if (dealerUserId) {
        await notificationService.sendToUsers([dealerUserId], {
          title: 'Credit Note Generated',
          body: `Credit note ${creditNoteNumber} has been generated for your claim request ${requestNumber}. Amount: ₹${creditNoteAmount || 0}`,
          requestId,
          requestNumber,
          url: `/request/${requestNumber}`,
          type: 'status_change',
          priority: 'MEDIUM',
          actionRequired: false,
          metadata: {
            creditNoteNumber,
            creditNoteAmount,
            source: 'dms_webhook',
            recipient: 'dealer',
          },
        });

        logger.info('[DMSWebhook] Credit note notification sent to dealer', {
          requestId,
          requestNumber,
          dealerUserId,
          dealerEmail: claimDetails?.dealerEmail,
          creditNoteNumber,
        });
      }
    } catch (error) {
      const errorMessage = error instanceof Error ? error.message : 'Unknown error';
      logger.error('[DMSWebhook] Error logging credit note activity:', {
        requestId,
        requestNumber,
        error: errorMessage,
      });
      // Don't throw error - webhook processing should continue even if activity/notification fails
      // The credit note is already created/updated, which is the primary goal
    }
  }

  /**
   * Log E-Invoice Generation as activity (no longer an approval step)
   * This is called after invoice is created/updated from DMS webhook
   */
  private async logEInvoiceGenerationActivity(requestId: string, requestNumber: string): Promise<void> {
    try {
      // Check if this is a claim management workflow
      const request = await WorkflowRequest.findByPk(requestId);
      if (!request) {
        logger.warn('[DMSWebhook] Request not found for Step 7 auto-approval', { requestId });
        return;
      }

      const workflowType = (request as any).workflowType;
      if (workflowType !== 'CLAIM_MANAGEMENT') {
        logger.info('[DMSWebhook] Not a claim management workflow, skipping Step 7 auto-approval', {
          requestId,
          workflowType,
        });
        return;
      }

      // E-Invoice Generation is now an activity log only, not an approval step
      // Log the activity using the dealerClaimService
      const { DealerClaimService } = await import('./dealerClaim.service');
      const dealerClaimService = new DealerClaimService();
      const invoice = await ClaimInvoice.findOne({ where: { requestId } });
      const invoiceNumber = invoice?.invoiceNumber || 'N/A';

      await dealerClaimService.logEInvoiceGenerationActivity(requestId, invoiceNumber);

      logger.info('[DMSWebhook] E-Invoice Generation activity logged successfully', {
        requestId,
        requestNumber,
        invoiceNumber,
      });
    } catch (error) {
      const errorMessage = error instanceof Error ? error.message : 'Unknown error';
      logger.error('[DMSWebhook] Error logging E-Invoice Generation activity:', {
        requestId,
        requestNumber,
        error: errorMessage,
      });
      // Don't throw error - webhook processing should continue even if activity logging fails
      // The invoice is already created/updated, which is the primary goal
    }
  }
}
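
// Wiring sketch (illustrative, not part of this file): how these handlers might
// be mounted on an Express route. The instance name `dmsWebhookService` and the
// paths are hypothetical; only the two process*Webhook methods come from above.
//
//   router.post('/webhooks/dms/invoice', async (req, res) => {
//     const result = await dmsWebhookService.processInvoiceWebhook(req.body);
//     res.status(result.success ? 200 : 400).json(result);
//   });
//
//   router.post('/webhooks/dms/credit-note', async (req, res) => {
//     const result = await dmsWebhookService.processCreditNoteWebhook(req.body);
//     res.status(result.success ? 200 : 400).json(result);
//   });
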
221
_archive/services/holiday.service.ts
Normal file
@ -0,0 +1,221 @@

import { Holiday, HolidayType } from '@models/Holiday';
import { Op } from 'sequelize';
import logger from '@utils/logger';
import dayjs from 'dayjs';

export class HolidayService {
  /**
   * Get all holidays within a date range
   */
  async getHolidaysInRange(startDate: Date | string, endDate: Date | string): Promise<string[]> {
    try {
      const holidays = await Holiday.findAll({
        where: {
          holidayDate: {
            [Op.between]: [dayjs(startDate).format('YYYY-MM-DD'), dayjs(endDate).format('YYYY-MM-DD')]
          },
          isActive: true
        },
        attributes: ['holidayDate'],
        raw: true
      });

      return holidays.map((h: any) => h.holidayDate || h.holiday_date);
    } catch (error) {
      logger.error('[Holiday Service] Error fetching holidays:', error);
      return [];
    }
  }

  /**
   * Check if a specific date is a holiday
   */
  async isHoliday(date: Date | string): Promise<boolean> {
    try {
      const dateStr = dayjs(date).format('YYYY-MM-DD');
      const holiday = await Holiday.findOne({
        where: {
          holidayDate: dateStr,
          isActive: true
        }
      });

      return !!holiday;
    } catch (error) {
      logger.error('[Holiday Service] Error checking holiday:', error);
      return false;
    }
  }

  /**
   * Check if a date is a working day (not weekend or holiday)
   */
  async isWorkingDay(date: Date | string): Promise<boolean> {
    const day = dayjs(date);
    const dayOfWeek = day.day(); // 0 = Sunday, 6 = Saturday

    // Check if weekend
    if (dayOfWeek === 0 || dayOfWeek === 6) {
      return false;
    }

    // Check if holiday
    const isHol = await this.isHoliday(date);
    return !isHol;
  }
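
  // Example (illustrative): a Saturday short-circuits on day.day() === 6 without
  // touching the Holiday table; a mid-week public holiday returns false via
  // isHoliday(); any other weekday resolves to true.
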
  /**
   * Add a new holiday
   */
  async createHoliday(holidayData: {
    holidayDate: string;
    holidayName: string;
    description?: string;
    holidayType?: HolidayType;
    isRecurring?: boolean;
    recurrenceRule?: string;
    appliesToDepartments?: string[];
    appliesToLocations?: string[];
    createdBy: string;
  }): Promise<Holiday> {
    try {
      const holiday = await Holiday.create({
        ...holidayData,
        isActive: true
      } as any);

      logger.info(`[Holiday Service] Holiday created: ${holidayData.holidayName} on ${holidayData.holidayDate}`);
      return holiday;
    } catch (error) {
      logger.error('[Holiday Service] Error creating holiday:', error);
      throw error;
    }
  }

  /**
   * Update a holiday
   */
  async updateHoliday(holidayId: string, updates: any, updatedBy: string): Promise<Holiday | null> {
    try {
      const holiday = await Holiday.findByPk(holidayId);
      if (!holiday) {
        throw new Error('Holiday not found');
      }

      await holiday.update({
        ...updates,
        updatedBy,
        updatedAt: new Date()
      });

      logger.info(`[Holiday Service] Holiday updated: ${holidayId}`);
      return holiday;
    } catch (error) {
      logger.error('[Holiday Service] Error updating holiday:', error);
      throw error;
    }
  }

  /**
   * Delete (deactivate) a holiday
   */
  async deleteHoliday(holidayId: string): Promise<boolean> {
    try {
      await Holiday.update(
        { isActive: false },
        { where: { holidayId } }
      );

      logger.info(`[Holiday Service] Holiday deactivated: ${holidayId}`);
      return true;
    } catch (error) {
      logger.error('[Holiday Service] Error deleting holiday:', error);
      throw error;
    }
  }

  /**
   * Get all active holidays
   */
  async getAllActiveHolidays(year?: number): Promise<Holiday[]> {
    try {
      const whereClause: any = { isActive: true };

      if (year) {
        const startDate = `${year}-01-01`;
        const endDate = `${year}-12-31`;
        whereClause.holidayDate = {
          [Op.between]: [startDate, endDate]
        };
      }

      const holidays = await Holiday.findAll({
        where: whereClause,
        order: [['holidayDate', 'ASC']]
      });

      return holidays;
    } catch (error) {
      logger.error('[Holiday Service] Error fetching holidays:', error);
      return [];
    }
  }

  /**
   * Get holidays by year for calendar view
   */
  async getHolidayCalendar(year: number): Promise<any[]> {
    try {
      const startDate = `${year}-01-01`;
      const endDate = `${year}-12-31`;

      const holidays = await Holiday.findAll({
        where: {
          holidayDate: {
            [Op.between]: [startDate, endDate]
          },
          isActive: true
        },
        order: [['holidayDate', 'ASC']]
      });

      return holidays.map((h: any) => ({
        date: h.holidayDate || h.holiday_date,
        name: h.holidayName || h.holiday_name,
        description: h.description,
        type: h.holidayType || h.holiday_type,
        isRecurring: h.isRecurring || h.is_recurring
      }));
    } catch (error) {
      logger.error('[Holiday Service] Error fetching holiday calendar:', error);
      return [];
    }
  }

  /**
   * Import multiple holidays (bulk upload)
   */
  async bulkImportHolidays(holidays: any[], createdBy: string): Promise<{ success: number; failed: number }> {
    let success = 0;
    let failed = 0;

    for (const holiday of holidays) {
      try {
        await this.createHoliday({
          ...holiday,
          createdBy
        });
        success++;
      } catch (error) {
        failed++;
        logger.error(`[Holiday Service] Failed to import holiday: ${holiday.holidayName}`, error);
      }
    }

    logger.info(`[Holiday Service] Bulk import complete: ${success} success, ${failed} failed`);
    return { success, failed };
  }
}

export const holidayService = new HolidayService();
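
// Usage sketch (illustrative, not part of this file): counting working days in a
// window with the exported singleton. Everything here except holidayService and
// dayjs is invented for the example.
//
//   async function countWorkingDays(start: string, end: string): Promise<number> {
//     let cursor = dayjs(start);
//     let count = 0;
//     while (!cursor.isAfter(dayjs(end), 'day')) {
//       if (await holidayService.isWorkingDay(cursor.toDate())) count++;
//       cursor = cursor.add(1, 'day');
//     }
//     return count;
//   }
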
1098
_archive/services/notification.service.ts
Normal file
File diff suppressed because it is too large
764
_archive/services/pause.service.ts
Normal file
@ -0,0 +1,764 @@

import { WorkflowRequest } from '@models/WorkflowRequest';
import { ApprovalLevel } from '@models/ApprovalLevel';
import { User } from '@models/User';
import { ApprovalStatus, WorkflowStatus } from '../types/common.types';
import { Op } from 'sequelize';
import logger from '@utils/logger';
import { tatSchedulerService } from './tatScheduler.service';
import { calculateElapsedWorkingHours } from '@utils/tatTimeUtils';
import { notificationService } from './notification.service';
import { activityService } from './activity.service';
import dayjs from 'dayjs';
import { emitToRequestRoom } from '../realtime/socket';

export class PauseService {
  /**
   * Pause a workflow at a specific approval level
   * @param requestId - The workflow request ID
   * @param levelId - The approval level ID to pause (optional, pauses current level if not provided)
   * @param userId - The user ID who is pausing
   * @param reason - Reason for pausing
   * @param resumeDate - Date when workflow should auto-resume (max 1 month from now)
   */
  async pauseWorkflow(
    requestId: string,
    levelId: string | null,
    userId: string,
    reason: string,
    resumeDate: Date
  ): Promise<{ workflow: WorkflowRequest; level: ApprovalLevel | null }> {
    try {
      // Validate resume date (max 1 month from now)
      const now = new Date();
      const maxResumeDate = dayjs(now).add(1, 'month').toDate();
      if (resumeDate > maxResumeDate) {
        throw new Error('Resume date cannot be more than 1 month from now');
      }
      if (resumeDate <= now) {
        throw new Error('Resume date must be in the future');
      }

      // Get workflow
      const workflow = await WorkflowRequest.findByPk(requestId);
      if (!workflow) {
        throw new Error('Workflow not found');
      }

      // Check if already paused
      if ((workflow as any).isPaused) {
        throw new Error('Workflow is already paused');
      }

      // Get current approval level
      let level: ApprovalLevel | null = null;
      if (levelId) {
        level = await ApprovalLevel.findByPk(levelId);
        if (!level || (level as any).requestId !== requestId) {
          throw new Error('Approval level not found or does not belong to this workflow');
        }
      } else {
        // Get current active level
        level = await ApprovalLevel.findOne({
          where: {
            requestId,
            status: { [Op.in]: [ApprovalStatus.PENDING, ApprovalStatus.IN_PROGRESS] }
          },
          order: [['levelNumber', 'ASC']]
        });
      }

      if (!level) {
        throw new Error('No active approval level found to pause');
      }

      // Verify user is either the approver for this level OR the initiator
      const isApprover = (level as any).approverId === userId;
      const isInitiator = (workflow as any).initiatorId === userId;

      if (!isApprover && !isInitiator) {
        throw new Error('Only the assigned approver or the initiator can pause this workflow');
      }

      // Check if level is already paused
      if ((level as any).isPaused) {
        throw new Error('This approval level is already paused');
      }

      // Calculate elapsed hours before pause
      const priority = ((workflow as any).priority || 'STANDARD').toString().toLowerCase();

      // Check if this level was previously paused and resumed
      // If so, we need to account for the previous pauseElapsedHours
      // IMPORTANT: Convert to number to avoid string concatenation (DB returns DECIMAL as string)
      const previousPauseElapsedHours = Number((level as any).pauseElapsedHours || 0);
      const previousResumeDate = (level as any).pauseResumeDate;
      const originalTatStartTime = (level as any).pauseTatStartTime || (level as any).levelStartTime || (level as any).tatStartTime || (level as any).createdAt;

      let elapsedHours: number;
      let levelStartTimeForCalculation: Date;

      if (previousPauseElapsedHours > 0 && previousResumeDate) {
        // This is a second (or subsequent) pause
        // Calculate: previous elapsed hours + time from resume to now
        levelStartTimeForCalculation = previousResumeDate; // Start from last resume time
        const timeSinceResume = await calculateElapsedWorkingHours(levelStartTimeForCalculation, now, priority);
        elapsedHours = previousPauseElapsedHours + Number(timeSinceResume);

        logger.info(`[Pause] Second pause detected - Previous elapsed: ${previousPauseElapsedHours}h, Since resume: ${timeSinceResume}h, Total: ${elapsedHours}h`);
      } else {
        // First pause - calculate from original start time
        levelStartTimeForCalculation = originalTatStartTime;
        elapsedHours = await calculateElapsedWorkingHours(levelStartTimeForCalculation, now, priority);
      }

      // Store TAT snapshot
      const tatSnapshot = {
        levelId: (level as any).levelId,
        levelNumber: (level as any).levelNumber,
        elapsedHours: Number(elapsedHours),
        remainingHours: Math.max(0, Number((level as any).tatHours) - elapsedHours),
        tatPercentageUsed: (Number((level as any).tatHours) > 0
          ? Math.min(100, Math.round((elapsedHours / Number((level as any).tatHours)) * 100))
          : 0),
        pausedAt: now.toISOString(),
        originalTatStartTime: originalTatStartTime // Always use the original start time, not the resume time
      };

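      // Worked example (invented numbers): with tatHours = 48 and elapsedHours = 12,
      // the snapshot stores remainingHours = max(0, 48 - 12) = 36 and
      // tatPercentageUsed = min(100, round(12 / 48 * 100)) = 25.
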
      // Update approval level with pause information
      await level.update({
        isPaused: true,
        pausedAt: now,
        pausedBy: userId,
        pauseReason: reason,
        pauseResumeDate: resumeDate,
        pauseTatStartTime: originalTatStartTime, // Always preserve the original start time
        pauseElapsedHours: elapsedHours,
        status: ApprovalStatus.PAUSED
      });

      // Update workflow with pause information
      // Store the current status before pausing so we can restore it on resume
      const currentWorkflowStatus = (workflow as any).status;
      const currentLevel = (workflow as any).currentLevel || (level as any).levelNumber;

      await workflow.update({
        isPaused: true,
        pausedAt: now,
        pausedBy: userId,
        pauseReason: reason,
        pauseResumeDate: resumeDate,
        pauseTatSnapshot: {
          ...tatSnapshot,
          previousStatus: currentWorkflowStatus, // Store previous status for resume
          previousCurrentLevel: currentLevel // Store current level to prevent advancement
        },
        status: WorkflowStatus.PAUSED
        // Note: We do NOT update currentLevel here - it should stay at the paused level
      });

      // Cancel TAT jobs for this level
      await tatSchedulerService.cancelTatJobs(requestId, (level as any).levelId);

      // Get user details for notifications
      const user = await User.findByPk(userId);
      const userName = (user as any)?.displayName || (user as any)?.email || 'User';

      // Get initiator
      const initiator = await User.findByPk((workflow as any).initiatorId);
      const initiatorName = (initiator as any)?.displayName || (initiator as any)?.email || 'User';

      // Send notifications
      const requestNumber = (workflow as any).requestNumber;
      const title = (workflow as any).title;

      // Notify initiator only if someone else (approver) paused the request
      // Skip notification if initiator paused their own request
      if (!isInitiator) {
        await notificationService.sendToUsers([(workflow as any).initiatorId], {
          title: 'Workflow Paused',
          body: `Your request "${title}" has been paused by ${userName}. Reason: ${reason}. Will resume on ${dayjs(resumeDate).format('MMM DD, YYYY')}.`,
          requestId,
          requestNumber,
          url: `/request/${requestNumber}`,
          type: 'workflow_paused',
          priority: 'HIGH',
          actionRequired: false,
          metadata: {
            pauseReason: reason,
            resumeDate: resumeDate.toISOString(),
            pausedBy: userId
          }
        });
      }

      // Notify the user who paused (confirmation) - no email for self-action
      await notificationService.sendToUsers([userId], {
        title: 'Workflow Paused Successfully',
        body: `You have paused request "${title}". It will automatically resume on ${dayjs(resumeDate).format('MMM DD, YYYY')}.`,
        requestId,
        requestNumber,
        url: `/request/${requestNumber}`,
        type: 'status_change', // Use status_change to avoid email for self-action
        priority: 'MEDIUM',
        actionRequired: false
      });

      // If initiator paused, notify the current approver
      if (isInitiator && (level as any).approverId) {
        const approver = await User.findByPk((level as any).approverId);
        const approverUserId = (level as any).approverId;
        await notificationService.sendToUsers([approverUserId], {
          title: 'Workflow Paused by Initiator',
          body: `Request "${title}" has been paused by the initiator (${userName}). Reason: ${reason}. Will resume on ${dayjs(resumeDate).format('MMM DD, YYYY')}.`,
          requestId,
          requestNumber,
          url: `/request/${requestNumber}`,
          type: 'workflow_paused',
          priority: 'HIGH',
          actionRequired: false,
          metadata: {
            pauseReason: reason,
            resumeDate: resumeDate.toISOString(),
            pausedBy: userId
          }
        });
      }

      // Log activity
      await activityService.log({
        requestId,
        type: 'paused',
        user: { userId, name: userName },
        timestamp: now.toISOString(),
        action: 'Workflow Paused',
        details: `Workflow paused by ${userName} at level ${(level as any).levelNumber}. Reason: ${reason}. Will resume on ${dayjs(resumeDate).format('MMM DD, YYYY')}.`,
        metadata: {
          levelId: (level as any).levelId,
          levelNumber: (level as any).levelNumber,
          resumeDate: resumeDate.toISOString()
        }
      });

      logger.info(`[Pause] Workflow ${requestId} paused at level ${(level as any).levelNumber} by ${userId}`);

      // Schedule dedicated auto-resume job for this workflow
      try {
        const { pauseResumeQueue } = require('../queues/pauseResumeQueue');
        if (pauseResumeQueue && resumeDate) {
          const delay = resumeDate.getTime() - now.getTime();

          if (delay > 0) {
            const jobId = `resume-${requestId}-${(level as any).levelId}`;

            await pauseResumeQueue.add(
              'auto-resume-workflow',
              {
                type: 'auto-resume-workflow',
                requestId,
                levelId: (level as any).levelId,
                scheduledResumeDate: resumeDate.toISOString()
              },
              {
                jobId,
                delay, // Exact delay in milliseconds until resume time
                removeOnComplete: true,
                removeOnFail: false
              }
            );

            logger.info(`[Pause] Scheduled dedicated auto-resume job ${jobId} for ${resumeDate.toISOString()} (delay: ${Math.round(delay / 1000 / 60)} minutes)`);
          } else {
            logger.warn(`[Pause] Resume date ${resumeDate.toISOString()} is in the past, skipping job scheduling`);
          }
        }
      } catch (queueError) {
        logger.warn(`[Pause] Could not schedule dedicated auto-resume job:`, queueError);
        // Continue with pause even if job scheduling fails (hourly check will handle it as fallback)
      }

      // Emit real-time update to all users viewing this request
      emitToRequestRoom(requestId, 'request:updated', {
        requestId,
        requestNumber: (workflow as any).requestNumber,
        action: 'PAUSE',
        levelNumber: (level as any).levelNumber,
        timestamp: now.toISOString()
      });

      return { workflow, level };
    } catch (error: any) {
      logger.error(`[Pause] Failed to pause workflow:`, error);
      throw error;
    }
  }

  /**
   * Resume a paused workflow
   * @param requestId - The workflow request ID
   * @param userId - The user ID who is resuming (optional, for manual resume)
   * @param notes - Optional notes for the resume action
   */
  async resumeWorkflow(requestId: string, userId?: string, notes?: string): Promise<{ workflow: WorkflowRequest; level: ApprovalLevel | null }> {
    try {
      const now = new Date();

      // Get workflow
      const workflow = await WorkflowRequest.findByPk(requestId);
      if (!workflow) {
        throw new Error('Workflow not found');
      }

      // Check if paused
      if (!(workflow as any).isPaused) {
        throw new Error('Workflow is not paused');
      }

      // Get paused level
      const level = await ApprovalLevel.findOne({
        where: {
          requestId,
          isPaused: true
        },
        order: [['levelNumber', 'ASC']]
      });

      if (!level) {
        throw new Error('Paused approval level not found');
      }

      // Verify user has permission (if manual resume)
      // Both initiator and current approver can resume the workflow
      if (userId) {
        const isApprover = (level as any).approverId === userId;
        const isInitiator = (workflow as any).initiatorId === userId;

        if (!isApprover && !isInitiator) {
          throw new Error('Only the assigned approver or the initiator can resume this workflow');
        }
      }

      // Capture the pause timestamp now, before the updates below clear it,
      // so the pause duration reported later stays accurate
      const pausedAt = (level as any).pausedAt || (workflow as any).pausedAt;

      // Calculate remaining TAT from resume time
      const priority = ((workflow as any).priority || 'STANDARD').toString().toLowerCase();
      const pauseElapsedHours = Number((level as any).pauseElapsedHours || 0);
      const tatHours = Number((level as any).tatHours);
      const remainingHours = Math.max(0, tatHours - pauseElapsedHours);

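      // Worked example (invented numbers): tatHours = 40 and pauseElapsedHours = 30
      // give remainingHours = max(0, 40 - 30) = 10, so only a quarter of the TAT
      // window is left when jobs are rescheduled below.
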
      // Get which alerts have already been sent (to avoid re-sending on resume)
      const tat50AlertSent = (level as any).tat50AlertSent || false;
      const tat75AlertSent = (level as any).tat75AlertSent || false;
      const tatBreached = (level as any).tatBreached || false;

      // Update approval level - resume TAT
      // IMPORTANT: Keep pauseElapsedHours and store resumedAt (pauseResumeDate repurposed)
      // This allows SLA calculation to correctly add pre-pause elapsed time
      await level.update({
        isPaused: false,
        pausedAt: null as any,
        pausedBy: null as any,
        pauseReason: null as any,
        pauseResumeDate: now, // Store actual resume time (repurposed from scheduled resume date)
        // pauseTatStartTime: null as any, // Keep original TAT start time for reference
        // pauseElapsedHours is intentionally NOT cleared - needed for SLA calculations
        status: ApprovalStatus.IN_PROGRESS,
        tatStartTime: now, // Reset TAT start time to now for new elapsed calculation
        levelStartTime: now // This is the new start time from resume
      });

      // Cancel any scheduled auto-resume job (if exists)
      try {
        const { pauseResumeQueue } = require('../queues/pauseResumeQueue');
        if (pauseResumeQueue) {
          // Try to remove job by specific ID pattern first (more efficient)
          const jobId = `resume-${requestId}-${(level as any).levelId}`;
          try {
            const specificJob = await pauseResumeQueue.getJob(jobId);
            if (specificJob) {
              await specificJob.remove();
              logger.info(`[Pause] Cancelled scheduled auto-resume job ${jobId} for workflow ${requestId}`);
            }
          } catch (err) {
            // Job might not exist, which is fine
          }

          // Also check for any other jobs for this request (fallback for old jobs)
          const scheduledJobs = await pauseResumeQueue.getJobs(['delayed', 'waiting']);
          const otherJobs = scheduledJobs.filter((job: any) =>
            job.data.requestId === requestId && job.id !== jobId
          );
          for (const job of otherJobs) {
            await job.remove();
            logger.info(`[Pause] Cancelled legacy auto-resume job ${job.id} for workflow ${requestId}`);
          }
        }
      } catch (queueError) {
        logger.warn(`[Pause] Could not cancel scheduled auto-resume job:`, queueError);
        // Continue with resume even if job cancellation fails
      }

      // Update workflow - restore previous status or default to PENDING
      const pauseSnapshot = (workflow as any).pauseTatSnapshot || {};
      const previousStatus = pauseSnapshot.previousStatus || WorkflowStatus.PENDING;

      await workflow.update({
        isPaused: false,
        pausedAt: null as any,
        pausedBy: null as any,
        pauseReason: null as any,
        pauseResumeDate: null as any,
        pauseTatSnapshot: null as any,
        status: previousStatus // Restore previous status (PENDING or IN_PROGRESS)
      });

      // Reschedule TAT jobs from resume time - only for alerts that haven't been sent yet
      if (remainingHours > 0) {
        // Calculate which thresholds are still pending based on remaining time
        const percentageUsedAtPause = tatHours > 0 ? (pauseElapsedHours / tatHours) * 100 : 0;

        // Only schedule jobs for thresholds that:
        // 1. Haven't been sent yet
        // 2. Haven't been passed yet (based on percentage used at pause)
        await tatSchedulerService.scheduleTatJobsOnResume(
          requestId,
          (level as any).levelId,
          (level as any).approverId,
          remainingHours, // Remaining TAT hours
          now, // Start from now
          priority as any,
          {
            // Pass which alerts were already sent
            tat50AlertSent: tat50AlertSent,
            tat75AlertSent: tat75AlertSent,
            tatBreached: tatBreached,
            // Pass percentage used at pause to determine which thresholds are still relevant
            percentageUsedAtPause: percentageUsedAtPause
          }
        );
      }

      // Get user details
      const resumeUser = userId ? await User.findByPk(userId) : null;
      const resumeUserName = resumeUser
        ? ((resumeUser as any)?.displayName || (resumeUser as any)?.email || 'User')
        : 'System (Auto-resume)';

      // Get initiator and paused by user
      const initiator = await User.findByPk((workflow as any).initiatorId);
      const initiatorName = (initiator as any)?.displayName || (initiator as any)?.email || 'User';
      const pausedByUser = (workflow as any).pausedBy
        ? await User.findByPk((workflow as any).pausedBy)
        : null;
      const pausedByName = pausedByUser
        ? ((pausedByUser as any)?.displayName || (pausedByUser as any)?.email || 'User')
        : 'Unknown';

      const requestNumber = (workflow as any).requestNumber;
      const title = (workflow as any).title;
      const initiatorId = (workflow as any).initiatorId;
      const approverId = (level as any).approverId;
      const isResumedByInitiator = userId === initiatorId;
      const isResumedByApprover = userId === approverId;

      // Calculate pause duration (pausedAt was captured before the resets above)
      const pauseDurationMs = pausedAt ? now.getTime() - new Date(pausedAt).getTime() : 0;
      const pauseDurationHours = Math.round((pauseDurationMs / (1000 * 60 * 60)) * 100) / 100; // Round to 2 decimal places
      const pauseDuration = pauseDurationHours > 0 ? `${pauseDurationHours} hours` : 'less than 1 hour';

      // Notify initiator only if someone else resumed (or auto-resume)
      // Skip if initiator resumed their own request
      if (!isResumedByInitiator) {
        await notificationService.sendToUsers([initiatorId], {
          title: 'Workflow Resumed',
          body: `Your request "${title}" has been resumed ${userId ? `by ${resumeUserName}` : 'automatically'}.`,
          requestId,
          requestNumber,
          url: `/request/${requestNumber}`,
          type: 'workflow_resumed',
          priority: 'HIGH',
          actionRequired: false,
          metadata: {
            resumedBy: userId ? { userId, name: resumeUserName } : null,
            pauseDuration: pauseDuration
          }
        });
      }

      // Notify approver only if someone else resumed (or auto-resume)
      // Skip if approver resumed the request themselves
      if (!isResumedByApprover && approverId) {
        await notificationService.sendToUsers([approverId], {
          title: 'Workflow Resumed',
          body: `Request "${title}" has been resumed ${userId ? `by ${resumeUserName}` : 'automatically'}. Please continue with your review.`,
          requestId,
          requestNumber,
          url: `/request/${requestNumber}`,
          type: 'workflow_resumed',
          priority: 'HIGH',
          actionRequired: true,
          metadata: {
            resumedBy: userId ? { userId, name: resumeUserName } : null,
            pauseDuration: pauseDuration
          }
        });
      }

      // Send confirmation to the user who resumed (if manual resume) - no email for self-action
      if (userId) {
        await notificationService.sendToUsers([userId], {
          title: 'Workflow Resumed Successfully',
          body: `You have resumed request "${title}". ${isResumedByApprover ? 'Please continue with your review.' : ''}`,
          requestId,
          requestNumber,
          url: `/request/${requestNumber}`,
          type: 'status_change', // Use status_change to avoid email for self-action
          priority: 'MEDIUM',
          actionRequired: isResumedByApprover
        });
      }

      // Log activity with notes
      const resumeDetails = notes
        ? `Workflow resumed ${userId ? `by ${resumeUserName}` : 'automatically'} at level ${(level as any).levelNumber}. Notes: ${notes}`
        : `Workflow resumed ${userId ? `by ${resumeUserName}` : 'automatically'} at level ${(level as any).levelNumber}.`;

      await activityService.log({
        requestId,
        type: 'resumed',
        user: userId ? { userId, name: resumeUserName } : undefined,
        timestamp: now.toISOString(),
        action: 'Workflow Resumed',
        details: resumeDetails,
        metadata: {
          levelId: (level as any).levelId,
          levelNumber: (level as any).levelNumber,
          wasAutoResume: !userId,
          notes: notes || null
        }
      });

      logger.info(`[Pause] Workflow ${requestId} resumed ${userId ? `by ${userId}` : 'automatically'}`);

      // Emit real-time update to all users viewing this request
      emitToRequestRoom(requestId, 'request:updated', {
        requestId,
        requestNumber: (workflow as any).requestNumber,
        action: 'RESUME',
        levelNumber: (level as any).levelNumber,
        timestamp: now.toISOString()
      });

      return { workflow, level };
    } catch (error: any) {
      logger.error(`[Pause] Failed to resume workflow:`, error);
      throw error;
    }
  }

  /**
   * Cancel pause (for retrigger scenario - initiator requests approver to resume)
   * This sends a notification to the approver who paused it
   * @param requestId - The workflow request ID
   * @param userId - The initiator user ID
   */
  async retriggerPause(requestId: string, userId: string): Promise<void> {
    try {
      const workflow = await WorkflowRequest.findByPk(requestId);
      if (!workflow) {
        throw new Error('Workflow not found');
      }

      if (!(workflow as any).isPaused) {
        throw new Error('Workflow is not paused');
      }

      // Verify user is initiator
      if ((workflow as any).initiatorId !== userId) {
        throw new Error('Only the initiator can retrigger a pause');
      }

      const pausedBy = (workflow as any).pausedBy;
      if (!pausedBy) {
        throw new Error('Cannot retrigger - no approver found who paused this workflow');
      }

      // Get user details
      const initiator = await User.findByPk(userId);
      const initiatorName = (initiator as any)?.displayName || (initiator as any)?.email || 'User';

      // Get approver details (who paused the workflow)
      const approver = await User.findByPk(pausedBy);
      const approverName = (approver as any)?.displayName || (approver as any)?.email || 'Approver';

      const requestNumber = (workflow as any).requestNumber;
      const title = (workflow as any).title;

      // Notify approver who paused it
      await notificationService.sendToUsers([pausedBy], {
        title: 'Pause Retrigger Request',
        body: `${initiatorName} is requesting you to cancel the pause and resume work on request "${title}".`,
        requestId,
        requestNumber,
        url: `/request/${requestNumber}`,
        type: 'pause_retrigger_request',
        priority: 'HIGH',
        actionRequired: true
      });

      // Log activity with approver name
      await activityService.log({
        requestId,
        type: 'pause_retriggered',
        user: { userId, name: initiatorName },
        timestamp: new Date().toISOString(),
        action: 'Pause Retrigger Requested',
        details: `${initiatorName} requested ${approverName} to cancel the pause and resume work.`,
        metadata: {
          pausedBy,
          approverName
        }
      });

      logger.info(`[Pause] Pause retrigger requested for workflow ${requestId} by initiator ${userId}`);
    } catch (error: any) {
      logger.error(`[Pause] Failed to retrigger pause:`, error);
      throw error;
    }
  }

  /**
   * Get pause details for a workflow
   */
  async getPauseDetails(requestId: string): Promise<any> {
    try {
      const workflow = await WorkflowRequest.findByPk(requestId);
      if (!workflow) {
        throw new Error('Workflow not found');
      }

      if (!(workflow as any).isPaused) {
        return null;
      }

      const level = await ApprovalLevel.findOne({
        where: {
          requestId,
          isPaused: true
        }
      });

      const pausedByUser = (workflow as any).pausedBy
        ? await User.findByPk((workflow as any).pausedBy, { attributes: ['userId', 'email', 'displayName'] })
        : null;

      return {
        isPaused: true,
        pausedAt: (workflow as any).pausedAt,
        pausedBy: pausedByUser ? {
          userId: (pausedByUser as any).userId,
          email: (pausedByUser as any).email,
          name: (pausedByUser as any).displayName || (pausedByUser as any).email
        } : null,
        pauseReason: (workflow as any).pauseReason,
        pauseResumeDate: (workflow as any).pauseResumeDate,
        level: level ? {
          levelId: (level as any).levelId,
          levelNumber: (level as any).levelNumber,
          approverName: (level as any).approverName
        } : null
      };
    } catch (error: any) {
      logger.error(`[Pause] Failed to get pause details:`, error);
      throw error;
    }
  }

  /**
   * Check and auto-resume paused workflows whose resume date has passed
   * This is called by a scheduled job
   */
  async checkAndResumePausedWorkflows(): Promise<number> {
    try {
      const now = new Date();

      // Find all paused workflows where resume date has passed
      // Handle backward compatibility: workflow_type column may not exist in old environments
      let pausedWorkflows: WorkflowRequest[];
      try {
        pausedWorkflows = await WorkflowRequest.findAll({
          where: {
            isPaused: true,
            pauseResumeDate: {
              [Op.lte]: now
            }
          }
        });
      } catch (error: any) {
        // If error is due to missing workflow_type column, use raw query
        if (error.message?.includes('workflow_type') || (error.message?.includes('column') && error.message?.includes('does not exist'))) {
          logger.warn('[Pause] workflow_type column not found, using raw query for backward compatibility');
          const { sequelize } = await import('../config/database');
          const { QueryTypes } = await import('sequelize');
          const results = await sequelize.query(`
            SELECT request_id, is_paused, pause_resume_date
            FROM workflow_requests
            WHERE is_paused = true
              AND pause_resume_date <= :now
          `, {
            replacements: { now },
            type: QueryTypes.SELECT
          });

          // Convert to WorkflowRequest-like objects
          // results is an array of objects from SELECT query
          pausedWorkflows = (results as any[]).map((r: any) => ({
            requestId: r.request_id,
            isPaused: r.is_paused,
            pauseResumeDate: r.pause_resume_date
          })) as any;
        } else {
          throw error; // Re-throw if it's a different error
        }
      }

      let resumedCount = 0;
      for (const workflow of pausedWorkflows) {
        try {
          await this.resumeWorkflow((workflow as any).requestId);
          resumedCount++;
        } catch (error: any) {
          logger.error(`[Pause] Failed to auto-resume workflow ${(workflow as any).requestId}:`, error);
          // Continue with other workflows
        }
      }

      if (resumedCount > 0) {
        logger.info(`[Pause] Auto-resumed ${resumedCount} workflow(s)`);
      }

      return resumedCount;
    } catch (error: any) {
      logger.error(`[Pause] Failed to check and resume paused workflows:`, error);
      throw error;
    }
  }

  /**
   * Get all paused workflows (for admin/reporting)
   */
  async getPausedWorkflows(): Promise<WorkflowRequest[]> {
    try {
      return await WorkflowRequest.findAll({
        where: {
          isPaused: true
        },
        order: [['pausedAt', 'DESC']]
      });
    } catch (error: any) {
      logger.error(`[Pause] Failed to get paused workflows:`, error);
      throw error;
    }
  }
}

export const pauseService = new PauseService();
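
// Usage sketch (illustrative, not part of this file): a typical pause/resume
// round-trip with the exported singleton. The IDs and reason are invented.
//
//   const resumeOn = dayjs().add(5, 'day').toDate();
//   await pauseService.pauseWorkflow('req-123', null, 'user-9', 'Awaiting vendor docs', resumeOn);
//   // ...later, either the scheduled job fires or a participant resumes manually:
//   await pauseService.resumeWorkflow('req-123', 'user-9', 'Docs received');
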
383
_archive/services/tatScheduler.service.ts
Normal file
@ -0,0 +1,383 @@

import { tatQueue } from '../queues/tatQueue';
import { calculateDelay, addWorkingHours, addWorkingHoursExpress } from '@utils/tatTimeUtils';
import { getTatThresholds } from './configReader.service';
import dayjs from 'dayjs';
import logger, { logTATEvent } from '@utils/logger';
import { Priority } from '../types/common.types';

export class TatSchedulerService {
  /**
   * Schedule TAT notification jobs for an approval level
   * @param requestId - The workflow request ID
   * @param levelId - The approval level ID
   * @param approverId - The approver user ID
   * @param tatDurationHours - TAT duration in hours
   * @param startTime - Optional start time (defaults to now)
   * @param priority - Request priority (EXPRESS = 24/7, STANDARD = working hours only)
   */
  async scheduleTatJobs(
    requestId: string,
    levelId: string,
    approverId: string,
    tatDurationHours: number,
    startTime?: Date,
    priority: Priority = Priority.STANDARD
  ): Promise<void> {
    try {
      // Check if tatQueue is available
      if (!tatQueue) {
        logger.warn(`[TAT Scheduler] TAT queue not available (Redis not connected). Skipping TAT job scheduling.`);
        return;
      }

      const now = startTime || new Date();
      // Handle both enum and string (case-insensitive) priority values
      const priorityStr = typeof priority === 'string' ? priority.toUpperCase() : priority;
      const isExpress = priorityStr === Priority.EXPRESS || priorityStr === 'EXPRESS';

      // Get current thresholds from database configuration
      const thresholds = await getTatThresholds();

      // Calculate milestone times using configured thresholds
      // EXPRESS mode: 24/7 calculation (includes holidays, weekends, non-working hours)
      // STANDARD mode: Working hours only (excludes holidays, weekends, non-working hours)
      let threshold1Time: Date;
      let threshold2Time: Date;
      let breachTime: Date;

      if (isExpress) {
        // EXPRESS: All calendar days (Mon-Sun, including weekends/holidays) but working hours only (9 AM - 6 PM)
        const t1 = await addWorkingHoursExpress(now, tatDurationHours * (thresholds.first / 100));
        const t2 = await addWorkingHoursExpress(now, tatDurationHours * (thresholds.second / 100));
        const tBreach = await addWorkingHoursExpress(now, tatDurationHours);
        threshold1Time = t1.toDate();
        threshold2Time = t2.toDate();
        breachTime = tBreach.toDate();
      } else {
        // STANDARD: Working days only (Mon-Fri), working hours (9 AM - 6 PM), excludes holidays
        const t1 = await addWorkingHours(now, tatDurationHours * (thresholds.first / 100));
        const t2 = await addWorkingHours(now, tatDurationHours * (thresholds.second / 100));
        const tBreach = await addWorkingHours(now, tatDurationHours);
        threshold1Time = t1.toDate();
        threshold2Time = t2.toDate();
        breachTime = tBreach.toDate();
      }

logger.info(`[TAT Scheduler] Scheduling TAT jobs - Request: ${requestId}, Priority: ${priority}, TAT: ${tatDurationHours}h`);
|
||||||
|
|
||||||
|
const jobs = [
|
||||||
|
{
|
||||||
|
type: 'threshold1' as const,
|
||||||
|
threshold: thresholds.first,
|
||||||
|
delay: calculateDelay(threshold1Time),
|
||||||
|
targetTime: threshold1Time
|
||||||
|
},
|
||||||
|
{
|
||||||
|
type: 'threshold2' as const,
|
||||||
|
threshold: thresholds.second,
|
||||||
|
delay: calculateDelay(threshold2Time),
|
||||||
|
targetTime: threshold2Time
|
||||||
|
},
|
||||||
|
{
|
||||||
|
type: 'breach' as const,
|
||||||
|
threshold: 100,
|
||||||
|
delay: calculateDelay(breachTime),
|
||||||
|
targetTime: breachTime
|
||||||
|
}
|
||||||
|
];
|
||||||
|
|
||||||
|
|
||||||
|
// Check if test mode enabled (1 hour = 1 minute)
|
||||||
|
const isTestMode = process.env.TAT_TEST_MODE === 'true';
|
||||||
|
|
||||||
|
// Check if times collide (working hours calculation issue)
|
||||||
|
const uniqueTimes = new Set(jobs.map(j => j.targetTime.getTime()));
|
||||||
|
const hasCollision = uniqueTimes.size < jobs.length;
|
||||||
|
|
||||||
|
let jobIndex = 0;
|
||||||
|
for (const job of jobs) {
|
||||||
|
if (job.delay < 0) {
|
||||||
|
logger.error(`[TAT Scheduler] Skipping ${job.type} - time in past`);
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
let spacedDelay: number;
|
||||||
|
|
||||||
|
if (isTestMode) {
|
||||||
|
// Test mode: times are already in minutes (tatTimeUtils converts hours to minutes)
|
||||||
|
// Just ensure they have minimum spacing for BullMQ reliability
|
||||||
|
spacedDelay = Math.max(job.delay, 5000) + (jobIndex * 5000);
|
||||||
|
} else if (hasCollision) {
|
||||||
|
// Production with collision: add 5-minute spacing
|
||||||
|
spacedDelay = job.delay + (jobIndex * 300000);
|
||||||
|
} else {
|
||||||
|
// Production without collision: use calculated delays
|
||||||
|
spacedDelay = job.delay;
|
||||||
|
}
|
||||||
|
|
||||||
|
const jobId = `tat-${job.type}-${requestId}-${levelId}`;
|
||||||
|
|
||||||
|
await tatQueue.add(
|
||||||
|
job.type,
|
||||||
|
{
|
||||||
|
type: job.type,
|
||||||
|
threshold: job.threshold,
|
||||||
|
requestId,
|
||||||
|
levelId,
|
||||||
|
approverId
|
||||||
|
},
|
||||||
|
{
|
||||||
|
delay: spacedDelay,
|
||||||
|
jobId: jobId,
|
||||||
|
removeOnComplete: {
|
||||||
|
age: 3600, // Keep for 1 hour for debugging
|
||||||
|
count: 1000
|
||||||
|
},
|
||||||
|
removeOnFail: false
|
||||||
|
}
|
||||||
|
);
|
||||||
|
|
||||||
|
jobIndex++;
|
||||||
|
}
|
||||||
|
|
||||||
|
logTATEvent('warning', requestId, {
|
||||||
|
level: parseInt(levelId.split('-').pop() || '1'),
|
||||||
|
tatHours: tatDurationHours,
|
||||||
|
priority,
|
||||||
|
message: 'TAT jobs scheduled',
|
||||||
|
});
|
||||||
|
} catch (error) {
|
||||||
|
logger.error(`[TAT Scheduler] Failed to schedule TAT jobs:`, error);
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
|
}
|

  /**
   * Schedule TAT jobs on resume - only schedules jobs for alerts that haven't been sent yet
   * @param requestId - The workflow request ID
   * @param levelId - The approval level ID
   * @param approverId - The approver user ID
   * @param remainingTatHours - Remaining TAT duration in hours (from resume point)
   * @param startTime - Resume start time
   * @param priority - Request priority
   * @param alertStatus - Object indicating which alerts have already been sent and the percentage used at pause
   */
  async scheduleTatJobsOnResume(
    requestId: string,
    levelId: string,
    approverId: string,
    remainingTatHours: number,
    startTime: Date,
    priority: Priority = Priority.STANDARD,
    alertStatus: {
      tat50AlertSent: boolean;
      tat75AlertSent: boolean;
      tatBreached: boolean;
      percentageUsedAtPause: number;
    }
  ): Promise<void> {
    try {
      if (!tatQueue) {
        logger.warn(`[TAT Scheduler] TAT queue not available (Redis not connected). Skipping TAT job scheduling on resume.`);
        return;
      }

      const now = startTime;
      // Handle both enum and string (case-insensitive) priority values
      const priorityStr = typeof priority === 'string' ? priority.toUpperCase() : priority;
      const isExpress = priorityStr === Priority.EXPRESS || priorityStr === 'EXPRESS';

      // Get current thresholds from database configuration
      const thresholds = await getTatThresholds();

      // Calculate original TAT from remaining + elapsed
      // Example: If 35 min used (58.33%) and 25 min remaining, original TAT = 60 min
      const elapsedHours = alertStatus.percentageUsedAtPause > 0
        ? (remainingTatHours * alertStatus.percentageUsedAtPause) / (100 - alertStatus.percentageUsedAtPause)
        : 0;
      const originalTatHours = elapsedHours + remainingTatHours;

      logger.info(`[TAT Scheduler] Resuming TAT scheduling - Request: ${requestId}, Remaining: ${(remainingTatHours * 60).toFixed(1)} min, Priority: ${isExpress ? 'EXPRESS' : 'STANDARD'}`);

      // Jobs to schedule - only include those that haven't been sent and haven't been passed
      const jobsToSchedule: Array<{
        type: 'threshold1' | 'threshold2' | 'breach';
        threshold: number;
        alreadySent: boolean;
        alreadyPassed: boolean;
        hoursFromNow: number;
      }> = [];

      // Threshold 1 (e.g., 50%)
      // Skip if: already sent OR already passed the threshold
      if (!alertStatus.tat50AlertSent && alertStatus.percentageUsedAtPause < thresholds.first) {
        // Calculate: How many hours from NOW until we reach this threshold?
        // Formula: thresholdHours - elapsedHours, where thresholdHours = originalTatHours * (threshold / 100)
        const thresholdHours = originalTatHours * (thresholds.first / 100);
        const hoursFromNow = thresholdHours - elapsedHours;

        if (hoursFromNow > 0) {
          jobsToSchedule.push({
            type: 'threshold1',
            threshold: thresholds.first,
            alreadySent: false,
            alreadyPassed: false,
            hoursFromNow: hoursFromNow
          });
        }
      }

      // Threshold 2 (e.g., 75%)
      if (!alertStatus.tat75AlertSent && alertStatus.percentageUsedAtPause < thresholds.second) {
        const thresholdHours = originalTatHours * (thresholds.second / 100);
        const hoursFromNow = thresholdHours - elapsedHours;

        if (hoursFromNow > 0) {
          jobsToSchedule.push({
            type: 'threshold2',
            threshold: thresholds.second,
            alreadySent: false,
            alreadyPassed: false,
            hoursFromNow: hoursFromNow
          });
        }
      }

      // Breach (100%)
      if (!alertStatus.tatBreached) {
        // Breach is always scheduled for the end of remaining TAT
        jobsToSchedule.push({
          type: 'breach',
          threshold: 100,
          alreadySent: false,
          alreadyPassed: false,
          hoursFromNow: remainingTatHours
        });
      }

      if (jobsToSchedule.length === 0) {
        logger.info(`[TAT Scheduler] No TAT jobs to schedule (all alerts already sent)`);
        return;
      }

      // Calculate actual times and schedule jobs
      for (const job of jobsToSchedule) {
        let targetTime: Date;

        if (isExpress) {
          targetTime = (await addWorkingHoursExpress(now, job.hoursFromNow)).toDate();
        } else {
          targetTime = (await addWorkingHours(now, job.hoursFromNow)).toDate();
        }

        const delay = calculateDelay(targetTime);

        if (delay < 0) {
          logger.warn(`[TAT Scheduler] Skipping ${job.type} - calculated time is in past`);
          continue;
        }

        const jobId = `tat-${job.type}-${requestId}-${levelId}`;

        await tatQueue.add(
          job.type,
          {
            type: job.type,
            threshold: job.threshold,
            requestId,
            levelId,
            approverId
          },
          {
            delay: delay,
            jobId: jobId,
            removeOnComplete: {
              age: 3600,
              count: 1000
            },
            removeOnFail: false
          }
        );

        logger.info(`[TAT Scheduler] ✓ Scheduled ${job.type} (${job.threshold}%) for ${dayjs(targetTime).format('YYYY-MM-DD HH:mm')}`);
      }

      logger.info(`[TAT Scheduler] ✅ ${jobsToSchedule.length} TAT job(s) scheduled for request ${requestId}`);
    } catch (error) {
      logger.error(`[TAT Scheduler] Failed to schedule TAT jobs on resume:`, error);
      throw error;
    }
  }

  /**
   * Cancel TAT jobs for a specific approval level
   * Useful when an approver acts before TAT expires
   * @param requestId - The workflow request ID
   * @param levelId - The approval level ID
   */
  async cancelTatJobs(requestId: string, levelId: string): Promise<void> {
    try {
      // Check if tatQueue is available
      if (!tatQueue) {
        logger.warn(`[TAT Scheduler] TAT queue not available. Skipping job cancellation.`);
        return;
      }

      // Use generic job names that don't depend on threshold percentages
      const jobIds = [
        `tat-threshold1-${requestId}-${levelId}`,
        `tat-threshold2-${requestId}-${levelId}`,
        `tat-breach-${requestId}-${levelId}`
      ];

      for (const jobId of jobIds) {
        try {
          const job = await tatQueue.getJob(jobId);
          if (job) {
            await job.remove();
            logger.info(`[TAT Scheduler] Cancelled job ${jobId}`);
          }
        } catch (error) {
          // Job might not exist, which is fine
          logger.debug(`[TAT Scheduler] Job ${jobId} not found (may have already been processed)`);
        }
      }

      logger.info(`[TAT Scheduler] ✅ TAT jobs cancelled for level ${levelId}`);
    } catch (error) {
      logger.error(`[TAT Scheduler] Failed to cancel TAT jobs:`, error);
      // Don't throw - cancellation failure shouldn't break the workflow
    }
  }

  /**
   * Cancel all TAT jobs for a workflow request
   * @param requestId - The workflow request ID
   */
  async cancelAllTatJobsForRequest(requestId: string): Promise<void> {
    try {
      // Check if tatQueue is available
      if (!tatQueue) {
        logger.warn(`[TAT Scheduler] TAT queue not available. Skipping job cancellation.`);
        return;
      }

      const jobs = await tatQueue.getJobs(['delayed', 'waiting']);
      const requestJobs = jobs.filter(job => job.data.requestId === requestId);

      for (const job of requestJobs) {
        await job.remove();
        logger.info(`[TAT Scheduler] Cancelled job ${job.id}`);
      }

      logger.info(`[TAT Scheduler] ✅ All TAT jobs cancelled for request ${requestId}`);
    } catch (error) {
      logger.error(`[TAT Scheduler] Failed to cancel all TAT jobs:`, error);
      // Don't throw - cancellation failure shouldn't break the workflow
    }
  }
}

export const tatSchedulerService = new TatSchedulerService();

3449
_archive/services/workflow.service.ts
Normal file
File diff suppressed because it is too large
Load Diff
446
_archive/services/worknote.service.ts
Normal file
@ -0,0 +1,446 @@
import { Op } from 'sequelize';
import { WorkNote } from '@models/WorkNote';
import { WorkNoteAttachment } from '@models/WorkNoteAttachment';
import { Participant } from '@models/Participant';
import { WorkflowRequest } from '@models/WorkflowRequest';
import { User } from '@models/User';
import { ApprovalLevel } from '@models/ApprovalLevel';
import { activityService } from './activity.service';
import { notificationService } from './notification.service';
import { emailNotificationService } from './emailNotification.service';
import { gcsStorageService } from './gcsStorage.service';
import logger from '@utils/logger';
import fs from 'fs';
import path from 'path';

export class WorkNoteService {
  async list(requestId: string) {
    const notes = await WorkNote.findAll({
      where: { requestId },
      order: [['created_at' as any, 'ASC']]
    });

    // Load attachments for each note
    const enriched = await Promise.all(notes.map(async (note) => {
      const noteId = (note as any).noteId;
      const attachments = await WorkNoteAttachment.findAll({
        where: { noteId }
      });

      const noteData = (note as any).toJSON();

      const mappedAttachments = attachments.map((a: any) => {
        const attData = typeof a.toJSON === 'function' ? a.toJSON() : a;
        return {
          attachmentId: attData.attachmentId || attData.attachment_id,
          fileName: attData.fileName || attData.file_name,
          fileType: attData.fileType || attData.file_type,
          fileSize: attData.fileSize || attData.file_size,
          filePath: attData.filePath || attData.file_path,
          storageUrl: attData.storageUrl || attData.storage_url,
          isDownloadable: attData.isDownloadable || attData.is_downloadable,
          uploadedAt: attData.uploadedAt || attData.uploaded_at
        };
      });

      return {
        noteId: noteData.noteId || noteData.note_id,
        requestId: noteData.requestId || noteData.request_id,
        userId: noteData.userId || noteData.user_id,
        userName: noteData.userName || noteData.user_name,
        userRole: noteData.userRole || noteData.user_role,
        message: noteData.message,
        isPriority: noteData.isPriority || noteData.is_priority,
        hasAttachment: noteData.hasAttachment || noteData.has_attachment,
        createdAt: noteData.createdAt || noteData.created_at,
        updatedAt: noteData.updatedAt || noteData.updated_at,
        attachments: mappedAttachments
      };
    }));

    return enriched;
  }

  async getUserRole(requestId: string, userId: string): Promise<string> {
    try {
      const participant = await Participant.findOne({
        where: { requestId, userId }
      });
      if (participant) {
        const type = (participant as any).participantType || (participant as any).participant_type;
        return type ? type.toString() : 'Participant';
      }
      return 'Participant';
    } catch (error) {
      logger.error('[WorkNote] Error fetching user role:', error);
      return 'Participant';
    }
  }

  async create(
    requestId: string,
    user: { userId: string; name?: string; role?: string },
    payload: { message: string; isPriority?: boolean; parentNoteId?: string | null; mentionedUsers?: string[] | null; },
    files?: Array<{ path?: string | null; buffer?: Buffer; originalname: string; mimetype: string; size: number }>,
    requestMetadata?: { ipAddress?: string | null; userAgent?: string | null }
  ): Promise<any> {
    logger.info('[WorkNote] Creating note:', { requestId, user, messageLength: payload.message?.length });

    const note = await WorkNote.create({
      requestId,
      userId: user.userId,
      userName: user.name || null,
      userRole: user.role || null, // Store participant type (INITIATOR/APPROVER/SPECTATOR)
      message: payload.message,
      isPriority: !!payload.isPriority,
      parentNoteId: payload.parentNoteId || null,
      mentionedUsers: payload.mentionedUsers || null,
      hasAttachment: files && files.length > 0 ? true : false
    } as any);

    logger.info('[WorkNote] Created note:', {
      noteId: (note as any).noteId,
      userId: (note as any).userId,
      userName: (note as any).userName,
      userRole: (note as any).userRole
    });

    const attachments = [];
    if (files && files.length) {
      // Get request number for folder structure
      const workflow = await WorkflowRequest.findOne({ where: { requestId } });
      const requestNumber = workflow ? ((workflow as any).requestNumber || (workflow as any).request_number) : null;

      for (const f of files) {
        // Read file buffer if path exists, otherwise use provided buffer
        const fileBuffer = f.buffer || (f.path ? fs.readFileSync(f.path) : Buffer.from(''));

        // Upload with automatic fallback to local storage
        // If requestNumber is not available, use a default structure
        const effectiveRequestNumber = requestNumber || 'UNKNOWN';
        const uploadResult = await gcsStorageService.uploadFileWithFallback({
          buffer: fileBuffer,
          originalName: f.originalname,
          mimeType: f.mimetype,
          requestNumber: effectiveRequestNumber,
          fileType: 'attachments'
        });

        const storageUrl = uploadResult.storageUrl;
        const gcsFilePath = uploadResult.filePath;

        // Clean up local temporary file if it exists (from multer disk storage)
        if (f.path && fs.existsSync(f.path)) {
          try {
            fs.unlinkSync(f.path);
          } catch (unlinkError) {
            logger.warn('[WorkNote] Failed to delete local temporary file:', unlinkError);
          }
        }

        const attachment = await WorkNoteAttachment.create({
          noteId: (note as any).noteId,
          fileName: f.originalname,
          fileType: f.mimetype,
          fileSize: f.size,
          filePath: gcsFilePath, // Store GCS path or local path
          storageUrl: storageUrl, // Store GCS URL or local URL
          isDownloadable: true
        } as any);

        attachments.push({
          attachmentId: (attachment as any).attachmentId,
          fileName: (attachment as any).fileName,
          fileType: (attachment as any).fileType,
          fileSize: (attachment as any).fileSize,
          filePath: (attachment as any).filePath,
          storageUrl: (attachment as any).storageUrl,
          isDownloadable: (attachment as any).isDownloadable
        });
      }

      // Send notifications for additional document added via work notes
      if (attachments.length > 0) {
        try {
          const workflow = await WorkflowRequest.findOne({ where: { requestId } });
          if (workflow) {
            const initiatorId = (workflow as any).initiatorId || (workflow as any).initiator_id;
            const isInitiator = user.userId === initiatorId;

            // Get all participants (spectators)
            const spectators = await Participant.findAll({
              where: {
                requestId,
                participantType: 'SPECTATOR'
              },
              include: [{
                model: User,
                as: 'user',
                attributes: ['userId', 'email', 'displayName']
              }]
            });

            // Get current approver (pending or in-progress approval level)
            const currentApprovalLevel = await ApprovalLevel.findOne({
              where: {
                requestId,
                status: { [Op.in]: ['PENDING', 'IN_PROGRESS'] }
              },
              order: [['levelNumber', 'ASC']],
              include: [{
                model: User,
                as: 'approver',
                attributes: ['userId', 'email', 'displayName']
              }]
            });

            // Determine who to notify based on who uploaded
            const recipientsToNotify: Array<{ userId: string; email: string; displayName: string }> = [];

            if (isInitiator) {
              // Initiator added → notify spectators and current approver
              spectators.forEach((spectator: any) => {
                const spectatorUser = spectator.user || spectator.User;
                if (spectatorUser && spectatorUser.userId !== user.userId) {
                  recipientsToNotify.push({
                    userId: spectatorUser.userId,
                    email: spectatorUser.email,
                    displayName: spectatorUser.displayName || spectatorUser.email
                  });
                }
              });

              if (currentApprovalLevel) {
                const approverUser = (currentApprovalLevel as any).approver || (currentApprovalLevel as any).Approver;
                if (approverUser && approverUser.userId !== user.userId) {
                  recipientsToNotify.push({
                    userId: approverUser.userId,
                    email: approverUser.email,
                    displayName: approverUser.displayName || approverUser.email
                  });
                }
              }
            } else {
              // Check if uploader is a spectator
              const uploaderParticipant = await Participant.findOne({
                where: {
                  requestId,
                  userId: user.userId,
                  participantType: 'SPECTATOR'
                }
              });

              if (uploaderParticipant) {
                // Spectator added → notify initiator and current approver
                const initiator = await User.findByPk(initiatorId);
                if (initiator) {
                  const initiatorData = initiator.toJSON();
                  if (initiatorData.userId !== user.userId) {
                    recipientsToNotify.push({
                      userId: initiatorData.userId,
                      email: initiatorData.email,
                      displayName: initiatorData.displayName || initiatorData.email
                    });
                  }
                }

                if (currentApprovalLevel) {
                  const approverUser = (currentApprovalLevel as any).approver || (currentApprovalLevel as any).Approver;
                  if (approverUser && approverUser.userId !== user.userId) {
                    recipientsToNotify.push({
                      userId: approverUser.userId,
                      email: approverUser.email,
                      displayName: approverUser.displayName || approverUser.email
                    });
                  }
                }
              } else {
                // Approver added → notify initiator and spectators
                const initiator = await User.findByPk(initiatorId);
                if (initiator) {
                  const initiatorData = initiator.toJSON();
                  if (initiatorData.userId !== user.userId) {
                    recipientsToNotify.push({
                      userId: initiatorData.userId,
                      email: initiatorData.email,
                      displayName: initiatorData.displayName || initiatorData.email
                    });
                  }
                }

                spectators.forEach((spectator: any) => {
                  const spectatorUser = spectator.user || spectator.User;
                  if (spectatorUser && spectatorUser.userId !== user.userId) {
                    recipientsToNotify.push({
                      userId: spectatorUser.userId,
                      email: spectatorUser.email,
                      displayName: spectatorUser.displayName || spectatorUser.email
                    });
                  }
                });
              }
            }

            // Send notifications (email, in-app, and web-push)
            const requestNumber = (workflow as any).requestNumber || requestId;
            const requestData = {
              requestNumber: requestNumber,
              requestId: requestId,
              title: (workflow as any).title || 'Request'
            };

            // Prepare user IDs for in-app and web-push notifications
            const recipientUserIds = recipientsToNotify.map(r => r.userId);

            // Send in-app and web-push notifications for each attachment
            if (recipientUserIds.length > 0 && attachments.length > 0) {
              try {
                for (const attachment of attachments) {
                  await notificationService.sendToUsers(
                    recipientUserIds,
                    {
                      title: 'Additional Document Added',
                      body: `${user.name || 'User'} added "${attachment.fileName}" to ${requestNumber}`,
                      requestId,
                      requestNumber,
                      url: `/request/${requestNumber}`,
                      type: 'document_added',
                      priority: 'MEDIUM',
                      actionRequired: false,
                      metadata: {
                        documentName: attachment.fileName,
                        fileSize: attachment.fileSize,
                        addedByName: user.name || 'User',
                        source: 'Work Notes'
                      }
                    }
                  );
                }
                logger.info('[WorkNote] In-app and web-push notifications sent for additional documents', {
                  requestId,
                  attachmentsCount: attachments.length,
                  recipientsCount: recipientUserIds.length
                });
              } catch (notifyError) {
                logger.error('[WorkNote] Failed to send in-app/web-push notifications for additional documents:', notifyError);
              }
            }

            // Send email notifications for each attachment
            for (const attachment of attachments) {
              for (const recipient of recipientsToNotify) {
                await emailNotificationService.sendAdditionalDocumentAdded(
                  requestData,
                  recipient,
                  {
                    documentName: attachment.fileName,
                    fileSize: attachment.fileSize,
                    addedByName: user.name || 'User',
                    source: 'Work Notes'
                  }
                );
              }
            }

            logger.info('[WorkNote] Additional document notifications sent', {
              requestId,
              attachmentsCount: attachments.length,
              recipientsCount: recipientsToNotify.length,
              isInitiator
            });
          }
        } catch (notifyError) {
          // Don't fail work note creation if notifications fail
          logger.error('[WorkNote] Failed to send additional document notifications:', notifyError);
        }
      }
    }

    // Log activity for work note
    activityService.log({
      requestId,
      type: 'comment',
      user: { userId: user.userId, name: user.name || 'User' },
      timestamp: new Date().toISOString(),
      action: 'Work Note Added',
      details: `${user.name || 'User'} added a work note: ${payload.message.substring(0, 100)}${payload.message.length > 100 ? '...' : ''}`,
      ipAddress: requestMetadata?.ipAddress || undefined,
      userAgent: requestMetadata?.userAgent || undefined
    });

    try {
      // Optional realtime emit (if socket layer is initialized)
      const { emitToRequestRoom } = require('../realtime/socket');
      if (emitToRequestRoom) {
        // Emit note with all fields explicitly (to ensure camelCase fields are sent)
        const noteData = {
          noteId: (note as any).noteId,
          requestId: (note as any).requestId,
          userId: (note as any).userId,
          userName: (note as any).userName,
          userRole: (note as any).userRole, // Include participant role
          message: (note as any).message,
          createdAt: (note as any).createdAt,
          hasAttachment: (note as any).hasAttachment,
          attachments: attachments // Include attachments
        };
        emitToRequestRoom(requestId, 'worknote:new', { note: noteData });
      }
    } catch (e) { logger.warn('Realtime emit failed (not initialized)'); }

    // Send notifications to mentioned users
    if (payload.mentionedUsers && Array.isArray(payload.mentionedUsers) && payload.mentionedUsers.length > 0) {
      try {
        // Get workflow details for request number and title
        const workflow = await WorkflowRequest.findOne({ where: { requestId } });
        const requestNumber = (workflow as any)?.requestNumber || requestId;
        const requestTitle = (workflow as any)?.title || 'Request';

        logger.info(`[WorkNote] Sending mention notifications to ${payload.mentionedUsers.length} users`);

        await notificationService.sendToUsers(
          payload.mentionedUsers,
          {
            title: '💬 Mentioned in Work Note',
            body: `${user.name || 'Someone'} mentioned you in ${requestNumber}: "${payload.message.substring(0, 50)}${payload.message.length > 50 ? '...' : ''}"`,
            requestId,
            requestNumber,
            url: `/request/${requestNumber}`,
            type: 'mention'
          }
        );

        logger.info(`[WorkNote] Mention notifications sent successfully`);
      } catch (notifyError) {
        logger.error('[WorkNote] Failed to send mention notifications:', notifyError);
        // Don't fail the work note creation if notifications fail
      }
    }

    return { ...note, attachments };
  }

  async downloadAttachment(attachmentId: string) {
    const attachment = await WorkNoteAttachment.findOne({
      where: { attachmentId }
    });

    if (!attachment) {
      throw new Error('Attachment not found');
    }

    const storageUrl = (attachment as any).storageUrl || (attachment as any).storage_url;
    const filePath = (attachment as any).filePath || (attachment as any).file_path;
    const fileName = (attachment as any).fileName || (attachment as any).file_name;
    const fileType = (attachment as any).fileType || (attachment as any).file_type;

    // Check if it's a GCS URL
    const isGcsUrl = storageUrl && (storageUrl.startsWith('https://storage.googleapis.com') || storageUrl.startsWith('gs://'));

    return {
      filePath: filePath,
      storageUrl: storageUrl,
      fileName: fileName,
      fileType: fileType,
      isGcsUrl: isGcsUrl
    };
  }
}

export const workNoteService = new WorkNoteService();
@ -383,3 +383,221 @@ report_cache {
%% 8. TAT thresholds: 50%, 80%, 100%
%% 9. Max approval levels: 10
%% 10. Max file size: 10 MB

erDiagram
    workflow_requests ||--|| dealer_claim_details : "has_claim_details"
    workflow_requests ||--o{ dealer_claim_history : "has_claim_history"
    workflow_requests ||--|| dealer_proposal_details : "has_proposal"
    workflow_requests ||--|| dealer_completion_details : "has_completion"
    workflow_requests ||--|| claim_budget_tracking : "tracks_budget"
    workflow_requests ||--|| internal_orders : "has_io"
    workflow_requests ||--o{ claim_invoices : "has_invoices"
    workflow_requests ||--o{ claim_credit_notes : "has_credit_notes"
    workflow_requests ||--o{ tat_alerts : "triggers_alerts"
    workflow_requests ||--|| request_summaries : "has_summary"

    dealer_proposal_details ||--o{ dealer_proposal_cost_items : "has_items"
    dealer_completion_details ||--o{ dealer_completion_expenses : "has_expenses"
    claim_invoices ||--o{ claim_credit_notes : "has_credit_notes"

    request_summaries ||--o{ shared_summaries : "shared_as"
    users ||--o{ shared_summaries : "shares"
    users ||--o{ subscriptions : "has_subscription"
    users ||--o{ holidays : "creates"
    users ||--o{ activity_types : "creates"

    dealers {
        uuid dealer_id PK
        varchar sales_code
        varchar service_code
        varchar dealer_name
        varchar region
        varchar state
        varchar city
        varchar location
        boolean is_active
        timestamp created_at
        timestamp updated_at
    }

    dealer_claim_details {
        uuid claim_id PK
        uuid request_id FK
        varchar activity_name
        varchar activity_type
        varchar dealer_code
        varchar dealer_name
        date activity_date
        date period_start_date
        date period_end_date
        timestamp created_at
        timestamp updated_at
    }

    dealer_claim_history {
        uuid history_id PK
        uuid request_id FK
        uuid approval_level_id FK
        integer version
        enum snapshot_type
        jsonb snapshot_data
        text change_reason
        uuid changed_by FK
        timestamp created_at
    }

    dealer_proposal_details {
        uuid proposal_id PK
        uuid request_id FK
        varchar proposal_document_path
        decimal total_estimated_budget
        date expected_completion_date
        text dealer_comments
        timestamp submitted_at
        timestamp created_at
        timestamp updated_at
    }

    dealer_proposal_cost_items {
        uuid cost_item_id PK
        uuid proposal_id FK
        uuid request_id FK
        varchar item_description
        decimal amount
        integer item_order
        timestamp created_at
        timestamp updated_at
    }

    dealer_completion_details {
        uuid completion_id PK
        uuid request_id FK
        date activity_completion_date
        integer number_of_participants
        decimal total_closed_expenses
        timestamp submitted_at
        timestamp created_at
        timestamp updated_at
    }

    dealer_completion_expenses {
        uuid expense_id PK
        uuid completion_id FK
        uuid request_id FK
        varchar description
        decimal amount
        timestamp created_at
        timestamp updated_at
    }

    claim_budget_tracking {
        uuid budget_id PK
        uuid request_id FK
        decimal initial_estimated_budget
        decimal proposal_estimated_budget
        decimal approved_budget
        decimal io_blocked_amount
        decimal closed_expenses
        decimal final_claim_amount
        decimal credit_note_amount
        enum budget_status
        timestamp created_at
        timestamp updated_at
    }

    claim_invoices {
        uuid invoice_id PK
        uuid request_id FK
        varchar invoice_number
        date invoice_date
        decimal amount
        varchar status
        timestamp created_at
        timestamp updated_at
    }

    claim_credit_notes {
        uuid credit_note_id PK
        uuid request_id FK
        uuid invoice_id FK
        varchar credit_note_number
        decimal credit_note_amount
        varchar status
        timestamp created_at
        timestamp updated_at
    }

    internal_orders {
        uuid io_id PK
        uuid request_id FK
        varchar io_number
        decimal io_available_balance
        decimal io_blocked_amount
        enum status
        timestamp created_at
        timestamp updated_at
    }

    holidays {
        uuid holiday_id PK
        date holiday_date
        varchar holiday_name
        enum holiday_type
        boolean is_active
        uuid created_by FK
        timestamp created_at
        timestamp updated_at
    }

    activity_types {
        uuid activity_type_id PK
        varchar title
        varchar item_code
        varchar taxation_type
        boolean is_active
        uuid created_by FK
        timestamp created_at
        timestamp updated_at
    }

    tat_alerts {
        uuid alert_id PK
        uuid request_id FK
        uuid level_id FK
        uuid approver_id FK
        enum alert_type
        boolean is_breached
        timestamp alert_sent_at
        timestamp created_at
    }

    request_summaries {
        uuid summary_id PK
        uuid request_id FK
        uuid initiator_id FK
        varchar title
        text description
        text closing_remarks
        boolean is_ai_generated
        timestamp created_at
        timestamp updated_at
    }

    shared_summaries {
        uuid shared_summary_id PK
        uuid summary_id FK
        uuid shared_by FK
        uuid shared_with FK
        boolean is_read
        timestamp shared_at
        timestamp created_at
    }

    subscriptions {
        uuid subscription_id PK
        uuid user_id FK
        varchar endpoint
        varchar p256dh
        varchar auth
        timestamp created_at
    }

310
docs/DATABASE_SCHEMA.md
Normal file
@ -0,0 +1,310 @@
# Database Schema Documentation

## 1. Overview
This document provides a detailed reference for the backend database schema of the Royal Enfield Workflow Management System.

**Database System:** PostgreSQL 16.x
**Schema Conventions:**
* **Primary Keys:** UUID (v4) for all tables.
* **Naming:** Snake_case for tables and columns.
* **Audit Columns:** Most tables include `created_at`, `updated_at`, `created_by`, `updated_by`.
* **Soft Deletes:** `is_deleted` flag used on critical entities.
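
As a concrete illustration of these conventions, a table DDL would look roughly like the sketch below; the table and its columns are hypothetical, shown only to demonstrate the pattern, not part of the actual schema.

```sql
-- Hypothetical table, illustrating the conventions above
CREATE TABLE example_entities (
    example_id   UUID PRIMARY KEY DEFAULT gen_random_uuid(), -- UUID (v4) primary key
    display_name VARCHAR(255) NOT NULL,                      -- snake_case naming
    is_deleted   BOOLEAN NOT NULL DEFAULT FALSE,             -- soft delete flag
    created_by   UUID,                                       -- audit columns
    updated_by   UUID,
    created_at   TIMESTAMP NOT NULL DEFAULT NOW(),
    updated_at   TIMESTAMP NOT NULL DEFAULT NOW()
);
```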
## 2. Architecture Diagrams (A4 Optimized)

### 2.1. Core Workflow Architecture
Focuses on the request lifecycle, approval chains, and direct interactions.

```mermaid
erDiagram
    users ||--o{ workflow_requests : "initiates"
    users ||--o{ approval_levels : "approves"
    users ||--o{ participants : "collaborates"
    workflow_requests ||--|{ approval_levels : "has_steps"
    workflow_requests ||--o{ participants : "has_users"
    workflow_requests ||--o{ documents : "contains"
    workflow_requests ||--o{ work_notes : "discussions"
    workflow_requests ||--o{ activities : "audit_trail"
    workflow_templates ||--o{ workflow_requests : "spawns"
    workflow_requests ||--|| conclusion_remarks : "finalizes"

    workflow_requests {
        uuid request_id PK
        varchar request_number
        enum status
        integer current_level
    }
    approval_levels {
        uuid level_id PK
        integer level_number
        enum status
        uuid approver_id FK
    }
```

### 2.2. Business Domain Data
Focuses on the specific data payloads (Dealers, Finance, Claims) attached to requests.

```mermaid
erDiagram
    workflow_requests ||--o{ dealers : "context"
    workflow_requests ||--|| dealer_claim_details : "claim_data"
    workflow_requests ||--|| dealer_proposal_details : "proposal"
    workflow_requests ||--|| dealer_completion_details : "evidence"
    workflow_requests ||--o{ dealer_claim_history : "versions"

    workflow_requests ||--|| claim_budget_tracking : "financials"
    workflow_requests ||--|| internal_orders : "sap_ref"
    workflow_requests ||--o{ claim_invoices : "billing"
    claim_invoices ||--o{ claim_credit_notes : "adjustments"

    dealer_claim_details {
        uuid claim_id PK
        varchar activity_type
    }
    claim_budget_tracking {
        decimal approved_budget
        decimal final_claim_amount
    }
```

### 2.3. System Support Services
Focuses on cross-cutting concerns like logging, notifications, and monitoring.

```mermaid
erDiagram
    users ||--o{ notifications : "receives"
    users ||--o{ system_settings : "configures"
    users ||--o{ audit_logs : "actions"

    workflow_requests ||--o{ notifications : "triggers"
    workflow_requests ||--o{ tat_tracking : "monitors_sla"
    workflow_requests ||--o{ tat_alerts : "sla_breaches"
    workflow_requests ||--o{ request_summaries : "ai_summary"
    workflow_requests ||--o{ report_cache : "reporting"

    notifications ||--o{ email_logs : "outbound"
    notifications ||--o{ sms_logs : "outbound"

    tat_tracking {
        decimal total_tat_hours
        boolean threshold_breached
    }
```

## 3. Schema Modules

### 3.1. User & Authentication Module
Manages user identities, sessions, and system-wide configurations.

```mermaid
erDiagram
    users ||--o{ user_sessions : "has"
    users ||--o{ subscriptions : "has_device"
    users ||--o{ system_settings : "modifies"

    users {
        uuid user_id PK
        varchar employee_id
        varchar email
        varchar display_name
        enum role
        boolean is_active
    }
    user_sessions {
        uuid session_id PK
        uuid user_id FK
        varchar session_token
        timestamp expires_at
    }
    subscriptions {
        uuid subscription_id PK
        uuid user_id FK
        varchar endpoint
    }
```
#### Tables

**`users`**
Core user registry, synced with Okta/HRMS.
* `user_id` (PK): Unique UUID.
* `employee_id` (Unique): HR system ID.
* `email` (Unique): Official email address.
* `role`: RBAC role (USER, ADMIN, etc.).
* `is_active`: Soft delete/account link status.

**`user_sessions`**
Active JWT sessions for invalidation/tracking.
* `session_token`: The JWT access token.
* `refresh_token`: For renewing access tokens.
* `device_type`: Web/Mobile classification.

**`system_settings`**
Dynamic configuration (e.g., global TAT thresholds).
* `setting_key` (Unique): Config identifier name.
* `setting_value`: The value (text/json).
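
To show how these tables combine at runtime, a session-validation query might look like the following sketch; the exact column set is an assumption based on the descriptions above.

```sql
-- Sketch: resolve an active session to a user (columns assumed from above)
SELECT u.user_id, u.role
FROM user_sessions s
JOIN users u ON u.user_id = s.user_id
WHERE s.session_token = $1
  AND s.expires_at > NOW()
  AND u.is_active = TRUE;
```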
---

### 3.2. Workflow Engine Module
The core engine driving request lifecycles, approvals, and tracking.

```mermaid
erDiagram
    workflow_requests ||--|{ approval_levels : "steps"
    workflow_requests ||--o{ activities : "events"
    workflow_requests ||--|{ participants : "access"
    workflow_templates ||--o{ workflow_requests : "spawns"

    workflow_requests {
        uuid request_id PK
        varchar request_number
        enum status
        uuid initiator_id FK
    }
    approval_levels {
        uuid level_id PK
        uuid request_id FK
        integer level_number
        enum status
        uuid approver_id FK
    }
```

#### Tables

**`workflow_requests`**
The central entity representing a business process instance.
* `request_number`: Human-readable ID (e.g., REQ-2024-001).
* `current_level`: Pointer to the active approval step.
* `status`: DRAFT, PENDING, APPROVED, REJECTED, CLOSED.

**`approval_levels`**
Defines the sequence of approvers for a request.
* `level_number`: Sequence index (1, 2, 3...).
* `approver_id`: User responsible for this step.
* `tat_hours`: SLA for this specific step.
* `status`: PENDING, APPROVED, REJECTED.

**`participants`**
Users with visibility/access to the request (spectators, contributors).
* `participant_type`: SPECTATOR, CONTRIBUTOR.
* `can_comment`, `can_view_documents`: Granular permissions.

**`activities`**
Audit trail of all actions performed on a request.
* `activity_type`: CREATED, APPROVED, COMMENTED, FILE_UPLOADED.
* `metadata`: JSON payload with specific details of the event.

**`workflow_templates`**
Blueprints for creating new requests.
* `approval_levels_config`: JSON defining the default approver chain structure.
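
As a sketch of how `current_level` and `approval_levels` interact, fetching the active step for a request could look like this; the join keys are assumed from the descriptions above.

```sql
-- Sketch: the active approval step for a request (columns assumed from above)
SELECT al.level_id, al.approver_id, al.tat_hours, al.status
FROM workflow_requests r
JOIN approval_levels al
  ON al.request_id = r.request_id
 AND al.level_number = r.current_level
WHERE r.request_number = 'REQ-2024-001';
```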
---

### 3.3. Dealer Management Module
Stores specific data related to dealer claims, onboarding, and performance.

```mermaid
erDiagram
    workflow_requests ||--|| dealer_claim_details : "details"
    workflow_requests ||--|| dealer_proposal_details : "proposal"
    workflow_requests ||--|| dealer_completion_details : "completion"
    workflow_requests ||--o{ dealer_claim_history : "versions"
    workflow_requests ||--o{ dealers : "related_to"

    dealers {
        uuid dealer_id PK
        varchar dealer_name
        varchar sales_code
    }
```

#### Tables

**`dealers`**
Master data for dealerships.
* `sales_code`, `service_code`: Dealer unique identifiers.
* `dealer_name`, `region`, `city`: Location details.

**`dealer_claim_details`**
Specific attributes for a Dealer Claim request.
* `activity_name`, `activity_type`: Marketing/Sales activity details.
* `period_start_date`, `period_end_date`: Duration of the claim activity.

**`dealer_proposal_details`**
Stores the initial proposal data for a claim.
* `total_estimated_budget`: The budget amount proposed by the dealer.
* `proposal_document_url`: Link to the uploaded proposal PDF/Doc.

**`dealer_claim_history`**
Snapshots of the claim data at various approval stages.
* `snapshot_data`: JSON dump of the claim state.
* `version`: Incremental version number.
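
Since `dealer_claim_history` is versioned, reading the latest snapshot for a request reduces to an order-and-limit query, sketched below with assumed column names.

```sql
-- Sketch: latest versioned claim snapshot for a request
SELECT h.version, h.snapshot_type, h.snapshot_data
FROM dealer_claim_history h
WHERE h.request_id = $1
ORDER BY h.version DESC
LIMIT 1;
```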
---

### 3.4. Financial Module
Manages budgeting, internal orders, and invoicing.

```mermaid
erDiagram
    workflow_requests ||--|| claim_budget_tracking : "budget"
    workflow_requests ||--|| internal_orders : "io"
    workflow_requests ||--o{ claim_invoices : "invoices"
    claim_invoices ||--o{ claim_credit_notes : "credit_notes"
```

#### Tables

**`claim_budget_tracking`**
Central ledger for a request's financial lifecycle.
* `initial_estimated_budget`: Original requested amount.
* `approved_budget`: Validated amount after approvals.
* `io_blocked_amount`: Amount reserved in SAP.
* `final_claim_amount`: Actual payout amount.

**`internal_orders`**
SAP Internal Order references.
* `io_number`: The IO code from SAP.
* `io_available_balance`, `io_blocked_amount`: Balance tracking.

**`claim_invoices`**
Invoices submitted against the claim.
* `invoice_number`: Vendor invoice ID.
* `amount`: Invoice value.
* `dms_number`: Document Management System reference.

**`claim_credit_notes`**
Adjustments/Returns linked to invoices.
* `credit_note_amount`: Value to be deducted/adjusted.
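
A rough reconciliation across this ledger, netting credit notes against the final claim amount, might look like the sketch below; the join key is an assumption based on the ERD.

```sql
-- Sketch: net payable per request after credit-note adjustments
SELECT b.request_id,
       b.final_claim_amount,
       COALESCE(SUM(cn.credit_note_amount), 0) AS total_credit_notes,
       b.final_claim_amount - COALESCE(SUM(cn.credit_note_amount), 0) AS net_payable
FROM claim_budget_tracking b
LEFT JOIN claim_credit_notes cn ON cn.request_id = b.request_id
GROUP BY b.request_id, b.final_claim_amount;
```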
---

### 3.5. Ancillary Modules
Support functions like notifications, tracking, and logs.

#### Tables

**`notifications`**
User alerts.
* `is_read`: Read status.
* `action_url`: Deep link to the relevant request.

**`tat_tracking`**
Turnaround Time monitoring.
* `tracking_type`: REQUEST (overall) or LEVEL (step-specific).
* `total_tat_hours`: The allowed time.
* `elapsed_hours`: Time consumed so far.
* `breached_flags`: `threshold_50_breached`, etc.

**`tat_alerts`**
Logs of TAT breach notifications sent.
* `alert_type`: TAT_50, TAT_75, TAT_100.
* `is_breached`: Confirmed breach status.

**`request_summaries`**
AI- or manually generated summaries of complex requests.
* `is_ai_generated`: Origin flag.
* `description`, `closing_remarks`: Narrative text.
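
Given `total_tat_hours` and `elapsed_hours`, TAT consumption is a simple ratio; the sketch below assumes the column names listed above.

```sql
-- Sketch: TAT consumption percentage per tracked request
SELECT t.request_id,
       t.total_tat_hours,
       t.elapsed_hours,
       ROUND(100.0 * t.elapsed_hours / NULLIF(t.total_tat_hours, 0), 1) AS pct_used
FROM tat_tracking t
WHERE t.tracking_type = 'REQUEST';
```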
141
docs/ERD.mermaid
@ -24,12 +24,19 @@ erDiagram
    workflow_requests ||--|| claim_invoices : claim_invoice
    workflow_requests ||--|| claim_credit_notes : claim_credit_note
    work_notes ||--o{ work_note_attachments : has
-   notifications ||--o{ email_logs : sends
-   notifications ||--o{ sms_logs : sends
    workflow_requests ||--o{ report_cache : caches
    workflow_requests ||--o{ audit_logs : audits
    workflow_requests ||--o{ workflow_templates : templates
    users ||--o{ system_settings : updates
+   workflow_requests ||--o{ dealer_claim_history : has_history
+   workflow_requests ||--o{ tat_alerts : triggers
+   workflow_requests ||--|| request_summaries : summarizes
+   request_summaries ||--o{ shared_summaries : shared_as
+   users ||--o{ shared_summaries : shares
+   users ||--o{ subscriptions : has_device
+   users ||--o{ holidays : manages
+   users ||--o{ activity_types : manages
+
    users {
        uuid user_id PK
@ -286,46 +293,7 @@ erDiagram
    varchar logout_reason
    }
-
-   email_logs {
-       uuid email_log_id PK
-       uuid request_id FK
-       uuid notification_id FK
-       varchar recipient_email
-       uuid recipient_user_id FK
-       text[] cc_emails
-       text[] bcc_emails
-       varchar subject
-       text body
-       varchar email_type
-       varchar status
-       integer send_attempts
-       timestamp sent_at
-       timestamp failed_at
-       text failure_reason
-       timestamp opened_at
-       timestamp clicked_at
-       timestamp created_at
-   }
-
-   sms_logs {
-       uuid sms_log_id PK
-       uuid request_id FK
-       uuid notification_id FK
-       varchar recipient_phone
-       uuid recipient_user_id FK
-       text message
-       varchar sms_type
-       varchar status
-       integer send_attempts
-       timestamp sent_at
-       timestamp delivered_at
-       timestamp failed_at
-       text failure_reason
-       varchar sms_provider
-       varchar sms_provider_message_id
-       decimal cost
-       timestamp created_at
-   }

    system_settings {
        uuid setting_id PK
@ -505,3 +473,94 @@ erDiagram
    timestamp updated_at
    }
+
+   dealers {
+       uuid dealer_id PK
+       varchar sales_code
+       varchar service_code
+       varchar dealer_name
+       varchar region
+       varchar state
+       varchar city
+       varchar location
+       boolean is_active
+       timestamp created_at
+       timestamp updated_at
+   }
+
+   dealer_claim_history {
+       uuid history_id PK
+       uuid request_id FK
+       uuid approval_level_id FK
+       integer version
+       enum snapshot_type
+       jsonb snapshot_data
+       text change_reason
+       uuid changed_by FK
+       timestamp created_at
+   }
+
+   holidays {
+       uuid holiday_id PK
+       date holiday_date
+       varchar holiday_name
+       enum holiday_type
+       boolean is_active
+       uuid created_by FK
+       timestamp created_at
+       timestamp updated_at
+   }
+
+   activity_types {
+       uuid activity_type_id PK
+       varchar title
+       varchar item_code
+       varchar taxation_type
+       boolean is_active
+       uuid created_by FK
+       timestamp created_at
+       timestamp updated_at
+   }
+
+   tat_alerts {
+       uuid alert_id PK
+       uuid request_id FK
+       uuid level_id FK
+       uuid approver_id FK
+       enum alert_type
+       boolean is_breached
+       timestamp alert_sent_at
+       timestamp created_at
+   }
+
+   request_summaries {
+       uuid summary_id PK
+       uuid request_id FK
+       uuid initiator_id FK
+       varchar title
+       text description
+       text closing_remarks
+       boolean is_ai_generated
+       timestamp created_at
+       timestamp updated_at
+   }
+
+   shared_summaries {
+       uuid shared_summary_id PK
+       uuid summary_id FK
+       uuid shared_by FK
+       uuid shared_with FK
+       boolean is_read
+       timestamp shared_at
+       timestamp created_at
+   }
+
+   subscriptions {
+       uuid subscription_id PK
+       uuid user_id FK
+       varchar endpoint
+       varchar p256dh
+       varchar auth
+       timestamp created_at
+   }
113
docs/POSTGRES_JUSTIFICATION.md
Normal file
@ -0,0 +1,113 @@
# Why PostgreSQL Wins for "Royal Enfield Workflow"

## Executive Summary
For "Royal Enfield Workflow", **PostgreSQL is superior to MongoDB**.
The decision rests on **Reporting Speed** and **Deep Filtering capabilities**. Your workflow requires filtering by *Relationships* (Approvers, Departments), not just static data.

---

## 1. Complex Workflow Filters (The "My Tasks" Problem)
Users need specific views like "Requests waiting for me" or "Paused requests".

### A. "Requests Open For Me" (The Join Filter)
*Scenario: Show all requests where **I am the current approver**.*

#### PostgreSQL (Simple SQL `JOIN`)
Index usage is ideal: the database jumps straight to the few rows in `approval_levels` assigned to you.
```sql
SELECT r.id, r.status, r.created_at
FROM workflow_requests r
JOIN approval_levels al ON r.id = al.request_id
WHERE al.approver_id = 'USER_UUID_123'
  AND al.status = 'PENDING'
ORDER BY r.created_at DESC;
```

#### MongoDB (Array Query + Sort Issue)
You must index inside an array. If you sort by "Date", Mongo often cannot use the index effectively for both the *array match* and the *sort*, leading to slow scans.
```javascript
db.requests.find({
  "approvers": {
    $elemMatch: {
      userId: "USER_UUID_123",
      status: "PENDING"
    }
  }
}).sort({ createdAt: -1 });
// WARNING: Performance degrades heavily if the user has many historical requests
```
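
For context, here is roughly what the Mongo side needs before that query is even viable — a minimal sketch assuming a `requests` collection with an embedded `approvers` array (the schema and names are illustrative, not taken from the codebase):
```typescript
// Sketch: the compound multikey index the array query above depends on.
import { Schema, model } from 'mongoose';

const RequestSchema = new Schema({
  status: String,
  createdAt: { type: Date, default: Date.now },
  approvers: [{ userId: String, status: String }],
});

// Helps the $elemMatch, but MongoDB still cannot always combine the
// array-bound keys with the createdAt sort, so large result sets may
// fall back to an in-memory sort.
RequestSchema.index({ 'approvers.userId': 1, 'approvers.status': 1, createdAt: -1 });

export const RequestModel = model('Request', RequestSchema);
```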

### B. "Paused & Resumed" History
*Scenario: Show requests that were previously Paused but are now Active (requires checking history).*

#### PostgreSQL (Audit Log Join)
You query the history table directly, without loading the main request data until a match is found.
```sql
SELECT DISTINCT r.*
FROM workflow_requests r
JOIN audit_logs log ON r.id = log.request_id
WHERE log.action = 'PAUSED'
  AND r.status = 'IN_PROGRESS';
```

#### MongoDB (The "Lookup" or "Bloat" Trade-off)
**Option 1: Lookups (Slow)**
You have to join the separate `audit_logs` collection for every request.
```javascript
db.requests.aggregate([
  { $match: { status: "IN_PROGRESS" } },
  {
    $lookup: {
      from: "audit_logs",
      localField: "_id",
      foreignField: "requestId",
      as: "history"
    }
  },
  { $match: { "history.action": "PAUSED" } }
]);
```

**Option 2: Embedding (Bloated)**
You store every log inside the Request document.
* *Result*: Your generic `db.requests.find({})` becomes 10x slower because it drags megabytes of history logs across the network for every result.

## 2. The Filter Nightmare: "Deep Filtering"
Users expect to slice-and-dice data freely. *Example: "Show requests initiated by users in the 'Sales' Department".*

* **Postgres (Cross-Table Filter)**:
  ```sql
  SELECT * FROM workflow_requests r
  JOIN users u ON r.initiator_id = u.id
  WHERE u.department = 'Sales'
  ```
  * **Result**: Instant. SQL simply filters the `users` table first (using an index on `department`) and then grabs the matching requests.

* **MongoDB (The "Lookup" Trap)**:
  * `Department` is stored on the **User** document, not the Request.
  * To filter Requests by "Department", you must `$lookup` (join) the User collection for *every single request* before you can filter them (see the sketch after this list).
  * *Alternative*: Copy `department` into every Request document.
  * *Maintenance Cost*: If a user transfers from 'Sales' to 'Marketing', you must run a script to update all their historical requests, or your reports will be wrong.
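
A minimal sketch of that `$lookup` trap, assuming requests reference their initiator by `initiatorId` (collection and field names are illustrative):
```typescript
// Sketch: filtering requests by department forces a join per request.
import mongoose from 'mongoose';

async function requestsByDepartment(department: string) {
  return mongoose.connection.collection('requests').aggregate([
    {
      $lookup: {
        from: 'users',              // join the User collection...
        localField: 'initiatorId',  // ...for every single request
        foreignField: 'userId',
        as: 'initiator',
      },
    },
    { $unwind: '$initiator' },
    // The filter can only run AFTER the join has materialized
    { $match: { 'initiator.department': department } },
  ]).toArray();
}
```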

## 3. Dashboard: The "Aggregation" Bottleneck
Your dashboard provides real-time insights (e.g., "Approver Efficiency," "TAT per Region").

* **Window Functions (SQL Superpower)**:
  * *Requirement*: Rank dealers by "Average Approval Time" compared to their peers.
  * *Postgres*: `RANK() OVER (PARTITION BY region ORDER BY avg_tat)` runs natively and instantly.
  * *MongoDB*: Requires complex Aggregation Pipelines (`$setWindowFields`) that are memory-intensive and harder to optimize (sketched below).
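
For comparison, a rough sketch of the MongoDB equivalent, assuming a pre-aggregated `dealer_stats` collection with `region`/`avgTat` fields (names are illustrative); `$setWindowFields` also requires MongoDB 5.0+:
```typescript
// Sketch: the Mongo counterpart of RANK() OVER (PARTITION BY region ORDER BY avg_tat).
import mongoose from 'mongoose';

async function rankDealersByTat() {
  return mongoose.connection.collection('dealer_stats').aggregate([
    {
      $setWindowFields: {
        partitionBy: '$region',        // peers = dealers in the same region
        sortBy: { avgTat: 1 },         // fastest average approval time first
        output: { rank: { $rank: {} } },
      },
    },
  ]).toArray();
}
```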

## 4. Audit & Compliance
* **Postgres**: Foreign Key constraints prevent "Orphaned Logs." You cannot delete a User if they are referenced in an Audit Log. This guarantees **legal traceability**.
* **MongoDB**: No constraints. Deleting a user can leave "Ghost Logs" (referencing a null ID), breaking compliance reports.

## Summary Verdict
| Feature | PostgreSQL | MongoDB |
| :--- | :--- | :--- |
| **"Open For Me"** | **Simple Join** | **Complex Array Indexing** |
| **Dept/Region Filters** | **Simple Join** | **Slow Lookup** or **Duplicated Data** |
| **Ad-Hoc Reports** | **Flexible** | **Rigid** (Needs Indexes) |
| **Audit Compliance** | **Guaranteed** | **Risk of Orphaned Data** |

**Recommendation**: Stick with PostgreSQL.
The "Relational" nature of your reporting (connecting Requests -> Users -> Departments -> Regions) is exactly what SQL was built to solve efficiently.
159
docs/SYSTEM_ARCHITECTURE.md
Normal file
@ -0,0 +1,159 @@
# Royal Enfield Workflow Management System - Technical Architecture Definition

## 1. Platform Overview
The Royal Enfield (RE) Workflow Management System is a resilient, horizontally scalable infrastructure designed to orchestrate complex internal business processes. It utilizes a decoupled, service-oriented architecture leveraging **Node.js (TypeScript)**, **MongoDB Atlas (v8)**, and **Google Cloud Storage (GCS)** to ensure high availability and performance across enterprise workflows.

This document focuses exclusively on the core platform infrastructure and custom workflow engine, excluding legacy dealer claim modules.

---

## 2. Global Architecture & Ingress

### A. High-Level System Architecture
```mermaid
graph TD
    User((User / Client))
    subgraph "Public Interface"
        Nginx[Nginx Reverse Proxy]
    end

    subgraph "Application Layer (Node.js)"
        Auth[Auth Middleware]
        Core[Workflow Service]
        Dynamic[Ad-hoc Logic]
        AI[Vertex AI Service]
        TAT[TAT Worker / BullMQ]
    end

    subgraph "Persistence & Infrastructure"
        Atlas[(MongoDB Atlas v8)]
        GCS_Bucket[GCS Bucket - Artifacts]
        GSM[Google Secret Manager]
        Redis[(Redis Cache)]
    end

    User --> Nginx
    Nginx --> Auth
    Auth --> Core
    Core --> Dynamic
    Core --> Atlas
    Core --> GCS_Bucket
    Core --> AI
    TAT --> Redis
    TAT --> Atlas
    Core --> GSM
```

### B. Professional Entrance: Nginx Proxy
All incoming traffic is managed by **Nginx**, acting as the "Deployed Server" facade.
- **SSL Termination**: Encrypts traffic at the edge.
- **Micro-caching**: Caches static metadata to reduce load on Node.js.
- **Proxying**: Strategically routes `/api` to the backend and serves the production React bundle for root requests.

### C. Stateless Authentication (JWT + RBAC)
The platform follows a stateless security model (a sketch of the validation step follows this list):
1. **JWT Validation**: `auth.middleware.ts` verifies signatures using secrets managed by **Google Secret Manager (GSM)**.
2. **Context Enrichment**: User identity is synchronized from the `users` collection in MongoDB Atlas.
3. **Granular RBAC**: Access is governed by roles (`ADMIN`, `MANAGEMENT`, `USER`) and dynamic participant checks.
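
A minimal sketch of the validation step, assuming the JWT secret has already been fetched from GSM at startup (names are illustrative, not the actual `auth.middleware.ts` contents):
```typescript
// Sketch: stateless JWT validation feeding RBAC.
import jwt, { JwtPayload } from 'jsonwebtoken';
import type { Request, Response, NextFunction } from 'express';

export function authMiddleware(jwtSecret: string) {
  return (req: Request, res: Response, next: NextFunction) => {
    const token = req.headers.authorization?.replace('Bearer ', '');
    if (!token) {
      return res.status(401).json({ success: false, error: 'Missing token' });
    }
    try {
      // Verify signature and expiry; the payload carries identity for RBAC checks
      const payload = jwt.verify(token, jwtSecret) as JwtPayload;
      (req as any).user = { userId: payload.userId, role: payload.role };
      next();
    } catch {
      res.status(401).json({ success: false, error: 'Invalid or expired token' });
    }
  };
}
```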

---

## 3. Background Processing & SLA Management (BullMQ)

At the heart of the platform's performance is the **Asynchronous Task Engine** powered by **BullMQ** and **Redis**.

### A. TAT (Turnaround Time) Tracking Logic
Turnaround time is monitored per level by a calculation engine that accounts for:
- **Business Days/Hours**: Weekend and holiday filtering via `tatTimeUtils.ts`.
- **Priority Multipliers**: Scaling TAT for `STANDARD` vs `EXPRESS` requests.
- **Pause Impact**: Snapshot-based SLA halting during business-case pauses.

### B. TAT Worker Flow (Redis Backed)
```mermaid
graph TD
    Trigger[Request Assignment] --> Queue[tatQueue - BullMQ]
    Queue --> Redis[(Redis Cache)]
    Redis --> Worker[tatWorker.ts]
    Worker --> Processor[tatProcessor.mongo.ts]
    Processor --> Check{Threshold Reached?}
    Check -->|50/75%| Notify[Reminder Notification]
    Check -->|100%| Breach[Breach Alert + Escalation]
```
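
A condensed sketch of this wiring with BullMQ, assuming a local Redis connection; the threshold logic and helpers are illustrative — the real processing lives in `tatProcessor.mongo.ts`:
```typescript
// Sketch: schedule a delayed TAT check on assignment, decide reminder vs breach.
import { Queue, Worker } from 'bullmq';

const connection = { host: 'localhost', port: 6379 };
export const tatQueue = new Queue('tatQueue', { connection });

// Placeholder helpers; the real implementations live in the TAT processor
async function computeElapsedPct(_requestId: string, _levelId: string): Promise<number> { return 0; }
async function sendReminder(_requestId: string, _levelId: string): Promise<void> {}
async function sendBreachAlert(_requestId: string, _levelId: string): Promise<void> {}

// On assignment, schedule a delayed check (e.g. at 50% of the TAT budget)
export async function scheduleTatCheck(requestId: string, levelId: string, delayMs: number) {
  await tatQueue.add('tat-check', { requestId, levelId }, { delay: delayMs });
}

// The worker decides between a reminder and a breach escalation
new Worker('tatQueue', async (job) => {
  const { requestId, levelId } = job.data;
  const pct = await computeElapsedPct(requestId, levelId);
  if (pct >= 100) await sendBreachAlert(requestId, levelId);
  else if (pct >= 50) await sendReminder(requestId, levelId);
}, { connection });
```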

---

## 4. Multi-Channel Notification Dispatch Engine

The system ensures critical workflow events (Approvals, Breaches, Comments) reach users through three distinct synchronous and asynchronous channels.

### A. Channel Orchestration
Managed by `notification.service.ts`, the engine handles:
1. **Real-time (Socket.io)**: Immediate UI updates via room-based events.
2. **Web Push (Vapid)**: Browser-level push notifications for offline users.
3. **Enterprise Email**: Specialized services like `emailNotification.service.ts` dispatch templated HTML emails.

### B. Notification Lifecycle
```mermaid
sequenceDiagram
    participant S as Service Layer
    participant N as Notification Service
    participant DB as MongoDB (NotificationModel)
    participant SK as Socket.io
    participant E as Email Service

    S->>N: Trigger Event (e.g. "Assignment")
    N->>DB: Persist Notification Record (Audit)
    N->>SK: broadcast(user:id, "notification:new")
    N->>E: dispatchAsync(EmailTemplate)
    DB-->>S: Success
```
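
A simplified sketch of that fan-out, with the persistence and email helpers stubbed (the real logic lives in `notification.service.ts`; names are illustrative):
```typescript
// Sketch: persist first for the audit trail, then fan out to channels.
import { Server } from 'socket.io';

interface NotificationPayload { userId: string; type: string; message: string; }

export async function dispatchNotification(io: Server, payload: NotificationPayload) {
  // 1. Persist first, so the audit record exists even if delivery fails
  await saveNotification(payload);
  // 2. Real-time: room-based emit to the target user's sockets
  io.to(`user:${payload.userId}`).emit('notification:new', payload);
  // 3. Email (and web push) are dispatched asynchronously, fire-and-forget
  void sendEmailAsync(payload);
}

// Stubs standing in for the MongoDB NotificationModel and email service
async function saveNotification(_p: NotificationPayload): Promise<void> {}
async function sendEmailAsync(_p: NotificationPayload): Promise<void> {}
```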

---

## 5. Cloud-Native Storage & Assets (GCS)

The architecture treats **Google Cloud Storage (GCS)** as a first-class citizen for both operational and deployment data.

### A. Deployment Artifact Architecture
- **Static Site Hosting**: GCS stores the compiled frontend artifacts.
- **Production Secrets**: `Google Secret Manager` ensures that no production passwords or API keys reside in the codebase.

### B. Scalable Document Storage
- **Decoupling**: Binaries are never stored in the database. MongoDB only stores the URI.
- **Privacy Mode**: Documents are retrieved via **Signed URLs** with a configurable TTL.
- **Structure**: `requests/{requestNumber}/documents/`
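
A minimal sketch of signed-URL retrieval following that path structure, assuming the `@google-cloud/storage` client; the bucket name and TTL default are illustrative:
```typescript
// Sketch: time-limited, read-only access to a stored document.
import { Storage } from '@google-cloud/storage';

const storage = new Storage();
const BUCKET = 're-workflow-artifacts'; // illustrative bucket name

export async function getDocumentUrl(requestNumber: string, fileName: string, ttlMs = 15 * 60 * 1000) {
  const file = storage.bucket(BUCKET).file(`requests/${requestNumber}/documents/${fileName}`);
  // V4 signed URL: expires after the configured TTL, no public ACLs needed
  const [url] = await file.getSignedUrl({
    version: 'v4',
    action: 'read',
    expires: Date.now() + ttlMs,
  });
  return url;
}
```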

---

## 6. Real-time Collaboration (Socket.io)

Collaborative features like "Who else is viewing this request?" and "Instant Alerts" are powered by a persistent WebSocket layer.

- **Presence Tracking**: A `Map<requestId, Set<userId>>` tracks online users per workflow request.
- **Room Logic**: Users join specific "Rooms" based on their current active request view.
- **Bi-directional Sync**: Frontend emits `presence:join` when entering a request page.
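
A minimal sketch of that presence map; the `presence:join` event name matches the convention above, the rest is illustrative:
```typescript
// Sketch: per-request presence tracking over Socket.io rooms.
import { Server, Socket } from 'socket.io';

const viewers = new Map<string, Set<string>>(); // requestId -> online userIds

export function registerPresence(io: Server, socket: Socket, userId: string) {
  socket.on('presence:join', (requestId: string) => {
    socket.join(`request:${requestId}`);
    if (!viewers.has(requestId)) viewers.set(requestId, new Set());
    viewers.get(requestId)!.add(userId);
    // Tell everyone on this request page who is currently viewing it
    io.to(`request:${requestId}`).emit('presence:update', [...viewers.get(requestId)!]);
  });

  socket.on('disconnect', () => {
    for (const [requestId, users] of viewers) {
      if (users.delete(userId)) {
        io.to(`request:${requestId}`).emit('presence:update', [...users]);
      }
    }
  });
}
```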

---

## 7. Intelligent Monitoring & Observability

The platform includes a dedicated monitoring stack for "Day 2" operations.

- **Metrics (Prometheus)**: Scrapes the `/metrics` endpoint provided by our Prometheus middleware.
- **Log Aggregation (Grafana Loki)**: `promtail` ships container logs to Loki for centralized debugging.
- **Alerting**: **Alertmanager** triggers PagerDuty/Email alerts for critical system failures.

```mermaid
graph LR
    App[RE Backend] -->|Prometheus| P[Prometheus DB]
    App -->|Logs| L[Loki]
    P --> G[Grafana Dashboards]
    L --> G
```
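
A minimal sketch of the `/metrics` endpoint using `prom-client` (already in the dependency list); the custom counter is illustrative:
```typescript
// Sketch: the endpoint Prometheus scrapes, plus one custom workflow metric.
import express from 'express';
import client from 'prom-client';

const app = express();
const register = new client.Registry();
client.collectDefaultMetrics({ register }); // CPU, memory, event-loop lag, etc.

// Example custom counter, incremented by the workflow service on create
export const requestsCreated = new client.Counter({
  name: 'workflow_requests_created_total',
  help: 'Total workflow requests created',
  registers: [register],
});

app.get('/metrics', async (_req, res) => {
  res.set('Content-Type', register.contentType);
  res.end(await register.metrics());
});
```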

---

## 8. Dynamic Workflow Flexibility
The "Custom Workflow" module provides logic for ad-hoc adjustments (a sketch of the insertion step follows):
1. **Skip Approver**: Bypasses a level while maintaining a forced audit reason.
2. **Ad-hoc Insertion**: Inserts an approver level mid-flight, dynamically recalculating the downstream chain.
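
A rough sketch of the insertion step against the `ApprovalLevel` collection; the actual service logic is more involved (audit entries, TAT rescheduling), so treat this as an assumption-laden outline:
```typescript
// Sketch: shift downstream levels up by one, then insert the new level.
import { ApprovalLevelModel } from '../models/mongoose/ApprovalLevel.schema';

export async function insertAdHocApprover(requestId: string, position: number, approverId: string) {
  // Make room: push levels at or after the insertion point down the chain
  await ApprovalLevelModel.updateMany(
    { requestId, levelNumber: { $gte: position } },
    { $inc: { levelNumber: 1 } }
  );
  // Insert the new level in the freed slot
  await ApprovalLevelModel.create({
    requestId,
    levelNumber: position,
    approverId,
    status: 'PENDING',
  });
}
```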
49
fix-imports.ps1
Normal file
@ -0,0 +1,49 @@
# Fix all simple imports to use MongoDB services

$replacements = @{
    'from ''@services/activity.service''' = 'from ''@services/activity.mongo.service'''
    'from ''../services/activity.service''' = 'from ''../services/activity.mongo.service'''
    'from ''@services/notification.service''' = 'from ''@services/notification.mongo.service'''
    'from ''../services/notification.service''' = 'from ''../services/notification.mongo.service'''
    'from ''@services/configReader.service''' = 'from ''@services/configReader.mongo.service'''
    'from ''../services/configReader.service''' = 'from ''../services/configReader.mongo.service'''
    'from ''./configReader.service''' = 'from ''./configReader.mongo.service'''
    'from ''../services/holiday.service''' = 'from ''../services/holiday.mongo.service'''
    'from ''../services/workflow.service''' = 'from ''../services/workflow.service.mongo'''
    'from ''../services/worknote.service''' = 'from ''../services/worknote.mongo.service'''

    # Service instance renames
    '\bactivityService\b' = 'activityMongoService'
    '\bnotificationService\b' = 'notificationMongoService'
    '\bholidayService\b' = 'holidayMongoService'
    '\bworkNoteService\b' = 'workNoteMongoService'
}

$files = @(
    'src/controllers/conclusion.controller.ts',
    'src/controllers/document.controller.ts',
    'src/controllers/notification.controller.ts',
    'src/controllers/tat.controller.ts',
    'src/routes/workflow.routes.ts',
    'src/emailtemplates/emailPreferences.helper.ts',
    'src/routes/debug.routes.ts',
    'src/services/ai.service.ts',
    'src/utils/tatTimeUtils.ts'
)

foreach ($file in $files) {
    if (Test-Path $file) {
        $content = Get-Content $file -Raw

        foreach ($key in $replacements.Keys) {
            $content = $content -replace $key, $replacements[$key]
        }

        Set-Content $file $content -NoNewline
        Write-Host "✓ Updated: $file"
    } else {
        Write-Host "✗ Not found: $file"
    }
}

Write-Host "`n✅ Import replacements complete!"
940
package-lock.json
generated
File diff suppressed because it is too large
14
package.json
@ -16,10 +16,9 @@
     "type-check": "tsc --noEmit",
     "clean": "rm -rf dist",
     "setup": "ts-node -r tsconfig-paths/register src/scripts/auto-setup.ts",
-    "migrate": "ts-node -r tsconfig-paths/register src/scripts/migrate.ts",
-    "seed:config": "ts-node -r tsconfig-paths/register src/scripts/seed-admin-config.ts",
-    "seed:test-dealer": "ts-node -r tsconfig-paths/register src/scripts/seed-test-dealer.ts",
-    "cleanup:dealer-claims": "ts-node -r tsconfig-paths/register src/scripts/cleanup-dealer-claims.ts"
+    "seed:config": "ts-node -r tsconfig-paths/register src/scripts/seed-admin-configs.ts",
+    "reset:mongo": "ts-node -r tsconfig-paths/register src/scripts/reset-mongo-db.ts",
+    "seed:test-dealer": "ts-node -r tsconfig-paths/register src/scripts/seed-test-dealer.mongo.ts"
   },
   "dependencies": {
     "@google-cloud/secret-manager": "^6.1.1",
@ -40,6 +39,7 @@
     "helmet": "^8.0.0",
     "ioredis": "^5.8.2",
     "jsonwebtoken": "^9.0.2",
+    "mongoose": "^9.1.5",
     "morgan": "^1.10.0",
     "multer": "^1.4.5-lts.1",
     "node-cron": "^3.0.3",
@ -47,10 +47,7 @@
     "openai": "^6.8.1",
     "passport": "^0.7.0",
     "passport-jwt": "^4.0.1",
-    "pg": "^8.13.1",
-    "pg-hstore": "^2.3.4",
     "prom-client": "^15.1.3",
-    "sequelize": "^6.37.5",
     "socket.io": "^4.8.1",
     "uuid": "^8.3.2",
     "web-push": "^3.6.7",
@ -65,12 +62,12 @@
     "@types/express": "^5.0.0",
     "@types/jest": "^29.5.14",
     "@types/jsonwebtoken": "^9.0.7",
+    "@types/mongoose": "^5.11.96",
     "@types/morgan": "^1.9.9",
     "@types/multer": "^1.4.12",
     "@types/node": "^22.19.1",
     "@types/passport": "^1.0.16",
     "@types/passport-jwt": "^4.0.1",
-    "@types/pg": "^8.15.6",
     "@types/supertest": "^6.0.2",
     "@types/web-push": "^3.6.4",
     "@typescript-eslint/eslint-plugin": "^8.19.1",
@ -79,7 +76,6 @@
     "jest": "^29.7.0",
     "nodemon": "^3.1.9",
     "prettier": "^3.4.2",
-    "sequelize-cli": "^6.6.2",
     "supertest": "^7.0.0",
     "ts-jest": "^29.2.5",
     "ts-node": "^10.9.2",
11
src/app.ts
@ -5,7 +5,7 @@ import dotenv from 'dotenv';
 import cookieParser from 'cookie-parser';
 import { UserService } from './services/user.service';
 import { SSOUserData } from './types/auth.types';
-import { sequelize } from './config/database';
 import { corsMiddleware } from './middlewares/cors.middleware';
 import { metricsMiddleware, createMetricsRouter } from './middlewares/metrics.middleware';
 import routes from './routes/index';
@ -21,13 +21,10 @@ dotenv.config();
 const app: express.Application = express();
 const userService = new UserService();
 
-// Initialize database connection
+// Database initialization
 const initializeDatabase = async () => {
-  try {
-    await sequelize.authenticate();
-  } catch (error) {
-    console.error('❌ Database connection failed:', error);
-  }
+  // MongoDB is connected via server.ts or separate config
+  // No Sequelize initialization needed
 };
 
 // Initialize database

@ -1,28 +1,56 @@
-import { Sequelize } from 'sequelize';
+import mongoose from 'mongoose';
 import dotenv from 'dotenv';
+import logger from '../utils/logger';
+import dns from 'dns';
 
 dotenv.config();
 
-const sequelize = new Sequelize({
-  host: process.env.DB_HOST || 'localhost',
-  port: parseInt(process.env.DB_PORT || '5432', 10),
-  database: process.env.DB_NAME || 're_workflow_db',
-  username: process.env.DB_USER || 'postgres',
-  password: process.env.DB_PASSWORD || 'postgres',
-  dialect: 'postgres',
-  logging: false, // Disable SQL query logging for cleaner console output
-  pool: {
-    min: parseInt(process.env.DB_POOL_MIN || '2', 10),
-    max: parseInt(process.env.DB_POOL_MAX || '10', 10),
-    acquire: 30000,
-    idle: 10000,
-  },
-  dialectOptions: {
-    ssl: process.env.DB_SSL === 'true' ? {
-      require: true,
-      rejectUnauthorized: false,
-    } : false,
-  },
-});
-
-export { sequelize };
+export const connectMongoDB = async () => {
+  try {
+    const mongoUri = process.env.MONGO_URI || process.env.MONGODB_URL || 'mongodb://localhost:27017/re_workflow_db';
+
+    // Workaround for querySrv ECONNREFUSED in specific network environments (e.g. some Windows setups/VPNs)
+    // Set DNS servers BEFORE any connection attempt to fix SRV resolution issues
+    if (mongoUri.startsWith('mongodb+srv://')) {
+      logger.info('[Database] Detected Atlas SRV URI, configuring DNS resolution...');
+      try {
+        // Set public DNS servers globally to fix Windows DNS resolution issues
+        dns.setServers(['8.8.8.8', '8.8.4.4', '1.1.1.1', '1.0.0.1']);
+        logger.info('[Database] DNS servers configured: Google DNS (8.8.8.8, 8.8.4.4) and Cloudflare DNS (1.1.1.1, 1.0.0.1)');
+
+        // Add a small delay to ensure DNS settings take effect
+        await new Promise(resolve => setTimeout(resolve, 100));
+      } catch (dnsErr) {
+        logger.warn('[Database] Failed to set public DNS servers:', dnsErr);
+      }
+    }
+
+    logger.info('[Database] Connecting to MongoDB...');
+    await mongoose.connect(mongoUri, {
+      serverSelectionTimeoutMS: 10000, // Increase timeout to 10 seconds
+      socketTimeoutMS: 45000,
+    });
+    logger.info('✅ MongoDB Connected Successfully');
+  } catch (error: any) {
+    logger.error('❌ MongoDB Connection Error:', error.message);
+    if (error.stack) {
+      logger.error('Stack trace:', error.stack);
+    }
+
+    // Provide helpful error messages
+    if (error.message.includes('querySrv ECONNREFUSED') || error.message.includes('ENOTFOUND')) {
+      logger.error('');
+      logger.error('🔍 DNS Resolution Failed. Possible solutions:');
+      logger.error('  1. Check your internet connection');
+      logger.error('  2. Verify the MongoDB Atlas cluster is running');
+      logger.error('  3. Try disabling VPN if you\'re using one');
+      logger.error('  4. Check Windows Firewall settings');
+      logger.error('  5. Verify your MongoDB Atlas connection string is correct');
+      logger.error('');
+    }
+
+    throw error; // Re-throw to stop server startup
+  }
+};
+
+export { mongoose };

@ -28,7 +28,7 @@ export const SYSTEM_CONFIG = {
 
   // Test mode for faster testing
   TEST_MODE: process.env.TAT_TEST_MODE === 'true',
-  TEST_TIME_MULTIPLIER: process.env.TAT_TEST_MODE === 'true' ? 1/60 : 1, // 1 hour = 1 minute in test mode
+  TEST_TIME_MULTIPLIER: process.env.TAT_TEST_MODE === 'true' ? 1 / 60 : 1, // 1 hour = 1 minute in test mode
 
   // Default TAT values by priority (in hours)
   DEFAULT_EXPRESS_TAT: parseInt(process.env.DEFAULT_EXPRESS_TAT || '24', 10),
@ -149,8 +149,8 @@ export async function getPublicConfig() {
   const { getConfigValue } = require('../services/configReader.service');
 
   // Get AI configuration from admin settings (database)
-  const aiEnabled = (await getConfigValue('AI_ENABLED', 'true'))?.toLowerCase() === 'true';
-  const remarkGenerationEnabled = (await getConfigValue('AI_REMARK_GENERATION_ENABLED', 'true'))?.toLowerCase() === 'true';
+  const aiEnabled = String(await getConfigValue('AI_ENABLED', 'true')).toLowerCase() === 'true';
+  const remarkGenerationEnabled = String(await getConfigValue('AI_REMARK_GENERATION_ENABLED', 'true')).toLowerCase() === 'true';
   const maxRemarkLength = parseInt(await getConfigValue('AI_MAX_REMARK_LENGTH', '2000') || '2000', 10);
 
   // Try to get AI service status (gracefully handle if not available)

@ -1,13 +1,14 @@
 import { Request, Response } from 'express';
-import { Holiday, HolidayType } from '@models/Holiday';
-import { holidayService } from '@services/holiday.service';
-import { activityTypeService } from '@services/activityType.service';
-import { sequelize } from '@config/database';
-import { QueryTypes, Op } from 'sequelize';
-import logger from '@utils/logger';
-import { initializeHolidaysCache, clearWorkingHoursCache } from '@utils/tatTimeUtils';
-import { clearConfigCache } from '@services/configReader.service';
-import { User, UserRole } from '@models/User';
+import { HolidayModel as Holiday, HolidayType } from '../models/mongoose/Holiday.schema';
+import { holidayMongoService as holidayService } from '../services/holiday.service';
+import { activityTypeService } from '../services/activityType.service';
+import { adminConfigMongoService } from '../services/adminConfig.service';
+import logger from '../utils/logger';
+import dayjs from 'dayjs';
+import { initializeHolidaysCache, clearWorkingHoursCache } from '../utils/tatTimeUtils';
+import { clearConfigCache } from '../services/configReader.service';
+import { UserModel as User, IUser } from '../models/mongoose/User.schema';
+import { UserRole } from '../types/user.types';
 
 /**
  * Get all holidays (with optional year filter)
@ -19,10 +20,13 @@ export const getAllHolidays = async (req: Request, res: Response): Promise<void>
 
     const holidays = await holidayService.getAllActiveHolidays(yearNum);
 
+    // Format response to match legacy structure
+    const formattedHolidays = holidays.map(mapToLegacyHoliday);
+
     res.json({
       success: true,
-      data: holidays,
-      count: holidays.length
+      data: formattedHolidays,
+      count: formattedHolidays.length
     });
   } catch (error) {
     logger.error('[Admin] Error fetching holidays:', error);
@ -49,13 +53,17 @@ export const getHolidayCalendar = async (req: Request, res: Response): Promise<void>
       return;
     }
 
-    const calendar = await holidayService.getHolidayCalendar(yearNum);
+    // Use getAllActiveHolidays to get full docs, then filter by year in memory or update service
+    // Service has getHolidayCalendar(year) which returns partial objects.
+    // Better to use getAllActiveHolidays(year) and map ourselves.
+    const holidays = await holidayService.getAllActiveHolidays(yearNum);
+    const formattedHolidays = holidays.map(mapToLegacyHoliday);
 
     res.json({
       success: true,
       year: yearNum,
-      holidays: calendar,
-      count: calendar.length
+      holidays: formattedHolidays,
+      count: formattedHolidays.length
     });
   } catch (error) {
     logger.error('[Admin] Error fetching holiday calendar:', error);
@ -103,22 +111,26 @@ export const createHoliday = async (req: Request, res: Response): Promise<void>
     const holiday = await holidayService.createHoliday({
       holidayDate,
       holidayName,
-      description,
-      holidayType: holidayType || HolidayType.ORGANIZATIONAL,
-      isRecurring: isRecurring || false,
-      recurrenceRule,
+      holidayType: (holidayType as any) || HolidayType.ORGANIZATIONAL,
+      year: new Date(holidayDate).getFullYear(),
       appliesToDepartments,
       appliesToLocations,
+      description,
+      isRecurring,
+      recurrenceRule,
       createdBy: userId
     });
 
     // Reload holidays cache
     await initializeHolidaysCache();
 
+    // Format response to match legacy structure
+    const legacyResponse = mapToLegacyHoliday(holiday);
+
     res.status(201).json({
       success: true,
       message: 'Holiday created successfully',
-      data: holiday
+      data: [legacyResponse] // Returning array as requested
     });
   } catch (error: any) {
     logger.error('[Admin] Error creating holiday:', error);
@ -129,6 +141,28 @@ export const createHoliday = async (req: Request, res: Response): Promise<void>
   }
 };
 
+/**
+ * Helper to map Mongoose document to Legacy JSON format
+ */
+const mapToLegacyHoliday = (holiday: any) => ({
+  holidayId: holiday._id,
+  holidayDate: dayjs(holiday.holidayDate).format('YYYY-MM-DD'),
+  holidayName: holiday.holidayName,
+  description: holiday.description || null,
+  isRecurring: holiday.isRecurring || false,
+  recurrenceRule: holiday.recurrenceRule || null,
+  holidayType: holiday.holidayType,
+  isActive: holiday.isActive !== undefined ? holiday.isActive : true,
+  appliesToDepartments: (holiday.appliesToDepartments && holiday.appliesToDepartments.length > 0) ? holiday.appliesToDepartments : null,
+  appliesToLocations: (holiday.appliesToLocations && holiday.appliesToLocations.length > 0) ? holiday.appliesToLocations : null,
+  createdBy: holiday.createdBy || null,
+  updatedBy: holiday.updatedBy || null,
+  createdAt: holiday.createdAt,
+  updatedAt: holiday.updatedAt,
+  created_at: holiday.createdAt,
+  updated_at: holiday.updatedAt
+});
+
 /**
  * Update a holiday
  */
@ -146,7 +180,7 @@ export const updateHoliday = async (req: Request, res: Response): Promise<void>
     const { holidayId } = req.params;
     const updates = req.body;
 
-    const holiday = await holidayService.updateHoliday(holidayId, updates, userId);
+    const holiday = await holidayService.updateHoliday(holidayId, updates);
 
     if (!holiday) {
       res.status(404).json({
@ -162,7 +196,7 @@ export const updateHoliday = async (req: Request, res: Response): Promise<void>
     res.json({
       success: true,
       message: 'Holiday updated successfully',
-      data: holiday
+      data: [mapToLegacyHoliday(holiday)] // Returning array for consistency
     });
   } catch (error: any) {
     logger.error('[Admin] Error updating holiday:', error);
@ -222,7 +256,7 @@ export const bulkImportHolidays = async (req: Request, res: Response): Promise<void>
       return;
     }
 
-    const result = await holidayService.bulkImportHolidays(holidays, userId);
+    const result = await holidayService.bulkImportHolidays(holidays);
 
     // Reload holidays cache
     await initializeHolidaysCache();
@ -259,35 +293,7 @@ export const getPublicConfigurations = async (req: Request, res: Response): Promise<void>
       return;
     }
 
-    let whereClause = '';
-    if (category) {
-      whereClause = `WHERE config_category = '${category}' AND is_sensitive = false`;
-    } else {
-      whereClause = `WHERE config_category IN ('DOCUMENT_POLICY', 'TAT_SETTINGS', 'WORKFLOW_SHARING', 'SYSTEM_SETTINGS') AND is_sensitive = false`;
-    }
-
-    const rawConfigurations = await sequelize.query(`
-      SELECT
-        config_key,
-        config_category,
-        config_value,
-        value_type,
-        display_name,
-        description
-      FROM admin_configurations
-      ${whereClause}
-      ORDER BY config_category, sort_order
-    `, { type: QueryTypes.SELECT });
-
-    // Map snake_case to camelCase for frontend
-    const configurations = (rawConfigurations as any[]).map((config: any) => ({
-      configKey: config.config_key,
-      configCategory: config.config_category,
-      configValue: config.config_value,
-      valueType: config.value_type,
-      displayName: config.display_name,
-      description: config.description
-    }));
+    const configurations = await adminConfigMongoService.getPublicConfigurations(category as string);
 
     res.json({
       success: true,
@ -310,55 +316,7 @@ export const getAllConfigurations = async (req: Request, res: Response): Promise<void>
   try {
     const { category } = req.query;
 
-    let whereClause = '';
-    if (category) {
-      whereClause = `WHERE config_category = '${category}'`;
-    }
-
-    const rawConfigurations = await sequelize.query(`
-      SELECT
-        config_id,
-        config_key,
-        config_category,
-        config_value,
-        value_type,
-        display_name,
-        description,
-        default_value,
-        is_editable,
-        is_sensitive,
-        validation_rules,
-        ui_component,
-        options,
-        sort_order,
-        requires_restart,
-        last_modified_at,
-        last_modified_by
-      FROM admin_configurations
-      ${whereClause}
-      ORDER BY config_category, sort_order
-    `, { type: QueryTypes.SELECT });
-
-    // Map snake_case to camelCase for frontend
-    const configurations = (rawConfigurations as any[]).map((config: any) => ({
-      configId: config.config_id,
-      configKey: config.config_key,
-      configCategory: config.config_category,
-      configValue: config.config_value,
-      valueType: config.value_type,
-      displayName: config.display_name,
-      description: config.description,
-      defaultValue: config.default_value,
-      isEditable: config.is_editable,
-      isSensitive: config.is_sensitive || false,
-      validationRules: config.validation_rules,
-      uiComponent: config.ui_component,
-      options: config.options,
-      sortOrder: config.sort_order,
-      requiresRestart: config.requires_restart || false,
-      lastModifiedAt: config.last_modified_at,
-      lastModifiedBy: config.last_modified_by
-    }));
+    const configurations = await adminConfigMongoService.getAllConfigurations(category as string);
 
     res.json({
       success: true,
@ -400,22 +358,9 @@ export const updateConfiguration = async (req: Request, res: Response): Promise<void>
     }
 
     // Update configuration
-    const result = await sequelize.query(`
-      UPDATE admin_configurations
-      SET
-        config_value = :configValue,
-        last_modified_by = :userId,
-        last_modified_at = NOW(),
-        updated_at = NOW()
-      WHERE config_key = :configKey
-        AND is_editable = true
-      RETURNING *
-    `, {
-      replacements: { configValue, userId, configKey },
-      type: QueryTypes.UPDATE
-    });
-
-    if (!result || (result[1] as any) === 0) {
+    const config = await adminConfigMongoService.updateConfig(configKey, configValue, userId);
+
+    if (!config) {
       res.status(404).json({
         success: false,
         error: 'Configuration not found or not editable'
@ -467,15 +412,15 @@ export const resetConfiguration = async (req: Request, res: Response): Promise<void>
   try {
     const { configKey } = req.params;
 
-    await sequelize.query(`
-      UPDATE admin_configurations
-      SET config_value = default_value,
-          updated_at = NOW()
-      WHERE config_key = :configKey
-    `, {
-      replacements: { configKey },
-      type: QueryTypes.UPDATE
-    });
+    const config = await adminConfigMongoService.resetConfig(configKey);
+
+    if (!config) {
+      res.status(404).json({
+        success: false,
+        error: 'Configuration not found'
+      });
+      return;
+    }
 
     // Clear config cache so reset values are used immediately
     clearConfigCache();
@ -533,7 +478,7 @@ export const updateUserRole = async (req: Request, res: Response): Promise<void>
     }
 
     // Find user
-    const user = await User.findByPk(userId);
+    const user = await User.findOne({ userId });
     if (!user) {
       res.status(404).json({
         success: false,
@ -606,8 +551,8 @@ export const getUsersByRole = async (req: Request, res: Response): Promise<void>
 
     // Handle role filtering
     if (role && role !== 'ALL' && role !== 'ELEVATED') {
-      const validRoles: UserRole[] = ['USER', 'MANAGEMENT', 'ADMIN'];
-      if (!validRoles.includes(role as UserRole)) {
+      const validRoles: string[] = ['USER', 'MANAGEMENT', 'ADMIN'];
+      if (!validRoles.includes(role as string)) {
        res.status(400).json({
          success: false,
          error: 'Invalid role. Must be USER, MANAGEMENT, ADMIN, ALL, or ELEVATED'
@ -617,61 +562,32 @@ export const getUsersByRole = async (req: Request, res: Response): Promise<void>
       whereClause.role = role;
     } else if (role === 'ELEVATED' || !role) {
       // Default: Show only ADMIN and MANAGEMENT (elevated users)
-      whereClause.role = { [Op.in]: ['ADMIN', 'MANAGEMENT'] };
+      whereClause.role = { $in: ['ADMIN', 'MANAGEMENT'] };
     }
     // If role === 'ALL', don't filter by role (show all users)
 
     // Get total count for pagination
-    const totalUsers = await User.count({ where: whereClause });
+    const totalUsers = await User.countDocuments(whereClause);
     const totalPages = Math.ceil(totalUsers / limitNum);
 
     // Get paginated users
-    const users = await User.findAll({
-      where: whereClause,
-      attributes: [
-        'userId',
-        'email',
-        'displayName',
-        'firstName',
-        'lastName',
-        'department',
-        'designation',
-        'role',
-        'manager',
-        'postalAddress',
-        'lastLogin',
-        'createdAt'
-      ],
-      order: [
-        ['role', 'ASC'], // ADMIN first, then MANAGEMENT, then USER
-        ['displayName', 'ASC']
-      ],
-      limit: limitNum,
-      offset: offset
-    });
+    const users = await User.find(whereClause)
+      .select('userId email displayName firstName lastName department designation role manager postalAddress lastLogin createdAt')
+      .sort({ role: 1, displayName: 1 })
+      .skip(offset)
+      .limit(limitNum);
 
     // Get role summary (across all users, not just current page)
-    const roleStats = await sequelize.query(`
-      SELECT
-        role,
-        COUNT(*) as count
-      FROM users
-      WHERE is_active = true
-      GROUP BY role
-      ORDER BY
-        CASE role
-          WHEN 'ADMIN' THEN 1
-          WHEN 'MANAGEMENT' THEN 2
-          WHEN 'USER' THEN 3
-        END
-    `, {
-      type: QueryTypes.SELECT
-    });
+    const roleStatsRaw = await User.aggregate([
+      { $match: { isActive: true } },
+      { $group: { _id: '$role', count: { $sum: 1 } } },
+      { $sort: { _id: 1 } }
+    ]);
 
     const summary = {
-      ADMIN: parseInt((roleStats.find((s: any) => s.role === 'ADMIN') as any)?.count || '0'),
-      MANAGEMENT: parseInt((roleStats.find((s: any) => s.role === 'MANAGEMENT') as any)?.count || '0'),
-      USER: parseInt((roleStats.find((s: any) => s.role === 'USER') as any)?.count || '0')
+      ADMIN: roleStatsRaw.find((s: any) => s._id === 'ADMIN')?.count || 0,
+      MANAGEMENT: roleStatsRaw.find((s: any) => s._id === 'MANAGEMENT')?.count || 0,
+      USER: roleStatsRaw.find((s: any) => s._id === 'USER')?.count || 0
     };
 
     res.json({
@ -708,29 +624,31 @@ export const getUsersByRole = async (req: Request, res: Response): Promise<void>
  */
 export const getRoleStatistics = async (req: Request, res: Response): Promise<void> => {
   try {
-    const stats = await sequelize.query(`
-      SELECT
-        role,
-        COUNT(*) as count,
-        COUNT(CASE WHEN is_active = true THEN 1 END) as active_count,
-        COUNT(CASE WHEN is_active = false THEN 1 END) as inactive_count
-      FROM users
-      GROUP BY role
-      ORDER BY
-        CASE role
-          WHEN 'ADMIN' THEN 1
-          WHEN 'MANAGEMENT' THEN 2
-          WHEN 'USER' THEN 3
-        END
-    `, {
-      type: QueryTypes.SELECT
-    });
+    const stats = await User.aggregate([
+      {
+        $group: {
+          _id: '$role',
+          count: { $sum: 1 },
+          activeCount: { $sum: { $cond: ['$isActive', 1, 0] } },
+          inactiveCount: { $sum: { $cond: ['$isActive', 0, 1] } }
+        }
+      },
+      { $sort: { _id: 1 } }
+    ]);
+
+    // Format for frontend
+    const formattedStats = stats.map((stat: any) => ({
+      role: stat._id,
+      count: stat.count,
+      active_count: stat.activeCount,
+      inactive_count: stat.inactiveCount
+    }));
 
     res.json({
       success: true,
       data: {
-        statistics: stats,
-        total: stats.reduce((sum: number, stat: any) => sum + parseInt(stat.count), 0)
+        statistics: formattedStats,
+        total: formattedStats.reduce((sum: number, stat: any) => sum + stat.count, 0)
       }
     });
   } catch (error) {
@ -777,7 +695,7 @@ export const assignRoleByEmail = async (req: Request, res: Response): Promise<void>
     logger.info(`[Admin] Assigning role ${role} to ${email} by user ${currentUserId}`);
 
     // First, check if user already exists in our database
-    let user = await User.findOne({ where: { email } });
+    let user: IUser | null = await User.findOne({ email });
 
     if (!user) {
       // User doesn't exist, need to fetch from Okta and create
@ -800,12 +718,11 @@ export const assignRoleByEmail = async (req: Request, res: Response): Promise<void>
       }
 
       // Create user in our database via centralized userService with all fields including manager
-      const ensured = await userService.createOrUpdateUser({
+      user = (await userService.createOrUpdateUser({
         ...oktaUserData,
-        role, // Set the assigned role
+        role: role as any, // Set the assigned role
         isActive: true, // Ensure user is active
-      });
-      user = ensured;
+      })) as IUser;
 
       logger.info(`[Admin] Created new user ${email} with role ${role} (manager: ${oktaUserData.manager || 'N/A'})`);
     } catch (oktaError: any) {
@ -839,27 +756,36 @@ export const assignRoleByEmail = async (req: Request, res: Response): Promise<void>
 
       if (oktaUserData) {
         // Sync all fields from Okta including the new role using centralized method
-        const updated = await userService.createOrUpdateUser({
+        user = (await userService.createOrUpdateUser({
           ...oktaUserData, // Includes all fields: manager, jobTitle, postalAddress, etc.
-          role, // Set the new role
+          role: role as any, // Set the new role
           isActive: true, // Ensure user is active
-        });
-        user = updated;
+        })) as IUser;
 
         logger.info(`[Admin] Synced user ${email} from Okta (manager: ${oktaUserData.manager || 'N/A'}) and updated role from ${previousRole} to ${role}`);
       } else {
         // Okta user not found, just update role
-        await user.update({ role });
+        user.role = role as any;
+        await user.save();
         logger.info(`[Admin] Updated user ${email} role from ${previousRole} to ${role} (Okta data not available)`);
       }
     } catch (oktaError: any) {
       // If Okta fetch fails, just update the role
       logger.warn(`[Admin] Failed to fetch Okta data for ${email}, updating role only:`, oktaError.message);
-      await user.update({ role });
+      user.role = role as any;
+      await user.save();
       logger.info(`[Admin] Updated user ${email} role from ${previousRole} to ${role} (Okta sync failed)`);
     }
   }
 
+  if (!user) {
+    res.status(500).json({
+      success: false,
+      error: 'Failed to create or update user'
+    });
+    return;
+  }
+
   res.json({
     success: true,
     message: `Successfully assigned ${role} role to ${user.displayName || email}`,
@ -1049,4 +975,3 @@ export const deleteActivityType = async (req: Request, res: Response): Promise<void>
   });
 }
 };
-

@ -1,15 +1,15 @@
 import { Request, Response } from 'express';
 import { ApprovalService } from '@services/approval.service';
-import { DealerClaimApprovalService } from '@services/dealerClaimApproval.service';
-import { ApprovalLevel } from '@models/ApprovalLevel';
-import { WorkflowRequest } from '@models/WorkflowRequest';
+import { DealerClaimApprovalMongoService } from '@services/dealerClaimApproval.service';
+import { ApprovalLevelModel as ApprovalLevel } from '../models/mongoose/ApprovalLevel.schema';
+import { WorkflowRequestModel as WorkflowRequest } from '../models/mongoose/WorkflowRequest.schema';
 import { validateApprovalAction } from '@validators/approval.validator';
 import { ResponseHandler } from '@utils/responseHandler';
 import type { AuthenticatedRequest } from '../types/express';
 import { getRequestMetadata } from '@utils/requestUtils';
 
 const approvalService = new ApprovalService();
-const dealerClaimApprovalService = new DealerClaimApprovalService();
+const dealerClaimApprovalService = new DealerClaimApprovalMongoService();
 
 export class ApprovalController {
   async approveLevel(req: AuthenticatedRequest, res: Response): Promise<void> {
@ -18,13 +18,13 @@ export class ApprovalController {
       const validatedData = validateApprovalAction(req.body);
 
       // Determine which service to use based on workflow type
-      const level = await ApprovalLevel.findByPk(levelId);
+      const level = await ApprovalLevel.findOne({ levelId });
       if (!level) {
         ResponseHandler.notFound(res, 'Approval level not found');
         return;
       }
 
-      const workflow = await WorkflowRequest.findByPk(level.requestId);
+      const workflow = await WorkflowRequest.findOne({ requestId: level.requestId });
       if (!workflow) {
         ResponseHandler.notFound(res, 'Workflow not found');
         return;
@ -75,8 +75,10 @@ export class ApprovalController {
     try {
       const { id } = req.params;
 
-      // Determine which service to use based on workflow type
-      const workflow = await WorkflowRequest.findByPk(id);
+      // Determine which service to use based on workflow type (handle both requestId and requestNumber)
+      const workflow = await WorkflowRequest.findOne({
+        $or: [{ requestId: id }, { requestNumber: id }]
+      });
       if (!workflow) {
         ResponseHandler.notFound(res, 'Workflow not found');
         return;
@ -103,8 +105,10 @@ export class ApprovalController {
     try {
       const { id } = req.params;
 
-      // Determine which service to use based on workflow type
-      const workflow = await WorkflowRequest.findByPk(id);
+      // Determine which service to use based on workflow type (handle both requestId and requestNumber)
+      const workflow = await WorkflowRequest.findOne({
+        $or: [{ requestId: id }, { requestNumber: id }]
+      });
       if (!workflow) {
         ResponseHandler.notFound(res, 'Workflow not found');
         return;

@ -4,7 +4,7 @@ import { validateSSOCallback, validateRefreshToken, validateTokenExchange, valid
 import { ResponseHandler } from '../utils/responseHandler';
 import type { AuthenticatedRequest } from '../types/express';
 import logger from '../utils/logger';
-import { activityService, SYSTEM_EVENT_REQUEST_ID } from '../services/activity.service';
+import { activityMongoService as activityService, SYSTEM_EVENT_REQUEST_ID } from '../services/activity.service';
 import { getRequestMetadata } from '../utils/requestUtils';
 
 export class AuthController {
|||||||
@@ -1,9 +1,9 @@
 import { Request, Response } from 'express';
-import { WorkflowRequest, ApprovalLevel, WorkNote, Document, Activity, ConclusionRemark } from '@models/index';
-import { aiService } from '@services/ai.service';
-import { activityService } from '@services/activity.service';
-import logger from '@utils/logger';
-import { getRequestMetadata } from '@utils/requestUtils';
+import { WorkflowRequest, ApprovalLevel, WorkNote, Document, Activity, ConclusionRemark, User } from '../models'; // Fixed imports
+import { aiService } from '../services/ai.service';
+import { activityMongoService as activityService } from '../services/activity.service';
+import logger from '../utils/logger';
+import { getRequestMetadata } from '../utils/requestUtils';

 export class ConclusionController {
   /**
@@ -15,19 +15,16 @@ export class ConclusionController {
       const { requestId } = req.params;
       const userId = (req as any).user?.userId;

-      // Fetch request with all related data
-      const request = await WorkflowRequest.findOne({
-        where: { requestId },
-        include: [
-          { association: 'initiator', attributes: ['userId', 'displayName', 'email'] }
-        ]
-      });
+      // Fetch request
+      // Mongoose doesn't support 'include' directly like Sequelize.
+      // We'll fetch the request first.
+      const request = await WorkflowRequest.findOne({ requestId });

       if (!request) {
         return res.status(404).json({ error: 'Request not found' });
       }

-      // Check if user is the initiator
+      // Check if user is the initiator (compare userId strings)
       if ((request as any).initiatorId !== userId) {
         return res.status(403).json({ error: 'Only the initiator can generate conclusion remarks' });
       }
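Sequelize's `include` eager-load has no direct equivalent here because users are keyed by a UUID string (`initiatorId`), not an ObjectId ref, so the branch fetches the initiator in a second query. If the schema declared a real `ref`, Mongoose's `populate` could collapse that into one call; a hedged sketch of both forms (the `ref` wiring is hypothetical, not shown in this diff):

```ts
// What the migrated code does: two explicit queries, joined in application code.
const request = await WorkflowRequest.findOne({ requestId });
const initiator = request
  ? await User.findOne({ userId: (request as any).initiatorId })
  : null;

// Hypothetical alternative, only if the schema had
// initiator: { type: Schema.Types.ObjectId, ref: 'User' }:
// const request = await WorkflowRequest.findOne({ requestId })
//   .populate({ path: 'initiator', select: 'userId displayName email' });
```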
@@ -71,27 +68,23 @@ export class ConclusionController {
       }

       // Gather context for AI generation
-      const approvalLevels = await ApprovalLevel.findAll({
-        where: { requestId },
-        order: [['levelNumber', 'ASC']]
-      });
+      // Mongoose: find({ requestId }), sort by levelNumber
+      const approvalLevels = await ApprovalLevel.find({ requestId })
+        .sort({ levelNumber: 1 });

-      const workNotes = await WorkNote.findAll({
-        where: { requestId },
-        order: [['createdAt', 'ASC']],
-        limit: 20 // Last 20 work notes - keep full context for better conclusions
-      });
+      const workNotes = await WorkNote.find({ requestId })
+        .sort({ createdAt: 1 })
+        .limit(20);

-      const documents = await Document.findAll({
-        where: { requestId },
-        order: [['uploadedAt', 'DESC']]
-      });
+      const documents = await Document.find({ requestId })
+        .sort({ uploadedAt: -1 });

-      const activities = await Activity.findAll({
-        where: { requestId },
-        order: [['createdAt', 'ASC']],
-        limit: 50 // Last 50 activities - keep full context for better conclusions
-      });
+      const activities = await Activity.find({ requestId })
+        .sort({ createdAt: 1 })
+        .limit(50);
+      // Fetch initiator details manually since we can't 'include'
+      const initiator = await User.findOne({ userId: (request as any).initiatorId });

       // Build context object
       const context = {
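The list queries above follow a mechanical Sequelize-to-Mongoose mapping. As a reference, using models from this file:

```ts
// Sequelize                                Mongoose
// Model.findAll({                          Model.find(filter)
//   where: filter,                           .sort({ field: 1 })   // 1 = ASC, -1 = DESC
//   order: [['field', 'ASC']],               .limit(n)
//   limit: n
// })
const workNotes = await WorkNote.find({ requestId })
  .sort({ createdAt: 1 })   // equivalent of [['createdAt', 'ASC']]
  .limit(20);
```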
@@ -138,7 +131,7 @@ export class ConclusionController {
       const aiResult = await aiService.generateConclusionRemark(context);

       // Check if conclusion already exists
-      let conclusionInstance = await ConclusionRemark.findOne({ where: { requestId } });
+      let conclusionInstance = await ConclusionRemark.findOne({ requestId });

       const conclusionData = {
         aiGeneratedRemark: aiResult.remark,
@@ -160,19 +153,21 @@ export class ConclusionController {

       if (conclusionInstance) {
         // Update existing conclusion (allow regeneration)
-        await conclusionInstance.update(conclusionData as any);
+        // Mongoose document update
+        Object.assign(conclusionInstance, conclusionData);
+        await conclusionInstance.save();
         logger.info(`[Conclusion] ✅ AI conclusion regenerated for request ${requestId}`);
       } else {
         // Create new conclusion
         conclusionInstance = await ConclusionRemark.create({
           requestId,
           ...conclusionData,
-          finalRemark: null,
-          editedBy: null,
+          finalRemark: undefined,
+          editedBy: undefined,
           isEdited: false,
           editCount: 0,
-          finalizedAt: null
-        } as any);
+          finalizedAt: undefined
+        });
         logger.info(`[Conclusion] ✅ AI conclusion generated for request ${requestId}`);
       }
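Two idioms in the hunk above are worth spelling out. Sequelize's `instance.update(data)` persists in one call; the Mongoose counterpart is mutate-then-`save()`. And the `null` → `undefined` switch is deliberate: Mongoose omits `undefined` paths from the write, whereas `null` is stored explicitly. A short sketch:

```ts
// Sequelize: await conclusionInstance.update(conclusionData);
// Mongoose:  mutate the hydrated document, then save (runs validators,
//            persists the changed paths).
Object.assign(conclusionInstance, conclusionData);
await conclusionInstance.save();

// On create:
// { finalizedAt: undefined } -> the field is simply absent in MongoDB
// { finalizedAt: null }      -> the field is stored with an explicit null
```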
@@ -181,7 +176,7 @@ export class ConclusionController {
       await activityService.log({
         requestId,
         type: 'ai_conclusion_generated',
-        user: { userId, name: (request as any).initiator?.displayName || 'Initiator' },
+        user: { userId, name: initiator?.displayName || 'Initiator' },
         timestamp: new Date().toISOString(),
         action: 'AI Conclusion Generated',
         details: 'AI-powered conclusion remark generated for review',
@@ -192,7 +187,7 @@ export class ConclusionController {
       return res.status(200).json({
         message: 'Conclusion generated successfully',
         data: {
-          conclusionId: (conclusionInstance as any).conclusionId,
+          conclusionId: (conclusionInstance as any).conclusionId || (conclusionInstance as any)._id,
           aiGeneratedRemark: aiResult.remark,
           keyDiscussionPoints: aiResult.keyPoints,
           confidence: aiResult.confidence,
@@ -205,8 +200,8 @@ export class ConclusionController {

       // Provide helpful error messages
       const isConfigError = error.message?.includes('not configured') ||
                             error.message?.includes('not available') ||
                             error.message?.includes('not initialized');

       return res.status(isConfigError ? 503 : 500).json({
         error: isConfigError ? 'AI service not configured' : 'Failed to generate conclusion',
@@ -231,7 +226,7 @@ export class ConclusionController {
       }

       // Fetch request
-      const request = await WorkflowRequest.findOne({ where: { requestId } });
+      const request = await WorkflowRequest.findOne({ requestId });
       if (!request) {
         return res.status(404).json({ error: 'Request not found' });
       }
@@ -242,7 +237,7 @@ export class ConclusionController {
       }

       // Find conclusion
-      const conclusion = await ConclusionRemark.findOne({ where: { requestId } });
+      const conclusion = await ConclusionRemark.findOne({ requestId });
       if (!conclusion) {
         return res.status(404).json({ error: 'Conclusion not found. Generate it first.' });
       }
@@ -250,12 +245,13 @@ export class ConclusionController {
       // Update conclusion
       const wasEdited = (conclusion as any).aiGeneratedRemark !== finalRemark;

-      await conclusion.update({
-        finalRemark: finalRemark,
-        editedBy: userId,
-        isEdited: wasEdited,
-        editCount: wasEdited ? (conclusion as any).editCount + 1 : (conclusion as any).editCount
-      } as any);
+      conclusion.finalRemark = finalRemark;
+      conclusion.editedBy = userId;
+      conclusion.isEdited = wasEdited;
+      if (wasEdited) {
+        conclusion.editCount = ((conclusion as any).editCount || 0) + 1;
+      }
+      await conclusion.save();

       logger.info(`[Conclusion] Updated conclusion for request ${requestId} (edited: ${wasEdited})`);
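One caveat on the rewritten update: read-modify-`save()` can lose an increment if two edits race. MongoDB can apply the counter change atomically; a sketch of an alternative using `findOneAndUpdate` with `$inc` (not what this branch does):

```ts
const updated = await ConclusionRemark.findOneAndUpdate(
  { requestId },
  {
    $set: { finalRemark, editedBy: userId, isEdited: wasEdited },
    // Only bump the counter when the remark actually changed.
    ...(wasEdited ? { $inc: { editCount: 1 } } : {}),
  },
  { new: true } // return the post-update document
);
```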
@@ -284,17 +280,15 @@ export class ConclusionController {
       }

       // Fetch request
-      const request = await WorkflowRequest.findOne({
-        where: { requestId },
-        include: [
-          { association: 'initiator', attributes: ['userId', 'displayName', 'email'] }
-        ]
-      });
+      const request = await WorkflowRequest.findOne({ requestId });

       if (!request) {
         return res.status(404).json({ error: 'Request not found' });
       }

+      // Fetch initiator manually
+      const initiator = await User.findOne({ userId: (request as any).initiatorId });

       // Check if user is the initiator
       if ((request as any).initiatorId !== userId) {
         return res.status(403).json({ error: 'Only the initiator can finalize conclusion remarks' });
@@ -306,15 +300,15 @@ export class ConclusionController {
       }

       // Find or create conclusion
-      let conclusion = await ConclusionRemark.findOne({ where: { requestId } });
+      let conclusion = await ConclusionRemark.findOne({ requestId });

       if (!conclusion) {
         // Create if doesn't exist (manual conclusion without AI)
         conclusion = await ConclusionRemark.create({
           requestId,
-          aiGeneratedRemark: null,
-          aiModelUsed: null,
-          aiConfidenceScore: null,
+          aiGeneratedRemark: undefined,
+          aiModelUsed: undefined,
+          aiConfidenceScore: undefined,
           finalRemark: finalRemark,
           editedBy: userId,
           isEdited: false,
@@ -322,28 +316,28 @@ export class ConclusionController {
           approvalSummary: {},
           documentSummary: {},
           keyDiscussionPoints: [],
-          generatedAt: null,
+          generatedAt: undefined,
           finalizedAt: new Date()
-        } as any);
+        });
       } else {
         // Update existing conclusion
         const wasEdited = (conclusion as any).aiGeneratedRemark !== finalRemark;

-        await conclusion.update({
-          finalRemark: finalRemark,
-          editedBy: userId,
-          isEdited: wasEdited,
-          editCount: wasEdited ? (conclusion as any).editCount + 1 : (conclusion as any).editCount,
-          finalizedAt: new Date()
-        } as any);
+        conclusion.finalRemark = finalRemark;
+        conclusion.editedBy = userId;
+        conclusion.isEdited = wasEdited;
+        if (wasEdited) {
+          conclusion.editCount = ((conclusion as any).editCount || 0) + 1;
+        }
+        conclusion.finalizedAt = new Date();
+        await conclusion.save();
       }

       // Update request status to CLOSED
-      await request.update({
-        status: 'CLOSED',
-        conclusionRemark: finalRemark,
-        closureDate: new Date()
-      } as any);
+      request.status = 'CLOSED';
+      (request as any).conclusionRemark = finalRemark;
+      (request as any).closureDate = new Date();
+      await request.save();

       logger.info(`[Conclusion] ✅ Request ${requestId} finalized and closed`);
@@ -351,7 +345,7 @@ export class ConclusionController {
       // Since the initiator is finalizing, this should always succeed
       let summaryId = null;
       try {
-        const { summaryService } = await import('@services/summary.service');
+        const { summaryService } = await import('../services/summary.service');
         const userRole = (req as any).user?.role || (req as any).auth?.role;
         const summary = await summaryService.createSummary(requestId, userId, { userRole });
         summaryId = (summary as any).summaryId;
@@ -367,10 +361,10 @@ export class ConclusionController {
       await activityService.log({
         requestId,
         type: 'closed',
-        user: { userId, name: (request as any).initiator?.displayName || 'Initiator' },
+        user: { userId, name: initiator?.displayName || 'Initiator' },
         timestamp: new Date().toISOString(),
         action: 'Request Closed',
-        details: `Request closed with conclusion remark by ${(request as any).initiator?.displayName}`,
+        details: `Request closed with conclusion remark by ${initiator?.displayName}`,
         ipAddress: requestMeta.ipAddress,
         userAgent: requestMeta.userAgent
       });
@@ -378,7 +372,7 @@ export class ConclusionController {
       return res.status(200).json({
         message: 'Request finalized and closed successfully',
         data: {
-          conclusionId: (conclusion as any).conclusionId,
+          conclusionId: (conclusion as any).conclusionId || (conclusion as any)._id,
           requestNumber: (request as any).requestNumber,
           status: 'CLOSED',
           finalRemark: finalRemark,
@@ -400,20 +394,31 @@ export class ConclusionController {
     try {
       const { requestId } = req.params;

-      const conclusion = await ConclusionRemark.findOne({
-        where: { requestId },
-        include: [
-          { association: 'editor', attributes: ['userId', 'displayName', 'email'] }
-        ]
-      });
+      const conclusion = await ConclusionRemark.findOne({ requestId });

       if (!conclusion) {
         return res.status(404).json({ error: 'Conclusion not found' });
       }

+      // Manually fetch editor if needed
+      let editor = null;
+      if (conclusion.editedBy) {
+        editor = await User.findOne({ userId: conclusion.editedBy });
+      }
+
+      // Append editor info to result if needed, or just return conclusion
+      const result = (conclusion as any).toJSON ? (conclusion as any).toJSON() : conclusion;
+      if (editor) {
+        result.editor = {
+          userId: editor.userId,
+          displayName: editor.displayName,
+          email: editor.email
+        };
+      }

       return res.status(200).json({
         message: 'Conclusion retrieved successfully',
-        data: conclusion
+        data: result
       });
     } catch (error: any) {
       logger.error('[Conclusion] Error getting conclusion:', error);
@@ -423,4 +428,3 @@ export class ConclusionController {
 }

 export const conclusionController = new ConclusionController();
-
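The `toJSON`-then-attach step above works; when the document is only being serialized, `.lean()` is the lighter option, since it skips Mongoose hydration and returns a plain object that extra response fields can be attached to directly. A sketch of that alternative, using the same models:

```ts
const result = await ConclusionRemark.findOne({ requestId }).lean();
if (result?.editedBy) {
  const editor = await User.findOne({ userId: result.editedBy })
    .select('userId displayName email')
    .lean();
  if (editor) (result as any).editor = editor;
}
```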
@@ -1,12 +1,12 @@
 import { Request, Response } from 'express';
-import { DashboardService } from '../services/dashboard.service';
+import { DashboardMongoService, dashboardMongoService } from '../services/dashboard.service';
 import logger from '@utils/logger';

 export class DashboardController {
-  private dashboardService: DashboardService;
+  private dashboardService: DashboardMongoService = dashboardMongoService;

   constructor() {
-    this.dashboardService = new DashboardService();
+    // Service is now injected via import singleton
   }

   /**
@@ -646,4 +646,3 @@ export class DashboardController {
     }
   }
 }
-
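The DashboardController change swaps per-instance construction for a module-level singleton: the service is built once at import time and every consumer shares it. Sketched, with the class body elided:

```ts
// dashboard.service.ts
export class DashboardMongoService {
  // ... aggregation/query methods ...
}
// Node caches the module, so this constructor runs exactly once per process.
export const dashboardMongoService = new DashboardMongoService();
```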
@@ -1,11 +1,10 @@
 import { Request, Response } from 'express';
 import type { AuthenticatedRequest } from '../types/express';
-import { DealerClaimService } from '../services/dealerClaim.service';
+import { DealerClaimMongoService } from '../services/dealerClaim.service';
 import { ResponseHandler } from '../utils/responseHandler';
 import logger from '../utils/logger';
 import { gcsStorageService } from '../services/gcsStorage.service';
-import { Document } from '../models/Document';
-import { InternalOrder } from '../models/InternalOrder';
+import { Document, InternalOrder, WorkflowRequest } from '../models'; // Fixed imports
 import { constants } from '../config/constants';
 import { sapIntegrationService } from '../services/sapIntegration.service';
 import fs from 'fs';
@@ -13,7 +12,7 @@ import path from 'path';
 import crypto from 'crypto';

 export class DealerClaimController {
-  private dealerClaimService = new DealerClaimService();
+  private dealerClaimService = new DealerClaimMongoService();

   /**
    * Create a new dealer claim request
@@ -121,11 +120,11 @@ export class DealerClaimController {
       return uuidRegex.test(id);
     };

-    const { WorkflowRequest } = await import('../models/WorkflowRequest');
+    // Use WorkflowRequest from imports (Mongoose model)
     if (isUuid(identifier)) {
-      return await WorkflowRequest.findByPk(identifier);
+      return await WorkflowRequest.findOne({ requestId: identifier });
     } else {
-      return await WorkflowRequest.findOne({ where: { requestNumber: identifier } });
+      return await WorkflowRequest.findOne({ requestNumber: identifier });
     }
   }
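`findWorkflowByIdentifier` routes a path parameter by shape: UUIDs hit `requestId`, everything else is treated as a `requestNumber`. Condensed (the regex shown is the standard 8-4-4-4-12 UUID form; the exact one in the file is outside this excerpt):

```ts
const UUID_RE = /^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$/i;

async function findWorkflowByIdentifier(identifier: string) {
  return UUID_RE.test(identifier)
    ? WorkflowRequest.findOne({ requestId: identifier })
    : WorkflowRequest.findOne({ requestNumber: identifier });
}
```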
@@ -312,8 +311,9 @@ export class DealerClaimController {

       const extension = path.extname(file.originalname).replace('.', '').toLowerCase();

-      // Save to documents table
+      // Save to documents table (Mongoose)
       const doc = await Document.create({
+        documentId: crypto.randomUUID(), // Generate UUID if model requires it and doesn't auto-gen
         requestId,
         uploadedBy: userId,
         fileName: path.basename(file.filename || file.originalname),
@@ -332,10 +332,11 @@ export class DealerClaimController {
         parentDocumentId: null as any,
         isDeleted: false,
         downloadCount: 0,
-      } as any);
+        uploadedAt: new Date()
+      });

       completionDocuments.push({
-        documentId: doc.documentId,
+        documentId: (doc as any).documentId,
         name: file.originalname,
         url: uploadResult.storageUrl,
         size: file.size,
@@ -373,6 +374,7 @@ export class DealerClaimController {

       // Save to documents table
       const doc = await Document.create({
+        documentId: crypto.randomUUID(),
         requestId,
         uploadedBy: userId,
         fileName: path.basename(file.filename || file.originalname),
@@ -391,10 +393,11 @@ export class DealerClaimController {
         parentDocumentId: null as any,
         isDeleted: false,
         downloadCount: 0,
-      } as any);
+        uploadedAt: new Date()
+      });

       activityPhotos.push({
-        documentId: doc.documentId,
+        documentId: (doc as any).documentId,
         name: file.originalname,
         url: uploadResult.storageUrl,
         size: file.size,
@@ -433,6 +436,7 @@ export class DealerClaimController {

       // Save to documents table
       const doc = await Document.create({
+        documentId: crypto.randomUUID(), // UUID gen
         requestId,
         uploadedBy: userId,
         fileName: path.basename(file.filename || file.originalname),
@@ -451,10 +455,11 @@ export class DealerClaimController {
         parentDocumentId: null as any,
         isDeleted: false,
         downloadCount: 0,
-      } as any);
+        uploadedAt: new Date()
+      });

       invoicesReceipts.push({
-        documentId: doc.documentId,
+        documentId: (doc as any).documentId,
         name: file.originalname,
         url: uploadResult.storageUrl,
         size: file.size,
@@ -493,6 +498,7 @@ export class DealerClaimController {

       // Save to documents table
       const doc = await Document.create({
+        documentId: crypto.randomUUID(), // UUID gen
         requestId,
         uploadedBy: userId,
         fileName: path.basename(attendanceSheetFile.filename || attendanceSheetFile.originalname),
@@ -511,10 +517,11 @@ export class DealerClaimController {
         parentDocumentId: null as any,
         isDeleted: false,
         downloadCount: 0,
-      } as any);
+        uploadedAt: new Date()
+      });

       attendanceSheet = {
-        documentId: doc.documentId,
+        documentId: (doc as any).documentId,
         name: attendanceSheetFile.originalname,
         url: uploadResult.storageUrl,
         size: attendanceSheetFile.size,
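All four upload paths now pass `documentId: crypto.randomUUID()` by hand, as the inline comment concedes ("if model requires it and doesn't auto-gen"). A schema-level default would remove the repetition; a hypothetical excerpt of what the Document schema could declare (the actual schema is not part of this diff):

```ts
import { randomUUID } from 'crypto';
import { Schema, model } from 'mongoose';

const DocumentSchema = new Schema({
  // Generated per document unless the caller supplies one.
  documentId: { type: String, default: () => randomUUID(), unique: true },
  requestId: { type: String, required: true, index: true },
  uploadedAt: { type: Date, default: Date.now },
  // ... remaining fields ...
});

export const DocumentModel = model('Document', DocumentSchema);
```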
@@ -659,7 +666,7 @@ export class DealerClaimController {
       );

       // Fetch and return the updated IO details from database
-      const updatedIO = await InternalOrder.findOne({ where: { requestId } });
+      const updatedIO = await InternalOrder.findOne({ requestId });

       if (updatedIO) {
         return ResponseHandler.success(res, {
@@ -803,125 +810,4 @@ export class DealerClaimController {
       return ResponseHandler.error(res, 'Failed to update credit note details', 500, errorMessage);
     }
   }
-
-  /**
-   * Send credit note to dealer and auto-approve Step 8
-   * POST /api/v1/dealer-claims/:requestId/credit-note/send
-   * Accepts either UUID or requestNumber
-   */
-  async sendCreditNoteToDealer(
-    req: AuthenticatedRequest,
-    res: Response
-  ): Promise<void> {
-    try {
-      const identifier = req.params.requestId; // Can be UUID or requestNumber
-      const userId = req.user?.userId;
-      if (!userId) {
-        return ResponseHandler.error(res, 'Unauthorized', 401);
-      }
-
-      // Find workflow to get actual UUID
-      const workflow = await this.findWorkflowByIdentifier(identifier);
-      if (!workflow) {
-        return ResponseHandler.error(res, 'Workflow request not found', 404);
-      }
-
-      const requestId = (workflow as any).requestId || (workflow as any).request_id;
-      if (!requestId) {
-        return ResponseHandler.error(res, 'Invalid workflow request', 400);
-      }
-
-      await this.dealerClaimService.sendCreditNoteToDealer(requestId, userId);
-
-      return ResponseHandler.success(res, { message: 'Credit note sent to dealer and Step 8 approved successfully' }, 'Credit note sent');
-    } catch (error) {
-      const errorMessage = error instanceof Error ? error.message : 'Unknown error';
-      logger.error('[DealerClaimController] Error sending credit note to dealer:', error);
-      return ResponseHandler.error(res, 'Failed to send credit note to dealer', 500, errorMessage);
-    }
-  }
-
-  /**
-   * Test SAP Budget Blocking (for testing/debugging)
-   * POST /api/v1/dealer-claims/test/sap-block
-   *
-   * This endpoint allows direct testing of SAP budget blocking without creating a full request
-   */
-  async testSapBudgetBlock(req: AuthenticatedRequest, res: Response): Promise<void> {
-    try {
-      const userId = req.user?.userId;
-      if (!userId) {
-        return ResponseHandler.error(res, 'Unauthorized', 401);
-      }
-
-      const { ioNumber, amount, requestNumber } = req.body;
-
-      // Validation
-      if (!ioNumber || !amount) {
-        return ResponseHandler.error(res, 'Missing required fields: ioNumber and amount are required', 400);
-      }
-
-      const blockAmount = parseFloat(amount);
-      if (isNaN(blockAmount) || blockAmount <= 0) {
-        return ResponseHandler.error(res, 'Amount must be a positive number', 400);
-      }
-
-      logger.info(`[DealerClaimController] Testing SAP budget block:`, {
-        ioNumber,
-        amount: blockAmount,
-        requestNumber: requestNumber || 'TEST-REQUEST',
-        userId
-      });
-
-      // First validate IO number
-      const ioValidation = await sapIntegrationService.validateIONumber(ioNumber);
-
-      if (!ioValidation.isValid) {
-        return ResponseHandler.error(res, `Invalid IO number: ${ioValidation.error || 'IO number not found in SAP'}`, 400);
-      }
-
-      logger.info(`[DealerClaimController] IO validation successful:`, {
-        ioNumber,
-        availableBalance: ioValidation.availableBalance
-      });
-
-      // Block budget in SAP
-      const testRequestNumber = requestNumber || `TEST-${Date.now()}`;
-      const blockResult = await sapIntegrationService.blockBudget(
-        ioNumber,
-        blockAmount,
-        testRequestNumber,
-        `Test budget block for ${testRequestNumber}`
-      );
-
-      if (!blockResult.success) {
-        return ResponseHandler.error(res, `Failed to block budget in SAP: ${blockResult.error}`, 500);
-      }
-
-      // Return detailed response
-      return ResponseHandler.success(res, {
-        message: 'SAP budget block test successful',
-        ioNumber,
-        requestedAmount: blockAmount,
-        availableBalance: ioValidation.availableBalance,
-        sapResponse: {
-          success: blockResult.success,
-          blockedAmount: blockResult.blockedAmount,
-          remainingBalance: blockResult.remainingBalance,
-          sapDocumentNumber: blockResult.blockId || null,
-          error: blockResult.error || null
-        },
-        calculatedRemainingBalance: ioValidation.availableBalance - blockResult.blockedAmount,
-        validation: {
-          isValid: ioValidation.isValid,
-          availableBalance: ioValidation.availableBalance,
-          error: ioValidation.error || null
-        }
-      }, 'SAP budget block test completed');
-    } catch (error: any) {
-      logger.error('[DealerClaimController] Error testing SAP budget block:', error);
-      return ResponseHandler.error(res, error.message || 'Failed to test SAP budget block', 500);
-    }
-  }
 }
@@ -1,18 +1,18 @@

 import { Request, Response } from 'express';
 import crypto from 'crypto';
 import path from 'path';
 import fs from 'fs';
-import { Document } from '@models/Document';
-import { User } from '@models/User';
-import { WorkflowRequest } from '@models/WorkflowRequest';
-import { Participant } from '@models/Participant';
-import { ApprovalLevel } from '@models/ApprovalLevel';
-import { Op } from 'sequelize';
+import { DocumentModel } from '../models/mongoose/Document.schema';
+import { UserModel } from '../models/mongoose/User.schema';
+import { WorkflowRequestModel as WorkflowRequest } from '../models/mongoose/WorkflowRequest.schema';
+import { ParticipantModel as Participant } from '../models/mongoose/Participant.schema';
+import { ApprovalLevelModel as ApprovalLevel } from '../models/mongoose/ApprovalLevel.schema';
 import { ResponseHandler } from '@utils/responseHandler';
-import { activityService } from '@services/activity.service';
+import { activityMongoService as activityService } from '@services/activity.service';
 import { gcsStorageService } from '@services/gcsStorage.service';
 import { emailNotificationService } from '@services/emailNotification.service';
-import { notificationService } from '@services/notification.service';
+import { notificationMongoService as notificationService } from '@services/notification.service';
 import type { AuthenticatedRequest } from '../types/express';
 import { getRequestMetadata } from '@utils/requestUtils';
 import { getConfigNumber, getConfigValue } from '@services/configReader.service';
@@ -28,9 +28,18 @@ export class DocumentController {
       }

       // Extract requestId from body (multer should parse form fields)
-      // Try both req.body and req.body.requestId for compatibility
       const identifier = String((req.body?.requestId || req.body?.request_id || '').trim());

+      console.log('[DEBUG] Document upload attempt:', {
+        identifier,
+        bodyKeys: Object.keys(req.body || {}),
+        bodyRequestId: req.body?.requestId,
+        bodyRequest_id: req.body?.request_id,
+        userId: req.user?.userId
+      });
+
       if (!identifier || identifier === 'undefined' || identifier === 'null') {
+        console.log('[DEBUG] RequestId missing or invalid');
         logWithContext('error', 'RequestId missing or invalid in document upload', {
           body: req.body,
           bodyKeys: Object.keys(req.body || {}),
@@ -46,19 +55,45 @@ export class DocumentController {
         return uuidRegex.test(id);
       };

-      // Get workflow request - handle both UUID (requestId) and requestNumber
-      let workflowRequest: WorkflowRequest | null = null;
-      if (isUuid(identifier)) {
-        workflowRequest = await WorkflowRequest.findByPk(identifier);
+      // Helper to check if identifier is MongoDB ObjectId
+      const isObjectId = (id: string): boolean => {
+        return /^[0-9a-f]{24}$/i.test(id);
+      };
+
+      // Get workflow request - handle UUID (requestId), requestNumber, or MongoDB ObjectId (_id)
+      let workflowRequest: any = null;
+      const identifierIsUuid = isUuid(identifier);
+      const identifierIsObjectId = isObjectId(identifier);
+
+      console.log('[DEBUG] Looking up workflow request:', {
+        identifier,
+        identifierIsUuid,
+        identifierIsObjectId,
+        lookupField: identifierIsUuid ? 'requestId' : identifierIsObjectId ? '_id' : 'requestNumber'
+      });
+
+      if (identifierIsUuid) {
+        workflowRequest = await WorkflowRequest.findOne({ requestId: identifier });
+      } else if (identifierIsObjectId) {
+        workflowRequest = await WorkflowRequest.findById(identifier);
       } else {
-        workflowRequest = await WorkflowRequest.findOne({ where: { requestNumber: identifier } });
+        workflowRequest = await WorkflowRequest.findOne({ requestNumber: identifier });
       }

+      console.log('[DEBUG] Workflow lookup result:', {
+        found: !!workflowRequest,
+        requestId: workflowRequest?.requestId,
+        requestNumber: workflowRequest?.requestNumber,
+        _id: workflowRequest?._id?.toString()
+      });
+
       if (!workflowRequest) {
         logWithContext('error', 'Workflow request not found for document upload', {
           identifier,
-          isUuid: isUuid(identifier),
-          userId: req.user?.userId
+          isUuid: identifierIsUuid,
+          isObjectId: identifierIsObjectId,
+          userId: req.user?.userId,
+          attemptedLookup: identifierIsUuid ? 'requestId' : identifierIsObjectId ? '_id' : 'requestNumber'
         });
         ResponseHandler.error(res, 'Workflow request not found', 404);
         return;
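The lookup now accepts three identifier shapes. That classification can be factored out; a sketch (the helper name is mine, and note that Mongoose's own `isValidObjectId` also accepts any 12-character string, which is why the strict 24-hex-char regex is the safer test here):

```ts
function classifyIdentifier(id: string): 'requestId' | '_id' | 'requestNumber' {
  const uuidRe = /^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$/i;
  if (uuidRe.test(id)) return 'requestId';       // application UUID
  if (/^[0-9a-f]{24}$/i.test(id)) return '_id';  // MongoDB ObjectId
  return 'requestNumber';                        // human-readable number
}
```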
@@ -71,7 +106,6 @@ export class DocumentController {
       if (!requestNumber) {
         logWithContext('error', 'Request number not found for workflow', {
           requestId,
-          workflowRequest: JSON.stringify(workflowRequest.toJSON()),
           userId: req.user?.userId
         });
         ResponseHandler.error(res, 'Request number not found for workflow', 500);
@@ -84,28 +118,28 @@ export class DocumentController {
         return;
       }

-      // Validate file size against database configuration
+      // Validate file size
       const maxFileSizeMB = await getConfigNumber('MAX_FILE_SIZE_MB', 10);
       const maxFileSizeBytes = maxFileSizeMB * 1024 * 1024;

       if (file.size > maxFileSizeBytes) {
         ResponseHandler.error(
           res,
-          `File size exceeds the maximum allowed size of ${maxFileSizeMB}MB. Current size: ${(file.size / (1024 * 1024)).toFixed(2)}MB`,
+          `File size exceeds the maximum allowed size of ${maxFileSizeMB} MB.Current size: ${(file.size / (1024 * 1024)).toFixed(2)} MB`,
           400
         );
         return;
       }

-      // Validate file type against database configuration
+      // Validate file type
       const allowedFileTypesStr = await getConfigValue('ALLOWED_FILE_TYPES', 'pdf,doc,docx,xls,xlsx,ppt,pptx,jpg,jpeg,png,gif');
-      const allowedFileTypes = allowedFileTypesStr.split(',').map(ext => ext.trim().toLowerCase());
+      const allowedFileTypes = allowedFileTypesStr.split(',').map((ext: string) => ext.trim().toLowerCase());
       const fileExtension = path.extname(file.originalname).replace('.', '').toLowerCase();

       if (!allowedFileTypes.includes(fileExtension)) {
         ResponseHandler.error(
           res,
-          `File type "${fileExtension}" is not allowed. Allowed types: ${allowedFileTypes.join(', ')}`,
+          `File type "${fileExtension}" is not allowed.Allowed types: ${allowedFileTypes.join(', ')} `,
           400
         );
         return;
@@ -117,7 +151,7 @@ export class DocumentController {
       const extension = path.extname(file.originalname).replace('.', '').toLowerCase();
       const category = (req.body?.category as string) || 'OTHER';

-      // Upload with automatic fallback to local storage
+      // Upload file
       const uploadResult = await gcsStorageService.uploadFileWithFallback({
         buffer: fileBuffer,
         originalName: file.originalname,
@@ -129,7 +163,7 @@ export class DocumentController {
       const storageUrl = uploadResult.storageUrl;
       const gcsFilePath = uploadResult.filePath;

-      // Clean up local temporary file if it exists (from multer disk storage)
+      // Clean up local temp file
       if (file.path && fs.existsSync(file.path)) {
         try {
           fs.unlinkSync(file.path);
@@ -138,134 +172,30 @@ export class DocumentController {
         }
       }

-      // Check if storageUrl exceeds database column limit (500 chars)
-      // GCS signed URLs can be very long (500-1000+ chars)
-      const MAX_STORAGE_URL_LENGTH = 500;
-      let finalStorageUrl = storageUrl;
-      if (storageUrl && storageUrl.length > MAX_STORAGE_URL_LENGTH) {
-        logWithContext('warn', 'Storage URL exceeds database column limit, truncating', {
-          originalLength: storageUrl.length,
-          maxLength: MAX_STORAGE_URL_LENGTH,
-          urlPrefix: storageUrl.substring(0, 100),
-        });
-        // For signed URLs, we can't truncate as it will break the URL
-        // Instead, store null and generate signed URLs on-demand when needed
-        // The filePath is sufficient to generate a new signed URL later
-        finalStorageUrl = null as any;
-        logWithContext('info', 'Storing null storageUrl - will generate signed URL on-demand', {
-          filePath: gcsFilePath,
-          reason: 'Signed URL too long for database column',
-        });
-      }
-
-      // Truncate file names if they exceed database column limits (255 chars)
-      const MAX_FILE_NAME_LENGTH = 255;
-      const originalFileName = file.originalname;
-      let truncatedOriginalFileName = originalFileName;
-
-      if (originalFileName.length > MAX_FILE_NAME_LENGTH) {
-        // Preserve file extension when truncating
-        const ext = path.extname(originalFileName);
-        const nameWithoutExt = path.basename(originalFileName, ext);
-        const maxNameLength = MAX_FILE_NAME_LENGTH - ext.length;
-
-        if (maxNameLength > 0) {
-          truncatedOriginalFileName = nameWithoutExt.substring(0, maxNameLength) + ext;
-        } else {
-          // If extension itself is too long, just use the extension
-          truncatedOriginalFileName = ext.substring(0, MAX_FILE_NAME_LENGTH);
-        }
-
-        logWithContext('warn', 'File name truncated to fit database column', {
-          originalLength: originalFileName.length,
-          truncatedLength: truncatedOriginalFileName.length,
-          originalName: originalFileName.substring(0, 100) + '...',
-          truncatedName: truncatedOriginalFileName,
-        });
-      }
-
-      // Generate fileName (basename of the generated file name in GCS)
-      const generatedFileName = path.basename(gcsFilePath);
-      let truncatedFileName = generatedFileName;
-
-      if (generatedFileName.length > MAX_FILE_NAME_LENGTH) {
-        const ext = path.extname(generatedFileName);
-        const nameWithoutExt = path.basename(generatedFileName, ext);
-        const maxNameLength = MAX_FILE_NAME_LENGTH - ext.length;
-
-        if (maxNameLength > 0) {
-          truncatedFileName = nameWithoutExt.substring(0, maxNameLength) + ext;
-        } else {
-          truncatedFileName = ext.substring(0, MAX_FILE_NAME_LENGTH);
-        }
-
-        logWithContext('warn', 'Generated file name truncated', {
-          originalLength: generatedFileName.length,
-          truncatedLength: truncatedFileName.length,
-        });
-      }
-
       // Prepare document data
       const documentData = {
+        documentId: require('crypto').randomUUID(),
         requestId,
         uploadedBy: userId,
-        fileName: truncatedFileName,
-        originalFileName: truncatedOriginalFileName,
+        fileName: path.basename(gcsFilePath).substring(0, 255),
+        originalFileName: file.originalname.substring(0, 255),
         fileType: extension,
         fileExtension: extension,
         fileSize: file.size,
-        filePath: gcsFilePath, // Store GCS path or local path
-        storageUrl: finalStorageUrl, // Store GCS URL or local URL (null if too long)
+        filePath: gcsFilePath,
+        storageUrl: (storageUrl && storageUrl.length < 500) ? storageUrl : undefined,
         mimeType: file.mimetype,
         checksum,
-        isGoogleDoc: false,
-        googleDocUrl: null as any,
-        category,
+        category: category as any,
         version: 1,
-        parentDocumentId: null as any,
         isDeleted: false,
-        downloadCount: 0,
       };

-      logWithContext('info', 'Creating document record', {
-        requestId,
-        userId,
-        fileName: file.originalname,
-        filePath: gcsFilePath,
-        storageUrl: storageUrl,
-        documentData: JSON.stringify(documentData, null, 2),
-      });
-
-      let doc;
-      try {
-        doc = await Document.create(documentData as any);
-        logWithContext('info', 'Document record created successfully', {
-          documentId: doc.documentId,
-          requestId,
-          fileName: file.originalname,
-        });
-      } catch (createError) {
-        const createErrorMessage = createError instanceof Error ? createError.message : 'Unknown error';
-        const createErrorStack = createError instanceof Error ? createError.stack : undefined;
-        // Check if it's a Sequelize validation error
-        const sequelizeError = (createError as any)?.errors || (createError as any)?.parent;
-        logWithContext('error', 'Document.create() failed', {
-          error: createErrorMessage,
-          stack: createErrorStack,
-          sequelizeErrors: sequelizeError,
-          requestId,
-          userId,
-          fileName: file.originalname,
-          filePath: gcsFilePath,
-          storageUrl: storageUrl,
-          documentData: JSON.stringify(documentData, null, 2),
-        });
-        throw createError; // Re-throw to be caught by outer catch block
-      }
-
-      // Log document upload event
-      logDocumentEvent('uploaded', doc.documentId, {
-        requestId,
+      const doc = await (DocumentModel as any).create(documentData);
+
+      // Log event
+      logDocumentEvent('uploaded', (doc as any).documentId, {
+        requestId: workflowRequest.requestId, // Standardized to UUID
         userId,
         fileName: file.originalname,
         fileType: extension,
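The rewrite collapses the old defensive logic into two `substring(0, 255)` calls and a conditional `storageUrl`. One behavior quietly changes: `substring` can cut straight through the file extension, which the deleted code went out of its way to preserve. If that still matters under the Mongoose schema, a small helper restores it (the helper name is mine):

```ts
import path from 'path';

// Truncate a file name to maxLen while keeping the extension intact,
// mirroring the logic this hunk removed.
function truncateFileName(name: string, maxLen = 255): string {
  if (name.length <= maxLen) return name;
  const ext = path.extname(name);
  const stem = path.basename(name, ext);
  const room = maxLen - ext.length;
  return room > 0 ? stem.substring(0, room) + ext : ext.substring(0, maxLen);
}
```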
@@ -274,18 +204,18 @@ export class DocumentController {
       });

       // Get user details for activity logging
-      const user = await User.findByPk(userId);
-      const uploaderName = (user as any)?.displayName || (user as any)?.email || 'User';
+      const uploader = await UserModel.findOne({ userId });
+      const uploaderName = uploader?.displayName || uploader?.email || 'User';

-      // Log activity for document upload
+      // Log activity
       const requestMeta = getRequestMetadata(req);
       await activityService.log({
-        requestId,
+        requestId: workflowRequest.requestId, // Standardized to UUID
         type: 'document_added',
         user: { userId, name: uploaderName },
         timestamp: new Date().toISOString(),
         action: 'Document Added',
-        details: `Added ${file.originalname} as supporting document by ${uploaderName}`,
+        details: `Added ${file.originalname} as supporting document by ${uploaderName} `,
         metadata: {
           fileName: file.originalname,
           fileSize: file.size,
@@ -296,227 +226,106 @@ export class DocumentController {
         userAgent: requestMeta.userAgent
       });

-      // Send notifications for additional document added
+      // Send notifications
       try {
         const initiatorId = (workflowRequest as any).initiatorId || (workflowRequest as any).initiator_id;
         const isInitiator = userId === initiatorId;

-        // Get all participants (spectators)
-        const spectators = await Participant.findAll({
-          where: {
-            requestId,
-            participantType: 'SPECTATOR'
-          },
-          include: [{
-            model: User,
-            as: 'user',
-            attributes: ['userId', 'email', 'displayName']
-          }]
-        });
+        // Get participants
+        const participants = await Participant.find({
+          requestId: workflowRequest.requestId, // Standardized to UUID
+          participantType: 'SPECTATOR'
+        });

-        // Get current approver (pending or in-progress approval level)
-        const currentApprovalLevel = await ApprovalLevel.findOne({
-          where: {
-            requestId,
-            status: { [Op.in]: ['PENDING', 'IN_PROGRESS'] }
-          },
-          order: [['levelNumber', 'ASC']],
-          include: [{
-            model: User,
-            as: 'approver',
-            attributes: ['userId', 'email', 'displayName']
-          }]
-        });
-
-        logWithContext('info', 'Current approver lookup for document notification', {
-          requestId,
-          currentApprovalLevelFound: !!currentApprovalLevel,
-          approverUserId: currentApprovalLevel ? ((currentApprovalLevel as any).approver || (currentApprovalLevel as any).Approver)?.userId : null,
-          isInitiator
-        });
+        // Get current approver
+        const currentLevel = await ApprovalLevel.findOne({
+          requestId: requestId,
+          status: { $in: ['PENDING', 'IN_PROGRESS'] }
+        }).sort({ levelNumber: 1 });

-        // Determine who to notify based on who uploaded
         const recipientsToNotify: Array<{ userId: string; email: string; displayName: string }> = [];

-        if (isInitiator) {
-          // Initiator added → notify spectators and current approver
-          spectators.forEach((spectator: any) => {
-            const spectatorUser = spectator.user || spectator.User;
-            if (spectatorUser && spectatorUser.userId !== userId) {
-              recipientsToNotify.push({
-                userId: spectatorUser.userId,
-                email: spectatorUser.email,
-                displayName: spectatorUser.displayName || spectatorUser.email
-              });
-            }
-          });
-
-          if (currentApprovalLevel) {
-            const approverUser = (currentApprovalLevel as any).approver || (currentApprovalLevel as any).Approver;
-            if (approverUser && approverUser.userId !== userId) {
-              recipientsToNotify.push({
-                userId: approverUser.userId,
-                email: approverUser.email,
-                displayName: approverUser.displayName || approverUser.email
-              });
-            }
-          }
-        } else {
-          // Check if uploader is a spectator
-          const uploaderParticipant = await Participant.findOne({
-            where: {
-              requestId,
-              userId,
-              participantType: 'SPECTATOR'
-            }
-          });
-
-          if (uploaderParticipant) {
-            // Spectator added → notify initiator and current approver
-            const initiator = await User.findByPk(initiatorId);
-            if (initiator) {
-              const initiatorData = initiator.toJSON();
-              if (initiatorData.userId !== userId) {
-                recipientsToNotify.push({
-                  userId: initiatorData.userId,
-                  email: initiatorData.email,
-                  displayName: initiatorData.displayName || initiatorData.email
-                });
-              }
-            }
-
-            if (currentApprovalLevel) {
-              const approverUser = (currentApprovalLevel as any).approver || (currentApprovalLevel as any).Approver;
-              if (approverUser && approverUser.userId !== userId) {
-                recipientsToNotify.push({
-                  userId: approverUser.userId,
-                  email: approverUser.email,
-                  displayName: approverUser.displayName || approverUser.email
-                });
-              }
-            }
-          } else {
-            // Approver added → notify initiator and spectators
-            const initiator = await User.findByPk(initiatorId);
-            if (initiator) {
-              const initiatorData = initiator.toJSON();
-              if (initiatorData.userId !== userId) {
-                recipientsToNotify.push({
-                  userId: initiatorData.userId,
-                  email: initiatorData.email,
-                  displayName: initiatorData.displayName || initiatorData.email
-                });
-              }
-            }
-
-            spectators.forEach((spectator: any) => {
-              const spectatorUser = spectator.user || spectator.User;
-              if (spectatorUser && spectatorUser.userId !== userId) {
-                recipientsToNotify.push({
-                  userId: spectatorUser.userId,
-                  email: spectatorUser.email,
-                  displayName: spectatorUser.displayName || spectatorUser.email
-                });
-              }
-            });
-          }
-        }
-
-        // Send notifications (email, in-app, and web-push)
-        const requestData = {
-          requestNumber: requestNumber,
-          requestId: requestId,
-          title: (workflowRequest as any).title || 'Request'
-        };
-
-        // Prepare user IDs for in-app and web-push notifications
-        const recipientUserIds = recipientsToNotify.map(r => r.userId);
-
-        // Send in-app and web-push notifications
-        if (recipientUserIds.length > 0) {
-          try {
-            await notificationService.sendToUsers(
-              recipientUserIds,
-              {
-                title: 'Additional Document Added',
-                body: `${uploaderName} added "${file.originalname}" to ${requestNumber}`,
-                requestId,
-                requestNumber,
-                url: `/request/${requestNumber}`,
-                type: 'document_added',
-                priority: 'MEDIUM',
-                actionRequired: false,
-                metadata: {
-                  documentName: file.originalname,
-                  fileSize: file.size,
-                  addedByName: uploaderName,
-                  source: 'Documents Tab'
-                }
-              }
-            );
-            logWithContext('info', 'In-app and web-push notifications sent for additional document', {
-              requestId,
-              documentName: file.originalname,
-              recipientsCount: recipientUserIds.length
-            });
-          } catch (notifyError) {
-            logWithContext('error', 'Failed to send in-app/web-push notifications for additional document', {
-              requestId,
-              error: notifyError instanceof Error ? notifyError.message : 'Unknown error'
-            });
-          }
-        }
-
-        // Send email notifications
+        // Add initiator if they are not the uploader
+        if (!isInitiator) {
+          const initiator = await UserModel.findOne({ userId: initiatorId });
+          if (initiator) {
+            recipientsToNotify.push({
+              userId: initiator.userId,
+              email: initiator.email,
+              displayName: initiator.displayName || initiator.email
+            });
+          }
+        }
+
+        // Add current approver if not the uploader
+        if (currentLevel?.approver?.userId && currentLevel.approver.userId !== userId) {
+          const approver = await UserModel.findOne({ userId: currentLevel.approver.userId });
+          if (approver) {
+            recipientsToNotify.push({
+              userId: approver.userId,
+              email: approver.email,
+              displayName: approver.displayName || approver.email
+            });
+          }
+        }
+
+        // Add spectators
+        for (const p of participants) {
+          if (p.userId !== userId && !recipientsToNotify.some(r => r.userId === p.userId)) {
+            const spectator = await UserModel.findOne({ userId: p.userId });
+            if (spectator) {
+              recipientsToNotify.push({
+                userId: spectator.userId,
+                email: spectator.email,
+                displayName: spectator.displayName || spectator.email
+              });
+            }
+          }
+        }
+
+        // Send notifications
+        if (recipientsToNotify.length > 0) {
+          const recipientIds = recipientsToNotify.map(r => r.userId);
+          await notificationService.sendToUsers(recipientIds, {
+            title: 'Additional Document Added',
+            body: `${uploaderName} added "${file.originalname}" to ${requestNumber} `,
+            requestId,
+            requestNumber,
+            url: `/ request / ${requestNumber} `,
+            type: 'document_added',
+            priority: 'MEDIUM',
+            actionRequired: false,
+            metadata: {
+              documentName: file.originalname,
+              addedByName: uploaderName
+            }
+          });
+
+        const requestData = {
for (const recipient of recipientsToNotify) {
|
requestNumber,
|
||||||
await emailNotificationService.sendAdditionalDocumentAdded(
|
requestId,
|
||||||
requestData,
|
title: (workflowRequest as any).title || 'Request'
|
||||||
recipient,
|
};
|
||||||
{
|
|
||||||
|
for (const recipient of recipientsToNotify) {
|
||||||
|
await emailNotificationService.sendAdditionalDocumentAdded(requestData, recipient, {
|
||||||
documentName: file.originalname,
|
documentName: file.originalname,
|
||||||
fileSize: file.size,
|
fileSize: file.size,
|
||||||
addedByName: uploaderName,
|
addedByName: uploaderName,
|
||||||
source: 'Documents Tab'
|
source: 'Documents Tab'
|
||||||
}
|
});
|
||||||
);
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
logWithContext('info', 'Additional document notifications sent', {
|
|
||||||
requestId,
|
|
||||||
documentName: file.originalname,
|
|
||||||
recipientsCount: recipientsToNotify.length,
|
|
||||||
isInitiator
|
|
||||||
});
|
|
||||||
} catch (notifyError) {
|
} catch (notifyError) {
|
||||||
// Don't fail document upload if notifications fail
|
logWithContext('error', 'Failed to send document notifications', { error: notifyError });
|
||||||
logWithContext('error', 'Failed to send additional document notifications', {
|
|
||||||
requestId,
|
|
||||||
error: notifyError instanceof Error ? notifyError.message : 'Unknown error'
|
|
||||||
});
|
|
||||||
}
|
}
|
||||||
|
|
||||||
ResponseHandler.success(res, doc, 'File uploaded', 201);
|
ResponseHandler.success(res, doc, 'File uploaded', 201);
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
const message = error instanceof Error ? error.message : 'Unknown error';
|
const message = error instanceof Error ? error.message : 'Unknown error';
|
||||||
const errorStack = error instanceof Error ? error.stack : undefined;
|
logWithContext('error', 'Document upload failed', { error: message });
|
||||||
logWithContext('error', 'Document upload failed', {
|
|
||||||
userId: req.user?.userId,
|
|
||||||
requestId: req.body?.requestId || req.body?.request_id,
|
|
||||||
body: req.body,
|
|
||||||
bodyKeys: Object.keys(req.body || {}),
|
|
||||||
file: req.file ? {
|
|
||||||
originalname: req.file.originalname,
|
|
||||||
size: req.file.size,
|
|
||||||
mimetype: req.file.mimetype,
|
|
||||||
hasBuffer: !!req.file.buffer,
|
|
||||||
hasPath: !!req.file.path
|
|
||||||
} : 'No file',
|
|
||||||
error: message,
|
|
||||||
stack: errorStack
|
|
||||||
});
|
|
||||||
ResponseHandler.error(res, 'Upload failed', 500, message);
|
ResponseHandler.error(res, 'Upload failed', 500, message);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
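Reviewer note on the recipient selection above: the rewritten Mongo path only de-duplicates when adding spectators, so a user who is both the initiator and the current approver can be pushed twice and notified twice. A minimal sketch of one way to collapse duplicates before sending; the helper name and Recipient type are assumptions for illustration, not code from this branch:

type Recipient = { userId: string; email: string; displayName: string };

// Keep the first occurrence of each userId, drop later duplicates.
function dedupeRecipients(recipients: Recipient[]): Recipient[] {
  const seen = new Set<string>();
  return recipients.filter(r => {
    if (seen.has(r.userId)) return false;
    seen.add(r.userId);
    return true;
  });
}

// Usage: const recipientIds = dedupeRecipients(recipientsToNotify).map(r => r.userId);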
@@ -1,8 +1,8 @@
 import { Request, Response } from 'express';
-import { Notification } from '@models/Notification';
-import { Op } from 'sequelize';
-import logger from '@utils/logger';
-import { notificationService } from '@services/notification.service';
+import mongoose from 'mongoose';
+import { NotificationModel as Notification } from '../models/mongoose/Notification.schema';
+import logger from '../utils/logger';
+import { notificationMongoService as notificationService } from '../services/notification.service';
 
 export class NotificationController {
   /**
@@ -25,12 +25,12 @@ export class NotificationController {
 
     const offset = (Number(page) - 1) * Number(limit);
 
-    const { rows, count } = await Notification.findAndCountAll({
-      where,
-      order: [['createdAt', 'DESC']],
-      limit: Number(limit),
-      offset
-    });
+    const rows = await Notification.find(where)
+      .sort({ createdAt: -1 })
+      .limit(Number(limit))
+      .skip(offset);
+
+    const count = await Notification.countDocuments(where);
 
     res.json({
       success: true,
@@ -42,7 +42,7 @@ export class NotificationController {
         total: count,
         totalPages: Math.ceil(count / Number(limit))
       },
-      unreadCount: unreadOnly === 'true' ? count : await Notification.count({ where: { userId, isRead: false } })
+      unreadCount: unreadOnly === 'true' ? count : await Notification.countDocuments({ userId, isRead: false })
     }
   });
 } catch (error: any) {
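The findAndCountAll replacement above issues the page query and the count sequentially. A self-contained sketch of the same pattern with the two queries run in parallel; the schema here is a stand-in for illustration, not the project's actual Notification schema:

import { Schema, model } from 'mongoose';

// Hypothetical minimal schema, for illustration only.
const NotificationSchema = new Schema(
  { userId: String, isRead: { type: Boolean, default: false } },
  { timestamps: true }
);
const Notification = model('Notification', NotificationSchema);

// Equivalent of findAndCountAll: run the page query and the count together.
async function listPage(userId: string, page: number, limit: number) {
  const where = { userId };
  const [rows, count] = await Promise.all([
    Notification.find(where)
      .sort({ createdAt: -1 })
      .limit(limit)
      .skip((page - 1) * limit),
    Notification.countDocuments(where)
  ]);
  return { rows, count, totalPages: Math.ceil(count / limit) };
}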
@@ -63,8 +63,8 @@ export class NotificationController {
       return;
     }
 
-    const count = await Notification.count({
-      where: { userId, isRead: false }
+    const count = await Notification.countDocuments({
+      userId, isRead: false
     });
 
     res.json({
@@ -90,8 +90,13 @@ export class NotificationController {
       return;
     }
 
+    if (!mongoose.Types.ObjectId.isValid(notificationId)) {
+      res.status(400).json({ success: false, message: 'Invalid notification ID' });
+      return;
+    }
+
     const notification = await Notification.findOne({
-      where: { notificationId, userId }
+      _id: notificationId, userId
     });
 
     if (!notification) {
@@ -99,10 +104,10 @@ export class NotificationController {
       return;
     }
 
-    await notification.update({
-      isRead: true,
-      readAt: new Date()
-    });
+    notification.isRead = true;
+    notification.metadata = notification.metadata || {};
+    notification.metadata.readAt = new Date();
+    await notification.save();
 
     res.json({
       success: true,
@@ -127,9 +132,9 @@ export class NotificationController {
       return;
     }
 
-    await Notification.update(
-      { isRead: true, readAt: new Date() },
-      { where: { userId, isRead: false } }
+    await Notification.updateMany(
+      { userId, isRead: false },
+      { $set: { isRead: true } }
     );
 
     res.json({
@@ -155,10 +160,17 @@ export class NotificationController {
       return;
     }
 
-    const deleted = await Notification.destroy({
-      where: { notificationId, userId }
+    if (!mongoose.Types.ObjectId.isValid(notificationId)) {
+      res.status(400).json({ success: false, message: 'Invalid notification ID' });
+      return;
+    }
+
+    const result = await Notification.deleteOne({
+      _id: notificationId, userId
     });
 
+    const deleted = result.deletedCount;
+
     if (deleted === 0) {
       res.status(404).json({ success: false, message: 'Notification not found' });
       return;
@@ -201,4 +213,3 @@ export class NotificationController {
     }
   }
 }
-
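Worth flagging in the markAllAsRead hunk above: the Sequelize version stamped readAt: new Date() on every row, while the Mongoose updateMany only sets isRead. If the read timestamp matters for auditing, a sketch of a bulk update that also records it, assuming the metadata.readAt convention the single-notification path above established:

// Bulk mark-as-read that also stamps metadata.readAt, mirroring markAsRead.
await Notification.updateMany(
  { userId, isRead: false },
  { $set: { isRead: true, 'metadata.readAt': new Date() } }
);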
@@ -1,12 +1,13 @@
 import { Response } from 'express';
-import { pauseService } from '@services/pause.service';
+import { pauseMongoService } from '@services/pause.service';
 import { ResponseHandler } from '@utils/responseHandler';
 import type { AuthenticatedRequest } from '../types/express';
 import { z } from 'zod';
 
 // Validation schemas
+// In MongoDB, levelId could be a string (ObjectId)
 const pauseWorkflowSchema = z.object({
-  levelId: z.string().uuid().optional().nullable(),
+  levelId: z.string().optional().nullable(),
   reason: z.string().min(1, 'Reason is required').max(1000, 'Reason must be less than 1000 characters'),
   resumeDate: z.string().datetime().or(z.date())
 });
@@ -26,7 +27,7 @@ export class PauseController {
       const userId = req.user?.userId;
 
       if (!userId) {
-        ResponseHandler.error(res, 'Unauthorized', 401);
+        ResponseHandler.unauthorized(res, 'Unauthorized');
         return;
       }
 
@@ -36,7 +37,7 @@ export class PauseController {
         ? validated.resumeDate
         : new Date(validated.resumeDate);
 
-      const result = await pauseService.pauseWorkflow(
+      const result = await pauseMongoService.pauseWorkflow(
         id,
         validated.levelId || null,
         userId,
@@ -68,14 +69,14 @@ export class PauseController {
       const userId = req.user?.userId;
 
       if (!userId) {
-        ResponseHandler.error(res, 'Unauthorized', 401);
+        ResponseHandler.unauthorized(res, 'Unauthorized');
        return;
       }
 
       // Validate request body (notes is optional)
       const validated = resumeWorkflowSchema.parse(req.body || {});
 
-      const result = await pauseService.resumeWorkflow(id, userId, validated.notes);
+      const result = await pauseMongoService.resumeWorkflow(id, userId, validated.notes);
 
       ResponseHandler.success(res, {
         workflow: result.workflow,
@@ -101,11 +102,11 @@ export class PauseController {
       const userId = req.user?.userId;
 
       if (!userId) {
-        ResponseHandler.error(res, 'Unauthorized', 401);
+        ResponseHandler.unauthorized(res, 'Unauthorized');
         return;
       }
 
-      await pauseService.retriggerPause(id, userId);
+      await pauseMongoService.retriggerPause(id, userId);
 
       ResponseHandler.success(res, null, 'Pause retrigger request sent successfully', 200);
     } catch (error: any) {
@@ -122,7 +123,7 @@ export class PauseController {
     try {
       const { id } = req.params;
 
-      const pauseDetails = await pauseService.getPauseDetails(id);
+      const pauseDetails = await pauseMongoService.getPauseDetails(id);
 
       if (!pauseDetails) {
         ResponseHandler.success(res, { isPaused: false }, 'Workflow is not paused', 200);
@@ -138,4 +139,3 @@ export class PauseController {
   }
 
 export const pauseController = new PauseController();
-
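One thing the loosened schema above gives up is any shape check on levelId at all. If level IDs after the migration are either legacy UUIDs or 24-hex ObjectId strings, a stricter zod refinement can accept both; a sketch offered as a suggestion, not what the branch does:

import { z } from 'zod';

const objectIdPattern = /^[0-9a-fA-F]{24}$/;

// Accept either a UUID (legacy rows) or an ObjectId string (Mongo documents).
const levelIdSchema = z
  .string()
  .refine(
    v => objectIdPattern.test(v) || z.string().uuid().safeParse(v).success,
    { message: 'levelId must be a UUID or an ObjectId' }
  )
  .optional()
  .nullable();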
@@ -1,13 +1,11 @@
 import { Request, Response } from 'express';
-import { TatAlert } from '@models/TatAlert';
-import { ApprovalLevel } from '@models/ApprovalLevel';
-import { User } from '@models/User';
-import { WorkflowRequest } from '@models/WorkflowRequest';
-import logger from '@utils/logger';
-import { sequelize } from '@config/database';
-import { QueryTypes } from 'sequelize';
-import { activityService } from '@services/activity.service';
-import { getRequestMetadata } from '@utils/requestUtils';
+import { TatAlertModel as TatAlert } from '../models/mongoose/TatAlert.schema';
+import { ApprovalLevelModel as ApprovalLevel } from '../models/mongoose/ApprovalLevel.schema';
+import { UserModel } from '../models/mongoose/User.schema';
+import { WorkflowRequestModel as WorkflowRequest } from '../models/mongoose/WorkflowRequest.schema';
+import logger from '../utils/logger';
+import { activityMongoService as activityService } from '../services/activity.service';
+import { getRequestMetadata } from '../utils/requestUtils';
 import type { AuthenticatedRequest } from '../types/express';
 
 /**
@@ -17,26 +15,35 @@ export const getTatAlertsByRequest = async (req: Request, res: Response) => {
   try {
     const { requestId } = req.params;
 
-    const alerts = await TatAlert.findAll({
-      where: { requestId },
-      include: [
-        {
-          model: ApprovalLevel,
-          as: 'level',
-          attributes: ['levelNumber', 'levelName', 'approverName', 'status']
-        },
-        {
-          model: User,
-          as: 'approver',
-          attributes: ['userId', 'displayName', 'email', 'department']
-        }
-      ],
-      order: [['alertSentAt', 'ASC']]
-    });
+    const alerts = await TatAlert.find({ requestId })
+      .sort({ alertSentAt: 1 })
+      .lean();
+
+    // Enrich with level info manually since we can't easily populate across collections if not using ObjectIds strictly for references in Mongoose style (using strings here)
+    // Or we can query ApprovalLevel
+    const enrichedAlerts = await Promise.all(alerts.map(async (alert: any) => {
+      // Fetch level info
+      const level = await ApprovalLevel.findOne({ levelId: alert.levelId }).select('levelNumber levelName approverName status').lean(); // Use findOne with levelId (string)
+
+      const alertData = { ...alert, level };
+
+      if (alert.approverId) {
+        const approver = await UserModel.findOne({ userId: alert.approverId }).select('userId displayName email department').lean();
+        if (approver) {
+          alertData.approver = {
+            userId: approver.userId,
+            displayName: approver.displayName,
+            email: approver.email,
+            department: approver.department
+          };
+        }
+      }
+      return alertData;
+    }));
 
     res.json({
       success: true,
-      data: alerts
+      data: enrichedAlerts
     });
   } catch (error) {
     logger.error('[TAT Controller] Error fetching TAT alerts:', error);
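The manual enrichment above runs two lookups per alert inside Promise.all. That is fine for small result sets, but if a request can accumulate many alerts it becomes an N+1 pattern. A batched variant is possible; a sketch under the assumption of the same string-keyed levelId/approverId fields (a suggestion, not code from the branch):

// Batch the lookups: one query per collection instead of two per alert.
const levelIds = [...new Set(alerts.map((a: any) => a.levelId))];
const approverIds = [...new Set(alerts.map((a: any) => a.approverId).filter(Boolean))];

const [levels, approvers] = await Promise.all([
  ApprovalLevel.find({ levelId: { $in: levelIds } })
    .select('levelId levelNumber levelName approverName status').lean(),
  UserModel.find({ userId: { $in: approverIds } })
    .select('userId displayName email department').lean()
]);

const levelById = new Map(levels.map((l: any) => [l.levelId, l]));
const approverById = new Map(approvers.map((u: any) => [u.userId, u]));

const enrichedAlerts = alerts.map((alert: any) => ({
  ...alert,
  level: levelById.get(alert.levelId) ?? null,
  approver: alert.approverId ? approverById.get(alert.approverId) ?? null : undefined
}));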
@@ -54,10 +61,8 @@ export const getTatAlertsByLevel = async (req: Request, res: Response) => {
   try {
     const { levelId } = req.params;
 
-    const alerts = await TatAlert.findAll({
-      where: { levelId },
-      order: [['alertSentAt', 'ASC']]
-    });
+    const alerts = await TatAlert.find({ levelId })
+      .sort({ alertSentAt: 1 });
 
     res.json({
       success: true,
@@ -79,31 +84,61 @@ export const getTatComplianceSummary = async (req: Request, res: Response) => {
   try {
     const { startDate, endDate } = req.query;
 
-    let dateFilter = '';
+    const matchStage: any = {};
     if (startDate && endDate) {
-      dateFilter = `AND alert_sent_at BETWEEN '${startDate}' AND '${endDate}'`;
+      matchStage.alertSentAt = {
+        $gte: new Date(startDate as string),
+        $lte: new Date(endDate as string)
+      };
     }
 
-    const summary = await sequelize.query(`
-      SELECT
-        COUNT(*) as total_alerts,
-        COUNT(CASE WHEN alert_type = 'TAT_50' THEN 1 END) as alerts_50,
-        COUNT(CASE WHEN alert_type = 'TAT_75' THEN 1 END) as alerts_75,
-        COUNT(CASE WHEN alert_type = 'TAT_100' THEN 1 END) as breaches,
-        COUNT(CASE WHEN was_completed_on_time = true THEN 1 END) as completed_on_time,
-        COUNT(CASE WHEN was_completed_on_time = false THEN 1 END) as completed_late,
-        ROUND(
-          COUNT(CASE WHEN was_completed_on_time = true THEN 1 END) * 100.0 /
-          NULLIF(COUNT(CASE WHEN was_completed_on_time IS NOT NULL THEN 1 END), 0),
-          2
-        ) as compliance_percentage
-      FROM tat_alerts
-      WHERE 1=1 ${dateFilter}
-    `, { type: QueryTypes.SELECT });
+    const summary = await TatAlert.aggregate([
+      { $match: matchStage },
+      {
+        $group: {
+          _id: null,
+          total_alerts: { $sum: 1 },
+          alerts_50: { $sum: { $cond: [{ $eq: ['$alertType', 'TAT_50'] }, 1, 0] } },
+          alerts_75: { $sum: { $cond: [{ $eq: ['$alertType', 'TAT_75'] }, 1, 0] } },
+          breaches: { $sum: { $cond: [{ $eq: ['$alertType', 'TAT_100'] }, 1, 0] } },
+          completed_on_time: { $sum: { $cond: [{ $eq: ['$wasCompletedOnTime', true] }, 1, 0] } },
+          completed_late: { $sum: { $cond: [{ $eq: ['$wasCompletedOnTime', false] }, 1, 0] } },
+          completed_total: {
+            $sum: { $cond: [{ $ne: ['$wasCompletedOnTime', null] }, 1, 0] }
+          }
+        }
+      },
+      {
+        $project: {
+          _id: 0,
+          total_alerts: 1,
+          alerts_50: 1,
+          alerts_75: 1,
+          breaches: 1,
+          completed_on_time: 1,
+          completed_late: 1,
+          compliance_percentage: {
+            $cond: [
+              { $eq: ['$completed_total', 0] },
+              0,
+              { $round: [{ $multiply: [{ $divide: ['$completed_on_time', '$completed_total'] }, 100] }, 2] }
+            ]
+          }
+        }
+      }
+    ]);
 
     res.json({
       success: true,
-      data: summary[0] || {}
+      data: summary[0] || {
+        total_alerts: 0,
+        alerts_50: 0,
+        alerts_75: 0,
+        breaches: 0,
+        completed_on_time: 0,
+        completed_late: 0,
+        compliance_percentage: 0
+      }
     });
   } catch (error) {
     logger.error('[TAT Controller] Error fetching TAT compliance summary:', error);
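Beyond the storage swap, this hunk closes a real hole: the Sequelize version interpolated startDate and endDate straight into the SQL string, which was injectable, whereas the $match stage takes real Date values. One residual edge case remains, sketched below as a suggestion rather than branch code: new Date() on a garbage query string yields an Invalid Date, which Mongoose will either reject with a cast error or match nothing, so a cheap guard gives callers a clear 400 instead.

const start = new Date(startDate as string);
const end = new Date(endDate as string);
// Reject unparseable dates instead of letting them fail deep in the pipeline.
if (Number.isNaN(start.getTime()) || Number.isNaN(end.getTime())) {
  res.status(400).json({ success: false, message: 'Invalid date range' });
  return;
}
matchStage.alertSentAt = { $gte: start, $lte: end };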
@@ -119,32 +154,56 @@ export const getTatComplianceSummary = async (req: Request, res: Response) => {
  */
 export const getTatBreachReport = async (req: Request, res: Response) => {
   try {
-    const breaches = await sequelize.query(`
-      SELECT
-        ta.alert_id,
-        ta.request_id,
-        w.request_number,
-        w.title as request_title,
-        w.priority,
-        al.level_number,
-        al.approver_name,
-        ta.tat_hours_allocated,
-        ta.tat_hours_elapsed,
-        ta.alert_sent_at,
-        ta.completion_time,
-        ta.was_completed_on_time,
-        CASE
-          WHEN ta.completion_time IS NULL THEN 'Still Pending'
-          WHEN ta.was_completed_on_time = false THEN 'Completed Late'
-          ELSE 'Completed On Time'
-        END as completion_status
-      FROM tat_alerts ta
-      JOIN workflow_requests w ON ta.request_id = w.request_id
-      JOIN approval_levels al ON ta.level_id = al.level_id
-      WHERE ta.is_breached = true
-      ORDER BY ta.alert_sent_at DESC
-      LIMIT 100
-    `, { type: QueryTypes.SELECT });
+    const breaches = await TatAlert.aggregate([
+      { $match: { isBreached: true } },
+      { $sort: { alertSentAt: -1 } },
+      { $limit: 100 },
+      // Lookup WorkflowRequest
+      {
+        $lookup: {
+          from: 'workflow_requests',
+          localField: 'requestId',
+          foreignField: 'requestId',
+          as: 'request'
+        }
+      },
+      { $unwind: { path: '$request', preserveNullAndEmptyArrays: true } },
+      // Lookup ApprovalLevel
+      {
+        $lookup: {
+          from: 'approval_levels',
+          localField: 'levelId',
+          foreignField: 'levelId',
+          as: 'level'
+        }
+      },
+      { $unwind: { path: '$level', preserveNullAndEmptyArrays: true } },
+      {
+        $project: {
+          alert_id: '$_id',
+          request_id: '$requestId',
+          request_number: '$request.requestNumber',
+          request_title: '$request.title',
+          priority: '$request.priority',
+          level_number: '$level.levelNumber',
+          approver_name: '$level.approverName',
+          tat_hours_allocated: '$tatHoursAllocated',
+          tat_hours_elapsed: '$tatHoursElapsed',
+          alert_sent_at: '$alertSentAt',
+          completion_time: '$completionTime',
+          was_completed_on_time: '$wasCompletedOnTime',
+          completion_status: {
+            $switch: {
+              branches: [
+                { case: { $eq: ['$completionTime', null] }, then: 'Still Pending' },
+                { case: { $eq: ['$wasCompletedOnTime', false] }, then: 'Completed Late' }
+              ],
+              default: 'Completed On Time'
+            }
+          }
+        }
+      }
+    ]);
 
     res.json({
       success: true,
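Note the semantic shift in the hunk above: the SQL used inner JOINs, while $unwind with preserveNullAndEmptyArrays: true behaves like a LEFT JOIN, so alerts whose request or level document is missing now appear with null columns instead of being dropped. If the old behavior is wanted, the strict variant is a one-line change per lookup; a sketch reusing the models imported above (a suggestion, not what the branch does):

// $unwind without preserveNullAndEmptyArrays drops documents whose lookup
// array came back empty, matching the old inner-JOIN semantics.
const strictBreaches = await TatAlert.aggregate([
  { $match: { isBreached: true } },
  { $sort: { alertSentAt: -1 } },
  { $limit: 100 },
  { $lookup: { from: 'workflow_requests', localField: 'requestId', foreignField: 'requestId', as: 'request' } },
  { $unwind: '$request' },
  { $lookup: { from: 'approval_levels', localField: 'levelId', foreignField: 'levelId', as: 'level' } },
  { $unwind: '$level' }
]);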
@@ -184,7 +243,9 @@ export const updateBreachReason = async (req: Request, res: Response) => {
     }
 
     // Get the approval level to verify permissions
-    const level = await ApprovalLevel.findByPk(levelId);
+    // Note: levelId in params likely refers to the level document UUID
+    const level = await ApprovalLevel.findOne({ levelId }); // Use findOne with levelId custom ID
 
     if (!level) {
       return res.status(404).json({
         success: false,
@@ -193,7 +254,7 @@ export const updateBreachReason = async (req: Request, res: Response) => {
     }
 
     // Get user to check role
-    const user = await User.findByPk(userId);
+    const user = await UserModel.findOne({ userId });
     if (!user) {
       return res.status(404).json({
         success: false,
@@ -201,8 +262,8 @@ export const updateBreachReason = async (req: Request, res: Response) => {
       });
     }
 
-    const userRole = (user as any).role;
-    const approverId = (level as any).approverId;
+    const userRole = user.role;
+    const approverId = (level as any).approverId || (level.approver ? level.approver.userId : null);
 
     // Check permissions: ADMIN, MANAGEMENT, or the approver
     const hasPermission =
@@ -218,18 +279,15 @@ export const updateBreachReason = async (req: Request, res: Response) => {
     }
 
     // Get user details for activity logging
-    const userDisplayName = (user as any).displayName || (user as any).email || 'Unknown User';
+    const userDisplayName = user.displayName || user.email || 'Unknown User';
     const isUpdate = !!(level as any).breachReason; // Check if this is an update or first time
     const levelNumber = (level as any).levelNumber;
-    const approverName = (level as any).approverName || 'Unknown Approver';
+    const approverName = (level as any).approverName || (level.approver ? level.approver.name : 'Unknown Approver');
 
-    // Update breach reason directly in approval_levels table
-    await level.update({
-      breachReason: breachReason.trim()
-    });
-
-    // Reload to get updated data
-    await level.reload();
+    // Update breach reason directly in approval_levels
+    // Mongoose update
+    (level as any).breachReason = breachReason.trim();
+    await level.save();
 
     // Log activity for the request
     const userRoleLabel = userRole === 'ADMIN' ? 'Admin' : userRole === 'MANAGEMENT' ? 'Management' : 'Approver';
@@ -239,7 +297,7 @@ export const updateBreachReason = async (req: Request, res: Response) => {
       user: {
         userId: userId,
         name: userDisplayName,
-        email: (user as any).email
+        email: user.email
       },
       timestamp: new Date().toISOString(),
       action: isUpdate ? 'Updated TAT breach reason' : 'Added TAT breach reason',
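Every lookup in this file now keys on application-level IDs (levelId, userId) rather than _id, so those fields need indexes or each findOne is a collection scan. A sketch of the schema-level declaration, assuming Mongoose schemas shaped like the ones imported above (illustrative, not from the branch):

import { Schema } from 'mongoose';

// Unique index on the application-level key used by findOne({ levelId }).
const ApprovalLevelSchema = new Schema({
  levelId: { type: String, required: true, unique: true, index: true },
  levelNumber: Number,
  approverName: String,
  breachReason: String
});

// A compound index would similarly serve the TAT queries that filter by approver:
// TatAlertSchema.index({ approverId: 1, alertSentAt: -1 });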
@@ -281,28 +339,52 @@ export const getApproverTatPerformance = async (req: Request, res: Response) =>
   try {
     const { approverId } = req.params;
 
-    const performance = await sequelize.query(`
-      SELECT
-        COUNT(DISTINCT ta.level_id) as total_approvals,
-        COUNT(CASE WHEN ta.alert_type = 'TAT_50' THEN 1 END) as alerts_50_received,
-        COUNT(CASE WHEN ta.alert_type = 'TAT_75' THEN 1 END) as alerts_75_received,
-        COUNT(CASE WHEN ta.is_breached = true THEN 1 END) as breaches,
-        AVG(ta.tat_hours_elapsed) as avg_hours_taken,
-        ROUND(
-          COUNT(CASE WHEN ta.was_completed_on_time = true THEN 1 END) * 100.0 /
-          NULLIF(COUNT(CASE WHEN ta.was_completed_on_time IS NOT NULL THEN 1 END), 0),
-          2
-        ) as compliance_rate
-      FROM tat_alerts ta
-      WHERE ta.approver_id = :approverId
-    `, {
-      replacements: { approverId },
-      type: QueryTypes.SELECT
-    });
+    const performance = await TatAlert.aggregate([
+      { $match: { approverId: approverId } },
+      {
+        $group: {
+          _id: null,
+          total_approvals: { $addToSet: '$levelId' }, // Count distinct levels? Or count alerts? Query said count distinct level_id.
+          alerts_50_received: { $sum: { $cond: [{ $eq: ['$alertType', 'TAT_50'] }, 1, 0] } },
+          alerts_75_received: { $sum: { $cond: [{ $eq: ['$alertType', 'TAT_75'] }, 1, 0] } },
+          breaches: { $sum: { $cond: [{ $eq: ['$isBreached', true] }, 1, 0] } },
+          min_hours: { $min: '$tatHoursElapsed' }, // Helper to ensure avg works if field exists
+          tatHoursElapsedSum: { $sum: '$tatHoursElapsed' },
+          tatHoursElapsedCount: { $sum: 1 },
+          completed_on_time: { $sum: { $cond: [{ $eq: ['$wasCompletedOnTime', true] }, 1, 0] } },
+          completed_total: { $sum: { $cond: [{ $ne: ['$wasCompletedOnTime', null] }, 1, 0] } }
+        }
+      },
+      {
+        $project: {
+          _id: 0,
+          total_approvals: { $size: '$total_approvals' },
+          alerts_50_received: 1,
+          alerts_75_received: 1,
+          breaches: 1,
+          avg_hours_taken: { $divide: ['$tatHoursElapsedSum', '$tatHoursElapsedCount'] },
+          compliance_rate: {
+            $cond: [
+              { $eq: ['$completed_total', 0] },
+              0,
+              { $round: [{ $multiply: [{ $divide: ['$completed_on_time', '$completed_total'] }, 100] }, 2] }
+            ]
+          }
+        }
+      }
+    ]);
 
     res.json({
       success: true,
-      data: performance[0] || {}
+      data: performance[0] || {
+        total_approvals: 0,
+        alerts_50_received: 0,
+        alerts_75_received: 0,
+        breaches: 0,
+        avg_hours_taken: 0,
+        compliance_rate: 0
+      }
     });
   } catch (error) {
     logger.error('[TAT Controller] Error fetching approver TAT performance:', error);
@@ -312,4 +394,3 @@ export const getApproverTatPerformance = async (req: Request, res: Response) =>
     });
   }
 };
-
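A subtle parity bug to watch in the hunk above: SQL AVG(...) ignores NULLs, but tatHoursElapsedSum / tatHoursElapsedCount divides by the total alert count ($sum: 1), so documents missing tatHoursElapsed drag the average down. Mongo's $avg already skips null and missing values, which also makes the odd min_hours "helper" unnecessary. A sketch of the simpler, closer-to-SQL form (suggestion only):

// $avg ignores null/missing values, matching SQL AVG semantics.
const stats = await TatAlert.aggregate([
  { $match: { approverId } },
  {
    $group: {
      _id: null,
      avg_hours_taken: { $avg: '$tatHoursElapsed' }
    }
  }
]);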
@@ -158,6 +158,7 @@ export class TemplateController {
         templateName,
         templateDescription,
         templateCategory,
+        workflowType, // Added
         approvalLevelsConfig,
         defaultTatHours,
         formStepsConfig,
@@ -174,9 +175,10 @@
       } = req.body;
 
       const template = await this.templateService.updateTemplate(templateId, userId, {
-        templateName: templateName || name,
-        templateDescription: templateDescription || description,
-        templateCategory: templateCategory || category,
+        name: templateName || name,
+        description: templateDescription || description,
+        department: templateCategory || category,
+        workflowType,
         approvalLevelsConfig: approvalLevelsConfig || approvers,
         defaultTatHours: (defaultTatHours || suggestedSLA) ? parseFloat(defaultTatHours || suggestedSLA) : undefined,
         formStepsConfig,
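The rename from templateCategory to department in the service payload is the kind of silent remapping that breaks callers quietly; an explicit payload type makes the new contract visible at compile time. A sketch under the assumption that the Mongo template service accepts these fields (the interface name is hypothetical):

// Hypothetical shape of the Mongo-side update payload, for documentation purposes.
interface UpdateTemplatePayload {
  name?: string;
  description?: string;
  department?: string;          // formerly templateCategory
  workflowType?: string;        // newly forwarded field
  approvalLevelsConfig?: unknown[];
  defaultTatHours?: number;
  formStepsConfig?: unknown;
}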
@@ -1,5 +1,5 @@
 import { Request, Response } from 'express';
-import { User } from '@models/User';
+import { UserModel } from '../models/mongoose/User.schema';
 import { updateNotificationPreferencesSchema } from '@validators/userPreference.validator';
 import logger from '@utils/logger';
 
@@ -10,14 +10,7 @@ export const getNotificationPreferences = async (req: Request, res: Response): P
   try {
     const userId = req.user!.userId;
 
-    const user = await User.findByPk(userId, {
-      attributes: [
-        'userId',
-        'emailNotificationsEnabled',
-        'pushNotificationsEnabled',
-        'inAppNotificationsEnabled'
-      ]
-    });
+    const user = await UserModel.findOne({ userId });
 
     if (!user) {
       res.status(404).json({
@@ -32,9 +25,9 @@ export const getNotificationPreferences = async (req: Request, res: Response): P
     res.json({
       success: true,
       data: {
-        emailNotificationsEnabled: user.emailNotificationsEnabled,
-        pushNotificationsEnabled: user.pushNotificationsEnabled,
-        inAppNotificationsEnabled: user.inAppNotificationsEnabled
+        emailNotificationsEnabled: user.notifications?.email ?? true,
+        pushNotificationsEnabled: user.notifications?.push ?? true,
+        inAppNotificationsEnabled: user.notifications?.inApp ?? true
       }
     });
   } catch (error: any) {
@@ -57,7 +50,7 @@ export const updateNotificationPreferences = async (req: Request, res: Response)
     // Validate request body
     const validated = updateNotificationPreferencesSchema.parse(req.body);
 
-    const user = await User.findByPk(userId);
+    const user = await UserModel.findOne({ userId });
 
     if (!user) {
       res.status(404).json({
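Small regression risk in the read path above: the Sequelize call projected only the preference columns, while findOne({ userId }) pulls the full user document. If the user document is heavy, a projection restores parity; a sketch (suggestion, not from the branch):

// Fetch only what the endpoint returns; lean() skips hydration on a read-only path.
const user = await UserModel.findOne({ userId })
  .select('userId notifications')
  .lean();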
@@ -67,29 +60,32 @@ export const updateNotificationPreferences = async (req: Request, res: Response)
       return;
     }
 
-    // Update only provided fields
-    const updateData: any = {};
+    // Update only provided fields in nested notifications object
+    if (!user.notifications) {
+      user.notifications = { email: true, push: true, inApp: true };
+    }
+
     if (validated.emailNotificationsEnabled !== undefined) {
-      updateData.emailNotificationsEnabled = validated.emailNotificationsEnabled;
+      user.notifications.email = validated.emailNotificationsEnabled;
     }
     if (validated.pushNotificationsEnabled !== undefined) {
-      updateData.pushNotificationsEnabled = validated.pushNotificationsEnabled;
+      user.notifications.push = validated.pushNotificationsEnabled;
     }
     if (validated.inAppNotificationsEnabled !== undefined) {
-      updateData.inAppNotificationsEnabled = validated.inAppNotificationsEnabled;
+      user.notifications.inApp = validated.inAppNotificationsEnabled;
     }
 
-    await user.update(updateData);
+    await user.save();
 
-    logger.info(`[UserPreference] Updated notification preferences for user ${userId}:`, updateData);
+    logger.info(`[UserPreference] Updated notification preferences for user ${userId}`);
 
     res.json({
       success: true,
       message: 'Notification preferences updated successfully',
       data: {
-        emailNotificationsEnabled: user.emailNotificationsEnabled,
-        pushNotificationsEnabled: user.pushNotificationsEnabled,
-        inAppNotificationsEnabled: user.inAppNotificationsEnabled
+        emailNotificationsEnabled: user.notifications.email,
+        pushNotificationsEnabled: user.notifications.push,
+        inAppNotificationsEnabled: user.notifications.inApp
       }
     });
   } catch (error: any) {
@@ -110,4 +106,3 @@ export const updateNotificationPreferences = async (req: Request, res: Response)
     });
   }
 };
-
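One Mongoose gotcha relevant to the hunk above: if notifications is declared as Schema.Types.Mixed rather than a nested schema, in-place mutation is not change-tracked and save() can silently persist nothing. A hedged sketch of the guard, only needed under that assumption:

const enabled = true; // example value from the validated payload
user.notifications = user.notifications ?? { email: true, push: true, inApp: true };
user.notifications.email = enabled;

// Required only when `notifications` is a Mixed-type path:
// tell Mongoose the nested object changed before saving.
user.markModified('notifications');
await user.save();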
@@ -1,23 +1,23 @@
 import { Request, Response } from 'express';
-import { WorkflowService } from '@services/workflow.service';
+import { workflowServiceMongo } from '@services/workflow.service';
 import { validateCreateWorkflow, validateUpdateWorkflow } from '@validators/workflow.validator';
 import { ResponseHandler } from '@utils/responseHandler';
 import type { AuthenticatedRequest } from '../types/express';
 import { Priority } from '../types/common.types';
 import type { UpdateWorkflowRequest } from '../types/workflow.types';
-import { Document } from '@models/Document';
-import { User } from '@models/User';
+import { DocumentModel } from '../models/mongoose/Document.schema';
+import { UserModel } from '../models/mongoose/User.schema';
 import { gcsStorageService } from '@services/gcsStorage.service';
 import fs from 'fs';
 import path from 'path';
 import crypto from 'crypto';
 import { getRequestMetadata } from '@utils/requestUtils';
 import { enrichApprovalLevels, enrichSpectators, validateInitiator } from '@services/userEnrichment.service';
-import { DealerClaimService } from '@services/dealerClaim.service';
+import { DealerClaimMongoService } from '@services/dealerClaim.service';
+import { activityMongoService as activityService } from '@services/activity.service';
 import logger from '@utils/logger';
 
-const workflowService = new WorkflowService();
-const dealerClaimService = new DealerClaimService();
+const dealerClaimService = new DealerClaimMongoService();
 
 export class WorkflowController {
   async createWorkflow(req: AuthenticatedRequest, res: Response): Promise<void> {
@@ -66,9 +66,9 @@ export class WorkflowController {
 
       // Build complete participants array automatically
       // This includes: INITIATOR + all APPROVERs + all SPECTATORs
-      const initiator = await User.findByPk(req.user.userId);
-      const initiatorEmail = (initiator as any).email;
-      const initiatorName = (initiator as any).displayName || (initiator as any).email;
+      const initiator = await UserModel.findOne({ userId: req.user.userId });
+      const initiatorEmail = (initiator as any)?.email;
+      const initiatorName = (initiator as any)?.displayName || (initiator as any)?.email;
 
       const autoGeneratedParticipants = [
         // Add initiator
@@ -100,13 +100,15 @@ export class WorkflowController {
       // Convert string literal priority to enum
       const workflowData = {
         ...validatedData,
+        initiatorEmail,
+        initiatorName,
         priority: validatedData.priority as Priority,
         approvalLevels: enrichedApprovalLevels,
         participants: autoGeneratedParticipants,
       };
 
       const requestMeta = getRequestMetadata(req);
-      const workflow = await workflowService.createWorkflow(req.user.userId, workflowData, {
+      const workflow = await workflowServiceMongo.createWorkflow(req.user.userId, workflowData, {
         ipAddress: requestMeta.ipAddress,
         userAgent: requestMeta.userAgent
       });
@@ -200,9 +202,9 @@ export class WorkflowController {
 
       // Build complete participants array automatically
       // This includes: INITIATOR + all APPROVERs + all SPECTATORs
-      const initiator = await User.findByPk(userId);
-      const initiatorEmail = (initiator as any).email;
-      const initiatorName = (initiator as any).displayName || (initiator as any).email;
+      const initiator = await UserModel.findOne({ userId: userId });
+      const initiatorEmail = (initiator as any)?.email;
+      const initiatorName = (initiator as any)?.displayName || (initiator as any)?.email || initiatorEmail;
 
       const autoGeneratedParticipants = [
         // Add initiator
@@ -233,13 +235,15 @@ export class WorkflowController {
 
       const workflowData = {
         ...validated,
+        initiatorEmail,
+        initiatorName,
         priority: validated.priority as Priority,
         approvalLevels: enrichedApprovalLevels,
         participants: autoGeneratedParticipants,
       } as any;
 
       const requestMeta = getRequestMetadata(req);
-      const workflow = await workflowService.createWorkflow(userId, workflowData, {
+      const workflow = await workflowServiceMongo.createWorkflow(userId, workflowData, {
         ipAddress: requestMeta.ipAddress,
         userAgent: requestMeta.userAgent
       });
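The switch to (initiator as any)?.email stops the old null-pointer crash when the initiator row is missing, but it also lets a workflow be created with initiatorEmail: undefined. If that case should be a hard failure instead, a small explicit guard is cheaper to debug; a sketch offered as a reviewer suggestion:

const initiator = await UserModel.findOne({ userId: req.user.userId });
if (!initiator) {
  // Fail fast instead of creating a workflow with undefined initiator fields.
  ResponseHandler.error(res, 'Initiator user not found', 404);
  return;
}
const initiatorEmail = initiator.email;
const initiatorName = initiator.displayName || initiator.email;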
@@ -249,8 +253,7 @@ export class WorkflowController {
       const category = (req.body?.category as string) || 'OTHER';
       const docs: any[] = [];
       if (files && files.length > 0) {
-        const { activityService } = require('../services/activity.service');
-        const user = await User.findByPk(userId);
+        const user = await UserModel.findOne({ userId });
         const uploaderName = (user as any)?.displayName || (user as any)?.email || 'User';
 
         for (const file of files) {
@@ -346,12 +349,13 @@ export class WorkflowController {
             fileName: truncatedOriginalFileName,
             filePath: gcsFilePath,
             storageUrl: finalStorageUrl ? 'present' : 'null (too long)',
-            requestId: workflow.requestId
+            requestId: workflow.requestNumber
           });
 
           try {
-            const doc = await Document.create({
-              requestId: workflow.requestId,
+            const doc = await DocumentModel.create({
+              documentId: require('crypto').randomUUID(),
+              requestId: workflow.requestId, // Standardized to UUID
               uploadedBy: userId,
               fileName: truncatedFileName,
               originalFileName: truncatedOriginalFileName,
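Minor cleanup opportunity in the hunk above: the file already imports crypto at the top, so the inline require('crypto').randomUUID() can reuse it. randomUUID has been available on the built-in module since Node 14.17:

import crypto from 'crypto';

// Same UUID v4 generation as require('crypto').randomUUID(), via the existing import.
const documentId: string = crypto.randomUUID();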
@@ -362,14 +366,10 @@ export class WorkflowController {
               storageUrl: finalStorageUrl, // Store GCS URL or local URL (null if too long)
               mimeType: file.mimetype,
               checksum,
-              isGoogleDoc: false,
-              googleDocUrl: null as any,
-              category: category || 'OTHER',
+              category: (category || 'OTHER') as any,
               version: 1,
-              parentDocumentId: null as any,
               isDeleted: false,
-              downloadCount: 0,
-            } as any);
+            });
             docs.push(doc);
             logger.info('[Workflow] Document record created successfully', {
               documentId: doc.documentId,
@@ -382,7 +382,7 @@ export class WorkflowController {
               error: docErrorMessage,
               stack: docErrorStack,
               fileName: file.originalname,
-              requestId: workflow.requestId,
+              requestId: workflow.requestNumber,
               filePath: gcsFilePath,
               storageUrl: storageUrl,
             });
@@ -393,7 +393,7 @@ export class WorkflowController {
           // Log document upload activity
           const requestMeta = getRequestMetadata(req);
           activityService.log({
-            requestId: workflow.requestId,
+            requestId: workflow.requestId, // Use UUID
             type: 'document_added',
             user: { userId, name: uploaderName },
             timestamp: new Date().toISOString(),
@@ -406,7 +406,7 @@ export class WorkflowController {
         }
       }
 
-      ResponseHandler.success(res, { requestId: workflow.requestId, documents: docs }, 'Workflow created with documents', 201);
+      ResponseHandler.success(res, { requestId: workflow.requestNumber, documents: docs }, 'Workflow created with documents', 201);
     } catch (error) {
       const errorMessage = error instanceof Error ? error.message : 'Unknown error';
       const errorStack = error instanceof Error ? error.stack : undefined;
@@ -423,7 +423,7 @@ export class WorkflowController {
   async getWorkflow(req: Request, res: Response): Promise<void> {
     try {
       const { id } = req.params;
-      const workflow = await workflowService.getWorkflowById(id);
+      const workflow = await workflowServiceMongo.getWorkflowById(id);
 
       if (!workflow) {
         ResponseHandler.notFound(res, 'Workflow not found');
@@ -448,13 +448,13 @@ export class WorkflowController {
       }
 
       // Check if user has access to this request
-      const accessCheck = await workflowService.checkUserRequestAccess(userId, id);
+      const accessCheck = await workflowServiceMongo.checkUserRequestAccess(userId, id);
       if (!accessCheck.hasAccess) {
         ResponseHandler.error(res, accessCheck.reason || 'Access denied', 403);
         return;
       }
 
-      const result = await workflowService.getWorkflowDetails(id);
+      const result = await workflowServiceMongo.getWorkflowDetails(id);
       if (!result) {
         ResponseHandler.notFound(res, 'Workflow not found');
         return;
@@ -479,7 +479,7 @@ export class WorkflowController {
         templateType: req.query.templateType as string | undefined,
         department: req.query.department as string | undefined,
         initiator: req.query.initiator as string | undefined,
-        approver: req.query.approver as string | undefined,
+        approverName: req.query.approver as string | undefined, // Mapping 'approver' to 'approverName' for Mongo deep filter
         approverType: req.query.approverType as 'current' | 'any' | undefined,
         slaCompliance: req.query.slaCompliance as string | undefined,
         dateRange: req.query.dateRange as string | undefined,
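The hunks above leave requestId meaning two different things: the HTTP response returns the human-readable requestNumber under the requestId key, while activity logging keeps the UUID. If clients need both, returning them under distinct keys removes the ambiguity; a sketch (reviewer suggestion, not the branch's behavior):

// Disambiguate the identifiers in the response payload.
ResponseHandler.success(
  res,
  {
    requestId: workflow.requestId,         // internal UUID
    requestNumber: workflow.requestNumber, // human-readable reference
    documents: docs
  },
  'Workflow created with documents',
  201
);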
@ -487,7 +487,8 @@ export class WorkflowController {
|
|||||||
endDate: req.query.endDate as string | undefined,
|
endDate: req.query.endDate as string | undefined,
|
||||||
};
|
};
|
||||||
|
|
||||||
const result = await workflowService.listWorkflows(page, limit, filters);
|
// USE MONGODB SERVICE FOR LISTING
|
||||||
|
const result = await workflowServiceMongo.listWorkflows(page, limit, filters);
|
||||||
ResponseHandler.success(res, result, 'Workflows fetched');
|
ResponseHandler.success(res, result, 'Workflows fetched');
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
|
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
|
||||||
@ -516,7 +517,7 @@ export class WorkflowController {
|
|||||||
|
|
||||||
const filters = { search, status, priority, department, initiator, approver, approverType, slaCompliance, dateRange, startDate, endDate };
|
const filters = { search, status, priority, department, initiator, approver, approverType, slaCompliance, dateRange, startDate, endDate };
|
||||||
|
|
||||||
const result = await workflowService.listMyRequests(userId, page, limit, filters);
|
const result = await workflowServiceMongo.listMyRequests(userId, page, limit, filters);
|
||||||
ResponseHandler.success(res, result, 'My requests fetched');
|
ResponseHandler.success(res, result, 'My requests fetched');
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
|
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
|
||||||
@ -550,7 +551,7 @@ export class WorkflowController {
|
|||||||
|
|
||||||
const filters = { search, status, priority, templateType, department, initiator, approver, approverType, slaCompliance, dateRange, startDate, endDate };
|
const filters = { search, status, priority, templateType, department, initiator, approver, approverType, slaCompliance, dateRange, startDate, endDate };
|
||||||
|
|
||||||
const result = await workflowService.listParticipantRequests(userId, page, limit, filters);
|
const result = await workflowServiceMongo.listParticipantRequests(userId, page, limit, filters);
|
||||||
ResponseHandler.success(res, result, 'Participant requests fetched');
|
ResponseHandler.success(res, result, 'Participant requests fetched');
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
|
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
|
||||||
@ -580,7 +581,7 @@ export class WorkflowController {
|
|||||||
|
|
||||||
const filters = { search, status, priority, templateType, department, slaCompliance, dateRange, startDate, endDate };
|
const filters = { search, status, priority, templateType, department, slaCompliance, dateRange, startDate, endDate };
|
||||||
|
|
||||||
const result = await workflowService.listMyInitiatedRequests(userId, page, limit, filters);
|
const result = await workflowServiceMongo.listMyInitiatedRequests(userId, page, limit, filters);
|
||||||
ResponseHandler.success(res, result, 'My initiated requests fetched');
|
ResponseHandler.success(res, result, 'My initiated requests fetched');
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
|
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
|
||||||
@ -606,7 +607,7 @@ export class WorkflowController {
|
|||||||
const sortBy = req.query.sortBy as string | undefined;
|
const sortBy = req.query.sortBy as string | undefined;
|
||||||
const sortOrder = (req.query.sortOrder as string | undefined) || 'desc';
|
const sortOrder = (req.query.sortOrder as string | undefined) || 'desc';
|
||||||
|
|
||||||
const result = await workflowService.listOpenForMe(userId, page, limit, filters, sortBy, sortOrder);
|
const result = await workflowServiceMongo.listOpenForMe(userId, page, limit, filters, sortBy, sortOrder);
|
||||||
ResponseHandler.success(res, result, 'Open requests for user fetched');
|
ResponseHandler.success(res, result, 'Open requests for user fetched');
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
|
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
|
||||||
@@ -632,7 +633,7 @@ export class WorkflowController {
       const sortBy = req.query.sortBy as string | undefined;
       const sortOrder = (req.query.sortOrder as string | undefined) || 'desc';

-      const result = await workflowService.listClosedByMe(userId, page, limit, filters, sortBy, sortOrder);
+      const result = await workflowServiceMongo.listClosedByMe(userId, page, limit, filters, sortBy, sortOrder);
       ResponseHandler.success(res, result, 'Closed requests by user fetched');
     } catch (error) {
       const errorMessage = error instanceof Error ? error.message : 'Unknown error';
@@ -651,7 +652,7 @@ export class WorkflowController {
         updateData.priority = validatedData.priority === 'EXPRESS' ? Priority.EXPRESS : Priority.STANDARD;
       }

-      const workflow = await workflowService.updateWorkflow(id, updateData);
+      const workflow = await workflowServiceMongo.updateWorkflow(id, updateData);

      if (!workflow) {
        ResponseHandler.notFound(res, 'Workflow not found');
@@ -690,7 +691,7 @@ export class WorkflowController {
      // Update workflow
      let workflow;
      try {
-       workflow = await workflowService.updateWorkflow(id, updateData);
+       workflow = await workflowServiceMongo.updateWorkflow(id, updateData);
        if (!workflow) {
          ResponseHandler.notFound(res, 'Workflow not found');
          return;
@@ -814,7 +815,8 @@ export class WorkflowController {
        });

        try {
-         const doc = await Document.create({
+         const doc = await DocumentModel.create({
+           documentId: require('crypto').randomUUID(),
            requestId: actualRequestId,
            uploadedBy: userId,
            fileName: truncatedFileName,
@@ -826,14 +828,10 @@ export class WorkflowController {
            storageUrl: finalStorageUrl, // Store GCS URL or local URL (null if too long)
            mimeType: file.mimetype,
            checksum,
-           isGoogleDoc: false,
-           googleDocUrl: null as any,
-           category: category || 'OTHER',
+           category: (category || 'OTHER') as any,
            version: 1,
-           parentDocumentId: null as any,
            isDeleted: false,
-           downloadCount: 0,
-         } as any);
+         });
          docs.push(doc);
          logger.info('[Workflow] Document record created successfully', {
            documentId: doc.documentId,
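Worth noting on the hunk above: the Mongo path now mints the document's primary key in application code instead of relying on a database default, and the Sequelize-era Google Doc fields are dropped. A minimal sketch of that pattern, assuming a `DocumentModel` with a string `documentId` field; the helper below is illustrative, not the controller's actual code:

import { randomUUID } from 'crypto';

// Hypothetical helper; the real controller inlines require('crypto').randomUUID().
function newDocumentFields(requestId: string, uploadedBy: string, fileName: string) {
  return {
    documentId: randomUUID(), // app-generated UUID replaces a DB-side default
    requestId,
    uploadedBy,
    fileName,
  };
}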
@@ -875,7 +873,7 @@ export class WorkflowController {
  async submitWorkflow(req: Request, res: Response): Promise<void> {
    try {
      const { id } = req.params;
-     const workflow = await workflowService.submitWorkflow(id);
+     const workflow = await workflowServiceMongo.submitWorkflow(id);

      if (!workflow) {
        ResponseHandler.notFound(res, 'Workflow not found');
@@ -918,14 +916,13 @@ export class WorkflowController {
    try {
      const { id } = req.params;

-     // Resolve requestId UUID from identifier (could be requestNumber or UUID)
-     const workflowService = new WorkflowService();
-     const wf = await (workflowService as any).findWorkflowByIdentifier(id);
+     // Resolve requestId from identifier (could be requestNumber or ID)
+     const wf = await workflowServiceMongo.getRequest(id);
      if (!wf) {
        ResponseHandler.notFound(res, 'Workflow not found');
        return;
      }
-     const requestId = wf.getDataValue('requestId');
+     const requestId = wf.requestId; // Use UUID

      const history = await dealerClaimService.getHistory(requestId);
      ResponseHandler.success(res, history, 'Revision history fetched successfully');
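The workflowServiceMongo.getRequest(id) call above replaces the old findWorkflowByIdentifier helper. Its implementation is not part of this diff; a sketch under the assumption that it follows the same convention as the middleware helper further down (UUIDs resolve by requestId, anything else by requestNumber):

const UUID_RE = /^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$/i;

// Assumed behavior of getRequest; `model` stands in for WorkflowRequestModel.
async function getRequestSketch(model: { findOne(q: object): Promise<unknown> }, identifier: string) {
  const query = UUID_RE.test(identifier)
    ? { requestId: identifier }
    : { requestNumber: identifier };
  return model.findOne(query);
}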
@@ -1,5 +1,5 @@
 import { Request, Response } from 'express';
-import { WorkflowTemplate } from '../models';
+import { WorkflowTemplateModel as WorkflowTemplate } from '../models/mongoose/WorkflowTemplate.schema';
 import logger from '../utils/logger';

 export const createTemplate = async (req: Request, res: Response) => {
@@ -36,10 +36,8 @@ export const createTemplate = async (req: Request, res: Response) => {

 export const getTemplates = async (req: Request, res: Response) => {
   try {
-    const templates = await WorkflowTemplate.findAll({
-      where: { isActive: true },
-      order: [['createdAt', 'DESC']]
-    });
+    const templates = await WorkflowTemplate.find({ isActive: true })
+      .sort({ createdAt: -1 });

     res.status(200).json({
       success: true,
@@ -69,7 +67,7 @@ export const updateTemplate = async (req: Request, res: Response) => {
    if (suggestedSLA) updates.defaultTatHours = suggestedSLA;
    if (isActive !== undefined) updates.isActive = isActive;

-   const template = await WorkflowTemplate.findByPk(id);
+   const template = await WorkflowTemplate.findByIdAndUpdate(id, updates, { new: true });

    if (!template) {
      return res.status(404).json({
@@ -78,8 +76,6 @@ export const updateTemplate = async (req: Request, res: Response) => {
      });
    }

-   await template.update(updates);
-
    return res.status(200).json({
      success: true,
      message: 'Workflow template updated successfully',
@@ -98,7 +94,7 @@ export const updateTemplate = async (req: Request, res: Response) => {
 export const deleteTemplate = async (req: Request, res: Response) => {
   try {
     const { id } = req.params;
-    const template = await WorkflowTemplate.findByPk(id);
+    const template = await WorkflowTemplate.findById(id);

    if (!template) {
      return res.status(404).json({
@@ -107,13 +103,8 @@ export const deleteTemplate = async (req: Request, res: Response) => {
      });
    }

-   // Hard delete or Soft delete based on preference.
-   // Since we have isActive flag, let's use that (Soft Delete) or just destroy if it's unused.
-   // For now, let's do a hard delete to match the expectation of "Delete" in the UI
-   // unless there are FK constraints (which sequelize handles).
-   // Actually, safer to Soft Delete by setting isActive = false if we want history,
-   // but user asked for Delete. Let's do destroy.
-   await template.destroy();
+   // Hard delete
+   await template.deleteOne();

    return res.status(200).json({
      success: true,
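The template controller changes above reduce to a small Sequelize-to-Mongoose call mapping, applied against the WorkflowTemplate import shown at the top of this file. Illustrative only:

// findAll({ where, order })   ->  find(filter).sort({ field: -1 })
// findByPk(id)                ->  findById(id)
// findByPk + instance.update  ->  findByIdAndUpdate(id, updates, { new: true })
// instance.destroy()          ->  document.deleteOne()
async function updateTemplateSketch(id: string, updates: Record<string, unknown>) {
  // { new: true } resolves to the post-update document; Mongoose returns the
  // pre-update document by default, which would break the 200 response body.
  return WorkflowTemplate.findByIdAndUpdate(id, updates, { new: true });
}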
@@ -1,70 +1,95 @@
-import type { Request, Response } from 'express';
-import { workNoteService } from '../services/worknote.service';
-import { WorkflowService } from '../services/workflow.service';
+import type { Response } from 'express';
+import { workNoteMongoService } from '../services/worknote.service';
+import { workflowServiceMongo } from '../services/workflow.service';
 import { getRequestMetadata } from '@utils/requestUtils';
+import { ResponseHandler } from '@utils/responseHandler';
+import { AuthenticatedRequest } from '../types/express';
+import { ParticipantModel } from '../models/mongoose/Participant.schema';

 export class WorkNoteController {
-  private workflowService = new WorkflowService();
-
-  async list(req: any, res: Response): Promise<void> {
-    const wf = await (this.workflowService as any).findWorkflowByIdentifier(req.params.id);
-    if (!wf) { res.status(404).json({ success: false, error: 'Not found' }); return; }
-    const requestId: string = wf.getDataValue('requestId');
-    const rows = await workNoteService.list(requestId);
-    res.json({ success: true, data: rows });
-  }
+  /**
+   * List notes for a request
+   */
+  async list(req: AuthenticatedRequest, res: Response): Promise<void> {
+    try {
+      const requestNumber = req.params.id;
+      const request = await workflowServiceMongo.getRequest(requestNumber);
+
+      if (!request) {
+        ResponseHandler.notFound(res, 'Request not found');
+        return;
+      }
+
+      const rows = await workNoteMongoService.list(requestNumber);
+      ResponseHandler.success(res, rows, 'Work notes retrieved');
+    } catch (error) {
+      ResponseHandler.error(res, 'Failed to list work notes', 500);
+    }
+  }

-  async create(req: any, res: Response): Promise<void> {
-    const wf = await (this.workflowService as any).findWorkflowByIdentifier(req.params.id);
-    if (!wf) { res.status(404).json({ success: false, error: 'Not found' }); return; }
-    const requestId: string = wf.getDataValue('requestId');
-
-    // Get user's participant info (includes userName and role)
-    const { Participant } = require('@models/Participant');
-    const participant = await Participant.findOne({
-      where: { requestId, userId: req.user?.userId }
-    });
-
-    let userName = req.user?.email || 'Unknown User';
-    let userRole = 'SPECTATOR';
-
-    if (participant) {
-      userName = (participant as any).userName || (participant as any).user_name || req.user?.email || 'Unknown User';
-      userRole = (participant as any).participantType || (participant as any).participant_type || 'SPECTATOR';
-    }
-
-    const user = {
-      userId: req.user?.userId,
-      name: userName,
-      role: userRole
-    };
-
-    const payload = req.body?.payload ? JSON.parse(req.body.payload) : (req.body || {});
-    // Map files with buffer for GCS upload (multer.memoryStorage provides buffer, not path)
-    const files = (req.files as any[])?.map(f => ({
-      buffer: f.buffer,
-      path: f.path || null, // May not exist with memory storage
-      originalname: f.originalname,
-      mimetype: f.mimetype,
-      size: f.size
-    })) || [];
-
-    // Extract mentions from payload (sent by frontend)
-    const mentions = payload.mentions || [];
-    const workNotePayload = {
-      message: payload.message,
-      isPriority: payload.isPriority,
-      parentNoteId: payload.parentNoteId,
-      mentionedUsers: mentions // Pass mentioned user IDs to service
-    };
-
-    const requestMeta = getRequestMetadata(req);
-    const note = await workNoteService.create(requestId, user, workNotePayload, files, {
-      ipAddress: requestMeta.ipAddress,
-      userAgent: requestMeta.userAgent
-    });
-    res.status(201).json({ success: true, data: note });
-  }
+  /**
+   * Create a new work note
+   */
+  async create(req: AuthenticatedRequest, res: Response): Promise<void> {
+    try {
+      const requestNumber = req.params.id;
+      const request = await workflowServiceMongo.getRequest(requestNumber);
+
+      if (!request) {
+        ResponseHandler.notFound(res, 'Request not found');
+        return;
+      }
+
+      // Get user's participant info from Mongo
+      const participant = await ParticipantModel.findOne({
+        requestId: requestNumber,
+        userId: req.user.userId
+      });
+
+      let userName = req.user.email || 'Unknown User';
+      let userRole = 'SPECTATOR';
+
+      if (participant) {
+        userName = participant.userName || req.user.email || 'Unknown User';
+        userRole = participant.participantType || 'SPECTATOR';
+      }
+
+      const user = {
+        userId: req.user.userId,
+        name: userName,
+        role: userRole
+      };
+
+      const payload = req.body?.payload ? JSON.parse(req.body.payload) : (req.body || {});
+
+      // Map files
+      const files = (req.files as any[])?.map(f => ({
+        buffer: f.buffer,
+        path: f.path || null,
+        originalname: f.originalname,
+        mimetype: f.mimetype,
+        size: f.size
+      })) || [];
+
+      const workNotePayload = {
+        message: payload.message,
+        type: payload.type || 'COMMENT',
+        isVisibleToDealer: payload.isVisibleToDealer || false,
+        mentionedUsers: payload.mentions || []
+      };
+
+      const requestMeta = getRequestMetadata(req);
+      const note = await workNoteMongoService.create(
+        requestNumber,
+        user,
+        workNotePayload,
+        files
+      );
+
+      ResponseHandler.success(res, note, 'Work note created', 201);
+    } catch (error) {
+      const msg = error instanceof Error ? error.message : 'Unknown error';
+      ResponseHandler.error(res, 'Failed to create work note', 500, msg);
+    }
+  }
 }
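The rewritten create() keeps the multipart convention: a JSON `payload` string plus file parts. A sketch of a matching client call; the route path is an assumption, since routing is not part of this diff:

async function postWorkNote(requestNumber: string, message: string, file: Blob, token: string) {
  const form = new FormData();
  // Server parses req.body.payload with JSON.parse, so send a string
  form.append('payload', JSON.stringify({ message, type: 'COMMENT', mentions: [] }));
  form.append('files', file);
  return fetch(`/api/requests/${requestNumber}/work-notes`, { // hypothetical path
    method: 'POST',
    headers: { Authorization: `Bearer ${token}` },
    body: form,
  });
}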
@@ -5,7 +5,8 @@
  * Logic: Email only sent if BOTH admin AND user have it enabled
  */

-import { User } from '@models/User';
+
 import { SYSTEM_CONFIG } from '../config/system.config';
 import { getConfigValue } from '../services/configReader.service';
 import logger from '../utils/logger';

@@ -85,7 +86,7 @@ async function isAdminEmailEnabled(emailType: EmailNotificationType): Promise<boolean> {

   if (dbConfigValue) {
     // Parse database value (it's stored as string 'true' or 'false')
-    const dbEnabled = dbConfigValue.toLowerCase() === 'true';
+    const dbEnabled = String(dbConfigValue).toLowerCase() === 'true';

     if (!dbEnabled) {
       logger.info('[Email] Admin has disabled email notifications globally (from database config)');

@@ -119,18 +120,18 @@ async function isAdminEmailEnabled(emailType: EmailNotificationType): Promise<boolean> {
  */
 async function isUserEmailEnabled(userId: string, emailType: EmailNotificationType): Promise<boolean> {
   try {
+    const { UserModel } = await import('../models/mongoose/User.schema');
     // Fetch user and check emailNotificationsEnabled field
-    const user = await User.findByPk(userId, {
-      attributes: ['userId', 'emailNotificationsEnabled']
-    });
+    const user = await UserModel.findOne({ userId });

     if (!user) {
       logger.warn(`[Email] User ${userId} not found - defaulting to enabled`);
       return true;
     }

-    // Check user's global email notification setting
-    const enabled = (user as any).emailNotificationsEnabled !== false;
+    // Check user's global email notification setting (Mongoose uses nested 'notifications.email')
+    // Fallback to true if undefined
+    const enabled = user.notifications?.email !== false;

     if (!enabled) {
       logger.info(`[Email] User ${userId} has disabled email notifications globally`);

@@ -159,18 +160,17 @@ export async function shouldSendInAppNotification(
       return false;
     }

+    const { UserModel } = await import('../models/mongoose/User.schema');
     // Fetch user and check inAppNotificationsEnabled field
-    const user = await User.findByPk(userId, {
-      attributes: ['userId', 'inAppNotificationsEnabled']
-    });
+    const user = await UserModel.findOne({ userId });

     if (!user) {
       logger.warn(`[Notification] User ${userId} not found - defaulting to enabled`);
       return true;
     }

-    // Check user's global in-app notification setting
-    const enabled = (user as any).inAppNotificationsEnabled !== false;
+    // Check user's global in-app notification setting (Mongoose uses nested 'notifications.inApp')
+    const enabled = user.notifications?.inApp !== false;

     if (!enabled) {
       logger.info(`[Notification] User ${userId} has disabled in-app notifications globally`);

@@ -194,7 +194,7 @@ async function isAdminInAppEnabled(notificationType: string): Promise<boolean> {

   if (dbConfigValue) {
     // Parse database value (it's stored as string 'true' or 'false')
-    const dbEnabled = dbConfigValue.toLowerCase() === 'true';
+    const dbEnabled = String(dbConfigValue).toLowerCase() === 'true';

     if (!dbEnabled) {
       logger.info('[Notification] Admin has disabled in-app notifications globally (from database config)');

@@ -282,4 +282,3 @@ export async function shouldSendEmailWithOverride(
   // Non-critical emails - check both admin and user preferences
   return await shouldSendEmail(userId, emailType);
 }
-
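Both checks now read a nested preference object instead of flat columns. The shape below is inferred from user.notifications?.email / user.notifications?.inApp in the hunks above, not copied from the actual User schema:

interface NotificationPrefs {
  notifications?: {
    email?: boolean; // opt-out model: only an explicit false disables
    inApp?: boolean;
  };
}

// Mirrors `!== false`: undefined and true both count as enabled
const emailEnabled = (u: NotificationPrefs) => u.notifications?.email !== false;
const inAppEnabled = (u: NotificationPrefs) => u.notifications?.inApp !== false;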
@@ -91,6 +91,7 @@ export interface WorkflowPausedData extends BaseEmailData {
   pausedTime: string;
   resumeDate: string;
   pauseReason: string;
+  isApprover?: boolean;
 }

 export interface WorkflowResumedData extends BaseEmailData {
@@ -1,6 +1,6 @@
 import { Request, Response, NextFunction } from 'express';
 import jwt from 'jsonwebtoken';
-import { User } from '../models/User';
+import { UserModel } from '../models/mongoose/User.schema';
 import { ssoConfig } from '../config/sso';
 import { ResponseHandler } from '../utils/responseHandler';

@@ -37,7 +37,7 @@ export const authenticateToken = async (
   const decoded = jwt.verify(token, ssoConfig.jwtSecret) as JwtPayload;

   // Fetch user from database to ensure they still exist and are active
-  const user = await User.findByPk(decoded.userId);
+  const user = await UserModel.findOne({ userId: decoded.userId });

   if (!user || !user.isActive) {
     ResponseHandler.unauthorized(res, 'User not found or inactive');

@@ -88,7 +88,7 @@ export const optionalAuth = async (

   if (token) {
     const decoded = jwt.verify(token, ssoConfig.jwtSecret) as JwtPayload;
-    const user = await User.findByPk(decoded.userId);
+    const user = await UserModel.findOne({ userId: decoded.userId });

     if (user && user.isActive) {
       req.user = {
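findByPk maps to findOne({ userId }) rather than findById because the JWT carries the application-level UUID, while Mongoose's findById queries the ObjectId _id. A schema fragment sketching why that lookup stays indexed (an assumption; the project's real User schema is not in this diff):

import { Schema } from 'mongoose';

const userSchemaSketch = new Schema({
  userId: { type: String, required: true, unique: true }, // unique implies an index
  isActive: { type: Boolean, default: true },
});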
@@ -1,7 +1,7 @@
 import { Request, Response, NextFunction } from 'express';
-import { Participant } from '@models/Participant';
-import { WorkflowRequest } from '@models/WorkflowRequest';
-import { Op } from 'sequelize';
+import { WorkflowRequestModel } from '../models/mongoose/WorkflowRequest.schema';
+import { ParticipantModel } from '../models/mongoose/Participant.schema';
+import { ApprovalLevelModel } from '../models/mongoose/ApprovalLevel.schema';

 type AllowedType = 'INITIATOR' | 'APPROVER' | 'SPECTATOR';

@@ -12,14 +12,11 @@ function isUuid(identifier: string): boolean {
 }

 // Helper to find workflow by either requestId or requestNumber
-async function findWorkflowByIdentifier(identifier: string): Promise<WorkflowRequest | null> {
-  if (isUuid(identifier)) {
-    return await WorkflowRequest.findByPk(identifier);
-  } else {
-    return await WorkflowRequest.findOne({
-      where: { requestNumber: identifier }
-    });
-  }
+async function findWorkflowByIdentifier(identifier: string): Promise<any | null> {
+  const query = isUuid(identifier)
+    ? { requestId: identifier }
+    : { requestNumber: identifier };
+  return await WorkflowRequestModel.findOne(query);
 }

 export function requireParticipantTypes(allowed: AllowedType[]) {

@@ -36,24 +33,22 @@ export function requireParticipantTypes(allowed: AllowedType[]) {
      if (!workflow) {
        return res.status(404).json({ success: false, error: 'Workflow not found' });
      }
-     const actualRequestId = (workflow as any).requestId;
+     const actualRequestId = workflow.requestId;

      // Check initiator
      if (allowed.includes('INITIATOR')) {
-       if ((workflow as any).initiatorId === userId) {
+       if (workflow.initiator?.userId === userId) {
          return next();
        }
      }

      // Check participants table for SPECTATOR
      if (allowed.includes('SPECTATOR')) {
-       const participant = await Participant.findOne({
-         where: {
-           requestId: actualRequestId,
-           userId,
-           participantType: 'SPECTATOR',
-           isActive: true
-         },
-       });
+       const participant = await ParticipantModel.findOne({
+         requestId: actualRequestId,
+         userId,
+         participantType: 'SPECTATOR',
+         isActive: true
+       });
        if (participant) {
          return next();

@@ -63,26 +58,21 @@ export function requireParticipantTypes(allowed: AllowedType[]) {
      // For APPROVER role, check ApprovalLevel table
      // This is the primary source of truth for approvers
      if (allowed.includes('APPROVER')) {
-       const { ApprovalLevel } = await import('@models/ApprovalLevel');
-       const approvalLevel = await ApprovalLevel.findOne({
-         where: {
-           requestId: actualRequestId,
-           approverId: userId,
-           status: { [Op.in]: ['PENDING', 'IN_PROGRESS'] as any }
-         }
-       });
+       const approvalLevel = await ApprovalLevelModel.findOne({
+         requestId: actualRequestId,
+         'approver.userId': userId,
+         status: { $in: ['PENDING', 'IN_PROGRESS'] }
+       });
        if (approvalLevel) {
          return next();
        }

        // Fallback: also check Participants table (some approvers might be added there)
-       const participant = await Participant.findOne({
-         where: {
-           requestId: actualRequestId,
-           userId,
-           participantType: 'APPROVER',
-           isActive: true
-         },
-       });
+       const participant = await ParticipantModel.findOne({
+         requestId: actualRequestId,
+         userId,
+         participantType: 'APPROVER',
+         isActive: true
+       });
        if (participant) {
          return next();
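The approver check above also shows the operator translation: Sequelize's { [Op.in]: [...] } becomes Mongo's { $in: [...] }, and the flat approverId column becomes a dotted path into an embedded subdocument. Illustrative only:

// Dotted paths match fields inside embedded documents; $in matches any listed value
const approverQuery = (requestId: string, userId: string) => ({
  requestId,
  'approver.userId': userId,
  status: { $in: ['PENDING', 'IN_PROGRESS'] },
});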
@@ -1,92 +0,0 @@
-import { QueryInterface, QueryTypes } from 'sequelize';
-
-/**
- * Migration to add AI model configuration entries
- * Adds CLAUDE_MODEL, OPENAI_MODEL, and GEMINI_MODEL to admin_configurations
- *
- * This migration is idempotent - it will only insert if the configs don't exist
- */
-export async function up(queryInterface: QueryInterface): Promise<void> {
-  // Insert AI model configurations if they don't exist
-  await queryInterface.sequelize.query(`
-    INSERT INTO admin_configurations (
-      config_id, config_key, config_category, config_value, value_type,
-      display_name, description, default_value, is_editable, is_sensitive,
-      validation_rules, ui_component, options, sort_order, requires_restart,
-      last_modified_by, last_modified_at, created_at, updated_at
-    ) VALUES
-    (
-      gen_random_uuid(),
-      'CLAUDE_MODEL',
-      'AI_CONFIGURATION',
-      'claude-sonnet-4-20250514',
-      'STRING',
-      'Claude Model',
-      'Claude (Anthropic) model to use for AI generation',
-      'claude-sonnet-4-20250514',
-      true,
-      false,
-      '{}'::jsonb,
-      'input',
-      NULL,
-      27,
-      false,
-      NULL,
-      NULL,
-      NOW(),
-      NOW()
-    ),
-    (
-      gen_random_uuid(),
-      'OPENAI_MODEL',
-      'AI_CONFIGURATION',
-      'gpt-4o',
-      'STRING',
-      'OpenAI Model',
-      'OpenAI model to use for AI generation',
-      'gpt-4o',
-      true,
-      false,
-      '{}'::jsonb,
-      'input',
-      NULL,
-      28,
-      false,
-      NULL,
-      NULL,
-      NOW(),
-      NOW()
-    ),
-    (
-      gen_random_uuid(),
-      'GEMINI_MODEL',
-      'AI_CONFIGURATION',
-      'gemini-2.0-flash-lite',
-      'STRING',
-      'Gemini Model',
-      'Gemini (Google) model to use for AI generation',
-      'gemini-2.0-flash-lite',
-      true,
-      false,
-      '{}'::jsonb,
-      'input',
-      NULL,
-      29,
-      false,
-      NULL,
-      NULL,
-      NOW(),
-      NOW()
-    )
-    ON CONFLICT (config_key) DO NOTHING
-  `, { type: QueryTypes.INSERT });
-}
-
-export async function down(queryInterface: QueryInterface): Promise<void> {
-  // Remove the AI model configurations
-  await queryInterface.sequelize.query(`
-    DELETE FROM admin_configurations
-    WHERE config_key IN ('CLAUDE_MODEL', 'OPENAI_MODEL', 'GEMINI_MODEL')
-  `, { type: QueryTypes.DELETE });
-}
@@ -1,322 +0,0 @@
-import { QueryInterface, DataTypes } from 'sequelize';
-import { Sequelize } from 'sequelize';
-
-export async function up(queryInterface: QueryInterface): Promise<void> {
-  // Ensure uuid-ossp extension is enabled (required for uuid_generate_v4())
-  await queryInterface.sequelize.query('CREATE EXTENSION IF NOT EXISTS "uuid-ossp"');
-
-  // Create dealers table with all fields from sample data
-  await queryInterface.createTable('dealers', {
-    dealer_id: {
-      type: DataTypes.UUID,
-      primaryKey: true,
-      defaultValue: Sequelize.literal('uuid_generate_v4()')
-    },
-    sales_code: {
-      type: DataTypes.STRING(50),
-      allowNull: true,
-      comment: 'Sales Code'
-    },
-    service_code: {
-      type: DataTypes.STRING(50),
-      allowNull: true,
-      comment: 'Service Code'
-    },
-    gear_code: {
-      type: DataTypes.STRING(50),
-      allowNull: true,
-      comment: 'Gear Code'
-    },
-    gma_code: {
-      type: DataTypes.STRING(50),
-      allowNull: true,
-      comment: 'GMA CODE'
-    },
-    region: {
-      type: DataTypes.STRING(50),
-      allowNull: true,
-      comment: 'Region'
-    },
-    dealership: {
-      type: DataTypes.STRING(255),
-      allowNull: true,
-      comment: 'Dealership name'
-    },
-    state: {
-      type: DataTypes.STRING(100),
-      allowNull: true,
-      comment: 'State'
-    },
-    district: {
-      type: DataTypes.STRING(100),
-      allowNull: true,
-      comment: 'District'
-    },
-    city: {
-      type: DataTypes.STRING(100),
-      allowNull: true,
-      comment: 'City'
-    },
-    location: {
-      type: DataTypes.STRING(255),
-      allowNull: true,
-      comment: 'Location'
-    },
-    city_category_pst: {
-      type: DataTypes.STRING(50),
-      allowNull: true,
-      comment: 'City category (PST)'
-    },
-    layout_format: {
-      type: DataTypes.STRING(50),
-      allowNull: true,
-      comment: 'Layout format'
-    },
-    tier_city_category: {
-      type: DataTypes.STRING(100),
-      allowNull: true,
-      comment: 'TIER City Category'
-    },
-    on_boarding_charges: {
-      type: DataTypes.TEXT,
-      allowNull: true,
-      comment: 'On Boarding Charges (stored as text to allow text values)'
-    },
-    date: {
-      type: DataTypes.TEXT,
-      allowNull: true,
-      comment: 'DATE (stored as text to avoid format validation)'
-    },
-    single_format_month_year: {
-      type: DataTypes.TEXT,
-      allowNull: true,
-      comment: 'Single Format of Month/Year (stored as text)'
-    },
-    domain_id: {
-      type: DataTypes.STRING(255),
-      allowNull: true,
-      comment: 'Domain Id'
-    },
-    replacement: {
-      type: DataTypes.TEXT,
-      allowNull: true,
-      comment: 'Replacement (stored as text to allow longer values)'
-    },
-    termination_resignation_status: {
-      type: DataTypes.STRING(255),
-      allowNull: true,
-      comment: 'Termination / Resignation under Proposal or Evaluation'
-    },
-    date_of_termination_resignation: {
-      type: DataTypes.TEXT,
-      allowNull: true,
-      comment: 'Date Of termination/ resignation (stored as text to avoid format validation)'
-    },
-    last_date_of_operations: {
-      type: DataTypes.TEXT,
-      allowNull: true,
-      comment: 'Last date of operations (stored as text to avoid format validation)'
-    },
-    old_codes: {
-      type: DataTypes.STRING(255),
-      allowNull: true,
-      comment: 'Old Codes'
-    },
-    branch_details: {
-      type: DataTypes.TEXT,
-      allowNull: true,
-      comment: 'Branch Details'
-    },
-    dealer_principal_name: {
-      type: DataTypes.STRING(255),
-      allowNull: true,
-      comment: 'Dealer Principal Name'
-    },
-    dealer_principal_email_id: {
-      type: DataTypes.STRING(255),
-      allowNull: true,
-      comment: 'Dealer Principal Email Id'
-    },
-    dp_contact_number: {
-      type: DataTypes.TEXT,
-      allowNull: true,
-      comment: 'DP CONTACT NUMBER (stored as text to allow multiple numbers)'
-    },
-    dp_contacts: {
-      type: DataTypes.TEXT,
-      allowNull: true,
-      comment: 'DP CONTACTS (stored as text to allow multiple contacts)'
-    },
-    showroom_address: {
-      type: DataTypes.TEXT,
-      allowNull: true,
-      comment: 'Showroom Address'
-    },
-    showroom_pincode: {
-      type: DataTypes.STRING(10),
-      allowNull: true,
-      comment: 'Showroom Pincode'
-    },
-    workshop_address: {
-      type: DataTypes.TEXT,
-      allowNull: true,
-      comment: 'Workshop Address'
-    },
-    workshop_pincode: {
-      type: DataTypes.STRING(10),
-      allowNull: true,
-      comment: 'Workshop Pincode'
-    },
-    location_district: {
-      type: DataTypes.STRING(100),
-      allowNull: true,
-      comment: 'Location / District'
-    },
-    state_workshop: {
-      type: DataTypes.STRING(100),
-      allowNull: true,
-      comment: 'State (for workshop)'
-    },
-    no_of_studios: {
-      type: DataTypes.INTEGER,
-      allowNull: true,
-      defaultValue: 0,
-      comment: 'No Of Studios'
-    },
-    website_update: {
-      type: DataTypes.TEXT,
-      allowNull: true,
-      comment: 'Website update (stored as text to allow longer values)'
-    },
-    gst: {
-      type: DataTypes.STRING(50),
-      allowNull: true,
-      comment: 'GST'
-    },
-    pan: {
-      type: DataTypes.STRING(50),
-      allowNull: true,
-      comment: 'PAN'
-    },
-    firm_type: {
-      type: DataTypes.STRING(100),
-      allowNull: true,
-      comment: 'Firm Type'
-    },
-    prop_managing_partners_directors: {
-      type: DataTypes.STRING(255),
-      allowNull: true,
-      comment: 'Prop. / Managing Partners / Managing Directors'
-    },
-    total_prop_partners_directors: {
-      type: DataTypes.STRING(255),
-      allowNull: true,
-      comment: 'Total Prop. / Partners / Directors'
-    },
-    docs_folder_link: {
-      type: DataTypes.TEXT,
-      allowNull: true,
-      comment: 'DOCS Folder Link'
-    },
-    workshop_gma_codes: {
-      type: DataTypes.STRING(255),
-      allowNull: true,
-      comment: 'Workshop GMA Codes'
-    },
-    existing_new: {
-      type: DataTypes.STRING(50),
-      allowNull: true,
-      comment: 'Existing / New'
-    },
-    dlrcode: {
-      type: DataTypes.STRING(50),
-      allowNull: true,
-      comment: 'dlrcode'
-    },
-    is_active: {
-      type: DataTypes.BOOLEAN,
-      allowNull: false,
-      defaultValue: true,
-      comment: 'Whether the dealer is currently active'
-    },
-    created_at: {
-      type: DataTypes.DATE,
-      allowNull: false,
-      defaultValue: Sequelize.literal('CURRENT_TIMESTAMP')
-    },
-    updated_at: {
-      type: DataTypes.DATE,
-      allowNull: false,
-      defaultValue: Sequelize.literal('CURRENT_TIMESTAMP')
-    }
-  });
-
-  // Create indexes
-  await queryInterface.addIndex('dealers', ['sales_code'], {
-    name: 'idx_dealers_sales_code',
-    unique: false
-  });
-
-  await queryInterface.addIndex('dealers', ['service_code'], {
-    name: 'idx_dealers_service_code',
-    unique: false
-  });
-
-  await queryInterface.addIndex('dealers', ['gma_code'], {
-    name: 'idx_dealers_gma_code',
-    unique: false
-  });
-
-  await queryInterface.addIndex('dealers', ['domain_id'], {
-    name: 'idx_dealers_domain_id',
-    unique: false
-  });
-
-  await queryInterface.addIndex('dealers', ['region'], {
-    name: 'idx_dealers_region',
-    unique: false
-  });
-
-  await queryInterface.addIndex('dealers', ['state'], {
-    name: 'idx_dealers_state',
-    unique: false
-  });
-
-  await queryInterface.addIndex('dealers', ['city'], {
-    name: 'idx_dealers_city',
-    unique: false
-  });
-
-  await queryInterface.addIndex('dealers', ['district'], {
-    name: 'idx_dealers_district',
-    unique: false
-  });
-
-  await queryInterface.addIndex('dealers', ['dlrcode'], {
-    name: 'idx_dealers_dlrcode',
-    unique: false
-  });
-
-  await queryInterface.addIndex('dealers', ['is_active'], {
-    name: 'idx_dealers_is_active',
-    unique: false
-  });
-}
-
-export async function down(queryInterface: QueryInterface): Promise<void> {
-  // Drop indexes first
-  await queryInterface.removeIndex('dealers', 'idx_dealers_sales_code');
-  await queryInterface.removeIndex('dealers', 'idx_dealers_service_code');
-  await queryInterface.removeIndex('dealers', 'idx_dealers_gma_code');
-  await queryInterface.removeIndex('dealers', 'idx_dealers_domain_id');
-  await queryInterface.removeIndex('dealers', 'idx_dealers_region');
-  await queryInterface.removeIndex('dealers', 'idx_dealers_state');
-  await queryInterface.removeIndex('dealers', 'idx_dealers_city');
-  await queryInterface.removeIndex('dealers', 'idx_dealers_district');
-  await queryInterface.removeIndex('dealers', 'idx_dealers_dlrcode');
-  await queryInterface.removeIndex('dealers', 'idx_dealers_is_active');
-
-  // Drop table
-  await queryInterface.dropTable('dealers');
-}
@@ -1,92 +0,0 @@
-import { QueryInterface, DataTypes } from 'sequelize';
-
-/**
- * Migration to create request_summaries table
- * Stores comprehensive summaries of closed workflow requests
- */
-export async function up(queryInterface: QueryInterface): Promise<void> {
-  await queryInterface.createTable('request_summaries', {
-    summary_id: {
-      type: DataTypes.UUID,
-      defaultValue: DataTypes.UUIDV4,
-      primaryKey: true,
-      allowNull: false
-    },
-    request_id: {
-      type: DataTypes.UUID,
-      allowNull: false,
-      references: {
-        model: 'workflow_requests',
-        key: 'request_id'
-      },
-      onUpdate: 'CASCADE',
-      onDelete: 'CASCADE',
-      unique: true // One summary per request
-    },
-    initiator_id: {
-      type: DataTypes.UUID,
-      allowNull: false,
-      references: {
-        model: 'users',
-        key: 'user_id'
-      },
-      onUpdate: 'CASCADE',
-      onDelete: 'CASCADE'
-    },
-    title: {
-      type: DataTypes.STRING(500),
-      allowNull: false
-    },
-    description: {
-      type: DataTypes.TEXT,
-      allowNull: true
-    },
-    closing_remarks: {
-      type: DataTypes.TEXT,
-      allowNull: true
-    },
-    is_ai_generated: {
-      type: DataTypes.BOOLEAN,
-      allowNull: false,
-      defaultValue: false
-    },
-    conclusion_id: {
-      type: DataTypes.UUID,
-      allowNull: true,
-      references: {
-        model: 'conclusion_remarks',
-        key: 'conclusion_id'
-      },
-      onUpdate: 'CASCADE',
-      onDelete: 'SET NULL'
-    },
-    created_at: {
-      type: DataTypes.DATE,
-      allowNull: false,
-      defaultValue: DataTypes.NOW
-    },
-    updated_at: {
-      type: DataTypes.DATE,
-      allowNull: false,
-      defaultValue: DataTypes.NOW
-    }
-  });
-
-  // Create indexes
-  await queryInterface.addIndex('request_summaries', ['request_id'], {
-    name: 'idx_request_summaries_request_id'
-  });
-
-  await queryInterface.addIndex('request_summaries', ['initiator_id'], {
-    name: 'idx_request_summaries_initiator_id'
-  });
-
-  await queryInterface.addIndex('request_summaries', ['created_at'], {
-    name: 'idx_request_summaries_created_at'
-  });
-}
-
-export async function down(queryInterface: QueryInterface): Promise<void> {
-  await queryInterface.dropTable('request_summaries');
-}
@@ -1,99 +0,0 @@
-import { QueryInterface, DataTypes } from 'sequelize';
-
-/**
- * Migration to create shared_summaries table
- * Stores sharing relationships for request summaries
- */
-export async function up(queryInterface: QueryInterface): Promise<void> {
-  await queryInterface.createTable('shared_summaries', {
-    shared_summary_id: {
-      type: DataTypes.UUID,
-      defaultValue: DataTypes.UUIDV4,
-      primaryKey: true,
-      allowNull: false
-    },
-    summary_id: {
-      type: DataTypes.UUID,
-      allowNull: false,
-      references: {
-        model: 'request_summaries',
-        key: 'summary_id'
-      },
-      onUpdate: 'CASCADE',
-      onDelete: 'CASCADE'
-    },
-    shared_by: {
-      type: DataTypes.UUID,
-      allowNull: false,
-      references: {
-        model: 'users',
-        key: 'user_id'
-      },
-      onUpdate: 'CASCADE',
-      onDelete: 'CASCADE'
-    },
-    shared_with: {
-      type: DataTypes.UUID,
-      allowNull: false,
-      references: {
-        model: 'users',
-        key: 'user_id'
-      },
-      onUpdate: 'CASCADE',
-      onDelete: 'CASCADE'
-    },
-    shared_at: {
-      type: DataTypes.DATE,
-      allowNull: false,
-      defaultValue: DataTypes.NOW
-    },
-    viewed_at: {
-      type: DataTypes.DATE,
-      allowNull: true
-    },
-    is_read: {
-      type: DataTypes.BOOLEAN,
-      allowNull: false,
-      defaultValue: false
-    },
-    created_at: {
-      type: DataTypes.DATE,
-      allowNull: false,
-      defaultValue: DataTypes.NOW
-    },
-    updated_at: {
-      type: DataTypes.DATE,
-      allowNull: false,
-      defaultValue: DataTypes.NOW
-    }
-  });
-
-  // Create unique constraint to prevent duplicate shares
-  await queryInterface.addConstraint('shared_summaries', {
-    fields: ['summary_id', 'shared_with'],
-    type: 'unique',
-    name: 'uk_shared_summary'
-  });
-
-  // Create indexes
-  await queryInterface.addIndex('shared_summaries', ['summary_id'], {
-    name: 'idx_shared_summaries_summary_id'
-  });
-
-  await queryInterface.addIndex('shared_summaries', ['shared_by'], {
-    name: 'idx_shared_summaries_shared_by'
-  });
-
-  await queryInterface.addIndex('shared_summaries', ['shared_with'], {
-    name: 'idx_shared_summaries_shared_with'
-  });
-
-  await queryInterface.addIndex('shared_summaries', ['shared_at'], {
-    name: 'idx_shared_summaries_shared_at'
-  });
-}
-
-export async function down(queryInterface: QueryInterface): Promise<void> {
-  await queryInterface.dropTable('shared_summaries');
-}
@@ -1,34 +0,0 @@
-import { QueryInterface } from 'sequelize';
-
-/**
- * Migration: Update Request Number Format
- *
- * This migration documents the change in request number format from:
- * - Old: REQ-YYYY-NNNNN (e.g., REQ-2025-12345)
- * - New: REQ-YYYY-MM-XXXX (e.g., REQ-2025-11-0001)
- *
- * The counter now resets every month automatically.
- *
- * No schema changes are required as the request_number column (VARCHAR(20))
- * is already sufficient for the new format (16 characters).
- *
- * Existing request numbers will remain unchanged.
- * New requests will use the new format starting from this migration.
- */
-export async function up(queryInterface: QueryInterface): Promise<void> {
-  // No schema changes needed - this is a code-level change only
-  // The generateRequestNumber() function in helpers.ts has been updated
-  // to generate the new format: REQ-YYYY-MM-XXXX
-
-  // Log the change for reference
-  console.log('[Migration] Request number format updated to REQ-YYYY-MM-XXXX');
-  console.log('[Migration] Counter will reset automatically each month');
-}
-
-export async function down(queryInterface: QueryInterface): Promise<void> {
-  // No rollback needed - this is a code-level change
-  // To revert, simply update the generateRequestNumber() function
-  // in helpers.ts back to the old format
-  console.log('[Migration] Request number format can be reverted by updating generateRequestNumber() function');
-}
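For reference, the REQ-YYYY-MM-XXXX format this deleted migration documents can be sketched as below; the real generateRequestNumber() lives in helpers.ts and its monthly counter source is not shown here:

function formatRequestNumber(counter: number, now: Date = new Date()): string {
  const year = now.getFullYear();
  const month = String(now.getMonth() + 1).padStart(2, '0'); // counter resets monthly
  return `REQ-${year}-${month}-${String(counter).padStart(4, '0')}`;
}

// formatRequestNumber(1, new Date('2025-11-05')) === 'REQ-2025-11-0001' (16 chars)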
@@ -1,83 +0,0 @@
-import { QueryInterface, DataTypes } from 'sequelize';
-
-/**
- * Migration to create activity_types table for claim management activity types
- * Admin can manage activity types similar to holiday management
- */
-export async function up(queryInterface: QueryInterface): Promise<void> {
-  await queryInterface.createTable('activity_types', {
-    activity_type_id: {
-      type: DataTypes.UUID,
-      defaultValue: DataTypes.UUIDV4,
-      primaryKey: true
-    },
-    title: {
-      type: DataTypes.STRING(200),
-      allowNull: false,
-      unique: true,
-      comment: 'Activity type title/name (e.g., "Riders Mania Claims", "Legal Claims Reimbursement")'
-    },
-    item_code: {
-      type: DataTypes.STRING(100),
-      allowNull: true,
-      defaultValue: null,
-      comment: 'Optional item code for the activity type'
-    },
-    taxation_type: {
-      type: DataTypes.STRING(100),
-      allowNull: true,
-      defaultValue: null,
-      comment: 'Optional taxation type for the activity'
-    },
-    sap_ref_no: {
-      type: DataTypes.STRING(100),
-      allowNull: true,
-      defaultValue: null,
-      comment: 'Optional SAP reference number'
-    },
-    is_active: {
-      type: DataTypes.BOOLEAN,
-      defaultValue: true,
-      comment: 'Whether this activity type is currently active/available for selection'
-    },
-    created_by: {
-      type: DataTypes.UUID,
-      allowNull: false,
-      references: {
-        model: 'users',
-        key: 'user_id'
-      },
-      comment: 'Admin user who created this activity type'
-    },
-    updated_by: {
-      type: DataTypes.UUID,
-      allowNull: true,
-      references: {
-        model: 'users',
-        key: 'user_id'
-      },
-      comment: 'Admin user who last updated this activity type'
-    },
-    created_at: {
-      type: DataTypes.DATE,
-      allowNull: false,
-      defaultValue: DataTypes.NOW
-    },
-    updated_at: {
-      type: DataTypes.DATE,
-      allowNull: false,
-      defaultValue: DataTypes.NOW
-    }
-  });
-
-  // Indexes for performance
-  await queryInterface.sequelize.query('CREATE UNIQUE INDEX IF NOT EXISTS "activity_types_title_unique" ON "activity_types" ("title");');
-  await queryInterface.sequelize.query('CREATE INDEX IF NOT EXISTS "activity_types_is_active" ON "activity_types" ("is_active");');
-  await queryInterface.sequelize.query('CREATE INDEX IF NOT EXISTS "activity_types_item_code" ON "activity_types" ("item_code");');
-  await queryInterface.sequelize.query('CREATE INDEX IF NOT EXISTS "activity_types_created_by" ON "activity_types" ("created_by");');
-}
-
-export async function down(queryInterface: QueryInterface): Promise<void> {
-  await queryInterface.dropTable('activity_types');
-}
@@ -1,73 +0,0 @@
-import { QueryInterface, DataTypes } from 'sequelize';
-
-export async function up(queryInterface: QueryInterface): Promise<void> {
-  // Add pause fields to approval_levels table
-  // Note: The 'PAUSED' enum value is added in a separate migration (20250126-add-paused-to-enum.ts)
-  await queryInterface.addColumn('approval_levels', 'is_paused', {
-    type: DataTypes.BOOLEAN,
-    allowNull: false,
-    defaultValue: false
-  });
-
-  await queryInterface.addColumn('approval_levels', 'paused_at', {
-    type: DataTypes.DATE,
-    allowNull: true
-  });
-
-  await queryInterface.addColumn('approval_levels', 'paused_by', {
-    type: DataTypes.UUID,
-    allowNull: true,
-    references: {
-      model: 'users',
-      key: 'user_id'
-    }
-  });
-
-  await queryInterface.addColumn('approval_levels', 'pause_reason', {
-    type: DataTypes.TEXT,
-    allowNull: true
-  });
-
-  await queryInterface.addColumn('approval_levels', 'pause_resume_date', {
-    type: DataTypes.DATE,
-    allowNull: true
-  });
-
-  await queryInterface.addColumn('approval_levels', 'pause_tat_start_time', {
-    type: DataTypes.DATE,
-    allowNull: true,
-    comment: 'Original TAT start time before pause'
-  });
-
-  await queryInterface.addColumn('approval_levels', 'pause_elapsed_hours', {
-    type: DataTypes.DECIMAL(10, 2),
-    allowNull: true,
-    comment: 'Elapsed hours at pause time'
-  });
-
-  // Create index on is_paused for faster queries
-  await queryInterface.sequelize.query(
-    'CREATE INDEX IF NOT EXISTS "approval_levels_is_paused" ON "approval_levels" ("is_paused");'
-  );
-
-  // Create index on pause_resume_date for auto-resume job
-  await queryInterface.sequelize.query(
-    'CREATE INDEX IF NOT EXISTS "approval_levels_pause_resume_date" ON "approval_levels" ("pause_resume_date") WHERE "is_paused" = true;'
-  );
-}
-
-export async function down(queryInterface: QueryInterface): Promise<void> {
-  await queryInterface.removeColumn('approval_levels', 'pause_elapsed_hours');
-  await queryInterface.removeColumn('approval_levels', 'pause_tat_start_time');
-  await queryInterface.removeColumn('approval_levels', 'pause_resume_date');
-  await queryInterface.removeColumn('approval_levels', 'pause_reason');
-  await queryInterface.removeColumn('approval_levels', 'paused_by');
-  await queryInterface.removeColumn('approval_levels', 'paused_at');
-  await queryInterface.removeColumn('approval_levels', 'is_paused');
-
-  // Note: PostgreSQL doesn't support removing enum values directly
-  // To fully rollback, you would need to recreate the enum type
-  // This is a limitation of PostgreSQL enums
-  // For now, we'll leave 'PAUSED' in the enum even after rollback
-}
@@ -1,59 +0,0 @@
-import { QueryInterface, DataTypes } from 'sequelize';
-
-export async function up(queryInterface: QueryInterface): Promise<void> {
-  // Add pause fields to workflow_requests table
-  await queryInterface.addColumn('workflow_requests', 'is_paused', {
-    type: DataTypes.BOOLEAN,
-    allowNull: false,
-    defaultValue: false
-  });
-
-  await queryInterface.addColumn('workflow_requests', 'paused_at', {
-    type: DataTypes.DATE,
-    allowNull: true
-  });
-
-  await queryInterface.addColumn('workflow_requests', 'paused_by', {
-    type: DataTypes.UUID,
-    allowNull: true,
-    references: {
-      model: 'users',
-      key: 'user_id'
-    }
-  });
-
-  await queryInterface.addColumn('workflow_requests', 'pause_reason', {
-    type: DataTypes.TEXT,
-    allowNull: true
-  });
-
-  await queryInterface.addColumn('workflow_requests', 'pause_resume_date', {
-    type: DataTypes.DATE,
-    allowNull: true
-  });
-
-  await queryInterface.addColumn('workflow_requests', 'pause_tat_snapshot', {
-    type: DataTypes.JSONB,
-    allowNull: true
-  });
-
-  // Create index on is_paused for faster queries
-  await queryInterface.sequelize.query(
-    'CREATE INDEX IF NOT EXISTS "workflow_requests_is_paused" ON "workflow_requests" ("is_paused");'
-  );
-
-  // Create index on pause_resume_date for auto-resume job
-  await queryInterface.sequelize.query(
-    'CREATE INDEX IF NOT EXISTS "workflow_requests_pause_resume_date" ON "workflow_requests" ("pause_resume_date") WHERE "is_paused" = true;'
-  );
-}
-
-export async function down(queryInterface: QueryInterface): Promise<void> {
-  await queryInterface.removeColumn('workflow_requests', 'pause_tat_snapshot');
-  await queryInterface.removeColumn('workflow_requests', 'pause_resume_date');
-  await queryInterface.removeColumn('workflow_requests', 'pause_reason');
-  await queryInterface.removeColumn('workflow_requests', 'paused_by');
-  await queryInterface.removeColumn('workflow_requests', 'paused_at');
-  await queryInterface.removeColumn('workflow_requests', 'is_paused');
-}
@ -1,35 +0,0 @@
import { QueryInterface } from 'sequelize';

/**
 * Migration to add the 'PAUSED' value to the enum_approval_status enum type.
 * This is required for the pause workflow feature.
 */
export async function up(queryInterface: QueryInterface): Promise<void> {
  // PostgreSQL doesn't support IF NOT EXISTS for ALTER TYPE ... ADD VALUE,
  // so we check whether the value exists first.
  await queryInterface.sequelize.query(`
    DO $$
    BEGIN
      IF NOT EXISTS (
        SELECT 1 FROM pg_enum
        WHERE enumlabel = 'PAUSED'
          AND enumtypid = (SELECT oid FROM pg_type WHERE typname = 'enum_approval_status')
      ) THEN
        ALTER TYPE enum_approval_status ADD VALUE 'PAUSED';
      END IF;
    END$$;
  `);
}

export async function down(queryInterface: QueryInterface): Promise<void> {
  // Note: PostgreSQL doesn't support removing enum values directly.
  // To fully roll back, you would need to:
  //   1. Create a new enum without 'PAUSED'
  //   2. Update all columns to use the new enum
  //   3. Drop the old enum
  // That is complex and risky, so we leave 'PAUSED' in the enum even after
  // rollback. This is a limitation of PostgreSQL enums.
  console.log('[Migration] Note: Cannot remove enum values in PostgreSQL. PAUSED will remain in enum_approval_status.');
}
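The DO $$ guard above recurs across several of these migrations. A reusable helper (a sketch, not present in this repo) would avoid repeating the block:

// Sketch: idempotently add a value to a PostgreSQL enum type.
// typeName and value are interpolated into SQL, so callers must pass only
// trusted, hard-coded identifiers (as migrations normally do).
import { QueryInterface } from 'sequelize';

export async function addEnumValueIfMissing(
  queryInterface: QueryInterface,
  typeName: string,
  value: string
): Promise<void> {
  await queryInterface.sequelize.query(`
    DO $$
    BEGIN
      IF NOT EXISTS (
        SELECT 1 FROM pg_enum
        WHERE enumlabel = '${value}'
          AND enumtypid = (SELECT oid FROM pg_type WHERE typname = '${typeName}')
      ) THEN
        ALTER TYPE ${typeName} ADD VALUE '${value}';
      END IF;
    END$$;
  `);
}

// Usage: await addEnumValueIfMissing(queryInterface, 'enum_workflow_status', 'PAUSED');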
@ -1,35 +0,0 @@
import { QueryInterface } from 'sequelize';

/**
 * Migration to add the 'PAUSED' value to the enum_workflow_status enum type.
 * This allows workflows to have a PAUSED status in addition to the isPaused boolean flag.
 */
export async function up(queryInterface: QueryInterface): Promise<void> {
  // PostgreSQL doesn't support IF NOT EXISTS for ALTER TYPE ... ADD VALUE,
  // so we check whether the value exists first.
  await queryInterface.sequelize.query(`
    DO $$
    BEGIN
      IF NOT EXISTS (
        SELECT 1 FROM pg_enum
        WHERE enumlabel = 'PAUSED'
          AND enumtypid = (SELECT oid FROM pg_type WHERE typname = 'enum_workflow_status')
      ) THEN
        ALTER TYPE enum_workflow_status ADD VALUE 'PAUSED';
      END IF;
    END$$;
  `);
}

export async function down(queryInterface: QueryInterface): Promise<void> {
  // Note: PostgreSQL doesn't support removing enum values directly.
  // To fully roll back, you would need to:
  //   1. Create a new enum without 'PAUSED'
  //   2. Update all columns to use the new enum
  //   3. Drop the old enum
  // That is complex and risky, so we leave 'PAUSED' in the enum even after
  // rollback. This is a limitation of PostgreSQL enums.
  console.log('[Migration] Note: Cannot remove enum values in PostgreSQL. PAUSED will remain in enum_workflow_status.');
}
@ -1,24 +0,0 @@
import { QueryInterface } from 'sequelize';

/**
 * Migration to update any workflow requests with IN_PROGRESS status to PENDING,
 * since IN_PROGRESS is essentially the same as PENDING for workflow requests.
 */
export async function up(queryInterface: QueryInterface): Promise<void> {
  // Update any workflow requests with IN_PROGRESS status to PENDING
  await queryInterface.sequelize.query(`
    UPDATE workflow_requests
    SET status = 'PENDING'
    WHERE status = 'IN_PROGRESS';
  `);

  console.log('[Migration] Updated IN_PROGRESS workflow requests to PENDING');
}

export async function down(queryInterface: QueryInterface): Promise<void> {
  // Note: We cannot reliably restore IN_PROGRESS status since we don't know
  // which requests were originally IN_PROGRESS vs PENDING.
  // This migration is one-way.
  console.log('[Migration] Cannot rollback - IN_PROGRESS to PENDING migration is one-way');
}
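If a reversible version were ever needed, one common pattern (a sketch under assumptions — the scratch table name is made up) is to record the affected primary keys before rewriting them:

// Sketch: capture affected rows so a down() could restore IN_PROGRESS.
import { QueryInterface } from 'sequelize';

export async function reversibleUp(queryInterface: QueryInterface): Promise<void> {
  // Keep the request_ids that are about to change in a scratch table.
  await queryInterface.sequelize.query(`
    CREATE TABLE IF NOT EXISTS _migration_in_progress_backup AS
    SELECT request_id FROM workflow_requests WHERE status = 'IN_PROGRESS';
  `);
  await queryInterface.sequelize.query(`
    UPDATE workflow_requests SET status = 'PENDING' WHERE status = 'IN_PROGRESS';
  `);
}

export async function reversibleDown(queryInterface: QueryInterface): Promise<void> {
  await queryInterface.sequelize.query(`
    UPDATE workflow_requests SET status = 'IN_PROGRESS'
    WHERE request_id IN (SELECT request_id FROM _migration_in_progress_backup);
  `);
  await queryInterface.sequelize.query('DROP TABLE IF EXISTS _migration_in_progress_backup;');
}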
@ -1,199 +0,0 @@
import { QueryInterface, QueryTypes } from 'sequelize';

/**
 * Migration to migrate from multi-provider AI to Vertex AI Gemini
 *
 * Removes:
 * - AI_PROVIDER
 * - CLAUDE_API_KEY, OPENAI_API_KEY, GEMINI_API_KEY
 * - CLAUDE_MODEL, OPENAI_MODEL, GEMINI_MODEL
 * - VERTEX_AI_MODEL (moved to environment variable only)
 * - VERTEX_AI_LOCATION (moved to environment variable only)
 *
 * Note: Both VERTEX_AI_MODEL and VERTEX_AI_LOCATION are now configured via
 * environment variables only (not in admin settings).
 *
 * This migration is idempotent - it will only delete configs that exist.
 */
export async function up(queryInterface: QueryInterface): Promise<void> {
  // Remove old AI provider configurations
  await queryInterface.sequelize.query(`
    DELETE FROM admin_configurations
    WHERE config_key IN (
      'AI_PROVIDER',
      'CLAUDE_API_KEY',
      'OPENAI_API_KEY',
      'GEMINI_API_KEY',
      'CLAUDE_MODEL',
      'OPENAI_MODEL',
      'GEMINI_MODEL',
      'VERTEX_AI_MODEL',
      'VERTEX_AI_LOCATION'
    )
  `, { type: QueryTypes.DELETE });
}

export async function down(queryInterface: QueryInterface): Promise<void> {
  // This migration only removes configs, so the down migration restores the
  // multi-provider rows it deleted. VERTEX_AI_MODEL and VERTEX_AI_LOCATION
  // are not restored, as they are now environment-only.
  console.log('[Migration] Restoring multi-provider AI configurations (Vertex AI settings remain environment-only)');

  // Restore old configurations (for rollback)
  await queryInterface.sequelize.query(`
    INSERT INTO admin_configurations (
      config_id, config_key, config_category, config_value, value_type,
      display_name, description, default_value, is_editable, is_sensitive,
      validation_rules, ui_component, options, sort_order, requires_restart,
      last_modified_by, last_modified_at, created_at, updated_at
    ) VALUES
    (
      gen_random_uuid(),
      'AI_PROVIDER',
      'AI_CONFIGURATION',
      'claude',
      'STRING',
      'AI Provider',
      'Active AI provider for conclusion generation (claude, openai, or gemini)',
      'claude',
      true,
      false,
      '{"enum": ["claude", "openai", "gemini"], "required": true}'::jsonb,
      'select',
      '["claude", "openai", "gemini"]'::jsonb,
      22,
      false,
      NULL,
      NULL,
      NOW(),
      NOW()
    ),
    (
      gen_random_uuid(),
      'CLAUDE_API_KEY',
      'AI_CONFIGURATION',
      '',
      'STRING',
      'Claude API Key',
      'API key for Claude (Anthropic) - Get from console.anthropic.com',
      '',
      true,
      true,
      '{"pattern": "^sk-ant-", "minLength": 40}'::jsonb,
      'input',
      NULL,
      23,
      false,
      NULL,
      NULL,
      NOW(),
      NOW()
    ),
    (
      gen_random_uuid(),
      'OPENAI_API_KEY',
      'AI_CONFIGURATION',
      '',
      'STRING',
      'OpenAI API Key',
      'API key for OpenAI (GPT-4) - Get from platform.openai.com',
      '',
      true,
      true,
      '{"pattern": "^sk-", "minLength": 40}'::jsonb,
      'input',
      NULL,
      24,
      false,
      NULL,
      NULL,
      NOW(),
      NOW()
    ),
    (
      gen_random_uuid(),
      'GEMINI_API_KEY',
      'AI_CONFIGURATION',
      '',
      'STRING',
      'Gemini API Key',
      'API key for Gemini (Google) - Get from ai.google.dev',
      '',
      true,
      true,
      '{"minLength": 20}'::jsonb,
      'input',
      NULL,
      25,
      false,
      NULL,
      NULL,
      NOW(),
      NOW()
    ),
    (
      gen_random_uuid(),
      'CLAUDE_MODEL',
      'AI_CONFIGURATION',
      'claude-sonnet-4-20250514',
      'STRING',
      'Claude Model',
      'Claude (Anthropic) model to use for AI generation',
      'claude-sonnet-4-20250514',
      true,
      false,
      '{}'::jsonb,
      'input',
      NULL,
      27,
      false,
      NULL,
      NULL,
      NOW(),
      NOW()
    ),
    (
      gen_random_uuid(),
      'OPENAI_MODEL',
      'AI_CONFIGURATION',
      'gpt-4o',
      'STRING',
      'OpenAI Model',
      'OpenAI model to use for AI generation',
      'gpt-4o',
      true,
      false,
      '{}'::jsonb,
      'input',
      NULL,
      28,
      false,
      NULL,
      NULL,
      NOW(),
      NOW()
    ),
    (
      gen_random_uuid(),
      'GEMINI_MODEL',
      'AI_CONFIGURATION',
      'gemini-2.0-flash-lite',
      'STRING',
      'Gemini Model',
      'Gemini (Google) model to use for AI generation',
      'gemini-2.0-flash-lite',
      true,
      false,
      '{}'::jsonb,
      'input',
      NULL,
      29,
      false,
      NULL,
      NULL,
      NOW(),
      NOW()
    )
    ON CONFLICT (config_key) DO NOTHING
  `, { type: QueryTypes.INSERT });
}
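With these rows removed, the Vertex AI settings come only from the process environment. A minimal sketch of the read side (the variable names follow the config keys above; the fallback values are assumptions, not values from this repo):

// Sketch: resolve Vertex AI settings from environment variables only.
const vertexAiModel = process.env.VERTEX_AI_MODEL ?? 'gemini-2.0-flash-lite'; // assumed default
const vertexAiLocation = process.env.VERTEX_AI_LOCATION ?? 'us-central1';     // assumed default

if (!process.env.VERTEX_AI_MODEL) {
  console.warn('[Config] VERTEX_AI_MODEL not set; falling back to default model');
}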
@ -1,237 +0,0 @@
import { QueryInterface, DataTypes } from 'sequelize';

/**
 * Migration: Create users table
 *
 * Purpose: Create the main users table with all fields including RBAC and SSO fields
 *
 * This must run FIRST before other tables that reference users
 *
 * Includes:
 * - Basic user information (email, name, etc.)
 * - SSO/Okta fields (manager, job_title, etc.)
 * - RBAC role system (USER, MANAGEMENT, ADMIN)
 * - Location and AD group information
 *
 * Created: 2025-11-12 (Updated for fresh setup)
 */
export async function up(queryInterface: QueryInterface): Promise<void> {
  console.log('📋 Creating users table with RBAC and extended SSO fields...');

  try {
    // Step 1: Create ENUM type for roles
    console.log('  ✓ Creating user_role_enum...');
    await queryInterface.sequelize.query(`
      CREATE TYPE user_role_enum AS ENUM ('USER', 'MANAGEMENT', 'ADMIN');
    `);

    // Step 2: Create users table
    console.log('  ✓ Creating users table...');
    await queryInterface.createTable('users', {
      user_id: {
        type: DataTypes.UUID,
        primaryKey: true,
        defaultValue: DataTypes.UUIDV4,
        field: 'user_id',
        comment: 'Primary key - UUID'
      },
      employee_id: {
        type: DataTypes.STRING(50),
        allowNull: true,
        field: 'employee_id',
        comment: 'HR System Employee ID (optional) - some users may not have one'
      },
      okta_sub: {
        type: DataTypes.STRING(100),
        allowNull: false,
        unique: true,
        field: 'okta_sub',
        comment: 'Okta user subject identifier - unique identifier from SSO'
      },
      email: {
        type: DataTypes.STRING(255),
        allowNull: false,
        unique: true,
        field: 'email',
        comment: 'Primary email address - unique and required'
      },
      first_name: {
        type: DataTypes.STRING(100),
        allowNull: true,
        defaultValue: '',
        field: 'first_name',
        comment: 'First name from SSO (optional)'
      },
      last_name: {
        type: DataTypes.STRING(100),
        allowNull: true,
        defaultValue: '',
        field: 'last_name',
        comment: 'Last name from SSO (optional)'
      },
      display_name: {
        type: DataTypes.STRING(200),
        allowNull: true,
        defaultValue: '',
        field: 'display_name',
        comment: 'Full display name for UI'
      },
      department: {
        type: DataTypes.STRING(100),
        allowNull: true,
        comment: 'Department/Division from SSO'
      },
      designation: {
        type: DataTypes.STRING(100),
        allowNull: true,
        comment: 'Job designation/position'
      },
      phone: {
        type: DataTypes.STRING(20),
        allowNull: true,
        comment: 'Office phone number'
      },

      // ============ Extended SSO/Okta Fields ============
      manager: {
        type: DataTypes.STRING(200),
        allowNull: true,
        comment: 'Reporting manager name from SSO/AD'
      },
      second_email: {
        type: DataTypes.STRING(255),
        allowNull: true,
        field: 'second_email',
        comment: 'Alternate email address from SSO'
      },
      job_title: {
        type: DataTypes.TEXT,
        allowNull: true,
        field: 'job_title',
        comment: 'Detailed job title/description from SSO'
      },
      employee_number: {
        type: DataTypes.STRING(50),
        allowNull: true,
        field: 'employee_number',
        comment: 'HR system employee number from SSO (e.g., "00020330")'
      },
      postal_address: {
        type: DataTypes.STRING(500),
        allowNull: true,
        field: 'postal_address',
        comment: 'Work location/office address from SSO'
      },
      mobile_phone: {
        type: DataTypes.STRING(20),
        allowNull: true,
        field: 'mobile_phone',
        comment: 'Mobile contact number from SSO'
      },
      ad_groups: {
        type: DataTypes.JSONB,
        allowNull: true,
        field: 'ad_groups',
        comment: 'Active Directory group memberships from SSO (memberOf array)'
      },

      // ============ System Fields ============
      location: {
        type: DataTypes.JSONB,
        allowNull: true,
        comment: 'JSON object: {city, state, country, office, timezone}'
      },
      is_active: {
        type: DataTypes.BOOLEAN,
        defaultValue: true,
        field: 'is_active',
        comment: 'Account status - true=active, false=disabled'
      },
      role: {
        type: DataTypes.ENUM('USER', 'MANAGEMENT', 'ADMIN'),
        allowNull: false,
        defaultValue: 'USER',
        comment: 'RBAC role: USER (default), MANAGEMENT (read all), ADMIN (full access)'
      },
      last_login: {
        type: DataTypes.DATE,
        allowNull: true,
        field: 'last_login',
        comment: 'Last successful login timestamp'
      },
      created_at: {
        type: DataTypes.DATE,
        allowNull: false,
        defaultValue: DataTypes.NOW,
        field: 'created_at'
      },
      updated_at: {
        type: DataTypes.DATE,
        allowNull: false,
        defaultValue: DataTypes.NOW,
        field: 'updated_at'
      }
    });

    // Step 3: Create indexes
    console.log('  ✓ Creating indexes...');

    await queryInterface.addIndex('users', ['email'], {
      name: 'users_email_idx',
      unique: true
    });

    await queryInterface.addIndex('users', ['okta_sub'], {
      name: 'users_okta_sub_idx',
      unique: true
    });

    await queryInterface.addIndex('users', ['employee_id'], {
      name: 'users_employee_id_idx'
    });

    await queryInterface.addIndex('users', ['department'], {
      name: 'idx_users_department'
    });

    await queryInterface.addIndex('users', ['is_active'], {
      name: 'idx_users_is_active'
    });

    await queryInterface.addIndex('users', ['role'], {
      name: 'idx_users_role'
    });

    await queryInterface.addIndex('users', ['manager'], {
      name: 'idx_users_manager'
    });

    await queryInterface.addIndex('users', ['postal_address'], {
      name: 'idx_users_postal_address'
    });

    // GIN indexes for JSONB fields
    await queryInterface.sequelize.query(`
      CREATE INDEX idx_users_location ON users USING gin(location jsonb_path_ops);
      CREATE INDEX idx_users_ad_groups ON users USING gin(ad_groups);
    `);

    console.log('✅ Users table created successfully with all indexes!');
  } catch (error) {
    console.error('❌ Failed to create users table:', error);
    throw error;
  }
}

export async function down(queryInterface: QueryInterface): Promise<void> {
  console.log('📋 Dropping users table...');

  await queryInterface.dropTable('users');

  // Drop ENUM type
  await queryInterface.sequelize.query(`
    DROP TYPE IF EXISTS user_role_enum;
  `);

  console.log('✅ Users table dropped!');
}
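The two GIN indexes differ in operator class: jsonb_path_ops indexes support only the containment operator (@>) but are smaller and faster for it, while the default jsonb_ops class also indexes key-existence operators such as ?. A sketch of the kind of query each one serves (illustrative only; the group name is an assumption, not data from this repo):

-- Served by idx_users_location (jsonb_path_ops): containment lookups.
SELECT user_id, display_name
FROM users
WHERE location @> '{"country": "India"}';

-- Served by idx_users_ad_groups (default jsonb_ops): containment plus
-- existence checks on array elements or keys.
SELECT user_id
FROM users
WHERE ad_groups ? 'workflow-admins'; -- assumed group name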
@ -1,51 +0,0 @@
import { QueryInterface, DataTypes } from 'sequelize';

export async function up(queryInterface: QueryInterface): Promise<void> {
  // Enums
  await queryInterface.sequelize.query(`DO $$
    BEGIN
      IF NOT EXISTS (SELECT 1 FROM pg_type WHERE typname = 'enum_priority') THEN
        CREATE TYPE enum_priority AS ENUM ('STANDARD','EXPRESS');
      END IF;
    END$$;`);
  await queryInterface.sequelize.query(`DO $$
    BEGIN
      IF NOT EXISTS (SELECT 1 FROM pg_type WHERE typname = 'enum_workflow_status') THEN
        CREATE TYPE enum_workflow_status AS ENUM ('DRAFT','PENDING','IN_PROGRESS','APPROVED','REJECTED','CLOSED');
      END IF;
    END$$;`);

  await queryInterface.createTable('workflow_requests', {
    request_id: { type: DataTypes.UUID, primaryKey: true, defaultValue: DataTypes.UUIDV4 },
    request_number: { type: DataTypes.STRING(20), allowNull: false, unique: true },
    initiator_id: { type: DataTypes.UUID, allowNull: false, references: { model: 'users', key: 'user_id' } },
    template_type: { type: DataTypes.STRING(20), allowNull: false, defaultValue: 'CUSTOM' },
    title: { type: DataTypes.STRING(500), allowNull: false },
    description: { type: DataTypes.TEXT, allowNull: false },
    priority: { type: 'enum_priority' as any, allowNull: false, defaultValue: 'STANDARD' },
    status: { type: 'enum_workflow_status' as any, allowNull: false, defaultValue: 'DRAFT' },
    current_level: { type: DataTypes.INTEGER, allowNull: false, defaultValue: 1 },
    total_levels: { type: DataTypes.INTEGER, allowNull: false, defaultValue: 1 },
    total_tat_hours: { type: DataTypes.DECIMAL(10,2), allowNull: false, defaultValue: 0 },
    submission_date: { type: DataTypes.DATE, allowNull: true },
    closure_date: { type: DataTypes.DATE, allowNull: true },
    conclusion_remark: { type: DataTypes.TEXT, allowNull: true },
    ai_generated_conclusion: { type: DataTypes.TEXT, allowNull: true },
    is_draft: { type: DataTypes.BOOLEAN, allowNull: false, defaultValue: true },
    is_deleted: { type: DataTypes.BOOLEAN, allowNull: false, defaultValue: false },
    created_at: { type: DataTypes.DATE, allowNull: false, defaultValue: DataTypes.NOW },
    updated_at: { type: DataTypes.DATE, allowNull: false, defaultValue: DataTypes.NOW },
  });

  await queryInterface.sequelize.query('CREATE INDEX IF NOT EXISTS "workflow_requests_initiator_id" ON "workflow_requests" ("initiator_id");');
  await queryInterface.sequelize.query('CREATE INDEX IF NOT EXISTS "workflow_requests_status" ON "workflow_requests" ("status");');
  await queryInterface.sequelize.query('CREATE INDEX IF NOT EXISTS "workflow_requests_created_at" ON "workflow_requests" ("created_at");');
}

export async function down(queryInterface: QueryInterface): Promise<void> {
  await queryInterface.dropTable('workflow_requests');
  await queryInterface.sequelize.query('DROP TYPE IF EXISTS enum_workflow_status;');
  await queryInterface.sequelize.query('DROP TYPE IF EXISTS enum_priority;');
}

@ -1,53 +0,0 @@
import { QueryInterface, DataTypes } from 'sequelize';

export async function up(queryInterface: QueryInterface): Promise<void> {
  await queryInterface.sequelize.query(`DO $$
    BEGIN
      IF NOT EXISTS (SELECT 1 FROM pg_type WHERE typname = 'enum_approval_status') THEN
        CREATE TYPE enum_approval_status AS ENUM ('PENDING','IN_PROGRESS','APPROVED','REJECTED','SKIPPED');
      END IF;
    END$$;`);

  await queryInterface.createTable('approval_levels', {
    level_id: { type: DataTypes.UUID, primaryKey: true, defaultValue: DataTypes.UUIDV4 },
    request_id: { type: DataTypes.UUID, allowNull: false, references: { model: 'workflow_requests', key: 'request_id' } },
    level_number: { type: DataTypes.INTEGER, allowNull: false },
    level_name: { type: DataTypes.STRING(100), allowNull: true },
    approver_id: { type: DataTypes.UUID, allowNull: false, references: { model: 'users', key: 'user_id' } },
    approver_email: { type: DataTypes.STRING(255), allowNull: false },
    approver_name: { type: DataTypes.STRING(200), allowNull: false },
    tat_hours: { type: DataTypes.DECIMAL(10,2), allowNull: false },
    tat_days: { type: DataTypes.INTEGER, allowNull: false },
    status: { type: 'enum_approval_status' as any, allowNull: false, defaultValue: 'PENDING' },
    level_start_time: { type: DataTypes.DATE, allowNull: true },
    level_end_time: { type: DataTypes.DATE, allowNull: true },
    action_date: { type: DataTypes.DATE, allowNull: true },
    comments: { type: DataTypes.TEXT, allowNull: true },
    rejection_reason: { type: DataTypes.TEXT, allowNull: true },
    is_final_approver: { type: DataTypes.BOOLEAN, allowNull: false, defaultValue: false },
    elapsed_hours: { type: DataTypes.DECIMAL(10,2), allowNull: false, defaultValue: 0 },
    remaining_hours: { type: DataTypes.DECIMAL(10,2), allowNull: false, defaultValue: 0 },
    tat_percentage_used: { type: DataTypes.DECIMAL(5,2), allowNull: false, defaultValue: 0 },
    created_at: { type: DataTypes.DATE, allowNull: false, defaultValue: DataTypes.NOW },
    updated_at: { type: DataTypes.DATE, allowNull: false, defaultValue: DataTypes.NOW },
  });

  await queryInterface.sequelize.query('CREATE INDEX IF NOT EXISTS "approval_levels_request_id" ON "approval_levels" ("request_id");');
  await queryInterface.sequelize.query('CREATE INDEX IF NOT EXISTS "approval_levels_approver_id" ON "approval_levels" ("approver_id");');
  await queryInterface.sequelize.query('CREATE INDEX IF NOT EXISTS "approval_levels_status" ON "approval_levels" ("status");');
  await queryInterface.sequelize.query(`DO $$
    BEGIN
      IF NOT EXISTS (
        SELECT 1 FROM pg_constraint WHERE conname = 'uq_approval_levels_request_level'
      ) THEN
        ALTER TABLE "approval_levels" ADD CONSTRAINT "uq_approval_levels_request_level" UNIQUE ("request_id", "level_number");
      END IF;
    END$$;`);
}

export async function down(queryInterface: QueryInterface): Promise<void> {
  await queryInterface.dropTable('approval_levels');
  await queryInterface.sequelize.query('DROP TYPE IF EXISTS enum_approval_status;');
}

@ -1,44 +0,0 @@
import { QueryInterface, DataTypes } from 'sequelize';

export async function up(queryInterface: QueryInterface): Promise<void> {
  await queryInterface.sequelize.query(`DO $$
    BEGIN
      IF NOT EXISTS (SELECT 1 FROM pg_type WHERE typname = 'enum_participant_type') THEN
        CREATE TYPE enum_participant_type AS ENUM ('SPECTATOR','INITIATOR','APPROVER','CONSULTATION');
      END IF;
    END$$;`);

  await queryInterface.createTable('participants', {
    participant_id: { type: DataTypes.UUID, primaryKey: true, defaultValue: DataTypes.UUIDV4 },
    request_id: { type: DataTypes.UUID, allowNull: false, references: { model: 'workflow_requests', key: 'request_id' } },
    user_id: { type: DataTypes.UUID, allowNull: false, references: { model: 'users', key: 'user_id' } },
    user_email: { type: DataTypes.STRING(255), allowNull: false },
    user_name: { type: DataTypes.STRING(200), allowNull: false },
    participant_type: { type: 'enum_participant_type' as any, allowNull: false },
    can_comment: { type: DataTypes.BOOLEAN, allowNull: false, defaultValue: true },
    can_view_documents: { type: DataTypes.BOOLEAN, allowNull: false, defaultValue: true },
    can_download_documents: { type: DataTypes.BOOLEAN, allowNull: false, defaultValue: false },
    notification_enabled: { type: DataTypes.BOOLEAN, allowNull: false, defaultValue: true },
    added_by: { type: DataTypes.UUID, allowNull: false, references: { model: 'users', key: 'user_id' } },
    added_at: { type: DataTypes.DATE, allowNull: false, defaultValue: DataTypes.NOW },
    is_active: { type: DataTypes.BOOLEAN, allowNull: false, defaultValue: true },
  });

  await queryInterface.sequelize.query('CREATE INDEX IF NOT EXISTS "participants_request_id" ON "participants" ("request_id");');
  await queryInterface.sequelize.query('CREATE INDEX IF NOT EXISTS "participants_user_id" ON "participants" ("user_id");');
  await queryInterface.sequelize.query(`DO $$
    BEGIN
      IF NOT EXISTS (
        SELECT 1 FROM pg_constraint WHERE conname = 'uq_participants_request_user'
      ) THEN
        ALTER TABLE "participants" ADD CONSTRAINT "uq_participants_request_user" UNIQUE ("request_id", "user_id");
      END IF;
    END$$;`);
}

export async function down(queryInterface: QueryInterface): Promise<void> {
  await queryInterface.dropTable('participants');
  await queryInterface.sequelize.query('DROP TYPE IF EXISTS enum_participant_type;');
}

@ -1,44 +0,0 @@
import { QueryInterface, DataTypes } from 'sequelize';

export async function up(queryInterface: QueryInterface): Promise<void> {
  await queryInterface.sequelize.query(`DO $$
    BEGIN
      IF NOT EXISTS (SELECT 1 FROM pg_type WHERE typname = 'enum_document_category') THEN
        CREATE TYPE enum_document_category AS ENUM ('SUPPORTING','APPROVAL','REFERENCE','FINAL','OTHER','COMPLETION_DOC','ACTIVITY_PHOTO');
      END IF;
    END$$;`);

  await queryInterface.createTable('documents', {
    document_id: { type: DataTypes.UUID, primaryKey: true, defaultValue: DataTypes.UUIDV4 },
    request_id: { type: DataTypes.UUID, allowNull: false, references: { model: 'workflow_requests', key: 'request_id' } },
    uploaded_by: { type: DataTypes.UUID, allowNull: false, references: { model: 'users', key: 'user_id' } },
    file_name: { type: DataTypes.STRING(255), allowNull: false },
    original_file_name: { type: DataTypes.STRING(255), allowNull: false },
    file_type: { type: DataTypes.STRING(100), allowNull: false },
    file_extension: { type: DataTypes.STRING(10), allowNull: false },
    file_size: { type: DataTypes.BIGINT, allowNull: false },
    file_path: { type: DataTypes.STRING(500), allowNull: false },
    storage_url: { type: DataTypes.STRING(500), allowNull: true },
    mime_type: { type: DataTypes.STRING(100), allowNull: false },
    checksum: { type: DataTypes.STRING(64), allowNull: false },
    is_google_doc: { type: DataTypes.BOOLEAN, allowNull: false, defaultValue: false },
    google_doc_url: { type: DataTypes.STRING(500), allowNull: true },
    category: { type: 'enum_document_category' as any, allowNull: false, defaultValue: 'OTHER' },
    version: { type: DataTypes.INTEGER, allowNull: false, defaultValue: 1 },
    parent_document_id: { type: DataTypes.UUID, allowNull: true, references: { model: 'documents', key: 'document_id' } },
    is_deleted: { type: DataTypes.BOOLEAN, allowNull: false, defaultValue: false },
    download_count: { type: DataTypes.INTEGER, allowNull: false, defaultValue: 0 },
    uploaded_at: { type: DataTypes.DATE, allowNull: false, defaultValue: DataTypes.NOW },
  });

  await queryInterface.sequelize.query('CREATE INDEX IF NOT EXISTS "documents_request_id" ON "documents" ("request_id");');
  await queryInterface.sequelize.query('CREATE INDEX IF NOT EXISTS "documents_uploaded_by" ON "documents" ("uploaded_by");');
  await queryInterface.sequelize.query('CREATE INDEX IF NOT EXISTS "documents_category" ON "documents" ("category");');
}

export async function down(queryInterface: QueryInterface): Promise<void> {
  await queryInterface.dropTable('documents');
  await queryInterface.sequelize.query('DROP TYPE IF EXISTS enum_document_category;');
}
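The 64-character checksum column matches the length of a SHA-256 hex digest, which is presumably the intent. A sketch of how an upload path might populate it (the helper below is not from this repo):

// Sketch: compute a SHA-256 hex digest (64 chars) for a file's contents.
import { createHash } from 'crypto';
import { readFile } from 'fs/promises';

async function fileChecksum(filePath: string): Promise<string> {
  const contents = await readFile(filePath);
  return createHash('sha256').update(contents).digest('hex');
}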
@ -1,21 +0,0 @@
import { QueryInterface, DataTypes } from 'sequelize';

module.exports = {
  up: async (queryInterface: QueryInterface) => {
    await queryInterface.createTable('subscriptions', {
      subscription_id: { type: DataTypes.UUID, primaryKey: true, defaultValue: DataTypes.UUIDV4, allowNull: false },
      user_id: { type: DataTypes.UUID, allowNull: false },
      endpoint: { type: DataTypes.STRING(1000), allowNull: false, unique: true },
      p256dh: { type: DataTypes.STRING(255), allowNull: false },
      auth: { type: DataTypes.STRING(255), allowNull: false },
      user_agent: { type: DataTypes.STRING(500), allowNull: true },
      created_at: { type: DataTypes.DATE, allowNull: false, defaultValue: DataTypes.NOW }
    });
    await queryInterface.sequelize.query('CREATE INDEX IF NOT EXISTS "subscriptions_user_id" ON "subscriptions" ("user_id");');
  },
  down: async (queryInterface: QueryInterface) => {
    await queryInterface.dropTable('subscriptions');
  }
};

@ -1,29 +0,0 @@
import { QueryInterface, DataTypes } from 'sequelize';

module.exports = {
  up: async (queryInterface: QueryInterface) => {
    await queryInterface.createTable('activities', {
      activity_id: { type: DataTypes.UUID, primaryKey: true, defaultValue: DataTypes.UUIDV4, allowNull: false },
      request_id: { type: DataTypes.UUID, allowNull: false },
      user_id: { type: DataTypes.UUID, allowNull: true },
      user_name: { type: DataTypes.STRING(255), allowNull: true },
      activity_type: { type: DataTypes.STRING(100), allowNull: false },
      activity_description: { type: DataTypes.TEXT, allowNull: false },
      activity_category: { type: DataTypes.STRING(100), allowNull: true },
      severity: { type: DataTypes.STRING(50), allowNull: true },
      metadata: { type: DataTypes.JSONB, allowNull: true },
      is_system_event: { type: DataTypes.BOOLEAN, allowNull: true },
      ip_address: { type: DataTypes.STRING(100), allowNull: true },
      user_agent: { type: DataTypes.TEXT, allowNull: true },
      created_at: { type: DataTypes.DATE, allowNull: false, defaultValue: DataTypes.NOW }
    });
    await queryInterface.sequelize.query('CREATE INDEX IF NOT EXISTS "activities_request_id" ON "activities" ("request_id");');
    await queryInterface.sequelize.query('CREATE INDEX IF NOT EXISTS "activities_created_at" ON "activities" ("created_at");');
    await queryInterface.sequelize.query('CREATE INDEX IF NOT EXISTS "activities_activity_type" ON "activities" ("activity_type");');
  },
  down: async (queryInterface: QueryInterface) => {
    await queryInterface.dropTable('activities');
  }
};

@ -1,32 +0,0 @@
import { QueryInterface, DataTypes } from 'sequelize';

module.exports = {
  up: async (queryInterface: QueryInterface) => {
    await queryInterface.createTable('work_notes', {
      note_id: { type: DataTypes.UUID, primaryKey: true, defaultValue: DataTypes.UUIDV4, allowNull: false },
      request_id: { type: DataTypes.UUID, allowNull: false },
      user_id: { type: DataTypes.UUID, allowNull: false },
      user_name: { type: DataTypes.STRING(255), allowNull: true },
      user_role: { type: DataTypes.STRING(50), allowNull: true },
      message: { type: DataTypes.TEXT, allowNull: false },
      message_type: { type: DataTypes.STRING(50), allowNull: true },
      is_priority: { type: DataTypes.BOOLEAN, allowNull: true },
      has_attachment: { type: DataTypes.BOOLEAN, allowNull: true },
      parent_note_id: { type: DataTypes.UUID, allowNull: true },
      mentioned_users: { type: DataTypes.ARRAY(DataTypes.UUID), allowNull: true },
      reactions: { type: DataTypes.JSONB, allowNull: true },
      is_edited: { type: DataTypes.BOOLEAN, allowNull: true },
      is_deleted: { type: DataTypes.BOOLEAN, allowNull: true },
      created_at: { type: DataTypes.DATE, allowNull: false, defaultValue: DataTypes.NOW },
      updated_at: { type: DataTypes.DATE, allowNull: false, defaultValue: DataTypes.NOW }
    });
    await queryInterface.sequelize.query('CREATE INDEX IF NOT EXISTS "work_notes_request_id" ON "work_notes" ("request_id");');
    await queryInterface.sequelize.query('CREATE INDEX IF NOT EXISTS "work_notes_user_id" ON "work_notes" ("user_id");');
    await queryInterface.sequelize.query('CREATE INDEX IF NOT EXISTS "work_notes_created_at" ON "work_notes" ("created_at");');
  },
  down: async (queryInterface: QueryInterface) => {
    await queryInterface.dropTable('work_notes');
  }
};

@ -1,25 +0,0 @@
import { QueryInterface, DataTypes } from 'sequelize';

module.exports = {
  up: async (queryInterface: QueryInterface) => {
    await queryInterface.createTable('work_note_attachments', {
      attachment_id: { type: DataTypes.UUID, primaryKey: true, defaultValue: DataTypes.UUIDV4, allowNull: false },
      note_id: { type: DataTypes.UUID, allowNull: false },
      file_name: { type: DataTypes.STRING(255), allowNull: false },
      file_type: { type: DataTypes.STRING(100), allowNull: false },
      file_size: { type: DataTypes.BIGINT, allowNull: false },
      file_path: { type: DataTypes.STRING(500), allowNull: false },
      storage_url: { type: DataTypes.STRING(500), allowNull: true },
      is_downloadable: { type: DataTypes.BOOLEAN, allowNull: true },
      download_count: { type: DataTypes.INTEGER, allowNull: true, defaultValue: 0 },
      uploaded_at: { type: DataTypes.DATE, allowNull: false, defaultValue: DataTypes.NOW }
    });
    await queryInterface.sequelize.query('CREATE INDEX IF NOT EXISTS "work_note_attachments_note_id" ON "work_note_attachments" ("note_id");');
    await queryInterface.sequelize.query('CREATE INDEX IF NOT EXISTS "work_note_attachments_uploaded_at" ON "work_note_attachments" ("uploaded_at");');
  },
  down: async (queryInterface: QueryInterface) => {
    await queryInterface.dropTable('work_note_attachments');
  }
};
@ -1,49 +0,0 @@
import { QueryInterface, DataTypes } from 'sequelize';

/**
 * Migration to add TAT alert tracking fields to approval_levels table
 * These fields track whether TAT notifications have been sent
 */
export async function up(queryInterface: QueryInterface): Promise<void> {
  // Check and add columns only if they don't exist
  const tableDescription = await queryInterface.describeTable('approval_levels');

  if (!tableDescription.tat50_alert_sent) {
    await queryInterface.addColumn('approval_levels', 'tat50_alert_sent', {
      type: DataTypes.BOOLEAN,
      allowNull: false,
      defaultValue: false
    });
  }

  if (!tableDescription.tat75_alert_sent) {
    await queryInterface.addColumn('approval_levels', 'tat75_alert_sent', {
      type: DataTypes.BOOLEAN,
      allowNull: false,
      defaultValue: false
    });
  }

  if (!tableDescription.tat_breached) {
    await queryInterface.addColumn('approval_levels', 'tat_breached', {
      type: DataTypes.BOOLEAN,
      allowNull: false,
      defaultValue: false
    });
  }

  if (!tableDescription.tat_start_time) {
    await queryInterface.addColumn('approval_levels', 'tat_start_time', {
      type: DataTypes.DATE,
      allowNull: true
    });
  }
}

export async function down(queryInterface: QueryInterface): Promise<void> {
  await queryInterface.removeColumn('approval_levels', 'tat50_alert_sent');
  await queryInterface.removeColumn('approval_levels', 'tat75_alert_sent');
  await queryInterface.removeColumn('approval_levels', 'tat_breached');
  await queryInterface.removeColumn('approval_levels', 'tat_start_time');
}
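The describeTable guard repeats once per column. A generic helper (a sketch, not in this repo) keeps add-column migrations idempotent without the per-column if blocks:

// Sketch: add a column only when describeTable doesn't already report it.
import { QueryInterface, ModelAttributeColumnOptions } from 'sequelize';

export async function addColumnIfMissing(
  queryInterface: QueryInterface,
  table: string,
  column: string,
  definition: ModelAttributeColumnOptions
): Promise<void> {
  const description = await queryInterface.describeTable(table);
  if (!description[column]) {
    await queryInterface.addColumn(table, column, definition);
  }
}

// Usage:
// await addColumnIfMissing(queryInterface, 'approval_levels', 'tat_breached', {
//   type: DataTypes.BOOLEAN, allowNull: false, defaultValue: false
// });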
@ -1,134 +0,0 @@
import { QueryInterface, DataTypes } from 'sequelize';

/**
 * Migration to create admin_configurations table
 * Stores system-wide configuration settings
 */
export async function up(queryInterface: QueryInterface): Promise<void> {
  await queryInterface.createTable('admin_configurations', {
    config_id: {
      type: DataTypes.UUID,
      defaultValue: DataTypes.UUIDV4,
      primaryKey: true
    },
    config_key: {
      type: DataTypes.STRING(100),
      allowNull: false,
      unique: true,
      comment: 'Unique configuration key (e.g., "DEFAULT_TAT_EXPRESS", "MAX_FILE_SIZE")'
    },
    config_category: {
      type: DataTypes.ENUM(
        'TAT_SETTINGS',
        'NOTIFICATION_RULES',
        'DOCUMENT_POLICY',
        'USER_ROLES',
        'DASHBOARD_LAYOUT',
        'AI_CONFIGURATION',
        'WORKFLOW_SHARING',
        'SYSTEM_SETTINGS'
      ),
      allowNull: false,
      comment: 'Category of the configuration'
    },
    config_value: {
      type: DataTypes.TEXT,
      allowNull: false,
      comment: 'Configuration value (can be JSON string for complex values)'
    },
    value_type: {
      type: DataTypes.ENUM('STRING', 'NUMBER', 'BOOLEAN', 'JSON', 'ARRAY'),
      defaultValue: 'STRING',
      comment: 'Data type of the value'
    },
    display_name: {
      type: DataTypes.STRING(200),
      allowNull: false,
      comment: 'Human-readable name for UI display'
    },
    description: {
      type: DataTypes.TEXT,
      allowNull: true,
      comment: 'Description of what this configuration does'
    },
    default_value: {
      type: DataTypes.TEXT,
      allowNull: true,
      comment: 'Default value if reset'
    },
    is_editable: {
      type: DataTypes.BOOLEAN,
      defaultValue: true,
      comment: 'Whether this config can be edited by admin'
    },
    is_sensitive: {
      type: DataTypes.BOOLEAN,
      defaultValue: false,
      comment: 'Whether this contains sensitive data (e.g., API keys)'
    },
    validation_rules: {
      type: DataTypes.JSONB,
      defaultValue: {},
      comment: 'Validation rules (min, max, regex, etc.)'
    },
    ui_component: {
      type: DataTypes.STRING(50),
      allowNull: true,
      comment: 'UI component type (input, select, toggle, slider, etc.)'
    },
    options: {
      type: DataTypes.JSONB,
      allowNull: true,
      comment: 'Options for select/radio inputs'
    },
    sort_order: {
      type: DataTypes.INTEGER,
      defaultValue: 0,
      comment: 'Display order in admin panel'
    },
    requires_restart: {
      type: DataTypes.BOOLEAN,
      defaultValue: false,
      comment: 'Whether changing this requires server restart'
    },
    last_modified_by: {
      type: DataTypes.UUID,
      allowNull: true,
      references: {
        model: 'users',
        key: 'user_id'
      },
      comment: 'Admin who last modified this'
    },
    last_modified_at: {
      type: DataTypes.DATE,
      allowNull: true,
      comment: 'When this was last modified'
    },
    created_at: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW
    },
    updated_at: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW
    }
  });

  // Indexes (with IF NOT EXISTS)
  await queryInterface.sequelize.query('CREATE INDEX IF NOT EXISTS "admin_configurations_config_category" ON "admin_configurations" ("config_category");');
  await queryInterface.sequelize.query('CREATE INDEX IF NOT EXISTS "admin_configurations_is_editable" ON "admin_configurations" ("is_editable");');
  await queryInterface.sequelize.query('CREATE INDEX IF NOT EXISTS "admin_configurations_sort_order" ON "admin_configurations" ("sort_order");');

  // Admin config table created
}

export async function down(queryInterface: QueryInterface): Promise<void> {
  await queryInterface.dropTable('admin_configurations');
  await queryInterface.sequelize.query('DROP TYPE IF EXISTS "enum_admin_configurations_config_category";');
  await queryInterface.sequelize.query('DROP TYPE IF EXISTS "enum_admin_configurations_value_type";');
  // Admin config table dropped
}

@ -1,106 +0,0 @@
import { QueryInterface, DataTypes } from 'sequelize';

/**
 * Migration to create holidays table for organization holiday calendar
 * Holidays are excluded from working days in TAT calculations for STANDARD priority
 */
export async function up(queryInterface: QueryInterface): Promise<void> {
  await queryInterface.createTable('holidays', {
    holiday_id: {
      type: DataTypes.UUID,
      defaultValue: DataTypes.UUIDV4,
      primaryKey: true
    },
    holiday_date: {
      type: DataTypes.DATEONLY,
      allowNull: false,
      unique: true,
      comment: 'The date of the holiday (YYYY-MM-DD)'
    },
    holiday_name: {
      type: DataTypes.STRING(200),
      allowNull: false,
      comment: 'Name/title of the holiday (e.g., "Diwali", "Republic Day")'
    },
    description: {
      type: DataTypes.TEXT,
      allowNull: true,
      comment: 'Optional description or notes about the holiday'
    },
    is_recurring: {
      type: DataTypes.BOOLEAN,
      defaultValue: false,
      comment: 'Whether this holiday recurs annually (e.g., Independence Day)'
    },
    recurrence_rule: {
      type: DataTypes.STRING(100),
      allowNull: true,
      comment: 'RRULE for recurring holidays (e.g., "FREQ=YEARLY;BYMONTH=8;BYMONTHDAY=15")'
    },
    holiday_type: {
      type: DataTypes.ENUM('NATIONAL', 'REGIONAL', 'ORGANIZATIONAL', 'OPTIONAL'),
      defaultValue: 'ORGANIZATIONAL',
      comment: 'Type of holiday'
    },
    is_active: {
      type: DataTypes.BOOLEAN,
      defaultValue: true,
      comment: 'Whether this holiday is currently active/applicable'
    },
    applies_to_departments: {
      type: DataTypes.ARRAY(DataTypes.STRING),
      allowNull: true,
      defaultValue: null,
      comment: 'If null, applies to all departments. Otherwise, specific departments only'
    },
    applies_to_locations: {
      type: DataTypes.ARRAY(DataTypes.STRING),
      allowNull: true,
      defaultValue: null,
      comment: 'If null, applies to all locations. Otherwise, specific locations only'
    },
    created_by: {
      type: DataTypes.UUID,
      allowNull: false,
      references: {
        model: 'users',
        key: 'user_id'
      },
      comment: 'Admin user who created this holiday'
    },
    updated_by: {
      type: DataTypes.UUID,
      allowNull: true,
      references: {
        model: 'users',
        key: 'user_id'
      },
      comment: 'Admin user who last updated this holiday'
    },
    created_at: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW
    },
    updated_at: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW
    }
  });

  // Indexes for performance (with IF NOT EXISTS)
  await queryInterface.sequelize.query('CREATE INDEX IF NOT EXISTS "holidays_holiday_date" ON "holidays" ("holiday_date");');
  await queryInterface.sequelize.query('CREATE INDEX IF NOT EXISTS "holidays_is_active" ON "holidays" ("is_active");');
  await queryInterface.sequelize.query('CREATE INDEX IF NOT EXISTS "holidays_holiday_type" ON "holidays" ("holiday_type");');
  await queryInterface.sequelize.query('CREATE INDEX IF NOT EXISTS "holidays_created_by" ON "holidays" ("created_by");');

  // Holidays table created
}

export async function down(queryInterface: QueryInterface): Promise<void> {
  await queryInterface.dropTable('holidays');
  await queryInterface.sequelize.query('DROP TYPE IF EXISTS "enum_holidays_holiday_type";');
  // Holidays table dropped
}
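Since holidays feed the working-day side of TAT calculations for STANDARD priority, a simplified version of that check might look like the sketch below (assumptions: Saturday/Sunday weekends, UTC dates, and no handling of applies_to_departments/locations or recurrence, which the real service presumably covers):

// Sketch: a date counts as a working day if it is not a weekend and not an
// active holiday. Holiday dates are the DATEONLY strings from the table.
function isWorkingDay(date: Date, holidayDates: Set<string>): boolean {
  const day = date.getDay(); // 0 = Sunday, 6 = Saturday
  if (day === 0 || day === 6) return false;
  const yyyyMmDd = date.toISOString().slice(0, 10); // matches DATEONLY format (UTC)
  return !holidayDates.has(yyyyMmDd);
}

// Usage:
// const holidays = new Set(['2025-08-15', '2025-10-20']); // loaded from the holidays table
// isWorkingDay(new Date('2025-08-15'), holidays); // false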
@ -1,266 +0,0 @@
import { QueryInterface } from 'sequelize';

/**
 * Migration to create database views for KPI reporting
 * These views pre-aggregate data for faster reporting queries
 */
export async function up(queryInterface: QueryInterface): Promise<void> {

  // 1. Request Volume & Status Summary View
  await queryInterface.sequelize.query(`
    CREATE OR REPLACE VIEW vw_request_volume_summary AS
    SELECT
      w.request_id,
      w.request_number,
      w.title,
      w.status,
      w.priority,
      w.template_type,
      w.submission_date,
      w.closure_date,
      w.created_at,
      u.user_id as initiator_id,
      u.display_name as initiator_name,
      u.department as initiator_department,
      EXTRACT(EPOCH FROM (COALESCE(w.closure_date, NOW()) - w.submission_date)) / 3600 as cycle_time_hours,
      EXTRACT(EPOCH FROM (NOW() - w.submission_date)) / 3600 as age_hours,
      w.current_level,
      w.total_levels,
      w.total_tat_hours,
      CASE
        WHEN w.status IN ('APPROVED', 'REJECTED', 'CLOSED') THEN 'COMPLETED'
        WHEN w.status = 'DRAFT' THEN 'DRAFT'
        ELSE 'IN_PROGRESS'
      END as status_category
    FROM workflow_requests w
    LEFT JOIN users u ON w.initiator_id = u.user_id
    WHERE w.is_deleted = false;
  `);

  // 2. TAT Compliance View
  await queryInterface.sequelize.query(`
    CREATE OR REPLACE VIEW vw_tat_compliance AS
    SELECT
      al.level_id,
      al.request_id,
      w.request_number,
      w.priority,
      w.status as request_status,
      al.level_number,
      al.approver_id,
      al.approver_name,
      u.department as approver_department,
      al.status as level_status,
      al.tat_hours as allocated_hours,
      al.elapsed_hours,
      al.remaining_hours,
      al.tat_percentage_used,
      al.level_start_time,
      al.level_end_time,
      al.action_date,
      al.tat50_alert_sent,
      al.tat75_alert_sent,
      al.tat_breached,
      CASE
        WHEN al.status IN ('APPROVED', 'REJECTED') AND al.elapsed_hours <= al.tat_hours THEN true
        WHEN al.status IN ('APPROVED', 'REJECTED') AND al.elapsed_hours > al.tat_hours THEN false
        WHEN al.status IN ('PENDING', 'IN_PROGRESS') AND al.tat_percentage_used >= 100 THEN false
        ELSE null
      END as completed_within_tat,
      CASE
        WHEN al.tat_percentage_used < 50 THEN 'ON_TRACK'
        WHEN al.tat_percentage_used < 75 THEN 'AT_RISK'
        WHEN al.tat_percentage_used < 100 THEN 'CRITICAL'
        ELSE 'BREACHED'
      END as tat_status,
      CASE
        WHEN al.status IN ('APPROVED', 'REJECTED') THEN
          al.tat_hours - al.elapsed_hours
        ELSE 0
      END as time_saved_hours
    FROM approval_levels al
    JOIN workflow_requests w ON al.request_id = w.request_id
    LEFT JOIN users u ON al.approver_id = u.user_id
    WHERE w.is_deleted = false;
  `);

  // 3. Approver Performance View
  await queryInterface.sequelize.query(`
    CREATE OR REPLACE VIEW vw_approver_performance AS
    SELECT
      al.approver_id,
      u.display_name as approver_name,
      u.department,
      u.designation,
      COUNT(*) as total_assignments,
      COUNT(CASE WHEN al.status = 'PENDING' THEN 1 END) as pending_count,
      COUNT(CASE WHEN al.status = 'IN_PROGRESS' THEN 1 END) as in_progress_count,
      COUNT(CASE WHEN al.status = 'APPROVED' THEN 1 END) as approved_count,
      COUNT(CASE WHEN al.status = 'REJECTED' THEN 1 END) as rejected_count,
      AVG(CASE WHEN al.status IN ('APPROVED', 'REJECTED') THEN al.elapsed_hours END) as avg_response_time_hours,
      SUM(CASE WHEN al.elapsed_hours <= al.tat_hours AND al.status IN ('APPROVED', 'REJECTED') THEN 1 ELSE 0 END)::FLOAT /
        NULLIF(COUNT(CASE WHEN al.status IN ('APPROVED', 'REJECTED') THEN 1 END), 0) * 100 as tat_compliance_percentage,
      COUNT(CASE WHEN al.tat_breached = true THEN 1 END) as breaches_count,
      MIN(CASE WHEN al.status = 'PENDING' OR al.status = 'IN_PROGRESS' THEN
        EXTRACT(EPOCH FROM (NOW() - al.level_start_time)) / 3600
      END) as oldest_pending_hours
    FROM approval_levels al
    JOIN users u ON al.approver_id = u.user_id
    JOIN workflow_requests w ON al.request_id = w.request_id
    WHERE w.is_deleted = false
    GROUP BY al.approver_id, u.display_name, u.department, u.designation;
  `);

  // 4. TAT Alerts Summary View
  await queryInterface.sequelize.query(`
    CREATE OR REPLACE VIEW vw_tat_alerts_summary AS
    SELECT
      ta.alert_id,
      ta.request_id,
      w.request_number,
      w.title as request_title,
      w.priority,
      ta.level_id,
      al.level_number,
      ta.approver_id,
      ta.alert_type,
      ta.threshold_percentage,
      ta.tat_hours_allocated,
      ta.tat_hours_elapsed,
      ta.tat_hours_remaining,
      ta.alert_sent_at,
      ta.expected_completion_time,
      ta.is_breached,
      ta.was_completed_on_time,
      ta.completion_time,
      al.status as level_status,
      EXTRACT(EPOCH FROM (ta.alert_sent_at - ta.level_start_time)) / 3600 as hours_before_alert,
      CASE
        WHEN ta.completion_time IS NOT NULL THEN
          EXTRACT(EPOCH FROM (ta.completion_time - ta.alert_sent_at)) / 3600
        ELSE NULL
      END as response_time_after_alert_hours,
      ta.metadata
    FROM tat_alerts ta
    JOIN workflow_requests w ON ta.request_id = w.request_id
    JOIN approval_levels al ON ta.level_id = al.level_id
    WHERE w.is_deleted = false
    ORDER BY ta.alert_sent_at DESC;
  `);

  // 5. Department-wise Workflow Summary View
  await queryInterface.sequelize.query(`
    CREATE OR REPLACE VIEW vw_department_summary AS
    SELECT
      u.department,
      COUNT(DISTINCT w.request_id) as total_requests,
      COUNT(DISTINCT CASE WHEN w.status = 'DRAFT' THEN w.request_id END) as draft_requests,
      COUNT(DISTINCT CASE WHEN w.status IN ('PENDING', 'IN_PROGRESS') THEN w.request_id END) as open_requests,
      COUNT(DISTINCT CASE WHEN w.status = 'APPROVED' THEN w.request_id END) as approved_requests,
      COUNT(DISTINCT CASE WHEN w.status = 'REJECTED' THEN w.request_id END) as rejected_requests,
      AVG(CASE WHEN w.closure_date IS NOT NULL THEN
        EXTRACT(EPOCH FROM (w.closure_date - w.submission_date)) / 3600
      END) as avg_cycle_time_hours,
      COUNT(DISTINCT CASE WHEN w.priority = 'EXPRESS' THEN w.request_id END) as express_priority_count,
      COUNT(DISTINCT CASE WHEN w.priority = 'STANDARD' THEN w.request_id END) as standard_priority_count
    FROM users u
    LEFT JOIN workflow_requests w ON u.user_id = w.initiator_id AND w.is_deleted = false
    WHERE u.department IS NOT NULL
    GROUP BY u.department;
  `);

  // 6. Daily/Weekly KPI Metrics View
  await queryInterface.sequelize.query(`
    CREATE OR REPLACE VIEW vw_daily_kpi_metrics AS
    SELECT
      DATE(w.created_at) as date,
      COUNT(*) as requests_created,
      COUNT(CASE WHEN w.submission_date IS NOT NULL AND DATE(w.submission_date) = DATE(w.created_at) THEN 1 END) as requests_submitted,
      COUNT(CASE WHEN w.closure_date IS NOT NULL AND DATE(w.closure_date) = DATE(w.created_at) THEN 1 END) as requests_closed,
      COUNT(CASE WHEN w.status = 'APPROVED' AND DATE(w.closure_date) = DATE(w.created_at) THEN 1 END) as requests_approved,
      COUNT(CASE WHEN w.status = 'REJECTED' AND DATE(w.closure_date) = DATE(w.created_at) THEN 1 END) as requests_rejected,
      AVG(CASE WHEN w.closure_date IS NOT NULL AND DATE(w.closure_date) = DATE(w.created_at) THEN
        EXTRACT(EPOCH FROM (w.closure_date - w.submission_date)) / 3600
      END) as avg_completion_time_hours
    FROM workflow_requests w
    WHERE w.is_deleted = false
    GROUP BY DATE(w.created_at)
    ORDER BY DATE(w.created_at) DESC;
  `);

  // 7. Workflow Aging Report View
  await queryInterface.sequelize.query(`
    CREATE OR REPLACE VIEW vw_workflow_aging AS
    SELECT
      w.request_id,
      w.request_number,
      w.title,
      w.status,
      w.priority,
      w.current_level,
      w.total_levels,
      w.submission_date,
      EXTRACT(EPOCH FROM (NOW() - w.submission_date)) / (3600 * 24) as age_days,
      CASE
        WHEN EXTRACT(EPOCH FROM (NOW() - w.submission_date)) / (3600 * 24) < 3 THEN 'FRESH'
        WHEN EXTRACT(EPOCH FROM (NOW() - w.submission_date)) / (3600 * 24) < 7 THEN 'NORMAL'
        WHEN EXTRACT(EPOCH FROM (NOW() - w.submission_date)) / (3600 * 24) < 14 THEN 'AGING'
        ELSE 'CRITICAL'
      END as age_category,
      al.approver_name as current_approver,
      al.level_start_time as current_level_start,
      EXTRACT(EPOCH FROM (NOW() - al.level_start_time)) / 3600 as current_level_age_hours,
      al.tat_hours as current_level_tat_hours,
      al.tat_percentage_used as current_level_tat_used
    FROM workflow_requests w
    LEFT JOIN approval_levels al ON w.request_id = al.request_id
      AND al.level_number = w.current_level
      AND al.status IN ('PENDING', 'IN_PROGRESS')
    WHERE w.status IN ('PENDING', 'IN_PROGRESS')
      AND w.is_deleted = false
|
|
||||||
ORDER BY age_days DESC;
|
|
||||||
`);
|
|
||||||
|
|
||||||
// 8. Engagement & Quality Metrics View
|
|
||||||
await queryInterface.sequelize.query(`
|
|
||||||
CREATE OR REPLACE VIEW vw_engagement_metrics AS
|
|
||||||
SELECT
|
|
||||||
w.request_id,
|
|
||||||
w.request_number,
|
|
||||||
w.title,
|
|
||||||
w.status,
|
|
||||||
COUNT(DISTINCT wn.note_id) as work_notes_count,
|
|
||||||
COUNT(DISTINCT d.document_id) as documents_count,
|
|
||||||
COUNT(DISTINCT p.participant_id) as spectators_count,
|
|
||||||
COUNT(DISTINCT al.approver_id) as approvers_count,
|
|
||||||
MAX(wn.created_at) as last_comment_date,
|
|
||||||
MAX(d.uploaded_at) as last_document_date,
|
|
||||||
CASE
|
|
||||||
WHEN COUNT(DISTINCT wn.note_id) > 10 THEN 'HIGH'
|
|
||||||
WHEN COUNT(DISTINCT wn.note_id) > 5 THEN 'MEDIUM'
|
|
||||||
ELSE 'LOW'
|
|
||||||
END as engagement_level
|
|
||||||
FROM workflow_requests w
|
|
||||||
LEFT JOIN work_notes wn ON w.request_id = wn.request_id AND wn.is_deleted = false
|
|
||||||
LEFT JOIN documents d ON w.request_id = d.request_id AND d.is_deleted = false
|
|
||||||
LEFT JOIN participants p ON w.request_id = p.request_id AND p.participant_type = 'SPECTATOR'
|
|
||||||
LEFT JOIN approval_levels al ON w.request_id = al.request_id
|
|
||||||
WHERE w.is_deleted = false
|
|
||||||
GROUP BY w.request_id, w.request_number, w.title, w.status;
|
|
||||||
`);
|
|
||||||
|
|
||||||
// KPI views created
|
|
||||||
}
|
|
||||||
|
|
||||||
export async function down(queryInterface: QueryInterface): Promise<void> {
|
|
||||||
await queryInterface.sequelize.query('DROP VIEW IF EXISTS vw_engagement_metrics;');
|
|
||||||
await queryInterface.sequelize.query('DROP VIEW IF EXISTS vw_workflow_aging;');
|
|
||||||
await queryInterface.sequelize.query('DROP VIEW IF EXISTS vw_daily_kpi_metrics;');
|
|
||||||
await queryInterface.sequelize.query('DROP VIEW IF EXISTS vw_department_summary;');
|
|
||||||
await queryInterface.sequelize.query('DROP VIEW IF EXISTS vw_tat_alerts_summary;');
|
|
||||||
await queryInterface.sequelize.query('DROP VIEW IF EXISTS vw_approver_performance;');
|
|
||||||
await queryInterface.sequelize.query('DROP VIEW IF EXISTS vw_tat_compliance;');
|
|
||||||
await queryInterface.sequelize.query('DROP VIEW IF EXISTS vw_request_volume_summary;');
|
|
||||||
// KPI views dropped
|
|
||||||
}
|
|
||||||
|
|
||||||
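For orientation (not part of the diff): once these views exist, application code reads them like ordinary tables. A minimal sketch, assuming a configured Sequelize instance; the helper name is hypothetical:

import { QueryTypes, Sequelize } from 'sequelize';

// Hypothetical helper: fetch per-approver KPI rows from vw_approver_performance,
// worst TAT compliance first (NULLS LAST keeps approvers with no closed items at the end).
async function getApproverPerformance(sequelize: Sequelize) {
  return sequelize.query(
    'SELECT * FROM vw_approver_performance ORDER BY tat_compliance_percentage ASC NULLS LAST;',
    { type: QueryTypes.SELECT }
  );
}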
@@ -1,134 +0,0 @@
import { QueryInterface, DataTypes } from 'sequelize';

/**
 * Migration to create TAT alerts/reminders table
 * Stores all TAT-related notifications sent (50%, 75%, 100%)
 */
export async function up(queryInterface: QueryInterface): Promise<void> {
  await queryInterface.createTable('tat_alerts', {
    alert_id: {
      type: DataTypes.UUID,
      defaultValue: DataTypes.UUIDV4,
      primaryKey: true
    },
    request_id: {
      type: DataTypes.UUID,
      allowNull: false,
      references: {
        model: 'workflow_requests',
        key: 'request_id'
      }
    },
    level_id: {
      type: DataTypes.UUID,
      allowNull: false,
      references: {
        model: 'approval_levels',
        key: 'level_id'
      }
    },
    approver_id: {
      type: DataTypes.UUID,
      allowNull: false,
      references: {
        model: 'users',
        key: 'user_id'
      }
    },
    alert_type: {
      type: DataTypes.ENUM('TAT_50', 'TAT_75', 'TAT_100'),
      allowNull: false
    },
    threshold_percentage: {
      type: DataTypes.INTEGER,
      allowNull: false,
      comment: '50, 75, or 100'
    },
    tat_hours_allocated: {
      type: DataTypes.DECIMAL(10, 2),
      allowNull: false,
      comment: 'Total TAT hours for this level'
    },
    tat_hours_elapsed: {
      type: DataTypes.DECIMAL(10, 2),
      allowNull: false,
      comment: 'Hours elapsed when alert was sent'
    },
    tat_hours_remaining: {
      type: DataTypes.DECIMAL(10, 2),
      allowNull: false,
      comment: 'Hours remaining when alert was sent'
    },
    level_start_time: {
      type: DataTypes.DATE,
      allowNull: false,
      comment: 'When the approval level started'
    },
    alert_sent_at: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW,
      comment: 'When the alert was sent'
    },
    expected_completion_time: {
      type: DataTypes.DATE,
      allowNull: false,
      comment: 'When the level should be completed'
    },
    alert_message: {
      type: DataTypes.TEXT,
      allowNull: false,
      comment: 'The notification message sent'
    },
    notification_sent: {
      type: DataTypes.BOOLEAN,
      defaultValue: true,
      comment: 'Whether notification was successfully sent'
    },
    notification_channels: {
      type: DataTypes.ARRAY(DataTypes.STRING),
      defaultValue: [],
      comment: 'push, email, sms'
    },
    is_breached: {
      type: DataTypes.BOOLEAN,
      defaultValue: false,
      comment: 'Whether this was a breach alert (100%)'
    },
    was_completed_on_time: {
      type: DataTypes.BOOLEAN,
      allowNull: true,
      comment: 'Set when level is completed - was it on time?'
    },
    completion_time: {
      type: DataTypes.DATE,
      allowNull: true,
      comment: 'When the level was actually completed'
    },
    metadata: {
      type: DataTypes.JSONB,
      defaultValue: {},
      comment: 'Additional context (priority, request title, etc.)'
    },
    created_at: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW
    }
  });

  // Indexes for performance (with IF NOT EXISTS check)
  await queryInterface.sequelize.query('CREATE INDEX IF NOT EXISTS "tat_alerts_request_id" ON "tat_alerts" ("request_id");');
  await queryInterface.sequelize.query('CREATE INDEX IF NOT EXISTS "tat_alerts_level_id" ON "tat_alerts" ("level_id");');
  await queryInterface.sequelize.query('CREATE INDEX IF NOT EXISTS "tat_alerts_approver_id" ON "tat_alerts" ("approver_id");');
  await queryInterface.sequelize.query('CREATE INDEX IF NOT EXISTS "tat_alerts_alert_type" ON "tat_alerts" ("alert_type");');
  await queryInterface.sequelize.query('CREATE INDEX IF NOT EXISTS "tat_alerts_alert_sent_at" ON "tat_alerts" ("alert_sent_at");');
  await queryInterface.sequelize.query('CREATE INDEX IF NOT EXISTS "tat_alerts_is_breached" ON "tat_alerts" ("is_breached");');
  await queryInterface.sequelize.query('CREATE INDEX IF NOT EXISTS "tat_alerts_was_completed_on_time" ON "tat_alerts" ("was_completed_on_time");');
}

export async function down(queryInterface: QueryInterface): Promise<void> {
  await queryInterface.dropTable('tat_alerts');
  await queryInterface.sequelize.query('DROP TYPE IF EXISTS "enum_tat_alerts_alert_type";');
}
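As the file comment notes, alerts fire at 50%, 75%, and 100% of allocated TAT, and alert_type encodes the tier. A minimal sketch of that mapping (not part of the diff; the function name is hypothetical):

// Hypothetical: derive the alert tier from elapsed vs. allocated TAT hours.
type TatAlertType = 'TAT_50' | 'TAT_75' | 'TAT_100';

function tatAlertTypeFor(elapsedHours: number, allocatedHours: number): TatAlertType | null {
  const pct = (elapsedHours / allocatedHours) * 100;
  if (pct >= 100) return 'TAT_100'; // breach tier (is_breached = true)
  if (pct >= 75) return 'TAT_75';
  if (pct >= 50) return 'TAT_50';
  return null; // below the first threshold: no alert yet
}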
@@ -1,97 +0,0 @@
import { QueryInterface, DataTypes } from 'sequelize';

/**
 * Migration: Add skip-related fields to approval_levels table
 * Purpose: Track approvers who were skipped by initiator
 * Date: 2025-11-05
 */

export async function up(queryInterface: QueryInterface): Promise<void> {
  // Check if table exists first
  const tables = await queryInterface.showAllTables();
  if (!tables.includes('approval_levels')) {
    // Table doesn't exist yet, skipping
    return;
  }

  // Get existing columns
  const tableDescription = await queryInterface.describeTable('approval_levels');

  // Add skip-related columns only if they don't exist
  if (!tableDescription.is_skipped) {
    await queryInterface.addColumn('approval_levels', 'is_skipped', {
      type: DataTypes.BOOLEAN,
      allowNull: false,
      defaultValue: false,
      comment: 'Indicates if this approver was skipped by initiator'
    });
    // Added is_skipped column
  }

  if (!tableDescription.skipped_at) {
    await queryInterface.addColumn('approval_levels', 'skipped_at', {
      type: DataTypes.DATE,
      allowNull: true,
      comment: 'Timestamp when approver was skipped'
    });
    // Added skipped_at column
  }

  if (!tableDescription.skipped_by) {
    await queryInterface.addColumn('approval_levels', 'skipped_by', {
      type: DataTypes.UUID,
      allowNull: true,
      references: {
        model: 'users',
        key: 'user_id'
      },
      onUpdate: 'CASCADE',
      onDelete: 'SET NULL',
      comment: 'User ID who skipped this approver'
    });
    // Added skipped_by column
  }

  if (!tableDescription.skip_reason) {
    await queryInterface.addColumn('approval_levels', 'skip_reason', {
      type: DataTypes.TEXT,
      allowNull: true,
      comment: 'Reason for skipping this approver'
    });
    // Added skip_reason column
  }

  // Check if index exists before creating
  try {
    const indexes: any[] = await queryInterface.showIndex('approval_levels') as any[];
    const indexExists = Array.isArray(indexes) && indexes.some((idx: any) => idx.name === 'idx_approval_levels_skipped');

    if (!indexExists) {
      await queryInterface.addIndex('approval_levels', ['is_skipped'], {
        name: 'idx_approval_levels_skipped',
        where: {
          is_skipped: true
        }
      });
      // Index added
    }
  } catch (error) {
    // Ignore: the index most likely already exists
  }

  // Skip fields added
}

export async function down(queryInterface: QueryInterface): Promise<void> {
  // Remove index first
  await queryInterface.removeIndex('approval_levels', 'idx_approval_levels_skipped');

  // Remove columns
  await queryInterface.removeColumn('approval_levels', 'skip_reason');
  await queryInterface.removeColumn('approval_levels', 'skipped_by');
  await queryInterface.removeColumn('approval_levels', 'skipped_at');
  await queryInterface.removeColumn('approval_levels', 'is_skipped');

  // Skip fields removed
}
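The index above is partial (WHERE is_skipped = true), so reads should repeat that predicate to stay on the index. A minimal sketch, assuming a configured Sequelize instance; the helper name is hypothetical:

import { QueryTypes, Sequelize } from 'sequelize';

// Hypothetical helper: list skipped approvers for one request. The literal
// is_skipped = true predicate matches the partial index idx_approval_levels_skipped.
async function getSkippedLevels(sequelize: Sequelize, requestId: string) {
  return sequelize.query(
    `SELECT level_number, skipped_by, skip_reason, skipped_at
     FROM approval_levels
     WHERE request_id = :requestId AND is_skipped = true;`,
    { replacements: { requestId }, type: QueryTypes.SELECT }
  );
}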
@@ -1,76 +0,0 @@
import { QueryInterface } from 'sequelize';

/**
 * Migration: Convert tat_days to GENERATED STORED column
 *
 * This ensures tat_days is auto-calculated from tat_hours across all environments.
 * Production already has this as a generated column; this migration makes other environments consistent.
 */
export async function up(queryInterface: QueryInterface): Promise<void> {
  // Check if tat_days is already a generated column
  const result = await queryInterface.sequelize.query(`
    SELECT
      a.attname as column_name,
      a.attgenerated as is_generated
    FROM pg_attribute a
    JOIN pg_class c ON a.attrelid = c.oid
    WHERE c.relname = 'approval_levels'
      AND a.attname = 'tat_days'
      AND NOT a.attisdropped;
  `, { type: 'SELECT' });

  const column = result[0] as any;

  if (column && column.is_generated === 's') {
    // Already a GENERATED column, skipping
    return;
  }

  // Converting tat_days to GENERATED column

  // Step 1: Drop the existing regular column
  await queryInterface.sequelize.query(`
    ALTER TABLE approval_levels DROP COLUMN IF EXISTS tat_days;
  `);

  // Step 2: Add it back as a GENERATED STORED column
  // Formula: CEIL(tat_hours / 24.0) - rounds up to the nearest day
  await queryInterface.sequelize.query(`
    ALTER TABLE approval_levels
    ADD COLUMN tat_days INTEGER
    GENERATED ALWAYS AS (CAST(CEIL(tat_hours / 24.0) AS INTEGER)) STORED;
  `);

  // tat_days is now auto-calculated
}

export async function down(queryInterface: QueryInterface): Promise<void> {
  // Rolling back to regular column

  // Drop the generated column
  await queryInterface.sequelize.query(`
    ALTER TABLE approval_levels DROP COLUMN IF EXISTS tat_days;
  `);

  // Add it back as a regular column (with default calculation for existing rows)
  await queryInterface.sequelize.query(`
    ALTER TABLE approval_levels
    ADD COLUMN tat_days INTEGER;
  `);

  // Populate existing rows with calculated values
  await queryInterface.sequelize.query(`
    UPDATE approval_levels
    SET tat_days = CAST(CEIL(tat_hours / 24.0) AS INTEGER)
    WHERE tat_days IS NULL;
  `);

  // Make it NOT NULL after populating
  await queryInterface.sequelize.query(`
    ALTER TABLE approval_levels
    ALTER COLUMN tat_days SET NOT NULL;
  `);

  // Rolled back successfully
}
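Because the column is GENERATED ALWAYS ... STORED, Postgres rejects direct writes to tat_days and recomputes it on every INSERT/UPDATE. A quick sanity check, sketched under the same assumptions (helper name hypothetical):

import { QueryTypes, Sequelize } from 'sequelize';

// Hypothetical check: the stored value should always equal CEIL(tat_hours / 24.0),
// e.g. 25 hours -> 2 days.
async function sampleTatDays(sequelize: Sequelize) {
  return sequelize.query(
    `SELECT tat_hours, tat_days,
            CAST(CEIL(tat_hours / 24.0) AS INTEGER) AS expected_days
     FROM approval_levels
     LIMIT 5;`,
    { type: QueryTypes.SELECT }
  );
}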
@@ -1,109 +0,0 @@
import { QueryInterface, DataTypes } from 'sequelize';

/**
 * Migration to create conclusion_remarks table
 * Stores AI-generated and finalized conclusion remarks for workflow requests
 */
export async function up(queryInterface: QueryInterface): Promise<void> {
  await queryInterface.createTable('conclusion_remarks', {
    conclusion_id: {
      type: DataTypes.UUID,
      defaultValue: DataTypes.UUIDV4,
      primaryKey: true,
      allowNull: false
    },
    request_id: {
      type: DataTypes.UUID,
      allowNull: false,
      references: {
        model: 'workflow_requests',
        key: 'request_id'
      },
      onUpdate: 'CASCADE',
      onDelete: 'CASCADE',
      unique: true // One conclusion per request
    },
    ai_generated_remark: {
      type: DataTypes.TEXT,
      allowNull: true
    },
    ai_model_used: {
      type: DataTypes.STRING(100),
      allowNull: true
    },
    ai_confidence_score: {
      type: DataTypes.DECIMAL(5, 2),
      allowNull: true
    },
    final_remark: {
      type: DataTypes.TEXT,
      allowNull: true
    },
    edited_by: {
      type: DataTypes.UUID,
      allowNull: true,
      references: {
        model: 'users',
        key: 'user_id'
      },
      onUpdate: 'CASCADE',
      onDelete: 'SET NULL'
    },
    is_edited: {
      type: DataTypes.BOOLEAN,
      allowNull: false,
      defaultValue: false
    },
    edit_count: {
      type: DataTypes.INTEGER,
      allowNull: false,
      defaultValue: 0
    },
    approval_summary: {
      type: DataTypes.JSONB,
      allowNull: true
    },
    document_summary: {
      type: DataTypes.JSONB,
      allowNull: true
    },
    key_discussion_points: {
      type: DataTypes.ARRAY(DataTypes.TEXT),
      allowNull: false,
      defaultValue: []
    },
    generated_at: {
      type: DataTypes.DATE,
      allowNull: true
    },
    finalized_at: {
      type: DataTypes.DATE,
      allowNull: true
    },
    created_at: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW
    },
    updated_at: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW
    }
  });

  // Add index on request_id for faster lookups
  await queryInterface.addIndex('conclusion_remarks', ['request_id'], {
    name: 'idx_conclusion_remarks_request_id'
  });

  // Add index on finalized_at for KPI queries
  await queryInterface.addIndex('conclusion_remarks', ['finalized_at'], {
    name: 'idx_conclusion_remarks_finalized_at'
  });
}

export async function down(queryInterface: QueryInterface): Promise<void> {
  await queryInterface.dropTable('conclusion_remarks');
}
@@ -1,137 +0,0 @@
import { QueryInterface, DataTypes } from 'sequelize';

export async function up(queryInterface: QueryInterface): Promise<void> {
  // Create priority enum type
  await queryInterface.sequelize.query(`
    DO $$ BEGIN
      CREATE TYPE notification_priority_enum AS ENUM ('LOW', 'MEDIUM', 'HIGH', 'URGENT');
    EXCEPTION
      WHEN duplicate_object THEN null;
    END $$;
  `);

  // Create notifications table
  await queryInterface.createTable('notifications', {
    notification_id: {
      type: DataTypes.UUID,
      defaultValue: DataTypes.UUIDV4,
      primaryKey: true
    },
    user_id: {
      type: DataTypes.UUID,
      allowNull: false,
      references: {
        model: 'users',
        key: 'user_id'
      },
      onUpdate: 'CASCADE',
      onDelete: 'CASCADE'
    },
    request_id: {
      type: DataTypes.UUID,
      allowNull: true,
      references: {
        model: 'workflow_requests',
        key: 'request_id'
      },
      onUpdate: 'CASCADE',
      onDelete: 'SET NULL'
    },
    notification_type: {
      type: DataTypes.STRING(50),
      allowNull: false
    },
    title: {
      type: DataTypes.STRING(255),
      allowNull: false
    },
    message: {
      type: DataTypes.TEXT,
      allowNull: false
    },
    is_read: {
      type: DataTypes.BOOLEAN,
      defaultValue: false,
      allowNull: false
    },
    priority: {
      type: 'notification_priority_enum',
      defaultValue: 'MEDIUM',
      allowNull: false
    },
    action_url: {
      type: DataTypes.STRING(500),
      allowNull: true
    },
    action_required: {
      type: DataTypes.BOOLEAN,
      defaultValue: false,
      allowNull: false
    },
    metadata: {
      type: DataTypes.JSONB,
      allowNull: true,
      defaultValue: {}
    },
    sent_via: {
      type: DataTypes.ARRAY(DataTypes.STRING),
      defaultValue: [],
      allowNull: false
    },
    email_sent: {
      type: DataTypes.BOOLEAN,
      defaultValue: false,
      allowNull: false
    },
    sms_sent: {
      type: DataTypes.BOOLEAN,
      defaultValue: false,
      allowNull: false
    },
    push_sent: {
      type: DataTypes.BOOLEAN,
      defaultValue: false,
      allowNull: false
    },
    read_at: {
      type: DataTypes.DATE,
      allowNull: true
    },
    expires_at: {
      type: DataTypes.DATE,
      allowNull: true
    },
    created_at: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW
    }
  });

  // Create indexes for better query performance
  await queryInterface.addIndex('notifications', ['user_id'], {
    name: 'idx_notifications_user_id'
  });

  await queryInterface.addIndex('notifications', ['user_id', 'is_read'], {
    name: 'idx_notifications_user_unread'
  });

  await queryInterface.addIndex('notifications', ['request_id'], {
    name: 'idx_notifications_request_id'
  });

  await queryInterface.addIndex('notifications', ['created_at'], {
    name: 'idx_notifications_created_at'
  });

  await queryInterface.addIndex('notifications', ['notification_type'], {
    name: 'idx_notifications_type'
  });
}

export async function down(queryInterface: QueryInterface): Promise<void> {
  await queryInterface.dropTable('notifications');
  await queryInterface.sequelize.query('DROP TYPE IF EXISTS notification_priority_enum;');
}
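The (user_id, is_read) index serves the unread-list query, and read_at records when a row was acknowledged. A minimal sketch of the matching update (not part of the diff; helper name hypothetical):

import { QueryTypes, Sequelize } from 'sequelize';

// Hypothetical helper: mark one notification read and stamp read_at.
async function markNotificationRead(sequelize: Sequelize, notificationId: string) {
  await sequelize.query(
    'UPDATE notifications SET is_read = true, read_at = NOW() WHERE notification_id = :id AND is_read = false;',
    { replacements: { id: notificationId }, type: QueryTypes.UPDATE }
  );
}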
@@ -1,49 +0,0 @@
import { QueryInterface, DataTypes } from 'sequelize';

/**
 * Migration: Add breach_reason column to approval_levels table
 * Purpose: Store TAT breach reason directly in approval_levels table
 * Date: 2025-11-18
 */

export async function up(queryInterface: QueryInterface): Promise<void> {
  // Check if table exists first
  const tables = await queryInterface.showAllTables();
  if (!tables.includes('approval_levels')) {
    // Table doesn't exist yet, skipping
    return;
  }

  // Get existing columns
  const tableDescription = await queryInterface.describeTable('approval_levels');

  // Add breach_reason column only if it doesn't exist
  if (!tableDescription.breach_reason) {
    await queryInterface.addColumn('approval_levels', 'breach_reason', {
      type: DataTypes.TEXT,
      allowNull: true,
      comment: 'Reason for TAT breach - can contain paragraph-length text'
    });
    console.log('✅ Added breach_reason column to approval_levels table');
  } else {
    console.log('ℹ️ breach_reason column already exists, skipping');
  }
}

export async function down(queryInterface: QueryInterface): Promise<void> {
  // Check if table exists
  const tables = await queryInterface.showAllTables();
  if (!tables.includes('approval_levels')) {
    return;
  }

  // Get existing columns
  const tableDescription = await queryInterface.describeTable('approval_levels');

  // Remove column only if it exists
  if (tableDescription.breach_reason) {
    await queryInterface.removeColumn('approval_levels', 'breach_reason');
    console.log('✅ Removed breach_reason column from approval_levels table');
  }
}
@@ -1,94 +0,0 @@
import { QueryInterface, QueryTypes } from 'sequelize';

/**
 * Migration to add AI model configuration entries
 * Adds CLAUDE_MODEL, OPENAI_MODEL, and GEMINI_MODEL to admin_configurations
 *
 * This migration is idempotent - it will only insert if the configs don't exist.
 * For existing databases, this ensures the new model configuration fields are available.
 * For fresh databases, the seed scripts will handle the initial population.
 */
export async function up(queryInterface: QueryInterface): Promise<void> {
  // Insert AI model configurations if they don't exist
  await queryInterface.sequelize.query(`
    INSERT INTO admin_configurations (
      config_id, config_key, config_category, config_value, value_type,
      display_name, description, default_value, is_editable, is_sensitive,
      validation_rules, ui_component, options, sort_order, requires_restart,
      last_modified_by, last_modified_at, created_at, updated_at
    ) VALUES
    (
      gen_random_uuid(),
      'CLAUDE_MODEL',
      'AI_CONFIGURATION',
      'claude-sonnet-4-20250514',
      'STRING',
      'Claude Model',
      'Claude (Anthropic) model to use for AI generation',
      'claude-sonnet-4-20250514',
      true,
      false,
      '{}'::jsonb,
      'input',
      NULL,
      27,
      false,
      NULL,
      NULL,
      NOW(),
      NOW()
    ),
    (
      gen_random_uuid(),
      'OPENAI_MODEL',
      'AI_CONFIGURATION',
      'gpt-4o',
      'STRING',
      'OpenAI Model',
      'OpenAI model to use for AI generation',
      'gpt-4o',
      true,
      false,
      '{}'::jsonb,
      'input',
      NULL,
      28,
      false,
      NULL,
      NULL,
      NOW(),
      NOW()
    ),
    (
      gen_random_uuid(),
      'GEMINI_MODEL',
      'AI_CONFIGURATION',
      'gemini-2.0-flash-lite',
      'STRING',
      'Gemini Model',
      'Gemini (Google) model to use for AI generation',
      'gemini-2.0-flash-lite',
      true,
      false,
      '{}'::jsonb,
      'input',
      NULL,
      29,
      false,
      NULL,
      NULL,
      NOW(),
      NOW()
    )
    ON CONFLICT (config_key) DO NOTHING
  `, { type: QueryTypes.INSERT });
}

export async function down(queryInterface: QueryInterface): Promise<void> {
  // Remove the AI model configurations
  await queryInterface.sequelize.query(`
    DELETE FROM admin_configurations
    WHERE config_key IN ('CLAUDE_MODEL', 'OPENAI_MODEL', 'GEMINI_MODEL')
  `, { type: QueryTypes.DELETE });
}
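Since the rows are keyed on config_key and inserted with ON CONFLICT DO NOTHING, reading a model name back is a single lookup. A minimal sketch (not part of the diff; helper name hypothetical):

import { QueryTypes, Sequelize } from 'sequelize';

// Hypothetical helper: resolve a configured AI model name,
// e.g. getConfigValue(sequelize, 'CLAUDE_MODEL').
async function getConfigValue(sequelize: Sequelize, key: string): Promise<string | null> {
  const rows = (await sequelize.query(
    'SELECT config_value FROM admin_configurations WHERE config_key = :key;',
    { replacements: { key }, type: QueryTypes.SELECT }
  )) as Array<{ config_value: string }>;
  return rows.length > 0 ? rows[0].config_value : null;
}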
@@ -1,53 +0,0 @@
import { QueryInterface, DataTypes } from 'sequelize';

module.exports = {
  async up(queryInterface: QueryInterface): Promise<void> {
    // Add notification preference columns to users table
    await queryInterface.addColumn('users', 'email_notifications_enabled', {
      type: DataTypes.BOOLEAN,
      allowNull: false,
      defaultValue: true,
      comment: 'User preference for receiving email notifications'
    });

    await queryInterface.addColumn('users', 'push_notifications_enabled', {
      type: DataTypes.BOOLEAN,
      allowNull: false,
      defaultValue: true,
      comment: 'User preference for receiving push notifications'
    });

    await queryInterface.addColumn('users', 'in_app_notifications_enabled', {
      type: DataTypes.BOOLEAN,
      allowNull: false,
      defaultValue: true,
      comment: 'User preference for receiving in-app notifications'
    });

    // Add indexes for faster queries
    await queryInterface.addIndex('users', ['email_notifications_enabled'], {
      name: 'idx_users_email_notifications_enabled'
    });

    await queryInterface.addIndex('users', ['push_notifications_enabled'], {
      name: 'idx_users_push_notifications_enabled'
    });

    await queryInterface.addIndex('users', ['in_app_notifications_enabled'], {
      name: 'idx_users_in_app_notifications_enabled'
    });
  },

  async down(queryInterface: QueryInterface): Promise<void> {
    // Remove indexes first
    await queryInterface.removeIndex('users', 'idx_users_in_app_notifications_enabled');
    await queryInterface.removeIndex('users', 'idx_users_push_notifications_enabled');
    await queryInterface.removeIndex('users', 'idx_users_email_notifications_enabled');

    // Remove columns
    await queryInterface.removeColumn('users', 'in_app_notifications_enabled');
    await queryInterface.removeColumn('users', 'push_notifications_enabled');
    await queryInterface.removeColumn('users', 'email_notifications_enabled');
  }
};
@@ -1,54 +0,0 @@
import { QueryInterface } from 'sequelize';

/**
 * Add foreign key constraint for template_id after workflow_templates table exists
 * This should run after both:
 * - 20251210-enhance-workflow-templates (creates workflow_templates table)
 * - 20251210-add-workflow-type-support (adds template_id column)
 */
export async function up(queryInterface: QueryInterface): Promise<void> {
  // Check if workflow_templates table exists
  const [tables] = await queryInterface.sequelize.query(`
    SELECT table_name
    FROM information_schema.tables
    WHERE table_schema = 'public'
      AND table_name = 'workflow_templates';
  `);

  if (tables.length > 0) {
    // Check if foreign key already exists
    const [constraints] = await queryInterface.sequelize.query(`
      SELECT constraint_name
      FROM information_schema.table_constraints
      WHERE table_schema = 'public'
        AND table_name = 'workflow_requests'
        AND constraint_name = 'workflow_requests_template_id_fkey';
    `);

    if (constraints.length === 0) {
      // Add foreign key constraint
      await queryInterface.sequelize.query(`
        ALTER TABLE workflow_requests
        ADD CONSTRAINT workflow_requests_template_id_fkey
        FOREIGN KEY (template_id)
        REFERENCES workflow_templates(template_id)
        ON UPDATE CASCADE
        ON DELETE SET NULL;
      `);
    }
  }
}

export async function down(queryInterface: QueryInterface): Promise<void> {
  // Remove foreign key constraint if it exists
  try {
    await queryInterface.sequelize.query(`
      ALTER TABLE workflow_requests
      DROP CONSTRAINT IF EXISTS workflow_requests_template_id_fkey;
    `);
  } catch (error) {
    // Ignore if constraint doesn't exist
    console.log('Note: Foreign key constraint may not exist');
  }
}
@@ -1,116 +0,0 @@
import { QueryInterface, DataTypes } from 'sequelize';

export async function up(queryInterface: QueryInterface): Promise<void> {
  try {
    // Check if columns already exist (for idempotency and backward compatibility)
    const tableDescription = await queryInterface.describeTable('workflow_requests');

    // 1. Add workflow_type column to workflow_requests (only if it doesn't exist)
    if (!tableDescription.workflow_type) {
      try {
        await queryInterface.addColumn('workflow_requests', 'workflow_type', {
          type: DataTypes.STRING(50),
          allowNull: true,
          defaultValue: 'NON_TEMPLATIZED'
        });
        console.log('✅ Added workflow_type column');
      } catch (error: any) {
        // Column might have been added manually, check if it exists now
        const updatedDescription = await queryInterface.describeTable('workflow_requests');
        if (!updatedDescription.workflow_type) {
          throw error; // Re-throw if column still doesn't exist
        }
        console.log('Note: workflow_type column already exists (may have been added manually)');
      }
    } else {
      console.log('Note: workflow_type column already exists, skipping');
    }

    // 2. Add template_id column (nullable, for admin templates)
    // Note: Foreign key constraint will be added later if workflow_templates table exists
    if (!tableDescription.template_id) {
      try {
        await queryInterface.addColumn('workflow_requests', 'template_id', {
          type: DataTypes.UUID,
          allowNull: true
        });
        console.log('✅ Added template_id column');
      } catch (error: any) {
        // Column might have been added manually, check if it exists now
        const updatedDescription = await queryInterface.describeTable('workflow_requests');
        if (!updatedDescription.template_id) {
          throw error; // Re-throw if column still doesn't exist
        }
        console.log('Note: template_id column already exists (may have been added manually)');
      }
    } else {
      console.log('Note: template_id column already exists, skipping');
    }

    // Get updated table description for index creation
    const finalTableDescription = await queryInterface.describeTable('workflow_requests');

    // 3. Create index for workflow_type (only if column exists)
    if (finalTableDescription.workflow_type) {
      try {
        await queryInterface.addIndex('workflow_requests', ['workflow_type'], {
          name: 'idx_workflow_requests_workflow_type'
        });
        console.log('✅ Created workflow_type index');
      } catch (error: any) {
        // Index might already exist, ignore error
        if (error.message?.includes('already exists') || error.message?.includes('duplicate')) {
          console.log('Note: workflow_type index already exists');
        } else {
          console.log('Note: Could not create workflow_type index:', error.message);
        }
      }
    }

    // 4. Create index for template_id (only if column exists)
    if (finalTableDescription.template_id) {
      try {
        await queryInterface.addIndex('workflow_requests', ['template_id'], {
          name: 'idx_workflow_requests_template_id'
        });
        console.log('✅ Created template_id index');
      } catch (error: any) {
        // Index might already exist, ignore error
        if (error.message?.includes('already exists') || error.message?.includes('duplicate')) {
          console.log('Note: template_id index already exists');
        } else {
          console.log('Note: Could not create template_id index:', error.message);
        }
      }
    }

    // 5. Update existing records to have workflow_type (if any exist and column exists)
    if (finalTableDescription.workflow_type) {
      try {
        await queryInterface.sequelize.query(`
          UPDATE workflow_requests
          SET workflow_type = 'NON_TEMPLATIZED'
          WHERE workflow_type IS NULL;
        `);
        console.log('✅ Updated existing records with workflow_type');
      } catch (error: any) {
        // Ignore if table is empty or other error
        console.log('Note: Could not update existing records:', error.message);
      }
    }
  } catch (error: any) {
    console.error('Migration error:', error.message);
    throw error;
  }
}

export async function down(queryInterface: QueryInterface): Promise<void> {
  // Remove indexes
  await queryInterface.removeIndex('workflow_requests', 'idx_workflow_requests_template_id');
  await queryInterface.removeIndex('workflow_requests', 'idx_workflow_requests_workflow_type');

  // Remove columns
  await queryInterface.removeColumn('workflow_requests', 'template_id');
  await queryInterface.removeColumn('workflow_requests', 'workflow_type');
}
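The describe-then-add pattern above repeats once per column; the same idea as a reusable sketch (not part of the diff; helper name hypothetical):

import { QueryInterface, ModelAttributeColumnOptions } from 'sequelize';

// Hypothetical helper: add a column only when describeTable doesn't already list it.
async function addColumnIfMissing(
  queryInterface: QueryInterface,
  table: string,
  column: string,
  spec: ModelAttributeColumnOptions
): Promise<void> {
  const description = await queryInterface.describeTable(table);
  if (!description[column]) {
    await queryInterface.addColumn(table, column, spec);
  }
}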
@@ -1,214 +0,0 @@
import { QueryInterface, DataTypes } from 'sequelize';

export async function up(queryInterface: QueryInterface): Promise<void> {
  // 1. Create dealer_claim_details table
  await queryInterface.createTable('dealer_claim_details', {
    claim_id: {
      type: DataTypes.UUID,
      primaryKey: true,
      defaultValue: DataTypes.UUIDV4
    },
    request_id: {
      type: DataTypes.UUID,
      allowNull: false,
      unique: true,
      references: {
        model: 'workflow_requests',
        key: 'request_id'
      },
      onDelete: 'CASCADE',
      onUpdate: 'CASCADE'
    },
    activity_name: {
      type: DataTypes.STRING(500),
      allowNull: false
    },
    activity_type: {
      type: DataTypes.STRING(100),
      allowNull: false
    },
    dealer_code: {
      type: DataTypes.STRING(50),
      allowNull: false
    },
    dealer_name: {
      type: DataTypes.STRING(200),
      allowNull: false
    },
    dealer_email: {
      type: DataTypes.STRING(255),
      allowNull: true
    },
    dealer_phone: {
      type: DataTypes.STRING(20),
      allowNull: true
    },
    dealer_address: {
      type: DataTypes.TEXT,
      allowNull: true
    },
    activity_date: {
      type: DataTypes.DATEONLY,
      allowNull: true
    },
    location: {
      type: DataTypes.STRING(255),
      allowNull: true
    },
    period_start_date: {
      type: DataTypes.DATEONLY,
      allowNull: true
    },
    period_end_date: {
      type: DataTypes.DATEONLY,
      allowNull: true
    },
    created_at: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW
    },
    updated_at: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW
    }
  });

  // Create indexes
  await queryInterface.addIndex('dealer_claim_details', ['request_id'], {
    name: 'idx_dealer_claim_details_request_id',
    unique: true
  });
  await queryInterface.addIndex('dealer_claim_details', ['dealer_code'], {
    name: 'idx_dealer_claim_details_dealer_code'
  });
  await queryInterface.addIndex('dealer_claim_details', ['activity_type'], {
    name: 'idx_dealer_claim_details_activity_type'
  });

  // 2. Create dealer_proposal_details table (Step 1: Dealer Proposal)
  await queryInterface.createTable('dealer_proposal_details', {
    proposal_id: {
      type: DataTypes.UUID,
      primaryKey: true,
      defaultValue: DataTypes.UUIDV4
    },
    request_id: {
      type: DataTypes.UUID,
      allowNull: false,
      unique: true,
      references: {
        model: 'workflow_requests',
        key: 'request_id'
      },
      onDelete: 'CASCADE',
      onUpdate: 'CASCADE'
    },
    proposal_document_path: {
      type: DataTypes.STRING(500),
      allowNull: true
    },
    proposal_document_url: {
      type: DataTypes.STRING(500),
      allowNull: true
    },
    total_estimated_budget: {
      type: DataTypes.DECIMAL(15, 2),
      allowNull: true
    },
    timeline_mode: {
      type: DataTypes.STRING(10),
      allowNull: true
    },
    expected_completion_date: {
      type: DataTypes.DATEONLY,
      allowNull: true
    },
    expected_completion_days: {
      type: DataTypes.INTEGER,
      allowNull: true
    },
    dealer_comments: {
      type: DataTypes.TEXT,
      allowNull: true
    },
    submitted_at: {
      type: DataTypes.DATE,
      allowNull: true
    },
    created_at: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW
    },
    updated_at: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW
    }
  });

  await queryInterface.addIndex('dealer_proposal_details', ['request_id'], {
    name: 'idx_dealer_proposal_details_request_id',
    unique: true
  });

  // 3. Create dealer_completion_details table (Step 5: Dealer Completion)
  await queryInterface.createTable('dealer_completion_details', {
    completion_id: {
      type: DataTypes.UUID,
      primaryKey: true,
      defaultValue: DataTypes.UUIDV4
    },
    request_id: {
      type: DataTypes.UUID,
      allowNull: false,
      unique: true,
      references: {
        model: 'workflow_requests',
        key: 'request_id'
      },
      onDelete: 'CASCADE',
      onUpdate: 'CASCADE'
    },
    activity_completion_date: {
      type: DataTypes.DATEONLY,
      allowNull: false
    },
    number_of_participants: {
      type: DataTypes.INTEGER,
      allowNull: true
    },
    total_closed_expenses: {
      type: DataTypes.DECIMAL(15, 2),
      allowNull: true
    },
    submitted_at: {
      type: DataTypes.DATE,
      allowNull: true
    },
    created_at: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW
    },
    updated_at: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW
    }
  });

  await queryInterface.addIndex('dealer_completion_details', ['request_id'], {
    name: 'idx_dealer_completion_details_request_id',
    unique: true
  });
}

export async function down(queryInterface: QueryInterface): Promise<void> {
  await queryInterface.dropTable('dealer_completion_details');
  await queryInterface.dropTable('dealer_proposal_details');
  await queryInterface.dropTable('dealer_claim_details');
}
@@ -1,194 +0,0 @@
import { QueryInterface, DataTypes } from 'sequelize';

/**
 * Migration: Create dealer_proposal_cost_items table
 *
 * Purpose: Separate table for cost breakups to enable better querying, reporting, and data integrity
 * This replaces the JSONB costBreakup field in dealer_proposal_details
 *
 * Benefits:
 * - Better querying and filtering
 * - Easier to update individual cost items
 * - Better for analytics and reporting
 * - Maintains referential integrity
 */
export async function up(queryInterface: QueryInterface): Promise<void> {
  // Check if table already exists
  const [tables] = await queryInterface.sequelize.query(`
    SELECT table_name
    FROM information_schema.tables
    WHERE table_schema = 'public'
      AND table_name = 'dealer_proposal_cost_items';
  `);

  if (tables.length === 0) {
    // Create dealer_proposal_cost_items table
    await queryInterface.createTable('dealer_proposal_cost_items', {
      cost_item_id: {
        type: DataTypes.UUID,
        primaryKey: true,
        defaultValue: DataTypes.UUIDV4,
        field: 'cost_item_id'
      },
      proposal_id: {
        type: DataTypes.UUID,
        allowNull: false,
        field: 'proposal_id',
        references: {
          model: 'dealer_proposal_details',
          key: 'proposal_id'
        },
        onDelete: 'CASCADE',
        onUpdate: 'CASCADE'
      },
      request_id: {
        type: DataTypes.UUID,
        allowNull: false,
        field: 'request_id',
        references: {
          model: 'workflow_requests',
          key: 'request_id'
        },
        onDelete: 'CASCADE',
        onUpdate: 'CASCADE',
        comment: 'Denormalized for easier querying without joins'
      },
      item_description: {
        type: DataTypes.STRING(500),
        allowNull: false,
        field: 'item_description'
      },
      amount: {
        type: DataTypes.DECIMAL(15, 2),
        allowNull: false,
        field: 'amount',
        comment: 'Cost amount in INR'
      },
      item_order: {
        type: DataTypes.INTEGER,
        allowNull: false,
        defaultValue: 0,
        field: 'item_order',
        comment: 'Order of item in the cost breakdown list'
      },
      created_at: {
        type: DataTypes.DATE,
        allowNull: false,
        defaultValue: DataTypes.NOW,
        field: 'created_at'
      },
      updated_at: {
        type: DataTypes.DATE,
        allowNull: false,
        defaultValue: DataTypes.NOW,
        field: 'updated_at'
      }
    });

    // Create indexes for better query performance
    await queryInterface.addIndex('dealer_proposal_cost_items', ['proposal_id'], {
      name: 'idx_proposal_cost_items_proposal_id'
    });

    await queryInterface.addIndex('dealer_proposal_cost_items', ['request_id'], {
      name: 'idx_proposal_cost_items_request_id'
    });

    await queryInterface.addIndex('dealer_proposal_cost_items', ['proposal_id', 'item_order'], {
      name: 'idx_proposal_cost_items_proposal_order'
    });

    console.log('✅ Created dealer_proposal_cost_items table');
  } else {
    console.log('Note: dealer_proposal_cost_items table already exists');
  }

  // Migrate existing JSONB costBreakup data to the new table
  try {
    const [existingProposals] = await queryInterface.sequelize.query(`
      SELECT proposal_id, request_id, cost_breakup
      FROM dealer_proposal_details
      WHERE cost_breakup IS NOT NULL
        AND cost_breakup::text != 'null'
        AND cost_breakup::text != '[]';
    `);

    if (Array.isArray(existingProposals) && existingProposals.length > 0) {
      console.log(`📦 Migrating ${existingProposals.length} existing proposal(s) with cost breakups...`);

      for (const proposal of existingProposals as any[]) {
        const proposalId = proposal.proposal_id;
        const requestId = proposal.request_id;
        let costBreakup = proposal.cost_breakup;

        // Parse JSONB if it's a string
        if (typeof costBreakup === 'string') {
          try {
            costBreakup = JSON.parse(costBreakup);
          } catch (e) {
            console.warn(`⚠️ Failed to parse costBreakup for proposal ${proposalId}:`, e);
            continue;
          }
        }

        // Ensure it's an array
        if (!Array.isArray(costBreakup)) {
          console.warn(`⚠️ costBreakup is not an array for proposal ${proposalId}`);
          continue;
        }

        // Insert cost items. cost_item_id is supplied explicitly because the
        // UUIDV4 default from createTable is applied by the model layer, not
        // as a database-level default, so a raw INSERT must provide it.
        for (let i = 0; i < costBreakup.length; i++) {
          const item = costBreakup[i];
          if (item && item.description && item.amount !== undefined) {
            await queryInterface.sequelize.query(`
              INSERT INTO dealer_proposal_cost_items
                (cost_item_id, proposal_id, request_id, item_description, amount, item_order, created_at, updated_at)
              VALUES (gen_random_uuid(), :proposalId, :requestId, :description, :amount, :order, NOW(), NOW())
              ON CONFLICT DO NOTHING;
            `, {
              replacements: {
                proposalId,
                requestId,
                description: item.description,
                amount: item.amount,
                order: i
              }
            });
          }
        }
      }

      console.log('✅ Migrated existing cost breakups to new table');
    }
  } catch (error: any) {
    console.warn('⚠️ Could not migrate existing cost breakups:', error.message);
    // Don't fail the schema migration if copying existing data fails
  }
}

export async function down(queryInterface: QueryInterface): Promise<void> {
  // Drop indexes first
  try {
    await queryInterface.removeIndex('dealer_proposal_cost_items', 'idx_proposal_cost_items_proposal_order');
  } catch (e) {
    // Index might not exist
  }

  try {
    await queryInterface.removeIndex('dealer_proposal_cost_items', 'idx_proposal_cost_items_request_id');
  } catch (e) {
    // Index might not exist
  }

  try {
    await queryInterface.removeIndex('dealer_proposal_cost_items', 'idx_proposal_cost_items_proposal_id');
  } catch (e) {
    // Index might not exist
  }

  // Drop table
  await queryInterface.dropTable('dealer_proposal_cost_items');
  console.log('✅ Dropped dealer_proposal_cost_items table');
}
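With the breakup normalized out of JSONB, the ordered list is a plain query on (proposal_id, item_order), which the composite index above serves. A minimal sketch (not part of the diff; helper name hypothetical):

import { QueryTypes, Sequelize } from 'sequelize';

// Hypothetical helper: return a proposal's cost items in display order.
async function getCostBreakup(sequelize: Sequelize, proposalId: string) {
  return sequelize.query(
    `SELECT item_description, amount
     FROM dealer_proposal_cost_items
     WHERE proposal_id = :proposalId
     ORDER BY item_order;`,
    { replacements: { proposalId }, type: QueryTypes.SELECT }
  );
}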
@ -1,174 +0,0 @@
import { QueryInterface, DataTypes } from 'sequelize';

export async function up(queryInterface: QueryInterface): Promise<void> {
  // Check if workflow_templates table exists, if not create it
  const [tables] = await queryInterface.sequelize.query(`
    SELECT table_name
    FROM information_schema.tables
    WHERE table_schema = 'public'
    AND table_name = 'workflow_templates';
  `);

  if (tables.length === 0) {
    // Create workflow_templates table if it doesn't exist
    await queryInterface.createTable('workflow_templates', {
      template_id: {
        type: DataTypes.UUID,
        primaryKey: true,
        defaultValue: DataTypes.UUIDV4
      },
      template_name: {
        type: DataTypes.STRING(200),
        allowNull: false
      },
      template_code: {
        type: DataTypes.STRING(50),
        allowNull: true,
        unique: true
      },
      template_description: {
        type: DataTypes.TEXT,
        allowNull: true
      },
      template_category: {
        type: DataTypes.STRING(100),
        allowNull: true
      },
      workflow_type: {
        type: DataTypes.STRING(50),
        allowNull: true
      },
      approval_levels_config: {
        type: DataTypes.JSONB,
        allowNull: true
      },
      default_tat_hours: {
        type: DataTypes.DECIMAL(10, 2),
        allowNull: true,
        defaultValue: 24
      },
      form_steps_config: {
        type: DataTypes.JSONB,
        allowNull: true
      },
      user_field_mappings: {
        type: DataTypes.JSONB,
        allowNull: true
      },
      dynamic_approver_config: {
        type: DataTypes.JSONB,
        allowNull: true
      },
      is_active: {
        type: DataTypes.BOOLEAN,
        allowNull: false,
        defaultValue: true
      },
      is_system_template: {
        type: DataTypes.BOOLEAN,
        allowNull: false,
        defaultValue: false
      },
      usage_count: {
        type: DataTypes.INTEGER,
        allowNull: false,
        defaultValue: 0
      },
      created_by: {
        type: DataTypes.UUID,
        allowNull: true,
        references: {
          model: 'users',
          key: 'user_id'
        }
      },
      created_at: {
        type: DataTypes.DATE,
        allowNull: false,
        defaultValue: DataTypes.NOW
      },
      updated_at: {
        type: DataTypes.DATE,
        allowNull: false,
        defaultValue: DataTypes.NOW
      }
    });

    // Create indexes
    await queryInterface.addIndex('workflow_templates', ['template_code'], {
      name: 'idx_workflow_templates_template_code',
      unique: true
    });
    await queryInterface.addIndex('workflow_templates', ['workflow_type'], {
      name: 'idx_workflow_templates_workflow_type'
    });
    await queryInterface.addIndex('workflow_templates', ['is_active'], {
      name: 'idx_workflow_templates_is_active'
    });
  } else {
    // Table exists, add new columns if they don't exist
    const tableDescription = await queryInterface.describeTable('workflow_templates');

    if (!tableDescription.form_steps_config) {
      await queryInterface.addColumn('workflow_templates', 'form_steps_config', {
        type: DataTypes.JSONB,
        allowNull: true
      });
    }

    if (!tableDescription.user_field_mappings) {
      await queryInterface.addColumn('workflow_templates', 'user_field_mappings', {
        type: DataTypes.JSONB,
        allowNull: true
      });
    }

    if (!tableDescription.dynamic_approver_config) {
      await queryInterface.addColumn('workflow_templates', 'dynamic_approver_config', {
        type: DataTypes.JSONB,
        allowNull: true
      });
    }

    if (!tableDescription.workflow_type) {
      await queryInterface.addColumn('workflow_templates', 'workflow_type', {
        type: DataTypes.STRING(50),
        allowNull: true
      });
    }

    if (!tableDescription.is_system_template) {
      await queryInterface.addColumn('workflow_templates', 'is_system_template', {
        type: DataTypes.BOOLEAN,
        allowNull: false,
        defaultValue: false
      });
    }
  }
}

export async function down(queryInterface: QueryInterface): Promise<void> {
  // Remove columns if they exist
  const tableDescription = await queryInterface.describeTable('workflow_templates');

  if (tableDescription.dynamic_approver_config) {
    await queryInterface.removeColumn('workflow_templates', 'dynamic_approver_config');
  }

  if (tableDescription.user_field_mappings) {
    await queryInterface.removeColumn('workflow_templates', 'user_field_mappings');
  }

  if (tableDescription.form_steps_config) {
    await queryInterface.removeColumn('workflow_templates', 'form_steps_config');
  }

  if (tableDescription.workflow_type) {
    await queryInterface.removeColumn('workflow_templates', 'workflow_type');
  }

  if (tableDescription.is_system_template) {
    await queryInterface.removeColumn('workflow_templates', 'is_system_template');
  }
}
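// Editor's sketch (illustrative, not in the migration above): Sequelize can also
// report existing tables directly, which avoids the raw information_schema query.
// Assumes the default 'public' schema and the QueryInterface import from the file above.
async function tableExists(queryInterface: QueryInterface, tableName: string): Promise<boolean> {
  const existing = await queryInterface.showAllTables();
  return existing.includes(tableName);
}
// Usage: if (!(await tableExists(queryInterface, 'workflow_templates'))) { /* create it */ }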
@ -1,197 +0,0 @@
import { QueryInterface, DataTypes } from 'sequelize';

export async function up(queryInterface: QueryInterface): Promise<void> {
  // Create claim_budget_tracking table for comprehensive budget management
  await queryInterface.createTable('claim_budget_tracking', {
    budget_id: {
      type: DataTypes.UUID,
      primaryKey: true,
      defaultValue: DataTypes.UUIDV4
    },
    request_id: {
      type: DataTypes.UUID,
      allowNull: false,
      unique: true,
      references: {
        model: 'workflow_requests',
        key: 'request_id'
      },
      onDelete: 'CASCADE',
      onUpdate: 'CASCADE'
    },
    // Initial Budget (from claim creation)
    initial_estimated_budget: {
      type: DataTypes.DECIMAL(15, 2),
      allowNull: true,
      comment: 'Initial estimated budget when claim was created'
    },
    // Proposal Budget (from Step 1 - Dealer Proposal)
    proposal_estimated_budget: {
      type: DataTypes.DECIMAL(15, 2),
      allowNull: true,
      comment: 'Total estimated budget from dealer proposal'
    },
    proposal_submitted_at: {
      type: DataTypes.DATE,
      allowNull: true,
      comment: 'When dealer submitted proposal'
    },
    // Approved Budget (from Step 2 - Requestor Evaluation)
    approved_budget: {
      type: DataTypes.DECIMAL(15, 2),
      allowNull: true,
      comment: 'Budget approved by requestor in Step 2'
    },
    approved_at: {
      type: DataTypes.DATE,
      allowNull: true,
      comment: 'When budget was approved by requestor'
    },
    approved_by: {
      type: DataTypes.UUID,
      allowNull: true,
      references: {
        model: 'users',
        key: 'user_id'
      },
      onDelete: 'SET NULL',
      onUpdate: 'CASCADE',
      comment: 'User who approved the budget'
    },
    // IO Blocked Budget (from Step 3 - Department Lead)
    io_blocked_amount: {
      type: DataTypes.DECIMAL(15, 2),
      allowNull: true,
      comment: 'Amount blocked in IO (from internal_orders table)'
    },
    io_blocked_at: {
      type: DataTypes.DATE,
      allowNull: true,
      comment: 'When budget was blocked in IO'
    },
    // Closed Expenses (from Step 5 - Dealer Completion)
    closed_expenses: {
      type: DataTypes.DECIMAL(15, 2),
      allowNull: true,
      comment: 'Total closed expenses from completion documents'
    },
    closed_expenses_submitted_at: {
      type: DataTypes.DATE,
      allowNull: true,
      comment: 'When completion expenses were submitted'
    },
    // Final Claim Amount (from Step 6 - Requestor Claim Approval)
    final_claim_amount: {
      type: DataTypes.DECIMAL(15, 2),
      allowNull: true,
      comment: 'Final claim amount approved/modified by requestor in Step 6'
    },
    final_claim_amount_approved_at: {
      type: DataTypes.DATE,
      allowNull: true,
      comment: 'When final claim amount was approved'
    },
    final_claim_amount_approved_by: {
      type: DataTypes.UUID,
      allowNull: true,
      references: {
        model: 'users',
        key: 'user_id'
      },
      onDelete: 'SET NULL',
      onUpdate: 'CASCADE',
      comment: 'User who approved final claim amount'
    },
    // Credit Note (from Step 8 - Finance)
    credit_note_amount: {
      type: DataTypes.DECIMAL(15, 2),
      allowNull: true,
      comment: 'Credit note amount issued by finance'
    },
    credit_note_issued_at: {
      type: DataTypes.DATE,
      allowNull: true,
      comment: 'When credit note was issued'
    },
    // Budget Status
    budget_status: {
      type: DataTypes.ENUM('DRAFT', 'PROPOSED', 'APPROVED', 'BLOCKED', 'CLOSED', 'SETTLED'),
      defaultValue: 'DRAFT',
      allowNull: false,
      comment: 'Current status of budget lifecycle'
    },
    // Currency
    currency: {
      type: DataTypes.STRING(3),
      defaultValue: 'INR',
      allowNull: false,
      comment: 'Currency code (INR, USD, etc.)'
    },
    // Budget Variance
    variance_amount: {
      type: DataTypes.DECIMAL(15, 2),
      allowNull: true,
      comment: 'Difference between approved and closed expenses (closed - approved)'
    },
    variance_percentage: {
      type: DataTypes.DECIMAL(5, 2),
      allowNull: true,
      comment: 'Variance as percentage of approved budget'
    },
    // Audit fields
    last_modified_by: {
      type: DataTypes.UUID,
      allowNull: true,
      references: {
        model: 'users',
        key: 'user_id'
      },
      onDelete: 'SET NULL',
      onUpdate: 'CASCADE',
      comment: 'Last user who modified budget'
    },
    last_modified_at: {
      type: DataTypes.DATE,
      allowNull: true,
      comment: 'When budget was last modified'
    },
    modification_reason: {
      type: DataTypes.TEXT,
      allowNull: true,
      comment: 'Reason for budget modification'
    },
    created_at: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW
    },
    updated_at: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW
    }
  });

  // Create indexes
  await queryInterface.addIndex('claim_budget_tracking', ['request_id'], {
    name: 'idx_claim_budget_tracking_request_id',
    unique: true
  });

  await queryInterface.addIndex('claim_budget_tracking', ['budget_status'], {
    name: 'idx_claim_budget_tracking_status'
  });

  await queryInterface.addIndex('claim_budget_tracking', ['approved_by'], {
    name: 'idx_claim_budget_tracking_approved_by'
  });

  await queryInterface.addIndex('claim_budget_tracking', ['final_claim_amount_approved_by'], {
    name: 'idx_claim_budget_tracking_final_approved_by'
  });
}

export async function down(queryInterface: QueryInterface): Promise<void> {
  await queryInterface.dropTable('claim_budget_tracking');
}
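// Editor's sketch: how the variance columns above are presumably derived, per
// their own comments (variance_amount = closed - approved; variance_percentage
// is the variance relative to the approved budget). Hypothetical helper, not in
// the source.
function computeVariance(approvedBudget: number, closedExpenses: number): { varianceAmount: number; variancePercentage: number | null } {
  const varianceAmount = closedExpenses - approvedBudget;
  // Guard against division by zero when no budget was approved
  const variancePercentage = approvedBudget !== 0 ? (varianceAmount / approvedBudget) * 100 : null;
  return { varianceAmount, variancePercentage };
}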
@ -1,95 +0,0 @@
import { QueryInterface, DataTypes } from 'sequelize';

export async function up(queryInterface: QueryInterface): Promise<void> {
  // Create internal_orders table for storing IO (Internal Order) details
  await queryInterface.createTable('internal_orders', {
    io_id: {
      type: DataTypes.UUID,
      primaryKey: true,
      defaultValue: DataTypes.UUIDV4
    },
    request_id: {
      type: DataTypes.UUID,
      allowNull: false,
      references: {
        model: 'workflow_requests',
        key: 'request_id'
      },
      onDelete: 'CASCADE',
      onUpdate: 'CASCADE'
    },
    io_number: {
      type: DataTypes.STRING(50),
      allowNull: false
    },
    io_remark: {
      type: DataTypes.TEXT,
      allowNull: true
    },
    io_available_balance: {
      type: DataTypes.DECIMAL(15, 2),
      allowNull: true
    },
    io_blocked_amount: {
      type: DataTypes.DECIMAL(15, 2),
      allowNull: true
    },
    io_remaining_balance: {
      type: DataTypes.DECIMAL(15, 2),
      allowNull: true
    },
    organized_by: {
      type: DataTypes.UUID,
      allowNull: true,
      references: {
        model: 'users',
        key: 'user_id'
      },
      onDelete: 'SET NULL',
      onUpdate: 'CASCADE'
    },
    organized_at: {
      type: DataTypes.DATE,
      allowNull: true
    },
    sap_document_number: {
      type: DataTypes.STRING(100),
      allowNull: true
    },
    status: {
      type: DataTypes.ENUM('PENDING', 'BLOCKED', 'RELEASED', 'CANCELLED'),
      defaultValue: 'PENDING',
      allowNull: false
    },
    created_at: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW
    },
    updated_at: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW
    }
  });

  // Create indexes
  await queryInterface.addIndex('internal_orders', ['io_number'], {
    name: 'idx_internal_orders_io_number'
  });

  await queryInterface.addIndex('internal_orders', ['organized_by'], {
    name: 'idx_internal_orders_organized_by'
  });

  // Create unique constraint: one IO per request (unique index on request_id)
  await queryInterface.addIndex('internal_orders', ['request_id'], {
    name: 'idx_internal_orders_request_id_unique',
    unique: true
  });
}

export async function down(queryInterface: QueryInterface): Promise<void> {
  await queryInterface.dropTable('internal_orders');
}
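// Editor's note: from the column names, the balances presumably satisfy
// io_remaining_balance = io_available_balance - io_blocked_amount. This is an
// assumption; the migration does not enforce it. A CHECK constraint could make
// it explicit (illustrative DDL only):
//   ALTER TABLE internal_orders
//     ADD CONSTRAINT chk_io_remaining_balance
//     CHECK (io_remaining_balance = io_available_balance - io_blocked_amount);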
@ -1,162 +0,0 @@
import { QueryInterface, DataTypes } from 'sequelize';

export async function up(queryInterface: QueryInterface): Promise<void> {
  await queryInterface.createTable('claim_invoices', {
    invoice_id: {
      type: DataTypes.UUID,
      primaryKey: true,
      defaultValue: DataTypes.UUIDV4,
    },
    request_id: {
      type: DataTypes.UUID,
      allowNull: false,
      unique: true, // one invoice per request (adjust later if multiples needed)
      references: { model: 'workflow_requests', key: 'request_id' },
      onDelete: 'CASCADE',
      onUpdate: 'CASCADE',
    },
    invoice_number: {
      type: DataTypes.STRING(100),
      allowNull: true,
    },
    invoice_date: {
      type: DataTypes.DATEONLY,
      allowNull: true,
    },
    invoice_amount: {
      type: DataTypes.DECIMAL(15, 2),
      allowNull: true,
    },
    dms_number: {
      type: DataTypes.STRING(100),
      allowNull: true,
    },
    invoice_file_path: {
      type: DataTypes.STRING(500),
      allowNull: true,
    },
    generation_status: {
      type: DataTypes.STRING(50), // e.g., PENDING, GENERATED, SENT, FAILED, CANCELLED
      allowNull: true,
    },
    error_message: {
      type: DataTypes.TEXT,
      allowNull: true,
    },
    generated_at: {
      type: DataTypes.DATE,
      allowNull: true,
    },
    description: {
      type: DataTypes.TEXT,
      allowNull: true,
    },
    created_at: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW,
    },
    updated_at: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW,
    },
  });

  await queryInterface.addIndex('claim_invoices', ['request_id'], { name: 'idx_claim_invoices_request_id', unique: true });
  await queryInterface.addIndex('claim_invoices', ['invoice_number'], { name: 'idx_claim_invoices_invoice_number' });
  await queryInterface.addIndex('claim_invoices', ['dms_number'], { name: 'idx_claim_invoices_dms_number' });
  await queryInterface.addIndex('claim_invoices', ['generation_status'], { name: 'idx_claim_invoices_status' });

  await queryInterface.createTable('claim_credit_notes', {
    credit_note_id: {
      type: DataTypes.UUID,
      primaryKey: true,
      defaultValue: DataTypes.UUIDV4,
    },
    request_id: {
      type: DataTypes.UUID,
      allowNull: false,
      unique: true, // one credit note per request (adjust later if multiples needed)
      references: { model: 'workflow_requests', key: 'request_id' },
      onDelete: 'CASCADE',
      onUpdate: 'CASCADE',
    },
    invoice_id: {
      type: DataTypes.UUID,
      allowNull: true,
      references: { model: 'claim_invoices', key: 'invoice_id' },
      onDelete: 'SET NULL',
      onUpdate: 'CASCADE',
    },
    credit_note_number: {
      type: DataTypes.STRING(100),
      allowNull: true,
    },
    credit_note_date: {
      type: DataTypes.DATEONLY,
      allowNull: true,
    },
    credit_amount: {
      type: DataTypes.DECIMAL(15, 2),
      allowNull: true,
    },
    sap_document_number: {
      type: DataTypes.STRING(100),
      allowNull: true,
    },
    credit_note_file_path: {
      type: DataTypes.STRING(500),
      allowNull: true,
    },
    confirmation_status: {
      type: DataTypes.STRING(50), // e.g., PENDING, GENERATED, CONFIRMED, FAILED, CANCELLED
      allowNull: true,
    },
    error_message: {
      type: DataTypes.TEXT,
      allowNull: true,
    },
    confirmed_by: {
      type: DataTypes.UUID,
      allowNull: true,
      references: { model: 'users', key: 'user_id' },
      onDelete: 'SET NULL',
      onUpdate: 'CASCADE',
    },
    confirmed_at: {
      type: DataTypes.DATE,
      allowNull: true,
    },
    reason: {
      type: DataTypes.TEXT,
      allowNull: true,
    },
    description: {
      type: DataTypes.TEXT,
      allowNull: true,
    },
    created_at: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW,
    },
    updated_at: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW,
    },
  });

  await queryInterface.addIndex('claim_credit_notes', ['request_id'], { name: 'idx_claim_credit_notes_request_id', unique: true });
  await queryInterface.addIndex('claim_credit_notes', ['invoice_id'], { name: 'idx_claim_credit_notes_invoice_id' });
  await queryInterface.addIndex('claim_credit_notes', ['credit_note_number'], { name: 'idx_claim_credit_notes_number' });
  await queryInterface.addIndex('claim_credit_notes', ['sap_document_number'], { name: 'idx_claim_credit_notes_sap_doc' });
  await queryInterface.addIndex('claim_credit_notes', ['confirmation_status'], { name: 'idx_claim_credit_notes_status' });
}

export async function down(queryInterface: QueryInterface): Promise<void> {
  await queryInterface.dropTable('claim_credit_notes');
  await queryInterface.dropTable('claim_invoices');
}
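// Editor's note: the unique index on request_id means at most one invoice per
// request, so application writes can use Postgres upsert semantics. Illustrative
// SQL only (column list trimmed; gen_random_uuid() assumes Postgres 13+ or the
// pgcrypto extension):
//   INSERT INTO claim_invoices (invoice_id, request_id, invoice_number, invoice_amount)
//   VALUES (gen_random_uuid(), :requestId, :invoiceNumber, :amount)
//   ON CONFLICT (request_id) DO UPDATE
//     SET invoice_number = EXCLUDED.invoice_number,
//         invoice_amount = EXCLUDED.invoice_amount;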
@ -1,68 +0,0 @@
import { QueryInterface, DataTypes } from 'sequelize';

/**
 * Helper function to check if a column exists in a table
 */
async function columnExists(
  queryInterface: QueryInterface,
  tableName: string,
  columnName: string
): Promise<boolean> {
  try {
    const tableDescription = await queryInterface.describeTable(tableName);
    return columnName in tableDescription;
  } catch (error) {
    return false;
  }
}

export async function up(queryInterface: QueryInterface): Promise<void> {
  const columnsToRemove = [
    'dms_number',
    'e_invoice_number',
    'e_invoice_date',
    'credit_note_number',
    'credit_note_date',
    'credit_note_amount',
  ];

  // Only remove columns if they exist
  // This handles the case where dealer_claim_details was created without these columns
  for (const columnName of columnsToRemove) {
    const exists = await columnExists(queryInterface, 'dealer_claim_details', columnName);
    if (exists) {
      await queryInterface.removeColumn('dealer_claim_details', columnName);
      console.log(` ✅ Removed column: ${columnName}`);
    } else {
      console.log(` ⏭️ Column ${columnName} does not exist, skipping...`);
    }
  }
}

export async function down(queryInterface: QueryInterface): Promise<void> {
  await queryInterface.addColumn('dealer_claim_details', 'dms_number', {
    type: DataTypes.STRING(100),
    allowNull: true,
  });
  await queryInterface.addColumn('dealer_claim_details', 'e_invoice_number', {
    type: DataTypes.STRING(100),
    allowNull: true,
  });
  await queryInterface.addColumn('dealer_claim_details', 'e_invoice_date', {
    type: DataTypes.DATEONLY,
    allowNull: true,
  });
  await queryInterface.addColumn('dealer_claim_details', 'credit_note_number', {
    type: DataTypes.STRING(100),
    allowNull: true,
  });
  await queryInterface.addColumn('dealer_claim_details', 'credit_note_date', {
    type: DataTypes.DATEONLY,
    allowNull: true,
  });
  await queryInterface.addColumn('dealer_claim_details', 'credit_note_amount', {
    type: DataTypes.DECIMAL(15, 2),
    allowNull: true,
  });
}
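// Editor's note on the columnExists helper above: describeTable throws if the
// table itself does not exist, so the try/catch makes columnExists return false
// for both a missing column and a missing table - the safe answer for this guard.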
@ -1,55 +0,0 @@
import { QueryInterface, DataTypes } from 'sequelize';

export async function up(queryInterface: QueryInterface): Promise<void> {
  await queryInterface.createTable('dealer_completion_expenses', {
    expense_id: {
      type: DataTypes.UUID,
      primaryKey: true,
      defaultValue: DataTypes.UUIDV4,
    },
    request_id: {
      type: DataTypes.UUID,
      allowNull: false,
      references: { model: 'workflow_requests', key: 'request_id' },
      onDelete: 'CASCADE',
      onUpdate: 'CASCADE',
    },
    completion_id: {
      type: DataTypes.UUID,
      allowNull: true,
      references: { model: 'dealer_completion_details', key: 'completion_id' },
      onDelete: 'CASCADE',
      onUpdate: 'CASCADE',
    },
    description: {
      type: DataTypes.STRING(500),
      allowNull: false,
    },
    amount: {
      type: DataTypes.DECIMAL(15, 2),
      allowNull: false,
    },
    created_at: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW,
    },
    updated_at: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW,
    },
  });

  await queryInterface.addIndex('dealer_completion_expenses', ['request_id'], {
    name: 'idx_dealer_completion_expenses_request_id',
  });
  await queryInterface.addIndex('dealer_completion_expenses', ['completion_id'], {
    name: 'idx_dealer_completion_expenses_completion_id',
  });
}

export async function down(queryInterface: QueryInterface): Promise<void> {
  await queryInterface.dropTable('dealer_completion_expenses');
}
@ -1,240 +0,0 @@
import { QueryInterface, DataTypes } from 'sequelize';

/**
 * Helper function to check if a column exists in a table
 */
async function columnExists(
  queryInterface: QueryInterface,
  tableName: string,
  columnName: string
): Promise<boolean> {
  try {
    const tableDescription = await queryInterface.describeTable(tableName);
    return columnName in tableDescription;
  } catch (error) {
    return false;
  }
}

/**
 * Migration: Fix column names in claim_invoices and claim_credit_notes tables
 *
 * This migration handles the case where tables were created with old column names
 * and need to be updated to match the new schema.
 */
export async function up(queryInterface: QueryInterface): Promise<void> {
  try {
    // Check if claim_invoices table exists
    const [invoiceTables] = await queryInterface.sequelize.query(`
      SELECT table_name
      FROM information_schema.tables
      WHERE table_schema = 'public'
      AND table_name = 'claim_invoices';
    `);

    if (invoiceTables.length > 0) {
      // Fix claim_invoices table
      const hasOldAmount = await columnExists(queryInterface, 'claim_invoices', 'amount');
      const hasNewAmount = await columnExists(queryInterface, 'claim_invoices', 'invoice_amount');

      if (hasOldAmount && !hasNewAmount) {
        // Rename amount to invoice_amount
        await queryInterface.renameColumn('claim_invoices', 'amount', 'invoice_amount');
        console.log('✅ Renamed claim_invoices.amount to invoice_amount');
      } else if (!hasOldAmount && !hasNewAmount) {
        // Add invoice_amount if neither exists
        await queryInterface.addColumn('claim_invoices', 'invoice_amount', {
          type: DataTypes.DECIMAL(15, 2),
          allowNull: true,
        });
        console.log('✅ Added invoice_amount column to claim_invoices');
      } else if (hasNewAmount) {
        console.log('✅ invoice_amount column already exists in claim_invoices');
      }

      // Check for status vs generation_status
      const hasStatus = await columnExists(queryInterface, 'claim_invoices', 'status');
      const hasGenerationStatus = await columnExists(queryInterface, 'claim_invoices', 'generation_status');

      if (hasStatus && !hasGenerationStatus) {
        // Rename status to generation_status
        await queryInterface.renameColumn('claim_invoices', 'status', 'generation_status');
        console.log('✅ Renamed claim_invoices.status to generation_status');
      } else if (!hasStatus && !hasGenerationStatus) {
        // Add generation_status if neither exists
        await queryInterface.addColumn('claim_invoices', 'generation_status', {
          type: DataTypes.STRING(50),
          allowNull: true,
        });
        console.log('✅ Added generation_status column to claim_invoices');
      } else if (hasGenerationStatus) {
        console.log('✅ generation_status column already exists in claim_invoices');
      }
    }

    // Check if claim_credit_notes table exists
    const [creditNoteTables] = await queryInterface.sequelize.query(`
      SELECT table_name
      FROM information_schema.tables
      WHERE table_schema = 'public'
      AND table_name = 'claim_credit_notes';
    `);

    if (creditNoteTables.length > 0) {
      // Fix claim_credit_notes table
      const hasOldAmount = await columnExists(queryInterface, 'claim_credit_notes', 'credit_note_amount');
      const hasNewAmount = await columnExists(queryInterface, 'claim_credit_notes', 'credit_amount');

      if (hasOldAmount && !hasNewAmount) {
        // Rename credit_note_amount to credit_amount
        await queryInterface.renameColumn('claim_credit_notes', 'credit_note_amount', 'credit_amount');
        console.log('✅ Renamed claim_credit_notes.credit_note_amount to credit_amount');
      } else if (!hasOldAmount && !hasNewAmount) {
        // Add credit_amount if neither exists
        await queryInterface.addColumn('claim_credit_notes', 'credit_amount', {
          type: DataTypes.DECIMAL(15, 2),
          allowNull: true,
        });
        console.log('✅ Added credit_amount column to claim_credit_notes');
      } else if (hasNewAmount) {
        console.log('✅ credit_amount column already exists in claim_credit_notes');
      }

      // Check for status vs confirmation_status
      const hasStatus = await columnExists(queryInterface, 'claim_credit_notes', 'status');
      const hasConfirmationStatus = await columnExists(queryInterface, 'claim_credit_notes', 'confirmation_status');

      if (hasStatus && !hasConfirmationStatus) {
        // Rename status to confirmation_status
        await queryInterface.renameColumn('claim_credit_notes', 'status', 'confirmation_status');
        console.log('✅ Renamed claim_credit_notes.status to confirmation_status');
      } else if (!hasStatus && !hasConfirmationStatus) {
        // Add confirmation_status if neither exists
        await queryInterface.addColumn('claim_credit_notes', 'confirmation_status', {
          type: DataTypes.STRING(50),
          allowNull: true,
        });
        console.log('✅ Added confirmation_status column to claim_credit_notes');
      } else if (hasConfirmationStatus) {
        console.log('✅ confirmation_status column already exists in claim_credit_notes');
      }

      // Ensure invoice_id column exists
      const hasInvoiceId = await columnExists(queryInterface, 'claim_credit_notes', 'invoice_id');
      if (!hasInvoiceId) {
        await queryInterface.addColumn('claim_credit_notes', 'invoice_id', {
          type: DataTypes.UUID,
          allowNull: true,
          references: {
            model: 'claim_invoices',
            key: 'invoice_id',
          },
          onDelete: 'SET NULL',
          onUpdate: 'CASCADE',
        });
        console.log('✅ Added invoice_id column to claim_credit_notes');
      }

      // Ensure sap_document_number column exists
      const hasSapDoc = await columnExists(queryInterface, 'claim_credit_notes', 'sap_document_number');
      if (!hasSapDoc) {
        await queryInterface.addColumn('claim_credit_notes', 'sap_document_number', {
          type: DataTypes.STRING(100),
          allowNull: true,
        });
        console.log('✅ Added sap_document_number column to claim_credit_notes');
      }

      // Ensure credit_note_file_path column exists
      const hasFilePath = await columnExists(queryInterface, 'claim_credit_notes', 'credit_note_file_path');
      if (!hasFilePath) {
        await queryInterface.addColumn('claim_credit_notes', 'credit_note_file_path', {
          type: DataTypes.STRING(500),
          allowNull: true,
        });
        console.log('✅ Added credit_note_file_path column to claim_credit_notes');
      }

      // Ensure confirmed_by column exists
      const hasConfirmedBy = await columnExists(queryInterface, 'claim_credit_notes', 'confirmed_by');
      if (!hasConfirmedBy) {
        await queryInterface.addColumn('claim_credit_notes', 'confirmed_by', {
          type: DataTypes.UUID,
          allowNull: true,
          references: {
            model: 'users',
            key: 'user_id',
          },
          onDelete: 'SET NULL',
          onUpdate: 'CASCADE',
        });
        console.log('✅ Added confirmed_by column to claim_credit_notes');
      }

      // Ensure confirmed_at column exists
      const hasConfirmedAt = await columnExists(queryInterface, 'claim_credit_notes', 'confirmed_at');
      if (!hasConfirmedAt) {
        await queryInterface.addColumn('claim_credit_notes', 'confirmed_at', {
          type: DataTypes.DATE,
          allowNull: true,
        });
        console.log('✅ Added confirmed_at column to claim_credit_notes');
      }
    }

    // Ensure invoice_file_path exists in claim_invoices
    if (invoiceTables.length > 0) {
      const hasFilePath = await columnExists(queryInterface, 'claim_invoices', 'invoice_file_path');
      if (!hasFilePath) {
        await queryInterface.addColumn('claim_invoices', 'invoice_file_path', {
          type: DataTypes.STRING(500),
          allowNull: true,
        });
        console.log('✅ Added invoice_file_path column to claim_invoices');
      }

      // Ensure error_message exists
      const hasErrorMessage = await columnExists(queryInterface, 'claim_invoices', 'error_message');
      if (!hasErrorMessage) {
        await queryInterface.addColumn('claim_invoices', 'error_message', {
          type: DataTypes.TEXT,
          allowNull: true,
        });
        console.log('✅ Added error_message column to claim_invoices');
      }

      // Ensure generated_at exists
      const hasGeneratedAt = await columnExists(queryInterface, 'claim_invoices', 'generated_at');
      if (!hasGeneratedAt) {
        await queryInterface.addColumn('claim_invoices', 'generated_at', {
          type: DataTypes.DATE,
          allowNull: true,
        });
        console.log('✅ Added generated_at column to claim_invoices');
      }
    }

    // Ensure error_message exists in claim_credit_notes
    if (creditNoteTables.length > 0) {
      const hasErrorMessage = await columnExists(queryInterface, 'claim_credit_notes', 'error_message');
      if (!hasErrorMessage) {
        await queryInterface.addColumn('claim_credit_notes', 'error_message', {
          type: DataTypes.TEXT,
          allowNull: true,
        });
        console.log('✅ Added error_message column to claim_credit_notes');
      }
    }

  } catch (error: any) {
    console.error('Migration error:', error.message);
    throw error;
  }
}

export async function down(queryInterface: QueryInterface): Promise<void> {
  // This migration is idempotent and safe to run multiple times
  // The down migration would reverse the changes, but it's safer to keep the new schema
  console.log('Note: Down migration not implemented - keeping new column names');
}
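// Editor's sketch (hypothetical helper, not in the source): the repeated
// rename-or-add pattern above could be factored out, reusing the columnExists
// helper defined in the same file.
async function renameOrAddColumn(
  queryInterface: QueryInterface,
  table: string,
  oldName: string,
  newName: string,
  definition: object
): Promise<void> {
  const hasOld = await columnExists(queryInterface, table, oldName);
  const hasNew = await columnExists(queryInterface, table, newName);
  if (hasOld && !hasNew) {
    await queryInterface.renameColumn(table, oldName, newName);
  } else if (!hasOld && !hasNew) {
    await queryInterface.addColumn(table, newName, definition);
  }
  // If the new column already exists, there is nothing to do
}
// Usage: await renameOrAddColumn(queryInterface, 'claim_invoices', 'amount', 'invoice_amount', { type: DataTypes.DECIMAL(15, 2), allowNull: true });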
@ -1,134 +0,0 @@
import { QueryInterface, DataTypes } from 'sequelize';

export const up = async (queryInterface: QueryInterface) => {
  // 1. Drop and recreate the enum type for snapshot_type to ensure all values are included
  // This ensures APPROVE is always present when the table is recreated
  // Note: Table should be dropped manually before running this migration
  try {
    await queryInterface.sequelize.query(`
      DO $$
      BEGIN
        -- Drop enum if it exists (cascade will handle any dependencies)
        IF EXISTS (SELECT 1 FROM pg_type WHERE typname = 'enum_dealer_claim_history_snapshot_type') THEN
          DROP TYPE IF EXISTS enum_dealer_claim_history_snapshot_type CASCADE;
        END IF;

        -- Create enum with all values including APPROVE
        CREATE TYPE enum_dealer_claim_history_snapshot_type AS ENUM ('PROPOSAL', 'COMPLETION', 'INTERNAL_ORDER', 'WORKFLOW', 'APPROVE');
      END $$;
    `);
  } catch (error) {
    // If enum creation fails, log the error and re-throw to abort the migration
    console.error('Enum creation error:', error);
    throw error;
  }

  // 2. Create new simplified level-based dealer_claim_history table
  await queryInterface.createTable('dealer_claim_history', {
    history_id: {
      type: DataTypes.UUID,
      defaultValue: DataTypes.UUIDV4,
      primaryKey: true
    },
    request_id: {
      type: DataTypes.UUID,
      allowNull: false,
      references: {
        model: 'workflow_requests',
        key: 'request_id'
      },
      onUpdate: 'CASCADE',
      onDelete: 'CASCADE'
    },
    approval_level_id: {
      type: DataTypes.UUID,
      allowNull: true, // Nullable for workflow-level snapshots
      references: {
        model: 'approval_levels',
        key: 'level_id'
      },
      onUpdate: 'CASCADE',
      onDelete: 'SET NULL'
    },
    level_number: {
      type: DataTypes.INTEGER,
      allowNull: true, // Nullable for workflow-level snapshots
      comment: 'Level number for easier querying (e.g., 1=Dealer, 3=Dept Lead, 4/5=Completion)'
    },
    level_name: {
      type: DataTypes.STRING(255),
      allowNull: true, // Nullable for workflow-level snapshots
      comment: 'Level name for consistent matching (e.g., "Dealer Proposal Submission", "Department Lead Approval")'
    },
    version: {
      type: DataTypes.INTEGER,
      allowNull: false,
      comment: 'Version number for this specific level (starts at 1 per level)'
    },
    snapshot_type: {
      type: DataTypes.ENUM('PROPOSAL', 'COMPLETION', 'INTERNAL_ORDER', 'WORKFLOW', 'APPROVE'),
      allowNull: false,
      comment: 'Type of snapshot: PROPOSAL (Step 1), COMPLETION (Step 4/5), INTERNAL_ORDER (Step 3), WORKFLOW (general), APPROVE (approver actions with comments)'
    },
    snapshot_data: {
      type: DataTypes.JSONB,
      allowNull: false,
      comment: 'JSON object containing all snapshot data specific to this level and type. Structure varies by snapshot_type.'
    },
    change_reason: {
      type: DataTypes.TEXT,
      allowNull: true,
      comment: 'Reason for this version change (e.g., "Revision Requested: ...")'
    },
    changed_by: {
      type: DataTypes.UUID,
      allowNull: false,
      references: {
        model: 'users',
        key: 'user_id'
      }
    },
    created_at: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW
    }
  });

  // Add indexes for efficient querying
  await queryInterface.addIndex('dealer_claim_history', ['request_id', 'level_number', 'version'], {
    name: 'idx_history_request_level_version'
  });
  await queryInterface.addIndex('dealer_claim_history', ['approval_level_id', 'version'], {
    name: 'idx_history_level_version'
  });
  await queryInterface.addIndex('dealer_claim_history', ['request_id', 'snapshot_type'], {
    name: 'idx_history_request_type'
  });
  await queryInterface.addIndex('dealer_claim_history', ['snapshot_type', 'level_number'], {
    name: 'idx_history_type_level'
  });
  await queryInterface.addIndex('dealer_claim_history', ['request_id', 'level_name'], {
    name: 'idx_history_request_level_name'
  });
  await queryInterface.addIndex('dealer_claim_history', ['level_name', 'snapshot_type'], {
    name: 'idx_history_level_name_type'
  });
  // Plain BTREE index on snapshot_type (JSONB containment queries on snapshot_data
  // would need a separate GIN index)
  await queryInterface.addIndex('dealer_claim_history', ['snapshot_type'], {
    name: 'idx_history_snapshot_type',
    using: 'BTREE'
  });
};

export const down = async (queryInterface: QueryInterface) => {
  // Note: Table should be dropped manually
  // Drop the enum type
  try {
    await queryInterface.sequelize.query(`
      DROP TYPE IF EXISTS enum_dealer_claim_history_snapshot_type CASCADE;
    `);
  } catch (error) {
    console.warn('Enum drop warning:', error);
  }
};
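// Editor's note: DROP TYPE ... CASCADE is destructive (it drops dependent columns).
// When only a new value is needed, modern Postgres also supports the lighter
//   ALTER TYPE enum_dealer_claim_history_snapshot_type ADD VALUE IF NOT EXISTS 'APPROVE';
// which preserves existing data (though before Postgres 12 it cannot run inside a
// transaction). The drop-and-recreate here trades that for a guaranteed-complete
// value list on a table that is recreated anyway.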
@ -1,115 +0,0 @@
import { QueryInterface, DataTypes } from 'sequelize';

export async function up(queryInterface: QueryInterface): Promise<void> {
  try {
    const tableDescription = await queryInterface.describeTable('workflow_templates');

    // 1. Rename id -> template_id
    if (tableDescription.id && !tableDescription.template_id) {
      console.log('Renaming id to template_id...');
      await queryInterface.renameColumn('workflow_templates', 'id', 'template_id');
    }

    // 2. Rename name -> template_name
    if (tableDescription.name && !tableDescription.template_name) {
      console.log('Renaming name to template_name...');
      await queryInterface.renameColumn('workflow_templates', 'name', 'template_name');
    }

    // 3. Rename description -> template_description
    if (tableDescription.description && !tableDescription.template_description) {
      console.log('Renaming description to template_description...');
      await queryInterface.renameColumn('workflow_templates', 'description', 'template_description');
    }

    // 4. Rename category -> template_category
    if (tableDescription.category && !tableDescription.template_category) {
      console.log('Renaming category to template_category...');
      await queryInterface.renameColumn('workflow_templates', 'category', 'template_category');
    }

    // 5. Rename suggested_sla -> default_tat_hours
    if (tableDescription.suggested_sla && !tableDescription.default_tat_hours) {
      console.log('Renaming suggested_sla to default_tat_hours...');
      await queryInterface.renameColumn('workflow_templates', 'suggested_sla', 'default_tat_hours');
    }

    // 6. Add missing columns
    if (!tableDescription.template_code) {
      console.log('Adding template_code column...');
      await queryInterface.addColumn('workflow_templates', 'template_code', {
        type: DataTypes.STRING(50),
        allowNull: true,
        unique: true
      });
    }

    if (!tableDescription.workflow_type) {
      console.log('Adding workflow_type column...');
      await queryInterface.addColumn('workflow_templates', 'workflow_type', {
        type: DataTypes.STRING(50),
        allowNull: true
      });
    }

    if (!tableDescription.approval_levels_config) {
      console.log('Adding approval_levels_config column...');
      await queryInterface.addColumn('workflow_templates', 'approval_levels_config', {
        type: DataTypes.JSONB,
        allowNull: true
      });
    }

    if (!tableDescription.form_steps_config) {
      console.log('Adding form_steps_config column...');
      await queryInterface.addColumn('workflow_templates', 'form_steps_config', {
        type: DataTypes.JSONB,
        allowNull: true
      });
    }

    if (!tableDescription.user_field_mappings) {
      console.log('Adding user_field_mappings column...');
      await queryInterface.addColumn('workflow_templates', 'user_field_mappings', {
        type: DataTypes.JSONB,
        allowNull: true
      });
    }

    if (!tableDescription.dynamic_approver_config) {
      console.log('Adding dynamic_approver_config column...');
      await queryInterface.addColumn('workflow_templates', 'dynamic_approver_config', {
        type: DataTypes.JSONB,
        allowNull: true
      });
    }

    if (!tableDescription.is_system_template) {
      console.log('Adding is_system_template column...');
      await queryInterface.addColumn('workflow_templates', 'is_system_template', {
        type: DataTypes.BOOLEAN,
        allowNull: false,
        defaultValue: false
      });
    }

    if (!tableDescription.usage_count) {
      console.log('Adding usage_count column...');
      await queryInterface.addColumn('workflow_templates', 'usage_count', {
        type: DataTypes.INTEGER,
        allowNull: false,
        defaultValue: 0
      });
    }

    console.log('✅ Schema validation/fix complete');
  } catch (error) {
    console.error('Error in schema fix migration:', error);
    throw error;
  }
}

export async function down(queryInterface: QueryInterface): Promise<void> {
  // Reverting these renames is complex and risky, so it is skipped for this fix-forward migration
}
@ -1,120 +0,0 @@
import { DataTypes, Model, Optional } from 'sequelize';
import { sequelize } from '@config/database';

interface ActivityAttributes {
  activityId: string;
  requestId: string;
  userId?: string | null;
  userName?: string | null;
  activityType: string; // activity_type
  activityDescription: string; // activity_description
  activityCategory?: string | null;
  severity?: string | null;
  metadata?: object | null;
  isSystemEvent?: boolean | null;
  ipAddress?: string | null;
  userAgent?: string | null;
  createdAt: Date;
}

interface ActivityCreationAttributes extends Optional<ActivityAttributes, 'activityId' | 'createdAt'> {}

class Activity extends Model<ActivityAttributes, ActivityCreationAttributes> implements ActivityAttributes {
  public activityId!: string;
  public requestId!: string;
  public userId!: string | null;
  public userName!: string | null;
  public activityType!: string;
  public activityDescription!: string;
  public activityCategory!: string | null;
  public severity!: string | null;
  public metadata!: object | null;
  public isSystemEvent!: boolean | null;
  public ipAddress!: string | null;
  public userAgent!: string | null;
  public createdAt!: Date;
}

Activity.init(
  {
    activityId: {
      type: DataTypes.UUID,
      defaultValue: DataTypes.UUIDV4,
      primaryKey: true,
      field: 'activity_id'
    },
    requestId: {
      type: DataTypes.UUID,
      allowNull: false,
      field: 'request_id'
    },
    userId: {
      type: DataTypes.UUID,
      allowNull: true,
      field: 'user_id'
    },
    userName: {
      type: DataTypes.STRING(255),
      allowNull: true,
      field: 'user_name'
    },
    activityType: {
      type: DataTypes.STRING(100),
      allowNull: false,
      field: 'activity_type'
    },
    activityDescription: {
      type: DataTypes.TEXT,
      allowNull: false,
      field: 'activity_description'
    },
    activityCategory: {
      type: DataTypes.STRING(100),
      allowNull: true,
      field: 'activity_category'
    },
    severity: {
      type: DataTypes.STRING(50),
      allowNull: true
    },
    metadata: {
      type: DataTypes.JSONB,
      allowNull: true
    },
    isSystemEvent: {
      type: DataTypes.BOOLEAN,
      allowNull: true,
      field: 'is_system_event'
    },
    ipAddress: {
      type: DataTypes.STRING(100),
      allowNull: true,
      field: 'ip_address'
    },
    userAgent: {
      type: DataTypes.TEXT,
      allowNull: true,
      field: 'user_agent'
    },
    createdAt: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW,
      field: 'created_at'
    }
  },
  {
    sequelize,
    modelName: 'Activity',
    tableName: 'activities',
    timestamps: false,
    indexes: [
      { fields: ['request_id'] },
      { fields: ['created_at'] }
    ]
  }
);

export { Activity };
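// Editor's sketch: minimal usage of the Activity model above. Field values are
// illustrative placeholders, not taken from the source.
//   await Activity.create({
//     requestId: 'some-request-uuid',
//     activityType: 'comment',
//     activityDescription: 'Requestor added a comment',
//     activityCategory: 'COLLABORATION',
//     severity: 'INFO'
//   });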
@ -1,127 +0,0 @@
import { DataTypes, Model, Optional } from 'sequelize';
import { sequelize } from '@config/database';
import { User } from './User';

interface ActivityTypeAttributes {
  activityTypeId: string;
  title: string;
  itemCode?: string;
  taxationType?: string;
  sapRefNo?: string;
  isActive: boolean;
  createdBy: string;
  updatedBy?: string;
  createdAt: Date;
  updatedAt: Date;
}

interface ActivityTypeCreationAttributes extends Optional<ActivityTypeAttributes, 'activityTypeId' | 'itemCode' | 'taxationType' | 'sapRefNo' | 'isActive' | 'updatedBy' | 'createdAt' | 'updatedAt'> {}

class ActivityType extends Model<ActivityTypeAttributes, ActivityTypeCreationAttributes> implements ActivityTypeAttributes {
  public activityTypeId!: string;
  public title!: string;
  public itemCode?: string;
  public taxationType?: string;
  public sapRefNo?: string;
  public isActive!: boolean;
  public createdBy!: string;
  public updatedBy?: string;
  public createdAt!: Date;
  public updatedAt!: Date;

  // Associations
  public creator?: User;
  public updater?: User;
}

ActivityType.init(
  {
    activityTypeId: {
      type: DataTypes.UUID,
      defaultValue: DataTypes.UUIDV4,
      primaryKey: true,
      field: 'activity_type_id'
    },
    title: {
      type: DataTypes.STRING(200),
      allowNull: false,
      unique: true,
      field: 'title'
    },
    itemCode: {
      type: DataTypes.STRING(100),
      allowNull: true,
      defaultValue: null,
      field: 'item_code'
    },
    taxationType: {
      type: DataTypes.STRING(100),
      allowNull: true,
      defaultValue: null,
      field: 'taxation_type'
    },
    sapRefNo: {
      type: DataTypes.STRING(100),
      allowNull: true,
      defaultValue: null,
      field: 'sap_ref_no'
    },
    isActive: {
      type: DataTypes.BOOLEAN,
      defaultValue: true,
      field: 'is_active'
    },
    createdBy: {
      type: DataTypes.UUID,
      allowNull: false,
      field: 'created_by'
    },
    updatedBy: {
      type: DataTypes.UUID,
      allowNull: true,
      field: 'updated_by'
    },
    createdAt: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW,
      field: 'created_at'
    },
    updatedAt: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW,
      field: 'updated_at'
    }
  },
  {
    sequelize,
    modelName: 'ActivityType',
    tableName: 'activity_types',
    timestamps: true,
    createdAt: 'created_at',
    updatedAt: 'updated_at',
    indexes: [
      { fields: ['title'], unique: true },
      { fields: ['is_active'] },
      { fields: ['item_code'] },
      { fields: ['created_by'] }
    ]
  }
);

// Associations
ActivityType.belongsTo(User, {
  as: 'creator',
  foreignKey: 'createdBy',
  targetKey: 'userId'
});

ActivityType.belongsTo(User, {
  as: 'updater',
  foreignKey: 'updatedBy',
  targetKey: 'userId'
});

export { ActivityType };
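// Editor's sketch: fetching active activity types with their creator, using the
// associations defined above (illustrative query, not part of the model file).
//   const types = await ActivityType.findAll({
//     where: { isActive: true },
//     include: [{ model: User, as: 'creator' }]
//   });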
@@ -1,307 +0,0 @@
import { DataTypes, Model, Optional } from 'sequelize';
import { sequelize } from '@config/database';
import { User } from './User';
import { WorkflowRequest } from './WorkflowRequest';
import { ApprovalStatus } from '../types/common.types';

interface ApprovalLevelAttributes {
  levelId: string;
  requestId: string;
  levelNumber: number;
  levelName?: string;
  approverId: string;
  approverEmail: string;
  approverName: string;
  tatHours: number;
  tatDays: number;
  status: ApprovalStatus;
  levelStartTime?: Date;
  levelEndTime?: Date;
  actionDate?: Date;
  comments?: string;
  rejectionReason?: string;
  breachReason?: string;
  isFinalApprover: boolean;
  elapsedHours: number;
  remainingHours: number;
  tatPercentageUsed: number;
  tat50AlertSent: boolean;
  tat75AlertSent: boolean;
  tatBreached: boolean;
  tatStartTime?: Date;
  isPaused: boolean;
  pausedAt?: Date;
  pausedBy?: string;
  pauseReason?: string;
  pauseResumeDate?: Date;
  pauseTatStartTime?: Date;
  pauseElapsedHours?: number;
  createdAt: Date;
  updatedAt: Date;
}

interface ApprovalLevelCreationAttributes extends Optional<ApprovalLevelAttributes, 'levelId' | 'levelName' | 'levelStartTime' | 'levelEndTime' | 'actionDate' | 'comments' | 'rejectionReason' | 'breachReason' | 'tat50AlertSent' | 'tat75AlertSent' | 'tatBreached' | 'tatStartTime' | 'tatDays' | 'isPaused' | 'pausedAt' | 'pausedBy' | 'pauseReason' | 'pauseResumeDate' | 'pauseTatStartTime' | 'pauseElapsedHours' | 'createdAt' | 'updatedAt'> {}

class ApprovalLevel extends Model<ApprovalLevelAttributes, ApprovalLevelCreationAttributes> implements ApprovalLevelAttributes {
  public levelId!: string;
  public requestId!: string;
  public levelNumber!: number;
  public levelName?: string;
  public approverId!: string;
  public approverEmail!: string;
  public approverName!: string;
  public tatHours!: number;
  public tatDays!: number;
  public status!: ApprovalStatus;
  public levelStartTime?: Date;
  public levelEndTime?: Date;
  public actionDate?: Date;
  public comments?: string;
  public rejectionReason?: string;
  public breachReason?: string;
  public isFinalApprover!: boolean;
  public elapsedHours!: number;
  public remainingHours!: number;
  public tatPercentageUsed!: number;
  public tat50AlertSent!: boolean;
  public tat75AlertSent!: boolean;
  public tatBreached!: boolean;
  public tatStartTime?: Date;
  public isPaused!: boolean;
  public pausedAt?: Date;
  public pausedBy?: string;
  public pauseReason?: string;
  public pauseResumeDate?: Date;
  public pauseTatStartTime?: Date;
  public pauseElapsedHours?: number;
  public createdAt!: Date;
  public updatedAt!: Date;

  // Associations
  public request?: WorkflowRequest;
  public approver?: User;
}

ApprovalLevel.init(
  {
    levelId: {
      type: DataTypes.UUID,
      defaultValue: DataTypes.UUIDV4,
      primaryKey: true,
      field: 'level_id'
    },
    requestId: {
      type: DataTypes.UUID,
      allowNull: false,
      field: 'request_id',
      references: {
        model: 'workflow_requests',
        key: 'request_id'
      }
    },
    levelNumber: {
      type: DataTypes.INTEGER,
      allowNull: false,
      field: 'level_number'
    },
    levelName: {
      type: DataTypes.STRING(100),
      allowNull: true,
      field: 'level_name'
    },
    approverId: {
      type: DataTypes.UUID,
      allowNull: false,
      field: 'approver_id',
      references: {
        model: 'users',
        key: 'user_id'
      }
    },
    approverEmail: {
      type: DataTypes.STRING(255),
      allowNull: false,
      field: 'approver_email'
    },
    approverName: {
      type: DataTypes.STRING(200),
      allowNull: false,
      field: 'approver_name'
    },
    tatHours: {
      type: DataTypes.DECIMAL(10, 2),
      allowNull: false,
      field: 'tat_hours'
    },
    tatDays: {
      type: DataTypes.INTEGER,
      allowNull: true,
      field: 'tat_days'
      // This is a GENERATED STORED column in production DB (calculated as CEIL(tat_hours / 24.0))
      // Database will auto-calculate this value - do NOT pass it during INSERT/UPDATE operations
    },
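    // A minimal sketch of the matching PostgreSQL DDL for the generated column
    // described above (an assumption for illustration; the actual production
    // migration is not part of this diff):
    //   ALTER TABLE approval_levels
    //     ADD COLUMN tat_days INTEGER
    //     GENERATED ALWAYS AS (CEIL(tat_hours / 24.0)::integer) STORED;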
    status: {
      type: DataTypes.ENUM('PENDING', 'IN_PROGRESS', 'APPROVED', 'REJECTED', 'SKIPPED', 'PAUSED'),
      defaultValue: 'PENDING'
    },
    levelStartTime: {
      type: DataTypes.DATE,
      allowNull: true,
      field: 'level_start_time'
    },
    levelEndTime: {
      type: DataTypes.DATE,
      allowNull: true,
      field: 'level_end_time'
    },
    actionDate: {
      type: DataTypes.DATE,
      allowNull: true,
      field: 'action_date'
    },
    comments: {
      type: DataTypes.TEXT,
      allowNull: true
    },
    rejectionReason: {
      type: DataTypes.TEXT,
      allowNull: true,
      field: 'rejection_reason'
    },
    breachReason: {
      type: DataTypes.TEXT,
      allowNull: true,
      field: 'breach_reason',
      comment: 'Reason for TAT breach - can contain paragraph-length text'
    },
    isFinalApprover: {
      type: DataTypes.BOOLEAN,
      defaultValue: false,
      field: 'is_final_approver'
    },
    elapsedHours: {
      type: DataTypes.DECIMAL(10, 2),
      defaultValue: 0,
      field: 'elapsed_hours'
    },
    remainingHours: {
      type: DataTypes.DECIMAL(10, 2),
      defaultValue: 0,
      field: 'remaining_hours'
    },
    tatPercentageUsed: {
      type: DataTypes.DECIMAL(5, 2),
      defaultValue: 0,
      field: 'tat_percentage_used'
    },
    tat50AlertSent: {
      type: DataTypes.BOOLEAN,
      defaultValue: false,
      field: 'tat50_alert_sent'
    },
    tat75AlertSent: {
      type: DataTypes.BOOLEAN,
      defaultValue: false,
      field: 'tat75_alert_sent'
    },
    tatBreached: {
      type: DataTypes.BOOLEAN,
      defaultValue: false,
      field: 'tat_breached'
    },
    tatStartTime: {
      type: DataTypes.DATE,
      allowNull: true,
      field: 'tat_start_time'
    },
    isPaused: {
      type: DataTypes.BOOLEAN,
      defaultValue: false,
      field: 'is_paused'
    },
    pausedAt: {
      type: DataTypes.DATE,
      allowNull: true,
      field: 'paused_at'
    },
    pausedBy: {
      type: DataTypes.UUID,
      allowNull: true,
      field: 'paused_by',
      references: {
        model: 'users',
        key: 'user_id'
      }
    },
    pauseReason: {
      type: DataTypes.TEXT,
      allowNull: true,
      field: 'pause_reason'
    },
    pauseResumeDate: {
      type: DataTypes.DATE,
      allowNull: true,
      field: 'pause_resume_date'
    },
    pauseTatStartTime: {
      type: DataTypes.DATE,
      allowNull: true,
      field: 'pause_tat_start_time'
    },
    pauseElapsedHours: {
      type: DataTypes.DECIMAL(10, 2),
      allowNull: true,
      field: 'pause_elapsed_hours'
    },
    createdAt: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW,
      field: 'created_at'
    },
    updatedAt: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW,
      field: 'updated_at'
    }
  },
  {
    sequelize,
    modelName: 'ApprovalLevel',
    tableName: 'approval_levels',
    timestamps: true,
    createdAt: 'created_at',
    updatedAt: 'updated_at',
    indexes: [
      {
        fields: ['request_id']
      },
      {
        fields: ['approver_id']
      },
      {
        fields: ['status']
      },
      {
        unique: true,
        fields: ['request_id', 'level_number']
      }
    ]
  }
);

// Associations
ApprovalLevel.belongsTo(WorkflowRequest, {
  as: 'request',
  foreignKey: 'requestId',
  targetKey: 'requestId'
});

ApprovalLevel.belongsTo(User, {
  as: 'approver',
  foreignKey: 'approverId',
  targetKey: 'userId'
});
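
// Illustrative usage of the aliases above (hypothetical query, not part of
// this file): eager-load the approver together with each level, e.g.
//   ApprovalLevel.findAll({
//     where: { requestId },
//     include: [{ model: User, as: 'approver' }]
//   });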

export { ApprovalLevel };
@@ -1,295 +0,0 @@
import { DataTypes, Model, Optional } from 'sequelize';
import { sequelize } from '@config/database';
import { WorkflowRequest } from './WorkflowRequest';
import { User } from './User';

export enum BudgetStatus {
  DRAFT = 'DRAFT',
  PROPOSED = 'PROPOSED',
  APPROVED = 'APPROVED',
  BLOCKED = 'BLOCKED',
  CLOSED = 'CLOSED',
  SETTLED = 'SETTLED'
}

interface ClaimBudgetTrackingAttributes {
  budgetId: string;
  requestId: string;
  // Initial Budget
  initialEstimatedBudget?: number;
  // Proposal Budget
  proposalEstimatedBudget?: number;
  proposalSubmittedAt?: Date;
  // Approved Budget
  approvedBudget?: number;
  approvedAt?: Date;
  approvedBy?: string;
  // IO Blocked Budget
  ioBlockedAmount?: number;
  ioBlockedAt?: Date;
  // Closed Expenses
  closedExpenses?: number;
  closedExpensesSubmittedAt?: Date;
  // Final Claim Amount
  finalClaimAmount?: number;
  finalClaimAmountApprovedAt?: Date;
  finalClaimAmountApprovedBy?: string;
  // Credit Note
  creditNoteAmount?: number;
  creditNoteIssuedAt?: Date;
  // Status & Metadata
  budgetStatus: BudgetStatus;
  currency: string;
  varianceAmount?: number;
  variancePercentage?: number;
  // Audit
  lastModifiedBy?: string;
  lastModifiedAt?: Date;
  modificationReason?: string;
  createdAt: Date;
  updatedAt: Date;
}

interface ClaimBudgetTrackingCreationAttributes extends Optional<ClaimBudgetTrackingAttributes, 'budgetId' | 'initialEstimatedBudget' | 'proposalEstimatedBudget' | 'proposalSubmittedAt' | 'approvedBudget' | 'approvedAt' | 'approvedBy' | 'ioBlockedAmount' | 'ioBlockedAt' | 'closedExpenses' | 'closedExpensesSubmittedAt' | 'finalClaimAmount' | 'finalClaimAmountApprovedAt' | 'finalClaimAmountApprovedBy' | 'creditNoteAmount' | 'creditNoteIssuedAt' | 'varianceAmount' | 'variancePercentage' | 'lastModifiedBy' | 'lastModifiedAt' | 'modificationReason' | 'budgetStatus' | 'currency' | 'createdAt' | 'updatedAt'> {}

class ClaimBudgetTracking extends Model<ClaimBudgetTrackingAttributes, ClaimBudgetTrackingCreationAttributes> implements ClaimBudgetTrackingAttributes {
  public budgetId!: string;
  public requestId!: string;
  public initialEstimatedBudget?: number;
  public proposalEstimatedBudget?: number;
  public proposalSubmittedAt?: Date;
  public approvedBudget?: number;
  public approvedAt?: Date;
  public approvedBy?: string;
  public ioBlockedAmount?: number;
  public ioBlockedAt?: Date;
  public closedExpenses?: number;
  public closedExpensesSubmittedAt?: Date;
  public finalClaimAmount?: number;
  public finalClaimAmountApprovedAt?: Date;
  public finalClaimAmountApprovedBy?: string;
  public creditNoteAmount?: number;
  public creditNoteIssuedAt?: Date;
  public budgetStatus!: BudgetStatus;
  public currency!: string;
  public varianceAmount?: number;
  public variancePercentage?: number;
  public lastModifiedBy?: string;
  public lastModifiedAt?: Date;
  public modificationReason?: string;
  public createdAt!: Date;
  public updatedAt!: Date;

  // Associations
  public request?: WorkflowRequest;
  public approver?: User;
  public finalApprover?: User;
  public lastModifier?: User;
}

ClaimBudgetTracking.init(
  {
    budgetId: {
      type: DataTypes.UUID,
      defaultValue: DataTypes.UUIDV4,
      primaryKey: true,
      field: 'budget_id'
    },
    requestId: {
      type: DataTypes.UUID,
      allowNull: false,
      unique: true,
      field: 'request_id',
      references: {
        model: 'workflow_requests',
        key: 'request_id'
      }
    },
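    // Note: the unique constraint on request_id (also enforced by the unique
    // index declared in the options below) makes this table a one-to-one
    // extension of workflow_requests - each request has at most one budget row.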
    initialEstimatedBudget: {
      type: DataTypes.DECIMAL(15, 2),
      allowNull: true,
      field: 'initial_estimated_budget'
    },
    proposalEstimatedBudget: {
      type: DataTypes.DECIMAL(15, 2),
      allowNull: true,
      field: 'proposal_estimated_budget'
    },
    proposalSubmittedAt: {
      type: DataTypes.DATE,
      allowNull: true,
      field: 'proposal_submitted_at'
    },
    approvedBudget: {
      type: DataTypes.DECIMAL(15, 2),
      allowNull: true,
      field: 'approved_budget'
    },
    approvedAt: {
      type: DataTypes.DATE,
      allowNull: true,
      field: 'approved_at'
    },
    approvedBy: {
      type: DataTypes.UUID,
      allowNull: true,
      field: 'approved_by',
      references: {
        model: 'users',
        key: 'user_id'
      }
    },
    ioBlockedAmount: {
      type: DataTypes.DECIMAL(15, 2),
      allowNull: true,
      field: 'io_blocked_amount'
    },
    ioBlockedAt: {
      type: DataTypes.DATE,
      allowNull: true,
      field: 'io_blocked_at'
    },
    closedExpenses: {
      type: DataTypes.DECIMAL(15, 2),
      allowNull: true,
      field: 'closed_expenses'
    },
    closedExpensesSubmittedAt: {
      type: DataTypes.DATE,
      allowNull: true,
      field: 'closed_expenses_submitted_at'
    },
    finalClaimAmount: {
      type: DataTypes.DECIMAL(15, 2),
      allowNull: true,
      field: 'final_claim_amount'
    },
    finalClaimAmountApprovedAt: {
      type: DataTypes.DATE,
      allowNull: true,
      field: 'final_claim_amount_approved_at'
    },
    finalClaimAmountApprovedBy: {
      type: DataTypes.UUID,
      allowNull: true,
      field: 'final_claim_amount_approved_by',
      references: {
        model: 'users',
        key: 'user_id'
      }
    },
    creditNoteAmount: {
      type: DataTypes.DECIMAL(15, 2),
      allowNull: true,
      field: 'credit_note_amount'
    },
    creditNoteIssuedAt: {
      type: DataTypes.DATE,
      allowNull: true,
      field: 'credit_note_issued_at'
    },
    budgetStatus: {
      type: DataTypes.ENUM('DRAFT', 'PROPOSED', 'APPROVED', 'BLOCKED', 'CLOSED', 'SETTLED'),
      defaultValue: 'DRAFT',
      allowNull: false,
      field: 'budget_status'
    },
    currency: {
      type: DataTypes.STRING(3),
      defaultValue: 'INR',
      allowNull: false
    },
    varianceAmount: {
      type: DataTypes.DECIMAL(15, 2),
      allowNull: true,
      field: 'variance_amount'
    },
    variancePercentage: {
      type: DataTypes.DECIMAL(5, 2),
      allowNull: true,
      field: 'variance_percentage'
    },
    lastModifiedBy: {
      type: DataTypes.UUID,
      allowNull: true,
      field: 'last_modified_by',
      references: {
        model: 'users',
        key: 'user_id'
      }
    },
    lastModifiedAt: {
      type: DataTypes.DATE,
      allowNull: true,
      field: 'last_modified_at'
    },
    modificationReason: {
      type: DataTypes.TEXT,
      allowNull: true,
      field: 'modification_reason'
    },
    createdAt: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW,
      field: 'created_at'
    },
    updatedAt: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW,
      field: 'updated_at'
    }
  },
  {
    sequelize,
    modelName: 'ClaimBudgetTracking',
    tableName: 'claim_budget_tracking',
    timestamps: true,
    createdAt: 'created_at',
    updatedAt: 'updated_at',
    indexes: [
      {
        fields: ['request_id'],
        unique: true
      },
      {
        fields: ['budget_status']
      },
      {
        fields: ['approved_by']
      },
      {
        fields: ['final_claim_amount_approved_by']
      }
    ]
  }
);

// Associations
ClaimBudgetTracking.belongsTo(WorkflowRequest, {
  as: 'request',
  foreignKey: 'requestId',
  targetKey: 'requestId'
});

ClaimBudgetTracking.belongsTo(User, {
  as: 'approver',
  foreignKey: 'approvedBy',
  targetKey: 'userId'
});

ClaimBudgetTracking.belongsTo(User, {
  as: 'finalApprover',
  foreignKey: 'finalClaimAmountApprovedBy',
  targetKey: 'userId'
});

ClaimBudgetTracking.belongsTo(User, {
  as: 'lastModifier',
  foreignKey: 'lastModifiedBy',
  targetKey: 'userId'
});

export { ClaimBudgetTracking };
@@ -1,193 +0,0 @@
import { DataTypes, Model, Optional } from 'sequelize';
import { sequelize } from '@config/database';
import { WorkflowRequest } from './WorkflowRequest';
import { ClaimInvoice } from './ClaimInvoice';

interface ClaimCreditNoteAttributes {
  creditNoteId: string;
  requestId: string;
  invoiceId?: string;
  creditNoteNumber?: string;
  creditNoteDate?: Date;
  creditNoteAmount?: number;
  sapDocumentNumber?: string;
  creditNoteFilePath?: string;
  status?: string;
  errorMessage?: string;
  confirmedBy?: string;
  confirmedAt?: Date;
  reason?: string;
  description?: string;
  createdAt: Date;
  updatedAt: Date;
}

interface ClaimCreditNoteCreationAttributes extends Optional<ClaimCreditNoteAttributes, 'creditNoteId' | 'invoiceId' | 'creditNoteNumber' | 'creditNoteDate' | 'creditNoteAmount' | 'sapDocumentNumber' | 'creditNoteFilePath' | 'status' | 'errorMessage' | 'confirmedBy' | 'confirmedAt' | 'reason' | 'description' | 'createdAt' | 'updatedAt'> {}

class ClaimCreditNote extends Model<ClaimCreditNoteAttributes, ClaimCreditNoteCreationAttributes> implements ClaimCreditNoteAttributes {
  public creditNoteId!: string;
  public requestId!: string;
  public invoiceId?: string;
  public creditNoteNumber?: string;
  public creditNoteDate?: Date;
  public creditNoteAmount?: number;
  public sapDocumentNumber?: string;
  public creditNoteFilePath?: string;
  public status?: string;
  public errorMessage?: string;
  public confirmedBy?: string;
  public confirmedAt?: Date;
  public reason?: string;
  public description?: string;
  public createdAt!: Date;
  public updatedAt!: Date;
}

ClaimCreditNote.init(
  {
    creditNoteId: {
      type: DataTypes.UUID,
      defaultValue: DataTypes.UUIDV4,
      primaryKey: true,
      field: 'credit_note_id',
    },
    requestId: {
      type: DataTypes.UUID,
      allowNull: false,
      unique: true,
      field: 'request_id',
      references: {
        model: 'workflow_requests',
        key: 'request_id',
      },
      onDelete: 'CASCADE',
      onUpdate: 'CASCADE',
    },
    invoiceId: {
      type: DataTypes.UUID,
      allowNull: true,
      field: 'invoice_id',
      references: {
        model: 'claim_invoices',
        key: 'invoice_id',
      },
      onDelete: 'SET NULL',
      onUpdate: 'CASCADE',
    },
    creditNoteNumber: {
      type: DataTypes.STRING(100),
      allowNull: true,
      field: 'credit_note_number',
    },
    creditNoteDate: {
      type: DataTypes.DATEONLY,
      allowNull: true,
      field: 'credit_note_date',
    },
    creditNoteAmount: {
      type: DataTypes.DECIMAL(15, 2),
      allowNull: true,
      field: 'credit_amount',
    },
    sapDocumentNumber: {
      type: DataTypes.STRING(100),
      allowNull: true,
      field: 'sap_document_number',
    },
    creditNoteFilePath: {
      type: DataTypes.STRING(500),
      allowNull: true,
      field: 'credit_note_file_path',
    },
    status: {
      type: DataTypes.STRING(50),
      allowNull: true,
      field: 'confirmation_status',
    },
    errorMessage: {
      type: DataTypes.TEXT,
      allowNull: true,
      field: 'error_message',
    },
    confirmedBy: {
      type: DataTypes.UUID,
      allowNull: true,
      field: 'confirmed_by',
      references: {
        model: 'users',
        key: 'user_id',
      },
      onDelete: 'SET NULL',
      onUpdate: 'CASCADE',
    },
    confirmedAt: {
      type: DataTypes.DATE,
      allowNull: true,
      field: 'confirmed_at',
    },
    reason: {
      type: DataTypes.TEXT,
      allowNull: true,
      field: 'reason',
    },
    description: {
      type: DataTypes.TEXT,
      allowNull: true,
      field: 'description',
    },
    createdAt: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW,
      field: 'created_at',
    },
    updatedAt: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW,
      field: 'updated_at',
    },
  },
  {
    sequelize,
    modelName: 'ClaimCreditNote',
    tableName: 'claim_credit_notes',
    timestamps: true,
    createdAt: 'created_at',
    updatedAt: 'updated_at',
    indexes: [
      { unique: true, fields: ['request_id'], name: 'idx_claim_credit_notes_request_id' },
      { fields: ['invoice_id'], name: 'idx_claim_credit_notes_invoice_id' },
      { fields: ['credit_note_number'], name: 'idx_claim_credit_notes_number' },
      { fields: ['sap_document_number'], name: 'idx_claim_credit_notes_sap_doc' },
      { fields: ['confirmation_status'], name: 'idx_claim_credit_notes_status' },
    ],
  }
);

WorkflowRequest.hasOne(ClaimCreditNote, {
  as: 'claimCreditNote',
  foreignKey: 'requestId',
  sourceKey: 'requestId',
});

ClaimCreditNote.belongsTo(WorkflowRequest, {
  as: 'workflowRequest',
  foreignKey: 'requestId',
  targetKey: 'requestId',
});

ClaimCreditNote.belongsTo(ClaimInvoice, {
  as: 'claimInvoice',
  foreignKey: 'invoiceId',
  targetKey: 'invoiceId',
});

ClaimInvoice.hasMany(ClaimCreditNote, {
  as: 'creditNotes',
  foreignKey: 'invoiceId',
  sourceKey: 'invoiceId',
});

export { ClaimCreditNote };
@@ -1,149 +0,0 @@
import { DataTypes, Model, Optional } from 'sequelize';
import { sequelize } from '@config/database';
import { WorkflowRequest } from './WorkflowRequest';

interface ClaimInvoiceAttributes {
  invoiceId: string;
  requestId: string;
  invoiceNumber?: string;
  invoiceDate?: Date;
  amount?: number;
  dmsNumber?: string;
  invoiceFilePath?: string;
  status?: string;
  errorMessage?: string;
  generatedAt?: Date;
  description?: string;
  createdAt: Date;
  updatedAt: Date;
}

interface ClaimInvoiceCreationAttributes extends Optional<ClaimInvoiceAttributes, 'invoiceId' | 'invoiceNumber' | 'invoiceDate' | 'amount' | 'dmsNumber' | 'invoiceFilePath' | 'status' | 'errorMessage' | 'generatedAt' | 'description' | 'createdAt' | 'updatedAt'> {}

class ClaimInvoice extends Model<ClaimInvoiceAttributes, ClaimInvoiceCreationAttributes> implements ClaimInvoiceAttributes {
  public invoiceId!: string;
  public requestId!: string;
  public invoiceNumber?: string;
  public invoiceDate?: Date;
  public amount?: number;
  public dmsNumber?: string;
  public invoiceFilePath?: string;
  public status?: string;
  public errorMessage?: string;
  public generatedAt?: Date;
  public description?: string;
  public createdAt!: Date;
  public updatedAt!: Date;
}

ClaimInvoice.init(
  {
    invoiceId: {
      type: DataTypes.UUID,
      defaultValue: DataTypes.UUIDV4,
      primaryKey: true,
      field: 'invoice_id',
    },
    requestId: {
      type: DataTypes.UUID,
      allowNull: false,
      unique: true,
      field: 'request_id',
      references: {
        model: 'workflow_requests',
        key: 'request_id',
      },
      onDelete: 'CASCADE',
      onUpdate: 'CASCADE',
    },
    invoiceNumber: {
      type: DataTypes.STRING(100),
      allowNull: true,
      field: 'invoice_number',
    },
    invoiceDate: {
      type: DataTypes.DATEONLY,
      allowNull: true,
      field: 'invoice_date',
    },
    amount: {
      type: DataTypes.DECIMAL(15, 2),
      allowNull: true,
      field: 'invoice_amount',
    },
    dmsNumber: {
      type: DataTypes.STRING(100),
      allowNull: true,
      field: 'dms_number',
    },
    invoiceFilePath: {
      type: DataTypes.STRING(500),
      allowNull: true,
      field: 'invoice_file_path',
    },
    status: {
      type: DataTypes.STRING(50),
      allowNull: true,
      field: 'generation_status',
    },
    errorMessage: {
      type: DataTypes.TEXT,
      allowNull: true,
      field: 'error_message',
    },
    generatedAt: {
      type: DataTypes.DATE,
      allowNull: true,
      field: 'generated_at',
    },
    description: {
      type: DataTypes.TEXT,
      allowNull: true,
      field: 'description',
    },
    createdAt: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW,
      field: 'created_at',
    },
    updatedAt: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW,
      field: 'updated_at',
    },
  },
  {
    sequelize,
    modelName: 'ClaimInvoice',
    tableName: 'claim_invoices',
    timestamps: true,
    createdAt: 'created_at',
    updatedAt: 'updated_at',
    indexes: [
      { unique: true, fields: ['request_id'], name: 'idx_claim_invoices_request_id' },
      { fields: ['invoice_number'], name: 'idx_claim_invoices_invoice_number' },
      { fields: ['dms_number'], name: 'idx_claim_invoices_dms_number' },
      { fields: ['generation_status'], name: 'idx_claim_invoices_status' },
    ],
  }
);

WorkflowRequest.hasOne(ClaimInvoice, {
  as: 'claimInvoice',
  foreignKey: 'requestId',
  sourceKey: 'requestId',
});

ClaimInvoice.belongsTo(WorkflowRequest, {
  as: 'workflowRequest',
  foreignKey: 'requestId',
  targetKey: 'requestId',
});

// Note: hasMany association with ClaimCreditNote is defined in ClaimCreditNote.ts
// to avoid circular dependency issues
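// Illustrative usage (hypothetical query, not part of this file): the
// invoice-to-credit-note relation defined over there can still be consumed
// from this side, e.g.
//   const invoice = await ClaimInvoice.findOne({
//     where: { requestId },
//     include: [{ model: ClaimCreditNote, as: 'creditNotes' }]
//   });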

export { ClaimInvoice };
@@ -1,152 +0,0 @@
import { DataTypes, Model, Optional } from 'sequelize';
import { sequelize } from '../config/database';

interface ConclusionRemarkAttributes {
  conclusionId: string;
  requestId: string;
  aiGeneratedRemark: string | null;
  aiModelUsed: string | null;
  aiConfidenceScore: number | null;
  finalRemark: string | null;
  editedBy: string | null;
  isEdited: boolean;
  editCount: number;
  approvalSummary: any;
  documentSummary: any;
  keyDiscussionPoints: string[];
  generatedAt: Date | null;
  finalizedAt: Date | null;
  createdAt?: Date;
  updatedAt?: Date;
}

interface ConclusionRemarkCreationAttributes
  extends Optional<ConclusionRemarkAttributes, 'conclusionId' | 'aiGeneratedRemark' | 'aiModelUsed' | 'aiConfidenceScore' | 'finalRemark' | 'editedBy' | 'isEdited' | 'editCount' | 'approvalSummary' | 'documentSummary' | 'keyDiscussionPoints' | 'generatedAt' | 'finalizedAt'> {}

class ConclusionRemark extends Model<ConclusionRemarkAttributes, ConclusionRemarkCreationAttributes>
  implements ConclusionRemarkAttributes {
  public conclusionId!: string;
  public requestId!: string;
  public aiGeneratedRemark!: string | null;
  public aiModelUsed!: string | null;
  public aiConfidenceScore!: number | null;
  public finalRemark!: string | null;
  public editedBy!: string | null;
  public isEdited!: boolean;
  public editCount!: number;
  public approvalSummary!: any;
  public documentSummary!: any;
  public keyDiscussionPoints!: string[];
  public generatedAt!: Date | null;
  public finalizedAt!: Date | null;
  public readonly createdAt!: Date;
  public readonly updatedAt!: Date;
}

ConclusionRemark.init(
  {
    conclusionId: {
      type: DataTypes.UUID,
      defaultValue: DataTypes.UUIDV4,
      primaryKey: true,
      field: 'conclusion_id'
    },
    requestId: {
      type: DataTypes.UUID,
      allowNull: false,
      field: 'request_id',
      references: {
        model: 'workflow_requests',
        key: 'request_id'
      }
    },
    aiGeneratedRemark: {
      type: DataTypes.TEXT,
      allowNull: true,
      field: 'ai_generated_remark'
    },
    aiModelUsed: {
      type: DataTypes.STRING(100),
      allowNull: true,
      field: 'ai_model_used'
    },
    aiConfidenceScore: {
      type: DataTypes.DECIMAL(5, 2),
      allowNull: true,
      field: 'ai_confidence_score'
    },
    finalRemark: {
      type: DataTypes.TEXT,
      allowNull: true,
      field: 'final_remark'
    },
    editedBy: {
      type: DataTypes.UUID,
      allowNull: true,
      field: 'edited_by',
      references: {
        model: 'users',
        key: 'user_id'
      }
    },
    isEdited: {
      type: DataTypes.BOOLEAN,
      allowNull: false,
      defaultValue: false,
      field: 'is_edited'
    },
    editCount: {
      type: DataTypes.INTEGER,
      allowNull: false,
      defaultValue: 0,
      field: 'edit_count'
    },
    approvalSummary: {
      type: DataTypes.JSONB,
      allowNull: true,
      field: 'approval_summary'
    },
    documentSummary: {
      type: DataTypes.JSONB,
      allowNull: true,
      field: 'document_summary'
    },
    keyDiscussionPoints: {
      type: DataTypes.ARRAY(DataTypes.TEXT),
      allowNull: false,
      defaultValue: [],
      field: 'key_discussion_points'
    },
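    // Note: DataTypes.JSONB and DataTypes.ARRAY above are PostgreSQL-only
    // Sequelize types; this model would need JSON/TEXT fallbacks on other dialects.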
    generatedAt: {
      type: DataTypes.DATE,
      allowNull: true,
      field: 'generated_at'
    },
    finalizedAt: {
      type: DataTypes.DATE,
      allowNull: true,
      field: 'finalized_at'
    },
    createdAt: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW,
      field: 'created_at'
    },
    updatedAt: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW,
      field: 'updated_at'
    }
  },
  {
    sequelize,
    tableName: 'conclusion_remarks',
    timestamps: true,
    underscored: true
  }
);

export default ConclusionRemark;
@@ -1,442 +0,0 @@
import { DataTypes, Model, Optional } from 'sequelize';
import { sequelize } from '../config/database';

interface DealerAttributes {
  dealerId: string;
  salesCode?: string | null;
  serviceCode?: string | null;
  gearCode?: string | null;
  gmaCode?: string | null;
  region?: string | null;
  dealership?: string | null;
  state?: string | null;
  district?: string | null;
  city?: string | null;
  location?: string | null;
  cityCategoryPst?: string | null;
  layoutFormat?: string | null;
  tierCityCategory?: string | null;
  onBoardingCharges?: string | null;
  date?: string | null;
  singleFormatMonthYear?: string | null;
  domainId?: string | null;
  replacement?: string | null;
  terminationResignationStatus?: string | null;
  dateOfTerminationResignation?: string | null;
  lastDateOfOperations?: string | null;
  oldCodes?: string | null;
  branchDetails?: string | null;
  dealerPrincipalName?: string | null;
  dealerPrincipalEmailId?: string | null;
  dpContactNumber?: string | null;
  dpContacts?: string | null;
  showroomAddress?: string | null;
  showroomPincode?: string | null;
  workshopAddress?: string | null;
  workshopPincode?: string | null;
  locationDistrict?: string | null;
  stateWorkshop?: string | null;
  noOfStudios?: number | null;
  websiteUpdate?: string | null;
  gst?: string | null;
  pan?: string | null;
  firmType?: string | null;
  propManagingPartnersDirectors?: string | null;
  totalPropPartnersDirectors?: string | null;
  docsFolderLink?: string | null;
  workshopGmaCodes?: string | null;
  existingNew?: string | null;
  dlrcode?: string | null;
  isActive: boolean;
  createdAt: Date;
  updatedAt: Date;
}

interface DealerCreationAttributes extends Optional<DealerAttributes, 'dealerId' | 'isActive' | 'createdAt' | 'updatedAt'> {}

class Dealer extends Model<DealerAttributes, DealerCreationAttributes> implements DealerAttributes {
  public dealerId!: string;
  public salesCode?: string | null;
  public serviceCode?: string | null;
  public gearCode?: string | null;
  public gmaCode?: string | null;
  public region?: string | null;
  public dealership?: string | null;
  public state?: string | null;
  public district?: string | null;
  public city?: string | null;
  public location?: string | null;
  public cityCategoryPst?: string | null;
  public layoutFormat?: string | null;
  public tierCityCategory?: string | null;
  public onBoardingCharges?: string | null;
  public date?: string | null;
  public singleFormatMonthYear?: string | null;
  public domainId?: string | null;
  public replacement?: string | null;
  public terminationResignationStatus?: string | null;
  public dateOfTerminationResignation?: string | null;
  public lastDateOfOperations?: string | null;
  public oldCodes?: string | null;
  public branchDetails?: string | null;
  public dealerPrincipalName?: string | null;
  public dealerPrincipalEmailId?: string | null;
  public dpContactNumber?: string | null;
  public dpContacts?: string | null;
  public showroomAddress?: string | null;
  public showroomPincode?: string | null;
  public workshopAddress?: string | null;
  public workshopPincode?: string | null;
  public locationDistrict?: string | null;
  public stateWorkshop?: string | null;
  public noOfStudios?: number | null;
  public websiteUpdate?: string | null;
  public gst?: string | null;
  public pan?: string | null;
  public firmType?: string | null;
  public propManagingPartnersDirectors?: string | null;
  public totalPropPartnersDirectors?: string | null;
  public docsFolderLink?: string | null;
  public workshopGmaCodes?: string | null;
  public existingNew?: string | null;
  public dlrcode?: string | null;
  public isActive!: boolean;
  public readonly createdAt!: Date;
  public readonly updatedAt!: Date;
}

Dealer.init(
  {
    dealerId: {
      type: DataTypes.UUID,
      primaryKey: true,
      defaultValue: DataTypes.UUIDV4,
      field: 'dealer_id'
    },
    salesCode: {
      type: DataTypes.STRING(50),
      allowNull: true,
      field: 'sales_code',
      comment: 'Sales Code'
    },
    serviceCode: {
      type: DataTypes.STRING(50),
      allowNull: true,
      field: 'service_code',
      comment: 'Service Code'
    },
    gearCode: {
      type: DataTypes.STRING(50),
      allowNull: true,
      field: 'gear_code',
      comment: 'Gear Code'
    },
    gmaCode: {
      type: DataTypes.STRING(50),
      allowNull: true,
      field: 'gma_code',
      comment: 'GMA CODE'
    },
    region: {
      type: DataTypes.STRING(50),
      allowNull: true,
      comment: 'Region'
    },
    dealership: {
      type: DataTypes.STRING(255),
      allowNull: true,
      comment: 'Dealership name'
    },
    state: {
      type: DataTypes.STRING(100),
      allowNull: true,
      comment: 'State'
    },
    district: {
      type: DataTypes.STRING(100),
      allowNull: true,
      comment: 'District'
    },
    city: {
      type: DataTypes.STRING(100),
      allowNull: true,
      comment: 'City'
    },
    location: {
      type: DataTypes.STRING(255),
      allowNull: true,
      comment: 'Location'
    },
    cityCategoryPst: {
      type: DataTypes.STRING(50),
      allowNull: true,
      field: 'city_category_pst',
      comment: 'City category (PST)'
    },
    layoutFormat: {
      type: DataTypes.STRING(50),
      allowNull: true,
      field: 'layout_format',
      comment: 'Layout format'
    },
    tierCityCategory: {
      type: DataTypes.STRING(100),
      allowNull: true,
      field: 'tier_city_category',
      comment: 'TIER City Category'
    },
    onBoardingCharges: {
      type: DataTypes.TEXT,
      allowNull: true,
      field: 'on_boarding_charges',
      comment: 'On Boarding Charges (stored as text to allow text values)'
    },
    date: {
      type: DataTypes.TEXT,
      allowNull: true,
      comment: 'DATE (stored as text to avoid format validation)'
    },
    singleFormatMonthYear: {
      type: DataTypes.TEXT,
      allowNull: true,
      field: 'single_format_month_year',
      comment: 'Single Format of Month/Year (stored as text)'
    },
    domainId: {
      type: DataTypes.STRING(255),
      allowNull: true,
      field: 'domain_id',
      comment: 'Domain Id'
    },
    replacement: {
      type: DataTypes.TEXT,
      allowNull: true,
      comment: 'Replacement (stored as text to allow longer values)'
    },
    terminationResignationStatus: {
      type: DataTypes.STRING(255),
      allowNull: true,
      field: 'termination_resignation_status',
      comment: 'Termination / Resignation under Proposal or Evaluation'
    },
    dateOfTerminationResignation: {
      type: DataTypes.TEXT,
      allowNull: true,
      field: 'date_of_termination_resignation',
      comment: 'Date Of termination/ resignation (stored as text to avoid format validation)'
    },
    lastDateOfOperations: {
      type: DataTypes.TEXT,
      allowNull: true,
      field: 'last_date_of_operations',
      comment: 'Last date of operations (stored as text to avoid format validation)'
    },
    oldCodes: {
      type: DataTypes.STRING(255),
      allowNull: true,
      field: 'old_codes',
      comment: 'Old Codes'
    },
    branchDetails: {
      type: DataTypes.TEXT,
      allowNull: true,
      field: 'branch_details',
      comment: 'Branch Details'
    },
    dealerPrincipalName: {
      type: DataTypes.STRING(255),
      allowNull: true,
      field: 'dealer_principal_name',
      comment: 'Dealer Principal Name'
    },
    dealerPrincipalEmailId: {
      type: DataTypes.STRING(255),
      allowNull: true,
      field: 'dealer_principal_email_id',
      comment: 'Dealer Principal Email Id'
    },
    dpContactNumber: {
      type: DataTypes.TEXT,
      allowNull: true,
      field: 'dp_contact_number',
      comment: 'DP CONTACT NUMBER (stored as text to allow multiple numbers)'
    },
    dpContacts: {
      type: DataTypes.TEXT,
      allowNull: true,
      field: 'dp_contacts',
      comment: 'DP CONTACTS (stored as text to allow multiple contacts)'
    },
    showroomAddress: {
      type: DataTypes.TEXT,
      allowNull: true,
      field: 'showroom_address',
      comment: 'Showroom Address'
    },
    showroomPincode: {
      type: DataTypes.STRING(10),
      allowNull: true,
      field: 'showroom_pincode',
      comment: 'Showroom Pincode'
    },
    workshopAddress: {
      type: DataTypes.TEXT,
      allowNull: true,
      field: 'workshop_address',
      comment: 'Workshop Address'
    },
    workshopPincode: {
      type: DataTypes.STRING(10),
      allowNull: true,
      field: 'workshop_pincode',
      comment: 'Workshop Pincode'
    },
    locationDistrict: {
      type: DataTypes.STRING(100),
      allowNull: true,
      field: 'location_district',
      comment: 'Location / District'
    },
    stateWorkshop: {
      type: DataTypes.STRING(100),
      allowNull: true,
      field: 'state_workshop',
      comment: 'State (for workshop)'
    },
    noOfStudios: {
      type: DataTypes.INTEGER,
      allowNull: true,
      defaultValue: 0,
      field: 'no_of_studios',
      comment: 'No Of Studios'
    },
    websiteUpdate: {
      type: DataTypes.TEXT,
      allowNull: true,
      field: 'website_update',
      comment: 'Website update (stored as text to allow longer values)'
    },
    gst: {
      type: DataTypes.STRING(50),
      allowNull: true,
      comment: 'GST'
    },
    pan: {
      type: DataTypes.STRING(50),
      allowNull: true,
      comment: 'PAN'
    },
    firmType: {
      type: DataTypes.STRING(100),
      allowNull: true,
      field: 'firm_type',
      comment: 'Firm Type'
    },
    propManagingPartnersDirectors: {
      type: DataTypes.STRING(255),
      allowNull: true,
      field: 'prop_managing_partners_directors',
      comment: 'Prop. / Managing Partners / Managing Directors'
    },
    totalPropPartnersDirectors: {
      type: DataTypes.STRING(255),
      allowNull: true,
      field: 'total_prop_partners_directors',
      comment: 'Total Prop. / Partners / Directors'
    },
    docsFolderLink: {
      type: DataTypes.TEXT,
      allowNull: true,
      field: 'docs_folder_link',
      comment: 'DOCS Folder Link'
    },
    workshopGmaCodes: {
      type: DataTypes.STRING(255),
      allowNull: true,
      field: 'workshop_gma_codes',
      comment: 'Workshop GMA Codes'
    },
    existingNew: {
      type: DataTypes.STRING(50),
      allowNull: true,
      field: 'existing_new',
      comment: 'Existing / New'
    },
    dlrcode: {
      type: DataTypes.STRING(50),
      allowNull: true,
      comment: 'dlrcode'
    },
    isActive: {
      type: DataTypes.BOOLEAN,
      allowNull: false,
      defaultValue: true,
      field: 'is_active',
      comment: 'Whether the dealer is currently active'
    },
    createdAt: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW,
      field: 'created_at'
    },
    updatedAt: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW,
      field: 'updated_at'
    }
  },
  {
    sequelize,
    tableName: 'dealers',
    modelName: 'Dealer',
    timestamps: true,
    underscored: true,
    indexes: [
      {
        fields: ['sales_code'],
        name: 'idx_dealers_sales_code'
      },
      {
        fields: ['service_code'],
        name: 'idx_dealers_service_code'
      },
      {
        fields: ['gma_code'],
        name: 'idx_dealers_gma_code'
      },
      {
        fields: ['domain_id'],
        name: 'idx_dealers_domain_id'
      },
      {
        fields: ['region'],
        name: 'idx_dealers_region'
      },
      {
        fields: ['state'],
        name: 'idx_dealers_state'
      },
      {
        fields: ['city'],
        name: 'idx_dealers_city'
      },
      {
        fields: ['district'],
        name: 'idx_dealers_district'
      },
      {
        fields: ['dlrcode'],
        name: 'idx_dealers_dlrcode'
      },
      {
        fields: ['is_active'],
        name: 'idx_dealers_is_active'
      }
    ]
  }
);

export { Dealer };
export type { DealerAttributes, DealerCreationAttributes };
@@ -1,167 +0,0 @@
import { DataTypes, Model, Optional } from 'sequelize';
import { sequelize } from '@config/database';
import { WorkflowRequest } from './WorkflowRequest';

interface DealerClaimDetailsAttributes {
  claimId: string;
  requestId: string;
  activityName: string;
  activityType: string;
  dealerCode: string;
  dealerName: string;
  dealerEmail?: string;
  dealerPhone?: string;
  dealerAddress?: string;
  activityDate?: Date;
  location?: string;
  periodStartDate?: Date;
  periodEndDate?: Date;
  createdAt: Date;
  updatedAt: Date;
}

interface DealerClaimDetailsCreationAttributes extends Optional<DealerClaimDetailsAttributes, 'claimId' | 'dealerEmail' | 'dealerPhone' | 'dealerAddress' | 'activityDate' | 'location' | 'periodStartDate' | 'periodEndDate' | 'createdAt' | 'updatedAt'> {}

class DealerClaimDetails extends Model<DealerClaimDetailsAttributes, DealerClaimDetailsCreationAttributes> implements DealerClaimDetailsAttributes {
  public claimId!: string;
  public requestId!: string;
  public activityName!: string;
  public activityType!: string;
  public dealerCode!: string;
  public dealerName!: string;
  public dealerEmail?: string;
  public dealerPhone?: string;
  public dealerAddress?: string;
  public activityDate?: Date;
  public location?: string;
  public periodStartDate?: Date;
  public periodEndDate?: Date;
  public createdAt!: Date;
  public updatedAt!: Date;

  // Associations
  public workflowRequest?: WorkflowRequest;
}

DealerClaimDetails.init(
  {
    claimId: {
      type: DataTypes.UUID,
      defaultValue: DataTypes.UUIDV4,
      primaryKey: true,
      field: 'claim_id'
    },
    requestId: {
      type: DataTypes.UUID,
      allowNull: false,
      unique: true,
      field: 'request_id',
      references: {
        model: 'workflow_requests',
        key: 'request_id'
      }
    },
    activityName: {
      type: DataTypes.STRING(500),
      allowNull: false,
      field: 'activity_name'
    },
    activityType: {
      type: DataTypes.STRING(100),
      allowNull: false,
      field: 'activity_type'
    },
    dealerCode: {
      type: DataTypes.STRING(50),
      allowNull: false,
      field: 'dealer_code'
    },
    dealerName: {
      type: DataTypes.STRING(200),
      allowNull: false,
      field: 'dealer_name'
    },
    dealerEmail: {
      type: DataTypes.STRING(255),
      allowNull: true,
      field: 'dealer_email'
    },
    dealerPhone: {
      type: DataTypes.STRING(20),
      allowNull: true,
      field: 'dealer_phone'
    },
    dealerAddress: {
      type: DataTypes.TEXT,
      allowNull: true,
      field: 'dealer_address'
    },
    activityDate: {
      type: DataTypes.DATEONLY,
      allowNull: true,
      field: 'activity_date'
    },
    location: {
      type: DataTypes.STRING(255),
      allowNull: true
    },
    periodStartDate: {
      type: DataTypes.DATEONLY,
      allowNull: true,
      field: 'period_start_date'
    },
    periodEndDate: {
      type: DataTypes.DATEONLY,
      allowNull: true,
      field: 'period_end_date'
    },
    createdAt: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW,
      field: 'created_at'
    },
    updatedAt: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW,
      field: 'updated_at'
    }
  },
  {
    sequelize,
    modelName: 'DealerClaimDetails',
    tableName: 'dealer_claim_details',
    timestamps: true,
    createdAt: 'created_at',
    updatedAt: 'updated_at',
    indexes: [
      {
        unique: true,
        fields: ['request_id']
      },
      {
        fields: ['dealer_code']
      },
      {
        fields: ['activity_type']
      }
    ]
  }
);

// Associations
DealerClaimDetails.belongsTo(WorkflowRequest, {
  as: 'workflowRequest',
  foreignKey: 'requestId',
  targetKey: 'requestId'
});

WorkflowRequest.hasOne(DealerClaimDetails, {
  as: 'claimDetails',
  foreignKey: 'requestId',
  sourceKey: 'requestId'
});

export { DealerClaimDetails };
@@ -1,190 +0,0 @@
import { DataTypes, Model, Optional } from 'sequelize';
import { sequelize } from '@config/database';
import { WorkflowRequest } from './WorkflowRequest';
import { ApprovalLevel } from './ApprovalLevel';
import { User } from './User';

export enum SnapshotType {
  PROPOSAL = 'PROPOSAL',
  COMPLETION = 'COMPLETION',
  INTERNAL_ORDER = 'INTERNAL_ORDER',
  WORKFLOW = 'WORKFLOW',
  APPROVE = 'APPROVE'
}

// Type definitions for snapshot data structures
export interface ProposalSnapshotData {
  documentUrl?: string;
  totalBudget?: number;
  comments?: string;
  expectedCompletionDate?: string;
  costItems?: Array<{
    description: string;
    amount: number;
    order: number;
  }>;
}

export interface CompletionSnapshotData {
  documentUrl?: string;
  totalExpenses?: number;
  comments?: string;
  expenses?: Array<{
    description: string;
    amount: number;
  }>;
}

export interface IOSnapshotData {
  ioNumber?: string;
  blockedAmount?: number;
  availableBalance?: number;
  remainingBalance?: number;
  sapDocumentNumber?: string;
}

export interface WorkflowSnapshotData {
  status?: string;
  currentLevel?: number;
}

export interface ApprovalSnapshotData {
  action: 'APPROVE' | 'REJECT';
  comments?: string;
  rejectionReason?: string;
  approverName?: string;
  approverEmail?: string;
  levelName?: string;
}
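
// Illustrative example (hypothetical values, not part of this file) of how
// these snapshot types pair with SnapshotType when writing a history row:
//   await DealerClaimHistory.create({
//     requestId,
//     version: 1,
//     snapshotType: SnapshotType.APPROVE,
//     snapshotData: { action: 'APPROVE', comments: 'Looks good' } as ApprovalSnapshotData,
//     changedBy: approverUserId
//   });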
|
|
||||||
|
|
||||||
interface DealerClaimHistoryAttributes {
|
|
||||||
historyId: string;
|
|
||||||
requestId: string;
|
|
||||||
approvalLevelId?: string;
|
|
||||||
levelNumber?: number;
|
|
||||||
levelName?: string;
|
|
||||||
version: number;
|
|
||||||
snapshotType: SnapshotType;
|
|
||||||
snapshotData: ProposalSnapshotData | CompletionSnapshotData | IOSnapshotData | WorkflowSnapshotData | ApprovalSnapshotData | any;
|
|
||||||
changeReason?: string;
|
|
||||||
changedBy: string;
|
|
||||||
createdAt: Date;
|
|
||||||
}
|
|
||||||
|
|
||||||
interface DealerClaimHistoryCreationAttributes extends Optional<DealerClaimHistoryAttributes, 'historyId' | 'approvalLevelId' | 'levelNumber' | 'levelName' | 'changeReason' | 'createdAt'> { }
|
|
||||||
|
|
||||||
class DealerClaimHistory extends Model<DealerClaimHistoryAttributes, DealerClaimHistoryCreationAttributes> implements DealerClaimHistoryAttributes {
|
|
||||||
public historyId!: string;
|
|
||||||
public requestId!: string;
|
|
||||||
public approvalLevelId?: string;
|
|
||||||
public levelNumber?: number;
|
|
||||||
public version!: number;
|
|
||||||
public snapshotType!: SnapshotType;
|
|
||||||
public snapshotData!: ProposalSnapshotData | CompletionSnapshotData | IOSnapshotData | WorkflowSnapshotData | any;
|
|
||||||
public changeReason?: string;
|
|
||||||
public changedBy!: string;
|
|
||||||
public createdAt!: Date;
|
|
||||||
}
|
|
||||||
|
|
||||||
DealerClaimHistory.init(
|
|
||||||
{
|
|
||||||
historyId: {
|
|
||||||
type: DataTypes.UUID,
|
|
||||||
defaultValue: DataTypes.UUIDV4,
|
|
||||||
primaryKey: true,
|
|
||||||
field: 'history_id'
|
|
||||||
},
|
|
||||||
requestId: {
|
|
||||||
type: DataTypes.UUID,
|
|
||||||
allowNull: false,
|
|
||||||
field: 'request_id',
|
|
||||||
references: {
|
|
||||||
model: 'workflow_requests',
|
|
||||||
key: 'request_id'
|
|
||||||
}
|
|
||||||
},
|
|
||||||
approvalLevelId: {
|
|
||||||
type: DataTypes.UUID,
|
|
||||||
allowNull: true,
|
|
||||||
field: 'approval_level_id',
|
|
||||||
references: {
|
|
||||||
model: 'approval_levels',
|
|
||||||
key: 'level_id'
|
|
||||||
}
|
|
||||||
},
|
|
||||||
levelNumber: {
|
|
||||||
type: DataTypes.INTEGER,
|
|
||||||
allowNull: true,
|
|
||||||
field: 'level_number'
|
|
||||||
},
|
|
||||||
levelName: {
|
|
||||||
type: DataTypes.STRING(255),
|
|
||||||
allowNull: true,
|
|
||||||
field: 'level_name'
|
|
||||||
},
|
|
||||||
version: {
|
|
||||||
type: DataTypes.INTEGER,
|
|
||||||
allowNull: false
|
|
||||||
},
|
|
||||||
snapshotType: {
|
|
||||||
type: DataTypes.ENUM('PROPOSAL', 'COMPLETION', 'INTERNAL_ORDER', 'WORKFLOW', 'APPROVE'),
|
|
||||||
allowNull: false,
|
|
||||||
field: 'snapshot_type'
|
|
||||||
},
|
|
||||||
snapshotData: {
|
|
||||||
type: DataTypes.JSONB,
|
|
||||||
allowNull: false,
|
|
||||||
field: 'snapshot_data'
|
|
||||||
},
|
|
||||||
changeReason: {
|
|
||||||
type: DataTypes.TEXT,
|
|
||||||
allowNull: true,
|
|
||||||
field: 'change_reason'
|
|
||||||
},
|
|
||||||
changedBy: {
|
|
||||||
type: DataTypes.UUID,
|
|
||||||
allowNull: false,
|
|
||||||
field: 'changed_by',
|
|
||||||
references: {
|
|
||||||
model: 'users',
|
|
||||||
key: 'user_id'
|
|
||||||
}
|
|
||||||
},
|
|
||||||
createdAt: {
|
|
||||||
type: DataTypes.DATE,
|
|
||||||
allowNull: false,
|
|
||||||
defaultValue: DataTypes.NOW,
|
|
||||||
field: 'created_at'
|
|
||||||
}
|
|
||||||
},
|
|
||||||
{
|
|
||||||
sequelize,
|
|
||||||
modelName: 'DealerClaimHistory',
|
|
||||||
tableName: 'dealer_claim_history',
|
|
||||||
timestamps: false,
|
|
||||||
indexes: [
|
|
||||||
{
|
|
||||||
fields: ['request_id', 'level_number', 'version'],
|
|
||||||
name: 'idx_history_request_level_version'
|
|
||||||
},
|
|
||||||
{
|
|
||||||
fields: ['approval_level_id', 'version'],
|
|
||||||
name: 'idx_history_level_version'
|
|
||||||
},
|
|
||||||
{
|
|
||||||
fields: ['request_id', 'snapshot_type'],
|
|
||||||
name: 'idx_history_request_type'
|
|
||||||
},
|
|
||||||
{
|
|
||||||
fields: ['snapshot_type', 'level_number'],
|
|
||||||
name: 'idx_history_type_level'
|
|
||||||
}
|
|
||||||
]
|
|
||||||
}
|
|
||||||
);
|
|
||||||
|
|
||||||
DealerClaimHistory.belongsTo(WorkflowRequest, { foreignKey: 'requestId' });
|
|
||||||
DealerClaimHistory.belongsTo(ApprovalLevel, { foreignKey: 'approvalLevelId' });
|
|
||||||
DealerClaimHistory.belongsTo(User, { as: 'changer', foreignKey: 'changedBy' });
|
|
||||||
|
|
||||||
export { DealerClaimHistory };
|
|
||||||
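Note for reviewers: a minimal sketch of how a versioned snapshot row might be written against this archived model. The recordProposalSnapshot helper, the import path, and the max('version') read-then-write are illustrative assumptions; only DealerClaimHistory, SnapshotType, and ProposalSnapshotData come from the file above.

import { DealerClaimHistory, SnapshotType, ProposalSnapshotData } from '@models/DealerClaimHistory';

// Hypothetical helper (not in this diff): append a PROPOSAL snapshot,
// bumping the per-request version counter stored in the `version` column.
export async function recordProposalSnapshot(
  requestId: string,
  changedBy: string,
  data: ProposalSnapshotData,
  changeReason?: string
): Promise<DealerClaimHistory> {
  // Highest existing version for this request/snapshot type, or null if none.
  const latest = (await DealerClaimHistory.max('version', {
    where: { requestId, snapshotType: SnapshotType.PROPOSAL }
  })) as number | null;
  return DealerClaimHistory.create({
    requestId,
    version: (latest ?? 0) + 1,
    snapshotType: SnapshotType.PROPOSAL,
    snapshotData: data, // persisted as JSONB
    changedBy,
    changeReason
  });
}

In a real service, the version lookup and insert would run inside one transaction (or use a database sequence) so that two concurrent writers cannot claim the same version.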
@ -1,111 +0,0 @@
import { DataTypes, Model, Optional } from 'sequelize';
import { sequelize } from '@config/database';
import { WorkflowRequest } from './WorkflowRequest';

interface DealerCompletionDetailsAttributes {
  completionId: string;
  requestId: string;
  activityCompletionDate: Date;
  numberOfParticipants?: number;
  totalClosedExpenses?: number;
  submittedAt?: Date;
  createdAt: Date;
  updatedAt: Date;
}

interface DealerCompletionDetailsCreationAttributes extends Optional<DealerCompletionDetailsAttributes, 'completionId' | 'numberOfParticipants' | 'totalClosedExpenses' | 'submittedAt' | 'createdAt' | 'updatedAt'> {}

class DealerCompletionDetails extends Model<DealerCompletionDetailsAttributes, DealerCompletionDetailsCreationAttributes> implements DealerCompletionDetailsAttributes {
  public completionId!: string;
  public requestId!: string;
  public activityCompletionDate!: Date;
  public numberOfParticipants?: number;
  public totalClosedExpenses?: number;
  public submittedAt?: Date;
  public createdAt!: Date;
  public updatedAt!: Date;

  public workflowRequest?: WorkflowRequest;
}

DealerCompletionDetails.init(
  {
    completionId: {
      type: DataTypes.UUID,
      defaultValue: DataTypes.UUIDV4,
      primaryKey: true,
      field: 'completion_id'
    },
    requestId: {
      type: DataTypes.UUID,
      allowNull: false,
      unique: true,
      field: 'request_id',
      references: {
        model: 'workflow_requests',
        key: 'request_id'
      }
    },
    activityCompletionDate: {
      type: DataTypes.DATEONLY,
      allowNull: false,
      field: 'activity_completion_date'
    },
    numberOfParticipants: {
      type: DataTypes.INTEGER,
      allowNull: true,
      field: 'number_of_participants'
    },
    totalClosedExpenses: {
      type: DataTypes.DECIMAL(15, 2),
      allowNull: true,
      field: 'total_closed_expenses'
    },
    submittedAt: {
      type: DataTypes.DATE,
      allowNull: true,
      field: 'submitted_at'
    },
    createdAt: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW,
      field: 'created_at'
    },
    updatedAt: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW,
      field: 'updated_at'
    }
  },
  {
    sequelize,
    modelName: 'DealerCompletionDetails',
    tableName: 'dealer_completion_details',
    timestamps: true,
    createdAt: 'created_at',
    updatedAt: 'updated_at',
    indexes: [
      {
        unique: true,
        fields: ['request_id']
      }
    ]
  }
);

DealerCompletionDetails.belongsTo(WorkflowRequest, {
  as: 'workflowRequest',
  foreignKey: 'requestId',
  targetKey: 'requestId'
});

WorkflowRequest.hasOne(DealerCompletionDetails, {
  as: 'completionDetails',
  foreignKey: 'requestId',
  sourceKey: 'requestId'
});

export { DealerCompletionDetails };
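Note for reviewers: the deleted model enforced a strict one-to-one with WorkflowRequest (the unique request_id column plus the belongsTo/hasOne pair), so completion details could be eager-loaded through the completionDetails alias. A minimal sketch under that assumption; the import paths and helper name are illustrative:

import { WorkflowRequest } from '@models/WorkflowRequest';
import { DealerCompletionDetails } from '@models/DealerCompletionDetails';

// Hypothetical lookup (not in this diff): fetch a request together with its
// single completion record via the 'completionDetails' alias defined above.
export async function getCompletionDate(requestId: string): Promise<Date | null> {
  const request = await WorkflowRequest.findByPk(requestId, {
    include: [{ model: DealerCompletionDetails, as: 'completionDetails' }]
  });
  const details = request?.get('completionDetails') as DealerCompletionDetails | undefined;
  return details?.activityCompletionDate ?? null;
}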
Some files were not shown because too many files have changed in this diff.