Compare commits

..

5 Commits

359 changed files with 32639 additions and 41514 deletions

View File

@@ -1326,9 +1326,9 @@ GCP_KEY_FILE=./config/gcp-key.json
SMTP_HOST=smtp.gmail.com SMTP_HOST=smtp.gmail.com
SMTP_PORT=587 SMTP_PORT=587
SMTP_SECURE=false SMTP_SECURE=false
SMTP_USER=notifications@{{APP_DOMAIN}} SMTP_USER=notifications@royalenfield.com
SMTP_PASSWORD=your_smtp_password SMTP_PASSWORD=your_smtp_password
EMAIL_FROM=RE Workflow System <notifications@{{APP_DOMAIN}}> EMAIL_FROM=RE Workflow System <notifications@royalenfield.com>
# AI Service (for conclusion generation) # AI Service (for conclusion generation)
AI_API_KEY=your_ai_api_key AI_API_KEY=your_ai_api_key

View File

@@ -155,13 +155,13 @@ export async function calculateBusinessDays(
2. ✅ Imported `calculateElapsedWorkingHours`, `addWorkingHours`, `addWorkingHoursExpress` from `@utils/tatTimeUtils` 2. ✅ Imported `calculateElapsedWorkingHours`, `addWorkingHours`, `addWorkingHoursExpress` from `@utils/tatTimeUtils`
3. ✅ Replaced lines 64-65 with proper working hours calculation (now lines 66-77) 3. ✅ Replaced lines 64-65 with proper working hours calculation (now lines 66-77)
4. ✅ Gets priority from workflow 4. ✅ Gets priority from workflow
5. Done: Test TAT breach alerts 5. **TODO:** Test TAT breach alerts
### Step 2: Add Business Days Function ✅ **DONE** ### Step 2: Add Business Days Function ✅ **DONE**
1. ✅ Opened `Re_Backend/src/utils/tatTimeUtils.ts` 1. ✅ Opened `Re_Backend/src/utils/tatTimeUtils.ts`
2. ✅ Added `calculateBusinessDays()` function (lines 697-758) 2. ✅ Added `calculateBusinessDays()` function (lines 697-758)
3. ✅ Exported the function 3. ✅ Exported the function
4. Done: Test with various date ranges 4. **TODO:** Test with various date ranges
### Step 3: Update Workflow Aging Report ✅ **DONE** ### Step 3: Update Workflow Aging Report ✅ **DONE**
1. ✅ Built report endpoint using `calculateBusinessDays()` 1. ✅ Built report endpoint using `calculateBusinessDays()`

View File

@@ -19,10 +19,10 @@ This command will output something like:
``` ```
======================================= =======================================
Public Key: Public Key:
{{VAPID_PUBLIC_KEY}} BEl62iUYgUivxIkvpY5kXK3t3b9i5X8YzA1B2C3D4E5F6G7H8I9J0K1L2M3N4O5P6Q7R8S9T0U1V2W3X4Y5Z6
Private Key: Private Key:
{{VAPID_PRIVATE_KEY}} aBcDeFgHiJkLmNoPqRsTuVwXyZ1234567890AbCdEfGhIjKlMnOpQrStUvWxYz
======================================= =======================================
``` ```
@@ -59,9 +59,9 @@ Add the generated keys to your backend `.env` file:
```env ```env
# Notification Service Worker credentials (Web Push / VAPID) # Notification Service Worker credentials (Web Push / VAPID)
VAPID_PUBLIC_KEY={{VAPID_PUBLIC_KEY}} VAPID_PUBLIC_KEY=BEl62iUYgUivxIkvpY5kXK3t3b9i5X8YzA1B2C3D4E5F6G7H8I9J0K1L2M3N4O5P6Q7R8S9T0U1V2W3X4Y5Z6
VAPID_PRIVATE_KEY={{VAPID_PRIVATE_KEY}} VAPID_PRIVATE_KEY=aBcDeFgHiJkLmNoPqRsTuVwXyZ1234567890AbCdEfGhIjKlMnOpQrStUvWxYz
VAPID_CONTACT=mailto:{{ADMIN_EMAIL}} VAPID_CONTACT=mailto:admin@royalenfield.com
``` ```
**Important Notes:** **Important Notes:**
@@ -75,7 +75,7 @@ Add the **SAME** `VAPID_PUBLIC_KEY` to your frontend `.env` file:
```env ```env
# Push Notifications (Web Push / VAPID) # Push Notifications (Web Push / VAPID)
VITE_PUBLIC_VAPID_KEY={{VAPID_PUBLIC_KEY}} VITE_PUBLIC_VAPID_KEY=BEl62iUYgUivxIkvpY5kXK3t3b9i5X8YzA1B2C3D4E5F6G7H8I9J0K1L2M3N4O5P6Q7R8S9T0U1V2W3X4Y5Z6
``` ```
**Important:** **Important:**

View File

@@ -98,7 +98,7 @@ npm run dev
1. Server will start automatically 1. Server will start automatically
2. Log in via SSO 2. Log in via SSO
3. Run this SQL to make yourself admin: 3. Run this SQL to make yourself admin:
UPDATE users SET role = 'ADMIN' WHERE email = 'your-email@{{APP_DOMAIN}}'; UPDATE users SET role = 'ADMIN' WHERE email = 'your-email@royalenfield.com';
[Config Seed] ✅ Default configurations seeded successfully (30 settings) [Config Seed] ✅ Default configurations seeded successfully (30 settings)
info: ✅ Server started successfully on port 5000 info: ✅ Server started successfully on port 5000
@@ -112,7 +112,7 @@ psql -d royal_enfield_workflow
UPDATE users UPDATE users
SET role = 'ADMIN' SET role = 'ADMIN'
WHERE email = 'your-email@{{APP_DOMAIN}}'; WHERE email = 'your-email@royalenfield.com';
\q \q
``` ```

View File

@@ -471,7 +471,7 @@ The backend supports web push notifications via VAPID (Voluntary Application Ser
``` ```
VAPID_PUBLIC_KEY=<your-public-key> VAPID_PUBLIC_KEY=<your-public-key>
VAPID_PRIVATE_KEY=<your-private-key> VAPID_PRIVATE_KEY=<your-private-key>
VAPID_CONTACT=mailto:admin@{{APP_DOMAIN}} VAPID_CONTACT=mailto:admin@royalenfield.com
``` ```
3. **Add to Frontend `.env`:** 3. **Add to Frontend `.env`:**

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,114 @@
import logger from '@utils/logger';
// Special UUID for system events (login, etc.) - well-known UUID: 00000000-0000-0000-0000-000000000001
// Used as the requestId for activity entries that are not tied to a specific
// workflow request (e.g. login events), so they can share the same storage path.
export const SYSTEM_EVENT_REQUEST_ID = '00000000-0000-0000-0000-000000000001';

/**
 * A single activity log entry recorded against a workflow request.
 *
 * Entries are kept in an in-memory per-request cache and persisted to the
 * `Activity` model by `ActivityService.log()`.
 */
export type ActivityEntry = {
  // Workflow request this entry belongs to (or SYSTEM_EVENT_REQUEST_ID for system events).
  requestId: string;
  // What happened; drives category/severity inference when those are not supplied explicitly.
  type: 'created' | 'submitted' | 'assignment' | 'approval' | 'rejection' | 'status_change' | 'comment' | 'reminder' | 'document_added' | 'sla_warning' | 'ai_conclusion_generated' | 'summary_generated' | 'closed' | 'login' | 'paused' | 'resumed' | 'pause_retriggered';
  // Acting user; omitted for system-generated events (entry is then flagged isSystemEvent on persist).
  user?: { userId: string; name?: string; email?: string };
  // Event time; callers in this codebase pass new Date().toISOString() (ISO-8601).
  timestamp: string;
  // Short human-readable action label (e.g. "Approved", "Assigned to approver").
  action: string;
  // Full human-readable description of the event.
  details: string;
  // Arbitrary extra data persisted alongside the entry.
  metadata?: any;
  // Client IP of the originating request, when available.
  ipAddress?: string;
  // Client user agent of the originating request, when available.
  userAgent?: string;
  // Explicit category (e.g. 'WORKFLOW'); inferred from `type` when omitted.
  category?: string;
  // Explicit severity (e.g. 'INFO', 'WARNING'); inferred from `type` when omitted.
  severity?: string;
};
/**
 * Records workflow request activity (approvals, comments, pauses, logins, ...).
 *
 * Entries are appended to an in-memory per-request cache (read back via
 * `get()`) and persisted to the `Activity` model on a best-effort basis:
 * a database failure is logged but never propagated to the caller.
 */
class ActivityService {
  // In-memory activity entries keyed by requestId.
  private byRequest: Map<string, ActivityEntry[]> = new Map();

  /**
   * Infers the high-level category for an activity type.
   *
   * The map is typed as Record<ActivityEntry['type'], string> so that any
   * activity type missing from it is a compile-time error.
   */
  private inferCategory(type: ActivityEntry['type']): string {
    const categoryMap: Record<ActivityEntry['type'], string> = {
      'created': 'WORKFLOW',
      'submitted': 'WORKFLOW',
      'approval': 'WORKFLOW',
      'rejection': 'WORKFLOW',
      'status_change': 'WORKFLOW',
      'assignment': 'WORKFLOW',
      'comment': 'COLLABORATION',
      'document_added': 'DOCUMENT',
      'sla_warning': 'SYSTEM',
      'reminder': 'SYSTEM',
      'ai_conclusion_generated': 'SYSTEM',
      // Fix: was missing, so summary events fell through to 'OTHER'.
      'summary_generated': 'SYSTEM',
      'closed': 'WORKFLOW',
      'login': 'AUTHENTICATION',
      'paused': 'WORKFLOW',
      'resumed': 'WORKFLOW',
      'pause_retriggered': 'WORKFLOW'
    };
    return categoryMap[type] || 'OTHER';
  }

  /**
   * Infers the severity for an activity type.
   * Exhaustively typed for the same reason as inferCategory.
   */
  private inferSeverity(type: ActivityEntry['type']): string {
    const severityMap: Record<ActivityEntry['type'], string> = {
      'rejection': 'WARNING',
      'sla_warning': 'WARNING',
      'approval': 'INFO',
      'closed': 'INFO',
      'status_change': 'INFO',
      'login': 'INFO',
      'created': 'INFO',
      'submitted': 'INFO',
      'comment': 'INFO',
      'document_added': 'INFO',
      'assignment': 'INFO',
      'reminder': 'INFO',
      'ai_conclusion_generated': 'INFO',
      // Fix: added for exhaustiveness (matches the default it previously got).
      'summary_generated': 'INFO',
      'paused': 'WARNING',
      'resumed': 'INFO',
      'pause_retriggered': 'INFO'
    };
    return severityMap[type] || 'INFO';
  }

  /**
   * Appends an entry to the in-memory cache and persists it to the database.
   *
   * @param entry Activity details; an absent `entry.user` marks a system event.
   * @returns Resolves once persistence has been attempted (success or logged failure).
   */
  async log(entry: ActivityEntry): Promise<void> {
    const list = this.byRequest.get(entry.requestId) || [];
    list.push(entry);
    this.byRequest.set(entry.requestId, list);
    // Persist to database — best-effort; errors are logged, never rethrown.
    try {
      const { Activity } = require('@models/Activity');
      const userName = entry.user?.name || entry.user?.email || null;
      const activityData = {
        requestId: entry.requestId,
        userId: entry.user?.userId || null,
        userName: userName,
        activityType: entry.type,
        activityDescription: entry.details,
        activityCategory: entry.category || this.inferCategory(entry.type),
        severity: entry.severity || this.inferSeverity(entry.type),
        metadata: entry.metadata || null,
        isSystemEvent: !entry.user,
        ipAddress: entry.ipAddress || null, // Database accepts null
        userAgent: entry.userAgent || null, // Database accepts null
      };
      logger.info(`[Activity] Creating activity:`, {
        requestId: entry.requestId,
        userName,
        userId: entry.user?.userId,
        type: entry.type,
        ipAddress: entry.ipAddress ? '***' : null // mask raw IPs in logs
      });
      await Activity.create(activityData);
      logger.info(`[Activity] Successfully logged activity for request ${entry.requestId} by user: ${userName}`);
    } catch (error) {
      logger.error('[Activity] Failed to persist activity:', error);
    }
  }

  /**
   * Returns the cached in-memory entries for a request (empty array if none).
   * Note: only covers entries logged by this process instance, not the DB.
   */
  get(requestId: string): ActivityEntry[] {
    return this.byRequest.get(requestId) || [];
  }
}

/** Singleton used across services to record request activity. */
export const activityService = new ActivityService();

View File

@@ -0,0 +1,897 @@
import { ApprovalLevel } from '@models/ApprovalLevel';
import { WorkflowRequest } from '@models/WorkflowRequest';
import { Participant } from '@models/Participant';
import { TatAlert } from '@models/TatAlert';
import { ApprovalAction } from '../types/approval.types';
import { ApprovalStatus, WorkflowStatus } from '../types/common.types';
import { calculateTATPercentage } from '@utils/helpers';
import { calculateElapsedWorkingHours } from '@utils/tatTimeUtils';
import logger, { logWorkflowEvent, logAIEvent } from '@utils/logger';
import { Op } from 'sequelize';
import { notificationService } from './notification.service';
import { activityService } from './activity.service';
import { tatSchedulerService } from './tatScheduler.service';
import { emitToRequestRoom } from '../realtime/socket';
// Note: DealerClaimService import removed - dealer claim approvals are handled by DealerClaimApprovalService
export class ApprovalService {
async approveLevel(levelId: string, action: ApprovalAction, _userId: string, requestMetadata?: { ipAddress?: string | null; userAgent?: string | null }): Promise<ApprovalLevel | null> {
try {
const level = await ApprovalLevel.findByPk(levelId);
if (!level) return null;
// Get workflow to determine priority for working hours calculation
const wf = await WorkflowRequest.findByPk(level.requestId);
if (!wf) return null;
// Verify this is NOT a claim management workflow (should use DealerClaimApprovalService)
const workflowType = (wf as any)?.workflowType;
if (workflowType === 'CLAIM_MANAGEMENT') {
logger.error(`[Approval] Attempted to use ApprovalService for CLAIM_MANAGEMENT workflow ${level.requestId}. Use DealerClaimApprovalService instead.`);
throw new Error('ApprovalService cannot be used for CLAIM_MANAGEMENT workflows. Use DealerClaimApprovalService instead.');
}
const priority = ((wf as any)?.priority || 'standard').toString().toLowerCase();
const isPaused = (wf as any).isPaused || (level as any).isPaused;
// If paused, resume automatically when approving/rejecting (requirement 3.6)
if (isPaused) {
const { pauseService } = await import('./pause.service');
try {
await pauseService.resumeWorkflow(level.requestId, _userId);
logger.info(`[Approval] Auto-resumed paused workflow ${level.requestId} when ${action.action === 'APPROVE' ? 'approving' : 'rejecting'}`);
} catch (pauseError) {
logger.warn(`[Approval] Failed to auto-resume paused workflow:`, pauseError);
// Continue with approval/rejection even if resume fails
}
}
const now = new Date();
// Calculate elapsed hours using working hours logic (with pause handling)
// Case 1: Level is currently paused (isPaused = true)
// Case 2: Level was paused and resumed (isPaused = false but pauseElapsedHours and pauseResumeDate exist)
const isPausedLevel = (level as any).isPaused;
const wasResumed = !isPausedLevel &&
(level as any).pauseElapsedHours !== null &&
(level as any).pauseElapsedHours !== undefined &&
(level as any).pauseResumeDate !== null;
const pauseInfo = isPausedLevel ? {
// Level is currently paused - return frozen elapsed hours at pause time
isPaused: true,
pausedAt: (level as any).pausedAt,
pauseElapsedHours: (level as any).pauseElapsedHours,
pauseResumeDate: (level as any).pauseResumeDate
} : wasResumed ? {
// Level was paused but has been resumed - add pre-pause elapsed hours + time since resume
isPaused: false,
pausedAt: null,
pauseElapsedHours: Number((level as any).pauseElapsedHours), // Pre-pause elapsed hours
pauseResumeDate: (level as any).pauseResumeDate // Actual resume timestamp
} : undefined;
const elapsedHours = await calculateElapsedWorkingHours(
level.levelStartTime || level.createdAt,
now,
priority,
pauseInfo
);
const tatPercentage = calculateTATPercentage(elapsedHours, level.tatHours);
const updateData = {
status: action.action === 'APPROVE' ? ApprovalStatus.APPROVED : ApprovalStatus.REJECTED,
actionDate: now,
levelEndTime: now,
elapsedHours,
tatPercentageUsed: tatPercentage,
comments: action.comments,
rejectionReason: action.rejectionReason
};
const updatedLevel = await level.update(updateData);
// Cancel TAT jobs for the current level since it's been actioned
try {
await tatSchedulerService.cancelTatJobs(level.requestId, level.levelId);
logger.info(`[Approval] TAT jobs cancelled for level ${level.levelId}`);
} catch (tatError) {
logger.error(`[Approval] Failed to cancel TAT jobs:`, tatError);
// Don't fail the approval if TAT cancellation fails
}
// Update TAT alerts for this level to mark completion status
try {
const wasOnTime = elapsedHours <= level.tatHours;
await TatAlert.update(
{
wasCompletedOnTime: wasOnTime,
completionTime: now
},
{
where: { levelId: level.levelId }
}
);
logger.info(`[Approval] TAT alerts updated for level ${level.levelId} - Completed ${wasOnTime ? 'on time' : 'late'}`);
} catch (tatAlertError) {
logger.error(`[Approval] Failed to update TAT alerts:`, tatAlertError);
// Don't fail the approval if TAT alert update fails
}
// Handle approval - move to next level or close workflow (wf already loaded above)
if (action.action === 'APPROVE') {
// Check if this is final approval: either isFinalApprover flag is set OR all levels are approved
// This handles cases where additional approvers are added after initial approval
const allLevels = await ApprovalLevel.findAll({
where: { requestId: level.requestId },
order: [['levelNumber', 'ASC']]
});
const approvedLevelsCount = allLevels.filter((l: any) => l.status === 'APPROVED').length;
const totalLevels = allLevels.length;
const isAllLevelsApproved = approvedLevelsCount === totalLevels;
const isFinalApproval = level.isFinalApprover || isAllLevelsApproved;
if (isFinalApproval) {
// Final approver - close workflow as APPROVED
await WorkflowRequest.update(
{
status: WorkflowStatus.APPROVED,
closureDate: now,
currentLevel: (level.levelNumber || 0) + 1
},
{ where: { requestId: level.requestId } }
);
logWorkflowEvent('approved', level.requestId, {
level: level.levelNumber,
isFinalApproval: true,
status: 'APPROVED',
detectedBy: level.isFinalApprover ? 'isFinalApprover flag' : 'all levels approved check'
});
// Log final approval activity first (so it's included in AI context)
activityService.log({
requestId: level.requestId,
type: 'approval',
user: { userId: level.approverId, name: level.approverName },
timestamp: new Date().toISOString(),
action: 'Approved',
details: `Request approved and finalized by ${level.approverName || level.approverEmail}. Awaiting conclusion remark from initiator.`,
ipAddress: requestMetadata?.ipAddress || undefined,
userAgent: requestMetadata?.userAgent || undefined
});
// Generate AI conclusion remark ASYNCHRONOUSLY (don't wait)
// This runs in the background without blocking the approval response
(async () => {
try {
const { aiService } = await import('./ai.service');
const { ConclusionRemark } = await import('@models/index');
const { ApprovalLevel } = await import('@models/ApprovalLevel');
const { WorkNote } = await import('@models/WorkNote');
const { Document } = await import('@models/Document');
const { Activity } = await import('@models/Activity');
const { getConfigValue } = await import('./configReader.service');
// Check if AI features and remark generation are enabled in admin config
const aiEnabled = (await getConfigValue('AI_ENABLED', 'true'))?.toLowerCase() === 'true';
const remarkGenerationEnabled = (await getConfigValue('AI_REMARK_GENERATION_ENABLED', 'true'))?.toLowerCase() === 'true';
if (aiEnabled && remarkGenerationEnabled && aiService.isAvailable()) {
logAIEvent('request', {
requestId: level.requestId,
action: 'conclusion_generation_started',
});
// Gather context for AI generation
const approvalLevels = await ApprovalLevel.findAll({
where: { requestId: level.requestId },
order: [['levelNumber', 'ASC']]
});
const workNotes = await WorkNote.findAll({
where: { requestId: level.requestId },
order: [['createdAt', 'ASC']],
limit: 20
});
const documents = await Document.findAll({
where: { requestId: level.requestId },
order: [['uploadedAt', 'DESC']]
});
const activities = await Activity.findAll({
where: { requestId: level.requestId },
order: [['createdAt', 'ASC']],
limit: 50
});
// Build context object
const context = {
requestTitle: (wf as any).title,
requestDescription: (wf as any).description,
requestNumber: (wf as any).requestNumber,
priority: (wf as any).priority,
approvalFlow: approvalLevels.map((l: any) => {
const tatPercentage = l.tatPercentageUsed !== undefined && l.tatPercentageUsed !== null
? Number(l.tatPercentageUsed)
: (l.elapsedHours && l.tatHours ? (Number(l.elapsedHours) / Number(l.tatHours)) * 100 : 0);
return {
levelNumber: l.levelNumber,
approverName: l.approverName,
status: l.status,
comments: l.comments,
actionDate: l.actionDate,
tatHours: Number(l.tatHours || 0),
elapsedHours: Number(l.elapsedHours || 0),
tatPercentageUsed: tatPercentage
};
}),
workNotes: workNotes.map((note: any) => ({
userName: note.userName,
message: note.message,
createdAt: note.createdAt
})),
documents: documents.map((doc: any) => ({
fileName: doc.originalFileName || doc.fileName,
uploadedBy: doc.uploadedBy,
uploadedAt: doc.uploadedAt
})),
activities: activities.map((activity: any) => ({
type: activity.activityType,
action: activity.activityDescription,
details: activity.activityDescription,
timestamp: activity.createdAt
}))
};
const aiResult = await aiService.generateConclusionRemark(context);
// Check if conclusion already exists (e.g., from previous final approval before additional approver was added)
const existingConclusion = await ConclusionRemark.findOne({
where: { requestId: level.requestId }
});
if (existingConclusion) {
// Update existing conclusion with new AI-generated remark (regenerated with updated context)
await existingConclusion.update({
aiGeneratedRemark: aiResult.remark,
aiModelUsed: aiResult.provider,
aiConfidenceScore: aiResult.confidence,
// Preserve finalRemark if it was already finalized
// Only reset if it wasn't finalized yet
finalRemark: (existingConclusion as any).finalizedAt ? (existingConclusion as any).finalRemark : null,
editedBy: null,
isEdited: false,
editCount: 0,
approvalSummary: {
totalLevels: approvalLevels.length,
approvedLevels: approvalLevels.filter((l: any) => l.status === 'APPROVED').length,
averageTatUsage: approvalLevels.reduce((sum: number, l: any) =>
sum + Number(l.tatPercentageUsed || 0), 0) / (approvalLevels.length || 1)
},
documentSummary: {
totalDocuments: documents.length,
documentNames: documents.map((d: any) => d.originalFileName || d.fileName)
},
keyDiscussionPoints: aiResult.keyPoints,
generatedAt: new Date(),
// Preserve finalizedAt if it was already finalized
finalizedAt: (existingConclusion as any).finalizedAt || null
} as any);
logger.info(`[Approval] Updated existing AI conclusion for request ${level.requestId} with regenerated content (includes new approver)`);
} else {
// Create new conclusion
await ConclusionRemark.create({
requestId: level.requestId,
aiGeneratedRemark: aiResult.remark,
aiModelUsed: aiResult.provider,
aiConfidenceScore: aiResult.confidence,
finalRemark: null,
editedBy: null,
isEdited: false,
editCount: 0,
approvalSummary: {
totalLevels: approvalLevels.length,
approvedLevels: approvalLevels.filter((l: any) => l.status === 'APPROVED').length,
averageTatUsage: approvalLevels.reduce((sum: number, l: any) =>
sum + Number(l.tatPercentageUsed || 0), 0) / (approvalLevels.length || 1)
},
documentSummary: {
totalDocuments: documents.length,
documentNames: documents.map((d: any) => d.originalFileName || d.fileName)
},
keyDiscussionPoints: aiResult.keyPoints,
generatedAt: new Date(),
finalizedAt: null
} as any);
}
logAIEvent('response', {
requestId: level.requestId,
action: 'conclusion_generation_completed',
});
// Log activity
activityService.log({
requestId: level.requestId,
type: 'ai_conclusion_generated',
user: { userId: null as any, name: 'System' }, // Use null instead of 'system' for UUID field
timestamp: new Date().toISOString(),
action: 'AI Conclusion Generated',
details: 'AI-powered conclusion remark generated for review by initiator',
ipAddress: undefined, // System-generated, no IP
userAgent: undefined // System-generated, no user agent
});
} else {
// Log why AI generation was skipped
if (!aiEnabled) {
logger.info(`[Approval] AI features disabled in admin config, skipping conclusion generation for ${level.requestId}`);
} else if (!remarkGenerationEnabled) {
logger.info(`[Approval] AI remark generation disabled in admin config, skipping for ${level.requestId}`);
} else if (!aiService.isAvailable()) {
logger.warn(`[Approval] AI service unavailable for ${level.requestId}, skipping conclusion generation`);
}
}
// Auto-generate RequestSummary after final approval (system-level generation)
// This makes the summary immediately available when user views the approved request
try {
const { summaryService } = await import('./summary.service');
const summary = await summaryService.createSummary(level.requestId, 'system', {
isSystemGeneration: true
});
logger.info(`[Approval] ✅ Auto-generated summary ${(summary as any).summaryId} for approved request ${level.requestId}`);
// Log summary generation activity
activityService.log({
requestId: level.requestId,
type: 'summary_generated',
user: { userId: null as any, name: 'System' }, // Use null instead of 'system' for UUID field
timestamp: new Date().toISOString(),
action: 'Summary Auto-Generated',
details: 'Request summary auto-generated after final approval',
ipAddress: undefined,
userAgent: undefined
});
} catch (summaryError: any) {
// Log but don't fail - initiator can regenerate later
logger.error(`[Approval] Failed to auto-generate summary for ${level.requestId}:`, summaryError.message);
}
} catch (aiError) {
logAIEvent('error', {
requestId: level.requestId,
action: 'conclusion_generation_failed',
error: aiError,
});
// Silent failure - initiator can write manually
// Still try to generate summary even if AI conclusion failed
try {
const { summaryService } = await import('./summary.service');
const summary = await summaryService.createSummary(level.requestId, 'system', {
isSystemGeneration: true
});
logger.info(`[Approval] ✅ Auto-generated summary ${(summary as any).summaryId} for approved request ${level.requestId} (without AI conclusion)`);
} catch (summaryError: any) {
logger.error(`[Approval] Failed to auto-generate summary for ${level.requestId}:`, summaryError.message);
}
}
})().catch(err => {
// Catch any unhandled promise rejections
logger.error(`[Approval] Unhandled error in background AI generation:`, err);
});
// Notify initiator and all participants (including spectators) about approval
// Spectators are CC'd for transparency, similar to email CC
if (wf) {
const participants = await Participant.findAll({
where: { requestId: level.requestId }
});
const targetUserIds = new Set<string>();
targetUserIds.add((wf as any).initiatorId);
for (const p of participants as any[]) {
targetUserIds.add(p.userId); // Includes spectators
}
// Send notification to initiator about final approval (triggers email)
const initiatorId = (wf as any).initiatorId;
await notificationService.sendToUsers([initiatorId], {
title: `Request Approved - All Approvals Complete`,
body: `Your request "${(wf as any).title}" has been fully approved by all approvers. Please review and finalize the conclusion remark to close the request.`,
requestNumber: (wf as any).requestNumber,
requestId: level.requestId,
url: `/request/${(wf as any).requestNumber}`,
type: 'approval',
priority: 'HIGH',
actionRequired: true
});
// Send notification to all participants/spectators (for transparency, no action required)
const participantUserIds = Array.from(targetUserIds).filter(id => id !== initiatorId);
if (participantUserIds.length > 0) {
await notificationService.sendToUsers(participantUserIds, {
title: `Request Approved`,
body: `Request "${(wf as any).title}" has been fully approved. The initiator will finalize the conclusion remark to close the request.`,
requestNumber: (wf as any).requestNumber,
requestId: level.requestId,
url: `/request/${(wf as any).requestNumber}`,
type: 'approval_pending_closure',
priority: 'MEDIUM',
actionRequired: false
});
}
logger.info(`[Approval] ✅ Final approval complete for ${level.requestId}. Initiator and ${participants.length} participant(s) notified.`);
}
} else {
// Not final - move to next level
// Check if workflow is paused - if so, don't advance
if ((wf as any).isPaused || (wf as any).status === 'PAUSED') {
logger.warn(`[Approval] Cannot advance workflow ${level.requestId} - workflow is paused`);
throw new Error('Cannot advance workflow - workflow is currently paused. Please resume the workflow first.');
}
// Find the next PENDING level
// Custom workflows use strict sequential ordering (levelNumber + 1) to maintain intended order
// This ensures custom workflows work predictably and don't skip levels
const currentLevelNumber = level.levelNumber || 0;
logger.info(`[Approval] Finding next level after level ${currentLevelNumber} for request ${level.requestId} (Custom workflow)`);
// Use strict sequential approach for custom workflows
const nextLevel = await ApprovalLevel.findOne({
where: {
requestId: level.requestId,
levelNumber: currentLevelNumber + 1
}
});
if (!nextLevel) {
logger.info(`[Approval] Sequential level ${currentLevelNumber + 1} not found for custom workflow - this may be the final approval`);
} else if (nextLevel.status !== ApprovalStatus.PENDING) {
// Sequential level exists but not PENDING - log warning but proceed
logger.warn(`[Approval] Sequential level ${currentLevelNumber + 1} exists but status is ${nextLevel.status}, expected PENDING. Proceeding with sequential level to maintain workflow order.`);
}
const nextLevelNumber = nextLevel ? (nextLevel.levelNumber || 0) : null;
if (nextLevel) {
logger.info(`[Approval] Found next level: ${nextLevelNumber} (${(nextLevel as any).levelName || 'unnamed'}), approver: ${(nextLevel as any).approverName || (nextLevel as any).approverEmail || 'unknown'}, status: ${nextLevel.status}`);
} else {
logger.info(`[Approval] No next level found after level ${currentLevelNumber} - this may be the final approval`);
}
if (nextLevel) {
// Check if next level is paused - if so, don't activate it
if ((nextLevel as any).isPaused || (nextLevel as any).status === 'PAUSED') {
logger.warn(`[Approval] Cannot activate next level ${nextLevelNumber} - level is paused`);
throw new Error('Cannot activate next level - the next approval level is currently paused. Please resume it first.');
}
// Activate next level
await nextLevel.update({
status: ApprovalStatus.IN_PROGRESS,
levelStartTime: now,
tatStartTime: now
});
// Schedule TAT jobs for the next level
try {
// Get workflow priority for TAT calculation
const workflowPriority = (wf as any)?.priority || 'STANDARD';
await tatSchedulerService.scheduleTatJobs(
level.requestId,
(nextLevel as any).levelId,
(nextLevel as any).approverId,
Number((nextLevel as any).tatHours),
now,
workflowPriority // Pass workflow priority (EXPRESS = 24/7, STANDARD = working hours)
);
logger.info(`[Approval] TAT jobs scheduled for next level ${nextLevelNumber} (Priority: ${workflowPriority})`);
} catch (tatError) {
logger.error(`[Approval] Failed to schedule TAT jobs for next level:`, tatError);
// Don't fail the approval if TAT scheduling fails
}
// Update workflow current level (only if nextLevelNumber is not null)
if (nextLevelNumber !== null) {
await WorkflowRequest.update(
{ currentLevel: nextLevelNumber },
{ where: { requestId: level.requestId } }
);
logger.info(`Approved level ${level.levelNumber}. Activated next level ${nextLevelNumber} for workflow ${level.requestId}`);
} else {
logger.warn(`Approved level ${level.levelNumber} but no next level found - workflow may be complete`);
}
// Note: Dealer claim-specific logic (Activity Creation, E-Invoice) is handled by DealerClaimApprovalService
// This service is for custom workflows only
// Log approval activity
activityService.log({
requestId: level.requestId,
type: 'approval',
user: { userId: level.approverId, name: level.approverName },
timestamp: new Date().toISOString(),
action: 'Approved',
details: `Request approved and forwarded to ${(nextLevel as any).approverName || (nextLevel as any).approverEmail} by ${level.approverName || level.approverEmail}`,
ipAddress: requestMetadata?.ipAddress || undefined,
userAgent: requestMetadata?.userAgent || undefined
});
// Notify initiator about the approval (triggers email for regular workflows)
if (wf) {
await notificationService.sendToUsers([(wf as any).initiatorId], {
title: `Request Approved - Level ${level.levelNumber}`,
body: `Your request "${(wf as any).title}" has been approved by ${level.approverName || level.approverEmail} and forwarded to the next approver.`,
requestNumber: (wf as any).requestNumber,
requestId: level.requestId,
url: `/request/${(wf as any).requestNumber}`,
type: 'approval',
priority: 'MEDIUM'
});
}
// Notify next approver
if (wf && nextLevel) {
// Check if it's an auto-step by checking approverEmail or levelName
// Note: Activity Creation, E-Invoice Generation, and Credit Note Confirmation are now activity logs only, not approval steps
// These steps are processed automatically and should NOT trigger notifications
const isAutoStep = (nextLevel as any).approverEmail === 'system@royalenfield.com'
|| (nextLevel as any).approverName === 'System Auto-Process'
|| (nextLevel as any).approverId === 'system';
// IMPORTANT: Skip notifications and assignment logging for system/auto-steps
// System steps are any step with system@royalenfield.com
// Only send notifications to real users, NOT system processes
if (!isAutoStep && (nextLevel as any).approverId && (nextLevel as any).approverId !== 'system') {
// Additional checks: ensure approverEmail and approverName are not system-related
// This prevents notifications to system accounts even if they pass other checks
const approverEmail = (nextLevel as any).approverEmail || '';
const approverName = (nextLevel as any).approverName || '';
const isSystemEmail = approverEmail.toLowerCase() === 'system@royalenfield.com'
|| approverEmail.toLowerCase().includes('system');
const isSystemName = approverName.toLowerCase() === 'system auto-process'
|| approverName.toLowerCase().includes('system');
// EXCLUDE all system-related steps from notifications
// Only send notifications to real users, NOT system processes
if (!isSystemEmail && !isSystemName) {
// Send notification to next approver (only for real users, not system processes)
// This will send both in-app and email notifications
const nextApproverId = (nextLevel as any).approverId;
const nextApproverName = (nextLevel as any).approverName || (nextLevel as any).approverEmail || 'approver';
logger.info(`[Approval] Sending assignment notification to next approver: ${nextApproverName} (${nextApproverId}) at level ${nextLevelNumber} for request ${(wf as any).requestNumber}`);
await notificationService.sendToUsers([ nextApproverId ], {
title: `Action required: ${(wf as any).requestNumber}`,
body: `${(wf as any).title}`,
requestNumber: (wf as any).requestNumber,
requestId: (wf as any).requestId,
url: `/request/${(wf as any).requestNumber}`,
type: 'assignment',
priority: 'HIGH',
actionRequired: true
});
logger.info(`[Approval] Assignment notification sent successfully to ${nextApproverName} for level ${nextLevelNumber}`);
// Log assignment activity for the next approver
activityService.log({
requestId: level.requestId,
type: 'assignment',
user: { userId: level.approverId, name: level.approverName },
timestamp: new Date().toISOString(),
action: 'Assigned to approver',
details: `Request assigned to ${nextApproverName} for ${(nextLevel as any).levelName || `level ${nextLevelNumber}`}`,
ipAddress: requestMetadata?.ipAddress || undefined,
userAgent: requestMetadata?.userAgent || undefined
});
} else {
logger.info(`[Approval] Skipping notification for system process: ${approverEmail} at level ${nextLevelNumber}`);
}
} else {
logger.info(`[Approval] Skipping notification for auto-step at level ${nextLevelNumber}`);
}
// Note: Dealer-specific notifications (proposal/completion submissions) are handled by DealerClaimApprovalService
}
} else {
// No next level found but not final approver - this shouldn't happen
logger.warn(`No next level found for workflow ${level.requestId} after approving level ${level.levelNumber}`);
// Use current level number since there's no next level (workflow is complete)
await WorkflowRequest.update(
{
status: WorkflowStatus.APPROVED,
closureDate: now,
currentLevel: level.levelNumber || 0
},
{ where: { requestId: level.requestId } }
);
if (wf) {
await notificationService.sendToUsers([ (wf as any).initiatorId ], {
title: `Approved: ${(wf as any).requestNumber}`,
body: `${(wf as any).title}`,
requestNumber: (wf as any).requestNumber,
url: `/request/${(wf as any).requestNumber}`
});
activityService.log({
requestId: level.requestId,
type: 'approval',
user: { userId: level.approverId, name: level.approverName },
timestamp: new Date().toISOString(),
action: 'Approved',
details: `Request approved and finalized by ${level.approverName || level.approverEmail}`,
ipAddress: requestMetadata?.ipAddress || undefined,
userAgent: requestMetadata?.userAgent || undefined
});
}
}
}
} else if (action.action === 'REJECT') {
// Rejection - mark workflow as REJECTED (closure will happen when initiator finalizes conclusion)
await WorkflowRequest.update(
{
status: WorkflowStatus.REJECTED
// Note: closureDate will be set when initiator finalizes the conclusion
},
{ where: { requestId: level.requestId } }
);
// Mark all pending levels as skipped
await ApprovalLevel.update(
{
status: ApprovalStatus.SKIPPED,
levelEndTime: now
},
{
where: {
requestId: level.requestId,
status: ApprovalStatus.PENDING,
levelNumber: { [Op.gt]: level.levelNumber }
}
}
);
logWorkflowEvent('rejected', level.requestId, {
level: level.levelNumber,
status: 'REJECTED',
message: 'Awaiting closure from initiator',
});
// Log rejection activity first (so it's included in AI context)
if (wf) {
activityService.log({
requestId: level.requestId,
type: 'rejection',
user: { userId: level.approverId, name: level.approverName },
timestamp: new Date().toISOString(),
action: 'Rejected',
details: `Request rejected by ${level.approverName || level.approverEmail}. Reason: ${action.rejectionReason || action.comments || 'No reason provided'}. Awaiting closure from initiator.`,
ipAddress: requestMetadata?.ipAddress || undefined,
userAgent: requestMetadata?.userAgent || undefined
});
}
// Notify initiator and all participants
if (wf) {
const participants = await Participant.findAll({ where: { requestId: level.requestId } });
const targetUserIds = new Set<string>();
targetUserIds.add((wf as any).initiatorId);
for (const p of participants as any[]) {
targetUserIds.add(p.userId);
}
// Send notification to initiator with type 'rejection' to trigger email
await notificationService.sendToUsers([(wf as any).initiatorId], {
title: `Rejected: ${(wf as any).requestNumber}`,
body: `${(wf as any).title}`,
requestNumber: (wf as any).requestNumber,
requestId: level.requestId,
url: `/request/${(wf as any).requestNumber}`,
type: 'rejection',
priority: 'HIGH',
metadata: {
rejectionReason: action.rejectionReason || action.comments || 'No reason provided'
}
});
// Send notification to other participants (spectators) for transparency (no email, just in-app)
const participantUserIds = Array.from(targetUserIds).filter(id => id !== (wf as any).initiatorId);
if (participantUserIds.length > 0) {
await notificationService.sendToUsers(participantUserIds, {
title: `Rejected: ${(wf as any).requestNumber}`,
body: `Request "${(wf as any).title}" has been rejected.`,
requestNumber: (wf as any).requestNumber,
requestId: level.requestId,
url: `/request/${(wf as any).requestNumber}`,
type: 'status_change', // Use status_change to avoid triggering emails for participants
priority: 'MEDIUM'
});
}
}
// Generate AI conclusion remark ASYNCHRONOUSLY for rejected requests (similar to approved)
// This runs in the background without blocking the rejection response
(async () => {
try {
const { aiService } = await import('./ai.service');
const { ConclusionRemark } = await import('@models/index');
const { ApprovalLevel } = await import('@models/ApprovalLevel');
const { WorkNote } = await import('@models/WorkNote');
const { Document } = await import('@models/Document');
const { Activity } = await import('@models/Activity');
const { getConfigValue } = await import('./configReader.service');
// Check if AI features and remark generation are enabled in admin config
const aiEnabled = (await getConfigValue('AI_ENABLED', 'true'))?.toLowerCase() === 'true';
const remarkGenerationEnabled = (await getConfigValue('AI_REMARK_GENERATION_ENABLED', 'true'))?.toLowerCase() === 'true';
if (!aiEnabled || !remarkGenerationEnabled) {
logger.info(`[Approval] AI conclusion generation skipped for rejected request ${level.requestId} (AI disabled)`);
return;
}
// Check if AI service is available
const { aiService: aiSvc } = await import('./ai.service');
if (!aiSvc.isAvailable()) {
logger.warn(`[Approval] AI service unavailable for rejected request ${level.requestId}`);
return;
}
// Gather context for AI generation (similar to approved flow)
const approvalLevels = await ApprovalLevel.findAll({
where: { requestId: level.requestId },
order: [['levelNumber', 'ASC']]
});
const workNotes = await WorkNote.findAll({
where: { requestId: level.requestId },
order: [['createdAt', 'ASC']],
limit: 20
});
const documents = await Document.findAll({
where: { requestId: level.requestId },
order: [['uploadedAt', 'DESC']]
});
const activities = await Activity.findAll({
where: { requestId: level.requestId },
order: [['createdAt', 'ASC']],
limit: 50
});
// Build context object (include rejection reason)
const context = {
requestTitle: (wf as any).title,
requestDescription: (wf as any).description,
requestNumber: (wf as any).requestNumber,
priority: (wf as any).priority,
rejectionReason: action.rejectionReason || action.comments || 'No reason provided',
rejectedBy: level.approverName || level.approverEmail,
approvalFlow: approvalLevels.map((l: any) => {
const tatPercentage = l.tatPercentageUsed !== undefined && l.tatPercentageUsed !== null
? Number(l.tatPercentageUsed)
: (l.elapsedHours && l.tatHours ? (Number(l.elapsedHours) / Number(l.tatHours)) * 100 : 0);
return {
levelNumber: l.levelNumber,
approverName: l.approverName,
status: l.status,
comments: l.comments,
actionDate: l.actionDate,
tatHours: Number(l.tatHours || 0),
elapsedHours: Number(l.elapsedHours || 0),
tatPercentageUsed: tatPercentage
};
}),
workNotes: workNotes.map((note: any) => ({
userName: note.userName,
message: note.message,
createdAt: note.createdAt
})),
documents: documents.map((doc: any) => ({
fileName: doc.originalFileName || doc.fileName,
uploadedBy: doc.uploadedBy,
uploadedAt: doc.uploadedAt
})),
activities: activities.map((activity: any) => ({
type: activity.activityType,
action: activity.activityDescription,
details: activity.activityDescription,
timestamp: activity.createdAt
}))
};
logger.info(`[Approval] Generating AI conclusion for rejected request ${level.requestId}...`);
// Generate AI conclusion (will adapt to rejection context)
const aiResult = await aiSvc.generateConclusionRemark(context);
// Create or update conclusion remark
let conclusionInstance = await ConclusionRemark.findOne({ where: { requestId: level.requestId } });
const conclusionData = {
aiGeneratedRemark: aiResult.remark,
aiModelUsed: aiResult.provider,
aiConfidenceScore: aiResult.confidence,
approvalSummary: {
totalLevels: approvalLevels.length,
rejectedLevel: level.levelNumber,
rejectedBy: level.approverName || level.approverEmail,
rejectionReason: action.rejectionReason || action.comments
},
documentSummary: {
totalDocuments: documents.length,
documentNames: documents.map((d: any) => d.originalFileName || d.fileName)
},
keyDiscussionPoints: aiResult.keyPoints,
generatedAt: new Date()
};
if (conclusionInstance) {
await conclusionInstance.update(conclusionData as any);
logger.info(`[Approval] ✅ AI conclusion updated for rejected request ${level.requestId}`);
} else {
await ConclusionRemark.create({
requestId: level.requestId,
...conclusionData,
finalRemark: null,
editedBy: null,
isEdited: false,
editCount: 0,
finalizedAt: null
} as any);
logger.info(`[Approval] ✅ AI conclusion generated for rejected request ${level.requestId}`);
}
} catch (error: any) {
logger.error(`[Approval] Failed to generate AI conclusion for rejected request ${level.requestId}:`, error);
// Don't fail the rejection if AI generation fails
}
})();
}
logger.info(`Approval level ${levelId} ${action.action.toLowerCase()}ed`);
// Emit real-time update to all users viewing this request
emitToRequestRoom(level.requestId, 'request:updated', {
requestId: level.requestId,
requestNumber: (wf as any)?.requestNumber,
action: action.action,
levelNumber: level.levelNumber,
timestamp: now.toISOString()
});
return updatedLevel;
} catch (error) {
logger.error(`Failed to ${action.action.toLowerCase()} level ${levelId}:`, error);
throw new Error(`Failed to ${action.action.toLowerCase()} level`);
}
}
/**
 * Return the active approval level for a request: the lowest-numbered
 * level still in PENDING status, or null when none remain.
 *
 * NOTE(review): levels elsewhere in this codebase are moved to
 * IN_PROGRESS when activated; this query matches PENDING only — confirm
 * that is the intended definition of "current" for this service.
 *
 * @param requestId - Workflow request identifier.
 * @throws Error (generic) after logging when the lookup fails.
 */
async getCurrentApprovalLevel(requestId: string): Promise<ApprovalLevel | null> {
  try {
    const pendingLevel = await ApprovalLevel.findOne({
      order: [['levelNumber', 'ASC']],
      where: { requestId, status: ApprovalStatus.PENDING }
    });
    return pendingLevel;
  } catch (error) {
    logger.error(`Failed to get current approval level for ${requestId}:`, error);
    throw new Error('Failed to get current approval level');
  }
}
/**
 * Fetch every approval level defined for a request, ordered from the
 * first level to the last.
 *
 * @param requestId - Workflow request identifier.
 * @throws Error (generic) after logging when the lookup fails.
 */
async getApprovalLevels(requestId: string): Promise<ApprovalLevel[]> {
  try {
    const levels = await ApprovalLevel.findAll({
      order: [['levelNumber', 'ASC']],
      where: { requestId }
    });
    return levels;
  } catch (error) {
    logger.error(`Failed to get approval levels for ${requestId}:`, error);
    throw new Error('Failed to get approval levels');
  }
}
}

View File

@ -0,0 +1,160 @@
/**
* Configuration Reader Service
* Reads admin configurations from database for use in backend logic
*/
import { sequelize } from '@config/database';
import { QueryTypes } from 'sequelize';
import logger from '@utils/logger';
// In-memory cache of admin configurations, shared by all readers in this module.
// NOTE(review): a single expiry timestamp covers ALL keys — loading any one key
// from the database refreshes the window for every cached entry, so an entry
// loaded earlier can be served for up to ~2x CACHE_DURATION_MS. Confirm this
// staleness tolerance is acceptable.
let configCache: Map<string, string> = new Map();
let cacheExpiry: Date | null = null;
const CACHE_DURATION_MS = 5 * 60 * 1000; // 5 minutes
// Substrings that mark a configuration key as sensitive; matching values
// are masked before being written to logs.
const SENSITIVE_CONFIG_PATTERNS = [
  'API_KEY', 'SECRET', 'PASSWORD', 'TOKEN', 'CREDENTIAL',
  'PRIVATE', 'AUTH', 'KEY', 'VAPID'
];
/**
 * Decide whether a configuration key holds sensitive data.
 * The check is case-insensitive substring matching against
 * SENSITIVE_CONFIG_PATTERNS.
 */
function isSensitiveConfig(configKey: string): boolean {
  const normalizedKey = configKey.toUpperCase();
  for (const pattern of SENSITIVE_CONFIG_PATTERNS) {
    if (normalizedKey.includes(pattern)) {
      return true;
    }
  }
  return false;
}
/**
 * Redact a sensitive value for log output.
 *
 * Empty values and values of 8 characters or fewer are fully redacted;
 * longer values keep the first 4 and last 2 characters so operators can
 * still recognize which credential is in play.
 */
function maskSensitiveValue(value: string): string {
  const tooShortToPartiallyMask = !value || value.length <= 8;
  if (tooShortToPartiallyMask) {
    return '***REDACTED***';
  }
  const head = value.substring(0, 4);
  const tail = value.substring(value.length - 2);
  return `${head}****${tail}`;
}
/**
 * Resolve a configuration value by key.
 *
 * Serves from the in-memory cache while the shared expiry window is open;
 * otherwise queries the admin_configurations table, caches a hit, and
 * refreshes the expiry window. Sensitive values are masked before logging.
 *
 * @param configKey - Key in admin_configurations.
 * @param defaultValue - Returned (and logged) when the key is absent or the
 *   query errors; never cached, so misses re-query the database each call.
 */
export async function getConfigValue(configKey: string, defaultValue: string = ''): Promise<string> {
  try {
    // Fast path: cached entry within the (shared) expiry window.
    const cacheIsFresh = cacheExpiry !== null && new Date() < cacheExpiry;
    if (cacheIsFresh && configCache.has(configKey)) {
      return configCache.get(configKey)!;
    }
    // Slow path: read from the database.
    const rows = await sequelize.query(`
      SELECT config_value
      FROM admin_configurations
      WHERE config_key = :configKey
      LIMIT 1
    `, {
      replacements: { configKey },
      type: QueryTypes.SELECT
    });
    if (rows && rows.length > 0) {
      const value = (rows[0] as any).config_value;
      configCache.set(configKey, value);
      // Always update cache expiry when loading from database
      cacheExpiry = new Date(Date.now() + CACHE_DURATION_MS);
      // Never log secrets in clear text.
      const logValue = isSensitiveConfig(configKey) ? maskSensitiveValue(value) : value;
      logger.info(`[ConfigReader] Loaded config '${configKey}' = '${logValue}' from database (cached for 5min)`);
      return value;
    }
    // Key not present — fall back to the caller-supplied default.
    const logDefault = isSensitiveConfig(configKey) ? maskSensitiveValue(defaultValue) : defaultValue;
    logger.warn(`[ConfigReader] Config key '${configKey}' not found, using default: ${logDefault}`);
    return defaultValue;
  } catch (error) {
    // Best-effort: configuration reads must not crash callers.
    logger.error(`[ConfigReader] Error reading config '${configKey}':`, error);
    return defaultValue;
  }
}
/**
 * Read a numeric configuration value.
 *
 * @param configKey - Key in admin_configurations.
 * @param defaultValue - Returned when the key is missing or its value is
 *   not parseable as a number.
 *
 * Fix: the previous `parseFloat(value) || defaultValue` treated a stored
 * value of 0 (falsy) as "missing" and silently returned the default —
 * e.g. WORK_START_HOUR=0 would come back as 9. Use an explicit NaN check
 * so 0 is honored.
 */
export async function getConfigNumber(configKey: string, defaultValue: number): Promise<number> {
  const value = await getConfigValue(configKey, String(defaultValue));
  const parsed = parseFloat(value);
  return Number.isNaN(parsed) ? defaultValue : parsed;
}
/**
 * Read a boolean configuration value.
 *
 * Accepts 'true' / '1' (any letter case, surrounding whitespace ignored)
 * as true; everything else is false. Normalizing case makes this
 * consistent with callers elsewhere in the codebase that lowercase the
 * raw value before comparing (e.g. the AI_ENABLED checks).
 *
 * @param configKey - Key in admin_configurations.
 * @param defaultValue - Used (stringified) when the key is missing.
 */
export async function getConfigBoolean(configKey: string, defaultValue: boolean): Promise<boolean> {
  const value = await getConfigValue(configKey, String(defaultValue));
  const normalized = value.trim().toLowerCase();
  return normalized === 'true' || normalized === '1';
}
/**
 * Read the two TAT reminder thresholds (percentages) from configuration.
 * Defaults: 50 for the first reminder, 75 for the second.
 */
export async function getTatThresholds(): Promise<{ first: number; second: number }> {
  const firstThreshold = await getConfigNumber('TAT_REMINDER_THRESHOLD_1', 50);
  const secondThreshold = await getConfigNumber('TAT_REMINDER_THRESHOLD_2', 75);
  return { first: firstThreshold, second: secondThreshold };
}
/**
 * Read the configured working-day window (24h clock).
 * Defaults: 9 (start) to 18 (end).
 */
export async function getWorkingHours(): Promise<{ startHour: number; endHour: number }> {
  const start = await getConfigNumber('WORK_START_HOUR', 9);
  const end = await getConfigNumber('WORK_END_HOUR', 18);
  return { startHour: start, endHour: end };
}
/**
 * Drop every cached configuration entry and reset the expiry window.
 * Call after admin configurations change so fresh values are re-read.
 */
export function clearConfigCache(): void {
  cacheExpiry = null;
  configCache.clear();
  logger.info('[ConfigReader] Configuration cache cleared');
}
/**
 * Warm the cache by loading every row of admin_configurations at once,
 * then open a fresh expiry window. Errors are logged and swallowed so a
 * failed preload never blocks startup.
 */
export async function preloadConfigurations(): Promise<void> {
  try {
    const rows = await sequelize.query(`
      SELECT config_key, config_value
      FROM admin_configurations
    `, { type: QueryTypes.SELECT });
    for (const row of rows as any[]) {
      configCache.set(row.config_key, row.config_value);
    }
    cacheExpiry = new Date(Date.now() + CACHE_DURATION_MS);
    logger.info(`[ConfigReader] Preloaded ${rows.length} configurations into cache`);
  } catch (error) {
    logger.error('[ConfigReader] Error preloading configurations:', error);
  }
}
/**
 * Read the Vertex AI feature toggle from configuration.
 * Returns { enabled } — true by default when AI_ENABLED is unset.
 */
export async function getVertexAIConfig(): Promise<{
  enabled: boolean;
}> {
  const isEnabled = await getConfigBoolean('AI_ENABLED', true);
  return { enabled: isEnabled };
}

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,967 @@
/**
* Dealer Claim Approval Service
*
* Dedicated approval service for dealer claim workflows (CLAIM_MANAGEMENT).
* Handles dealer claim-specific logic including:
* - Dynamic approver support (additional approvers added between steps)
* - Activity Creation processing
* - Dealer-specific notifications
*
* This service is separate from ApprovalService to prevent conflicts with custom workflows.
*/
import { ApprovalLevel } from '@models/ApprovalLevel';
import { WorkflowRequest } from '@models/WorkflowRequest';
import { User } from '@models/User';
import { ApprovalAction } from '../types/approval.types';
import { ApprovalStatus, WorkflowStatus } from '../types/common.types';
import { calculateTATPercentage } from '@utils/helpers';
import { calculateElapsedWorkingHours } from '@utils/tatTimeUtils';
import logger from '@utils/logger';
import { Op } from 'sequelize';
import { notificationMongoService } from './notification.mongo.service';
import { activityService } from './activity.service';
import { tatSchedulerService } from './tatScheduler.service';
import { DealerClaimService } from './dealerClaim.service';
import { emitToRequestRoom } from '../realtime/socket';
export class DealerClaimApprovalService {
// Construct DealerClaimService on demand rather than at module load, to
// avoid the circular dependency between the two services (see file header).
private getDealerClaimService(): DealerClaimService {
  const service = new DealerClaimService();
  return service;
}
/**
* Approve a level in a dealer claim workflow
* Handles dealer claim-specific logic including dynamic approvers and activity creation
*/
async approveLevel(
levelId: string,
action: ApprovalAction,
userId: string,
requestMetadata?: { ipAddress?: string | null; userAgent?: string | null }
): Promise<ApprovalLevel | null> {
try {
const level = await ApprovalLevel.findByPk(levelId);
if (!level) return null;
// Get workflow to determine priority for working hours calculation
const wf = await WorkflowRequest.findByPk(level.requestId);
if (!wf) return null;
// Verify this is a claim management workflow
const workflowType = (wf as any)?.workflowType;
if (workflowType !== 'CLAIM_MANAGEMENT') {
logger.warn(`[DealerClaimApproval] Attempted to use DealerClaimApprovalService for non-claim-management workflow ${level.requestId}. Workflow type: ${workflowType}`);
throw new Error('DealerClaimApprovalService can only be used for CLAIM_MANAGEMENT workflows');
}
const priority = ((wf as any)?.priority || 'standard').toString().toLowerCase();
const isPaused = (wf as any).isPaused || (level as any).isPaused;
// If paused, resume automatically when approving/rejecting
if (isPaused) {
const { pauseService } = await import('./pause.service');
try {
await pauseService.resumeWorkflow(level.requestId, userId);
logger.info(`[DealerClaimApproval] Auto-resumed paused workflow ${level.requestId} when ${action.action === 'APPROVE' ? 'approving' : 'rejecting'}`);
} catch (pauseError) {
logger.warn(`[DealerClaimApproval] Failed to auto-resume paused workflow:`, pauseError);
// Continue with approval/rejection even if resume fails
}
}
const now = new Date();
// Calculate elapsed hours using working hours logic (with pause handling)
const isPausedLevel = (level as any).isPaused;
const wasResumed = !isPausedLevel &&
(level as any).pauseElapsedHours !== null &&
(level as any).pauseElapsedHours !== undefined &&
(level as any).pauseResumeDate !== null;
const pauseInfo = isPausedLevel ? {
// Level is currently paused - return frozen elapsed hours at pause time
isPaused: true,
pausedAt: (level as any).pausedAt,
pauseElapsedHours: (level as any).pauseElapsedHours,
pauseResumeDate: (level as any).pauseResumeDate
} : wasResumed ? {
// Level was paused but has been resumed - add pre-pause elapsed hours + time since resume
isPaused: false,
pausedAt: null,
pauseElapsedHours: Number((level as any).pauseElapsedHours), // Pre-pause elapsed hours
pauseResumeDate: (level as any).pauseResumeDate // Actual resume timestamp
} : undefined;
const elapsedHours = await calculateElapsedWorkingHours(
(level as any).levelStartTime || (level as any).tatStartTime || now,
now,
priority,
pauseInfo
);
const tatPercentage = calculateTATPercentage(elapsedHours, level.tatHours);
// Handle rejection
if (action.action === 'REJECT') {
return await this.handleRejection(level, action, userId, requestMetadata, elapsedHours, tatPercentage, now);
}
logger.info(`[DealerClaimApproval] Approving level ${levelId} with action:`, JSON.stringify(action));
// Robust comment extraction
const approvalComment = action.comments || (action as any).comment || '';
// Update level status and elapsed time for approval FIRST
// Only save snapshot if the update succeeds
await level.update({
status: ApprovalStatus.APPROVED,
actionDate: now,
levelEndTime: now,
elapsedHours: elapsedHours,
tatPercentageUsed: tatPercentage,
comments: approvalComment || undefined
});
// Check if this is a dealer submission (proposal or completion) - these have their own snapshot types
const levelName = (level.levelName || '').toLowerCase();
const isDealerSubmission = levelName.includes('dealer proposal') || levelName.includes('dealer completion');
// Only save APPROVE snapshot for actual approver actions (not dealer submissions)
// Dealer submissions use PROPOSAL/COMPLETION snapshot types instead
if (!isDealerSubmission) {
try {
await this.getDealerClaimService().saveApprovalHistory(
level.requestId,
level.levelId,
level.levelNumber,
'APPROVE',
approvalComment,
undefined,
userId
);
} catch (snapshotError) {
// Log error but don't fail the approval - snapshot is for audit, not critical
logger.error(`[DealerClaimApproval] Failed to save approval history snapshot (non-critical):`, snapshotError);
}
}
// Note: We don't save workflow history for approval actions
// The approval history (saveApprovalHistory) is sufficient and includes comments
// Workflow movement information is included in the APPROVE snapshot's changeReason
// Check if this is the final approver
const allLevels = await ApprovalLevel.findAll({
where: { requestId: level.requestId }
});
const approvedCount = allLevels.filter((l: any) => l.status === ApprovalStatus.APPROVED).length;
const isFinalApprover = approvedCount === allLevels.length;
if (isFinalApprover) {
// Final approval - close workflow
await WorkflowRequest.update(
{
status: WorkflowStatus.APPROVED,
closureDate: now,
currentLevel: level.levelNumber || 0
},
{ where: { requestId: level.requestId } }
);
// Notify all participants
const participants = await import('@models/Participant').then(m => m.Participant.findAll({
where: { requestId: level.requestId, isActive: true }
}));
if (participants && participants.length > 0) {
const participantIds = participants.map((p: any) => p.userId).filter(Boolean);
await notificationService.sendToUsers(participantIds, {
title: `Request Approved: ${(wf as any).requestNumber}`,
body: `${(wf as any).title}`,
requestNumber: (wf as any).requestNumber,
requestId: level.requestId,
url: `/request/${(wf as any).requestNumber}`,
type: 'approval',
priority: 'MEDIUM'
});
logger.info(`[DealerClaimApproval] Final approval complete. ${participants.length} participant(s) notified.`);
}
} else {
// Not final - move to next level
// Check if workflow is paused - if so, don't advance
if ((wf as any).isPaused || (wf as any).status === 'PAUSED') {
logger.warn(`[DealerClaimApproval] Cannot advance workflow ${level.requestId} - workflow is paused`);
throw new Error('Cannot advance workflow - workflow is currently paused. Please resume the workflow first.');
}
// Find the next PENDING level (supports dynamically added approvers)
// Strategy: First try sequential, then find next PENDING level if sequential doesn't exist
const currentLevelNumber = level.levelNumber || 0;
logger.info(`[DealerClaimApproval] Finding next level after level ${currentLevelNumber} for request ${level.requestId}`);
// First, try sequential approach
let nextLevel = await ApprovalLevel.findOne({
where: {
requestId: level.requestId,
levelNumber: currentLevelNumber + 1
}
});
// If sequential level doesn't exist, search for next PENDING level
// This handles cases where additional approvers are added dynamically between steps
if (!nextLevel) {
logger.info(`[DealerClaimApproval] Sequential level ${currentLevelNumber + 1} not found, searching for next PENDING level (dynamic approvers)`);
nextLevel = await ApprovalLevel.findOne({
where: {
requestId: level.requestId,
levelNumber: { [Op.gt]: currentLevelNumber },
status: ApprovalStatus.PENDING
},
order: [['levelNumber', 'ASC']]
});
if (nextLevel) {
logger.info(`[DealerClaimApproval] Using fallback level ${nextLevel.levelNumber} (${(nextLevel as any).levelName || 'unnamed'})`);
}
} else if (nextLevel.status !== ApprovalStatus.PENDING) {
// Sequential level exists but not PENDING - check if it's already approved/rejected
if (nextLevel.status === ApprovalStatus.APPROVED || nextLevel.status === ApprovalStatus.REJECTED) {
logger.warn(`[DealerClaimApproval] Sequential level ${currentLevelNumber + 1} already ${nextLevel.status}. Skipping activation.`);
nextLevel = null; // Don't activate an already completed level
} else {
// Level exists but in unexpected status - log warning but proceed
logger.warn(`[DealerClaimApproval] Sequential level ${currentLevelNumber + 1} exists but status is ${nextLevel.status}, expected PENDING. Proceeding with sequential level.`);
}
}
const nextLevelNumber = nextLevel ? (nextLevel.levelNumber || 0) : null;
if (nextLevel) {
logger.info(`[DealerClaimApproval] Found next level: ${nextLevelNumber} (${(nextLevel as any).levelName || 'unnamed'}), approver: ${(nextLevel as any).approverName || (nextLevel as any).approverEmail || 'unknown'}, status: ${nextLevel.status}`);
} else {
logger.info(`[DealerClaimApproval] No next level found after level ${currentLevelNumber} - this may be the final approval`);
}
if (nextLevel) {
// Check if next level is paused - if so, don't activate it
if ((nextLevel as any).isPaused || (nextLevel as any).status === 'PAUSED') {
logger.warn(`[DealerClaimApproval] Cannot activate next level ${nextLevelNumber} - level is paused`);
throw new Error('Cannot activate next level - the next approval level is currently paused. Please resume it first.');
}
// Activate next level
await nextLevel.update({
status: ApprovalStatus.IN_PROGRESS,
levelStartTime: now,
tatStartTime: now
});
// Schedule TAT jobs for the next level
try {
const workflowPriority = (wf as any)?.priority || 'STANDARD';
await tatSchedulerService.scheduleTatJobs(
level.requestId,
(nextLevel as any).levelId,
(nextLevel as any).approverId,
Number((nextLevel as any).tatHours),
now,
workflowPriority
);
logger.info(`[DealerClaimApproval] TAT jobs scheduled for next level ${nextLevelNumber} (Priority: ${workflowPriority})`);
} catch (tatError) {
logger.error(`[DealerClaimApproval] Failed to schedule TAT jobs for next level:`, tatError);
// Don't fail the approval if TAT scheduling fails
}
// Update workflow current level
if (nextLevelNumber !== null) {
await WorkflowRequest.update(
{ currentLevel: nextLevelNumber },
{ where: { requestId: level.requestId } }
);
// Update the APPROVE snapshot's changeReason to include movement information
// This ensures the approval snapshot shows both the approval and the movement
// We don't create a separate WORKFLOW snapshot for approvals - only APPROVE snapshot
try {
const { DealerClaimHistory } = await import('@models/DealerClaimHistory');
const { SnapshotType } = await import('@models/DealerClaimHistory');
const approvalHistory = await DealerClaimHistory.findOne({
where: {
requestId: level.requestId,
approvalLevelId: level.levelId,
snapshotType: SnapshotType.APPROVE
},
order: [['createdAt', 'DESC']]
});
if (approvalHistory) {
// Use the robust approvalComment from outer scope
const updatedChangeReason = approvalComment
? `Approved by ${level.approverName || level.approverEmail}, moved to next level (${nextLevelNumber}). Comment: ${approvalComment}`
: `Approved by ${level.approverName || level.approverEmail}, moved to next level (${nextLevelNumber})`;
await approvalHistory.update({
changeReason: updatedChangeReason
});
}
} catch (updateError) {
// Log error but don't fail - this is just updating the changeReason for better display
logger.warn(`[DealerClaimApproval] Failed to update approval history changeReason (non-critical):`, updateError);
}
logger.info(`[DealerClaimApproval] Approved level ${level.levelNumber}. Activated next level ${nextLevelNumber} for workflow ${level.requestId}`);
}
// Handle dealer claim-specific step processing
const currentLevelName = (level.levelName || '').toLowerCase();
// Check by levelName first, use levelNumber only as fallback if levelName is missing
// This handles cases where additional approvers shift step numbers
const hasLevelName = level.levelName && level.levelName.trim() !== '';
const isDeptLeadApproval = hasLevelName
? currentLevelName.includes('department lead')
: (level.levelNumber === 3); // Only use levelNumber if levelName is missing
const isRequestorClaimApproval = hasLevelName
? (currentLevelName.includes('requestor') && (currentLevelName.includes('claim') || currentLevelName.includes('approval')))
: (level.levelNumber === 5); // Only use levelNumber if levelName is missing
if (isDeptLeadApproval) {
// Activity Creation is now an activity log only - process it automatically
logger.info(`[DealerClaimApproval] Department Lead approved. Processing Activity Creation as activity log.`);
try {
const dealerClaimService = new DealerClaimService();
await dealerClaimService.processActivityCreation(level.requestId);
logger.info(`[DealerClaimApproval] Activity Creation activity logged for request ${level.requestId}`);
} catch (activityError) {
logger.error(`[DealerClaimApproval] Error processing Activity Creation activity for request ${level.requestId}:`, activityError);
// Don't fail the Department Lead approval if Activity Creation logging fails
}
} else if (isRequestorClaimApproval) {
// Step 6 (System - E-Invoice Generation) is now an activity log only - process it automatically
logger.info(`[DealerClaimApproval] Requestor Claim Approval approved. Triggering DMS push for E-Invoice generation.`);
try {
// Lazy load DealerClaimService to avoid circular dependency issues during method execution
const dealerClaimService = this.getDealerClaimService();
await dealerClaimService.updateEInvoiceDetails(level.requestId);
logger.info(`[DealerClaimApproval] DMS push initiated for request ${level.requestId}`);
} catch (dmsError) {
logger.error(`[DealerClaimApproval] Error initiating DMS push for request ${level.requestId}:`, dmsError);
// Don't fail the Requestor Claim Approval if DMS push fails
}
}
// Log approval activity
activityService.log({
requestId: level.requestId,
type: 'approval',
user: { userId: level.approverId, name: level.approverName },
timestamp: new Date().toISOString(),
action: 'Approved',
details: `Request approved and forwarded to ${(nextLevel as any).approverName || (nextLevel as any).approverEmail} by ${level.approverName || level.approverEmail}`,
ipAddress: requestMetadata?.ipAddress || undefined,
userAgent: requestMetadata?.userAgent || undefined
});
// Notify initiator about the approval
// BUT skip this if it's a dealer proposal or dealer completion step - those have special notifications below
// Priority: levelName check first, then levelNumber only if levelName is missing
const hasLevelNameForApproval = level.levelName && level.levelName.trim() !== '';
const levelNameForApproval = hasLevelNameForApproval && level.levelName ? level.levelName.toLowerCase() : '';
const isDealerProposalApproval = hasLevelNameForApproval
? (levelNameForApproval.includes('dealer') && levelNameForApproval.includes('proposal'))
: (level.levelNumber === 1); // Only use levelNumber if levelName is missing
const isDealerCompletionApproval = hasLevelNameForApproval
? (levelNameForApproval.includes('dealer') && (levelNameForApproval.includes('completion') || levelNameForApproval.includes('documents')))
: (level.levelNumber === 5); // Only use levelNumber if levelName is missing
// Skip sending approval notification to initiator if they are the approver
// (they don't need to be notified that they approved their own request)
const isApproverInitiator = level.approverId && (wf as any).initiatorId && level.approverId === (wf as any).initiatorId;
if (wf && !isDealerProposalApproval && !isDealerCompletionApproval && !isApproverInitiator) {
await notificationService.sendToUsers([(wf as any).initiatorId], {
title: `Request Approved - Level ${level.levelNumber}`,
body: `Your request "${(wf as any).title}" has been approved by ${level.approverName || level.approverEmail} and forwarded to the next approver.`,
requestNumber: (wf as any).requestNumber,
requestId: level.requestId,
url: `/request/${(wf as any).requestNumber}`,
type: 'approval',
priority: 'MEDIUM'
});
} else if (isApproverInitiator) {
logger.info(`[DealerClaimApproval] Skipping approval notification to initiator - they are the approver`);
}
// Notify next approver - ALWAYS send notification when there's a next level
if (wf && nextLevel) {
const nextApproverId = (nextLevel as any).approverId;
const nextApproverEmail = (nextLevel as any).approverEmail || '';
const nextApproverName = (nextLevel as any).approverName || nextApproverEmail || 'approver';
// Check if it's an auto-step or system process
const isAutoStep = nextApproverEmail === 'system@royalenfield.com'
|| (nextLevel as any).approverName === 'System Auto-Process'
|| nextApproverId === 'system';
const isSystemEmail = nextApproverEmail.toLowerCase() === 'system@royalenfield.com'
|| nextApproverEmail.toLowerCase().includes('system');
const isSystemName = nextApproverName.toLowerCase() === 'system auto-process'
|| nextApproverName.toLowerCase().includes('system');
// Notify initiator when dealer submits documents (Dealer Proposal or Dealer Completion Documents)
// Check this BEFORE sending assignment notification to avoid duplicates
// Priority: levelName check first, then levelNumber only if levelName is missing
const hasLevelNameForNotification = level.levelName && level.levelName.trim() !== '';
const levelNameForNotification = hasLevelNameForNotification && level.levelName ? level.levelName.toLowerCase() : '';
const isDealerProposalApproval = hasLevelNameForNotification
? (levelNameForNotification.includes('dealer') && levelNameForNotification.includes('proposal'))
: (level.levelNumber === 1); // Only use levelNumber if levelName is missing
const isDealerCompletionApproval = hasLevelNameForNotification
? (levelNameForNotification.includes('dealer') && (levelNameForNotification.includes('completion') || levelNameForNotification.includes('documents')))
: (level.levelNumber === 5); // Only use levelNumber if levelName is missing
// Check if next approver is the initiator (to avoid duplicate notifications)
const isNextApproverInitiator = nextApproverId && (wf as any).initiatorId && nextApproverId === (wf as any).initiatorId;
if (isDealerProposalApproval && (wf as any).initiatorId) {
// Get dealer and proposal data for the email template
const { DealerClaimDetails } = await import('@models/DealerClaimDetails');
const { DealerProposalDetails } = await import('@models/DealerProposalDetails');
const { DealerProposalCostItem } = await import('@models/DealerProposalCostItem');
const claimDetails = await DealerClaimDetails.findOne({ where: { requestId: level.requestId } });
const proposalDetails = await DealerProposalDetails.findOne({ where: { requestId: level.requestId } });
// Get cost items if proposal exists
let costBreakup: any[] = [];
if (proposalDetails) {
const proposalId = (proposalDetails as any).proposalId || (proposalDetails as any).proposal_id;
if (proposalId) {
const costItems = await DealerProposalCostItem.findAll({
where: { proposalId },
order: [['itemOrder', 'ASC']]
});
costBreakup = costItems.map((item: any) => ({
description: item.itemDescription || item.description,
amount: Number(item.amount) || 0
}));
}
}
// Get dealer user
const dealerUser = level.approverId ? await User.findByPk(level.approverId) : null;
const dealerData = dealerUser ? dealerUser.toJSON() : {
userId: level.approverId,
email: level.approverEmail || '',
displayName: level.approverName || level.approverEmail || 'Dealer'
};
// Get next approver (could be Step 2 - Requestor Evaluation, or an additional approver if one was added between Step 1 and Step 2)
// The nextLevel is already found above using dynamic logic that handles additional approvers correctly
const nextApproverData = nextLevel ? await User.findByPk((nextLevel as any).approverId) : null;
// Check if next approver is an additional approver (handles cases where additional approvers are added between Step 1 and Step 2)
const nextLevelName = nextLevel ? ((nextLevel as any).levelName || '').toLowerCase() : '';
const isNextAdditionalApprover = nextLevelName.includes('additional approver');
// Send proposal submitted notification with proper type and metadata
// This will use the dealerProposalSubmitted template, not the multi-level approval template
await notificationService.sendToUsers([(wf as any).initiatorId], {
title: 'Proposal Submitted',
body: `Dealer ${dealerData.displayName || dealerData.email} has submitted a proposal for your claim request "${(wf as any).title}".`,
requestNumber: (wf as any).requestNumber,
requestId: (wf as any).requestId,
url: `/request/${(wf as any).requestNumber}`,
type: 'proposal_submitted',
priority: 'MEDIUM',
actionRequired: false,
metadata: {
dealerData: dealerData,
proposalData: {
totalEstimatedBudget: proposalDetails ? (proposalDetails as any).totalEstimatedBudget : 0,
expectedCompletionDate: proposalDetails ? (proposalDetails as any).expectedCompletionDate : undefined,
dealerComments: proposalDetails ? (proposalDetails as any).dealerComments : undefined,
costBreakup: costBreakup,
submittedAt: proposalDetails ? (proposalDetails as any).submittedAt : new Date(),
nextApproverIsAdditional: isNextAdditionalApprover,
nextApproverIsInitiator: isNextApproverInitiator
},
nextApproverId: nextApproverData ? nextApproverData.userId : undefined,
// Add activity information from claimDetails
activityName: claimDetails ? (claimDetails as any).activityName : undefined,
activityType: claimDetails ? (claimDetails as any).activityType : undefined
}
});
logger.info(`[DealerClaimApproval] Sent proposal_submitted notification to initiator for Dealer Proposal Submission. Next approver: ${isNextApproverInitiator ? 'Initiator (self)' : (isNextAdditionalApprover ? 'Additional Approver' : 'Step 2 (Requestor Evaluation)')}`);
} else if (isDealerCompletionApproval && (wf as any).initiatorId) {
// Get dealer and completion data for the email template
const { DealerClaimDetails } = await import('@models/DealerClaimDetails');
const { DealerCompletionDetails } = await import('@models/DealerCompletionDetails');
const { DealerCompletionExpense } = await import('@models/DealerCompletionExpense');
const claimDetails = await DealerClaimDetails.findOne({ where: { requestId: level.requestId } });
const completionDetails = await DealerCompletionDetails.findOne({ where: { requestId: level.requestId } });
// Get expense items if completion exists
let closedExpenses: any[] = [];
if (completionDetails) {
const expenses = await DealerCompletionExpense.findAll({
where: { requestId: level.requestId },
order: [['createdAt', 'ASC']]
});
closedExpenses = expenses.map((item: any) => ({
description: item.description || '',
amount: Number(item.amount) || 0
}));
}
// Get dealer user
const dealerUser = level.approverId ? await User.findByPk(level.approverId) : null;
const dealerData = dealerUser ? dealerUser.toJSON() : {
userId: level.approverId,
email: level.approverEmail || '',
displayName: level.approverName || level.approverEmail || 'Dealer'
};
// Get next approver (could be Step 5 - Requestor Claim Approval, or an additional approver if one was added between Step 4 and Step 5)
const nextApproverData = nextLevel ? await User.findByPk((nextLevel as any).approverId) : null;
// Check if next approver is an additional approver (handles cases where additional approvers are added between Step 4 and Step 5)
const nextLevelName = nextLevel ? ((nextLevel as any).levelName || '').toLowerCase() : '';
const isNextAdditionalApprover = nextLevelName.includes('additional approver');
// Check if next approver is the initiator (to show appropriate message in email)
const isNextApproverInitiator = nextApproverData && (wf as any).initiatorId && nextApproverData.userId === (wf as any).initiatorId;
// Send completion submitted notification with proper type and metadata
// This will use the completionDocumentsSubmitted template, not the multi-level approval template
await notificationService.sendToUsers([(wf as any).initiatorId], {
title: 'Completion Documents Submitted',
body: `Dealer ${dealerData.displayName || dealerData.email} has submitted completion documents for your claim request "${(wf as any).title}".`,
requestNumber: (wf as any).requestNumber,
requestId: (wf as any).requestId,
url: `/request/${(wf as any).requestNumber}`,
type: 'completion_submitted',
priority: 'MEDIUM',
actionRequired: false,
metadata: {
dealerData: dealerData,
completionData: {
activityCompletionDate: completionDetails ? (completionDetails as any).activityCompletionDate : undefined,
numberOfParticipants: completionDetails ? (completionDetails as any).numberOfParticipants : undefined,
totalClosedExpenses: completionDetails ? (completionDetails as any).totalClosedExpenses : 0,
closedExpenses: closedExpenses,
documentsCount: undefined, // Documents count can be retrieved from documents table if needed
submittedAt: completionDetails ? (completionDetails as any).submittedAt : new Date(),
nextApproverIsAdditional: isNextAdditionalApprover,
nextApproverIsInitiator: isNextApproverInitiator
},
nextApproverId: nextApproverData ? nextApproverData.userId : undefined
}
});
logger.info(`[DealerClaimApproval] Sent completion_submitted notification to initiator for Dealer Completion Documents. Next approver: ${isNextAdditionalApprover ? 'Additional Approver' : 'Step 5 (Requestor Claim Approval)'}`);
}
// Only send assignment notification to next approver if:
// 1. It's NOT a dealer proposal/completion step (those have special notifications above)
// 2. Next approver is NOT the initiator (to avoid duplicate notifications)
// 3. It's not a system/auto step
if (!isDealerProposalApproval && !isDealerCompletionApproval && !isNextApproverInitiator) {
if (!isAutoStep && !isSystemEmail && !isSystemName && nextApproverId && nextApproverId !== 'system') {
try {
logger.info(`[DealerClaimApproval] Sending assignment notification to next approver: ${nextApproverName} (${nextApproverId}) at level ${nextLevelNumber} for request ${(wf as any).requestNumber}`);
await notificationService.sendToUsers([nextApproverId], {
title: `Action required: ${(wf as any).requestNumber}`,
body: `${(wf as any).title}`,
requestNumber: (wf as any).requestNumber,
requestId: (wf as any).requestId,
url: `/request/${(wf as any).requestNumber}`,
type: 'assignment',
priority: 'HIGH',
actionRequired: true
});
logger.info(`[DealerClaimApproval] ✅ Assignment notification sent successfully to ${nextApproverName} (${nextApproverId}) for level ${nextLevelNumber}`);
// Log assignment activity for the next approver
await activityService.log({
requestId: level.requestId,
type: 'assignment',
user: { userId: level.approverId, name: level.approverName },
timestamp: new Date().toISOString(),
action: 'Assigned to approver',
details: `Request assigned to ${nextApproverName} for ${(nextLevel as any).levelName || `level ${nextLevelNumber}`}`,
ipAddress: requestMetadata?.ipAddress || undefined,
userAgent: requestMetadata?.userAgent || undefined
});
} catch (notifError) {
logger.error(`[DealerClaimApproval] ❌ Failed to send notification to next approver ${nextApproverId} at level ${nextLevelNumber}:`, notifError);
// Don't throw - continue with workflow even if notification fails
}
} else {
logger.info(`[DealerClaimApproval] ⚠️ Skipping notification for system/auto-step: ${nextApproverEmail} (${nextApproverId}) at level ${nextLevelNumber}`);
}
} else {
if (isDealerProposalApproval || isDealerCompletionApproval) {
logger.info(`[DealerClaimApproval] ⚠️ Skipping assignment notification - dealer-specific notification already sent`);
}
if (isNextApproverInitiator) {
logger.info(`[DealerClaimApproval] ⚠️ Skipping assignment notification - next approver is the initiator (already notified)`);
}
}
}
} else {
// No next level found but not final approver - this shouldn't happen
logger.warn(`[DealerClaimApproval] No next level found for workflow ${level.requestId} after approving level ${level.levelNumber}`);
await WorkflowRequest.update(
{
status: WorkflowStatus.APPROVED,
closureDate: now,
currentLevel: level.levelNumber || 0
},
{ where: { requestId: level.requestId } }
);
if (wf) {
await notificationService.sendToUsers([(wf as any).initiatorId], {
title: `Approved: ${(wf as any).requestNumber}`,
body: `${(wf as any).title}`,
requestNumber: (wf as any).requestNumber,
requestId: level.requestId,
url: `/request/${(wf as any).requestNumber}`,
type: 'approval',
priority: 'MEDIUM'
});
}
}
}
// Emit real-time update to all users viewing this request
emitToRequestRoom(level.requestId, 'request:updated', {
requestId: level.requestId,
requestNumber: (wf as any)?.requestNumber,
action: action.action,
levelNumber: level.levelNumber,
timestamp: now.toISOString()
});
logger.info(`[DealerClaimApproval] Approval level ${levelId} ${action.action.toLowerCase()}ed and socket event emitted`);
return level;
} catch (error) {
logger.error('[DealerClaimApproval] Error approving level:', error);
throw error;
}
}
/**
* Handle rejection (internal method called from approveLevel)
*/
private async handleRejection(
level: ApprovalLevel,
action: ApprovalAction,
userId: string,
requestMetadata?: { ipAddress?: string | null; userAgent?: string | null },
elapsedHours?: number,
tatPercentage?: number,
now?: Date
): Promise<ApprovalLevel | null> {
const rejectionNow = now || new Date();
const wf = await WorkflowRequest.findByPk(level.requestId);
if (!wf) return null;
// Check if this is the Department Lead approval step (Step 3)
// Robust check: check level name for variations and level number as fallback
// Default rejection logic: Return to immediately previous approval step
logger.info(`[DealerClaimApproval] Rejection for request ${level.requestId} by level ${level.levelNumber}. Finding previous step to return to.`);
// Save approval history (rejection) BEFORE updating level
await this.getDealerClaimService().saveApprovalHistory(
level.requestId,
level.levelId,
level.levelNumber,
'REJECT',
action.comments || '',
action.rejectionReason || undefined,
userId
);
// Find all levels to determine previous step
const allLevels = await ApprovalLevel.findAll({
where: { requestId: level.requestId },
order: [['levelNumber', 'ASC']]
});
// Find the immediately previous approval level
const currentLevelNumber = level.levelNumber || 0;
const previousLevels = allLevels.filter(l => l.levelNumber < currentLevelNumber && l.levelNumber > 0);
const previousLevel = previousLevels[previousLevels.length - 1];
// Update level status - if returning to previous step, set this level to PENDING (reset)
// If no previous step (terminal rejection), set to REJECTED
const newStatus = previousLevel ? ApprovalStatus.PENDING : ApprovalStatus.REJECTED;
await level.update({
status: newStatus,
// If resetting to PENDING, clear action details so it can be acted upon again later
actionDate: previousLevel ? null : rejectionNow,
levelEndTime: previousLevel ? null : rejectionNow,
elapsedHours: previousLevel ? 0 : (elapsedHours || 0),
tatPercentageUsed: previousLevel ? 0 : (tatPercentage || 0),
comments: previousLevel ? null : (action.comments || action.rejectionReason || undefined)
} as any);
// If no previous level found (this is the first step), close the workflow
if (!previousLevel) {
logger.info(`[DealerClaimApproval] No previous level found. This is the first step. Closing workflow.`);
// Capture workflow snapshot for terminal rejection
await this.getDealerClaimService().saveWorkflowHistory(
level.requestId,
`Level ${level.levelNumber} rejected (terminal rejection - no previous step)`,
userId,
level.levelId,
level.levelNumber,
level.levelName || undefined
);
// Close workflow FIRST
await WorkflowRequest.update(
{
status: WorkflowStatus.REJECTED,
closureDate: rejectionNow
},
{ where: { requestId: level.requestId } }
);
// Capture workflow snapshot AFTER workflow is closed successfully
try {
await this.getDealerClaimService().saveWorkflowHistory(
level.requestId,
`Level ${level.levelNumber} rejected (terminal rejection - no previous step)`,
userId,
level.levelId,
level.levelNumber,
level.levelName || undefined
);
} catch (snapshotError) {
// Log error but don't fail the rejection - snapshot is for audit, not critical
logger.error(`[DealerClaimApproval] Failed to save workflow history snapshot (non-critical):`, snapshotError);
}
// Log rejection activity (terminal rejection)
activityService.log({
requestId: level.requestId,
type: 'rejection',
user: { userId: level.approverId, name: level.approverName },
timestamp: rejectionNow.toISOString(),
action: 'Rejected',
details: `Request rejected by ${level.approverName || level.approverEmail}. Reason: ${action.rejectionReason || action.comments || 'No reason provided'}`,
ipAddress: requestMetadata?.ipAddress || undefined,
userAgent: requestMetadata?.userAgent || undefined
});
// Notify initiator and participants (workflow is closed)
const participants = await import('@models/Participant').then(m => m.Participant.findAll({
where: { requestId: level.requestId, isActive: true }
}));
const userIdsToNotify = [(wf as any).initiatorId];
if (participants && participants.length > 0) {
participants.forEach((p: any) => {
if (p.userId && p.userId !== (wf as any).initiatorId) {
userIdsToNotify.push(p.userId);
}
});
}
await notificationService.sendToUsers(userIdsToNotify, {
title: `Request Rejected: ${(wf as any).requestNumber}`,
body: `${(wf as any).title} - Rejected by ${level.approverName || level.approverEmail}. Reason: ${action.rejectionReason || action.comments || 'No reason provided'}`,
requestNumber: (wf as any).requestNumber,
requestId: level.requestId,
url: `/request/${(wf as any).requestNumber}`,
type: 'rejection',
priority: 'HIGH'
});
} else {
// Return to previous step
logger.info(`[DealerClaimApproval] Returning to previous level ${previousLevel.levelNumber} (${previousLevel.levelName || 'unnamed'})`);
// Reset previous level to IN_PROGRESS so it can be acted upon again
await previousLevel.update({
status: ApprovalStatus.IN_PROGRESS,
levelStartTime: rejectionNow,
tatStartTime: rejectionNow,
actionDate: undefined,
levelEndTime: undefined,
comments: undefined,
elapsedHours: 0,
tatPercentageUsed: 0
});
// Update workflow status to IN_PROGRESS (remains active for rework)
// Set currentLevel to previous level
await WorkflowRequest.update(
{
status: WorkflowStatus.PENDING,
currentLevel: previousLevel.levelNumber
},
{ where: { requestId: level.requestId } }
);
// Log rejection activity (returned to previous step)
activityService.log({
requestId: level.requestId,
type: 'rejection',
user: { userId: level.approverId, name: level.approverName },
timestamp: rejectionNow.toISOString(),
action: 'Returned to Previous Step',
details: `Request rejected by ${level.approverName || level.approverEmail} and returned to level ${previousLevel.levelNumber}. Reason: ${action.rejectionReason || action.comments || 'No reason provided'}`,
ipAddress: requestMetadata?.ipAddress || undefined,
userAgent: requestMetadata?.userAgent || undefined
});
// Notify the approver of the previous level
if (previousLevel.approverId) {
await notificationService.sendToUsers([previousLevel.approverId], {
title: `Request Returned: ${(wf as any).requestNumber}`,
body: `Request "${(wf as any).title}" has been returned to your level for revision. Reason: ${action.rejectionReason || action.comments || 'No reason provided'}`,
requestNumber: (wf as any).requestNumber,
requestId: level.requestId,
url: `/request/${(wf as any).requestNumber}`,
type: 'assignment',
priority: 'HIGH',
actionRequired: true
});
}
// Notify initiator when request is returned (not closed)
await notificationService.sendToUsers([(wf as any).initiatorId], {
title: `Request Returned: ${(wf as any).requestNumber}`,
body: `Request "${(wf as any).title}" has been returned to level ${previousLevel.levelNumber} for revision. Reason: ${action.rejectionReason || action.comments || 'No reason provided'}`,
requestNumber: (wf as any).requestNumber,
requestId: level.requestId,
url: `/request/${(wf as any).requestNumber}`,
type: 'rejection',
priority: 'HIGH',
actionRequired: true
});
}
// Emit real-time update to all users viewing this request
emitToRequestRoom(level.requestId, 'request:updated', {
requestId: level.requestId,
requestNumber: (wf as any)?.requestNumber,
action: 'REJECT',
levelNumber: level.levelNumber,
timestamp: rejectionNow.toISOString()
});
return level;
}
/**
* Reject a level in a dealer claim workflow (legacy method - kept for backward compatibility)
*/
async rejectLevel(
levelId: string,
reason: string,
comments: string,
userId: string,
requestMetadata?: { ipAddress?: string | null; userAgent?: string | null }
): Promise<ApprovalLevel | null> {
try {
const level = await ApprovalLevel.findByPk(levelId);
if (!level) return null;
const wf = await WorkflowRequest.findByPk(level.requestId);
if (!wf) return null;
// Verify this is a claim management workflow
const workflowType = (wf as any)?.workflowType;
if (workflowType !== 'CLAIM_MANAGEMENT') {
logger.warn(`[DealerClaimApproval] Attempted to use DealerClaimApprovalService for non-claim-management workflow ${level.requestId}. Workflow type: ${workflowType}`);
throw new Error('DealerClaimApprovalService can only be used for CLAIM_MANAGEMENT workflows');
}
const now = new Date();
// Calculate elapsed hours
const priority = ((wf as any)?.priority || 'standard').toString().toLowerCase();
const isPausedLevel = (level as any).isPaused;
const wasResumed = !isPausedLevel &&
(level as any).pauseElapsedHours !== null &&
(level as any).pauseElapsedHours !== undefined &&
(level as any).pauseResumeDate !== null;
const pauseInfo = isPausedLevel ? {
// Level is currently paused - return frozen elapsed hours at pause time
isPaused: true,
pausedAt: (level as any).pausedAt,
pauseElapsedHours: (level as any).pauseElapsedHours,
pauseResumeDate: (level as any).pauseResumeDate
} : wasResumed ? {
// Level was paused but has been resumed - add pre-pause elapsed hours + time since resume
isPaused: false,
pausedAt: null,
pauseElapsedHours: Number((level as any).pauseElapsedHours), // Pre-pause elapsed hours
pauseResumeDate: (level as any).pauseResumeDate // Actual resume timestamp
} : undefined;
// Use the internal handleRejection method
const elapsedHours = await calculateElapsedWorkingHours(
(level as any).levelStartTime || (level as any).tatStartTime || now,
now,
priority,
pauseInfo
);
const tatPercentage = calculateTATPercentage(elapsedHours, level.tatHours);
return await this.handleRejection(
level,
{ action: 'REJECT', comments: comments || reason, rejectionReason: reason || comments },
userId,
requestMetadata,
elapsedHours,
tatPercentage,
now
);
} catch (error) {
logger.error('[DealerClaimApproval] Error rejecting level:', error);
throw error;
}
}
/**
* Get current approval level for a request
*/
async getCurrentApprovalLevel(requestId: string): Promise<ApprovalLevel | null> {
const workflow = await WorkflowRequest.findByPk(requestId);
if (!workflow) return null;
const currentLevel = (workflow as any).currentLevel;
if (!currentLevel) return null;
return await ApprovalLevel.findOne({
where: { requestId, levelNumber: currentLevel }
});
}
/**
* Get all approval levels for a request
*/
async getApprovalLevels(requestId: string): Promise<ApprovalLevel[]> {
return await ApprovalLevel.findAll({
where: { requestId },
order: [['levelNumber', 'ASC']]
});
}
}

View File

@ -0,0 +1,535 @@
import { Request } from 'express';
import { ClaimInvoice } from '../models/ClaimInvoice';
import { ClaimCreditNote } from '../models/ClaimCreditNote';
import { WorkflowRequest } from '../models/WorkflowRequest';
import { ApprovalLevel } from '../models/ApprovalLevel';
import { DealerClaimDetails } from '../models/DealerClaimDetails';
import { User } from '../models/User';
import { ApprovalService } from './approval.service';
import logger from '../utils/logger';
import crypto from 'crypto';
import { activityService } from './activity.service';
import { notificationService } from './notification.service';
/**
* DMS Webhook Service
* Handles processing of webhook callbacks from DMS system
*/
export class DMSWebhookService {
private webhookSecret: string;
private approvalService: ApprovalService;
constructor() {
this.webhookSecret = process.env.DMS_WEBHOOK_SECRET || '';
this.approvalService = new ApprovalService();
}
/**
* Validate webhook signature for security
* DMS should send a signature in the header that we can verify
*/
async validateWebhookSignature(req: Request): Promise<boolean> {
// If webhook secret is not configured, skip validation (for development)
if (!this.webhookSecret) {
logger.warn('[DMSWebhook] Webhook secret not configured, skipping signature validation');
return true;
}
try {
const signature = req.headers['x-dms-signature'] as string;
if (!signature) {
logger.warn('[DMSWebhook] Missing webhook signature in header');
return false;
}
// Create HMAC hash of the request body
const body = JSON.stringify(req.body);
const expectedSignature = crypto
.createHmac('sha256', this.webhookSecret)
.update(body)
.digest('hex');
// Compare signatures (use constant-time comparison to prevent timing attacks)
const isValid = crypto.timingSafeEqual(
Buffer.from(signature),
Buffer.from(expectedSignature)
);
if (!isValid) {
logger.warn('[DMSWebhook] Invalid webhook signature');
}
return isValid;
} catch (error) {
logger.error('[DMSWebhook] Error validating webhook signature:', error);
return false;
}
}
  /**
   * Process an invoice-generation webhook callback from DMS.
   *
   * Looks up the workflow request by `request_number`, then creates a new
   * ClaimInvoice when none exists yet (new flow: webhook creates the record)
   * or updates the existing one with the DMS document data, and finally logs
   * the e-invoice generation activity.
   *
   * @param payload Raw webhook body from DMS. Requires `request_number`,
   *                `document_no` and `document_type`; other fields optional.
   * @returns `{ success: true, invoiceNumber }` on success, otherwise
   *          `{ success: false, error }`. This method never throws.
   */
  async processInvoiceWebhook(payload: any): Promise<{
    success: boolean;
    invoiceNumber?: string;
    error?: string;
  }> {
    try {
      // Validate required fields
      const requiredFields = ['request_number', 'document_no', 'document_type'];
      for (const field of requiredFields) {
        if (!payload[field]) {
          return {
            success: false,
            error: `Missing required field: ${field}`,
          };
        }
      }
      // Find workflow request by request number
      const request = await WorkflowRequest.findOne({
        where: {
          requestNumber: payload.request_number,
        },
      });
      if (!request) {
        return {
          success: false,
          error: `Request not found: ${payload.request_number}`,
        };
      }
      // Find or create invoice record (at most one invoice per request)
      let invoice = await ClaimInvoice.findOne({
        where: { requestId: request.requestId },
      });
      // Create invoice if it doesn't exist (new flow: webhook creates invoice)
      if (!invoice) {
        logger.info('[DMSWebhook] Invoice record not found, creating new invoice from webhook', {
          requestNumber: payload.request_number,
        });
        invoice = await ClaimInvoice.create({
          requestId: request.requestId,
          invoiceNumber: payload.document_no,
          dmsNumber: payload.document_no,
          invoiceDate: payload.document_date ? new Date(payload.document_date) : new Date(),
          // NOTE(review): `||` means a zero total_amount falls through to claim_amount - confirm intended
          amount: payload.total_amount || payload.claim_amount,
          status: 'GENERATED',
          generatedAt: new Date(),
          invoiceFilePath: payload.invoice_file_path || null,
          errorMessage: payload.error_message || null,
          description: this.buildInvoiceDescription(payload),
        });
        logger.info('[DMSWebhook] Invoice created successfully from webhook', {
          requestNumber: payload.request_number,
          invoiceNumber: payload.document_no,
        });
      } else {
        // Update existing invoice with DMS response data
        await invoice.update({
          invoiceNumber: payload.document_no,
          dmsNumber: payload.document_no, // DMS document number
          invoiceDate: payload.document_date ? new Date(payload.document_date) : new Date(),
          amount: payload.total_amount || payload.claim_amount,
          status: 'GENERATED',
          generatedAt: new Date(),
          invoiceFilePath: payload.invoice_file_path || null,
          errorMessage: payload.error_message || null,
          // Store additional DMS data in description or separate fields if needed
          description: this.buildInvoiceDescription(payload),
        });
        logger.info('[DMSWebhook] Invoice updated successfully', {
          requestNumber: payload.request_number,
          invoiceNumber: payload.document_no,
          irnNo: payload.irn_no,
        });
      }
      // Log the e-invoice generation activity; presumably this also
      // auto-approves Step 7 and advances to Step 8 - confirm against
      // logEInvoiceGenerationActivity's implementation.
      await this.logEInvoiceGenerationActivity(request.requestId, payload.request_number);
      return {
        success: true,
        invoiceNumber: payload.document_no,
      };
    } catch (error) {
      const errorMessage = error instanceof Error ? error.message : 'Unknown error';
      logger.error('[DMSWebhook] Error processing invoice webhook:', error);
      return {
        success: false,
        error: errorMessage,
      };
    }
  }
  /**
   * Process a credit-note generation webhook callback from DMS.
   *
   * Looks up the workflow request by `request_number`, optionally links the
   * credit note to the request's invoice (a credit note may exist without an
   * invoice), then creates or updates the ClaimCreditNote record with the DMS
   * document data. In both branches the creation activity is logged and the
   * initiator is notified.
   *
   * @param payload Raw webhook body from DMS. Requires `request_number`,
   *                `document_no` and `document_type`; other fields optional.
   * @returns `{ success: true, creditNoteNumber }` on success, otherwise
   *          `{ success: false, error }`. This method never throws.
   */
  async processCreditNoteWebhook(payload: any): Promise<{
    success: boolean;
    creditNoteNumber?: string;
    error?: string;
  }> {
    try {
      // Validate required fields
      const requiredFields = ['request_number', 'document_no', 'document_type'];
      for (const field of requiredFields) {
        if (!payload[field]) {
          return {
            success: false,
            error: `Missing required field: ${field}`,
          };
        }
      }
      // Find workflow request by request number
      const request = await WorkflowRequest.findOne({
        where: {
          requestNumber: payload.request_number,
        },
      });
      if (!request) {
        return {
          success: false,
          error: `Request not found: ${payload.request_number}`,
        };
      }
      // Find invoice to link credit note (optional - credit note can exist without invoice)
      const invoice = await ClaimInvoice.findOne({
        where: { requestId: request.requestId },
      });
      // Find or create credit note record (at most one per request)
      let creditNote = await ClaimCreditNote.findOne({
        where: { requestId: request.requestId },
      });
      // Create credit note if it doesn't exist (new flow: webhook creates credit note)
      if (!creditNote) {
        logger.info('[DMSWebhook] Credit note record not found, creating new credit note from webhook', {
          requestNumber: payload.request_number,
          hasInvoice: !!invoice,
        });
        creditNote = await ClaimCreditNote.create({
          requestId: request.requestId,
          invoiceId: invoice?.invoiceId || undefined, // Allow undefined if no invoice exists
          creditNoteNumber: payload.document_no,
          creditNoteDate: payload.document_date ? new Date(payload.document_date) : new Date(),
          // NOTE(review): `||` means a zero total_amount falls through to credit_amount - confirm intended
          creditNoteAmount: payload.total_amount || payload.credit_amount,
          sapDocumentNumber: payload.sap_credit_note_no || null,
          status: 'CONFIRMED',
          confirmedAt: new Date(),
          creditNoteFilePath: payload.credit_note_file_path || null,
          errorMessage: payload.error_message || null,
          description: this.buildCreditNoteDescription(payload),
        });
        logger.info('[DMSWebhook] Credit note created successfully from webhook', {
          requestNumber: payload.request_number,
          creditNoteNumber: payload.document_no,
          hasInvoice: !!invoice,
        });
        // Log activity and notify initiator
        await this.logCreditNoteCreationActivity(
          request.requestId,
          payload.request_number,
          payload.document_no,
          creditNote.creditNoteAmount || payload.total_amount || payload.credit_amount
        );
      } else {
        // Update existing credit note with DMS response data
        await creditNote.update({
          invoiceId: invoice?.invoiceId || creditNote.invoiceId, // Preserve existing invoiceId if no invoice found
          creditNoteNumber: payload.document_no,
          creditNoteDate: payload.document_date ? new Date(payload.document_date) : new Date(),
          creditNoteAmount: payload.total_amount || payload.credit_amount,
          sapDocumentNumber: payload.sap_credit_note_no || null,
          status: 'CONFIRMED',
          confirmedAt: new Date(),
          creditNoteFilePath: payload.credit_note_file_path || null,
          errorMessage: payload.error_message || null,
          description: this.buildCreditNoteDescription(payload),
        });
        logger.info('[DMSWebhook] Credit note updated successfully', {
          requestNumber: payload.request_number,
          creditNoteNumber: payload.document_no,
          sapCreditNoteNo: payload.sap_credit_note_no,
          irnNo: payload.irn_no,
          hasInvoice: !!invoice,
        });
        // Log activity and notify initiator for updated credit note
        await this.logCreditNoteCreationActivity(
          request.requestId,
          payload.request_number,
          payload.document_no,
          creditNote.creditNoteAmount || payload.total_amount || payload.credit_amount
        );
      }
      return {
        success: true,
        creditNoteNumber: payload.document_no,
      };
    } catch (error) {
      const errorMessage = error instanceof Error ? error.message : 'Unknown error';
      logger.error('[DMSWebhook] Error processing credit note webhook:', error);
      return {
        success: false,
        error: errorMessage,
      };
    }
  }
/**
* Build invoice description from DMS payload
*/
private buildInvoiceDescription(payload: any): string {
const parts: string[] = [];
if (payload.irn_no) {
parts.push(`IRN: ${payload.irn_no}`);
}
if (payload.item_code_no) {
parts.push(`Item Code: ${payload.item_code_no}`);
}
if (payload.hsn_sac_code) {
parts.push(`HSN/SAC: ${payload.hsn_sac_code}`);
}
if (payload.cgst_amount || payload.sgst_amount || payload.igst_amount) {
parts.push(`GST - CGST: ${payload.cgst_amount || 0}, SGST: ${payload.sgst_amount || 0}, IGST: ${payload.igst_amount || 0}`);
}
return parts.length > 0 ? parts.join(' | ') : '';
}
/**
 * Compose a human-readable credit-note description from the raw DMS payload.
 * Only fields actually present on the payload are included, joined with ' | '.
 * Returns an empty string when none of the fields apply.
 */
private buildCreditNoteDescription(payload: any): string {
  // Build each candidate segment up front; absent fields become null.
  const hasGst = payload.cgst_amount || payload.sgst_amount || payload.igst_amount;
  const segments = [
    payload.irn_no ? `IRN: ${payload.irn_no}` : null,
    payload.sap_credit_note_no ? `SAP CN: ${payload.sap_credit_note_no}` : null,
    payload.credit_type ? `Credit Type: ${payload.credit_type}` : null,
    payload.item_code_no ? `Item Code: ${payload.item_code_no}` : null,
    payload.hsn_sac_code ? `HSN/SAC: ${payload.hsn_sac_code}` : null,
    hasGst
      ? `GST - CGST: ${payload.cgst_amount || 0}, SGST: ${payload.sgst_amount || 0}, IGST: ${payload.igst_amount || 0}`
      : null,
  ].filter((segment): segment is string => segment !== null);
  // join on an empty array yields '', matching the "no fields" case.
  return segments.join(' | ');
}
/**
 * Log Credit Note Creation as activity and notify initiator
 * This is called after credit note is created/updated from DMS webhook
 *
 * Applies only to CLAIM_MANAGEMENT workflows; silently returns for all other
 * workflow types, missing requests, or missing initiators. Also notifies the
 * dealer when the dealer email on the claim resolves to a system user.
 * Errors are deliberately swallowed (logged only): the credit note itself is
 * already persisted before this runs.
 *
 * @param requestId - Workflow request UUID.
 * @param requestNumber - Human-readable request number (used in messages).
 * @param creditNoteNumber - DMS credit note document number.
 * @param creditNoteAmount - Credit note amount (rendered as ₹ in messages).
 */
private async logCreditNoteCreationActivity(
requestId: string,
requestNumber: string,
creditNoteNumber: string,
creditNoteAmount: number
): Promise<void> {
try {
// Check if this is a claim management workflow
const request = await WorkflowRequest.findByPk(requestId);
if (!request) {
logger.warn('[DMSWebhook] Request not found for credit note activity logging', { requestId });
return;
}
const workflowType = (request as any).workflowType;
if (workflowType !== 'CLAIM_MANAGEMENT') {
logger.info('[DMSWebhook] Not a claim management workflow, skipping credit note activity logging', {
requestId,
workflowType,
});
return;
}
const initiatorId = (request as any).initiatorId;
if (!initiatorId) {
logger.warn('[DMSWebhook] Initiator ID not found for credit note notification', { requestId });
return;
}
// Log activity
await activityService.log({
requestId,
type: 'status_change',
user: undefined, // System event (no user means it's a system event)
timestamp: new Date().toISOString(),
action: 'Credit Note Generated',
details: `Credit note generated from DMS. Credit Note Number: ${creditNoteNumber}. Credit Note Amount: ₹${creditNoteAmount || 0}. Request: ${requestNumber}`,
category: 'credit_note',
severity: 'INFO',
});
logger.info('[DMSWebhook] Credit note activity logged successfully', {
requestId,
requestNumber,
creditNoteNumber,
});
// Get dealer information from claim details
const claimDetails = await DealerClaimDetails.findOne({
where: { requestId }
});
// Resolve the dealer's system user (if any) via the claim's dealer email;
// lookup is lowercase — presumably emails are stored lowercased (TODO confirm).
let dealerUserId: string | null = null;
if (claimDetails?.dealerEmail) {
const dealerUser = await User.findOne({
where: { email: claimDetails.dealerEmail.toLowerCase() },
attributes: ['userId'],
});
dealerUserId = dealerUser?.userId || null;
if (dealerUserId) {
logger.info('[DMSWebhook] Found dealer user for notification', {
requestId,
dealerEmail: claimDetails.dealerEmail,
dealerUserId,
});
} else {
logger.warn('[DMSWebhook] Dealer email found but user not found in system', {
requestId,
dealerEmail: claimDetails.dealerEmail,
});
}
} else {
logger.info('[DMSWebhook] No dealer email found in claim details', { requestId });
}
// Send notification to initiator
await notificationService.sendToUsers([initiatorId], {
title: 'Credit Note Generated',
body: `Credit note ${creditNoteNumber} has been generated for request ${requestNumber}. Amount: ₹${creditNoteAmount || 0}`,
requestId,
requestNumber,
url: `/request/${requestNumber}`,
type: 'status_change',
priority: 'MEDIUM',
actionRequired: false,
metadata: {
creditNoteNumber,
creditNoteAmount,
source: 'dms_webhook',
},
});
logger.info('[DMSWebhook] Credit note notification sent to initiator', {
requestId,
requestNumber,
initiatorId,
creditNoteNumber,
});
// Send notification to dealer if dealer user exists
if (dealerUserId) {
await notificationService.sendToUsers([dealerUserId], {
title: 'Credit Note Generated',
body: `Credit note ${creditNoteNumber} has been generated for your claim request ${requestNumber}. Amount: ₹${creditNoteAmount || 0}`,
requestId,
requestNumber,
url: `/request/${requestNumber}`,
type: 'status_change',
priority: 'MEDIUM',
actionRequired: false,
metadata: {
creditNoteNumber,
creditNoteAmount,
source: 'dms_webhook',
recipient: 'dealer',
},
});
logger.info('[DMSWebhook] Credit note notification sent to dealer', {
requestId,
requestNumber,
dealerUserId,
dealerEmail: claimDetails?.dealerEmail,
creditNoteNumber,
});
}
} catch (error) {
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
logger.error('[DMSWebhook] Error logging credit note activity:', {
requestId,
requestNumber,
error: errorMessage,
});
// Don't throw error - webhook processing should continue even if activity/notification fails
// The credit note is already created/updated, which is the primary goal
}
}
/**
 * Log E-Invoice Generation as activity (no longer an approval step).
 *
 * Called after an invoice is created/updated from a DMS webhook. Applies only
 * to CLAIM_MANAGEMENT workflows; other workflow types are skipped. Errors are
 * logged and swallowed deliberately: the invoice is already persisted, so
 * webhook processing must not fail on activity logging.
 *
 * @param requestId - Workflow request UUID.
 * @param requestNumber - Human-readable request number (for log context).
 */
private async logEInvoiceGenerationActivity(requestId: string, requestNumber: string): Promise<void> {
  try {
    // Check if this is a claim management workflow
    const request = await WorkflowRequest.findByPk(requestId);
    if (!request) {
      // Fixed stale message: this path logs an activity; it stopped being
      // "Step 7 auto-approval" when that step was removed (see comment below).
      logger.warn('[DMSWebhook] Request not found for E-Invoice activity logging', { requestId });
      return;
    }
    const workflowType = (request as any).workflowType;
    if (workflowType !== 'CLAIM_MANAGEMENT') {
      logger.info('[DMSWebhook] Not a claim management workflow, skipping E-Invoice activity logging', {
        requestId,
        workflowType,
      });
      return;
    }
    // E-Invoice Generation is now an activity log only, not an approval step.
    // Dynamic import — presumably to avoid a circular dependency with the
    // claim service module; confirm before converting to a static import.
    const { DealerClaimService } = await import('./dealerClaim.service');
    const dealerClaimService = new DealerClaimService();
    const invoice = await ClaimInvoice.findOne({ where: { requestId } });
    const invoiceNumber = invoice?.invoiceNumber || 'N/A';
    await dealerClaimService.logEInvoiceGenerationActivity(requestId, invoiceNumber);
    logger.info('[DMSWebhook] E-Invoice Generation activity logged successfully', {
      requestId,
      requestNumber,
      invoiceNumber,
    });
  } catch (error) {
    const errorMessage = error instanceof Error ? error.message : 'Unknown error';
    logger.error('[DMSWebhook] Error logging E-Invoice Generation activity:', {
      requestId,
      requestNumber,
      error: errorMessage,
    });
    // Don't throw error - webhook processing should continue even if activity logging fails
    // The invoice is already created/updated, which is the primary goal
  }
}
}

View File

@ -0,0 +1,221 @@
import { Holiday, HolidayType } from '@models/Holiday';
import { Op } from 'sequelize';
import logger from '@utils/logger';
import dayjs from 'dayjs';
export class HolidayService {
  /**
   * Get all active holiday dates within a date range (inclusive).
   *
   * @param startDate - Range start (Date or date-parseable string).
   * @param endDate - Range end (Date or date-parseable string).
   * @returns Array of 'YYYY-MM-DD' strings. Best-effort: returns [] on query failure.
   */
  async getHolidaysInRange(startDate: Date | string, endDate: Date | string): Promise<string[]> {
    try {
      const holidays = await Holiday.findAll({
        where: {
          holidayDate: {
            [Op.between]: [dayjs(startDate).format('YYYY-MM-DD'), dayjs(endDate).format('YYYY-MM-DD')]
          },
          isActive: true
        },
        attributes: ['holidayDate'],
        raw: true
      });
      // Raw rows may surface either camelCase or snake_case attribute names.
      return holidays.map((h: any) => h.holidayDate || h.holiday_date);
    } catch (error) {
      logger.error('[Holiday Service] Error fetching holidays:', error);
      return [];
    }
  }

  /**
   * Check whether a specific date is an active holiday.
   * Best-effort: returns false when the lookup fails.
   */
  async isHoliday(date: Date | string): Promise<boolean> {
    try {
      const dateStr = dayjs(date).format('YYYY-MM-DD');
      const holiday = await Holiday.findOne({
        where: {
          holidayDate: dateStr,
          isActive: true
        }
      });
      return !!holiday;
    } catch (error) {
      logger.error('[Holiday Service] Error checking holiday:', error);
      return false;
    }
  }

  /**
   * Check if a date is a working day (neither a weekend nor a holiday).
   * Weekends are Saturday and Sunday.
   */
  async isWorkingDay(date: Date | string): Promise<boolean> {
    const day = dayjs(date);
    const dayOfWeek = day.day(); // 0 = Sunday, 6 = Saturday
    // Weekend short-circuit avoids an unnecessary DB lookup.
    if (dayOfWeek === 0 || dayOfWeek === 6) {
      return false;
    }
    const isHol = await this.isHoliday(date);
    return !isHol;
  }

  /**
   * Add a new holiday (created as active).
   *
   * @param holidayData - Holiday fields plus the creating user's id.
   * @returns The persisted Holiday row.
   * @throws Re-throws any persistence error after logging it.
   */
  async createHoliday(holidayData: {
    holidayDate: string;
    holidayName: string;
    description?: string;
    holidayType?: HolidayType;
    isRecurring?: boolean;
    recurrenceRule?: string;
    appliesToDepartments?: string[];
    appliesToLocations?: string[];
    createdBy: string;
  }): Promise<Holiday> {
    try {
      const holiday = await Holiday.create({
        ...holidayData,
        isActive: true
      } as any);
      logger.info(`[Holiday Service] Holiday created: ${holidayData.holidayName} on ${holidayData.holidayDate}`);
      return holiday;
    } catch (error) {
      logger.error('[Holiday Service] Error creating holiday:', error);
      throw error;
    }
  }

  /**
   * Update a holiday in place.
   *
   * @throws Error('Holiday not found') when the id does not exist;
   *         re-throws persistence errors after logging.
   */
  async updateHoliday(holidayId: string, updates: any, updatedBy: string): Promise<Holiday | null> {
    try {
      const holiday = await Holiday.findByPk(holidayId);
      if (!holiday) {
        throw new Error('Holiday not found');
      }
      await holiday.update({
        ...updates,
        updatedBy,
        updatedAt: new Date()
      });
      logger.info(`[Holiday Service] Holiday updated: ${holidayId}`);
      return holiday;
    } catch (error) {
      logger.error('[Holiday Service] Error updating holiday:', error);
      throw error;
    }
  }

  /**
   * Soft-delete a holiday by flipping isActive to false.
   *
   * @returns true when a matching row was deactivated, false when no holiday
   *          matched the id. (Fix: previously returned true unconditionally,
   *          which silently hid bad ids — the affected-row count was ignored.)
   * @throws Re-throws persistence errors after logging.
   */
  async deleteHoliday(holidayId: string): Promise<boolean> {
    try {
      const [affectedCount] = await Holiday.update(
        { isActive: false },
        { where: { holidayId } }
      );
      if (affectedCount === 0) {
        logger.warn(`[Holiday Service] No holiday found to deactivate: ${holidayId}`);
        return false;
      }
      logger.info(`[Holiday Service] Holiday deactivated: ${holidayId}`);
      return true;
    } catch (error) {
      logger.error('[Holiday Service] Error deleting holiday:', error);
      throw error;
    }
  }

  /**
   * Get all active holidays, optionally restricted to a calendar year.
   * Best-effort: returns [] on failure.
   */
  async getAllActiveHolidays(year?: number): Promise<Holiday[]> {
    try {
      const whereClause: any = { isActive: true };
      if (year) {
        const startDate = `${year}-01-01`;
        const endDate = `${year}-12-31`;
        whereClause.holidayDate = {
          [Op.between]: [startDate, endDate]
        };
      }
      const holidays = await Holiday.findAll({
        where: whereClause,
        order: [['holidayDate', 'ASC']]
      });
      return holidays;
    } catch (error) {
      logger.error('[Holiday Service] Error fetching holidays:', error);
      return [];
    }
  }

  /**
   * Holidays of a given year shaped for a calendar view.
   * Best-effort: returns [] on failure.
   */
  async getHolidayCalendar(year: number): Promise<any[]> {
    try {
      const startDate = `${year}-01-01`;
      const endDate = `${year}-12-31`;
      const holidays = await Holiday.findAll({
        where: {
          holidayDate: {
            [Op.between]: [startDate, endDate]
          },
          isActive: true
        },
        order: [['holidayDate', 'ASC']]
      });
      // Tolerate both camelCase and snake_case attribute names on the rows.
      return holidays.map((h: any) => ({
        date: h.holidayDate || h.holiday_date,
        name: h.holidayName || h.holiday_name,
        description: h.description,
        type: h.holidayType || h.holiday_type,
        isRecurring: h.isRecurring || h.is_recurring
      }));
    } catch (error) {
      logger.error('[Holiday Service] Error fetching holiday calendar:', error);
      return [];
    }
  }

  /**
   * Import multiple holidays (bulk upload). Per-row failures are counted and
   * logged; the import continues past individual errors.
   *
   * @returns Counts of successfully created and failed rows.
   */
  async bulkImportHolidays(holidays: any[], createdBy: string): Promise<{ success: number; failed: number }> {
    let success = 0;
    let failed = 0;
    for (const holiday of holidays) {
      try {
        await this.createHoliday({
          ...holiday,
          createdBy
        });
        success++;
      } catch (error) {
        failed++;
        logger.error(`[Holiday Service] Failed to import holiday: ${holiday.holidayName}`, error);
      }
    }
    logger.info(`[Holiday Service] Bulk import complete: ${success} success, ${failed} failed`);
    return { success, failed };
  }
}
export const holidayService = new HolidayService();

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,764 @@
import { WorkflowRequest } from '@models/WorkflowRequest';
import { ApprovalLevel } from '@models/ApprovalLevel';
import { User } from '@models/User';
import { ApprovalStatus, WorkflowStatus } from '../types/common.types';
import { Op } from 'sequelize';
import logger from '@utils/logger';
import { tatSchedulerService } from './tatScheduler.service';
import { calculateElapsedWorkingHours } from '@utils/tatTimeUtils';
import { notificationService } from './notification.service';
import { activityService } from './activity.service';
import dayjs from 'dayjs';
import { emitToRequestRoom } from '../realtime/socket';
export class PauseService {
/**
 * Pause a workflow at a specific approval level
 * @param requestId - The workflow request ID
 * @param levelId - The approval level ID to pause (optional, pauses current level if not provided)
 * @param userId - The user ID who is pausing
 * @param reason - Reason for pausing
 * @param resumeDate - Date when workflow should auto-resume (max 1 month from now)
 * @returns The updated workflow and the paused approval level
 * @throws Error on invalid resume date, missing workflow/level, already-paused
 *         state, or when the caller is neither the level approver nor the initiator
 *
 * Side effects (in order): updates the approval level and workflow rows,
 * cancels TAT jobs, sends notifications, writes an activity entry, schedules
 * a delayed auto-resume job, and emits a socket event to the request room.
 */
async pauseWorkflow(
requestId: string,
levelId: string | null,
userId: string,
reason: string,
resumeDate: Date
): Promise<{ workflow: WorkflowRequest; level: ApprovalLevel | null }> {
try {
// Validate resume date (max 1 month from now)
const now = new Date();
const maxResumeDate = dayjs(now).add(1, 'month').toDate();
if (resumeDate > maxResumeDate) {
throw new Error('Resume date cannot be more than 1 month from now');
}
if (resumeDate <= now) {
throw new Error('Resume date must be in the future');
}
// Get workflow
const workflow = await WorkflowRequest.findByPk(requestId);
if (!workflow) {
throw new Error('Workflow not found');
}
// Check if already paused
if ((workflow as any).isPaused) {
throw new Error('Workflow is already paused');
}
// Get current approval level
let level: ApprovalLevel | null = null;
if (levelId) {
level = await ApprovalLevel.findByPk(levelId);
if (!level || (level as any).requestId !== requestId) {
throw new Error('Approval level not found or does not belong to this workflow');
}
} else {
// Get current active level
level = await ApprovalLevel.findOne({
where: {
requestId,
status: { [Op.in]: [ApprovalStatus.PENDING, ApprovalStatus.IN_PROGRESS] }
},
order: [['levelNumber', 'ASC']]
});
}
if (!level) {
throw new Error('No active approval level found to pause');
}
// Verify user is either the approver for this level OR the initiator
const isApprover = (level as any).approverId === userId;
const isInitiator = (workflow as any).initiatorId === userId;
if (!isApprover && !isInitiator) {
throw new Error('Only the assigned approver or the initiator can pause this workflow');
}
// Check if level is already paused
if ((level as any).isPaused) {
throw new Error('This approval level is already paused');
}
// Calculate elapsed hours before pause
const priority = ((workflow as any).priority || 'STANDARD').toString().toLowerCase();
// Check if this level was previously paused and resumed
// If so, we need to account for the previous pauseElapsedHours
// IMPORTANT: Convert to number to avoid string concatenation (DB returns DECIMAL as string)
const previousPauseElapsedHours = Number((level as any).pauseElapsedHours || 0);
const previousResumeDate = (level as any).pauseResumeDate;
// Fallback chain: original pause-preserved start, then level start, then TAT start, then row creation.
const originalTatStartTime = (level as any).pauseTatStartTime || (level as any).levelStartTime || (level as any).tatStartTime || (level as any).createdAt;
let elapsedHours: number;
let levelStartTimeForCalculation: Date;
if (previousPauseElapsedHours > 0 && previousResumeDate) {
// This is a second (or subsequent) pause
// Calculate: previous elapsed hours + time from resume to now
levelStartTimeForCalculation = previousResumeDate; // Start from last resume time
const timeSinceResume = await calculateElapsedWorkingHours(levelStartTimeForCalculation, now, priority);
elapsedHours = previousPauseElapsedHours + Number(timeSinceResume);
logger.info(`[Pause] Second pause detected - Previous elapsed: ${previousPauseElapsedHours}h, Since resume: ${timeSinceResume}h, Total: ${elapsedHours}h`);
} else {
// First pause - calculate from original start time
levelStartTimeForCalculation = originalTatStartTime;
elapsedHours = await calculateElapsedWorkingHours(levelStartTimeForCalculation, now, priority);
}
// Store TAT snapshot
const tatSnapshot = {
levelId: (level as any).levelId,
levelNumber: (level as any).levelNumber,
elapsedHours: Number(elapsedHours),
remainingHours: Math.max(0, Number((level as any).tatHours) - elapsedHours),
tatPercentageUsed: (Number((level as any).tatHours) > 0
? Math.min(100, Math.round((elapsedHours / Number((level as any).tatHours)) * 100))
: 0),
pausedAt: now.toISOString(),
originalTatStartTime: originalTatStartTime // Always use the original start time, not the resume time
};
// Update approval level with pause information
await level.update({
isPaused: true,
pausedAt: now,
pausedBy: userId,
pauseReason: reason,
pauseResumeDate: resumeDate,
pauseTatStartTime: originalTatStartTime, // Always preserve the original start time
pauseElapsedHours: elapsedHours,
status: ApprovalStatus.PAUSED
});
// Update workflow with pause information
// Store the current status before pausing so we can restore it on resume
const currentWorkflowStatus = (workflow as any).status;
const currentLevel = (workflow as any).currentLevel || (level as any).levelNumber;
await workflow.update({
isPaused: true,
pausedAt: now,
pausedBy: userId,
pauseReason: reason,
pauseResumeDate: resumeDate,
pauseTatSnapshot: {
...tatSnapshot,
previousStatus: currentWorkflowStatus, // Store previous status for resume
previousCurrentLevel: currentLevel // Store current level to prevent advancement
},
status: WorkflowStatus.PAUSED
// Note: We do NOT update currentLevel here - it should stay at the paused level
});
// Cancel TAT jobs for this level
await tatSchedulerService.cancelTatJobs(requestId, (level as any).levelId);
// Get user details for notifications
const user = await User.findByPk(userId);
const userName = (user as any)?.displayName || (user as any)?.email || 'User';
// Get initiator
// NOTE(review): initiator/initiatorName are never used below in this method
// (notifications use userName) — candidate for removal; verify and clean up.
const initiator = await User.findByPk((workflow as any).initiatorId);
const initiatorName = (initiator as any)?.displayName || (initiator as any)?.email || 'User';
// Send notifications
const requestNumber = (workflow as any).requestNumber;
const title = (workflow as any).title;
// Notify initiator only if someone else (approver) paused the request
// Skip notification if initiator paused their own request
if (!isInitiator) {
await notificationService.sendToUsers([(workflow as any).initiatorId], {
title: 'Workflow Paused',
body: `Your request "${title}" has been paused by ${userName}. Reason: ${reason}. Will resume on ${dayjs(resumeDate).format('MMM DD, YYYY')}.`,
requestId,
requestNumber,
url: `/request/${requestNumber}`,
type: 'workflow_paused',
priority: 'HIGH',
actionRequired: false,
metadata: {
pauseReason: reason,
resumeDate: resumeDate.toISOString(),
pausedBy: userId
}
});
}
// Notify the user who paused (confirmation) - no email for self-action
await notificationService.sendToUsers([userId], {
title: 'Workflow Paused Successfully',
body: `You have paused request "${title}". It will automatically resume on ${dayjs(resumeDate).format('MMM DD, YYYY')}.`,
requestId,
requestNumber,
url: `/request/${requestNumber}`,
type: 'status_change', // Use status_change to avoid email for self-action
priority: 'MEDIUM',
actionRequired: false
});
// If initiator paused, notify the current approver
if (isInitiator && (level as any).approverId) {
const approver = await User.findByPk((level as any).approverId);
const approverUserId = (level as any).approverId;
await notificationService.sendToUsers([approverUserId], {
title: 'Workflow Paused by Initiator',
body: `Request "${title}" has been paused by the initiator (${userName}). Reason: ${reason}. Will resume on ${dayjs(resumeDate).format('MMM DD, YYYY')}.`,
requestId,
requestNumber,
url: `/request/${requestNumber}`,
type: 'workflow_paused',
priority: 'HIGH',
actionRequired: false,
metadata: {
pauseReason: reason,
resumeDate: resumeDate.toISOString(),
pausedBy: userId
}
});
}
// Log activity
await activityService.log({
requestId,
type: 'paused',
user: { userId, name: userName },
timestamp: now.toISOString(),
action: 'Workflow Paused',
details: `Workflow paused by ${userName} at level ${(level as any).levelNumber}. Reason: ${reason}. Will resume on ${dayjs(resumeDate).format('MMM DD, YYYY')}.`,
metadata: {
levelId: (level as any).levelId,
levelNumber: (level as any).levelNumber,
resumeDate: resumeDate.toISOString()
}
});
logger.info(`[Pause] Workflow ${requestId} paused at level ${(level as any).levelNumber} by ${userId}`);
// Schedule dedicated auto-resume job for this workflow
try {
const { pauseResumeQueue } = require('../queues/pauseResumeQueue');
if (pauseResumeQueue && resumeDate) {
const delay = resumeDate.getTime() - now.getTime();
if (delay > 0) {
// Deterministic job id lets resumeWorkflow find and cancel this exact job.
const jobId = `resume-${requestId}-${(level as any).levelId}`;
await pauseResumeQueue.add(
'auto-resume-workflow',
{
type: 'auto-resume-workflow',
requestId,
levelId: (level as any).levelId,
scheduledResumeDate: resumeDate.toISOString()
},
{
jobId,
delay, // Exact delay in milliseconds until resume time
removeOnComplete: true,
removeOnFail: false
}
);
logger.info(`[Pause] Scheduled dedicated auto-resume job ${jobId} for ${resumeDate.toISOString()} (delay: ${Math.round(delay / 1000 / 60)} minutes)`);
} else {
logger.warn(`[Pause] Resume date ${resumeDate.toISOString()} is in the past, skipping job scheduling`);
}
}
} catch (queueError) {
logger.warn(`[Pause] Could not schedule dedicated auto-resume job:`, queueError);
// Continue with pause even if job scheduling fails (hourly check will handle it as fallback)
}
// Emit real-time update to all users viewing this request
emitToRequestRoom(requestId, 'request:updated', {
requestId,
requestNumber: (workflow as any).requestNumber,
action: 'PAUSE',
levelNumber: (level as any).levelNumber,
timestamp: now.toISOString()
});
return { workflow, level };
} catch (error: any) {
logger.error(`[Pause] Failed to pause workflow:`, error);
throw error;
}
}
/**
 * Resume a paused workflow
 * @param requestId - The workflow request ID
 * @param userId - The user ID who is resuming (optional, for manual resume)
 * @param notes - Optional notes for the resume action
 * @returns The updated workflow and the (formerly paused) approval level
 * @throws Error when the workflow is missing, not paused, has no paused level,
 *         or when a manual resumer is neither the level approver nor the initiator
 *
 * Side effects (in order): clears pause fields on the level and workflow,
 * cancels the scheduled auto-resume job, reschedules remaining TAT alert jobs,
 * sends notifications, writes an activity entry, and emits a socket event.
 */
async resumeWorkflow(requestId: string, userId?: string, notes?: string): Promise<{ workflow: WorkflowRequest; level: ApprovalLevel | null }> {
try {
const now = new Date();
// Get workflow
const workflow = await WorkflowRequest.findByPk(requestId);
if (!workflow) {
throw new Error('Workflow not found');
}
// Check if paused
if (!(workflow as any).isPaused) {
throw new Error('Workflow is not paused');
}
// Get paused level
const level = await ApprovalLevel.findOne({
where: {
requestId,
isPaused: true
},
order: [['levelNumber', 'ASC']]
});
if (!level) {
throw new Error('Paused approval level not found');
}
// Verify user has permission (if manual resume)
// Both initiator and current approver can resume the workflow
// (userId is undefined for the system auto-resume path, which skips this check)
if (userId) {
const isApprover = (level as any).approverId === userId;
const isInitiator = (workflow as any).initiatorId === userId;
if (!isApprover && !isInitiator) {
throw new Error('Only the assigned approver or the initiator can resume this workflow');
}
}
// Calculate remaining TAT from resume time
const priority = ((workflow as any).priority || 'STANDARD').toString().toLowerCase();
const pauseElapsedHours = Number((level as any).pauseElapsedHours || 0);
const tatHours = Number((level as any).tatHours);
const remainingHours = Math.max(0, tatHours - pauseElapsedHours);
// Get which alerts have already been sent (to avoid re-sending on resume)
const tat50AlertSent = (level as any).tat50AlertSent || false;
const tat75AlertSent = (level as any).tat75AlertSent || false;
const tatBreached = (level as any).tatBreached || false;
// Update approval level - resume TAT
// IMPORTANT: Keep pauseElapsedHours and store resumedAt (pauseResumeDate repurposed)
// This allows SLA calculation to correctly add pre-pause elapsed time
await level.update({
isPaused: false,
pausedAt: null as any,
pausedBy: null as any,
pauseReason: null as any,
pauseResumeDate: now, // Store actual resume time (repurposed from scheduled resume date)
// pauseTatStartTime: null as any, // Keep original TAT start time for reference
// pauseElapsedHours is intentionally NOT cleared - needed for SLA calculations
status: ApprovalStatus.IN_PROGRESS,
tatStartTime: now, // Reset TAT start time to now for new elapsed calculation
levelStartTime: now // This is the new start time from resume
});
// Cancel any scheduled auto-resume job (if exists)
try {
const { pauseResumeQueue } = require('../queues/pauseResumeQueue');
if (pauseResumeQueue) {
// Try to remove job by specific ID pattern first (more efficient)
const jobId = `resume-${requestId}-${(level as any).levelId}`;
try {
const specificJob = await pauseResumeQueue.getJob(jobId);
if (specificJob) {
await specificJob.remove();
logger.info(`[Pause] Cancelled scheduled auto-resume job ${jobId} for workflow ${requestId}`);
}
} catch (err) {
// Job might not exist, which is fine
}
// Also check for any other jobs for this request (fallback for old jobs)
const scheduledJobs = await pauseResumeQueue.getJobs(['delayed', 'waiting']);
const otherJobs = scheduledJobs.filter((job: any) =>
job.data.requestId === requestId && job.id !== jobId
);
for (const job of otherJobs) {
await job.remove();
logger.info(`[Pause] Cancelled legacy auto-resume job ${job.id} for workflow ${requestId}`);
}
}
} catch (queueError) {
logger.warn(`[Pause] Could not cancel scheduled auto-resume job:`, queueError);
// Continue with resume even if job cancellation fails
}
// Update workflow - restore previous status or default to PENDING
const pauseSnapshot = (workflow as any).pauseTatSnapshot || {};
const previousStatus = pauseSnapshot.previousStatus || WorkflowStatus.PENDING;
await workflow.update({
isPaused: false,
pausedAt: null as any,
pausedBy: null as any,
pauseReason: null as any,
pauseResumeDate: null as any,
pauseTatSnapshot: null as any,
status: previousStatus // Restore previous status (PENDING or IN_PROGRESS)
});
// Reschedule TAT jobs from resume time - only for alerts that haven't been sent yet
if (remainingHours > 0) {
// Calculate which thresholds are still pending based on remaining time
const percentageUsedAtPause = tatHours > 0 ? (pauseElapsedHours / tatHours) * 100 : 0;
// Only schedule jobs for thresholds that:
// 1. Haven't been sent yet
// 2. Haven't been passed yet (based on percentage used at pause)
await tatSchedulerService.scheduleTatJobsOnResume(
requestId,
(level as any).levelId,
(level as any).approverId,
remainingHours, // Remaining TAT hours
now, // Start from now
priority as any,
{
// Pass which alerts were already sent
tat50AlertSent: tat50AlertSent,
tat75AlertSent: tat75AlertSent,
tatBreached: tatBreached,
// Pass percentage used at pause to determine which thresholds are still relevant
percentageUsedAtPause: percentageUsedAtPause
}
);
}
// Get user details
const resumeUser = userId ? await User.findByPk(userId) : null;
const resumeUserName = resumeUser
? ((resumeUser as any)?.displayName || (resumeUser as any)?.email || 'User')
: 'System (Auto-resume)';
// Get initiator and paused by user
// NOTE(review): initiatorName and pausedByName are never used below —
// candidates for removal; verify and clean up.
const initiator = await User.findByPk((workflow as any).initiatorId);
const initiatorName = (initiator as any)?.displayName || (initiator as any)?.email || 'User';
const pausedByUser = (workflow as any).pausedBy
? await User.findByPk((workflow as any).pausedBy)
: null;
const pausedByName = pausedByUser
? ((pausedByUser as any)?.displayName || (pausedByUser as any)?.email || 'User')
: 'Unknown';
const requestNumber = (workflow as any).requestNumber;
const title = (workflow as any).title;
const initiatorId = (workflow as any).initiatorId;
const approverId = (level as any).approverId;
const isResumedByInitiator = userId === initiatorId;
const isResumedByApprover = userId === approverId;
// Calculate pause duration
// NOTE(review): both level.pausedAt and workflow.pausedAt were cleared by the
// updates above, so pausedAt is likely null here and this computes
// 'less than 1 hour' — consider capturing pausedAt before those updates.
const pausedAt = (level as any).pausedAt || (workflow as any).pausedAt;
const pauseDurationMs = pausedAt ? now.getTime() - new Date(pausedAt).getTime() : 0;
const pauseDurationHours = Math.round((pauseDurationMs / (1000 * 60 * 60)) * 100) / 100; // Round to 2 decimal places
const pauseDuration = pauseDurationHours > 0 ? `${pauseDurationHours} hours` : 'less than 1 hour';
// Notify initiator only if someone else resumed (or auto-resume)
// Skip if initiator resumed their own request
if (!isResumedByInitiator) {
await notificationService.sendToUsers([initiatorId], {
title: 'Workflow Resumed',
body: `Your request "${title}" has been resumed ${userId ? `by ${resumeUserName}` : 'automatically'}.`,
requestId,
requestNumber,
url: `/request/${requestNumber}`,
type: 'workflow_resumed',
priority: 'HIGH',
actionRequired: false,
metadata: {
resumedBy: userId ? { userId, name: resumeUserName } : null,
pauseDuration: pauseDuration
}
});
}
// Notify approver only if someone else resumed (or auto-resume)
// Skip if approver resumed the request themselves
if (!isResumedByApprover && approverId) {
await notificationService.sendToUsers([approverId], {
title: 'Workflow Resumed',
body: `Request "${title}" has been resumed ${userId ? `by ${resumeUserName}` : 'automatically'}. Please continue with your review.`,
requestId,
requestNumber,
url: `/request/${requestNumber}`,
type: 'workflow_resumed',
priority: 'HIGH',
actionRequired: true,
metadata: {
resumedBy: userId ? { userId, name: resumeUserName } : null,
pauseDuration: pauseDuration
}
});
}
// Send confirmation to the user who resumed (if manual resume) - no email for self-action
if (userId) {
await notificationService.sendToUsers([userId], {
title: 'Workflow Resumed Successfully',
body: `You have resumed request "${title}". ${isResumedByApprover ? 'Please continue with your review.' : ''}`,
requestId,
requestNumber,
url: `/request/${requestNumber}`,
type: 'status_change', // Use status_change to avoid email for self-action
priority: 'MEDIUM',
actionRequired: isResumedByApprover
});
}
// Log activity with notes
const resumeDetails = notes
? `Workflow resumed ${userId ? `by ${resumeUserName}` : 'automatically'} at level ${(level as any).levelNumber}. Notes: ${notes}`
: `Workflow resumed ${userId ? `by ${resumeUserName}` : 'automatically'} at level ${(level as any).levelNumber}.`;
await activityService.log({
requestId,
type: 'resumed',
user: userId ? { userId, name: resumeUserName } : undefined,
timestamp: now.toISOString(),
action: 'Workflow Resumed',
details: resumeDetails,
metadata: {
levelId: (level as any).levelId,
levelNumber: (level as any).levelNumber,
wasAutoResume: !userId,
notes: notes || null
}
});
logger.info(`[Pause] Workflow ${requestId} resumed ${userId ? `by ${userId}` : 'automatically'}`);
// Emit real-time update to all users viewing this request
emitToRequestRoom(requestId, 'request:updated', {
requestId,
requestNumber: (workflow as any).requestNumber,
action: 'RESUME',
levelNumber: (level as any).levelNumber,
timestamp: now.toISOString()
});
return { workflow, level };
} catch (error: any) {
logger.error(`[Pause] Failed to resume workflow:`, error);
throw error;
}
}
/**
 * Cancel pause (for retrigger scenario - initiator requests approver to resume)
 * Sends an actionable notification to the approver who paused the workflow and
 * records the request in the activity log. Does NOT resume the workflow itself.
 * @param requestId - The workflow request ID
 * @param userId - The initiator user ID
 * @throws Error when the workflow is missing, not paused, the caller is not
 *         the initiator, or no pausing approver is recorded
 */
async retriggerPause(requestId: string, userId: string): Promise<void> {
  try {
    // Guard clauses: workflow must exist, be paused, and caller must be the initiator.
    const workflow = await WorkflowRequest.findByPk(requestId);
    if (!workflow) {
      throw new Error('Workflow not found');
    }
    if (!(workflow as any).isPaused) {
      throw new Error('Workflow is not paused');
    }
    if ((workflow as any).initiatorId !== userId) {
      throw new Error('Only the initiator can retrigger a pause');
    }
    const pausedBy = (workflow as any).pausedBy;
    if (!pausedBy) {
      throw new Error('Cannot retrigger - no approver found who paused this workflow');
    }

    // Resolve display names for both parties involved in the retrigger.
    const initiatorRecord = await User.findByPk(userId);
    const initiatorName = (initiatorRecord as any)?.displayName || (initiatorRecord as any)?.email || 'User';
    const approverRecord = await User.findByPk(pausedBy);
    const approverName = (approverRecord as any)?.displayName || (approverRecord as any)?.email || 'Approver';

    const requestNumber = (workflow as any).requestNumber;
    const title = (workflow as any).title;

    // Actionable nudge to the approver who applied the pause.
    await notificationService.sendToUsers([pausedBy], {
      title: 'Pause Retrigger Request',
      body: `${initiatorName} is requesting you to cancel the pause and resume work on request "${title}".`,
      requestId,
      requestNumber,
      url: `/request/${requestNumber}`,
      type: 'pause_retrigger_request',
      priority: 'HIGH',
      actionRequired: true
    });

    // Audit-trail entry naming both parties.
    await activityService.log({
      requestId,
      type: 'pause_retriggered',
      user: { userId, name: initiatorName },
      timestamp: new Date().toISOString(),
      action: 'Pause Retrigger Requested',
      details: `${initiatorName} requested ${approverName} to cancel the pause and resume work.`,
      metadata: {
        pausedBy,
        approverName
      }
    });

    logger.info(`[Pause] Pause retrigger requested for workflow ${requestId} by initiator ${userId}`);
  } catch (error: any) {
    logger.error(`[Pause] Failed to retrigger pause:`, error);
    throw error;
  }
}
/**
 * Get pause details for a workflow.
 *
 * @returns null when the workflow exists but is not paused; otherwise an
 *   object with the pause metadata, the pausing user (or null if not
 *   resolvable), and the currently paused approval level (or null).
 * @throws Error when the workflow does not exist.
 */
async getPauseDetails(requestId: string): Promise<any> {
  try {
    const wf: any = await WorkflowRequest.findByPk(requestId);
    if (!wf) {
      throw new Error('Workflow not found');
    }
    if (!wf.isPaused) {
      return null;
    }
    // The approval level currently flagged as paused, if any.
    const pausedLevel: any = await ApprovalLevel.findOne({
      where: {
        requestId,
        isPaused: true
      }
    });
    // Resolve the user who paused the workflow (minimal attribute set).
    let pausedByInfo: any = null;
    if (wf.pausedBy) {
      const u: any = await User.findByPk(wf.pausedBy, { attributes: ['userId', 'email', 'displayName'] });
      if (u) {
        pausedByInfo = {
          userId: u.userId,
          email: u.email,
          name: u.displayName || u.email
        };
      }
    }
    return {
      isPaused: true,
      pausedAt: wf.pausedAt,
      pausedBy: pausedByInfo,
      pauseReason: wf.pauseReason,
      pauseResumeDate: wf.pauseResumeDate,
      level: pausedLevel ? {
        levelId: pausedLevel.levelId,
        levelNumber: pausedLevel.levelNumber,
        approverName: pausedLevel.approverName
      } : null
    };
  } catch (error: any) {
    logger.error(`[Pause] Failed to get pause details:`, error);
    throw error;
  }
}
/**
 * Check and auto-resume paused workflows whose resume date has passed.
 * Called by a scheduled job.
 *
 * Falls back to a raw SQL query when the `workflow_type` column is absent
 * (older database schemas), so the sweep still works pre-migration.
 *
 * @returns the number of workflows successfully resumed.
 */
async checkAndResumePausedWorkflows(): Promise<number> {
  try {
    const now = new Date();
    let duePauses: WorkflowRequest[];
    try {
      // Normal path: ORM query for paused workflows past their resume date.
      duePauses = await WorkflowRequest.findAll({
        where: {
          isPaused: true,
          pauseResumeDate: {
            [Op.lte]: now
          }
        }
      });
    } catch (error: any) {
      const msg = error.message || '';
      const missingColumn = msg.includes('workflow_type') || (msg.includes('column') && msg.includes('does not exist'));
      if (!missingColumn) {
        throw error; // Unrelated failure - propagate.
      }
      // Backward compatibility: schema without the workflow_type column.
      logger.warn('[Pause] workflow_type column not found, using raw query for backward compatibility');
      const { sequelize } = await import('../config/database');
      const { QueryTypes } = await import('sequelize');
      const rows = await sequelize.query(`
        SELECT request_id, is_paused, pause_resume_date
        FROM workflow_requests
        WHERE is_paused = true
        AND pause_resume_date <= :now
      `, {
        replacements: { now },
        type: QueryTypes.SELECT
      });
      // Shape the raw SELECT rows into minimal WorkflowRequest-like objects.
      duePauses = (rows as any[]).map((row: any) => ({
        requestId: row.request_id,
        isPaused: row.is_paused,
        pauseResumeDate: row.pause_resume_date
      })) as any;
    }
    let resumed = 0;
    for (const wf of duePauses) {
      try {
        await this.resumeWorkflow((wf as any).requestId);
        resumed++;
      } catch (error: any) {
        // One failure must not block the remaining workflows.
        logger.error(`[Pause] Failed to auto-resume workflow ${(wf as any).requestId}:`, error);
      }
    }
    if (resumed > 0) {
      logger.info(`[Pause] Auto-resumed ${resumed} workflow(s)`);
    }
    return resumed;
  } catch (error: any) {
    logger.error(`[Pause] Failed to check and resume paused workflows:`, error);
    throw error;
  }
}
/**
 * Get all paused workflows (for admin/reporting), most recently paused first.
 */
async getPausedWorkflows(): Promise<WorkflowRequest[]> {
  try {
    const paused = await WorkflowRequest.findAll({
      where: { isPaused: true },
      order: [['pausedAt', 'DESC']]
    });
    return paused;
  } catch (error: any) {
    logger.error(`[Pause] Failed to get paused workflows:`, error);
    throw error;
  }
}
}
export const pauseService = new PauseService();

View File

@ -0,0 +1,383 @@
import { tatQueue } from '../queues/tatQueue';
import { calculateDelay, addWorkingHours, addWorkingHoursExpress } from '@utils/tatTimeUtils';
import { getTatThresholds } from './configReader.service';
import dayjs from 'dayjs';
import logger, { logTATEvent } from '@utils/logger';
import { Priority } from '../types/common.types';
/**
 * Schedules, reschedules and cancels the delayed BullMQ jobs that drive
 * TAT (turn-around-time) alerts for approval levels: two configurable
 * percentage-threshold alerts plus a final breach alert per level.
 * Every method no-ops gracefully when the queue (Redis) is unavailable.
 */
export class TatSchedulerService {
  /**
   * Schedule TAT notification jobs for an approval level
   * @param requestId - The workflow request ID
   * @param levelId - The approval level ID
   * @param approverId - The approver user ID
   * @param tatDurationHours - TAT duration in hours
   * @param startTime - Optional start time (defaults to now)
   * @param priority - Request priority (EXPRESS = 24/7, STANDARD = working hours only)
   */
  async scheduleTatJobs(
    requestId: string,
    levelId: string,
    approverId: string,
    tatDurationHours: number,
    startTime?: Date,
    priority: Priority = Priority.STANDARD
  ): Promise<void> {
    try {
      // Check if tatQueue is available
      if (!tatQueue) {
        logger.warn(`[TAT Scheduler] TAT queue not available (Redis not connected). Skipping TAT job scheduling.`);
        return;
      }
      const now = startTime || new Date();
      // Handle both enum and string (case-insensitive) priority values
      const priorityStr = typeof priority === 'string' ? priority.toUpperCase() : priority;
      const isExpress = priorityStr === Priority.EXPRESS || priorityStr === 'EXPRESS';
      // Get current thresholds from database configuration
      const thresholds = await getTatThresholds();
      // Calculate milestone times using configured thresholds
      // EXPRESS mode: 24/7 calculation (includes holidays, weekends, non-working hours)
      // STANDARD mode: Working hours only (excludes holidays, weekends, non-working hours)
      let threshold1Time: Date;
      let threshold2Time: Date;
      let breachTime: Date;
      if (isExpress) {
        // EXPRESS: All calendar days (Mon-Sun, including weekends/holidays) but working hours only (9 AM - 6 PM)
        const t1 = await addWorkingHoursExpress(now, tatDurationHours * (thresholds.first / 100));
        const t2 = await addWorkingHoursExpress(now, tatDurationHours * (thresholds.second / 100));
        const tBreach = await addWorkingHoursExpress(now, tatDurationHours);
        threshold1Time = t1.toDate();
        threshold2Time = t2.toDate();
        breachTime = tBreach.toDate();
      } else {
        // STANDARD: Working days only (Mon-Fri), working hours (9 AM - 6 PM), excludes holidays
        const t1 = await addWorkingHours(now, tatDurationHours * (thresholds.first / 100));
        const t2 = await addWorkingHours(now, tatDurationHours * (thresholds.second / 100));
        const tBreach = await addWorkingHours(now, tatDurationHours);
        threshold1Time = t1.toDate();
        threshold2Time = t2.toDate();
        breachTime = tBreach.toDate();
      }
      logger.info(`[TAT Scheduler] Scheduling TAT jobs - Request: ${requestId}, Priority: ${priority}, TAT: ${tatDurationHours}h`);
      // One entry per alert milestone; delay is computed relative to now.
      const jobs = [
        {
          type: 'threshold1' as const,
          threshold: thresholds.first,
          delay: calculateDelay(threshold1Time),
          targetTime: threshold1Time
        },
        {
          type: 'threshold2' as const,
          threshold: thresholds.second,
          delay: calculateDelay(threshold2Time),
          targetTime: threshold2Time
        },
        {
          type: 'breach' as const,
          threshold: 100,
          delay: calculateDelay(breachTime),
          targetTime: breachTime
        }
      ];
      // Check if test mode enabled (1 hour = 1 minute)
      const isTestMode = process.env.TAT_TEST_MODE === 'true';
      // Check if times collide (working hours calculation issue)
      const uniqueTimes = new Set(jobs.map(j => j.targetTime.getTime()));
      const hasCollision = uniqueTimes.size < jobs.length;
      let jobIndex = 0;
      for (const job of jobs) {
        if (job.delay < 0) {
          logger.error(`[TAT Scheduler] Skipping ${job.type} - time in past`);
          continue;
        }
        let spacedDelay: number;
        if (isTestMode) {
          // Test mode: times are already in minutes (tatTimeUtils converts hours to minutes)
          // Just ensure they have minimum spacing for BullMQ reliability
          spacedDelay = Math.max(job.delay, 5000) + (jobIndex * 5000);
        } else if (hasCollision) {
          // Production with collision: add 5-minute spacing
          spacedDelay = job.delay + (jobIndex * 300000);
        } else {
          // Production without collision: use calculated delays
          spacedDelay = job.delay;
        }
        // Deterministic jobId makes the jobs findable/cancellable later.
        const jobId = `tat-${job.type}-${requestId}-${levelId}`;
        await tatQueue.add(
          job.type,
          {
            type: job.type,
            threshold: job.threshold,
            requestId,
            levelId,
            approverId
          },
          {
            delay: spacedDelay,
            jobId: jobId,
            removeOnComplete: {
              age: 3600, // Keep for 1 hour for debugging
              count: 1000
            },
            removeOnFail: false
          }
        );
        jobIndex++;
      }
      // NOTE(review): logged at 'warning' severity for a routine scheduling
      // event — confirm this level is intended.
      logTATEvent('warning', requestId, {
        level: parseInt(levelId.split('-').pop() || '1'),
        tatHours: tatDurationHours,
        priority,
        message: 'TAT jobs scheduled',
      });
    } catch (error) {
      logger.error(`[TAT Scheduler] Failed to schedule TAT jobs:`, error);
      throw error;
    }
  }
  /**
   * Schedule TAT jobs on resume - only schedules jobs for alerts that haven't been sent yet
   * @param requestId - The workflow request ID
   * @param levelId - The approval level ID
   * @param approverId - The approver user ID
   * @param remainingTatHours - Remaining TAT duration in hours (from resume point)
   * @param startTime - Resume start time
   * @param priority - Request priority
   * @param alertStatus - Object indicating which alerts have already been sent and percentage used at pause
   */
  async scheduleTatJobsOnResume(
    requestId: string,
    levelId: string,
    approverId: string,
    remainingTatHours: number,
    startTime: Date,
    priority: Priority = Priority.STANDARD,
    alertStatus: {
      tat50AlertSent: boolean;
      tat75AlertSent: boolean;
      tatBreached: boolean;
      percentageUsedAtPause: number;
    }
  ): Promise<void> {
    try {
      if (!tatQueue) {
        logger.warn(`[TAT Scheduler] TAT queue not available (Redis not connected). Skipping TAT job scheduling on resume.`);
        return;
      }
      const now = startTime;
      // Handle both enum and string (case-insensitive) priority values
      const priorityStr = typeof priority === 'string' ? priority.toUpperCase() : priority;
      const isExpress = priorityStr === Priority.EXPRESS || priorityStr === 'EXPRESS';
      // Get current thresholds from database configuration
      const thresholds = await getTatThresholds();
      // Calculate original TAT from remaining + elapsed
      // Example: If 35 min used (58.33%) and 25 min remaining, original TAT = 60 min
      // NOTE(review): if percentageUsedAtPause were 100 this divides by zero
      // (Infinity); presumably a fully-used TAT sets tatBreached upstream so
      // this path is never hit — confirm.
      const elapsedHours = alertStatus.percentageUsedAtPause > 0
        ? (remainingTatHours * alertStatus.percentageUsedAtPause) / (100 - alertStatus.percentageUsedAtPause)
        : 0;
      const originalTatHours = elapsedHours + remainingTatHours;
      logger.info(`[TAT Scheduler] Resuming TAT scheduling - Request: ${requestId}, Remaining: ${(remainingTatHours * 60).toFixed(1)} min, Priority: ${isExpress ? 'EXPRESS' : 'STANDARD'}`);
      // Jobs to schedule - only include those that haven't been sent and haven't been passed
      const jobsToSchedule: Array<{
        type: 'threshold1' | 'threshold2' | 'breach';
        threshold: number;
        alreadySent: boolean;
        alreadyPassed: boolean;
        hoursFromNow: number;
      }> = [];
      // Threshold 1 (e.g., 50%)
      // Skip if: already sent OR already passed the threshold
      if (!alertStatus.tat50AlertSent && alertStatus.percentageUsedAtPause < thresholds.first) {
        // Calculate: How many hours from NOW until we reach this threshold?
        // Formula: (thresholdHours - elapsedHours)
        // thresholdHours = originalTatHours * (threshold/100)
        const thresholdHours = originalTatHours * (thresholds.first / 100);
        const hoursFromNow = thresholdHours - elapsedHours;
        if (hoursFromNow > 0) {
          jobsToSchedule.push({
            type: 'threshold1',
            threshold: thresholds.first,
            alreadySent: false,
            alreadyPassed: false,
            hoursFromNow: hoursFromNow
          });
        }
      }
      // Threshold 2 (e.g., 75%)
      if (!alertStatus.tat75AlertSent && alertStatus.percentageUsedAtPause < thresholds.second) {
        const thresholdHours = originalTatHours * (thresholds.second / 100);
        const hoursFromNow = thresholdHours - elapsedHours;
        if (hoursFromNow > 0) {
          jobsToSchedule.push({
            type: 'threshold2',
            threshold: thresholds.second,
            alreadySent: false,
            alreadyPassed: false,
            hoursFromNow: hoursFromNow
          });
        }
      }
      // Breach (100%)
      if (!alertStatus.tatBreached) {
        // Breach is always scheduled for the end of remaining TAT
        jobsToSchedule.push({
          type: 'breach',
          threshold: 100,
          alreadySent: false,
          alreadyPassed: false,
          hoursFromNow: remainingTatHours
        });
      }
      if (jobsToSchedule.length === 0) {
        logger.info(`[TAT Scheduler] No TAT jobs to schedule (all alerts already sent)`);
        return;
      }
      // Calculate actual times and schedule jobs
      for (const job of jobsToSchedule) {
        let targetTime: Date;
        if (isExpress) {
          targetTime = (await addWorkingHoursExpress(now, job.hoursFromNow)).toDate();
        } else {
          targetTime = (await addWorkingHours(now, job.hoursFromNow)).toDate();
        }
        const delay = calculateDelay(targetTime);
        if (delay < 0) {
          logger.warn(`[TAT Scheduler] Skipping ${job.type} - calculated time is in past`);
          continue;
        }
        // Same deterministic jobId scheme as initial scheduling, so
        // cancelTatJobs can find these jobs too.
        const jobId = `tat-${job.type}-${requestId}-${levelId}`;
        await tatQueue.add(
          job.type,
          {
            type: job.type,
            threshold: job.threshold,
            requestId,
            levelId,
            approverId
          },
          {
            delay: delay,
            jobId: jobId,
            removeOnComplete: {
              age: 3600,
              count: 1000
            },
            removeOnFail: false
          }
        );
        logger.info(`[TAT Scheduler] ✓ Scheduled ${job.type} (${job.threshold}%) for ${dayjs(targetTime).format('YYYY-MM-DD HH:mm')}`);
      }
      logger.info(`[TAT Scheduler] ✅ ${jobsToSchedule.length} TAT job(s) scheduled for request ${requestId}`);
    } catch (error) {
      logger.error(`[TAT Scheduler] Failed to schedule TAT jobs on resume:`, error);
      throw error;
    }
  }
  /**
   * Cancel TAT jobs for a specific approval level
   * Useful when an approver acts before TAT expires
   * @param requestId - The workflow request ID
   * @param levelId - The approval level ID
   */
  async cancelTatJobs(requestId: string, levelId: string): Promise<void> {
    try {
      // Check if tatQueue is available
      if (!tatQueue) {
        logger.warn(`[TAT Scheduler] TAT queue not available. Skipping job cancellation.`);
        return;
      }
      // Use generic job names that don't depend on threshold percentages
      const jobIds = [
        `tat-threshold1-${requestId}-${levelId}`,
        `tat-threshold2-${requestId}-${levelId}`,
        `tat-breach-${requestId}-${levelId}`
      ];
      for (const jobId of jobIds) {
        try {
          const job = await tatQueue.getJob(jobId);
          if (job) {
            await job.remove();
            logger.info(`[TAT Scheduler] Cancelled job ${jobId}`);
          }
        } catch (error) {
          // Job might not exist, which is fine
          logger.debug(`[TAT Scheduler] Job ${jobId} not found (may have already been processed)`);
        }
      }
      logger.info(`[TAT Scheduler] ✅ TAT jobs cancelled for level ${levelId}`);
    } catch (error) {
      logger.error(`[TAT Scheduler] Failed to cancel TAT jobs:`, error);
      // Don't throw - cancellation failure shouldn't break the workflow
    }
  }
  /**
   * Cancel all TAT jobs for a workflow request
   * @param requestId - The workflow request ID
   */
  async cancelAllTatJobsForRequest(requestId: string): Promise<void> {
    try {
      // Check if tatQueue is available
      if (!tatQueue) {
        logger.warn(`[TAT Scheduler] TAT queue not available. Skipping job cancellation.`);
        return;
      }
      // Scan only jobs that haven't run yet; active/completed jobs are left alone.
      const jobs = await tatQueue.getJobs(['delayed', 'waiting']);
      const requestJobs = jobs.filter(job => job.data.requestId === requestId);
      for (const job of requestJobs) {
        await job.remove();
        logger.info(`[TAT Scheduler] Cancelled job ${job.id}`);
      }
      logger.info(`[TAT Scheduler] ✅ All TAT jobs cancelled for request ${requestId}`);
    } catch (error) {
      logger.error(`[TAT Scheduler] Failed to cancel all TAT jobs:`, error);
      // Don't throw - cancellation failure shouldn't break the workflow
    }
  }
}
// Singleton instance shared by the application.
export const tatSchedulerService = new TatSchedulerService();

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,446 @@
import { Op } from 'sequelize';
import { WorkNote } from '@models/WorkNote';
import { WorkNoteAttachment } from '@models/WorkNoteAttachment';
import { Participant } from '@models/Participant';
import { WorkflowRequest } from '@models/WorkflowRequest';
import { User } from '@models/User';
import { ApprovalLevel } from '@models/ApprovalLevel';
import { activityService } from './activity.service';
import { notificationService } from './notification.service';
import { emailNotificationService } from './emailNotification.service';
import { gcsStorageService } from './gcsStorage.service';
import logger from '@utils/logger';
import fs from 'fs';
import path from 'path';
/**
 * Work notes: threaded comments on a workflow request, with optional file
 * attachments (GCS upload with local-storage fallback), @-mention
 * notifications, and multi-channel fan-out notifications when additional
 * documents are attached.
 */
export class WorkNoteService {
  /**
   * List all work notes for a request in chronological order, each
   * enriched with its attachments. Field access tolerates both camelCase
   * and snake_case keys (model vs. raw query results).
   */
  async list(requestId: string) {
    const notes = await WorkNote.findAll({
      where: { requestId },
      order: [['created_at' as any, 'ASC']]
    });
    // Load attachments for each note
    const enriched = await Promise.all(notes.map(async (note) => {
      const noteId = (note as any).noteId;
      const attachments = await WorkNoteAttachment.findAll({
        where: { noteId }
      });
      const noteData = (note as any).toJSON();
      const mappedAttachments = attachments.map((a: any) => {
        const attData = typeof a.toJSON === 'function' ? a.toJSON() : a;
        return {
          attachmentId: attData.attachmentId || attData.attachment_id,
          fileName: attData.fileName || attData.file_name,
          fileType: attData.fileType || attData.file_type,
          fileSize: attData.fileSize || attData.file_size,
          filePath: attData.filePath || attData.file_path,
          storageUrl: attData.storageUrl || attData.storage_url,
          isDownloadable: attData.isDownloadable || attData.is_downloadable,
          uploadedAt: attData.uploadedAt || attData.uploaded_at
        };
      });
      return {
        noteId: noteData.noteId || noteData.note_id,
        requestId: noteData.requestId || noteData.request_id,
        userId: noteData.userId || noteData.user_id,
        userName: noteData.userName || noteData.user_name,
        userRole: noteData.userRole || noteData.user_role,
        message: noteData.message,
        isPriority: noteData.isPriority || noteData.is_priority,
        hasAttachment: noteData.hasAttachment || noteData.has_attachment,
        createdAt: noteData.createdAt || noteData.created_at,
        updatedAt: noteData.updatedAt || noteData.updated_at,
        attachments: mappedAttachments
      };
    }));
    return enriched;
  }
  /**
   * Resolve the participant type (e.g. INITIATOR/APPROVER/SPECTATOR) of a
   * user on a request; returns the generic 'Participant' when the user has
   * no participant row or the lookup fails.
   */
  async getUserRole(requestId: string, userId: string): Promise<string> {
    try {
      const participant = await Participant.findOne({
        where: { requestId, userId }
      });
      if (participant) {
        const type = (participant as any).participantType || (participant as any).participant_type;
        return type ? type.toString() : 'Participant';
      }
      return 'Participant';
    } catch (error) {
      logger.error('[WorkNote] Error fetching user role:', error);
      return 'Participant';
    }
  }
  /**
   * Create a work note, upload any attachments, and fan out notifications.
   *
   * Side effects, in order: persists the note; uploads each file to GCS
   * (falling back to local storage) and persists attachment rows; when
   * attachments exist, notifies the counterpart participants (who is
   * notified depends on whether the author is the initiator, a spectator,
   * or an approver) via in-app/web-push and email; logs an activity entry;
   * emits a realtime 'worknote:new' event when the socket layer is up;
   * notifies any @-mentioned users. Notification/emit failures are logged
   * but never fail the note creation.
   *
   * @param requestId - The workflow request ID the note belongs to
   * @param user - Author identity (userId plus optional display name/role)
   * @param payload - Note content, priority flag, optional parent note and mentions
   * @param files - Optional uploaded files (multer disk or memory storage)
   * @param requestMetadata - Optional request IP/user-agent for the activity log
   * @returns the created note spread together with the mapped attachments
   */
  async create(requestId: string, user: { userId: string; name?: string; role?: string }, payload: { message: string; isPriority?: boolean; parentNoteId?: string | null; mentionedUsers?: string[] | null; }, files?: Array<{ path?: string | null; buffer?: Buffer; originalname: string; mimetype: string; size: number }>, requestMetadata?: { ipAddress?: string | null; userAgent?: string | null }): Promise<any> {
    logger.info('[WorkNote] Creating note:', { requestId, user, messageLength: payload.message?.length });
    const note = await WorkNote.create({
      requestId,
      userId: user.userId,
      userName: user.name || null,
      userRole: user.role || null, // Store participant type (INITIATOR/APPROVER/SPECTATOR)
      message: payload.message,
      isPriority: !!payload.isPriority,
      parentNoteId: payload.parentNoteId || null,
      mentionedUsers: payload.mentionedUsers || null,
      hasAttachment: files && files.length > 0 ? true : false
    } as any);
    logger.info('[WorkNote] Created note:', {
      noteId: (note as any).noteId,
      userId: (note as any).userId,
      userName: (note as any).userName,
      userRole: (note as any).userRole
    });
    const attachments = [];
    if (files && files.length) {
      // Get request number for folder structure
      const workflow = await WorkflowRequest.findOne({ where: { requestId } });
      const requestNumber = workflow ? ((workflow as any).requestNumber || (workflow as any).request_number) : null;
      for (const f of files) {
        // Read file buffer if path exists, otherwise use provided buffer
        const fileBuffer = f.buffer || (f.path ? fs.readFileSync(f.path) : Buffer.from(''));
        // Upload with automatic fallback to local storage
        // If requestNumber is not available, use a default structure
        const effectiveRequestNumber = requestNumber || 'UNKNOWN';
        const uploadResult = await gcsStorageService.uploadFileWithFallback({
          buffer: fileBuffer,
          originalName: f.originalname,
          mimeType: f.mimetype,
          requestNumber: effectiveRequestNumber,
          fileType: 'attachments'
        });
        const storageUrl = uploadResult.storageUrl;
        const gcsFilePath = uploadResult.filePath;
        // Clean up local temporary file if it exists (from multer disk storage)
        if (f.path && fs.existsSync(f.path)) {
          try {
            fs.unlinkSync(f.path);
          } catch (unlinkError) {
            logger.warn('[WorkNote] Failed to delete local temporary file:', unlinkError);
          }
        }
        const attachment = await WorkNoteAttachment.create({
          noteId: (note as any).noteId,
          fileName: f.originalname,
          fileType: f.mimetype,
          fileSize: f.size,
          filePath: gcsFilePath, // Store GCS path or local path
          storageUrl: storageUrl, // Store GCS URL or local URL
          isDownloadable: true
        } as any);
        attachments.push({
          attachmentId: (attachment as any).attachmentId,
          fileName: (attachment as any).fileName,
          fileType: (attachment as any).fileType,
          fileSize: (attachment as any).fileSize,
          filePath: (attachment as any).filePath,
          storageUrl: (attachment as any).storageUrl,
          isDownloadable: (attachment as any).isDownloadable
        });
      }
      // Send notifications for additional document added via work notes
      if (attachments.length > 0) {
        try {
          const workflow = await WorkflowRequest.findOne({ where: { requestId } });
          if (workflow) {
            const initiatorId = (workflow as any).initiatorId || (workflow as any).initiator_id;
            const isInitiator = user.userId === initiatorId;
            // Get all participants (spectators)
            const spectators = await Participant.findAll({
              where: {
                requestId,
                participantType: 'SPECTATOR'
              },
              include: [{
                model: User,
                as: 'user',
                attributes: ['userId', 'email', 'displayName']
              }]
            });
            // Get current approver (pending or in-progress approval level)
            const currentApprovalLevel = await ApprovalLevel.findOne({
              where: {
                requestId,
                status: { [Op.in]: ['PENDING', 'IN_PROGRESS'] }
              },
              order: [['levelNumber', 'ASC']],
              include: [{
                model: User,
                as: 'approver',
                attributes: ['userId', 'email', 'displayName']
              }]
            });
            // Determine who to notify based on who uploaded
            const recipientsToNotify: Array<{ userId: string; email: string; displayName: string }> = [];
            if (isInitiator) {
              // Initiator added → notify spectators and current approver
              spectators.forEach((spectator: any) => {
                const spectatorUser = spectator.user || spectator.User;
                if (spectatorUser && spectatorUser.userId !== user.userId) {
                  recipientsToNotify.push({
                    userId: spectatorUser.userId,
                    email: spectatorUser.email,
                    displayName: spectatorUser.displayName || spectatorUser.email
                  });
                }
              });
              if (currentApprovalLevel) {
                const approverUser = (currentApprovalLevel as any).approver || (currentApprovalLevel as any).Approver;
                if (approverUser && approverUser.userId !== user.userId) {
                  recipientsToNotify.push({
                    userId: approverUser.userId,
                    email: approverUser.email,
                    displayName: approverUser.displayName || approverUser.email
                  });
                }
              }
            } else {
              // Check if uploader is a spectator
              const uploaderParticipant = await Participant.findOne({
                where: {
                  requestId,
                  userId: user.userId,
                  participantType: 'SPECTATOR'
                }
              });
              if (uploaderParticipant) {
                // Spectator added → notify initiator and current approver
                const initiator = await User.findByPk(initiatorId);
                if (initiator) {
                  const initiatorData = initiator.toJSON();
                  if (initiatorData.userId !== user.userId) {
                    recipientsToNotify.push({
                      userId: initiatorData.userId,
                      email: initiatorData.email,
                      displayName: initiatorData.displayName || initiatorData.email
                    });
                  }
                }
                if (currentApprovalLevel) {
                  const approverUser = (currentApprovalLevel as any).approver || (currentApprovalLevel as any).Approver;
                  if (approverUser && approverUser.userId !== user.userId) {
                    recipientsToNotify.push({
                      userId: approverUser.userId,
                      email: approverUser.email,
                      displayName: approverUser.displayName || approverUser.email
                    });
                  }
                }
              } else {
                // Approver added → notify initiator and spectators
                const initiator = await User.findByPk(initiatorId);
                if (initiator) {
                  const initiatorData = initiator.toJSON();
                  if (initiatorData.userId !== user.userId) {
                    recipientsToNotify.push({
                      userId: initiatorData.userId,
                      email: initiatorData.email,
                      displayName: initiatorData.displayName || initiatorData.email
                    });
                  }
                }
                spectators.forEach((spectator: any) => {
                  const spectatorUser = spectator.user || spectator.User;
                  if (spectatorUser && spectatorUser.userId !== user.userId) {
                    recipientsToNotify.push({
                      userId: spectatorUser.userId,
                      email: spectatorUser.email,
                      displayName: spectatorUser.displayName || spectatorUser.email
                    });
                  }
                });
              }
            }
            // Send notifications (email, in-app, and web-push)
            const requestNumber = (workflow as any).requestNumber || requestId;
            const requestData = {
              requestNumber: requestNumber,
              requestId: requestId,
              title: (workflow as any).title || 'Request'
            };
            // Prepare user IDs for in-app and web-push notifications
            const recipientUserIds = recipientsToNotify.map(r => r.userId);
            // Send in-app and web-push notifications for each attachment
            if (recipientUserIds.length > 0 && attachments.length > 0) {
              try {
                for (const attachment of attachments) {
                  await notificationService.sendToUsers(
                    recipientUserIds,
                    {
                      title: 'Additional Document Added',
                      body: `${user.name || 'User'} added "${attachment.fileName}" to ${requestNumber}`,
                      requestId,
                      requestNumber,
                      url: `/request/${requestNumber}`,
                      type: 'document_added',
                      priority: 'MEDIUM',
                      actionRequired: false,
                      metadata: {
                        documentName: attachment.fileName,
                        fileSize: attachment.fileSize,
                        addedByName: user.name || 'User',
                        source: 'Work Notes'
                      }
                    }
                  );
                }
                logger.info('[WorkNote] In-app and web-push notifications sent for additional documents', {
                  requestId,
                  attachmentsCount: attachments.length,
                  recipientsCount: recipientUserIds.length
                });
              } catch (notifyError) {
                logger.error('[WorkNote] Failed to send in-app/web-push notifications for additional documents:', notifyError);
              }
            }
            // Send email notifications for each attachment
            for (const attachment of attachments) {
              for (const recipient of recipientsToNotify) {
                await emailNotificationService.sendAdditionalDocumentAdded(
                  requestData,
                  recipient,
                  {
                    documentName: attachment.fileName,
                    fileSize: attachment.fileSize,
                    addedByName: user.name || 'User',
                    source: 'Work Notes'
                  }
                );
              }
            }
            logger.info('[WorkNote] Additional document notifications sent', {
              requestId,
              attachmentsCount: attachments.length,
              recipientsCount: recipientsToNotify.length,
              isInitiator
            });
          }
        } catch (notifyError) {
          // Don't fail work note creation if notifications fail
          logger.error('[WorkNote] Failed to send additional document notifications:', notifyError);
        }
      }
    }
    // Log activity for work note
    activityService.log({
      requestId,
      type: 'comment',
      user: { userId: user.userId, name: user.name || 'User' },
      timestamp: new Date().toISOString(),
      action: 'Work Note Added',
      details: `${user.name || 'User'} added a work note: ${payload.message.substring(0, 100)}${payload.message.length > 100 ? '...' : ''}`,
      ipAddress: requestMetadata?.ipAddress || undefined,
      userAgent: requestMetadata?.userAgent || undefined
    });
    try {
      // Optional realtime emit (if socket layer is initialized)
      const { emitToRequestRoom } = require('../realtime/socket');
      if (emitToRequestRoom) {
        // Emit note with all fields explicitly (to ensure camelCase fields are sent)
        const noteData = {
          noteId: (note as any).noteId,
          requestId: (note as any).requestId,
          userId: (note as any).userId,
          userName: (note as any).userName,
          userRole: (note as any).userRole, // Include participant role
          message: (note as any).message,
          createdAt: (note as any).createdAt,
          hasAttachment: (note as any).hasAttachment,
          attachments: attachments // Include attachments
        };
        emitToRequestRoom(requestId, 'worknote:new', { note: noteData });
      }
    } catch (e) { logger.warn('Realtime emit failed (not initialized)'); }
    // Send notifications to mentioned users
    if (payload.mentionedUsers && Array.isArray(payload.mentionedUsers) && payload.mentionedUsers.length > 0) {
      try {
        // Get workflow details for request number and title
        const workflow = await WorkflowRequest.findOne({ where: { requestId } });
        const requestNumber = (workflow as any)?.requestNumber || requestId;
        const requestTitle = (workflow as any)?.title || 'Request';
        logger.info(`[WorkNote] Sending mention notifications to ${payload.mentionedUsers.length} users`);
        await notificationService.sendToUsers(
          payload.mentionedUsers,
          {
            title: '💬 Mentioned in Work Note',
            body: `${user.name || 'Someone'} mentioned you in ${requestNumber}: "${payload.message.substring(0, 50)}${payload.message.length > 50 ? '...' : ''}"`,
            requestId,
            requestNumber,
            url: `/request/${requestNumber}`,
            type: 'mention'
          }
        );
        logger.info(`[WorkNote] Mention notifications sent successfully`);
      } catch (notifyError) {
        logger.error('[WorkNote] Failed to send mention notifications:', notifyError);
        // Don't fail the work note creation if notifications fail
      }
    }
    // NOTE(review): spreading a Sequelize model instance copies instance
    // internals rather than the dataValues fields — confirm callers read the
    // expected shape (note.toJSON() spread may have been intended).
    return { ...note, attachments };
  }
  /**
   * Resolve download metadata for a work-note attachment.
   * @returns file path, storage URL, name, MIME type, and whether the
   *   storage URL points at Google Cloud Storage.
   * @throws Error when the attachment does not exist.
   */
  async downloadAttachment(attachmentId: string) {
    const attachment = await WorkNoteAttachment.findOne({
      where: { attachmentId }
    });
    if (!attachment) {
      throw new Error('Attachment not found');
    }
    const storageUrl = (attachment as any).storageUrl || (attachment as any).storage_url;
    const filePath = (attachment as any).filePath || (attachment as any).file_path;
    const fileName = (attachment as any).fileName || (attachment as any).file_name;
    const fileType = (attachment as any).fileType || (attachment as any).file_type;
    // Check if it's a GCS URL
    const isGcsUrl = storageUrl && (storageUrl.startsWith('https://storage.googleapis.com') || storageUrl.startsWith('gs://'));
    return {
      filePath: filePath,
      storageUrl: storageUrl,
      fileName: fileName,
      fileType: fileType,
      isGcsUrl: isGcsUrl
    };
  }
}
// Singleton instance shared by the application.
export const workNoteService = new WorkNoteService();

View File

@ -382,4 +382,222 @@ report_cache {
%% 7. Multi-channel notifications (in-app, email, SMS, push)
%% 8. TAT thresholds: 50%, 80%, 100%
%% 9. Max approval levels: 10
%% 10. Max file size: 10 MB
%% Dealer-claim domain ER diagram: workflow_requests is the hub entity
%% (defined elsewhere in this document); the tables below cover claim
%% details/history, dealer proposals and completion, budget/IO tracking,
%% invoicing and credit notes, TAT alerts, summaries, and supporting
%% master data (dealers, holidays, activity types, subscriptions).
erDiagram
    %% Relationships: ||--|| one-to-one, ||--o{ one-to-many.
    workflow_requests ||--|| dealer_claim_details : "has_claim_details"
    workflow_requests ||--o{ dealer_claim_history : "has_claim_history"
    workflow_requests ||--|| dealer_proposal_details : "has_proposal"
    workflow_requests ||--|| dealer_completion_details : "has_completion"
    workflow_requests ||--|| claim_budget_tracking : "tracks_budget"
    workflow_requests ||--|| internal_orders : "has_io"
    workflow_requests ||--o{ claim_invoices : "has_invoices"
    workflow_requests ||--o{ claim_credit_notes : "has_credit_notes"
    workflow_requests ||--o{ tat_alerts : "triggers_alerts"
    workflow_requests ||--|| request_summaries : "has_summary"
    dealer_proposal_details ||--o{ dealer_proposal_cost_items : "has_items"
    dealer_completion_details ||--o{ dealer_completion_expenses : "has_expenses"
    claim_invoices ||--o{ claim_credit_notes : "has_credit_notes"
    request_summaries ||--o{ shared_summaries : "shared_as"
    users ||--o{ shared_summaries : "shares"
    users ||--o{ subscriptions : "has_subscription"
    users ||--o{ holidays : "creates"
    users ||--o{ activity_types : "creates"
    %% Master data: dealer registry keyed by sales/service codes.
    dealers {
        uuid dealer_id PK
        varchar sales_code
        varchar service_code
        varchar dealer_name
        varchar region
        varchar state
        varchar city
        varchar location
        boolean is_active
        timestamp created_at
        timestamp updated_at
    }
    %% Core claim data attached to a workflow request.
    dealer_claim_details {
        uuid claim_id PK
        uuid request_id FK
        varchar activity_name
        varchar activity_type
        varchar dealer_code
        varchar dealer_name
        date activity_date
        date period_start_date
        date period_end_date
        timestamp created_at
        timestamp updated_at
    }
    %% Versioned JSONB snapshots of claim changes per approval level.
    dealer_claim_history {
        uuid history_id PK
        uuid request_id FK
        uuid approval_level_id FK
        integer version
        enum snapshot_type
        jsonb snapshot_data
        text change_reason
        uuid changed_by FK
        timestamp created_at
    }
    dealer_proposal_details {
        uuid proposal_id PK
        uuid request_id FK
        varchar proposal_document_path
        decimal total_estimated_budget
        date expected_completion_date
        text dealer_comments
        timestamp submitted_at
        timestamp created_at
        timestamp updated_at
    }
    dealer_proposal_cost_items {
        uuid cost_item_id PK
        uuid proposal_id FK
        uuid request_id FK
        varchar item_description
        decimal amount
        integer item_order
        timestamp created_at
        timestamp updated_at
    }
    dealer_completion_details {
        uuid completion_id PK
        uuid request_id FK
        date activity_completion_date
        integer number_of_participants
        decimal total_closed_expenses
        timestamp submitted_at
        timestamp created_at
        timestamp updated_at
    }
    dealer_completion_expenses {
        uuid expense_id PK
        uuid completion_id FK
        uuid request_id FK
        varchar description
        decimal amount
        timestamp created_at
        timestamp updated_at
    }
    %% Budget lifecycle: initial estimate -> proposal -> approved -> IO block
    %% -> closed expenses -> final claim / credit note.
    claim_budget_tracking {
        uuid budget_id PK
        uuid request_id FK
        decimal initial_estimated_budget
        decimal proposal_estimated_budget
        decimal approved_budget
        decimal io_blocked_amount
        decimal closed_expenses
        decimal final_claim_amount
        decimal credit_note_amount
        enum budget_status
        timestamp created_at
        timestamp updated_at
    }
    claim_invoices {
        uuid invoice_id PK
        uuid request_id FK
        varchar invoice_number
        date invoice_date
        decimal amount
        varchar status
        timestamp created_at
        timestamp updated_at
    }
    claim_credit_notes {
        uuid credit_note_id PK
        uuid request_id FK
        uuid invoice_id FK
        varchar credit_note_number
        decimal credit_note_amount
        varchar status
        timestamp created_at
        timestamp updated_at
    }
    %% SAP-style internal order tracking the blocked budget per request.
    internal_orders {
        uuid io_id PK
        uuid request_id FK
        varchar io_number
        decimal io_available_balance
        decimal io_blocked_amount
        enum status
        timestamp created_at
        timestamp updated_at
    }
    holidays {
        uuid holiday_id PK
        date holiday_date
        varchar holiday_name
        enum holiday_type
        boolean is_active
        uuid created_by FK
        timestamp created_at
        timestamp updated_at
    }
    activity_types {
        uuid activity_type_id PK
        varchar title
        varchar item_code
        varchar taxation_type
        boolean is_active
        uuid created_by FK
        timestamp created_at
        timestamp updated_at
    }
    %% TAT alert audit trail (threshold and breach notifications).
    tat_alerts {
        uuid alert_id PK
        uuid request_id FK
        uuid level_id FK
        uuid approver_id FK
        enum alert_type
        boolean is_breached
        timestamp alert_sent_at
        timestamp created_at
    }
    request_summaries {
        uuid summary_id PK
        uuid request_id FK
        uuid initiator_id FK
        varchar title
        text description
        text closing_remarks
        boolean is_ai_generated
        timestamp created_at
        timestamp updated_at
    }
    shared_summaries {
        uuid shared_summary_id PK
        uuid summary_id FK
        uuid shared_by FK
        uuid shared_with FK
        boolean is_read
        timestamp shared_at
        timestamp created_at
    }
    %% Web-push subscription keys per user.
    subscriptions {
        uuid subscription_id PK
        uuid user_id FK
        varchar endpoint
        varchar p256dh
        varchar auth
        timestamp created_at
    }

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@ -1 +1,2 @@
import{a as s}from"./index-CULgQ-8S.js";import"./radix-vendor-CYvDqP9X.js";import"./charts-vendor-BVfwAPj-.js";import"./utils-vendor-BTBPSQfW.js";import"./ui-vendor-CX5oLBI_.js";import"./socket-vendor-TjCxX7sJ.js";import"./redux-vendor-tbZCm13o.js";import"./router-vendor-B_rK4TXr.js";async function m(n){return(await s.post(`/conclusions/${n}/generate`)).data.data}async function f(n,t){return(await s.post(`/conclusions/${n}/finalize`,{finalRemark:t})).data.data}async function d(n){var t;try{return(await s.get(`/conclusions/${n}`)).data.data}catch(o){if(((t=o.response)==null?void 0:t.status)===404)return null;throw o}}export{f as finalizeConclusion,m as generateConclusion,d as getConclusion}; import{a as s}from"./index-7JN9lLwu.js";import"./radix-vendor-DIkYAdWy.js";import"./charts-vendor-Bme4E5cb.js";import"./utils-vendor-DNMmNUQL.js";import"./ui-vendor-DbB0YGPu.js";import"./socket-vendor-TjCxX7sJ.js";import"./redux-vendor-tbZCm13o.js";import"./router-vendor-B1UBYWWO.js";async function m(n){return(await s.post(`/conclusions/${n}/generate`)).data.data}async function f(n,t){return(await s.post(`/conclusions/${n}/finalize`,{finalRemark:t})).data.data}async function d(n){var t;try{return(await s.get(`/conclusions/${n}`)).data.data}catch(o){if(((t=o.response)==null?void 0:t.status)===404)return null;throw o}}export{f as finalizeConclusion,m as generateConclusion,d as getConclusion};
//# sourceMappingURL=conclusionApi-CMghC3Jo.js.map

View File

@ -0,0 +1 @@
{"version":3,"file":"conclusionApi-CMghC3Jo.js","sources":["../../src/services/conclusionApi.ts"],"sourcesContent":["import apiClient from './authApi';\r\n\r\nexport interface ConclusionRemark {\r\n conclusionId: string;\r\n requestId: string;\r\n aiGeneratedRemark: string | null;\r\n aiModelUsed: string | null;\r\n aiConfidenceScore: number | null;\r\n finalRemark: string | null;\r\n editedBy: string | null;\r\n isEdited: boolean;\r\n editCount: number;\r\n approvalSummary: any;\r\n documentSummary: any;\r\n keyDiscussionPoints: string[];\r\n generatedAt: string | null;\r\n finalizedAt: string | null;\r\n createdAt: string;\r\n updatedAt: string;\r\n}\r\n\r\n/**\r\n * Generate AI-powered conclusion remark\r\n */\r\nexport async function generateConclusion(requestId: string): Promise<{\r\n conclusionId: string;\r\n aiGeneratedRemark: string;\r\n keyDiscussionPoints: string[];\r\n confidence: number;\r\n generatedAt: string;\r\n}> {\r\n const response = await apiClient.post(`/conclusions/${requestId}/generate`);\r\n return response.data.data;\r\n}\r\n\r\n/**\r\n * Update conclusion remark (edit by initiator)\r\n */\r\nexport async function updateConclusion(requestId: string, finalRemark: string): Promise<ConclusionRemark> {\r\n const response = await apiClient.put(`/conclusions/${requestId}`, { finalRemark });\r\n return response.data.data;\r\n}\r\n\r\n/**\r\n * Finalize conclusion and close request\r\n */\r\nexport async function finalizeConclusion(requestId: string, finalRemark: string): Promise<{\r\n conclusionId: string;\r\n requestNumber: string;\r\n status: string;\r\n finalRemark: string;\r\n finalizedAt: string;\r\n}> {\r\n const response = await apiClient.post(`/conclusions/${requestId}/finalize`, { finalRemark });\r\n return response.data.data;\r\n}\r\n\r\n/**\r\n * Get conclusion for a request\r\n * Returns null if conclusion doesn't exist (404) instead of throwing error\r\n */\r\nexport async function getConclusion(requestId: string): 
Promise<ConclusionRemark | null> {\r\n try {\r\n const response = await apiClient.get(`/conclusions/${requestId}`);\r\n return response.data.data;\r\n } catch (error: any) {\r\n // Handle 404 gracefully - conclusion doesn't exist yet, which is normal\r\n if (error.response?.status === 404) {\r\n return null;\r\n }\r\n // Re-throw other errors\r\n throw error;\r\n }\r\n}\r\n\r\n"],"names":["generateConclusion","requestId","apiClient","finalizeConclusion","finalRemark","getConclusion","error","_a"],"mappings":"6RAwBA,eAAsBA,EAAmBC,EAMtC,CAED,OADiB,MAAMC,EAAU,KAAK,gBAAgBD,CAAS,WAAW,GAC1D,KAAK,IACvB,CAaA,eAAsBE,EAAmBF,EAAmBG,EAMzD,CAED,OADiB,MAAMF,EAAU,KAAK,gBAAgBD,CAAS,YAAa,CAAE,YAAAG,EAAa,GAC3E,KAAK,IACvB,CAMA,eAAsBC,EAAcJ,EAAqD,OACvF,GAAI,CAEF,OADiB,MAAMC,EAAU,IAAI,gBAAgBD,CAAS,EAAE,GAChD,KAAK,IACvB,OAASK,EAAY,CAEnB,KAAIC,EAAAD,EAAM,WAAN,YAAAC,EAAgB,UAAW,IAC7B,OAAO,KAGT,MAAMD,CACR,CACF"}

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@ -1,31 +1,69 @@
<!DOCTYPE html> <!DOCTYPE html>
<html lang="en"> <html lang="en">
<head>
<head> <meta charset="UTF-8" />
<meta charset="UTF-8" /> <!-- CSP: Allows blob URLs for file previews and cross-origin API calls during development -->
<link rel="icon" type="image/svg+xml" href="/royal_enfield_logo.svg" /> <meta http-equiv="Content-Security-Policy" content="default-src 'self' blob:; style-src 'self' 'unsafe-inline' https://fonts.googleapis.com; script-src 'self'; img-src 'self' data: https: blob:; connect-src 'self' blob: data: http://localhost:5000 http://localhost:3000 ws://localhost:5000 ws://localhost:3000 wss://localhost:5000 wss://localhost:3000; frame-src 'self' blob:; font-src 'self' https://fonts.gstatic.com data:; object-src 'none'; base-uri 'self'; form-action 'self';" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" /> <link rel="icon" type="image/svg+xml" href="/royal_enfield_logo.svg" />
<meta name="description" <meta name="viewport" content="width=device-width, initial-scale=1.0" />
content="Royal Enfield Approval & Request Management Portal - Streamlined approval workflows for enterprise operations" /> <meta name="description" content="Royal Enfield Approval & Request Management Portal - Streamlined approval workflows for enterprise operations" />
<meta name="theme-color" content="#2d4a3e" /> <meta name="theme-color" content="#2d4a3e" />
<title>Royal Enfield | Approval Portal</title> <title>Royal Enfield | Approval Portal</title>
<!-- Preload essential fonts and icons --> <!-- Preload critical fonts and icons -->
<link rel="preconnect" href="https://fonts.googleapis.com"> <link rel="preconnect" href="https://fonts.googleapis.com">
<link rel="preconnect" href="https://fonts.gstatic.com" crossorigin> <link rel="preconnect" href="https://fonts.gstatic.com" crossorigin>
<script type="module" crossorigin src="/assets/index-CULgQ-8S.js"></script>
<link rel="modulepreload" crossorigin href="/assets/charts-vendor-BVfwAPj-.js"> <!-- Ensure proper icon rendering and layout -->
<link rel="modulepreload" crossorigin href="/assets/radix-vendor-CYvDqP9X.js"> <style>
<link rel="modulepreload" crossorigin href="/assets/utils-vendor-BTBPSQfW.js"> /* Ensure Lucide icons render properly */
<link rel="modulepreload" crossorigin href="/assets/ui-vendor-CX5oLBI_.js"> svg {
<link rel="modulepreload" crossorigin href="/assets/socket-vendor-TjCxX7sJ.js"> display: inline-block;
<link rel="modulepreload" crossorigin href="/assets/redux-vendor-tbZCm13o.js"> vertical-align: middle;
<link rel="modulepreload" crossorigin href="/assets/router-vendor-B_rK4TXr.js"> }
<link rel="stylesheet" crossorigin href="/assets/index-XBJXaMj2.css">
</head> /* Fix for icon alignment in buttons */
button svg {
<body> flex-shrink: 0;
<div id="root"></div> }
</body>
/* Ensure proper text rendering */
</html> body {
-webkit-font-smoothing: antialiased;
-moz-osx-font-smoothing: grayscale;
text-rendering: optimizeLegibility;
}
/* Fix for mobile viewport and sidebar */
@media (max-width: 768px) {
html {
overflow-x: hidden;
}
}
/* Ensure proper sidebar toggle behavior */
.sidebar-toggle {
transition: all 0.3s ease-in-out;
}
/* Fix for icon button hover states */
button:hover svg {
transform: scale(1.05);
transition: transform 0.2s ease;
}
</style>
<script type="module" crossorigin src="/assets/index-7JN9lLwu.js"></script>
<link rel="modulepreload" crossorigin href="/assets/charts-vendor-Bme4E5cb.js">
<link rel="modulepreload" crossorigin href="/assets/radix-vendor-DIkYAdWy.js">
<link rel="modulepreload" crossorigin href="/assets/utils-vendor-DNMmNUQL.js">
<link rel="modulepreload" crossorigin href="/assets/ui-vendor-DbB0YGPu.js">
<link rel="modulepreload" crossorigin href="/assets/socket-vendor-TjCxX7sJ.js">
<link rel="modulepreload" crossorigin href="/assets/redux-vendor-tbZCm13o.js">
<link rel="modulepreload" crossorigin href="/assets/router-vendor-B1UBYWWO.js">
<link rel="stylesheet" crossorigin href="/assets/index-B-mLDzJe.css">
</head>
<body>
<div id="root"></div>
</body>
</html>

View File

@ -1,4 +0,0 @@
User-agent: *
Disallow: /api/
Sitemap: https://reflow.royalenfield.com/sitemap.xml

View File

@ -1,9 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">
<url>
<loc>https://reflow.royalenfield.com</loc>
<lastmod>2024-03-20T12:00:00+00:00</lastmod>
<changefreq>daily</changefreq>
<priority>1.0</priority>
</url>
</urlset>

43
debug-finalize.ts Normal file
View File

@ -0,0 +1,43 @@
import mongoose from 'mongoose';
import dotenv from 'dotenv';
import dns from 'dns';
import { WorkflowRequestModel } from './src/models/mongoose/WorkflowRequest.schema';
dotenv.config();
/**
 * Debug helper: lists the 10 most recently updated workflow requests that
 * were either finalized (have a conclusion remark) or moved to CLOSED.
 * Exits 0 on success, 1 on missing config or any runtime failure.
 */
async function check(): Promise<void> {
  try {
    // Accept either env var name; `||` is deliberate so an empty string is
    // treated the same as unset.
    const mongoUri = process.env.MONGO_URI || process.env.MONGODB_URL;
    if (!mongoUri) {
      console.error('MONGO_URI not found in .env');
      process.exit(1);
    }

    // mongodb+srv URIs require SRV/TXT DNS lookups; pin public resolvers in
    // case the local resolver cannot answer them (e.g. restrictive VPN DNS).
    if (mongoUri.startsWith('mongodb+srv://')) {
      dns.setServers(['8.8.8.8', '8.8.4.4', '1.1.1.1', '1.0.0.1']);
    }

    await mongoose.connect(mongoUri);
    console.log('✅ Connected to MongoDB');

    // Newest-first sample of finalized/closed requests.
    const requests = await WorkflowRequestModel.find({
      $or: [
        { conclusionRemark: { $exists: true, $ne: null } },
        { workflowState: 'CLOSED' }
      ]
    }).sort({ updatedAt: -1 }).limit(10);

    console.log('Results (Last 10 finalized/closed):');
    requests.forEach(r => {
      console.log(`- REQ: ${r.requestNumber}, Status: ${r.status}, State: ${r.workflowState}, HasRemark: ${!!r.conclusionRemark}`);
    });

    // Close the connection explicitly rather than relying on process.exit()
    // to kill it mid-flight (fixes an open-handle leak in the original).
    await mongoose.disconnect();
    process.exit(0);
  } catch (error) {
    console.error('Check failed:', error);
    process.exit(1);
  }
}
check();

View File

@ -1,8 +1,39 @@
# docker-compose.full.yml # =============================================================================
# Synced with streamlined infrastructure # RE Workflow - Full Stack Docker Compose
# Includes: Application + Database + Monitoring Stack
# =============================================================================
# Usage:
# docker-compose -f docker-compose.full.yml up -d
# =============================================================================
version: '3.8' version: '3.8'
services: services:
# ===========================================================================
# APPLICATION SERVICES
# ===========================================================================
postgres:
image: postgres:16-alpine
container_name: re_workflow_db
environment:
POSTGRES_USER: ${DB_USER:-laxman}
POSTGRES_PASSWORD: ${DB_PASSWORD:-Admin@123}
POSTGRES_DB: ${DB_NAME:-re_workflow_db}
ports:
- "5432:5432"
volumes:
- postgres_data:/var/lib/postgresql/data
- ./database/schema:/docker-entrypoint-initdb.d
networks:
- re_workflow_network
restart: unless-stopped
healthcheck:
test: ["CMD-SHELL", "pg_isready -U ${DB_USER:-laxman}"]
interval: 10s
timeout: 5s
retries: 5
redis: redis:
image: redis:7-alpine image: redis:7-alpine
container_name: re_workflow_redis container_name: re_workflow_redis
@ -19,24 +50,70 @@ services:
timeout: 5s timeout: 5s
retries: 5 retries: 5
clamav: backend:
image: clamav/clamav:latest build:
container_name: re_clamav context: .
ports: dockerfile: Dockerfile
- "3310:3310" container_name: re_workflow_backend
volumes:
- clamav_data:/var/lib/clamav
environment: environment:
- CLAMAV_NO_FRESHCLAMD=false NODE_ENV: development
healthcheck: DB_HOST: postgres
test: ["CMD", "clamdcheck"] DB_PORT: 5432
interval: 60s DB_USER: ${DB_USER:-laxman}
timeout: 10s DB_PASSWORD: ${DB_PASSWORD:-Admin@123}
retries: 5 DB_NAME: ${DB_NAME:-re_workflow_db}
start_period: 120s REDIS_URL: redis://redis:6379
restart: unless-stopped PORT: 5000
# Loki for logging
LOKI_HOST: http://loki:3100
ports:
- "5000:5000"
depends_on:
postgres:
condition: service_healthy
redis:
condition: service_healthy
volumes:
- ./logs:/app/logs
- ./uploads:/app/uploads
networks: networks:
- re_workflow_network - re_workflow_network
restart: unless-stopped
healthcheck:
test: ["CMD-SHELL", "node -e \"require('http').get('http://localhost:5000/health', (r) => {process.exit(r.statusCode === 200 ? 0 : 1)})\""]
interval: 30s
timeout: 10s
retries: 3
start_period: 40s
# ===========================================================================
# MONITORING SERVICES
# ===========================================================================
prometheus:
image: prom/prometheus:v2.47.2
container_name: re_prometheus
ports:
- "9090:9090"
volumes:
- ./monitoring/prometheus/prometheus.yml:/etc/prometheus/prometheus.yml:ro
- ./monitoring/prometheus/alert.rules.yml:/etc/prometheus/alert.rules.yml:ro
- prometheus_data:/prometheus
command:
- '--config.file=/etc/prometheus/prometheus.yml'
- '--storage.tsdb.path=/prometheus'
- '--storage.tsdb.retention.time=15d'
- '--web.console.libraries=/usr/share/prometheus/console_libraries'
- '--web.console.templates=/usr/share/prometheus/consoles'
- '--web.enable-lifecycle'
networks:
- re_workflow_network
restart: unless-stopped
healthcheck:
test: ["CMD", "wget", "-q", "--spider", "http://localhost:9090/-/healthy"]
interval: 30s
timeout: 10s
retries: 3
loki: loki:
image: grafana/loki:2.9.2 image: grafana/loki:2.9.2
@ -79,12 +156,15 @@ services:
- GF_SECURITY_ADMIN_USER=admin - GF_SECURITY_ADMIN_USER=admin
- GF_SECURITY_ADMIN_PASSWORD=REWorkflow@2024 - GF_SECURITY_ADMIN_PASSWORD=REWorkflow@2024
- GF_USERS_ALLOW_SIGN_UP=false - GF_USERS_ALLOW_SIGN_UP=false
- GF_FEATURE_TOGGLES_ENABLE=publicDashboards
- GF_INSTALL_PLUGINS=grafana-clock-panel,grafana-simple-json-datasource,grafana-piechart-panel
volumes: volumes:
- grafana_data:/var/lib/grafana - grafana_data:/var/lib/grafana
- ./monitoring/grafana/provisioning/datasources:/etc/grafana/provisioning/datasources:ro - ./monitoring/grafana/provisioning/datasources:/etc/grafana/provisioning/datasources:ro
- ./monitoring/grafana/provisioning/dashboards:/etc/grafana/provisioning/dashboards:ro - ./monitoring/grafana/provisioning/dashboards:/etc/grafana/provisioning/dashboards:ro
- ./monitoring/grafana/dashboards:/var/lib/grafana/dashboards:ro - ./monitoring/grafana/dashboards:/var/lib/grafana/dashboards:ro
depends_on: depends_on:
- prometheus
- loki - loki
networks: networks:
- re_workflow_network - re_workflow_network
@ -95,13 +175,54 @@ services:
timeout: 10s timeout: 10s
retries: 3 retries: 3
volumes: node-exporter:
redis_data: image: prom/node-exporter:v1.6.1
clamav_data: container_name: re_node_exporter
loki_data: ports:
promtail_data: - "9100:9100"
grafana_data: networks:
- re_workflow_network
restart: unless-stopped
alertmanager:
image: prom/alertmanager:v0.26.0
container_name: re_alertmanager
ports:
- "9093:9093"
volumes:
- ./monitoring/alertmanager/alertmanager.yml:/etc/alertmanager/alertmanager.yml:ro
- alertmanager_data:/alertmanager
command:
- '--config.file=/etc/alertmanager/alertmanager.yml'
- '--storage.path=/alertmanager'
networks:
- re_workflow_network
restart: unless-stopped
# ===========================================================================
# NETWORKS
# ===========================================================================
networks: networks:
re_workflow_network: re_workflow_network:
driver: bridge driver: bridge
name: re_workflow_network
# ===========================================================================
# VOLUMES
# ===========================================================================
volumes:
postgres_data:
name: re_postgres_data
redis_data:
name: re_redis_data
prometheus_data:
name: re_prometheus_data
loki_data:
name: re_loki_data
promtail_data:
name: re_promtail_data
grafana_data:
name: re_grafana_data
alertmanager_data:
name: re_alertmanager_data

View File

@ -1,8 +1,28 @@
# docker-compose.yml # docker-compose.yml
# Streamlined infrastructure for local development
version: '3.8' version: '3.8'
services: services:
postgres:
image: postgres:16-alpine
container_name: re_workflow_db
environment:
POSTGRES_USER: ${DB_USER:-laxman}
POSTGRES_PASSWORD: ${DB_PASSWORD:-Admin@123}
POSTGRES_DB: ${DB_NAME:-re_workflow_db}
ports:
- "5432:5432"
volumes:
- postgres_data:/var/lib/postgresql/data
- ./database/schema:/docker-entrypoint-initdb.d
networks:
- re_workflow_network
restart: unless-stopped
healthcheck:
test: ["CMD-SHELL", "pg_isready -U ${DB_USER:-laxman}"]
interval: 10s
timeout: 5s
retries: 5
redis: redis:
image: redis:7-alpine image: redis:7-alpine
container_name: re_workflow_redis container_name: re_workflow_redis
@ -19,88 +39,43 @@ services:
timeout: 5s timeout: 5s
retries: 5 retries: 5
clamav: backend:
image: clamav/clamav:latest build:
container_name: re_clamav context: .
ports: dockerfile: Dockerfile
- "3310:3310" container_name: re_workflow_backend
volumes:
- clamav_data:/var/lib/clamav
environment: environment:
- CLAMAV_NO_FRESHCLAMD=false NODE_ENV: development
healthcheck: DB_HOST: postgres
test: ["CMD", "clamdcheck"] DB_PORT: 5432
interval: 60s DB_USER: ${DB_USER:-laxman}
timeout: 10s DB_PASSWORD: ${DB_PASSWORD:-Admin@123}
retries: 5 DB_NAME: ${DB_NAME:-re_workflow_db}
start_period: 120s REDIS_URL: redis://redis:6379
restart: unless-stopped PORT: 5000
networks:
- re_workflow_network
loki:
image: grafana/loki:2.9.2
container_name: re_loki
ports: ports:
- "3100:3100" - "5000:5000"
depends_on:
postgres:
condition: service_healthy
redis:
condition: service_healthy
volumes: volumes:
- ./monitoring/loki/loki-config.yml:/etc/loki/local-config.yaml:ro - ./logs:/app/logs
- loki_data:/loki - ./uploads:/app/uploads
command: -config.file=/etc/loki/local-config.yaml
networks: networks:
- re_workflow_network - re_workflow_network
restart: unless-stopped restart: unless-stopped
healthcheck: healthcheck:
test: ["CMD-SHELL", "wget --no-verbose --tries=1 --spider http://localhost:3100/ready || exit 1"] test: ["CMD-SHELL", "node -e \"require('http').get('http://localhost:5000/health', (r) => {process.exit(r.statusCode === 200 ? 0 : 1)})\""]
interval: 30s
timeout: 10s
retries: 5
promtail:
image: grafana/promtail:2.9.2
container_name: re_promtail
volumes:
- ./monitoring/promtail/promtail-config.yml:/etc/promtail/config.yml:ro
- ./logs:/var/log/app:ro
- promtail_data:/tmp/promtail
command: -config.file=/etc/promtail/config.yml
depends_on:
- loki
networks:
- re_workflow_network
restart: unless-stopped
grafana:
image: grafana/grafana:10.2.2
container_name: re_grafana
ports:
- "3001:3000"
environment:
- GF_SECURITY_ADMIN_USER=admin
- GF_SECURITY_ADMIN_PASSWORD=REWorkflow@2024
- GF_USERS_ALLOW_SIGN_UP=false
volumes:
- grafana_data:/var/lib/grafana
- ./monitoring/grafana/provisioning/datasources:/etc/grafana/provisioning/datasources:ro
- ./monitoring/grafana/provisioning/dashboards:/etc/grafana/provisioning/dashboards:ro
- ./monitoring/grafana/dashboards:/var/lib/grafana/dashboards:ro
depends_on:
- loki
networks:
- re_workflow_network
restart: unless-stopped
healthcheck:
test: ["CMD-SHELL", "wget --no-verbose --tries=1 --spider http://localhost:3000/api/health || exit 1"]
interval: 30s interval: 30s
timeout: 10s timeout: 10s
retries: 3 retries: 3
start_period: 40s
volumes: volumes:
postgres_data:
redis_data: redis_data:
clamav_data:
loki_data:
promtail_data:
grafana_data:
networks: networks:
re_workflow_network: re_workflow_network:

View File

@ -1,71 +0,0 @@
# Dealer Claim Financial Settlement Workflow
This document outlines the workflow for financial settlement of dealer claims within the Royal Enfield platform, following the transition from direct DMS integration to an Azure File Storage (AFS) based data exchange with SAP.
## Workflow Overview
The financial settlement process ensures that dealer claims are legally documented and financially settled through Royal Enfield's SAP system.
### 1. Legal Compliance: PWC E-Invoicing
Once the **Dealer Completion Documents** are submitted and approved by the **Initiator (Requestor Evaluation)**, the system triggers the legal compliance step.
- **Service**: `PWCIntegrationService`
- **Action**: Generates a signed E-Invoice via PWC API.
- **Output**: IRN (Invoice Reference Number), Ack No, Ack Date, Signed Invoice (PDF/B64), and QR Code.
- **Purpose**: Ensures the claim is legally recognized under GST regulations.
### 2. Financial Posting: AFS/CSV Integration
The financial settlement is handled by exchanging data files with SAP via **Azure File Storage (AFS)**.
- **Action**: The system generates a **CSV file** containing the following details:
- Invoice Number (from PWC)
- Invoice Amount (with/without GST as per activity type)
- GL Code (Resolved based on Activity Type/IO)
- Internal Order (IO) Number
- Dealer Code
- **Storage**: CSV is uploaded to a designated folder in AFS.
- **SAP Role**: SAP periodically polls AFS, picks up the CSV, and posts the transaction internally.
### 3. Payment Outcome: Credit Note
The result of the financial posting in SAP is a **Credit Note**.
- **Workflow**:
- SAP generates a Credit Note and uploads it back to AFS.
- RE Backend monitors the AFS folder.
- Once a Credit Note is detected, the system retrieves it and attaches it to the workflow request.
- An email notification (using `creditNoteSent.template.ts`) is sent to the dealer.
## Sequence Diagram
```mermaid
sequenceDiagram
participant Dealer
participant Backend
participant PWC
participant AFS as Azure File Storage
participant SAP
Dealer->>Backend: Submit Completion Docs (Actuals)
Backend->>Backend: Initiator Approval
Backend->>PWC: Generate Signed E-Invoice
PWC-->>Backend: Return IRN & QR Code
Backend->>Backend: Generate Settlement CSV
Backend->>AFS: Upload CSV
SAP->>AFS: Pick up CSV
SAP->>SAP: Post Financials
SAP->>AFS: Upload Credit Note
Backend->>AFS: Poll/Retrieve Credit Note
Backend->>Dealer: Send Credit Note Notification
```
## GL Code Resolution
The GL Code is resolved dynamically based on:
1. **Activity Type**: Each activity (e.g., Marketing Event, Demo) has a primary GL mapping.
2. **Internal Order (IO)**: If specific IO logic is required, the GL can be overridden.
## Summary of Integration Points
| Component | Integration Type | Responsibility |
| :--- | :--- | :--- |
| **PWC** | REST API | Legal E-Invoice |
| **AFS (Azure)** | File Storage SDK | CSV Exchange |
| **SAP** | Batch Processing | Financial Posting & Credit Note |

View File

@ -34,7 +34,7 @@ The Claim Management workflow has **8 fixed steps** with specific approvers and
- **Approver Type**: System (Auto-processed) - **Approver Type**: System (Auto-processed)
- **Action Type**: **AUTO** (System automatically creates activity) - **Action Type**: **AUTO** (System automatically creates activity)
- **TAT**: 1 hour - **TAT**: 1 hour
- **Mapping**: System user (`system@{{APP_DOMAIN}}`) - **Mapping**: System user (`system@royalenfield.com`)
- **Status**: Auto-approved when triggered - **Status**: Auto-approved when triggered
### Step 5: Dealer Completion Documents ### Step 5: Dealer Completion Documents
@ -55,7 +55,7 @@ The Claim Management workflow has **8 fixed steps** with specific approvers and
- **Approver Type**: System (Auto-processed via DMS) - **Approver Type**: System (Auto-processed via DMS)
- **Action Type**: **AUTO** (System generates e-invoice via DMS integration) - **Action Type**: **AUTO** (System generates e-invoice via DMS integration)
- **TAT**: 1 hour - **TAT**: 1 hour
- **Mapping**: System user (`system@{{APP_DOMAIN}}`) - **Mapping**: System user (`system@royalenfield.com`)
- **Status**: Auto-approved when triggered - **Status**: Auto-approved when triggered
### Step 8: Credit Note Confirmation ### Step 8: Credit Note Confirmation
@ -121,7 +121,7 @@ const dealerUser = await User.findOne({ where: { email: dealerEmail } });
1. Find user with department containing "Finance" and role = 'MANAGEMENT' 1. Find user with department containing "Finance" and role = 'MANAGEMENT'
2. Find user with designation containing "Finance" or "Accountant" 2. Find user with designation containing "Finance" or "Accountant"
3. Use configured finance team email from admin_configurations table 3. Use configured finance team email from admin_configurations table
4. Fallback: Use default finance email (e.g., finance@{{APP_DOMAIN}}) 4. Fallback: Use default finance email (e.g., finance@royalenfield.com)
``` ```
## Next Steps ## Next Steps

310
docs/DATABASE_SCHEMA.md Normal file
View File

@ -0,0 +1,310 @@
# Database Schema Documentation
## 1. Overview
This document provides a detailed reference for the backend database schema of the Royal Enfield Workflow Management System.
**Database System:** PostgreSQL 16.x
**Schema Conventions:**
* **Primary Keys:** UUID (v4) for all tables.
* **Naming:** Snake_case for tables and columns.
* **Audit Columns:** Most tables include `created_at`, `updated_at`, `created_by`, `updated_by`.
* **Soft Deletes:** `is_deleted` flag used on critical entities.
## 2. Architecture Diagrams (A4 Optimized)
### 2.1. Core Workflow Architecture
Focuses on the request lifecycle, approval chains, and direct interactions.
```mermaid
erDiagram
users ||--o{ workflow_requests : "initiates"
users ||--o{ approval_levels : "approves"
users ||--o{ participants : "collaborates"
workflow_requests ||--|{ approval_levels : "has_steps"
workflow_requests ||--o{ participants : "has_users"
workflow_requests ||--o{ documents : "contains"
workflow_requests ||--o{ work_notes : "discussions"
workflow_requests ||--o{ activities : "audit_trail"
workflow_templates ||--o{ workflow_requests : "spawns"
workflow_requests ||--|| conclusion_remarks : "finalizes"
workflow_requests {
uuid request_id PK
varchar request_number
enum status
integer current_level
}
approval_levels {
uuid level_id PK
integer level_number
enum status
uuid approver_id FK
}
```
### 2.2. Business Domain Data
Focuses on the specific data payloads (Dealers, Finance, Claims) attached to requests.
```mermaid
erDiagram
workflow_requests ||--o{ dealers : "context"
workflow_requests ||--|| dealer_claim_details : "claim_data"
workflow_requests ||--|| dealer_proposal_details : "proposal"
workflow_requests ||--|| dealer_completion_details : "evidence"
workflow_requests ||--o{ dealer_claim_history : "versions"
workflow_requests ||--|| claim_budget_tracking : "financials"
workflow_requests ||--|| internal_orders : "sap_ref"
workflow_requests ||--o{ claim_invoices : "billing"
claim_invoices ||--o{ claim_credit_notes : "adjustments"
dealer_claim_details {
uuid claim_id PK
varchar activity_type
}
claim_budget_tracking {
decimal approved_budget
decimal final_claim_amount
}
```
### 2.3. System Support Services
Focuses on cross-cutting concerns like logging, notifications, and monitoring.
```mermaid
erDiagram
users ||--o{ notifications : "receives"
users ||--o{ system_settings : "configures"
users ||--o{ audit_logs : "actions"
workflow_requests ||--o{ notifications : "triggers"
workflow_requests ||--o{ tat_tracking : "monitors_sla"
workflow_requests ||--o{ tat_alerts : "sla_breaches"
workflow_requests ||--o{ request_summaries : "ai_summary"
workflow_requests ||--o{ report_cache : "reporting"
notifications ||--o{ email_logs : "outbound"
notifications ||--o{ sms_logs : "outbound"
tat_tracking {
decimal total_tat_hours
boolean threshold_breached
}
```
## 3. Schema Modules
### 3.1. User & Authentication Module
Manages user identities, sessions, and system-wide configurations.
```mermaid
erDiagram
users ||--o{ user_sessions : "has"
users ||--o{ subscriptions : "has_device"
users ||--o{ system_settings : "modifies"
users {
uuid user_id PK
varchar employee_id
varchar email
varchar display_name
enum role
boolean is_active
}
user_sessions {
uuid session_id PK
uuid user_id FK
varchar session_token
timestamp expires_at
}
subscriptions {
uuid subscription_id PK
uuid user_id FK
varchar endpoint
}
```
#### Tables
**`users`**
Core user registry, synced with Okta/HRMS.
* `user_id` (PK): Unique UUID.
* `employee_id` (Unique): HR system ID.
* `email` (Unique): Official email address.
* `role`: RBAC role (USER, ADMIN, etc.).
* `is_active`: Soft delete/account link status.
**`user_sessions`**
Active JWT sessions for invalidation/tracking.
* `session_token`: The JWT access token.
* `refresh_token`: For renewing access tokens.
* `device_type`: Web/Mobile classification.
**`system_settings`**
Dynamic configuration (e.g., global TAT thresholds).
* `setting_key` (Unique): Config identifier name.
* `setting_value`: The value (text/json).
---
### 3.2. Workflow Engine Module
The core engine driving request lifecycles, approvals, and tracking.
```mermaid
erDiagram
workflow_requests ||--|{ approval_levels : "steps"
workflow_requests ||--o{ activities : "events"
workflow_requests ||--|{ participants : "access"
workflow_templates ||--o{ workflow_requests : "spawns"
workflow_requests {
uuid request_id PK
varchar request_number
enum status
uuid initiator_id FK
}
approval_levels {
uuid level_id PK
uuid request_id FK
integer level_number
enum status
uuid approver_id FK
}
```
#### Tables
**`workflow_requests`**
The central entity representing a business process instance.
* `request_number`: Human-readable ID (e.g., REQ-2024-001).
* `current_level`: Pointer to the active approval step.
* `status`: DRAFT, PENDING, APPROVED, REJECTED, CLOSED.
**`approval_levels`**
Defines the sequence of approvers for a request.
* `level_number`: Sequence index (1, 2, 3...).
* `approver_id`: User responsible for this step.
* `tat_hours`: SLA for this specific step.
* `status`: PENDING, APPROVED, REJECTED.
**`participants`**
Users with visibility/access to the request (spectators, contributors).
* `participant_type`: SPECTATOR, CONTRIBUTOR.
* `can_comment`, `can_view_documents`: Granular permissions.
**`activities`**
Audit trail of all actions performed on a request.
* `activity_type`: CREATED, APPROVED, COMMENTED, FILE_UPLOADED.
* `metadata`: JSON payload with specific details of the event.
**`workflow_templates`**
Blueprints for creating new requests.
* `approval_levels_config`: JSON defining the default approver chain structure.
---
### 3.3. Dealer Management Module
Stores specific data related to dealer claims, onboardings, and performance.
```mermaid
erDiagram
workflow_requests ||--|| dealer_claim_details : "details"
workflow_requests ||--|| dealer_proposal_details : "proposal"
workflow_requests ||--|| dealer_completion_details : "completion"
workflow_requests ||--o{ dealer_claim_history : "versions"
workflow_requests ||--o{ dealers : "related_to"
dealers {
uuid dealer_id PK
varchar dealer_name
varchar sales_code
}
```
#### Tables
**`dealers`**
Master data for dealerships.
* `sales_code`, `service_code`: Dealer unique identifiers.
* `dealer_name`, `region`, `city`: Location details.
**`dealer_claim_details`**
Specific attributes for a Dealer Claim request.
* `activity_name`, `activity_type`: Marketing/Sales activity details.
* `period_start_date`, `period_end_date`: Duration of the claim activity.
**`dealer_proposal_details`**
Stores the initial proposal data for a claim.
* `total_estimated_budget`: The proposed validation amount.
* `proposal_document_url`: Link to the uploaded proposal PDF/Doc.
**`dealer_claim_history`**
Snapshots of the claim data at various approval stages.
* `snapshot_data`: JSON dump of the claim state.
* `version`: Incremental version number.
---
### 3.4. Financial Module
Manages budgeting, internal orders, and invoicing.
```mermaid
erDiagram
workflow_requests ||--|| claim_budget_tracking : "budget"
workflow_requests ||--|| internal_orders : "io"
workflow_requests ||--o{ claim_invoices : "invoices"
claim_invoices ||--o{ claim_credit_notes : "credit_notes"
```
#### Tables
**`claim_budget_tracking`**
Central ledger for a request's financial lifecycle.
* `initial_estimated_budget`: Original requested amount.
* `approved_budget`: Validated amount after approvals.
* `io_blocked_amount`: Amount reserved in SAP.
* `final_claim_amount`: Actual payout amount.
**`internal_orders`**
SAP Internal Order references.
* `io_number`: The IO code from SAP.
* `io_available_balance`, `io_blocked_amount`: Balance tracking.
**`claim_invoices`**
Invoices submitted against the claim.
* `invoice_number`: Vendor invoice ID.
* `amount`: Invoice value.
* `dms_number`: Document Management System reference.
**`claim_credit_notes`**
Adjustments/Returns linked to invoices.
* `credit_note_amount`: Value to be deducted/adjusted.
---
### 3.5. Ancillary Modules
Support functions like notifications, tracking, and logs.
#### Tables
**`notifications`**
User alerts.
* `is_read`: Read status.
* `action_url`: Deep link to the relevant request.
**`tat_tracking`**
Turnaround Time monitoring.
* `tracking_type`: REQUEST (overall) or LEVEL (step-specific).
* `total_tat_hours`: The allowed time.
* `elapsed_hours`: Time consumed so far.
* `breached_flags`: `threshold_50_breached`, etc.
**`tat_alerts`**
Logs of TAT breach notifications sent.
* `alert_type`: TAT_50, TAT_75, TAT_100.
* `is_breached`: Confirmed breach status.
**`request_summaries`**
AI or manually generated summaries of complex requests.
* `is_ai_generated`: Origin flag.
* `description`, `closing_remarks`: Narrative text.

View File

@ -112,7 +112,7 @@ Your CSV file must have these **44 columns** in the following order:
| `on_boarding_charges` | Decimal | No | Numeric value (e.g., 1000.50) | | `on_boarding_charges` | Decimal | No | Numeric value (e.g., 1000.50) |
| `date` | Date | No | Format: YYYY-MM-DD (e.g., 2014-09-30) | | `date` | Date | No | Format: YYYY-MM-DD (e.g., 2014-09-30) |
| `single_format_month_year` | String(50) | No | Format: Sep-2014 | | `single_format_month_year` | String(50) | No | Format: Sep-2014 |
| `domain_id` | String(255) | No | Email domain (e.g., dealer@{{APP_DOMAIN}}) | | `domain_id` | String(255) | No | Email domain (e.g., dealer@royalenfield.com) |
| `replacement` | String(50) | No | Replacement status | | `replacement` | String(50) | No | Replacement status |
| `termination_resignation_status` | String(255) | No | Termination/Resignation status | | `termination_resignation_status` | String(255) | No | Termination/Resignation status |
| `date_of_termination_resignation` | Date | No | Format: YYYY-MM-DD | | `date_of_termination_resignation` | Date | No | Format: YYYY-MM-DD |
@ -183,7 +183,7 @@ Ensure dates are in `YYYY-MM-DD` format:
```csv ```csv
sales_code,service_code,gear_code,gma_code,region,dealership,state,district,city,location,city_category_pst,layout_format,tier_city_category,on_boarding_charges,date,single_format_month_year,domain_id,replacement,termination_resignation_status,date_of_termination_resignation,last_date_of_operations,old_codes,branch_details,dealer_principal_name,dealer_principal_email_id,dp_contact_number,dp_contacts,showroom_address,showroom_pincode,workshop_address,workshop_pincode,location_district,state_workshop,no_of_studios,website_update,gst,pan,firm_type,prop_managing_partners_directors,total_prop_partners_directors,docs_folder_link,workshop_gma_codes,existing_new,dlrcode sales_code,service_code,gear_code,gma_code,region,dealership,state,district,city,location,city_category_pst,layout_format,tier_city_category,on_boarding_charges,date,single_format_month_year,domain_id,replacement,termination_resignation_status,date_of_termination_resignation,last_date_of_operations,old_codes,branch_details,dealer_principal_name,dealer_principal_email_id,dp_contact_number,dp_contacts,showroom_address,showroom_pincode,workshop_address,workshop_pincode,location_district,state_workshop,no_of_studios,website_update,gst,pan,firm_type,prop_managing_partners_directors,total_prop_partners_directors,docs_folder_link,workshop_gma_codes,existing_new,dlrcode
5124,5125,5573,9430,S3,Accelerate Motors,Karnataka,Bengaluru,Bengaluru,RAJA RAJESHWARI NAGAR,A+,A+,Tier 1 City,,2014-09-30,Sep-2014,acceleratemotors.rrnagar@dealer.{{APP_DOMAIN}},,,,,,,N. Shyam Charmanna,shyamcharmanna@yahoo.co.in,7022049621,7022049621,"No.335, HVP RR Nagar Sector B, Ideal Homes Town Ship, Bangalore - 560098, Dist Bangalore, Karnataka",560098,"Works Shop No.460, 80ft Road, 2nd Phase R R Nagar, Bangalore - 560098, Dist Bangalore, Karnataka",560098,Bangalore,Karnataka,0,Yes,29ARCPS1311D1Z6,ARCPS1311D,Proprietorship,CHARMANNA SHYAM NELLAMAKADA,CHARMANNA SHYAM NELLAMAKADA,https://drive.google.com/drive/folders/1sGtg3s1h9aBXX9fhxJufYuBWar8gVvnb,,,3386 5124,5125,5573,9430,S3,Accelerate Motors,Karnataka,Bengaluru,Bengaluru,RAJA RAJESHWARI NAGAR,A+,A+,Tier 1 City,,2014-09-30,Sep-2014,acceleratemotors.rrnagar@dealer.royalenfield.com,,,,,,,N. Shyam Charmanna,shyamcharmanna@yahoo.co.in,7022049621,7022049621,"No.335, HVP RR Nagar Sector B, Ideal Homes Town Ship, Bangalore - 560098, Dist Bangalore, Karnataka",560098,"Works Shop No.460, 80ft Road, 2nd Phase R R Nagar, Bangalore - 560098, Dist Bangalore, Karnataka",560098,Bangalore,Karnataka,0,Yes,29ARCPS1311D1Z6,ARCPS1311D,Proprietorship,CHARMANNA SHYAM NELLAMAKADA,CHARMANNA SHYAM NELLAMAKADA,https://drive.google.com/drive/folders/1sGtg3s1h9aBXX9fhxJufYuBWar8gVvnb,,,3386
``` ```
**What gets auto-generated:** **What gets auto-generated:**

View File

@ -1,29 +0,0 @@
# Dealer Integration Implementation Status
This document summarizes the changes made to integrate the external Royal Enfield Dealer API and implement the dealer validation logic during request creation.
## Completed Work
### 1. External Dealer API Integration
- **Service**: `DealerExternalService` in `src/services/dealerExternal.service.ts`
- Implemented `getDealerByCode` to fetch data from `https://api-uat2.royalenfield.com/DealerMaster`.
- Returns real-time GSTIN, Address, and location details.
- **Controller & Routes**: Integrated under `/api/v1/dealers-external/search/:dealerCode`.
- **Enrichment**: `DealerService.getDealerByCode` now automatically merges this external data into the system's `DealerInfo`, benefiting PWC and PDF generation services.
### 2. Dealer Validation & Field Mapping Fix
- **Strategic Mapping**: Based on requirement, all dealer codes are now mapped against the `employeeNumber` field (HR ID) in the `User` model, not `employeeId`.
- **User Enrichment Service**: `validateDealerUser(dealerCode)` now searches by `employeeNumber`.
- **SSO Alignment**: `AuthService.ts` now extracts `dealer_code` from the authentication response and persists it to `employeeNumber`.
- **Dealer Service**: `getDealerByCode` uses jobTitle-based validation against the `User` table as the primary lookup.
### 3. Claim Workflow Integration
- **Dealer Claim Service**: `createClaimRequest` validates the dealer immediately and overrides approver steps 1 and 4 with the validated user.
- **Workflow Controller**: Enforces dealer validation for all `DEALER CLAIM` templates and any request containing a `dealerCode`.
### 4. E-Invoice & PDF Alignment
- **PWC Integration**: `generateSignedInvoice` now uses the enriched `DealerInfo` containing the correct external GSTIN and state code.
- **Invoice PDF**: `PdfService` correctly displays the external dealer name, GSTIN, and POS from the source of truth.
## Conclusion
All integrated components have been verified via test scripts and end-to-end flow analysis. The dependency on the local `dealers` table has been successfully minimized, and the system now relies on the `User` table and External API as the primary sources of dealer information.

View File

@ -56,7 +56,7 @@ users {
```json ```json
{ {
"userId": "uuid-1", "userId": "uuid-1",
"email": "john.doe@{{APP_DOMAIN}}", "email": "john.doe@royalenfield.com",
"employeeId": "E12345", // Regular employee ID "employeeId": "E12345", // Regular employee ID
"designation": "Software Engineer", "designation": "Software Engineer",
"department": "IT", "department": "IT",
@ -68,7 +68,7 @@ users {
```json ```json
{ {
"userId": "uuid-2", "userId": "uuid-2",
"email": "test.2@{{APP_DOMAIN}}", "email": "test.2@royalenfield.com",
"employeeId": "RE-MH-001", // Dealer code stored here "employeeId": "RE-MH-001", // Dealer code stored here
"designation": "Dealer", "designation": "Dealer",
"department": "Dealer Operations", "department": "Dealer Operations",

View File

@ -98,8 +98,8 @@ DMS_WEBHOOK_SECRET=your_shared_secret_key_here
**Base URL Examples:** **Base URL Examples:**
- Development: `http://localhost:5000/api/v1/webhooks/dms/invoice` - Development: `http://localhost:5000/api/v1/webhooks/dms/invoice`
- UAT: `https://reflow-uat.{{APP_DOMAIN}}/api/v1/webhooks/dms/invoice` - UAT: `https://reflow-uat.royalenfield.com/api/v1/webhooks/dms/invoice`
- Production: `https://reflow.{{APP_DOMAIN}}/api/v1/webhooks/dms/invoice` - Production: `https://reflow.royalenfield.com/api/v1/webhooks/dms/invoice`
### 3.2 Request Headers ### 3.2 Request Headers
@ -205,8 +205,8 @@ User-Agent: DMS-Webhook-Client/1.0
**Base URL Examples:** **Base URL Examples:**
- Development: `http://localhost:5000/api/v1/webhooks/dms/credit-note` - Development: `http://localhost:5000/api/v1/webhooks/dms/credit-note`
- UAT: `https://reflow-uat.{{APP_DOMAIN}}/api/v1/webhooks/dms/credit-note` - UAT: `https://reflow-uat.royalenfield.com/api/v1/webhooks/dms/credit-note`
- Production: `https://reflow.{{APP_DOMAIN}}/api/v1/webhooks/dms/credit-note` - Production: `https://reflow.royalenfield.com/api/v1/webhooks/dms/credit-note`
### 4.2 Request Headers ### 4.2 Request Headers
@ -563,8 +563,8 @@ DMS_WEBHOOK_SECRET=your_shared_secret_key_here
| Environment | Invoice Webhook URL | Credit Note Webhook URL | | Environment | Invoice Webhook URL | Credit Note Webhook URL |
|-------------|---------------------|-------------------------| |-------------|---------------------|-------------------------|
| Development | `http://localhost:5000/api/v1/webhooks/dms/invoice` | `http://localhost:5000/api/v1/webhooks/dms/credit-note` | | Development | `http://localhost:5000/api/v1/webhooks/dms/invoice` | `http://localhost:5000/api/v1/webhooks/dms/credit-note` |
| UAT | `https://reflow-uat.{{APP_DOMAIN}}/api/v1/webhooks/dms/invoice` | `https://reflow-uat.{{APP_DOMAIN}}/api/v1/webhooks/dms/credit-note` | | UAT | `https://reflow-uat.royalenfield.com/api/v1/webhooks/dms/invoice` | `https://reflow-uat.royalenfield.com/api/v1/webhooks/dms/credit-note` |
| Production | `https://reflow.{{APP_DOMAIN}}/api/v1/webhooks/dms/invoice` | `https://reflow.{{APP_DOMAIN}}/api/v1/webhooks/dms/credit-note` | | Production | `https://reflow.royalenfield.com/api/v1/webhooks/dms/invoice` | `https://reflow.royalenfield.com/api/v1/webhooks/dms/credit-note` |
--- ---

View File

@ -24,12 +24,19 @@ erDiagram
workflow_requests ||--|| claim_invoices : claim_invoice workflow_requests ||--|| claim_invoices : claim_invoice
workflow_requests ||--|| claim_credit_notes : claim_credit_note workflow_requests ||--|| claim_credit_notes : claim_credit_note
work_notes ||--o{ work_note_attachments : has work_notes ||--o{ work_note_attachments : has
notifications ||--o{ email_logs : sends
notifications ||--o{ sms_logs : sends
workflow_requests ||--o{ report_cache : caches workflow_requests ||--o{ report_cache : caches
workflow_requests ||--o{ audit_logs : audits workflow_requests ||--o{ audit_logs : audits
workflow_requests ||--o{ workflow_templates : templates workflow_requests ||--o{ workflow_templates : templates
users ||--o{ system_settings : updates users ||--o{ system_settings : updates
workflow_requests ||--o{ dealer_claim_history : has_history
workflow_requests ||--o{ tat_alerts : triggers
workflow_requests ||--|| request_summaries : summarizes
request_summaries ||--o{ shared_summaries : shared_as
users ||--o{ shared_summaries : shares
users ||--o{ subscriptions : has_device
users ||--o{ holidays : manages
users ||--o{ activity_types : manages
users { users {
uuid user_id PK uuid user_id PK
@ -286,46 +293,7 @@ erDiagram
varchar logout_reason varchar logout_reason
} }
email_logs {
uuid email_log_id PK
uuid request_id FK
uuid notification_id FK
varchar recipient_email
uuid recipient_user_id FK
text[] cc_emails
text[] bcc_emails
varchar subject
text body
varchar email_type
varchar status
integer send_attempts
timestamp sent_at
timestamp failed_at
text failure_reason
timestamp opened_at
timestamp clicked_at
timestamp created_at
}
sms_logs {
uuid sms_log_id PK
uuid request_id FK
uuid notification_id FK
varchar recipient_phone
uuid recipient_user_id FK
text message
varchar sms_type
varchar status
integer send_attempts
timestamp sent_at
timestamp delivered_at
timestamp failed_at
text failure_reason
varchar sms_provider
varchar sms_provider_message_id
decimal cost
timestamp created_at
}
system_settings { system_settings {
uuid setting_id PK uuid setting_id PK
@ -505,3 +473,94 @@ erDiagram
timestamp updated_at timestamp updated_at
} }
dealers {
uuid dealer_id PK
varchar sales_code
varchar service_code
varchar dealer_name
varchar region
varchar state
varchar city
varchar location
boolean is_active
timestamp created_at
timestamp updated_at
}
dealer_claim_history {
uuid history_id PK
uuid request_id FK
uuid approval_level_id FK
integer version
enum snapshot_type
jsonb snapshot_data
text change_reason
uuid changed_by FK
timestamp created_at
}
holidays {
uuid holiday_id PK
date holiday_date
varchar holiday_name
enum holiday_type
boolean is_active
uuid created_by FK
timestamp created_at
timestamp updated_at
}
activity_types {
uuid activity_type_id PK
varchar title
varchar item_code
varchar taxation_type
boolean is_active
uuid created_by FK
timestamp created_at
timestamp updated_at
}
tat_alerts {
uuid alert_id PK
uuid request_id FK
uuid level_id FK
uuid approver_id FK
enum alert_type
boolean is_breached
timestamp alert_sent_at
timestamp created_at
}
request_summaries {
uuid summary_id PK
uuid request_id FK
uuid initiator_id FK
varchar title
text description
text closing_remarks
boolean is_ai_generated
timestamp created_at
timestamp updated_at
}
shared_summaries {
uuid shared_summary_id PK
uuid summary_id FK
uuid shared_by FK
uuid shared_with FK
boolean is_read
timestamp shared_at
timestamp created_at
}
subscriptions {
uuid subscription_id PK
uuid user_id FK
varchar endpoint
varchar p256dh
varchar auth
timestamp created_at
}

View File

@ -157,7 +157,7 @@ npm run seed:config
```bash ```bash
# Edit the script # Edit the script
nano scripts/assign-admin-user.sql nano scripts/assign-admin-user.sql
# Change: YOUR_EMAIL@{{APP_DOMAIN}} # Change: YOUR_EMAIL@royalenfield.com
# Run it # Run it
psql -d royal_enfield_workflow -f scripts/assign-admin-user.sql psql -d royal_enfield_workflow -f scripts/assign-admin-user.sql
@ -170,7 +170,7 @@ psql -d royal_enfield_workflow
UPDATE users UPDATE users
SET role = 'ADMIN' SET role = 'ADMIN'
WHERE email = 'your-email@{{APP_DOMAIN}}'; WHERE email = 'your-email@royalenfield.com';
-- Verify -- Verify
SELECT email, role FROM users WHERE role = 'ADMIN'; SELECT email, role FROM users WHERE role = 'ADMIN';
@ -188,7 +188,7 @@ psql -d royal_enfield_workflow -c "\dt"
psql -d royal_enfield_workflow -c "\dT+ user_role_enum" psql -d royal_enfield_workflow -c "\dT+ user_role_enum"
# Check your user # Check your user
psql -d royal_enfield_workflow -c "SELECT email, role FROM users WHERE email = 'your-email@{{APP_DOMAIN}}';" psql -d royal_enfield_workflow -c "SELECT email, role FROM users WHERE email = 'your-email@royalenfield.com';"
``` ```
--- ---
@ -241,13 +241,13 @@ Expected output:
```sql ```sql
-- Single user -- Single user
UPDATE users SET role = 'MANAGEMENT' UPDATE users SET role = 'MANAGEMENT'
WHERE email = 'manager@{{APP_DOMAIN}}'; WHERE email = 'manager@royalenfield.com';
-- Multiple users -- Multiple users
UPDATE users SET role = 'MANAGEMENT' UPDATE users SET role = 'MANAGEMENT'
WHERE email IN ( WHERE email IN (
'manager1@{{APP_DOMAIN}}', 'manager1@royalenfield.com',
'manager2@{{APP_DOMAIN}}' 'manager2@royalenfield.com'
); );
-- By department -- By department
@ -260,13 +260,13 @@ WHERE department = 'Management' AND is_active = true;
```sql ```sql
-- Single user -- Single user
UPDATE users SET role = 'ADMIN' UPDATE users SET role = 'ADMIN'
WHERE email = 'admin@{{APP_DOMAIN}}'; WHERE email = 'admin@royalenfield.com';
-- Multiple admins -- Multiple admins
UPDATE users SET role = 'ADMIN' UPDATE users SET role = 'ADMIN'
WHERE email IN ( WHERE email IN (
'admin1@{{APP_DOMAIN}}', 'admin1@royalenfield.com',
'admin2@{{APP_DOMAIN}}' 'admin2@royalenfield.com'
); );
-- By department -- By department
@ -331,7 +331,7 @@ SELECT
mobile_phone, mobile_phone,
array_length(ad_groups, 1) as ad_group_count array_length(ad_groups, 1) as ad_group_count
FROM users FROM users
WHERE email = 'your-email@{{APP_DOMAIN}}'; WHERE email = 'your-email@royalenfield.com';
``` ```
--- ---
@ -344,7 +344,7 @@ WHERE email = 'your-email@{{APP_DOMAIN}}';
curl -X POST http://localhost:5000/api/v1/auth/okta/callback \ curl -X POST http://localhost:5000/api/v1/auth/okta/callback \
-H "Content-Type: application/json" \ -H "Content-Type: application/json" \
-d '{ -d '{
"email": "test@{{APP_DOMAIN}}", "email": "test@royalenfield.com",
"displayName": "Test User", "displayName": "Test User",
"oktaSub": "test-sub-123" "oktaSub": "test-sub-123"
}' }'
@ -353,14 +353,14 @@ curl -X POST http://localhost:5000/api/v1/auth/okta/callback \
### 2. Check User Created with Default Role ### 2. Check User Created with Default Role
```sql ```sql
SELECT email, role FROM users WHERE email = 'test@{{APP_DOMAIN}}'; SELECT email, role FROM users WHERE email = 'test@royalenfield.com';
-- Expected: role = 'USER' -- Expected: role = 'USER'
``` ```
### 3. Update to ADMIN ### 3. Update to ADMIN
```sql ```sql
UPDATE users SET role = 'ADMIN' WHERE email = 'test@{{APP_DOMAIN}}'; UPDATE users SET role = 'ADMIN' WHERE email = 'test@royalenfield.com';
``` ```
### 4. Verify API Access ### 4. Verify API Access
@ -369,7 +369,7 @@ UPDATE users SET role = 'ADMIN' WHERE email = 'test@{{APP_DOMAIN}}';
# Login and get token # Login and get token
curl -X POST http://localhost:5000/api/v1/auth/login \ curl -X POST http://localhost:5000/api/v1/auth/login \
-H "Content-Type: application/json" \ -H "Content-Type: application/json" \
-d '{"email": "test@{{APP_DOMAIN}}", ...}' -d '{"email": "test@royalenfield.com", ...}'
# Try admin endpoint (should work if ADMIN role) # Try admin endpoint (should work if ADMIN role)
curl http://localhost:5000/api/v1/admin/configurations \ curl http://localhost:5000/api/v1/admin/configurations \
@ -449,7 +449,7 @@ npm run migrate
```sql ```sql
-- Check if user exists -- Check if user exists
SELECT * FROM users WHERE email = 'your-email@{{APP_DOMAIN}}'; SELECT * FROM users WHERE email = 'your-email@royalenfield.com';
-- Check Okta sub -- Check Okta sub
SELECT * FROM users WHERE okta_sub = 'your-okta-sub'; SELECT * FROM users WHERE okta_sub = 'your-okta-sub';
@ -459,7 +459,7 @@ SELECT * FROM users WHERE okta_sub = 'your-okta-sub';
```sql ```sql
-- Verify role -- Verify role
SELECT email, role, is_active FROM users WHERE email = 'your-email@{{APP_DOMAIN}}'; SELECT email, role, is_active FROM users WHERE email = 'your-email@royalenfield.com';
-- Check role enum -- Check role enum
\dT+ user_role_enum \dT+ user_role_enum

View File

@ -29,7 +29,7 @@ This guide provides step-by-step instructions for setting up Google Cloud Storag
|------|------------------| |------|------------------|
| **Application** | Royal Enfield Workflow System | | **Application** | Royal Enfield Workflow System |
| **Environment** | Production | | **Environment** | Production |
| **Domain** | `https://reflow.{{APP_DOMAIN}}` | | **Domain** | `https://reflow.royalenfield.com` |
| **Purpose** | Store workflow documents, attachments, invoices, and credit notes | | **Purpose** | Store workflow documents, attachments, invoices, and credit notes |
| **Storage Type** | Google Cloud Storage (GCS) | | **Storage Type** | Google Cloud Storage (GCS) |
| **Region** | `asia-south1` (Mumbai) | | **Region** | `asia-south1` (Mumbai) |
@ -325,8 +325,8 @@ Create `cors-config-prod.json`:
[ [
{ {
"origin": [ "origin": [
"https://reflow.{{APP_DOMAIN}}", "https://reflow.royalenfield.com",
"https://www.{{APP_DOMAIN}}" "https://www.royalenfield.com"
], ],
"method": ["GET", "PUT", "POST", "DELETE", "HEAD", "OPTIONS"], "method": ["GET", "PUT", "POST", "DELETE", "HEAD", "OPTIONS"],
"responseHeader": [ "responseHeader": [

View File

@ -6,7 +6,7 @@
|------|-------| |------|-------|
| **Application** | RE Workflow System | | **Application** | RE Workflow System |
| **Environment** | UAT | | **Environment** | UAT |
| **Domain** | https://reflow-uat.{{APP_DOMAIN}} | | **Domain** | https://reflow-uat.royalenfield.com |
| **Purpose** | Store workflow documents and attachments | | **Purpose** | Store workflow documents and attachments |
--- ---
@ -131,8 +131,8 @@ Apply this CORS policy to allow browser uploads:
[ [
{ {
"origin": [ "origin": [
"https://reflow-uat.{{APP_DOMAIN}}", "https://reflow-uat.royalenfield.com",
"https://reflow.{{APP_DOMAIN}}" "https://reflow.royalenfield.com"
], ],
"method": ["GET", "PUT", "POST", "DELETE", "HEAD", "OPTIONS"], "method": ["GET", "PUT", "POST", "DELETE", "HEAD", "OPTIONS"],
"responseHeader": [ "responseHeader": [

View File

@ -0,0 +1,41 @@
# MongoDB Atlas v8.0 Readiness Update
**Date**: 2026-02-05
**Project**: Royal Enfield Workflow Management System
**Subject**: Technical Audit and Readiness for MongoDB v8.0 Upgrade
## Executive Summary
Following a comprehensive technical audit of the Workflow Management System backend, we have confirmed that the application layer is fully compatible with MongoDB Atlas v8.0. The current stack (Node.js 22, Mongoose 9) is optimized for the v8 engine, and the codebase has been verified to be free of any deprecated legacy features.
## 💻 Tech Stack Compatibility
| Component | Version | Readiness Status |
| :--- | :--- | :--- |
| **Node.js Runtime** | v22.x | Fully Compatible |
| **Mongoose ODM** | v9.1.5 | Native v8.0 Support |
| **Connection Driver** | MongoDB Node.js Driver v6+ equivalent | Verified |
## 🔍 Codebase Audit Results
### 1. Feature Deprecation Check
We have verified that the following legacy features, removed in v8.0, are **not used** in our codebase:
- **Map-Reduce**: All reporting and KPI logic has been migrated to the modern Aggregation Pipeline.
- **Legacy Group Command**: Using `$group` within aggregation pipelines instead.
- **$where Operator**: All dynamic queries have been refactored to use `$expr` or standard filters to improve performance and security.
- **geoHaystack Indexes**: Not utilized in the project.
### 2. Connection Strategy
Our connection logic is designed for resilient SRV connectivity:
- Implements DNS resolution workarounds for reliable Atlas SRV lookups.
- Configured with robust timeout and selection parameters.
## 🚀 Post-Upgrade Optimization Roadmap
Once the cluster is upgraded to v8.0, the application team recommends the following optimizations:
1. **Atlas Search Integration**: Migrate full-text search requirements from standard regex to Lucene-based Atlas Search.
2. **Encryption**: Evaluate **Queryable Encryption** for enhanced protection of sensitive workflow data.
3. **Performance Advisor**: Review Atlas Performance Advisor recommendations for any new compound index opportunities enabled by the v8 engine's improved query optimizer.
## ✅ Conclusion
The application is **ready for upgrade**. No blockers have been identified in the current production codebase.

View File

@ -72,8 +72,8 @@ The Users API returns a complete user object:
"employeeID": "E09994", "employeeID": "E09994",
"title": "Supports Business Applications (SAP) portfolio", "title": "Supports Business Applications (SAP) portfolio",
"department": "Deputy Manager - Digital & IT", "department": "Deputy Manager - Digital & IT",
"login": "sanjaysahu@{{APP_DOMAIN}}", "login": "sanjaysahu@Royalenfield.com",
"email": "sanjaysahu@{{APP_DOMAIN}}" "email": "sanjaysahu@royalenfield.com"
}, },
... ...
} }
@ -127,7 +127,7 @@ Example log:
### Test with curl ### Test with curl
```bash ```bash
curl --location 'https://{{IDP_DOMAIN}}/api/v1/users/testuser10@eichergroup.com' \ curl --location 'https://dev-830839.oktapreview.com/api/v1/users/testuser10@eichergroup.com' \
--header 'Authorization: SSWS YOUR_OKTA_API_TOKEN' \ --header 'Authorization: SSWS YOUR_OKTA_API_TOKEN' \
--header 'Accept: application/json' --header 'Accept: application/json'
``` ```

View File

@ -0,0 +1,61 @@
# Implementation Plan: Status Ambiguity Refinement
This document outlines the specific code changes required to implement the **Dual-Key Status Architecture**.
## 1. Goal
Decouple the business outcome (Approved/Rejected) from the lifecycle state (Open/Closed/Draft) to ensure transparency in finalized requests.
## 2. Schema Changes
### `WorkflowRequest.schema.ts`
- **Update `status` Enum**: Remove `CLOSED` and `CANCELLED`.
- **Add `workflowState`**:
- Type: `String`
- Enum: `['DRAFT', 'OPEN', 'CLOSED']`
- Default: `'DRAFT'`
- Index: `true`
## 3. Logic Updates
### A. Workflow Creation (`WorkflowService.createWorkflow`)
- Initialize `status: 'DRAFT'`.
- Initialize `workflowState: 'DRAFT'`.
- Set `isDraft: true`.
### B. Workflow Submission (`WorkflowService.submitRequest`)
- Update `status: 'PENDING'`.
- Update `workflowState: 'OPEN'`.
- Set `isDraft: false`.
### C. Approval/Rejection (`WorkflowService`)
- When approved at a level: Keep `status` as `IN_PROGRESS` or set to `APPROVED` if final.
- When rejected: Set `status` to `REJECTED`.
- **Crucial**: The `workflowState` remains `OPEN` during these actions.
### D. Finalization (`ConclusionController.finalizeConclusion`)
- **Current Behavior**: Sets `status = 'CLOSED'`.
- **New Behavior**:
- Sets `workflowState = 'CLOSED'`.
- **Does NOT** change `status`. The `status` will remain `APPROVED` or `REJECTED`.
- Sets `closureDate = new Date()`.
### E. Pause Logic (`PauseMongoService`)
- Set `status = 'PAUSED'`.
- Set `isPaused = true`.
- Keep `workflowState = 'OPEN'`.
## 4. Dashboard & KPI Updates (`DashboardMongoService`)
### `getRequestStats`
- Update the aggregation pipeline to group by `workflowState`.
- `OPEN` category will now include all requests where `workflowState == 'OPEN'`.
- `CLOSED` category will now include all requests where `workflowState == 'CLOSED'`.
- This ensures that a "Closed" count on the dashboard includes both Approved and Rejected requests that have been finalized.
### `getTATEfficiency`
- Update match criteria to `workflowState: 'CLOSED'` instead of `status: 'CLOSED'`.
## 5. Filter Alignment (`listWorkflowsInternal`)
- Update the status filter to handle the new field mapping.
- If user filters by `status: 'CLOSED'`, the query will target `workflowState: 'CLOSED'`.
- If user filters by `status: 'APPROVED'`, the query will target `status: 'APPROVED'`.

View File

@ -0,0 +1,113 @@
# Why PostgreSQL Wins for "Royal Enfield Workflow"
## Executive Summary
For "Royal Enfield Workflow", **PostgreSQL is superior to MongoDB**.
The decision rests on **Reporting Speed** and **Deep Filtering capabilities**. Your workflow requires filtering by *Relationships* (Approvers, Departments), not just static data.
---
## 1. Complex Workflow Filters (The "My Tasks" Problem)
Users need specific views like "Requests waiting for me" or "Paused requests".
### A. "Requests Open For Me" (The Join Filter)
*Scenario: Show all requests where **I am the current approver**.*
#### PostgreSQL (Simple SQL `JOIN`)
Index usage is optimal: the database reads only the few rows in `approval_levels` assigned to you.
```sql
SELECT r.id, r.status, r.created_at
FROM workflow_requests r
JOIN approval_levels al ON r.id = al.request_id
WHERE al.approver_id = 'USER_UUID_123'
AND al.status = 'PENDING'
ORDER BY r.created_at DESC;
```
#### MongoDB (Array Query + Sort Issue)
You must index inside an array. If you sort by "Date", Mongo often cannot use the index effectively for both the *array match* and the *sort*, leading to slow scans.
```javascript
db.requests.find({
"approvers": {
$elemMatch: {
userId: "USER_UUID_123",
status: "PENDING"
}
}
}).sort({ createdAt: -1 });
// WARNING: Performance degrades heavily if user has many historical requests
```
### B. "Paused & Resumed" History
*Scenario: Show requests that were previously Paused but are now Active (requires checking history).*
#### PostgreSQL (Audit Log Join)
You query the history table directly without loading the main request data until the match is found.
```sql
SELECT DISTINCT r.*
FROM workflow_requests r
JOIN audit_logs log ON r.id = log.request_id
WHERE log.action = 'PAUSED'
AND r.status = 'IN_PROGRESS';
```
#### MongoDB (The "Lookup" or "Bloat" Trade-off)
**Option 1: Lookups (Slow)**
You have to join the separate `audit_logs` collection for every request.
```javascript
db.requests.aggregate([
{ $match: { status: "IN_PROGRESS" } },
{
$lookup: {
from: "audit_logs",
localField: "_id",
foreignField: "requestId",
as: "history"
}
},
{ $match: { "history.action": "PAUSED" } }
]);
```
**Option 2: Embedding (Bloated)**
You store every log inside the Request document.
* *Result*: Your generic `db.requests.find({})` becomes 10x slower because it's dragging megabytes of history logs across the network for every result.
## 2. The Filter Nightmare: "Deep Filtering"
Users expect to slice-and-dice data freely. *Example: "Show requests initiated by users in the 'Sales' Department".*
* **Postgres (Cross-Table Filter)**:
```sql
SELECT * FROM workflow_requests r
JOIN users u ON r.initiator_id = u.id
WHERE u.department = 'Sales'
```
* **Result**: Instant. SQL simply filters the `users` table first (using an index on `department`) and then grabs the matching requests.
* **MongoDB (The "Lookup" Trap)**:
* `Department` is stored on the **User** document, not the Request.
* To filter Requests by "Department", you must `$lookup` (join) the User collection for *every single request* before you can filter them.
* *Alternative*: Copy `department` into every Request document.
* *Maintenance Cost*: If a user transfers from 'Sales' to 'Marketing', you must run a script to update all their historical requests, or your reports will be wrong.
## 3. Dashboard: The "Aggregation" Bottleneck
Your dashboard provides real-time insights (e.g., "Approver Efficiency," "TAT per Region").
* **Window Functions (SQL Superpower)**:
* *Requirement*: Rank dealers by "Average Approval Time" compared to their peers.
 * *Postgres*: `RANK() OVER (PARTITION BY region ORDER BY avg_tat)` runs natively and instantly.
* *MongoDB*: Requires complex Aggregation Pipelines (`$setWindowFields`) that are memory-intensive and harder to optimize.
## 4. Audit & Compliance
* **Postgres**: Foreign Key constraints prevent "Orphaned Logs." You cannot delete a User if they are referenced in an Audit Log. This guarantees **legal traceability**.
* **MongoDB**: No constraints. Deleting a user can leave "Ghost Logs" (Referencing a null ID), breaking compliance reports.
## Summary Verdict
| Feature | PostgreSQL | MongoDB |
| :--- | :--- | :--- |
| **"Open For Me"** | **Simple Join** | **Complex Array Indexing** |
| **Dept/Region Filters** | **Simple Join** | **Slow Lookup** or **Duplicated Data** |
| **Ad-Hoc Reports** | **Flexible** | **Rigid** (Needs Indexes) |
| **Audit Compliance** | **Guaranteed** | **Risk of Orphaned Data** |
**Recommendation**: Stick with PostgreSQL.
The "Relational" nature of your reporting (Connecting Requests -> Users -> Departments -> Regions) is exactly what SQL was built to solve efficiently.

View File

@ -450,16 +450,16 @@ Before Migration:
+-------------------------+-----------+ +-------------------------+-----------+
| email | is_admin | | email | is_admin |
+-------------------------+-----------+ +-------------------------+-----------+
| admin@{{APP_DOMAIN}} | true | | admin@royalenfield.com | true |
| user1@{{APP_DOMAIN}} | false | | user1@royalenfield.com | false |
+-------------------------+-----------+ +-------------------------+-----------+
After Migration: After Migration:
+-------------------------+-----------+-----------+ +-------------------------+-----------+-----------+
| email | role | is_admin | | email | role | is_admin |
+-------------------------+-----------+-----------+ +-------------------------+-----------+-----------+
| admin@{{APP_DOMAIN}} | ADMIN | true | | admin@royalenfield.com | ADMIN | true |
| user1@{{APP_DOMAIN}} | USER | false | | user1@royalenfield.com | USER | false |
+-------------------------+-----------+-----------+ +-------------------------+-----------+-----------+
``` ```
@ -473,17 +473,17 @@ After Migration:
-- Make user a MANAGEMENT role -- Make user a MANAGEMENT role
UPDATE users UPDATE users
SET role = 'MANAGEMENT', is_admin = false SET role = 'MANAGEMENT', is_admin = false
WHERE email = 'manager@{{APP_DOMAIN}}'; WHERE email = 'manager@royalenfield.com';
-- Make user an ADMIN role -- Make user an ADMIN role
UPDATE users UPDATE users
SET role = 'ADMIN', is_admin = true SET role = 'ADMIN', is_admin = true
WHERE email = 'admin@{{APP_DOMAIN}}'; WHERE email = 'admin@royalenfield.com';
-- Revert to USER role -- Revert to USER role
UPDATE users UPDATE users
SET role = 'USER', is_admin = false SET role = 'USER', is_admin = false
WHERE email = 'user@{{APP_DOMAIN}}'; WHERE email = 'user@royalenfield.com';
``` ```
### Via API (Admin Endpoint) ### Via API (Admin Endpoint)

View File

@ -47,12 +47,12 @@ psql -d royal_enfield_db -f scripts/assign-user-roles.sql
-- Make specific users ADMIN -- Make specific users ADMIN
UPDATE users UPDATE users
SET role = 'ADMIN', is_admin = true SET role = 'ADMIN', is_admin = true
WHERE email IN ('admin@{{APP_DOMAIN}}', 'it.admin@{{APP_DOMAIN}}'); WHERE email IN ('admin@royalenfield.com', 'it.admin@royalenfield.com');
-- Make specific users MANAGEMENT -- Make specific users MANAGEMENT
UPDATE users UPDATE users
SET role = 'MANAGEMENT', is_admin = false SET role = 'MANAGEMENT', is_admin = false
WHERE email IN ('manager@{{APP_DOMAIN}}', 'auditor@{{APP_DOMAIN}}'); WHERE email IN ('manager@royalenfield.com', 'auditor@royalenfield.com');
-- Verify roles -- Verify roles
SELECT email, display_name, role, is_admin FROM users ORDER BY role, email; SELECT email, display_name, role, is_admin FROM users ORDER BY role, email;
@ -219,7 +219,7 @@ GROUP BY role;
-- Check specific user -- Check specific user
SELECT email, role, is_admin SELECT email, role, is_admin
FROM users FROM users
WHERE email = 'your-email@{{APP_DOMAIN}}'; WHERE email = 'your-email@royalenfield.com';
``` ```
### Test 2: Test API Access ### Test 2: Test API Access
@ -356,7 +356,7 @@ WHERE designation ILIKE '%manager%' OR designation ILIKE '%head%';
```sql ```sql
SELECT email, role, is_admin SELECT email, role, is_admin
FROM users FROM users
WHERE email = 'your-email@{{APP_DOMAIN}}'; WHERE email = 'your-email@royalenfield.com';
``` ```
--- ---

View File

@ -314,7 +314,7 @@ JWT_EXPIRY=24h
REFRESH_TOKEN_EXPIRY=7d REFRESH_TOKEN_EXPIRY=7d
# Okta Configuration # Okta Configuration
OKTA_DOMAIN=https://{{IDP_DOMAIN}} OKTA_DOMAIN=https://dev-830839.oktapreview.com
OKTA_CLIENT_ID=your-client-id OKTA_CLIENT_ID=your-client-id
OKTA_CLIENT_SECRET=your-client-secret OKTA_CLIENT_SECRET=your-client-secret
@ -334,7 +334,7 @@ GCP_BUCKET_PUBLIC=true
**Identity Provider**: Okta **Identity Provider**: Okta
- **Domain**: Configurable via `OKTA_DOMAIN` environment variable - **Domain**: Configurable via `OKTA_DOMAIN` environment variable
- **Default**: `https://{{IDP_DOMAIN}}` - **Default**: `https://dev-830839.oktapreview.com`
- **Protocol**: OAuth 2.0 / OpenID Connect (OIDC) - **Protocol**: OAuth 2.0 / OpenID Connect (OIDC)
- **Grant Types**: Authorization Code, Resource Owner Password Credentials - **Grant Types**: Authorization Code, Resource Owner Password Credentials
@ -650,7 +650,7 @@ graph LR
{ {
"userId": "uuid", "userId": "uuid",
"employeeId": "EMP001", "employeeId": "EMP001",
"email": "user@{{APP_DOMAIN}}", "email": "user@royalenfield.com",
"role": "USER" | "MANAGEMENT" | "ADMIN", "role": "USER" | "MANAGEMENT" | "ADMIN",
"iat": 1234567890, "iat": 1234567890,
"exp": 1234654290 "exp": 1234654290
@ -1048,7 +1048,7 @@ JWT_EXPIRY=24h
REFRESH_TOKEN_EXPIRY=7d REFRESH_TOKEN_EXPIRY=7d
# Okta # Okta
OKTA_DOMAIN=https://{{IDP_DOMAIN}} OKTA_DOMAIN=https://dev-830839.oktapreview.com
OKTA_CLIENT_ID=your-client-id OKTA_CLIENT_ID=your-client-id
OKTA_CLIENT_SECRET=your-client-secret OKTA_CLIENT_SECRET=your-client-secret
@ -1063,7 +1063,7 @@ GCP_BUCKET_PUBLIC=true
**Frontend (.env):** **Frontend (.env):**
```env ```env
VITE_API_BASE_URL=https://api.rebridge.co.in/api/v1 VITE_API_BASE_URL=https://api.rebridge.co.in/api/v1
VITE_OKTA_DOMAIN=https://{{IDP_DOMAIN}} VITE_OKTA_DOMAIN=https://dev-830839.oktapreview.com
VITE_OKTA_CLIENT_ID=your-client-id VITE_OKTA_CLIENT_ID=your-client-id
``` ```

View File

@ -0,0 +1,55 @@
# Dual-Key Status Architecture
This document defines the status management system for the Royal Enfield Workflow application. It uses a "Dual-Key" approach to resolve ambiguity between request lifecycles and business outcomes.
## 1. Core Concepts
| Key | Purpose | Possible Values |
| :--- | :--- | :--- |
| **`status`** | **Business Outcome**. Tells you *what* happened or the current granular action. | `DRAFT`, `PENDING`, `IN_PROGRESS`, `APPROVED`, `REJECTED`, `PAUSED` |
| **`workflowState`** | **Lifecycle State**. Tells you *where* the request is in its journey. | `DRAFT`, `OPEN`, `CLOSED` |
---
## 2. Status Mapping Table
The `workflowState` is automatically derived from the `status` and the finalization event (Conclusion Remark).
| Primary Status | Finalized? | workflowState | Description |
| :--- | :--- | :--- | :--- |
| `DRAFT` | No | `DRAFT` | Request is being prepared by the initiator. |
| `PENDING` | No | `OPEN` | Waiting for first level activation or system processing. |
| `IN_PROGRESS` | No | `OPEN` | Actively moving through approval levels. |
| `PAUSED` | No | `OPEN` | Temporarily frozen; `isPaused` flag is `true`. |
| `APPROVED` | No | `OPEN` | All levels approved, but initiator hasn't written the final conclusion. |
| `REJECTED` | No | `OPEN` | Rejected by an approver, but initiator hasn't acknowledged/finalized. |
| **`APPROVED`** | **Yes** | **`CLOSED`** | **Final state: Approved and Archived.** |
| **`REJECTED`** | **Yes** | **`CLOSED`** | **Final state: Rejected and Archived.** |
---
## 3. Ambiguity Resolution (The "Why")
Previously, the system changed `status` to `CLOSED` after finalization, which destroyed the information about whether the request was Approved or Rejected.
**Corrected Behavior:**
- **Outcome remains visible**: A finalized request will now keep its `status` as `APPROVED` or `REJECTED`.
- **Filtering made easy**: Dashboard charts use `workflowState: 'CLOSED'` to count all finished work, while list filters use `status: 'APPROVED'` to find specific results.
---
## 4. Technical Implementation Notes
### Schema Changes
- **`WorkflowRequest`**: Added `workflowState` (String, Indexed).
- **`status` Enum**: Removed `CLOSED` (deprecated) and `CANCELLED`.
### Transition Logic
1. **Approval/Rejection**: Updates `status` to `APPROVED` or `REJECTED`. `workflowState` remains `OPEN`.
2. **Finalization (Conclusion)**: Triggered by initiator. Updates `workflowState` to `CLOSED`. **Does NOT change `status`.**
3. **Pause**: Set `status` to `PAUSED` and `isPaused: true`. `workflowState` stays `OPEN`.
### Impacted Services
- `DashboardMongoService`: Uses `workflowState` for Facet/KPI counts.
- `WorkflowService`: Filter logic updated to respect both keys.
- `ConclusionController`: `finalizeConclusion` logic updated to toggle `workflowState`.

View File

@ -64,7 +64,7 @@ await this.createClaimApprovalLevels(
isAuto: false, isAuto: false,
approverType: 'department_lead' as const, approverType: 'department_lead' as const,
approverId: departmentLead?.userId || null, approverId: departmentLead?.userId || null,
approverEmail: departmentLead?.email || initiator.manager || `deptlead@${appDomain}`, approverEmail: departmentLead?.email || initiator.manager || 'deptlead@royalenfield.com',
} }
``` ```

159
docs/SYSTEM_ARCHITECTURE.md Normal file
View File

@ -0,0 +1,159 @@
# Royal Enfield Workflow Management System - Technical Architecture Definition
## 1. Platform Overview
The Royal Enfield (RE) Workflow Management System is a resilient, horizontally scalable infrastructure designed to orchestrate complex internal business processes. It utilizes a decoupled, service-oriented architecture leveraging **Node.js (TypeScript)**, **MongoDB Atlas (v8)**, and **Google Cloud Storage (GCS)** to ensure high availability and performance across enterprise workflows.
This document focuses exclusively on the core platform infrastructure and custom workflow engine, excluding legacy dealer claim modules.
---
## 2. Global Architecture & Ingress
### A. High-Level System Architecture
```mermaid
graph TD
User((User / Client))
subgraph "Public Interface"
Nginx[Nginx Reverse Proxy]
end
subgraph "Application Layer (Node.js)"
Auth[Auth Middleware]
Core[Workflow Service]
Dynamic[Ad-hoc Logic]
AI[Vertex AI Service]
TAT[TAT Worker / BullMQ]
end
subgraph "Persistence & Infrastructure"
Atlas[(MongoDB Atlas v8)]
GCS_Bucket[GCS Bucket - Artifacts]
GSM[Google Secret Manager]
Redis[(Redis Cache)]
end
User --> Nginx
Nginx --> Auth
Auth --> Core
Core --> Dynamic
Core --> Atlas
Core --> GCS_Bucket
Core --> AI
TAT --> Redis
TAT --> Atlas
Core --> GSM
```
### B. Professional Entrance: Nginx Proxy
All incoming traffic is managed by **Nginx**, acting as the "Deployed Server" facade.
- **SSL Termination**: Encrypts traffic at the edge.
- **Micro-caching**: Caches static metadata to reduce load on Node.js.
- **Proxying**: Strategically routes `/api` to the backend and serves the production React bundle for root requests.
### C. Stateless Authentication (JWT + RBAC)
The platform follows a stateless security model:
1. **JWT Validation**: `auth.middleware.ts` verifies signatures using secrets managed by **Google Secret Manager (GSM)**.
2. **Context Enrichment**: User identity is synchronized from the `users` collection in MongoDB Atlas.
3. **Granular RBAC**: Access is governed by roles (`ADMIN`, `MANAGEMENT`, `USER`) and dynamic participant checks.
---
## 3. Background Processing & SLA Management (BullMQ)
At the heart of the platform's performance is the **Asynchronous Task Engine** powered by **BullMQ** and **Redis**.
### A. TAT (Turnaround Time) Tracking Logic
Turnaround time is monitored per-level using a highly accurate calculation engine that accounts for:
- **Business Days/Hours**: Weekend and holiday filtering via `tatTimeUtils.ts`.
- **Priority Multipliers**: Scaling TAT for `STANDARD` vs `EXPRESS` requests.
- **Pause Impact**: Snapshot-based SLA halting during business-case pauses.
### B. TAT Worker Flow (Redis Backed)
```mermaid
graph TD
Trigger[Request Assignment] --> Queue[tatQueue - BullMQ]
Queue --> Redis[(Redis Cache)]
Redis --> Worker[tatWorker.ts]
Worker --> Processor[tatProcessor.mongo.ts]
Processor --> Check{Threshold Reached?}
Check -->|50/75%| Notify[Reminder Notification]
Check -->|100%| Breach[Breach Alert + Escalation]
```
---
## 4. Multi-Channel Notification Dispatch Engine
The system ensures critical workflow events (Approvals, Breaches, Comments) reach users through three distinct synchronous and asynchronous channels.
### A. Channel Orchestration
Managed by `notification.service.ts`, the engine handles:
1. **Real-time (Socket.io)**: Immediate UI updates via room-based events.
2. **Web Push (Vapid)**: Browser-level push notifications for offline users.
3. **Enterprise Email**: Specialized services like `emailNotification.service.ts` dispatch templated HTML emails.
### B. Notification Lifecycle
```mermaid
sequenceDiagram
participant S as Service Layer
participant N as Notification Service
participant DB as MongoDB (NotificationModel)
participant SK as Socket.io
participant E as Email Service
S->>N: Trigger Event (e.g. "Assignment")
N->>DB: Persist Notification Record (Audit)
N->>SK: broadcast(user:id, "notification:new")
N->>E: dispatchAsync(EmailTemplate)
DB-->>S: Success
```
---
## 5. Cloud-Native Storage & Assets (GCS)
The architecture treats **Google Cloud Storage (GCS)** as a first-class citizen for both operational and deployment data.
### A. Deployment Artifact Architecture
- **Static Site Hosting**: GCS stores the compiled frontend artifacts.
- **Production Secrets**: `Google Secret Manager` ensures that no production passwords or API keys reside in the codebase.
### B. Scalable Document Storage
- **Decoupling**: Binaries are never stored in the database. MongoDB only stores the URI.
- **Privacy Mode**: Documents are retrieved via **Signed URLs** with a configurable TTL.
- **Structure**: `requests/{requestNumber}/documents/`
---
## 6. Real-time Collaboration (Socket.io)
Collaborative features like "Who else is viewing this request?" and "Instant Alerts" are powered by a persistent WebSocket layer.
- **Presence Tracking**: A `Map<requestId, Set<userId>>` tracks online users per workflow request.
- **Room Logic**: Users join specific "Rooms" based on their current active request view.
- **Bi-directional Sync**: Frontend emits `presence:join` when entering a request page.
---
## 7. Intelligent Monitoring & Observability
The platform includes a dedicated monitoring stack for "Day 2" operations.
- **Metrics (Prometheus)**: Scrapes the `/metrics` endpoint provided by our Prometheus middleware.
- **Log Aggregation (Grafana Loki)**: `promtail` ships container logs to Loki for centralized debugging.
- **Alerting**: **Alertmanager** triggers PagerDuty/Email alerts for critical system failures.
```mermaid
graph LR
App[RE Backend] -->|Prometheus| P[Prometheus DB]
App -->|Logs| L[Loki]
P --> G[Grafana Dashboards]
L --> G
```
---
## 8. Dynamic Workflow Flexibility
The "Custom Workflow" module provides logic for ad-hoc adjustments:
1. **Skip Approver**: Bypasses a level while maintaining a forced audit reason.
2. **Ad-hoc Insertion**: Inserts an approver level mid-flight, dynamically recalculating the downstream chain.

View File

@ -0,0 +1,108 @@
# Analysis: Dealer Claim & Unified Request Architecture
This document analyzes the current architecture and proposes an efficient approach to unify Dealer Claims and Custom Requests while supporting specialized data capture and versioning.
## Current State
Both **Custom Requests** and **Dealer Claims** are already "unified" at the base level:
- **Primary Collection**: `workflow_requests` stores the core data (id, requestNumber, initiator, status, currentLevel).
- **Secondary Collection**: `dealer_claims` stores the business-specific metadata (proposal, expenses, invoices, etc.) and is linked via `requestId`.
This architecture naturally supports showing both in the same list.
## Proposed Efficient Approach
To make these two paths truly "inline" and handle specialized steps efficiently, we recommend a **Metadata-Driven Activity System**.
### 1. Unified Listing
The UI should continue to use the existing `listWorkflows` endpoints. The backend already returns `templateType`, which the frontend can use to decide which icon or detail view to render.
### 2. Specialized Step Identification (Dual-Tag System)
To handle dynamic level shifts and accurately recognize the purpose of each step, we use two categories of tags on each `ApprovalLevel`.
#### Category A: Action Tags (`stepAction`)
Defines **what** special behavior is required in this step.
- `DEALER_PROPOSAL`: Show proposal submission form.
- `EXPENSE_CAPTURE`: Show expense document upload form.
- `PROPOSAL_EVALUATION`: Show evaluation tools for the initiator/manager.
- `NONE`: Standard approve/reject UI.
#### Category B: Persona Tags (`stepPersona`)
Defines **who** is acting in this step (role-based logic).
- `INITIATOR`: Used when the initiator acts as an approver (e.g., evaluating a dealer proposal).
- `DEPARTMENT_LEAD`: Standard leadership approval.
- `DEALER`: The external dealer submitting or revising a proposal.
- `ADDITIONAL_APPROVER`: Distinguishes steps added manually from the template.
#### How it works together:
| Level | Level Name | `stepAction` | `stepPersona` | UI Behavior |
| :--- | :--- | :--- | :--- | :--- |
| **1** | Dealer Proposal | `DEALER_PROPOSAL` | `DEALER` | Full Proposal Form |
| **2** | Initiator Review | `PROPOSAL_EVALUATION` | `INITIATOR` | Inline evaluation checklist |
| **3** | Lead Approval | `NONE` | `DEPARTMENT_LEAD` | Simple Approve/Reject |
| **3b** | Extra Check | `NONE` | `ADDITIONAL_APPROVER` | Manual Approval UI |
- **Dynamic Insertions**: If `Extra Check` is added, the following levels shift, but their `stepAction` tags remain, so the UI NEVER breaks.
- **Resubmission**: Rejection logic targets the latest completed level with `stepAction: 'DEALER_PROPOSAL'`.
### 3. Versioning & Iterations
The user's requirement to track previous proposals during resubmission is handled via the **Snapshotted Revisions** pattern:
- **The Main Store**: `DealerClaim.proposal` and `DealerClaim.completion` always hold the **active/latest** values.
- **The Revision Store**: `DealerClaim.revisions[]` acts as an append-only audit trail.
**Resubmission Flow:**
1. Request is rejected at Level 2/3/5.
2. Workflow moves back to Level 1 or 4 (Dealer).
3. Dealer edits the data.
4. **On Submit**:
- Backend takes the *current* `proposal` or `completion` data.
- Pushes it into `revisions` with a timestamp and `triggeredBy: 'SYSTEM_VERSION_SNAPSHOT'`.
- Overwrites the main object with the *new* data.
- Advances the workflow.
### 4. KPI & Deep Filtering Strategy (Hybrid Approach)
To support complex KPIs and high-performance filtering across thousands of requests, we use a **Referential Flat Pattern**:
- **Workflow Index (Speed)**: `WorkflowRequest` remains light. It handles high-frequency queries like "My Pending Tasks" or "Recent Activity".
- **Business Index (Depth)**: `DealerClaim` holds the "Deep Data". We apply Mongoose/MongoDB indexes on fields like:
- `dealer.region`, `dealer.state` (for Geospatial/Regional KPIs).
- `budgetTracking.utilizedBudget` (for Financial KPIs).
- `completion.expenses.category` (for operational analysis).
**The "Hybrid" Advantage:**
1. **Performance**: We don't bloat the main `workflow_requests` collection with hundreds of dealer-specific fields. This keeps "Total Request" counts and general listing extremely fast.
2. **Scalability**: For deep filters (e.g., "Show all claims in South Region with expenses > 50k"), we query the `dealer_claims` collection first to get the `requestId`s, then fetch the workflow status. This is much faster than a massive `$lookup` on a single bloated collection.
3. **Clean KPIs**: KPIs like "Budget vs Actual" are calculated directly from `DealerClaim` without interfering with generic workflow TAT metrics.
### 5. Ad-Hoc & Additional Approver Handling
When a user manually adds an approver (Ad-hoc) to a Dealer Claim or Custom Flow:
- **Tag Assignment**: The new level is automatically tagged with `stepAction: 'NONE'` and `stepPersona: 'ADDITIONAL_APPROVER'`.
- **UI Consistency**: The frontend sees `stepAction: 'NONE'` and renders the standard approval interface (comments + buttons).
- **Rejection Intelligence**:
- If an *Additional Approver* rejects, the system looks back for the nearest **anchor step** (e.g., `stepAction: 'DEALER_PROPOSAL'`).
- This prevents the workflow from getting "stuck" between two manually added levels if the business rule requires a return to the initiator or dealer.
### 6. Impact on Custom Flows & Compatibility
**Zero Breaking Changes**:
- Existing Custom Flows will default to `stepAction: 'NONE'`. The UI behavior remains identical to the current state.
- The `WorkflowRequest` collection structure is not being modified; we are only adding two optional metadata fields to the `ApprovalLevel` sub-documents.
**Future-Proofing**:
- Custom Flows can now "unlock" specialized steps (like `PROPOSAL_EVALUATION`) simply by updating their template metadata, without any backend code changes.
### 7. Implementation Strategy
| Feature | Custom Request Path | Dealer Claim Path |
| :--- | :--- | :--- |
| **Listing** | Unified `listWorkflows` | Unified `listWorkflows` |
| **Details View** | Standard UI | Enhanced UI (tabs for Expenses/Proposal) |
| **Logic** | Generic `approveRequest` | `approveRequest` + `DealerClaimService` hook |
| **Versioning** | Activity Logs only | Snapshotted Revisions for re-submissions |
---
### Key Advantage
This approach avoids creating "two separate systems". It treats a Dealer Claim as a "Custom Request with a specific metadata payload". The UI remains cohesive, and the backend logic for TAT, notifications, and status transitions stays shared.

View File

@ -181,7 +181,7 @@ POST http://localhost:5000/api/v1/auth/login
Content-Type: application/json Content-Type: application/json
{ {
"username": "john.doe@{{APP_DOMAIN}}", "username": "john.doe@royalenfield.com",
"password": "SecurePassword123!" "password": "SecurePassword123!"
} }
``` ```

View File

@ -26,8 +26,8 @@ REFRESH_TOKEN_EXPIRY=7d
SESSION_SECRET=your_session_secret_here_min_32_chars SESSION_SECRET=your_session_secret_here_min_32_chars
# Cloud Storage (GCP) # Cloud Storage (GCP)
GCP_PROJECT_ID={{GCP_PROJECT_ID}} GCP_PROJECT_ID=re-workflow-project
GCP_BUCKET_NAME={{GCP_BUCKET_NAME}} GCP_BUCKET_NAME=re-workflow-documents
GCP_KEY_FILE=./config/gcp-key.json GCP_KEY_FILE=./config/gcp-key.json
# Google Secret Manager (Optional - for production) # Google Secret Manager (Optional - for production)
@ -41,9 +41,9 @@ USE_GOOGLE_SECRET_MANAGER=false
SMTP_HOST=smtp.gmail.com SMTP_HOST=smtp.gmail.com
SMTP_PORT=587 SMTP_PORT=587
SMTP_SECURE=false SMTP_SECURE=false
SMTP_USER=notifications@{{APP_DOMAIN}} SMTP_USER=notifications@royalenfield.com
SMTP_PASSWORD=your_smtp_password SMTP_PASSWORD=your_smtp_password
EMAIL_FROM=RE Workflow System <notifications@{{APP_DOMAIN}}> EMAIL_FROM=RE Workflow System <notifications@royalenfield.com>
# AI Service (for conclusion generation) - Vertex AI Gemini # AI Service (for conclusion generation) - Vertex AI Gemini
# Uses service account credentials from GCP_KEY_FILE # Uses service account credentials from GCP_KEY_FILE
@ -55,7 +55,7 @@ VERTEX_AI_LOCATION=asia-south1
# Logging # Logging
LOG_LEVEL=info LOG_LEVEL=info
LOG_FILE_PATH=./logs LOG_FILE_PATH=./logs
APP_VERSION={{APP_VERSION}} APP_VERSION=1.2.0
# ============ Loki Configuration (Grafana Log Aggregation) ============ # ============ Loki Configuration (Grafana Log Aggregation) ============
LOKI_HOST= # e.g., http://loki:3100 or http://monitoring.cloudtopiaa.com:3100 LOKI_HOST= # e.g., http://loki:3100 or http://monitoring.cloudtopiaa.com:3100
@ -66,7 +66,7 @@ LOKI_PASSWORD= # Optional: Basic auth password
CORS_ORIGIN="*" CORS_ORIGIN="*"
# Rate Limiting # Rate Limiting
RATE_LIMIT_WINDOW_MS=900000 # 15 minutes RATE_LIMIT_WINDOW_MS=900000
RATE_LIMIT_MAX_REQUESTS=100 RATE_LIMIT_MAX_REQUESTS=100
# File Upload # File Upload
@ -83,16 +83,16 @@ OKTA_CLIENT_ID={{okta_client_id}}
OKTA_CLIENT_SECRET={{okta_client_secret}} OKTA_CLIENT_SECRET={{okta_client_secret}}
# Notification Service Worker credentials # Notification Service Worker credentials
VAPID_PUBLIC_KEY={{VAPID_PUBLIC_KEY}} VAPID_PUBLIC_KEY={{vapid_public_key}} note: same key need to add on front end for web push
VAPID_PRIVATE_KEY={{vapid_private_key}} VAPID_PRIVATE_KEY={{vapid_private_key}}
VAPID_CONTACT=mailto:you@example.com VAPID_CONTACT=mailto:you@example.com
#Redis #Redis
REDIS_URL={{REDIS_URL}} REDIS_URL={{REDIS_URL_FOR DELAY JoBS create redis setup and add url here}}
TAT_TEST_MODE=false # Set to true to accelerate TAT for testing TAT_TEST_MODE=false (on true it will consider 1 hour==1min)
# SAP Integration (OData Service via Zscaler) # SAP Integration (OData Service via Zscaler)
SAP_BASE_URL=https://{{SAP_DOMAIN_HERE}}:{{PORT}} SAP_BASE_URL=https://RENOIHND01.Eichergroup.com:1443
SAP_USERNAME={{SAP_USERNAME}} SAP_USERNAME={{SAP_USERNAME}}
SAP_PASSWORD={{SAP_PASSWORD}} SAP_PASSWORD={{SAP_PASSWORD}}
SAP_TIMEOUT_MS=30000 SAP_TIMEOUT_MS=30000

49
fix-imports.ps1 Normal file
View File

@ -0,0 +1,49 @@
# Fix all simple imports to use MongoDB services.
#
# Rewrites import paths in the listed TypeScript files so they point at the
# *.mongo.service implementations, then renames the matching service instance
# identifiers. Files are modified in place; missing files are reported and
# skipped.

# Literal import-path replacements. These strings are matched verbatim, so
# they are regex-escaped before use (a bare '.' would otherwise match any
# character). [ordered] keeps the application order deterministic.
$literalReplacements = [ordered]@{
    "from '@services/activity.service'"       = "from '@services/activity.mongo.service'"
    "from '../services/activity.service'"     = "from '../services/activity.mongo.service'"
    "from '@services/notification.service'"   = "from '@services/notification.mongo.service'"
    "from '../services/notification.service'" = "from '../services/notification.mongo.service'"
    "from '@services/configReader.service'"   = "from '@services/configReader.mongo.service'"
    "from '../services/configReader.service'" = "from '../services/configReader.mongo.service'"
    "from './configReader.service'"           = "from './configReader.mongo.service'"
    "from '../services/holiday.service'"      = "from '../services/holiday.mongo.service'"
    "from '../services/workflow.service'"     = "from '../services/workflow.service.mongo'"
    "from '../services/worknote.service'"     = "from '../services/worknote.mongo.service'"
}

# Whole-word service instance renames. These patterns are intentionally
# regular expressions (\b = word boundary), so they are NOT escaped.
$regexReplacements = [ordered]@{
    '\bactivityService\b'     = 'activityMongoService'
    '\bnotificationService\b' = 'notificationMongoService'
    '\bholidayService\b'      = 'holidayMongoService'
    '\bworkNoteService\b'     = 'workNoteMongoService'
}

$files = @(
    'src/controllers/conclusion.controller.ts',
    'src/controllers/document.controller.ts',
    'src/controllers/notification.controller.ts',
    'src/controllers/tat.controller.ts',
    'src/routes/workflow.routes.ts',
    'src/emailtemplates/emailPreferences.helper.ts',
    'src/routes/debug.routes.ts',
    'src/services/ai.service.ts',
    'src/utils/tatTimeUtils.ts'
)

foreach ($file in $files) {
    if (-not (Test-Path $file)) {
        Write-Host "✗ Not found: $file"
        continue
    }

    # Read the whole file as a single string so multi-token patterns match.
    $content = Get-Content $file -Raw

    # Apply literal path rewrites first, then the identifier renames.
    foreach ($entry in $literalReplacements.GetEnumerator()) {
        $content = $content -replace [regex]::Escape($entry.Key), $entry.Value
    }
    foreach ($entry in $regexReplacements.GetEnumerator()) {
        $content = $content -replace $entry.Key, $entry.Value
    }

    # -NoNewline preserves the file's original trailing-newline state.
    Set-Content $file $content -NoNewline
    Write-Host "✓ Updated: $file"
}

Write-Host "`n✅ Import replacements complete!"

View File

@ -52,8 +52,6 @@ scrape_configs:
metrics_path: /metrics metrics_path: /metrics
scrape_interval: 10s scrape_interval: 10s
scrape_timeout: 5s scrape_timeout: 5s
authorization:
credentials: 're_c92b9cf291d2be65a1704207aa25352d69432b643e6c9e9a172938c964809f2d'
# ============================================ # ============================================
# Node Exporter - Host Metrics # Node Exporter - Host Metrics

1816
package-lock.json generated

File diff suppressed because it is too large Load Diff

View File

@ -4,7 +4,7 @@
"description": "Royal Enfield Workflow Management System - Backend API (TypeScript)", "description": "Royal Enfield Workflow Management System - Backend API (TypeScript)",
"main": "dist/server.js", "main": "dist/server.js",
"scripts": { "scripts": {
"start": "npm install && npm run build && npm run setup && npm run start:prod", "start": "npm run build && npm run start:prod && npm run setup",
"dev": "npm run setup && nodemon --exec ts-node -r tsconfig-paths/register src/server.ts", "dev": "npm run setup && nodemon --exec ts-node -r tsconfig-paths/register src/server.ts",
"dev:no-setup": "nodemon --exec ts-node -r tsconfig-paths/register src/server.ts", "dev:no-setup": "nodemon --exec ts-node -r tsconfig-paths/register src/server.ts",
"build": "tsc && tsc-alias", "build": "tsc && tsc-alias",
@ -16,10 +16,9 @@
"type-check": "tsc --noEmit", "type-check": "tsc --noEmit",
"clean": "rm -rf dist", "clean": "rm -rf dist",
"setup": "ts-node -r tsconfig-paths/register src/scripts/auto-setup.ts", "setup": "ts-node -r tsconfig-paths/register src/scripts/auto-setup.ts",
"migrate": "ts-node -r tsconfig-paths/register src/scripts/migrate.ts", "seed:config": "ts-node -r tsconfig-paths/register src/scripts/seed-admin-configs.ts",
"seed:config": "ts-node -r tsconfig-paths/register src/scripts/seed-admin-config.ts", "reset:mongo": "ts-node -r tsconfig-paths/register src/scripts/reset-mongo-db.ts",
"seed:test-dealer": "ts-node -r tsconfig-paths/register src/scripts/seed-test-dealer.ts", "seed:test-dealer": "ts-node -r tsconfig-paths/register src/scripts/seed-test-dealer.mongo.ts"
"cleanup:dealer-claims": "ts-node -r tsconfig-paths/register src/scripts/cleanup-dealer-claims.ts"
}, },
"dependencies": { "dependencies": {
"@google-cloud/secret-manager": "^6.1.1", "@google-cloud/secret-manager": "^6.1.1",
@ -30,7 +29,6 @@
"axios": "^1.7.9", "axios": "^1.7.9",
"bcryptjs": "^2.4.3", "bcryptjs": "^2.4.3",
"bullmq": "^5.63.0", "bullmq": "^5.63.0",
"clamscan": "^2.4.0",
"cookie-parser": "^1.4.7", "cookie-parser": "^1.4.7",
"cors": "^2.8.5", "cors": "^2.8.5",
"dayjs": "^1.11.19", "dayjs": "^1.11.19",
@ -41,6 +39,7 @@
"helmet": "^8.0.0", "helmet": "^8.0.0",
"ioredis": "^5.8.2", "ioredis": "^5.8.2",
"jsonwebtoken": "^9.0.2", "jsonwebtoken": "^9.0.2",
"mongoose": "^9.1.5",
"morgan": "^1.10.0", "morgan": "^1.10.0",
"multer": "^1.4.5-lts.1", "multer": "^1.4.5-lts.1",
"node-cron": "^3.0.3", "node-cron": "^3.0.3",
@ -48,18 +47,12 @@
"openai": "^6.8.1", "openai": "^6.8.1",
"passport": "^0.7.0", "passport": "^0.7.0",
"passport-jwt": "^4.0.1", "passport-jwt": "^4.0.1",
"pg": "^8.13.1",
"pg-hstore": "^2.3.4",
"prom-client": "^15.1.3", "prom-client": "^15.1.3",
"puppeteer": "^24.37.2",
"sanitize-html": "^2.17.1",
"sequelize": "^6.37.5",
"socket.io": "^4.8.1", "socket.io": "^4.8.1",
"uuid": "^8.3.2", "uuid": "^8.3.2",
"web-push": "^3.6.7", "web-push": "^3.6.7",
"winston": "^3.17.0", "winston": "^3.17.0",
"winston-loki": "^6.1.3", "winston-loki": "^6.1.3",
"xss": "^1.0.15",
"zod": "^3.24.1" "zod": "^3.24.1"
}, },
"devDependencies": { "devDependencies": {
@ -69,13 +62,12 @@
"@types/express": "^5.0.0", "@types/express": "^5.0.0",
"@types/jest": "^29.5.14", "@types/jest": "^29.5.14",
"@types/jsonwebtoken": "^9.0.7", "@types/jsonwebtoken": "^9.0.7",
"@types/mongoose": "^5.11.96",
"@types/morgan": "^1.9.9", "@types/morgan": "^1.9.9",
"@types/multer": "^1.4.12", "@types/multer": "^1.4.12",
"@types/node": "^22.19.1", "@types/node": "^22.19.1",
"@types/passport": "^1.0.16", "@types/passport": "^1.0.16",
"@types/passport-jwt": "^4.0.1", "@types/passport-jwt": "^4.0.1",
"@types/pg": "^8.15.6",
"@types/sanitize-html": "^2.16.0",
"@types/supertest": "^6.0.2", "@types/supertest": "^6.0.2",
"@types/web-push": "^3.6.4", "@types/web-push": "^3.6.4",
"@typescript-eslint/eslint-plugin": "^8.19.1", "@typescript-eslint/eslint-plugin": "^8.19.1",
@ -84,7 +76,6 @@
"jest": "^29.7.0", "jest": "^29.7.0",
"nodemon": "^3.1.9", "nodemon": "^3.1.9",
"prettier": "^3.4.2", "prettier": "^3.4.2",
"sequelize-cli": "^6.6.2",
"supertest": "^7.0.0", "supertest": "^7.0.0",
"ts-jest": "^29.2.5", "ts-jest": "^29.2.5",
"ts-node": "^10.9.2", "ts-node": "^10.9.2",
@ -97,4 +88,4 @@
"node": ">=22.0.0", "node": ">=22.0.0",
"npm": ">=10.0.0" "npm": ">=10.0.0"
} }
} }

View File

@ -16,7 +16,7 @@
UPDATE users UPDATE users
SET role = 'ADMIN' SET role = 'ADMIN'
WHERE email = 'YOUR_EMAIL@{{APP_DOMAIN}}' -- ← CHANGE THIS WHERE email = 'YOUR_EMAIL@royalenfield.com' -- ← CHANGE THIS
RETURNING RETURNING
user_id, user_id,
email, email,

View File

@ -21,9 +21,9 @@
UPDATE users UPDATE users
SET role = 'ADMIN' SET role = 'ADMIN'
WHERE email IN ( WHERE email IN (
'admin@{{APP_DOMAIN}}', 'admin@royalenfield.com',
'it.admin@{{APP_DOMAIN}}', 'it.admin@royalenfield.com',
'system.admin@{{APP_DOMAIN}}' 'system.admin@royalenfield.com'
-- Add more admin emails here -- Add more admin emails here
); );
@ -45,9 +45,9 @@ ORDER BY email;
UPDATE users UPDATE users
SET role = 'MANAGEMENT' SET role = 'MANAGEMENT'
WHERE email IN ( WHERE email IN (
'manager1@{{APP_DOMAIN}}', 'manager1@royalenfield.com',
'dept.head@{{APP_DOMAIN}}', 'dept.head@royalenfield.com',
'auditor@{{APP_DOMAIN}}' 'auditor@royalenfield.com'
-- Add more management emails here -- Add more management emails here
); );

View File

@ -1,74 +0,0 @@
// One-off verification script: probes a locally running backend and reports
// whether the VAPT security fixes (CSP, X-Frame-Options, cookie flags, input
// sanitization) are in effect. Output is console-only; the script never exits
// non-zero on failed checks.
const axios = require('axios');

// Base URL of the backend under test; assumed to be running locally.
const BASE_URL = 'http://localhost:3000';

// Runs the whole check sequence. Never throws: connection/HTTP failures are
// caught and reported to the console instead.
async function verifySecurity() {
  try {
    console.log('--- Verifying Security Fixes ---');

    // 1. Fetch /health and keep its headers for the CSP / XFO checks below.
    console.log('\n1. Verifying Security Headers...');
    const response = await axios.get(`${BASE_URL}/health`);
    const headers = response.headers;

    // 1b/1c. Security headers must also survive 404s and redirects — an
    // Express/Helmet edge case where such responses can lose headers.
    console.log('\n1b. Verifying Security Headers on 404...');
    try {
      // validateStatus: false disables status validation so a 4xx response
      // resolves instead of throwing.
      const res404 = await axios.get(`${BASE_URL}/non-existent`, { validateStatus: false });
      console.log('404 Status:', res404.status);
      console.log('404 CSP:', res404.headers['content-security-policy']);

      console.log('\n1c. Verifying Security Headers on /assets (Redirect check)...');
      const resAssets = await axios.get(`${BASE_URL}/assets`, {
        validateStatus: false,
        maxRedirects: 0 // Don't follow to see the first response (likely 301)
      });
      console.log('Assets Status:', resAssets.status);
      console.log('Assets CSP:', resAssets.headers['content-security-policy']);
    } catch (e) {
      console.log('Error checking 404/Redirect:', e.message);
      if (e.response) {
        console.log('Response Status:', e.response.status);
        console.log('Response CSP:', e.response.headers['content-security-policy']);
      }
    }

    // Check CSP — frame-ancestors is the modern clickjacking defence.
    const csp = headers['content-security-policy'];
    console.log('CSP:', csp);
    if (csp && csp.includes("frame-ancestors 'self'")) {
      console.log('✅ Clickjacking Protection (frame-ancestors) is present.');
    } else {
      console.log('❌ Clickjacking Protection (frame-ancestors) is MISSING.');
    }

    // Check X-Frame-Options — legacy clickjacking header for older browsers.
    const xfo = headers['x-frame-options'];
    console.log('X-Frame-Options:', xfo);
    if (xfo === 'SAMEORIGIN') {
      console.log('✅ X-Frame-Options: SAMEORIGIN is present.');
    } else {
      console.log('❌ X-Frame-Options: SAMEORIGIN is MISSING.');
    }

    // 2/3. Informational only — no automated probing is done for these.
    console.log('\n2. Verifying Cookie Security Flags (requires login)...');
    console.log('Note: This is best verified in a real browser or by checking the code changes in auth.controller.ts.');

    console.log('\n3. Verifying Sanitization Utility...');
    // This is verified by the unit test if we create one, but we can also do a manual check if the server is running.

    console.log('\n--- Verification Summary ---');
    console.log('Content-Security-Policy: frame-ancestors added.');
    console.log('X-Frame-Options: set to SAMEORIGIN.');
    console.log('Cookie flags: sameSite set to lax, secure flag ensured in production.');
    console.log('Sanitization: Implemented in WorkNotes, Holidays, Workflow Requests, and Conclusions.');
  } catch (error) {
    // ECONNREFUSED means the server simply isn't up — give an actionable hint.
    if (error.code === 'ECONNREFUSED') {
      console.error('❌ Error: Could not connect to the backend server at', BASE_URL);
      console.error('Please ensure the server is running (npm run dev).');
    } else {
      console.error('❌ Error during verification:', error.message);
    }
  }
}

verifySecurity();

View File

@ -162,7 +162,7 @@ SMTP_PORT=587
SMTP_SECURE=false SMTP_SECURE=false
SMTP_USER=${SMTP_USER} SMTP_USER=${SMTP_USER}
SMTP_PASSWORD=${SMTP_PASSWORD} SMTP_PASSWORD=${SMTP_PASSWORD}
EMAIL_FROM=RE Workflow System <notifications@{{APP_DOMAIN}}> EMAIL_FROM=RE Workflow System <notifications@royalenfield.com>
# Vertex AI Gemini Configuration (for conclusion generation) # Vertex AI Gemini Configuration (for conclusion generation)
# Service account credentials should be placed in ./credentials/ folder # Service account credentials should be placed in ./credentials/ folder
@ -232,7 +232,7 @@ show_vapid_instructions() {
echo " VITE_PUBLIC_VAPID_KEY=<your-public-key>" echo " VITE_PUBLIC_VAPID_KEY=<your-public-key>"
echo "" echo ""
echo "5. The VAPID_CONTACT should be a valid mailto: URL" echo "5. The VAPID_CONTACT should be a valid mailto: URL"
echo " Example: mailto:admin@{{APP_DOMAIN}}" echo " Example: mailto:admin@royalenfield.com"
echo "" echo ""
echo "Note: Keep your VAPID_PRIVATE_KEY secure and never commit it to version control!" echo "Note: Keep your VAPID_PRIVATE_KEY secure and never commit it to version control!"
echo "" echo ""

View File

@ -5,16 +5,12 @@ import dotenv from 'dotenv';
import cookieParser from 'cookie-parser'; import cookieParser from 'cookie-parser';
import { UserService } from './services/user.service'; import { UserService } from './services/user.service';
import { SSOUserData } from './types/auth.types'; import { SSOUserData } from './types/auth.types';
import { sequelize } from './config/database';
import { corsMiddleware } from './middlewares/cors.middleware'; import { corsMiddleware } from './middlewares/cors.middleware';
import { authenticateToken } from './middlewares/auth.middleware';
import { requireAdmin } from './middlewares/authorization.middleware';
import { metricsMiddleware, createMetricsRouter } from './middlewares/metrics.middleware'; import { metricsMiddleware, createMetricsRouter } from './middlewares/metrics.middleware';
import routes from './routes/index'; import routes from './routes/index';
import { ensureUploadDir, UPLOAD_DIR } from './config/storage'; import { ensureUploadDir, UPLOAD_DIR } from './config/storage';
import { initializeGoogleSecretManager } from './services/googleSecretManager.service'; import { initializeGoogleSecretManager } from './services/googleSecretManager.service';
import { sanitizationMiddleware } from './middlewares/sanitization.middleware';
import { rateLimiter } from './middlewares/rateLimiter.middleware';
import path from 'path'; import path from 'path';
// Load environment variables from .env file first // Load environment variables from .env file first
@ -23,85 +19,17 @@ dotenv.config();
// Secrets are now initialized in server.ts before app is imported // Secrets are now initialized in server.ts before app is imported
const app: express.Application = express(); const app: express.Application = express();
// 1. Security middleware — manual "Gold Standard" CSP to ensure the header
// survives 301/404/etc. This handles a specific Express/Helmet edge case
// where redirect responses lose headers set by helmet's CSP module.
app.use((req: express.Request, res: express.Response, next: express.NextFunction) => {
  const isDev = process.env.NODE_ENV === 'development' || process.env.NODE_ENV === 'local';
  const frontendUrl = process.env.FRONTEND_URL || 'http://localhost:3000';

  // Build connect-src dynamically: localhost dev ports (HTTP + WS) in
  // development; only the configured frontend origin(s) in production.
  const connectSrc = ["'self'", "blob:", "data:"];
  if (isDev) {
    connectSrc.push("http://localhost:3000", "http://localhost:5000", "ws://localhost:3000", "ws://localhost:5000");
    if (frontendUrl.includes('localhost')) connectSrc.push(frontendUrl);
  } else if (frontendUrl && frontendUrl !== '*') {
    const origins = frontendUrl.split(',').map(url => url.trim()).filter(Boolean);
    connectSrc.push(...origins);
  }

  const apiDomain = process.env.APP_DOMAIN || 'royalenfield.com';

  // Define strict CSP directives.
  // NOTE: frame-ancestors, form-action, and base-uri are placed at the front
  // to ensure VAPT compliance even if the header is truncated in certain
  // response types (like 301 redirects).
  const directives = [
    "frame-ancestors 'self'",
    "form-action 'self'",
    "base-uri 'self'",
    "default-src 'none'",
    `connect-src ${connectSrc.join(' ')}`,
    // The sha256 hashes below allowlist specific inline <style> contents.
    "style-src 'self' https://fonts.googleapis.com 'sha256-47DEQpj8HBSa+/TImW+5JCeuQeRkm5NMpJWZG3hSuFU=' 'sha256-Od9mHMH7x2G6QuoV3hsPkDCwIyqbg2DX3F5nLeCYQBc=' 'sha256-eSB4TBEI8J+pgd6+gnmCP4Q+C+Yrx5BdjBEoPvZUzZI=' 'sha256-nzTgYzXYDNe6BAHiiI7NNlfK8n/auuOAhh2t92YvuXo=' 'sha256-441zG27rExd4/il+NvIqyL8zFx5XmyNQtE381kSkUJk='",
    "style-src-elem 'self' https://fonts.googleapis.com 'sha256-47DEQpj8HBSa+/TImW+5JCeuQeRkm5NMpJWZG3hSuFU=' 'sha256-Od9mHMH7x2G6QuoV3hsPkDCwIyqbg2DX3F5nLeCYQBc=' 'sha256-eSB4TBEI8J+pgd6+gnmCP4Q+C+Yrx5BdjBEoPvZUzZI=' 'sha256-nzTgYzXYDNe6BAHiiI7NNlfK8n/auuOAhh2t92YvuXo=' 'sha256-441zG27rExd4/il+NvIqyL8zFx5XmyNQtE381kSkUJk='",
    "style-src-attr 'unsafe-inline'",
    "script-src 'self'",
    "script-src-elem 'self'",
    "script-src-attr 'none'",
    `img-src 'self' data: blob: https://*.${apiDomain} https://*.okta.com https://*.oktapreview.com https://*.googleapis.com https://*.gstatic.com`,
    "frame-src 'self' blob: data:",
    "font-src 'self' https://fonts.gstatic.com data:",
    "object-src 'none'",
    "worker-src 'self' blob:",
    "manifest-src 'self'",
    // upgrade-insecure-requests forces HTTPS, so it is production-only.
    !isDev ? "upgrade-insecure-requests" : ""
  ].filter(Boolean).join("; ");

  res.setHeader('Content-Security-Policy', directives);
  next();
});

// Configure other security headers via Helmet (with CSP disabled since we set it manually)
app.use(helmet({
  contentSecurityPolicy: false, // Handled manually above to ensure redirect compatibility
  crossOriginEmbedderPolicy: false,
  crossOriginResourcePolicy: { policy: "cross-origin" },
  xFrameOptions: { action: "sameorigin" },
}));

// 2. CORS middleware - MUST be before other middleware
app.use(corsMiddleware);

// Handle the /assets trailing-slash redirect manually so express.static does
// not emit a 301 that drops the CSP header (see truncation note above).
app.get('/assets', (req, res) => {
  res.redirect(301, '/assets/');
});

// 3. Cookie parser middleware - MUST be before routes
app.use(cookieParser());
const userService = new UserService(); const userService = new UserService();
// Initializer for database connection (called from server.ts) // Database initialization
export const initializeAppDatabase = async () => { const initializeDatabase = async () => {
try { // MongoDB is connected via server.ts or separate config
await sequelize.authenticate(); // No Sequelize initialization needed
console.log('✅ App database connection established');
} catch (error) {
console.error('❌ App database connection failed:', error);
throw error;
}
}; };
// Initialize database
initializeDatabase();
// Trust proxy - Enable this when behind a reverse proxy (nginx, load balancer, etc.) // Trust proxy - Enable this when behind a reverse proxy (nginx, load balancer, etc.)
// This allows Express to read X-Forwarded-* headers correctly // This allows Express to read X-Forwarded-* headers correctly
// Set to true in production, false in development // Set to true in production, false in development
@ -112,16 +40,65 @@ if (process.env.TRUST_PROXY === 'true' || process.env.NODE_ENV === 'production')
app.set('trust proxy', 1); app.set('trust proxy', 1);
} }
// CORS middleware - MUST be before other middleware
app.use(corsMiddleware);

// Security middleware — configure Helmet so its CSP cooperates with CORS.
// Cross-origin connections to local dev servers are allowed in development.
const frontendUrl = process.env.FRONTEND_URL || 'http://localhost:3000';
const isDevelopment = process.env.NODE_ENV !== 'production';

// Build the connect-src directive — allow the backend API and blob:/data: URLs.
const connectSrc = ["'self'", "blob:", "data:"];
if (isDevelopment) {
  // In development, allow connections to common dev ports (HTTP + WebSocket)
  connectSrc.push("http://localhost:3000", "http://localhost:5000", "ws://localhost:3000", "ws://localhost:5000");
  // Also allow the configured frontend URL if it's a localhost URL
  if (frontendUrl.includes('localhost')) {
    connectSrc.push(frontendUrl);
  }
} else {
  // In production, only allow the configured frontend URL(s); '*' is skipped
  // because CSP sources must be explicit origins.
  if (frontendUrl && frontendUrl !== '*') {
    const frontendOrigins = frontendUrl.split(',').map(url => url.trim()).filter(Boolean);
    connectSrc.push(...frontendOrigins);
  }
}

// Build CSP directives - conditionally include upgradeInsecureRequests
const cspDirectives: any = {
  defaultSrc: ["'self'", "blob:"],
  styleSrc: ["'self'", "'unsafe-inline'", "https://fonts.googleapis.com"],
  scriptSrc: ["'self'"],
  imgSrc: ["'self'", "data:", "https:", "blob:"],
  connectSrc: connectSrc,
  frameSrc: ["'self'", "blob:"],
  fontSrc: ["'self'", "https://fonts.gstatic.com", "data:"],
  objectSrc: ["'none'"],
  baseUri: ["'self'"],
  formAction: ["'self'"],
};

// Only add upgradeInsecureRequests in production (it forces HTTPS)
if (!isDevelopment) {
  cspDirectives.upgradeInsecureRequests = [];
}

app.use(helmet({
  crossOriginEmbedderPolicy: false,
  crossOriginResourcePolicy: { policy: "cross-origin" },
  contentSecurityPolicy: {
    directives: cspDirectives,
  },
}));

// Cookie parser middleware - MUST be before routes
app.use(cookieParser());
// Body parsing middleware // Body parsing middleware
app.use(express.json({ limit: '10mb' })); app.use(express.json({ limit: '10mb' }));
app.use(express.urlencoded({ extended: true, limit: '10mb' })); app.use(express.urlencoded({ extended: true, limit: '10mb' }));
// Global rate limiting disabled — nginx handles rate limiting in production
// app.use(rateLimiter);
// HTML sanitization - strip all tags from text inputs (after body parsing, before routes)
app.use(sanitizationMiddleware);
// Logging middleware // Logging middleware
app.use(morgan('combined')); app.use(morgan('combined'));
@ -129,7 +106,7 @@ app.use(morgan('combined'));
app.use(metricsMiddleware); app.use(metricsMiddleware);
// Prometheus metrics endpoint - expose metrics for scraping // Prometheus metrics endpoint - expose metrics for scraping
app.use('/metrics', authenticateToken, requireAdmin, createMetricsRouter()); app.use(createMetricsRouter());
// Health check endpoint (before API routes) // Health check endpoint (before API routes)
app.get('/health', (_req: express.Request, res: express.Response) => { app.get('/health', (_req: express.Request, res: express.Response) => {
@ -146,16 +123,7 @@ app.use('/api/v1', routes);
// Serve uploaded files statically // Serve uploaded files statically
ensureUploadDir(); ensureUploadDir();
app.use('/uploads', authenticateToken, express.static(UPLOAD_DIR)); app.use('/uploads', express.static(UPLOAD_DIR));
// Initialize ClamAV toggle manager at startup.
// NOTE(review): presumably this creates/refreshes an on-disk toggle file that
// gates virus scanning via ENABLE_CLAMAV — confirm in clamavToggleManager.
import { initializeToggleFile } from './services/clamav/clamavToggleManager';
try {
  initializeToggleFile();
  console.log(`✅ ClamAV toggle initialized (ENABLE_CLAMAV=${process.env.ENABLE_CLAMAV || 'true'})`);
} catch (err) {
  // Non-fatal: startup continues with a warning if initialization fails.
  console.warn('⚠️ ClamAV toggle initialization warning:', err);
}
// Legacy SSO Callback endpoint for user creation/update (kept for backward compatibility) // Legacy SSO Callback endpoint for user creation/update (kept for backward compatibility)
app.post('/api/v1/auth/sso-callback', async (req: express.Request, res: express.Response): Promise<void> => { app.post('/api/v1/auth/sso-callback', async (req: express.Request, res: express.Response): Promise<void> => {
@ -209,7 +177,7 @@ app.post('/api/v1/auth/sso-callback', async (req: express.Request, res: express.
}); });
// Get all users endpoint // Get all users endpoint
app.get('/api/v1/users', authenticateToken, requireAdmin, async (_req: express.Request, res: express.Response): Promise<void> => { app.get('/api/v1/users', async (_req: express.Request, res: express.Response): Promise<void> => {
try { try {
const users = await userService.getAllUsers(); const users = await userService.getAllUsers();
@ -314,4 +282,4 @@ if (reactBuildPath && fs.existsSync(path.join(reactBuildPath, "index.html"))) {
}); });
} }
export default app; export default app;

View File

@ -1,39 +1,56 @@
import { Sequelize } from 'sequelize'; import mongoose from 'mongoose';
import dotenv from 'dotenv'; import dotenv from 'dotenv';
import logger from '../utils/logger';
import dns from 'dns';
dotenv.config(); dotenv.config();
// 1. Debugging: Print what the app actually sees export const connectMongoDB = async () => {
console.log('--- Database Config Debug ---'); try {
console.log(`DB_HOST: ${process.env.DB_HOST}`); const mongoUri = process.env.MONGO_URI || process.env.MONGODB_URL || 'mongodb://localhost:27017/re_workflow_db';
console.log(`DB_SSL (Raw): '${process.env.DB_SSL}`); // Quotes help see trailing spaces
// 2. Fix: Trim whitespace to ensure "true " becomes "true" // Workaround for querySrv ECONNREFUSED in specific network environments (e.g. some Windows setups/VPNs)
const isSSL = (process.env.DB_SSL || '').trim() === 'true'; // Set DNS servers BEFORE any connection attempt to fix SRV resolution issues
console.log(`SSL Enabled: ${isSSL}`); if (mongoUri.startsWith('mongodb+srv://')) {
console.log('---------------------------'); logger.info('[Database] Detected Atlas SRV URI, configuring DNS resolution...');
try {
// Set public DNS servers globally to fix Windows DNS resolution issues
dns.setServers(['8.8.8.8', '8.8.4.4', '1.1.1.1', '1.0.0.1']);
logger.info('[Database] DNS servers configured: Google DNS (8.8.8.8, 8.8.4.4) and Cloudflare DNS (1.1.1.1, 1.0.0.1)');
const sequelize = new Sequelize({ // Add a small delay to ensure DNS settings take effect
host: process.env.DB_HOST || 'localhost', await new Promise(resolve => setTimeout(resolve, 100));
port: parseInt(process.env.DB_PORT || '5432', 10), } catch (dnsErr) {
database: process.env.DB_NAME || 're_workflow_db', logger.warn('[Database] Failed to set public DNS servers:', dnsErr);
username: process.env.DB_USER || 'postgres', }
password: process.env.DB_PASSWORD || 'postgres', }
dialect: 'postgres',
logging: false,
pool: {
min: parseInt(process.env.DB_POOL_MIN || '2', 10),
max: parseInt(process.env.DB_POOL_MAX || '10', 10),
acquire: 30000,
idle: 10000,
},
dialectOptions: {
// 3. Use the robust boolean we calculated above
ssl: isSSL ? {
require: true,
rejectUnauthorized: false,
} : false,
},
});
export { sequelize }; logger.info('[Database] Connecting to MongoDB...');
await mongoose.connect(mongoUri, {
serverSelectionTimeoutMS: 10000, // Increase timeout to 10 seconds
socketTimeoutMS: 45000,
});
logger.info('✅ MongoDB Connected Successfully');
} catch (error: any) {
logger.error('❌ MongoDB Connection Error:', error.message);
if (error.stack) {
logger.error('Stack trace:', error.stack);
}
// Provide helpful error messages
if (error.message.includes('querySrv ECONNREFUSED') || error.message.includes('ENOTFOUND')) {
logger.error('');
logger.error('🔍 DNS Resolution Failed. Possible solutions:');
logger.error(' 1. Check your internet connection');
logger.error(' 2. Verify the MongoDB Atlas cluster is running');
logger.error(' 3. Try disabling VPN if you\'re using one');
logger.error(' 4. Check Windows Firewall settings');
logger.error(' 5. Verify your MongoDB Atlas connection string is correct');
logger.error('');
}
throw error; // Re-throw to stop server startup
}
};
export { mongoose };

View File

@ -8,9 +8,9 @@ export const emailConfig = {
pass: process.env.SMTP_PASSWORD || '', pass: process.env.SMTP_PASSWORD || '',
}, },
}, },
from: process.env.EMAIL_FROM || `RE Workflow System <notifications@${process.env.APP_DOMAIN || 'royalenfield.com'}>`, from: process.env.EMAIL_FROM || 'RE Workflow System <notifications@royalenfield.com>',
// Email templates // Email templates
templates: { templates: {
workflowCreated: 'workflow-created', workflowCreated: 'workflow-created',
@ -20,7 +20,7 @@ export const emailConfig = {
tatReminder: 'tat-reminder', tatReminder: 'tat-reminder',
tatBreached: 'tat-breached', tatBreached: 'tat-breached',
}, },
// Email settings // Email settings
settings: { settings: {
retryAttempts: 3, retryAttempts: 3,

View File

@ -8,18 +8,18 @@ const ssoConfig: SSOConfig = {
get refreshTokenExpiry() { return process.env.REFRESH_TOKEN_EXPIRY || '7d'; }, get refreshTokenExpiry() { return process.env.REFRESH_TOKEN_EXPIRY || '7d'; },
get sessionSecret() { return process.env.SESSION_SECRET || ''; }, get sessionSecret() { return process.env.SESSION_SECRET || ''; },
// Use only FRONTEND_URL from environment - no fallbacks // Use only FRONTEND_URL from environment - no fallbacks
get allowedOrigins() { get allowedOrigins() {
return process.env.FRONTEND_URL?.split(',').map(s => s.trim()).filter(Boolean) || []; return process.env.FRONTEND_URL?.split(',').map(s => s.trim()).filter(Boolean) || [];
}, },
// Okta/Auth0 configuration for token exchange // Okta/Auth0 configuration for token exchange
get oktaDomain() { return process.env.OKTA_DOMAIN || `{{IDP_DOMAIN}}`; }, get oktaDomain() { return process.env.OKTA_DOMAIN || 'https://dev-830839.oktapreview.com'; },
get oktaClientId() { return process.env.OKTA_CLIENT_ID || ''; }, get oktaClientId() { return process.env.OKTA_CLIENT_ID || ''; },
get oktaClientSecret() { return process.env.OKTA_CLIENT_SECRET || ''; }, get oktaClientSecret() { return process.env.OKTA_CLIENT_SECRET || ''; },
get oktaApiToken() { return process.env.OKTA_API_TOKEN || ''; }, // SSWS token for Users API get oktaApiToken() { return process.env.OKTA_API_TOKEN || ''; }, // SSWS token for Users API
// Tanflow configuration for token exchange // Tanflow configuration for token exchange
get tanflowBaseUrl() { return process.env.TANFLOW_BASE_URL || `{{IDP_DOMAIN}}/realms/RE`; }, get tanflowBaseUrl() { return process.env.TANFLOW_BASE_URL || 'https://ssodev.rebridge.co.in/realms/RE'; },
get tanflowClientId() { return process.env.TANFLOW_CLIENT_ID || 'REFLOW'; }, get tanflowClientId() { return process.env.TANFLOW_CLIENT_ID || 'REFLOW'; },
get tanflowClientSecret() { return process.env.TANFLOW_CLIENT_SECRET || `{{TANFLOW_CLIENT_SECRET}}`; }, get tanflowClientSecret() { return process.env.TANFLOW_CLIENT_SECRET || 'cfIzMlwAMF1m4QWAP5StzZbV47HIrCox'; },
}; };
export { ssoConfig }; export { ssoConfig };

View File

@ -9,7 +9,7 @@ export const SYSTEM_CONFIG = {
APP_NAME: 'Royal Enfield Workflow Management', APP_NAME: 'Royal Enfield Workflow Management',
APP_VERSION: '1.2.0', APP_VERSION: '1.2.0',
APP_ENV: process.env.NODE_ENV || 'development', APP_ENV: process.env.NODE_ENV || 'development',
// Working Hours Configuration // Working Hours Configuration
WORKING_HOURS: { WORKING_HOURS: {
START_HOUR: parseInt(process.env.WORK_START_HOUR || '9', 10), START_HOUR: parseInt(process.env.WORK_START_HOUR || '9', 10),
@ -18,23 +18,23 @@ export const SYSTEM_CONFIG = {
END_DAY: 5, // Friday END_DAY: 5, // Friday
TIMEZONE: process.env.TZ || 'Asia/Kolkata', TIMEZONE: process.env.TZ || 'Asia/Kolkata',
}, },
// TAT (Turnaround Time) Settings // TAT (Turnaround Time) Settings
TAT: { TAT: {
// Notification thresholds (percentage) // Notification thresholds (percentage)
THRESHOLD_50_PERCENT: 50, THRESHOLD_50_PERCENT: 50,
THRESHOLD_75_PERCENT: 75, THRESHOLD_75_PERCENT: 75,
THRESHOLD_100_PERCENT: 100, THRESHOLD_100_PERCENT: 100,
// Test mode for faster testing // Test mode for faster testing
TEST_MODE: process.env.TAT_TEST_MODE === 'true', TEST_MODE: process.env.TAT_TEST_MODE === 'true',
TEST_TIME_MULTIPLIER: process.env.TAT_TEST_MODE === 'true' ? 1/60 : 1, // 1 hour = 1 minute in test mode TEST_TIME_MULTIPLIER: process.env.TAT_TEST_MODE === 'true' ? 1 / 60 : 1, // 1 hour = 1 minute in test mode
// Default TAT values by priority (in hours) // Default TAT values by priority (in hours)
DEFAULT_EXPRESS_TAT: parseInt(process.env.DEFAULT_EXPRESS_TAT || '24', 10), DEFAULT_EXPRESS_TAT: parseInt(process.env.DEFAULT_EXPRESS_TAT || '24', 10),
DEFAULT_STANDARD_TAT: parseInt(process.env.DEFAULT_STANDARD_TAT || '72', 10), DEFAULT_STANDARD_TAT: parseInt(process.env.DEFAULT_STANDARD_TAT || '72', 10),
}, },
// File Upload Limits // File Upload Limits
UPLOAD: { UPLOAD: {
MAX_FILE_SIZE_MB: parseInt(process.env.MAX_FILE_SIZE_MB || '10', 10), MAX_FILE_SIZE_MB: parseInt(process.env.MAX_FILE_SIZE_MB || '10', 10),
@ -42,7 +42,7 @@ export const SYSTEM_CONFIG = {
ALLOWED_FILE_TYPES: ['pdf', 'doc', 'docx', 'xls', 'xlsx', 'ppt', 'pptx', 'jpg', 'jpeg', 'png', 'gif', 'txt'], ALLOWED_FILE_TYPES: ['pdf', 'doc', 'docx', 'xls', 'xlsx', 'ppt', 'pptx', 'jpg', 'jpeg', 'png', 'gif', 'txt'],
MAX_FILES_PER_REQUEST: parseInt(process.env.MAX_FILES_PER_REQUEST || '10', 10), MAX_FILES_PER_REQUEST: parseInt(process.env.MAX_FILES_PER_REQUEST || '10', 10),
}, },
// Workflow Limits // Workflow Limits
WORKFLOW: { WORKFLOW: {
MAX_APPROVAL_LEVELS: parseInt(process.env.MAX_APPROVAL_LEVELS || '10', 10), MAX_APPROVAL_LEVELS: parseInt(process.env.MAX_APPROVAL_LEVELS || '10', 10),
@ -50,7 +50,7 @@ export const SYSTEM_CONFIG = {
MAX_SPECTATORS: parseInt(process.env.MAX_SPECTATORS || '20', 10), MAX_SPECTATORS: parseInt(process.env.MAX_SPECTATORS || '20', 10),
MIN_APPROVAL_LEVELS: 1, MIN_APPROVAL_LEVELS: 1,
}, },
// Work Notes Configuration // Work Notes Configuration
WORK_NOTES: { WORK_NOTES: {
MAX_MESSAGE_LENGTH: parseInt(process.env.MAX_MESSAGE_LENGTH || '2000', 10), MAX_MESSAGE_LENGTH: parseInt(process.env.MAX_MESSAGE_LENGTH || '2000', 10),
@ -58,20 +58,20 @@ export const SYSTEM_CONFIG = {
ENABLE_REACTIONS: process.env.ENABLE_REACTIONS !== 'false', ENABLE_REACTIONS: process.env.ENABLE_REACTIONS !== 'false',
ENABLE_MENTIONS: process.env.ENABLE_MENTIONS !== 'false', ENABLE_MENTIONS: process.env.ENABLE_MENTIONS !== 'false',
}, },
// Pagination // Pagination
PAGINATION: { PAGINATION: {
DEFAULT_PAGE_SIZE: parseInt(process.env.DEFAULT_PAGE_SIZE || '20', 10), DEFAULT_PAGE_SIZE: parseInt(process.env.DEFAULT_PAGE_SIZE || '20', 10),
MAX_PAGE_SIZE: parseInt(process.env.MAX_PAGE_SIZE || '100', 10), MAX_PAGE_SIZE: parseInt(process.env.MAX_PAGE_SIZE || '100', 10),
}, },
// Session & Security // Session & Security
SECURITY: { SECURITY: {
SESSION_TIMEOUT_MINUTES: parseInt(process.env.SESSION_TIMEOUT_MINUTES || '480', 10), // 8 hours SESSION_TIMEOUT_MINUTES: parseInt(process.env.SESSION_TIMEOUT_MINUTES || '480', 10), // 8 hours
JWT_EXPIRY: process.env.JWT_EXPIRY || '8h', JWT_EXPIRY: process.env.JWT_EXPIRY || '8h',
ENABLE_2FA: process.env.ENABLE_2FA === 'true', ENABLE_2FA: process.env.ENABLE_2FA === 'true',
}, },
// Notification Settings // Notification Settings
NOTIFICATIONS: { NOTIFICATIONS: {
ENABLE_EMAIL: process.env.ENABLE_EMAIL_NOTIFICATIONS !== 'false', ENABLE_EMAIL: process.env.ENABLE_EMAIL_NOTIFICATIONS !== 'false',
@ -79,7 +79,7 @@ export const SYSTEM_CONFIG = {
ENABLE_IN_APP: true, // Always enabled ENABLE_IN_APP: true, // Always enabled
BATCH_DELAY_MS: parseInt(process.env.NOTIFICATION_BATCH_DELAY || '5000', 10), BATCH_DELAY_MS: parseInt(process.env.NOTIFICATION_BATCH_DELAY || '5000', 10),
}, },
// Feature Flags // Feature Flags
FEATURES: { FEATURES: {
ENABLE_AI_CONCLUSION: process.env.ENABLE_AI_CONCLUSION !== 'false', ENABLE_AI_CONCLUSION: process.env.ENABLE_AI_CONCLUSION !== 'false',
@ -87,7 +87,7 @@ export const SYSTEM_CONFIG = {
ENABLE_ANALYTICS: process.env.ENABLE_ANALYTICS !== 'false', ENABLE_ANALYTICS: process.env.ENABLE_ANALYTICS !== 'false',
ENABLE_EXPORT: process.env.ENABLE_EXPORT !== 'false', ENABLE_EXPORT: process.env.ENABLE_EXPORT !== 'false',
}, },
// Redis & Queue // Redis & Queue
REDIS: { REDIS: {
URL: process.env.REDIS_URL || 'redis://localhost:6379', URL: process.env.REDIS_URL || 'redis://localhost:6379',
@ -95,7 +95,7 @@ export const SYSTEM_CONFIG = {
RATE_LIMIT_MAX: parseInt(process.env.RATE_LIMIT_MAX || '10', 10), RATE_LIMIT_MAX: parseInt(process.env.RATE_LIMIT_MAX || '10', 10),
RATE_LIMIT_DURATION: parseInt(process.env.RATE_LIMIT_DURATION || '1000', 10), RATE_LIMIT_DURATION: parseInt(process.env.RATE_LIMIT_DURATION || '1000', 10),
}, },
// UI Preferences (can be overridden per user in future) // UI Preferences (can be overridden per user in future)
UI: { UI: {
DEFAULT_THEME: 'light', DEFAULT_THEME: 'light',
@ -147,16 +147,16 @@ export async function getPublicConfig() {
// Get configuration from database first (always try to read from DB) // Get configuration from database first (always try to read from DB)
const { getConfigValue } = require('../services/configReader.service'); const { getConfigValue } = require('../services/configReader.service');
// Get AI configuration from admin settings (database) // Get AI configuration from admin settings (database)
const aiEnabled = (await getConfigValue('AI_ENABLED', 'true'))?.toLowerCase() === 'true'; const aiEnabled = String(await getConfigValue('AI_ENABLED', 'true')).toLowerCase() === 'true';
const remarkGenerationEnabled = (await getConfigValue('AI_REMARK_GENERATION_ENABLED', 'true'))?.toLowerCase() === 'true'; const remarkGenerationEnabled = String(await getConfigValue('AI_REMARK_GENERATION_ENABLED', 'true')).toLowerCase() === 'true';
const maxRemarkLength = parseInt(await getConfigValue('AI_MAX_REMARK_LENGTH', '2000') || '2000', 10); const maxRemarkLength = parseInt(await getConfigValue('AI_MAX_REMARK_LENGTH', '2000') || '2000', 10);
// Try to get AI service status (gracefully handle if not available) // Try to get AI service status (gracefully handle if not available)
try { try {
const { aiService } = require('../services/ai.service'); const { aiService } = require('../services/ai.service');
return { return {
...baseConfig, ...baseConfig,
ai: { ai: {

View File

@ -1,14 +1,14 @@
import { Request, Response } from 'express'; import { Request, Response } from 'express';
import { Holiday, HolidayType } from '@models/Holiday'; import { HolidayModel as Holiday, HolidayType } from '../models/mongoose/Holiday.schema';
import { holidayService } from '@services/holiday.service'; import { holidayMongoService as holidayService } from '../services/holiday.service';
import { activityTypeService } from '@services/activityType.service'; import { activityTypeService } from '../services/activityType.service';
import { sequelize } from '@config/database'; import { adminConfigMongoService } from '../services/adminConfig.service';
import { QueryTypes, Op } from 'sequelize'; import logger from '../utils/logger';
import logger from '@utils/logger'; import dayjs from 'dayjs';
import { initializeHolidaysCache, clearWorkingHoursCache } from '@utils/tatTimeUtils'; import { initializeHolidaysCache, clearWorkingHoursCache } from '../utils/tatTimeUtils';
import { clearConfigCache } from '@services/configReader.service'; import { clearConfigCache } from '../services/configReader.service';
import { User, UserRole } from '@models/User'; import { UserModel as User, IUser } from '../models/mongoose/User.schema';
import { sanitizeHtml } from '@utils/sanitizer'; import { UserRole } from '../types/user.types';
/** /**
* Get all holidays (with optional year filter) * Get all holidays (with optional year filter)
@ -20,10 +20,13 @@ export const getAllHolidays = async (req: Request, res: Response): Promise<void>
const holidays = await holidayService.getAllActiveHolidays(yearNum); const holidays = await holidayService.getAllActiveHolidays(yearNum);
// Format response to match legacy structure
const formattedHolidays = holidays.map(mapToLegacyHoliday);
res.json({ res.json({
success: true, success: true,
data: holidays, data: formattedHolidays,
count: holidays.length count: formattedHolidays.length
}); });
} catch (error) { } catch (error) {
logger.error('[Admin] Error fetching holidays:', error); logger.error('[Admin] Error fetching holidays:', error);
@ -50,13 +53,17 @@ export const getHolidayCalendar = async (req: Request, res: Response): Promise<v
return; return;
} }
const calendar = await holidayService.getHolidayCalendar(yearNum); // Use getAllActiveHolidays to get full docs, then filter by year in memory or update service
// Service has getHolidayCalendar(year) which returns partial objects.
// Better to use getAllActiveHolidays(year) and map ourselves.
const holidays = await holidayService.getAllActiveHolidays(yearNum);
const formattedHolidays = holidays.map(mapToLegacyHoliday);
res.json({ res.json({
success: true, success: true,
year: yearNum, year: yearNum,
holidays: calendar, holidays: formattedHolidays,
count: calendar.length count: formattedHolidays.length
}); });
} catch (error) { } catch (error) {
logger.error('[Admin] Error fetching holiday calendar:', error); logger.error('[Admin] Error fetching holiday calendar:', error);
@ -104,22 +111,26 @@ export const createHoliday = async (req: Request, res: Response): Promise<void>
const holiday = await holidayService.createHoliday({ const holiday = await holidayService.createHoliday({
holidayDate, holidayDate,
holidayName, holidayName,
description: description ? sanitizeHtml(description) : description, holidayType: (holidayType as any) || HolidayType.ORGANIZATIONAL,
holidayType: holidayType || HolidayType.ORGANIZATIONAL, year: new Date(holidayDate).getFullYear(),
isRecurring: isRecurring || false,
recurrenceRule,
appliesToDepartments, appliesToDepartments,
appliesToLocations, appliesToLocations,
description,
isRecurring,
recurrenceRule,
createdBy: userId createdBy: userId
}); });
// Reload holidays cache // Reload holidays cache
await initializeHolidaysCache(); await initializeHolidaysCache();
// Format response to match legacy structure
const legacyResponse = mapToLegacyHoliday(holiday);
res.status(201).json({ res.status(201).json({
success: true, success: true,
message: 'Holiday created successfully', message: 'Holiday created successfully',
data: holiday data: [legacyResponse] // Returning array as requested
}); });
} catch (error: any) { } catch (error: any) {
logger.error('[Admin] Error creating holiday:', error); logger.error('[Admin] Error creating holiday:', error);
@ -130,6 +141,28 @@ export const createHoliday = async (req: Request, res: Response): Promise<void>
} }
}; };
/**
 * Maps a Mongoose holiday document to the legacy JSON response shape.
 *
 * Legacy clients expect:
 *  - the date serialized as 'YYYY-MM-DD'
 *  - null (never undefined or an empty array) for absent optional fields
 *  - timestamps duplicated under both camelCase and snake_case keys
 */
const mapToLegacyHoliday = (holiday: any) => {
  // Empty or missing arrays are normalised to null, matching the legacy contract.
  const toNullableList = (list: any) => (list && list.length > 0 ? list : null);
  return {
    holidayId: holiday._id,
    holidayDate: dayjs(holiday.holidayDate).format('YYYY-MM-DD'),
    holidayName: holiday.holidayName,
    description: holiday.description || null,
    isRecurring: holiday.isRecurring || false,
    recurrenceRule: holiday.recurrenceRule || null,
    holidayType: holiday.holidayType,
    // Documents without an explicit flag are treated as active.
    isActive: holiday.isActive !== undefined ? holiday.isActive : true,
    appliesToDepartments: toNullableList(holiday.appliesToDepartments),
    appliesToLocations: toNullableList(holiday.appliesToLocations),
    createdBy: holiday.createdBy || null,
    updatedBy: holiday.updatedBy || null,
    createdAt: holiday.createdAt,
    updatedAt: holiday.updatedAt,
    // snake_case aliases of the same timestamps for older consumers.
    created_at: holiday.createdAt,
    updated_at: holiday.updatedAt
  };
};
/** /**
* Update a holiday * Update a holiday
*/ */
@ -146,11 +179,8 @@ export const updateHoliday = async (req: Request, res: Response): Promise<void>
const { holidayId } = req.params; const { holidayId } = req.params;
const updates = req.body; const updates = req.body;
if (updates.description) {
updates.description = sanitizeHtml(updates.description);
}
const holiday = await holidayService.updateHoliday(holidayId, updates, userId); const holiday = await holidayService.updateHoliday(holidayId, updates);
if (!holiday) { if (!holiday) {
res.status(404).json({ res.status(404).json({
@ -166,7 +196,7 @@ export const updateHoliday = async (req: Request, res: Response): Promise<void>
res.json({ res.json({
success: true, success: true,
message: 'Holiday updated successfully', message: 'Holiday updated successfully',
data: holiday data: [mapToLegacyHoliday(holiday)] // Returning array for consistency
}); });
} catch (error: any) { } catch (error: any) {
logger.error('[Admin] Error updating holiday:', error); logger.error('[Admin] Error updating holiday:', error);
@ -226,7 +256,7 @@ export const bulkImportHolidays = async (req: Request, res: Response): Promise<v
return; return;
} }
const result = await holidayService.bulkImportHolidays(holidays, userId); const result = await holidayService.bulkImportHolidays(holidays);
// Reload holidays cache // Reload holidays cache
await initializeHolidaysCache(); await initializeHolidaysCache();
@ -263,35 +293,7 @@ export const getPublicConfigurations = async (req: Request, res: Response): Prom
return; return;
} }
let whereClause = ''; const configurations = await adminConfigMongoService.getPublicConfigurations(category as string);
if (category) {
whereClause = `WHERE config_category = '${category}' AND is_sensitive = false`;
} else {
whereClause = `WHERE config_category IN ('DOCUMENT_POLICY', 'TAT_SETTINGS', 'WORKFLOW_SHARING', 'SYSTEM_SETTINGS') AND is_sensitive = false`;
}
const rawConfigurations = await sequelize.query(`
SELECT
config_key,
config_category,
config_value,
value_type,
display_name,
description
FROM admin_configurations
${whereClause}
ORDER BY config_category, sort_order
`, { type: QueryTypes.SELECT });
// Map snake_case to camelCase for frontend
const configurations = (rawConfigurations as any[]).map((config: any) => ({
configKey: config.config_key,
configCategory: config.config_category,
configValue: config.config_value,
valueType: config.value_type,
displayName: config.display_name,
description: config.description
}));
res.json({ res.json({
success: true, success: true,
@ -314,55 +316,7 @@ export const getAllConfigurations = async (req: Request, res: Response): Promise
try { try {
const { category } = req.query; const { category } = req.query;
let whereClause = ''; const configurations = await adminConfigMongoService.getAllConfigurations(category as string);
if (category) {
whereClause = `WHERE config_category = '${category}'`;
}
const rawConfigurations = await sequelize.query(`
SELECT
config_id,
config_key,
config_category,
config_value,
value_type,
display_name,
description,
default_value,
is_editable,
is_sensitive,
validation_rules,
ui_component,
options,
sort_order,
requires_restart,
last_modified_at,
last_modified_by
FROM admin_configurations
${whereClause}
ORDER BY config_category, sort_order
`, { type: QueryTypes.SELECT });
// Map snake_case to camelCase for frontend
const configurations = (rawConfigurations as any[]).map((config: any) => ({
configId: config.config_id,
configKey: config.config_key,
configCategory: config.config_category,
configValue: config.config_value,
valueType: config.value_type,
displayName: config.display_name,
description: config.description,
defaultValue: config.default_value,
isEditable: config.is_editable,
isSensitive: config.is_sensitive || false,
validationRules: config.validation_rules,
uiComponent: config.ui_component,
options: config.options,
sortOrder: config.sort_order,
requiresRestart: config.requires_restart || false,
lastModifiedAt: config.last_modified_at,
lastModifiedBy: config.last_modified_by
}));
res.json({ res.json({
success: true, success: true,
@ -393,7 +347,7 @@ export const updateConfiguration = async (req: Request, res: Response): Promise<
} }
const { configKey } = req.params; const { configKey } = req.params;
let { configValue } = req.body; const { configValue } = req.body;
if (configValue === undefined) { if (configValue === undefined) {
res.status(400).json({ res.status(400).json({
@ -403,29 +357,10 @@ export const updateConfiguration = async (req: Request, res: Response): Promise<
return; return;
} }
// Sanitize config value if it's likely to be rendered as HTML
// We can be selective or just sanitize all strings for safety
if (typeof configValue === 'string') {
configValue = sanitizeHtml(configValue);
}
// Update configuration // Update configuration
const result = await sequelize.query(` const config = await adminConfigMongoService.updateConfig(configKey, configValue, userId);
UPDATE admin_configurations
SET
config_value = :configValue,
last_modified_by = :userId,
last_modified_at = NOW(),
updated_at = NOW()
WHERE config_key = :configKey
AND is_editable = true
RETURNING *
`, {
replacements: { configValue, userId, configKey },
type: QueryTypes.UPDATE
});
if (!result || (result[1] as any) === 0) { if (!config) {
res.status(404).json({ res.status(404).json({
success: false, success: false,
error: 'Configuration not found or not editable' error: 'Configuration not found or not editable'
@ -477,15 +412,15 @@ export const resetConfiguration = async (req: Request, res: Response): Promise<v
try { try {
const { configKey } = req.params; const { configKey } = req.params;
await sequelize.query(` const config = await adminConfigMongoService.resetConfig(configKey);
UPDATE admin_configurations
SET config_value = default_value, if (!config) {
updated_at = NOW() res.status(404).json({
WHERE config_key = :configKey success: false,
`, { error: 'Configuration not found'
replacements: { configKey }, });
type: QueryTypes.UPDATE return;
}); }
// Clear config cache so reset values are used immediately // Clear config cache so reset values are used immediately
clearConfigCache(); clearConfigCache();
@ -543,7 +478,7 @@ export const updateUserRole = async (req: Request, res: Response): Promise<void>
} }
// Find user // Find user
const user = await User.findByPk(userId); const user = await User.findOne({ userId });
if (!user) { if (!user) {
res.status(404).json({ res.status(404).json({
success: false, success: false,
@ -616,8 +551,8 @@ export const getUsersByRole = async (req: Request, res: Response): Promise<void>
// Handle role filtering // Handle role filtering
if (role && role !== 'ALL' && role !== 'ELEVATED') { if (role && role !== 'ALL' && role !== 'ELEVATED') {
const validRoles: UserRole[] = ['USER', 'MANAGEMENT', 'ADMIN']; const validRoles: string[] = ['USER', 'MANAGEMENT', 'ADMIN'];
if (!validRoles.includes(role as UserRole)) { if (!validRoles.includes(role as string)) {
res.status(400).json({ res.status(400).json({
success: false, success: false,
error: 'Invalid role. Must be USER, MANAGEMENT, ADMIN, ALL, or ELEVATED' error: 'Invalid role. Must be USER, MANAGEMENT, ADMIN, ALL, or ELEVATED'
@ -627,61 +562,32 @@ export const getUsersByRole = async (req: Request, res: Response): Promise<void>
whereClause.role = role; whereClause.role = role;
} else if (role === 'ELEVATED' || !role) { } else if (role === 'ELEVATED' || !role) {
// Default: Show only ADMIN and MANAGEMENT (elevated users) // Default: Show only ADMIN and MANAGEMENT (elevated users)
whereClause.role = { [Op.in]: ['ADMIN', 'MANAGEMENT'] }; whereClause.role = { $in: ['ADMIN', 'MANAGEMENT'] };
} }
// If role === 'ALL', don't filter by role (show all users) // If role === 'ALL', don't filter by role (show all users)
// Get total count for pagination // Get total count for pagination
const totalUsers = await User.count({ where: whereClause }); const totalUsers = await User.countDocuments(whereClause);
const totalPages = Math.ceil(totalUsers / limitNum); const totalPages = Math.ceil(totalUsers / limitNum);
// Get paginated users // Get paginated users
const users = await User.findAll({ const users = await User.find(whereClause)
where: whereClause, .select('userId email displayName firstName lastName department designation role manager postalAddress lastLogin createdAt')
attributes: [ .sort({ role: 1, displayName: 1 })
'userId', .skip(offset)
'email', .limit(limitNum);
'displayName',
'firstName',
'lastName',
'department',
'designation',
'role',
'manager',
'postalAddress',
'lastLogin',
'createdAt'
],
order: [
['role', 'ASC'], // ADMIN first, then MANAGEMENT, then USER
['displayName', 'ASC']
],
limit: limitNum,
offset: offset
});
// Get role summary (across all users, not just current page) // Get role summary (across all users, not just current page)
const roleStats = await sequelize.query(` const roleStatsRaw = await User.aggregate([
SELECT { $match: { isActive: true } },
role, { $group: { _id: '$role', count: { $sum: 1 } } },
COUNT(*) as count { $sort: { _id: 1 } }
FROM users ]);
WHERE is_active = true
GROUP BY role
ORDER BY
CASE role
WHEN 'ADMIN' THEN 1
WHEN 'MANAGEMENT' THEN 2
WHEN 'USER' THEN 3
END
`, {
type: QueryTypes.SELECT
});
const summary = { const summary = {
ADMIN: parseInt((roleStats.find((s: any) => s.role === 'ADMIN') as any)?.count || '0'), ADMIN: roleStatsRaw.find((s: any) => s._id === 'ADMIN')?.count || 0,
MANAGEMENT: parseInt((roleStats.find((s: any) => s.role === 'MANAGEMENT') as any)?.count || '0'), MANAGEMENT: roleStatsRaw.find((s: any) => s._id === 'MANAGEMENT')?.count || 0,
USER: parseInt((roleStats.find((s: any) => s.role === 'USER') as any)?.count || '0') USER: roleStatsRaw.find((s: any) => s._id === 'USER')?.count || 0
}; };
res.json({ res.json({
@ -718,29 +624,31 @@ export const getUsersByRole = async (req: Request, res: Response): Promise<void>
*/ */
export const getRoleStatistics = async (req: Request, res: Response): Promise<void> => { export const getRoleStatistics = async (req: Request, res: Response): Promise<void> => {
try { try {
const stats = await sequelize.query(` const stats = await User.aggregate([
SELECT {
role, $group: {
COUNT(*) as count, _id: '$role',
COUNT(CASE WHEN is_active = true THEN 1 END) as active_count, count: { $sum: 1 },
COUNT(CASE WHEN is_active = false THEN 1 END) as inactive_count activeCount: { $sum: { $cond: ['$isActive', 1, 0] } },
FROM users inactiveCount: { $sum: { $cond: ['$isActive', 0, 1] } }
GROUP BY role }
ORDER BY },
CASE role { $sort: { _id: 1 } }
WHEN 'ADMIN' THEN 1 ]);
WHEN 'MANAGEMENT' THEN 2
WHEN 'USER' THEN 3 // Format for frontend
END const formattedStats = stats.map((stat: any) => ({
`, { role: stat._id,
type: QueryTypes.SELECT count: stat.count,
}); active_count: stat.activeCount,
inactive_count: stat.inactiveCount
}));
res.json({ res.json({
success: true, success: true,
data: { data: {
statistics: stats, statistics: formattedStats,
total: stats.reduce((sum: number, stat: any) => sum + parseInt(stat.count), 0) total: formattedStats.reduce((sum: number, stat: any) => sum + stat.count, 0)
} }
}); });
} catch (error) { } catch (error) {
@ -787,7 +695,7 @@ export const assignRoleByEmail = async (req: Request, res: Response): Promise<vo
logger.info(`[Admin] Assigning role ${role} to ${email} by user ${currentUserId}`); logger.info(`[Admin] Assigning role ${role} to ${email} by user ${currentUserId}`);
// First, check if user already exists in our database // First, check if user already exists in our database
let user = await User.findOne({ where: { email } }); let user: IUser | null = await User.findOne({ email });
if (!user) { if (!user) {
// User doesn't exist, need to fetch from Okta and create // User doesn't exist, need to fetch from Okta and create
@ -810,12 +718,11 @@ export const assignRoleByEmail = async (req: Request, res: Response): Promise<vo
} }
// Create user in our database via centralized userService with all fields including manager // Create user in our database via centralized userService with all fields including manager
const ensured = await userService.createOrUpdateUser({ user = (await userService.createOrUpdateUser({
...oktaUserData, ...oktaUserData,
role, // Set the assigned role role: role as any, // Set the assigned role
isActive: true, // Ensure user is active isActive: true, // Ensure user is active
}); })) as IUser;
user = ensured;
logger.info(`[Admin] Created new user ${email} with role ${role} (manager: ${oktaUserData.manager || 'N/A'})`); logger.info(`[Admin] Created new user ${email} with role ${role} (manager: ${oktaUserData.manager || 'N/A'})`);
} catch (oktaError: any) { } catch (oktaError: any) {
@ -849,27 +756,36 @@ export const assignRoleByEmail = async (req: Request, res: Response): Promise<vo
if (oktaUserData) { if (oktaUserData) {
// Sync all fields from Okta including the new role using centralized method // Sync all fields from Okta including the new role using centralized method
const updated = await userService.createOrUpdateUser({ user = (await userService.createOrUpdateUser({
...oktaUserData, // Includes all fields: manager, jobTitle, postalAddress, etc. ...oktaUserData, // Includes all fields: manager, jobTitle, postalAddress, etc.
role, // Set the new role role: role as any, // Set the new role
isActive: true, // Ensure user is active isActive: true, // Ensure user is active
}); })) as IUser;
user = updated;
logger.info(`[Admin] Synced user ${email} from Okta (manager: ${oktaUserData.manager || 'N/A'}) and updated role from ${previousRole} to ${role}`); logger.info(`[Admin] Synced user ${email} from Okta (manager: ${oktaUserData.manager || 'N/A'}) and updated role from ${previousRole} to ${role}`);
} else { } else {
// Okta user not found, just update role // Okta user not found, just update role
await user.update({ role }); user.role = role as any;
await user.save();
logger.info(`[Admin] Updated user ${email} role from ${previousRole} to ${role} (Okta data not available)`); logger.info(`[Admin] Updated user ${email} role from ${previousRole} to ${role} (Okta data not available)`);
} }
} catch (oktaError: any) { } catch (oktaError: any) {
// If Okta fetch fails, just update the role // If Okta fetch fails, just update the role
logger.warn(`[Admin] Failed to fetch Okta data for ${email}, updating role only:`, oktaError.message); logger.warn(`[Admin] Failed to fetch Okta data for ${email}, updating role only:`, oktaError.message);
await user.update({ role }); user.role = role as any;
await user.save();
logger.info(`[Admin] Updated user ${email} role from ${previousRole} to ${role} (Okta sync failed)`); logger.info(`[Admin] Updated user ${email} role from ${previousRole} to ${role} (Okta sync failed)`);
} }
} }
if (!user) {
res.status(500).json({
success: false,
error: 'Failed to create or update user'
});
return;
}
res.json({ res.json({
success: true, success: true,
message: `Successfully assigned ${role} role to ${user.displayName || email}`, message: `Successfully assigned ${role} role to ${user.displayName || email}`,
@ -1059,4 +975,3 @@ export const deleteActivityType = async (req: Request, res: Response): Promise<v
}); });
} }
}; };

View File

@ -1,79 +0,0 @@
import { Request, Response } from 'express';
import { ApiTokenService } from '../services/apiToken.service';
import { ResponseHandler } from '../utils/responseHandler';
import { AuthenticatedRequest } from '../types/express';
import { z } from 'zod';
// Request-body schema for POST /api-tokens:
//  - name: required, 1–100 characters
//  - expiresInDays: optional positive integer; when omitted the service
//    decides the default lifetime — NOTE(review): confirm against ApiTokenService.createToken
const createTokenSchema = z.object({
  name: z.string().min(1).max(100),
  expiresInDays: z.number().int().positive().optional(),
});
/**
 * HTTP controller for personal API tokens: create, list, and revoke.
 * All handlers respond through ResponseHandler and never throw to the caller.
 */
export class ApiTokenController {
  // Service instance is created eagerly; the controller has no other state.
  private readonly apiTokenService: ApiTokenService = new ApiTokenService();

  /** Extracts a loggable message from an unknown thrown value. */
  private static describeError(error: unknown): string {
    return error instanceof Error ? error.message : 'Unknown error';
  }

  /**
   * Create a new API Token
   *
   * Validates the body against createTokenSchema, then returns the raw
   * token once (it is not retrievable again) alongside its metadata.
   */
  async create(req: AuthenticatedRequest, res: Response): Promise<void> {
    try {
      const parsed = createTokenSchema.safeParse(req.body);
      if (!parsed.success) {
        ResponseHandler.error(res, 'Validation error', 400, parsed.error.message);
        return;
      }

      const { name, expiresInDays } = parsed.data;
      const created = await this.apiTokenService.createToken(req.user.userId, name, expiresInDays);

      ResponseHandler.success(
        res,
        {
          token: created.token,
          apiToken: created.apiToken
        },
        'API Token created successfully. Please copy the token now, you will not be able to see it again.'
      );
    } catch (error) {
      ResponseHandler.error(res, 'Failed to create API token', 500, ApiTokenController.describeError(error));
    }
  }

  /**
   * List user's API Tokens
   *
   * Returns every token record belonging to the authenticated user.
   */
  async list(req: AuthenticatedRequest, res: Response): Promise<void> {
    try {
      const tokens = await this.apiTokenService.listTokens(req.user.userId);
      ResponseHandler.success(res, { tokens }, 'API Tokens retrieved successfully');
    } catch (error) {
      ResponseHandler.error(res, 'Failed to list API tokens', 500, ApiTokenController.describeError(error));
    }
  }

  /**
   * Revoke an API Token
   *
   * Revokes the token identified by the :id route param, scoped to the
   * authenticated user; 404 when it does not exist or is already revoked.
   */
  async revoke(req: AuthenticatedRequest, res: Response): Promise<void> {
    try {
      const { id } = req.params;
      const revoked = await this.apiTokenService.revokeToken(req.user.userId, id);

      if (revoked) {
        ResponseHandler.success(res, null, 'API Token revoked successfully');
      } else {
        ResponseHandler.notFound(res, 'Token not found or already revoked');
      }
    } catch (error) {
      ResponseHandler.error(res, 'Failed to revoke API token', 500, ApiTokenController.describeError(error));
    }
  }
}

View File

@ -1,30 +1,30 @@
import { Request, Response } from 'express'; import { Request, Response } from 'express';
import { ApprovalService } from '@services/approval.service'; import { ApprovalService } from '@services/approval.service';
import { DealerClaimApprovalService } from '@services/dealerClaimApproval.service'; import { DealerClaimApprovalMongoService } from '@services/dealerClaimApproval.service';
import { ApprovalLevel } from '@models/ApprovalLevel'; import { ApprovalLevelModel as ApprovalLevel } from '../models/mongoose/ApprovalLevel.schema';
import { WorkflowRequest } from '@models/WorkflowRequest'; import { WorkflowRequestModel as WorkflowRequest } from '../models/mongoose/WorkflowRequest.schema';
import { validateApprovalAction } from '@validators/approval.validator'; import { validateApprovalAction } from '@validators/approval.validator';
import { ResponseHandler } from '@utils/responseHandler'; import { ResponseHandler } from '@utils/responseHandler';
import type { AuthenticatedRequest } from '../types/express'; import type { AuthenticatedRequest } from '../types/express';
import { getRequestMetadata } from '@utils/requestUtils'; import { getRequestMetadata } from '@utils/requestUtils';
const approvalService = new ApprovalService(); const approvalService = new ApprovalService();
const dealerClaimApprovalService = new DealerClaimApprovalService(); const dealerClaimApprovalService = new DealerClaimApprovalMongoService();
export class ApprovalController { export class ApprovalController {
async approveLevel(req: AuthenticatedRequest, res: Response): Promise<void> { async approveLevel(req: AuthenticatedRequest, res: Response): Promise<void> {
try { try {
const { levelId } = req.params; const { levelId } = req.params;
const validatedData = validateApprovalAction(req.body); const validatedData = validateApprovalAction(req.body);
// Determine which service to use based on workflow type // Determine which service to use based on workflow type
const level = await ApprovalLevel.findByPk(levelId); const level = await ApprovalLevel.findOne({ levelId });
if (!level) { if (!level) {
ResponseHandler.notFound(res, 'Approval level not found'); ResponseHandler.notFound(res, 'Approval level not found');
return; return;
} }
const workflow = await WorkflowRequest.findByPk(level.requestId); const workflow = await WorkflowRequest.findOne({ requestId: level.requestId });
if (!workflow) { if (!workflow) {
ResponseHandler.notFound(res, 'Workflow not found'); ResponseHandler.notFound(res, 'Workflow not found');
return; return;
@ -32,15 +32,15 @@ export class ApprovalController {
const workflowType = (workflow as any)?.workflowType; const workflowType = (workflow as any)?.workflowType;
const requestMeta = getRequestMetadata(req); const requestMeta = getRequestMetadata(req);
// Route to appropriate service based on workflow type // Route to appropriate service based on workflow type
let approvedLevel: any; let approvedLevel: any;
if (workflowType === 'CLAIM_MANAGEMENT') { if (workflowType === 'CLAIM_MANAGEMENT') {
// Use DealerClaimApprovalService for claim management workflows // Use DealerClaimApprovalService for claim management workflows
approvedLevel = await dealerClaimApprovalService.approveLevel( approvedLevel = await dealerClaimApprovalService.approveLevel(
levelId, levelId,
validatedData, validatedData,
req.user.userId, req.user.userId,
{ {
ipAddress: requestMeta.ipAddress, ipAddress: requestMeta.ipAddress,
userAgent: requestMeta.userAgent userAgent: requestMeta.userAgent
@ -49,16 +49,16 @@ export class ApprovalController {
} else { } else {
// Use ApprovalService for custom workflows // Use ApprovalService for custom workflows
approvedLevel = await approvalService.approveLevel( approvedLevel = await approvalService.approveLevel(
levelId, levelId,
validatedData, validatedData,
req.user.userId, req.user.userId,
{ {
ipAddress: requestMeta.ipAddress, ipAddress: requestMeta.ipAddress,
userAgent: requestMeta.userAgent userAgent: requestMeta.userAgent
} }
); );
} }
if (!approvedLevel) { if (!approvedLevel) {
ResponseHandler.notFound(res, 'Approval level not found'); ResponseHandler.notFound(res, 'Approval level not found');
return; return;
@ -74,16 +74,18 @@ export class ApprovalController {
async getCurrentApprovalLevel(req: Request, res: Response): Promise<void> { async getCurrentApprovalLevel(req: Request, res: Response): Promise<void> {
try { try {
const { id } = req.params; const { id } = req.params;
// Determine which service to use based on workflow type // Determine which service to use based on workflow type (handle both requestId and requestNumber)
const workflow = await WorkflowRequest.findByPk(id); const workflow = await WorkflowRequest.findOne({
$or: [{ requestId: id }, { requestNumber: id }]
});
if (!workflow) { if (!workflow) {
ResponseHandler.notFound(res, 'Workflow not found'); ResponseHandler.notFound(res, 'Workflow not found');
return; return;
} }
const workflowType = (workflow as any)?.workflowType; const workflowType = (workflow as any)?.workflowType;
// Route to appropriate service based on workflow type // Route to appropriate service based on workflow type
let level: any; let level: any;
if (workflowType === 'CLAIM_MANAGEMENT') { if (workflowType === 'CLAIM_MANAGEMENT') {
@ -91,7 +93,7 @@ export class ApprovalController {
} else { } else {
level = await approvalService.getCurrentApprovalLevel(id); level = await approvalService.getCurrentApprovalLevel(id);
} }
ResponseHandler.success(res, level, 'Current approval level retrieved successfully'); ResponseHandler.success(res, level, 'Current approval level retrieved successfully');
} catch (error) { } catch (error) {
const errorMessage = error instanceof Error ? error.message : 'Unknown error'; const errorMessage = error instanceof Error ? error.message : 'Unknown error';
@ -102,16 +104,18 @@ export class ApprovalController {
async getApprovalLevels(req: Request, res: Response): Promise<void> { async getApprovalLevels(req: Request, res: Response): Promise<void> {
try { try {
const { id } = req.params; const { id } = req.params;
// Determine which service to use based on workflow type // Determine which service to use based on workflow type (handle both requestId and requestNumber)
const workflow = await WorkflowRequest.findByPk(id); const workflow = await WorkflowRequest.findOne({
$or: [{ requestId: id }, { requestNumber: id }]
});
if (!workflow) { if (!workflow) {
ResponseHandler.notFound(res, 'Workflow not found'); ResponseHandler.notFound(res, 'Workflow not found');
return; return;
} }
const workflowType = (workflow as any)?.workflowType; const workflowType = (workflow as any)?.workflowType;
// Route to appropriate service based on workflow type // Route to appropriate service based on workflow type
let levels: any[]; let levels: any[];
if (workflowType === 'CLAIM_MANAGEMENT') { if (workflowType === 'CLAIM_MANAGEMENT') {
@ -119,7 +123,7 @@ export class ApprovalController {
} else { } else {
levels = await approvalService.getApprovalLevels(id); levels = await approvalService.getApprovalLevels(id);
} }
ResponseHandler.success(res, levels, 'Approval levels retrieved successfully'); ResponseHandler.success(res, levels, 'Approval levels retrieved successfully');
} catch (error) { } catch (error) {
const errorMessage = error instanceof Error ? error.message : 'Unknown error'; const errorMessage = error instanceof Error ? error.message : 'Unknown error';

View File

@ -4,7 +4,7 @@ import { validateSSOCallback, validateRefreshToken, validateTokenExchange, valid
import { ResponseHandler } from '../utils/responseHandler'; import { ResponseHandler } from '../utils/responseHandler';
import type { AuthenticatedRequest } from '../types/express'; import type { AuthenticatedRequest } from '../types/express';
import logger from '../utils/logger'; import logger from '../utils/logger';
import { activityService, SYSTEM_EVENT_REQUEST_ID } from '../services/activity.service'; import { activityMongoService as activityService, SYSTEM_EVENT_REQUEST_ID } from '../services/activity.service';
import { getRequestMetadata } from '../utils/requestUtils'; import { getRequestMetadata } from '../utils/requestUtils';
export class AuthController { export class AuthController {
@ -132,13 +132,10 @@ export class AuthController {
// Set new access token in cookie if using cookie-based auth // Set new access token in cookie if using cookie-based auth
const isProduction = process.env.NODE_ENV === 'production'; const isProduction = process.env.NODE_ENV === 'production';
const isUat = process.env.NODE_ENV === 'uat';
const isSecureEnv = isProduction || isUat;
const cookieOptions = { const cookieOptions = {
httpOnly: true, httpOnly: true,
secure: isSecureEnv, secure: isProduction,
sameSite: isSecureEnv ? 'lax' as const : 'lax' as const, // 'lax' is safer and works on same-domain sameSite: isProduction ? 'none' as const : 'lax' as const, // 'none' for cross-domain in production
maxAge: 24 * 60 * 60 * 1000, // 24 hours maxAge: 24 * 60 * 60 * 1000, // 24 hours
}; };
@ -151,7 +148,7 @@ export class AuthController {
message: 'Token refreshed successfully' message: 'Token refreshed successfully'
}, 'Token refreshed successfully'); }, 'Token refreshed successfully');
} else { } else {
// Dev: Include token for debugging // Development: Include token for debugging
ResponseHandler.success(res, { ResponseHandler.success(res, {
accessToken: newAccessToken accessToken: newAccessToken
}, 'Token refreshed successfully'); }, 'Token refreshed successfully');
@ -209,13 +206,10 @@ export class AuthController {
// Set tokens in httpOnly cookies (production) or return in body (development) // Set tokens in httpOnly cookies (production) or return in body (development)
const isProduction = process.env.NODE_ENV === 'production'; const isProduction = process.env.NODE_ENV === 'production';
const isUat = process.env.NODE_ENV === 'uat';
const isSecureEnv = isProduction || isUat;
const cookieOptions = { const cookieOptions = {
httpOnly: true, httpOnly: true,
secure: isSecureEnv, secure: isProduction,
sameSite: isSecureEnv ? ('lax' as const) : ('lax' as const), sameSite: isProduction ? ('none' as const) : ('lax' as const),
maxAge: 24 * 60 * 60 * 1000, // 24 hours maxAge: 24 * 60 * 60 * 1000, // 24 hours
path: '/', path: '/',
}; };
@ -262,13 +256,10 @@ export class AuthController {
// Set new access token in cookie // Set new access token in cookie
const isProduction = process.env.NODE_ENV === 'production'; const isProduction = process.env.NODE_ENV === 'production';
const isUat = process.env.NODE_ENV === 'uat';
const isSecureEnv = isProduction || isUat;
const cookieOptions = { const cookieOptions = {
httpOnly: true, httpOnly: true,
secure: isSecureEnv, secure: isProduction,
sameSite: isSecureEnv ? ('lax' as const) : ('lax' as const), sameSite: isProduction ? ('none' as const) : ('lax' as const),
maxAge: 24 * 60 * 60 * 1000, maxAge: 24 * 60 * 60 * 1000,
path: '/', path: '/',
}; };
@ -302,16 +293,13 @@ export class AuthController {
// Helper function to clear cookies with all possible option combinations // Helper function to clear cookies with all possible option combinations
const clearCookiesCompletely = () => { const clearCookiesCompletely = () => {
const isProduction = process.env.NODE_ENV === 'production';
const isUat = process.env.NODE_ENV === 'uat';
const isSecureEnv = isProduction || isUat;
const cookieNames = ['accessToken', 'refreshToken']; const cookieNames = ['accessToken', 'refreshToken'];
// Get the EXACT options used when setting cookies (from exchangeToken) // Get the EXACT options used when setting cookies (from exchangeToken)
// These MUST match exactly: httpOnly, secure, sameSite, path // These MUST match exactly: httpOnly, secure, sameSite, path
const cookieOptions = { const cookieOptions = {
httpOnly: true, httpOnly: true,
secure: isSecureEnv, secure: isProduction,
sameSite: 'lax' as const, sameSite: 'lax' as const,
path: '/', path: '/',
}; };
@ -481,13 +469,10 @@ export class AuthController {
// Set cookies for web clients // Set cookies for web clients
const isProduction = process.env.NODE_ENV === 'production'; const isProduction = process.env.NODE_ENV === 'production';
const isUat = process.env.NODE_ENV === 'uat';
const isSecureEnv = isProduction || isUat;
const cookieOptions = { const cookieOptions = {
httpOnly: true, httpOnly: true,
secure: isSecureEnv, secure: isProduction,
sameSite: isSecureEnv ? 'lax' as const : 'lax' as const, sameSite: isProduction ? 'none' as const : 'lax' as const,
maxAge: 24 * 60 * 60 * 1000, // 24 hours maxAge: 24 * 60 * 60 * 1000, // 24 hours
}; };
@ -564,13 +549,10 @@ export class AuthController {
// Set cookies with httpOnly flag for security // Set cookies with httpOnly flag for security
const isProduction = process.env.NODE_ENV === 'production'; const isProduction = process.env.NODE_ENV === 'production';
const isUat = process.env.NODE_ENV === 'uat';
const isSecureEnv = isProduction || isUat;
const cookieOptions = { const cookieOptions = {
httpOnly: true, httpOnly: true,
secure: isSecureEnv, secure: isProduction,
sameSite: isSecureEnv ? 'lax' as const : 'lax' as const, // 'lax' for same-domain sameSite: isProduction ? 'none' as const : 'lax' as const, // 'none' for cross-domain in production
maxAge: 24 * 60 * 60 * 1000, // 24 hours for access token maxAge: 24 * 60 * 60 * 1000, // 24 hours for access token
}; };
@ -602,7 +584,7 @@ export class AuthController {
idToken: result.oktaIdToken idToken: result.oktaIdToken
}, 'Token exchange successful'); }, 'Token exchange successful');
} else { } else {
// Dev: Include tokens for debugging and different-port setup // Development: Include tokens for debugging and different-port setup
ResponseHandler.success(res, { ResponseHandler.success(res, {
user: result.user, user: result.user,
accessToken: result.accessToken, accessToken: result.accessToken,

View File

@ -1,10 +1,10 @@
import { Request, Response } from 'express'; import { Request, Response } from 'express';
import { WorkflowRequest, ApprovalLevel, WorkNote, Document, Activity, ConclusionRemark } from '@models/index'; import { WorkflowRequest, ApprovalLevel, WorkNote, Document, Activity, ConclusionRemark, User } from '../models'; // Fixed imports
import { aiService } from '@services/ai.service'; import { aiService } from '../services/ai.service';
import { activityService } from '@services/activity.service'; import { conclusionMongoService } from '../services/conclusion.service';
import logger from '@utils/logger'; import { activityMongoService as activityService } from '../services/activity.service';
import { getRequestMetadata } from '@utils/requestUtils'; import logger from '../utils/logger';
import { getRequestMetadata } from '../utils/requestUtils';
export class ConclusionController { export class ConclusionController {
/** /**
@ -16,20 +16,17 @@ export class ConclusionController {
const { requestId } = req.params; const { requestId } = req.params;
const userId = (req as any).user?.userId; const userId = (req as any).user?.userId;
// Fetch request with all related data // Fetch request
const request = await WorkflowRequest.findOne({ // Mongoose doesn't support 'include' directly like Sequelize.
where: { requestId }, // We'll fetch the request first.
include: [ const request = await WorkflowRequest.findOne({ requestId });
{ association: 'initiator', attributes: ['userId', 'displayName', 'email'] }
]
});
if (!request) { if (!request) {
return res.status(404).json({ error: 'Request not found' }); return res.status(404).json({ error: 'Request not found' });
} }
// Check if user is the initiator // Check if user is the initiator (compare userId strings)
if ((request as any).initiatorId !== userId) { if ((request as any).initiator.userId !== userId) {
return res.status(403).json({ error: 'Only the initiator can generate conclusion remarks' }); return res.status(403).json({ error: 'Only the initiator can generate conclusion remarks' });
} }
@ -71,118 +68,24 @@ export class ConclusionController {
}); });
} }
// Gather context for AI generation
const approvalLevels = await ApprovalLevel.findAll({
where: { requestId },
order: [['levelNumber', 'ASC']]
});
const workNotes = await WorkNote.findAll({
where: { requestId },
order: [['createdAt', 'ASC']],
limit: 20 // Last 20 work notes - keep full context for better conclusions
});
const documents = await Document.findAll({
where: { requestId },
order: [['uploadedAt', 'DESC']]
});
const activities = await Activity.findAll({
where: { requestId },
order: [['createdAt', 'ASC']],
limit: 50 // Last 50 activities - keep full context for better conclusions
});
// Build context object
const context = {
requestTitle: (request as any).title,
requestDescription: (request as any).description,
requestNumber: (request as any).requestNumber,
priority: (request as any).priority,
approvalFlow: approvalLevels.map((level: any) => {
const tatPercentage = level.tatPercentageUsed !== undefined && level.tatPercentageUsed !== null
? Number(level.tatPercentageUsed)
: (level.elapsedHours && level.tatHours ? (Number(level.elapsedHours) / Number(level.tatHours)) * 100 : 0);
return {
levelNumber: level.levelNumber,
approverName: level.approverName,
status: level.status,
comments: level.comments,
actionDate: level.actionDate,
tatHours: Number(level.tatHours || 0),
elapsedHours: Number(level.elapsedHours || 0),
tatPercentageUsed: tatPercentage
};
}),
workNotes: workNotes.map((note: any) => ({
userName: note.userName,
message: note.message,
createdAt: note.createdAt
})),
documents: documents.map((doc: any) => ({
fileName: doc.originalFileName || doc.fileName,
uploadedBy: doc.uploadedBy,
uploadedAt: doc.uploadedAt
})),
activities: activities.map((activity: any) => ({
type: activity.activityType,
action: activity.activityDescription,
details: activity.activityDescription,
timestamp: activity.createdAt
}))
};
logger.info(`[Conclusion] Generating AI remark for request ${requestId}...`); logger.info(`[Conclusion] Generating AI remark for request ${requestId}...`);
// Generate AI conclusion // Use the service to generate and save (consistent with automatic trigger)
const aiResult = await aiService.generateConclusionRemark(context); const conclusionInstance = await conclusionMongoService.generateAndSaveAIConclusion(requestId);
// Check if conclusion already exists if (!conclusionInstance) {
let conclusionInstance = await ConclusionRemark.findOne({ where: { requestId } }); return res.status(500).json({ error: 'Failed to generate conclusion' });
const conclusionData = {
aiGeneratedRemark: aiResult.remark,
aiModelUsed: aiResult.provider,
aiConfidenceScore: aiResult.confidence,
approvalSummary: {
totalLevels: approvalLevels.length,
approvedLevels: approvalLevels.filter((l: any) => l.status === 'APPROVED').length,
averageTatUsage: approvalLevels.reduce((sum: number, l: any) =>
sum + Number(l.tatPercentageUsed || 0), 0) / (approvalLevels.length || 1)
},
documentSummary: {
totalDocuments: documents.length,
documentNames: documents.map((d: any) => d.originalFileName || d.fileName)
},
keyDiscussionPoints: aiResult.keyPoints,
generatedAt: new Date()
};
if (conclusionInstance) {
// Update existing conclusion (allow regeneration)
await conclusionInstance.update(conclusionData as any);
logger.info(`[Conclusion] ✅ AI conclusion regenerated for request ${requestId}`);
} else {
// Create new conclusion
conclusionInstance = await ConclusionRemark.create({
requestId,
...conclusionData,
finalRemark: null,
editedBy: null,
isEdited: false,
editCount: 0,
finalizedAt: null
} as any);
logger.info(`[Conclusion] ✅ AI conclusion generated for request ${requestId}`);
} }
// Fetch initiator details manually for logging
const initiator = await User.findOne({ userId: (request as any).initiatorId });
// Log activity // Log activity
const requestMeta = getRequestMetadata(req); const requestMeta = getRequestMetadata(req);
await activityService.log({ await activityService.log({
requestId, requestId,
type: 'ai_conclusion_generated', type: 'ai_conclusion_generated',
user: { userId, name: (request as any).initiator?.displayName || 'Initiator' }, user: { userId, name: initiator?.displayName || 'Initiator' },
timestamp: new Date().toISOString(), timestamp: new Date().toISOString(),
action: 'AI Conclusion Generated', action: 'AI Conclusion Generated',
details: 'AI-powered conclusion remark generated for review', details: 'AI-powered conclusion remark generated for review',
@ -193,12 +96,12 @@ export class ConclusionController {
return res.status(200).json({ return res.status(200).json({
message: 'Conclusion generated successfully', message: 'Conclusion generated successfully',
data: { data: {
conclusionId: (conclusionInstance as any).conclusionId, conclusionId: (conclusionInstance as any).conclusionId || (conclusionInstance as any)._id,
aiGeneratedRemark: aiResult.remark, aiGeneratedRemark: conclusionInstance.aiGeneratedRemark,
keyDiscussionPoints: aiResult.keyPoints, keyDiscussionPoints: conclusionInstance.keyDiscussionPoints,
confidence: aiResult.confidence, confidence: conclusionInstance.aiConfidenceScore,
provider: aiResult.provider, provider: conclusionInstance.aiModelUsed,
generatedAt: new Date() generatedAt: conclusionInstance.generatedAt
} }
}); });
} catch (error: any) { } catch (error: any) {
@ -232,32 +135,32 @@ export class ConclusionController {
} }
// Fetch request // Fetch request
const request = await WorkflowRequest.findOne({ where: { requestId } }); const request = await WorkflowRequest.findOne({ requestId });
if (!request) { if (!request) {
return res.status(404).json({ error: 'Request not found' }); return res.status(404).json({ error: 'Request not found' });
} }
// Check if user is the initiator // Check if user is the initiator
if ((request as any).initiatorId !== userId) { if ((request as any).initiator.userId !== userId) {
return res.status(403).json({ error: 'Only the initiator can update conclusion remarks' }); return res.status(403).json({ error: 'Only the initiator can update conclusion remarks' });
} }
// Find conclusion // Find conclusion
const conclusion = await ConclusionRemark.findOne({ where: { requestId } }); const conclusion = await ConclusionRemark.findOne({ requestId });
if (!conclusion) { if (!conclusion) {
return res.status(404).json({ error: 'Conclusion not found. Generate it first.' }); return res.status(404).json({ error: 'Conclusion not found. Generate it first.' });
} }
// Update conclusion // Update conclusion
// Note: finalRemark is already sanitized by the sanitization middleware (RICH_TEXT_FIELDS)
const wasEdited = (conclusion as any).aiGeneratedRemark !== finalRemark; const wasEdited = (conclusion as any).aiGeneratedRemark !== finalRemark;
await conclusion.update({ conclusion.finalRemark = finalRemark;
finalRemark: finalRemark, conclusion.editedBy = userId;
editedBy: userId, conclusion.isEdited = wasEdited;
isEdited: wasEdited, if (wasEdited) {
editCount: wasEdited ? (conclusion as any).editCount + 1 : (conclusion as any).editCount conclusion.editCount = ((conclusion as any).editCount || 0) + 1;
} as any); }
await conclusion.save();
logger.info(`[Conclusion] Updated conclusion for request ${requestId} (edited: ${wasEdited})`); logger.info(`[Conclusion] Updated conclusion for request ${requestId} (edited: ${wasEdited})`);
@ -285,22 +188,18 @@ export class ConclusionController {
return res.status(400).json({ error: 'Final remark is required' }); return res.status(400).json({ error: 'Final remark is required' });
} }
// Note: finalRemark is already sanitized by the sanitization middleware (RICH_TEXT_FIELDS)
// Fetch request // Fetch request
const request = await WorkflowRequest.findOne({ const request = await WorkflowRequest.findOne({ requestId });
where: { requestId },
include: [
{ association: 'initiator', attributes: ['userId', 'displayName', 'email'] }
]
});
if (!request) { if (!request) {
return res.status(404).json({ error: 'Request not found' }); return res.status(404).json({ error: 'Request not found' });
} }
// Fetch initiator manually
const initiator = await User.findOne({ userId: (request as any).initiator.userId });
// Check if user is the initiator // Check if user is the initiator
if ((request as any).initiatorId !== userId) { if ((request as any).initiator.userId !== userId) {
return res.status(403).json({ error: 'Only the initiator can finalize conclusion remarks' }); return res.status(403).json({ error: 'Only the initiator can finalize conclusion remarks' });
} }
@ -310,15 +209,15 @@ export class ConclusionController {
} }
// Find or create conclusion // Find or create conclusion
let conclusion = await ConclusionRemark.findOne({ where: { requestId } }); let conclusion = await ConclusionRemark.findOne({ requestId });
if (!conclusion) { if (!conclusion) {
// Create if doesn't exist (manual conclusion without AI) // Create if doesn't exist (manual conclusion without AI)
conclusion = await ConclusionRemark.create({ conclusion = await ConclusionRemark.create({
requestId, requestId,
aiGeneratedRemark: null, aiGeneratedRemark: undefined,
aiModelUsed: null, aiModelUsed: undefined,
aiConfidenceScore: null, aiConfidenceScore: undefined,
finalRemark: finalRemark, finalRemark: finalRemark,
editedBy: userId, editedBy: userId,
isEdited: false, isEdited: false,
@ -326,28 +225,28 @@ export class ConclusionController {
approvalSummary: {}, approvalSummary: {},
documentSummary: {}, documentSummary: {},
keyDiscussionPoints: [], keyDiscussionPoints: [],
generatedAt: null, generatedAt: undefined,
finalizedAt: new Date() finalizedAt: new Date()
} as any); });
} else { } else {
// Update existing conclusion // Update existing conclusion
const wasEdited = (conclusion as any).aiGeneratedRemark !== finalRemark; const wasEdited = (conclusion as any).aiGeneratedRemark !== finalRemark;
await conclusion.update({ conclusion.finalRemark = finalRemark;
finalRemark: finalRemark, conclusion.editedBy = userId;
editedBy: userId, conclusion.isEdited = wasEdited;
isEdited: wasEdited, if (wasEdited) {
editCount: wasEdited ? (conclusion as any).editCount + 1 : (conclusion as any).editCount, conclusion.editCount = ((conclusion as any).editCount || 0) + 1;
finalizedAt: new Date() }
} as any); conclusion.finalizedAt = new Date();
await conclusion.save();
} }
// Update request status to CLOSED // Update request workflowState to CLOSED (keep granular status as APPROVED/REJECTED)
await request.update({ request.workflowState = 'CLOSED';
status: 'CLOSED', (request as any).conclusionRemark = finalRemark;
conclusionRemark: finalRemark, (request as any).closureDate = new Date();
closureDate: new Date() await request.save();
} as any);
logger.info(`[Conclusion] ✅ Request ${requestId} finalized and closed`); logger.info(`[Conclusion] ✅ Request ${requestId} finalized and closed`);
@ -355,7 +254,7 @@ export class ConclusionController {
// Since the initiator is finalizing, this should always succeed // Since the initiator is finalizing, this should always succeed
let summaryId = null; let summaryId = null;
try { try {
const { summaryService } = await import('@services/summary.service'); const { summaryService } = await import('../services/summary.service');
const userRole = (req as any).user?.role || (req as any).auth?.role; const userRole = (req as any).user?.role || (req as any).auth?.role;
const summary = await summaryService.createSummary(requestId, userId, { userRole }); const summary = await summaryService.createSummary(requestId, userId, { userRole });
summaryId = (summary as any).summaryId; summaryId = (summary as any).summaryId;
@ -371,10 +270,10 @@ export class ConclusionController {
await activityService.log({ await activityService.log({
requestId, requestId,
type: 'closed', type: 'closed',
user: { userId, name: (request as any).initiator?.displayName || 'Initiator' }, user: { userId, name: initiator?.displayName || 'Initiator' },
timestamp: new Date().toISOString(), timestamp: new Date().toISOString(),
action: 'Request Closed', action: 'Request Closed',
details: `Request closed with conclusion remark by ${(request as any).initiator?.displayName}`, details: `Request closed with conclusion remark by ${initiator?.displayName}`,
ipAddress: requestMeta.ipAddress, ipAddress: requestMeta.ipAddress,
userAgent: requestMeta.userAgent userAgent: requestMeta.userAgent
}); });
@ -382,7 +281,7 @@ export class ConclusionController {
return res.status(200).json({ return res.status(200).json({
message: 'Request finalized and closed successfully', message: 'Request finalized and closed successfully',
data: { data: {
conclusionId: (conclusion as any).conclusionId, conclusionId: (conclusion as any).conclusionId || (conclusion as any)._id,
requestNumber: (request as any).requestNumber, requestNumber: (request as any).requestNumber,
status: 'CLOSED', status: 'CLOSED',
finalRemark: finalRemark, finalRemark: finalRemark,
@ -404,20 +303,31 @@ export class ConclusionController {
try { try {
const { requestId } = req.params; const { requestId } = req.params;
const conclusion = await ConclusionRemark.findOne({ const conclusion = await ConclusionRemark.findOne({ requestId });
where: { requestId },
include: [
{ association: 'editor', attributes: ['userId', 'displayName', 'email'] }
]
});
if (!conclusion) { if (!conclusion) {
return res.status(404).json({ error: 'Conclusion not found' }); return res.status(404).json({ error: 'Conclusion not found' });
} }
// Manually fetch editor if needed
let editor = null;
if (conclusion.editedBy) {
editor = await User.findOne({ userId: conclusion.editedBy });
}
// Append editor info to result if needed, or just return conclusion
const result = (conclusion as any).toJSON ? (conclusion as any).toJSON() : conclusion;
if (editor) {
result.editor = {
userId: editor.userId,
displayName: editor.displayName,
email: editor.email
};
}
return res.status(200).json({ return res.status(200).json({
message: 'Conclusion retrieved successfully', message: 'Conclusion retrieved successfully',
data: conclusion data: result
}); });
} catch (error: any) { } catch (error: any) {
logger.error('[Conclusion] Error getting conclusion:', error); logger.error('[Conclusion] Error getting conclusion:', error);
@ -427,4 +337,3 @@ export class ConclusionController {
} }
export const conclusionController = new ConclusionController(); export const conclusionController = new ConclusionController();

View File

@ -1,12 +1,12 @@
import { Request, Response } from 'express'; import { Request, Response } from 'express';
import { DashboardService } from '../services/dashboard.service'; import { DashboardMongoService, dashboardMongoService } from '../services/dashboard.service';
import logger from '@utils/logger'; import logger from '@utils/logger';
export class DashboardController { export class DashboardController {
private dashboardService: DashboardService; private dashboardService: DashboardMongoService = dashboardMongoService;
constructor() { constructor() {
this.dashboardService = new DashboardService(); // Service is now injected via import singleton
} }
/** /**
@ -19,9 +19,9 @@ export class DashboardController {
const startDate = req.query.startDate as string | undefined; const startDate = req.query.startDate as string | undefined;
const endDate = req.query.endDate as string | undefined; const endDate = req.query.endDate as string | undefined;
const viewAsUser = req.query.viewAsUser === 'true'; // For admin to view as normal user const viewAsUser = req.query.viewAsUser === 'true'; // For admin to view as normal user
const kpis = await this.dashboardService.getKPIs(userId, dateRange, startDate, endDate, viewAsUser); const kpis = await this.dashboardService.getKPIs(userId, dateRange, startDate, endDate, viewAsUser);
res.json({ res.json({
success: true, success: true,
data: kpis data: kpis
@ -53,13 +53,14 @@ export class DashboardController {
const approverType = req.query.approverType as 'current' | 'any' | undefined; const approverType = req.query.approverType as 'current' | 'any' | undefined;
const search = req.query.search as string | undefined; const search = req.query.search as string | undefined;
const slaCompliance = req.query.slaCompliance as string | undefined; const slaCompliance = req.query.slaCompliance as string | undefined;
const lifecycle = req.query.lifecycle as string | undefined;
const viewAsUser = req.query.viewAsUser === 'true'; // When true, treat admin as normal user const viewAsUser = req.query.viewAsUser === 'true'; // When true, treat admin as normal user
const stats = await this.dashboardService.getRequestStats( const stats = await this.dashboardService.getRequestStats(
userId, userId,
dateRange, dateRange,
startDate, startDate,
endDate, endDate,
status, status,
priority, priority,
templateType, templateType,
@ -69,9 +70,10 @@ export class DashboardController {
approverType, approverType,
search, search,
slaCompliance, slaCompliance,
viewAsUser viewAsUser,
lifecycle
); );
res.json({ res.json({
success: true, success: true,
data: stats data: stats
@ -94,9 +96,9 @@ export class DashboardController {
const dateRange = req.query.dateRange as string | undefined; const dateRange = req.query.dateRange as string | undefined;
const startDate = req.query.startDate as string | undefined; const startDate = req.query.startDate as string | undefined;
const endDate = req.query.endDate as string | undefined; const endDate = req.query.endDate as string | undefined;
const efficiency = await this.dashboardService.getTATEfficiency(userId, dateRange, startDate, endDate); const efficiency = await this.dashboardService.getTATEfficiency(userId, dateRange, startDate, endDate);
res.json({ res.json({
success: true, success: true,
data: efficiency data: efficiency
@ -119,9 +121,9 @@ export class DashboardController {
const dateRange = req.query.dateRange as string | undefined; const dateRange = req.query.dateRange as string | undefined;
const startDate = req.query.startDate as string | undefined; const startDate = req.query.startDate as string | undefined;
const endDate = req.query.endDate as string | undefined; const endDate = req.query.endDate as string | undefined;
const load = await this.dashboardService.getApproverLoad(userId, dateRange, startDate, endDate); const load = await this.dashboardService.getApproverLoad(userId, dateRange, startDate, endDate);
res.json({ res.json({
success: true, success: true,
data: load data: load
@ -144,9 +146,9 @@ export class DashboardController {
const dateRange = req.query.dateRange as string | undefined; const dateRange = req.query.dateRange as string | undefined;
const startDate = req.query.startDate as string | undefined; const startDate = req.query.startDate as string | undefined;
const endDate = req.query.endDate as string | undefined; const endDate = req.query.endDate as string | undefined;
const engagement = await this.dashboardService.getEngagementStats(userId, dateRange, startDate, endDate); const engagement = await this.dashboardService.getEngagementStats(userId, dateRange, startDate, endDate);
res.json({ res.json({
success: true, success: true,
data: engagement data: engagement
@ -169,9 +171,9 @@ export class DashboardController {
const dateRange = req.query.dateRange as string | undefined; const dateRange = req.query.dateRange as string | undefined;
const startDate = req.query.startDate as string | undefined; const startDate = req.query.startDate as string | undefined;
const endDate = req.query.endDate as string | undefined; const endDate = req.query.endDate as string | undefined;
const insights = await this.dashboardService.getAIInsights(userId, dateRange, startDate, endDate); const insights = await this.dashboardService.getAIInsights(userId, dateRange, startDate, endDate);
res.json({ res.json({
success: true, success: true,
data: insights data: insights
@ -194,9 +196,9 @@ export class DashboardController {
const dateRange = req.query.dateRange as string | undefined; const dateRange = req.query.dateRange as string | undefined;
const startDate = req.query.startDate as string | undefined; const startDate = req.query.startDate as string | undefined;
const endDate = req.query.endDate as string | undefined; const endDate = req.query.endDate as string | undefined;
const utilization = await this.dashboardService.getAIRemarkUtilization(userId, dateRange, startDate, endDate); const utilization = await this.dashboardService.getAIRemarkUtilization(userId, dateRange, startDate, endDate);
res.json({ res.json({
success: true, success: true,
data: utilization data: utilization
@ -223,9 +225,9 @@ export class DashboardController {
const limit = Number(req.query.limit || 10); const limit = Number(req.query.limit || 10);
const priority = req.query.priority as string | undefined; const priority = req.query.priority as string | undefined;
const slaCompliance = req.query.slaCompliance as string | undefined; const slaCompliance = req.query.slaCompliance as string | undefined;
const result = await this.dashboardService.getApproverPerformance(userId, dateRange, page, limit, startDate, endDate, priority, slaCompliance); const result = await this.dashboardService.getApproverPerformance(userId, dateRange, page, limit, startDate, endDate, priority, slaCompliance);
res.json({ res.json({
success: true, success: true,
data: result.performance, data: result.performance,
@ -254,9 +256,9 @@ export class DashboardController {
const page = Number(req.query.page || 1); const page = Number(req.query.page || 1);
const limit = Number(req.query.limit || 10); const limit = Number(req.query.limit || 10);
const viewAsUser = req.query.viewAsUser === 'true'; // For admin to view as normal user const viewAsUser = req.query.viewAsUser === 'true'; // For admin to view as normal user
const result = await this.dashboardService.getRecentActivity(userId, page, limit, viewAsUser); const result = await this.dashboardService.getRecentActivity(userId, page, limit, viewAsUser);
res.json({ res.json({
success: true, success: true,
data: result.activities, data: result.activities,
@ -285,9 +287,9 @@ export class DashboardController {
const page = Number(req.query.page || 1); const page = Number(req.query.page || 1);
const limit = Number(req.query.limit || 10); const limit = Number(req.query.limit || 10);
const viewAsUser = req.query.viewAsUser === 'true'; // For admin to view as normal user const viewAsUser = req.query.viewAsUser === 'true'; // For admin to view as normal user
const result = await this.dashboardService.getCriticalRequests(userId, page, limit, viewAsUser); const result = await this.dashboardService.getCriticalRequests(userId, page, limit, viewAsUser);
res.json({ res.json({
success: true, success: true,
data: result.criticalRequests, data: result.criticalRequests,
@ -316,9 +318,9 @@ export class DashboardController {
const page = Number(req.query.page || 1); const page = Number(req.query.page || 1);
const limit = Number(req.query.limit || 10); const limit = Number(req.query.limit || 10);
const viewAsUser = req.query.viewAsUser === 'true'; // For admin to view as normal user const viewAsUser = req.query.viewAsUser === 'true'; // For admin to view as normal user
const result = await this.dashboardService.getUpcomingDeadlines(userId, page, limit, viewAsUser); const result = await this.dashboardService.getUpcomingDeadlines(userId, page, limit, viewAsUser);
res.json({ res.json({
success: true, success: true,
data: result.deadlines, data: result.deadlines,
@ -347,9 +349,9 @@ export class DashboardController {
const dateRange = req.query.dateRange as string | undefined; const dateRange = req.query.dateRange as string | undefined;
const startDate = req.query.startDate as string | undefined; const startDate = req.query.startDate as string | undefined;
const endDate = req.query.endDate as string | undefined; const endDate = req.query.endDate as string | undefined;
const stats = await this.dashboardService.getDepartmentStats(userId, dateRange, startDate, endDate); const stats = await this.dashboardService.getDepartmentStats(userId, dateRange, startDate, endDate);
res.json({ res.json({
success: true, success: true,
data: stats data: stats
@ -372,9 +374,9 @@ export class DashboardController {
const dateRange = req.query.dateRange as string | undefined; const dateRange = req.query.dateRange as string | undefined;
const startDate = req.query.startDate as string | undefined; const startDate = req.query.startDate as string | undefined;
const endDate = req.query.endDate as string | undefined; const endDate = req.query.endDate as string | undefined;
const distribution = await this.dashboardService.getPriorityDistribution(userId, dateRange, startDate, endDate); const distribution = await this.dashboardService.getPriorityDistribution(userId, dateRange, startDate, endDate);
res.json({ res.json({
success: true, success: true,
data: distribution data: distribution
@ -399,9 +401,9 @@ export class DashboardController {
const dateRange = req.query.dateRange as string | undefined; const dateRange = req.query.dateRange as string | undefined;
const startDate = req.query.startDate as string | undefined; const startDate = req.query.startDate as string | undefined;
const endDate = req.query.endDate as string | undefined; const endDate = req.query.endDate as string | undefined;
const result = await this.dashboardService.getLifecycleReport(userId, page, limit, dateRange, startDate, endDate); const result = await this.dashboardService.getLifecycleReport(userId, page, limit, dateRange, startDate, endDate);
res.json({ res.json({
success: true, success: true,
data: result.lifecycleData, data: result.lifecycleData,
@ -436,11 +438,11 @@ export class DashboardController {
const filterType = req.query.filterType as string | undefined; const filterType = req.query.filterType as string | undefined;
const filterCategory = req.query.filterCategory as string | undefined; const filterCategory = req.query.filterCategory as string | undefined;
const filterSeverity = req.query.filterSeverity as string | undefined; const filterSeverity = req.query.filterSeverity as string | undefined;
const result = await this.dashboardService.getActivityLogReport( const result = await this.dashboardService.getActivityLogReport(
userId, userId,
page, page,
limit, limit,
dateRange, dateRange,
filterUserId, filterUserId,
filterType, filterType,
@ -449,7 +451,7 @@ export class DashboardController {
startDate, startDate,
endDate endDate
); );
res.json({ res.json({
success: true, success: true,
data: result.activities, data: result.activities,
@ -514,7 +516,7 @@ export class DashboardController {
const dateRange = req.query.dateRange as string | undefined; const dateRange = req.query.dateRange as string | undefined;
const startDate = req.query.startDate as string | undefined; const startDate = req.query.startDate as string | undefined;
const endDate = req.query.endDate as string | undefined; const endDate = req.query.endDate as string | undefined;
const result = await this.dashboardService.getWorkflowAgingReport( const result = await this.dashboardService.getWorkflowAgingReport(
userId, userId,
threshold, threshold,
@ -524,7 +526,7 @@ export class DashboardController {
startDate, startDate,
endDate endDate
); );
res.json({ res.json({
success: true, success: true,
data: result.agingData, data: result.agingData,
@ -556,7 +558,7 @@ export class DashboardController {
const endDate = req.query.endDate as string | undefined; const endDate = req.query.endDate as string | undefined;
const priority = req.query.priority as string | undefined; const priority = req.query.priority as string | undefined;
const slaCompliance = req.query.slaCompliance as string | undefined; const slaCompliance = req.query.slaCompliance as string | undefined;
if (!approverId) { if (!approverId) {
res.status(400).json({ res.status(400).json({
success: false, success: false,
@ -564,7 +566,7 @@ export class DashboardController {
}); });
return; return;
} }
const stats = await this.dashboardService.getSingleApproverStats( const stats = await this.dashboardService.getSingleApproverStats(
userId, userId,
approverId, approverId,
@ -574,7 +576,7 @@ export class DashboardController {
priority, priority,
slaCompliance slaCompliance
); );
res.json({ res.json({
success: true, success: true,
data: stats data: stats
@ -604,7 +606,7 @@ export class DashboardController {
const priority = req.query.priority as string | undefined; const priority = req.query.priority as string | undefined;
const slaCompliance = req.query.slaCompliance as string | undefined; const slaCompliance = req.query.slaCompliance as string | undefined;
const search = req.query.search as string | undefined; const search = req.query.search as string | undefined;
if (!approverId) { if (!approverId) {
res.status(400).json({ res.status(400).json({
success: false, success: false,
@ -612,7 +614,7 @@ export class DashboardController {
}); });
return; return;
} }
const result = await this.dashboardService.getRequestsByApprover( const result = await this.dashboardService.getRequestsByApprover(
userId, userId,
approverId, approverId,
@ -626,7 +628,7 @@ export class DashboardController {
slaCompliance, slaCompliance,
search search
); );
res.json({ res.json({
success: true, success: true,
data: result.requests, data: result.requests,
@ -646,4 +648,3 @@ export class DashboardController {
} }
} }
} }

View File

@ -1,25 +1,18 @@
import { Request, Response } from 'express'; import { Request, Response } from 'express';
import type { AuthenticatedRequest } from '../types/express'; import type { AuthenticatedRequest } from '../types/express';
import { DealerClaimService } from '../services/dealerClaim.service'; import { DealerClaimMongoService } from '../services/dealerClaim.service';
import { ResponseHandler } from '../utils/responseHandler'; import { ResponseHandler } from '../utils/responseHandler';
import { translateEInvoiceError } from '../utils/einvoiceErrors';
import logger from '../utils/logger'; import logger from '../utils/logger';
import { gcsStorageService } from '../services/gcsStorage.service'; import { gcsStorageService } from '../services/gcsStorage.service';
import { Document } from '../models/Document'; import { Document, InternalOrder, WorkflowRequest } from '../models'; // Fixed imports
import { InternalOrder } from '../models/InternalOrder';
import { constants } from '../config/constants'; import { constants } from '../config/constants';
import { sapIntegrationService } from '../services/sapIntegration.service'; import { sapIntegrationService } from '../services/sapIntegration.service';
import fs from 'fs'; import fs from 'fs';
import path from 'path'; import path from 'path';
import crypto from 'crypto'; import crypto from 'crypto';
import { WorkflowRequest } from '../models/WorkflowRequest';
import { DealerClaimDetails } from '../models/DealerClaimDetails';
import { ClaimInvoice } from '../models/ClaimInvoice';
import { ClaimInvoiceItem } from '../models/ClaimInvoiceItem';
import { ActivityType } from '../models/ActivityType';
export class DealerClaimController { export class DealerClaimController {
private dealerClaimService = new DealerClaimService(); private dealerClaimService = new DealerClaimMongoService();
/** /**
* Create a new dealer claim request * Create a new dealer claim request
@ -127,11 +120,11 @@ export class DealerClaimController {
return uuidRegex.test(id); return uuidRegex.test(id);
}; };
const { WorkflowRequest } = await import('../models/WorkflowRequest'); // Use WorkflowRequest from imports (Mongoose model)
if (isUuid(identifier)) { if (isUuid(identifier)) {
return await WorkflowRequest.findByPk(identifier); return await WorkflowRequest.findOne({ requestId: identifier });
} else { } else {
return await WorkflowRequest.findOne({ where: { requestNumber: identifier } }); return await WorkflowRequest.findOne({ requestNumber: identifier });
} }
} }
@ -318,8 +311,9 @@ export class DealerClaimController {
const extension = path.extname(file.originalname).replace('.', '').toLowerCase(); const extension = path.extname(file.originalname).replace('.', '').toLowerCase();
// Save to documents table // Save to documents table (Mongoose)
const doc = await Document.create({ const doc = await Document.create({
documentId: crypto.randomUUID(), // Generate UUID if model requires it and doesn't auto-gen
requestId, requestId,
uploadedBy: userId, uploadedBy: userId,
fileName: path.basename(file.filename || file.originalname), fileName: path.basename(file.filename || file.originalname),
@ -338,10 +332,11 @@ export class DealerClaimController {
parentDocumentId: null as any, parentDocumentId: null as any,
isDeleted: false, isDeleted: false,
downloadCount: 0, downloadCount: 0,
} as any); uploadedAt: new Date()
});
completionDocuments.push({ completionDocuments.push({
documentId: doc.documentId, documentId: (doc as any).documentId,
name: file.originalname, name: file.originalname,
url: uploadResult.storageUrl, url: uploadResult.storageUrl,
size: file.size, size: file.size,
@ -379,6 +374,7 @@ export class DealerClaimController {
// Save to documents table // Save to documents table
const doc = await Document.create({ const doc = await Document.create({
documentId: crypto.randomUUID(),
requestId, requestId,
uploadedBy: userId, uploadedBy: userId,
fileName: path.basename(file.filename || file.originalname), fileName: path.basename(file.filename || file.originalname),
@ -397,10 +393,11 @@ export class DealerClaimController {
parentDocumentId: null as any, parentDocumentId: null as any,
isDeleted: false, isDeleted: false,
downloadCount: 0, downloadCount: 0,
} as any); uploadedAt: new Date()
});
activityPhotos.push({ activityPhotos.push({
documentId: doc.documentId, documentId: (doc as any).documentId,
name: file.originalname, name: file.originalname,
url: uploadResult.storageUrl, url: uploadResult.storageUrl,
size: file.size, size: file.size,
@ -439,6 +436,7 @@ export class DealerClaimController {
// Save to documents table // Save to documents table
const doc = await Document.create({ const doc = await Document.create({
documentId: crypto.randomUUID(), // UUID gen
requestId, requestId,
uploadedBy: userId, uploadedBy: userId,
fileName: path.basename(file.filename || file.originalname), fileName: path.basename(file.filename || file.originalname),
@ -457,10 +455,11 @@ export class DealerClaimController {
parentDocumentId: null as any, parentDocumentId: null as any,
isDeleted: false, isDeleted: false,
downloadCount: 0, downloadCount: 0,
} as any); uploadedAt: new Date()
});
invoicesReceipts.push({ invoicesReceipts.push({
documentId: doc.documentId, documentId: (doc as any).documentId,
name: file.originalname, name: file.originalname,
url: uploadResult.storageUrl, url: uploadResult.storageUrl,
size: file.size, size: file.size,
@ -499,6 +498,7 @@ export class DealerClaimController {
// Save to documents table // Save to documents table
const doc = await Document.create({ const doc = await Document.create({
documentId: crypto.randomUUID(), // UUID gen
requestId, requestId,
uploadedBy: userId, uploadedBy: userId,
fileName: path.basename(attendanceSheetFile.filename || attendanceSheetFile.originalname), fileName: path.basename(attendanceSheetFile.filename || attendanceSheetFile.originalname),
@ -517,10 +517,11 @@ export class DealerClaimController {
parentDocumentId: null as any, parentDocumentId: null as any,
isDeleted: false, isDeleted: false,
downloadCount: 0, downloadCount: 0,
} as any); uploadedAt: new Date()
});
attendanceSheet = { attendanceSheet = {
documentId: doc.documentId, documentId: (doc as any).documentId,
name: attendanceSheetFile.originalname, name: attendanceSheetFile.originalname,
url: uploadResult.storageUrl, url: uploadResult.storageUrl,
size: attendanceSheetFile.size, size: attendanceSheetFile.size,
@ -665,7 +666,7 @@ export class DealerClaimController {
); );
// Fetch and return the updated IO details from database // Fetch and return the updated IO details from database
const updatedIO = await InternalOrder.findOne({ where: { requestId } }); const updatedIO = await InternalOrder.findOne({ requestId });
if (updatedIO) { if (updatedIO) {
return ResponseHandler.success(res, { return ResponseHandler.success(res, {
@ -757,66 +758,7 @@ export class DealerClaimController {
} catch (error) { } catch (error) {
const errorMessage = error instanceof Error ? error.message : 'Unknown error'; const errorMessage = error instanceof Error ? error.message : 'Unknown error';
logger.error('[DealerClaimController] Error updating e-invoice:', error); logger.error('[DealerClaimController] Error updating e-invoice:', error);
return ResponseHandler.error(res, 'Failed to update e-invoice details', 500, errorMessage);
// Translate technical PWC/IRP error codes to user-friendly messages
const userFacingMessage = translateEInvoiceError(errorMessage);
return ResponseHandler.error(res, userFacingMessage, 500, errorMessage);
}
}
/**
 * Download the e-invoice PDF for a dealer claim.
 * GET /api/v1/dealer-claims/:requestId/e-invoice/pdf
 *
 * The :requestId path segment may be either the workflow UUID or the
 * human-readable requestNumber; it is resolved via findWorkflowByIdentifier.
 * The PDF is generated on the fly from the stored ClaimInvoice record and
 * streamed inline to the client.
 *
 * Error responses: 404 if the workflow or invoice record is missing,
 * 400 if the workflow has no usable requestId, 500 on PDF generation failure.
 */
async downloadInvoicePdf(req: Request, res: Response): Promise<void> {
try {
const identifier = req.params.requestId; // Can be UUID or requestNumber
// Find workflow to get actual UUID
const workflow = await this.findWorkflowByIdentifier(identifier);
if (!workflow) {
return ResponseHandler.error(res, 'Workflow request not found', 404);
}
// Workflow records may expose camelCase or snake_case field names.
const requestId = (workflow as any).requestId || (workflow as any).request_id;
if (!requestId) {
return ResponseHandler.error(res, 'Invalid workflow request', 400);
}
// Lazy-load the model to avoid a hard import cycle at module load time.
const { ClaimInvoice } = await import('../models/ClaimInvoice');
let invoice = await ClaimInvoice.findOne({ where: { requestId } });
if (!invoice) {
return ResponseHandler.error(res, 'Invoice record not found', 404);
}
// Generate PDF on the fly
try {
const { pdfService } = await import('../services/pdf.service');
const pdfBuffer = await pdfService.generateInvoicePdf(requestId);
const requestNumber = workflow.requestNumber || 'invoice';
const fileName = `Invoice_${requestNumber}.pdf`;
// inline disposition: browsers render the PDF instead of downloading it.
res.setHeader('Content-Type', 'application/pdf');
res.setHeader('Content-Disposition', `inline; filename="${fileName}"`);
res.setHeader('Content-Length', pdfBuffer.length);
// Convert Buffer to stream
const { Readable } = await import('stream');
const stream = new Readable();
stream.push(pdfBuffer);
stream.push(null); // null signals end-of-stream to the Readable
stream.pipe(res);
} catch (pdfError) {
// Invoice lookup succeeded but rendering failed — report a distinct 500.
logger.error(`[DealerClaimController] Failed to generate PDF:`, pdfError);
return ResponseHandler.error(res, 'Failed to generate invoice PDF', 500);
}
} catch (error) {
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
logger.error('[DealerClaimController] Error downloading invoice PDF:', error);
return ResponseHandler.error(res, 'Failed to download invoice PDF', 500, errorMessage);
} }
} }
@ -868,245 +810,4 @@ export class DealerClaimController {
return ResponseHandler.error(res, 'Failed to update credit note details', 500, errorMessage); return ResponseHandler.error(res, 'Failed to update credit note details', 500, errorMessage);
} }
} }
/**
 * Send the credit note to the dealer and auto-approve Step 8.
 * POST /api/v1/dealer-claims/:requestId/credit-note/send
 *
 * The :requestId path segment may be either the workflow UUID or the
 * human-readable requestNumber.
 */
async sendCreditNoteToDealer(
  req: AuthenticatedRequest,
  res: Response
): Promise<void> {
  try {
    // Path param may be a UUID or a requestNumber; resolved below.
    const rawIdentifier = req.params.requestId;

    // Reject unauthenticated callers before doing any lookups.
    const userId = req.user?.userId;
    if (!userId) {
      return ResponseHandler.error(res, 'Unauthorized', 401);
    }

    // Resolve the workflow so we can work with its canonical UUID.
    const workflow = await this.findWorkflowByIdentifier(rawIdentifier);
    if (!workflow) {
      return ResponseHandler.error(res, 'Workflow request not found', 404);
    }

    // The record may expose camelCase or snake_case field names.
    const workflowAny = workflow as any;
    const requestId = workflowAny.requestId || workflowAny.request_id;
    if (!requestId) {
      return ResponseHandler.error(res, 'Invalid workflow request', 400);
    }

    // The service layer performs the actual send and Step 8 approval.
    await this.dealerClaimService.sendCreditNoteToDealer(requestId, userId);

    return ResponseHandler.success(
      res,
      { message: 'Credit note sent to dealer and Step 8 approved successfully' },
      'Credit note sent'
    );
  } catch (error) {
    const errorMessage = error instanceof Error ? error.message : 'Unknown error';
    logger.error('[DealerClaimController] Error sending credit note to dealer:', error);
    return ResponseHandler.error(res, 'Failed to send credit note to dealer', 500, errorMessage);
  }
}
/**
 * Test SAP budget blocking directly (testing/debugging aid).
 * POST /api/v1/dealer-claims/test/sap-block
 *
 * Validates the IO number against SAP, blocks the requested amount, and
 * returns both the raw SAP response and the validation result — without
 * creating a full dealer-claim request.
 */
async testSapBudgetBlock(req: AuthenticatedRequest, res: Response): Promise<void> {
  try {
    // Authentication guard.
    const userId = req.user?.userId;
    if (!userId) {
      return ResponseHandler.error(res, 'Unauthorized', 401);
    }

    const { ioNumber, amount, requestNumber } = req.body;

    // Input validation: both fields are mandatory and amount must be positive.
    if (!ioNumber || !amount) {
      return ResponseHandler.error(res, 'Missing required fields: ioNumber and amount are required', 400);
    }
    const parsedAmount = parseFloat(amount);
    if (isNaN(parsedAmount) || parsedAmount <= 0) {
      return ResponseHandler.error(res, 'Amount must be a positive number', 400);
    }

    logger.info(`[DealerClaimController] Testing SAP budget block:`, {
      ioNumber,
      amount: parsedAmount,
      requestNumber: requestNumber || 'TEST-REQUEST',
      userId
    });

    // Step 1: confirm the IO number exists in SAP before attempting a block.
    const ioValidation = await sapIntegrationService.validateIONumber(ioNumber);
    if (!ioValidation.isValid) {
      return ResponseHandler.error(res, `Invalid IO number: ${ioValidation.error || 'IO number not found in SAP'}`, 400);
    }
    logger.info(`[DealerClaimController] IO validation successful:`, {
      ioNumber,
      availableBalance: ioValidation.availableBalance
    });

    // Step 2: block the budget, using a synthetic request number when none given.
    const effectiveRequestNumber = requestNumber || `TEST-${Date.now()}`;
    const blockResult = await sapIntegrationService.blockBudget(
      ioNumber,
      parsedAmount,
      effectiveRequestNumber,
      `Test budget block for ${effectiveRequestNumber}`
    );
    if (!blockResult.success) {
      return ResponseHandler.error(res, `Failed to block budget in SAP: ${blockResult.error}`, 500);
    }

    // Step 3: report the full picture — SAP's answer plus our own arithmetic.
    const payload = {
      message: 'SAP budget block test successful',
      ioNumber,
      requestedAmount: parsedAmount,
      availableBalance: ioValidation.availableBalance,
      sapResponse: {
        success: blockResult.success,
        blockedAmount: blockResult.blockedAmount,
        remainingBalance: blockResult.remainingBalance,
        sapDocumentNumber: blockResult.blockId || null,
        error: blockResult.error || null
      },
      calculatedRemainingBalance: ioValidation.availableBalance - blockResult.blockedAmount,
      validation: {
        isValid: ioValidation.isValid,
        availableBalance: ioValidation.availableBalance,
        error: ioValidation.error || null
      }
    };
    return ResponseHandler.success(res, payload, 'SAP budget block test completed');
  } catch (error: any) {
    logger.error('[DealerClaimController] Error testing SAP budget block:', error);
    return ResponseHandler.error(res, error.message || 'Failed to test SAP budget block', 500);
  }
}
/**
 * Download the invoice as a CSV file.
 * GET /api/v1/dealer-claims/:requestId/e-invoice/csv
 *
 * Accepts either the workflow UUID or the requestNumber in :requestId.
 * Emits one row per ClaimInvoiceItem (ordered by slNo) with fixed SAP-style
 * column headers. For Non-GST activity types the transaction code and all
 * GST amounts/percentages are blanked/zeroed.
 */
async downloadInvoiceCsv(req: Request, res: Response): Promise<void> {
try {
const identifier = req.params.requestId;
// Use helper to find workflow
const workflow = await this.findWorkflowByIdentifier(identifier);
if (!workflow) {
return ResponseHandler.error(res, 'Workflow request not found', 404);
}
// Records may expose camelCase or snake_case field names.
const requestId = (workflow as any).requestId || (workflow as any).request_id;
const requestNumber = (workflow as any).requestNumber || (workflow as any).request_number;
// Fetch related data
logger.info(`[DealerClaimController] Preparing CSV for requestId: ${requestId}`);
// Fetch all four related record sets in parallel.
const [invoice, items, claimDetails, internalOrder] = await Promise.all([
ClaimInvoice.findOne({ where: { requestId } }),
ClaimInvoiceItem.findAll({ where: { requestId }, order: [['slNo', 'ASC']] }),
DealerClaimDetails.findOne({ where: { requestId } }),
InternalOrder.findOne({ where: { requestId } })
]);
logger.info(`[DealerClaimController] Found ${items.length} items to export for request ${requestNumber}`);
// Derive SAP reference number and taxation type from the activity type;
// falls back to a name-based heuristic ("non" => Non GST) when the
// ActivityType row has no explicit taxationType.
let sapRefNo = '';
let taxationType = 'GST';
if (claimDetails?.activityType) {
const activity = await ActivityType.findOne({ where: { title: claimDetails.activityType } });
sapRefNo = activity?.sapRefNo || '';
taxationType = activity?.taxationType || (claimDetails.activityType.toLowerCase().includes('non') ? 'Non GST' : 'GST');
}
// Construct CSV
const headers = [
'TRNS_UNIQ_NO',
'CLAIM_NUMBER',
'INV_NUMBER',
'DEALER_CODE',
'IO_NUMBER',
'CLAIM_DOC_TYP',
'CLAIM_DATE',
'CLAIM_AMT',
'GST_AMT',
'GST_PERCENTAG'
];
// NOTE(review): fields are joined with raw commas and no quoting — a comma
// inside e.g. an invoice number would corrupt the row. Presumably the
// downstream consumer expects this exact unquoted format; confirm before
// changing.
const rows = items.map(item => {
const isNonGst = taxationType === 'Non GST' || taxationType === 'Non-GST';
// For Non-GST, we hide HSN (often stored in transactionCode) and GST details
const trnsUniqNo = isNonGst ? '' : (item.transactionCode || '');
const claimNumber = requestNumber;
const invNumber = invoice?.invoiceNumber || '';
const dealerCode = claimDetails?.dealerCode || '';
const ioNumber = internalOrder?.ioNumber || '';
const claimDocTyp = sapRefNo;
// Claim date is the invoice creation date, formatted as YYYY-MM-DD (UTC).
const claimDate = invoice?.createdAt ? new Date(invoice.createdAt).toISOString().split('T')[0] : '';
const claimAmt = item.assAmt;
// Zero out tax for Non-GST
const totalTax = isNonGst ? 0 : (Number(item.igstAmt || 0) + Number(item.cgstAmt || 0) + Number(item.sgstAmt || 0) + Number(item.utgstAmt || 0));
const gstPercentag = isNonGst ? 0 : (item.gstRt || 0);
return [
trnsUniqNo,
claimNumber,
invNumber,
dealerCode,
ioNumber,
claimDocTyp,
claimDate,
claimAmt,
totalTax.toFixed(2),
gstPercentag
].join(',');
});
const csvContent = [headers.join(','), ...rows].join('\n');
// attachment disposition forces a file download in the browser.
res.setHeader('Content-Type', 'text/csv');
res.setHeader('Content-Disposition', `attachment; filename="Invoice_${requestNumber}.csv"`);
res.status(200).send(csvContent);
return;
} catch (error) {
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
logger.error('[DealerClaimController] Error downloading invoice CSV:', error);
return ResponseHandler.error(res, 'Failed to download invoice CSV', 500, errorMessage);
}
}
/**
 * Re-trigger the WFM CSV push (Step 7).
 * POST /api/v1/dealer-claims/:requestId/wfm/retrigger
 *
 * The :requestId path segment may be either the workflow UUID or the
 * human-readable requestNumber.
 */
async retriggerWFMPush(req: Request, res: Response): Promise<void> {
  try {
    const { requestId: identifier } = req.params;
    const workflow = await this.findWorkflowByIdentifier(identifier);
    if (!workflow) {
      return ResponseHandler.error(res, 'Workflow request not found', 404);
    }
    // Fix: resolve the canonical workflow UUID the same way every sibling
    // method does (requestId / request_id). The previous code read
    // `(workflow as any).id` first, which on a Mongoose document is the
    // ObjectId string — not the requestId the service layer receives
    // everywhere else.
    const requestId = (workflow as any).requestId || (workflow as any).request_id;
    if (!requestId) {
      // Same guard as the other handlers: a workflow without a requestId
      // cannot be pushed.
      return ResponseHandler.error(res, 'Invalid workflow request', 400);
    }
    await this.dealerClaimService.pushWFMCSV(requestId);
    return ResponseHandler.success(res, {
      message: 'WFM CSV push re-triggered successfully'
    }, 'WFM push re-triggered');
  } catch (error: any) {
    const errorMessage = error instanceof Error ? error.message : 'Unknown error';
    logger.error('[DealerClaimController] Error re-triggering WFM push:', error);
    return ResponseHandler.error(res, 'Failed to re-trigger WFM push', 500, errorMessage);
  }
}
} }

View File

@ -1,34 +0,0 @@
import { Request, Response } from 'express';
import { dealerExternalService } from '../services/dealerExternal.service';
import { ResponseHandler } from '../utils/responseHandler';
import logger from '../utils/logger';
export class DealerExternalController {
  /**
   * Look up a dealer by code via the external dealer API.
   * GET /api/v1/dealers-external/search/:dealerCode
   *
   * Responds 400 when no code is supplied, 404 when the external system
   * has no matching dealer, 500 on upstream/service failure.
   */
  async searchByDealerCode(req: Request, res: Response): Promise<void> {
    try {
      const dealerCode = req.params.dealerCode;

      // A dealer code is mandatory for the lookup.
      if (!dealerCode) {
        return ResponseHandler.error(res, 'Dealer code is required', 400);
      }

      // Delegate to the external service; null/undefined means "not found".
      const externalDealer = await dealerExternalService.getDealerByCode(dealerCode);
      if (!externalDealer) {
        return ResponseHandler.error(res, 'Dealer not found in external system', 404);
      }

      return ResponseHandler.success(res, externalDealer, 'Dealer found successfully');
    } catch (error) {
      const errorMessage = error instanceof Error ? error.message : 'Unknown error';
      logger.error(`[DealerExternalController] Error searching dealer ${req.params.dealerCode}:`, error);
      return ResponseHandler.error(res, 'Failed to fetch dealer from external source', 500, errorMessage);
    }
  }
}

// Shared singleton used by the route definitions.
export const dealerExternalController = new DealerExternalController();

View File

@ -1,18 +1,18 @@
import { Request, Response } from 'express'; import { Request, Response } from 'express';
import crypto from 'crypto'; import crypto from 'crypto';
import path from 'path'; import path from 'path';
import fs from 'fs'; import fs from 'fs';
import { Document } from '@models/Document'; import { DocumentModel } from '../models/mongoose/Document.schema';
import { User } from '@models/User'; import { UserModel } from '../models/mongoose/User.schema';
import { WorkflowRequest } from '@models/WorkflowRequest'; import { WorkflowRequestModel as WorkflowRequest } from '../models/mongoose/WorkflowRequest.schema';
import { Participant } from '@models/Participant'; import { ParticipantModel as Participant } from '../models/mongoose/Participant.schema';
import { ApprovalLevel } from '@models/ApprovalLevel'; import { ApprovalLevelModel as ApprovalLevel } from '../models/mongoose/ApprovalLevel.schema';
import { Op } from 'sequelize';
import { ResponseHandler } from '@utils/responseHandler'; import { ResponseHandler } from '@utils/responseHandler';
import { activityService } from '@services/activity.service'; import { activityMongoService as activityService } from '@services/activity.service';
import { gcsStorageService } from '@services/gcsStorage.service'; import { gcsStorageService } from '@services/gcsStorage.service';
import { emailNotificationService } from '@services/emailNotification.service'; import { emailNotificationService } from '@services/emailNotification.service';
import { notificationService } from '@services/notification.service'; import { notificationMongoService as notificationService } from '@services/notification.service';
import type { AuthenticatedRequest } from '../types/express'; import type { AuthenticatedRequest } from '../types/express';
import { getRequestMetadata } from '@utils/requestUtils'; import { getRequestMetadata } from '@utils/requestUtils';
import { getConfigNumber, getConfigValue } from '@services/configReader.service'; import { getConfigNumber, getConfigValue } from '@services/configReader.service';
@ -28,9 +28,18 @@ export class DocumentController {
} }
// Extract requestId from body (multer should parse form fields) // Extract requestId from body (multer should parse form fields)
// Try both req.body and req.body.requestId for compatibility
const identifier = String((req.body?.requestId || req.body?.request_id || '').trim()); const identifier = String((req.body?.requestId || req.body?.request_id || '').trim());
console.log('[DEBUG] Document upload attempt:', {
identifier,
bodyKeys: Object.keys(req.body || {}),
bodyRequestId: req.body?.requestId,
bodyRequest_id: req.body?.request_id,
userId: req.user?.userId
});
if (!identifier || identifier === 'undefined' || identifier === 'null') { if (!identifier || identifier === 'undefined' || identifier === 'null') {
console.log('[DEBUG] RequestId missing or invalid');
logWithContext('error', 'RequestId missing or invalid in document upload', { logWithContext('error', 'RequestId missing or invalid in document upload', {
body: req.body, body: req.body,
bodyKeys: Object.keys(req.body || {}), bodyKeys: Object.keys(req.body || {}),
@ -46,19 +55,45 @@ export class DocumentController {
return uuidRegex.test(id); return uuidRegex.test(id);
}; };
// Get workflow request - handle both UUID (requestId) and requestNumber // Helper to check if identifier is MongoDB ObjectId
let workflowRequest: WorkflowRequest | null = null; const isObjectId = (id: string): boolean => {
if (isUuid(identifier)) { return /^[0-9a-f]{24}$/i.test(id);
workflowRequest = await WorkflowRequest.findByPk(identifier); };
// Get workflow request - handle UUID (requestId), requestNumber, or MongoDB ObjectId (_id)
let workflowRequest: any = null;
const identifierIsUuid = isUuid(identifier);
const identifierIsObjectId = isObjectId(identifier);
console.log('[DEBUG] Looking up workflow request:', {
identifier,
identifierIsUuid,
identifierIsObjectId,
lookupField: identifierIsUuid ? 'requestId' : identifierIsObjectId ? '_id' : 'requestNumber'
});
if (identifierIsUuid) {
workflowRequest = await WorkflowRequest.findOne({ requestId: identifier });
} else if (identifierIsObjectId) {
workflowRequest = await WorkflowRequest.findById(identifier);
} else { } else {
workflowRequest = await WorkflowRequest.findOne({ where: { requestNumber: identifier } }); workflowRequest = await WorkflowRequest.findOne({ requestNumber: identifier });
} }
console.log('[DEBUG] Workflow lookup result:', {
found: !!workflowRequest,
requestId: workflowRequest?.requestId,
requestNumber: workflowRequest?.requestNumber,
_id: workflowRequest?._id?.toString()
});
if (!workflowRequest) { if (!workflowRequest) {
logWithContext('error', 'Workflow request not found for document upload', { logWithContext('error', 'Workflow request not found for document upload', {
identifier, identifier,
isUuid: isUuid(identifier), isUuid: identifierIsUuid,
userId: req.user?.userId isObjectId: identifierIsObjectId,
userId: req.user?.userId,
attemptedLookup: identifierIsUuid ? 'requestId' : identifierIsObjectId ? '_id' : 'requestNumber'
}); });
ResponseHandler.error(res, 'Workflow request not found', 404); ResponseHandler.error(res, 'Workflow request not found', 404);
return; return;
@ -67,11 +102,10 @@ export class DocumentController {
// Get the actual requestId (UUID) and requestNumber // Get the actual requestId (UUID) and requestNumber
const requestId = (workflowRequest as any).requestId || (workflowRequest as any).request_id; const requestId = (workflowRequest as any).requestId || (workflowRequest as any).request_id;
const requestNumber = (workflowRequest as any).requestNumber || (workflowRequest as any).request_number; const requestNumber = (workflowRequest as any).requestNumber || (workflowRequest as any).request_number;
if (!requestNumber) { if (!requestNumber) {
logWithContext('error', 'Request number not found for workflow', { logWithContext('error', 'Request number not found for workflow', {
requestId, requestId,
workflowRequest: JSON.stringify(workflowRequest.toJSON()),
userId: req.user?.userId userId: req.user?.userId
}); });
ResponseHandler.error(res, 'Request number not found for workflow', 500); ResponseHandler.error(res, 'Request number not found for workflow', 500);
@ -84,28 +118,28 @@ export class DocumentController {
return; return;
} }
// Validate file size against database configuration // Validate file size
const maxFileSizeMB = await getConfigNumber('MAX_FILE_SIZE_MB', 10); const maxFileSizeMB = await getConfigNumber('MAX_FILE_SIZE_MB', 10);
const maxFileSizeBytes = maxFileSizeMB * 1024 * 1024; const maxFileSizeBytes = maxFileSizeMB * 1024 * 1024;
if (file.size > maxFileSizeBytes) { if (file.size > maxFileSizeBytes) {
ResponseHandler.error( ResponseHandler.error(
res, res,
`File size exceeds the maximum allowed size of ${maxFileSizeMB}MB. Current size: ${(file.size / (1024 * 1024)).toFixed(2)}MB`, `File size exceeds the maximum allowed size of ${maxFileSizeMB} MB.Current size: ${(file.size / (1024 * 1024)).toFixed(2)} MB`,
400 400
); );
return; return;
} }
// Validate file type against database configuration // Validate file type
const allowedFileTypesStr = await getConfigValue('ALLOWED_FILE_TYPES', 'pdf,doc,docx,xls,xlsx,ppt,pptx,jpg,jpeg,png,gif'); const allowedFileTypesStr = await getConfigValue('ALLOWED_FILE_TYPES', 'pdf,doc,docx,xls,xlsx,ppt,pptx,jpg,jpeg,png,gif');
const allowedFileTypes = allowedFileTypesStr.split(',').map(ext => ext.trim().toLowerCase()); const allowedFileTypes = allowedFileTypesStr.split(',').map((ext: string) => ext.trim().toLowerCase());
const fileExtension = path.extname(file.originalname).replace('.', '').toLowerCase(); const fileExtension = path.extname(file.originalname).replace('.', '').toLowerCase();
if (!allowedFileTypes.includes(fileExtension)) { if (!allowedFileTypes.includes(fileExtension)) {
ResponseHandler.error( ResponseHandler.error(
res, res,
`File type "${fileExtension}" is not allowed. Allowed types: ${allowedFileTypes.join(', ')}`, `File type "${fileExtension}" is not allowed.Allowed types: ${allowedFileTypes.join(', ')} `,
400 400
); );
return; return;
@ -117,7 +151,7 @@ export class DocumentController {
const extension = path.extname(file.originalname).replace('.', '').toLowerCase(); const extension = path.extname(file.originalname).replace('.', '').toLowerCase();
const category = (req.body?.category as string) || 'OTHER'; const category = (req.body?.category as string) || 'OTHER';
// Upload with automatic fallback to local storage // Upload file
const uploadResult = await gcsStorageService.uploadFileWithFallback({ const uploadResult = await gcsStorageService.uploadFileWithFallback({
buffer: fileBuffer, buffer: fileBuffer,
originalName: file.originalname, originalName: file.originalname,
@ -125,11 +159,11 @@ export class DocumentController {
requestNumber: requestNumber, requestNumber: requestNumber,
fileType: 'documents' fileType: 'documents'
}); });
const storageUrl = uploadResult.storageUrl; const storageUrl = uploadResult.storageUrl;
const gcsFilePath = uploadResult.filePath; const gcsFilePath = uploadResult.filePath;
// Clean up local temporary file if it exists (from multer disk storage) // Clean up local temp file
if (file.path && fs.existsSync(file.path)) { if (file.path && fs.existsSync(file.path)) {
try { try {
fs.unlinkSync(file.path); fs.unlinkSync(file.path);
@ -138,134 +172,30 @@ export class DocumentController {
} }
} }
// Check if storageUrl exceeds database column limit (500 chars)
// GCS signed URLs can be very long (500-1000+ chars)
const MAX_STORAGE_URL_LENGTH = 500;
let finalStorageUrl = storageUrl;
if (storageUrl && storageUrl.length > MAX_STORAGE_URL_LENGTH) {
logWithContext('warn', 'Storage URL exceeds database column limit, truncating', {
originalLength: storageUrl.length,
maxLength: MAX_STORAGE_URL_LENGTH,
urlPrefix: storageUrl.substring(0, 100),
});
// For signed URLs, we can't truncate as it will break the URL
// Instead, store null and generate signed URLs on-demand when needed
// The filePath is sufficient to generate a new signed URL later
finalStorageUrl = null as any;
logWithContext('info', 'Storing null storageUrl - will generate signed URL on-demand', {
filePath: gcsFilePath,
reason: 'Signed URL too long for database column',
});
}
// Truncate file names if they exceed database column limits (255 chars)
const MAX_FILE_NAME_LENGTH = 255;
const originalFileName = file.originalname;
let truncatedOriginalFileName = originalFileName;
if (originalFileName.length > MAX_FILE_NAME_LENGTH) {
// Preserve file extension when truncating
const ext = path.extname(originalFileName);
const nameWithoutExt = path.basename(originalFileName, ext);
const maxNameLength = MAX_FILE_NAME_LENGTH - ext.length;
if (maxNameLength > 0) {
truncatedOriginalFileName = nameWithoutExt.substring(0, maxNameLength) + ext;
} else {
// If extension itself is too long, just use the extension
truncatedOriginalFileName = ext.substring(0, MAX_FILE_NAME_LENGTH);
}
logWithContext('warn', 'File name truncated to fit database column', {
originalLength: originalFileName.length,
truncatedLength: truncatedOriginalFileName.length,
originalName: originalFileName.substring(0, 100) + '...',
truncatedName: truncatedOriginalFileName,
});
}
// Generate fileName (basename of the generated file name in GCS)
const generatedFileName = path.basename(gcsFilePath);
let truncatedFileName = generatedFileName;
if (generatedFileName.length > MAX_FILE_NAME_LENGTH) {
const ext = path.extname(generatedFileName);
const nameWithoutExt = path.basename(generatedFileName, ext);
const maxNameLength = MAX_FILE_NAME_LENGTH - ext.length;
if (maxNameLength > 0) {
truncatedFileName = nameWithoutExt.substring(0, maxNameLength) + ext;
} else {
truncatedFileName = ext.substring(0, MAX_FILE_NAME_LENGTH);
}
logWithContext('warn', 'Generated file name truncated', {
originalLength: generatedFileName.length,
truncatedLength: truncatedFileName.length,
});
}
// Prepare document data // Prepare document data
const documentData = { const documentData = {
documentId: require('crypto').randomUUID(),
requestId, requestId,
uploadedBy: userId, uploadedBy: userId,
fileName: truncatedFileName, fileName: path.basename(gcsFilePath).substring(0, 255),
originalFileName: truncatedOriginalFileName, originalFileName: file.originalname.substring(0, 255),
fileType: extension, fileType: extension,
fileExtension: extension, fileExtension: extension,
fileSize: file.size, fileSize: file.size,
filePath: gcsFilePath, // Store GCS path or local path filePath: gcsFilePath,
storageUrl: finalStorageUrl, // Store GCS URL or local URL (null if too long) storageUrl: (storageUrl && storageUrl.length < 500) ? storageUrl : undefined,
mimeType: file.mimetype, mimeType: file.mimetype,
checksum, checksum,
isGoogleDoc: false, category: category as any,
googleDocUrl: null as any,
category,
version: 1, version: 1,
parentDocumentId: null as any,
isDeleted: false, isDeleted: false,
downloadCount: 0,
}; };
logWithContext('info', 'Creating document record', { const doc = await (DocumentModel as any).create(documentData);
requestId,
userId,
fileName: file.originalname,
filePath: gcsFilePath,
storageUrl: storageUrl,
documentData: JSON.stringify(documentData, null, 2),
});
let doc; // Log event
try { logDocumentEvent('uploaded', (doc as any).documentId, {
doc = await Document.create(documentData as any); requestId: workflowRequest.requestId, // Standardized to UUID
logWithContext('info', 'Document record created successfully', {
documentId: doc.documentId,
requestId,
fileName: file.originalname,
});
} catch (createError) {
const createErrorMessage = createError instanceof Error ? createError.message : 'Unknown error';
const createErrorStack = createError instanceof Error ? createError.stack : undefined;
// Check if it's a Sequelize validation error
const sequelizeError = (createError as any)?.errors || (createError as any)?.parent;
logWithContext('error', 'Document.create() failed', {
error: createErrorMessage,
stack: createErrorStack,
sequelizeErrors: sequelizeError,
requestId,
userId,
fileName: file.originalname,
filePath: gcsFilePath,
storageUrl: storageUrl,
documentData: JSON.stringify(documentData, null, 2),
});
throw createError; // Re-throw to be caught by outer catch block
}
// Log document upload event
logDocumentEvent('uploaded', doc.documentId, {
requestId,
userId, userId,
fileName: file.originalname, fileName: file.originalname,
fileType: extension, fileType: extension,
@ -274,249 +204,128 @@ export class DocumentController {
}); });
// Get user details for activity logging // Get user details for activity logging
const user = await User.findByPk(userId); const uploader = await UserModel.findOne({ userId });
const uploaderName = (user as any)?.displayName || (user as any)?.email || 'User'; const uploaderName = uploader?.displayName || uploader?.email || 'User';
// Log activity for document upload // Log activity
const requestMeta = getRequestMetadata(req); const requestMeta = getRequestMetadata(req);
await activityService.log({ await activityService.log({
requestId, requestId: workflowRequest.requestId, // Standardized to UUID
type: 'document_added', type: 'document_added',
user: { userId, name: uploaderName }, user: { userId, name: uploaderName },
timestamp: new Date().toISOString(), timestamp: new Date().toISOString(),
action: 'Document Added', action: 'Document Added',
details: `Added ${file.originalname} as supporting document by ${uploaderName}`, details: `Added ${file.originalname} as supporting document by ${uploaderName} `,
metadata: { metadata: {
fileName: file.originalname, fileName: file.originalname,
fileSize: file.size, fileSize: file.size,
fileType: extension, fileType: extension,
category category
}, },
ipAddress: requestMeta.ipAddress, ipAddress: requestMeta.ipAddress,
userAgent: requestMeta.userAgent userAgent: requestMeta.userAgent
}); });
// Send notifications for additional document added // Send notifications
try { try {
const initiatorId = (workflowRequest as any).initiatorId || (workflowRequest as any).initiator_id; const initiatorId = (workflowRequest as any).initiatorId || (workflowRequest as any).initiator_id;
const isInitiator = userId === initiatorId; const isInitiator = userId === initiatorId;
// Get all participants (spectators) // Get participants
const spectators = await Participant.findAll({ const participants = await Participant.find({
where: { requestId: workflowRequest.requestId, // Standardized to UUID
requestId, participantType: 'SPECTATOR'
participantType: 'SPECTATOR'
},
include: [{
model: User,
as: 'user',
attributes: ['userId', 'email', 'displayName']
}]
}); });
// Get current approver (pending or in-progress approval level) // Get current approver
const currentApprovalLevel = await ApprovalLevel.findOne({ const currentLevel = await ApprovalLevel.findOne({
where: { requestId: requestId,
requestId, status: { $in: ['PENDING', 'IN_PROGRESS'] }
status: { [Op.in]: ['PENDING', 'IN_PROGRESS'] } }).sort({ levelNumber: 1 });
},
order: [['levelNumber', 'ASC']],
include: [{
model: User,
as: 'approver',
attributes: ['userId', 'email', 'displayName']
}]
});
logWithContext('info', 'Current approver lookup for document notification', {
requestId,
currentApprovalLevelFound: !!currentApprovalLevel,
approverUserId: currentApprovalLevel ? ((currentApprovalLevel as any).approver || (currentApprovalLevel as any).Approver)?.userId : null,
isInitiator
});
// Determine who to notify based on who uploaded
const recipientsToNotify: Array<{ userId: string; email: string; displayName: string }> = []; const recipientsToNotify: Array<{ userId: string; email: string; displayName: string }> = [];
if (isInitiator) { // Add initiator if they are not the uploader
// Initiator added → notify spectators and current approver if (!isInitiator) {
spectators.forEach((spectator: any) => { const initiator = await UserModel.findOne({ userId: initiatorId });
const spectatorUser = spectator.user || spectator.User; if (initiator) {
if (spectatorUser && spectatorUser.userId !== userId) { recipientsToNotify.push({
recipientsToNotify.push({ userId: initiator.userId,
userId: spectatorUser.userId, email: initiator.email,
email: spectatorUser.email, displayName: initiator.displayName || initiator.email
displayName: spectatorUser.displayName || spectatorUser.email
});
}
});
if (currentApprovalLevel) {
const approverUser = (currentApprovalLevel as any).approver || (currentApprovalLevel as any).Approver;
if (approverUser && approverUser.userId !== userId) {
recipientsToNotify.push({
userId: approverUser.userId,
email: approverUser.email,
displayName: approverUser.displayName || approverUser.email
});
}
}
} else {
// Check if uploader is a spectator
const uploaderParticipant = await Participant.findOne({
where: {
requestId,
userId,
participantType: 'SPECTATOR'
}
});
if (uploaderParticipant) {
// Spectator added → notify initiator and current approver
const initiator = await User.findByPk(initiatorId);
if (initiator) {
const initiatorData = initiator.toJSON();
if (initiatorData.userId !== userId) {
recipientsToNotify.push({
userId: initiatorData.userId,
email: initiatorData.email,
displayName: initiatorData.displayName || initiatorData.email
});
}
}
if (currentApprovalLevel) {
const approverUser = (currentApprovalLevel as any).approver || (currentApprovalLevel as any).Approver;
if (approverUser && approverUser.userId !== userId) {
recipientsToNotify.push({
userId: approverUser.userId,
email: approverUser.email,
displayName: approverUser.displayName || approverUser.email
});
}
}
} else {
// Approver added → notify initiator and spectators
const initiator = await User.findByPk(initiatorId);
if (initiator) {
const initiatorData = initiator.toJSON();
if (initiatorData.userId !== userId) {
recipientsToNotify.push({
userId: initiatorData.userId,
email: initiatorData.email,
displayName: initiatorData.displayName || initiatorData.email
});
}
}
spectators.forEach((spectator: any) => {
const spectatorUser = spectator.user || spectator.User;
if (spectatorUser && spectatorUser.userId !== userId) {
recipientsToNotify.push({
userId: spectatorUser.userId,
email: spectatorUser.email,
displayName: spectatorUser.displayName || spectatorUser.email
});
}
}); });
} }
} }
// Send notifications (email, in-app, and web-push) // Add current approver if not the uploader
const requestData = { if (currentLevel?.approver?.userId && currentLevel.approver.userId !== userId) {
requestNumber: requestNumber, const approver = await UserModel.findOne({ userId: currentLevel.approver.userId });
requestId: requestId, if (approver) {
title: (workflowRequest as any).title || 'Request' recipientsToNotify.push({
}; userId: approver.userId,
email: approver.email,
displayName: approver.displayName || approver.email
});
}
}
// Prepare user IDs for in-app and web-push notifications // Add spectators
const recipientUserIds = recipientsToNotify.map(r => r.userId); for (const p of participants) {
if (p.userId !== userId && !recipientsToNotify.some(r => r.userId === p.userId)) {
const spectator = await UserModel.findOne({ userId: p.userId });
if (spectator) {
recipientsToNotify.push({
userId: spectator.userId,
email: spectator.email,
displayName: spectator.displayName || spectator.email
});
}
}
}
// Send in-app and web-push notifications // Send notifications
if (recipientUserIds.length > 0) { if (recipientsToNotify.length > 0) {
try { const recipientIds = recipientsToNotify.map(r => r.userId);
await notificationService.sendToUsers(
recipientUserIds, await notificationService.sendToUsers(recipientIds, {
{ title: 'Additional Document Added',
title: 'Additional Document Added', body: `${uploaderName} added "${file.originalname}" to ${requestNumber} `,
body: `${uploaderName} added "${file.originalname}" to ${requestNumber}`, requestId,
requestId, requestNumber,
requestNumber, url: `/ request / ${requestNumber} `,
url: `/request/${requestNumber}`, type: 'document_added',
type: 'document_added', priority: 'MEDIUM',
priority: 'MEDIUM', actionRequired: false,
actionRequired: false, metadata: {
metadata: {
documentName: file.originalname,
fileSize: file.size,
addedByName: uploaderName,
source: 'Documents Tab'
}
}
);
logWithContext('info', 'In-app and web-push notifications sent for additional document', {
requestId,
documentName: file.originalname, documentName: file.originalname,
recipientsCount: recipientUserIds.length addedByName: uploaderName
}); }
} catch (notifyError) { });
logWithContext('error', 'Failed to send in-app/web-push notifications for additional document', {
requestId,
error: notifyError instanceof Error ? notifyError.message : 'Unknown error'
});
}
}
// Send email notifications const requestData = {
for (const recipient of recipientsToNotify) { requestNumber,
await emailNotificationService.sendAdditionalDocumentAdded( requestId,
requestData, title: (workflowRequest as any).title || 'Request'
recipient, };
{
for (const recipient of recipientsToNotify) {
await emailNotificationService.sendAdditionalDocumentAdded(requestData, recipient, {
documentName: file.originalname, documentName: file.originalname,
fileSize: file.size, fileSize: file.size,
addedByName: uploaderName, addedByName: uploaderName,
source: 'Documents Tab' source: 'Documents Tab'
} });
); }
} }
logWithContext('info', 'Additional document notifications sent', {
requestId,
documentName: file.originalname,
recipientsCount: recipientsToNotify.length,
isInitiator
});
} catch (notifyError) { } catch (notifyError) {
// Don't fail document upload if notifications fail logWithContext('error', 'Failed to send document notifications', { error: notifyError });
logWithContext('error', 'Failed to send additional document notifications', {
requestId,
error: notifyError instanceof Error ? notifyError.message : 'Unknown error'
});
} }
ResponseHandler.success(res, doc, 'File uploaded', 201); ResponseHandler.success(res, doc, 'File uploaded', 201);
} catch (error) { } catch (error) {
const message = error instanceof Error ? error.message : 'Unknown error'; const message = error instanceof Error ? error.message : 'Unknown error';
const errorStack = error instanceof Error ? error.stack : undefined; logWithContext('error', 'Document upload failed', { error: message });
logWithContext('error', 'Document upload failed', {
userId: req.user?.userId,
requestId: req.body?.requestId || req.body?.request_id,
body: req.body,
bodyKeys: Object.keys(req.body || {}),
file: req.file ? {
originalname: req.file.originalname,
size: req.file.size,
mimetype: req.file.mimetype,
hasBuffer: !!req.file.buffer,
hasPath: !!req.file.path
} : 'No file',
error: message,
stack: errorStack
});
ResponseHandler.error(res, 'Upload failed', 500, message); ResponseHandler.error(res, 'Upload failed', 500, message);
} }
} }
} }

View File

@ -1,8 +1,8 @@
import { Request, Response } from 'express'; import { Request, Response } from 'express';
import { Notification } from '@models/Notification'; import mongoose from 'mongoose';
import { Op } from 'sequelize'; import { NotificationModel as Notification } from '../models/mongoose/Notification.schema';
import logger from '@utils/logger'; import logger from '../utils/logger';
import { notificationService } from '@services/notification.service'; import { notificationMongoService as notificationService } from '../services/notification.service';
export class NotificationController { export class NotificationController {
/** /**
@ -25,12 +25,12 @@ export class NotificationController {
const offset = (Number(page) - 1) * Number(limit); const offset = (Number(page) - 1) * Number(limit);
const { rows, count } = await Notification.findAndCountAll({ const rows = await Notification.find(where)
where, .sort({ createdAt: -1 })
order: [['createdAt', 'DESC']], .limit(Number(limit))
limit: Number(limit), .skip(offset);
offset
}); const count = await Notification.countDocuments(where);
res.json({ res.json({
success: true, success: true,
@ -42,7 +42,7 @@ export class NotificationController {
total: count, total: count,
totalPages: Math.ceil(count / Number(limit)) totalPages: Math.ceil(count / Number(limit))
}, },
unreadCount: unreadOnly === 'true' ? count : await Notification.count({ where: { userId, isRead: false } }) unreadCount: unreadOnly === 'true' ? count : await Notification.countDocuments({ userId, isRead: false })
} }
}); });
} catch (error: any) { } catch (error: any) {
@ -63,8 +63,8 @@ export class NotificationController {
return; return;
} }
const count = await Notification.count({ const count = await Notification.countDocuments({
where: { userId, isRead: false } userId, isRead: false
}); });
res.json({ res.json({
@ -90,8 +90,13 @@ export class NotificationController {
return; return;
} }
if (!mongoose.Types.ObjectId.isValid(notificationId)) {
res.status(400).json({ success: false, message: 'Invalid notification ID' });
return;
}
const notification = await Notification.findOne({ const notification = await Notification.findOne({
where: { notificationId, userId } _id: notificationId, userId
}); });
if (!notification) { if (!notification) {
@ -99,10 +104,10 @@ export class NotificationController {
return; return;
} }
await notification.update({ notification.isRead = true;
isRead: true, notification.metadata = notification.metadata || {};
readAt: new Date() notification.metadata.readAt = new Date();
}); await notification.save();
res.json({ res.json({
success: true, success: true,
@ -127,9 +132,9 @@ export class NotificationController {
return; return;
} }
await Notification.update( await Notification.updateMany(
{ isRead: true, readAt: new Date() }, { userId, isRead: false },
{ where: { userId, isRead: false } } { $set: { isRead: true } }
); );
res.json({ res.json({
@ -155,10 +160,17 @@ export class NotificationController {
return; return;
} }
const deleted = await Notification.destroy({ if (!mongoose.Types.ObjectId.isValid(notificationId)) {
where: { notificationId, userId } res.status(400).json({ success: false, message: 'Invalid notification ID' });
return;
}
const result = await Notification.deleteOne({
_id: notificationId, userId
}); });
const deleted = result.deletedCount;
if (deleted === 0) { if (deleted === 0) {
res.status(404).json({ success: false, message: 'Notification not found' }); res.status(404).json({ success: false, message: 'Notification not found' });
return; return;
@ -201,4 +213,3 @@ export class NotificationController {
} }
} }
} }

View File

@ -1,12 +1,14 @@
import { Response } from 'express'; import { Response } from 'express';
import { pauseService } from '@services/pause.service'; import { pauseMongoService } from '@services/pause.service';
import { workflowServiceMongo } from '@services/workflow.service';
import { ResponseHandler } from '@utils/responseHandler'; import { ResponseHandler } from '@utils/responseHandler';
import type { AuthenticatedRequest } from '../types/express'; import type { AuthenticatedRequest } from '../types/express';
import { z } from 'zod'; import { z } from 'zod';
// Validation schemas // Validation schemas
// In MongoDB, levelId could be a string (ObjectId)
const pauseWorkflowSchema = z.object({ const pauseWorkflowSchema = z.object({
levelId: z.string().uuid().optional().nullable(), levelId: z.string().optional().nullable(),
reason: z.string().min(1, 'Reason is required').max(1000, 'Reason must be less than 1000 characters'), reason: z.string().min(1, 'Reason is required').max(1000, 'Reason must be less than 1000 characters'),
resumeDate: z.string().datetime().or(z.date()) resumeDate: z.string().datetime().or(z.date())
}); });
@ -26,18 +28,25 @@ export class PauseController {
const userId = req.user?.userId; const userId = req.user?.userId;
if (!userId) { if (!userId) {
ResponseHandler.error(res, 'Unauthorized', 401); ResponseHandler.unauthorized(res, 'Unauthorized');
return;
}
// Resolve requestId (UUID)
const requestId = await workflowServiceMongo.resolveRequestId(id);
if (!requestId) {
ResponseHandler.notFound(res, 'Request not found');
return; return;
} }
// Validate request body // Validate request body
const validated = pauseWorkflowSchema.parse(req.body); const validated = pauseWorkflowSchema.parse(req.body);
const resumeDate = validated.resumeDate instanceof Date const resumeDate = validated.resumeDate instanceof Date
? validated.resumeDate ? validated.resumeDate
: new Date(validated.resumeDate); : new Date(validated.resumeDate);
const result = await pauseService.pauseWorkflow( const result = await pauseMongoService.pauseWorkflow(
id, requestId,
validated.levelId || null, validated.levelId || null,
userId, userId,
validated.reason, validated.reason,
@ -68,14 +77,21 @@ export class PauseController {
const userId = req.user?.userId; const userId = req.user?.userId;
if (!userId) { if (!userId) {
ResponseHandler.error(res, 'Unauthorized', 401); ResponseHandler.unauthorized(res, 'Unauthorized');
return;
}
// Resolve requestId (UUID)
const requestId = await workflowServiceMongo.resolveRequestId(id);
if (!requestId) {
ResponseHandler.notFound(res, 'Request not found');
return; return;
} }
// Validate request body (notes is optional) // Validate request body (notes is optional)
const validated = resumeWorkflowSchema.parse(req.body || {}); const validated = resumeWorkflowSchema.parse(req.body || {});
const result = await pauseService.resumeWorkflow(id, userId, validated.notes); const result = await pauseMongoService.resumeWorkflow(requestId, userId, validated.notes);
ResponseHandler.success(res, { ResponseHandler.success(res, {
workflow: result.workflow, workflow: result.workflow,
@ -101,11 +117,18 @@ export class PauseController {
const userId = req.user?.userId; const userId = req.user?.userId;
if (!userId) { if (!userId) {
ResponseHandler.error(res, 'Unauthorized', 401); ResponseHandler.unauthorized(res, 'Unauthorized');
return; return;
} }
await pauseService.retriggerPause(id, userId); // Resolve requestId (UUID)
const requestId = await workflowServiceMongo.resolveRequestId(id);
if (!requestId) {
ResponseHandler.notFound(res, 'Request not found');
return;
}
await pauseMongoService.retriggerPause(requestId, userId);
ResponseHandler.success(res, null, 'Pause retrigger request sent successfully', 200); ResponseHandler.success(res, null, 'Pause retrigger request sent successfully', 200);
} catch (error: any) { } catch (error: any) {
@ -122,7 +145,14 @@ export class PauseController {
try { try {
const { id } = req.params; const { id } = req.params;
const pauseDetails = await pauseService.getPauseDetails(id); // Resolve requestId (UUID)
const requestId = await workflowServiceMongo.resolveRequestId(id);
if (!requestId) {
ResponseHandler.notFound(res, 'Request not found');
return;
}
const pauseDetails = await pauseMongoService.getPauseDetails(requestId);
if (!pauseDetails) { if (!pauseDetails) {
ResponseHandler.success(res, { isPaused: false }, 'Workflow is not paused', 200); ResponseHandler.success(res, { isPaused: false }, 'Workflow is not paused', 200);
@ -138,4 +168,3 @@ export class PauseController {
} }
export const pauseController = new PauseController(); export const pauseController = new PauseController();

View File

@ -1,13 +1,11 @@
import { Request, Response } from 'express'; import { Request, Response } from 'express';
import { TatAlert } from '@models/TatAlert'; import { TatAlertModel as TatAlert } from '../models/mongoose/TatAlert.schema';
import { ApprovalLevel } from '@models/ApprovalLevel'; import { ApprovalLevelModel as ApprovalLevel } from '../models/mongoose/ApprovalLevel.schema';
import { User } from '@models/User'; import { UserModel } from '../models/mongoose/User.schema';
import { WorkflowRequest } from '@models/WorkflowRequest'; import { WorkflowRequestModel as WorkflowRequest } from '../models/mongoose/WorkflowRequest.schema';
import logger from '@utils/logger'; import logger from '../utils/logger';
import { sequelize } from '@config/database'; import { activityMongoService as activityService } from '../services/activity.service';
import { QueryTypes } from 'sequelize'; import { getRequestMetadata } from '../utils/requestUtils';
import { activityService } from '@services/activity.service';
import { getRequestMetadata } from '@utils/requestUtils';
import type { AuthenticatedRequest } from '../types/express'; import type { AuthenticatedRequest } from '../types/express';
/** /**
@ -16,27 +14,36 @@ import type { AuthenticatedRequest } from '../types/express';
export const getTatAlertsByRequest = async (req: Request, res: Response) => { export const getTatAlertsByRequest = async (req: Request, res: Response) => {
try { try {
const { requestId } = req.params; const { requestId } = req.params;
const alerts = await TatAlert.findAll({ const alerts = await TatAlert.find({ requestId })
where: { requestId }, .sort({ alertSentAt: 1 })
include: [ .lean();
{
model: ApprovalLevel, // Enrich with level info manually since we can't easily populate across collections if not using ObjectIds strictly for references in Mongoose style (using strings here)
as: 'level', // Or we can query ApprovalLevel
attributes: ['levelNumber', 'levelName', 'approverName', 'status'] const enrichedAlerts = await Promise.all(alerts.map(async (alert: any) => {
}, // Fetch level info
{ const level = await ApprovalLevel.findOne({ levelId: alert.levelId }).select('levelNumber levelName approverName status').lean(); // Use findOne with levelId (string)
model: User,
as: 'approver', const alertData = { ...alert, level };
attributes: ['userId', 'displayName', 'email', 'department']
if (alert.approverId) {
const approver = await UserModel.findOne({ userId: alert.approverId }).select('userId displayName email department').lean();
if (approver) {
alertData.approver = {
userId: approver.userId,
displayName: approver.displayName,
email: approver.email,
department: approver.department
};
} }
], }
order: [['alertSentAt', 'ASC']] return alertData;
}); }));
res.json({ res.json({
success: true, success: true,
data: alerts data: enrichedAlerts
}); });
} catch (error) { } catch (error) {
logger.error('[TAT Controller] Error fetching TAT alerts:', error); logger.error('[TAT Controller] Error fetching TAT alerts:', error);
@ -53,12 +60,10 @@ export const getTatAlertsByRequest = async (req: Request, res: Response) => {
export const getTatAlertsByLevel = async (req: Request, res: Response) => { export const getTatAlertsByLevel = async (req: Request, res: Response) => {
try { try {
const { levelId } = req.params; const { levelId } = req.params;
const alerts = await TatAlert.findAll({ const alerts = await TatAlert.find({ levelId })
where: { levelId }, .sort({ alertSentAt: 1 });
order: [['alertSentAt', 'ASC']]
});
res.json({ res.json({
success: true, success: true,
data: alerts data: alerts
@ -78,32 +83,62 @@ export const getTatAlertsByLevel = async (req: Request, res: Response) => {
export const getTatComplianceSummary = async (req: Request, res: Response) => { export const getTatComplianceSummary = async (req: Request, res: Response) => {
try { try {
const { startDate, endDate } = req.query; const { startDate, endDate } = req.query;
let dateFilter = ''; const matchStage: any = {};
if (startDate && endDate) { if (startDate && endDate) {
dateFilter = `AND alert_sent_at BETWEEN '${startDate}' AND '${endDate}'`; matchStage.alertSentAt = {
$gte: new Date(startDate as string),
$lte: new Date(endDate as string)
};
} }
const summary = await sequelize.query(` const summary = await TatAlert.aggregate([
SELECT { $match: matchStage },
COUNT(*) as total_alerts, {
COUNT(CASE WHEN alert_type = 'TAT_50' THEN 1 END) as alerts_50, $group: {
COUNT(CASE WHEN alert_type = 'TAT_75' THEN 1 END) as alerts_75, _id: null,
COUNT(CASE WHEN alert_type = 'TAT_100' THEN 1 END) as breaches, total_alerts: { $sum: 1 },
COUNT(CASE WHEN was_completed_on_time = true THEN 1 END) as completed_on_time, alerts_50: { $sum: { $cond: [{ $eq: ['$alertType', 'TAT_50'] }, 1, 0] } },
COUNT(CASE WHEN was_completed_on_time = false THEN 1 END) as completed_late, alerts_75: { $sum: { $cond: [{ $eq: ['$alertType', 'TAT_75'] }, 1, 0] } },
ROUND( breaches: { $sum: { $cond: [{ $eq: ['$alertType', 'TAT_100'] }, 1, 0] } },
COUNT(CASE WHEN was_completed_on_time = true THEN 1 END) * 100.0 / completed_on_time: { $sum: { $cond: [{ $eq: ['$wasCompletedOnTime', true] }, 1, 0] } },
NULLIF(COUNT(CASE WHEN was_completed_on_time IS NOT NULL THEN 1 END), 0), completed_late: { $sum: { $cond: [{ $eq: ['$wasCompletedOnTime', false] }, 1, 0] } },
2 completed_total: {
) as compliance_percentage $sum: { $cond: [{ $ne: ['$wasCompletedOnTime', null] }, 1, 0] }
FROM tat_alerts }
WHERE 1=1 ${dateFilter} }
`, { type: QueryTypes.SELECT }); },
{
$project: {
_id: 0,
total_alerts: 1,
alerts_50: 1,
alerts_75: 1,
breaches: 1,
completed_on_time: 1,
completed_late: 1,
compliance_percentage: {
$cond: [
{ $eq: ['$completed_total', 0] },
0,
{ $round: [{ $multiply: [{ $divide: ['$completed_on_time', '$completed_total'] }, 100] }, 2] }
]
}
}
}
]);
res.json({ res.json({
success: true, success: true,
data: summary[0] || {} data: summary[0] || {
total_alerts: 0,
alerts_50: 0,
alerts_75: 0,
breaches: 0,
completed_on_time: 0,
completed_late: 0,
compliance_percentage: 0
}
}); });
} catch (error) { } catch (error) {
logger.error('[TAT Controller] Error fetching TAT compliance summary:', error); logger.error('[TAT Controller] Error fetching TAT compliance summary:', error);
@ -119,33 +154,57 @@ export const getTatComplianceSummary = async (req: Request, res: Response) => {
*/ */
export const getTatBreachReport = async (req: Request, res: Response) => { export const getTatBreachReport = async (req: Request, res: Response) => {
try { try {
const breaches = await sequelize.query(` const breaches = await TatAlert.aggregate([
SELECT { $match: { isBreached: true } },
ta.alert_id, { $sort: { alertSentAt: -1 } },
ta.request_id, { $limit: 100 },
w.request_number, // Lookup WorkflowRequest
w.title as request_title, {
w.priority, $lookup: {
al.level_number, from: 'workflow_requests',
al.approver_name, localField: 'requestId',
ta.tat_hours_allocated, foreignField: 'requestId',
ta.tat_hours_elapsed, as: 'request'
ta.alert_sent_at, }
ta.completion_time, },
ta.was_completed_on_time, { $unwind: { path: '$request', preserveNullAndEmptyArrays: true } },
CASE // Lookup ApprovalLevel
WHEN ta.completion_time IS NULL THEN 'Still Pending' {
WHEN ta.was_completed_on_time = false THEN 'Completed Late' $lookup: {
ELSE 'Completed On Time' from: 'approval_levels',
END as completion_status localField: 'levelId',
FROM tat_alerts ta foreignField: 'levelId',
JOIN workflow_requests w ON ta.request_id = w.request_id as: 'level'
JOIN approval_levels al ON ta.level_id = al.level_id }
WHERE ta.is_breached = true },
ORDER BY ta.alert_sent_at DESC { $unwind: { path: '$level', preserveNullAndEmptyArrays: true } },
LIMIT 100 {
`, { type: QueryTypes.SELECT }); $project: {
alert_id: '$_id',
request_id: '$requestId',
request_number: '$request.requestNumber',
request_title: '$request.title',
priority: '$request.priority',
level_number: '$level.levelNumber',
approver_name: '$level.approverName',
tat_hours_allocated: '$tatHoursAllocated',
tat_hours_elapsed: '$tatHoursElapsed',
alert_sent_at: '$alertSentAt',
completion_time: '$completionTime',
was_completed_on_time: '$wasCompletedOnTime',
completion_status: {
$switch: {
branches: [
{ case: { $eq: ['$completionTime', null] }, then: 'Still Pending' },
{ case: { $eq: ['$wasCompletedOnTime', false] }, then: 'Completed Late' }
],
default: 'Completed On Time'
}
}
}
}
]);
res.json({ res.json({
success: true, success: true,
data: breaches data: breaches
@ -184,7 +243,9 @@ export const updateBreachReason = async (req: Request, res: Response) => {
} }
// Get the approval level to verify permissions // Get the approval level to verify permissions
const level = await ApprovalLevel.findByPk(levelId); // Note: levelId in params likely refers to the level document UUID
const level = await ApprovalLevel.findOne({ levelId }); // Use findOne with levelId custom ID
if (!level) { if (!level) {
return res.status(404).json({ return res.status(404).json({
success: false, success: false,
@ -193,7 +254,7 @@ export const updateBreachReason = async (req: Request, res: Response) => {
} }
// Get user to check role // Get user to check role
const user = await User.findByPk(userId); const user = await UserModel.findOne({ userId });
if (!user) { if (!user) {
return res.status(404).json({ return res.status(404).json({
success: false, success: false,
@ -201,13 +262,13 @@ export const updateBreachReason = async (req: Request, res: Response) => {
}); });
} }
const userRole = (user as any).role; const userRole = user.role;
const approverId = (level as any).approverId; const approverId = (level as any).approverId || (level.approver ? level.approver.userId : null);
// Check permissions: ADMIN, MANAGEMENT, or the approver // Check permissions: ADMIN, MANAGEMENT, or the approver
const hasPermission = const hasPermission =
userRole === 'ADMIN' || userRole === 'ADMIN' ||
userRole === 'MANAGEMENT' || userRole === 'MANAGEMENT' ||
approverId === userId; approverId === userId;
if (!hasPermission) { if (!hasPermission) {
@ -218,28 +279,25 @@ export const updateBreachReason = async (req: Request, res: Response) => {
} }
// Get user details for activity logging // Get user details for activity logging
const userDisplayName = (user as any).displayName || (user as any).email || 'Unknown User'; const userDisplayName = user.displayName || user.email || 'Unknown User';
const isUpdate = !!(level as any).breachReason; // Check if this is an update or first time const isUpdate = !!(level as any).breachReason; // Check if this is an update or first time
const levelNumber = (level as any).levelNumber; const levelNumber = (level as any).levelNumber;
const approverName = (level as any).approverName || 'Unknown Approver'; const approverName = (level as any).approverName || (level.approver ? level.approver.name : 'Unknown Approver');
// Update breach reason directly in approval_levels table // Update breach reason directly in approval_levels
await level.update({ // Mongoose update
breachReason: breachReason.trim() (level as any).breachReason = breachReason.trim();
}); await level.save();
// Reload to get updated data
await level.reload();
// Log activity for the request // Log activity for the request
const userRoleLabel = userRole === 'ADMIN' ? 'Admin' : userRole === 'MANAGEMENT' ? 'Management' : 'Approver'; const userRoleLabel = userRole === 'ADMIN' ? 'Admin' : userRole === 'MANAGEMENT' ? 'Management' : 'Approver';
await activityService.log({ await activityService.log({
requestId: level.requestId, requestId: level.requestId,
type: 'comment', // Using comment type for breach reason entry type: 'comment', // Using comment type for breach reason entry
user: { user: {
userId: userId, userId: userId,
name: userDisplayName, name: userDisplayName,
email: (user as any).email email: user.email
}, },
timestamp: new Date().toISOString(), timestamp: new Date().toISOString(),
action: isUpdate ? 'Updated TAT breach reason' : 'Added TAT breach reason', action: isUpdate ? 'Updated TAT breach reason' : 'Added TAT breach reason',
@ -280,29 +338,53 @@ export const updateBreachReason = async (req: Request, res: Response) => {
export const getApproverTatPerformance = async (req: Request, res: Response) => { export const getApproverTatPerformance = async (req: Request, res: Response) => {
try { try {
const { approverId } = req.params; const { approverId } = req.params;
const performance = await sequelize.query(` const performance = await TatAlert.aggregate([
SELECT { $match: { approverId: approverId } },
COUNT(DISTINCT ta.level_id) as total_approvals, {
COUNT(CASE WHEN ta.alert_type = 'TAT_50' THEN 1 END) as alerts_50_received, $group: {
COUNT(CASE WHEN ta.alert_type = 'TAT_75' THEN 1 END) as alerts_75_received, _id: null,
COUNT(CASE WHEN ta.is_breached = true THEN 1 END) as breaches, total_approvals: { $addToSet: '$levelId' }, // Count distinct levels? Or count alerts? Query said count distinct level_id.
AVG(ta.tat_hours_elapsed) as avg_hours_taken, alerts_50_received: { $sum: { $cond: [{ $eq: ['$alertType', 'TAT_50'] }, 1, 0] } },
ROUND( alerts_75_received: { $sum: { $cond: [{ $eq: ['$alertType', 'TAT_75'] }, 1, 0] } },
COUNT(CASE WHEN ta.was_completed_on_time = true THEN 1 END) * 100.0 / breaches: { $sum: { $cond: [{ $eq: ['$isBreached', true] }, 1, 0] } },
NULLIF(COUNT(CASE WHEN ta.was_completed_on_time IS NOT NULL THEN 1 END), 0), min_hours: { $min: '$tatHoursElapsed' }, // Helper to ensure avg works if field exists
2 tatHoursElapsedSum: { $sum: '$tatHoursElapsed' },
) as compliance_rate tatHoursElapsedCount: { $sum: 1 },
FROM tat_alerts ta
WHERE ta.approver_id = :approverId completed_on_time: { $sum: { $cond: [{ $eq: ['$wasCompletedOnTime', true] }, 1, 0] } },
`, { completed_total: { $sum: { $cond: [{ $ne: ['$wasCompletedOnTime', null] }, 1, 0] } }
replacements: { approverId }, }
type: QueryTypes.SELECT },
}); {
$project: {
_id: 0,
total_approvals: { $size: '$total_approvals' },
alerts_50_received: 1,
alerts_75_received: 1,
breaches: 1,
avg_hours_taken: { $divide: ['$tatHoursElapsedSum', '$tatHoursElapsedCount'] },
compliance_rate: {
$cond: [
{ $eq: ['$completed_total', 0] },
0,
{ $round: [{ $multiply: [{ $divide: ['$completed_on_time', '$completed_total'] }, 100] }, 2] }
]
}
}
}
]);
res.json({ res.json({
success: true, success: true,
data: performance[0] || {} data: performance[0] || {
total_approvals: 0,
alerts_50_received: 0,
alerts_75_received: 0,
breaches: 0,
avg_hours_taken: 0,
compliance_rate: 0
}
}); });
} catch (error) { } catch (error) {
logger.error('[TAT Controller] Error fetching approver TAT performance:', error); logger.error('[TAT Controller] Error fetching approver TAT performance:', error);
@ -312,4 +394,3 @@ export const getApproverTatPerformance = async (req: Request, res: Response) =>
}); });
} }
}; };

View File

@ -158,6 +158,7 @@ export class TemplateController {
templateName, templateName,
templateDescription, templateDescription,
templateCategory, templateCategory,
workflowType, // Added
approvalLevelsConfig, approvalLevelsConfig,
defaultTatHours, defaultTatHours,
formStepsConfig, formStepsConfig,
@ -174,9 +175,10 @@ export class TemplateController {
} = req.body; } = req.body;
const template = await this.templateService.updateTemplate(templateId, userId, { const template = await this.templateService.updateTemplate(templateId, userId, {
templateName: templateName || name, name: templateName || name,
templateDescription: templateDescription || description, description: templateDescription || description,
templateCategory: templateCategory || category, department: templateCategory || category,
workflowType,
approvalLevelsConfig: approvalLevelsConfig || approvers, approvalLevelsConfig: approvalLevelsConfig || approvers,
defaultTatHours: (defaultTatHours || suggestedSLA) ? parseFloat(defaultTatHours || suggestedSLA) : undefined, defaultTatHours: (defaultTatHours || suggestedSLA) ? parseFloat(defaultTatHours || suggestedSLA) : undefined,
formStepsConfig, formStepsConfig,

View File

@ -10,37 +10,13 @@ export class UserController {
this.userService = new UserService(); this.userService = new UserService();
} }
async getAllUsers(req: Request, res: Response): Promise<void> {
try {
const users = await this.userService.getAllUsers();
const result = {
users: users.map(u => ({
userId: u.userId,
email: u.email,
displayName: u.displayName,
department: u.department,
designation: u.designation,
isActive: u.isActive,
})),
total: users.length
};
ResponseHandler.success(res, result, 'All users fetched');
} catch (error) {
logger.error('Failed to fetch all users', { error });
ResponseHandler.error(res, 'Failed to fetch all users', 500);
}
}
async searchUsers(req: Request, res: Response): Promise<void> { async searchUsers(req: Request, res: Response): Promise<void> {
try { try {
const q = String(req.query.q || '').trim(); const q = String(req.query.q || '').trim();
const limit = Number(req.query.limit || 10); const limit = Number(req.query.limit || 10);
const source = String(req.query.source || 'default') as 'local' | 'okta' | 'default';
const currentUserId = (req as any).user?.userId || (req as any).user?.id; const currentUserId = (req as any).user?.userId || (req as any).user?.id;
const users = await this.userService.searchUsers(q, limit, currentUserId, source); const users = await this.userService.searchUsers(q, limit, currentUserId);
const result = users.map(u => ({ const result = users.map(u => ({
userId: (u as any).userId, userId: (u as any).userId,
@ -93,31 +69,6 @@ export class UserController {
} }
} }
async getUserById(req: Request, res: Response): Promise<void> {
try {
const { userId } = req.params;
const user = await this.userService.getUserById(userId);
if (!user) {
ResponseHandler.error(res, 'User not found', 404);
return;
}
ResponseHandler.success(res, {
userId: user.userId,
email: user.email,
displayName: user.displayName,
firstName: user.firstName,
lastName: user.lastName,
department: user.department,
isActive: user.isActive
}, 'User fetched');
} catch (error) {
logger.error('Failed to fetch user by ID', { error });
ResponseHandler.error(res, 'Failed to fetch user by ID', 500);
}
}
/** /**
* Ensure user exists in database (create if not exists) * Ensure user exists in database (create if not exists)
* Called when user is selected/tagged in the frontend * Called when user is selected/tagged in the frontend

View File

@ -1,5 +1,5 @@
import { Request, Response } from 'express'; import { Request, Response } from 'express';
import { User } from '@models/User'; import { UserModel } from '../models/mongoose/User.schema';
import { updateNotificationPreferencesSchema } from '@validators/userPreference.validator'; import { updateNotificationPreferencesSchema } from '@validators/userPreference.validator';
import logger from '@utils/logger'; import logger from '@utils/logger';
@ -10,14 +10,7 @@ export const getNotificationPreferences = async (req: Request, res: Response): P
try { try {
const userId = req.user!.userId; const userId = req.user!.userId;
const user = await User.findByPk(userId, { const user = await UserModel.findOne({ userId });
attributes: [
'userId',
'emailNotificationsEnabled',
'pushNotificationsEnabled',
'inAppNotificationsEnabled'
]
});
if (!user) { if (!user) {
res.status(404).json({ res.status(404).json({
@ -32,9 +25,9 @@ export const getNotificationPreferences = async (req: Request, res: Response): P
res.json({ res.json({
success: true, success: true,
data: { data: {
emailNotificationsEnabled: user.emailNotificationsEnabled, emailNotificationsEnabled: user.notifications?.email ?? true,
pushNotificationsEnabled: user.pushNotificationsEnabled, pushNotificationsEnabled: user.notifications?.push ?? true,
inAppNotificationsEnabled: user.inAppNotificationsEnabled inAppNotificationsEnabled: user.notifications?.inApp ?? true
} }
}); });
} catch (error: any) { } catch (error: any) {
@ -57,7 +50,7 @@ export const updateNotificationPreferences = async (req: Request, res: Response)
// Validate request body // Validate request body
const validated = updateNotificationPreferencesSchema.parse(req.body); const validated = updateNotificationPreferencesSchema.parse(req.body);
const user = await User.findByPk(userId); const user = await UserModel.findOne({ userId });
if (!user) { if (!user) {
res.status(404).json({ res.status(404).json({
@ -67,29 +60,32 @@ export const updateNotificationPreferences = async (req: Request, res: Response)
return; return;
} }
// Update only provided fields // Update only provided fields in nested notifications object
const updateData: any = {}; if (!user.notifications) {
user.notifications = { email: true, push: true, inApp: true };
}
if (validated.emailNotificationsEnabled !== undefined) { if (validated.emailNotificationsEnabled !== undefined) {
updateData.emailNotificationsEnabled = validated.emailNotificationsEnabled; user.notifications.email = validated.emailNotificationsEnabled;
} }
if (validated.pushNotificationsEnabled !== undefined) { if (validated.pushNotificationsEnabled !== undefined) {
updateData.pushNotificationsEnabled = validated.pushNotificationsEnabled; user.notifications.push = validated.pushNotificationsEnabled;
} }
if (validated.inAppNotificationsEnabled !== undefined) { if (validated.inAppNotificationsEnabled !== undefined) {
updateData.inAppNotificationsEnabled = validated.inAppNotificationsEnabled; user.notifications.inApp = validated.inAppNotificationsEnabled;
} }
await user.update(updateData); await user.save();
logger.info(`[UserPreference] Updated notification preferences for user ${userId}:`, updateData); logger.info(`[UserPreference] Updated notification preferences for user ${userId}`);
res.json({ res.json({
success: true, success: true,
message: 'Notification preferences updated successfully', message: 'Notification preferences updated successfully',
data: { data: {
emailNotificationsEnabled: user.emailNotificationsEnabled, emailNotificationsEnabled: user.notifications.email,
pushNotificationsEnabled: user.pushNotificationsEnabled, pushNotificationsEnabled: user.notifications.push,
inAppNotificationsEnabled: user.inAppNotificationsEnabled inAppNotificationsEnabled: user.notifications.inApp
} }
}); });
} catch (error: any) { } catch (error: any) {
@ -110,4 +106,3 @@ export const updateNotificationPreferences = async (req: Request, res: Response)
}); });
} }
}; };

Some files were not shown because too many files have changed in this diff Show More