Compare commits

..

7 Commits

32 changed files with 1331 additions and 947 deletions

View File

@ -1,2 +1,2 @@
import{a as s}from"./index-F9w_cZ47.js";import"./radix-vendor-DIkYAdWy.js";import"./charts-vendor-Bme4E5cb.js";import"./utils-vendor-DNMmNUQL.js";import"./ui-vendor-sjs6YRoy.js";import"./socket-vendor-TjCxX7sJ.js";import"./redux-vendor-tbZCm13o.js";import"./router-vendor-AvM4PHvP.js";async function m(n){return(await s.post(`/conclusions/${n}/generate`)).data.data}async function f(n,t){return(await s.post(`/conclusions/${n}/finalize`,{finalRemark:t})).data.data}async function d(n){var t;try{return(await s.get(`/conclusions/${n}`)).data.data}catch(o){if(((t=o.response)==null?void 0:t.status)===404)return null;throw o}}export{f as finalizeConclusion,m as generateConclusion,d as getConclusion};
//# sourceMappingURL=conclusionApi-BIX8LEl5.js.map
import{a as s}from"./index-D-8iFw5e.js";import"./radix-vendor-DIkYAdWy.js";import"./charts-vendor-Bme4E5cb.js";import"./utils-vendor-DNMmNUQL.js";import"./ui-vendor-DbB0YGPu.js";import"./socket-vendor-TjCxX7sJ.js";import"./redux-vendor-tbZCm13o.js";import"./router-vendor-B1UBYWWO.js";async function m(n){return(await s.post(`/conclusions/${n}/generate`)).data.data}async function f(n,t){return(await s.post(`/conclusions/${n}/finalize`,{finalRemark:t})).data.data}async function d(n){var t;try{return(await s.get(`/conclusions/${n}`)).data.data}catch(o){if(((t=o.response)==null?void 0:t.status)===404)return null;throw o}}export{f as finalizeConclusion,m as generateConclusion,d as getConclusion};
//# sourceMappingURL=conclusionApi-DFaefruY.js.map

View File

@ -1 +1 @@
{"version":3,"file":"conclusionApi-BIX8LEl5.js","sources":["../../src/services/conclusionApi.ts"],"sourcesContent":["import apiClient from './authApi';\r\n\r\nexport interface ConclusionRemark {\r\n conclusionId: string;\r\n requestId: string;\r\n aiGeneratedRemark: string | null;\r\n aiModelUsed: string | null;\r\n aiConfidenceScore: number | null;\r\n finalRemark: string | null;\r\n editedBy: string | null;\r\n isEdited: boolean;\r\n editCount: number;\r\n approvalSummary: any;\r\n documentSummary: any;\r\n keyDiscussionPoints: string[];\r\n generatedAt: string | null;\r\n finalizedAt: string | null;\r\n createdAt: string;\r\n updatedAt: string;\r\n}\r\n\r\n/**\r\n * Generate AI-powered conclusion remark\r\n */\r\nexport async function generateConclusion(requestId: string): Promise<{\r\n conclusionId: string;\r\n aiGeneratedRemark: string;\r\n keyDiscussionPoints: string[];\r\n confidence: number;\r\n generatedAt: string;\r\n}> {\r\n const response = await apiClient.post(`/conclusions/${requestId}/generate`);\r\n return response.data.data;\r\n}\r\n\r\n/**\r\n * Update conclusion remark (edit by initiator)\r\n */\r\nexport async function updateConclusion(requestId: string, finalRemark: string): Promise<ConclusionRemark> {\r\n const response = await apiClient.put(`/conclusions/${requestId}`, { finalRemark });\r\n return response.data.data;\r\n}\r\n\r\n/**\r\n * Finalize conclusion and close request\r\n */\r\nexport async function finalizeConclusion(requestId: string, finalRemark: string): Promise<{\r\n conclusionId: string;\r\n requestNumber: string;\r\n status: string;\r\n finalRemark: string;\r\n finalizedAt: string;\r\n}> {\r\n const response = await apiClient.post(`/conclusions/${requestId}/finalize`, { finalRemark });\r\n return response.data.data;\r\n}\r\n\r\n/**\r\n * Get conclusion for a request\r\n * Returns null if conclusion doesn't exist (404) instead of throwing error\r\n */\r\nexport async function getConclusion(requestId: string): Promise<ConclusionRemark | null> {\r\n try {\r\n const response = await apiClient.get(`/conclusions/${requestId}`);\r\n return response.data.data;\r\n } catch (error: any) {\r\n // Handle 404 gracefully - conclusion doesn't exist yet, which is normal\r\n if (error.response?.status === 404) {\r\n return null;\r\n }\r\n // Re-throw other errors\r\n throw error;\r\n }\r\n}\r\n\r\n"],"names":["generateConclusion","requestId","apiClient","finalizeConclusion","finalRemark","getConclusion","error","_a"],"mappings":"6RAwBA,eAAsBA,EAAmBC,EAMtC,CAED,OADiB,MAAMC,EAAU,KAAK,gBAAgBD,CAAS,WAAW,GAC1D,KAAK,IACvB,CAaA,eAAsBE,EAAmBF,EAAmBG,EAMzD,CAED,OADiB,MAAMF,EAAU,KAAK,gBAAgBD,CAAS,YAAa,CAAE,YAAAG,EAAa,GAC3E,KAAK,IACvB,CAMA,eAAsBC,EAAcJ,EAAqD,OACvF,GAAI,CAEF,OADiB,MAAMC,EAAU,IAAI,gBAAgBD,CAAS,EAAE,GAChD,KAAK,IACvB,OAASK,EAAY,CAEnB,KAAIC,EAAAD,EAAM,WAAN,YAAAC,EAAgB,UAAW,IAC7B,OAAO,KAGT,MAAMD,CACR,CACF"}
{"version":3,"file":"conclusionApi-DFaefruY.js","sources":["../../src/services/conclusionApi.ts"],"sourcesContent":["import apiClient from './authApi';\r\n\r\nexport interface ConclusionRemark {\r\n conclusionId: string;\r\n requestId: string;\r\n aiGeneratedRemark: string | null;\r\n aiModelUsed: string | null;\r\n aiConfidenceScore: number | null;\r\n finalRemark: string | null;\r\n editedBy: string | null;\r\n isEdited: boolean;\r\n editCount: number;\r\n approvalSummary: any;\r\n documentSummary: any;\r\n keyDiscussionPoints: string[];\r\n generatedAt: string | null;\r\n finalizedAt: string | null;\r\n createdAt: string;\r\n updatedAt: string;\r\n}\r\n\r\n/**\r\n * Generate AI-powered conclusion remark\r\n */\r\nexport async function generateConclusion(requestId: string): Promise<{\r\n conclusionId: string;\r\n aiGeneratedRemark: string;\r\n keyDiscussionPoints: string[];\r\n confidence: number;\r\n generatedAt: string;\r\n}> {\r\n const response = await apiClient.post(`/conclusions/${requestId}/generate`);\r\n return response.data.data;\r\n}\r\n\r\n/**\r\n * Update conclusion remark (edit by initiator)\r\n */\r\nexport async function updateConclusion(requestId: string, finalRemark: string): Promise<ConclusionRemark> {\r\n const response = await apiClient.put(`/conclusions/${requestId}`, { finalRemark });\r\n return response.data.data;\r\n}\r\n\r\n/**\r\n * Finalize conclusion and close request\r\n */\r\nexport async function finalizeConclusion(requestId: string, finalRemark: string): Promise<{\r\n conclusionId: string;\r\n requestNumber: string;\r\n status: string;\r\n finalRemark: string;\r\n finalizedAt: string;\r\n}> {\r\n const response = await apiClient.post(`/conclusions/${requestId}/finalize`, { finalRemark });\r\n return response.data.data;\r\n}\r\n\r\n/**\r\n * Get conclusion for a request\r\n * Returns null if conclusion doesn't exist (404) instead of throwing error\r\n */\r\nexport async function getConclusion(requestId: string): Promise<ConclusionRemark | null> {\r\n try {\r\n const response = await apiClient.get(`/conclusions/${requestId}`);\r\n return response.data.data;\r\n } catch (error: any) {\r\n // Handle 404 gracefully - conclusion doesn't exist yet, which is normal\r\n if (error.response?.status === 404) {\r\n return null;\r\n }\r\n // Re-throw other errors\r\n throw error;\r\n }\r\n}\r\n\r\n"],"names":["generateConclusion","requestId","apiClient","finalizeConclusion","finalRemark","getConclusion","error","_a"],"mappings":"6RAwBA,eAAsBA,EAAmBC,EAMtC,CAED,OADiB,MAAMC,EAAU,KAAK,gBAAgBD,CAAS,WAAW,GAC1D,KAAK,IACvB,CAaA,eAAsBE,EAAmBF,EAAmBG,EAMzD,CAED,OADiB,MAAMF,EAAU,KAAK,gBAAgBD,CAAS,YAAa,CAAE,YAAAG,EAAa,GAC3E,KAAK,IACvB,CAMA,eAAsBC,EAAcJ,EAAqD,OACvF,GAAI,CAEF,OADiB,MAAMC,EAAU,IAAI,gBAAgBD,CAAS,EAAE,GAChD,KAAK,IACvB,OAASK,EAAY,CAEnB,KAAIC,EAAAD,EAAM,WAAN,YAAAC,EAAgB,UAAW,IAC7B,OAAO,KAGT,MAAMD,CACR,CACF"}

File diff suppressed because one or more lines are too long (11 files)

View File

@ -52,15 +52,15 @@
transition: transform 0.2s ease;
}
</style>
<script type="module" crossorigin src="/assets/index-F9w_cZ47.js"></script>
<script type="module" crossorigin src="/assets/index-D-8iFw5e.js"></script>
<link rel="modulepreload" crossorigin href="/assets/charts-vendor-Bme4E5cb.js">
<link rel="modulepreload" crossorigin href="/assets/radix-vendor-DIkYAdWy.js">
<link rel="modulepreload" crossorigin href="/assets/utils-vendor-DNMmNUQL.js">
<link rel="modulepreload" crossorigin href="/assets/ui-vendor-sjs6YRoy.js">
<link rel="modulepreload" crossorigin href="/assets/ui-vendor-DbB0YGPu.js">
<link rel="modulepreload" crossorigin href="/assets/socket-vendor-TjCxX7sJ.js">
<link rel="modulepreload" crossorigin href="/assets/redux-vendor-tbZCm13o.js">
<link rel="modulepreload" crossorigin href="/assets/router-vendor-AvM4PHvP.js">
<link rel="stylesheet" crossorigin href="/assets/index-CPRbj7YF.css">
<link rel="modulepreload" crossorigin href="/assets/router-vendor-B1UBYWWO.js">
<link rel="stylesheet" crossorigin href="/assets/index-B-mLDzJe.css">
</head>
<body>
<div id="root"></div>

View File

@ -4,8 +4,8 @@
"description": "Royal Enfield Workflow Management System - Backend API (TypeScript)",
"main": "dist/server.js",
"scripts": {
"start": "npm install && npm run setup && npm run build && npm run start:prod",
"dev": "npm run setup && npm run migrate && nodemon --exec ts-node -r tsconfig-paths/register src/server.ts",
"start": "npm run build && npm run start:prod && npm run setup",
"dev": "npm run setup && nodemon --exec ts-node -r tsconfig-paths/register src/server.ts",
"dev:no-setup": "nodemon --exec ts-node -r tsconfig-paths/register src/server.ts",
"build": "tsc && tsc-alias",
"build:watch": "tsc --watch",
@ -92,4 +92,4 @@
"node": ">=22.0.0",
"npm": ">=10.0.0"
}
}
}

View File

@ -16,17 +16,7 @@ import path from 'path';
// Load environment variables from .env file first
dotenv.config();
// Initialize Google Secret Manager (async, but we'll wait for it in server.ts)
// This will merge secrets from GCS into process.env if USE_GOOGLE_SECRET_MANAGER=true
// Export initialization function so server.ts can await it before starting
export async function initializeSecrets(): Promise<void> {
try {
await initializeGoogleSecretManager();
} catch (error) {
// Log error but don't throw - allow fallback to .env
console.error('⚠️ Failed to initialize Google Secret Manager, using .env file:', error);
}
}
// Secrets are now initialized in server.ts before app is imported
const app: express.Application = express();
const userService = new UserService();
@ -123,8 +113,8 @@ app.use(createMetricsRouter());
// Health check endpoint (before API routes)
app.get('/health', (_req: express.Request, res: express.Response) => {
res.status(200).json({
status: 'OK',
res.status(200).json({
status: 'OK',
timestamp: new Date(),
uptime: process.uptime(),
environment: process.env.NODE_ENV || 'development'
@ -142,7 +132,7 @@ app.use('/uploads', express.static(UPLOAD_DIR));
app.post('/api/v1/auth/sso-callback', async (req: express.Request, res: express.Response): Promise<void> => {
try {
const ssoData: SSOUserData = req.body;
// Validate required fields - email and oktaSub are required
if (!ssoData.email || !ssoData.oktaSub) {
res.status(400).json({
@ -155,7 +145,7 @@ app.post('/api/v1/auth/sso-callback', async (req: express.Request, res: express.
// Create or update user
const user = await userService.createOrUpdateUser(ssoData);
res.status(200).json({
success: true,
message: 'User processed successfully',
@ -193,7 +183,7 @@ app.post('/api/v1/auth/sso-callback', async (req: express.Request, res: express.
app.get('/api/v1/users', async (_req: express.Request, res: express.Response): Promise<void> => {
try {
const users = await userService.getAllUsers();
res.status(200).json({
success: true,
message: 'Users retrieved successfully',
@ -254,7 +244,7 @@ if (reactBuildPath && fs.existsSync(path.join(reactBuildPath, "index.html"))) {
}
}
}));
// Catch-all handler: serve React app for all non-API routes
// This must be AFTER all API routes to avoid intercepting API requests
app.get('*', (req: express.Request, res: express.Response): void => {
@ -267,7 +257,7 @@ if (reactBuildPath && fs.existsSync(path.join(reactBuildPath, "index.html"))) {
});
return;
}
// Serve React app for all other routes (SPA routing)
// This handles client-side routing in React Router
// CSP headers from Helmet will be applied to this response
@ -284,7 +274,7 @@ if (reactBuildPath && fs.existsSync(path.join(reactBuildPath, "index.html"))) {
note: 'React build not found. API is available at /api/v1'
});
});
// Standard 404 handler for non-existent routes
app.use((req: express.Request, res: express.Response): void => {
res.status(404).json({

View File

@ -19,6 +19,7 @@ export class TemplateController {
}
const {
// New fields
templateName,
templateCode,
templateDescription,
@ -30,20 +31,34 @@ export class TemplateController {
userFieldMappings,
dynamicApproverConfig,
isActive,
// Legacy fields (from frontend)
name,
description,
category,
approvers,
suggestedSLA
} = req.body;
if (!templateName) {
// Map legacy to new
const finalTemplateName = templateName || name;
const finalTemplateDescription = templateDescription || description;
const finalTemplateCategory = templateCategory || category;
const finalApprovalLevelsConfig = approvalLevelsConfig || approvers;
const finalDefaultTatHours = defaultTatHours || suggestedSLA;
if (!finalTemplateName) {
return ResponseHandler.error(res, 'Template name is required', 400);
}
const template = await this.templateService.createTemplate(userId, {
templateName,
templateName: finalTemplateName,
templateCode,
templateDescription,
templateCategory,
templateDescription: finalTemplateDescription,
templateCategory: finalTemplateCategory,
workflowType,
approvalLevelsConfig,
defaultTatHours: defaultTatHours ? parseFloat(defaultTatHours) : undefined,
approvalLevelsConfig: finalApprovalLevelsConfig,
defaultTatHours: finalDefaultTatHours ? parseFloat(finalDefaultTatHours) : undefined,
formStepsConfig,
userFieldMappings,
dynamicApproverConfig,
@ -149,14 +164,21 @@ export class TemplateController {
userFieldMappings,
dynamicApproverConfig,
isActive,
// Legacy
name,
description,
category,
approvers,
suggestedSLA
} = req.body;
const template = await this.templateService.updateTemplate(templateId, userId, {
templateName,
templateDescription,
templateCategory,
approvalLevelsConfig,
defaultTatHours: defaultTatHours ? parseFloat(defaultTatHours) : undefined,
templateName: templateName || name,
templateDescription: templateDescription || description,
templateCategory: templateCategory || category,
approvalLevelsConfig: approvalLevelsConfig || approvers,
defaultTatHours: (defaultTatHours || suggestedSLA) ? parseFloat(defaultTatHours || suggestedSLA) : undefined,
formStepsConfig,
userFieldMappings,
dynamicApproverConfig,

View File

@ -0,0 +1,130 @@
import { Request, Response } from 'express';
import { WorkflowTemplate } from '../models';
import logger from '../utils/logger';
export const createTemplate = async (req: Request, res: Response) => {
try {
const { name, description, category, priority, estimatedTime, approvers, suggestedSLA } = req.body;
const userId = (req as any).user?.userId;
const template = await WorkflowTemplate.create({
templateName: name,
templateDescription: description,
templateCategory: category,
approvalLevelsConfig: approvers,
defaultTatHours: suggestedSLA,
createdBy: userId,
isActive: true,
isSystemTemplate: false,
usageCount: 0
});
res.status(201).json({
success: true,
message: 'Workflow template created successfully',
data: template
});
} catch (error) {
logger.error('Error creating workflow template:', error);
res.status(500).json({
success: false,
message: 'Failed to create workflow template',
error: error instanceof Error ? error.message : 'Unknown error'
});
}
};
export const getTemplates = async (req: Request, res: Response) => {
try {
const templates = await WorkflowTemplate.findAll({
where: { isActive: true },
order: [['createdAt', 'DESC']]
});
res.status(200).json({
success: true,
data: templates
});
} catch (error) {
logger.error('Error fetching workflow templates:', error);
res.status(500).json({
success: false,
message: 'Failed to fetch workflow templates',
error: error instanceof Error ? error.message : 'Unknown error'
});
}
}
export const updateTemplate = async (req: Request, res: Response) => {
try {
const { id } = req.params;
const { name, description, category, approvers, suggestedSLA, isActive } = req.body;
const updates: any = {};
if (name) updates.templateName = name;
if (description) updates.templateDescription = description;
if (category) updates.templateCategory = category;
if (approvers) updates.approvalLevelsConfig = approvers;
if (suggestedSLA) updates.defaultTatHours = suggestedSLA;
if (isActive !== undefined) updates.isActive = isActive;
const template = await WorkflowTemplate.findByPk(id);
if (!template) {
return res.status(404).json({
success: false,
message: 'Workflow template not found'
});
}
await template.update(updates);
return res.status(200).json({
success: true,
message: 'Workflow template updated successfully',
data: template
});
} catch (error) {
logger.error('Error updating workflow template:', error);
return res.status(500).json({
success: false,
message: 'Failed to update workflow template',
error: error instanceof Error ? error.message : 'Unknown error'
});
}
};
export const deleteTemplate = async (req: Request, res: Response) => {
try {
const { id } = req.params;
const template = await WorkflowTemplate.findByPk(id);
if (!template) {
return res.status(404).json({
success: false,
message: 'Workflow template not found'
});
}
// Hard delete vs. soft delete: the isActive flag would allow a soft delete
// (isActive = false) that preserves history, but the UI's "Delete" action
// expects the record to be removed, so we hard delete via destroy().
// Any FK constraints are handled by Sequelize.
await template.destroy();
return res.status(200).json({
success: true,
message: 'Workflow template deleted successfully'
});
} catch (error) {
logger.error('Error deleting workflow template:', error);
return res.status(500).json({
success: false,
message: 'Failed to delete workflow template',
error: error instanceof Error ? error.message : 'Unknown error'
});
}
};

View File

@ -0,0 +1,115 @@
import { QueryInterface, DataTypes } from 'sequelize';
export async function up(queryInterface: QueryInterface): Promise<void> {
try {
const tableDescription = await queryInterface.describeTable('workflow_templates');
// 1. Rename id -> template_id
if (tableDescription.id && !tableDescription.template_id) {
console.log('Renaming id to template_id...');
await queryInterface.renameColumn('workflow_templates', 'id', 'template_id');
}
// 2. Rename name -> template_name
if (tableDescription.name && !tableDescription.template_name) {
console.log('Renaming name to template_name...');
await queryInterface.renameColumn('workflow_templates', 'name', 'template_name');
}
// 3. Rename description -> template_description
if (tableDescription.description && !tableDescription.template_description) {
console.log('Renaming description to template_description...');
await queryInterface.renameColumn('workflow_templates', 'description', 'template_description');
}
// 4. Rename category -> template_category
if (tableDescription.category && !tableDescription.template_category) {
console.log('Renaming category to template_category...');
await queryInterface.renameColumn('workflow_templates', 'category', 'template_category');
}
// 5. Rename suggested_sla -> default_tat_hours
if (tableDescription.suggested_sla && !tableDescription.default_tat_hours) {
console.log('Renaming suggested_sla to default_tat_hours...');
await queryInterface.renameColumn('workflow_templates', 'suggested_sla', 'default_tat_hours');
}
// 6. Add missing columns
if (!tableDescription.template_code) {
console.log('Adding template_code column...');
await queryInterface.addColumn('workflow_templates', 'template_code', {
type: DataTypes.STRING(50),
allowNull: true,
unique: true
});
}
if (!tableDescription.workflow_type) {
console.log('Adding workflow_type column...');
await queryInterface.addColumn('workflow_templates', 'workflow_type', {
type: DataTypes.STRING(50),
allowNull: true
});
}
if (!tableDescription.approval_levels_config) {
console.log('Adding approval_levels_config column...');
await queryInterface.addColumn('workflow_templates', 'approval_levels_config', {
type: DataTypes.JSONB,
allowNull: true
});
}
if (!tableDescription.form_steps_config) {
console.log('Adding form_steps_config column...');
await queryInterface.addColumn('workflow_templates', 'form_steps_config', {
type: DataTypes.JSONB,
allowNull: true
});
}
if (!tableDescription.user_field_mappings) {
console.log('Adding user_field_mappings column...');
await queryInterface.addColumn('workflow_templates', 'user_field_mappings', {
type: DataTypes.JSONB,
allowNull: true
});
}
if (!tableDescription.dynamic_approver_config) {
console.log('Adding dynamic_approver_config column...');
await queryInterface.addColumn('workflow_templates', 'dynamic_approver_config', {
type: DataTypes.JSONB,
allowNull: true
});
}
if (!tableDescription.is_system_template) {
console.log('Adding is_system_template column...');
await queryInterface.addColumn('workflow_templates', 'is_system_template', {
type: DataTypes.BOOLEAN,
allowNull: false,
defaultValue: false
});
}
if (!tableDescription.usage_count) {
console.log('Adding usage_count column...');
await queryInterface.addColumn('workflow_templates', 'usage_count', {
type: DataTypes.INTEGER,
allowNull: false,
defaultValue: 0
});
}
console.log('✅ Schema validation/fix complete');
} catch (error) {
console.error('Error in schema fix migration:', error);
throw error;
}
}
export async function down(queryInterface: QueryInterface): Promise<void> {
// Reverting is complex and risky; intentionally skipped for this fix-forward migration
}

View File

@ -1,180 +1,177 @@
import { DataTypes, Model, Optional } from 'sequelize';
import { sequelize } from '@config/database';
import { sequelize } from '../config/database';
import { User } from './User';
interface WorkflowTemplateAttributes {
templateId: string;
templateName: string;
templateCode?: string;
templateDescription?: string;
templateCategory?: string;
workflowType?: string;
approvalLevelsConfig?: any;
defaultTatHours?: number;
formStepsConfig?: any;
userFieldMappings?: any;
dynamicApproverConfig?: any;
isActive: boolean;
isSystemTemplate: boolean;
usageCount: number;
createdBy?: string;
createdAt: Date;
updatedAt: Date;
templateId: string;
templateName: string;
templateCode?: string;
templateDescription?: string;
templateCategory?: string;
workflowType?: string;
approvalLevelsConfig?: any;
defaultTatHours?: number;
formStepsConfig?: any;
userFieldMappings?: any;
dynamicApproverConfig?: any;
isActive: boolean;
isSystemTemplate: boolean;
usageCount: number;
createdBy?: string;
createdAt: Date;
updatedAt: Date;
}
interface WorkflowTemplateCreationAttributes extends Optional<WorkflowTemplateAttributes, 'templateId' | 'templateCode' | 'templateDescription' | 'templateCategory' | 'workflowType' | 'approvalLevelsConfig' | 'defaultTatHours' | 'formStepsConfig' | 'userFieldMappings' | 'dynamicApproverConfig' | 'createdBy' | 'createdAt' | 'updatedAt'> {}
interface WorkflowTemplateCreationAttributes extends Optional<WorkflowTemplateAttributes, 'templateId' | 'templateCode' | 'templateDescription' | 'templateCategory' | 'workflowType' | 'approvalLevelsConfig' | 'defaultTatHours' | 'formStepsConfig' | 'userFieldMappings' | 'dynamicApproverConfig' | 'createdBy' | 'createdAt' | 'updatedAt'> { }
class WorkflowTemplate extends Model<WorkflowTemplateAttributes, WorkflowTemplateCreationAttributes> implements WorkflowTemplateAttributes {
public templateId!: string;
public templateName!: string;
public templateCode?: string;
public templateDescription?: string;
public templateCategory?: string;
public workflowType?: string;
public approvalLevelsConfig?: any;
public defaultTatHours?: number;
public formStepsConfig?: any;
public userFieldMappings?: any;
public dynamicApproverConfig?: any;
public isActive!: boolean;
public isSystemTemplate!: boolean;
public usageCount!: number;
public createdBy?: string;
public createdAt!: Date;
public updatedAt!: Date;
export class WorkflowTemplate extends Model<WorkflowTemplateAttributes, WorkflowTemplateCreationAttributes> implements WorkflowTemplateAttributes {
public templateId!: string;
public templateName!: string;
public templateCode?: string;
public templateDescription?: string;
public templateCategory?: string;
public workflowType?: string;
public approvalLevelsConfig?: any;
public defaultTatHours?: number;
public formStepsConfig?: any;
public userFieldMappings?: any;
public dynamicApproverConfig?: any;
public isActive!: boolean;
public isSystemTemplate!: boolean;
public usageCount!: number;
public createdBy?: string;
public createdAt!: Date;
public updatedAt!: Date;
// Associations
public creator?: User;
// Associations
public creator?: User;
}
WorkflowTemplate.init(
{
templateId: {
type: DataTypes.UUID,
defaultValue: DataTypes.UUIDV4,
primaryKey: true,
field: 'template_id'
{
templateId: {
type: DataTypes.UUID,
defaultValue: DataTypes.UUIDV4,
primaryKey: true,
field: 'template_id'
},
templateName: {
type: DataTypes.STRING(200),
allowNull: false,
field: 'template_name'
},
templateCode: {
type: DataTypes.STRING(50),
allowNull: true,
unique: true,
field: 'template_code'
},
templateDescription: {
type: DataTypes.TEXT,
allowNull: true,
field: 'template_description'
},
templateCategory: {
type: DataTypes.STRING(100),
allowNull: true,
field: 'template_category'
},
workflowType: {
type: DataTypes.STRING(50),
allowNull: true,
field: 'workflow_type'
},
approvalLevelsConfig: {
type: DataTypes.JSONB,
allowNull: true,
field: 'approval_levels_config'
},
defaultTatHours: {
type: DataTypes.DECIMAL(10, 2),
allowNull: true,
defaultValue: 24,
field: 'default_tat_hours'
},
formStepsConfig: {
type: DataTypes.JSONB,
allowNull: true,
field: 'form_steps_config'
},
userFieldMappings: {
type: DataTypes.JSONB,
allowNull: true,
field: 'user_field_mappings'
},
dynamicApproverConfig: {
type: DataTypes.JSONB,
allowNull: true,
field: 'dynamic_approver_config'
},
isActive: {
type: DataTypes.BOOLEAN,
allowNull: false,
defaultValue: true,
field: 'is_active'
},
isSystemTemplate: {
type: DataTypes.BOOLEAN,
allowNull: false,
defaultValue: false,
field: 'is_system_template'
},
usageCount: {
type: DataTypes.INTEGER,
allowNull: false,
defaultValue: 0,
field: 'usage_count'
},
createdBy: {
type: DataTypes.UUID,
allowNull: true,
field: 'created_by',
references: {
model: 'users',
key: 'user_id'
}
},
createdAt: {
type: DataTypes.DATE,
allowNull: false,
defaultValue: DataTypes.NOW,
field: 'created_at'
},
updatedAt: {
type: DataTypes.DATE,
allowNull: false,
defaultValue: DataTypes.NOW,
field: 'updated_at'
}
},
templateName: {
type: DataTypes.STRING(200),
allowNull: false,
field: 'template_name'
},
templateCode: {
type: DataTypes.STRING(50),
allowNull: true,
unique: true,
field: 'template_code'
},
templateDescription: {
type: DataTypes.TEXT,
allowNull: true,
field: 'template_description'
},
templateCategory: {
type: DataTypes.STRING(100),
allowNull: true,
field: 'template_category'
},
workflowType: {
type: DataTypes.STRING(50),
allowNull: true,
field: 'workflow_type'
},
approvalLevelsConfig: {
type: DataTypes.JSONB,
allowNull: true,
field: 'approval_levels_config'
},
defaultTatHours: {
type: DataTypes.DECIMAL(10, 2),
allowNull: true,
defaultValue: 24,
field: 'default_tat_hours'
},
formStepsConfig: {
type: DataTypes.JSONB,
allowNull: true,
field: 'form_steps_config'
},
userFieldMappings: {
type: DataTypes.JSONB,
allowNull: true,
field: 'user_field_mappings'
},
dynamicApproverConfig: {
type: DataTypes.JSONB,
allowNull: true,
field: 'dynamic_approver_config'
},
isActive: {
type: DataTypes.BOOLEAN,
allowNull: false,
defaultValue: true,
field: 'is_active'
},
isSystemTemplate: {
type: DataTypes.BOOLEAN,
allowNull: false,
defaultValue: false,
field: 'is_system_template'
},
usageCount: {
type: DataTypes.INTEGER,
allowNull: false,
defaultValue: 0,
field: 'usage_count'
},
createdBy: {
type: DataTypes.UUID,
allowNull: true,
field: 'created_by',
references: {
model: 'users',
key: 'user_id'
}
},
createdAt: {
type: DataTypes.DATE,
allowNull: false,
defaultValue: DataTypes.NOW,
field: 'created_at'
},
updatedAt: {
type: DataTypes.DATE,
allowNull: false,
defaultValue: DataTypes.NOW,
field: 'updated_at'
{
sequelize,
modelName: 'WorkflowTemplate',
tableName: 'workflow_templates',
timestamps: true,
createdAt: 'created_at',
updatedAt: 'updated_at',
indexes: [
{
unique: true,
fields: ['template_code']
},
{
fields: ['workflow_type']
},
{
fields: ['is_active']
}
]
}
},
{
sequelize,
modelName: 'WorkflowTemplate',
tableName: 'workflow_templates',
timestamps: true,
createdAt: 'created_at',
updatedAt: 'updated_at',
indexes: [
{
unique: true,
fields: ['template_code']
},
{
fields: ['workflow_type']
},
{
fields: ['is_active']
}
]
}
);
// Associations
WorkflowTemplate.belongsTo(User, {
as: 'creator',
foreignKey: 'createdBy',
targetKey: 'userId'
as: 'creator',
foreignKey: 'createdBy',
targetKey: 'userId'
});
export { WorkflowTemplate };

View File

@ -20,12 +20,12 @@ import { DealerClaimDetails } from './DealerClaimDetails';
import { DealerProposalDetails } from './DealerProposalDetails';
import { DealerCompletionDetails } from './DealerCompletionDetails';
import { DealerProposalCostItem } from './DealerProposalCostItem';
import { WorkflowTemplate } from './WorkflowTemplate';
import { InternalOrder } from './InternalOrder';
import { ClaimBudgetTracking } from './ClaimBudgetTracking';
import { Dealer } from './Dealer';
import { ActivityType } from './ActivityType';
import { DealerClaimHistory } from './DealerClaimHistory';
import { WorkflowTemplate } from './WorkflowTemplate';
// Define associations
const defineAssociations = () => {
@ -170,11 +170,11 @@ export {
ConclusionRemark,
RequestSummary,
SharedSummary,
WorkflowTemplate,
DealerClaimDetails,
DealerProposalDetails,
DealerCompletionDetails,
DealerProposalCostItem,
WorkflowTemplate,
InternalOrder,
ClaimBudgetTracking,
Dealer,

View File

@ -0,0 +1,16 @@
import { Router } from 'express';
import { createTemplate, getTemplates, updateTemplate, deleteTemplate } from '../controllers/workflowTemplate.controller';
import { authenticateToken } from '../middlewares/auth.middleware';
import { requireAdmin } from '../middlewares/authorization.middleware';
const router = Router();
// Get templates (available to all authenticated users)
router.get('/', authenticateToken, getTemplates);
// Admin only route to create templates
router.post('/', authenticateToken, requireAdmin, createTemplate);
router.put('/:id', authenticateToken, requireAdmin, updateTemplate);
router.delete('/:id', authenticateToken, requireAdmin, deleteTemplate);
export default router;

View File

@ -11,8 +11,8 @@
*/
import { Client } from 'pg';
import { sequelize } from '../config/database';
import { QueryTypes } from 'sequelize';
import { initializeGoogleSecretManager } from '../services/googleSecretManager.service';
import { exec } from 'child_process';
import { promisify } from 'util';
import dotenv from 'dotenv';
@ -21,14 +21,15 @@ import path from 'path';
dotenv.config({ path: path.resolve(__dirname, '../../.env') });
const execAsync = promisify(exec);
const DB_HOST = process.env.DB_HOST || 'localhost';
const DB_PORT = parseInt(process.env.DB_PORT || '5432');
const DB_USER = process.env.DB_USER || 'postgres';
const DB_PASSWORD = process.env.DB_PASSWORD || '';
const DB_NAME = process.env.DB_NAME || 'royal_enfield_workflow';
// DB constants moved inside functions to ensure secrets are loaded first
async function checkAndCreateDatabase(): Promise<boolean> {
const DB_HOST = process.env.DB_HOST || 'localhost';
const DB_PORT = parseInt(process.env.DB_PORT || '5432');
const DB_USER = process.env.DB_USER || 'postgres';
const DB_PASSWORD = process.env.DB_PASSWORD || '';
const DB_NAME = process.env.DB_NAME || 'royal_enfield_workflow';
const client = new Client({
host: DB_HOST,
port: DB_PORT,
@ -49,13 +50,13 @@ async function checkAndCreateDatabase(): Promise<boolean> {
if (result.rows.length === 0) {
console.log(`📦 Database '${DB_NAME}' not found. Creating...`);
// Create database
await client.query(`CREATE DATABASE "${DB_NAME}"`);
console.log(`✅ Database '${DB_NAME}' created successfully!`);
await client.end();
// Connect to new database and install extensions
const newDbClient = new Client({
host: DB_HOST,
@ -64,13 +65,13 @@ async function checkAndCreateDatabase(): Promise<boolean> {
password: DB_PASSWORD,
database: DB_NAME,
});
await newDbClient.connect();
console.log('📦 Installing uuid-ossp extension...');
await newDbClient.query('CREATE EXTENSION IF NOT EXISTS "uuid-ossp"');
console.log('✅ Extension installed!');
await newDbClient.end();
return true; // Database was created
} else {
console.log(`✅ Database '${DB_NAME}' already exists.`);
@ -87,7 +88,7 @@ async function checkAndCreateDatabase(): Promise<boolean> {
async function runMigrations(): Promise<void> {
try {
console.log('🔄 Checking and running pending migrations...');
// Import all migrations using require for CommonJS compatibility
// Some migrations use module.exports, others use export
const m0 = require('../migrations/2025103000-create-users');
@ -136,7 +137,8 @@ async function runMigrations(): Promise<void> {
const m41 = require('../migrations/20250120-create-dealers-table');
const m42 = require('../migrations/20250125-create-activity-types');
const m43 = require('../migrations/20260113-redesign-dealer-claim-history');
const m44 = require('../migrations/20260123-fix-template-id-schema');
const migrations = [
{ name: '2025103000-create-users', module: m0 },
{ name: '2025103001-create-workflow-requests', module: m1 },
@ -184,10 +186,13 @@ async function runMigrations(): Promise<void> {
{ name: '20250120-create-dealers-table', module: m41 },
{ name: '20250125-create-activity-types', module: m42 },
{ name: '20260113-redesign-dealer-claim-history', module: m43 },
{ name: '20260123-fix-template-id-schema', module: m44 },
];
// Dynamically import sequelize after secrets are loaded
const { sequelize } = require('../config/database');
const queryInterface = sequelize.getQueryInterface();
// Ensure migrations tracking table exists
const tables = await queryInterface.showAllTables();
if (!tables.includes('migrations')) {
@ -199,34 +204,34 @@ async function runMigrations(): Promise<void> {
)
`);
}
// Get already executed migrations
const executedResults = await sequelize.query<{ name: string }>(
const executedResults = await sequelize.query(
'SELECT name FROM migrations ORDER BY id',
{ type: QueryTypes.SELECT }
);
) as { name: string }[];
const executedMigrations = executedResults.map(r => r.name);
// Find pending migrations
const pendingMigrations = migrations.filter(
m => !executedMigrations.includes(m.name)
);
if (pendingMigrations.length === 0) {
console.log('✅ Migrations up-to-date');
return;
}
console.log(`🔄 Running ${pendingMigrations.length} pending migration(s)...`);
// Run each pending migration
for (const migration of pendingMigrations) {
try {
console.log(`${migration.name}`);
// Call the up function - works for both module.exports and export styles
await migration.module.up(queryInterface);
// Mark as executed
await sequelize.query(
'INSERT INTO migrations (name) VALUES (:name) ON CONFLICT (name) DO NOTHING',
@ -241,7 +246,7 @@ async function runMigrations(): Promise<void> {
throw error;
}
}
console.log(`✅ Applied ${pendingMigrations.length} migration(s)`);
} catch (error: any) {
console.error('❌ Migration failed:', error.message);
@ -252,6 +257,7 @@ async function runMigrations(): Promise<void> {
async function testConnection(): Promise<void> {
try {
console.log('🔌 Testing database connection...');
const { sequelize } = require('../config/database');
await sequelize.authenticate();
console.log('✅ Database connection established!');
} catch (error: any) {
@ -266,6 +272,10 @@ async function autoSetup(): Promise<void> {
console.log('========================================\n');
try {
// Step 0: Initialize secrets
console.log('🔐 Initializing secrets...');
await initializeGoogleSecretManager();
// Step 1: Check and create database if needed
const wasCreated = await checkAndCreateDatabase();
@ -278,10 +288,13 @@ async function autoSetup(): Promise<void> {
console.log('\n========================================');
console.log('✅ Setup completed successfully!');
console.log('========================================\n');
console.log('📝 Note: Admin configurations will be auto-seeded on server start if table is empty.');
console.log('📝 Note: Dealers table will be empty - import dealers using CSV import script.\n');
console.log('📝 Note: Admin configurations will be auto-seeded on server start if table is empty.\n');
if (wasCreated) {
console.log('💡 Next steps:');
console.log(' 1. Server will start automatically');
@ -289,7 +302,7 @@ async function autoSetup(): Promise<void> {
console.log(' 3. Run this SQL to make yourself admin:');
console.log(` UPDATE users SET role = 'ADMIN' WHERE email = 'your-email@royalenfield.com';\n`);
}
} catch (error: any) {
console.error('\n========================================');
console.error('❌ Setup failed!');

View File

@ -0,0 +1,19 @@
import { sequelize } from '../config/database';
async function run() {
try {
await sequelize.authenticate();
console.log('✅ Connection established');
const tableDescription = await sequelize.getQueryInterface().describeTable('workflow_templates');
console.log('Current schema for workflow_templates:', JSON.stringify(tableDescription, null, 2));
} catch (error: any) {
console.error('❌ Error:', error.message);
} finally {
await sequelize.close();
}
}
run();

View File

@ -0,0 +1,31 @@
import { sequelize } from '../config/database';
import { up } from '../migrations/20260123-fix-template-id-schema';
async function forceRun() {
try {
await sequelize.authenticate();
console.log('✅ Connected to DB');
const queryInterface = sequelize.getQueryInterface();
// 1. Remove the record from the migrations table if it exists (to keep tracking clean)
await sequelize.query("DELETE FROM migrations WHERE name = '20260123-fix-template-id-schema'");
console.log('DATA CLEANUP: Removed migration record to force re-run tracking.');
// 2. Run the migration up function directly
console.log('🚀 Running migration manually...');
await up(queryInterface);
// 3. Mark as executed
await sequelize.query("INSERT INTO migrations (name) VALUES ('20260123-fix-template-id-schema')");
console.log('✅ Migration applied and tracked successfully.');
} catch (error: any) {
console.error('❌ Error executing force migration:', error.message, error);
} finally {
await sequelize.close();
}
}
forceRun();

View File

@ -1,5 +1,5 @@
import { sequelize } from '../config/database';
import { QueryInterface, QueryTypes } from 'sequelize';
import { initializeGoogleSecretManager } from '../services/googleSecretManager.service';
import * as m0 from '../migrations/2025103000-create-users';
import * as m1 from '../migrations/2025103001-create-workflow-requests';
import * as m2 from '../migrations/2025103002-create-approval-levels';
@ -46,6 +46,7 @@ import * as m40 from '../migrations/20251218-fix-claim-invoice-credit-note-colum
import * as m41 from '../migrations/20250120-create-dealers-table';
import * as m42 from '../migrations/20250125-create-activity-types';
import * as m43 from '../migrations/20260113-redesign-dealer-claim-history';
import * as m44 from '../migrations/20260123-fix-template-id-schema';
interface Migration {
name: string;
@ -106,6 +107,7 @@ const migrations: Migration[] = [
{ name: '20250120-create-dealers-table', module: m41 },
{ name: '20250125-create-activity-types', module: m42 },
{ name: '20260113-redesign-dealer-claim-history', module: m43 },
{ name: '20260123-fix-template-id-schema', module: m44 },
];
/**
@ -134,12 +136,12 @@ async function ensureMigrationsTable(queryInterface: QueryInterface): Promise<vo
/**
* Get list of already executed migrations
*/
async function getExecutedMigrations(): Promise<string[]> {
async function getExecutedMigrations(sequelize: any): Promise<string[]> {
try {
const results = await sequelize.query<{ name: string }>(
const results = await sequelize.query(
'SELECT name FROM migrations ORDER BY id',
{ type: QueryTypes.SELECT }
);
) as { name: string }[];
return results.map(r => r.name);
} catch (error) {
// Table might not exist yet
@ -150,7 +152,7 @@ async function getExecutedMigrations(): Promise<string[]> {
/**
* Mark migration as executed
*/
async function markMigrationExecuted(name: string): Promise<void> {
async function markMigrationExecuted(sequelize: any, name: string): Promise<void> {
await sequelize.query(
'INSERT INTO migrations (name) VALUES (:name) ON CONFLICT (name) DO NOTHING',
{
@ -165,6 +167,12 @@ async function markMigrationExecuted(name: string): Promise<void> {
*/
async function run() {
try {
console.log('🔐 Initializing secrets...');
await initializeGoogleSecretManager();
// Dynamically import sequelize after secrets are loaded
const { sequelize } = require('../config/database');
await sequelize.authenticate();
const queryInterface = sequelize.getQueryInterface();
@ -173,7 +181,7 @@ async function run() {
await ensureMigrationsTable(queryInterface);
// Get already executed migrations
const executedMigrations = await getExecutedMigrations();
const executedMigrations = await getExecutedMigrations(sequelize);
// Find pending migrations
const pendingMigrations = migrations.filter(
@ -188,11 +196,12 @@ async function run() {
console.log(`🔄 Running ${pendingMigrations.length} migration(s)...`);
// Run each pending migration
for (const migration of pendingMigrations) {
try {
await migration.module.up(queryInterface);
await markMigrationExecuted(migration.name);
await markMigrationExecuted(sequelize, migration.name);
console.log(`${migration.name}`);
} catch (error: any) {
console.error(`❌ Migration failed: ${migration.name} - ${error.message}`);

View File

@ -1,16 +1,14 @@
import http from 'http';
import { initializeSecrets } from './app'; // Import initialization function
import app from './app';
import { initSocket } from './realtime/socket';
import './queues/tatWorker'; // Initialize TAT worker
import { logTatConfig } from './config/tat.config';
import { logSystemConfig } from './config/system.config';
import { initializeHolidaysCache } from './utils/tatTimeUtils';
import { seedDefaultConfigurations } from './services/configSeed.service';
import dotenv from 'dotenv';
import path from 'path';
// Load environment variables from .env file FIRST
dotenv.config({ path: path.resolve(__dirname, '../.env') });
import { initializeGoogleSecretManager } from './services/googleSecretManager.service';
import { seedDefaultActivityTypes } from './services/activityTypeSeed.service';
import { startPauseResumeJob } from './jobs/pauseResumeJob';
import './queues/pauseResumeWorker'; // Initialize pause resume worker
import { initializeQueueMetrics, stopQueueMetrics } from './utils/queueMetrics';
import { stopQueueMetrics } from './utils/queueMetrics';
// Dynamic imports will be used inside startServer to ensure secrets are loaded first
import { emailService } from './services/email.service';
const PORT: number = parseInt(process.env.PORT || '5000', 10);
@ -20,8 +18,22 @@ const startServer = async (): Promise<void> => {
try {
// Initialize Google Secret Manager before starting server
// This will merge secrets from GCS into process.env if enabled
await initializeSecrets();
console.log('🔐 Initializing secrets...');
await initializeGoogleSecretManager();
// Dynamically import everything else after secrets are loaded
const app = require('./app').default;
const { initSocket } = require('./realtime/socket');
require('./queues/tatWorker'); // Initialize TAT worker
const { logTatConfig } = require('./config/tat.config');
const { logSystemConfig } = require('./config/system.config');
const { initializeHolidaysCache } = require('./utils/tatTimeUtils');
const { seedDefaultConfigurations } = require('./services/configSeed.service');
const { startPauseResumeJob } = require('./jobs/pauseResumeJob');
require('./queues/pauseResumeWorker'); // Initialize pause resume worker
const { initializeQueueMetrics } = require('./utils/queueMetrics');
const { emailService } = require('./services/email.service');
// Re-initialize email service after secrets are loaded (in case SMTP credentials were loaded)
// This ensures the email service uses production SMTP if credentials are available
try {
@ -30,37 +42,46 @@ const startServer = async (): Promise<void> => {
} catch (error) {
console.warn('⚠️ Email service re-initialization warning (will use test account if SMTP not configured):', error);
}
// Re-initialize email service after secrets are loaded (in case SMTP credentials were loaded)
// This ensures the email service uses production SMTP if credentials are available
try {
await emailService.initialize();
console.log('📧 Email service re-initialized after secrets loaded');
} catch (error) {
console.warn('⚠️ Email service re-initialization warning (will use test account if SMTP not configured):', error);
}
const server = http.createServer(app);
initSocket(server);
// Seed default configurations if table is empty
try {
await seedDefaultConfigurations();
} catch (error) {
console.error('⚠️ Configuration seeding error:', error);
}
// Seed default activity types if table is empty
try {
await seedDefaultActivityTypes();
} catch (error) {
console.error('⚠️ Activity type seeding error:', error);
}
// Initialize holidays cache for TAT calculations
try {
await initializeHolidaysCache();
} catch (error) {
// Silently fall back to weekends-only TAT calculation
}
// Start scheduled jobs
startPauseResumeJob();
// Initialize queue metrics collection for Prometheus
initializeQueueMetrics();
server.listen(PORT, () => {
console.log(`🚀 Server running on port ${PORT} | ${process.env.NODE_ENV || 'development'}`);
});

View File

@ -28,7 +28,7 @@ class AIService {
// Check if AI is enabled from config
const { getConfigBoolean } = require('./configReader.service');
const enabled = await getConfigBoolean('AI_ENABLED', true);
if (!enabled) {
logger.warn('[AI Service] AI features disabled in admin configuration');
this.isInitialized = true;
@ -54,7 +54,7 @@ class AIService {
this.isInitialized = true;
} catch (error: any) {
logger.error('[AI Service] Failed to initialize Vertex AI:', error);
if (error.code === 'MODULE_NOT_FOUND') {
logger.warn('[AI Service] @google-cloud/vertexai package not installed. Run: npm install @google-cloud/vertexai');
} else if (error.message?.includes('ENOENT') || error.message?.includes('not found')) {
@ -65,7 +65,7 @@ class AIService {
} else {
logger.error(`[AI Service] Initialization error: ${error.message}`);
}
this.isInitialized = true; // Mark as initialized even on failure, to prevent infinite retry loops
}
}
@ -115,7 +115,7 @@ class AIService {
const streamingResp = await generativeModel.generateContent(request);
const response = streamingResp.response;
// Log full response structure for debugging if empty
if (!response.candidates || response.candidates.length === 0) {
logger.error('[AI Service] No candidates in Vertex AI response:', {
@ -125,12 +125,12 @@ class AIService {
});
throw new Error('Vertex AI returned no candidates. The response may have been blocked by safety filters.');
}
const candidate = response.candidates[0];
// Check for safety ratings or blocked reasons
if (candidate.safetyRatings && candidate.safetyRatings.length > 0) {
const blockedRatings = candidate.safetyRatings.filter((rating: any) =>
const blockedRatings = candidate.safetyRatings.filter((rating: any) =>
rating.probability === 'HIGH' || rating.probability === 'MEDIUM'
);
if (blockedRatings.length > 0) {
@ -143,7 +143,7 @@ class AIService {
});
}
}
// Check finish reason
if (candidate.finishReason && candidate.finishReason !== 'STOP') {
logger.warn('[AI Service] Vertex AI finish reason:', {
@ -151,10 +151,10 @@ class AIService {
safetyRatings: candidate.safetyRatings
});
}
// Extract text from response
const text = candidate.content?.parts?.[0]?.text || '';
// Handle MAX_TOKENS finish reason - accept whatever response we got
// We trust the AI's response - no truncation on our side
if (candidate.finishReason === 'MAX_TOKENS' && text) {
@ -167,7 +167,7 @@ class AIService {
// Return the response without any truncation - trust what AI generated
return text;
}
if (!text) {
// Log detailed response structure for debugging
logger.error('[AI Service] Empty text in Vertex AI response:', {
@ -178,7 +178,7 @@ class AIService {
promptPreview: prompt.substring(0, 200) + '...',
model: this.model
});
// Provide more helpful error message
if (candidate.finishReason === 'SAFETY') {
throw new Error('Vertex AI blocked the response due to safety filters. The prompt may contain content that violates safety policies.');
@ -194,7 +194,7 @@ class AIService {
return text;
} catch (error: any) {
logger.error('[AI Service] Vertex AI generation error:', error);
// Provide more specific error messages
if (error.message?.includes('Model was not found')) {
throw new Error(`Model ${this.model} not found or not available in region ${LOCATION}. Please check model name and region.`);
@ -203,7 +203,7 @@ class AIService {
} else if (error.message?.includes('API not enabled')) {
throw new Error('Vertex AI API is not enabled. Please enable it in Google Cloud Console.');
}
throw new Error(`Vertex AI generation failed: ${error.message}`);
}
}
@ -268,10 +268,13 @@ class AIService {
const maxLengthStr = await getConfigValue('AI_MAX_REMARK_LENGTH', '2000');
const maxLength = parseInt(maxLengthStr || '2000', 10);
// Trust AI's response - do not truncate anything
// AI is instructed to stay within limit, but we accept whatever it generates
// Trust AI's response - do not truncate anything
// AI is instructed to stay within limit, but we accept whatever it generates
if (remarkText.length > maxLength) {
logger.info(`[AI Service] AI generated ${remarkText.length} characters (suggested limit: ${maxLength}). Full content preserved as-is.`);
logger.info(`[AI Service] AI generated ${remarkText.length} characters (suggested limit: ${maxLength}). Full content preserved as-is.`);
}
// Extract key points (look for bullet points or numbered items)
@ -315,7 +318,7 @@ class AIService {
const maxLengthStr = await getConfigValue('AI_MAX_REMARK_LENGTH', '2000');
const maxLength = parseInt(maxLengthStr || '2000', 10);
const targetWordCount = Math.floor(maxLength / 6); // Approximate words (avg 6 chars per word)
logger.info(`[AI Service] Using max remark length: ${maxLength} characters (≈${targetWordCount} words) from admin config`);
// Check if this is a rejected request
@ -333,11 +336,11 @@ class AIService {
const approvalSummary = approvalFlow
.filter((a: any) => a.status === 'APPROVED' || a.status === 'REJECTED')
.map((a: any) => {
const tatPercentage = a.tatPercentageUsed !== undefined && a.tatPercentageUsed !== null
? Number(a.tatPercentageUsed)
const tatPercentage = a.tatPercentageUsed !== undefined && a.tatPercentageUsed !== null
? Number(a.tatPercentageUsed)
: (a.elapsedHours && a.tatHours ? (Number(a.elapsedHours) / Number(a.tatHours)) * 100 : 0);
const riskStatus = getTATRiskStatus(tatPercentage);
const tatInfo = a.elapsedHours && a.tatHours
const tatInfo = a.elapsedHours && a.tatHours
? ` (completed in ${a.elapsedHours.toFixed(1)}h of ${a.tatHours}h TAT, ${tatPercentage.toFixed(1)}% used)`
: '';
const riskInfo = riskStatus !== 'ON_TRACK' ? ` [${riskStatus}]` : '';
@ -358,7 +361,7 @@ class AIService {
.join('\n');
// Build rejection context if applicable
const rejectionContext = isRejected
const rejectionContext = isRejected
? `\n**Rejection Details:**\n- Rejected by: ${rejectedBy || 'Approver'}\n- Rejection reason: ${rejectionReason || 'Not specified'}`
: '';
@ -380,8 +383,8 @@ ${documentSummary || 'No documents'}
**YOUR TASK:**
Write a brief, professional conclusion (approximately ${targetWordCount} words, max ${maxLength} characters) that:
${isRejected
? `- Summarizes what was requested and explains that it was rejected
${isRejected
? `- Summarizes what was requested and explains that it was rejected
- Mentions who rejected it and the rejection reason
- Notes the outcome and any learnings or next steps
- Mentions if any approval levels were AT_RISK, CRITICAL, or BREACHED (if applicable)
@ -389,7 +392,7 @@ ${isRejected
- Is suitable for permanent archiving and future reference
- Sounds natural and human-written (not AI-generated)
- Maintains a professional and constructive tone even for rejections`
: `- Summarizes what was requested and the final decision
: `- Summarizes what was requested and the final decision
- Mentions who approved it and any key comments
- Mentions if any approval levels were AT_RISK, CRITICAL, or BREACHED (if applicable)
- Notes the outcome and next steps (if applicable)
@ -450,13 +453,13 @@ Write the conclusion now in HTML format. STRICT LIMIT: ${maxLength} characters m
*/
private extractKeyPoints(remark: string): string[] {
const keyPoints: string[] = [];
// Look for bullet points (-, •, *) or numbered items (1., 2., etc.)
const lines = remark.split('\n');
for (const line of lines) {
const trimmed = line.trim();
// Match bullet points
if (trimmed.match(/^[-•*]\s+(.+)$/)) {
const point = trimmed.replace(/^[-•*]\s+/, '');
@ -464,7 +467,7 @@ Write the conclusion now in HTML format. STRICT LIMIT: ${maxLength} characters m
keyPoints.push(point);
}
}
// Match numbered items
if (trimmed.match(/^\d+\.\s+(.+)$/)) {
const point = trimmed.replace(/^\d+\.\s+/, '');
@ -473,13 +476,13 @@ Write the conclusion now in HTML format. STRICT LIMIT: ${maxLength} characters m
}
}
}
// If no bullet points found, extract first few sentences
if (keyPoints.length === 0) {
const sentences = remark.split(/[.!?]+/).filter(s => s.trim().length > 20);
keyPoints.push(...sentences.slice(0, 3).map(s => s.trim()));
}
return keyPoints.slice(0, 5); // Max 5 key points
}
@ -488,22 +491,22 @@ Write the conclusion now in HTML format. STRICT LIMIT: ${maxLength} characters m
*/
private calculateConfidence(remark: string, context: any): number {
let score = 0.6; // Base score
// Check if remark has good length (100-400 chars - more realistic)
if (remark.length >= 100 && remark.length <= 400) {
score += 0.2;
}
// Check if remark mentions key elements
if (remark.toLowerCase().includes('approv')) {
score += 0.1;
}
// Check if remark is not too generic
if (remark.length > 80 && !remark.toLowerCase().includes('lorem ipsum')) {
score += 0.1;
}
return Math.min(1.0, score);
}

View File

@ -78,7 +78,7 @@ export class EmailService {
private async initializeTestAccount(): Promise<void> {
try {
this.testAccountInfo = await nodemailer.createTestAccount();
this.transporter = nodemailer.createTransport({
host: this.testAccountInfo.smtp.host,
port: this.testAccountInfo.smtp.port,
@ -111,7 +111,7 @@ export class EmailService {
const smtpHost = process.env.SMTP_HOST;
const smtpUser = process.env.SMTP_USER;
const smtpPassword = process.env.SMTP_PASSWORD;
if (smtpHost && smtpUser && smtpPassword) {
logger.info('📧 SMTP credentials detected - re-initializing email service with production SMTP');
await this.initialize();
@ -149,11 +149,11 @@ export class EmailService {
for (let attempt = 1; attempt <= maxRetries; attempt++) {
try {
const info = await this.transporter!.sendMail(mailOptions);
if (!info || !info.messageId) {
throw new Error('Email sent but no messageId returned');
}
const result: { messageId: string; previewUrl?: string } = {
messageId: info.messageId
};
@ -162,10 +162,10 @@ export class EmailService {
if (this.useTestAccount) {
try {
const previewUrl = nodemailer.getTestMessageUrl(info);
if (previewUrl) {
result.previewUrl = previewUrl;
// Always log to console for visibility
console.log('\n' + '='.repeat(80));
console.log(`📧 EMAIL PREVIEW (${options.subject})`);
@ -176,7 +176,7 @@ export class EmailService {
console.log(`Preview URL: ${previewUrl}`);
console.log(`Message ID: ${info.messageId}`);
console.log('='.repeat(80) + '\n');
logger.info(`✅ Email sent (TEST MODE) to ${recipients}`);
logger.info(`📧 Preview URL: ${previewUrl}`);
} else {
@ -198,7 +198,7 @@ export class EmailService {
} catch (error) {
lastError = error;
logger.error(`❌ Email send attempt ${attempt}/${maxRetries} failed:`, error);
if (attempt < maxRetries) {
const delay = parseInt(process.env.EMAIL_RETRY_DELAY || '5000') * attempt;
logger.info(`⏳ Retrying in ${delay}ms...`);
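Taken together, the send hunks above implement a retry loop whose delay grows linearly with the attempt number (EMAIL_RETRY_DELAY multiplied by the attempt). A generic sketch of that pattern, with hypothetical names, for readers who only see the fragments:

// Hypothetical helper, not part of the diff; mirrors the linear backoff above.
async function withRetries<T>(
  task: () => Promise<T>,
  maxRetries = 3,
  baseDelayMs = 5000
): Promise<T> {
  let lastError: unknown = new Error('withRetries: no attempts made');
  for (let attempt = 1; attempt <= maxRetries; attempt++) {
    try {
      return await task();
    } catch (error) {
      lastError = error;
      if (attempt < maxRetries) {
        // Delay grows linearly with the attempt number.
        await new Promise(resolve => setTimeout(resolve, baseDelayMs * attempt));
      }
    }
  }
  throw lastError;
}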
@ -217,22 +217,22 @@ export class EmailService {
*/
async sendBatch(emails: EmailOptions[]): Promise<void> {
logger.info(`📧 Sending batch of ${emails.length} emails`);
const batchSize = parseInt(process.env.EMAIL_BATCH_SIZE || '10');
for (let i = 0; i < emails.length; i += batchSize) {
const batch = emails.slice(i, i + batchSize);
await Promise.allSettled(
batch.map(email => this.sendEmail(email))
);
// Small delay between batches to avoid rate limiting
if (i + batchSize < emails.length) {
await new Promise(resolve => setTimeout(resolve, 1000));
}
}
logger.info(`✅ Batch email sending complete`);
}
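A hedged usage sketch of sendBatch (recipient list and email content are illustrative; the objects follow the EmailOptions shape used throughout this file):

// Hypothetical call site: one email per participant, sent in chunks of
// EMAIL_BATCH_SIZE (default 10) with a one-second pause between chunks.
const participants = [
  { email: 'initiator@example.com' },
  { email: 'approver@example.com' },
];
const emails = participants.map(p => ({
  to: p.email,
  subject: 'REQ-2025-001 - Budget Approval - Request Closed',
  html: '<p>The request has been closed.</p>',
}));
await emailService.sendBatch(emails);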

View File

@ -119,7 +119,7 @@ export class EmailNotificationService {
};
const html = getRequestCreatedEmail(data);
const subject = `[${requestData.requestNumber}] Request Created Successfully`;
const subject = `${requestData.requestNumber} - ${requestData.title} - Request Created Successfully`;
const result = await emailService.sendEmail({
to: initiatorData.email,
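The same subject rewrite (dropping the bracketed request-number prefix in favour of a 'number - title - action' form) repeats in every notification below. A hypothetical helper, not present in the diff, that captures the new convention in one place:

// Hypothetical helper illustrating the new subject convention.
function buildSubject(requestNumber: string, title: string, action: string): string {
  return `${requestNumber} - ${title} - ${action}`;
}

// buildSubject('REQ-2025-001', 'Budget Approval', 'Request Created Successfully')
// → 'REQ-2025-001 - Budget Approval - Request Created Successfully'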
@ -162,9 +162,9 @@ export class EmailNotificationService {
// Multi-level approval email
const chainData: ApprovalChainItem[] = approvalChain.map((level: any) => ({
name: level.approverName || level.approverEmail,
status: level.status === 'APPROVED' ? 'approved'
: level.levelNumber === approverData.levelNumber ? 'current'
: level.levelNumber < approverData.levelNumber ? 'pending'
: 'awaiting',
date: level.approvedAt ? this.formatDate(level.approvedAt) : undefined,
levelNumber: level.levelNumber
@ -189,7 +189,7 @@ export class EmailNotificationService {
};
const html = getMultiApproverRequestEmail(data);
const subject = `[${requestData.requestNumber}] Multi-Level Approval Request - Your Turn`;
const subject = `${requestData.requestNumber} - ${requestData.title} - Multi-Level Approval Request - Your Turn`;
const result = await emailService.sendEmail({
to: approverData.email,
@ -218,7 +218,7 @@ export class EmailNotificationService {
};
const html = getApprovalRequestEmail(data);
const subject = `[${requestData.requestNumber}] Approval Request - Action Required`;
const subject = `${requestData.requestNumber} - ${requestData.title} - Approval Request - Action Required`;
const result = await emailService.sendEmail({
to: approverData.email,
@ -272,7 +272,7 @@ export class EmailNotificationService {
};
const html = getApprovalConfirmationEmail(data);
const subject = `[${requestData.requestNumber}] Request Approved${isFinalApproval ? ' - All Approvals Complete' : ''}`;
const subject = `${requestData.requestNumber} - ${requestData.title} - Request Approved${isFinalApproval ? ' - All Approvals Complete' : ''}`;
const result = await emailService.sendEmail({
to: initiatorData.email,
@ -323,7 +323,7 @@ export class EmailNotificationService {
};
const html = getRejectionNotificationEmail(data);
const subject = `[${requestData.requestNumber}] Request Rejected`;
const subject = `${requestData.requestNumber} - ${requestData.title} - Request Rejected`;
const result = await emailService.sendEmail({
to: initiatorData.email,
@ -364,9 +364,9 @@ export class EmailNotificationService {
}
// Determine urgency level based on threshold
const urgencyLevel = tatInfo.thresholdPercentage >= 75 ? 'high'
: tatInfo.thresholdPercentage >= 50 ? 'medium'
: 'low';
// Get initiator name - try from requestData first, then fetch if needed
let initiatorName = requestData.initiatorName || requestData.initiator?.displayName || 'Initiator';
@ -399,7 +399,7 @@ export class EmailNotificationService {
};
const html = getTATReminderEmail(data);
const subject = `[${requestData.requestNumber}] TAT Reminder - ${tatInfo.thresholdPercentage}% Elapsed`;
const subject = `${requestData.requestNumber} - ${requestData.title} - TAT Reminder - ${tatInfo.thresholdPercentage}% Elapsed`;
const result = await emailService.sendEmail({
to: approverData.email,
@ -469,7 +469,7 @@ export class EmailNotificationService {
};
const html = getTATBreachedEmail(data);
const subject = `[${requestData.requestNumber}] TAT BREACHED - Immediate Action Required`;
const subject = `${requestData.requestNumber} - ${requestData.title} - TAT BREACHED - Immediate Action Required`;
const result = await emailService.sendEmail({
to: approverData.email,
@ -516,8 +516,8 @@ export class EmailNotificationService {
}
const isAutoResumed = !resumedByData || resumedByData.userId === 'system';
const resumedByText = isAutoResumed
? 'automatically'
: `by ${resumedByData.displayName || resumedByData.email}`;
const data: WorkflowResumedData = {
@ -529,7 +529,7 @@ export class EmailNotificationService {
resumedTime: this.formatTime(new Date()),
pausedDuration: pauseDuration,
currentApprover: approverData.displayName || approverData.email,
newTATDeadline: requestData.tatDeadline
? this.formatDate(requestData.tatDeadline) + ' ' + this.formatTime(requestData.tatDeadline)
: 'To be determined',
isApprover: true,
@ -538,7 +538,7 @@ export class EmailNotificationService {
};
const html = getWorkflowResumedEmail(data);
const subject = `[${requestData.requestNumber}] Workflow Resumed - Action Required`;
const subject = `${requestData.requestNumber} - ${requestData.title} - Workflow Resumed - Action Required`;
const result = await emailService.sendEmail({
to: approverData.email,
@ -585,8 +585,8 @@ export class EmailNotificationService {
}
const isAutoResumed = !resumedByData || resumedByData.userId === 'system' || !resumedByData.userId;
const resumedByText = isAutoResumed
? 'automatically'
: `by ${resumedByData.displayName || resumedByData.email || resumedByData.name || 'User'}`;
const data: WorkflowResumedData = {
@ -598,7 +598,7 @@ export class EmailNotificationService {
resumedTime: this.formatTime(new Date()),
pausedDuration: pauseDuration,
currentApprover: approverData?.displayName || approverData?.email || 'Current Approver',
newTATDeadline: requestData.tatDeadline
? this.formatDate(requestData.tatDeadline) + ' ' + this.formatTime(requestData.tatDeadline)
: 'To be determined',
isApprover: false, // This is for initiator
@ -607,7 +607,7 @@ export class EmailNotificationService {
};
const html = getWorkflowResumedEmail(data);
const subject = `[${requestData.requestNumber}] Workflow Resumed`;
const subject = `${requestData.requestNumber} - ${requestData.title} - Workflow Resumed`;
const result = await emailService.sendEmail({
to: initiatorData.email,
@ -685,7 +685,7 @@ export class EmailNotificationService {
};
const html = getRequestClosedEmail(data);
const subject = `[${requestData.requestNumber}] Request Closed`;
const subject = `${requestData.requestNumber} - ${requestData.title} - Request Closed`;
const result = await emailService.sendEmail({
to: recipientData.email,
@ -710,7 +710,7 @@ export class EmailNotificationService {
closureData: any
): Promise<void> {
logger.info(`📧 Sending Request Closed emails to ${participants.length} participants`);
for (const participant of participants) {
await this.sendRequestClosed(requestData, participant, closureData);
// Small delay to avoid rate limiting
@ -754,7 +754,7 @@ export class EmailNotificationService {
};
const html = getApproverSkippedEmail(data);
const subject = `[${requestData.requestNumber}] Approver Skipped`;
const subject = `${requestData.requestNumber} - ${requestData.title} - Approver Skipped`;
const result = await emailService.sendEmail({
to: skippedApproverData.email,
@ -814,7 +814,7 @@ export class EmailNotificationService {
};
const html = getWorkflowPausedEmail(data);
const subject = `[${requestData.requestNumber}] Workflow Paused`;
const subject = `${requestData.requestNumber} - ${requestData.title} - Workflow Paused`;
const result = await emailService.sendEmail({
to: recipientData.email,

File diff suppressed because it is too large