Compare commits
No commits in common. "be220bbb0ca7b8c09cb10548aaec119d7b5fce91" and "4cf72888579f6d9970c01f8cdc25eede43e5283f" have entirely different histories.
be220bbb0c...4cf7288857
@@ -1,2 +1,2 @@
import{a as t}from"./index-D5U31xpx.js";import"./radix-vendor-C2EbRL2a.js";import"./charts-vendor-Cji9-Yri.js";import"./utils-vendor-DHm03ykU.js";import"./ui-vendor-BmvKDhMD.js";import"./socket-vendor-TjCxX7sJ.js";import"./redux-vendor-tbZCm13o.js";import"./router-vendor-CRr9x_Jp.js";async function m(n){return(await t.post(`/conclusions/${n}/generate`)).data.data}async function d(n,o){return(await t.post(`/conclusions/${n}/finalize`,{finalRemark:o})).data.data}async function f(n){return(await t.get(`/conclusions/${n}`)).data.data}export{d as finalizeConclusion,m as generateConclusion,f as getConclusion};
//# sourceMappingURL=conclusionApi-xBwvOJP0.js.map
import{a as t}from"./index-9cOIFSn9.js";import"./radix-vendor-C2EbRL2a.js";import"./charts-vendor-Cji9-Yri.js";import"./utils-vendor-DHm03ykU.js";import"./ui-vendor-BmvKDhMD.js";import"./socket-vendor-TjCxX7sJ.js";import"./redux-vendor-tbZCm13o.js";import"./router-vendor-CRr9x_Jp.js";async function m(n){return(await t.post(`/conclusions/${n}/generate`)).data.data}async function d(n,o){return(await t.post(`/conclusions/${n}/finalize`,{finalRemark:o})).data.data}async function f(n){return(await t.get(`/conclusions/${n}`)).data.data}export{d as finalizeConclusion,m as generateConclusion,f as getConclusion};
//# sourceMappingURL=conclusionApi-uNxtglEr.js.map
@@ -1 +1 @@
{"version":3,"file":"conclusionApi-xBwvOJP0.js","sources":["../../src/services/conclusionApi.ts"],"sourcesContent":["import apiClient from './authApi';\r\n\r\nexport interface ConclusionRemark {\r\n conclusionId: string;\r\n requestId: string;\r\n aiGeneratedRemark: string | null;\r\n aiModelUsed: string | null;\r\n aiConfidenceScore: number | null;\r\n finalRemark: string | null;\r\n editedBy: string | null;\r\n isEdited: boolean;\r\n editCount: number;\r\n approvalSummary: any;\r\n documentSummary: any;\r\n keyDiscussionPoints: string[];\r\n generatedAt: string | null;\r\n finalizedAt: string | null;\r\n createdAt: string;\r\n updatedAt: string;\r\n}\r\n\r\n/**\r\n * Generate AI-powered conclusion remark\r\n */\r\nexport async function generateConclusion(requestId: string): Promise<{\r\n conclusionId: string;\r\n aiGeneratedRemark: string;\r\n keyDiscussionPoints: string[];\r\n confidence: number;\r\n generatedAt: string;\r\n}> {\r\n const response = await apiClient.post(`/conclusions/${requestId}/generate`);\r\n return response.data.data;\r\n}\r\n\r\n/**\r\n * Update conclusion remark (edit by initiator)\r\n */\r\nexport async function updateConclusion(requestId: string, finalRemark: string): Promise<ConclusionRemark> {\r\n const response = await apiClient.put(`/conclusions/${requestId}`, { finalRemark });\r\n return response.data.data;\r\n}\r\n\r\n/**\r\n * Finalize conclusion and close request\r\n */\r\nexport async function finalizeConclusion(requestId: string, finalRemark: string): Promise<{\r\n conclusionId: string;\r\n requestNumber: string;\r\n status: string;\r\n finalRemark: string;\r\n finalizedAt: string;\r\n}> {\r\n const response = await apiClient.post(`/conclusions/${requestId}/finalize`, { finalRemark });\r\n return response.data.data;\r\n}\r\n\r\n/**\r\n * Get conclusion for a request\r\n */\r\nexport async function getConclusion(requestId: string): Promise<ConclusionRemark> {\r\n const response = await apiClient.get(`/conclusions/${requestId}`);\r\n return response.data.data;\r\n}\r\n\r\n"],"names":["generateConclusion","requestId","apiClient","finalizeConclusion","finalRemark","getConclusion"],"mappings":"6RAwBA,eAAsBA,EAAmBC,EAMtC,CAED,OADiB,MAAMC,EAAU,KAAK,gBAAgBD,CAAS,WAAW,GAC1D,KAAK,IACvB,CAaA,eAAsBE,EAAmBF,EAAmBG,EAMzD,CAED,OADiB,MAAMF,EAAU,KAAK,gBAAgBD,CAAS,YAAa,CAAE,YAAAG,EAAa,GAC3E,KAAK,IACvB,CAKA,eAAsBC,EAAcJ,EAA8C,CAEhF,OADiB,MAAMC,EAAU,IAAI,gBAAgBD,CAAS,EAAE,GAChD,KAAK,IACvB"}
{"version":3,"file":"conclusionApi-uNxtglEr.js","sources":["../../src/services/conclusionApi.ts"],"sourcesContent":["import apiClient from './authApi';\r\n\r\nexport interface ConclusionRemark {\r\n conclusionId: string;\r\n requestId: string;\r\n aiGeneratedRemark: string | null;\r\n aiModelUsed: string | null;\r\n aiConfidenceScore: number | null;\r\n finalRemark: string | null;\r\n editedBy: string | null;\r\n isEdited: boolean;\r\n editCount: number;\r\n approvalSummary: any;\r\n documentSummary: any;\r\n keyDiscussionPoints: string[];\r\n generatedAt: string | null;\r\n finalizedAt: string | null;\r\n createdAt: string;\r\n updatedAt: string;\r\n}\r\n\r\n/**\r\n * Generate AI-powered conclusion remark\r\n */\r\nexport async function generateConclusion(requestId: string): Promise<{\r\n conclusionId: string;\r\n aiGeneratedRemark: string;\r\n keyDiscussionPoints: string[];\r\n confidence: number;\r\n generatedAt: string;\r\n}> {\r\n const response = await apiClient.post(`/conclusions/${requestId}/generate`);\r\n return response.data.data;\r\n}\r\n\r\n/**\r\n * Update conclusion remark (edit by initiator)\r\n */\r\nexport async function updateConclusion(requestId: string, finalRemark: string): Promise<ConclusionRemark> {\r\n const response = await apiClient.put(`/conclusions/${requestId}`, { finalRemark });\r\n return response.data.data;\r\n}\r\n\r\n/**\r\n * Finalize conclusion and close request\r\n */\r\nexport async function finalizeConclusion(requestId: string, finalRemark: string): Promise<{\r\n conclusionId: string;\r\n requestNumber: string;\r\n status: string;\r\n finalRemark: string;\r\n finalizedAt: string;\r\n}> {\r\n const response = await apiClient.post(`/conclusions/${requestId}/finalize`, { finalRemark });\r\n return response.data.data;\r\n}\r\n\r\n/**\r\n * Get conclusion for a request\r\n */\r\nexport async function getConclusion(requestId: string): Promise<ConclusionRemark> {\r\n const response = await apiClient.get(`/conclusions/${requestId}`);\r\n return response.data.data;\r\n}\r\n\r\n"],"names":["generateConclusion","requestId","apiClient","finalizeConclusion","finalRemark","getConclusion"],"mappings":"6RAwBA,eAAsBA,EAAmBC,EAMtC,CAED,OADiB,MAAMC,EAAU,KAAK,gBAAgBD,CAAS,WAAW,GAC1D,KAAK,IACvB,CAaA,eAAsBE,EAAmBF,EAAmBG,EAMzD,CAED,OADiB,MAAMF,EAAU,KAAK,gBAAgBD,CAAS,YAAa,CAAE,YAAAG,EAAa,GAC3E,KAAK,IACvB,CAKA,eAAsBC,EAAcJ,EAA8C,CAEhF,OADiB,MAAMC,EAAU,IAAI,gBAAgBD,CAAS,EAAE,GAChD,KAAK,IACvB"}
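The source maps above embed the original TypeScript of src/services/conclusionApi.ts. For orientation, a minimal usage sketch of those client calls follows; it is illustrative only, assumes an authenticated apiClient is already configured by the application, and the import path and the concludeRequest wrapper are hypothetical, not part of the repository.

import { generateConclusion, finalizeConclusion, getConclusion } from './services/conclusionApi';

// Illustrative flow: draft an AI conclusion, then finalize it to close the request.
async function concludeRequest(requestId: string): Promise<void> {
  // POST /conclusions/:requestId/generate
  const draft = await generateConclusion(requestId);
  console.log('AI draft:', draft.aiGeneratedRemark, '| confidence:', draft.confidence);

  // POST /conclusions/:requestId/finalize (the initiator may edit the remark first)
  const closed = await finalizeConclusion(requestId, draft.aiGeneratedRemark);
  console.log(`Request ${closed.requestNumber} closed with status ${closed.status}`);

  // GET /conclusions/:requestId
  const stored = await getConclusion(requestId);
  console.log('Finalized at:', stored.finalizedAt);
}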
File diff suppressed because one or more lines are too long
build/assets/index-9cOIFSn9.js.map (normal file)
File diff suppressed because one or more lines are too long
build/assets/index-BmOYs32D.css (normal file)
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
@@ -52,7 +52,7 @@
        transition: transform 0.2s ease;
      }
    </style>
    <script type="module" crossorigin src="/assets/index-D5U31xpx.js"></script>
    <script type="module" crossorigin src="/assets/index-9cOIFSn9.js"></script>
    <link rel="modulepreload" crossorigin href="/assets/charts-vendor-Cji9-Yri.js">
    <link rel="modulepreload" crossorigin href="/assets/radix-vendor-C2EbRL2a.js">
    <link rel="modulepreload" crossorigin href="/assets/utils-vendor-DHm03ykU.js">
@@ -60,7 +60,7 @@
    <link rel="modulepreload" crossorigin href="/assets/socket-vendor-TjCxX7sJ.js">
    <link rel="modulepreload" crossorigin href="/assets/redux-vendor-tbZCm13o.js">
    <link rel="modulepreload" crossorigin href="/assets/router-vendor-CRr9x_Jp.js">
    <link rel="stylesheet" crossorigin href="/assets/index-DwXE9Ynd.css">
    <link rel="stylesheet" crossorigin href="/assets/index-BmOYs32D.css">
  </head>
  <body>
    <div id="root"></div>
@@ -1,56 +0,0 @@
import { Request, Response } from 'express';
import { WorkflowTemplate } from '../models';
import logger from '../utils/logger';

export const createTemplate = async (req: Request, res: Response) => {
  try {
    const { name, description, category, priority, estimatedTime, approvers, suggestedSLA } = req.body;
    const userId = (req as any).user?.userId;

    const template = await WorkflowTemplate.create({
      name,
      description,
      category,
      priority,
      estimatedTime,
      approvers,
      suggestedSLA,
      createdBy: userId,
      isActive: true
    });

    res.status(201).json({
      success: true,
      message: 'Workflow template created successfully',
      data: template
    });
  } catch (error) {
    logger.error('Error creating workflow template:', error);
    res.status(500).json({
      success: false,
      message: 'Failed to create workflow template',
      error: error instanceof Error ? error.message : 'Unknown error'
    });
  }
};

export const getTemplates = async (req: Request, res: Response) => {
  try {
    const templates = await WorkflowTemplate.findAll({
      where: { isActive: true },
      order: [['createdAt', 'DESC']]
    });

    res.status(200).json({
      success: true,
      data: templates
    });
  } catch (error) {
    logger.error('Error fetching workflow templates:', error);
    res.status(500).json({
      success: false,
      message: 'Failed to fetch workflow templates',
      error: error instanceof Error ? error.message : 'Unknown error'
    });
  }
};
@@ -1,84 +0,0 @@
import { QueryInterface, DataTypes } from 'sequelize';

export async function up(queryInterface: QueryInterface): Promise<void> {
  // Drop table if exists to ensure schema is correct (CASCADE to remove FKs)
  await queryInterface.dropTable('workflow_templates', { cascade: true });

  // Create Enum for Template Priority
  await queryInterface.sequelize.query(`DO $$
    BEGIN
      IF NOT EXISTS (SELECT 1 FROM pg_type WHERE typname = 'enum_template_priority') THEN
        CREATE TYPE enum_template_priority AS ENUM ('low', 'medium', 'high');
      END IF;
    END$$;`);

  await queryInterface.createTable('workflow_templates', {
    id: {
      type: DataTypes.UUID,
      primaryKey: true,
      defaultValue: DataTypes.UUIDV4
    },
    name: {
      type: DataTypes.STRING,
      allowNull: false
    },
    description: {
      type: DataTypes.TEXT,
      allowNull: true
    },
    category: {
      type: DataTypes.STRING,
      defaultValue: 'General'
    },
    priority: {
      type: 'enum_template_priority',
      defaultValue: 'medium'
    },
    estimated_time: {
      type: DataTypes.STRING,
      defaultValue: 'Variable'
    },
    approvers: {
      type: DataTypes.JSONB,
      defaultValue: []
    },
    suggested_sla: {
      type: DataTypes.INTEGER,
      defaultValue: 24
    },
    is_active: {
      type: DataTypes.BOOLEAN,
      defaultValue: true
    },
    created_by: {
      type: DataTypes.UUID,
      allowNull: true,
      references: {
        model: 'users',
        key: 'user_id'
      }
    },
    fields: {
      type: DataTypes.JSONB,
      defaultValue: {}
    },
    created_at: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW
    },
    updated_at: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW
    },
  });

  // Add index on created_by
  await queryInterface.sequelize.query('CREATE INDEX IF NOT EXISTS "workflow_templates_created_by" ON "workflow_templates" ("created_by");');
}

export async function down(queryInterface: QueryInterface): Promise<void> {
  await queryInterface.dropTable('workflow_templates');
  await queryInterface.sequelize.query('DROP TYPE IF EXISTS enum_template_priority;');
}
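For context on how a migration like the one above is applied: the auto-setup script later in this diff simply calls each migration module's up(queryInterface). A minimal standalone sketch follows; it is illustrative only and assumes a DATABASE_URL environment variable for the target Postgres instance (the real project assembles its connection from DB_HOST, DB_NAME, and related variables).

import { Sequelize } from 'sequelize';
import * as createWorkflowTemplates from './migrations/20260122-create-workflow-templates';

async function run(): Promise<void> {
  // Assumed connection string for illustration.
  const sequelize = new Sequelize(process.env.DATABASE_URL as string, { logging: false });
  const queryInterface = sequelize.getQueryInterface();

  // Apply: creates enum_template_priority, the workflow_templates table, and the created_by index.
  await createWorkflowTemplates.up(queryInterface);

  // Roll back if needed: drops the table and the enum type.
  // await createWorkflowTemplates.down(queryInterface);

  await sequelize.close();
}

run().catch((error) => {
  console.error('Migration failed:', error);
  process.exit(1);
});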
@@ -1,99 +0,0 @@
import { Model, DataTypes } from 'sequelize';
import { sequelize } from '@config/database';
import { User } from './User';

export class WorkflowTemplate extends Model {
  public id!: string;
  public name!: string;
  public description!: string;
  public category!: string;
  public priority!: 'low' | 'medium' | 'high';
  public estimatedTime!: string;
  public approvers!: any[];
  public suggestedSLA!: number;
  public isActive!: boolean;
  public createdBy!: string;
  public fields!: any;

  public readonly createdAt!: Date;
  public readonly updatedAt!: Date;
}

WorkflowTemplate.init(
  {
    id: {
      type: DataTypes.UUID,
      defaultValue: DataTypes.UUIDV4,
      primaryKey: true,
      allowNull: false
    },
    name: {
      type: DataTypes.STRING,
      allowNull: false
    },
    description: {
      type: DataTypes.TEXT,
      allowNull: true
    },
    category: {
      type: DataTypes.STRING,
      defaultValue: 'General'
    },
    priority: {
      type: DataTypes.ENUM('low', 'medium', 'high'),
      defaultValue: 'medium'
    },
    estimatedTime: {
      type: DataTypes.STRING,
      defaultValue: 'Variable',
      field: 'estimated_time'
    },
    approvers: {
      type: DataTypes.JSONB,
      defaultValue: []
    },
    suggestedSLA: {
      type: DataTypes.INTEGER,
      defaultValue: 24,
      comment: 'In hours',
      field: 'suggested_sla'
    },
    isActive: {
      type: DataTypes.BOOLEAN,
      defaultValue: true,
      field: 'is_active'
    },
    createdBy: {
      type: DataTypes.UUID,
      allowNull: true,
      field: 'created_by',
      references: {
        model: 'users',
        key: 'user_id'
      }
    },
    fields: {
      type: DataTypes.JSONB,
      defaultValue: {}
    },
    createdAt: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW,
      field: 'created_at'
    },
    updatedAt: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW,
      field: 'updated_at'
    }
  },
  {
    sequelize,
    tableName: 'workflow_templates',
    timestamps: true,
    createdAt: 'created_at',
    updatedAt: 'updated_at'
  }
);
@@ -16,7 +16,6 @@ import { Notification } from './Notification';
import ConclusionRemark from './ConclusionRemark';
import RequestSummary from './RequestSummary';
import SharedSummary from './SharedSummary';
import { WorkflowTemplate } from './WorkflowTemplate';

// Define associations
const defineAssociations = () => {
@@ -139,8 +138,7 @@ export {
  Notification,
  ConclusionRemark,
  RequestSummary,
  SharedSummary,
  WorkflowTemplate
  SharedSummary
};

// Export default sequelize instance
@@ -13,7 +13,6 @@ import dashboardRoutes from './dashboard.routes';
import notificationRoutes from './notification.routes';
import conclusionRoutes from './conclusion.routes';
import aiRoutes from './ai.routes';
import workflowTemplateRoutes from './workflowTemplate.routes';

const router = Router();

@@ -41,7 +40,6 @@ router.use('/notifications', notificationRoutes);
router.use('/conclusions', conclusionRoutes);
router.use('/ai', aiRoutes);
router.use('/summaries', summaryRoutes);
router.use('/templates', workflowTemplateRoutes);

// TODO: Add other route modules as they are implemented
// router.use('/approvals', approvalRoutes);
@@ -1,14 +0,0 @@
import { Router } from 'express';
import { createTemplate, getTemplates } from '../controllers/workflowTemplate.controller';
import { authenticateToken } from '../middlewares/auth.middleware';
import { requireAdmin } from '../middlewares/authorization.middleware';

const router = Router();

// Public route to get templates (authenticated users)
router.get('/', authenticateToken, getTemplates);

// Admin only route to create templates
router.post('/', authenticateToken, requireAdmin, createTemplate);

export default router;
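Together with the '/templates' mount removed from routes/index.ts above, this router exposed a list endpoint for authenticated users and an admin-only create endpoint. A minimal client-side sketch is shown below; the /api prefix, Bearer-token header, and the example payload values are assumptions for illustration, not taken from the repository.

// Hypothetical client calls against the removed endpoints.
async function listTemplates(baseUrl: string, token: string): Promise<unknown[]> {
  const res = await fetch(`${baseUrl}/api/templates`, {
    headers: { Authorization: `Bearer ${token}` },
  });
  const body = await res.json();
  return body.data; // the controller responds with { success, data }
}

async function createCapexTemplate(baseUrl: string, adminToken: string): Promise<unknown> {
  const res = await fetch(`${baseUrl}/api/templates`, {
    method: 'POST', // admin-only: guarded by authenticateToken + requireAdmin
    headers: {
      Authorization: `Bearer ${adminToken}`,
      'Content-Type': 'application/json',
    },
    body: JSON.stringify({
      name: 'Capex Approval',            // example values only
      description: 'Capital expenditure approval workflow',
      category: 'Finance',
      priority: 'high',                  // enum: low | medium | high
      estimatedTime: '2 days',
      approvers: [],
      suggestedSLA: 48,                  // in hours
    }),
  });
  const body = await res.json();
  return body.data;
}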
@@ -49,13 +49,13 @@ async function checkAndCreateDatabase(): Promise<boolean> {

    if (result.rows.length === 0) {
      console.log(`📦 Database '${DB_NAME}' not found. Creating...`);

      // Create database
      await client.query(`CREATE DATABASE "${DB_NAME}"`);
      console.log(`✅ Database '${DB_NAME}' created successfully!`);

      await client.end();

      // Connect to new database and install extensions
      const newDbClient = new Client({
        host: DB_HOST,
@@ -64,13 +64,13 @@ async function checkAndCreateDatabase(): Promise<boolean> {
        password: DB_PASSWORD,
        database: DB_NAME,
      });

      await newDbClient.connect();
      console.log('📦 Installing uuid-ossp extension...');
      await newDbClient.query('CREATE EXTENSION IF NOT EXISTS "uuid-ossp"');
      console.log('✅ Extension installed!');
      await newDbClient.end();

      return true; // Database was created
    } else {
      console.log(`✅ Database '${DB_NAME}' already exists.`);
@@ -87,7 +87,7 @@ async function checkAndCreateDatabase(): Promise<boolean> {
async function runMigrations(): Promise<void> {
  try {
    console.log('🔄 Checking and running pending migrations...');

    // Import all migrations using require for CommonJS compatibility
    // Some migrations use module.exports, others use export
    const m0 = require('../migrations/2025103000-create-users');
@@ -120,8 +120,7 @@ async function runMigrations(): Promise<void> {
    const m27 = require('../migrations/20250127-migrate-in-progress-to-pending');
    const m28 = require('../migrations/20250130-migrate-to-vertex-ai');
    const m29 = require('../migrations/20251203-add-user-notification-preferences');
    const m30 = require('../migrations/20260122-create-workflow-templates');

    const migrations = [
      { name: '2025103000-create-users', module: m0 },
      { name: '2025103001-create-workflow-requests', module: m1 },
@@ -153,11 +152,10 @@ async function runMigrations(): Promise<void> {
      { name: '20250127-migrate-in-progress-to-pending', module: m27 },
      { name: '20250130-migrate-to-vertex-ai', module: m28 },
      { name: '20251203-add-user-notification-preferences', module: m29 },
      { name: '20260122-create-workflow-templates', module: m30 },
    ];

    const queryInterface = sequelize.getQueryInterface();

    // Ensure migrations tracking table exists
    const tables = await queryInterface.showAllTables();
    if (!tables.includes('migrations')) {
@@ -169,34 +167,34 @@ async function runMigrations(): Promise<void> {
        )
      `);
    }

    // Get already executed migrations
    const executedResults = await sequelize.query<{ name: string }>(
      'SELECT name FROM migrations ORDER BY id',
      { type: QueryTypes.SELECT }
    );
    const executedMigrations = executedResults.map(r => r.name);

    // Find pending migrations
    const pendingMigrations = migrations.filter(
      m => !executedMigrations.includes(m.name)
    );

    if (pendingMigrations.length === 0) {
      console.log('✅ Migrations up-to-date');
      return;
    }

    console.log(`🔄 Running ${pendingMigrations.length} pending migration(s)...`);

    // Run each pending migration
    for (const migration of pendingMigrations) {
      try {
        console.log(`  → ${migration.name}`);

        // Call the up function - works for both module.exports and export styles
        await migration.module.up(queryInterface);

        // Mark as executed
        await sequelize.query(
          'INSERT INTO migrations (name) VALUES (:name) ON CONFLICT (name) DO NOTHING',
@@ -211,7 +209,7 @@ async function runMigrations(): Promise<void> {
        throw error;
      }
    }

    console.log(`✅ Applied ${pendingMigrations.length} migration(s)`);
  } catch (error: any) {
    console.error('❌ Migration failed:', error.message);
@@ -248,9 +246,9 @@ async function autoSetup(): Promise<void> {
    console.log('\n========================================');
    console.log('✅ Setup completed successfully!');
    console.log('========================================\n');

    console.log('📝 Note: Admin configurations will be auto-seeded on server start if table is empty.\n');

    if (wasCreated) {
      console.log('💡 Next steps:');
      console.log('   1. Server will start automatically');
@@ -258,7 +256,7 @@ async function autoSetup(): Promise<void> {
      console.log('   3. Run this SQL to make yourself admin:');
      console.log(`   UPDATE users SET role = 'ADMIN' WHERE email = 'your-email@royalenfield.com';\n`);
    }

  } catch (error: any) {
    console.error('\n========================================');
    console.error('❌ Setup failed!');
@@ -10,7 +10,6 @@ import { seedDefaultConfigurations } from './services/configSeed.service';
import { startPauseResumeJob } from './jobs/pauseResumeJob';
import './queues/pauseResumeWorker'; // Initialize pause resume worker
import { initializeQueueMetrics, stopQueueMetrics } from './utils/queueMetrics';
import { emailService } from './services/email.service';

const PORT: number = parseInt(process.env.PORT || '5000', 10);

@@ -20,39 +19,30 @@ const startServer = async (): Promise<void> => {
    // Initialize Google Secret Manager before starting server
    // This will merge secrets from GCS into process.env if enabled
    await initializeSecrets();

    // Re-initialize email service after secrets are loaded (in case SMTP credentials were loaded)
    // This ensures the email service uses production SMTP if credentials are available
    try {
      await emailService.initialize();
      console.log('📧 Email service re-initialized after secrets loaded');
    } catch (error) {
      console.warn('⚠️ Email service re-initialization warning (will use test account if SMTP not configured):', error);
    }

    const server = http.createServer(app);
    initSocket(server);

    // Seed default configurations if table is empty
    try {
      await seedDefaultConfigurations();
    } catch (error) {
      console.error('⚠️ Configuration seeding error:', error);
    }

    // Initialize holidays cache for TAT calculations
    try {
      await initializeHolidaysCache();
    } catch (error) {
      // Silently fall back to weekends-only TAT calculation
    }

    // Start scheduled jobs
    startPauseResumeJob();

    // Initialize queue metrics collection for Prometheus
    initializeQueueMetrics();

    server.listen(PORT, () => {
      console.log(`🚀 Server running on port ${PORT} | ${process.env.NODE_ENV || 'development'}`);
    });
@@ -28,7 +28,7 @@ class AIService {
      // Check if AI is enabled from config
      const { getConfigBoolean } = require('./configReader.service');
      const enabled = await getConfigBoolean('AI_ENABLED', true);

      if (!enabled) {
        logger.warn('[AI Service] AI features disabled in admin configuration');
        this.isInitialized = true;
@@ -54,7 +54,7 @@ class AIService {
      this.isInitialized = true;
    } catch (error: any) {
      logger.error('[AI Service] Failed to initialize Vertex AI:', error);

      if (error.code === 'MODULE_NOT_FOUND') {
        logger.warn('[AI Service] @google-cloud/vertexai package not installed. Run: npm install @google-cloud/vertexai');
      } else if (error.message?.includes('ENOENT') || error.message?.includes('not found')) {
@@ -65,7 +65,7 @@ class AIService {
      } else {
        logger.error(`[AI Service] Initialization error: ${error.message}`);
      }

      this.isInitialized = true; // Mark as initialized even if failed to prevent infinite loops
    }
  }
@ -99,11 +99,10 @@ class AIService {
|
||||
|
||||
try {
|
||||
// Get the generative model
|
||||
// Increase maxOutputTokens to handle longer conclusions (up to ~4000 tokens ≈ 3000 words)
|
||||
const generativeModel = this.vertexAI.getGenerativeModel({
|
||||
model: this.model,
|
||||
generationConfig: {
|
||||
maxOutputTokens: 4096, // Increased from 2048 to handle longer conclusions
|
||||
maxOutputTokens: 2048,
|
||||
temperature: 0.3,
|
||||
},
|
||||
});
|
||||
@ -115,7 +114,7 @@ class AIService {
|
||||
|
||||
const streamingResp = await generativeModel.generateContent(request);
|
||||
const response = streamingResp.response;
|
||||
|
||||
|
||||
// Log full response structure for debugging if empty
|
||||
if (!response.candidates || response.candidates.length === 0) {
|
||||
logger.error('[AI Service] No candidates in Vertex AI response:', {
|
||||
@ -125,12 +124,12 @@ class AIService {
|
||||
});
|
||||
throw new Error('Vertex AI returned no candidates. The response may have been blocked by safety filters.');
|
||||
}
|
||||
|
||||
|
||||
const candidate = response.candidates[0];
|
||||
|
||||
|
||||
// Check for safety ratings or blocked reasons
|
||||
if (candidate.safetyRatings && candidate.safetyRatings.length > 0) {
|
||||
const blockedRatings = candidate.safetyRatings.filter((rating: any) =>
|
||||
const blockedRatings = candidate.safetyRatings.filter((rating: any) =>
|
||||
rating.probability === 'HIGH' || rating.probability === 'MEDIUM'
|
||||
);
|
||||
if (blockedRatings.length > 0) {
|
||||
@ -143,7 +142,7 @@ class AIService {
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
// Check finish reason
|
||||
if (candidate.finishReason && candidate.finishReason !== 'STOP') {
|
||||
logger.warn('[AI Service] Vertex AI finish reason:', {
|
||||
@ -151,23 +150,10 @@ class AIService {
|
||||
safetyRatings: candidate.safetyRatings
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
// Extract text from response
|
||||
const text = candidate.content?.parts?.[0]?.text || '';
|
||||
|
||||
// Handle MAX_TOKENS finish reason - accept whatever response we got
|
||||
// We trust the AI's response - no truncation on our side
|
||||
if (candidate.finishReason === 'MAX_TOKENS' && text) {
|
||||
// Accept the response as-is - AI was instructed to stay within limits
|
||||
// If it hit the limit, we still use what we got (no truncation on our side)
|
||||
logger.info('[AI Service] Vertex AI response hit token limit, but content received is preserved as-is:', {
|
||||
textLength: text.length,
|
||||
finishReason: candidate.finishReason
|
||||
});
|
||||
// Return the response without any truncation - trust what AI generated
|
||||
return text;
|
||||
}
|
||||
|
||||
|
||||
if (!text) {
|
||||
// Log detailed response structure for debugging
|
||||
logger.error('[AI Service] Empty text in Vertex AI response:', {
|
||||
@ -178,12 +164,12 @@ class AIService {
|
||||
promptPreview: prompt.substring(0, 200) + '...',
|
||||
model: this.model
|
||||
});
|
||||
|
||||
|
||||
// Provide more helpful error message
|
||||
if (candidate.finishReason === 'SAFETY') {
|
||||
throw new Error('Vertex AI blocked the response due to safety filters. The prompt may contain content that violates safety policies.');
|
||||
} else if (candidate.finishReason === 'MAX_TOKENS') {
|
||||
throw new Error('Vertex AI response was truncated due to token limit. The prompt may be too long or the response limit was exceeded.');
|
||||
throw new Error('Vertex AI response was truncated due to token limit.');
|
||||
} else if (candidate.finishReason === 'RECITATION') {
|
||||
throw new Error('Vertex AI blocked the response due to recitation concerns.');
|
||||
} else {
|
||||
@ -194,7 +180,7 @@ class AIService {
|
||||
return text;
|
||||
} catch (error: any) {
|
||||
logger.error('[AI Service] Vertex AI generation error:', error);
|
||||
|
||||
|
||||
// Provide more specific error messages
|
||||
if (error.message?.includes('Model was not found')) {
|
||||
throw new Error(`Model ${this.model} not found or not available in region ${LOCATION}. Please check model name and region.`);
|
||||
@ -203,7 +189,7 @@ class AIService {
|
||||
} else if (error.message?.includes('API not enabled')) {
|
||||
throw new Error('Vertex AI API is not enabled. Please enable it in Google Cloud Console.');
|
||||
}
|
||||
|
||||
|
||||
throw new Error(`Vertex AI generation failed: ${error.message}`);
|
||||
}
|
||||
}
|
||||
@@ -268,10 +254,9 @@ class AIService {
      const maxLengthStr = await getConfigValue('AI_MAX_REMARK_LENGTH', '2000');
      const maxLength = parseInt(maxLengthStr || '2000', 10);

      // Trust AI's response - do not truncate anything
      // AI is instructed to stay within limit, but we accept whatever it generates
      // Log length (no trimming - preserve complete AI-generated content)
      if (remarkText.length > maxLength) {
        logger.info(`[AI Service] AI generated ${remarkText.length} characters (suggested limit: ${maxLength}). Full content preserved as-is.`);
        logger.warn(`[AI Service] ⚠️ AI exceeded suggested limit (${remarkText.length} > ${maxLength}). Content preserved to avoid incomplete information.`);
      }

      // Extract key points (look for bullet points or numbered items)
@@ -315,7 +300,7 @@ class AIService {
    const maxLengthStr = await getConfigValue('AI_MAX_REMARK_LENGTH', '2000');
    const maxLength = parseInt(maxLengthStr || '2000', 10);
    const targetWordCount = Math.floor(maxLength / 6); // Approximate words (avg 6 chars per word)

    logger.info(`[AI Service] Using max remark length: ${maxLength} characters (≈${targetWordCount} words) from admin config`);

    // Check if this is a rejected request
@@ -333,11 +318,11 @@ class AIService {
    const approvalSummary = approvalFlow
      .filter((a: any) => a.status === 'APPROVED' || a.status === 'REJECTED')
      .map((a: any) => {
        const tatPercentage = a.tatPercentageUsed !== undefined && a.tatPercentageUsed !== null
          ? Number(a.tatPercentageUsed)
        const tatPercentage = a.tatPercentageUsed !== undefined && a.tatPercentageUsed !== null
          ? Number(a.tatPercentageUsed)
          : (a.elapsedHours && a.tatHours ? (Number(a.elapsedHours) / Number(a.tatHours)) * 100 : 0);
        const riskStatus = getTATRiskStatus(tatPercentage);
        const tatInfo = a.elapsedHours && a.tatHours
        const tatInfo = a.elapsedHours && a.tatHours
          ? ` (completed in ${a.elapsedHours.toFixed(1)}h of ${a.tatHours}h TAT, ${tatPercentage.toFixed(1)}% used)`
          : '';
        const riskInfo = riskStatus !== 'ON_TRACK' ? ` [${riskStatus}]` : '';
@@ -351,14 +336,13 @@ class AIService {
      .map((wn: any) => `- ${wn.userName}: "${wn.message.substring(0, 150)}${wn.message.length > 150 ? '...' : ''}"`)
      .join('\n');

    // Summarize documents (limit to reduce token usage)
    // Summarize documents
    const documentSummary = documents
      .slice(0, 10) // Limit to first 10 documents
      .map((d: any) => `- ${d.fileName} (by ${d.uploadedBy})`)
      .join('\n');

    // Build rejection context if applicable
    const rejectionContext = isRejected
    const rejectionContext = isRejected
      ? `\n**Rejection Details:**\n- Rejected by: ${rejectedBy || 'Approver'}\n- Rejection reason: ${rejectionReason || 'Not specified'}`
      : '';

@@ -380,8 +364,8 @@ ${documentSummary || 'No documents'}

**YOUR TASK:**
Write a brief, professional conclusion (approximately ${targetWordCount} words, max ${maxLength} characters) that:
${isRejected
  ? `- Summarizes what was requested and explains that it was rejected
${isRejected
  ? `- Summarizes what was requested and explains that it was rejected
- Mentions who rejected it and the rejection reason
- Notes the outcome and any learnings or next steps
- Mentions if any approval levels were AT_RISK, CRITICAL, or BREACHED (if applicable)
@@ -389,7 +373,7 @@ ${isRejected
- Is suitable for permanent archiving and future reference
- Sounds natural and human-written (not AI-generated)
- Maintains a professional and constructive tone even for rejections`
  : `- Summarizes what was requested and the final decision
  : `- Summarizes what was requested and the final decision
- Mentions who approved it and any key comments
- Mentions if any approval levels were AT_RISK, CRITICAL, or BREACHED (if applicable)
- Notes the outcome and next steps (if applicable)
@@ -398,17 +382,15 @@ ${isRejected
- Sounds natural and human-written (not AI-generated)`}

**CRITICAL CHARACTER LIMIT - STRICT REQUIREMENT:**
- Your response MUST stay within ${maxLength} characters (not words, CHARACTERS including spaces including HTML tags)
- This is a HARD LIMIT - you must count your characters and ensure your complete response fits within ${maxLength} characters
- Count your characters carefully before responding - include all HTML tags in your count
- Your response MUST be EXACTLY within ${maxLength} characters (not words, CHARACTERS including spaces)
- Count your characters carefully before responding
- If you have too much content, PRIORITIZE the most important information:
  1. Final decision (approved/rejected)
  2. Key approvers and their decisions
  3. Critical TAT breaches (if any)
  4. Brief summary of the request
- OMIT less important details to fit within the limit rather than exceeding it
- Better to be concise and complete within the limit than to exceed it
- IMPORTANT: Generate your complete response within this limit - do not generate partial content that exceeds the limit
- Better to be concise than to exceed the limit

**WRITING GUIDELINES:**
- Be concise and direct - every word must add value
@@ -450,13 +432,13 @@ Write the conclusion now in HTML format. STRICT LIMIT: ${maxLength} characters m
   */
  private extractKeyPoints(remark: string): string[] {
    const keyPoints: string[] = [];

    // Look for bullet points (-, •, *) or numbered items (1., 2., etc.)
    const lines = remark.split('\n');

    for (const line of lines) {
      const trimmed = line.trim();

      // Match bullet points
      if (trimmed.match(/^[-•*]\s+(.+)$/)) {
        const point = trimmed.replace(/^[-•*]\s+/, '');
@@ -464,7 +446,7 @@ Write the conclusion now in HTML format. STRICT LIMIT: ${maxLength} characters m
          keyPoints.push(point);
        }
      }

      // Match numbered items
      if (trimmed.match(/^\d+\.\s+(.+)$/)) {
        const point = trimmed.replace(/^\d+\.\s+/, '');
@@ -473,13 +455,13 @@ Write the conclusion now in HTML format. STRICT LIMIT: ${maxLength} characters m
        }
      }
    }

    // If no bullet points found, extract first few sentences
    if (keyPoints.length === 0) {
      const sentences = remark.split(/[.!?]+/).filter(s => s.trim().length > 20);
      keyPoints.push(...sentences.slice(0, 3).map(s => s.trim()));
    }

    return keyPoints.slice(0, 5); // Max 5 key points
  }

@@ -488,22 +470,22 @@ Write the conclusion now in HTML format. STRICT LIMIT: ${maxLength} characters m
   */
  private calculateConfidence(remark: string, context: any): number {
    let score = 0.6; // Base score

    // Check if remark has good length (100-400 chars - more realistic)
    if (remark.length >= 100 && remark.length <= 400) {
      score += 0.2;
    }

    // Check if remark mentions key elements
    if (remark.toLowerCase().includes('approv')) {
      score += 0.1;
    }

    // Check if remark is not too generic
    if (remark.length > 80 && !remark.toLowerCase().includes('lorem ipsum')) {
      score += 0.1;
    }

    return Math.min(1.0, score);
  }
@@ -72,7 +72,7 @@ export class EmailService {
  private async initializeTestAccount(): Promise<void> {
    try {
      this.testAccountInfo = await nodemailer.createTestAccount();

      this.transporter = nodemailer.createTransport({
        host: this.testAccountInfo.smtp.host,
        port: this.testAccountInfo.smtp.port,
@@ -100,18 +100,6 @@ export class EmailService {
      await this.initialize();
    }

    // If using test account, check if SMTP credentials are now available and re-initialize
    if (this.useTestAccount) {
      const smtpHost = process.env.SMTP_HOST;
      const smtpUser = process.env.SMTP_USER;
      const smtpPassword = process.env.SMTP_PASSWORD;

      if (smtpHost && smtpUser && smtpPassword) {
        logger.info('📧 SMTP credentials detected - re-initializing email service with production SMTP');
        await this.initialize();
      }
    }

    const recipients = Array.isArray(options.to) ? options.to.join(', ') : options.to;
    const fromAddress = process.env.EMAIL_FROM || 'RE Flow <noreply@royalenfield.com>';

@@ -132,11 +120,11 @@ export class EmailService {
    for (let attempt = 1; attempt <= maxRetries; attempt++) {
      try {
        const info = await this.transporter!.sendMail(mailOptions);

        if (!info || !info.messageId) {
          throw new Error('Email sent but no messageId returned');
        }

        const result: { messageId: string; previewUrl?: string } = {
          messageId: info.messageId
        };
@@ -145,10 +133,10 @@ export class EmailService {
        if (this.useTestAccount) {
          try {
            const previewUrl = nodemailer.getTestMessageUrl(info);

            if (previewUrl) {
              result.previewUrl = previewUrl;

              // Always log to console for visibility
              console.log('\n' + '='.repeat(80));
              console.log(`📧 EMAIL PREVIEW (${options.subject})`);
@@ -156,7 +144,7 @@ export class EmailService {
              console.log(`Preview URL: ${previewUrl}`);
              console.log(`Message ID: ${info.messageId}`);
              console.log('='.repeat(80) + '\n');

              logger.info(`✅ Email sent (TEST MODE) to ${recipients}`);
              logger.info(`📧 Preview URL: ${previewUrl}`);
            } else {
@@ -178,7 +166,7 @@ export class EmailService {
      } catch (error) {
        lastError = error;
        logger.error(`❌ Email send attempt ${attempt}/${maxRetries} failed:`, error);

        if (attempt < maxRetries) {
          const delay = parseInt(process.env.EMAIL_RETRY_DELAY || '5000') * attempt;
          logger.info(`⏳ Retrying in ${delay}ms...`);
@@ -197,22 +185,22 @@ export class EmailService {
   */
  async sendBatch(emails: EmailOptions[]): Promise<void> {
    logger.info(`📧 Sending batch of ${emails.length} emails`);

    const batchSize = parseInt(process.env.EMAIL_BATCH_SIZE || '10');

    for (let i = 0; i < emails.length; i += batchSize) {
      const batch = emails.slice(i, i + batchSize);

      await Promise.allSettled(
        batch.map(email => this.sendEmail(email))
      );

      // Small delay between batches to avoid rate limiting
      if (i + batchSize < emails.length) {
        await new Promise(resolve => setTimeout(resolve, 1000));
      }
    }

    logger.info(`✅ Batch email sending complete`);
  }

@@ -245,8 +233,6 @@ export class EmailService {
export const emailService = new EmailService();

// Initialize on import (will use test account if SMTP not configured)
// Note: If secrets are loaded later, the service will re-initialize automatically
// when sendEmail is called (if SMTP credentials become available)
emailService.initialize().catch(error => {
  logger.error('Failed to initialize email service:', error);
});
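As a worked example of the retry delay above: with the default EMAIL_RETRY_DELAY of 5000 ms, a failed send waits 5000 ms after attempt 1, 10000 ms after attempt 2, and so on, since the delay is multiplied by the attempt number; the value of maxRetries itself is configured elsewhere in the service and is not shown in this hunk.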
@@ -101,7 +101,7 @@ export class EmailNotificationService {
    };

    const html = getRequestCreatedEmail(data);
    const subject = `${requestData.requestNumber} - ${requestData.title} - Request Created Successfully`;
    const subject = `[${requestData.requestNumber}] Request Created Successfully`;

    const result = await emailService.sendEmail({
      to: initiatorData.email,
@@ -144,9 +144,9 @@ export class EmailNotificationService {
      // Multi-level approval email
      const chainData: ApprovalChainItem[] = approvalChain.map((level: any) => ({
        name: level.approverName || level.approverEmail,
        status: level.status === 'APPROVED' ? 'approved'
          : level.levelNumber === approverData.levelNumber ? 'current'
          : level.levelNumber < approverData.levelNumber ? 'pending'
        status: level.status === 'APPROVED' ? 'approved'
          : level.levelNumber === approverData.levelNumber ? 'current'
          : level.levelNumber < approverData.levelNumber ? 'pending'
          : 'awaiting',
        date: level.approvedAt ? this.formatDate(level.approvedAt) : undefined,
        levelNumber: level.levelNumber
@@ -170,7 +170,7 @@ export class EmailNotificationService {
      };

      const html = getMultiApproverRequestEmail(data);
      const subject = `${requestData.requestNumber} - ${requestData.title} - Multi-Level Approval Request - Your Turn`;
      const subject = `[${requestData.requestNumber}] Multi-Level Approval Request - Your Turn`;

      const result = await emailService.sendEmail({
        to: approverData.email,
@@ -198,7 +198,7 @@ export class EmailNotificationService {
      };

      const html = getApprovalRequestEmail(data);
      const subject = `${requestData.requestNumber} - ${requestData.title} - Approval Request - Action Required`;
      const subject = `[${requestData.requestNumber}] Approval Request - Action Required`;

      const result = await emailService.sendEmail({
        to: approverData.email,
@@ -252,7 +252,7 @@ export class EmailNotificationService {
    };

    const html = getApprovalConfirmationEmail(data);
    const subject = `${requestData.requestNumber} - ${requestData.title} - Request Approved${isFinalApproval ? ' - All Approvals Complete' : ''}`;
    const subject = `[${requestData.requestNumber}] Request Approved${isFinalApproval ? ' - All Approvals Complete' : ''}`;

    const result = await emailService.sendEmail({
      to: initiatorData.email,
@@ -303,7 +303,7 @@ export class EmailNotificationService {
    };

    const html = getRejectionNotificationEmail(data);
    const subject = `${requestData.requestNumber} - ${requestData.title} - Request Rejected`;
    const subject = `[${requestData.requestNumber}] Request Rejected`;

    const result = await emailService.sendEmail({
      to: initiatorData.email,
@@ -344,9 +344,9 @@ export class EmailNotificationService {
    }

    // Determine urgency level based on threshold
    const urgencyLevel = tatInfo.thresholdPercentage >= 75 ? 'high'
      : tatInfo.thresholdPercentage >= 50 ? 'medium'
      : 'low';
    const urgencyLevel = tatInfo.thresholdPercentage >= 75 ? 'high'
      : tatInfo.thresholdPercentage >= 50 ? 'medium'
      : 'low';

    // Get initiator name - try from requestData first, then fetch if needed
    let initiatorName = requestData.initiatorName || requestData.initiator?.displayName || 'Initiator';
@@ -379,7 +379,7 @@ export class EmailNotificationService {
    };

    const html = getTATReminderEmail(data);
    const subject = `${requestData.requestNumber} - ${requestData.title} - TAT Reminder - ${tatInfo.thresholdPercentage}% Elapsed`;
    const subject = `[${requestData.requestNumber}] TAT Reminder - ${tatInfo.thresholdPercentage}% Elapsed`;

    const result = await emailService.sendEmail({
      to: approverData.email,
@@ -449,7 +449,7 @@ export class EmailNotificationService {
    };

    const html = getTATBreachedEmail(data);
    const subject = `${requestData.requestNumber} - ${requestData.title} - TAT BREACHED - Immediate Action Required`;
    const subject = `[${requestData.requestNumber}] TAT BREACHED - Immediate Action Required`;

    const result = await emailService.sendEmail({
      to: approverData.email,
@@ -496,8 +496,8 @@ export class EmailNotificationService {
    }

    const isAutoResumed = !resumedByData || resumedByData.userId === 'system';
    const resumedByText = isAutoResumed
      ? 'automatically'
    const resumedByText = isAutoResumed
      ? 'automatically'
      : `by ${resumedByData.displayName || resumedByData.email}`;

    const data: WorkflowResumedData = {
@@ -509,7 +509,7 @@ export class EmailNotificationService {
      resumedTime: this.formatTime(new Date()),
      pausedDuration: pauseDuration,
      currentApprover: approverData.displayName || approverData.email,
      newTATDeadline: requestData.tatDeadline
      newTATDeadline: requestData.tatDeadline
        ? this.formatDate(requestData.tatDeadline) + ' ' + this.formatTime(requestData.tatDeadline)
        : 'To be determined',
      isApprover: true,
@@ -518,7 +518,7 @@ export class EmailNotificationService {
    };

    const html = getWorkflowResumedEmail(data);
    const subject = `${requestData.requestNumber} - ${requestData.title} - Workflow Resumed - Action Required`;
    const subject = `[${requestData.requestNumber}] Workflow Resumed - Action Required`;

    const result = await emailService.sendEmail({
      to: approverData.email,
@@ -565,8 +565,8 @@ export class EmailNotificationService {
    }

    const isAutoResumed = !resumedByData || resumedByData.userId === 'system' || !resumedByData.userId;
    const resumedByText = isAutoResumed
      ? 'automatically'
    const resumedByText = isAutoResumed
      ? 'automatically'
      : `by ${resumedByData.displayName || resumedByData.email || resumedByData.name || 'User'}`;

    const data: WorkflowResumedData = {
@@ -578,7 +578,7 @@ export class EmailNotificationService {
      resumedTime: this.formatTime(new Date()),
      pausedDuration: pauseDuration,
      currentApprover: approverData?.displayName || approverData?.email || 'Current Approver',
      newTATDeadline: requestData.tatDeadline
      newTATDeadline: requestData.tatDeadline
        ? this.formatDate(requestData.tatDeadline) + ' ' + this.formatTime(requestData.tatDeadline)
        : 'To be determined',
      isApprover: false, // This is for initiator
@@ -587,7 +587,7 @@ export class EmailNotificationService {
    };

    const html = getWorkflowResumedEmail(data);
    const subject = `${requestData.requestNumber} - ${requestData.title} - Workflow Resumed`;
    const subject = `[${requestData.requestNumber}] Workflow Resumed`;

    const result = await emailService.sendEmail({
      to: initiatorData.email,
@@ -665,7 +665,7 @@ export class EmailNotificationService {
    };

    const html = getRequestClosedEmail(data);
    const subject = `${requestData.requestNumber} - ${requestData.title} - Request Closed`;
    const subject = `[${requestData.requestNumber}] Request Closed`;

    const result = await emailService.sendEmail({
      to: recipientData.email,
@@ -690,7 +690,7 @@ export class EmailNotificationService {
    closureData: any
  ): Promise<void> {
    logger.info(`📧 Sending Request Closed emails to ${participants.length} participants`);

    for (const participant of participants) {
      await this.sendRequestClosed(requestData, participant, closureData);
      // Small delay to avoid rate limiting
@@ -734,7 +734,7 @@ export class EmailNotificationService {
    };

    const html = getApproverSkippedEmail(data);
    const subject = `${requestData.requestNumber} - ${requestData.title} - Approver Skipped`;
    const subject = `[${requestData.requestNumber}] Approver Skipped`;

    const result = await emailService.sendEmail({
      to: skippedApproverData.email,
@@ -794,7 +794,7 @@ export class EmailNotificationService {
    };

    const html = getWorkflowPausedEmail(data);
    const subject = `${requestData.requestNumber} - ${requestData.title} - Workflow Paused`;
    const subject = `[${requestData.requestNumber}] Workflow Paused`;

    const result = await emailService.sendEmail({
      to: recipientData.email,
File diff suppressed because it is too large