activity creation, draft edit/create, and websocket implementation done

This commit is contained in:
laxmanhalaki 2025-10-31 20:01:26 +05:30
parent b6fe3a1e83
commit eb0bca123b
30 changed files with 1737 additions and 135 deletions

249
package-lock.json generated
View File

@ -27,7 +27,9 @@
"pg": "^8.13.1",
"pg-hstore": "^2.3.4",
"sequelize": "^6.37.5",
"socket.io": "^4.8.1",
"uuid": "^8.3.2",
"web-push": "^3.6.7",
"winston": "^3.17.0",
"zod": "^3.24.1"
},
@ -44,6 +46,7 @@
"@types/passport": "^1.0.16",
"@types/passport-jwt": "^4.0.1",
"@types/supertest": "^6.0.2",
"@types/web-push": "^3.6.4",
"@typescript-eslint/eslint-plugin": "^8.19.1",
"@typescript-eslint/parser": "^8.19.1",
"eslint": "^9.17.0",
@ -1599,6 +1602,12 @@
"text-hex": "1.0.x"
}
},
"node_modules/@socket.io/component-emitter": {
"version": "3.1.2",
"resolved": "https://registry.npmjs.org/@socket.io/component-emitter/-/component-emitter-3.1.2.tgz",
"integrity": "sha512-9BCxFwvbGg/RsZK9tjXd8s4UcwR0MWeFQ1XEKIQVVvAGJyINdrqKMcTRyLoK8Rse1GjzLV9cwjWV1olXRWEXVA==",
"license": "MIT"
},
"node_modules/@tootallnate/once": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/@tootallnate/once/-/once-2.0.0.tgz",
@ -1736,7 +1745,6 @@
"version": "2.8.19",
"resolved": "https://registry.npmjs.org/@types/cors/-/cors-2.8.19.tgz",
"integrity": "sha512-mFNylyeyqN93lfe/9CSxOGREz8cpzAhH+E93xJ4xWQf62V8sQ/24reV2nyzUWM6H6Xji+GGHpkbLe7pVoUEskg==",
"dev": true,
"license": "MIT",
"dependencies": {
"@types/node": "*"
@ -2082,6 +2090,16 @@
"integrity": "sha512-LSFfpSnJJY9wbC0LQxgvfb+ynbHftFo0tMsFOl/J4wexLnYMmDSPaj2ZyDv3TkfL1UePxPrxOWJfbiRS8mQv7A==",
"license": "MIT"
},
"node_modules/@types/web-push": {
"version": "3.6.4",
"resolved": "https://registry.npmjs.org/@types/web-push/-/web-push-3.6.4.tgz",
"integrity": "sha512-GnJmSr40H3RAnj0s34FNTcJi1hmWFV5KXugE0mYWnYhgTAHLJ/dJKAwDmvPJYMke0RplY2XE9LnM4hqSqKIjhQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"@types/node": "*"
}
},
"node_modules/@types/yargs": {
"version": "17.0.34",
"resolved": "https://registry.npmjs.org/@types/yargs/-/yargs-17.0.34.tgz",
@ -2529,6 +2547,18 @@
"dev": true,
"license": "MIT"
},
"node_modules/asn1.js": {
"version": "5.4.1",
"resolved": "https://registry.npmjs.org/asn1.js/-/asn1.js-5.4.1.tgz",
"integrity": "sha512-+I//4cYPccV8LdmBLiX8CYvf9Sp3vQsrqu2QNXRcrbiWvcx/UdlFiqUJJzxRQxgsZmvhXhn4cSKeSmoFjVdupA==",
"license": "MIT",
"dependencies": {
"bn.js": "^4.0.0",
"inherits": "^2.0.1",
"minimalistic-assert": "^1.0.0",
"safer-buffer": "^2.1.0"
}
},
"node_modules/async": {
"version": "3.2.6",
"resolved": "https://registry.npmjs.org/async/-/async-3.2.6.tgz",
@ -2724,6 +2754,15 @@
],
"license": "MIT"
},
"node_modules/base64id": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/base64id/-/base64id-2.0.0.tgz",
"integrity": "sha512-lGe34o6EHj9y3Kts9R4ZYs/Gr+6N7MCaMlIFA3F1R2O5/m7K06AxfSeO5530PEERE6/WyEg3lsuyw4GHlPZHog==",
"license": "MIT",
"engines": {
"node": "^4.5.0 || >= 5.9"
}
},
"node_modules/baseline-browser-mapping": {
"version": "2.8.20",
"resolved": "https://registry.npmjs.org/baseline-browser-mapping/-/baseline-browser-mapping-2.8.20.tgz",
@ -2787,6 +2826,12 @@
"dev": true,
"license": "MIT"
},
"node_modules/bn.js": {
"version": "4.12.2",
"resolved": "https://registry.npmjs.org/bn.js/-/bn.js-4.12.2.tgz",
"integrity": "sha512-n4DSx829VRTRByMRGdjQ9iqsN0Bh4OolPsFnaZBLcbi8iXcB+kJ9s7EnRt4wILZNV3kPLHkRVfOc/HvhC3ovDw==",
"license": "MIT"
},
"node_modules/body-parser": {
"version": "1.20.3",
"resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.20.3.tgz",
@ -3714,6 +3759,61 @@
"once": "^1.4.0"
}
},
"node_modules/engine.io": {
"version": "6.6.4",
"resolved": "https://registry.npmjs.org/engine.io/-/engine.io-6.6.4.tgz",
"integrity": "sha512-ZCkIjSYNDyGn0R6ewHDtXgns/Zre/NT6Agvq1/WobF7JXgFff4SeDroKiCO3fNJreU9YG429Sc81o4w5ok/W5g==",
"license": "MIT",
"dependencies": {
"@types/cors": "^2.8.12",
"@types/node": ">=10.0.0",
"accepts": "~1.3.4",
"base64id": "2.0.0",
"cookie": "~0.7.2",
"cors": "~2.8.5",
"debug": "~4.3.1",
"engine.io-parser": "~5.2.1",
"ws": "~8.17.1"
},
"engines": {
"node": ">=10.2.0"
}
},
"node_modules/engine.io-parser": {
"version": "5.2.3",
"resolved": "https://registry.npmjs.org/engine.io-parser/-/engine.io-parser-5.2.3.tgz",
"integrity": "sha512-HqD3yTBfnBxIrbnM1DoD6Pcq8NECnh8d4As1Qgh0z5Gg3jRRIqijury0CL3ghu/edArpUYiYqQiDUQBIs4np3Q==",
"license": "MIT",
"engines": {
"node": ">=10.0.0"
}
},
"node_modules/engine.io/node_modules/cookie": {
"version": "0.7.2",
"resolved": "https://registry.npmjs.org/cookie/-/cookie-0.7.2.tgz",
"integrity": "sha512-yki5XnKuf750l50uGTllt6kKILY4nQ1eNIQatoXEByZ5dWgnKqbnqmTrBE5B4N7lrMJKQ2ytWMiTO2o0v6Ew/w==",
"license": "MIT",
"engines": {
"node": ">= 0.6"
}
},
"node_modules/engine.io/node_modules/debug": {
"version": "4.3.7",
"resolved": "https://registry.npmjs.org/debug/-/debug-4.3.7.tgz",
"integrity": "sha512-Er2nc/H7RrMXZBFCEim6TCmMk02Z8vLC2Rbi1KEBggpo0fS6l0S1nnapwmIi3yW/+GOJap1Krg4w0Hg80oCqgQ==",
"license": "MIT",
"dependencies": {
"ms": "^2.1.3"
},
"engines": {
"node": ">=6.0"
},
"peerDependenciesMeta": {
"supports-color": {
"optional": true
}
}
},
"node_modules/error-ex": {
"version": "1.3.4",
"resolved": "https://registry.npmjs.org/error-ex/-/error-ex-1.3.4.tgz",
@ -4893,6 +4993,15 @@
"dev": true,
"license": "MIT"
},
"node_modules/http_ece": {
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/http_ece/-/http_ece-1.2.0.tgz",
"integrity": "sha512-JrF8SSLVmcvc5NducxgyOrKXe3EsyHMgBFgSaIUGmArKe+rwr0uphRkRXvwiom3I+fpIfoItveHrfudL8/rxuA==",
"license": "MIT",
"engines": {
"node": ">=16"
}
},
"node_modules/http-errors": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/http-errors/-/http-errors-2.0.0.tgz",
@ -6379,6 +6488,12 @@
"node": ">=6"
}
},
"node_modules/minimalistic-assert": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/minimalistic-assert/-/minimalistic-assert-1.0.1.tgz",
"integrity": "sha512-UtJcAD4yEaGtjPezWuO9wC4nwUnVH/8/Im3yEHQP4b67cXlD/Qr9hdITCU1xDbSEXg2XKNaP8jsReV7vQd00/A==",
"license": "ISC"
},
"node_modules/minimatch": {
"version": "9.0.5",
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz",
@ -8004,6 +8119,98 @@
"node": ">=8"
}
},
"node_modules/socket.io": {
"version": "4.8.1",
"resolved": "https://registry.npmjs.org/socket.io/-/socket.io-4.8.1.tgz",
"integrity": "sha512-oZ7iUCxph8WYRHHcjBEc9unw3adt5CmSNlppj/5Q4k2RIrhl8Z5yY2Xr4j9zj0+wzVZ0bxmYoGSzKJnRl6A4yg==",
"license": "MIT",
"dependencies": {
"accepts": "~1.3.4",
"base64id": "~2.0.0",
"cors": "~2.8.5",
"debug": "~4.3.2",
"engine.io": "~6.6.0",
"socket.io-adapter": "~2.5.2",
"socket.io-parser": "~4.2.4"
},
"engines": {
"node": ">=10.2.0"
}
},
"node_modules/socket.io-adapter": {
"version": "2.5.5",
"resolved": "https://registry.npmjs.org/socket.io-adapter/-/socket.io-adapter-2.5.5.tgz",
"integrity": "sha512-eLDQas5dzPgOWCk9GuuJC2lBqItuhKI4uxGgo9aIV7MYbk2h9Q6uULEh8WBzThoI7l+qU9Ast9fVUmkqPP9wYg==",
"license": "MIT",
"dependencies": {
"debug": "~4.3.4",
"ws": "~8.17.1"
}
},
"node_modules/socket.io-adapter/node_modules/debug": {
"version": "4.3.7",
"resolved": "https://registry.npmjs.org/debug/-/debug-4.3.7.tgz",
"integrity": "sha512-Er2nc/H7RrMXZBFCEim6TCmMk02Z8vLC2Rbi1KEBggpo0fS6l0S1nnapwmIi3yW/+GOJap1Krg4w0Hg80oCqgQ==",
"license": "MIT",
"dependencies": {
"ms": "^2.1.3"
},
"engines": {
"node": ">=6.0"
},
"peerDependenciesMeta": {
"supports-color": {
"optional": true
}
}
},
"node_modules/socket.io-parser": {
"version": "4.2.4",
"resolved": "https://registry.npmjs.org/socket.io-parser/-/socket.io-parser-4.2.4.tgz",
"integrity": "sha512-/GbIKmo8ioc+NIWIhwdecY0ge+qVBSMdgxGygevmdHj24bsfgtCmcUUcQ5ZzcylGFHsN3k4HB4Cgkl96KVnuew==",
"license": "MIT",
"dependencies": {
"@socket.io/component-emitter": "~3.1.0",
"debug": "~4.3.1"
},
"engines": {
"node": ">=10.0.0"
}
},
"node_modules/socket.io-parser/node_modules/debug": {
"version": "4.3.7",
"resolved": "https://registry.npmjs.org/debug/-/debug-4.3.7.tgz",
"integrity": "sha512-Er2nc/H7RrMXZBFCEim6TCmMk02Z8vLC2Rbi1KEBggpo0fS6l0S1nnapwmIi3yW/+GOJap1Krg4w0Hg80oCqgQ==",
"license": "MIT",
"dependencies": {
"ms": "^2.1.3"
},
"engines": {
"node": ">=6.0"
},
"peerDependenciesMeta": {
"supports-color": {
"optional": true
}
}
},
"node_modules/socket.io/node_modules/debug": {
"version": "4.3.7",
"resolved": "https://registry.npmjs.org/debug/-/debug-4.3.7.tgz",
"integrity": "sha512-Er2nc/H7RrMXZBFCEim6TCmMk02Z8vLC2Rbi1KEBggpo0fS6l0S1nnapwmIi3yW/+GOJap1Krg4w0Hg80oCqgQ==",
"license": "MIT",
"dependencies": {
"ms": "^2.1.3"
},
"engines": {
"node": ">=6.0"
},
"peerDependenciesMeta": {
"supports-color": {
"optional": true
}
}
},
"node_modules/source-map": {
"version": "0.6.1",
"resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz",
@ -8958,6 +9165,25 @@
"makeerror": "1.0.12"
}
},
"node_modules/web-push": {
"version": "3.6.7",
"resolved": "https://registry.npmjs.org/web-push/-/web-push-3.6.7.tgz",
"integrity": "sha512-OpiIUe8cuGjrj3mMBFWY+e4MMIkW3SVT+7vEIjvD9kejGUypv8GPDf84JdPWskK8zMRIJ6xYGm+Kxr8YkPyA0A==",
"license": "MPL-2.0",
"dependencies": {
"asn1.js": "^5.3.0",
"http_ece": "1.2.0",
"https-proxy-agent": "^7.0.0",
"jws": "^4.0.0",
"minimist": "^1.2.5"
},
"bin": {
"web-push": "src/cli.js"
},
"engines": {
"node": ">= 16"
}
},
"node_modules/webidl-conversions": {
"version": "3.0.1",
"resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz",
@ -9109,6 +9335,27 @@
"node": "^12.13.0 || ^14.15.0 || >=16.0.0"
}
},
"node_modules/ws": {
"version": "8.17.1",
"resolved": "https://registry.npmjs.org/ws/-/ws-8.17.1.tgz",
"integrity": "sha512-6XQFvXTkbfUOZOKKILFG1PDK2NDQs4azKQl26T0YS5CxqWLgXajbPZ+h4gZekJyRqFU8pvnbAbbs/3TgRPy+GQ==",
"license": "MIT",
"engines": {
"node": ">=10.0.0"
},
"peerDependencies": {
"bufferutil": "^4.0.1",
"utf-8-validate": ">=5.0.2"
},
"peerDependenciesMeta": {
"bufferutil": {
"optional": true
},
"utf-8-validate": {
"optional": true
}
}
},
"node_modules/xtend": {
"version": "4.0.2",
"resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz",

View File

@ -43,7 +43,9 @@
"pg": "^8.13.1",
"pg-hstore": "^2.3.4",
"sequelize": "^6.37.5",
"socket.io": "^4.8.1",
"uuid": "^8.3.2",
"web-push": "^3.6.7",
"winston": "^3.17.0",
"zod": "^3.24.1"
},
@ -60,6 +62,7 @@
"@types/passport": "^1.0.16",
"@types/passport-jwt": "^4.0.1",
"@types/supertest": "^6.0.2",
"@types/web-push": "^3.6.4",
"@typescript-eslint/eslint-plugin": "^8.19.1",
"@typescript-eslint/parser": "^8.19.1",
"eslint": "^9.17.0",

View File

@ -198,6 +198,76 @@ export class WorkflowController {
}
}
// Multipart update for drafts: accepts payload JSON and files[]
async updateWorkflowMultipart(req: AuthenticatedRequest, res: Response): Promise<void> {
try {
const userId = req.user?.userId;
if (!userId) {
ResponseHandler.error(res, 'Unauthorized', 401);
return;
}
const { id } = req.params;
const raw = String(req.body?.payload || '');
if (!raw) {
ResponseHandler.error(res, 'payload is required', 400);
return;
}
const parsed = JSON.parse(raw);
const validated = validateUpdateWorkflow(parsed);
const updateData: UpdateWorkflowRequest = { ...validated } as any;
if (validated.priority) {
updateData.priority = validated.priority === 'EXPRESS' ? Priority.EXPRESS : Priority.STANDARD;
}
// Update workflow
const workflow = await workflowService.updateWorkflow(id, updateData);
if (!workflow) {
ResponseHandler.notFound(res, 'Workflow not found');
return;
}
// Attach new files as documents
const files = (req as any).files as Express.Multer.File[] | undefined;
const category = (req.body?.category as string) || 'SUPPORTING';
const docs: any[] = [];
if (files && files.length > 0) {
const actualRequestId = (workflow as any).requestId;
for (const file of files) {
const buffer = fs.readFileSync(file.path);
const checksum = crypto.createHash('sha256').update(buffer).digest('hex');
const extension = path.extname(file.originalname).replace('.', '').toLowerCase();
const doc = await Document.create({
requestId: actualRequestId,
uploadedBy: userId,
fileName: path.basename(file.filename || file.originalname),
originalFileName: file.originalname,
fileType: extension,
fileExtension: extension,
fileSize: file.size,
filePath: file.path,
storageUrl: `/uploads/${path.basename(file.path)}`,
mimeType: file.mimetype,
checksum,
isGoogleDoc: false,
googleDocUrl: null as any,
category: category || 'OTHER',
version: 1,
parentDocumentId: null as any,
isDeleted: false,
downloadCount: 0,
} as any);
docs.push(doc);
}
}
ResponseHandler.success(res, { workflow, newDocuments: docs }, 'Workflow updated with documents', 200);
} catch (error) {
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
ResponseHandler.error(res, 'Failed to update workflow', 400, errorMessage);
}
}
async submitWorkflow(req: Request, res: Response): Promise<void> {
try {
const { id } = req.params;

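Not part of the diff: a client-side sketch of calling the new multipart draft-update endpoint. It assumes the workflow router is mounted at /api/workflows and that auth uses a Bearer token; neither is shown in this commit.

async function updateDraftWithFiles(
  id: string,                 // requestId UUID or requestNumber
  payload: object,            // validated by validateUpdateWorkflow on the server
  files: File[],
  token: string
): Promise<unknown> {
  const form = new FormData();
  form.append('payload', JSON.stringify(payload)); // controller reads req.body.payload
  form.append('category', 'SUPPORTING');           // optional; server defaults to SUPPORTING
  for (const file of files) {
    form.append('files', file);                    // matches upload.array('files')
  }
  const res = await fetch(`/api/workflows/${id}/multipart`, {
    method: 'PUT',
    headers: { Authorization: `Bearer ${token}` },
    body: form,
  });
  if (!res.ok) throw new Error(`Update failed: ${res.status}`);
  return res.json(); // { workflow, newDocuments }
}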
View File

@ -0,0 +1,28 @@
import type { Request, Response } from 'express';
import { workNoteService } from '../services/worknote.service';
import { WorkflowService } from '../services/workflow.service';
export class WorkNoteController {
private workflowService = new WorkflowService();
async list(req: any, res: Response): Promise<void> {
const wf = await (this.workflowService as any).findWorkflowByIdentifier(req.params.id);
if (!wf) { res.status(404).json({ success: false, error: 'Not found' }); return; }
const requestId: string = wf.getDataValue('requestId');
const rows = await workNoteService.list(requestId);
res.json({ success: true, data: rows });
}
async create(req: any, res: Response): Promise<void> {
const wf = await (this.workflowService as any).findWorkflowByIdentifier(req.params.id);
if (!wf) { res.status(404).json({ success: false, error: 'Not found' }); return; }
const requestId: string = wf.getDataValue('requestId');
const user = { userId: req.user?.userId, name: req.user?.displayName };
const payload = req.body?.payload ? JSON.parse(req.body.payload) : (req.body || {});
const files = (req.files as any[])?.map(f => ({ path: f.path, originalname: f.originalname, mimetype: f.mimetype, size: f.size })) || [];
const note = await workNoteService.create(requestId, user, payload, files);
res.status(201).json({ success: true, data: note });
}
}

View File

@ -5,31 +5,83 @@ import { Op } from 'sequelize';
type AllowedType = 'INITIATOR' | 'APPROVER' | 'SPECTATOR';
// Helper to check if identifier is UUID or requestNumber
function isUuid(identifier: string): boolean {
const uuidRegex = /^[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$/i;
return uuidRegex.test(identifier);
}
// Helper to find workflow by either requestId or requestNumber
async function findWorkflowByIdentifier(identifier: string): Promise<WorkflowRequest | null> {
if (isUuid(identifier)) {
return await WorkflowRequest.findByPk(identifier);
} else {
return await WorkflowRequest.findOne({
where: { requestNumber: identifier }
});
}
}
export function requireParticipantTypes(allowed: AllowedType[]) {
return async (req: Request, res: Response, next: NextFunction) => {
try {
const userId: string | undefined = (req as any).user?.userId || (req as any).user?.id;
const requestId: string | undefined = (req.params as any)?.id;
if (!userId || !requestId) {
const requestIdentifier: string | undefined = (req.params as any)?.id;
if (!userId || !requestIdentifier) {
return res.status(403).json({ success: false, error: 'Forbidden' });
}
// Resolve requestIdentifier to actual requestId (UUID)
const workflow = await findWorkflowByIdentifier(requestIdentifier);
if (!workflow) {
return res.status(404).json({ success: false, error: 'Workflow not found' });
}
const actualRequestId = (workflow as any).requestId;
// Check initiator
if (allowed.includes('INITIATOR')) {
const wf = await WorkflowRequest.findByPk(requestId);
if (wf && (wf as any).initiatorId === userId) {
if ((workflow as any).initiatorId === userId) {
return next();
}
}
// Check participants table for APPROVER / SPECTATOR
const rolesToCheck = allowed.filter(r => r !== 'INITIATOR');
if (rolesToCheck.length > 0) {
// Check participants table for SPECTATOR
if (allowed.includes('SPECTATOR')) {
const participant = await Participant.findOne({
where: {
requestId,
requestId: actualRequestId,
userId,
participantType: { [Op.in]: rolesToCheck as any },
participantType: 'SPECTATOR',
isActive: true
},
});
if (participant) {
return next();
}
}
// For APPROVER role, check ApprovalLevel table
// This is the primary source of truth for approvers
if (allowed.includes('APPROVER')) {
const { ApprovalLevel } = await import('@models/ApprovalLevel');
const approvalLevel = await ApprovalLevel.findOne({
where: {
requestId: actualRequestId,
approverId: userId,
status: { [Op.in]: ['PENDING', 'IN_PROGRESS'] as any }
}
});
if (approvalLevel) {
return next();
}
// Fallback: also check Participants table (some approvers might be added there)
const participant = await Participant.findOne({
where: {
requestId: actualRequestId,
userId,
participantType: 'APPROVER',
isActive: true
},
});
if (participant) {
@ -39,6 +91,7 @@ export function requireParticipantTypes(allowed: AllowedType[]) {
return res.status(403).json({ success: false, error: 'Insufficient permissions' });
} catch (err) {
console.error('Authorization check error:', err);
return res.status(500).json({ success: false, error: 'Authorization check failed' });
}
};
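
Not part of the diff: a minimal sketch of attaching the reworked requireParticipantTypes middleware to a route. The middleware import path and the example route are assumptions; only authenticateToken's path appears elsewhere in this commit.

import { Router } from 'express';
import { authenticateToken } from '../middlewares/auth.middleware';
import { requireParticipantTypes } from '../middlewares/participant.middleware'; // path assumed

const router = Router();

// :id may be either the request UUID or the requestNumber; the middleware
// resolves it before checking INITIATOR / APPROVER / SPECTATOR access.
router.get(
  '/:id/documents', // illustrative route, not defined in this commit
  authenticateToken,
  requireParticipantTypes(['INITIATOR', 'APPROVER', 'SPECTATOR']),
  (_req, res) => {
    res.json({ success: true });
  }
);

export default router;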

View File

@ -2,10 +2,18 @@ import { QueryInterface, DataTypes } from 'sequelize';
export async function up(queryInterface: QueryInterface): Promise<void> {
// Enums
await queryInterface.sequelize.query("CREATE TYPE enum_priority AS ENUM ('STANDARD','EXPRESS');");
await queryInterface.sequelize.query(
"CREATE TYPE enum_workflow_status AS ENUM ('DRAFT','PENDING','IN_PROGRESS','APPROVED','REJECTED','CLOSED');"
);
await queryInterface.sequelize.query(`DO $$
BEGIN
IF NOT EXISTS (SELECT 1 FROM pg_type WHERE typname = 'enum_priority') THEN
CREATE TYPE enum_priority AS ENUM ('STANDARD','EXPRESS');
END IF;
END$$;`);
await queryInterface.sequelize.query(`DO $$
BEGIN
IF NOT EXISTS (SELECT 1 FROM pg_type WHERE typname = 'enum_workflow_status') THEN
CREATE TYPE enum_workflow_status AS ENUM ('DRAFT','PENDING','IN_PROGRESS','APPROVED','REJECTED','CLOSED');
END IF;
END$$;`);
await queryInterface.createTable('workflow_requests', {
request_id: { type: DataTypes.UUID, primaryKey: true, defaultValue: DataTypes.UUIDV4 },
@ -29,9 +37,9 @@ export async function up(queryInterface: QueryInterface): Promise<void> {
updated_at: { type: DataTypes.DATE, allowNull: false, defaultValue: DataTypes.NOW },
});
await queryInterface.addIndex('workflow_requests', ['initiator_id']);
await queryInterface.addIndex('workflow_requests', ['status']);
await queryInterface.addIndex('workflow_requests', ['created_at']);
await queryInterface.sequelize.query('CREATE INDEX IF NOT EXISTS "workflow_requests_initiator_id" ON "workflow_requests" ("initiator_id");');
await queryInterface.sequelize.query('CREATE INDEX IF NOT EXISTS "workflow_requests_status" ON "workflow_requests" ("status");');
await queryInterface.sequelize.query('CREATE INDEX IF NOT EXISTS "workflow_requests_created_at" ON "workflow_requests" ("created_at");');
}
export async function down(queryInterface: QueryInterface): Promise<void> {

View File

@ -1,9 +1,12 @@
import { QueryInterface, DataTypes } from 'sequelize';
export async function up(queryInterface: QueryInterface): Promise<void> {
await queryInterface.sequelize.query(
"CREATE TYPE enum_approval_status AS ENUM ('PENDING','IN_PROGRESS','APPROVED','REJECTED','SKIPPED');"
);
await queryInterface.sequelize.query(`DO $$
BEGIN
IF NOT EXISTS (SELECT 1 FROM pg_type WHERE typname = 'enum_approval_status') THEN
CREATE TYPE enum_approval_status AS ENUM ('PENDING','IN_PROGRESS','APPROVED','REJECTED','SKIPPED');
END IF;
END$$;`);
await queryInterface.createTable('approval_levels', {
level_id: { type: DataTypes.UUID, primaryKey: true, defaultValue: DataTypes.UUIDV4 },
@ -29,14 +32,17 @@ export async function up(queryInterface: QueryInterface): Promise<void> {
updated_at: { type: DataTypes.DATE, allowNull: false, defaultValue: DataTypes.NOW },
});
await queryInterface.addIndex('approval_levels', ['request_id']);
await queryInterface.addIndex('approval_levels', ['approver_id']);
await queryInterface.addIndex('approval_levels', ['status']);
await queryInterface.addConstraint('approval_levels', {
fields: ['request_id', 'level_number'],
type: 'unique',
name: 'uq_approval_levels_request_level'
});
await queryInterface.sequelize.query('CREATE INDEX IF NOT EXISTS "approval_levels_request_id" ON "approval_levels" ("request_id");');
await queryInterface.sequelize.query('CREATE INDEX IF NOT EXISTS "approval_levels_approver_id" ON "approval_levels" ("approver_id");');
await queryInterface.sequelize.query('CREATE INDEX IF NOT EXISTS "approval_levels_status" ON "approval_levels" ("status");');
await queryInterface.sequelize.query(`DO $$
BEGIN
IF NOT EXISTS (
SELECT 1 FROM pg_constraint WHERE conname = 'uq_approval_levels_request_level'
) THEN
ALTER TABLE "approval_levels" ADD CONSTRAINT "uq_approval_levels_request_level" UNIQUE ("request_id", "level_number");
END IF;
END$$;`);
}
export async function down(queryInterface: QueryInterface): Promise<void> {

View File

@ -1,9 +1,12 @@
import { QueryInterface, DataTypes } from 'sequelize';
export async function up(queryInterface: QueryInterface): Promise<void> {
await queryInterface.sequelize.query(
"CREATE TYPE enum_participant_type AS ENUM ('SPECTATOR','INITIATOR','APPROVER','CONSULTATION');"
);
await queryInterface.sequelize.query(`DO $$
BEGIN
IF NOT EXISTS (SELECT 1 FROM pg_type WHERE typname = 'enum_participant_type') THEN
CREATE TYPE enum_participant_type AS ENUM ('SPECTATOR','INITIATOR','APPROVER','CONSULTATION');
END IF;
END$$;`);
await queryInterface.createTable('participants', {
participant_id: { type: DataTypes.UUID, primaryKey: true, defaultValue: DataTypes.UUIDV4 },
@ -21,13 +24,16 @@ export async function up(queryInterface: QueryInterface): Promise<void> {
is_active: { type: DataTypes.BOOLEAN, allowNull: false, defaultValue: true },
});
await queryInterface.addIndex('participants', ['request_id']);
await queryInterface.addIndex('participants', ['user_id']);
await queryInterface.addConstraint('participants', {
fields: ['request_id', 'user_id'],
type: 'unique',
name: 'uq_participants_request_user'
});
await queryInterface.sequelize.query('CREATE INDEX IF NOT EXISTS "participants_request_id" ON "participants" ("request_id");');
await queryInterface.sequelize.query('CREATE INDEX IF NOT EXISTS "participants_user_id" ON "participants" ("user_id");');
await queryInterface.sequelize.query(`DO $$
BEGIN
IF NOT EXISTS (
SELECT 1 FROM pg_constraint WHERE conname = 'uq_participants_request_user'
) THEN
ALTER TABLE "participants" ADD CONSTRAINT "uq_participants_request_user" UNIQUE ("request_id", "user_id");
END IF;
END$$;`);
}
export async function down(queryInterface: QueryInterface): Promise<void> {

View File

@ -1,9 +1,12 @@
import { QueryInterface, DataTypes } from 'sequelize';
export async function up(queryInterface: QueryInterface): Promise<void> {
await queryInterface.sequelize.query(
"CREATE TYPE enum_document_category AS ENUM ('SUPPORTING','APPROVAL','REFERENCE','FINAL','OTHER');"
);
await queryInterface.sequelize.query(`DO $$
BEGIN
IF NOT EXISTS (SELECT 1 FROM pg_type WHERE typname = 'enum_document_category') THEN
CREATE TYPE enum_document_category AS ENUM ('SUPPORTING','APPROVAL','REFERENCE','FINAL','OTHER');
END IF;
END$$;`);
await queryInterface.createTable('documents', {
document_id: { type: DataTypes.UUID, primaryKey: true, defaultValue: DataTypes.UUIDV4 },
@ -28,9 +31,9 @@ export async function up(queryInterface: QueryInterface): Promise<void> {
uploaded_at: { type: DataTypes.DATE, allowNull: false, defaultValue: DataTypes.NOW },
});
await queryInterface.addIndex('documents', ['request_id']);
await queryInterface.addIndex('documents', ['uploaded_by']);
await queryInterface.addIndex('documents', ['category']);
await queryInterface.sequelize.query('CREATE INDEX IF NOT EXISTS "documents_request_id" ON "documents" ("request_id");');
await queryInterface.sequelize.query('CREATE INDEX IF NOT EXISTS "documents_uploaded_by" ON "documents" ("uploaded_by");');
await queryInterface.sequelize.query('CREATE INDEX IF NOT EXISTS "documents_category" ON "documents" ("category");');
}
export async function down(queryInterface: QueryInterface): Promise<void> {

View File

@ -0,0 +1,21 @@
import { QueryInterface, DataTypes } from 'sequelize';
module.exports = {
up: async (queryInterface: QueryInterface) => {
await queryInterface.createTable('subscriptions', {
subscription_id: { type: DataTypes.UUID, primaryKey: true, defaultValue: DataTypes.UUIDV4, allowNull: false },
user_id: { type: DataTypes.UUID, allowNull: false },
endpoint: { type: DataTypes.STRING(1000), allowNull: false, unique: true },
p256dh: { type: DataTypes.STRING(255), allowNull: false },
auth: { type: DataTypes.STRING(255), allowNull: false },
user_agent: { type: DataTypes.STRING(500), allowNull: true },
created_at: { type: DataTypes.DATE, allowNull: false, defaultValue: DataTypes.NOW }
});
await queryInterface.sequelize.query('CREATE INDEX IF NOT EXISTS "subscriptions_user_id" ON "subscriptions" ("user_id");');
},
down: async (queryInterface: QueryInterface) => {
await queryInterface.dropTable('subscriptions');
}
};

View File

@ -0,0 +1,29 @@
import { QueryInterface, DataTypes } from 'sequelize';
module.exports = {
up: async (queryInterface: QueryInterface) => {
await queryInterface.createTable('activities', {
activity_id: { type: DataTypes.UUID, primaryKey: true, defaultValue: DataTypes.UUIDV4, allowNull: false },
request_id: { type: DataTypes.UUID, allowNull: false },
user_id: { type: DataTypes.UUID, allowNull: true },
user_name: { type: DataTypes.STRING(255), allowNull: true },
activity_type: { type: DataTypes.STRING(100), allowNull: false },
activity_description: { type: DataTypes.TEXT, allowNull: false },
activity_category: { type: DataTypes.STRING(100), allowNull: true },
severity: { type: DataTypes.STRING(50), allowNull: true },
metadata: { type: DataTypes.JSONB, allowNull: true },
is_system_event: { type: DataTypes.BOOLEAN, allowNull: true },
ip_address: { type: DataTypes.STRING(100), allowNull: true },
user_agent: { type: DataTypes.TEXT, allowNull: true },
created_at: { type: DataTypes.DATE, allowNull: false, defaultValue: DataTypes.NOW }
});
await queryInterface.sequelize.query('CREATE INDEX IF NOT EXISTS "activities_request_id" ON "activities" ("request_id");');
await queryInterface.sequelize.query('CREATE INDEX IF NOT EXISTS "activities_created_at" ON "activities" ("created_at");');
await queryInterface.sequelize.query('CREATE INDEX IF NOT EXISTS "activities_activity_type" ON "activities" ("activity_type");');
},
down: async (queryInterface: QueryInterface) => {
await queryInterface.dropTable('activities');
}
};

View File

@ -0,0 +1,32 @@
import { QueryInterface, DataTypes } from 'sequelize';
module.exports = {
up: async (queryInterface: QueryInterface) => {
await queryInterface.createTable('work_notes', {
note_id: { type: DataTypes.UUID, primaryKey: true, defaultValue: DataTypes.UUIDV4, allowNull: false },
request_id: { type: DataTypes.UUID, allowNull: false },
user_id: { type: DataTypes.UUID, allowNull: false },
user_name: { type: DataTypes.STRING(255), allowNull: true },
user_role: { type: DataTypes.STRING(50), allowNull: true },
message: { type: DataTypes.TEXT, allowNull: false },
message_type: { type: DataTypes.STRING(50), allowNull: true },
is_priority: { type: DataTypes.BOOLEAN, allowNull: true },
has_attachment: { type: DataTypes.BOOLEAN, allowNull: true },
parent_note_id: { type: DataTypes.UUID, allowNull: true },
mentioned_users: { type: DataTypes.ARRAY(DataTypes.UUID), allowNull: true },
reactions: { type: DataTypes.JSONB, allowNull: true },
is_edited: { type: DataTypes.BOOLEAN, allowNull: true },
is_deleted: { type: DataTypes.BOOLEAN, allowNull: true },
created_at: { type: DataTypes.DATE, allowNull: false, defaultValue: DataTypes.NOW },
updated_at: { type: DataTypes.DATE, allowNull: false, defaultValue: DataTypes.NOW }
});
await queryInterface.sequelize.query('CREATE INDEX IF NOT EXISTS "work_notes_request_id" ON "work_notes" ("request_id");');
await queryInterface.sequelize.query('CREATE INDEX IF NOT EXISTS "work_notes_user_id" ON "work_notes" ("user_id");');
await queryInterface.sequelize.query('CREATE INDEX IF NOT EXISTS "work_notes_created_at" ON "work_notes" ("created_at");');
},
down: async (queryInterface: QueryInterface) => {
await queryInterface.dropTable('work_notes');
}
};

View File

@ -0,0 +1,25 @@
import { QueryInterface, DataTypes } from 'sequelize';
module.exports = {
up: async (queryInterface: QueryInterface) => {
await queryInterface.createTable('work_note_attachments', {
attachment_id: { type: DataTypes.UUID, primaryKey: true, defaultValue: DataTypes.UUIDV4, allowNull: false },
note_id: { type: DataTypes.UUID, allowNull: false },
file_name: { type: DataTypes.STRING(255), allowNull: false },
file_type: { type: DataTypes.STRING(100), allowNull: false },
file_size: { type: DataTypes.BIGINT, allowNull: false },
file_path: { type: DataTypes.STRING(500), allowNull: false },
storage_url: { type: DataTypes.STRING(500), allowNull: true },
is_downloadable: { type: DataTypes.BOOLEAN, allowNull: true },
download_count: { type: DataTypes.INTEGER, allowNull: true, defaultValue: 0 },
uploaded_at: { type: DataTypes.DATE, allowNull: false, defaultValue: DataTypes.NOW }
});
await queryInterface.sequelize.query('CREATE INDEX IF NOT EXISTS "work_note_attachments_note_id" ON "work_note_attachments" ("note_id");');
await queryInterface.sequelize.query('CREATE INDEX IF NOT EXISTS "work_note_attachments_uploaded_at" ON "work_note_attachments" ("uploaded_at");');
},
down: async (queryInterface: QueryInterface) => {
await queryInterface.dropTable('work_note_attachments');
}
};

120
src/models/Activity.ts Normal file
View File

@ -0,0 +1,120 @@
import { DataTypes, Model, Optional } from 'sequelize';
import { sequelize } from '@config/database';
interface ActivityAttributes {
activityId: string;
requestId: string;
userId?: string | null;
userName?: string | null;
activityType: string; // activity_type
activityDescription: string; // activity_description
activityCategory?: string | null;
severity?: string | null;
metadata?: object | null;
isSystemEvent?: boolean | null;
ipAddress?: string | null;
userAgent?: string | null;
createdAt: Date;
}
interface ActivityCreationAttributes extends Optional<ActivityAttributes, 'activityId' | 'createdAt'> {}
class Activity extends Model<ActivityAttributes, ActivityCreationAttributes> implements ActivityAttributes {
public activityId!: string;
public requestId!: string;
public userId!: string | null;
public userName!: string | null;
public activityType!: string;
public activityDescription!: string;
public activityCategory!: string | null;
public severity!: string | null;
public metadata!: object | null;
public isSystemEvent!: boolean | null;
public ipAddress!: string | null;
public userAgent!: string | null;
public createdAt!: Date;
}
Activity.init(
{
activityId: {
type: DataTypes.UUID,
defaultValue: DataTypes.UUIDV4,
primaryKey: true,
field: 'activity_id'
},
requestId: {
type: DataTypes.UUID,
allowNull: false,
field: 'request_id'
},
userId: {
type: DataTypes.UUID,
allowNull: true,
field: 'user_id'
},
userName: {
type: DataTypes.STRING(255),
allowNull: true,
field: 'user_name'
},
activityType: {
type: DataTypes.STRING(100),
allowNull: false,
field: 'activity_type'
},
activityDescription: {
type: DataTypes.TEXT,
allowNull: false,
field: 'activity_description'
},
activityCategory: {
type: DataTypes.STRING(100),
allowNull: true,
field: 'activity_category'
},
severity: {
type: DataTypes.STRING(50),
allowNull: true
},
metadata: {
type: DataTypes.JSONB,
allowNull: true
},
isSystemEvent: {
type: DataTypes.BOOLEAN,
allowNull: true,
field: 'is_system_event'
},
ipAddress: {
type: DataTypes.STRING(100),
allowNull: true,
field: 'ip_address'
},
userAgent: {
type: DataTypes.TEXT,
allowNull: true,
field: 'user_agent'
},
createdAt: {
type: DataTypes.DATE,
allowNull: false,
defaultValue: DataTypes.NOW,
field: 'created_at'
}
},
{
sequelize,
modelName: 'Activity',
tableName: 'activities',
timestamps: false,
indexes: [
{ fields: ['request_id'] },
{ fields: ['created_at'] }
]
}
);
export { Activity };

View File

@ -0,0 +1,77 @@
import { DataTypes, Model, Optional } from 'sequelize';
import { sequelize } from '@config/database';
interface SubscriptionAttributes {
subscriptionId: string;
userId: string;
endpoint: string;
p256dh: string;
auth: string;
userAgent?: string | null;
createdAt: Date;
}
interface SubscriptionCreationAttributes extends Optional<SubscriptionAttributes, 'subscriptionId' | 'userAgent' | 'createdAt'> {}
class Subscription extends Model<SubscriptionAttributes, SubscriptionCreationAttributes> implements SubscriptionAttributes {
public subscriptionId!: string;
public userId!: string;
public endpoint!: string;
public p256dh!: string;
public auth!: string;
public userAgent!: string | null;
public createdAt!: Date;
}
Subscription.init(
{
subscriptionId: {
type: DataTypes.UUID,
defaultValue: DataTypes.UUIDV4,
primaryKey: true,
field: 'subscription_id'
},
userId: {
type: DataTypes.UUID,
allowNull: false,
field: 'user_id'
},
endpoint: {
type: DataTypes.STRING(1000),
allowNull: false
},
p256dh: {
type: DataTypes.STRING(255),
allowNull: false
},
auth: {
type: DataTypes.STRING(255),
allowNull: false
},
userAgent: {
type: DataTypes.STRING(500),
allowNull: true,
field: 'user_agent'
},
createdAt: {
type: DataTypes.DATE,
allowNull: false,
defaultValue: DataTypes.NOW,
field: 'created_at'
}
},
{
sequelize,
modelName: 'Subscription',
tableName: 'subscriptions',
timestamps: false,
indexes: [
{ fields: ['user_id'] },
{ unique: true, fields: ['endpoint'] }
]
}
);
export { Subscription };

74
src/models/WorkNote.ts Normal file
View File

@ -0,0 +1,74 @@
import { DataTypes, Model, Optional } from 'sequelize';
import { sequelize } from '@config/database';
interface WorkNoteAttributes {
noteId: string;
requestId: string;
userId: string;
userName?: string | null;
userRole?: string | null;
message: string; // rich text (HTML/JSON) stored as TEXT
messageType?: string | null; // COMMENT etc
isPriority?: boolean | null;
hasAttachment?: boolean | null;
parentNoteId?: string | null;
mentionedUsers?: string[] | null;
reactions?: object | null;
isEdited?: boolean | null;
isDeleted?: boolean | null;
createdAt: Date;
updatedAt: Date;
}
interface WorkNoteCreationAttributes extends Optional<WorkNoteAttributes, 'noteId' | 'userName' | 'userRole' | 'messageType' | 'isPriority' | 'hasAttachment' | 'parentNoteId' | 'mentionedUsers' | 'reactions' | 'isEdited' | 'isDeleted' | 'createdAt' | 'updatedAt'> {}
class WorkNote extends Model<WorkNoteAttributes, WorkNoteCreationAttributes> implements WorkNoteAttributes {
public noteId!: string;
public requestId!: string;
public userId!: string;
public userName!: string | null;
public userRole!: string | null;
public message!: string;
public messageType!: string | null;
public isPriority!: boolean | null;
public hasAttachment!: boolean | null;
public parentNoteId!: string | null;
public mentionedUsers!: string[] | null;
public reactions!: object | null;
public isEdited!: boolean | null;
public isDeleted!: boolean | null;
public createdAt!: Date;
public updatedAt!: Date;
}
WorkNote.init(
{
noteId: { type: DataTypes.UUID, defaultValue: DataTypes.UUIDV4, primaryKey: true, field: 'note_id' },
requestId: { type: DataTypes.UUID, allowNull: false, field: 'request_id' },
userId: { type: DataTypes.UUID, allowNull: false, field: 'user_id' },
userName: { type: DataTypes.STRING(255), allowNull: true, field: 'user_name' },
userRole: { type: DataTypes.STRING(50), allowNull: true, field: 'user_role' },
message: { type: DataTypes.TEXT, allowNull: false },
messageType: { type: DataTypes.STRING(50), allowNull: true, field: 'message_type' },
isPriority: { type: DataTypes.BOOLEAN, allowNull: true, field: 'is_priority' },
hasAttachment: { type: DataTypes.BOOLEAN, allowNull: true, field: 'has_attachment' },
parentNoteId: { type: DataTypes.UUID, allowNull: true, field: 'parent_note_id' },
mentionedUsers: { type: DataTypes.ARRAY(DataTypes.UUID), allowNull: true, field: 'mentioned_users' },
reactions: { type: DataTypes.JSONB, allowNull: true },
isEdited: { type: DataTypes.BOOLEAN, allowNull: true, field: 'is_edited' },
isDeleted: { type: DataTypes.BOOLEAN, allowNull: true, field: 'is_deleted' },
createdAt: { type: DataTypes.DATE, allowNull: false, defaultValue: DataTypes.NOW, field: 'created_at' },
updatedAt: { type: DataTypes.DATE, allowNull: false, defaultValue: DataTypes.NOW, field: 'updated_at' },
},
{
sequelize,
modelName: 'WorkNote',
tableName: 'work_notes',
timestamps: false,
indexes: [ { fields: ['request_id'] }, { fields: ['user_id'] }, { fields: ['created_at'] } ]
}
);
export { WorkNote };

View File

@ -0,0 +1,56 @@
import { DataTypes, Model, Optional } from 'sequelize';
import { sequelize } from '@config/database';
interface WorkNoteAttachmentAttributes {
attachmentId: string;
noteId: string;
fileName: string;
fileType: string;
fileSize: number;
filePath: string;
storageUrl?: string | null;
isDownloadable?: boolean | null;
downloadCount?: number | null;
uploadedAt: Date;
}
interface WorkNoteAttachmentCreationAttributes extends Optional<WorkNoteAttachmentAttributes, 'attachmentId' | 'storageUrl' | 'isDownloadable' | 'downloadCount' | 'uploadedAt'> {}
class WorkNoteAttachment extends Model<WorkNoteAttachmentAttributes, WorkNoteAttachmentCreationAttributes> implements WorkNoteAttachmentAttributes {
public attachmentId!: string;
public noteId!: string;
public fileName!: string;
public fileType!: string;
public fileSize!: number;
public filePath!: string;
public storageUrl!: string | null;
public isDownloadable!: boolean | null;
public downloadCount!: number | null;
public uploadedAt!: Date;
}
WorkNoteAttachment.init(
{
attachmentId: { type: DataTypes.UUID, defaultValue: DataTypes.UUIDV4, primaryKey: true, field: 'attachment_id' },
noteId: { type: DataTypes.UUID, allowNull: false, field: 'note_id' },
fileName: { type: DataTypes.STRING(255), allowNull: false, field: 'file_name' },
fileType: { type: DataTypes.STRING(100), allowNull: false, field: 'file_type' },
fileSize: { type: DataTypes.BIGINT, allowNull: false, field: 'file_size' },
filePath: { type: DataTypes.STRING(500), allowNull: false, field: 'file_path' },
storageUrl: { type: DataTypes.STRING(500), allowNull: true, field: 'storage_url' },
isDownloadable: { type: DataTypes.BOOLEAN, allowNull: true, field: 'is_downloadable' },
downloadCount: { type: DataTypes.INTEGER, allowNull: true, field: 'download_count', defaultValue: 0 },
uploadedAt: { type: DataTypes.DATE, allowNull: false, defaultValue: DataTypes.NOW, field: 'uploaded_at' },
},
{
sequelize,
modelName: 'WorkNoteAttachment',
tableName: 'work_note_attachments',
timestamps: false,
indexes: [ { fields: ['note_id'] }, { fields: ['uploaded_at'] } ]
}
);
export { WorkNoteAttachment };

View File

@ -6,6 +6,10 @@ import { WorkflowRequest } from './WorkflowRequest';
import { ApprovalLevel } from './ApprovalLevel';
import { Participant } from './Participant';
import { Document } from './Document';
import { Subscription } from './Subscription';
import { Activity } from './Activity';
import { WorkNote } from './WorkNote';
import { WorkNoteAttachment } from './WorkNoteAttachment';
// Define associations
const defineAssociations = () => {
@ -53,62 +57,8 @@ const defineAssociations = () => {
sourceKey: 'requestId'
});
// ApprovalLevel associations
ApprovalLevel.belongsTo(WorkflowRequest, {
as: 'request',
foreignKey: 'requestId',
targetKey: 'requestId'
});
ApprovalLevel.belongsTo(User, {
as: 'approver',
foreignKey: 'approverId',
targetKey: 'userId'
});
// Participant associations
Participant.belongsTo(WorkflowRequest, {
as: 'request',
foreignKey: 'requestId',
targetKey: 'requestId'
});
Participant.belongsTo(User, {
as: 'user',
foreignKey: 'userId',
targetKey: 'userId'
});
Participant.belongsTo(User, {
as: 'addedByUser',
foreignKey: 'addedBy',
targetKey: 'userId'
});
// Document associations
Document.belongsTo(WorkflowRequest, {
as: 'request',
foreignKey: 'requestId',
targetKey: 'requestId'
});
Document.belongsTo(User, {
as: 'uploader',
foreignKey: 'uploadedBy',
targetKey: 'userId'
});
Document.belongsTo(Document, {
as: 'parentDocument',
foreignKey: 'parentDocumentId',
targetKey: 'documentId'
});
Document.hasMany(Document, {
as: 'childDocuments',
foreignKey: 'parentDocumentId',
sourceKey: 'documentId'
});
// Note: belongsTo associations are defined in individual model files to avoid duplicate alias conflicts
// Only hasMany associations from WorkflowRequest are defined here since they're one-way
};
// Initialize associations
@ -121,7 +71,11 @@ export {
WorkflowRequest,
ApprovalLevel,
Participant,
Document
Document,
Subscription,
Activity,
WorkNote,
WorkNoteAttachment
};
// Export default sequelize instance

38
src/realtime/socket.ts Normal file
View File

@ -0,0 +1,38 @@
import { Server } from 'socket.io';
let io: Server | null = null;
export function initSocket(httpServer: any) {
const defaultOrigins = [
'http://localhost:3000',
'http://127.0.0.1:3000',
'http://localhost:5173',
'http://127.0.0.1:5173'
];
const configured = (process.env.FRONTEND_ORIGIN || '').split(',').map(s => s.trim()).filter(Boolean);
const origins = configured.length ? configured : defaultOrigins;
io = new Server(httpServer, {
cors: {
origin: origins,
methods: ['GET', 'POST'],
credentials: true
},
path: '/socket.io'
});
io.on('connection', (socket: any) => {
socket.on('join:request', (requestId: string) => {
socket.join(`request:${requestId}`);
});
socket.on('leave:request', (requestId: string) => {
socket.leave(`request:${requestId}`);
});
});
return io;
}
export function emitToRequestRoom(requestId: string, event: string, payload: any) {
if (!io) return;
io.to(`request:${requestId}`).emit(event, payload);
}
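
Not part of the diff: socket.ts exposes emitToRequestRoom, but nothing in this commit emits to a room yet. A hedged sketch of the client side, assuming socket.io-client on the frontend (not added here) and an illustrative event name 'worknote:created'.

import { io, type Socket } from 'socket.io-client';

// Joins the room for one request; the event name is illustrative only.
export function watchRequest(requestId: string): Socket {
  const socket = io('http://localhost:5000', { path: '/socket.io', withCredentials: true });
  socket.emit('join:request', requestId);   // handled by the connection listener in socket.ts
  socket.on('worknote:created', (note) => {
    console.log('new work note on request', requestId, note);
  });
  return socket;
}

On the server, the matching half would be a call such as emitToRequestRoom(requestId, 'worknote:created', note) after a note is persisted.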

View File

@ -1,4 +1,5 @@
import { Router } from 'express';
import type { Request, Response } from 'express';
import { WorkflowController } from '../controllers/workflow.controller';
import { ApprovalController } from '../controllers/approval.controller';
import { authenticateToken } from '../middlewares/auth.middleware';
@ -11,10 +12,15 @@ import multer from 'multer';
import path from 'path';
import crypto from 'crypto';
import { ensureUploadDir, UPLOAD_DIR } from '../config/storage';
import { notificationService } from '../services/notification.service';
import { Activity } from '@models/Activity';
import { WorkflowService } from '../services/workflow.service';
import { WorkNoteController } from '../controllers/worknote.controller';
const router = Router();
const workflowController = new WorkflowController();
const approvalController = new ApprovalController();
const workNoteController = new WorkNoteController();
// Workflow routes
router.get('/',
@ -81,6 +87,14 @@ router.put('/:id',
asyncHandler(workflowController.updateWorkflow.bind(workflowController))
);
// Multipart update (payload + files[]) for draft updates
router.put('/:id/multipart',
authenticateToken,
validateParams(workflowParamsSchema),
upload.array('files'),
asyncHandler(workflowController.updateWorkflowMultipart.bind(workflowController))
);
router.patch('/:id/submit',
authenticateToken,
validateParams(workflowParamsSchema),
@ -116,4 +130,58 @@ router.patch('/:id/approvals/:levelId/reject',
asyncHandler(approvalController.approveLevel.bind(approvalController))
);
// Notifications
router.post('/notifications/subscribe',
authenticateToken,
asyncHandler(async (req: any, res: Response): Promise<void> => {
const userId = req.user?.userId;
if (!userId) { res.status(401).json({ success: false, error: 'Unauthorized' }); return; }
const ua = req.headers['user-agent'] as string | undefined;
await notificationService.addSubscription(userId, req.body, ua);
res.json({ success: true });
return;
})
);
router.post('/notifications/test',
authenticateToken,
asyncHandler(async (req: any, res: Response): Promise<void> => {
const userId = req.user?.userId;
await notificationService.sendToUsers([userId], { title: 'Test', body: 'Push works!' });
res.json({ success: true });
return;
})
);
// Activities
router.get('/:id/activity',
authenticateToken,
validateParams(workflowParamsSchema),
asyncHandler(async (req: any, res: Response): Promise<void> => {
// Resolve requestId UUID from identifier
const workflowService = new WorkflowService();
const wf = await (workflowService as any).findWorkflowByIdentifier(req.params.id);
if (!wf) { res.status(404).json({ success: false, error: 'Not found' }); return; }
const requestId: string = wf.getDataValue('requestId');
const rows = await Activity.findAll({ where: { requestId }, order: [['created_at', 'ASC']] as any });
res.json({ success: true, data: rows });
return;
})
);
// Work Notes
router.get('/:id/work-notes',
authenticateToken,
validateParams(workflowParamsSchema),
asyncHandler(workNoteController.list.bind(workNoteController))
);
const noteUpload = upload; // reuse same storage/limits
router.post('/:id/work-notes',
authenticateToken,
validateParams(workflowParamsSchema),
noteUpload.array('files'),
asyncHandler(workNoteController.create.bind(workNoteController))
);
export default router;
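
Not part of the diff: /notifications/subscribe expects a Web Push subscription object (endpoint plus p256dh/auth keys). A browser-side sketch of producing and posting one; the service-worker path, API prefix, and helper below are assumptions, and the public key must match the VAPID key configured in notification.service.ts.

function urlBase64ToUint8Array(base64: string): Uint8Array {
  const padding = '='.repeat((4 - (base64.length % 4)) % 4);
  const raw = atob((base64 + padding).replace(/-/g, '+').replace(/_/g, '/'));
  return Uint8Array.from([...raw].map((c) => c.charCodeAt(0)));
}

async function subscribeToPush(vapidPublicKey: string, token: string): Promise<void> {
  await navigator.serviceWorker.register('/sw.js');        // service-worker path assumed
  const registration = await navigator.serviceWorker.ready;
  const subscription = await registration.pushManager.subscribe({
    userVisibleOnly: true,
    applicationServerKey: urlBase64ToUint8Array(vapidPublicKey),
  });
  await fetch('/api/workflows/notifications/subscribe', {  // mount prefix assumed
    method: 'POST',
    headers: { 'Content-Type': 'application/json', Authorization: `Bearer ${token}` },
    body: JSON.stringify(subscription), // serializes to { endpoint, keys: { p256dh, auth } }
  });
}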

View File

@ -3,6 +3,10 @@ import * as m1 from '../migrations/2025103001-create-workflow-requests';
import * as m2 from '../migrations/2025103002-create-approval-levels';
import * as m3 from '../migrations/2025103003-create-participants';
import * as m4 from '../migrations/2025103004-create-documents';
import * as m5 from '../migrations/20251031_01_create_subscriptions';
import * as m6 from '../migrations/20251031_02_create_activities';
import * as m7 from '../migrations/20251031_03_create_work_notes';
import * as m8 from '../migrations/20251031_04_create_work_note_attachments';
async function run() {
try {
@ -12,6 +16,10 @@ async function run() {
await m2.up(sequelize.getQueryInterface());
await m3.up(sequelize.getQueryInterface());
await m4.up(sequelize.getQueryInterface());
await (m5 as any).up(sequelize.getQueryInterface());
await (m6 as any).up(sequelize.getQueryInterface());
await (m7 as any).up(sequelize.getQueryInterface());
await (m8 as any).up(sequelize.getQueryInterface());
console.log('Migrations applied');
process.exit(0);
} catch (err) {

View File

@ -1,15 +1,20 @@
import app from './app';
import http from 'http';
import { initSocket } from './realtime/socket';
const PORT: number = parseInt(process.env.PORT || '5000', 10);
// Start server
const startServer = (): void => {
try {
app.listen(PORT, () => {
const server = http.createServer(app);
initSocket(server);
server.listen(PORT, () => {
console.log(`🚀 Server running on port ${PORT}`);
console.log(`📊 Environment: ${process.env.NODE_ENV || 'development'}`);
console.log(`🌐 API Base URL: http://localhost:${PORT}`);
console.log(`❤️ Health Check: http://localhost:${PORT}/health`);
console.log(`🔌 Socket.IO path: /socket.io`);
});
} catch (error) {
console.error('❌ Unable to start server:', error);

View File

@ -0,0 +1,43 @@
export type ActivityEntry = {
requestId: string;
type: 'created' | 'assignment' | 'approval' | 'rejection' | 'status_change' | 'comment' | 'reminder';
user?: { userId: string; name?: string; email?: string };
timestamp: string;
action: string;
details: string;
};
class ActivityService {
private byRequest: Map<string, ActivityEntry[]> = new Map();
log(entry: ActivityEntry) {
const list = this.byRequest.get(entry.requestId) || [];
list.push(entry);
this.byRequest.set(entry.requestId, list);
// Persist best-effort (non-blocking)
try {
const { Activity } = require('@models/Activity');
Activity.create({
requestId: entry.requestId,
userId: entry.user?.userId || null,
userName: entry.user?.name || null,
activityType: entry.type,
activityDescription: entry.details,
activityCategory: null,
severity: null,
metadata: null,
isSystemEvent: !entry.user,
ipAddress: null,
userAgent: null,
});
} catch {}
}
get(requestId: string): ActivityEntry[] {
return this.byRequest.get(requestId) || [];
}
}
export const activityService = new ActivityService();
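
Not part of the diff: activityService.log keeps an in-memory list per request and best-effort persists each entry to the activities table; the GET /:id/activity route reads from the table via Activity.findAll, not from the map. A minimal usage sketch with illustrative IDs:

import { activityService } from './activity.service';

activityService.log({
  requestId: 'd290f1ee-6c54-4b01-90e6-d701748f0851',          // illustrative UUID
  type: 'comment',
  user: { userId: '11111111-1111-1111-1111-111111111111', name: 'Jane Doe' }, // hypothetical user
  timestamp: new Date().toISOString(),
  action: 'Work note added',
  details: 'Jane Doe added a work note',
});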

View File

@ -1,9 +1,13 @@
import { ApprovalLevel } from '@models/ApprovalLevel';
import { WorkflowRequest } from '@models/WorkflowRequest';
import { Participant } from '@models/Participant';
import { ApprovalAction } from '../types/approval.types';
import { ApprovalStatus, WorkflowStatus } from '../types/common.types';
import { calculateElapsedHours, calculateTATPercentage } from '@utils/helpers';
import logger from '@utils/logger';
import { Op } from 'sequelize';
import { notificationService } from './notification.service';
import { activityService } from './activity.service';
export class ApprovalService {
async approveLevel(levelId: string, action: ApprovalAction, _userId: string): Promise<ApprovalLevel | null> {
@ -27,17 +31,154 @@ export class ApprovalService {
const updatedLevel = await level.update(updateData);
// Update workflow status if this is the final level
if (level.isFinalApprover && action.action === 'APPROVE') {
await WorkflowRequest.update(
{ status: WorkflowStatus.APPROVED, closureDate: now },
{ where: { requestId: level.requestId } }
);
// Load workflow for titles and initiator
const wf = await WorkflowRequest.findByPk(level.requestId);
// Handle approval - move to next level or close workflow
if (action.action === 'APPROVE') {
if (level.isFinalApprover) {
// Final approver - close workflow as APPROVED
await WorkflowRequest.update(
{
status: WorkflowStatus.APPROVED,
closureDate: now,
currentLevel: (level.levelNumber || 0) + 1
},
{ where: { requestId: level.requestId } }
);
logger.info(`Final approver approved. Workflow ${level.requestId} closed as APPROVED`);
// Notify initiator
if (wf) {
await notificationService.sendToUsers([ (wf as any).initiatorId ], {
title: `Approved: ${(wf as any).requestNumber}`,
body: `${(wf as any).title}`,
requestNumber: (wf as any).requestNumber,
url: `/request/${(wf as any).requestNumber}`
});
activityService.log({
requestId: level.requestId,
type: 'approval',
timestamp: new Date().toISOString(),
action: 'Final approval',
details: `${(wf as any).requestNumber}${(wf as any).title}`
});
}
} else {
// Not final - move to next level
const nextLevelNumber = (level.levelNumber || 0) + 1;
const nextLevel = await ApprovalLevel.findOne({
where: {
requestId: level.requestId,
levelNumber: nextLevelNumber
}
});
if (nextLevel) {
// Activate next level
await nextLevel.update({
status: ApprovalStatus.PENDING,
levelStartTime: now
});
// Update workflow current level
await WorkflowRequest.update(
{ currentLevel: nextLevelNumber },
{ where: { requestId: level.requestId } }
);
logger.info(`Approved level ${level.levelNumber}. Activated next level ${nextLevelNumber} for workflow ${level.requestId}`);
// Notify next approver
if (wf && nextLevel) {
await notificationService.sendToUsers([ (nextLevel as any).approverId ], {
title: `Action required: ${(wf as any).requestNumber}`,
body: `${(wf as any).title}`,
requestNumber: (wf as any).requestNumber,
url: `/request/${(wf as any).requestNumber}`
});
activityService.log({
requestId: level.requestId,
type: 'assignment',
timestamp: new Date().toISOString(),
action: 'Moved to next approver',
details: `${(wf as any).requestNumber}${(wf as any).title}`
});
}
} else {
// No next level found but not final approver - this shouldn't happen
logger.warn(`No next level found for workflow ${level.requestId} after approving level ${level.levelNumber}`);
await WorkflowRequest.update(
{
status: WorkflowStatus.APPROVED,
closureDate: now,
currentLevel: nextLevelNumber
},
{ where: { requestId: level.requestId } }
);
if (wf) {
await notificationService.sendToUsers([ (wf as any).initiatorId ], {
title: `Approved: ${(wf as any).requestNumber}`,
body: `${(wf as any).title}`,
requestNumber: (wf as any).requestNumber,
url: `/request/${(wf as any).requestNumber}`
});
activityService.log({
requestId: level.requestId,
type: 'approval',
timestamp: new Date().toISOString(),
action: 'Workflow approved (no next level found)',
details: `${(wf as any).requestNumber}${(wf as any).title}`
});
}
}
}
} else if (action.action === 'REJECT') {
// Rejection - close workflow and mark all remaining levels as skipped
await WorkflowRequest.update(
{ status: WorkflowStatus.REJECTED, closureDate: now },
{
status: WorkflowStatus.REJECTED,
closureDate: now
},
{ where: { requestId: level.requestId } }
);
// Mark all pending levels as skipped
await ApprovalLevel.update(
{
status: ApprovalStatus.SKIPPED,
levelEndTime: now
},
{
where: {
requestId: level.requestId,
status: ApprovalStatus.PENDING,
levelNumber: { [Op.gt]: level.levelNumber }
}
}
);
logger.info(`Level ${level.levelNumber} rejected. Workflow ${level.requestId} closed as REJECTED`);
// Notify initiator and all participants
if (wf) {
const participants = await Participant.findAll({ where: { requestId: level.requestId } });
const targetUserIds = new Set<string>();
targetUserIds.add((wf as any).initiatorId);
for (const p of participants as any[]) {
targetUserIds.add(p.userId);
}
await notificationService.sendToUsers(Array.from(targetUserIds), {
title: `Rejected: ${(wf as any).requestNumber}`,
body: `${(wf as any).title}`,
requestNumber: (wf as any).requestNumber,
url: `/request/${(wf as any).requestNumber}`
});
activityService.log({
requestId: level.requestId,
type: 'rejection',
timestamp: new Date().toISOString(),
action: 'Workflow rejected',
details: `${(wf as any).requestNumber}${(wf as any).title}`
});
}
}
logger.info(`Approval level ${levelId} ${action.action.toLowerCase()}ed`);

View File

@ -0,0 +1,72 @@
import webpush from 'web-push';
import logger from '@utils/logger';
import { Subscription } from '@models/Subscription';
type PushSubscription = any; // Web Push protocol JSON
class NotificationService {
private userIdToSubscriptions: Map<string, PushSubscription[]> = new Map();
configure(vapidPublicKey?: string, vapidPrivateKey?: string, mailto?: string) {
const pub = vapidPublicKey || process.env.VAPID_PUBLIC_KEY || '';
const priv = vapidPrivateKey || process.env.VAPID_PRIVATE_KEY || '';
const contact = mailto || process.env.VAPID_CONTACT || 'mailto:admin@example.com';
if (!pub || !priv) {
logger.warn('VAPID keys are not configured. Push notifications are disabled.');
return;
}
webpush.setVapidDetails(contact, pub, priv);
logger.info('Web Push configured');
}
async addSubscription(userId: string, subscription: PushSubscription, userAgent?: string) {
// Persist to DB (upsert by endpoint)
try {
const endpoint: string = subscription?.endpoint || '';
const keys = subscription?.keys || {};
if (!endpoint || !keys?.p256dh || !keys?.auth) throw new Error('Invalid subscription payload');
await Subscription.upsert({
userId,
endpoint,
p256dh: keys.p256dh,
auth: keys.auth,
userAgent: userAgent || null,
} as any);
} catch (e) {
logger.error('Failed to persist subscription', e);
}
const list = this.userIdToSubscriptions.get(userId) || [];
const already = list.find((s) => JSON.stringify(s) === JSON.stringify(subscription));
if (!already) {
list.push(subscription);
this.userIdToSubscriptions.set(userId, list);
}
logger.info(`Subscription stored for user ${userId}. Total: ${list.length}`);
}
async sendToUsers(userIds: string[], payload: any) {
const message = JSON.stringify(payload);
for (const uid of userIds) {
let subs = this.userIdToSubscriptions.get(uid) || [];
// Load from DB if memory empty
if (subs.length === 0) {
try {
const rows = await Subscription.findAll({ where: { userId: uid } });
subs = rows.map((r: any) => ({ endpoint: r.endpoint, keys: { p256dh: r.p256dh, auth: r.auth } }));
} catch {}
}
for (const sub of subs) {
try {
await webpush.sendNotification(sub, message);
} catch (err) {
logger.error(`Failed to send push to ${uid}:`, err);
}
}
}
}
}
export const notificationService = new NotificationService();
notificationService.configure();
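
Not part of the diff: configure() reads VAPID_PUBLIC_KEY / VAPID_PRIVATE_KEY from the environment and disables push when they are missing. The web-push package can generate a key pair; a one-off script sketch (file name and .env wiring are assumptions):

// scripts/generate-vapid-keys.ts (file name assumed); run once, then copy the
// output into VAPID_PUBLIC_KEY and VAPID_PRIVATE_KEY.
import webpush from 'web-push';

const { publicKey, privateKey } = webpush.generateVAPIDKeys();
console.log(`VAPID_PUBLIC_KEY=${publicKey}`);
console.log(`VAPID_PRIVATE_KEY=${privateKey}`);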

View File

@ -4,11 +4,17 @@ import { User } from '@models/User';
import { ApprovalLevel } from '@models/ApprovalLevel';
import { Participant } from '@models/Participant';
import { Document } from '@models/Document';
// Ensure associations are initialized by importing models index
import '@models/index';
import { CreateWorkflowRequest, UpdateWorkflowRequest } from '../types/workflow.types';
import { generateRequestNumber, calculateTATDays } from '@utils/helpers';
import logger from '@utils/logger';
import { WorkflowStatus, ParticipantType, ApprovalStatus } from '../types/common.types';
import { Op } from 'sequelize';
import fs from 'fs';
import path from 'path';
import { notificationService } from './notification.service';
import { activityService } from './activity.service';
export class WorkflowService {
async listWorkflows(page: number, limit: number) {
@ -98,22 +104,32 @@ export class WorkflowService {
async listOpenForMe(userId: string, page: number, limit: number) {
const offset = (page - 1) * limit;
const levelRows = await ApprovalLevel.findAll({
// Find all pending/in-progress approval levels across requests ordered by levelNumber
const pendingLevels = await ApprovalLevel.findAll({
where: {
approverId: userId,
status: { [Op.in]: [ApprovalStatus.PENDING as any, (ApprovalStatus as any).IN_PROGRESS ?? 'IN_PROGRESS', 'PENDING', 'IN_PROGRESS'] as any },
},
attributes: ['requestId'],
order: [['requestId', 'ASC'], ['levelNumber', 'ASC']],
attributes: ['requestId', 'levelNumber', 'approverId'],
});
// Include requests where the user is a SPECTATOR (view-only)
const spectatorRows = await Participant.findAll({
where: { userId, participantType: 'SPECTATOR' as any },
attributes: ['requestId'],
});
const requestIds = Array.from(new Set([
...levelRows.map((l: any) => l.requestId),
...spectatorRows.map((s: any) => s.requestId),
]));
// For each request, pick the first (current) pending level
const currentLevelByRequest = new Map<string, { requestId: string; levelNumber: number; approverId: string }>();
for (const lvl of pendingLevels as any[]) {
const rid = lvl.requestId as string;
if (!currentLevelByRequest.has(rid)) {
currentLevelByRequest.set(rid, {
requestId: rid,
levelNumber: lvl.levelNumber,
approverId: lvl.approverId,
});
}
}
// Only include requests where the current approver matches the user
const requestIds = Array.from(currentLevelByRequest.values())
.filter(item => item.approverId === userId)
.map(item => item.requestId);
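    // Example (illustrative): if REQ-A has pending levels (1, userX) and (2, userY),
    // only userX sees REQ-A here; userY will see it once level 1 is approved.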
const { rows, count } = await WorkflowRequest.findAndCountAll({
where: {
requestId: { [Op.in]: requestIds.length ? requestIds : ['00000000-0000-0000-0000-000000000000'] },
@ -135,7 +151,12 @@ export class WorkflowService {
const levelRows = await ApprovalLevel.findAll({
where: {
approverId: userId,
status: { [Op.in]: [ApprovalStatus.APPROVED as any, 'APPROVED'] as any },
status: { [Op.in]: [
ApprovalStatus.APPROVED as any,
(ApprovalStatus as any).REJECTED ?? 'REJECTED',
'APPROVED',
'REJECTED'
] as any },
},
attributes: ['requestId'],
});
@ -211,6 +232,31 @@ export class WorkflowService {
}
logger.info(`Workflow created: ${requestNumber}`);
activityService.log({
requestId: (workflow as any).requestId,
type: 'created',
user: { userId: initiatorId },
timestamp: new Date().toISOString(),
action: 'Request created',
details: `${workflowData.title}`
});
const firstLevel = await ApprovalLevel.findOne({ where: { requestId: (workflow as any).requestId, levelNumber: 1 } });
if (firstLevel) {
await notificationService.sendToUsers([(firstLevel as any).approverId], {
title: 'New request assigned',
body: `${workflowData.title}`,
requestNumber: requestNumber,
url: `/request/${requestNumber}`
});
activityService.log({
requestId: (workflow as any).requestId,
type: 'assignment',
user: { userId: (firstLevel as any).approverId },
timestamp: new Date().toISOString(),
action: 'Assigned to approver',
details: `${(firstLevel as any).approverName || ''}`
});
}
return workflow;
} catch (error) {
logger.error('Failed to create workflow:', error);
@ -218,9 +264,29 @@ export class WorkflowService {
}
}
// Helper to determine if identifier is UUID or requestNumber
private isUuid(identifier: string): boolean {
const uuidRegex = /^[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$/i;
return uuidRegex.test(identifier);
}
// Helper to find workflow by either requestId or requestNumber
private async findWorkflowByIdentifier(identifier: string) {
if (this.isUuid(identifier)) {
return await WorkflowRequest.findByPk(identifier);
} else {
return await WorkflowRequest.findOne({
where: { requestNumber: identifier }
});
}
}
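  // Illustrative: findWorkflowByIdentifier('REQ-2025-12057') resolves via requestNumber,
  // while findWorkflowByIdentifier('550e8400-e29b-41d4-a716-446655440000') resolves via primary key.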
async getWorkflowById(requestId: string): Promise<WorkflowRequest | null> {
try {
return await WorkflowRequest.findByPk(requestId, {
const workflow = await this.findWorkflowByIdentifier(requestId);
if (!workflow) return null;
return await WorkflowRequest.findByPk(workflow.requestId, {
include: [
{ association: 'initiator' },
{ association: 'approvalLevels' },
@ -236,7 +302,24 @@ export class WorkflowService {
async getWorkflowDetails(requestId: string) {
try {
const workflow = await WorkflowRequest.findByPk(requestId, {
const workflowBase = await this.findWorkflowByIdentifier(requestId);
if (!workflowBase) {
logger.warn(`Workflow not found for identifier: ${requestId}`);
return null;
}
// Get requestId - try both property access and getDataValue for safety
const actualRequestId = (workflowBase as any).getDataValue
? (workflowBase as any).getDataValue('requestId')
: (workflowBase as any).requestId;
if (!actualRequestId) {
logger.error(`Could not extract requestId from workflow. Identifier: ${requestId}, Workflow data:`, JSON.stringify(workflowBase, null, 2));
throw new Error('Failed to extract requestId from workflow');
}
// Reload with associations
const workflow = await WorkflowRequest.findByPk(actualRequestId, {
include: [ { association: 'initiator' } ]
});
if (!workflow) return null;
@ -244,7 +327,7 @@ export class WorkflowService {
// Compute current approver and SLA summary (same logic used in lists)
const currentLevel = await ApprovalLevel.findOne({
where: {
requestId,
requestId: actualRequestId,
status: { [Op.in]: ['PENDING', 'IN_PROGRESS'] as any },
},
order: [['levelNumber', 'ASC']],
@ -282,11 +365,47 @@ export class WorkflowService {
sla: { percent, remainingText },
};
      // Ensure actualRequestId is a valid UUID (not a requestNumber)
      if (!actualRequestId || typeof actualRequestId !== 'string') {
        logger.error(`Invalid requestId extracted: ${actualRequestId}, original identifier: ${requestId}`);
        throw new Error('Invalid workflow identifier');
      }
      // Verify UUID format, reusing the class helper instead of re-declaring the regex
      if (!this.isUuid(actualRequestId)) {
        logger.error(`Extracted requestId is not a valid UUID: ${actualRequestId}, original identifier: ${requestId}`);
        throw new Error('Invalid workflow identifier format');
      }
logger.info(`Fetching participants for requestId: ${actualRequestId} (original identifier: ${requestId})`);
// Load related entities explicitly to avoid alias issues
const approvals = await ApprovalLevel.findAll({ where: { requestId }, order: [['levelNumber','ASC']] }) as any[];
const participants = await Participant.findAll({ where: { requestId } }) as any[];
const documents = await Document.findAll({ where: { requestId } }) as any[];
const activities: any[] = [];
// Use the actual UUID requestId for all queries
const approvals = await ApprovalLevel.findAll({
where: { requestId: actualRequestId },
order: [['levelNumber','ASC']]
}) as any[];
const participants = await Participant.findAll({
where: { requestId: actualRequestId }
}) as any[];
logger.info(`Found ${participants.length} participants for requestId: ${actualRequestId}`);
const documents = await Document.findAll({
where: {
requestId: actualRequestId,
isDeleted: false // Only fetch non-deleted documents
}
}) as any[];
let activities: any[] = [];
try {
const { Activity } = require('@models/Activity');
activities = await Activity.findAll({ where: { requestId: actualRequestId }, order: [['created_at', 'ASC']] });
} catch {
activities = activityService.get(actualRequestId);
}
return { workflow, approvals, participants, documents, activities, summary };
} catch (error) {
@ -297,10 +416,170 @@ export class WorkflowService {
async updateWorkflow(requestId: string, updateData: UpdateWorkflowRequest): Promise<WorkflowRequest | null> {
try {
const workflow = await WorkflowRequest.findByPk(requestId);
const workflow = await this.findWorkflowByIdentifier(requestId);
if (!workflow) return null;
return await workflow.update(updateData);
const actualRequestId = (workflow as any).getDataValue
? (workflow as any).getDataValue('requestId')
: (workflow as any).requestId;
// Only allow full updates (approval levels, participants) for DRAFT workflows
const isDraft = (workflow as any).status === WorkflowStatus.DRAFT || (workflow as any).isDraft;
// Update basic workflow fields
const basicUpdate: any = {};
if (updateData.title) basicUpdate.title = updateData.title;
if (updateData.description) basicUpdate.description = updateData.description;
if (updateData.priority) basicUpdate.priority = updateData.priority;
if (updateData.status) basicUpdate.status = updateData.status;
if (updateData.conclusionRemark !== undefined) basicUpdate.conclusionRemark = updateData.conclusionRemark;
await workflow.update(basicUpdate);
// Update approval levels if provided (only for drafts)
if (isDraft && updateData.approvalLevels && Array.isArray(updateData.approvalLevels)) {
// Delete all existing approval levels for this draft
await ApprovalLevel.destroy({ where: { requestId: actualRequestId } });
// Create new approval levels
const totalTatHours = updateData.approvalLevels.reduce((sum, level) => sum + level.tatHours, 0);
for (const levelData of updateData.approvalLevels) {
await ApprovalLevel.create({
requestId: actualRequestId,
levelNumber: levelData.levelNumber,
levelName: levelData.levelName || `Level ${levelData.levelNumber}`,
approverId: levelData.approverId,
approverEmail: levelData.approverEmail,
approverName: levelData.approverName,
tatHours: levelData.tatHours,
tatDays: calculateTATDays(levelData.tatHours),
status: ApprovalStatus.PENDING,
elapsedHours: 0,
remainingHours: levelData.tatHours,
tatPercentageUsed: 0,
isFinalApprover: levelData.isFinalApprover || false
});
}
// Update workflow totals
await workflow.update({
totalLevels: updateData.approvalLevels.length,
totalTatHours,
currentLevel: 1
});
logger.info(`Updated ${updateData.approvalLevels.length} approval levels for workflow ${actualRequestId}`);
}
// Update participants if provided (only for drafts)
if (isDraft && updateData.participants && Array.isArray(updateData.participants)) {
// Get existing participants
const existingParticipants = await Participant.findAll({
where: { requestId: actualRequestId }
});
// Create a map of existing participants by userId
const existingMap = new Map(existingParticipants.map((p: any) => [
(p as any).userId,
p
]));
// Create a set of new participant userIds
const newUserIds = new Set(updateData.participants.map(p => p.userId));
// Delete participants that are no longer in the new list (except INITIATOR)
for (const existing of existingParticipants) {
const userId = (existing as any).userId;
const participantType = (existing as any).participantType;
// Never delete INITIATOR
if (participantType === 'INITIATOR') continue;
// Delete if not in new list
if (!newUserIds.has(userId)) {
await existing.destroy();
logger.info(`Deleted participant ${userId} from workflow ${actualRequestId}`);
}
}
// Add or update participants from the new list
for (const participantData of updateData.participants) {
const existing = existingMap.get(participantData.userId);
if (existing) {
// Update existing participant
await existing.update({
userEmail: participantData.userEmail,
userName: participantData.userName,
participantType: participantData.participantType as any,
canComment: participantData.canComment ?? true,
canViewDocuments: participantData.canViewDocuments ?? true,
canDownloadDocuments: participantData.canDownloadDocuments ?? false,
notificationEnabled: participantData.notificationEnabled ?? true,
isActive: true
});
} else {
// Create new participant
await Participant.create({
requestId: actualRequestId,
userId: participantData.userId,
userEmail: participantData.userEmail,
userName: participantData.userName,
participantType: participantData.participantType as any,
canComment: participantData.canComment ?? true,
canViewDocuments: participantData.canViewDocuments ?? true,
canDownloadDocuments: participantData.canDownloadDocuments ?? false,
notificationEnabled: participantData.notificationEnabled ?? true,
addedBy: (workflow as any).initiatorId,
isActive: true
});
logger.info(`Added new participant ${participantData.userId} to workflow ${actualRequestId}`);
}
}
logger.info(`Synced ${updateData.participants.length} participants for workflow ${actualRequestId}`);
}
// Delete documents if requested (only for drafts)
if (isDraft && updateData.deleteDocumentIds && updateData.deleteDocumentIds.length > 0) {
logger.info(`Attempting to delete ${updateData.deleteDocumentIds.length} documents for workflow ${actualRequestId}. Document IDs:`, updateData.deleteDocumentIds);
// First get documents with file paths before deleting
const documentsToDelete = await Document.findAll({
where: { requestId: actualRequestId, documentId: { [Op.in]: updateData.deleteDocumentIds } },
attributes: ['documentId', 'originalFileName', 'filePath', 'isDeleted']
});
logger.info(`Found ${documentsToDelete.length} documents matching delete IDs. Existing:`, documentsToDelete.map((d: any) => ({ id: d.documentId, name: d.originalFileName, filePath: d.filePath, isDeleted: d.isDeleted })));
// Delete physical files from filesystem
for (const doc of documentsToDelete) {
const filePath = (doc as any).filePath;
if (filePath && fs.existsSync(filePath)) {
try {
fs.unlinkSync(filePath);
logger.info(`Deleted physical file: ${filePath} for document ${(doc as any).documentId}`);
} catch (error) {
logger.error(`Failed to delete physical file ${filePath}:`, error);
// Continue with soft-delete even if file deletion fails
}
} else if (filePath) {
logger.warn(`File path does not exist, skipping file deletion: ${filePath}`);
}
}
// Mark documents as deleted in database
const deleteResult = await Document.update(
{ isDeleted: true },
{ where: { requestId: actualRequestId, documentId: { [Op.in]: updateData.deleteDocumentIds } } }
);
logger.info(`Marked ${deleteResult[0]} documents as deleted in database (out of ${updateData.deleteDocumentIds.length} requested)`);
}
      // Reload the workflow instance to get the latest data. Associations are intentionally
      // not eager-loaded here, since including them at this point raised alias errors.
const refreshed = await WorkflowRequest.findByPk(actualRequestId);
return refreshed;
} catch (error) {
logger.error(`Failed to update workflow ${requestId}:`, error);
throw new Error('Failed to update workflow');
@ -309,14 +588,33 @@ export class WorkflowService {
async submitWorkflow(requestId: string): Promise<WorkflowRequest | null> {
try {
const workflow = await WorkflowRequest.findByPk(requestId);
const workflow = await this.findWorkflowByIdentifier(requestId);
if (!workflow) return null;
return await workflow.update({
const updated = await workflow.update({
status: WorkflowStatus.PENDING,
isDraft: false,
submissionDate: new Date()
});
activityService.log({
requestId: (updated as any).requestId,
type: 'status_change',
timestamp: new Date().toISOString(),
action: 'Submitted',
details: 'Request moved to PENDING'
});
const current = await ApprovalLevel.findOne({
where: { requestId: (updated as any).requestId, levelNumber: (updated as any).currentLevel || 1 }
});
if (current) {
await notificationService.sendToUsers([(current as any).approverId], {
title: 'Request submitted',
body: `${(updated as any).title}`,
requestNumber: (updated as any).requestNumber,
url: `/request/${(updated as any).requestNumber}`
});
}
return updated;
} catch (error) {
logger.error(`Failed to submit workflow ${requestId}:`, error);
throw new Error('Failed to submit workflow');

View File

@ -0,0 +1,51 @@
import { WorkNote } from '@models/WorkNote';
import { WorkNoteAttachment } from '@models/WorkNoteAttachment';
import logger from '@utils/logger';
export class WorkNoteService {
async list(requestId: string) {
return await WorkNote.findAll({
where: { requestId },
order: [['created_at' as any, 'ASC']]
});
}
async create(requestId: string, user: { userId: string; name?: string }, payload: { message: string; isPriority?: boolean; parentNoteId?: string | null; mentionedUsers?: string[] | null; }, files?: Array<{ path: string; originalname: string; mimetype: string; size: number }>) {
const note = await WorkNote.create({
requestId,
userId: user.userId,
userName: user.name || null,
message: payload.message,
isPriority: !!payload.isPriority,
parentNoteId: payload.parentNoteId || null,
mentionedUsers: payload.mentionedUsers || null,
hasAttachment: files && files.length > 0 ? true : false
} as any);
if (files && files.length) {
for (const f of files) {
await WorkNoteAttachment.create({
noteId: (note as any).noteId,
fileName: f.originalname,
fileType: f.mimetype,
fileSize: f.size,
filePath: f.path,
isDownloadable: true
} as any);
}
}
try {
// Optional realtime emit (if socket layer is initialized)
const { emitToRequestRoom } = require('../realtime/socket');
if (emitToRequestRoom) emitToRequestRoom(requestId, 'worknote:new', { note });
    } catch (e) { logger.warn('Realtime emit skipped (socket layer not initialized)', e); }
return note;
}
}
export const workNoteService = new WorkNoteService();
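The work-note service above lazily requires '../realtime/socket'. A minimal sketch of what such a module could look like, assuming socket.io (already in the dependencies) and a room-per-request convention; the function name emitToRequestRoom comes from the require call above, while the event name and room naming are assumptions and the actual implementation in this commit may differ:
import { Server } from 'socket.io';
import type { Server as HttpServer } from 'http';

let io: Server | null = null;

export function initSocket(httpServer: HttpServer) {
  io = new Server(httpServer, { cors: { origin: '*' } });
  io.on('connection', (socket) => {
    // Clients join one room per workflow request to receive scoped events
    socket.on('request:join', (requestId: string) => socket.join(`request:${requestId}`));
  });
  return io;
}

export function emitToRequestRoom(requestId: string, event: string, payload: unknown) {
  if (!io) return; // socket layer not initialized yet
  io.to(`request:${requestId}`).emit(event, payload);
}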

View File

@ -37,6 +37,11 @@ export interface UpdateWorkflowRequest {
priority?: Priority;
status?: WorkflowStatus;
conclusionRemark?: string;
// For draft updates - full workflow structure
approvalLevels?: CreateApprovalLevel[];
participants?: CreateParticipant[];
// Document updates (add new documents via multipart, delete via IDs)
deleteDocumentIds?: string[];
}
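// Illustrative draft-update payload (values are examples only):
// {
//   "title": "Updated title",
//   "approvalLevels": [
//     { "levelNumber": 1, "approverId": "<uuid>", "approverEmail": "approver@example.com",
//       "approverName": "A. Approver", "tatHours": 24, "isFinalApprover": true }
//   ],
//   "participants": [
//     { "userId": "<uuid>", "userEmail": "viewer@example.com", "userName": "V. Viewer",
//       "participantType": "SPECTATOR" }
//   ],
//   "deleteDocumentIds": ["<uuid>"]
// }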
export interface CreateApprovalLevel {

View File

@ -6,8 +6,28 @@ export const approvalActionSchema = z.object({
rejectionReason: z.string().optional(),
});
// Helper to validate UUID or requestNumber format (REQ-YYYY-NNNNN)
const workflowIdValidator = z.string().refine(
(val) => {
// Check if it's a UUID
const uuidRegex = /^[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$/i;
if (uuidRegex.test(val)) {
return true;
}
// Check if it's a requestNumber format (REQ-YYYY-NNNNN)
const requestNumberRegex = /^REQ-\d{4}-\d+$/i;
if (requestNumberRegex.test(val)) {
return true;
}
return false;
},
{
message: 'Invalid workflow ID - must be a UUID or requestNumber format (REQ-YYYY-NNNNN)'
}
);
export const approvalParamsSchema = z.object({
id: z.string().uuid('Invalid workflow ID'),
id: workflowIdValidator,
levelId: z.string().uuid('Invalid approval level ID'),
});
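A small usage sketch (not part of the diff; the import path is assumed): both identifier formats satisfy the params schema, and safeParse surfaces the custom message for anything else.
import { approvalParamsSchema } from './approval.validator'; // assumed file name

// A requestNumber and a UUID both pass (illustrative values)
approvalParamsSchema.parse({
  id: 'REQ-2025-12057',
  levelId: '550e8400-e29b-41d4-a716-446655440000',
});

// Anything else fails with the custom error message
const result = approvalParamsSchema.safeParse({ id: 'not-an-id', levelId: 'also-not-a-uuid' });
// result.success === false; result.error.issues carry the 'Invalid workflow ID ...' message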

View File

@ -32,10 +32,51 @@ export const updateWorkflowSchema = z.object({
priority: z.enum(['STANDARD', 'EXPRESS'] as const).optional(),
status: z.enum(['DRAFT', 'PENDING', 'IN_PROGRESS', 'APPROVED', 'REJECTED', 'CLOSED'] as const).optional(),
conclusionRemark: z.string().optional(),
// For draft updates - allow updating approval levels and participants
approvalLevels: z.array(z.object({
levelNumber: z.number().int().min(1).max(10),
levelName: z.string().optional(),
approverId: z.string().uuid(),
approverEmail: z.string().email(),
approverName: z.string().min(1),
tatHours: z.number().positive(),
isFinalApprover: z.boolean().optional(),
})).optional(),
participants: z.array(z.object({
userId: z.string().uuid(),
userEmail: z.string().email(),
userName: z.string().min(1),
participantType: z.enum(['INITIATOR', 'APPROVER', 'SPECTATOR'] as const),
canComment: z.boolean().optional(),
canViewDocuments: z.boolean().optional(),
canDownloadDocuments: z.boolean().optional(),
notificationEnabled: z.boolean().optional(),
})).optional(),
deleteDocumentIds: z.array(z.string().uuid()).optional(),
});
// Helper to validate UUID or requestNumber format
const workflowIdValidator = z.string().refine(
(val) => {
// Check if it's a valid UUID
const uuidRegex = /^[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$/i;
if (uuidRegex.test(val)) {
return true;
}
// Check if it's a valid requestNumber format (e.g., REQ-2025-12057)
const requestNumberRegex = /^REQ-\d{4}-\d{5,}$/i;
if (requestNumberRegex.test(val)) {
return true;
}
return false;
},
{
message: 'Invalid workflow ID - must be a valid UUID or requestNumber (e.g., REQ-2025-12057)',
}
);
export const workflowParamsSchema = z.object({
id: z.string().uuid('Invalid workflow ID'),
id: workflowIdValidator,
});
export const workflowQuerySchema = z.object({