890 lines
39 KiB
TypeScript
890 lines
39 KiB
TypeScript
import { Request, Response } from 'express';
|
|
import { WorkflowService } from '@services/workflow.service';
|
|
import { validateCreateWorkflow, validateUpdateWorkflow } from '@validators/workflow.validator';
|
|
import { ResponseHandler } from '@utils/responseHandler';
|
|
import type { AuthenticatedRequest } from '../types/express';
|
|
import { Priority } from '../types/common.types';
|
|
import type { UpdateWorkflowRequest } from '../types/workflow.types';
|
|
import { Document } from '@models/Document';
|
|
import { User } from '@models/User';
|
|
import { gcsStorageService } from '@services/gcsStorage.service';
|
|
import fs from 'fs';
|
|
import path from 'path';
|
|
import crypto from 'crypto';
|
|
import { getRequestMetadata } from '@utils/requestUtils';
|
|
import { enrichApprovalLevels, enrichSpectators, validateInitiator } from '@services/userEnrichment.service';
|
|
import logger from '@utils/logger';
|
|
|
|
// Single module-level service instance shared by every handler in this
// controller. NOTE(review): assumes WorkflowService holds no per-request
// state — confirm before adding mutable fields to the service.
const workflowService = new WorkflowService();
|
|
|
|
export class WorkflowController {
|
|
async createWorkflow(req: AuthenticatedRequest, res: Response): Promise<void> {
|
|
try {
|
|
const validatedData = validateCreateWorkflow(req.body);
|
|
|
|
// Validate initiator exists
|
|
await validateInitiator(req.user.userId);
|
|
|
|
// Handle frontend format: map 'approvers' -> 'approvalLevels' for backward compatibility
|
|
let approvalLevels = validatedData.approvalLevels || [];
|
|
if (!approvalLevels.length && (req.body as any).approvers) {
|
|
const approvers = (req.body as any).approvers || [];
|
|
approvalLevels = approvers.map((a: any, index: number) => ({
|
|
levelNumber: index + 1,
|
|
email: a.email || a.approverEmail,
|
|
tatHours: a.tatType === 'days' ? (a.tat || 0) * 24 : (a.tat || a.tatHours || 24),
|
|
isFinalApprover: index === approvers.length - 1,
|
|
}));
|
|
}
|
|
|
|
// Normalize approval levels: map approverEmail -> email for backward compatibility
|
|
const normalizedApprovalLevels = approvalLevels.map((level: any) => ({
|
|
...level,
|
|
email: level.email || level.approverEmail, // Support both formats
|
|
}));
|
|
|
|
// Enrich approval levels with user data (auto-lookup from AD if not in DB)
|
|
logger.info(`[WorkflowController] Enriching ${normalizedApprovalLevels.length} approval levels`);
|
|
const enrichedApprovalLevels = await enrichApprovalLevels(normalizedApprovalLevels as any);
|
|
|
|
// Enrich spectators if provided
|
|
// Normalize spectators: map userEmail -> email for backward compatibility
|
|
// Filter participants to only include SPECTATOR type (exclude INITIATOR and APPROVER)
|
|
const allParticipants = validatedData.spectators || validatedData.participants || [];
|
|
const spectators = allParticipants.filter((p: any) =>
|
|
!p.participantType || p.participantType === 'SPECTATOR'
|
|
);
|
|
const normalizedSpectators = spectators.map((spec: any) => ({
|
|
...spec,
|
|
email: spec.email || spec.userEmail, // Support both formats
|
|
})).filter((spec: any) => spec.email); // Only include entries with email
|
|
const enrichedSpectators = normalizedSpectators.length > 0
|
|
? await enrichSpectators(normalizedSpectators as any)
|
|
: [];
|
|
|
|
// Build complete participants array automatically
|
|
// This includes: INITIATOR + all APPROVERs + all SPECTATORs
|
|
const initiator = await User.findByPk(req.user.userId);
|
|
const initiatorEmail = (initiator as any).email;
|
|
const initiatorName = (initiator as any).displayName || (initiator as any).email;
|
|
|
|
const autoGeneratedParticipants = [
|
|
// Add initiator
|
|
{
|
|
userId: req.user.userId,
|
|
userEmail: initiatorEmail,
|
|
userName: initiatorName,
|
|
participantType: 'INITIATOR' as const,
|
|
canComment: true,
|
|
canViewDocuments: true,
|
|
canDownloadDocuments: true,
|
|
notificationEnabled: true,
|
|
},
|
|
// Add all approvers from approval levels
|
|
...enrichedApprovalLevels.map((level: any) => ({
|
|
userId: level.approverId,
|
|
userEmail: level.approverEmail,
|
|
userName: level.approverName,
|
|
participantType: 'APPROVER' as const,
|
|
canComment: true,
|
|
canViewDocuments: true,
|
|
canDownloadDocuments: true,
|
|
notificationEnabled: true,
|
|
})),
|
|
// Add all spectators
|
|
...enrichedSpectators,
|
|
];
|
|
|
|
// Convert string literal priority to enum
|
|
const workflowData = {
|
|
...validatedData,
|
|
priority: validatedData.priority as Priority,
|
|
approvalLevels: enrichedApprovalLevels,
|
|
participants: autoGeneratedParticipants,
|
|
};
|
|
|
|
const requestMeta = getRequestMetadata(req);
|
|
const workflow = await workflowService.createWorkflow(req.user.userId, workflowData, {
|
|
ipAddress: requestMeta.ipAddress,
|
|
userAgent: requestMeta.userAgent
|
|
});
|
|
|
|
ResponseHandler.success(res, workflow, 'Workflow created successfully', 201);
|
|
} catch (error) {
|
|
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
|
|
logger.error('[WorkflowController] Failed to create workflow:', error);
|
|
ResponseHandler.error(res, 'Failed to create workflow', 400, errorMessage);
|
|
}
|
|
}
|
|
|
|
// Multipart create: accepts payload JSON and files[]
|
|
async createWorkflowMultipart(req: AuthenticatedRequest, res: Response): Promise<void> {
|
|
try {
|
|
const userId = req.user?.userId;
|
|
if (!userId) {
|
|
ResponseHandler.error(res, 'Unauthorized', 401);
|
|
return;
|
|
}
|
|
|
|
const raw = String(req.body?.payload || '');
|
|
if (!raw) {
|
|
ResponseHandler.error(res, 'payload is required', 400);
|
|
return;
|
|
}
|
|
|
|
let parsed;
|
|
try {
|
|
parsed = JSON.parse(raw);
|
|
} catch (parseError) {
|
|
ResponseHandler.error(res, 'Invalid JSON in payload', 400, parseError instanceof Error ? parseError.message : 'JSON parse error');
|
|
return;
|
|
}
|
|
|
|
// Transform frontend format to backend format BEFORE validation
|
|
// Map 'approvers' -> 'approvalLevels' for backward compatibility
|
|
if (!parsed.approvalLevels && parsed.approvers) {
|
|
const approvers = parsed.approvers || [];
|
|
parsed.approvalLevels = approvers.map((a: any, index: number) => ({
|
|
levelNumber: index + 1,
|
|
email: a.email || a.approverEmail,
|
|
tatHours: a.tatType === 'days' ? (a.tat || 0) * 24 : (a.tat || a.tatHours || 24),
|
|
isFinalApprover: index === approvers.length - 1,
|
|
}));
|
|
}
|
|
|
|
let validated;
|
|
try {
|
|
validated = validateCreateWorkflow(parsed);
|
|
} catch (validationError: any) {
|
|
// Zod validation errors provide detailed information
|
|
const errorMessage = validationError?.errors
|
|
? validationError.errors.map((e: any) => `${e.path.join('.')}: ${e.message}`).join('; ')
|
|
: (validationError instanceof Error ? validationError.message : 'Validation failed');
|
|
logger.error(`[WorkflowController] Validation failed:`, errorMessage);
|
|
ResponseHandler.error(res, 'Validation failed', 400, errorMessage);
|
|
return;
|
|
}
|
|
|
|
// Validate initiator exists
|
|
await validateInitiator(userId);
|
|
|
|
// Use the approval levels from validation (already transformed above)
|
|
let approvalLevels = validated.approvalLevels || [];
|
|
|
|
// Normalize approval levels: map approverEmail -> email for backward compatibility
|
|
const normalizedApprovalLevels = approvalLevels.map((level: any) => ({
|
|
...level,
|
|
email: level.email || level.approverEmail, // Support both formats
|
|
}));
|
|
|
|
// Enrich approval levels with user data (auto-lookup from AD if not in DB)
|
|
logger.info(`[WorkflowController] Enriching ${normalizedApprovalLevels.length} approval levels`);
|
|
const enrichedApprovalLevels = await enrichApprovalLevels(normalizedApprovalLevels as any);
|
|
|
|
// Enrich spectators if provided
|
|
// Normalize spectators: map userEmail -> email for backward compatibility
|
|
// Filter participants to only include SPECTATOR type (exclude INITIATOR and APPROVER)
|
|
const allParticipants = validated.spectators || validated.participants || [];
|
|
const spectators = allParticipants.filter((p: any) =>
|
|
!p.participantType || p.participantType === 'SPECTATOR'
|
|
);
|
|
const normalizedSpectators = spectators.map((spec: any) => ({
|
|
...spec,
|
|
email: spec.email || spec.userEmail, // Support both formats
|
|
})).filter((spec: any) => spec.email); // Only include entries with email
|
|
const enrichedSpectators = normalizedSpectators.length > 0
|
|
? await enrichSpectators(normalizedSpectators as any)
|
|
: [];
|
|
|
|
// Build complete participants array automatically
|
|
// This includes: INITIATOR + all APPROVERs + all SPECTATORs
|
|
const initiator = await User.findByPk(userId);
|
|
const initiatorEmail = (initiator as any).email;
|
|
const initiatorName = (initiator as any).displayName || (initiator as any).email;
|
|
|
|
const autoGeneratedParticipants = [
|
|
// Add initiator
|
|
{
|
|
userId: userId,
|
|
userEmail: initiatorEmail,
|
|
userName: initiatorName,
|
|
participantType: 'INITIATOR' as const,
|
|
canComment: true,
|
|
canViewDocuments: true,
|
|
canDownloadDocuments: true,
|
|
notificationEnabled: true,
|
|
},
|
|
// Add all approvers from approval levels
|
|
...enrichedApprovalLevels.map((level: any) => ({
|
|
userId: level.approverId,
|
|
userEmail: level.approverEmail,
|
|
userName: level.approverName,
|
|
participantType: 'APPROVER' as const,
|
|
canComment: true,
|
|
canViewDocuments: true,
|
|
canDownloadDocuments: true,
|
|
notificationEnabled: true,
|
|
})),
|
|
// Add all spectators
|
|
...enrichedSpectators,
|
|
];
|
|
|
|
const workflowData = {
|
|
...validated,
|
|
priority: validated.priority as Priority,
|
|
approvalLevels: enrichedApprovalLevels,
|
|
participants: autoGeneratedParticipants,
|
|
} as any;
|
|
|
|
const requestMeta = getRequestMetadata(req);
|
|
const workflow = await workflowService.createWorkflow(userId, workflowData, {
|
|
ipAddress: requestMeta.ipAddress,
|
|
userAgent: requestMeta.userAgent
|
|
});
|
|
|
|
// Attach files as documents (category defaults to SUPPORTING)
|
|
const files = (req as any).files as Express.Multer.File[] | undefined;
|
|
const category = (req.body?.category as string) || 'OTHER';
|
|
const docs: any[] = [];
|
|
if (files && files.length > 0) {
|
|
const { activityService } = require('../services/activity.service');
|
|
const user = await User.findByPk(userId);
|
|
const uploaderName = (user as any)?.displayName || (user as any)?.email || 'User';
|
|
|
|
for (const file of files) {
|
|
// Get file buffer - multer.memoryStorage provides buffer, not path
|
|
const fileBuffer = (file as any).buffer || (file.path ? fs.readFileSync(file.path) : Buffer.from(''));
|
|
const checksum = crypto.createHash('sha256').update(fileBuffer).digest('hex');
|
|
const extension = path.extname(file.originalname).replace('.', '').toLowerCase();
|
|
|
|
// Upload with automatic fallback to local storage
|
|
const requestNumber = (workflow as any).requestNumber || (workflow as any).request_number;
|
|
const uploadResult = await gcsStorageService.uploadFileWithFallback({
|
|
buffer: fileBuffer,
|
|
originalName: file.originalname,
|
|
mimeType: file.mimetype,
|
|
requestNumber: requestNumber,
|
|
fileType: 'documents'
|
|
});
|
|
|
|
const storageUrl = uploadResult.storageUrl;
|
|
const gcsFilePath = uploadResult.filePath;
|
|
|
|
// Clean up local temporary file if it exists (from multer disk storage)
|
|
if (file.path && fs.existsSync(file.path)) {
|
|
try {
|
|
fs.unlinkSync(file.path);
|
|
} catch (unlinkError) {
|
|
logger.warn('[Workflow] Failed to delete local temporary file:', unlinkError);
|
|
}
|
|
}
|
|
|
|
// Truncate file names if they exceed database column limits (255 chars)
|
|
const MAX_FILE_NAME_LENGTH = 255;
|
|
const originalFileName = file.originalname;
|
|
let truncatedOriginalFileName = originalFileName;
|
|
|
|
if (originalFileName.length > MAX_FILE_NAME_LENGTH) {
|
|
// Preserve file extension when truncating
|
|
const ext = path.extname(originalFileName);
|
|
const nameWithoutExt = path.basename(originalFileName, ext);
|
|
const maxNameLength = MAX_FILE_NAME_LENGTH - ext.length;
|
|
|
|
if (maxNameLength > 0) {
|
|
truncatedOriginalFileName = nameWithoutExt.substring(0, maxNameLength) + ext;
|
|
} else {
|
|
// If extension itself is too long, just use the extension
|
|
truncatedOriginalFileName = ext.substring(0, MAX_FILE_NAME_LENGTH);
|
|
}
|
|
|
|
logger.warn('[Workflow] File name truncated to fit database column', {
|
|
originalLength: originalFileName.length,
|
|
truncatedLength: truncatedOriginalFileName.length,
|
|
originalName: originalFileName.substring(0, 100) + '...',
|
|
truncatedName: truncatedOriginalFileName,
|
|
});
|
|
}
|
|
|
|
// Generate fileName (basename of the generated file name in GCS)
|
|
const generatedFileName = path.basename(gcsFilePath);
|
|
let truncatedFileName = generatedFileName;
|
|
|
|
if (generatedFileName.length > MAX_FILE_NAME_LENGTH) {
|
|
const ext = path.extname(generatedFileName);
|
|
const nameWithoutExt = path.basename(generatedFileName, ext);
|
|
const maxNameLength = MAX_FILE_NAME_LENGTH - ext.length;
|
|
|
|
if (maxNameLength > 0) {
|
|
truncatedFileName = nameWithoutExt.substring(0, maxNameLength) + ext;
|
|
} else {
|
|
truncatedFileName = ext.substring(0, MAX_FILE_NAME_LENGTH);
|
|
}
|
|
|
|
logger.warn('[Workflow] Generated file name truncated', {
|
|
originalLength: generatedFileName.length,
|
|
truncatedLength: truncatedFileName.length,
|
|
});
|
|
}
|
|
|
|
// Check if storageUrl exceeds database column limit (500 chars)
|
|
const MAX_STORAGE_URL_LENGTH = 500;
|
|
let finalStorageUrl = storageUrl;
|
|
if (storageUrl && storageUrl.length > MAX_STORAGE_URL_LENGTH) {
|
|
logger.warn('[Workflow] Storage URL exceeds database column limit, storing null', {
|
|
originalLength: storageUrl.length,
|
|
maxLength: MAX_STORAGE_URL_LENGTH,
|
|
urlPrefix: storageUrl.substring(0, 100),
|
|
filePath: gcsFilePath,
|
|
});
|
|
// For signed URLs, store null and generate on-demand later
|
|
finalStorageUrl = null as any;
|
|
}
|
|
|
|
logger.info('[Workflow] Creating document record', {
|
|
fileName: truncatedOriginalFileName,
|
|
filePath: gcsFilePath,
|
|
storageUrl: finalStorageUrl ? 'present' : 'null (too long)',
|
|
requestId: workflow.requestId
|
|
});
|
|
|
|
try {
|
|
const doc = await Document.create({
|
|
requestId: workflow.requestId,
|
|
uploadedBy: userId,
|
|
fileName: truncatedFileName,
|
|
originalFileName: truncatedOriginalFileName,
|
|
fileType: extension,
|
|
fileExtension: extension,
|
|
fileSize: file.size,
|
|
filePath: gcsFilePath, // Store GCS path or local path
|
|
storageUrl: finalStorageUrl, // Store GCS URL or local URL (null if too long)
|
|
mimeType: file.mimetype,
|
|
checksum,
|
|
isGoogleDoc: false,
|
|
googleDocUrl: null as any,
|
|
category: category || 'OTHER',
|
|
version: 1,
|
|
parentDocumentId: null as any,
|
|
isDeleted: false,
|
|
downloadCount: 0,
|
|
} as any);
|
|
docs.push(doc);
|
|
logger.info('[Workflow] Document record created successfully', {
|
|
documentId: doc.documentId,
|
|
fileName: file.originalname,
|
|
});
|
|
} catch (docError) {
|
|
const docErrorMessage = docError instanceof Error ? docError.message : 'Unknown error';
|
|
const docErrorStack = docError instanceof Error ? docError.stack : undefined;
|
|
logger.error('[Workflow] Failed to create document record', {
|
|
error: docErrorMessage,
|
|
stack: docErrorStack,
|
|
fileName: file.originalname,
|
|
requestId: workflow.requestId,
|
|
filePath: gcsFilePath,
|
|
storageUrl: storageUrl,
|
|
});
|
|
// Re-throw to be caught by outer catch block
|
|
throw docError;
|
|
}
|
|
|
|
// Log document upload activity
|
|
const requestMeta = getRequestMetadata(req);
|
|
activityService.log({
|
|
requestId: workflow.requestId,
|
|
type: 'document_added',
|
|
user: { userId, name: uploaderName },
|
|
timestamp: new Date().toISOString(),
|
|
action: 'Document Added',
|
|
details: `Added ${file.originalname} as supporting document by ${uploaderName}`,
|
|
metadata: { fileName: file.originalname, fileSize: file.size, fileType: extension },
|
|
ipAddress: requestMeta.ipAddress,
|
|
userAgent: requestMeta.userAgent
|
|
});
|
|
}
|
|
}
|
|
|
|
ResponseHandler.success(res, { requestId: workflow.requestId, documents: docs }, 'Workflow created with documents', 201);
|
|
} catch (error) {
|
|
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
|
|
const errorStack = error instanceof Error ? error.stack : undefined;
|
|
logger.error('[WorkflowController] createWorkflowMultipart failed', {
|
|
error: errorMessage,
|
|
stack: errorStack,
|
|
userId: req.user?.userId,
|
|
filesCount: (req as any).files?.length || 0,
|
|
});
|
|
ResponseHandler.error(res, 'Failed to create workflow', 400, errorMessage);
|
|
}
|
|
}
|
|
|
|
async getWorkflow(req: Request, res: Response): Promise<void> {
|
|
try {
|
|
const { id } = req.params;
|
|
const workflow = await workflowService.getWorkflowById(id);
|
|
|
|
if (!workflow) {
|
|
ResponseHandler.notFound(res, 'Workflow not found');
|
|
return;
|
|
}
|
|
|
|
ResponseHandler.success(res, workflow, 'Workflow retrieved successfully');
|
|
} catch (error) {
|
|
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
|
|
ResponseHandler.error(res, 'Failed to get workflow', 500, errorMessage);
|
|
}
|
|
}
|
|
|
|
async getWorkflowDetails(req: AuthenticatedRequest, res: Response): Promise<void> {
|
|
try {
|
|
const { id } = req.params as any;
|
|
const userId = req.user?.userId;
|
|
|
|
if (!userId) {
|
|
ResponseHandler.error(res, 'Authentication required', 401);
|
|
return;
|
|
}
|
|
|
|
// Check if user has access to this request
|
|
const accessCheck = await workflowService.checkUserRequestAccess(userId, id);
|
|
if (!accessCheck.hasAccess) {
|
|
ResponseHandler.error(res, accessCheck.reason || 'Access denied', 403);
|
|
return;
|
|
}
|
|
|
|
const result = await workflowService.getWorkflowDetails(id);
|
|
if (!result) {
|
|
ResponseHandler.notFound(res, 'Workflow not found');
|
|
return;
|
|
}
|
|
ResponseHandler.success(res, result, 'Workflow details fetched');
|
|
} catch (error) {
|
|
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
|
|
ResponseHandler.error(res, 'Failed to fetch workflow details', 500, errorMessage);
|
|
}
|
|
}
|
|
|
|
async listWorkflows(req: Request, res: Response): Promise<void> {
|
|
try {
|
|
const page = Math.max(parseInt(String(req.query.page || '1'), 10), 1);
|
|
const limit = Math.min(Math.max(parseInt(String(req.query.limit || '20'), 10), 1), 100);
|
|
|
|
// Extract filter parameters
|
|
const filters = {
|
|
search: req.query.search as string | undefined,
|
|
status: req.query.status as string | undefined,
|
|
priority: req.query.priority as string | undefined,
|
|
templateType: req.query.templateType as string | undefined,
|
|
department: req.query.department as string | undefined,
|
|
initiator: req.query.initiator as string | undefined,
|
|
approver: req.query.approver as string | undefined,
|
|
approverType: req.query.approverType as 'current' | 'any' | undefined,
|
|
slaCompliance: req.query.slaCompliance as string | undefined,
|
|
dateRange: req.query.dateRange as string | undefined,
|
|
startDate: req.query.startDate as string | undefined,
|
|
endDate: req.query.endDate as string | undefined,
|
|
};
|
|
|
|
const result = await workflowService.listWorkflows(page, limit, filters);
|
|
ResponseHandler.success(res, result, 'Workflows fetched');
|
|
} catch (error) {
|
|
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
|
|
ResponseHandler.error(res, 'Failed to list workflows', 500, errorMessage);
|
|
}
|
|
}
|
|
|
|
async listMyRequests(req: Request, res: Response): Promise<void> {
|
|
try {
|
|
const userId = (req as any).user?.userId || (req as any).user?.id || (req as any).auth?.userId;
|
|
const page = Math.max(parseInt(String(req.query.page || '1'), 10), 1);
|
|
const limit = Math.min(Math.max(parseInt(String(req.query.limit || '20'), 10), 1), 100);
|
|
|
|
// Extract filter parameters (same as listWorkflows)
|
|
const search = req.query.search as string | undefined;
|
|
const status = req.query.status as string | undefined;
|
|
const priority = req.query.priority as string | undefined;
|
|
const department = req.query.department as string | undefined;
|
|
const initiator = req.query.initiator as string | undefined;
|
|
const approver = req.query.approver as string | undefined;
|
|
const approverType = req.query.approverType as 'current' | 'any' | undefined;
|
|
const slaCompliance = req.query.slaCompliance as string | undefined;
|
|
const dateRange = req.query.dateRange as string | undefined;
|
|
const startDate = req.query.startDate as string | undefined;
|
|
const endDate = req.query.endDate as string | undefined;
|
|
|
|
const filters = { search, status, priority, department, initiator, approver, approverType, slaCompliance, dateRange, startDate, endDate };
|
|
|
|
const result = await workflowService.listMyRequests(userId, page, limit, filters);
|
|
ResponseHandler.success(res, result, 'My requests fetched');
|
|
} catch (error) {
|
|
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
|
|
ResponseHandler.error(res, 'Failed to fetch my requests', 500, errorMessage);
|
|
}
|
|
}
|
|
|
|
/**
|
|
* List requests where user is a PARTICIPANT (not initiator) - for regular users' "All Requests" page
|
|
* Completely separate from listWorkflows (admin) to avoid interference
|
|
*/
|
|
async listParticipantRequests(req: Request, res: Response): Promise<void> {
|
|
try {
|
|
const userId = (req as any).user?.userId || (req as any).user?.id || (req as any).auth?.userId;
|
|
const page = Math.max(parseInt(String(req.query.page || '1'), 10), 1);
|
|
const limit = Math.min(Math.max(parseInt(String(req.query.limit || '20'), 10), 1), 100);
|
|
|
|
// Extract filter parameters (same as listWorkflows)
|
|
const search = req.query.search as string | undefined;
|
|
const status = req.query.status as string | undefined;
|
|
const priority = req.query.priority as string | undefined;
|
|
const templateType = req.query.templateType as string | undefined;
|
|
const department = req.query.department as string | undefined;
|
|
const initiator = req.query.initiator as string | undefined;
|
|
const approver = req.query.approver as string | undefined;
|
|
const approverType = req.query.approverType as 'current' | 'any' | undefined;
|
|
const slaCompliance = req.query.slaCompliance as string | undefined;
|
|
const dateRange = req.query.dateRange as string | undefined;
|
|
const startDate = req.query.startDate as string | undefined;
|
|
const endDate = req.query.endDate as string | undefined;
|
|
|
|
const filters = { search, status, priority, templateType, department, initiator, approver, approverType, slaCompliance, dateRange, startDate, endDate };
|
|
|
|
const result = await workflowService.listParticipantRequests(userId, page, limit, filters);
|
|
ResponseHandler.success(res, result, 'Participant requests fetched');
|
|
} catch (error) {
|
|
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
|
|
ResponseHandler.error(res, 'Failed to fetch participant requests', 500, errorMessage);
|
|
}
|
|
}
|
|
|
|
/**
|
|
* List requests where user is the initiator (for "My Requests" page)
|
|
*/
|
|
async listMyInitiatedRequests(req: Request, res: Response): Promise<void> {
|
|
try {
|
|
const userId = (req as any).user?.userId || (req as any).user?.id || (req as any).auth?.userId;
|
|
const page = Math.max(parseInt(String(req.query.page || '1'), 10), 1);
|
|
const limit = Math.min(Math.max(parseInt(String(req.query.limit || '20'), 10), 1), 100);
|
|
|
|
// Extract filter parameters
|
|
const search = req.query.search as string | undefined;
|
|
const status = req.query.status as string | undefined;
|
|
const priority = req.query.priority as string | undefined;
|
|
const templateType = req.query.templateType as string | undefined;
|
|
const department = req.query.department as string | undefined;
|
|
const slaCompliance = req.query.slaCompliance as string | undefined;
|
|
const dateRange = req.query.dateRange as string | undefined;
|
|
const startDate = req.query.startDate as string | undefined;
|
|
const endDate = req.query.endDate as string | undefined;
|
|
|
|
const filters = { search, status, priority, templateType, department, slaCompliance, dateRange, startDate, endDate };
|
|
|
|
const result = await workflowService.listMyInitiatedRequests(userId, page, limit, filters);
|
|
ResponseHandler.success(res, result, 'My initiated requests fetched');
|
|
} catch (error) {
|
|
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
|
|
ResponseHandler.error(res, 'Failed to fetch my initiated requests', 500, errorMessage);
|
|
}
|
|
}
|
|
|
|
async listOpenForMe(req: Request, res: Response): Promise<void> {
|
|
try {
|
|
const userId = (req as any).user?.userId || (req as any).user?.id || (req as any).auth?.userId;
|
|
const page = Math.max(parseInt(String(req.query.page || '1'), 10), 1);
|
|
const limit = Math.min(Math.max(parseInt(String(req.query.limit || '20'), 10), 1), 100);
|
|
|
|
// Extract filter parameters
|
|
const filters = {
|
|
search: req.query.search as string | undefined,
|
|
status: req.query.status as string | undefined,
|
|
priority: req.query.priority as string | undefined,
|
|
templateType: req.query.templateType as string | undefined
|
|
};
|
|
|
|
// Extract sorting parameters
|
|
const sortBy = req.query.sortBy as string | undefined;
|
|
const sortOrder = (req.query.sortOrder as string | undefined) || 'desc';
|
|
|
|
const result = await workflowService.listOpenForMe(userId, page, limit, filters, sortBy, sortOrder);
|
|
ResponseHandler.success(res, result, 'Open requests for user fetched');
|
|
} catch (error) {
|
|
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
|
|
ResponseHandler.error(res, 'Failed to fetch open requests for user', 500, errorMessage);
|
|
}
|
|
}
|
|
|
|
async listClosedByMe(req: Request, res: Response): Promise<void> {
|
|
try {
|
|
const userId = (req as any).user?.userId || (req as any).user?.id || (req as any).auth?.userId;
|
|
const page = Math.max(parseInt(String(req.query.page || '1'), 10), 1);
|
|
const limit = Math.min(Math.max(parseInt(String(req.query.limit || '20'), 10), 1), 100);
|
|
|
|
// Extract filter parameters
|
|
const filters = {
|
|
search: req.query.search as string | undefined,
|
|
status: req.query.status as string | undefined,
|
|
priority: req.query.priority as string | undefined,
|
|
templateType: req.query.templateType as string | undefined
|
|
};
|
|
|
|
// Extract sorting parameters
|
|
const sortBy = req.query.sortBy as string | undefined;
|
|
const sortOrder = (req.query.sortOrder as string | undefined) || 'desc';
|
|
|
|
const result = await workflowService.listClosedByMe(userId, page, limit, filters, sortBy, sortOrder);
|
|
ResponseHandler.success(res, result, 'Closed requests by user fetched');
|
|
} catch (error) {
|
|
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
|
|
ResponseHandler.error(res, 'Failed to fetch closed requests by user', 500, errorMessage);
|
|
}
|
|
}
|
|
|
|
async updateWorkflow(req: Request, res: Response): Promise<void> {
|
|
try {
|
|
const { id } = req.params;
|
|
const validatedData = validateUpdateWorkflow(req.body);
|
|
// Build a strongly-typed payload for the service layer
|
|
const updateData: UpdateWorkflowRequest = { ...validatedData } as any;
|
|
if (validatedData.priority) {
|
|
// Map string literal to enum value explicitly
|
|
updateData.priority = validatedData.priority === 'EXPRESS' ? Priority.EXPRESS : Priority.STANDARD;
|
|
}
|
|
|
|
const workflow = await workflowService.updateWorkflow(id, updateData);
|
|
|
|
if (!workflow) {
|
|
ResponseHandler.notFound(res, 'Workflow not found');
|
|
return;
|
|
}
|
|
|
|
ResponseHandler.success(res, workflow, 'Workflow updated successfully');
|
|
} catch (error) {
|
|
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
|
|
ResponseHandler.error(res, 'Failed to update workflow', 400, errorMessage);
|
|
}
|
|
}
|
|
|
|
// Multipart update for drafts: accepts payload JSON and files[]
|
|
async updateWorkflowMultipart(req: AuthenticatedRequest, res: Response): Promise<void> {
|
|
try {
|
|
const userId = req.user?.userId;
|
|
if (!userId) {
|
|
ResponseHandler.error(res, 'Unauthorized', 401);
|
|
return;
|
|
}
|
|
|
|
const { id } = req.params;
|
|
const raw = String(req.body?.payload || '');
|
|
if (!raw) {
|
|
ResponseHandler.error(res, 'payload is required', 400);
|
|
return;
|
|
}
|
|
const parsed = JSON.parse(raw);
|
|
const validated = validateUpdateWorkflow(parsed);
|
|
const updateData: UpdateWorkflowRequest = { ...validated } as any;
|
|
if (validated.priority) {
|
|
updateData.priority = validated.priority === 'EXPRESS' ? Priority.EXPRESS : Priority.STANDARD;
|
|
}
|
|
|
|
// Update workflow
|
|
let workflow;
|
|
try {
|
|
workflow = await workflowService.updateWorkflow(id, updateData);
|
|
if (!workflow) {
|
|
ResponseHandler.notFound(res, 'Workflow not found');
|
|
return;
|
|
}
|
|
logger.info('[WorkflowController] Workflow updated successfully', {
|
|
requestId: id,
|
|
workflowId: (workflow as any).requestId,
|
|
});
|
|
} catch (updateError) {
|
|
const updateErrorMessage = updateError instanceof Error ? updateError.message : 'Unknown error';
|
|
const updateErrorStack = updateError instanceof Error ? updateError.stack : undefined;
|
|
logger.error('[WorkflowController] updateWorkflow failed', {
|
|
error: updateErrorMessage,
|
|
stack: updateErrorStack,
|
|
requestId: id,
|
|
updateData: JSON.stringify(updateData, null, 2),
|
|
});
|
|
throw updateError; // Re-throw to be caught by outer catch block
|
|
}
|
|
|
|
// Attach new files as documents
|
|
const files = (req as any).files as Express.Multer.File[] | undefined;
|
|
const category = (req.body?.category as string) || 'SUPPORTING';
|
|
const docs: any[] = [];
|
|
if (files && files.length > 0) {
|
|
const actualRequestId = (workflow as any).requestId;
|
|
for (const file of files) {
|
|
// Get file buffer - multer.memoryStorage provides buffer, not path
|
|
const fileBuffer = (file as any).buffer || (file.path ? fs.readFileSync(file.path) : Buffer.from(''));
|
|
const checksum = crypto.createHash('sha256').update(fileBuffer).digest('hex');
|
|
const extension = path.extname(file.originalname).replace('.', '').toLowerCase();
|
|
|
|
// Upload with automatic fallback to local storage
|
|
const requestNumber = (workflow as any).requestNumber || (workflow as any).request_number;
|
|
const uploadResult = await gcsStorageService.uploadFileWithFallback({
|
|
buffer: fileBuffer,
|
|
originalName: file.originalname,
|
|
mimeType: file.mimetype,
|
|
requestNumber: requestNumber,
|
|
fileType: 'documents'
|
|
});
|
|
|
|
const storageUrl = uploadResult.storageUrl;
|
|
const gcsFilePath = uploadResult.filePath;
|
|
|
|
// Clean up local temporary file if it exists (from multer disk storage)
|
|
if (file.path && fs.existsSync(file.path)) {
|
|
try {
|
|
fs.unlinkSync(file.path);
|
|
} catch (unlinkError) {
|
|
logger.warn('[Workflow] Failed to delete local temporary file:', unlinkError);
|
|
}
|
|
}
|
|
|
|
// Truncate file names if they exceed database column limits (255 chars)
|
|
const MAX_FILE_NAME_LENGTH = 255;
|
|
const originalFileName = file.originalname;
|
|
let truncatedOriginalFileName = originalFileName;
|
|
|
|
if (originalFileName.length > MAX_FILE_NAME_LENGTH) {
|
|
// Preserve file extension when truncating
|
|
const ext = path.extname(originalFileName);
|
|
const nameWithoutExt = path.basename(originalFileName, ext);
|
|
const maxNameLength = MAX_FILE_NAME_LENGTH - ext.length;
|
|
|
|
if (maxNameLength > 0) {
|
|
truncatedOriginalFileName = nameWithoutExt.substring(0, maxNameLength) + ext;
|
|
} else {
|
|
// If extension itself is too long, just use the extension
|
|
truncatedOriginalFileName = ext.substring(0, MAX_FILE_NAME_LENGTH);
|
|
}
|
|
|
|
logger.warn('[Workflow] File name truncated to fit database column', {
|
|
originalLength: originalFileName.length,
|
|
truncatedLength: truncatedOriginalFileName.length,
|
|
originalName: originalFileName.substring(0, 100) + '...',
|
|
truncatedName: truncatedOriginalFileName,
|
|
});
|
|
}
|
|
|
|
// Generate fileName (basename of the generated file name in GCS)
|
|
const generatedFileName = path.basename(gcsFilePath);
|
|
let truncatedFileName = generatedFileName;
|
|
|
|
if (generatedFileName.length > MAX_FILE_NAME_LENGTH) {
|
|
const ext = path.extname(generatedFileName);
|
|
const nameWithoutExt = path.basename(generatedFileName, ext);
|
|
const maxNameLength = MAX_FILE_NAME_LENGTH - ext.length;
|
|
|
|
if (maxNameLength > 0) {
|
|
truncatedFileName = nameWithoutExt.substring(0, maxNameLength) + ext;
|
|
} else {
|
|
truncatedFileName = ext.substring(0, MAX_FILE_NAME_LENGTH);
|
|
}
|
|
|
|
logger.warn('[Workflow] Generated file name truncated', {
|
|
originalLength: generatedFileName.length,
|
|
truncatedLength: truncatedFileName.length,
|
|
});
|
|
}
|
|
|
|
// Check if storageUrl exceeds database column limit (500 chars)
|
|
const MAX_STORAGE_URL_LENGTH = 500;
|
|
let finalStorageUrl = storageUrl;
|
|
if (storageUrl && storageUrl.length > MAX_STORAGE_URL_LENGTH) {
|
|
logger.warn('[Workflow] Storage URL exceeds database column limit, storing null', {
|
|
originalLength: storageUrl.length,
|
|
maxLength: MAX_STORAGE_URL_LENGTH,
|
|
urlPrefix: storageUrl.substring(0, 100),
|
|
filePath: gcsFilePath,
|
|
});
|
|
// For signed URLs, store null and generate on-demand later
|
|
finalStorageUrl = null as any;
|
|
}
|
|
|
|
logger.info('[Workflow] Creating document record', {
|
|
fileName: truncatedOriginalFileName,
|
|
filePath: gcsFilePath,
|
|
storageUrl: finalStorageUrl ? 'present' : 'null (too long)',
|
|
requestId: actualRequestId
|
|
});
|
|
|
|
try {
|
|
const doc = await Document.create({
|
|
requestId: actualRequestId,
|
|
uploadedBy: userId,
|
|
fileName: truncatedFileName,
|
|
originalFileName: truncatedOriginalFileName,
|
|
fileType: extension,
|
|
fileExtension: extension,
|
|
fileSize: file.size,
|
|
filePath: gcsFilePath, // Store GCS path or local path
|
|
storageUrl: finalStorageUrl, // Store GCS URL or local URL (null if too long)
|
|
mimeType: file.mimetype,
|
|
checksum,
|
|
isGoogleDoc: false,
|
|
googleDocUrl: null as any,
|
|
category: category || 'OTHER',
|
|
version: 1,
|
|
parentDocumentId: null as any,
|
|
isDeleted: false,
|
|
downloadCount: 0,
|
|
} as any);
|
|
docs.push(doc);
|
|
logger.info('[Workflow] Document record created successfully', {
|
|
documentId: doc.documentId,
|
|
fileName: file.originalname,
|
|
});
|
|
} catch (docError) {
|
|
const docErrorMessage = docError instanceof Error ? docError.message : 'Unknown error';
|
|
const docErrorStack = docError instanceof Error ? docError.stack : undefined;
|
|
logger.error('[Workflow] Failed to create document record', {
|
|
error: docErrorMessage,
|
|
stack: docErrorStack,
|
|
fileName: file.originalname,
|
|
requestId: actualRequestId,
|
|
filePath: gcsFilePath,
|
|
storageUrl: storageUrl,
|
|
});
|
|
// Continue with other files, but log the error
|
|
// Don't throw here - let the workflow update complete
|
|
}
|
|
}
|
|
}
|
|
|
|
ResponseHandler.success(res, { workflow, newDocuments: docs }, 'Workflow updated with documents', 200);
|
|
} catch (error) {
|
|
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
|
|
const errorStack = error instanceof Error ? error.stack : undefined;
|
|
logger.error('[WorkflowController] updateWorkflowMultipart failed', {
|
|
error: errorMessage,
|
|
stack: errorStack,
|
|
requestId: req.params.id,
|
|
userId: req.user?.userId,
|
|
hasFiles: !!(req as any).files && (req as any).files.length > 0,
|
|
fileCount: (req as any).files ? (req as any).files.length : 0,
|
|
});
|
|
ResponseHandler.error(res, 'Failed to update workflow', 400, errorMessage);
|
|
}
|
|
}
|
|
|
|
async submitWorkflow(req: Request, res: Response): Promise<void> {
|
|
try {
|
|
const { id } = req.params;
|
|
const workflow = await workflowService.submitWorkflow(id);
|
|
|
|
if (!workflow) {
|
|
ResponseHandler.notFound(res, 'Workflow not found');
|
|
return;
|
|
}
|
|
|
|
ResponseHandler.success(res, workflow, 'Workflow submitted successfully');
|
|
} catch (error) {
|
|
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
|
|
ResponseHandler.error(res, 'Failed to submit workflow', 400, errorMessage);
|
|
}
|
|
}
|
|
}
|