created create-request flow
This commit is contained in:
parent 7c1d616676
commit b6fe3a1e83

backend_structure.txt (376 lines, new file)
@@ -0,0 +1,376 @@
%% Royal Enfield Workflow Management System
%% Entity Relationship Diagram
%% Database: PostgreSQL 16.x

erDiagram
    %% Core Tables

    users ||--o{ workflow_requests : "initiates"
    users ||--o{ approval_levels : "approves"
    users ||--o{ participants : "participates"
    users ||--o{ work_notes : "posts"
    users ||--o{ documents : "uploads"
    users ||--o{ activities : "performs"
    users ||--o{ notifications : "receives"
    users ||--o{ user_sessions : "has"
    users ||--o{ users : "reports_to"

    workflow_requests ||--|{ approval_levels : "has"
    workflow_requests ||--o{ participants : "involves"
    workflow_requests ||--o{ documents : "contains"
    workflow_requests ||--o{ work_notes : "has"
    workflow_requests ||--o{ activities : "logs"
    workflow_requests ||--o{ tat_tracking : "monitors"
    workflow_requests ||--o{ notifications : "triggers"
    workflow_requests ||--|| conclusion_remarks : "concludes"

    approval_levels ||--o{ tat_tracking : "tracks"

    work_notes ||--o{ work_note_attachments : "has"

    notifications ||--o{ email_logs : "sends"
    notifications ||--o{ sms_logs : "sends"

    %% Entity Definitions

    users {
        uuid user_id PK
        varchar employee_id UK "HR System ID"
        varchar email UK "Primary Email"
        varchar first_name
        varchar last_name
        varchar display_name "Full Name"
        varchar department
        varchar designation
        varchar phone
        boolean is_active "Account Status"
        boolean is_admin "Super User Flag"
        timestamp last_login
        timestamp created_at
        timestamp updated_at
    }

    workflow_requests {
        uuid request_id PK
        varchar request_number UK "REQ-YYYY-NNNNN"
        uuid initiator_id FK
        varchar template_type "CUSTOM or TEMPLATE"
        varchar title "Request Summary"
        text description "Detailed Description"
        enum priority "STANDARD or EXPRESS"
        enum status "DRAFT to CLOSED"
        integer current_level "Active Stage"
        integer total_levels "Max 10 Levels"
        decimal total_tat_hours "Cumulative TAT"
        timestamp submission_date
        timestamp closure_date
        text conclusion_remark "Final Summary"
        text ai_generated_conclusion "AI Version"
        boolean is_draft "Saved Draft"
        boolean is_deleted "Soft Delete"
        timestamp created_at
        timestamp updated_at
    }

    approval_levels {
        uuid level_id PK
        uuid request_id FK
        integer level_number "Sequential Level"
        varchar level_name "Optional Label"
        uuid approver_id FK
        varchar approver_email
        varchar approver_name
        decimal tat_hours "Level TAT"
        integer tat_days "Calculated Days"
        enum status "PENDING to APPROVED"
        timestamp level_start_time "Timer Start"
        timestamp level_end_time "Timer End"
        timestamp action_date "Decision Time"
        text comments "Approval Notes"
        text rejection_reason
        boolean is_final_approver "Last Level"
        decimal elapsed_hours "Time Used"
        decimal remaining_hours "Time Left"
        decimal tat_percentage_used "Usage %"
        timestamp created_at
        timestamp updated_at
    }

    participants {
        uuid participant_id PK
        uuid request_id FK
        uuid user_id FK
        varchar user_email
        varchar user_name
        enum participant_type "SPECTATOR etc"
        boolean can_comment "Permission"
        boolean can_view_documents "Permission"
        boolean can_download_documents "Permission"
        boolean notification_enabled
        uuid added_by FK
        timestamp added_at
        boolean is_active
    }

    documents {
        uuid document_id PK
        uuid request_id FK
        uuid uploaded_by FK
        varchar file_name "Storage Name"
        varchar original_file_name "Display Name"
        varchar file_type
        varchar file_extension
        bigint file_size "Bytes (Max 10MB)"
        varchar file_path "Cloud Path"
        varchar storage_url "Public URL"
        varchar mime_type
        varchar checksum "SHA-256"
        boolean is_google_doc
        varchar google_doc_url
        enum category "Document Type"
        integer version "Version Number"
        uuid parent_document_id FK "Version Parent"
        boolean is_deleted
        integer download_count
        timestamp uploaded_at
    }

    work_notes {
        uuid note_id PK
        uuid request_id FK
        uuid user_id FK
        varchar user_name
        varchar user_role "INITIATOR etc"
        text message "Max 2000 chars"
        varchar message_type "COMMENT etc"
        boolean is_priority "Urgent Flag"
        boolean has_attachment
        uuid parent_note_id FK "Threading"
        uuid[] mentioned_users "@Tagged Users"
        jsonb reactions "Emoji Responses"
        boolean is_edited
        boolean is_deleted
        timestamp created_at
        timestamp updated_at
    }

    work_note_attachments {
        uuid attachment_id PK
        uuid note_id FK
        varchar file_name
        varchar file_type
        bigint file_size
        varchar file_path
        varchar storage_url
        boolean is_downloadable
        integer download_count
        timestamp uploaded_at
    }

    activities {
        uuid activity_id PK
        uuid request_id FK
        uuid user_id FK "NULL for System"
        varchar user_name
        varchar activity_type "Event Type"
        text activity_description
        varchar activity_category "Classification"
        varchar severity "INFO to CRITICAL"
        jsonb metadata "Additional Context"
        boolean is_system_event
        varchar ip_address
        text user_agent
        timestamp created_at
    }

    notifications {
        uuid notification_id PK
        uuid user_id FK
        uuid request_id FK
        varchar notification_type "Event Type"
        varchar title
        text message
        boolean is_read
        enum priority "LOW to URGENT"
        varchar action_url
        boolean action_required
        jsonb metadata
        varchar[] sent_via "IN_APP, EMAIL, SMS"
        boolean email_sent
        boolean sms_sent
        boolean push_sent
        timestamp read_at
        timestamp expires_at
        timestamp created_at
    }

    tat_tracking {
        uuid tracking_id PK
        uuid request_id FK
        uuid level_id FK "NULL for Request"
        varchar tracking_type "REQUEST or LEVEL"
        enum tat_status "ON_TRACK to BREACHED"
        decimal total_tat_hours
        decimal elapsed_hours
        decimal remaining_hours
        decimal percentage_used
        boolean threshold_50_breached
        timestamp threshold_50_alerted_at
        boolean threshold_80_breached
        timestamp threshold_80_alerted_at
        boolean threshold_100_breached
        timestamp threshold_100_alerted_at
        integer alert_count
        timestamp last_calculated_at
    }

    conclusion_remarks {
        uuid conclusion_id PK
        uuid request_id FK
        text ai_generated_remark "AI Output"
        varchar ai_model_used "GPT-4 etc"
        decimal ai_confidence_score "0.00 to 1.00"
        text final_remark "User Edited"
        uuid edited_by FK
        boolean is_edited
        integer edit_count
        jsonb approval_summary
        jsonb document_summary
        text[] key_discussion_points
        timestamp generated_at
        timestamp finalized_at
    }

    audit_logs {
        uuid audit_id PK
        uuid user_id FK
        varchar entity_type "Table Name"
        uuid entity_id "Record ID"
        varchar action "CREATE, UPDATE etc"
        varchar action_category
        jsonb old_values "Before"
        jsonb new_values "After"
        text changes_summary
        varchar ip_address
        text user_agent
        varchar session_id
        varchar request_method "GET, POST etc"
        varchar request_url
        integer response_status "HTTP Code"
        integer execution_time_ms
        timestamp created_at
    }

    user_sessions {
        uuid session_id PK
        uuid user_id FK
        varchar session_token UK "JWT Access"
        varchar refresh_token "JWT Refresh"
        varchar ip_address
        text user_agent
        varchar device_type "WEB, MOBILE"
        varchar browser
        varchar os
        timestamp login_at
        timestamp last_activity_at
        timestamp logout_at
        timestamp expires_at
        boolean is_active
        varchar logout_reason
    }

    email_logs {
        uuid email_log_id PK
        uuid request_id FK
        uuid notification_id FK
        varchar recipient_email
        uuid recipient_user_id FK
        text[] cc_emails
        text[] bcc_emails
        varchar subject
        text body
        varchar email_type
        varchar status "QUEUED to SENT"
        integer send_attempts
        timestamp sent_at
        timestamp failed_at
        text failure_reason
        timestamp opened_at
        timestamp clicked_at
        timestamp created_at
    }

    sms_logs {
        uuid sms_log_id PK
        uuid request_id FK
        uuid notification_id FK
        varchar recipient_phone
        uuid recipient_user_id FK
        text message
        varchar sms_type
        varchar status "QUEUED to DELIVERED"
        integer send_attempts
        timestamp sent_at
        timestamp delivered_at
        timestamp failed_at
        text failure_reason
        varchar sms_provider
        varchar sms_provider_message_id
        decimal cost
        timestamp created_at
    }

    system_settings {
        uuid setting_id PK
        varchar setting_key UK "CONFIG_NAME"
        text setting_value "Value"
        varchar setting_type "STRING, NUMBER etc"
        varchar setting_category "TAT, NOTIFICATION"
        text description
        boolean is_editable
        boolean is_sensitive "Encrypted"
        jsonb validation_rules
        text default_value
        uuid updated_by FK
        timestamp created_at
        timestamp updated_at
    }

    workflow_templates {
        uuid template_id PK
        varchar template_name "Future Scope"
        text template_description
        varchar template_category
        jsonb approval_levels_config
        decimal default_tat_hours
        boolean is_active
        integer usage_count
        uuid created_by FK
        timestamp created_at
        timestamp updated_at
    }

    report_cache {
        uuid cache_id PK
        varchar report_type
        jsonb report_params "Input Filters"
        jsonb report_data "Cached Result"
        uuid generated_by FK
        timestamp generated_at
        timestamp expires_at
        integer access_count
        timestamp last_accessed_at
    }

%% Notes and Constraints
%% 1. All timestamps are WITH TIME ZONE
%% 2. UUIDs are generated via uuid-ossp extension
%% 3. Enums are custom types defined separately
%% 4. JSONB used for flexible metadata storage
%% 5. Soft deletes via is_deleted flags
%% 6. Audit trail via activities and audit_logs
%% 7. Multi-channel notifications (in-app, email, SMS, push)
%% 8. TAT thresholds: 50%, 80%, 100%
%% 9. Max approval levels: 10
%% 10. Max file size: 10 MB
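The tat_tracking thresholds above (50%, 80%, 100%) map directly onto a percentage check. A minimal TypeScript sketch of that evaluation — the function shape is an assumption; only the column names come from the diagram:

// Sketch only: mirrors the threshold_50/80/100 flags in tat_tracking.
interface TatSnapshot {
  totalTatHours: number;
  elapsedHours: number;
}

function evaluateTat(t: TatSnapshot) {
  const percentageUsed = (t.elapsedHours / t.totalTatHours) * 100;
  return {
    percentageUsed,
    threshold50Breached: percentageUsed >= 50,
    threshold80Breached: percentageUsed >= 80,
    threshold100Breached: percentageUsed >= 100, // BREACHED in tat_status terms
  };
}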
@@ -20,7 +20,8 @@
     "db:migrate": "sequelize-cli db:migrate",
     "db:migrate:undo": "sequelize-cli db:migrate:undo",
     "db:seed": "sequelize-cli db:seed:all",
-    "clean": "rm -rf dist"
+    "clean": "rm -rf dist",
+    "migrate": "ts-node src/scripts/migrate.ts"
   },
   "dependencies": {
     "@google-cloud/storage": "^7.14.0",
@@ -8,6 +8,8 @@ import { SSOUserData } from './types/auth.types';
import { sequelize } from './config/database';
import { corsMiddleware } from './middlewares/cors.middleware';
import routes from './routes/index';
import { ensureUploadDir, UPLOAD_DIR } from './config/storage';
import path from 'path';

// Load environment variables
dotenv.config();

@@ -72,6 +74,10 @@ app.get('/health', (_req: express.Request, res: express.Response) => {
// Mount API routes
app.use('/api/v1', routes);

// Serve uploaded files statically
ensureUploadDir();
app.use('/uploads', express.static(UPLOAD_DIR));

// Root endpoint
app.get('/', (_req: express.Request, res: express.Response) => {
  res.status(200).json({
@@ -1,3 +1,19 @@
import fs from 'fs';
import path from 'path';

const ROOT_DIR = path.resolve(process.cwd());
const DEFAULT_UPLOAD_DIR = path.join(ROOT_DIR, 'uploads');

export const UPLOAD_DIR = process.env.UPLOAD_DIR && process.env.UPLOAD_DIR.trim() !== ''
  ? path.resolve(process.env.UPLOAD_DIR)
  : DEFAULT_UPLOAD_DIR;

export function ensureUploadDir(): void {
  if (!fs.existsSync(UPLOAD_DIR)) {
    fs.mkdirSync(UPLOAD_DIR, { recursive: true });
  }
}

export const storageConfig = {
  gcp: {
    projectId: process.env.GCP_PROJECT_ID || '',
@@ -33,7 +33,8 @@ export class ApprovalController {

      ResponseHandler.success(res, level, 'Current approval level retrieved successfully');
    } catch (error) {
-     ResponseHandler.error(res, 'Failed to get current approval level', 500, error.message);
+     const errorMessage = error instanceof Error ? error.message : 'Unknown error';
+     ResponseHandler.error(res, 'Failed to get current approval level', 500, errorMessage);
    }
  }

@@ -44,7 +45,8 @@ export class ApprovalController {

      ResponseHandler.success(res, levels, 'Approval levels retrieved successfully');
    } catch (error) {
-     ResponseHandler.error(res, 'Failed to get approval levels', 500, error.message);
+     const errorMessage = error instanceof Error ? error.message : 'Unknown error';
+     ResponseHandler.error(res, 'Failed to get approval levels', 500, errorMessage);
    }
  }
}
src/controllers/document.controller.ts (62 lines, new file)
@@ -0,0 +1,62 @@
import { Request, Response } from 'express';
import crypto from 'crypto';
import path from 'path';
import { Document } from '@models/Document';
import { ResponseHandler } from '@utils/responseHandler';
import type { AuthenticatedRequest } from '../types/express';

export class DocumentController {
  async upload(req: AuthenticatedRequest, res: Response): Promise<void> {
    try {
      const userId = req.user?.userId;
      if (!userId) {
        ResponseHandler.error(res, 'Unauthorized', 401);
        return;
      }

      const requestId = String((req.body?.requestId || '').trim());
      if (!requestId) {
        ResponseHandler.error(res, 'requestId is required', 400);
        return;
      }

      const file = (req as any).file as Express.Multer.File | undefined;
      if (!file) {
        ResponseHandler.error(res, 'No file uploaded', 400);
        return;
      }

      const checksum = crypto.createHash('sha256').update(file.buffer || '').digest('hex');
      const extension = path.extname(file.originalname).replace('.', '').toLowerCase();
      const category = (req.body?.category as string) || 'OTHER';

      const doc = await Document.create({
        requestId,
        uploadedBy: userId,
        fileName: path.basename(file.filename || file.originalname),
        originalFileName: file.originalname,
        fileType: extension,
        fileExtension: extension,
        fileSize: file.size,
        filePath: file.path, // server path
        storageUrl: `/uploads/${path.basename(file.path)}`,
        mimeType: file.mimetype,
        checksum,
        isGoogleDoc: false,
        googleDocUrl: null as any,
        category,
        version: 1,
        parentDocumentId: null as any,
        isDeleted: false,
        downloadCount: 0,
      } as any);

      ResponseHandler.success(res, doc, 'File uploaded', 201);
    } catch (error) {
      const message = error instanceof Error ? error.message : 'Unknown error';
      ResponseHandler.error(res, 'Upload failed', 500, message);
    }
  }
}
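One caveat worth flagging: the document route below stores files with multer's diskStorage, so file.buffer is undefined here and the SHA-256 above is computed over an empty string. A minimal correction, mirroring what createWorkflowMultipart already does in this commit, would hash the file on disk:

import fs from 'fs';
import crypto from 'crypto';

// Hash the stored file's contents instead of the absent in-memory buffer.
const buffer = fs.readFileSync(file.path);
const checksum = crypto.createHash('sha256').update(buffer).digest('hex');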
src/controllers/user.controller.ts (42 lines, new file)
@@ -0,0 +1,42 @@
import { Request, Response } from 'express';
import { UserService } from '../services/user.service';
import { ResponseHandler } from '@utils/responseHandler';
import logger from '@utils/logger';

export class UserController {
  private userService: UserService;

  constructor() {
    this.userService = new UserService();
  }

  async searchUsers(req: Request, res: Response): Promise<void> {
    try {
      const q = String(req.query.q || '').trim();
      const limit = Number(req.query.limit || 10);
      const currentUserId = (req as any).user?.userId || (req as any).user?.id;

      logger.info('User search requested', { q, limit });

      const users = await this.userService.searchUsers(q, limit, currentUserId);

      const result = users.map(u => ({
        userId: (u as any).userId,
        email: (u as any).email,
        displayName: (u as any).displayName,
        firstName: (u as any).firstName,
        lastName: (u as any).lastName,
        department: (u as any).department,
        designation: (u as any).designation,
        isActive: (u as any).isActive,
      }));

      ResponseHandler.success(res, result, 'Users fetched');
    } catch (error) {
      logger.error('User search failed', { error });
      ResponseHandler.error(res, 'User search failed', 500);
    }
  }
}
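A hypothetical client call against this endpoint (the route is mounted at /api/v1/users/search below; the accessToken variable and the exact success-envelope shape are assumptions):

const res = await fetch('/api/v1/users/search?q=singh&limit=5', {
  headers: { Authorization: `Bearer ${accessToken}` }, // assumed bearer-token auth
});
const body = await res.json(); // ResponseHandler.success envelope wrapping the mapped user list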
@@ -4,6 +4,11 @@ import { validateCreateWorkflow, validateUpdateWorkflow } from '@validators/workflow.validator';
import { ResponseHandler } from '@utils/responseHandler';
import type { AuthenticatedRequest } from '../types/express';
import { Priority } from '../types/common.types';
import type { UpdateWorkflowRequest } from '../types/workflow.types';
import { Document } from '@models/Document';
import fs from 'fs';
import path from 'path';
import crypto from 'crypto';

const workflowService = new WorkflowService();

@@ -25,6 +30,66 @@ export class WorkflowController {
    }
  }

  // Multipart create: accepts payload JSON and files[]
  async createWorkflowMultipart(req: AuthenticatedRequest, res: Response): Promise<void> {
    try {
      const userId = req.user?.userId;
      if (!userId) {
        ResponseHandler.error(res, 'Unauthorized', 401);
        return;
      }

      const raw = String(req.body?.payload || '');
      if (!raw) {
        ResponseHandler.error(res, 'payload is required', 400);
        return;
      }
      const parsed = JSON.parse(raw);
      const validated = validateCreateWorkflow(parsed);
      const workflowData = { ...validated, priority: validated.priority as Priority } as any;

      const workflow = await workflowService.createWorkflow(userId, workflowData);

      // Attach files as documents (category defaults to OTHER)
      const files = (req as any).files as Express.Multer.File[] | undefined;
      const category = (req.body?.category as string) || 'OTHER';
      const docs: any[] = [];
      if (files && files.length > 0) {
        for (const file of files) {
          const buffer = fs.readFileSync(file.path);
          const checksum = crypto.createHash('sha256').update(buffer).digest('hex');
          const extension = path.extname(file.originalname).replace('.', '').toLowerCase();
          const doc = await Document.create({
            requestId: workflow.requestId,
            uploadedBy: userId,
            fileName: path.basename(file.filename || file.originalname),
            originalFileName: file.originalname,
            fileType: extension,
            fileExtension: extension,
            fileSize: file.size,
            filePath: file.path,
            storageUrl: `/uploads/${path.basename(file.path)}`,
            mimeType: file.mimetype,
            checksum,
            isGoogleDoc: false,
            googleDocUrl: null as any,
            category: category || 'OTHER',
            version: 1,
            parentDocumentId: null as any,
            isDeleted: false,
            downloadCount: 0,
          } as any);
          docs.push(doc);
        }
      }

      ResponseHandler.success(res, { requestId: workflow.requestId, documents: docs }, 'Workflow created with documents', 201);
    } catch (error) {
      const errorMessage = error instanceof Error ? error.message : 'Unknown error';
      ResponseHandler.error(res, 'Failed to create workflow', 400, errorMessage);
    }
  }

  async getWorkflow(req: Request, res: Response): Promise<void> {
    try {
      const { id } = req.params;
@@ -42,14 +107,82 @@ export class WorkflowController {
    }
  }

  async getWorkflowDetails(req: Request, res: Response): Promise<void> {
    try {
      const { id } = req.params as any;
      const result = await workflowService.getWorkflowDetails(id);
      if (!result) {
        ResponseHandler.notFound(res, 'Workflow not found');
        return;
      }
      ResponseHandler.success(res, result, 'Workflow details fetched');
    } catch (error) {
      const errorMessage = error instanceof Error ? error.message : 'Unknown error';
      ResponseHandler.error(res, 'Failed to fetch workflow details', 500, errorMessage);
    }
  }

  async listWorkflows(req: Request, res: Response): Promise<void> {
    try {
      const page = Math.max(parseInt(String(req.query.page || '1'), 10), 1);
      const limit = Math.min(Math.max(parseInt(String(req.query.limit || '20'), 10), 1), 100);
      const result = await workflowService.listWorkflows(page, limit);
      ResponseHandler.success(res, result, 'Workflows fetched');
    } catch (error) {
      const errorMessage = error instanceof Error ? error.message : 'Unknown error';
      ResponseHandler.error(res, 'Failed to list workflows', 500, errorMessage);
    }
  }

  async listMyRequests(req: Request, res: Response): Promise<void> {
    try {
      const userId = (req as any).user?.userId || (req as any).user?.id || (req as any).auth?.userId;
      const page = Math.max(parseInt(String(req.query.page || '1'), 10), 1);
      const limit = Math.min(Math.max(parseInt(String(req.query.limit || '20'), 10), 1), 100);
      const result = await workflowService.listMyRequests(userId, page, limit);
      ResponseHandler.success(res, result, 'My requests fetched');
    } catch (error) {
      const errorMessage = error instanceof Error ? error.message : 'Unknown error';
      ResponseHandler.error(res, 'Failed to fetch my requests', 500, errorMessage);
    }
  }

  async listOpenForMe(req: Request, res: Response): Promise<void> {
    try {
      const userId = (req as any).user?.userId || (req as any).user?.id || (req as any).auth?.userId;
      const page = Math.max(parseInt(String(req.query.page || '1'), 10), 1);
      const limit = Math.min(Math.max(parseInt(String(req.query.limit || '20'), 10), 1), 100);
      const result = await workflowService.listOpenForMe(userId, page, limit);
      ResponseHandler.success(res, result, 'Open requests for user fetched');
    } catch (error) {
      const errorMessage = error instanceof Error ? error.message : 'Unknown error';
      ResponseHandler.error(res, 'Failed to fetch open requests for user', 500, errorMessage);
    }
  }

  async listClosedByMe(req: Request, res: Response): Promise<void> {
    try {
      const userId = (req as any).user?.userId || (req as any).user?.id || (req as any).auth?.userId;
      const page = Math.max(parseInt(String(req.query.page || '1'), 10), 1);
      const limit = Math.min(Math.max(parseInt(String(req.query.limit || '20'), 10), 1), 100);
      const result = await workflowService.listClosedByMe(userId, page, limit);
      ResponseHandler.success(res, result, 'Closed requests by user fetched');
    } catch (error) {
      const errorMessage = error instanceof Error ? error.message : 'Unknown error';
      ResponseHandler.error(res, 'Failed to fetch closed requests by user', 500, errorMessage);
    }
  }

  async updateWorkflow(req: Request, res: Response): Promise<void> {
    try {
      const { id } = req.params;
      const validatedData = validateUpdateWorkflow(req.body);
-     // Convert string literal priority to enum if present
-     const updateData = validatedData.priority
-       ? { ...validatedData, priority: validatedData.priority as Priority }
-       : validatedData;
+     // Build a strongly-typed payload for the service layer
+     const updateData: UpdateWorkflowRequest = { ...validatedData } as any;
+     if (validatedData.priority) {
+       // Map string literal to enum value explicitly
+       updateData.priority = validatedData.priority === 'EXPRESS' ? Priority.EXPRESS : Priority.STANDARD;
+     }

      const workflow = await workflowService.updateWorkflow(id, updateData);
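For reference, a hypothetical client call for the new multipart endpoint. The payload field names are inferred from the validator and service code in this commit; the approver values are placeholders:

const payload = {
  title: 'New vendor onboarding',
  description: 'Approval chain for vendor X',
  priority: 'STANDARD',
  approvalLevels: [
    // Field names inferred from createWorkflow / approval_levels; values are placeholders.
    { levelNumber: 1, approverId: '<uuid>', approverEmail: 'approver@example.com',
      approverName: 'Approver One', tatHours: 48, isFinalApprover: true },
  ],
  participants: [],
};

const form = new FormData();
form.append('payload', JSON.stringify(payload)); // parsed and validated server-side
form.append('files', fileBlob);                  // matches upload.array('files')

await fetch('/api/v1/workflows/multipart', {
  method: 'POST',
  headers: { Authorization: `Bearer ${accessToken}` }, // assumed bearer-token auth
  body: form,
});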
src/middlewares/authorization.middleware.ts (47 lines, new file)
@@ -0,0 +1,47 @@
import { Request, Response, NextFunction } from 'express';
import { Participant } from '@models/Participant';
import { WorkflowRequest } from '@models/WorkflowRequest';
import { Op } from 'sequelize';

type AllowedType = 'INITIATOR' | 'APPROVER' | 'SPECTATOR';

export function requireParticipantTypes(allowed: AllowedType[]) {
  return async (req: Request, res: Response, next: NextFunction) => {
    try {
      const userId: string | undefined = (req as any).user?.userId || (req as any).user?.id;
      const requestId: string | undefined = (req.params as any)?.id;
      if (!userId || !requestId) {
        return res.status(403).json({ success: false, error: 'Forbidden' });
      }

      // Check initiator
      if (allowed.includes('INITIATOR')) {
        const wf = await WorkflowRequest.findByPk(requestId);
        if (wf && (wf as any).initiatorId === userId) {
          return next();
        }
      }

      // Check participants table for APPROVER / SPECTATOR
      const rolesToCheck = allowed.filter(r => r !== 'INITIATOR');
      if (rolesToCheck.length > 0) {
        const participant = await Participant.findOne({
          where: {
            requestId,
            userId,
            participantType: { [Op.in]: rolesToCheck as any },
          },
        });
        if (participant) {
          return next();
        }
      }

      return res.status(403).json({ success: false, error: 'Insufficient permissions' });
    } catch (err) {
      return res.status(500).json({ success: false, error: 'Authorization check failed' });
    }
  };
}
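Usage sketch: the guard slots in after authenticateToken and can accept several roles at once. The work-notes route and controller here are hypothetical; the real approve/reject routes in this commit pass ['APPROVER']:

router.get('/:id/work-notes',
  authenticateToken,
  requireParticipantTypes(['INITIATOR', 'APPROVER', 'SPECTATOR']),
  asyncHandler(workNoteController.list.bind(workNoteController)) // hypothetical controller
);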
src/migrations/2025103001-create-workflow-requests.ts (43 lines, new file)
@@ -0,0 +1,43 @@
import { QueryInterface, DataTypes } from 'sequelize';

export async function up(queryInterface: QueryInterface): Promise<void> {
  // Enums
  await queryInterface.sequelize.query("CREATE TYPE enum_priority AS ENUM ('STANDARD','EXPRESS');");
  await queryInterface.sequelize.query(
    "CREATE TYPE enum_workflow_status AS ENUM ('DRAFT','PENDING','IN_PROGRESS','APPROVED','REJECTED','CLOSED');"
  );

  await queryInterface.createTable('workflow_requests', {
    request_id: { type: DataTypes.UUID, primaryKey: true, defaultValue: DataTypes.UUIDV4 },
    request_number: { type: DataTypes.STRING(20), allowNull: false, unique: true },
    initiator_id: { type: DataTypes.UUID, allowNull: false, references: { model: 'users', key: 'user_id' } },
    template_type: { type: DataTypes.STRING(20), allowNull: false, defaultValue: 'CUSTOM' },
    title: { type: DataTypes.STRING(500), allowNull: false },
    description: { type: DataTypes.TEXT, allowNull: false },
    priority: { type: 'enum_priority' as any, allowNull: false, defaultValue: 'STANDARD' },
    status: { type: 'enum_workflow_status' as any, allowNull: false, defaultValue: 'DRAFT' },
    current_level: { type: DataTypes.INTEGER, allowNull: false, defaultValue: 1 },
    total_levels: { type: DataTypes.INTEGER, allowNull: false, defaultValue: 1 },
    total_tat_hours: { type: DataTypes.DECIMAL(10,2), allowNull: false, defaultValue: 0 },
    submission_date: { type: DataTypes.DATE, allowNull: true },
    closure_date: { type: DataTypes.DATE, allowNull: true },
    conclusion_remark: { type: DataTypes.TEXT, allowNull: true },
    ai_generated_conclusion: { type: DataTypes.TEXT, allowNull: true },
    is_draft: { type: DataTypes.BOOLEAN, allowNull: false, defaultValue: true },
    is_deleted: { type: DataTypes.BOOLEAN, allowNull: false, defaultValue: false },
    created_at: { type: DataTypes.DATE, allowNull: false, defaultValue: DataTypes.NOW },
    updated_at: { type: DataTypes.DATE, allowNull: false, defaultValue: DataTypes.NOW },
  });

  await queryInterface.addIndex('workflow_requests', ['initiator_id']);
  await queryInterface.addIndex('workflow_requests', ['status']);
  await queryInterface.addIndex('workflow_requests', ['created_at']);
}

export async function down(queryInterface: QueryInterface): Promise<void> {
  await queryInterface.dropTable('workflow_requests');
  await queryInterface.sequelize.query('DROP TYPE IF EXISTS enum_workflow_status;');
  await queryInterface.sequelize.query('DROP TYPE IF EXISTS enum_priority;');
}
src/migrations/2025103002-create-approval-levels.ts (47 lines, new file)
@@ -0,0 +1,47 @@
import { QueryInterface, DataTypes } from 'sequelize';

export async function up(queryInterface: QueryInterface): Promise<void> {
  await queryInterface.sequelize.query(
    "CREATE TYPE enum_approval_status AS ENUM ('PENDING','IN_PROGRESS','APPROVED','REJECTED','SKIPPED');"
  );

  await queryInterface.createTable('approval_levels', {
    level_id: { type: DataTypes.UUID, primaryKey: true, defaultValue: DataTypes.UUIDV4 },
    request_id: { type: DataTypes.UUID, allowNull: false, references: { model: 'workflow_requests', key: 'request_id' } },
    level_number: { type: DataTypes.INTEGER, allowNull: false },
    level_name: { type: DataTypes.STRING(100), allowNull: true },
    approver_id: { type: DataTypes.UUID, allowNull: false, references: { model: 'users', key: 'user_id' } },
    approver_email: { type: DataTypes.STRING(255), allowNull: false },
    approver_name: { type: DataTypes.STRING(200), allowNull: false },
    tat_hours: { type: DataTypes.DECIMAL(10,2), allowNull: false },
    tat_days: { type: DataTypes.INTEGER, allowNull: false },
    status: { type: 'enum_approval_status' as any, allowNull: false, defaultValue: 'PENDING' },
    level_start_time: { type: DataTypes.DATE, allowNull: true },
    level_end_time: { type: DataTypes.DATE, allowNull: true },
    action_date: { type: DataTypes.DATE, allowNull: true },
    comments: { type: DataTypes.TEXT, allowNull: true },
    rejection_reason: { type: DataTypes.TEXT, allowNull: true },
    is_final_approver: { type: DataTypes.BOOLEAN, allowNull: false, defaultValue: false },
    elapsed_hours: { type: DataTypes.DECIMAL(10,2), allowNull: false, defaultValue: 0 },
    remaining_hours: { type: DataTypes.DECIMAL(10,2), allowNull: false, defaultValue: 0 },
    tat_percentage_used: { type: DataTypes.DECIMAL(5,2), allowNull: false, defaultValue: 0 },
    created_at: { type: DataTypes.DATE, allowNull: false, defaultValue: DataTypes.NOW },
    updated_at: { type: DataTypes.DATE, allowNull: false, defaultValue: DataTypes.NOW },
  });

  await queryInterface.addIndex('approval_levels', ['request_id']);
  await queryInterface.addIndex('approval_levels', ['approver_id']);
  await queryInterface.addIndex('approval_levels', ['status']);
  await queryInterface.addConstraint('approval_levels', {
    fields: ['request_id', 'level_number'],
    type: 'unique',
    name: 'uq_approval_levels_request_level'
  });
}

export async function down(queryInterface: QueryInterface): Promise<void> {
  await queryInterface.dropTable('approval_levels');
  await queryInterface.sequelize.query('DROP TYPE IF EXISTS enum_approval_status;');
}
src/migrations/2025103003-create-participants.ts (38 lines, new file)
@@ -0,0 +1,38 @@
import { QueryInterface, DataTypes } from 'sequelize';

export async function up(queryInterface: QueryInterface): Promise<void> {
  await queryInterface.sequelize.query(
    "CREATE TYPE enum_participant_type AS ENUM ('SPECTATOR','INITIATOR','APPROVER','CONSULTATION');"
  );

  await queryInterface.createTable('participants', {
    participant_id: { type: DataTypes.UUID, primaryKey: true, defaultValue: DataTypes.UUIDV4 },
    request_id: { type: DataTypes.UUID, allowNull: false, references: { model: 'workflow_requests', key: 'request_id' } },
    user_id: { type: DataTypes.UUID, allowNull: false, references: { model: 'users', key: 'user_id' } },
    user_email: { type: DataTypes.STRING(255), allowNull: false },
    user_name: { type: DataTypes.STRING(200), allowNull: false },
    participant_type: { type: 'enum_participant_type' as any, allowNull: false },
    can_comment: { type: DataTypes.BOOLEAN, allowNull: false, defaultValue: true },
    can_view_documents: { type: DataTypes.BOOLEAN, allowNull: false, defaultValue: true },
    can_download_documents: { type: DataTypes.BOOLEAN, allowNull: false, defaultValue: false },
    notification_enabled: { type: DataTypes.BOOLEAN, allowNull: false, defaultValue: true },
    added_by: { type: DataTypes.UUID, allowNull: false, references: { model: 'users', key: 'user_id' } },
    added_at: { type: DataTypes.DATE, allowNull: false, defaultValue: DataTypes.NOW },
    is_active: { type: DataTypes.BOOLEAN, allowNull: false, defaultValue: true },
  });

  await queryInterface.addIndex('participants', ['request_id']);
  await queryInterface.addIndex('participants', ['user_id']);
  await queryInterface.addConstraint('participants', {
    fields: ['request_id', 'user_id'],
    type: 'unique',
    name: 'uq_participants_request_user'
  });
}

export async function down(queryInterface: QueryInterface): Promise<void> {
  await queryInterface.dropTable('participants');
  await queryInterface.sequelize.query('DROP TYPE IF EXISTS enum_participant_type;');
}
src/migrations/2025103004-create-documents.ts (41 lines, new file)
@@ -0,0 +1,41 @@
import { QueryInterface, DataTypes } from 'sequelize';

export async function up(queryInterface: QueryInterface): Promise<void> {
  await queryInterface.sequelize.query(
    "CREATE TYPE enum_document_category AS ENUM ('SUPPORTING','APPROVAL','REFERENCE','FINAL','OTHER');"
  );

  await queryInterface.createTable('documents', {
    document_id: { type: DataTypes.UUID, primaryKey: true, defaultValue: DataTypes.UUIDV4 },
    request_id: { type: DataTypes.UUID, allowNull: false, references: { model: 'workflow_requests', key: 'request_id' } },
    uploaded_by: { type: DataTypes.UUID, allowNull: false, references: { model: 'users', key: 'user_id' } },
    file_name: { type: DataTypes.STRING(255), allowNull: false },
    original_file_name: { type: DataTypes.STRING(255), allowNull: false },
    file_type: { type: DataTypes.STRING(100), allowNull: false },
    file_extension: { type: DataTypes.STRING(10), allowNull: false },
    file_size: { type: DataTypes.BIGINT, allowNull: false },
    file_path: { type: DataTypes.STRING(500), allowNull: false },
    storage_url: { type: DataTypes.STRING(500), allowNull: true },
    mime_type: { type: DataTypes.STRING(100), allowNull: false },
    checksum: { type: DataTypes.STRING(64), allowNull: false },
    is_google_doc: { type: DataTypes.BOOLEAN, allowNull: false, defaultValue: false },
    google_doc_url: { type: DataTypes.STRING(500), allowNull: true },
    category: { type: 'enum_document_category' as any, allowNull: false, defaultValue: 'OTHER' },
    version: { type: DataTypes.INTEGER, allowNull: false, defaultValue: 1 },
    parent_document_id: { type: DataTypes.UUID, allowNull: true, references: { model: 'documents', key: 'document_id' } },
    is_deleted: { type: DataTypes.BOOLEAN, allowNull: false, defaultValue: false },
    download_count: { type: DataTypes.INTEGER, allowNull: false, defaultValue: 0 },
    uploaded_at: { type: DataTypes.DATE, allowNull: false, defaultValue: DataTypes.NOW },
  });

  await queryInterface.addIndex('documents', ['request_id']);
  await queryInterface.addIndex('documents', ['uploaded_by']);
  await queryInterface.addIndex('documents', ['category']);
}

export async function down(queryInterface: QueryInterface): Promise<void> {
  await queryInterface.dropTable('documents');
  await queryInterface.sequelize.query('DROP TYPE IF EXISTS enum_document_category;');
}
src/routes/document.routes.ts (35 lines, new file)
@@ -0,0 +1,35 @@
import { Router } from 'express';
import multer from 'multer';
import path from 'path';
import crypto from 'crypto';
import { authenticateToken } from '../middlewares/auth.middleware';
import { asyncHandler } from '../middlewares/errorHandler.middleware';
import { DocumentController } from '../controllers/document.controller';
import { ensureUploadDir, UPLOAD_DIR } from '../config/storage';

ensureUploadDir();

const storage = multer.diskStorage({
  destination: (_req, _file, cb) => cb(null, UPLOAD_DIR),
  filename: (_req, file, cb) => {
    const safeBase = path.basename(file.originalname).replace(/[^a-zA-Z0-9._-]/g, '_');
    const hash = crypto.randomBytes(6).toString('hex');
    const name = `${Date.now()}-${hash}-${safeBase}`;
    cb(null, name);
  }
});

const upload = multer({
  storage,
  limits: { fileSize: 10 * 1024 * 1024 }, // 10MB
});

const router = Router();
const controller = new DocumentController();

// multipart/form-data: file, requestId, optional category
router.post('/', authenticateToken, upload.single('file'), asyncHandler(controller.upload.bind(controller)));

export default router;
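A hypothetical client-side upload against this route (the field names match upload.single('file') and DocumentController.upload; the fileBlob and accessToken variables are assumptions):

const form = new FormData();
form.append('file', fileBlob);          // the uploaded file
form.append('requestId', requestId);    // required by DocumentController.upload
form.append('category', 'SUPPORTING');  // optional; defaults to OTHER server-side

const res = await fetch('/api/v1/documents', {
  method: 'POST',
  headers: { Authorization: `Bearer ${accessToken}` },
  body: form, // the browser sets the multipart boundary automatically
});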
@@ -1,6 +1,8 @@
 import { Router } from 'express';
 import authRoutes from './auth.routes';
-// import workflowRoutes from './workflow.routes'; // Temporarily disabled due to TypeScript errors
+import workflowRoutes from './workflow.routes';
+import userRoutes from './user.routes';
+import documentRoutes from './document.routes';

 const router = Router();

@@ -15,7 +17,9 @@ router.get('/health', (_req, res) => {

 // API routes
 router.use('/auth', authRoutes);
-// router.use('/workflows', workflowRoutes); // Temporarily disabled
+router.use('/workflows', workflowRoutes);
+router.use('/users', userRoutes);
+router.use('/documents', documentRoutes);

 // TODO: Add other route modules as they are implemented
 // router.use('/approvals', approvalRoutes);
@@ -23,6 +27,5 @@ router.use('/auth', authRoutes);
 // router.use('/notifications', notificationRoutes);
 // router.use('/participants', participantRoutes);
 // router.use('/dashboard', dashboardRoutes);
-// router.use('/users', userRoutes);

 export default router;
src/routes/user.routes.ts (14 lines, new file)
@@ -0,0 +1,14 @@
import { Router } from 'express';
import { UserController } from '../controllers/user.controller';
import { authenticateToken } from '../middlewares/auth.middleware';
import { asyncHandler } from '../middlewares/errorHandler.middleware';

const router = Router();
const userController = new UserController();

// GET /api/v1/users/search?q=<email or name>
router.get('/search', authenticateToken, asyncHandler(userController.searchUsers.bind(userController)));

export default router;
@@ -6,24 +6,74 @@ import { validateBody, validateParams } from '../middlewares/validate.middleware';
import { createWorkflowSchema, updateWorkflowSchema, workflowParamsSchema } from '../validators/workflow.validator';
import { approvalActionSchema, approvalParamsSchema } from '../validators/approval.validator';
import { asyncHandler } from '../middlewares/errorHandler.middleware';
import { requireParticipantTypes } from '../middlewares/authorization.middleware';
import multer from 'multer';
import path from 'path';
import crypto from 'crypto';
import { ensureUploadDir, UPLOAD_DIR } from '../config/storage';

const router = Router();
const workflowController = new WorkflowController();
const approvalController = new ApprovalController();

// Workflow routes
router.get('/',
  authenticateToken,
  asyncHandler(workflowController.listWorkflows.bind(workflowController))
);

// Filtered lists
router.get('/my',
  authenticateToken,
  asyncHandler(workflowController.listMyRequests.bind(workflowController))
);

router.get('/open-for-me',
  authenticateToken,
  asyncHandler(workflowController.listOpenForMe.bind(workflowController))
);

router.get('/closed-by-me',
  authenticateToken,
  asyncHandler(workflowController.listClosedByMe.bind(workflowController))
);

router.post('/',
  authenticateToken,
  validateBody(createWorkflowSchema),
  asyncHandler(workflowController.createWorkflow.bind(workflowController))
);

// Multipart create (payload + files[])
ensureUploadDir();
const storage = multer.diskStorage({
  destination: (_req, _file, cb) => cb(null, UPLOAD_DIR),
  filename: (_req, file, cb) => {
    const safeBase = path.basename(file.originalname).replace(/[^a-zA-Z0-9._-]/g, '_');
    const hash = crypto.randomBytes(6).toString('hex');
    cb(null, `${Date.now()}-${hash}-${safeBase}`);
  }
});
const upload = multer({ storage, limits: { fileSize: 10 * 1024 * 1024 } });

router.post('/multipart',
  authenticateToken,
  upload.array('files'),
  asyncHandler(workflowController.createWorkflowMultipart.bind(workflowController))
);

router.get('/:id',
  authenticateToken,
  validateParams(workflowParamsSchema),
  asyncHandler(workflowController.getWorkflow.bind(workflowController))
);

router.get('/:id/details',
  authenticateToken,
  validateParams(workflowParamsSchema),
  asyncHandler(workflowController.getWorkflowDetails.bind(workflowController))
);

router.put('/:id',
  authenticateToken,
  validateParams(workflowParamsSchema),

@@ -52,6 +102,7 @@ router.get('/:id/approvals/current',

router.patch('/:id/approvals/:levelId/approve',
  authenticateToken,
  requireParticipantTypes(['APPROVER']),
  validateParams(approvalParamsSchema),
  validateBody(approvalActionSchema),
  asyncHandler(approvalController.approveLevel.bind(approvalController))

@@ -59,6 +110,7 @@ router.patch('/:id/approvals/:levelId/approve',

router.patch('/:id/approvals/:levelId/reject',
  authenticateToken,
  requireParticipantTypes(['APPROVER']),
  validateParams(approvalParamsSchema),
  validateBody(approvalActionSchema),
  asyncHandler(approvalController.approveLevel.bind(approvalController))
src/scripts/migrate.ts (25 lines, new file)
@@ -0,0 +1,25 @@
import { sequelize } from '../config/database';
import * as m1 from '../migrations/2025103001-create-workflow-requests';
import * as m2 from '../migrations/2025103002-create-approval-levels';
import * as m3 from '../migrations/2025103003-create-participants';
import * as m4 from '../migrations/2025103004-create-documents';

async function run() {
  try {
    await sequelize.authenticate();
    console.log('DB connected');
    await m1.up(sequelize.getQueryInterface());
    await m2.up(sequelize.getQueryInterface());
    await m3.up(sequelize.getQueryInterface());
    await m4.up(sequelize.getQueryInterface());
    console.log('Migrations applied');
    process.exit(0);
  } catch (err) {
    console.error('Migration failed', err);
    process.exit(1);
  }
}

run();
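With the package.json script added above, this runs as npm run migrate. Note it is a one-shot bootstrap rather than a tracked migration: the CREATE TYPE and createTable calls will fail on a second run against the same database, whereas the retained db:migrate script (sequelize-cli) records applied migrations and can be re-run safely.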
@@ -1,6 +1,7 @@
 import { ApprovalLevel } from '@models/ApprovalLevel';
 import { WorkflowRequest } from '@models/WorkflowRequest';
 import { ApprovalAction } from '../types/approval.types';
+import { ApprovalStatus, WorkflowStatus } from '../types/common.types';
 import { calculateElapsedHours, calculateTATPercentage } from '@utils/helpers';
 import logger from '@utils/logger';

@@ -15,7 +16,7 @@ export class ApprovalService {
     const tatPercentage = calculateTATPercentage(elapsedHours, level.tatHours);

     const updateData = {
-      status: action.action === 'APPROVE' ? 'APPROVED' : 'REJECTED',
+      status: action.action === 'APPROVE' ? ApprovalStatus.APPROVED : ApprovalStatus.REJECTED,
       actionDate: now,
       levelEndTime: now,
       elapsedHours,

@@ -29,12 +30,12 @@ export class ApprovalService {
     // Update workflow status if this is the final level
     if (level.isFinalApprover && action.action === 'APPROVE') {
       await WorkflowRequest.update(
-        { status: 'APPROVED', closureDate: now },
+        { status: WorkflowStatus.APPROVED, closureDate: now },
         { where: { requestId: level.requestId } }
       );
-    } else if (action.action === 'REJECTED') {
+    } else if (action.action === 'REJECT') {
       await WorkflowRequest.update(
-        { status: 'REJECTED', closureDate: now },
+        { status: WorkflowStatus.REJECTED, closureDate: now },
         { where: { requestId: level.requestId } }
       );
     }

@@ -50,7 +51,7 @@ export class ApprovalService {
   async getCurrentApprovalLevel(requestId: string): Promise<ApprovalLevel | null> {
     try {
       return await ApprovalLevel.findOne({
-        where: { requestId, status: 'PENDING' },
+        where: { requestId, status: ApprovalStatus.PENDING },
         order: [['levelNumber', 'ASC']]
       });
     } catch (error) {
@@ -77,4 +77,27 @@ export class UserService {
      order: [['createdAt', 'DESC']]
    });
  }

  async searchUsers(query: string, limit: number = 10, excludeUserId?: string): Promise<UserModel[]> {
    const q = (query || '').trim();
    if (!q) {
      return [];
    }
    const like = `%${q}%`;
    const orConds = [
      { email: { [Op.iLike as any]: like } as any },
      { displayName: { [Op.iLike as any]: like } as any },
      { firstName: { [Op.iLike as any]: like } as any },
      { lastName: { [Op.iLike as any]: like } as any },
    ];
    const where: any = { [Op.or]: orConds };
    if (excludeUserId) {
      where.userId = { [Op.ne]: excludeUserId } as any;
    }
    return await UserModel.findAll({
      where,
      order: [['displayName', 'ASC']],
      limit: Math.min(Math.max(limit || 10, 1), 50),
    });
  }
}
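One portability note on searchUsers: Sequelize's Op.iLike maps to Postgres ILIKE and works only on the Postgres dialect — fine here, since the schema targets PostgreSQL 16.x. If a dialect-neutral form were ever needed, a lower()-based comparison is the usual substitute (sketch; `q` as in the method above):

import { Op, fn, col, where } from 'sequelize';

// Case-insensitive LIKE without Postgres ILIKE: lower(email) LIKE lower('%q%')
const emailCond = where(fn('lower', col('email')), { [Op.like]: `%${q.toLowerCase()}%` });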
@@ -1,11 +1,157 @@
import { WorkflowRequest } from '@models/WorkflowRequest';
// duplicate import removed
import { User } from '@models/User';
import { ApprovalLevel } from '@models/ApprovalLevel';
import { Participant } from '@models/Participant';
import { Document } from '@models/Document';
import { CreateWorkflowRequest, UpdateWorkflowRequest } from '../types/workflow.types';
import { generateRequestNumber, calculateTATDays } from '@utils/helpers';
import logger from '@utils/logger';
import { WorkflowStatus, ParticipantType, ApprovalStatus } from '../types/common.types';
import { Op } from 'sequelize';

export class WorkflowService {
  async listWorkflows(page: number, limit: number) {
    const offset = (page - 1) * limit;
    const { rows, count } = await WorkflowRequest.findAndCountAll({
      offset,
      limit,
      order: [['createdAt', 'DESC']],
      include: [
        { association: 'initiator', required: false, attributes: ['userId', 'email', 'displayName'] },
      ],
    });
    const data = await this.enrichForCards(rows);

    return {
      data,
      pagination: {
        page,
        limit,
        total: count,
        totalPages: Math.ceil(count / limit) || 1,
      },
    };
  }

  private async enrichForCards(rows: WorkflowRequest[]) {
    const data = await Promise.all(rows.map(async (wf) => {
      const currentLevel = await ApprovalLevel.findOne({
        where: {
          requestId: (wf as any).requestId,
          status: { [Op.in]: ['PENDING', 'IN_PROGRESS'] as any },
        },
        order: [['levelNumber', 'ASC']],
        include: [{ model: User, as: 'approver', attributes: ['userId', 'email', 'displayName'] }]
      });

      const totalTat = Number((wf as any).totalTatHours || 0);
      let percent = 0;
      let remainingText = '';
      if ((wf as any).submissionDate && totalTat > 0) {
        const startedAt = new Date((wf as any).submissionDate);
        const now = new Date();
        const elapsedHrs = Math.max(0, (now.getTime() - startedAt.getTime()) / (1000 * 60 * 60));
        percent = Math.min(100, Math.round((elapsedHrs / totalTat) * 100));
        const remaining = Math.max(0, totalTat - elapsedHrs);
        const days = Math.floor(remaining / 24);
        const hours = Math.floor(remaining % 24);
        remainingText = days > 0 ? `${days} days ${hours} hours remaining` : `${hours} hours remaining`;
      }

      return {
        requestId: (wf as any).requestId,
        requestNumber: (wf as any).requestNumber,
        title: (wf as any).title,
        description: (wf as any).description,
        status: (wf as any).status,
        priority: (wf as any).priority,
        submittedAt: (wf as any).submissionDate,
        initiator: (wf as any).initiator,
        totalLevels: (wf as any).totalLevels,
        currentLevel: currentLevel ? (currentLevel as any).levelNumber : null,
        currentApprover: currentLevel ? {
          userId: (currentLevel as any).approverId,
          email: (currentLevel as any).approverEmail,
          name: (currentLevel as any).approverName,
        } : null,
        sla: { percent, remainingText },
      };
    }));
    return data;
  }
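  // Worked example of the SLA math above (illustrative numbers, not from the source):
  //   totalTatHours = 48, submissionDate 30 hours ago
  //   elapsedHrs = 30
  //   percent    = min(100, round(30 / 48 * 100)) = 63
  //   remaining  = 48 - 30 = 18  =>  days = 0, hours = 18  =>  "18 hours remaining"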
  async listMyRequests(userId: string, page: number, limit: number) {
    const offset = (page - 1) * limit;
    const { rows, count } = await WorkflowRequest.findAndCountAll({
      where: { initiatorId: userId },
      offset,
      limit,
      order: [['createdAt', 'DESC']],
      include: [
        { association: 'initiator', required: false, attributes: ['userId', 'email', 'displayName'] },
      ],
    });
    const data = await this.enrichForCards(rows);
    return { data, pagination: { page, limit, total: count, totalPages: Math.ceil(count / limit) || 1 } };
  }

  async listOpenForMe(userId: string, page: number, limit: number) {
    const offset = (page - 1) * limit;
    const levelRows = await ApprovalLevel.findAll({
      where: {
        approverId: userId,
        status: { [Op.in]: [ApprovalStatus.PENDING as any, (ApprovalStatus as any).IN_PROGRESS ?? 'IN_PROGRESS', 'PENDING', 'IN_PROGRESS'] as any },
      },
      attributes: ['requestId'],
    });
    // Include requests where the user is a SPECTATOR (view-only)
    const spectatorRows = await Participant.findAll({
      where: { userId, participantType: 'SPECTATOR' as any },
      attributes: ['requestId'],
    });
    const requestIds = Array.from(new Set([
      ...levelRows.map((l: any) => l.requestId),
      ...spectatorRows.map((s: any) => s.requestId),
    ]));
    const { rows, count } = await WorkflowRequest.findAndCountAll({
      where: {
        requestId: { [Op.in]: requestIds.length ? requestIds : ['00000000-0000-0000-0000-000000000000'] },
        status: { [Op.in]: [WorkflowStatus.PENDING as any, (WorkflowStatus as any).IN_PROGRESS ?? 'IN_PROGRESS'] as any },
      },
      offset,
      limit,
      order: [['createdAt', 'DESC']],
      include: [
        { association: 'initiator', required: false, attributes: ['userId', 'email', 'displayName'] },
      ],
    });
    const data = await this.enrichForCards(rows);
    return { data, pagination: { page, limit, total: count, totalPages: Math.ceil(count / limit) || 1 } };
  }

  async listClosedByMe(userId: string, page: number, limit: number) {
    const offset = (page - 1) * limit;
    const levelRows = await ApprovalLevel.findAll({
      where: {
        approverId: userId,
        status: { [Op.in]: [ApprovalStatus.APPROVED as any, 'APPROVED'] as any },
      },
      attributes: ['requestId'],
    });
    const requestIds = Array.from(new Set(levelRows.map((l: any) => l.requestId)));
    const { rows, count } = await WorkflowRequest.findAndCountAll({
      where: { requestId: { [Op.in]: requestIds.length ? requestIds : ['00000000-0000-0000-0000-000000000000'] } },
      offset,
      limit,
      order: [['createdAt', 'DESC']],
      include: [
        { association: 'initiator', required: false, attributes: ['userId', 'email', 'displayName'] },
      ],
    });
    const data = await this.enrichForCards(rows);
    return { data, pagination: { page, limit, total: count, totalPages: Math.ceil(count / limit) || 1 } };
  }

  async createWorkflow(initiatorId: string, workflowData: CreateWorkflowRequest): Promise<WorkflowRequest> {
    try {
      const requestNumber = generateRequestNumber();

@@ -18,9 +164,12 @@ export class WorkflowService {
        title: workflowData.title,
        description: workflowData.description,
        priority: workflowData.priority,
        currentLevel: 1,
        totalLevels: workflowData.approvalLevels.length,
        totalTatHours,
-       status: 'DRAFT'
+       status: WorkflowStatus.DRAFT,
+       isDraft: true,
+       isDeleted: false
      });

      // Create approval levels

@@ -34,6 +183,10 @@ export class WorkflowService {
          approverName: levelData.approverName,
          tatHours: levelData.tatHours,
          tatDays: calculateTATDays(levelData.tatHours),
+         status: ApprovalStatus.PENDING,
+         elapsedHours: 0,
+         remainingHours: levelData.tatHours,
+         tatPercentageUsed: 0,
          isFinalApprover: levelData.isFinalApprover || false
        });
      }

@@ -46,12 +199,13 @@ export class WorkflowService {
          userId: participantData.userId,
          userEmail: participantData.userEmail,
          userName: participantData.userName,
-         participantType: participantData.participantType,
+         participantType: (participantData.participantType as unknown as ParticipantType),
          canComment: participantData.canComment ?? true,
          canViewDocuments: participantData.canViewDocuments ?? true,
          canDownloadDocuments: participantData.canDownloadDocuments ?? false,
          notificationEnabled: participantData.notificationEnabled ?? true,
-         addedBy: initiatorId
+         addedBy: initiatorId,
+         isActive: true
        });
      }
    }

@@ -80,6 +234,67 @@ export class WorkflowService {
    }
  }

  async getWorkflowDetails(requestId: string) {
    try {
      const workflow = await WorkflowRequest.findByPk(requestId, {
        include: [ { association: 'initiator' } ]
      });
      if (!workflow) return null;

      // Compute current approver and SLA summary (same logic used in lists)
      const currentLevel = await ApprovalLevel.findOne({
        where: {
          requestId,
          status: { [Op.in]: ['PENDING', 'IN_PROGRESS'] as any },
        },
        order: [['levelNumber', 'ASC']],
        include: [{ model: User, as: 'approver', attributes: ['userId', 'email', 'displayName'] }]
      });

      const totalTat = Number((workflow as any).totalTatHours || 0);
      let percent = 0;
      let remainingText = '';
      if ((workflow as any).submissionDate && totalTat > 0) {
        const startedAt = new Date((workflow as any).submissionDate);
        const now = new Date();
        const elapsedHrs = Math.max(0, (now.getTime() - startedAt.getTime()) / (1000 * 60 * 60));
        percent = Math.min(100, Math.round((elapsedHrs / totalTat) * 100));
        const remaining = Math.max(0, totalTat - elapsedHrs);
        const days = Math.floor(remaining / 24);
        const hours = Math.floor(remaining % 24);
        remainingText = days > 0 ? `${days} days ${hours} hours remaining` : `${hours} hours remaining`;
      }

      const summary = {
        requestId: (workflow as any).requestId,
        requestNumber: (workflow as any).requestNumber,
        title: (workflow as any).title,
        status: (workflow as any).status,
        priority: (workflow as any).priority,
        submittedAt: (workflow as any).submissionDate,
        totalLevels: (workflow as any).totalLevels,
        currentLevel: currentLevel ? (currentLevel as any).levelNumber : null,
        currentApprover: currentLevel ? {
          userId: (currentLevel as any).approverId,
          email: (currentLevel as any).approverEmail,
          name: (currentLevel as any).approverName,
        } : null,
        sla: { percent, remainingText },
      };

      // Load related entities explicitly to avoid alias issues
      const approvals = await ApprovalLevel.findAll({ where: { requestId }, order: [['levelNumber','ASC']] }) as any[];
      const participants = await Participant.findAll({ where: { requestId } }) as any[];
      const documents = await Document.findAll({ where: { requestId } }) as any[];
      const activities: any[] = [];

      return { workflow, approvals, participants, documents, activities, summary };
    } catch (error) {
      logger.error(`Failed to get workflow details ${requestId}:`, error);
      throw new Error('Failed to get workflow details');
    }
  }

  async updateWorkflow(requestId: string, updateData: UpdateWorkflowRequest): Promise<WorkflowRequest | null> {
    try {
      const workflow = await WorkflowRequest.findByPk(requestId);

@@ -98,7 +313,7 @@ export class WorkflowService {
      if (!workflow) return null;

      return await workflow.update({
-       status: 'PENDING',
+       status: WorkflowStatus.PENDING,
        isDraft: false,
        submissionDate: new Date()
      });
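createWorkflow relies on generateRequestNumber() from @utils/helpers, which this commit does not show. Given the REQ-YYYY-NNNNN format pinned in backend_structure.txt, a plausible sketch — the sequence source is an assumption; a production version would draw NNNNN from a database sequence rather than Math.random:

function generateRequestNumber(): string {
  const year = new Date().getFullYear();
  const seq = Math.floor(Math.random() * 100000); // placeholder; real impl likely uses a DB sequence
  return `REQ-${year}-${String(seq).padStart(5, '0')}`; // e.g. REQ-2025-00042
}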
@@ -53,7 +53,7 @@ export interface CreateParticipant {
   userId: string;
   userEmail: string;
   userName: string;
-  participantType: 'SPECTATOR' | 'CONSULTATION';
+  participantType: 'INITIATOR' | 'APPROVER' | 'SPECTATOR';
   canComment?: boolean;
   canViewDocuments?: boolean;
   canDownloadDocuments?: boolean;
@@ -4,7 +4,7 @@ export const createParticipantSchema = z.object({
   userId: z.string().uuid(),
   userEmail: z.string().email(),
   userName: z.string().min(1),
-  participantType: z.enum(['SPECTATOR', 'CONSULTATION'] as const),
+  participantType: z.enum(['INITIATOR', 'APPROVER', 'SPECTATOR'] as const),
   canComment: z.boolean().optional(),
   canViewDocuments: z.boolean().optional(),
   canDownloadDocuments: z.boolean().optional(),
@@ -18,7 +18,7 @@ export const createWorkflowSchema = z.object({
   userId: z.string().uuid(),
   userEmail: z.string().email(),
   userName: z.string().min(1),
-  participantType: z.enum(['SPECTATOR', 'CONSULTATION'] as const),
+  participantType: z.enum(['INITIATOR', 'APPROVER', 'SPECTATOR'] as const),
   canComment: z.boolean().optional(),
   canViewDocuments: z.boolean().optional(),
   canDownloadDocuments: z.boolean().optional(),