first commit

This commit is contained in:
laxmanhalaki 2025-10-29 19:31:40 +05:30
commit bbc1301342
70 changed files with 16629 additions and 0 deletions

1772
.cursor/project_setup.md Normal file

File diff suppressed because it is too large Load Diff

18
.dockerignore Normal file
View File

@ -0,0 +1,18 @@
# Docker ignore file
node_modules
npm-debug.log
.git
.gitignore
README.md
.env
.nyc_output
coverage
dist
logs
*.log
.DS_Store
.vscode
.idea
*.swp
*.swo

28
.eslintrc.json Normal file
View File

@ -0,0 +1,28 @@
{
"env": {
"es2021": true,
"node": true
},
"extends": [
"eslint:recommended",
    "plugin:@typescript-eslint/recommended"
],
"parser": "@typescript-eslint/parser",
"parserOptions": {
"ecmaVersion": "latest",
"sourceType": "module"
},
"plugins": [
"@typescript-eslint"
],
"rules": {
"@typescript-eslint/no-unused-vars": "error",
"@typescript-eslint/no-explicit-any": "warn",
"@typescript-eslint/explicit-function-return-type": "off",
"@typescript-eslint/explicit-module-boundary-types": "off",
"@typescript-eslint/no-inferrable-types": "off",
"prefer-const": "error",
"no-var": "error"
},
"ignorePatterns": ["dist/", "node_modules/", "coverage/"]
}

137
.gitignore vendored Normal file
View File

@ -0,0 +1,137 @@
# Logs
logs
*.log
npm-debug.log*
yarn-debug.log*
yarn-error.log*
lerna-debug.log*
# Runtime data
pids
*.pid
*.seed
*.pid.lock
# Directory for instrumented libs generated by jscoverage/JSCover
lib-cov
# Coverage directory used by tools like istanbul
coverage
*.lcov
# nyc test coverage
.nyc_output
# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
.grunt
# Bower dependency directory (https://bower.io/)
bower_components
# node-waf configuration
.lock-wscript
# Compiled binary addons (https://nodejs.org/api/addons.html)
build/Release
# Dependency directories
node_modules/
jspm_packages/
# TypeScript v1 declaration files
typings/
# TypeScript cache
*.tsbuildinfo
# Optional npm cache directory
.npm
# Optional eslint cache
.eslintcache
# Microbundle cache
.rpt2_cache/
.rts2_cache_cjs/
.rts2_cache_es/
.rts2_cache_umd/
# Optional REPL history
.node_repl_history
# Output of 'npm pack'
*.tgz
# Yarn Integrity file
.yarn-integrity
# dotenv environment variables file
.env
.env.test
.env.local
.env.development.local
.env.test.local
.env.production.local
# parcel-bundler cache (https://parceljs.org/)
.cache
.parcel-cache
# Next.js build output
.next
# Nuxt.js build / generate output
.nuxt
dist
# Gatsby files
.cache/
# Comment in the public line in if your project uses Gatsby and not Next.js
# https://nextjs.org/blog/next-9-1#public-directory-support
# public
# vuepress build output
.vuepress/dist
# Serverless directories
.serverless/
# FuseBox cache
.fusebox/
# DynamoDB Local files
.dynamodb/
# TernJS port file
.tern-port
# Stores VSCode versions used for testing VSCode extensions
.vscode-test
# yarn v2
.yarn/cache
.yarn/unplugged
.yarn/build-state.yml
.yarn/install-state.gz
.pnp.*
# IDE
.vscode/
.idea/
*.swp
*.swo
# OS
.DS_Store
Thumbs.db
# Uploads
uploads/
!uploads/.gitkeep
# Database
*.sqlite
*.db
# GCP Service Account Key
config/gcp-key.json

8
.prettierrc Normal file
View File

@ -0,0 +1,8 @@
{
"semi": true,
"trailingComma": "es5",
"singleQuote": true,
"printWidth": 80,
"tabWidth": 2,
"useTabs": false
}

58
Dockerfile Normal file
View File

@ -0,0 +1,58 @@
# Dockerfile
# Multi-stage build: compile TypeScript in a throwaway builder stage, then ship
# only the compiled output plus production dependencies in the final image.
FROM node:22-alpine AS builder
WORKDIR /app
# Copy package files
COPY package*.json ./
COPY tsconfig.json ./
# Install all dependencies (including devDependencies for build)
RUN npm ci
# Copy source code
COPY src ./src
# Build TypeScript to JavaScript
RUN npm run build
# =====================================
# Production Image
# =====================================
FROM node:22-alpine
WORKDIR /app
# Install PM2 globally
RUN npm install -g pm2
# Copy package files
COPY package*.json ./
# Install only production dependencies.
# FIX: `--only=production` is deprecated/unsupported in modern npm (node:22 ships
# npm 10); `--omit=dev` is the supported equivalent.
RUN npm ci --omit=dev
# Copy compiled JavaScript from builder
COPY --from=builder /app/dist ./dist
# Create logs and uploads directories
RUN mkdir -p logs uploads
# Create non-root user with fixed UID/GID so volume permissions are predictable.
# FIX: the original adduser omitted `-G nodejs`, leaving the user outside the
# group created on the previous line.
RUN addgroup -g 1001 -S nodejs && \
    adduser -S nodejs -u 1001 -G nodejs
# Change ownership of the app directory
RUN chown -R nodejs:nodejs /app
USER nodejs
# Expose port
EXPOSE 5000
# Health check: poll the app's /health endpoint; any non-200 marks the container unhealthy
HEALTHCHECK --interval=30s --timeout=10s --start-period=40s --retries=3 \
CMD node -e "require('http').get('http://localhost:5000/health', (r) => {process.exit(r.statusCode === 200 ? 0 : 1)})"
# Start with PM2
CMD ["pm2-runtime", "start", "dist/server.js", "--name", "re-workflow-api"]

377
ERD_Mermaid.txt Normal file
View File

@ -0,0 +1,377 @@
%% Royal Enfield Workflow Management System
%% Entity Relationship Diagram
%% Database: PostgreSQL 16.x
erDiagram
%% Core Tables
users ||--o{ workflow_requests : "initiates"
users ||--o{ approval_levels : "approves"
users ||--o{ participants : "participates"
users ||--o{ work_notes : "posts"
users ||--o{ documents : "uploads"
users ||--o{ activities : "performs"
users ||--o{ notifications : "receives"
users ||--o{ user_sessions : "has"
users ||--o{ users : "reports_to"
workflow_requests ||--|{ approval_levels : "has"
workflow_requests ||--o{ participants : "involves"
workflow_requests ||--o{ documents : "contains"
workflow_requests ||--o{ work_notes : "has"
workflow_requests ||--o{ activities : "logs"
workflow_requests ||--o{ tat_tracking : "monitors"
workflow_requests ||--o{ notifications : "triggers"
workflow_requests ||--|| conclusion_remarks : "concludes"
approval_levels ||--o{ tat_tracking : "tracks"
work_notes ||--o{ work_note_attachments : "has"
notifications ||--o{ email_logs : "sends"
notifications ||--o{ sms_logs : "sends"
%% Entity Definitions
users {
uuid user_id PK
varchar employee_id UK "HR System ID"
varchar email UK "Primary Email"
varchar first_name
varchar last_name
varchar display_name "Full Name"
varchar department
varchar designation
varchar phone
uuid reporting_manager_id FK "Self Reference"
boolean is_active "Account Status"
boolean is_admin "Super User Flag"
timestamp last_login
timestamp created_at
timestamp updated_at
}
workflow_requests {
uuid request_id PK
varchar request_number UK "REQ-YYYY-NNNNN"
uuid initiator_id FK
varchar template_type "CUSTOM or TEMPLATE"
varchar title "Request Summary"
text description "Detailed Description"
enum priority "STANDARD or EXPRESS"
enum status "DRAFT to CLOSED"
integer current_level "Active Stage"
integer total_levels "Max 10 Levels"
decimal total_tat_hours "Cumulative TAT"
timestamp submission_date
timestamp closure_date
text conclusion_remark "Final Summary"
text ai_generated_conclusion "AI Version"
boolean is_draft "Saved Draft"
boolean is_deleted "Soft Delete"
timestamp created_at
timestamp updated_at
}
approval_levels {
uuid level_id PK
uuid request_id FK
integer level_number "Sequential Level"
varchar level_name "Optional Label"
uuid approver_id FK
varchar approver_email
varchar approver_name
decimal tat_hours "Level TAT"
integer tat_days "Calculated Days"
enum status "PENDING to APPROVED"
timestamp level_start_time "Timer Start"
timestamp level_end_time "Timer End"
timestamp action_date "Decision Time"
text comments "Approval Notes"
text rejection_reason
boolean is_final_approver "Last Level"
decimal elapsed_hours "Time Used"
decimal remaining_hours "Time Left"
decimal tat_percentage_used "Usage %"
timestamp created_at
timestamp updated_at
}
participants {
uuid participant_id PK
uuid request_id FK
uuid user_id FK
varchar user_email
varchar user_name
enum participant_type "SPECTATOR etc"
boolean can_comment "Permission"
boolean can_view_documents "Permission"
boolean can_download_documents "Permission"
boolean notification_enabled
uuid added_by FK
timestamp added_at
boolean is_active
}
documents {
uuid document_id PK
uuid request_id FK
uuid uploaded_by FK
varchar file_name "Storage Name"
varchar original_file_name "Display Name"
varchar file_type
varchar file_extension
bigint file_size "Bytes (Max 10MB)"
varchar file_path "Cloud Path"
varchar storage_url "Public URL"
varchar mime_type
varchar checksum "SHA-256"
boolean is_google_doc
varchar google_doc_url
enum category "Document Type"
integer version "Version Number"
uuid parent_document_id FK "Version Parent"
boolean is_deleted
integer download_count
timestamp uploaded_at
}
work_notes {
uuid note_id PK
uuid request_id FK
uuid user_id FK
varchar user_name
varchar user_role "INITIATOR etc"
text message "Max 2000 chars"
varchar message_type "COMMENT etc"
boolean is_priority "Urgent Flag"
boolean has_attachment
uuid parent_note_id FK "Threading"
uuid[] mentioned_users "@Tagged Users"
jsonb reactions "Emoji Responses"
boolean is_edited
boolean is_deleted
timestamp created_at
timestamp updated_at
}
work_note_attachments {
uuid attachment_id PK
uuid note_id FK
varchar file_name
varchar file_type
bigint file_size
varchar file_path
varchar storage_url
boolean is_downloadable
integer download_count
timestamp uploaded_at
}
activities {
uuid activity_id PK
uuid request_id FK
uuid user_id FK "NULL for System"
varchar user_name
varchar activity_type "Event Type"
text activity_description
varchar activity_category "Classification"
varchar severity "INFO to CRITICAL"
jsonb metadata "Additional Context"
boolean is_system_event
varchar ip_address
text user_agent
timestamp created_at
}
notifications {
uuid notification_id PK
uuid user_id FK
uuid request_id FK
varchar notification_type "Event Type"
varchar title
text message
boolean is_read
enum priority "LOW to URGENT"
varchar action_url
boolean action_required
jsonb metadata
varchar[] sent_via "IN_APP, EMAIL, SMS"
boolean email_sent
boolean sms_sent
boolean push_sent
timestamp read_at
timestamp expires_at
timestamp created_at
}
tat_tracking {
uuid tracking_id PK
uuid request_id FK
uuid level_id FK "NULL for Request"
varchar tracking_type "REQUEST or LEVEL"
enum tat_status "ON_TRACK to BREACHED"
decimal total_tat_hours
decimal elapsed_hours
decimal remaining_hours
decimal percentage_used
boolean threshold_50_breached
timestamp threshold_50_alerted_at
boolean threshold_80_breached
timestamp threshold_80_alerted_at
boolean threshold_100_breached
timestamp threshold_100_alerted_at
integer alert_count
timestamp last_calculated_at
}
conclusion_remarks {
uuid conclusion_id PK
uuid request_id FK
text ai_generated_remark "AI Output"
varchar ai_model_used "GPT-4 etc"
decimal ai_confidence_score "0.00 to 1.00"
text final_remark "User Edited"
uuid edited_by FK
boolean is_edited
integer edit_count
jsonb approval_summary
jsonb document_summary
text[] key_discussion_points
timestamp generated_at
timestamp finalized_at
}
audit_logs {
uuid audit_id PK
uuid user_id FK
varchar entity_type "Table Name"
uuid entity_id "Record ID"
varchar action "CREATE, UPDATE etc"
varchar action_category
jsonb old_values "Before"
jsonb new_values "After"
text changes_summary
varchar ip_address
text user_agent
varchar session_id
varchar request_method "GET, POST etc"
varchar request_url
integer response_status "HTTP Code"
integer execution_time_ms
timestamp created_at
}
user_sessions {
uuid session_id PK
uuid user_id FK
varchar session_token UK "JWT Access"
varchar refresh_token "JWT Refresh"
varchar ip_address
text user_agent
varchar device_type "WEB, MOBILE"
varchar browser
varchar os
timestamp login_at
timestamp last_activity_at
timestamp logout_at
timestamp expires_at
boolean is_active
varchar logout_reason
}
email_logs {
uuid email_log_id PK
uuid request_id FK
uuid notification_id FK
varchar recipient_email
uuid recipient_user_id FK
text[] cc_emails
text[] bcc_emails
varchar subject
text body
varchar email_type
varchar status "QUEUED to SENT"
integer send_attempts
timestamp sent_at
timestamp failed_at
text failure_reason
timestamp opened_at
timestamp clicked_at
timestamp created_at
}
sms_logs {
uuid sms_log_id PK
uuid request_id FK
uuid notification_id FK
varchar recipient_phone
uuid recipient_user_id FK
text message
varchar sms_type
varchar status "QUEUED to DELIVERED"
integer send_attempts
timestamp sent_at
timestamp delivered_at
timestamp failed_at
text failure_reason
varchar sms_provider
varchar sms_provider_message_id
decimal cost
timestamp created_at
}
system_settings {
uuid setting_id PK
varchar setting_key UK "CONFIG_NAME"
text setting_value "Value"
varchar setting_type "STRING, NUMBER etc"
varchar setting_category "TAT, NOTIFICATION"
text description
boolean is_editable
boolean is_sensitive "Encrypted"
jsonb validation_rules
text default_value
uuid updated_by FK
timestamp created_at
timestamp updated_at
}
workflow_templates {
uuid template_id PK
varchar template_name "Future Scope"
text template_description
varchar template_category
jsonb approval_levels_config
decimal default_tat_hours
boolean is_active
integer usage_count
uuid created_by FK
timestamp created_at
timestamp updated_at
}
report_cache {
uuid cache_id PK
varchar report_type
jsonb report_params "Input Filters"
jsonb report_data "Cached Result"
uuid generated_by FK
timestamp generated_at
timestamp expires_at
integer access_count
timestamp last_accessed_at
}
%% Notes and Constraints
%% 1. All timestamps are WITH TIME ZONE
%% 2. UUIDs are generated via uuid-ossp extension
%% 3. Enums are custom types defined separately
%% 4. JSONB used for flexible metadata storage
%% 5. Soft deletes via is_deleted flags
%% 6. Audit trail via activities and audit_logs
%% 7. Multi-channel notifications (in-app, email, SMS, push)
%% 8. TAT thresholds: 50%, 80%, 100%
%% 9. Max approval levels: 10
%% 10. Max file size: 10 MB

176
README.md Normal file
View File

@ -0,0 +1,176 @@
# Royal Enfield Workflow Management System - Backend
A comprehensive backend API for the Royal Enfield Workflow Management System built with Node.js, TypeScript, Express.js, and PostgreSQL.
## Features
- **Frontend SSO Integration**: Handles user authentication via frontend SSO
- **JWT Authentication**: Secure token-based authentication with refresh tokens
- **User Management**: Create and update users based on SSO data
- **Workflow Management**: Complete workflow request lifecycle
- **Approval System**: Multi-level approval workflow
- **Document Management**: File upload and management
- **Notification System**: Real-time notifications
- **TAT Tracking**: Turnaround time monitoring
- **Audit Logging**: Comprehensive activity tracking
- **RESTful API**: Well-structured API endpoints
## Technology Stack
- **Runtime**: Node.js 22 LTS
- **Language**: TypeScript 5.7
- **Framework**: Express.js 4.21
- **Database**: PostgreSQL 16
- **ORM**: Sequelize 6.37
- **Authentication**: JWT + Frontend SSO
- **Validation**: Zod
- **Logging**: Winston
- **Testing**: Jest + Supertest
- **Process Manager**: PM2
## Quick Start
### Prerequisites
- Node.js 22.x LTS
- PostgreSQL 16.x
- npm 10.x or higher
### Installation
1. **Clone the repository**
```bash
git clone <repository-url>
cd re-workflow-backend
```
2. **Install dependencies**
```bash
npm install
```
3. **Setup environment**
```bash
cp env.example .env
# Edit .env with your configuration
```
4. **Setup database**
```bash
# Create database
createdb re_workflow_db
# Run schema
psql -U postgres -d re_workflow_db -f database/schema/schema.sql
```
5. **Start development server**
```bash
npm run dev
```
The API will be available at `http://localhost:5000`
### Docker Setup
```bash
# Copy environment file
cp env.example .env
# Start services
docker-compose up --build -d
# Check logs
docker-compose logs -f
```
## API Endpoints
### Authentication
- `POST /api/v1/auth/sso-callback` - SSO callback from frontend
- `GET /api/v1/auth/me` - Get current user profile
- `POST /api/v1/auth/refresh` - Refresh access token
- `POST /api/v1/auth/logout` - Logout user
- `GET /api/v1/auth/validate` - Validate token
### Health Check
- `GET /health` - API health status
- `GET /api/v1/health` - Detailed health check
## Environment Variables
See `env.example` for all required environment variables.
## Development
```bash
# Run in development mode
npm run dev
# Run tests
npm test
# Run linting
npm run lint
# Run type checking
npm run type-check
# Build for production
npm run build
```
## Project Structure
```
src/
├── app.ts # Express app configuration
├── server.ts # Server entry point
├── config/ # Configuration files
├── controllers/ # Request handlers
├── services/ # Business logic
├── models/ # Sequelize models
├── routes/ # API routes
├── middlewares/ # Express middlewares
├── validators/ # Request validation schemas
├── utils/ # Utility functions
└── types/ # TypeScript type definitions
```
## Database Schema
The database schema includes all tables from the ERD:
- `users` - User information
- `workflow_requests` - Main workflow requests
- `approval_levels` - Approval hierarchy
- `participants` - Workflow participants
- `documents` - Document metadata
- `work_notes` - Communication within workflow
- `activities` - Activity log
- `notifications` - User notifications
- `tat_tracking` - TAT monitoring
- And more...
## Authentication Flow
1. Frontend handles SSO authentication
2. Frontend sends user data to `/api/v1/auth/sso-callback`
3. Backend creates/updates user record
4. Backend generates JWT tokens
5. Frontend uses tokens for subsequent API calls
## Contributing
1. Fork the repository
2. Create a feature branch
3. Make your changes
4. Run tests and linting
5. Submit a pull request
## License
This project is proprietary to Royal Enfield.
## Support
For support and questions, please contact the development team.

10
babel.config.js Normal file
View File

@ -0,0 +1,10 @@
// Babel configuration: transpiles TypeScript for the currently running Node
// version (preset-env with `node: 'current'` — presumably consumed by Jest via
// babel-jest; confirm against package.json scripts).
module.exports = {
presets: [
['@babel/preset-env', { targets: { node: 'current' } }],
'@babel/preset-typescript',
],
plugins: [
// NOTE(review): the "proposal" plugins are deprecated upstream and were renamed
// to @babel/plugin-transform-decorators / @babel/plugin-transform-class-properties.
// Renaming here requires the renamed packages in package.json — confirm before changing.
['@babel/plugin-proposal-decorators', { legacy: true }],
['@babel/plugin-proposal-class-properties', { loose: true }],
],
};

View File

@ -0,0 +1,24 @@
-- Migration: Update reporting_manager_id to use employee_id instead of user_id
-- Date: 2025-10-29
-- Description: Change reporting_manager_id from UUID to VARCHAR(50) to reference employee_id

-- First, drop the existing foreign key constraint
ALTER TABLE users DROP CONSTRAINT IF EXISTS users_reporting_manager_id_fkey;

-- Add the location column if it doesn't exist
ALTER TABLE users ADD COLUMN IF NOT EXISTS location JSONB;

-- Change the column type from UUID to VARCHAR(50).
-- FIX: uuid has no implicit cast to varchar, so ALTER TYPE requires an explicit
-- USING clause or the statement fails.
-- NOTE(review): any pre-existing UUID values will not match employee_id values and
-- would violate the new FK below — confirm the column is NULL/empty at migration time.
ALTER TABLE users ALTER COLUMN reporting_manager_id TYPE VARCHAR(50)
    USING reporting_manager_id::text;

-- Ensure employee_id is unique BEFORE referencing it from a foreign key.
-- FIX: "ADD CONSTRAINT IF NOT EXISTS" is not valid PostgreSQL syntax; guard with a
-- DO block instead. Also moved ahead of the FK creation — the original added it
-- after, so the FK would fail on databases lacking the unique constraint.
DO $$
BEGIN
    IF NOT EXISTS (
        SELECT 1 FROM pg_constraint WHERE conname = 'users_employee_id_unique'
    ) THEN
        ALTER TABLE users ADD CONSTRAINT users_employee_id_unique UNIQUE (employee_id);
    END IF;
END
$$;

-- Add the new foreign key constraint referencing employee_id
ALTER TABLE users ADD CONSTRAINT users_reporting_manager_id_fkey
    FOREIGN KEY (reporting_manager_id) REFERENCES users(employee_id);

-- Update the index
DROP INDEX IF EXISTS idx_users_reporting_manager;
CREATE INDEX idx_users_reporting_manager ON users(reporting_manager_id);

View File

@ -0,0 +1,16 @@
-- Migration: Remove reporting_manager_id column completely
-- Date: 2025-10-29
-- Description: Drop reporting_manager_id column and its constraints from users table
-- Drop the foreign key constraint first (must precede the column drop)
ALTER TABLE users DROP CONSTRAINT IF EXISTS users_reporting_manager_id_fkey;
-- Drop the index
DROP INDEX IF EXISTS idx_users_reporting_manager;
-- NOTE(review): 'users_reporting_manager_id' matches no index name created elsewhere
-- in this repo; presumably a defensive drop for an auto-generated name — confirm.
DROP INDEX IF EXISTS users_reporting_manager_id;
-- Drop the column
ALTER TABLE users DROP COLUMN IF EXISTS reporting_manager_id;

433
database/schema/schema.sql Normal file
View File

@ -0,0 +1,433 @@
-- Royal Enfield Workflow Management System Database Schema
-- PostgreSQL 16.x
-- Generated from ERD_Mermaid.txt
-- Enable UUID extension
CREATE EXTENSION IF NOT EXISTS "uuid-ossp";
-- Create custom types
-- Enum types shared by the tables below.
CREATE TYPE priority_type AS ENUM ('STANDARD', 'EXPRESS');
CREATE TYPE workflow_status AS ENUM ('DRAFT', 'PENDING', 'IN_PROGRESS', 'APPROVED', 'REJECTED', 'CLOSED');
CREATE TYPE approval_status AS ENUM ('PENDING', 'IN_PROGRESS', 'APPROVED', 'REJECTED', 'SKIPPED');
CREATE TYPE participant_type AS ENUM ('SPECTATOR', 'INITIATOR', 'APPROVER', 'CONSULTATION');
CREATE TYPE tat_status AS ENUM ('ON_TRACK', 'APPROACHING', 'BREACHED');
CREATE TYPE notification_priority AS ENUM ('LOW', 'MEDIUM', 'HIGH', 'URGENT');
CREATE TYPE document_category AS ENUM ('SUPPORTING', 'APPROVAL', 'REFERENCE', 'FINAL', 'OTHER');
CREATE TYPE work_note_type AS ENUM ('COMMENT', 'QUESTION', 'CLARIFICATION', 'UPDATE', 'SYSTEM');
CREATE TYPE activity_severity AS ENUM ('INFO', 'WARNING', 'ERROR', 'CRITICAL');
CREATE TYPE setting_type AS ENUM ('STRING', 'NUMBER', 'BOOLEAN', 'JSON', 'ARRAY');
-- Users table
-- NOTE(review): the ERD marks employee_id as UK, but no UNIQUE constraint is declared
-- here; a migration in this repo adds one before FK-referencing employee_id — confirm
-- which is authoritative.
CREATE TABLE users (
user_id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
employee_id VARCHAR(50), -- Optional - HR System Employee ID
okta_sub VARCHAR(100) UNIQUE NOT NULL, -- Okta subject identifier (unique)
email VARCHAR(255) UNIQUE NOT NULL, -- Primary identifier for user lookup
first_name VARCHAR(100), -- Optional
last_name VARCHAR(100), -- Optional
display_name VARCHAR(200), -- Optional
department VARCHAR(100),
designation VARCHAR(100),
phone VARCHAR(20),
location JSONB,
is_active BOOLEAN DEFAULT true,
is_admin BOOLEAN DEFAULT false,
last_login TIMESTAMP WITH TIME ZONE,
created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP
);
-- Workflow Requests table
-- One row per request; soft-deleted via is_deleted, draft state via is_draft.
CREATE TABLE workflow_requests (
request_id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
request_number VARCHAR(20) UNIQUE NOT NULL, -- per ERD: "REQ-YYYY-NNNNN"
initiator_id UUID NOT NULL REFERENCES users(user_id),
template_type VARCHAR(20) DEFAULT 'CUSTOM',
title VARCHAR(500) NOT NULL,
description TEXT NOT NULL,
priority priority_type DEFAULT 'STANDARD',
status workflow_status DEFAULT 'DRAFT',
current_level INTEGER DEFAULT 1,
total_levels INTEGER DEFAULT 1 CHECK (total_levels <= 10), -- ERD: max 10 levels
total_tat_hours DECIMAL(10,2) DEFAULT 0,
submission_date TIMESTAMP WITH TIME ZONE,
closure_date TIMESTAMP WITH TIME ZONE,
conclusion_remark TEXT,
ai_generated_conclusion TEXT,
is_draft BOOLEAN DEFAULT true,
is_deleted BOOLEAN DEFAULT false,
created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP
);
-- Approval Levels table
-- One row per (request, level); generated columns derive TAT arithmetic from
-- tat_hours/elapsed_hours (NULLIF guards division by zero).
CREATE TABLE approval_levels (
level_id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
request_id UUID NOT NULL REFERENCES workflow_requests(request_id) ON DELETE CASCADE,
level_number INTEGER NOT NULL,
level_name VARCHAR(100),
approver_id UUID NOT NULL REFERENCES users(user_id),
approver_email VARCHAR(255) NOT NULL,
approver_name VARCHAR(200) NOT NULL,
tat_hours DECIMAL(10,2) NOT NULL,
tat_days INTEGER GENERATED ALWAYS AS (CEIL(tat_hours / 24)) STORED,
status approval_status DEFAULT 'PENDING',
level_start_time TIMESTAMP WITH TIME ZONE,
level_end_time TIMESTAMP WITH TIME ZONE,
action_date TIMESTAMP WITH TIME ZONE,
comments TEXT,
rejection_reason TEXT,
is_final_approver BOOLEAN DEFAULT false,
elapsed_hours DECIMAL(10,2) DEFAULT 0,
remaining_hours DECIMAL(10,2) GENERATED ALWAYS AS (GREATEST(0, tat_hours - elapsed_hours)) STORED,
tat_percentage_used DECIMAL(5,2) GENERATED ALWAYS AS (LEAST(100, (elapsed_hours / NULLIF(tat_hours, 0)) * 100)) STORED,
created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
UNIQUE(request_id, level_number)
);
-- Participants table
-- Users attached to a request with per-user permissions; one row per (request, user).
CREATE TABLE participants (
participant_id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
request_id UUID NOT NULL REFERENCES workflow_requests(request_id) ON DELETE CASCADE,
user_id UUID NOT NULL REFERENCES users(user_id),
user_email VARCHAR(255) NOT NULL,
user_name VARCHAR(200) NOT NULL,
participant_type participant_type NOT NULL,
can_comment BOOLEAN DEFAULT true,
can_view_documents BOOLEAN DEFAULT true,
can_download_documents BOOLEAN DEFAULT false,
notification_enabled BOOLEAN DEFAULT true,
added_by UUID NOT NULL REFERENCES users(user_id),
added_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
is_active BOOLEAN DEFAULT true,
UNIQUE(request_id, user_id)
);
-- Documents table
-- File metadata only (bytes live at file_path/storage_url); versions chain via
-- parent_document_id; soft delete via is_deleted.
CREATE TABLE documents (
document_id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
request_id UUID NOT NULL REFERENCES workflow_requests(request_id) ON DELETE CASCADE,
uploaded_by UUID NOT NULL REFERENCES users(user_id),
file_name VARCHAR(255) NOT NULL,
original_file_name VARCHAR(255) NOT NULL,
file_type VARCHAR(100) NOT NULL,
file_extension VARCHAR(10) NOT NULL,
file_size BIGINT NOT NULL CHECK (file_size <= 10485760), -- 10MB limit
file_path VARCHAR(500) NOT NULL,
storage_url VARCHAR(500),
mime_type VARCHAR(100) NOT NULL,
checksum VARCHAR(64) NOT NULL, -- per ERD: SHA-256 (64 hex chars)
is_google_doc BOOLEAN DEFAULT false,
google_doc_url VARCHAR(500),
category document_category DEFAULT 'OTHER',
version INTEGER DEFAULT 1,
parent_document_id UUID REFERENCES documents(document_id),
is_deleted BOOLEAN DEFAULT false,
download_count INTEGER DEFAULT 0,
uploaded_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP
);
-- Work Notes table
-- Threaded discussion on a request (parent_note_id for replies); message capped
-- at 2000 chars by CHECK; soft delete/edit flags rather than row removal.
CREATE TABLE work_notes (
note_id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
request_id UUID NOT NULL REFERENCES workflow_requests(request_id) ON DELETE CASCADE,
user_id UUID NOT NULL REFERENCES users(user_id),
user_name VARCHAR(200) NOT NULL,
user_role VARCHAR(50) NOT NULL,
message TEXT NOT NULL CHECK (LENGTH(message) <= 2000),
message_type work_note_type DEFAULT 'COMMENT',
is_priority BOOLEAN DEFAULT false,
has_attachment BOOLEAN DEFAULT false,
parent_note_id UUID REFERENCES work_notes(note_id),
mentioned_users UUID[], -- @-tagged user_ids; array is not FK-enforced
reactions JSONB DEFAULT '{}',
is_edited BOOLEAN DEFAULT false,
is_deleted BOOLEAN DEFAULT false,
created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP
);
-- Work Note Attachments table
CREATE TABLE work_note_attachments (
attachment_id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
note_id UUID NOT NULL REFERENCES work_notes(note_id) ON DELETE CASCADE,
file_name VARCHAR(255) NOT NULL,
file_type VARCHAR(100) NOT NULL,
file_size BIGINT NOT NULL,
file_path VARCHAR(500) NOT NULL,
storage_url VARCHAR(500),
is_downloadable BOOLEAN DEFAULT true,
download_count INTEGER DEFAULT 0,
uploaded_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP
);
-- Activities table
-- Append-only activity feed; user_id is nullable for system-generated events.
CREATE TABLE activities (
activity_id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
request_id UUID REFERENCES workflow_requests(request_id) ON DELETE CASCADE,
user_id UUID REFERENCES users(user_id),
user_name VARCHAR(200),
activity_type VARCHAR(100) NOT NULL,
activity_description TEXT NOT NULL,
activity_category VARCHAR(50),
severity activity_severity DEFAULT 'INFO',
metadata JSONB DEFAULT '{}',
is_system_event BOOLEAN DEFAULT false,
ip_address INET,
user_agent TEXT,
created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP
);
-- Notifications table
-- Multi-channel delivery tracked via sent_via array plus per-channel sent flags.
CREATE TABLE notifications (
notification_id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
user_id UUID NOT NULL REFERENCES users(user_id),
request_id UUID REFERENCES workflow_requests(request_id) ON DELETE CASCADE,
notification_type VARCHAR(100) NOT NULL,
title VARCHAR(200) NOT NULL,
message TEXT NOT NULL,
is_read BOOLEAN DEFAULT false,
priority notification_priority DEFAULT 'MEDIUM',
action_url VARCHAR(500),
action_required BOOLEAN DEFAULT false,
metadata JSONB DEFAULT '{}',
sent_via VARCHAR(20)[] DEFAULT '{"IN_APP"}',
email_sent BOOLEAN DEFAULT false,
sms_sent BOOLEAN DEFAULT false,
push_sent BOOLEAN DEFAULT false,
read_at TIMESTAMP WITH TIME ZONE,
expires_at TIMESTAMP WITH TIME ZONE,
created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP
);
-- TAT Tracking table
-- Tracks TAT either per request or per level (tracking_type discriminates;
-- level_id is NULL for request-scope rows). Generated columns mirror
-- approval_levels' TAT math.
CREATE TABLE tat_tracking (
tracking_id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
request_id UUID NOT NULL REFERENCES workflow_requests(request_id) ON DELETE CASCADE,
level_id UUID REFERENCES approval_levels(level_id) ON DELETE CASCADE,
tracking_type VARCHAR(20) NOT NULL CHECK (tracking_type IN ('REQUEST', 'LEVEL')),
tat_status tat_status DEFAULT 'ON_TRACK',
total_tat_hours DECIMAL(10,2) NOT NULL,
elapsed_hours DECIMAL(10,2) DEFAULT 0,
remaining_hours DECIMAL(10,2) GENERATED ALWAYS AS (GREATEST(0, total_tat_hours - elapsed_hours)) STORED,
percentage_used DECIMAL(5,2) GENERATED ALWAYS AS (LEAST(100, (elapsed_hours / NULLIF(total_tat_hours, 0)) * 100)) STORED,
threshold_50_breached BOOLEAN DEFAULT false,
threshold_50_alerted_at TIMESTAMP WITH TIME ZONE,
threshold_80_breached BOOLEAN DEFAULT false,
threshold_80_alerted_at TIMESTAMP WITH TIME ZONE,
threshold_100_breached BOOLEAN DEFAULT false,
threshold_100_alerted_at TIMESTAMP WITH TIME ZONE,
alert_count INTEGER DEFAULT 0,
last_calculated_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP
);
-- Conclusion Remarks table
-- AI-drafted conclusion plus the user-edited final version for a request.
CREATE TABLE conclusion_remarks (
conclusion_id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
request_id UUID NOT NULL REFERENCES workflow_requests(request_id) ON DELETE CASCADE,
ai_generated_remark TEXT,
ai_model_used VARCHAR(50),
ai_confidence_score DECIMAL(3,2) CHECK (ai_confidence_score >= 0 AND ai_confidence_score <= 1),
final_remark TEXT,
edited_by UUID REFERENCES users(user_id),
is_edited BOOLEAN DEFAULT false,
edit_count INTEGER DEFAULT 0,
approval_summary JSONB DEFAULT '{}',
document_summary JSONB DEFAULT '{}',
key_discussion_points TEXT[],
generated_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
finalized_at TIMESTAMP WITH TIME ZONE
);
-- Audit Logs table
-- Generic before/after change log keyed by (entity_type, entity_id);
-- entity_id is intentionally not FK-constrained since it spans many tables.
CREATE TABLE audit_logs (
audit_id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
user_id UUID REFERENCES users(user_id),
entity_type VARCHAR(100) NOT NULL,
entity_id UUID NOT NULL,
action VARCHAR(50) NOT NULL,
action_category VARCHAR(50),
old_values JSONB,
new_values JSONB,
changes_summary TEXT,
ip_address INET,
user_agent TEXT,
session_id VARCHAR(255),
request_method VARCHAR(10),
request_url VARCHAR(500),
response_status INTEGER,
execution_time_ms INTEGER,
created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP
);
-- User Sessions table
-- NOTE(review): session_token/refresh_token appear to store raw JWTs (UNIQUE,
-- VARCHAR(500)); confirm whether hashing at rest is intended.
CREATE TABLE user_sessions (
session_id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
user_id UUID NOT NULL REFERENCES users(user_id),
session_token VARCHAR(500) UNIQUE NOT NULL,
refresh_token VARCHAR(500) UNIQUE,
ip_address INET,
user_agent TEXT,
device_type VARCHAR(20) DEFAULT 'WEB',
browser VARCHAR(50),
os VARCHAR(50),
login_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
last_activity_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
logout_at TIMESTAMP WITH TIME ZONE,
expires_at TIMESTAMP WITH TIME ZONE NOT NULL,
is_active BOOLEAN DEFAULT true,
logout_reason VARCHAR(100)
);
-- Email Logs table
-- Per-email delivery record; status progresses from 'QUEUED' (see ERD notes).
CREATE TABLE email_logs (
email_log_id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
request_id UUID REFERENCES workflow_requests(request_id) ON DELETE CASCADE,
notification_id UUID REFERENCES notifications(notification_id) ON DELETE CASCADE,
recipient_email VARCHAR(255) NOT NULL,
recipient_user_id UUID REFERENCES users(user_id),
cc_emails TEXT[],
bcc_emails TEXT[],
subject VARCHAR(500) NOT NULL,
body TEXT NOT NULL,
email_type VARCHAR(50),
status VARCHAR(20) DEFAULT 'QUEUED',
send_attempts INTEGER DEFAULT 0,
sent_at TIMESTAMP WITH TIME ZONE,
failed_at TIMESTAMP WITH TIME ZONE,
failure_reason TEXT,
opened_at TIMESTAMP WITH TIME ZONE,
clicked_at TIMESTAMP WITH TIME ZONE,
created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP
);
-- SMS Logs table
-- Mirrors email_logs for the SMS channel, plus provider id and cost tracking.
CREATE TABLE sms_logs (
sms_log_id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
request_id UUID REFERENCES workflow_requests(request_id) ON DELETE CASCADE,
notification_id UUID REFERENCES notifications(notification_id) ON DELETE CASCADE,
recipient_phone VARCHAR(20) NOT NULL,
recipient_user_id UUID REFERENCES users(user_id),
message TEXT NOT NULL,
sms_type VARCHAR(50),
status VARCHAR(20) DEFAULT 'QUEUED',
send_attempts INTEGER DEFAULT 0,
sent_at TIMESTAMP WITH TIME ZONE,
delivered_at TIMESTAMP WITH TIME ZONE,
failed_at TIMESTAMP WITH TIME ZONE,
failure_reason TEXT,
sms_provider VARCHAR(50),
sms_provider_message_id VARCHAR(100),
cost DECIMAL(10,4),
created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP
);
-- System Settings table
-- Key/value app configuration; is_sensitive marks values expected to be
-- encrypted by the application layer (not enforced here).
CREATE TABLE system_settings (
setting_id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
setting_key VARCHAR(100) UNIQUE NOT NULL,
setting_value TEXT NOT NULL,
setting_type setting_type NOT NULL,
setting_category VARCHAR(50),
description TEXT,
is_editable BOOLEAN DEFAULT true,
is_sensitive BOOLEAN DEFAULT false,
validation_rules JSONB,
default_value TEXT,
updated_by UUID REFERENCES users(user_id),
created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP
);
-- Workflow Templates table
-- Reusable approval-chain presets. approval_levels_config (JSONB) holds the
-- ordered level layout applied when a request is created from the template;
-- usage_count is an application-maintained counter (no trigger keeps it).
CREATE TABLE workflow_templates (
    template_id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
    template_name VARCHAR(200) NOT NULL,
    template_description TEXT,
    template_category VARCHAR(100),
    approval_levels_config JSONB NOT NULL,
    default_tat_hours DECIMAL(10,2) DEFAULT 24,
    is_active BOOLEAN DEFAULT true,
    usage_count INTEGER DEFAULT 0,
    created_by UUID NOT NULL REFERENCES users(user_id),
    created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
    updated_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP
);
-- Report Cache table
-- Cached report payloads keyed by type + params. expires_at is mandatory, but
-- PostgreSQL has no TTL — expired rows must be purged by the application or a
-- scheduled job (NOTE(review): no such job is visible in this schema).
CREATE TABLE report_cache (
    cache_id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
    report_type VARCHAR(100) NOT NULL,
    report_params JSONB NOT NULL,
    report_data JSONB NOT NULL,
    generated_by UUID NOT NULL REFERENCES users(user_id),
    generated_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
    expires_at TIMESTAMP WITH TIME ZONE NOT NULL,
    access_count INTEGER DEFAULT 0,
    last_accessed_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP
);
-- Create indexes for better performance
-- Single-column indexes covering the common lookup paths (by owner, status,
-- parent request, timestamps). audit_logs gets one composite index for
-- (entity_type, entity_id) lookups.
-- users
CREATE INDEX idx_users_employee_id ON users(employee_id);
CREATE INDEX idx_users_email ON users(email);
CREATE INDEX idx_users_active ON users(is_active);
-- workflow_requests
CREATE INDEX idx_workflow_requests_initiator ON workflow_requests(initiator_id);
CREATE INDEX idx_workflow_requests_status ON workflow_requests(status);
CREATE INDEX idx_workflow_requests_number ON workflow_requests(request_number);
CREATE INDEX idx_workflow_requests_created ON workflow_requests(created_at);
-- approval_levels
CREATE INDEX idx_approval_levels_request ON approval_levels(request_id);
CREATE INDEX idx_approval_levels_approver ON approval_levels(approver_id);
CREATE INDEX idx_approval_levels_status ON approval_levels(status);
-- participants
CREATE INDEX idx_participants_request ON participants(request_id);
CREATE INDEX idx_participants_user ON participants(user_id);
CREATE INDEX idx_participants_type ON participants(participant_type);
-- documents
CREATE INDEX idx_documents_request ON documents(request_id);
CREATE INDEX idx_documents_uploader ON documents(uploaded_by);
CREATE INDEX idx_documents_category ON documents(category);
-- work_notes
CREATE INDEX idx_work_notes_request ON work_notes(request_id);
CREATE INDEX idx_work_notes_user ON work_notes(user_id);
CREATE INDEX idx_work_notes_parent ON work_notes(parent_note_id);
-- activities
CREATE INDEX idx_activities_request ON activities(request_id);
CREATE INDEX idx_activities_user ON activities(user_id);
CREATE INDEX idx_activities_type ON activities(activity_type);
CREATE INDEX idx_activities_created ON activities(created_at);
-- notifications
CREATE INDEX idx_notifications_user ON notifications(user_id);
CREATE INDEX idx_notifications_request ON notifications(request_id);
CREATE INDEX idx_notifications_read ON notifications(is_read);
CREATE INDEX idx_notifications_type ON notifications(notification_type);
-- tat_tracking
CREATE INDEX idx_tat_tracking_request ON tat_tracking(request_id);
CREATE INDEX idx_tat_tracking_level ON tat_tracking(level_id);
CREATE INDEX idx_tat_tracking_status ON tat_tracking(tat_status);
-- audit_logs
CREATE INDEX idx_audit_logs_user ON audit_logs(user_id);
CREATE INDEX idx_audit_logs_entity ON audit_logs(entity_type, entity_id);
CREATE INDEX idx_audit_logs_created ON audit_logs(created_at);
-- user_sessions
CREATE INDEX idx_user_sessions_user ON user_sessions(user_id);
CREATE INDEX idx_user_sessions_token ON user_sessions(session_token);
CREATE INDEX idx_user_sessions_active ON user_sessions(is_active);
-- Create triggers for updated_at timestamps
-- Shared trigger function: stamps NEW.updated_at on every UPDATE of the
-- tables wired below. BEFORE UPDATE so the new value lands in the same write.
CREATE OR REPLACE FUNCTION update_updated_at_column()
RETURNS TRIGGER AS $$
BEGIN
    NEW.updated_at = CURRENT_TIMESTAMP;
    RETURN NEW;
END;
$$ language 'plpgsql';
-- NOTE(review): quoting the language name ('plpgsql') is deprecated syntax;
-- plain `LANGUAGE plpgsql` is preferred. Behaviour is identical.
CREATE TRIGGER update_users_updated_at BEFORE UPDATE ON users FOR EACH ROW EXECUTE FUNCTION update_updated_at_column();
CREATE TRIGGER update_workflow_requests_updated_at BEFORE UPDATE ON workflow_requests FOR EACH ROW EXECUTE FUNCTION update_updated_at_column();
CREATE TRIGGER update_approval_levels_updated_at BEFORE UPDATE ON approval_levels FOR EACH ROW EXECUTE FUNCTION update_updated_at_column();
CREATE TRIGGER update_work_notes_updated_at BEFORE UPDATE ON work_notes FOR EACH ROW EXECUTE FUNCTION update_updated_at_column();
CREATE TRIGGER update_system_settings_updated_at BEFORE UPDATE ON system_settings FOR EACH ROW EXECUTE FUNCTION update_updated_at_column();
CREATE TRIGGER update_workflow_templates_updated_at BEFORE UPDATE ON workflow_templates FOR EACH ROW EXECUTE FUNCTION update_updated_at_column();

62
docker-compose.yml Normal file
View File

@ -0,0 +1,62 @@
# docker-compose.yml
# Local development stack: PostgreSQL 16 + the Node backend.
# NOTE(review): the top-level `version` key is obsolete and ignored (with a
# warning) by Docker Compose V2 — safe to delete once older tooling is retired.
version: '3.8'
services:
  postgres:
    image: postgres:16-alpine
    container_name: re_workflow_db
    environment:
      # NOTE(review): committed fallback credentials are for local dev only —
      # supply real values through a .env file, never in this file.
      POSTGRES_USER: ${DB_USER:-laxman}
      POSTGRES_PASSWORD: ${DB_PASSWORD:-Admin@123}
      POSTGRES_DB: ${DB_NAME:-re_workflow_db}
    ports:
      - "5432:5432"
    volumes:
      - postgres_data:/var/lib/postgresql/data
      # SQL files here run once, on first start with an empty data volume.
      - ./database/schema:/docker-entrypoint-initdb.d
    networks:
      - re_workflow_network
    restart: unless-stopped
    healthcheck:
      test: ["CMD-SHELL", "pg_isready -U ${DB_USER:-laxman}"]
      interval: 10s
      timeout: 5s
      retries: 5
  backend:
    build:
      context: .
      dockerfile: Dockerfile
    container_name: re_workflow_backend
    environment:
      NODE_ENV: development
      # DB_HOST is the compose service name — resolved on the bridge network.
      DB_HOST: postgres
      DB_PORT: 5432
      DB_USER: ${DB_USER:-laxman}
      DB_PASSWORD: ${DB_PASSWORD:-Admin@123}
      DB_NAME: ${DB_NAME:-re_workflow_db}
      PORT: 5000
    ports:
      - "5000:5000"
    depends_on:
      # Wait for postgres to pass its healthcheck before starting the API.
      postgres:
        condition: service_healthy
    volumes:
      - ./logs:/app/logs
      - ./uploads:/app/uploads
    networks:
      - re_workflow_network
    restart: unless-stopped
    healthcheck:
      # Hits the Express /health endpoint defined in src/app.ts.
      test: ["CMD-SHELL", "node -e \"require('http').get('http://localhost:5000/health', (r) => {process.exit(r.statusCode === 200 ? 0 : 1)})\""]
      interval: 30s
      timeout: 10s
      retries: 3
      start_period: 40s
volumes:
  postgres_data:
networks:
  re_workflow_network:
    driver: bridge

68
env.example Normal file
View File

@ -0,0 +1,68 @@
# Application
NODE_ENV=development
PORT=5000
API_VERSION=v1
BASE_URL=http://localhost:5000
# Database
DB_HOST=localhost
DB_PORT=5432
DB_NAME=re_workflow_db
DB_USER=postgres
DB_PASSWORD=postgres
DB_SSL=false
DB_POOL_MIN=2
DB_POOL_MAX=10
# SSO Configuration (Frontend-handled)
# Backend only needs JWT secrets for token validation
JWT_SECRET=your_jwt_secret_key_here_min_32_chars
JWT_EXPIRY=24h
REFRESH_TOKEN_SECRET=your_refresh_token_secret_here
REFRESH_TOKEN_EXPIRY=7d
# Okta/Auth0 Configuration (for backend token exchange in localhost)
OKTA_DOMAIN=https://dev-830839.oktapreview.com
OKTA_CLIENT_ID=0oa2j8slwj5S4bG5k0h8
OKTA_CLIENT_SECRET=your_okta_client_secret_here
# Session
SESSION_SECRET=your_session_secret_here_min_32_chars
# Cloud Storage (GCP)
GCP_PROJECT_ID=re-workflow-project
GCP_BUCKET_NAME=re-workflow-documents
GCP_KEY_FILE=./config/gcp-key.json
# Email Service (Optional)
SMTP_HOST=smtp.gmail.com
SMTP_PORT=587
SMTP_SECURE=false
SMTP_USER=notifications@royalenfield.com
SMTP_PASSWORD=your_smtp_password
EMAIL_FROM=RE Workflow System <notifications@royalenfield.com>
# AI Service (for conclusion generation)
AI_API_KEY=your_ai_api_key
AI_MODEL=gpt-4
AI_MAX_TOKENS=500
# Logging
LOG_LEVEL=info
LOG_FILE_PATH=./logs
# CORS
CORS_ORIGIN=http://localhost:3000
# Rate Limiting
RATE_LIMIT_WINDOW_MS=900000
RATE_LIMIT_MAX_REQUESTS=100
# File Upload
MAX_FILE_SIZE_MB=10
ALLOWED_FILE_TYPES=pdf,doc,docx,xls,xlsx,ppt,pptx,jpg,jpeg,png,gif
# TAT Monitoring
TAT_CHECK_INTERVAL_MINUTES=30
TAT_REMINDER_THRESHOLD_1=50
TAT_REMINDER_THRESHOLD_2=80

27
jest.config.js Normal file
View File

@ -0,0 +1,27 @@
module.exports = {
preset: 'ts-jest',
testEnvironment: 'node',
roots: ['<rootDir>/src', '<rootDir>/tests'],
testMatch: ['**/__tests__/**/*.ts', '**/?(*.)+(spec|test).ts'],
transform: {
'^.+\\.ts$': 'ts-jest',
},
collectCoverageFrom: [
'src/**/*.ts',
'!src/**/*.d.ts',
'!src/server.ts',
],
coverageDirectory: 'coverage',
coverageReporters: ['text', 'lcov', 'html'],
setupFilesAfterEnv: ['<rootDir>/tests/setup.js'],
moduleNameMapping: {
'^@/(.*)$': '<rootDir>/src/$1',
'^@controllers/(.*)$': '<rootDir>/src/controllers/$1',
'^@services/(.*)$': '<rootDir>/src/services/$1',
'^@models/(.*)$': '<rootDir>/src/models/$1',
'^@middlewares/(.*)$': '<rootDir>/src/middlewares/$1',
'^@utils/(.*)$': '<rootDir>/src/utils/$1',
'^@types/(.*)$': '<rootDir>/src/types/$1',
'^@config/(.*)$': '<rootDir>/src/config/$1',
},
};

6
nodemon.json Normal file
View File

@ -0,0 +1,6 @@
{
"watch": ["src"],
"ext": "ts",
"ignore": ["src/**/*.spec.ts"],
"exec": "ts-node -r tsconfig-paths/register src/server.ts"
}

9199
package-lock.json generated Normal file

File diff suppressed because it is too large Load Diff

80
package.json Normal file
View File

@ -0,0 +1,80 @@
{
"name": "re-workflow-backend",
"version": "1.0.0",
"description": "Royal Enfield Workflow Management System - Backend API (TypeScript)",
"main": "dist/server.js",
"scripts": {
"start": "node dist/server.js",
"dev": "nodemon --exec ts-node -r tsconfig-paths/register src/server.ts",
"build": "tsc",
"build:watch": "tsc --watch",
"start:prod": "NODE_ENV=production node dist/server.js",
"test": "jest --coverage",
"test:unit": "jest --testPathPattern=tests/unit",
"test:integration": "jest --testPathPattern=tests/integration",
"test:watch": "jest --watch",
"lint": "eslint src/**/*.ts",
"lint:fix": "eslint src/**/*.ts --fix",
"format": "prettier --write \"src/**/*.ts\"",
"type-check": "tsc --noEmit",
"db:migrate": "sequelize-cli db:migrate",
"db:migrate:undo": "sequelize-cli db:migrate:undo",
"db:seed": "sequelize-cli db:seed:all",
"clean": "rm -rf dist"
},
"dependencies": {
"@google-cloud/storage": "^7.14.0",
"@types/uuid": "^8.3.4",
"axios": "^1.7.9",
"bcryptjs": "^2.4.3",
"cookie-parser": "^1.4.7",
"cors": "^2.8.5",
"dotenv": "^16.4.7",
"express": "^4.21.2",
"express-rate-limit": "^7.5.0",
"helmet": "^8.0.0",
"jsonwebtoken": "^9.0.2",
"morgan": "^1.10.0",
"multer": "^1.4.5-lts.1",
"node-cron": "^3.0.3",
"passport": "^0.7.0",
"passport-jwt": "^4.0.1",
"pg": "^8.13.1",
"pg-hstore": "^2.3.4",
"sequelize": "^6.37.5",
"uuid": "^8.3.2",
"winston": "^3.17.0",
"zod": "^3.24.1"
},
"devDependencies": {
"@types/bcryptjs": "^2.4.6",
"@types/cookie-parser": "^1.4.10",
"@types/cors": "^2.8.17",
"@types/express": "^5.0.0",
"@types/jest": "^29.5.14",
"@types/jsonwebtoken": "^9.0.7",
"@types/morgan": "^1.9.9",
"@types/multer": "^1.4.12",
"@types/node": "^22.10.5",
"@types/passport": "^1.0.16",
"@types/passport-jwt": "^4.0.1",
"@types/supertest": "^6.0.2",
"@typescript-eslint/eslint-plugin": "^8.19.1",
"@typescript-eslint/parser": "^8.19.1",
"eslint": "^9.17.0",
"jest": "^29.7.0",
"nodemon": "^3.1.9",
"prettier": "^3.4.2",
"sequelize-cli": "^6.6.2",
"supertest": "^7.0.0",
"ts-jest": "^29.2.5",
"ts-node": "^10.9.2",
"ts-node-dev": "^2.0.0",
"tsconfig-paths": "^4.2.0",
"typescript": "^5.7.2"
},
"engines": {
"node": ">=22.0.0",
"npm": ">=10.0.0"
}
}

12
scripts/setup.sh Normal file
View File

@ -0,0 +1,12 @@
#!/usr/bin/env bash
# Project bootstrap: create runtime directories and print next steps.
# FIX: added a shebang and `set -euo pipefail` — without them the script's
# interpreter is whatever shell invokes it, and failures (e.g. unwritable cwd)
# were silently ignored while the script kept going.
set -euo pipefail

# Create necessary directories
mkdir -p logs uploads config
# Create .gitkeep files so the empty directories survive git
touch logs/.gitkeep uploads/.gitkeep
echo "Project structure created successfully!"
echo "Next steps:"
echo "1. Copy env.example to .env and configure your environment variables"
echo "2. Install dependencies: npm install"
echo "3. Setup PostgreSQL database and run the schema"
echo "4. Start development server: npm run dev"

22
setup.bat Normal file
View File

@ -0,0 +1,22 @@
# NOTE(review): this file contains POSIX shell syntax (mkdir -p, touch, cp),
# but its .bat extension means Windows cmd.exe cannot run it. Rename it to a
# .sh script (it then duplicates scripts/setup.sh) or port it to real batch /
# PowerShell — TODO confirm the intended platform.
# Quick Setup Script
echo "Setting up RE Workflow Backend..."
# Create directories
mkdir -p logs uploads config
# Create .gitkeep files
touch logs/.gitkeep uploads/.gitkeep
# Copy environment file
cp env.example .env
echo "✅ Project structure created!"
echo ""
echo "Next steps:"
echo "1. Edit .env file with your configuration"
echo "2. Run: npm install"
echo "3. Setup PostgreSQL database"
echo "4. Run schema: psql -U postgres -d re_workflow_db -f database/schema/schema.sql"
echo "5. Start dev server: npm run dev"
echo ""
echo "🚀 Ready to go!"

184
src/app.ts Normal file
View File

@ -0,0 +1,184 @@
import express from 'express';
import helmet from 'helmet';
import morgan from 'morgan';
import dotenv from 'dotenv';
import cookieParser from 'cookie-parser';
import { UserService } from './services/user.service';
import { SSOUserData } from './types/auth.types';
import { sequelize } from './config/database';
import { corsMiddleware } from './middlewares/cors.middleware';
import routes from './routes/index';
// Load environment variables
dotenv.config();
// Express application instance and shared service singleton.
const app: express.Application = express();
const userService = new UserService();
// Initialize database connection
// Verifies DB connectivity at startup. Errors are logged and swallowed, so the
// API still boots (and serves requests) with a dead DB connection — by design
// or not; TODO confirm whether startup should fail hard instead.
const initializeDatabase = async () => {
  try {
    await sequelize.authenticate();
    console.log('✅ Database connection established successfully');
    // Sync models (create tables if they don't exist)
    // await sequelize.sync({ force: false });
    console.log('✅ Database models synchronized (sync disabled)');
  } catch (error) {
    console.error('❌ Database connection failed:', error);
  }
};
// Initialize database
// NOTE(review): floating promise — nothing awaits this, so the server may
// begin accepting requests before the connectivity check completes.
initializeDatabase();
// CORS middleware - MUST be before other middleware
app.use(corsMiddleware);
// Security middleware - Configure Helmet to work with CORS
// crossOriginResourcePolicy/EmbedderPolicy relaxed so cross-origin frontends
// can consume the API; CSP below applies to any HTML this server might serve.
app.use(helmet({
  crossOriginEmbedderPolicy: false,
  crossOriginResourcePolicy: { policy: "cross-origin" },
  contentSecurityPolicy: {
    directives: {
      defaultSrc: ["'self'"],
      styleSrc: ["'self'", "'unsafe-inline'"],
      scriptSrc: ["'self'"],
      imgSrc: ["'self'", "data:", "https:"],
    },
  },
}));
// Cookie parser middleware - MUST be before routes (auth reads token cookies)
app.use(cookieParser());
// Body parsing middleware (10mb cap matches MAX_FILE_SIZE_MB default)
app.use(express.json({ limit: '10mb' }));
app.use(express.urlencoded({ extended: true, limit: '10mb' }));
// Logging middleware
app.use(morgan('combined'));
// Health check endpoint
// Liveness probe; the Docker healthcheck in docker-compose.yml polls this URL.
app.get('/health', (_req: express.Request, res: express.Response) => {
  res.status(200).json({
    status: 'OK',
    timestamp: new Date(),
    uptime: process.uptime(),
    environment: process.env.NODE_ENV || 'development'
  });
});
// Mount API routes
app.use('/api/v1', routes);
// Root endpoint
// Simple service banner for manual smoke checks.
app.get('/', (_req: express.Request, res: express.Response) => {
  res.status(200).json({
    message: 'Royal Enfield Workflow Management System API',
    version: '1.0.0',
    status: 'running',
    timestamp: new Date()
  });
});
// Legacy SSO Callback endpoint for user creation/update (kept for backward compatibility)
// Upserts a user from SSO profile data and echoes the stored profile back.
// The canonical flow lives under /api/v1 in routes/ — this handler remains for
// older frontends; TODO confirm when it can be removed.
app.post('/api/v1/auth/sso-callback', async (req: express.Request, res: express.Response): Promise<void> => {
  try {
    const ssoData: SSOUserData = req.body;
    // Validate required fields - email and oktaSub are required
    if (!ssoData.email || !ssoData.oktaSub) {
      res.status(400).json({
        success: false,
        message: 'Missing required fields: email and oktaSub are required',
        timestamp: new Date()
      });
      return;
    }
    // Create or update user
    const user = await userService.createOrUpdateUser(ssoData);
    res.status(200).json({
      success: true,
      message: 'User processed successfully',
      data: {
        // Optional profile fields normalised to null for a stable shape.
        user: {
          userId: user.userId,
          employeeId: user.employeeId || null,
          oktaSub: user.oktaSub,
          email: user.email,
          firstName: user.firstName || null,
          lastName: user.lastName || null,
          displayName: user.displayName || null,
          department: user.department || null,
          designation: user.designation || null,
          phone: user.phone || null,
          location: user.location || null,
          isAdmin: user.isAdmin,
          lastLogin: user.lastLogin
        },
        // NOTE(review): equal created/updated timestamps as the "new user"
        // signal is fragile — an update in the same millisecond would read as
        // new. Confirm against createOrUpdateUser's semantics.
        isNewUser: user.createdAt.getTime() === user.updatedAt.getTime()
      },
      timestamp: new Date()
    });
  } catch (error) {
    console.error('SSO Callback Error:', error);
    res.status(500).json({
      success: false,
      message: 'Internal server error',
      timestamp: new Date()
    });
  }
});
// Get all users endpoint
// NOTE(review): no auth middleware is visible on this route — it returns the
// full user directory (emails, phones, departments). Confirm it is protected
// upstream or move it behind the authenticated router.
app.get('/api/v1/users', async (_req: express.Request, res: express.Response): Promise<void> => {
  try {
    const users = await userService.getAllUsers();
    res.status(200).json({
      success: true,
      message: 'Users retrieved successfully',
      data: {
        // Optional profile fields normalised to null for a stable shape.
        users: users.map(user => ({
          userId: user.userId,
          employeeId: user.employeeId || null,
          oktaSub: user.oktaSub,
          email: user.email,
          firstName: user.firstName || null,
          lastName: user.lastName || null,
          displayName: user.displayName || null,
          department: user.department || null,
          designation: user.designation || null,
          phone: user.phone || null,
          location: user.location || null,
          isAdmin: user.isAdmin,
          lastLogin: user.lastLogin,
          createdAt: user.createdAt
        })),
        total: users.length
      },
      timestamp: new Date()
    });
  } catch (error) {
    console.error('Get Users Error:', error);
    res.status(500).json({
      success: false,
      message: 'Internal server error',
      timestamp: new Date()
    });
  }
});
// 404 catch-all (registered after every route, so only unmatched URLs land here).
// NOTE(review): the original comment called this "error handling middleware",
// but a real Express error handler takes four arguments (err, req, res, next)
// and none is registered — thrown errors fall through to Express defaults.
app.use((req: express.Request, res: express.Response) => {
  res.status(404).json({
    success: false,
    message: `Route ${req.originalUrl} not found`,
    timestamp: new Date(),
  });
});
export default app;

79
src/config/constants.ts Normal file
View File

@ -0,0 +1,79 @@
/**
 * Centralised application constants. Numeric values are sourced from the
 * environment with the fallbacks documented in env.example; enum-like maps
 * mirror the VARCHAR values stored in the database schema.
 */

// Integer environment variable with a fallback. `||` (not `??`) is deliberate:
// an empty-string env value also falls back, matching the original behaviour.
const envInt = (name: string, fallback: string): number =>
  parseInt(process.env[name] || fallback, 10);

// Parsed once and reused so MB and byte limits can never drift apart.
const maxFileSizeMb = envInt('MAX_FILE_SIZE_MB', '10');

export const constants = {
  // API Configuration
  API_VERSION: process.env.API_VERSION || 'v1',
  BASE_URL: process.env.BASE_URL || 'http://localhost:5000',
  // File Upload Configuration
  MAX_FILE_SIZE_MB: maxFileSizeMb,
  MAX_FILE_SIZE_BYTES: maxFileSizeMb * 1024 * 1024,
  ALLOWED_FILE_TYPES: process.env.ALLOWED_FILE_TYPES?.split(',') || [
    'pdf', 'doc', 'docx', 'xls', 'xlsx', 'ppt', 'pptx', 'jpg', 'jpeg', 'png', 'gif'
  ],
  // TAT Configuration
  TAT_CHECK_INTERVAL_MINUTES: envInt('TAT_CHECK_INTERVAL_MINUTES', '30'),
  TAT_REMINDER_THRESHOLD_1: envInt('TAT_REMINDER_THRESHOLD_1', '50'),
  TAT_REMINDER_THRESHOLD_2: envInt('TAT_REMINDER_THRESHOLD_2', '80'),
  // Rate Limiting
  RATE_LIMIT_WINDOW_MS: envInt('RATE_LIMIT_WINDOW_MS', '900000'), // 15 minutes
  RATE_LIMIT_MAX_REQUESTS: envInt('RATE_LIMIT_MAX_REQUESTS', '100'),
  // Pagination
  DEFAULT_PAGE_SIZE: 20,
  MAX_PAGE_SIZE: 100,
  // Workflow Limits
  MAX_APPROVAL_LEVELS: 10,
  MAX_PARTICIPANTS_PER_REQUEST: 50,
  // Notification Types
  NOTIFICATION_TYPES: {
    WORKFLOW_CREATED: 'WORKFLOW_CREATED',
    WORKFLOW_SUBMITTED: 'WORKFLOW_SUBMITTED',
    APPROVAL_REQUIRED: 'APPROVAL_REQUIRED',
    APPROVAL_COMPLETED: 'APPROVAL_COMPLETED',
    WORKFLOW_APPROVED: 'WORKFLOW_APPROVED',
    WORKFLOW_REJECTED: 'WORKFLOW_REJECTED',
    WORKFLOW_CLOSED: 'WORKFLOW_CLOSED',
    TAT_REMINDER: 'TAT_REMINDER',
    TAT_BREACHED: 'TAT_BREACHED',
    DOCUMENT_UPLOADED: 'DOCUMENT_UPLOADED',
    COMMENT_ADDED: 'COMMENT_ADDED',
    PARTICIPANT_ADDED: 'PARTICIPANT_ADDED',
  },
  // Activity Types
  ACTIVITY_TYPES: {
    WORKFLOW_CREATED: 'WORKFLOW_CREATED',
    WORKFLOW_SUBMITTED: 'WORKFLOW_SUBMITTED',
    WORKFLOW_APPROVED: 'WORKFLOW_APPROVED',
    WORKFLOW_REJECTED: 'WORKFLOW_REJECTED',
    WORKFLOW_CLOSED: 'WORKFLOW_CLOSED',
    DOCUMENT_UPLOADED: 'DOCUMENT_UPLOADED',
    DOCUMENT_DELETED: 'DOCUMENT_DELETED',
    COMMENT_ADDED: 'COMMENT_ADDED',
    PARTICIPANT_ADDED: 'PARTICIPANT_ADDED',
    PARTICIPANT_REMOVED: 'PARTICIPANT_REMOVED',
    USER_LOGIN: 'USER_LOGIN',
    USER_LOGOUT: 'USER_LOGOUT',
  },
  // Document Categories
  DOCUMENT_CATEGORIES: {
    SUPPORTING: 'SUPPORTING',
    APPROVAL: 'APPROVAL',
    REFERENCE: 'REFERENCE',
    FINAL: 'FINAL',
    OTHER: 'OTHER',
  },
  // Work Note Types
  WORK_NOTE_TYPES: {
    COMMENT: 'COMMENT',
    QUESTION: 'QUESTION',
    CLARIFICATION: 'CLARIFICATION',
    UPDATE: 'UPDATE',
    SYSTEM: 'SYSTEM',
  },
};

28
src/config/database.ts Normal file
View File

@ -0,0 +1,28 @@
import { Sequelize } from 'sequelize';
import dotenv from 'dotenv';
dotenv.config();
// Singleton Sequelize instance for PostgreSQL.
// Connection, pool and SSL behaviour are environment-driven; defaults here
// mirror env.example and are for local development only.
const sequelize = new Sequelize({
  host: process.env.DB_HOST || 'localhost',
  port: parseInt(process.env.DB_PORT || '5432', 10),
  database: process.env.DB_NAME || 're_workflow_db',
  username: process.env.DB_USER || 'postgres',
  password: process.env.DB_PASSWORD || 'postgres',
  dialect: 'postgres',
  // SQL statement logging only during development; silent otherwise.
  logging: process.env.NODE_ENV === 'development' ? console.log : false,
  pool: {
    min: parseInt(process.env.DB_POOL_MIN || '2', 10),
    max: parseInt(process.env.DB_POOL_MAX || '10', 10),
    acquire: 30000, // ms to wait for a free connection before erroring
    idle: 10000, // ms a connection may sit idle before being released
  },
  dialectOptions: {
    // NOTE(review): rejectUnauthorized:false accepts any server certificate
    // when DB_SSL=true — acceptable for managed DBs with self-signed chains,
    // but confirm this is intended for production.
    ssl: process.env.DB_SSL === 'true' ? {
      require: true,
      rejectUnauthorized: false,
    } : false,
  },
});
export { sequelize };

30
src/config/email.ts Normal file
View File

@ -0,0 +1,30 @@
/**
 * Outbound email configuration: SMTP transport settings (nodemailer-compatible
 * shape), template identifiers, and delivery/retry tuning. Transport values
 * come from the environment; see env.example for the expected variables.
 */

// Transport options, assembled separately for readability.
const smtpTransport = {
  host: process.env.SMTP_HOST || 'smtp.gmail.com',
  port: parseInt(process.env.SMTP_PORT || '587', 10),
  secure: process.env.SMTP_SECURE === 'true',
  auth: {
    user: process.env.SMTP_USER || '',
    pass: process.env.SMTP_PASSWORD || '',
  },
};

export const emailConfig = {
  smtp: smtpTransport,
  from: process.env.EMAIL_FROM || 'RE Workflow System <notifications@royalenfield.com>',
  // Email templates
  // Template identifiers resolved by the mail-sending layer.
  templates: {
    workflowCreated: 'workflow-created',
    approvalRequired: 'approval-required',
    workflowApproved: 'workflow-approved',
    workflowRejected: 'workflow-rejected',
    tatReminder: 'tat-reminder',
    tatBreached: 'tat-breached',
  },
  // Email settings
  // Delivery tuning: retries per message and batch size for bulk sends.
  settings: {
    retryAttempts: 3,
    retryDelay: 5000, // 5 seconds
    batchSize: 50,
  },
};

16
src/config/sso.ts Normal file
View File

@ -0,0 +1,16 @@
import { SSOConfig, SSOUserData } from '../types/auth.types';
// String environment variable with a fallback. `||` (not `??`) is deliberate:
// an empty-string value also falls back, matching prior behaviour.
const env = (name: string, fallback = ''): string => process.env[name] || fallback;

/**
 * Runtime SSO/JWT configuration, sourced from the environment.
 * Secrets default to '' — token issuing/validation will fail loudly if the
 * corresponding variables are missing.
 */
const ssoConfig: SSOConfig = {
  jwtSecret: env('JWT_SECRET'),
  jwtExpiry: env('JWT_EXPIRY', '24h'),
  refreshTokenExpiry: env('REFRESH_TOKEN_EXPIRY', '7d'),
  sessionSecret: env('SESSION_SECRET'),
  allowedOrigins: process.env.CORS_ORIGIN?.split(',') || ['http://localhost:3000'],
  // Okta/Auth0 configuration for token exchange
  oktaDomain: env('OKTA_DOMAIN', 'https://dev-830839.oktapreview.com'),
  oktaClientId: env('OKTA_CLIENT_ID'),
  oktaClientSecret: env('OKTA_CLIENT_SECRET'),
};
export { ssoConfig };
export type { SSOUserData };

30
src/config/storage.ts Normal file
View File

@ -0,0 +1,30 @@
// Office-document and image MIME types accepted for upload; mirrors the
// ALLOWED_FILE_TYPES extension list in env.example.
const ACCEPTED_MIME_TYPES = [
  'application/pdf',
  'application/msword',
  'application/vnd.openxmlformats-officedocument.wordprocessingml.document',
  'application/vnd.ms-excel',
  'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet',
  'application/vnd.ms-powerpoint',
  'application/vnd.openxmlformats-officedocument.presentationml.presentation',
  'image/jpeg',
  'image/jpg',
  'image/png',
  'image/gif',
];

/**
 * Document storage configuration: GCP bucket credentials, upload limits and
 * the object-key prefixes used inside the bucket.
 */
export const storageConfig = {
  gcp: {
    projectId: process.env.GCP_PROJECT_ID || '',
    bucketName: process.env.GCP_BUCKET_NAME || '',
    keyFile: process.env.GCP_KEY_FILE || './config/gcp-key.json',
  },
  // File upload settings
  maxFileSize: parseInt(process.env.MAX_FILE_SIZE_MB || '10', 10) * 1024 * 1024, // Convert MB to bytes
  allowedMimeTypes: ACCEPTED_MIME_TYPES,
  // Storage paths
  paths: {
    documents: 'documents',
    attachments: 'attachments',
    temp: 'temp',
  },
};

View File

@ -0,0 +1,50 @@
import { Request, Response } from 'express';
import { ApprovalService } from '@services/approval.service';
import { validateApprovalAction } from '@validators/approval.validator';
import { ResponseHandler } from '@utils/responseHandler';
import type { AuthenticatedRequest } from '../types/express';
const approvalService = new ApprovalService();
export class ApprovalController {
  /**
   * Act on a single approval level (approve/reject per the validated payload).
   * Expects the acting user on req.user (populated by auth middleware).
   * Responds 404 when the level does not exist, 400 on validation/service errors.
   */
  async approveLevel(req: AuthenticatedRequest, res: Response): Promise<void> {
    try {
      const { levelId } = req.params;
      const validatedData = validateApprovalAction(req.body);
      const level = await approvalService.approveLevel(levelId, validatedData, req.user.userId);
      if (!level) {
        ResponseHandler.notFound(res, 'Approval level not found');
        return;
      }
      ResponseHandler.success(res, level, 'Approval level updated successfully');
    } catch (error) {
      const errorMessage = error instanceof Error ? error.message : 'Unknown error';
      ResponseHandler.error(res, 'Failed to update approval level', 400, errorMessage);
    }
  }

  /**
   * Fetch the approval level currently awaiting action for request `:id`.
   */
  async getCurrentApprovalLevel(req: Request, res: Response): Promise<void> {
    try {
      const { id } = req.params;
      const level = await approvalService.getCurrentApprovalLevel(id);
      ResponseHandler.success(res, level, 'Current approval level retrieved successfully');
    } catch (error) {
      // FIX: `error` is `unknown` in a strict-mode catch — reading
      // `error.message` directly (as before) does not compile under strict TS
      // and was inconsistent with approveLevel's narrowing above.
      const errorMessage = error instanceof Error ? error.message : 'Unknown error';
      ResponseHandler.error(res, 'Failed to get current approval level', 500, errorMessage);
    }
  }

  /**
   * Fetch every approval level configured for request `:id`.
   */
  async getApprovalLevels(req: Request, res: Response): Promise<void> {
    try {
      const { id } = req.params;
      const levels = await approvalService.getApprovalLevels(id);
      ResponseHandler.success(res, levels, 'Approval levels retrieved successfully');
    } catch (error) {
      // FIX: same strict-mode narrowing as above (was `error.message` on `unknown`).
      const errorMessage = error instanceof Error ? error.message : 'Unknown error';
      ResponseHandler.error(res, 'Failed to get approval levels', 500, errorMessage);
    }
  }
}

View File

@ -0,0 +1,315 @@
import { Request, Response } from 'express';
import { AuthService } from '../services/auth.service';
import { validateSSOCallback, validateRefreshToken, validateTokenExchange } from '../validators/auth.validator';
import { ResponseHandler } from '../utils/responseHandler';
import type { AuthenticatedRequest } from '../types/express';
import logger from '../utils/logger';
export class AuthController {
private authService: AuthService;
constructor() {
this.authService = new AuthService();
}
/**
* Handle SSO callback from frontend
* POST /api/v1/auth/sso-callback
*/
async handleSSOCallback(req: Request, res: Response): Promise<void> {
try {
// Validate request body
const validatedData = validateSSOCallback(req.body);
const result = await this.authService.handleSSOCallback(validatedData as any);
ResponseHandler.success(res, {
user: result.user,
accessToken: result.accessToken,
refreshToken: result.refreshToken
}, 'Authentication successful');
} catch (error) {
logger.error('SSO callback failed:', error);
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
ResponseHandler.error(res, 'Authentication failed', 400, errorMessage);
}
}
/**
* Get current user profile
* GET /api/v1/auth/me
*/
async getCurrentUser(req: AuthenticatedRequest, res: Response): Promise<void> {
try {
const user = await this.authService.getUserProfile(req.user.userId);
if (!user) {
ResponseHandler.notFound(res, 'User not found');
return;
}
ResponseHandler.success(res, {
userId: user.userId,
employeeId: user.employeeId,
email: user.email,
firstName: user.firstName,
lastName: user.lastName,
displayName: user.displayName,
department: user.department,
designation: user.designation,
phone: user.phone,
location: user.location,
isAdmin: user.isAdmin,
isActive: user.isActive,
lastLogin: user.lastLogin,
createdAt: user.createdAt,
updatedAt: user.updatedAt
}, 'User profile retrieved successfully');
} catch (error) {
logger.error('Failed to get current user:', error);
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
ResponseHandler.error(res, 'Failed to get user profile', 500, errorMessage);
}
}
/**
* Refresh access token
* POST /api/v1/auth/refresh
* Supports both request body and cookie-based refresh tokens
*/
async refreshToken(req: Request, res: Response): Promise<void> {
try {
// Try to get refresh token from request body first, then from cookies
let refreshToken: string;
if (req.body?.refreshToken) {
const validated = validateRefreshToken(req.body);
refreshToken = validated.refreshToken;
} else if ((req as any).cookies?.refreshToken) {
// Fallback to cookie if available (requires cookie-parser middleware)
refreshToken = (req as any).cookies.refreshToken;
} else {
throw new Error('Refresh token is required');
}
const newAccessToken = await this.authService.refreshAccessToken(refreshToken);
// Set new access token in cookie if using cookie-based auth
const isProduction = process.env.NODE_ENV === 'production';
const cookieOptions = {
httpOnly: true,
secure: isProduction,
sameSite: 'lax' as const,
maxAge: 24 * 60 * 60 * 1000, // 24 hours
};
res.cookie('accessToken', newAccessToken, cookieOptions);
ResponseHandler.success(res, {
accessToken: newAccessToken
}, 'Token refreshed successfully');
} catch (error) {
logger.error('Token refresh failed:', error);
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
ResponseHandler.error(res, 'Token refresh failed', 401, errorMessage);
}
}
/**
* Logout user
* POST /api/v1/auth/logout
* Clears all authentication cookies and tokens
* IMPORTANT: Must use EXACT same cookie options as when setting cookies
*/
async logout(req: Request, res: Response): Promise<void> {
const isProduction = process.env.NODE_ENV === 'production';
// Helper function to clear cookies with all possible option combinations
const clearCookiesCompletely = () => {
const cookieNames = ['accessToken', 'refreshToken'];
// Get the EXACT options used when setting cookies (from exchangeToken)
// These MUST match exactly: httpOnly, secure, sameSite, path
const cookieOptions = {
httpOnly: true,
secure: isProduction,
sameSite: 'lax' as const,
path: '/',
};
logger.info('Attempting to clear cookies with options:', {
httpOnly: cookieOptions.httpOnly,
secure: cookieOptions.secure,
sameSite: cookieOptions.sameSite,
path: cookieOptions.path,
isProduction,
});
// Method 1: Set expired cookie with exact same options
// This is the most reliable method - sets cookie to expire immediately
const expiredDate = new Date(0); // Jan 1, 1970
cookieNames.forEach(name => {
res.cookie(name, '', {
httpOnly: cookieOptions.httpOnly,
secure: cookieOptions.secure,
sameSite: cookieOptions.sameSite,
path: cookieOptions.path,
expires: expiredDate,
maxAge: 0,
});
logger.info(`Set expired cookie: ${name}`);
});
// Method 2: Use clearCookie with exact same options
// clearCookie requires same options that were used to set the cookie
cookieNames.forEach(name => {
res.clearCookie(name, cookieOptions);
logger.info(`Called clearCookie for: ${name}`);
});
// Method 3: Try without secure flag (for localhost/development)
if (!isProduction) {
cookieNames.forEach(name => {
res.clearCookie(name, {
httpOnly: true,
secure: false,
sameSite: 'lax',
path: '/',
});
});
}
// Method 4: Try with all possible path variations
const paths = ['/', '/api', '/api/v1'];
paths.forEach(path => {
cookieNames.forEach(name => {
res.clearCookie(name, {
httpOnly: true,
secure: isProduction,
sameSite: 'lax',
path: path,
});
});
});
logger.info('Cookies clearing attempted with all methods', {
cookieNames,
isProduction,
paths: ['/', '/api', '/api/v1'],
});
};
try {
// Logout should work even without authentication (to clear cookies)
// User might be null if token was invalid/expired
const userId = req.user?.userId || 'unknown';
const email = req.user?.email || 'unknown';
logger.info('User logout initiated', {
userId,
email,
hasUser: !!req.user,
hasCookies: !!req.cookies?.accessToken || !!req.cookies?.refreshToken,
hasHeaderToken: !!req.headers.authorization,
});
// Clear all cookies using multiple methods
clearCookiesCompletely();
logger.info('User logout successful - cookies cleared', {
userId: req.user?.userId || 'unknown',
email: req.user?.email || 'unknown',
});
// Return success response
ResponseHandler.success(res, { message: 'Logout successful, cookies cleared' }, 'Logout successful');
} catch (error) {
logger.error('Logout failed:', error);
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
// Even on error, try to clear cookies as last resort
try {
clearCookiesCompletely();
} catch (cookieError) {
logger.error('Error clearing cookies in catch block:', cookieError);
}
ResponseHandler.error(res, 'Logout failed', 500, errorMessage);
}
}
/**
* Validate token endpoint
* GET /api/v1/auth/validate
*/
async validateToken(req: AuthenticatedRequest, res: Response): Promise<void> {
try {
ResponseHandler.success(res, {
valid: true,
user: req.user
}, 'Token is valid');
} catch (error) {
logger.error('Token validation failed:', error);
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
ResponseHandler.error(res, 'Token validation failed', 401, errorMessage);
}
}
/**
* Exchange authorization code for tokens
* POST /api/v1/auth/token-exchange
*/
async exchangeToken(req: Request, res: Response): Promise<void> {
try {
logger.info('Token exchange request received', {
body: {
code: req.body?.code ? `${req.body.code.substring(0, 10)}...` : 'MISSING',
redirectUri: req.body?.redirectUri,
},
headers: req.headers,
});
const { code, redirectUri } = validateTokenExchange(req.body);
logger.info('Token exchange validation passed', { redirectUri });
const result = await this.authService.exchangeCodeForTokens(code, redirectUri);
// Set cookies with httpOnly flag for security
const isProduction = process.env.NODE_ENV === 'production';
const cookieOptions = {
httpOnly: true,
secure: isProduction,
sameSite: 'lax' as const,
maxAge: 24 * 60 * 60 * 1000, // 24 hours for access token
};
res.cookie('accessToken', result.accessToken, cookieOptions);
const refreshCookieOptions = {
...cookieOptions,
maxAge: 7 * 24 * 60 * 60 * 1000, // 7 days for refresh token
};
res.cookie('refreshToken', result.refreshToken, refreshCookieOptions);
// Ensure Content-Type is set to JSON
res.setHeader('Content-Type', 'application/json');
logger.info('Sending token exchange response', {
hasUser: !!result.user,
hasAccessToken: !!result.accessToken,
hasRefreshToken: !!result.refreshToken,
});
ResponseHandler.success(res, {
user: result.user,
accessToken: result.accessToken,
refreshToken: result.refreshToken
}, 'Token exchange successful');
} catch (error) {
logger.error('Token exchange failed:', error);
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
ResponseHandler.error(res, 'Token exchange failed', 400, errorMessage);
}
}
}

View File

@ -0,0 +1,84 @@
import { Request, Response } from 'express';
import { WorkflowService } from '@services/workflow.service';
import { validateCreateWorkflow, validateUpdateWorkflow } from '@validators/workflow.validator';
import { ResponseHandler } from '@utils/responseHandler';
import type { AuthenticatedRequest } from '../types/express';
import { Priority } from '../types/common.types';
const workflowService = new WorkflowService();
/**
 * HTTP controller for workflow CRUD and submission endpoints.
 * Each handler validates input, delegates to WorkflowService, and translates
 * the result (or failure) into the standard response envelope.
 */
export class WorkflowController {
  /** Normalize an unknown thrown value into a response-safe message (DRY'd from the four catch blocks). */
  private static errorMessage(error: unknown): string {
    return error instanceof Error ? error.message : 'Unknown error';
  }
  /** POST — create a workflow owned by the authenticated user. */
  async createWorkflow(req: AuthenticatedRequest, res: Response): Promise<void> {
    try {
      const validatedData = validateCreateWorkflow(req.body);
      // Convert string literal priority to enum
      const workflowData = {
        ...validatedData,
        priority: validatedData.priority as Priority
      };
      const workflow = await workflowService.createWorkflow(req.user.userId, workflowData);
      ResponseHandler.success(res, workflow, 'Workflow created successfully', 201);
    } catch (error) {
      ResponseHandler.error(res, 'Failed to create workflow', 400, WorkflowController.errorMessage(error));
    }
  }
  /** GET — fetch a single workflow by id, or 404 when absent. */
  async getWorkflow(req: Request, res: Response): Promise<void> {
    try {
      const { id } = req.params;
      const workflow = await workflowService.getWorkflowById(id);
      if (!workflow) {
        ResponseHandler.notFound(res, 'Workflow not found');
        return;
      }
      ResponseHandler.success(res, workflow, 'Workflow retrieved successfully');
    } catch (error) {
      ResponseHandler.error(res, 'Failed to get workflow', 500, WorkflowController.errorMessage(error));
    }
  }
  /** PUT/PATCH — apply a validated partial update to a workflow. */
  async updateWorkflow(req: Request, res: Response): Promise<void> {
    try {
      const { id } = req.params;
      const validatedData = validateUpdateWorkflow(req.body);
      // Convert string literal priority to enum if present
      const updateData = validatedData.priority
        ? { ...validatedData, priority: validatedData.priority as Priority }
        : validatedData;
      const workflow = await workflowService.updateWorkflow(id, updateData);
      if (!workflow) {
        ResponseHandler.notFound(res, 'Workflow not found');
        return;
      }
      ResponseHandler.success(res, workflow, 'Workflow updated successfully');
    } catch (error) {
      ResponseHandler.error(res, 'Failed to update workflow', 400, WorkflowController.errorMessage(error));
    }
  }
  /** POST — submit a draft workflow into the approval flow. */
  async submitWorkflow(req: Request, res: Response): Promise<void> {
    try {
      const { id } = req.params;
      const workflow = await workflowService.submitWorkflow(id);
      if (!workflow) {
        ResponseHandler.notFound(res, 'Workflow not found');
        return;
      }
      ResponseHandler.success(res, workflow, 'Workflow submitted successfully');
    } catch (error) {
      ResponseHandler.error(res, 'Failed to submit workflow', 400, WorkflowController.errorMessage(error));
    }
  }
}

View File

@ -0,0 +1,108 @@
import { Request, Response, NextFunction } from 'express';
import jwt from 'jsonwebtoken';
import { User } from '../models/User';
import { ssoConfig } from '../config/sso';
import { ResponseHandler } from '../utils/responseHandler';
// Claims this service expects inside its signed JWTs.
interface JwtPayload {
  userId: string;
  employeeId: string;
  email: string;
  role: string;
  iat: number; // issued-at, seconds since epoch (set by jsonwebtoken)
  exp: number; // expiry, seconds since epoch (set by jsonwebtoken)
}
/**
 * Express middleware that authenticates a request via JWT.
 *
 * Token lookup order: `Authorization: Bearer <token>` header, then the
 * `accessToken` cookie (requires cookie-parser). On success, attaches a
 * lightweight user summary to `req.user`; otherwise responds 401/500.
 */
export const authenticateToken = async (
  req: Request,
  res: Response,
  next: NextFunction
): Promise<void> => {
  try {
    // Try to get token from Authorization header first
    const authHeader = req.headers.authorization;
    let token = authHeader && authHeader.split(' ')[1]; // Bearer TOKEN
    // Fallback to cookie if available (requires cookie-parser middleware)
    if (!token && req.cookies?.accessToken) {
      token = req.cookies.accessToken;
    }
    if (!token) {
      ResponseHandler.unauthorized(res, 'Access token is required');
      return;
    }
    // Verify JWT token
    const decoded = jwt.verify(token, ssoConfig.jwtSecret) as JwtPayload;
    // Fetch user from database to ensure they still exist and are active
    const user = await User.findByPk(decoded.userId);
    if (!user || !user.isActive) {
      ResponseHandler.unauthorized(res, 'User not found or inactive');
      return;
    }
    // Attach user info to request object
    req.user = {
      userId: user.userId,
      email: user.email,
      employeeId: user.employeeId || null, // Optional - schema not finalized
      role: user.isAdmin ? 'admin' : 'user'
    };
    next();
  } catch (error: unknown) {
    // Narrow via Error.name instead of typing the catch variable as `any`
    // (the repo's ESLint config warns on explicit `any`). jsonwebtoken
    // throws TokenExpiredError / JsonWebTokenError subclasses of Error.
    const name = error instanceof Error ? error.name : '';
    if (name === 'TokenExpiredError') {
      ResponseHandler.unauthorized(res, 'Token has expired');
    } else if (name === 'JsonWebTokenError') {
      ResponseHandler.unauthorized(res, 'Invalid token');
    } else {
      const errorMessage = error instanceof Error ? error.message : 'Unknown error';
      ResponseHandler.error(res, 'Authentication error', 500, errorMessage);
    }
  }
};
/**
 * Gate middleware: only users whose role resolved to 'admin' may continue;
 * everyone else receives a 403. Must run after authenticateToken.
 */
export const requireAdmin = (
  req: Request,
  res: Response,
  next: NextFunction
): void => {
  const isAdmin = req.user?.role === 'admin';
  if (isAdmin) {
    next();
    return;
  }
  ResponseHandler.forbidden(res, 'Admin access required');
};
export const optionalAuth = async (
req: Request,
res: Response,
next: NextFunction
): Promise<void> => {
try {
const authHeader = req.headers.authorization;
const token = authHeader && authHeader.split(' ')[1];
if (token) {
const decoded = jwt.verify(token, ssoConfig.jwtSecret) as JwtPayload;
const user = await User.findByPk(decoded.userId);
if (user && user.isActive) {
req.user = {
userId: user.userId,
email: user.email,
employeeId: user.employeeId || null, // Optional - schema not finalized
role: user.isAdmin ? 'admin' : 'user'
};
}
}
next();
} catch (error) {
// For optional auth, we don't throw errors, just continue without user
next();
}
};

View File

@ -0,0 +1,39 @@
import cors from 'cors';
// Get allowed origins from environment variable or default to localhost
// Resolve the CORS allow-list from CORS_ORIGIN (single value or
// comma-separated list); falls back to the local dev frontend when unset.
const getOrigins = (): string[] => {
  const configured = process.env.CORS_ORIGIN;
  if (!configured) {
    return ['http://localhost:3000'];
  }
  // split() returns the whole string as a single element when no comma is
  // present, so one code path handles both formats.
  return configured.split(',').map(entry => entry.trim());
};
/**
 * Configured CORS middleware. Allowed origins come from CORS_ORIGIN;
 * credentialed requests (cookies) are permitted.
 */
export const corsMiddleware = cors({
  origin: (origin, callback) => {
    const allowedOrigins = getOrigins();
    // Requests without an Origin header (curl, server-to-server, some mobile
    // clients) are always allowed. The previous version special-cased
    // NODE_ENV === 'development' first, but an identical unconditional
    // no-origin allowance followed, making that branch dead code and its
    // "in development" comment misleading.
    if (!origin) {
      callback(null, true);
    } else if (allowedOrigins.includes(origin)) {
      callback(null, true);
    } else {
      callback(new Error('Not allowed by CORS'));
    }
  },
  credentials: true,
  methods: ['GET', 'POST', 'PUT', 'PATCH', 'DELETE', 'OPTIONS'],
  allowedHeaders: ['Content-Type', 'Authorization', 'X-Requested-With', 'Accept'],
  exposedHeaders: ['X-Total-Count', 'X-Page-Count'],
  optionsSuccessStatus: 200, // Some legacy browsers (IE11, various SmartTVs) choke on 204
});

View File

@ -0,0 +1,42 @@
import { Request, Response, NextFunction } from 'express';
import logger from '../utils/logger';
/**
 * Global error handler: logs the failure with request context and returns a
 * generic 500 envelope (no internals leak to the client).
 * NOTE: Express identifies error middleware by its 4-argument signature, so
 * `next` must remain in the parameter list even though it is unused.
 */
export const errorHandlerMiddleware = (
  error: Error,
  req: Request,
  res: Response,
  next: NextFunction
): void => {
  const context = {
    error: error.message,
    stack: error.stack,
    url: req.url,
    method: req.method,
    ip: req.ip,
  };
  logger.error('Error occurred:', context);
  const payload = {
    success: false,
    message: 'Internal Server Error',
    timestamp: new Date(),
  };
  res.status(500).json(payload);
};
/** Catch-all 404 handler: logs the miss and returns the standard envelope. */
export const notFoundMiddleware = (
  req: Request,
  res: Response,
  next: NextFunction
): void => {
  logger.warn(`Route not found: ${req.method} ${req.originalUrl}`);
  const payload = {
    success: false,
    message: `Route ${req.originalUrl} not found`,
    timestamp: new Date(),
  };
  res.status(404).json(payload);
};
/**
 * Wraps an (async) route handler so that rejected promises are forwarded to
 * Express's error-handling middleware instead of being swallowed.
 * Typed precisely instead of using the banned bare `Function` type.
 */
export const asyncHandler = (
  fn: (req: Request, res: Response, next: NextFunction) => unknown
) => {
  return (req: Request, res: Response, next: NextFunction): void => {
    Promise.resolve(fn(req, res, next)).catch(next);
  };
};

View File

@ -0,0 +1,13 @@
import rateLimit from 'express-rate-limit';
/**
 * IP-based rate limiter. Window length and per-window request cap are
 * tunable via RATE_LIMIT_WINDOW_MS (default 15 min) and
 * RATE_LIMIT_MAX_REQUESTS (default 100).
 */
export const rateLimiter = rateLimit({
  windowMs: parseInt(process.env.RATE_LIMIT_WINDOW_MS || '900000', 10),
  max: parseInt(process.env.RATE_LIMIT_MAX_REQUESTS || '100', 10),
  // `message` is a function so the timestamp reflects each rejected request.
  // The previous object literal evaluated `new Date()` once at module load,
  // so every 429 response carried the server's start time.
  message: () => ({
    success: false,
    message: 'Too many requests from this IP, please try again later.',
    timestamp: new Date(),
  }),
  standardHeaders: true,
  legacyHeaders: false,
});

View File

@ -0,0 +1,91 @@
import { Request, Response, NextFunction } from 'express';
import { ZodSchema, ZodError } from 'zod';
import { ResponseHandler } from '../utils/responseHandler';
/**
 * Builds middleware that validates the combined { body, query, params }
 * envelope against the given Zod schema. The parsed result is discarded;
 * only validity is checked. Zod issues produce a 422-style validation
 * response; anything else becomes a 400.
 */
export const validateRequest = (schema: ZodSchema) => {
  return (req: Request, res: Response, next: NextFunction) => {
    try {
      const candidate = {
        body: req.body,
        query: req.query,
        params: req.params,
      };
      schema.parse(candidate);
      next();
    } catch (error) {
      if (error instanceof ZodError) {
        const details = error.errors.map(issue => ({
          field: issue.path.join('.'),
          message: issue.message,
        }));
        ResponseHandler.validationError(res, 'Validation failed', details);
        return;
      }
      const message = error instanceof Error ? error.message : 'Unknown error';
      ResponseHandler.error(res, 'Validation error', 400, message);
    }
  };
};
/**
 * Builds middleware that validates req.body against the given Zod schema and
 * replaces req.body with the parsed (coerced/stripped) result on success.
 */
export const validateBody = (schema: ZodSchema) => {
  return (req: Request, res: Response, next: NextFunction) => {
    try {
      const parsed = schema.parse(req.body);
      req.body = parsed;
      next();
    } catch (error) {
      if (error instanceof ZodError) {
        const details = error.errors.map(issue => ({
          field: issue.path.join('.'),
          message: issue.message,
        }));
        ResponseHandler.validationError(res, 'Request body validation failed', details);
        return;
      }
      const message = error instanceof Error ? error.message : 'Unknown error';
      ResponseHandler.error(res, 'Validation error', 400, message);
    }
  };
};
/**
 * Builds middleware that validates req.query against the given Zod schema
 * and replaces req.query with the parsed result on success.
 */
export const validateQuery = (schema: ZodSchema) => {
  return (req: Request, res: Response, next: NextFunction) => {
    try {
      const parsed = schema.parse(req.query);
      req.query = parsed;
      next();
    } catch (error) {
      if (error instanceof ZodError) {
        const details = error.errors.map(issue => ({
          field: issue.path.join('.'),
          message: issue.message,
        }));
        ResponseHandler.validationError(res, 'Query parameters validation failed', details);
        return;
      }
      const message = error instanceof Error ? error.message : 'Unknown error';
      ResponseHandler.error(res, 'Validation error', 400, message);
    }
  };
};
/**
 * Builds middleware that validates req.params against the given Zod schema
 * and replaces req.params with the parsed result on success.
 */
export const validateParams = (schema: ZodSchema) => {
  return (req: Request, res: Response, next: NextFunction) => {
    try {
      const parsed = schema.parse(req.params);
      req.params = parsed;
      next();
    } catch (error) {
      if (error instanceof ZodError) {
        const details = error.errors.map(issue => ({
          field: issue.path.join('.'),
          message: issue.message,
        }));
        ResponseHandler.validationError(res, 'URL parameters validation failed', details);
        return;
      }
      const message = error instanceof Error ? error.message : 'Unknown error';
      ResponseHandler.error(res, 'Validation error', 400, message);
    }
  };
};

216
src/models/ApprovalLevel.ts Normal file
View File

@ -0,0 +1,216 @@
import { DataTypes, Model, Optional } from 'sequelize';
import { sequelize } from '@config/database';
import { User } from './User';
import { WorkflowRequest } from './WorkflowRequest';
import { ApprovalStatus } from '../types/common.types';
// Attribute shape for one approval step of a workflow request.
// TAT = turnaround time; elapsed/remaining/percentage fields track SLA usage.
interface ApprovalLevelAttributes {
  levelId: string;
  requestId: string;
  levelNumber: number;
  levelName?: string;
  approverId: string;
  approverEmail: string;
  approverName: string;
  tatHours: number;
  tatDays: number;
  status: ApprovalStatus;
  levelStartTime?: Date;
  levelEndTime?: Date;
  actionDate?: Date;
  comments?: string;
  rejectionReason?: string;
  isFinalApprover: boolean;
  elapsedHours: number;
  remainingHours: number;
  tatPercentageUsed: number;
  createdAt: Date;
  updatedAt: Date;
}
// Fields that may be omitted at creation time (generated ids/timestamps and
// action-dependent columns filled in as the level progresses).
interface ApprovalLevelCreationAttributes extends Optional<ApprovalLevelAttributes, 'levelId' | 'levelName' | 'levelStartTime' | 'levelEndTime' | 'actionDate' | 'comments' | 'rejectionReason' | 'createdAt' | 'updatedAt'> {}
// Sequelize model class. `request`/`approver` are populated only when the
// corresponding associations are eagerly loaded.
class ApprovalLevel extends Model<ApprovalLevelAttributes, ApprovalLevelCreationAttributes> implements ApprovalLevelAttributes {
  public levelId!: string;
  public requestId!: string;
  public levelNumber!: number;
  public levelName?: string;
  public approverId!: string;
  public approverEmail!: string;
  public approverName!: string;
  public tatHours!: number;
  public tatDays!: number;
  public status!: ApprovalStatus;
  public levelStartTime?: Date;
  public levelEndTime?: Date;
  public actionDate?: Date;
  public comments?: string;
  public rejectionReason?: string;
  public isFinalApprover!: boolean;
  public elapsedHours!: number;
  public remainingHours!: number;
  public tatPercentageUsed!: number;
  public createdAt!: Date;
  public updatedAt!: Date;
  // Associations
  public request?: WorkflowRequest;
  public approver?: User;
}
// Table definition for approval_levels. Database columns are snake_case and
// mapped to camelCase attributes via `field`.
ApprovalLevel.init(
  {
    levelId: {
      type: DataTypes.UUID,
      defaultValue: DataTypes.UUIDV4,
      primaryKey: true,
      field: 'level_id'
    },
    requestId: {
      type: DataTypes.UUID,
      allowNull: false,
      field: 'request_id',
      references: {
        model: 'workflow_requests',
        key: 'request_id'
      }
    },
    levelNumber: {
      type: DataTypes.INTEGER,
      allowNull: false,
      field: 'level_number'
    },
    levelName: {
      type: DataTypes.STRING(100),
      allowNull: true,
      field: 'level_name'
    },
    approverId: {
      type: DataTypes.UUID,
      allowNull: false,
      field: 'approver_id',
      references: {
        model: 'users',
        key: 'user_id'
      }
    },
    approverEmail: {
      type: DataTypes.STRING(255),
      allowNull: false,
      field: 'approver_email'
    },
    approverName: {
      type: DataTypes.STRING(200),
      allowNull: false,
      field: 'approver_name'
    },
    tatHours: {
      type: DataTypes.DECIMAL(10, 2),
      allowNull: false,
      field: 'tat_hours'
    },
    tatDays: {
      type: DataTypes.INTEGER,
      allowNull: false,
      field: 'tat_days'
    },
    status: {
      type: DataTypes.ENUM('PENDING', 'IN_PROGRESS', 'APPROVED', 'REJECTED', 'SKIPPED'),
      defaultValue: 'PENDING'
    },
    levelStartTime: {
      type: DataTypes.DATE,
      allowNull: true,
      field: 'level_start_time'
    },
    levelEndTime: {
      type: DataTypes.DATE,
      allowNull: true,
      field: 'level_end_time'
    },
    actionDate: {
      type: DataTypes.DATE,
      allowNull: true,
      field: 'action_date'
    },
    comments: {
      type: DataTypes.TEXT,
      allowNull: true
    },
    rejectionReason: {
      type: DataTypes.TEXT,
      allowNull: true,
      field: 'rejection_reason'
    },
    isFinalApprover: {
      type: DataTypes.BOOLEAN,
      defaultValue: false,
      field: 'is_final_approver'
    },
    // SLA tracking counters; presumably updated by a background job or on
    // each approval action — TODO confirm where these are recalculated.
    elapsedHours: {
      type: DataTypes.DECIMAL(10, 2),
      defaultValue: 0,
      field: 'elapsed_hours'
    },
    remainingHours: {
      type: DataTypes.DECIMAL(10, 2),
      defaultValue: 0,
      field: 'remaining_hours'
    },
    tatPercentageUsed: {
      type: DataTypes.DECIMAL(5, 2),
      defaultValue: 0,
      field: 'tat_percentage_used'
    },
    createdAt: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW,
      field: 'created_at'
    },
    updatedAt: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW,
      field: 'updated_at'
    }
  },
  {
    sequelize,
    modelName: 'ApprovalLevel',
    tableName: 'approval_levels',
    timestamps: true,
    createdAt: 'created_at',
    updatedAt: 'updated_at',
    indexes: [
      {
        fields: ['request_id']
      },
      {
        fields: ['approver_id']
      },
      {
        fields: ['status']
      },
      // A request cannot have two approval rows at the same level.
      {
        unique: true,
        fields: ['request_id', 'level_number']
      }
    ]
  }
);
// Associations
// NOTE: the 'request' and 'approver' associations are defined centrally in
// src/models/index.ts (defineAssociations). Declaring them here as well
// registered the same aliases twice, which makes Sequelize throw an
// AssociationError ("used the alias ... in two separate associations") at
// startup. Keep all association wiring in models/index.ts.
export { ApprovalLevel };

217
src/models/Document.ts Normal file
View File

@ -0,0 +1,217 @@
import { DataTypes, Model, Optional } from 'sequelize';
import { sequelize } from '@config/database';
import { User } from './User';
import { WorkflowRequest } from './WorkflowRequest';
// Attribute shape for an uploaded (or Google Docs-linked) file attached to a
// workflow request. Versioning is modelled via parentDocumentId chains.
interface DocumentAttributes {
  documentId: string;
  requestId: string;
  uploadedBy: string;
  fileName: string;
  originalFileName: string;
  fileType: string;
  fileExtension: string;
  fileSize: number;
  filePath: string;
  storageUrl?: string;
  mimeType: string;
  checksum: string;
  isGoogleDoc: boolean;
  googleDocUrl?: string;
  category: string;
  version: number;
  parentDocumentId?: string;
  isDeleted: boolean;
  downloadCount: number;
  uploadedAt: Date;
}
// Fields optional at creation time (generated id/timestamp and link fields).
interface DocumentCreationAttributes extends Optional<DocumentAttributes, 'documentId' | 'storageUrl' | 'googleDocUrl' | 'parentDocumentId' | 'uploadedAt'> {}
// Sequelize model class; association properties are populated only on eager load.
class Document extends Model<DocumentAttributes, DocumentCreationAttributes> implements DocumentAttributes {
  public documentId!: string;
  public requestId!: string;
  public uploadedBy!: string;
  public fileName!: string;
  public originalFileName!: string;
  public fileType!: string;
  public fileExtension!: string;
  public fileSize!: number;
  public filePath!: string;
  public storageUrl?: string;
  public mimeType!: string;
  public checksum!: string;
  public isGoogleDoc!: boolean;
  public googleDocUrl?: string;
  public category!: string;
  public version!: number;
  public parentDocumentId?: string;
  public isDeleted!: boolean;
  public downloadCount!: number;
  public uploadedAt!: Date;
  // Associations
  public request?: WorkflowRequest;
  public uploader?: User;
  public parentDocument?: Document;
}
// Table definition for documents. Soft-delete via is_deleted; no updatedAt
// (timestamps disabled, only uploaded_at is recorded).
Document.init(
  {
    documentId: {
      type: DataTypes.UUID,
      defaultValue: DataTypes.UUIDV4,
      primaryKey: true,
      field: 'document_id'
    },
    requestId: {
      type: DataTypes.UUID,
      allowNull: false,
      field: 'request_id',
      references: {
        model: 'workflow_requests',
        key: 'request_id'
      }
    },
    uploadedBy: {
      type: DataTypes.UUID,
      allowNull: false,
      field: 'uploaded_by',
      references: {
        model: 'users',
        key: 'user_id'
      }
    },
    fileName: {
      type: DataTypes.STRING(255),
      allowNull: false,
      field: 'file_name'
    },
    originalFileName: {
      type: DataTypes.STRING(255),
      allowNull: false,
      field: 'original_file_name'
    },
    fileType: {
      type: DataTypes.STRING(100),
      allowNull: false,
      field: 'file_type'
    },
    fileExtension: {
      type: DataTypes.STRING(10),
      allowNull: false,
      field: 'file_extension'
    },
    fileSize: {
      type: DataTypes.BIGINT,
      allowNull: false,
      field: 'file_size',
      validate: {
        max: 10485760 // 10MB limit
      }
    },
    filePath: {
      type: DataTypes.STRING(500),
      allowNull: false,
      field: 'file_path'
    },
    storageUrl: {
      type: DataTypes.STRING(500),
      allowNull: true,
      field: 'storage_url'
    },
    mimeType: {
      type: DataTypes.STRING(100),
      allowNull: false,
      field: 'mime_type'
    },
    // 64 chars — presumably a SHA-256 hex digest; confirm against upload code.
    checksum: {
      type: DataTypes.STRING(64),
      allowNull: false
    },
    isGoogleDoc: {
      type: DataTypes.BOOLEAN,
      defaultValue: false,
      field: 'is_google_doc'
    },
    googleDocUrl: {
      type: DataTypes.STRING(500),
      allowNull: true,
      field: 'google_doc_url'
    },
    category: {
      type: DataTypes.ENUM('SUPPORTING', 'APPROVAL', 'REFERENCE', 'FINAL', 'OTHER'),
      defaultValue: 'OTHER'
    },
    version: {
      type: DataTypes.INTEGER,
      defaultValue: 1
    },
    // Self-reference for document version chains.
    parentDocumentId: {
      type: DataTypes.UUID,
      allowNull: true,
      field: 'parent_document_id',
      references: {
        model: 'documents',
        key: 'document_id'
      }
    },
    isDeleted: {
      type: DataTypes.BOOLEAN,
      defaultValue: false,
      field: 'is_deleted'
    },
    downloadCount: {
      type: DataTypes.INTEGER,
      defaultValue: 0,
      field: 'download_count'
    },
    uploadedAt: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW,
      field: 'uploaded_at'
    }
  },
  {
    sequelize,
    modelName: 'Document',
    tableName: 'documents',
    timestamps: false,
    indexes: [
      {
        fields: ['request_id']
      },
      {
        fields: ['uploaded_by']
      },
      {
        fields: ['category']
      },
      {
        fields: ['is_deleted']
      }
    ]
  }
);
// Associations
// NOTE: the 'request', 'uploader' and 'parentDocument' associations are
// defined centrally in src/models/index.ts (defineAssociations). Declaring
// them here as well registered the same aliases twice, which makes Sequelize
// throw an AssociationError ("used the alias ... in two separate
// associations") at startup. Keep all association wiring in models/index.ts.
export { Document };

170
src/models/Participant.ts Normal file
View File

@ -0,0 +1,170 @@
import { DataTypes, Model, Optional } from 'sequelize';
import { sequelize } from '@config/database';
import { User } from './User';
import { WorkflowRequest } from './WorkflowRequest';
import { ParticipantType } from '../types/common.types';
// Attribute shape for a user attached to a workflow request, with per-user
// permission flags (commenting, document view/download, notifications).
interface ParticipantAttributes {
  participantId: string;
  requestId: string;
  userId: string;
  userEmail: string;
  userName: string;
  participantType: ParticipantType;
  canComment: boolean;
  canViewDocuments: boolean;
  canDownloadDocuments: boolean;
  notificationEnabled: boolean;
  addedBy: string;
  addedAt: Date;
  isActive: boolean;
}
// Fields optional at creation time (generated id and timestamp).
interface ParticipantCreationAttributes extends Optional<ParticipantAttributes, 'participantId' | 'addedAt'> {}
// Sequelize model class; association properties are populated only on eager load.
class Participant extends Model<ParticipantAttributes, ParticipantCreationAttributes> implements ParticipantAttributes {
  public participantId!: string;
  public requestId!: string;
  public userId!: string;
  public userEmail!: string;
  public userName!: string;
  public participantType!: ParticipantType;
  public canComment!: boolean;
  public canViewDocuments!: boolean;
  public canDownloadDocuments!: boolean;
  public notificationEnabled!: boolean;
  public addedBy!: string;
  public addedAt!: Date;
  public isActive!: boolean;
  // Associations
  public request?: WorkflowRequest;
  public user?: User;
  public addedByUser?: User;
}
// Table definition for participants; a user may appear at most once per
// request (composite unique index on request_id + user_id).
Participant.init(
  {
    participantId: {
      type: DataTypes.UUID,
      defaultValue: DataTypes.UUIDV4,
      primaryKey: true,
      field: 'participant_id'
    },
    requestId: {
      type: DataTypes.UUID,
      allowNull: false,
      field: 'request_id',
      references: {
        model: 'workflow_requests',
        key: 'request_id'
      }
    },
    userId: {
      type: DataTypes.UUID,
      allowNull: false,
      field: 'user_id',
      references: {
        model: 'users',
        key: 'user_id'
      }
    },
    userEmail: {
      type: DataTypes.STRING(255),
      allowNull: false,
      field: 'user_email'
    },
    userName: {
      type: DataTypes.STRING(200),
      allowNull: false,
      field: 'user_name'
    },
    participantType: {
      type: DataTypes.ENUM('SPECTATOR', 'INITIATOR', 'APPROVER', 'CONSULTATION'),
      allowNull: false,
      field: 'participant_type'
    },
    canComment: {
      type: DataTypes.BOOLEAN,
      defaultValue: true,
      field: 'can_comment'
    },
    canViewDocuments: {
      type: DataTypes.BOOLEAN,
      defaultValue: true,
      field: 'can_view_documents'
    },
    // Download defaults to off, unlike viewing/commenting.
    canDownloadDocuments: {
      type: DataTypes.BOOLEAN,
      defaultValue: false,
      field: 'can_download_documents'
    },
    notificationEnabled: {
      type: DataTypes.BOOLEAN,
      defaultValue: true,
      field: 'notification_enabled'
    },
    addedBy: {
      type: DataTypes.UUID,
      allowNull: false,
      field: 'added_by',
      references: {
        model: 'users',
        key: 'user_id'
      }
    },
    addedAt: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW,
      field: 'added_at'
    },
    isActive: {
      type: DataTypes.BOOLEAN,
      defaultValue: true,
      field: 'is_active'
    }
  },
  {
    sequelize,
    modelName: 'Participant',
    tableName: 'participants',
    timestamps: false,
    indexes: [
      {
        fields: ['request_id']
      },
      {
        fields: ['user_id']
      },
      {
        fields: ['participant_type']
      },
      {
        unique: true,
        fields: ['request_id', 'user_id']
      }
    ]
  }
);
// Associations
// NOTE: the 'request', 'user' and 'addedByUser' associations are defined
// centrally in src/models/index.ts (defineAssociations). Declaring them here
// as well registered the same aliases twice, which makes Sequelize throw an
// AssociationError ("used the alias ... in two separate associations") at
// startup. Keep all association wiring in models/index.ts.
export { Participant };

190
src/models/User.ts Normal file
View File

@ -0,0 +1,190 @@
import { DataTypes, Model, Optional } from 'sequelize';
import { sequelize } from '../config/database';
// Attribute shape for an application user provisioned via Okta SSO.
// Email is the primary business identifier; employeeId is optional.
interface UserAttributes {
  userId: string;
  employeeId?: string | null;
  oktaSub: string;
  email: string;
  firstName?: string | null;
  lastName?: string | null;
  displayName?: string | null;
  department?: string | null;
  designation?: string | null;
  phone?: string | null;
  // Location Information (JSON object)
  location?: {
    city?: string;
    state?: string;
    country?: string;
    office?: string;
    timezone?: string;
  };
  isActive: boolean;
  isAdmin: boolean;
  lastLogin?: Date;
  createdAt: Date;
  updatedAt: Date;
}
// Fields optional at creation time (generated id/timestamps and
// not-yet-known profile fields).
interface UserCreationAttributes extends Optional<UserAttributes, 'userId' | 'employeeId' | 'department' | 'designation' | 'phone' | 'lastLogin' | 'createdAt' | 'updatedAt'> {}
/**
 * Sequelize model class for users. Association properties are declared in
 * models/index.ts; none are mirrored here.
 */
class User extends Model<UserAttributes, UserCreationAttributes> implements UserAttributes {
  public userId!: string;
  public employeeId?: string | null;
  public oktaSub!: string;
  public email!: string;
  public firstName?: string | null;
  public lastName?: string | null;
  public displayName?: string | null;
  // Typed `string | null` to match UserAttributes exactly (the previous
  // declarations dropped `null` for these three, diverging from the
  // interface). Type-only change; runtime behavior is unaffected.
  public department?: string | null;
  public designation?: string | null;
  public phone?: string | null;
  // Location Information (JSON object)
  public location?: {
    city?: string;
    state?: string;
    country?: string;
    office?: string;
    timezone?: string;
  };
  public isActive!: boolean;
  public isAdmin!: boolean;
  public lastLogin?: Date;
  public createdAt!: Date;
  public updatedAt!: Date;
  // Associations
}
// Table definition for users. okta_sub and email are unique identifiers;
// employee_id is a non-unique, optional HR reference.
User.init(
  {
    userId: {
      type: DataTypes.UUID,
      defaultValue: DataTypes.UUIDV4,
      primaryKey: true,
      field: 'user_id'
    },
    employeeId: {
      type: DataTypes.STRING(50),
      allowNull: true, // Made optional - email is now primary identifier
      field: 'employee_id',
      comment: 'HR System Employee ID (optional)'
    },
    oktaSub: {
      type: DataTypes.STRING(100),
      allowNull: false,
      unique: true,
      field: 'okta_sub',
      comment: 'Okta user sub (subject identifier) - unique identifier from Okta'
    },
    email: {
      type: DataTypes.STRING(255),
      allowNull: false,
      unique: true,
      validate: {
        isEmail: true
      }
    },
    firstName: {
      type: DataTypes.STRING(100),
      allowNull: true, // Made optional - can be derived from displayName if needed
      defaultValue: '',
      field: 'first_name'
    },
    lastName: {
      type: DataTypes.STRING(100),
      allowNull: true, // Made optional - can be derived from displayName if needed
      defaultValue: '',
      field: 'last_name'
    },
    displayName: {
      type: DataTypes.STRING(200),
      allowNull: true, // Made optional - can be generated from firstName + lastName if needed
      defaultValue: '',
      field: 'display_name',
      comment: 'Full Name for display'
    },
    department: {
      type: DataTypes.STRING(100),
      allowNull: true
    },
    designation: {
      type: DataTypes.STRING(100),
      allowNull: true
    },
    phone: {
      type: DataTypes.STRING(20),
      allowNull: true
    },
    // Location Information (JSON object)
    location: {
      type: DataTypes.JSONB, // Use JSONB for PostgreSQL
      allowNull: true,
      comment: 'JSON object containing location details (city, state, country, office, timezone)'
    },
    isActive: {
      type: DataTypes.BOOLEAN,
      defaultValue: true,
      field: 'is_active',
      comment: 'Account status'
    },
    isAdmin: {
      type: DataTypes.BOOLEAN,
      defaultValue: false,
      field: 'is_admin',
      comment: 'Super user flag'
    },
    lastLogin: {
      type: DataTypes.DATE,
      allowNull: true,
      field: 'last_login'
    },
    createdAt: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW,
      field: 'created_at'
    },
    updatedAt: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW,
      field: 'updated_at'
    }
  },
  {
    sequelize,
    modelName: 'User',
    tableName: 'users',
    timestamps: true,
    createdAt: 'created_at',
    updatedAt: 'updated_at',
    indexes: [
      {
        unique: true,
        fields: ['okta_sub']
      },
      {
        unique: true,
        fields: ['email']
      },
      {
        fields: ['employee_id'] // Non-unique index for employee_id (now optional)
      },
      {
        fields: ['department']
      },
      {
        fields: ['is_active']
      },
      // GIN index for JSONB containment queries on location.
      // NOTE(review): `operator` at the index level (rather than per-field)
      // — confirm the installed Sequelize version honors this option.
      {
        fields: ['location'],
        using: 'gin', // GIN index for JSONB queries
        operator: 'jsonb_path_ops'
      }
    ]
  }
);
export { User };

View File

@ -0,0 +1,192 @@
import { DataTypes, Model, Optional } from 'sequelize';
import { sequelize } from '@config/database';
import { User } from './User';
import { Priority, WorkflowStatus } from '../types/common.types';
// Attribute shape for a workflow (approval) request. Drafts become active on
// submission; currentLevel/totalLevels track progress through approvals.
interface WorkflowRequestAttributes {
  requestId: string;
  requestNumber: string;
  initiatorId: string;
  templateType: 'CUSTOM' | 'TEMPLATE';
  title: string;
  description: string;
  priority: Priority;
  status: WorkflowStatus;
  currentLevel: number;
  totalLevels: number;
  totalTatHours: number;
  submissionDate?: Date;
  closureDate?: Date;
  conclusionRemark?: string;
  aiGeneratedConclusion?: string;
  isDraft: boolean;
  isDeleted: boolean;
  createdAt: Date;
  updatedAt: Date;
}
// Fields optional at creation time (generated id/timestamps and
// lifecycle fields only set later).
interface WorkflowRequestCreationAttributes extends Optional<WorkflowRequestAttributes, 'requestId' | 'submissionDate' | 'closureDate' | 'conclusionRemark' | 'aiGeneratedConclusion' | 'createdAt' | 'updatedAt'> {}
// Sequelize model class; `initiator` is populated only on eager load.
class WorkflowRequest extends Model<WorkflowRequestAttributes, WorkflowRequestCreationAttributes> implements WorkflowRequestAttributes {
  public requestId!: string;
  public requestNumber!: string;
  public initiatorId!: string;
  public templateType!: 'CUSTOM' | 'TEMPLATE';
  public title!: string;
  public description!: string;
  public priority!: Priority;
  public status!: WorkflowStatus;
  public currentLevel!: number;
  public totalLevels!: number;
  public totalTatHours!: number;
  public submissionDate?: Date;
  public closureDate?: Date;
  public conclusionRemark?: string;
  public aiGeneratedConclusion?: string;
  public isDraft!: boolean;
  public isDeleted!: boolean;
  public createdAt!: Date;
  public updatedAt!: Date;
  // Associations
  public initiator?: User;
}
// Table definition for workflow_requests. request_number is a unique,
// human-readable identifier; soft-delete via is_deleted.
WorkflowRequest.init(
  {
    requestId: {
      type: DataTypes.UUID,
      defaultValue: DataTypes.UUIDV4,
      primaryKey: true,
      field: 'request_id'
    },
    requestNumber: {
      type: DataTypes.STRING(20),
      allowNull: false,
      unique: true,
      field: 'request_number'
    },
    initiatorId: {
      type: DataTypes.UUID,
      allowNull: false,
      field: 'initiator_id',
      references: {
        model: 'users',
        key: 'user_id'
      }
    },
    templateType: {
      type: DataTypes.STRING(20),
      defaultValue: 'CUSTOM',
      field: 'template_type'
    },
    title: {
      type: DataTypes.STRING(500),
      allowNull: false
    },
    description: {
      type: DataTypes.TEXT,
      allowNull: false
    },
    priority: {
      type: DataTypes.ENUM('STANDARD', 'EXPRESS'),
      defaultValue: 'STANDARD'
    },
    status: {
      type: DataTypes.ENUM('DRAFT', 'PENDING', 'IN_PROGRESS', 'APPROVED', 'REJECTED', 'CLOSED'),
      defaultValue: 'DRAFT'
    },
    currentLevel: {
      type: DataTypes.INTEGER,
      defaultValue: 1,
      field: 'current_level'
    },
    // Approval chains are capped at 10 levels.
    totalLevels: {
      type: DataTypes.INTEGER,
      defaultValue: 1,
      field: 'total_levels',
      validate: {
        max: 10
      }
    },
    totalTatHours: {
      type: DataTypes.DECIMAL(10, 2),
      defaultValue: 0,
      field: 'total_tat_hours'
    },
    submissionDate: {
      type: DataTypes.DATE,
      allowNull: true,
      field: 'submission_date'
    },
    closureDate: {
      type: DataTypes.DATE,
      allowNull: true,
      field: 'closure_date'
    },
    conclusionRemark: {
      type: DataTypes.TEXT,
      allowNull: true,
      field: 'conclusion_remark'
    },
    aiGeneratedConclusion: {
      type: DataTypes.TEXT,
      allowNull: true,
      field: 'ai_generated_conclusion'
    },
    isDraft: {
      type: DataTypes.BOOLEAN,
      defaultValue: true,
      field: 'is_draft'
    },
    isDeleted: {
      type: DataTypes.BOOLEAN,
      defaultValue: false,
      field: 'is_deleted'
    },
    createdAt: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW,
      field: 'created_at'
    },
    updatedAt: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW,
      field: 'updated_at'
    }
  },
  {
    sequelize,
    modelName: 'WorkflowRequest',
    tableName: 'workflow_requests',
    timestamps: true,
    createdAt: 'created_at',
    updatedAt: 'updated_at',
    indexes: [
      {
        fields: ['initiator_id']
      },
      {
        fields: ['status']
      },
      {
        unique: true,
        fields: ['request_number']
      },
      {
        fields: ['created_at']
      }
    ]
  }
);
// Associations
// NOTE(review): unlike the other models, this 'initiator' association is NOT
// duplicated in src/models/index.ts (index.ts only declares the inverse
// User.hasMany side), so it must remain here.
WorkflowRequest.belongsTo(User, {
  as: 'initiator',
  foreignKey: 'initiatorId',
  targetKey: 'userId'
});
export { WorkflowRequest };

128
src/models/index.ts Normal file
View File

@ -0,0 +1,128 @@
import { sequelize } from '@config/database';
// Import all models
import { User } from './User';
import { WorkflowRequest } from './WorkflowRequest';
import { ApprovalLevel } from './ApprovalLevel';
import { Participant } from './Participant';
import { Document } from './Document';
// Define associations
// Define associations
//
// Wires up every inter-model relationship in one place so that the import
// order of the individual model files does not matter. All joins use the
// application-level keys (userId / requestId / documentId) rather than the
// models' default primary keys, and the `as` aliases below are the names
// referenced by `include: [{ association: ... }]` queries elsewhere.
const defineAssociations = () => {
  // User associations
  User.hasMany(WorkflowRequest, {
    as: 'initiatedRequests',
    foreignKey: 'initiatorId',
    sourceKey: 'userId'
  });
  User.hasMany(ApprovalLevel, {
    as: 'approvalLevels',
    foreignKey: 'approverId',
    sourceKey: 'userId'
  });
  User.hasMany(Participant, {
    as: 'participations',
    foreignKey: 'userId',
    sourceKey: 'userId'
  });
  User.hasMany(Document, {
    as: 'uploadedDocuments',
    foreignKey: 'uploadedBy',
    sourceKey: 'userId'
  });
  // WorkflowRequest associations
  WorkflowRequest.hasMany(ApprovalLevel, {
    as: 'approvalLevels',
    foreignKey: 'requestId',
    sourceKey: 'requestId'
  });
  WorkflowRequest.hasMany(Participant, {
    as: 'participants',
    foreignKey: 'requestId',
    sourceKey: 'requestId'
  });
  WorkflowRequest.hasMany(Document, {
    as: 'documents',
    foreignKey: 'requestId',
    sourceKey: 'requestId'
  });
  // ApprovalLevel associations
  ApprovalLevel.belongsTo(WorkflowRequest, {
    as: 'request',
    foreignKey: 'requestId',
    targetKey: 'requestId'
  });
  ApprovalLevel.belongsTo(User, {
    as: 'approver',
    foreignKey: 'approverId',
    targetKey: 'userId'
  });
  // Participant associations
  Participant.belongsTo(WorkflowRequest, {
    as: 'request',
    foreignKey: 'requestId',
    targetKey: 'requestId'
  });
  Participant.belongsTo(User, {
    as: 'user',
    foreignKey: 'userId',
    targetKey: 'userId'
  });
  // Second belongsTo on the same target model: distinct alias + foreign key
  // track who added the participant (vs. who the participant is).
  Participant.belongsTo(User, {
    as: 'addedByUser',
    foreignKey: 'addedBy',
    targetKey: 'userId'
  });
  // Document associations
  Document.belongsTo(WorkflowRequest, {
    as: 'request',
    foreignKey: 'requestId',
    targetKey: 'requestId'
  });
  Document.belongsTo(User, {
    as: 'uploader',
    foreignKey: 'uploadedBy',
    targetKey: 'userId'
  });
  // Self-referential versioning: a document may point at the version it
  // supersedes, and conversely list its newer versions.
  Document.belongsTo(Document, {
    as: 'parentDocument',
    foreignKey: 'parentDocumentId',
    targetKey: 'documentId'
  });
  Document.hasMany(Document, {
    as: 'childDocuments',
    foreignKey: 'parentDocumentId',
    sourceKey: 'documentId'
  });
};
// Initialize associations
// Runs once at module load: importing this module anywhere guarantees the
// associations are registered before any query relies on them.
defineAssociations();

// Export models and sequelize instance
export {
  sequelize,
  User,
  WorkflowRequest,
  ApprovalLevel,
  Participant,
  Document
};

// Export default sequelize instance
export default sequelize;

63
src/routes/auth.routes.ts Normal file
View File

@ -0,0 +1,63 @@
import { Router, Request, Response, NextFunction } from 'express';
import { AuthController } from '../controllers/auth.controller';
import { authenticateToken } from '../middlewares/auth.middleware';
import { validateBody } from '../middlewares/validate.middleware';
import { ssoCallbackSchema, refreshTokenSchema, tokenExchangeSchema } from '../validators/auth.validator';
import { asyncHandler } from '../middlewares/errorHandler.middleware';
const router = Router();
const authController = new AuthController();

// NOTE(review): startup breadcrumb — consider routing this through the app
// logger instead of console so it honours log levels/formatting.
console.log('✅ Auth routes loaded - token-exchange endpoint registered');

// Token exchange endpoint (no authentication required) - for localhost development
router.post('/token-exchange',
  validateBody(tokenExchangeSchema),
  asyncHandler(authController.exchangeToken.bind(authController))
);

// SSO callback endpoint (no authentication required)
router.post('/sso-callback',
  validateBody(ssoCallbackSchema),
  asyncHandler(authController.handleSSOCallback.bind(authController))
);

// Token refresh endpoint (no authentication required)
router.post('/refresh',
  validateBody(refreshTokenSchema),
  asyncHandler(authController.refreshToken.bind(authController))
);

// Protected routes (require authentication)
router.get('/me',
  authenticateToken,
  asyncHandler(authController.getCurrentUser.bind(authController))
);

router.get('/validate',
  authenticateToken,
  asyncHandler(authController.validateToken.bind(authController))
);

// Logout endpoint - allow without authentication to clear cookies
// If token exists, validate it; if not, still clear cookies
router.post('/logout',
  asyncHandler(async (req: Request, res: Response, next: NextFunction) => {
    // Try to authenticate if token exists
    const authHeader = req.headers.authorization;
    const token = authHeader && authHeader.split(' ')[1];
    const cookieToken = req.cookies?.accessToken;
    // If we have a token (header or cookie), try to authenticate
    // NOTE(review): if the presented token is expired/invalid,
    // authenticateToken will presumably reject the request here, which
    // contradicts the "clear cookies even if token is expired/invalid"
    // intent below — confirm that middleware's failure mode; otherwise a
    // stale cookie makes logout impossible.
    if (token || cookieToken) {
      return authenticateToken(req, res, next);
    }
    // If no token, proceed anyway (logout should work even without auth)
    // This allows clearing cookies even if token is expired/invalid
    next();
  }),
  asyncHandler(authController.logout.bind(authController))
);

export default router;

28
src/routes/index.ts Normal file
View File

@ -0,0 +1,28 @@
import { Router } from 'express';
import authRoutes from './auth.routes';
// import workflowRoutes from './workflow.routes'; // Temporarily disabled due to TypeScript errors
/**
 * Top-level API router: exposes a liveness probe and mounts the feature
 * routers under their URL prefixes.
 */
const router = Router();

// Liveness probe — static identity payload plus the current server time so
// monitors can confirm the process is up and responsive.
router.get('/health', (_req, res) => {
  const payload = {
    status: 'OK',
    timestamp: new Date(),
    service: 're-workflow-backend'
  };
  res.status(200).json(payload);
});

// Feature routers
router.use('/auth', authRoutes);
// router.use('/workflows', workflowRoutes); // Temporarily disabled due to TypeScript errors
// TODO: Add other route modules as they are implemented
// router.use('/approvals', approvalRoutes);
// router.use('/documents', documentRoutes);
// router.use('/notifications', notificationRoutes);
// router.use('/participants', participantRoutes);
// router.use('/dashboard', dashboardRoutes);
// router.use('/users', userRoutes);

export default router;

View File

@ -0,0 +1,67 @@
import { Router } from 'express';
import { WorkflowController } from '../controllers/workflow.controller';
import { ApprovalController } from '../controllers/approval.controller';
import { authenticateToken } from '../middlewares/auth.middleware';
import { validateBody, validateParams } from '../middlewares/validate.middleware';
import { createWorkflowSchema, updateWorkflowSchema, workflowParamsSchema } from '../validators/workflow.validator';
import { approvalActionSchema, approvalParamsSchema } from '../validators/approval.validator';
import { asyncHandler } from '../middlewares/errorHandler.middleware';
const router = Router();
const workflowController = new WorkflowController();
const approvalController = new ApprovalController();

// Workflow routes
// Every route requires a valid JWT; params/bodies are schema-validated
// before reaching the controller, and asyncHandler forwards rejected
// promises to the error middleware.
router.post('/',
  authenticateToken,
  validateBody(createWorkflowSchema),
  asyncHandler(workflowController.createWorkflow.bind(workflowController))
);

router.get('/:id',
  authenticateToken,
  validateParams(workflowParamsSchema),
  asyncHandler(workflowController.getWorkflow.bind(workflowController))
);

router.put('/:id',
  authenticateToken,
  validateParams(workflowParamsSchema),
  validateBody(updateWorkflowSchema),
  asyncHandler(workflowController.updateWorkflow.bind(workflowController))
);

router.patch('/:id/submit',
  authenticateToken,
  validateParams(workflowParamsSchema),
  asyncHandler(workflowController.submitWorkflow.bind(workflowController))
);

// Approval routes
router.get('/:id/approvals',
  authenticateToken,
  validateParams(workflowParamsSchema),
  asyncHandler(approvalController.getApprovalLevels.bind(approvalController))
);

router.get('/:id/approvals/current',
  authenticateToken,
  validateParams(workflowParamsSchema),
  asyncHandler(approvalController.getCurrentApprovalLevel.bind(approvalController))
);

router.patch('/:id/approvals/:levelId/approve',
  authenticateToken,
  validateParams(approvalParamsSchema),
  validateBody(approvalActionSchema),
  asyncHandler(approvalController.approveLevel.bind(approvalController))
);

// NOTE(review): this reject route is wired to the same controller method as
// /approve; the approve/reject outcome is presumably decided by the `action`
// field of the validated body rather than by the URL. Confirm the controller
// enforces that the body action matches the endpoint — otherwise a body with
// action=APPROVE posted to /reject would approve.
router.patch('/:id/approvals/:levelId/reject',
  authenticateToken,
  validateParams(approvalParamsSchema),
  validateBody(approvalActionSchema),
  asyncHandler(approvalController.approveLevel.bind(approvalController))
);

export default router;

31
src/server.ts Normal file
View File

@ -0,0 +1,31 @@
import app from './app';
const PORT: number = parseInt(process.env.PORT || '5000', 10);

// Handle to the listening server so the signal handlers can drain in-flight
// connections before the process exits.
let server: ReturnType<typeof app.listen> | undefined;

/**
 * Bind the HTTP server to PORT and log the service endpoints.
 *
 * BUG FIX: listen failures (e.g. EADDRINUSE) are delivered asynchronously
 * via the server's 'error' event, not thrown synchronously — the previous
 * try/catch around app.listen() could never observe them. We subscribe to
 * the event instead.
 */
const startServer = (): void => {
  server = app.listen(PORT, () => {
    console.log(`🚀 Server running on port ${PORT}`);
    console.log(`📊 Environment: ${process.env.NODE_ENV || 'development'}`);
    console.log(`🌐 API Base URL: http://localhost:${PORT}`);
    console.log(`❤️ Health Check: http://localhost:${PORT}/health`);
  });
  server.on('error', (error: Error) => {
    console.error('❌ Unable to start server:', error);
    process.exit(1);
  });
};

/**
 * Graceful shutdown: stop accepting new connections, let in-flight requests
 * finish, then exit. Previously the handlers exited immediately without
 * closing the server, aborting active requests. A 10s watchdog force-exits
 * if connections never drain.
 */
const shutdown = (signal: string): void => {
  console.log(`🛑 ${signal} signal received: closing HTTP server`);
  if (!server) {
    process.exit(0);
    return;
  }
  server.close(() => process.exit(0));
  setTimeout(() => process.exit(1), 10_000).unref();
};

process.on('SIGTERM', () => shutdown('SIGTERM'));
process.on('SIGINT', () => shutdown('SIGINT'));

startServer();

View File

@ -0,0 +1,73 @@
import { ApprovalLevel } from '@models/ApprovalLevel';
import { WorkflowRequest } from '@models/WorkflowRequest';
import { ApprovalAction } from '../types/approval.types';
import { calculateElapsedHours, calculateTATPercentage } from '@utils/helpers';
import logger from '@utils/logger';
export class ApprovalService {
  /**
   * Apply an approve/reject action to an approval level and, when the action
   * closes the workflow (final-level approve, or any reject), update the
   * parent WorkflowRequest status and closure date as well.
   *
   * @param levelId - primary key of the ApprovalLevel to act on
   * @param action  - APPROVE/REJECT plus optional comments/rejection reason
   * @param _userId - acting user (currently unused; kept for interface parity)
   * @returns the updated level, or null when no such level exists
   * @throws Error when the lookup or update fails
   */
  async approveLevel(levelId: string, action: ApprovalAction, _userId: string): Promise<ApprovalLevel | null> {
    try {
      const level = await ApprovalLevel.findByPk(levelId);
      if (!level) return null;
      const now = new Date();
      const isApproval = action.action === 'APPROVE';
      // TAT bookkeeping: how long this level was open vs. its allowance.
      const elapsedHours = calculateElapsedHours(level.levelStartTime || level.createdAt, now);
      const tatPercentage = calculateTATPercentage(elapsedHours, level.tatHours);
      const updatedLevel = await level.update({
        status: isApproval ? 'APPROVED' : 'REJECTED',
        actionDate: now,
        levelEndTime: now,
        elapsedHours,
        tatPercentageUsed: tatPercentage,
        comments: action.comments,
        rejectionReason: action.rejectionReason
      });
      // Propagate terminal outcomes to the parent workflow.
      if (level.isFinalApprover && isApproval) {
        await WorkflowRequest.update(
          { status: 'APPROVED', closureDate: now },
          { where: { requestId: level.requestId } }
        );
      } else if (!isApproval) {
        // BUG FIX: this branch previously compared action.action against
        // 'REJECTED', which is never a value of ApprovalAction['action']
        // ('APPROVE' | 'REJECT'), so a rejection never closed the workflow.
        await WorkflowRequest.update(
          { status: 'REJECTED', closureDate: now },
          { where: { requestId: level.requestId } }
        );
      }
      logger.info(`Approval level ${levelId} ${isApproval ? 'approved' : 'rejected'}`);
      return updatedLevel;
    } catch (error) {
      logger.error(`Failed to ${action.action.toLowerCase()} level ${levelId}:`, error);
      throw new Error(`Failed to ${action.action.toLowerCase()} level`);
    }
  }

  /**
   * The next level awaiting action for a request: the PENDING level with the
   * lowest levelNumber (lower numbers act first).
   */
  async getCurrentApprovalLevel(requestId: string): Promise<ApprovalLevel | null> {
    try {
      return await ApprovalLevel.findOne({
        where: { requestId, status: 'PENDING' },
        order: [['levelNumber', 'ASC']]
      });
    } catch (error) {
      logger.error(`Failed to get current approval level for ${requestId}:`, error);
      throw new Error('Failed to get current approval level');
    }
  }

  /** All approval levels of a request, ordered by position in the chain. */
  async getApprovalLevels(requestId: string): Promise<ApprovalLevel[]> {
    try {
      return await ApprovalLevel.findAll({
        where: { requestId },
        order: [['levelNumber', 'ASC']]
      });
    } catch (error) {
      logger.error(`Failed to get approval levels for ${requestId}:`, error);
      throw new Error('Failed to get approval levels');
    }
  }
}

View File

@ -0,0 +1,476 @@
import { User } from '../models/User';
import { SSOUserData, ssoConfig } from '../config/sso';
import jwt, { SignOptions } from 'jsonwebtoken';
import type { StringValue } from 'ms';
import { LoginResponse } from '../types/auth.types';
import logger from '../utils/logger';
import axios from 'axios';
/**
 * Authentication service: Okta OAuth code exchange, SSO user provisioning,
 * and issuing/validating this application's own JWTs.
 */
export class AuthService {
  /**
   * Handle SSO callback from frontend.
   * Creates a new user or updates an existing one. Lookup is by email (the
   * primary identifier); oktaSub is stored/synced alongside it.
   */
  async handleSSOCallback(userData: SSOUserData): Promise<LoginResponse> {
    try {
      // Validate required fields - email and oktaSub are required
      if (!userData.email || !userData.oktaSub) {
        throw new Error('Email and Okta sub are required');
      }
      // Prepare user data with defaults for missing fields
      // If firstName/lastName are missing, try to extract from displayName
      let firstName = userData.firstName || '';
      let lastName = userData.lastName || '';
      let displayName = userData.displayName || '';
      // If displayName exists but firstName/lastName don't, try to split displayName
      if (displayName && !firstName && !lastName) {
        const nameParts = displayName.trim().split(/\s+/);
        if (nameParts.length > 0) {
          firstName = nameParts[0] || '';
          // Everything after the first token is treated as the last name.
          lastName = nameParts.slice(1).join(' ') || '';
        }
      }
      // If firstName/lastName exist but displayName doesn't, create displayName
      if (!displayName && (firstName || lastName)) {
        displayName = `${firstName} ${lastName}`.trim() || userData.email;
      }
      // Fallback: if still no displayName, use email
      if (!displayName) {
        displayName = userData.email.split('@')[0] || 'User';
      }
      // Prepare update/create data - always include required fields
      const userUpdateData: any = {
        email: userData.email,
        oktaSub: userData.oktaSub,
        lastLogin: new Date(),
        isActive: true,
      };
      // Only set optional fields if they have values (don't overwrite with null/empty)
      if (firstName) userUpdateData.firstName = firstName;
      if (lastName) userUpdateData.lastName = lastName;
      if (displayName) userUpdateData.displayName = displayName;
      if (userData.employeeId) userUpdateData.employeeId = userData.employeeId; // Optional
      if (userData.department) userUpdateData.department = userData.department;
      if (userData.designation) userUpdateData.designation = userData.designation;
      if (userData.phone) userUpdateData.phone = userData.phone;
      // Check if user exists by email (primary identifier)
      let user = await User.findOne({
        where: { email: userData.email }
      });
      if (user) {
        // Update existing user - update oktaSub if different, and other fields
        await user.update(userUpdateData);
        // Reload to get updated data
        user = await user.reload();
        logger.info(`User updated via SSO`, {
          email: userData.email,
          oktaSub: userData.oktaSub,
          updatedFields: Object.keys(userUpdateData),
        });
      } else {
        // Create new user with required fields (email and oktaSub)
        user = await User.create({
          email: userData.email,
          oktaSub: userData.oktaSub,
          employeeId: userData.employeeId || null, // Optional
          firstName: firstName || null,
          lastName: lastName || null,
          displayName: displayName,
          department: userData.department || null,
          designation: userData.designation || null,
          phone: userData.phone || null,
          isActive: true,
          isAdmin: false,
          lastLogin: new Date()
        });
        logger.info(`New user created via SSO`, {
          email: userData.email,
          oktaSub: userData.oktaSub,
          employeeId: userData.employeeId || 'not provided',
          displayName,
          hasDepartment: !!userData.department,
          hasDesignation: !!userData.designation,
        });
      }
      // Generate JWT tokens
      const accessToken = this.generateAccessToken(user);
      const refreshToken = this.generateRefreshToken(user);
      return {
        user: {
          userId: user.userId,
          employeeId: user.employeeId || null,
          email: user.email,
          firstName: user.firstName || null,
          lastName: user.lastName || null,
          displayName: user.displayName || null,
          department: user.department || null,
          designation: user.designation || null,
          isAdmin: user.isAdmin
        },
        accessToken,
        refreshToken
      };
    } catch (error) {
      logger.error(`SSO callback failed`, {
        email: userData.email,
        oktaSub: userData.oktaSub,
        error: error instanceof Error ? error.message : 'Unknown error',
      });
      const errorMessage = error instanceof Error ? error.message : 'Unknown error';
      throw new Error(`SSO callback failed: ${errorMessage}`);
    }
  }

  /**
   * Generate JWT access token.
   * Payload carries userId/employeeId/email plus a coarse role derived from
   * isAdmin; expiry comes from ssoConfig.jwtExpiry.
   */
  private generateAccessToken(user: User): string {
    if (!ssoConfig.jwtSecret) {
      throw new Error('JWT secret is not configured');
    }
    const payload = {
      userId: user.userId,
      employeeId: user.employeeId,
      email: user.email,
      role: user.isAdmin ? 'admin' : 'user'
    };
    const options: SignOptions = {
      expiresIn: ssoConfig.jwtExpiry as StringValue | number
    };
    return jwt.sign(payload, ssoConfig.jwtSecret, options);
  }

  /**
   * Generate JWT refresh token.
   * Minimal payload: userId plus a `type: 'refresh'` marker that
   * refreshAccessToken checks to reject access tokens used as refresh tokens.
   */
  private generateRefreshToken(user: User): string {
    if (!ssoConfig.jwtSecret) {
      throw new Error('JWT secret is not configured');
    }
    const payload = {
      userId: user.userId,
      type: 'refresh'
    };
    const options: SignOptions = {
      expiresIn: ssoConfig.refreshTokenExpiry as StringValue | number
    };
    return jwt.sign(payload, ssoConfig.jwtSecret, options);
  }

  /**
   * Validate JWT token.
   * @returns the decoded payload on success (typed `any`; shape depends on
   *          which of the two token kinds above was signed)
   * @throws Error('Invalid token') on signature/expiry failure
   */
  async validateToken(token: string): Promise<any> {
    try {
      return jwt.verify(token, ssoConfig.jwtSecret);
    } catch (error) {
      throw new Error('Invalid token');
    }
  }

  /**
   * Refresh access token using refresh token.
   * Verifies the refresh token, confirms the user still exists and is
   * active, then issues a fresh access token.
   */
  async refreshAccessToken(refreshToken: string): Promise<string> {
    try {
      const decoded = jwt.verify(refreshToken, ssoConfig.jwtSecret) as any;
      if (decoded.type !== 'refresh') {
        throw new Error('Invalid refresh token');
      }
      const user = await User.findByPk(decoded.userId);
      if (!user || !user.isActive) {
        throw new Error('User not found or inactive');
      }
      return this.generateAccessToken(user);
    } catch (error) {
      logger.error('Token refresh failed:', error);
      throw new Error('Token refresh failed');
    }
  }

  /**
   * Get user profile by ID.
   */
  async getUserProfile(userId: string): Promise<User | null> {
    try {
      return await User.findByPk(userId);
    } catch (error) {
      logger.error(`Failed to get user profile for ${userId}:`, error);
      throw new Error('Failed to get user profile');
    }
  }

  /**
   * Update user profile.
   * @returns the updated user, or null when the user does not exist
   */
  async updateUserProfile(userId: string, updateData: Partial<User>): Promise<User | null> {
    try {
      const user = await User.findByPk(userId);
      if (!user) {
        return null;
      }
      return await user.update(updateData);
    } catch (error) {
      logger.error(`Failed to update user profile for ${userId}:`, error);
      throw new Error('Failed to update user profile');
    }
  }

  /**
   * Exchange authorization code for tokens with Okta/Auth0
   *
   * IMPORTANT: redirectUri MUST match the one used in the initial authorization request to Okta.
   * This is the FRONTEND callback URL (e.g., http://localhost:3000/login/callback),
   * NOT the backend URL. Okta verifies this matches to prevent redirect URI attacks.
   */
  async exchangeCodeForTokens(code: string, redirectUri: string): Promise<LoginResponse> {
    try {
      // Validate configuration
      if (!ssoConfig.oktaClientId || ssoConfig.oktaClientId.trim() === '') {
        throw new Error('OKTA_CLIENT_ID is not configured. Please set it in your .env file.');
      }
      if (!ssoConfig.oktaClientSecret || ssoConfig.oktaClientSecret.trim() === '' || ssoConfig.oktaClientSecret.includes('your_okta_client_secret')) {
        throw new Error('OKTA_CLIENT_SECRET is not configured. Please set it in your .env file.');
      }
      if (!code || code.trim() === '') {
        throw new Error('Authorization code is required');
      }
      if (!redirectUri || redirectUri.trim() === '') {
        throw new Error('Redirect URI is required');
      }
      logger.info('Exchanging code with Okta', {
        redirectUri,
        codePrefix: code.substring(0, 10) + '...',
        oktaDomain: ssoConfig.oktaDomain,
        clientId: ssoConfig.oktaClientId,
        hasClientSecret: !!ssoConfig.oktaClientSecret && !ssoConfig.oktaClientSecret.includes('your_okta_client_secret'),
      });
      // NOTE(review): hard-codes the 'default' authorization server — confirm
      // this matches the Okta org's configured auth server.
      const tokenEndpoint = `${ssoConfig.oktaDomain}/oauth2/default/v1/token`;
      // Exchange authorization code for tokens
      // redirect_uri here must match the one used when requesting the authorization code
      const tokenResponse = await axios.post(
        tokenEndpoint,
        new URLSearchParams({
          grant_type: 'authorization_code',
          code,
          redirect_uri: redirectUri, // Frontend URL (e.g., http://localhost:3000/login/callback)
          client_id: ssoConfig.oktaClientId,
          client_secret: ssoConfig.oktaClientSecret,
        }),
        {
          headers: {
            'Content-Type': 'application/x-www-form-urlencoded',
            'Accept': 'application/json',
          },
          responseType: 'json', // Explicitly set response type
          validateStatus: (status) => status < 500, // Don't throw on 4xx errors, we'll handle them
        }
      );
      // Check for error response from Okta
      if (tokenResponse.status !== 200) {
        logger.error('Okta token exchange failed', {
          status: tokenResponse.status,
          statusText: tokenResponse.statusText,
          data: tokenResponse.data,
          headers: tokenResponse.headers,
        });
        const errorData = tokenResponse.data || {};
        const errorMessage = errorData.error_description || errorData.error || 'Unknown error from Okta';
        throw new Error(`Okta token exchange failed (${tokenResponse.status}): ${errorMessage}`);
      }
      // Check if response data is valid JSON
      if (!tokenResponse.data || typeof tokenResponse.data !== 'object') {
        logger.error('Invalid response from Okta', {
          dataType: typeof tokenResponse.data,
          isArray: Array.isArray(tokenResponse.data),
          data: tokenResponse.data,
        });
        throw new Error('Invalid response format from Okta');
      }
      const { access_token, refresh_token } = tokenResponse.data;
      if (!access_token) {
        logger.error('Missing access_token in Okta response', {
          responseKeys: Object.keys(tokenResponse.data || {}),
          hasRefreshToken: !!refresh_token,
        });
        throw new Error('Failed to obtain access token from Okta - access_token missing in response');
      }
      logger.info('Successfully obtained tokens from Okta', {
        hasAccessToken: !!access_token,
        hasRefreshToken: !!refresh_token,
      });
      // Get user info from Okta using access token
      const userInfoEndpoint = `${ssoConfig.oktaDomain}/oauth2/default/v1/userinfo`;
      const userInfoResponse = await axios.get(userInfoEndpoint, {
        headers: {
          Authorization: `Bearer ${access_token}`,
        },
      });
      const oktaUser = userInfoResponse.data;
      // Log the full Okta response to see what attributes are available
      logger.info('Okta userinfo response received', {
        availableKeys: Object.keys(oktaUser || {}),
        sub: oktaUser.sub,
        email: oktaUser.email,
        // Log specific fields that might be employeeId
        employeeId: oktaUser.employeeId || oktaUser.employee_id || oktaUser.empId || 'NOT_FOUND',
        // Log other common custom attributes
        customAttributes: Object.keys(oktaUser || {}).filter(key =>
          key.includes('employee') || key.includes('emp') || key.includes('id')
        ),
      });
      // Extract oktaSub (required) - this is the Okta subject identifier
      // IMPORTANT: Do NOT use oktaSub for employeeId - they are separate fields
      const oktaSub = oktaUser.sub || '';
      if (!oktaSub) {
        throw new Error('Okta sub (subject identifier) is required but not found in response');
      }
      // Extract employeeId (optional) - ONLY from custom Okta attributes, NOT from sub
      // Check multiple possible sources for actual employee ID attribute:
      // 1. Custom Okta attribute: employeeId, employee_id, empId, employeeNumber
      // 2. Leave undefined if not found - DO NOT use oktaSub/sub as fallback
      const employeeId =
        oktaUser.employeeId ||
        oktaUser.employee_id ||
        oktaUser.empId ||
        oktaUser.employeeNumber ||
        undefined; // Explicitly undefined if not found - oktaSub is stored separately
      // Extract user data from Okta response
      // Adjust these mappings based on your Okta user profile attributes
      // Only include fields that have values, leave others undefined for optional handling
      const userData: SSOUserData = {
        oktaSub: oktaSub, // Required - Okta subject identifier (stored in okta_sub column)
        email: oktaUser.email || '',
        employeeId: employeeId, // Optional - Only if provided as custom attribute, NOT oktaSub
      };
      // Validate: Ensure we're not accidentally using oktaSub as employeeId
      if (employeeId === oktaSub) {
        logger.warn('Warning: employeeId matches oktaSub - this should not happen unless explicitly set in Okta', {
          oktaSub,
          employeeId,
        });
        // Clear employeeId to avoid confusion - user can update it later if needed
        userData.employeeId = undefined;
      }
      logger.info('User data extracted from Okta', {
        oktaSub: oktaSub,
        email: oktaUser.email,
        employeeId: employeeId || 'not provided (optional)',
        employeeIdSource: oktaUser.employeeId ? 'employeeId attribute' :
          oktaUser.employee_id ? 'employee_id attribute' :
          oktaUser.empId ? 'empId attribute' :
          'not found',
        note: 'Using email as primary identifier, oktaSub for uniqueness',
      });
      // Only set optional fields if they have values
      if (oktaUser.given_name || oktaUser.firstName) {
        userData.firstName = oktaUser.given_name || oktaUser.firstName;
      }
      if (oktaUser.family_name || oktaUser.lastName) {
        userData.lastName = oktaUser.family_name || oktaUser.lastName;
      }
      if (oktaUser.name) {
        userData.displayName = oktaUser.name;
      }
      if (oktaUser.department) {
        userData.department = oktaUser.department;
      }
      if (oktaUser.title || oktaUser.designation) {
        userData.designation = oktaUser.title || oktaUser.designation;
      }
      if (oktaUser.phone_number || oktaUser.phone) {
        userData.phone = oktaUser.phone_number || oktaUser.phone;
      }
      logger.info('Extracted user data from Okta', {
        employeeId: userData.employeeId,
        email: userData.email,
        hasFirstName: !!userData.firstName,
        hasLastName: !!userData.lastName,
        hasDisplayName: !!userData.displayName,
        hasDepartment: !!userData.department,
        hasDesignation: !!userData.designation,
        hasPhone: !!userData.phone,
      });
      // Handle SSO callback to create/update user and generate our tokens
      const result = await this.handleSSOCallback(userData);
      // Return our JWT tokens along with Okta tokens (store Okta refresh token for future use)
      return {
        ...result,
        // Store Okta refresh token separately if needed
        oktaRefreshToken: refresh_token,
        oktaAccessToken: access_token,
      };
    } catch (error: any) {
      logger.error('Token exchange with Okta failed:', {
        message: error.message,
        response: error.response?.data,
        status: error.response?.status,
        statusText: error.response?.statusText,
        headers: error.response?.headers,
        code: error.code,
        stack: error.stack,
      });
      // Provide a more user-friendly error message
      if (error.response?.data) {
        const errorData = error.response.data;
        // Handle if error response is an object
        if (typeof errorData === 'object' && !Array.isArray(errorData)) {
          const errorMsg = errorData.error_description || errorData.error || error.message;
          throw new Error(`Okta authentication failed: ${errorMsg}`);
        } else {
          logger.error('Unexpected error response format from Okta', {
            dataType: typeof errorData,
            isArray: Array.isArray(errorData),
          });
          throw new Error(`Okta authentication failed: Unexpected response format. Status: ${error.response.status}`);
        }
      }
      throw new Error(`Okta authentication failed: ${error.message || 'Unknown error'}`);
    }
  }
}

View File

@ -0,0 +1,80 @@
import { User as UserModel } from '../models/User';
import { Op } from 'sequelize';
import { SSOUserData } from '../types/auth.types'; // Use shared type
// Using UserModel type directly - interface removed to avoid duplication
/**
 * User persistence helpers for SSO-driven provisioning and lookups.
 * Uses the UserModel type directly (no duplicate local interface).
 */
export class UserService {
  /**
   * Upsert a user from SSO profile data.
   *
   * Matches an existing row by email OR oktaSub; on a match, refreshes the
   * whole profile (missing optional fields are written back as null, i.e.
   * the SSO payload is treated as authoritative) and re-activates the user.
   * Otherwise a new, non-admin user is created.
   *
   * @throws Error when email or oktaSub is missing
   */
  async createOrUpdateUser(ssoData: SSOUserData): Promise<UserModel> {
    if (!ssoData.email || !ssoData.oktaSub) {
      throw new Error('Email and Okta sub are required');
    }

    // Email is the primary identifier; oktaSub is checked too so a changed
    // email on the IdP side still finds the same account.
    const existing = await UserModel.findOne({
      where: {
        [Op.or]: [
          { email: ssoData.email },
          { oktaSub: ssoData.oktaSub }
        ]
      }
    });

    const loginTime = new Date();
    // Column values shared by both the update and create paths.
    const profile = {
      email: ssoData.email,
      oktaSub: ssoData.oktaSub,
      employeeId: ssoData.employeeId || null, // Optional
      firstName: ssoData.firstName || null,
      lastName: ssoData.lastName || null,
      displayName: ssoData.displayName || null,
      department: ssoData.department || null,
      designation: ssoData.designation || null,
      phone: ssoData.phone || null,
      // location intentionally ignored for now - schema not finalized
      lastLogin: loginTime,
      isActive: true
    };

    if (existing) {
      await existing.update({ ...profile, updatedAt: loginTime });
      return existing;
    }

    // New users are never admins by default; the flag can be raised later.
    return UserModel.create({ ...profile, isAdmin: false });
  }

  /** Fetch a user by primary key. */
  async getUserById(userId: string): Promise<UserModel | null> {
    return UserModel.findByPk(userId);
  }

  /** Fetch a user by HR-system employee ID. */
  async getUserByEmployeeId(employeeId: string): Promise<UserModel | null> {
    return UserModel.findOne({ where: { employeeId } });
  }

  /** All users, newest first. */
  async getAllUsers(): Promise<UserModel[]> {
    return UserModel.findAll({
      order: [['createdAt', 'DESC']]
    });
  }
}

View File

@ -0,0 +1,110 @@
import { WorkflowRequest } from '@models/WorkflowRequest';
import { ApprovalLevel } from '@models/ApprovalLevel';
import { Participant } from '@models/Participant';
import { CreateWorkflowRequest, UpdateWorkflowRequest } from '../types/workflow.types';
import { generateRequestNumber, calculateTATDays } from '@utils/helpers';
import logger from '@utils/logger';
export class WorkflowService {
  /**
   * Create a workflow request together with its approval levels and optional
   * participants.
   *
   * FIX: all inserts now run inside a single transaction, so a failure
   * part-way through can no longer leave an orphaned workflow with missing
   * levels/participants (previously each row was inserted independently).
   * Independent child rows are also created in parallel instead of one
   * awaited insert per loop iteration.
   *
   * @param initiatorId  - userId of the creating user
   * @param workflowData - template, title, levels and optional participants
   * @returns the newly created workflow (status DRAFT)
   * @throws Error when any insert fails (transaction is rolled back)
   */
  async createWorkflow(initiatorId: string, workflowData: CreateWorkflowRequest): Promise<WorkflowRequest> {
    try {
      const requestNumber = generateRequestNumber();
      // Total TAT is the sum of the per-level allowances.
      const totalTatHours = workflowData.approvalLevels.reduce((sum, level) => sum + level.tatHours, 0);

      const sequelize = WorkflowRequest.sequelize;
      if (!sequelize) {
        throw new Error('WorkflowRequest model is not initialized');
      }

      const workflow = await sequelize.transaction(async (transaction) => {
        const request = await WorkflowRequest.create({
          requestNumber,
          initiatorId,
          templateType: workflowData.templateType,
          title: workflowData.title,
          description: workflowData.description,
          priority: workflowData.priority,
          totalLevels: workflowData.approvalLevels.length,
          totalTatHours,
          status: 'DRAFT'
        }, { transaction });

        // Create approval levels (independent rows — insert in parallel).
        await Promise.all(workflowData.approvalLevels.map((levelData) =>
          ApprovalLevel.create({
            requestId: request.requestId,
            levelNumber: levelData.levelNumber,
            levelName: levelData.levelName,
            approverId: levelData.approverId,
            approverEmail: levelData.approverEmail,
            approverName: levelData.approverName,
            tatHours: levelData.tatHours,
            tatDays: calculateTATDays(levelData.tatHours),
            isFinalApprover: levelData.isFinalApprover || false
          }, { transaction })
        ));

        // Create participants if provided.
        if (workflowData.participants) {
          await Promise.all(workflowData.participants.map((participantData) =>
            Participant.create({
              requestId: request.requestId,
              userId: participantData.userId,
              userEmail: participantData.userEmail,
              userName: participantData.userName,
              participantType: participantData.participantType,
              canComment: participantData.canComment ?? true,
              canViewDocuments: participantData.canViewDocuments ?? true,
              canDownloadDocuments: participantData.canDownloadDocuments ?? false,
              notificationEnabled: participantData.notificationEnabled ?? true,
              addedBy: initiatorId
            }, { transaction })
          ));
        }

        return request;
      });

      logger.info(`Workflow created: ${requestNumber}`);
      return workflow;
    } catch (error) {
      logger.error('Failed to create workflow:', error);
      throw new Error('Failed to create workflow');
    }
  }

  /**
   * Fetch a workflow with its initiator, approval levels, participants and
   * documents eagerly loaded.
   * @returns null when no workflow has this id
   */
  async getWorkflowById(requestId: string): Promise<WorkflowRequest | null> {
    try {
      return await WorkflowRequest.findByPk(requestId, {
        include: [
          { association: 'initiator' },
          { association: 'approvalLevels' },
          { association: 'participants' },
          { association: 'documents' }
        ]
      });
    } catch (error) {
      logger.error(`Failed to get workflow ${requestId}:`, error);
      throw new Error('Failed to get workflow');
    }
  }

  /**
   * Apply a partial update to a workflow.
   * @returns the updated workflow, or null when it does not exist
   */
  async updateWorkflow(requestId: string, updateData: UpdateWorkflowRequest): Promise<WorkflowRequest | null> {
    try {
      const workflow = await WorkflowRequest.findByPk(requestId);
      if (!workflow) return null;
      return await workflow.update(updateData);
    } catch (error) {
      logger.error(`Failed to update workflow ${requestId}:`, error);
      throw new Error('Failed to update workflow');
    }
  }

  /**
   * Move a workflow out of draft: status -> PENDING, stamp submissionDate.
   * NOTE(review): no guard against submitting a workflow that is not in
   * DRAFT state — confirm whether re-submission should be rejected.
   * @returns the updated workflow, or null when it does not exist
   */
  async submitWorkflow(requestId: string): Promise<WorkflowRequest | null> {
    try {
      const workflow = await WorkflowRequest.findByPk(requestId);
      if (!workflow) return null;
      return await workflow.update({
        status: 'PENDING',
        isDraft: false,
        submissionDate: new Date()
      });
    } catch (error) {
      logger.error(`Failed to submit workflow ${requestId}:`, error);
      throw new Error('Failed to submit workflow');
    }
  }
}

View File

@ -0,0 +1,39 @@
import { ApprovalStatus } from './common.types';
/**
 * One approval step of a workflow request. TAT (turnaround-time) fields
 * track how long the approver has held the request against the agreed
 * allowance for this level.
 */
export interface ApprovalLevel {
  levelId: string;
  requestId: string;
  levelNumber: number; // position in the chain; lower numbers act first
  levelName?: string;
  approverId: string;
  approverEmail: string;
  approverName: string;
  tatHours: number; // agreed turnaround allowance for this level
  tatDays: number;
  status: ApprovalStatus;
  levelStartTime?: Date;
  levelEndTime?: Date;
  actionDate?: Date;
  comments?: string;
  rejectionReason?: string;
  isFinalApprover: boolean; // approval at this level closes the workflow
  elapsedHours: number;
  remainingHours: number;
  tatPercentageUsed: number; // elapsed as a percentage of tatHours
  createdAt: Date;
  updatedAt: Date;
}

/** Payload submitted when acting on an approval level. */
export interface ApprovalAction {
  action: 'APPROVE' | 'REJECT';
  comments?: string;
  rejectionReason?: string; // expected when action === 'REJECT'
}

/** Summary returned to clients after an approval action is applied. */
export interface ApprovalResponse {
  levelId: string;
  status: ApprovalStatus;
  actionDate: Date;
  comments?: string;
  rejectionReason?: string;
}

46
src/types/auth.types.ts Normal file
View File

@ -0,0 +1,46 @@
/**
 * Normalised user profile received from the SSO provider (Okta).
 * email + oktaSub are the only guaranteed fields; everything else depends
 * on which attributes the IdP exposes.
 */
export interface SSOUserData {
  oktaSub: string; // Required - Okta subject identifier
  email: string; // Required - Primary identifier for user lookup
  employeeId?: string; // Optional - HR System Employee ID
  firstName?: string;
  lastName?: string;
  displayName?: string;
  department?: string;
  designation?: string;
  phone?: string;
  reportingManagerId?: string;
}

/** Environment-derived SSO/JWT configuration consumed by AuthService. */
export interface SSOConfig {
  jwtSecret: string;
  jwtExpiry: string;
  refreshTokenExpiry: string;
  sessionSecret: string;
  allowedOrigins: string[];
  oktaDomain: string;
  oktaClientId: string;
  oktaClientSecret: string;
}

/** Pair of application-issued JWTs. */
export interface AuthTokens {
  accessToken: string;
  refreshToken: string;
}

/**
 * Response returned after a successful login/token exchange: the persisted
 * user profile plus application JWTs, and optionally the raw Okta tokens.
 */
export interface LoginResponse {
  user: {
    userId: string;
    employeeId?: string | null;
    email: string;
    firstName?: string | null;
    lastName?: string | null;
    displayName?: string | null;
    department?: string | null;
    designation?: string | null;
    isAdmin: boolean;
  };
  accessToken: string;
  refreshToken: string;
  oktaRefreshToken?: string;
  oktaAccessToken?: string;
}

59
src/types/common.types.ts Normal file
View File

@ -0,0 +1,59 @@
/** Workflow request priority. */
export enum Priority {
STANDARD = 'STANDARD',
EXPRESS = 'EXPRESS'
}
/** Lifecycle states of a workflow request. */
export enum WorkflowStatus {
DRAFT = 'DRAFT',
PENDING = 'PENDING',
IN_PROGRESS = 'IN_PROGRESS',
APPROVED = 'APPROVED',
REJECTED = 'REJECTED',
CLOSED = 'CLOSED'
}
/** States of a single approval level. */
export enum ApprovalStatus {
PENDING = 'PENDING',
IN_PROGRESS = 'IN_PROGRESS',
APPROVED = 'APPROVED',
REJECTED = 'REJECTED',
SKIPPED = 'SKIPPED'
}
/** Role a user plays on a workflow request. */
export enum ParticipantType {
SPECTATOR = 'SPECTATOR',
INITIATOR = 'INITIATOR',
APPROVER = 'APPROVER',
CONSULTATION = 'CONSULTATION'
}
/** Turn-around-time health indicator. */
export enum TATStatus {
ON_TRACK = 'ON_TRACK',
APPROACHING = 'APPROACHING',
BREACHED = 'BREACHED'
}
/** Standard envelope for every JSON API response. */
export interface ApiResponse<T = any> {
success: boolean;
message: string;
data?: T;
error?: string;
timestamp: Date;
}
/** Common pagination query parameters. */
export interface PaginationParams {
page: number; // 1-based
limit: number;
sortBy?: string;
sortOrder?: 'ASC' | 'DESC';
}
/** Paginated list payload returned by list endpoints. */
export interface PaginatedResponse<T> {
data: T[];
pagination: {
page: number;
limit: number;
total: number;
totalPages: number;
};
}

View File

@ -0,0 +1,40 @@
/** A file attached to a workflow request, as persisted. */
export interface Document {
documentId: string;
requestId: string;
uploadedBy: string; // userId of the uploader
fileName: string; // stored (possibly renamed) file name
originalFileName: string; // name as uploaded by the user
fileType: string;
fileExtension: string;
fileSize: number; // bytes — presumed; confirm against upload handler
filePath: string;
storageUrl: string;
mimeType: string;
checksum: string; // content hash for integrity checks
isGoogleDoc: boolean;
googleDocUrl?: string; // set only when isGoogleDoc is true
category: string;
version: number;
parentDocumentId?: string; // previous version, when this is a revision
isDeleted: boolean; // soft-delete flag
downloadCount: number;
uploadedAt: Date;
}
/** Fields the client supplies when registering a new document. */
export interface CreateDocument {
originalFileName: string;
fileType: string;
fileExtension: string;
fileSize: number;
mimeType: string;
checksum: string;
category: string;
isGoogleDoc?: boolean;
googleDocUrl?: string;
}
/** Mutable document fields for update/soft-delete operations. */
export interface UpdateDocument {
originalFileName?: string;
category?: string;
isDeleted?: boolean;
}

41
src/types/express.d.ts vendored Normal file
View File

@ -0,0 +1,41 @@
import { JwtPayload } from 'jsonwebtoken';

/**
 * Global Express augmentation: every Request may carry the authenticated
 * user, the parsed cookie map, and multer upload fields.
 */
declare global {
  namespace Express {
    interface Request {
      user?: {
        userId: string;
        email: string;
        employeeId?: string | null; // Optional - schema not finalized
        role?: string;
      };
      cookies?: {
        accessToken?: string;
        refreshToken?: string;
        [key: string]: string | undefined;
      };
      file?: Express.Multer.File;
      files?: Express.Multer.File[];
    }
  }
}

/**
 * Request shape for handlers mounted behind the auth middleware:
 * `user` is guaranteed present and `role` is required.
 */
export interface AuthenticatedRequest extends Express.Request {
  user: {
    userId: string;
    email: string;
    employeeId?: string | null; // Optional - schema not finalized
    role: string;
  };
  params: any;
  body: any;
  query: any;
  cookies?: {
    accessToken?: string;
    refreshToken?: string;
    [key: string]: string | undefined;
  };
}
// NOTE: the previous trailing `export type { AuthenticatedRequest };` was
// removed — it conflicted with the exported interface of the same name
// (duplicate export declaration). The interface export above is sufficient.

3
src/types/express.ts Normal file
View File

@ -0,0 +1,3 @@
// Re-export AuthenticatedRequest through a regular .ts module so consumers
// can import it normally (ambient .d.ts files are awkward to import from).
export type { AuthenticatedRequest } from './express.d';

9
src/types/index.ts Normal file
View File

@ -0,0 +1,9 @@
/**
 * Barrel module: single import point for the shared type definitions.
 *
 * './express.d' is deliberately NOT star-exported here: both it and
 * './express' export `AuthenticatedRequest`, and star-exporting both makes
 * the name ambiguous, so TypeScript silently omits it from the barrel.
 * Re-exporting only './express' keeps `AuthenticatedRequest` importable
 * from this index.
 */
export * from './common.types';
export * from './user.types';
export * from './workflow.types';
export * from './approval.types';
export * from './document.types';
export * from './notification.types';
export * from './auth.types';
export * from './express';

View File

@ -0,0 +1,41 @@
/** An in-app/email/SMS notification delivered to a user, as persisted. */
export interface Notification {
notificationId: string;
userId: string; // recipient
requestId?: string; // related workflow, when applicable
notificationType: string;
title: string;
message: string;
isRead: boolean;
priority: 'LOW' | 'MEDIUM' | 'HIGH' | 'URGENT';
actionUrl?: string; // deep link the notification points at
actionRequired: boolean;
metadata?: any; // free-form channel/context payload — shape not fixed here
sentVia: string[]; // delivery channels used, e.g. email/sms/push — confirm exact values
emailSent: boolean;
smsSent: boolean;
pushSent: boolean;
readAt?: Date;
expiresAt?: Date; // after this, presumably hidden from the user — confirm
createdAt: Date;
}
/** Fields required to enqueue a new notification. */
export interface CreateNotification {
userId: string;
requestId?: string;
notificationType: string;
title: string;
message: string;
priority?: 'LOW' | 'MEDIUM' | 'HIGH' | 'URGENT';
actionUrl?: string;
actionRequired?: boolean;
metadata?: any;
sentVia?: string[];
expiresAt?: Date;
}
/** Per-user channel opt-in flags. */
export interface NotificationPreferences {
emailEnabled: boolean;
smsEnabled: boolean;
pushEnabled: boolean;
inAppEnabled: boolean;
}

View File

@ -0,0 +1,36 @@
import { ParticipantType } from './common.types';
/** A user attached to a workflow request with per-user permissions. */
export interface Participant {
participantId: string;
requestId: string;
userId: string;
userEmail: string;
userName: string;
participantType: ParticipantType;
canComment: boolean;
canViewDocuments: boolean;
canDownloadDocuments: boolean;
notificationEnabled: boolean;
addedBy: string; // userId of whoever added this participant
addedAt: Date;
isActive: boolean; // false = removed without deleting the row
}
/** Fields needed to add a participant; permission flags default server-side. */
export interface CreateParticipant {
userId: string;
userEmail: string;
userName: string;
participantType: ParticipantType;
canComment?: boolean;
canViewDocuments?: boolean;
canDownloadDocuments?: boolean;
notificationEnabled?: boolean;
}
/** Mutable participant fields (permissions and active flag). */
export interface UpdateParticipant {
canComment?: boolean;
canViewDocuments?: boolean;
canDownloadDocuments?: boolean;
notificationEnabled?: boolean;
isActive?: boolean;
}

42
src/types/user.types.ts Normal file
View File

@ -0,0 +1,42 @@
/** Application user record, as persisted. */
export interface User {
userId: string;
employeeId: string; // HR system identifier
email: string;
firstName: string;
lastName: string;
displayName: string;
department?: string;
designation?: string;
phone?: string;
reportingManagerId?: string; // userId of the manager, when known
isActive: boolean;
isAdmin: boolean;
lastLogin?: Date;
createdAt: Date;
updatedAt: Date;
}
/** Fields required to provision a new user. */
export interface CreateUserData {
employeeId: string;
email: string;
firstName: string;
lastName: string;
displayName: string;
department?: string;
designation?: string;
phone?: string;
reportingManagerId?: string;
}
/** Mutable user fields for profile/admin updates. */
export interface UpdateUserData {
email?: string;
firstName?: string;
lastName?: string;
displayName?: string;
department?: string;
designation?: string;
phone?: string;
reportingManagerId?: string;
isActive?: boolean;
isAdmin?: boolean;
}

View File

@ -0,0 +1,61 @@
import { Priority, WorkflowStatus } from './common.types';
/** A workflow/approval request, as persisted. */
export interface WorkflowRequest {
requestId: string;
requestNumber: string; // human-readable identifier, e.g. REQ-YYYY-NNNNN
initiatorId: string;
templateType: 'CUSTOM' | 'TEMPLATE';
title: string;
description: string;
priority: Priority;
status: WorkflowStatus;
currentLevel: number; // currently active approval level
totalLevels: number;
totalTatHours: number; // sum of level TATs — presumed; confirm in service
submissionDate?: Date; // set when the draft is submitted
closureDate?: Date;
conclusionRemark?: string;
aiGeneratedConclusion?: string;
isDraft: boolean;
isDeleted: boolean; // soft-delete flag
createdAt: Date;
updatedAt: Date;
}
/** Client payload to create a workflow with its approval chain. */
export interface CreateWorkflowRequest {
templateType: 'CUSTOM' | 'TEMPLATE';
title: string;
description: string;
priority: Priority;
approvalLevels: CreateApprovalLevel[];
participants?: CreateParticipant[];
}
/** Mutable workflow fields. */
export interface UpdateWorkflowRequest {
title?: string;
description?: string;
priority?: Priority;
status?: WorkflowStatus;
conclusionRemark?: string;
}
/** One approval level in a creation payload. */
export interface CreateApprovalLevel {
levelNumber: number;
levelName?: string;
approverId: string;
approverEmail: string;
approverName: string;
tatHours: number;
isFinalApprover?: boolean;
}
/** Non-approver participant in a creation payload (narrower than the
 * full ParticipantType union used elsewhere). */
export interface CreateParticipant {
userId: string;
userEmail: string;
userName: string;
participantType: 'SPECTATOR' | 'CONSULTATION';
canComment?: boolean;
canViewDocuments?: boolean;
canDownloadDocuments?: boolean;
notificationEnabled?: boolean;
}

69
src/utils/errorHandler.ts Normal file
View File

@ -0,0 +1,69 @@
import { Request, Response, NextFunction } from 'express';
import logger from './logger';
import { ResponseHandler } from './responseHandler';
/**
 * Operational application error carrying an HTTP status code.
 * `isOperational` marks the failure as expected/handled, distinguishing it
 * from programmer errors when the central error middleware classifies it.
 */
export class AppError extends Error {
  public isOperational: boolean = true;

  constructor(message: string, public statusCode: number = 500) {
    super(message);
    // Trim this constructor from the captured stack trace.
    Error.captureStackTrace(this, this.constructor);
  }
}
/**
 * Central Express error middleware: maps known error shapes to an HTTP
 * status and message, logs the failure with request context, then emits
 * the uniform JSON error payload via ResponseHandler.
 */
export const errorHandler = (
  error: Error,
  req: Request,
  res: Response,
  _next: NextFunction
): void => {
  // Classify the error into a [status, public message] pair.
  const classify = (): [number, string] => {
    if (error instanceof AppError) {
      return [error.statusCode, error.message];
    }
    switch (error.name) {
      case 'ValidationError':
        return [400, 'Validation Error'];
      case 'UnauthorizedError':
        return [401, 'Unauthorized'];
      case 'CastError':
        return [400, 'Invalid ID format'];
      case 'MongoError':
        // 11000 = duplicate key violation
        if ((error as any).code === 11000) {
          return [400, 'Duplicate field value'];
        }
        break;
    }
    return [500, 'Internal Server Error'];
  };
  const [statusCode, message] = classify();
  logger.error('Error occurred:', {
    error: error.message,
    stack: error.stack,
    statusCode,
    url: req.url,
    method: req.method,
    ip: req.ip,
  });
  ResponseHandler.error(res, message, statusCode, error.message);
};
/**
 * 404 catch-all: converts an unmatched route into an AppError and forwards
 * it to the error middleware via next().
 */
export const notFoundHandler = (
  req: Request,
  _res: Response,
  next: NextFunction
): void => {
  next(new AppError(`Route ${req.originalUrl} not found`, 404));
};
/**
 * Wraps an (async) route handler so a rejected promise is forwarded to
 * Express's error middleware instead of producing an unhandled rejection.
 * The previous `Function` parameter type is replaced with a precise
 * handler signature (any real route handler still matches).
 * @param fn - Handler that may return a promise.
 * @returns A handler safe to mount directly on a route.
 */
export const asyncHandler = (
  fn: (req: Request, res: Response, next: NextFunction) => unknown
) => {
  return (req: Request, res: Response, next: NextFunction): void => {
    // Promise.resolve also covers synchronous return values and throws
    // that happen after the first await.
    Promise.resolve(fn(req, res, next)).catch(next);
  };
};

60
src/utils/helpers.ts Normal file
View File

@ -0,0 +1,60 @@
/**
 * Builds a human-readable request number: REQ-<year>-<5 random digits>.
 * Randomness alone does not guarantee uniqueness; a DB unique constraint
 * should enforce it.
 */
export const generateRequestNumber = (): string => {
  const serial = String(Math.floor(Math.random() * 100000)).padStart(5, '0');
  return `REQ-${new Date().getFullYear()}-${serial}`;
};
/** Converts a TAT in hours to whole days, rounding any partial day up. */
export const calculateTATDays = (tatHours: number): number => Math.ceil(tatHours / 24);
/**
 * Hours elapsed between startTime and endTime (defaulting to now),
 * rounded to two decimal places.
 */
export const calculateElapsedHours = (startTime: Date, endTime?: Date): number => {
  const stopMs = (endTime ?? new Date()).getTime();
  const hours = (stopMs - startTime.getTime()) / (1000 * 60 * 60);
  return Math.round(hours * 100) / 100;
};
/**
 * Percentage of the allotted TAT consumed, as an integer capped at 100.
 * Returns 0 when no TAT is allotted, avoiding division by zero.
 */
export const calculateTATPercentage = (elapsedHours: number, totalTatHours: number): number => {
  if (totalTatHours === 0) {
    return 0;
  }
  const pct = Math.round((elapsedHours / totalTatHours) * 100);
  return pct > 100 ? 100 : pct;
};
/** True once the elapsed time strictly exceeds the allotted TAT. */
export const isTATBreached = (elapsedHours: number, totalTatHours: number): boolean =>
  elapsedHours > totalTatHours;
/**
 * True while TAT usage sits in the warning window: at or above `threshold`
 * percent (default 80) but not yet at 100 percent.
 * (Percentage computed inline exactly as calculateTATPercentage does.)
 */
export const isTATApproaching = (elapsedHours: number, totalTatHours: number, threshold: number = 80): boolean => {
  const pct =
    totalTatHours === 0
      ? 0
      : Math.min(Math.round((elapsedHours / totalTatHours) * 100), 100);
  return pct >= threshold && pct < 100;
};
/** ISO calendar date (YYYY-MM-DD) for the given Date, in UTC. */
export const formatDate = (date: Date): string => date.toISOString().slice(0, 10);
/** Full ISO-8601 UTC timestamp for the given Date. */
export const formatDateTime = (date: Date): string => date.toISOString();
/**
 * Lightweight email shape check: non-space local part, '@', and a domain
 * containing a dot. Intentionally not a full RFC 5322 validation.
 */
export const isValidEmail = (email: string): boolean =>
  /^[^\s@]+@[^\s@]+\.[^\s@]+$/.test(email);
/** Validates an RFC 4122 UUID (versions 1-5, variant 8/9/a/b), case-insensitive. */
export const isValidUUID = (uuid: string): boolean => {
  const pattern =
    /^[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$/i;
  return pattern.test(uuid);
};
/** Trims surrounding whitespace and strips angle brackets (naive HTML guard). */
export const sanitizeString = (str: string): string => str.trim().replace(/[<>]/g, '');
/** SHA-256 hex digest of the given string (used for file integrity checks). */
export const generateChecksum = (data: string): string => {
  // Local require mirrors the original; the hash dependency stays scoped
  // to this helper.
  const { createHash } = require('crypto');
  return createHash('sha256').update(data).digest('hex');
};
/** Resolves after roughly `ms` milliseconds; useful for retry/backoff delays. */
export const sleep = (ms: number): Promise<void> =>
  new Promise<void>((resolve) => {
    setTimeout(resolve, ms);
  });

55
src/utils/logger.ts Normal file
View File

@ -0,0 +1,55 @@
import winston from 'winston';
import path from 'path';
// Log directory is configurable; defaults to ./logs relative to the CWD.
const logDir = process.env.LOG_FILE_PATH || './logs';
// Singleton winston logger: JSON-formatted, timestamped, with error stacks
// captured. Level is configurable via LOG_LEVEL (default 'info').
const logger = winston.createLogger({
level: process.env.LOG_LEVEL || 'info',
format: winston.format.combine(
winston.format.timestamp({
format: 'YYYY-MM-DD HH:mm:ss',
}),
winston.format.errors({ stack: true }),
winston.format.json()
),
defaultMeta: { service: 're-workflow-backend' },
transports: [
// error.log receives only 'error'-level entries; rotated at 5MB x 5 files.
new winston.transports.File({
filename: path.join(logDir, 'error.log'),
level: 'error',
maxsize: 5242880, // 5MB
maxFiles: 5,
}),
// combined.log receives everything at the logger's level and below.
new winston.transports.File({
filename: path.join(logDir, 'combined.log'),
maxsize: 5242880, // 5MB
maxFiles: 5,
}),
],
});
// Outside production, mirror logs to the console in a colorized,
// human-readable format.
if (process.env.NODE_ENV !== 'production') {
logger.add(
new winston.transports.Console({
format: winston.format.combine(
winston.format.colorize(),
winston.format.simple()
),
})
);
}
// Attach a `stream` object so Morgan HTTP logging can pipe into winston.
// The `as any` cast works around winston's Logger type not declaring
// `stream`; the export below casts back so consumers see a winston.Logger.
const loggerWithStream = logger as any;
loggerWithStream.stream = {
write: (message: string) => {
logger.info(message.trim());
},
};
export default loggerWithStream as winston.Logger;

View File

@ -0,0 +1,97 @@
import { Response } from 'express';
import { ApiResponse } from '../types/common.types';
import logger from './logger';
/**
 * Centralised helpers for writing JSON API responses in the standard
 * ApiResponse envelope: { success, message, data?, error?, timestamp }.
 * Each method logs before writing, so the log line always precedes the
 * wire response. The envelope construction previously duplicated in every
 * method is factored into the private `emit` helper.
 */
export class ResponseHandler {
  /** Stamps `timestamp` onto the payload and writes it with `statusCode`. */
  private static emit<T>(
    res: Response,
    statusCode: number,
    payload: Omit<ApiResponse<T>, 'timestamp'>
  ): void {
    const response: ApiResponse<T> = { ...payload, timestamp: new Date() };
    res.status(statusCode).json(response);
  }

  /** Success response (default 200) carrying a data payload. */
  static success<T>(
    res: Response,
    data: T,
    message: string = 'Success',
    statusCode: number = 200
  ): void {
    logger.info(`Success response: ${message}`, { statusCode, data });
    ResponseHandler.emit<T>(res, statusCode, { success: true, message, data });
  }

  /** Generic failure response; `error` carries the low-level detail string. */
  static error(
    res: Response,
    message: string = 'Internal Server Error',
    statusCode: number = 500,
    error?: string
  ): void {
    logger.error(`Error response: ${message}`, { statusCode, error });
    ResponseHandler.emit(res, statusCode, { success: false, message, error });
  }

  /** 400 response for request-validation failures; `errors` holds details. */
  static validationError(
    res: Response,
    message: string = 'Validation Error',
    errors?: any
  ): void {
    logger.warn(`Validation error: ${message}`, { errors });
    ResponseHandler.emit(res, 400, { success: false, message, error: errors });
  }

  /** 404 response for a missing resource. */
  static notFound(
    res: Response,
    message: string = 'Resource not found'
  ): void {
    logger.warn(`Not found: ${message}`);
    ResponseHandler.emit(res, 404, { success: false, message });
  }

  /** 401 response for missing or invalid credentials. */
  static unauthorized(
    res: Response,
    message: string = 'Unauthorized access'
  ): void {
    logger.warn(`Unauthorized: ${message}`);
    ResponseHandler.emit(res, 401, { success: false, message });
  }

  /** 403 response for authenticated but not-permitted callers. */
  static forbidden(
    res: Response,
    message: string = 'Forbidden access'
  ): void {
    logger.warn(`Forbidden: ${message}`);
    ResponseHandler.emit(res, 403, { success: false, message });
  }
}

View File

@ -0,0 +1,16 @@
import { z } from 'zod';
/** Body schema for an approve/reject action on an approval level. */
export const approvalActionSchema = z.object({
action: z.enum(['APPROVE', 'REJECT']),
comments: z.string().optional(),
rejectionReason: z.string().optional(),
});
/** Route params for endpoints addressing a specific approval level. */
export const approvalParamsSchema = z.object({
id: z.string().uuid('Invalid workflow ID'),
levelId: z.string().uuid('Invalid approval level ID'),
});
/** Parses and validates an approval-action body; throws ZodError on failure. */
export const validateApprovalAction = (data: any) => {
return approvalActionSchema.parse(data);
};

View File

@ -0,0 +1,35 @@
import { z } from 'zod';
/** Payload received from the SSO (Okta) callback. */
export const ssoCallbackSchema = z.object({
oktaSub: z.string().min(1, 'Okta sub is required'),
email: z.string().email('Valid email is required'),
employeeId: z.string().optional(), // Made optional
firstName: z.string().optional(),
lastName: z.string().optional(),
displayName: z.string().optional(),
department: z.string().optional(),
designation: z.string().optional(),
phone: z.string().optional(),
reportingManagerId: z.string().uuid().optional(),
});
/** Body schema for the refresh-token endpoint. */
export const refreshTokenSchema = z.object({
refreshToken: z.string().min(1, 'Refresh token is required'),
});
/** Body schema for exchanging an OAuth authorization code for tokens. */
export const tokenExchangeSchema = z.object({
code: z.string().min(1, 'Authorization code is required'),
redirectUri: z.string().url('Valid redirect URI is required'),
});
/** Parses the SSO callback payload; throws ZodError on failure. */
export const validateSSOCallback = (data: any) => {
return ssoCallbackSchema.parse(data);
};
/** Parses a refresh-token body; throws ZodError on failure. */
export const validateRefreshToken = (data: any) => {
return refreshTokenSchema.parse(data);
};
/** Parses a token-exchange body; throws ZodError on failure. */
export const validateTokenExchange = (data: any) => {
return tokenExchangeSchema.parse(data);
};

View File

@ -0,0 +1,16 @@
import { z } from 'zod';
/** Route params for endpoints addressing a specific workflow document. */
export const documentParamsSchema = z.object({
id: z.string().uuid('Invalid workflow ID'),
documentId: z.string().uuid('Invalid document ID'),
});
/** Body schema for updating document metadata (all fields optional). */
export const updateDocumentSchema = z.object({
originalFileName: z.string().min(1).optional(),
category: z.enum(['SUPPORTING', 'APPROVAL', 'REFERENCE', 'FINAL', 'OTHER'] as const).optional(),
isDeleted: z.boolean().optional(),
});
/** Parses a document-update body; throws ZodError on failure. */
export const validateUpdateDocument = (data: any) => {
return updateDocumentSchema.parse(data);
};

View File

@ -0,0 +1,33 @@
import { z } from 'zod';
/** Body schema for adding a participant to a workflow. */
export const createParticipantSchema = z.object({
userId: z.string().uuid(),
userEmail: z.string().email(),
userName: z.string().min(1),
participantType: z.enum(['SPECTATOR', 'CONSULTATION'] as const),
canComment: z.boolean().optional(),
canViewDocuments: z.boolean().optional(),
canDownloadDocuments: z.boolean().optional(),
notificationEnabled: z.boolean().optional(),
});
/** Body schema for updating a participant's permissions/active flag. */
export const updateParticipantSchema = z.object({
canComment: z.boolean().optional(),
canViewDocuments: z.boolean().optional(),
canDownloadDocuments: z.boolean().optional(),
notificationEnabled: z.boolean().optional(),
isActive: z.boolean().optional(),
});
/** Route params for endpoints addressing a specific participant. */
export const participantParamsSchema = z.object({
id: z.string().uuid('Invalid workflow ID'),
participantId: z.string().uuid('Invalid participant ID'),
});
/** Parses a create-participant body; throws ZodError on failure. */
export const validateCreateParticipant = (data: any) => {
return createParticipantSchema.parse(data);
};
/** Parses an update-participant body; throws ZodError on failure. */
export const validateUpdateParticipant = (data: any) => {
return updateParticipantSchema.parse(data);
};

View File

@ -0,0 +1,56 @@
import { z } from 'zod';
/**
 * Body schema for creating a workflow: requires 1-10 approval levels and
 * allows optional non-approver participants.
 */
export const createWorkflowSchema = z.object({
templateType: z.enum(['CUSTOM', 'TEMPLATE']),
title: z.string().min(1, 'Title is required').max(500, 'Title too long'),
description: z.string().min(1, 'Description is required'),
priority: z.enum(['STANDARD', 'EXPRESS'] as const),
approvalLevels: z.array(z.object({
levelNumber: z.number().int().min(1).max(10),
levelName: z.string().optional(),
approverId: z.string().uuid(),
approverEmail: z.string().email(),
approverName: z.string().min(1),
tatHours: z.number().positive(),
isFinalApprover: z.boolean().optional(),
})).min(1, 'At least one approval level is required').max(10, 'Maximum 10 approval levels allowed'),
participants: z.array(z.object({
userId: z.string().uuid(),
userEmail: z.string().email(),
userName: z.string().min(1),
participantType: z.enum(['SPECTATOR', 'CONSULTATION'] as const),
canComment: z.boolean().optional(),
canViewDocuments: z.boolean().optional(),
canDownloadDocuments: z.boolean().optional(),
notificationEnabled: z.boolean().optional(),
})).optional(),
});
/** Body schema for partial workflow updates. */
export const updateWorkflowSchema = z.object({
title: z.string().min(1).max(500).optional(),
description: z.string().min(1).optional(),
priority: z.enum(['STANDARD', 'EXPRESS'] as const).optional(),
status: z.enum(['DRAFT', 'PENDING', 'IN_PROGRESS', 'APPROVED', 'REJECTED', 'CLOSED'] as const).optional(),
conclusionRemark: z.string().optional(),
});
/** Route params for endpoints addressing a single workflow. */
export const workflowParamsSchema = z.object({
id: z.string().uuid('Invalid workflow ID'),
});
/** Query-string schema for workflow listing: pagination, filters, sorting. */
export const workflowQuerySchema = z.object({
page: z.string().transform(Number).pipe(z.number().int().min(1)).optional(),
limit: z.string().transform(Number).pipe(z.number().int().min(1).max(100)).optional(),
status: z.enum(['DRAFT', 'PENDING', 'IN_PROGRESS', 'APPROVED', 'REJECTED', 'CLOSED'] as const).optional(),
priority: z.enum(['STANDARD', 'EXPRESS'] as const).optional(),
sortBy: z.string().optional(),
sortOrder: z.enum(['ASC', 'DESC'] as const).optional(),
});
/** Parses a create-workflow body; throws ZodError on failure. */
export const validateCreateWorkflow = (data: any) => {
return createWorkflowSchema.parse(data);
};
/** Parses an update-workflow body; throws ZodError on failure. */
export const validateUpdateWorkflow = (data: any) => {
return updateWorkflowSchema.parse(data);
};

22
tests/setup.js Normal file
View File

@ -0,0 +1,22 @@
// Test setup file: global Jest-style lifecycle hooks shared by all suites.
import { sequelize } from '../src/models';
beforeAll(async () => {
// Verify the test database is reachable before any suite runs.
await sequelize.authenticate();
});
afterAll(async () => {
// Release the connection pool so the test process can exit cleanly.
await sequelize.close();
});
beforeEach(async () => {
// Clean up test data before each test
// Add cleanup logic here
// NOTE(review): intentionally a placeholder — no per-test cleanup yet.
});
afterEach(async () => {
// Clean up test data after each test
// Add cleanup logic here
// NOTE(review): intentionally a placeholder — no per-test cleanup yet.
});

70
tsconfig.json Normal file
View File

@ -0,0 +1,70 @@
{
"compilerOptions": {
"target": "ES2021",
"module": "commonjs",
"lib": ["ES2021"],
"outDir": "./dist",
"rootDir": "./src",
"strict": true,
"esModuleInterop": true,
"skipLibCheck": true,
"forceConsistentCasingInFileNames": true,
"resolveJsonModule": true,
"moduleResolution": "node",
"declaration": true,
"declarationMap": true,
"sourceMap": true,
"noImplicitAny": true,
"strictNullChecks": true,
"strictFunctionTypes": true,
"noUnusedLocals": false,
"noUnusedParameters": false,
"noImplicitReturns": true,
"noFallthroughCasesInSwitch": true,
"types": ["node", "jest"],
"typeRoots": ["./node_modules/@types", "./src/types"],
"baseUrl": "./src",
"paths": {
"@/*": ["./*"],
"@controllers/*": ["./controllers/*"],
"@middlewares/*": ["./middlewares/*"],
"@services/*": ["./services/*"],
"@models/*": ["./models/*"],
"@routes/*": ["./routes/*"],
"@validators/*": ["./validators/*"],
"@utils/*": ["./utils/*"],
"@types/*": ["./types/*"],
"@config/*": ["./config/*"]
}
},
"include": [
"src/app.ts",
"src/server.ts",
"src/routes/index.ts",
"src/routes/auth.routes.ts",
"src/controllers/auth.controller.ts",
"src/services/auth.service.ts",
"src/middlewares/auth.middleware.ts",
"src/middlewares/cors.middleware.ts",
"src/middlewares/validate.middleware.ts",
"src/middlewares/errorHandler.middleware.ts",
"src/utils/logger.ts",
"src/utils/responseHandler.ts",
"src/config/**/*",
"src/types/**/*",
"src/validators/auth.validator.ts",
"src/models/**/*"
],
"exclude": [
"node_modules",
"dist",
"tests",
"**/*.test.ts",
"**/*.spec.ts",
"src/routes/workflow.routes.ts",
"src/controllers/workflow.controller.ts",
"src/controllers/approval.controller.ts",
"src/services/workflow.service.ts",
"src/services/approval.service.ts"
]
}