bug fixes and admin screen added
This commit is contained in:
parent
8968b86a9f
commit
43b0a493f4
241
Business_Days_Calculation_Recommendations.md
Normal file
@ -0,0 +1,241 @@
# Business Days Calculation - Current Issues & Recommendations

## 🔴 **CRITICAL ISSUE: TAT Processor Using Wrong Calculation**

### Current Problem:
In `Re_Backend/src/queues/tatProcessor.ts` (lines 64-65), the TAT calculation uses **simple calendar hours**:

```typescript
const elapsedMs = now.getTime() - new Date(levelStartTime).getTime();
const elapsedHours = elapsedMs / (1000 * 60 * 60);
```

**This is WRONG because:**
- ❌ Counts ALL hours (24/7), including nights, weekends, holidays
- ❌ Doesn't respect working hours (9 AM - 6 PM)
- ❌ Doesn't exclude weekends for STANDARD priority
- ❌ Doesn't exclude holidays
- ❌ Causes incorrect TAT breach alerts

### ✅ **Solution Available:**
You already have a proper function `calculateElapsedWorkingHours()` in `tatTimeUtils.ts` that:
- ✅ Respects working hours (9 AM - 6 PM)
- ✅ Excludes weekends for STANDARD priority
- ✅ Excludes holidays
- ✅ Handles EXPRESS vs STANDARD differently
- ✅ Uses minute-by-minute precision

### 🔧 **Fix Required:**

**Update `tatProcessor.ts` to use proper working hours calculation:**

```typescript
// BEFORE (WRONG):
const elapsedMs = now.getTime() - new Date(levelStartTime).getTime();
const elapsedHours = elapsedMs / (1000 * 60 * 60);

// AFTER (CORRECT):
import { calculateElapsedWorkingHours } from '@utils/tatTimeUtils';
const priority = ((workflow as any).priority || 'STANDARD').toString().toLowerCase();
const elapsedHours = await calculateElapsedWorkingHours(levelStartTime, now, priority);
```

---

## 📊 **Business Days Calculation for Workflow Aging Report**

### Current Situation:
- ✅ You have `calculateElapsedWorkingHours()` - calculates hours
- ❌ You DON'T have `calculateBusinessDays()` - calculates days

### Need:
For the **Workflow Aging Report**, you need to show "Days Open" as **business days** (excluding weekends and holidays), not calendar days.

### 🔧 **Solution: Add Business Days Function**

Add this function to `Re_Backend/src/utils/tatTimeUtils.ts`:

```typescript
/**
 * Calculate business days between two dates
 * Excludes weekends and holidays
 * @param startDate - Start date
 * @param endDate - End date (defaults to now)
 * @param priority - 'express' or 'standard' (express includes weekends, standard excludes)
 * @returns Number of business days
 */
export async function calculateBusinessDays(
  startDate: Date | string,
  endDate: Date | string | null = null,
  priority: string = 'standard'
): Promise<number> {
  await loadWorkingHoursCache();
  await loadHolidaysCache();

  let start = dayjs(startDate).startOf('day');
  const end = dayjs(endDate || new Date()).startOf('day');

  // In test mode, use calendar days
  if (isTestMode()) {
    return end.diff(start, 'day') + 1;
  }

  const config = workingHoursCache || {
    startHour: TAT_CONFIG.WORK_START_HOUR,
    endHour: TAT_CONFIG.WORK_END_HOUR,
    startDay: TAT_CONFIG.WORK_START_DAY,
    endDay: TAT_CONFIG.WORK_END_DAY
  };

  let businessDays = 0;
  let current = start;

  // Count each day from start to end (inclusive)
  while (current.isBefore(end) || current.isSame(end, 'day')) {
    const dayOfWeek = current.day(); // 0 = Sunday, 6 = Saturday
    const dateStr = current.format('YYYY-MM-DD');

    // For express priority: count all days (including weekends) but exclude holidays
    // For standard priority: count only working days (Mon-Fri) and exclude holidays
    const isWorkingDay = priority === 'express'
      ? true // Express includes weekends
      : (dayOfWeek >= config.startDay && dayOfWeek <= config.endDay);

    const isNotHoliday = !holidaysCache.has(dateStr);

    if (isWorkingDay && isNotHoliday) {
      businessDays++;
    }

    current = current.add(1, 'day');

    // Safety check to prevent infinite loops
    if (current.diff(start, 'day') > 730) { // 2 years
      console.error('[TAT] Safety break - exceeded 2 years in business days calculation');
      break;
    }
  }

  return businessDays;
}
```
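
Once this function exists, the Workflow Aging Report can call it per request. The snippet below is a minimal sketch of that usage, assuming a workflow row exposes `submissionDate` and `priority` and that a `threshold` in business days is supplied by the caller; the actual field and variable names in `dashboard.service.ts` may differ.

```typescript
// Sketch only - the field names (submissionDate, priority) and threshold handling
// are assumptions, not the exact dashboard.service.ts implementation.
import { calculateBusinessDays } from '@utils/tatTimeUtils';

async function buildAgingRow(request: any, threshold: number) {
  const priority = String(request.priority || 'STANDARD').toLowerCase();

  // "Days Open" counted in business days from submission until now
  const daysOpen = await calculateBusinessDays(request.submissionDate, null, priority);

  return {
    requestId: request.requestId,
    daysOpen,
    exceedsThreshold: daysOpen > threshold // the report filters on this flag
  };
}
```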
---

## 📋 **Summary of Issues & Fixes**

### Issue 1: TAT Processor Using Calendar Hours ✅ **FIXED**
- **File:** `Re_Backend/src/queues/tatProcessor.ts`
- **Lines:** 64-65 (now 66-77)
- **Problem:** Used simple calendar hours instead of working hours
- **Impact:** Incorrect TAT breach calculations
- **Fix:** ✅ Replaced with `calculateElapsedWorkingHours()` and `addWorkingHours()`/`addWorkingHoursExpress()`
- **Status:** ✅ **COMPLETED** - Now uses proper working hours calculation

### Issue 2: Missing Business Days Function ✅ **FIXED**
- **File:** `Re_Backend/src/utils/tatTimeUtils.ts`
- **Problem:** No function to calculate a business days count
- **Impact:** Workflow Aging Report showed calendar days instead of business days
- **Fix:** ✅ Added `calculateBusinessDays()` function (lines 697-758)
- **Status:** ✅ **COMPLETED** - Function implemented and exported

### Issue 3: Workflow Aging Report Using Calendar Days ✅ **FIXED**
- **File:** `Re_Backend/src/services/dashboard.service.ts`
- **Problem:** Would have used calendar days if not fixed
- **Impact:** Incorrect "Days Open" calculation
- **Fix:** ✅ Uses `calculateBusinessDays()` in the report endpoint (`getWorkflowAgingReport` method)
- **Status:** ✅ **COMPLETED** - Report now uses the business days calculation

---

## 🛠️ **Implementation Steps** ✅ **ALL COMPLETED**

### Step 1: Fix TAT Processor (CRITICAL) ✅ **DONE**
1. ✅ Opened `Re_Backend/src/queues/tatProcessor.ts`
2. ✅ Imported `calculateElapsedWorkingHours`, `addWorkingHours`, `addWorkingHoursExpress` from `@utils/tatTimeUtils`
3. ✅ Replaced lines 64-65 with the proper working hours calculation (now lines 66-77)
4. ✅ Gets priority from the workflow
5. ⏳ **TODO:** Test TAT breach alerts

### Step 2: Add Business Days Function ✅ **DONE**
1. ✅ Opened `Re_Backend/src/utils/tatTimeUtils.ts`
2. ✅ Added `calculateBusinessDays()` function (lines 697-758)
3. ✅ Exported the function
4. ⏳ **TODO:** Test with various date ranges

### Step 3: Update Workflow Aging Report ✅ **DONE**
1. ✅ Built the report endpoint using `calculateBusinessDays()`
2. ✅ Filters requests where `businessDays > threshold`
3. ✅ Displays business days instead of calendar days

---

## ✅ **What's Already Working**

- ✅ `calculateElapsedWorkingHours()` - Properly calculates working hours
- ✅ `calculateSLAStatus()` - Comprehensive SLA calculation
- ✅ Working hours configuration (from admin settings)
- ✅ Holiday support (from database)
- ✅ Priority-based calculation (express vs standard)
- ✅ Used correctly in `approval.service.ts` and `dashboard.service.ts`

---

## 🎯 **Priority Order**

1. **🔴 CRITICAL:** Fix TAT Processor (affects all TAT calculations)
2. **🟡 HIGH:** Add Business Days Function (needed for reports)
3. **🟡 HIGH:** Update Workflow Aging Report to use business days

---

## 📝 **Code Example: Fixed TAT Processor**

```typescript
// In tatProcessor.ts, around lines 60-70
import { calculateElapsedWorkingHours } from '@utils/tatTimeUtils';

// ... existing code ...

const tatHours = Number((approvalLevel as any).tatHours || 0);
const levelStartTime = (approvalLevel as any).levelStartTime || (approvalLevel as any).createdAt;
const now = new Date();

// FIXED: Use proper working hours calculation
const priority = ((workflow as any).priority || 'STANDARD').toString().toLowerCase();
const elapsedHours = await calculateElapsedWorkingHours(levelStartTime, now, priority);
const remainingHours = Math.max(0, tatHours - elapsedHours);
const expectedCompletionTime = dayjs(levelStartTime).add(tatHours, 'hour').toDate();

// ... rest of code ...
```
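
As a follow-on, the elapsed working hours typically drive the alert flags tracked on the level (`tat50_alert_sent`, `tat75_alert_sent`, `tat_breached`). The sketch below shows one way that check could look; the helper calls and exact flag names are assumptions, not the actual `tatProcessor.ts` code.

```typescript
// Illustrative only - markTatBreached/sendTatAlert are assumed helpers, and the
// flag names mirror the approval_levels columns (tat50_alert_sent, tat75_alert_sent).
const percentageUsed = tatHours > 0 ? (elapsedHours / tatHours) * 100 : 0;

if (percentageUsed >= 100 && !(approvalLevel as any).tatBreached) {
  // 100% of the working-hours TAT consumed -> breach
  // await markTatBreached(approvalLevel, expectedCompletionTime);
} else if (percentageUsed >= 75 && !(approvalLevel as any).tat75AlertSent) {
  // await sendTatAlert(approvalLevel, 75);
} else if (percentageUsed >= 50 && !(approvalLevel as any).tat50AlertSent) {
  // await sendTatAlert(approvalLevel, 50);
}
```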
---

## 🧪 **Testing Recommendations**

1. **Test TAT Breach Calculation:**
   - Create a request with an 8-hour TAT
   - Submit on Friday 5 PM
   - Should NOT breach until around Monday 4 PM (1 working hour remains Friday 5-6 PM; the remaining 7 hours run from Monday 9 AM)
   - With the old calendar-hours logic it would breach on Saturday 1 AM (wrong!)

2. **Test Business Days:**
   - Start: Monday, Jan 1
   - End: Friday, Jan 5
   - Should return: 5 business days when no holidays fall in that week (fewer if any of those days is a configured holiday)

3. **Test Express vs Standard:**
   - Express: should count weekends
   - Standard: should exclude weekends
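
A minimal automated version of the business-days scenarios above could look like the following. It assumes a Jest-style runner, that `calculateBusinessDays` is exported from `tatTimeUtils` as described, and that `isTestMode()` is disabled so the working-day branch actually runs; seeding the holiday cache from the database is out of scope here.

```typescript
// Sketch of the scenarios above, assuming a Jest-style runner and no holidays
// configured for the chosen week (2024-01-01 is a Monday).
import { calculateBusinessDays } from '@utils/tatTimeUtils';

describe('calculateBusinessDays', () => {
  it('counts Monday through Friday as 5 business days (standard)', async () => {
    const days = await calculateBusinessDays('2024-01-01', '2024-01-05', 'standard');
    expect(days).toBe(5);
  });

  it('excludes the weekend for standard priority', async () => {
    // Friday through Monday inclusive: only Friday and Monday count
    const days = await calculateBusinessDays('2024-01-05', '2024-01-08', 'standard');
    expect(days).toBe(2);
  });

  it('includes the weekend for express priority', async () => {
    const days = await calculateBusinessDays('2024-01-05', '2024-01-08', 'express');
    expect(days).toBe(4);
  });
});
```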
---

## 📚 **Related Files**

- `Re_Backend/src/queues/tatProcessor.ts` - ✅ **FIXED** - Now uses `calculateElapsedWorkingHours()` and proper deadline calculation
- `Re_Backend/src/utils/tatTimeUtils.ts` - ✅ **FIXED** - Added `calculateBusinessDays()` function
- `Re_Backend/src/services/approval.service.ts` - ✅ Already using correct calculation
- `Re_Backend/src/services/dashboard.service.ts` - ✅ **FIXED** - Uses `calculateBusinessDays()` in Workflow Aging Report
- `Re_Backend/src/services/workflow.service.ts` - ✅ Already using correct calculation
535
Data_Collection_Analysis.md
Normal file
@ -0,0 +1,535 @@
# Data Collection Analysis - What We Have vs What We're Collecting

## Overview
This document compares the database structure with what we're currently collecting and recommends what we should start collecting for the Detailed Reports.

---

## 1. ACTIVITIES TABLE

### ✅ **Database Fields Available:**
```sql
- activity_id (PK)
- request_id (FK) ✅ COLLECTING
- user_id (FK) ✅ COLLECTING
- user_name ✅ COLLECTING
- activity_type ✅ COLLECTING
- activity_description ✅ COLLECTING
- activity_category ❌ NOT COLLECTING (set to NULL)
- severity ❌ NOT COLLECTING (set to NULL)
- metadata ✅ COLLECTING (partially)
- is_system_event ✅ COLLECTING
- ip_address ❌ NOT COLLECTING (set to NULL)
- user_agent ❌ NOT COLLECTING (set to NULL)
- created_at ✅ COLLECTING
```

### 🔴 **Currently NOT Collecting (But Should):**

1. **IP Address** (`ip_address`)
   - **Status:** Field exists, but always set to `null`
   - **Impact:** Cannot show IP in the User Activity Log Report
   - **Fix:** Extract from `req.ip` or `req.headers['x-forwarded-for']` in controllers
   - **Priority:** HIGH (needed for security/audit)

2. **User Agent** (`user_agent`)
   - **Status:** Field exists, but always set to `null`
   - **Impact:** Cannot show device/browser info in reports
   - **Fix:** Extract from `req.headers['user-agent']` in controllers
   - **Priority:** MEDIUM (nice to have for analytics)

3. **Activity Category** (`activity_category`)
   - **Status:** Field exists, but always set to `null`
   - **Impact:** Cannot categorize activities (e.g., "AUTHENTICATION", "WORKFLOW", "DOCUMENT")
   - **Fix:** Map `activity_type` to category:
     - `created`, `approval`, `rejection`, `status_change` → "WORKFLOW"
     - `comment` → "COLLABORATION"
     - `document_added` → "DOCUMENT"
     - `sla_warning` → "SYSTEM"
   - **Priority:** MEDIUM (helps with filtering/reporting)

4. **Severity** (`severity`)
   - **Status:** Field exists, but always set to `null`
   - **Impact:** Cannot prioritize critical activities
   - **Fix:** Map based on activity type:
     - `rejection`, `sla_warning` → "WARNING"
     - `approval`, `closed` → "INFO"
     - `status_change` → "INFO"
   - **Priority:** LOW (optional enhancement)

### 📝 **Recommendation:**
**Update `activity.service.ts` to accept and store:**
```typescript
async log(entry: ActivityEntry & {
  ipAddress?: string;
  userAgent?: string;
  category?: string;
  severity?: string;
}) {
  // ... existing code ...
  const activityData = {
    // ... existing fields ...
    ipAddress: entry.ipAddress || null,
    userAgent: entry.userAgent || null,
    activityCategory: entry.category || this.inferCategory(entry.type),
    severity: entry.severity || this.inferSeverity(entry.type),
  };
}
```

**Update all controller calls to pass IP and User Agent:**
```typescript
activityService.log({
  // ... existing fields ...
  ipAddress: req.ip || req.headers['x-forwarded-for'] || null,
  userAgent: req.headers['user-agent'] || null,
});
```
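
To avoid repeating the header extraction in every controller, this can be centralized in a small helper (the controller changes elsewhere in this commit import one as `getRequestMetadata` from `@utils/requestUtils`). The sketch below is one possible shape for such a helper; the repository's actual implementation may differ.

```typescript
// Possible shape of a shared helper (an assumption - not necessarily the repo's
// exact requestUtils implementation). req.ip only reflects X-Forwarded-For when
// app.set('trust proxy', ...) is configured, as done in app.ts.
import type { Request } from 'express';

export interface RequestMetadata {
  ipAddress: string | null;
  userAgent: string | null;
}

export function getRequestMetadata(req: Request): RequestMetadata {
  const forwarded = req.headers['x-forwarded-for'];
  const forwardedIp = Array.isArray(forwarded)
    ? forwarded[0]
    : forwarded?.split(',')[0]?.trim();

  return {
    ipAddress: req.ip || forwardedIp || null,
    userAgent: req.headers['user-agent'] || null,
  };
}
```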
---

## 2. APPROVAL_LEVELS TABLE

### ✅ **Database Fields Available:**
```sql
- level_id (PK)
- request_id (FK) ✅ COLLECTING
- level_number ✅ COLLECTING
- level_name ❌ OPTIONAL (may not be set)
- approver_id (FK) ✅ COLLECTING
- approver_email ✅ COLLECTING
- approver_name ✅ COLLECTING
- tat_hours ✅ COLLECTING
- tat_days ✅ COLLECTING (auto-calculated)
- status ✅ COLLECTING
- level_start_time ✅ COLLECTING
- level_end_time ✅ COLLECTING
- action_date ✅ COLLECTING
- comments ✅ COLLECTING
- rejection_reason ✅ COLLECTING
- is_final_approver ✅ COLLECTING
- elapsed_hours ✅ COLLECTING
- remaining_hours ✅ COLLECTING
- tat_percentage_used ✅ COLLECTING
- tat50_alert_sent ✅ COLLECTING
- tat75_alert_sent ✅ COLLECTING
- tat_breached ✅ COLLECTING
- tat_start_time ✅ COLLECTING
- created_at ✅ COLLECTING
- updated_at ✅ COLLECTING
```

### 🔴 **Currently NOT Collecting (But Should):**

1. **Level Name** (`level_name`)
   - **Status:** Field exists, but may be NULL
   - **Impact:** Cannot show the stage name in reports (only the level number)
   - **Fix:** When creating approval levels, prompt for or auto-generate level names:
     - "Department Head Review"
     - "Finance Approval"
     - "Final Approval"
   - **Priority:** MEDIUM (improves report readability)

### 📝 **Recommendation:**
**Ensure `level_name` is set when creating approval levels:**
```typescript
await ApprovalLevel.create({
  // ... existing fields ...
  levelName: levelData.levelName || `Level ${levelNumber}`,
});
```

---

## 3. USER_SESSIONS TABLE

### ✅ **Database Fields Available:**
```sql
- session_id (PK)
- user_id (FK)
- session_token ✅ COLLECTING
- refresh_token ✅ COLLECTING
- ip_address ❓ CHECK IF COLLECTING
- user_agent ❓ CHECK IF COLLECTING
- device_type ❓ CHECK IF COLLECTING
- browser ❓ CHECK IF COLLECTING
- os ❓ CHECK IF COLLECTING
- login_at ✅ COLLECTING
- last_activity_at ✅ COLLECTING
- logout_at ❓ CHECK IF COLLECTING
- expires_at ✅ COLLECTING
- is_active ✅ COLLECTING
- logout_reason ❓ CHECK IF COLLECTING
```

### 🔴 **Missing for Login Activity Tracking:**

1. **Login Activities in the Activities Table**
   - **Status:** Login events are NOT logged in the `activities` table
   - **Impact:** Cannot show login activities in the User Activity Log Report
   - **Fix:** Add login activity logging in the auth middleware/controller:
   ```typescript
   // After successful login
   await activityService.log({
     requestId: 'SYSTEM_LOGIN', // Special request ID for system events
     type: 'login',
     user: { userId, name: user.displayName },
     ipAddress: req.ip,
     userAgent: req.headers['user-agent'],
     category: 'AUTHENTICATION',
     severity: 'INFO',
     timestamp: new Date().toISOString(),
     action: 'User Login',
     details: `User logged in from ${req.ip}`
   });
   ```
   - **Priority:** HIGH (needed for security audit)

2. **Device/Browser Parsing** (see the sketch after this list)
   - **Status:** Fields exist but may not be populated
   - **Impact:** Cannot show device type in reports
   - **Fix:** Parse the user agent to extract:
     - `device_type`: "WEB", "MOBILE"
     - `browser`: "Chrome", "Firefox", "Safari"
     - `os`: "Windows", "macOS", "iOS", "Android"
   - **Priority:** MEDIUM (nice to have)
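
A lightweight parser for this can be written without extra dependencies by matching common user-agent substrings. The sketch below is one hedged take on what such a utility could look like; it is not the repository's actual `userAgentParser.ts`, and a dedicated library such as `ua-parser-js` would be more robust.

```typescript
// Rough substring-based parsing - an assumed approach, not the actual
// userAgentParser.ts implementation. Match order matters (e.g. Edge UAs
// also contain "chrome", iPhone UAs also contain "mac os").
export interface ParsedUserAgent {
  deviceType: 'WEB' | 'MOBILE';
  browser: string;
  os: string;
}

export function parseUserAgent(userAgent: string | undefined): ParsedUserAgent {
  const ua = (userAgent || '').toLowerCase();

  const deviceType = /mobile|android|iphone|ipad/.test(ua) ? 'MOBILE' : 'WEB';

  let browser = 'Unknown';
  if (ua.includes('edg')) browser = 'Edge';
  else if (ua.includes('chrome')) browser = 'Chrome';
  else if (ua.includes('firefox')) browser = 'Firefox';
  else if (ua.includes('safari')) browser = 'Safari';

  let os = 'Unknown';
  if (ua.includes('windows')) os = 'Windows';
  else if (ua.includes('android')) os = 'Android';
  else if (ua.includes('iphone') || ua.includes('ipad')) os = 'iOS';
  else if (ua.includes('mac os')) os = 'macOS';
  else if (ua.includes('linux')) os = 'Linux';

  return { deviceType, browser, os };
}
```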
---

## 4. WORKFLOW_REQUESTS TABLE

### ✅ **All Fields Are Being Collected:**
- All fields in `workflow_requests` are properly collected
- No missing data here

### 📝 **Note:**
- `submission_date` vs `created_at`: use `submission_date` for the "days open" calculation
- `closure_date`: available for completed requests

---

## 5. TAT_TRACKING TABLE

### ✅ **Database Fields Available:**
```sql
- tracking_id (PK)
- request_id (FK)
- level_id (FK)
- tracking_type ✅ COLLECTING
- tat_status ✅ COLLECTING
- total_tat_hours ✅ COLLECTING
- elapsed_hours ✅ COLLECTING
- remaining_hours ✅ COLLECTING
- percentage_used ✅ COLLECTING
- threshold_50_breached ✅ COLLECTING
- threshold_50_alerted_at ✅ COLLECTING
- threshold_80_breached ✅ COLLECTING
- threshold_80_alerted_at ✅ COLLECTING
- threshold_100_breached ✅ COLLECTING
- threshold_100_alerted_at ✅ COLLECTING
- alert_count ✅ COLLECTING
- last_calculated_at ✅ COLLECTING
```

### ✅ **All Fields Are Being Collected:**
- TAT tracking appears to be fully implemented

---

## 6. AUDIT_LOGS TABLE

### ✅ **Database Fields Available:**
```sql
- audit_id (PK)
- user_id (FK)
- entity_type
- entity_id
- action
- action_category
- old_values (JSONB)
- new_values (JSONB)
- changes_summary
- ip_address
- user_agent
- session_id
- request_method
- request_url
- response_status
- execution_time_ms
- created_at
```

### 🔴 **Status:**
- **Audit logging may not be fully implemented**
- **Impact:** Cannot track all system changes for audit purposes
- **Priority:** MEDIUM (for compliance/security)

---

## SUMMARY: What to Start Collecting

### 🔴 **HIGH PRIORITY (Must Have for Reports):**

1. **IP Address in Activities** ✅ Field exists, just need to populate
   - Extract from `req.ip` or `req.headers['x-forwarded-for']`
   - Update `activity.service.ts` to accept IP
   - Update all controller calls

2. **User Agent in Activities** ✅ Field exists, just need to populate
   - Extract from `req.headers['user-agent']`
   - Update `activity.service.ts` to accept user agent
   - Update all controller calls

3. **Login Activities** ❌ Not currently logged
   - Add login activity logging in the auth controller
   - Use a special `requestId: 'SYSTEM_LOGIN'` for system events
   - Include IP and user agent

### 🟡 **MEDIUM PRIORITY (Nice to Have):**

4. **Activity Category** ✅ Field exists, just need to populate
   - Auto-infer from `activity_type`
   - Helps with filtering and reporting

5. **Level Names** ✅ Field exists, ensure it's set
   - Improves readability in reports
   - Auto-generate if not provided

6. **Severity** ✅ Field exists, just need to populate
   - Auto-infer from `activity_type`
   - Helps prioritize critical activities

### 🟢 **LOW PRIORITY (Future Enhancement):**

7. **Device/Browser Parsing**
   - Parse user agent to extract device type, browser, OS
   - Store in the `user_sessions` table

8. **Audit Logging**
   - Implement comprehensive audit logging
   - Track all system changes

---

## 7. BUSINESS DAYS CALCULATION FOR WORKFLOW AGING

### ✅ **Available:**
- `calculateElapsedWorkingHours()` - Calculates working hours (excludes weekends/holidays)
- Working hours configuration (9 AM - 6 PM, Mon-Fri)
- Holiday support (from database)
- Priority-based calculation (express vs standard)

### ❌ **Missing:**
1. **Business Days Count Function**
   - Need a function to calculate business days (not hours)
   - For the Workflow Aging Report: "Days Open" should be business days
   - Currently only a working hours calculation exists

2. **TAT Processor Using Wrong Calculation**
   - `tatProcessor.ts` uses simple calendar hours:
   ```typescript
   const elapsedMs = now.getTime() - new Date(levelStartTime).getTime();
   const elapsedHours = elapsedMs / (1000 * 60 * 60);
   ```
   - Should use `calculateElapsedWorkingHours()` instead
   - This causes incorrect TAT breach calculations

### 🔧 **What Needs to be Built:**

1. **Add Business Days Calculation Function:**
   ```typescript
   // In tatTimeUtils.ts
   export async function calculateBusinessDays(
     startDate: Date | string,
     endDate: Date | string = new Date(),
     priority: string = 'standard'
   ): Promise<number> {
     await loadWorkingHoursCache();
     await loadHolidaysCache();

     let start = dayjs(startDate);
     const end = dayjs(endDate);
     const config = workingHoursCache || { /* defaults */ };

     let businessDays = 0;
     let current = start.startOf('day');

     while (current.isBefore(end) || current.isSame(end, 'day')) {
       const dayOfWeek = current.day();
       const dateStr = current.format('YYYY-MM-DD');

       const isWorkingDay = priority === 'express'
         ? true
         : (dayOfWeek >= config.startDay && dayOfWeek <= config.endDay);
       const isNotHoliday = !holidaysCache.has(dateStr);

       if (isWorkingDay && isNotHoliday) {
         businessDays++;
       }

       current = current.add(1, 'day');
     }

     return businessDays;
   }
   ```

2. **Fix TAT Processor:**
   - Replace the calendar hours calculation with `calculateElapsedWorkingHours()`
   - This will fix TAT breach alerts to use proper working hours

3. **Update Workflow Aging Report:**
   - Use `calculateBusinessDays()` instead of calendar days
   - Filter by a business days threshold

---

## IMPLEMENTATION CHECKLIST

### Phase 1: Quick Wins (Fields Exist, Just Need to Populate)
- [ ] Update `activity.service.ts` to accept `ipAddress` and `userAgent`
- [ ] Update all controller calls to pass IP and user agent
- [ ] Add activity category inference
- [ ] Add severity inference

### Phase 2: Fix TAT Calculations (CRITICAL)
- [x] Fix `tatProcessor.ts` to use `calculateElapsedWorkingHours()` instead of calendar hours ✅
- [x] Add `calculateBusinessDays()` function to `tatTimeUtils.ts` ✅
- [ ] Test TAT breach calculations with working hours

### Phase 3: New Functionality
- [x] Add login activity logging ✅ (Implemented in auth.controller.ts for SSO and token exchange)
- [x] Ensure level names are set when creating approval levels ✅ (levelName set in workflow.service.ts)
- [x] Add device/browser parsing for user sessions ✅ (userAgentParser.ts utility created - can be used for parsing user agent strings)

### Phase 4: Enhanced Reporting
- [x] Build report endpoints using collected data ✅ (getLifecycleReport, getActivityLogReport, getWorkflowAgingReport)
- [x] Add filtering by category, severity ✅ (Filtering by category and severity added to getActivityLogReport, frontend UI added)
- [x] Add IP/user agent to activity log reports ✅ (IP and user agent captured and displayed)
- [x] Use business days in Workflow Aging Report ✅ (calculateBusinessDays implemented and used)

---

## CODE CHANGES NEEDED

### 1. Update Activity Service (`activity.service.ts`)

```typescript
export type ActivityEntry = {
  requestId: string;
  type: 'created' | 'assignment' | 'approval' | 'rejection' | 'status_change' | 'comment' | 'reminder' | 'document_added' | 'sla_warning' | 'ai_conclusion_generated' | 'closed' | 'login';
  user?: { userId: string; name?: string; email?: string };
  timestamp: string;
  action: string;
  details: string;
  metadata?: any;
  ipAddress?: string; // NEW
  userAgent?: string; // NEW
  category?: string;  // NEW
  severity?: string;  // NEW
};

class ActivityService {
  private inferCategory(type: string): string {
    const categoryMap: Record<string, string> = {
      'created': 'WORKFLOW',
      'approval': 'WORKFLOW',
      'rejection': 'WORKFLOW',
      'status_change': 'WORKFLOW',
      'assignment': 'WORKFLOW',
      'comment': 'COLLABORATION',
      'document_added': 'DOCUMENT',
      'sla_warning': 'SYSTEM',
      'reminder': 'SYSTEM',
      'ai_conclusion_generated': 'SYSTEM',
      'closed': 'WORKFLOW',
      'login': 'AUTHENTICATION'
    };
    return categoryMap[type] || 'OTHER';
  }

  private inferSeverity(type: string): string {
    const severityMap: Record<string, string> = {
      'rejection': 'WARNING',
      'sla_warning': 'WARNING',
      'approval': 'INFO',
      'closed': 'INFO',
      'status_change': 'INFO',
      'login': 'INFO',
      'created': 'INFO',
      'comment': 'INFO',
      'document_added': 'INFO'
    };
    return severityMap[type] || 'INFO';
  }

  async log(entry: ActivityEntry) {
    // ... existing code ...
    const activityData = {
      requestId: entry.requestId,
      userId: entry.user?.userId || null,
      userName: entry.user?.name || entry.user?.email || null,
      activityType: entry.type,
      activityDescription: entry.details,
      activityCategory: entry.category || this.inferCategory(entry.type),
      severity: entry.severity || this.inferSeverity(entry.type),
      metadata: entry.metadata || null,
      isSystemEvent: !entry.user,
      ipAddress: entry.ipAddress || null, // NEW
      userAgent: entry.userAgent || null, // NEW
    };
    // ... rest of code ...
  }
}
```

### 2. Update Controller Calls (Example)

```typescript
// In workflow.controller.ts, approval.controller.ts, etc.
activityService.log({
  requestId: workflow.requestId,
  type: 'created',
  user: { userId, name: user.displayName },
  timestamp: new Date().toISOString(),
  action: 'Request Created',
  details: `Request ${workflow.requestNumber} created`,
  ipAddress: req.ip || req.headers['x-forwarded-for'] || null, // NEW
  userAgent: req.headers['user-agent'] || null, // NEW
});
```

### 3. Add Login Activity Logging

```typescript
// In auth.controller.ts after successful login
await activityService.log({
  requestId: 'SYSTEM_LOGIN', // Special ID for system events
  type: 'login',
  user: { userId: user.userId, name: user.displayName },
  timestamp: new Date().toISOString(),
  action: 'User Login',
  details: `User logged in successfully`,
  ipAddress: req.ip || req.headers['x-forwarded-for'] || null,
  userAgent: req.headers['user-agent'] || null,
  category: 'AUTHENTICATION',
  severity: 'INFO'
});
```

---

## CONCLUSION

**Good News:** Most fields already exist in the database! We just need to:
1. Populate existing fields (IP, user agent, category, severity)
2. Add login activity logging
3. Ensure level names are set

**Estimated Effort:**
- Phase 1 (Quick Wins): 2-4 hours
- Phase 2 (New Functionality): 4-6 hours
- Phase 3 (Enhanced Reporting): 8-12 hours

**Total: ~14-22 hours of development work**
1846
Royal_Enfield_API_Collection.postman_collection.json
Normal file
File diff suppressed because it is too large
10
src/app.ts
@ -29,6 +29,16 @@ const initializeDatabase = async () => {
|
|||||||
// Initialize database
|
// Initialize database
|
||||||
initializeDatabase();
|
initializeDatabase();
|
||||||
|
|
||||||
|
// Trust proxy - Enable this when behind a reverse proxy (nginx, load balancer, etc.)
|
||||||
|
// This allows Express to read X-Forwarded-* headers correctly
|
||||||
|
// Set to true in production, false in development
|
||||||
|
if (process.env.TRUST_PROXY === 'true' || process.env.NODE_ENV === 'production') {
|
||||||
|
app.set('trust proxy', true);
|
||||||
|
} else {
|
||||||
|
// In development, trust first proxy (useful for local testing with nginx)
|
||||||
|
app.set('trust proxy', 1);
|
||||||
|
}
|
||||||
|
|
||||||
// CORS middleware - MUST be before other middleware
|
// CORS middleware - MUST be before other middleware
|
||||||
app.use(corsMiddleware);
|
app.use(corsMiddleware);
|
||||||
|
|
||||||
|
|||||||
@ -3,6 +3,7 @@ import { ApprovalService } from '@services/approval.service';
|
|||||||
import { validateApprovalAction } from '@validators/approval.validator';
|
import { validateApprovalAction } from '@validators/approval.validator';
|
||||||
import { ResponseHandler } from '@utils/responseHandler';
|
import { ResponseHandler } from '@utils/responseHandler';
|
||||||
import type { AuthenticatedRequest } from '../types/express';
|
import type { AuthenticatedRequest } from '../types/express';
|
||||||
|
import { getRequestMetadata } from '@utils/requestUtils';
|
||||||
|
|
||||||
const approvalService = new ApprovalService();
|
const approvalService = new ApprovalService();
|
||||||
|
|
||||||
@ -12,7 +13,11 @@ export class ApprovalController {
|
|||||||
const { levelId } = req.params;
|
const { levelId } = req.params;
|
||||||
const validatedData = validateApprovalAction(req.body);
|
const validatedData = validateApprovalAction(req.body);
|
||||||
|
|
||||||
const level = await approvalService.approveLevel(levelId, validatedData, req.user.userId);
|
const requestMeta = getRequestMetadata(req);
|
||||||
|
const level = await approvalService.approveLevel(levelId, validatedData, req.user.userId, {
|
||||||
|
ipAddress: requestMeta.ipAddress,
|
||||||
|
userAgent: requestMeta.userAgent
|
||||||
|
});
|
||||||
|
|
||||||
if (!level) {
|
if (!level) {
|
||||||
ResponseHandler.notFound(res, 'Approval level not found');
|
ResponseHandler.notFound(res, 'Approval level not found');
|
||||||
|
|||||||
@ -4,6 +4,8 @@ import { validateSSOCallback, validateRefreshToken, validateTokenExchange } from
|
|||||||
import { ResponseHandler } from '../utils/responseHandler';
|
import { ResponseHandler } from '../utils/responseHandler';
|
||||||
import type { AuthenticatedRequest } from '../types/express';
|
import type { AuthenticatedRequest } from '../types/express';
|
||||||
import logger from '../utils/logger';
|
import logger from '../utils/logger';
|
||||||
|
import { activityService, SYSTEM_EVENT_REQUEST_ID } from '../services/activity.service';
|
||||||
|
import { getRequestMetadata } from '../utils/requestUtils';
|
||||||
|
|
||||||
export class AuthController {
|
export class AuthController {
|
||||||
private authService: AuthService;
|
private authService: AuthService;
|
||||||
@ -23,6 +25,31 @@ export class AuthController {
|
|||||||
|
|
||||||
const result = await this.authService.handleSSOCallback(validatedData as any);
|
const result = await this.authService.handleSSOCallback(validatedData as any);
|
||||||
|
|
||||||
|
// Log login activity
|
||||||
|
const requestMeta = getRequestMetadata(req);
|
||||||
|
await activityService.log({
|
||||||
|
requestId: SYSTEM_EVENT_REQUEST_ID, // Special UUID for system events
|
||||||
|
type: 'login',
|
||||||
|
user: {
|
||||||
|
userId: result.user.userId,
|
||||||
|
name: result.user.displayName || result.user.email,
|
||||||
|
email: result.user.email
|
||||||
|
},
|
||||||
|
timestamp: new Date().toISOString(),
|
||||||
|
action: 'User Login',
|
||||||
|
details: `User logged in via SSO from ${requestMeta.ipAddress || 'unknown IP'}`,
|
||||||
|
metadata: {
|
||||||
|
loginMethod: 'SSO',
|
||||||
|
employeeId: result.user.employeeId,
|
||||||
|
department: result.user.department,
|
||||||
|
role: result.user.role
|
||||||
|
},
|
||||||
|
ipAddress: requestMeta.ipAddress,
|
||||||
|
userAgent: requestMeta.userAgent,
|
||||||
|
category: 'AUTHENTICATION',
|
||||||
|
severity: 'INFO'
|
||||||
|
});
|
||||||
|
|
||||||
ResponseHandler.success(res, {
|
ResponseHandler.success(res, {
|
||||||
user: result.user,
|
user: result.user,
|
||||||
accessToken: result.accessToken,
|
accessToken: result.accessToken,
|
||||||
@ -274,6 +301,31 @@ export class AuthController {
|
|||||||
|
|
||||||
const result = await this.authService.exchangeCodeForTokens(code, redirectUri);
|
const result = await this.authService.exchangeCodeForTokens(code, redirectUri);
|
||||||
|
|
||||||
|
// Log login activity
|
||||||
|
const requestMeta = getRequestMetadata(req);
|
||||||
|
await activityService.log({
|
||||||
|
requestId: SYSTEM_EVENT_REQUEST_ID, // Special UUID for system events
|
||||||
|
type: 'login',
|
||||||
|
user: {
|
||||||
|
userId: result.user.userId,
|
||||||
|
name: result.user.displayName || result.user.email,
|
||||||
|
email: result.user.email
|
||||||
|
},
|
||||||
|
timestamp: new Date().toISOString(),
|
||||||
|
action: 'User Login',
|
||||||
|
details: `User logged in via token exchange from ${requestMeta.ipAddress || 'unknown IP'}`,
|
||||||
|
metadata: {
|
||||||
|
loginMethod: 'TOKEN_EXCHANGE',
|
||||||
|
employeeId: result.user.employeeId,
|
||||||
|
department: result.user.department,
|
||||||
|
role: result.user.role
|
||||||
|
},
|
||||||
|
ipAddress: requestMeta.ipAddress,
|
||||||
|
userAgent: requestMeta.userAgent,
|
||||||
|
category: 'AUTHENTICATION',
|
||||||
|
severity: 'INFO'
|
||||||
|
});
|
||||||
|
|
||||||
// Set cookies with httpOnly flag for security
|
// Set cookies with httpOnly flag for security
|
||||||
const isProduction = process.env.NODE_ENV === 'production';
|
const isProduction = process.env.NODE_ENV === 'production';
|
||||||
const cookieOptions = {
|
const cookieOptions = {
|
||||||
|
|||||||
@ -3,6 +3,7 @@ import { WorkflowRequest, ApprovalLevel, WorkNote, Document, Activity, Conclusio
|
|||||||
import { aiService } from '@services/ai.service';
|
import { aiService } from '@services/ai.service';
|
||||||
import { activityService } from '@services/activity.service';
|
import { activityService } from '@services/activity.service';
|
||||||
import logger from '@utils/logger';
|
import logger from '@utils/logger';
|
||||||
|
import { getRequestMetadata } from '@utils/requestUtils';
|
||||||
|
|
||||||
export class ConclusionController {
|
export class ConclusionController {
|
||||||
/**
|
/**
|
||||||
@ -170,13 +171,16 @@ export class ConclusionController {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Log activity
|
// Log activity
|
||||||
|
const requestMeta = getRequestMetadata(req);
|
||||||
await activityService.log({
|
await activityService.log({
|
||||||
requestId,
|
requestId,
|
||||||
type: 'ai_conclusion_generated',
|
type: 'ai_conclusion_generated',
|
||||||
user: { userId, name: (request as any).initiator?.displayName || 'Initiator' },
|
user: { userId, name: (request as any).initiator?.displayName || 'Initiator' },
|
||||||
timestamp: new Date().toISOString(),
|
timestamp: new Date().toISOString(),
|
||||||
action: 'AI Conclusion Generated',
|
action: 'AI Conclusion Generated',
|
||||||
details: 'AI-powered conclusion remark generated for review'
|
details: 'AI-powered conclusion remark generated for review',
|
||||||
|
ipAddress: requestMeta.ipAddress,
|
||||||
|
userAgent: requestMeta.userAgent
|
||||||
});
|
});
|
||||||
|
|
||||||
return res.status(200).json({
|
return res.status(200).json({
|
||||||
@ -338,13 +342,16 @@ export class ConclusionController {
|
|||||||
logger.info(`[Conclusion] ✅ Request ${requestId} finalized and closed`);
|
logger.info(`[Conclusion] ✅ Request ${requestId} finalized and closed`);
|
||||||
|
|
||||||
// Log activity
|
// Log activity
|
||||||
|
const requestMeta = getRequestMetadata(req);
|
||||||
await activityService.log({
|
await activityService.log({
|
||||||
requestId,
|
requestId,
|
||||||
type: 'closed',
|
type: 'closed',
|
||||||
user: { userId, name: (request as any).initiator?.displayName || 'Initiator' },
|
user: { userId, name: (request as any).initiator?.displayName || 'Initiator' },
|
||||||
timestamp: new Date().toISOString(),
|
timestamp: new Date().toISOString(),
|
||||||
action: 'Request Closed',
|
action: 'Request Closed',
|
||||||
details: `Request closed with conclusion remark by ${(request as any).initiator?.displayName}`
|
details: `Request closed with conclusion remark by ${(request as any).initiator?.displayName}`,
|
||||||
|
ipAddress: requestMeta.ipAddress,
|
||||||
|
userAgent: requestMeta.userAgent
|
||||||
});
|
});
|
||||||
|
|
||||||
return res.status(200).json({
|
return res.status(200).json({
|
||||||
|
|||||||
@ -16,8 +16,10 @@ export class DashboardController {
|
|||||||
try {
|
try {
|
||||||
const userId = (req as any).user?.userId;
|
const userId = (req as any).user?.userId;
|
||||||
const dateRange = req.query.dateRange as string | undefined;
|
const dateRange = req.query.dateRange as string | undefined;
|
||||||
|
const startDate = req.query.startDate as string | undefined;
|
||||||
|
const endDate = req.query.endDate as string | undefined;
|
||||||
|
|
||||||
const kpis = await this.dashboardService.getKPIs(userId, dateRange);
|
const kpis = await this.dashboardService.getKPIs(userId, dateRange, startDate, endDate);
|
||||||
|
|
||||||
res.json({
|
res.json({
|
||||||
success: true,
|
success: true,
|
||||||
@ -39,8 +41,10 @@ export class DashboardController {
|
|||||||
try {
|
try {
|
||||||
const userId = (req as any).user?.userId;
|
const userId = (req as any).user?.userId;
|
||||||
const dateRange = req.query.dateRange as string | undefined;
|
const dateRange = req.query.dateRange as string | undefined;
|
||||||
|
const startDate = req.query.startDate as string | undefined;
|
||||||
|
const endDate = req.query.endDate as string | undefined;
|
||||||
|
|
||||||
const stats = await this.dashboardService.getRequestStats(userId, dateRange);
|
const stats = await this.dashboardService.getRequestStats(userId, dateRange, startDate, endDate);
|
||||||
|
|
||||||
res.json({
|
res.json({
|
||||||
success: true,
|
success: true,
|
||||||
@ -62,8 +66,10 @@ export class DashboardController {
|
|||||||
try {
|
try {
|
||||||
const userId = (req as any).user?.userId;
|
const userId = (req as any).user?.userId;
|
||||||
const dateRange = req.query.dateRange as string | undefined;
|
const dateRange = req.query.dateRange as string | undefined;
|
||||||
|
const startDate = req.query.startDate as string | undefined;
|
||||||
|
const endDate = req.query.endDate as string | undefined;
|
||||||
|
|
||||||
const efficiency = await this.dashboardService.getTATEfficiency(userId, dateRange);
|
const efficiency = await this.dashboardService.getTATEfficiency(userId, dateRange, startDate, endDate);
|
||||||
|
|
||||||
res.json({
|
res.json({
|
||||||
success: true,
|
success: true,
|
||||||
@ -85,8 +91,10 @@ export class DashboardController {
|
|||||||
try {
|
try {
|
||||||
const userId = (req as any).user?.userId;
|
const userId = (req as any).user?.userId;
|
||||||
const dateRange = req.query.dateRange as string | undefined;
|
const dateRange = req.query.dateRange as string | undefined;
|
||||||
|
const startDate = req.query.startDate as string | undefined;
|
||||||
|
const endDate = req.query.endDate as string | undefined;
|
||||||
|
|
||||||
const load = await this.dashboardService.getApproverLoad(userId, dateRange);
|
const load = await this.dashboardService.getApproverLoad(userId, dateRange, startDate, endDate);
|
||||||
|
|
||||||
res.json({
|
res.json({
|
||||||
success: true,
|
success: true,
|
||||||
@ -108,8 +116,10 @@ export class DashboardController {
|
|||||||
try {
|
try {
|
||||||
const userId = (req as any).user?.userId;
|
const userId = (req as any).user?.userId;
|
||||||
const dateRange = req.query.dateRange as string | undefined;
|
const dateRange = req.query.dateRange as string | undefined;
|
||||||
|
const startDate = req.query.startDate as string | undefined;
|
||||||
|
const endDate = req.query.endDate as string | undefined;
|
||||||
|
|
||||||
const engagement = await this.dashboardService.getEngagementStats(userId, dateRange);
|
const engagement = await this.dashboardService.getEngagementStats(userId, dateRange, startDate, endDate);
|
||||||
|
|
||||||
res.json({
|
res.json({
|
||||||
success: true,
|
success: true,
|
||||||
@ -131,8 +141,10 @@ export class DashboardController {
|
|||||||
try {
|
try {
|
||||||
const userId = (req as any).user?.userId;
|
const userId = (req as any).user?.userId;
|
||||||
const dateRange = req.query.dateRange as string | undefined;
|
const dateRange = req.query.dateRange as string | undefined;
|
||||||
|
const startDate = req.query.startDate as string | undefined;
|
||||||
|
const endDate = req.query.endDate as string | undefined;
|
||||||
|
|
||||||
const insights = await this.dashboardService.getAIInsights(userId, dateRange);
|
const insights = await this.dashboardService.getAIInsights(userId, dateRange, startDate, endDate);
|
||||||
|
|
||||||
res.json({
|
res.json({
|
||||||
success: true,
|
success: true,
|
||||||
@ -154,8 +166,10 @@ export class DashboardController {
|
|||||||
try {
|
try {
|
||||||
const userId = (req as any).user?.userId;
|
const userId = (req as any).user?.userId;
|
||||||
const dateRange = req.query.dateRange as string | undefined;
|
const dateRange = req.query.dateRange as string | undefined;
|
||||||
|
const startDate = req.query.startDate as string | undefined;
|
||||||
|
const endDate = req.query.endDate as string | undefined;
|
||||||
|
|
||||||
const utilization = await this.dashboardService.getAIRemarkUtilization(userId, dateRange);
|
const utilization = await this.dashboardService.getAIRemarkUtilization(userId, dateRange, startDate, endDate);
|
||||||
|
|
||||||
res.json({
|
res.json({
|
||||||
success: true,
|
success: true,
|
||||||
@ -177,10 +191,12 @@ export class DashboardController {
|
|||||||
try {
|
try {
|
||||||
const userId = (req as any).user?.userId;
|
const userId = (req as any).user?.userId;
|
||||||
const dateRange = req.query.dateRange as string | undefined;
|
const dateRange = req.query.dateRange as string | undefined;
|
||||||
|
const startDate = req.query.startDate as string | undefined;
|
||||||
|
const endDate = req.query.endDate as string | undefined;
|
||||||
const page = Number(req.query.page || 1);
|
const page = Number(req.query.page || 1);
|
||||||
const limit = Number(req.query.limit || 10);
|
const limit = Number(req.query.limit || 10);
|
||||||
|
|
||||||
const result = await this.dashboardService.getApproverPerformance(userId, dateRange, page, limit);
|
const result = await this.dashboardService.getApproverPerformance(userId, dateRange, page, limit, startDate, endDate);
|
||||||
|
|
||||||
res.json({
|
res.json({
|
||||||
success: true,
|
success: true,
|
||||||
@ -298,8 +314,10 @@ export class DashboardController {
|
|||||||
try {
|
try {
|
||||||
const userId = (req as any).user?.userId;
|
const userId = (req as any).user?.userId;
|
||||||
const dateRange = req.query.dateRange as string | undefined;
|
const dateRange = req.query.dateRange as string | undefined;
|
||||||
|
const startDate = req.query.startDate as string | undefined;
|
||||||
|
const endDate = req.query.endDate as string | undefined;
|
||||||
|
|
||||||
const stats = await this.dashboardService.getDepartmentStats(userId, dateRange);
|
const stats = await this.dashboardService.getDepartmentStats(userId, dateRange, startDate, endDate);
|
||||||
|
|
||||||
res.json({
|
res.json({
|
||||||
success: true,
|
success: true,
|
||||||
@ -321,8 +339,10 @@ export class DashboardController {
|
|||||||
try {
|
try {
|
||||||
const userId = (req as any).user?.userId;
|
const userId = (req as any).user?.userId;
|
||||||
const dateRange = req.query.dateRange as string | undefined;
|
const dateRange = req.query.dateRange as string | undefined;
|
||||||
|
const startDate = req.query.startDate as string | undefined;
|
||||||
|
const endDate = req.query.endDate as string | undefined;
|
||||||
|
|
||||||
const distribution = await this.dashboardService.getPriorityDistribution(userId, dateRange);
|
const distribution = await this.dashboardService.getPriorityDistribution(userId, dateRange, startDate, endDate);
|
||||||
|
|
||||||
res.json({
|
res.json({
|
||||||
success: true,
|
success: true,
|
||||||
@ -336,5 +356,117 @@ export class DashboardController {
|
|||||||
});
|
});
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get Request Lifecycle Report
|
||||||
|
*/
|
||||||
|
async getLifecycleReport(req: Request, res: Response): Promise<void> {
|
||||||
|
try {
|
||||||
|
const userId = (req as any).user?.userId;
|
||||||
|
const page = Number(req.query.page || 1);
|
||||||
|
const limit = Number(req.query.limit || 50);
|
||||||
|
|
||||||
|
const result = await this.dashboardService.getLifecycleReport(userId, page, limit);
|
||||||
|
|
||||||
|
res.json({
|
||||||
|
success: true,
|
||||||
|
data: result.lifecycleData,
|
||||||
|
pagination: {
|
||||||
|
currentPage: result.currentPage,
|
||||||
|
totalPages: result.totalPages,
|
||||||
|
totalRecords: result.totalRecords,
|
||||||
|
limit: result.limit
|
||||||
|
}
|
||||||
|
});
|
||||||
|
} catch (error) {
|
||||||
|
logger.error('[Dashboard] Error fetching lifecycle report:', error);
|
||||||
|
res.status(500).json({
|
||||||
|
success: false,
|
||||||
|
error: 'Failed to fetch lifecycle report'
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get enhanced User Activity Log Report
|
||||||
|
*/
|
||||||
|
async getActivityLogReport(req: Request, res: Response): Promise<void> {
|
||||||
|
try {
|
||||||
|
const userId = (req as any).user?.userId;
|
||||||
|
const page = Number(req.query.page || 1);
|
||||||
|
const limit = Number(req.query.limit || 50);
|
||||||
|
const dateRange = req.query.dateRange as string | undefined;
|
||||||
|
const filterUserId = req.query.filterUserId as string | undefined;
|
||||||
|
const filterType = req.query.filterType as string | undefined;
|
||||||
|
const filterCategory = req.query.filterCategory as string | undefined;
|
||||||
|
const filterSeverity = req.query.filterSeverity as string | undefined;
|
||||||
|
|
||||||
|
const result = await this.dashboardService.getActivityLogReport(
|
||||||
|
userId,
|
||||||
|
page,
|
||||||
|
limit,
|
||||||
|
dateRange,
|
||||||
|
filterUserId,
|
||||||
|
filterType,
|
||||||
|
filterCategory,
|
||||||
|
filterSeverity
|
||||||
|
);
|
||||||
|
|
||||||
|
res.json({
|
||||||
|
success: true,
|
||||||
|
data: result.activities,
|
||||||
|
pagination: {
|
||||||
|
currentPage: result.currentPage,
|
||||||
|
totalPages: result.totalPages,
|
||||||
|
totalRecords: result.totalRecords,
|
||||||
|
limit: result.limit
|
||||||
|
}
|
||||||
|
});
|
||||||
|
} catch (error) {
|
||||||
|
logger.error('[Dashboard] Error fetching activity log report:', error);
|
||||||
|
res.status(500).json({
|
||||||
|
success: false,
|
||||||
|
error: 'Failed to fetch activity log report'
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get Workflow Aging Report
|
||||||
|
*/
|
||||||
|
async getWorkflowAgingReport(req: Request, res: Response): Promise<void> {
|
||||||
|
try {
|
||||||
|
const userId = (req as any).user?.userId;
|
||||||
|
const threshold = Number(req.query.threshold || 7);
|
||||||
|
const page = Number(req.query.page || 1);
|
||||||
|
const limit = Number(req.query.limit || 50);
|
||||||
|
const dateRange = req.query.dateRange as string | undefined;
|
||||||
|
|
||||||
|
const result = await this.dashboardService.getWorkflowAgingReport(
|
||||||
|
userId,
|
||||||
|
threshold,
|
||||||
|
page,
|
||||||
|
limit,
|
||||||
|
dateRange
|
||||||
|
);
|
||||||
|
|
||||||
|
res.json({
|
||||||
|
success: true,
|
||||||
|
data: result.agingData,
|
||||||
|
pagination: {
|
||||||
|
currentPage: result.currentPage,
|
||||||
|
totalPages: result.totalPages,
|
||||||
|
totalRecords: result.totalRecords,
|
||||||
|
limit: result.limit
|
||||||
|
}
|
||||||
|
});
|
||||||
|
} catch (error) {
|
||||||
|
logger.error('[Dashboard] Error fetching workflow aging report:', error);
|
||||||
|
res.status(500).json({
|
||||||
|
success: false,
|
||||||
|
error: 'Failed to fetch workflow aging report'
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@ -6,6 +6,7 @@ import { User } from '@models/User';
|
|||||||
import { ResponseHandler } from '@utils/responseHandler';
|
import { ResponseHandler } from '@utils/responseHandler';
|
||||||
import { activityService } from '@services/activity.service';
|
import { activityService } from '@services/activity.service';
|
||||||
import type { AuthenticatedRequest } from '../types/express';
|
import type { AuthenticatedRequest } from '../types/express';
|
||||||
|
import { getRequestMetadata } from '@utils/requestUtils';
|
||||||
|
|
||||||
export class DocumentController {
|
export class DocumentController {
|
||||||
async upload(req: AuthenticatedRequest, res: Response): Promise<void> {
|
async upload(req: AuthenticatedRequest, res: Response): Promise<void> {
|
||||||
@ -58,6 +59,7 @@ export class DocumentController {
|
|||||||
const uploaderName = (user as any)?.displayName || (user as any)?.email || 'User';
|
const uploaderName = (user as any)?.displayName || (user as any)?.email || 'User';
|
||||||
|
|
||||||
// Log activity for document upload
|
// Log activity for document upload
|
||||||
|
const requestMeta = getRequestMetadata(req);
|
||||||
await activityService.log({
|
await activityService.log({
|
||||||
requestId,
|
requestId,
|
||||||
type: 'document_added',
|
type: 'document_added',
|
||||||
@ -70,7 +72,9 @@ export class DocumentController {
|
|||||||
fileSize: file.size,
|
fileSize: file.size,
|
||||||
fileType: extension,
|
fileType: extension,
|
||||||
category
|
category
|
||||||
}
|
},
|
||||||
|
ipAddress: requestMeta.ipAddress,
|
||||||
|
userAgent: requestMeta.userAgent
|
||||||
});
|
});
|
||||||
|
|
||||||
ResponseHandler.success(res, doc, 'File uploaded', 201);
|
ResponseHandler.success(res, doc, 'File uploaded', 201);
|
||||||
|
|||||||
@@ -10,6 +10,7 @@ import { User } from '@models/User';
 import fs from 'fs';
 import path from 'path';
 import crypto from 'crypto';
+import { getRequestMetadata } from '@utils/requestUtils';

 const workflowService = new WorkflowService();

@@ -22,7 +23,11 @@ export class WorkflowController {
       ...validatedData,
       priority: validatedData.priority as Priority
     };
-    const workflow = await workflowService.createWorkflow(req.user.userId, workflowData);
+    const requestMeta = getRequestMetadata(req);
+    const workflow = await workflowService.createWorkflow(req.user.userId, workflowData, {
+      ipAddress: requestMeta.ipAddress,
+      userAgent: requestMeta.userAgent
+    });

     ResponseHandler.success(res, workflow, 'Workflow created successfully', 201);
   } catch (error) {
@@ -49,7 +54,11 @@ export class WorkflowController {
     const validated = validateCreateWorkflow(parsed);
     const workflowData = { ...validated, priority: validated.priority as Priority } as any;

-    const workflow = await workflowService.createWorkflow(userId, workflowData);
+    const requestMeta = getRequestMetadata(req);
+    const workflow = await workflowService.createWorkflow(userId, workflowData, {
+      ipAddress: requestMeta.ipAddress,
+      userAgent: requestMeta.userAgent
+    });

     // Attach files as documents (category defaults to SUPPORTING)
     const files = (req as any).files as Express.Multer.File[] | undefined;
@@ -87,6 +96,7 @@ export class WorkflowController {
         docs.push(doc);

         // Log document upload activity
+        const requestMeta = getRequestMetadata(req);
         activityService.log({
           requestId: workflow.requestId,
           type: 'document_added',
@@ -94,7 +104,9 @@ export class WorkflowController {
           timestamp: new Date().toISOString(),
           action: 'Document Added',
           details: `Added ${file.originalname} as supporting document by ${uploaderName}`,
-          metadata: { fileName: file.originalname, fileSize: file.size, fileType: extension }
+          metadata: { fileName: file.originalname, fileSize: file.size, fileType: extension },
+          ipAddress: requestMeta.ipAddress,
+          userAgent: requestMeta.userAgent
         });
       }
     }
@@ -155,7 +167,15 @@ export class WorkflowController {
       const userId = (req as any).user?.userId || (req as any).user?.id || (req as any).auth?.userId;
       const page = Math.max(parseInt(String(req.query.page || '1'), 10), 1);
       const limit = Math.min(Math.max(parseInt(String(req.query.limit || '20'), 10), 1), 100);
-      const result = await workflowService.listMyRequests(userId, page, limit);
+
+      // Extract filter parameters
+      const filters = {
+        search: req.query.search as string | undefined,
+        status: req.query.status as string | undefined,
+        priority: req.query.priority as string | undefined
+      };
+
+      const result = await workflowService.listMyRequests(userId, page, limit, filters);
       ResponseHandler.success(res, result, 'My requests fetched');
     } catch (error) {
       const errorMessage = error instanceof Error ? error.message : 'Unknown error';
@@ -168,7 +188,19 @@ export class WorkflowController {
       const userId = (req as any).user?.userId || (req as any).user?.id || (req as any).auth?.userId;
       const page = Math.max(parseInt(String(req.query.page || '1'), 10), 1);
       const limit = Math.min(Math.max(parseInt(String(req.query.limit || '20'), 10), 1), 100);
-      const result = await workflowService.listOpenForMe(userId, page, limit);
+
+      // Extract filter parameters
+      const filters = {
+        search: req.query.search as string | undefined,
+        status: req.query.status as string | undefined,
+        priority: req.query.priority as string | undefined
+      };
+
+      // Extract sorting parameters
+      const sortBy = req.query.sortBy as string | undefined;
+      const sortOrder = (req.query.sortOrder as string | undefined) || 'desc';
+
+      const result = await workflowService.listOpenForMe(userId, page, limit, filters, sortBy, sortOrder);
       ResponseHandler.success(res, result, 'Open requests for user fetched');
     } catch (error) {
       const errorMessage = error instanceof Error ? error.message : 'Unknown error';
@@ -181,7 +213,19 @@ export class WorkflowController {
      const userId = (req as any).user?.userId || (req as any).user?.id || (req as any).auth?.userId;
      const page = Math.max(parseInt(String(req.query.page || '1'), 10), 1);
      const limit = Math.min(Math.max(parseInt(String(req.query.limit || '20'), 10), 1), 100);
-      const result = await workflowService.listClosedByMe(userId, page, limit);
+
+      // Extract filter parameters
+      const filters = {
+        search: req.query.search as string | undefined,
+        status: req.query.status as string | undefined,
+        priority: req.query.priority as string | undefined
+      };
+
+      // Extract sorting parameters
+      const sortBy = req.query.sortBy as string | undefined;
+      const sortOrder = (req.query.sortOrder as string | undefined) || 'desc';
+
+      const result = await workflowService.listClosedByMe(userId, page, limit, filters, sortBy, sortOrder);
      ResponseHandler.success(res, result, 'Closed requests by user fetched');
    } catch (error) {
      const errorMessage = error instanceof Error ? error.message : 'Unknown error';
@@ -1,6 +1,7 @@
 import type { Request, Response } from 'express';
 import { workNoteService } from '../services/worknote.service';
 import { WorkflowService } from '../services/workflow.service';
+import { getRequestMetadata } from '@utils/requestUtils';

 export class WorkNoteController {
   private workflowService = new WorkflowService();
@@ -50,7 +51,11 @@ export class WorkNoteController {
       mentionedUsers: mentions // Pass mentioned user IDs to service
     };

-    const note = await workNoteService.create(requestId, user, workNotePayload, files);
+    const requestMeta = getRequestMetadata(req);
+    const note = await workNoteService.create(requestId, user, workNotePayload, files, {
+      ipAddress: requestMeta.ipAddress,
+      userAgent: requestMeta.userAgent
+    });
     res.status(201).json({ success: true, data: note });
   }
 }
@@ -6,6 +6,7 @@ import { TatAlert, TatAlertType } from '@models/TatAlert';
 import { activityService } from '@services/activity.service';
 import logger from '@utils/logger';
 import dayjs from 'dayjs';
+import { calculateElapsedWorkingHours, addWorkingHours, addWorkingHoursExpress } from '@utils/tatTimeUtils';

 interface TatJobData {
   type: 'threshold1' | 'threshold2' | 'breach';
@@ -61,10 +62,19 @@ export async function handleTatJob(job: Job<TatJobData>) {
   const tatHours = Number((approvalLevel as any).tatHours || 0);
   const levelStartTime = (approvalLevel as any).levelStartTime || (approvalLevel as any).createdAt;
   const now = new Date();
-  const elapsedMs = now.getTime() - new Date(levelStartTime).getTime();
-  const elapsedHours = elapsedMs / (1000 * 60 * 60);
+
+  // FIXED: Use proper working hours calculation instead of calendar hours
+  // This respects working hours (9 AM - 6 PM), excludes weekends for STANDARD priority, and excludes holidays
+  const priority = ((workflow as any).priority || 'STANDARD').toString().toLowerCase();
+  const elapsedHours = await calculateElapsedWorkingHours(levelStartTime, now, priority);
   const remainingHours = Math.max(0, tatHours - elapsedHours);
-  const expectedCompletionTime = dayjs(levelStartTime).add(tatHours, 'hour').toDate();
+
+  // Calculate expected completion time using proper working hours calculation
+  // EXPRESS: includes weekends but only during working hours
+  // STANDARD: excludes weekends and only during working hours
+  const expectedCompletionTime = priority === 'express'
+    ? (await addWorkingHoursExpress(levelStartTime, tatHours)).toDate()
+    : (await addWorkingHours(levelStartTime, tatHours)).toDate();

   switch (type) {
     case 'threshold1':
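For context on why this hunk matters, here is a small self-contained sketch (not the project's `calculateElapsedWorkingHours`, which also handles holidays and EXPRESS priority) contrasting raw calendar hours with working-hours elapsed time across a weekend:

```typescript
// Illustrative only: counts minutes that fall in 9 AM - 6 PM, Mon-Fri.
function elapsedWorkingHoursApprox(start: Date, end: Date): number {
  let hours = 0;
  const cursor = new Date(start);
  while (cursor < end) {
    const day = cursor.getDay();   // 0 = Sunday, 6 = Saturday
    const hour = cursor.getHours();
    if (day >= 1 && day <= 5 && hour >= 9 && hour < 18) hours += 1 / 60;
    cursor.setMinutes(cursor.getMinutes() + 1); // minute-by-minute precision
  }
  return hours;
}

const start = new Date('2025-01-03T16:00:00'); // Friday 4 PM
const end = new Date('2025-01-06T11:00:00');   // Monday 11 AM
const calendarHours = (end.getTime() - start.getTime()) / (1000 * 60 * 60); // 67
const workingHours = elapsedWorkingHoursApprox(start, end);                 // ~4 (Fri 4-6 PM + Mon 9-11 AM)
console.log({ calendarHours, workingHours });
```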
@@ -90,5 +90,23 @@ router.get('/stats/priority-distribution',
   asyncHandler(dashboardController.getPriorityDistribution.bind(dashboardController))
 );

+// Get Request Lifecycle Report
+router.get('/reports/lifecycle',
+  authenticateToken,
+  asyncHandler(dashboardController.getLifecycleReport.bind(dashboardController))
+);
+
+// Get enhanced User Activity Log Report
+router.get('/reports/activity-log',
+  authenticateToken,
+  asyncHandler(dashboardController.getActivityLogReport.bind(dashboardController))
+);
+
+// Get Workflow Aging Report
+router.get('/reports/workflow-aging',
+  authenticateToken,
+  asyncHandler(dashboardController.getWorkflowAgingReport.bind(dashboardController))
+);
+
 export default router;
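A hypothetical client call for one of the new report routes; the `/api/dashboard` prefix, the token handling, and the `page`/`limit` query parameters are assumptions, not confirmed by this diff:

```typescript
const BASE_URL = 'http://localhost:3000/api/dashboard'; // placeholder base URL

async function fetchWorkflowAgingReport(token: string, page = 1, limit = 20) {
  const res = await fetch(`${BASE_URL}/reports/workflow-aging?page=${page}&limit=${limit}`, {
    headers: { Authorization: `Bearer ${token}` }
  });
  if (!res.ok) throw new Error(`Aging report request failed: ${res.status}`);
  return res.json(); // expected to include the pagination fields seen in the controller
}
```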
@@ -1,18 +1,61 @@
 import logger from '@utils/logger';

+// Special UUID for system events (login, etc.) - well-known UUID: 00000000-0000-0000-0000-000000000001
+export const SYSTEM_EVENT_REQUEST_ID = '00000000-0000-0000-0000-000000000001';
+
 export type ActivityEntry = {
   requestId: string;
-  type: 'created' | 'assignment' | 'approval' | 'rejection' | 'status_change' | 'comment' | 'reminder' | 'document_added' | 'sla_warning' | 'ai_conclusion_generated' | 'closed';
+  type: 'created' | 'assignment' | 'approval' | 'rejection' | 'status_change' | 'comment' | 'reminder' | 'document_added' | 'sla_warning' | 'ai_conclusion_generated' | 'closed' | 'login';
   user?: { userId: string; name?: string; email?: string };
   timestamp: string;
   action: string;
   details: string;
   metadata?: any;
+  ipAddress?: string;
+  userAgent?: string;
+  category?: string;
+  severity?: string;
 };

 class ActivityService {
   private byRequest: Map<string, ActivityEntry[]> = new Map();
+
+  private inferCategory(type: string): string {
+    const categoryMap: Record<string, string> = {
+      'created': 'WORKFLOW',
+      'approval': 'WORKFLOW',
+      'rejection': 'WORKFLOW',
+      'status_change': 'WORKFLOW',
+      'assignment': 'WORKFLOW',
+      'comment': 'COLLABORATION',
+      'document_added': 'DOCUMENT',
+      'sla_warning': 'SYSTEM',
+      'reminder': 'SYSTEM',
+      'ai_conclusion_generated': 'SYSTEM',
+      'closed': 'WORKFLOW',
+      'login': 'AUTHENTICATION'
+    };
+    return categoryMap[type] || 'OTHER';
+  }
+
+  private inferSeverity(type: string): string {
+    const severityMap: Record<string, string> = {
+      'rejection': 'WARNING',
+      'sla_warning': 'WARNING',
+      'approval': 'INFO',
+      'closed': 'INFO',
+      'status_change': 'INFO',
+      'login': 'INFO',
+      'created': 'INFO',
+      'comment': 'INFO',
+      'document_added': 'INFO',
+      'assignment': 'INFO',
+      'reminder': 'INFO',
+      'ai_conclusion_generated': 'INFO'
+    };
+    return severityMap[type] || 'INFO';
+  }

   async log(entry: ActivityEntry) {
     const list = this.byRequest.get(entry.requestId) || [];
     list.push(entry);
@@ -29,19 +72,20 @@ class ActivityService {
       userName: userName,
       activityType: entry.type,
       activityDescription: entry.details,
-      activityCategory: null,
-      severity: null,
+      activityCategory: entry.category || this.inferCategory(entry.type),
+      severity: entry.severity || this.inferSeverity(entry.type),
       metadata: entry.metadata || null,
       isSystemEvent: !entry.user,
-      ipAddress: null,
-      userAgent: null,
+      ipAddress: entry.ipAddress || null, // Database accepts null
+      userAgent: entry.userAgent || null, // Database accepts null
     };

     logger.info(`[Activity] Creating activity:`, {
       requestId: entry.requestId,
       userName,
       userId: entry.user?.userId,
-      type: entry.type
+      type: entry.type,
+      ipAddress: entry.ipAddress ? '***' : null
     });

     await Activity.create(activityData);
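A representative `activityService.log` call after this change, with placeholder values; `category` and `severity` are intentionally omitted so the new `inferCategory`/`inferSeverity` fallbacks apply (`'document_added'` resolves to `'DOCUMENT'` / `'INFO'`):

```typescript
import { activityService } from '@services/activity.service';

async function logSampleActivity() {
  await activityService.log({
    requestId: '7c1c2c3a-0000-0000-0000-000000000001', // placeholder UUID
    type: 'document_added',
    user: { userId: 'user-123', name: 'Jane Doe' },     // placeholder user
    timestamp: new Date().toISOString(),
    action: 'Document Added',
    details: 'Added budget.xlsx as supporting document by Jane Doe',
    metadata: { fileName: 'budget.xlsx', fileSize: 10240, fileType: 'xlsx' },
    ipAddress: '10.0.0.15',
    userAgent: 'Mozilla/5.0 (Windows NT 10.0; Win64; x64)'
  });
}
```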
@@ -13,7 +13,7 @@ import { activityService } from './activity.service';
 import { tatSchedulerService } from './tatScheduler.service';

 export class ApprovalService {
-  async approveLevel(levelId: string, action: ApprovalAction, _userId: string): Promise<ApprovalLevel | null> {
+  async approveLevel(levelId: string, action: ApprovalAction, _userId: string, requestMetadata?: { ipAddress?: string | null; userAgent?: string | null }): Promise<ApprovalLevel | null> {
     try {
       const level = await ApprovalLevel.findByPk(levelId);
       if (!level) return null;
@@ -87,7 +87,9 @@ export class ApprovalService {
         user: { userId: level.approverId, name: level.approverName },
         timestamp: new Date().toISOString(),
         action: 'Approved',
-        details: `Request approved and finalized by ${level.approverName || level.approverEmail}. Awaiting conclusion remark from initiator.`
+        details: `Request approved and finalized by ${level.approverName || level.approverEmail}. Awaiting conclusion remark from initiator.`,
+        ipAddress: requestMetadata?.ipAddress || undefined,
+        userAgent: requestMetadata?.userAgent || undefined
       });

       // Generate AI conclusion remark ASYNCHRONOUSLY (don't wait)
@@ -201,7 +203,9 @@ export class ApprovalService {
           user: { userId: 'system', name: 'System' },
           timestamp: new Date().toISOString(),
           action: 'AI Conclusion Generated',
-          details: 'AI-powered conclusion remark generated for review by initiator'
+          details: 'AI-powered conclusion remark generated for review by initiator',
+          ipAddress: undefined, // System-generated, no IP
+          userAgent: undefined // System-generated, no user agent
         });
       } else {
         // Log why AI generation was skipped
@@ -295,7 +299,9 @@ export class ApprovalService {
           user: { userId: level.approverId, name: level.approverName },
           timestamp: new Date().toISOString(),
           action: 'Approved',
-          details: `Request approved and forwarded to ${(nextLevel as any).approverName || (nextLevel as any).approverEmail} by ${level.approverName || level.approverEmail}`
+          details: `Request approved and forwarded to ${(nextLevel as any).approverName || (nextLevel as any).approverEmail} by ${level.approverName || level.approverEmail}`,
+          ipAddress: requestMetadata?.ipAddress || undefined,
+          userAgent: requestMetadata?.userAgent || undefined
         });
       }
     } else {
@@ -322,7 +328,9 @@ export class ApprovalService {
           user: { userId: level.approverId, name: level.approverName },
           timestamp: new Date().toISOString(),
           action: 'Approved',
-          details: `Request approved and finalized by ${level.approverName || level.approverEmail}`
+          details: `Request approved and finalized by ${level.approverName || level.approverEmail}`,
+          ipAddress: requestMetadata?.ipAddress || undefined,
+          userAgent: requestMetadata?.userAgent || undefined
         });
       }
     }
@@ -373,7 +381,9 @@ export class ApprovalService {
         user: { userId: level.approverId, name: level.approverName },
         timestamp: new Date().toISOString(),
         action: 'Rejected',
-        details: `Request rejected by ${level.approverName || level.approverEmail}. Reason: ${action.rejectionReason || action.comments || 'No reason provided'}`
+        details: `Request rejected by ${level.approverName || level.approverEmail}. Reason: ${action.rejectionReason || action.comments || 'No reason provided'}`,
+        ipAddress: requestMetadata?.ipAddress || undefined,
+        userAgent: requestMetadata?.userAgent || undefined
       });
     }
   }
File diff suppressed because it is too large
@@ -507,11 +507,15 @@ export class WorkflowService {
     // Calculate OVERALL request SLA (from submission to total deadline)
     const { calculateSLAStatus } = require('@utils/tatTimeUtils');
     const submissionDate = (wf as any).submissionDate;
+    const closureDate = (wf as any).closureDate;
+    // For completed requests, use closure_date; for active requests, use current time
+    const overallEndDate = closureDate || null;
+
     let overallSLA = null;

     if (submissionDate && totalTatHours > 0) {
       try {
-        overallSLA = await calculateSLAStatus(submissionDate, totalTatHours, priority);
+        overallSLA = await calculateSLAStatus(submissionDate, totalTatHours, priority, overallEndDate);
       } catch (error) {
         logger.error('[Workflow] Error calculating overall SLA:', error);
       }
@@ -522,10 +526,13 @@ export class WorkflowService {
     if (currentLevel) {
       const levelStartTime = (currentLevel as any).levelStartTime || (currentLevel as any).tatStartTime;
       const levelTatHours = Number((currentLevel as any).tatHours || 0);
+      // For completed levels, use the level's completion time (if available)
+      // Otherwise, if request is completed, use closure_date
+      const levelEndDate = (currentLevel as any).completedAt || closureDate || null;
+
       if (levelStartTime && levelTatHours > 0) {
         try {
-          currentLevelSLA = await calculateSLAStatus(levelStartTime, levelTatHours, priority);
+          currentLevelSLA = await calculateSLAStatus(levelStartTime, levelTatHours, priority, levelEndDate);
         } catch (error) {
           logger.error('[Workflow] Error calculating current level SLA:', error);
         }
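A short sketch of the new `calculateSLAStatus` call shape with placeholder dates, showing how passing an end date freezes the SLA clock for completed requests instead of letting it keep running:

```typescript
import { calculateSLAStatus } from '@utils/tatTimeUtils';

async function slaExample() {
  const submissionDate = new Date('2025-01-06T10:00:00'); // placeholder
  const closureDate = new Date('2025-01-08T15:30:00');    // placeholder

  // Active request: elapsed working hours measured up to "now"
  const liveSLA = await calculateSLAStatus(submissionDate, 24, 'standard');

  // Completed request: elapsed working hours measured up to the closure date
  const finalSLA = await calculateSLAStatus(submissionDate, 24, 'standard', closureDate);

  return { liveSLA, finalSLA };
}
```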
@@ -594,10 +601,37 @@ export class WorkflowService {
     return data;
   }

-  async listMyRequests(userId: string, page: number, limit: number) {
+  async listMyRequests(userId: string, page: number, limit: number, filters?: { search?: string; status?: string; priority?: string }) {
     const offset = (page - 1) * limit;
+
+    // Build where clause with filters
+    const whereConditions: any[] = [{ initiatorId: userId }];
+
+    // Apply status filter
+    if (filters?.status && filters.status !== 'all') {
+      whereConditions.push({ status: filters.status.toUpperCase() });
+    }
+
+    // Apply priority filter
+    if (filters?.priority && filters.priority !== 'all') {
+      whereConditions.push({ priority: filters.priority.toUpperCase() });
+    }
+
+    // Apply search filter (title, description, or requestNumber)
+    if (filters?.search && filters.search.trim()) {
+      whereConditions.push({
+        [Op.or]: [
+          { title: { [Op.iLike]: `%${filters.search.trim()}%` } },
+          { description: { [Op.iLike]: `%${filters.search.trim()}%` } },
+          { requestNumber: { [Op.iLike]: `%${filters.search.trim()}%` } }
+        ]
+      });
+    }
+
+    const where = whereConditions.length > 0 ? { [Op.and]: whereConditions } : {};
+
     const { rows, count } = await WorkflowRequest.findAndCountAll({
-      where: { initiatorId: userId },
+      where,
       offset,
       limit,
       order: [['createdAt', 'DESC']],
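For illustration, the where clause built above for a request like `?status=pending&search=laptop` would look roughly like this (values are placeholders):

```typescript
import { Op } from 'sequelize';

const where = {
  [Op.and]: [
    { initiatorId: 'user-123' },
    { status: 'PENDING' },
    {
      [Op.or]: [
        { title: { [Op.iLike]: '%laptop%' } },
        { description: { [Op.iLike]: '%laptop%' } },
        { requestNumber: { [Op.iLike]: '%laptop%' } }
      ]
    }
  ]
};
```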
@@ -609,7 +643,7 @@ export class WorkflowService {
     return { data, pagination: { page, limit, total: count, totalPages: Math.ceil(count / limit) || 1 } };
   }

-  async listOpenForMe(userId: string, page: number, limit: number) {
+  async listOpenForMe(userId: string, page: number, limit: number, filters?: { search?: string; status?: string; priority?: string }, sortBy?: string, sortOrder?: string) {
     const offset = (page - 1) * limit;
     // Find all pending/in-progress approval levels across requests ordered by levelNumber
     const pendingLevels = await ApprovalLevel.findAll({
@@ -664,30 +698,149 @@ export class WorkflowService {
     // Combine all request IDs (approver, spectator, and approved as initiator)
     const allOpenRequestIds = Array.from(new Set([...allRequestIds, ...approvedInitiatorRequestIds]));

-    const { rows, count } = await WorkflowRequest.findAndCountAll({
-      where: {
-        requestId: { [Op.in]: allOpenRequestIds.length ? allOpenRequestIds : ['00000000-0000-0000-0000-000000000000'] },
-        status: { [Op.in]: [
-          WorkflowStatus.PENDING as any,
-          (WorkflowStatus as any).IN_PROGRESS ?? 'IN_PROGRESS',
-          WorkflowStatus.APPROVED as any, // Include APPROVED for initiators awaiting closure
-          'PENDING',
-          'IN_PROGRESS',
-          'APPROVED'
-        ] as any },
-      },
-      offset,
-      limit,
-      order: [['createdAt', 'DESC']],
-      include: [
-        { association: 'initiator', required: false, attributes: ['userId', 'email', 'displayName', 'department', 'designation'] },
-      ],
-    });
-    const data = await this.enrichForCards(rows);
+    // Build base where conditions
+    const baseConditions: any[] = [];
+
+    // Add the main OR condition for request IDs
+    if (allOpenRequestIds.length > 0) {
+      baseConditions.push({
+        requestId: { [Op.in]: allOpenRequestIds }
+      });
+    } else {
+      // No matching requests
+      baseConditions.push({
+        requestId: { [Op.in]: ['00000000-0000-0000-0000-000000000000'] }
+      });
+    }
+
+    // Add status condition
+    baseConditions.push({
+      status: { [Op.in]: [
+        WorkflowStatus.PENDING as any,
+        (WorkflowStatus as any).IN_PROGRESS ?? 'IN_PROGRESS',
+        WorkflowStatus.APPROVED as any,
+        'PENDING',
+        'IN_PROGRESS',
+        'APPROVED'
+      ] as any }
+    });
+
+    // Apply status filter if provided (overrides default status filter)
+    if (filters?.status && filters.status !== 'all') {
+      baseConditions.pop(); // Remove default status
+      baseConditions.push({ status: filters.status.toUpperCase() });
+    }
+
+    // Apply priority filter
+    if (filters?.priority && filters.priority !== 'all') {
+      baseConditions.push({ priority: filters.priority.toUpperCase() });
+    }
+
+    // Apply search filter (title, description, or requestNumber)
+    if (filters?.search && filters.search.trim()) {
+      baseConditions.push({
+        [Op.or]: [
+          { title: { [Op.iLike]: `%${filters.search.trim()}%` } },
+          { description: { [Op.iLike]: `%${filters.search.trim()}%` } },
+          { requestNumber: { [Op.iLike]: `%${filters.search.trim()}%` } }
+        ]
+      });
+    }
+
+    const where = baseConditions.length > 0 ? { [Op.and]: baseConditions } : {};
+
+    // Build order clause based on sortBy parameter
+    // For computed fields (due, sla), we'll sort after enrichment
+    let order: any[] = [['createdAt', 'DESC']]; // Default order
+    const validSortOrder = (sortOrder?.toLowerCase() === 'asc' ? 'ASC' : 'DESC');
+
+    if (sortBy) {
+      switch (sortBy.toLowerCase()) {
+        case 'created':
+          order = [['createdAt', validSortOrder]];
+          break;
+        case 'priority':
+          // Map priority values: EXPRESS = 1, STANDARD = 2 for ascending (standard first), or reverse for descending
+          // For simplicity, we'll sort alphabetically: EXPRESS < STANDARD
+          order = [['priority', validSortOrder], ['createdAt', 'DESC']]; // Secondary sort by createdAt
+          break;
+        // For 'due' and 'sla', we need to sort after enrichment (handled below)
+        case 'due':
+        case 'sla':
+          // Keep default order - will sort after enrichment
+          break;
+        default:
+          // Unknown sortBy, use default
+          break;
+      }
+    }
+
+    // For computed field sorting (due, sla), we need to fetch all matching records first,
+    // enrich them, sort, then paginate. For DB fields, we can use SQL pagination.
+    const needsPostEnrichmentSort = sortBy && ['due', 'sla'].includes(sortBy.toLowerCase());
+
+    let rows: any[];
+    let count: number;
+
+    if (needsPostEnrichmentSort) {
+      // Fetch all matching records (no pagination yet)
+      const result = await WorkflowRequest.findAndCountAll({
+        where,
+        include: [
+          { association: 'initiator', required: false, attributes: ['userId', 'email', 'displayName', 'department', 'designation'] },
+        ],
+      });
+
+      // Enrich all records
+      const allEnriched = await this.enrichForCards(result.rows);
+
+      // Sort enriched data
+      allEnriched.sort((a: any, b: any) => {
+        let aValue: any, bValue: any;
+
+        if (sortBy.toLowerCase() === 'due') {
+          aValue = a.currentLevelSLA?.deadline ? new Date(a.currentLevelSLA.deadline).getTime() : Number.MAX_SAFE_INTEGER;
+          bValue = b.currentLevelSLA?.deadline ? new Date(b.currentLevelSLA.deadline).getTime() : Number.MAX_SAFE_INTEGER;
+        } else if (sortBy.toLowerCase() === 'sla') {
+          aValue = a.currentLevelSLA?.percentageUsed || 0;
+          bValue = b.currentLevelSLA?.percentageUsed || 0;
+        } else {
+          return 0;
+        }
+
+        if (validSortOrder === 'ASC') {
+          return aValue > bValue ? 1 : -1;
+        } else {
+          return aValue < bValue ? 1 : -1;
+        }
+      });
+
+      count = result.count;
+
+      // Apply pagination after sorting
+      const startIndex = offset;
+      const endIndex = startIndex + limit;
+      rows = allEnriched.slice(startIndex, endIndex);
+    } else {
+      // Use database sorting for simple fields (created, priority)
+      const result = await WorkflowRequest.findAndCountAll({
+        where,
+        offset,
+        limit,
+        order,
+        include: [
+          { association: 'initiator', required: false, attributes: ['userId', 'email', 'displayName', 'department', 'designation'] },
+        ],
+      });
+      rows = result.rows;
+      count = result.count;
+    }
+
+    const data = needsPostEnrichmentSort ? rows : await this.enrichForCards(rows);
     return { data, pagination: { page, limit, total: count, totalPages: Math.ceil(count / limit) || 1 } };
   }

-  async listClosedByMe(userId: string, page: number, limit: number) {
+  async listClosedByMe(userId: string, page: number, limit: number, filters?: { search?: string; status?: string; priority?: string }, sortBy?: string, sortOrder?: string) {
     const offset = (page - 1) * limit;

     // Get requests where user participated as approver
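A minimal standalone sketch of the "sort by computed field, then paginate in memory" step used above for `sortBy=due`/`sla`; the types and field names are simplified assumptions rather than the service's actual enriched shape:

```typescript
interface EnrichedCard {
  currentLevelSLA?: { deadline?: string; percentageUsed?: number };
}

function sortAndPaginate<T extends EnrichedCard>(
  items: T[],
  sortBy: 'due' | 'sla',
  sortOrder: 'asc' | 'desc',
  page: number,
  limit: number
): T[] {
  // Compute the sort key: due date as epoch millis (missing -> last), or SLA percentage used
  const value = (item: T) =>
    sortBy === 'due'
      ? (item.currentLevelSLA?.deadline ? new Date(item.currentLevelSLA.deadline).getTime() : Number.MAX_SAFE_INTEGER)
      : (item.currentLevelSLA?.percentageUsed || 0);

  const sorted = [...items].sort((a, b) =>
    sortOrder === 'asc' ? value(a) - value(b) : value(b) - value(a)
  );

  const offset = (page - 1) * limit;
  return sorted.slice(offset, offset + limit);
}
```

The trade-off, as the comments in the hunk note, is that every matching row is fetched and enriched before pagination; that is acceptable for moderate result sets but heavier than SQL-level pagination used for the simple `created`/`priority` sorts.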
@@ -722,40 +875,130 @@ export class WorkflowService {
     const whereConditions: any[] = [];

     // 1. Requests where user was approver/spectator (show APPROVED, REJECTED, CLOSED)
+    const approverSpectatorStatuses = [
+      WorkflowStatus.APPROVED as any,
+      WorkflowStatus.REJECTED as any,
+      (WorkflowStatus as any).CLOSED ?? 'CLOSED',
+      'APPROVED',
+      'REJECTED',
+      'CLOSED'
+    ] as any;
+
     if (allRequestIds.length > 0) {
-      whereConditions.push({
-        requestId: { [Op.in]: allRequestIds },
-        status: { [Op.in]: [
-          WorkflowStatus.APPROVED as any,
-          WorkflowStatus.REJECTED as any,
-          (WorkflowStatus as any).CLOSED ?? 'CLOSED',
-          'APPROVED',
-          'REJECTED',
-          'CLOSED'
-        ] as any }
-      });
+      const approverConditionParts: any[] = [
+        { requestId: { [Op.in]: allRequestIds } }
+      ];
+
+      // Apply status filter
+      if (filters?.status && filters.status !== 'all') {
+        approverConditionParts.push({ status: filters.status.toUpperCase() });
+      } else {
+        approverConditionParts.push({ status: { [Op.in]: approverSpectatorStatuses } });
+      }
+
+      // Apply priority filter
+      if (filters?.priority && filters.priority !== 'all') {
+        approverConditionParts.push({ priority: filters.priority.toUpperCase() });
+      }
+
+      // Apply search filter (title, description, or requestNumber)
+      if (filters?.search && filters.search.trim()) {
+        approverConditionParts.push({
+          [Op.or]: [
+            { title: { [Op.iLike]: `%${filters.search.trim()}%` } },
+            { description: { [Op.iLike]: `%${filters.search.trim()}%` } },
+            { requestNumber: { [Op.iLike]: `%${filters.search.trim()}%` } }
+          ]
+        });
+      }
+
+      const approverCondition = approverConditionParts.length > 0
+        ? { [Op.and]: approverConditionParts }
+        : { requestId: { [Op.in]: allRequestIds } };
+
+      whereConditions.push(approverCondition);
     }

     // 2. Requests where user is initiator (show ONLY REJECTED or CLOSED, NOT APPROVED)
     // APPROVED means initiator still needs to finalize conclusion
-    whereConditions.push({
-      initiatorId: userId,
-      status: { [Op.in]: [
-        WorkflowStatus.REJECTED as any,
-        (WorkflowStatus as any).CLOSED ?? 'CLOSED',
-        'REJECTED',
-        'CLOSED'
-      ] as any }
-    });
+    const initiatorStatuses = [
+      WorkflowStatus.REJECTED as any,
+      (WorkflowStatus as any).CLOSED ?? 'CLOSED',
+      'REJECTED',
+      'CLOSED'
+    ] as any;
+
+    const initiatorConditionParts: any[] = [
+      { initiatorId: userId }
+    ];
+
+    // Apply status filter
+    if (filters?.status && filters.status !== 'all') {
+      const filterStatus = filters.status.toUpperCase();
+      // Only apply if status is REJECTED or CLOSED (not APPROVED for initiator)
+      if (filterStatus === 'REJECTED' || filterStatus === 'CLOSED') {
+        initiatorConditionParts.push({ status: filterStatus });
+      } else {
+        // If filtering for APPROVED, don't include initiator requests
+        initiatorConditionParts.push({ status: { [Op.in]: [] } }); // Empty set - no results
+      }
+    } else {
+      initiatorConditionParts.push({ status: { [Op.in]: initiatorStatuses } });
+    }
+
+    // Apply priority filter
+    if (filters?.priority && filters.priority !== 'all') {
+      initiatorConditionParts.push({ priority: filters.priority.toUpperCase() });
+    }
+
+    // Apply search filter (title, description, or requestNumber)
+    if (filters?.search && filters.search.trim()) {
+      initiatorConditionParts.push({
+        [Op.or]: [
+          { title: { [Op.iLike]: `%${filters.search.trim()}%` } },
+          { description: { [Op.iLike]: `%${filters.search.trim()}%` } },
+          { requestNumber: { [Op.iLike]: `%${filters.search.trim()}%` } }
+        ]
+      });
+    }
+
+    const initiatorCondition = initiatorConditionParts.length > 0
+      ? { [Op.and]: initiatorConditionParts }
+      : { initiatorId: userId };
+
+    whereConditions.push(initiatorCondition);
+
+    // Build where clause with OR conditions
+    const where: any = whereConditions.length > 0 ? { [Op.or]: whereConditions } : {};
+
+    // Build order clause based on sortBy parameter
+    let order: any[] = [['createdAt', 'DESC']]; // Default order
+    const validSortOrder = (sortOrder?.toLowerCase() === 'asc' ? 'ASC' : 'DESC');
+
+    if (sortBy) {
+      switch (sortBy.toLowerCase()) {
+        case 'created':
+          order = [['createdAt', validSortOrder]];
+          break;
+        case 'due':
+          // Sort by closureDate or updatedAt (closed date)
+          order = [['updatedAt', validSortOrder], ['createdAt', 'DESC']];
+          break;
+        case 'priority':
+          order = [['priority', validSortOrder], ['createdAt', 'DESC']];
+          break;
+        default:
+          // Unknown sortBy, use default
+          break;
+      }
+    }
+
     // Fetch closed/rejected/approved requests (including finalized ones)
     const { rows, count } = await WorkflowRequest.findAndCountAll({
-      where: {
-        [Op.or]: whereConditions
-      },
+      where,
       offset,
       limit,
-      order: [['createdAt', 'DESC']],
+      order,
       include: [
         { association: 'initiator', required: false, attributes: ['userId', 'email', 'displayName', 'department', 'designation'] },
       ],
@@ -763,7 +1006,7 @@ export class WorkflowService {
     const data = await this.enrichForCards(rows);
     return { data, pagination: { page, limit, total: count, totalPages: Math.ceil(count / limit) || 1 } };
   }
-  async createWorkflow(initiatorId: string, workflowData: CreateWorkflowRequest): Promise<WorkflowRequest> {
+  async createWorkflow(initiatorId: string, workflowData: CreateWorkflowRequest, requestMetadata?: { ipAddress?: string | null; userAgent?: string | null }): Promise<WorkflowRequest> {
     try {
       const requestNumber = generateRequestNumber();
       const totalTatHours = workflowData.approvalLevels.reduce((sum, level) => sum + level.tatHours, 0);
@@ -834,7 +1077,9 @@ export class WorkflowService {
         user: { userId: initiatorId, name: initiatorName },
         timestamp: new Date().toISOString(),
         action: 'Initial request submitted',
-        details: `Initial request submitted for ${workflowData.title} by ${initiatorName}`
+        details: `Initial request submitted for ${workflowData.title} by ${initiatorName}`,
+        ipAddress: requestMetadata?.ipAddress || undefined,
+        userAgent: requestMetadata?.userAgent || undefined
       });

       // Send notification to INITIATOR confirming submission
@@ -71,7 +71,7 @@ export class WorkNoteService {
     }
   }

-  async create(requestId: string, user: { userId: string; name?: string; role?: string }, payload: { message: string; isPriority?: boolean; parentNoteId?: string | null; mentionedUsers?: string[] | null; }, files?: Array<{ path: string; originalname: string; mimetype: string; size: number }>): Promise<any> {
+  async create(requestId: string, user: { userId: string; name?: string; role?: string }, payload: { message: string; isPriority?: boolean; parentNoteId?: string | null; mentionedUsers?: string[] | null; }, files?: Array<{ path: string; originalname: string; mimetype: string; size: number }>, requestMetadata?: { ipAddress?: string | null; userAgent?: string | null }): Promise<any> {
     logger.info('[WorkNote] Creating note:', { requestId, user, messageLength: payload.message?.length });

     const note = await WorkNote.create({
@@ -123,7 +123,9 @@ export class WorkNoteService {
       user: { userId: user.userId, name: user.name || 'User' },
       timestamp: new Date().toISOString(),
       action: 'Work Note Added',
-      details: `${user.name || 'User'} added a work note: ${payload.message.substring(0, 100)}${payload.message.length > 100 ? '...' : ''}`
+      details: `${user.name || 'User'} added a work note: ${payload.message.substring(0, 100)}${payload.message.length > 100 ? '...' : ''}`,
+      ipAddress: requestMetadata?.ipAddress || undefined,
+      userAgent: requestMetadata?.userAgent || undefined
     });

     try {
80
src/utils/requestUtils.ts
Normal file
@@ -0,0 +1,80 @@
import { Request } from 'express';

/**
 * Extract client IP address from request
 * Handles proxies and load balancers via x-forwarded-for header
 * Normalizes IPv6 loopback (::1) to IPv4 loopback (127.0.0.1)
 */
export function getClientIp(req: Request | any): string | null {
  let ip: string | null = null;

  // Priority 1: Check x-forwarded-for header (for proxies/load balancers)
  const forwardedFor = req.headers['x-forwarded-for'];
  if (forwardedFor) {
    // x-forwarded-for can contain multiple IPs, take the first one
    const ips = Array.isArray(forwardedFor) ? forwardedFor[0] : forwardedFor;
    ip = ips.split(',')[0].trim();
  }

  // Priority 2: Check x-real-ip header (some proxies use this)
  if (!ip) {
    const realIp = req.headers['x-real-ip'];
    if (realIp) {
      ip = Array.isArray(realIp) ? realIp[0] : realIp;
    }
  }

  // Priority 3: Check cf-connecting-ip (Cloudflare)
  if (!ip) {
    const cfIp = req.headers['cf-connecting-ip'];
    if (cfIp) {
      ip = Array.isArray(cfIp) ? cfIp[0] : cfIp;
    }
  }

  // Priority 4: Fallback to req.ip (requires trust proxy to be set)
  if (!ip && req.ip) {
    ip = req.ip;
  }

  // Priority 5: Check connection remote address
  if (!ip && req.socket?.remoteAddress) {
    ip = req.socket.remoteAddress;
  }

  // Normalize IPv6 loopback to IPv4 loopback for consistency
  if (ip === '::1' || ip === '::ffff:127.0.0.1') {
    ip = '127.0.0.1';
  }

  // Remove IPv6 prefix if present (::ffff:)
  if (ip && ip.startsWith('::ffff:')) {
    ip = ip.substring(7);
  }

  return ip;
}

/**
 * Extract user agent from request
 */
export function getUserAgent(req: Request | any): string | null {
  return req.headers['user-agent'] || null;
}

/**
 * Extract both IP and user agent from request
 * Returns undefined instead of null to match TypeScript optional property types
 */
export function getRequestMetadata(req: Request | any): {
  ipAddress: string | undefined;
  userAgent: string | undefined;
} {
  const ip = getClientIp(req);
  const ua = getUserAgent(req);
  return {
    ipAddress: ip || undefined,
    userAgent: ua || undefined
  };
}
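A usage sketch for the new helper in an Express handler; the route path is a placeholder, and `app.set('trust proxy', true)` is only needed when the app runs behind a proxy, as the `req.ip` fallback comment above notes:

```typescript
import express from 'express';
import { getRequestMetadata } from '@utils/requestUtils';

const app = express();
app.set('trust proxy', true); // lets req.ip reflect x-forwarded-for behind a proxy

app.post('/api/example', (req, res) => {
  const { ipAddress, userAgent } = getRequestMetadata(req);
  // Pass the metadata along to whatever service logs the activity
  res.json({ ipAddress: ipAddress ?? null, userAgent: userAgent ?? null });
});
```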
@@ -476,16 +476,18 @@ export async function isCurrentlyWorkingTime(priority: string = 'standard'): Pro
 export async function calculateSLAStatus(
   levelStartTime: Date | string,
   tatHours: number,
-  priority: string = 'standard'
+  priority: string = 'standard',
+  endDate?: Date | string | null
 ) {
   await loadWorkingHoursCache();
   await loadHolidaysCache();

   const startDate = dayjs(levelStartTime);
-  const now = dayjs();
+  // Use provided endDate if available (for completed requests), otherwise use current time
+  const endTime = endDate ? dayjs(endDate) : dayjs();

   // Calculate elapsed working hours
-  const elapsedHours = await calculateElapsedWorkingHours(levelStartTime, now.toDate(), priority);
+  const elapsedHours = await calculateElapsedWorkingHours(levelStartTime, endTime.toDate(), priority);
   const remainingHours = Math.max(0, tatHours - elapsedHours);
   const percentageUsed = tatHours > 0 ? Math.min(100, Math.round((elapsedHours / tatHours) * 100)) : 0;

@@ -497,7 +499,8 @@ export async function calculateSLAStatus(
     : (await addWorkingHours(levelStartTime, tatHours)).toDate();

   // Check if currently paused (outside working hours)
-  const isPaused = !(await isCurrentlyWorkingTime(priority));
+  // For completed requests (with endDate), it's not paused
+  const isPaused = endDate ? false : !(await isCurrentlyWorkingTime(priority));

   // Determine status
   let status: 'on_track' | 'approaching' | 'critical' | 'breached' = 'on_track';
@@ -694,3 +697,66 @@ export async function calculateElapsedWorkingHours(
   return hours;
 }
+
+/**
+ * Calculate business days between two dates
+ * Excludes weekends and holidays
+ * @param startDate - Start date
+ * @param endDate - End date (defaults to now)
+ * @param priority - 'express' or 'standard' (express includes weekends, standard excludes)
+ * @returns Number of business days
+ */
+export async function calculateBusinessDays(
+  startDate: Date | string,
+  endDate: Date | string | null = null,
+  priority: string = 'standard'
+): Promise<number> {
+  await loadWorkingHoursCache();
+  await loadHolidaysCache();
+
+  let start = dayjs(startDate).startOf('day');
+  const end = dayjs(endDate || new Date()).startOf('day');
+
+  // In test mode, use calendar days
+  if (isTestMode()) {
+    return end.diff(start, 'day') + 1;
+  }
+
+  const config = workingHoursCache || {
+    startHour: TAT_CONFIG.WORK_START_HOUR,
+    endHour: TAT_CONFIG.WORK_END_HOUR,
+    startDay: TAT_CONFIG.WORK_START_DAY,
+    endDay: TAT_CONFIG.WORK_END_DAY
+  };
+
+  let businessDays = 0;
+  let current = start;
+
+  // Count each day from start to end (inclusive)
+  while (current.isBefore(end) || current.isSame(end, 'day')) {
+    const dayOfWeek = current.day(); // 0 = Sunday, 6 = Saturday
+    const dateStr = current.format('YYYY-MM-DD');
+
+    // For express priority: count all days (including weekends) but exclude holidays
+    // For standard priority: count only working days (Mon-Fri) and exclude holidays
+    const isWorkingDay = priority === 'express'
+      ? true // Express includes weekends
+      : (dayOfWeek >= config.startDay && dayOfWeek <= config.endDay);
+
+    const isNotHoliday = !holidaysCache.has(dateStr);
+
+    if (isWorkingDay && isNotHoliday) {
+      businessDays++;
+    }
+
+    current = current.add(1, 'day');
+
+    // Safety check to prevent infinite loops
+    if (current.diff(start, 'day') > 730) { // 2 years
+      console.error('[TAT] Safety break - exceeded 2 years in business days calculation');
+      break;
+    }
+  }
+
+  return businessDays;
+}
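A possible use of `calculateBusinessDays` for a "days open" figure (placeholder dates; the actual aging-report call site is not shown in this diff):

```typescript
import { calculateBusinessDays } from '@utils/tatTimeUtils';

async function daysOpenExample() {
  const createdAt = new Date('2025-01-03'); // Friday (placeholder)
  const daysOpen = await calculateBusinessDays(createdAt, new Date('2025-01-07'), 'standard');
  // With a Mon-Fri working week and no holidays in the span: Fri, Mon, Tue counted -> 3
  console.log(daysOpen);
}
```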
85
src/utils/userAgentParser.ts
Normal file
@@ -0,0 +1,85 @@
/**
 * Parse user agent string to extract device, browser, and OS information
 * Simple parser without external dependencies
 */

export interface ParsedUserAgent {
  deviceType: 'WEB' | 'MOBILE' | 'TABLET' | 'UNKNOWN';
  browser: string;
  os: string;
  raw: string;
}

/**
 * Parse user agent string to extract device, browser, and OS
 */
export function parseUserAgent(userAgent: string | null | undefined): ParsedUserAgent {
  if (!userAgent) {
    return {
      deviceType: 'UNKNOWN',
      browser: 'Unknown',
      os: 'Unknown',
      raw: ''
    };
  }

  const ua = userAgent.toLowerCase();

  // Detect device type
  let deviceType: 'WEB' | 'MOBILE' | 'TABLET' | 'UNKNOWN' = 'WEB';
  if (/tablet|ipad|playbook|silk/i.test(userAgent)) {
    deviceType = 'TABLET';
  } else if (/mobile|iphone|ipod|android|blackberry|opera|mini|windows\s+phone|palm|iemobile/i.test(userAgent)) {
    deviceType = 'MOBILE';
  }

  // Detect browser
  let browser = 'Unknown';
  if (ua.includes('edg/')) {
    browser = 'Edge';
  } else if (ua.includes('chrome/') && !ua.includes('edg/')) {
    browser = 'Chrome';
  } else if (ua.includes('firefox/')) {
    browser = 'Firefox';
  } else if (ua.includes('safari/') && !ua.includes('chrome/')) {
    browser = 'Safari';
  } else if (ua.includes('opera/') || ua.includes('opr/')) {
    browser = 'Opera';
  } else if (ua.includes('msie') || ua.includes('trident/')) {
    browser = 'Internet Explorer';
  }

  // Detect OS
  let os = 'Unknown';
  if (ua.includes('windows nt')) {
    if (ua.includes('windows nt 10.0')) {
      os = 'Windows 10/11';
    } else if (ua.includes('windows nt 6.3')) {
      os = 'Windows 8.1';
    } else if (ua.includes('windows nt 6.2')) {
      os = 'Windows 8';
    } else if (ua.includes('windows nt 6.1')) {
      os = 'Windows 7';
    } else {
      os = 'Windows';
    }
  } else if (ua.includes('mac os x') || ua.includes('macintosh')) {
    os = 'macOS';
  } else if (ua.includes('linux')) {
    os = 'Linux';
  } else if (ua.includes('android')) {
    os = 'Android';
  } else if (ua.includes('iphone') || ua.includes('ipad')) {
    os = 'iOS';
  } else if (ua.includes('ipod')) {
    os = 'iOS';
  }

  return {
    deviceType,
    browser,
    os,
    raw: userAgent
  };
}
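Example usage of the new parser, assuming the same `@utils` path alias used elsewhere in this commit:

```typescript
import { parseUserAgent } from '@utils/userAgentParser';

// Chrome on Windows 10 user agent string (sample value)
const parsed = parseUserAgent(
  'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36'
);
// parsed.deviceType === 'WEB', parsed.browser === 'Chrome', parsed.os === 'Windows 10/11'
console.log(parsed);
```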