diff --git a/GCS_CONFIGURATION_GUIDE.md b/GCS_CONFIGURATION_GUIDE.md
new file mode 100644
index 0000000..b380d0e
--- /dev/null
+++ b/GCS_CONFIGURATION_GUIDE.md
@@ -0,0 +1,125 @@
+# GCS (Google Cloud Storage) Configuration Guide
+
+## Overview
+All document uploads (workflow documents, work note attachments) are now configured to use Google Cloud Storage (GCS) instead of local file storage.
+
+## Configuration Steps
+
+### 1. Update `.env` File
+
+Add or update the following environment variables in your `.env` file:
+
+```env
+# Cloud Storage (GCP)
+GCP_PROJECT_ID=re-platform-workflow-dealer
+GCP_BUCKET_NAME=your-bucket-name-here
+GCP_KEY_FILE=./credentials/re-platform-workflow-dealer-3d5738fcc1f9.json
+```
+
+**Important Notes:**
+- `GCP_PROJECT_ID`: Should match the `project_id` in your credentials JSON file (currently: `re-platform-workflow-dealer`)
+- `GCP_BUCKET_NAME`: The name of your GCS bucket (create one in GCP Console if needed)
+- `GCP_KEY_FILE`: Path to your service account credentials JSON file (relative to project root or absolute path)
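+
+For reference, a minimal sketch of how these variables might feed the `@google-cloud/storage` client (illustrative; the actual service code may differ):
+
+```typescript
+// Illustrative wiring of the env vars above into the GCS client.
+import { Storage } from '@google-cloud/storage';
+
+const storage = new Storage({
+  projectId: process.env.GCP_PROJECT_ID,  // must match project_id in the key file
+  keyFilename: process.env.GCP_KEY_FILE,  // path to the service account JSON
+});
+
+const bucket = storage.bucket(process.env.GCP_BUCKET_NAME ?? '');
+```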
+
+### 2. Create GCS Bucket (if not exists)
+
+1. Go to [Google Cloud Console](https://console.cloud.google.com/)
+2. Navigate to **Cloud Storage** > **Buckets**
+3. Click **Create Bucket**
+4. Choose a unique bucket name (e.g., `re-workflow-documents`)
+5. Select a location for your bucket
+6. Set permissions:
+ - Make the bucket publicly readable (for public URLs), OR
+ - Keep it private and use signed URLs (more secure)
+
+### 3. Grant Service Account Permissions
+
+Your service account (`re-bridge-workflow@re-platform-workflow-dealer.iam.gserviceaccount.com`) needs:
+- **Storage Object Admin** role (to upload/delete files)
+- **Storage Object Viewer** role (to read files)
+
+### 4. Verify Configuration
+
+The system will:
+- ✅ Automatically detect if GCS is configured
+- ✅ Fall back to local storage if GCS is not configured
+- ✅ Upload files to GCS when configured
+- ✅ Store GCS URLs in the database
+- ✅ Redirect downloads/previews to GCS URLs
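+
+A sketch of this detect-and-fallback behavior (the upload/save helpers are hypothetical stand-ins for the real service functions):
+
+```typescript
+// Hypothetical helpers standing in for the real upload/save functions.
+declare function uploadToGcs(buffer: Buffer, key: string): Promise<string>;        // returns a GCS URL
+declare function saveToLocalUploads(buffer: Buffer, key: string): Promise<string>; // returns a /uploads/... URL
+
+function isGcsConfigured(): boolean {
+  return Boolean(process.env.GCP_PROJECT_ID && process.env.GCP_BUCKET_NAME && process.env.GCP_KEY_FILE);
+}
+
+async function storeFile(buffer: Buffer, key: string): Promise<string> {
+  if (isGcsConfigured()) {
+    try {
+      return await uploadToGcs(buffer, key);
+    } catch (err) {
+      console.warn('[GCS] upload failed, falling back to local storage', err);
+    }
+  }
+  return saveToLocalUploads(buffer, key);
+}
+```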
+
+## File Storage Structure
+
+Files are organized in GCS by request number with subfolders for documents and attachments:
+
+```
+reflow-documents-uat/
+├── requests/
+│   ├── REQ-2025-12-0001/
+│   │   ├── documents/
+│   │   │   ├── {timestamp}-{hash}-{filename}
+│   │   │   └── ...
+│   │   └── attachments/
+│   │       ├── {timestamp}-{hash}-{filename}
+│   │       └── ...
+│   ├── REQ-2025-12-0002/
+│   │   ├── documents/
+│   │   └── attachments/
+│   └── ...
+```
+
+- **Documents**: `requests/{requestNumber}/documents/{timestamp}-{hash}-{filename}`
+- **Work Note Attachments**: `requests/{requestNumber}/attachments/{timestamp}-{hash}-{filename}`
+
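+A hypothetical helper showing how such keys can be built (the service's exact hash scheme may differ):
+
+```typescript
+// Hypothetical key builder following the pattern above.
+import crypto from 'crypto';
+import path from 'path';
+
+function buildObjectKey(
+  requestNumber: string,
+  fileType: 'documents' | 'attachments',
+  originalName: string,
+): string {
+  const hash = crypto.randomBytes(4).toString('hex'); // short random component
+  const safeName = path.basename(originalName);       // strip any directory parts
+  return `requests/${requestNumber}/${fileType}/${Date.now()}-${hash}-${safeName}`;
+}
+```
+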
+This structure makes it easy to:
+- Track all files for a specific request
+- Organize documents vs attachments separately
+- Navigate and manage files in GCS console
+
+## How It Works
+
+### Upload Flow
+1. File is received via multer (memory storage)
+2. File buffer is uploaded to GCS
+3. GCS returns a public URL
+4. URL is stored in database (`storage_url` field)
+5. Local file is deleted (if it existed)
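+
+A minimal sketch of steps 2 and 3, assuming `@google-cloud/storage`:
+
+```typescript
+// Sketch of the buffer upload (steps 2-3 above).
+import type { Bucket } from '@google-cloud/storage';
+
+async function uploadBuffer(bucket: Bucket, buffer: Buffer, key: string, contentType: string): Promise<string> {
+  const file = bucket.file(key);
+  await file.save(buffer, { contentType, resumable: false }); // single-shot upload from memory
+  return `https://storage.googleapis.com/${bucket.name}/${key}`; // URL stored in storage_url
+}
+```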
+
+### Download/Preview Flow
+1. System checks if `storage_url` is a GCS URL
+2. If GCS URL: Redirects to GCS public URL
+3. If local path: Serves file from local storage
+
+## Troubleshooting
+
+### Files not uploading to GCS
+- Check `.env` configuration matches your credentials
+- Verify service account has correct permissions
+- Check bucket name exists and is accessible
+- Review application logs for GCS errors
+
+### Files uploading but not accessible
+- Verify bucket permissions (public read or signed URLs)
+- Check CORS configuration if accessing from the browser
+- Ensure `storage_url` is being saved correctly in the database
+
+### Fallback to Local Storage
+If GCS is not configured or fails, the system will:
+- Log a warning
+- Continue using local file storage
+- Store local paths in database
+
+## Testing
+
+After configuration:
+1. Upload a document via API
+2. Check the database - `storage_url` should contain a GCS URL
+3. Try downloading/previewing the document
+4. Verify file is accessible at GCS URL
+
+## Security Notes
+
+- **Public Buckets**: Files are publicly accessible via URL
+- **Private Buckets**: Consider using signed URLs for better security
+- **Service Account**: Keep credentials file secure, never commit to git
+- **Bucket Policies**: Configure bucket-level permissions as needed
+
diff --git a/TESTING_GCS_UPLOADS.md b/TESTING_GCS_UPLOADS.md
new file mode 100644
index 0000000..711a446
--- /dev/null
+++ b/TESTING_GCS_UPLOADS.md
@@ -0,0 +1,216 @@
+# Testing GCS File Uploads from Frontend
+
+## ✅ Pre-Testing Checklist
+
+Before testing, ensure the following are configured:
+
+### 1. Environment Variables (.env file)
+
+Make sure your `.env` file has these values:
+
+```env
+GCP_PROJECT_ID=re-platform-workflow-dealer
+GCP_BUCKET_NAME=your-bucket-name-here
+GCP_KEY_FILE=./credentials/re-platform-workflow-dealer-3d5738fcc1f9.json
+```
+
+**Important:**
+- Replace `your-bucket-name-here` with your actual GCS bucket name
+- Ensure the credentials file path is correct
+- The credentials file should exist at the specified path
+
+### 2. GCS Bucket Setup
+
+- [ ] Bucket exists in GCP Console
+- [ ] Service account has permissions (Storage Object Admin)
+- [ ] Bucket is accessible (public or with proper IAM)
+
+### 3. Backend Server
+
+- [ ] Backend server is running
+- [ ] Check backend logs for GCS initialization message:
+ ```
+ [GCS] Initialized successfully
+ ```
+
+## 🧪 Testing Steps
+
+### Test 1: Upload Document (Standalone)
+
+1. **Navigate to a Request Detail page**
+ - Open any existing workflow request
+ - Go to the "Documents" tab
+
+2. **Upload a document**
+ - Click "Upload Document" or browse button
+ - Select a file (PDF, DOCX, etc.)
+ - Wait for upload to complete
+
+3. **Verify in Backend Logs:**
+ ```
+ [GCS] File uploaded successfully
+ ```
+
+4. **Check Database:**
+ - The `storage_url` field should contain a GCS URL like:
+ ```
+ https://storage.googleapis.com/BUCKET_NAME/requests/REQ-2025-12-0001/documents/...
+ ```
+
+5. **Verify in GCS Console:**
+ - Go to GCS Console
+ - Navigate to: `requests/{requestNumber}/documents/`
+ - File should be there
+
+### Test 2: Upload Document During Workflow Creation
+
+1. **Create New Workflow**
+ - Go to "Create Request"
+ - Fill in workflow details
+ - In "Documents" step, upload files
+ - Submit workflow
+
+2. **Verify:**
+ - Check backend logs for GCS upload
+ - Check GCS bucket: `requests/{requestNumber}/documents/`
+ - Files should be organized by request number
+
+### Test 3: Upload Work Note Attachment
+
+1. **Open Work Notes/Chat**
+ - Go to any request
+ - Open the work notes/chat section
+
+2. **Attach File to Comment**
+ - Type a comment
+ - Click attachment icon
+ - Select a file
+ - Send the comment
+
+3. **Verify:**
+ - Check backend logs
+ - Check GCS bucket: `requests/{requestNumber}/attachments/`
+ - File should appear in attachments folder
+
+### Test 4: Download/Preview Files
+
+1. **Download Document**
+ - Click download on any document
+ - Should redirect to GCS URL or download from GCS
+
+2. **Preview Document**
+ - Click preview on any document
+ - Should open from GCS URL
+
+## 🔍 What to Check
+
+### Backend Logs
+
+**Success:**
+```
+[GCS] Initialized successfully { projectId: '...', bucketName: '...' }
+[GCS] File uploaded successfully { fileName: '...', gcsPath: '...' }
+```
+
+**Error (Falls back to local):**
+```
+[GCS] GCP configuration missing. File uploads will fail.
+[GCS] GCS upload failed, falling back to local storage
+```
+
+### Database Verification
+
+Check the `documents` and `work_note_attachments` tables:
+
+```sql
+-- Check documents
+SELECT document_id, file_name, storage_url, file_path
+FROM documents
+WHERE request_id = 'YOUR_REQUEST_ID';
+
+-- Check attachments
+SELECT attachment_id, file_name, storage_url, file_path
+FROM work_note_attachments
+WHERE note_id IN (
+ SELECT note_id FROM work_notes WHERE request_id = 'YOUR_REQUEST_ID'
+);
+```
+
+**Expected:**
+- `storage_url` should contain GCS URL (if GCS configured)
+- `file_path` should contain GCS path like `requests/REQ-2025-12-0001/documents/...`
+
+### GCS Console Verification
+
+1. Go to [GCS Console](https://console.cloud.google.com/storage)
+2. Navigate to your bucket
+3. Check folder structure:
+ ```
+ requests/
+ ├── REQ-2025-12-0001/
+ │   ├── documents/
+ │   │   └── {timestamp}-{hash}-{filename}
+ │   └── attachments/
+ │       └── {timestamp}-{hash}-{filename}
+ ```
+
+## 🐛 Troubleshooting
+
+### Issue: Files not uploading to GCS
+
+**Check:**
+1. `.env` file has correct values
+2. Credentials file exists at specified path
+3. Service account has correct permissions
+4. Bucket name is correct
+5. Backend logs for errors
+
+**Solution:**
+- System will automatically fall back to local storage
+- Fix configuration and restart backend
+- Re-upload files
+
+### Issue: "GCP configuration missing" in logs
+
+**Cause:** Missing or incorrect environment variables
+
+**Fix:**
+```env
+GCP_PROJECT_ID=re-platform-workflow-dealer
+GCP_BUCKET_NAME=your-actual-bucket-name
+GCP_KEY_FILE=./credentials/re-platform-workflow-dealer-3d5738fcc1f9.json
+```
+
+### Issue: "Key file not found"
+
+**Cause:** Credentials file path is incorrect
+
+**Fix:**
+- Verify file exists at: `Re_Backend/credentials/re-platform-workflow-dealer-3d5738fcc1f9.json`
+- Update `GCP_KEY_FILE` path in `.env` if needed
+
+### Issue: Files upload but can't download/preview
+
+**Cause:** Bucket permissions or CORS configuration
+
+**Fix:**
+- Check bucket IAM permissions
+- Verify CORS is configured (see GCP_STORAGE_SETUP.md)
+- Check if bucket is public or using signed URLs
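+
+If CORS is the blocker, a rule can also be applied programmatically (a sketch using `@google-cloud/storage`; origin and bucket name are placeholders):
+
+```typescript
+// Sketch: apply a CORS rule to the bucket (placeholder origin/bucket).
+import { Storage } from '@google-cloud/storage';
+
+async function configureCors(): Promise<void> {
+  const storage = new Storage();
+  await storage.bucket('your-bucket-name').setCorsConfiguration([
+    {
+      origin: ['https://your-frontend-domain'],
+      method: ['GET', 'HEAD'],
+      responseHeader: ['Content-Type'],
+      maxAgeSeconds: 3600,
+    },
+  ]);
+}
+```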
+
+## ✅ Success Indicators
+
+- ✅ Backend logs show "GCS Initialized successfully"
+- ✅ Files upload without errors
+- ✅ Database `storage_url` contains GCS URLs
+- ✅ Files visible in GCS Console under correct folder structure
+- ✅ Downloads/previews work from GCS URLs
+- ✅ Files organized by request number with documents/attachments separation
+
+## 📝 Notes
+
+- **No Frontend Changes Required:** The frontend uses the same API endpoints
+- **Automatic Fallback:** If GCS is not configured, system uses local storage
+- **Backward Compatible:** Existing local files continue to work
+- **Folder Structure:** Files are automatically organized by request number
+
diff --git a/credentials/re-platform-workflow-dealer-3d5738fcc1f9.json b/credentials/re-platform-workflow-dealer-3d5738fcc1f9.json
new file mode 100644
index 0000000..2628f02
--- /dev/null
+++ b/credentials/re-platform-workflow-dealer-3d5738fcc1f9.json
@@ -0,0 +1,13 @@
+{
+ "type": "service_account",
+ "project_id": "re-platform-workflow-dealer",
+ "private_key_id": "3d5738fcc1f9d44e4521f86d690d09317cb40f3b",
+ "private_key": "-----BEGIN PRIVATE KEY-----\nMIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQC2PM6D3wtRBSHm\nrbDqPraUt+EdJkZDOABC2U7sLeO8fIJjdwC3tzDBiCFJkUF2LoFWgI4S0xNFLNk2\njvK6+J8DsTv1feZ6UwrFazYyC1Xexcm6OAQBsfIZdVHsBjzOLNvVI/83Sl+siQv9\nKteN/OoBnAC+ietxk9RGzW706m6irte7nJ4BhOW+SDMaB8QuKJFSDQfpraLL7osI\ntcxG+n7LhO+Qi4slvcCrEIo0jUEHREbjlBagjpJnfnuVpi2Le0UBzRd8seMCzH3c\n4d4sxSI6ChaJBldv0TRKSpj2O0Vc35+tGCd0D/iUzSlLvdMkv7ettYGSjhM2rL4b\nc6O0vQbDAgMBAAECggEAWgki0v5Qvf+2Jx0rah//3uwWOWuejTlOz7hDiOaHPKmb\nVf8GiL3mRce3AnzUhcomNpGfH+fO/n9Q9eacQAnzrkRTZk+Enm0GxlDY3tLA4yZ/\nKxTfzeKXxUI0blMKmaaKGf0F69BAAqNXHAadptYM2yyzJXBItb2exDhdGH32mULI\nG8ZPFnw+pNwJkxGPy60CZvbbwTp4dfGwVabPLx08B0hRLjggke0dCm7I5SgPxTwa\nrqemkF0M+OMGNi87eTuhgYVG8ApGgW11fvFOtvQBZ9VCQgQiqLl4nvraSdGBmKtf\nZQKxsqMHfpqrcndF7m07hWgk/mn6rRnsnj8BHn0XcQKBgQDyFjO9SEl++byJ99FJ\nEsUvj6IUq22wejJNMtdyETyT5rPCD3nMiPjU73HbYsggZ0m1C5cCU9VIHPRy3jy+\nO3WW2pv5YeIyUmfZqk5FWJktFOPisDEggZAOZE3D9V47tfvd7L5uK5yo83ncDRrz\n8p60v7imf2eMKdTjF8wB08xkCQKBgQDAtgycmJmQrbTjj5CJD1EWADxMBeIyBoNW\nV6qHCiKOdNw+NME0RDhy5Uuv70bjHnc41fhHRZprzoUjpNQSEbgg/eQI7dKKQjHP\n4ISb9y7rbfIbV9BUvR+TLTBEyTxknPmwRnknYmSy9e4XjzZOduGgZ0glFPIJWKkR\nYozHimk/awKBgQCWwkbUUKkcfw/v57mYxSyxUsSAFMYJif+7XbcX3S4ZeSlm59ZV\nDtPPX5JLKngw3cHkEmSnWWfQMd/1jPrNCSBQorFRm6iO6AyuW8XEn8k8bu7/4/Ok\nJ6t7mvFm4G4fx1Qjv2RUHarA+GdiJ3MqimRVcbPfVCY6/m4KQm6UkL6PaQKBgGLg\nhZQLkC91kSx5SvWoEDizojx3gFmekeDJVku3XYeuWhrowoDox/XbxHvez4ZU6WMW\nFi+rfNH3wsRJHC6xPMJgwpH6RF6AHELGtgO4TjCp1uFEqzXvW7YOJ4gDoKMXD93s\nKtmUWIqiOKmJ55lW0emVVKUCHDXDcevjnsv7LolFAoGAeDo7II0y/iUtb9Pni8V2\nnqwdZ9h+RyxD8ua374/fTTnwKDrt8+XkL1oU2Zca6aaF5NDudjta9ZyLuga3/RjH\nCKOyT1nuWBKW67fVS7yosOCksoFygs5O/ZvfC3D1b7hrJN8oaMJCECB5sJSCjyM9\nyjsJCTPGSnE9LKEJURCZYsM=\n-----END PRIVATE KEY-----\n",
+ "client_email": "re-bridge-workflow@re-platform-workflow-dealer.iam.gserviceaccount.com",
+ "client_id": "108776059196607325512",
+ "auth_uri": "https://accounts.google.com/o/oauth2/auth",
+ "token_uri": "https://oauth2.googleapis.com/token",
+ "auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs",
+ "client_x509_cert_url": "https://www.googleapis.com/robot/v1/metadata/x509/re-bridge-workflow%40re-platform-workflow-dealer.iam.gserviceaccount.com",
+ "universe_domain": "googleapis.com"
+}
diff --git a/docs/FILE_PATH_STORAGE.md b/docs/FILE_PATH_STORAGE.md
new file mode 100644
index 0000000..035f9e9
--- /dev/null
+++ b/docs/FILE_PATH_STORAGE.md
@@ -0,0 +1,212 @@
+# File Path Storage in Database - How It Works
+
+This document explains how file paths and storage URLs are stored in the database for different storage scenarios (GCS vs Local Storage).
+
+## Database Schema
+
+### Documents Table
+- **`file_path`** (VARCHAR(500), NOT NULL): Stores the relative path or GCS path
+- **`storage_url`** (VARCHAR(500), NULLABLE): Stores the full URL for accessing the file
+
+### Work Note Attachments Table
+- **`file_path`** (VARCHAR(500), NOT NULL): Stores the relative path or GCS path
+- **`storage_url`** (VARCHAR(500), NULLABLE): Stores the full URL for accessing the file
+
+## Storage Scenarios
+
+### Scenario 1: File Uploaded to GCS (Successfully)
+
+When GCS is configured and the upload succeeds:
+
+**Database Values:**
+```sql
+file_path = "requests/REQ-2025-12-0001/documents/1701234567890-abc123-proposal.pdf"
+storage_url = "https://storage.googleapis.com/bucket-name/requests/REQ-2025-12-0001/documents/1701234567890-abc123-proposal.pdf"
+```
+
+**File Location:**
+- Physical: Google Cloud Storage bucket
+- Path Structure: `requests/{requestNumber}/{fileType}/{fileName}`
+- Access: Public URL or signed URL (depending on bucket configuration)
+
+---
+
+### Scenario 2: File Saved to Local Storage (GCS Not Configured or Failed)
+
+When GCS is not configured or upload fails, files are saved to local storage:
+
+**Database Values:**
+```sql
+file_path = "requests/REQ-2025-12-0001/documents/1701234567890-abc123-proposal.pdf"
+storage_url = "/uploads/requests/REQ-2025-12-0001/documents/1701234567890-abc123-proposal.pdf"
+```
+
+**File Location:**
+- Physical: Local filesystem at `{UPLOAD_DIR}/requests/{requestNumber}/{fileType}/{fileName}`
+- Path Structure: Same as GCS structure for consistency
+- Access: Served via Express static middleware at `/uploads/*`
+
+**Example:**
+```
+uploads/
+└── requests/
+    └── REQ-2025-12-0001/
+        ├── documents/
+        │   └── 1701234567890-abc123-proposal.pdf
+        └── attachments/
+            └── 1701234567890-xyz789-note.pdf
+```
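+
+Serving this directory is a single line of Express setup (a sketch; how `UPLOAD_DIR` is resolved may differ in the real server):
+
+```typescript
+// Sketch: expose the local uploads directory at /uploads/* via Express static.
+import express from 'express';
+import path from 'path';
+
+const app = express();
+const UPLOAD_DIR = process.env.UPLOAD_DIR ?? path.join(process.cwd(), 'uploads');
+
+app.use('/uploads', express.static(UPLOAD_DIR));
+```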
+
+---
+
+### Scenario 3: Legacy Files (Before This Implementation)
+
+Older files may have different path formats:
+
+**Possible Database Values:**
+```sql
+file_path = "/absolute/path/to/uploads/file.pdf" -- Absolute path
+-- OR
+file_path = "file.pdf" -- Simple filename (in root uploads folder)
+storage_url = "/uploads/file.pdf" -- Simple URL
+```
+
+**File Location:**
+- Physical: Various locations depending on when file was uploaded
+- Access: Handled by legacy route logic
+
+---
+
+## How Download/Preview Routes Handle Different Storage Types
+
+### Document Preview Route (`GET /workflows/documents/:documentId/preview`)
+
+1. **Check if GCS URL:**
+ ```typescript
+ const isGcsUrl = storageUrl && (
+ storageUrl.startsWith('https://storage.googleapis.com') ||
+ storageUrl.startsWith('gs://')
+ );
+ ```
+ - If yes → Redirect to GCS URL
+
+2. **Check if Local Storage URL:**
+ ```typescript
+ if (storageUrl && storageUrl.startsWith('/uploads/')) {
+ res.redirect(storageUrl); // Express static serves it
+ return;
+ }
+ ```
+ - If yes → Redirect to `/uploads/...` (served by Express static middleware)
+
+3. **Legacy File Handling:**
+ ```typescript
+ const absolutePath = filePath && !path.isAbsolute(filePath)
+ ? path.join(UPLOAD_DIR, filePath)
+ : filePath;
+ ```
+ - Resolve relative path to absolute
+ - Serve file directly using `res.sendFile()`
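+
+Putting the three checks together, a sketch of the full decision chain (the document lookup and `UPLOAD_DIR` are stand-ins for the real implementations):
+
+```typescript
+// Illustrative assembly of the three checks above.
+import path from 'path';
+import type { Request, Response } from 'express';
+
+declare function findDocumentById(id: string): Promise<{ storage_url?: string; file_path: string }>;
+declare const UPLOAD_DIR: string;
+
+async function previewDocument(req: Request, res: Response): Promise<void> {
+  const { storage_url: storageUrl, file_path: filePath } = await findDocumentById(req.params.documentId);
+
+  // 1. GCS URL -> redirect to the bucket
+  if (storageUrl && (storageUrl.startsWith('https://storage.googleapis.com') || storageUrl.startsWith('gs://'))) {
+    res.redirect(storageUrl);
+    return;
+  }
+  // 2. New local file -> redirect to /uploads/* (Express static serves it)
+  if (storageUrl && storageUrl.startsWith('/uploads/')) {
+    res.redirect(storageUrl);
+    return;
+  }
+  // 3. Legacy file -> resolve to an absolute path and stream it directly
+  const absolutePath = path.isAbsolute(filePath) ? filePath : path.join(UPLOAD_DIR, filePath);
+  res.sendFile(absolutePath);
+}
+```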
+
+### Work Note Attachment Routes
+
+Same logic as document routes:
+- Preview: `/workflows/work-notes/attachments/:attachmentId/preview`
+- Download: `/workflows/work-notes/attachments/:attachmentId/download`
+
+---
+
+## Key Points
+
+### 1. Consistent Path Structure
+- **Both GCS and local storage** use the same path structure: `requests/{requestNumber}/{fileType}/{fileName}`
+- This makes migration seamless when moving from local to GCS
+
+### 2. Storage URL Format
+- **GCS:** Full HTTPS URL (`https://storage.googleapis.com/...`)
+- **Local:** Relative URL (`/uploads/requests/...`)
+- **Legacy:** May vary
+
+### 3. File Path Format
+- **GCS:** Relative path in bucket (`requests/REQ-.../documents/file.pdf`)
+- **Local:** Same relative path format for consistency
+- **Legacy:** May be absolute path or simple filename
+
+### 4. Automatic Fallback
+- When GCS fails, system automatically saves to local storage
+- Same folder structure maintained
+- No data loss
+
+### 5. Serving Files
+- **GCS files:** Redirect to public/signed URL
+- **Local files (new):** Redirect to `/uploads/...` (Express static)
+- **Local files (legacy):** Direct file serving with `res.sendFile()`
+
+---
+
+## Migration Path
+
+When migrating from local storage to GCS:
+
+1. **Files already follow same structure** - No path changes needed
+2. **Upload new files** - They automatically go to GCS if configured
+3. **Existing files** - Can remain in local storage until migrated
+4. **Database** - Only `storage_url` field changes (from `/uploads/...` to `https://...`)
+
+---
+
+## Example Database Records
+
+### GCS File (New Upload)
+```json
+{
+ "document_id": "uuid-123",
+ "file_path": "requests/REQ-2025-12-0001/documents/1701234567890-abc123-proposal.pdf",
+ "storage_url": "https://storage.googleapis.com/my-bucket/requests/REQ-2025-12-0001/documents/1701234567890-abc123-proposal.pdf",
+ "file_name": "1701234567890-abc123-proposal.pdf",
+ "original_file_name": "proposal.pdf"
+}
+```
+
+### Local Storage File (Fallback)
+```json
+{
+ "document_id": "uuid-456",
+ "file_path": "requests/REQ-2025-12-0001/documents/1701234567891-def456-report.pdf",
+ "storage_url": "/uploads/requests/REQ-2025-12-0001/documents/1701234567891-def456-report.pdf",
+ "file_name": "1701234567891-def456-report.pdf",
+ "original_file_name": "report.pdf"
+}
+```
+
+### Legacy File (Old Format)
+```json
+{
+ "document_id": "uuid-789",
+ "file_path": "/var/app/uploads/old-file.pdf",
+ "storage_url": "/uploads/old-file.pdf",
+ "file_name": "old-file.pdf",
+ "original_file_name": "old-file.pdf"
+}
+```
+
+---
+
+## Troubleshooting
+
+### Issue: File not found when downloading
+
+**Check:**
+1. Verify `storage_url` format in database
+2. Check if file exists at expected location:
+ - GCS: Check bucket and path
+ - Local: Check `{UPLOAD_DIR}/requests/...` path
+3. Verify Express static middleware is mounted at `/uploads`
+
+### Issue: Files not organizing correctly
+
+**Check:**
+1. Verify `requestNumber` is being passed correctly to upload functions
+2. Check folder structure matches: `requests/{requestNumber}/{fileType}/`
+3. Verify `fileType` is either `'documents'` or `'attachments'`
+
diff --git a/docs/SSO_IMPLEMENTATION.md b/docs/SSO_IMPLEMENTATION.md
new file mode 100644
index 0000000..d1148dd
--- /dev/null
+++ b/docs/SSO_IMPLEMENTATION.md
@@ -0,0 +1,1216 @@
+# SSO Implementation for RE Workflow Management System
+
+## 1. Landing Page
+
+The RE Workflow Management System serves as a unified landing page and application portal for workflow request management. Users access the application through a web-based interface that provides:
+
+- **Unified Login Experience**: Single sign-on (SSO) integration with Okta Identity Provider
+- **Application Dashboard**: Centralized view of workflow requests, approvals, and activities
+- **Role-Based Access**: Personalized interface based on user roles (USER, MANAGEMENT, ADMIN)
+- **Request Management**: Create, view, and manage workflow requests with multi-level approvals
+
+### Landing Page Features
+
+- **Dashboard**: Overview of open requests, pending approvals, and system metrics
+- **Request Creation**: Guided wizards for creating custom requests and claim management workflows
+- **Request Tracking**: Real-time status updates and activity tracking
+- **Document Management**: Upload, preview, and download documents with GCS integration
+- **Work Notes**: Collaborative communication within request context
+- **Notifications**: Real-time alerts for approvals, assignments, and system events
+
+---
+
+## 2. System Architecture
+
+### 2a. Architecture for Frontend (RE Workflow Application)
+
+**Technology Stack:**
+- **Framework**: React 18 with TypeScript
+- **Build Tool**: Vite
+- **Routing**: React Router v6
+- **State Management**: Redux Toolkit (authSlice, storeSlice) + React Context (AuthContext)
+- **UI Components**: shadcn/ui, Radix UI, Tailwind CSS
+- **HTTP Client**: Axios with interceptors
+- **Authentication**: Okta Sign-In Widget (production) / Token Exchange (development)
+
+**Frontend Architecture Components:**
+
+```
+┌─────────────────────────────────────────────────────────────┐
+│                     User's Web Browser                      │
+│  ┌──────────────────────────────────────────────────────┐   │
+│  │       React Application (Vite + React Router)        │   │
+│  │  ┌──────────────┐ ┌──────────────┐ ┌─────────────┐   │   │
+│  │  │ AuthContext  │ │ Redux Store  │ │ Components  │   │   │
+│  │  │  (SSO Flow)  │ │ (State Mgmt) │ │ (UI Layer)  │   │   │
+│  │  └──────┬───────┘ └──────┬───────┘ └──────┬──────┘   │   │
+│  │         │                │                │          │   │
+│  │         └────────────────┼────────────────┘          │   │
+│  │                          │                           │   │
+│  │                 ┌────────▼────────┐                  │   │
+│  │                 │  Axios Client   │                  │   │
+│  │                 │(withCredentials)│                  │   │
+│  │                 └────────┬────────┘                  │   │
+│  └──────────────────────────┼───────────────────────────┘   │
+└─────────────────────────────┼───────────────────────────────┘
+                              │
+                 ┌────────────▼────────────┐
+                 │   Backend API Server    │
+                 │   (Express + Node.js)   │
+                 └─────────────────────────┘
+```
+
+**Mermaid Diagram - Frontend Architecture:**
+
+```mermaid
+graph TB
+    Browser[User's Web Browser]
+    ReactApp[React Application<br/>Vite + React Router]
+    AuthContext[AuthContext<br/>SSO Flow]
+    ReduxStore[Redux Store<br/>State Management]
+    Components[Components<br/>UI Layer]
+    AxiosClient[Axios Client<br/>withCredentials]
+    BackendAPI[Backend API Server<br/>Express + Node.js]
+
+    Browser --> ReactApp
+    ReactApp --> AuthContext
+    ReactApp --> ReduxStore
+    ReactApp --> Components
+    AuthContext --> AxiosClient
+    ReduxStore --> AxiosClient
+    Components --> AxiosClient
+    AxiosClient --> BackendAPI
+```
+
+**Key Frontend Files:**
+- `src/contexts/AuthContext.tsx`: Authentication state management and SSO flow
+- `src/services/authApi.ts`: API client with token refresh interceptors
+- `src/utils/tokenManager.ts`: Token storage and management (localStorage in dev, httpOnly cookies in prod)
+- `src/pages/Auth/AuthCallback.tsx`: Handles OAuth callback from Okta
+- `src/App.tsx`: Main routing and application structure
+
+**Authentication Flow (Frontend):**
+
+1. **Production Mode (SSO via Okta)**:
+ - User navigates to application
+ - Okta Sign-In Widget initiates authentication
+ - User authenticates with Okta (MFA if enabled)
+ - Okta redirects to `/login/callback` with authorization code
+ - Frontend exchanges code for tokens via `/api/v1/auth/token-exchange`
+ - Backend sets httpOnly cookies (`accessToken`, `refreshToken`)
+ - User is redirected to dashboard
+
+2. **Development Mode (Localhost)**:
+ - User navigates to application
+ - Okta Sign-In Widget initiates authentication
+ - After Okta authentication, frontend receives authorization code
+ - Frontend calls `/api/v1/auth/token-exchange` with code
+ - Backend returns tokens in response body (for cross-port development)
+ - Tokens stored in localStorage for development convenience
+ - User is redirected to dashboard
+
+**Session Management:**
+- **Production**: Tokens stored in httpOnly cookies (secure, not accessible via JavaScript)
+- **Development**: Tokens stored in localStorage (for debugging and cross-port setup)
+- **Token Refresh**: Automatic refresh via Axios interceptor when access token expires
+- **Logout**: Clears cookies and redirects to Okta logout endpoint
+
+### 2b. Architecture for Backend (API Server)
+
+**Technology Stack:**
+- **Runtime**: Node.js 22 LTS
+- **Language**: TypeScript 5.7
+- **Framework**: Express.js 4.21
+- **Database**: PostgreSQL 16
+- **ORM**: Sequelize 6.37
+- **Authentication**: JWT (JSON Web Tokens)
+- **Session**: HttpOnly cookies (production) / JWT tokens (development)
+
+**Backend Architecture Components:**
+
+```
+┌─────────────────────────────────────────────────────────────┐
+│                Express.js Application Server                 │
+│  ┌──────────────────────────────────────────────────────┐   │
+│  │                   Middleware Layer                    │   │
+│  │  ┌──────────┐ ┌──────────┐ ┌──────────┐ ┌──────────┐ │   │
+│  │  │   CORS   │ │  Helmet  │ │  Cookie  │ │   Auth   │ │   │
+│  │  │          │ │(Security)│ │  Parser  │ │Middleware│ │   │
+│  │  └──────────┘ └──────────┘ └──────────┘ └──────────┘ │   │
+│  └──────────────────────────────────────────────────────┘   │
+│  ┌──────────────────────────────────────────────────────┐   │
+│  │                    Route Handlers                     │   │
+│  │  ┌──────────┐ ┌──────────┐ ┌──────────┐ ┌──────────┐ │   │
+│  │  │   Auth   │ │ Workflow │ │ Document │ │   User   │ │   │
+│  │  │Controller│ │Controller│ │Controller│ │Controller│ │   │
+│  │  └──────────┘ └──────────┘ └──────────┘ └──────────┘ │   │
+│  └──────────────────────────────────────────────────────┘   │
+│  ┌──────────────────────────────────────────────────────┐   │
+│  │                     Service Layer                     │   │
+│  │  ┌──────────┐ ┌──────────┐ ┌──────────┐ ┌──────────┐ │   │
+│  │  │   Auth   │ │ Workflow │ │ Document │ │   User   │ │   │
+│  │  │ Service  │ │ Service  │ │ Service  │ │ Service  │ │   │
+│  │  └──────────┘ └──────────┘ └──────────┘ └──────────┘ │   │
+│  └──────────────────────────────────────────────────────┘   │
+│  ┌──────────────────────────────────────────────────────┐   │
+│  │                   Data Access Layer                   │   │
+│  │  ┌──────────┐ ┌──────────┐ ┌──────────┐ ┌──────────┐ │   │
+│  │  │   User   │ │ Workflow │ │ Document │ │ Activity │ │   │
+│  │  │  Model   │ │  Model   │ │  Model   │ │  Model   │ │   │
+│  │  └──────────┘ └──────────┘ └──────────┘ └──────────┘ │   │
+│  └──────────────────────────────────────────────────────┘   │
+└─────────────────────────────────────────────────────────────┘
+        │                    │                    │
+        ▼                    ▼                    ▼
+┌──────────────┐     ┌──────────────┐     ┌──────────────┐
+│  PostgreSQL  │     │   Okta API   │     │  GCS Bucket  │
+│   Database   │     │    (IdP)     │     │  (Storage)   │
+└──────────────┘     └──────────────┘     └──────────────┘
+```
+
+**Mermaid Diagram - Backend Architecture:**
+
+```mermaid
+graph TB
+ ExpressApp[Express.js Application Server]
+
+ subgraph Middleware["Middleware Layer"]
+ CORS[CORS]
+ Helmet[Helmet Security]
+ CookieParser[Cookie Parser]
+ AuthMiddleware[Auth Middleware]
+ end
+
+ subgraph Controllers["Route Handlers"]
+ AuthController[Auth Controller]
+ WorkflowController[Workflow Controller]
+ DocumentController[Document Controller]
+ UserController[User Controller]
+ end
+
+ subgraph Services["Service Layer"]
+ AuthService[Auth Service]
+ WorkflowService[Workflow Service]
+ DocumentService[Document Service]
+ UserService[User Service]
+ end
+
+ subgraph Models["Data Access Layer"]
+ UserModel[User Model]
+ WorkflowModel[Workflow Model]
+ DocumentModel[Document Model]
+ ActivityModel[Activity Model]
+ end
+
+ PostgreSQL[(PostgreSQL Database)]
+ OktaAPI[Okta API IdP]
+ GCS[GCS Bucket Storage]
+
+ ExpressApp --> Middleware
+ Middleware --> Controllers
+ Controllers --> Services
+ Services --> Models
+ Models --> PostgreSQL
+ AuthService --> OktaAPI
+ DocumentService --> GCS
+```
+
+**Key Backend Files:**
+- `src/app.ts`: Express application setup, middleware configuration
+- `src/routes/auth.routes.ts`: Authentication endpoints
+- `src/controllers/auth.controller.ts`: Authentication request handlers
+- `src/services/auth.service.ts`: Authentication business logic
+- `src/middlewares/auth.middleware.ts`: JWT token validation middleware
+- `src/config/sso.ts`: SSO configuration (Okta, JWT secrets)
+
+**Authentication Endpoints:**
+
+1. **POST `/api/v1/auth/token-exchange`** (No auth required)
+ - Exchanges Okta authorization code for JWT tokens
+ - Creates/updates user in database
+ - Sets httpOnly cookies (production) or returns tokens (development)
+
+2. **POST `/api/v1/auth/sso-callback`** (No auth required)
+ - Legacy endpoint for direct SSO callback
+ - Accepts SSO user data, creates/updates user
+ - Returns JWT tokens
+
+3. **POST `/api/v1/auth/login`** (No auth required)
+ - Username/password authentication via Okta Resource Owner Password flow
+ - For API clients (Postman, mobile apps)
+ - Returns JWT tokens
+
+4. **POST `/api/v1/auth/refresh`** (No auth required)
+ - Refreshes access token using refresh token
+ - Accepts refresh token from request body or httpOnly cookie
+ - Returns new access token
+
+5. **GET `/api/v1/auth/me`** (Auth required)
+ - Returns current authenticated user profile
+
+6. **POST `/api/v1/auth/logout`** (Optional auth)
+ - Clears authentication cookies
+ - Logs user activity
+
+**Cookie Configuration:**
+- **Name**: `accessToken`, `refreshToken`
+- **HttpOnly**: `true` (prevents JavaScript access)
+- **Secure**: `true` in production (HTTPS only)
+- **SameSite**: `lax` (development) / `none` (production for cross-domain)
+- **MaxAge**: 24 hours (accessToken), 7 days (refreshToken)
+- **Path**: `/`
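+
+A sketch of how these options map onto Express's `res.cookie` (illustrative; the controller's actual code may differ):
+
+```typescript
+// Sketch: applying the cookie options above via Express res.cookie.
+import type { Response } from 'express';
+
+function setAuthCookies(res: Response, accessToken: string, refreshToken: string): void {
+  const isProd = process.env.NODE_ENV === 'production';
+  const common = {
+    httpOnly: true,                                          // no JavaScript access
+    secure: isProd,                                          // HTTPS only in production
+    sameSite: isProd ? ('none' as const) : ('lax' as const), // cross-domain in production
+    path: '/',
+  };
+  res.cookie('accessToken', accessToken, { ...common, maxAge: 24 * 60 * 60 * 1000 });       // 24 hours
+  res.cookie('refreshToken', refreshToken, { ...common, maxAge: 7 * 24 * 60 * 60 * 1000 }); // 7 days
+}
+```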
+
+---
+
+## 3. Deployment Architecture
+
+### Deployment Environment
+
+**Current Deployment:**
+- **Environment**: Development/Staging
+- **Region**: To be configured based on organizational requirements
+
+### User Domain
+
+- **Domain**: To be configured (e.g., `rebridge.co.in` or organization-specific domain)
+- **Purpose**: Unified domain for SSO and application access
+- **Integration**: Federated with Okta Identity Provider
+
+### Deployment Components
+
+**Frontend Deployment:**
+- **Build**: Vite production build (`npm run build`)
+- **Static Hosting**: Served via Express static middleware or CDN
+- **Environment Variables**:
+ - `VITE_API_BASE_URL`: Backend API URL
+ - `VITE_OKTA_DOMAIN`: Okta domain for authentication
+ - `VITE_OKTA_CLIENT_ID`: Okta client ID
+
+**Backend Deployment:**
+- **Runtime**: Node.js 22 LTS
+- **Process Manager**: PM2 (recommended for production)
+- **Reverse Proxy**: Nginx (recommended for SSL termination and load balancing)
+- **Database**: PostgreSQL 16 (managed service or self-hosted)
+- **File Storage**: Google Cloud Storage (GCS) for document storage
+- **Environment Variables**: See `.env.example` for complete list
+
+**Key Environment Variables:**
+```env
+# Server Configuration
+NODE_ENV=production
+PORT=5000
+FRONTEND_URL=https://rebridge.co.in
+
+# Database
+DB_HOST=postgresql-host
+DB_PORT=5432
+DB_NAME=re_workflow_db
+DB_USER=postgres
+DB_PASSWORD=secure_password
+
+# JWT Configuration
+JWT_SECRET=your-secret-key
+JWT_EXPIRY=24h
+REFRESH_TOKEN_EXPIRY=7d
+
+# Okta Configuration
+OKTA_DOMAIN=https://dev-830839.oktapreview.com
+OKTA_CLIENT_ID=your-client-id
+OKTA_CLIENT_SECRET=your-client-secret
+
+# GCS Configuration
+GCP_PROJECT_ID=re-platform-workflow-dealer
+GCP_BUCKET_NAME=re-workflow-documents
+GCP_KEY_FILE=./config/gcp-key.json
+GCP_BUCKET_REGION=asia-south1
+GCP_BUCKET_PUBLIC=true
+```
+
+---
+
+## 4. Identity Provider (IdP) - Okta
+
+### IdP Configuration
+
+**Identity Provider**: Okta
+- **Domain**: Configurable via `OKTA_DOMAIN` environment variable
+- **Default**: `https://dev-830839.oktapreview.com`
+- **Protocol**: OAuth 2.0 / OpenID Connect (OIDC)
+- **Grant Types**: Authorization Code, Resource Owner Password Credentials
+
+### IdP Features
+
+**Authentication:**
+- Username/password authentication
+- Multi-Factor Authentication (MFA) support
+- Conditional Access Policies
+- Session management
+
+**User Management:**
+- User directory management
+- Group/role mappings
+- User provisioning and deprovisioning
+- Profile synchronization
+
+**Integration:**
+- OAuth 2.0 Authorization Code flow (web applications)
+- Resource Owner Password Credentials flow (API clients)
+- Token exchange and validation
+- User info endpoint integration
+
+### IdP Integration Flow
+
+1. **User Authentication**:
+ - User initiates login from frontend
+ - Frontend redirects to Okta Sign-In Widget
+ - User enters credentials (and MFA if enabled)
+ - Okta validates credentials and issues authorization code
+
+2. **Token Exchange**:
+ - Frontend receives authorization code
+ - Frontend calls backend `/api/v1/auth/token-exchange` with code
+ - Backend exchanges code for access token with Okta
+ - Backend fetches user info from Okta userinfo endpoint
+ - Backend creates/updates user in local database
+ - Backend generates JWT tokens (access + refresh)
+ - Backend sets httpOnly cookies (production) or returns tokens (development)
+
+3. **Token Validation**:
+ - Backend validates JWT tokens on each API request
+ - Middleware (`authenticateToken`) verifies token signature and expiry
+ - User record is fetched from database to ensure active status
+ - User info is attached to request object for authorization
+
+---
+
+## 5. User Authentication Flow for RE Workflow System
+
+### 5.1 Web Application Flow (Production)
+
+**Sequence:**
+1. User navigates to RE Workflow application (e.g., `https://rebridge.co.in`)
+2. **Authentication**: Frontend checks for existing session (httpOnly cookie)
+ - If no valid session, redirects to Okta Sign-In Widget
+ - User enters credentials (and MFA if enabled) on Okta login page
+3. **Token Issuance**: Upon successful authentication, Okta issues authorization code
+4. **Token Exchange**: Frontend exchanges authorization code for tokens via backend
+5. **User Creation/Update**: Backend creates or updates user in local database based on Okta user info
+6. **JWT Generation**: Backend generates JWT access and refresh tokens
+7. **Cookie Setting**: Backend sets httpOnly cookies (`accessToken`, `refreshToken`)
+8. **Token Validation**: On subsequent requests, backend validates JWT token from cookie
+9. **Access Control**: Backend applies role-based access controls (USER, MANAGEMENT, ADMIN)
+
+**Mermaid Sequence Diagram - Web Application Authentication Flow:**
+
+```mermaid
+sequenceDiagram
+ participant User
+ participant Browser
+ participant ReactApp as React Application
+ participant Okta as Okta IdP
+ participant Backend as Backend API
+ participant DB as PostgreSQL
+
+ User->>Browser: Navigate to application
+ Browser->>ReactApp: Load application
+ ReactApp->>ReactApp: Check for httpOnly cookie
+ alt No valid session
+ ReactApp->>Okta: Redirect to Sign-In Widget
+ User->>Okta: Enter credentials + MFA
+ Okta->>Okta: Validate credentials
+ Okta->>ReactApp: Redirect with authorization code
+ ReactApp->>Backend: POST /api/v1/auth/token-exchange (code)
+ Backend->>Okta: Exchange code for tokens
+ Okta->>Backend: Return access token + user info
+ Backend->>DB: Create/Update user
+ DB->>Backend: User record
+ Backend->>Backend: Generate JWT tokens
+ Backend->>ReactApp: Set httpOnly cookies (accessToken, refreshToken)
+ ReactApp->>Browser: Redirect to dashboard
+ else Valid session exists
+ ReactApp->>Browser: Show dashboard
+ end
+
+ User->>ReactApp: Make API request
+ ReactApp->>Backend: API request (with cookie)
+ Backend->>Backend: Validate JWT token
+ Backend->>DB: Fetch user (verify active)
+ DB->>Backend: User data
+ Backend->>Backend: Apply RBAC
+ Backend->>ReactApp: Return data
+ ReactApp->>User: Display data
+```
+
+### 5.2 API Client Flow (Development/Postman)
+
+**Sequence:**
+1. Client calls `POST /api/v1/auth/login` with username and password
+2. **Okta Authentication**: Backend authenticates with Okta using Resource Owner Password flow
+3. **Token Retrieval**: Backend receives access token and user info from Okta
+4. **User Creation/Update**: Backend creates or updates user in local database
+5. **JWT Generation**: Backend generates JWT access and refresh tokens
+6. **Token Return**: Backend returns tokens in response body (for API clients)
+
+### 5.3 Token Refresh Flow
+
+**Sequence:**
+1. Client makes API request with access token
+2. **Token Expiry**: If access token is expired, backend returns 401 Unauthorized
+3. **Automatic Refresh**: Frontend Axios interceptor catches 401 and calls `/api/v1/auth/refresh`
+4. **Refresh Token Validation**: Backend validates refresh token (from cookie or request body)
+5. **New Token Generation**: Backend generates new access token
+6. **Cookie Update**: Backend sets new access token in httpOnly cookie (production)
+7. **Request Retry**: Frontend retries original request with new token
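+
+A sketch of the interceptor side of this flow (names and base URL are illustrative):
+
+```typescript
+// Sketch: 401-triggered refresh-and-retry with an Axios response interceptor.
+import axios from 'axios';
+
+const api = axios.create({
+  baseURL: '/api/v1',
+  withCredentials: true, // send httpOnly cookies with every request
+});
+
+api.interceptors.response.use(
+  (response) => response,
+  async (error) => {
+    const original = error.config;
+    if (error.response?.status === 401 && !original._retry) {
+      original._retry = true;          // guard against infinite refresh loops
+      await api.post('/auth/refresh'); // backend sets a fresh accessToken cookie
+      return api(original);            // retry the original request
+    }
+    return Promise.reject(error);
+  },
+);
+```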
+
+**Mermaid Sequence Diagram - Token Refresh Flow:**
+
+```mermaid
+sequenceDiagram
+ participant User
+ participant ReactApp as React Application
+ participant AxiosInterceptor as Axios Interceptor
+ participant Backend as Backend API
+ participant DB as PostgreSQL
+
+ User->>ReactApp: Trigger API request
+ ReactApp->>Backend: API request (with accessToken cookie)
+ Backend->>Backend: Validate JWT token
+ alt Token expired
+ Backend->>ReactApp: 401 Unauthorized
+ ReactApp->>AxiosInterceptor: Catch 401 error
+ AxiosInterceptor->>Backend: POST /api/v1/auth/refresh (refreshToken)
+ Backend->>Backend: Validate refresh token
+ Backend->>DB: Fetch user (verify active)
+ DB->>Backend: User data
+ Backend->>Backend: Generate new access token
+ Backend->>ReactApp: Set new accessToken cookie
+ AxiosInterceptor->>Backend: Retry original request (with new token)
+ Backend->>Backend: Validate new JWT token
+ Backend->>DB: Fetch data
+ DB->>Backend: Return data
+ Backend->>ReactApp: 200 OK (data)
+ ReactApp->>User: Display data
+ else Token valid
+ Backend->>DB: Fetch data
+ DB->>Backend: Return data
+ Backend->>ReactApp: 200 OK (data)
+ ReactApp->>User: Display data
+ end
+```
+
+### 5.4 Logout Flow
+
+**Sequence:**
+1. User clicks logout button
+2. **Logout Request**: Frontend calls `POST /api/v1/auth/logout`
+3. **Cookie Clearing**: Backend clears httpOnly cookies (`accessToken`, `refreshToken`)
+4. **Okta Logout**: Frontend redirects to Okta logout endpoint (if ID token available)
+5. **Session Termination**: User session is terminated on both application and IdP
+
+**Mermaid Sequence Diagram - Logout Flow:**
+
+```mermaid
+sequenceDiagram
+ participant User
+ participant ReactApp as React Application
+ participant Backend as Backend API
+ participant Okta as Okta IdP
+ participant Browser as Browser Storage
+
+ User->>ReactApp: Click logout button
+ ReactApp->>Backend: POST /api/v1/auth/logout
+ Backend->>Backend: Clear httpOnly cookies (accessToken, refreshToken)
+ Backend->>Backend: Log logout activity
+ Backend->>ReactApp: 200 OK (logout successful)
+ ReactApp->>Browser: Clear localStorage (if any)
+ ReactApp->>Okta: Redirect to Okta logout endpoint (with ID token)
+ Okta->>Okta: Terminate Okta session
+ Okta->>ReactApp: Redirect to application
+ ReactApp->>User: Show login page
+```
+
+---
+
+## 6. RE Workflow System Flow Diagram
+
+### 6.1 Executive Summary
+
+This document formalizes the technical architecture for the RE Workflow Management System, covering runtime architecture, routing, authentication/SSO, session & data flow, state management, integrations (Okta, GCS), configuration, security posture, error handling, and extensibility.
+
+**High-Level Architecture (Simple View)**
+
+```
+User's Web Browser → RE Workflow Application (React) → Backend API (Express) → PostgreSQL Database
+                                                                 ↓
+                                                   Okta Identity Provider (IdP)
+                                                                 ↓
+                                                    Google Cloud Storage (GCS)
+```
+
+**Mermaid Diagram - High-Level Architecture:**
+
+```mermaid
+graph LR
+ Browser[User's Web Browser]
+ ReactApp[RE Workflow Application<br/>React]
+ BackendAPI[Backend API<br/>Express]
+ PostgreSQL[(PostgreSQL Database)]
+ Okta[Okta Identity Provider<br/>IdP]
+ GCS[Google Cloud Storage<br/>GCS]
+
+ Browser -->|HTTPS| ReactApp
+ ReactApp -->|API Calls| BackendAPI
+ BackendAPI -->|Data Storage| PostgreSQL
+ BackendAPI -->|Authentication| Okta
+ BackendAPI -->|File Storage| GCS
+ ReactApp -->|SSO Login| Okta
+```
+
+### 6.2 Technology Stack & Conventions
+
+**Frontend:**
+- **Framework**: React 18 with TypeScript
+- **Build Tool**: Vite
+- **Routing**: React Router v6
+- **Styling**: Tailwind CSS; UI primitives via shadcn/ui & Radix
+- **State**: Redux Toolkit (authSlice, storeSlice) + React Context (AuthContext)
+- **Data Fetching**: Axios with interceptors (token refresh, error handling)
+- **Security**: HttpOnly cookies (production) / localStorage (development)
+
+**Backend:**
+- **Runtime**: Node.js 22 LTS
+- **Language**: TypeScript 5.7
+- **Framework**: Express.js 4.21
+- **Database**: PostgreSQL 16
+- **ORM**: Sequelize 6.37
+- **Authentication**: JWT (JSON Web Tokens)
+- **Validation**: Zod schemas
+- **Logging**: Winston logger
+- **Security**: Helmet.js, CORS middleware, cookie-parser
+
+### 6.3 Routing & Layout
+
+**Frontend Routes:**
+- `/` or `/dashboard` - Dashboard (authenticated users)
+- `/login/callback` - OAuth callback handler
+- `/request/:requestId` - Request detail view
+- `/new-request` - Create new request
+- `/my-requests` - User's requests
+- `/open-requests` - Open requests (management/admin)
+- `/closed-requests` - Closed requests
+- `/admin` - Admin control panel (admin only)
+- `/profile` - User profile
+- `/settings` - Application settings
+
+**Backend Routes:**
+- `/api/v1/auth/*` - Authentication endpoints
+- `/api/v1/workflows/*` - Workflow management
+- `/api/v1/documents/*` - Document management
+- `/api/v1/users/*` - User management
+- `/api/v1/activities/*` - Activity logging
+- `/health` - Health check endpoint
+
+**Route Protection:**
+- **Frontend**: `AuthContext` checks authentication status, redirects to login if not authenticated
+- **Backend**: `authenticateToken` middleware validates JWT token on protected routes
+- **Role-Based**: `requireRole` middleware enforces role-based access control
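+
+A sketch of what such a middleware can look like (illustrative; the real `authenticateToken` may differ):
+
+```typescript
+// Sketch of JWT validation middleware (assumes cookie-parser is mounted).
+import jwt from 'jsonwebtoken';
+import type { Request, Response, NextFunction } from 'express';
+
+export function authenticateToken(req: Request, res: Response, next: NextFunction): void {
+  const token =
+    (req as any).cookies?.accessToken ?? req.headers.authorization?.replace('Bearer ', '');
+  if (!token) {
+    res.status(401).json({ message: 'Authentication required' });
+    return;
+  }
+  try {
+    const payload = jwt.verify(token, process.env.JWT_SECRET as string);
+    (req as any).user = payload; // attach user info for downstream authorization
+    next();
+  } catch {
+    res.status(401).json({ message: 'Invalid or expired token' });
+  }
+}
+```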
+
+### 6.4 Authentication (SSO) & Session
+
+**Login Flow:**
+1. User navigates to application
+2. Frontend checks for existing session (httpOnly cookie in production, localStorage in development)
+3. If no session, redirects to Okta Sign-In Widget
+4. User authenticates with Okta (credentials + MFA if enabled)
+5. Okta redirects to `/login/callback` with authorization code
+6. Frontend calls `/api/v1/auth/token-exchange` with code
+7. Backend exchanges code for tokens with Okta
+8. Backend creates/updates user in database
+9. Backend generates JWT tokens and sets httpOnly cookies (production) or returns tokens (development)
+10. User is redirected to dashboard
+
+**Cookie Configuration:**
+- **Name**: `accessToken`, `refreshToken`
+- **HttpOnly**: `true` (prevents XSS attacks)
+- **Secure**: `true` in production (HTTPS only)
+- **SameSite**: `lax` (development) / `none` (production for cross-domain)
+- **MaxAge**: 24 hours (accessToken), 7 days (refreshToken)
+- **Path**: `/`
+
+**Cookie Payload Schema:**
+```json
+{
+ "accessToken": "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9...",
+ "refreshToken": "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9..."
+}
+```
+
+**JWT Token Payload** (`role` is one of `USER`, `MANAGEMENT`, or `ADMIN`):
+```json
+{
+ "userId": "uuid",
+ "employeeId": "EMP001",
+ "email": "user@royalenfield.com",
+ "role": "USER" | "MANAGEMENT" | "ADMIN",
+ "iat": 1234567890,
+ "exp": 1234654290
+}
+```
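+
+Signing such a payload is a one-liner with `jsonwebtoken` (a sketch; the expiry mirrors `JWT_EXPIRY` above):
+
+```typescript
+// Sketch: signing the payload above (HMAC SHA-256 by default).
+import jwt from 'jsonwebtoken';
+
+const accessToken = jwt.sign(
+  { userId: 'uuid', employeeId: 'EMP001', email: 'user@royalenfield.com', role: 'USER' },
+  process.env.JWT_SECRET as string,
+  { expiresIn: '24h' }, // matches JWT_EXPIRY
+);
+```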
+
+**Cookie Consumers:**
+- **Backend Middleware**: `authenticateToken` reads token from cookie or Authorization header
+- **Frontend**: Axios automatically sends cookies with `withCredentials: true`
+- **Token Refresh**: Automatic refresh via Axios interceptor when access token expires
+
+**Behavior:**
+After login, the cookie is validated on each request, driving personalized UI and API authorization. Token refresh happens automatically in the background when access token expires.
+
+### 6.5 Session & Application Data Flow
+
+**Session Endpoints:**
+- **GET `/api/v1/auth/me`**: Returns current user profile (validates cookie, fetches user from database)
+- **POST `/api/v1/auth/refresh`**: Refreshes access token using refresh token (from cookie or request body)
+
+**Data Flow:**
+1. **Request**: Frontend makes API request with httpOnly cookie (production) or Authorization header (development)
+2. **Validation**: Backend middleware validates JWT token
+3. **User Lookup**: Backend fetches user from database to ensure active status
+4. **Authorization**: Backend applies role-based access control
+5. **Response**: Backend returns data with appropriate status code
+
+**Error Handling:**
+- **401 Unauthorized**: Token missing, invalid, or expired → Frontend triggers token refresh or redirects to login
+- **403 Forbidden**: User lacks required role → Frontend shows access denied message
+- **500 Internal Server Error**: Backend logs error, returns generic error message to frontend
+
+### 6.6 Security Posture
+
+**Cookies:**
+- **HttpOnly**: `true` (prevents JavaScript access, mitigates XSS)
+- **Secure**: `true` in production (HTTPS only, prevents man-in-the-middle)
+- **SameSite**: `lax` (development) / `none` (production for cross-domain)
+- **MaxAge**: 24 hours (accessToken), 7 days (refreshToken)
+- **Path**: `/` (available to all routes)
+
+**Headers (Helmet.js):**
+- **Content-Security-Policy (CSP)**: Restricts resource loading (scripts, styles, images, connections)
+- **X-Frame-Options**: Prevents clickjacking attacks
+- **X-Content-Type-Options**: Prevents MIME type sniffing
+- **Referrer-Policy**: Controls referrer information
+- **Strict-Transport-Security (HSTS)**: Enforces HTTPS in production
+
+**TLS/SSL:**
+- **Production**: HTTPS enforced via reverse proxy (Nginx) or load balancer
+- **Development**: HTTP allowed for local development
+- **Certificate**: Managed via Let's Encrypt or enterprise CA
+
+**Secrets Management:**
+- **JWT Secret**: Stored in environment variable (`JWT_SECRET`)
+- **Okta Credentials**: Stored in environment variables (`OKTA_CLIENT_ID`, `OKTA_CLIENT_SECRET`)
+- **Database Credentials**: Stored in environment variables (never committed to repository)
+- **Recommendation**: Use secret manager (AWS Secrets Manager, Azure Key Vault, HashiCorp Vault) in production
+
+**Input Validation:**
+- **Zod Schemas**: All request bodies validated using Zod schemas
+- **SQL Injection Prevention**: Sequelize ORM uses parameterized queries
+- **XSS Prevention**: Input sanitization and output encoding
+
+**Rate Limiting:**
+- **Recommendation**: Implement rate limiting middleware (express-rate-limit) for authentication endpoints
+- **Current**: Not implemented (to be added)
+
+---
+
+## 7. Key Components
+
+### 7.1 User Directory
+
+**Current Implementation:**
+- User accounts stored in PostgreSQL database (`users` table)
+- Roles: `USER`, `MANAGEMENT`, `ADMIN`
+- User data synced from Okta on each login
+- Just-in-Time (JIT) provisioning: Users created automatically on first SSO login
+
+### 7.2 User Management
+
+**Okta Integration:**
+- Primary identity provider for authentication
+- User directory managed in Okta
+- User attributes synced to local database on login
+
+**Local Database:**
+- User records stored in PostgreSQL
+- Fields: `userId`, `email`, `employeeId`, `firstName`, `lastName`, `displayName`, `department`, `designation`, `role`, `isActive`, `oktaSub`
+- User creation/update happens automatically on SSO callback
+
+**User Service:**
+- `src/services/user.service.ts`: User CRUD operations
+- `src/services/auth.service.ts`: User creation/update during SSO callback
+
+### 7.3 Login and Dashboard
+
+**RE Workflow Portal:**
+- Unified login experience via Okta Sign-In Widget
+- Dashboard provides overview of:
+ - Open requests (pending user action)
+ - My requests (user-initiated requests)
+ - Pending approvals (requests awaiting user approval)
+ - System metrics (TAT, completion rates)
+
+**Authentication Flow:**
+- Frontend: `src/contexts/AuthContext.tsx` manages authentication state
+- Backend: `src/controllers/auth.controller.ts` handles authentication requests
+- Okta: Identity provider for user authentication
+
+### 7.4 Database Configuration
+
+**PostgreSQL Database:**
+- **Database Name**: `re_workflow_db` (configurable)
+- **Tables**: `users`, `workflow_requests`, `documents`, `work_note_attachments`, `activities`, etc.
+- **ORM**: Sequelize for database operations
+- **Migrations**: Database schema managed via Sequelize migrations
+
+**Database Models:**
+- `src/models/User.ts`: User model with authentication fields
+- `src/models/WorkflowRequest.ts`: Workflow request model
+- `src/models/Document.ts`: Document model with GCS integration
+- `src/models/WorkNoteAttachment.ts`: Work note attachment model
+
+### 7.5 File Storage
+
+**Google Cloud Storage (GCS):**
+- **Bucket**: `re-workflow-documents` (configurable)
+- **Region**: `asia-south1` (configurable)
+- **Structure**: `requests/{requestNumber}/{fileType}/{fileName}`
+ - `fileType`: `documents` or `attachments`
+- **Access**: Public URLs (if `GCP_BUCKET_PUBLIC=true`) or signed URLs
+- **Service**: `src/services/gcsStorage.service.ts`
+
+**Local Storage (Fallback):**
+- Used when GCS is not configured or upload fails
+- Files stored in `uploads/` directory
+- Served via Express static middleware
+
+### 7.6 API Access
+
+**OAuth2 Flows:**
+- **Authorization Code Flow**: Used by web application (frontend)
+- **Resource Owner Password Credentials Flow**: Used by API clients (Postman, mobile apps)
+
+**Service-to-Service Authentication:**
+- **Current**: Not implemented (to be added for integrations)
+- **Recommendation**: OAuth2 Client Credentials flow for service-to-service authentication
+
+---
+
+## 8. Security Considerations
+
+### 8.1 Multi-Factor Authentication (MFA)
+
+**IdP Configuration:**
+- MFA enforced via Okta conditional access policies
+- Users required to complete MFA challenge during login
+- MFA methods: SMS, TOTP, Push notification (configurable in Okta)
+
+**Application Level:**
+- MFA handled entirely by Okta
+- Application receives authenticated user info after MFA completion
+- No MFA state management required in application
+
+### 8.2 Conditional Access
+
+**IdP Policies:**
+- Location-based restrictions (e.g., block access from certain countries)
+- Device compliance requirements
+- Time-based access restrictions
+- IP whitelist/blacklist
+
+**Application Level:**
+- Role-based access control (RBAC) enforced via middleware
+- Routes protected by `authenticateToken` and `requireRole` middleware
+- Frontend route guards prevent unauthorized access
+
+### 8.3 Role Assignments and Permissions
+
+**Roles:**
+- **USER**: Standard user, can create requests and participate in workflows
+- **MANAGEMENT**: Management user, can view all requests and perform management actions
+- **ADMIN**: Administrator, full system access including user management
+
+**Permission Enforcement:**
+- **Backend**: `requireRole` middleware enforces role-based access
+- **Frontend**: `hasManagementAccess` helper function checks user role
+- **Database**: Role stored in `users.role` column
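+
+A sketch of the role-enforcement side (illustrative; the real `requireRole` may differ):
+
+```typescript
+// Sketch of role enforcement; assumes authenticateToken ran first.
+import type { Request, Response, NextFunction } from 'express';
+
+type Role = 'USER' | 'MANAGEMENT' | 'ADMIN';
+
+export function requireRole(...roles: Role[]) {
+  return (req: Request, res: Response, next: NextFunction): void => {
+    const user = (req as any).user as { role?: Role } | undefined; // set by authenticateToken
+    if (!user?.role || !roles.includes(user.role)) {
+      res.status(403).json({ message: 'Insufficient permissions' });
+      return;
+    }
+    next();
+  };
+}
+```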
+
+**Regular Review:**
+- **Recommendation**: Implement periodic role review process
+- **Audit**: All role changes logged in `activities` table
+
+### 8.4 Audit Logging
+
+**Application Logging:**
+- **Winston Logger**: Structured logging for all application events
+- **Activity Logging**: All user actions logged in `activities` table
+- **Authentication Events**: Login, logout, token refresh logged with user info and IP address
+
+**IdP Logging:**
+- **Okta System Log**: All authentication events logged in Okta
+- **Azure Entra Audit Logs**: (Future) User access and role changes logged
+
+**Log Retention:**
+- **Recommendation**: Retain logs for 90 days (configurable)
+- **Compliance**: Logs retained per organizational compliance requirements
+
+### 8.5 Token Security
+
+**JWT Token Security:**
+- **Secret**: Strong secret key stored in environment variable
+- **Expiry**: Short-lived access tokens (24 hours), longer-lived refresh tokens (7 days)
+- **Signature**: HMAC SHA-256 algorithm
+- **Validation**: Token signature and expiry validated on each request
+
+**Cookie Security:**
+- **HttpOnly**: Prevents JavaScript access (mitigates XSS)
+- **Secure**: HTTPS only in production (mitigates man-in-the-middle)
+- **SameSite**: CSRF protection
+
+**Token Refresh:**
+- **Automatic**: Frontend automatically refreshes expired tokens
+- **Secure**: Refresh token validated before issuing new access token
+- **Rotation**: (Future) Implement refresh token rotation for enhanced security
+
+---
+
+## 9. Provisioning & Lifecycle
+
+### 9.1 User Provisioning
+
+**Current Implementation:**
+- **Just-in-Time (JIT) Provisioning**: Users created automatically on first SSO login
+- **User Data Source**: Okta user directory
+- **Sync Frequency**: Real-time (on each login)
+
+**User Creation Flow:**
+1. User authenticates with Okta
+2. Backend receives user info from Okta
+3. Backend checks if user exists in database (by email)
+4. If user exists, updates user record with latest info from Okta
+5. If user doesn't exist, creates new user record with default role `USER`
+6. User is granted access to the application
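+
+In Sequelize terms the upsert looks roughly like this (a sketch assuming the `User` model from `src/models/User.ts`; field names are illustrative):
+
+```typescript
+// Illustrative JIT upsert against the Sequelize User model.
+import { User } from '../models/User';
+
+async function upsertSsoUser(info: { email: string; displayName?: string; sub: string }) {
+  const existing = await User.findOne({ where: { email: info.email } });
+  if (existing) {
+    // Refresh attributes from Okta on every login
+    return existing.update({ displayName: info.displayName, oktaSub: info.sub, lastLogin: new Date() });
+  }
+  // First login: create the user with the default role
+  return User.create({
+    email: info.email,
+    displayName: info.displayName,
+    oktaSub: info.sub,
+    role: 'USER',
+    isActive: true,
+  });
+}
+```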
+
+**Mermaid Sequence Diagram - User Provisioning Flow:**
+
+```mermaid
+sequenceDiagram
+ participant User
+ participant Okta as Okta IdP
+ participant Backend as Backend API
+ participant AuthService as Auth Service
+ participant DB as PostgreSQL Database
+
+ User->>Okta: Authenticate (credentials + MFA)
+ Okta->>Okta: Validate credentials
+ Okta->>Backend: Return authorization code
+ Backend->>Okta: Exchange code for tokens
+ Okta->>Backend: Return access token + user info
+ Backend->>AuthService: handleSSOCallback(userData)
+ AuthService->>DB: Check if user exists (by email)
+
+ alt User exists
+ DB->>AuthService: User found
+ AuthService->>DB: Update user record (displayName, department, etc.)
+ DB->>AuthService: User updated
+ else User doesn't exist
+ DB->>AuthService: User not found
+ AuthService->>DB: Create new user (role: USER, isActive: true)
+ DB->>AuthService: User created
+ end
+
+ AuthService->>AuthService: Generate JWT tokens
+ AuthService->>Backend: Return tokens + user data
+ Backend->>User: Set cookies + redirect to dashboard
+```
+
+**User Update Flow:**
+- User attributes updated on each login
+- Fields updated: `displayName`, `department`, `designation`, `phone`, `lastLogin`
+- `oktaSub` updated if changed in Okta
+
+### 9.2 SCIM Provisioning (Future)
+
+**Recommendation**: Implement SCIM 2.0 for automated user provisioning
+
+**Benefits:**
+- Automated user creation from HRMS
+- Automatic role assignments based on HRMS data
+- User deprovisioning when user is removed from HRMS
+- Group/role synchronization
+
+**Implementation:**
+- **SCIM Endpoint**: `/api/v1/scim/v2/Users`
+- **SCIM Provider**: Okta or Azure Entra
+- **SCIM Client**: HRMS system
+
+### 9.3 User Deprovisioning
+
+**Current Implementation:**
+- **Manual Deactivation**: Admin can deactivate user via admin panel
+- **Field**: `users.isActive` set to `false`
+- **Effect**: User cannot authenticate (middleware checks `isActive` status)
+
+**Future Implementation:**
+- **Automatic Deprovisioning**: User deactivated when removed from Okta
+- **SCIM Integration**: User deprovisioned via SCIM DELETE request
+- **Data Retention**: User data retained for audit purposes (soft delete)
+
+### 9.4 Group/Role Assignments
+
+**Current Implementation:**
+- **Default Role**: New users assigned `USER` role
+- **Manual Assignment**: Admin can change user role via admin panel
+- **Role Storage**: Role stored in `users.role` column
+
+**Future Implementation:**
+- **Okta Group Mapping**: Map Okta groups to application roles
+- **Automatic Assignment**: Assign roles based on Okta group membership
+- **HRMS Integration**: Assign roles based on HRMS job title/department
+
+### 9.5 Lifecycle Management
+
+**User Onboarding:**
+1. User added to Okta directory (by IT admin)
+2. User receives welcome email with application URL
+3. User logs in via Okta SSO
+4. User account created automatically in application
+5. User assigned default role `USER`
+6. User can access application
+
+**User Offboarding:**
+1. User removed from Okta directory (by IT admin)
+2. User cannot authenticate (Okta rejects login)
+3. User account remains in database (for audit)
+4. Admin can manually deactivate user account
+5. User data retained per retention policy
+
+**Role Changes:**
+1. Admin updates user role in admin panel
+2. Role change logged in `activities` table
+3. User permissions updated immediately (no logout required)
+4. Role change synced to database
+
+---
+
+## 10. SSO Implementation Activity Tracker
+
+This section tracks SSO implementation activities specific to the RE Workflow Management System.
+
+| Sr | Activity | Owner | Status | Remarks |
+|----|----------|-------|--------|---------|
+| 1 | Okta Application Configuration | - | Completed | OAuth 2.0 / OIDC application configured |
+| 2 | Backend SSO Integration | - | Completed | Token exchange endpoint implemented |
+| 3 | Frontend SSO Integration | - | Completed | Okta Sign-In Widget integrated |
+| 4 | JWT Token Implementation | - | Completed | Access and refresh tokens configured |
+| 5 | HttpOnly Cookie Implementation | - | Completed | Secure cookie-based authentication |
+| 6 | User Provisioning (JIT) | - | Completed | Automatic user creation on first login |
+| 7 | Token Refresh Mechanism | - | Completed | Automatic token refresh implemented |
+| 8 | Role-Based Access Control | - | Completed | USER, MANAGEMENT, ADMIN roles implemented |
+| 9 | GCS Integration for Documents | - | Completed | Google Cloud Storage configured |
+| 10 | Audit Logging | - | Completed | Authentication events logged |
+| 11 | Domain Configuration | - | Pending | Configure production domain |
+| 12 | Production Deployment | - | Pending | Deploy to production environment |
+| 13 | MFA Enforcement | - | Pending | Configure MFA policies in Okta |
+| 14 | SCIM Provisioning | - | Future | Automated user provisioning from HRMS |
+| 15 | Session Management Dashboard | - | Future | Active session tracking and management |
+
+---
+
+## 11. Configuration Reference
+
+### 11.1 Environment Variables
+
+**Backend (.env):**
+```env
+# Server
+NODE_ENV=production
+PORT=5000
+FRONTEND_URL=https://rebridge.co.in
+
+# Database
+DB_HOST=postgresql-host
+DB_PORT=5432
+DB_NAME=re_workflow_db
+DB_USER=postgres
+DB_PASSWORD=secure_password
+
+# JWT
+JWT_SECRET=your-secret-key
+JWT_EXPIRY=24h
+REFRESH_TOKEN_EXPIRY=7d
+
+# Okta
+OKTA_DOMAIN=https://dev-830839.oktapreview.com
+OKTA_CLIENT_ID=your-client-id
+OKTA_CLIENT_SECRET=your-client-secret
+
+# GCS
+GCP_PROJECT_ID=re-platform-workflow-dealer
+GCP_BUCKET_NAME=re-workflow-documents
+GCP_KEY_FILE=./config/gcp-key.json
+GCP_BUCKET_REGION=asia-south1
+GCP_BUCKET_PUBLIC=true
+```
+
+**Frontend (.env):**
+```env
+VITE_API_BASE_URL=https://api.rebridge.co.in/api/v1
+VITE_OKTA_DOMAIN=https://dev-830839.oktapreview.com
+VITE_OKTA_CLIENT_ID=your-client-id
+```
+
+### 11.2 Okta Application Configuration
+
+**Application Type**: Single-Page App (SPA)
+**Grant Types**:
+- Authorization Code
+- Refresh Token
+**Redirect URIs**:
+- Production: `https://rebridge.co.in/login/callback`
+- Development: `http://localhost:3000/login/callback`
+**Logout Redirect URI**:
+- Production: `https://rebridge.co.in`
+- Development: `http://localhost:3000`
+
+### 11.3 Database Schema
+
+**Users Table:**
+```sql
+CREATE TABLE users (
+ user_id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
+ email VARCHAR(255) UNIQUE NOT NULL,
+ employee_id VARCHAR(50),
+ okta_sub VARCHAR(255) UNIQUE,
+ first_name VARCHAR(100),
+ last_name VARCHAR(100),
+ display_name VARCHAR(200),
+ department VARCHAR(100),
+ designation VARCHAR(100),
+ phone VARCHAR(20),
+ role VARCHAR(20) DEFAULT 'USER' CHECK (role IN ('USER', 'MANAGEMENT', 'ADMIN')),
+ is_active BOOLEAN DEFAULT true,
+ last_login TIMESTAMP,
+ created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+ updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
+);
+```
+
+---
+
+## 12. Troubleshooting
+
+### 12.1 Common Issues
+
+**Issue: "Token has expired"**
+- **Cause**: Access token expired (24 hours)
+- **Solution**: Token should refresh automatically. If not, check refresh token validity.
+
+**Issue: "User not found or inactive"**
+- **Cause**: User account deactivated or removed from database
+- **Solution**: Check `users.isActive` status. Reactivate user if needed.
+
+**Issue: "Invalid token"**
+- **Cause**: Token signature invalid or JWT secret mismatch
+- **Solution**: Verify `JWT_SECRET` matches between environments.
+
+**Issue: "Okta authentication failed"**
+- **Cause**: Invalid Okta credentials or network issue
+- **Solution**: Verify `OKTA_CLIENT_ID` and `OKTA_CLIENT_SECRET`. Check Okta domain accessibility.
+
+**Issue: Cookies not being set**
+- **Cause**: Cookie options mismatch or CORS configuration
+- **Solution**: Verify `FRONTEND_URL` matches frontend domain. Check `withCredentials: true` in Axios config.
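+
+For reference, the cookie-sending requirement on the frontend amounts to the following Axios setup (a sketch; the actual client module may differ):
+
+```typescript
+import axios from 'axios';
+
+// All API calls must send the HttpOnly auth cookies
+export const apiClient = axios.create({
+  baseURL: import.meta.env.VITE_API_BASE_URL,
+  withCredentials: true,
+});
+```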
+
+### 12.2 Debugging
+
+**Enable Debug Logging:**
+```env
+LOG_LEVEL=debug
+```
+
+**Check Token Validity:**
+```bash
+# Decode the JWT payload (second dot-separated segment) without verification
+# Note: base64url may lack '=' padding; append '==' if decoding fails, or inspect the token at jwt.io
+echo "YOUR_TOKEN" | cut -d '.' -f2 | tr '_-' '/+' | base64 -d 2>/dev/null; echo
+```
+
+**Verify Cookie Settings:**
+- Check browser DevTools → Application → Cookies
+- Verify `HttpOnly`, `Secure`, `SameSite` flags
+- Check cookie `Path` and `Domain`
+
+**Test Authentication Flow:**
+1. Clear browser cookies and localStorage
+2. Navigate to application
+3. Complete Okta login
+4. Check network tab for `/api/v1/auth/token-exchange` request
+5. Verify cookies are set in response headers
+6. Check subsequent API requests include cookies
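+
+To exercise the exchange endpoint by hand, something like the following can help; the request body shape here is an assumption, so adjust it to match the actual contract:
+
+```bash
+# Inspect Set-Cookie headers returned by the token exchange (body shape assumed)
+curl -i -X POST https://api.rebridge.co.in/api/v1/auth/token-exchange \
+  -H 'Content-Type: application/json' \
+  -d '{"code": "OKTA_AUTH_CODE"}'
+```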
+
+---
+
+## 13. Future Enhancements
+
+### 13.1 Planned Features
+
+1. **SCIM 2.0 Provisioning**: Automated user provisioning from HRMS
+2. **Refresh Token Rotation**: Enhanced security for token refresh
+3. **Rate Limiting**: Protect authentication endpoints from brute force
+4. **Session Management**: Active session tracking and management
+5. **Device Management**: Track and manage user devices
+6. **Audit Dashboard**: Visual interface for authentication and access logs
+
+### 13.2 Integration Opportunities
+
+1. **Azure Entra Integration**: Federation with Azure Entra for enterprise SSO
+2. **Tanflow Integration**: Full integration with Tanflow IdP
+3. **HRMS Integration**: Direct integration with HRMS for user data
+4. **Active Directory Integration**: LDAP/AD integration for on-premises users
+
+---
+
+## Document Version
+
+- **Version**: 1.0
+- **Last Updated**: December 2024
+- **Author**: RE Workflow Development Team
+- **Review Status**: Draft
+
+---
+
+## Appendix
+
+### A. Glossary
+
+- **IdP**: Identity Provider (Okta, Tanflow)
+- **SSO**: Single Sign-On
+- **JWT**: JSON Web Token
+- **OAuth 2.0**: Authorization framework
+- **OIDC**: OpenID Connect
+- **SAML**: Security Assertion Markup Language
+- **SCIM**: System for Cross-domain Identity Management
+- **MFA**: Multi-Factor Authentication
+- **RBAC**: Role-Based Access Control
+- **CSP**: Content Security Policy
+- **HSTS**: HTTP Strict Transport Security
+
+### B. References
+
+- [Okta Developer Documentation](https://developer.okta.com/docs/)
+- [OAuth 2.0 Specification](https://oauth.net/2/)
+- [OpenID Connect Specification](https://openid.net/connect/)
+- [JWT.io](https://jwt.io/)
+- [SCIM 2.0 Specification](https://www.rfc-editor.org/rfc/rfc7644)
+
+---
+
+**End of Document**
+
diff --git a/package-lock.json b/package-lock.json
index d4a32be..4738fa1 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -9,7 +9,7 @@
"version": "1.0.0",
"dependencies": {
"@anthropic-ai/sdk": "^0.68.0",
- "@google-cloud/storage": "^7.14.0",
+ "@google-cloud/storage": "^7.18.0",
"@google/generative-ai": "^0.24.1",
"@types/nodemailer": "^7.0.4",
"@types/uuid": "^8.3.4",
@@ -1656,9 +1656,9 @@
}
},
"node_modules/@google-cloud/storage": {
- "version": "7.17.2",
- "resolved": "https://registry.npmjs.org/@google-cloud/storage/-/storage-7.17.2.tgz",
- "integrity": "sha512-6xN0KNO8L/LIA5zu3CJwHkJiB6n65eykBLOb0E+RooiHYgX8CSao6lvQiKT9TBk2gL5g33LL3fmhDodZnt56rw==",
+ "version": "7.18.0",
+ "resolved": "https://registry.npmjs.org/@google-cloud/storage/-/storage-7.18.0.tgz",
+ "integrity": "sha512-r3ZwDMiz4nwW6R922Z1pwpePxyRwE5GdevYX63hRmAQUkUQJcBH/79EnQPDv5cOv1mFBgevdNWQfi3tie3dHrQ==",
"license": "Apache-2.0",
"dependencies": {
"@google-cloud/paginator": "^5.0.0",
diff --git a/package.json b/package.json
index 27ff726..5ff91eb 100644
--- a/package.json
+++ b/package.json
@@ -21,7 +21,7 @@
},
"dependencies": {
"@anthropic-ai/sdk": "^0.68.0",
- "@google-cloud/storage": "^7.14.0",
+ "@google-cloud/storage": "^7.18.0",
"@google/generative-ai": "^0.24.1",
"@types/nodemailer": "^7.0.4",
"@types/uuid": "^8.3.4",
diff --git a/src/controllers/document.controller.ts b/src/controllers/document.controller.ts
index 1ae17d7..640dd24 100644
--- a/src/controllers/document.controller.ts
+++ b/src/controllers/document.controller.ts
@@ -1,10 +1,13 @@
import { Request, Response } from 'express';
import crypto from 'crypto';
import path from 'path';
+import fs from 'fs';
import { Document } from '@models/Document';
import { User } from '@models/User';
+import { WorkflowRequest } from '@models/WorkflowRequest';
import { ResponseHandler } from '@utils/responseHandler';
import { activityService } from '@services/activity.service';
+import { gcsStorageService } from '@services/gcsStorage.service';
import type { AuthenticatedRequest } from '../types/express';
import { getRequestMetadata } from '@utils/requestUtils';
import { getConfigNumber, getConfigValue } from '@services/configReader.service';
@@ -25,6 +28,14 @@ export class DocumentController {
return;
}
+ // Get workflow request to retrieve requestNumber
+ const workflowRequest = await WorkflowRequest.findOne({ where: { requestId } });
+ if (!workflowRequest) {
+ ResponseHandler.error(res, 'Workflow request not found', 404);
+ return;
+ }
+ const requestNumber = (workflowRequest as any).requestNumber || (workflowRequest as any).request_number;
+
const file = (req as any).file as Express.Multer.File | undefined;
if (!file) {
ResponseHandler.error(res, 'No file uploaded', 400);
@@ -58,10 +69,33 @@ export class DocumentController {
return;
}
- const checksum = crypto.createHash('sha256').update(file.buffer || '').digest('hex');
+ // Get file buffer
+ const fileBuffer = file.buffer || (file.path ? fs.readFileSync(file.path) : Buffer.from(''));
+ const checksum = crypto.createHash('sha256').update(fileBuffer).digest('hex');
const extension = path.extname(file.originalname).replace('.', '').toLowerCase();
const category = (req.body?.category as string) || 'OTHER';
+ // Upload with automatic fallback to local storage
+ const uploadResult = await gcsStorageService.uploadFileWithFallback({
+ buffer: fileBuffer,
+ originalName: file.originalname,
+ mimeType: file.mimetype,
+ requestNumber: requestNumber,
+ fileType: 'documents'
+ });
+
+ const storageUrl = uploadResult.storageUrl;
+ const gcsFilePath = uploadResult.filePath;
+
+ // Clean up local temporary file if it exists (from multer disk storage)
+ if (file.path && fs.existsSync(file.path)) {
+ try {
+ fs.unlinkSync(file.path);
+ } catch (unlinkError) {
+ logWithContext('warn', 'Failed to delete local temporary file', { filePath: file.path });
+ }
+ }
+
const doc = await Document.create({
requestId,
uploadedBy: userId,
@@ -70,8 +104,8 @@ export class DocumentController {
fileType: extension,
fileExtension: extension,
fileSize: file.size,
- filePath: file.path, // server path
- storageUrl: `/uploads/${path.basename(file.path)}`,
+ filePath: gcsFilePath, // Store GCS path or local path
+ storageUrl: storageUrl, // Store GCS URL or local URL
mimeType: file.mimetype,
checksum,
isGoogleDoc: false,
diff --git a/src/controllers/workflow.controller.ts b/src/controllers/workflow.controller.ts
index 8df697b..8260867 100644
--- a/src/controllers/workflow.controller.ts
+++ b/src/controllers/workflow.controller.ts
@@ -7,6 +7,7 @@ import { Priority } from '../types/common.types';
import type { UpdateWorkflowRequest } from '../types/workflow.types';
import { Document } from '@models/Document';
import { User } from '@models/User';
+import { gcsStorageService } from '@services/gcsStorage.service';
import fs from 'fs';
import path from 'path';
import crypto from 'crypto';
@@ -251,9 +252,33 @@ export class WorkflowController {
const uploaderName = (user as any)?.displayName || (user as any)?.email || 'User';
for (const file of files) {
- const buffer = fs.readFileSync(file.path);
- const checksum = crypto.createHash('sha256').update(buffer).digest('hex');
+ // Get file buffer - multer.memoryStorage provides buffer, not path
+ const fileBuffer = (file as any).buffer || (file.path ? fs.readFileSync(file.path) : Buffer.from(''));
+ const checksum = crypto.createHash('sha256').update(fileBuffer).digest('hex');
const extension = path.extname(file.originalname).replace('.', '').toLowerCase();
+
+ // Upload with automatic fallback to local storage
+ const requestNumber = (workflow as any).requestNumber || (workflow as any).request_number;
+ const uploadResult = await gcsStorageService.uploadFileWithFallback({
+ buffer: fileBuffer,
+ originalName: file.originalname,
+ mimeType: file.mimetype,
+ requestNumber: requestNumber,
+ fileType: 'documents'
+ });
+
+ const storageUrl = uploadResult.storageUrl;
+ const gcsFilePath = uploadResult.filePath;
+
+ // Clean up local temporary file if it exists (from multer disk storage)
+ if (file.path && fs.existsSync(file.path)) {
+ try {
+ fs.unlinkSync(file.path);
+ } catch (unlinkError) {
+ logger.warn('[Workflow] Failed to delete local temporary file:', unlinkError);
+ }
+ }
+
const doc = await Document.create({
requestId: workflow.requestId,
uploadedBy: userId,
@@ -262,8 +287,8 @@ export class WorkflowController {
fileType: extension,
fileExtension: extension,
fileSize: file.size,
- filePath: file.path,
- storageUrl: `/uploads/${path.basename(file.path)}`,
+ filePath: gcsFilePath, // Store GCS path or local path
+ storageUrl: storageUrl, // Store GCS URL or local URL
mimeType: file.mimetype,
checksum,
isGoogleDoc: false,
@@ -575,9 +600,40 @@ export class WorkflowController {
if (files && files.length > 0) {
const actualRequestId = (workflow as any).requestId;
for (const file of files) {
- const buffer = fs.readFileSync(file.path);
- const checksum = crypto.createHash('sha256').update(buffer).digest('hex');
+ // Get file buffer - multer.memoryStorage provides buffer, not path
+ const fileBuffer = (file as any).buffer || (file.path ? fs.readFileSync(file.path) : Buffer.from(''));
+ const checksum = crypto.createHash('sha256').update(fileBuffer).digest('hex');
const extension = path.extname(file.originalname).replace('.', '').toLowerCase();
+
+ // Upload with automatic fallback to local storage
+ const requestNumber = (workflow as any).requestNumber || (workflow as any).request_number;
+ const uploadResult = await gcsStorageService.uploadFileWithFallback({
+ buffer: fileBuffer,
+ originalName: file.originalname,
+ mimeType: file.mimetype,
+ requestNumber: requestNumber,
+ fileType: 'documents'
+ });
+
+ const storageUrl = uploadResult.storageUrl;
+ const gcsFilePath = uploadResult.filePath;
+
+ // Clean up local temporary file if it exists (from multer disk storage)
+ if (file.path && fs.existsSync(file.path)) {
+ try {
+ fs.unlinkSync(file.path);
+ } catch (unlinkError) {
+ logger.warn('[Workflow] Failed to delete local temporary file:', unlinkError);
+ }
+ }
+
+ logger.info('[Workflow] Creating document record', {
+ fileName: file.originalname,
+ filePath: gcsFilePath,
+ storageUrl: storageUrl,
+ requestId: actualRequestId
+ });
+
const doc = await Document.create({
requestId: actualRequestId,
uploadedBy: userId,
@@ -586,8 +642,8 @@ export class WorkflowController {
fileType: extension,
fileExtension: extension,
fileSize: file.size,
- filePath: file.path,
- storageUrl: `/uploads/${path.basename(file.path)}`,
+ filePath: gcsFilePath, // Store GCS path or local path
+ storageUrl: storageUrl, // Store GCS URL or local URL
mimeType: file.mimetype,
checksum,
isGoogleDoc: false,
diff --git a/src/controllers/worknote.controller.ts b/src/controllers/worknote.controller.ts
index b05a421..d689c8e 100644
--- a/src/controllers/worknote.controller.ts
+++ b/src/controllers/worknote.controller.ts
@@ -40,7 +40,14 @@ export class WorkNoteController {
};
const payload = req.body?.payload ? JSON.parse(req.body.payload) : (req.body || {});
- const files = (req.files as any[])?.map(f => ({ path: f.path, originalname: f.originalname, mimetype: f.mimetype, size: f.size })) || [];
+ // Map files with buffer for GCS upload (multer.memoryStorage provides buffer, not path)
+ const files = (req.files as any[])?.map(f => ({
+ buffer: f.buffer,
+ path: f.path || null, // May not exist with memory storage
+ originalname: f.originalname,
+ mimetype: f.mimetype,
+ size: f.size
+ })) || [];
// Extract mentions from payload (sent by frontend)
const mentions = payload.mentions || [];
diff --git a/src/emailtemplates/emailPreferences.helper.ts b/src/emailtemplates/emailPreferences.helper.ts
index b120198..1bc66c0 100644
--- a/src/emailtemplates/emailPreferences.helper.ts
+++ b/src/emailtemplates/emailPreferences.helper.ts
@@ -7,6 +7,7 @@
import { User } from '@models/User';
import { SYSTEM_CONFIG } from '../config/system.config';
+import { getConfigValue } from '../services/configReader.service';
import logger from '../utils/logger';
/**
@@ -67,18 +68,41 @@ export async function shouldSendEmail(
/**
* Check if admin has enabled emails globally
- * Uses SYSTEM_CONFIG.NOTIFICATIONS.ENABLE_EMAIL
+ * Checks database configuration first, then falls back to environment variable
*/
 async function isAdminEmailEnabled(emailType: EmailNotificationType): Promise<boolean> {
- // Check global email setting from system config
- const adminEmailEnabled = SYSTEM_CONFIG.NOTIFICATIONS.ENABLE_EMAIL;
-
- if (!adminEmailEnabled) {
- logger.info('[Email] Admin has disabled email notifications globally');
- return false;
+ try {
+ // Step 1: Check database configuration (admin panel setting)
+ const dbConfigValue = await getConfigValue('ENABLE_EMAIL_NOTIFICATIONS', '');
+
+ if (dbConfigValue) {
+ // Parse database value (it's stored as string 'true' or 'false')
+ const dbEnabled = dbConfigValue.toLowerCase() === 'true';
+
+ if (!dbEnabled) {
+ logger.info('[Email] Admin has disabled email notifications globally (from database config)');
+ return false;
+ }
+
+ logger.debug('[Email] Email notifications enabled (from database config)');
+ return true;
+ }
+
+ // Step 2: Fall back to environment variable if database config not found
+ const envEnabled = SYSTEM_CONFIG.NOTIFICATIONS.ENABLE_EMAIL;
+
+ if (!envEnabled) {
+ logger.info('[Email] Admin has disabled email notifications globally (from environment variable)');
+ return false;
+ }
+
+ logger.debug('[Email] Email notifications enabled (from environment variable)');
+ return true;
+ } catch (error) {
+ logger.error('[Email] Error checking admin email configuration, defaulting to enabled:', error);
+ // On error, default to enabled (safe default to avoid blocking notifications)
+ return true;
}
-
- return true;
}
/**
@@ -152,19 +176,54 @@ export async function shouldSendInAppNotification(
}
/**
- * Check if admin has enabled in-app notifications
- * Uses SYSTEM_CONFIG.NOTIFICATIONS.ENABLE_IN_APP
+ * Check if admin has enabled in-app notifications globally
+ * Checks database configuration first, then falls back to environment variable
*/
 async function isAdminInAppEnabled(notificationType: string): Promise<boolean> {
- // Check global in-app setting from system config
- const adminInAppEnabled = SYSTEM_CONFIG.NOTIFICATIONS.ENABLE_IN_APP;
-
- if (!adminInAppEnabled) {
- logger.info('[Notification] Admin has disabled in-app notifications globally');
- return false;
+ try {
+ // Step 1: Check database configuration (admin panel setting)
+ const dbConfigValue = await getConfigValue('ENABLE_IN_APP_NOTIFICATIONS', '');
+
+ if (dbConfigValue) {
+ // Parse database value (it's stored as string 'true' or 'false')
+ const dbEnabled = dbConfigValue.toLowerCase() === 'true';
+
+ if (!dbEnabled) {
+ logger.info('[Notification] Admin has disabled in-app notifications globally (from database config)');
+ return false;
+ }
+
+ logger.debug('[Notification] In-app notifications enabled (from database config)');
+ return true;
+ }
+
+ // Step 2: Fall back to environment variable if database config not found
+ const envValue = process.env.ENABLE_IN_APP_NOTIFICATIONS;
+ if (envValue !== undefined) {
+ const envEnabled = envValue.toLowerCase() === 'true';
+ if (!envEnabled) {
+ logger.info('[Notification] Admin has disabled in-app notifications globally (from environment variable)');
+ return false;
+ }
+ logger.debug('[Notification] In-app notifications enabled (from environment variable)');
+ return true;
+ }
+
+ // Step 3: Final fallback to system config (defaults to true)
+ const adminInAppEnabled = SYSTEM_CONFIG.NOTIFICATIONS.ENABLE_IN_APP;
+
+ if (!adminInAppEnabled) {
+ logger.info('[Notification] Admin has disabled in-app notifications globally (from system config)');
+ return false;
+ }
+
+ logger.debug('[Notification] In-app notifications enabled (from system config default)');
+ return true;
+ } catch (error) {
+ logger.error('[Notification] Error checking admin in-app notification configuration, defaulting to enabled:', error);
+ // On error, default to enabled (safe default to avoid blocking notifications)
+ return true;
}
-
- return true;
}
/**
diff --git a/src/routes/document.routes.ts b/src/routes/document.routes.ts
index dabaf1c..fd2986f 100644
--- a/src/routes/document.routes.ts
+++ b/src/routes/document.routes.ts
@@ -7,17 +7,9 @@ import { asyncHandler } from '../middlewares/errorHandler.middleware';
import { DocumentController } from '../controllers/document.controller';
import { ensureUploadDir, UPLOAD_DIR } from '../config/storage';
-ensureUploadDir();
-
-const storage = multer.diskStorage({
- destination: (_req, _file, cb) => cb(null, UPLOAD_DIR),
- filename: (_req, file, cb) => {
- const safeBase = path.basename(file.originalname).replace(/[^a-zA-Z0-9._-]/g, '_');
- const hash = crypto.randomBytes(6).toString('hex');
- const name = `${Date.now()}-${hash}-${safeBase}`;
- cb(null, name);
- }
-});
+// Use memory storage for GCS uploads (files will be in memory, then uploaded to GCS)
+// If GCS is not configured, files will still be handled in memory and can be saved locally if needed
+const storage = multer.memoryStorage();
const upload = multer({
storage,
diff --git a/src/routes/workflow.routes.ts b/src/routes/workflow.routes.ts
index 88db260..9a58857 100644
--- a/src/routes/workflow.routes.ts
+++ b/src/routes/workflow.routes.ts
@@ -68,15 +68,8 @@ router.post('/',
);
// Multipart create (payload + files[])
-ensureUploadDir();
-const storage = multer.diskStorage({
- destination: (_req, _file, cb) => cb(null, UPLOAD_DIR),
- filename: (_req, file, cb) => {
- const safeBase = path.basename(file.originalname).replace(/[^a-zA-Z0-9._-]/g, '_');
- const hash = crypto.randomBytes(6).toString('hex');
- cb(null, `${Date.now()}-${hash}-${safeBase}`);
- }
-});
+// Use memory storage for GCS uploads
+const storage = multer.memoryStorage();
const upload = multer({ storage, limits: { fileSize: 10 * 1024 * 1024 } });
router.post('/multipart',
@@ -193,7 +186,7 @@ router.get('/:id/work-notes',
asyncHandler(workNoteController.list.bind(workNoteController))
);
-const noteUpload = upload; // reuse same storage/limits
+const noteUpload = upload; // reuse same memory storage/limits
router.post('/:id/work-notes',
authenticateToken,
validateParams(workflowParamsSchema),
@@ -207,6 +200,8 @@ router.get('/documents/:documentId/preview',
asyncHandler(async (req: any, res: Response) => {
const { documentId } = req.params;
const { Document } = require('@models/Document');
+ const { gcsStorageService } = require('../services/gcsStorage.service');
+ const fs = require('fs');
const document = await Document.findOne({ where: { documentId } });
if (!document) {
@@ -214,12 +209,34 @@ router.get('/documents/:documentId/preview',
return;
}
- const filePath = (document as any).filePath;
- const fileName = (document as any).originalFileName || (document as any).fileName;
- const fileType = (document as any).fileType;
+ const storageUrl = (document as any).storageUrl || (document as any).storage_url;
+ const filePath = (document as any).filePath || (document as any).file_path;
+ const fileName = (document as any).originalFileName || (document as any).original_file_name || (document as any).fileName;
+ const fileType = (document as any).mimeType || (document as any).mime_type;
- // Check if file exists
- if (!require('fs').existsSync(filePath)) {
+ // Check if it's a GCS URL
+ const isGcsUrl = storageUrl && (storageUrl.startsWith('https://storage.googleapis.com') || storageUrl.startsWith('gs://'));
+
+ if (isGcsUrl) {
+ // Redirect to GCS public URL or use signed URL for private files
+ res.redirect(storageUrl);
+ return;
+ }
+
+ // Local file handling - check if storageUrl is a local path (starts with /uploads/)
+ if (storageUrl && storageUrl.startsWith('/uploads/')) {
+ // File is served by express.static middleware, redirect to the storage URL
+ res.redirect(storageUrl);
+ return;
+ }
+
+ // Legacy local file handling (absolute path stored in filePath)
+ // Resolve relative path if needed
+ const absolutePath = filePath && !path.isAbsolute(filePath)
+ ? path.join(UPLOAD_DIR, filePath)
+ : filePath;
+
+ if (!absolutePath || !fs.existsSync(absolutePath)) {
res.status(404).json({ success: false, error: 'File not found on server' });
return;
}
@@ -233,17 +250,17 @@ router.get('/documents/:documentId/preview',
res.setHeader('Access-Control-Expose-Headers', 'Content-Type, Content-Disposition');
// Set appropriate content type
- res.contentType(fileType);
+ res.contentType(fileType || 'application/octet-stream');
// For images and PDFs, allow inline viewing
- const isPreviewable = fileType.includes('image') || fileType.includes('pdf');
+ const isPreviewable = fileType && (fileType.includes('image') || fileType.includes('pdf'));
if (isPreviewable) {
res.setHeader('Content-Disposition', `inline; filename="${fileName}"`);
} else {
res.setHeader('Content-Disposition', `attachment; filename="${fileName}"`);
}
- res.sendFile(filePath, (err) => {
+ res.sendFile(absolutePath, (err) => {
if (err && !res.headersSent) {
res.status(500).json({ success: false, error: 'Failed to serve file' });
}
@@ -257,6 +274,8 @@ router.get('/documents/:documentId/download',
asyncHandler(async (req: any, res: Response) => {
const { documentId } = req.params;
const { Document } = require('@models/Document');
+ const { gcsStorageService } = require('../services/gcsStorage.service');
+ const fs = require('fs');
const document = await Document.findOne({ where: { documentId } });
if (!document) {
@@ -264,16 +283,40 @@ router.get('/documents/:documentId/download',
return;
}
- const filePath = (document as any).filePath;
- const fileName = (document as any).originalFileName || (document as any).fileName;
+ const storageUrl = (document as any).storageUrl || (document as any).storage_url;
+ const filePath = (document as any).filePath || (document as any).file_path;
+ const fileName = (document as any).originalFileName || (document as any).original_file_name || (document as any).fileName;
- // Check if file exists
- if (!require('fs').existsSync(filePath)) {
+ // Check if it's a GCS URL
+ const isGcsUrl = storageUrl && (storageUrl.startsWith('https://storage.googleapis.com') || storageUrl.startsWith('gs://'));
+
+ if (isGcsUrl) {
+ // Redirect to GCS public URL for download
+ res.redirect(storageUrl);
+ return;
+ }
+
+ // Local file handling - check if storageUrl is a local path (starts with /uploads/)
+ if (storageUrl && storageUrl.startsWith('/uploads/')) {
+ // File is served by express.static middleware, redirect to the storage URL
+ res.redirect(storageUrl);
+ return;
+ }
+
+ // Legacy local file handling (absolute path stored in filePath)
+ // Resolve relative path if needed
+ const path = require('path');
+ const { UPLOAD_DIR } = require('../config/storage');
+ const absolutePath = filePath && !path.isAbsolute(filePath)
+ ? path.join(UPLOAD_DIR, filePath)
+ : filePath;
+
+ if (!absolutePath || !fs.existsSync(absolutePath)) {
res.status(404).json({ success: false, error: 'File not found on server' });
return;
}
- res.download(filePath, fileName, (err) => {
+ res.download(absolutePath, fileName, (err) => {
if (err && !res.headersSent) {
res.status(500).json({ success: false, error: 'Failed to download file' });
}
@@ -287,9 +330,31 @@ router.get('/work-notes/attachments/:attachmentId/preview',
asyncHandler(async (req: any, res: Response) => {
const { attachmentId } = req.params;
const fileInfo = await workNoteService.downloadAttachment(attachmentId);
+ const fs = require('fs');
- // Check if file exists
- if (!require('fs').existsSync(fileInfo.filePath)) {
+ // Check if it's a GCS URL
+ if (fileInfo.isGcsUrl && fileInfo.storageUrl) {
+ // Redirect to GCS public URL
+ res.redirect(fileInfo.storageUrl);
+ return;
+ }
+
+ // Local file handling - check if storageUrl is a local path (starts with /uploads/)
+ if (fileInfo.storageUrl && fileInfo.storageUrl.startsWith('/uploads/')) {
+ // File is served by express.static middleware, redirect to the storage URL
+ res.redirect(fileInfo.storageUrl);
+ return;
+ }
+
+ // Legacy local file handling (absolute path stored in filePath)
+ // Resolve relative path if needed
+ const path = require('path');
+ const { UPLOAD_DIR } = require('../config/storage');
+ const absolutePath = fileInfo.filePath && !path.isAbsolute(fileInfo.filePath)
+ ? path.join(UPLOAD_DIR, fileInfo.filePath)
+ : fileInfo.filePath;
+
+ if (!absolutePath || !fs.existsSync(absolutePath)) {
res.status(404).json({ success: false, error: 'File not found' });
return;
}
@@ -303,17 +368,17 @@ router.get('/work-notes/attachments/:attachmentId/preview',
res.setHeader('Access-Control-Expose-Headers', 'Content-Type, Content-Disposition');
// Set appropriate content type
- res.contentType(fileInfo.fileType);
+ res.contentType(fileInfo.fileType || 'application/octet-stream');
// For images and PDFs, allow inline viewing
- const isPreviewable = fileInfo.fileType.includes('image') || fileInfo.fileType.includes('pdf');
+ const isPreviewable = fileInfo.fileType && (fileInfo.fileType.includes('image') || fileInfo.fileType.includes('pdf'));
if (isPreviewable) {
res.setHeader('Content-Disposition', `inline; filename="${fileInfo.fileName}"`);
} else {
res.setHeader('Content-Disposition', `attachment; filename="${fileInfo.fileName}"`);
}
- res.sendFile(fileInfo.filePath, (err) => {
+ res.sendFile(absolutePath, (err) => {
if (err && !res.headersSent) {
res.status(500).json({ success: false, error: 'Failed to serve file' });
}
@@ -327,14 +392,36 @@ router.get('/work-notes/attachments/:attachmentId/download',
asyncHandler(async (req: any, res: Response) => {
const { attachmentId } = req.params;
const fileInfo = await workNoteService.downloadAttachment(attachmentId);
+ const fs = require('fs');
- // Check if file exists
- if (!require('fs').existsSync(fileInfo.filePath)) {
+ // Check if it's a GCS URL
+ if (fileInfo.isGcsUrl && fileInfo.storageUrl) {
+ // Redirect to GCS public URL for download
+ res.redirect(fileInfo.storageUrl);
+ return;
+ }
+
+ // Local file handling - check if storageUrl is a local path (starts with /uploads/)
+ if (fileInfo.storageUrl && fileInfo.storageUrl.startsWith('/uploads/')) {
+ // File is served by express.static middleware, redirect to the storage URL
+ res.redirect(fileInfo.storageUrl);
+ return;
+ }
+
+ // Legacy local file handling (absolute path stored in filePath)
+ // Resolve relative path if needed
+ const path = require('path');
+ const { UPLOAD_DIR } = require('../config/storage');
+ const absolutePath = fileInfo.filePath && !path.isAbsolute(fileInfo.filePath)
+ ? path.join(UPLOAD_DIR, fileInfo.filePath)
+ : fileInfo.filePath;
+
+ if (!absolutePath || !fs.existsSync(absolutePath)) {
res.status(404).json({ success: false, error: 'File not found' });
return;
}
- res.download(fileInfo.filePath, fileInfo.fileName, (err) => {
+ res.download(absolutePath, fileInfo.fileName, (err) => {
if (err && !res.headersSent) {
res.status(500).json({ success: false, error: 'Failed to download file' });
}
diff --git a/src/scripts/seed-admin-config.ts b/src/scripts/seed-admin-config.ts
index fea8177..e5c90ec 100644
--- a/src/scripts/seed-admin-config.ts
+++ b/src/scripts/seed-admin-config.ts
@@ -331,24 +331,6 @@ async function seedAdminConfigurations() {
NOW(),
NOW()
),
- (
- gen_random_uuid(),
- 'ENABLE_PUSH_NOTIFICATIONS',
- 'FEATURES',
- 'true',
- 'BOOLEAN',
- 'Enable Push Notifications',
- 'Send browser push notifications for real-time events',
- 'true',
- true,
- false,
- '{}'::jsonb,
- 'switch',
- 41,
- false,
- NOW(),
- NOW()
- ),
(
gen_random_uuid(),
'ENABLE_EMAIL_NOTIFICATIONS',
@@ -362,11 +344,29 @@ async function seedAdminConfigurations() {
false,
'{}'::jsonb,
'switch',
- 42,
+ 41,
true,
NOW(),
NOW()
),
+ (
+ gen_random_uuid(),
+ 'ENABLE_IN_APP_NOTIFICATIONS',
+ 'FEATURES',
+ 'true',
+ 'BOOLEAN',
+ 'Enable In-App Notifications',
+ 'Show notifications within the application portal',
+ 'true',
+ true,
+ false,
+ '{}'::jsonb,
+ 'switch',
+ 42,
+ false,
+ NOW(),
+ NOW()
+ ),
-- AI Configuration (from migration 20251111-add-ai-provider-configs)
(
diff --git a/src/scripts/seed-configurations-complete.sql b/src/scripts/seed-configurations-complete.sql
index 602e673..165798b 100644
--- a/src/scripts/seed-configurations-complete.sql
+++ b/src/scripts/seed-configurations-complete.sql
@@ -238,12 +238,12 @@ INSERT INTO admin_configurations (
),
(
gen_random_uuid(),
- 'ENABLE_PUSH_NOTIFICATIONS',
+ 'ENABLE_IN_APP_NOTIFICATIONS',
'NOTIFICATION_RULES',
'true',
'BOOLEAN',
- 'Enable Push Notifications',
- 'Send browser push notifications for real-time events',
+ 'Enable In-App Notifications',
+ 'Show notifications within the application portal',
'true',
true,
false,
diff --git a/src/services/configSeed.service.ts b/src/services/configSeed.service.ts
index 0abc80e..2fd833a 100644
--- a/src/services/configSeed.service.ts
+++ b/src/services/configSeed.service.ts
 export async function seedDefaultConfigurations(): Promise<void> {
),
(
gen_random_uuid(),
- 'ENABLE_PUSH_NOTIFICATIONS',
+ 'ENABLE_IN_APP_NOTIFICATIONS',
'NOTIFICATION_RULES',
'true',
'BOOLEAN',
- 'Enable Push Notifications',
- 'Send browser push notifications for real-time events',
+ 'Enable In-App Notifications',
+ 'Show notifications within the application portal',
'true',
true,
false,
diff --git a/src/services/gcsStorage.service.ts b/src/services/gcsStorage.service.ts
new file mode 100644
index 0000000..8e2f7aa
--- /dev/null
+++ b/src/services/gcsStorage.service.ts
@@ -0,0 +1,341 @@
+import { Storage } from '@google-cloud/storage';
+import path from 'path';
+import fs from 'fs';
+import logger from '@utils/logger';
+import { UPLOAD_DIR } from '@config/storage';
+
+interface UploadFileOptions {
+ filePath?: string;
+ buffer?: Buffer;
+ originalName: string;
+ mimeType: string;
+ requestNumber: string; // Request number (e.g., 'REQ-2025-12-0001')
+ fileType: 'documents' | 'attachments'; // Type of file: documents or attachments
+}
+
+interface UploadResult {
+ storageUrl: string;
+ filePath: string; // GCS path
+ fileName: string; // Generated file name in GCS
+}
+
+class GCSStorageService {
+ private storage: Storage | null = null;
+ private bucketName: string;
+ private projectId: string;
+
+ constructor() {
+ this.projectId = process.env.GCP_PROJECT_ID || '';
+ this.bucketName = process.env.GCP_BUCKET_NAME || '';
+ const keyFilePath = process.env.GCP_KEY_FILE || '';
+
+ if (!this.projectId || !this.bucketName || !keyFilePath) {
+ logger.warn('[GCS] GCP configuration missing. Uploads will fall back to local storage.');
+ return;
+ }
+
+ try {
+ // Resolve key file path (can be relative or absolute)
+ const resolvedKeyPath = path.isAbsolute(keyFilePath)
+ ? keyFilePath
+ : path.resolve(process.cwd(), keyFilePath);
+
+ if (!fs.existsSync(resolvedKeyPath)) {
+ logger.error(`[GCS] Key file not found at: ${resolvedKeyPath}`);
+ return;
+ }
+
+ this.storage = new Storage({
+ projectId: this.projectId,
+ keyFilename: resolvedKeyPath,
+ });
+
+ logger.info('[GCS] Initialized successfully', {
+ projectId: this.projectId,
+ bucketName: this.bucketName,
+ });
+ } catch (error) {
+ logger.error('[GCS] Failed to initialize:', error);
+ }
+ }
+
+ /**
+ * Ensure the bucket exists, create it if it doesn't
+ * This is called lazily on first upload
+ */
+ private async ensureBucketExists(): Promise<void> {
+ if (!this.storage) {
+ throw new Error('GCS storage not initialized');
+ }
+
+ try {
+ const bucket = this.storage.bucket(this.bucketName);
+ const [exists] = await bucket.exists();
+
+ if (!exists) {
+ logger.info(`[GCS] Bucket "${this.bucketName}" does not exist. Creating...`);
+
+ // Get region from env or default to asia-south1 (Mumbai)
+ const region = process.env.GCP_BUCKET_REGION || 'asia-south1';
+
+ // Create bucket with default settings
+ // Note: publicAccessPrevention is not set to allow public file access
+ // If you need private buckets, set GCP_BUCKET_PUBLIC=false and use signed URLs
+ const bucketOptions: any = {
+ location: region,
+ storageClass: 'STANDARD',
+ uniformBucketLevelAccess: true,
+ };
+
+ // Only enforce public access prevention if explicitly configured
+ if (process.env.GCP_BUCKET_PUBLIC === 'false') {
+ bucketOptions.publicAccessPrevention = 'enforced';
+ }
+
+ await bucket.create(bucketOptions);
+
+ logger.info(`[GCS] Bucket "${this.bucketName}" created successfully in region "${region}"`);
+ } else {
+ logger.debug(`[GCS] Bucket "${this.bucketName}" already exists`);
+ }
+ } catch (error) {
+ logger.error(`[GCS] Failed to check/create bucket "${this.bucketName}":`, error);
+ throw error;
+ }
+ }
+
+ /**
+ * Upload a file to Google Cloud Storage
+ * @param options File upload options
+ * @returns Upload result with storage URL and file path
+ */
+ async uploadFile(options: UploadFileOptions): Promise<UploadResult> {
+ if (!this.storage) {
+ throw new Error('GCS storage not initialized. Check GCP configuration.');
+ }
+
+ const { filePath, buffer, originalName, mimeType, requestNumber, fileType } = options;
+
+ if (!filePath && !buffer) {
+ throw new Error('Either filePath or buffer must be provided');
+ }
+
+ if (!requestNumber) {
+ throw new Error('Request number is required for file upload');
+ }
+
+ try {
+ // Ensure bucket exists before uploading
+ await this.ensureBucketExists();
+
+ // Generate unique file name
+ const timestamp = Date.now();
+ const randomHash = Math.random().toString(36).substring(2, 8);
+ const safeName = originalName.replace(/[^a-zA-Z0-9._-]/g, '_');
+ const extension = path.extname(originalName);
+ const fileName = `${timestamp}-${randomHash}-${safeName}`;
+
+ // Build GCS path: requests/{requestNumber}/{fileType}/{fileName}
+ // Example: requests/REQ-2025-12-0001/documents/proposal.pdf
+ // Example: requests/REQ-2025-12-0001/attachments/approval_note.pdf
+ const gcsFilePath = `requests/${requestNumber}/${fileType}/${fileName}`;
+
+ const bucket = this.storage.bucket(this.bucketName);
+ const file = bucket.file(gcsFilePath);
+
+ // Upload options
+ const uploadOptions: any = {
+ metadata: {
+ contentType: mimeType,
+ metadata: {
+ originalName: originalName,
+ uploadedAt: new Date().toISOString(),
+ },
+ },
+ };
+
+ // Upload from buffer or file path
+ if (buffer) {
+ await file.save(buffer, uploadOptions);
+ } else if (filePath) {
+ await bucket.upload(filePath, {
+ destination: gcsFilePath,
+ metadata: uploadOptions.metadata,
+ });
+ }
+
+ // Make file publicly readable (or use signed URLs for private access)
+ // Note: This will fail if bucket has publicAccessPrevention enabled
+ let publicUrl: string;
+ try {
+ await file.makePublic();
+ // Get public URL
+ publicUrl = `https://storage.googleapis.com/${this.bucketName}/${gcsFilePath}`;
+ } catch (makePublicError: any) {
+ // If making public fails (e.g., public access prevention), use signed URL
+ if (makePublicError?.code === 400 || makePublicError?.message?.includes('publicAccessPrevention')) {
+ logger.warn('[GCS] Cannot make file public (public access prevention enabled). Using signed URL.');
+ publicUrl = await this.getSignedUrl(gcsFilePath, 60 * 24 * 365); // 1 year expiry
+ } else {
+ throw makePublicError;
+ }
+ }
+
+ logger.info('[GCS] File uploaded successfully', {
+ fileName: originalName,
+ gcsPath: gcsFilePath,
+ storageUrl: publicUrl,
+ size: buffer ? buffer.length : 'unknown',
+ });
+
+ return {
+ storageUrl: publicUrl,
+ filePath: gcsFilePath,
+ fileName: fileName,
+ };
+ } catch (error) {
+ logger.error('[GCS] Upload failed:', error);
+ throw new Error(`Failed to upload file to GCS: ${error instanceof Error ? error.message : 'Unknown error'}`);
+ }
+ }
+
+ /**
+ * Delete a file from Google Cloud Storage
+ * @param gcsFilePath The GCS file path (e.g., 'attachments/file-name.ext')
+ */
+ async deleteFile(gcsFilePath: string): Promise<void> {
+ if (!this.storage) {
+ throw new Error('GCS storage not initialized. Check GCP configuration.');
+ }
+
+ try {
+ const bucket = this.storage.bucket(this.bucketName);
+ const file = bucket.file(gcsFilePath);
+ await file.delete();
+
+ logger.info('[GCS] File deleted successfully', { gcsPath: gcsFilePath });
+ } catch (error) {
+ logger.error('[GCS] Delete failed:', error);
+ throw new Error(`Failed to delete file from GCS: ${error instanceof Error ? error.message : 'Unknown error'}`);
+ }
+ }
+
+ /**
+ * Get a signed URL for private file access (valid for 1 hour by default)
+ * @param gcsFilePath The GCS file path
+ * @param expiresInMinutes URL expiration time in minutes (default: 60)
+ * @returns Signed URL
+ */
+ async getSignedUrl(gcsFilePath: string, expiresInMinutes: number = 60): Promise<string> {
+ if (!this.storage) {
+ throw new Error('GCS storage not initialized. Check GCP configuration.');
+ }
+
+ try {
+ const bucket = this.storage.bucket(this.bucketName);
+ const file = bucket.file(gcsFilePath);
+
+ const [url] = await file.getSignedUrl({
+ action: 'read',
+ expires: Date.now() + expiresInMinutes * 60 * 1000,
+ });
+
+ return url;
+ } catch (error) {
+ logger.error('[GCS] Failed to generate signed URL:', error);
+ throw new Error(`Failed to generate signed URL: ${error instanceof Error ? error.message : 'Unknown error'}`);
+ }
+ }
+
+ /**
+ * Save file to local storage with the same folder structure as GCS
+ * This is used as a fallback when GCS is not configured or fails
+ * @param options File upload options
+ * @returns Upload result with local storage URL and file path
+ */
+ saveToLocalStorage(options: UploadFileOptions): UploadResult {
+ const { buffer, originalName, requestNumber, fileType } = options;
+
+ if (!buffer) {
+ throw new Error('Buffer is required for local storage fallback');
+ }
+
+ if (!requestNumber) {
+ throw new Error('Request number is required for file upload');
+ }
+
+ try {
+ // Generate unique file name (same format as GCS)
+ const timestamp = Date.now();
+ const randomHash = Math.random().toString(36).substring(2, 8);
+ const safeName = originalName.replace(/[^a-zA-Z0-9._-]/g, '_');
+ const fileName = `${timestamp}-${randomHash}-${safeName}`;
+
+ // Build local path: uploads/requests/{requestNumber}/{fileType}/{fileName}
+ // This matches the GCS structure: requests/{requestNumber}/{fileType}/{fileName}
+ const localDir = path.join(UPLOAD_DIR, 'requests', requestNumber, fileType);
+
+ // Ensure directory exists
+ if (!fs.existsSync(localDir)) {
+ fs.mkdirSync(localDir, { recursive: true });
+ }
+
+ const localFilePath = path.join(localDir, fileName);
+ const relativePath = `requests/${requestNumber}/${fileType}/${fileName}`;
+
+ // Save file to disk
+ fs.writeFileSync(localFilePath, buffer);
+
+ // Create URL path (will be served by express.static)
+ const storageUrl = `/uploads/${relativePath}`;
+
+ logger.info('[GCS] File saved to local storage (fallback)', {
+ fileName: originalName,
+ localPath: relativePath,
+ storageUrl: storageUrl,
+ requestNumber: requestNumber,
+ });
+
+ return {
+ storageUrl: storageUrl,
+ filePath: relativePath, // Store relative path (same format as GCS path)
+ fileName: fileName,
+ };
+ } catch (error) {
+ logger.error('[GCS] Local storage save failed:', error);
+ throw new Error(`Failed to save file to local storage: ${error instanceof Error ? error.message : 'Unknown error'}`);
+ }
+ }
+
+ /**
+ * Upload file with automatic fallback to local storage
+ * If GCS is configured and works, uploads to GCS. Otherwise, saves to local storage.
+ * @param options File upload options
+ * @returns Upload result with storage URL and file path
+ */
+ async uploadFileWithFallback(options: UploadFileOptions): Promise<UploadResult> {
+ // If GCS is not configured, use local storage directly
+ if (!this.isConfigured()) {
+ logger.info('[GCS] GCS not configured, using local storage');
+ return this.saveToLocalStorage(options);
+ }
+
+ // Try GCS upload first
+ try {
+ return await this.uploadFile(options);
+ } catch (gcsError) {
+ logger.warn('[GCS] GCS upload failed, falling back to local storage', { error: gcsError });
+ // Fallback to local storage
+ return this.saveToLocalStorage(options);
+ }
+ }
+
+ /**
+ * Check if GCS is properly configured
+ */
+ isConfigured(): boolean {
+ return this.storage !== null && this.bucketName !== '' && this.projectId !== '';
+ }
+}
+
+export const gcsStorageService = new GCSStorageService();
diff --git a/src/services/worknote.service.ts b/src/services/worknote.service.ts
index ac190a1..1c3d337 100644
--- a/src/services/worknote.service.ts
+++ b/src/services/worknote.service.ts
@@ -5,7 +5,10 @@ import { Participant } from '@models/Participant';
import { WorkflowRequest } from '@models/WorkflowRequest';
import { activityService } from './activity.service';
import { notificationService } from './notification.service';
+import { gcsStorageService } from './gcsStorage.service';
import logger from '@utils/logger';
+import fs from 'fs';
+import path from 'path';
export class WorkNoteService {
async list(requestId: string) {
@@ -71,7 +74,7 @@ export class WorkNoteService {
}
}
- async create(requestId: string, user: { userId: string; name?: string; role?: string }, payload: { message: string; isPriority?: boolean; parentNoteId?: string | null; mentionedUsers?: string[] | null; }, files?: Array<{ path: string; originalname: string; mimetype: string; size: number }>, requestMetadata?: { ipAddress?: string | null; userAgent?: string | null }): Promise<any> {
+ async create(requestId: string, user: { userId: string; name?: string; role?: string }, payload: { message: string; isPriority?: boolean; parentNoteId?: string | null; mentionedUsers?: string[] | null; }, files?: Array<{ path?: string | null; buffer?: Buffer; originalname: string; mimetype: string; size: number }>, requestMetadata?: { ipAddress?: string | null; userAgent?: string | null }): Promise<any> {
logger.info('[WorkNote] Creating note:', { requestId, user, messageLength: payload.message?.length });
const note = await WorkNote.create({
@@ -95,13 +98,44 @@ export class WorkNoteService {
const attachments = [];
if (files && files.length) {
+ // Get request number for folder structure
+ const workflow = await WorkflowRequest.findOne({ where: { requestId } });
+ const requestNumber = workflow ? ((workflow as any).requestNumber || (workflow as any).request_number) : null;
+
for (const f of files) {
+ // Read file buffer if path exists, otherwise use provided buffer
+ const fileBuffer = f.buffer || (f.path ? fs.readFileSync(f.path) : Buffer.from(''));
+
+ // Upload with automatic fallback to local storage
+ // If requestNumber is not available, use a default structure
+ const effectiveRequestNumber = requestNumber || 'UNKNOWN';
+ const uploadResult = await gcsStorageService.uploadFileWithFallback({
+ buffer: fileBuffer,
+ originalName: f.originalname,
+ mimeType: f.mimetype,
+ requestNumber: effectiveRequestNumber,
+ fileType: 'attachments'
+ });
+
+ const storageUrl = uploadResult.storageUrl;
+ const gcsFilePath = uploadResult.filePath;
+
+ // Clean up local temporary file if it exists (from multer disk storage)
+ if (f.path && fs.existsSync(f.path)) {
+ try {
+ fs.unlinkSync(f.path);
+ } catch (unlinkError) {
+ logger.warn('[WorkNote] Failed to delete local temporary file:', unlinkError);
+ }
+ }
+
const attachment = await WorkNoteAttachment.create({
noteId: (note as any).noteId,
fileName: f.originalname,
fileType: f.mimetype,
fileSize: f.size,
- filePath: f.path,
+ filePath: gcsFilePath, // Store GCS path or local path
+ storageUrl: storageUrl, // Store GCS URL or local URL
isDownloadable: true
} as any);
@@ -111,6 +145,7 @@ export class WorkNoteService {
fileType: (attachment as any).fileType,
fileSize: (attachment as any).fileSize,
filePath: (attachment as any).filePath,
+ storageUrl: (attachment as any).storageUrl,
isDownloadable: (attachment as any).isDownloadable
});
}
@@ -189,10 +224,20 @@ export class WorkNoteService {
throw new Error('Attachment not found');
}
+ const storageUrl = (attachment as any).storageUrl || (attachment as any).storage_url;
+ const filePath = (attachment as any).filePath || (attachment as any).file_path;
+ const fileName = (attachment as any).fileName || (attachment as any).file_name;
+ const fileType = (attachment as any).fileType || (attachment as any).file_type;
+
+ // Check if it's a GCS URL
+ const isGcsUrl = storageUrl && (storageUrl.startsWith('https://storage.googleapis.com') || storageUrl.startsWith('gs://'));
+
return {
- filePath: (attachment as any).filePath,
- fileName: (attachment as any).fileName,
- fileType: (attachment as any).fileType
+ filePath: filePath,
+ storageUrl: storageUrl,
+ fileName: fileName,
+ fileType: fileType,
+ isGcsUrl: isGcsUrl
};
}
}