Compare commits

..

8 Commits

39 changed files with 1828 additions and 229 deletions

View File

@ -1,2 +0,0 @@
import{a as t}from"./index-C331nI1Q.js";import"./radix-vendor-DIkYAdWy.js";import"./charts-vendor-Bme4E5cb.js";import"./utils-vendor-DHm03ykU.js";import"./ui-vendor-CdaLA-IN.js";import"./socket-vendor-TjCxX7sJ.js";import"./redux-vendor-tbZCm13o.js";import"./router-vendor-AvM4PHvP.js";async function m(n){return(await t.post(`/conclusions/${n}/generate`)).data.data}async function d(n,o){return(await t.post(`/conclusions/${n}/finalize`,{finalRemark:o})).data.data}async function f(n){return(await t.get(`/conclusions/${n}`)).data.data}export{d as finalizeConclusion,m as generateConclusion,f as getConclusion};
//# sourceMappingURL=conclusionApi-CdXsBdJs.js.map

View File

@ -0,0 +1,2 @@
import{a as s}from"./index-BtWUMn8R.js";import"./radix-vendor-DIkYAdWy.js";import"./charts-vendor-Bme4E5cb.js";import"./utils-vendor-DHm03ykU.js";import"./ui-vendor-CdaLA-IN.js";import"./socket-vendor-TjCxX7sJ.js";import"./redux-vendor-tbZCm13o.js";import"./router-vendor-AvM4PHvP.js";async function m(n){return(await s.post(`/conclusions/${n}/generate`)).data.data}async function f(n,t){return(await s.post(`/conclusions/${n}/finalize`,{finalRemark:t})).data.data}async function d(n){var t;try{return(await s.get(`/conclusions/${n}`)).data.data}catch(o){if(((t=o.response)==null?void 0:t.status)===404)return null;throw o}}export{f as finalizeConclusion,m as generateConclusion,d as getConclusion};
//# sourceMappingURL=conclusionApi-t9LwHY2s.js.map
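For readability, the only functional change in this bundle is in `getConclusion`: it now returns `null` when the API responds with 404 instead of throwing. The readable source, recovered from the accompanying sourcemap, is:

```typescript
// Excerpt from src/services/conclusionApi.ts (reconstructed from the sourcemap below).
// getConclusion treats a missing conclusion (404) as a normal case and returns null.
export async function getConclusion(requestId: string): Promise<ConclusionRemark | null> {
  try {
    const response = await apiClient.get(`/conclusions/${requestId}`);
    return response.data.data;
  } catch (error: any) {
    // Handle 404 gracefully - conclusion doesn't exist yet, which is normal
    if (error.response?.status === 404) {
      return null;
    }
    // Re-throw other errors
    throw error;
  }
}
```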

View File

@ -1 +1 @@
{"version":3,"file":"conclusionApi-CdXsBdJs.js","sources":["../../src/services/conclusionApi.ts"],"sourcesContent":["import apiClient from './authApi';\r\n\r\nexport interface ConclusionRemark {\r\n conclusionId: string;\r\n requestId: string;\r\n aiGeneratedRemark: string | null;\r\n aiModelUsed: string | null;\r\n aiConfidenceScore: number | null;\r\n finalRemark: string | null;\r\n editedBy: string | null;\r\n isEdited: boolean;\r\n editCount: number;\r\n approvalSummary: any;\r\n documentSummary: any;\r\n keyDiscussionPoints: string[];\r\n generatedAt: string | null;\r\n finalizedAt: string | null;\r\n createdAt: string;\r\n updatedAt: string;\r\n}\r\n\r\n/**\r\n * Generate AI-powered conclusion remark\r\n */\r\nexport async function generateConclusion(requestId: string): Promise<{\r\n conclusionId: string;\r\n aiGeneratedRemark: string;\r\n keyDiscussionPoints: string[];\r\n confidence: number;\r\n generatedAt: string;\r\n}> {\r\n const response = await apiClient.post(`/conclusions/${requestId}/generate`);\r\n return response.data.data;\r\n}\r\n\r\n/**\r\n * Update conclusion remark (edit by initiator)\r\n */\r\nexport async function updateConclusion(requestId: string, finalRemark: string): Promise<ConclusionRemark> {\r\n const response = await apiClient.put(`/conclusions/${requestId}`, { finalRemark });\r\n return response.data.data;\r\n}\r\n\r\n/**\r\n * Finalize conclusion and close request\r\n */\r\nexport async function finalizeConclusion(requestId: string, finalRemark: string): Promise<{\r\n conclusionId: string;\r\n requestNumber: string;\r\n status: string;\r\n finalRemark: string;\r\n finalizedAt: string;\r\n}> {\r\n const response = await apiClient.post(`/conclusions/${requestId}/finalize`, { finalRemark });\r\n return response.data.data;\r\n}\r\n\r\n/**\r\n * Get conclusion for a request\r\n */\r\nexport async function getConclusion(requestId: string): Promise<ConclusionRemark> {\r\n const response = await apiClient.get(`/conclusions/${requestId}`);\r\n return response.data.data;\r\n}\r\n\r\n"],"names":["generateConclusion","requestId","apiClient","finalizeConclusion","finalRemark","getConclusion"],"mappings":"6RAwBA,eAAsBA,EAAmBC,EAMtC,CAED,OADiB,MAAMC,EAAU,KAAK,gBAAgBD,CAAS,WAAW,GAC1D,KAAK,IACvB,CAaA,eAAsBE,EAAmBF,EAAmBG,EAMzD,CAED,OADiB,MAAMF,EAAU,KAAK,gBAAgBD,CAAS,YAAa,CAAE,YAAAG,EAAa,GAC3E,KAAK,IACvB,CAKA,eAAsBC,EAAcJ,EAA8C,CAEhF,OADiB,MAAMC,EAAU,IAAI,gBAAgBD,CAAS,EAAE,GAChD,KAAK,IACvB"}
{"version":3,"file":"conclusionApi-t9LwHY2s.js","sources":["../../src/services/conclusionApi.ts"],"sourcesContent":["import apiClient from './authApi';\r\n\r\nexport interface ConclusionRemark {\r\n conclusionId: string;\r\n requestId: string;\r\n aiGeneratedRemark: string | null;\r\n aiModelUsed: string | null;\r\n aiConfidenceScore: number | null;\r\n finalRemark: string | null;\r\n editedBy: string | null;\r\n isEdited: boolean;\r\n editCount: number;\r\n approvalSummary: any;\r\n documentSummary: any;\r\n keyDiscussionPoints: string[];\r\n generatedAt: string | null;\r\n finalizedAt: string | null;\r\n createdAt: string;\r\n updatedAt: string;\r\n}\r\n\r\n/**\r\n * Generate AI-powered conclusion remark\r\n */\r\nexport async function generateConclusion(requestId: string): Promise<{\r\n conclusionId: string;\r\n aiGeneratedRemark: string;\r\n keyDiscussionPoints: string[];\r\n confidence: number;\r\n generatedAt: string;\r\n}> {\r\n const response = await apiClient.post(`/conclusions/${requestId}/generate`);\r\n return response.data.data;\r\n}\r\n\r\n/**\r\n * Update conclusion remark (edit by initiator)\r\n */\r\nexport async function updateConclusion(requestId: string, finalRemark: string): Promise<ConclusionRemark> {\r\n const response = await apiClient.put(`/conclusions/${requestId}`, { finalRemark });\r\n return response.data.data;\r\n}\r\n\r\n/**\r\n * Finalize conclusion and close request\r\n */\r\nexport async function finalizeConclusion(requestId: string, finalRemark: string): Promise<{\r\n conclusionId: string;\r\n requestNumber: string;\r\n status: string;\r\n finalRemark: string;\r\n finalizedAt: string;\r\n}> {\r\n const response = await apiClient.post(`/conclusions/${requestId}/finalize`, { finalRemark });\r\n return response.data.data;\r\n}\r\n\r\n/**\r\n * Get conclusion for a request\r\n * Returns null if conclusion doesn't exist (404) instead of throwing error\r\n */\r\nexport async function getConclusion(requestId: string): Promise<ConclusionRemark | null> {\r\n try {\r\n const response = await apiClient.get(`/conclusions/${requestId}`);\r\n return response.data.data;\r\n } catch (error: any) {\r\n // Handle 404 gracefully - conclusion doesn't exist yet, which is normal\r\n if (error.response?.status === 404) {\r\n return null;\r\n }\r\n // Re-throw other errors\r\n throw error;\r\n }\r\n}\r\n\r\n"],"names":["generateConclusion","requestId","apiClient","finalizeConclusion","finalRemark","getConclusion","error","_a"],"mappings":"6RAwBA,eAAsBA,EAAmBC,EAMtC,CAED,OADiB,MAAMC,EAAU,KAAK,gBAAgBD,CAAS,WAAW,GAC1D,KAAK,IACvB,CAaA,eAAsBE,EAAmBF,EAAmBG,EAMzD,CAED,OADiB,MAAMF,EAAU,KAAK,gBAAgBD,CAAS,YAAa,CAAE,YAAAG,EAAa,GAC3E,KAAK,IACvB,CAMA,eAAsBC,EAAcJ,EAAqD,OACvF,GAAI,CAEF,OADiB,MAAMC,EAAU,IAAI,gBAAgBD,CAAS,EAAE,GAChD,KAAK,IACvB,OAASK,EAAY,CAEnB,KAAIC,EAAAD,EAAM,WAAN,YAAAC,EAAgB,UAAW,IAC7B,OAAO,KAGT,MAAMD,CACR,CACF"}

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

Binary file not shown (image, 1.1 MiB).

View File

@ -1,2 +0,0 @@
import{g as s}from"./index-C331nI1Q.js";import"./radix-vendor-DIkYAdWy.js";import"./charts-vendor-Bme4E5cb.js";import"./utils-vendor-DHm03ykU.js";import"./ui-vendor-CdaLA-IN.js";import"./socket-vendor-TjCxX7sJ.js";import"./redux-vendor-tbZCm13o.js";import"./router-vendor-AvM4PHvP.js";function R(o){const{requestId:e,status:t,request:a,navigate:r}=o;if((t==null?void 0:t.toLowerCase())==="draft"||t==="DRAFT"){r(`/edit-request/${e}`);return}const i=s(e);r(i)}export{R as navigateToRequest};
//# sourceMappingURL=requestNavigation-DAAuTKQF.js.map

View File

@ -1 +0,0 @@
{"version":3,"file":"requestNavigation-DAAuTKQF.js","sources":["../../src/utils/requestNavigation.ts"],"sourcesContent":["/**\r\n * Global Request Navigation Utility\r\n * \r\n * Centralized navigation logic for request-related routes.\r\n * This utility decides where to navigate when clicking on request cards\r\n * from anywhere in the application.\r\n * \r\n * Features:\r\n * - Single point of navigation logic\r\n * - Handles draft vs active requests\r\n * - Supports different flow types (CUSTOM, DEALER_CLAIM)\r\n * - Type-safe navigation\r\n */\r\n\r\nimport { NavigateFunction } from 'react-router-dom';\r\nimport { getRequestDetailRoute, RequestFlowType } from './requestTypeUtils';\r\n\r\nexport interface RequestNavigationOptions {\r\n requestId: string;\r\n requestTitle?: string;\r\n status?: string;\r\n request?: any; // Full request object if available\r\n navigate: NavigateFunction;\r\n}\r\n\r\n/**\r\n * Navigate to the appropriate request detail page based on request type\r\n * \r\n * This is the single point of navigation for all request cards.\r\n * It handles:\r\n * - Draft requests (navigate to edit)\r\n * - Different flow types (CUSTOM, DEALER_CLAIM)\r\n * - Status-based routing\r\n */\r\nexport function navigateToRequest(options: RequestNavigationOptions): void {\r\n const { requestId, status, request, navigate } = options;\r\n\r\n // Check if request is a draft - if so, route to edit form instead of detail view\r\n const isDraft = status?.toLowerCase() === 'draft' || status === 'DRAFT';\r\n if (isDraft) {\r\n navigate(`/edit-request/${requestId}`);\r\n return;\r\n }\r\n\r\n // Determine the appropriate route based on request type\r\n const route = getRequestDetailRoute(requestId, request);\r\n navigate(route);\r\n}\r\n\r\n/**\r\n * Navigate to create a new request based on flow type\r\n */\r\nexport function navigateToCreateRequest(\r\n navigate: NavigateFunction,\r\n flowType: RequestFlowType = 'CUSTOM'\r\n): void {\r\n const route = flowType === 'DEALER_CLAIM' \r\n ? '/claim-management' \r\n : '/new-request';\r\n navigate(route);\r\n}\r\n\r\n/**\r\n * Create a navigation handler function for request cards\r\n * This can be used directly in onClick handlers\r\n */\r\nexport function createRequestNavigationHandler(\r\n navigate: NavigateFunction\r\n) {\r\n return (requestId: string, requestTitle?: string, status?: string, request?: any) => {\r\n navigateToRequest({\r\n requestId,\r\n requestTitle,\r\n status,\r\n request,\r\n navigate,\r\n });\r\n };\r\n}\r\n"],"names":["navigateToRequest","options","requestId","status","request","navigate","route","getRequestDetailRoute"],"mappings":"6RAkCO,SAASA,EAAkBC,EAAyC,CACzE,KAAM,CAAE,UAAAC,EAAW,OAAAC,EAAQ,QAAAC,EAAS,SAAAC,GAAaJ,EAIjD,IADgBE,GAAA,YAAAA,EAAQ,iBAAkB,SAAWA,IAAW,QACnD,CACXE,EAAS,iBAAiBH,CAAS,EAAE,EACrC,MACF,CAGA,MAAMI,EAAQC,EAAsBL,CAAkB,EACtDG,EAASC,CAAK,CAChB"}

View File

@ -52,7 +52,7 @@
transition: transform 0.2s ease;
}
</style>
<script type="module" crossorigin src="/assets/index-C331nI1Q.js"></script>
<script type="module" crossorigin src="/assets/index-BtWUMn8R.js"></script>
<link rel="modulepreload" crossorigin href="/assets/charts-vendor-Bme4E5cb.js">
<link rel="modulepreload" crossorigin href="/assets/radix-vendor-DIkYAdWy.js">
<link rel="modulepreload" crossorigin href="/assets/utils-vendor-DHm03ykU.js">
@ -60,7 +60,7 @@
<link rel="modulepreload" crossorigin href="/assets/socket-vendor-TjCxX7sJ.js">
<link rel="modulepreload" crossorigin href="/assets/redux-vendor-tbZCm13o.js">
<link rel="modulepreload" crossorigin href="/assets/router-vendor-AvM4PHvP.js">
<link rel="stylesheet" crossorigin href="/assets/index-CxZ05Q0s.css">
<link rel="stylesheet" crossorigin href="/assets/index-Cki_huzr.css">
</head>
<body>
<div id="root"></div>

View File

@ -15,15 +15,16 @@
## Overview
The AI Conclusion Remark Generation feature automatically generates professional, context-aware conclusion remarks for workflow requests that have been approved or rejected. This feature uses AI providers (Claude, OpenAI, or Gemini) to analyze the entire request lifecycle and create a comprehensive summary suitable for permanent archiving.
The AI Conclusion Remark Generation feature automatically generates professional, context-aware conclusion remarks for workflow requests that have been approved or rejected. This feature uses **Google Cloud Vertex AI Gemini** to analyze the entire request lifecycle and create a comprehensive summary suitable for permanent archiving.
### Key Features
- **Multi-Provider Support**: Supports Claude (Anthropic), OpenAI (GPT-4), and Google Gemini
- **Vertex AI Integration**: Uses Google Cloud Vertex AI Gemini with service account authentication
- **Context-Aware**: Analyzes approval flow, work notes, documents, and activities
- **Configurable**: Admin-configurable max length, provider selection, and enable/disable
- **Configurable**: Admin-configurable max length, model selection, and enable/disable
- **Automatic Generation**: Can be triggered automatically when a request is approved/rejected
- **Manual Generation**: Users can regenerate conclusions on demand
- **Editable**: Generated remarks can be edited before finalization
- **Enterprise Security**: Uses same service account credentials as Google Cloud Storage
### Use Cases
1. **Automatic Generation**: When the final approver approves/rejects a request, an AI conclusion is generated in the background
@ -74,10 +75,10 @@ The AI Conclusion Remark Generation feature automatically generates professional
│ │ │
│ ▼ │
│ ┌──────────────────────────────────────────────────────┐ │
│ │ AI Providers (Claude/OpenAI/Gemini) │ │
│ │ - ClaudeProvider │ │
│ │ - OpenAIProvider │ │
│ │ - GeminiProvider │ │
│ │ Vertex AI Gemini (Google Cloud) │ │
│ │ - VertexAI Client │ │
│ │ - Service Account Authentication │ │
│ │ - Gemini Models (gemini-2.5-flash, etc.) │ │
│ └──────────────────────────────────────────────────────┘ │
│ │ │
│ ▼ │
@ -114,22 +115,18 @@ The AI Conclusion Remark Generation feature automatically generates professional
### Environment Variables
```bash
# AI Provider Selection (claude, openai, gemini)
AI_PROVIDER=claude
# Google Cloud Configuration (required - same as GCS)
GCP_PROJECT_ID=re-platform-workflow-dealer
GCP_KEY_FILE=./credentials/re-platform-workflow-dealer-3d5738fcc1f9.json
# Claude Configuration
CLAUDE_API_KEY=your_claude_api_key
CLAUDE_MODEL=claude-sonnet-4-20250514
# OpenAI Configuration
OPENAI_API_KEY=your_openai_api_key
OPENAI_MODEL=gpt-4o
# Gemini Configuration
GEMINI_API_KEY=your_gemini_api_key
GEMINI_MODEL=gemini-2.0-flash-lite
# Vertex AI Configuration (optional - defaults provided)
VERTEX_AI_MODEL=gemini-2.5-flash
VERTEX_AI_LOCATION=asia-south1
AI_ENABLED=true
```
**Note**: The service account key file is the same one used for Google Cloud Storage, ensuring consistent authentication across services.
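For orientation, a minimal sketch of how these variables could be wired into the `@google-cloud/vertexai` client; the helper name and option shape below are illustrative and not taken from the actual AI service code:

```typescript
// Illustrative sketch only — not the project's AI service implementation.
// Assumes the @google-cloud/vertexai SDK and the environment variables documented above.
import { VertexAI } from '@google-cloud/vertexai';

export function createVertexModel() {
  const vertexAI = new VertexAI({
    project: process.env.GCP_PROJECT_ID!,
    location: process.env.VERTEX_AI_LOCATION || 'asia-south1',
    googleAuthOptions: {
      keyFile: process.env.GCP_KEY_FILE // same service account key used for GCS
    }
  });

  return vertexAI.getGenerativeModel({
    model: process.env.VERTEX_AI_MODEL || 'gemini-2.5-flash'
  });
}
```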
### Admin Configuration (Database)
The system reads configuration from the `system_config` table. Key settings:
@ -138,21 +135,29 @@ The system reads configuration from the `system_config` table. Key settings:
|------------|---------|-------------|
| `AI_ENABLED` | `true` | Enable/disable all AI features |
| `AI_REMARK_GENERATION_ENABLED` | `true` | Enable/disable conclusion generation |
| `AI_PROVIDER` | `claude` | Preferred AI provider (claude, openai, gemini) |
| `AI_MAX_REMARK_LENGTH` | `2000` | Maximum characters for generated remarks |
| `CLAUDE_API_KEY` | - | Claude API key (if using Claude) |
| `CLAUDE_MODEL` | `claude-sonnet-4-20250514` | Claude model name |
| `OPENAI_API_KEY` | - | OpenAI API key (if using OpenAI) |
| `OPENAI_MODEL` | `gpt-4o` | OpenAI model name |
| `GEMINI_API_KEY` | - | Gemini API key (if using Gemini) |
| `GEMINI_MODEL` | `gemini-2.0-flash-lite` | Gemini model name |
| `VERTEX_AI_MODEL` | `gemini-2.5-flash` | Vertex AI Gemini model name |
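As an illustration of how these database-backed settings might be consumed at runtime (the `getConfigValue`/`getConfigNumber` helpers appear elsewhere in this changeset, but their exact signatures are assumed here):

```typescript
// Sketch only — helper signatures are assumed, not copied from configReader.service.
import { getConfigValue, getConfigNumber } from '@services/configReader.service';

export async function isConclusionGenerationEnabled(): Promise<boolean> {
  const aiEnabled = await getConfigValue('AI_ENABLED', 'true');
  const remarkEnabled = await getConfigValue('AI_REMARK_GENERATION_ENABLED', 'true');
  return aiEnabled === 'true' && remarkEnabled === 'true';
}

export async function getMaxRemarkLength(): Promise<number> {
  // Falls back to the documented default of 2000 characters.
  return getConfigNumber('AI_MAX_REMARK_LENGTH', 2000);
}
```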
### Provider Priority
### Available Models
1. **Preferred Provider**: Set via `AI_PROVIDER` config
2. **Fallback Chain**: If preferred fails, tries:
- Claude → OpenAI → Gemini
3. **Environment Fallback**: If database config fails, uses environment variables
| Model Name | Description | Use Case |
|------------|-------------|----------|
| `gemini-2.5-flash` | Latest fast model (default) | General purpose, quick responses |
| `gemini-1.5-flash` | Previous fast model | General purpose |
| `gemini-1.5-pro` | Advanced model | Complex tasks, better quality |
| `gemini-1.5-pro-latest` | Latest Pro version | Best quality, complex reasoning |
### Supported Regions
| Region Code | Location | Availability |
|-------------|----------|--------------|
| `us-central1` | Iowa, USA | ✅ Default |
| `us-east1` | South Carolina, USA | ✅ |
| `us-west1` | Oregon, USA | ✅ |
| `europe-west1` | Belgium | ✅ |
| `asia-south1` | Mumbai, India | ✅ (Current default) |
**Note**: Model and region are configured via environment variables, not database config.
---
@ -186,7 +191,7 @@ Authorization: Bearer <token>
],
"confidence": 0.85,
"generatedAt": "2025-01-15T10:30:00Z",
"provider": "Claude (Anthropic)"
"provider": "Vertex AI (Gemini)"
}
}
```
@ -254,7 +259,7 @@ Content-Type: application/json
"finalRemark": "Finalized text...",
"isEdited": true,
"editCount": 2,
"aiModelUsed": "Claude (Anthropic)",
"aiModelUsed": "Vertex AI (Gemini)",
"aiConfidenceScore": 0.85,
"keyDiscussionPoints": ["Point 1", "Point 2"],
"generatedAt": "2025-01-15T10:30:00Z",
@ -324,9 +329,9 @@ interface ConclusionContext {
- Sets target word count based on `AI_MAX_REMARK_LENGTH`
3. **AI Generation**:
- Sends prompt to selected AI provider
- Receives generated text
- Validates length (trims if exceeds max)
- Sends prompt to Vertex AI Gemini
- Receives generated text (up to 4096 tokens)
- Preserves full AI response (no truncation)
- Extracts key points
- Calculates confidence score
@ -407,13 +412,24 @@ Write a brief, professional conclusion (approximately X words, max Y characters)
4. **Tone Guidelines**: Emphasizes natural, professional, archival-quality writing
5. **Context Awareness**: Includes all relevant data (approvals, notes, documents, activities)
### Provider-Specific Settings
### Vertex AI Settings
| Provider | Model | Max Tokens | Temperature | Notes |
|----------|-------|------------|-------------|-------|
| Claude | claude-sonnet-4-20250514 | 2048 | 0.3 | Best for longer, detailed conclusions |
| OpenAI | gpt-4o | 1024 | 0.3 | Balanced performance |
| Gemini | gemini-2.0-flash-lite | - | 0.3 | Fast and cost-effective |
| Setting | Value | Description |
|---------|-------|-------------|
| Model | `gemini-2.5-flash` (default) | Fast, efficient model for conclusion generation |
| Max Output Tokens | `4096` | Maximum tokens in response (technical limit) |
| Character Limit | `2000` (configurable) | Actual limit enforced via prompt (`AI_MAX_REMARK_LENGTH`) |
| Temperature | `0.3` | Lower temperature for more focused, consistent output |
| Location | `asia-south1` (default) | Google Cloud region for API calls |
| Authentication | Service Account | Uses same credentials as Google Cloud Storage |
**Note on Token vs Character Limits:**
- **4096 tokens** is the technical maximum Vertex AI can generate
- **2000 characters** (default) is the actual limit enforced by the prompt
- Token-to-character conversion: ~1 token ≈ 3-4 characters
- With HTML tags: 4096 tokens ≈ 12,000-16,000 characters (including tags)
- The AI is instructed to stay within the character limit, not the token limit
- The token limit provides headroom but the character limit is what matters for storage
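Put together, the generation call could look like the following sketch (illustrative only; the prompt builder is a hypothetical stand-in for the prompt construction described earlier):

```typescript
// Illustrative sketch of a Vertex AI call using the settings from the table above.
import { VertexAI } from '@google-cloud/vertexai';

// Hypothetical stand-in for the real prompt builder in the AI service.
const buildConclusionPrompt = (context: unknown): string =>
  `Write a brief, professional conclusion for: ${JSON.stringify(context)}`;

export async function generateConclusionRemark(context: unknown): Promise<string> {
  const vertexAI = new VertexAI({
    project: process.env.GCP_PROJECT_ID!,
    location: process.env.VERTEX_AI_LOCATION || 'asia-south1'
  });

  const model = vertexAI.getGenerativeModel({
    model: process.env.VERTEX_AI_MODEL || 'gemini-2.5-flash',
    generationConfig: {
      maxOutputTokens: 4096, // technical ceiling; the prompt enforces the character limit
      temperature: 0.3       // focused, consistent output
    }
  });

  const result = await model.generateContent(buildConclusionPrompt(context));

  // The full response is preserved (no truncation), as described above.
  return result.response.candidates?.[0]?.content?.parts?.[0]?.text ?? '';
}
```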
---
@ -423,15 +439,21 @@ Write a brief, professional conclusion (approximately X words, max Y characters)
1. **No AI Provider Available**
```
Error: AI features are currently unavailable. Please configure an AI provider...
Error: AI features are currently unavailable. Please verify Vertex AI configuration and service account credentials.
```
**Solution**: Configure API keys in admin panel or environment variables
**Solution**:
- Verify service account key file exists at path specified in `GCP_KEY_FILE`
- Ensure Vertex AI API is enabled in Google Cloud Console
- Check service account has `Vertex AI User` role (`roles/aiplatform.user`)
2. **Provider API Error**
2. **Vertex AI API Error**
```
Error: AI generation failed (Claude): API rate limit exceeded
Error: AI generation failed (Vertex AI): Model was not found or your project does not have access
```
**Solution**: Check API key validity, rate limits, and provider status
**Solution**:
- Verify model name is correct (e.g., `gemini-2.5-flash`)
- Ensure model is available in selected region
- Check Vertex AI API is enabled in Google Cloud Console
3. **Request Not Found**
```
@ -453,10 +475,10 @@ Write a brief, professional conclusion (approximately X words, max Y characters)
### Error Recovery
- **Automatic Fallback**: If preferred provider fails, system tries fallback providers
- **Graceful Degradation**: If AI generation fails, user can write conclusion manually
- **Retry Logic**: Manual regeneration is always available
- **Logging**: All errors are logged with context for debugging
- **Token Limit Handling**: If response hits token limit, full response is preserved (no truncation)
---
@ -472,14 +494,17 @@ Write a brief, professional conclusion (approximately X words, max Y characters)
### For Administrators
1. **API Key Management**: Store API keys securely in database or environment variables
2. **Provider Selection**: Choose provider based on:
- **Claude**: Best quality, higher cost
- **OpenAI**: Balanced quality/cost
- **Gemini**: Fast, cost-effective
1. **Service Account Setup**:
- Ensure service account key file exists and is accessible
- Verify service account has `Vertex AI User` role
- Use same credentials as Google Cloud Storage for consistency
2. **Model Selection**: Choose model based on needs:
- **gemini-2.5-flash**: Fast, cost-effective (default, recommended)
- **gemini-1.5-pro**: Better quality for complex requests
3. **Length Configuration**: Set `AI_MAX_REMARK_LENGTH` based on your archival needs
4. **Monitoring**: Monitor AI usage and costs through provider dashboards
4. **Monitoring**: Monitor AI usage and costs through Google Cloud Console
5. **Testing**: Test with sample requests before enabling in production
6. **Region Selection**: Choose region closest to your deployment for lower latency
### For Users
@ -499,8 +524,10 @@ Write a brief, professional conclusion (approximately X words, max Y characters)
**Diagnosis**:
1. Check `AI_ENABLED` config value
2. Check `AI_REMARK_GENERATION_ENABLED` config value
3. Verify API keys are configured
4. Check provider initialization logs
3. Verify service account key file exists and is accessible
4. Check Vertex AI API is enabled in Google Cloud Console
5. Verify service account has `Vertex AI User` role
6. Check provider initialization logs
**Solution**:
```bash
@ -509,6 +536,14 @@ tail -f logs/app.log | grep "AI Service"
# Verify config
SELECT * FROM system_config WHERE config_key LIKE 'AI_%';
# Verify service account key file
ls -la credentials/re-platform-workflow-dealer-3d5738fcc1f9.json
# Check environment variables
echo $GCP_PROJECT_ID
echo $GCP_KEY_FILE
echo $VERTEX_AI_MODEL
```
### Issue: Generated Text Too Long/Short
@ -518,7 +553,8 @@ SELECT * FROM system_config WHERE config_key LIKE 'AI_%';
**Solution**:
1. Adjust `AI_MAX_REMARK_LENGTH` in admin config
2. Check prompt target word count calculation
3. Verify provider max_tokens setting
3. Note: Vertex AI max output tokens is 4096 (system handles this automatically)
4. AI is instructed to stay within character limit, but full response is preserved
### Issue: Poor Quality Conclusions
@ -527,37 +563,50 @@ SELECT * FROM system_config WHERE config_key LIKE 'AI_%';
**Solution**:
1. Verify context data is complete (approvals, notes, documents)
2. Check prompt includes all relevant information
3. Try different provider (Claude generally produces better quality)
4. Adjust temperature if needed (lower = more focused)
3. Try different model (e.g., `gemini-1.5-pro` for better quality)
4. Temperature is set to 0.3 for focused output (can be adjusted in code if needed)
### Issue: Slow Generation
**Symptoms**: AI generation takes too long
**Solution**:
1. Check provider API status
1. Check Vertex AI API status in Google Cloud Console
2. Verify network connectivity
3. Consider using faster provider (Gemini)
4. Check for rate limiting
3. Consider using `gemini-2.5-flash` model (fastest option)
4. Check for rate limiting in Google Cloud Console
5. Verify region selection (closer region = lower latency)
### Issue: Provider Not Initializing
### Issue: Vertex AI Not Initializing
**Symptoms**: Provider shows as "None" in logs
**Symptoms**: Provider shows as "None" or initialization fails in logs
**Diagnosis**:
1. Check API key is valid
2. Verify SDK package is installed
3. Check environment variables
1. Check service account key file exists and is valid
2. Verify `@google-cloud/vertexai` package is installed
3. Check environment variables (`GCP_PROJECT_ID`, `GCP_KEY_FILE`)
4. Verify Vertex AI API is enabled in Google Cloud Console
5. Check service account permissions
**Solution**:
```bash
# Install missing SDK
npm install @anthropic-ai/sdk # For Claude
npm install openai # For OpenAI
npm install @google/generative-ai # For Gemini
npm install @google-cloud/vertexai
# Verify API key
echo $CLAUDE_API_KEY # Should show key
# Verify service account key file
ls -la credentials/re-platform-workflow-dealer-3d5738fcc1f9.json
# Verify environment variables
echo $GCP_PROJECT_ID
echo $GCP_KEY_FILE
echo $VERTEX_AI_MODEL
echo $VERTEX_AI_LOCATION
# Check Google Cloud Console
# 1. Go to APIs & Services > Library
# 2. Search for "Vertex AI API"
# 3. Ensure it's enabled
# 4. Verify service account has "Vertex AI User" role
```
---
@ -644,12 +693,13 @@ reference.
## Version History
- **v1.0.0** (2025-01-15): Initial implementation
- Multi-provider support (Claude, OpenAI, Gemini)
- Automatic and manual generation
- TAT risk integration
- Key points extraction
- Confidence scoring
- **v2.0.0**: Vertex AI Migration
- Migrated to Google Cloud Vertex AI Gemini
- Service account authentication (same as GCS)
- Removed multi-provider support
- Increased max output tokens to 4096
- Full response preservation (no truncation)
- HTML format support for rich text editor
---
@ -659,13 +709,18 @@ For issues or questions:
1. Check logs: `logs/app.log`
2. Review admin configuration panel
3. Contact development team
4. Refer to provider documentation:
- [Claude API Docs](https://docs.anthropic.com)
- [OpenAI API Docs](https://platform.openai.com/docs)
- [Gemini API Docs](https://ai.google.dev/docs)
4. Refer to Vertex AI documentation:
- [Vertex AI Documentation](https://cloud.google.com/vertex-ai/docs)
- [Gemini Models](https://cloud.google.com/vertex-ai/docs/generative-ai/model-reference/gemini)
- [Vertex AI Setup Guide](../VERTEX_AI_INTEGRATION.md)
---
**Last Updated**: January 2025
**Maintained By**: Royal Enfield Development Team
---
## Related Documentation
- [Vertex AI Integration Guide](./VERTEX_AI_INTEGRATION.md) - Detailed setup and migration information

View File

@ -1,6 +1,7 @@
import { Request, Response } from 'express';
import { Holiday, HolidayType } from '@models/Holiday';
import { holidayService } from '@services/holiday.service';
import { activityTypeService } from '@services/activityType.service';
import { sequelize } from '@config/database';
import { QueryTypes, Op } from 'sequelize';
import logger from '@utils/logger';
@ -878,3 +879,174 @@ export const assignRoleByEmail = async (req: Request, res: Response): Promise<vo
}
};
// ==================== Activity Type Management Routes ====================
/**
* Get all activity types (optionally filtered by active status)
*/
export const getAllActivityTypes = async (req: Request, res: Response): Promise<void> => {
try {
const { activeOnly } = req.query;
const activeOnlyBool = activeOnly === 'true';
const activityTypes = await activityTypeService.getAllActivityTypes(activeOnlyBool);
res.json({
success: true,
data: activityTypes,
count: activityTypes.length
});
} catch (error: any) {
logger.error('[Admin] Error fetching activity types:', error);
res.status(500).json({
success: false,
error: error.message || 'Failed to fetch activity types'
});
}
};
/**
* Get a single activity type by ID
*/
export const getActivityTypeById = async (req: Request, res: Response): Promise<void> => {
try {
const { activityTypeId } = req.params;
const activityType = await activityTypeService.getActivityTypeById(activityTypeId);
if (!activityType) {
res.status(404).json({
success: false,
error: 'Activity type not found'
});
return;
}
res.json({
success: true,
data: activityType
});
} catch (error: any) {
logger.error('[Admin] Error fetching activity type:', error);
res.status(500).json({
success: false,
error: error.message || 'Failed to fetch activity type'
});
}
};
/**
* Create a new activity type
*/
export const createActivityType = async (req: Request, res: Response): Promise<void> => {
try {
const userId = req.user?.userId;
if (!userId) {
res.status(401).json({
success: false,
error: 'User not authenticated'
});
return;
}
const {
title,
itemCode,
taxationType,
sapRefNo
} = req.body;
// Validate required fields
if (!title) {
res.status(400).json({
success: false,
error: 'Activity type title is required'
});
return;
}
const activityType = await activityTypeService.createActivityType({
title,
itemCode: itemCode || null,
taxationType: taxationType || null,
sapRefNo: sapRefNo || null,
createdBy: userId
});
res.status(201).json({
success: true,
message: 'Activity type created successfully',
data: activityType
});
} catch (error: any) {
logger.error('[Admin] Error creating activity type:', error);
res.status(500).json({
success: false,
error: error.message || 'Failed to create activity type'
});
}
};
/**
* Update an activity type
*/
export const updateActivityType = async (req: Request, res: Response): Promise<void> => {
try {
const userId = req.user?.userId;
if (!userId) {
res.status(401).json({
success: false,
error: 'User not authenticated'
});
return;
}
const { activityTypeId } = req.params;
const updates = req.body;
const activityType = await activityTypeService.updateActivityType(activityTypeId, updates, userId);
if (!activityType) {
res.status(404).json({
success: false,
error: 'Activity type not found'
});
return;
}
res.json({
success: true,
message: 'Activity type updated successfully',
data: activityType
});
} catch (error: any) {
logger.error('[Admin] Error updating activity type:', error);
res.status(500).json({
success: false,
error: error.message || 'Failed to update activity type'
});
}
};
/**
* Delete (deactivate) an activity type
*/
export const deleteActivityType = async (req: Request, res: Response): Promise<void> => {
try {
const { activityTypeId } = req.params;
await activityTypeService.deleteActivityType(activityTypeId);
res.json({
success: true,
message: 'Activity type deleted successfully'
});
} catch (error: any) {
logger.error('[Admin] Error deleting activity type:', error);
res.status(500).json({
success: false,
error: error.message || 'Failed to delete activity type'
});
}
};

View File

@ -79,7 +79,7 @@ export class ConclusionController {
const workNotes = await WorkNote.findAll({
where: { requestId },
order: [['createdAt', 'ASC']],
limit: 20 // Last 20 work notes
limit: 20 // Last 20 work notes - keep full context for better conclusions
});
const documents = await Document.findAll({
@ -90,7 +90,7 @@ export class ConclusionController {
const activities = await Activity.findAll({
where: { requestId },
order: [['createdAt', 'ASC']],
limit: 50 // Last 50 activities
limit: 50 // Last 50 activities - keep full context for better conclusions
});
// Build context object

View File

@ -5,9 +5,14 @@ import fs from 'fs';
import { Document } from '@models/Document';
import { User } from '@models/User';
import { WorkflowRequest } from '@models/WorkflowRequest';
import { Participant } from '@models/Participant';
import { ApprovalLevel } from '@models/ApprovalLevel';
import { Op } from 'sequelize';
import { ResponseHandler } from '@utils/responseHandler';
import { activityService } from '@services/activity.service';
import { gcsStorageService } from '@services/gcsStorage.service';
import { emailNotificationService } from '@services/emailNotification.service';
import { notificationService } from '@services/notification.service';
import type { AuthenticatedRequest } from '../types/express';
import { getRequestMetadata } from '@utils/requestUtils';
import { getConfigNumber, getConfigValue } from '@services/configReader.service';
@ -291,6 +296,205 @@ export class DocumentController {
userAgent: requestMeta.userAgent
});
// Send notifications for additional document added
try {
const initiatorId = (workflowRequest as any).initiatorId || (workflowRequest as any).initiator_id;
const isInitiator = userId === initiatorId;
// Get all participants (spectators)
const spectators = await Participant.findAll({
where: {
requestId,
participantType: 'SPECTATOR'
},
include: [{
model: User,
as: 'user',
attributes: ['userId', 'email', 'displayName']
}]
});
// Get current approver (pending or in-progress approval level)
const currentApprovalLevel = await ApprovalLevel.findOne({
where: {
requestId,
status: { [Op.in]: ['PENDING', 'IN_PROGRESS'] }
},
order: [['levelNumber', 'ASC']],
include: [{
model: User,
as: 'approver',
attributes: ['userId', 'email', 'displayName']
}]
});
logWithContext('info', 'Current approver lookup for document notification', {
requestId,
currentApprovalLevelFound: !!currentApprovalLevel,
approverUserId: currentApprovalLevel ? ((currentApprovalLevel as any).approver || (currentApprovalLevel as any).Approver)?.userId : null,
isInitiator
});
// Determine who to notify based on who uploaded
const recipientsToNotify: Array<{ userId: string; email: string; displayName: string }> = [];
if (isInitiator) {
// Initiator added → notify spectators and current approver
spectators.forEach((spectator: any) => {
const spectatorUser = spectator.user || spectator.User;
if (spectatorUser && spectatorUser.userId !== userId) {
recipientsToNotify.push({
userId: spectatorUser.userId,
email: spectatorUser.email,
displayName: spectatorUser.displayName || spectatorUser.email
});
}
});
if (currentApprovalLevel) {
const approverUser = (currentApprovalLevel as any).approver || (currentApprovalLevel as any).Approver;
if (approverUser && approverUser.userId !== userId) {
recipientsToNotify.push({
userId: approverUser.userId,
email: approverUser.email,
displayName: approverUser.displayName || approverUser.email
});
}
}
} else {
// Check if uploader is a spectator
const uploaderParticipant = await Participant.findOne({
where: {
requestId,
userId,
participantType: 'SPECTATOR'
}
});
if (uploaderParticipant) {
// Spectator added → notify initiator and current approver
const initiator = await User.findByPk(initiatorId);
if (initiator) {
const initiatorData = initiator.toJSON();
if (initiatorData.userId !== userId) {
recipientsToNotify.push({
userId: initiatorData.userId,
email: initiatorData.email,
displayName: initiatorData.displayName || initiatorData.email
});
}
}
if (currentApprovalLevel) {
const approverUser = (currentApprovalLevel as any).approver || (currentApprovalLevel as any).Approver;
if (approverUser && approverUser.userId !== userId) {
recipientsToNotify.push({
userId: approverUser.userId,
email: approverUser.email,
displayName: approverUser.displayName || approverUser.email
});
}
}
} else {
// Approver added → notify initiator and spectators
const initiator = await User.findByPk(initiatorId);
if (initiator) {
const initiatorData = initiator.toJSON();
if (initiatorData.userId !== userId) {
recipientsToNotify.push({
userId: initiatorData.userId,
email: initiatorData.email,
displayName: initiatorData.displayName || initiatorData.email
});
}
}
spectators.forEach((spectator: any) => {
const spectatorUser = spectator.user || spectator.User;
if (spectatorUser && spectatorUser.userId !== userId) {
recipientsToNotify.push({
userId: spectatorUser.userId,
email: spectatorUser.email,
displayName: spectatorUser.displayName || spectatorUser.email
});
}
});
}
}
// Send notifications (email, in-app, and web-push)
const requestData = {
requestNumber: requestNumber,
requestId: requestId,
title: (workflowRequest as any).title || 'Request'
};
// Prepare user IDs for in-app and web-push notifications
const recipientUserIds = recipientsToNotify.map(r => r.userId);
// Send in-app and web-push notifications
if (recipientUserIds.length > 0) {
try {
await notificationService.sendToUsers(
recipientUserIds,
{
title: 'Additional Document Added',
body: `${uploaderName} added "${file.originalname}" to ${requestNumber}`,
requestId,
requestNumber,
url: `/request/${requestNumber}`,
type: 'document_added',
priority: 'MEDIUM',
actionRequired: false,
metadata: {
documentName: file.originalname,
fileSize: file.size,
addedByName: uploaderName,
source: 'Documents Tab'
}
}
);
logWithContext('info', 'In-app and web-push notifications sent for additional document', {
requestId,
documentName: file.originalname,
recipientsCount: recipientUserIds.length
});
} catch (notifyError) {
logWithContext('error', 'Failed to send in-app/web-push notifications for additional document', {
requestId,
error: notifyError instanceof Error ? notifyError.message : 'Unknown error'
});
}
}
// Send email notifications
for (const recipient of recipientsToNotify) {
await emailNotificationService.sendAdditionalDocumentAdded(
requestData,
recipient,
{
documentName: file.originalname,
fileSize: file.size,
addedByName: uploaderName,
source: 'Documents Tab'
}
);
}
logWithContext('info', 'Additional document notifications sent', {
requestId,
documentName: file.originalname,
recipientsCount: recipientsToNotify.length,
isInitiator
});
} catch (notifyError) {
// Don't fail document upload if notifications fail
logWithContext('error', 'Failed to send additional document notifications', {
requestId,
error: notifyError instanceof Error ? notifyError.message : 'Unknown error'
});
}
ResponseHandler.success(res, doc, 'File uploaded', 201);
} catch (error) {
const message = error instanceof Error ? error.message : 'Unknown error';

View File

@ -0,0 +1,152 @@
/**
* Additional Document Added Email Template
*
* Sent when a document is added to a request by:
* - Initiator → notifies spectators and current approver
* - Spectator → notifies initiator and current approver
* - Approver → notifies initiator and spectators
*/
import { AdditionalDocumentAddedData } from './types';
import { getEmailFooter, getEmailHeader, HeaderStyles, getResponsiveStyles, getEmailContainerStyles } from './helpers';
import { getBrandedHeader } from './branding.config';
export function getAdditionalDocumentAddedEmail(data: AdditionalDocumentAddedData): string {
return `
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0, maximum-scale=1.0, user-scalable=no">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="format-detection" content="telephone=no">
<title>Additional Document Added</title>
${getResponsiveStyles()}
</head>
<body style="margin: 0; padding: 0; font-family: Arial, Helvetica, sans-serif; background-color: #f4f4f4;">
<table role="presentation" style="width: 100%; border-collapse: collapse; background-color: #f4f4f4;" cellpadding="0" cellspacing="0">
<tr>
<td style="padding: 40px 0;">
<table role="presentation" class="email-container" style="${getEmailContainerStyles()}" cellpadding="0" cellspacing="0">
<!-- Header -->
${getEmailHeader(getBrandedHeader({
title: 'Additional Document Added',
...HeaderStyles.info
}))}
<!-- Content -->
<tr>
<td class="email-content">
<p style="margin: 0 0 20px; color: #333333; font-size: 16px; line-height: 1.6;">
Dear <strong style="color: #667eea;">${data.recipientName}</strong>,
</p>
<p style="margin: 0 0 30px; color: #666666; font-size: 16px; line-height: 1.6;">
<strong>${data.addedByName}</strong> has added an additional document to the following request:
</p>
<!-- Request Details Box -->
<table role="presentation" style="width: 100%; border-collapse: collapse; background-color: #f8f9fa; border-radius: 6px; margin-bottom: 30px;" cellpadding="0" cellspacing="0">
<tr>
<td class="detail-box" style="padding: 30px;">
<h2 style="margin: 0 0 25px; color: #333333; font-size: 20px; font-weight: 600;">Request Details</h2>
<table role="presentation" class="detail-table" style="width: 100%; border-collapse: collapse;" cellpadding="0" cellspacing="0">
<tr>
<td class="detail-label" style="padding: 10px 0; color: #666666; font-size: 15px;">
<strong>Request ID:</strong>
</td>
<td style="padding: 10px 0; color: #333333; font-size: 15px;">
${data.requestNumber || data.requestId}
</td>
</tr>
<tr>
<td class="detail-label" style="padding: 10px 0; color: #666666; font-size: 15px;">
<strong>Title:</strong>
</td>
<td style="padding: 10px 0; color: #333333; font-size: 15px;">
${data.requestTitle || 'N/A'}
</td>
</tr>
<tr>
<td class="detail-label" style="padding: 10px 0; color: #666666; font-size: 15px;">
<strong>Document Name:</strong>
</td>
<td style="padding: 10px 0; color: #333333; font-size: 15px;">
${data.documentName}
</td>
</tr>
<tr>
<td class="detail-label" style="padding: 10px 0; color: #666666; font-size: 15px;">
<strong>File Size:</strong>
</td>
<td style="padding: 10px 0; color: #333333; font-size: 15px;">
${data.fileSize}
</td>
</tr>
<tr>
<td class="detail-label" style="padding: 10px 0; color: #666666; font-size: 15px;">
<strong>Added By:</strong>
</td>
<td style="padding: 10px 0; color: #333333; font-size: 15px;">
${data.addedByName}
</td>
</tr>
<tr>
<td class="detail-label" style="padding: 10px 0; color: #666666; font-size: 15px;">
<strong>Added On:</strong>
</td>
<td style="padding: 10px 0; color: #333333; font-size: 15px;">
${data.addedDate} at ${data.addedTime}
</td>
</tr>
${data.source ? `
<tr>
<td class="detail-label" style="padding: 10px 0; color: #666666; font-size: 15px;">
<strong>Source:</strong>
</td>
<td style="padding: 10px 0; color: #333333; font-size: 15px;">
${data.source}
</td>
</tr>
` : ''}
</table>
</td>
</tr>
</table>
<!-- Information Box -->
<div style="padding: 20px; background-color: #e7f3ff; border-left: 4px solid #0066cc; border-radius: 4px; margin-bottom: 30px;">
<h3 style="margin: 0 0 10px; color: #004085; font-size: 16px; font-weight: 600;">What This Means</h3>
<p style="margin: 0; color: #004085; font-size: 14px; line-height: 1.8;">
A new document has been added to this request. Please review the document in the request details page to stay updated with the latest information.
</p>
</div>
<!-- View Details Button -->
<table role="presentation" style="width: 100%; border-collapse: collapse; margin-bottom: 20px;" cellpadding="0" cellspacing="0">
<tr>
<td style="text-align: center;">
<a href="${data.viewDetailsLink}" class="cta-button" style="display: inline-block; padding: 15px 40px; background-color: #1a1a1a; color: #ffffff; text-decoration: none; text-align: center; border-radius: 6px; font-size: 16px; font-weight: 600; box-shadow: 0 2px 4px rgba(0, 0, 0, 0.2); min-width: 200px;">
View Request Details
</a>
</td>
</tr>
</table>
<p style="margin: 0; color: #666666; font-size: 14px; line-height: 1.6; text-align: center;">
Thank you for using the ${data.companyName} Workflow System.
</p>
</td>
</tr>
${getEmailFooter(data.companyName)}
</table>
</td>
</tr>
</table>
</body>
</html>
`;
}

View File

@ -31,7 +31,8 @@ export enum EmailNotificationType {
ACTIVITY_CREATED = 'activity_created',
COMPLETION_DOCUMENTS_SUBMITTED = 'completion_documents_submitted',
EINVOICE_GENERATED = 'einvoice_generated',
CREDIT_NOTE_SENT = 'credit_note_sent'
CREDIT_NOTE_SENT = 'credit_note_sent',
ADDITIONAL_DOCUMENT_ADDED = 'additional_document_added'
}
/**

View File

@ -35,4 +35,5 @@ export { getActivityCreatedEmail } from './activityCreated.template';
export { getCompletionDocumentsSubmittedEmail } from './completionDocumentsSubmitted.template';
export { getEInvoiceGeneratedEmail } from './einvoiceGenerated.template';
export { getCreditNoteSentEmail } from './creditNoteSent.template';
export { getAdditionalDocumentAddedEmail } from './additionalDocumentAdded.template';

View File

@ -236,3 +236,13 @@ export interface DealerProposalRequiredData extends BaseEmailData {
dueDate?: string;
}
export interface AdditionalDocumentAddedData extends BaseEmailData {
documentName: string;
fileSize: string;
addedByName: string;
addedDate: string;
addedTime: string;
requestNumber?: string;
source?: string; // 'Documents Tab' or 'Work Notes'
}

View File

@ -0,0 +1,83 @@
import { QueryInterface, DataTypes } from 'sequelize';
/**
* Migration to create activity_types table for claim management activity types
* Admin can manage activity types similar to holiday management
*/
export async function up(queryInterface: QueryInterface): Promise<void> {
await queryInterface.createTable('activity_types', {
activity_type_id: {
type: DataTypes.UUID,
defaultValue: DataTypes.UUIDV4,
primaryKey: true
},
title: {
type: DataTypes.STRING(200),
allowNull: false,
unique: true,
comment: 'Activity type title/name (e.g., "Riders Mania Claims", "Legal Claims Reimbursement")'
},
item_code: {
type: DataTypes.STRING(100),
allowNull: true,
defaultValue: null,
comment: 'Optional item code for the activity type'
},
taxation_type: {
type: DataTypes.STRING(100),
allowNull: true,
defaultValue: null,
comment: 'Optional taxation type for the activity'
},
sap_ref_no: {
type: DataTypes.STRING(100),
allowNull: true,
defaultValue: null,
comment: 'Optional SAP reference number'
},
is_active: {
type: DataTypes.BOOLEAN,
defaultValue: true,
comment: 'Whether this activity type is currently active/available for selection'
},
created_by: {
type: DataTypes.UUID,
allowNull: false,
references: {
model: 'users',
key: 'user_id'
},
comment: 'Admin user who created this activity type'
},
updated_by: {
type: DataTypes.UUID,
allowNull: true,
references: {
model: 'users',
key: 'user_id'
},
comment: 'Admin user who last updated this activity type'
},
created_at: {
type: DataTypes.DATE,
allowNull: false,
defaultValue: DataTypes.NOW
},
updated_at: {
type: DataTypes.DATE,
allowNull: false,
defaultValue: DataTypes.NOW
}
});
// Indexes for performance
await queryInterface.sequelize.query('CREATE UNIQUE INDEX IF NOT EXISTS "activity_types_title_unique" ON "activity_types" ("title");');
await queryInterface.sequelize.query('CREATE INDEX IF NOT EXISTS "activity_types_is_active" ON "activity_types" ("is_active");');
await queryInterface.sequelize.query('CREATE INDEX IF NOT EXISTS "activity_types_item_code" ON "activity_types" ("item_code");');
await queryInterface.sequelize.query('CREATE INDEX IF NOT EXISTS "activity_types_created_by" ON "activity_types" ("created_by");');
}
export async function down(queryInterface: QueryInterface): Promise<void> {
await queryInterface.dropTable('activity_types');
}

src/models/ActivityType.ts (new file, 127 lines)
View File

@ -0,0 +1,127 @@
import { DataTypes, Model, Optional } from 'sequelize';
import { sequelize } from '@config/database';
import { User } from './User';
interface ActivityTypeAttributes {
activityTypeId: string;
title: string;
itemCode?: string;
taxationType?: string;
sapRefNo?: string;
isActive: boolean;
createdBy: string;
updatedBy?: string;
createdAt: Date;
updatedAt: Date;
}
interface ActivityTypeCreationAttributes extends Optional<ActivityTypeAttributes, 'activityTypeId' | 'itemCode' | 'taxationType' | 'sapRefNo' | 'isActive' | 'updatedBy' | 'createdAt' | 'updatedAt'> {}
class ActivityType extends Model<ActivityTypeAttributes, ActivityTypeCreationAttributes> implements ActivityTypeAttributes {
public activityTypeId!: string;
public title!: string;
public itemCode?: string;
public taxationType?: string;
public sapRefNo?: string;
public isActive!: boolean;
public createdBy!: string;
public updatedBy?: string;
public createdAt!: Date;
public updatedAt!: Date;
// Associations
public creator?: User;
public updater?: User;
}
ActivityType.init(
{
activityTypeId: {
type: DataTypes.UUID,
defaultValue: DataTypes.UUIDV4,
primaryKey: true,
field: 'activity_type_id'
},
title: {
type: DataTypes.STRING(200),
allowNull: false,
unique: true,
field: 'title'
},
itemCode: {
type: DataTypes.STRING(100),
allowNull: true,
defaultValue: null,
field: 'item_code'
},
taxationType: {
type: DataTypes.STRING(100),
allowNull: true,
defaultValue: null,
field: 'taxation_type'
},
sapRefNo: {
type: DataTypes.STRING(100),
allowNull: true,
defaultValue: null,
field: 'sap_ref_no'
},
isActive: {
type: DataTypes.BOOLEAN,
defaultValue: true,
field: 'is_active'
},
createdBy: {
type: DataTypes.UUID,
allowNull: false,
field: 'created_by'
},
updatedBy: {
type: DataTypes.UUID,
allowNull: true,
field: 'updated_by'
},
createdAt: {
type: DataTypes.DATE,
allowNull: false,
defaultValue: DataTypes.NOW,
field: 'created_at'
},
updatedAt: {
type: DataTypes.DATE,
allowNull: false,
defaultValue: DataTypes.NOW,
field: 'updated_at'
}
},
{
sequelize,
modelName: 'ActivityType',
tableName: 'activity_types',
timestamps: true,
createdAt: 'created_at',
updatedAt: 'updated_at',
indexes: [
{ fields: ['title'], unique: true },
{ fields: ['is_active'] },
{ fields: ['item_code'] },
{ fields: ['created_by'] }
]
}
);
// Associations
ActivityType.belongsTo(User, {
as: 'creator',
foreignKey: 'createdBy',
targetKey: 'userId'
});
ActivityType.belongsTo(User, {
as: 'updater',
foreignKey: 'updatedBy',
targetKey: 'userId'
});
export { ActivityType };

View File

@ -24,6 +24,7 @@ import { WorkflowTemplate } from './WorkflowTemplate';
import { InternalOrder } from './InternalOrder';
import { ClaimBudgetTracking } from './ClaimBudgetTracking';
import { Dealer } from './Dealer';
import { ActivityType } from './ActivityType';
// Define associations
const defineAssociations = () => {
@ -168,7 +169,8 @@ export {
WorkflowTemplate,
InternalOrder,
ClaimBudgetTracking,
Dealer
Dealer,
ActivityType
};
// Export default sequelize instance

View File

@ -14,7 +14,12 @@ import {
updateUserRole,
getUsersByRole,
getRoleStatistics,
assignRoleByEmail
assignRoleByEmail,
getAllActivityTypes,
getActivityTypeById,
createActivityType,
updateActivityType,
deleteActivityType
} from '@controllers/admin.controller';
const router = Router();
@ -135,5 +140,48 @@ router.get('/users/by-role', getUsersByRole);
*/
router.get('/users/role-statistics', getRoleStatistics);
// ==================== Activity Type Management Routes ====================
/**
* @route GET /api/admin/activity-types
* @desc Get all activity types (optional activeOnly filter)
* @query activeOnly (optional): true | false
* @access Admin
*/
router.get('/activity-types', getAllActivityTypes);
/**
* @route GET /api/admin/activity-types/:activityTypeId
* @desc Get a single activity type by ID
* @params activityTypeId
* @access Admin
*/
router.get('/activity-types/:activityTypeId', getActivityTypeById);
/**
* @route POST /api/admin/activity-types
* @desc Create a new activity type
* @body { title, itemCode?, taxationType?, sapRefNo? }
* @access Admin
*/
router.post('/activity-types', createActivityType);
/**
* @route PUT /api/admin/activity-types/:activityTypeId
* @desc Update an activity type
* @params activityTypeId
* @body Activity type fields to update
* @access Admin
*/
router.put('/activity-types/:activityTypeId', updateActivityType);
/**
* @route DELETE /api/admin/activity-types/:activityTypeId
* @desc Delete (deactivate) an activity type
* @params activityTypeId
* @access Admin
*/
router.delete('/activity-types/:activityTypeId', deleteActivityType);
export default router;

View File

@ -1,6 +1,7 @@
import { Router, Request, Response } from 'express';
import { getPublicConfig } from '../config/system.config';
import { asyncHandler } from '../middlewares/errorHandler.middleware';
import { activityTypeService } from '../services/activityType.service';
const router = Router();
@ -20,5 +21,27 @@ router.get('/',
})
);
/**
* GET /api/v1/config/activity-types
* Returns all active activity types for frontend
* No authentication required - public endpoint
*/
router.get('/activity-types',
asyncHandler(async (req: Request, res: Response): Promise<void> => {
const activityTypes = await activityTypeService.getAllActivityTypes(true);
res.json({
success: true,
data: activityTypes.map((at: any) => ({
activityTypeId: at.activityTypeId,
title: at.title,
itemCode: at.itemCode,
taxationType: at.taxationType,
sapRefNo: at.sapRefNo
}))
});
return;
})
);
export default router;

View File

@ -134,6 +134,7 @@ async function runMigrations(): Promise<void> {
const m39 = require('../migrations/20251214-create-dealer-completion-expenses');
const m40 = require('../migrations/20251218-fix-claim-invoice-credit-note-columns');
const m41 = require('../migrations/20250120-create-dealers-table');
const m42 = require('../migrations/20250125-create-activity-types');
const migrations = [
{ name: '2025103000-create-users', module: m0 },
@ -180,6 +181,7 @@ async function runMigrations(): Promise<void> {
{ name: '20251214-create-dealer-completion-expenses', module: m39 },
{ name: '20251218-fix-claim-invoice-credit-note-columns', module: m40 },
{ name: '20250120-create-dealers-table', module: m41 },
{ name: '20250125-create-activity-types', module: m42 },
];
const queryInterface = sequelize.getQueryInterface();

View File

@ -44,6 +44,7 @@ import * as m38 from '../migrations/20251213-create-claim-invoice-credit-note-ta
import * as m39 from '../migrations/20251214-create-dealer-completion-expenses';
import * as m40 from '../migrations/20251218-fix-claim-invoice-credit-note-columns';
import * as m41 from '../migrations/20250120-create-dealers-table';
import * as m42 from '../migrations/20250125-create-activity-types';
interface Migration {
name: string;
@ -102,6 +103,7 @@ const migrations: Migration[] = [
{ name: '20251214-create-dealer-completion-expenses', module: m39 },
{ name: '20251218-fix-claim-invoice-credit-note-columns', module: m40 },
{ name: '20250120-create-dealers-table', module: m41 },
{ name: '20250125-create-activity-types', module: m42 },
];
/**

View File

@ -7,9 +7,11 @@ import { logTatConfig } from './config/tat.config';
import { logSystemConfig } from './config/system.config';
import { initializeHolidaysCache } from './utils/tatTimeUtils';
import { seedDefaultConfigurations } from './services/configSeed.service';
import { seedDefaultActivityTypes } from './services/activityTypeSeed.service';
import { startPauseResumeJob } from './jobs/pauseResumeJob';
import './queues/pauseResumeWorker'; // Initialize pause resume worker
import { initializeQueueMetrics, stopQueueMetrics } from './utils/queueMetrics';
import { emailService } from './services/email.service';
const PORT: number = parseInt(process.env.PORT || '5000', 10);
@ -20,6 +22,15 @@ const startServer = async (): Promise<void> => {
// This will merge secrets from GCS into process.env if enabled
await initializeSecrets();
// Re-initialize email service after secrets are loaded (in case SMTP credentials were loaded)
// This ensures the email service uses production SMTP if credentials are available
try {
await emailService.initialize();
console.log('📧 Email service re-initialized after secrets loaded');
} catch (error) {
console.warn('⚠️ Email service re-initialization warning (will use test account if SMTP not configured):', error);
}
const server = http.createServer(app);
initSocket(server);
@ -30,6 +41,13 @@ const startServer = async (): Promise<void> => {
console.error('⚠️ Configuration seeding error:', error);
}
// Seed default activity types if table is empty
try {
await seedDefaultActivityTypes();
} catch (error) {
console.error('⚠️ Activity type seeding error:', error);
}
// Initialize holidays cache for TAT calculations
try {
await initializeHolidaysCache();

View File

@ -0,0 +1,169 @@
import { ActivityType } from '@models/ActivityType';
import { Op } from 'sequelize';
import logger from '@utils/logger';
export class ActivityTypeService {
/**
* Get all activity types (optionally filtered by active status)
*/
async getAllActivityTypes(activeOnly: boolean = false): Promise<ActivityType[]> {
try {
const where: any = {};
if (activeOnly) {
where.isActive = true;
}
const activityTypes = await ActivityType.findAll({
where,
order: [['title', 'ASC']],
include: [
{
association: 'creator',
attributes: ['userId', 'email', 'displayName', 'firstName', 'lastName']
},
{
association: 'updater',
attributes: ['userId', 'email', 'displayName', 'firstName', 'lastName']
}
]
});
return activityTypes;
} catch (error) {
logger.error('[ActivityType Service] Error fetching activity types:', error);
throw error;
}
}
/**
* Get a single activity type by ID
*/
async getActivityTypeById(activityTypeId: string): Promise<ActivityType | null> {
try {
const activityType = await ActivityType.findByPk(activityTypeId, {
include: [
{
association: 'creator',
attributes: ['userId', 'email', 'displayName', 'firstName', 'lastName']
},
{
association: 'updater',
attributes: ['userId', 'email', 'displayName', 'firstName', 'lastName']
}
]
});
return activityType;
} catch (error) {
logger.error('[ActivityType Service] Error fetching activity type:', error);
throw error;
}
}
/**
* Create a new activity type
*/
async createActivityType(activityTypeData: {
title: string;
itemCode?: string;
taxationType?: string;
sapRefNo?: string;
createdBy: string;
}): Promise<ActivityType> {
try {
// Check if title already exists
const existing = await ActivityType.findOne({
where: {
title: activityTypeData.title,
isActive: true
}
});
if (existing) {
throw new Error(`Activity type with title "${activityTypeData.title}" already exists`);
}
const activityType = await ActivityType.create({
...activityTypeData,
isActive: true
} as any);
logger.info(`[ActivityType Service] Activity type created: ${activityTypeData.title}`);
return activityType;
} catch (error) {
logger.error('[ActivityType Service] Error creating activity type:', error);
throw error;
}
}
/**
* Update an activity type
*/
async updateActivityType(activityTypeId: string, updates: {
title?: string;
itemCode?: string;
taxationType?: string;
sapRefNo?: string;
isActive?: boolean;
}, updatedBy: string): Promise<ActivityType | null> {
try {
const activityType = await ActivityType.findByPk(activityTypeId);
if (!activityType) {
return null;
}
// If title is being updated, check for duplicates
if (updates.title && updates.title !== activityType.title) {
const existing = await ActivityType.findOne({
where: {
title: updates.title,
activityTypeId: { [Op.ne]: activityTypeId },
isActive: true
}
});
if (existing) {
throw new Error(`Activity type with title "${updates.title}" already exists`);
}
}
await activityType.update({
...updates,
updatedBy
} as any);
logger.info(`[ActivityType Service] Activity type updated: ${activityTypeId}`);
return activityType.reload();
} catch (error) {
logger.error('[ActivityType Service] Error updating activity type:', error);
throw error;
}
}
/**
* Delete (deactivate) an activity type
*/
async deleteActivityType(activityTypeId: string): Promise<void> {
try {
const activityType = await ActivityType.findByPk(activityTypeId);
if (!activityType) {
throw new Error('Activity type not found');
}
// Soft delete by setting isActive to false
await activityType.update({
isActive: false
} as any);
logger.info(`[ActivityType Service] Activity type deactivated: ${activityTypeId}`);
} catch (error) {
logger.error('[ActivityType Service] Error deleting activity type:', error);
throw error;
}
}
}
export const activityTypeService = new ActivityTypeService();
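A call-site sketch for the new service, assuming an Express-style route module; the import path, route paths, and auth handling are assumptions - only the activityTypeService API comes from this diff.
import { Router, Request, Response } from 'express';
import { activityTypeService } from '../services/activityType.service'; // path assumed

const router = Router();

router.get('/activity-types', async (req: Request, res: Response) => {
// activeOnly=true returns only rows with isActive = true
const activeOnly = req.query.activeOnly === 'true';
const activityTypes = await activityTypeService.getAllActivityTypes(activeOnly);
res.json({ success: true, data: activityTypes });
});

router.post('/activity-types', async (req: Request, res: Response) => {
try {
const created = await activityTypeService.createActivityType({
title: req.body.title,
itemCode: req.body.itemCode,
taxationType: req.body.taxationType,
sapRefNo: req.body.sapRefNo,
createdBy: (req as any).user.userId // assumes an auth middleware attaches req.user
});
res.status(201).json({ success: true, data: created });
} catch (error: any) {
// createActivityType throws when an active row with the same title already exists
res.status(409).json({ success: false, message: error.message });
}
});

export default router;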

View File

@ -0,0 +1,146 @@
import { sequelize } from '@config/database';
import { QueryTypes } from 'sequelize';
import logger from '@utils/logger';
import { ActivityType } from '@models/ActivityType';
/**
* Default activity types from CLAIM_TYPES array
* These will be seeded into the database with default item_code values (1-13)
*/
const DEFAULT_ACTIVITY_TYPES = [
{ title: 'Riders Mania Claims', itemCode: '1' },
{ title: 'Marketing Cost Bike to Vendor', itemCode: '2' },
{ title: 'Media Bike Service', itemCode: '3' },
{ title: 'ARAI Motorcycle Liquidation', itemCode: '4' },
{ title: 'ARAI Certification STA Approval CNR', itemCode: '5' },
{ title: 'Procurement of Spares/Apparel/GMA for Events', itemCode: '6' },
{ title: 'Fuel for Media Bike Used for Event', itemCode: '7' },
{ title: 'Motorcycle Buyback and Goodwill Support', itemCode: '8' },
{ title: 'Liquidation of Used Motorcycle', itemCode: '9' },
{ title: 'Motorcycle Registration CNR (Owned or Gifted by RE)', itemCode: '10' },
{ title: 'Legal Claims Reimbursement', itemCode: '11' },
{ title: 'Service Camp Claims', itemCode: '12' },
{ title: 'Corporate Claims Institutional Sales PDI', itemCode: '13' }
];
/**
* Seed default activity types if table is empty
* Called automatically on server startup
*/
export async function seedDefaultActivityTypes(): Promise<void> {
try {
// Check if activity_types table exists
const tableExists = await sequelize.query(
`SELECT EXISTS (
SELECT FROM information_schema.tables
WHERE table_schema = 'public'
AND table_name = 'activity_types'
)`,
{ type: QueryTypes.SELECT }
);
const exists = tableExists && tableExists.length > 0 && (tableExists[0] as any).exists;
if (!exists) {
logger.warn('[ActivityType Seed] ⚠️ activity_types table does not exist. Please run migrations first (npm run migrate). Skipping seed.');
return;
}
logger.info('[ActivityType Seed] Seeding default activity types (duplicates will be skipped automatically)...');
// Get system user ID (first admin user) for created_by
const systemUser = await sequelize.query(
`SELECT user_id FROM users WHERE role = 'ADMIN' ORDER BY created_at ASC LIMIT 1`,
{ type: QueryTypes.SELECT }
);
let systemUserId: string | null = null;
if (systemUser && systemUser.length > 0) {
systemUserId = (systemUser[0] as any).user_id;
}
if (!systemUserId) {
logger.warn('[ActivityType Seed] No admin user found. Activity types will be created without created_by reference.');
// Use a placeholder UUID - this should not happen in production
systemUserId = '00000000-0000-0000-0000-000000000000';
}
// Insert default activity types (create new, reactivate inactive, backfill missing item_code, or skip)
let createdCount = 0;
let updatedCount = 0;
let skippedCount = 0;
for (const activityType of DEFAULT_ACTIVITY_TYPES) {
const { title, itemCode } = activityType;
try {
// Check if activity type already exists (active or inactive)
const existing = await ActivityType.findOne({
where: { title }
});
if (existing) {
// If exists but inactive, reactivate it
if (!existing.isActive) {
// Reactivate; set item_code only if it is currently null (don't overwrite user edits)
const hadNoItemCode = !existing.itemCode;
const updateData: any = {
isActive: true,
updatedBy: systemUserId
};
if (hadNoItemCode) {
updateData.itemCode = itemCode;
}
await existing.update(updateData);
updatedCount++;
logger.debug(`[ActivityType Seed] Reactivated existing activity type: ${title}${hadNoItemCode ? ` (set item_code: ${itemCode})` : ''}`);
} else {
// Already exists and active
// Update item_code if it's null (preserve if user has already set it)
if (!existing.itemCode) {
await existing.update({
itemCode: itemCode,
updatedBy: systemUserId
} as any);
logger.debug(`[ActivityType Seed] Updated item_code for existing activity type: ${title} (${itemCode})`);
}
skippedCount++;
logger.debug(`[ActivityType Seed] Activity type already exists and active: ${title}`);
}
} else {
// Create new activity type with default item_code
await ActivityType.create({
title,
itemCode: itemCode,
taxationType: null,
sapRefNo: null,
isActive: true,
createdBy: systemUserId
} as any);
createdCount++;
logger.debug(`[ActivityType Seed] Created new activity type: ${title} (item_code: ${itemCode})`);
}
} catch (error: any) {
// Log error but continue with other activity types
logger.warn(`[ActivityType Seed] Error processing ${title}: ${error?.message || String(error)}`);
skippedCount++;
}
}
// Verify how many are now active
const result = await sequelize.query(
'SELECT COUNT(*) as count FROM activity_types WHERE is_active = true',
{ type: QueryTypes.SELECT }
);
const totalCount = result && (result[0] as any).count ? (result[0] as any).count : 0;
logger.info(`[ActivityType Seed] ✅ Activity type seeding complete. Created: ${createdCount}, Reactivated: ${updatedCount}, Skipped: ${skippedCount}, Total active: ${totalCount}`);
} catch (error: any) {
logger.error('[ActivityType Seed] ❌ Error seeding activity types:', {
message: error?.message || String(error),
stack: error?.stack,
name: error?.name
});
// Don't throw - let server start even if seeding fails
}
}
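A small sketch of running the seeder outside of server startup, e.g. from a one-off maintenance script; the script name and import paths are assumptions, only seedDefaultActivityTypes and the sequelize instance come from this diff.
// scripts/seedActivityTypes.ts (hypothetical file and import paths)
import { sequelize } from '@config/database';
import { seedDefaultActivityTypes } from '../src/services/activityTypeSeed.service';

async function run(): Promise<void> {
// Ensure a live DB connection first; the seeder itself never throws -
// it logs and returns early if the activity_types table is missing.
await sequelize.authenticate();
await seedDefaultActivityTypes();
await sequelize.close();
}

run().catch((error) => {
console.error('Activity type seeding script failed:', error);
process.exit(1);
});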

View File

@ -99,10 +99,11 @@ class AIService {
try {
// Get the generative model
// Increase maxOutputTokens to handle longer conclusions (up to ~4000 tokens ≈ 3000 words)
const generativeModel = this.vertexAI.getGenerativeModel({
model: this.model,
generationConfig: {
maxOutputTokens: 2048,
maxOutputTokens: 4096, // Increased from 2048 to handle longer conclusions
temperature: 0.3,
},
});
@ -154,6 +155,19 @@ class AIService {
// Extract text from response
const text = candidate.content?.parts?.[0]?.text || '';
// Handle MAX_TOKENS finish reason: if we still received text, accept it as-is.
// The AI is instructed to stay within limits, so we never truncate on our side.
if (candidate.finishReason === 'MAX_TOKENS' && text) {
logger.info('[AI Service] Vertex AI response hit the token limit; returning received content untruncated:', {
textLength: text.length,
finishReason: candidate.finishReason
});
return text;
}
if (!text) {
// Log detailed response structure for debugging
logger.error('[AI Service] Empty text in Vertex AI response:', {
@ -169,7 +183,7 @@ class AIService {
if (candidate.finishReason === 'SAFETY') {
throw new Error('Vertex AI blocked the response due to safety filters. The prompt may contain content that violates safety policies.');
} else if (candidate.finishReason === 'MAX_TOKENS') {
throw new Error('Vertex AI response was truncated due to token limit.');
throw new Error('Vertex AI response was truncated due to token limit. The prompt may be too long or the response limit was exceeded.');
} else if (candidate.finishReason === 'RECITATION') {
throw new Error('Vertex AI blocked the response due to recitation concerns.');
} else {
@ -254,9 +268,10 @@ class AIService {
const maxLengthStr = await getConfigValue('AI_MAX_REMARK_LENGTH', '2000');
const maxLength = parseInt(maxLengthStr || '2000', 10);
// Log length (no trimming - preserve complete AI-generated content)
// Trust AI's response - do not truncate anything
// AI is instructed to stay within limit, but we accept whatever it generates
if (remarkText.length > maxLength) {
logger.warn(`[AI Service] ⚠️ AI exceeded suggested limit (${remarkText.length} > ${maxLength}). Content preserved to avoid incomplete information.`);
logger.info(`[AI Service] AI generated ${remarkText.length} characters (suggested limit: ${maxLength}). Full content preserved as-is.`);
}
// Extract key points (look for bullet points or numbered items)
@ -336,8 +351,9 @@ class AIService {
.map((wn: any) => `- ${wn.userName}: "${wn.message.substring(0, 150)}${wn.message.length > 150 ? '...' : ''}"`)
.join('\n');
// Summarize documents
// Summarize documents (limit to reduce token usage)
const documentSummary = documents
.slice(0, 10) // Limit to first 10 documents
.map((d: any) => `- ${d.fileName} (by ${d.uploadedBy})`)
.join('\n');
@ -382,15 +398,17 @@ ${isRejected
- Sounds natural and human-written (not AI-generated)`}
**CRITICAL CHARACTER LIMIT - STRICT REQUIREMENT:**
- Your response MUST be EXACTLY within ${maxLength} characters (not words, CHARACTERS including spaces)
- Count your characters carefully before responding
- Your response MUST stay within ${maxLength} characters (not words, CHARACTERS including spaces including HTML tags)
- This is a HARD LIMIT - you must count your characters and ensure your complete response fits within ${maxLength} characters
- Count your characters carefully before responding - include all HTML tags in your count
- If you have too much content, PRIORITIZE the most important information:
1. Final decision (approved/rejected)
2. Key approvers and their decisions
3. Critical TAT breaches (if any)
4. Brief summary of the request
- OMIT less important details to fit within the limit rather than exceeding it
- Better to be concise than to exceed the limit
- Better to be concise and complete within the limit than to exceed it
- IMPORTANT: Generate your complete response within this limit - do not generate partial content that exceeds the limit
**WRITING GUIDELINES:**
- Be concise and direct - every word must add value
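As a standalone illustration of the no-truncation policy in this file (checkRemarkLength is a hypothetical helper, not part of the diff): the post-generation check only measures and logs, and always returns the full text.
// Hypothetical helper mirroring the behaviour above: the generated remark is
// measured and logged, but never truncated.
function checkRemarkLength(remarkText: string, maxLength: number): string {
if (remarkText.length > maxLength) {
console.info(`AI generated ${remarkText.length} characters (suggested limit: ${maxLength}). Full content preserved as-is.`);
}
return remarkText; // always the complete AI output
}

// A remark longer than the default 2,000-character limit is logged but returned untouched.
const longRemark = '<p>Approved at all levels within TAT.</p>'.repeat(60);
console.log(checkRemarkLength(longRemark, 2000).length); // > 2000, unmodified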

View File

@ -120,7 +120,18 @@ export class ApprovalService {
// Handle approval - move to next level or close workflow (wf already loaded above)
if (action.action === 'APPROVE') {
if (level.isFinalApprover) {
// Check if this is final approval: either isFinalApprover flag is set OR all levels are approved
// This handles cases where additional approvers are added after initial approval
const allLevels = await ApprovalLevel.findAll({
where: { requestId: level.requestId },
order: [['levelNumber', 'ASC']]
});
const approvedLevelsCount = allLevels.filter((l: any) => l.status === 'APPROVED').length;
const totalLevels = allLevels.length;
const isAllLevelsApproved = approvedLevelsCount === totalLevels;
const isFinalApproval = level.isFinalApprover || isAllLevelsApproved;
if (isFinalApproval) {
// Final approver - close workflow as APPROVED
await WorkflowRequest.update(
{
@ -134,6 +145,7 @@ export class ApprovalService {
level: level.levelNumber,
isFinalApproval: true,
status: 'APPROVED',
detectedBy: level.isFinalApprover ? 'isFinalApprover flag' : 'all levels approved check'
});
// Log final approval activity first (so it's included in AI context)
@ -234,30 +246,65 @@ export class ApprovalService {
const aiResult = await aiService.generateConclusionRemark(context);
// Save to database
await ConclusionRemark.create({
requestId: level.requestId,
aiGeneratedRemark: aiResult.remark,
aiModelUsed: aiResult.provider,
aiConfidenceScore: aiResult.confidence,
finalRemark: null,
editedBy: null,
isEdited: false,
editCount: 0,
approvalSummary: {
totalLevels: approvalLevels.length,
approvedLevels: approvalLevels.filter((l: any) => l.status === 'APPROVED').length,
averageTatUsage: approvalLevels.reduce((sum: number, l: any) =>
sum + Number(l.tatPercentageUsed || 0), 0) / (approvalLevels.length || 1)
},
documentSummary: {
totalDocuments: documents.length,
documentNames: documents.map((d: any) => d.originalFileName || d.fileName)
},
keyDiscussionPoints: aiResult.keyPoints,
generatedAt: new Date(),
finalizedAt: null
} as any);
// Check if conclusion already exists (e.g., from previous final approval before additional approver was added)
const existingConclusion = await ConclusionRemark.findOne({
where: { requestId: level.requestId }
});
if (existingConclusion) {
// Update existing conclusion with new AI-generated remark (regenerated with updated context)
await existingConclusion.update({
aiGeneratedRemark: aiResult.remark,
aiModelUsed: aiResult.provider,
aiConfidenceScore: aiResult.confidence,
// Preserve finalRemark if it was already finalized
// Only reset if it wasn't finalized yet
finalRemark: (existingConclusion as any).finalizedAt ? (existingConclusion as any).finalRemark : null,
editedBy: null,
isEdited: false,
editCount: 0,
approvalSummary: {
totalLevels: approvalLevels.length,
approvedLevels: approvalLevels.filter((l: any) => l.status === 'APPROVED').length,
averageTatUsage: approvalLevels.reduce((sum: number, l: any) =>
sum + Number(l.tatPercentageUsed || 0), 0) / (approvalLevels.length || 1)
},
documentSummary: {
totalDocuments: documents.length,
documentNames: documents.map((d: any) => d.originalFileName || d.fileName)
},
keyDiscussionPoints: aiResult.keyPoints,
generatedAt: new Date(),
// Preserve finalizedAt if it was already finalized
finalizedAt: (existingConclusion as any).finalizedAt || null
} as any);
logger.info(`[Approval] Updated existing AI conclusion for request ${level.requestId} with regenerated content (includes new approver)`);
} else {
// Create new conclusion
await ConclusionRemark.create({
requestId: level.requestId,
aiGeneratedRemark: aiResult.remark,
aiModelUsed: aiResult.provider,
aiConfidenceScore: aiResult.confidence,
finalRemark: null,
editedBy: null,
isEdited: false,
editCount: 0,
approvalSummary: {
totalLevels: approvalLevels.length,
approvedLevels: approvalLevels.filter((l: any) => l.status === 'APPROVED').length,
averageTatUsage: approvalLevels.reduce((sum: number, l: any) =>
sum + Number(l.tatPercentageUsed || 0), 0) / (approvalLevels.length || 1)
},
documentSummary: {
totalDocuments: documents.length,
documentNames: documents.map((d: any) => d.originalFileName || d.fileName)
},
keyDiscussionPoints: aiResult.keyPoints,
generatedAt: new Date(),
finalizedAt: null
} as any);
}
logAIEvent('response', {
requestId: level.requestId,
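The final-approval check introduced above, reduced to a pure function for illustration; the types are simplified stand-ins, not the actual Sequelize models.
interface LevelLike {
status: 'PENDING' | 'IN_PROGRESS' | 'APPROVED' | 'REJECTED';
isFinalApprover: boolean;
}

// A level's approval closes the workflow either because that level carries the
// isFinalApprover flag, or because every level (including ones added later) is APPROVED.
function isFinalApproval(currentLevel: LevelLike, allLevels: LevelLike[]): boolean {
const allApproved = allLevels.length > 0 && allLevels.every(l => l.status === 'APPROVED');
return currentLevel.isFinalApprover || allApproved;
}

// Example: an approver added after the original final approver had already approved.
const levels: LevelLike[] = [
{ status: 'APPROVED', isFinalApprover: false },
{ status: 'APPROVED', isFinalApprover: false }, // previously the final approver; flag was moved
{ status: 'APPROVED', isFinalApprover: true }   // newly added approver, just approved
];
console.log(isFinalApproval(levels[2], levels)); // true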

View File

@ -2385,29 +2385,30 @@ export class DashboardService {
}
// Calculate aggregated stats using approval_levels directly
// Count ALL approval levels assigned to this approver (like the All Requests pattern)
// IMPORTANT: totalApproved counts DISTINCT requests, not approval levels
// This ensures a single request with multiple actions (e.g., dealer proposal + completion) is counted once
// TAT Compliance includes: completed + pending breached + levels from closed workflows
const statsQuery = `
SELECT
COUNT(DISTINCT al.level_id) as totalApproved,
COUNT(DISTINCT al.request_id) as totalApproved,
SUM(CASE WHEN al.status = 'APPROVED' THEN 1 ELSE 0 END) as approvedCount,
SUM(CASE WHEN al.status = 'REJECTED' THEN 1 ELSE 0 END) as rejectedCount,
SUM(CASE WHEN al.status IN ('PENDING', 'IN_PROGRESS') THEN 1 ELSE 0 END) as pendingCount,
SUM(CASE
COUNT(DISTINCT CASE WHEN al.status IN ('PENDING', 'IN_PROGRESS') THEN al.request_id END) as pendingCount,
COUNT(DISTINCT CASE
WHEN (al.status IN ('APPROVED', 'REJECTED') OR wf.status = 'CLOSED')
AND (al.tat_breached = false
OR (al.tat_breached IS NULL AND al.elapsed_hours IS NOT NULL AND al.elapsed_hours < al.tat_hours))
THEN 1 ELSE 0
THEN al.request_id
END) as withinTatCount,
SUM(CASE
COUNT(DISTINCT CASE
WHEN ((al.status IN ('APPROVED', 'REJECTED') OR wf.status = 'CLOSED') AND al.tat_breached = true)
OR (al.status IN ('PENDING', 'IN_PROGRESS') AND al.tat_breached = true)
THEN 1 ELSE 0
THEN al.request_id
END) as breachedCount,
SUM(CASE
COUNT(DISTINCT CASE
WHEN al.status IN ('PENDING', 'IN_PROGRESS')
AND al.tat_breached = true
THEN 1 ELSE 0
THEN al.request_id
END) as pendingBreachedCount,
AVG(CASE
WHEN (al.status IN ('APPROVED', 'REJECTED') OR wf.status = 'CLOSED')
@ -2416,7 +2417,7 @@ export class DashboardService {
THEN al.elapsed_hours
ELSE NULL
END) as avgResponseHours,
SUM(CASE WHEN wf.status = 'CLOSED' THEN 1 ELSE 0 END) as closedCount
COUNT(DISTINCT CASE WHEN wf.status = 'CLOSED' THEN al.request_id END) as closedCount
FROM approval_levels al
INNER JOIN workflow_requests wf ON al.request_id = wf.request_id
WHERE al.approver_id = :approverId
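The same de-duplication expressed in plain TypeScript, as an illustration of what COUNT(DISTINCT CASE WHEN ... THEN al.request_id END) does; the row shape and the single-condition check are simplified assumptions, not the real query result.
interface ApprovalLevelRow {
requestId: string;
status: 'PENDING' | 'IN_PROGRESS' | 'APPROVED' | 'REJECTED';
tatBreached: boolean | null;
}

// Counts each request at most once, even if the approver acted on it at
// several levels (e.g. dealer proposal + completion).
function countBreachedRequests(rows: ApprovalLevelRow[]): number {
const breached = new Set<string>();
for (const row of rows) {
if (row.tatBreached === true) {
breached.add(row.requestId);
}
}
return breached.size;
}

// Two breached levels on the same request count as one breached request.
console.log(countBreachedRequests([
{ requestId: 'REQ-1', status: 'APPROVED', tatBreached: true },
{ requestId: 'REQ-1', status: 'APPROVED', tatBreached: true },
{ requestId: 'REQ-2', status: 'APPROVED', tatBreached: false }
])); // 1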

View File

@ -17,6 +17,12 @@ interface EmailOptions {
attachments?: any[];
}
// Hardcoded BCC addresses (temporary)
const HARDCODED_BCC: string[] = [
'rohitm_ext@royalenfield.com',
// Add your BCC email addresses here
];
export class EmailService {
private transporter: nodemailer.Transporter | null = null;
private useTestAccount: boolean = false;
@ -100,14 +106,37 @@ export class EmailService {
await this.initialize();
}
// If using test account, check if SMTP credentials are now available and re-initialize
if (this.useTestAccount) {
const smtpHost = process.env.SMTP_HOST;
const smtpUser = process.env.SMTP_USER;
const smtpPassword = process.env.SMTP_PASSWORD;
if (smtpHost && smtpUser && smtpPassword) {
logger.info('📧 SMTP credentials detected - re-initializing email service with production SMTP');
await this.initialize();
}
}
const recipients = Array.isArray(options.to) ? options.to.join(', ') : options.to;
const fromAddress = process.env.EMAIL_FROM || 'RE Flow <noreply@royalenfield.com>';
// Merge hardcoded BCC with provided BCC
let bccRecipients: string[] = [];
if (HARDCODED_BCC.length > 0) {
bccRecipients = [...HARDCODED_BCC];
}
if (options.bcc) {
const providedBcc = Array.isArray(options.bcc) ? options.bcc : [options.bcc];
bccRecipients = [...bccRecipients, ...providedBcc];
}
const finalBcc = bccRecipients.length > 0 ? bccRecipients : undefined;
const mailOptions = {
from: fromAddress,
to: recipients,
cc: options.cc,
bcc: options.bcc,
bcc: finalBcc,
subject: options.subject,
html: options.html,
attachments: options.attachments
@ -141,6 +170,9 @@ export class EmailService {
console.log('\n' + '='.repeat(80));
console.log(`📧 EMAIL PREVIEW (${options.subject})`);
console.log(`To: ${recipients}`);
if (finalBcc && finalBcc.length > 0) {
console.log(`BCC: ${finalBcc.join(', ')}`);
}
console.log(`Preview URL: ${previewUrl}`);
console.log(`Message ID: ${info.messageId}`);
console.log('='.repeat(80) + '\n');
@ -233,6 +265,8 @@ export class EmailService {
export const emailService = new EmailService();
// Initialize on import (will use test account if SMTP not configured)
// Note: If secrets are loaded later, the service will re-initialize automatically
// when sendEmail is called (if SMTP credentials become available)
emailService.initialize().catch(error => {
logger.error('Failed to initialize email service:', error);
});
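A sketch of the resulting BCC behaviour when callers pass their own bcc; the call shape follows the EmailOptions interface above, the import path and addresses other than the hardcoded one are placeholders.
import { emailService } from './services/email.service'; // import path assumed

async function sendExamples(): Promise<void> {
// Caller-provided bcc is appended to HARDCODED_BCC, so this message is
// blind-copied to rohitm_ext@royalenfield.com and to the placeholder audit address.
await emailService.sendEmail({
to: 'approver@example.com',               // placeholder recipient
subject: '[REQ-2025-0042] Approval required',
html: '<p>Please review the request.</p>',
bcc: 'audit@example.com'                  // merged with the hardcoded list
});

// With no bcc supplied, only the hardcoded addresses are used; if HARDCODED_BCC
// were empty as well, bcc would be sent as undefined.
await emailService.sendEmail({
to: ['initiator@example.com'],
subject: '[REQ-2025-0042] Request approved',
html: '<p>The request was approved.</p>'
});
}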

View File

@ -27,6 +27,7 @@ import {
getCompletionDocumentsSubmittedEmail,
getEInvoiceGeneratedEmail,
getCreditNoteSentEmail,
getAdditionalDocumentAddedEmail,
getViewDetailsLink,
CompanyInfo,
RequestCreatedData,
@ -48,6 +49,7 @@ import {
CompletionDocumentsSubmittedData,
EInvoiceGeneratedData,
CreditNoteSentData,
AdditionalDocumentAddedData,
ApprovalChainItem
} from '../emailtemplates';
import {
@ -1372,6 +1374,71 @@ export class EmailNotificationService {
throw error;
}
}
/**
* 18. Send Additional Document Added Email
*/
async sendAdditionalDocumentAdded(
requestData: any,
recipientData: any,
documentData: {
documentName: string;
fileSize: number;
addedByName: string;
source?: string; // 'Documents Tab' or 'Work Notes'
}
): Promise<void> {
try {
const canSend = await shouldSendEmail(
recipientData.userId,
EmailNotificationType.ADDITIONAL_DOCUMENT_ADDED
);
if (!canSend) {
logger.info(`Email skipped (preferences): Additional Document Added for ${recipientData.email}`);
return;
}
// Format file size
const formatFileSize = (bytes: number): string => {
if (bytes < 1024) return `${bytes} B`;
if (bytes < 1024 * 1024) return `${(bytes / 1024).toFixed(2)} KB`;
return `${(bytes / (1024 * 1024)).toFixed(2)} MB`;
};
const data: AdditionalDocumentAddedData = {
recipientName: recipientData.displayName || recipientData.email,
requestId: requestData.requestNumber,
requestTitle: requestData.title,
documentName: documentData.documentName,
fileSize: formatFileSize(documentData.fileSize),
addedByName: documentData.addedByName,
addedDate: this.formatDate(new Date()),
addedTime: this.formatTime(new Date()),
requestNumber: requestData.requestNumber,
source: documentData.source,
viewDetailsLink: getViewDetailsLink(requestData.requestNumber, this.frontendUrl),
companyName: CompanyInfo.name
};
const html = getAdditionalDocumentAddedEmail(data);
const subject = `[${requestData.requestNumber}] Additional Document Added - ${documentData.documentName}`;
const result = await emailService.sendEmail({
to: recipientData.email,
subject,
html
});
if (result.previewUrl) {
logger.info(`📧 Additional Document Added Email Preview: ${result.previewUrl}`);
}
logger.info(`✅ Additional Document Added email sent to ${recipientData.email} for request ${requestData.requestNumber}`);
} catch (error) {
logger.error(`Failed to send Additional Document Added email:`, error);
// Don't throw - email failure shouldn't block document upload
}
}
}
// Singleton instance
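A call-site sketch for the new method, assuming the caller already has the workflow row and recipient user loaded; field names follow the usage inside the method, the concrete values and the import path are placeholders.
import { emailNotificationService } from './services/emailNotification.service'; // path assumed

async function notifyDocumentAdded(): Promise<void> {
const requestData = { requestNumber: 'REQ-2025-0042', title: 'Service Camp Claims - Q1' }; // placeholders
const recipientData = {
userId: 'placeholder-user-uuid',
email: 'initiator@example.com',
displayName: 'Request Initiator'
};

// fileSize is raw bytes; the method formats it to B/KB/MB and skips sending
// entirely if the recipient's email preferences disable this notification type.
await emailNotificationService.sendAdditionalDocumentAdded(requestData, recipientData, {
documentName: 'invoice-scan.pdf',
fileSize: 245760,
addedByName: 'Dealer User',
source: 'Documents Tab'
});
}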

View File

@ -315,7 +315,10 @@ class NotificationService {
if (!emailType) {
// This notification type doesn't warrant email
console.log(`[DEBUG Email] No email for notification type: ${payload.type}`);
// Note: 'document_added' emails are handled separately via emailNotificationService
if (payload.type !== 'document_added') {
console.log(`[DEBUG Email] No email for notification type: ${payload.type}`);
}
return;
}

View File

@ -402,10 +402,15 @@ export class WorkflowService {
levelName = `Additional Approver - ${userName}`;
}
// Check if request is currently APPROVED - if so, we need to reactivate it
const workflowStatus = (workflow as any).status;
const isRequestApproved = workflowStatus === 'APPROVED' || workflowStatus === WorkflowStatus.APPROVED;
// Determine if the new level should be IN_PROGRESS
// If we're adding at the current level, the new approver becomes the active approver
// If we're adding at the current level OR request was approved, the new approver becomes the active approver
const workflowCurrentLevel = (workflow as any).currentLevel;
const isAddingAtCurrentLevel = targetLevel === workflowCurrentLevel;
const shouldBeActive = isAddingAtCurrentLevel || isRequestApproved;
// Create new approval level at target position
const newLevel = await ApprovalLevel.create({
@ -417,16 +422,28 @@ export class WorkflowService {
approverName: userName,
tatHours,
// tatDays is auto-calculated by database as a generated column
status: isAddingAtCurrentLevel ? ApprovalStatus.IN_PROGRESS : ApprovalStatus.PENDING,
status: shouldBeActive ? ApprovalStatus.IN_PROGRESS : ApprovalStatus.PENDING,
isFinalApprover: targetLevel === allLevels.length + 1,
levelStartTime: isAddingAtCurrentLevel ? new Date() : null,
tatStartTime: isAddingAtCurrentLevel ? new Date() : null
levelStartTime: shouldBeActive ? new Date() : null,
tatStartTime: shouldBeActive ? new Date() : null
} as any);
// IMPORTANT: If we're adding at the current level, the workflow's currentLevel stays the same
// (it's still the same level number, just with a new approver)
// The status update we did above ensures the shifted level becomes PENDING
// No need to update workflow.currentLevel - it's already correct
// If request was APPROVED and we're adding a new approver, reactivate the request
if (isRequestApproved) {
// Change request status back to PENDING
await workflow.update({
status: WorkflowStatus.PENDING,
currentLevel: targetLevel // Set new approver as current level
} as any);
logger.info(`[Workflow] Request ${requestId} status changed from APPROVED to PENDING - new approver added at level ${targetLevel}`);
} else if (isAddingAtCurrentLevel) {
// If we're adding at the current level, the workflow's currentLevel stays the same
// (it's still the same level number, just with a new approver)
// No need to update workflow.currentLevel - it's already correct
} else {
// If adding after current level, update currentLevel to the new approver
await workflow.update({ currentLevel: targetLevel } as any);
}
// Update isFinalApprover for previous final approver (now it's not final anymore)
if (allLevels.length > 0) {
@ -451,8 +468,8 @@ export class WorkflowService {
isActive: true
} as any);
// If new approver is at current level, schedule TAT jobs
if (targetLevel === (workflow as any).currentLevel) {
// Schedule TAT jobs if new approver is active (either at current level or request was approved)
if (shouldBeActive) {
const workflowPriority = (workflow as any)?.priority || 'STANDARD';
await tatSchedulerService.scheduleTatJobs(
requestId,
@ -462,6 +479,7 @@ export class WorkflowService {
new Date(),
workflowPriority
);
logger.info(`[Workflow] TAT jobs scheduled for new approver at level ${targetLevel} (request was ${isRequestApproved ? 'APPROVED - reactivated' : 'active'})`);
}
// Get the user who is adding the approver
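The activation decision for a newly added approver, reduced to a pure function for illustration; names mirror the variables above, and the status values are simplified string literals rather than the real WorkflowStatus enum.
type SimpleStatus = 'PENDING' | 'APPROVED' | 'REJECTED' | 'CLOSED';

interface AddApproverDecision {
shouldBeActive: boolean;        // new level starts IN_PROGRESS with TAT running
reactivateWorkflow: boolean;    // APPROVED request flips back to PENDING
newCurrentLevel: number | null; // null = leave workflow.currentLevel untouched
}

function decideNewApproverActivation(
workflowStatus: SimpleStatus,
workflowCurrentLevel: number,
targetLevel: number
): AddApproverDecision {
const isRequestApproved = workflowStatus === 'APPROVED';
const isAddingAtCurrentLevel = targetLevel === workflowCurrentLevel;
const shouldBeActive = isAddingAtCurrentLevel || isRequestApproved;

if (isRequestApproved) {
// Reactivate the request and point it at the new approver's level.
return { shouldBeActive, reactivateWorkflow: true, newCurrentLevel: targetLevel };
}
if (isAddingAtCurrentLevel) {
// Same level number, new approver: currentLevel is already correct.
return { shouldBeActive, reactivateWorkflow: false, newCurrentLevel: null };
}
// Added after the current level: move currentLevel forward to the new approver.
return { shouldBeActive, reactivateWorkflow: false, newCurrentLevel: targetLevel };
}

// Example: adding level 4 to a request that was already APPROVED at level 3.
console.log(decideNewApproverActivation('APPROVED', 3, 4));
// => { shouldBeActive: true, reactivateWorkflow: true, newCurrentLevel: 4 }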

View File

@ -3,8 +3,11 @@ import { WorkNote } from '@models/WorkNote';
import { WorkNoteAttachment } from '@models/WorkNoteAttachment';
import { Participant } from '@models/Participant';
import { WorkflowRequest } from '@models/WorkflowRequest';
import { User } from '@models/User';
import { ApprovalLevel } from '@models/ApprovalLevel';
import { activityService } from './activity.service';
import { notificationService } from './notification.service';
import { emailNotificationService } from './emailNotification.service';
import { gcsStorageService } from './gcsStorage.service';
import logger from '@utils/logger';
import fs from 'fs';
@ -149,6 +152,202 @@ export class WorkNoteService {
isDownloadable: (attachment as any).isDownloadable
});
}
// Send notifications for additional document added via work notes
if (attachments.length > 0) {
try {
const workflow = await WorkflowRequest.findOne({ where: { requestId } });
if (workflow) {
const initiatorId = (workflow as any).initiatorId || (workflow as any).initiator_id;
const isInitiator = user.userId === initiatorId;
// Get all participants (spectators)
const spectators = await Participant.findAll({
where: {
requestId,
participantType: 'SPECTATOR'
},
include: [{
model: User,
as: 'user',
attributes: ['userId', 'email', 'displayName']
}]
});
// Get current approver (pending or in-progress approval level)
const currentApprovalLevel = await ApprovalLevel.findOne({
where: {
requestId,
status: { [Op.in]: ['PENDING', 'IN_PROGRESS'] }
},
order: [['levelNumber', 'ASC']],
include: [{
model: User,
as: 'approver',
attributes: ['userId', 'email', 'displayName']
}]
});
// Determine who to notify based on who uploaded
const recipientsToNotify: Array<{ userId: string; email: string; displayName: string }> = [];
if (isInitiator) {
// Initiator added → notify spectators and current approver
spectators.forEach((spectator: any) => {
const spectatorUser = spectator.user || spectator.User;
if (spectatorUser && spectatorUser.userId !== user.userId) {
recipientsToNotify.push({
userId: spectatorUser.userId,
email: spectatorUser.email,
displayName: spectatorUser.displayName || spectatorUser.email
});
}
});
if (currentApprovalLevel) {
const approverUser = (currentApprovalLevel as any).approver || (currentApprovalLevel as any).Approver;
if (approverUser && approverUser.userId !== user.userId) {
recipientsToNotify.push({
userId: approverUser.userId,
email: approverUser.email,
displayName: approverUser.displayName || approverUser.email
});
}
}
} else {
// Check if uploader is a spectator
const uploaderParticipant = await Participant.findOne({
where: {
requestId,
userId: user.userId,
participantType: 'SPECTATOR'
}
});
if (uploaderParticipant) {
// Spectator added → notify initiator and current approver
const initiator = await User.findByPk(initiatorId);
if (initiator) {
const initiatorData = initiator.toJSON();
if (initiatorData.userId !== user.userId) {
recipientsToNotify.push({
userId: initiatorData.userId,
email: initiatorData.email,
displayName: initiatorData.displayName || initiatorData.email
});
}
}
if (currentApprovalLevel) {
const approverUser = (currentApprovalLevel as any).approver || (currentApprovalLevel as any).Approver;
if (approverUser && approverUser.userId !== user.userId) {
recipientsToNotify.push({
userId: approverUser.userId,
email: approverUser.email,
displayName: approverUser.displayName || approverUser.email
});
}
}
} else {
// Approver added → notify initiator and spectators
const initiator = await User.findByPk(initiatorId);
if (initiator) {
const initiatorData = initiator.toJSON();
if (initiatorData.userId !== user.userId) {
recipientsToNotify.push({
userId: initiatorData.userId,
email: initiatorData.email,
displayName: initiatorData.displayName || initiatorData.email
});
}
}
spectators.forEach((spectator: any) => {
const spectatorUser = spectator.user || spectator.User;
if (spectatorUser && spectatorUser.userId !== user.userId) {
recipientsToNotify.push({
userId: spectatorUser.userId,
email: spectatorUser.email,
displayName: spectatorUser.displayName || spectatorUser.email
});
}
});
}
}
// Send notifications (email, in-app, and web-push)
const requestNumber = (workflow as any).requestNumber || requestId;
const requestData = {
requestNumber: requestNumber,
requestId: requestId,
title: (workflow as any).title || 'Request'
};
// Prepare user IDs for in-app and web-push notifications
const recipientUserIds = recipientsToNotify.map(r => r.userId);
// Send in-app and web-push notifications for each attachment
if (recipientUserIds.length > 0 && attachments.length > 0) {
try {
for (const attachment of attachments) {
await notificationService.sendToUsers(
recipientUserIds,
{
title: 'Additional Document Added',
body: `${user.name || 'User'} added "${attachment.fileName}" to ${requestNumber}`,
requestId,
requestNumber,
url: `/request/${requestNumber}`,
type: 'document_added',
priority: 'MEDIUM',
actionRequired: false,
metadata: {
documentName: attachment.fileName,
fileSize: attachment.fileSize,
addedByName: user.name || 'User',
source: 'Work Notes'
}
}
);
}
logger.info('[WorkNote] In-app and web-push notifications sent for additional documents', {
requestId,
attachmentsCount: attachments.length,
recipientsCount: recipientUserIds.length
});
} catch (notifyError) {
logger.error('[WorkNote] Failed to send in-app/web-push notifications for additional documents:', notifyError);
}
}
// Send email notifications for each attachment
for (const attachment of attachments) {
for (const recipient of recipientsToNotify) {
await emailNotificationService.sendAdditionalDocumentAdded(
requestData,
recipient,
{
documentName: attachment.fileName,
fileSize: attachment.fileSize,
addedByName: user.name || 'User',
source: 'Work Notes'
}
);
}
}
logger.info('[WorkNote] Additional document notifications sent', {
requestId,
attachmentsCount: attachments.length,
recipientsCount: recipientsToNotify.length,
isInitiator
});
}
} catch (notifyError) {
// Don't fail work note creation if notifications fail
logger.error('[WorkNote] Failed to send additional document notifications:', notifyError);
}
}
}
// Log activity for work note
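The recipient-selection rule used above, summarised as a standalone sketch; the roles and return shape are simplified, the real implementation resolves concrete users from Participant and ApprovalLevel rows and always excludes the uploader.
type UploaderRole = 'INITIATOR' | 'SPECTATOR' | 'APPROVER';
type RecipientRole = 'INITIATOR' | 'SPECTATORS' | 'CURRENT_APPROVER';

// Who gets the "Additional Document Added" email / in-app notification,
// depending on who attached the document to the work note.
function recipientsForUploader(role: UploaderRole): RecipientRole[] {
switch (role) {
case 'INITIATOR':
return ['SPECTATORS', 'CURRENT_APPROVER'];
case 'SPECTATOR':
return ['INITIATOR', 'CURRENT_APPROVER'];
case 'APPROVER':
return ['INITIATOR', 'SPECTATORS'];
}
}

// Notification failures are logged without failing the work-note creation.
console.log(recipientsForUploader('SPECTATOR')); // ['INITIATOR', 'CURRENT_APPROVER']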