Compare commits
No commits in common. "main" and "dealer_claim" have entirely different histories.
main
...
dealer_cla
@ -1326,9 +1326,9 @@ GCP_KEY_FILE=./config/gcp-key.json
|
|||||||
SMTP_HOST=smtp.gmail.com
|
SMTP_HOST=smtp.gmail.com
|
||||||
SMTP_PORT=587
|
SMTP_PORT=587
|
||||||
SMTP_SECURE=false
|
SMTP_SECURE=false
|
||||||
SMTP_USER=notifications@{{APP_DOMAIN}}
|
SMTP_USER=notifications@royalenfield.com
|
||||||
SMTP_PASSWORD=your_smtp_password
|
SMTP_PASSWORD=your_smtp_password
|
||||||
EMAIL_FROM=RE Workflow System <notifications@{{APP_DOMAIN}}>
|
EMAIL_FROM=RE Workflow System <notifications@royalenfield.com>
|
||||||
|
|
||||||
# AI Service (for conclusion generation)
|
# AI Service (for conclusion generation)
|
||||||
AI_API_KEY=your_ai_api_key
|
AI_API_KEY=your_ai_api_key
|
||||||
|
|||||||
@ -155,13 +155,13 @@ export async function calculateBusinessDays(
|
|||||||
2. ✅ Imported `calculateElapsedWorkingHours`, `addWorkingHours`, `addWorkingHoursExpress` from `@utils/tatTimeUtils`
|
2. ✅ Imported `calculateElapsedWorkingHours`, `addWorkingHours`, `addWorkingHoursExpress` from `@utils/tatTimeUtils`
|
||||||
3. ✅ Replaced lines 64-65 with proper working hours calculation (now lines 66-77)
|
3. ✅ Replaced lines 64-65 with proper working hours calculation (now lines 66-77)
|
||||||
4. ✅ Gets priority from workflow
|
4. ✅ Gets priority from workflow
|
||||||
5. Done: Test TAT breach alerts
|
5. ⏳ **TODO:** Test TAT breach alerts
|
||||||
|
|
||||||
### Step 2: Add Business Days Function ✅ **DONE**
|
### Step 2: Add Business Days Function ✅ **DONE**
|
||||||
1. ✅ Opened `Re_Backend/src/utils/tatTimeUtils.ts`
|
1. ✅ Opened `Re_Backend/src/utils/tatTimeUtils.ts`
|
||||||
2. ✅ Added `calculateBusinessDays()` function (lines 697-758)
|
2. ✅ Added `calculateBusinessDays()` function (lines 697-758)
|
||||||
3. ✅ Exported the function
|
3. ✅ Exported the function
|
||||||
4. Done: Test with various date ranges
|
4. ⏳ **TODO:** Test with various date ranges
|
||||||
|
|
||||||
### Step 3: Update Workflow Aging Report ✅ **DONE**
|
### Step 3: Update Workflow Aging Report ✅ **DONE**
|
||||||
1. ✅ Built report endpoint using `calculateBusinessDays()`
|
1. ✅ Built report endpoint using `calculateBusinessDays()`
|
||||||
|
|||||||
@ -19,10 +19,10 @@ This command will output something like:
|
|||||||
```
|
```
|
||||||
=======================================
|
=======================================
|
||||||
Public Key:
|
Public Key:
|
||||||
{{VAPID_PUBLIC_KEY}}
|
BEl62iUYgUivxIkvpY5kXK3t3b9i5X8YzA1B2C3D4E5F6G7H8I9J0K1L2M3N4O5P6Q7R8S9T0U1V2W3X4Y5Z6
|
||||||
|
|
||||||
Private Key:
|
Private Key:
|
||||||
{{VAPID_PRIVATE_KEY}}
|
aBcDeFgHiJkLmNoPqRsTuVwXyZ1234567890AbCdEfGhIjKlMnOpQrStUvWxYz
|
||||||
|
|
||||||
=======================================
|
=======================================
|
||||||
```
|
```
|
||||||
@ -59,9 +59,9 @@ Add the generated keys to your backend `.env` file:
|
|||||||
|
|
||||||
```env
|
```env
|
||||||
# Notification Service Worker credentials (Web Push / VAPID)
|
# Notification Service Worker credentials (Web Push / VAPID)
|
||||||
VAPID_PUBLIC_KEY={{VAPID_PUBLIC_KEY}}
|
VAPID_PUBLIC_KEY=BEl62iUYgUivxIkvpY5kXK3t3b9i5X8YzA1B2C3D4E5F6G7H8I9J0K1L2M3N4O5P6Q7R8S9T0U1V2W3X4Y5Z6
|
||||||
VAPID_PRIVATE_KEY={{VAPID_PRIVATE_KEY}}
|
VAPID_PRIVATE_KEY=aBcDeFgHiJkLmNoPqRsTuVwXyZ1234567890AbCdEfGhIjKlMnOpQrStUvWxYz
|
||||||
VAPID_CONTACT=mailto:{{ADMIN_EMAIL}}
|
VAPID_CONTACT=mailto:admin@royalenfield.com
|
||||||
```
|
```
|
||||||
|
|
||||||
**Important Notes:**
|
**Important Notes:**
|
||||||
@ -75,7 +75,7 @@ Add the **SAME** `VAPID_PUBLIC_KEY` to your frontend `.env` file:
|
|||||||
|
|
||||||
```env
|
```env
|
||||||
# Push Notifications (Web Push / VAPID)
|
# Push Notifications (Web Push / VAPID)
|
||||||
VITE_PUBLIC_VAPID_KEY={{VAPID_PUBLIC_KEY}}
|
VITE_PUBLIC_VAPID_KEY=BEl62iUYgUivxIkvpY5kXK3t3b9i5X8YzA1B2C3D4E5F6G7H8I9J0K1L2M3N4O5P6Q7R8S9T0U1V2W3X4Y5Z6
|
||||||
```
|
```
|
||||||
|
|
||||||
**Important:**
|
**Important:**
|
||||||
|
|||||||
@ -98,7 +98,7 @@ npm run dev
|
|||||||
1. Server will start automatically
|
1. Server will start automatically
|
||||||
2. Log in via SSO
|
2. Log in via SSO
|
||||||
3. Run this SQL to make yourself admin:
|
3. Run this SQL to make yourself admin:
|
||||||
UPDATE users SET role = 'ADMIN' WHERE email = 'your-email@{{APP_DOMAIN}}';
|
UPDATE users SET role = 'ADMIN' WHERE email = 'your-email@royalenfield.com';
|
||||||
|
|
||||||
[Config Seed] ✅ Default configurations seeded successfully (30 settings)
|
[Config Seed] ✅ Default configurations seeded successfully (30 settings)
|
||||||
info: ✅ Server started successfully on port 5000
|
info: ✅ Server started successfully on port 5000
|
||||||
@ -112,7 +112,7 @@ psql -d royal_enfield_workflow
|
|||||||
|
|
||||||
UPDATE users
|
UPDATE users
|
||||||
SET role = 'ADMIN'
|
SET role = 'ADMIN'
|
||||||
WHERE email = 'your-email@{{APP_DOMAIN}}';
|
WHERE email = 'your-email@royalenfield.com';
|
||||||
|
|
||||||
\q
|
\q
|
||||||
```
|
```
|
||||||
|
|||||||
@ -471,7 +471,7 @@ The backend supports web push notifications via VAPID (Voluntary Application Ser
|
|||||||
```
|
```
|
||||||
VAPID_PUBLIC_KEY=<your-public-key>
|
VAPID_PUBLIC_KEY=<your-public-key>
|
||||||
VAPID_PRIVATE_KEY=<your-private-key>
|
VAPID_PRIVATE_KEY=<your-private-key>
|
||||||
VAPID_CONTACT=mailto:admin@{{APP_DOMAIN}}
|
VAPID_CONTACT=mailto:admin@royalenfield.com
|
||||||
```
|
```
|
||||||
|
|
||||||
3. **Add to Frontend `.env`:**
|
3. **Add to Frontend `.env`:**
|
||||||
|
|||||||
File diff suppressed because it is too large
Load Diff
File diff suppressed because one or more lines are too long
1
build/assets/charts-vendor-Cji9-Yri.js.map
Normal file
1
build/assets/charts-vendor-Cji9-Yri.js.map
Normal file
File diff suppressed because one or more lines are too long
@ -1 +0,0 @@
|
|||||||
import{a as s}from"./index-CULgQ-8S.js";import"./radix-vendor-CYvDqP9X.js";import"./charts-vendor-BVfwAPj-.js";import"./utils-vendor-BTBPSQfW.js";import"./ui-vendor-CX5oLBI_.js";import"./socket-vendor-TjCxX7sJ.js";import"./redux-vendor-tbZCm13o.js";import"./router-vendor-B_rK4TXr.js";async function m(n){return(await s.post(`/conclusions/${n}/generate`)).data.data}async function f(n,t){return(await s.post(`/conclusions/${n}/finalize`,{finalRemark:t})).data.data}async function d(n){var t;try{return(await s.get(`/conclusions/${n}`)).data.data}catch(o){if(((t=o.response)==null?void 0:t.status)===404)return null;throw o}}export{f as finalizeConclusion,m as generateConclusion,d as getConclusion};
|
|
||||||
2
build/assets/conclusionApi-Dx0VmMvk.js
Normal file
2
build/assets/conclusionApi-Dx0VmMvk.js
Normal file
@ -0,0 +1,2 @@
|
|||||||
|
import{a as t}from"./index-DtEUJDeH.js";import"./radix-vendor-DA0cB_hD.js";import"./charts-vendor-Cji9-Yri.js";import"./utils-vendor-DHm03ykU.js";import"./ui-vendor-BPwaxA-i.js";import"./socket-vendor-TjCxX7sJ.js";import"./redux-vendor-tbZCm13o.js";import"./router-vendor-CRr9x_Jp.js";async function m(n){return(await t.post(`/conclusions/${n}/generate`)).data.data}async function d(n,o){return(await t.post(`/conclusions/${n}/finalize`,{finalRemark:o})).data.data}async function f(n){return(await t.get(`/conclusions/${n}`)).data.data}export{d as finalizeConclusion,m as generateConclusion,f as getConclusion};
|
||||||
|
//# sourceMappingURL=conclusionApi-Dx0VmMvk.js.map
|
||||||
1
build/assets/conclusionApi-Dx0VmMvk.js.map
Normal file
1
build/assets/conclusionApi-Dx0VmMvk.js.map
Normal file
@ -0,0 +1 @@
|
|||||||
|
{"version":3,"file":"conclusionApi-Dx0VmMvk.js","sources":["../../src/services/conclusionApi.ts"],"sourcesContent":["import apiClient from './authApi';\r\n\r\nexport interface ConclusionRemark {\r\n conclusionId: string;\r\n requestId: string;\r\n aiGeneratedRemark: string | null;\r\n aiModelUsed: string | null;\r\n aiConfidenceScore: number | null;\r\n finalRemark: string | null;\r\n editedBy: string | null;\r\n isEdited: boolean;\r\n editCount: number;\r\n approvalSummary: any;\r\n documentSummary: any;\r\n keyDiscussionPoints: string[];\r\n generatedAt: string | null;\r\n finalizedAt: string | null;\r\n createdAt: string;\r\n updatedAt: string;\r\n}\r\n\r\n/**\r\n * Generate AI-powered conclusion remark\r\n */\r\nexport async function generateConclusion(requestId: string): Promise<{\r\n conclusionId: string;\r\n aiGeneratedRemark: string;\r\n keyDiscussionPoints: string[];\r\n confidence: number;\r\n generatedAt: string;\r\n}> {\r\n const response = await apiClient.post(`/conclusions/${requestId}/generate`);\r\n return response.data.data;\r\n}\r\n\r\n/**\r\n * Update conclusion remark (edit by initiator)\r\n */\r\nexport async function updateConclusion(requestId: string, finalRemark: string): Promise<ConclusionRemark> {\r\n const response = await apiClient.put(`/conclusions/${requestId}`, { finalRemark });\r\n return response.data.data;\r\n}\r\n\r\n/**\r\n * Finalize conclusion and close request\r\n */\r\nexport async function finalizeConclusion(requestId: string, finalRemark: string): Promise<{\r\n conclusionId: string;\r\n requestNumber: string;\r\n status: string;\r\n finalRemark: string;\r\n finalizedAt: string;\r\n}> {\r\n const response = await apiClient.post(`/conclusions/${requestId}/finalize`, { finalRemark });\r\n return response.data.data;\r\n}\r\n\r\n/**\r\n * Get conclusion for a request\r\n */\r\nexport async function getConclusion(requestId: string): Promise<ConclusionRemark> {\r\n const response = await 
apiClient.get(`/conclusions/${requestId}`);\r\n return response.data.data;\r\n}\r\n\r\n"],"names":["generateConclusion","requestId","apiClient","finalizeConclusion","finalRemark","getConclusion"],"mappings":"6RAwBA,eAAsBA,EAAmBC,EAMtC,CAED,OADiB,MAAMC,EAAU,KAAK,gBAAgBD,CAAS,WAAW,GAC1D,KAAK,IACvB,CAaA,eAAsBE,EAAmBF,EAAmBG,EAMzD,CAED,OADiB,MAAMF,EAAU,KAAK,gBAAgBD,CAAS,YAAa,CAAE,YAAAG,EAAa,GAC3E,KAAK,IACvB,CAKA,eAAsBC,EAAcJ,EAA8C,CAEhF,OADiB,MAAMC,EAAU,IAAI,gBAAgBD,CAAS,EAAE,GAChD,KAAK,IACvB"}
|
||||||
File diff suppressed because one or more lines are too long
70
build/assets/index-DtEUJDeH.js
Normal file
70
build/assets/index-DtEUJDeH.js
Normal file
File diff suppressed because one or more lines are too long
1
build/assets/index-DtEUJDeH.js.map
Normal file
1
build/assets/index-DtEUJDeH.js.map
Normal file
File diff suppressed because one or more lines are too long
1
build/assets/index-P-Le9vHs.css
Normal file
1
build/assets/index-P-Le9vHs.css
Normal file
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
Binary file not shown.
|
Before Width: | Height: | Size: 1.1 MiB |
File diff suppressed because one or more lines are too long
1
build/assets/radix-vendor-DA0cB_hD.js.map
Normal file
1
build/assets/radix-vendor-DA0cB_hD.js.map
Normal file
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
1
build/assets/redux-vendor-tbZCm13o.js.map
Normal file
1
build/assets/redux-vendor-tbZCm13o.js.map
Normal file
File diff suppressed because one or more lines are too long
2
build/assets/requestNavigation-DjrXcYns.js
Normal file
2
build/assets/requestNavigation-DjrXcYns.js
Normal file
@ -0,0 +1,2 @@
|
|||||||
|
import{g as s}from"./index-DtEUJDeH.js";import"./radix-vendor-DA0cB_hD.js";import"./charts-vendor-Cji9-Yri.js";import"./utils-vendor-DHm03ykU.js";import"./ui-vendor-BPwaxA-i.js";import"./socket-vendor-TjCxX7sJ.js";import"./redux-vendor-tbZCm13o.js";import"./router-vendor-CRr9x_Jp.js";function R(o){const{requestId:e,status:t,request:a,navigate:r}=o;if((t==null?void 0:t.toLowerCase())==="draft"||t==="DRAFT"){r(`/edit-request/${e}`);return}const i=s(e);r(i)}export{R as navigateToRequest};
|
||||||
|
//# sourceMappingURL=requestNavigation-DjrXcYns.js.map
|
||||||
1
build/assets/requestNavigation-DjrXcYns.js.map
Normal file
1
build/assets/requestNavigation-DjrXcYns.js.map
Normal file
@ -0,0 +1 @@
|
|||||||
|
{"version":3,"file":"requestNavigation-DjrXcYns.js","sources":["../../src/utils/requestNavigation.ts"],"sourcesContent":["/**\r\n * Global Request Navigation Utility\r\n * \r\n * Centralized navigation logic for request-related routes.\r\n * This utility decides where to navigate when clicking on request cards\r\n * from anywhere in the application.\r\n * \r\n * Features:\r\n * - Single point of navigation logic\r\n * - Handles draft vs active requests\r\n * - Supports different flow types (CUSTOM, DEALER_CLAIM)\r\n * - Type-safe navigation\r\n */\r\n\r\nimport { NavigateFunction } from 'react-router-dom';\r\nimport { getRequestDetailRoute, RequestFlowType } from './requestTypeUtils';\r\n\r\nexport interface RequestNavigationOptions {\r\n requestId: string;\r\n requestTitle?: string;\r\n status?: string;\r\n request?: any; // Full request object if available\r\n navigate: NavigateFunction;\r\n}\r\n\r\n/**\r\n * Navigate to the appropriate request detail page based on request type\r\n * \r\n * This is the single point of navigation for all request cards.\r\n * It handles:\r\n * - Draft requests (navigate to edit)\r\n * - Different flow types (CUSTOM, DEALER_CLAIM)\r\n * - Status-based routing\r\n */\r\nexport function navigateToRequest(options: RequestNavigationOptions): void {\r\n const { requestId, status, request, navigate } = options;\r\n\r\n // Check if request is a draft - if so, route to edit form instead of detail view\r\n const isDraft = status?.toLowerCase() === 'draft' || status === 'DRAFT';\r\n if (isDraft) {\r\n navigate(`/edit-request/${requestId}`);\r\n return;\r\n }\r\n\r\n // Determine the appropriate route based on request type\r\n const route = getRequestDetailRoute(requestId, request);\r\n navigate(route);\r\n}\r\n\r\n/**\r\n * Navigate to create a new request based on flow type\r\n */\r\nexport function navigateToCreateRequest(\r\n navigate: NavigateFunction,\r\n flowType: RequestFlowType = 'CUSTOM'\r\n): void {\r\n const route = flowType === 
'DEALER_CLAIM' \r\n ? '/claim-management' \r\n : '/new-request';\r\n navigate(route);\r\n}\r\n\r\n/**\r\n * Create a navigation handler function for request cards\r\n * This can be used directly in onClick handlers\r\n */\r\nexport function createRequestNavigationHandler(\r\n navigate: NavigateFunction\r\n) {\r\n return (requestId: string, requestTitle?: string, status?: string, request?: any) => {\r\n navigateToRequest({\r\n requestId,\r\n requestTitle,\r\n status,\r\n request,\r\n navigate,\r\n });\r\n };\r\n}\r\n"],"names":["navigateToRequest","options","requestId","status","request","navigate","route","getRequestDetailRoute"],"mappings":"6RAkCO,SAASA,EAAkBC,EAAyC,CACzE,KAAM,CAAE,UAAAC,EAAW,OAAAC,EAAQ,QAAAC,EAAS,SAAAC,GAAaJ,EAIjD,IADgBE,GAAA,YAAAA,EAAQ,iBAAkB,SAAWA,IAAW,QACnD,CACXE,EAAS,iBAAiBH,CAAS,EAAE,EACrC,MACF,CAGA,MAAMI,EAAQC,EAAsBL,CAAkB,EACtDG,EAASC,CAAK,CAChB"}
|
||||||
File diff suppressed because one or more lines are too long
13
build/assets/router-vendor-CRr9x_Jp.js
Normal file
13
build/assets/router-vendor-CRr9x_Jp.js
Normal file
File diff suppressed because one or more lines are too long
1
build/assets/router-vendor-CRr9x_Jp.js.map
Normal file
1
build/assets/router-vendor-CRr9x_Jp.js.map
Normal file
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
1
build/assets/socket-vendor-TjCxX7sJ.js.map
Normal file
1
build/assets/socket-vendor-TjCxX7sJ.js.map
Normal file
File diff suppressed because one or more lines are too long
608
build/assets/ui-vendor-BPwaxA-i.js
Normal file
608
build/assets/ui-vendor-BPwaxA-i.js
Normal file
File diff suppressed because one or more lines are too long
1
build/assets/ui-vendor-BPwaxA-i.js.map
Normal file
1
build/assets/ui-vendor-BPwaxA-i.js.map
Normal file
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
7
build/assets/utils-vendor-DHm03ykU.js
Normal file
7
build/assets/utils-vendor-DHm03ykU.js
Normal file
File diff suppressed because one or more lines are too long
1
build/assets/utils-vendor-DHm03ykU.js.map
Normal file
1
build/assets/utils-vendor-DHm03ykU.js.map
Normal file
File diff suppressed because one or more lines are too long
@ -1,31 +1,69 @@
|
|||||||
<!DOCTYPE html>
|
<!DOCTYPE html>
|
||||||
<html lang="en">
|
<html lang="en">
|
||||||
|
|
||||||
<head>
|
<head>
|
||||||
<meta charset="UTF-8" />
|
<meta charset="UTF-8" />
|
||||||
|
<!-- CSP: Allows blob URLs for file previews and cross-origin API calls during development -->
|
||||||
|
<meta http-equiv="Content-Security-Policy" content="default-src 'self' blob:; style-src 'self' 'unsafe-inline' https://fonts.googleapis.com; script-src 'self'; img-src 'self' data: https: blob:; connect-src 'self' blob: data: http://localhost:5000 http://localhost:3000 ws://localhost:5000 ws://localhost:3000 wss://localhost:5000 wss://localhost:3000; frame-src 'self' blob:; font-src 'self' https://fonts.gstatic.com data:; object-src 'none'; base-uri 'self'; form-action 'self';" />
|
||||||
<link rel="icon" type="image/svg+xml" href="/royal_enfield_logo.svg" />
|
<link rel="icon" type="image/svg+xml" href="/royal_enfield_logo.svg" />
|
||||||
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
|
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
|
||||||
<meta name="description"
|
<meta name="description" content="Royal Enfield Approval & Request Management Portal - Streamlined approval workflows for enterprise operations" />
|
||||||
content="Royal Enfield Approval & Request Management Portal - Streamlined approval workflows for enterprise operations" />
|
|
||||||
<meta name="theme-color" content="#2d4a3e" />
|
<meta name="theme-color" content="#2d4a3e" />
|
||||||
<title>Royal Enfield | Approval Portal</title>
|
<title>Royal Enfield | Approval Portal</title>
|
||||||
|
|
||||||
<!-- Preload essential fonts and icons -->
|
<!-- Preload critical fonts and icons -->
|
||||||
<link rel="preconnect" href="https://fonts.googleapis.com">
|
<link rel="preconnect" href="https://fonts.googleapis.com">
|
||||||
<link rel="preconnect" href="https://fonts.gstatic.com" crossorigin>
|
<link rel="preconnect" href="https://fonts.gstatic.com" crossorigin>
|
||||||
<script type="module" crossorigin src="/assets/index-CULgQ-8S.js"></script>
|
|
||||||
<link rel="modulepreload" crossorigin href="/assets/charts-vendor-BVfwAPj-.js">
|
<!-- Ensure proper icon rendering and layout -->
|
||||||
<link rel="modulepreload" crossorigin href="/assets/radix-vendor-CYvDqP9X.js">
|
<style>
|
||||||
<link rel="modulepreload" crossorigin href="/assets/utils-vendor-BTBPSQfW.js">
|
/* Ensure Lucide icons render properly */
|
||||||
<link rel="modulepreload" crossorigin href="/assets/ui-vendor-CX5oLBI_.js">
|
svg {
|
||||||
|
display: inline-block;
|
||||||
|
vertical-align: middle;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Fix for icon alignment in buttons */
|
||||||
|
button svg {
|
||||||
|
flex-shrink: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Ensure proper text rendering */
|
||||||
|
body {
|
||||||
|
-webkit-font-smoothing: antialiased;
|
||||||
|
-moz-osx-font-smoothing: grayscale;
|
||||||
|
text-rendering: optimizeLegibility;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Fix for mobile viewport and sidebar */
|
||||||
|
@media (max-width: 768px) {
|
||||||
|
html {
|
||||||
|
overflow-x: hidden;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Ensure proper sidebar toggle behavior */
|
||||||
|
.sidebar-toggle {
|
||||||
|
transition: all 0.3s ease-in-out;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Fix for icon button hover states */
|
||||||
|
button:hover svg {
|
||||||
|
transform: scale(1.05);
|
||||||
|
transition: transform 0.2s ease;
|
||||||
|
}
|
||||||
|
</style>
|
||||||
|
<script type="module" crossorigin src="/assets/index-DtEUJDeH.js"></script>
|
||||||
|
<link rel="modulepreload" crossorigin href="/assets/charts-vendor-Cji9-Yri.js">
|
||||||
|
<link rel="modulepreload" crossorigin href="/assets/radix-vendor-DA0cB_hD.js">
|
||||||
|
<link rel="modulepreload" crossorigin href="/assets/utils-vendor-DHm03ykU.js">
|
||||||
|
<link rel="modulepreload" crossorigin href="/assets/ui-vendor-BPwaxA-i.js">
|
||||||
<link rel="modulepreload" crossorigin href="/assets/socket-vendor-TjCxX7sJ.js">
|
<link rel="modulepreload" crossorigin href="/assets/socket-vendor-TjCxX7sJ.js">
|
||||||
<link rel="modulepreload" crossorigin href="/assets/redux-vendor-tbZCm13o.js">
|
<link rel="modulepreload" crossorigin href="/assets/redux-vendor-tbZCm13o.js">
|
||||||
<link rel="modulepreload" crossorigin href="/assets/router-vendor-B_rK4TXr.js">
|
<link rel="modulepreload" crossorigin href="/assets/router-vendor-CRr9x_Jp.js">
|
||||||
<link rel="stylesheet" crossorigin href="/assets/index-XBJXaMj2.css">
|
<link rel="stylesheet" crossorigin href="/assets/index-P-Le9vHs.css">
|
||||||
</head>
|
</head>
|
||||||
|
|
||||||
<body>
|
<body>
|
||||||
<div id="root"></div>
|
<div id="root"></div>
|
||||||
</body>
|
</body>
|
||||||
|
|
||||||
</html>
|
</html>
|
||||||
|
|
||||||
|
|||||||
@ -1,4 +0,0 @@
|
|||||||
User-agent: *
|
|
||||||
Disallow: /api/
|
|
||||||
|
|
||||||
Sitemap: https://reflow.royalenfield.com/sitemap.xml
|
|
||||||
@ -1,9 +0,0 @@
|
|||||||
<?xml version="1.0" encoding="UTF-8"?>
|
|
||||||
<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">
|
|
||||||
<url>
|
|
||||||
<loc>https://reflow.royalenfield.com</loc>
|
|
||||||
<lastmod>2024-03-20T12:00:00+00:00</lastmod>
|
|
||||||
<changefreq>daily</changefreq>
|
|
||||||
<priority>1.0</priority>
|
|
||||||
</url>
|
|
||||||
</urlset>
|
|
||||||
@ -1,8 +1,39 @@
|
|||||||
# docker-compose.full.yml
|
# =============================================================================
|
||||||
# Synced with streamlined infrastructure
|
# RE Workflow - Full Stack Docker Compose
|
||||||
|
# Includes: Application + Database + Monitoring Stack
|
||||||
|
# =============================================================================
|
||||||
|
# Usage:
|
||||||
|
# docker-compose -f docker-compose.full.yml up -d
|
||||||
|
# =============================================================================
|
||||||
|
|
||||||
version: '3.8'
|
version: '3.8'
|
||||||
|
|
||||||
services:
|
services:
|
||||||
|
# ===========================================================================
|
||||||
|
# APPLICATION SERVICES
|
||||||
|
# ===========================================================================
|
||||||
|
|
||||||
|
postgres:
|
||||||
|
image: postgres:16-alpine
|
||||||
|
container_name: re_workflow_db
|
||||||
|
environment:
|
||||||
|
POSTGRES_USER: ${DB_USER:-laxman}
|
||||||
|
POSTGRES_PASSWORD: ${DB_PASSWORD:-Admin@123}
|
||||||
|
POSTGRES_DB: ${DB_NAME:-re_workflow_db}
|
||||||
|
ports:
|
||||||
|
- "5432:5432"
|
||||||
|
volumes:
|
||||||
|
- postgres_data:/var/lib/postgresql/data
|
||||||
|
- ./database/schema:/docker-entrypoint-initdb.d
|
||||||
|
networks:
|
||||||
|
- re_workflow_network
|
||||||
|
restart: unless-stopped
|
||||||
|
healthcheck:
|
||||||
|
test: ["CMD-SHELL", "pg_isready -U ${DB_USER:-laxman}"]
|
||||||
|
interval: 10s
|
||||||
|
timeout: 5s
|
||||||
|
retries: 5
|
||||||
|
|
||||||
redis:
|
redis:
|
||||||
image: redis:7-alpine
|
image: redis:7-alpine
|
||||||
container_name: re_workflow_redis
|
container_name: re_workflow_redis
|
||||||
@ -19,24 +50,70 @@ services:
|
|||||||
timeout: 5s
|
timeout: 5s
|
||||||
retries: 5
|
retries: 5
|
||||||
|
|
||||||
clamav:
|
backend:
|
||||||
image: clamav/clamav:latest
|
build:
|
||||||
container_name: re_clamav
|
context: .
|
||||||
ports:
|
dockerfile: Dockerfile
|
||||||
- "3310:3310"
|
container_name: re_workflow_backend
|
||||||
volumes:
|
|
||||||
- clamav_data:/var/lib/clamav
|
|
||||||
environment:
|
environment:
|
||||||
- CLAMAV_NO_FRESHCLAMD=false
|
NODE_ENV: development
|
||||||
healthcheck:
|
DB_HOST: postgres
|
||||||
test: ["CMD", "clamdcheck"]
|
DB_PORT: 5432
|
||||||
interval: 60s
|
DB_USER: ${DB_USER:-laxman}
|
||||||
timeout: 10s
|
DB_PASSWORD: ${DB_PASSWORD:-Admin@123}
|
||||||
retries: 5
|
DB_NAME: ${DB_NAME:-re_workflow_db}
|
||||||
start_period: 120s
|
REDIS_URL: redis://redis:6379
|
||||||
restart: unless-stopped
|
PORT: 5000
|
||||||
|
# Loki for logging
|
||||||
|
LOKI_HOST: http://loki:3100
|
||||||
|
ports:
|
||||||
|
- "5000:5000"
|
||||||
|
depends_on:
|
||||||
|
postgres:
|
||||||
|
condition: service_healthy
|
||||||
|
redis:
|
||||||
|
condition: service_healthy
|
||||||
|
volumes:
|
||||||
|
- ./logs:/app/logs
|
||||||
|
- ./uploads:/app/uploads
|
||||||
networks:
|
networks:
|
||||||
- re_workflow_network
|
- re_workflow_network
|
||||||
|
restart: unless-stopped
|
||||||
|
healthcheck:
|
||||||
|
test: ["CMD-SHELL", "node -e \"require('http').get('http://localhost:5000/health', (r) => {process.exit(r.statusCode === 200 ? 0 : 1)})\""]
|
||||||
|
interval: 30s
|
||||||
|
timeout: 10s
|
||||||
|
retries: 3
|
||||||
|
start_period: 40s
|
||||||
|
|
||||||
|
# ===========================================================================
|
||||||
|
# MONITORING SERVICES
|
||||||
|
# ===========================================================================
|
||||||
|
|
||||||
|
prometheus:
|
||||||
|
image: prom/prometheus:v2.47.2
|
||||||
|
container_name: re_prometheus
|
||||||
|
ports:
|
||||||
|
- "9090:9090"
|
||||||
|
volumes:
|
||||||
|
- ./monitoring/prometheus/prometheus.yml:/etc/prometheus/prometheus.yml:ro
|
||||||
|
- ./monitoring/prometheus/alert.rules.yml:/etc/prometheus/alert.rules.yml:ro
|
||||||
|
- prometheus_data:/prometheus
|
||||||
|
command:
|
||||||
|
- '--config.file=/etc/prometheus/prometheus.yml'
|
||||||
|
- '--storage.tsdb.path=/prometheus'
|
||||||
|
- '--storage.tsdb.retention.time=15d'
|
||||||
|
- '--web.console.libraries=/usr/share/prometheus/console_libraries'
|
||||||
|
- '--web.console.templates=/usr/share/prometheus/consoles'
|
||||||
|
- '--web.enable-lifecycle'
|
||||||
|
networks:
|
||||||
|
- re_workflow_network
|
||||||
|
restart: unless-stopped
|
||||||
|
healthcheck:
|
||||||
|
test: ["CMD", "wget", "-q", "--spider", "http://localhost:9090/-/healthy"]
|
||||||
|
interval: 30s
|
||||||
|
timeout: 10s
|
||||||
|
retries: 3
|
||||||
|
|
||||||
loki:
|
loki:
|
||||||
image: grafana/loki:2.9.2
|
image: grafana/loki:2.9.2
|
||||||
@ -79,12 +156,15 @@ services:
|
|||||||
- GF_SECURITY_ADMIN_USER=admin
|
- GF_SECURITY_ADMIN_USER=admin
|
||||||
- GF_SECURITY_ADMIN_PASSWORD=REWorkflow@2024
|
- GF_SECURITY_ADMIN_PASSWORD=REWorkflow@2024
|
||||||
- GF_USERS_ALLOW_SIGN_UP=false
|
- GF_USERS_ALLOW_SIGN_UP=false
|
||||||
|
- GF_FEATURE_TOGGLES_ENABLE=publicDashboards
|
||||||
|
- GF_INSTALL_PLUGINS=grafana-clock-panel,grafana-simple-json-datasource,grafana-piechart-panel
|
||||||
volumes:
|
volumes:
|
||||||
- grafana_data:/var/lib/grafana
|
- grafana_data:/var/lib/grafana
|
||||||
- ./monitoring/grafana/provisioning/datasources:/etc/grafana/provisioning/datasources:ro
|
- ./monitoring/grafana/provisioning/datasources:/etc/grafana/provisioning/datasources:ro
|
||||||
- ./monitoring/grafana/provisioning/dashboards:/etc/grafana/provisioning/dashboards:ro
|
- ./monitoring/grafana/provisioning/dashboards:/etc/grafana/provisioning/dashboards:ro
|
||||||
- ./monitoring/grafana/dashboards:/var/lib/grafana/dashboards:ro
|
- ./monitoring/grafana/dashboards:/var/lib/grafana/dashboards:ro
|
||||||
depends_on:
|
depends_on:
|
||||||
|
- prometheus
|
||||||
- loki
|
- loki
|
||||||
networks:
|
networks:
|
||||||
- re_workflow_network
|
- re_workflow_network
|
||||||
@ -95,13 +175,54 @@ services:
|
|||||||
timeout: 10s
|
timeout: 10s
|
||||||
retries: 3
|
retries: 3
|
||||||
|
|
||||||
volumes:
|
node-exporter:
|
||||||
redis_data:
|
image: prom/node-exporter:v1.6.1
|
||||||
clamav_data:
|
container_name: re_node_exporter
|
||||||
loki_data:
|
ports:
|
||||||
promtail_data:
|
- "9100:9100"
|
||||||
grafana_data:
|
networks:
|
||||||
|
- re_workflow_network
|
||||||
|
restart: unless-stopped
|
||||||
|
|
||||||
|
alertmanager:
|
||||||
|
image: prom/alertmanager:v0.26.0
|
||||||
|
container_name: re_alertmanager
|
||||||
|
ports:
|
||||||
|
- "9093:9093"
|
||||||
|
volumes:
|
||||||
|
- ./monitoring/alertmanager/alertmanager.yml:/etc/alertmanager/alertmanager.yml:ro
|
||||||
|
- alertmanager_data:/alertmanager
|
||||||
|
command:
|
||||||
|
- '--config.file=/etc/alertmanager/alertmanager.yml'
|
||||||
|
- '--storage.path=/alertmanager'
|
||||||
|
networks:
|
||||||
|
- re_workflow_network
|
||||||
|
restart: unless-stopped
|
||||||
|
|
||||||
|
# ===========================================================================
|
||||||
|
# NETWORKS
|
||||||
|
# ===========================================================================
|
||||||
networks:
|
networks:
|
||||||
re_workflow_network:
|
re_workflow_network:
|
||||||
driver: bridge
|
driver: bridge
|
||||||
|
name: re_workflow_network
|
||||||
|
|
||||||
|
# ===========================================================================
|
||||||
|
# VOLUMES
|
||||||
|
# ===========================================================================
|
||||||
|
volumes:
|
||||||
|
postgres_data:
|
||||||
|
name: re_postgres_data
|
||||||
|
redis_data:
|
||||||
|
name: re_redis_data
|
||||||
|
prometheus_data:
|
||||||
|
name: re_prometheus_data
|
||||||
|
loki_data:
|
||||||
|
name: re_loki_data
|
||||||
|
promtail_data:
|
||||||
|
name: re_promtail_data
|
||||||
|
grafana_data:
|
||||||
|
name: re_grafana_data
|
||||||
|
alertmanager_data:
|
||||||
|
name: re_alertmanager_data
|
||||||
|
|
||||||
|
|||||||
@ -1,8 +1,28 @@
|
|||||||
# docker-compose.yml
|
# docker-compose.yml
|
||||||
# Streamlined infrastructure for local development
|
|
||||||
version: '3.8'
|
version: '3.8'
|
||||||
|
|
||||||
services:
|
services:
|
||||||
|
postgres:
|
||||||
|
image: postgres:16-alpine
|
||||||
|
container_name: re_workflow_db
|
||||||
|
environment:
|
||||||
|
POSTGRES_USER: ${DB_USER:-laxman}
|
||||||
|
POSTGRES_PASSWORD: ${DB_PASSWORD:-Admin@123}
|
||||||
|
POSTGRES_DB: ${DB_NAME:-re_workflow_db}
|
||||||
|
ports:
|
||||||
|
- "5432:5432"
|
||||||
|
volumes:
|
||||||
|
- postgres_data:/var/lib/postgresql/data
|
||||||
|
- ./database/schema:/docker-entrypoint-initdb.d
|
||||||
|
networks:
|
||||||
|
- re_workflow_network
|
||||||
|
restart: unless-stopped
|
||||||
|
healthcheck:
|
||||||
|
test: ["CMD-SHELL", "pg_isready -U ${DB_USER:-laxman}"]
|
||||||
|
interval: 10s
|
||||||
|
timeout: 5s
|
||||||
|
retries: 5
|
||||||
|
|
||||||
redis:
|
redis:
|
||||||
image: redis:7-alpine
|
image: redis:7-alpine
|
||||||
container_name: re_workflow_redis
|
container_name: re_workflow_redis
|
||||||
@ -19,88 +39,43 @@ services:
|
|||||||
timeout: 5s
|
timeout: 5s
|
||||||
retries: 5
|
retries: 5
|
||||||
|
|
||||||
clamav:
|
backend:
|
||||||
image: clamav/clamav:latest
|
build:
|
||||||
container_name: re_clamav
|
context: .
|
||||||
ports:
|
dockerfile: Dockerfile
|
||||||
- "3310:3310"
|
container_name: re_workflow_backend
|
||||||
volumes:
|
|
||||||
- clamav_data:/var/lib/clamav
|
|
||||||
environment:
|
environment:
|
||||||
- CLAMAV_NO_FRESHCLAMD=false
|
NODE_ENV: development
|
||||||
healthcheck:
|
DB_HOST: postgres
|
||||||
test: ["CMD", "clamdcheck"]
|
DB_PORT: 5432
|
||||||
interval: 60s
|
DB_USER: ${DB_USER:-laxman}
|
||||||
timeout: 10s
|
DB_PASSWORD: ${DB_PASSWORD:-Admin@123}
|
||||||
retries: 5
|
DB_NAME: ${DB_NAME:-re_workflow_db}
|
||||||
start_period: 120s
|
REDIS_URL: redis://redis:6379
|
||||||
restart: unless-stopped
|
PORT: 5000
|
||||||
networks:
|
|
||||||
- re_workflow_network
|
|
||||||
|
|
||||||
loki:
|
|
||||||
image: grafana/loki:2.9.2
|
|
||||||
container_name: re_loki
|
|
||||||
ports:
|
ports:
|
||||||
- "3100:3100"
|
- "5000:5000"
|
||||||
|
depends_on:
|
||||||
|
postgres:
|
||||||
|
condition: service_healthy
|
||||||
|
redis:
|
||||||
|
condition: service_healthy
|
||||||
volumes:
|
volumes:
|
||||||
- ./monitoring/loki/loki-config.yml:/etc/loki/local-config.yaml:ro
|
- ./logs:/app/logs
|
||||||
- loki_data:/loki
|
- ./uploads:/app/uploads
|
||||||
command: -config.file=/etc/loki/local-config.yaml
|
|
||||||
networks:
|
networks:
|
||||||
- re_workflow_network
|
- re_workflow_network
|
||||||
restart: unless-stopped
|
restart: unless-stopped
|
||||||
healthcheck:
|
healthcheck:
|
||||||
test: ["CMD-SHELL", "wget --no-verbose --tries=1 --spider http://localhost:3100/ready || exit 1"]
|
test: ["CMD-SHELL", "node -e \"require('http').get('http://localhost:5000/health', (r) => {process.exit(r.statusCode === 200 ? 0 : 1)})\""]
|
||||||
interval: 30s
|
|
||||||
timeout: 10s
|
|
||||||
retries: 5
|
|
||||||
|
|
||||||
promtail:
|
|
||||||
image: grafana/promtail:2.9.2
|
|
||||||
container_name: re_promtail
|
|
||||||
volumes:
|
|
||||||
- ./monitoring/promtail/promtail-config.yml:/etc/promtail/config.yml:ro
|
|
||||||
- ./logs:/var/log/app:ro
|
|
||||||
- promtail_data:/tmp/promtail
|
|
||||||
command: -config.file=/etc/promtail/config.yml
|
|
||||||
depends_on:
|
|
||||||
- loki
|
|
||||||
networks:
|
|
||||||
- re_workflow_network
|
|
||||||
restart: unless-stopped
|
|
||||||
|
|
||||||
grafana:
|
|
||||||
image: grafana/grafana:10.2.2
|
|
||||||
container_name: re_grafana
|
|
||||||
ports:
|
|
||||||
- "3001:3000"
|
|
||||||
environment:
|
|
||||||
- GF_SECURITY_ADMIN_USER=admin
|
|
||||||
- GF_SECURITY_ADMIN_PASSWORD=REWorkflow@2024
|
|
||||||
- GF_USERS_ALLOW_SIGN_UP=false
|
|
||||||
volumes:
|
|
||||||
- grafana_data:/var/lib/grafana
|
|
||||||
- ./monitoring/grafana/provisioning/datasources:/etc/grafana/provisioning/datasources:ro
|
|
||||||
- ./monitoring/grafana/provisioning/dashboards:/etc/grafana/provisioning/dashboards:ro
|
|
||||||
- ./monitoring/grafana/dashboards:/var/lib/grafana/dashboards:ro
|
|
||||||
depends_on:
|
|
||||||
- loki
|
|
||||||
networks:
|
|
||||||
- re_workflow_network
|
|
||||||
restart: unless-stopped
|
|
||||||
healthcheck:
|
|
||||||
test: ["CMD-SHELL", "wget --no-verbose --tries=1 --spider http://localhost:3000/api/health || exit 1"]
|
|
||||||
interval: 30s
|
interval: 30s
|
||||||
timeout: 10s
|
timeout: 10s
|
||||||
retries: 3
|
retries: 3
|
||||||
|
start_period: 40s
|
||||||
|
|
||||||
volumes:
|
volumes:
|
||||||
|
postgres_data:
|
||||||
redis_data:
|
redis_data:
|
||||||
clamav_data:
|
|
||||||
loki_data:
|
|
||||||
promtail_data:
|
|
||||||
grafana_data:
|
|
||||||
|
|
||||||
networks:
|
networks:
|
||||||
re_workflow_network:
|
re_workflow_network:
|
||||||
|
|||||||
@ -15,16 +15,15 @@
|
|||||||
|
|
||||||
## Overview
|
## Overview
|
||||||
|
|
||||||
The AI Conclusion Remark Generation feature automatically generates professional, context-aware conclusion remarks for workflow requests that have been approved or rejected. This feature uses **Google Cloud Vertex AI Gemini** to analyze the entire request lifecycle and create a comprehensive summary suitable for permanent archiving.
|
The AI Conclusion Remark Generation feature automatically generates professional, context-aware conclusion remarks for workflow requests that have been approved or rejected. This feature uses AI providers (Claude, OpenAI, or Gemini) to analyze the entire request lifecycle and create a comprehensive summary suitable for permanent archiving.
|
||||||
|
|
||||||
### Key Features
|
### Key Features
|
||||||
- **Vertex AI Integration**: Uses Google Cloud Vertex AI Gemini with service account authentication
|
- **Multi-Provider Support**: Supports Claude (Anthropic), OpenAI (GPT-4), and Google Gemini
|
||||||
- **Context-Aware**: Analyzes approval flow, work notes, documents, and activities
|
- **Context-Aware**: Analyzes approval flow, work notes, documents, and activities
|
||||||
- **Configurable**: Admin-configurable max length, model selection, and enable/disable
|
- **Configurable**: Admin-configurable max length, provider selection, and enable/disable
|
||||||
- **Automatic Generation**: Can be triggered automatically when a request is approved/rejected
|
- **Automatic Generation**: Can be triggered automatically when a request is approved/rejected
|
||||||
- **Manual Generation**: Users can regenerate conclusions on demand
|
- **Manual Generation**: Users can regenerate conclusions on demand
|
||||||
- **Editable**: Generated remarks can be edited before finalization
|
- **Editable**: Generated remarks can be edited before finalization
|
||||||
- **Enterprise Security**: Uses same service account credentials as Google Cloud Storage
|
|
||||||
|
|
||||||
### Use Cases
|
### Use Cases
|
||||||
1. **Automatic Generation**: When the final approver approves/rejects a request, an AI conclusion is generated in the background
|
1. **Automatic Generation**: When the final approver approves/rejects a request, an AI conclusion is generated in the background
|
||||||
@ -75,10 +74,10 @@ The AI Conclusion Remark Generation feature automatically generates professional
|
|||||||
│ │ │
|
│ │ │
|
||||||
│ ▼ │
|
│ ▼ │
|
||||||
│ ┌──────────────────────────────────────────────────────┐ │
|
│ ┌──────────────────────────────────────────────────────┐ │
|
||||||
│ │ Vertex AI Gemini (Google Cloud) │ │
|
│ │ AI Providers (Claude/OpenAI/Gemini) │ │
|
||||||
│ │ - VertexAI Client │ │
|
│ │ - ClaudeProvider │ │
|
||||||
│ │ - Service Account Authentication │ │
|
│ │ - OpenAIProvider │ │
|
||||||
│ │ - Gemini Models (gemini-2.5-flash, etc.) │ │
|
│ │ - GeminiProvider │ │
|
||||||
│ └──────────────────────────────────────────────────────┘ │
|
│ └──────────────────────────────────────────────────────┘ │
|
||||||
│ │ │
|
│ │ │
|
||||||
│ ▼ │
|
│ ▼ │
|
||||||
@ -115,18 +114,22 @@ The AI Conclusion Remark Generation feature automatically generates professional
|
|||||||
### Environment Variables
|
### Environment Variables
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
# Google Cloud Configuration (required - same as GCS)
|
# AI Provider Selection (claude, openai, gemini)
|
||||||
GCP_PROJECT_ID=re-platform-workflow-dealer
|
AI_PROVIDER=claude
|
||||||
GCP_KEY_FILE=./credentials/re-platform-workflow-dealer-3d5738fcc1f9.json
|
|
||||||
|
|
||||||
# Vertex AI Configuration (optional - defaults provided)
|
# Claude Configuration
|
||||||
VERTEX_AI_MODEL=gemini-2.5-flash
|
CLAUDE_API_KEY=your_claude_api_key
|
||||||
VERTEX_AI_LOCATION=asia-south1
|
CLAUDE_MODEL=claude-sonnet-4-20250514
|
||||||
AI_ENABLED=true
|
|
||||||
|
# OpenAI Configuration
|
||||||
|
OPENAI_API_KEY=your_openai_api_key
|
||||||
|
OPENAI_MODEL=gpt-4o
|
||||||
|
|
||||||
|
# Gemini Configuration
|
||||||
|
GEMINI_API_KEY=your_gemini_api_key
|
||||||
|
GEMINI_MODEL=gemini-2.0-flash-lite
|
||||||
```
|
```
|
||||||
|
|
||||||
**Note**: The service account key file is the same one used for Google Cloud Storage, ensuring consistent authentication across services.
|
|
||||||
|
|
||||||
### Admin Configuration (Database)
|
### Admin Configuration (Database)
|
||||||
|
|
||||||
The system reads configuration from the `system_config` table. Key settings:
|
The system reads configuration from the `system_config` table. Key settings:
|
||||||
@ -135,29 +138,21 @@ The system reads configuration from the `system_config` table. Key settings:
|
|||||||
|------------|---------|-------------|
|
|------------|---------|-------------|
|
||||||
| `AI_ENABLED` | `true` | Enable/disable all AI features |
|
| `AI_ENABLED` | `true` | Enable/disable all AI features |
|
||||||
| `AI_REMARK_GENERATION_ENABLED` | `true` | Enable/disable conclusion generation |
|
| `AI_REMARK_GENERATION_ENABLED` | `true` | Enable/disable conclusion generation |
|
||||||
|
| `AI_PROVIDER` | `claude` | Preferred AI provider (claude, openai, gemini) |
|
||||||
| `AI_MAX_REMARK_LENGTH` | `2000` | Maximum characters for generated remarks |
|
| `AI_MAX_REMARK_LENGTH` | `2000` | Maximum characters for generated remarks |
|
||||||
| `VERTEX_AI_MODEL` | `gemini-2.5-flash` | Vertex AI Gemini model name |
|
| `CLAUDE_API_KEY` | - | Claude API key (if using Claude) |
|
||||||
|
| `CLAUDE_MODEL` | `claude-sonnet-4-20250514` | Claude model name |
|
||||||
|
| `OPENAI_API_KEY` | - | OpenAI API key (if using OpenAI) |
|
||||||
|
| `OPENAI_MODEL` | `gpt-4o` | OpenAI model name |
|
||||||
|
| `GEMINI_API_KEY` | - | Gemini API key (if using Gemini) |
|
||||||
|
| `GEMINI_MODEL` | `gemini-2.0-flash-lite` | Gemini model name |
|
||||||
|
|
||||||
### Available Models
|
### Provider Priority
|
||||||
|
|
||||||
| Model Name | Description | Use Case |
|
1. **Preferred Provider**: Set via `AI_PROVIDER` config
|
||||||
|------------|-------------|----------|
|
2. **Fallback Chain**: If preferred fails, tries:
|
||||||
| `gemini-2.5-flash` | Latest fast model (default) | General purpose, quick responses |
|
- Claude → OpenAI → Gemini
|
||||||
| `gemini-1.5-flash` | Previous fast model | General purpose |
|
3. **Environment Fallback**: If database config fails, uses environment variables
|
||||||
| `gemini-1.5-pro` | Advanced model | Complex tasks, better quality |
|
|
||||||
| `gemini-1.5-pro-latest` | Latest Pro version | Best quality, complex reasoning |
|
|
||||||
|
|
||||||
### Supported Regions
|
|
||||||
|
|
||||||
| Region Code | Location | Availability |
|
|
||||||
|-------------|----------|--------------|
|
|
||||||
| `us-central1` | Iowa, USA | ✅ Default |
|
|
||||||
| `us-east1` | South Carolina, USA | ✅ |
|
|
||||||
| `us-west1` | Oregon, USA | ✅ |
|
|
||||||
| `europe-west1` | Belgium | ✅ |
|
|
||||||
| `asia-south1` | Mumbai, India | ✅ (Current default) |
|
|
||||||
|
|
||||||
**Note**: Model and region are configured via environment variables, not database config.
|
|
||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
@ -191,7 +186,7 @@ Authorization: Bearer <token>
|
|||||||
],
|
],
|
||||||
"confidence": 0.85,
|
"confidence": 0.85,
|
||||||
"generatedAt": "2025-01-15T10:30:00Z",
|
"generatedAt": "2025-01-15T10:30:00Z",
|
||||||
"provider": "Vertex AI (Gemini)"
|
"provider": "Claude (Anthropic)"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
```
|
```
|
||||||
@ -259,7 +254,7 @@ Content-Type: application/json
|
|||||||
"finalRemark": "Finalized text...",
|
"finalRemark": "Finalized text...",
|
||||||
"isEdited": true,
|
"isEdited": true,
|
||||||
"editCount": 2,
|
"editCount": 2,
|
||||||
"aiModelUsed": "Vertex AI (Gemini)",
|
"aiModelUsed": "Claude (Anthropic)",
|
||||||
"aiConfidenceScore": 0.85,
|
"aiConfidenceScore": 0.85,
|
||||||
"keyDiscussionPoints": ["Point 1", "Point 2"],
|
"keyDiscussionPoints": ["Point 1", "Point 2"],
|
||||||
"generatedAt": "2025-01-15T10:30:00Z",
|
"generatedAt": "2025-01-15T10:30:00Z",
|
||||||
@ -329,9 +324,9 @@ interface ConclusionContext {
|
|||||||
- Sets target word count based on `AI_MAX_REMARK_LENGTH`
|
- Sets target word count based on `AI_MAX_REMARK_LENGTH`
|
||||||
|
|
||||||
3. **AI Generation**:
|
3. **AI Generation**:
|
||||||
- Sends prompt to Vertex AI Gemini
|
- Sends prompt to selected AI provider
|
||||||
- Receives generated text (up to 4096 tokens)
|
- Receives generated text
|
||||||
- Preserves full AI response (no truncation)
|
- Validates length (trims if exceeds max)
|
||||||
- Extracts key points
|
- Extracts key points
|
||||||
- Calculates confidence score
|
- Calculates confidence score
|
||||||
|
|
||||||
@ -412,24 +407,13 @@ Write a brief, professional conclusion (approximately X words, max Y characters)
|
|||||||
4. **Tone Guidelines**: Emphasizes natural, professional, archival-quality writing
|
4. **Tone Guidelines**: Emphasizes natural, professional, archival-quality writing
|
||||||
5. **Context Awareness**: Includes all relevant data (approvals, notes, documents, activities)
|
5. **Context Awareness**: Includes all relevant data (approvals, notes, documents, activities)
|
||||||
|
|
||||||
### Vertex AI Settings
|
### Provider-Specific Settings
|
||||||
|
|
||||||
| Setting | Value | Description |
|
| Provider | Model | Max Tokens | Temperature | Notes |
|
||||||
|---------|-------|-------------|
|
|----------|-------|------------|-------------|-------|
|
||||||
| Model | `gemini-2.5-flash` (default) | Fast, efficient model for conclusion generation |
|
| Claude | claude-sonnet-4-20250514 | 2048 | 0.3 | Best for longer, detailed conclusions |
|
||||||
| Max Output Tokens | `4096` | Maximum tokens in response (technical limit) |
|
| OpenAI | gpt-4o | 1024 | 0.3 | Balanced performance |
|
||||||
| Character Limit | `2000` (configurable) | Actual limit enforced via prompt (`AI_MAX_REMARK_LENGTH`) |
|
| Gemini | gemini-2.0-flash-lite | - | 0.3 | Fast and cost-effective |
|
||||||
| Temperature | `0.3` | Lower temperature for more focused, consistent output |
|
|
||||||
| Location | `asia-south1` (default) | Google Cloud region for API calls |
|
|
||||||
| Authentication | Service Account | Uses same credentials as Google Cloud Storage |
|
|
||||||
|
|
||||||
**Note on Token vs Character Limits:**
|
|
||||||
- **4096 tokens** is the technical maximum Vertex AI can generate
|
|
||||||
- **2000 characters** (default) is the actual limit enforced by the prompt
|
|
||||||
- Token-to-character conversion: ~1 token ≈ 3-4 characters
|
|
||||||
- With HTML tags: 4096 tokens ≈ 12,000-16,000 characters (including tags)
|
|
||||||
- The AI is instructed to stay within the character limit, not the token limit
|
|
||||||
- The token limit provides headroom but the character limit is what matters for storage
|
|
||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
@ -439,21 +423,15 @@ Write a brief, professional conclusion (approximately X words, max Y characters)
|
|||||||
|
|
||||||
1. **No AI Provider Available**
|
1. **No AI Provider Available**
|
||||||
```
|
```
|
||||||
Error: AI features are currently unavailable. Please verify Vertex AI configuration and service account credentials.
|
Error: AI features are currently unavailable. Please configure an AI provider...
|
||||||
```
|
```
|
||||||
**Solution**:
|
**Solution**: Configure API keys in admin panel or environment variables
|
||||||
- Verify service account key file exists at path specified in `GCP_KEY_FILE`
|
|
||||||
- Ensure Vertex AI API is enabled in Google Cloud Console
|
|
||||||
- Check service account has `Vertex AI User` role (`roles/aiplatform.user`)
|
|
||||||
|
|
||||||
2. **Vertex AI API Error**
|
2. **Provider API Error**
|
||||||
```
|
```
|
||||||
Error: AI generation failed (Vertex AI): Model was not found or your project does not have access
|
Error: AI generation failed (Claude): API rate limit exceeded
|
||||||
```
|
```
|
||||||
**Solution**:
|
**Solution**: Check API key validity, rate limits, and provider status
|
||||||
- Verify model name is correct (e.g., `gemini-2.5-flash`)
|
|
||||||
- Ensure model is available in selected region
|
|
||||||
- Check Vertex AI API is enabled in Google Cloud Console
|
|
||||||
|
|
||||||
3. **Request Not Found**
|
3. **Request Not Found**
|
||||||
```
|
```
|
||||||
@ -475,10 +453,10 @@ Write a brief, professional conclusion (approximately X words, max Y characters)
|
|||||||
|
|
||||||
### Error Recovery
|
### Error Recovery
|
||||||
|
|
||||||
|
- **Automatic Fallback**: If preferred provider fails, system tries fallback providers
|
||||||
- **Graceful Degradation**: If AI generation fails, user can write conclusion manually
|
- **Graceful Degradation**: If AI generation fails, user can write conclusion manually
|
||||||
- **Retry Logic**: Manual regeneration is always available
|
- **Retry Logic**: Manual regeneration is always available
|
||||||
- **Logging**: All errors are logged with context for debugging
|
- **Logging**: All errors are logged with context for debugging
|
||||||
- **Token Limit Handling**: If response hits token limit, full response is preserved (no truncation)
|
|
||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
@ -494,17 +472,14 @@ Write a brief, professional conclusion (approximately X words, max Y characters)
|
|||||||
|
|
||||||
### For Administrators
|
### For Administrators
|
||||||
|
|
||||||
1. **Service Account Setup**:
|
1. **API Key Management**: Store API keys securely in database or environment variables
|
||||||
- Ensure service account key file exists and is accessible
|
2. **Provider Selection**: Choose provider based on:
|
||||||
- Verify service account has `Vertex AI User` role
|
- **Claude**: Best quality, higher cost
|
||||||
- Use same credentials as Google Cloud Storage for consistency
|
- **OpenAI**: Balanced quality/cost
|
||||||
2. **Model Selection**: Choose model based on needs:
|
- **Gemini**: Fast, cost-effective
|
||||||
- **gemini-2.5-flash**: Fast, cost-effective (default, recommended)
|
|
||||||
- **gemini-1.5-pro**: Better quality for complex requests
|
|
||||||
3. **Length Configuration**: Set `AI_MAX_REMARK_LENGTH` based on your archival needs
|
3. **Length Configuration**: Set `AI_MAX_REMARK_LENGTH` based on your archival needs
|
||||||
4. **Monitoring**: Monitor AI usage and costs through Google Cloud Console
|
4. **Monitoring**: Monitor AI usage and costs through provider dashboards
|
||||||
5. **Testing**: Test with sample requests before enabling in production
|
5. **Testing**: Test with sample requests before enabling in production
|
||||||
6. **Region Selection**: Choose region closest to your deployment for lower latency
|
|
||||||
|
|
||||||
### For Users
|
### For Users
|
||||||
|
|
||||||
@ -524,10 +499,8 @@ Write a brief, professional conclusion (approximately X words, max Y characters)
|
|||||||
**Diagnosis**:
|
**Diagnosis**:
|
||||||
1. Check `AI_ENABLED` config value
|
1. Check `AI_ENABLED` config value
|
||||||
2. Check `AI_REMARK_GENERATION_ENABLED` config value
|
2. Check `AI_REMARK_GENERATION_ENABLED` config value
|
||||||
3. Verify service account key file exists and is accessible
|
3. Verify API keys are configured
|
||||||
4. Check Vertex AI API is enabled in Google Cloud Console
|
4. Check provider initialization logs
|
||||||
5. Verify service account has `Vertex AI User` role
|
|
||||||
6. Check provider initialization logs
|
|
||||||
|
|
||||||
**Solution**:
|
**Solution**:
|
||||||
```bash
|
```bash
|
||||||
@ -536,14 +509,6 @@ tail -f logs/app.log | grep "AI Service"
|
|||||||
|
|
||||||
# Verify config
|
# Verify config
|
||||||
SELECT * FROM system_config WHERE config_key LIKE 'AI_%';
|
SELECT * FROM system_config WHERE config_key LIKE 'AI_%';
|
||||||
|
|
||||||
# Verify service account key file
|
|
||||||
ls -la credentials/re-platform-workflow-dealer-3d5738fcc1f9.json
|
|
||||||
|
|
||||||
# Check environment variables
|
|
||||||
echo $GCP_PROJECT_ID
|
|
||||||
echo $GCP_KEY_FILE
|
|
||||||
echo $VERTEX_AI_MODEL
|
|
||||||
```
|
```
|
||||||
|
|
||||||
### Issue: Generated Text Too Long/Short
|
### Issue: Generated Text Too Long/Short
|
||||||
@ -553,8 +518,7 @@ echo $VERTEX_AI_MODEL
|
|||||||
**Solution**:
|
**Solution**:
|
||||||
1. Adjust `AI_MAX_REMARK_LENGTH` in admin config
|
1. Adjust `AI_MAX_REMARK_LENGTH` in admin config
|
||||||
2. Check prompt target word count calculation
|
2. Check prompt target word count calculation
|
||||||
3. Note: Vertex AI max output tokens is 4096 (system handles this automatically)
|
3. Verify provider max_tokens setting
|
||||||
4. AI is instructed to stay within character limit, but full response is preserved
|
|
||||||
|
|
||||||
### Issue: Poor Quality Conclusions
|
### Issue: Poor Quality Conclusions
|
||||||
|
|
||||||
@ -563,50 +527,37 @@ echo $VERTEX_AI_MODEL
|
|||||||
**Solution**:
|
**Solution**:
|
||||||
1. Verify context data is complete (approvals, notes, documents)
|
1. Verify context data is complete (approvals, notes, documents)
|
||||||
2. Check prompt includes all relevant information
|
2. Check prompt includes all relevant information
|
||||||
3. Try different model (e.g., `gemini-1.5-pro` for better quality)
|
3. Try different provider (Claude generally produces better quality)
|
||||||
4. Temperature is set to 0.3 for focused output (can be adjusted in code if needed)
|
4. Adjust temperature if needed (lower = more focused)
|
||||||
|
|
||||||
### Issue: Slow Generation
|
### Issue: Slow Generation
|
||||||
|
|
||||||
**Symptoms**: AI generation takes too long
|
**Symptoms**: AI generation takes too long
|
||||||
|
|
||||||
**Solution**:
|
**Solution**:
|
||||||
1. Check Vertex AI API status in Google Cloud Console
|
1. Check provider API status
|
||||||
2. Verify network connectivity
|
2. Verify network connectivity
|
||||||
3. Consider using `gemini-2.5-flash` model (fastest option)
|
3. Consider using faster provider (Gemini)
|
||||||
4. Check for rate limiting in Google Cloud Console
|
4. Check for rate limiting
|
||||||
5. Verify region selection (closer region = lower latency)
|
|
||||||
|
|
||||||
### Issue: Vertex AI Not Initializing
|
### Issue: Provider Not Initializing
|
||||||
|
|
||||||
**Symptoms**: Provider shows as "None" or initialization fails in logs
|
**Symptoms**: Provider shows as "None" in logs
|
||||||
|
|
||||||
**Diagnosis**:
|
**Diagnosis**:
|
||||||
1. Check service account key file exists and is valid
|
1. Check API key is valid
|
||||||
2. Verify `@google-cloud/vertexai` package is installed
|
2. Verify SDK package is installed
|
||||||
3. Check environment variables (`GCP_PROJECT_ID`, `GCP_KEY_FILE`)
|
3. Check environment variables
|
||||||
4. Verify Vertex AI API is enabled in Google Cloud Console
|
|
||||||
5. Check service account permissions
|
|
||||||
|
|
||||||
**Solution**:
|
**Solution**:
|
||||||
```bash
|
```bash
|
||||||
# Install missing SDK
|
# Install missing SDK
|
||||||
npm install @google-cloud/vertexai
|
npm install @anthropic-ai/sdk # For Claude
|
||||||
|
npm install openai # For OpenAI
|
||||||
|
npm install @google/generative-ai # For Gemini
|
||||||
|
|
||||||
# Verify service account key file
|
# Verify API key
|
||||||
ls -la credentials/re-platform-workflow-dealer-3d5738fcc1f9.json
|
echo $CLAUDE_API_KEY # Should show key
|
||||||
|
|
||||||
# Verify environment variables
|
|
||||||
echo $GCP_PROJECT_ID
|
|
||||||
echo $GCP_KEY_FILE
|
|
||||||
echo $VERTEX_AI_MODEL
|
|
||||||
echo $VERTEX_AI_LOCATION
|
|
||||||
|
|
||||||
# Check Google Cloud Console
|
|
||||||
# 1. Go to APIs & Services > Library
|
|
||||||
# 2. Search for "Vertex AI API"
|
|
||||||
# 3. Ensure it's enabled
|
|
||||||
# 4. Verify service account has "Vertex AI User" role
|
|
||||||
```
|
```
|
||||||
|
|
||||||
---
|
---
|
||||||
@ -693,13 +644,12 @@ reference.
|
|||||||
|
|
||||||
## Version History
|
## Version History
|
||||||
|
|
||||||
- **v2.0.0**: Vertex AI Migration
|
- **v1.0.0** (2025-01-15): Initial implementation
|
||||||
- Migrated to Google Cloud Vertex AI Gemini
|
- Multi-provider support (Claude, OpenAI, Gemini)
|
||||||
- Service account authentication (same as GCS)
|
- Automatic and manual generation
|
||||||
- Removed multi-provider support
|
- TAT risk integration
|
||||||
- Increased max output tokens to 4096
|
- Key points extraction
|
||||||
- Full response preservation (no truncation)
|
- Confidence scoring
|
||||||
- HTML format support for rich text editor
|
|
||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
@ -709,18 +659,13 @@ For issues or questions:
|
|||||||
1. Check logs: `logs/app.log`
|
1. Check logs: `logs/app.log`
|
||||||
2. Review admin configuration panel
|
2. Review admin configuration panel
|
||||||
3. Contact development team
|
3. Contact development team
|
||||||
4. Refer to Vertex AI documentation:
|
4. Refer to provider documentation:
|
||||||
- [Vertex AI Documentation](https://cloud.google.com/vertex-ai/docs)
|
- [Claude API Docs](https://docs.anthropic.com)
|
||||||
- [Gemini Models](https://cloud.google.com/vertex-ai/docs/generative-ai/model-reference/gemini)
|
- [OpenAI API Docs](https://platform.openai.com/docs)
|
||||||
- [Vertex AI Setup Guide](../VERTEX_AI_INTEGRATION.md)
|
- [Gemini API Docs](https://ai.google.dev/docs)
|
||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
|
**Last Updated**: January 2025
|
||||||
**Maintained By**: Royal Enfield Development Team
|
**Maintained By**: Royal Enfield Development Team
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## Related Documentation
|
|
||||||
|
|
||||||
- [Vertex AI Integration Guide](./VERTEX_AI_INTEGRATION.md) - Detailed setup and migration information
|
|
||||||
|
|
||||||
|
|||||||
@ -1,71 +0,0 @@
|
|||||||
# Dealer Claim Financial Settlement Workflow
|
|
||||||
|
|
||||||
This document outlines the workflow for financial settlement of dealer claims within the Royal Enfield platform, following the transition from direct DMS integration to an Azure File Storage (AFS) based data exchange with SAP.
|
|
||||||
|
|
||||||
## Workflow Overview
|
|
||||||
|
|
||||||
The financial settlement process ensures that dealer claims are legally documented and financially settled through Royal Enfield's SAP system.
|
|
||||||
|
|
||||||
### 1. Legal Compliance: PWC E-Invoicing
|
|
||||||
Once the **Dealer Completion Documents** are submitted and approved by the **Initiator (Requestor Evaluation)**, the system triggers the legal compliance step.
|
|
||||||
|
|
||||||
- **Service**: `PWCIntegrationService`
|
|
||||||
- **Action**: Generates a signed E-Invoice via PWC API.
|
|
||||||
- **Output**: IRN (Invoice Reference Number), Ack No, Ack Date, Signed Invoice (PDF/B64), and QR Code.
|
|
||||||
- **Purpose**: Ensures the claim is legally recognized under GST regulations.
|
|
||||||
|
|
||||||
### 2. Financial Posting: AFS/CSV Integration
|
|
||||||
The financial settlement is handled by exchanging data files with SAP via **Azure File Storage (AFS)**.
|
|
||||||
|
|
||||||
- **Action**: The system generates a **CSV file** containing the following details:
|
|
||||||
- Invoice Number (from PWC)
|
|
||||||
- Invoice Amount (with/without GST as per activity type)
|
|
||||||
- GL Code (Resolved based on Activity Type/IO)
|
|
||||||
- Internal Order (IO) Number
|
|
||||||
- Dealer Code
|
|
||||||
- **Storage**: CSV is uploaded to a designated folder in AFS.
|
|
||||||
- **SAP Role**: SAP periodically polls AFS, picks up the CSV, and posts the transaction internally.
|
|
||||||
|
|
||||||
### 3. Payment Outcome: Credit Note
|
|
||||||
The result of the financial posting in SAP is a **Credit Note**.
|
|
||||||
|
|
||||||
- **Workflow**:
|
|
||||||
- SAP generates a Credit Note and uploads it back to AFS.
|
|
||||||
- RE Backend monitors the AFS folder.
|
|
||||||
- Once a Credit Note is detected, the system retrieves it and attaches it to the workflow request.
|
|
||||||
- An email notification (using `creditNoteSent.template.ts`) is sent to the dealer.
|
|
||||||
|
|
||||||
## Sequence Diagram
|
|
||||||
|
|
||||||
```mermaid
|
|
||||||
sequenceDiagram
|
|
||||||
participant Dealer
|
|
||||||
participant Backend
|
|
||||||
participant PWC
|
|
||||||
participant AFS as Azure File Storage
|
|
||||||
participant SAP
|
|
||||||
|
|
||||||
Dealer->>Backend: Submit Completion Docs (Actuals)
|
|
||||||
Backend->>Backend: Initiator Approval
|
|
||||||
Backend->>PWC: Generate Signed E-Invoice
|
|
||||||
PWC-->>Backend: Return IRN & QR Code
|
|
||||||
Backend->>Backend: Generate Settlement CSV
|
|
||||||
Backend->>AFS: Upload CSV
|
|
||||||
SAP->>AFS: Pick up CSV
|
|
||||||
SAP->>SAP: Post Financials
|
|
||||||
SAP->>AFS: Upload Credit Note
|
|
||||||
Backend->>AFS: Poll/Retrieve Credit Note
|
|
||||||
Backend->>Dealer: Send Credit Note Notification
|
|
||||||
```
|
|
||||||
|
|
||||||
## GL Code Resolution
|
|
||||||
The GL Code is resolved dynamically based on:
|
|
||||||
1. **Activity Type**: Each activity (e.g., Marketing Event, Demo) has a primary GL mapping.
|
|
||||||
2. **Internal Order (IO)**: If specific IO logic is required, the GL can be overridden.
|
|
||||||
|
|
||||||
## Summary of Integration Points
|
|
||||||
| Component | Integration Type | Responsibility |
|
|
||||||
| :--- | :--- | :--- |
|
|
||||||
| **PWC** | REST API | Legal E-Invoice |
|
|
||||||
| **AFS (Azure)** | File Storage SDK | CSV Exchange |
|
|
||||||
| **SAP** | Batch Processing | Financial Posting & Credit Note |
|
|
||||||
@ -34,7 +34,7 @@ The Claim Management workflow has **8 fixed steps** with specific approvers and
|
|||||||
- **Approver Type**: System (Auto-processed)
|
- **Approver Type**: System (Auto-processed)
|
||||||
- **Action Type**: **AUTO** (System automatically creates activity)
|
- **Action Type**: **AUTO** (System automatically creates activity)
|
||||||
- **TAT**: 1 hour
|
- **TAT**: 1 hour
|
||||||
- **Mapping**: System user (`system@{{APP_DOMAIN}}`)
|
- **Mapping**: System user (`system@royalenfield.com`)
|
||||||
- **Status**: Auto-approved when triggered
|
- **Status**: Auto-approved when triggered
|
||||||
|
|
||||||
### Step 5: Dealer Completion Documents
|
### Step 5: Dealer Completion Documents
|
||||||
@ -55,7 +55,7 @@ The Claim Management workflow has **8 fixed steps** with specific approvers and
|
|||||||
- **Approver Type**: System (Auto-processed via DMS)
|
- **Approver Type**: System (Auto-processed via DMS)
|
||||||
- **Action Type**: **AUTO** (System generates e-invoice via DMS integration)
|
- **Action Type**: **AUTO** (System generates e-invoice via DMS integration)
|
||||||
- **TAT**: 1 hour
|
- **TAT**: 1 hour
|
||||||
- **Mapping**: System user (`system@{{APP_DOMAIN}}`)
|
- **Mapping**: System user (`system@royalenfield.com`)
|
||||||
- **Status**: Auto-approved when triggered
|
- **Status**: Auto-approved when triggered
|
||||||
|
|
||||||
### Step 8: Credit Note Confirmation
|
### Step 8: Credit Note Confirmation
|
||||||
@ -121,7 +121,7 @@ const dealerUser = await User.findOne({ where: { email: dealerEmail } });
|
|||||||
1. Find user with department containing "Finance" and role = 'MANAGEMENT'
|
1. Find user with department containing "Finance" and role = 'MANAGEMENT'
|
||||||
2. Find user with designation containing "Finance" or "Accountant"
|
2. Find user with designation containing "Finance" or "Accountant"
|
||||||
3. Use configured finance team email from admin_configurations table
|
3. Use configured finance team email from admin_configurations table
|
||||||
4. Fallback: Use default finance email (e.g., finance@{{APP_DOMAIN}})
|
4. Fallback: Use default finance email (e.g., finance@royalenfield.com)
|
||||||
```
|
```
|
||||||
|
|
||||||
## Next Steps
|
## Next Steps
|
||||||
|
|||||||
@ -1,224 +0,0 @@
|
|||||||
-- ============================================================
|
|
||||||
-- DEALERS CSV IMPORT - WORKING SOLUTION
|
|
||||||
-- ============================================================
|
|
||||||
-- This script provides a working solution for importing dealers
|
|
||||||
-- from CSV with auto-generated columns (dealer_id, created_at, updated_at, is_active)
|
|
||||||
-- ============================================================
|
|
||||||
|
|
||||||
-- METHOD 1: If your CSV does NOT have dealer_id, created_at, updated_at, is_active
|
|
||||||
-- ============================================================
|
|
||||||
-- Use this COPY command if your CSV has exactly 44 columns (without the auto-generated ones)
|
|
||||||
|
|
||||||
\copy public.dealers (sales_code,service_code,gear_code,gma_code,region,dealership,state,district,city,location,city_category_pst,layout_format,tier_city_category,on_boarding_charges,"date",single_format_month_year,domain_id,replacement,termination_resignation_status,date_of_termination_resignation,last_date_of_operations,old_codes,branch_details,dealer_principal_name,dealer_principal_email_id,dp_contact_number,dp_contacts,showroom_address,showroom_pincode,workshop_address,workshop_pincode,location_district,state_workshop,no_of_studios,website_update,gst,pan,firm_type,prop_managing_partners_directors,total_prop_partners_directors,docs_folder_link,workshop_gma_codes,existing_new,dlrcode) FROM 'C:/Users/BACKPACKERS/Downloads/Dealer_Master.csv' CSV HEADER ENCODING 'WIN1252';
|
|
||||||
|
|
||||||
|
|
||||||
-- ============================================================
|
|
||||||
-- METHOD 2: If your CSV HAS dealer_id, created_at, updated_at, is_active columns
|
|
||||||
-- ============================================================
|
|
||||||
-- Use this approach if your CSV has 48 columns (including the auto-generated ones)
|
|
||||||
-- This creates a temporary table, imports, then inserts with defaults
|
|
||||||
|
|
||||||
-- Step 1: Create temporary table matching your CSV structure
|
|
||||||
-- This accepts ALL columns from CSV (whether 44 or 48 columns)
|
|
||||||
CREATE TEMP TABLE dealers_temp (
|
|
||||||
dealer_id TEXT,
|
|
||||||
sales_code TEXT,
|
|
||||||
service_code TEXT,
|
|
||||||
gear_code TEXT,
|
|
||||||
gma_code TEXT,
|
|
||||||
region TEXT,
|
|
||||||
dealership TEXT,
|
|
||||||
state TEXT,
|
|
||||||
district TEXT,
|
|
||||||
city TEXT,
|
|
||||||
location TEXT,
|
|
||||||
city_category_pst TEXT,
|
|
||||||
layout_format TEXT,
|
|
||||||
tier_city_category TEXT,
|
|
||||||
on_boarding_charges TEXT,
|
|
||||||
date TEXT,
|
|
||||||
single_format_month_year TEXT,
|
|
||||||
domain_id TEXT,
|
|
||||||
replacement TEXT,
|
|
||||||
termination_resignation_status TEXT,
|
|
||||||
date_of_termination_resignation TEXT,
|
|
||||||
last_date_of_operations TEXT,
|
|
||||||
old_codes TEXT,
|
|
||||||
branch_details TEXT,
|
|
||||||
dealer_principal_name TEXT,
|
|
||||||
dealer_principal_email_id TEXT,
|
|
||||||
dp_contact_number TEXT,
|
|
||||||
dp_contacts TEXT,
|
|
||||||
showroom_address TEXT,
|
|
||||||
showroom_pincode TEXT,
|
|
||||||
workshop_address TEXT,
|
|
||||||
workshop_pincode TEXT,
|
|
||||||
location_district TEXT,
|
|
||||||
state_workshop TEXT,
|
|
||||||
no_of_studios TEXT,
|
|
||||||
website_update TEXT,
|
|
||||||
gst TEXT,
|
|
||||||
pan TEXT,
|
|
||||||
firm_type TEXT,
|
|
||||||
prop_managing_partners_directors TEXT,
|
|
||||||
total_prop_partners_directors TEXT,
|
|
||||||
docs_folder_link TEXT,
|
|
||||||
workshop_gma_codes TEXT,
|
|
||||||
existing_new TEXT,
|
|
||||||
dlrcode TEXT,
|
|
||||||
created_at TEXT,
|
|
||||||
updated_at TEXT,
|
|
||||||
is_active TEXT
|
|
||||||
);
|
|
||||||
|
|
||||||
-- Step 2: Import CSV into temporary table
|
|
||||||
-- This will work whether your CSV has 44 or 48 columns
|
|
||||||
\copy dealers_temp FROM 'C:/Users/COMP/Downloads/DEALERS_CLEAN.csv' WITH (FORMAT csv, HEADER true, ENCODING 'UTF8');
|
|
||||||
|
|
||||||
-- Optional: Check what was imported
|
|
||||||
-- SELECT COUNT(*) FROM dealers_temp;
|
|
||||||
|
|
||||||
-- Step 3: Insert into actual dealers table
|
|
||||||
-- IMPORTANT: We IGNORE dealer_id, created_at, updated_at, is_active from CSV
|
|
||||||
-- These will use database DEFAULT values (auto-generated UUID, current timestamp, true)
|
|
||||||
INSERT INTO public.dealers (
|
|
||||||
sales_code,
|
|
||||||
service_code,
|
|
||||||
gear_code,
|
|
||||||
gma_code,
|
|
||||||
region,
|
|
||||||
dealership,
|
|
||||||
state,
|
|
||||||
district,
|
|
||||||
city,
|
|
||||||
location,
|
|
||||||
city_category_pst,
|
|
||||||
layout_format,
|
|
||||||
tier_city_category,
|
|
||||||
on_boarding_charges,
|
|
||||||
date,
|
|
||||||
single_format_month_year,
|
|
||||||
domain_id,
|
|
||||||
replacement,
|
|
||||||
termination_resignation_status,
|
|
||||||
date_of_termination_resignation,
|
|
||||||
last_date_of_operations,
|
|
||||||
old_codes,
|
|
||||||
branch_details,
|
|
||||||
dealer_principal_name,
|
|
||||||
dealer_principal_email_id,
|
|
||||||
dp_contact_number,
|
|
||||||
dp_contacts,
|
|
||||||
showroom_address,
|
|
||||||
showroom_pincode,
|
|
||||||
workshop_address,
|
|
||||||
workshop_pincode,
|
|
||||||
location_district,
|
|
||||||
state_workshop,
|
|
||||||
no_of_studios,
|
|
||||||
website_update,
|
|
||||||
gst,
|
|
||||||
pan,
|
|
||||||
firm_type,
|
|
||||||
prop_managing_partners_directors,
|
|
||||||
total_prop_partners_directors,
|
|
||||||
docs_folder_link,
|
|
||||||
workshop_gma_codes,
|
|
||||||
existing_new,
|
|
||||||
dlrcode
|
|
||||||
)
|
|
||||||
SELECT
|
|
||||||
NULLIF(sales_code, ''),
|
|
||||||
NULLIF(service_code, ''),
|
|
||||||
NULLIF(gear_code, ''),
|
|
||||||
NULLIF(gma_code, ''),
|
|
||||||
NULLIF(region, ''),
|
|
||||||
NULLIF(dealership, ''),
|
|
||||||
NULLIF(state, ''),
|
|
||||||
NULLIF(district, ''),
|
|
||||||
NULLIF(city, ''),
|
|
||||||
NULLIF(location, ''),
|
|
||||||
NULLIF(city_category_pst, ''),
|
|
||||||
NULLIF(layout_format, ''),
|
|
||||||
NULLIF(tier_city_category, ''),
|
|
||||||
NULLIF(on_boarding_charges, ''),
|
|
||||||
NULLIF(date, ''),
|
|
||||||
NULLIF(single_format_month_year, ''),
|
|
||||||
NULLIF(domain_id, ''),
|
|
||||||
NULLIF(replacement, ''),
|
|
||||||
NULLIF(termination_resignation_status, ''),
|
|
||||||
NULLIF(date_of_termination_resignation, ''),
|
|
||||||
NULLIF(last_date_of_operations, ''),
|
|
||||||
NULLIF(old_codes, ''),
|
|
||||||
NULLIF(branch_details, ''),
|
|
||||||
NULLIF(dealer_principal_name, ''),
|
|
||||||
NULLIF(dealer_principal_email_id, ''),
|
|
||||||
NULLIF(dp_contact_number, ''),
|
|
||||||
NULLIF(dp_contacts, ''),
|
|
||||||
NULLIF(showroom_address, ''),
|
|
||||||
NULLIF(showroom_pincode, ''),
|
|
||||||
NULLIF(workshop_address, ''),
|
|
||||||
NULLIF(workshop_pincode, ''),
|
|
||||||
NULLIF(location_district, ''),
|
|
||||||
NULLIF(state_workshop, ''),
|
|
||||||
CASE WHEN no_of_studios = '' THEN 0 ELSE no_of_studios::INTEGER END,
|
|
||||||
NULLIF(website_update, ''),
|
|
||||||
NULLIF(gst, ''),
|
|
||||||
NULLIF(pan, ''),
|
|
||||||
NULLIF(firm_type, ''),
|
|
||||||
NULLIF(prop_managing_partners_directors, ''),
|
|
||||||
NULLIF(total_prop_partners_directors, ''),
|
|
||||||
NULLIF(docs_folder_link, ''),
|
|
||||||
NULLIF(workshop_gma_codes, ''),
|
|
||||||
NULLIF(existing_new, ''),
|
|
||||||
NULLIF(dlrcode, '')
|
|
||||||
FROM dealers_temp;
|
|
||||||
|
|
||||||
-- Step 4: Clean up temporary table
|
|
||||||
DROP TABLE dealers_temp;
|
|
||||||
|
|
||||||
-- ============================================================
|
|
||||||
-- METHOD 3: Using COPY with DEFAULT (PostgreSQL 12+)
|
|
||||||
-- ============================================================
|
|
||||||
-- Alternative approach using a function to set defaults
|
|
||||||
|
|
||||||
-- Create a function to handle the import with defaults
|
|
||||||
CREATE OR REPLACE FUNCTION import_dealers_from_csv()
|
|
||||||
RETURNS void AS $$
|
|
||||||
BEGIN
|
|
||||||
-- This will be called from a COPY command that uses a function
|
|
||||||
-- See METHOD 1 for the actual COPY command
|
|
||||||
END;
|
|
||||||
$$ LANGUAGE plpgsql;
|
|
||||||
|
|
||||||
-- ============================================================
|
|
||||||
-- VERIFICATION QUERIES
|
|
||||||
-- ============================================================
|
|
||||||
|
|
||||||
-- Check import results
|
|
||||||
SELECT
|
|
||||||
COUNT(*) as total_dealers,
|
|
||||||
COUNT(dealer_id) as has_dealer_id,
|
|
||||||
COUNT(created_at) as has_created_at,
|
|
||||||
COUNT(updated_at) as has_updated_at,
|
|
||||||
COUNT(*) FILTER (WHERE is_active = true) as active_count
|
|
||||||
FROM dealers;
|
|
||||||
|
|
||||||
-- View sample records with auto-generated values
|
|
||||||
SELECT
|
|
||||||
dealer_id,
|
|
||||||
dlrcode,
|
|
||||||
dealership,
|
|
||||||
created_at,
|
|
||||||
updated_at,
|
|
||||||
is_active
|
|
||||||
FROM dealers
|
|
||||||
LIMIT 5;
|
|
||||||
|
|
||||||
-- Check for any issues
|
|
||||||
SELECT
|
|
||||||
COUNT(*) FILTER (WHERE dealer_id IS NULL) as missing_dealer_id,
|
|
||||||
COUNT(*) FILTER (WHERE created_at IS NULL) as missing_created_at,
|
|
||||||
COUNT(*) FILTER (WHERE updated_at IS NULL) as missing_updated_at
|
|
||||||
FROM dealers;
|
|
||||||
|
|
||||||
@ -1,515 +0,0 @@
|
|||||||
# Dealers CSV Import Guide
|
|
||||||
|
|
||||||
This guide explains how to format and import dealer data from a CSV file into the PostgreSQL `dealers` table.
|
|
||||||
|
|
||||||
## ⚠️ Important: Auto-Generated Columns
|
|
||||||
|
|
||||||
**DO NOT include these columns in your CSV file** - they are automatically generated by the database:
|
|
||||||
|
|
||||||
- ❌ `dealer_id` - Auto-generated UUID (e.g., `550e8400-e29b-41d4-a716-446655440000`)
|
|
||||||
- ❌ `created_at` - Auto-generated timestamp (current time on import)
|
|
||||||
- ❌ `updated_at` - Auto-generated timestamp (current time on import)
|
|
||||||
- ❌ `is_active` - Defaults to `true`
|
|
||||||
|
|
||||||
Your CSV should have **exactly 44 columns** (the data columns listed below).
|
|
||||||
|
|
||||||
## Table of Contents
|
|
||||||
- [CSV File Format Requirements](#csv-file-format-requirements)
|
|
||||||
- [Column Mapping](#column-mapping)
|
|
||||||
- [Preparing Your CSV File](#preparing-your-csv-file)
|
|
||||||
- [Import Methods](#import-methods)
|
|
||||||
- [Troubleshooting](#troubleshooting)
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## CSV File Format Requirements
|
|
||||||
|
|
||||||
### File Requirements
|
|
||||||
- **Format**: CSV (Comma-Separated Values)
|
|
||||||
- **Encoding**: UTF-8
|
|
||||||
- **Header Row**: Required (first row must contain column names)
|
|
||||||
- **Delimiter**: Comma (`,`)
|
|
||||||
- **Text Qualifier**: Double quotes (`"`) for fields containing commas or special characters
|
|
||||||
|
|
||||||
### Required Columns (in exact order)
|
|
||||||
|
|
||||||
**Important Notes:**
|
|
||||||
- **DO NOT include** `dealer_id`, `created_at`, `updated_at`, or `is_active` in your CSV file
|
|
||||||
- These columns will be automatically generated by the database:
|
|
||||||
- `dealer_id`: Auto-generated UUID
|
|
||||||
- `created_at`: Auto-generated timestamp (current time)
|
|
||||||
- `updated_at`: Auto-generated timestamp (current time)
|
|
||||||
- `is_active`: Defaults to `true`
|
|
||||||
|
|
||||||
Your CSV file must have these **44 columns** in the following order:
|
|
||||||
|
|
||||||
1. `sales_code`
|
|
||||||
2. `service_code`
|
|
||||||
3. `gear_code`
|
|
||||||
4. `gma_code`
|
|
||||||
5. `region`
|
|
||||||
6. `dealership`
|
|
||||||
7. `state`
|
|
||||||
8. `district`
|
|
||||||
9. `city`
|
|
||||||
10. `location`
|
|
||||||
11. `city_category_pst`
|
|
||||||
12. `layout_format`
|
|
||||||
13. `tier_city_category`
|
|
||||||
14. `on_boarding_charges`
|
|
||||||
15. `date`
|
|
||||||
16. `single_format_month_year`
|
|
||||||
17. `domain_id`
|
|
||||||
18. `replacement`
|
|
||||||
19. `termination_resignation_status`
|
|
||||||
20. `date_of_termination_resignation`
|
|
||||||
21. `last_date_of_operations`
|
|
||||||
22. `old_codes`
|
|
||||||
23. `branch_details`
|
|
||||||
24. `dealer_principal_name`
|
|
||||||
25. `dealer_principal_email_id`
|
|
||||||
26. `dp_contact_number`
|
|
||||||
27. `dp_contacts`
|
|
||||||
28. `showroom_address`
|
|
||||||
29. `showroom_pincode`
|
|
||||||
30. `workshop_address`
|
|
||||||
31. `workshop_pincode`
|
|
||||||
32. `location_district`
|
|
||||||
33. `state_workshop`
|
|
||||||
34. `no_of_studios`
|
|
||||||
35. `website_update`
|
|
||||||
36. `gst`
|
|
||||||
37. `pan`
|
|
||||||
38. `firm_type`
|
|
||||||
39. `prop_managing_partners_directors`
|
|
||||||
40. `total_prop_partners_directors`
|
|
||||||
41. `docs_folder_link`
|
|
||||||
42. `workshop_gma_codes`
|
|
||||||
43. `existing_new`
|
|
||||||
44. `dlrcode`
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## Column Mapping
|
|
||||||
|
|
||||||
### Column Details
|
|
||||||
|
|
||||||
| Column Name | Type | Required | Notes |
|
|
||||||
|------------|------|----------|-------|
|
|
||||||
| `sales_code` | String(50) | No | Sales code identifier |
|
|
||||||
| `service_code` | String(50) | No | Service code identifier |
|
|
||||||
| `gear_code` | String(50) | No | Gear code identifier |
|
|
||||||
| `gma_code` | String(50) | No | GMA code identifier |
|
|
||||||
| `region` | String(50) | No | Geographic region |
|
|
||||||
| `dealership` | String(255) | No | Dealership business name |
|
|
||||||
| `state` | String(100) | No | State name |
|
|
||||||
| `district` | String(100) | No | District name |
|
|
||||||
| `city` | String(100) | No | City name |
|
|
||||||
| `location` | String(255) | No | Location details |
|
|
||||||
| `city_category_pst` | String(50) | No | City category (PST) |
|
|
||||||
| `layout_format` | String(50) | No | Layout format |
|
|
||||||
| `tier_city_category` | String(100) | No | TIER City Category |
|
|
||||||
| `on_boarding_charges` | Decimal | No | Numeric value (e.g., 1000.50) |
|
|
||||||
| `date` | Date | No | Format: YYYY-MM-DD (e.g., 2014-09-30) |
|
|
||||||
| `single_format_month_year` | String(50) | No | Format: Sep-2014 |
|
|
||||||
| `domain_id` | String(255) | No | Email domain (e.g., dealer@{{APP_DOMAIN}}) |
|
|
||||||
| `replacement` | String(50) | No | Replacement status |
|
|
||||||
| `termination_resignation_status` | String(255) | No | Termination/Resignation status |
|
|
||||||
| `date_of_termination_resignation` | Date | No | Format: YYYY-MM-DD |
|
|
||||||
| `last_date_of_operations` | Date | No | Format: YYYY-MM-DD |
|
|
||||||
| `old_codes` | String(255) | No | Old code references |
|
|
||||||
| `branch_details` | Text | No | Branch information |
|
|
||||||
| `dealer_principal_name` | String(255) | No | Principal's full name |
|
|
||||||
| `dealer_principal_email_id` | String(255) | No | Principal's email |
|
|
||||||
| `dp_contact_number` | String(20) | No | Contact phone number |
|
|
||||||
| `dp_contacts` | String(20) | No | Additional contacts |
|
|
||||||
| `showroom_address` | Text | No | Full showroom address |
|
|
||||||
| `showroom_pincode` | String(10) | No | Showroom postal code |
|
|
||||||
| `workshop_address` | Text | No | Full workshop address |
|
|
||||||
| `workshop_pincode` | String(10) | No | Workshop postal code |
|
|
||||||
| `location_district` | String(100) | No | Location/District |
|
|
||||||
| `state_workshop` | String(100) | No | State for workshop |
|
|
||||||
| `no_of_studios` | Integer | No | Number of studios (default: 0) |
|
|
||||||
| `website_update` | String(10) | No | Yes/No value |
|
|
||||||
| `gst` | String(50) | No | GST number |
|
|
||||||
| `pan` | String(50) | No | PAN number |
|
|
||||||
| `firm_type` | String(100) | No | Type of firm (e.g., Proprietorship) |
|
|
||||||
| `prop_managing_partners_directors` | String(255) | No | Proprietor/Partners/Directors names |
|
|
||||||
| `total_prop_partners_directors` | String(255) | No | Total count or names |
|
|
||||||
| `docs_folder_link` | Text | No | Google Drive or document folder URL |
|
|
||||||
| `workshop_gma_codes` | String(255) | No | Workshop GMA codes |
|
|
||||||
| `existing_new` | String(50) | No | Existing/New status |
|
|
||||||
| `dlrcode` | String(50) | No | Dealer code |
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## Preparing Your CSV File
|
|
||||||
|
|
||||||
### Step 1: Create/Edit Your CSV File
|
|
||||||
|
|
||||||
1. **Open your CSV file** in Excel, Google Sheets, or a text editor
|
|
||||||
2. **Remove auto-generated columns** (if present):
|
|
||||||
- ❌ **DO NOT include**: `dealer_id`, `created_at`, `updated_at`, `is_active`
|
|
||||||
- ✅ These will be automatically generated by the database
|
|
||||||
3. **Ensure the header row** matches the column names exactly (see [Column Mapping](#column-mapping))
|
|
||||||
4. **Verify column order** - columns must be in the exact order listed above (44 columns total)
|
|
||||||
5. **Check data formats**:
|
|
||||||
- Dates: Use `YYYY-MM-DD` format (e.g., `2014-09-30`)
|
|
||||||
- Numbers: Use decimal format for `on_boarding_charges` (e.g., `1000.50`)
|
|
||||||
- Empty values: Leave cells empty (don't use "NULL" or "N/A" as text)
|
|
||||||
|
|
||||||
### Step 2: Handle Special Characters
|
|
||||||
|
|
||||||
- **Commas in text**: Wrap the entire field in double quotes
|
|
||||||
- Example: `"No.335, HVP RR Nagar Sector B"`
|
|
||||||
- **Quotes in text**: Use double quotes to escape: `""quoted text""`
|
|
||||||
- **Newlines in text**: Wrap field in double quotes
|
|
||||||
|
|
||||||
### Step 3: Date Formatting
|
|
||||||
|
|
||||||
Ensure dates are in `YYYY-MM-DD` format:
|
|
||||||
- ✅ Correct: `2014-09-30`
|
|
||||||
- ❌ Wrong: `30-Sep-14`, `09/30/2014`, `30-09-2014`
|
|
||||||
|
|
||||||
### Step 4: Save the File
|
|
||||||
|
|
||||||
1. **Save as CSV** (UTF-8 encoding)
|
|
||||||
2. **File location**: Save to an accessible path (e.g., `C:/Users/COMP/Downloads/DEALERS_CLEAN.csv`)
|
|
||||||
3. **File name**: Use a descriptive name (e.g., `DEALERS_CLEAN.csv`)
|
|
||||||
|
|
||||||
### Sample CSV Format
|
|
||||||
|
|
||||||
**Important:** Your CSV should **NOT** include `dealer_id`, `created_at`, `updated_at`, or `is_active` columns. These are auto-generated.
|
|
||||||
|
|
||||||
```csv
|
|
||||||
sales_code,service_code,gear_code,gma_code,region,dealership,state,district,city,location,city_category_pst,layout_format,tier_city_category,on_boarding_charges,date,single_format_month_year,domain_id,replacement,termination_resignation_status,date_of_termination_resignation,last_date_of_operations,old_codes,branch_details,dealer_principal_name,dealer_principal_email_id,dp_contact_number,dp_contacts,showroom_address,showroom_pincode,workshop_address,workshop_pincode,location_district,state_workshop,no_of_studios,website_update,gst,pan,firm_type,prop_managing_partners_directors,total_prop_partners_directors,docs_folder_link,workshop_gma_codes,existing_new,dlrcode
|
|
||||||
5124,5125,5573,9430,S3,Accelerate Motors,Karnataka,Bengaluru,Bengaluru,RAJA RAJESHWARI NAGAR,A+,A+,Tier 1 City,,2014-09-30,Sep-2014,acceleratemotors.rrnagar@dealer.{{APP_DOMAIN}},,,,,,,N. Shyam Charmanna,shyamcharmanna@yahoo.co.in,7022049621,7022049621,"No.335, HVP RR Nagar Sector B, Ideal Homes Town Ship, Bangalore - 560098, Dist – Bangalore, Karnataka",560098,"Works Shop No.460, 80ft Road, 2nd Phase R R Nagar, Bangalore - 560098, Dist – Bangalore, Karnataka",560098,Bangalore,Karnataka,0,Yes,29ARCPS1311D1Z6,ARCPS1311D,Proprietorship,CHARMANNA SHYAM NELLAMAKADA,CHARMANNA SHYAM NELLAMAKADA,https://drive.google.com/drive/folders/1sGtg3s1h9aBXX9fhxJufYuBWar8gVvnb,,,3386
|
|
||||||
```
|
|
||||||
|
|
||||||
**What gets auto-generated:**
|
|
||||||
- `dealer_id`: `550e8400-e29b-41d4-a716-446655440000` (example UUID)
|
|
||||||
- `created_at`: `2025-01-20 10:30:45.123` (current timestamp)
|
|
||||||
- `updated_at`: `2025-01-20 10:30:45.123` (current timestamp)
|
|
||||||
- `is_active`: `true`
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## Import Methods
|
|
||||||
|
|
||||||
### Method 1: PostgreSQL COPY Command (Recommended - If CSV has 44 columns)
|
|
||||||
|
|
||||||
**Use this if your CSV does NOT include `dealer_id`, `created_at`, `updated_at`, `is_active` columns.**
|
|
||||||
|
|
||||||
**Prerequisites:**
|
|
||||||
- PostgreSQL client (psql) installed
|
|
||||||
- Access to PostgreSQL server
|
|
||||||
- CSV file path accessible from PostgreSQL server
|
|
||||||
|
|
||||||
**Steps:**
|
|
||||||
|
|
||||||
1. **Connect to PostgreSQL:**
|
|
||||||
```bash
|
|
||||||
psql -U your_username -d royal_enfield_workflow -h localhost
|
|
||||||
```
|
|
||||||
|
|
||||||
2. **Run the COPY command:**
|
|
||||||
|
|
||||||
**Note:** The COPY command explicitly lists only the columns from your CSV. The following columns are **automatically handled by the database** and should **NOT** be in your CSV:
|
|
||||||
- `dealer_id` - Auto-generated UUID
|
|
||||||
- `created_at` - Auto-generated timestamp
|
|
||||||
- `updated_at` - Auto-generated timestamp
|
|
||||||
- `is_active` - Defaults to `true`
|
|
||||||
|
|
||||||
```sql
|
|
||||||
\copy public.dealers(
|
|
||||||
sales_code,
|
|
||||||
service_code,
|
|
||||||
gear_code,
|
|
||||||
gma_code,
|
|
||||||
region,
|
|
||||||
dealership,
|
|
||||||
state,
|
|
||||||
district,
|
|
||||||
city,
|
|
||||||
location,
|
|
||||||
city_category_pst,
|
|
||||||
layout_format,
|
|
||||||
tier_city_category,
|
|
||||||
on_boarding_charges,
|
|
||||||
date,
|
|
||||||
single_format_month_year,
|
|
||||||
domain_id,
|
|
||||||
replacement,
|
|
||||||
termination_resignation_status,
|
|
||||||
date_of_termination_resignation,
|
|
||||||
last_date_of_operations,
|
|
||||||
old_codes,
|
|
||||||
branch_details,
|
|
||||||
dealer_principal_name,
|
|
||||||
dealer_principal_email_id,
|
|
||||||
dp_contact_number,
|
|
||||||
dp_contacts,
|
|
||||||
showroom_address,
|
|
||||||
showroom_pincode,
|
|
||||||
workshop_address,
|
|
||||||
workshop_pincode,
|
|
||||||
location_district,
|
|
||||||
state_workshop,
|
|
||||||
no_of_studios,
|
|
||||||
website_update,
|
|
||||||
gst,
|
|
||||||
pan,
|
|
||||||
firm_type,
|
|
||||||
prop_managing_partners_directors,
|
|
||||||
total_prop_partners_directors,
|
|
||||||
docs_folder_link,
|
|
||||||
workshop_gma_codes,
|
|
||||||
existing_new,
|
|
||||||
dlrcode
|
|
||||||
)
|
|
||||||
FROM 'C:/Users/COMP/Downloads/DEALERS_CLEAN.csv'
|
|
||||||
WITH (
|
|
||||||
FORMAT csv,
|
|
||||||
HEADER true,
|
|
||||||
ENCODING 'UTF8'
|
|
||||||
);
|
|
||||||
```
|
|
||||||
|
|
||||||
**What happens:**
|
|
||||||
- `dealer_id` will be automatically generated as a UUID for each row
|
|
||||||
- `created_at` will be set to the current timestamp
|
|
||||||
- `updated_at` will be set to the current timestamp
|
|
||||||
- `is_active` will default to `true`
|
|
||||||
|
|
||||||
3. **Verify import:**
|
|
||||||
```sql
|
|
||||||
SELECT COUNT(*) FROM dealers;
|
|
||||||
SELECT * FROM dealers LIMIT 5;
|
|
||||||
```
|
|
||||||
|
|
||||||
### Method 2: Using Temporary Table (If CSV has 48 columns including auto-generated ones)
|
|
||||||
|
|
||||||
**Use this if your CSV includes `dealer_id`, `created_at`, `updated_at`, `is_active` columns and you're getting errors.**
|
|
||||||
|
|
||||||
This method uses a temporary table to import the CSV, then inserts into the actual table while ignoring the auto-generated columns:
|
|
||||||
|
|
||||||
```sql
|
|
||||||
-- Step 1: Create temporary table
|
|
||||||
CREATE TEMP TABLE dealers_temp (
|
|
||||||
dealer_id TEXT,
|
|
||||||
sales_code TEXT,
|
|
||||||
service_code TEXT,
|
|
||||||
-- ... (all 48 columns as TEXT)
|
|
||||||
);
|
|
||||||
|
|
||||||
-- Step 2: Import CSV into temp table
|
|
||||||
\copy dealers_temp FROM 'C:/Users/COMP/Downloads/DEALERS_CLEAN.csv' WITH (FORMAT csv, HEADER true, ENCODING 'UTF8');
|
|
||||||
|
|
||||||
-- Step 3: Insert into actual table (ignoring dealer_id, created_at, updated_at, is_active)
|
|
||||||
INSERT INTO public.dealers (
|
|
||||||
sales_code,
|
|
||||||
service_code,
|
|
||||||
-- ... (only the 44 data columns)
|
|
||||||
)
|
|
||||||
SELECT
|
|
||||||
NULLIF(sales_code, ''),
|
|
||||||
NULLIF(service_code, ''),
|
|
||||||
-- ... (convert and handle empty strings)
|
|
||||||
FROM dealers_temp
|
|
||||||
WHERE sales_code IS NOT NULL OR dealership IS NOT NULL; -- Skip completely empty rows
|
|
||||||
|
|
||||||
-- Step 4: Clean up
|
|
||||||
DROP TABLE dealers_temp;
|
|
||||||
```
|
|
||||||
|
|
||||||
**See `DEALERS_CSV_IMPORT_FIX.sql` for the complete working script.**
|
|
||||||
|
|
||||||
### Method 3: Using pgAdmin
|
|
||||||
|
|
||||||
1. Open pgAdmin and connect to your database
|
|
||||||
2. Right-click on `dealers` table → **Import/Export Data**
|
|
||||||
3. Select **Import**
|
|
||||||
4. Configure:
|
|
||||||
- **Filename**: Browse to your CSV file
|
|
||||||
- **Format**: CSV
|
|
||||||
- **Header**: Yes
|
|
||||||
- **Encoding**: UTF8
|
|
||||||
- **Delimiter**: Comma
|
|
||||||
5. Click **OK** to import
|
|
||||||
|
|
||||||
### Method 4: Using Node.js Script
|
|
||||||
|
|
||||||
Create a script to import CSV programmatically (useful for automation):
|
|
||||||
|
|
||||||
```typescript
|
|
||||||
import { sequelize } from '../config/database';
|
|
||||||
import { QueryTypes } from 'sequelize';
|
|
||||||
import * as fs from 'fs';
|
|
||||||
import * as path from 'path';
|
|
||||||
import * as csv from 'csv-parser';
|
|
||||||
|
|
||||||
async function importDealersFromCSV(csvFilePath: string) {
|
|
||||||
const dealers: any[] = [];
|
|
||||||
|
|
||||||
return new Promise((resolve, reject) => {
|
|
||||||
fs.createReadStream(csvFilePath)
|
|
||||||
.pipe(csv())
|
|
||||||
.on('data', (row) => {
|
|
||||||
dealers.push(row);
|
|
||||||
})
|
|
||||||
.on('end', async () => {
|
|
||||||
try {
|
|
||||||
// Bulk insert dealers
|
|
||||||
// Implementation depends on your needs
|
|
||||||
console.log(`Imported ${dealers.length} dealers`);
|
|
||||||
resolve(dealers);
|
|
||||||
} catch (error) {
|
|
||||||
reject(error);
|
|
||||||
}
|
|
||||||
});
|
|
||||||
});
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## Troubleshooting
|
|
||||||
|
|
||||||
### Common Issues and Solutions
|
|
||||||
|
|
||||||
#### 1. **"Column count mismatch" Error**
|
|
||||||
- **Problem**: CSV has different number of columns than expected
|
|
||||||
- **Solution**:
|
|
||||||
- Verify your CSV has exactly **44 columns** (excluding header)
|
|
||||||
- **Remove** `dealer_id`, `created_at`, `updated_at`, and `is_active` if they exist in your CSV
|
|
||||||
- These columns are auto-generated and should NOT be in the CSV file
|
|
||||||
|
|
||||||
#### 2. **"Invalid date format" Error**
|
|
||||||
- **Problem**: Dates not in `YYYY-MM-DD` format
|
|
||||||
- **Solution**: Convert dates to `YYYY-MM-DD` format (e.g., `2014-09-30`)
|
|
||||||
|
|
||||||
#### 3. **"Encoding error" or "Special characters not displaying correctly**
|
|
||||||
- **Problem**: CSV file not saved in UTF-8 encoding
|
|
||||||
- **Solution**:
|
|
||||||
- In Excel: Save As → CSV UTF-8 (Comma delimited) (*.csv)
|
|
||||||
- In Notepad++: Encoding → Convert to UTF-8 → Save
|
|
||||||
|
|
||||||
#### 4. **"Permission denied" Error (COPY command)**
|
|
||||||
- **Problem**: PostgreSQL server cannot access the file path
|
|
||||||
- **Solution**:
|
|
||||||
- Use absolute path with forward slashes: `C:/Users/COMP/Downloads/DEALERS_CLEAN.csv`
|
|
||||||
- Ensure file permissions allow read access
|
|
||||||
- For remote servers, upload file to server first
|
|
||||||
|
|
||||||
#### 5. **"Duplicate key" Error**
|
|
||||||
- **Problem**: Trying to import duplicate records
|
|
||||||
- **Solution**:
|
|
||||||
- Use `ON CONFLICT` handling in your import
|
|
||||||
- Or clean CSV to remove duplicates before import
|
|
||||||
|
|
||||||
#### 6. **Empty values showing as "NULL" text**
|
|
||||||
- **Problem**: CSV contains literal "NULL" or "N/A" strings
|
|
||||||
- **Solution**: Replace with empty cells in CSV
|
|
||||||
|
|
||||||
#### 7. **Commas in address fields breaking import**
|
|
||||||
- **Problem**: Address fields contain commas not properly quoted
|
|
||||||
- **Solution**: Wrap fields containing commas in double quotes:
|
|
||||||
```csv
|
|
||||||
"No.335, HVP RR Nagar Sector B, Ideal Homes Town Ship"
|
|
||||||
```
|
|
||||||
|
|
||||||
### Pre-Import Checklist
|
|
||||||
|
|
||||||
- [ ] CSV file saved in UTF-8 encoding
|
|
||||||
- [ ] **Removed** `dealer_id`, `created_at`, `updated_at`, and `is_active` columns (if present)
|
|
||||||
- [ ] Header row matches column names exactly
|
|
||||||
- [ ] All 44 columns present in correct order
|
|
||||||
- [ ] Dates formatted as `YYYY-MM-DD`
|
|
||||||
- [ ] Numeric fields contain valid numbers (or are empty)
|
|
||||||
- [ ] Text fields with commas are wrapped in quotes
|
|
||||||
- [ ] File path is accessible from PostgreSQL server
|
|
||||||
- [ ] Database connection credentials are correct
|
|
||||||
|
|
||||||
### Verification Queries
|
|
||||||
|
|
||||||
After import, run these queries to verify:
|
|
||||||
|
|
||||||
```sql
|
|
||||||
-- Count total dealers
|
|
||||||
SELECT COUNT(*) as total_dealers FROM dealers;
|
|
||||||
|
|
||||||
-- Verify auto-generated columns
|
|
||||||
SELECT
|
|
||||||
dealer_id,
|
|
||||||
created_at,
|
|
||||||
updated_at,
|
|
||||||
is_active,
|
|
||||||
dlrcode,
|
|
||||||
dealership
|
|
||||||
FROM dealers
|
|
||||||
LIMIT 5;
|
|
||||||
|
|
||||||
-- Check for null values in key fields
|
|
||||||
SELECT
|
|
||||||
COUNT(*) FILTER (WHERE dlrcode IS NULL) as null_dlrcode,
|
|
||||||
COUNT(*) FILTER (WHERE domain_id IS NULL) as null_domain_id,
|
|
||||||
COUNT(*) FILTER (WHERE dealership IS NULL) as null_dealership
|
|
||||||
FROM dealers;
|
|
||||||
|
|
||||||
-- View sample records
|
|
||||||
SELECT
|
|
||||||
dealer_id,
|
|
||||||
dlrcode,
|
|
||||||
dealership,
|
|
||||||
city,
|
|
||||||
state,
|
|
||||||
domain_id,
|
|
||||||
created_at,
|
|
||||||
is_active
|
|
||||||
FROM dealers
|
|
||||||
LIMIT 10;
|
|
||||||
|
|
||||||
-- Check date formats
|
|
||||||
SELECT
|
|
||||||
dlrcode,
|
|
||||||
date,
|
|
||||||
date_of_termination_resignation,
|
|
||||||
last_date_of_operations
|
|
||||||
FROM dealers
|
|
||||||
WHERE date IS NOT NULL
|
|
||||||
LIMIT 5;
|
|
||||||
|
|
||||||
-- Verify all dealers have dealer_id and timestamps
|
|
||||||
SELECT
|
|
||||||
COUNT(*) as total,
|
|
||||||
COUNT(dealer_id) as has_dealer_id,
|
|
||||||
COUNT(created_at) as has_created_at,
|
|
||||||
COUNT(updated_at) as has_updated_at,
|
|
||||||
COUNT(*) FILTER (WHERE is_active = true) as active_count
|
|
||||||
FROM dealers;
|
|
||||||
```
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## Additional Notes
|
|
||||||
|
|
||||||
- **Backup**: Always backup your database before bulk imports
|
|
||||||
- **Testing**: Test import with a small sample (5-10 rows) first
|
|
||||||
- **Validation**: Validate data quality before import
|
|
||||||
- **Updates**: Use `UPSERT` logic if you need to update existing records
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## Support
|
|
||||||
|
|
||||||
For issues or questions:
|
|
||||||
1. Check the troubleshooting section above
|
|
||||||
2. Review PostgreSQL COPY documentation
|
|
||||||
3. Verify CSV format matches the sample provided
|
|
||||||
4. Check database logs for detailed error messages
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
**Last Updated**: December 2025
|
|
||||||
**Version**: 1.0
|
|
||||||
|
|
||||||
@ -1,29 +0,0 @@
|
|||||||
# Dealer Integration Implementation Status
|
|
||||||
|
|
||||||
This document summarizes the changes made to integrate the external Royal Enfield Dealer API and implement the dealer validation logic during request creation.
|
|
||||||
|
|
||||||
## Completed Work
|
|
||||||
|
|
||||||
### 1. External Dealer API Integration
|
|
||||||
- **Service**: `DealerExternalService` in `src/services/dealerExternal.service.ts`
|
|
||||||
- Implemented `getDealerByCode` to fetch data from `https://api-uat2.royalenfield.com/DealerMaster`.
|
|
||||||
- Returns real-time GSTIN, Address, and location details.
|
|
||||||
- **Controller & Routes**: Integrated under `/api/v1/dealers-external/search/:dealerCode`.
|
|
||||||
- **Enrichment**: `DealerService.getDealerByCode` now automatically merges this external data into the system's `DealerInfo`, benefiting PWC and PDF generation services.
|
|
||||||
|
|
||||||
### 2. Dealer Validation & Field Mapping Fix
|
|
||||||
- **Strategic Mapping**: Based on requirement, all dealer codes are now mapped against the `employeeNumber` field (HR ID) in the `User` model, not `employeeId`.
|
|
||||||
- **User Enrichment Service**: `validateDealerUser(dealerCode)` now searches by `employeeNumber`.
|
|
||||||
- **SSO Alignment**: `AuthService.ts` now extracts `dealer_code` from the authentication response and persists it to `employeeNumber`.
|
|
||||||
- **Dealer Service**: `getDealerByCode` uses jobTitle-based validation against the `User` table as the primary lookup.
|
|
||||||
|
|
||||||
### 3. Claim Workflow Integration
|
|
||||||
- **Dealer Claim Service**: `createClaimRequest` validates the dealer immediately and overrides approver steps 1 and 4 with the validated user.
|
|
||||||
- **Workflow Controller**: Enforces dealer validation for all `DEALER CLAIM` templates and any request containing a `dealerCode`.
|
|
||||||
|
|
||||||
### 4. E-Invoice & PDF Alignment
|
|
||||||
- **PWC Integration**: `generateSignedInvoice` now uses the enriched `DealerInfo` containing the correct external GSTIN and state code.
|
|
||||||
- **Invoice PDF**: `PdfService` correctly displays the external dealer name, GSTIN, and POS from the source of truth.
|
|
||||||
|
|
||||||
## Conclusion
|
|
||||||
All integrated components have been verified via test scripts and end-to-end flow analysis. The dependency on the local `dealers` table has been successfully minimized, and the system now relies on the `User` table and External API as the primary sources of dealer information.
|
|
||||||
@ -56,7 +56,7 @@ users {
|
|||||||
```json
|
```json
|
||||||
{
|
{
|
||||||
"userId": "uuid-1",
|
"userId": "uuid-1",
|
||||||
"email": "john.doe@{{APP_DOMAIN}}",
|
"email": "john.doe@royalenfield.com",
|
||||||
"employeeId": "E12345", // Regular employee ID
|
"employeeId": "E12345", // Regular employee ID
|
||||||
"designation": "Software Engineer",
|
"designation": "Software Engineer",
|
||||||
"department": "IT",
|
"department": "IT",
|
||||||
@ -68,7 +68,7 @@ users {
|
|||||||
```json
|
```json
|
||||||
{
|
{
|
||||||
"userId": "uuid-2",
|
"userId": "uuid-2",
|
||||||
"email": "test.2@{{APP_DOMAIN}}",
|
"email": "test.2@royalenfield.com",
|
||||||
"employeeId": "RE-MH-001", // Dealer code stored here
|
"employeeId": "RE-MH-001", // Dealer code stored here
|
||||||
"designation": "Dealer",
|
"designation": "Dealer",
|
||||||
"department": "Dealer Operations",
|
"department": "Dealer Operations",
|
||||||
|
|||||||
@ -98,8 +98,8 @@ DMS_WEBHOOK_SECRET=your_shared_secret_key_here
|
|||||||
|
|
||||||
**Base URL Examples:**
|
**Base URL Examples:**
|
||||||
- Development: `http://localhost:5000/api/v1/webhooks/dms/invoice`
|
- Development: `http://localhost:5000/api/v1/webhooks/dms/invoice`
|
||||||
- UAT: `https://reflow-uat.{{APP_DOMAIN}}/api/v1/webhooks/dms/invoice`
|
- UAT: `https://reflow-uat.royalenfield.com/api/v1/webhooks/dms/invoice`
|
||||||
- Production: `https://reflow.{{APP_DOMAIN}}/api/v1/webhooks/dms/invoice`
|
- Production: `https://reflow.royalenfield.com/api/v1/webhooks/dms/invoice`
|
||||||
|
|
||||||
### 3.2 Request Headers
|
### 3.2 Request Headers
|
||||||
|
|
||||||
@ -205,8 +205,8 @@ User-Agent: DMS-Webhook-Client/1.0
|
|||||||
|
|
||||||
**Base URL Examples:**
|
**Base URL Examples:**
|
||||||
- Development: `http://localhost:5000/api/v1/webhooks/dms/credit-note`
|
- Development: `http://localhost:5000/api/v1/webhooks/dms/credit-note`
|
||||||
- UAT: `https://reflow-uat.{{APP_DOMAIN}}/api/v1/webhooks/dms/credit-note`
|
- UAT: `https://reflow-uat.royalenfield.com/api/v1/webhooks/dms/credit-note`
|
||||||
- Production: `https://reflow.{{APP_DOMAIN}}/api/v1/webhooks/dms/credit-note`
|
- Production: `https://reflow.royalenfield.com/api/v1/webhooks/dms/credit-note`
|
||||||
|
|
||||||
### 4.2 Request Headers
|
### 4.2 Request Headers
|
||||||
|
|
||||||
@ -563,8 +563,8 @@ DMS_WEBHOOK_SECRET=your_shared_secret_key_here
|
|||||||
| Environment | Invoice Webhook URL | Credit Note Webhook URL |
|
| Environment | Invoice Webhook URL | Credit Note Webhook URL |
|
||||||
|-------------|---------------------|-------------------------|
|
|-------------|---------------------|-------------------------|
|
||||||
| Development | `http://localhost:5000/api/v1/webhooks/dms/invoice` | `http://localhost:5000/api/v1/webhooks/dms/credit-note` |
|
| Development | `http://localhost:5000/api/v1/webhooks/dms/invoice` | `http://localhost:5000/api/v1/webhooks/dms/credit-note` |
|
||||||
| UAT | `https://reflow-uat.{{APP_DOMAIN}}/api/v1/webhooks/dms/invoice` | `https://reflow-uat.{{APP_DOMAIN}}/api/v1/webhooks/dms/credit-note` |
|
| UAT | `https://reflow-uat.royalenfield.com/api/v1/webhooks/dms/invoice` | `https://reflow-uat.royalenfield.com/api/v1/webhooks/dms/credit-note` |
|
||||||
| Production | `https://reflow.{{APP_DOMAIN}}/api/v1/webhooks/dms/invoice` | `https://reflow.{{APP_DOMAIN}}/api/v1/webhooks/dms/credit-note` |
|
| Production | `https://reflow.royalenfield.com/api/v1/webhooks/dms/invoice` | `https://reflow.royalenfield.com/api/v1/webhooks/dms/credit-note` |
|
||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
|
|||||||
@ -157,7 +157,7 @@ npm run seed:config
|
|||||||
```bash
|
```bash
|
||||||
# Edit the script
|
# Edit the script
|
||||||
nano scripts/assign-admin-user.sql
|
nano scripts/assign-admin-user.sql
|
||||||
# Change: YOUR_EMAIL@{{APP_DOMAIN}}
|
# Change: YOUR_EMAIL@royalenfield.com
|
||||||
|
|
||||||
# Run it
|
# Run it
|
||||||
psql -d royal_enfield_workflow -f scripts/assign-admin-user.sql
|
psql -d royal_enfield_workflow -f scripts/assign-admin-user.sql
|
||||||
@ -170,7 +170,7 @@ psql -d royal_enfield_workflow
|
|||||||
|
|
||||||
UPDATE users
|
UPDATE users
|
||||||
SET role = 'ADMIN'
|
SET role = 'ADMIN'
|
||||||
WHERE email = 'your-email@{{APP_DOMAIN}}';
|
WHERE email = 'your-email@royalenfield.com';
|
||||||
|
|
||||||
-- Verify
|
-- Verify
|
||||||
SELECT email, role FROM users WHERE role = 'ADMIN';
|
SELECT email, role FROM users WHERE role = 'ADMIN';
|
||||||
@ -188,7 +188,7 @@ psql -d royal_enfield_workflow -c "\dt"
|
|||||||
psql -d royal_enfield_workflow -c "\dT+ user_role_enum"
|
psql -d royal_enfield_workflow -c "\dT+ user_role_enum"
|
||||||
|
|
||||||
# Check your user
|
# Check your user
|
||||||
psql -d royal_enfield_workflow -c "SELECT email, role FROM users WHERE email = 'your-email@{{APP_DOMAIN}}';"
|
psql -d royal_enfield_workflow -c "SELECT email, role FROM users WHERE email = 'your-email@royalenfield.com';"
|
||||||
```
|
```
|
||||||
|
|
||||||
---
|
---
|
||||||
@ -241,13 +241,13 @@ Expected output:
|
|||||||
```sql
|
```sql
|
||||||
-- Single user
|
-- Single user
|
||||||
UPDATE users SET role = 'MANAGEMENT'
|
UPDATE users SET role = 'MANAGEMENT'
|
||||||
WHERE email = 'manager@{{APP_DOMAIN}}';
|
WHERE email = 'manager@royalenfield.com';
|
||||||
|
|
||||||
-- Multiple users
|
-- Multiple users
|
||||||
UPDATE users SET role = 'MANAGEMENT'
|
UPDATE users SET role = 'MANAGEMENT'
|
||||||
WHERE email IN (
|
WHERE email IN (
|
||||||
'manager1@{{APP_DOMAIN}}',
|
'manager1@royalenfield.com',
|
||||||
'manager2@{{APP_DOMAIN}}'
|
'manager2@royalenfield.com'
|
||||||
);
|
);
|
||||||
|
|
||||||
-- By department
|
-- By department
|
||||||
@ -260,13 +260,13 @@ WHERE department = 'Management' AND is_active = true;
|
|||||||
```sql
|
```sql
|
||||||
-- Single user
|
-- Single user
|
||||||
UPDATE users SET role = 'ADMIN'
|
UPDATE users SET role = 'ADMIN'
|
||||||
WHERE email = 'admin@{{APP_DOMAIN}}';
|
WHERE email = 'admin@royalenfield.com';
|
||||||
|
|
||||||
-- Multiple admins
|
-- Multiple admins
|
||||||
UPDATE users SET role = 'ADMIN'
|
UPDATE users SET role = 'ADMIN'
|
||||||
WHERE email IN (
|
WHERE email IN (
|
||||||
'admin1@{{APP_DOMAIN}}',
|
'admin1@royalenfield.com',
|
||||||
'admin2@{{APP_DOMAIN}}'
|
'admin2@royalenfield.com'
|
||||||
);
|
);
|
||||||
|
|
||||||
-- By department
|
-- By department
|
||||||
@ -331,7 +331,7 @@ SELECT
|
|||||||
mobile_phone,
|
mobile_phone,
|
||||||
array_length(ad_groups, 1) as ad_group_count
|
array_length(ad_groups, 1) as ad_group_count
|
||||||
FROM users
|
FROM users
|
||||||
WHERE email = 'your-email@{{APP_DOMAIN}}';
|
WHERE email = 'your-email@royalenfield.com';
|
||||||
```
|
```
|
||||||
|
|
||||||
---
|
---
|
||||||
@ -344,7 +344,7 @@ WHERE email = 'your-email@{{APP_DOMAIN}}';
|
|||||||
curl -X POST http://localhost:5000/api/v1/auth/okta/callback \
|
curl -X POST http://localhost:5000/api/v1/auth/okta/callback \
|
||||||
-H "Content-Type: application/json" \
|
-H "Content-Type: application/json" \
|
||||||
-d '{
|
-d '{
|
||||||
"email": "test@{{APP_DOMAIN}}",
|
"email": "test@royalenfield.com",
|
||||||
"displayName": "Test User",
|
"displayName": "Test User",
|
||||||
"oktaSub": "test-sub-123"
|
"oktaSub": "test-sub-123"
|
||||||
}'
|
}'
|
||||||
@ -353,14 +353,14 @@ curl -X POST http://localhost:5000/api/v1/auth/okta/callback \
|
|||||||
### 2. Check User Created with Default Role
|
### 2. Check User Created with Default Role
|
||||||
|
|
||||||
```sql
|
```sql
|
||||||
SELECT email, role FROM users WHERE email = 'test@{{APP_DOMAIN}}';
|
SELECT email, role FROM users WHERE email = 'test@royalenfield.com';
|
||||||
-- Expected: role = 'USER'
|
-- Expected: role = 'USER'
|
||||||
```
|
```
|
||||||
|
|
||||||
### 3. Update to ADMIN
|
### 3. Update to ADMIN
|
||||||
|
|
||||||
```sql
|
```sql
|
||||||
UPDATE users SET role = 'ADMIN' WHERE email = 'test@{{APP_DOMAIN}}';
|
UPDATE users SET role = 'ADMIN' WHERE email = 'test@royalenfield.com';
|
||||||
```
|
```
|
||||||
|
|
||||||
### 4. Verify API Access
|
### 4. Verify API Access
|
||||||
@ -369,7 +369,7 @@ UPDATE users SET role = 'ADMIN' WHERE email = 'test@{{APP_DOMAIN}}';
|
|||||||
# Login and get token
|
# Login and get token
|
||||||
curl -X POST http://localhost:5000/api/v1/auth/login \
|
curl -X POST http://localhost:5000/api/v1/auth/login \
|
||||||
-H "Content-Type: application/json" \
|
-H "Content-Type: application/json" \
|
||||||
-d '{"email": "test@{{APP_DOMAIN}}", ...}'
|
-d '{"email": "test@royalenfield.com", ...}'
|
||||||
|
|
||||||
# Try admin endpoint (should work if ADMIN role)
|
# Try admin endpoint (should work if ADMIN role)
|
||||||
curl http://localhost:5000/api/v1/admin/configurations \
|
curl http://localhost:5000/api/v1/admin/configurations \
|
||||||
@ -449,7 +449,7 @@ npm run migrate
|
|||||||
|
|
||||||
```sql
|
```sql
|
||||||
-- Check if user exists
|
-- Check if user exists
|
||||||
SELECT * FROM users WHERE email = 'your-email@{{APP_DOMAIN}}';
|
SELECT * FROM users WHERE email = 'your-email@royalenfield.com';
|
||||||
|
|
||||||
-- Check Okta sub
|
-- Check Okta sub
|
||||||
SELECT * FROM users WHERE okta_sub = 'your-okta-sub';
|
SELECT * FROM users WHERE okta_sub = 'your-okta-sub';
|
||||||
@ -459,7 +459,7 @@ SELECT * FROM users WHERE okta_sub = 'your-okta-sub';
|
|||||||
|
|
||||||
```sql
|
```sql
|
||||||
-- Verify role
|
-- Verify role
|
||||||
SELECT email, role, is_active FROM users WHERE email = 'your-email@{{APP_DOMAIN}}';
|
SELECT email, role, is_active FROM users WHERE email = 'your-email@royalenfield.com';
|
||||||
|
|
||||||
-- Check role enum
|
-- Check role enum
|
||||||
\dT+ user_role_enum
|
\dT+ user_role_enum
|
||||||
|
|||||||
@ -29,7 +29,7 @@ This guide provides step-by-step instructions for setting up Google Cloud Storag
|
|||||||
|------|------------------|
|
|------|------------------|
|
||||||
| **Application** | Royal Enfield Workflow System |
|
| **Application** | Royal Enfield Workflow System |
|
||||||
| **Environment** | Production |
|
| **Environment** | Production |
|
||||||
| **Domain** | `https://reflow.{{APP_DOMAIN}}` |
|
| **Domain** | `https://reflow.royalenfield.com` |
|
||||||
| **Purpose** | Store workflow documents, attachments, invoices, and credit notes |
|
| **Purpose** | Store workflow documents, attachments, invoices, and credit notes |
|
||||||
| **Storage Type** | Google Cloud Storage (GCS) |
|
| **Storage Type** | Google Cloud Storage (GCS) |
|
||||||
| **Region** | `asia-south1` (Mumbai) |
|
| **Region** | `asia-south1` (Mumbai) |
|
||||||
@ -325,8 +325,8 @@ Create `cors-config-prod.json`:
|
|||||||
[
|
[
|
||||||
{
|
{
|
||||||
"origin": [
|
"origin": [
|
||||||
"https://reflow.{{APP_DOMAIN}}",
|
"https://reflow.royalenfield.com",
|
||||||
"https://www.{{APP_DOMAIN}}"
|
"https://www.royalenfield.com"
|
||||||
],
|
],
|
||||||
"method": ["GET", "PUT", "POST", "DELETE", "HEAD", "OPTIONS"],
|
"method": ["GET", "PUT", "POST", "DELETE", "HEAD", "OPTIONS"],
|
||||||
"responseHeader": [
|
"responseHeader": [
|
||||||
|
|||||||
@ -6,7 +6,7 @@
|
|||||||
|------|-------|
|
|------|-------|
|
||||||
| **Application** | RE Workflow System |
|
| **Application** | RE Workflow System |
|
||||||
| **Environment** | UAT |
|
| **Environment** | UAT |
|
||||||
| **Domain** | https://reflow-uat.{{APP_DOMAIN}} |
|
| **Domain** | https://reflow-uat.royalenfield.com |
|
||||||
| **Purpose** | Store workflow documents and attachments |
|
| **Purpose** | Store workflow documents and attachments |
|
||||||
|
|
||||||
---
|
---
|
||||||
@ -131,8 +131,8 @@ Apply this CORS policy to allow browser uploads:
|
|||||||
[
|
[
|
||||||
{
|
{
|
||||||
"origin": [
|
"origin": [
|
||||||
"https://reflow-uat.{{APP_DOMAIN}}",
|
"https://reflow-uat.royalenfield.com",
|
||||||
"https://reflow.{{APP_DOMAIN}}"
|
"https://reflow.royalenfield.com"
|
||||||
],
|
],
|
||||||
"method": ["GET", "PUT", "POST", "DELETE", "HEAD", "OPTIONS"],
|
"method": ["GET", "PUT", "POST", "DELETE", "HEAD", "OPTIONS"],
|
||||||
"responseHeader": [
|
"responseHeader": [
|
||||||
|
|||||||
@ -1,277 +0,0 @@
|
|||||||
# Google Secret Manager Integration Guide
|
|
||||||
|
|
||||||
This guide explains how to integrate Google Cloud Secret Manager with your Node.js application to securely manage environment variables.
|
|
||||||
|
|
||||||
## Overview
|
|
||||||
|
|
||||||
The Google Secret Manager integration allows you to:
|
|
||||||
- Store sensitive configuration values (passwords, API keys, tokens) in Google Cloud Secret Manager
|
|
||||||
- Load secrets at application startup and merge them with your existing environment variables
|
|
||||||
- Maintain backward compatibility with `.env` files for local development
|
|
||||||
- Use minimal code changes - existing `process.env.VARIABLE_NAME` access continues to work
|
|
||||||
|
|
||||||
## Prerequisites
|
|
||||||
|
|
||||||
1. **Google Cloud Project** with Secret Manager API enabled
|
|
||||||
2. **Service Account** with Secret Manager Secret Accessor role
|
|
||||||
3. **Authentication** - Service account credentials configured (via `GCP_KEY_FILE` or default credentials)
|
|
||||||
|
|
||||||
## Setup Instructions
|
|
||||||
|
|
||||||
### 1. Enable Secret Manager API
|
|
||||||
|
|
||||||
```bash
|
|
||||||
gcloud services enable secretmanager.googleapis.com --project=YOUR_PROJECT_ID
|
|
||||||
```
|
|
||||||
|
|
||||||
### 2. Create Secrets in Google Secret Manager
|
|
||||||
|
|
||||||
Create secrets using the Google Cloud Console or gcloud CLI:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# Example: Create a database password secret
|
|
||||||
echo -n "your-secure-password" | gcloud secrets create DB_PASSWORD \
|
|
||||||
--project=YOUR_PROJECT_ID \
|
|
||||||
--data-file=-
|
|
||||||
|
|
||||||
# Example: Create a JWT secret
|
|
||||||
echo -n "your-jwt-secret-key" | gcloud secrets create JWT_SECRET \
|
|
||||||
--project=YOUR_PROJECT_ID \
|
|
||||||
--data-file=-
|
|
||||||
|
|
||||||
# Grant service account access to secrets
|
|
||||||
gcloud secrets add-iam-policy-binding DB_PASSWORD \
|
|
||||||
--member="serviceAccount:YOUR_SERVICE_ACCOUNT@YOUR_PROJECT.iam.gserviceaccount.com" \
|
|
||||||
--role="roles/secretmanager.secretAccessor" \
|
|
||||||
--project=YOUR_PROJECT_ID
|
|
||||||
```
|
|
||||||
|
|
||||||
### 3. Configure Environment Variables
|
|
||||||
|
|
||||||
Add the following to your `.env` file:
|
|
||||||
|
|
||||||
```env
|
|
||||||
# Google Secret Manager Configuration
|
|
||||||
USE_GOOGLE_SECRET_MANAGER=true
|
|
||||||
GCP_PROJECT_ID=your-project-id
|
|
||||||
|
|
||||||
# Optional: Prefix for all secret names (e.g., "prod" -> looks for "prod-DB_PASSWORD")
|
|
||||||
GCP_SECRET_PREFIX=
|
|
||||||
|
|
||||||
# Optional: JSON file mapping secret names to env var names
|
|
||||||
GCP_SECRET_MAP_FILE=./secret-map.json
|
|
||||||
```
|
|
||||||
|
|
||||||
**Important Notes:**
|
|
||||||
- Set `USE_GOOGLE_SECRET_MANAGER=true` to enable the integration
|
|
||||||
- `GCP_PROJECT_ID` must be set (same as used for GCS/Vertex AI)
|
|
||||||
- `GCP_KEY_FILE` should already be configured for other GCP services
|
|
||||||
- When `USE_GOOGLE_SECRET_MANAGER=false` or not set, the app uses `.env` file only
|
|
||||||
|
|
||||||
### 4. Secret Name Mapping
|
|
||||||
|
|
||||||
By default, secrets in Google Secret Manager are automatically mapped to environment variables:
|
|
||||||
- Secret name: `DB_PASSWORD` → Environment variable: `DB_PASSWORD`
|
|
||||||
- Secret name: `db-password` → Environment variable: `DB_PASSWORD` (hyphens converted to underscores, uppercase)
|
|
||||||
- Secret name: `jwt-secret-key` → Environment variable: `JWT_SECRET_KEY`
|
|
||||||
|
|
||||||
#### Custom Mapping (Optional)
|
|
||||||
|
|
||||||
If you need custom mappings, create a JSON file (e.g., `secret-map.json`):
|
|
||||||
|
|
||||||
```json
|
|
||||||
{
|
|
||||||
"db-password-prod": "DB_PASSWORD",
|
|
||||||
"jwt-secret-key": "JWT_SECRET",
|
|
||||||
"okta-client-secret-prod": "OKTA_CLIENT_SECRET"
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
Then set in `.env`:
|
|
||||||
```env
|
|
||||||
GCP_SECRET_MAP_FILE=./secret-map.json
|
|
||||||
```
|
|
||||||
|
|
||||||
### 5. Secret Prefix (Optional)
|
|
||||||
|
|
||||||
If all your secrets share a common prefix:
|
|
||||||
|
|
||||||
```env
|
|
||||||
GCP_SECRET_PREFIX=prod
|
|
||||||
```
|
|
||||||
|
|
||||||
This will look for secrets named `prod-DB_PASSWORD`, `prod-JWT_SECRET`, etc.
|
|
||||||
|
|
||||||
## How It Works
|
|
||||||
|
|
||||||
1. **Application Startup:**
|
|
||||||
- `.env` file is loaded first (provides fallback values)
|
|
||||||
- If `USE_GOOGLE_SECRET_MANAGER=true`, secrets are fetched from Google Secret Manager
|
|
||||||
- Secrets are merged into `process.env`, overriding `.env` values if they exist
|
|
||||||
- Application continues with merged environment variables
|
|
||||||
|
|
||||||
2. **Fallback Behavior:**
|
|
||||||
- If Secret Manager is disabled or fails, the app falls back to `.env` file
|
|
||||||
- No errors are thrown - the app continues with available configuration
|
|
||||||
- Logs indicate whether secrets were loaded successfully
|
|
||||||
|
|
||||||
3. **Existing Code Compatibility:**
|
|
||||||
- No changes needed to existing code
|
|
||||||
- Continue using `process.env.VARIABLE_NAME` as before
|
|
||||||
- Secrets from Secret Manager automatically populate `process.env`
|
|
||||||
|
|
||||||
## Default Secrets Loaded
|
|
||||||
|
|
||||||
The service automatically attempts to load these common secrets (if they exist in Secret Manager):
|
|
||||||
|
|
||||||
**Database:**
|
|
||||||
- `DB_HOST`, `DB_PORT`, `DB_NAME`, `DB_USER`, `DB_PASSWORD`
|
|
||||||
|
|
||||||
**Authentication:**
|
|
||||||
- `JWT_SECRET`, `REFRESH_TOKEN_SECRET`, `SESSION_SECRET`
|
|
||||||
|
|
||||||
**SSO/Okta:**
|
|
||||||
- `OKTA_DOMAIN`, `OKTA_CLIENT_ID`, `OKTA_CLIENT_SECRET`, `OKTA_API_TOKEN`
|
|
||||||
|
|
||||||
**Email:**
|
|
||||||
- `SMTP_HOST`, `SMTP_PORT`, `SMTP_USER`, `SMTP_PASSWORD`
|
|
||||||
|
|
||||||
**Web Push (VAPID):**
|
|
||||||
- `VAPID_PUBLIC_KEY`, `VAPID_PRIVATE_KEY`
|
|
||||||
|
|
||||||
**Logging:**
|
|
||||||
- `LOKI_HOST`, `LOKI_USER`, `LOKI_PASSWORD`
|
|
||||||
|
|
||||||
### Loading Custom Secrets
|
|
||||||
|
|
||||||
To load additional secrets, modify the code:
|
|
||||||
|
|
||||||
```typescript
|
|
||||||
// In server.ts or app.ts
|
|
||||||
import { googleSecretManager } from './services/googleSecretManager.service';
|
|
||||||
|
|
||||||
// Load default secrets + custom ones
|
|
||||||
await googleSecretManager.loadSecrets([
|
|
||||||
'DB_PASSWORD',
|
|
||||||
'JWT_SECRET',
|
|
||||||
'CUSTOM_API_KEY', // Your custom secret
|
|
||||||
'CUSTOM_SECRET_2'
|
|
||||||
]);
|
|
||||||
```
|
|
||||||
|
|
||||||
Or load a single secret on-demand:
|
|
||||||
|
|
||||||
```typescript
|
|
||||||
import { googleSecretManager } from './services/googleSecretManager.service';
|
|
||||||
|
|
||||||
const apiKey = await googleSecretManager.getSecretValue('CUSTOM_API_KEY', 'API_KEY');
|
|
||||||
```
|
|
||||||
|
|
||||||
## Security Best Practices
|
|
||||||
|
|
||||||
1. **Service Account Permissions:**
|
|
||||||
- Grant only `roles/secretmanager.secretAccessor` role
|
|
||||||
- Use separate service accounts for different environments
|
|
||||||
- Never grant `roles/owner` or `roles/editor` to service accounts
|
|
||||||
|
|
||||||
2. **Secret Rotation:**
|
|
||||||
- Rotate secrets regularly in Google Secret Manager
|
|
||||||
- The app automatically uses the `latest` version of each secret
|
|
||||||
- No code changes needed when secrets are rotated
|
|
||||||
|
|
||||||
3. **Environment Separation:**
|
|
||||||
- Use different Google Cloud projects for dev/staging/prod
|
|
||||||
- Use `GCP_SECRET_PREFIX` to namespace secrets by environment
|
|
||||||
- Never commit `.env` files with production secrets to version control
|
|
||||||
|
|
||||||
4. **Access Control:**
|
|
||||||
- Use IAM policies to control who can read secrets
|
|
||||||
- Enable audit logging for secret access
|
|
||||||
- Regularly review secret access logs
|
|
||||||
|
|
||||||
## Troubleshooting
|
|
||||||
|
|
||||||
### Secrets Not Loading
|
|
||||||
|
|
||||||
**Check logs for:**
|
|
||||||
```
|
|
||||||
[Secret Manager] Google Secret Manager is disabled (USE_GOOGLE_SECRET_MANAGER != true)
|
|
||||||
[Secret Manager] GCP_PROJECT_ID not set, skipping Google Secret Manager
|
|
||||||
[Secret Manager] Failed to load secrets: [error message]
|
|
||||||
```
|
|
||||||
|
|
||||||
**Common issues:**
|
|
||||||
1. `USE_GOOGLE_SECRET_MANAGER` not set to `true`
|
|
||||||
2. `GCP_PROJECT_ID` not configured
|
|
||||||
3. Service account lacks Secret Manager permissions
|
|
||||||
4. Secrets don't exist in Secret Manager
|
|
||||||
5. Incorrect secret names (check case sensitivity)
|
|
||||||
|
|
||||||
### Service Account Authentication
|
|
||||||
|
|
||||||
Ensure service account credentials are available:
|
|
||||||
- Set `GCP_KEY_FILE` to point to service account JSON file
|
|
||||||
- Or configure Application Default Credentials (ADC)
|
|
||||||
- Test with: `gcloud auth application-default login`
|
|
||||||
|
|
||||||
### Secret Not Found
|
|
||||||
|
|
||||||
If a secret doesn't exist in Secret Manager:
|
|
||||||
- The app logs a debug message and continues
|
|
||||||
- Falls back to `.env` file value
|
|
||||||
- This is expected behavior - not all secrets need to be in Secret Manager
|
|
||||||
|
|
||||||
### Debugging
|
|
||||||
|
|
||||||
Enable debug logging by setting:
|
|
||||||
```env
|
|
||||||
LOG_LEVEL=debug
|
|
||||||
```
|
|
||||||
|
|
||||||
This will show detailed logs about which secrets are being loaded.
|
|
||||||
|
|
||||||
## Example Configuration
|
|
||||||
|
|
||||||
**Local Development (.env):**
|
|
||||||
```env
|
|
||||||
USE_GOOGLE_SECRET_MANAGER=false
|
|
||||||
DB_PASSWORD=local-dev-password
|
|
||||||
JWT_SECRET=local-jwt-secret
|
|
||||||
```
|
|
||||||
|
|
||||||
**Production (.env):**
|
|
||||||
```env
|
|
||||||
USE_GOOGLE_SECRET_MANAGER=true
|
|
||||||
GCP_PROJECT_ID=re-platform-workflow-dealer
|
|
||||||
GCP_SECRET_PREFIX=prod
|
|
||||||
GCP_KEY_FILE=./credentials/service-account.json
|
|
||||||
# DB_PASSWORD and other secrets loaded from Secret Manager
|
|
||||||
```
|
|
||||||
|
|
||||||
## Migration Strategy
|
|
||||||
|
|
||||||
1. **Phase 1: Setup**
|
|
||||||
- Create secrets in Google Secret Manager
|
|
||||||
- Keep `.env` file with current values (as backup)
|
|
||||||
|
|
||||||
2. **Phase 2: Test**
|
|
||||||
- Set `USE_GOOGLE_SECRET_MANAGER=true` in development
|
|
||||||
- Verify secrets are loaded correctly
|
|
||||||
- Test application functionality
|
|
||||||
|
|
||||||
3. **Phase 3: Production**
|
|
||||||
- Deploy with `USE_GOOGLE_SECRET_MANAGER=true`
|
|
||||||
- Monitor logs for secret loading success
|
|
||||||
- Remove sensitive values from `.env` file (keep placeholders)
|
|
||||||
|
|
||||||
4. **Phase 4: Cleanup**
|
|
||||||
- Remove production secrets from `.env` file
|
|
||||||
- Ensure all secrets are in Secret Manager
|
|
||||||
- Document secret names and mappings
|
|
||||||
|
|
||||||
## Additional Resources
|
|
||||||
|
|
||||||
- [Google Secret Manager Documentation](https://cloud.google.com/secret-manager/docs)
|
|
||||||
- [Secret Manager Client Library](https://cloud.google.com/nodejs/docs/reference/secret-manager/latest)
|
|
||||||
- [Service Account Best Practices](https://cloud.google.com/iam/docs/best-practices-service-accounts)
|
|
||||||
|
|
||||||
@ -72,8 +72,8 @@ The Users API returns a complete user object:
|
|||||||
"employeeID": "E09994",
|
"employeeID": "E09994",
|
||||||
"title": "Supports Business Applications (SAP) portfolio",
|
"title": "Supports Business Applications (SAP) portfolio",
|
||||||
"department": "Deputy Manager - Digital & IT",
|
"department": "Deputy Manager - Digital & IT",
|
||||||
"login": "sanjaysahu@{{APP_DOMAIN}}",
|
"login": "sanjaysahu@Royalenfield.com",
|
||||||
"email": "sanjaysahu@{{APP_DOMAIN}}"
|
"email": "sanjaysahu@royalenfield.com"
|
||||||
},
|
},
|
||||||
...
|
...
|
||||||
}
|
}
|
||||||
@ -127,7 +127,7 @@ Example log:
|
|||||||
### Test with curl
|
### Test with curl
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
curl --location 'https://{{IDP_DOMAIN}}/api/v1/users/testuser10@eichergroup.com' \
|
curl --location 'https://dev-830839.oktapreview.com/api/v1/users/testuser10@eichergroup.com' \
|
||||||
--header 'Authorization: SSWS YOUR_OKTA_API_TOKEN' \
|
--header 'Authorization: SSWS YOUR_OKTA_API_TOKEN' \
|
||||||
--header 'Accept: application/json'
|
--header 'Accept: application/json'
|
||||||
```
|
```
|
||||||
|
|||||||
@ -450,16 +450,16 @@ Before Migration:
|
|||||||
+-------------------------+-----------+
|
+-------------------------+-----------+
|
||||||
| email | is_admin |
|
| email | is_admin |
|
||||||
+-------------------------+-----------+
|
+-------------------------+-----------+
|
||||||
| admin@{{APP_DOMAIN}} | true |
|
| admin@royalenfield.com | true |
|
||||||
| user1@{{APP_DOMAIN}} | false |
|
| user1@royalenfield.com | false |
|
||||||
+-------------------------+-----------+
|
+-------------------------+-----------+
|
||||||
|
|
||||||
After Migration:
|
After Migration:
|
||||||
+-------------------------+-----------+-----------+
|
+-------------------------+-----------+-----------+
|
||||||
| email | role | is_admin |
|
| email | role | is_admin |
|
||||||
+-------------------------+-----------+-----------+
|
+-------------------------+-----------+-----------+
|
||||||
| admin@{{APP_DOMAIN}} | ADMIN | true |
|
| admin@royalenfield.com | ADMIN | true |
|
||||||
| user1@{{APP_DOMAIN}} | USER | false |
|
| user1@royalenfield.com | USER | false |
|
||||||
+-------------------------+-----------+-----------+
|
+-------------------------+-----------+-----------+
|
||||||
```
|
```
|
||||||
|
|
||||||
@ -473,17 +473,17 @@ After Migration:
|
|||||||
-- Make user a MANAGEMENT role
|
-- Make user a MANAGEMENT role
|
||||||
UPDATE users
|
UPDATE users
|
||||||
SET role = 'MANAGEMENT', is_admin = false
|
SET role = 'MANAGEMENT', is_admin = false
|
||||||
WHERE email = 'manager@{{APP_DOMAIN}}';
|
WHERE email = 'manager@royalenfield.com';
|
||||||
|
|
||||||
-- Make user an ADMIN role
|
-- Make user an ADMIN role
|
||||||
UPDATE users
|
UPDATE users
|
||||||
SET role = 'ADMIN', is_admin = true
|
SET role = 'ADMIN', is_admin = true
|
||||||
WHERE email = 'admin@{{APP_DOMAIN}}';
|
WHERE email = 'admin@royalenfield.com';
|
||||||
|
|
||||||
-- Revert to USER role
|
-- Revert to USER role
|
||||||
UPDATE users
|
UPDATE users
|
||||||
SET role = 'USER', is_admin = false
|
SET role = 'USER', is_admin = false
|
||||||
WHERE email = 'user@{{APP_DOMAIN}}';
|
WHERE email = 'user@royalenfield.com';
|
||||||
```
|
```
|
||||||
|
|
||||||
### Via API (Admin Endpoint)
|
### Via API (Admin Endpoint)
|
||||||
|
|||||||
@ -47,12 +47,12 @@ psql -d royal_enfield_db -f scripts/assign-user-roles.sql
|
|||||||
-- Make specific users ADMIN
|
-- Make specific users ADMIN
|
||||||
UPDATE users
|
UPDATE users
|
||||||
SET role = 'ADMIN', is_admin = true
|
SET role = 'ADMIN', is_admin = true
|
||||||
WHERE email IN ('admin@{{APP_DOMAIN}}', 'it.admin@{{APP_DOMAIN}}');
|
WHERE email IN ('admin@royalenfield.com', 'it.admin@royalenfield.com');
|
||||||
|
|
||||||
-- Make specific users MANAGEMENT
|
-- Make specific users MANAGEMENT
|
||||||
UPDATE users
|
UPDATE users
|
||||||
SET role = 'MANAGEMENT', is_admin = false
|
SET role = 'MANAGEMENT', is_admin = false
|
||||||
WHERE email IN ('manager@{{APP_DOMAIN}}', 'auditor@{{APP_DOMAIN}}');
|
WHERE email IN ('manager@royalenfield.com', 'auditor@royalenfield.com');
|
||||||
|
|
||||||
-- Verify roles
|
-- Verify roles
|
||||||
SELECT email, display_name, role, is_admin FROM users ORDER BY role, email;
|
SELECT email, display_name, role, is_admin FROM users ORDER BY role, email;
|
||||||
@ -219,7 +219,7 @@ GROUP BY role;
|
|||||||
-- Check specific user
|
-- Check specific user
|
||||||
SELECT email, role, is_admin
|
SELECT email, role, is_admin
|
||||||
FROM users
|
FROM users
|
||||||
WHERE email = 'your-email@{{APP_DOMAIN}}';
|
WHERE email = 'your-email@royalenfield.com';
|
||||||
```
|
```
|
||||||
|
|
||||||
### Test 2: Test API Access
|
### Test 2: Test API Access
|
||||||
@ -356,7 +356,7 @@ WHERE designation ILIKE '%manager%' OR designation ILIKE '%head%';
|
|||||||
```sql
|
```sql
|
||||||
SELECT email, role, is_admin
|
SELECT email, role, is_admin
|
||||||
FROM users
|
FROM users
|
||||||
WHERE email = 'your-email@{{APP_DOMAIN}}';
|
WHERE email = 'your-email@royalenfield.com';
|
||||||
```
|
```
|
||||||
|
|
||||||
---
|
---
|
||||||
|
|||||||
@ -314,7 +314,7 @@ JWT_EXPIRY=24h
|
|||||||
REFRESH_TOKEN_EXPIRY=7d
|
REFRESH_TOKEN_EXPIRY=7d
|
||||||
|
|
||||||
# Okta Configuration
|
# Okta Configuration
|
||||||
OKTA_DOMAIN=https://{{IDP_DOMAIN}}
|
OKTA_DOMAIN=https://dev-830839.oktapreview.com
|
||||||
OKTA_CLIENT_ID=your-client-id
|
OKTA_CLIENT_ID=your-client-id
|
||||||
OKTA_CLIENT_SECRET=your-client-secret
|
OKTA_CLIENT_SECRET=your-client-secret
|
||||||
|
|
||||||
@ -334,7 +334,7 @@ GCP_BUCKET_PUBLIC=true
|
|||||||
|
|
||||||
**Identity Provider**: Okta
|
**Identity Provider**: Okta
|
||||||
- **Domain**: Configurable via `OKTA_DOMAIN` environment variable
|
- **Domain**: Configurable via `OKTA_DOMAIN` environment variable
|
||||||
- **Default**: `https://{{IDP_DOMAIN}}`
|
- **Default**: `https://dev-830839.oktapreview.com`
|
||||||
- **Protocol**: OAuth 2.0 / OpenID Connect (OIDC)
|
- **Protocol**: OAuth 2.0 / OpenID Connect (OIDC)
|
||||||
- **Grant Types**: Authorization Code, Resource Owner Password Credentials
|
- **Grant Types**: Authorization Code, Resource Owner Password Credentials
|
||||||
|
|
||||||
@ -650,7 +650,7 @@ graph LR
|
|||||||
{
|
{
|
||||||
"userId": "uuid",
|
"userId": "uuid",
|
||||||
"employeeId": "EMP001",
|
"employeeId": "EMP001",
|
||||||
"email": "user@{{APP_DOMAIN}}",
|
"email": "user@royalenfield.com",
|
||||||
"role": "USER" | "MANAGEMENT" | "ADMIN",
|
"role": "USER" | "MANAGEMENT" | "ADMIN",
|
||||||
"iat": 1234567890,
|
"iat": 1234567890,
|
||||||
"exp": 1234654290
|
"exp": 1234654290
|
||||||
@ -1048,7 +1048,7 @@ JWT_EXPIRY=24h
|
|||||||
REFRESH_TOKEN_EXPIRY=7d
|
REFRESH_TOKEN_EXPIRY=7d
|
||||||
|
|
||||||
# Okta
|
# Okta
|
||||||
OKTA_DOMAIN=https://{{IDP_DOMAIN}}
|
OKTA_DOMAIN=https://dev-830839.oktapreview.com
|
||||||
OKTA_CLIENT_ID=your-client-id
|
OKTA_CLIENT_ID=your-client-id
|
||||||
OKTA_CLIENT_SECRET=your-client-secret
|
OKTA_CLIENT_SECRET=your-client-secret
|
||||||
|
|
||||||
@ -1063,7 +1063,7 @@ GCP_BUCKET_PUBLIC=true
|
|||||||
**Frontend (.env):**
|
**Frontend (.env):**
|
||||||
```env
|
```env
|
||||||
VITE_API_BASE_URL=https://api.rebridge.co.in/api/v1
|
VITE_API_BASE_URL=https://api.rebridge.co.in/api/v1
|
||||||
VITE_OKTA_DOMAIN=https://{{IDP_DOMAIN}}
|
VITE_OKTA_DOMAIN=https://dev-830839.oktapreview.com
|
||||||
VITE_OKTA_CLIENT_ID=your-client-id
|
VITE_OKTA_CLIENT_ID=your-client-id
|
||||||
```
|
```
|
||||||
|
|
||||||
|
|||||||
@ -64,7 +64,7 @@ await this.createClaimApprovalLevels(
|
|||||||
isAuto: false,
|
isAuto: false,
|
||||||
approverType: 'department_lead' as const,
|
approverType: 'department_lead' as const,
|
||||||
approverId: departmentLead?.userId || null,
|
approverId: departmentLead?.userId || null,
|
||||||
approverEmail: departmentLead?.email || initiator.manager || `deptlead@${appDomain}`,
|
approverEmail: departmentLead?.email || initiator.manager || 'deptlead@royalenfield.com',
|
||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
|
|||||||
@ -1,201 +0,0 @@
|
|||||||
# Tanflow SSO User Data Mapping
|
|
||||||
|
|
||||||
This document outlines all user information available from Tanflow IAM Suite and how it maps to our User model for user creation.
|
|
||||||
|
|
||||||
## Tanflow Userinfo Endpoint Response
|
|
||||||
|
|
||||||
Tanflow uses **OpenID Connect (OIDC) standard claims** via the `/protocol/openid-connect/userinfo` endpoint. The following fields are available:
|
|
||||||
|
|
||||||
### Standard OIDC Claims (Available from Tanflow)
|
|
||||||
|
|
||||||
| Tanflow Field | OIDC Standard Claim | Type | Description | Currently Extracted |
|
|
||||||
|--------------|---------------------|------|--------------|-------------------|
|
|
||||||
| `sub` | `sub` | string | **REQUIRED** - Subject identifier (unique user ID) | ✅ Yes (as `oktaSub`) |
|
|
||||||
| `email` | `email` | string | Email address | ✅ Yes |
|
|
||||||
| `email_verified` | `email_verified` | boolean | Email verification status | ❌ No |
|
|
||||||
| `preferred_username` | `preferred_username` | string | Preferred username (fallback for email) | ✅ Yes (fallback) |
|
|
||||||
| `name` | `name` | string | Full display name | ✅ Yes (as `displayName`) |
|
|
||||||
| `given_name` | `given_name` | string | First name | ✅ Yes (as `firstName`) |
|
|
||||||
| `family_name` | `family_name` | string | Last name | ✅ Yes (as `lastName`) |
|
|
||||||
| `phone_number` | `phone_number` | string | Phone number | ✅ Yes (as `phone`) |
|
|
||||||
| `phone_number_verified` | `phone_number_verified` | boolean | Phone verification status | ❌ No |
|
|
||||||
| `address` | `address` | object | Address object (structured) | ❌ No |
|
|
||||||
| `locale` | `locale` | string | User locale (e.g., "en-US") | ❌ No |
|
|
||||||
| `picture` | `picture` | string | Profile picture URL | ❌ No |
|
|
||||||
| `website` | `website` | string | Website URL | ❌ No |
|
|
||||||
| `profile` | `profile` | string | Profile page URL | ❌ No |
|
|
||||||
| `birthdate` | `birthdate` | string | Date of birth | ❌ No |
|
|
||||||
| `gender` | `gender` | string | Gender | ❌ No |
|
|
||||||
| `zoneinfo` | `zoneinfo` | string | Timezone (e.g., "America/New_York") | ❌ No |
|
|
||||||
| `updated_at` | `updated_at` | number | Last update timestamp | ❌ No |
|
|
||||||
|
|
||||||
### Custom Tanflow Claims (May be available)
|
|
||||||
|
|
||||||
These are **custom claims** that Tanflow may include based on their configuration:
|
|
||||||
|
|
||||||
| Tanflow Field | Type | Description | Currently Extracted |
|
|
||||||
|--------------|------|-------------|-------------------|
|
|
||||||
| `employeeId` | string | Employee ID from HR system | ✅ Yes |
|
|
||||||
| `employee_id` | string | Alternative employee ID field | ✅ Yes (fallback) |
|
|
||||||
| `department` | string | Department/Division | ✅ Yes |
|
|
||||||
| `designation` | string | Job designation/position | ✅ Yes |
|
|
||||||
| `title` | string | Job title | ❌ No |
|
|
||||||
| `designation` | string | Job designation/position | ✅ Yes (as `designation`) |
|
|
||||||
| `employeeType` | string | Employee type (Dealer, Full-time, Contract, etc.) | ✅ Yes (as `jobTitle`) |
|
|
||||||
| `organization` | string | Organization name | ❌ No |
|
|
||||||
| `division` | string | Division name | ❌ No |
|
|
||||||
| `location` | string | Office location | ❌ No |
|
|
||||||
| `manager` | string | Manager name/email | ❌ No |
|
|
||||||
| `manager_id` | string | Manager employee ID | ❌ No |
|
|
||||||
| `cost_center` | string | Cost center code | ❌ No |
|
|
||||||
| `hire_date` | string | Date of hire | ❌ No |
|
|
||||||
| `office_location` | string | Office location | ❌ No |
|
|
||||||
| `country` | string | Country code | ❌ No |
|
|
||||||
| `city` | string | City name | ❌ No |
|
|
||||||
| `state` | string | State/Province | ❌ No |
|
|
||||||
| `postal_code` | string | Postal/ZIP code | ❌ No |
|
|
||||||
| `groups` | array | Group memberships | ❌ No |
|
|
||||||
| `roles` | array | User roles | ❌ No |
|
|
||||||
|
|
||||||
## Current Extraction Logic
|
|
||||||
|
|
||||||
**Location:** `Re_Backend/src/services/auth.service.ts` → `exchangeTanflowCodeForTokens()`
|
|
||||||
|
|
||||||
```typescript
|
|
||||||
const userData: SSOUserData = {
|
|
||||||
oktaSub: tanflowSub, // Reuse oktaSub field for Tanflow sub
|
|
||||||
email: tanflowUserInfo.email || tanflowUserInfo.preferred_username || '',
|
|
||||||
employeeId: tanflowUserInfo.employeeId || tanflowUserInfo.employee_id || undefined,
|
|
||||||
firstName: tanflowUserInfo.given_name || tanflowUserInfo.firstName || undefined,
|
|
||||||
lastName: tanflowUserInfo.family_name || tanflowUserInfo.lastName || undefined,
|
|
||||||
displayName: tanflowUserInfo.name || tanflowUserInfo.displayName || undefined,
|
|
||||||
department: tanflowUserInfo.department || undefined,
|
|
||||||
designation: tanflowUserInfo.designation || undefined, // Map Tanflow's designation claim to User.designation
|
|
||||||
phone: tanflowUserInfo.phone_number || tanflowUserInfo.phone || undefined,
|
|
||||||
// Additional fields
|
|
||||||
manager: tanflowUserInfo.manager || undefined,
|
|
||||||
jobTitle: tanflowUserInfo.employeeType || undefined, // Map employeeType to jobTitle
|
|
||||||
postalAddress: tanflowUserInfo.address ? (typeof tanflowUserInfo.address === 'string' ? tanflowUserInfo.address : JSON.stringify(tanflowUserInfo.address)) : undefined,
|
|
||||||
mobilePhone: tanflowUserInfo.mobile_phone || tanflowUserInfo.mobilePhone || undefined,
|
|
||||||
adGroups: Array.isArray(tanflowUserInfo.groups) ? tanflowUserInfo.groups : undefined,
|
|
||||||
};
|
|
||||||
```
|
|
||||||
|
|
||||||
## User Model Fields Mapping
|
|
||||||
|
|
||||||
**Location:** `Re_Backend/src/models/User.ts`
|
|
||||||
|
|
||||||
| User Model Field | Tanflow Source | Required | Notes |
|
|
||||||
|-----------------|----------------|----------|-------|
|
|
||||||
| `userId` | Auto-generated UUID | ✅ | Primary key |
|
|
||||||
| `oktaSub` | `sub` | ✅ | Unique identifier from Tanflow |
|
|
||||||
| `email` | `email` or `preferred_username` | ✅ | Primary identifier |
|
|
||||||
| `employeeId` | `employeeId` or `employee_id` | ❌ | Optional HR system ID |
|
|
||||||
| `firstName` | `given_name` or `firstName` | ❌ | Optional |
|
|
||||||
| `lastName` | `family_name` or `lastName` | ❌ | Optional |
|
|
||||||
| `displayName` | `name` or `displayName` | ❌ | Auto-generated if missing |
|
|
||||||
| `department` | `department` | ❌ | Optional |
|
|
||||||
| `designation` | `designation` | ❌ | Optional |
|
|
||||||
| `phone` | `phone_number` or `phone` | ❌ | Optional |
|
|
||||||
| `manager` | `manager` | ❌ | Optional (extracted if available) |
|
|
||||||
| `secondEmail` | N/A | ❌ | Not available from Tanflow |
|
|
||||||
| `jobTitle` | `employeeType` | ❌ | Optional (maps employeeType to jobTitle) |
|
|
||||||
| `employeeNumber` | N/A | ❌ | Not available from Tanflow |
|
|
||||||
| `postalAddress` | `address` (structured) | ❌ | Extracted (stringified when Tanflow returns an address object) |
|
|
||||||
| `mobilePhone` | N/A | ❌ | Not available from Tanflow |
|
|
||||||
| `adGroups` | `groups` | ❌ | Extracted when Tanflow returns a `groups` array |
|
|
||||||
| `location` | `address`, `city`, `state`, `country` | ❌ | **NOT currently extracted** |
|
|
||||||
| `role` | Default: 'USER' | ✅ | Default role assigned |
|
|
||||||
| `isActive` | Default: true | ✅ | Auto-set to true |
|
|
||||||
| `lastLogin` | Current timestamp | ✅ | Auto-set on login |
|
|
||||||
|
|
||||||
## Recommended Enhancements
|
|
||||||
|
|
||||||
### 1. Extract Additional Fields
|
|
||||||
|
|
||||||
Consider extracting these fields if available from Tanflow:
|
|
||||||
|
|
||||||
```typescript
|
|
||||||
// Enhanced extraction (to be implemented)
|
|
||||||
const userData: SSOUserData = {
|
|
||||||
// ... existing fields ...
|
|
||||||
|
|
||||||
// Additional fields (already implemented)
|
|
||||||
manager: tanflowUserInfo.manager || undefined,
|
|
||||||
jobTitle: tanflowUserInfo.employeeType || undefined, // Map employeeType to jobTitle
|
|
||||||
postalAddress: tanflowUserInfo.address ? (typeof tanflowUserInfo.address === 'string' ? tanflowUserInfo.address : JSON.stringify(tanflowUserInfo.address)) : undefined,
|
|
||||||
mobilePhone: tanflowUserInfo.mobile_phone || tanflowUserInfo.mobilePhone || undefined,
|
|
||||||
adGroups: Array.isArray(tanflowUserInfo.groups) ? tanflowUserInfo.groups : undefined,
|
|
||||||
|
|
||||||
// Location object
|
|
||||||
location: {
|
|
||||||
city: tanflowUserInfo.city || undefined,
|
|
||||||
state: tanflowUserInfo.state || undefined,
|
|
||||||
country: tanflowUserInfo.country || undefined,
|
|
||||||
office: tanflowUserInfo.office_location || undefined,
|
|
||||||
timezone: tanflowUserInfo.zoneinfo || undefined,
|
|
||||||
},
|
|
||||||
};
|
|
||||||
```
|
|
||||||
|
|
||||||
### 2. Log Available Fields
|
|
||||||
|
|
||||||
Add logging to see what Tanflow actually returns:
|
|
||||||
|
|
||||||
```typescript
|
|
||||||
logger.info('Tanflow userinfo response', {
|
|
||||||
availableFields: Object.keys(tanflowUserInfo),
|
|
||||||
hasEmail: !!tanflowUserInfo.email,
|
|
||||||
hasEmployeeId: !!(tanflowUserInfo.employeeId || tanflowUserInfo.employee_id),
|
|
||||||
hasDepartment: !!tanflowUserInfo.department,
|
|
||||||
hasManager: !!tanflowUserInfo.manager,
|
|
||||||
hasGroups: Array.isArray(tanflowUserInfo.groups),
|
|
||||||
groupsCount: Array.isArray(tanflowUserInfo.groups) ? tanflowUserInfo.groups.length : 0,
|
|
||||||
sampleData: {
|
|
||||||
sub: tanflowUserInfo.sub?.substring(0, 10) + '...',
|
|
||||||
email: tanflowUserInfo.email?.substring(0, 10) + '...',
|
|
||||||
name: tanflowUserInfo.name,
|
|
||||||
}
|
|
||||||
});
|
|
||||||
```
|
|
||||||
|
|
||||||
## User Creation Flow
|
|
||||||
|
|
||||||
1. **Token Exchange** → Get `access_token` from Tanflow
|
|
||||||
2. **Userinfo Call** → Call `/protocol/openid-connect/userinfo` with `access_token`
|
|
||||||
3. **Extract Data** → Map Tanflow fields to `SSOUserData` interface
|
|
||||||
4. **User Lookup** → Check if user exists by `email`
|
|
||||||
5. **Create/Update** → Create new user or update existing user
|
|
||||||
6. **Generate Tokens** → Generate JWT access/refresh tokens
|
|
||||||
|
|
||||||
## Testing Recommendations
|
|
||||||
|
|
||||||
1. **Test with Real Tanflow Account**
|
|
||||||
- Log actual userinfo response
|
|
||||||
- Document all available fields
|
|
||||||
- Verify field mappings
|
|
||||||
|
|
||||||
2. **Handle Missing Fields**
|
|
||||||
- Ensure graceful fallbacks
|
|
||||||
- Don't fail if optional fields are missing
|
|
||||||
- Log warnings for missing expected fields
|
|
||||||
|
|
||||||
3. **Validate Required Fields**
|
|
||||||
- `sub` (oktaSub) - REQUIRED
|
|
||||||
- `email` or `preferred_username` - REQUIRED
|
|
||||||
|
|
||||||
## Next Steps
|
|
||||||
|
|
||||||
1. ✅ **Current Implementation** - Basic OIDC claims extraction
|
|
||||||
2. 🔄 **Enhancement** - Extract additional custom claims (manager, groups, location)
|
|
||||||
3. 🔄 **Logging** - Add detailed logging of Tanflow response
|
|
||||||
4. 🔄 **Testing** - Test with real Tanflow account to see actual fields
|
|
||||||
5. 🔄 **Documentation** - Update this doc with actual Tanflow response structure
|
|
||||||
|
|
||||||
## Notes
|
|
||||||
|
|
||||||
- Tanflow uses **Keycloak** under the hood (based on URL structure)
|
|
||||||
- Keycloak supports custom user attributes that may be available
|
|
||||||
- Some fields may require specific realm/client configuration in Tanflow
|
|
||||||
- Contact Tanflow support to confirm available custom claims
|
|
||||||
|
|
||||||
@ -181,7 +181,7 @@ POST http://localhost:5000/api/v1/auth/login
|
|||||||
Content-Type: application/json
|
Content-Type: application/json
|
||||||
|
|
||||||
{
|
{
|
||||||
"username": "john.doe@{{APP_DOMAIN}}",
|
"username": "john.doe@royalenfield.com",
|
||||||
"password": "SecurePassword123!"
|
"password": "SecurePassword123!"
|
||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|||||||
27
env.example
27
env.example
@ -26,24 +26,17 @@ REFRESH_TOKEN_EXPIRY=7d
|
|||||||
SESSION_SECRET=your_session_secret_here_min_32_chars
|
SESSION_SECRET=your_session_secret_here_min_32_chars
|
||||||
|
|
||||||
# Cloud Storage (GCP)
|
# Cloud Storage (GCP)
|
||||||
GCP_PROJECT_ID={{GCP_PROJECT_ID}}
|
GCP_PROJECT_ID=re-workflow-project
|
||||||
GCP_BUCKET_NAME={{GCP_BUCKET_NAME}}
|
GCP_BUCKET_NAME=re-workflow-documents
|
||||||
GCP_KEY_FILE=./config/gcp-key.json
|
GCP_KEY_FILE=./config/gcp-key.json
|
||||||
|
|
||||||
# Google Secret Manager (Optional - for production)
|
|
||||||
# Set USE_GOOGLE_SECRET_MANAGER=true to enable loading secrets from Google Secret Manager
|
|
||||||
# Secrets from GCS will override .env file values
|
|
||||||
USE_GOOGLE_SECRET_MANAGER=false
|
|
||||||
# GCP_SECRET_PREFIX=optional-prefix-for-secret-names (e.g., "prod" -> looks for "prod-DB_PASSWORD")
|
|
||||||
# GCP_SECRET_MAP_FILE=./secret-map.json (optional JSON file to map secret names to env var names)
|
|
||||||
|
|
||||||
# Email Service (Optional)
|
# Email Service (Optional)
|
||||||
SMTP_HOST=smtp.gmail.com
|
SMTP_HOST=smtp.gmail.com
|
||||||
SMTP_PORT=587
|
SMTP_PORT=587
|
||||||
SMTP_SECURE=false
|
SMTP_SECURE=false
|
||||||
SMTP_USER=notifications@{{APP_DOMAIN}}
|
SMTP_USER=notifications@royalenfield.com
|
||||||
SMTP_PASSWORD=your_smtp_password
|
SMTP_PASSWORD=your_smtp_password
|
||||||
EMAIL_FROM=RE Workflow System <notifications@{{APP_DOMAIN}}>
|
EMAIL_FROM=RE Workflow System <notifications@royalenfield.com>
|
||||||
|
|
||||||
# AI Service (for conclusion generation) - Vertex AI Gemini
|
# AI Service (for conclusion generation) - Vertex AI Gemini
|
||||||
# Uses service account credentials from GCP_KEY_FILE
|
# Uses service account credentials from GCP_KEY_FILE
|
||||||
@ -55,7 +48,7 @@ VERTEX_AI_LOCATION=asia-south1
|
|||||||
# Logging
|
# Logging
|
||||||
LOG_LEVEL=info
|
LOG_LEVEL=info
|
||||||
LOG_FILE_PATH=./logs
|
LOG_FILE_PATH=./logs
|
||||||
APP_VERSION={{APP_VERSION}}
|
APP_VERSION=1.2.0
|
||||||
|
|
||||||
# ============ Loki Configuration (Grafana Log Aggregation) ============
|
# ============ Loki Configuration (Grafana Log Aggregation) ============
|
||||||
LOKI_HOST= # e.g., http://loki:3100 or http://monitoring.cloudtopiaa.com:3100
|
LOKI_HOST= # e.g., http://loki:3100 or http://monitoring.cloudtopiaa.com:3100
|
||||||
@ -66,7 +59,7 @@ LOKI_PASSWORD= # Optional: Basic auth password
|
|||||||
CORS_ORIGIN="*"
|
CORS_ORIGIN="*"
|
||||||
|
|
||||||
# Rate Limiting
|
# Rate Limiting
|
||||||
RATE_LIMIT_WINDOW_MS=900000 # 15 minutes
|
RATE_LIMIT_WINDOW_MS=900000
|
||||||
RATE_LIMIT_MAX_REQUESTS=100
|
RATE_LIMIT_MAX_REQUESTS=100
|
||||||
|
|
||||||
# File Upload
|
# File Upload
|
||||||
@ -83,16 +76,16 @@ OKTA_CLIENT_ID={{okta_client_id}}
|
|||||||
OKTA_CLIENT_SECRET={{okta_client_secret}}
|
OKTA_CLIENT_SECRET={{okta_client_secret}}
|
||||||
|
|
||||||
# Notification Service Worker credentials
|
# Notification Service Worker credentials
|
||||||
VAPID_PUBLIC_KEY={{VAPID_PUBLIC_KEY}}
|
VAPID_PUBLIC_KEY={{vapid_public_key}}  # Note: the same public key must also be configured on the frontend for web push
|
||||||
VAPID_PRIVATE_KEY={{vapid_private_key}}
|
VAPID_PRIVATE_KEY={{vapid_private_key}}
|
||||||
VAPID_CONTACT=mailto:you@example.com
|
VAPID_CONTACT=mailto:you@example.com
|
||||||
|
|
||||||
#Redis
|
#Redis
|
||||||
REDIS_URL={{REDIS_URL}}
|
REDIS_URL={{REDIS_URL}}  # Redis connection URL used by delayed jobs — set up Redis and add the URL here
|
||||||
TAT_TEST_MODE=false # Set to true to accelerate TAT for testing
|
TAT_TEST_MODE=false  # Set to true to accelerate TAT for testing (1 hour is treated as 1 minute)
|
||||||
|
|
||||||
# SAP Integration (OData Service via Zscaler)
|
# SAP Integration (OData Service via Zscaler)
|
||||||
SAP_BASE_URL=https://{{SAP_DOMAIN_HERE}}:{{PORT}}
|
SAP_BASE_URL=https://RENOIHND01.Eichergroup.com:1443
|
||||||
SAP_USERNAME={{SAP_USERNAME}}
|
SAP_USERNAME={{SAP_USERNAME}}
|
||||||
SAP_PASSWORD={{SAP_PASSWORD}}
|
SAP_PASSWORD={{SAP_PASSWORD}}
|
||||||
SAP_TIMEOUT_MS=30000
|
SAP_TIMEOUT_MS=30000
|
||||||
|
|||||||
@ -52,8 +52,6 @@ scrape_configs:
|
|||||||
metrics_path: /metrics
|
metrics_path: /metrics
|
||||||
scrape_interval: 10s
|
scrape_interval: 10s
|
||||||
scrape_timeout: 5s
|
scrape_timeout: 5s
|
||||||
authorization:
|
|
||||||
credentials: 're_c92b9cf291d2be65a1704207aa25352d69432b643e6c9e9a172938c964809f2d'
|
|
||||||
|
|
||||||
# ============================================
|
# ============================================
|
||||||
# Node Exporter - Host Metrics
|
# Node Exporter - Host Metrics
|
||||||
|
|||||||
1271
package-lock.json
generated
1271
package-lock.json
generated
File diff suppressed because it is too large
Load Diff
12
package.json
12
package.json
@ -4,8 +4,8 @@
|
|||||||
"description": "Royal Enfield Workflow Management System - Backend API (TypeScript)",
|
"description": "Royal Enfield Workflow Management System - Backend API (TypeScript)",
|
||||||
"main": "dist/server.js",
|
"main": "dist/server.js",
|
||||||
"scripts": {
|
"scripts": {
|
||||||
"start": "npm install && npm run build && npm run setup && npm run start:prod",
|
"start": "npm run setup && npm run build && npm run start:prod",
|
||||||
"dev": "npm run setup && nodemon --exec ts-node -r tsconfig-paths/register src/server.ts",
|
"dev": "npm run setup && npm run migrate && nodemon --exec ts-node -r tsconfig-paths/register src/server.ts",
|
||||||
"dev:no-setup": "nodemon --exec ts-node -r tsconfig-paths/register src/server.ts",
|
"dev:no-setup": "nodemon --exec ts-node -r tsconfig-paths/register src/server.ts",
|
||||||
"build": "tsc && tsc-alias",
|
"build": "tsc && tsc-alias",
|
||||||
"build:watch": "tsc --watch",
|
"build:watch": "tsc --watch",
|
||||||
@ -18,11 +18,10 @@
|
|||||||
"setup": "ts-node -r tsconfig-paths/register src/scripts/auto-setup.ts",
|
"setup": "ts-node -r tsconfig-paths/register src/scripts/auto-setup.ts",
|
||||||
"migrate": "ts-node -r tsconfig-paths/register src/scripts/migrate.ts",
|
"migrate": "ts-node -r tsconfig-paths/register src/scripts/migrate.ts",
|
||||||
"seed:config": "ts-node -r tsconfig-paths/register src/scripts/seed-admin-config.ts",
|
"seed:config": "ts-node -r tsconfig-paths/register src/scripts/seed-admin-config.ts",
|
||||||
"seed:test-dealer": "ts-node -r tsconfig-paths/register src/scripts/seed-test-dealer.ts",
|
"seed:dealers": "ts-node -r tsconfig-paths/register src/scripts/seed-dealers.ts",
|
||||||
"cleanup:dealer-claims": "ts-node -r tsconfig-paths/register src/scripts/cleanup-dealer-claims.ts"
|
"cleanup:dealer-claims": "ts-node -r tsconfig-paths/register src/scripts/cleanup-dealer-claims.ts"
|
||||||
},
|
},
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"@google-cloud/secret-manager": "^6.1.1",
|
|
||||||
"@google-cloud/storage": "^7.18.0",
|
"@google-cloud/storage": "^7.18.0",
|
||||||
"@google-cloud/vertexai": "^1.10.0",
|
"@google-cloud/vertexai": "^1.10.0",
|
||||||
"@types/nodemailer": "^7.0.4",
|
"@types/nodemailer": "^7.0.4",
|
||||||
@ -30,7 +29,6 @@
|
|||||||
"axios": "^1.7.9",
|
"axios": "^1.7.9",
|
||||||
"bcryptjs": "^2.4.3",
|
"bcryptjs": "^2.4.3",
|
||||||
"bullmq": "^5.63.0",
|
"bullmq": "^5.63.0",
|
||||||
"clamscan": "^2.4.0",
|
|
||||||
"cookie-parser": "^1.4.7",
|
"cookie-parser": "^1.4.7",
|
||||||
"cors": "^2.8.5",
|
"cors": "^2.8.5",
|
||||||
"dayjs": "^1.11.19",
|
"dayjs": "^1.11.19",
|
||||||
@ -51,15 +49,12 @@
|
|||||||
"pg": "^8.13.1",
|
"pg": "^8.13.1",
|
||||||
"pg-hstore": "^2.3.4",
|
"pg-hstore": "^2.3.4",
|
||||||
"prom-client": "^15.1.3",
|
"prom-client": "^15.1.3",
|
||||||
"puppeteer": "^24.37.2",
|
|
||||||
"sanitize-html": "^2.17.1",
|
|
||||||
"sequelize": "^6.37.5",
|
"sequelize": "^6.37.5",
|
||||||
"socket.io": "^4.8.1",
|
"socket.io": "^4.8.1",
|
||||||
"uuid": "^8.3.2",
|
"uuid": "^8.3.2",
|
||||||
"web-push": "^3.6.7",
|
"web-push": "^3.6.7",
|
||||||
"winston": "^3.17.0",
|
"winston": "^3.17.0",
|
||||||
"winston-loki": "^6.1.3",
|
"winston-loki": "^6.1.3",
|
||||||
"xss": "^1.0.15",
|
|
||||||
"zod": "^3.24.1"
|
"zod": "^3.24.1"
|
||||||
},
|
},
|
||||||
"devDependencies": {
|
"devDependencies": {
|
||||||
@ -75,7 +70,6 @@
|
|||||||
"@types/passport": "^1.0.16",
|
"@types/passport": "^1.0.16",
|
||||||
"@types/passport-jwt": "^4.0.1",
|
"@types/passport-jwt": "^4.0.1",
|
||||||
"@types/pg": "^8.15.6",
|
"@types/pg": "^8.15.6",
|
||||||
"@types/sanitize-html": "^2.16.0",
|
|
||||||
"@types/supertest": "^6.0.2",
|
"@types/supertest": "^6.0.2",
|
||||||
"@types/web-push": "^3.6.4",
|
"@types/web-push": "^3.6.4",
|
||||||
"@typescript-eslint/eslint-plugin": "^8.19.1",
|
"@typescript-eslint/eslint-plugin": "^8.19.1",
|
||||||
|
|||||||
@ -16,7 +16,7 @@
|
|||||||
|
|
||||||
UPDATE users
|
UPDATE users
|
||||||
SET role = 'ADMIN'
|
SET role = 'ADMIN'
|
||||||
WHERE email = 'YOUR_EMAIL@{{APP_DOMAIN}}' -- ← CHANGE THIS
|
WHERE email = 'YOUR_EMAIL@royalenfield.com' -- ← CHANGE THIS
|
||||||
RETURNING
|
RETURNING
|
||||||
user_id,
|
user_id,
|
||||||
email,
|
email,
|
||||||
|
|||||||
@ -21,9 +21,9 @@
|
|||||||
UPDATE users
|
UPDATE users
|
||||||
SET role = 'ADMIN'
|
SET role = 'ADMIN'
|
||||||
WHERE email IN (
|
WHERE email IN (
|
||||||
'admin@{{APP_DOMAIN}}',
|
'admin@royalenfield.com',
|
||||||
'it.admin@{{APP_DOMAIN}}',
|
'it.admin@royalenfield.com',
|
||||||
'system.admin@{{APP_DOMAIN}}'
|
'system.admin@royalenfield.com'
|
||||||
-- Add more admin emails here
|
-- Add more admin emails here
|
||||||
);
|
);
|
||||||
|
|
||||||
@ -45,9 +45,9 @@ ORDER BY email;
|
|||||||
UPDATE users
|
UPDATE users
|
||||||
SET role = 'MANAGEMENT'
|
SET role = 'MANAGEMENT'
|
||||||
WHERE email IN (
|
WHERE email IN (
|
||||||
'manager1@{{APP_DOMAIN}}',
|
'manager1@royalenfield.com',
|
||||||
'dept.head@{{APP_DOMAIN}}',
|
'dept.head@royalenfield.com',
|
||||||
'auditor@{{APP_DOMAIN}}'
|
'auditor@royalenfield.com'
|
||||||
-- Add more management emails here
|
-- Add more management emails here
|
||||||
);
|
);
|
||||||
|
|
||||||
|
|||||||
@ -1,74 +0,0 @@
|
|||||||
// Manual verification script for the backend security hardening work.
// Probes a locally running server and reports whether the CSP,
// X-Frame-Options, cookie, and sanitization fixes are in place.
const axios = require('axios');

const BASE_URL = 'http://localhost:3000';

const verifySecurity = async () => {
  try {
    console.log('--- Verifying Security Fixes ---');

    // 1. Fetch the health endpoint and keep its headers for the checks below.
    console.log('\n1. Verifying Security Headers...');
    const healthResponse = await axios.get(`${BASE_URL}/health`);
    const healthHeaders = healthResponse.headers;

    // 1b/1c. The security headers must also survive 404 and redirect responses,
    // which is a known Express/Helmet edge case.
    console.log('\n1b. Verifying Security Headers on 404...');
    try {
      const notFoundRes = await axios.get(`${BASE_URL}/non-existent`, { validateStatus: false });
      console.log('404 Status:', notFoundRes.status);
      console.log('404 CSP:', notFoundRes.headers['content-security-policy']);

      console.log('\n1c. Verifying Security Headers on /assets (Redirect check)...');
      const assetsRes = await axios.get(`${BASE_URL}/assets`, {
        validateStatus: false,
        maxRedirects: 0 // Don't follow to see the first response (likely 301)
      });
      console.log('Assets Status:', assetsRes.status);
      console.log('Assets CSP:', assetsRes.headers['content-security-policy']);
    } catch (probeErr) {
      console.log('Error checking 404/Redirect:', probeErr.message);
      const errRes = probeErr.response;
      if (errRes) {
        console.log('Response Status:', errRes.status);
        console.log('Response CSP:', errRes.headers['content-security-policy']);
      }
    }

    // Clickjacking protection via the CSP frame-ancestors directive.
    const cspHeader = healthHeaders['content-security-policy'];
    console.log('CSP:', cspHeader);
    console.log(
      cspHeader && cspHeader.includes("frame-ancestors 'self'")
        ? '✅ Clickjacking Protection (frame-ancestors) is present.'
        : '❌ Clickjacking Protection (frame-ancestors) is MISSING.'
    );

    // Legacy clickjacking protection via X-Frame-Options.
    const frameOptions = healthHeaders['x-frame-options'];
    console.log('X-Frame-Options:', frameOptions);
    console.log(
      frameOptions === 'SAMEORIGIN'
        ? '✅ X-Frame-Options: SAMEORIGIN is present.'
        : '❌ X-Frame-Options: SAMEORIGIN is MISSING.'
    );

    console.log('\n2. Verifying Cookie Security Flags (requires login)...');
    console.log('Note: This is best verified in a real browser or by checking the code changes in auth.controller.ts.');

    console.log('\n3. Verifying Sanitization Utility...');
    // This is verified by the unit test if we create one, but we can also do a manual check if the server is running.

    console.log('\n--- Verification Summary ---');
    console.log('Content-Security-Policy: frame-ancestors added.');
    console.log('X-Frame-Options: set to SAMEORIGIN.');
    console.log('Cookie flags: sameSite set to lax, secure flag ensured in production.');
    console.log('Sanitization: Implemented in WorkNotes, Holidays, Workflow Requests, and Conclusions.');
  } catch (error) {
    if (error.code === 'ECONNREFUSED') {
      console.error('❌ Error: Could not connect to the backend server at', BASE_URL);
      console.error('Please ensure the server is running (npm run dev).');
    } else {
      console.error('❌ Error during verification:', error.message);
    }
  }
};

verifySecurity();
|
|
||||||
@ -1,11 +0,0 @@
|
|||||||
{
|
|
||||||
"_comment": "Optional: Map Google Secret Manager secret names to environment variable names",
|
|
||||||
"_comment2": "If not provided, secrets are mapped automatically: secret-name -> SECRET_NAME (uppercase)",
|
|
||||||
|
|
||||||
"examples": {
|
|
||||||
"db-password": "DB_PASSWORD",
|
|
||||||
"jwt-secret-key": "JWT_SECRET",
|
|
||||||
"okta-client-secret": "OKTA_CLIENT_SECRET"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
@ -162,7 +162,7 @@ SMTP_PORT=587
|
|||||||
SMTP_SECURE=false
|
SMTP_SECURE=false
|
||||||
SMTP_USER=${SMTP_USER}
|
SMTP_USER=${SMTP_USER}
|
||||||
SMTP_PASSWORD=${SMTP_PASSWORD}
|
SMTP_PASSWORD=${SMTP_PASSWORD}
|
||||||
EMAIL_FROM=RE Workflow System <notifications@{{APP_DOMAIN}}>
|
EMAIL_FROM=RE Workflow System <notifications@royalenfield.com>
|
||||||
|
|
||||||
# Vertex AI Gemini Configuration (for conclusion generation)
|
# Vertex AI Gemini Configuration (for conclusion generation)
|
||||||
# Service account credentials should be placed in ./credentials/ folder
|
# Service account credentials should be placed in ./credentials/ folder
|
||||||
@ -232,7 +232,7 @@ show_vapid_instructions() {
|
|||||||
echo " VITE_PUBLIC_VAPID_KEY=<your-public-key>"
|
echo " VITE_PUBLIC_VAPID_KEY=<your-public-key>"
|
||||||
echo ""
|
echo ""
|
||||||
echo "5. The VAPID_CONTACT should be a valid mailto: URL"
|
echo "5. The VAPID_CONTACT should be a valid mailto: URL"
|
||||||
echo " Example: mailto:admin@{{APP_DOMAIN}}"
|
echo " Example: mailto:admin@royalenfield.com"
|
||||||
echo ""
|
echo ""
|
||||||
echo "Note: Keep your VAPID_PRIVATE_KEY secure and never commit it to version control!"
|
echo "Note: Keep your VAPID_PRIVATE_KEY secure and never commit it to version control!"
|
||||||
echo ""
|
echo ""
|
||||||
|
|||||||
162
src/app.ts
162
src/app.ts
@ -7,101 +7,29 @@ import { UserService } from './services/user.service';
|
|||||||
import { SSOUserData } from './types/auth.types';
|
import { SSOUserData } from './types/auth.types';
|
||||||
import { sequelize } from './config/database';
|
import { sequelize } from './config/database';
|
||||||
import { corsMiddleware } from './middlewares/cors.middleware';
|
import { corsMiddleware } from './middlewares/cors.middleware';
|
||||||
import { authenticateToken } from './middlewares/auth.middleware';
|
|
||||||
import { requireAdmin } from './middlewares/authorization.middleware';
|
|
||||||
import { metricsMiddleware, createMetricsRouter } from './middlewares/metrics.middleware';
|
import { metricsMiddleware, createMetricsRouter } from './middlewares/metrics.middleware';
|
||||||
import routes from './routes/index';
|
import routes from './routes/index';
|
||||||
import { ensureUploadDir, UPLOAD_DIR } from './config/storage';
|
import { ensureUploadDir, UPLOAD_DIR } from './config/storage';
|
||||||
import { initializeGoogleSecretManager } from './services/googleSecretManager.service';
|
|
||||||
import { sanitizationMiddleware } from './middlewares/sanitization.middleware';
|
|
||||||
import { rateLimiter } from './middlewares/rateLimiter.middleware';
|
|
||||||
import path from 'path';
|
import path from 'path';
|
||||||
|
|
||||||
// Load environment variables from .env file first
|
// Load environment variables
|
||||||
dotenv.config();
|
dotenv.config();
|
||||||
|
|
||||||
// Secrets are now initialized in server.ts before app is imported
|
|
||||||
|
|
||||||
const app: express.Application = express();
|
const app: express.Application = express();
|
||||||
|
|
||||||
// 1. Security middleware - Manual "Gold Standard" CSP to ensure it survives 301/404/etc.
|
|
||||||
// This handles a specific Express/Helmet edge case where redirects lose headers.
|
|
||||||
app.use((req: express.Request, res: express.Response, next: express.NextFunction) => {
|
|
||||||
const isDev = process.env.NODE_ENV === 'development' || process.env.NODE_ENV === 'local';
|
|
||||||
const frontendUrl = process.env.FRONTEND_URL || 'http://localhost:3000';
|
|
||||||
|
|
||||||
// Build connect-src dynamically
|
|
||||||
const connectSrc = ["'self'", "blob:", "data:"];
|
|
||||||
if (isDev) {
|
|
||||||
connectSrc.push("http://localhost:3000", "http://localhost:5000", "ws://localhost:3000", "ws://localhost:5000");
|
|
||||||
if (frontendUrl.includes('localhost')) connectSrc.push(frontendUrl);
|
|
||||||
} else if (frontendUrl && frontendUrl !== '*') {
|
|
||||||
const origins = frontendUrl.split(',').map(url => url.trim()).filter(Boolean);
|
|
||||||
connectSrc.push(...origins);
|
|
||||||
}
|
|
||||||
|
|
||||||
const apiDomain = process.env.APP_DOMAIN || 'royalenfield.com';
|
|
||||||
|
|
||||||
// Define strict CSP directives
|
|
||||||
//: Move frame-ancestors, form-action, and base-uri to the front to ensure VAPT compliance
|
|
||||||
// even if the header is truncated in certain response types (like 301 redirects).
|
|
||||||
const directives = [
|
|
||||||
"frame-ancestors 'self'",
|
|
||||||
"form-action 'self'",
|
|
||||||
"base-uri 'self'",
|
|
||||||
"default-src 'none'",
|
|
||||||
`connect-src ${connectSrc.join(' ')}`,
|
|
||||||
"style-src 'self' https://fonts.googleapis.com 'sha256-47DEQpj8HBSa+/TImW+5JCeuQeRkm5NMpJWZG3hSuFU=' 'sha256-Od9mHMH7x2G6QuoV3hsPkDCwIyqbg2DX3F5nLeCYQBc=' 'sha256-eSB4TBEI8J+pgd6+gnmCP4Q+C+Yrx5BdjBEoPvZUzZI=' 'sha256-nzTgYzXYDNe6BAHiiI7NNlfK8n/auuOAhh2t92YvuXo=' 'sha256-441zG27rExd4/il+NvIqyL8zFx5XmyNQtE381kSkUJk='",
|
|
||||||
"style-src-elem 'self' https://fonts.googleapis.com 'sha256-47DEQpj8HBSa+/TImW+5JCeuQeRkm5NMpJWZG3hSuFU=' 'sha256-Od9mHMH7x2G6QuoV3hsPkDCwIyqbg2DX3F5nLeCYQBc=' 'sha256-eSB4TBEI8J+pgd6+gnmCP4Q+C+Yrx5BdjBEoPvZUzZI=' 'sha256-nzTgYzXYDNe6BAHiiI7NNlfK8n/auuOAhh2t92YvuXo=' 'sha256-441zG27rExd4/il+NvIqyL8zFx5XmyNQtE381kSkUJk='",
|
|
||||||
"style-src-attr 'unsafe-inline'",
|
|
||||||
"script-src 'self'",
|
|
||||||
"script-src-elem 'self'",
|
|
||||||
"script-src-attr 'none'",
|
|
||||||
`img-src 'self' data: blob: https://*.${apiDomain} https://*.okta.com https://*.oktapreview.com https://*.googleapis.com https://*.gstatic.com`,
|
|
||||||
"frame-src 'self' blob: data:",
|
|
||||||
"font-src 'self' https://fonts.gstatic.com data:",
|
|
||||||
"object-src 'none'",
|
|
||||||
"worker-src 'self' blob:",
|
|
||||||
"manifest-src 'self'",
|
|
||||||
!isDev ? "upgrade-insecure-requests" : ""
|
|
||||||
].filter(Boolean).join("; ");
|
|
||||||
|
|
||||||
res.setHeader('Content-Security-Policy', directives);
|
|
||||||
next();
|
|
||||||
});
|
|
||||||
|
|
||||||
// Configure other security headers via Helmet (with CSP disabled since we set it manually)
|
|
||||||
app.use(helmet({
|
|
||||||
contentSecurityPolicy: false, // Handled manually above to ensure redirect compatibility
|
|
||||||
crossOriginEmbedderPolicy: false,
|
|
||||||
crossOriginResourcePolicy: { policy: "cross-origin" },
|
|
||||||
xFrameOptions: { action: "sameorigin" },
|
|
||||||
}));
|
|
||||||
|
|
||||||
// 2. CORS middleware - MUST be before other middleware
|
|
||||||
app.use(corsMiddleware);
|
|
||||||
|
|
||||||
// Handle /assets trailing slash redirect manually to avoid CSP truncation by express.static
|
|
||||||
app.get('/assets', (req, res) => {
|
|
||||||
res.redirect(301, '/assets/');
|
|
||||||
});
|
|
||||||
|
|
||||||
// 3. Cookie parser middleware - MUST be before routes
|
|
||||||
app.use(cookieParser());
|
|
||||||
|
|
||||||
const userService = new UserService();
|
const userService = new UserService();
|
||||||
|
|
||||||
// Initializer for database connection (called from server.ts)
|
// Initialize database connection
|
||||||
export const initializeAppDatabase = async () => {
|
const initializeDatabase = async () => {
|
||||||
try {
|
try {
|
||||||
await sequelize.authenticate();
|
await sequelize.authenticate();
|
||||||
console.log('✅ App database connection established');
|
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
console.error('❌ App database connection failed:', error);
|
console.error('❌ Database connection failed:', error);
|
||||||
throw error;
|
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
|
// Initialize database
|
||||||
|
initializeDatabase();
|
||||||
|
|
||||||
// Trust proxy - Enable this when behind a reverse proxy (nginx, load balancer, etc.)
|
// Trust proxy - Enable this when behind a reverse proxy (nginx, load balancer, etc.)
|
||||||
// This allows Express to read X-Forwarded-* headers correctly
|
// This allows Express to read X-Forwarded-* headers correctly
|
||||||
// Set to true in production, false in development
|
// Set to true in production, false in development
|
||||||
@ -112,16 +40,65 @@ if (process.env.TRUST_PROXY === 'true' || process.env.NODE_ENV === 'production')
|
|||||||
app.set('trust proxy', 1);
|
app.set('trust proxy', 1);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// CORS middleware - MUST be before other middleware
|
||||||
|
app.use(corsMiddleware);
|
||||||
|
|
||||||
|
// Security middleware - Configure Helmet to work with CORS
|
||||||
|
// Get frontend URL for CSP - allow cross-origin connections in development
|
||||||
|
const frontendUrl = process.env.FRONTEND_URL || 'http://localhost:3000';
|
||||||
|
const isDevelopment = process.env.NODE_ENV !== 'production';
|
||||||
|
|
||||||
|
// Build connect-src directive - allow backend API and blob URLs
|
||||||
|
const connectSrc = ["'self'", "blob:", "data:"];
|
||||||
|
if (isDevelopment) {
|
||||||
|
// In development, allow connections to common dev ports
|
||||||
|
connectSrc.push("http://localhost:3000", "http://localhost:5000", "ws://localhost:3000", "ws://localhost:5000");
|
||||||
|
// Also allow the configured frontend URL if it's a localhost URL
|
||||||
|
if (frontendUrl.includes('localhost')) {
|
||||||
|
connectSrc.push(frontendUrl);
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
// In production, only allow the configured frontend URL
|
||||||
|
if (frontendUrl && frontendUrl !== '*') {
|
||||||
|
const frontendOrigins = frontendUrl.split(',').map(url => url.trim()).filter(Boolean);
|
||||||
|
connectSrc.push(...frontendOrigins);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Build CSP directives - conditionally include upgradeInsecureRequests
|
||||||
|
const cspDirectives: any = {
|
||||||
|
defaultSrc: ["'self'", "blob:"],
|
||||||
|
styleSrc: ["'self'", "'unsafe-inline'", "https://fonts.googleapis.com"],
|
||||||
|
scriptSrc: ["'self'"],
|
||||||
|
imgSrc: ["'self'", "data:", "https:", "blob:"],
|
||||||
|
connectSrc: connectSrc,
|
||||||
|
frameSrc: ["'self'", "blob:"],
|
||||||
|
fontSrc: ["'self'", "https://fonts.gstatic.com", "data:"],
|
||||||
|
objectSrc: ["'none'"],
|
||||||
|
baseUri: ["'self'"],
|
||||||
|
formAction: ["'self'"],
|
||||||
|
};
|
||||||
|
|
||||||
|
// Only add upgradeInsecureRequests in production (it forces HTTPS)
|
||||||
|
if (!isDevelopment) {
|
||||||
|
cspDirectives.upgradeInsecureRequests = [];
|
||||||
|
}
|
||||||
|
|
||||||
|
app.use(helmet({
|
||||||
|
crossOriginEmbedderPolicy: false,
|
||||||
|
crossOriginResourcePolicy: { policy: "cross-origin" },
|
||||||
|
contentSecurityPolicy: {
|
||||||
|
directives: cspDirectives,
|
||||||
|
},
|
||||||
|
}));
|
||||||
|
|
||||||
|
// Cookie parser middleware - MUST be before routes
|
||||||
|
app.use(cookieParser());
|
||||||
|
|
||||||
// Body parsing middleware
|
// Body parsing middleware
|
||||||
app.use(express.json({ limit: '10mb' }));
|
app.use(express.json({ limit: '10mb' }));
|
||||||
app.use(express.urlencoded({ extended: true, limit: '10mb' }));
|
app.use(express.urlencoded({ extended: true, limit: '10mb' }));
|
||||||
|
|
||||||
// Global rate limiting disabled — nginx handles rate limiting in production
|
|
||||||
// app.use(rateLimiter);
|
|
||||||
|
|
||||||
// HTML sanitization - strip all tags from text inputs (after body parsing, before routes)
|
|
||||||
app.use(sanitizationMiddleware);
|
|
||||||
|
|
||||||
// Logging middleware
|
// Logging middleware
|
||||||
app.use(morgan('combined'));
|
app.use(morgan('combined'));
|
||||||
|
|
||||||
@ -129,7 +106,7 @@ app.use(morgan('combined'));
|
|||||||
app.use(metricsMiddleware);
|
app.use(metricsMiddleware);
|
||||||
|
|
||||||
// Prometheus metrics endpoint - expose metrics for scraping
|
// Prometheus metrics endpoint - expose metrics for scraping
|
||||||
app.use('/metrics', authenticateToken, requireAdmin, createMetricsRouter());
|
app.use(createMetricsRouter());
|
||||||
|
|
||||||
// Health check endpoint (before API routes)
|
// Health check endpoint (before API routes)
|
||||||
app.get('/health', (_req: express.Request, res: express.Response) => {
|
app.get('/health', (_req: express.Request, res: express.Response) => {
|
||||||
@ -146,16 +123,7 @@ app.use('/api/v1', routes);
|
|||||||
|
|
||||||
// Serve uploaded files statically
|
// Serve uploaded files statically
|
||||||
ensureUploadDir();
|
ensureUploadDir();
|
||||||
app.use('/uploads', authenticateToken, express.static(UPLOAD_DIR));
|
app.use('/uploads', express.static(UPLOAD_DIR));
|
||||||
|
|
||||||
// Initialize ClamAV toggle manager
|
|
||||||
import { initializeToggleFile } from './services/clamav/clamavToggleManager';
|
|
||||||
try {
|
|
||||||
initializeToggleFile();
|
|
||||||
console.log(`✅ ClamAV toggle initialized (ENABLE_CLAMAV=${process.env.ENABLE_CLAMAV || 'true'})`);
|
|
||||||
} catch (err) {
|
|
||||||
console.warn('⚠️ ClamAV toggle initialization warning:', err);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Legacy SSO Callback endpoint for user creation/update (kept for backward compatibility)
|
// Legacy SSO Callback endpoint for user creation/update (kept for backward compatibility)
|
||||||
app.post('/api/v1/auth/sso-callback', async (req: express.Request, res: express.Response): Promise<void> => {
|
app.post('/api/v1/auth/sso-callback', async (req: express.Request, res: express.Response): Promise<void> => {
|
||||||
@ -209,7 +177,7 @@ app.post('/api/v1/auth/sso-callback', async (req: express.Request, res: express.
|
|||||||
});
|
});
|
||||||
|
|
||||||
// Get all users endpoint
|
// Get all users endpoint
|
||||||
app.get('/api/v1/users', authenticateToken, requireAdmin, async (_req: express.Request, res: express.Response): Promise<void> => {
|
app.get('/api/v1/users', async (_req: express.Request, res: express.Response): Promise<void> => {
|
||||||
try {
|
try {
|
||||||
const users = await userService.getAllUsers();
|
const users = await userService.getAllUsers();
|
||||||
|
|
||||||
|
|||||||
@ -3,16 +3,6 @@ import dotenv from 'dotenv';
|
|||||||
|
|
||||||
dotenv.config();
|
dotenv.config();
|
||||||
|
|
||||||
// 1. Debugging: Print what the app actually sees
|
|
||||||
console.log('--- Database Config Debug ---');
|
|
||||||
console.log(`DB_HOST: ${process.env.DB_HOST}`);
|
|
||||||
console.log(`DB_SSL (Raw): '${process.env.DB_SSL}`); // Quotes help see trailing spaces
|
|
||||||
|
|
||||||
// 2. Fix: Trim whitespace to ensure "true " becomes "true"
|
|
||||||
const isSSL = (process.env.DB_SSL || '').trim() === 'true';
|
|
||||||
console.log(`SSL Enabled: ${isSSL}`);
|
|
||||||
console.log('---------------------------');
|
|
||||||
|
|
||||||
const sequelize = new Sequelize({
|
const sequelize = new Sequelize({
|
||||||
host: process.env.DB_HOST || 'localhost',
|
host: process.env.DB_HOST || 'localhost',
|
||||||
port: parseInt(process.env.DB_PORT || '5432', 10),
|
port: parseInt(process.env.DB_PORT || '5432', 10),
|
||||||
@ -20,7 +10,7 @@ const sequelize = new Sequelize({
|
|||||||
username: process.env.DB_USER || 'postgres',
|
username: process.env.DB_USER || 'postgres',
|
||||||
password: process.env.DB_PASSWORD || 'postgres',
|
password: process.env.DB_PASSWORD || 'postgres',
|
||||||
dialect: 'postgres',
|
dialect: 'postgres',
|
||||||
logging: false,
|
logging: false, // Disable SQL query logging for cleaner console output
|
||||||
pool: {
|
pool: {
|
||||||
min: parseInt(process.env.DB_POOL_MIN || '2', 10),
|
min: parseInt(process.env.DB_POOL_MIN || '2', 10),
|
||||||
max: parseInt(process.env.DB_POOL_MAX || '10', 10),
|
max: parseInt(process.env.DB_POOL_MAX || '10', 10),
|
||||||
@ -28,8 +18,7 @@ const sequelize = new Sequelize({
|
|||||||
idle: 10000,
|
idle: 10000,
|
||||||
},
|
},
|
||||||
dialectOptions: {
|
dialectOptions: {
|
||||||
// 3. Use the robust boolean we calculated above
|
ssl: process.env.DB_SSL === 'true' ? {
|
||||||
ssl: isSSL ? {
|
|
||||||
require: true,
|
require: true,
|
||||||
rejectUnauthorized: false,
|
rejectUnauthorized: false,
|
||||||
} : false,
|
} : false,
|
||||||
|
|||||||
@ -9,7 +9,7 @@ export const emailConfig = {
|
|||||||
},
|
},
|
||||||
},
|
},
|
||||||
|
|
||||||
from: process.env.EMAIL_FROM || `RE Workflow System <notifications@${process.env.APP_DOMAIN || 'royalenfield.com'}>`,
|
from: process.env.EMAIL_FROM || 'RE Workflow System <notifications@royalenfield.com>',
|
||||||
|
|
||||||
// Email templates
|
// Email templates
|
||||||
templates: {
|
templates: {
|
||||||
|
|||||||
@ -1,25 +1,17 @@
|
|||||||
import { SSOConfig, SSOUserData } from '../types/auth.types';
|
import { SSOConfig, SSOUserData } from '../types/auth.types';
|
||||||
|
|
||||||
// Use getter functions to read from process.env dynamically
|
|
||||||
// This ensures values are read after secrets are loaded from Google Secret Manager
|
|
||||||
const ssoConfig: SSOConfig = {
|
const ssoConfig: SSOConfig = {
|
||||||
get jwtSecret() { return process.env.JWT_SECRET || ''; },
|
jwtSecret: process.env.JWT_SECRET || '',
|
||||||
get jwtExpiry() { return process.env.JWT_EXPIRY || '24h'; },
|
jwtExpiry: process.env.JWT_EXPIRY || '24h',
|
||||||
get refreshTokenExpiry() { return process.env.REFRESH_TOKEN_EXPIRY || '7d'; },
|
refreshTokenExpiry: process.env.REFRESH_TOKEN_EXPIRY || '7d',
|
||||||
get sessionSecret() { return process.env.SESSION_SECRET || ''; },
|
sessionSecret: process.env.SESSION_SECRET || '',
|
||||||
// Use only FRONTEND_URL from environment - no fallbacks
|
// Use only FRONTEND_URL from environment - no fallbacks
|
||||||
get allowedOrigins() {
|
allowedOrigins: process.env.FRONTEND_URL?.split(',').map(s => s.trim()).filter(Boolean) || [],
|
||||||
return process.env.FRONTEND_URL?.split(',').map(s => s.trim()).filter(Boolean) || [];
|
|
||||||
},
|
|
||||||
// Okta/Auth0 configuration for token exchange
|
// Okta/Auth0 configuration for token exchange
|
||||||
get oktaDomain() { return process.env.OKTA_DOMAIN || `{{IDP_DOMAIN}}`; },
|
oktaDomain: process.env.OKTA_DOMAIN || 'https://dev-830839.oktapreview.com',
|
||||||
get oktaClientId() { return process.env.OKTA_CLIENT_ID || ''; },
|
oktaClientId: process.env.OKTA_CLIENT_ID || '',
|
||||||
get oktaClientSecret() { return process.env.OKTA_CLIENT_SECRET || ''; },
|
oktaClientSecret: process.env.OKTA_CLIENT_SECRET || '',
|
||||||
get oktaApiToken() { return process.env.OKTA_API_TOKEN || ''; }, // SSWS token for Users API
|
oktaApiToken: process.env.OKTA_API_TOKEN || '', // SSWS token for Users API
|
||||||
// Tanflow configuration for token exchange
|
|
||||||
get tanflowBaseUrl() { return process.env.TANFLOW_BASE_URL || `{{IDP_DOMAIN}}/realms/RE`; },
|
|
||||||
get tanflowClientId() { return process.env.TANFLOW_CLIENT_ID || 'REFLOW'; },
|
|
||||||
get tanflowClientSecret() { return process.env.TANFLOW_CLIENT_SECRET || `{{TANFLOW_CLIENT_SECRET}}`; },
|
|
||||||
};
|
};
|
||||||
|
|
||||||
export { ssoConfig };
|
export { ssoConfig };
|
||||||
|
|||||||
@ -1,14 +1,12 @@
|
|||||||
import { Request, Response } from 'express';
|
import { Request, Response } from 'express';
|
||||||
import { Holiday, HolidayType } from '@models/Holiday';
|
import { Holiday, HolidayType } from '@models/Holiday';
|
||||||
import { holidayService } from '@services/holiday.service';
|
import { holidayService } from '@services/holiday.service';
|
||||||
import { activityTypeService } from '@services/activityType.service';
|
|
||||||
import { sequelize } from '@config/database';
|
import { sequelize } from '@config/database';
|
||||||
import { QueryTypes, Op } from 'sequelize';
|
import { QueryTypes, Op } from 'sequelize';
|
||||||
import logger from '@utils/logger';
|
import logger from '@utils/logger';
|
||||||
import { initializeHolidaysCache, clearWorkingHoursCache } from '@utils/tatTimeUtils';
|
import { initializeHolidaysCache, clearWorkingHoursCache } from '@utils/tatTimeUtils';
|
||||||
import { clearConfigCache } from '@services/configReader.service';
|
import { clearConfigCache } from '@services/configReader.service';
|
||||||
import { User, UserRole } from '@models/User';
|
import { User, UserRole } from '@models/User';
|
||||||
import { sanitizeHtml } from '@utils/sanitizer';
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Get all holidays (with optional year filter)
|
* Get all holidays (with optional year filter)
|
||||||
@ -104,7 +102,7 @@ export const createHoliday = async (req: Request, res: Response): Promise<void>
|
|||||||
const holiday = await holidayService.createHoliday({
|
const holiday = await holidayService.createHoliday({
|
||||||
holidayDate,
|
holidayDate,
|
||||||
holidayName,
|
holidayName,
|
||||||
description: description ? sanitizeHtml(description) : description,
|
description,
|
||||||
holidayType: holidayType || HolidayType.ORGANIZATIONAL,
|
holidayType: holidayType || HolidayType.ORGANIZATIONAL,
|
||||||
isRecurring: isRecurring || false,
|
isRecurring: isRecurring || false,
|
||||||
recurrenceRule,
|
recurrenceRule,
|
||||||
@ -146,9 +144,6 @@ export const updateHoliday = async (req: Request, res: Response): Promise<void>
|
|||||||
|
|
||||||
const { holidayId } = req.params;
|
const { holidayId } = req.params;
|
||||||
const updates = req.body;
|
const updates = req.body;
|
||||||
if (updates.description) {
|
|
||||||
updates.description = sanitizeHtml(updates.description);
|
|
||||||
}
|
|
||||||
|
|
||||||
const holiday = await holidayService.updateHoliday(holidayId, updates, userId);
|
const holiday = await holidayService.updateHoliday(holidayId, updates, userId);
|
||||||
|
|
||||||
@ -254,7 +249,7 @@ export const getPublicConfigurations = async (req: Request, res: Response): Prom
|
|||||||
const { category } = req.query;
|
const { category } = req.query;
|
||||||
|
|
||||||
// Only allow certain categories for public access
|
// Only allow certain categories for public access
|
||||||
const allowedCategories = ['DOCUMENT_POLICY', 'TAT_SETTINGS', 'WORKFLOW_SHARING', 'SYSTEM_SETTINGS'];
|
const allowedCategories = ['DOCUMENT_POLICY', 'TAT_SETTINGS', 'WORKFLOW_SHARING'];
|
||||||
if (category && !allowedCategories.includes(category as string)) {
|
if (category && !allowedCategories.includes(category as string)) {
|
||||||
res.status(403).json({
|
res.status(403).json({
|
||||||
success: false,
|
success: false,
|
||||||
@ -267,7 +262,7 @@ export const getPublicConfigurations = async (req: Request, res: Response): Prom
|
|||||||
if (category) {
|
if (category) {
|
||||||
whereClause = `WHERE config_category = '${category}' AND is_sensitive = false`;
|
whereClause = `WHERE config_category = '${category}' AND is_sensitive = false`;
|
||||||
} else {
|
} else {
|
||||||
whereClause = `WHERE config_category IN ('DOCUMENT_POLICY', 'TAT_SETTINGS', 'WORKFLOW_SHARING', 'SYSTEM_SETTINGS') AND is_sensitive = false`;
|
whereClause = `WHERE config_category IN ('DOCUMENT_POLICY', 'TAT_SETTINGS', 'WORKFLOW_SHARING') AND is_sensitive = false`;
|
||||||
}
|
}
|
||||||
|
|
||||||
const rawConfigurations = await sequelize.query(`
|
const rawConfigurations = await sequelize.query(`
|
||||||
@ -393,7 +388,7 @@ export const updateConfiguration = async (req: Request, res: Response): Promise<
|
|||||||
}
|
}
|
||||||
|
|
||||||
const { configKey } = req.params;
|
const { configKey } = req.params;
|
||||||
let { configValue } = req.body;
|
const { configValue } = req.body;
|
||||||
|
|
||||||
if (configValue === undefined) {
|
if (configValue === undefined) {
|
||||||
res.status(400).json({
|
res.status(400).json({
|
||||||
@ -403,12 +398,6 @@ export const updateConfiguration = async (req: Request, res: Response): Promise<
|
|||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
// Sanitize config value if it's likely to be rendered as HTML
|
|
||||||
// We can be selective or just sanitize all strings for safety
|
|
||||||
if (typeof configValue === 'string') {
|
|
||||||
configValue = sanitizeHtml(configValue);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Update configuration
|
// Update configuration
|
||||||
const result = await sequelize.query(`
|
const result = await sequelize.query(`
|
||||||
UPDATE admin_configurations
|
UPDATE admin_configurations
|
||||||
@ -889,174 +878,3 @@ export const assignRoleByEmail = async (req: Request, res: Response): Promise<vo
|
|||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
// ==================== Activity Type Management Routes ====================
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Get all activity types (optionally filtered by active status)
|
|
||||||
*/
|
|
||||||
export const getAllActivityTypes = async (req: Request, res: Response): Promise<void> => {
|
|
||||||
try {
|
|
||||||
const { activeOnly } = req.query;
|
|
||||||
const activeOnlyBool = activeOnly === 'true';
|
|
||||||
|
|
||||||
const activityTypes = await activityTypeService.getAllActivityTypes(activeOnlyBool);
|
|
||||||
|
|
||||||
res.json({
|
|
||||||
success: true,
|
|
||||||
data: activityTypes,
|
|
||||||
count: activityTypes.length
|
|
||||||
});
|
|
||||||
} catch (error: any) {
|
|
||||||
logger.error('[Admin] Error fetching activity types:', error);
|
|
||||||
res.status(500).json({
|
|
||||||
success: false,
|
|
||||||
error: error.message || 'Failed to fetch activity types'
|
|
||||||
});
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Get a single activity type by ID
|
|
||||||
*/
|
|
||||||
export const getActivityTypeById = async (req: Request, res: Response): Promise<void> => {
|
|
||||||
try {
|
|
||||||
const { activityTypeId } = req.params;
|
|
||||||
|
|
||||||
const activityType = await activityTypeService.getActivityTypeById(activityTypeId);
|
|
||||||
|
|
||||||
if (!activityType) {
|
|
||||||
res.status(404).json({
|
|
||||||
success: false,
|
|
||||||
error: 'Activity type not found'
|
|
||||||
});
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
res.json({
|
|
||||||
success: true,
|
|
||||||
data: activityType
|
|
||||||
});
|
|
||||||
} catch (error: any) {
|
|
||||||
logger.error('[Admin] Error fetching activity type:', error);
|
|
||||||
res.status(500).json({
|
|
||||||
success: false,
|
|
||||||
error: error.message || 'Failed to fetch activity type'
|
|
||||||
});
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Create a new activity type
|
|
||||||
*/
|
|
||||||
export const createActivityType = async (req: Request, res: Response): Promise<void> => {
|
|
||||||
try {
|
|
||||||
const userId = req.user?.userId;
|
|
||||||
if (!userId) {
|
|
||||||
res.status(401).json({
|
|
||||||
success: false,
|
|
||||||
error: 'User not authenticated'
|
|
||||||
});
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
const {
|
|
||||||
title,
|
|
||||||
itemCode,
|
|
||||||
taxationType,
|
|
||||||
sapRefNo
|
|
||||||
} = req.body;
|
|
||||||
|
|
||||||
// Validate required fields
|
|
||||||
if (!title) {
|
|
||||||
res.status(400).json({
|
|
||||||
success: false,
|
|
||||||
error: 'Activity type title is required'
|
|
||||||
});
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
const activityType = await activityTypeService.createActivityType({
|
|
||||||
title,
|
|
||||||
itemCode: itemCode || null,
|
|
||||||
taxationType: taxationType || null,
|
|
||||||
sapRefNo: sapRefNo || null,
|
|
||||||
createdBy: userId
|
|
||||||
});
|
|
||||||
|
|
||||||
res.status(201).json({
|
|
||||||
success: true,
|
|
||||||
message: 'Activity type created successfully',
|
|
||||||
data: activityType
|
|
||||||
});
|
|
||||||
} catch (error: any) {
|
|
||||||
logger.error('[Admin] Error creating activity type:', error);
|
|
||||||
res.status(500).json({
|
|
||||||
success: false,
|
|
||||||
error: error.message || 'Failed to create activity type'
|
|
||||||
});
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Update an activity type
|
|
||||||
*/
|
|
||||||
export const updateActivityType = async (req: Request, res: Response): Promise<void> => {
|
|
||||||
try {
|
|
||||||
const userId = req.user?.userId;
|
|
||||||
if (!userId) {
|
|
||||||
res.status(401).json({
|
|
||||||
success: false,
|
|
||||||
error: 'User not authenticated'
|
|
||||||
});
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
const { activityTypeId } = req.params;
|
|
||||||
const updates = req.body;
|
|
||||||
|
|
||||||
const activityType = await activityTypeService.updateActivityType(activityTypeId, updates, userId);
|
|
||||||
|
|
||||||
if (!activityType) {
|
|
||||||
res.status(404).json({
|
|
||||||
success: false,
|
|
||||||
error: 'Activity type not found'
|
|
||||||
});
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
res.json({
|
|
||||||
success: true,
|
|
||||||
message: 'Activity type updated successfully',
|
|
||||||
data: activityType
|
|
||||||
});
|
|
||||||
} catch (error: any) {
|
|
||||||
logger.error('[Admin] Error updating activity type:', error);
|
|
||||||
res.status(500).json({
|
|
||||||
success: false,
|
|
||||||
error: error.message || 'Failed to update activity type'
|
|
||||||
});
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Delete (deactivate) an activity type
|
|
||||||
*/
|
|
||||||
export const deleteActivityType = async (req: Request, res: Response): Promise<void> => {
|
|
||||||
try {
|
|
||||||
const { activityTypeId } = req.params;
|
|
||||||
|
|
||||||
await activityTypeService.deleteActivityType(activityTypeId);
|
|
||||||
|
|
||||||
res.json({
|
|
||||||
success: true,
|
|
||||||
message: 'Activity type deleted successfully'
|
|
||||||
});
|
|
||||||
} catch (error: any) {
|
|
||||||
logger.error('[Admin] Error deleting activity type:', error);
|
|
||||||
res.status(500).json({
|
|
||||||
success: false,
|
|
||||||
error: error.message || 'Failed to delete activity type'
|
|
||||||
});
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
|
|||||||
@ -1,79 +0,0 @@
|
|||||||
import { Request, Response } from 'express';
|
|
||||||
import { ApiTokenService } from '../services/apiToken.service';
|
|
||||||
import { ResponseHandler } from '../utils/responseHandler';
|
|
||||||
import { AuthenticatedRequest } from '../types/express';
|
|
||||||
import { z } from 'zod';
|
|
||||||
|
|
||||||
const createTokenSchema = z.object({
|
|
||||||
name: z.string().min(1).max(100),
|
|
||||||
expiresInDays: z.number().int().positive().optional(),
|
|
||||||
});
|
|
||||||
|
|
||||||
export class ApiTokenController {
|
|
||||||
private apiTokenService: ApiTokenService;
|
|
||||||
|
|
||||||
constructor() {
|
|
||||||
this.apiTokenService = new ApiTokenService();
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Create a new API Token
|
|
||||||
*/
|
|
||||||
async create(req: AuthenticatedRequest, res: Response): Promise<void> {
|
|
||||||
try {
|
|
||||||
const validation = createTokenSchema.safeParse(req.body);
|
|
||||||
if (!validation.success) {
|
|
||||||
ResponseHandler.error(res, 'Validation error', 400, validation.error.message);
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
const { name, expiresInDays } = validation.data;
|
|
||||||
const userId = req.user.userId;
|
|
||||||
|
|
||||||
const result = await this.apiTokenService.createToken(userId, name, expiresInDays);
|
|
||||||
|
|
||||||
ResponseHandler.success(res, {
|
|
||||||
token: result.token,
|
|
||||||
apiToken: result.apiToken
|
|
||||||
}, 'API Token created successfully. Please copy the token now, you will not be able to see it again.');
|
|
||||||
} catch (error) {
|
|
||||||
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
|
|
||||||
ResponseHandler.error(res, 'Failed to create API token', 500, errorMessage);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* List user's API Tokens
|
|
||||||
*/
|
|
||||||
async list(req: AuthenticatedRequest, res: Response): Promise<void> {
|
|
||||||
try {
|
|
||||||
const userId = req.user.userId;
|
|
||||||
const tokens = await this.apiTokenService.listTokens(userId);
|
|
||||||
ResponseHandler.success(res, { tokens }, 'API Tokens retrieved successfully');
|
|
||||||
} catch (error) {
|
|
||||||
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
|
|
||||||
ResponseHandler.error(res, 'Failed to list API tokens', 500, errorMessage);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Revoke an API Token
|
|
||||||
*/
|
|
||||||
async revoke(req: AuthenticatedRequest, res: Response): Promise<void> {
|
|
||||||
try {
|
|
||||||
const userId = req.user.userId;
|
|
||||||
const { id } = req.params;
|
|
||||||
|
|
||||||
const success = await this.apiTokenService.revokeToken(userId, id);
|
|
||||||
|
|
||||||
if (success) {
|
|
||||||
ResponseHandler.success(res, null, 'API Token revoked successfully');
|
|
||||||
} else {
|
|
||||||
ResponseHandler.notFound(res, 'Token not found or already revoked');
|
|
||||||
}
|
|
||||||
} catch (error) {
|
|
||||||
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
|
|
||||||
ResponseHandler.error(res, 'Failed to revoke API token', 500, errorMessage);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@ -1,15 +1,11 @@
|
|||||||
import { Request, Response } from 'express';
|
import { Request, Response } from 'express';
|
||||||
import { ApprovalService } from '@services/approval.service';
|
import { ApprovalService } from '@services/approval.service';
|
||||||
import { DealerClaimApprovalService } from '@services/dealerClaimApproval.service';
|
|
||||||
import { ApprovalLevel } from '@models/ApprovalLevel';
|
|
||||||
import { WorkflowRequest } from '@models/WorkflowRequest';
|
|
||||||
import { validateApprovalAction } from '@validators/approval.validator';
|
import { validateApprovalAction } from '@validators/approval.validator';
|
||||||
import { ResponseHandler } from '@utils/responseHandler';
|
import { ResponseHandler } from '@utils/responseHandler';
|
||||||
import type { AuthenticatedRequest } from '../types/express';
|
import type { AuthenticatedRequest } from '../types/express';
|
||||||
import { getRequestMetadata } from '@utils/requestUtils';
|
import { getRequestMetadata } from '@utils/requestUtils';
|
||||||
|
|
||||||
const approvalService = new ApprovalService();
|
const approvalService = new ApprovalService();
|
||||||
const dealerClaimApprovalService = new DealerClaimApprovalService();
|
|
||||||
|
|
||||||
export class ApprovalController {
|
export class ApprovalController {
|
||||||
async approveLevel(req: AuthenticatedRequest, res: Response): Promise<void> {
|
async approveLevel(req: AuthenticatedRequest, res: Response): Promise<void> {
|
||||||
@ -17,54 +13,18 @@ export class ApprovalController {
|
|||||||
const { levelId } = req.params;
|
const { levelId } = req.params;
|
||||||
const validatedData = validateApprovalAction(req.body);
|
const validatedData = validateApprovalAction(req.body);
|
||||||
|
|
||||||
// Determine which service to use based on workflow type
|
const requestMeta = getRequestMetadata(req);
|
||||||
const level = await ApprovalLevel.findByPk(levelId);
|
const level = await approvalService.approveLevel(levelId, validatedData, req.user.userId, {
|
||||||
|
ipAddress: requestMeta.ipAddress,
|
||||||
|
userAgent: requestMeta.userAgent
|
||||||
|
});
|
||||||
|
|
||||||
if (!level) {
|
if (!level) {
|
||||||
ResponseHandler.notFound(res, 'Approval level not found');
|
ResponseHandler.notFound(res, 'Approval level not found');
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
const workflow = await WorkflowRequest.findByPk(level.requestId);
|
ResponseHandler.success(res, level, 'Approval level updated successfully');
|
||||||
if (!workflow) {
|
|
||||||
ResponseHandler.notFound(res, 'Workflow not found');
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
const workflowType = (workflow as any)?.workflowType;
|
|
||||||
const requestMeta = getRequestMetadata(req);
|
|
||||||
|
|
||||||
// Route to appropriate service based on workflow type
|
|
||||||
let approvedLevel: any;
|
|
||||||
if (workflowType === 'CLAIM_MANAGEMENT') {
|
|
||||||
// Use DealerClaimApprovalService for claim management workflows
|
|
||||||
approvedLevel = await dealerClaimApprovalService.approveLevel(
|
|
||||||
levelId,
|
|
||||||
validatedData,
|
|
||||||
req.user.userId,
|
|
||||||
{
|
|
||||||
ipAddress: requestMeta.ipAddress,
|
|
||||||
userAgent: requestMeta.userAgent
|
|
||||||
}
|
|
||||||
);
|
|
||||||
} else {
|
|
||||||
// Use ApprovalService for custom workflows
|
|
||||||
approvedLevel = await approvalService.approveLevel(
|
|
||||||
levelId,
|
|
||||||
validatedData,
|
|
||||||
req.user.userId,
|
|
||||||
{
|
|
||||||
ipAddress: requestMeta.ipAddress,
|
|
||||||
userAgent: requestMeta.userAgent
|
|
||||||
}
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!approvedLevel) {
|
|
||||||
ResponseHandler.notFound(res, 'Approval level not found');
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
ResponseHandler.success(res, approvedLevel, 'Approval level updated successfully');
|
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
|
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
|
||||||
ResponseHandler.error(res, 'Failed to update approval level', 400, errorMessage);
|
ResponseHandler.error(res, 'Failed to update approval level', 400, errorMessage);
|
||||||
@ -74,23 +34,7 @@ export class ApprovalController {
|
|||||||
async getCurrentApprovalLevel(req: Request, res: Response): Promise<void> {
|
async getCurrentApprovalLevel(req: Request, res: Response): Promise<void> {
|
||||||
try {
|
try {
|
||||||
const { id } = req.params;
|
const { id } = req.params;
|
||||||
|
const level = await approvalService.getCurrentApprovalLevel(id);
|
||||||
// Determine which service to use based on workflow type
|
|
||||||
const workflow = await WorkflowRequest.findByPk(id);
|
|
||||||
if (!workflow) {
|
|
||||||
ResponseHandler.notFound(res, 'Workflow not found');
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
const workflowType = (workflow as any)?.workflowType;
|
|
||||||
|
|
||||||
// Route to appropriate service based on workflow type
|
|
||||||
let level: any;
|
|
||||||
if (workflowType === 'CLAIM_MANAGEMENT') {
|
|
||||||
level = await dealerClaimApprovalService.getCurrentApprovalLevel(id);
|
|
||||||
} else {
|
|
||||||
level = await approvalService.getCurrentApprovalLevel(id);
|
|
||||||
}
|
|
||||||
|
|
||||||
ResponseHandler.success(res, level, 'Current approval level retrieved successfully');
|
ResponseHandler.success(res, level, 'Current approval level retrieved successfully');
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
@ -102,23 +46,7 @@ export class ApprovalController {
|
|||||||
async getApprovalLevels(req: Request, res: Response): Promise<void> {
|
async getApprovalLevels(req: Request, res: Response): Promise<void> {
|
||||||
try {
|
try {
|
||||||
const { id } = req.params;
|
const { id } = req.params;
|
||||||
|
const levels = await approvalService.getApprovalLevels(id);
|
||||||
// Determine which service to use based on workflow type
|
|
||||||
const workflow = await WorkflowRequest.findByPk(id);
|
|
||||||
if (!workflow) {
|
|
||||||
ResponseHandler.notFound(res, 'Workflow not found');
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
const workflowType = (workflow as any)?.workflowType;
|
|
||||||
|
|
||||||
// Route to appropriate service based on workflow type
|
|
||||||
let levels: any[];
|
|
||||||
if (workflowType === 'CLAIM_MANAGEMENT') {
|
|
||||||
levels = await dealerClaimApprovalService.getApprovalLevels(id);
|
|
||||||
} else {
|
|
||||||
levels = await approvalService.getApprovalLevels(id);
|
|
||||||
}
|
|
||||||
|
|
||||||
ResponseHandler.success(res, levels, 'Approval levels retrieved successfully');
|
ResponseHandler.success(res, levels, 'Approval levels retrieved successfully');
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
|
|||||||
@ -84,7 +84,6 @@ export class AuthController {
|
|||||||
displayName: user.displayName,
|
displayName: user.displayName,
|
||||||
department: user.department,
|
department: user.department,
|
||||||
designation: user.designation,
|
designation: user.designation,
|
||||||
jobTitle: user.jobTitle,
|
|
||||||
phone: user.phone,
|
phone: user.phone,
|
||||||
location: user.location,
|
location: user.location,
|
||||||
role: user.role,
|
role: user.role,
|
||||||
@ -132,13 +131,10 @@ export class AuthController {
|
|||||||
|
|
||||||
// Set new access token in cookie if using cookie-based auth
|
// Set new access token in cookie if using cookie-based auth
|
||||||
const isProduction = process.env.NODE_ENV === 'production';
|
const isProduction = process.env.NODE_ENV === 'production';
|
||||||
const isUat = process.env.NODE_ENV === 'uat';
|
|
||||||
const isSecureEnv = isProduction || isUat;
|
|
||||||
|
|
||||||
const cookieOptions = {
|
const cookieOptions = {
|
||||||
httpOnly: true,
|
httpOnly: true,
|
||||||
secure: isSecureEnv,
|
secure: isProduction,
|
||||||
sameSite: isSecureEnv ? 'lax' as const : 'lax' as const, // 'lax' is safer and works on same-domain
|
sameSite: isProduction ? 'none' as const : 'lax' as const, // 'none' for cross-domain in production
|
||||||
maxAge: 24 * 60 * 60 * 1000, // 24 hours
|
maxAge: 24 * 60 * 60 * 1000, // 24 hours
|
||||||
};
|
};
|
||||||
|
|
||||||
@ -151,7 +147,7 @@ export class AuthController {
|
|||||||
message: 'Token refreshed successfully'
|
message: 'Token refreshed successfully'
|
||||||
}, 'Token refreshed successfully');
|
}, 'Token refreshed successfully');
|
||||||
} else {
|
} else {
|
||||||
// Dev: Include token for debugging
|
// Development: Include token for debugging
|
||||||
ResponseHandler.success(res, {
|
ResponseHandler.success(res, {
|
||||||
accessToken: newAccessToken
|
accessToken: newAccessToken
|
||||||
}, 'Token refreshed successfully');
|
}, 'Token refreshed successfully');
|
||||||
@ -163,134 +159,6 @@ export class AuthController {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
|
||||||
* Exchange Tanflow authorization code for tokens
|
|
||||||
* POST /api/v1/auth/tanflow/token-exchange
|
|
||||||
*/
|
|
||||||
async exchangeTanflowToken(req: Request, res: Response): Promise<void> {
|
|
||||||
try {
|
|
||||||
logger.info('Tanflow token exchange request received', {
|
|
||||||
body: {
|
|
||||||
code: req.body?.code ? `${req.body.code.substring(0, 10)}...` : 'MISSING',
|
|
||||||
redirectUri: req.body?.redirectUri,
|
|
||||||
state: req.body?.state ? 'PRESENT' : 'MISSING',
|
|
||||||
},
|
|
||||||
});
|
|
||||||
|
|
||||||
const { code, redirectUri } = validateTokenExchange(req.body);
|
|
||||||
logger.info('Tanflow token exchange validation passed', { redirectUri });
|
|
||||||
|
|
||||||
const result = await this.authService.exchangeTanflowCodeForTokens(code, redirectUri);
|
|
||||||
|
|
||||||
// Log login activity
|
|
||||||
const requestMeta = getRequestMetadata(req);
|
|
||||||
await activityService.log({
|
|
||||||
requestId: SYSTEM_EVENT_REQUEST_ID,
|
|
||||||
type: 'login',
|
|
||||||
user: {
|
|
||||||
userId: result.user.userId,
|
|
||||||
name: result.user.displayName || result.user.email,
|
|
||||||
email: result.user.email
|
|
||||||
},
|
|
||||||
timestamp: new Date().toISOString(),
|
|
||||||
action: 'User Login',
|
|
||||||
details: `User logged in via Tanflow SSO from ${requestMeta.ipAddress || 'unknown IP'}`,
|
|
||||||
metadata: {
|
|
||||||
loginMethod: 'TANFLOW_SSO',
|
|
||||||
employeeId: result.user.employeeId,
|
|
||||||
department: result.user.department,
|
|
||||||
role: result.user.role
|
|
||||||
},
|
|
||||||
ipAddress: requestMeta.ipAddress,
|
|
||||||
userAgent: requestMeta.userAgent,
|
|
||||||
category: 'AUTHENTICATION',
|
|
||||||
severity: 'INFO'
|
|
||||||
});
|
|
||||||
|
|
||||||
// Set tokens in httpOnly cookies (production) or return in body (development)
|
|
||||||
const isProduction = process.env.NODE_ENV === 'production';
|
|
||||||
const isUat = process.env.NODE_ENV === 'uat';
|
|
||||||
const isSecureEnv = isProduction || isUat;
|
|
||||||
|
|
||||||
const cookieOptions = {
|
|
||||||
httpOnly: true,
|
|
||||||
secure: isSecureEnv,
|
|
||||||
sameSite: isSecureEnv ? ('lax' as const) : ('lax' as const),
|
|
||||||
maxAge: 24 * 60 * 60 * 1000, // 24 hours
|
|
||||||
path: '/',
|
|
||||||
};
|
|
||||||
|
|
||||||
res.cookie('accessToken', result.accessToken, cookieOptions);
|
|
||||||
res.cookie('refreshToken', result.refreshToken, cookieOptions);
|
|
||||||
|
|
||||||
// In production, don't return tokens in response body (security)
|
|
||||||
// In development, include tokens for cross-port setup
|
|
||||||
if (isProduction) {
|
|
||||||
ResponseHandler.success(res, {
|
|
||||||
user: result.user,
|
|
||||||
idToken: result.oktaIdToken, // Include id_token for Tanflow logout
|
|
||||||
}, 'Authentication successful');
|
|
||||||
} else {
|
|
||||||
ResponseHandler.success(res, {
|
|
||||||
user: result.user,
|
|
||||||
accessToken: result.accessToken,
|
|
||||||
refreshToken: result.refreshToken,
|
|
||||||
idToken: result.oktaIdToken,
|
|
||||||
}, 'Authentication successful');
|
|
||||||
}
|
|
||||||
} catch (error) {
|
|
||||||
logger.error('Tanflow token exchange failed:', error);
|
|
||||||
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
|
|
||||||
ResponseHandler.error(res, 'Tanflow authentication failed', 400, errorMessage);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Refresh Tanflow access token
|
|
||||||
* POST /api/v1/auth/tanflow/refresh
|
|
||||||
*/
|
|
||||||
async refreshTanflowToken(req: Request, res: Response): Promise<void> {
|
|
||||||
try {
|
|
||||||
const refreshToken = req.body?.refreshToken;
|
|
||||||
|
|
||||||
if (!refreshToken) {
|
|
||||||
ResponseHandler.error(res, 'Refresh token is required', 400, 'Refresh token is required in request body');
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
const newAccessToken = await this.authService.refreshTanflowToken(refreshToken);
|
|
||||||
|
|
||||||
// Set new access token in cookie
|
|
||||||
const isProduction = process.env.NODE_ENV === 'production';
|
|
||||||
const isUat = process.env.NODE_ENV === 'uat';
|
|
||||||
const isSecureEnv = isProduction || isUat;
|
|
||||||
|
|
||||||
const cookieOptions = {
|
|
||||||
httpOnly: true,
|
|
||||||
secure: isSecureEnv,
|
|
||||||
sameSite: isSecureEnv ? ('lax' as const) : ('lax' as const),
|
|
||||||
maxAge: 24 * 60 * 60 * 1000,
|
|
||||||
path: '/',
|
|
||||||
};
|
|
||||||
|
|
||||||
res.cookie('accessToken', newAccessToken, cookieOptions);
|
|
||||||
|
|
||||||
if (isProduction) {
|
|
||||||
ResponseHandler.success(res, {
|
|
||||||
message: 'Token refreshed successfully'
|
|
||||||
}, 'Token refreshed successfully');
|
|
||||||
} else {
|
|
||||||
ResponseHandler.success(res, {
|
|
||||||
accessToken: newAccessToken
|
|
||||||
}, 'Token refreshed successfully');
|
|
||||||
}
|
|
||||||
} catch (error) {
|
|
||||||
logger.error('Tanflow token refresh failed:', error);
|
|
||||||
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
|
|
||||||
ResponseHandler.error(res, 'Token refresh failed', 401, errorMessage);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Logout user
|
* Logout user
|
||||||
* POST /api/v1/auth/logout
|
* POST /api/v1/auth/logout
|
||||||
@ -302,16 +170,13 @@ export class AuthController {
|
|||||||
|
|
||||||
// Helper function to clear cookies with all possible option combinations
|
// Helper function to clear cookies with all possible option combinations
|
||||||
const clearCookiesCompletely = () => {
|
const clearCookiesCompletely = () => {
|
||||||
const isProduction = process.env.NODE_ENV === 'production';
|
|
||||||
const isUat = process.env.NODE_ENV === 'uat';
|
|
||||||
const isSecureEnv = isProduction || isUat;
|
|
||||||
const cookieNames = ['accessToken', 'refreshToken'];
|
const cookieNames = ['accessToken', 'refreshToken'];
|
||||||
|
|
||||||
// Get the EXACT options used when setting cookies (from exchangeToken)
|
// Get the EXACT options used when setting cookies (from exchangeToken)
|
||||||
// These MUST match exactly: httpOnly, secure, sameSite, path
|
// These MUST match exactly: httpOnly, secure, sameSite, path
|
||||||
const cookieOptions = {
|
const cookieOptions = {
|
||||||
httpOnly: true,
|
httpOnly: true,
|
||||||
secure: isSecureEnv,
|
secure: isProduction,
|
||||||
sameSite: 'lax' as const,
|
sameSite: 'lax' as const,
|
||||||
path: '/',
|
path: '/',
|
||||||
};
|
};
|
||||||
@ -481,13 +346,10 @@ export class AuthController {
|
|||||||
|
|
||||||
// Set cookies for web clients
|
// Set cookies for web clients
|
||||||
const isProduction = process.env.NODE_ENV === 'production';
|
const isProduction = process.env.NODE_ENV === 'production';
|
||||||
const isUat = process.env.NODE_ENV === 'uat';
|
|
||||||
const isSecureEnv = isProduction || isUat;
|
|
||||||
|
|
||||||
const cookieOptions = {
|
const cookieOptions = {
|
||||||
httpOnly: true,
|
httpOnly: true,
|
||||||
secure: isSecureEnv,
|
secure: isProduction,
|
||||||
sameSite: isSecureEnv ? 'lax' as const : 'lax' as const,
|
sameSite: isProduction ? 'none' as const : 'lax' as const,
|
||||||
maxAge: 24 * 60 * 60 * 1000, // 24 hours
|
maxAge: 24 * 60 * 60 * 1000, // 24 hours
|
||||||
};
|
};
|
||||||
|
|
||||||
@ -564,13 +426,10 @@ export class AuthController {
|
|||||||
|
|
||||||
// Set cookies with httpOnly flag for security
|
// Set cookies with httpOnly flag for security
|
||||||
const isProduction = process.env.NODE_ENV === 'production';
|
const isProduction = process.env.NODE_ENV === 'production';
|
||||||
const isUat = process.env.NODE_ENV === 'uat';
|
|
||||||
const isSecureEnv = isProduction || isUat;
|
|
||||||
|
|
||||||
const cookieOptions = {
|
const cookieOptions = {
|
||||||
httpOnly: true,
|
httpOnly: true,
|
||||||
secure: isSecureEnv,
|
secure: isProduction,
|
||||||
sameSite: isSecureEnv ? 'lax' as const : 'lax' as const, // 'lax' for same-domain
|
sameSite: isProduction ? 'none' as const : 'lax' as const, // 'none' for cross-domain in production
|
||||||
maxAge: 24 * 60 * 60 * 1000, // 24 hours for access token
|
maxAge: 24 * 60 * 60 * 1000, // 24 hours for access token
|
||||||
};
|
};
|
||||||
|
|
||||||
@ -602,7 +461,7 @@ export class AuthController {
|
|||||||
idToken: result.oktaIdToken
|
idToken: result.oktaIdToken
|
||||||
}, 'Token exchange successful');
|
}, 'Token exchange successful');
|
||||||
} else {
|
} else {
|
||||||
// Dev: Include tokens for debugging and different-port setup
|
// Development: Include tokens for debugging and different-port setup
|
||||||
ResponseHandler.success(res, {
|
ResponseHandler.success(res, {
|
||||||
user: result.user,
|
user: result.user,
|
||||||
accessToken: result.accessToken,
|
accessToken: result.accessToken,
|
||||||
|
|||||||
@ -5,7 +5,6 @@ import { activityService } from '@services/activity.service';
|
|||||||
import logger from '@utils/logger';
|
import logger from '@utils/logger';
|
||||||
import { getRequestMetadata } from '@utils/requestUtils';
|
import { getRequestMetadata } from '@utils/requestUtils';
|
||||||
|
|
||||||
|
|
||||||
export class ConclusionController {
|
export class ConclusionController {
|
||||||
/**
|
/**
|
||||||
* Generate AI conclusion remark for a request
|
* Generate AI conclusion remark for a request
|
||||||
@ -80,7 +79,7 @@ export class ConclusionController {
|
|||||||
const workNotes = await WorkNote.findAll({
|
const workNotes = await WorkNote.findAll({
|
||||||
where: { requestId },
|
where: { requestId },
|
||||||
order: [['createdAt', 'ASC']],
|
order: [['createdAt', 'ASC']],
|
||||||
limit: 20 // Last 20 work notes - keep full context for better conclusions
|
limit: 20 // Last 20 work notes
|
||||||
});
|
});
|
||||||
|
|
||||||
const documents = await Document.findAll({
|
const documents = await Document.findAll({
|
||||||
@ -91,7 +90,7 @@ export class ConclusionController {
|
|||||||
const activities = await Activity.findAll({
|
const activities = await Activity.findAll({
|
||||||
where: { requestId },
|
where: { requestId },
|
||||||
order: [['createdAt', 'ASC']],
|
order: [['createdAt', 'ASC']],
|
||||||
limit: 50 // Last 50 activities - keep full context for better conclusions
|
limit: 50 // Last 50 activities
|
||||||
});
|
});
|
||||||
|
|
||||||
// Build context object
|
// Build context object
|
||||||
@ -249,7 +248,6 @@ export class ConclusionController {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Update conclusion
|
// Update conclusion
|
||||||
// Note: finalRemark is already sanitized by the sanitization middleware (RICH_TEXT_FIELDS)
|
|
||||||
const wasEdited = (conclusion as any).aiGeneratedRemark !== finalRemark;
|
const wasEdited = (conclusion as any).aiGeneratedRemark !== finalRemark;
|
||||||
|
|
||||||
await conclusion.update({
|
await conclusion.update({
|
||||||
@ -285,8 +283,6 @@ export class ConclusionController {
|
|||||||
return res.status(400).json({ error: 'Final remark is required' });
|
return res.status(400).json({ error: 'Final remark is required' });
|
||||||
}
|
}
|
||||||
|
|
||||||
// Note: finalRemark is already sanitized by the sanitization middleware (RICH_TEXT_FIELDS)
|
|
||||||
|
|
||||||
// Fetch request
|
// Fetch request
|
||||||
const request = await WorkflowRequest.findOne({
|
const request = await WorkflowRequest.findOne({
|
||||||
where: { requestId },
|
where: { requestId },
|
||||||
|
|||||||
@ -46,7 +46,6 @@ export class DashboardController {
|
|||||||
const endDate = req.query.endDate as string | undefined;
|
const endDate = req.query.endDate as string | undefined;
|
||||||
const status = req.query.status as string | undefined; // Status filter (not used in stats - stats show all statuses)
|
const status = req.query.status as string | undefined; // Status filter (not used in stats - stats show all statuses)
|
||||||
const priority = req.query.priority as string | undefined;
|
const priority = req.query.priority as string | undefined;
|
||||||
const templateType = req.query.templateType as string | undefined;
|
|
||||||
const department = req.query.department as string | undefined;
|
const department = req.query.department as string | undefined;
|
||||||
const initiator = req.query.initiator as string | undefined;
|
const initiator = req.query.initiator as string | undefined;
|
||||||
const approver = req.query.approver as string | undefined;
|
const approver = req.query.approver as string | undefined;
|
||||||
@ -62,7 +61,6 @@ export class DashboardController {
|
|||||||
endDate,
|
endDate,
|
||||||
status,
|
status,
|
||||||
priority,
|
priority,
|
||||||
templateType,
|
|
||||||
department,
|
department,
|
||||||
initiator,
|
initiator,
|
||||||
approver,
|
approver,
|
||||||
|
|||||||
@ -7,15 +7,11 @@ import logger from '../utils/logger';
|
|||||||
export class DealerController {
|
export class DealerController {
|
||||||
/**
|
/**
|
||||||
* Get all dealers
|
* Get all dealers
|
||||||
* GET /api/v1/dealers?q=searchTerm&limit=10 (optional search and limit)
|
* GET /api/v1/dealers
|
||||||
*/
|
*/
|
||||||
async getAllDealers(req: Request, res: Response): Promise<void> {
|
async getAllDealers(req: Request, res: Response): Promise<void> {
|
||||||
try {
|
try {
|
||||||
const searchTerm = req.query.q as string | undefined;
|
const dealers = await dealerService.getAllDealers();
|
||||||
const limitParam = req.query.limit as string | undefined;
|
|
||||||
// Parse limit, default to 10, max 100
|
|
||||||
const limit = limitParam ? Math.min(Math.max(1, parseInt(limitParam, 10)), 100) : 10;
|
|
||||||
const dealers = await dealerService.getAllDealers(searchTerm, limit);
|
|
||||||
return ResponseHandler.success(res, dealers, 'Dealers fetched successfully');
|
return ResponseHandler.success(res, dealers, 'Dealers fetched successfully');
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
|
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
|
||||||
@ -68,19 +64,17 @@ export class DealerController {
|
|||||||
|
|
||||||
/**
|
/**
|
||||||
* Search dealers
|
* Search dealers
|
||||||
* GET /api/v1/dealers/search?q=searchTerm&limit=10
|
* GET /api/v1/dealers/search?q=searchTerm
|
||||||
*/
|
*/
|
||||||
async searchDealers(req: Request, res: Response): Promise<void> {
|
async searchDealers(req: Request, res: Response): Promise<void> {
|
||||||
try {
|
try {
|
||||||
const { q, limit: limitParam } = req.query;
|
const { q } = req.query;
|
||||||
|
|
||||||
if (!q || typeof q !== 'string') {
|
if (!q || typeof q !== 'string') {
|
||||||
return ResponseHandler.error(res, 'Search term is required', 400);
|
return ResponseHandler.error(res, 'Search term is required', 400);
|
||||||
}
|
}
|
||||||
|
|
||||||
// Parse limit, default to 10, max 100
|
const dealers = await dealerService.searchDealers(q);
|
||||||
const limit = limitParam ? Math.min(Math.max(1, parseInt(limitParam as string, 10)), 100) : 10;
|
|
||||||
const dealers = await dealerService.searchDealers(q, limit);
|
|
||||||
return ResponseHandler.success(res, dealers, 'Dealers searched successfully');
|
return ResponseHandler.success(res, dealers, 'Dealers searched successfully');
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
|
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
|
||||||
@ -88,36 +82,5 @@ export class DealerController {
|
|||||||
return ResponseHandler.error(res, 'Failed to search dealers', 500, errorMessage);
|
return ResponseHandler.error(res, 'Failed to search dealers', 500, errorMessage);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
|
||||||
* Verify dealer is logged in
|
|
||||||
* GET /api/v1/dealers/verify/:dealerCode
|
|
||||||
* Returns dealer info with isLoggedIn flag
|
|
||||||
*/
|
|
||||||
async verifyDealerLogin(req: Request, res: Response): Promise<void> {
|
|
||||||
try {
|
|
||||||
const { dealerCode } = req.params;
|
|
||||||
const dealer = await dealerService.getDealerByCode(dealerCode);
|
|
||||||
|
|
||||||
if (!dealer) {
|
|
||||||
return ResponseHandler.error(res, 'Dealer not found', 404);
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!dealer.isLoggedIn) {
|
|
||||||
return ResponseHandler.error(
|
|
||||||
res,
|
|
||||||
'Dealer not logged in to the system',
|
|
||||||
400,
|
|
||||||
`The dealer with code ${dealerCode} (${dealer.email}) has not logged in to the system. Please ask them to log in first.`
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
return ResponseHandler.success(res, dealer, 'Dealer verified and logged in');
|
|
||||||
} catch (error) {
|
|
||||||
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
|
|
||||||
logger.error('[DealerController] Error verifying dealer login:', error);
|
|
||||||
return ResponseHandler.error(res, 'Failed to verify dealer', 500, errorMessage);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@ -2,7 +2,6 @@ import { Request, Response } from 'express';
|
|||||||
import type { AuthenticatedRequest } from '../types/express';
|
import type { AuthenticatedRequest } from '../types/express';
|
||||||
import { DealerClaimService } from '../services/dealerClaim.service';
|
import { DealerClaimService } from '../services/dealerClaim.service';
|
||||||
import { ResponseHandler } from '../utils/responseHandler';
|
import { ResponseHandler } from '../utils/responseHandler';
|
||||||
import { translateEInvoiceError } from '../utils/einvoiceErrors';
|
|
||||||
import logger from '../utils/logger';
|
import logger from '../utils/logger';
|
||||||
import { gcsStorageService } from '../services/gcsStorage.service';
|
import { gcsStorageService } from '../services/gcsStorage.service';
|
||||||
import { Document } from '../models/Document';
|
import { Document } from '../models/Document';
|
||||||
@ -12,11 +11,6 @@ import { sapIntegrationService } from '../services/sapIntegration.service';
|
|||||||
import fs from 'fs';
|
import fs from 'fs';
|
||||||
import path from 'path';
|
import path from 'path';
|
||||||
import crypto from 'crypto';
|
import crypto from 'crypto';
|
||||||
import { WorkflowRequest } from '../models/WorkflowRequest';
|
|
||||||
import { DealerClaimDetails } from '../models/DealerClaimDetails';
|
|
||||||
import { ClaimInvoice } from '../models/ClaimInvoice';
|
|
||||||
import { ClaimInvoiceItem } from '../models/ClaimInvoiceItem';
|
|
||||||
import { ActivityType } from '../models/ActivityType';
|
|
||||||
|
|
||||||
export class DealerClaimController {
|
export class DealerClaimController {
|
||||||
private dealerClaimService = new DealerClaimService();
|
private dealerClaimService = new DealerClaimService();
|
||||||
@ -256,7 +250,6 @@ export class DealerClaimController {
|
|||||||
numberOfParticipants,
|
numberOfParticipants,
|
||||||
closedExpenses,
|
closedExpenses,
|
||||||
totalClosedExpenses,
|
totalClosedExpenses,
|
||||||
completionDescription,
|
|
||||||
} = req.body;
|
} = req.body;
|
||||||
|
|
||||||
// Parse closedExpenses if it's a JSON string
|
// Parse closedExpenses if it's a JSON string
|
||||||
@ -547,7 +540,6 @@ export class DealerClaimController {
|
|||||||
totalClosedExpenses: totalClosedExpenses ? parseFloat(totalClosedExpenses) : 0,
|
totalClosedExpenses: totalClosedExpenses ? parseFloat(totalClosedExpenses) : 0,
|
||||||
invoicesReceipts: invoicesReceipts.length > 0 ? invoicesReceipts : undefined,
|
invoicesReceipts: invoicesReceipts.length > 0 ? invoicesReceipts : undefined,
|
||||||
attendanceSheet: attendanceSheet || undefined,
|
attendanceSheet: attendanceSheet || undefined,
|
||||||
completionDescription: completionDescription || undefined,
|
|
||||||
});
|
});
|
||||||
|
|
||||||
return ResponseHandler.success(res, { message: 'Completion documents submitted successfully' }, 'Completion submitted');
|
return ResponseHandler.success(res, { message: 'Completion documents submitted successfully' }, 'Completion submitted');
|
||||||
@ -757,66 +749,7 @@ export class DealerClaimController {
|
|||||||
} catch (error) {
|
} catch (error) {
|
||||||
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
|
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
|
||||||
logger.error('[DealerClaimController] Error updating e-invoice:', error);
|
logger.error('[DealerClaimController] Error updating e-invoice:', error);
|
||||||
|
return ResponseHandler.error(res, 'Failed to update e-invoice details', 500, errorMessage);
|
||||||
// Translate technical PWC/IRP error codes to user-friendly messages
|
|
||||||
const userFacingMessage = translateEInvoiceError(errorMessage);
|
|
||||||
|
|
||||||
return ResponseHandler.error(res, userFacingMessage, 500, errorMessage);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Download E-Invoice PDF
|
|
||||||
* GET /api/v1/dealer-claims/:requestId/e-invoice/pdf
|
|
||||||
*/
|
|
||||||
async downloadInvoicePdf(req: Request, res: Response): Promise<void> {
|
|
||||||
try {
|
|
||||||
const identifier = req.params.requestId; // Can be UUID or requestNumber
|
|
||||||
|
|
||||||
// Find workflow to get actual UUID
|
|
||||||
const workflow = await this.findWorkflowByIdentifier(identifier);
|
|
||||||
if (!workflow) {
|
|
||||||
return ResponseHandler.error(res, 'Workflow request not found', 404);
|
|
||||||
}
|
|
||||||
|
|
||||||
const requestId = (workflow as any).requestId || (workflow as any).request_id;
|
|
||||||
if (!requestId) {
|
|
||||||
return ResponseHandler.error(res, 'Invalid workflow request', 400);
|
|
||||||
}
|
|
||||||
|
|
||||||
const { ClaimInvoice } = await import('../models/ClaimInvoice');
|
|
||||||
let invoice = await ClaimInvoice.findOne({ where: { requestId } });
|
|
||||||
|
|
||||||
if (!invoice) {
|
|
||||||
return ResponseHandler.error(res, 'Invoice record not found', 404);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Generate PDF on the fly
|
|
||||||
try {
|
|
||||||
const { pdfService } = await import('../services/pdf.service');
|
|
||||||
const pdfBuffer = await pdfService.generateInvoicePdf(requestId);
|
|
||||||
|
|
||||||
const requestNumber = workflow.requestNumber || 'invoice';
|
|
||||||
const fileName = `Invoice_${requestNumber}.pdf`;
|
|
||||||
|
|
||||||
res.setHeader('Content-Type', 'application/pdf');
|
|
||||||
res.setHeader('Content-Disposition', `inline; filename="${fileName}"`);
|
|
||||||
res.setHeader('Content-Length', pdfBuffer.length);
|
|
||||||
|
|
||||||
// Convert Buffer to stream
|
|
||||||
const { Readable } = await import('stream');
|
|
||||||
const stream = new Readable();
|
|
||||||
stream.push(pdfBuffer);
|
|
||||||
stream.push(null);
|
|
||||||
stream.pipe(res);
|
|
||||||
} catch (pdfError) {
|
|
||||||
logger.error(`[DealerClaimController] Failed to generate PDF:`, pdfError);
|
|
||||||
return ResponseHandler.error(res, 'Failed to generate invoice PDF', 500);
|
|
||||||
}
|
|
||||||
} catch (error) {
|
|
||||||
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
|
|
||||||
logger.error('[DealerClaimController] Error downloading invoice PDF:', error);
|
|
||||||
return ResponseHandler.error(res, 'Failed to download invoice PDF', 500, errorMessage);
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -905,208 +838,5 @@ export class DealerClaimController {
|
|||||||
return ResponseHandler.error(res, 'Failed to send credit note to dealer', 500, errorMessage);
|
return ResponseHandler.error(res, 'Failed to send credit note to dealer', 500, errorMessage);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
|
||||||
* Test SAP Budget Blocking (for testing/debugging)
|
|
||||||
* POST /api/v1/dealer-claims/test/sap-block
|
|
||||||
*
|
|
||||||
* This endpoint allows direct testing of SAP budget blocking without creating a full request
|
|
||||||
*/
|
|
||||||
async testSapBudgetBlock(req: AuthenticatedRequest, res: Response): Promise<void> {
|
|
||||||
try {
|
|
||||||
const userId = req.user?.userId;
|
|
||||||
if (!userId) {
|
|
||||||
return ResponseHandler.error(res, 'Unauthorized', 401);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
const { ioNumber, amount, requestNumber } = req.body;
|
|
||||||
|
|
||||||
// Validation
|
|
||||||
if (!ioNumber || !amount) {
|
|
||||||
return ResponseHandler.error(res, 'Missing required fields: ioNumber and amount are required', 400);
|
|
||||||
}
|
|
||||||
|
|
||||||
const blockAmount = parseFloat(amount);
|
|
||||||
if (isNaN(blockAmount) || blockAmount <= 0) {
|
|
||||||
return ResponseHandler.error(res, 'Amount must be a positive number', 400);
|
|
||||||
}
|
|
||||||
|
|
||||||
logger.info(`[DealerClaimController] Testing SAP budget block:`, {
|
|
||||||
ioNumber,
|
|
||||||
amount: blockAmount,
|
|
||||||
requestNumber: requestNumber || 'TEST-REQUEST',
|
|
||||||
userId
|
|
||||||
});
|
|
||||||
|
|
||||||
// First validate IO number
|
|
||||||
const ioValidation = await sapIntegrationService.validateIONumber(ioNumber);
|
|
||||||
|
|
||||||
if (!ioValidation.isValid) {
|
|
||||||
return ResponseHandler.error(res, `Invalid IO number: ${ioValidation.error || 'IO number not found in SAP'}`, 400);
|
|
||||||
}
|
|
||||||
|
|
||||||
logger.info(`[DealerClaimController] IO validation successful:`, {
|
|
||||||
ioNumber,
|
|
||||||
availableBalance: ioValidation.availableBalance
|
|
||||||
});
|
|
||||||
|
|
||||||
// Block budget in SAP
|
|
||||||
const testRequestNumber = requestNumber || `TEST-${Date.now()}`;
|
|
||||||
const blockResult = await sapIntegrationService.blockBudget(
|
|
||||||
ioNumber,
|
|
||||||
blockAmount,
|
|
||||||
testRequestNumber,
|
|
||||||
`Test budget block for ${testRequestNumber}`
|
|
||||||
);
|
|
||||||
|
|
||||||
if (!blockResult.success) {
|
|
||||||
return ResponseHandler.error(res, `Failed to block budget in SAP: ${blockResult.error}`, 500);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Return detailed response
|
|
||||||
return ResponseHandler.success(res, {
|
|
||||||
message: 'SAP budget block test successful',
|
|
||||||
ioNumber,
|
|
||||||
requestedAmount: blockAmount,
|
|
||||||
availableBalance: ioValidation.availableBalance,
|
|
||||||
sapResponse: {
|
|
||||||
success: blockResult.success,
|
|
||||||
blockedAmount: blockResult.blockedAmount,
|
|
||||||
remainingBalance: blockResult.remainingBalance,
|
|
||||||
sapDocumentNumber: blockResult.blockId || null,
|
|
||||||
error: blockResult.error || null
|
|
||||||
},
|
|
||||||
calculatedRemainingBalance: ioValidation.availableBalance - blockResult.blockedAmount,
|
|
||||||
validation: {
|
|
||||||
isValid: ioValidation.isValid,
|
|
||||||
availableBalance: ioValidation.availableBalance,
|
|
||||||
error: ioValidation.error || null
|
|
||||||
}
|
|
||||||
}, 'SAP budget block test completed');
|
|
||||||
} catch (error: any) {
|
|
||||||
logger.error('[DealerClaimController] Error testing SAP budget block:', error);
|
|
||||||
return ResponseHandler.error(res, error.message || 'Failed to test SAP budget block', 500);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* Download Invoice CSV
|
|
||||||
* GET /api/v1/dealer-claims/:requestId/e-invoice/csv
|
|
||||||
*/
|
|
||||||
async downloadInvoiceCsv(req: Request, res: Response): Promise<void> {
|
|
||||||
try {
|
|
||||||
const identifier = req.params.requestId;
|
|
||||||
|
|
||||||
// Use helper to find workflow
|
|
||||||
const workflow = await this.findWorkflowByIdentifier(identifier);
|
|
||||||
if (!workflow) {
|
|
||||||
return ResponseHandler.error(res, 'Workflow request not found', 404);
|
|
||||||
}
|
|
||||||
|
|
||||||
const requestId = (workflow as any).requestId || (workflow as any).request_id;
|
|
||||||
const requestNumber = (workflow as any).requestNumber || (workflow as any).request_number;
|
|
||||||
|
|
||||||
// Fetch related data
|
|
||||||
logger.info(`[DealerClaimController] Preparing CSV for requestId: ${requestId}`);
|
|
||||||
const [invoice, items, claimDetails, internalOrder] = await Promise.all([
|
|
||||||
ClaimInvoice.findOne({ where: { requestId } }),
|
|
||||||
ClaimInvoiceItem.findAll({ where: { requestId }, order: [['slNo', 'ASC']] }),
|
|
||||||
DealerClaimDetails.findOne({ where: { requestId } }),
|
|
||||||
InternalOrder.findOne({ where: { requestId } })
|
|
||||||
]);
|
|
||||||
|
|
||||||
logger.info(`[DealerClaimController] Found ${items.length} items to export for request ${requestNumber}`);
|
|
||||||
|
|
||||||
let sapRefNo = '';
|
|
||||||
let taxationType = 'GST';
|
|
||||||
if (claimDetails?.activityType) {
|
|
||||||
const activity = await ActivityType.findOne({ where: { title: claimDetails.activityType } });
|
|
||||||
sapRefNo = activity?.sapRefNo || '';
|
|
||||||
taxationType = activity?.taxationType || (claimDetails.activityType.toLowerCase().includes('non') ? 'Non GST' : 'GST');
|
|
||||||
}
|
|
||||||
|
|
||||||
// Construct CSV
|
|
||||||
const headers = [
|
|
||||||
'TRNS_UNIQ_NO',
|
|
||||||
'CLAIM_NUMBER',
|
|
||||||
'INV_NUMBER',
|
|
||||||
'DEALER_CODE',
|
|
||||||
'IO_NUMBER',
|
|
||||||
'CLAIM_DOC_TYP',
|
|
||||||
'CLAIM_DATE',
|
|
||||||
'CLAIM_AMT',
|
|
||||||
'GST_AMT',
|
|
||||||
'GST_PERCENTAG'
|
|
||||||
];
|
|
||||||
|
|
||||||
const rows = items.map(item => {
|
|
||||||
const isNonGst = taxationType === 'Non GST' || taxationType === 'Non-GST';
|
|
||||||
|
|
||||||
// For Non-GST, we hide HSN (often stored in transactionCode) and GST details
|
|
||||||
const trnsUniqNo = isNonGst ? '' : (item.transactionCode || '');
|
|
||||||
const claimNumber = requestNumber;
|
|
||||||
const invNumber = invoice?.invoiceNumber || '';
|
|
||||||
const dealerCode = claimDetails?.dealerCode || '';
|
|
||||||
const ioNumber = internalOrder?.ioNumber || '';
|
|
||||||
const claimDocTyp = sapRefNo;
|
|
||||||
const claimDate = invoice?.createdAt ? new Date(invoice.createdAt).toISOString().split('T')[0] : '';
|
|
||||||
const claimAmt = item.assAmt;
|
|
||||||
|
|
||||||
// Zero out tax for Non-GST
|
|
||||||
const totalTax = isNonGst ? 0 : (Number(item.igstAmt || 0) + Number(item.cgstAmt || 0) + Number(item.sgstAmt || 0) + Number(item.utgstAmt || 0));
|
|
||||||
const gstPercentag = isNonGst ? 0 : (item.gstRt || 0);
|
|
||||||
|
|
||||||
return [
|
|
||||||
trnsUniqNo,
|
|
||||||
claimNumber,
|
|
||||||
invNumber,
|
|
||||||
dealerCode,
|
|
||||||
ioNumber,
|
|
||||||
claimDocTyp,
|
|
||||||
claimDate,
|
|
||||||
claimAmt,
|
|
||||||
totalTax.toFixed(2),
|
|
||||||
gstPercentag
|
|
||||||
].join(',');
|
|
||||||
});
|
|
||||||
|
|
||||||
const csvContent = [headers.join(','), ...rows].join('\n');
|
|
||||||
|
|
||||||
res.setHeader('Content-Type', 'text/csv');
|
|
||||||
res.setHeader('Content-Disposition', `attachment; filename="Invoice_${requestNumber}.csv"`);
|
|
||||||
|
|
||||||
res.status(200).send(csvContent);
|
|
||||||
return;
|
|
||||||
} catch (error) {
|
|
||||||
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
|
|
||||||
logger.error('[DealerClaimController] Error downloading invoice CSV:', error);
|
|
||||||
return ResponseHandler.error(res, 'Failed to download invoice CSV', 500, errorMessage);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Re-trigger WFM CSV push (Step 7)
|
|
||||||
* POST /api/v1/dealer-claims/:requestId/wfm/retrigger
|
|
||||||
*/
|
|
||||||
async retriggerWFMPush(req: Request, res: Response): Promise<void> {
|
|
||||||
try {
|
|
||||||
const { requestId: identifier } = req.params;
|
|
||||||
|
|
||||||
const workflow = await this.findWorkflowByIdentifier(identifier);
|
|
||||||
if (!workflow) {
|
|
||||||
return ResponseHandler.error(res, 'Workflow request not found', 404);
|
|
||||||
}
|
|
||||||
|
|
||||||
const requestId = (workflow as any).id || (workflow as any).requestId;
|
|
||||||
|
|
||||||
await this.dealerClaimService.pushWFMCSV(requestId);
|
|
||||||
|
|
||||||
return ResponseHandler.success(res, {
|
|
||||||
message: 'WFM CSV push re-triggered successfully'
|
|
||||||
}, 'WFM push re-triggered');
|
|
||||||
} catch (error: any) {
|
|
||||||
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
|
|
||||||
logger.error('[DealerClaimController] Error re-triggering WFM push:', error);
|
|
||||||
return ResponseHandler.error(res, 'Failed to re-trigger WFM push', 500, errorMessage);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|||||||
@ -1,39 +0,0 @@
|
|||||||
import { Request, Response } from 'express';
|
|
||||||
import { dealerDashboardService } from '../services/dealerDashboard.service';
|
|
||||||
import logger from '@utils/logger';
|
|
||||||
|
|
||||||
export class DealerDashboardController {
|
|
||||||
/**
|
|
||||||
* Get dealer dashboard KPIs and category data
|
|
||||||
* GET /api/v1/dealer-claims/dashboard
|
|
||||||
*/
|
|
||||||
async getDashboard(req: Request, res: Response): Promise<void> {
|
|
||||||
try {
|
|
||||||
const userId = (req as any).user?.userId;
|
|
||||||
const userEmail = (req as any).user?.email;
|
|
||||||
const dateRange = req.query.dateRange as string | undefined;
|
|
||||||
const startDate = req.query.startDate as string | undefined;
|
|
||||||
const endDate = req.query.endDate as string | undefined;
|
|
||||||
|
|
||||||
const result = await dealerDashboardService.getDashboardKPIs(
|
|
||||||
userEmail,
|
|
||||||
userId,
|
|
||||||
dateRange,
|
|
||||||
startDate,
|
|
||||||
endDate
|
|
||||||
);
|
|
||||||
|
|
||||||
res.json({
|
|
||||||
success: true,
|
|
||||||
data: result
|
|
||||||
});
|
|
||||||
} catch (error) {
|
|
||||||
logger.error('[DealerDashboard] Error fetching dashboard:', error);
|
|
||||||
res.status(500).json({
|
|
||||||
success: false,
|
|
||||||
error: 'Failed to fetch dealer dashboard data'
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
@ -1,34 +0,0 @@
|
|||||||
import { Request, Response } from 'express';
|
|
||||||
import { dealerExternalService } from '../services/dealerExternal.service';
|
|
||||||
import { ResponseHandler } from '../utils/responseHandler';
|
|
||||||
import logger from '../utils/logger';
|
|
||||||
|
|
||||||
export class DealerExternalController {
|
|
||||||
/**
|
|
||||||
* Search dealer by code via external API
|
|
||||||
* GET /api/v1/dealers-external/search/:dealerCode
|
|
||||||
*/
|
|
||||||
async searchByDealerCode(req: Request, res: Response): Promise<void> {
|
|
||||||
try {
|
|
||||||
const { dealerCode } = req.params;
|
|
||||||
|
|
||||||
if (!dealerCode) {
|
|
||||||
return ResponseHandler.error(res, 'Dealer code is required', 400);
|
|
||||||
}
|
|
||||||
|
|
||||||
const dealerInfo = await dealerExternalService.getDealerByCode(dealerCode);
|
|
||||||
|
|
||||||
if (!dealerInfo) {
|
|
||||||
return ResponseHandler.error(res, 'Dealer not found in external system', 404);
|
|
||||||
}
|
|
||||||
|
|
||||||
return ResponseHandler.success(res, dealerInfo, 'Dealer found successfully');
|
|
||||||
} catch (error) {
|
|
||||||
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
|
|
||||||
logger.error(`[DealerExternalController] Error searching dealer ${req.params.dealerCode}:`, error);
|
|
||||||
return ResponseHandler.error(res, 'Failed to fetch dealer from external source', 500, errorMessage);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
export const dealerExternalController = new DealerExternalController();
|
|
||||||
@ -5,14 +5,9 @@ import fs from 'fs';
|
|||||||
import { Document } from '@models/Document';
|
import { Document } from '@models/Document';
|
||||||
import { User } from '@models/User';
|
import { User } from '@models/User';
|
||||||
import { WorkflowRequest } from '@models/WorkflowRequest';
|
import { WorkflowRequest } from '@models/WorkflowRequest';
|
||||||
import { Participant } from '@models/Participant';
|
|
||||||
import { ApprovalLevel } from '@models/ApprovalLevel';
|
|
||||||
import { Op } from 'sequelize';
|
|
||||||
import { ResponseHandler } from '@utils/responseHandler';
|
import { ResponseHandler } from '@utils/responseHandler';
|
||||||
import { activityService } from '@services/activity.service';
|
import { activityService } from '@services/activity.service';
|
||||||
import { gcsStorageService } from '@services/gcsStorage.service';
|
import { gcsStorageService } from '@services/gcsStorage.service';
|
||||||
import { emailNotificationService } from '@services/emailNotification.service';
|
|
||||||
import { notificationService } from '@services/notification.service';
|
|
||||||
import type { AuthenticatedRequest } from '../types/express';
|
import type { AuthenticatedRequest } from '../types/express';
|
||||||
import { getRequestMetadata } from '@utils/requestUtils';
|
import { getRequestMetadata } from '@utils/requestUtils';
|
||||||
import { getConfigNumber, getConfigValue } from '@services/configReader.service';
|
import { getConfigNumber, getConfigValue } from '@services/configReader.service';
|
||||||
@ -138,84 +133,16 @@ export class DocumentController {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Check if storageUrl exceeds database column limit (500 chars)
|
const doc = await Document.create({
|
||||||
// GCS signed URLs can be very long (500-1000+ chars)
|
|
||||||
const MAX_STORAGE_URL_LENGTH = 500;
|
|
||||||
let finalStorageUrl = storageUrl;
|
|
||||||
if (storageUrl && storageUrl.length > MAX_STORAGE_URL_LENGTH) {
|
|
||||||
logWithContext('warn', 'Storage URL exceeds database column limit, truncating', {
|
|
||||||
originalLength: storageUrl.length,
|
|
||||||
maxLength: MAX_STORAGE_URL_LENGTH,
|
|
||||||
urlPrefix: storageUrl.substring(0, 100),
|
|
||||||
});
|
|
||||||
// For signed URLs, we can't truncate as it will break the URL
|
|
||||||
// Instead, store null and generate signed URLs on-demand when needed
|
|
||||||
// The filePath is sufficient to generate a new signed URL later
|
|
||||||
finalStorageUrl = null as any;
|
|
||||||
logWithContext('info', 'Storing null storageUrl - will generate signed URL on-demand', {
|
|
||||||
filePath: gcsFilePath,
|
|
||||||
reason: 'Signed URL too long for database column',
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
// Truncate file names if they exceed database column limits (255 chars)
|
|
||||||
const MAX_FILE_NAME_LENGTH = 255;
|
|
||||||
const originalFileName = file.originalname;
|
|
||||||
let truncatedOriginalFileName = originalFileName;
|
|
||||||
|
|
||||||
if (originalFileName.length > MAX_FILE_NAME_LENGTH) {
|
|
||||||
// Preserve file extension when truncating
|
|
||||||
const ext = path.extname(originalFileName);
|
|
||||||
const nameWithoutExt = path.basename(originalFileName, ext);
|
|
||||||
const maxNameLength = MAX_FILE_NAME_LENGTH - ext.length;
|
|
||||||
|
|
||||||
if (maxNameLength > 0) {
|
|
||||||
truncatedOriginalFileName = nameWithoutExt.substring(0, maxNameLength) + ext;
|
|
||||||
} else {
|
|
||||||
// If extension itself is too long, just use the extension
|
|
||||||
truncatedOriginalFileName = ext.substring(0, MAX_FILE_NAME_LENGTH);
|
|
||||||
}
|
|
||||||
|
|
||||||
logWithContext('warn', 'File name truncated to fit database column', {
|
|
||||||
originalLength: originalFileName.length,
|
|
||||||
truncatedLength: truncatedOriginalFileName.length,
|
|
||||||
originalName: originalFileName.substring(0, 100) + '...',
|
|
||||||
truncatedName: truncatedOriginalFileName,
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
// Generate fileName (basename of the generated file name in GCS)
|
|
||||||
const generatedFileName = path.basename(gcsFilePath);
|
|
||||||
let truncatedFileName = generatedFileName;
|
|
||||||
|
|
||||||
if (generatedFileName.length > MAX_FILE_NAME_LENGTH) {
|
|
||||||
const ext = path.extname(generatedFileName);
|
|
||||||
const nameWithoutExt = path.basename(generatedFileName, ext);
|
|
||||||
const maxNameLength = MAX_FILE_NAME_LENGTH - ext.length;
|
|
||||||
|
|
||||||
if (maxNameLength > 0) {
|
|
||||||
truncatedFileName = nameWithoutExt.substring(0, maxNameLength) + ext;
|
|
||||||
} else {
|
|
||||||
truncatedFileName = ext.substring(0, MAX_FILE_NAME_LENGTH);
|
|
||||||
}
|
|
||||||
|
|
||||||
logWithContext('warn', 'Generated file name truncated', {
|
|
||||||
originalLength: generatedFileName.length,
|
|
||||||
truncatedLength: truncatedFileName.length,
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
// Prepare document data
|
|
||||||
const documentData = {
|
|
||||||
requestId,
|
requestId,
|
||||||
uploadedBy: userId,
|
uploadedBy: userId,
|
||||||
fileName: truncatedFileName,
|
fileName: path.basename(file.filename || file.originalname),
|
||||||
originalFileName: truncatedOriginalFileName,
|
originalFileName: file.originalname,
|
||||||
fileType: extension,
|
fileType: extension,
|
||||||
fileExtension: extension,
|
fileExtension: extension,
|
||||||
fileSize: file.size,
|
fileSize: file.size,
|
||||||
filePath: gcsFilePath, // Store GCS path or local path
|
filePath: gcsFilePath, // Store GCS path or local path
|
||||||
storageUrl: finalStorageUrl, // Store GCS URL or local URL (null if too long)
|
storageUrl: storageUrl, // Store GCS URL or local URL
|
||||||
mimeType: file.mimetype,
|
mimeType: file.mimetype,
|
||||||
checksum,
|
checksum,
|
||||||
isGoogleDoc: false,
|
isGoogleDoc: false,
|
||||||
@ -225,43 +152,7 @@ export class DocumentController {
|
|||||||
parentDocumentId: null as any,
|
parentDocumentId: null as any,
|
||||||
isDeleted: false,
|
isDeleted: false,
|
||||||
downloadCount: 0,
|
downloadCount: 0,
|
||||||
};
|
} as any);
|
||||||
|
|
||||||
logWithContext('info', 'Creating document record', {
|
|
||||||
requestId,
|
|
||||||
userId,
|
|
||||||
fileName: file.originalname,
|
|
||||||
filePath: gcsFilePath,
|
|
||||||
storageUrl: storageUrl,
|
|
||||||
documentData: JSON.stringify(documentData, null, 2),
|
|
||||||
});
|
|
||||||
|
|
||||||
let doc;
|
|
||||||
try {
|
|
||||||
doc = await Document.create(documentData as any);
|
|
||||||
logWithContext('info', 'Document record created successfully', {
|
|
||||||
documentId: doc.documentId,
|
|
||||||
requestId,
|
|
||||||
fileName: file.originalname,
|
|
||||||
});
|
|
||||||
} catch (createError) {
|
|
||||||
const createErrorMessage = createError instanceof Error ? createError.message : 'Unknown error';
|
|
||||||
const createErrorStack = createError instanceof Error ? createError.stack : undefined;
|
|
||||||
// Check if it's a Sequelize validation error
|
|
||||||
const sequelizeError = (createError as any)?.errors || (createError as any)?.parent;
|
|
||||||
logWithContext('error', 'Document.create() failed', {
|
|
||||||
error: createErrorMessage,
|
|
||||||
stack: createErrorStack,
|
|
||||||
sequelizeErrors: sequelizeError,
|
|
||||||
requestId,
|
|
||||||
userId,
|
|
||||||
fileName: file.originalname,
|
|
||||||
filePath: gcsFilePath,
|
|
||||||
storageUrl: storageUrl,
|
|
||||||
documentData: JSON.stringify(documentData, null, 2),
|
|
||||||
});
|
|
||||||
throw createError; // Re-throw to be caught by outer catch block
|
|
||||||
}
|
|
||||||
|
|
||||||
// Log document upload event
|
// Log document upload event
|
||||||
logDocumentEvent('uploaded', doc.documentId, {
|
logDocumentEvent('uploaded', doc.documentId, {
|
||||||
@ -296,205 +187,6 @@ export class DocumentController {
|
|||||||
userAgent: requestMeta.userAgent
|
userAgent: requestMeta.userAgent
|
||||||
});
|
});
|
||||||
|
|
||||||
// Send notifications for additional document added
|
|
||||||
try {
|
|
||||||
const initiatorId = (workflowRequest as any).initiatorId || (workflowRequest as any).initiator_id;
|
|
||||||
const isInitiator = userId === initiatorId;
|
|
||||||
|
|
||||||
// Get all participants (spectators)
|
|
||||||
const spectators = await Participant.findAll({
|
|
||||||
where: {
|
|
||||||
requestId,
|
|
||||||
participantType: 'SPECTATOR'
|
|
||||||
},
|
|
||||||
include: [{
|
|
||||||
model: User,
|
|
||||||
as: 'user',
|
|
||||||
attributes: ['userId', 'email', 'displayName']
|
|
||||||
}]
|
|
||||||
});
|
|
||||||
|
|
||||||
// Get current approver (pending or in-progress approval level)
|
|
||||||
const currentApprovalLevel = await ApprovalLevel.findOne({
|
|
||||||
where: {
|
|
||||||
requestId,
|
|
||||||
status: { [Op.in]: ['PENDING', 'IN_PROGRESS'] }
|
|
||||||
},
|
|
||||||
order: [['levelNumber', 'ASC']],
|
|
||||||
include: [{
|
|
||||||
model: User,
|
|
||||||
as: 'approver',
|
|
||||||
attributes: ['userId', 'email', 'displayName']
|
|
||||||
}]
|
|
||||||
});
|
|
||||||
|
|
||||||
logWithContext('info', 'Current approver lookup for document notification', {
|
|
||||||
requestId,
|
|
||||||
currentApprovalLevelFound: !!currentApprovalLevel,
|
|
||||||
approverUserId: currentApprovalLevel ? ((currentApprovalLevel as any).approver || (currentApprovalLevel as any).Approver)?.userId : null,
|
|
||||||
isInitiator
|
|
||||||
});
|
|
||||||
|
|
||||||
// Determine who to notify based on who uploaded
|
|
||||||
const recipientsToNotify: Array<{ userId: string; email: string; displayName: string }> = [];
|
|
||||||
|
|
||||||
if (isInitiator) {
|
|
||||||
// Initiator added → notify spectators and current approver
|
|
||||||
spectators.forEach((spectator: any) => {
|
|
||||||
const spectatorUser = spectator.user || spectator.User;
|
|
||||||
if (spectatorUser && spectatorUser.userId !== userId) {
|
|
||||||
recipientsToNotify.push({
|
|
||||||
userId: spectatorUser.userId,
|
|
||||||
email: spectatorUser.email,
|
|
||||||
displayName: spectatorUser.displayName || spectatorUser.email
|
|
||||||
});
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
if (currentApprovalLevel) {
|
|
||||||
const approverUser = (currentApprovalLevel as any).approver || (currentApprovalLevel as any).Approver;
|
|
||||||
if (approverUser && approverUser.userId !== userId) {
|
|
||||||
recipientsToNotify.push({
|
|
||||||
userId: approverUser.userId,
|
|
||||||
email: approverUser.email,
|
|
||||||
displayName: approverUser.displayName || approverUser.email
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
// Check if uploader is a spectator
|
|
||||||
const uploaderParticipant = await Participant.findOne({
|
|
||||||
where: {
|
|
||||||
requestId,
|
|
||||||
userId,
|
|
||||||
participantType: 'SPECTATOR'
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
if (uploaderParticipant) {
|
|
||||||
// Spectator added → notify initiator and current approver
|
|
||||||
const initiator = await User.findByPk(initiatorId);
|
|
||||||
if (initiator) {
|
|
||||||
const initiatorData = initiator.toJSON();
|
|
||||||
if (initiatorData.userId !== userId) {
|
|
||||||
recipientsToNotify.push({
|
|
||||||
userId: initiatorData.userId,
|
|
||||||
email: initiatorData.email,
|
|
||||||
displayName: initiatorData.displayName || initiatorData.email
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if (currentApprovalLevel) {
|
|
||||||
const approverUser = (currentApprovalLevel as any).approver || (currentApprovalLevel as any).Approver;
|
|
||||||
if (approverUser && approverUser.userId !== userId) {
|
|
||||||
recipientsToNotify.push({
|
|
||||||
userId: approverUser.userId,
|
|
||||||
email: approverUser.email,
|
|
||||||
displayName: approverUser.displayName || approverUser.email
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
// Approver added → notify initiator and spectators
|
|
||||||
const initiator = await User.findByPk(initiatorId);
|
|
||||||
if (initiator) {
|
|
||||||
const initiatorData = initiator.toJSON();
|
|
||||||
if (initiatorData.userId !== userId) {
|
|
||||||
recipientsToNotify.push({
|
|
||||||
userId: initiatorData.userId,
|
|
||||||
email: initiatorData.email,
|
|
||||||
displayName: initiatorData.displayName || initiatorData.email
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
spectators.forEach((spectator: any) => {
|
|
||||||
const spectatorUser = spectator.user || spectator.User;
|
|
||||||
if (spectatorUser && spectatorUser.userId !== userId) {
|
|
||||||
recipientsToNotify.push({
|
|
||||||
userId: spectatorUser.userId,
|
|
||||||
email: spectatorUser.email,
|
|
||||||
displayName: spectatorUser.displayName || spectatorUser.email
|
|
||||||
});
|
|
||||||
}
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Send notifications (email, in-app, and web-push)
|
|
||||||
const requestData = {
|
|
||||||
requestNumber: requestNumber,
|
|
||||||
requestId: requestId,
|
|
||||||
title: (workflowRequest as any).title || 'Request'
|
|
||||||
};
|
|
||||||
|
|
||||||
// Prepare user IDs for in-app and web-push notifications
|
|
||||||
const recipientUserIds = recipientsToNotify.map(r => r.userId);
|
|
||||||
|
|
||||||
// Send in-app and web-push notifications
|
|
||||||
if (recipientUserIds.length > 0) {
|
|
||||||
try {
|
|
||||||
await notificationService.sendToUsers(
|
|
||||||
recipientUserIds,
|
|
||||||
{
|
|
||||||
title: 'Additional Document Added',
|
|
||||||
body: `${uploaderName} added "${file.originalname}" to ${requestNumber}`,
|
|
||||||
requestId,
|
|
||||||
requestNumber,
|
|
||||||
url: `/request/${requestNumber}`,
|
|
||||||
type: 'document_added',
|
|
||||||
priority: 'MEDIUM',
|
|
||||||
actionRequired: false,
|
|
||||||
metadata: {
|
|
||||||
documentName: file.originalname,
|
|
||||||
fileSize: file.size,
|
|
||||||
addedByName: uploaderName,
|
|
||||||
source: 'Documents Tab'
|
|
||||||
}
|
|
||||||
}
|
|
||||||
);
|
|
||||||
logWithContext('info', 'In-app and web-push notifications sent for additional document', {
|
|
||||||
requestId,
|
|
||||||
documentName: file.originalname,
|
|
||||||
recipientsCount: recipientUserIds.length
|
|
||||||
});
|
|
||||||
} catch (notifyError) {
|
|
||||||
logWithContext('error', 'Failed to send in-app/web-push notifications for additional document', {
|
|
||||||
requestId,
|
|
||||||
error: notifyError instanceof Error ? notifyError.message : 'Unknown error'
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Send email notifications
|
|
||||||
for (const recipient of recipientsToNotify) {
|
|
||||||
await emailNotificationService.sendAdditionalDocumentAdded(
|
|
||||||
requestData,
|
|
||||||
recipient,
|
|
||||||
{
|
|
||||||
documentName: file.originalname,
|
|
||||||
fileSize: file.size,
|
|
||||||
addedByName: uploaderName,
|
|
||||||
source: 'Documents Tab'
|
|
||||||
}
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
logWithContext('info', 'Additional document notifications sent', {
|
|
||||||
requestId,
|
|
||||||
documentName: file.originalname,
|
|
||||||
recipientsCount: recipientsToNotify.length,
|
|
||||||
isInitiator
|
|
||||||
});
|
|
||||||
} catch (notifyError) {
|
|
||||||
// Don't fail document upload if notifications fail
|
|
||||||
logWithContext('error', 'Failed to send additional document notifications', {
|
|
||||||
requestId,
|
|
||||||
error: notifyError instanceof Error ? notifyError.message : 'Unknown error'
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
ResponseHandler.success(res, doc, 'File uploaded', 201);
|
ResponseHandler.success(res, doc, 'File uploaded', 201);
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
const message = error instanceof Error ? error.message : 'Unknown error';
|
const message = error instanceof Error ? error.message : 'Unknown error';
|
||||||
|
|||||||
@ -19,7 +19,6 @@ export class TemplateController {
|
|||||||
}
|
}
|
||||||
|
|
||||||
const {
|
const {
|
||||||
// New fields
|
|
||||||
templateName,
|
templateName,
|
||||||
templateCode,
|
templateCode,
|
||||||
templateDescription,
|
templateDescription,
|
||||||
@ -31,34 +30,20 @@ export class TemplateController {
|
|||||||
userFieldMappings,
|
userFieldMappings,
|
||||||
dynamicApproverConfig,
|
dynamicApproverConfig,
|
||||||
isActive,
|
isActive,
|
||||||
|
|
||||||
// Legacy fields (from frontend)
|
|
||||||
name,
|
|
||||||
description,
|
|
||||||
category,
|
|
||||||
approvers,
|
|
||||||
suggestedSLA
|
|
||||||
} = req.body;
|
} = req.body;
|
||||||
|
|
||||||
// Map legacy to new
|
if (!templateName) {
|
||||||
const finalTemplateName = templateName || name;
|
|
||||||
const finalTemplateDescription = templateDescription || description;
|
|
||||||
const finalTemplateCategory = templateCategory || category;
|
|
||||||
const finalApprovalLevelsConfig = approvalLevelsConfig || approvers;
|
|
||||||
const finalDefaultTatHours = defaultTatHours || suggestedSLA;
|
|
||||||
|
|
||||||
if (!finalTemplateName) {
|
|
||||||
return ResponseHandler.error(res, 'Template name is required', 400);
|
return ResponseHandler.error(res, 'Template name is required', 400);
|
||||||
}
|
}
|
||||||
|
|
||||||
const template = await this.templateService.createTemplate(userId, {
|
const template = await this.templateService.createTemplate(userId, {
|
||||||
templateName: finalTemplateName,
|
templateName,
|
||||||
templateCode,
|
templateCode,
|
||||||
templateDescription: finalTemplateDescription,
|
templateDescription,
|
||||||
templateCategory: finalTemplateCategory,
|
templateCategory,
|
||||||
workflowType,
|
workflowType,
|
||||||
approvalLevelsConfig: finalApprovalLevelsConfig,
|
approvalLevelsConfig,
|
||||||
defaultTatHours: finalDefaultTatHours ? parseFloat(finalDefaultTatHours) : undefined,
|
defaultTatHours: defaultTatHours ? parseFloat(defaultTatHours) : undefined,
|
||||||
formStepsConfig,
|
formStepsConfig,
|
||||||
userFieldMappings,
|
userFieldMappings,
|
||||||
dynamicApproverConfig,
|
dynamicApproverConfig,
|
||||||
@ -164,21 +149,14 @@ export class TemplateController {
|
|||||||
userFieldMappings,
|
userFieldMappings,
|
||||||
dynamicApproverConfig,
|
dynamicApproverConfig,
|
||||||
isActive,
|
isActive,
|
||||||
|
|
||||||
// Legacy
|
|
||||||
name,
|
|
||||||
description,
|
|
||||||
category,
|
|
||||||
approvers,
|
|
||||||
suggestedSLA
|
|
||||||
} = req.body;
|
} = req.body;
|
||||||
|
|
||||||
const template = await this.templateService.updateTemplate(templateId, userId, {
|
const template = await this.templateService.updateTemplate(templateId, userId, {
|
||||||
templateName: templateName || name,
|
templateName,
|
||||||
templateDescription: templateDescription || description,
|
templateDescription,
|
||||||
templateCategory: templateCategory || category,
|
templateCategory,
|
||||||
approvalLevelsConfig: approvalLevelsConfig || approvers,
|
approvalLevelsConfig,
|
||||||
defaultTatHours: (defaultTatHours || suggestedSLA) ? parseFloat(defaultTatHours || suggestedSLA) : undefined,
|
defaultTatHours: defaultTatHours ? parseFloat(defaultTatHours) : undefined,
|
||||||
formStepsConfig,
|
formStepsConfig,
|
||||||
userFieldMappings,
|
userFieldMappings,
|
||||||
dynamicApproverConfig,
|
dynamicApproverConfig,
|
||||||
|
|||||||
@ -10,37 +10,13 @@ export class UserController {
|
|||||||
this.userService = new UserService();
|
this.userService = new UserService();
|
||||||
}
|
}
|
||||||
|
|
||||||
async getAllUsers(req: Request, res: Response): Promise<void> {
|
|
||||||
try {
|
|
||||||
const users = await this.userService.getAllUsers();
|
|
||||||
|
|
||||||
const result = {
|
|
||||||
users: users.map(u => ({
|
|
||||||
userId: u.userId,
|
|
||||||
email: u.email,
|
|
||||||
displayName: u.displayName,
|
|
||||||
department: u.department,
|
|
||||||
designation: u.designation,
|
|
||||||
isActive: u.isActive,
|
|
||||||
})),
|
|
||||||
total: users.length
|
|
||||||
};
|
|
||||||
|
|
||||||
ResponseHandler.success(res, result, 'All users fetched');
|
|
||||||
} catch (error) {
|
|
||||||
logger.error('Failed to fetch all users', { error });
|
|
||||||
ResponseHandler.error(res, 'Failed to fetch all users', 500);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
async searchUsers(req: Request, res: Response): Promise<void> {
|
async searchUsers(req: Request, res: Response): Promise<void> {
|
||||||
try {
|
try {
|
||||||
const q = String(req.query.q || '').trim();
|
const q = String(req.query.q || '').trim();
|
||||||
const limit = Number(req.query.limit || 10);
|
const limit = Number(req.query.limit || 10);
|
||||||
const source = String(req.query.source || 'default') as 'local' | 'okta' | 'default';
|
|
||||||
const currentUserId = (req as any).user?.userId || (req as any).user?.id;
|
const currentUserId = (req as any).user?.userId || (req as any).user?.id;
|
||||||
|
|
||||||
const users = await this.userService.searchUsers(q, limit, currentUserId, source);
|
const users = await this.userService.searchUsers(q, limit, currentUserId);
|
||||||
|
|
||||||
const result = users.map(u => ({
|
const result = users.map(u => ({
|
||||||
userId: (u as any).userId,
|
userId: (u as any).userId,
|
||||||
@ -93,31 +69,6 @@ export class UserController {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
async getUserById(req: Request, res: Response): Promise<void> {
|
|
||||||
try {
|
|
||||||
const { userId } = req.params;
|
|
||||||
const user = await this.userService.getUserById(userId);
|
|
||||||
|
|
||||||
if (!user) {
|
|
||||||
ResponseHandler.error(res, 'User not found', 404);
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
ResponseHandler.success(res, {
|
|
||||||
userId: user.userId,
|
|
||||||
email: user.email,
|
|
||||||
displayName: user.displayName,
|
|
||||||
firstName: user.firstName,
|
|
||||||
lastName: user.lastName,
|
|
||||||
department: user.department,
|
|
||||||
isActive: user.isActive
|
|
||||||
}, 'User fetched');
|
|
||||||
} catch (error) {
|
|
||||||
logger.error('Failed to fetch user by ID', { error });
|
|
||||||
ResponseHandler.error(res, 'Failed to fetch user by ID', 500);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Ensure user exists in database (create if not exists)
|
* Ensure user exists in database (create if not exists)
|
||||||
* Called when user is selected/tagged in the frontend
|
* Called when user is selected/tagged in the frontend
|
||||||
|
|||||||
@ -12,12 +12,10 @@ import fs from 'fs';
|
|||||||
import path from 'path';
|
import path from 'path';
|
||||||
import crypto from 'crypto';
|
import crypto from 'crypto';
|
||||||
import { getRequestMetadata } from '@utils/requestUtils';
|
import { getRequestMetadata } from '@utils/requestUtils';
|
||||||
import { enrichApprovalLevels, enrichSpectators, validateInitiator, validateDealerUser } from '@services/userEnrichment.service';
|
import { enrichApprovalLevels, enrichSpectators, validateInitiator } from '@services/userEnrichment.service';
|
||||||
import { DealerClaimService } from '@services/dealerClaim.service';
|
|
||||||
import logger from '@utils/logger';
|
import logger from '@utils/logger';
|
||||||
|
|
||||||
const workflowService = new WorkflowService();
|
const workflowService = new WorkflowService();
|
||||||
const dealerClaimService = new DealerClaimService();
|
|
||||||
|
|
||||||
export class WorkflowController {
|
export class WorkflowController {
|
||||||
async createWorkflow(req: AuthenticatedRequest, res: Response): Promise<void> {
|
async createWorkflow(req: AuthenticatedRequest, res: Response): Promise<void> {
|
||||||
@ -27,15 +25,6 @@ export class WorkflowController {
|
|||||||
// Validate initiator exists
|
// Validate initiator exists
|
||||||
await validateInitiator(req.user.userId);
|
await validateInitiator(req.user.userId);
|
||||||
|
|
||||||
// Dealer Validation if dealerCode is provided or it's a DEALER CLAIM
|
|
||||||
const dealerCode = req.body.dealerCode || (req.body as any).dealer_code;
|
|
||||||
if (dealerCode || validatedData.templateType === 'DEALER CLAIM') {
|
|
||||||
if (!dealerCode) {
|
|
||||||
throw new Error('Dealer code is required for dealer claim requests');
|
|
||||||
}
|
|
||||||
await validateDealerUser(dealerCode);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Handle frontend format: map 'approvers' -> 'approvalLevels' for backward compatibility
|
// Handle frontend format: map 'approvers' -> 'approvalLevels' for backward compatibility
|
||||||
let approvalLevels = validatedData.approvalLevels || [];
|
let approvalLevels = validatedData.approvalLevels || [];
|
||||||
if (!approvalLevels.length && (req.body as any).approvers) {
|
if (!approvalLevels.length && (req.body as any).approvers) {
|
||||||
@ -179,15 +168,6 @@ export class WorkflowController {
|
|||||||
// Validate initiator exists
|
// Validate initiator exists
|
||||||
await validateInitiator(userId);
|
await validateInitiator(userId);
|
||||||
|
|
||||||
// Dealer Validation if dealerCode is provided or it's a DEALER CLAIM
|
|
||||||
const dealerCode = parsed.dealerCode || parsed.dealer_code;
|
|
||||||
if (dealerCode || validated.templateType === 'DEALER CLAIM') {
|
|
||||||
if (!dealerCode) {
|
|
||||||
throw new Error('Dealer code is required for dealer claim requests');
|
|
||||||
}
|
|
||||||
await validateDealerUser(dealerCode);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Use the approval levels from validation (already transformed above)
|
// Use the approval levels from validation (already transformed above)
|
||||||
let approvalLevels = validated.approvalLevels || [];
|
let approvalLevels = validated.approvalLevels || [];
|
||||||
|
|
||||||
@ -254,7 +234,6 @@ export class WorkflowController {
|
|||||||
priority: validated.priority as Priority,
|
priority: validated.priority as Priority,
|
||||||
approvalLevels: enrichedApprovalLevels,
|
approvalLevels: enrichedApprovalLevels,
|
||||||
participants: autoGeneratedParticipants,
|
participants: autoGeneratedParticipants,
|
||||||
isDraft: parsed.isDraft === true, // Submit by default unless isDraft is explicitly true
|
|
||||||
} as any;
|
} as any;
|
||||||
|
|
||||||
const requestMeta = getRequestMetadata(req);
|
const requestMeta = getRequestMetadata(req);
|
||||||
@ -300,85 +279,16 @@ export class WorkflowController {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Truncate file names if they exceed database column limits (255 chars)
|
|
||||||
const MAX_FILE_NAME_LENGTH = 255;
|
|
||||||
const originalFileName = file.originalname;
|
|
||||||
let truncatedOriginalFileName = originalFileName;
|
|
||||||
|
|
||||||
if (originalFileName.length > MAX_FILE_NAME_LENGTH) {
|
|
||||||
// Preserve file extension when truncating
|
|
||||||
const ext = path.extname(originalFileName);
|
|
||||||
const nameWithoutExt = path.basename(originalFileName, ext);
|
|
||||||
const maxNameLength = MAX_FILE_NAME_LENGTH - ext.length;
|
|
||||||
|
|
||||||
if (maxNameLength > 0) {
|
|
||||||
truncatedOriginalFileName = nameWithoutExt.substring(0, maxNameLength) + ext;
|
|
||||||
} else {
|
|
||||||
// If extension itself is too long, just use the extension
|
|
||||||
truncatedOriginalFileName = ext.substring(0, MAX_FILE_NAME_LENGTH);
|
|
||||||
}
|
|
||||||
|
|
||||||
logger.warn('[Workflow] File name truncated to fit database column', {
|
|
||||||
originalLength: originalFileName.length,
|
|
||||||
truncatedLength: truncatedOriginalFileName.length,
|
|
||||||
originalName: originalFileName.substring(0, 100) + '...',
|
|
||||||
truncatedName: truncatedOriginalFileName,
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
// Generate fileName (basename of the generated file name in GCS)
|
|
||||||
const generatedFileName = path.basename(gcsFilePath);
|
|
||||||
let truncatedFileName = generatedFileName;
|
|
||||||
|
|
||||||
if (generatedFileName.length > MAX_FILE_NAME_LENGTH) {
|
|
||||||
const ext = path.extname(generatedFileName);
|
|
||||||
const nameWithoutExt = path.basename(generatedFileName, ext);
|
|
||||||
const maxNameLength = MAX_FILE_NAME_LENGTH - ext.length;
|
|
||||||
|
|
||||||
if (maxNameLength > 0) {
|
|
||||||
truncatedFileName = nameWithoutExt.substring(0, maxNameLength) + ext;
|
|
||||||
} else {
|
|
||||||
truncatedFileName = ext.substring(0, MAX_FILE_NAME_LENGTH);
|
|
||||||
}
|
|
||||||
|
|
||||||
logger.warn('[Workflow] Generated file name truncated', {
|
|
||||||
originalLength: generatedFileName.length,
|
|
||||||
truncatedLength: truncatedFileName.length,
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
// Check if storageUrl exceeds database column limit (500 chars)
|
|
||||||
const MAX_STORAGE_URL_LENGTH = 500;
|
|
||||||
let finalStorageUrl = storageUrl;
|
|
||||||
if (storageUrl && storageUrl.length > MAX_STORAGE_URL_LENGTH) {
|
|
||||||
logger.warn('[Workflow] Storage URL exceeds database column limit, storing null', {
|
|
||||||
originalLength: storageUrl.length,
|
|
||||||
maxLength: MAX_STORAGE_URL_LENGTH,
|
|
||||||
urlPrefix: storageUrl.substring(0, 100),
|
|
||||||
filePath: gcsFilePath,
|
|
||||||
});
|
|
||||||
// For signed URLs, store null and generate on-demand later
|
|
||||||
finalStorageUrl = null as any;
|
|
||||||
}
|
|
||||||
|
|
||||||
logger.info('[Workflow] Creating document record', {
|
|
||||||
fileName: truncatedOriginalFileName,
|
|
||||||
filePath: gcsFilePath,
|
|
||||||
storageUrl: finalStorageUrl ? 'present' : 'null (too long)',
|
|
||||||
requestId: workflow.requestId
|
|
||||||
});
|
|
||||||
|
|
||||||
try {
|
|
||||||
const doc = await Document.create({
|
const doc = await Document.create({
|
||||||
requestId: workflow.requestId,
|
requestId: workflow.requestId,
|
||||||
uploadedBy: userId,
|
uploadedBy: userId,
|
||||||
fileName: truncatedFileName,
|
fileName: path.basename(file.filename || file.originalname),
|
||||||
originalFileName: truncatedOriginalFileName,
|
originalFileName: file.originalname,
|
||||||
fileType: extension,
|
fileType: extension,
|
||||||
fileExtension: extension,
|
fileExtension: extension,
|
||||||
fileSize: file.size,
|
fileSize: file.size,
|
||||||
filePath: gcsFilePath, // Store GCS path or local path
|
filePath: gcsFilePath, // Store GCS path or local path
|
||||||
storageUrl: finalStorageUrl, // Store GCS URL or local URL (null if too long)
|
storageUrl: storageUrl, // Store GCS URL or local URL
|
||||||
mimeType: file.mimetype,
|
mimeType: file.mimetype,
|
||||||
checksum,
|
checksum,
|
||||||
isGoogleDoc: false,
|
isGoogleDoc: false,
|
||||||
@ -390,24 +300,6 @@ export class WorkflowController {
|
|||||||
downloadCount: 0,
|
downloadCount: 0,
|
||||||
} as any);
|
} as any);
|
||||||
docs.push(doc);
|
docs.push(doc);
|
||||||
logger.info('[Workflow] Document record created successfully', {
|
|
||||||
documentId: doc.documentId,
|
|
||||||
fileName: file.originalname,
|
|
||||||
});
|
|
||||||
} catch (docError) {
|
|
||||||
const docErrorMessage = docError instanceof Error ? docError.message : 'Unknown error';
|
|
||||||
const docErrorStack = docError instanceof Error ? docError.stack : undefined;
|
|
||||||
logger.error('[Workflow] Failed to create document record', {
|
|
||||||
error: docErrorMessage,
|
|
||||||
stack: docErrorStack,
|
|
||||||
fileName: file.originalname,
|
|
||||||
requestId: workflow.requestId,
|
|
||||||
filePath: gcsFilePath,
|
|
||||||
storageUrl: storageUrl,
|
|
||||||
});
|
|
||||||
// Re-throw to be caught by outer catch block
|
|
||||||
throw docError;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Log document upload activity
|
// Log document upload activity
|
||||||
const requestMeta = getRequestMetadata(req);
|
const requestMeta = getRequestMetadata(req);
|
||||||
@ -428,13 +320,6 @@ export class WorkflowController {
|
|||||||
ResponseHandler.success(res, { requestId: workflow.requestId, documents: docs }, 'Workflow created with documents', 201);
|
ResponseHandler.success(res, { requestId: workflow.requestId, documents: docs }, 'Workflow created with documents', 201);
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
|
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
|
||||||
const errorStack = error instanceof Error ? error.stack : undefined;
|
|
||||||
logger.error('[WorkflowController] createWorkflowMultipart failed', {
|
|
||||||
error: errorMessage,
|
|
||||||
stack: errorStack,
|
|
||||||
userId: req.user?.userId,
|
|
||||||
filesCount: (req as any).files?.length || 0,
|
|
||||||
});
|
|
||||||
ResponseHandler.error(res, 'Failed to create workflow', 400, errorMessage);
|
ResponseHandler.error(res, 'Failed to create workflow', 400, errorMessage);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -495,7 +380,6 @@ export class WorkflowController {
|
|||||||
search: req.query.search as string | undefined,
|
search: req.query.search as string | undefined,
|
||||||
status: req.query.status as string | undefined,
|
status: req.query.status as string | undefined,
|
||||||
priority: req.query.priority as string | undefined,
|
priority: req.query.priority as string | undefined,
|
||||||
templateType: req.query.templateType as string | undefined,
|
|
||||||
department: req.query.department as string | undefined,
|
department: req.query.department as string | undefined,
|
||||||
initiator: req.query.initiator as string | undefined,
|
initiator: req.query.initiator as string | undefined,
|
||||||
approver: req.query.approver as string | undefined,
|
approver: req.query.approver as string | undefined,
|
||||||
@ -557,7 +441,6 @@ export class WorkflowController {
|
|||||||
const search = req.query.search as string | undefined;
|
const search = req.query.search as string | undefined;
|
||||||
const status = req.query.status as string | undefined;
|
const status = req.query.status as string | undefined;
|
||||||
const priority = req.query.priority as string | undefined;
|
const priority = req.query.priority as string | undefined;
|
||||||
const templateType = req.query.templateType as string | undefined;
|
|
||||||
const department = req.query.department as string | undefined;
|
const department = req.query.department as string | undefined;
|
||||||
const initiator = req.query.initiator as string | undefined;
|
const initiator = req.query.initiator as string | undefined;
|
||||||
const approver = req.query.approver as string | undefined;
|
const approver = req.query.approver as string | undefined;
|
||||||
@ -567,7 +450,7 @@ export class WorkflowController {
|
|||||||
const startDate = req.query.startDate as string | undefined;
|
const startDate = req.query.startDate as string | undefined;
|
||||||
const endDate = req.query.endDate as string | undefined;
|
const endDate = req.query.endDate as string | undefined;
|
||||||
|
|
||||||
const filters = { search, status, priority, templateType, department, initiator, approver, approverType, slaCompliance, dateRange, startDate, endDate };
|
const filters = { search, status, priority, department, initiator, approver, approverType, slaCompliance, dateRange, startDate, endDate };
|
||||||
|
|
||||||
const result = await workflowService.listParticipantRequests(userId, page, limit, filters);
|
const result = await workflowService.listParticipantRequests(userId, page, limit, filters);
|
||||||
ResponseHandler.success(res, result, 'Participant requests fetched');
|
ResponseHandler.success(res, result, 'Participant requests fetched');
|
||||||
@ -590,14 +473,13 @@ export class WorkflowController {
|
|||||||
const search = req.query.search as string | undefined;
|
const search = req.query.search as string | undefined;
|
||||||
const status = req.query.status as string | undefined;
|
const status = req.query.status as string | undefined;
|
||||||
const priority = req.query.priority as string | undefined;
|
const priority = req.query.priority as string | undefined;
|
||||||
const templateType = req.query.templateType as string | undefined;
|
|
||||||
const department = req.query.department as string | undefined;
|
const department = req.query.department as string | undefined;
|
||||||
const slaCompliance = req.query.slaCompliance as string | undefined;
|
const slaCompliance = req.query.slaCompliance as string | undefined;
|
||||||
const dateRange = req.query.dateRange as string | undefined;
|
const dateRange = req.query.dateRange as string | undefined;
|
||||||
const startDate = req.query.startDate as string | undefined;
|
const startDate = req.query.startDate as string | undefined;
|
||||||
const endDate = req.query.endDate as string | undefined;
|
const endDate = req.query.endDate as string | undefined;
|
||||||
|
|
||||||
const filters = { search, status, priority, templateType, department, slaCompliance, dateRange, startDate, endDate };
|
const filters = { search, status, priority, department, slaCompliance, dateRange, startDate, endDate };
|
||||||
|
|
||||||
const result = await workflowService.listMyInitiatedRequests(userId, page, limit, filters);
|
const result = await workflowService.listMyInitiatedRequests(userId, page, limit, filters);
|
||||||
ResponseHandler.success(res, result, 'My initiated requests fetched');
|
ResponseHandler.success(res, result, 'My initiated requests fetched');
|
||||||
@ -617,8 +499,7 @@ export class WorkflowController {
|
|||||||
const filters = {
|
const filters = {
|
||||||
search: req.query.search as string | undefined,
|
search: req.query.search as string | undefined,
|
||||||
status: req.query.status as string | undefined,
|
status: req.query.status as string | undefined,
|
||||||
priority: req.query.priority as string | undefined,
|
priority: req.query.priority as string | undefined
|
||||||
templateType: req.query.templateType as string | undefined
|
|
||||||
};
|
};
|
||||||
|
|
||||||
// Extract sorting parameters
|
// Extract sorting parameters
|
||||||
@ -643,8 +524,7 @@ export class WorkflowController {
|
|||||||
const filters = {
|
const filters = {
|
||||||
search: req.query.search as string | undefined,
|
search: req.query.search as string | undefined,
|
||||||
status: req.query.status as string | undefined,
|
status: req.query.status as string | undefined,
|
||||||
priority: req.query.priority as string | undefined,
|
priority: req.query.priority as string | undefined
|
||||||
templateType: req.query.templateType as string | undefined
|
|
||||||
};
|
};
|
||||||
|
|
||||||
// Extract sorting parameters
|
// Extract sorting parameters
|
||||||
@ -701,37 +581,17 @@ export class WorkflowController {
|
|||||||
}
|
}
|
||||||
const parsed = JSON.parse(raw);
|
const parsed = JSON.parse(raw);
|
||||||
const validated = validateUpdateWorkflow(parsed);
|
const validated = validateUpdateWorkflow(parsed);
|
||||||
const updateData: UpdateWorkflowRequest = {
|
const updateData: UpdateWorkflowRequest = { ...validated } as any;
|
||||||
...validated,
|
|
||||||
isDraft: parsed.isDraft !== undefined ? (parsed.isDraft === true) : undefined
|
|
||||||
} as any;
|
|
||||||
if (validated.priority) {
|
if (validated.priority) {
|
||||||
updateData.priority = validated.priority === 'EXPRESS' ? Priority.EXPRESS : Priority.STANDARD;
|
updateData.priority = validated.priority === 'EXPRESS' ? Priority.EXPRESS : Priority.STANDARD;
|
||||||
}
|
}
|
||||||
|
|
||||||
// Update workflow
|
// Update workflow
|
||||||
let workflow;
|
const workflow = await workflowService.updateWorkflow(id, updateData);
|
||||||
try {
|
|
||||||
workflow = await workflowService.updateWorkflow(id, updateData);
|
|
||||||
if (!workflow) {
|
if (!workflow) {
|
||||||
ResponseHandler.notFound(res, 'Workflow not found');
|
ResponseHandler.notFound(res, 'Workflow not found');
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
logger.info('[WorkflowController] Workflow updated successfully', {
|
|
||||||
requestId: id,
|
|
||||||
workflowId: (workflow as any).requestId,
|
|
||||||
});
|
|
||||||
} catch (updateError) {
|
|
||||||
const updateErrorMessage = updateError instanceof Error ? updateError.message : 'Unknown error';
|
|
||||||
const updateErrorStack = updateError instanceof Error ? updateError.stack : undefined;
|
|
||||||
logger.error('[WorkflowController] updateWorkflow failed', {
|
|
||||||
error: updateErrorMessage,
|
|
||||||
stack: updateErrorStack,
|
|
||||||
requestId: id,
|
|
||||||
updateData: JSON.stringify(updateData, null, 2),
|
|
||||||
});
|
|
||||||
throw updateError; // Re-throw to be caught by outer catch block
|
|
||||||
}
|
|
||||||
|
|
||||||
// Attach new files as documents
|
// Attach new files as documents
|
||||||
const files = (req as any).files as Express.Multer.File[] | undefined;
|
const files = (req as any).files as Express.Multer.File[] | undefined;
|
||||||
@ -767,85 +627,23 @@ export class WorkflowController {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Truncate file names if they exceed database column limits (255 chars)
|
|
||||||
const MAX_FILE_NAME_LENGTH = 255;
|
|
||||||
const originalFileName = file.originalname;
|
|
||||||
let truncatedOriginalFileName = originalFileName;
|
|
||||||
|
|
||||||
if (originalFileName.length > MAX_FILE_NAME_LENGTH) {
|
|
||||||
// Preserve file extension when truncating
|
|
||||||
const ext = path.extname(originalFileName);
|
|
||||||
const nameWithoutExt = path.basename(originalFileName, ext);
|
|
||||||
const maxNameLength = MAX_FILE_NAME_LENGTH - ext.length;
|
|
||||||
|
|
||||||
if (maxNameLength > 0) {
|
|
||||||
truncatedOriginalFileName = nameWithoutExt.substring(0, maxNameLength) + ext;
|
|
||||||
} else {
|
|
||||||
// If extension itself is too long, just use the extension
|
|
||||||
truncatedOriginalFileName = ext.substring(0, MAX_FILE_NAME_LENGTH);
|
|
||||||
}
|
|
||||||
|
|
||||||
logger.warn('[Workflow] File name truncated to fit database column', {
|
|
||||||
originalLength: originalFileName.length,
|
|
||||||
truncatedLength: truncatedOriginalFileName.length,
|
|
||||||
originalName: originalFileName.substring(0, 100) + '...',
|
|
||||||
truncatedName: truncatedOriginalFileName,
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
// Generate fileName (basename of the generated file name in GCS)
|
|
||||||
const generatedFileName = path.basename(gcsFilePath);
|
|
||||||
let truncatedFileName = generatedFileName;
|
|
||||||
|
|
||||||
if (generatedFileName.length > MAX_FILE_NAME_LENGTH) {
|
|
||||||
const ext = path.extname(generatedFileName);
|
|
||||||
const nameWithoutExt = path.basename(generatedFileName, ext);
|
|
||||||
const maxNameLength = MAX_FILE_NAME_LENGTH - ext.length;
|
|
||||||
|
|
||||||
if (maxNameLength > 0) {
|
|
||||||
truncatedFileName = nameWithoutExt.substring(0, maxNameLength) + ext;
|
|
||||||
} else {
|
|
||||||
truncatedFileName = ext.substring(0, MAX_FILE_NAME_LENGTH);
|
|
||||||
}
|
|
||||||
|
|
||||||
logger.warn('[Workflow] Generated file name truncated', {
|
|
||||||
originalLength: generatedFileName.length,
|
|
||||||
truncatedLength: truncatedFileName.length,
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
// Check if storageUrl exceeds database column limit (500 chars)
|
|
||||||
const MAX_STORAGE_URL_LENGTH = 500;
|
|
||||||
let finalStorageUrl = storageUrl;
|
|
||||||
if (storageUrl && storageUrl.length > MAX_STORAGE_URL_LENGTH) {
|
|
||||||
logger.warn('[Workflow] Storage URL exceeds database column limit, storing null', {
|
|
||||||
originalLength: storageUrl.length,
|
|
||||||
maxLength: MAX_STORAGE_URL_LENGTH,
|
|
||||||
urlPrefix: storageUrl.substring(0, 100),
|
|
||||||
filePath: gcsFilePath,
|
|
||||||
});
|
|
||||||
// For signed URLs, store null and generate on-demand later
|
|
||||||
finalStorageUrl = null as any;
|
|
||||||
}
|
|
||||||
|
|
||||||
logger.info('[Workflow] Creating document record', {
|
logger.info('[Workflow] Creating document record', {
|
||||||
fileName: truncatedOriginalFileName,
|
fileName: file.originalname,
|
||||||
filePath: gcsFilePath,
|
filePath: gcsFilePath,
|
||||||
storageUrl: finalStorageUrl ? 'present' : 'null (too long)',
|
storageUrl: storageUrl,
|
||||||
requestId: actualRequestId
|
requestId: actualRequestId
|
||||||
});
|
});
|
||||||
|
|
||||||
try {
|
|
||||||
const doc = await Document.create({
|
const doc = await Document.create({
|
||||||
requestId: actualRequestId,
|
requestId: actualRequestId,
|
||||||
uploadedBy: userId,
|
uploadedBy: userId,
|
||||||
fileName: truncatedFileName,
|
fileName: path.basename(file.filename || file.originalname),
|
||||||
originalFileName: truncatedOriginalFileName,
|
originalFileName: file.originalname,
|
||||||
fileType: extension,
|
fileType: extension,
|
||||||
fileExtension: extension,
|
fileExtension: extension,
|
||||||
fileSize: file.size,
|
fileSize: file.size,
|
||||||
filePath: gcsFilePath, // Store GCS path or local path
|
filePath: gcsFilePath, // Store GCS path or local path
|
||||||
storageUrl: finalStorageUrl, // Store GCS URL or local URL (null if too long)
|
storageUrl: storageUrl, // Store GCS URL or local URL
|
||||||
mimeType: file.mimetype,
|
mimeType: file.mimetype,
|
||||||
checksum,
|
checksum,
|
||||||
isGoogleDoc: false,
|
isGoogleDoc: false,
|
||||||
@ -857,39 +655,12 @@ export class WorkflowController {
|
|||||||
downloadCount: 0,
|
downloadCount: 0,
|
||||||
} as any);
|
} as any);
|
||||||
docs.push(doc);
|
docs.push(doc);
|
||||||
logger.info('[Workflow] Document record created successfully', {
|
|
||||||
documentId: doc.documentId,
|
|
||||||
fileName: file.originalname,
|
|
||||||
});
|
|
||||||
} catch (docError) {
|
|
||||||
const docErrorMessage = docError instanceof Error ? docError.message : 'Unknown error';
|
|
||||||
const docErrorStack = docError instanceof Error ? docError.stack : undefined;
|
|
||||||
logger.error('[Workflow] Failed to create document record', {
|
|
||||||
error: docErrorMessage,
|
|
||||||
stack: docErrorStack,
|
|
||||||
fileName: file.originalname,
|
|
||||||
requestId: actualRequestId,
|
|
||||||
filePath: gcsFilePath,
|
|
||||||
storageUrl: storageUrl,
|
|
||||||
});
|
|
||||||
// Continue with other files, but log the error
|
|
||||||
// Don't throw here - let the workflow update complete
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
ResponseHandler.success(res, { workflow, newDocuments: docs }, 'Workflow updated with documents', 200);
|
ResponseHandler.success(res, { workflow, newDocuments: docs }, 'Workflow updated with documents', 200);
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
|
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
|
||||||
const errorStack = error instanceof Error ? error.stack : undefined;
|
|
||||||
logger.error('[WorkflowController] updateWorkflowMultipart failed', {
|
|
||||||
error: errorMessage,
|
|
||||||
stack: errorStack,
|
|
||||||
requestId: req.params.id,
|
|
||||||
userId: req.user?.userId,
|
|
||||||
hasFiles: !!(req as any).files && (req as any).files.length > 0,
|
|
||||||
fileCount: (req as any).files ? (req as any).files.length : 0,
|
|
||||||
});
|
|
||||||
ResponseHandler.error(res, 'Failed to update workflow', 400, errorMessage);
|
ResponseHandler.error(res, 'Failed to update workflow', 400, errorMessage);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -910,54 +681,4 @@ export class WorkflowController {
|
|||||||
ResponseHandler.error(res, 'Failed to submit workflow', 400, errorMessage);
|
ResponseHandler.error(res, 'Failed to submit workflow', 400, errorMessage);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
async handleInitiatorAction(req: Request, res: Response): Promise<void> {
|
|
||||||
try {
|
|
||||||
const { id } = req.params;
|
|
||||||
const { action, ...data } = req.body;
|
|
||||||
const userId = req.user?.userId;
|
|
||||||
|
|
||||||
if (!userId) {
|
|
||||||
ResponseHandler.unauthorized(res, 'User ID missing from request');
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
await dealerClaimService.handleInitiatorAction(id, userId, action as any, data);
|
|
||||||
ResponseHandler.success(res, null, `Action ${action} performed successfully`);
|
|
||||||
} catch (error) {
|
|
||||||
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
|
|
||||||
logger.error('[WorkflowController] handleInitiatorAction failed', {
|
|
||||||
error: errorMessage,
|
|
||||||
requestId: req.params.id,
|
|
||||||
userId: req.user?.userId,
|
|
||||||
action: req.body.action
|
|
||||||
});
|
|
||||||
ResponseHandler.error(res, 'Failed to perform initiator action', 400, errorMessage);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
async getHistory(req: Request, res: Response): Promise<void> {
|
|
||||||
try {
|
|
||||||
const { id } = req.params;
|
|
||||||
|
|
||||||
// Resolve requestId UUID from identifier (could be requestNumber or UUID)
|
|
||||||
const workflowService = new WorkflowService();
|
|
||||||
const wf = await (workflowService as any).findWorkflowByIdentifier(id);
|
|
||||||
if (!wf) {
|
|
||||||
ResponseHandler.notFound(res, 'Workflow not found');
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
const requestId = wf.getDataValue('requestId');
|
|
||||||
|
|
||||||
const history = await dealerClaimService.getHistory(requestId);
|
|
||||||
ResponseHandler.success(res, history, 'Revision history fetched successfully');
|
|
||||||
} catch (error) {
|
|
||||||
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
|
|
||||||
logger.error('[WorkflowController] getHistory failed', {
|
|
||||||
error: errorMessage,
|
|
||||||
requestId: req.params.id
|
|
||||||
});
|
|
||||||
ResponseHandler.error(res, 'Failed to fetch revision history', 400, errorMessage);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|||||||
@ -1,130 +0,0 @@
|
|||||||
import { Request, Response } from 'express';
|
|
||||||
import { WorkflowTemplate } from '../models';
|
|
||||||
import logger from '../utils/logger';
|
|
||||||
|
|
||||||
export const createTemplate = async (req: Request, res: Response) => {
|
|
||||||
try {
|
|
||||||
const { name, description, category, priority, estimatedTime, approvers, suggestedSLA } = req.body;
|
|
||||||
const userId = (req as any).user?.userId;
|
|
||||||
|
|
||||||
const template = await WorkflowTemplate.create({
|
|
||||||
templateName: name,
|
|
||||||
templateDescription: description,
|
|
||||||
templateCategory: category,
|
|
||||||
approvalLevelsConfig: approvers,
|
|
||||||
defaultTatHours: suggestedSLA,
|
|
||||||
createdBy: userId,
|
|
||||||
isActive: true,
|
|
||||||
isSystemTemplate: false,
|
|
||||||
usageCount: 0
|
|
||||||
});
|
|
||||||
|
|
||||||
res.status(201).json({
|
|
||||||
success: true,
|
|
||||||
message: 'Workflow template created successfully',
|
|
||||||
data: template
|
|
||||||
});
|
|
||||||
} catch (error) {
|
|
||||||
logger.error('Error creating workflow template:', error);
|
|
||||||
res.status(500).json({
|
|
||||||
success: false,
|
|
||||||
message: 'Failed to create workflow template',
|
|
||||||
error: error instanceof Error ? error.message : 'Unknown error'
|
|
||||||
});
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
export const getTemplates = async (req: Request, res: Response) => {
|
|
||||||
try {
|
|
||||||
const templates = await WorkflowTemplate.findAll({
|
|
||||||
where: { isActive: true },
|
|
||||||
order: [['createdAt', 'DESC']]
|
|
||||||
});
|
|
||||||
|
|
||||||
res.status(200).json({
|
|
||||||
success: true,
|
|
||||||
data: templates
|
|
||||||
});
|
|
||||||
} catch (error) {
|
|
||||||
logger.error('Error fetching workflow templates:', error);
|
|
||||||
res.status(500).json({
|
|
||||||
success: false,
|
|
||||||
message: 'Failed to fetch workflow templates',
|
|
||||||
error: error instanceof Error ? error.message : 'Unknown error'
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
export const updateTemplate = async (req: Request, res: Response) => {
|
|
||||||
try {
|
|
||||||
const { id } = req.params;
|
|
||||||
const { name, description, category, approvers, suggestedSLA, isActive } = req.body;
|
|
||||||
|
|
||||||
const updates: any = {};
|
|
||||||
if (name) updates.templateName = name;
|
|
||||||
if (description) updates.templateDescription = description;
|
|
||||||
if (category) updates.templateCategory = category;
|
|
||||||
if (approvers) updates.approvalLevelsConfig = approvers;
|
|
||||||
if (suggestedSLA) updates.defaultTatHours = suggestedSLA;
|
|
||||||
if (isActive !== undefined) updates.isActive = isActive;
|
|
||||||
|
|
||||||
const template = await WorkflowTemplate.findByPk(id);
|
|
||||||
|
|
||||||
if (!template) {
|
|
||||||
return res.status(404).json({
|
|
||||||
success: false,
|
|
||||||
message: 'Workflow template not found'
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
await template.update(updates);
|
|
||||||
|
|
||||||
return res.status(200).json({
|
|
||||||
success: true,
|
|
||||||
message: 'Workflow template updated successfully',
|
|
||||||
data: template
|
|
||||||
});
|
|
||||||
} catch (error) {
|
|
||||||
logger.error('Error updating workflow template:', error);
|
|
||||||
return res.status(500).json({
|
|
||||||
success: false,
|
|
||||||
message: 'Failed to update workflow template',
|
|
||||||
error: error instanceof Error ? error.message : 'Unknown error'
|
|
||||||
});
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
export const deleteTemplate = async (req: Request, res: Response) => {
|
|
||||||
try {
|
|
||||||
const { id } = req.params;
|
|
||||||
const template = await WorkflowTemplate.findByPk(id);
|
|
||||||
|
|
||||||
if (!template) {
|
|
||||||
return res.status(404).json({
|
|
||||||
success: false,
|
|
||||||
message: 'Workflow template not found'
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
// Hard delete or Soft delete based on preference.
|
|
||||||
// Since we have isActive flag, let's use that (Soft Delete) or just destroy if it's unused.
|
|
||||||
// For now, let's do a hard delete to match the expectation of "Delete" in the UI
|
|
||||||
// unless there are FK constraints (which sequelize handles).
|
|
||||||
// Actually, safer to Soft Delete by setting isActive = false if we want history,
|
|
||||||
// but user asked for Delete. Let's do destroy.
|
|
||||||
await template.destroy();
|
|
||||||
|
|
||||||
return res.status(200).json({
|
|
||||||
success: true,
|
|
||||||
message: 'Workflow template deleted successfully'
|
|
||||||
});
|
|
||||||
} catch (error) {
|
|
||||||
logger.error('Error deleting workflow template:', error);
|
|
||||||
return res.status(500).json({
|
|
||||||
success: false,
|
|
||||||
message: 'Failed to delete workflow template',
|
|
||||||
error: error instanceof Error ? error.message : 'Unknown error'
|
|
||||||
});
|
|
||||||
}
|
|
||||||
};
|
|
||||||
@ -1,297 +0,0 @@
|
|||||||
# Additional Approver Handling in Dealer Claim Email Templates
|
|
||||||
|
|
||||||
## Overview
|
|
||||||
This document explains how the dealer claim email notification system handles additional approvers that are added dynamically between fixed workflow steps.
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## How Additional Approvers Work
|
|
||||||
|
|
||||||
### 1. **Additional Approver Detection**
|
|
||||||
Additional approvers are identified by their `levelName` containing "Additional Approver". The system uses this to:
|
|
||||||
- Exclude them from dealer-specific templates
|
|
||||||
- Use appropriate templates for their notifications
|
|
||||||
- Track them in the approval chain
|
|
||||||
|
|
||||||
### 2. **Step Number Shifting**
|
|
||||||
When additional approvers are added:
|
|
||||||
- **Before Step 1**: Dealer Proposal Submission remains Step 1
|
|
||||||
- **Between Step 1 and Step 2**: Additional approver becomes Step 2, Requestor Evaluation shifts to Step 3
|
|
||||||
- **Between Step 2 and Step 3**: Additional approver inserted, subsequent steps shift
|
|
||||||
- And so on...
|
|
||||||
|
|
||||||
The system handles this by:
|
|
||||||
- Using `levelName` to identify steps (not just `levelNumber`)
|
|
||||||
- Finding the next PENDING level dynamically (not just sequential)
|
|
||||||
- Detecting additional approvers by checking `levelName`
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## Email Notification Scenarios
|
|
||||||
|
|
||||||
### Scenario 1: Dealer Submits Proposal (Step 1)
|
|
||||||
|
|
||||||
#### **Initiator Notification**
|
|
||||||
- **When**: Dealer proposal is approved (Step 1 → Step 2)
|
|
||||||
- **Template**: `dealerProposalSubmitted.template.ts`
|
|
||||||
- **Notification Type**: `proposal_submitted`
|
|
||||||
- **Handles Additional Approvers**: ✅ Yes
|
|
||||||
- If next approver is additional: Shows "Additional Approver" as next approver name
|
|
||||||
- If next approver is Step 2: Shows "Requestor Evaluation" approver name
|
|
||||||
- Uses `nextLevel` which is found dynamically (handles step shifts)
|
|
||||||
|
|
||||||
#### **Next Approver Notification**
|
|
||||||
- **When**: Next approver is assigned (could be Step 2 or Additional Approver)
|
|
||||||
- **Template**:
|
|
||||||
- If Additional Approver: `approvalRequest.template.ts` or `multiApproverRequest.template.ts`
|
|
||||||
- If Step 2 (Requestor): `approvalRequest.template.ts` or `multiApproverRequest.template.ts`
|
|
||||||
- **Notification Type**: `assignment`
|
|
||||||
- **Handles Additional Approvers**: ✅ Yes
|
|
||||||
- Additional approvers get standard approval request template
|
|
||||||
- Not dealer-specific (correct behavior)
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
### Scenario 2: Additional Approver Added Between Step 1 and Step 2
|
|
||||||
|
|
||||||
**Workflow Structure:**
|
|
||||||
```
|
|
||||||
Step 1: Dealer Proposal Submission (Dealer)
|
|
||||||
Step 2: Additional Approver (New Approver) ← Added dynamically
|
|
||||||
Step 3: Requestor Evaluation (Initiator) ← Shifted from Step 2
|
|
||||||
Step 4: Department Lead Approval
|
|
||||||
...
|
|
||||||
```
|
|
||||||
|
|
||||||
**Email Flow:**
|
|
||||||
1. **Dealer submits proposal** → Step 1 approved
|
|
||||||
2. **Initiator gets email**:
|
|
||||||
- Template: `dealerProposalSubmitted.template.ts`
|
|
||||||
- Shows: "Next approver: Additional Approver" (if metadata includes `nextApproverIsAdditional`)
|
|
||||||
3. **Additional Approver gets email**:
|
|
||||||
- Template: `approvalRequest.template.ts` (standard approval request)
|
|
||||||
- Type: `assignment`
|
|
||||||
- Shows approval chain if multiple approvers exist
|
|
||||||
4. **Additional Approver approves** → Step 2 approved
|
|
||||||
5. **Initiator gets email**:
|
|
||||||
- Template: `approvalConfirmation.template.ts` (standard approval confirmation)
|
|
||||||
- Type: `approval`
|
|
||||||
6. **Requestor (Step 3) gets email**:
|
|
||||||
- Template: `approvalRequest.template.ts` (standard approval request)
|
|
||||||
- Type: `assignment`
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
### Scenario 3: Dealer Submits Completion Documents (Step 4)
|
|
||||||
|
|
||||||
#### **Initiator Notification**
|
|
||||||
- **When**: Dealer completion documents are approved (Step 4 → Step 5)
|
|
||||||
- **Template**: `completionDocumentsSubmitted.template.ts`
|
|
||||||
- **Notification Type**: `completion_submitted`
|
|
||||||
- **Handles Additional Approvers**: ✅ Yes
|
|
||||||
- If next approver is additional: Shows "Additional Approver" as next approver name
|
|
||||||
- If next approver is Step 5: Shows "Requestor Claim Approval" approver name
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## Key Implementation Details
|
|
||||||
|
|
||||||
### 1. **Dynamic Next Level Finding**
|
|
||||||
```typescript
|
|
||||||
// In dealerClaimApproval.service.ts
|
|
||||||
// First try sequential approach
|
|
||||||
let nextLevel = await ApprovalLevel.findOne({
|
|
||||||
where: {
|
|
||||||
requestId: level.requestId,
|
|
||||||
levelNumber: currentLevelNumber + 1
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
// If sequential level doesn't exist, search for next PENDING level
|
|
||||||
// This handles cases where additional approvers are added dynamically
|
|
||||||
if (!nextLevel) {
|
|
||||||
nextLevel = await ApprovalLevel.findOne({
|
|
||||||
where: {
|
|
||||||
requestId: level.requestId,
|
|
||||||
levelNumber: { [Op.gt]: currentLevelNumber },
|
|
||||||
status: ApprovalStatus.PENDING
|
|
||||||
},
|
|
||||||
order: [['levelNumber', 'ASC']]
|
|
||||||
});
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
### 2. **Additional Approver Detection**
|
|
||||||
```typescript
|
|
||||||
// Check if next approver is an additional approver
|
|
||||||
const nextLevelName = nextLevel ? ((nextLevel as any).levelName || '').toLowerCase() : '';
|
|
||||||
const isNextAdditionalApprover = nextLevelName.includes('additional approver');
|
|
||||||
```
|
|
||||||
|
|
||||||
### 3. **Template Selection Logic**
|
|
||||||
|
|
||||||
#### **Assignment Notifications** (`notification.service.ts`)
|
|
||||||
```typescript
|
|
||||||
case 'assignment':
|
|
||||||
// Check if this is a dealer proposal step
|
|
||||||
const levelName = currentLevel ? (currentLevel.levelName || '').toLowerCase() : '';
|
|
||||||
const isAdditionalApprover = levelName.includes('additional approver');
|
|
||||||
|
|
||||||
const isDealerProposalStep = currentLevel && !isAdditionalApprover && (
|
|
||||||
(currentLevel.levelName && (
|
|
||||||
currentLevel.levelName.toLowerCase().includes('dealer') &&
|
|
||||||
currentLevel.levelName.toLowerCase().includes('proposal')
|
|
||||||
)) ||
|
|
||||||
(currentLevel.levelNumber === 1 && requestData.workflowType === 'CLAIM_MANAGEMENT')
|
|
||||||
);
|
|
||||||
|
|
||||||
if (isDealerProposalStep) {
|
|
||||||
// Use dealer-specific template
|
|
||||||
await emailNotificationService.sendDealerProposalRequired(...);
|
|
||||||
} else {
|
|
||||||
// Use standard approval request template (works for additional approvers)
|
|
||||||
await emailNotificationService.sendApprovalRequest(...);
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
### 4. **Proposal Submitted Notification**
|
|
||||||
```typescript
|
|
||||||
// In dealerClaimApproval.service.ts
|
|
||||||
// When dealer proposal is approved
|
|
||||||
if (isDealerProposalApproval && (wf as any).initiatorId) {
|
|
||||||
// Get next approver (could be Step 2 or Additional Approver)
|
|
||||||
const nextApproverData = nextLevel ? await User.findByPk((nextLevel as any).approverId) : null;
|
|
||||||
|
|
||||||
// Check if next approver is additional
|
|
||||||
const nextLevelName = nextLevel ? ((nextLevel as any).levelName || '').toLowerCase() : '';
|
|
||||||
const isNextAdditionalApprover = nextLevelName.includes('additional approver');
|
|
||||||
|
|
||||||
// Send proposal_submitted notification (not approval notification)
|
|
||||||
await notificationService.sendToUsers([(wf as any).initiatorId], {
|
|
||||||
type: 'proposal_submitted', // NOT 'approval'
|
|
||||||
metadata: {
|
|
||||||
proposalData: {
|
|
||||||
nextApproverIsAdditional: isNextAdditionalApprover
|
|
||||||
},
|
|
||||||
nextApproverId: nextApproverData ? nextApproverData.userId : undefined
|
|
||||||
}
|
|
||||||
});
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## Email Template Behavior
|
|
||||||
|
|
||||||
### ✅ **Templates That Handle Additional Approvers**
|
|
||||||
|
|
||||||
1. **Dealer Proposal Submitted** (`dealerProposalSubmitted.template.ts`)
|
|
||||||
- Shows next approver name (or "Additional Approver" if applicable)
|
|
||||||
- Works correctly when next approver is additional
|
|
||||||
|
|
||||||
2. **Completion Documents Submitted** (`completionDocumentsSubmitted.template.ts`)
|
|
||||||
- Shows next approver name (or "Additional Approver" if applicable)
|
|
||||||
- Works correctly when next approver is additional
|
|
||||||
|
|
||||||
3. **Approval Request** (`approvalRequest.template.ts` / `multiApproverRequest.template.ts`)
|
|
||||||
- Used for additional approvers
|
|
||||||
- Shows approval chain if multiple approvers exist
|
|
||||||
- Works correctly for all approvers (fixed and additional)
|
|
||||||
|
|
||||||
4. **Approval Confirmation** (`approvalConfirmation.template.ts`)
|
|
||||||
- Used when additional approvers approve
|
|
||||||
- Shows next approver in chain
|
|
||||||
- Works correctly
|
|
||||||
|
|
||||||
### ❌ **Templates That Should NOT Be Used for Additional Approvers**
|
|
||||||
|
|
||||||
1. **Dealer Proposal Required** (`dealerProposalRequired.template.ts`)
|
|
||||||
- Only for dealer (Step 1)
|
|
||||||
- Additional approvers excluded via `!isAdditionalApprover` check
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## Testing Scenarios
|
|
||||||
|
|
||||||
### Test Case 1: No Additional Approvers
|
|
||||||
- **Step 1** (Dealer) → Gets `dealerProposalRequired` template ✅
|
|
||||||
- **Step 1 approved** → Initiator gets `dealerProposalSubmitted` template ✅
|
|
||||||
- **Step 2** (Requestor) → Gets `approvalRequest` template ✅
|
|
||||||
|
|
||||||
### Test Case 2: Additional Approver Between Step 1 and Step 2
|
|
||||||
- **Step 1** (Dealer) → Gets `dealerProposalRequired` template ✅
|
|
||||||
- **Step 1 approved** → Initiator gets `dealerProposalSubmitted` template with "Additional Approver" as next ✅
|
|
||||||
- **Step 2** (Additional Approver) → Gets `approvalRequest` template ✅
|
|
||||||
- **Step 2 approved** → Initiator gets `approvalConfirmation` template ✅
|
|
||||||
- **Step 3** (Requestor) → Gets `approvalRequest` template ✅
|
|
||||||
|
|
||||||
### Test Case 3: Multiple Additional Approvers
|
|
||||||
- **Step 1** (Dealer) → Gets `dealerProposalRequired` template ✅
|
|
||||||
- **Step 1 approved** → Initiator gets `dealerProposalSubmitted` template ✅
|
|
||||||
- **Step 2** (Additional Approver 1) → Gets `multiApproverRequest` template (if multiple approvers) ✅
|
|
||||||
- **Step 2 approved** → Next approver notified ✅
|
|
||||||
- **Step 3** (Additional Approver 2) → Gets `multiApproverRequest` template ✅
|
|
||||||
- And so on...
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## Summary
|
|
||||||
|
|
||||||
### ✅ **What Works Correctly**
|
|
||||||
|
|
||||||
1. **Dealer Assignment**: Uses dealer-specific template (not multi-level approval)
|
|
||||||
2. **Proposal Submitted**: Initiator gets `proposal_submitted` template (not multi-level approval)
|
|
||||||
3. **Additional Approvers**: Get standard approval request templates
|
|
||||||
4. **Next Approver Detection**: Dynamically finds next approver (handles step shifts)
|
|
||||||
5. **Template Selection**: Correctly identifies dealer steps vs additional approvers
|
|
||||||
|
|
||||||
### 🔧 **Key Logic**
|
|
||||||
|
|
||||||
- **Dealer Proposal Step Detection**: Checks whether `levelName` contains both "dealer" and "proposal", OR (`levelNumber === 1` AND `workflowType === 'CLAIM_MANAGEMENT'`)
|
|
||||||
- **Additional Approver Detection**: Checks `levelName` contains "additional approver"
|
|
||||||
- **Next Level Finding**: Uses dynamic search for next PENDING level (not just sequential)
|
|
||||||
- **Template Selection**: Excludes additional approvers from dealer-specific templates
|
|
||||||
|
|
||||||
### 📧 **Email Flow**
|
|
||||||
|
|
||||||
```
|
|
||||||
Dealer Submits Proposal
|
|
||||||
↓
|
|
||||||
Step 1 Approved (System)
|
|
||||||
↓
|
|
||||||
Initiator: proposal_submitted email ✅ (NOT multi-level approval)
|
|
||||||
Next Approver: assignment email ✅ (Standard approval request)
|
|
||||||
↓
|
|
||||||
If Next is Additional Approver:
|
|
||||||
- Gets standard approval request template ✅
|
|
||||||
- Shows in approval chain ✅
|
|
||||||
- When approved, next approver gets assignment ✅
|
|
||||||
```
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## Files Modified
|
|
||||||
|
|
||||||
1. **`dealerClaimApproval.service.ts`**
|
|
||||||
- Added detection for additional approvers
|
|
||||||
- Changed notification type from `approval` to `proposal_submitted` for dealer proposal
|
|
||||||
- Added `nextApproverIsAdditional` metadata
|
|
||||||
|
|
||||||
2. **`notification.service.ts`**
|
|
||||||
- Added check to exclude additional approvers from dealer-specific templates
|
|
||||||
- Improved dealer proposal step detection
|
|
||||||
|
|
||||||
3. **`emailNotification.service.ts`**
|
|
||||||
- Updated to handle `nextApproverIsAdditional` flag
|
|
||||||
- Shows "Additional Approver" when next approver is additional
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## Conclusion
|
|
||||||
|
|
||||||
The system now correctly handles additional approvers:
|
|
||||||
- ✅ Initiator gets `proposal_submitted` template (not multi-level approval)
|
|
||||||
- ✅ Additional approvers get standard approval request templates
|
|
||||||
- ✅ Next approver is correctly identified even when steps shift
|
|
||||||
- ✅ All templates work seamlessly with dynamic approval chains
|
|
||||||
|
|
||||||
@ -1,393 +0,0 @@
|
|||||||
# Dealer Claim Email Templates - Planning Document
|
|
||||||
|
|
||||||
## Overview
|
|
||||||
This document outlines all email templates required for the Dealer Claim Management workflow, including support for additional approvers.
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## Workflow Steps & Email Templates
|
|
||||||
|
|
||||||
### 1. **Request Created** ✅ (Already Exists)
|
|
||||||
- **When**: Claim request is created by initiator
|
|
||||||
- **Recipients**: Initiator
|
|
||||||
- **Template**: `requestCreated.template.ts`
|
|
||||||
- **Status**: ✅ Implemented
|
|
||||||
- **Notes**: Generic template works for dealer claims
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
### 2. **Dealer Assignment - Proposal Required** ✅ (Uses Existing)
|
|
||||||
- **When**: Step 1 - Dealer is assigned to submit proposal
|
|
||||||
- **Recipients**: Dealer
|
|
||||||
- **Template**: `approvalRequest.template.ts` (single approver)
|
|
||||||
- **Status**: ✅ Uses existing template
|
|
||||||
- **Notification Type**: `assignment`
|
|
||||||
- **Notes**:
|
|
||||||
- Sent when request is created
|
|
||||||
- Uses existing approval request template
|
|
||||||
- May need dealer-specific customization
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
### 3. **Proposal Submitted** 🆕 (NEW - Recommended)
|
|
||||||
- **When**: Step 1 - Dealer submits proposal
|
|
||||||
- **Recipients**:
|
|
||||||
- Initiator (Requestor)
|
|
||||||
- Next Approver (Step 2 - Requestor Evaluation)
|
|
||||||
- **Template**: `dealerProposalSubmitted.template.ts` (NEW)
|
|
||||||
- **Status**: ❌ Not Implemented
|
|
||||||
- **Notification Type**: `proposal_submitted`
|
|
||||||
- **Data Needed**:
|
|
||||||
- Request details (number, title, activity name)
|
|
||||||
- Dealer information
|
|
||||||
- Proposal details (total budget, expected completion date)
|
|
||||||
- Cost breakdown summary
|
|
||||||
- Dealer comments
|
|
||||||
- **Notes**:
|
|
||||||
- Confirms to initiator that proposal was received
|
|
||||||
- Notifies next approver (initiator) to review
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
### 4. **Proposal Approved** ✅ (Uses Existing)
|
|
||||||
- **When**: Step 2 - Requestor approves proposal
|
|
||||||
- **Recipients**:
|
|
||||||
- Initiator (confirmation)
|
|
||||||
- Next Approver (Step 3 - Department Lead)
|
|
||||||
- **Template**: `approvalConfirmation.template.ts`
|
|
||||||
- **Status**: ✅ Uses existing template
|
|
||||||
- **Notification Type**: `approval`
|
|
||||||
- **Notes**: Generic approval confirmation works
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
### 5. **Proposal Rejected** ✅ (Uses Existing)
|
|
||||||
- **When**: Step 2 - Requestor rejects proposal
|
|
||||||
- **Recipients**:
|
|
||||||
- Initiator
|
|
||||||
- Dealer
|
|
||||||
- All participants
|
|
||||||
- **Template**: `rejectionNotification.template.ts`
|
|
||||||
- **Status**: ✅ Uses existing template
|
|
||||||
- **Notification Type**: `rejection`
|
|
||||||
- **Notes**: Generic rejection notification works
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
### 6. **Department Lead Approval** ✅ (Uses Existing)
|
|
||||||
- **When**: Step 3 - Department Lead approves and organizes IO
|
|
||||||
- **Recipients**:
|
|
||||||
- Initiator (confirmation)
|
|
||||||
- Next approver (if any additional approvers before Activity Creation)
|
|
||||||
- **Template**: `approvalConfirmation.template.ts`
|
|
||||||
- **Status**: ✅ Uses existing template
|
|
||||||
- **Notification Type**: `approval`
|
|
||||||
- **Notes**:
|
|
||||||
- May want IO-specific details in email
|
|
||||||
- IO details are shown in workflow tab
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
### 7. **Activity Created** 🆕 (NEW - Recommended)
|
|
||||||
- **When**: After Step 3 approval - Activity is created
|
|
||||||
- **Recipients**:
|
|
||||||
- Dealer
|
|
||||||
- Initiator (Requestor)
|
|
||||||
- Department Lead
|
|
||||||
- **Template**: `activityCreated.template.ts` (NEW)
|
|
||||||
- **Status**: ❌ Not Implemented (currently uses generic notification)
|
|
||||||
- **Notification Type**: `activity_created`
|
|
||||||
- **Data Needed**:
|
|
||||||
- Activity name and type
|
|
||||||
- Request number
|
|
||||||
- Activity date
|
|
||||||
- Location
|
|
||||||
- IO number (if available)
|
|
||||||
- Next steps information
|
|
||||||
- **Notes**:
|
|
||||||
- Currently sends generic notification (line 2141-2151 in dealerClaim.service.ts)
|
|
||||||
- Should be a dedicated template with activity-specific information
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
### 8. **Completion Documents Submitted** 🆕 (NEW - Recommended)
|
|
||||||
- **When**: Step 4 - Dealer submits completion documents
|
|
||||||
- **Recipients**:
|
|
||||||
- Initiator (Requestor)
|
|
||||||
- Next Approver (Step 5 - Requestor Claim Approval)
|
|
||||||
- **Template**: `completionDocumentsSubmitted.template.ts` (NEW)
|
|
||||||
- **Status**: ❌ Not Implemented
|
|
||||||
- **Notification Type**: `completion_submitted`
|
|
||||||
- **Data Needed**:
|
|
||||||
- Request details
|
|
||||||
- Activity completion date
|
|
||||||
- Number of participants
|
|
||||||
- Total closed expenses
|
|
||||||
- Expense breakdown summary
|
|
||||||
- Documents submitted count
|
|
||||||
- **Notes**:
|
|
||||||
- Confirms to initiator that completion docs were received
|
|
||||||
- Notifies next approver to review completion
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
### 9. **Requestor Claim Approval** ✅ (Uses Existing)
|
|
||||||
- **When**: Step 5 - Requestor approves claim
|
|
||||||
- **Recipients**:
|
|
||||||
- Initiator (confirmation)
|
|
||||||
- Next step (DMS push)
|
|
||||||
- **Template**: `approvalConfirmation.template.ts`
|
|
||||||
- **Status**: ✅ Uses existing template
|
|
||||||
- **Notification Type**: `approval`
|
|
||||||
- **Notes**: Generic approval confirmation works
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
### 10. **E-Invoice Generated** 🆕 (NEW - Recommended)
|
|
||||||
- **When**: Step 6 - E-Invoice is generated via DMS
|
|
||||||
- **Recipients**:
|
|
||||||
- Initiator
|
|
||||||
- Dealer
|
|
||||||
- Finance team (if applicable)
|
|
||||||
- **Template**: `einvoiceGenerated.template.ts` (NEW)
|
|
||||||
- **Status**: ❌ Not Implemented
|
|
||||||
- **Notification Type**: `einvoice_generated`
|
|
||||||
- **Data Needed**:
|
|
||||||
- E-Invoice number
|
|
||||||
- Invoice date
|
|
||||||
- DMS number
|
|
||||||
- Invoice amount
|
|
||||||
- Request details
|
|
||||||
- Download link (if available)
|
|
||||||
- **Notes**:
|
|
||||||
- Currently logged as activity only (line 1856-1863 in dealerClaim.service.ts)
|
|
||||||
- Should notify relevant parties when invoice is ready
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
### 11. **Credit Note Sent to Dealer** 🆕 (NEW - Recommended)
|
|
||||||
- **When**: Step 8 - Credit note is sent to dealer
|
|
||||||
- **Recipients**:
|
|
||||||
- Dealer (primary)
|
|
||||||
- Initiator (for record)
|
|
||||||
- Finance team
|
|
||||||
- **Template**: `creditNoteSent.template.ts` (NEW)
|
|
||||||
- **Status**: ❌ Not Implemented (implementation required)
|
|
||||||
- **Notification Type**: `credit_note_sent`
|
|
||||||
- **Data Needed**:
|
|
||||||
- Credit note number
|
|
||||||
- Credit note date
|
|
||||||
- Credit note amount
|
|
||||||
- Request number
|
|
||||||
- Activity name
|
|
||||||
- Dealer information
|
|
||||||
- Reason for credit note
|
|
||||||
- Download link (if available)
|
|
||||||
- **Notes**:
|
|
||||||
- Planned for email implementation
|
|
||||||
- Critical for dealer notification
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
### 12. **Additional Approver Assignment** ✅ (Uses Existing)
|
|
||||||
- **When**: Additional approver is added between any steps
|
|
||||||
- **Recipients**: Additional Approver
|
|
||||||
- **Template**: `approvalRequest.template.ts` or `multiApproverRequest.template.ts`
|
|
||||||
- **Status**: ✅ Uses existing template
|
|
||||||
- **Notification Type**: `assignment`
|
|
||||||
- **Notes**:
|
|
||||||
- Can use existing approval request templates
|
|
||||||
- Should show approval chain if multiple approvers
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
### 13. **Additional Approver Approval** ✅ (Uses Existing)
|
|
||||||
- **When**: Additional approver approves/rejects
|
|
||||||
- **Recipients**:
|
|
||||||
- Initiator
|
|
||||||
- Next approver
|
|
||||||
- **Template**: `approvalConfirmation.template.ts` or `rejectionNotification.template.ts`
|
|
||||||
- **Status**: ✅ Uses existing template
|
|
||||||
- **Notification Type**: `approval` or `rejection`
|
|
||||||
- **Notes**: Generic templates work for additional approvers
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## Summary
|
|
||||||
|
|
||||||
### ✅ Already Implemented (Using Existing Templates)
|
|
||||||
1. Request Created
|
|
||||||
2. Dealer Assignment (Proposal Required)
|
|
||||||
3. Proposal Approved
|
|
||||||
4. Proposal Rejected
|
|
||||||
5. Department Lead Approval
|
|
||||||
6. Requestor Claim Approval
|
|
||||||
7. Additional Approver Assignment/Approval
|
|
||||||
|
|
||||||
### 🆕 New Templates Needed
|
|
||||||
1. **Proposal Submitted** (`dealerProposalSubmitted.template.ts`)
|
|
||||||
- Priority: Medium
|
|
||||||
- When: Dealer submits proposal (Step 1)
|
|
||||||
|
|
||||||
2. **Activity Created** (`activityCreated.template.ts`)
|
|
||||||
- Priority: High
|
|
||||||
- When: Activity is created after Step 3 approval
|
|
||||||
- Currently uses generic notification
|
|
||||||
|
|
||||||
3. **Completion Documents Submitted** (`completionDocumentsSubmitted.template.ts`)
|
|
||||||
- Priority: Medium
|
|
||||||
- When: Dealer submits completion docs (Step 4)
|
|
||||||
|
|
||||||
4. **E-Invoice Generated** (`einvoiceGenerated.template.ts`)
|
|
||||||
- Priority: High
|
|
||||||
- When: E-Invoice is generated via DMS (Step 6)
|
|
||||||
- Currently only logged as activity
|
|
||||||
|
|
||||||
5. **Credit Note Sent** (`creditNoteSent.template.ts`)
|
|
||||||
- Priority: High
|
|
||||||
- When: Credit note is sent to dealer (Step 8)
|
|
||||||
- Planned for implementation
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## Implementation Priority
|
|
||||||
|
|
||||||
### High Priority (Critical for Workflow)
|
|
||||||
1. **Activity Created** - Currently using generic notification, should be branded
|
|
||||||
2. **E-Invoice Generated** - Important for financial tracking
|
|
||||||
3. **Credit Note Sent** - Critical for dealer notification
|
|
||||||
|
|
||||||
### Medium Priority (Nice to Have)
|
|
||||||
4. **Proposal Submitted** - Better UX, but existing approval request works
|
|
||||||
5. **Completion Documents Submitted** - Better UX, but existing approval request works
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## Template Design Considerations
|
|
||||||
|
|
||||||
### 1. Support for Additional Approvers
|
|
||||||
- All templates should handle dynamic approval chains
|
|
||||||
- Show approval chain when multiple approvers exist
|
|
||||||
- Use `multiApproverRequest.template.ts` pattern for multi-level scenarios
|
|
||||||
|
|
||||||
### 2. Dealer-Specific Information
|
|
||||||
- Include dealer name, code, email prominently
|
|
||||||
- Show activity name and type
|
|
||||||
- Include dealer-specific fields (dealer comments, etc.)
|
|
||||||
|
|
||||||
### 3. Financial Information
|
|
||||||
- Show budget/amount information clearly
|
|
||||||
- Include currency formatting (INR)
|
|
||||||
- Show expense breakdowns where relevant
|
|
||||||
|
|
||||||
### 4. Document Links
|
|
||||||
- Include links to view/download documents
|
|
||||||
- Link to request detail page
|
|
||||||
- Include document counts where relevant
|
|
||||||
|
|
||||||
### 5. Next Steps
|
|
||||||
- Clearly indicate what happens next
|
|
||||||
- Show who needs to take action
|
|
||||||
- Include deadlines/TAT information
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## Integration Points
|
|
||||||
|
|
||||||
### Notification Service Integration
|
|
||||||
All new templates need to be integrated in:
|
|
||||||
- `Re_Backend/src/services/notification.service.ts`
|
|
||||||
- Add to `emailTypeMap`
|
|
||||||
- Add case in `triggerEmailByType` switch statement
|
|
||||||
|
|
||||||
### Email Notification Service Integration
|
|
||||||
All new templates need methods in:
|
|
||||||
- `Re_Backend/src/services/emailNotification.service.ts`
|
|
||||||
- Add `sendXXX` methods for each template
|
|
||||||
- Import template functions
|
|
||||||
- Handle data preparation
|
|
||||||
|
|
||||||
### Type Definitions
|
|
||||||
Add interfaces in:
|
|
||||||
- `Re_Backend/src/emailtemplates/types.ts`
|
|
||||||
- Define data interfaces for each template
|
|
||||||
|
|
||||||
### Email Preferences
|
|
||||||
Add notification types in:
|
|
||||||
- `Re_Backend/src/emailtemplates/emailPreferences.helper.ts`
|
|
||||||
- Add to `EmailNotificationType` enum
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## Example Template Structure
|
|
||||||
|
|
||||||
Each new template should follow the pattern:
|
|
||||||
|
|
||||||
```typescript
|
|
||||||
// types.ts
|
|
||||||
export interface DealerProposalSubmittedData extends BaseEmailData {
|
|
||||||
dealerName: string;
|
|
||||||
activityName: string;
|
|
||||||
activityType: string;
|
|
||||||
proposalBudget: number;
|
|
||||||
expectedCompletionDate: string;
|
|
||||||
dealerComments?: string;
|
|
||||||
costBreakupSummary?: string; // Summary of cost items
|
|
||||||
// ... other fields
|
|
||||||
}
|
|
||||||
|
|
||||||
// dealerProposalSubmitted.template.ts
|
|
||||||
export function getDealerProposalSubmittedEmail(data: DealerProposalSubmittedData): string {
|
|
||||||
// HTML template with Royal Enfield branding
|
|
||||||
// Responsive design
|
|
||||||
// Rich text support for descriptions
|
|
||||||
// Table support for cost breakdown
|
|
||||||
}
|
|
||||||
|
|
||||||
// emailNotification.service.ts
|
|
||||||
async sendDealerProposalSubmitted(
|
|
||||||
requestData: any,
|
|
||||||
dealerData: any,
|
|
||||||
initiatorData: any,
|
|
||||||
proposalData: any
|
|
||||||
): Promise<void> {
|
|
||||||
// Prepare data
|
|
||||||
// Check preferences
|
|
||||||
// Send email
|
|
||||||
}
|
|
||||||
|
|
||||||
// notification.service.ts
|
|
||||||
case 'proposal_submitted':
|
|
||||||
await emailNotificationService.sendDealerProposalSubmitted(...);
|
|
||||||
break;
|
|
||||||
```
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## Testing Checklist
|
|
||||||
|
|
||||||
For each new template:
|
|
||||||
- [ ] Template renders correctly
|
|
||||||
- [ ] All dynamic fields populate correctly
|
|
||||||
- [ ] Mobile responsive
|
|
||||||
- [ ] Tables display correctly (if applicable)
|
|
||||||
- [ ] Links work correctly
|
|
||||||
- [ ] Email preferences respected
|
|
||||||
- [ ] Works with additional approvers
|
|
||||||
- [ ] Handles missing optional data gracefully
|
|
||||||
- [ ] Branding consistent with other templates
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## Notes
|
|
||||||
|
|
||||||
1. **Additional Approvers**: All templates should work seamlessly when additional approvers are added between fixed steps. The approval chain should be shown when relevant.
|
|
||||||
|
|
||||||
2. **System Steps**: Activity Creation, E-Invoice Generation, and Credit Note Confirmation are now activity logs only (not approval steps), but they still need email notifications.
|
|
||||||
|
|
||||||
3. **Dealer vs Internal Users**: Dealer may not be in the system initially - templates should handle this gracefully.
|
|
||||||
|
|
||||||
4. **Financial Data**: All financial amounts should be formatted as INR currency with proper decimal places.
|
|
||||||
|
|
||||||
5. **Document Links**: Include links to view/download documents where applicable, especially for proposals and completion documents.
|
|
||||||
|
|
||||||
@ -1,358 +0,0 @@
|
|||||||
# Dealer Claim Email Templates - Implementation Summary
|
|
||||||
|
|
||||||
## ✅ All 5 Templates Created and Integrated
|
|
||||||
|
|
||||||
All 5 new email templates for the dealer claim workflow have been successfully created and integrated into the notification system.
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## 📧 Created Templates
|
|
||||||
|
|
||||||
### 1. **Dealer Proposal Submitted** ✅
|
|
||||||
- **File**: `dealerProposalSubmitted.template.ts`
|
|
||||||
- **Notification Type**: `proposal_submitted`
|
|
||||||
- **Email Type**: `DEALER_PROPOSAL_SUBMITTED`
|
|
||||||
- **When**: Step 1 - Dealer submits proposal
|
|
||||||
- **Recipients**: Initiator and next approver
|
|
||||||
- **Features**:
|
|
||||||
- Shows proposal budget, expected completion date
|
|
||||||
- Cost breakdown table (if available)
|
|
||||||
- Dealer comments
|
|
||||||
- Next approver information
|
|
||||||
|
|
||||||
### 2. **Activity Created** ✅
|
|
||||||
- **File**: `activityCreated.template.ts`
|
|
||||||
- **Notification Type**: `activity_created`
|
|
||||||
- **Email Type**: `ACTIVITY_CREATED`
|
|
||||||
- **When**: After Step 3 approval - Activity is created
|
|
||||||
- **Recipients**: Dealer, Initiator, Department Lead
|
|
||||||
- **Features**:
|
|
||||||
- Activity name, type, date, location
|
|
||||||
- Dealer information
|
|
||||||
- IO number (if available)
|
|
||||||
- Next steps information
|
|
||||||
|
|
||||||
### 3. **Completion Documents Submitted** ✅
|
|
||||||
- **File**: `completionDocumentsSubmitted.template.ts`
|
|
||||||
- **Notification Type**: `completion_submitted`
|
|
||||||
- **Email Type**: `COMPLETION_DOCUMENTS_SUBMITTED`
|
|
||||||
- **When**: Step 4 - Dealer submits completion documents
|
|
||||||
- **Recipients**: Initiator and next approver
|
|
||||||
- **Features**:
|
|
||||||
- Completion date, participants count
|
|
||||||
- Total expenses with breakdown table
|
|
||||||
- Documents count
|
|
||||||
- Next approver information
|
|
||||||
|
|
||||||
### 4. **E-Invoice Generated** ✅
|
|
||||||
- **File**: `einvoiceGenerated.template.ts`
|
|
||||||
- **Notification Type**: `einvoice_generated`
|
|
||||||
- **Email Type**: `EINVOICE_GENERATED`
|
|
||||||
- **When**: Step 6 - E-Invoice is generated via DMS
|
|
||||||
- **Recipients**: Initiator, Dealer, Finance team
|
|
||||||
- **Features**:
|
|
||||||
- Invoice number, date, DMS number
|
|
||||||
- Invoice amount
|
|
||||||
- Download link (if available)
|
|
||||||
- IO number and dealer information
|
|
||||||
|
|
||||||
### 5. **Credit Note Sent** ✅
|
|
||||||
- **File**: `creditNoteSent.template.ts`
|
|
||||||
- **Notification Type**: `credit_note_sent`
|
|
||||||
- **Email Type**: `CREDIT_NOTE_SENT`
|
|
||||||
- **When**: Step 8 - Credit note is sent to dealer
|
|
||||||
- **Recipients**: Dealer (primary), Initiator, Finance team
|
|
||||||
- **Features**:
|
|
||||||
- Credit note number, date, amount
|
|
||||||
- Related invoice number
|
|
||||||
- Reason for credit note
|
|
||||||
- Download link (if available)
|
|
||||||
- Completion message
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## 🔧 Integration Points
|
|
||||||
|
|
||||||
### ✅ Type Definitions Added
|
|
||||||
- `DealerProposalSubmittedData` interface
|
|
||||||
- `ActivityCreatedData` interface
|
|
||||||
- `CompletionDocumentsSubmittedData` interface
|
|
||||||
- `EInvoiceGeneratedData` interface
|
|
||||||
- `CreditNoteSentData` interface
|
|
||||||
|
|
||||||
### ✅ Email Notification Types Added
|
|
||||||
- `DEALER_PROPOSAL_SUBMITTED`
|
|
||||||
- `ACTIVITY_CREATED`
|
|
||||||
- `COMPLETION_DOCUMENTS_SUBMITTED`
|
|
||||||
- `EINVOICE_GENERATED`
|
|
||||||
- `CREDIT_NOTE_SENT`
|
|
||||||
|
|
||||||
### ✅ Email Notification Service Methods Added
|
|
||||||
- `sendDealerProposalSubmitted()`
|
|
||||||
- `sendActivityCreated()`
|
|
||||||
- `sendCompletionDocumentsSubmitted()`
|
|
||||||
- `sendEInvoiceGenerated()`
|
|
||||||
- `sendCreditNoteSent()`
|
|
||||||
|
|
||||||
### ✅ Notification Service Integration
|
|
||||||
- Added to `emailTypeMap`
|
|
||||||
- Added switch cases in `triggerEmailByType()`
|
|
||||||
|
|
||||||
### ✅ Templates Exported
|
|
||||||
- All templates exported in `index.ts`
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## 📝 Usage Examples
|
|
||||||
|
|
||||||
### 1. Send Proposal Submitted Email
|
|
||||||
|
|
||||||
```typescript
|
|
||||||
await notificationService.sendToUsers([initiatorId, nextApproverId], {
|
|
||||||
title: 'Proposal Submitted',
|
|
||||||
body: `Dealer ${dealerName} has submitted a proposal for request ${requestNumber}`,
|
|
||||||
requestNumber: requestNumber,
|
|
||||||
requestId: requestId,
|
|
||||||
url: `/request/${requestNumber}`,
|
|
||||||
type: 'proposal_submitted',
|
|
||||||
priority: 'MEDIUM',
|
|
||||||
metadata: {
|
|
||||||
dealerData: {
|
|
||||||
userId: dealerId,
|
|
||||||
email: dealerEmail,
|
|
||||||
displayName: dealerName
|
|
||||||
},
|
|
||||||
proposalData: {
|
|
||||||
totalEstimatedBudget: 50000,
|
|
||||||
expectedCompletionDate: '2025-02-15',
|
|
||||||
dealerComments: 'Proposal comments...',
|
|
||||||
costBreakup: [
|
|
||||||
{ description: 'Item 1', amount: 20000 },
|
|
||||||
{ description: 'Item 2', amount: 30000 }
|
|
||||||
],
|
|
||||||
submittedAt: new Date()
|
|
||||||
},
|
|
||||||
nextApproverId: nextApproverId
|
|
||||||
}
|
|
||||||
});
|
|
||||||
```
|
|
||||||
|
|
||||||
### 2. Send Activity Created Email
|
|
||||||
|
|
||||||
```typescript
|
|
||||||
await notificationService.sendToUsers([dealerId, initiatorId, deptLeadId], {
|
|
||||||
title: 'Activity Created',
|
|
||||||
body: `Activity "${activityName}" has been created for request ${requestNumber}`,
|
|
||||||
requestNumber: requestNumber,
|
|
||||||
requestId: requestId,
|
|
||||||
url: `/request/${requestNumber}`,
|
|
||||||
type: 'activity_created',
|
|
||||||
priority: 'MEDIUM',
|
|
||||||
metadata: {
|
|
||||||
activityData: {
|
|
||||||
activityName: 'Dealer Event',
|
|
||||||
activityType: 'Marketing Event',
|
|
||||||
location: 'Mumbai',
|
|
||||||
dealerName: 'ABC Motors',
|
|
||||||
dealerCode: 'ABC001',
|
|
||||||
initiatorName: 'John Doe',
|
|
||||||
departmentLeadName: 'Jane Smith',
|
|
||||||
ioNumber: 'IO123456',
|
|
||||||
nextSteps: 'IO confirmation to be made...'
|
|
||||||
}
|
|
||||||
}
|
|
||||||
});
|
|
||||||
```
|
|
||||||
|
|
||||||
### 3. Send Completion Documents Submitted Email
|
|
||||||
|
|
||||||
```typescript
|
|
||||||
await notificationService.sendToUsers([initiatorId, nextApproverId], {
|
|
||||||
title: 'Completion Documents Submitted',
|
|
||||||
body: `Dealer ${dealerName} has submitted completion documents`,
|
|
||||||
requestNumber: requestNumber,
|
|
||||||
requestId: requestId,
|
|
||||||
url: `/request/${requestNumber}`,
|
|
||||||
type: 'completion_submitted',
|
|
||||||
priority: 'MEDIUM',
|
|
||||||
metadata: {
|
|
||||||
dealerData: {
|
|
||||||
userId: dealerId,
|
|
||||||
email: dealerEmail,
|
|
||||||
displayName: dealerName
|
|
||||||
},
|
|
||||||
completionData: {
|
|
||||||
activityCompletionDate: new Date('2025-02-10'),
|
|
||||||
numberOfParticipants: 50,
|
|
||||||
totalClosedExpenses: 45000,
|
|
||||||
closedExpenses: [
|
|
||||||
{ description: 'Expense 1', amount: 20000 },
|
|
||||||
{ description: 'Expense 2', amount: 25000 }
|
|
||||||
],
|
|
||||||
documentsCount: 5,
|
|
||||||
submittedAt: new Date()
|
|
||||||
},
|
|
||||||
nextApproverId: nextApproverId
|
|
||||||
}
|
|
||||||
});
|
|
||||||
```
|
|
||||||
|
|
||||||
### 4. Send E-Invoice Generated Email
|
|
||||||
|
|
||||||
```typescript
|
|
||||||
await notificationService.sendToUsers([initiatorId, dealerId, financeId], {
|
|
||||||
title: 'E-Invoice Generated',
|
|
||||||
body: `E-Invoice ${invoiceNumber} has been generated for request ${requestNumber}`,
|
|
||||||
requestNumber: requestNumber,
|
|
||||||
requestId: requestId,
|
|
||||||
url: `/request/${requestNumber}`,
|
|
||||||
type: 'einvoice_generated',
|
|
||||||
priority: 'HIGH',
|
|
||||||
metadata: {
|
|
||||||
invoiceData: {
|
|
||||||
invoiceNumber: 'INV-2025-001',
|
|
||||||
invoiceDate: new Date(),
|
|
||||||
dmsNumber: 'DMS123456',
|
|
||||||
amount: 50000,
|
|
||||||
dealerName: 'ABC Motors',
|
|
||||||
dealerCode: 'ABC001',
|
|
||||||
ioNumber: 'IO123456',
|
|
||||||
generatedAt: new Date(),
|
|
||||||
downloadLink: 'https://...'
|
|
||||||
}
|
|
||||||
}
|
|
||||||
});
|
|
||||||
```
|
|
||||||
|
|
||||||
### 5. Send Credit Note Sent Email
|
|
||||||
|
|
||||||
```typescript
|
|
||||||
await notificationService.sendToUsers([dealerId, initiatorId, financeId], {
|
|
||||||
title: 'Credit Note Sent',
|
|
||||||
body: `Credit note ${creditNoteNumber} has been sent for request ${requestNumber}`,
|
|
||||||
requestNumber: requestNumber,
|
|
||||||
requestId: requestId,
|
|
||||||
url: `/request/${requestNumber}`,
|
|
||||||
type: 'credit_note_sent',
|
|
||||||
priority: 'HIGH',
|
|
||||||
metadata: {
|
|
||||||
creditNoteData: {
|
|
||||||
creditNoteNumber: 'CN-2025-001',
|
|
||||||
creditNoteDate: new Date(),
|
|
||||||
creditNoteAmount: 45000,
|
|
||||||
dealerName: 'ABC Motors',
|
|
||||||
dealerCode: 'ABC001',
|
|
||||||
dealerEmail: 'dealer@example.com',
|
|
||||||
reason: 'Claim settlement',
|
|
||||||
invoiceNumber: 'INV-2025-001',
|
|
||||||
sentAt: new Date(),
|
|
||||||
downloadLink: 'https://...'
|
|
||||||
}
|
|
||||||
}
|
|
||||||
});
|
|
||||||
```
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## 🎨 Template Features
|
|
||||||
|
|
||||||
All templates include:
|
|
||||||
- ✅ Royal Enfield branding
|
|
||||||
- ✅ Responsive design (mobile-friendly)
|
|
||||||
- ✅ Rich text support (tables, lists, formatting)
|
|
||||||
- ✅ Table support for cost/expense breakdowns
|
|
||||||
- ✅ Proper currency formatting (INR)
|
|
||||||
- ✅ Conditional sections (only show if data available)
|
|
||||||
- ✅ View Details button with link
|
|
||||||
- ✅ Email preferences checking
|
|
||||||
- ✅ Error handling and logging
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## 🔄 Next Steps for Backend Integration
|
|
||||||
|
|
||||||
To use these templates in the dealer claim service, update the notification calls:
|
|
||||||
|
|
||||||
### In `dealerClaim.service.ts`:
|
|
||||||
|
|
||||||
1. **Proposal Submitted** (line ~1288):
|
|
||||||
```typescript
|
|
||||||
await notificationService.sendToUsers([initiatorId, nextApproverId], {
|
|
||||||
type: 'proposal_submitted',
|
|
||||||
metadata: { dealerData, proposalData, nextApproverId }
|
|
||||||
});
|
|
||||||
```
|
|
||||||
|
|
||||||
2. **Activity Created** (line ~2141):
|
|
||||||
```typescript
|
|
||||||
await notificationService.sendToUsers([dealerId, initiatorId, deptLeadId], {
|
|
||||||
type: 'activity_created',
|
|
||||||
metadata: { activityData }
|
|
||||||
});
|
|
||||||
```
|
|
||||||
|
|
||||||
3. **Completion Submitted** (line ~1393):
|
|
||||||
```typescript
|
|
||||||
await notificationService.sendToUsers([initiatorId, nextApproverId], {
|
|
||||||
type: 'completion_submitted',
|
|
||||||
metadata: { dealerData, completionData, nextApproverId }
|
|
||||||
});
|
|
||||||
```
|
|
||||||
|
|
||||||
4. **E-Invoice Generated** (line ~1862):
|
|
||||||
```typescript
|
|
||||||
await notificationService.sendToUsers([initiatorId, dealerId, financeId], {
|
|
||||||
type: 'einvoice_generated',
|
|
||||||
metadata: { invoiceData }
|
|
||||||
});
|
|
||||||
```
|
|
||||||
|
|
||||||
5. **Credit Note Sent** (line ~2029):
|
|
||||||
```typescript
|
|
||||||
await notificationService.sendToUsers([dealerId, initiatorId, financeId], {
|
|
||||||
type: 'credit_note_sent',
|
|
||||||
metadata: { creditNoteData }
|
|
||||||
});
|
|
||||||
```
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## ✅ Testing Checklist
|
|
||||||
|
|
||||||
- [ ] Test proposal submitted email with cost breakdown table
|
|
||||||
- [ ] Test activity created email with IO number
|
|
||||||
- [ ] Test completion documents email with expense breakdown
|
|
||||||
- [ ] Test e-invoice email with download link
|
|
||||||
- [ ] Test credit note email with all fields
|
|
||||||
- [ ] Verify mobile responsiveness
|
|
||||||
- [ ] Verify email preferences are respected
|
|
||||||
- [ ] Test with missing optional fields
|
|
||||||
- [ ] Verify tables render correctly in email clients
|
|
||||||
- [ ] Test with additional approvers in workflow
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## 📚 Related Files
|
|
||||||
|
|
||||||
- **Templates**: `Re_Backend/src/emailtemplates/*.template.ts`
|
|
||||||
- **Types**: `Re_Backend/src/emailtemplates/types.ts`
|
|
||||||
- **Email Service**: `Re_Backend/src/services/emailNotification.service.ts`
|
|
||||||
- **Notification Service**: `Re_Backend/src/services/notification.service.ts`
|
|
||||||
- **Preferences**: `Re_Backend/src/emailtemplates/emailPreferences.helper.ts`
|
|
||||||
- **Planning Doc**: `Re_Backend/src/emailtemplates/DEALER_CLAIM_EMAIL_TEMPLATES.md`
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## 🎯 Summary
|
|
||||||
|
|
||||||
All 5 dealer claim email templates are now:
|
|
||||||
- ✅ Created with proper structure and styling
|
|
||||||
- ✅ Integrated into the notification system
|
|
||||||
- ✅ Ready to use with proper metadata
|
|
||||||
- ✅ Supporting additional approvers
|
|
||||||
- ✅ Mobile responsive
|
|
||||||
- ✅ Table support for financial data
|
|
||||||
- ✅ Following Royal Enfield branding guidelines
|
|
||||||
|
|
||||||
The templates are ready to be used in the dealer claim workflow service!
|
|
||||||
|
|
||||||
@ -991,9 +991,9 @@ Add to `.env`:
|
|||||||
SMTP_HOST=smtp.gmail.com
|
SMTP_HOST=smtp.gmail.com
|
||||||
SMTP_PORT=587
|
SMTP_PORT=587
|
||||||
SMTP_SECURE=false
|
SMTP_SECURE=false
|
||||||
SMTP_USER=notifications@{{APP_DOMAIN}}
|
SMTP_USER=notifications@royalenfield.com
|
||||||
SMTP_PASSWORD=your-app-specific-password
|
SMTP_PASSWORD=your-app-specific-password
|
||||||
EMAIL_FROM=RE Flow <noreply@{{APP_DOMAIN}}>
|
EMAIL_FROM=RE Flow <noreply@royalenfield.com>
|
||||||
|
|
||||||
# Email Settings
|
# Email Settings
|
||||||
EMAIL_ENABLED=true
|
EMAIL_ENABLED=true
|
||||||
@ -1002,10 +1002,10 @@ EMAIL_BATCH_SIZE=50
|
|||||||
EMAIL_RETRY_ATTEMPTS=3
|
EMAIL_RETRY_ATTEMPTS=3
|
||||||
|
|
||||||
# Application
|
# Application
|
||||||
BASE_URL=https://workflow.{{APP_DOMAIN}}
|
BASE_URL=https://workflow.royalenfield.com
|
||||||
COMPANY_NAME=Royal Enfield
|
COMPANY_NAME=Royal Enfield
|
||||||
COMPANY_WEBSITE=https://www.{{APP_DOMAIN}}
|
COMPANY_WEBSITE=https://www.royalenfield.com
|
||||||
SUPPORT_EMAIL=support@{{APP_DOMAIN}}
|
SUPPORT_EMAIL=support@royalenfield.com
|
||||||
```
|
```
|
||||||
|
|
||||||
---
|
---
|
||||||
|
|||||||
@ -65,7 +65,7 @@ Each template uses color-coded gradients to indicate the scenario:
|
|||||||
All templates feature a single action button:
|
All templates feature a single action button:
|
||||||
- **Text:** "View Request Details" / "Review Request Now" / "Take Action Now"
|
- **Text:** "View Request Details" / "Review Request Now" / "Take Action Now"
|
||||||
- **Link Format:** `{baseURL}/request/{requestNumber}`
|
- **Link Format:** `{baseURL}/request/{requestNumber}`
|
||||||
- **Example:** `https://workflow.{{APP_DOMAIN}}/request/REQ-2025-12-0013`
|
- **Example:** `https://workflow.royalenfield.com/request/REQ-2025-12-0013`
|
||||||
|
|
||||||
No approval/rejection buttons in emails - all actions happen within the application.
|
No approval/rejection buttons in emails - all actions happen within the application.
|
||||||
|
|
||||||
@ -231,8 +231,8 @@ SMTP_USER=your-email@domain.com
|
|||||||
SMTP_PASSWORD=your-app-password
|
SMTP_PASSWORD=your-app-password
|
||||||
|
|
||||||
# Email Settings
|
# Email Settings
|
||||||
EMAIL_FROM=RE Workflow System <notifications@{{APP_DOMAIN}}>
|
EMAIL_FROM=RE Workflow System <notifications@royalenfield.com>
|
||||||
BASE_URL=https://workflow.{{APP_DOMAIN}}
|
BASE_URL=https://workflow.royalenfield.com
|
||||||
COMPANY_NAME=Royal Enfield
|
COMPANY_NAME=Royal Enfield
|
||||||
```
|
```
|
||||||
|
|
||||||
|
|||||||
@ -361,7 +361,7 @@ All `[ViewDetailsLink]` placeholders should be replaced with:
|
|||||||
{baseURL}/request/{requestNumber}
|
{baseURL}/request/{requestNumber}
|
||||||
```
|
```
|
||||||
|
|
||||||
Example: `https://workflow.{{APP_DOMAIN}}/request/REQ-2025-12-0013`
|
Example: `https://workflow.royalenfield.com/request/REQ-2025-12-0013`
|
||||||
|
|
||||||
### Company Name
|
### Company Name
|
||||||
Replace `[CompanyName]` with your organization name (e.g., "Royal Enfield")
|
Replace `[CompanyName]` with your organization name (e.g., "Royal Enfield")
|
||||||
|
|||||||
@ -12,15 +12,15 @@ emailtemplates/
|
|||||||
├── approvalRequest.template.ts ✅ Single approver email
|
├── approvalRequest.template.ts ✅ Single approver email
|
||||||
├── multiApproverRequest.template.ts ✅ Multi-approver email
|
├── multiApproverRequest.template.ts ✅ Multi-approver email
|
||||||
│
|
│
|
||||||
├── approvalConfirmation.template.ts ✅ DONE
|
├── approvalConfirmation.template.ts 🔨 TODO
|
||||||
├── rejectionNotification.template.ts ✅ DONE
|
├── rejectionNotification.template.ts 🔨 TODO
|
||||||
├── tatReminder.template.ts ✅ DONE
|
├── tatReminder.template.ts 🔨 TODO
|
||||||
├── tatBreached.template.ts ✅ DONE
|
├── tatBreached.template.ts 🔨 TODO
|
||||||
├── workflowPaused.template.ts ✅ DONE
|
├── workflowPaused.template.ts 🔨 TODO
|
||||||
├── workflowResumed.template.ts ✅ DONE
|
├── workflowResumed.template.ts 🔨 TODO
|
||||||
├── participantAdded.template.ts ✅ DONE
|
├── participantAdded.template.ts 🔨 TODO
|
||||||
├── approverSkipped.template.ts ✅ DONE
|
├── approverSkipped.template.ts 🔨 TODO
|
||||||
└── requestClosed.template.ts ✅ DONE
|
└── requestClosed.template.ts 🔨 TODO
|
||||||
```
|
```
|
||||||
|
|
||||||
---
|
---
|
||||||
@ -53,7 +53,7 @@ const data: RequestCreatedData = {
|
|||||||
requestTime: '02:30 PM',
|
requestTime: '02:30 PM',
|
||||||
totalApprovers: 3,
|
totalApprovers: 3,
|
||||||
expectedTAT: 48,
|
expectedTAT: 48,
|
||||||
viewDetailsLink: 'https://workflow.{{APP_DOMAIN}}/request/REQ-2025-12-0013',
|
viewDetailsLink: 'https://workflow.royalenfield.com/request/REQ-2025-12-0013',
|
||||||
companyName: 'Royal Enfield'
|
companyName: 'Royal Enfield'
|
||||||
};
|
};
|
||||||
```
|
```
|
||||||
@ -188,10 +188,10 @@ SMTP_USER=your-email@domain.com
|
|||||||
SMTP_PASSWORD=your-app-password
|
SMTP_PASSWORD=your-app-password
|
||||||
|
|
||||||
# Email Settings
|
# Email Settings
|
||||||
EMAIL_FROM=Royal Enfield Workflow <notifications@{{APP_DOMAIN}}>
|
EMAIL_FROM=Royal Enfield Workflow <notifications@royalenfield.com>
|
||||||
|
|
||||||
# Application Settings
|
# Application Settings
|
||||||
BASE_URL=https://workflow.{{APP_DOMAIN}}
|
BASE_URL=https://workflow.royalenfield.com
|
||||||
COMPANY_NAME=Royal Enfield
|
COMPANY_NAME=Royal Enfield
|
||||||
```
|
```
|
||||||
|
|
||||||
|
|||||||
@ -1,180 +0,0 @@
|
|||||||
/**
|
|
||||||
* Activity Created Email Template
|
|
||||||
* Sent when activity is created after Department Lead approval (Step 3)
|
|
||||||
*/
|
|
||||||
|
|
||||||
import { ActivityCreatedData } from './types';
|
|
||||||
import { getEmailFooter, getEmailHeader, HeaderStyles, getResponsiveStyles, wrapRichText, getEmailContainerStyles } from './helpers';
|
|
||||||
import { getBrandedHeader } from './branding.config';
|
|
||||||
|
|
||||||
export function getActivityCreatedEmail(data: ActivityCreatedData): string {
|
|
||||||
return `
|
|
||||||
<!DOCTYPE html>
|
|
||||||
<html lang="en">
|
|
||||||
<head>
|
|
||||||
<meta charset="UTF-8">
|
|
||||||
<meta name="viewport" content="width=device-width, initial-scale=1.0, maximum-scale=1.0, user-scalable=no">
|
|
||||||
<meta http-equiv="X-UA-Compatible" content="IE=edge">
|
|
||||||
<meta name="format-detection" content="telephone=no">
|
|
||||||
<title>Activity Created</title>
|
|
||||||
${getResponsiveStyles()}
|
|
||||||
</head>
|
|
||||||
<body style="margin: 0; padding: 0; font-family: Arial, Helvetica, sans-serif; background-color: #f4f4f4;">
|
|
||||||
<table role="presentation" style="width: 100%; border-collapse: collapse; background-color: #f4f4f4;" cellpadding="0" cellspacing="0">
|
|
||||||
<tr>
|
|
||||||
<td style="padding: 40px 0;">
|
|
||||||
<table role="presentation" class="email-container" style="${getEmailContainerStyles()}" cellpadding="0" cellspacing="0">
|
|
||||||
<!-- Header -->
|
|
||||||
${getEmailHeader(getBrandedHeader({
|
|
||||||
title: 'Activity Created Successfully',
|
|
||||||
...HeaderStyles.success
|
|
||||||
}))}
|
|
||||||
|
|
||||||
<!-- Content -->
|
|
||||||
<tr>
|
|
||||||
<td class="email-content" style="padding: 40px 30px;">
|
|
||||||
<p style="margin: 0 0 20px; color: #333333; font-size: 16px; line-height: 1.6;">
|
|
||||||
Dear <strong style="color: #667eea;">${data.recipientName}</strong>,
|
|
||||||
</p>
|
|
||||||
|
|
||||||
<p style="margin: 0 0 30px; color: #666666; font-size: 16px; line-height: 1.6;">
|
|
||||||
The activity <strong style="color: #333333;">"${data.activityName}"</strong> has been created successfully for request <strong>${data.requestId}</strong>.
|
|
||||||
</p>
|
|
||||||
|
|
||||||
<!-- Activity Details Box -->
|
|
||||||
<table role="presentation" style="width: 100%; border-collapse: collapse; background-color: #f8f9fa; border-radius: 6px; margin-bottom: 30px;" cellpadding="0" cellspacing="0">
|
|
||||||
<tr>
|
|
||||||
<td style="padding: 25px;">
|
|
||||||
<h2 style="margin: 0 0 20px; color: #333333; font-size: 18px; font-weight: 600;">Activity Details</h2>
|
|
||||||
|
|
||||||
<table role="presentation" style="width: 100%; border-collapse: collapse;" cellpadding="0" cellspacing="0">
|
|
||||||
<tr>
|
|
||||||
<td style="padding: 8px 0; color: #666666; font-size: 14px; width: 140px;">
|
|
||||||
<strong>Request ID:</strong>
|
|
||||||
</td>
|
|
||||||
<td style="padding: 8px 0; color: #333333; font-size: 14px;">
|
|
||||||
${data.requestId}
|
|
||||||
</td>
|
|
||||||
</tr>
|
|
||||||
${data.requestTitle ? `
|
|
||||||
<tr>
|
|
||||||
<td style="padding: 8px 0; color: #666666; font-size: 14px;">
|
|
||||||
<strong>Title:</strong>
|
|
||||||
</td>
|
|
||||||
<td style="padding: 8px 0; color: #333333; font-size: 14px;">
|
|
||||||
${data.requestTitle}
|
|
||||||
</td>
|
|
||||||
</tr>
|
|
||||||
` : ''}
|
|
||||||
<tr>
|
|
||||||
<td style="padding: 8px 0; color: #666666; font-size: 14px;">
|
|
||||||
<strong>Activity Name:</strong>
|
|
||||||
</td>
|
|
||||||
<td style="padding: 8px 0; color: #333333; font-size: 14px; font-weight: 600;">
|
|
||||||
${data.activityName}
|
|
||||||
</td>
|
|
||||||
</tr>
|
|
||||||
<tr>
|
|
||||||
<td style="padding: 8px 0; color: #666666; font-size: 14px;">
|
|
||||||
<strong>Activity Type:</strong>
|
|
||||||
</td>
|
|
||||||
<td style="padding: 8px 0; color: #333333; font-size: 14px;">
|
|
||||||
${data.activityType}
|
|
||||||
</td>
|
|
||||||
</tr>
|
|
||||||
${data.activityDate ? `
|
|
||||||
<tr>
|
|
||||||
<td style="padding: 8px 0; color: #666666; font-size: 14px;">
|
|
||||||
<strong>Activity Date:</strong>
|
|
||||||
</td>
|
|
||||||
<td style="padding: 8px 0; color: #333333; font-size: 14px;">
|
|
||||||
${data.activityDate}
|
|
||||||
</td>
|
|
||||||
</tr>
|
|
||||||
` : ''}
|
|
||||||
<tr>
|
|
||||||
<td style="padding: 8px 0; color: #666666; font-size: 14px;">
|
|
||||||
<strong>Location:</strong>
|
|
||||||
</td>
|
|
||||||
<td style="padding: 8px 0; color: #333333; font-size: 14px;">
|
|
||||||
${data.location}
|
|
||||||
</td>
|
|
||||||
</tr>
|
|
||||||
<tr>
|
|
||||||
<td style="padding: 8px 0; color: #666666; font-size: 14px;">
|
|
||||||
<strong>Dealer:</strong>
|
|
||||||
</td>
|
|
||||||
<td style="padding: 8px 0; color: #333333; font-size: 14px;">
|
|
||||||
${data.dealerName} ${data.dealerCode ? `(${data.dealerCode})` : ''}
|
|
||||||
</td>
|
|
||||||
</tr>
|
|
||||||
${data.ioNumber ? `
|
|
||||||
<tr>
|
|
||||||
<td style="padding: 8px 0; color: #666666; font-size: 14px;">
|
|
||||||
<strong>IO Number:</strong>
|
|
||||||
</td>
|
|
||||||
<td style="padding: 8px 0; color: #333333; font-size: 14px; font-weight: 600;">
|
|
||||||
${data.ioNumber}
|
|
||||||
</td>
|
|
||||||
</tr>
|
|
||||||
` : ''}
|
|
||||||
<tr>
|
|
||||||
<td style="padding: 8px 0; color: #666666; font-size: 14px;">
|
|
||||||
<strong>Created On:</strong>
|
|
||||||
</td>
|
|
||||||
<td style="padding: 8px 0; color: #333333; font-size: 14px;">
|
|
||||||
${data.createdDate} at ${data.createdTime}
|
|
||||||
</td>
|
|
||||||
</tr>
|
|
||||||
</table>
|
|
||||||
</td>
|
|
||||||
</tr>
|
|
||||||
</table>
|
|
||||||
|
|
||||||
${data.nextSteps ? `
|
|
||||||
<!-- Next Steps -->
|
|
||||||
<div style="padding: 20px; background-color: #e7f3ff; border-left: 4px solid #0066cc; border-radius: 4px; margin-bottom: 30px;">
|
|
||||||
<h3 style="margin: 0 0 10px; color: #004085; font-size: 16px; font-weight: 600;">Next Steps</h3>
|
|
||||||
<div style="color: #004085; font-size: 14px; line-height: 1.8;">
|
|
||||||
${wrapRichText(data.nextSteps)}
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
` : `
|
|
||||||
<!-- Default Next Steps -->
|
|
||||||
<div style="padding: 20px; background-color: #e7f3ff; border-left: 4px solid #0066cc; border-radius: 4px; margin-bottom: 30px;">
|
|
||||||
<h3 style="margin: 0 0 10px; color: #004085; font-size: 16px; font-weight: 600;">Next Steps</h3>
|
|
||||||
<ul style="margin: 10px 0 0 0; padding-left: 20px; color: #004085; font-size: 14px; line-height: 1.8;">
|
|
||||||
<li>IO confirmation to be made</li>
|
|
||||||
<li>Dealer will proceed with activity execution</li>
|
|
||||||
<li>Completion documents will be submitted after activity completion</li>
|
|
||||||
</ul>
|
|
||||||
</div>
|
|
||||||
`}
|
|
||||||
|
|
||||||
<!-- View Details Button -->
|
|
||||||
<table role="presentation" style="width: 100%; border-collapse: collapse; margin-bottom: 20px;" cellpadding="0" cellspacing="0">
|
|
||||||
<tr>
|
|
||||||
<td style="text-align: center;">
|
|
||||||
<a href="${data.viewDetailsLink}" class="cta-button" style="display: inline-block; padding: 15px 40px; background-color: #1a1a1a; color: #ffffff; text-decoration: none; text-align: center; border-radius: 6px; font-size: 16px; font-weight: 600; box-shadow: 0 2px 4px rgba(0, 0, 0, 0.2); min-width: 200px;">
|
|
||||||
View Request Details
|
|
||||||
</a>
|
|
||||||
</td>
|
|
||||||
</tr>
|
|
||||||
</table>
|
|
||||||
|
|
||||||
<p style="margin: 0; color: #666666; font-size: 14px; line-height: 1.6; text-align: center;">
|
|
||||||
Thank you for using the ${data.companyName} Workflow System.
|
|
||||||
</p>
|
|
||||||
</td>
|
|
||||||
</tr>
|
|
||||||
|
|
||||||
${getEmailFooter(data.companyName)}
|
|
||||||
</table>
|
|
||||||
</td>
|
|
||||||
</tr>
|
|
||||||
</table>
|
|
||||||
</body>
|
|
||||||
</html>
|
|
||||||
`;
|
|
||||||
}
|
|
||||||
|
|
||||||
@ -1,152 +0,0 @@
|
|||||||
/**
|
|
||||||
* Additional Document Added Email Template
|
|
||||||
*
|
|
||||||
* Sent when a document is added to a request by:
|
|
||||||
* - Initiator → Notifies spectators and current approver
|
|
||||||
* - Spectator → Notifies initiator and current approver
|
|
||||||
* - Approver → Notifies initiator and spectators
|
|
||||||
*/
|
|
||||||
|
|
||||||
import { AdditionalDocumentAddedData } from './types';
|
|
||||||
import { getEmailFooter, getEmailHeader, HeaderStyles, getResponsiveStyles, getEmailContainerStyles } from './helpers';
|
|
||||||
import { getBrandedHeader } from './branding.config';
|
|
||||||
|
|
||||||
/**
 * Renders the "Additional Document Added" notification email as a complete HTML document.
 *
 * Per the file header, this is sent when a document is attached to a request: by the
 * initiator (notifying spectators + current approver), by a spectator (notifying
 * initiator + current approver), or by an approver (notifying initiator + spectators).
 * The body is a single template literal, so no comments can be placed inside it —
 * everything between the backticks is runtime string content.
 *
 * @param data - Fields interpolated into the email. Required by this template:
 *   recipientName, addedByName, documentName, fileSize, addedDate, addedTime,
 *   viewDetailsLink, companyName, and at least one of requestNumber/requestId
 *   (requestNumber is preferred when both are set). Optional: requestTitle
 *   (falls back to 'N/A'), source (row rendered only when truthy).
 * @returns The full HTML email markup as a string.
 *
 * NOTE(review): data values are interpolated without HTML-escaping; assumes callers
 * pass trusted/sanitized content — confirm upstream.
 */
export function getAdditionalDocumentAddedEmail(data: AdditionalDocumentAddedData): string {
  // Header/footer, container styles and responsive CSS come from shared helpers so
  // all workflow emails stay visually consistent.
  return `
<!DOCTYPE html>
<html lang="en">
<head>
  <meta charset="UTF-8">
  <meta name="viewport" content="width=device-width, initial-scale=1.0, maximum-scale=1.0, user-scalable=no">
  <meta http-equiv="X-UA-Compatible" content="IE=edge">
  <meta name="format-detection" content="telephone=no">
  <title>Additional Document Added</title>
  ${getResponsiveStyles()}
</head>
<body style="margin: 0; padding: 0; font-family: Arial, Helvetica, sans-serif; background-color: #f4f4f4;">
  <table role="presentation" style="width: 100%; border-collapse: collapse; background-color: #f4f4f4;" cellpadding="0" cellspacing="0">
    <tr>
      <td style="padding: 40px 0;">
        <table role="presentation" class="email-container" style="${getEmailContainerStyles()}" cellpadding="0" cellspacing="0">
          <!-- Header -->
          ${getEmailHeader(getBrandedHeader({
            title: 'Additional Document Added',
            ...HeaderStyles.info
          }))}

          <!-- Content -->
          <tr>
            <td class="email-content">
              <p style="margin: 0 0 20px; color: #333333; font-size: 16px; line-height: 1.6;">
                Dear <strong style="color: #667eea;">${data.recipientName}</strong>,
              </p>

              <p style="margin: 0 0 30px; color: #666666; font-size: 16px; line-height: 1.6;">
                <strong>${data.addedByName}</strong> has added an additional document to the following request:
              </p>

              <!-- Request Details Box -->
              <table role="presentation" style="width: 100%; border-collapse: collapse; background-color: #f8f9fa; border-radius: 6px; margin-bottom: 30px;" cellpadding="0" cellspacing="0">
                <tr>
                  <td class="detail-box" style="padding: 30px;">
                    <h2 style="margin: 0 0 25px; color: #333333; font-size: 20px; font-weight: 600;">Request Details</h2>

                    <table role="presentation" class="detail-table" style="width: 100%; border-collapse: collapse;" cellpadding="0" cellspacing="0">
                      <tr>
                        <td class="detail-label" style="padding: 10px 0; color: #666666; font-size: 15px;">
                          <strong>Request ID:</strong>
                        </td>
                        <td style="padding: 10px 0; color: #333333; font-size: 15px;">
                          ${data.requestNumber || data.requestId}
                        </td>
                      </tr>
                      <tr>
                        <td class="detail-label" style="padding: 10px 0; color: #666666; font-size: 15px;">
                          <strong>Title:</strong>
                        </td>
                        <td style="padding: 10px 0; color: #333333; font-size: 15px;">
                          ${data.requestTitle || 'N/A'}
                        </td>
                      </tr>
                      <tr>
                        <td class="detail-label" style="padding: 10px 0; color: #666666; font-size: 15px;">
                          <strong>Document Name:</strong>
                        </td>
                        <td style="padding: 10px 0; color: #333333; font-size: 15px;">
                          ${data.documentName}
                        </td>
                      </tr>
                      <tr>
                        <td class="detail-label" style="padding: 10px 0; color: #666666; font-size: 15px;">
                          <strong>File Size:</strong>
                        </td>
                        <td style="padding: 10px 0; color: #333333; font-size: 15px;">
                          ${data.fileSize}
                        </td>
                      </tr>
                      <tr>
                        <td class="detail-label" style="padding: 10px 0; color: #666666; font-size: 15px;">
                          <strong>Added By:</strong>
                        </td>
                        <td style="padding: 10px 0; color: #333333; font-size: 15px;">
                          ${data.addedByName}
                        </td>
                      </tr>
                      <tr>
                        <td class="detail-label" style="padding: 10px 0; color: #666666; font-size: 15px;">
                          <strong>Added On:</strong>
                        </td>
                        <td style="padding: 10px 0; color: #333333; font-size: 15px;">
                          ${data.addedDate} at ${data.addedTime}
                        </td>
                      </tr>
                      ${data.source ? `
                      <tr>
                        <td class="detail-label" style="padding: 10px 0; color: #666666; font-size: 15px;">
                          <strong>Source:</strong>
                        </td>
                        <td style="padding: 10px 0; color: #333333; font-size: 15px;">
                          ${data.source}
                        </td>
                      </tr>
                      ` : ''}
                    </table>
                  </td>
                </tr>
              </table>

              <!-- Information Box -->
              <div style="padding: 20px; background-color: #e7f3ff; border-left: 4px solid #0066cc; border-radius: 4px; margin-bottom: 30px;">
                <h3 style="margin: 0 0 10px; color: #004085; font-size: 16px; font-weight: 600;">What This Means</h3>
                <p style="margin: 0; color: #004085; font-size: 14px; line-height: 1.8;">
                  A new document has been added to this request. Please review the document in the request details page to stay updated with the latest information.
                </p>
              </div>

              <!-- View Details Button -->
              <table role="presentation" style="width: 100%; border-collapse: collapse; margin-bottom: 20px;" cellpadding="0" cellspacing="0">
                <tr>
                  <td style="text-align: center;">
                    <a href="${data.viewDetailsLink}" class="cta-button" style="display: inline-block; padding: 15px 40px; background-color: #1a1a1a; color: #ffffff; text-decoration: none; text-align: center; border-radius: 6px; font-size: 16px; font-weight: 600; box-shadow: 0 2px 4px rgba(0, 0, 0, 0.2); min-width: 200px;">
                      View Request Details
                    </a>
                  </td>
                </tr>
              </table>

              <p style="margin: 0; color: #666666; font-size: 14px; line-height: 1.6; text-align: center;">
                Thank you for using the ${data.companyName} Workflow System.
              </p>
            </td>
          </tr>

          ${getEmailFooter(data.companyName)}
        </table>
      </td>
    </tr>
  </table>
</body>
</html>
  `;
}
|
|
||||||
|
|
||||||
@ -3,14 +3,14 @@
|
|||||||
*/
|
*/
|
||||||
|
|
||||||
import { ApprovalConfirmationData } from './types';
|
import { ApprovalConfirmationData } from './types';
|
||||||
import { getEmailFooter, getEmailHeader, HeaderStyles, getNextStepsSection, wrapRichText, getResponsiveStyles, getEmailContainerStyles } from './helpers';
|
import { getEmailFooter, getEmailHeader, HeaderStyles, getNextStepsSection, wrapRichText, getResponsiveStyles } from './helpers';
|
||||||
import { getBrandedHeader } from './branding.config';
|
import { getBrandedHeader } from './branding.config';
|
||||||
|
|
||||||
export function getApprovalConfirmationEmail(data: ApprovalConfirmationData): string {
|
export function getApprovalConfirmationEmail(data: ApprovalConfirmationData): string {
|
||||||
const commentsSection = data.approverComments ? `
|
const commentsSection = data.approverComments ? `
|
||||||
<div style="margin-bottom: 30px;">
|
<div style="margin-bottom: 30px;">
|
||||||
<h3 style="margin: 0 0 15px; color: #333333; font-size: 16px; font-weight: 600;">Approver Comments:</h3>
|
<h3 style="margin: 0 0 15px; color: #333333; font-size: 16px; font-weight: 600;">Approver Comments:</h3>
|
||||||
<div style="padding: 15px; background-color: #f8f9fa; border-left: 4px solid #28a745; border-radius: 4px; overflow-x: auto;">
|
<div style="padding: 15px; background-color: #f8f9fa; border-left: 4px solid #28a745; border-radius: 4px;">
|
||||||
${wrapRichText(data.approverComments)}
|
${wrapRichText(data.approverComments)}
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
@ -21,24 +21,21 @@ export function getApprovalConfirmationEmail(data: ApprovalConfirmationData): st
|
|||||||
<html lang="en">
|
<html lang="en">
|
||||||
<head>
|
<head>
|
||||||
<meta charset="UTF-8">
|
<meta charset="UTF-8">
|
||||||
<meta name="viewport" content="width=device-width, initial-scale=1.0, maximum-scale=1.0, user-scalable=no">
|
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
||||||
<meta http-equiv="X-UA-Compatible" content="IE=edge">
|
|
||||||
<meta name="format-detection" content="telephone=no">
|
|
||||||
<title>Request Approved</title>
|
<title>Request Approved</title>
|
||||||
${getResponsiveStyles()}
|
|
||||||
</head>
|
</head>
|
||||||
<body style="margin: 0; padding: 0; font-family: Arial, Helvetica, sans-serif; background-color: #f4f4f4;">
|
<body style="margin: 0; padding: 0; font-family: Arial, Helvetica, sans-serif; background-color: #f4f4f4;">
|
||||||
<table role="presentation" style="width: 100%; border-collapse: collapse; background-color: #f4f4f4;" cellpadding="0" cellspacing="0">
|
<table role="presentation" style="width: 100%; border-collapse: collapse; background-color: #f4f4f4;" cellpadding="0" cellspacing="0">
|
||||||
<tr>
|
<tr>
|
||||||
<td style="padding: 40px 0;">
|
<td style="padding: 40px 0;">
|
||||||
<table role="presentation" class="email-container" style="${getEmailContainerStyles()}" cellpadding="0" cellspacing="0">
|
<table role="presentation" style="width: 600px; margin: 0 auto; background-color: #ffffff; border-radius: 8px; box-shadow: 0 2px 4px rgba(0,0,0,0.1);" cellpadding="0" cellspacing="0">
|
||||||
${getEmailHeader(getBrandedHeader({
|
${getEmailHeader(getBrandedHeader({
|
||||||
title: 'Request Approved',
|
title: 'Request Approved',
|
||||||
...HeaderStyles.success
|
...HeaderStyles.success
|
||||||
}))}
|
}))}
|
||||||
|
|
||||||
<tr>
|
<tr>
|
||||||
<td class="email-content">
|
<td style="padding: 40px 30px;">
|
||||||
<p style="margin: 0 0 20px; color: #333333; font-size: 16px; line-height: 1.6;">
|
<p style="margin: 0 0 20px; color: #333333; font-size: 16px; line-height: 1.6;">
|
||||||
Dear <strong style="color: #28a745;">${data.initiatorName}</strong>,
|
Dear <strong style="color: #28a745;">${data.initiatorName}</strong>,
|
||||||
</p>
|
</p>
|
||||||
@ -49,47 +46,47 @@ export function getApprovalConfirmationEmail(data: ApprovalConfirmationData): st
|
|||||||
|
|
||||||
<table role="presentation" style="width: 100%; border-collapse: collapse; background-color: #d4edda; border: 1px solid #c3e6cb; border-radius: 6px; margin-bottom: 30px;" cellpadding="0" cellspacing="0">
|
<table role="presentation" style="width: 100%; border-collapse: collapse; background-color: #d4edda; border: 1px solid #c3e6cb; border-radius: 6px; margin-bottom: 30px;" cellpadding="0" cellspacing="0">
|
||||||
<tr>
|
<tr>
|
||||||
<td class="detail-box" style="padding: 30px;">
|
<td style="padding: 25px;">
|
||||||
<h2 style="margin: 0 0 25px; color: #155724; font-size: 20px; font-weight: 600;">Request Summary</h2>
|
<h2 style="margin: 0 0 20px; color: #155724; font-size: 18px; font-weight: 600;">Request Summary</h2>
|
||||||
|
|
||||||
<table role="presentation" class="detail-table" style="width: 100%; border-collapse: collapse;" cellpadding="0" cellspacing="0">
|
<table role="presentation" style="width: 100%; border-collapse: collapse;" cellpadding="0" cellspacing="0">
|
||||||
<tr>
|
<tr>
|
||||||
<td class="detail-label" style="padding: 10px 0; color: #155724; font-size: 15px;">
|
<td style="padding: 8px 0; color: #155724; font-size: 14px; width: 140px;">
|
||||||
<strong>Request ID:</strong>
|
<strong>Request ID:</strong>
|
||||||
</td>
|
</td>
|
||||||
<td style="padding: 10px 0; color: #155724; font-size: 15px;">
|
<td style="padding: 8px 0; color: #155724; font-size: 14px;">
|
||||||
${data.requestId}
|
${data.requestId}
|
||||||
</td>
|
</td>
|
||||||
</tr>
|
</tr>
|
||||||
<tr>
|
<tr>
|
||||||
<td class="detail-label" style="padding: 10px 0; color: #155724; font-size: 15px;">
|
<td style="padding: 8px 0; color: #155724; font-size: 14px;">
|
||||||
<strong>Approved By:</strong>
|
<strong>Approved By:</strong>
|
||||||
</td>
|
</td>
|
||||||
<td style="padding: 10px 0; color: #155724; font-size: 15px;">
|
<td style="padding: 8px 0; color: #155724; font-size: 14px;">
|
||||||
${data.approverName}
|
${data.approverName}
|
||||||
</td>
|
</td>
|
||||||
</tr>
|
</tr>
|
||||||
<tr>
|
<tr>
|
||||||
<td class="detail-label" style="padding: 10px 0; color: #155724; font-size: 15px;">
|
<td style="padding: 8px 0; color: #155724; font-size: 14px;">
|
||||||
<strong>Approved On:</strong>
|
<strong>Approved On:</strong>
|
||||||
</td>
|
</td>
|
||||||
<td style="padding: 10px 0; color: #155724; font-size: 15px;">
|
<td style="padding: 8px 0; color: #155724; font-size: 14px;">
|
||||||
${data.approvalDate}
|
${data.approvalDate}
|
||||||
</td>
|
</td>
|
||||||
</tr>
|
</tr>
|
||||||
<tr>
|
<tr>
|
||||||
<td class="detail-label" style="padding: 10px 0; color: #155724; font-size: 15px;">
|
<td style="padding: 8px 0; color: #155724; font-size: 14px;">
|
||||||
<strong>Time:</strong>
|
<strong>Time:</strong>
|
||||||
</td>
|
</td>
|
||||||
<td style="padding: 10px 0; color: #155724; font-size: 15px;">
|
<td style="padding: 8px 0; color: #155724; font-size: 14px;">
|
||||||
${data.approvalTime}
|
${data.approvalTime}
|
||||||
</td>
|
</td>
|
||||||
</tr>
|
</tr>
|
||||||
<tr>
|
<tr>
|
||||||
<td class="detail-label" style="padding: 10px 0; color: #155724; font-size: 15px;">
|
<td style="padding: 8px 0; color: #155724; font-size: 14px;">
|
||||||
<strong>Request Type:</strong>
|
<strong>Request Type:</strong>
|
||||||
</td>
|
</td>
|
||||||
<td style="padding: 10px 0; color: #155724; font-size: 15px;">
|
<td style="padding: 8px 0; color: #155724; font-size: 14px;">
|
||||||
${data.requestType}
|
${data.requestType}
|
||||||
</td>
|
</td>
|
||||||
</tr>
|
</tr>
|
||||||
|
|||||||
@ -3,7 +3,7 @@
|
|||||||
*/
|
*/
|
||||||
|
|
||||||
import { ApprovalRequestData } from './types';
|
import { ApprovalRequestData } from './types';
|
||||||
import { getEmailFooter, getPrioritySection, getEmailHeader, HeaderStyles, getResponsiveStyles, wrapRichText, getEmailContainerStyles } from './helpers';
|
import { getEmailFooter, getPrioritySection, getEmailHeader, HeaderStyles, getResponsiveStyles, wrapRichText } from './helpers';
|
||||||
import { getBrandedHeader } from './branding.config';
|
import { getBrandedHeader } from './branding.config';
|
||||||
|
|
||||||
export function getApprovalRequestEmail(data: ApprovalRequestData): string {
|
export function getApprovalRequestEmail(data: ApprovalRequestData): string {
|
||||||
@ -22,7 +22,7 @@ export function getApprovalRequestEmail(data: ApprovalRequestData): string {
|
|||||||
<table role="presentation" style="width: 100%; border-collapse: collapse; background-color: #f4f4f4;" cellpadding="0" cellspacing="0">
|
<table role="presentation" style="width: 100%; border-collapse: collapse; background-color: #f4f4f4;" cellpadding="0" cellspacing="0">
|
||||||
<tr>
|
<tr>
|
||||||
<td style="padding: 40px 0;">
|
<td style="padding: 40px 0;">
|
||||||
<table role="presentation" class="email-container" style="${getEmailContainerStyles()}" cellpadding="0" cellspacing="0">
|
<table role="presentation" class="email-container" style="width: 600px; max-width: 100%; margin: 0 auto; background-color: #ffffff; border-radius: 8px; box-shadow: 0 2px 4px rgba(0,0,0,0.1);" cellpadding="0" cellspacing="0">
|
||||||
<!-- Header -->
|
<!-- Header -->
|
||||||
${getEmailHeader(getBrandedHeader({
|
${getEmailHeader(getBrandedHeader({
|
||||||
title: 'Approval Request',
|
title: 'Approval Request',
|
||||||
@ -55,16 +55,6 @@ export function getApprovalRequestEmail(data: ApprovalRequestData): string {
|
|||||||
${data.requestId}
|
${data.requestId}
|
||||||
</td>
|
</td>
|
||||||
</tr>
|
</tr>
|
||||||
${data.requestTitle ? `
|
|
||||||
<tr>
|
|
||||||
<td style="padding: 8px 0; color: #666666; font-size: 14px;">
|
|
||||||
<strong>Title:</strong>
|
|
||||||
</td>
|
|
||||||
<td style="padding: 8px 0; color: #333333; font-size: 14px;">
|
|
||||||
${data.requestTitle}
|
|
||||||
</td>
|
|
||||||
</tr>
|
|
||||||
` : ''}
|
|
||||||
<tr>
|
<tr>
|
||||||
<td style="padding: 8px 0; color: #666666; font-size: 14px;">
|
<td style="padding: 8px 0; color: #666666; font-size: 14px;">
|
||||||
<strong>Initiator:</strong>
|
<strong>Initiator:</strong>
|
||||||
@ -102,10 +92,10 @@ export function getApprovalRequestEmail(data: ApprovalRequestData): string {
|
|||||||
</tr>
|
</tr>
|
||||||
</table>
|
</table>
|
||||||
|
|
||||||
<!-- Description (supports rich text HTML including tables) -->
|
<!-- Description (supports rich text HTML) -->
|
||||||
<div style="margin-bottom: 30px;">
|
<div style="margin-bottom: 30px;">
|
||||||
<h3 style="margin: 0 0 15px; color: #333333; font-size: 16px; font-weight: 600;">Description:</h3>
|
<h3 style="margin: 0 0 15px; color: #333333; font-size: 16px; font-weight: 600;">Description:</h3>
|
||||||
<div style="padding: 15px; background-color: #f8f9fa; border-left: 4px solid #667eea; border-radius: 4px; overflow-x: auto;">
|
<div style="padding: 15px; background-color: #f8f9fa; border-left: 4px solid #667eea; border-radius: 4px;">
|
||||||
${wrapRichText(data.requestDescription)}
|
${wrapRichText(data.requestDescription)}
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
|
|||||||
@ -3,7 +3,7 @@
|
|||||||
*/
|
*/
|
||||||
|
|
||||||
import { ApproverSkippedData } from './types';
|
import { ApproverSkippedData } from './types';
|
||||||
import { getEmailFooter, getEmailHeader, HeaderStyles, wrapRichText, getResponsiveStyles, getEmailContainerStyles } from './helpers';
|
import { getEmailFooter, getEmailHeader, HeaderStyles, wrapRichText, getResponsiveStyles } from './helpers';
|
||||||
import { getBrandedHeader } from './branding.config';
|
import { getBrandedHeader } from './branding.config';
|
||||||
|
|
||||||
export function getApproverSkippedEmail(data: ApproverSkippedData): string {
|
export function getApproverSkippedEmail(data: ApproverSkippedData): string {
|
||||||
@ -12,17 +12,14 @@ export function getApproverSkippedEmail(data: ApproverSkippedData): string {
|
|||||||
<html lang="en">
|
<html lang="en">
|
||||||
<head>
|
<head>
|
||||||
<meta charset="UTF-8">
|
<meta charset="UTF-8">
|
||||||
<meta name="viewport" content="width=device-width, initial-scale=1.0, maximum-scale=1.0, user-scalable=no">
|
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
||||||
<meta http-equiv="X-UA-Compatible" content="IE=edge">
|
|
||||||
<meta name="format-detection" content="telephone=no">
|
|
||||||
<title>Approver Skipped</title>
|
<title>Approver Skipped</title>
|
||||||
${getResponsiveStyles()}
|
|
||||||
</head>
|
</head>
|
||||||
<body style="margin: 0; padding: 0; font-family: Arial, Helvetica, sans-serif; background-color: #f4f4f4;">
|
<body style="margin: 0; padding: 0; font-family: Arial, Helvetica, sans-serif; background-color: #f4f4f4;">
|
||||||
<table role="presentation" style="width: 100%; border-collapse: collapse; background-color: #f4f4f4;" cellpadding="0" cellspacing="0">
|
<table role="presentation" style="width: 100%; border-collapse: collapse; background-color: #f4f4f4;" cellpadding="0" cellspacing="0">
|
||||||
<tr>
|
<tr>
|
||||||
<td style="padding: 40px 0;">
|
<td style="padding: 40px 0;">
|
||||||
<table role="presentation" class="email-container" style="${getEmailContainerStyles()}" cellpadding="0" cellspacing="0">
|
<table role="presentation" style="width: 600px; margin: 0 auto; background-color: #ffffff; border-radius: 8px; box-shadow: 0 2px 4px rgba(0,0,0,0.1);" cellpadding="0" cellspacing="0">
|
||||||
${getEmailHeader(getBrandedHeader({
|
${getEmailHeader(getBrandedHeader({
|
||||||
title: 'Approval Level Skipped',
|
title: 'Approval Level Skipped',
|
||||||
...HeaderStyles.infoSecondary
|
...HeaderStyles.infoSecondary
|
||||||
@ -99,7 +96,7 @@ export function getApproverSkippedEmail(data: ApproverSkippedData): string {
|
|||||||
|
|
||||||
<div style="margin-bottom: 30px;">
|
<div style="margin-bottom: 30px;">
|
||||||
<h3 style="margin: 0 0 15px; color: #333333; font-size: 16px; font-weight: 600;">Reason for Skipping:</h3>
|
<h3 style="margin: 0 0 15px; color: #333333; font-size: 16px; font-weight: 600;">Reason for Skipping:</h3>
|
||||||
<div style="padding: 15px; background-color: #f8f9fa; border-left: 4px solid #17a2b8; border-radius: 4px; overflow-x: auto;">
|
<div style="padding: 15px; background-color: #f8f9fa; border-left: 4px solid #17a2b8; border-radius: 4px;">
|
||||||
${wrapRichText(data.skipReason)}
|
${wrapRichText(data.skipReason)}
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
|
|||||||
@ -7,20 +7,18 @@
|
|||||||
|
|
||||||
import { EmailHeaderConfig, EmailFooterConfig } from './helpers';
|
import { EmailHeaderConfig, EmailFooterConfig } from './helpers';
|
||||||
|
|
||||||
const appDomain = process.env.APP_DOMAIN || 'royalenfield.com';
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Company Information
|
* Company Information
|
||||||
*/
|
*/
|
||||||
export const CompanyInfo = {
|
export const CompanyInfo = {
|
||||||
name: 'Royal Enfield',
|
name: 'Royal Enfield',
|
||||||
productName: 'RE Flow', // Product name displayed in header
|
productName: 'RE Flow', // Product name displayed in header
|
||||||
website: `https://www.${appDomain}`,
|
website: 'https://www.royalenfield.com',
|
||||||
supportEmail: `support@${appDomain}`,
|
supportEmail: 'support@royalenfield.com',
|
||||||
|
|
||||||
// Logo configuration for email headers
|
// Logo configuration for email headers
|
||||||
logo: {
|
logo: {
|
||||||
url: `https://www.${appDomain}/content/dam/RE-Platform-Revamp/re-revamp-commons/logo.webp`,
|
url: 'https://www.royalenfield.com/content/dam/RE-Platform-Revamp/re-revamp-commons/logo.webp',
|
||||||
alt: 'Royal Enfield Logo',
|
alt: 'Royal Enfield Logo',
|
||||||
width: 220, // Logo width in pixels (wider for better visibility)
|
width: 220, // Logo width in pixels (wider for better visibility)
|
||||||
height: 65, // Logo height in pixels (proportional ratio ~3.4:1)
|
height: 65, // Logo height in pixels (proportional ratio ~3.4:1)
|
||||||
|
|||||||
@ -1,180 +0,0 @@
|
|||||||
/**
|
|
||||||
* Completion Documents Submitted Email Template
|
|
||||||
* Sent when dealer submits completion documents (Step 4)
|
|
||||||
*/
|
|
||||||
|
|
||||||
import { CompletionDocumentsSubmittedData } from './types';
|
|
||||||
import { getEmailFooter, getEmailHeader, HeaderStyles, getResponsiveStyles, wrapRichText, getEmailContainerStyles } from './helpers';
|
|
||||||
import { getBrandedHeader } from './branding.config';
|
|
||||||
|
|
||||||
export function getCompletionDocumentsSubmittedEmail(data: CompletionDocumentsSubmittedData): string {
|
|
||||||
return `
|
|
||||||
<!DOCTYPE html>
|
|
||||||
<html lang="en">
|
|
||||||
<head>
|
|
||||||
<meta charset="UTF-8">
|
|
||||||
<meta name="viewport" content="width=device-width, initial-scale=1.0, maximum-scale=1.0, user-scalable=no">
|
|
||||||
<meta http-equiv="X-UA-Compatible" content="IE=edge">
|
|
||||||
<meta name="format-detection" content="telephone=no">
|
|
||||||
<title>Completion Documents Submitted</title>
|
|
||||||
${getResponsiveStyles()}
|
|
||||||
</head>
|
|
||||||
<body style="margin: 0; padding: 0; font-family: Arial, Helvetica, sans-serif; background-color: #f4f4f4;">
|
|
||||||
<table role="presentation" style="width: 100%; border-collapse: collapse; background-color: #f4f4f4;" cellpadding="0" cellspacing="0">
|
|
||||||
<tr>
|
|
||||||
<td style="padding: 40px 0;">
|
|
||||||
<table role="presentation" class="email-container" style="${getEmailContainerStyles()}" cellpadding="0" cellspacing="0">
|
|
||||||
<!-- Header -->
|
|
||||||
${getEmailHeader(getBrandedHeader({
|
|
||||||
title: 'Completion Documents Submitted',
|
|
||||||
...HeaderStyles.success
|
|
||||||
}))}
|
|
||||||
|
|
||||||
<!-- Content -->
|
|
||||||
<tr>
|
|
||||||
<td class="email-content" style="padding: 40px 30px;">
|
|
||||||
<p style="margin: 0 0 20px; color: #333333; font-size: 16px; line-height: 1.6;">
|
|
||||||
Dear <strong style="color: #667eea;">${data.recipientName}</strong>,
|
|
||||||
</p>
|
|
||||||
|
|
||||||
<p style="margin: 0 0 30px; color: #666666; font-size: 16px; line-height: 1.6;">
|
|
||||||
<strong style="color: #333333;">${data.dealerName}</strong> has submitted completion documents for the activity <strong>"${data.activityName}"</strong> (Request ${data.requestId}).
|
|
||||||
</p>
|
|
||||||
|
|
||||||
<!-- Completion Details Box -->
|
|
||||||
<table role="presentation" style="width: 100%; border-collapse: collapse; background-color: #f8f9fa; border-radius: 6px; margin-bottom: 30px;" cellpadding="0" cellspacing="0">
|
|
||||||
<tr>
|
|
||||||
<td style="padding: 25px;">
|
|
||||||
<h2 style="margin: 0 0 20px; color: #333333; font-size: 18px; font-weight: 600;">Completion Details</h2>
|
|
||||||
|
|
||||||
<table role="presentation" style="width: 100%; border-collapse: collapse;" cellpadding="0" cellspacing="0">
|
|
||||||
<tr>
|
|
||||||
<td style="padding: 8px 0; color: #666666; font-size: 14px; width: 140px;">
|
|
||||||
<strong>Request ID:</strong>
|
|
||||||
</td>
|
|
||||||
<td style="padding: 8px 0; color: #333333; font-size: 14px;">
|
|
||||||
${data.requestId}
|
|
||||||
</td>
|
|
||||||
</tr>
|
|
||||||
${data.requestTitle ? `
|
|
||||||
<tr>
|
|
||||||
<td style="padding: 8px 0; color: #666666; font-size: 14px;">
|
|
||||||
<strong>Title:</strong>
|
|
||||||
</td>
|
|
||||||
<td style="padding: 8px 0; color: #333333; font-size: 14px;">
|
|
||||||
${data.requestTitle}
|
|
||||||
</td>
|
|
||||||
</tr>
|
|
||||||
` : ''}
|
|
||||||
<tr>
|
|
||||||
<td style="padding: 8px 0; color: #666666; font-size: 14px;">
|
|
||||||
<strong>Activity Name:</strong>
|
|
||||||
</td>
|
|
||||||
<td style="padding: 8px 0; color: #333333; font-size: 14px;">
|
|
||||||
${data.activityName}
|
|
||||||
</td>
|
|
||||||
</tr>
|
|
||||||
<tr>
|
|
||||||
<td style="padding: 8px 0; color: #666666; font-size: 14px;">
|
|
||||||
<strong>Dealer:</strong>
|
|
||||||
</td>
|
|
||||||
<td style="padding: 8px 0; color: #333333; font-size: 14px;">
|
|
||||||
${data.dealerName}
|
|
||||||
</td>
|
|
||||||
</tr>
|
|
||||||
<tr>
|
|
||||||
<td style="padding: 8px 0; color: #666666; font-size: 14px;">
|
|
||||||
<strong>Completion Date:</strong>
|
|
||||||
</td>
|
|
||||||
<td style="padding: 8px 0; color: #333333; font-size: 14px; font-weight: 600;">
|
|
||||||
${data.activityCompletionDate}
|
|
||||||
</td>
|
|
||||||
</tr>
|
|
||||||
${data.numberOfParticipants ? `
|
|
||||||
<tr>
|
|
||||||
<td style="padding: 8px 0; color: #666666; font-size: 14px;">
|
|
||||||
<strong>Participants:</strong>
|
|
||||||
</td>
|
|
||||||
<td style="padding: 8px 0; color: #333333; font-size: 14px;">
|
|
||||||
${data.numberOfParticipants}
|
|
||||||
</td>
|
|
||||||
</tr>
|
|
||||||
` : ''}
|
|
||||||
<tr>
|
|
||||||
<td style="padding: 8px 0; color: #666666; font-size: 14px;">
|
|
||||||
<strong>Total Expenses:</strong>
|
|
||||||
</td>
|
|
||||||
<td style="padding: 8px 0; color: #333333; font-size: 14px; font-weight: 600;">
|
|
||||||
₹${data.totalClosedExpenses.toLocaleString('en-IN', { minimumFractionDigits: 2, maximumFractionDigits: 2 })}
|
|
||||||
</td>
|
|
||||||
</tr>
|
|
||||||
${data.documentsCount ? `
|
|
||||||
<tr>
|
|
||||||
<td style="padding: 8px 0; color: #666666; font-size: 14px;">
|
|
||||||
<strong>Documents:</strong>
|
|
||||||
</td>
|
|
||||||
<td style="padding: 8px 0; color: #333333; font-size: 14px;">
|
|
||||||
${data.documentsCount} document(s) submitted
|
|
||||||
</td>
|
|
||||||
</tr>
|
|
||||||
` : ''}
|
|
||||||
<tr>
|
|
||||||
<td style="padding: 8px 0; color: #666666; font-size: 14px;">
|
|
||||||
<strong>Submitted On:</strong>
|
|
||||||
</td>
|
|
||||||
<td style="padding: 8px 0; color: #333333; font-size: 14px;">
|
|
||||||
${data.submittedDate} at ${data.submittedTime}
|
|
||||||
</td>
|
|
||||||
</tr>
|
|
||||||
</table>
|
|
||||||
</td>
|
|
||||||
</tr>
|
|
||||||
</table>
|
|
||||||
|
|
||||||
${data.expenseBreakdown ? `
|
|
||||||
<!-- Expense Breakdown -->
|
|
||||||
<div style="margin-bottom: 30px;">
|
|
||||||
<h3 style="margin: 0 0 15px; color: #333333; font-size: 16px; font-weight: 600;">Expense Breakdown:</h3>
|
|
||||||
<div style="padding: 15px; background-color: #f8f9fa; border-left: 4px solid #667eea; border-radius: 4px; overflow-x: auto;">
|
|
||||||
${wrapRichText(data.expenseBreakdown)}
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
` : ''}
|
|
||||||
|
|
||||||
<!-- Next Steps -->
|
|
||||||
<div style="padding: 20px; background-color: #e7f3ff; border-left: 4px solid #0066cc; border-radius: 4px; margin-bottom: 30px;">
|
|
||||||
<h3 style="margin: 0 0 10px; color: #004085; font-size: 16px; font-weight: 600;">Next Steps</h3>
|
|
||||||
<p style="margin: 0; color: #004085; font-size: 14px; line-height: 1.8;">
|
|
||||||
${data.nextApproverName
|
|
||||||
? `Completion documents are now pending review by <strong>${data.nextApproverName}</strong>. You will be notified once a decision is made.`
|
|
||||||
: `Completion documents have been submitted successfully. <strong>Your review and approval is required</strong> to proceed with the final claim approval. Please review the completion documents and take action on this request.`}
|
|
||||||
</p>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<!-- View Details Button -->
|
|
||||||
<table role="presentation" style="width: 100%; border-collapse: collapse; margin-bottom: 20px;" cellpadding="0" cellspacing="0">
|
|
||||||
<tr>
|
|
||||||
<td style="text-align: center;">
|
|
||||||
<a href="${data.viewDetailsLink}" class="cta-button" style="display: inline-block; padding: 15px 40px; background-color: #1a1a1a; color: #ffffff; text-decoration: none; text-align: center; border-radius: 6px; font-size: 16px; font-weight: 600; box-shadow: 0 2px 4px rgba(0, 0, 0, 0.2); min-width: 200px;">
|
|
||||||
View Request Details
|
|
||||||
</a>
|
|
||||||
</td>
|
|
||||||
</tr>
|
|
||||||
</table>
|
|
||||||
|
|
||||||
<p style="margin: 0; color: #666666; font-size: 14px; line-height: 1.6; text-align: center;">
|
|
||||||
Click the button above to review the completion documents and take action.
|
|
||||||
</p>
|
|
||||||
</td>
|
|
||||||
</tr>
|
|
||||||
|
|
||||||
${getEmailFooter(data.companyName)}
|
|
||||||
</table>
|
|
||||||
</td>
|
|
||||||
</tr>
|
|
||||||
</table>
|
|
||||||
</body>
|
|
||||||
</html>
|
|
||||||
`;
|
|
||||||
}
|
|
||||||
|
|
||||||
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue
Block a user