Compare commits

..

No commits in common. "2b0d13436d5bbd01c3c9c4c26fded679ba16bd92" and "d432627c6186bec4afa7af45ea7d6e9391dd312d" have entirely different histories.

96 changed files with 806 additions and 17415 deletions

325
Jenkinsfile vendored Normal file
View File

@ -0,0 +1,325 @@
// Jenkins declarative pipeline for deploying the Royal Enfield backend.
//
// Flow: pre-flight banner -> pull latest code on the remote host -> npm install
//       -> build -> stop old PM2 process -> start new PM2 process -> health check
//       -> email notification (success or failure).
//
// All deployment work happens ON THE REMOTE HOST over SSH. The heredoc delimiter
// is quoted ('ENDSSH') so the *local* shell does not expand $VARS; Groovy
// interpolates ${...} first, and every `\$` survives as a literal `$` that the
// *remote* shell expands.
pipeline {
    agent any

    environment {
        // Jenkins credential IDs (Manage Jenkins > Credentials)
        SSH_CREDENTIALS = 'cloudtopiaa'
        GIT_CREDENTIALS = 'git-cred'

        // Deployment target
        REMOTE_SERVER = 'ubuntu@160.187.166.17'
        PROJECT_NAME = 'Royal-Enfield-Backend'
        DEPLOY_PATH = '/home/ubuntu/Royal-Enfield/Re_Backend'
        REPO_URL = 'https://git.tech4biz.wiki/laxmanhalaki/Re_Backend.git'
        GIT_BRANCH = 'main'

        // Absolute tool paths on the remote host (nvm-managed Node v22.21.1);
        // absolute paths are used because non-interactive SSH sessions do not
        // source the nvm profile.
        NPM_PATH = '/home/ubuntu/.nvm/versions/node/v22.21.1/bin/npm'
        NODE_PATH = '/home/ubuntu/.nvm/versions/node/v22.21.1/bin/node'
        PM2_PATH = '/home/ubuntu/.nvm/versions/node/v22.21.1/bin/pm2'

        PM2_APP_NAME = 'royal-enfield-backend'
        APP_PORT = '5000'
        EMAIL_RECIPIENT = 'laxman.halaki@tech4biz.org'
    }

    options {
        timeout(time: 20, unit: 'MINUTES')      // abort runaway deployments
        disableConcurrentBuilds()               // never deploy the same app twice at once
        timestamps()
        buildDiscarder(logRotator(numToKeepStr: '10', daysToKeepStr: '30'))
    }

    stages {
        // Informational only: prints the deployment parameters into the build log.
        stage('Pre-deployment Check') {
            steps {
                script {
                    echo "═══════════════════════════════════════════"
                    echo "🚀 Starting ${PROJECT_NAME} Deployment"
                    echo "═══════════════════════════════════════════"
                    echo "Server: ${REMOTE_SERVER}"
                    echo "Deploy Path: ${DEPLOY_PATH}"
                    echo "PM2 App: ${PM2_APP_NAME}"
                    echo "Build #: ${BUILD_NUMBER}"
                    echo "═══════════════════════════════════════════"
                }
            }
        }

        // Fetch/clone the repository on the remote host. Credentials are injected
        // into the git URL for this one command only; they are NOT persisted on
        // the server (no credential.helper store — the store helper would write
        // the username/password in plaintext to ~/.git-credentials).
        stage('Pull Latest Code') {
            steps {
                sshagent(credentials: [SSH_CREDENTIALS]) {
                    withCredentials([usernamePassword(credentialsId: GIT_CREDENTIALS, usernameVariable: 'GIT_USER', passwordVariable: 'GIT_PASS')]) {
                        sh """
                            ssh -o StrictHostKeyChecking=no -o ConnectTimeout=10 ${REMOTE_SERVER} << 'ENDSSH'
                            set -e
                            echo "📦 Git Operations..."
                            if [ -d "${DEPLOY_PATH}/.git" ]; then
                                cd ${DEPLOY_PATH}
                                echo "Configuring git..."
                                git config --global --add safe.directory ${DEPLOY_PATH}
                                echo "Fetching updates..."
                                git fetch https://${GIT_USER}:${GIT_PASS}@git.tech4biz.wiki/laxmanhalaki/Re_Backend.git ${GIT_BRANCH}
                                CURRENT_COMMIT=\$(git rev-parse HEAD)
                                LATEST_COMMIT=\$(git rev-parse FETCH_HEAD)
                                if [ "\$CURRENT_COMMIT" = "\$LATEST_COMMIT" ]; then
                                    echo "⚠️ Already up to date. No changes to deploy."
                                    echo "Current: \$CURRENT_COMMIT"
                                else
                                    echo "Pulling new changes..."
                                    # Hard reset + clean: the deploy dir is disposable, local edits are discarded.
                                    git reset --hard FETCH_HEAD
                                    git clean -fd
                                    echo "✓ Updated from \${CURRENT_COMMIT:0:7} to \${LATEST_COMMIT:0:7}"
                                fi
                            else
                                echo "Cloning repository..."
                                rm -rf ${DEPLOY_PATH}
                                mkdir -p /home/ubuntu/Royal-Enfield
                                cd /home/ubuntu/Royal-Enfield
                                git clone https://${GIT_USER}:${GIT_PASS}@git.tech4biz.wiki/laxmanhalaki/Re_Backend.git Re_Backend
                                cd ${DEPLOY_PATH}
                                git checkout ${GIT_BRANCH}
                                git config --global --add safe.directory ${DEPLOY_PATH}
                                echo "✓ Repository cloned successfully"
                            fi
                            cd ${DEPLOY_PATH}
                            echo "Current commit: \$(git log -1 --oneline)"
ENDSSH
                        """
                    }
                }
            }
        }

        stage('Install Dependencies') {
            steps {
                sshagent(credentials: [SSH_CREDENTIALS]) {
                    sh """
                        ssh -o StrictHostKeyChecking=no ${REMOTE_SERVER} << 'ENDSSH'
                        set -e
                        export PATH="/home/ubuntu/.nvm/versions/node/v22.21.1/bin:\$PATH"
                        cd ${DEPLOY_PATH}
                        echo "🔧 Environment Check..."
                        echo "Node: \$(${NODE_PATH} -v)"
                        echo "NPM: \$(${NPM_PATH} -v)"
                        echo ""
                        echo "📥 Installing Dependencies..."
                        ${NPM_PATH} install --prefer-offline --no-audit --progress=false
                        echo ""
                        echo "✅ Dependencies installed successfully!"
ENDSSH
                    """
                }
            }
        }

        stage('Build Application') {
            steps {
                sshagent(credentials: [SSH_CREDENTIALS]) {
                    sh """
                        ssh -o StrictHostKeyChecking=no ${REMOTE_SERVER} << 'ENDSSH'
                        set -e
                        export PATH="/home/ubuntu/.nvm/versions/node/v22.21.1/bin:\$PATH"
                        cd ${DEPLOY_PATH}
                        echo "🔨 Building application..."
                        ${NPM_PATH} run build
                        echo "✅ Build completed successfully!"
ENDSSH
                    """
                }
            }
        }

        // Stop-and-delete (rather than restart) so the subsequent start picks up
        // a completely fresh process with the new build.
        stage('Stop PM2 Process') {
            steps {
                sshagent(credentials: [SSH_CREDENTIALS]) {
                    sh """
                        ssh -o StrictHostKeyChecking=no ${REMOTE_SERVER} << 'ENDSSH'
                        set -e
                        export PATH="/home/ubuntu/.nvm/versions/node/v22.21.1/bin:\$PATH"
                        echo "🛑 Stopping existing PM2 process..."
                        if ${PM2_PATH} list | grep -q "${PM2_APP_NAME}"; then
                            echo "Stopping ${PM2_APP_NAME}..."
                            ${PM2_PATH} stop ${PM2_APP_NAME} || true
                            ${PM2_PATH} delete ${PM2_APP_NAME} || true
                            echo "✓ Process stopped"
                        else
                            echo "No existing process found"
                        fi
ENDSSH
                    """
                }
            }
        }

        stage('Start with PM2') {
            steps {
                sshagent(credentials: [SSH_CREDENTIALS]) {
                    sh """
                        ssh -o StrictHostKeyChecking=no ${REMOTE_SERVER} << 'ENDSSH'
                        set -e
                        export PATH="/home/ubuntu/.nvm/versions/node/v22.21.1/bin:\$PATH"
                        cd ${DEPLOY_PATH}
                        echo "🚀 Starting application with PM2..."
                        # Run "npm start" under PM2 supervision
                        ${PM2_PATH} start ${NPM_PATH} --name "${PM2_APP_NAME}" -- start
                        echo ""
                        echo "⏳ Waiting for application to start..."
                        sleep 5
                        # Persist the process list so PM2 resurrects it after a reboot
                        ${PM2_PATH} save
                        echo ""
                        echo "📊 PM2 Process Status:"
                        ${PM2_PATH} list
                        echo ""
                        echo "📝 Application Logs:"
                        ${PM2_PATH} logs ${PM2_APP_NAME} --lines 20 --nostream || true
                        echo ""
                        echo "✅ Application started successfully!"
ENDSSH
                    """
                }
            }
        }

        // Fails the build (exit 1) if the PM2 process is not online; the port
        // check is advisory only because the app may still be binding.
        stage('Health Check') {
            steps {
                sshagent(credentials: [SSH_CREDENTIALS]) {
                    sh """
                        ssh -o StrictHostKeyChecking=no ${REMOTE_SERVER} << 'ENDSSH'
                        set -e
                        export PATH="/home/ubuntu/.nvm/versions/node/v22.21.1/bin:\$PATH"
                        echo "🔍 Deployment Verification..."
                        if ${PM2_PATH} list | grep -q "${PM2_APP_NAME}.*online"; then
                            echo "✓ PM2 process is running"
                        else
                            echo "✗ PM2 process is NOT running!"
                            ${PM2_PATH} logs ${PM2_APP_NAME} --lines 50 --nostream || true
                            exit 1
                        fi
                        echo ""
                        echo "Checking if port ${APP_PORT} is listening..."
                        if ss -tuln | grep -q ":${APP_PORT} "; then
                            echo "✓ Application is listening on port ${APP_PORT}"
                        else
                            echo "⚠️ Port ${APP_PORT} not detected (may take a moment to start)"
                        fi
                        echo ""
                        echo "📊 Process Information:"
                        ${PM2_PATH} info ${PM2_APP_NAME}
                        echo ""
                        echo "═══════════════════════════════════════════"
                        echo "✅ DEPLOYMENT SUCCESSFUL"
                        echo "═══════════════════════════════════════════"
ENDSSH
                    """
                }
            }
        }
    }

    post {
        always {
            cleanWs()   // wipe the Jenkins workspace regardless of outcome
        }
        success {
            script {
                def duration = currentBuild.durationString.replace(' and counting', '')
                mail to: "${EMAIL_RECIPIENT}",
                     subject: "✅ ${PROJECT_NAME} - Deployment Successful #${BUILD_NUMBER}",
                     body: """
Deployment completed successfully!
Project: ${PROJECT_NAME}
Build: #${BUILD_NUMBER}
Duration: ${duration}
Server: ${REMOTE_SERVER}
PM2 App: ${PM2_APP_NAME}
Port: ${APP_PORT}
Deployed at: ${new Date().format('yyyy-MM-dd HH:mm:ss')}
Console: ${BUILD_URL}console
Commands to manage:
- View logs: pm2 logs ${PM2_APP_NAME}
- Restart: pm2 restart ${PM2_APP_NAME}
- Stop: pm2 stop ${PM2_APP_NAME}
"""
            }
        }
        failure {
            script {
                // Best-effort: try to pull recent PM2 logs into the failure email;
                // if the SSH hop itself fails, fall back to a log-less notification.
                sshagent(credentials: [SSH_CREDENTIALS]) {
                    try {
                        def logs = sh(
                            script: """ssh -o StrictHostKeyChecking=no ${REMOTE_SERVER} '
                                export PATH="/home/ubuntu/.nvm/versions/node/v22.21.1/bin:\$PATH"
                                ${PM2_PATH} logs ${PM2_APP_NAME} --lines 50 --nostream || echo "No logs available"
                            '""",
                            returnStdout: true
                        ).trim()
                        mail to: "${EMAIL_RECIPIENT}",
                             subject: "❌ ${PROJECT_NAME} - Deployment Failed #${BUILD_NUMBER}",
                             body: """
Deployment FAILED!
Project: ${PROJECT_NAME}
Build: #${BUILD_NUMBER}
Server: ${REMOTE_SERVER}
Failed at: ${new Date().format('yyyy-MM-dd HH:mm:ss')}
Console Log: ${BUILD_URL}console
Recent PM2 Logs:
${logs}
Action required immediately!
"""
                    } catch (Exception e) {
                        mail to: "${EMAIL_RECIPIENT}",
                             subject: "❌ ${PROJECT_NAME} - Deployment Failed #${BUILD_NUMBER}",
                             body: """
Deployment FAILED!
Project: ${PROJECT_NAME}
Build: #${BUILD_NUMBER}
Server: ${REMOTE_SERVER}
Failed at: ${new Date().format('yyyy-MM-dd HH:mm:ss')}
Console Log: ${BUILD_URL}console
Could not retrieve PM2 logs. Please check manually.
"""
                    }
                }
            }
        }
    }
}

View File

@ -1,63 +0,0 @@
# Migration Merge Complete ✅
## Status: All Conflicts Resolved
Both migration files have been successfully merged with all conflicts resolved.
## Files Merged
### 1. `src/scripts/auto-setup.ts`
- **Status**: Clean, no conflict markers
- **Migrations**: All 40 migrations in correct order
- **Format**: Uses `require()` for CommonJS compatibility
### 2. `src/scripts/migrate.ts`
- **Status**: Clean, no conflict markers
- **Migrations**: All 40 migrations in correct order
- **Format**: Uses ES6 `import * as` syntax
## Migration Order (Final)
### Base Branch Migrations (m0-m29)
1. m0-m27: Core system migrations
2. m28: `20250130-migrate-to-vertex-ai`
3. m29: `20251203-add-user-notification-preferences`
### Dealer Claim Branch Migrations (m30-m39)
4. m30: `20251210-add-workflow-type-support`
5. m31: `20251210-enhance-workflow-templates`
6. m32: `20251210-add-template-id-foreign-key`
7. m33: `20251210-create-dealer-claim-tables`
8. m34: `20251210-create-proposal-cost-items-table`
9. m35: `20251211-create-internal-orders-table`
10. m36: `20251211-create-claim-budget-tracking-table`
11. m37: `20251213-drop-claim-details-invoice-columns`
12. m38: `20251213-create-claim-invoice-credit-note-tables`
13. m39: `20251214-create-dealer-completion-expenses`
## Verification
✅ No conflict markers (`<<<<<<<`, `=======`, `>>>>>>>`) found
✅ All migrations properly ordered
✅ Base branch migrations come first
✅ Dealer claim migrations follow
✅ Both files synchronized
## Next Steps
1. **If you see conflicts in your IDE/Git client:**
- Refresh your IDE/editor
- Run `git status` to check Git state
- If conflicts show in Git, run: `git add src/scripts/auto-setup.ts src/scripts/migrate.ts`
2. **Test the migrations:**
```bash
npm run migrate
# or
npm run setup
```
## Files Are Ready ✅
Both files are properly merged and ready to use. All 40 migrations are in the correct order with base branch migrations first, followed by dealer claim branch migrations.

View File

@ -1,2 +1,2 @@
import{a as t}from"./index-fG9vuU_E.js";import"./radix-vendor-DA0cB_hD.js";import"./charts-vendor-Cji9-Yri.js";import"./utils-vendor-DHm03ykU.js";import"./ui-vendor-BPwaxA-i.js";import"./socket-vendor-TjCxX7sJ.js";import"./redux-vendor-tbZCm13o.js";import"./router-vendor-CRr9x_Jp.js";async function m(n){return(await t.post(`/conclusions/${n}/generate`)).data.data}async function d(n,o){return(await t.post(`/conclusions/${n}/finalize`,{finalRemark:o})).data.data}async function f(n){return(await t.get(`/conclusions/${n}`)).data.data}export{d as finalizeConclusion,m as generateConclusion,f as getConclusion}; import{a as t}from"./index-9cOIFSn9.js";import"./radix-vendor-C2EbRL2a.js";import"./charts-vendor-Cji9-Yri.js";import"./utils-vendor-DHm03ykU.js";import"./ui-vendor-BmvKDhMD.js";import"./socket-vendor-TjCxX7sJ.js";import"./redux-vendor-tbZCm13o.js";import"./router-vendor-CRr9x_Jp.js";async function m(n){return(await t.post(`/conclusions/${n}/generate`)).data.data}async function d(n,o){return(await t.post(`/conclusions/${n}/finalize`,{finalRemark:o})).data.data}async function f(n){return(await t.get(`/conclusions/${n}`)).data.data}export{d as finalizeConclusion,m as generateConclusion,f as getConclusion};
//# sourceMappingURL=conclusionApi-CFqAjzFU.js.map //# sourceMappingURL=conclusionApi-uNxtglEr.js.map

View File

@ -1 +1 @@
{"version":3,"file":"conclusionApi-CFqAjzFU.js","sources":["../../src/services/conclusionApi.ts"],"sourcesContent":["import apiClient from './authApi';\r\n\r\nexport interface ConclusionRemark {\r\n conclusionId: string;\r\n requestId: string;\r\n aiGeneratedRemark: string | null;\r\n aiModelUsed: string | null;\r\n aiConfidenceScore: number | null;\r\n finalRemark: string | null;\r\n editedBy: string | null;\r\n isEdited: boolean;\r\n editCount: number;\r\n approvalSummary: any;\r\n documentSummary: any;\r\n keyDiscussionPoints: string[];\r\n generatedAt: string | null;\r\n finalizedAt: string | null;\r\n createdAt: string;\r\n updatedAt: string;\r\n}\r\n\r\n/**\r\n * Generate AI-powered conclusion remark\r\n */\r\nexport async function generateConclusion(requestId: string): Promise<{\r\n conclusionId: string;\r\n aiGeneratedRemark: string;\r\n keyDiscussionPoints: string[];\r\n confidence: number;\r\n generatedAt: string;\r\n}> {\r\n const response = await apiClient.post(`/conclusions/${requestId}/generate`);\r\n return response.data.data;\r\n}\r\n\r\n/**\r\n * Update conclusion remark (edit by initiator)\r\n */\r\nexport async function updateConclusion(requestId: string, finalRemark: string): Promise<ConclusionRemark> {\r\n const response = await apiClient.put(`/conclusions/${requestId}`, { finalRemark });\r\n return response.data.data;\r\n}\r\n\r\n/**\r\n * Finalize conclusion and close request\r\n */\r\nexport async function finalizeConclusion(requestId: string, finalRemark: string): Promise<{\r\n conclusionId: string;\r\n requestNumber: string;\r\n status: string;\r\n finalRemark: string;\r\n finalizedAt: string;\r\n}> {\r\n const response = await apiClient.post(`/conclusions/${requestId}/finalize`, { finalRemark });\r\n return response.data.data;\r\n}\r\n\r\n/**\r\n * Get conclusion for a request\r\n */\r\nexport async function getConclusion(requestId: string): Promise<ConclusionRemark> {\r\n const response = await 
apiClient.get(`/conclusions/${requestId}`);\r\n return response.data.data;\r\n}\r\n\r\n"],"names":["generateConclusion","requestId","apiClient","finalizeConclusion","finalRemark","getConclusion"],"mappings":"6RAwBA,eAAsBA,EAAmBC,EAMtC,CAED,OADiB,MAAMC,EAAU,KAAK,gBAAgBD,CAAS,WAAW,GAC1D,KAAK,IACvB,CAaA,eAAsBE,EAAmBF,EAAmBG,EAMzD,CAED,OADiB,MAAMF,EAAU,KAAK,gBAAgBD,CAAS,YAAa,CAAE,YAAAG,EAAa,GAC3E,KAAK,IACvB,CAKA,eAAsBC,EAAcJ,EAA8C,CAEhF,OADiB,MAAMC,EAAU,IAAI,gBAAgBD,CAAS,EAAE,GAChD,KAAK,IACvB"} {"version":3,"file":"conclusionApi-uNxtglEr.js","sources":["../../src/services/conclusionApi.ts"],"sourcesContent":["import apiClient from './authApi';\r\n\r\nexport interface ConclusionRemark {\r\n conclusionId: string;\r\n requestId: string;\r\n aiGeneratedRemark: string | null;\r\n aiModelUsed: string | null;\r\n aiConfidenceScore: number | null;\r\n finalRemark: string | null;\r\n editedBy: string | null;\r\n isEdited: boolean;\r\n editCount: number;\r\n approvalSummary: any;\r\n documentSummary: any;\r\n keyDiscussionPoints: string[];\r\n generatedAt: string | null;\r\n finalizedAt: string | null;\r\n createdAt: string;\r\n updatedAt: string;\r\n}\r\n\r\n/**\r\n * Generate AI-powered conclusion remark\r\n */\r\nexport async function generateConclusion(requestId: string): Promise<{\r\n conclusionId: string;\r\n aiGeneratedRemark: string;\r\n keyDiscussionPoints: string[];\r\n confidence: number;\r\n generatedAt: string;\r\n}> {\r\n const response = await apiClient.post(`/conclusions/${requestId}/generate`);\r\n return response.data.data;\r\n}\r\n\r\n/**\r\n * Update conclusion remark (edit by initiator)\r\n */\r\nexport async function updateConclusion(requestId: string, finalRemark: string): Promise<ConclusionRemark> {\r\n const response = await apiClient.put(`/conclusions/${requestId}`, { finalRemark });\r\n return response.data.data;\r\n}\r\n\r\n/**\r\n * Finalize conclusion and close request\r\n */\r\nexport async function finalizeConclusion(requestId: string, 
finalRemark: string): Promise<{\r\n conclusionId: string;\r\n requestNumber: string;\r\n status: string;\r\n finalRemark: string;\r\n finalizedAt: string;\r\n}> {\r\n const response = await apiClient.post(`/conclusions/${requestId}/finalize`, { finalRemark });\r\n return response.data.data;\r\n}\r\n\r\n/**\r\n * Get conclusion for a request\r\n */\r\nexport async function getConclusion(requestId: string): Promise<ConclusionRemark> {\r\n const response = await apiClient.get(`/conclusions/${requestId}`);\r\n return response.data.data;\r\n}\r\n\r\n"],"names":["generateConclusion","requestId","apiClient","finalizeConclusion","finalRemark","getConclusion"],"mappings":"6RAwBA,eAAsBA,EAAmBC,EAMtC,CAED,OADiB,MAAMC,EAAU,KAAK,gBAAgBD,CAAS,WAAW,GAC1D,KAAK,IACvB,CAaA,eAAsBE,EAAmBF,EAAmBG,EAMzD,CAED,OADiB,MAAMF,EAAU,KAAK,gBAAgBD,CAAS,YAAa,CAAE,YAAAG,EAAa,GAC3E,KAAK,IACvB,CAKA,eAAsBC,EAAcJ,EAA8C,CAEhF,OADiB,MAAMC,EAAU,IAAI,gBAAgBD,CAAS,EAAE,GAChD,KAAK,IACvB"}

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@ -1,2 +0,0 @@
import{g as s}from"./index-fG9vuU_E.js";import"./radix-vendor-DA0cB_hD.js";import"./charts-vendor-Cji9-Yri.js";import"./utils-vendor-DHm03ykU.js";import"./ui-vendor-BPwaxA-i.js";import"./socket-vendor-TjCxX7sJ.js";import"./redux-vendor-tbZCm13o.js";import"./router-vendor-CRr9x_Jp.js";function R(o){const{requestId:e,status:t,request:a,navigate:r}=o;if((t==null?void 0:t.toLowerCase())==="draft"||t==="DRAFT"){r(`/edit-request/${e}`);return}const i=s(e);r(i)}export{R as navigateToRequest};
//# sourceMappingURL=requestNavigation-KN4bh371.js.map

View File

@ -1 +0,0 @@
{"version":3,"file":"requestNavigation-KN4bh371.js","sources":["../../src/utils/requestNavigation.ts"],"sourcesContent":["/**\r\n * Global Request Navigation Utility\r\n * \r\n * Centralized navigation logic for request-related routes.\r\n * This utility decides where to navigate when clicking on request cards\r\n * from anywhere in the application.\r\n * \r\n * Features:\r\n * - Single point of navigation logic\r\n * - Handles draft vs active requests\r\n * - Supports different flow types (CUSTOM, DEALER_CLAIM)\r\n * - Type-safe navigation\r\n */\r\n\r\nimport { NavigateFunction } from 'react-router-dom';\r\nimport { getRequestDetailRoute, RequestFlowType } from './requestTypeUtils';\r\n\r\nexport interface RequestNavigationOptions {\r\n requestId: string;\r\n requestTitle?: string;\r\n status?: string;\r\n request?: any; // Full request object if available\r\n navigate: NavigateFunction;\r\n}\r\n\r\n/**\r\n * Navigate to the appropriate request detail page based on request type\r\n * \r\n * This is the single point of navigation for all request cards.\r\n * It handles:\r\n * - Draft requests (navigate to edit)\r\n * - Different flow types (CUSTOM, DEALER_CLAIM)\r\n * - Status-based routing\r\n */\r\nexport function navigateToRequest(options: RequestNavigationOptions): void {\r\n const { requestId, status, request, navigate } = options;\r\n\r\n // Check if request is a draft - if so, route to edit form instead of detail view\r\n const isDraft = status?.toLowerCase() === 'draft' || status === 'DRAFT';\r\n if (isDraft) {\r\n navigate(`/edit-request/${requestId}`);\r\n return;\r\n }\r\n\r\n // Determine the appropriate route based on request type\r\n const route = getRequestDetailRoute(requestId, request);\r\n navigate(route);\r\n}\r\n\r\n/**\r\n * Navigate to create a new request based on flow type\r\n */\r\nexport function navigateToCreateRequest(\r\n navigate: NavigateFunction,\r\n flowType: RequestFlowType = 'CUSTOM'\r\n): void {\r\n const route = flowType === 
'DEALER_CLAIM' \r\n ? '/claim-management' \r\n : '/new-request';\r\n navigate(route);\r\n}\r\n\r\n/**\r\n * Create a navigation handler function for request cards\r\n * This can be used directly in onClick handlers\r\n */\r\nexport function createRequestNavigationHandler(\r\n navigate: NavigateFunction\r\n) {\r\n return (requestId: string, requestTitle?: string, status?: string, request?: any) => {\r\n navigateToRequest({\r\n requestId,\r\n requestTitle,\r\n status,\r\n request,\r\n navigate,\r\n });\r\n };\r\n}\r\n"],"names":["navigateToRequest","options","requestId","status","request","navigate","route","getRequestDetailRoute"],"mappings":"6RAkCO,SAASA,EAAkBC,EAAyC,CACzE,KAAM,CAAE,UAAAC,EAAW,OAAAC,EAAQ,QAAAC,EAAS,SAAAC,GAAaJ,EAIjD,IADgBE,GAAA,YAAAA,EAAQ,iBAAkB,SAAWA,IAAW,QACnD,CACXE,EAAS,iBAAiBH,CAAS,EAAE,EACrC,MACF,CAGA,MAAMI,EAAQC,EAAsBL,CAAkB,EACtDG,EAASC,CAAK,CAChB"}

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@ -52,15 +52,15 @@
transition: transform 0.2s ease; transition: transform 0.2s ease;
} }
</style> </style>
<script type="module" crossorigin src="/assets/index-fG9vuU_E.js"></script> <script type="module" crossorigin src="/assets/index-9cOIFSn9.js"></script>
<link rel="modulepreload" crossorigin href="/assets/charts-vendor-Cji9-Yri.js"> <link rel="modulepreload" crossorigin href="/assets/charts-vendor-Cji9-Yri.js">
<link rel="modulepreload" crossorigin href="/assets/radix-vendor-DA0cB_hD.js"> <link rel="modulepreload" crossorigin href="/assets/radix-vendor-C2EbRL2a.js">
<link rel="modulepreload" crossorigin href="/assets/utils-vendor-DHm03ykU.js"> <link rel="modulepreload" crossorigin href="/assets/utils-vendor-DHm03ykU.js">
<link rel="modulepreload" crossorigin href="/assets/ui-vendor-BPwaxA-i.js"> <link rel="modulepreload" crossorigin href="/assets/ui-vendor-BmvKDhMD.js">
<link rel="modulepreload" crossorigin href="/assets/socket-vendor-TjCxX7sJ.js"> <link rel="modulepreload" crossorigin href="/assets/socket-vendor-TjCxX7sJ.js">
<link rel="modulepreload" crossorigin href="/assets/redux-vendor-tbZCm13o.js"> <link rel="modulepreload" crossorigin href="/assets/redux-vendor-tbZCm13o.js">
<link rel="modulepreload" crossorigin href="/assets/router-vendor-CRr9x_Jp.js"> <link rel="modulepreload" crossorigin href="/assets/router-vendor-CRr9x_Jp.js">
<link rel="stylesheet" crossorigin href="/assets/index-DAM_E-zB.css"> <link rel="stylesheet" crossorigin href="/assets/index-BmOYs32D.css">
</head> </head>
<body> <body>
<div id="root"></div> <div id="root"></div>

View File

@ -1,134 +0,0 @@
# Claim Management - Approver Mapping Documentation
## Overview
The Claim Management workflow has **8 fixed steps** with specific approvers and action types. This document explains how approvers are mapped when a claim request is created.
## 8-Step Workflow Structure
### Step 1: Dealer Proposal Submission
- **Approver Type**: Dealer (External)
- **Action Type**: **SUBMIT** (Dealer submits proposal documents)
- **TAT**: 72 hours
- **Mapping**: Uses `dealerEmail` from claim data
- **Status**: PENDING (waiting for dealer to submit)
### Step 2: Requestor Evaluation
- **Approver Type**: Initiator (Internal RE Employee)
- **Action Type**: **APPROVE/REJECT** (Requestor reviews dealer proposal)
- **TAT**: 48 hours
- **Mapping**: Uses `initiatorId` (the person who created the request)
- **Status**: PENDING (waiting for requestor to evaluate)
### Step 3: Department Lead Approval
- **Approver Type**: Department Lead (Internal RE Employee)
- **Action Type**: **APPROVE/REJECT** (Department lead approves and blocks IO budget)
- **TAT**: 72 hours
- **Mapping**:
- Option 1: Find user with role `MANAGEMENT` in same department as initiator
- Option 2: Use initiator's `manager` field from User model
- Option 3: Find user with designation containing "Lead" or "Head" in same department
- **Status**: PENDING (waiting for department lead approval)
### Step 4: Activity Creation
- **Approver Type**: System (Auto-processed)
- **Action Type**: **AUTO** (System automatically creates activity)
- **TAT**: 1 hour
- **Mapping**: System user (`system@royalenfield.com`)
- **Status**: Auto-approved when triggered
### Step 5: Dealer Completion Documents
- **Approver Type**: Dealer (External)
- **Action Type**: **SUBMIT** (Dealer submits completion documents)
- **TAT**: 120 hours
- **Mapping**: Uses `dealerEmail` from claim data
- **Status**: PENDING (waiting for dealer to submit)
### Step 6: Requestor Claim Approval
- **Approver Type**: Initiator (Internal RE Employee)
- **Action Type**: **APPROVE/REJECT** (Requestor approves completion)
- **TAT**: 48 hours
- **Mapping**: Uses `initiatorId`
- **Status**: PENDING (waiting for requestor approval)
### Step 7: E-Invoice Generation
- **Approver Type**: System (Auto-processed via DMS)
- **Action Type**: **AUTO** (System generates e-invoice via DMS integration)
- **TAT**: 1 hour
- **Mapping**: System user (`system@royalenfield.com`)
- **Status**: Auto-approved when triggered
### Step 8: Credit Note Confirmation
- **Approver Type**: Finance Team (Internal RE Employee)
- **Action Type**: **APPROVE/REJECT** (Finance confirms credit note)
- **TAT**: 48 hours
- **Mapping**:
- Option 1: Find user with role `MANAGEMENT` and department contains "Finance"
- Option 2: Find user with designation containing "Finance" or "Accountant"
- Option 3: Use configured finance team email from admin settings
- **Status**: PENDING (waiting for finance confirmation)
- **Is Final Approver**: Yes (final step)
## Current Implementation Issues
### Problems:
1. **Steps 1 & 5**: The dealer's email is not used — a placeholder UUID is assigned instead
2. **Step 3**: Department Lead not resolved - using placeholder UUID
3. **Step 8**: Finance team not resolved - using placeholder UUID
4. **All steps**: The initiator's email is used even for steps assigned to other approvers
### Impact:
- Steps 1, 3, 5, 8 won't have correct approvers assigned
- Notifications won't be sent to correct users
- Workflow will be stuck waiting for non-existent approvers
## Action Types Summary
| Step | Action Type | Description |
|------|-------------|-------------|
| 1 | SUBMIT | Dealer submits proposal (not approve/reject) |
| 2 | APPROVE/REJECT | Requestor evaluates proposal |
| 3 | APPROVE/REJECT | Department Lead approves and blocks budget |
| 4 | AUTO | System creates activity automatically |
| 5 | SUBMIT | Dealer submits completion documents |
| 6 | APPROVE/REJECT | Requestor approves completion |
| 7 | AUTO | System generates e-invoice via DMS |
| 8 | APPROVE/REJECT | Finance confirms credit note (FINAL) |
## Approver Resolution Logic
### For Dealer Steps (1, 5):
```typescript
// Use dealer email from claim data
const dealerEmail = claimData.dealerEmail;
// Find or create dealer user (if dealer is external, may need special handling)
const dealerUser = await User.findOne({ where: { email: dealerEmail } });
// If dealer doesn't exist in system, create participant entry
```
### For Department Lead (Step 3):
```typescript
// Priority order:
1. Find user with same department and role = 'MANAGEMENT'
2. Use initiator.manager field to find manager
3. Find user with designation containing "Lead" or "Head" in same department
4. Fallback: Use initiator's manager email from User model
```
### For Finance Team (Step 8):
```typescript
// Priority order:
1. Find user with department containing "Finance" and role = 'MANAGEMENT'
2. Find user with designation containing "Finance" or "Accountant"
3. Use configured finance team email from admin_configurations table
4. Fallback: Use default finance email (e.g., finance@royalenfield.com)
```
## Next Steps
The `createClaimApprovalLevels()` method needs to be updated to:
1. Accept `dealerEmail` parameter
2. Resolve Department Lead dynamically
3. Resolve Finance team member dynamically
4. Handle cases where approvers don't exist in the system

View File

@ -1,149 +0,0 @@
# Cost Breakup Table Architecture
## Overview
This document describes the enhanced architecture for storing cost breakups in the Dealer Claim Management system. Instead of storing cost breakups as JSONB arrays, we now use a dedicated relational table for better querying, reporting, and data integrity.
## Architecture Decision
### Previous Approach (JSONB)
- **Storage**: Cost breakups stored as JSONB array in `dealer_proposal_details.cost_breakup`
- **Limitations**:
- Difficult to query individual cost items
- Hard to update specific items
- Not ideal for reporting and analytics
- No referential integrity
### New Approach (Separate Table)
- **Storage**: Dedicated `dealer_proposal_cost_items` table
- **Benefits**:
- Better querying and filtering capabilities
- Easier to update individual cost items
- Better for analytics and reporting
- Maintains referential integrity
- Supports proper ordering of items
## Database Schema
### Table: `dealer_proposal_cost_items`
```sql
CREATE TABLE dealer_proposal_cost_items (
cost_item_id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
proposal_id UUID NOT NULL REFERENCES dealer_proposal_details(proposal_id) ON DELETE CASCADE,
request_id UUID NOT NULL REFERENCES workflow_requests(request_id) ON DELETE CASCADE,
item_description VARCHAR(500) NOT NULL,
amount DECIMAL(15, 2) NOT NULL,
item_order INTEGER NOT NULL DEFAULT 0,
created_at TIMESTAMP NOT NULL DEFAULT NOW(),
updated_at TIMESTAMP NOT NULL DEFAULT NOW()
);
```
**Indexes**:
- `idx_proposal_cost_items_proposal_id` on `proposal_id`
- `idx_proposal_cost_items_request_id` on `request_id`
- `idx_proposal_cost_items_proposal_order` on `(proposal_id, item_order)`
## Backward Compatibility
The system maintains backward compatibility by:
1. **Dual Storage**: Still saves cost breakups to JSONB field for backward compatibility
2. **Smart Retrieval**: When fetching proposal details:
- First tries to get cost items from the new table
- Falls back to JSONB field if table is empty
3. **Migration**: Automatically migrates existing JSONB data to the new table during migration
## API Response Format
The API always returns cost breakups as an array, regardless of storage method:
```json
{
"proposalDetails": {
"proposalId": "uuid",
"costBreakup": [
{
"description": "Item 1",
"amount": 10000
},
{
"description": "Item 2",
"amount": 20000
}
],
"costItems": [
{
"costItemId": "uuid",
"itemDescription": "Item 1",
"amount": 10000,
"itemOrder": 0
}
]
}
}
```
## Implementation Details
### Saving Cost Items
When a proposal is submitted:
1. Save proposal details to `dealer_proposal_details` (with JSONB for backward compatibility)
2. Delete existing cost items for the proposal (if updating)
3. Insert new cost items into `dealer_proposal_cost_items` table
4. Items are ordered by `itemOrder` field
### Retrieving Cost Items
When fetching proposal details:
1. Query `dealer_proposal_details` with `include` for `costItems`
2. If cost items exist in the table, use them
3. If not, fall back to parsing JSONB `costBreakup` field
4. Always return as a normalized array format
## Migration
The migration (`20251210-create-proposal-cost-items-table.ts`):
1. Creates the new table
2. Creates indexes for performance
3. Migrates existing JSONB data to the new table automatically
4. Handles errors gracefully (doesn't fail if migration of existing data fails)
## Model Associations
```typescript
DealerProposalDetails.hasMany(DealerProposalCostItem, {
as: 'costItems',
foreignKey: 'proposalId',
sourceKey: 'proposalId'
});
DealerProposalCostItem.belongsTo(DealerProposalDetails, {
as: 'proposal',
foreignKey: 'proposalId',
targetKey: 'proposalId'
});
```
## Benefits for Frontend
1. **Consistent Format**: Always receives cost breakups as an array
2. **No Changes Required**: Frontend code doesn't need to change
3. **Better Performance**: Can query specific cost items if needed
4. **Future Extensibility**: Easy to add features like:
- Cost item categories
- Approval status per item
- Historical tracking of cost changes
## Future Enhancements
Potential future improvements:
- Add `category` field to cost items
- Add `approved_amount` vs `requested_amount` for budget approval workflows
- Add `notes` field for item-level comments
- Add audit trail for cost item changes
- Add `is_approved` flag for individual item approval

View File

@ -1,181 +0,0 @@
# Dealer Claim Management - Fresh Start Guide
## Overview
This guide helps you start fresh with the dealer claim management system by cleaning up all existing data and ensuring the database structure is ready for new requests.
## Prerequisites
1. **Database Migrations**: Ensure all migrations are up to date, including the new tables:
- `internal_orders` (for IO details)
- `claim_budget_tracking` (for comprehensive budget tracking)
2. **Backup** (Optional but Recommended):
- If you have important data, backup your database before running cleanup
## Fresh Start Steps
### Step 1: Run Database Migrations
Ensure all new tables are created:
```bash
cd Re_Backend
npm run migrate
```
This will create:
- ✅ `internal_orders` table (for IO details with `ioRemark`)
- ✅ `claim_budget_tracking` table (for comprehensive budget tracking)
- ✅ All other dealer claim related tables
### Step 2: Clean Up All Existing Dealer Claims
Run the cleanup script to remove all existing CLAIM_MANAGEMENT requests:
```bash
npm run cleanup:dealer-claims
```
**What this script does:**
- Finds all workflow requests with `workflow_type = 'CLAIM_MANAGEMENT'`
- Deletes all related data from:
- `claim_budget_tracking`
- `internal_orders`
- `dealer_proposal_cost_items`
- `dealer_completion_details`
- `dealer_proposal_details`
- `dealer_claim_details`
- `activities`
- `work_notes`
- `documents`
- `participants`
- `approval_levels`
- `subscriptions`
- `notifications`
- `request_summaries`
- `shared_summaries`
- `conclusion_remarks`
- `tat_alerts`
- `workflow_requests` (finally)
**Note:** This script uses a database transaction, so if any step fails, all changes will be rolled back.
### Step 3: Verify Cleanup
After running the cleanup script, verify that no CLAIM_MANAGEMENT requests remain:
```sql
SELECT COUNT(*) FROM workflow_requests WHERE workflow_type = 'CLAIM_MANAGEMENT';
-- Should return 0
```
### Step 4: Seed Dealers (If Needed)
If you need to seed dealer users:
```bash
npm run seed:dealers
```
## Database Structure Summary
### New Tables Created
1. **`internal_orders`** - Dedicated table for IO (Internal Order) details
- `io_id` (PK)
- `request_id` (FK, unique)
- `io_number`
- `io_remark` ✅ (dedicated field, not in comments)
- `io_available_balance`
- `io_blocked_amount`
- `io_remaining_balance`
- `organized_by` (FK to users)
- `organized_at`
- `status` (PENDING, BLOCKED, RELEASED, CANCELLED)
2. **`claim_budget_tracking`** - Comprehensive budget tracking
- `budget_id` (PK)
- `request_id` (FK, unique)
- `initial_estimated_budget`
- `proposal_estimated_budget`
- `approved_budget`
- `io_blocked_amount`
- `closed_expenses`
- `final_claim_amount`
- `credit_note_amount`
- `budget_status` (DRAFT, PROPOSED, APPROVED, BLOCKED, CLOSED, SETTLED)
- `variance_amount` & `variance_percentage`
- Audit fields (last_modified_by, last_modified_at, modification_reason)
### Existing Tables (Enhanced)
- `dealer_claim_details` - Main claim information
- `dealer_proposal_details` - Step 1: Dealer proposal
- `dealer_proposal_cost_items` - Cost breakdown items
- `dealer_completion_details` - Step 5: Completion documents
## What's New
### 1. IO Details in Separate Table
- ✅ IO remark is now stored in `internal_orders.io_remark` (not parsed from comments)
- ✅ Tracks who organized the IO (`organized_by`, `organized_at`)
- ✅ Better data integrity and querying
### 2. Comprehensive Budget Tracking
- ✅ All budget-related values in one place
- ✅ Tracks budget lifecycle (DRAFT → PROPOSED → APPROVED → BLOCKED → CLOSED → SETTLED)
- ✅ Calculates variance automatically
- ✅ Audit trail for budget modifications
### 3. Proper Data Structure
- ✅ Estimated budget: `claimDetails.estimatedBudget` or `proposalDetails.totalEstimatedBudget`
- ✅ Claim amount: `completionDetails.totalClosedExpenses` or `budgetTracking.finalClaimAmount`
- ✅ IO details: `internalOrder` table (separate, dedicated)
- ✅ E-Invoice: `claimDetails.eInvoiceNumber`, `claimDetails.eInvoiceDate`
- ✅ Credit Note: `claimDetails.creditNoteNumber`, `claimDetails.creditNoteAmount`
## Next Steps After Cleanup
1. **Create New Claim Requests**: Use the API or frontend to create fresh dealer claim requests
2. **Test Workflow**: Go through the 8-step workflow to ensure everything works correctly
3. **Verify Data Storage**: Check that IO details and budget tracking are properly stored
## Troubleshooting
### If Cleanup Fails
1. Check database connection
2. Verify foreign key constraints are not blocking deletion
3. Check logs for specific error messages
4. The script uses transactions, so partial deletions won't occur
### If Tables Don't Exist
Run migrations again:
```bash
npm run migrate
```
### If You Need to Restore Data
If you backed up before cleanup, restore from your backup. The cleanup script does not create backups automatically.
## API Endpoints Ready
After cleanup, you can use these endpoints:
- `POST /api/v1/dealer-claims` - Create new claim request
- `POST /api/v1/dealer-claims/:requestId/proposal` - Submit proposal (Step 1)
- `PUT /api/v1/dealer-claims/:requestId/io` - Update IO details (Step 3)
- `POST /api/v1/dealer-claims/:requestId/completion` - Submit completion (Step 5)
- `PUT /api/v1/dealer-claims/:requestId/e-invoice` - Update e-invoice (Step 7)
- `PUT /api/v1/dealer-claims/:requestId/credit-note` - Update credit note (Step 8)
## Summary
**Cleanup Script**: `npm run cleanup:dealer-claims`
**Migrations**: `npm run migrate`
**Fresh Start**: Database is ready for new dealer claim requests
**Proper Structure**: IO details and budget tracking in dedicated tables

View File

@ -1,134 +0,0 @@
# Dealer User Architecture
## Overview
**Dealers and regular users are stored in the SAME `users` table.** This is the correct approach because dealers ARE users in the system - they login via SSO, participate in workflows, receive notifications, etc.
## Why Single Table?
### ✅ Advantages:
1. **Unified Authentication**: Dealers login via the same Okta SSO as regular users
2. **Shared Functionality**: Dealers need all user features (notifications, workflow participation, etc.)
3. **Simpler Architecture**: No need for joins or complex queries
4. **Data Consistency**: Single source of truth for all users
5. **Workflow Integration**: Dealers can be approvers, participants, or action takers seamlessly
### ❌ Why NOT Separate Table:
- Would require complex joins for every query
- Data duplication (email, name, etc. in both tables)
- Dealers still need user authentication and permissions
- More complex to maintain
## How Dealers Are Identified
Dealers are identified using **three criteria** (any one matches):
1. **`employeeId` field starts with `'RE-'`** (e.g., `RE-MH-001`, `RE-DL-002`)
- This is the **primary identifier** for dealers
- Dealer code is stored in `employeeId` field
2. **`designation` contains `'dealer'`** (case-insensitive)
- Example: `"Dealer"`, `"Senior Dealer"`, etc.
3. **`department` contains `'dealer'`** (case-insensitive)
- Example: `"Dealer Operations"`, `"Dealer Management"`, etc.
## Database Schema
```sql
users {
user_id UUID PK
email VARCHAR(255) UNIQUE
okta_sub VARCHAR(100) UNIQUE -- From Okta SSO
employee_id VARCHAR(50) -- For dealers: stores dealer code (RE-MH-001)
display_name VARCHAR(255)
designation VARCHAR(255) -- For dealers: "Dealer"
department VARCHAR(255) -- For dealers: "Dealer Operations"
role ENUM('USER', 'MANAGEMENT', 'ADMIN')
is_active BOOLEAN
-- ... other user fields
}
```
## Example Data
### Regular User:
```json
{
"userId": "uuid-1",
"email": "john.doe@royalenfield.com",
"employeeId": "E12345", // Regular employee ID
"designation": "Software Engineer",
"department": "IT",
"role": "USER"
}
```
### Dealer User:
```json
{
"userId": "uuid-2",
"email": "test.2@royalenfield.com",
"employeeId": "RE-MH-001", // Dealer code stored here
"designation": "Dealer",
"department": "Dealer Operations",
"role": "USER"
}
```
## Querying Dealers
The `dealer.service.ts` uses these filters to find dealers:
```typescript
User.findAll({
where: {
[Op.or]: [
{ designation: { [Op.iLike]: '%dealer%' } },
{ employeeId: { [Op.like]: 'RE-%' } },
{ department: { [Op.iLike]: '%dealer%' } },
],
isActive: true,
}
});
```
## Seed Script Behavior
When running `npm run seed:dealers`:
1. **If user exists (from Okta SSO)**:
- ✅ Preserves `oktaSub` (real Okta subject ID)
- ✅ Preserves `role` (from Okta)
- ✅ Updates `employeeId` with dealer code
- ✅ Updates `designation` to "Dealer" (if not already)
- ✅ Updates `department` to "Dealer Operations" (if not already)
2. **If user doesn't exist**:
- Creates placeholder user
- Sets `oktaSub` to `dealer-{code}-pending-sso`
- When dealer logs in via SSO, `oktaSub` gets updated automatically
## Workflow Integration
Dealers participate in workflows just like regular users:
- **As Approvers**: In Steps 1 & 5 of claim management workflow
- **As Participants**: Can be added to any workflow
- **As Action Takers**: Can submit proposals, completion documents, etc.
The system identifies them as dealers by checking `employeeId` starting with `'RE-'` or `designation` containing `'dealer'`.
## API Endpoints
- `GET /api/v1/dealers` - Get all dealers (filters users table)
- `GET /api/v1/dealers/code/:dealerCode` - Get dealer by code
- `GET /api/v1/dealers/email/:email` - Get dealer by email
- `GET /api/v1/dealers/search?q=term` - Search dealers
All endpoints query the same `users` table with dealer-specific filters.
## Conclusion
**✅ Single `users` table is the correct approach.** No separate dealer table needed. Dealers are users with special identification markers (dealer code in `employeeId`, dealer designation, etc.).

View File

@ -1,695 +0,0 @@
# DMS Integration API Documentation
## Overview
This document describes the data exchange between the Royal Enfield Workflow System (RE-Flow) and the DMS (Document Management System) for:
1. **E-Invoice Generation** - Submitting claim data to DMS for e-invoice creation
2. **Credit Note Generation** - Fetching/Generating credit note from DMS
## Data Flow Overview
### Inputs from RE-Flow System
The following data is sent **FROM** RE-Flow System **TO** DMS:
1. **Dealer Code** - Unique dealer identifier
2. **Dealer Name** - Dealer business name
3. **Activity Name** - Name of the activity/claim type (see Activity Types below)
4. **Activity Description** - Detailed description of the activity
5. **Claim Amount** - Total claim amount (before taxes)
6. **Request Number** - Unique request identifier from RE-Flow (e.g., "REQ-2025-12-0001")
7. **IO Number** - Internal Order number (if available)
### Inputs from DMS Team
The following data is **PROVIDED BY** DMS Team **TO** RE-Flow System (via webhook):
1. **Document No** - Generated invoice/credit note number
2. **Document Type** - Type of document ("E-INVOICE", "INVOICE", or "CREDIT_NOTE")
3. **Item Code No** - Item code number (same as provided in request, used for GST calculation)
4. **HSN/SAC Code** - HSN/SAC code for tax calculation (determined by DMS based on Item Code No)
5. **CGST %** - CGST percentage (e.g., 9.0 for 9%) - calculated by DMS based on Item Code No and dealer location
6. **SGST %** - SGST percentage (e.g., 9.0 for 9%) - calculated by DMS based on Item Code No and dealer location
7. **IGST %** - IGST percentage (0.0 for intra-state, >0 for inter-state) - calculated by DMS based on Item Code No and dealer location
8. **CGST Amount** - CGST amount in INR - calculated by DMS
9. **SGST Amount** - SGST amount in INR - calculated by DMS
10. **IGST Amount** - IGST amount in INR - calculated by DMS
11. **Credit Type** - Type of credit: "GST" or "Commercial Credit" (for credit notes only)
12. **IRN No** - Invoice Reference Number from GST portal (response from GST system)
13. **SAP Credit Note No** - SAP Credit Note Number (response from SAP system, for credit notes only)
**Important:** Item Code No is used by DMS for GST calculation. DMS determines HSN/SAC code, tax percentages, and tax amounts based on the Item Code No and dealer location.
### Predefined Activity Types
The following is the complete list of predefined Activity Types that RE-Flow System uses. DMS Team must provide **Item Code No** mapping for each Activity Type:
- **Riders Mania Claims**
- **Marketing Cost Bike to Vendor**
- **Media Bike Service**
- **ARAI Motorcycle Liquidation**
- **ARAI Certification STA Approval CNR**
- **Procurement of Spares/Apparel/GMA for Events**
- **Fuel for Media Bike Used for Event**
- **Motorcycle Buyback and Goodwill Support**
- **Liquidation of Used Motorcycle**
- **Motorcycle Registration CNR (Owned or Gifted by RE)**
- **Legal Claims Reimbursement**
- **Service Camp Claims**
- **Corporate Claims Institutional Sales PDI**
**Item Code No Lookup Process:**
1. RE-Flow sends `activity_name` to DMS
2. DMS responds with corresponding `item_code_no` based on activity type mapping
3. RE-Flow includes the `item_code_no` in invoice/credit note generation payload
4. DMS uses `item_code_no` to determine HSN/SAC code and calculate GST (CGST/SGST/IGST percentages and amounts)
**Note:** DMS Team must configure the Activity Type → Item Code No mapping in their system. This mapping is used for GST calculation.
---
## 1. E-Invoice Generation (DMS Push)
### When It's Called
This API is called when:
- **Step 6** of the claim management workflow is approved (Requestor approves the claim)
- User manually pushes claim data to DMS via the "Push to DMS" action
- System auto-generates e-invoice after claim approval
### Request Details
**Endpoint:** `POST {DMS_BASE_URL}/api/invoices/generate`
**Headers:**
```http
Authorization: Bearer {DMS_API_KEY}
Content-Type: application/json
```
**Request Body (Complete Payload):**
```json
{
"request_number": "REQ-2025-12-0001",
"dealer_code": "DLR001",
"dealer_name": "ABC Motors",
"activity_name": "Marketing Cost Bike to Vendor",
"activity_description": "Q4 Marketing Campaign for Royal Enfield",
"claim_amount": 150000.00,
"io_number": "IO-2025-001",
"item_code_no": "ITEM-001"
}
```
**Complete Webhook Response Payload (from DMS to RE-Flow):**
After processing, DMS will send the following complete payload to RE-Flow webhook endpoint `POST /api/v1/webhooks/dms/invoice`:
```json
{
"request_number": "REQ-2025-12-0001",
"document_no": "EINV-2025-001234",
"document_type": "E-INVOICE",
"document_date": "2025-12-17T10:30:00.000Z",
"dealer_code": "DLR001",
"dealer_name": "ABC Motors",
"activity_name": "Marketing Cost Bike to Vendor",
"activity_description": "Q4 Marketing Campaign for Royal Enfield",
"claim_amount": 150000.00,
"io_number": "IO-2025-001",
"item_code_no": "ITEM-001",
"hsn_sac_code": "998314",
"cgst_percentage": 9.0,
"sgst_percentage": 9.0,
"igst_percentage": 0.0,
"cgst_amount": 13500.00,
"sgst_amount": 13500.00,
"igst_amount": 0.00,
"total_amount": 177000.00,
"irn_no": "IRN123456789012345678901234567890123456789012345678901234567890",
"invoice_file_path": "https://dms.example.com/invoices/EINV-2025-001234.pdf",
"error_message": null,
"timestamp": "2025-12-17T10:30:00.000Z"
}
```
**Important Notes:**
- RE-Flow sends all required details including `item_code_no` (determined by DMS based on `activity_name` mapping)
- DMS processes the invoice generation **asynchronously**
- DMS responds with acknowledgment only
- **Status Verification (Primary Method):** DMS sends webhook to RE-Flow webhook URL `POST /api/v1/webhooks/dms/invoice` (see DMS_WEBHOOK_API.md) to notify when invoice is generated with complete details
- `item_code_no` is used by DMS for GST calculation (HSN/SAC code, tax percentages, tax amounts)
- **Status Verification (Backup Method):** If webhook fails, RE-Flow can use backup status check API (see section "Backup: Status Check API" below)
### Request Field Descriptions
| Field | Type | Required | Description |
|-------|------|----------|-------------|
| `request_number` | string | ✅ Yes | Unique request number from RE-Flow System (e.g., "REQ-2025-12-0001") |
| `dealer_code` | string | ✅ Yes | Dealer's unique code/identifier |
| `dealer_name` | string | ✅ Yes | Dealer's business name |
| `activity_name` | string | ✅ Yes | Activity type name (must match one of the predefined Activity Types) |
| `activity_description` | string | ✅ Yes | Detailed description of the activity/claim |
| `claim_amount` | number | ✅ Yes | Total claim amount before taxes (in INR, decimal format) |
| `io_number` | string | No | Internal Order (IO) number if available |
| `item_code_no` | string | ✅ Yes | Item code number obtained from DMS via the Activity Type → Item Code No lookup (based on `activity_name`). RE-Flow includes it in this request; DMS uses it for GST calculation (HSN/SAC code, tax percentages, tax amounts). |
### Expected Response
**Success Response (200 OK):**
**Note:** DMS should respond with a simple acknowledgment. The actual invoice details (document number, tax calculations, IRN, etc.) will be sent back to RE-Flow via **webhook** (see DMS_WEBHOOK_API.md).
```json
{
"success": true,
"message": "Invoice generation request received and queued for processing",
"request_number": "REQ-2025-12-0001"
}
```
### Response Field Descriptions
| Field | Type | Description |
|-------|------|-------------|
| `success` | boolean | Indicates if the request was accepted |
| `message` | string | Status message |
| `request_number` | string | Echo of the request number for reference |
**Important:**
- The actual invoice generation happens **asynchronously**
- DMS will send the complete invoice details (including document number, tax calculations, IRN, file path, `item_code_no`, etc.) via **webhook** to RE-Flow System once processing is complete
- Webhook endpoint: `POST /api/v1/webhooks/dms/invoice` (see DMS_WEBHOOK_API.md for details)
- If webhook delivery fails, RE-Flow can use the backup status check API (see section "Backup: Status Check API" below)
### Error Response
**Error Response (400/500):**
```json
{
"success": false,
"error": "Error message describing what went wrong",
"error_code": "INVALID_DEALER_CODE"
}
```
### Error Scenarios
| Error Code | Description | Possible Causes |
|------------|-------------|-----------------|
| `INVALID_DEALER_CODE` | Dealer code not found in DMS | Dealer not registered in DMS |
| `INVALID_AMOUNT` | Amount validation failed | Negative amount or invalid format |
| `IO_NOT_FOUND` | IO number not found | Invalid or non-existent IO number |
| `DMS_SERVICE_ERROR` | DMS internal error | DMS system unavailable or processing error |
### Example cURL Request
```bash
curl -X POST "https://dms.example.com/api/invoices/generate" \
-H "Authorization: Bearer YOUR_DMS_API_KEY" \
-H "Content-Type: application/json" \
-d '{
"request_number": "REQ-2025-12-0001",
"dealer_code": "DLR001",
"dealer_name": "ABC Motors",
"activity_name": "Marketing Cost Bike to Vendor",
"activity_description": "Q4 Marketing Campaign for Royal Enfield",
"claim_amount": 150000.00,
"io_number": "IO-2025-001",
"item_code_no": "ITEM-001"
}'
```
---
## 2. Credit Note Generation (DMS Fetch)
### When It's Called
This API is called when:
- **Step 8** of the claim management workflow is initiated (Credit Note Confirmation)
- User requests to generate/fetch credit note from DMS
- System auto-generates credit note after e-invoice is confirmed
### Request Details
**Endpoint:** `POST {DMS_BASE_URL}/api/credit-notes/generate`
**Headers:**
```http
Authorization: Bearer {DMS_API_KEY}
Content-Type: application/json
```
**Request Body (Complete Payload):**
```json
{
"request_number": "REQ-2025-12-0001",
"e_invoice_number": "EINV-2025-001234",
"dealer_code": "DLR001",
"dealer_name": "ABC Motors",
"activity_name": "Marketing Cost Bike to Vendor",
"activity_description": "Q4 Marketing Campaign for Royal Enfield",
"claim_amount": 150000.00,
"io_number": "IO-2025-001",
"item_code_no": "ITEM-001"
}
```
**Complete Webhook Response Payload (from DMS to RE-Flow):**
After processing, DMS will send the following complete payload to RE-Flow webhook endpoint `POST /api/v1/webhooks/dms/credit-note`:
```json
{
"request_number": "REQ-2025-12-0001",
"document_no": "CN-2025-001234",
"document_type": "CREDIT_NOTE",
"document_date": "2025-12-17T11:00:00.000Z",
"dealer_code": "DLR001",
"dealer_name": "ABC Motors",
"activity_name": "Marketing Cost Bike to Vendor",
"activity_description": "Q4 Marketing Campaign for Royal Enfield",
"claim_amount": 150000.00,
"io_number": "IO-2025-001",
"item_code_no": "ITEM-001",
"hsn_sac_code": "998314",
"cgst_percentage": 9.0,
"sgst_percentage": 9.0,
"igst_percentage": 0.0,
"cgst_amount": 13500.00,
"sgst_amount": 13500.00,
"igst_amount": 0.00,
"total_amount": 177000.00,
"credit_type": "GST",
"irn_no": "IRN987654321098765432109876543210987654321098765432109876543210",
"sap_credit_note_no": "SAP-CN-2025-001234",
"credit_note_file_path": "https://dms.example.com/credit-notes/CN-2025-001234.pdf",
"error_message": null,
"timestamp": "2025-12-17T11:00:00.000Z"
}
```
**Important Notes:**
- RE-Flow sends `activity_name` in the request
- DMS should use the same Item Code No from the original invoice (determined by `activity_name`)
- DMS returns `item_code_no` in the webhook response (see DMS_WEBHOOK_API.md)
- `item_code_no` is used by DMS for GST calculation (HSN/SAC code, tax percentages, tax amounts)
### Request Field Descriptions
| Field | Type | Required | Description |
|-------|------|----------|-------------|
| `request_number` | string | ✅ Yes | Original request number from RE-Flow System |
| `e_invoice_number` | string | ✅ Yes | E-invoice number that was generated earlier (must exist in DMS) |
| `dealer_code` | string | ✅ Yes | Dealer's unique code/identifier (must match invoice) |
| `dealer_name` | string | ✅ Yes | Dealer's business name |
| `activity_name` | string | ✅ Yes | Activity type name (must match original invoice) |
| `activity_description` | string | ✅ Yes | Activity description (must match original invoice) |
| `claim_amount` | number | ✅ Yes | Credit note amount (in INR, decimal format) - typically matches invoice amount |
| `io_number` | string | No | Internal Order (IO) number if available |
| `item_code_no` | string | ✅ Yes | Item code number (same as on the original invoice, obtained via the Activity Type → Item Code No lookup based on `activity_name`). RE-Flow includes it in this request; DMS uses it for GST calculation. |
### Expected Response
**Success Response (200 OK):**
**Note:** DMS should respond with a simple acknowledgment. The actual credit note details (document number, tax calculations, SAP credit note number, IRN, etc.) will be sent back to RE-Flow via **webhook** (see DMS_WEBHOOK_API.md).
```json
{
"success": true,
"message": "Credit note generation request received and queued for processing",
"request_number": "REQ-2025-12-0001"
}
```
### Response Field Descriptions
| Field | Type | Description |
|-------|------|-------------|
| `success` | boolean | Indicates if the request was accepted |
| `message` | string | Status message |
| `request_number` | string | Echo of the request number for reference |
**Important:** The actual credit note generation happens asynchronously. DMS will send the complete credit note details (including document number, tax calculations, SAP credit note number, IRN, file path, etc.) via webhook to RE-Flow System once processing is complete.
### Error Response
**Error Response (400/500):**
```json
{
"success": false,
"error": "Error message describing what went wrong",
"error_code": "INVOICE_NOT_FOUND"
}
```
### Error Scenarios
| Error Code | Description | Possible Causes |
|------------|-------------|-----------------|
| `INVOICE_NOT_FOUND` | E-invoice number not found in DMS | Invoice was not generated or invalid invoice number |
| `INVALID_AMOUNT` | Amount validation failed | Amount mismatch with invoice or invalid format |
| `DEALER_MISMATCH` | Dealer code/name doesn't match invoice | Different dealer code than original invoice |
| `CREDIT_NOTE_EXISTS` | Credit note already generated for this invoice | Duplicate request for same invoice |
| `DMS_SERVICE_ERROR` | DMS internal error | DMS system unavailable or processing error |
### Example cURL Request
```bash
curl -X POST "https://dms.example.com/api/credit-notes/generate" \
-H "Authorization: Bearer YOUR_DMS_API_KEY" \
-H "Content-Type: application/json" \
-d '{
"request_number": "REQ-2025-12-0001",
"e_invoice_number": "EINV-2025-001234",
"dealer_code": "DLR001",
"dealer_name": "ABC Motors",
"activity_name": "Marketing Cost Bike to Vendor",
"activity_description": "Q4 Marketing Campaign for Royal Enfield",
"claim_amount": 150000.00,
"io_number": "IO-2025-001",
"item_code_no": "ITEM-001"
}'
```
---
## Configuration
### Environment Variables
The following environment variables need to be configured in the RE Workflow System:
```env
# DMS Integration Configuration
DMS_BASE_URL=https://dms.example.com
DMS_API_KEY=your_dms_api_key_here
# Alternative: Username/Password Authentication
DMS_USERNAME=your_dms_username
DMS_PASSWORD=your_dms_password
```
### Authentication Methods
DMS supports two authentication methods:
1. **API Key Authentication** (Recommended)
- Set `DMS_API_KEY` in environment variables
- Header: `Authorization: Bearer {DMS_API_KEY}`
2. **Username/Password Authentication**
- Set `DMS_USERNAME` and `DMS_PASSWORD` in environment variables
- Use Basic Auth or custom authentication as per DMS requirements
---
## Integration Flow
### E-Invoice Generation Flow
```
┌─────────────────┐
│ RE-Flow System │
│ (Step 6) │
└────────┬────────┘
│ POST /api/invoices/generate
│ { request_number, dealer_code, activity_name,
│ claim_amount, item_code_no, ... }
┌─────────────────┐
│ DMS System │
│ │
│ - Validates │
│ - Queues for │
│ processing │
│ │
│ Response: │
│ { success: true }│
└────────┬────────┘
│ (Asynchronous Processing)
│ - Determines Item Code No
│ - Calculates GST
│ - Generates E-Invoice
│ - Gets IRN from GST
│ POST /api/v1/webhooks/dms/invoice
│ { document_no, item_code_no,
│ hsn_sac_code, tax details,
│ irn_no, invoice_file_path, ... }
┌─────────────────┐
│ RE-Flow System │
│ │
│ - Receives │
│ webhook │
│ - Stores │
│ invoice data │
│ - Updates │
│ workflow │
│ - Moves to │
│ Step 8 │
└─────────────────┘
Backup (if webhook fails):
┌─────────────────┐
│ RE-Flow System │
│ │
│ GET /api/invoices/status/{request_number}
│ │
┌─────────────────┐
│ DMS System │
│ │
│ Returns current │
│ invoice status │
│ and details │
└─────────────────┘
```
### Credit Note Generation Flow
```
┌─────────────────┐
│ RE-Flow System │
│ (Step 8) │
└────────┬────────┘
│ POST /api/credit-notes/generate
│ { e_invoice_number, request_number,
│ activity_name, claim_amount,
│ item_code_no, ... }
┌─────────────────┐
│ DMS System │
│ │
│ - Validates │
│ invoice │
│ - Queues for │
│ processing │
│ │
│ Response: │
│ { success: true }│
└────────┬────────┘
│ (Asynchronous Processing)
│ - Uses Item Code No from invoice
│ - Calculates GST
│ - Generates Credit Note
│ - Gets IRN from GST
│ - Gets SAP Credit Note No
│ POST /api/v1/webhooks/dms/credit-note
│ { document_no, item_code_no,
│ hsn_sac_code, tax details,
│ irn_no, sap_credit_note_no,
│ credit_note_file_path, ... }
┌─────────────────┐
│ RE-Flow System │
│ │
│ - Receives │
│ webhook │
│ - Stores │
│ credit note │
│ - Updates │
│ workflow │
│ - Completes │
│ request │
└─────────────────┘
Backup (if webhook fails):
┌─────────────────┐
│ RE-Flow System │
│ │
│ GET /api/credit-notes/status/{request_number}
│ │
┌─────────────────┐
│ DMS System │
│ │
│ Returns current │
│ credit note │
│ status and │
│ details │
└─────────────────┘
```
---
## Data Mapping
### RE-Flow System → DMS (API Request)
| RE-Flow Field | DMS Request Field | Notes |
|----------------|-------------------|-------|
| `request.requestNumber` | `request_number` | Direct mapping |
| `claimDetails.dealerCode` | `dealer_code` | Direct mapping |
| `claimDetails.dealerName` | `dealer_name` | Direct mapping |
| `claimDetails.activityName` | `activity_name` | Must match predefined Activity Types |
| `claimDetails.activityDescription` | `activity_description` | Direct mapping |
| `budgetTracking.closedExpenses` | `claim_amount` | Total claim amount (before taxes) |
| `internalOrder.ioNumber` | `io_number` | Optional, if available |
| `itemCodeNo` (determined by DMS) | `item_code_no` | Included in payload. DMS determines this based on `activity_name` mapping. Used by DMS for GST calculation. |
| `claimInvoice.invoiceNumber` | `e_invoice_number` | For credit note request only |
### DMS → RE-Flow System (Webhook Response)
**Note:** All invoice and credit note details are sent via webhook (see DMS_WEBHOOK_API.md), not in the API response.
| DMS Webhook Field | RE-Flow Database Field | Table | Notes |
|-------------------|------------------------|-------|-------|
| `document_no` | `invoice_number` / `credit_note_number` | `claim_invoices` / `claim_credit_notes` | Generated by DMS |
| `document_date` | `invoice_date` / `credit_note_date` | `claim_invoices` / `claim_credit_notes` | Converted to Date object |
| `total_amount` | `invoice_amount` / `credit_amount` | `claim_invoices` / `claim_credit_notes` | Includes taxes |
| `invoice_file_path` | `invoice_file_path` | `claim_invoices` | URL/path to PDF |
| `credit_note_file_path` | `credit_note_file_path` | `claim_credit_notes` | URL/path to PDF |
| `irn_no` | Stored in `description` field | Both tables | From GST portal |
| `sap_credit_note_no` | `sap_document_number` | `claim_credit_notes` | From SAP system |
| `item_code_no` | Stored in `description` field | Both tables | Provided by DMS based on activity |
| `hsn_sac_code` | Stored in `description` field | Both tables | Provided by DMS |
| `cgst_amount`, `sgst_amount`, `igst_amount` | Stored in `description` field | Both tables | Tax breakdown |
| `credit_type` | Stored in `description` field | `claim_credit_notes` | "GST" or "Commercial Credit" |
---
## Testing
### Mock Mode
When DMS is not configured, the system operates in **mock mode**:
- Returns mock invoice/credit note numbers
- Logs warnings instead of making actual API calls
- Useful for development and testing
### Test Data
**E-Invoice Test Request:**
```json
{
"request_number": "REQ-TEST-001",
"dealer_code": "TEST-DLR-001",
"dealer_name": "Test Dealer",
"activity_name": "Marketing Cost Bike to Vendor",
"activity_description": "Test invoice generation for marketing activity",
"claim_amount": 10000.00,
"io_number": "IO-TEST-001",
"item_code_no": "ITEM-001"
}
```
**Credit Note Test Request:**
```json
{
"request_number": "REQ-TEST-001",
"e_invoice_number": "EINV-TEST-001",
"dealer_code": "TEST-DLR-001",
"dealer_name": "Test Dealer",
"activity_name": "Marketing Cost Bike to Vendor",
"activity_description": "Test credit note generation for marketing activity",
"claim_amount": 10000.00,
"io_number": "IO-TEST-001",
"item_code_no": "ITEM-001"
}
```
---
## Notes
1. **Asynchronous Processing**: Invoice and credit note generation happens asynchronously. DMS should:
- Accept the request immediately and return a success acknowledgment
- Process the invoice/credit note in the background
- Send complete details via webhook once processing is complete
2. **Activity Type to Item Code No Mapping**:
- DMS Team must provide **Item Code No** mapping for each predefined Activity Type
- This mapping should be configured in DMS system
- RE-Flow includes `item_code_no` in the request payload (determined by DMS based on `activity_name` mapping)
- DMS uses Item Code No to determine HSN/SAC code and calculate GST (CGST/SGST/IGST percentages and amounts)
- DMS returns `item_code_no` in the webhook response for verification
3. **Tax Calculation**: DMS is responsible for:
- Determining CGST/SGST/IGST percentages based on dealer location and activity type
- Calculating tax amounts
- Providing HSN/SAC codes
4. **Amount Validation**: DMS should validate that credit note amount matches or is less than the original invoice amount.
5. **Invoice Dependency**: Credit note generation requires a valid e-invoice to exist in DMS first.
6. **Error Handling**: RE-Flow System handles DMS errors gracefully and allows manual entry if DMS is unavailable.
7. **Retry Logic**: Consider implementing retry logic for transient DMS failures.
8. **Webhooks (Primary Method)**: DMS **MUST** send webhooks to notify RE-Flow System when invoice/credit note processing is complete. See DMS_WEBHOOK_API.md for webhook specifications. This is the **primary method** for status verification.
9. **Status Check API (Backup Method)**: If webhook delivery fails, RE-Flow can use the backup status check API to verify invoice/credit note generation status. See section "Backup: Status Check API" above.
10. **IRN Generation**: DMS should generate IRN (Invoice Reference Number) from GST portal and include it in the webhook response.
11. **SAP Integration**: For credit notes, DMS should generate SAP Credit Note Number and include it in the webhook response.
12. **Webhook URL Configuration**: DMS must be configured with RE-Flow webhook URLs:
- Invoice Webhook: `POST /api/v1/webhooks/dms/invoice`
- Credit Note Webhook: `POST /api/v1/webhooks/dms/credit-note`
- See DMS_WEBHOOK_API.md for complete webhook specifications
---
## Support
For issues or questions regarding DMS integration:
- **Backend Team**: Check logs in `Re_Backend/src/services/dmsIntegration.service.ts`
- **DMS Team**: Contact DMS support for API-related issues
- **Documentation**: Refer to DMS API documentation for latest updates
---
**Last Updated:** December 19, 2025
**Version:** 2.0
## Changelog
### Version 2.0 (December 19, 2025)
- Added clear breakdown of inputs from RE-Flow vs DMS Team
- Added predefined Activity Types list
- Updated request/response structure to reflect asynchronous processing
- Clarified that detailed responses come via webhook, not API response
- Updated field names to match actual implementation (`claim_amount` instead of `amount`, `activity_name`, `activity_description`)
- Added notes about Item Code No mapping requirement for DMS Team
- Updated data mapping section with webhook fields
### Version 1.0 (December 17, 2025)
- Initial documentation

View File

@ -1,574 +0,0 @@
# DMS Webhook API Documentation
## Overview
This document describes the webhook endpoints that DMS (Document Management System) will call to notify the RE Workflow System after processing invoice and credit note generation requests.
---
## Table of Contents
1. [Webhook Overview](#1-webhook-overview)
2. [Authentication](#2-authentication)
3. [Invoice Webhook](#3-invoice-webhook)
4. [Credit Note Webhook](#4-credit-note-webhook)
5. [Payload Specifications](#5-payload-specifications)
6. [Error Handling](#6-error-handling)
7. [Testing](#7-testing)
---
## 1. Webhook Overview
### 1.1 Purpose
After RE Workflow System pushes invoice/credit note generation requests to DMS, DMS processes them and sends webhook callbacks with the generated document details, tax information, and other metadata.
### 1.2 Webhook Flow
```
┌─────────────────┐ ┌─────────────────┐
│ RE Workflow │ │ DMS System │
│ System │ │ │
└────────┬────────┘ └────────┬────────┘
│ │
│ POST /api/invoices/generate │
│ { request_number, dealer_code, ... }│
├─────────────────────────────────────►│
│ │
│ │ Process Invoice
│ │ Generate Document
│ │ Calculate GST
│ │
│ │ POST /api/v1/webhooks/dms/invoice
│ │ { document_no, irn_no, ... }
│◄─────────────────────────────────────┤
│ │
│ Update Invoice Record │
│ Store IRN, GST Details, etc. │
│ │
```
---
## 2. Authentication
### 2.1 Webhook Signature
DMS must include a signature in the request header for security validation:
**Header:**
```
X-DMS-Signature: <HMAC-SHA256-signature>
```
**Signature Generation:**
1. Create HMAC-SHA256 hash of the request body (JSON string)
2. Use the shared secret key (`DMS_WEBHOOK_SECRET`)
3. Send the hex-encoded signature in the `X-DMS-Signature` header
**Example:**
```javascript
const crypto = require('crypto');
const body = JSON.stringify(payload);
const signature = crypto
.createHmac('sha256', DMS_WEBHOOK_SECRET)
.update(body)
.digest('hex');
// Send in header: X-DMS-Signature: <signature>
```
### 2.2 Environment Variable
Configure the webhook secret in RE Workflow System:
```env
DMS_WEBHOOK_SECRET=your_shared_secret_key_here
```
**Note:** If `DMS_WEBHOOK_SECRET` is not configured, signature validation is skipped (development mode only).
---
## 3. Invoice Webhook
### 3.1 Endpoint
**URL:** `POST /api/v1/webhooks/dms/invoice`
**Base URL Examples:**
- Development: `http://localhost:5000/api/v1/webhooks/dms/invoice`
- UAT: `https://reflow-uat.royalenfield.com/api/v1/webhooks/dms/invoice`
- Production: `https://reflow.royalenfield.com/api/v1/webhooks/dms/invoice`
### 3.2 Request Headers
```http
Content-Type: application/json
X-DMS-Signature: <HMAC-SHA256-signature>
User-Agent: DMS-Webhook-Client/1.0
```
### 3.3 Request Payload
```json
{
"request_number": "REQ-2025-12-0001",
"document_no": "EINV-2025-001234",
"document_type": "E-INVOICE",
"document_date": "2025-12-17T10:30:00.000Z",
"dealer_code": "DLR001",
"dealer_name": "ABC Motors",
"activity_name": "Marketing Campaign",
"activity_description": "Q4 Marketing Campaign for Royal Enfield",
"claim_amount": 150000.00,
"io_number": "IO-2025-001",
"item_code_no": "ITEM-001",
"hsn_sac_code": "998314",
"cgst_percentage": 9.0,
"sgst_percentage": 9.0,
"igst_percentage": 0.0,
"cgst_amount": 13500.00,
"sgst_amount": 13500.00,
"igst_amount": 0.00,
"total_amount": 177000.00,
"irn_no": "IRN123456789012345678901234567890123456789012345678901234567890",
"invoice_file_path": "https://dms.example.com/invoices/EINV-2025-001234.pdf",
"error_message": null,
"timestamp": "2025-12-17T10:30:00.000Z"
}
```
### 3.4 Payload Field Descriptions
| Field | Type | Required | Description |
|-------|------|----------|-------------|
| `request_number` | string | ✅ Yes | Original request number from RE Workflow System (e.g., "REQ-2025-12-0001") |
| `document_no` | string | ✅ Yes | Generated invoice/document number from DMS |
| `document_type` | string | ✅ Yes | Type of document: "E-INVOICE" or "INVOICE" |
| `document_date` | string (ISO 8601) | ✅ Yes | Date when invoice was generated |
| `dealer_code` | string | ✅ Yes | Dealer code (should match original request) |
| `dealer_name` | string | ✅ Yes | Dealer name (should match original request) |
| `activity_name` | string | ✅ Yes | Activity name from original request |
| `activity_description` | string | ✅ Yes | Activity description from original request |
| `claim_amount` | number | ✅ Yes | Original claim amount (before tax) |
| `io_number` | string | No | Internal Order number (if provided in original request) |
| `item_code_no` | string | ✅ Yes | Item code number (provided by DMS team based on activity list) |
| `hsn_sac_code` | string | ✅ Yes | HSN/SAC code for the invoice |
| `cgst_percentage` | number | ✅ Yes | CGST percentage (e.g., 9.0 for 9%) |
| `sgst_percentage` | number | ✅ Yes | SGST percentage (e.g., 9.0 for 9%) |
| `igst_percentage` | number | ✅ Yes | IGST percentage (0.0 for intra-state, >0 for inter-state) |
| `cgst_amount` | number | ✅ Yes | CGST amount in INR |
| `sgst_amount` | number | ✅ Yes | SGST amount in INR |
| `igst_amount` | number | ✅ Yes | IGST amount in INR |
| `total_amount` | number | ✅ Yes | Total invoice amount (claim_amount + all taxes) |
| `irn_no` | string | No | Invoice Reference Number (IRN) from GST portal (if generated) |
| `invoice_file_path` | string | ✅ Yes | URL or path to the generated invoice PDF/document file |
| `error_message` | string | No | Error message if invoice generation failed |
| `timestamp` | string (ISO 8601) | ✅ Yes | Timestamp when webhook is sent |
### 3.5 Success Response
**Status Code:** `200 OK`
```json
{
"success": true,
"message": "Invoice webhook processed successfully",
"data": {
"message": "Invoice webhook processed successfully",
"invoiceNumber": "EINV-2025-001234",
"requestNumber": "REQ-2025-12-0001"
}
}
```
### 3.6 Error Response
**Status Code:** `400 Bad Request` or `500 Internal Server Error`
```json
{
"success": false,
"message": "Failed to process invoice webhook",
"error": "Request not found: REQ-2025-12-0001"
}
```
---
## 4. Credit Note Webhook
### 4.1 Endpoint
**URL:** `POST /api/v1/webhooks/dms/credit-note`
**Base URL Examples:**
- Development: `http://localhost:5000/api/v1/webhooks/dms/credit-note`
- UAT: `https://reflow-uat.royalenfield.com/api/v1/webhooks/dms/credit-note`
- Production: `https://reflow.royalenfield.com/api/v1/webhooks/dms/credit-note`
### 4.2 Request Headers
```http
Content-Type: application/json
X-DMS-Signature: <HMAC-SHA256-signature>
User-Agent: DMS-Webhook-Client/1.0
```
### 4.3 Request Payload
```json
{
"request_number": "REQ-2025-12-0001",
"document_no": "CN-2025-001234",
"document_type": "CREDIT_NOTE",
"document_date": "2025-12-17T11:00:00.000Z",
"dealer_code": "DLR001",
"dealer_name": "ABC Motors",
"activity_name": "Marketing Campaign",
"activity_description": "Q4 Marketing Campaign for Royal Enfield",
"claim_amount": 150000.00,
"io_number": "IO-2025-001",
"item_code_no": "ITEM-001",
"hsn_sac_code": "998314",
"cgst_percentage": 9.0,
"sgst_percentage": 9.0,
"igst_percentage": 0.0,
"cgst_amount": 13500.00,
"sgst_amount": 13500.00,
"igst_amount": 0.00,
"total_amount": 177000.00,
"credit_type": "GST",
"irn_no": "IRN987654321098765432109876543210987654321098765432109876543210",
"sap_credit_note_no": "SAP-CN-2025-001234",
"credit_note_file_path": "https://dms.example.com/credit-notes/CN-2025-001234.pdf",
"error_message": null,
"timestamp": "2025-12-17T11:00:00.000Z"
}
```
### 4.4 Payload Field Descriptions
| Field | Type | Required | Description |
|-------|------|----------|-------------|
| `request_number` | string | ✅ Yes | Original request number from RE Workflow System |
| `document_no` | string | ✅ Yes | Generated credit note number from DMS |
| `document_type` | string | ✅ Yes | Type of document: "CREDIT_NOTE" |
| `document_date` | string (ISO 8601) | ✅ Yes | Date when credit note was generated |
| `dealer_code` | string | ✅ Yes | Dealer code (should match original request) |
| `dealer_name` | string | ✅ Yes | Dealer name (should match original request) |
| `activity_name` | string | ✅ Yes | Activity name from original request |
| `activity_description` | string | ✅ Yes | Activity description from original request |
| `claim_amount` | number | ✅ Yes | Original claim amount (before tax) |
| `io_number` | string | No | Internal Order number (if provided) |
| `item_code_no` | string | ✅ Yes | Item code number (provided by DMS team) |
| `hsn_sac_code` | string | ✅ Yes | HSN/SAC code for the credit note |
| `cgst_percentage` | number | ✅ Yes | CGST percentage |
| `sgst_percentage` | number | ✅ Yes | SGST percentage |
| `igst_percentage` | number | ✅ Yes | IGST percentage |
| `cgst_amount` | number | ✅ Yes | CGST amount in INR |
| `sgst_amount` | number | ✅ Yes | SGST amount in INR |
| `igst_amount` | number | ✅ Yes | IGST amount in INR |
| `total_amount` | number | ✅ Yes | Total credit note amount (claim_amount + all taxes) |
| `credit_type` | string | ✅ Yes | Type of credit: "GST" or "Commercial Credit" |
| `irn_no` | string | No | Invoice Reference Number (IRN) for credit note (if generated) |
| `sap_credit_note_no` | string | ✅ Yes | SAP Credit Note Number (generated by SAP system) |
| `credit_note_file_path` | string | ✅ Yes | URL or path to the generated credit note PDF/document file |
| `error_message` | string | No | Error message if credit note generation failed |
| `timestamp` | string (ISO 8601) | ✅ Yes | Timestamp when webhook is sent |
### 4.5 Success Response
**Status Code:** `200 OK`
```json
{
"success": true,
"message": "Credit note webhook processed successfully",
"data": {
"message": "Credit note webhook processed successfully",
"creditNoteNumber": "CN-2025-001234",
"requestNumber": "REQ-2025-12-0001"
}
}
```
### 4.6 Error Response
**Status Code:** `400 Bad Request` or `500 Internal Server Error`
```json
{
"success": false,
"message": "Failed to process credit note webhook",
"error": "Credit note record not found for request: REQ-2025-12-0001"
}
```
---
## 5. Payload Specifications
### 5.1 Data Mapping: RE Workflow → DMS
When RE Workflow System sends data to DMS, it includes:
| RE Workflow Field | DMS Receives | Notes |
|-------------------|--------------|-------|
| `requestNumber` | `request_number` | Direct mapping |
| `dealerCode` | `dealer_code` | Direct mapping |
| `dealerName` | `dealer_name` | Direct mapping |
| `activityName` | `activity_name` | From claim details |
| `activityDescription` | `activity_description` | From claim details |
| `claimAmount` | `claim_amount` | Total claim amount |
| `ioNumber` | `io_number` | If available |
### 5.2 Data Mapping: DMS → RE Workflow
When DMS sends webhook, RE Workflow System stores:
| DMS Webhook Field | RE Workflow Database Field | Table |
|-------------------|---------------------------|-------|
| `document_no` | `invoice_number` / `credit_note_number` | `claim_invoices` / `claim_credit_notes` |
| `document_date` | `invoice_date` / `credit_note_date` | `claim_invoices` / `claim_credit_notes` |
| `total_amount` | `invoice_amount` / `credit_note_amount` | `claim_invoices` / `claim_credit_notes` |
| `invoice_file_path` | `invoice_file_path` | `claim_invoices` |
| `credit_note_file_path` | `credit_note_file_path` | `claim_credit_notes` |
| `irn_no` | Stored in `description` field | Both tables |
| `sap_credit_note_no` | `sap_document_number` | `claim_credit_notes` |
| `item_code_no` | Stored in `description` field | Both tables |
| `hsn_sac_code` | Stored in `description` field | Both tables |
| GST amounts | Stored in `description` field | Both tables |
| `credit_type` | Stored in `description` field | `claim_credit_notes` |
### 5.3 GST Calculation Logic
**Intra-State (Same State):**
- CGST: Applied (e.g., 9%)
- SGST: Applied (e.g., 9%)
- IGST: 0%
**Inter-State (Different State):**
- CGST: 0%
- SGST: 0%
- IGST: Applied (e.g., 18%)
**Total Amount Calculation:**
```
total_amount = claim_amount + cgst_amount + sgst_amount + igst_amount
```
---
## 6. Error Handling
### 6.1 Common Error Scenarios
| Error | Status Code | Description | Solution |
|-------|-------------|-------------|----------|
| Invalid Signature | 401 | Webhook signature validation failed | Check `DMS_WEBHOOK_SECRET` and signature generation |
| Missing Required Field | 400 | Required field is missing in payload | Ensure all required fields are included |
| Request Not Found | 400 | Request number doesn't exist in system | Verify request number matches original request |
| Invoice Not Found | 400 | Invoice record not found for request | Ensure invoice was created before webhook |
| Credit Note Not Found | 400 | Credit note record not found for request | Ensure credit note was created before webhook |
| Database Error | 500 | Internal database error | Check database connection and logs |
### 6.2 Retry Logic
DMS should implement retry logic for failed webhook deliveries:
- **Initial Retry:** After 1 minute
- **Second Retry:** After 5 minutes
- **Third Retry:** After 15 minutes
- **Final Retry:** After 1 hour
**Maximum Retries:** 4 attempts
**Retry Conditions:**
- HTTP 5xx errors (server errors)
- Network timeouts
- Connection failures
**Do NOT Retry:**
- HTTP 400 errors (client errors - invalid payload)
- HTTP 401 errors (authentication errors)
### 6.3 Idempotency
Webhooks should be idempotent. If DMS sends the same webhook multiple times:
- RE Workflow System will update the record with the latest data
- No duplicate records will be created
- Status will be updated to reflect the latest state
---
## 7. Testing
### 7.1 Test Invoice Webhook
```bash
curl -X POST "http://localhost:5000/api/v1/webhooks/dms/invoice" \
-H "Content-Type: application/json" \
-H "X-DMS-Signature: <calculated-signature>" \
-d '{
"request_number": "REQ-2025-12-0001",
"document_no": "EINV-TEST-001",
"document_type": "E-INVOICE",
"document_date": "2025-12-17T10:30:00.000Z",
"dealer_code": "DLR001",
"dealer_name": "Test Dealer",
"activity_name": "Test Activity",
"activity_description": "Test Description",
"claim_amount": 100000.00,
"io_number": "IO-TEST-001",
"item_code_no": "ITEM-001",
"hsn_sac_code": "998314",
"cgst_percentage": 9.0,
"sgst_percentage": 9.0,
"igst_percentage": 0.0,
"cgst_amount": 9000.00,
"sgst_amount": 9000.00,
"igst_amount": 0.00,
"total_amount": 118000.00,
"irn_no": "IRN123456789012345678901234567890123456789012345678901234567890",
"invoice_file_path": "https://dms.example.com/invoices/EINV-TEST-001.pdf",
"timestamp": "2025-12-17T10:30:00.000Z"
}'
```
### 7.2 Test Credit Note Webhook
```bash
curl -X POST "http://localhost:5000/api/v1/webhooks/dms/credit-note" \
-H "Content-Type: application/json" \
-H "X-DMS-Signature: <calculated-signature>" \
-d '{
"request_number": "REQ-2025-12-0001",
"document_no": "CN-TEST-001",
"document_type": "CREDIT_NOTE",
"document_date": "2025-12-17T11:00:00.000Z",
"dealer_code": "DLR001",
"dealer_name": "Test Dealer",
"activity_name": "Test Activity",
"activity_description": "Test Description",
"claim_amount": 100000.00,
"io_number": "IO-TEST-001",
"item_code_no": "ITEM-001",
"hsn_sac_code": "998314",
"cgst_percentage": 9.0,
"sgst_percentage": 9.0,
"igst_percentage": 0.0,
"cgst_amount": 9000.00,
"sgst_amount": 9000.00,
"igst_amount": 0.00,
"total_amount": 118000.00,
"credit_type": "GST",
"irn_no": "IRN987654321098765432109876543210987654321098765432109876543210",
"sap_credit_note_no": "SAP-CN-TEST-001",
"credit_note_file_path": "https://dms.example.com/credit-notes/CN-TEST-001.pdf",
"timestamp": "2025-12-17T11:00:00.000Z"
}'
```
### 7.3 Signature Calculation (Node.js Example)
```javascript
const crypto = require('crypto');
function calculateSignature(payload, secret) {
const body = JSON.stringify(payload);
return crypto
.createHmac('sha256', secret)
.update(body)
.digest('hex');
}
const payload = { /* webhook payload */ };
const secret = process.env.DMS_WEBHOOK_SECRET;
const signature = calculateSignature(payload, secret);
// Use in header: X-DMS-Signature: <signature>
```
---
## 8. Integration Checklist
### 8.1 DMS Team Checklist
- [ ] Configure webhook URLs in DMS system
- [ ] Set up `DMS_WEBHOOK_SECRET` (shared secret)
- [ ] Implement signature generation (HMAC-SHA256)
- [ ] Test webhook delivery to RE Workflow endpoints
- [ ] Implement retry logic for failed deliveries
- [ ] Set up monitoring/alerting for webhook failures
- [ ] Document webhook payload structure
- [ ] Coordinate with RE Workflow team for testing
### 8.2 RE Workflow Team Checklist
- [ ] Configure `DMS_WEBHOOK_SECRET` in environment variables
- [ ] Deploy webhook endpoints to UAT/Production
- [ ] Test webhook endpoints with sample payloads
- [ ] Verify database updates after webhook processing
- [ ] Set up monitoring/alerting for webhook failures
- [ ] Document webhook endpoints for DMS team
- [ ] Coordinate with DMS team for integration testing
---
## 9. Support & Troubleshooting
### 9.1 Logs
RE Workflow System logs webhook processing:
- **Success:** `[DMSWebhook] Invoice webhook processed successfully`
- **Error:** `[DMSWebhook] Error processing invoice webhook: <error>`
- **Validation:** `[DMSWebhook] Invalid webhook signature`
### 9.2 Common Issues
**Issue: Webhook signature validation fails**
- Verify `DMS_WEBHOOK_SECRET` matches in both systems
- Check signature calculation method (HMAC-SHA256)
- Ensure request body is JSON stringified correctly
**Issue: Request not found**
- Verify `request_number` matches the original request
- Check if request exists in RE Workflow database
- Ensure request was created before webhook is sent
**Issue: Invoice/Credit Note record not found**
- Verify invoice/credit note was created in RE Workflow
- Check if webhook is sent before record creation
- Review workflow step sequence
---
## 10. Environment Configuration
### 10.1 Environment Variables
Add to RE Workflow System `.env` file:
```env
# DMS Webhook Configuration
DMS_WEBHOOK_SECRET=your_shared_secret_key_here
```
### 10.2 Webhook URLs by Environment
| Environment | Invoice Webhook URL | Credit Note Webhook URL |
|-------------|---------------------|-------------------------|
| Development | `http://localhost:5000/api/v1/webhooks/dms/invoice` | `http://localhost:5000/api/v1/webhooks/dms/credit-note` |
| UAT | `https://reflow-uat.royalenfield.com/api/v1/webhooks/dms/invoice` | `https://reflow-uat.royalenfield.com/api/v1/webhooks/dms/credit-note` |
| Production | `https://reflow.royalenfield.com/api/v1/webhooks/dms/invoice` | `https://reflow.royalenfield.com/api/v1/webhooks/dms/credit-note` |
---
**Document Version:** 1.0
**Last Updated:** December 2024
**Maintained By:** RE Workflow Development Team

File diff suppressed because it is too large Load Diff

View File

@ -1,507 +0,0 @@
erDiagram
users ||--o{ workflow_requests : initiates
users ||--o{ approval_levels : approves
users ||--o{ participants : participates
users ||--o{ work_notes : posts
users ||--o{ documents : uploads
users ||--o{ activities : performs
users ||--o{ notifications : receives
users ||--o{ user_sessions : has
workflow_requests ||--|{ approval_levels : has
workflow_requests ||--o{ participants : involves
workflow_requests ||--o{ documents : contains
workflow_requests ||--o{ work_notes : has
workflow_requests ||--o{ activities : logs
workflow_requests ||--o{ tat_tracking : monitors
workflow_requests ||--o{ notifications : triggers
workflow_requests ||--|| conclusion_remarks : concludes
workflow_requests ||--|| dealer_claim_details : claim_details
workflow_requests ||--|| dealer_proposal_details : proposal_details
dealer_proposal_details ||--o{ dealer_proposal_cost_items : cost_items
workflow_requests ||--|| dealer_completion_details : completion_details
workflow_requests ||--|| internal_orders : internal_order
workflow_requests ||--|| claim_budget_tracking : budget_tracking
workflow_requests ||--|| claim_invoices : claim_invoice
workflow_requests ||--|| claim_credit_notes : claim_credit_note
work_notes ||--o{ work_note_attachments : has
notifications ||--o{ email_logs : sends
notifications ||--o{ sms_logs : sends
workflow_requests ||--o{ report_cache : caches
workflow_requests ||--o{ audit_logs : audits
workflow_requests ||--o{ workflow_templates : templates
users ||--o{ system_settings : updates
users {
uuid user_id PK
varchar employee_id
varchar okta_sub
varchar email
varchar first_name
varchar last_name
varchar display_name
varchar department
varchar designation
varchar phone
varchar manager
varchar second_email
text job_title
varchar employee_number
varchar postal_address
varchar mobile_phone
jsonb ad_groups
jsonb location
boolean is_active
enum role
timestamp last_login
timestamp created_at
timestamp updated_at
}
workflow_requests {
uuid request_id PK
varchar request_number
uuid initiator_id FK
varchar template_type
varchar title
text description
enum priority
enum status
integer current_level
integer total_levels
decimal total_tat_hours
timestamp submission_date
timestamp closure_date
text conclusion_remark
text ai_generated_conclusion
boolean is_draft
boolean is_deleted
timestamp created_at
timestamp updated_at
}
approval_levels {
uuid level_id PK
uuid request_id FK
integer level_number
varchar level_name
uuid approver_id FK
varchar approver_email
varchar approver_name
decimal tat_hours
integer tat_days
enum status
timestamp level_start_time
timestamp level_end_time
timestamp action_date
text comments
text rejection_reason
boolean is_final_approver
decimal elapsed_hours
decimal remaining_hours
decimal tat_percentage_used
timestamp created_at
timestamp updated_at
}
participants {
uuid participant_id PK
uuid request_id FK
uuid user_id FK
varchar user_email
varchar user_name
enum participant_type
boolean can_comment
boolean can_view_documents
boolean can_download_documents
boolean notification_enabled
uuid added_by FK
timestamp added_at
boolean is_active
}
documents {
uuid document_id PK
uuid request_id FK
uuid uploaded_by FK
varchar file_name
varchar original_file_name
varchar file_type
varchar file_extension
bigint file_size
varchar file_path
varchar storage_url
varchar mime_type
varchar checksum
boolean is_google_doc
varchar google_doc_url
enum category
integer version
uuid parent_document_id
boolean is_deleted
integer download_count
timestamp uploaded_at
}
work_notes {
uuid note_id PK
uuid request_id FK
uuid user_id FK
varchar user_name
varchar user_role
text message
varchar message_type
boolean is_priority
boolean has_attachment
uuid parent_note_id
uuid[] mentioned_users
jsonb reactions
boolean is_edited
boolean is_deleted
timestamp created_at
timestamp updated_at
}
work_note_attachments {
uuid attachment_id PK
uuid note_id FK
varchar file_name
varchar file_type
bigint file_size
varchar file_path
varchar storage_url
boolean is_downloadable
integer download_count
timestamp uploaded_at
}
activities {
uuid activity_id PK
uuid request_id FK
uuid user_id FK
varchar user_name
varchar activity_type
text activity_description
varchar activity_category
varchar severity
jsonb metadata
boolean is_system_event
varchar ip_address
text user_agent
timestamp created_at
}
notifications {
uuid notification_id PK
uuid user_id FK
uuid request_id FK
varchar notification_type
varchar title
text message
boolean is_read
enum priority
varchar action_url
boolean action_required
jsonb metadata
varchar[] sent_via
boolean email_sent
boolean sms_sent
boolean push_sent
timestamp read_at
timestamp expires_at
timestamp created_at
}
tat_tracking {
uuid tracking_id PK
uuid request_id FK
uuid level_id FK
varchar tracking_type
enum tat_status
decimal total_tat_hours
decimal elapsed_hours
decimal remaining_hours
decimal percentage_used
boolean threshold_50_breached
timestamp threshold_50_alerted_at
boolean threshold_80_breached
timestamp threshold_80_alerted_at
boolean threshold_100_breached
timestamp threshold_100_alerted_at
integer alert_count
timestamp last_calculated_at
}
conclusion_remarks {
uuid conclusion_id PK
uuid request_id FK
text ai_generated_remark
varchar ai_model_used
decimal ai_confidence_score
text final_remark
uuid edited_by FK
boolean is_edited
integer edit_count
jsonb approval_summary
jsonb document_summary
text[] key_discussion_points
timestamp generated_at
timestamp finalized_at
}
audit_logs {
uuid audit_id PK
uuid user_id FK
varchar entity_type
uuid entity_id
varchar action
varchar action_category
jsonb old_values
jsonb new_values
text changes_summary
varchar ip_address
text user_agent
varchar session_id
varchar request_method
varchar request_url
integer response_status
integer execution_time_ms
timestamp created_at
}
user_sessions {
uuid session_id PK
uuid user_id FK
varchar session_token
varchar refresh_token
varchar ip_address
text user_agent
varchar device_type
varchar browser
varchar os
timestamp login_at
timestamp last_activity_at
timestamp logout_at
timestamp expires_at
boolean is_active
varchar logout_reason
}
email_logs {
uuid email_log_id PK
uuid request_id FK
uuid notification_id FK
varchar recipient_email
uuid recipient_user_id FK
text[] cc_emails
text[] bcc_emails
varchar subject
text body
varchar email_type
varchar status
integer send_attempts
timestamp sent_at
timestamp failed_at
text failure_reason
timestamp opened_at
timestamp clicked_at
timestamp created_at
}
sms_logs {
uuid sms_log_id PK
uuid request_id FK
uuid notification_id FK
varchar recipient_phone
uuid recipient_user_id FK
text message
varchar sms_type
varchar status
integer send_attempts
timestamp sent_at
timestamp delivered_at
timestamp failed_at
text failure_reason
varchar sms_provider
varchar sms_provider_message_id
decimal cost
timestamp created_at
}
system_settings {
uuid setting_id PK
varchar setting_key
text setting_value
varchar setting_type
varchar setting_category
text description
boolean is_editable
boolean is_sensitive
jsonb validation_rules
text default_value
uuid updated_by FK
timestamp created_at
timestamp updated_at
}
workflow_templates {
uuid template_id PK
varchar template_name
text template_description
varchar template_category
jsonb approval_levels_config
decimal default_tat_hours
boolean is_active
integer usage_count
uuid created_by FK
timestamp created_at
timestamp updated_at
}
report_cache {
uuid cache_id PK
varchar report_type
jsonb report_params
jsonb report_data
uuid generated_by FK
timestamp generated_at
timestamp expires_at
integer access_count
timestamp last_accessed_at
}
dealer_claim_details {
uuid claim_id PK
uuid request_id
varchar activity_name
varchar activity_type
varchar dealer_code
varchar dealer_name
varchar dealer_email
varchar dealer_phone
text dealer_address
date activity_date
varchar location
date period_start_date
date period_end_date
timestamp created_at
timestamp updated_at
}
dealer_proposal_details {
uuid proposal_id PK
uuid request_id
string proposal_document_path
string proposal_document_url
decimal total_estimated_budget
string timeline_mode
date expected_completion_date
int expected_completion_days
text dealer_comments
date submitted_at
timestamp created_at
timestamp updated_at
}
dealer_proposal_cost_items {
uuid cost_item_id PK
uuid proposal_id FK
uuid request_id FK
string item_description
decimal amount
int item_order
timestamp created_at
timestamp updated_at
}
dealer_completion_details {
uuid completion_id PK
uuid request_id
date activity_completion_date
int number_of_participants
decimal total_closed_expenses
date submitted_at
timestamp created_at
timestamp updated_at
}
dealer_completion_expenses {
uuid expense_id PK
uuid request_id
uuid completion_id
string description
decimal amount
timestamp created_at
timestamp updated_at
}
internal_orders {
uuid io_id PK
uuid request_id
string io_number
text io_remark
decimal io_available_balance
decimal io_blocked_amount
decimal io_remaining_balance
uuid organized_by FK
date organized_at
string sap_document_number
enum status
timestamp created_at
timestamp updated_at
}
claim_budget_tracking {
uuid budget_id PK
uuid request_id
decimal initial_estimated_budget
decimal proposal_estimated_budget
date proposal_submitted_at
decimal approved_budget
date approved_at
uuid approved_by FK
decimal io_blocked_amount
date io_blocked_at
decimal closed_expenses
date closed_expenses_submitted_at
decimal final_claim_amount
date final_claim_amount_approved_at
uuid final_claim_amount_approved_by FK
decimal credit_note_amount
date credit_note_issued_at
enum budget_status
string currency
decimal variance_amount
decimal variance_percentage
uuid last_modified_by FK
date last_modified_at
text modification_reason
timestamp created_at
timestamp updated_at
}
claim_invoices {
uuid invoice_id PK
uuid request_id
string invoice_number
date invoice_date
string dms_number
decimal amount
string status
text description
timestamp created_at
timestamp updated_at
}
claim_credit_notes {
uuid credit_note_id PK
uuid request_id
string credit_note_number
date credit_note_date
decimal credit_note_amount
string status
text reason
text description
timestamp created_at
timestamp updated_at
}

View File

@ -1,583 +0,0 @@
# Extensible Workflow Architecture Plan
## Supporting Multiple Template Types (Claim Management, Non-Templatized, Future Templates)
## Overview
This document outlines how to design the backend architecture to support:
1. **Unified Request System**: All requests (templatized, non-templatized, claim management) use the same `workflow_requests` table
2. **Template Identification**: Distinguish between different workflow types
3. **Extensibility**: Easy addition of new templates by admins without code changes
4. **Unified Views**: All requests appear in "My Requests", "Open Requests", etc. automatically
---
## Architecture Principles
### 1. **Single Source of Truth: `workflow_requests` Table**
All requests, regardless of type, are stored in the same table:
```sql
workflow_requests {
request_id UUID PK
request_number VARCHAR(20) UK
initiator_id UUID FK
template_type VARCHAR(20) -- 'CUSTOM' | 'TEMPLATE' (high-level)
workflow_type VARCHAR(50) -- 'NON_TEMPLATIZED' | 'CLAIM_MANAGEMENT' | 'DEALER_ONBOARDING' | etc.
template_id UUID FK (nullable) -- Reference to workflow_templates if using admin template
title VARCHAR(500)
description TEXT
status workflow_status
current_level INTEGER
total_levels INTEGER
-- ... common fields
}
```
**Key Fields:**
- `template_type`: High-level classification ('CUSTOM' for user-created, 'TEMPLATE' for admin templates)
- `workflow_type`: Specific workflow identifier (e.g., 'CLAIM_MANAGEMENT', 'NON_TEMPLATIZED')
- `template_id`: Optional reference to `workflow_templates` table if using an admin-created template
### 2. **Template-Specific Data Storage**
Each workflow type can have its own extension table for type-specific data:
```sql
-- For Claim Management
dealer_claim_details {
claim_id UUID PK
request_id UUID FK -> workflow_requests(request_id)
activity_name VARCHAR(500)
activity_type VARCHAR(100)
dealer_code VARCHAR(50)
dealer_name VARCHAR(200)
dealer_email VARCHAR(255)
dealer_phone VARCHAR(20)
dealer_address TEXT
activity_date DATE
location VARCHAR(255)
period_start_date DATE
period_end_date DATE
estimated_budget DECIMAL(15,2)
closed_expenses DECIMAL(15,2)
io_number VARCHAR(50)
io_blocked_amount DECIMAL(15,2)
sap_document_number VARCHAR(100)
dms_number VARCHAR(100)
e_invoice_number VARCHAR(100)
credit_note_number VARCHAR(100)
-- ... claim-specific fields
}
-- For Non-Templatized (if needed)
non_templatized_details {
detail_id UUID PK
request_id UUID FK -> workflow_requests(request_id)
custom_fields JSONB -- Flexible storage for any custom data
-- ... any specific fields
}
-- For Future Templates
-- Each new template can have its own extension table
```
### 3. **Workflow Templates Table (Admin-Created Templates)**
```sql
workflow_templates {
template_id UUID PK
template_name VARCHAR(200) -- Display name: "Claim Management", "Dealer Onboarding"
template_code VARCHAR(50) UK -- Unique identifier: "CLAIM_MANAGEMENT", "DEALER_ONBOARDING"
template_description TEXT
template_category VARCHAR(100) -- "Dealer Operations", "HR", "Finance", etc.
workflow_type VARCHAR(50) -- Maps to workflow_requests.workflow_type
approval_levels_config JSONB -- Step definitions, TAT, roles, etc.
default_tat_hours DECIMAL(10,2)
form_fields_config JSONB -- Form field definitions for wizard
is_active BOOLEAN
is_system_template BOOLEAN -- True for built-in (Claim Management), False for admin-created
created_by UUID FK
created_at TIMESTAMP
updated_at TIMESTAMP
}
```
---
## Database Schema Changes
### Migration: Add Workflow Type Support
```sql
-- Migration: 20251210-add-workflow-type-support.ts
-- 1. Add workflow_type column to workflow_requests
ALTER TABLE workflow_requests
ADD COLUMN IF NOT EXISTS workflow_type VARCHAR(50) DEFAULT 'NON_TEMPLATIZED';
-- 2. Add template_id column (nullable, for admin templates)
ALTER TABLE workflow_requests
ADD COLUMN IF NOT EXISTS template_id UUID REFERENCES workflow_templates(template_id);
-- 3. Create index for workflow_type
CREATE INDEX IF NOT EXISTS idx_workflow_requests_workflow_type
ON workflow_requests(workflow_type);
-- 4. Create index for template_id
CREATE INDEX IF NOT EXISTS idx_workflow_requests_template_id
ON workflow_requests(template_id);
-- 5. Create dealer_claim_details table
CREATE TABLE IF NOT EXISTS dealer_claim_details (
claim_id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
request_id UUID NOT NULL UNIQUE REFERENCES workflow_requests(request_id) ON DELETE CASCADE,
activity_name VARCHAR(500) NOT NULL,
activity_type VARCHAR(100) NOT NULL,
dealer_code VARCHAR(50) NOT NULL,
dealer_name VARCHAR(200) NOT NULL,
dealer_email VARCHAR(255),
dealer_phone VARCHAR(20),
dealer_address TEXT,
activity_date DATE,
location VARCHAR(255),
period_start_date DATE,
period_end_date DATE,
estimated_budget DECIMAL(15,2),
closed_expenses DECIMAL(15,2),
io_number VARCHAR(50),
io_available_balance DECIMAL(15,2),
io_blocked_amount DECIMAL(15,2),
io_remaining_balance DECIMAL(15,2),
sap_document_number VARCHAR(100),
dms_number VARCHAR(100),
e_invoice_number VARCHAR(100),
e_invoice_date DATE,
credit_note_number VARCHAR(100),
credit_note_date DATE,
credit_note_amount DECIMAL(15,2),
created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP
);
CREATE INDEX idx_dealer_claim_details_request_id ON dealer_claim_details(request_id);
CREATE INDEX idx_dealer_claim_details_dealer_code ON dealer_claim_details(dealer_code);
-- 6. Create dealer_proposal_details table (Step 1: Dealer Proposal)
CREATE TABLE IF NOT EXISTS dealer_proposal_details (
proposal_id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
request_id UUID NOT NULL REFERENCES workflow_requests(request_id) ON DELETE CASCADE,
proposal_document_path VARCHAR(500),
proposal_document_url VARCHAR(500),
cost_breakup JSONB, -- Array of {description, amount}
total_estimated_budget DECIMAL(15,2),
timeline_mode VARCHAR(10), -- 'date' | 'days'
expected_completion_date DATE,
expected_completion_days INTEGER,
dealer_comments TEXT,
submitted_at TIMESTAMP WITH TIME ZONE,
created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP
);
CREATE INDEX idx_dealer_proposal_details_request_id ON dealer_proposal_details(request_id);
-- 7. Create dealer_completion_details table (Step 5: Dealer Completion)
CREATE TABLE IF NOT EXISTS dealer_completion_details (
completion_id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
request_id UUID NOT NULL REFERENCES workflow_requests(request_id) ON DELETE CASCADE,
activity_completion_date DATE NOT NULL,
number_of_participants INTEGER,
closed_expenses JSONB, -- Array of {description, amount}
total_closed_expenses DECIMAL(15,2),
completion_documents JSONB, -- Array of document references
activity_photos JSONB, -- Array of photo references
submitted_at TIMESTAMP WITH TIME ZONE,
created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP
);
CREATE INDEX idx_dealer_completion_details_request_id ON dealer_completion_details(request_id);
```
---
## Model Updates
### 1. Update WorkflowRequest Model
```typescript
// Re_Backend/src/models/WorkflowRequest.ts
interface WorkflowRequestAttributes {
requestId: string;
requestNumber: string;
initiatorId: string;
templateType: 'CUSTOM' | 'TEMPLATE';
workflowType: string; // NEW: 'NON_TEMPLATIZED' | 'CLAIM_MANAGEMENT' | etc.
templateId?: string; // NEW: Reference to workflow_templates
title: string;
description: string;
// ... existing fields
}
// Add association
WorkflowRequest.hasOne(DealerClaimDetails, {
as: 'claimDetails',
foreignKey: 'requestId',
sourceKey: 'requestId'
});
```
### 2. Create DealerClaimDetails Model
```typescript
// Re_Backend/src/models/DealerClaimDetails.ts
import { DataTypes, Model } from 'sequelize';
import { sequelize } from '@config/database';
import { WorkflowRequest } from './WorkflowRequest';
interface DealerClaimDetailsAttributes {
claimId: string;
requestId: string;
activityName: string;
activityType: string;
dealerCode: string;
dealerName: string;
// ... all claim-specific fields
}
class DealerClaimDetails extends Model<DealerClaimDetailsAttributes> {
public claimId!: string;
public requestId!: string;
// ... fields
}
DealerClaimDetails.init({
claimId: {
type: DataTypes.UUID,
defaultValue: DataTypes.UUIDV4,
primaryKey: true,
field: 'claim_id'
},
requestId: {
type: DataTypes.UUID,
allowNull: false,
unique: true,
field: 'request_id',
references: {
model: 'workflow_requests',
key: 'request_id'
}
},
// ... all other fields
}, {
sequelize,
modelName: 'DealerClaimDetails',
tableName: 'dealer_claim_details',
timestamps: true
});
// Association
DealerClaimDetails.belongsTo(WorkflowRequest, {
as: 'workflowRequest',
foreignKey: 'requestId',
targetKey: 'requestId'
});
export { DealerClaimDetails };
```
---
## Service Layer Pattern
### 1. Template-Aware Service Factory
```typescript
// Re_Backend/src/services/templateService.factory.ts
import { WorkflowRequest } from '../models/WorkflowRequest';
import { DealerClaimService } from './dealerClaim.service';
import { NonTemplatizedService } from './nonTemplatized.service';
export class TemplateServiceFactory {
static getService(workflowType: string) {
switch (workflowType) {
case 'CLAIM_MANAGEMENT':
return new DealerClaimService();
case 'NON_TEMPLATIZED':
return new NonTemplatizedService();
default:
// For future templates, use a generic service or throw error
throw new Error(`Unsupported workflow type: ${workflowType}`);
}
}
static async getRequestDetails(requestId: string) {
const request = await WorkflowRequest.findByPk(requestId);
if (!request) return null;
const service = this.getService(request.workflowType);
return service.getRequestDetails(request);
}
}
```
### 2. Unified Workflow Service (No Changes Needed)
The existing `WorkflowService.listMyRequests()` and `listOpenForMe()` methods will **automatically** include all request types because they query `workflow_requests` table without filtering by `workflow_type`.
```typescript
// Existing code works as-is - no changes needed!
async listMyRequests(userId: string, page: number, limit: number, filters?: {...}) {
// This query automatically includes ALL workflow types
const requests = await WorkflowRequest.findAll({
where: {
initiatorId: userId,
isDraft: false,
// ... filters
// NO workflow_type filter - includes everything!
}
});
return requests;
}
```
---
## API Endpoints
### 1. Create Claim Management Request
```typescript
// Re_Backend/src/controllers/dealerClaim.controller.ts
async createClaimRequest(req: AuthenticatedRequest, res: Response) {
const userId = req.user?.userId;
const {
activityName,
activityType,
dealerCode,
// ... claim-specific fields
} = req.body;
// 1. Create workflow request (common)
const workflowRequest = await WorkflowRequest.create({
initiatorId: userId,
templateType: 'CUSTOM',
workflowType: 'CLAIM_MANAGEMENT', // Identify as claim
title: `${activityName} - Claim Request`,
description: req.body.requestDescription,
totalLevels: 8, // Fixed 8-step workflow
// ... other common fields
});
// 2. Create claim-specific details
const claimDetails = await DealerClaimDetails.create({
requestId: workflowRequest.requestId,
activityName,
activityType,
dealerCode,
// ... claim-specific fields
});
// 3. Create approval levels (8 steps)
await this.createClaimApprovalLevels(workflowRequest.requestId);
return ResponseHandler.success(res, {
request: workflowRequest,
claimDetails
});
}
```
### 2. Get Request Details (Template-Aware)
```typescript
async getRequestDetails(req: Request, res: Response) {
const { requestId } = req.params;
  // Fetch the base request first so its workflow type is known
  // (it cannot be referenced inside the same findByPk call that declares it)
  const baseRequest = await WorkflowRequest.findByPk(requestId);
  const request = await WorkflowRequest.findByPk(requestId, {
    include: [
      { model: User, as: 'initiator' },
      // Conditionally include template-specific data
      ...(baseRequest.workflowType === 'CLAIM_MANAGEMENT'
        ? [{ model: DealerClaimDetails, as: 'claimDetails' }]
        : [])
    ]
  });
// Use factory to get template-specific service
const templateService = TemplateServiceFactory.getService(request.workflowType);
const enrichedDetails = await templateService.enrichRequestDetails(request);
return ResponseHandler.success(res, enrichedDetails);
}
```
---
## Frontend Integration
### 1. Request List Views (No Changes Needed)
The existing "My Requests" and "Open Requests" pages will automatically show all request types because the backend doesn't filter by `workflow_type`.
```typescript
// Frontend: MyRequests.tsx - No changes needed!
const fetchMyRequests = async () => {
const result = await workflowApi.listMyInitiatedWorkflows({
page,
limit: itemsPerPage
});
// Returns ALL request types automatically
};
```
### 2. Request Detail Page (Template-Aware Rendering)
```typescript
// Frontend: RequestDetail.tsx
const RequestDetail = ({ requestId }) => {
const request = useRequestDetails(requestId);
// Render based on workflow type
if (request.workflowType === 'CLAIM_MANAGEMENT') {
return <ClaimManagementDetail request={request} />;
} else if (request.workflowType === 'NON_TEMPLATIZED') {
return <NonTemplatizedDetail request={request} />;
} else {
// Future templates - use generic renderer or template config
return <GenericWorkflowDetail request={request} />;
}
};
```
---
## Adding New Templates (Future)
### Step 1: Admin Creates Template in UI
1. Admin goes to "Template Management" page
2. Creates new template with:
- Template name: "Vendor Payment"
- Template code: "VENDOR_PAYMENT"
- Approval levels configuration
- Form fields configuration
### Step 2: Database Entry Created
```sql
INSERT INTO workflow_templates (
template_name,
template_code,
workflow_type,
approval_levels_config,
form_fields_config,
is_active,
is_system_template
) VALUES (
'Vendor Payment',
'VENDOR_PAYMENT',
'VENDOR_PAYMENT',
'{"levels": [...], "tat": {...}}'::jsonb,
'{"fields": [...]}'::jsonb,
true,
false -- Admin-created, not system template
);
```
### Step 3: Create Extension Table (If Needed)
```sql
CREATE TABLE vendor_payment_details (
payment_id UUID PRIMARY KEY,
request_id UUID UNIQUE REFERENCES workflow_requests(request_id),
vendor_code VARCHAR(50),
invoice_number VARCHAR(100),
payment_amount DECIMAL(15,2),
-- ... vendor-specific fields
);
```
### Step 4: Create Service (Optional - Can Use Generic Service)
```typescript
// Re_Backend/src/services/vendorPayment.service.ts
export class VendorPaymentService {
async getRequestDetails(request: WorkflowRequest) {
const paymentDetails = await VendorPaymentDetails.findOne({
where: { requestId: request.requestId }
});
return {
...request.toJSON(),
paymentDetails
};
}
}
// Update factory
export class TemplateServiceFactory {
  static getService(workflowType: string) {
    switch (workflowType) {
      case 'VENDOR_PAYMENT':
        return new VendorPaymentService();
      // ... existing cases
    }
  }
}
```
### Step 5: Frontend Component (Optional)
```typescript
// Frontend: components/VendorPaymentDetail.tsx
export function VendorPaymentDetail({ request }) {
// Render vendor payment specific UI
}
```
---
## Benefits of This Architecture
1. **Unified Data Model**: All requests in one table, easy to query
2. **Automatic Inclusion**: My Requests/Open Requests show all types automatically
3. **Extensibility**: Add new templates without modifying existing code
4. **Type Safety**: Template-specific data in separate tables
5. **Flexibility**: Support both system templates and admin-created templates
6. **Backward Compatible**: Existing non-templatized requests continue to work
---
## Migration Strategy
1. **Phase 1**: Add `workflow_type` column, set default to 'NON_TEMPLATIZED' for existing requests
2. **Phase 2**: Create `dealer_claim_details` table and models
3. **Phase 3**: Update claim management creation flow to use new structure
4. **Phase 4**: Update request detail endpoints to be template-aware
5. **Phase 5**: Frontend updates (if needed) for template-specific rendering
---
## Summary
- **All requests** use `workflow_requests` table
- **Template identification** via `workflow_type` field
- **Template-specific data** in extension tables (e.g., `dealer_claim_details`)
- **Unified views** automatically include all types
- **Future templates** can be added by admins without code changes
- **Existing functionality** remains unchanged
This architecture ensures that:
- ✅ Claim Management requests appear in My Requests/Open Requests
- ✅ Non-templatized requests continue to work
- ✅ Future templates can be added easily
- ✅ No code duplication
- ✅ Single source of truth for all requests

View File

@ -1,669 +0,0 @@
# GCP Cloud Storage - Production Setup Guide
## Overview
This guide provides step-by-step instructions for setting up Google Cloud Storage (GCS) for the **Royal Enfield Workflow System** in **Production** environment. This document focuses specifically on production deployment requirements, folder structure, and environment configuration.
---
## Table of Contents
1. [Production Requirements](#1-production-requirements)
2. [GCP Bucket Configuration](#2-gcp-bucket-configuration)
3. [Service Account Setup](#3-service-account-setup)
4. [Environment Variables Configuration](#4-environment-variables-configuration)
5. [Folder Structure in GCS](#5-folder-structure-in-gcs)
6. [Security & Access Control](#6-security--access-control)
7. [CORS Configuration](#7-cors-configuration)
8. [Lifecycle Management](#8-lifecycle-management)
9. [Monitoring & Alerts](#9-monitoring--alerts)
10. [Verification & Testing](#10-verification--testing)
---
## 1. Production Requirements
### 1.1 Application Details
| Item | Production Value |
|------|------------------|
| **Application** | Royal Enfield Workflow System |
| **Environment** | Production |
| **Domain** | `https://reflow.royalenfield.com` |
| **Purpose** | Store workflow documents, attachments, invoices, and credit notes |
| **Storage Type** | Google Cloud Storage (GCS) |
| **Region** | `asia-south1` (Mumbai) |
### 1.2 Storage Requirements
The application stores:
- **Workflow Documents**: Initial documents uploaded during request creation
- **Work Note Attachments**: Files attached during approval workflow
- **Invoice Files**: Generated e-invoice PDFs
- **Credit Note Files**: Generated credit note PDFs
- **Dealer Claim Documents**: Proposal documents, completion documents
---
## 2. GCP Bucket Configuration
### 2.1 Production Bucket Settings
| Setting | Production Value |
|---------|------------------|
| **Bucket Name** | `reflow-documents-prod` |
| **Location Type** | Region |
| **Region** | `asia-south1` (Mumbai) |
| **Storage Class** | Standard (for active files) |
| **Access Control** | Uniform bucket-level access |
| **Public Access Prevention** | Enforced (Block all public access) |
| **Versioning** | Enabled (for recovery) |
| **Lifecycle Rules** | Configured (see section 8) |
### 2.2 Create Production Bucket
```bash
# Create production bucket
gcloud storage buckets create gs://reflow-documents-prod \
--project=re-platform-workflow-dealer \
--location=asia-south1 \
--uniform-bucket-level-access \
--public-access-prevention
# Enable versioning
gcloud storage buckets update gs://reflow-documents-prod \
--versioning
# Verify bucket creation
gcloud storage buckets describe gs://reflow-documents-prod
```
### 2.3 Bucket Naming Convention
| Environment | Bucket Name | Purpose |
|-------------|-------------|---------|
| Development | `reflow-documents-dev` | Development testing |
| UAT | `reflow-documents-uat` | User acceptance testing |
| Production | `reflow-documents-prod` | Live production data |
---
## 3. Service Account Setup
### 3.1 Create Production Service Account
```bash
# Create service account for production
gcloud iam service-accounts create reflow-storage-prod-sa \
--display-name="RE Workflow Production Storage Service Account" \
--description="Service account for production file storage operations" \
--project=re-platform-workflow-dealer
```
### 3.2 Assign Required Roles
The service account needs the following IAM roles:
| Role | Purpose | Required For |
|------|---------|--------------|
| `roles/storage.objectAdmin` | Full control over objects | Upload, delete, update files |
| `roles/storage.objectViewer` | Read objects | Download and preview files |
| `roles/storage.legacyBucketReader` | Read bucket metadata | List files and check bucket status |
```bash
# Grant Storage Object Admin role
gcloud projects add-iam-policy-binding re-platform-workflow-dealer \
--member="serviceAccount:reflow-storage-prod-sa@re-platform-workflow-dealer.iam.gserviceaccount.com" \
--role="roles/storage.objectAdmin"
# Grant Storage Object Viewer role (for read operations)
gcloud projects add-iam-policy-binding re-platform-workflow-dealer \
--member="serviceAccount:reflow-storage-prod-sa@re-platform-workflow-dealer.iam.gserviceaccount.com" \
--role="roles/storage.objectViewer"
```
### 3.3 Generate Service Account Key
```bash
# Generate JSON key file for production
gcloud iam service-accounts keys create ./config/gcp-key-prod.json \
--iam-account=reflow-storage-prod-sa@re-platform-workflow-dealer.iam.gserviceaccount.com \
--project=re-platform-workflow-dealer
```
⚠️ **Security Warning:**
- Store the key file securely (not in Git)
- Use secure file transfer methods
- Rotate keys periodically (every 90 days recommended)
- Restrict file permissions: `chmod 600 ./config/gcp-key-prod.json`
---
## 4. Environment Variables Configuration
### 4.1 Required Environment Variables
Add the following environment variables to your production `.env` file:
```env
# ============================================
# Google Cloud Storage (GCP) Configuration
# ============================================
# GCP Project ID - Must match the project_id in your service account key file
GCP_PROJECT_ID=re-platform-workflow-dealer
# GCP Bucket Name - Production bucket name
GCP_BUCKET_NAME=reflow-documents-prod
# GCP Service Account Key File Path
# Can be relative to project root or absolute path
# Example: ./config/gcp-key-prod.json
# Example: /etc/reflow/config/gcp-key-prod.json
GCP_KEY_FILE=./config/gcp-key-prod.json
```
### 4.2 Environment Variable Details
| Variable | Description | Example Value | Required |
|----------|-------------|---------------|----------|
| `GCP_PROJECT_ID` | Your GCP project ID. Must match the `project_id` field in the service account JSON key file. | `re-platform-workflow-dealer` | ✅ Yes |
| `GCP_BUCKET_NAME` | Name of the GCS bucket where files will be stored. Must exist in your GCP project. | `reflow-documents-prod` | ✅ Yes |
| `GCP_KEY_FILE` | Path to the service account JSON key file. Can be relative (from project root) or absolute path. | `./config/gcp-key-prod.json` | ✅ Yes |
### 4.3 File Path Configuration
**Relative Path (Recommended for Development):**
```env
GCP_KEY_FILE=./config/gcp-key-prod.json
```
**Absolute Path (Recommended for Production):**
```env
GCP_KEY_FILE=/etc/reflow/config/gcp-key-prod.json
```
### 4.4 Verification
After setting environment variables, verify the configuration:
```bash
# Check if variables are set
echo $GCP_PROJECT_ID
echo $GCP_BUCKET_NAME
echo $GCP_KEY_FILE
# Verify key file exists
ls -la $GCP_KEY_FILE
# Verify key file permissions (should be 600)
stat -c "%a %n" $GCP_KEY_FILE
```
---
## 5. Folder Structure in GCS
### 5.1 Production Bucket Structure
```
reflow-documents-prod/
├── requests/ # All workflow-related files
│ ├── REQ-2025-12-0001/ # Request-specific folder
│ │ ├── documents/ # Initial request documents
│ │ │ ├── 1701234567890-abc123-proposal.pdf
│ │ │ ├── 1701234567891-def456-specification.docx
│ │ │ └── 1701234567892-ghi789-budget.xlsx
│ │ │
│ │ ├── attachments/ # Work note attachments
│ │ │ ├── 1701234567893-jkl012-approval_note.pdf
│ │ │ ├── 1701234567894-mno345-signature.png
│ │ │ └── 1701234567895-pqr678-supporting_doc.pdf
│ │ │
│ │ ├── invoices/ # Generated invoice files
│ │ │ └── 1701234567896-stu901-invoice_REQ-2025-12-0001.pdf
│ │ │
│ │ └── credit-notes/ # Generated credit note files
│ │ └── 1701234567897-vwx234-credit_note_REQ-2025-12-0001.pdf
│ │
│ ├── REQ-2025-12-0002/
│ │ ├── documents/
│ │ ├── attachments/
│ │ ├── invoices/
│ │ └── credit-notes/
│ │
│ └── REQ-2025-12-0003/
│ └── ...
└── temp/ # Temporary uploads (auto-deleted after 24h)
└── (temporary files before processing)
```
### 5.2 File Path Patterns
| File Type | Path Pattern | Example |
|-----------|--------------|---------|
| **Documents** | `requests/{requestNumber}/documents/{timestamp}-{hash}-{filename}` | `requests/REQ-2025-12-0001/documents/1701234567890-abc123-proposal.pdf` |
| **Attachments** | `requests/{requestNumber}/attachments/{timestamp}-{hash}-{filename}` | `requests/REQ-2025-12-0001/attachments/1701234567893-jkl012-approval_note.pdf` |
| **Invoices** | `requests/{requestNumber}/invoices/{timestamp}-{hash}-{filename}` | `requests/REQ-2025-12-0001/invoices/1701234567896-stu901-invoice_REQ-2025-12-0001.pdf` |
| **Credit Notes** | `requests/{requestNumber}/credit-notes/{timestamp}-{hash}-{filename}` | `requests/REQ-2025-12-0001/credit-notes/1701234567897-vwx234-credit_note_REQ-2025-12-0001.pdf` |
### 5.3 File Naming Convention
Files are automatically renamed with the following pattern:
```
{timestamp}-{randomHash}-{sanitizedOriginalName}
```
**Example:**
- Original: `My Proposal Document (Final).pdf`
- Stored: `1701234567890-abc123-My_Proposal_Document__Final_.pdf`
**Benefits:**
- Prevents filename conflicts
- Maintains original filename for reference
- Ensures unique file identifiers
- Safe for URL encoding
---
## 6. Security & Access Control
### 6.1 Bucket Security Settings
```bash
# Enforce public access prevention
gcloud storage buckets update gs://reflow-documents-prod \
--public-access-prevention
# Enable uniform bucket-level access
gcloud storage buckets update gs://reflow-documents-prod \
--uniform-bucket-level-access
```
### 6.2 Access Control Strategy
**Production Approach:**
- **Private Bucket**: All files are private by default
- **Signed URLs**: Generate time-limited signed URLs for file access (recommended)
- **Service Account**: Only service account has direct access
- **IAM Policies**: Restrict access to specific service accounts only
### 6.3 Signed URL Configuration (Recommended)
For production, use signed URLs instead of public URLs:
```typescript
// Example: Generate signed URL (valid for 1 hour)
const [url] = await file.getSignedUrl({
action: 'read',
expires: Date.now() + 60 * 60 * 1000, // 1 hour
});
```
### 6.4 Security Checklist
- [ ] Public access prevention enabled
- [ ] Uniform bucket-level access enabled
- [ ] Service account has minimal required permissions
- [ ] JSON key file stored securely (not in Git)
- [ ] Key file permissions set to 600
- [ ] CORS configured for specific domains only
- [ ] Bucket versioning enabled
- [ ] Access logging enabled
- [ ] Signed URLs used for file access (if applicable)
---
## 7. CORS Configuration
### 7.1 Production CORS Policy
Create `cors-config-prod.json`:
```json
[
{
"origin": [
"https://reflow.royalenfield.com",
"https://www.royalenfield.com"
],
"method": ["GET", "PUT", "POST", "DELETE", "HEAD", "OPTIONS"],
"responseHeader": [
"Content-Type",
"Content-Disposition",
"Content-Length",
"Cache-Control",
"x-goog-meta-*"
],
"maxAgeSeconds": 3600
}
]
```
### 7.2 Apply CORS Configuration
```bash
gcloud storage buckets update gs://reflow-documents-prod \
--cors-file=cors-config-prod.json
```
### 7.3 Verify CORS
```bash
# Check CORS configuration
gcloud storage buckets describe gs://reflow-documents-prod \
--format="value(cors)"
```
---
## 8. Lifecycle Management
### 8.1 Lifecycle Rules Configuration
Create `lifecycle-config-prod.json`:
```json
{
"lifecycle": {
"rule": [
{
"action": { "type": "Delete" },
"condition": {
"age": 1,
"matchesPrefix": ["temp/"]
},
"description": "Delete temporary files after 24 hours"
},
{
"action": { "type": "SetStorageClass", "storageClass": "NEARLINE" },
"condition": {
"age": 90,
"matchesPrefix": ["requests/"]
},
"description": "Move old files to Nearline storage after 90 days"
},
{
"action": { "type": "SetStorageClass", "storageClass": "COLDLINE" },
"condition": {
"age": 365,
"matchesPrefix": ["requests/"]
},
"description": "Move archived files to Coldline storage after 1 year"
}
]
}
}
```
### 8.2 Apply Lifecycle Rules
```bash
gcloud storage buckets update gs://reflow-documents-prod \
--lifecycle-file=lifecycle-config-prod.json
```
### 8.3 Lifecycle Rule Benefits
| Rule | Purpose | Cost Savings |
|------|---------|--------------|
| Delete temp files | Remove temporary uploads after 24h | Prevents storage bloat |
| Move to Nearline | Archive files older than 90 days | ~50% cost reduction |
| Move to Coldline | Archive files older than 1 year | ~70% cost reduction |
---
## 9. Monitoring & Alerts
### 9.1 Enable Access Logging
```bash
# Create logging bucket (if not exists)
gcloud storage buckets create gs://reflow-logs-prod \
--project=re-platform-workflow-dealer \
--location=asia-south1
# Enable access logging
gcloud storage buckets update gs://reflow-documents-prod \
--log-bucket=gs://reflow-logs-prod \
--log-object-prefix=reflow-storage-logs/
```
### 9.2 Set Up Monitoring Alerts
**Recommended Alerts:**
1. **Storage Quota Alert**
- Trigger: Storage exceeds 80% of quota
- Action: Notify DevOps team
2. **Unusual Access Patterns**
- Trigger: Unusual download patterns detected
- Action: Security team notification
3. **Failed Access Attempts**
- Trigger: Multiple failed authentication attempts
- Action: Immediate security alert
4. **High Upload Volume**
- Trigger: Upload volume exceeds normal threshold
- Action: Performance team notification
### 9.3 Cost Monitoring
Monitor storage costs via:
- GCP Console → Billing → Reports
- Set up budget alerts at 50%, 75%, 90% of monthly budget
- Review storage class usage (Standard vs Nearline vs Coldline)
---
## 10. Verification & Testing
### 10.1 Pre-Deployment Verification
```bash
# 1. Verify bucket exists
gcloud storage buckets describe gs://reflow-documents-prod
# 2. Verify service account has access
gcloud storage ls gs://reflow-documents-prod \
--impersonate-service-account=reflow-storage-prod-sa@re-platform-workflow-dealer.iam.gserviceaccount.com
# 3. Test file upload
echo "test file" > test-upload.txt
gcloud storage cp test-upload.txt gs://reflow-documents-prod/temp/test-upload.txt
# 4. Test file download
gcloud storage cp gs://reflow-documents-prod/temp/test-upload.txt ./test-download.txt
# 5. Test file delete
gcloud storage rm gs://reflow-documents-prod/temp/test-upload.txt
# 6. Clean up
rm test-upload.txt test-download.txt
```
### 10.2 Application-Level Testing
1. **Upload Test:**
- Upload a document via API
- Verify file appears in GCS bucket
- Check database `storage_url` field contains GCS URL
2. **Download Test:**
- Download file via API
- Verify file is accessible
- Check response headers
3. **Delete Test:**
- Delete file via API
- Verify file is removed from GCS
- Check database record is updated
### 10.3 Production Readiness Checklist
- [ ] Bucket created and configured
- [ ] Service account created with correct permissions
- [ ] JSON key file generated and stored securely
- [ ] Environment variables configured in `.env`
- [ ] CORS policy applied
- [ ] Lifecycle rules configured
- [ ] Versioning enabled
- [ ] Access logging enabled
- [ ] Monitoring alerts configured
- [ ] Upload/download/delete operations tested
- [ ] Backup and recovery procedures documented
---
## 11. Troubleshooting
### 11.1 Common Issues
**Issue: Files not uploading to GCS**
- ✅ Check `.env` configuration matches credentials
- ✅ Verify service account has correct permissions
- ✅ Check bucket name exists and is accessible
- ✅ Review application logs for GCS errors
- ✅ Verify key file path is correct
**Issue: Files uploading but not accessible**
- ✅ Verify bucket permissions (private vs public)
- ✅ Check CORS configuration if accessing from browser
- ✅ Ensure `storage_url` is being saved correctly in database
- ✅ Verify signed URL generation (if using private bucket)
**Issue: Permission denied errors**
- ✅ Verify service account has `roles/storage.objectAdmin`
- ✅ Check bucket IAM policies
- ✅ Verify key file is valid and not expired
### 11.2 Log Analysis
Check application logs for GCS-related messages:
```bash
# Search for GCS initialization
grep "GCS.*Initialized" logs/app.log
# Search for GCS errors
grep "GCS.*Error" logs/app.log
# Search for upload failures
grep "GCS.*upload.*failed" logs/app.log
```
---
## 12. Production Deployment Steps
### 12.1 Deployment Checklist
1. **Pre-Deployment:**
- [ ] Create production bucket
- [ ] Create production service account
- [ ] Generate and secure key file
- [ ] Configure environment variables
- [ ] Test upload/download operations
2. **Deployment:**
- [ ] Deploy application with new environment variables
- [ ] Verify GCS initialization in logs
- [ ] Test file upload functionality
- [ ] Monitor for errors
3. **Post-Deployment:**
- [ ] Verify files are being stored in GCS
- [ ] Check database `storage_url` fields
- [ ] Monitor storage costs
- [ ] Review access logs
---
## 13. Cost Estimation (Production)
| Item | Monthly Estimate | Notes |
|------|------------------|-------|
| **Storage (500GB)** | ~$10.00 | Standard storage class |
| **Operations (100K)** | ~$0.50 | Upload/download operations |
| **Network Egress** | Variable | Depends on download volume |
| **Nearline Storage** | ~$5.00 | Files older than 90 days |
| **Coldline Storage** | ~$2.00 | Files older than 1 year |
**Total Estimated Monthly Cost:** ~$17.50 (excluding network egress)
---
## 14. Support & Contacts
| Role | Responsibility | Contact |
|------|----------------|---------|
| **DevOps Team** | GCP infrastructure setup | [DevOps Email] |
| **Application Team** | Application configuration | [App Team Email] |
| **Security Team** | Access control and permissions | [Security Email] |
---
## 15. Quick Reference
### 15.1 Essential Commands
```bash
# Create bucket
gcloud storage buckets create gs://reflow-documents-prod \
--project=re-platform-workflow-dealer \
--location=asia-south1 \
--uniform-bucket-level-access \
--public-access-prevention
# Create service account
gcloud iam service-accounts create reflow-storage-prod-sa \
--display-name="RE Workflow Production Storage" \
--project=re-platform-workflow-dealer
# Generate key
gcloud iam service-accounts keys create ./config/gcp-key-prod.json \
--iam-account=reflow-storage-prod-sa@re-platform-workflow-dealer.iam.gserviceaccount.com
# Set CORS
gcloud storage buckets update gs://reflow-documents-prod \
--cors-file=cors-config-prod.json
# Enable versioning
gcloud storage buckets update gs://reflow-documents-prod \
--versioning
```
### 15.2 Environment Variables Template
```env
# Production GCP Configuration
GCP_PROJECT_ID=re-platform-workflow-dealer
GCP_BUCKET_NAME=reflow-documents-prod
GCP_KEY_FILE=./config/gcp-key-prod.json
```
---
## Appendix: File Structure Reference
### Database Storage Fields
The application stores file information in the database:
| Table | Field | Description |
|-------|-------|-------------|
| `documents` | `file_path` | GCS path: `requests/{requestNumber}/documents/{filename}` |
| `documents` | `storage_url` | Full GCS URL: `https://storage.googleapis.com/bucket/path` |
| `work_note_attachments` | `file_path` | GCS path: `requests/{requestNumber}/attachments/{filename}` |
| `work_note_attachments` | `storage_url` | Full GCS URL |
| `claim_invoices` | `invoice_file_path` | GCS path: `requests/{requestNumber}/invoices/{filename}` |
| `claim_credit_notes` | `credit_note_file_path` | GCS path: `requests/{requestNumber}/credit-notes/{filename}` |
---
**Document Version:** 1.0
**Last Updated:** December 2024
**Maintained By:** RE Workflow Development Team

View File

@ -1,78 +0,0 @@
# Dealer Claim Management - Implementation Progress
## ✅ Completed
### 1. Database Migrations
- ✅ `20251210-add-workflow-type-support.ts` - Adds `workflow_type` and `template_id` to `workflow_requests`
- ✅ `20251210-enhance-workflow-templates.ts` - Enhances `workflow_templates` with form configuration fields
- ✅ `20251210-create-dealer-claim-tables.ts` - Creates dealer claim related tables:
- `dealer_claim_details` - Main claim information
- `dealer_proposal_details` - Step 1: Dealer proposal submission
- `dealer_completion_details` - Step 5: Dealer completion documents
### 2. Models
- ✅ Updated `WorkflowRequest` model with `workflowType` and `templateId` fields
- ✅ Created `DealerClaimDetails` model
- ✅ Created `DealerProposalDetails` model
- ✅ Created `DealerCompletionDetails` model
### 3. Services
- ✅ Created `TemplateFieldResolver` service for dynamic user field references
## 🚧 In Progress
### 4. Services (Next Steps)
- ⏳ Create `EnhancedTemplateService` - Main service for template operations
- ⏳ Create `DealerClaimService` - Claim-specific business logic
### 5. Controllers & Routes
- ⏳ Create `DealerClaimController` - API endpoints for claim management
- ⏳ Create routes for dealer claim operations
- ⏳ Create template management endpoints
## 📋 Next Steps
1. **Create EnhancedTemplateService**
- Get form configuration with resolved user references
- Save step data
- Validate form data
2. **Create DealerClaimService**
- Create claim request
- Handle 8-step workflow transitions
- Manage proposal and completion submissions
3. **Create Controllers**
- POST `/api/v1/dealer-claims` - Create claim request
- GET `/api/v1/dealer-claims/:requestId` - Get claim details
- POST `/api/v1/dealer-claims/:requestId/proposal` - Submit proposal (Step 1)
- POST `/api/v1/dealer-claims/:requestId/completion` - Submit completion (Step 5)
- GET `/api/v1/templates/:templateId/form-config` - Get form configuration
4. **Integration Services**
- SAP integration for IO validation and budget blocking
- DMS integration for e-invoice and credit note generation
## 📝 Notes
- All migrations are ready to run
- Models are created with proper associations
- Template field resolver supports dynamic user references
- System is designed to be extensible for future templates
## 🔄 Running Migrations
To apply the migrations:
```bash
cd Re_Backend
npm run migrate
```
Or run individually:
```bash
npx ts-node src/scripts/run-migration.ts 20251210-add-workflow-type-support
npx ts-node src/scripts/run-migration.ts 20251210-enhance-workflow-templates
npx ts-node src/scripts/run-migration.ts 20251210-create-dealer-claim-tables
```

View File

@ -1,159 +0,0 @@
# Dealer Claim Management - Implementation Summary
## ✅ Completed Implementation
### 1. Database Migrations (4 files)
- ✅ `20251210-add-workflow-type-support.ts` - Adds `workflow_type` and `template_id` to `workflow_requests`
- ✅ `20251210-enhance-workflow-templates.ts` - Enhances `workflow_templates` with form configuration
- ✅ `20251210-add-template-id-foreign-key.ts` - Adds FK constraint for `template_id`
- ✅ `20251210-create-dealer-claim-tables.ts` - Creates dealer claim tables:
- `dealer_claim_details` - Main claim information
- `dealer_proposal_details` - Step 1: Dealer proposal
- `dealer_completion_details` - Step 5: Completion documents
### 2. Models (5 files)
- ✅ Updated `WorkflowRequest` - Added `workflowType` and `templateId` fields
- ✅ Created `DealerClaimDetails` - Main claim information model
- ✅ Created `DealerProposalDetails` - Proposal submission model
- ✅ Created `DealerCompletionDetails` - Completion documents model
- ✅ Created `WorkflowTemplate` - Template configuration model
### 3. Services (3 files)
- ✅ Created `TemplateFieldResolver` - Resolves dynamic user field references
- ✅ Created `EnhancedTemplateService` - Template form management
- ✅ Created `DealerClaimService` - Claim-specific business logic:
- `createClaimRequest()` - Create new claim with 8-step workflow
- `getClaimDetails()` - Get complete claim information
- `submitDealerProposal()` - Step 1: Dealer proposal submission
- `submitCompletionDocuments()` - Step 5: Completion submission
- `updateIODetails()` - Step 3: IO budget blocking
- `updateEInvoiceDetails()` - Step 7: E-Invoice generation
- `updateCreditNoteDetails()` - Step 8: Credit note issuance
### 4. Controllers & Routes (2 files)
- ✅ Created `DealerClaimController` - API endpoints for claim operations
- ✅ Created `dealerClaim.routes.ts` - Route definitions
- ✅ Registered routes in `routes/index.ts`
### 5. Frontend Utilities (1 file)
- ✅ Created `claimRequestUtils.ts` - Utility functions for detecting claim requests
## 📋 API Endpoints Created
### Dealer Claim Management
- `POST /api/v1/dealer-claims` - Create claim request
- `GET /api/v1/dealer-claims/:requestId` - Get claim details
- `POST /api/v1/dealer-claims/:requestId/proposal` - Submit dealer proposal (Step 1)
- `POST /api/v1/dealer-claims/:requestId/completion` - Submit completion (Step 5)
- `PUT /api/v1/dealer-claims/:requestId/io` - Update IO details (Step 3)
- `PUT /api/v1/dealer-claims/:requestId/e-invoice` - Update e-invoice (Step 7)
- `PUT /api/v1/dealer-claims/:requestId/credit-note` - Update credit note (Step 8)
## 🔄 8-Step Workflow Implementation
The system automatically creates 8 approval levels:
1. **Dealer Proposal Submission** (72h) - Dealer submits proposal
2. **Requestor Evaluation** (48h) - Initiator reviews and confirms
3. **Department Lead Approval** (72h) - Dept lead approves and blocks IO
4. **Activity Creation** (1h, Auto) - System creates activity record
5. **Dealer Completion Documents** (120h) - Dealer submits completion docs
6. **Requestor Claim Approval** (48h) - Initiator approves claim
7. **E-Invoice Generation** (1h, Auto) - System generates e-invoice via DMS
8. **Credit Note Confirmation** (48h) - Finance confirms credit note
## 🎯 Key Features
1. **Unified Request System**
- All requests use same `workflow_requests` table
- Identified by `workflowType: 'CLAIM_MANAGEMENT'`
- Automatically appears in "My Requests" and "Open Requests"
2. **Template-Specific Data Storage**
- Claim data stored in extension tables
- Linked via `request_id` foreign key
- Supports future templates with their own tables
3. **Dynamic User References**
- Auto-populate fields from initiator, dealer, approvers
- Supports team lead, department lead references
- Configurable per template
4. **File Upload Integration**
- Uses GCS with local fallback
- Organized by request number and file type
- Supports proposal documents and completion files
## 📝 Next Steps
### Backend
1. ⏳ Add SAP integration for IO validation and budget blocking
2. ⏳ Add DMS integration for e-invoice and credit note generation
3. ⏳ Create template management API endpoints
4. ⏳ Add validation for dealer codes (SAP integration)
### Frontend
1. ⏳ Create `claimDataMapper.ts` utility functions
2. ⏳ Update `RequestDetail.tsx` to conditionally render claim components
3. ⏳ Update API services to include `workflowType`
4. ⏳ Create `dealerClaimApi.ts` service
5. ⏳ Update request cards to show workflow type
## 🚀 Running the Implementation
### 1. Run Migrations
```bash
cd Re_Backend
npm run migrate
```
### 2. Test API Endpoints
```bash
# Create claim request
POST /api/v1/dealer-claims
{
"activityName": "Diwali Campaign",
"activityType": "Marketing Activity",
"dealerCode": "RE-MH-001",
"dealerName": "Royal Motors Mumbai",
"location": "Mumbai",
"requestDescription": "Marketing campaign details..."
}
# Submit proposal
POST /api/v1/dealer-claims/:requestId/proposal
FormData with proposalDocument file and JSON data
```
## 📊 Database Structure
```
workflow_requests (common)
├── workflow_type: 'CLAIM_MANAGEMENT'
└── template_id: (nullable)
dealer_claim_details (claim-specific)
└── request_id → workflow_requests
dealer_proposal_details (Step 1)
└── request_id → workflow_requests
dealer_completion_details (Step 5)
└── request_id → workflow_requests
approval_levels (8 steps)
└── request_id → workflow_requests
```
## ✅ Testing Checklist
- [ ] Run migrations successfully
- [ ] Create claim request via API
- [ ] Submit dealer proposal
- [ ] Update IO details
- [ ] Submit completion documents
- [ ] Verify request appears in "My Requests"
- [ ] Verify request appears in "Open Requests"
- [ ] Test file uploads (GCS and local fallback)
- [ ] Test workflow progression through 8 steps

View File

@ -1,164 +0,0 @@
# Migration and Setup Summary
## ✅ Current Status
### Tables Created by Migrations
All **6 new dealer claim tables** are included in the migration system:
1. ✅ `dealer_claim_details` - Main claim information
2. ✅ `dealer_proposal_details` - Step 1: Dealer proposal
3. ✅ `dealer_completion_details` - Step 5: Completion documents
4. ✅ `dealer_proposal_cost_items` - Cost breakdown items
5. ✅ `internal_orders` ⭐ - IO details with dedicated fields
6. ✅ `claim_budget_tracking` ⭐ - Comprehensive budget tracking
## Migration Commands
### 1. **`npm run migrate`** ✅
**Status:** ✅ **Fully configured**
This command runs `src/scripts/migrate.ts` which includes **ALL** migrations including:
- ✅ All dealer claim tables (m25-m28)
- ✅ New tables: `internal_orders` (m27) and `claim_budget_tracking` (m28)
**Usage:**
```bash
npm run migrate
```
**What it does:**
- Checks which migrations have already run (via `migrations` table)
- Runs only pending migrations
- Marks them as executed
- Creates all new tables automatically
---
### 2. **`npm run dev`** ✅
**Status:** ✅ **Now fixed and configured**
This command runs:
```bash
npm run setup && nodemon --exec ts-node ...
```
Which calls `npm run setup` → `src/scripts/auto-setup.ts`
**What `auto-setup.ts` does:**
1. ✅ Checks if database exists, creates if missing
2. ✅ Installs PostgreSQL extensions (uuid-ossp)
3. ✅ **Runs all pending migrations** (including dealer claim tables)
4. ✅ Tests database connection
**Fixed:** ✅ Now includes all dealer claim migrations (m29-m35)
**Usage:**
```bash
npm run dev
```
This will automatically:
- Create database if needed
- Run all migrations (including new tables)
- Start the development server
---
### 3. **`npm run setup`** ✅
**Status:** ✅ **Now fixed and configured**
Same as what `npm run dev` calls - runs `auto-setup.ts`
**Usage:**
```bash
npm run setup
```
---
## Migration Files Included
### In `migrate.ts` (for `npm run migrate`):
- ✅ `20251210-add-workflow-type-support` (m22)
- ✅ `20251210-enhance-workflow-templates` (m23)
- ✅ `20251210-add-template-id-foreign-key` (m24)
- ✅ `20251210-create-dealer-claim-tables` (m25) - Creates 3 tables
- ✅ `20251210-create-proposal-cost-items-table` (m26)
- ✅ `20251211-create-internal-orders-table` (m27) ⭐ NEW
- ✅ `20251211-create-claim-budget-tracking-table` (m28) ⭐ NEW
### In `auto-setup.ts` (for `npm run dev` / `npm run setup`):
- ✅ All migrations from `migrate.ts` are now included (m29-m35)
---
## What Gets Created
When you run either `npm run migrate` or `npm run dev`, these tables will be created:
### Dealer Claim Tables (from `20251210-create-dealer-claim-tables.ts`):
1. `dealer_claim_details`
2. `dealer_proposal_details`
3. `dealer_completion_details`
### Additional Tables:
4. `dealer_proposal_cost_items` (from `20251210-create-proposal-cost-items-table.ts`)
5. `internal_orders` ⭐ (from `20251211-create-internal-orders-table.ts`)
6. `claim_budget_tracking` ⭐ (from `20251211-create-claim-budget-tracking-table.ts`)
---
## Verification
After running migrations, verify tables exist:
```sql
-- Check if new tables exist
SELECT table_name
FROM information_schema.tables
WHERE table_schema = 'public'
AND table_name IN (
'dealer_claim_details',
'dealer_proposal_details',
'dealer_completion_details',
'dealer_proposal_cost_items',
'internal_orders',
'claim_budget_tracking'
)
ORDER BY table_name;
```
Should return 6 rows.
---
## Summary
| Command | Runs Migrations? | Includes New Tables? | Status |
|---------|------------------|---------------------|--------|
| `npm run migrate` | ✅ Yes | ✅ Yes | ✅ Working |
| `npm run dev` | ✅ Yes | ✅ Yes | ✅ Fixed |
| `npm run setup` | ✅ Yes | ✅ Yes | ✅ Fixed |
**All commands now create the new tables automatically!** 🎉
---
## Next Steps
1. **Run migrations:**
```bash
npm run migrate
```
OR
```bash
npm run dev # This will also run migrations via setup
```
2. **Verify tables created:**
Check the database to confirm all 6 tables exist.
3. **Start using:**
The tables are ready for dealer claim management!

View File

@ -1,216 +0,0 @@
# New Tables Created for Dealer Claim Management
## Overview
This document lists all the new database tables created specifically for the Dealer Claim Management system.
## Tables Created
### 1. **`dealer_claim_details`**
**Migration:** `20251210-create-dealer-claim-tables.ts`
**Purpose:** Main table storing claim-specific information
**Key Fields:**
- `claim_id` (PK)
- `request_id` (FK to `workflow_requests`, unique)
- `activity_name`, `activity_type`
- `dealer_code`, `dealer_name`, `dealer_email`, `dealer_phone`, `dealer_address`
- `activity_date`, `location`
- `period_start_date`, `period_end_date`
- `estimated_budget`, `closed_expenses`
- `io_number`, `io_available_balance`, `io_blocked_amount`, `io_remaining_balance` (legacy - now in `internal_orders`)
- `sap_document_number`, `dms_number`
- `e_invoice_number`, `e_invoice_date`
- `credit_note_number`, `credit_note_date`, `credit_note_amount`
**Created:** December 10, 2025
---
### 2. **`dealer_proposal_details`**
**Migration:** `20251210-create-dealer-claim-tables.ts`
**Purpose:** Stores dealer proposal submission data (Step 1 of workflow)
**Key Fields:**
- `proposal_id` (PK)
- `request_id` (FK to `workflow_requests`, unique)
- `proposal_document_path`, `proposal_document_url`
- `cost_breakup` (JSONB - legacy, now use `dealer_proposal_cost_items`)
- `total_estimated_budget`
- `timeline_mode` ('date' | 'days')
- `expected_completion_date`, `expected_completion_days`
- `dealer_comments`
- `submitted_at`
**Created:** December 10, 2025
---
### 3. **`dealer_completion_details`**
**Migration:** `20251210-create-dealer-claim-tables.ts`
**Purpose:** Stores dealer completion documents and expenses (Step 5 of workflow)
**Key Fields:**
- `completion_id` (PK)
- `request_id` (FK to `workflow_requests`, unique)
- `activity_completion_date`
- `number_of_participants`
- `closed_expenses` (JSONB array)
- `total_closed_expenses`
- `completion_documents` (JSONB array)
- `activity_photos` (JSONB array)
- `submitted_at`
**Created:** December 10, 2025
---
### 4. **`dealer_proposal_cost_items`**
**Migration:** `20251210-create-proposal-cost-items-table.ts`
**Purpose:** Separate table for cost breakdown items (replaces JSONB in `dealer_proposal_details`)
**Key Fields:**
- `cost_item_id` (PK)
- `proposal_id` (FK to `dealer_proposal_details`)
- `request_id` (FK to `workflow_requests` - denormalized for easier querying)
- `item_description`
- `amount` (DECIMAL 15,2)
- `item_order` (for maintaining order in cost breakdown)
**Benefits:**
- Better querying and filtering
- Easier to update individual cost items
- Better for analytics and reporting
- Maintains referential integrity
**Created:** December 10, 2025
---
### 5. **`internal_orders`** ⭐ NEW
**Migration:** `20251211-create-internal-orders-table.ts`
**Purpose:** Dedicated table for IO (Internal Order) details with proper structure
**Key Fields:**
- `io_id` (PK)
- `request_id` (FK to `workflow_requests`, unique - one IO per request)
- `io_number` (STRING 50)
- `io_remark` (TEXT) ⭐ - Dedicated field for IO remarks (not in comments)
- `io_available_balance` (DECIMAL 15,2)
- `io_blocked_amount` (DECIMAL 15,2)
- `io_remaining_balance` (DECIMAL 15,2)
- `organized_by` (FK to `users`) ⭐ - Tracks who organized the IO
- `organized_at` (DATE) ⭐ - When IO was organized
- `sap_document_number` (STRING 100)
- `status` (ENUM: 'PENDING', 'BLOCKED', 'RELEASED', 'CANCELLED')
**Why This Table:**
- Previously IO details were stored in `dealer_claim_details` table
- IO remark was being parsed from comments
- Now dedicated table with proper fields and relationships
- Better data integrity and querying
**Created:** December 11, 2025
---
### 6. **`claim_budget_tracking`** ⭐ NEW
**Migration:** `20251211-create-claim-budget-tracking-table.ts`
**Purpose:** Comprehensive budget tracking throughout the claim lifecycle
**Key Fields:**
- `budget_id` (PK)
- `request_id` (FK to `workflow_requests`, unique - one budget record per request)
**Budget Values:**
- `initial_estimated_budget` - From claim creation
- `proposal_estimated_budget` - From Step 1 (Dealer Proposal)
- `approved_budget` - From Step 2 (Requestor Evaluation)
- `io_blocked_amount` - From Step 3 (Department Lead - IO blocking)
- `closed_expenses` - From Step 5 (Dealer Completion)
- `final_claim_amount` - From Step 6 (Requestor Claim Approval)
- `credit_note_amount` - From Step 8 (Finance)
**Tracking Fields:**
- `proposal_submitted_at`
- `approved_at`, `approved_by` (FK to `users`)
- `io_blocked_at`
- `closed_expenses_submitted_at`
- `final_claim_amount_approved_at`, `final_claim_amount_approved_by` (FK to `users`)
- `credit_note_issued_at`
**Status & Analysis:**
- `budget_status` (ENUM: 'DRAFT', 'PROPOSED', 'APPROVED', 'BLOCKED', 'CLOSED', 'SETTLED')
- `currency` (STRING 3, default: 'INR')
- `variance_amount` - Difference between approved and closed expenses
- `variance_percentage` - Variance as percentage
**Audit Fields:**
- `last_modified_by` (FK to `users`)
- `last_modified_at`
- `modification_reason` (TEXT)
**Why This Table:**
- Previously budget data was scattered across multiple tables
- No single source of truth for budget lifecycle
- No audit trail for budget modifications
- Now comprehensive tracking with status and variance calculation
**Created:** December 11, 2025
---
## Summary
### Total New Tables: **6**
1. ✅ `dealer_claim_details` - Main claim information
2. ✅ `dealer_proposal_details` - Step 1: Dealer proposal
3. ✅ `dealer_completion_details` - Step 5: Completion documents
4. ✅ `dealer_proposal_cost_items` - Cost breakdown items
5. ✅ `internal_orders` ⭐ - IO details with dedicated fields
6. ✅ `claim_budget_tracking` ⭐ - Comprehensive budget tracking
### Most Recent Additions (December 11, 2025):
- **`internal_orders`** - Proper IO data structure with `ioRemark` field
- **`claim_budget_tracking`** - Complete budget lifecycle tracking
## Migration Order
Run migrations in this order:
```bash
npm run migrate
```
The migrations will run in chronological order:
1. `20251210-create-dealer-claim-tables.ts` (creates tables 1-3)
2. `20251210-create-proposal-cost-items-table.ts` (creates table 4)
3. `20251211-create-internal-orders-table.ts` (creates table 5)
4. `20251211-create-claim-budget-tracking-table.ts` (creates table 6)
## Relationships
```
workflow_requests (1)
├── dealer_claim_details (1:1)
├── dealer_proposal_details (1:1)
│ └── dealer_proposal_cost_items (1:many)
├── dealer_completion_details (1:1)
├── internal_orders (1:1) ⭐ NEW
└── claim_budget_tracking (1:1) ⭐ NEW
```
## Notes
- All tables have `request_id` foreign key to `workflow_requests`
- Most tables have unique constraint on `request_id` (one record per request)
- `dealer_proposal_cost_items` can have multiple items per proposal
- All tables use UUID primary keys
- All tables have `created_at` and `updated_at` timestamps

View File

@ -1,167 +0,0 @@
# Okta Users API Integration
## Overview
The authentication service now uses the Okta Users API (`/api/v1/users/{userId}`) to fetch complete user profile information including manager, employeeID, designation, and other fields that may not be available in the standard OAuth2 userinfo endpoint.
## Configuration
Add the following environment variable to your `.env` file:
```env
OKTA_API_TOKEN=your_okta_api_token_here
```
This is the SSWS (Server-Side Web Service) token for Okta API access. You can generate this token from your Okta Admin Console under **Security > API > Tokens**.
## How It Works
### 1. Primary Method: Okta Users API
When a user logs in for the first time:
1. The system exchanges the authorization code for tokens (OAuth2 flow)
2. Gets the `oktaSub` (subject identifier) from the userinfo endpoint
3. **Attempts to fetch full user profile from Users API** using:
- First: Email address (as shown in curl example)
- Fallback: oktaSub (user ID) if email lookup fails
4. Extracts complete user information including:
- `profile.employeeID` - Employee ID
- `profile.manager` - Manager name
- `profile.title` - Job title/designation
- `profile.department` - Department
- `profile.mobilePhone` - Phone number
- `profile.firstName`, `profile.lastName`, `profile.displayName`
- And other profile fields
### 2. Fallback Method: OAuth2 Userinfo Endpoint
If the Users API:
- Is not configured (missing `OKTA_API_TOKEN`)
- Returns an error (4xx/5xx)
- Fails for any reason
The system automatically falls back to the standard OAuth2 userinfo endpoint (`/oauth2/default/v1/userinfo`) which provides basic user information.
## API Endpoint
```
GET https://{oktaDomain}/api/v1/users/{userId}
Authorization: SSWS {OKTA_API_TOKEN}
Accept: application/json
```
Where `{userId}` can be:
- Email address (e.g., `testuser10@eichergroup.com`)
- Okta user ID (e.g., `00u1e1japegDV2DkP0h8`)
## Response Structure
The Users API returns a complete user object:
```json
{
"id": "00u1e1japegDV2DkP0h8",
"status": "ACTIVE",
"profile": {
"firstName": "Sanjay",
"lastName": "Sahu",
"manager": "Ezhilan subramanian",
"mobilePhone": "8826740087",
"displayName": "Sanjay Sahu",
"employeeID": "E09994",
"title": "Supports Business Applications (SAP) portfolio",
"department": "Deputy Manager - Digital & IT",
"login": "sanjaysahu@Royalenfield.com",
"email": "sanjaysahu@royalenfield.com"
},
...
}
```
## Field Mapping
| Users API Field | Database Field | Notes |
|----------------|----------------|-------|
| `profile.employeeID` | `employeeId` | Employee ID from HR system |
| `profile.manager` | `manager` | Manager name |
| `profile.title` | `designation` | Job title/designation |
| `profile.department` | `department` | Department name |
| `profile.mobilePhone` | `phone` | Phone number |
| `profile.firstName` | `firstName` | First name |
| `profile.lastName` | `lastName` | Last name |
| `profile.displayName` | `displayName` | Display name |
| `profile.email` | `email` | Email address |
| `id` | `oktaSub` | Okta subject identifier |
## Benefits
1. **Complete User Profile**: Gets all available user information including manager, employeeID, and other custom attributes
2. **Automatic Fallback**: If Users API is unavailable, gracefully falls back to userinfo endpoint
3. **No Breaking Changes**: Existing functionality continues to work even without API token
4. **Better Data Quality**: Reduces missing user information (manager, employeeID, etc.)
## Logging
The service logs:
- When Users API is used vs. userinfo fallback
- Which lookup method succeeded (email or oktaSub)
- Extracted fields (employeeId, manager, department, etc.)
- Any errors or warnings
Example log:
```
[AuthService] Fetching user from Okta Users API (using email)
[AuthService] Successfully fetched user from Okta Users API (using email)
[AuthService] Extracted user data from Okta Users API
- oktaSub: 00u1e1japegDV2DkP0h8
- email: testuser10@eichergroup.com
- employeeId: E09994
- hasManager: true
- hasDepartment: true
- hasDesignation: true
```
## Testing
### Test with curl
```bash
curl --location 'https://dev-830839.oktapreview.com/api/v1/users/testuser10@eichergroup.com' \
--header 'Authorization: SSWS YOUR_OKTA_API_TOKEN' \
--header 'Accept: application/json'
```
### Test in Application
1. Set `OKTA_API_TOKEN` in `.env`
2. Log in with a user
3. Check logs to see if Users API was used
4. Verify user record in database has complete information (manager, employeeID, etc.)
## Troubleshooting
### Users API Not Being Used
- Check if `OKTA_API_TOKEN` is set in `.env`
- Check logs for warnings about missing API token
- Verify API token has correct permissions in Okta
### Users API Returns 404
- User may not exist in Okta
- Email format may be incorrect
- Try using oktaSub (user ID) instead
### Missing Fields in Database
- Check if fields exist in Okta user profile
- Verify field mapping in `extractUserDataFromUsersAPI` method
- Check logs to see which fields were extracted
## Security Notes
- **API Token Security**: Store `OKTA_API_TOKEN` securely, never commit to version control
- **Token Permissions**: Ensure API token has read access to user profiles
- **Rate Limiting**: Be aware of Okta API rate limits when fetching user data

View File

@ -1,214 +0,0 @@
# SAP Integration Testing Guide
## Postman Testing
### 1. Testing IO Validation API
**Endpoint:** `GET /api/v1/dealer-claims/:requestId/io`
**Method:** GET
**Headers:**
```
Authorization: Bearer <your_jwt_token>
Content-Type: application/json
```
**Note:** The CSRF error in Postman is likely coming from SAP, not our backend. Our backend doesn't have CSRF protection enabled.
### 2. Testing Budget Blocking API
**Endpoint:** `PUT /api/v1/dealer-claims/:requestId/io`
**Method:** PUT
**Headers:**
```
Authorization: Bearer <your_jwt_token>
Content-Type: application/json
```
**Body:**
```json
{
"ioNumber": "600060",
"ioRemark": "Test remark",
"availableBalance": 1000000,
"blockedAmount": 500,
"remainingBalance": 999500
}
```
### 3. Direct SAP API Testing in Postman
If you want to test SAP API directly (bypassing our backend):
#### IO Validation
- **URL:** `https://RENOIHND01.Eichergroup.com:1443/sap/opu/odata/sap/ZFI_BUDGET_CHECK_API_SRV/GetSenderDataSet?$filter=IONumber eq '600060'&$select=Sender,ResponseDate,GetIODetailsSet01&$expand=GetIODetailsSet01&$format=json`
- **Method:** GET
- **Authentication:** Basic Auth
- Username: Your SAP username
- Password: Your SAP password
- **Headers:**
- `Accept: application/json`
- `Content-Type: application/json`
#### Budget Blocking
- **URL:** `https://RENOIHND01.Eichergroup.com:1443/sap/opu/odata/sap/ZFI_BUDGET_BLOCK_API_SRV/RequesterInputSet`
- **Method:** POST
- **Authentication:** Basic Auth
- Username: Your SAP username
- Password: Your SAP password
- **Headers:**
- `Accept: application/json`
- `Content-Type: application/json`
- **Body:**
```json
{
"Request_Date_Time": "2025-08-29T10:51:00",
"Requester": "REFMS",
"lt_io_input": [
{
"IONumber": "600060",
"Amount": "500"
}
],
"lt_io_output": [],
"ls_response": []
}
```
## Common Errors and Solutions
### 1. CSRF Token Validation Error
**Error:** "CSRF token validation error"
**Possible Causes:**
- SAP API requires CSRF tokens for POST/PUT requests
- SAP might be checking for specific headers
**Solutions:**
1. **Get CSRF Token First:**
- Make a GET request to the SAP service root to get CSRF token
- Example: `GET https://RENOIHND01.Eichergroup.com:1443/sap/opu/odata/sap/ZFI_BUDGET_BLOCK_API_SRV/`
- Look for `x-csrf-token` header in response
- Add this token to subsequent POST/PUT requests as header: `X-CSRF-Token: <token>`
2. **Add Required Headers:**
```
X-CSRF-Token: Fetch
X-Requested-With: XMLHttpRequest
```
### 2. Authentication Failed
**Error:** "Authentication failed" or "401 Unauthorized"
**Possible Causes:**
1. Wrong username/password
2. Basic auth not being sent correctly
3. SSL certificate issues
4. SAP account locked or expired
**Solutions:**
1. **Verify Credentials:**
- Double-check `SAP_USERNAME` and `SAP_PASSWORD` in `.env`
- Ensure no extra spaces or special characters
- Test credentials in browser first
2. **Check SSL Certificate:**
- If using self-signed certificate, set `SAP_DISABLE_SSL_VERIFY=true` in `.env` (testing only!)
- For production, ensure proper SSL certificates are configured
3. **Test Basic Auth Manually:**
- Use Postman with Basic Auth enabled
- Verify the Authorization header format: `Basic <base64(username:password)>`
4. **Check SAP Account Status:**
- Verify account is active and not locked
- Check if password has expired
- Contact SAP administrator if needed
### 3. Connection Errors
**Error:** "ECONNREFUSED" or "ENOTFOUND"
**Solutions:**
1. Verify `SAP_BASE_URL` is correct
2. Check network connectivity to SAP server
3. Ensure firewall allows connections to port 1443
4. Verify Zscaler is configured correctly
### 4. Timeout Errors
**Error:** "Request timeout"
**Solutions:**
1. Increase `SAP_TIMEOUT_MS` in `.env` (default: 30000ms = 30 seconds)
2. Check SAP server response time
3. Verify network latency
## Debugging
### Enable Debug Logging
Set log level to debug in your `.env`:
```
LOG_LEVEL=debug
```
This will log:
- Request URLs
- Request payloads
- Response status codes
- Response data
- Error details
### Check Backend Logs
Look for `[SAP]` prefixed log messages:
```bash
# In development
npm run dev
# Check logs for SAP-related messages
```
### Test SAP Connection
You can test if SAP is reachable:
```bash
curl -u "username:password" \
"https://RENOIHND01.Eichergroup.com:1443/sap/opu/odata/sap/ZFI_BUDGET_CHECK_API_SRV/"
```
## Environment Variables Checklist
Ensure these are set in your `.env`:
```bash
# Required
SAP_BASE_URL=https://RENOIHND01.Eichergroup.com:1443
SAP_USERNAME=your_username
SAP_PASSWORD=your_password
# Optional (with defaults)
SAP_TIMEOUT_MS=30000
SAP_SERVICE_NAME=ZFI_BUDGET_CHECK_API_SRV
SAP_BLOCK_SERVICE_NAME=ZFI_BUDGET_BLOCK_API_SRV
SAP_REQUESTER=REFMS
SAP_DISABLE_SSL_VERIFY=false # Only for testing
```
## Next Steps
If you're still getting errors:
1. **Check Backend Logs:** Look for detailed error messages
2. **Test Directly in Postman:** Bypass backend and test SAP API directly
3. **Verify SAP Credentials:** Test with SAP administrator
4. **Check Network:** Ensure server can reach SAP URL
5. **Review SAP Documentation:** Check if there are additional requirements

View File

@ -1,299 +0,0 @@
# Step 3 (Department Lead Approval) - User Addition Flow Analysis
## Overview
This document analyzes how Step 3 approvers (Department Lead) are added to the dealer claim workflow, covering both frontend and backend implementation.
---
## Backend Implementation
### 1. Request Creation Flow (`dealerClaim.service.ts`)
#### Entry Point: `createClaimRequest()`
- **Location**: `Re_Backend/src/services/dealerClaim.service.ts:37`
- **Parameters**:
- `userId`: Initiator's user ID
- `claimData`: Includes optional `selectedManagerEmail` for user selection
#### Step 3 Approver Resolution Process:
**Phase 1: Pre-Validation (Before Creating Records)**
```typescript
// Lines 67-87: Resolve Department Lead BEFORE creating workflow
let departmentLead: User | null = null;
if (claimData.selectedManagerEmail) {
// User selected a manager from multiple options
departmentLead = await this.userService.ensureUserExists({
email: claimData.selectedManagerEmail,
});
} else {
// Search Okta using manager displayName from initiator's user record
departmentLead = await this.resolveDepartmentLeadFromManager(initiator);
// If no manager found, throw error BEFORE creating any records
if (!departmentLead) {
throw new Error(`No reporting manager found...`);
}
}
```
**Phase 2: Approval Level Creation**
```typescript
// Line 136: Create approval levels with pre-resolved department lead
await this.createClaimApprovalLevels(
workflowRequest.requestId,
userId,
claimData.dealerEmail,
claimData.selectedManagerEmail,
departmentLead // Pre-resolved to avoid re-searching
);
```
### 2. Approval Level Creation (`createClaimApprovalLevels()`)
#### Location: `Re_Backend/src/services/dealerClaim.service.ts:253`
#### Step 3 Configuration:
```typescript
// Lines 310-318: Step 3 definition
{
level: 3,
name: 'Department Lead Approval',
tatHours: 72,
isAuto: false,
approverType: 'department_lead' as const,
approverId: departmentLead?.userId || null,
approverEmail: departmentLead?.email || initiator.manager || 'deptlead@royalenfield.com',
}
```
#### Approver Resolution Logic:
```typescript
// Lines 405-417: Department Lead resolution
else if (step.approverType === 'department_lead') {
if (finalDepartmentLead) {
approverId = finalDepartmentLead.userId;
approverName = finalDepartmentLead.displayName || finalDepartmentLead.email || 'Department Lead';
approverEmail = finalDepartmentLead.email;
} else {
// This should never happen as we validate manager before creating records
throw new Error('Department lead not found...');
}
}
```
#### Database Record Creation:
```typescript
// Lines 432-454: Create ApprovalLevel record
await ApprovalLevel.create({
requestId,
levelNumber: 3,
levelName: 'Department Lead Approval',
approverId: approverId, // Department Lead's userId
approverEmail,
approverName,
tatHours: 72,
status: ApprovalStatus.PENDING, // Will be activated when Step 2 is approved
isFinalApprover: false,
// ... other fields
});
```
### 3. Department Lead Resolution Methods
#### Method 1: `resolveDepartmentLeadFromManager()` (Primary)
- **Location**: `Re_Backend/src/services/dealerClaim.service.ts:622`
- **Flow**:
1. Get `manager` displayName from initiator's User record
2. Search Okta directory by displayName using `userService.searchOktaByDisplayName()`
3. **If 0 matches**: Return `null` (fallback to legacy method)
4. **If 1 match**: Create user in DB if needed, return User object
5. **If multiple matches**: Throw error with `MULTIPLE_MANAGERS_FOUND` code and list of managers
#### Method 2: `resolveDepartmentLead()` (Fallback/Legacy)
- **Location**: `Re_Backend/src/services/dealerClaim.service.ts:699`
- **Priority Order**:
1. User with `MANAGEMENT` role in same department
2. User with designation containing "Lead"/"Head"/"Manager" in same department
3. User matching `initiator.manager` email field
4. Any user in same department (excluding initiator)
5. Any user with "Department Lead" designation (across all departments)
6. Any user with `MANAGEMENT` role (across all departments)
7. Any user with `ADMIN` role (across all departments)
### 4. Participant Creation
#### Location: `Re_Backend/src/services/dealerClaim.service.ts:463`
- Department Lead is automatically added as a participant when approval levels are created
- Participant type: `APPROVER`
- Allows department lead to view, comment, and approve the request
---
## Frontend Implementation
### 1. Request Creation (`ClaimManagementWizard.tsx`)
#### Location: `Re_Figma_Code/src/dealer-claim/components/request-creation/ClaimManagementWizard.tsx`
#### Current Implementation:
- **No UI for selecting Step 3 approver during creation**
- Step 3 approver is automatically resolved by backend based on:
- Initiator's manager field
- Department hierarchy
- Role-based lookup
#### Form Data Structure:
```typescript
// Lines 61-75: Form data structure
const [formData, setFormData] = useState({
activityName: '',
activityType: '',
dealerCode: '',
// ... other fields
// Note: No selectedManagerEmail field in wizard
});
```
#### Submission:
```typescript
// Lines 152-216: handleSubmit()
const claimData = {
...formData,
templateType: 'claim-management',
// selectedManagerEmail is NOT included in current wizard
// Backend will auto-resolve department lead
};
```
### 2. Request Detail View (`RequestDetail.tsx`)
#### Location: `Re_Figma_Code/src/dealer-claim/pages/RequestDetail.tsx`
#### Step 3 Approver Detection:
```typescript
// Lines 147-173: Finding Step 3 approver
const step3Level = approvalFlow.find((level: any) =>
(level.step || level.levelNumber || level.level_number) === 3
) || approvals.find((level: any) =>
(level.levelNumber || level.level_number) === 3
);
const deptLeadUserId = step3Level?.approverId || step3Level?.approver_id || step3Level?.approver?.userId;
const deptLeadEmail = (step3Level?.approverEmail || '').toLowerCase().trim();
// User is department lead if they match Step 3 approver
const isDeptLead = (deptLeadUserId && deptLeadUserId === currentUserId) ||
(deptLeadEmail && currentUserEmail && deptLeadEmail === currentUserEmail);
```
#### Add Approver Functionality:
- **Lines 203-217, 609, 621, 688, 701, 711**: References to `handleAddApprover` and `AddApproverModal`
- **Note**: This appears to be generic approver addition (for other workflow types), not specifically for Step 3
- Step 3 approver is **fixed** and cannot be changed after request creation
### 3. Workflow Tab (`WorkflowTab.tsx`)
#### Location: `Re_Figma_Code/src/dealer-claim/components/request-detail/WorkflowTab.tsx`
#### Step 3 Action Button Visibility:
```typescript
// Lines 1109-1126: Step 3 approval button
{step.step === 3 && (() => {
// Find step 3 from approvalFlow to get approverEmail
const step3Level = approvalFlow.find((l: any) => (l.step || l.levelNumber || l.level_number) === 3);
const step3ApproverEmail = (step3Level?.approverEmail || '').toLowerCase();
const isStep3ApproverByEmail = step3ApproverEmail && userEmail === step3ApproverEmail;
return isStep3ApproverByEmail || isStep3Approver || isCurrentApprover;
})() && (
<Button onClick={() => setShowIOApprovalModal(true)}>
Approve and Organise IO
</Button>
)}
```
#### Step 3 Approval Handler:
```typescript
// Lines 535-583: handleIOApproval()
// 1. Finds Step 3 levelId from approval levels
// 2. Updates IO details (ioNumber, ioRemark)
// 3. Approves Step 3 using approveLevel() API
// 4. Moves workflow to Step 4 (auto-processed)
```
---
## Key Findings
### Current Flow Summary:
1. **Request Creation**:
- User creates claim request via `ClaimManagementWizard`
- **No UI for selecting Step 3 approver**
- Backend automatically resolves department lead using:
- Initiator's `manager` displayName → Okta search
- Fallback to legacy resolution methods
2. **Multiple Managers Scenario**:
- If Okta search returns multiple managers:
- Backend throws `MULTIPLE_MANAGERS_FOUND` error
- Error includes list of manager options
- **Frontend needs to handle this** (currently not implemented in wizard)
3. **Approval Level Creation**:
- Step 3 approver is **fixed** at request creation
- Stored in `ApprovalLevel` table with:
- `levelNumber: 3`
- `approverId`: Department Lead's userId
- `approverEmail`: Department Lead's email
- `status: PENDING` (activated when Step 2 is approved)
4. **After Request Creation**:
- Step 3 approver **cannot be changed** via UI
- Generic `AddApproverModal` exists but is not used for Step 3
- Step 3 approver is determined by backend logic only
### Limitations:
1. **No User Selection During Creation**:
- Wizard doesn't allow user to select/override Step 3 approver
- If multiple managers found, error handling not implemented in frontend
2. **No Post-Creation Modification**:
- No UI to change Step 3 approver after request is created
- Would require backend API to update `ApprovalLevel.approverId`
3. **Fixed Resolution Logic**:
- Department lead resolution is hardcoded in backend
- No configuration or override mechanism
---
## Potential Enhancement Areas
1. **Frontend**: Add manager selection UI in wizard when multiple managers found
2. **Frontend**: Add "Change Approver" option for Step 3 (if allowed by business rules)
3. **Backend**: Add API endpoint to update Step 3 approver after request creation
4. **Backend**: Add configuration for department lead resolution rules
5. **Both**: Handle `MULTIPLE_MANAGERS_FOUND` error gracefully in frontend
---
## Related Files
### Backend:
- `Re_Backend/src/services/dealerClaim.service.ts` - Main service
- `Re_Backend/src/controllers/dealerClaim.controller.ts` - API endpoints
- `Re_Backend/src/services/user.service.ts` - User/Okta integration
- `Re_Backend/src/models/ApprovalLevel.ts` - Database model
### Frontend:
- `Re_Figma_Code/src/dealer-claim/components/request-creation/ClaimManagementWizard.tsx` - Request creation
- `Re_Figma_Code/src/dealer-claim/pages/RequestDetail.tsx` - Request detail view
- `Re_Figma_Code/src/dealer-claim/components/request-detail/WorkflowTab.tsx` - Workflow display
- `Re_Figma_Code/src/dealer-claim/components/request-detail/modals/DeptLeadIOApprovalModal.tsx` - Step 3 approval modal
### Documentation:
- `Re_Backend/docs/CLAIM_MANAGEMENT_APPROVER_MAPPING.md` - Approver mapping rules

View File

@ -84,18 +84,3 @@ VAPID_CONTACT=mailto:you@example.com
REDIS_URL={{REDIS_URL_FOR DELAY JoBS create redis setup and add url here}} REDIS_URL={{REDIS_URL_FOR DELAY JoBS create redis setup and add url here}}
TAT_TEST_MODE=false (on true it will consider 1 hour==1min) TAT_TEST_MODE=false (on true it will consider 1 hour==1min)
# SAP Integration (OData Service via Zscaler)
SAP_BASE_URL=https://RENOIHND01.Eichergroup.com:1443
SAP_USERNAME={{SAP_USERNAME}}
SAP_PASSWORD={{SAP_PASSWORD}}
SAP_TIMEOUT_MS=30000
# SAP OData Service Name for IO Validation (default: ZFI_BUDGET_CHECK_API_SRV)
SAP_SERVICE_NAME=ZFI_BUDGET_CHECK_API_SRV
# SAP OData Service Name for Budget Blocking (default: ZFI_BUDGET_BLOCK_API_SRV)
SAP_BLOCK_SERVICE_NAME=ZFI_BUDGET_BLOCK_API_SRV
# SAP Requester identifier for budget blocking API (default: REFMS)
SAP_REQUESTER=REFMS
# SAP SSL Verification (set to 'true' to disable SSL verification for testing with self-signed certs)
# WARNING: Only use in development/testing environments
SAP_DISABLE_SSL_VERIFY=false

57
package-lock.json generated
View File

@ -21,7 +21,6 @@
"dotenv": "^16.4.7", "dotenv": "^16.4.7",
"express": "^4.21.2", "express": "^4.21.2",
"express-rate-limit": "^7.5.0", "express-rate-limit": "^7.5.0",
"fast-xml-parser": "^5.3.3",
"helmet": "^8.0.0", "helmet": "^8.0.0",
"ioredis": "^5.8.2", "ioredis": "^5.8.2",
"jsonwebtoken": "^9.0.2", "jsonwebtoken": "^9.0.2",
@ -794,6 +793,18 @@
"fxparser": "src/cli/cli.js" "fxparser": "src/cli/cli.js"
} }
}, },
"node_modules/@aws-sdk/xml-builder/node_modules/strnum": {
"version": "2.1.1",
"resolved": "https://registry.npmjs.org/strnum/-/strnum-2.1.1.tgz",
"integrity": "sha512-7ZvoFTiCnGxBtDqJ//Cu6fWtZtc7Y3x+QOirG15wztbdngGSkht27o2pyGWrVy0b4WAy3jbKmnoK6g5VlVNUUw==",
"funding": [
{
"type": "github",
"url": "https://github.com/sponsors/NaturalIntelligence"
}
],
"license": "MIT"
},
"node_modules/@aws/lambda-invoke-store": { "node_modules/@aws/lambda-invoke-store": {
"version": "0.2.2", "version": "0.2.2",
"resolved": "https://registry.npmjs.org/@aws/lambda-invoke-store/-/lambda-invoke-store-0.2.2.tgz", "resolved": "https://registry.npmjs.org/@aws/lambda-invoke-store/-/lambda-invoke-store-0.2.2.tgz",
@ -1640,36 +1651,6 @@
"node": ">=14" "node": ">=14"
} }
}, },
"node_modules/@google-cloud/storage/node_modules/fast-xml-parser": {
"version": "4.5.3",
"resolved": "https://registry.npmjs.org/fast-xml-parser/-/fast-xml-parser-4.5.3.tgz",
"integrity": "sha512-RKihhV+SHsIUGXObeVy9AXiBbFwkVk7Syp8XgwN5U3JV416+Gwp/GO9i0JYKmikykgz/UHRrrV4ROuZEo/T0ig==",
"funding": [
{
"type": "github",
"url": "https://github.com/sponsors/NaturalIntelligence"
}
],
"license": "MIT",
"dependencies": {
"strnum": "^1.1.1"
},
"bin": {
"fxparser": "src/cli/cli.js"
}
},
"node_modules/@google-cloud/storage/node_modules/strnum": {
"version": "1.1.2",
"resolved": "https://registry.npmjs.org/strnum/-/strnum-1.1.2.tgz",
"integrity": "sha512-vrN+B7DBIoTTZjnPNewwhx6cBA/H+IS7rfW68n7XxC1y7uoiGQBxaKzqucGUgavX15dJgiGztLJ8vxuEzwqBdA==",
"funding": [
{
"type": "github",
"url": "https://github.com/sponsors/NaturalIntelligence"
}
],
"license": "MIT"
},
"node_modules/@google-cloud/vertexai": { "node_modules/@google-cloud/vertexai": {
"version": "1.10.0", "version": "1.10.0",
"resolved": "https://registry.npmjs.org/@google-cloud/vertexai/-/vertexai-1.10.0.tgz", "resolved": "https://registry.npmjs.org/@google-cloud/vertexai/-/vertexai-1.10.0.tgz",
@ -6365,9 +6346,9 @@
"license": "MIT" "license": "MIT"
}, },
"node_modules/fast-xml-parser": { "node_modules/fast-xml-parser": {
"version": "5.3.3", "version": "4.5.3",
"resolved": "https://registry.npmjs.org/fast-xml-parser/-/fast-xml-parser-5.3.3.tgz", "resolved": "https://registry.npmjs.org/fast-xml-parser/-/fast-xml-parser-4.5.3.tgz",
"integrity": "sha512-2O3dkPAAC6JavuMm8+4+pgTk+5hoAs+CjZ+sWcQLkX9+/tHRuTkQh/Oaifr8qDmZ8iEHb771Ea6G8CdwkrgvYA==", "integrity": "sha512-RKihhV+SHsIUGXObeVy9AXiBbFwkVk7Syp8XgwN5U3JV416+Gwp/GO9i0JYKmikykgz/UHRrrV4ROuZEo/T0ig==",
"funding": [ "funding": [
{ {
"type": "github", "type": "github",
@ -6376,7 +6357,7 @@
], ],
"license": "MIT", "license": "MIT",
"dependencies": { "dependencies": {
"strnum": "^2.1.0" "strnum": "^1.1.1"
}, },
"bin": { "bin": {
"fxparser": "src/cli/cli.js" "fxparser": "src/cli/cli.js"
@ -10810,9 +10791,9 @@
} }
}, },
"node_modules/strnum": { "node_modules/strnum": {
"version": "2.1.2", "version": "1.1.2",
"resolved": "https://registry.npmjs.org/strnum/-/strnum-2.1.2.tgz", "resolved": "https://registry.npmjs.org/strnum/-/strnum-1.1.2.tgz",
"integrity": "sha512-l63NF9y/cLROq/yqKXSLtcMeeyOfnSQlfMSlzFt/K73oIaD8DGaQWd7Z34X9GPiKqP5rbSh84Hl4bOlLcjiSrQ==", "integrity": "sha512-vrN+B7DBIoTTZjnPNewwhx6cBA/H+IS7rfW68n7XxC1y7uoiGQBxaKzqucGUgavX15dJgiGztLJ8vxuEzwqBdA==",
"funding": [ "funding": [
{ {
"type": "github", "type": "github",

View File

@ -4,8 +4,8 @@
"description": "Royal Enfield Workflow Management System - Backend API (TypeScript)", "description": "Royal Enfield Workflow Management System - Backend API (TypeScript)",
"main": "dist/server.js", "main": "dist/server.js",
"scripts": { "scripts": {
"start": "npm install && npm run setup && npm run build && npm run start:prod", "start": "npm run setup && npm run build && npm run start:prod",
"dev": "npm run setup && npm run migrate && nodemon --exec ts-node -r tsconfig-paths/register src/server.ts", "dev": "npm run setup && nodemon --exec ts-node -r tsconfig-paths/register src/server.ts",
"dev:no-setup": "nodemon --exec ts-node -r tsconfig-paths/register src/server.ts", "dev:no-setup": "nodemon --exec ts-node -r tsconfig-paths/register src/server.ts",
"build": "tsc && tsc-alias", "build": "tsc && tsc-alias",
"build:watch": "tsc --watch", "build:watch": "tsc --watch",
@ -17,9 +17,7 @@
"clean": "rm -rf dist", "clean": "rm -rf dist",
"setup": "ts-node -r tsconfig-paths/register src/scripts/auto-setup.ts", "setup": "ts-node -r tsconfig-paths/register src/scripts/auto-setup.ts",
"migrate": "ts-node -r tsconfig-paths/register src/scripts/migrate.ts", "migrate": "ts-node -r tsconfig-paths/register src/scripts/migrate.ts",
"seed:config": "ts-node -r tsconfig-paths/register src/scripts/seed-admin-config.ts", "seed:config": "ts-node -r tsconfig-paths/register src/scripts/seed-admin-config.ts"
"seed:dealers": "ts-node -r tsconfig-paths/register src/scripts/seed-dealers.ts",
"cleanup:dealer-claims": "ts-node -r tsconfig-paths/register src/scripts/cleanup-dealer-claims.ts"
}, },
"dependencies": { "dependencies": {
"@google-cloud/storage": "^7.18.0", "@google-cloud/storage": "^7.18.0",
@ -35,7 +33,6 @@
"dotenv": "^16.4.7", "dotenv": "^16.4.7",
"express": "^4.21.2", "express": "^4.21.2",
"express-rate-limit": "^7.5.0", "express-rate-limit": "^7.5.0",
"fast-xml-parser": "^5.3.3",
"helmet": "^8.0.0", "helmet": "^8.0.0",
"ioredis": "^5.8.2", "ioredis": "^5.8.2",
"jsonwebtoken": "^9.0.2", "jsonwebtoken": "^9.0.2",

View File

@ -66,8 +66,6 @@ export const constants = {
REFERENCE: 'REFERENCE', REFERENCE: 'REFERENCE',
FINAL: 'FINAL', FINAL: 'FINAL',
OTHER: 'OTHER', OTHER: 'OTHER',
COMPLETION_DOC: 'COMPLETION_DOC',
ACTIVITY_PHOTO: 'ACTIVITY_PHOTO',
}, },
// Work Note Types // Work Note Types

View File

@ -11,7 +11,6 @@ const ssoConfig: SSOConfig = {
oktaDomain: process.env.OKTA_DOMAIN || 'https://dev-830839.oktapreview.com', oktaDomain: process.env.OKTA_DOMAIN || 'https://dev-830839.oktapreview.com',
oktaClientId: process.env.OKTA_CLIENT_ID || '', oktaClientId: process.env.OKTA_CLIENT_ID || '',
oktaClientSecret: process.env.OKTA_CLIENT_SECRET || '', oktaClientSecret: process.env.OKTA_CLIENT_SECRET || '',
oktaApiToken: process.env.OKTA_API_TOKEN || '', // SSWS token for Users API
}; };
export { ssoConfig }; export { ssoConfig };

View File

@ -782,15 +782,15 @@ export const assignRoleByEmail = async (req: Request, res: Response): Promise<vo
// User doesn't exist, need to fetch from Okta and create // User doesn't exist, need to fetch from Okta and create
logger.info(`[Admin] User ${email} not found in database, fetching from Okta...`); logger.info(`[Admin] User ${email} not found in database, fetching from Okta...`);
// Import UserService to fetch full profile from Okta // Import UserService to search Okta
const { UserService } = await import('@services/user.service'); const { UserService } = await import('@services/user.service');
const userService = new UserService(); const userService = new UserService();
try { try {
// Fetch full user profile from Okta Users API (includes manager, jobTitle, etc.) // Search Okta for this user
const oktaUserData = await userService.fetchAndExtractOktaUserByEmail(email); const oktaUsers = await userService.searchUsers(email, 1);
if (!oktaUserData) { if (!oktaUsers || oktaUsers.length === 0) {
res.status(404).json({ res.status(404).json({
success: false, success: false,
error: 'User not found in Okta. Please ensure the email is correct.' error: 'User not found in Okta. Please ensure the email is correct.'
@ -798,15 +798,25 @@ export const assignRoleByEmail = async (req: Request, res: Response): Promise<vo
return; return;
} }
// Create user in our database via centralized userService with all fields including manager const oktaUser = oktaUsers[0];
const ensured = await userService.createOrUpdateUser({
...oktaUserData,
role, // Set the assigned role
isActive: true, // Ensure user is active
});
user = ensured;
logger.info(`[Admin] Created new user ${email} with role ${role} (manager: ${oktaUserData.manager || 'N/A'})`); // Create user in our database
user = await User.create({
email: oktaUser.email,
oktaSub: (oktaUser as any).userId || (oktaUser as any).oktaSub, // Okta user ID as oktaSub
employeeId: (oktaUser as any).employeeNumber || (oktaUser as any).employeeId || null,
firstName: oktaUser.firstName || null,
lastName: oktaUser.lastName || null,
displayName: oktaUser.displayName || `${oktaUser.firstName || ''} ${oktaUser.lastName || ''}`.trim() || oktaUser.email,
department: oktaUser.department || null,
designation: (oktaUser as any).designation || (oktaUser as any).title || null,
phone: (oktaUser as any).phone || (oktaUser as any).mobilePhone || null,
isActive: true,
role: role, // Assign the requested role
lastLogin: undefined // Not logged in yet
});
logger.info(`[Admin] Created new user ${email} with role ${role}`);
} catch (oktaError: any) { } catch (oktaError: any) {
logger.error('[Admin] Error fetching from Okta:', oktaError); logger.error('[Admin] Error fetching from Okta:', oktaError);
res.status(500).json({ res.status(500).json({
@ -816,7 +826,7 @@ export const assignRoleByEmail = async (req: Request, res: Response): Promise<vo
return; return;
} }
} else { } else {
// User exists - fetch latest data from Okta and sync all fields including role // User exists, update their role
const previousRole = user.role; const previousRole = user.role;
// Prevent self-demotion // Prevent self-demotion
@ -828,35 +838,9 @@ export const assignRoleByEmail = async (req: Request, res: Response): Promise<vo
return; return;
} }
// Import UserService to fetch latest data from Okta
const { UserService } = await import('@services/user.service');
const userService = new UserService();
try {
// Fetch full user profile from Okta Users API to sync manager and other fields
const oktaUserData = await userService.fetchAndExtractOktaUserByEmail(email);
if (oktaUserData) {
// Sync all fields from Okta including the new role using centralized method
const updated = await userService.createOrUpdateUser({
...oktaUserData, // Includes all fields: manager, jobTitle, postalAddress, etc.
role, // Set the new role
isActive: true, // Ensure user is active
});
user = updated;
logger.info(`[Admin] Synced user ${email} from Okta (manager: ${oktaUserData.manager || 'N/A'}) and updated role from ${previousRole} to ${role}`);
} else {
// Okta user not found, just update role
await user.update({ role }); await user.update({ role });
logger.info(`[Admin] Updated user ${email} role from ${previousRole} to ${role} (Okta data not available)`);
} logger.info(`[Admin] Updated user ${email} role from ${previousRole} to ${role}`);
} catch (oktaError: any) {
// If Okta fetch fails, just update the role
logger.warn(`[Admin] Failed to fetch Okta data for ${email}, updating role only:`, oktaError.message);
await user.update({ role });
logger.info(`[Admin] Updated user ${email} role from ${previousRole} to ${role} (Okta sync failed)`);
}
} }
res.json({ res.json({

View File

@ -1,86 +0,0 @@
import { Request, Response } from 'express';
import type { AuthenticatedRequest } from '../types/express';
import * as dealerService from '../services/dealer.service';
import { ResponseHandler } from '../utils/responseHandler';
import logger from '../utils/logger';
/**
 * HTTP controller exposing read-only dealer lookup endpoints.
 * Every handler replies through ResponseHandler and logs failures
 * with a [DealerController] prefix.
 */
export class DealerController {
  /**
   * Log a handler failure and send a 500 response.
   * The public message goes to the client; the underlying error
   * message is attached as detail.
   */
  private fail(res: Response, logLabel: string, publicMessage: string, error: unknown): void {
    const detail = error instanceof Error ? error.message : 'Unknown error';
    logger.error(`[DealerController] ${logLabel}`, error);
    ResponseHandler.error(res, publicMessage, 500, detail);
  }

  /**
   * Get all dealers
   * GET /api/v1/dealers
   */
  async getAllDealers(req: Request, res: Response): Promise<void> {
    try {
      const allDealers = await dealerService.getAllDealers();
      return ResponseHandler.success(res, allDealers, 'Dealers fetched successfully');
    } catch (error) {
      return this.fail(res, 'Error fetching dealers:', 'Failed to fetch dealers', error);
    }
  }

  /**
   * Get dealer by code
   * GET /api/v1/dealers/code/:dealerCode
   */
  async getDealerByCode(req: Request, res: Response): Promise<void> {
    try {
      const { dealerCode } = req.params;
      const found = await dealerService.getDealerByCode(dealerCode);
      if (!found) {
        return ResponseHandler.error(res, 'Dealer not found', 404);
      }
      return ResponseHandler.success(res, found, 'Dealer fetched successfully');
    } catch (error) {
      return this.fail(res, 'Error fetching dealer by code:', 'Failed to fetch dealer', error);
    }
  }

  /**
   * Get dealer by email
   * GET /api/v1/dealers/email/:email
   */
  async getDealerByEmail(req: Request, res: Response): Promise<void> {
    try {
      const { email } = req.params;
      const found = await dealerService.getDealerByEmail(email);
      if (!found) {
        return ResponseHandler.error(res, 'Dealer not found', 404);
      }
      return ResponseHandler.success(res, found, 'Dealer fetched successfully');
    } catch (error) {
      return this.fail(res, 'Error fetching dealer by email:', 'Failed to fetch dealer', error);
    }
  }

  /**
   * Search dealers
   * GET /api/v1/dealers/search?q=searchTerm
   */
  async searchDealers(req: Request, res: Response): Promise<void> {
    try {
      const { q } = req.query;
      if (typeof q !== 'string' || !q) {
        return ResponseHandler.error(res, 'Search term is required', 400);
      }
      const matches = await dealerService.searchDealers(q);
      return ResponseHandler.success(res, matches, 'Dealers searched successfully');
    } catch (error) {
      return this.fail(res, 'Error searching dealers:', 'Failed to search dealers', error);
    }
  }
}

View File

@ -1,842 +0,0 @@
import { Request, Response } from 'express';
import type { AuthenticatedRequest } from '../types/express';
import { DealerClaimService } from '../services/dealerClaim.service';
import { ResponseHandler } from '../utils/responseHandler';
import logger from '../utils/logger';
import { gcsStorageService } from '../services/gcsStorage.service';
import { Document } from '../models/Document';
import { InternalOrder } from '../models/InternalOrder';
import { constants } from '../config/constants';
import { sapIntegrationService } from '../services/sapIntegration.service';
import fs from 'fs';
import path from 'path';
import crypto from 'crypto';
export class DealerClaimController {
  // Service layer handling dealer-claim workflow logic; one instance per controller.
  private dealerClaimService = new DealerClaimService();
/**
* Create a new dealer claim request
* POST /api/v1/dealer-claims
*/
async createClaimRequest(req: AuthenticatedRequest, res: Response): Promise<void> {
try {
const userId = req.user?.userId;
if (!userId) {
return ResponseHandler.error(res, 'Unauthorized', 401);
}
const {
activityName,
activityType,
dealerCode,
dealerName,
dealerEmail,
dealerPhone,
dealerAddress,
activityDate,
location,
requestDescription,
periodStartDate,
periodEndDate,
estimatedBudget,
approvers, // Array of approvers for all 8 steps
} = req.body;
// Validation
if (!activityName || !activityType || !dealerCode || !dealerName || !location || !requestDescription) {
return ResponseHandler.error(res, 'Missing required fields', 400);
}
const claimRequest = await this.dealerClaimService.createClaimRequest(userId, {
activityName,
activityType,
dealerCode,
dealerName,
dealerEmail,
dealerPhone,
dealerAddress,
activityDate: activityDate ? new Date(activityDate) : undefined,
location,
requestDescription,
periodStartDate: periodStartDate ? new Date(periodStartDate) : undefined,
periodEndDate: periodEndDate ? new Date(periodEndDate) : undefined,
estimatedBudget: estimatedBudget ? parseFloat(estimatedBudget) : undefined,
approvers: approvers || [], // Pass approvers array for all 8 steps
});
return ResponseHandler.success(res, {
request: claimRequest,
message: 'Claim request created successfully'
}, 'Claim request created');
} catch (error: any) {
// Handle approver validation errors
if (error.message && error.message.includes('Approver')) {
logger.warn('[DealerClaimController] Approver validation error:', { message: error.message });
return ResponseHandler.error(res, error.message, 400);
}
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
logger.error('[DealerClaimController] Error creating claim request:', error);
return ResponseHandler.error(res, 'Failed to create claim request', 500, errorMessage);
}
}
/**
* Get claim details
* GET /api/v1/dealer-claims/:requestId
* Accepts either UUID or requestNumber
*/
async getClaimDetails(req: Request, res: Response): Promise<void> {
try {
const identifier = req.params.requestId; // Can be UUID or requestNumber
// Find workflow to get actual UUID
const workflow = await this.findWorkflowByIdentifier(identifier);
if (!workflow) {
return ResponseHandler.error(res, 'Workflow request not found', 404);
}
const requestId = (workflow as any).requestId || (workflow as any).request_id;
if (!requestId) {
return ResponseHandler.error(res, 'Invalid workflow request', 400);
}
const claimDetails = await this.dealerClaimService.getClaimDetails(requestId);
return ResponseHandler.success(res, claimDetails, 'Claim details fetched');
} catch (error) {
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
logger.error('[DealerClaimController] Error getting claim details:', error);
return ResponseHandler.error(res, 'Failed to fetch claim details', 500, errorMessage);
}
}
/**
* Helper to find workflow by either requestId (UUID) or requestNumber
*/
private async findWorkflowByIdentifier(identifier: string): Promise<any> {
const isUuid = (id: string): boolean => {
const uuidRegex = /^[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$/i;
return uuidRegex.test(id);
};
const { WorkflowRequest } = await import('../models/WorkflowRequest');
if (isUuid(identifier)) {
return await WorkflowRequest.findByPk(identifier);
} else {
return await WorkflowRequest.findOne({ where: { requestNumber: identifier } });
}
}
/**
* Submit dealer proposal (Step 1)
* POST /api/v1/dealer-claims/:requestId/proposal
* Accepts either UUID or requestNumber
*/
async submitProposal(req: AuthenticatedRequest, res: Response): Promise<void> {
try {
const identifier = req.params.requestId; // Can be UUID or requestNumber
const userId = req.user?.userId;
const {
costBreakup,
totalEstimatedBudget,
timelineMode,
expectedCompletionDate,
expectedCompletionDays,
dealerComments,
} = req.body;
// Find workflow by identifier (UUID or requestNumber)
const workflow = await this.findWorkflowByIdentifier(identifier);
if (!workflow) {
return ResponseHandler.error(res, 'Workflow request not found', 404);
}
// Get actual UUID and requestNumber
const requestId = (workflow as any).requestId || (workflow as any).request_id;
const requestNumber = (workflow as any).requestNumber || (workflow as any).request_number;
if (!requestId) {
return ResponseHandler.error(res, 'Invalid workflow request', 400);
}
// Parse costBreakup - it comes as JSON string from FormData
let parsedCostBreakup: any[] = [];
if (costBreakup) {
if (typeof costBreakup === 'string') {
try {
parsedCostBreakup = JSON.parse(costBreakup);
} catch (parseError) {
logger.error('[DealerClaimController] Failed to parse costBreakup JSON:', parseError);
return ResponseHandler.error(res, 'Invalid costBreakup format. Expected JSON array.', 400);
}
} else if (Array.isArray(costBreakup)) {
parsedCostBreakup = costBreakup;
} else {
logger.warn('[DealerClaimController] costBreakup is not a string or array:', typeof costBreakup);
parsedCostBreakup = [];
}
}
// Validate costBreakup is an array
if (!Array.isArray(parsedCostBreakup)) {
logger.error('[DealerClaimController] costBreakup is not an array after parsing:', parsedCostBreakup);
return ResponseHandler.error(res, 'costBreakup must be an array of cost items', 400);
}
// Validate each cost item has required fields
for (const item of parsedCostBreakup) {
if (!item.description || item.amount === undefined || item.amount === null) {
return ResponseHandler.error(res, 'Each cost item must have description and amount', 400);
}
}
// Handle file upload if present
let proposalDocumentPath: string | undefined;
let proposalDocumentUrl: string | undefined;
if (req.file) {
const file = req.file;
const fileBuffer = file.buffer || (file.path ? fs.readFileSync(file.path) : Buffer.from(''));
const uploadResult = await gcsStorageService.uploadFileWithFallback({
buffer: fileBuffer,
originalName: file.originalname,
mimeType: file.mimetype,
requestNumber: requestNumber || 'UNKNOWN',
fileType: 'documents'
});
proposalDocumentPath = uploadResult.filePath;
proposalDocumentUrl = uploadResult.storageUrl;
// Cleanup local file if exists
if (file.path && fs.existsSync(file.path)) {
fs.unlinkSync(file.path);
}
}
// Use actual UUID for service call with parsed costBreakup array
await this.dealerClaimService.submitDealerProposal(requestId, {
proposalDocumentPath,
proposalDocumentUrl,
costBreakup: parsedCostBreakup, // Use parsed array
totalEstimatedBudget: totalEstimatedBudget ? parseFloat(totalEstimatedBudget) : 0,
timelineMode: timelineMode || 'date',
expectedCompletionDate: expectedCompletionDate ? new Date(expectedCompletionDate) : undefined,
expectedCompletionDays: expectedCompletionDays ? parseInt(expectedCompletionDays) : undefined,
dealerComments: dealerComments || '',
});
return ResponseHandler.success(res, { message: 'Proposal submitted successfully' }, 'Proposal submitted');
} catch (error) {
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
logger.error('[DealerClaimController] Error submitting proposal:', error);
return ResponseHandler.error(res, 'Failed to submit proposal', 500, errorMessage);
}
}
/**
* Submit completion documents (Step 5)
* POST /api/v1/dealer-claims/:requestId/completion
* Accepts either UUID or requestNumber
*/
async submitCompletion(req: AuthenticatedRequest, res: Response): Promise<void> {
try {
const identifier = req.params.requestId; // Can be UUID or requestNumber
const {
activityCompletionDate,
numberOfParticipants,
closedExpenses,
totalClosedExpenses,
} = req.body;
// Parse closedExpenses if it's a JSON string
let parsedClosedExpenses: any[] = [];
if (closedExpenses) {
try {
parsedClosedExpenses = typeof closedExpenses === 'string' ? JSON.parse(closedExpenses) : closedExpenses;
} catch (e) {
logger.warn('[DealerClaimController] Failed to parse closedExpenses JSON:', e);
parsedClosedExpenses = [];
}
}
// Get files from multer
const files = req.files as { [fieldname: string]: Express.Multer.File[] } | undefined;
const completionDocumentsFiles = files?.completionDocuments || [];
const activityPhotosFiles = files?.activityPhotos || [];
const invoicesReceiptsFiles = files?.invoicesReceipts || [];
const attendanceSheetFile = files?.attendanceSheet?.[0];
// Find workflow to get actual UUID
const workflow = await this.findWorkflowByIdentifier(identifier);
if (!workflow) {
return ResponseHandler.error(res, 'Workflow request not found', 404);
}
const requestId = (workflow as any).requestId || (workflow as any).request_id;
const requestNumber = (workflow as any).requestNumber || (workflow as any).request_number || 'UNKNOWN';
if (!requestId) {
return ResponseHandler.error(res, 'Invalid workflow request', 400);
}
const userId = (req as any).user?.userId || (req as any).user?.user_id;
if (!userId) {
return ResponseHandler.error(res, 'User not authenticated', 401);
}
if (!activityCompletionDate) {
return ResponseHandler.error(res, 'Activity completion date is required', 400);
}
// Upload files to GCS and save to documents table
const completionDocuments: any[] = [];
const activityPhotos: any[] = [];
// Upload and save completion documents to documents table with COMPLETION_DOC category
for (const file of completionDocumentsFiles) {
try {
const fileBuffer = file.buffer || (file.path ? fs.readFileSync(file.path) : Buffer.from(''));
const checksum = crypto.createHash('sha256').update(fileBuffer).digest('hex');
const uploadResult = await gcsStorageService.uploadFileWithFallback({
buffer: fileBuffer,
originalName: file.originalname,
mimeType: file.mimetype,
requestNumber: requestNumber,
fileType: 'documents'
});
const extension = path.extname(file.originalname).replace('.', '').toLowerCase();
// Save to documents table
const doc = await Document.create({
requestId,
uploadedBy: userId,
fileName: path.basename(file.filename || file.originalname),
originalFileName: file.originalname,
fileType: extension,
fileExtension: extension,
fileSize: file.size,
filePath: uploadResult.filePath,
storageUrl: uploadResult.storageUrl,
mimeType: file.mimetype,
checksum,
isGoogleDoc: false,
googleDocUrl: null as any,
category: constants.DOCUMENT_CATEGORIES.COMPLETION_DOC,
version: 1,
parentDocumentId: null as any,
isDeleted: false,
downloadCount: 0,
} as any);
completionDocuments.push({
documentId: doc.documentId,
name: file.originalname,
url: uploadResult.storageUrl,
size: file.size,
type: file.mimetype,
});
// Cleanup local file if exists
if (file.path && fs.existsSync(file.path)) {
try {
fs.unlinkSync(file.path);
} catch (unlinkError) {
logger.warn(`[DealerClaimController] Failed to delete local file ${file.path}`);
}
}
} catch (error) {
logger.error(`[DealerClaimController] Error uploading completion document ${file.originalname}:`, error);
}
}
// Upload and save activity photos to documents table with ACTIVITY_PHOTO category
for (const file of activityPhotosFiles) {
try {
const fileBuffer = file.buffer || (file.path ? fs.readFileSync(file.path) : Buffer.from(''));
const checksum = crypto.createHash('sha256').update(fileBuffer).digest('hex');
const uploadResult = await gcsStorageService.uploadFileWithFallback({
buffer: fileBuffer,
originalName: file.originalname,
mimeType: file.mimetype,
requestNumber: requestNumber,
fileType: 'attachments'
});
const extension = path.extname(file.originalname).replace('.', '').toLowerCase();
// Save to documents table
const doc = await Document.create({
requestId,
uploadedBy: userId,
fileName: path.basename(file.filename || file.originalname),
originalFileName: file.originalname,
fileType: extension,
fileExtension: extension,
fileSize: file.size,
filePath: uploadResult.filePath,
storageUrl: uploadResult.storageUrl,
mimeType: file.mimetype,
checksum,
isGoogleDoc: false,
googleDocUrl: null as any,
category: constants.DOCUMENT_CATEGORIES.ACTIVITY_PHOTO,
version: 1,
parentDocumentId: null as any,
isDeleted: false,
downloadCount: 0,
} as any);
activityPhotos.push({
documentId: doc.documentId,
name: file.originalname,
url: uploadResult.storageUrl,
size: file.size,
type: file.mimetype,
});
// Cleanup local file if exists
if (file.path && fs.existsSync(file.path)) {
try {
fs.unlinkSync(file.path);
} catch (unlinkError) {
logger.warn(`[DealerClaimController] Failed to delete local file ${file.path}`);
}
}
} catch (error) {
logger.error(`[DealerClaimController] Error uploading activity photo ${file.originalname}:`, error);
}
}
// Upload and save invoices/receipts to documents table with SUPPORTING category
const invoicesReceipts: any[] = [];
for (const file of invoicesReceiptsFiles) {
try {
const fileBuffer = file.buffer || (file.path ? fs.readFileSync(file.path) : Buffer.from(''));
const checksum = crypto.createHash('sha256').update(fileBuffer).digest('hex');
const uploadResult = await gcsStorageService.uploadFileWithFallback({
buffer: fileBuffer,
originalName: file.originalname,
mimeType: file.mimetype,
requestNumber: requestNumber,
fileType: 'attachments'
});
const extension = path.extname(file.originalname).replace('.', '').toLowerCase();
// Save to documents table
const doc = await Document.create({
requestId,
uploadedBy: userId,
fileName: path.basename(file.filename || file.originalname),
originalFileName: file.originalname,
fileType: extension,
fileExtension: extension,
fileSize: file.size,
filePath: uploadResult.filePath,
storageUrl: uploadResult.storageUrl,
mimeType: file.mimetype,
checksum,
isGoogleDoc: false,
googleDocUrl: null as any,
category: constants.DOCUMENT_CATEGORIES.SUPPORTING,
version: 1,
parentDocumentId: null as any,
isDeleted: false,
downloadCount: 0,
} as any);
invoicesReceipts.push({
documentId: doc.documentId,
name: file.originalname,
url: uploadResult.storageUrl,
size: file.size,
type: file.mimetype,
});
// Cleanup local file if exists
if (file.path && fs.existsSync(file.path)) {
try {
fs.unlinkSync(file.path);
} catch (unlinkError) {
logger.warn(`[DealerClaimController] Failed to delete local file ${file.path}`);
}
}
} catch (error) {
logger.error(`[DealerClaimController] Error uploading invoice/receipt ${file.originalname}:`, error);
}
}
// Upload and save attendance sheet to documents table with SUPPORTING category
let attendanceSheet: any = null;
if (attendanceSheetFile) {
try {
const fileBuffer = attendanceSheetFile.buffer || (attendanceSheetFile.path ? fs.readFileSync(attendanceSheetFile.path) : Buffer.from(''));
const checksum = crypto.createHash('sha256').update(fileBuffer).digest('hex');
const uploadResult = await gcsStorageService.uploadFileWithFallback({
buffer: fileBuffer,
originalName: attendanceSheetFile.originalname,
mimeType: attendanceSheetFile.mimetype,
requestNumber: requestNumber,
fileType: 'attachments'
});
const extension = path.extname(attendanceSheetFile.originalname).replace('.', '').toLowerCase();
// Save to documents table
const doc = await Document.create({
requestId,
uploadedBy: userId,
fileName: path.basename(attendanceSheetFile.filename || attendanceSheetFile.originalname),
originalFileName: attendanceSheetFile.originalname,
fileType: extension,
fileExtension: extension,
fileSize: attendanceSheetFile.size,
filePath: uploadResult.filePath,
storageUrl: uploadResult.storageUrl,
mimeType: attendanceSheetFile.mimetype,
checksum,
isGoogleDoc: false,
googleDocUrl: null as any,
category: constants.DOCUMENT_CATEGORIES.SUPPORTING,
version: 1,
parentDocumentId: null as any,
isDeleted: false,
downloadCount: 0,
} as any);
attendanceSheet = {
documentId: doc.documentId,
name: attendanceSheetFile.originalname,
url: uploadResult.storageUrl,
size: attendanceSheetFile.size,
type: attendanceSheetFile.mimetype,
};
// Cleanup local file if exists
if (attendanceSheetFile.path && fs.existsSync(attendanceSheetFile.path)) {
try {
fs.unlinkSync(attendanceSheetFile.path);
} catch (unlinkError) {
logger.warn(`[DealerClaimController] Failed to delete local file ${attendanceSheetFile.path}`);
}
}
} catch (error) {
logger.error(`[DealerClaimController] Error uploading attendance sheet:`, error);
}
}
await this.dealerClaimService.submitCompletionDocuments(requestId, {
activityCompletionDate: new Date(activityCompletionDate),
numberOfParticipants: numberOfParticipants ? parseInt(numberOfParticipants) : undefined,
closedExpenses: parsedClosedExpenses,
totalClosedExpenses: totalClosedExpenses ? parseFloat(totalClosedExpenses) : 0,
invoicesReceipts: invoicesReceipts.length > 0 ? invoicesReceipts : undefined,
attendanceSheet: attendanceSheet || undefined,
});
return ResponseHandler.success(res, { message: 'Completion documents submitted successfully' }, 'Completion submitted');
} catch (error) {
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
logger.error('[DealerClaimController] Error submitting completion:', error);
return ResponseHandler.error(res, 'Failed to submit completion documents', 500, errorMessage);
}
}
/**
* Validate/Fetch IO details from SAP
* GET /api/v1/dealer-claims/:requestId/io/validate?ioNumber=IO1234
* This endpoint fetches IO details from SAP and returns them, does not store anything
* Flow: Fetch from SAP -> Return to frontend (no database storage)
*/
async validateIO(req: AuthenticatedRequest, res: Response): Promise<void> {
try {
const { ioNumber } = req.query;
if (!ioNumber || typeof ioNumber !== 'string') {
return ResponseHandler.error(res, 'IO number is required', 400);
}
// Fetch IO details from SAP (will return mock data until SAP is integrated)
const ioValidation = await sapIntegrationService.validateIONumber(ioNumber.trim());
if (!ioValidation.isValid) {
return ResponseHandler.error(res, ioValidation.error || 'Invalid IO number', 400);
}
return ResponseHandler.success(res, {
ioNumber: ioValidation.ioNumber,
availableBalance: ioValidation.availableBalance,
blockedAmount: ioValidation.blockedAmount,
remainingBalance: ioValidation.remainingBalance,
currency: ioValidation.currency,
description: ioValidation.description,
isValid: true,
}, 'IO fetched successfully from SAP');
} catch (error) {
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
logger.error('[DealerClaimController] Error validating IO:', error);
return ResponseHandler.error(res, 'Failed to fetch IO from SAP', 500, errorMessage);
}
}
/**
 * Update IO details and block amount in SAP.
 * PUT /api/v1/dealer-claims/:requestId/io
 *
 * Accepts either UUID or requestNumber as :requestId.
 * Three mutually exclusive paths, chosen in order:
 *   1. blockedAmount > 0  -> block the amount in SAP and persist IO details;
 *   2. ioNumber + ioRemark (no blocking) -> persist IO number/remark only,
 *      preserving balances from an earlier blocking (Step 3 approval case);
 *   3. otherwise -> validation-only acknowledgement, nothing stored.
 */
async updateIODetails(req: AuthenticatedRequest, res: Response): Promise<void> {
  // Resolved before the try block so it is in scope for all logging below.
  const userId = (req as any).user?.userId || (req as any).user?.user_id;
  try {
    const identifier = req.params.requestId; // Can be UUID or requestNumber
    const {
      ioNumber,
      ioRemark,
      availableBalance,
      blockedAmount,
      // NOTE(review): accepted from the client but deliberately never used —
      // the remaining balance is always taken from SAP (see comments below).
      remainingBalance,
    } = req.body;
    // Find workflow to get actual UUID
    const workflow = await this.findWorkflowByIdentifier(identifier);
    if (!workflow) {
      return ResponseHandler.error(res, 'Workflow request not found', 404);
    }
    const requestId = (workflow as any).requestId || (workflow as any).request_id;
    if (!requestId) {
      return ResponseHandler.error(res, 'Invalid workflow request', 400);
    }
    if (!ioNumber) {
      return ResponseHandler.error(res, 'IO number is required', 400);
    }
    // Falsy blockedAmount (absent, 0, '') normalizes to 0 = "not blocking".
    const blockAmount = blockedAmount ? parseFloat(blockedAmount) : 0;
    // Log received data for debugging
    logger.info('[DealerClaimController] updateIODetails received:', {
      requestId,
      ioNumber,
      availableBalance,
      blockedAmount: blockAmount,
      receivedBlockedAmount: blockedAmount, // Original value from request
      userId,
    });
    // Store in database when blocking amount > 0 OR when ioNumber and ioRemark are provided (for Step 3 approval)
    if (blockAmount > 0) {
      if (availableBalance === undefined) {
        return ResponseHandler.error(res, 'Available balance is required when blocking amount', 400);
      }
      // Don't pass remainingBalance - let the service calculate it from SAP's response
      // This ensures we always use the actual remaining balance from SAP after blocking
      const ioData = {
        ioNumber,
        ioRemark: ioRemark || '',
        availableBalance: parseFloat(availableBalance),
        blockedAmount: blockAmount,
        // remainingBalance will be calculated by the service from SAP's response
      };
      logger.info('[DealerClaimController] Calling updateIODetails service with:', ioData);
      await this.dealerClaimService.updateIODetails(
        requestId,
        ioData,
        userId
      );
      // Fetch and return the updated IO details from database
      const updatedIO = await InternalOrder.findOne({ where: { requestId } });
      if (updatedIO) {
        return ResponseHandler.success(res, {
          message: 'IO blocked successfully in SAP',
          ioDetails: {
            ioNumber: updatedIO.ioNumber,
            ioAvailableBalance: updatedIO.ioAvailableBalance,
            ioBlockedAmount: updatedIO.ioBlockedAmount,
            ioRemainingBalance: updatedIO.ioRemainingBalance,
            ioRemark: updatedIO.ioRemark,
            status: updatedIO.status,
          }
        }, 'IO blocked');
      }
      // Row not found after the service call — still report success since
      // the SAP-side blocking already happened.
      return ResponseHandler.success(res, { message: 'IO blocked successfully in SAP' }, 'IO blocked');
    } else if (ioNumber && ioRemark !== undefined) {
      // Save IO details (ioNumber, ioRemark) even without blocking amount
      // This is useful when Step 3 is approved but amount hasn't been blocked yet
      // IMPORTANT: Don't pass balance fields to preserve existing values from previous blocking
      await this.dealerClaimService.updateIODetails(
        requestId,
        {
          ioNumber,
          ioRemark: ioRemark || '',
          // Don't pass balance fields - preserve existing values from previous blocking
          // Only pass if explicitly provided and > 0 (for new records)
          // (spreading a falsy condition result is a no-op, so the key is
          // simply omitted when the condition fails)
          ...(availableBalance && parseFloat(availableBalance) > 0 && { availableBalance: parseFloat(availableBalance) }),
          blockedAmount: 0,
          // Don't pass remainingBalance - preserve existing value from previous blocking
        },
        userId
      );
      return ResponseHandler.success(res, { message: 'IO details saved successfully' }, 'IO details saved');
    } else {
      // Just validate IO number without storing
      // This is for validation only (fetch amount scenario)
      return ResponseHandler.success(res, { message: 'IO validated successfully' }, 'IO validated');
    }
  } catch (error) {
    const errorMessage = error instanceof Error ? error.message : 'Unknown error';
    logger.error('[DealerClaimController] Error updating IO details:', error);
    return ResponseHandler.error(res, 'Failed to update IO details', 500, errorMessage);
  }
}
/**
 * Update e-invoice details (Step 7).
 * PUT /api/v1/dealer-claims/:requestId/e-invoice
 *
 * Accepts either UUID or requestNumber. When eInvoiceNumber is present the
 * entry is manual; otherwise the service auto-generates the invoice via DMS.
 */
async updateEInvoice(req: AuthenticatedRequest, res: Response): Promise<void> {
  try {
    // Resolve UUID-or-requestNumber path param to the workflow row.
    const workflow = await this.findWorkflowByIdentifier(req.params.requestId);
    if (!workflow) {
      return ResponseHandler.error(res, 'Workflow request not found', 404);
    }
    const requestId = (workflow as any).requestId || (workflow as any).request_id;
    if (!requestId) {
      return ResponseHandler.error(res, 'Invalid workflow request', 400);
    }
    const { eInvoiceNumber, eInvoiceDate, dmsNumber, amount, description } = req.body;
    let invoiceData;
    if (eInvoiceNumber) {
      // Manual entry: date defaults to "now" when omitted.
      invoiceData = {
        eInvoiceNumber,
        eInvoiceDate: eInvoiceDate ? new Date(eInvoiceDate) : new Date(),
        dmsNumber,
      };
    } else {
      // Auto-generation path (via DMS).
      invoiceData = {
        amount: amount ? parseFloat(amount) : undefined,
        description,
      };
    }
    await this.dealerClaimService.updateEInvoiceDetails(requestId, invoiceData);
    return ResponseHandler.success(res, { message: 'E-Invoice details updated successfully' }, 'E-Invoice updated');
  } catch (error) {
    const message = error instanceof Error ? error.message : 'Unknown error';
    logger.error('[DealerClaimController] Error updating e-invoice:', error);
    return ResponseHandler.error(res, 'Failed to update e-invoice details', 500, message);
  }
}
/**
 * Update credit note details (Step 8).
 * PUT /api/v1/dealer-claims/:requestId/credit-note
 *
 * Accepts either UUID or requestNumber. When creditNoteNumber is present the
 * entry is manual; otherwise the service auto-generates it via DMS.
 */
async updateCreditNote(req: AuthenticatedRequest, res: Response): Promise<void> {
  try {
    // Resolve UUID-or-requestNumber path param to the workflow row.
    const workflow = await this.findWorkflowByIdentifier(req.params.requestId);
    if (!workflow) {
      return ResponseHandler.error(res, 'Workflow request not found', 404);
    }
    const requestId = (workflow as any).requestId || (workflow as any).request_id;
    if (!requestId) {
      return ResponseHandler.error(res, 'Invalid workflow request', 400);
    }
    const { creditNoteNumber, creditNoteDate, creditNoteAmount, reason, description } = req.body;
    const parsedAmount = creditNoteAmount ? parseFloat(creditNoteAmount) : undefined;
    let creditNoteData;
    if (creditNoteNumber) {
      // Manual entry: date defaults to "now" when omitted.
      creditNoteData = {
        creditNoteNumber,
        creditNoteDate: creditNoteDate ? new Date(creditNoteDate) : new Date(),
        creditNoteAmount: parsedAmount,
      };
    } else {
      // Auto-generation path (via DMS).
      creditNoteData = {
        creditNoteAmount: parsedAmount,
        reason,
        description,
      };
    }
    await this.dealerClaimService.updateCreditNoteDetails(requestId, creditNoteData);
    return ResponseHandler.success(res, { message: 'Credit note details updated successfully' }, 'Credit note updated');
  } catch (error) {
    const message = error instanceof Error ? error.message : 'Unknown error';
    logger.error('[DealerClaimController] Error updating credit note:', error);
    return ResponseHandler.error(res, 'Failed to update credit note details', 500, message);
  }
}
/**
 * Send the credit note to the dealer and auto-approve Step 8.
 * POST /api/v1/dealer-claims/:requestId/credit-note/send
 *
 * Accepts either UUID or requestNumber.
 */
async sendCreditNoteToDealer(
  req: AuthenticatedRequest,
  res: Response
): Promise<void> {
  try {
    const userId = req.user?.userId;
    if (!userId) {
      return ResponseHandler.error(res, 'Unauthorized', 401);
    }
    // Resolve UUID-or-requestNumber path param to the workflow row.
    const workflow = await this.findWorkflowByIdentifier(req.params.requestId);
    if (!workflow) {
      return ResponseHandler.error(res, 'Workflow request not found', 404);
    }
    const requestId = (workflow as any).requestId || (workflow as any).request_id;
    if (!requestId) {
      return ResponseHandler.error(res, 'Invalid workflow request', 400);
    }
    await this.dealerClaimService.sendCreditNoteToDealer(requestId, userId);
    return ResponseHandler.success(res, { message: 'Credit note sent to dealer and Step 8 approved successfully' }, 'Credit note sent');
  } catch (error) {
    const message = error instanceof Error ? error.message : 'Unknown error';
    logger.error('[DealerClaimController] Error sending credit note to dealer:', error);
    return ResponseHandler.error(res, 'Failed to send credit note to dealer', 500, message);
  }
}
}

View File

@ -1,117 +0,0 @@
import { Request, Response } from 'express';
import { DMSWebhookService } from '../services/dmsWebhook.service';
import { ResponseHandler } from '../utils/responseHandler';
import logger from '../utils/logger';
/**
 * DMS Webhook Controller
 * Handles webhook callbacks from DMS system for invoice and credit note generation.
 *
 * Both webhooks share one pipeline (validate signature -> process -> respond);
 * the per-document differences (log labels, response field, messages) are
 * injected via a small config object so the two handlers cannot drift apart.
 */
export class DMSWebhookController {
  private webhookService = new DMSWebhookService();

  /**
   * Shared webhook pipeline. All user-visible strings are taken from `cfg`
   * and match the previous per-handler implementations byte-for-byte.
   *
   * @param cfg.label        Capitalized document label used in log messages.
   * @param cfg.resultField  Name of the generated-number field on the
   *                         processing result and in the success response.
   * @param cfg.failMessage  Client-facing failure message (400 default / 500).
   * @param cfg.successMessage Client-facing success message.
   * @param cfg.process      Service call that processes the payload.
   */
  private async processWebhook(
    req: Request,
    res: Response,
    cfg: {
      label: string;
      resultField: 'invoiceNumber' | 'creditNoteNumber';
      failMessage: string;
      successMessage: string;
      process: (payload: any) => Promise<{ success: boolean; error?: string } & Record<string, any>>;
    }
  ): Promise<void> {
    try {
      const payload = req.body;
      logger.info(`[DMSWebhook] ${cfg.label} webhook received`, {
        requestNumber: payload.request_number,
        documentNo: payload.document_no,
      });
      // Validate webhook signature if configured
      const isValid = await this.webhookService.validateWebhookSignature(req);
      if (!isValid) {
        logger.warn('[DMSWebhook] Invalid webhook signature');
        return ResponseHandler.error(res, 'Invalid webhook signature', 401);
      }
      const result = await cfg.process(payload);
      if (!result.success) {
        logger.error(`[DMSWebhook] ${cfg.label} webhook processing failed`, {
          error: result.error,
          requestNumber: payload.request_number,
        });
        return ResponseHandler.error(res, result.error || cfg.failMessage, 400);
      }
      logger.info(`[DMSWebhook] ${cfg.label} webhook processed successfully`, {
        requestNumber: payload.request_number,
        [cfg.resultField]: result[cfg.resultField],
      });
      return ResponseHandler.success(
        res,
        {
          message: cfg.successMessage,
          [cfg.resultField]: result[cfg.resultField],
          requestNumber: payload.request_number,
        },
        'Webhook processed'
      );
    } catch (error) {
      const errorMessage = error instanceof Error ? error.message : 'Unknown error';
      logger.error(`[DMSWebhook] Error processing ${cfg.label.toLowerCase()} webhook:`, error);
      return ResponseHandler.error(res, cfg.failMessage, 500, errorMessage);
    }
  }

  /**
   * Handle invoice generation webhook from DMS
   * POST /api/v1/webhooks/dms/invoice
   */
  async handleInvoiceWebhook(req: Request, res: Response): Promise<void> {
    return this.processWebhook(req, res, {
      label: 'Invoice',
      resultField: 'invoiceNumber',
      failMessage: 'Failed to process invoice webhook',
      successMessage: 'Invoice webhook processed successfully',
      process: (payload) => this.webhookService.processInvoiceWebhook(payload),
    });
  }

  /**
   * Handle credit note generation webhook from DMS
   * POST /api/v1/webhooks/dms/credit-note
   */
  async handleCreditNoteWebhook(req: Request, res: Response): Promise<void> {
    return this.processWebhook(req, res, {
      label: 'Credit note',
      resultField: 'creditNoteNumber',
      failMessage: 'Failed to process credit note webhook',
      successMessage: 'Credit note webhook processed successfully',
      process: (payload) => this.webhookService.processCreditNoteWebhook(payload),
    });
  }
}

View File

@ -22,57 +22,20 @@ export class DocumentController {
return; return;
} }
// Extract requestId from body (multer should parse form fields) const requestId = String((req.body?.requestId || '').trim());
// Try both req.body and req.body.requestId for compatibility if (!requestId) {
const identifier = String((req.body?.requestId || req.body?.request_id || '').trim());
if (!identifier || identifier === 'undefined' || identifier === 'null') {
logWithContext('error', 'RequestId missing or invalid in document upload', {
body: req.body,
bodyKeys: Object.keys(req.body || {}),
userId: req.user?.userId
});
ResponseHandler.error(res, 'requestId is required', 400); ResponseHandler.error(res, 'requestId is required', 400);
return; return;
} }
// Helper to check if identifier is UUID // Get workflow request to retrieve requestNumber
const isUuid = (id: string): boolean => { const workflowRequest = await WorkflowRequest.findOne({ where: { requestId } });
const uuidRegex = /^[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$/i;
return uuidRegex.test(id);
};
// Get workflow request - handle both UUID (requestId) and requestNumber
let workflowRequest: WorkflowRequest | null = null;
if (isUuid(identifier)) {
workflowRequest = await WorkflowRequest.findByPk(identifier);
} else {
workflowRequest = await WorkflowRequest.findOne({ where: { requestNumber: identifier } });
}
if (!workflowRequest) { if (!workflowRequest) {
logWithContext('error', 'Workflow request not found for document upload', {
identifier,
isUuid: isUuid(identifier),
userId: req.user?.userId
});
ResponseHandler.error(res, 'Workflow request not found', 404); ResponseHandler.error(res, 'Workflow request not found', 404);
return; return;
} }
// Get the actual requestId (UUID) and requestNumber
const requestId = (workflowRequest as any).requestId || (workflowRequest as any).request_id;
const requestNumber = (workflowRequest as any).requestNumber || (workflowRequest as any).request_number; const requestNumber = (workflowRequest as any).requestNumber || (workflowRequest as any).request_number;
if (!requestNumber) {
logWithContext('error', 'Request number not found for workflow', {
requestId,
workflowRequest: JSON.stringify(workflowRequest.toJSON()),
userId: req.user?.userId
});
ResponseHandler.error(res, 'Request number not found for workflow', 500);
return;
}
const file = (req as any).file as Express.Multer.File | undefined; const file = (req as any).file as Express.Multer.File | undefined;
if (!file) { if (!file) {
ResponseHandler.error(res, 'No file uploaded', 400); ResponseHandler.error(res, 'No file uploaded', 400);
@ -190,21 +153,10 @@ export class DocumentController {
ResponseHandler.success(res, doc, 'File uploaded', 201); ResponseHandler.success(res, doc, 'File uploaded', 201);
} catch (error) { } catch (error) {
const message = error instanceof Error ? error.message : 'Unknown error'; const message = error instanceof Error ? error.message : 'Unknown error';
const errorStack = error instanceof Error ? error.stack : undefined;
logWithContext('error', 'Document upload failed', { logWithContext('error', 'Document upload failed', {
userId: req.user?.userId, userId: req.user?.userId,
requestId: req.body?.requestId || req.body?.request_id, requestId: req.body?.requestId,
body: req.body, error,
bodyKeys: Object.keys(req.body || {}),
file: req.file ? {
originalname: req.file.originalname,
size: req.file.size,
mimetype: req.file.mimetype,
hasBuffer: !!req.file.buffer,
hasPath: !!req.file.path
} : 'No file',
error: message,
stack: errorStack
}); });
ResponseHandler.error(res, 'Upload failed', 500, message); ResponseHandler.error(res, 'Upload failed', 500, message);
} }

View File

@ -1,192 +0,0 @@
import { Request, Response } from 'express';
import type { AuthenticatedRequest } from '../types/express';
import { TemplateService } from '../services/template.service';
import { ResponseHandler } from '../utils/responseHandler';
import logger from '../utils/logger';
/**
 * REST controller for workflow template CRUD.
 * All 500-level failures flow through a single private helper so logging and
 * response shape stay uniform across endpoints.
 */
export class TemplateController {
  private templateService = new TemplateService();

  /**
   * Shared 500-error path: logs the failure under an endpoint-specific label
   * and sends a uniform error response with the extracted message.
   */
  private handleFailure(res: Response, clientMessage: string, logLabel: string, error: unknown): void {
    const errorMessage = error instanceof Error ? error.message : 'Unknown error';
    logger.error(logLabel, error);
    return ResponseHandler.error(res, clientMessage, 500, errorMessage);
  }

  /**
   * Create a new template
   * POST /api/v1/templates
   */
  async createTemplate(req: AuthenticatedRequest, res: Response): Promise<void> {
    try {
      const userId = req.user?.userId;
      if (!userId) {
        return ResponseHandler.error(res, 'Unauthorized', 401);
      }
      const body = req.body;
      if (!body.templateName) {
        return ResponseHandler.error(res, 'Template name is required', 400);
      }
      const created = await this.templateService.createTemplate(userId, {
        templateName: body.templateName,
        templateCode: body.templateCode,
        templateDescription: body.templateDescription,
        templateCategory: body.templateCategory,
        workflowType: body.workflowType,
        approvalLevelsConfig: body.approvalLevelsConfig,
        // Falsy (absent/0) TAT maps to undefined, matching prior behavior.
        defaultTatHours: body.defaultTatHours ? parseFloat(body.defaultTatHours) : undefined,
        formStepsConfig: body.formStepsConfig,
        userFieldMappings: body.userFieldMappings,
        dynamicApproverConfig: body.dynamicApproverConfig,
        isActive: body.isActive,
      });
      return ResponseHandler.success(res, created, 'Template created successfully', 201);
    } catch (error) {
      return this.handleFailure(res, 'Failed to create template', '[TemplateController] Error creating template:', error);
    }
  }

  /**
   * Get template by ID
   * GET /api/v1/templates/:templateId
   */
  async getTemplate(req: Request, res: Response): Promise<void> {
    try {
      const template = await this.templateService.getTemplate(req.params.templateId);
      if (!template) {
        return ResponseHandler.error(res, 'Template not found', 404);
      }
      return ResponseHandler.success(res, template, 'Template fetched');
    } catch (error) {
      return this.handleFailure(res, 'Failed to fetch template', '[TemplateController] Error getting template:', error);
    }
  }

  /**
   * List templates
   * GET /api/v1/templates
   */
  async listTemplates(req: Request, res: Response): Promise<void> {
    try {
      const q = req.query;
      const filters: any = {};
      if (q.category) filters.category = q.category as string;
      if (q.workflowType) filters.workflowType = q.workflowType as string;
      // Boolean filters compare against the literal string 'true'.
      if (q.isActive !== undefined) filters.isActive = q.isActive === 'true';
      if (q.isSystemTemplate !== undefined) filters.isSystemTemplate = q.isSystemTemplate === 'true';
      if (q.search) filters.search = q.search as string;
      const templates = await this.templateService.listTemplates(filters);
      return ResponseHandler.success(res, templates, 'Templates fetched');
    } catch (error) {
      return this.handleFailure(res, 'Failed to fetch templates', '[TemplateController] Error listing templates:', error);
    }
  }

  /**
   * Get active templates (for workflow creation)
   * GET /api/v1/templates/active
   */
  async getActiveTemplates(req: Request, res: Response): Promise<void> {
    try {
      const templates = await this.templateService.getActiveTemplates();
      return ResponseHandler.success(res, templates, 'Active templates fetched');
    } catch (error) {
      return this.handleFailure(res, 'Failed to fetch active templates', '[TemplateController] Error getting active templates:', error);
    }
  }

  /**
   * Update template
   * PUT /api/v1/templates/:templateId
   * Note: templateCode and workflowType are deliberately not updatable here.
   */
  async updateTemplate(req: AuthenticatedRequest, res: Response): Promise<void> {
    try {
      const userId = req.user?.userId;
      if (!userId) {
        return ResponseHandler.error(res, 'Unauthorized', 401);
      }
      const body = req.body;
      const updated = await this.templateService.updateTemplate(req.params.templateId, userId, {
        templateName: body.templateName,
        templateDescription: body.templateDescription,
        templateCategory: body.templateCategory,
        approvalLevelsConfig: body.approvalLevelsConfig,
        defaultTatHours: body.defaultTatHours ? parseFloat(body.defaultTatHours) : undefined,
        formStepsConfig: body.formStepsConfig,
        userFieldMappings: body.userFieldMappings,
        dynamicApproverConfig: body.dynamicApproverConfig,
        isActive: body.isActive,
      });
      return ResponseHandler.success(res, updated, 'Template updated successfully');
    } catch (error) {
      return this.handleFailure(res, 'Failed to update template', '[TemplateController] Error updating template:', error);
    }
  }

  /**
   * Delete template
   * DELETE /api/v1/templates/:templateId
   */
  async deleteTemplate(req: AuthenticatedRequest, res: Response): Promise<void> {
    try {
      await this.templateService.deleteTemplate(req.params.templateId);
      return ResponseHandler.success(res, { message: 'Template deleted successfully' }, 'Template deleted');
    } catch (error) {
      return this.handleFailure(res, 'Failed to delete template', '[TemplateController] Error deleting template:', error);
    }
  }
}

View File

@ -36,39 +36,6 @@ export class UserController {
} }
} }
/**
 * Search users in Okta by displayName.
 * GET /api/v1/users/search-by-displayname?displayName=John Doe
 *
 * Used when creating claim requests to find a manager by displayName.
 * Responds 400 when the displayName query parameter is missing or blank,
 * 500 when the Okta lookup fails.
 */
async searchByDisplayName(req: Request, res: Response): Promise<void> {
  try {
    const displayName = String(req.query.displayName || '').trim();
    if (!displayName) {
      ResponseHandler.error(res, 'displayName query parameter is required', 400);
      return;
    }
    const oktaUsers = await this.userService.searchOktaByDisplayName(displayName);
    // Flatten each Okta profile into the shape the frontend expects.
    const result = oktaUsers.map(u => ({
      userId: u.id,
      // Prefer the email attribute; fall back to the login identifier.
      email: u.profile.email || u.profile.login,
      // Fall back to "First Last" when displayName is not populated in Okta.
      displayName: u.profile.displayName || `${u.profile.firstName || ''} ${u.profile.lastName || ''}`.trim(),
      firstName: u.profile.firstName,
      lastName: u.profile.lastName,
      department: u.profile.department,
      status: u.status,
    }));
    ResponseHandler.success(res, result, 'Users found by displayName');
  } catch (error: unknown) {
    // Narrow instead of `catch (error: any)` so we never read .message off
    // a non-Error value.
    logger.error('Search by displayName failed', { error });
    const message = error instanceof Error ? error.message : 'Search by displayName failed';
    ResponseHandler.error(res, message, 500);
  }
}
/** /**
* Ensure user exists in database (create if not exists) * Ensure user exists in database (create if not exists)
* Called when user is selected/tagged in the frontend * Called when user is selected/tagged in the frontend

View File

@ -4,7 +4,7 @@ export async function up(queryInterface: QueryInterface): Promise<void> {
await queryInterface.sequelize.query(`DO $$ await queryInterface.sequelize.query(`DO $$
BEGIN BEGIN
IF NOT EXISTS (SELECT 1 FROM pg_type WHERE typname = 'enum_document_category') THEN IF NOT EXISTS (SELECT 1 FROM pg_type WHERE typname = 'enum_document_category') THEN
CREATE TYPE enum_document_category AS ENUM ('SUPPORTING','APPROVAL','REFERENCE','FINAL','OTHER','COMPLETION_DOC','ACTIVITY_PHOTO'); CREATE TYPE enum_document_category AS ENUM ('SUPPORTING','APPROVAL','REFERENCE','FINAL','OTHER');
END IF; END IF;
END$$;`); END$$;`);

View File

@ -1,54 +0,0 @@
import { QueryInterface } from 'sequelize';
/**
* Add foreign key constraint for template_id after workflow_templates table exists
* This should run after both:
* - 20251210-enhance-workflow-templates (creates workflow_templates table)
* - 20251210-add-workflow-type-support (adds template_id column)
*/
/**
 * Adds the workflow_requests.template_id -> workflow_templates FK once the
 * workflow_templates table exists. Both checks make the migration re-runnable.
 */
export async function up(queryInterface: QueryInterface): Promise<void> {
  // The FK can only be added once workflow_templates exists.
  const [templateTables] = await queryInterface.sequelize.query(`
    SELECT table_name
    FROM information_schema.tables
    WHERE table_schema = 'public'
    AND table_name = 'workflow_templates';
  `);
  if (templateTables.length === 0) {
    return;
  }
  // Skip when a previous run already created the constraint.
  const [existing] = await queryInterface.sequelize.query(`
    SELECT constraint_name
    FROM information_schema.table_constraints
    WHERE table_schema = 'public'
    AND table_name = 'workflow_requests'
    AND constraint_name = 'workflow_requests_template_id_fkey';
  `);
  if (existing.length > 0) {
    return;
  }
  await queryInterface.sequelize.query(`
    ALTER TABLE workflow_requests
    ADD CONSTRAINT workflow_requests_template_id_fkey
    FOREIGN KEY (template_id)
    REFERENCES workflow_templates(template_id)
    ON UPDATE CASCADE
    ON DELETE SET NULL;
  `);
}
/**
 * Drops the template_id foreign key. `DROP ... IF EXISTS` already tolerates a
 * missing constraint; the catch additionally guards against unrelated failures
 * (e.g. the table itself being gone).
 */
export async function down(queryInterface: QueryInterface): Promise<void> {
  try {
    await queryInterface.sequelize.query(`
      ALTER TABLE workflow_requests
      DROP CONSTRAINT IF EXISTS workflow_requests_template_id_fkey;
    `);
  } catch {
    console.log('Note: Foreign key constraint may not exist');
  }
}

View File

@ -1,116 +0,0 @@
import { QueryInterface, DataTypes } from 'sequelize';
/**
 * Adds a column, tolerating a concurrent/manual add: if addColumn fails but
 * the column is present afterwards, the error is swallowed; otherwise rethrown.
 */
async function addColumnIdempotent(
  queryInterface: QueryInterface,
  table: string,
  column: string,
  spec: Parameters<QueryInterface['addColumn']>[2]
): Promise<void> {
  try {
    await queryInterface.addColumn(table, column, spec);
    console.log(`✅ Added ${column} column`);
  } catch (error) {
    const description = await queryInterface.describeTable(table);
    if (!description[column]) {
      throw error; // Re-throw if column still doesn't exist
    }
    console.log(`Note: ${column} column already exists (may have been added manually)`);
  }
}

/** Creates a single-column index, logging (never throwing) when it already exists. */
async function addIndexIdempotent(
  queryInterface: QueryInterface,
  table: string,
  column: string,
  name: string
): Promise<void> {
  try {
    await queryInterface.addIndex(table, [column], { name });
    console.log(`✅ Created ${column} index`);
  } catch (error: unknown) {
    const message = error instanceof Error ? error.message : String(error);
    if (message.includes('already exists') || message.includes('duplicate')) {
      console.log(`Note: ${column} index already exists`);
    } else {
      console.log(`Note: Could not create ${column} index:`, message);
    }
  }
}

/**
 * Adds multi-workflow-type support to workflow_requests:
 *   - workflow_type (STRING(50), defaults to 'NON_TEMPLATIZED')
 *   - template_id   (UUID; the FK constraint is added by a later migration)
 * plus indexes on both columns, and backfills workflow_type on existing rows.
 * Every step is guarded so the migration can be re-run safely.
 */
export async function up(queryInterface: QueryInterface): Promise<void> {
  try {
    const table = 'workflow_requests';
    // Check which columns already exist (idempotency / backward compatibility).
    const tableDescription = await queryInterface.describeTable(table);

    // 1. workflow_type column
    if (!tableDescription.workflow_type) {
      await addColumnIdempotent(queryInterface, table, 'workflow_type', {
        type: DataTypes.STRING(50),
        allowNull: true,
        defaultValue: 'NON_TEMPLATIZED'
      });
    } else {
      console.log('Note: workflow_type column already exists, skipping');
    }

    // 2. template_id column (nullable, for admin templates; FK added later)
    if (!tableDescription.template_id) {
      await addColumnIdempotent(queryInterface, table, 'template_id', {
        type: DataTypes.UUID,
        allowNull: true
      });
    } else {
      console.log('Note: template_id column already exists, skipping');
    }

    // Re-describe so index creation sees any columns added above.
    const finalTableDescription = await queryInterface.describeTable(table);

    // 3./4. Indexes (only when the backing column exists)
    if (finalTableDescription.workflow_type) {
      await addIndexIdempotent(queryInterface, table, 'workflow_type', 'idx_workflow_requests_workflow_type');
    }
    if (finalTableDescription.template_id) {
      await addIndexIdempotent(queryInterface, table, 'template_id', 'idx_workflow_requests_template_id');
    }

    // 5. Backfill workflow_type on pre-existing rows.
    if (finalTableDescription.workflow_type) {
      try {
        await queryInterface.sequelize.query(`
          UPDATE workflow_requests
          SET workflow_type = 'NON_TEMPLATIZED'
          WHERE workflow_type IS NULL;
        `);
        console.log('✅ Updated existing records with workflow_type');
      } catch (error: any) {
        // Ignore if table is empty or other error
        console.log('Note: Could not update existing records:', error.message);
      }
    }
  } catch (error: any) {
    console.error('Migration error:', error.message);
    throw error;
  }
}
/**
 * Reverts `up`: drops the workflow-type indexes and columns. Each step is
 * individually guarded so a partially-applied `up` can still be rolled back
 * (the original unguarded version aborted on the first missing index/column).
 */
export async function down(queryInterface: QueryInterface): Promise<void> {
  // Remove indexes (tolerate absence)
  for (const index of ['idx_workflow_requests_template_id', 'idx_workflow_requests_workflow_type']) {
    try {
      await queryInterface.removeIndex('workflow_requests', index);
    } catch {
      console.log(`Note: index ${index} does not exist, skipping`);
    }
  }
  // Remove columns only when present
  const description = await queryInterface.describeTable('workflow_requests');
  for (const column of ['template_id', 'workflow_type']) {
    if (description[column]) {
      await queryInterface.removeColumn('workflow_requests', column);
    }
  }
}

View File

@ -1,214 +0,0 @@
import { QueryInterface, DataTypes } from 'sequelize';
/**
 * Creates the three 1:1 detail tables for the dealer-claim workflow:
 *   dealer_claim_details      — core activity/dealer data for a claim request
 *   dealer_proposal_details   — Step 1: dealer proposal submission
 *   dealer_completion_details — Step 5: dealer completion submission
 * Each table references workflow_requests.request_id with a UNIQUE constraint
 * and ON DELETE CASCADE, so at most one row exists per workflow request and
 * rows disappear with their parent request.
 */
export async function up(queryInterface: QueryInterface): Promise<void> {
  // 1. Create dealer_claim_details table
  await queryInterface.createTable('dealer_claim_details', {
    claim_id: {
      type: DataTypes.UUID,
      primaryKey: true,
      defaultValue: DataTypes.UUIDV4
    },
    request_id: {
      type: DataTypes.UUID,
      allowNull: false,
      unique: true, // one claim-detail row per workflow request
      references: {
        model: 'workflow_requests',
        key: 'request_id'
      },
      onDelete: 'CASCADE',
      onUpdate: 'CASCADE'
    },
    activity_name: {
      type: DataTypes.STRING(500),
      allowNull: false
    },
    activity_type: {
      type: DataTypes.STRING(100),
      allowNull: false
    },
    dealer_code: {
      type: DataTypes.STRING(50),
      allowNull: false
    },
    dealer_name: {
      type: DataTypes.STRING(200),
      allowNull: false
    },
    dealer_email: {
      type: DataTypes.STRING(255),
      allowNull: true
    },
    dealer_phone: {
      type: DataTypes.STRING(20),
      allowNull: true
    },
    dealer_address: {
      type: DataTypes.TEXT,
      allowNull: true
    },
    activity_date: {
      type: DataTypes.DATEONLY,
      allowNull: true
    },
    location: {
      type: DataTypes.STRING(255),
      allowNull: true
    },
    period_start_date: {
      type: DataTypes.DATEONLY,
      allowNull: true
    },
    period_end_date: {
      type: DataTypes.DATEONLY,
      allowNull: true
    },
    created_at: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW
    },
    updated_at: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW
    }
  });
  // Create indexes
  await queryInterface.addIndex('dealer_claim_details', ['request_id'], {
    name: 'idx_dealer_claim_details_request_id',
    unique: true
  });
  await queryInterface.addIndex('dealer_claim_details', ['dealer_code'], {
    name: 'idx_dealer_claim_details_dealer_code'
  });
  await queryInterface.addIndex('dealer_claim_details', ['activity_type'], {
    name: 'idx_dealer_claim_details_activity_type'
  });
  // 2. Create dealer_proposal_details table (Step 1: Dealer Proposal)
  await queryInterface.createTable('dealer_proposal_details', {
    proposal_id: {
      type: DataTypes.UUID,
      primaryKey: true,
      defaultValue: DataTypes.UUIDV4
    },
    request_id: {
      type: DataTypes.UUID,
      allowNull: false,
      unique: true, // one proposal per workflow request
      references: {
        model: 'workflow_requests',
        key: 'request_id'
      },
      onDelete: 'CASCADE',
      onUpdate: 'CASCADE'
    },
    proposal_document_path: {
      type: DataTypes.STRING(500),
      allowNull: true
    },
    proposal_document_url: {
      type: DataTypes.STRING(500),
      allowNull: true
    },
    total_estimated_budget: {
      type: DataTypes.DECIMAL(15, 2),
      allowNull: true
    },
    timeline_mode: {
      // NOTE(review): presumably a short discriminator (e.g. date vs. days,
      // matching the two expected_completion_* fields below) — confirm.
      type: DataTypes.STRING(10),
      allowNull: true
    },
    expected_completion_date: {
      type: DataTypes.DATEONLY,
      allowNull: true
    },
    expected_completion_days: {
      type: DataTypes.INTEGER,
      allowNull: true
    },
    dealer_comments: {
      type: DataTypes.TEXT,
      allowNull: true
    },
    submitted_at: {
      type: DataTypes.DATE,
      allowNull: true
    },
    created_at: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW
    },
    updated_at: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW
    }
  });
  await queryInterface.addIndex('dealer_proposal_details', ['request_id'], {
    name: 'idx_dealer_proposal_details_request_id',
    unique: true
  });
  // 3. Create dealer_completion_details table (Step 5: Dealer Completion)
  await queryInterface.createTable('dealer_completion_details', {
    completion_id: {
      type: DataTypes.UUID,
      primaryKey: true,
      defaultValue: DataTypes.UUIDV4
    },
    request_id: {
      type: DataTypes.UUID,
      allowNull: false,
      unique: true, // one completion record per workflow request
      references: {
        model: 'workflow_requests',
        key: 'request_id'
      },
      onDelete: 'CASCADE',
      onUpdate: 'CASCADE'
    },
    activity_completion_date: {
      type: DataTypes.DATEONLY,
      allowNull: false
    },
    number_of_participants: {
      type: DataTypes.INTEGER,
      allowNull: true
    },
    total_closed_expenses: {
      type: DataTypes.DECIMAL(15, 2),
      allowNull: true
    },
    submitted_at: {
      type: DataTypes.DATE,
      allowNull: true
    },
    created_at: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW
    },
    updated_at: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW
    }
  });
  await queryInterface.addIndex('dealer_completion_details', ['request_id'], {
    name: 'idx_dealer_completion_details_request_id',
    unique: true
  });
}
/**
 * Reverts `up`: drops the three dealer-claim detail tables in reverse
 * creation order so foreign-key references never dangle.
 */
export async function down(queryInterface: QueryInterface): Promise<void> {
  const tables = ['dealer_completion_details', 'dealer_proposal_details', 'dealer_claim_details'];
  for (const table of tables) {
    await queryInterface.dropTable(table);
  }
}

View File

@ -1,194 +0,0 @@
import { QueryInterface, DataTypes } from 'sequelize';
/**
* Migration: Create dealer_proposal_cost_items table
*
* Purpose: Separate table for cost breakups to enable better querying, reporting, and data integrity
* This replaces the JSONB costBreakup field in dealer_proposal_details
*
* Benefits:
* - Better querying and filtering
* - Easier to update individual cost items
* - Better for analytics and reporting
* - Maintains referential integrity
*/
/**
 * Creates dealer_proposal_cost_items (one row per cost-breakup line) and then
 * best-effort migrates any existing JSONB cost_breakup arrays out of
 * dealer_proposal_details into it. Table/index creation is skipped when the
 * table already exists; data migration failures are logged but never abort
 * the migration.
 */
export async function up(queryInterface: QueryInterface): Promise<void> {
  // Check if table already exists
  const [tables] = await queryInterface.sequelize.query(`
    SELECT table_name
    FROM information_schema.tables
    WHERE table_schema = 'public'
    AND table_name = 'dealer_proposal_cost_items';
  `);
  if (tables.length === 0) {
    // Create dealer_proposal_cost_items table
    await queryInterface.createTable('dealer_proposal_cost_items', {
      cost_item_id: {
        type: DataTypes.UUID,
        primaryKey: true,
        defaultValue: DataTypes.UUIDV4,
        field: 'cost_item_id'
      },
      proposal_id: {
        type: DataTypes.UUID,
        allowNull: false,
        field: 'proposal_id',
        references: {
          model: 'dealer_proposal_details',
          key: 'proposal_id'
        },
        onDelete: 'CASCADE',
        onUpdate: 'CASCADE'
      },
      request_id: {
        type: DataTypes.UUID,
        allowNull: false,
        field: 'request_id',
        references: {
          model: 'workflow_requests',
          key: 'request_id'
        },
        onDelete: 'CASCADE',
        onUpdate: 'CASCADE',
        comment: 'Denormalized for easier querying without joins'
      },
      item_description: {
        type: DataTypes.STRING(500),
        allowNull: false,
        field: 'item_description'
      },
      amount: {
        type: DataTypes.DECIMAL(15, 2),
        allowNull: false,
        field: 'amount',
        comment: 'Cost amount in INR'
      },
      item_order: {
        type: DataTypes.INTEGER,
        allowNull: false,
        defaultValue: 0,
        field: 'item_order',
        comment: 'Order of item in the cost breakdown list'
      },
      created_at: {
        type: DataTypes.DATE,
        allowNull: false,
        defaultValue: DataTypes.NOW,
        field: 'created_at'
      },
      updated_at: {
        type: DataTypes.DATE,
        allowNull: false,
        defaultValue: DataTypes.NOW,
        field: 'updated_at'
      }
    });
    // Create indexes for better query performance
    await queryInterface.addIndex('dealer_proposal_cost_items', ['proposal_id'], {
      name: 'idx_proposal_cost_items_proposal_id'
    });
    await queryInterface.addIndex('dealer_proposal_cost_items', ['request_id'], {
      name: 'idx_proposal_cost_items_request_id'
    });
    await queryInterface.addIndex('dealer_proposal_cost_items', ['proposal_id', 'item_order'], {
      name: 'idx_proposal_cost_items_proposal_order'
    });
    console.log('✅ Created dealer_proposal_cost_items table');
  } else {
    console.log('Note: dealer_proposal_cost_items table already exists');
  }
  // Migrate existing JSONB costBreakup data to the new table
  try {
    const [existingProposals] = await queryInterface.sequelize.query(`
      SELECT proposal_id, request_id, cost_breakup
      FROM dealer_proposal_details
      WHERE cost_breakup IS NOT NULL
      AND cost_breakup::text != 'null'
      AND cost_breakup::text != '[]';
    `);
    if (Array.isArray(existingProposals) && existingProposals.length > 0) {
      console.log(`📦 Migrating ${existingProposals.length} existing proposal(s) with cost breakups...`);
      for (const proposal of existingProposals as any[]) {
        const proposalId = proposal.proposal_id;
        const requestId = proposal.request_id;
        let costBreakup = proposal.cost_breakup;
        // Parse JSONB if it's a string
        if (typeof costBreakup === 'string') {
          try {
            costBreakup = JSON.parse(costBreakup);
          } catch (e) {
            console.warn(`⚠️ Failed to parse costBreakup for proposal ${proposalId}:`, e);
            continue;
          }
        }
        // Ensure it's an array
        if (!Array.isArray(costBreakup)) {
          console.warn(`⚠️ costBreakup is not an array for proposal ${proposalId}`);
          continue;
        }
        // Insert cost items
        for (let i = 0; i < costBreakup.length; i++) {
          const item = costBreakup[i];
          // NOTE(review): a falsy description (empty string) skips the item,
          // while amount === 0 is still migrated — confirm that is intended.
          if (item && item.description && item.amount !== undefined) {
            // ON CONFLICT DO NOTHING keeps this loop re-runnable.
            await queryInterface.sequelize.query(`
              INSERT INTO dealer_proposal_cost_items
              (proposal_id, request_id, item_description, amount, item_order, created_at, updated_at)
              VALUES (:proposalId, :requestId, :description, :amount, :order, NOW(), NOW())
              ON CONFLICT DO NOTHING;
            `, {
              replacements: {
                proposalId,
                requestId,
                description: item.description,
                amount: item.amount,
                order: i
              }
            });
          }
        }
      }
      console.log('✅ Migrated existing cost breakups to new table');
    }
  } catch (error: any) {
    console.warn('⚠️ Could not migrate existing cost breakups:', error.message);
    // Don't fail the migration if migration of existing data fails
  }
}
/**
 * Reverts `up`: removes the three indexes (tolerating any that are absent,
 * e.g. on a table created by an older revision) and then drops the table.
 */
export async function down(queryInterface: QueryInterface): Promise<void> {
  const indexNames = [
    'idx_proposal_cost_items_proposal_order',
    'idx_proposal_cost_items_request_id',
    'idx_proposal_cost_items_proposal_id',
  ];
  for (const name of indexNames) {
    try {
      await queryInterface.removeIndex('dealer_proposal_cost_items', name);
    } catch {
      // Index might not exist
    }
  }
  await queryInterface.dropTable('dealer_proposal_cost_items');
  console.log('✅ Dropped dealer_proposal_cost_items table');
}

View File

@ -1,174 +0,0 @@
import { QueryInterface, DataTypes } from 'sequelize';
/**
 * Idempotent setup of the workflow_templates table: creates the full table
 * (plus indexes) when it does not exist; otherwise only adds whichever of the
 * newer columns (form_steps_config, user_field_mappings,
 * dynamic_approver_config, workflow_type, is_system_template) are missing
 * from an older revision of the table.
 */
export async function up(queryInterface: QueryInterface): Promise<void> {
  // Check if workflow_templates table exists, if not create it
  const [tables] = await queryInterface.sequelize.query(`
    SELECT table_name
    FROM information_schema.tables
    WHERE table_schema = 'public'
    AND table_name = 'workflow_templates';
  `);
  if (tables.length === 0) {
    // Create workflow_templates table if it doesn't exist
    await queryInterface.createTable('workflow_templates', {
      template_id: {
        type: DataTypes.UUID,
        primaryKey: true,
        defaultValue: DataTypes.UUIDV4
      },
      template_name: {
        type: DataTypes.STRING(200),
        allowNull: false
      },
      template_code: {
        type: DataTypes.STRING(50),
        allowNull: true,
        unique: true
      },
      template_description: {
        type: DataTypes.TEXT,
        allowNull: true
      },
      template_category: {
        type: DataTypes.STRING(100),
        allowNull: true
      },
      workflow_type: {
        type: DataTypes.STRING(50),
        allowNull: true
      },
      approval_levels_config: {
        type: DataTypes.JSONB,
        allowNull: true
      },
      default_tat_hours: {
        // Default turnaround time; 24 hours when the template does not specify.
        type: DataTypes.DECIMAL(10, 2),
        allowNull: true,
        defaultValue: 24
      },
      form_steps_config: {
        type: DataTypes.JSONB,
        allowNull: true
      },
      user_field_mappings: {
        type: DataTypes.JSONB,
        allowNull: true
      },
      dynamic_approver_config: {
        type: DataTypes.JSONB,
        allowNull: true
      },
      is_active: {
        type: DataTypes.BOOLEAN,
        allowNull: false,
        defaultValue: true
      },
      is_system_template: {
        type: DataTypes.BOOLEAN,
        allowNull: false,
        defaultValue: false
      },
      usage_count: {
        type: DataTypes.INTEGER,
        allowNull: false,
        defaultValue: 0
      },
      created_by: {
        type: DataTypes.UUID,
        allowNull: true,
        references: {
          model: 'users',
          key: 'user_id'
        }
      },
      created_at: {
        type: DataTypes.DATE,
        allowNull: false,
        defaultValue: DataTypes.NOW
      },
      updated_at: {
        type: DataTypes.DATE,
        allowNull: false,
        defaultValue: DataTypes.NOW
      }
    });
    // Create indexes
    await queryInterface.addIndex('workflow_templates', ['template_code'], {
      name: 'idx_workflow_templates_template_code',
      unique: true
    });
    await queryInterface.addIndex('workflow_templates', ['workflow_type'], {
      name: 'idx_workflow_templates_workflow_type'
    });
    await queryInterface.addIndex('workflow_templates', ['is_active'], {
      name: 'idx_workflow_templates_is_active'
    });
  } else {
    // Table exists, add new columns if they don't exist
    const tableDescription = await queryInterface.describeTable('workflow_templates');
    if (!tableDescription.form_steps_config) {
      await queryInterface.addColumn('workflow_templates', 'form_steps_config', {
        type: DataTypes.JSONB,
        allowNull: true
      });
    }
    if (!tableDescription.user_field_mappings) {
      await queryInterface.addColumn('workflow_templates', 'user_field_mappings', {
        type: DataTypes.JSONB,
        allowNull: true
      });
    }
    if (!tableDescription.dynamic_approver_config) {
      await queryInterface.addColumn('workflow_templates', 'dynamic_approver_config', {
        type: DataTypes.JSONB,
        allowNull: true
      });
    }
    if (!tableDescription.workflow_type) {
      await queryInterface.addColumn('workflow_templates', 'workflow_type', {
        type: DataTypes.STRING(50),
        allowNull: true
      });
    }
    if (!tableDescription.is_system_template) {
      await queryInterface.addColumn('workflow_templates', 'is_system_template', {
        type: DataTypes.BOOLEAN,
        allowNull: false,
        defaultValue: false
      });
    }
  }
}
/**
 * Reverts `up` for the incremental case: drops only the columns this
 * migration may have added, and only when they are actually present.
 * Note the whole table is intentionally left in place.
 */
export async function down(queryInterface: QueryInterface): Promise<void> {
  const tableDescription = await queryInterface.describeTable('workflow_templates');
  const candidates = [
    'dynamic_approver_config',
    'user_field_mappings',
    'form_steps_config',
    'workflow_type',
    'is_system_template',
  ];
  for (const column of candidates) {
    if (tableDescription[column]) {
      await queryInterface.removeColumn('workflow_templates', column);
    }
  }
}

View File

@ -1,197 +0,0 @@
import { QueryInterface, DataTypes } from 'sequelize';
/**
 * Creates claim_budget_tracking: a single-row-per-request ledger that records
 * the budget figure produced at each workflow step (initial estimate, dealer
 * proposal, requestor approval, IO block, closed expenses, final claim amount,
 * credit note) together with who/when audit columns, a budget_status lifecycle
 * enum, and variance fields. All user references are SET NULL on delete so
 * tracking rows survive user removal.
 */
export async function up(queryInterface: QueryInterface): Promise<void> {
  // Create claim_budget_tracking table for comprehensive budget management
  await queryInterface.createTable('claim_budget_tracking', {
    budget_id: {
      type: DataTypes.UUID,
      primaryKey: true,
      defaultValue: DataTypes.UUIDV4
    },
    request_id: {
      type: DataTypes.UUID,
      allowNull: false,
      unique: true, // one budget-tracking row per workflow request
      references: {
        model: 'workflow_requests',
        key: 'request_id'
      },
      onDelete: 'CASCADE',
      onUpdate: 'CASCADE'
    },
    // Initial Budget (from claim creation)
    initial_estimated_budget: {
      type: DataTypes.DECIMAL(15, 2),
      allowNull: true,
      comment: 'Initial estimated budget when claim was created'
    },
    // Proposal Budget (from Step 1 - Dealer Proposal)
    proposal_estimated_budget: {
      type: DataTypes.DECIMAL(15, 2),
      allowNull: true,
      comment: 'Total estimated budget from dealer proposal'
    },
    proposal_submitted_at: {
      type: DataTypes.DATE,
      allowNull: true,
      comment: 'When dealer submitted proposal'
    },
    // Approved Budget (from Step 2 - Requestor Evaluation)
    approved_budget: {
      type: DataTypes.DECIMAL(15, 2),
      allowNull: true,
      comment: 'Budget approved by requestor in Step 2'
    },
    approved_at: {
      type: DataTypes.DATE,
      allowNull: true,
      comment: 'When budget was approved by requestor'
    },
    approved_by: {
      type: DataTypes.UUID,
      allowNull: true,
      references: {
        model: 'users',
        key: 'user_id'
      },
      onDelete: 'SET NULL',
      onUpdate: 'CASCADE',
      comment: 'User who approved the budget'
    },
    // IO Blocked Budget (from Step 3 - Department Lead)
    io_blocked_amount: {
      type: DataTypes.DECIMAL(15, 2),
      allowNull: true,
      comment: 'Amount blocked in IO (from internal_orders table)'
    },
    io_blocked_at: {
      type: DataTypes.DATE,
      allowNull: true,
      comment: 'When budget was blocked in IO'
    },
    // Closed Expenses (from Step 5 - Dealer Completion)
    closed_expenses: {
      type: DataTypes.DECIMAL(15, 2),
      allowNull: true,
      comment: 'Total closed expenses from completion documents'
    },
    closed_expenses_submitted_at: {
      type: DataTypes.DATE,
      allowNull: true,
      comment: 'When completion expenses were submitted'
    },
    // Final Claim Amount (from Step 6 - Requestor Claim Approval)
    final_claim_amount: {
      type: DataTypes.DECIMAL(15, 2),
      allowNull: true,
      comment: 'Final claim amount approved/modified by requestor in Step 6'
    },
    final_claim_amount_approved_at: {
      type: DataTypes.DATE,
      allowNull: true,
      comment: 'When final claim amount was approved'
    },
    final_claim_amount_approved_by: {
      type: DataTypes.UUID,
      allowNull: true,
      references: {
        model: 'users',
        key: 'user_id'
      },
      onDelete: 'SET NULL',
      onUpdate: 'CASCADE',
      comment: 'User who approved final claim amount'
    },
    // Credit Note (from Step 8 - Finance)
    credit_note_amount: {
      type: DataTypes.DECIMAL(15, 2),
      allowNull: true,
      comment: 'Credit note amount issued by finance'
    },
    credit_note_issued_at: {
      type: DataTypes.DATE,
      allowNull: true,
      comment: 'When credit note was issued'
    },
    // Budget Status
    budget_status: {
      type: DataTypes.ENUM('DRAFT', 'PROPOSED', 'APPROVED', 'BLOCKED', 'CLOSED', 'SETTLED'),
      defaultValue: 'DRAFT',
      allowNull: false,
      comment: 'Current status of budget lifecycle'
    },
    // Currency
    currency: {
      type: DataTypes.STRING(3),
      defaultValue: 'INR',
      allowNull: false,
      comment: 'Currency code (INR, USD, etc.)'
    },
    // Budget Variance
    variance_amount: {
      type: DataTypes.DECIMAL(15, 2),
      allowNull: true,
      comment: 'Difference between approved and closed expenses (closed - approved)'
    },
    variance_percentage: {
      type: DataTypes.DECIMAL(5, 2),
      allowNull: true,
      comment: 'Variance as percentage of approved budget'
    },
    // Audit fields
    last_modified_by: {
      type: DataTypes.UUID,
      allowNull: true,
      references: {
        model: 'users',
        key: 'user_id'
      },
      onDelete: 'SET NULL',
      onUpdate: 'CASCADE',
      comment: 'Last user who modified budget'
    },
    last_modified_at: {
      type: DataTypes.DATE,
      allowNull: true,
      comment: 'When budget was last modified'
    },
    modification_reason: {
      type: DataTypes.TEXT,
      allowNull: true,
      comment: 'Reason for budget modification'
    },
    created_at: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW
    },
    updated_at: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW
    }
  });
  // Create indexes
  await queryInterface.addIndex('claim_budget_tracking', ['request_id'], {
    name: 'idx_claim_budget_tracking_request_id',
    unique: true
  });
  await queryInterface.addIndex('claim_budget_tracking', ['budget_status'], {
    name: 'idx_claim_budget_tracking_status'
  });
  await queryInterface.addIndex('claim_budget_tracking', ['approved_by'], {
    name: 'idx_claim_budget_tracking_approved_by'
  });
  await queryInterface.addIndex('claim_budget_tracking', ['final_claim_amount_approved_by'], {
    name: 'idx_claim_budget_tracking_final_approved_by'
  });
}
/**
 * Reverts `up`: dropping the table also removes its indexes and the
 * associated ENUM-backed column.
 */
export async function down(queryInterface: QueryInterface): Promise<void> {
  await queryInterface.dropTable('claim_budget_tracking');
}

View File

@ -1,95 +0,0 @@
import { QueryInterface, DataTypes } from 'sequelize';
/**
 * Creates internal_orders: one IO (Internal Order) record per workflow
 * request (enforced by a unique index on request_id), tracking the IO number,
 * balances, who organized it, the SAP document number, and a
 * PENDING/BLOCKED/RELEASED/CANCELLED status lifecycle.
 */
export async function up(queryInterface: QueryInterface): Promise<void> {
  // Create internal_orders table for storing IO (Internal Order) details
  await queryInterface.createTable('internal_orders', {
    io_id: {
      type: DataTypes.UUID,
      primaryKey: true,
      defaultValue: DataTypes.UUIDV4
    },
    request_id: {
      type: DataTypes.UUID,
      allowNull: false,
      references: {
        model: 'workflow_requests',
        key: 'request_id'
      },
      onDelete: 'CASCADE',
      onUpdate: 'CASCADE'
    },
    io_number: {
      type: DataTypes.STRING(50),
      allowNull: false
    },
    io_remark: {
      type: DataTypes.TEXT,
      allowNull: true
    },
    io_available_balance: {
      type: DataTypes.DECIMAL(15, 2),
      allowNull: true
    },
    io_blocked_amount: {
      type: DataTypes.DECIMAL(15, 2),
      allowNull: true
    },
    io_remaining_balance: {
      type: DataTypes.DECIMAL(15, 2),
      allowNull: true
    },
    organized_by: {
      // Kept on user deletion so IO history survives (SET NULL below).
      type: DataTypes.UUID,
      allowNull: true,
      references: {
        model: 'users',
        key: 'user_id'
      },
      onDelete: 'SET NULL',
      onUpdate: 'CASCADE'
    },
    organized_at: {
      type: DataTypes.DATE,
      allowNull: true
    },
    sap_document_number: {
      type: DataTypes.STRING(100),
      allowNull: true
    },
    status: {
      type: DataTypes.ENUM('PENDING', 'BLOCKED', 'RELEASED', 'CANCELLED'),
      defaultValue: 'PENDING',
      allowNull: false
    },
    created_at: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW
    },
    updated_at: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW
    }
  });
  // Create indexes
  await queryInterface.addIndex('internal_orders', ['io_number'], {
    name: 'idx_internal_orders_io_number'
  });
  await queryInterface.addIndex('internal_orders', ['organized_by'], {
    name: 'idx_internal_orders_organized_by'
  });
  // Create unique constraint: one IO per request (unique index on request_id)
  await queryInterface.addIndex('internal_orders', ['request_id'], {
    name: 'idx_internal_orders_request_id_unique',
    unique: true
  });
}
/**
 * Reverts `up`: dropping the table also removes its indexes and the
 * status ENUM-backed column.
 */
export async function down(queryInterface: QueryInterface): Promise<void> {
  await queryInterface.dropTable('internal_orders');
}

View File

@ -1,162 +0,0 @@
import { QueryInterface, DataTypes } from 'sequelize';
/**
 * Creates the finance tables for the claim workflow:
 *   claim_invoices     — one invoice per workflow request (unique request_id)
 *   claim_credit_notes — one credit note per workflow request, optionally
 *                        linked to its invoice (SET NULL when the invoice is
 *                        deleted)
 * Status columns are free-form strings (see inline comments for the expected
 * values) rather than ENUMs, and both tables carry lookup indexes on their
 * business identifiers.
 */
export async function up(queryInterface: QueryInterface): Promise<void> {
  await queryInterface.createTable('claim_invoices', {
    invoice_id: {
      type: DataTypes.UUID,
      primaryKey: true,
      defaultValue: DataTypes.UUIDV4,
    },
    request_id: {
      type: DataTypes.UUID,
      allowNull: false,
      unique: true, // one invoice per request (adjust later if multiples needed)
      references: { model: 'workflow_requests', key: 'request_id' },
      onDelete: 'CASCADE',
      onUpdate: 'CASCADE',
    },
    invoice_number: {
      type: DataTypes.STRING(100),
      allowNull: true,
    },
    invoice_date: {
      type: DataTypes.DATEONLY,
      allowNull: true,
    },
    invoice_amount: {
      type: DataTypes.DECIMAL(15, 2),
      allowNull: true,
    },
    dms_number: {
      type: DataTypes.STRING(100),
      allowNull: true,
    },
    invoice_file_path: {
      type: DataTypes.STRING(500),
      allowNull: true,
    },
    generation_status: {
      type: DataTypes.STRING(50), // e.g., PENDING, GENERATED, SENT, FAILED, CANCELLED
      allowNull: true,
    },
    error_message: {
      type: DataTypes.TEXT,
      allowNull: true,
    },
    generated_at: {
      type: DataTypes.DATE,
      allowNull: true,
    },
    description: {
      type: DataTypes.TEXT,
      allowNull: true,
    },
    created_at: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW,
    },
    updated_at: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW,
    },
  });
  await queryInterface.addIndex('claim_invoices', ['request_id'], { name: 'idx_claim_invoices_request_id', unique: true });
  await queryInterface.addIndex('claim_invoices', ['invoice_number'], { name: 'idx_claim_invoices_invoice_number' });
  await queryInterface.addIndex('claim_invoices', ['dms_number'], { name: 'idx_claim_invoices_dms_number' });
  await queryInterface.addIndex('claim_invoices', ['generation_status'], { name: 'idx_claim_invoices_status' });
  await queryInterface.createTable('claim_credit_notes', {
    credit_note_id: {
      type: DataTypes.UUID,
      primaryKey: true,
      defaultValue: DataTypes.UUIDV4,
    },
    request_id: {
      type: DataTypes.UUID,
      allowNull: false,
      unique: true, // one credit note per request (adjust later if multiples needed)
      references: { model: 'workflow_requests', key: 'request_id' },
      onDelete: 'CASCADE',
      onUpdate: 'CASCADE',
    },
    invoice_id: {
      type: DataTypes.UUID,
      allowNull: true,
      references: { model: 'claim_invoices', key: 'invoice_id' },
      onDelete: 'SET NULL',
      onUpdate: 'CASCADE',
    },
    credit_note_number: {
      type: DataTypes.STRING(100),
      allowNull: true,
    },
    credit_note_date: {
      type: DataTypes.DATEONLY,
      allowNull: true,
    },
    credit_amount: {
      type: DataTypes.DECIMAL(15, 2),
      allowNull: true,
    },
    sap_document_number: {
      type: DataTypes.STRING(100),
      allowNull: true,
    },
    credit_note_file_path: {
      type: DataTypes.STRING(500),
      allowNull: true,
    },
    confirmation_status: {
      type: DataTypes.STRING(50), // e.g., PENDING, GENERATED, CONFIRMED, FAILED, CANCELLED
      allowNull: true,
    },
    error_message: {
      type: DataTypes.TEXT,
      allowNull: true,
    },
    confirmed_by: {
      type: DataTypes.UUID,
      allowNull: true,
      references: { model: 'users', key: 'user_id' },
      onDelete: 'SET NULL',
      onUpdate: 'CASCADE',
    },
    confirmed_at: {
      type: DataTypes.DATE,
      allowNull: true,
    },
    reason: {
      type: DataTypes.TEXT,
      allowNull: true,
    },
    description: {
      type: DataTypes.TEXT,
      allowNull: true,
    },
    created_at: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW,
    },
    updated_at: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW,
    },
  });
  await queryInterface.addIndex('claim_credit_notes', ['request_id'], { name: 'idx_claim_credit_notes_request_id', unique: true });
  await queryInterface.addIndex('claim_credit_notes', ['invoice_id'], { name: 'idx_claim_credit_notes_invoice_id' });
  await queryInterface.addIndex('claim_credit_notes', ['credit_note_number'], { name: 'idx_claim_credit_notes_number' });
  await queryInterface.addIndex('claim_credit_notes', ['sap_document_number'], { name: 'idx_claim_credit_notes_sap_doc' });
  await queryInterface.addIndex('claim_credit_notes', ['confirmation_status'], { name: 'idx_claim_credit_notes_status' });
}
/**
 * Reverts `up`: credit notes reference invoices, so they are dropped first.
 */
export async function down(queryInterface: QueryInterface): Promise<void> {
  for (const table of ['claim_credit_notes', 'claim_invoices']) {
    await queryInterface.dropTable(table);
  }
}

View File

@ -1,68 +0,0 @@
import { QueryInterface, DataTypes } from 'sequelize';
/**
 * Reports whether `columnName` is present on `tableName`.
 * Returns false when the table cannot be described (e.g. it does not exist).
 */
async function columnExists(
  queryInterface: QueryInterface,
  tableName: string,
  columnName: string
): Promise<boolean> {
  try {
    const description = await queryInterface.describeTable(tableName);
    return columnName in description;
  } catch {
    return false;
  }
}
/**
 * Drops legacy invoice/credit-note columns from `dealer_claim_details`.
 *
 * Each removal is guarded by an existence probe so the migration is
 * idempotent and tolerates databases where the table was created
 * without these columns in the first place.
 */
export async function up(queryInterface: QueryInterface): Promise<void> {
  const legacyColumns = [
    'dms_number',
    'e_invoice_number',
    'e_invoice_date',
    'credit_note_number',
    'credit_note_date',
    'credit_note_amount',
  ];
  for (const columnName of legacyColumns) {
    if (await columnExists(queryInterface, 'dealer_claim_details', columnName)) {
      await queryInterface.removeColumn('dealer_claim_details', columnName);
      console.log(` ✅ Removed column: ${columnName}`);
    } else {
      console.log(` ⏭️ Column ${columnName} does not exist, skipping...`);
    }
  }
}
/**
 * Restores the legacy columns on `dealer_claim_details`.
 *
 * Columns are re-added in their original order; all are nullable, so
 * this is safe to run against a populated table.
 */
export async function down(queryInterface: QueryInterface): Promise<void> {
  // Column name -> Sequelize column definition, in original creation order.
  const restoredColumns = {
    dms_number: { type: DataTypes.STRING(100), allowNull: true },
    e_invoice_number: { type: DataTypes.STRING(100), allowNull: true },
    e_invoice_date: { type: DataTypes.DATEONLY, allowNull: true },
    credit_note_number: { type: DataTypes.STRING(100), allowNull: true },
    credit_note_date: { type: DataTypes.DATEONLY, allowNull: true },
    credit_note_amount: { type: DataTypes.DECIMAL(15, 2), allowNull: true },
  };
  for (const [columnName, definition] of Object.entries(restoredColumns)) {
    await queryInterface.addColumn('dealer_claim_details', columnName, definition);
  }
}

View File

@ -1,55 +0,0 @@
import { QueryInterface, DataTypes } from 'sequelize';
/**
 * Creates the `dealer_completion_expenses` table: itemized expense lines
 * captured when a dealer submits activity-completion details.
 *
 * Rows reference the parent workflow request (required) and, optionally,
 * the completion record they belong to; both FKs cascade on delete so
 * expense lines disappear with their parent.
 */
export async function up(queryInterface: QueryInterface): Promise<void> {
  await queryInterface.createTable('dealer_completion_expenses', {
    expense_id: {
      type: DataTypes.UUID,
      primaryKey: true,
      defaultValue: DataTypes.UUIDV4,
    },
    // Owning workflow request — mandatory; deleting the request removes its expenses.
    request_id: {
      type: DataTypes.UUID,
      allowNull: false,
      references: { model: 'workflow_requests', key: 'request_id' },
      onDelete: 'CASCADE',
      onUpdate: 'CASCADE',
    },
    // Optional link to the completion record; nullable so expenses can be
    // captured before the completion row exists.
    completion_id: {
      type: DataTypes.UUID,
      allowNull: true,
      references: { model: 'dealer_completion_details', key: 'completion_id' },
      onDelete: 'CASCADE',
      onUpdate: 'CASCADE',
    },
    description: {
      type: DataTypes.STRING(500),
      allowNull: false,
    },
    amount: {
      type: DataTypes.DECIMAL(15, 2),
      allowNull: false,
    },
    created_at: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW,
    },
    updated_at: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW,
    },
  });
  // Lookup indexes for the two foreign keys.
  await queryInterface.addIndex('dealer_completion_expenses', ['request_id'], {
    name: 'idx_dealer_completion_expenses_request_id',
  });
  await queryInterface.addIndex('dealer_completion_expenses', ['completion_id'], {
    name: 'idx_dealer_completion_expenses_completion_id',
  });
}
/**
 * Reverts the migration by dropping the expenses table.
 * Its indexes are removed implicitly along with the table.
 */
export async function down(queryInterface: QueryInterface): Promise<void> {
  const tableName = 'dealer_completion_expenses';
  await queryInterface.dropTable(tableName);
}

View File

@ -1,240 +0,0 @@
import { QueryInterface, DataTypes } from 'sequelize';
/**
* Helper function to check if a column exists in a table
*/
/**
 * True when `tableName` has a column named `columnName`.
 * A failure to describe the table (typically: table absent) is reported
 * as `false` rather than propagated, so probes are always safe.
 */
async function columnExists(
  queryInterface: QueryInterface,
  tableName: string,
  columnName: string
): Promise<boolean> {
  try {
    // describeTable rejects when the table is missing; treat that as "no column".
    return columnName in (await queryInterface.describeTable(tableName));
  } catch {
    return false;
  }
}
/**
* Migration: Fix column names in claim_invoices and claim_credit_notes tables
*
* This migration handles the case where tables were created with old column names
* and need to be updated to match the new schema.
*/
/**
 * Reconciles live schemas with the new column names for
 * `claim_invoices` and `claim_credit_notes`.
 *
 * For each table (when it exists) the function either renames the old
 * column, adds the new one if neither name is present, or leaves the
 * schema untouched — making the whole migration idempotent. Any DDL
 * failure is logged and rethrown so the migration run fails visibly.
 */
export async function up(queryInterface: QueryInterface): Promise<void> {
  try {
    // Check if claim_invoices table exists
    const [invoiceTables] = await queryInterface.sequelize.query(`
      SELECT table_name
      FROM information_schema.tables
      WHERE table_schema = 'public'
      AND table_name = 'claim_invoices';
    `);
    if (invoiceTables.length > 0) {
      // Fix claim_invoices table
      const hasOldAmount = await columnExists(queryInterface, 'claim_invoices', 'amount');
      const hasNewAmount = await columnExists(queryInterface, 'claim_invoices', 'invoice_amount');
      if (hasOldAmount && !hasNewAmount) {
        // Rename amount to invoice_amount
        await queryInterface.renameColumn('claim_invoices', 'amount', 'invoice_amount');
        console.log('✅ Renamed claim_invoices.amount to invoice_amount');
      } else if (!hasOldAmount && !hasNewAmount) {
        // Add invoice_amount if neither exists
        await queryInterface.addColumn('claim_invoices', 'invoice_amount', {
          type: DataTypes.DECIMAL(15, 2),
          allowNull: true,
        });
        console.log('✅ Added invoice_amount column to claim_invoices');
      } else if (hasNewAmount) {
        console.log('✅ invoice_amount column already exists in claim_invoices');
      }
      // Check for status vs generation_status
      const hasStatus = await columnExists(queryInterface, 'claim_invoices', 'status');
      const hasGenerationStatus = await columnExists(queryInterface, 'claim_invoices', 'generation_status');
      if (hasStatus && !hasGenerationStatus) {
        // Rename status to generation_status
        await queryInterface.renameColumn('claim_invoices', 'status', 'generation_status');
        console.log('✅ Renamed claim_invoices.status to generation_status');
      } else if (!hasStatus && !hasGenerationStatus) {
        // Add generation_status if neither exists
        await queryInterface.addColumn('claim_invoices', 'generation_status', {
          type: DataTypes.STRING(50),
          allowNull: true,
        });
        console.log('✅ Added generation_status column to claim_invoices');
      } else if (hasGenerationStatus) {
        console.log('✅ generation_status column already exists in claim_invoices');
      }
    }
    // Check if claim_credit_notes table exists
    const [creditNoteTables] = await queryInterface.sequelize.query(`
      SELECT table_name
      FROM information_schema.tables
      WHERE table_schema = 'public'
      AND table_name = 'claim_credit_notes';
    `);
    if (creditNoteTables.length > 0) {
      // Fix claim_credit_notes table
      const hasOldAmount = await columnExists(queryInterface, 'claim_credit_notes', 'credit_note_amount');
      const hasNewAmount = await columnExists(queryInterface, 'claim_credit_notes', 'credit_amount');
      if (hasOldAmount && !hasNewAmount) {
        // Rename credit_note_amount to credit_amount
        await queryInterface.renameColumn('claim_credit_notes', 'credit_note_amount', 'credit_amount');
        console.log('✅ Renamed claim_credit_notes.credit_note_amount to credit_amount');
      } else if (!hasOldAmount && !hasNewAmount) {
        // Add credit_amount if neither exists
        await queryInterface.addColumn('claim_credit_notes', 'credit_amount', {
          type: DataTypes.DECIMAL(15, 2),
          allowNull: true,
        });
        console.log('✅ Added credit_amount column to claim_credit_notes');
      } else if (hasNewAmount) {
        console.log('✅ credit_amount column already exists in claim_credit_notes');
      }
      // Check for status vs confirmation_status
      const hasStatus = await columnExists(queryInterface, 'claim_credit_notes', 'status');
      const hasConfirmationStatus = await columnExists(queryInterface, 'claim_credit_notes', 'confirmation_status');
      if (hasStatus && !hasConfirmationStatus) {
        // Rename status to confirmation_status
        await queryInterface.renameColumn('claim_credit_notes', 'status', 'confirmation_status');
        console.log('✅ Renamed claim_credit_notes.status to confirmation_status');
      } else if (!hasStatus && !hasConfirmationStatus) {
        // Add confirmation_status if neither exists
        await queryInterface.addColumn('claim_credit_notes', 'confirmation_status', {
          type: DataTypes.STRING(50),
          allowNull: true,
        });
        console.log('✅ Added confirmation_status column to claim_credit_notes');
      } else if (hasConfirmationStatus) {
        console.log('✅ confirmation_status column already exists in claim_credit_notes');
      }
      // Ensure invoice_id column exists
      const hasInvoiceId = await columnExists(queryInterface, 'claim_credit_notes', 'invoice_id');
      if (!hasInvoiceId) {
        await queryInterface.addColumn('claim_credit_notes', 'invoice_id', {
          type: DataTypes.UUID,
          allowNull: true,
          references: {
            model: 'claim_invoices',
            key: 'invoice_id',
          },
          onDelete: 'SET NULL',
          onUpdate: 'CASCADE',
        });
        console.log('✅ Added invoice_id column to claim_credit_notes');
      }
      // Ensure sap_document_number column exists
      const hasSapDoc = await columnExists(queryInterface, 'claim_credit_notes', 'sap_document_number');
      if (!hasSapDoc) {
        await queryInterface.addColumn('claim_credit_notes', 'sap_document_number', {
          type: DataTypes.STRING(100),
          allowNull: true,
        });
        console.log('✅ Added sap_document_number column to claim_credit_notes');
      }
      // Ensure credit_note_file_path column exists
      const hasFilePath = await columnExists(queryInterface, 'claim_credit_notes', 'credit_note_file_path');
      if (!hasFilePath) {
        await queryInterface.addColumn('claim_credit_notes', 'credit_note_file_path', {
          type: DataTypes.STRING(500),
          allowNull: true,
        });
        console.log('✅ Added credit_note_file_path column to claim_credit_notes');
      }
      // Ensure confirmed_by column exists
      const hasConfirmedBy = await columnExists(queryInterface, 'claim_credit_notes', 'confirmed_by');
      if (!hasConfirmedBy) {
        await queryInterface.addColumn('claim_credit_notes', 'confirmed_by', {
          type: DataTypes.UUID,
          allowNull: true,
          references: {
            model: 'users',
            key: 'user_id',
          },
          onDelete: 'SET NULL',
          onUpdate: 'CASCADE',
        });
        console.log('✅ Added confirmed_by column to claim_credit_notes');
      }
      // Ensure confirmed_at column exists
      const hasConfirmedAt = await columnExists(queryInterface, 'claim_credit_notes', 'confirmed_at');
      if (!hasConfirmedAt) {
        await queryInterface.addColumn('claim_credit_notes', 'confirmed_at', {
          type: DataTypes.DATE,
          allowNull: true,
        });
        console.log('✅ Added confirmed_at column to claim_credit_notes');
      }
    }
    // Ensure invoice_file_path exists in claim_invoices
    if (invoiceTables.length > 0) {
      const hasFilePath = await columnExists(queryInterface, 'claim_invoices', 'invoice_file_path');
      if (!hasFilePath) {
        await queryInterface.addColumn('claim_invoices', 'invoice_file_path', {
          type: DataTypes.STRING(500),
          allowNull: true,
        });
        console.log('✅ Added invoice_file_path column to claim_invoices');
      }
      // Ensure error_message exists
      const hasErrorMessage = await columnExists(queryInterface, 'claim_invoices', 'error_message');
      if (!hasErrorMessage) {
        await queryInterface.addColumn('claim_invoices', 'error_message', {
          type: DataTypes.TEXT,
          allowNull: true,
        });
        console.log('✅ Added error_message column to claim_invoices');
      }
      // Ensure generated_at exists
      const hasGeneratedAt = await columnExists(queryInterface, 'claim_invoices', 'generated_at');
      if (!hasGeneratedAt) {
        await queryInterface.addColumn('claim_invoices', 'generated_at', {
          type: DataTypes.DATE,
          allowNull: true,
        });
        console.log('✅ Added generated_at column to claim_invoices');
      }
    }
    // Ensure error_message exists in claim_credit_notes
    if (creditNoteTables.length > 0) {
      const hasErrorMessage = await columnExists(queryInterface, 'claim_credit_notes', 'error_message');
      if (!hasErrorMessage) {
        await queryInterface.addColumn('claim_credit_notes', 'error_message', {
          type: DataTypes.TEXT,
          allowNull: true,
        });
        console.log('✅ Added error_message column to claim_credit_notes');
      }
    }
  } catch (error: any) {
    // Surface the failure in the migration log, then let the runner abort.
    console.error('Migration error:', error.message);
    throw error;
  }
}
/**
 * Intentionally a no-op.
 *
 * The `up` migration is idempotent and safe to re-run; reverting to the
 * legacy column names would risk data loss, so the down path only logs
 * that nothing is undone.
 */
export async function down(queryInterface: QueryInterface): Promise<void> {
  console.log('Note: Down migration not implemented - keeping new column names');
}

View File

@ -1,295 +0,0 @@
import { DataTypes, Model, Optional } from 'sequelize';
import { sequelize } from '@config/database';
import { WorkflowRequest } from './WorkflowRequest';
import { User } from './User';
/**
 * Lifecycle states of a claim budget, as tracked by ClaimBudgetTracking.
 * The names mirror the `budget_status` ENUM values in the database.
 */
export enum BudgetStatus {
  DRAFT = 'DRAFT',         // initial estimate only
  PROPOSED = 'PROPOSED',   // proposal budget submitted
  APPROVED = 'APPROVED',   // budget approved
  BLOCKED = 'BLOCKED',     // IO amount blocked
  CLOSED = 'CLOSED',       // closed expenses submitted
  SETTLED = 'SETTLED'      // credit note issued / settled
}
/**
 * Persistent attributes of a claim budget tracking row.
 * One row per workflow request; each stage of the budget lifecycle
 * records its amount plus the timestamp (and approver where relevant).
 */
interface ClaimBudgetTrackingAttributes {
  budgetId: string;
  requestId: string;
  // Initial Budget
  initialEstimatedBudget?: number;
  // Proposal Budget
  proposalEstimatedBudget?: number;
  proposalSubmittedAt?: Date;
  // Approved Budget
  approvedBudget?: number;
  approvedAt?: Date;
  approvedBy?: string;
  // IO Blocked Budget
  ioBlockedAmount?: number;
  ioBlockedAt?: Date;
  // Closed Expenses
  closedExpenses?: number;
  closedExpensesSubmittedAt?: Date;
  // Final Claim Amount
  finalClaimAmount?: number;
  finalClaimAmountApprovedAt?: Date;
  finalClaimAmountApprovedBy?: string;
  // Credit Note
  creditNoteAmount?: number;
  creditNoteIssuedAt?: Date;
  // Status & Metadata
  budgetStatus: BudgetStatus;
  currency: string;
  varianceAmount?: number;
  variancePercentage?: number;
  // Audit
  lastModifiedBy?: string;
  lastModifiedAt?: Date;
  modificationReason?: string;
  createdAt: Date;
  updatedAt: Date;
}
/**
 * Attributes accepted at creation time: everything optional except
 * `requestId` (IDs, timestamps and defaulted fields are filled in by
 * Sequelize / the database).
 */
interface ClaimBudgetTrackingCreationAttributes extends Optional<ClaimBudgetTrackingAttributes, 'budgetId' | 'initialEstimatedBudget' | 'proposalEstimatedBudget' | 'proposalSubmittedAt' | 'approvedBudget' | 'approvedAt' | 'approvedBy' | 'ioBlockedAmount' | 'ioBlockedAt' | 'closedExpenses' | 'closedExpensesSubmittedAt' | 'finalClaimAmount' | 'finalClaimAmountApprovedAt' | 'finalClaimAmountApprovedBy' | 'creditNoteAmount' | 'creditNoteIssuedAt' | 'varianceAmount' | 'variancePercentage' | 'lastModifiedBy' | 'lastModifiedAt' | 'modificationReason' | 'budgetStatus' | 'currency' | 'createdAt' | 'updatedAt'> {}
/**
 * Sequelize model for `claim_budget_tracking`: one row per workflow
 * request, carrying the budget figures for each lifecycle stage.
 */
class ClaimBudgetTracking extends Model<ClaimBudgetTrackingAttributes, ClaimBudgetTrackingCreationAttributes> implements ClaimBudgetTrackingAttributes {
  public budgetId!: string;
  public requestId!: string;
  public initialEstimatedBudget?: number;
  public proposalEstimatedBudget?: number;
  public proposalSubmittedAt?: Date;
  public approvedBudget?: number;
  public approvedAt?: Date;
  public approvedBy?: string;
  public ioBlockedAmount?: number;
  public ioBlockedAt?: Date;
  public closedExpenses?: number;
  public closedExpensesSubmittedAt?: Date;
  public finalClaimAmount?: number;
  public finalClaimAmountApprovedAt?: Date;
  public finalClaimAmountApprovedBy?: string;
  public creditNoteAmount?: number;
  public creditNoteIssuedAt?: Date;
  public budgetStatus!: BudgetStatus;
  public currency!: string;
  public varianceAmount?: number;
  public variancePercentage?: number;
  public lastModifiedBy?: string;
  public lastModifiedAt?: Date;
  public modificationReason?: string;
  public createdAt!: Date;
  public updatedAt!: Date;
  // Associations (populated only when eagerly included in a query)
  public request?: WorkflowRequest;
  public approver?: User;
  public finalApprover?: User;
  public lastModifier?: User;
}
// Column definitions. Each camelCase attribute maps to a snake_case
// column via `field`; monetary columns are DECIMAL(15, 2).
ClaimBudgetTracking.init(
  {
    budgetId: {
      type: DataTypes.UUID,
      defaultValue: DataTypes.UUIDV4,
      primaryKey: true,
      field: 'budget_id'
    },
    // Unique: exactly one budget-tracking row per workflow request.
    requestId: {
      type: DataTypes.UUID,
      allowNull: false,
      unique: true,
      field: 'request_id',
      references: {
        model: 'workflow_requests',
        key: 'request_id'
      }
    },
    initialEstimatedBudget: {
      type: DataTypes.DECIMAL(15, 2),
      allowNull: true,
      field: 'initial_estimated_budget'
    },
    proposalEstimatedBudget: {
      type: DataTypes.DECIMAL(15, 2),
      allowNull: true,
      field: 'proposal_estimated_budget'
    },
    proposalSubmittedAt: {
      type: DataTypes.DATE,
      allowNull: true,
      field: 'proposal_submitted_at'
    },
    approvedBudget: {
      type: DataTypes.DECIMAL(15, 2),
      allowNull: true,
      field: 'approved_budget'
    },
    approvedAt: {
      type: DataTypes.DATE,
      allowNull: true,
      field: 'approved_at'
    },
    approvedBy: {
      type: DataTypes.UUID,
      allowNull: true,
      field: 'approved_by',
      references: {
        model: 'users',
        key: 'user_id'
      }
    },
    ioBlockedAmount: {
      type: DataTypes.DECIMAL(15, 2),
      allowNull: true,
      field: 'io_blocked_amount'
    },
    ioBlockedAt: {
      type: DataTypes.DATE,
      allowNull: true,
      field: 'io_blocked_at'
    },
    closedExpenses: {
      type: DataTypes.DECIMAL(15, 2),
      allowNull: true,
      field: 'closed_expenses'
    },
    closedExpensesSubmittedAt: {
      type: DataTypes.DATE,
      allowNull: true,
      field: 'closed_expenses_submitted_at'
    },
    finalClaimAmount: {
      type: DataTypes.DECIMAL(15, 2),
      allowNull: true,
      field: 'final_claim_amount'
    },
    finalClaimAmountApprovedAt: {
      type: DataTypes.DATE,
      allowNull: true,
      field: 'final_claim_amount_approved_at'
    },
    finalClaimAmountApprovedBy: {
      type: DataTypes.UUID,
      allowNull: true,
      field: 'final_claim_amount_approved_by',
      references: {
        model: 'users',
        key: 'user_id'
      }
    },
    creditNoteAmount: {
      type: DataTypes.DECIMAL(15, 2),
      allowNull: true,
      field: 'credit_note_amount'
    },
    creditNoteIssuedAt: {
      type: DataTypes.DATE,
      allowNull: true,
      field: 'credit_note_issued_at'
    },
    // Values must stay in sync with the BudgetStatus enum above.
    budgetStatus: {
      type: DataTypes.ENUM('DRAFT', 'PROPOSED', 'APPROVED', 'BLOCKED', 'CLOSED', 'SETTLED'),
      defaultValue: 'DRAFT',
      allowNull: false,
      field: 'budget_status'
    },
    // ISO 4217 code; defaults to INR.
    currency: {
      type: DataTypes.STRING(3),
      defaultValue: 'INR',
      allowNull: false
    },
    varianceAmount: {
      type: DataTypes.DECIMAL(15, 2),
      allowNull: true,
      field: 'variance_amount'
    },
    variancePercentage: {
      type: DataTypes.DECIMAL(5, 2),
      allowNull: true,
      field: 'variance_percentage'
    },
    lastModifiedBy: {
      type: DataTypes.UUID,
      allowNull: true,
      field: 'last_modified_by',
      references: {
        model: 'users',
        key: 'user_id'
      }
    },
    lastModifiedAt: {
      type: DataTypes.DATE,
      allowNull: true,
      field: 'last_modified_at'
    },
    modificationReason: {
      type: DataTypes.TEXT,
      allowNull: true,
      field: 'modification_reason'
    },
    createdAt: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW,
      field: 'created_at'
    },
    updatedAt: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW,
      field: 'updated_at'
    }
  },
  {
    sequelize,
    modelName: 'ClaimBudgetTracking',
    tableName: 'claim_budget_tracking',
    timestamps: true,
    createdAt: 'created_at',
    updatedAt: 'updated_at',
    indexes: [
      {
        fields: ['request_id'],
        unique: true
      },
      {
        fields: ['budget_status']
      },
      {
        fields: ['approved_by']
      },
      {
        fields: ['final_claim_amount_approved_by']
      }
    ]
  }
);
// Associations: the owning workflow request plus three user roles
// (budget approver, final-amount approver, last modifier).
ClaimBudgetTracking.belongsTo(WorkflowRequest, {
  as: 'request',
  foreignKey: 'requestId',
  targetKey: 'requestId'
});
ClaimBudgetTracking.belongsTo(User, {
  as: 'approver',
  foreignKey: 'approvedBy',
  targetKey: 'userId'
});
ClaimBudgetTracking.belongsTo(User, {
  as: 'finalApprover',
  foreignKey: 'finalClaimAmountApprovedBy',
  targetKey: 'userId'
});
ClaimBudgetTracking.belongsTo(User, {
  as: 'lastModifier',
  foreignKey: 'lastModifiedBy',
  targetKey: 'userId'
});
export { ClaimBudgetTracking };

View File

@ -1,193 +0,0 @@
import { DataTypes, Model, Optional } from 'sequelize';
import { sequelize } from '@config/database';
import { WorkflowRequest } from './WorkflowRequest';
import { ClaimInvoice } from './ClaimInvoice';
/**
 * Persistent attributes of a claim credit note. One per workflow
 * request; optionally linked to the generated claim invoice.
 * NOTE(review): `status` maps to the `confirmation_status` column (see init below).
 */
interface ClaimCreditNoteAttributes {
  creditNoteId: string;
  requestId: string;
  invoiceId?: string;
  creditNoteNumber?: string;
  creditNoteDate?: Date;
  creditNoteAmount?: number;
  sapDocumentNumber?: string;
  creditNoteFilePath?: string;
  status?: string;
  errorMessage?: string;
  confirmedBy?: string;
  confirmedAt?: Date;
  reason?: string;
  description?: string;
  createdAt: Date;
  updatedAt: Date;
}
// Creation shape: only requestId is required; everything else is optional/defaulted.
interface ClaimCreditNoteCreationAttributes extends Optional<ClaimCreditNoteAttributes, 'creditNoteId' | 'invoiceId' | 'creditNoteNumber' | 'creditNoteDate' | 'creditNoteAmount' | 'sapDocumentNumber' | 'creditNoteFilePath' | 'status' | 'errorMessage' | 'confirmedBy' | 'confirmedAt' | 'reason' | 'description' | 'createdAt' | 'updatedAt'> {}
/**
 * Sequelize model for `claim_credit_notes`: credit-note metadata,
 * SAP document reference and confirmation audit trail for a claim.
 */
class ClaimCreditNote extends Model<ClaimCreditNoteAttributes, ClaimCreditNoteCreationAttributes> implements ClaimCreditNoteAttributes {
  public creditNoteId!: string;
  public requestId!: string;
  public invoiceId?: string;
  public creditNoteNumber?: string;
  public creditNoteDate?: Date;
  public creditNoteAmount?: number;
  public sapDocumentNumber?: string;
  public creditNoteFilePath?: string;
  public status?: string;
  public errorMessage?: string;
  public confirmedBy?: string;
  public confirmedAt?: Date;
  public reason?: string;
  public description?: string;
  public createdAt!: Date;
  public updatedAt!: Date;
}
// Column definitions. camelCase attributes map to snake_case columns;
// note the two deliberately divergent mappings: creditNoteAmount ->
// credit_amount and status -> confirmation_status.
ClaimCreditNote.init(
  {
    creditNoteId: {
      type: DataTypes.UUID,
      defaultValue: DataTypes.UUIDV4,
      primaryKey: true,
      field: 'credit_note_id',
    },
    // Unique: at most one credit note per workflow request.
    requestId: {
      type: DataTypes.UUID,
      allowNull: false,
      unique: true,
      field: 'request_id',
      references: {
        model: 'workflow_requests',
        key: 'request_id',
      },
      onDelete: 'CASCADE',
      onUpdate: 'CASCADE',
    },
    // Optional back-reference to the invoice; survives invoice deletion (SET NULL).
    invoiceId: {
      type: DataTypes.UUID,
      allowNull: true,
      field: 'invoice_id',
      references: {
        model: 'claim_invoices',
        key: 'invoice_id',
      },
      onDelete: 'SET NULL',
      onUpdate: 'CASCADE',
    },
    creditNoteNumber: {
      type: DataTypes.STRING(100),
      allowNull: true,
      field: 'credit_note_number',
    },
    creditNoteDate: {
      type: DataTypes.DATEONLY,
      allowNull: true,
      field: 'credit_note_date',
    },
    creditNoteAmount: {
      type: DataTypes.DECIMAL(15, 2),
      allowNull: true,
      field: 'credit_amount',
    },
    sapDocumentNumber: {
      type: DataTypes.STRING(100),
      allowNull: true,
      field: 'sap_document_number',
    },
    creditNoteFilePath: {
      type: DataTypes.STRING(500),
      allowNull: true,
      field: 'credit_note_file_path',
    },
    status: {
      type: DataTypes.STRING(50),
      allowNull: true,
      field: 'confirmation_status',
    },
    errorMessage: {
      type: DataTypes.TEXT,
      allowNull: true,
      field: 'error_message',
    },
    confirmedBy: {
      type: DataTypes.UUID,
      allowNull: true,
      field: 'confirmed_by',
      references: {
        model: 'users',
        key: 'user_id',
      },
      onDelete: 'SET NULL',
      onUpdate: 'CASCADE',
    },
    confirmedAt: {
      type: DataTypes.DATE,
      allowNull: true,
      field: 'confirmed_at',
    },
    reason: {
      type: DataTypes.TEXT,
      allowNull: true,
      field: 'reason',
    },
    description: {
      type: DataTypes.TEXT,
      allowNull: true,
      field: 'description',
    },
    createdAt: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW,
      field: 'created_at',
    },
    updatedAt: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW,
      field: 'updated_at',
    },
  },
  {
    sequelize,
    modelName: 'ClaimCreditNote',
    tableName: 'claim_credit_notes',
    timestamps: true,
    createdAt: 'created_at',
    updatedAt: 'updated_at',
    // Index names mirror the ones created by the migration.
    indexes: [
      { unique: true, fields: ['request_id'], name: 'idx_claim_credit_notes_request_id' },
      { fields: ['invoice_id'], name: 'idx_claim_credit_notes_invoice_id' },
      { fields: ['credit_note_number'], name: 'idx_claim_credit_notes_number' },
      { fields: ['sap_document_number'], name: 'idx_claim_credit_notes_sap_doc' },
      { fields: ['confirmation_status'], name: 'idx_claim_credit_notes_status' },
    ],
  }
);
// Associations: 1:1 with the workflow request, and optional N:1 to the
// claim invoice (both directions declared here to avoid a circular
// import in ClaimInvoice.ts).
WorkflowRequest.hasOne(ClaimCreditNote, {
  as: 'claimCreditNote',
  foreignKey: 'requestId',
  sourceKey: 'requestId',
});
ClaimCreditNote.belongsTo(WorkflowRequest, {
  as: 'workflowRequest',
  foreignKey: 'requestId',
  targetKey: 'requestId',
});
ClaimCreditNote.belongsTo(ClaimInvoice, {
  as: 'claimInvoice',
  foreignKey: 'invoiceId',
  targetKey: 'invoiceId',
});
ClaimInvoice.hasMany(ClaimCreditNote, {
  as: 'creditNotes',
  foreignKey: 'invoiceId',
  sourceKey: 'invoiceId',
});
export { ClaimCreditNote };

View File

@ -1,149 +0,0 @@
import { DataTypes, Model, Optional } from 'sequelize';
import { sequelize } from '@config/database';
import { WorkflowRequest } from './WorkflowRequest';
/**
 * Persistent attributes of a claim invoice. One per workflow request.
 * NOTE(review): `amount` maps to `invoice_amount` and `status` to
 * `generation_status` at the column level (see init below).
 */
interface ClaimInvoiceAttributes {
  invoiceId: string;
  requestId: string;
  invoiceNumber?: string;
  invoiceDate?: Date;
  amount?: number;
  dmsNumber?: string;
  invoiceFilePath?: string;
  status?: string;
  errorMessage?: string;
  generatedAt?: Date;
  description?: string;
  createdAt: Date;
  updatedAt: Date;
}
// Creation shape: only requestId is required; everything else is optional/defaulted.
interface ClaimInvoiceCreationAttributes extends Optional<ClaimInvoiceAttributes, 'invoiceId' | 'invoiceNumber' | 'invoiceDate' | 'amount' | 'dmsNumber' | 'invoiceFilePath' | 'status' | 'errorMessage' | 'generatedAt' | 'description' | 'createdAt' | 'updatedAt'> {}
/**
 * Sequelize model for `claim_invoices`: invoice metadata and
 * generation status for a dealer claim.
 */
class ClaimInvoice extends Model<ClaimInvoiceAttributes, ClaimInvoiceCreationAttributes> implements ClaimInvoiceAttributes {
  public invoiceId!: string;
  public requestId!: string;
  public invoiceNumber?: string;
  public invoiceDate?: Date;
  public amount?: number;
  public dmsNumber?: string;
  public invoiceFilePath?: string;
  public status?: string;
  public errorMessage?: string;
  public generatedAt?: Date;
  public description?: string;
  public createdAt!: Date;
  public updatedAt!: Date;
}
// Column definitions. camelCase attributes map to snake_case columns;
// note amount -> invoice_amount and status -> generation_status.
ClaimInvoice.init(
  {
    invoiceId: {
      type: DataTypes.UUID,
      defaultValue: DataTypes.UUIDV4,
      primaryKey: true,
      field: 'invoice_id',
    },
    // Unique: at most one invoice per workflow request.
    requestId: {
      type: DataTypes.UUID,
      allowNull: false,
      unique: true,
      field: 'request_id',
      references: {
        model: 'workflow_requests',
        key: 'request_id',
      },
      onDelete: 'CASCADE',
      onUpdate: 'CASCADE',
    },
    invoiceNumber: {
      type: DataTypes.STRING(100),
      allowNull: true,
      field: 'invoice_number',
    },
    invoiceDate: {
      type: DataTypes.DATEONLY,
      allowNull: true,
      field: 'invoice_date',
    },
    amount: {
      type: DataTypes.DECIMAL(15, 2),
      allowNull: true,
      field: 'invoice_amount',
    },
    dmsNumber: {
      type: DataTypes.STRING(100),
      allowNull: true,
      field: 'dms_number',
    },
    invoiceFilePath: {
      type: DataTypes.STRING(500),
      allowNull: true,
      field: 'invoice_file_path',
    },
    status: {
      type: DataTypes.STRING(50),
      allowNull: true,
      field: 'generation_status',
    },
    errorMessage: {
      type: DataTypes.TEXT,
      allowNull: true,
      field: 'error_message',
    },
    generatedAt: {
      type: DataTypes.DATE,
      allowNull: true,
      field: 'generated_at',
    },
    description: {
      type: DataTypes.TEXT,
      allowNull: true,
      field: 'description',
    },
    createdAt: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW,
      field: 'created_at',
    },
    updatedAt: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW,
      field: 'updated_at',
    },
  },
  {
    sequelize,
    modelName: 'ClaimInvoice',
    tableName: 'claim_invoices',
    timestamps: true,
    createdAt: 'created_at',
    updatedAt: 'updated_at',
    // Index names mirror the ones created by the migration.
    indexes: [
      { unique: true, fields: ['request_id'], name: 'idx_claim_invoices_request_id' },
      { fields: ['invoice_number'], name: 'idx_claim_invoices_invoice_number' },
      { fields: ['dms_number'], name: 'idx_claim_invoices_dms_number' },
      { fields: ['generation_status'], name: 'idx_claim_invoices_status' },
    ],
  }
);
// Associations: 1:1 with the owning workflow request.
WorkflowRequest.hasOne(ClaimInvoice, {
  as: 'claimInvoice',
  foreignKey: 'requestId',
  sourceKey: 'requestId',
});
ClaimInvoice.belongsTo(WorkflowRequest, {
  as: 'workflowRequest',
  foreignKey: 'requestId',
  targetKey: 'requestId',
});
// Note: hasMany association with ClaimCreditNote is defined in ClaimCreditNote.ts
// to avoid circular dependency issues
export { ClaimInvoice };

View File

@ -1,167 +0,0 @@
import { DataTypes, Model, Optional } from 'sequelize';
import { sequelize } from '@config/database';
import { WorkflowRequest } from './WorkflowRequest';
/**
 * Persistent attributes of the dealer/activity details attached to a
 * claim workflow request (one row per request).
 */
interface DealerClaimDetailsAttributes {
  claimId: string;
  requestId: string;
  activityName: string;
  activityType: string;
  dealerCode: string;
  dealerName: string;
  dealerEmail?: string;
  dealerPhone?: string;
  dealerAddress?: string;
  activityDate?: Date;
  location?: string;
  periodStartDate?: Date;
  periodEndDate?: Date;
  createdAt: Date;
  updatedAt: Date;
}
// Creation shape: activity and dealer identity fields are required; contact
// details, dates and location are optional.
interface DealerClaimDetailsCreationAttributes extends Optional<DealerClaimDetailsAttributes, 'claimId' | 'dealerEmail' | 'dealerPhone' | 'dealerAddress' | 'activityDate' | 'location' | 'periodStartDate' | 'periodEndDate' | 'createdAt' | 'updatedAt'> {}
/**
 * Sequelize model for `dealer_claim_details`: the dealer and activity
 * information captured when a claim request is raised.
 */
class DealerClaimDetails extends Model<DealerClaimDetailsAttributes, DealerClaimDetailsCreationAttributes> implements DealerClaimDetailsAttributes {
  public claimId!: string;
  public requestId!: string;
  public activityName!: string;
  public activityType!: string;
  public dealerCode!: string;
  public dealerName!: string;
  public dealerEmail?: string;
  public dealerPhone?: string;
  public dealerAddress?: string;
  public activityDate?: Date;
  public location?: string;
  public periodStartDate?: Date;
  public periodEndDate?: Date;
  public createdAt!: Date;
  public updatedAt!: Date;
  // Associations (populated only when eagerly included in a query)
  public workflowRequest?: WorkflowRequest;
}
// Column definitions. camelCase attributes map to snake_case columns.
DealerClaimDetails.init(
  {
    claimId: {
      type: DataTypes.UUID,
      defaultValue: DataTypes.UUIDV4,
      primaryKey: true,
      field: 'claim_id'
    },
    // Unique: one details row per workflow request.
    requestId: {
      type: DataTypes.UUID,
      allowNull: false,
      unique: true,
      field: 'request_id',
      references: {
        model: 'workflow_requests',
        key: 'request_id'
      }
    },
    activityName: {
      type: DataTypes.STRING(500),
      allowNull: false,
      field: 'activity_name'
    },
    activityType: {
      type: DataTypes.STRING(100),
      allowNull: false,
      field: 'activity_type'
    },
    dealerCode: {
      type: DataTypes.STRING(50),
      allowNull: false,
      field: 'dealer_code'
    },
    dealerName: {
      type: DataTypes.STRING(200),
      allowNull: false,
      field: 'dealer_name'
    },
    dealerEmail: {
      type: DataTypes.STRING(255),
      allowNull: true,
      field: 'dealer_email'
    },
    dealerPhone: {
      type: DataTypes.STRING(20),
      allowNull: true,
      field: 'dealer_phone'
    },
    dealerAddress: {
      type: DataTypes.TEXT,
      allowNull: true,
      field: 'dealer_address'
    },
    activityDate: {
      type: DataTypes.DATEONLY,
      allowNull: true,
      field: 'activity_date'
    },
    location: {
      type: DataTypes.STRING(255),
      allowNull: true
    },
    periodStartDate: {
      type: DataTypes.DATEONLY,
      allowNull: true,
      field: 'period_start_date'
    },
    periodEndDate: {
      type: DataTypes.DATEONLY,
      allowNull: true,
      field: 'period_end_date'
    },
    createdAt: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW,
      field: 'created_at'
    },
    updatedAt: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW,
      field: 'updated_at'
    }
  },
  {
    sequelize,
    modelName: 'DealerClaimDetails',
    tableName: 'dealer_claim_details',
    timestamps: true,
    createdAt: 'created_at',
    updatedAt: 'updated_at',
    indexes: [
      {
        unique: true,
        fields: ['request_id']
      },
      {
        fields: ['dealer_code']
      },
      {
        fields: ['activity_type']
      }
    ]
  }
);
// Associations: 1:1 with the owning workflow request.
DealerClaimDetails.belongsTo(WorkflowRequest, {
  as: 'workflowRequest',
  foreignKey: 'requestId',
  targetKey: 'requestId'
});
WorkflowRequest.hasOne(DealerClaimDetails, {
  as: 'claimDetails',
  foreignKey: 'requestId',
  sourceKey: 'requestId'
});
export { DealerClaimDetails };

View File

@ -1,111 +0,0 @@
import { DataTypes, Model, Optional } from 'sequelize';
import { sequelize } from '@config/database';
import { WorkflowRequest } from './WorkflowRequest';
/**
 * Persistent attributes of the completion record a dealer submits when
 * an activity finishes (one row per workflow request).
 */
interface DealerCompletionDetailsAttributes {
  completionId: string;
  requestId: string;
  activityCompletionDate: Date;
  numberOfParticipants?: number;
  totalClosedExpenses?: number;
  submittedAt?: Date;
  createdAt: Date;
  updatedAt: Date;
}
// Creation shape: requestId and activityCompletionDate are required.
interface DealerCompletionDetailsCreationAttributes extends Optional<DealerCompletionDetailsAttributes, 'completionId' | 'numberOfParticipants' | 'totalClosedExpenses' | 'submittedAt' | 'createdAt' | 'updatedAt'> {}
/**
 * Sequelize model for `dealer_completion_details`: activity-completion
 * summary (date, participants, total closed expenses) for a claim.
 */
class DealerCompletionDetails extends Model<DealerCompletionDetailsAttributes, DealerCompletionDetailsCreationAttributes> implements DealerCompletionDetailsAttributes {
  public completionId!: string;
  public requestId!: string;
  public activityCompletionDate!: Date;
  public numberOfParticipants?: number;
  public totalClosedExpenses?: number;
  public submittedAt?: Date;
  public createdAt!: Date;
  public updatedAt!: Date;
  // Association (populated only when eagerly included in a query)
  public workflowRequest?: WorkflowRequest;
}
// Column definitions. camelCase attributes map to snake_case columns.
DealerCompletionDetails.init(
  {
    completionId: {
      type: DataTypes.UUID,
      defaultValue: DataTypes.UUIDV4,
      primaryKey: true,
      field: 'completion_id'
    },
    // Unique: one completion record per workflow request.
    requestId: {
      type: DataTypes.UUID,
      allowNull: false,
      unique: true,
      field: 'request_id',
      references: {
        model: 'workflow_requests',
        key: 'request_id'
      }
    },
    activityCompletionDate: {
      type: DataTypes.DATEONLY,
      allowNull: false,
      field: 'activity_completion_date'
    },
    numberOfParticipants: {
      type: DataTypes.INTEGER,
      allowNull: true,
      field: 'number_of_participants'
    },
    totalClosedExpenses: {
      type: DataTypes.DECIMAL(15, 2),
      allowNull: true,
      field: 'total_closed_expenses'
    },
    submittedAt: {
      type: DataTypes.DATE,
      allowNull: true,
      field: 'submitted_at'
    },
    createdAt: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW,
      field: 'created_at'
    },
    updatedAt: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW,
      field: 'updated_at'
    }
  },
  {
    sequelize,
    modelName: 'DealerCompletionDetails',
    tableName: 'dealer_completion_details',
    timestamps: true,
    createdAt: 'created_at',
    updatedAt: 'updated_at',
    indexes: [
      {
        unique: true,
        fields: ['request_id']
      }
    ]
  }
);
// Associations: 1:1 with the owning workflow request.
DealerCompletionDetails.belongsTo(WorkflowRequest, {
  as: 'workflowRequest',
  foreignKey: 'requestId',
  targetKey: 'requestId'
});
WorkflowRequest.hasOne(DealerCompletionDetails, {
  as: 'completionDetails',
  foreignKey: 'requestId',
  sourceKey: 'requestId'
});
export { DealerCompletionDetails };

View File

@ -1,118 +0,0 @@
import { DataTypes, Model, Optional } from 'sequelize';
import { sequelize } from '@config/database';
import { WorkflowRequest } from './WorkflowRequest';
import { DealerCompletionDetails } from './DealerCompletionDetails';

// Row shape for `dealer_completion_expenses`: one expense line item
// (description + amount) submitted with a request's completion record.
interface DealerCompletionExpenseAttributes {
  expenseId: string;
  requestId: string;
  completionId?: string | null; // optional link to the parent completion record
  description: string;
  amount: number;
  createdAt: Date;
  updatedAt: Date;
}

// Attributes Sequelize can fill in automatically on create.
interface DealerCompletionExpenseCreationAttributes extends Optional<DealerCompletionExpenseAttributes, 'expenseId' | 'completionId' | 'createdAt' | 'updatedAt'> {}

// Sequelize model for dealer completion expense line items. Each row belongs
// to a WorkflowRequest and (optionally) to a DealerCompletionDetails record.
class DealerCompletionExpense extends Model<DealerCompletionExpenseAttributes, DealerCompletionExpenseCreationAttributes> implements DealerCompletionExpenseAttributes {
  public expenseId!: string;          // primary key (UUID)
  public requestId!: string;          // FK -> workflow_requests.request_id
  public completionId?: string | null; // FK -> dealer_completion_details.completion_id (nullable)
  public description!: string;
  public amount!: number;             // DECIMAL(15, 2) column
  public createdAt!: Date;
  public updatedAt!: Date;
}

// Column definitions: camelCase attributes map to snake_case DB fields.
DealerCompletionExpense.init(
  {
    expenseId: {
      type: DataTypes.UUID,
      defaultValue: DataTypes.UUIDV4,
      primaryKey: true,
      field: 'expense_id',
    },
    requestId: {
      type: DataTypes.UUID,
      allowNull: false,
      field: 'request_id',
      references: {
        model: 'workflow_requests',
        key: 'request_id',
      },
    },
    completionId: {
      type: DataTypes.UUID,
      allowNull: true, // expenses may be recorded before a completion record exists
      field: 'completion_id',
      references: {
        model: 'dealer_completion_details',
        key: 'completion_id',
      },
      // Deleting/renaming the parent completion record cascades to its expenses.
      onDelete: 'CASCADE',
      onUpdate: 'CASCADE',
    },
    description: {
      type: DataTypes.STRING(500),
      allowNull: false,
      field: 'description',
    },
    amount: {
      type: DataTypes.DECIMAL(15, 2),
      allowNull: false,
      field: 'amount',
    },
    createdAt: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW,
      field: 'created_at',
    },
    updatedAt: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW,
      field: 'updated_at',
    },
  },
  {
    sequelize,
    modelName: 'DealerCompletionExpense',
    tableName: 'dealer_completion_expenses',
    timestamps: true,
    createdAt: 'created_at',
    updatedAt: 'updated_at',
    indexes: [
      { fields: ['request_id'], name: 'idx_dealer_completion_expenses_request_id' },
      { fields: ['completion_id'], name: 'idx_dealer_completion_expenses_completion_id' },
    ],
  }
);

// One-to-many: a request can carry many expense line items.
WorkflowRequest.hasMany(DealerCompletionExpense, {
  as: 'completionExpenses',
  foreignKey: 'requestId',
  sourceKey: 'requestId',
});
DealerCompletionExpense.belongsTo(WorkflowRequest, {
  as: 'workflowRequest',
  foreignKey: 'requestId',
  targetKey: 'requestId',
});
// One-to-many: a completion record groups the expenses submitted with it.
DealerCompletionDetails.hasMany(DealerCompletionExpense, {
  as: 'expenses',
  foreignKey: 'completionId',
  sourceKey: 'completionId',
});
DealerCompletionExpense.belongsTo(DealerCompletionDetails, {
  as: 'completion',
  foreignKey: 'completionId',
  targetKey: 'completionId',
});

export { DealerCompletionExpense };

View File

@ -1,123 +0,0 @@
import { DataTypes, Model, Optional } from 'sequelize';
import { sequelize } from '@config/database';
import { DealerProposalDetails } from './DealerProposalDetails';
import { WorkflowRequest } from './WorkflowRequest';

// Row shape for `dealer_proposal_cost_items`: one line of a proposal's cost
// breakup (description + amount), ordered within its proposal by itemOrder.
interface DealerProposalCostItemAttributes {
  costItemId: string;
  proposalId: string;
  requestId: string;
  itemDescription: string;
  amount: number;
  itemOrder: number; // display/sort position within the proposal
  createdAt: Date;
  updatedAt: Date;
}

// Attributes Sequelize can fill in automatically on create.
interface DealerProposalCostItemCreationAttributes extends Optional<DealerProposalCostItemAttributes, 'costItemId' | 'itemOrder' | 'createdAt' | 'updatedAt'> {}

// Sequelize model for proposal cost-breakup line items. Each row belongs to a
// DealerProposalDetails record and (denormalized) to its WorkflowRequest.
class DealerProposalCostItem extends Model<DealerProposalCostItemAttributes, DealerProposalCostItemCreationAttributes> implements DealerProposalCostItemAttributes {
  public costItemId!: string;     // primary key (UUID)
  public proposalId!: string;     // FK -> dealer_proposal_details.proposal_id
  public requestId!: string;      // FK -> workflow_requests.request_id
  public itemDescription!: string;
  public amount!: number;         // DECIMAL(15, 2) column
  public itemOrder!: number;      // defaults to 0
  public createdAt!: Date;
  public updatedAt!: Date;
  // Associations (populated when included in queries)
  public proposal?: DealerProposalDetails;
  public workflowRequest?: WorkflowRequest;
}

// Column definitions: camelCase attributes map to snake_case DB fields.
DealerProposalCostItem.init(
  {
    costItemId: {
      type: DataTypes.UUID,
      defaultValue: DataTypes.UUIDV4,
      primaryKey: true,
      field: 'cost_item_id'
    },
    proposalId: {
      type: DataTypes.UUID,
      allowNull: false,
      field: 'proposal_id',
      references: {
        model: 'dealer_proposal_details',
        key: 'proposal_id'
      }
    },
    requestId: {
      type: DataTypes.UUID,
      allowNull: false,
      field: 'request_id',
      references: {
        model: 'workflow_requests',
        key: 'request_id'
      }
    },
    itemDescription: {
      type: DataTypes.STRING(500),
      allowNull: false,
      field: 'item_description'
    },
    amount: {
      type: DataTypes.DECIMAL(15, 2),
      allowNull: false
    },
    itemOrder: {
      type: DataTypes.INTEGER,
      allowNull: false,
      defaultValue: 0,
      field: 'item_order'
    },
    createdAt: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW,
      field: 'created_at'
    },
    updatedAt: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW,
      field: 'updated_at'
    }
  },
  {
    sequelize,
    modelName: 'DealerProposalCostItem',
    tableName: 'dealer_proposal_cost_items',
    timestamps: true,
    createdAt: 'created_at',
    updatedAt: 'updated_at',
    indexes: [
      { fields: ['proposal_id'], name: 'idx_proposal_cost_items_proposal_id' },
      { fields: ['request_id'], name: 'idx_proposal_cost_items_request_id' },
      // Composite index supports fetching a proposal's items in display order.
      { fields: ['proposal_id', 'item_order'], name: 'idx_proposal_cost_items_proposal_order' }
    ]
  }
);

// Associations
DealerProposalCostItem.belongsTo(DealerProposalDetails, {
  as: 'proposal',
  foreignKey: 'proposalId',
  targetKey: 'proposalId'
});
DealerProposalCostItem.belongsTo(WorkflowRequest, {
  as: 'workflowRequest',
  foreignKey: 'requestId',
  targetKey: 'requestId'
});
DealerProposalDetails.hasMany(DealerProposalCostItem, {
  as: 'costItems',
  foreignKey: 'proposalId',
  sourceKey: 'proposalId'
});

export { DealerProposalCostItem };

View File

@ -1,142 +0,0 @@
import { DataTypes, Model, Optional } from 'sequelize';
import { sequelize } from '@config/database';
import { WorkflowRequest } from './WorkflowRequest';

// Row shape for `dealer_proposal_details`: the single proposal a dealer
// submits for a workflow request (document link, budget, expected timeline,
// comments). Cost breakup lines live in `dealer_proposal_cost_items`.
interface DealerProposalDetailsAttributes {
  proposalId: string;
  requestId: string;
  proposalDocumentPath?: string;
  proposalDocumentUrl?: string;
  // costBreakup removed - now using dealer_proposal_cost_items table
  totalEstimatedBudget?: number;
  timelineMode?: 'date' | 'days'; // which of the two completion fields below applies
  expectedCompletionDate?: Date;
  expectedCompletionDays?: number;
  dealerComments?: string;
  submittedAt?: Date;
  createdAt: Date;
  updatedAt: Date;
}

// Attributes Sequelize can fill in automatically on create.
interface DealerProposalDetailsCreationAttributes extends Optional<DealerProposalDetailsAttributes, 'proposalId' | 'proposalDocumentPath' | 'proposalDocumentUrl' | 'totalEstimatedBudget' | 'timelineMode' | 'expectedCompletionDate' | 'expectedCompletionDays' | 'dealerComments' | 'submittedAt' | 'createdAt' | 'updatedAt'> {}

// Sequelize model for dealer proposals. Linked 1:1 to WorkflowRequest.
class DealerProposalDetails extends Model<DealerProposalDetailsAttributes, DealerProposalDetailsCreationAttributes> implements DealerProposalDetailsAttributes {
  public proposalId!: string;           // primary key (UUID)
  public requestId!: string;            // FK -> workflow_requests.request_id (unique: 1:1)
  public proposalDocumentPath?: string; // storage path of the uploaded proposal document
  public proposalDocumentUrl?: string;  // public/download URL for the document
  // costBreakup removed - now using dealer_proposal_cost_items table
  public totalEstimatedBudget?: number; // DECIMAL(15, 2) column
  public timelineMode?: 'date' | 'days'; // stored as STRING(10); not DB-constrained to these values
  public expectedCompletionDate?: Date;  // used when timelineMode === 'date'
  public expectedCompletionDays?: number; // used when timelineMode === 'days'
  public dealerComments?: string;
  public submittedAt?: Date;
  public createdAt!: Date;
  public updatedAt!: Date;
  public workflowRequest?: WorkflowRequest; // populated when the association is included
}

// Column definitions: camelCase attributes map to snake_case DB fields.
DealerProposalDetails.init(
  {
    proposalId: {
      type: DataTypes.UUID,
      defaultValue: DataTypes.UUIDV4,
      primaryKey: true,
      field: 'proposal_id'
    },
    requestId: {
      type: DataTypes.UUID,
      allowNull: false,
      unique: true, // one proposal per request
      field: 'request_id',
      references: {
        model: 'workflow_requests',
        key: 'request_id'
      }
    },
    proposalDocumentPath: {
      type: DataTypes.STRING(500),
      allowNull: true,
      field: 'proposal_document_path'
    },
    proposalDocumentUrl: {
      type: DataTypes.STRING(500),
      allowNull: true,
      field: 'proposal_document_url'
    },
    // costBreakup field removed - now using dealer_proposal_cost_items table
    totalEstimatedBudget: {
      type: DataTypes.DECIMAL(15, 2),
      allowNull: true,
      field: 'total_estimated_budget'
    },
    timelineMode: {
      type: DataTypes.STRING(10),
      allowNull: true,
      field: 'timeline_mode'
    },
    expectedCompletionDate: {
      type: DataTypes.DATEONLY,
      allowNull: true,
      field: 'expected_completion_date'
    },
    expectedCompletionDays: {
      type: DataTypes.INTEGER,
      allowNull: true,
      field: 'expected_completion_days'
    },
    dealerComments: {
      type: DataTypes.TEXT,
      allowNull: true,
      field: 'dealer_comments'
    },
    submittedAt: {
      type: DataTypes.DATE,
      allowNull: true,
      field: 'submitted_at'
    },
    createdAt: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW,
      field: 'created_at'
    },
    updatedAt: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW,
      field: 'updated_at'
    }
  },
  {
    sequelize,
    modelName: 'DealerProposalDetails',
    tableName: 'dealer_proposal_details',
    timestamps: true,
    createdAt: 'created_at',
    updatedAt: 'updated_at',
    indexes: [
      {
        unique: true,
        fields: ['request_id'] // backs the unique FK constraint above
      }
    ]
  }
);

// 1:1 association pair; the hasOne side exposes `proposalDetails` on WorkflowRequest.
DealerProposalDetails.belongsTo(WorkflowRequest, {
  as: 'workflowRequest',
  foreignKey: 'requestId',
  targetKey: 'requestId'
});
WorkflowRequest.hasOne(DealerProposalDetails, {
  as: 'proposalDetails',
  foreignKey: 'requestId',
  sourceKey: 'requestId'
});

export { DealerProposalDetails };

View File

@ -1,166 +0,0 @@
import { DataTypes, Model, Optional } from 'sequelize';
import { sequelize } from '@config/database';
import { WorkflowRequest } from './WorkflowRequest';
import { User } from './User';

// Lifecycle states for an internal order (IO) budget block.
export enum IOStatus {
  PENDING = 'PENDING',
  BLOCKED = 'BLOCKED',
  RELEASED = 'RELEASED',
  CANCELLED = 'CANCELLED'
}

// Row shape for `internal_orders`: the SAP internal-order record attached to
// a workflow request, with the balances blocked/remaining against the IO.
interface InternalOrderAttributes {
  ioId: string;
  requestId: string;
  ioNumber: string;             // SAP IO number
  ioRemark?: string;
  ioAvailableBalance?: number;  // balance on the IO before blocking
  ioBlockedAmount?: number;     // amount blocked for this request
  ioRemainingBalance?: number;  // balance left after blocking
  organizedBy?: string;         // user who organized/blocked the IO
  organizedAt?: Date;
  sapDocumentNumber?: string;   // SAP document reference for the block
  status: IOStatus;
  createdAt: Date;
  updatedAt: Date;
}

// Attributes Sequelize can fill in automatically on create.
interface InternalOrderCreationAttributes extends Optional<InternalOrderAttributes, 'ioId' | 'ioRemark' | 'ioAvailableBalance' | 'ioBlockedAmount' | 'ioRemainingBalance' | 'organizedBy' | 'organizedAt' | 'sapDocumentNumber' | 'status' | 'createdAt' | 'updatedAt'> {}

// Sequelize model for internal orders. Linked 1:1 to WorkflowRequest (unique
// index on request_id) and optionally to the organizing User.
class InternalOrder extends Model<InternalOrderAttributes, InternalOrderCreationAttributes> implements InternalOrderAttributes {
  public ioId!: string;               // primary key (UUID)
  public requestId!: string;          // FK -> workflow_requests.request_id
  public ioNumber!: string;
  public ioRemark?: string;
  public ioAvailableBalance?: number; // DECIMAL(15, 2) columns below
  public ioBlockedAmount?: number;
  public ioRemainingBalance?: number;
  public organizedBy?: string;        // FK -> users.user_id
  public organizedAt?: Date;
  public sapDocumentNumber?: string;
  public status!: IOStatus;           // defaults to PENDING
  public createdAt!: Date;
  public updatedAt!: Date;
  // Associations (populated when included in queries)
  public request?: WorkflowRequest;
  public organizer?: User;
}

// Column definitions: camelCase attributes map to snake_case DB fields.
InternalOrder.init(
  {
    ioId: {
      type: DataTypes.UUID,
      defaultValue: DataTypes.UUIDV4,
      primaryKey: true,
      field: 'io_id'
    },
    requestId: {
      type: DataTypes.UUID,
      allowNull: false,
      field: 'request_id',
      references: {
        model: 'workflow_requests',
        key: 'request_id'
      }
    },
    ioNumber: {
      type: DataTypes.STRING(50),
      allowNull: false,
      field: 'io_number'
    },
    ioRemark: {
      type: DataTypes.TEXT,
      allowNull: true,
      field: 'io_remark'
    },
    ioAvailableBalance: {
      type: DataTypes.DECIMAL(15, 2),
      allowNull: true,
      field: 'io_available_balance'
    },
    ioBlockedAmount: {
      type: DataTypes.DECIMAL(15, 2),
      allowNull: true,
      field: 'io_blocked_amount'
    },
    ioRemainingBalance: {
      type: DataTypes.DECIMAL(15, 2),
      allowNull: true,
      field: 'io_remaining_balance'
    },
    organizedBy: {
      type: DataTypes.UUID,
      allowNull: true,
      field: 'organized_by',
      references: {
        model: 'users',
        key: 'user_id'
      }
    },
    organizedAt: {
      type: DataTypes.DATE,
      allowNull: true,
      field: 'organized_at'
    },
    sapDocumentNumber: {
      type: DataTypes.STRING(100),
      allowNull: true,
      field: 'sap_document_number'
    },
    status: {
      // ENUM values mirror IOStatus above; keep the two in sync.
      type: DataTypes.ENUM('PENDING', 'BLOCKED', 'RELEASED', 'CANCELLED'),
      defaultValue: 'PENDING',
      allowNull: false
    },
    createdAt: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW,
      field: 'created_at'
    },
    updatedAt: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW,
      field: 'updated_at'
    }
  },
  {
    sequelize,
    modelName: 'InternalOrder',
    tableName: 'internal_orders',
    timestamps: true,
    createdAt: 'created_at',
    updatedAt: 'updated_at',
    indexes: [
      {
        fields: ['request_id'],
        unique: true // one internal order per request
      },
      {
        fields: ['io_number']
      },
      {
        fields: ['organized_by']
      }
    ]
  }
);

// Associations
InternalOrder.belongsTo(WorkflowRequest, {
  as: 'request',
  foreignKey: 'requestId',
  targetKey: 'requestId'
});
InternalOrder.belongsTo(User, {
  as: 'organizer',
  foreignKey: 'organizedBy',
  targetKey: 'userId'
});

export { InternalOrder };

View File

@ -7,9 +7,7 @@ interface WorkflowRequestAttributes {
requestId: string; requestId: string;
requestNumber: string; requestNumber: string;
initiatorId: string; initiatorId: string;
templateType: 'CUSTOM' | 'TEMPLATE' | 'DEALER CLAIM'; templateType: 'CUSTOM' | 'TEMPLATE';
workflowType?: string; // 'NON_TEMPLATIZED' | 'CLAIM_MANAGEMENT' | etc.
templateId?: string; // Reference to workflow_templates if using admin template
title: string; title: string;
description: string; description: string;
priority: Priority; priority: Priority;
@ -39,9 +37,7 @@ class WorkflowRequest extends Model<WorkflowRequestAttributes, WorkflowRequestCr
public requestId!: string; public requestId!: string;
public requestNumber!: string; public requestNumber!: string;
public initiatorId!: string; public initiatorId!: string;
public templateType!: 'CUSTOM' | 'TEMPLATE' | 'DEALER CLAIM'; public templateType!: 'CUSTOM' | 'TEMPLATE';
public workflowType?: string;
public templateId?: string;
public title!: string; public title!: string;
public description!: string; public description!: string;
public priority!: Priority; public priority!: Priority;
@ -96,23 +92,6 @@ WorkflowRequest.init(
defaultValue: 'CUSTOM', defaultValue: 'CUSTOM',
field: 'template_type' field: 'template_type'
}, },
workflowType: {
type: DataTypes.STRING(50),
allowNull: true,
defaultValue: 'NON_TEMPLATIZED',
field: 'workflow_type',
// Don't fail if column doesn't exist (for backward compatibility with old environments)
// Sequelize will handle this gracefully if the column is missing
},
templateId: {
type: DataTypes.UUID,
allowNull: true,
field: 'template_id',
references: {
model: 'workflow_templates',
key: 'template_id'
}
},
title: { title: {
type: DataTypes.STRING(500), type: DataTypes.STRING(500),
allowNull: false allowNull: false
@ -244,12 +223,6 @@ WorkflowRequest.init(
}, },
{ {
fields: ['created_at'] fields: ['created_at']
},
{
fields: ['workflow_type']
},
{
fields: ['template_id']
} }
] ]
} }

View File

@ -1,180 +0,0 @@
import { DataTypes, Model, Optional } from 'sequelize';
import { sequelize } from '@config/database';
import { User } from './User';

// Row shape for `workflow_templates`: an admin-defined workflow template with
// JSONB configuration blobs for approval levels, form steps, user-field
// mappings and dynamic approver resolution.
interface WorkflowTemplateAttributes {
  templateId: string;
  templateName: string;
  templateCode?: string;        // unique short code
  templateDescription?: string;
  templateCategory?: string;
  workflowType?: string;
  approvalLevelsConfig?: any;   // JSONB; structure defined by consumers
  defaultTatHours?: number;
  formStepsConfig?: any;        // JSONB
  userFieldMappings?: any;      // JSONB
  dynamicApproverConfig?: any;  // JSONB
  isActive: boolean;
  isSystemTemplate: boolean;    // built-in templates vs. user-created ones
  usageCount: number;           // number of requests created from this template
  createdBy?: string;
  createdAt: Date;
  updatedAt: Date;
}

// Attributes Sequelize can fill in automatically on create.
interface WorkflowTemplateCreationAttributes extends Optional<WorkflowTemplateAttributes, 'templateId' | 'templateCode' | 'templateDescription' | 'templateCategory' | 'workflowType' | 'approvalLevelsConfig' | 'defaultTatHours' | 'formStepsConfig' | 'userFieldMappings' | 'dynamicApproverConfig' | 'createdBy' | 'createdAt' | 'updatedAt'> {}

// Sequelize model for workflow templates; optionally linked to the creating User.
class WorkflowTemplate extends Model<WorkflowTemplateAttributes, WorkflowTemplateCreationAttributes> implements WorkflowTemplateAttributes {
  public templateId!: string;          // primary key (UUID)
  public templateName!: string;
  public templateCode?: string;        // unique when present
  public templateDescription?: string;
  public templateCategory?: string;
  public workflowType?: string;
  public approvalLevelsConfig?: any;
  public defaultTatHours?: number;     // DECIMAL(10, 2); defaults to 24
  public formStepsConfig?: any;
  public userFieldMappings?: any;
  public dynamicApproverConfig?: any;
  public isActive!: boolean;           // defaults to true
  public isSystemTemplate!: boolean;   // defaults to false
  public usageCount!: number;          // defaults to 0
  public createdBy?: string;           // FK -> users.user_id
  public createdAt!: Date;
  public updatedAt!: Date;
  // Associations (populated when included in queries)
  public creator?: User;
}

// Column definitions: camelCase attributes map to snake_case DB fields.
WorkflowTemplate.init(
  {
    templateId: {
      type: DataTypes.UUID,
      defaultValue: DataTypes.UUIDV4,
      primaryKey: true,
      field: 'template_id'
    },
    templateName: {
      type: DataTypes.STRING(200),
      allowNull: false,
      field: 'template_name'
    },
    templateCode: {
      type: DataTypes.STRING(50),
      allowNull: true,
      unique: true,
      field: 'template_code'
    },
    templateDescription: {
      type: DataTypes.TEXT,
      allowNull: true,
      field: 'template_description'
    },
    templateCategory: {
      type: DataTypes.STRING(100),
      allowNull: true,
      field: 'template_category'
    },
    workflowType: {
      type: DataTypes.STRING(50),
      allowNull: true,
      field: 'workflow_type'
    },
    approvalLevelsConfig: {
      type: DataTypes.JSONB,
      allowNull: true,
      field: 'approval_levels_config'
    },
    defaultTatHours: {
      type: DataTypes.DECIMAL(10, 2),
      allowNull: true,
      defaultValue: 24, // default turnaround time of one day
      field: 'default_tat_hours'
    },
    formStepsConfig: {
      type: DataTypes.JSONB,
      allowNull: true,
      field: 'form_steps_config'
    },
    userFieldMappings: {
      type: DataTypes.JSONB,
      allowNull: true,
      field: 'user_field_mappings'
    },
    dynamicApproverConfig: {
      type: DataTypes.JSONB,
      allowNull: true,
      field: 'dynamic_approver_config'
    },
    isActive: {
      type: DataTypes.BOOLEAN,
      allowNull: false,
      defaultValue: true,
      field: 'is_active'
    },
    isSystemTemplate: {
      type: DataTypes.BOOLEAN,
      allowNull: false,
      defaultValue: false,
      field: 'is_system_template'
    },
    usageCount: {
      type: DataTypes.INTEGER,
      allowNull: false,
      defaultValue: 0,
      field: 'usage_count'
    },
    createdBy: {
      type: DataTypes.UUID,
      allowNull: true,
      field: 'created_by',
      references: {
        model: 'users',
        key: 'user_id'
      }
    },
    createdAt: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW,
      field: 'created_at'
    },
    updatedAt: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW,
      field: 'updated_at'
    }
  },
  {
    sequelize,
    modelName: 'WorkflowTemplate',
    tableName: 'workflow_templates',
    timestamps: true,
    createdAt: 'created_at',
    updatedAt: 'updated_at',
    indexes: [
      {
        unique: true,
        fields: ['template_code']
      },
      {
        fields: ['workflow_type']
      },
      {
        fields: ['is_active']
      }
    ]
  }
);

// Associations
WorkflowTemplate.belongsTo(User, {
  as: 'creator',
  foreignKey: 'createdBy',
  targetKey: 'userId'
});

export { WorkflowTemplate };

View File

@ -16,13 +16,6 @@ import { Notification } from './Notification';
import ConclusionRemark from './ConclusionRemark'; import ConclusionRemark from './ConclusionRemark';
import RequestSummary from './RequestSummary'; import RequestSummary from './RequestSummary';
import SharedSummary from './SharedSummary'; import SharedSummary from './SharedSummary';
import { DealerClaimDetails } from './DealerClaimDetails';
import { DealerProposalDetails } from './DealerProposalDetails';
import { DealerCompletionDetails } from './DealerCompletionDetails';
import { DealerProposalCostItem } from './DealerProposalCostItem';
import { WorkflowTemplate } from './WorkflowTemplate';
import { InternalOrder } from './InternalOrder';
import { ClaimBudgetTracking } from './ClaimBudgetTracking';
// Define associations // Define associations
const defineAssociations = () => { const defineAssociations = () => {
@ -121,20 +114,6 @@ const defineAssociations = () => {
sourceKey: 'userId' sourceKey: 'userId'
}); });
// InternalOrder associations
WorkflowRequest.hasOne(InternalOrder, {
as: 'internalOrder',
foreignKey: 'requestId',
sourceKey: 'requestId'
});
// ClaimBudgetTracking associations
WorkflowRequest.hasOne(ClaimBudgetTracking, {
as: 'budgetTracking',
foreignKey: 'requestId',
sourceKey: 'requestId'
});
// Note: belongsTo associations are defined in individual model files to avoid duplicate alias conflicts // Note: belongsTo associations are defined in individual model files to avoid duplicate alias conflicts
// Only hasMany associations from WorkflowRequest are defined here since they're one-way // Only hasMany associations from WorkflowRequest are defined here since they're one-way
}; };
@ -159,14 +138,7 @@ export {
Notification, Notification,
ConclusionRemark, ConclusionRemark,
RequestSummary, RequestSummary,
SharedSummary, SharedSummary
DealerClaimDetails,
DealerProposalDetails,
DealerCompletionDetails,
DealerProposalCostItem,
WorkflowTemplate,
InternalOrder,
ClaimBudgetTracking
}; };
// Export default sequelize instance // Export default sequelize instance

View File

@ -1,38 +0,0 @@
import { Router } from 'express';
import { DealerController } from '../controllers/dealer.controller';
import { authenticateToken } from '../middlewares/auth.middleware';
import { asyncHandler } from '../middlewares/errorHandler.middleware';

const router = Router();
const controller = new DealerController();

/**
 * Dealer lookup endpoints. All routes require authentication and delegate to
 * DealerController; asyncHandler forwards rejected promises to the error
 * middleware.
 *
 *   GET /api/v1/dealers                      - list all dealers
 *   GET /api/v1/dealers/search               - search by name, code, or email
 *   GET /api/v1/dealers/code/:dealerCode     - fetch a dealer by code
 *   GET /api/v1/dealers/email/:email         - fetch a dealer by email
 */
router.get('/', authenticateToken, asyncHandler(controller.getAllDealers.bind(controller)));
router.get('/search', authenticateToken, asyncHandler(controller.searchDealers.bind(controller)));
router.get('/code/:dealerCode', authenticateToken, asyncHandler(controller.getDealerByCode.bind(controller)));
router.get('/email/:email', authenticateToken, asyncHandler(controller.getDealerByEmail.bind(controller)));

export default router;

View File

@ -1,97 +0,0 @@
import { Router } from 'express';
import { DealerClaimController } from '../controllers/dealerClaim.controller';
import { authenticateToken } from '../middlewares/auth.middleware';
import { asyncHandler } from '../middlewares/errorHandler.middleware';
import multer from 'multer';
import path from 'path';

const router = Router();
const controller = new DealerClaimController();

// Upload policy: files are held in memory (streamed straight to GCS), capped
// at 10MB each, and restricted to the extensions below.
const MAX_UPLOAD_BYTES = 10 * 1024 * 1024; // 10MB
const allowedExtensions = ['.pdf', '.doc', '.docx', '.xls', '.xlsx', '.jpg', '.jpeg', '.png', '.zip'];

const upload = multer({
  storage: multer.memoryStorage(),
  limits: {
    fileSize: MAX_UPLOAD_BYTES,
  },
  fileFilter: (req, file, cb) => {
    // Reject any file whose extension is not on the allow-list.
    const ext = path.extname(file.originalname).toLowerCase();
    if (!allowedExtensions.includes(ext)) {
      cb(new Error(`File type ${ext} not allowed. Allowed types: ${allowedExtensions.join(', ')}`));
      return;
    }
    cb(null, true);
  },
});

/**
 * Dealer claim workflow endpoints (all require authentication):
 *
 *   POST /api/v1/dealer-claims                            - create a claim request
 *   GET  /api/v1/dealer-claims/:requestId                 - claim details
 *   POST /api/v1/dealer-claims/:requestId/proposal        - submit proposal (Step 1)
 *   POST /api/v1/dealer-claims/:requestId/completion      - submit completion docs (Step 5)
 *   GET  /api/v1/dealer-claims/:requestId/io/validate     - validate/fetch IO from SAP (dummy data for now)
 *   PUT  /api/v1/dealer-claims/:requestId/io              - block IO amount in SAP and persist
 *   PUT  /api/v1/dealer-claims/:requestId/e-invoice       - update e-invoice details (Step 7)
 *   PUT  /api/v1/dealer-claims/:requestId/credit-note     - update credit note details (Step 8)
 *   POST /api/v1/dealer-claims/:requestId/credit-note/send - send credit note to dealer, auto-approve Step 8
 */
router.post('/', authenticateToken, asyncHandler(controller.createClaimRequest.bind(controller)));
router.get('/:requestId', authenticateToken, asyncHandler(controller.getClaimDetails.bind(controller)));
// Proposal accepts a single document; completion accepts several named file groups.
router.post('/:requestId/proposal', authenticateToken, upload.single('proposalDocument'), asyncHandler(controller.submitProposal.bind(controller)));
router.post('/:requestId/completion', authenticateToken, upload.fields([
  { name: 'completionDocuments', maxCount: 10 },
  { name: 'activityPhotos', maxCount: 10 },
  { name: 'invoicesReceipts', maxCount: 10 },
  { name: 'attendanceSheet', maxCount: 1 },
]), asyncHandler(controller.submitCompletion.bind(controller)));
router.get('/:requestId/io/validate', authenticateToken, asyncHandler(controller.validateIO.bind(controller)));
router.put('/:requestId/io', authenticateToken, asyncHandler(controller.updateIODetails.bind(controller)));
router.put('/:requestId/e-invoice', authenticateToken, asyncHandler(controller.updateEInvoice.bind(controller)));
router.put('/:requestId/credit-note', authenticateToken, asyncHandler(controller.updateCreditNote.bind(controller)));
router.post('/:requestId/credit-note/send', authenticateToken, asyncHandler(controller.sendCreditNoteToDealer.bind(controller)));

export default router;

View File

@ -1,47 +0,0 @@
import { Router } from 'express';
import { DMSWebhookController } from '../controllers/dmsWebhook.controller';
import { asyncHandler } from '../middlewares/errorHandler.middleware';

const router = Router();
const controller = new DMSWebhookController();

/**
 * GET /api/v1/webhooks/dms/health
 * Public health check for the webhook routes (useful for testing); reports
 * the callback endpoints exposed to the DMS system.
 */
router.get('/health', (_req, res) => {
  const payload = {
    status: 'OK',
    message: 'DMS Webhook routes are active',
    endpoints: {
      invoice: 'POST /api/v1/webhooks/dms/invoice',
      creditNote: 'POST /api/v1/webhooks/dms/credit-note'
    }
  };
  res.status(200).json(payload);
});

/**
 * Callback endpoints invoked by the DMS system after document generation.
 * Public routes — authenticated via webhook signature rather than a token.
 *
 *   POST /api/v1/webhooks/dms/invoice      - invoice generation callback
 *   POST /api/v1/webhooks/dms/credit-note  - credit note generation callback
 */
router.post('/invoice', asyncHandler(controller.handleInvoiceWebhook.bind(controller)));
router.post('/credit-note', asyncHandler(controller.handleCreditNoteWebhook.bind(controller)));

export default router;

View File

@ -13,10 +13,6 @@ import dashboardRoutes from './dashboard.routes';
import notificationRoutes from './notification.routes'; import notificationRoutes from './notification.routes';
import conclusionRoutes from './conclusion.routes'; import conclusionRoutes from './conclusion.routes';
import aiRoutes from './ai.routes'; import aiRoutes from './ai.routes';
import dealerClaimRoutes from './dealerClaim.routes';
import templateRoutes from './template.routes';
import dealerRoutes from './dealer.routes';
import dmsWebhookRoutes from './dmsWebhook.routes';
const router = Router(); const router = Router();
@ -44,10 +40,6 @@ router.use('/notifications', notificationRoutes);
router.use('/conclusions', conclusionRoutes); router.use('/conclusions', conclusionRoutes);
router.use('/ai', aiRoutes); router.use('/ai', aiRoutes);
router.use('/summaries', summaryRoutes); router.use('/summaries', summaryRoutes);
router.use('/dealer-claims', dealerClaimRoutes);
router.use('/templates', templateRoutes);
router.use('/dealers', dealerRoutes);
router.use('/webhooks/dms', dmsWebhookRoutes);
// TODO: Add other route modules as they are implemented // TODO: Add other route modules as they are implemented
// router.use('/approvals', approvalRoutes); // router.use('/approvals', approvalRoutes);

View File

@ -1,53 +0,0 @@
import { Router } from 'express';
import { TemplateController } from '../controllers/template.controller';
import { authenticateToken, requireAdmin } from '../middlewares/auth.middleware';
import { asyncHandler } from '../middlewares/errorHandler.middleware';

const router = Router();
const controller = new TemplateController();

/**
 * Workflow template endpoints. Reads are open to any authenticated user;
 * writes additionally require the admin role.
 *
 *   GET    /api/v1/templates              - list templates (optional filters)
 *   GET    /api/v1/templates/active       - active templates for workflow creation
 *   GET    /api/v1/templates/:templateId  - fetch a template by ID
 *   POST   /api/v1/templates              - create a template          (admin)
 *   PUT    /api/v1/templates/:templateId  - update a template          (admin)
 *   DELETE /api/v1/templates/:templateId  - soft-delete a template     (admin)
 */
router.get('/', authenticateToken, asyncHandler(controller.listTemplates.bind(controller)));
// NOTE: '/active' is registered before '/:templateId' so it is not captured as a param.
router.get('/active', authenticateToken, asyncHandler(controller.getActiveTemplates.bind(controller)));
router.get('/:templateId', authenticateToken, asyncHandler(controller.getTemplate.bind(controller)));
router.post('/', authenticateToken, requireAdmin, asyncHandler(controller.createTemplate.bind(controller)));
router.put('/:templateId', authenticateToken, requireAdmin, asyncHandler(controller.updateTemplate.bind(controller)));
router.delete('/:templateId', authenticateToken, requireAdmin, asyncHandler(controller.deleteTemplate.bind(controller)));

export default router;

View File

@ -10,9 +10,6 @@ const userController = new UserController();
// GET /api/v1/users/search?q=<email or name> // GET /api/v1/users/search?q=<email or name>
router.get('/search', authenticateToken, asyncHandler(userController.searchUsers.bind(userController))); router.get('/search', authenticateToken, asyncHandler(userController.searchUsers.bind(userController)));
// GET /api/v1/users/search-by-displayname?displayName=John Doe
router.get('/search-by-displayname', authenticateToken, asyncHandler(userController.searchByDisplayName.bind(userController)));
// GET /api/v1/users/configurations - Get public configurations (document policy, workflow sharing, TAT settings) // GET /api/v1/users/configurations - Get public configurations (document policy, workflow sharing, TAT settings)
router.get('/configurations', authenticateToken, asyncHandler(getPublicConfigurations)); router.get('/configurations', authenticateToken, asyncHandler(getPublicConfigurations));

View File

@ -118,21 +118,8 @@ async function runMigrations(): Promise<void> {
const m25 = require('../migrations/20250126-add-pause-fields-to-workflow-requests'); const m25 = require('../migrations/20250126-add-pause-fields-to-workflow-requests');
const m26 = require('../migrations/20250126-add-pause-fields-to-approval-levels'); const m26 = require('../migrations/20250126-add-pause-fields-to-approval-levels');
const m27 = require('../migrations/20250127-migrate-in-progress-to-pending'); const m27 = require('../migrations/20250127-migrate-in-progress-to-pending');
// Base branch migrations (m28-m29)
const m28 = require('../migrations/20250130-migrate-to-vertex-ai'); const m28 = require('../migrations/20250130-migrate-to-vertex-ai');
const m29 = require('../migrations/20251203-add-user-notification-preferences'); const m29 = require('../migrations/20251203-add-user-notification-preferences');
// Dealer claim branch migrations (m30-m39)
const m30 = require('../migrations/20251210-add-workflow-type-support');
const m31 = require('../migrations/20251210-enhance-workflow-templates');
const m32 = require('../migrations/20251210-add-template-id-foreign-key');
const m33 = require('../migrations/20251210-create-dealer-claim-tables');
const m34 = require('../migrations/20251210-create-proposal-cost-items-table');
const m35 = require('../migrations/20251211-create-internal-orders-table');
const m36 = require('../migrations/20251211-create-claim-budget-tracking-table');
const m37 = require('../migrations/20251213-drop-claim-details-invoice-columns');
const m38 = require('../migrations/20251213-create-claim-invoice-credit-note-tables');
const m39 = require('../migrations/20251214-create-dealer-completion-expenses');
const m40 = require('../migrations/20251218-fix-claim-invoice-credit-note-columns');
const migrations = [ const migrations = [
{ name: '2025103000-create-users', module: m0 }, { name: '2025103000-create-users', module: m0 },
@ -163,21 +150,8 @@ async function runMigrations(): Promise<void> {
{ name: '20250126-add-pause-fields-to-workflow-requests', module: m25 }, { name: '20250126-add-pause-fields-to-workflow-requests', module: m25 },
{ name: '20250126-add-pause-fields-to-approval-levels', module: m26 }, { name: '20250126-add-pause-fields-to-approval-levels', module: m26 },
{ name: '20250127-migrate-in-progress-to-pending', module: m27 }, { name: '20250127-migrate-in-progress-to-pending', module: m27 },
// Base branch migrations (m28-m29)
{ name: '20250130-migrate-to-vertex-ai', module: m28 }, { name: '20250130-migrate-to-vertex-ai', module: m28 },
{ name: '20251203-add-user-notification-preferences', module: m29 }, { name: '20251203-add-user-notification-preferences', module: m29 },
// Dealer claim branch migrations (m30-m39)
{ name: '20251210-add-workflow-type-support', module: m30 },
{ name: '20251210-enhance-workflow-templates', module: m31 },
{ name: '20251210-add-template-id-foreign-key', module: m32 },
{ name: '20251210-create-dealer-claim-tables', module: m33 },
{ name: '20251210-create-proposal-cost-items-table', module: m34 },
{ name: '20251211-create-internal-orders-table', module: m35 },
{ name: '20251211-create-claim-budget-tracking-table', module: m36 },
{ name: '20251213-drop-claim-details-invoice-columns', module: m37 },
{ name: '20251213-create-claim-invoice-credit-note-tables', module: m38 },
{ name: '20251214-create-dealer-completion-expenses', module: m39 },
{ name: '20251218-fix-claim-invoice-credit-note-columns', module: m40 },
]; ];
const queryInterface = sequelize.getQueryInterface(); const queryInterface = sequelize.getQueryInterface();

View File

@ -1,167 +0,0 @@
/**
* Cleanup Dealer Claims Script
* Removes all dealer claim related data for a fresh start
*
* Usage: npm run cleanup:dealer-claims
*
* WARNING: This will permanently delete all CLAIM_MANAGEMENT requests and related data!
*/
import { sequelize } from '../config/database';
import { QueryTypes } from 'sequelize';
import logger from '../utils/logger';
/**
 * Deletes every CLAIM_MANAGEMENT workflow request and all dependent rows.
 *
 * Runs inside a single transaction: child tables are purged first so foreign
 * key constraints are respected, then the parent workflow_requests rows.
 * Commits on success; rolls back and rethrows on any failure.
 *
 * Fixes over the previous version:
 * - claim_budget_tracking was deleted twice (once inline, once via the helper);
 *   each table is now deleted exactly once.
 * - Removed the unused `requestIdsArray` string that interpolated IDs directly
 *   into SQL text; all deletes use parameterized replacements only.
 */
async function cleanupDealerClaims(): Promise<void> {
  const transaction = await sequelize.transaction();
  try {
    logger.info('[Cleanup] Starting dealer claim cleanup...');
    // Step 1: Find all CLAIM_MANAGEMENT request IDs
    logger.info('[Cleanup] Finding all CLAIM_MANAGEMENT requests...');
    const claimRequests = await sequelize.query<{ request_id: string }>(
      `SELECT request_id FROM workflow_requests WHERE workflow_type = 'CLAIM_MANAGEMENT'`,
      { type: QueryTypes.SELECT, transaction }
    );
    const requestIds = claimRequests.map(r => r.request_id);
    const count = requestIds.length;
    if (count === 0) {
      logger.info('[Cleanup] No CLAIM_MANAGEMENT requests found. Nothing to clean up.');
      await transaction.commit();
      return;
    }
    logger.info(`[Cleanup] Found ${count} CLAIM_MANAGEMENT request(s) to delete`);
    // Step 2: Delete in order (respecting foreign key constraints),
    // starting with child tables and finishing with workflow_requests.
    // One `?` placeholder per ID keeps the values out of the SQL text.
    const deleteWithArray = async (tableName: string, columnName: string = 'request_id') => {
      await sequelize.query(
        `DELETE FROM ${tableName} WHERE ${columnName} = ANY(ARRAY[${requestIds.map(() => '?').join(',')}]::uuid[])`,
        {
          replacements: requestIds,
          type: QueryTypes.DELETE,
          transaction
        }
      );
    };
    // Child tables first - each references workflow_requests via request_id.
    // Note: subscriptions table doesn't have request_id (it stores push
    // notification subscriptions), so it is intentionally not listed here.
    const childTables = [
      'claim_budget_tracking',
      'internal_orders',
      'dealer_proposal_cost_items',
      'dealer_completion_details',
      'dealer_proposal_details',
      'dealer_claim_details',
      'activities',
      'work_notes',
      'documents',
      'participants',
      'approval_levels',
      'notifications',
      'request_summaries',
      'shared_summaries',
      'conclusion_remarks',
      'tat_alerts',
    ];
    for (const table of childTables) {
      logger.info(`[Cleanup] Deleting from ${table}...`);
      await deleteWithArray(table);
    }
    // Finally, delete from workflow_requests
    logger.info('[Cleanup] Deleting from workflow_requests...');
    await deleteWithArray('workflow_requests');
    await transaction.commit();
    logger.info(`[Cleanup] ✅ Successfully deleted ${count} CLAIM_MANAGEMENT request(s) and all related data!`);
    logger.info('[Cleanup] Database is now clean and ready for fresh dealer claim requests.');
  } catch (error) {
    await transaction.rollback();
    logger.error('[Cleanup] ❌ Error during cleanup:', error);
    throw error;
  }
}
// Allow this module to be executed directly as a standalone script.
if (require.main === module) {
  void (async () => {
    try {
      await cleanupDealerClaims();
      logger.info('[Cleanup] Cleanup completed successfully');
      process.exit(0);
    } catch (error) {
      logger.error('[Cleanup] Cleanup failed:', error);
      process.exit(1);
    }
  })();
}
export { cleanupDealerClaims };

View File

@ -28,21 +28,8 @@ import * as m24 from '../migrations/20250126-add-paused-to-workflow-status-enum'
import * as m25 from '../migrations/20250126-add-pause-fields-to-workflow-requests'; import * as m25 from '../migrations/20250126-add-pause-fields-to-workflow-requests';
import * as m26 from '../migrations/20250126-add-pause-fields-to-approval-levels'; import * as m26 from '../migrations/20250126-add-pause-fields-to-approval-levels';
import * as m27 from '../migrations/20250127-migrate-in-progress-to-pending'; import * as m27 from '../migrations/20250127-migrate-in-progress-to-pending';
// Base branch migrations (m28-m29)
import * as m28 from '../migrations/20250130-migrate-to-vertex-ai'; import * as m28 from '../migrations/20250130-migrate-to-vertex-ai';
import * as m29 from '../migrations/20251203-add-user-notification-preferences'; import * as m29 from '../migrations/20251203-add-user-notification-preferences';
// Dealer claim branch migrations (m30-m39)
import * as m30 from '../migrations/20251210-add-workflow-type-support';
import * as m31 from '../migrations/20251210-enhance-workflow-templates';
import * as m32 from '../migrations/20251210-add-template-id-foreign-key';
import * as m33 from '../migrations/20251210-create-dealer-claim-tables';
import * as m34 from '../migrations/20251210-create-proposal-cost-items-table';
import * as m35 from '../migrations/20251211-create-internal-orders-table';
import * as m36 from '../migrations/20251211-create-claim-budget-tracking-table';
import * as m37 from '../migrations/20251213-drop-claim-details-invoice-columns';
import * as m38 from '../migrations/20251213-create-claim-invoice-credit-note-tables';
import * as m39 from '../migrations/20251214-create-dealer-completion-expenses';
import * as m40 from '../migrations/20251218-fix-claim-invoice-credit-note-columns';
interface Migration { interface Migration {
name: string; name: string;
@ -85,21 +72,8 @@ const migrations: Migration[] = [
{ name: '20250126-add-pause-fields-to-workflow-requests', module: m25 }, { name: '20250126-add-pause-fields-to-workflow-requests', module: m25 },
{ name: '20250126-add-pause-fields-to-approval-levels', module: m26 }, { name: '20250126-add-pause-fields-to-approval-levels', module: m26 },
{ name: '20250127-migrate-in-progress-to-pending', module: m27 }, { name: '20250127-migrate-in-progress-to-pending', module: m27 },
// Base branch migrations (m28-m29)
{ name: '20250130-migrate-to-vertex-ai', module: m28 }, { name: '20250130-migrate-to-vertex-ai', module: m28 },
{ name: '20251203-add-user-notification-preferences', module: m29 }, { name: '20251203-add-user-notification-preferences', module: m29 },
// Dealer claim branch migrations (m30-m39)
{ name: '20251210-add-workflow-type-support', module: m30 },
{ name: '20251210-enhance-workflow-templates', module: m31 },
{ name: '20251210-add-template-id-foreign-key', module: m32 },
{ name: '20251210-create-dealer-claim-tables', module: m33 },
{ name: '20251210-create-proposal-cost-items-table', module: m34 },
{ name: '20251211-create-internal-orders-table', module: m35 },
{ name: '20251211-create-claim-budget-tracking-table', module: m36 },
{ name: '20251213-drop-claim-details-invoice-columns', module: m37 },
{ name: '20251213-create-claim-invoice-credit-note-tables', module: m38 },
{ name: '20251214-create-dealer-completion-expenses', module: m39 },
{ name: '20251218-fix-claim-invoice-credit-note-columns', module: m40 },
]; ];
/** /**

View File

@ -1,182 +0,0 @@
/**
* Seed Dealer Users
* Creates dealer users for claim management workflow
* These users will act as action takers in the workflow
*/
import { sequelize } from '../config/database';
import { User } from '../models/User';
import logger from '../utils/logger';
// Shape of a dealer seed record. Most fields map 1:1 onto the User model;
// dealerCode is stored in User.employeeId (there is no dedicated column).
interface DealerData {
  email: string;          // login email; stored lowercased on the User row
  dealerCode: string;     // e.g. 'RE-MH-001'; persisted as employeeId
  dealerName: string;     // full dealership name; split into first/last name parts
  displayName: string;
  department?: string;
  designation?: string;
  phone?: string;
  role?: 'USER' | 'MANAGEMENT' | 'ADMIN';  // defaults to 'USER' when omitted
}
// Dealer users seeded for the claim management workflow; they act as
// action takers (proposal submission / completion documents) in the flow.
const dealers: DealerData[] = [
  {
    email: 'test.2@royalenfield.com',
    dealerCode: 'RE-MH-001',
    dealerName: 'Royal Motors Mumbai',
    displayName: 'Royal Motors Mumbai',
    department: 'Dealer Operations',
    designation: 'Dealer',
    phone: '+91-9876543210',
    role: 'USER',
  },
  {
    email: 'test.4@royalenfield.com',
    dealerCode: 'RE-DL-002',
    dealerName: 'Delhi enfield center',
    displayName: 'Delhi Enfield Center',
    department: 'Dealer Operations',
    designation: 'Dealer',
    phone: '+91-9876543211',
    role: 'USER',
  },
];
/**
 * Seeds (or updates) the dealer users listed in `dealers`.
 *
 * For each dealer:
 * - If a user with that email already exists (typically created via Okta SSO),
 *   only dealer-specific fields are updated; Okta-owned data (oktaSub, role)
 *   is preserved.
 * - Otherwise a placeholder user is created whose oktaSub follows the
 *   `dealer-<code>-pending-sso` pattern; it is reconciled on first SSO login.
 *
 * Fix: the existence lookup now lowercases the email to match how emails are
 * stored on creation (`User.create` uses `email.toLowerCase()`); previously a
 * mixed-case seed email could miss the existing row and create a duplicate.
 *
 * @throws rethrows any database error after logging it.
 */
async function seedDealers(): Promise<void> {
  try {
    logger.info('[Seed Dealers] Starting dealer user seeding...');
    for (const dealer of dealers) {
      // Check if user already exists. Normalize case to match stored emails.
      const existingUser = await User.findOne({
        where: { email: dealer.email.toLowerCase() },
      });
      if (existingUser) {
        // User already exists (likely from Okta SSO login).
        // Placeholder rows use the 'dealer-' oktaSub prefix; anything else is a real Okta subject.
        const isOktaUser = existingUser.oktaSub && !existingUser.oktaSub.startsWith('dealer-');
        if (isOktaUser) {
          logger.info(`[Seed Dealers] User ${dealer.email} already exists as Okta user (oktaSub: ${existingUser.oktaSub}), updating dealer-specific fields only...`);
        } else {
          logger.info(`[Seed Dealers] User ${dealer.email} already exists, updating dealer information...`);
        }
        // Update existing user with dealer information.
        // IMPORTANT: Preserve Okta data (oktaSub, role from Okta, etc.) and only update dealer-specific fields.
        const nameParts = dealer.dealerName.split(' ');
        const firstName = nameParts[0] || dealer.dealerName;
        const lastName = nameParts.slice(1).join(' ') || '';
        // Build update object - only update fields that don't conflict with Okta data
        const updateData: any = {
          // Always update dealer code in employeeId (this is dealer-specific, safe to update)
          employeeId: dealer.dealerCode,
        };
        // Only update displayName if it's different or if current one is empty
        if (!existingUser.displayName || existingUser.displayName !== dealer.displayName) {
          updateData.displayName = dealer.displayName;
        }
        // Only update designation if current one doesn't indicate dealer role
        if (!existingUser.designation || !existingUser.designation.toLowerCase().includes('dealer')) {
          updateData.designation = dealer.designation || existingUser.designation;
        }
        // Only update department if it's not set or if we want to ensure "Dealer Operations"
        if (!existingUser.department || existingUser.department !== 'Dealer Operations') {
          updateData.department = dealer.department || existingUser.department;
        }
        // Update phone if not set
        if (!existingUser.phone && dealer.phone) {
          updateData.phone = dealer.phone;
        }
        // Update name parts if not set
        if (!existingUser.firstName && firstName) {
          updateData.firstName = firstName;
        }
        if (!existingUser.lastName && lastName) {
          updateData.lastName = lastName;
        }
        await existingUser.update(updateData);
        if (isOktaUser) {
          logger.info(`[Seed Dealers] ✅ Updated existing Okta user ${dealer.email} with dealer code: ${dealer.dealerCode}`);
          logger.info(`[Seed Dealers] Preserved Okta data: oktaSub=${existingUser.oktaSub}, role=${existingUser.role}`);
        } else {
          logger.info(`[Seed Dealers] ✅ Updated user ${dealer.email} with dealer code: ${dealer.dealerCode}`);
        }
      } else {
        // User doesn't exist - create new dealer user.
        // NOTE: If dealer is an Okta user, they should login via SSO first to be created automatically.
        // This creates a placeholder user that will be updated when they login via SSO.
        logger.warn(`[Seed Dealers] User ${dealer.email} not found in database. Creating placeholder user...`);
        logger.warn(`[Seed Dealers] ⚠️ If this user is an Okta user, they should login via SSO first to be created automatically.`);
        logger.warn(`[Seed Dealers] ⚠️ The oktaSub will be updated when they login via SSO.`);
        // Generate a UUID for userId
        const { v4: uuidv4 } = require('uuid');
        const userId = uuidv4();
        const nameParts = dealer.dealerName.split(' ');
        const firstName = nameParts[0] || dealer.dealerName;
        const lastName = nameParts.slice(1).join(' ') || '';
        await User.create({
          userId,
          email: dealer.email.toLowerCase(),
          displayName: dealer.displayName,
          firstName,
          lastName,
          department: dealer.department || 'Dealer Operations',
          designation: dealer.designation || 'Dealer',
          phone: dealer.phone,
          role: dealer.role || 'USER',
          employeeId: dealer.dealerCode, // Store dealer code in employeeId field
          isActive: true,
          // Set placeholder oktaSub - will be updated when user logs in via SSO.
          // Using a recognizable pattern so we know it's a placeholder.
          oktaSub: `dealer-${dealer.dealerCode}-pending-sso`,
          emailNotificationsEnabled: true,
          pushNotificationsEnabled: false,
          inAppNotificationsEnabled: true,
          createdAt: new Date(),
          updatedAt: new Date(),
        } as any);
        logger.info(`[Seed Dealers] ⚠️ Created placeholder dealer user: ${dealer.email} (${dealer.dealerCode})`);
        logger.info(`[Seed Dealers] ⚠️ User should login via SSO to update oktaSub field with real Okta subject ID`);
      }
    }
    logger.info('[Seed Dealers] ✅ Dealer seeding completed successfully');
  } catch (error) {
    logger.error('[Seed Dealers] ❌ Error seeding dealers:', error);
    throw error;
  }
}
// Allow direct execution as a standalone seeding script.
if (require.main === module) {
  void (async () => {
    try {
      await sequelize.authenticate();
      logger.info('[Seed Dealers] Database connection established');
      await seedDealers();
      logger.info('[Seed Dealers] Seeding completed');
      process.exit(0);
    } catch (error) {
      logger.error('[Seed Dealers] Seeding failed:', error);
      process.exit(1);
    }
  })();
}
export { seedDealers, dealers };

View File

@ -12,7 +12,6 @@ import { notificationService } from './notification.service';
import { activityService } from './activity.service'; import { activityService } from './activity.service';
import { tatSchedulerService } from './tatScheduler.service'; import { tatSchedulerService } from './tatScheduler.service';
import { emitToRequestRoom } from '../realtime/socket'; import { emitToRequestRoom } from '../realtime/socket';
import { DealerClaimService } from './dealerClaim.service';
export class ApprovalService { export class ApprovalService {
async approveLevel(levelId: string, action: ApprovalAction, _userId: string, requestMetadata?: { ipAddress?: string | null; userAgent?: string | null }): Promise<ApprovalLevel | null> { async approveLevel(levelId: string, action: ApprovalAction, _userId: string, requestMetadata?: { ipAddress?: string | null; userAgent?: string | null }): Promise<ApprovalLevel | null> {
@ -426,61 +425,14 @@ export class ApprovalService {
); );
logger.info(`Approved level ${level.levelNumber}. Activated next level ${nextLevelNumber} for workflow ${level.requestId}`); logger.info(`Approved level ${level.levelNumber}. Activated next level ${nextLevelNumber} for workflow ${level.requestId}`);
// Notify next approver
// Check if this is Department Lead approval in a claim management workflow, and next level is Activity Creation (auto-step)
const workflowType = (wf as any)?.workflowType;
const isClaimManagement = workflowType === 'CLAIM_MANAGEMENT';
// Check if current level is Department Lead (by levelName, not hardcoded step number)
const currentLevelName = (level.levelName || '').toLowerCase();
const isDeptLeadApproval = currentLevelName.includes('department lead') || level.levelNumber === 3;
// Check if next level is Activity Creation (by levelName or approverEmail, not hardcoded step number)
const nextLevelName = (nextLevel?.levelName || '').toLowerCase();
const nextLevelEmail = (nextLevel?.approverEmail || '').toLowerCase();
const isActivityCreationNext = nextLevelName.includes('activity creation') ||
(nextLevelEmail === 'system@royalenfield.com' && nextLevelNumber > level.levelNumber);
// Check if current level is Requestor Claim Approval (Step 6) and next is E-Invoice Generation (Step 7)
const currentLevelNameForStep6 = (level.levelName || '').toLowerCase();
const isRequestorClaimApproval = currentLevelNameForStep6.includes('requestor') &&
(currentLevelNameForStep6.includes('claim') || currentLevelNameForStep6.includes('approval')) ||
level.levelNumber === 6;
const nextLevelNameForStep7 = (nextLevel?.levelName || '').toLowerCase();
const nextLevelEmailForStep7 = (nextLevel?.approverEmail || '').toLowerCase();
const isEInvoiceGenerationNext = nextLevelNameForStep7.includes('e-invoice') ||
nextLevelNameForStep7.includes('invoice generation') ||
(nextLevelEmailForStep7 === 'system@royalenfield.com' && nextLevelNumber > level.levelNumber);
if (isClaimManagement && isDeptLeadApproval && isActivityCreationNext && nextLevel) {
// Activity Creation is an auto-step - process it automatically
logger.info(`[Approval] Department Lead approved for claim management workflow. Auto-processing Activity Creation (Level ${nextLevelNumber})`);
try {
const dealerClaimService = new DealerClaimService();
await dealerClaimService.processActivityCreation(level.requestId);
logger.info(`[Approval] Activity Creation auto-processing completed for request ${level.requestId}`);
} catch (step4Error) {
logger.error(`[Approval] Error auto-processing Activity Creation for request ${level.requestId}:`, step4Error);
// Don't fail the Department Lead approval if Activity Creation processing fails - log and continue
}
} else if (isClaimManagement && isRequestorClaimApproval && isEInvoiceGenerationNext && nextLevel && nextLevel.approverEmail === 'system@royalenfield.com') {
// E-Invoice Generation is an auto-step - activate it but don't process invoice generation
// Invoice generation will be handled by DMS webhook when invoice is created
logger.info(`[Approval] Requestor Claim Approval approved for claim management workflow. E-Invoice Generation (Level ${nextLevelNumber}) activated. Waiting for DMS webhook to generate invoice.`);
// E-Invoice Generation will remain in IN_PROGRESS until webhook creates invoice and auto-approves it
// Continue with normal flow to activate E-Invoice Generation
}
if (wf && nextLevel) { if (wf && nextLevel) {
// Normal flow - notify next approver (skip for auto-steps) await notificationService.sendToUsers([ (nextLevel as any).approverId ], {
// Check if it's an auto-step by checking approverEmail or levelName title: `Action required: ${(wf as any).requestNumber}`,
const isAutoStep = (nextLevel as any).approverEmail === 'system@royalenfield.com' body: `${(wf as any).title}`,
|| (nextLevel as any).approverName === 'System Auto-Process' requestNumber: (wf as any).requestNumber,
|| (nextLevel as any).levelName === 'Activity Creation' url: `/request/${(wf as any).requestNumber}`
|| (nextLevel as any).levelName === 'E-Invoice Generation'; });
// Log approval activity
activityService.log({ activityService.log({
requestId: level.requestId, requestId: level.requestId,
type: 'approval', type: 'approval',
@ -491,87 +443,6 @@ export class ApprovalService {
ipAddress: requestMetadata?.ipAddress || undefined, ipAddress: requestMetadata?.ipAddress || undefined,
userAgent: requestMetadata?.userAgent || undefined userAgent: requestMetadata?.userAgent || undefined
}); });
// Log assignment activity for next level (when it becomes active)
// IMPORTANT: Skip notifications and assignment logging for system/auto-steps
// System steps are: Activity Creation (Step 4), E-Invoice Generation (Step 7), and any step with system@royalenfield.com
// These steps are processed automatically and should NOT trigger notifications
if (!isAutoStep && (nextLevel as any).approverId && (nextLevel as any).approverId !== 'system') {
// Additional checks: ensure approverEmail and approverName are not system-related
// This prevents notifications to system accounts even if they pass other checks
const approverEmail = (nextLevel as any).approverEmail || '';
const approverName = (nextLevel as any).approverName || '';
const isSystemEmail = approverEmail.toLowerCase() === 'system@royalenfield.com'
|| approverEmail.toLowerCase().includes('system');
const isSystemName = approverName.toLowerCase() === 'system auto-process'
|| approverName.toLowerCase().includes('system');
// EXCLUDE all system-related steps from notifications
// Only send notifications to real users, NOT system processes
if (!isSystemEmail && !isSystemName) {
// Send notification to next approver (only for real users, not system processes)
// This will send both in-app and email notifications
const nextApproverId = (nextLevel as any).approverId;
const nextApproverName = (nextLevel as any).approverName || (nextLevel as any).approverEmail || 'approver';
logger.info(`[Approval] Sending assignment notification to next approver: ${nextApproverName} (${nextApproverId}) at level ${nextLevelNumber} for request ${(wf as any).requestNumber}`);
await notificationService.sendToUsers([ nextApproverId ], {
title: `Action required: ${(wf as any).requestNumber}`,
body: `${(wf as any).title}`,
requestNumber: (wf as any).requestNumber,
requestId: (wf as any).requestId,
url: `/request/${(wf as any).requestNumber}`,
type: 'assignment',
priority: 'HIGH',
actionRequired: true
});
logger.info(`[Approval] Assignment notification sent successfully to ${nextApproverName} for level ${nextLevelNumber}`);
// Log assignment activity for the next approver
activityService.log({
requestId: level.requestId,
type: 'assignment',
user: { userId: level.approverId, name: level.approverName },
timestamp: new Date().toISOString(),
action: 'Assigned to approver',
details: `Request assigned to ${nextApproverName} for ${(nextLevel as any).levelName || `level ${nextLevelNumber}`}`,
ipAddress: requestMetadata?.ipAddress || undefined,
userAgent: requestMetadata?.userAgent || undefined
});
} else {
logger.info(`[Approval] Skipping notification for system process: ${approverEmail} at level ${nextLevelNumber}`);
}
} else {
logger.info(`[Approval] Skipping notification for auto-step at level ${nextLevelNumber}`);
}
// Notify initiator when dealer submits documents (Dealer Proposal or Dealer Completion Documents approval in claim management)
const workflowType = (wf as any)?.workflowType;
const isClaimManagement = workflowType === 'CLAIM_MANAGEMENT';
const levelName = (level.levelName || '').toLowerCase();
const isDealerProposalApproval = levelName.includes('dealer') && levelName.includes('proposal') || level.levelNumber === 1;
const isDealerCompletionApproval = levelName.includes('dealer') && (levelName.includes('completion') || levelName.includes('documents')) || level.levelNumber === 5;
if (isClaimManagement && (isDealerProposalApproval || isDealerCompletionApproval) && (wf as any).initiatorId) {
const stepMessage = isDealerProposalApproval
? 'Dealer proposal has been submitted and is now under review.'
: 'Dealer completion documents have been submitted and are now under review.';
await notificationService.sendToUsers([(wf as any).initiatorId], {
title: isDealerProposalApproval ? 'Proposal Submitted' : 'Completion Documents Submitted',
body: `Your claim request "${(wf as any).title}" - ${stepMessage}`,
requestNumber: (wf as any).requestNumber,
requestId: (wf as any).requestId,
url: `/request/${(wf as any).requestNumber}`,
type: 'approval',
priority: 'MEDIUM',
actionRequired: false
});
logger.info(`[Approval] Sent notification to initiator for ${isDealerProposalApproval ? 'Dealer Proposal Submission' : 'Dealer Completion Documents'} approval in claim management workflow`);
}
} }
} else { } else {
// No next level found but not final approver - this shouldn't happen // No next level found but not final approver - this shouldn't happen

View File

@ -7,230 +7,6 @@ import logger, { logAuthEvent } from '../utils/logger';
import axios from 'axios'; import axios from 'axios';
export class AuthService { export class AuthService {
/**
 * Fetch full user details from the Okta Users API (profile with manager,
 * employeeID, memberOf, etc.), which exposes more attributes than the OIDC
 * userinfo endpoint.
 *
 * Lookup strategy: email first (preferred), then oktaSub as a fallback.
 * Returns the raw Okta Users API response object, or `null` whenever the API
 * token is not configured or both lookups fail — callers then fall back to
 * the userinfo endpoint.
 *
 * Fix: removed the unused local `usersApiResponse` that was declared but
 * never read or assigned.
 *
 * NOTE(review): `accessToken` is currently unused here (the SSWS API token is
 * used instead); kept to preserve the call signature.
 */
private async fetchUserFromOktaUsersAPI(oktaSub: string, email: string, accessToken: string): Promise<any> {
  try {
    // Check if API token is configured; without it the Users API cannot be called.
    if (!ssoConfig.oktaApiToken || ssoConfig.oktaApiToken.trim() === '') {
      logger.info('OKTA_API_TOKEN not configured, will use userinfo endpoint as fallback');
      return null;
    }
    // First attempt: Use email (preferred method as shown in curl example)
    if (email) {
      const usersApiEndpoint = `${ssoConfig.oktaDomain}/api/v1/users/${encodeURIComponent(email)}`;
      // Truncate the identifier in logs to avoid writing full PII.
      logger.info('Fetching user from Okta Users API (using email)', {
        endpoint: usersApiEndpoint.replace(email, email.substring(0, 5) + '...'),
        hasApiToken: !!ssoConfig.oktaApiToken,
      });
      try {
        const response = await axios.get(usersApiEndpoint, {
          headers: {
            'Authorization': `SSWS ${ssoConfig.oktaApiToken}`,
            'Accept': 'application/json',
          },
          validateStatus: (status) => status < 500, // Don't throw on 4xx errors
        });
        if (response.status === 200 && response.data) {
          logger.info('Successfully fetched user from Okta Users API (using email)', {
            userId: response.data.id,
            hasProfile: !!response.data.profile,
          });
          return response.data;
        }
      } catch (emailError: any) {
        // Non-fatal: fall through to the oktaSub lookup below.
        logger.warn('Users API lookup with email failed, will try with oktaSub', {
          status: emailError.response?.status,
          error: emailError.message,
        });
      }
    }
    // Second attempt: Use oktaSub (user ID) if email lookup failed
    if (oktaSub) {
      const usersApiEndpoint = `${ssoConfig.oktaDomain}/api/v1/users/${encodeURIComponent(oktaSub)}`;
      logger.info('Fetching user from Okta Users API (using oktaSub)', {
        endpoint: usersApiEndpoint.replace(oktaSub, oktaSub.substring(0, 10) + '...'),
        hasApiToken: !!ssoConfig.oktaApiToken,
      });
      try {
        const response = await axios.get(usersApiEndpoint, {
          headers: {
            'Authorization': `SSWS ${ssoConfig.oktaApiToken}`,
            'Accept': 'application/json',
          },
          validateStatus: (status) => status < 500,
        });
        if (response.status === 200 && response.data) {
          logger.info('Successfully fetched user from Okta Users API (using oktaSub)', {
            userId: response.data.id,
            hasProfile: !!response.data.profile,
          });
          return response.data;
        } else {
          logger.warn('Okta Users API returned non-200 status (oktaSub lookup)', {
            status: response.status,
            statusText: response.statusText,
          });
        }
      } catch (oktaSubError: any) {
        logger.warn('Users API lookup with oktaSub also failed', {
          status: oktaSubError.response?.status,
          error: oktaSubError.message,
        });
      }
    }
    // Both lookups exhausted without a usable response.
    return null;
  } catch (error: any) {
    logger.warn('Failed to fetch from Okta Users API, will use userinfo fallback', {
      error: error.message,
      status: error.response?.status,
    });
    return null;
  }
}
/**
 * Extract user data from an Okta Users API response into the internal
 * SSOUserData shape.
 *
 * Reads fields from `oktaUserResponse.profile`, trying multiple attribute
 * spellings (e.g. employeeID / employeeId / employee_id) in priority order.
 * Returns `null` when the required oktaSub or email is missing, or when
 * extraction throws — callers then fall back to the userinfo extraction path.
 */
private extractUserDataFromUsersAPI(oktaUserResponse: any, oktaSub: string): SSOUserData | null {
  try {
    const profile = oktaUserResponse.profile || {};
    // Each field falls back through alternate Okta attribute names; `|| undefined`
    // normalizes empty strings / nulls to undefined so they are omitted downstream.
    const userData: SSOUserData = {
      oktaSub: oktaSub || oktaUserResponse.id || '',
      email: profile.email || profile.login || '',
      employeeId: profile.employeeID || profile.employeeId || profile.employee_id || undefined,
      firstName: profile.firstName || undefined,
      lastName: profile.lastName || undefined,
      displayName: profile.displayName || undefined,
      department: profile.department || undefined,
      designation: profile.title || profile.designation || undefined,
      phone: profile.mobilePhone || profile.phone || profile.phoneNumber || undefined,
      manager: profile.manager || undefined, // Store manager name if available
      jobTitle: profile.title || undefined,
      postalAddress: profile.postalAddress || undefined,
      mobilePhone: profile.mobilePhone || undefined,
      secondEmail: profile.secondEmail || profile.second_email || undefined,
      adGroups: Array.isArray(profile.memberOf) ? profile.memberOf : undefined,
    };
    // Validate required fields - without both identifiers the record is unusable.
    if (!userData.oktaSub || !userData.email) {
      logger.warn('Users API response missing required fields (oktaSub or email)');
      return null;
    }
    // Structured audit log of what was (and was not) extracted.
    logger.info('Extracted user data from Okta Users API', {
      oktaSub: userData.oktaSub,
      email: userData.email,
      employeeId: userData.employeeId || 'not provided',
      hasManager: !!userData.manager,
      manager: userData.manager || 'not provided',
      hasDepartment: !!userData.department,
      hasDesignation: !!userData.designation,
      designation: userData.designation || 'not provided',
      hasJobTitle: !!userData.jobTitle,
      jobTitle: userData.jobTitle || 'not provided',
      hasTitle: !!(userData.jobTitle || userData.designation),
      hasAdGroups: !!userData.adGroups && Array.isArray(userData.adGroups) && userData.adGroups.length > 0,
      adGroupsCount: userData.adGroups && Array.isArray(userData.adGroups) ? userData.adGroups.length : 0,
      adGroups: userData.adGroups && Array.isArray(userData.adGroups) ? userData.adGroups.slice(0, 5) : 'none', // Log first 5 groups
    });
    return userData;
  } catch (error) {
    logger.error('Error extracting user data from Users API response', error);
    return null;
  }
}
/**
 * Extract user data from the Okta OIDC userinfo endpoint (fallback path used
 * when the Users API is unavailable or unconfigured).
 *
 * Unlike the Users API extractor, this reads standard OIDC claims (given_name,
 * family_name, phone_number, ...) with custom-attribute fallbacks, and throws
 * instead of returning null when the subject identifier is missing.
 *
 * @throws Error when neither `oktaSub` nor `oktaUser.sub` is present.
 */
private extractUserDataFromUserInfo(oktaUser: any, oktaSub: string): SSOUserData {
  // Extract oktaSub (required)
  const sub = oktaSub || oktaUser.sub || '';
  if (!sub) {
    throw new Error('Okta sub (subject identifier) is required but not found in response');
  }
  // Extract employeeId (optional), trying several custom claim spellings.
  const employeeId =
    oktaUser.employeeId ||
    oktaUser.employee_id ||
    oktaUser.empId ||
    oktaUser.employeeNumber ||
    undefined;
  const userData: SSOUserData = {
    oktaSub: sub,
    email: oktaUser.email || '',
    employeeId: employeeId,
  };
  // Validate: Ensure we're not accidentally using oktaSub as employeeId
  if (employeeId === sub) {
    logger.warn('Warning: employeeId matches oktaSub - this should not happen unless explicitly set in Okta', {
      oktaSub: sub,
      employeeId,
    });
    userData.employeeId = undefined;
  }
  // Only set optional fields if they have values (standard OIDC claim first,
  // then the equivalent custom-attribute name as fallback).
  if (oktaUser.given_name || oktaUser.firstName) {
    userData.firstName = oktaUser.given_name || oktaUser.firstName;
  }
  if (oktaUser.family_name || oktaUser.lastName) {
    userData.lastName = oktaUser.family_name || oktaUser.lastName;
  }
  if (oktaUser.name) {
    userData.displayName = oktaUser.name;
  }
  if (oktaUser.department) {
    userData.department = oktaUser.department;
  }
  if (oktaUser.title || oktaUser.designation) {
    // Title feeds both designation and jobTitle fields.
    userData.designation = oktaUser.title || oktaUser.designation;
    userData.jobTitle = oktaUser.title || oktaUser.designation;
  }
  if (oktaUser.phone_number || oktaUser.phone) {
    userData.phone = oktaUser.phone_number || oktaUser.phone;
  }
  if (oktaUser.manager) {
    userData.manager = oktaUser.manager;
  }
  if (oktaUser.mobilePhone) {
    userData.mobilePhone = oktaUser.mobilePhone;
  }
  if (oktaUser.address || oktaUser.postalAddress) {
    userData.postalAddress = oktaUser.address || oktaUser.postalAddress;
  }
  if (oktaUser.secondEmail) {
    userData.secondEmail = oktaUser.secondEmail;
  }
  if (Array.isArray(oktaUser.memberOf)) {
    userData.adGroups = oktaUser.memberOf;
  }
  return userData;
}
/** /**
* Handle SSO callback from frontend * Handle SSO callback from frontend
* Creates new user or updates existing user based on employeeId * Creates new user or updates existing user based on employeeId
@ -283,7 +59,6 @@ export class AuthService {
if (userData.department) userUpdateData.department = userData.department; if (userData.department) userUpdateData.department = userData.department;
if (userData.designation) userUpdateData.designation = userData.designation; if (userData.designation) userUpdateData.designation = userData.designation;
if (userData.phone) userUpdateData.phone = userData.phone; if (userData.phone) userUpdateData.phone = userData.phone;
if (userData.manager) userUpdateData.manager = userData.manager; // Manager name from Okta
// Check if user exists by email (primary identifier) // Check if user exists by email (primary identifier)
let user = await User.findOne({ let user = await User.findOne({
@ -313,7 +88,6 @@ export class AuthService {
department: userData.department || null, department: userData.department || null,
designation: userData.designation || null, designation: userData.designation || null,
phone: userData.phone || null, phone: userData.phone || null,
manager: userData.manager || null, // Manager name from Okta
isActive: true, isActive: true,
role: 'USER', role: 'USER',
lastLogin: new Date() lastLogin: new Date()
@ -532,42 +306,51 @@ export class AuthService {
}, },
}); });
const oktaUserInfo = userInfoResponse.data; const oktaUser = userInfoResponse.data;
const oktaSub = oktaUserInfo.sub || '';
// Step 3: Extract user data from Okta response
const oktaSub = oktaUser.sub || '';
if (!oktaSub) { if (!oktaSub) {
throw new Error('Okta sub (subject identifier) not found in response'); throw new Error('Okta sub (subject identifier) not found in response');
} }
// Step 3: Try Users API first (provides full profile including manager, employeeID, etc.) const employeeId =
let userData: SSOUserData | null = null; oktaUser.employeeId ||
const usersApiResponse = await this.fetchUserFromOktaUsersAPI(oktaSub, oktaUserInfo.email || username, access_token); oktaUser.employee_id ||
oktaUser.empId ||
oktaUser.employeeNumber ||
undefined;
if (usersApiResponse) { const userData: SSOUserData = {
userData = this.extractUserDataFromUsersAPI(usersApiResponse, oktaSub); oktaSub: oktaSub,
} email: oktaUser.email || username,
employeeId: employeeId,
};
// Fallback to userinfo endpoint if Users API failed or returned null // Add optional fields
if (!userData) { if (oktaUser.given_name || oktaUser.firstName) {
logger.info('Using userinfo endpoint as fallback (Users API unavailable or failed)'); userData.firstName = oktaUser.given_name || oktaUser.firstName;
userData = this.extractUserDataFromUserInfo(oktaUserInfo, oktaSub);
// Override email with username if needed
if (!userData.email && username) {
userData.email = username;
} }
if (oktaUser.family_name || oktaUser.lastName) {
userData.lastName = oktaUser.family_name || oktaUser.lastName;
}
if (oktaUser.name) {
userData.displayName = oktaUser.name;
}
if (oktaUser.department) {
userData.department = oktaUser.department;
}
if (oktaUser.title || oktaUser.designation) {
userData.designation = oktaUser.title || oktaUser.designation;
}
if (oktaUser.phone_number || oktaUser.phone) {
userData.phone = oktaUser.phone_number || oktaUser.phone;
} }
logger.info('User data extracted from Okta', { logger.info('User data extracted from Okta', {
email: userData.email, email: userData.email,
employeeId: userData.employeeId || 'not provided',
hasEmployeeId: !!userData.employeeId, hasEmployeeId: !!userData.employeeId,
hasName: !!userData.displayName, hasName: !!userData.displayName,
hasManager: !!(userData as any).manager,
manager: (userData as any).manager || 'not provided',
hasDepartment: !!userData.department,
hasDesignation: !!userData.designation,
hasJobTitle: !!userData.jobTitle,
source: usersApiResponse ? 'Users API' : 'userinfo endpoint',
}); });
// Step 4: Create/update user in our database // Step 4: Create/update user in our database
@ -700,8 +483,7 @@ export class AuthService {
hasIdToken: !!id_token, hasIdToken: !!id_token,
}); });
// Step 1: Try to get user info from Okta Users API (full profile with manager, employeeID, etc.) // Get user info from Okta using access token
// First, get oktaSub from userinfo to use as user ID
const userInfoEndpoint = `${ssoConfig.oktaDomain}/oauth2/default/v1/userinfo`; const userInfoEndpoint = `${ssoConfig.oktaDomain}/oauth2/default/v1/userinfo`;
const userInfoResponse = await axios.get(userInfoEndpoint, { const userInfoResponse = await axios.get(userInfoEndpoint, {
headers: { headers: {
@ -709,41 +491,98 @@ export class AuthService {
}, },
}); });
const oktaUserInfo = userInfoResponse.data; const oktaUser = userInfoResponse.data;
const oktaSub = oktaUserInfo.sub || '';
// Log the full Okta response to see what attributes are available
logger.info('Okta userinfo response received', {
availableKeys: Object.keys(oktaUser || {}),
sub: oktaUser.sub,
email: oktaUser.email,
// Log specific fields that might be employeeId
employeeId: oktaUser.employeeId || oktaUser.employee_id || oktaUser.empId || 'NOT_FOUND',
// Log other common custom attributes
customAttributes: Object.keys(oktaUser || {}).filter(key =>
key.includes('employee') || key.includes('emp') || key.includes('id')
),
});
// Extract oktaSub (required) - this is the Okta subject identifier
// IMPORTANT: Do NOT use oktaSub for employeeId - they are separate fields
const oktaSub = oktaUser.sub || '';
if (!oktaSub) { if (!oktaSub) {
throw new Error('Okta sub (subject identifier) is required but not found in response'); throw new Error('Okta sub (subject identifier) is required but not found in response');
} }
// Try Users API first (provides full profile including manager, employeeID, etc.) // Extract employeeId (optional) - ONLY from custom Okta attributes, NOT from sub
let userData: SSOUserData | null = null; // Check multiple possible sources for actual employee ID attribute:
const usersApiResponse = await this.fetchUserFromOktaUsersAPI(oktaSub, oktaUserInfo.email || '', access_token); // 1. Custom Okta attribute: employeeId, employee_id, empId, employeeNumber
// 2. Leave undefined if not found - DO NOT use oktaSub/sub as fallback
const employeeId =
oktaUser.employeeId ||
oktaUser.employee_id ||
oktaUser.empId ||
oktaUser.employeeNumber ||
undefined; // Explicitly undefined if not found - oktaSub is stored separately
if (usersApiResponse) { // Extract user data from Okta response
userData = this.extractUserDataFromUsersAPI(usersApiResponse, oktaSub); // Adjust these mappings based on your Okta user profile attributes
// Only include fields that have values, leave others undefined for optional handling
const userData: SSOUserData = {
oktaSub: oktaSub, // Required - Okta subject identifier (stored in okta_sub column)
email: oktaUser.email || '',
employeeId: employeeId, // Optional - Only if provided as custom attribute, NOT oktaSub
};
// Validate: Ensure we're not accidentally using oktaSub as employeeId
if (employeeId === oktaSub) {
logger.warn('Warning: employeeId matches oktaSub - this should not happen unless explicitly set in Okta', {
oktaSub,
employeeId,
});
// Clear employeeId to avoid confusion - user can update it later if needed
userData.employeeId = undefined;
} }
// Fallback to userinfo endpoint if Users API failed or returned null logger.info('User data extracted from Okta', {
if (!userData) { oktaSub: oktaSub,
logger.info('Using userinfo endpoint as fallback (Users API unavailable or failed)'); email: oktaUser.email,
userData = this.extractUserDataFromUserInfo(oktaUserInfo, oktaSub); employeeId: employeeId || 'not provided (optional)',
employeeIdSource: oktaUser.employeeId ? 'employeeId attribute' :
oktaUser.employee_id ? 'employee_id attribute' :
oktaUser.empId ? 'empId attribute' :
'not found',
note: 'Using email as primary identifier, oktaSub for uniqueness',
});
// Only set optional fields if they have values
if (oktaUser.given_name || oktaUser.firstName) {
userData.firstName = oktaUser.given_name || oktaUser.firstName;
}
if (oktaUser.family_name || oktaUser.lastName) {
userData.lastName = oktaUser.family_name || oktaUser.lastName;
}
if (oktaUser.name) {
userData.displayName = oktaUser.name;
}
if (oktaUser.department) {
userData.department = oktaUser.department;
}
if (oktaUser.title || oktaUser.designation) {
userData.designation = oktaUser.title || oktaUser.designation;
}
if (oktaUser.phone_number || oktaUser.phone) {
userData.phone = oktaUser.phone_number || oktaUser.phone;
} }
logger.info('Final extracted user data', { logger.info('Extracted user data from Okta', {
oktaSub: userData.oktaSub, employeeId: userData.employeeId,
email: userData.email, email: userData.email,
employeeId: userData.employeeId || 'not provided', hasFirstName: !!userData.firstName,
hasManager: !!(userData as any).manager, hasLastName: !!userData.lastName,
manager: (userData as any).manager || 'not provided', hasDisplayName: !!userData.displayName,
hasDepartment: !!userData.department, hasDepartment: !!userData.department,
hasDesignation: !!userData.designation, hasDesignation: !!userData.designation,
hasJobTitle: !!userData.jobTitle, hasPhone: !!userData.phone,
hasPostalAddress: !!userData.postalAddress,
hasMobilePhone: !!userData.mobilePhone,
hasSecondEmail: !!userData.secondEmail,
hasAdGroups: !!userData.adGroups && Array.isArray(userData.adGroups) && userData.adGroups.length > 0,
source: usersApiResponse ? 'Users API' : 'userinfo endpoint',
}); });
// Handle SSO callback to create/update user and generate our tokens // Handle SSO callback to create/update user and generate our tokens

View File

@ -1,165 +0,0 @@
/**
* Dealer Service
* Handles dealer-related operations for claim management
*/
import { User } from '../models/User';
import { Op } from 'sequelize';
import logger from '../utils/logger';
// Response shape shared by all dealer lookup helpers in this module.
export interface DealerInfo {
// Internal user ID from the users table.
userId: string;
email: string;
// Dealer code sourced from employeeId; may be a synthetic "RE-MH-XXX" value
// in getAllDealers() when employeeId is empty (response-only, not persisted).
dealerCode: string;
// Both name fields carry displayName, falling back to email when unset.
dealerName: string;
displayName: string;
phone?: string;
department?: string;
designation?: string;
}
/**
* Get all dealers (users with designation = 'dealer')
* Note: If employeeId is empty, generates a dummy dealerCode like "RE-MH-001" in the response
* The database employeeId remains unchanged - this is only for the API response
*/
/**
 * Fetch every active user whose designation is 'dealer', ordered by display name.
 * When a dealer has no employeeId, a synthetic dealer code like "RE-MH-001" is
 * produced for the response only — the stored employeeId is never modified.
 */
export async function getAllDealers(): Promise<DealerInfo[]> {
  try {
    const dealerRows = await User.findAll({
      where: {
        designation: { [Op.iLike]: 'dealer' } as any,
        isActive: true,
      },
      order: [['displayName', 'ASC']],
    });

    return dealerRows.map((row, position) => {
      // Response-only fallback code, zero-padded to three digits (001, 002, …).
      const code =
        row.employeeId && row.employeeId.trim() !== ''
          ? row.employeeId
          : `RE-MH-${String(position + 1).padStart(3, '0')}`;
      const label = row.displayName || row.email;
      return {
        userId: row.userId,
        email: row.email,
        dealerCode: code,
        dealerName: label,
        displayName: label,
        phone: row.phone || undefined,
        department: row.department || undefined,
        designation: row.designation || undefined,
      };
    });
  } catch (error) {
    logger.error('[DealerService] Error fetching dealers:', error);
    throw error;
  }
}
/**
* Get dealer by code
*/
/**
 * Look up a single active dealer by dealer code (matched against employeeId).
 * Returns null when no active dealer has that code.
 */
export async function getDealerByCode(dealerCode: string): Promise<DealerInfo | null> {
  try {
    const match = await User.findOne({
      where: {
        employeeId: dealerCode,
        designation: { [Op.iLike]: 'dealer' } as any,
        isActive: true,
      },
    });
    if (!match) {
      return null;
    }

    const label = match.displayName || match.email;
    return {
      userId: match.userId,
      email: match.email,
      dealerCode: match.employeeId || '',
      dealerName: label,
      displayName: label,
      phone: match.phone || undefined,
      department: match.department || undefined,
      designation: match.designation || undefined,
    };
  } catch (error) {
    logger.error('[DealerService] Error fetching dealer by code:', error);
    throw error;
  }
}
/**
* Get dealer by email
*/
/**
 * Look up a single active dealer by email (lower-cased before matching).
 * Returns null when no active dealer has that email.
 */
export async function getDealerByEmail(email: string): Promise<DealerInfo | null> {
  try {
    const match = await User.findOne({
      where: {
        email: email.toLowerCase(),
        designation: { [Op.iLike]: 'dealer' } as any,
        isActive: true,
      },
    });
    if (!match) {
      return null;
    }

    const label = match.displayName || match.email;
    return {
      userId: match.userId,
      email: match.email,
      dealerCode: match.employeeId || '',
      dealerName: label,
      displayName: label,
      phone: match.phone || undefined,
      department: match.department || undefined,
      designation: match.designation || undefined,
    };
  } catch (error) {
    logger.error('[DealerService] Error fetching dealer by email:', error);
    throw error;
  }
}
/**
* Search dealers by name or code
*/
/**
 * Case-insensitive substring search over active dealers by display name,
 * email, or dealer code (employeeId). Capped at 50 results.
 */
export async function searchDealers(searchTerm: string): Promise<DealerInfo[]> {
  try {
    const pattern = `%${searchTerm}%`;
    const matches = await User.findAll({
      where: {
        [Op.and]: [
          { designation: { [Op.iLike]: 'dealer' } as any },
          {
            [Op.or]: [
              { displayName: { [Op.iLike]: pattern } as any },
              { email: { [Op.iLike]: pattern } as any },
              { employeeId: { [Op.iLike]: pattern } as any },
            ],
          },
          { isActive: true },
        ],
      },
      order: [['displayName', 'ASC']],
      limit: 50, // keep the result set bounded
    });

    return matches.map((row) => {
      const label = row.displayName || row.email;
      return {
        userId: row.userId,
        email: row.email,
        dealerCode: row.employeeId || '',
        dealerName: label,
        displayName: label,
        phone: row.phone || undefined,
        department: row.department || undefined,
        designation: row.designation || undefined,
      };
    });
  } catch (error) {
    logger.error('[DealerService] Error searching dealers:', error);
    throw error;
  }
}

File diff suppressed because it is too large Load Diff

View File

@ -1,313 +0,0 @@
import logger from '../utils/logger';
/**
* DMS (Document Management System) Integration Service
* Handles integration with DMS for e-invoice and credit note generation
*
* NOTE: This is a placeholder/stub implementation.
* Replace with actual DMS API integration based on your DMS system.
*/
/**
 * DMS (Document Management System) integration client.
 *
 * NOTE: This is a placeholder/stub implementation — every operation currently
 * falls back to a simulated response until the real DMS API calls are wired
 * in. Simulated results report success:true with an explanatory `error` note
 * so callers can tell them apart from real DMS responses.
 */
export class DMSIntegrationService {
  private dmsBaseUrl: string;
  private dmsApiKey?: string;
  private dmsUsername?: string;
  private dmsPassword?: string;

  constructor() {
    // Credentials come from the environment; if they are absent the service
    // simply runs in mock mode (see isConfigured()).
    this.dmsBaseUrl = process.env.DMS_BASE_URL || '';
    this.dmsApiKey = process.env.DMS_API_KEY;
    this.dmsUsername = process.env.DMS_USERNAME;
    this.dmsPassword = process.env.DMS_PASSWORD;
  }

  /**
   * DMS is considered configured when a base URL plus either an API key or a
   * username/password pair is present.
   */
  private isConfigured(): boolean {
    return !!this.dmsBaseUrl && (!!this.dmsApiKey || (!!this.dmsUsername && !!this.dmsPassword));
  }

  /** Build a simulated e-invoice response; `note` explains why it is mocked. */
  private buildMockEInvoice(note: string): {
    success: boolean;
    eInvoiceNumber?: string;
    dmsNumber?: string;
    invoiceDate?: Date;
    invoiceUrl?: string;
    error?: string;
  } {
    const mockInvoiceNumber = `EINV-${Date.now()}`;
    const mockDmsNumber = `DMS-${Date.now()}`;
    return {
      success: true,
      eInvoiceNumber: mockInvoiceNumber,
      dmsNumber: mockDmsNumber,
      invoiceDate: new Date(),
      invoiceUrl: `https://dms.example.com/invoices/${mockInvoiceNumber}`,
      error: note,
    };
  }

  /** Build a simulated credit-note response; `note` explains why it is mocked. */
  private buildMockCreditNote(amount: number, note: string): {
    success: boolean;
    creditNoteNumber?: string;
    creditNoteDate?: Date;
    creditNoteAmount?: number;
    creditNoteUrl?: string;
    error?: string;
  } {
    const mockCreditNoteNumber = `CN-${Date.now()}`;
    return {
      success: true,
      creditNoteNumber: mockCreditNoteNumber,
      creditNoteDate: new Date(),
      creditNoteAmount: amount,
      creditNoteUrl: `https://dms.example.com/credit-notes/${mockCreditNoteNumber}`,
      error: note,
    };
  }

  /**
   * Generate e-invoice in DMS.
   * @param invoiceData - Invoice data
   * @returns E-invoice details including invoice number, DMS number, etc.;
   *          success:false with `error` on unexpected failure (never throws).
   */
  async generateEInvoice(invoiceData: {
    requestNumber: string;
    dealerCode: string;
    dealerName: string;
    amount: number;
    description: string;
    ioNumber?: string;
    taxDetails?: any;
  }): Promise<{
    success: boolean;
    eInvoiceNumber?: string;
    dmsNumber?: string;
    invoiceDate?: Date;
    invoiceUrl?: string;
    error?: string;
  }> {
    try {
      if (!this.isConfigured()) {
        logger.warn('[DMS] DMS integration not configured, generating mock e-invoice');
        return this.buildMockEInvoice('DMS not configured - e-invoice generation simulated');
      }

      // TODO: Implement the actual DMS API call, e.g.
      //   POST `${this.dmsBaseUrl}/api/invoices/generate` with invoiceData and
      //   `Authorization: Bearer ${this.dmsApiKey}`, mapping the response to
      //   { eInvoiceNumber, dmsNumber, invoiceDate, invoiceUrl }.
      logger.warn('[DMS] DMS e-invoice generation not implemented, generating mock invoice');
      return this.buildMockEInvoice('DMS API not implemented - e-invoice generation simulated');
    } catch (error) {
      logger.error('[DMS] Error generating e-invoice:', error);
      return {
        success: false,
        error: error instanceof Error ? error.message : 'Unknown error',
      };
    }
  }

  /**
   * Generate credit note in DMS.
   * @param creditNoteData - Credit note data
   * @returns Credit note details including credit note number, amount, etc.;
   *          success:false with `error` on unexpected failure (never throws).
   */
  async generateCreditNote(creditNoteData: {
    requestNumber: string;
    eInvoiceNumber: string;
    dealerCode: string;
    dealerName: string;
    amount: number;
    reason: string;
    description?: string;
  }): Promise<{
    success: boolean;
    creditNoteNumber?: string;
    creditNoteDate?: Date;
    creditNoteAmount?: number;
    creditNoteUrl?: string;
    error?: string;
  }> {
    try {
      if (!this.isConfigured()) {
        logger.warn('[DMS] DMS integration not configured, generating mock credit note');
        return this.buildMockCreditNote(
          creditNoteData.amount,
          'DMS not configured - credit note generation simulated'
        );
      }

      // TODO: Implement the actual DMS API call, e.g.
      //   POST `${this.dmsBaseUrl}/api/credit-notes/generate` with creditNoteData,
      //   mapping the response to
      //   { creditNoteNumber, creditNoteDate, creditNoteAmount, creditNoteUrl }.
      logger.warn('[DMS] DMS credit note generation not implemented, generating mock credit note');
      return this.buildMockCreditNote(
        creditNoteData.amount,
        'DMS API not implemented - credit note generation simulated'
      );
    } catch (error) {
      logger.error('[DMS] Error generating credit note:', error);
      return {
        success: false,
        error: error instanceof Error ? error.message : 'Unknown error',
      };
    }
  }

  /**
   * Get invoice status from DMS.
   * @param eInvoiceNumber - E-invoice number
   * @returns Invoice status and details; success:false with `error` on failure.
   */
  async getInvoiceStatus(eInvoiceNumber: string): Promise<{
    success: boolean;
    status?: string;
    invoiceNumber?: string;
    dmsNumber?: string;
    invoiceDate?: Date;
    amount?: number;
    error?: string;
  }> {
    try {
      // Shared simulated status payload; `note` explains why it is mocked.
      const mockStatus = (note: string) => ({
        success: true,
        status: 'GENERATED',
        invoiceNumber: eInvoiceNumber,
        dmsNumber: `DMS-${Date.now()}`,
        invoiceDate: new Date(),
        amount: 0,
        error: note,
      });

      if (!this.isConfigured()) {
        logger.warn('[DMS] DMS integration not configured, returning mock invoice status');
        return mockStatus('DMS not configured - invoice status simulated');
      }

      // TODO: Implement the actual DMS API call, e.g.
      //   GET `${this.dmsBaseUrl}/api/invoices/${eInvoiceNumber}/status`.
      logger.warn('[DMS] DMS invoice status check not implemented, returning mock status');
      return mockStatus('DMS API not implemented - invoice status simulated');
    } catch (error) {
      logger.error('[DMS] Error getting invoice status:', error);
      return {
        success: false,
        error: error instanceof Error ? error.message : 'Unknown error',
      };
    }
  }

  /**
   * Download invoice document from DMS.
   * @param eInvoiceNumber - E-invoice number
   * @returns Invoice document URL or file buffer; success:false with `error`
   *          on failure.
   */
  async downloadInvoice(eInvoiceNumber: string): Promise<{
    success: boolean;
    documentUrl?: string;
    documentBuffer?: Buffer;
    mimeType?: string;
    error?: string;
  }> {
    try {
      // Shared simulated download payload; `note` explains why it is mocked.
      const mockDownload = (note: string) => ({
        success: true,
        documentUrl: `https://dms.example.com/invoices/${eInvoiceNumber}/download`,
        mimeType: 'application/pdf',
        error: note,
      });

      if (!this.isConfigured()) {
        logger.warn('[DMS] DMS integration not configured, returning mock download URL');
        return mockDownload('DMS not configured - download URL simulated');
      }

      // TODO: Implement the actual DMS API call, e.g.
      //   GET `${this.dmsBaseUrl}/api/invoices/${eInvoiceNumber}/download`
      //   with responseType 'arraybuffer', returning the document buffer and
      //   the response content-type.
      logger.warn('[DMS] DMS invoice download not implemented, returning mock URL');
      return mockDownload('DMS API not implemented - download URL simulated');
    } catch (error) {
      logger.error('[DMS] Error downloading invoice:', error);
      return {
        success: false,
        error: error instanceof Error ? error.message : 'Unknown error',
      };
    }
  }
}

export const dmsIntegrationService = new DMSIntegrationService();

View File

@ -1,420 +0,0 @@
import { Request } from 'express';
import { ClaimInvoice } from '../models/ClaimInvoice';
import { ClaimCreditNote } from '../models/ClaimCreditNote';
import { WorkflowRequest } from '../models/WorkflowRequest';
import { ApprovalLevel } from '../models/ApprovalLevel';
import { ApprovalService } from './approval.service';
import logger from '../utils/logger';
import crypto from 'crypto';
/**
* DMS Webhook Service
* Handles processing of webhook callbacks from DMS system
*/
export class DMSWebhookService {
// HMAC secret used to verify the `x-dms-signature` header; empty string
// disables signature validation (see validateWebhookSignature).
private webhookSecret: string;
// NOTE(review): presumably used by the step auto-approval flow further down
// this class — confirm against autoApproveStep7's implementation.
private approvalService: ApprovalService;
constructor() {
// Secret comes from the environment; defaults to '' (validation skipped).
this.webhookSecret = process.env.DMS_WEBHOOK_SECRET || '';
this.approvalService = new ApprovalService();
}
/**
* Validate webhook signature for security
* DMS should send a signature in the header that we can verify
*/
/**
 * Validate the HMAC-SHA256 signature DMS sends in the `x-dms-signature`
 * header against the JSON-serialized request body.
 *
 * @param req - Incoming webhook request.
 * @returns true when the signature matches, or when no webhook secret is
 *          configured (development mode). Never throws; unexpected errors
 *          are logged and reported as invalid.
 */
async validateWebhookSignature(req: Request): Promise<boolean> {
  // If webhook secret is not configured, skip validation (for development)
  if (!this.webhookSecret) {
    logger.warn('[DMSWebhook] Webhook secret not configured, skipping signature validation');
    return true;
  }
  try {
    const signature = req.headers['x-dms-signature'] as string;
    if (!signature) {
      logger.warn('[DMSWebhook] Missing webhook signature in header');
      return false;
    }

    // Create HMAC hash of the request body
    const body = JSON.stringify(req.body);
    const expectedSignature = crypto
      .createHmac('sha256', this.webhookSecret)
      .update(body)
      .digest('hex');

    const received = Buffer.from(signature);
    const expected = Buffer.from(expectedSignature);

    // crypto.timingSafeEqual throws RangeError when buffer lengths differ;
    // treat a wrong-length signature as invalid rather than an internal error.
    // (Length comparison leaks no usable information about the secret.)
    if (received.length !== expected.length) {
      logger.warn('[DMSWebhook] Invalid webhook signature');
      return false;
    }

    // Constant-time comparison to prevent timing attacks.
    const isValid = crypto.timingSafeEqual(received, expected);
    if (!isValid) {
      logger.warn('[DMSWebhook] Invalid webhook signature');
    }
    return isValid;
  } catch (error) {
    logger.error('[DMSWebhook] Error validating webhook signature:', error);
    return false;
  }
}
/**
* Process invoice generation webhook from DMS
*/
/**
 * Process an invoice-generation callback from DMS.
 *
 * Creates the ClaimInvoice record when it does not exist yet, or updates it
 * with the DMS response data, then auto-approves workflow Step 7.
 *
 * @param payload - Raw webhook body; must contain request_number,
 *                  document_no and document_type.
 * @returns success flag plus the invoice number, or an error description.
 *          Never throws; unexpected errors are logged and reported.
 */
async processInvoiceWebhook(payload: any): Promise<{
  success: boolean;
  invoiceNumber?: string;
  error?: string;
}> {
  try {
    // Validate required fields
    const requiredFields = ['request_number', 'document_no', 'document_type'];
    for (const field of requiredFields) {
      if (!payload[field]) {
        return {
          success: false,
          error: `Missing required field: ${field}`,
        };
      }
    }

    // Find workflow request by request number
    const request = await WorkflowRequest.findOne({
      where: {
        requestNumber: payload.request_number,
      },
    });
    if (!request) {
      return {
        success: false,
        error: `Request not found: ${payload.request_number}`,
      };
    }

    // Use ?? (not ||) so a legitimate zero total_amount from DMS is kept
    // instead of silently falling through to claim_amount.
    const resolvedAmount = payload.total_amount ?? payload.claim_amount;

    // Find or create invoice record
    let invoice = await ClaimInvoice.findOne({
      where: { requestId: request.requestId },
    });

    if (!invoice) {
      // Create invoice if it doesn't exist (new flow: webhook creates invoice)
      logger.info('[DMSWebhook] Invoice record not found, creating new invoice from webhook', {
        requestNumber: payload.request_number,
      });
      invoice = await ClaimInvoice.create({
        requestId: request.requestId,
        invoiceNumber: payload.document_no,
        dmsNumber: payload.document_no, // DMS document number
        invoiceDate: payload.document_date ? new Date(payload.document_date) : new Date(),
        amount: resolvedAmount,
        status: 'GENERATED',
        generatedAt: new Date(),
        invoiceFilePath: payload.invoice_file_path || null,
        errorMessage: payload.error_message || null,
        description: this.buildInvoiceDescription(payload),
      });
      logger.info('[DMSWebhook] Invoice created successfully from webhook', {
        requestNumber: payload.request_number,
        invoiceNumber: payload.document_no,
      });
    } else {
      // Update existing invoice with DMS response data
      await invoice.update({
        invoiceNumber: payload.document_no,
        dmsNumber: payload.document_no, // DMS document number
        invoiceDate: payload.document_date ? new Date(payload.document_date) : new Date(),
        amount: resolvedAmount,
        status: 'GENERATED',
        generatedAt: new Date(),
        invoiceFilePath: payload.invoice_file_path || null,
        errorMessage: payload.error_message || null,
        description: this.buildInvoiceDescription(payload),
      });
      logger.info('[DMSWebhook] Invoice updated successfully', {
        requestNumber: payload.request_number,
        invoiceNumber: payload.document_no,
        irnNo: payload.irn_no,
      });
    }

    // Auto-approve Step 7 and move to Step 8
    await this.autoApproveStep7(request.requestId, payload.request_number);

    return {
      success: true,
      invoiceNumber: payload.document_no,
    };
  } catch (error) {
    const errorMessage = error instanceof Error ? error.message : 'Unknown error';
    logger.error('[DMSWebhook] Error processing invoice webhook:', error);
    return {
      success: false,
      error: errorMessage,
    };
  }
}
/**
* Process credit note generation webhook from DMS
*/
/**
 * Process a credit-note-generation callback from DMS.
 *
 * Requires an existing ClaimInvoice for the request (the credit note is
 * linked to it). Creates the ClaimCreditNote record when missing, otherwise
 * updates it with the DMS response data.
 *
 * @param payload - Raw webhook body; must contain request_number,
 *                  document_no and document_type.
 * @returns success flag plus the credit note number, or an error description.
 *          Never throws; unexpected errors are logged and reported.
 */
async processCreditNoteWebhook(payload: any): Promise<{
  success: boolean;
  creditNoteNumber?: string;
  error?: string;
}> {
  try {
    // Validate required fields
    const requiredFields = ['request_number', 'document_no', 'document_type'];
    for (const field of requiredFields) {
      if (!payload[field]) {
        return {
          success: false,
          error: `Missing required field: ${field}`,
        };
      }
    }

    // Find workflow request by request number
    const request = await WorkflowRequest.findOne({
      where: {
        requestNumber: payload.request_number,
      },
    });
    if (!request) {
      return {
        success: false,
        error: `Request not found: ${payload.request_number}`,
      };
    }

    // Find invoice to link credit note
    const invoice = await ClaimInvoice.findOne({
      where: { requestId: request.requestId },
    });
    if (!invoice) {
      return {
        success: false,
        error: `Invoice not found for request: ${payload.request_number}`,
      };
    }

    // Use ?? (not ||) so a legitimate zero total_amount from DMS is kept
    // instead of silently falling through to credit_amount.
    const resolvedAmount = payload.total_amount ?? payload.credit_amount;

    // Find or create credit note record
    let creditNote = await ClaimCreditNote.findOne({
      where: { requestId: request.requestId },
    });

    if (!creditNote) {
      // Create credit note if it doesn't exist (new flow: webhook creates credit note)
      logger.info('[DMSWebhook] Credit note record not found, creating new credit note from webhook', {
        requestNumber: payload.request_number,
      });
      creditNote = await ClaimCreditNote.create({
        requestId: request.requestId,
        invoiceId: invoice.invoiceId,
        creditNoteNumber: payload.document_no,
        creditNoteDate: payload.document_date ? new Date(payload.document_date) : new Date(),
        creditNoteAmount: resolvedAmount,
        sapDocumentNumber: payload.sap_credit_note_no || null,
        status: 'CONFIRMED',
        confirmedAt: new Date(),
        creditNoteFilePath: payload.credit_note_file_path || null,
        errorMessage: payload.error_message || null,
        description: this.buildCreditNoteDescription(payload),
      });
      logger.info('[DMSWebhook] Credit note created successfully from webhook', {
        requestNumber: payload.request_number,
        creditNoteNumber: payload.document_no,
      });
    } else {
      // Update existing credit note with DMS response data
      await creditNote.update({
        invoiceId: invoice.invoiceId,
        creditNoteNumber: payload.document_no,
        creditNoteDate: payload.document_date ? new Date(payload.document_date) : new Date(),
        creditNoteAmount: resolvedAmount,
        sapDocumentNumber: payload.sap_credit_note_no || null,
        status: 'CONFIRMED',
        confirmedAt: new Date(),
        creditNoteFilePath: payload.credit_note_file_path || null,
        errorMessage: payload.error_message || null,
        description: this.buildCreditNoteDescription(payload),
      });
      logger.info('[DMSWebhook] Credit note updated successfully', {
        requestNumber: payload.request_number,
        creditNoteNumber: payload.document_no,
        sapCreditNoteNo: payload.sap_credit_note_no,
        irnNo: payload.irn_no,
      });
    }

    return {
      success: true,
      creditNoteNumber: payload.document_no,
    };
  } catch (error) {
    const errorMessage = error instanceof Error ? error.message : 'Unknown error';
    logger.error('[DMSWebhook] Error processing credit note webhook:', error);
    return {
      success: false,
      error: errorMessage,
    };
  }
}
/**
 * Build a human-readable invoice description from the DMS webhook payload.
 * Includes IRN, item code, HSN/SAC code and the GST split when present,
 * joined with ' | '; yields an empty string when no field is set.
 */
private buildInvoiceDescription(payload: any): string {
  const { irn_no, item_code_no, hsn_sac_code, cgst_amount, sgst_amount, igst_amount } = payload;
  const segments: string[] = [];
  if (irn_no) segments.push(`IRN: ${irn_no}`);
  if (item_code_no) segments.push(`Item Code: ${item_code_no}`);
  if (hsn_sac_code) segments.push(`HSN/SAC: ${hsn_sac_code}`);
  // Emit the GST breakdown if any component is present (missing ones default to 0).
  if (cgst_amount || sgst_amount || igst_amount) {
    segments.push(`GST - CGST: ${cgst_amount || 0}, SGST: ${sgst_amount || 0}, IGST: ${igst_amount || 0}`);
  }
  // join on an empty array already yields '', matching the original ternary.
  return segments.join(' | ');
}
/**
 * Build a human-readable credit-note description from the DMS webhook payload.
 * Includes IRN, SAP credit-note number, credit type, item code, HSN/SAC code
 * and the GST split when present, joined with ' | '; empty string otherwise.
 */
private buildCreditNoteDescription(payload: any): string {
  const {
    irn_no,
    sap_credit_note_no,
    credit_type,
    item_code_no,
    hsn_sac_code,
    cgst_amount,
    sgst_amount,
    igst_amount,
  } = payload;
  const segments: string[] = [];
  if (irn_no) segments.push(`IRN: ${irn_no}`);
  if (sap_credit_note_no) segments.push(`SAP CN: ${sap_credit_note_no}`);
  if (credit_type) segments.push(`Credit Type: ${credit_type}`);
  if (item_code_no) segments.push(`Item Code: ${item_code_no}`);
  if (hsn_sac_code) segments.push(`HSN/SAC: ${hsn_sac_code}`);
  // Emit the GST breakdown if any component is present (missing ones default to 0).
  if (cgst_amount || sgst_amount || igst_amount) {
    segments.push(`GST - CGST: ${cgst_amount || 0}, SGST: ${sgst_amount || 0}, IGST: ${igst_amount || 0}`);
  }
  // join on an empty array already yields '', matching the original ternary.
  return segments.join(' | ');
}
/**
 * Auto-approve the "E-Invoice Generation" step (historically Step 7) and move
 * the claim workflow to the next step. Called after an invoice is created or
 * updated from a DMS webhook.
 *
 * Never throws: the invoice is already persisted (the primary goal), so any
 * failure here is logged and swallowed to keep webhook processing alive.
 *
 * @param requestId - Workflow request primary key
 * @param requestNumber - Human-readable request number, used only for logging
 */
private async autoApproveStep7(requestId: string, requestNumber: string): Promise<void> {
  try {
    // Only claim-management workflows have an E-Invoice Generation level.
    const request = await WorkflowRequest.findByPk(requestId);
    if (!request) {
      logger.warn('[DMSWebhook] Request not found for Step 7 auto-approval', { requestId });
      return;
    }
    const workflowType = (request as any).workflowType;
    if (workflowType !== 'CLAIM_MANAGEMENT') {
      logger.info('[DMSWebhook] Not a claim management workflow, skipping Step 7 auto-approval', {
        requestId,
        workflowType,
      });
      return;
    }
    // Locate the level by name first (robust against step renumbering) …
    let eInvoiceLevel = await ApprovalLevel.findOne({
      where: {
        requestId,
        levelName: 'E-Invoice Generation',
      },
    });
    // … then fall back to the legacy hardcoded level number 7.
    if (!eInvoiceLevel) {
      eInvoiceLevel = await ApprovalLevel.findOne({
        where: {
          requestId,
          levelNumber: 7,
        },
      });
    }
    if (!eInvoiceLevel) {
      logger.warn('[DMSWebhook] E-Invoice Generation approval level not found', { requestId, requestNumber });
      return;
    }
    // Idempotency guard: the DMS webhook may be delivered more than once.
    if (eInvoiceLevel.status === 'APPROVED') {
      logger.info('[DMSWebhook] E-Invoice Generation already approved, skipping auto-approval', {
        requestId,
        requestNumber,
      });
      return;
    }
    logger.info(`[DMSWebhook] Auto-approving E-Invoice Generation (Level ${eInvoiceLevel.levelNumber})`, {
      requestId,
      requestNumber,
      levelId: eInvoiceLevel.levelId,
      levelNumber: eInvoiceLevel.levelNumber,
    });
    // Hoisted out of the template literal (was inlined as a nested await)
    // so the lookup is visible and debuggable.
    const invoice = await ClaimInvoice.findOne({ where: { requestId } });
    const invoiceNumber = invoice?.invoiceNumber || 'N/A';
    await this.approvalService.approveLevel(
      eInvoiceLevel.levelId,
      {
        action: 'APPROVE',
        comments: `E-Invoice generated via DMS webhook. Invoice Number: ${invoiceNumber}. E-Invoice Generation auto-approved.`,
      },
      'system', // System user for auto-approval
      {
        ipAddress: null,
        userAgent: 'DMS-Webhook-System',
      }
    );
    logger.info('[DMSWebhook] E-Invoice Generation auto-approved successfully. Workflow moved to next step', {
      requestId,
      requestNumber,
      levelNumber: eInvoiceLevel.levelNumber,
    });
  } catch (error) {
    const errorMessage = error instanceof Error ? error.message : 'Unknown error';
    logger.error('[DMSWebhook] Error auto-approving Step 7:', {
      requestId,
      requestNumber,
      error: errorMessage,
    });
    // Don't throw error - webhook processing should continue even if Step 7 approval fails
    // The invoice is already created/updated, which is the primary goal
  }
}
}

View File

@ -1,241 +0,0 @@
import { WorkflowTemplate } from '../models/WorkflowTemplate';
import { WorkflowRequest } from '../models/WorkflowRequest';
import { ApprovalLevel } from '../models/ApprovalLevel';
import { TemplateFieldResolver, FormStepConfig } from './templateFieldResolver.service';
import logger from '../utils/logger';
/**
 * Enhanced Template Service
 * Handles template-based workflow operations with dynamic form configuration:
 * resolves user references into form field defaults, validates submitted step
 * data against the template config, and persists it to workflow-type-specific
 * storage tables.
 */
export class EnhancedTemplateService {
  // Resolves userReference declarations in form fields to concrete user data.
  private fieldResolver = new TemplateFieldResolver();

  /**
   * Get form configuration for a template with resolved user references.
   *
   * @param templateId - Workflow template primary key
   * @param requestId - Optional existing request; when provided together with
   *   currentUserId, user references are resolved into field defaults
   * @param currentUserId - User viewing the form
   * @returns Step configurations (with resolved defaults when context exists)
   * @throws Error when the template does not exist
   */
  async getFormConfig(
    templateId: string,
    requestId?: string,
    currentUserId?: string
  ): Promise<FormStepConfig[]> {
    try {
      const template = await WorkflowTemplate.findByPk(templateId);
      if (!template) {
        throw new Error('Template not found');
      }
      const stepsConfig = (template.formStepsConfig || []) as FormStepConfig[];
      // Only resolve user references when an existing request provides context.
      if (requestId && currentUserId) {
        const request = await WorkflowRequest.findByPk(requestId, {
          include: [{ model: ApprovalLevel, as: 'approvalLevels' }]
        });
        if (request) {
          return await this.resolveStepsWithUserData(stepsConfig, request, currentUserId);
        }
      }
      return stepsConfig;
    } catch (error) {
      logger.error('[EnhancedTemplateService] Error getting form config:', error);
      throw error;
    }
  }

  /**
   * Resolve user references in all steps, merging resolved values into each
   * field's defaultValue. Returns the original steps unchanged on error.
   */
  private async resolveStepsWithUserData(
    steps: FormStepConfig[],
    request: WorkflowRequest,
    currentUserId: string
  ): Promise<FormStepConfig[]> {
    try {
      // Preload all approvers once so per-step resolution can look them up by level.
      const approvers = await ApprovalLevel.findAll({
        where: { requestId: request.requestId }
      });
      const approverMap = new Map(
        approvers.map(a => [a.levelNumber, a])
      );
      const resolvedSteps = await Promise.all(
        steps.map(async (step) => {
          const resolvedFields = await this.fieldResolver.resolveUserReferences(
            step,
            request,
            currentUserId,
            {
              currentLevel: request.currentLevel,
              approvers: approverMap
            }
          );
          // Merge resolved values into field defaults.
          // '??' (not '||') so legitimate falsy resolved values ('' / 0 / false)
          // still take precedence over the configured default.
          const enrichedFields = step.fields.map(field => ({
            ...field,
            defaultValue: resolvedFields[field.fieldId] ?? field.defaultValue
          }));
          return {
            ...step,
            fields: enrichedFields
          };
        })
      );
      return resolvedSteps;
    } catch (error) {
      logger.error('[EnhancedTemplateService] Error resolving steps:', error);
      return steps; // Return original steps on error
    }
  }

  /**
   * Validate and save form data for a step.
   *
   * @throws Error when the template/step is missing or validation fails
   */
  async saveStepData(
    templateId: string,
    requestId: string,
    stepNumber: number,
    formData: Record<string, any>,
    userId: string
  ): Promise<void> {
    try {
      const template = await WorkflowTemplate.findByPk(templateId);
      if (!template) {
        throw new Error('Template not found');
      }
      const stepsConfig = (template.formStepsConfig || []) as FormStepConfig[];
      const stepConfig = stepsConfig.find(s => s.stepNumber === stepNumber);
      if (!stepConfig) {
        throw new Error(`Step ${stepNumber} not found in template`);
      }
      // Validate required fields
      this.validateStepData(stepConfig, formData);
      // Save to template-specific storage
      await this.saveToTemplateStorage(template.workflowType, requestId, stepNumber, formData);
    } catch (error) {
      logger.error('[EnhancedTemplateService] Error saving step data:', error);
      throw error;
    }
  }

  /**
   * Validate step data against the step's field configuration.
   * Treats only null/undefined/'' as missing so legitimate falsy values
   * (0, false) are accepted for required fields and still validated.
   *
   * @throws Error naming the first field that fails a requirement or rule
   */
  private validateStepData(stepConfig: FormStepConfig, formData: Record<string, any>): void {
    for (const field of stepConfig.fields) {
      const value = formData[field.fieldId];
      const isMissing = value === undefined || value === null || value === '';
      if (field.required && isMissing) {
        throw new Error(`Field ${field.label} is required`);
      }
      // Apply validation rules to any present value (previously, falsy values
      // such as 0 silently bypassed min/max/pattern checks).
      if (field.validation && !isMissing) {
        if (field.validation.min !== undefined && value < field.validation.min) {
          throw new Error(`${field.label} must be at least ${field.validation.min}`);
        }
        if (field.validation.max !== undefined && value > field.validation.max) {
          throw new Error(`${field.label} must be at most ${field.validation.max}`);
        }
        if (field.validation.pattern) {
          const regex = new RegExp(field.validation.pattern);
          if (!regex.test(String(value))) {
            throw new Error(`${field.label} format is invalid`);
          }
        }
      }
    }
  }

  /**
   * Route step data to workflow-type-specific storage.
   * Unknown workflow types are logged and ignored (no generic storage yet).
   */
  private async saveToTemplateStorage(
    workflowType: string,
    requestId: string,
    stepNumber: number,
    formData: Record<string, any>
  ): Promise<void> {
    switch (workflowType) {
      case 'CLAIM_MANAGEMENT':
        await this.saveClaimManagementStepData(requestId, stepNumber, formData);
        break;
      default:
        // Generic storage for custom templates
        logger.warn(`[EnhancedTemplateService] No specific storage handler for workflow type: ${workflowType}`);
    }
  }

  /**
   * Save claim-management step data to the matching detail table.
   * Steps 1/2/5 map to claim, proposal and completion details respectively;
   * other steps are logged and ignored.
   */
  private async saveClaimManagementStepData(
    requestId: string,
    stepNumber: number,
    formData: Record<string, any>
  ): Promise<void> {
    // Dynamic imports keep model loading lazy for this branch only.
    const { DealerClaimDetails } = await import('../models/DealerClaimDetails');
    const { DealerProposalDetails } = await import('../models/DealerProposalDetails');
    const { DealerCompletionDetails } = await import('../models/DealerCompletionDetails');
    switch (stepNumber) {
      case 1:
        // Save to dealer_claim_details
        await DealerClaimDetails.upsert({
          requestId,
          activityName: formData.activity_name,
          activityType: formData.activity_type,
          dealerCode: formData.dealer_code,
          dealerName: formData.dealer_name,
          dealerEmail: formData.dealer_email,
          dealerPhone: formData.dealer_phone,
          dealerAddress: formData.dealer_address,
          activityDate: formData.activity_date,
          location: formData.location,
          periodStartDate: formData.period_start_date,
          periodEndDate: formData.period_end_date,
          estimatedBudget: formData.estimated_budget,
        });
        break;
      case 2:
        // Save to dealer_proposal_details
        await DealerProposalDetails.upsert({
          requestId,
          costBreakup: formData.cost_breakup,
          totalEstimatedBudget: formData.total_estimated_budget,
          timelineMode: formData.timeline_mode,
          expectedCompletionDate: formData.expected_completion_date,
          expectedCompletionDays: formData.expected_completion_days,
          dealerComments: formData.dealer_comments,
          proposalDocumentPath: formData.proposal_document_path,
          proposalDocumentUrl: formData.proposal_document_url,
          submittedAt: new Date(),
        });
        break;
      case 5:
        // Save to dealer_completion_details
        await DealerCompletionDetails.upsert({
          requestId,
          activityCompletionDate: formData.activity_completion_date,
          numberOfParticipants: formData.number_of_participants,
          closedExpenses: formData.closed_expenses,
          totalClosedExpenses: formData.total_closed_expenses,
          completionDocuments: formData.completion_documents,
          activityPhotos: formData.activity_photos,
          submittedAt: new Date(),
        });
        break;
      default:
        logger.warn(`[EnhancedTemplateService] No storage handler for claim management step ${stepNumber}`);
    }
  }
}

View File

@ -143,17 +143,8 @@ class NotificationService {
// 1. Check admin + user preferences for in-app notifications // 1. Check admin + user preferences for in-app notifications
const canSendInApp = await shouldSendInAppNotification(userId, payload.type || 'general'); const canSendInApp = await shouldSendInAppNotification(userId, payload.type || 'general');
logger.info(`[Notification] In-app notification check for user ${userId}:`, {
canSendInApp,
inAppNotificationsEnabled: user.inAppNotificationsEnabled,
notificationType: payload.type,
willCreate: canSendInApp && user.inAppNotificationsEnabled
});
let notification: any = null;
if (canSendInApp && user.inAppNotificationsEnabled) { if (canSendInApp && user.inAppNotificationsEnabled) {
try { const notification = await Notification.create({
notification = await Notification.create({
userId, userId,
requestId: payload.requestId, requestId: payload.requestId,
notificationType: payload.type || 'general', notificationType: payload.type || 'general',
@ -174,7 +165,7 @@ class NotificationService {
} as any); } as any);
sentVia.push('IN_APP'); sentVia.push('IN_APP');
logger.info(`[Notification] Created in-app notification for user ${userId}: ${payload.title} (ID: ${(notification as any).notificationId})`); logger.info(`[Notification] Created in-app notification for user ${userId}: ${payload.title}`);
// 2. Emit real-time socket event for immediate delivery // 2. Emit real-time socket event for immediate delivery
try { try {
@ -184,20 +175,14 @@ class NotificationService {
notification: notification.toJSON(), notification: notification.toJSON(),
...payload ...payload
}); });
logger.info(`[Notification] ✅ Emitted socket event to user ${userId}`); logger.info(`[Notification] Emitted socket event to user ${userId}`);
} else {
logger.warn(`[Notification] emitToUser function not available`);
} }
} catch (socketError) { } catch (socketError) {
logger.warn(`[Notification] Socket emit failed (not critical):`, socketError); logger.warn(`[Notification] Socket emit failed (not critical):`, socketError);
} }
} catch (notificationError) {
logger.error(`[Notification] ❌ Failed to create in-app notification for user ${userId}:`, notificationError);
// Continue - don't block other notification channels
}
// 3. Send push notification (if enabled and user has subscriptions) // 3. Send push notification (if enabled and user has subscriptions)
if (user.pushNotificationsEnabled && canSendInApp && notification) { if (user.pushNotificationsEnabled && canSendInApp) {
let subs = this.userIdToSubscriptions.get(userId) || []; let subs = this.userIdToSubscriptions.get(userId) || [];
// Load from DB if memory empty // Load from DB if memory empty
if (subs.length === 0) { if (subs.length === 0) {
@ -305,24 +290,18 @@ class NotificationService {
} }
// Check if email should be sent (admin + user preferences) // Check if email should be sent (admin + user preferences)
// For assignment notifications, always attempt to send email (unless explicitly disabled by admin)
// This ensures next approvers always receive email notifications
const shouldSend = payload.type === 'rejection' || payload.type === 'tat_breach' const shouldSend = payload.type === 'rejection' || payload.type === 'tat_breach'
? await shouldSendEmailWithOverride(userId, emailType) // Critical emails ? await shouldSendEmailWithOverride(userId, emailType) // Critical emails
: payload.type === 'assignment'
? await shouldSendEmailWithOverride(userId, emailType) // Assignment emails - use override to ensure delivery
: await shouldSendEmail(userId, emailType); // Regular emails : await shouldSendEmail(userId, emailType); // Regular emails
console.log(`[DEBUG Email] Should send email: ${shouldSend} for type: ${payload.type}, userId: ${userId}`); console.log(`[DEBUG Email] Should send email: ${shouldSend}`);
if (!shouldSend) { if (!shouldSend) {
console.log(`[DEBUG Email] Email skipped for user ${userId}, type: ${payload.type} (preferences)`); console.log(`[DEBUG Email] Email skipped for user ${userId}, type: ${payload.type} (preferences)`);
logger.warn(`[Email] Email skipped for user ${userId}, type: ${payload.type} (preferences or admin disabled)`); logger.info(`[Email] Skipped for user ${userId}, type: ${payload.type} (preferences)`);
return; return;
} }
logger.info(`[Email] Sending email notification to user ${userId} for type: ${payload.type}, requestId: ${payload.requestId}`);
// Trigger email based on notification type // Trigger email based on notification type
// Email service will fetch additional data as needed // Email service will fetch additional data as needed
console.log(`[DEBUG Email] Triggering email for type: ${payload.type}`); console.log(`[DEBUG Email] Triggering email for type: ${payload.type}`);

View File

@ -657,10 +657,7 @@ export class PauseService {
const now = new Date(); const now = new Date();
// Find all paused workflows where resume date has passed // Find all paused workflows where resume date has passed
// Handle backward compatibility: workflow_type column may not exist in old environments const pausedWorkflows = await WorkflowRequest.findAll({
let pausedWorkflows: WorkflowRequest[];
try {
pausedWorkflows = await WorkflowRequest.findAll({
where: { where: {
isPaused: true, isPaused: true,
pauseResumeDate: { pauseResumeDate: {
@ -668,33 +665,6 @@ export class PauseService {
} }
} }
}); });
} catch (error: any) {
// If error is due to missing workflow_type column, use raw query
if (error.message?.includes('workflow_type') || (error.message?.includes('column') && error.message?.includes('does not exist'))) {
logger.warn('[Pause] workflow_type column not found, using raw query for backward compatibility');
const { sequelize } = await import('../config/database');
const { QueryTypes } = await import('sequelize');
const results = await sequelize.query(`
SELECT request_id, is_paused, pause_resume_date
FROM workflow_requests
WHERE is_paused = true
AND pause_resume_date <= :now
`, {
replacements: { now },
type: QueryTypes.SELECT
});
// Convert to WorkflowRequest-like objects
// results is an array of objects from SELECT query
pausedWorkflows = (results as any[]).map((r: any) => ({
requestId: r.request_id,
isPaused: r.is_paused,
pauseResumeDate: r.pause_resume_date
})) as any;
} else {
throw error; // Re-throw if it's a different error
}
}
let resumedCount = 0; let resumedCount = 0;
for (const workflow of pausedWorkflows) { for (const workflow of pausedWorkflows) {

File diff suppressed because it is too large Load Diff

View File

@ -1,246 +0,0 @@
import { WorkflowTemplate } from '../models/WorkflowTemplate';
import { WorkflowRequest } from '../models/WorkflowRequest';
import { User } from '../models/User';
import { Op } from 'sequelize';
import logger from '../utils/logger';
/**
 * Template Service
 * CRUD operations for workflow templates: creation with code-uniqueness
 * checks, lookups, filtered listing, guarded update/delete, and usage
 * tracking. All failures are logged and re-thrown to the caller.
 */
export class TemplateService {
  /**
   * Create a new workflow template.
   * Admin-created templates are never system templates.
   *
   * @throws Error when the supplied templateCode already exists
   */
  async createTemplate(
    userId: string,
    templateData: {
      templateName: string;
      templateCode?: string;
      templateDescription?: string;
      templateCategory?: string;
      workflowType?: string;
      approvalLevelsConfig?: any;
      defaultTatHours?: number;
      formStepsConfig?: any;
      userFieldMappings?: any;
      dynamicApproverConfig?: any;
      isActive?: boolean;
    }
  ): Promise<WorkflowTemplate> {
    try {
      const { templateCode } = templateData;
      // Enforce template-code uniqueness before creating.
      if (templateCode) {
        const duplicate = await WorkflowTemplate.findOne({ where: { templateCode } });
        if (duplicate) {
          throw new Error(`Template code '${templateData.templateCode}' already exists`);
        }
      }
      const record = await WorkflowTemplate.create({
        templateName: templateData.templateName,
        templateCode: templateData.templateCode,
        templateDescription: templateData.templateDescription,
        templateCategory: templateData.templateCategory,
        // Fall back to an upper-cased template code when no explicit type given.
        workflowType: templateData.workflowType || templateData.templateCode?.toUpperCase(),
        approvalLevelsConfig: templateData.approvalLevelsConfig,
        defaultTatHours: templateData.defaultTatHours || 24,
        formStepsConfig: templateData.formStepsConfig,
        userFieldMappings: templateData.userFieldMappings,
        dynamicApproverConfig: templateData.dynamicApproverConfig,
        isActive: templateData.isActive === undefined ? true : templateData.isActive,
        isSystemTemplate: false, // Admin-created templates are not system templates
        usageCount: 0,
        createdBy: userId,
      });
      logger.info(`[TemplateService] Created template: ${record.templateId}`);
      return record;
    } catch (error) {
      logger.error('[TemplateService] Error creating template:', error);
      throw error;
    }
  }

  /**
   * Get a template by primary key (creator included); null when absent.
   */
  async getTemplate(templateId: string): Promise<WorkflowTemplate | null> {
    try {
      return await WorkflowTemplate.findByPk(templateId, {
        include: [{ model: User, as: 'creator' }]
      });
    } catch (error) {
      logger.error('[TemplateService] Error getting template:', error);
      throw error;
    }
  }

  /**
   * Get a template by its unique code (creator included); null when absent.
   */
  async getTemplateByCode(templateCode: string): Promise<WorkflowTemplate | null> {
    try {
      return await WorkflowTemplate.findOne({
        where: { templateCode },
        include: [{ model: User, as: 'creator' }]
      });
    } catch (error) {
      logger.error('[TemplateService] Error getting template by code:', error);
      throw error;
    }
  }

  /**
   * List templates, newest first, optionally filtered by category, workflow
   * type, active/system flags and a case-insensitive name/code/description
   * substring search.
   */
  async listTemplates(filters?: {
    category?: string;
    workflowType?: string;
    isActive?: boolean;
    isSystemTemplate?: boolean;
    search?: string;
  }): Promise<WorkflowTemplate[]> {
    try {
      const where: any = {};
      if (filters) {
        const { category, workflowType, isActive, isSystemTemplate, search } = filters;
        if (category) where.templateCategory = category;
        if (workflowType) where.workflowType = workflowType;
        if (isActive !== undefined) where.isActive = isActive;
        if (isSystemTemplate !== undefined) where.isSystemTemplate = isSystemTemplate;
        if (search) {
          const pattern = `%${search}%`;
          where[Op.or] = [
            { templateName: { [Op.iLike]: pattern } },
            { templateCode: { [Op.iLike]: pattern } },
            { templateDescription: { [Op.iLike]: pattern } }
          ];
        }
      }
      return await WorkflowTemplate.findAll({
        where,
        include: [{ model: User, as: 'creator' }],
        order: [['createdAt', 'DESC']]
      });
    } catch (error) {
      logger.error('[TemplateService] Error listing templates:', error);
      throw error;
    }
  }

  /**
   * Update a template.
   *
   * @throws Error when the template is missing, or when attempting to change
   *   approval levels of a system template
   */
  async updateTemplate(
    templateId: string,
    userId: string,
    updateData: {
      templateName?: string;
      templateDescription?: string;
      templateCategory?: string;
      approvalLevelsConfig?: any;
      defaultTatHours?: number;
      formStepsConfig?: any;
      userFieldMappings?: any;
      dynamicApproverConfig?: any;
      isActive?: boolean;
    }
  ): Promise<WorkflowTemplate> {
    try {
      const record = await WorkflowTemplate.findByPk(templateId);
      if (!record) {
        throw new Error('Template not found');
      }
      // System templates keep their approval-level configuration immutable.
      if (record.isSystemTemplate && updateData.approvalLevelsConfig) {
        throw new Error('Cannot modify approval levels of system templates');
      }
      await record.update(updateData);
      logger.info(`[TemplateService] Updated template: ${templateId}`);
      return record;
    } catch (error) {
      logger.error('[TemplateService] Error updating template:', error);
      throw error;
    }
  }

  /**
   * Delete a template (soft delete: sets isActive to false).
   *
   * @throws Error when the template is missing, still referenced by requests,
   *   or is a system template
   */
  async deleteTemplate(templateId: string): Promise<void> {
    try {
      const record = await WorkflowTemplate.findByPk(templateId);
      if (!record) {
        throw new Error('Template not found');
      }
      // Refuse deletion while any request still references the template.
      const usageCount = await WorkflowRequest.count({
        where: { templateId }
      });
      if (usageCount > 0) {
        throw new Error(`Cannot delete template: ${usageCount} request(s) are using this template`);
      }
      // System templates cannot be deleted
      if (record.isSystemTemplate) {
        throw new Error('Cannot delete system templates');
      }
      // Soft delete by deactivating
      await record.update({ isActive: false });
      logger.info(`[TemplateService] Deleted (deactivated) template: ${templateId}`);
    } catch (error) {
      logger.error('[TemplateService] Error deleting template:', error);
      throw error;
    }
  }

  /**
   * Get all active templates ordered by name, for workflow creation pickers.
   */
  async getActiveTemplates(): Promise<WorkflowTemplate[]> {
    try {
      return await WorkflowTemplate.findAll({
        where: { isActive: true },
        order: [['templateName', 'ASC']]
      });
    } catch (error) {
      logger.error('[TemplateService] Error getting active templates:', error);
      throw error;
    }
  }

  /**
   * Increment a template's usage counter. Best-effort: failures are logged
   * but never thrown, as usage tracking is not critical.
   */
  async incrementUsageCount(templateId: string): Promise<void> {
    try {
      await WorkflowTemplate.increment('usageCount', {
        where: { templateId }
      });
    } catch (error) {
      logger.error('[TemplateService] Error incrementing usage count:', error);
      // Don't throw - this is not critical
    }
  }
}

View File

@ -1,287 +0,0 @@
import { WorkflowRequest } from '../models/WorkflowRequest';
import { ApprovalLevel } from '../models/ApprovalLevel';
import { User } from '../models/User';
import { Participant } from '../models/Participant';
import logger from '../utils/logger';
/**
 * Interface for user reference configuration in form fields.
 * Declares which workflow participant a field refers to and how its value
 * should be auto-populated from that user's data.
 */
export interface UserReference {
  // Workflow participant the field refers to.
  role: 'initiator' | 'dealer' | 'approver' | 'team_lead' | 'department_lead' | 'current_approver' | 'previous_approver';
  level?: number; // For approver: which approval level
  field: 'name' | 'email' | 'phone' | 'department' | 'employee_id' | 'all'; // Which user field to reference
  autoPopulate: boolean; // Auto-fill from user data
  editable: boolean; // Can user edit the auto-populated value
}

/**
 * Interface for form step configuration: one step of a multi-step form,
 * with its fields and optional user-reference capture rules.
 */
export interface FormStepConfig {
  stepNumber: number;
  stepName: string;
  stepDescription?: string;
  fields: FormFieldConfig[];
  userReferences?: UserReferenceConfig[];
}

// A single form field within a step.
export interface FormFieldConfig {
  fieldId: string;
  fieldType: string; // presumably a UI widget type (text/number/date/…) — confirm against form renderer
  label: string;
  required: boolean;
  defaultValue?: any;
  userReference?: UserReference; // When set, the field can be auto-populated from user data
}

// Rules for capturing user data for a given participant role.
export interface UserReferenceConfig {
  role: string;
  captureFields: string[]; // Which user fields to capture
  autoPopulateFrom: 'workflow' | 'user_profile' | 'approval_level';
  allowOverride: boolean; // Whether a user may override the captured values
}
/**
 * Service to resolve user references in template forms.
 * Maps the declarative `userReference` configuration on form fields to
 * concrete user records (initiator, dealer, approvers, leads) and extracts
 * the requested user fields for auto-population.
 */
export class TemplateFieldResolver {
  /**
   * Resolve user reference fields in a step.
   * Returns a map of fieldId -> auto-populated value for every field whose
   * userReference has autoPopulate enabled. Errors are logged and the map
   * returned as-is (possibly partial/empty) rather than thrown.
   */
  async resolveUserReferences(
    stepConfig: FormStepConfig,
    request: WorkflowRequest,
    currentUserId: string,
    context?: {
      currentLevel?: number;
      approvers?: Map<number, ApprovalLevel>;
    }
  ): Promise<Record<string, any>> {
    const resolvedFields: Record<string, any> = {};
    try {
      for (const field of stepConfig.fields) {
        if (field.userReference) {
          const userData = await this.getUserDataForReference(
            field.userReference,
            request,
            currentUserId,
            context
          );
          // Only fields flagged autoPopulate (and with a resolved user) get values.
          if (field.userReference.autoPopulate && userData) {
            resolvedFields[field.fieldId] = this.extractUserField(
              userData,
              field.userReference.field
            );
          }
        }
      }
    } catch (error) {
      logger.error('[TemplateFieldResolver] Error resolving user references:', error);
    }
    return resolvedFields;
  }

  /**
   * Get the user record for a reference configuration.
   * Each role maps to a different lookup strategy; returns null when no
   * matching user can be found or on error (errors are logged).
   */
  private async getUserDataForReference(
    userRef: UserReference,
    request: WorkflowRequest,
    currentUserId: string,
    context?: any
  ): Promise<User | null> {
    try {
      switch (userRef.role) {
        case 'initiator':
          return await User.findByPk(request.initiatorId);
        case 'dealer':
          // Get dealer from participants (active DEALER participant on the request).
          const dealerParticipant = await Participant.findOne({
            where: {
              requestId: request.requestId,
              participantType: 'DEALER' as any,
              isActive: true
            },
            include: [{ model: User, as: 'user' }]
          });
          return dealerParticipant?.user || null;
        case 'approver':
          // Prefer the pre-loaded approver for the configured level, if supplied.
          if (userRef.level && context?.approvers) {
            const approverLevel = context.approvers.get(userRef.level);
            if (approverLevel?.approverId) {
              return await User.findByPk(approverLevel.approverId);
            }
          }
          // Fallback to current approver (pending level at the current position).
          const currentLevel = await ApprovalLevel.findOne({
            where: {
              requestId: request.requestId,
              levelNumber: context?.currentLevel || request.currentLevel,
              status: 'PENDING' as any
            }
          });
          if (currentLevel?.approverId) {
            return await User.findByPk(currentLevel.approverId);
          }
          return null;
        case 'team_lead':
          // Find team lead based on initiator's manager.
          // NOTE(review): matches User.manager against an email column —
          // assumes `manager` stores an email address; confirm with user schema.
          const initiator = await User.findByPk(request.initiatorId);
          if (initiator?.manager) {
            return await User.findOne({
              where: {
                email: initiator.manager,
                role: 'MANAGEMENT' as any
              }
            });
          }
          return null;
        case 'department_lead':
          // Most recently created MANAGEMENT user in the initiator's department.
          const initiatorUser = await User.findByPk(request.initiatorId);
          if (initiatorUser?.department) {
            return await User.findOne({
              where: {
                department: initiatorUser.department,
                role: 'MANAGEMENT' as any
              },
              order: [['created_at', 'DESC']]
            });
          }
          return null;
        case 'current_approver':
          // Lowest-numbered level still pending on the request.
          const currentApprovalLevel = await ApprovalLevel.findOne({
            where: {
              requestId: request.requestId,
              status: 'PENDING' as any
            },
            order: [['level_number', 'ASC']]
          });
          if (currentApprovalLevel?.approverId) {
            return await User.findByPk(currentApprovalLevel.approverId);
          }
          return null;
        case 'previous_approver':
          // Approver of the level immediately before the request's current one.
          const previousLevel = request.currentLevel - 1;
          if (previousLevel > 0) {
            const previousApprovalLevel = await ApprovalLevel.findOne({
              where: {
                requestId: request.requestId,
                levelNumber: previousLevel
              }
            });
            if (previousApprovalLevel?.approverId) {
              return await User.findByPk(previousApprovalLevel.approverId);
            }
          }
          return null;
        default:
          return null;
      }
    } catch (error) {
      logger.error(`[TemplateFieldResolver] Error getting user data for role ${userRef.role}:`, error);
      return null;
    }
  }

  /**
   * Extract a specific field from a user record.
   * 'all' returns an object with every supported field; unknown field names
   * (and a null user) yield null.
   */
  private extractUserField(user: User, field: string): any {
    if (!user) return null;
    switch (field) {
      case 'name':
        // Prefer the display name; otherwise join first/last names.
        return user.displayName || `${user.firstName || ''} ${user.lastName || ''}`.trim();
      case 'email':
        return user.email;
      case 'phone':
        return user.phone || user.mobilePhone;
      case 'department':
        return user.department;
      case 'employee_id':
        return user.employeeId;
      case 'all':
        return {
          name: user.displayName || `${user.firstName || ''} ${user.lastName || ''}`.trim(),
          email: user.email,
          phone: user.phone || user.mobilePhone,
          department: user.department,
          employeeId: user.employeeId
        };
      default:
        return null;
    }
  }

  /**
   * Resolve a dynamic approver for an approval level from configuration.
   * Looks up the rule matching `level` and applies its selection criteria
   * (by role, by department, or by the initiator's manager). Returns null
   * when the config is disabled, no rule matches, or no user is found.
   */
  async resolveDynamicApprover(
    level: number,
    config: any, // DynamicApproverConfig
    request: WorkflowRequest
  ): Promise<User | null> {
    if (!config?.enabled || !config?.approverSelection?.dynamicRules) {
      return null;
    }
    try {
      const rule = config.approverSelection.dynamicRules.find((r: any) => r.level === level);
      if (!rule) return null;
      const criteria = rule.selectionCriteria;
      switch (criteria.type) {
        case 'role':
          // Most recently created user holding the configured role.
          return await User.findOne({
            where: {
              role: criteria.value as any
            },
            order: [['created_at', 'DESC']]
          });
        case 'department':
          // Supports the literal placeholder '${initiator.department}' in the
          // configured value, substituted with the initiator's department.
          const initiator = await User.findByPk(request.initiatorId);
          const deptValue = criteria.value?.replace('${initiator.department}', initiator?.department || '') || initiator?.department;
          if (deptValue) {
            return await User.findOne({
              where: {
                department: deptValue,
                role: 'MANAGEMENT' as any
              }
            });
          }
          return null;
        case 'manager':
          // NOTE(review): matches User.manager against the email column —
          // assumes `manager` stores an email address; confirm with user schema.
          const initiatorUser = await User.findByPk(request.initiatorId);
          if (initiatorUser?.manager) {
            return await User.findOne({
              where: {
                email: initiatorUser.manager
              }
            });
          }
          return null;
        default:
          return null;
      }
    } catch (error) {
      logger.error('[TemplateFieldResolver] Error resolving dynamic approver:', error);
      return null;
    }
  }
}

View File

@ -2,7 +2,6 @@ import { User as UserModel } from '../models/User';
import { Op } from 'sequelize'; import { Op } from 'sequelize';
import { SSOUserData } from '../types/auth.types'; // Use shared type import { SSOUserData } from '../types/auth.types'; // Use shared type
import axios from 'axios'; import axios from 'axios';
import logger from '../utils/logger';
// Using UserModel type directly - interface removed to avoid duplication // Using UserModel type directly - interface removed to avoid duplication
@ -17,83 +16,10 @@ interface OktaUser {
login: string; login: string;
department?: string; department?: string;
mobilePhone?: string; mobilePhone?: string;
[key: string]: any; // Allow any additional profile fields
}; };
} }
/**
* Extract full user data from Okta Users API response (centralized extraction)
* This ensures consistent field mapping across all user creation/update operations
*/
function extractOktaUserData(oktaUserResponse: any): SSOUserData | null {
try {
const profile = oktaUserResponse.profile || {};
const userData: SSOUserData = {
oktaSub: oktaUserResponse.id || '',
email: profile.email || profile.login || '',
employeeId: profile.employeeID || profile.employeeId || profile.employee_id || undefined,
firstName: profile.firstName || undefined,
lastName: profile.lastName || undefined,
displayName: profile.displayName || undefined,
department: profile.department || undefined,
designation: profile.title || profile.designation || undefined,
phone: profile.mobilePhone || profile.phone || profile.phoneNumber || undefined,
manager: profile.manager || undefined, // Manager name from Okta
jobTitle: profile.title || undefined,
postalAddress: profile.postalAddress || undefined,
mobilePhone: profile.mobilePhone || undefined,
secondEmail: profile.secondEmail || profile.second_email || undefined,
adGroups: Array.isArray(profile.memberOf) ? profile.memberOf : undefined,
};
// Validate required fields
if (!userData.oktaSub || !userData.email) {
return null;
}
return userData;
} catch (error) {
return null;
}
}
export class UserService { export class UserService {
/**
* Build a consistent user payload for create/update from SSO data.
* @param isUpdate - If true, excludes email from payload (email should never be updated)
*/
private buildUserPayload(ssoData: SSOUserData, existingRole?: string, isUpdate: boolean = false) {
const now = new Date();
const payload: any = {
oktaSub: ssoData.oktaSub,
employeeId: ssoData.employeeId || null,
firstName: ssoData.firstName || null,
lastName: ssoData.lastName || null,
displayName: ssoData.displayName || null,
department: ssoData.department || null,
designation: ssoData.designation || null,
phone: ssoData.phone || null,
manager: ssoData.manager || null,
jobTitle: ssoData.designation || ssoData.jobTitle || null,
postalAddress: ssoData.postalAddress || null,
mobilePhone: ssoData.mobilePhone || null,
secondEmail: ssoData.secondEmail || null,
adGroups: ssoData.adGroups || null,
lastLogin: now,
updatedAt: now,
isActive: ssoData.isActive ?? true,
role: (ssoData.role as any) || existingRole || 'USER',
};
// Only include email for new users (never update email for existing users)
if (!isUpdate) {
payload.email = ssoData.email;
}
return payload;
}
async createOrUpdateUser(ssoData: SSOUserData): Promise<UserModel> { async createOrUpdateUser(ssoData: SSOUserData): Promise<UserModel> {
// Validate required fields // Validate required fields
if (!ssoData.email || !ssoData.oktaSub) { if (!ssoData.email || !ssoData.oktaSub) {
@ -110,18 +36,44 @@ export class UserService {
} }
}); });
if (existingUser) { const now = new Date();
// Update existing user - DO NOT update email (crucial identifier)
const updatePayload = this.buildUserPayload(ssoData, existingUser.role, true); // isUpdate = true
await existingUser.update(updatePayload); if (existingUser) {
// Update existing user - include oktaSub to ensure it's synced
await existingUser.update({
email: ssoData.email,
oktaSub: ssoData.oktaSub,
employeeId: ssoData.employeeId || null, // Optional
firstName: ssoData.firstName || null,
lastName: ssoData.lastName || null,
displayName: ssoData.displayName || null,
department: ssoData.department || null,
designation: ssoData.designation || null,
phone: ssoData.phone || null,
// location: (ssoData as any).location || null, // Ignored for now - schema not finalized
lastLogin: now,
updatedAt: now,
isActive: true, // Ensure user is active after SSO login
});
return existingUser; return existingUser;
} else { } else {
// Create new user - oktaSub is required, email is included // Create new user - oktaSub is required
const createPayload = this.buildUserPayload(ssoData, 'USER', false); // isUpdate = false const newUser = await UserModel.create({
email: ssoData.email,
const newUser = await UserModel.create(createPayload); oktaSub: ssoData.oktaSub, // Required
employeeId: ssoData.employeeId || null, // Optional
firstName: ssoData.firstName || null,
lastName: ssoData.lastName || null,
displayName: ssoData.displayName || null,
department: ssoData.department || null,
designation: ssoData.designation || null,
phone: ssoData.phone || null,
// location: (ssoData as any).location || null, // Ignored for now - schema not finalized
isActive: true,
role: 'USER', // Default role for new users
lastLogin: now
});
return newUser; return newUser;
} }
@ -269,10 +221,9 @@ export class UserService {
} }
/** /**
* Fetch user from Okta by email and extract full profile data * Fetch user from Okta by email
* Returns SSOUserData with all fields including manager, jobTitle, etc.
*/ */
async fetchAndExtractOktaUserByEmail(email: string): Promise<SSOUserData | null> { async fetchUserFromOktaByEmail(email: string): Promise<OktaUser | null> {
try { try {
const oktaDomain = process.env.OKTA_DOMAIN; const oktaDomain = process.env.OKTA_DOMAIN;
const oktaApiToken = process.env.OKTA_API_TOKEN; const oktaApiToken = process.env.OKTA_API_TOKEN;
@ -281,25 +232,7 @@ export class UserService {
return null; return null;
} }
// Try to fetch by email directly first (more reliable) // Search Okta users by email (exact match)
try {
const directResponse = await axios.get(`${oktaDomain}/api/v1/users/${encodeURIComponent(email)}`, {
headers: {
'Authorization': `SSWS ${oktaApiToken}`,
'Accept': 'application/json'
},
timeout: 5000,
validateStatus: (status) => status < 500
});
if (directResponse.status === 200 && directResponse.data) {
return extractOktaUserData(directResponse.data);
}
} catch (directError) {
// Fall through to search method
}
// Fallback: Search Okta users by email
const response = await axios.get(`${oktaDomain}/api/v1/users`, { const response = await axios.get(`${oktaDomain}/api/v1/users`, {
params: { search: `profile.email eq "${email}"`, limit: 1 }, params: { search: `profile.email eq "${email}"`, limit: 1 },
headers: { headers: {
@ -309,81 +242,14 @@ export class UserService {
timeout: 5000 timeout: 5000
}); });
const users: any[] = response.data || []; const users: OktaUser[] = response.data || [];
if (users.length > 0) { return users.length > 0 ? users[0] : null;
return extractOktaUserData(users[0]);
}
return null;
} catch (error: any) { } catch (error: any) {
console.error(`Failed to fetch user from Okta by email ${email}:`, error.message); console.error(`Failed to fetch user from Okta by email ${email}:`, error.message);
return null; return null;
} }
} }
/**
* Search users in Okta by displayName
* Uses Okta search API: /api/v1/users?search=profile.displayName eq "displayName"
* @param displayName - Display name to search for
* @returns Array of matching users from Okta
*/
async searchOktaByDisplayName(displayName: string): Promise<OktaUser[]> {
try {
const oktaDomain = process.env.OKTA_DOMAIN;
const oktaApiToken = process.env.OKTA_API_TOKEN;
if (!oktaDomain || !oktaApiToken) {
logger.warn('[UserService] Okta not configured, returning empty array for displayName search');
return [];
}
// Search Okta users by displayName
const response = await axios.get(`${oktaDomain}/api/v1/users`, {
params: {
search: `profile.displayName eq "${displayName}"`,
limit: 50
},
headers: {
'Authorization': `SSWS ${oktaApiToken}`,
'Accept': 'application/json'
},
timeout: 5000
});
const oktaUsers: OktaUser[] = response.data || [];
// Filter only active users
return oktaUsers.filter(u => u.status === 'ACTIVE');
} catch (error: any) {
logger.error(`[UserService] Error searching Okta by displayName "${displayName}":`, error.message);
return [];
}
}
/**
* Fetch user from Okta by email (legacy method, kept for backward compatibility)
* @deprecated Use fetchAndExtractOktaUserByEmail instead for full profile extraction
*/
async fetchUserFromOktaByEmail(email: string): Promise<OktaUser | null> {
const userData = await this.fetchAndExtractOktaUserByEmail(email);
if (!userData) return null;
// Return in legacy format for backward compatibility
return {
id: userData.oktaSub,
status: 'ACTIVE',
profile: {
email: userData.email,
login: userData.email,
firstName: userData.firstName,
lastName: userData.lastName,
displayName: userData.displayName,
department: userData.department,
mobilePhone: userData.mobilePhone,
}
};
}
/** /**
* Ensure user exists in database (create if not exists) * Ensure user exists in database (create if not exists)
* Used when tagging users from Okta search results or when only email is provided * Used when tagging users from Okta search results or when only email is provided

View File

@ -332,9 +332,6 @@ export class WorkflowService {
// Shift existing levels at and after target level // Shift existing levels at and after target level
// IMPORTANT: Shift in REVERSE order to avoid unique constraint violations // IMPORTANT: Shift in REVERSE order to avoid unique constraint violations
// IMPORTANT: Preserve original level names when shifting (don't overwrite them)
// IMPORTANT: Update status of shifted levels - if they were IN_PROGRESS, set to PENDING
// because they're no longer the current active step (new approver is being added before them)
const levelsToShift = allLevels const levelsToShift = allLevels
.filter(l => (l as any).levelNumber >= targetLevel) .filter(l => (l as any).levelNumber >= targetLevel)
.sort((a, b) => (b as any).levelNumber - (a as any).levelNumber); // Sort descending .sort((a, b) => (b as any).levelNumber - (a as any).levelNumber); // Sort descending
@ -342,47 +339,24 @@ export class WorkflowService {
for (const levelToShift of levelsToShift) { for (const levelToShift of levelsToShift) {
const oldLevelNumber = (levelToShift as any).levelNumber; const oldLevelNumber = (levelToShift as any).levelNumber;
const newLevelNumber = oldLevelNumber + 1; const newLevelNumber = oldLevelNumber + 1;
const existingLevelName = (levelToShift as any).levelName;
const currentStatus = (levelToShift as any).status;
// If the level being shifted was IN_PROGRESS or PENDING, set it to PENDING
// because it's no longer the current active step (a new approver is being added before it)
const newStatus = (currentStatus === ApprovalStatus.IN_PROGRESS || currentStatus === ApprovalStatus.PENDING)
? ApprovalStatus.PENDING
: currentStatus; // Keep APPROVED, REJECTED, SKIPPED as-is
// Preserve the original level name - don't overwrite it
await levelToShift.update({ await levelToShift.update({
levelNumber: newLevelNumber, levelNumber: newLevelNumber,
// Keep existing levelName if it exists, otherwise use generic levelName: `Level ${newLevelNumber}`
levelName: existingLevelName || `Level ${newLevelNumber}`, });
status: newStatus, logger.info(`[Workflow] Shifted level ${oldLevelNumber}${newLevelNumber}`);
// Clear levelStartTime and tatStartTime since this is no longer the active step
levelStartTime: undefined,
tatStartTime: undefined,
} as any);
logger.info(`[Workflow] Shifted level ${oldLevelNumber}${newLevelNumber}, preserved levelName: ${existingLevelName || 'N/A'}, updated status: ${currentStatus}${newStatus}`);
} }
// Update total levels in workflow // Update total levels in workflow
await workflow.update({ totalLevels: allLevels.length + 1 }); await workflow.update({ totalLevels: allLevels.length + 1 });
// Auto-generate smart level name for newly added approver // Auto-generate smart level name
// Use "Additional Approver" to identify dynamically added approvers let levelName = `Level ${targetLevel}`;
let levelName = `Additional Approver`;
if (designation) { if (designation) {
levelName = `Additional Approver - ${designation}`; levelName = `${designation} Approval`;
} else if (department) { } else if (department) {
levelName = `Additional Approver - ${department}`; levelName = `${department} Approval`;
} else if (userName) {
levelName = `Additional Approver - ${userName}`;
} }
// Determine if the new level should be IN_PROGRESS
// If we're adding at the current level, the new approver becomes the active approver
const workflowCurrentLevel = (workflow as any).currentLevel;
const isAddingAtCurrentLevel = targetLevel === workflowCurrentLevel;
// Create new approval level at target position // Create new approval level at target position
const newLevel = await ApprovalLevel.create({ const newLevel = await ApprovalLevel.create({
requestId, requestId,
@ -393,17 +367,12 @@ export class WorkflowService {
approverName: userName, approverName: userName,
tatHours, tatHours,
// tatDays is auto-calculated by database as a generated column // tatDays is auto-calculated by database as a generated column
status: isAddingAtCurrentLevel ? ApprovalStatus.IN_PROGRESS : ApprovalStatus.PENDING, status: targetLevel === (workflow as any).currentLevel ? ApprovalStatus.IN_PROGRESS : ApprovalStatus.PENDING,
isFinalApprover: targetLevel === allLevels.length + 1, isFinalApprover: targetLevel === allLevels.length + 1,
levelStartTime: isAddingAtCurrentLevel ? new Date() : null, levelStartTime: targetLevel === (workflow as any).currentLevel ? new Date() : null,
tatStartTime: isAddingAtCurrentLevel ? new Date() : null tatStartTime: targetLevel === (workflow as any).currentLevel ? new Date() : null
} as any); } as any);
// IMPORTANT: If we're adding at the current level, the workflow's currentLevel stays the same
// (it's still the same level number, just with a new approver)
// The status update we did above ensures the shifted level becomes PENDING
// No need to update workflow.currentLevel - it's already correct
// Update isFinalApprover for previous final approver (now it's not final anymore) // Update isFinalApprover for previous final approver (now it's not final anymore)
if (allLevels.length > 0) { if (allLevels.length > 0) {
const previousFinal = allLevels.find(l => (l as any).isFinalApprover); const previousFinal = allLevels.find(l => (l as any).isFinalApprover);
@ -998,9 +967,6 @@ export class WorkflowService {
closureDate: (wf as any).closureDate, closureDate: (wf as any).closureDate,
conclusionRemark: (wf as any).conclusionRemark, conclusionRemark: (wf as any).conclusionRemark,
closureType: closureType, // 'approved' or 'rejected' - indicates path to closure closureType: closureType, // 'approved' or 'rejected' - indicates path to closure
workflowType: (wf as any).workflowType || null, // 'CLAIM_MANAGEMENT', 'NON_TEMPLATIZED', etc.
templateType: (wf as any).templateType || null, // 'CUSTOM', 'TEMPLATE', 'DEALER CLAIM'
templateId: (wf as any).templateId || null, // Reference to workflow_templates if using admin template
initiator: (wf as any).initiator, initiator: (wf as any).initiator,
department: (wf as any).initiator?.department, department: (wf as any).initiator?.department,
totalLevels: (wf as any).totalLevels, totalLevels: (wf as any).totalLevels,
@ -2785,16 +2751,10 @@ export class WorkflowService {
const status = (approval.status || '').toString().toUpperCase(); const status = (approval.status || '').toString().toUpperCase();
const approvalData = approval.toJSON(); const approvalData = approval.toJSON();
const isPausedLevel = status === 'PAUSED' || approval.isPaused; const isPausedLevel = status === 'PAUSED' || approval.isPaused;
const approvalLevelNumber = approval.levelNumber || 0;
const workflowCurrentLevelNumber = currentLevel ? (currentLevel as any).levelNumber : ((workflow as any).currentLevel || 1);
// Calculate SLA ONLY for the CURRENT active level (matching currentLevel) // Calculate SLA for active approvals (pending/in-progress/paused)
// This ensures that when in step 1, only step 1 has elapsed time, others have 0
// Include PAUSED so we show SLA for the paused approver, not the next one // Include PAUSED so we show SLA for the paused approver, not the next one
const isCurrentLevel = approvalLevelNumber === workflowCurrentLevelNumber; if (status === 'PENDING' || status === 'IN_PROGRESS' || status === 'PAUSED') {
const shouldCalculateSLA = isCurrentLevel && (status === 'PENDING' || status === 'IN_PROGRESS' || status === 'PAUSED');
if (shouldCalculateSLA) {
const levelStartTime = approval.levelStartTime || approval.tatStartTime || approval.createdAt; const levelStartTime = approval.levelStartTime || approval.tatStartTime || approval.createdAt;
const tatHours = Number(approval.tatHours || 0); const tatHours = Number(approval.tatHours || 0);
@ -2851,39 +2811,23 @@ export class WorkflowService {
} }
} }
// For waiting levels (future levels that haven't started), set elapsedHours to 0
// This ensures that when in step 1, steps 2-8 show elapsedHours = 0
if (approvalLevelNumber > workflowCurrentLevelNumber && status !== 'APPROVED' && status !== 'REJECTED') {
return {
...approvalData,
elapsedHours: 0,
remainingHours: Number(approval.tatHours || 0),
tatPercentageUsed: 0,
};
}
// For completed/rejected levels, return as-is (already has final values from database) // For completed/rejected levels, return as-is (already has final values from database)
return approvalData; return approvalData;
})); }));
// Calculate overall request SLA based on cumulative elapsed hours from all levels // Calculate overall request SLA based on cumulative elapsed hours from all levels
// This correctly accounts for pause periods since each level's elapsedHours is pause-adjusted // This correctly accounts for pause periods since each level's elapsedHours is pause-adjusted
// Use submissionDate if available, otherwise fallback to createdAt for SLA calculation const submissionDate = (workflow as any).submissionDate;
const submissionDate = (workflow as any).submissionDate || (workflow as any).createdAt;
const totalTatHours = updatedApprovals.reduce((sum, a) => sum + Number(a.tatHours || 0), 0); const totalTatHours = updatedApprovals.reduce((sum, a) => sum + Number(a.tatHours || 0), 0);
let overallSLA = null; let overallSLA = null;
if (submissionDate && totalTatHours > 0) { if (submissionDate && totalTatHours > 0) {
// Calculate total elapsed hours by summing elapsed hours from all levels // Calculate total elapsed hours by summing elapsed hours from all levels
// CRITICAL: Only count elapsed hours from completed levels + current active level // This ensures pause periods are correctly excluded from the overall calculation
// Waiting levels (future steps) should contribute 0 elapsed hours
// This ensures that when in step 1, only step 1's elapsed hours are counted
let totalElapsedHours = 0; let totalElapsedHours = 0;
const workflowCurrentLevelNumber = currentLevel ? (currentLevel as any).levelNumber : ((workflow as any).currentLevel || 1);
for (const approval of updatedApprovals) { for (const approval of updatedApprovals) {
const status = (approval.status || '').toString().toUpperCase(); const status = (approval.status || '').toString().toUpperCase();
const approvalLevelNumber = approval.levelNumber || 0;
if (status === 'APPROVED' || status === 'REJECTED') { if (status === 'APPROVED' || status === 'REJECTED') {
// For completed levels, use the stored elapsedHours (already pause-adjusted from when level was completed) // For completed levels, use the stored elapsedHours (already pause-adjusted from when level was completed)
@ -2892,11 +2836,6 @@ export class WorkflowService {
// Skipped levels don't contribute to elapsed time // Skipped levels don't contribute to elapsed time
continue; continue;
} else if (status === 'PENDING' || status === 'IN_PROGRESS' || status === 'PAUSED') { } else if (status === 'PENDING' || status === 'IN_PROGRESS' || status === 'PAUSED') {
// CRITICAL: Only count elapsed hours for the CURRENT active level
// Waiting levels (future steps) should NOT contribute elapsed hours
// This ensures request-level elapsed time matches the current step's elapsed time
const isCurrentLevel = approvalLevelNumber === workflowCurrentLevelNumber;
if (isCurrentLevel) {
// For active/paused levels, use the SLA-calculated elapsedHours (pause-adjusted) // For active/paused levels, use the SLA-calculated elapsedHours (pause-adjusted)
if (approval.sla?.elapsedHours !== undefined) { if (approval.sla?.elapsedHours !== undefined) {
totalElapsedHours += Number(approval.sla.elapsedHours); totalElapsedHours += Number(approval.sla.elapsedHours);
@ -2904,8 +2843,6 @@ export class WorkflowService {
totalElapsedHours += Number(approval.elapsedHours || 0); totalElapsedHours += Number(approval.elapsedHours || 0);
} }
} }
// Waiting levels (approvalLevelNumber > workflowCurrentLevelNumber) contribute 0 elapsed hours
}
// WAITING levels haven't started yet, so no elapsed time // WAITING levels haven't started yet, so no elapsed time
} }
@ -2925,7 +2862,7 @@ export class WorkflowService {
overallStatus = 'approaching'; overallStatus = 'approaching';
} }
// Format time display (simple format - frontend will handle detailed formatting) // Format time display
const formatTime = (hours: number) => { const formatTime = (hours: number) => {
if (hours < 1) return `${Math.round(hours * 60)}m`; if (hours < 1) return `${Math.round(hours * 60)}m`;
const wholeHours = Math.floor(hours); const wholeHours = Math.floor(hours);

View File

@ -9,14 +9,6 @@ export interface SSOUserData {
designation?: string; designation?: string;
phone?: string; phone?: string;
reportingManagerId?: string; reportingManagerId?: string;
manager?: string; // Optional - Manager name from Okta profile
jobTitle?: string; // Detailed title from Okta profile.title
postalAddress?: string;
mobilePhone?: string;
secondEmail?: string;
adGroups?: string[];
role?: 'USER' | 'MANAGEMENT' | 'ADMIN';
isActive?: boolean;
} }
export interface SSOConfig { export interface SSOConfig {
@ -28,7 +20,6 @@ export interface SSOConfig {
oktaDomain: string; oktaDomain: string;
oktaClientId: string; oktaClientId: string;
oktaClientSecret: string; oktaClientSecret: string;
oktaApiToken?: string; // Optional - SSWS token for Okta Users API
} }
export interface AuthTokens { export interface AuthTokens {