Compare commits
72 Commits
main
...
laxman_dev
| Author | SHA1 | Date | |
|---|---|---|---|
| 16deafd42d | |||
|
|
d01e248a35 | ||
|
|
d25ffbaf7b | ||
|
|
8e40b73f65 | ||
|
|
3c55404f18 | ||
|
|
387d1881f7 | ||
|
|
dfe2c1423a | ||
|
|
7d35a1d167 | ||
|
|
657191ce2b | ||
|
|
e739b8b5ee | ||
|
|
1a02781731 | ||
|
|
44a19bbfea | ||
|
|
872bda4731 | ||
|
|
80e28fb0eb | ||
|
|
729a0d2d26 | ||
|
|
7828c8d463 | ||
|
|
3f94e4fe47 | ||
|
|
876ec26e97 | ||
|
|
34c488ae16 | ||
|
|
f43251ac13 | ||
|
|
c3e08ebfea | ||
|
|
42e6c2356b | ||
|
|
2b2a1bc6ce | ||
|
|
d7f44057cc | ||
|
|
5e29adef1b | ||
|
|
3c1c743df6 | ||
|
|
8e176cdf25 | ||
| d6e86ff7fd | |||
| b76aa9b322 | |||
| 88a86b867b | |||
| 919b92d10e | |||
| 4052f3fe1f | |||
| 975f266640 | |||
| f0435c47e4 | |||
|
|
0aec45f7aa | ||
| abba8aefdd | |||
|
|
bae0b8017e | ||
|
|
fd6032f21b | ||
|
|
4f36428593 | ||
| 7d74bc43bc | |||
|
|
d3ff1791ac | ||
|
|
604dcfbef5 | ||
| d699a5f31c | |||
| 830aa8d140 | |||
| 5a65acd333 | |||
|
|
06e70435c0 | ||
|
|
02e2f1e2a0 | ||
| 3ae0504ce3 | |||
| bf7574734e | |||
| 41b8b57efe | |||
|
|
62ca4f985a | ||
| 0f99fe68d5 | |||
| 64e8c2237a | |||
| bb66c930a8 | |||
| 26be132945 | |||
|
|
fe42078e88 | ||
| 068ff023ef | |||
| 9c003e9a16 | |||
|
|
89beffee2e | ||
|
|
b3dcaca697 | ||
| 9f3327ce38 | |||
| e9ed4ca4d3 | |||
| e4948e5cab | |||
|
|
eb3db7cd3a | ||
| 07577b4156 | |||
| e4d45b4fca | |||
| c50d481698 | |||
| 7b1df12a5b | |||
| cf95347fd7 | |||
| a67eb9da3c | |||
| 185d96e8b1 | |||
| 56d9d9c169 |
5
.env.docker
Normal file
5
.env.docker
Normal file
@ -0,0 +1,5 @@
|
|||||||
|
# Auto-loaded by `docker compose` for ${VAR} substitution in docker-compose.yml (not passed into every container).
|
||||||
|
# Okta SPA values (public) — same preview tenant as CPC-CSD client dev.
|
||||||
|
VITE_OKTA_DOMAIN=https://dev-830839.oktapreview.com
|
||||||
|
VITE_OKTA_CLIENT_ID=0oa2jgzvrpdwx2iqd0h8
|
||||||
|
|
||||||
3
.gitignore
vendored
3
.gitignore
vendored
@ -135,4 +135,5 @@ uploads/
|
|||||||
|
|
||||||
# GCP Service Account Key
|
# GCP Service Account Key
|
||||||
config/gcp-key.json
|
config/gcp-key.json
|
||||||
Jenkinsfile
|
Jenkinsfile
|
||||||
|
clear-26as-data.ts
|
||||||
325
Jenkinsfile
vendored
325
Jenkinsfile
vendored
@ -1,325 +0,0 @@
|
|||||||
pipeline {
|
|
||||||
agent any
|
|
||||||
|
|
||||||
environment {
|
|
||||||
SSH_CREDENTIALS = 'cloudtopiaa'
|
|
||||||
REMOTE_SERVER = 'ubuntu@160.187.166.17'
|
|
||||||
PROJECT_NAME = 'Royal-Enfield-Backend'
|
|
||||||
DEPLOY_PATH = '/home/ubuntu/Royal-Enfield/Re_Backend'
|
|
||||||
GIT_CREDENTIALS = 'git-cred'
|
|
||||||
REPO_URL = 'https://git.tech4biz.wiki/laxmanhalaki/Re_Backend.git'
|
|
||||||
GIT_BRANCH = 'main'
|
|
||||||
NPM_PATH = '/home/ubuntu/.nvm/versions/node/v22.21.1/bin/npm'
|
|
||||||
NODE_PATH = '/home/ubuntu/.nvm/versions/node/v22.21.1/bin/node'
|
|
||||||
PM2_PATH = '/home/ubuntu/.nvm/versions/node/v22.21.1/bin/pm2'
|
|
||||||
PM2_APP_NAME = 'royal-enfield-backend'
|
|
||||||
APP_PORT = '5000'
|
|
||||||
EMAIL_RECIPIENT = 'laxman.halaki@tech4biz.org'
|
|
||||||
}
|
|
||||||
|
|
||||||
options {
|
|
||||||
timeout(time: 20, unit: 'MINUTES')
|
|
||||||
disableConcurrentBuilds()
|
|
||||||
timestamps()
|
|
||||||
buildDiscarder(logRotator(numToKeepStr: '10', daysToKeepStr: '30'))
|
|
||||||
}
|
|
||||||
|
|
||||||
stages {
|
|
||||||
stage('Pre-deployment Check') {
|
|
||||||
steps {
|
|
||||||
script {
|
|
||||||
echo "═══════════════════════════════════════════"
|
|
||||||
echo "🚀 Starting ${PROJECT_NAME} Deployment"
|
|
||||||
echo "═══════════════════════════════════════════"
|
|
||||||
echo "Server: ${REMOTE_SERVER}"
|
|
||||||
echo "Deploy Path: ${DEPLOY_PATH}"
|
|
||||||
echo "PM2 App: ${PM2_APP_NAME}"
|
|
||||||
echo "Build #: ${BUILD_NUMBER}"
|
|
||||||
echo "═══════════════════════════════════════════"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
stage('Pull Latest Code') {
|
|
||||||
steps {
|
|
||||||
sshagent(credentials: [SSH_CREDENTIALS]) {
|
|
||||||
withCredentials([usernamePassword(credentialsId: GIT_CREDENTIALS, usernameVariable: 'GIT_USER', passwordVariable: 'GIT_PASS')]) {
|
|
||||||
sh """
|
|
||||||
ssh -o StrictHostKeyChecking=no -o ConnectTimeout=10 ${REMOTE_SERVER} << 'ENDSSH'
|
|
||||||
set -e
|
|
||||||
|
|
||||||
echo "📦 Git Operations..."
|
|
||||||
|
|
||||||
if [ -d "${DEPLOY_PATH}/.git" ]; then
|
|
||||||
cd ${DEPLOY_PATH}
|
|
||||||
|
|
||||||
echo "Configuring git..."
|
|
||||||
git config --global --add safe.directory ${DEPLOY_PATH}
|
|
||||||
git config credential.helper store
|
|
||||||
|
|
||||||
echo "Fetching updates..."
|
|
||||||
git fetch https://${GIT_USER}:${GIT_PASS}@git.tech4biz.wiki/laxmanhalaki/Re_Backend.git ${GIT_BRANCH}
|
|
||||||
|
|
||||||
CURRENT_COMMIT=\$(git rev-parse HEAD)
|
|
||||||
LATEST_COMMIT=\$(git rev-parse FETCH_HEAD)
|
|
||||||
|
|
||||||
if [ "\$CURRENT_COMMIT" = "\$LATEST_COMMIT" ]; then
|
|
||||||
echo "⚠️ Already up to date. No changes to deploy."
|
|
||||||
echo "Current: \$CURRENT_COMMIT"
|
|
||||||
else
|
|
||||||
echo "Pulling new changes..."
|
|
||||||
git reset --hard FETCH_HEAD
|
|
||||||
git clean -fd
|
|
||||||
echo "✓ Updated from \${CURRENT_COMMIT:0:7} to \${LATEST_COMMIT:0:7}"
|
|
||||||
fi
|
|
||||||
else
|
|
||||||
echo "Cloning repository..."
|
|
||||||
rm -rf ${DEPLOY_PATH}
|
|
||||||
mkdir -p /home/ubuntu/Royal-Enfield
|
|
||||||
cd /home/ubuntu/Royal-Enfield
|
|
||||||
git clone https://${GIT_USER}:${GIT_PASS}@git.tech4biz.wiki/laxmanhalaki/Re_Backend.git Re_Backend
|
|
||||||
cd ${DEPLOY_PATH}
|
|
||||||
git checkout ${GIT_BRANCH}
|
|
||||||
git config --global --add safe.directory ${DEPLOY_PATH}
|
|
||||||
echo "✓ Repository cloned successfully"
|
|
||||||
fi
|
|
||||||
|
|
||||||
cd ${DEPLOY_PATH}
|
|
||||||
echo "Current commit: \$(git log -1 --oneline)"
|
|
||||||
ENDSSH
|
|
||||||
"""
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
stage('Install Dependencies') {
|
|
||||||
steps {
|
|
||||||
sshagent(credentials: [SSH_CREDENTIALS]) {
|
|
||||||
sh """
|
|
||||||
ssh -o StrictHostKeyChecking=no ${REMOTE_SERVER} << 'ENDSSH'
|
|
||||||
set -e
|
|
||||||
export PATH="/home/ubuntu/.nvm/versions/node/v22.21.1/bin:\$PATH"
|
|
||||||
cd ${DEPLOY_PATH}
|
|
||||||
|
|
||||||
echo "🔧 Environment Check..."
|
|
||||||
echo "Node: \$(${NODE_PATH} -v)"
|
|
||||||
echo "NPM: \$(${NPM_PATH} -v)"
|
|
||||||
|
|
||||||
echo ""
|
|
||||||
echo "📥 Installing Dependencies..."
|
|
||||||
${NPM_PATH} install --prefer-offline --no-audit --progress=false
|
|
||||||
|
|
||||||
echo ""
|
|
||||||
echo "✅ Dependencies installed successfully!"
|
|
||||||
ENDSSH
|
|
||||||
"""
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
stage('Build Application') {
|
|
||||||
steps {
|
|
||||||
sshagent(credentials: [SSH_CREDENTIALS]) {
|
|
||||||
sh """
|
|
||||||
ssh -o StrictHostKeyChecking=no ${REMOTE_SERVER} << 'ENDSSH'
|
|
||||||
set -e
|
|
||||||
export PATH="/home/ubuntu/.nvm/versions/node/v22.21.1/bin:\$PATH"
|
|
||||||
cd ${DEPLOY_PATH}
|
|
||||||
|
|
||||||
echo "🔨 Building application..."
|
|
||||||
${NPM_PATH} run build
|
|
||||||
echo "✅ Build completed successfully!"
|
|
||||||
ENDSSH
|
|
||||||
"""
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
stage('Stop PM2 Process') {
|
|
||||||
steps {
|
|
||||||
sshagent(credentials: [SSH_CREDENTIALS]) {
|
|
||||||
sh """
|
|
||||||
ssh -o StrictHostKeyChecking=no ${REMOTE_SERVER} << 'ENDSSH'
|
|
||||||
set -e
|
|
||||||
export PATH="/home/ubuntu/.nvm/versions/node/v22.21.1/bin:\$PATH"
|
|
||||||
|
|
||||||
echo "🛑 Stopping existing PM2 process..."
|
|
||||||
|
|
||||||
if ${PM2_PATH} list | grep -q "${PM2_APP_NAME}"; then
|
|
||||||
echo "Stopping ${PM2_APP_NAME}..."
|
|
||||||
${PM2_PATH} stop ${PM2_APP_NAME} || true
|
|
||||||
${PM2_PATH} delete ${PM2_APP_NAME} || true
|
|
||||||
echo "✓ Process stopped"
|
|
||||||
else
|
|
||||||
echo "No existing process found"
|
|
||||||
fi
|
|
||||||
ENDSSH
|
|
||||||
"""
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
stage('Start with PM2') {
|
|
||||||
steps {
|
|
||||||
sshagent(credentials: [SSH_CREDENTIALS]) {
|
|
||||||
sh """
|
|
||||||
ssh -o StrictHostKeyChecking=no ${REMOTE_SERVER} << 'ENDSSH'
|
|
||||||
set -e
|
|
||||||
export PATH="/home/ubuntu/.nvm/versions/node/v22.21.1/bin:\$PATH"
|
|
||||||
cd ${DEPLOY_PATH}
|
|
||||||
|
|
||||||
echo "🚀 Starting application with PM2..."
|
|
||||||
|
|
||||||
# Start with PM2
|
|
||||||
${PM2_PATH} start ${NPM_PATH} --name "${PM2_APP_NAME}" -- start
|
|
||||||
|
|
||||||
echo ""
|
|
||||||
echo "⏳ Waiting for application to start..."
|
|
||||||
sleep 5
|
|
||||||
|
|
||||||
# Save PM2 configuration
|
|
||||||
${PM2_PATH} save
|
|
||||||
|
|
||||||
# Show PM2 status
|
|
||||||
echo ""
|
|
||||||
echo "📊 PM2 Process Status:"
|
|
||||||
${PM2_PATH} list
|
|
||||||
|
|
||||||
# Show logs (last 20 lines)
|
|
||||||
echo ""
|
|
||||||
echo "📝 Application Logs:"
|
|
||||||
${PM2_PATH} logs ${PM2_APP_NAME} --lines 20 --nostream || true
|
|
||||||
|
|
||||||
echo ""
|
|
||||||
echo "✅ Application started successfully!"
|
|
||||||
ENDSSH
|
|
||||||
"""
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
stage('Health Check') {
|
|
||||||
steps {
|
|
||||||
sshagent(credentials: [SSH_CREDENTIALS]) {
|
|
||||||
sh """
|
|
||||||
ssh -o StrictHostKeyChecking=no ${REMOTE_SERVER} << 'ENDSSH'
|
|
||||||
set -e
|
|
||||||
export PATH="/home/ubuntu/.nvm/versions/node/v22.21.1/bin:\$PATH"
|
|
||||||
|
|
||||||
echo "🔍 Deployment Verification..."
|
|
||||||
|
|
||||||
# Check if PM2 process is running
|
|
||||||
if ${PM2_PATH} list | grep -q "${PM2_APP_NAME}.*online"; then
|
|
||||||
echo "✓ PM2 process is running"
|
|
||||||
else
|
|
||||||
echo "✗ PM2 process is NOT running!"
|
|
||||||
${PM2_PATH} logs ${PM2_APP_NAME} --lines 50 --nostream || true
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
|
|
||||||
# Check if port is listening
|
|
||||||
echo ""
|
|
||||||
echo "Checking if port ${APP_PORT} is listening..."
|
|
||||||
if ss -tuln | grep -q ":${APP_PORT} "; then
|
|
||||||
echo "✓ Application is listening on port ${APP_PORT}"
|
|
||||||
else
|
|
||||||
echo "⚠️ Port ${APP_PORT} not detected (may take a moment to start)"
|
|
||||||
fi
|
|
||||||
|
|
||||||
# Show process info
|
|
||||||
echo ""
|
|
||||||
echo "📊 Process Information:"
|
|
||||||
${PM2_PATH} info ${PM2_APP_NAME}
|
|
||||||
|
|
||||||
echo ""
|
|
||||||
echo "═══════════════════════════════════════════"
|
|
||||||
echo "✅ DEPLOYMENT SUCCESSFUL"
|
|
||||||
echo "═══════════════════════════════════════════"
|
|
||||||
ENDSSH
|
|
||||||
"""
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
post {
|
|
||||||
always {
|
|
||||||
cleanWs()
|
|
||||||
}
|
|
||||||
success {
|
|
||||||
script {
|
|
||||||
def duration = currentBuild.durationString.replace(' and counting', '')
|
|
||||||
mail to: "${EMAIL_RECIPIENT}",
|
|
||||||
subject: "✅ ${PROJECT_NAME} - Deployment Successful #${BUILD_NUMBER}",
|
|
||||||
body: """
|
|
||||||
Deployment completed successfully!
|
|
||||||
|
|
||||||
Project: ${PROJECT_NAME}
|
|
||||||
Build: #${BUILD_NUMBER}
|
|
||||||
Duration: ${duration}
|
|
||||||
Server: ${REMOTE_SERVER}
|
|
||||||
PM2 App: ${PM2_APP_NAME}
|
|
||||||
Port: ${APP_PORT}
|
|
||||||
|
|
||||||
Deployed at: ${new Date().format('yyyy-MM-dd HH:mm:ss')}
|
|
||||||
|
|
||||||
Console: ${BUILD_URL}console
|
|
||||||
|
|
||||||
Commands to manage:
|
|
||||||
- View logs: pm2 logs ${PM2_APP_NAME}
|
|
||||||
- Restart: pm2 restart ${PM2_APP_NAME}
|
|
||||||
- Stop: pm2 stop ${PM2_APP_NAME}
|
|
||||||
"""
|
|
||||||
}
|
|
||||||
}
|
|
||||||
failure {
|
|
||||||
script {
|
|
||||||
sshagent(credentials: [SSH_CREDENTIALS]) {
|
|
||||||
try {
|
|
||||||
def logs = sh(
|
|
||||||
script: """ssh -o StrictHostKeyChecking=no ${REMOTE_SERVER} '
|
|
||||||
export PATH="/home/ubuntu/.nvm/versions/node/v22.21.1/bin:\$PATH"
|
|
||||||
${PM2_PATH} logs ${PM2_APP_NAME} --lines 50 --nostream || echo "No logs available"
|
|
||||||
'""",
|
|
||||||
returnStdout: true
|
|
||||||
).trim()
|
|
||||||
|
|
||||||
mail to: "${EMAIL_RECIPIENT}",
|
|
||||||
subject: "❌ ${PROJECT_NAME} - Deployment Failed #${BUILD_NUMBER}",
|
|
||||||
body: """
|
|
||||||
Deployment FAILED!
|
|
||||||
|
|
||||||
Project: ${PROJECT_NAME}
|
|
||||||
Build: #${BUILD_NUMBER}
|
|
||||||
Server: ${REMOTE_SERVER}
|
|
||||||
Failed at: ${new Date().format('yyyy-MM-dd HH:mm:ss')}
|
|
||||||
|
|
||||||
Console Log: ${BUILD_URL}console
|
|
||||||
|
|
||||||
Recent PM2 Logs:
|
|
||||||
${logs}
|
|
||||||
|
|
||||||
Action required immediately!
|
|
||||||
"""
|
|
||||||
} catch (Exception e) {
|
|
||||||
mail to: "${EMAIL_RECIPIENT}",
|
|
||||||
subject: "❌ ${PROJECT_NAME} - Deployment Failed #${BUILD_NUMBER}",
|
|
||||||
body: """
|
|
||||||
Deployment FAILED!
|
|
||||||
|
|
||||||
Project: ${PROJECT_NAME}
|
|
||||||
Build: #${BUILD_NUMBER}
|
|
||||||
Server: ${REMOTE_SERVER}
|
|
||||||
Failed at: ${new Date().format('yyyy-MM-dd HH:mm:ss')}
|
|
||||||
|
|
||||||
Console Log: ${BUILD_URL}console
|
|
||||||
|
|
||||||
Could not retrieve PM2 logs. Please check manually.
|
|
||||||
"""
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
File diff suppressed because one or more lines are too long
@ -0,0 +1,219 @@
|
|||||||
|
{
|
||||||
|
"id": "re-workflow-cpc-csd-api-env",
|
||||||
|
"name": "RE Workflow — CPC-CSD API (complete)",
|
||||||
|
"values": [
|
||||||
|
{
|
||||||
|
"key": "hostUrl",
|
||||||
|
"value": "http://localhost:5000",
|
||||||
|
"type": "default",
|
||||||
|
"enabled": true,
|
||||||
|
"description": "API origin only (scheme + host + port). No path. Node dev: 5000. Docker host-mapped API: often 5004. Nginx all-in-one: use 8080 only if you proxy everything through it."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"key": "apiRoot",
|
||||||
|
"value": "http://localhost:5000/api/v1",
|
||||||
|
"type": "default",
|
||||||
|
"enabled": true,
|
||||||
|
"description": "Must equal {{hostUrl}}/api/v1. All CPC-CSD REST calls in the bundled collection use {{apiRoot}}/cpc-csd/... Bare GCS upload uses POST {{hostUrl}}/api/upload (see collection folder 02). The SPA may still use {{hostUrl}}/api/documents/* — same handlers; see docs/CPC-CDC.md if you need those URLs."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"key": "accessToken",
|
||||||
|
"value": "",
|
||||||
|
"type": "secret",
|
||||||
|
"enabled": true,
|
||||||
|
"description": "JWT only (no Bearer prefix). From browser: DevTools → Application → Local Storage → access_token, or Network → Authorization header value after Bearer. Required for all CPC-CSD routes except GET /health."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"key": "refreshToken",
|
||||||
|
"value": "",
|
||||||
|
"type": "secret",
|
||||||
|
"enabled": true,
|
||||||
|
"description": "Optional. Only if you chain POST /auth/refresh from another collection."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"key": "ocrProvider",
|
||||||
|
"value": "GEMINI_VERTEX_DIRECT",
|
||||||
|
"type": "default",
|
||||||
|
"enabled": true,
|
||||||
|
"description": "Multipart field provider. GEMINI_VERTEX_DIRECT = Gemini on document bytes; skips Document AI OCR even if configured. GEMINI_VERTEX = optional Document AI then Gemini. RULES = rules engine on OCR text only, no Vertex."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"key": "claimIdCpc",
|
||||||
|
"value": "CPC-POSTMAN-0001",
|
||||||
|
"type": "default",
|
||||||
|
"enabled": true,
|
||||||
|
"description": "claim_id and booking_id for CPC runs (same pattern as Dashboard finalBookingId: CPC-{suffix}). Must be unique enough for your DB rules."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"key": "claimIdCsd",
|
||||||
|
"value": "CSD-POSTMAN-0001",
|
||||||
|
"type": "default",
|
||||||
|
"enabled": true,
|
||||||
|
"description": "claim_id and booking_id for CSD (PO) runs: CSD-{suffix}."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"key": "claimIdRetail",
|
||||||
|
"value": "CSD-RETAIL-0001",
|
||||||
|
"type": "default",
|
||||||
|
"enabled": true,
|
||||||
|
"description": "Optional booking/claim id for RETAIL_INVOICE tests (any string; booking_type often CSD in samples)."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"key": "cpcDocumentId",
|
||||||
|
"value": "",
|
||||||
|
"type": "default",
|
||||||
|
"enabled": true,
|
||||||
|
"description": "UUID from GET .../documents/recent (or history). Required for GET by id, GET file, PUT status, DELETE. Optional test script on recent can set this."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"key": "documentGcpUrl",
|
||||||
|
"value": "gs://your-bucket/path/document.pdf",
|
||||||
|
"type": "default",
|
||||||
|
"enabled": true,
|
||||||
|
"description": "For POST .../v1/ocr/validate JSON only. File must already exist in GCS."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"key": "reportAttemptQuery",
|
||||||
|
"value": "",
|
||||||
|
"type": "default",
|
||||||
|
"enabled": true,
|
||||||
|
"description": "Per-claim Excel: append empty or ?attempt=2 (full query string including ?)."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"key": "recentPage",
|
||||||
|
"value": "1",
|
||||||
|
"type": "default",
|
||||||
|
"enabled": true,
|
||||||
|
"description": "`GET .../documents/recent` — **page** (integer, **1-based**). Increment to fetch the next page; reset to `1` when you change `recentSearch`, `recentStatus`, or `recentType`."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"key": "recentLimit",
|
||||||
|
"value": "15",
|
||||||
|
"type": "default",
|
||||||
|
"enabled": true,
|
||||||
|
"description": "`GET .../documents/recent` — **limit** (page size, number of **document rows** per page). The SPA dashboard offers 10 / 15 / 30 / 50. Larger pages reduce the chance a multi-file CPC batch is split across pages."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"key": "recentSearch",
|
||||||
|
"value": "",
|
||||||
|
"type": "default",
|
||||||
|
"enabled": true,
|
||||||
|
"description": "Optional **`search`** query: case-insensitive substring on **`booking_id`**, **`claim_id`**, **`document_type`**, and document **`id`** (UUID). Examples: `CPC-114`, `POSTMAN`, part of a UUID. Leave **empty** to list without text filter (matches Dashboard debounced booking search)."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"key": "recentStatus",
|
||||||
|
"value": "",
|
||||||
|
"type": "default",
|
||||||
|
"enabled": true,
|
||||||
|
"description": "Optional **`status`** filter. **Empty** or omit in URL = all statuses.\n\n| Value | Server behaviour |\n|-------|------------------|\n| *(empty)* | No status filter — “All submissions”. |\n| `SUCCESSFUL` | `MATCH`, `SUCCESSFUL`, `APPROVED`. |\n| `UNSUCCESSFUL` | `MISMATCH`, `REJECTED`, `UNSUCCESSFUL`, `NEED_MANUAL` — use for **“Rejected / mismatch”** tab parity. |\n| `ALL` | Explicit no-op filter. |\n| Any other string | Treated as exact **`validation_status`** value. |\n\nImplementation: `appendCpcDocumentFilters` in `re-workflow-be/src/services/cpc-cdc/utils.ts`."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"key": "recentType",
|
||||||
|
"value": "",
|
||||||
|
"type": "default",
|
||||||
|
"enabled": true,
|
||||||
|
"description": "Optional **`type`** (document family). **Empty** = all types.\n\nSupported tokens include **`AADHAAR`**, **`CPC_AUTH`**, **`CSD_PO`**, **`RETAIL_INVOICE`**, **`ALL`** — server maps to `document_type` `ILIKE` patterns (see same `appendCpcDocumentFilters`)."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"key": "recentSortBy",
|
||||||
|
"value": "createdAt",
|
||||||
|
"type": "default",
|
||||||
|
"enabled": true,
|
||||||
|
"description": "`sortBy` query — must be one of: **`id`**, **`bookingId`**, **`createdAt`**, **`documentType`**, **`validationStatus`**, **`claimId`**, **`matchPercentage`**. Invalid values fall back to **`createdAt`** in the controller."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"key": "recentOrder",
|
||||||
|
"value": "desc",
|
||||||
|
"type": "default",
|
||||||
|
"enabled": true,
|
||||||
|
"description": "`order` query — **`asc`** or **`desc`** (case-insensitive). **`desc`** = newest first (dashboard default)."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"key": "masterReportSearch",
|
||||||
|
"value": "",
|
||||||
|
"type": "default",
|
||||||
|
"enabled": true,
|
||||||
|
"description": "GET .../report/all/download optional search query param."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"key": "masterReportStatus",
|
||||||
|
"value": "",
|
||||||
|
"type": "default",
|
||||||
|
"enabled": true,
|
||||||
|
"description": "Optional validation_status filter for master Excel."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"key": "masterReportType",
|
||||||
|
"value": "",
|
||||||
|
"type": "default",
|
||||||
|
"enabled": true,
|
||||||
|
"description": "Optional document_type filter for master Excel."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"key": "putStatusBodyJson",
|
||||||
|
"value": "{\n \"status\": \"APPROVED\",\n \"remarks\": \"Manual review via Postman\"\n}",
|
||||||
|
"type": "default",
|
||||||
|
"enabled": true,
|
||||||
|
"description": "Body for PUT .../documents/:id/status. Adjust status, remarks, optional correctedFields per API contract."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"key": "metadataQueueJsonCsdPo",
|
||||||
|
"value": "[{\"document_type\":\"CSD_PO\",\"msd_payload\":{\"customer_name\":\"Rahul Verma\",\"po_number\":\"PO-2024-001\",\"po_amount\":\"25000\",\"signature_and_stamp\":\"yes\"},\"expected_field_keys\":[\"customer_name\",\"po_number\",\"po_amount\",\"signature_and_stamp\"]}]",
|
||||||
|
"type": "default",
|
||||||
|
"enabled": true,
|
||||||
|
"description": "CSD (1 doc) PO — Purchase Order. JSON keys: `customer_name`, `po_number`, `po_amount`, `signature_and_stamp` (yes/no). Legacy keys still work. Stringify for `metadata_queue`."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"key": "metadataQueueJsonCpcTwoFiles",
|
||||||
|
"value": "[{\"document_type\":\"CPC_AUTH\",\"msd_payload\":{\"customer_name\":\"Amit Kumar\",\"letter_number\":\"AUTH-2024-77\",\"letter_amount\":\"45000\",\"signature_and_stamp\":\"yes\"},\"expected_field_keys\":[\"customer_name\",\"letter_number\",\"letter_amount\",\"signature_and_stamp\"]},{\"document_type\":\"AADHAAR\",\"msd_payload\":{\"customer_name\":\"Amit Kumar\",\"aadhar_number\":\"123412341234\"},\"expected_field_keys\":[\"customer_name\",\"aadhar_number\"]}]",
|
||||||
|
"type": "default",
|
||||||
|
"enabled": true,
|
||||||
|
"description": "CPC (2 docs), order = file order. Doc1: `customer_name`, `letter_number`, `letter_amount`, `signature_and_stamp`. Doc2: `customer_name`, `aadhar_number`. Legacy keys still work."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"key": "metadataQueueJsonCpcAuthOnly",
|
||||||
|
"value": "[{\"document_type\":\"CPC_AUTH\",\"msd_payload\":{\"customer_name\":\"Amit Kumar\",\"letter_number\":\"AUTH-99\",\"letter_amount\":\"10000\",\"signature_and_stamp\":\"yes\"},\"expected_field_keys\":[\"customer_name\",\"letter_number\",\"letter_amount\",\"signature_and_stamp\"]}]",
|
||||||
|
"type": "default",
|
||||||
|
"enabled": true,
|
||||||
|
"description": "Single CPC_AUTH upload (skip_min). Same keys as CPC doc1 (`customer_name`, `letter_number`, `letter_amount`, `signature_and_stamp`)."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"key": "metadataQueueJsonRetailInvoice",
|
||||||
|
"value": "[{\"document_type\":\"RETAIL_INVOICE\",\"msd_payload\":{\"vendor_name\":\"Royal Enfield Store\",\"order_or_authorisation_number\":\"INV-2024-1001\",\"invoice_value\":\"185000\",\"invoice_date\":\"15-01-2024\"},\"expected_field_keys\":[\"vendor_name\",\"order_or_authorisation_number\",\"invoice_value\",\"invoice_date\"]}]",
|
||||||
|
"type": "default",
|
||||||
|
"enabled": true,
|
||||||
|
"description": "Retail invoice: vendor, order, amount, and invoice date compared to the reference payload per validation policy."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"key": "msdPayloadCpcAuth",
|
||||||
|
"value": "{\"customer_name\":\"Amit Kumar\",\"letter_number\":\"AUTH-1\",\"letter_amount\":\"45000\",\"signature_and_stamp\":\"yes\"}",
|
||||||
|
"type": "default",
|
||||||
|
"enabled": true,
|
||||||
|
"description": "validate-upload: Authorization letter — `customer_name`, `letter_number`, `letter_amount`, `signature_and_stamp`."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"key": "msdPayloadAadhaar",
|
||||||
|
"value": "{\"customer_name\":\"Amit Kumar\",\"aadhar_number\":\"123412341234\"}",
|
||||||
|
"type": "default",
|
||||||
|
"enabled": true,
|
||||||
|
"description": "validate-upload: Aadhaar — `customer_name`, `aadhar_number` (12 digits)."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"key": "msdPayloadCsdPo",
|
||||||
|
"value": "{\"customer_name\":\"Rahul Verma\",\"po_number\":\"PO-001\",\"po_amount\":\"12000\",\"signature_and_stamp\":\"yes\"}",
|
||||||
|
"type": "default",
|
||||||
|
"enabled": true,
|
||||||
|
"description": "validate-upload: CSD PO — same keys as `metadataQueueJsonCsdPo`."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"key": "msdPayloadRetailInvoice",
|
||||||
|
"value": "{\"vendor_name\":\"RE Store\",\"order_or_authorisation_number\":\"INV-99\",\"invoice_value\":\"50000\",\"invoice_date\":\"01-04-2024\"}",
|
||||||
|
"type": "default",
|
||||||
|
"enabled": true,
|
||||||
|
"description": "validate-upload: msd_payload for RETAIL_INVOICE."
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"_postman_variable_scope": "environment",
|
||||||
|
"_postman_exported_at": "2026-04-20T12:00:00.000Z",
|
||||||
|
"_postman_exported_using": "RE Workflow CPC-CSD bundle"
|
||||||
|
}
|
||||||
@ -0,0 +1,90 @@
|
|||||||
|
{
|
||||||
|
"info": {
|
||||||
|
"_postman_id": "re-workflow-cpc-csd-ocr-single-2026",
|
||||||
|
"name": "RE Workflow — CPC-CSD OCR (single POST)",
|
||||||
|
"description": "## What this collection is\nOne **multipart** request that runs the **full CPC-CSD OCR pipeline** used by the app: optional OCR text → Vertex/Gemini extraction → validation → **persist** `cpc_documents` rows.\n\nThis is **not** a different backend route — it is exactly:\n`POST {{apiRoot}}/cpc-csd/v1/ocr/upload`\n\n## Import\n1. Import **RE_Workflow_CPC_CDC_OCR_SingleRequest.postman_environment.json** (or merge variables into your existing env).\n2. Set **accessToken** (JWT, no `Bearer ` prefix).\n3. Select this environment in the dropdown.\n4. Open **POST Full OCR pipeline**, attach file(s), Send.\n\n## Auth\n- Collection **Bearer**: `{{accessToken}}`\n- User must be allowed for CPC-CSD (same as main RE Workflow collection).\n\n## Request (exact)\n| Item | Value |\n|------|--------|\n| Method | **POST** |\n| URL | `{{apiRoot}}/cpc-csd/v1/ocr/upload` |\n| Body mode | **form-data** (multipart) |\n| Content-Type | Let Postman set **multipart boundary** (do not set `application/json` on this request). |\n\n### Multipart text fields (always these keys)\n| Field name | Type | Required | Description |\n|------------|------|----------|-------------|\n| `claim_id` | text | yes | Claim id string; same family as Dashboard (`CPC-…` / `CSD-…`). |\n| `booking_id` | text | yes | In samples same as `claim_id`; backend accepts booking id pattern. |\n| `booking_type` | text | yes | **`CSD`** = one PO file. **`CPC`** = two files (auth + Aadhaar). |\n| `provider` | text | yes | e.g. `GEMINI_VERTEX_DIRECT` (see env `ocrProvider`). |\n| `metadata_queue` | text | yes | **Stringified JSON array** (not a Postman JSON body). Each element describes one uploaded file in order. |\n\n### Multipart file field(s)\n| Field name | Type | Count | Rule |\n|------------|------|-------|------|\n| `files` | file | **1** for CSD | One PO PDF/image. 
|\n| `files` | file | **2** for CPC | **Duplicate** the key `files` in Postman (two rows, same key `files`): first row = authorization letter, second = Aadhaar. Order **must** match `metadata_queue` array order. |\n\n### `metadata_queue` JSON shape (per array element)\nEach object **must** include:\n- `document_type`: `CSD_PO` | `CPC_AUTH` | `AADHAAR` | `RETAIL_INVOICE` (this collection documents CSD + CPC).\n- `msd_payload`: object — MSD/reference values for that file.\n- `expected_field_keys`: string array — **same keys** as in `msd_payload` you want validated (order preserved).\n\n**CSD_PO** keys (current canonical): `customer_name`, `po_number`, `po_amount`, `signature_and_stamp` (`yes`/`no`).\n\n**CPC_AUTH** (doc 1): `customer_name`, `letter_number`, `letter_amount`, `signature_and_stamp`.\n\n**AADHAAR** (doc 2): `customer_name`, `aadhar_number` (12 digits).\n\nUse env **`metadata_queue_json`** for CSD default, **`metadata_queue_json_cpc`** for CPC (set the `metadata_queue` field value to that variable when testing CPC).\n\n## Limits (server)\n- Max **20** `files` parts; **15 MB** per file; ZIP not allowed (same as main API).\n\n## Response\n- **200** JSON: per-file results with `document_id`, `validation_status`, `field_results`, etc. (same contract as main collection folder `03`/`04`).\n\n## Optional (not in this one-request collection)\n- `POST .../ocr/validate-upload` — single file validate without persisting as the same dashboard flow.\n- `POST .../ocr/validate` — JSON body + GCS URL.\n- `POST {{hostUrl}}/api/upload` — bare GCS staging without CPC metadata.",
|
||||||
|
"schema": "https://schema.getpostman.com/json/collection/v2.1.0/collection.json"
|
||||||
|
},
|
||||||
|
"auth": {
|
||||||
|
"type": "bearer",
|
||||||
|
"bearer": [
|
||||||
|
{
|
||||||
|
"key": "token",
|
||||||
|
"value": "{{accessToken}}",
|
||||||
|
"type": "string"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"variable": [
|
||||||
|
{
|
||||||
|
"key": "hostUrl",
|
||||||
|
"value": "http://localhost:5000"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"key": "apiRoot",
|
||||||
|
"value": "http://localhost:5000/api/v1"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"key": "accessToken",
|
||||||
|
"value": ""
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"key": "ocrProvider",
|
||||||
|
"value": "GEMINI_VERTEX_DIRECT"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"item": [
|
||||||
|
{
|
||||||
|
"name": "POST Full OCR pipeline (multipart upload)",
|
||||||
|
"description": "**Single API** for end-to-end OCR on CPC-CSD: `POST {{apiRoot}}/cpc-csd/v1/ocr/upload`.\n\n**CSD (1 file):** `ocr_booking_type=CSD`, attach **one** `files` part, `metadata_queue` = `{{metadata_queue_json}}` (default CSD_PO).\n\n**CPC (2 files):** Set `ocr_booking_type` to `CPC`, set `metadata_queue` to `{{metadata_queue_json_cpc}}`, **add a second form row** with key `files` (duplicate key), attach auth PDF then Aadhaar PDF in that order.\n\n**claim_id / booking_id:** both use `{{ocr_claim_id}}` — change env when switching CSD vs CPC claim ids.",
|
||||||
|
"request": {
|
||||||
|
"method": "POST",
|
||||||
|
"header": [],
|
||||||
|
"body": {
|
||||||
|
"mode": "formdata",
|
||||||
|
"formdata": [
|
||||||
|
{
|
||||||
|
"key": "claim_id",
|
||||||
|
"value": "{{ocr_claim_id}}",
|
||||||
|
"type": "text",
|
||||||
|
"description": "Same as Dashboard claim id string."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"key": "booking_id",
|
||||||
|
"value": "{{ocr_claim_id}}",
|
||||||
|
"type": "text",
|
||||||
|
"description": "Samples use same value as claim_id; must match your booking/claim convention."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"key": "booking_type",
|
||||||
|
"value": "{{ocr_booking_type}}",
|
||||||
|
"type": "text",
|
||||||
|
"description": "CSD = 1 file. CPC = 2 files + CPC metadata array."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"key": "provider",
|
||||||
|
"value": "{{ocrProvider}}",
|
||||||
|
"type": "text",
|
||||||
|
"description": "Vertex/Rules mode; see env ocrProvider."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"key": "metadata_queue",
|
||||||
|
"value": "{{metadata_queue_json}}",
|
||||||
|
"type": "text",
|
||||||
|
"description": "Stringified JSON array. CSD default from env `metadata_queue_json`. For CPC switch value to {{metadata_queue_json_cpc}} in this field (or paste)."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"key": "files",
|
||||||
|
"type": "file",
|
||||||
|
"src": [],
|
||||||
|
"description": "CSD: attach PO here only. CPC: first file = authorization letter; add another `files` row below for Aadhaar."
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"url": "{{apiRoot}}/cpc-csd/v1/ocr/upload",
|
||||||
|
"description": "Multipart form-data only. Do not set Content-Type manually."
|
||||||
|
},
|
||||||
|
"response": []
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
@ -0,0 +1,65 @@
|
|||||||
|
{
|
||||||
|
"id": "re-workflow-cpc-csd-ocr-single-env",
|
||||||
|
"name": "RE Workflow — CPC-CSD OCR (single POST)",
|
||||||
|
"values": [
|
||||||
|
{
|
||||||
|
"key": "hostUrl",
|
||||||
|
"value": "http://localhost:5000",
|
||||||
|
"type": "default",
|
||||||
|
"enabled": true,
|
||||||
|
"description": "API origin (scheme + host + port). Docker: often http://localhost:5004. No trailing slash."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"key": "apiRoot",
|
||||||
|
"value": "http://localhost:5000/api/v1",
|
||||||
|
"type": "default",
|
||||||
|
"enabled": true,
|
||||||
|
"description": "Must be {{hostUrl}}/api/v1. Used only by the OCR collection URL."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"key": "accessToken",
|
||||||
|
"value": "",
|
||||||
|
"type": "secret",
|
||||||
|
"enabled": true,
|
||||||
|
"description": "JWT only (no 'Bearer ' prefix). Required: collection uses Bearer auth with this variable."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"key": "ocrProvider",
|
||||||
|
"value": "GEMINI_VERTEX_DIRECT",
|
||||||
|
"type": "default",
|
||||||
|
"enabled": true,
|
||||||
|
"description": "Multipart text field `provider`. GEMINI_VERTEX_DIRECT = Gemini on file bytes. GEMINI_VERTEX = optional Document AI then Gemini. RULES = regex/rules on OCR text only (no Vertex)."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"key": "ocr_claim_id",
|
||||||
|
"value": "CSD-OCR-0001",
|
||||||
|
"type": "default",
|
||||||
|
"enabled": true,
|
||||||
|
"description": "Used for BOTH `claim_id` and `booking_id` form fields (same as Dashboard). For CPC use e.g. CPC-OCR-0001 and set ocr_booking_type=CPC."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"key": "ocr_booking_type",
|
||||||
|
"value": "CSD",
|
||||||
|
"type": "default",
|
||||||
|
"enabled": true,
|
||||||
|
"description": "Multipart `booking_type`: CSD (1 file, PO) or CPC (2 files: auth letter + Aadhaar)."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"key": "metadata_queue_json",
|
||||||
|
"value": "[{\"document_type\":\"CSD_PO\",\"msd_payload\":{\"customer_name\":\"Rahul Verma\",\"po_number\":\"PO-2024-001\",\"po_amount\":\"25000\",\"signature_and_stamp\":\"yes\"},\"expected_field_keys\":[\"customer_name\",\"po_number\",\"po_amount\",\"signature_and_stamp\"]}]",
|
||||||
|
"type": "default",
|
||||||
|
"enabled": true,
|
||||||
|
"description": "Default for CSD. Single-line JSON STRING for form field `metadata_queue`. For CPC: set Body `metadata_queue` to {{metadata_queue_json_cpc}} (or paste that value) and add a second `files` row."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"key": "metadata_queue_json_cpc",
|
||||||
|
"value": "[{\"document_type\":\"CPC_AUTH\",\"msd_payload\":{\"customer_name\":\"Amit Kumar\",\"letter_number\":\"AUTH-2024-77\",\"letter_amount\":\"45000\",\"signature_and_stamp\":\"yes\"},\"expected_field_keys\":[\"customer_name\",\"letter_number\",\"letter_amount\",\"signature_and_stamp\"]},{\"document_type\":\"AADHAAR\",\"msd_payload\":{\"customer_name\":\"Amit Kumar\",\"aadhar_number\":\"123412341234\"},\"expected_field_keys\":[\"customer_name\",\"aadhar_number\"]}]",
|
||||||
|
"type": "default",
|
||||||
|
"enabled": true,
|
||||||
|
"description": "CPC 2-file metadata_queue. Array order MUST match file order: [0]=first `files` part (auth letter), [1]=second `files` part (Aadhaar)."
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"_postman_variable_scope": "environment",
|
||||||
|
"_postman_exported_at": "2026-04-15T15:00:00.000Z",
|
||||||
|
"_postman_exported_using": "RE Workflow CPC-CSD OCR single-request bundle"
|
||||||
|
}
|
||||||
35
README.md
35
README.md
@ -83,6 +83,41 @@ A comprehensive backend API for the Royal Enfield Workflow Management System bui
|
|||||||
|
|
||||||
The API will be available at `http://localhost:5000`
|
The API will be available at `http://localhost:5000`
|
||||||
|
|
||||||
|
### Redis (for TAT and Pause-Resume jobs)
|
||||||
|
|
||||||
|
The backend uses **Redis** for TAT (turnaround time) alerts and Pause-Resume workflow jobs. The app runs without Redis, but those features need Redis on `localhost:6379`.
|
||||||
|
|
||||||
|
**Option 1 – Docker (easiest if you have Docker)**
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Start Redis in the background (port 6379)
|
||||||
|
npm run redis:start
|
||||||
|
|
||||||
|
# Stop when done
|
||||||
|
npm run redis:stop
|
||||||
|
```
|
||||||
|
|
||||||
|
**Option 2 – Windows (Memurai or WSL)**
|
||||||
|
|
||||||
|
- **Memurai** (Redis-compatible, native Windows): Download from [memurai.com](https://www.memurai.com/) and install. Default port 6379. You can install as a Windows service.
|
||||||
|
- **WSL2**: Install Ubuntu from Microsoft Store, then:
|
||||||
|
```bash
|
||||||
|
sudo apt update && sudo apt install redis-server -y
|
||||||
|
redis-server --daemonize yes
|
||||||
|
```
|
||||||
|
|
||||||
|
**Option 3 – macOS / Linux**
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# macOS (Homebrew)
|
||||||
|
brew install redis && brew services start redis
|
||||||
|
|
||||||
|
# Ubuntu/Debian
|
||||||
|
sudo apt install redis-server -y && sudo systemctl start redis-server
|
||||||
|
```
|
||||||
|
|
||||||
|
**Verify:** `redis-cli ping` should return `PONG`. Then restart the backend so it connects to Redis.
|
||||||
|
|
||||||
### Docker Setup
|
### Docker Setup
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
|
|||||||
@ -19,7 +19,12 @@
|
|||||||
"variable": [
|
"variable": [
|
||||||
{
|
{
|
||||||
"key": "baseUrl",
|
"key": "baseUrl",
|
||||||
"value": "http://localhost:3000/api/v1",
|
"value": "http://localhost:5000/api/v1",
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"key": "healthUrl",
|
||||||
|
"value": "http://localhost:5000/health",
|
||||||
"type": "string"
|
"type": "string"
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
@ -101,7 +106,31 @@
|
|||||||
]
|
]
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"name": "Token Exchange (Development)",
|
"name": "Token Exchange (Okta authorization code)",
|
||||||
|
"event": [
|
||||||
|
{
|
||||||
|
"listen": "test",
|
||||||
|
"script": {
|
||||||
|
"exec": [
|
||||||
|
"if (pm.response.code === 200) {",
|
||||||
|
" const jsonData = pm.response.json();",
|
||||||
|
" const data = jsonData.data || jsonData;",
|
||||||
|
" const token = data && (data.accessToken || data.access_token);",
|
||||||
|
" const refresh = data && (data.refreshToken || data.refresh_token);",
|
||||||
|
" if (token) {",
|
||||||
|
" pm.collectionVariables.set('accessToken', token);",
|
||||||
|
" pm.environment.set('accessToken', token);",
|
||||||
|
" }",
|
||||||
|
" if (refresh) {",
|
||||||
|
" pm.collectionVariables.set('refreshToken', refresh);",
|
||||||
|
" pm.environment.set('refreshToken', refresh);",
|
||||||
|
" }",
|
||||||
|
"}"
|
||||||
|
],
|
||||||
|
"type": "text/javascript"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
],
|
||||||
"request": {
|
"request": {
|
||||||
"method": "POST",
|
"method": "POST",
|
||||||
"header": [
|
"header": [
|
||||||
|
|||||||
File diff suppressed because one or more lines are too long
@ -1 +1 @@
|
|||||||
import{a as s}from"./index-CULgQ-8S.js";import"./radix-vendor-CYvDqP9X.js";import"./charts-vendor-BVfwAPj-.js";import"./utils-vendor-BTBPSQfW.js";import"./ui-vendor-CX5oLBI_.js";import"./socket-vendor-TjCxX7sJ.js";import"./redux-vendor-tbZCm13o.js";import"./router-vendor-B_rK4TXr.js";async function m(n){return(await s.post(`/conclusions/${n}/generate`)).data.data}async function f(n,t){return(await s.post(`/conclusions/${n}/finalize`,{finalRemark:t})).data.data}async function d(n){var t;try{return(await s.get(`/conclusions/${n}`)).data.data}catch(o){if(((t=o.response)==null?void 0:t.status)===404)return null;throw o}}export{f as finalizeConclusion,m as generateConclusion,d as getConclusion};
|
import{a as s}from"./index-r8G8cQlR.js";import"./radix-vendor-CLtqm-Ae.js";import"./charts-vendor-CmYZJIYl.js";import"./utils-vendor-BTBPSQfW.js";import"./ui-vendor-DgwXkk2Y.js";import"./socket-vendor-TjCxX7sJ.js";import"./redux-vendor-tbZCm13o.js";import"./router-vendor-DbXFJHwt.js";async function m(n){return(await s.post(`/conclusions/${n}/generate`)).data.data}async function f(n,t){return(await s.post(`/conclusions/${n}/finalize`,{finalRemark:t})).data.data}async function d(n){var t;try{return(await s.get(`/conclusions/${n}`)).data.data}catch(o){if(((t=o.response)==null?void 0:t.status)===404)return null;throw o}}export{f as finalizeConclusion,m as generateConclusion,d as getConclusion};
|
||||||
1
build/assets/index-Bap1UWaI.css
Normal file
1
build/assets/index-Bap1UWaI.css
Normal file
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
64
build/assets/index-r8G8cQlR.js
Normal file
64
build/assets/index-r8G8cQlR.js
Normal file
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
3
build/assets/router-vendor-DbXFJHwt.js
Normal file
3
build/assets/router-vendor-DbXFJHwt.js
Normal file
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
2
build/assets/ui-vendor-DgwXkk2Y.js
Normal file
2
build/assets/ui-vendor-DgwXkk2Y.js
Normal file
File diff suppressed because one or more lines are too long
@ -13,15 +13,15 @@
|
|||||||
<!-- Preload essential fonts and icons -->
|
<!-- Preload essential fonts and icons -->
|
||||||
<link rel="preconnect" href="https://fonts.googleapis.com">
|
<link rel="preconnect" href="https://fonts.googleapis.com">
|
||||||
<link rel="preconnect" href="https://fonts.gstatic.com" crossorigin>
|
<link rel="preconnect" href="https://fonts.gstatic.com" crossorigin>
|
||||||
<script type="module" crossorigin src="/assets/index-CULgQ-8S.js"></script>
|
<script type="module" crossorigin src="/assets/index-r8G8cQlR.js"></script>
|
||||||
<link rel="modulepreload" crossorigin href="/assets/charts-vendor-BVfwAPj-.js">
|
<link rel="modulepreload" crossorigin href="/assets/charts-vendor-CmYZJIYl.js">
|
||||||
<link rel="modulepreload" crossorigin href="/assets/radix-vendor-CYvDqP9X.js">
|
<link rel="modulepreload" crossorigin href="/assets/radix-vendor-CLtqm-Ae.js">
|
||||||
<link rel="modulepreload" crossorigin href="/assets/utils-vendor-BTBPSQfW.js">
|
<link rel="modulepreload" crossorigin href="/assets/utils-vendor-BTBPSQfW.js">
|
||||||
<link rel="modulepreload" crossorigin href="/assets/ui-vendor-CX5oLBI_.js">
|
<link rel="modulepreload" crossorigin href="/assets/ui-vendor-DgwXkk2Y.js">
|
||||||
<link rel="modulepreload" crossorigin href="/assets/socket-vendor-TjCxX7sJ.js">
|
<link rel="modulepreload" crossorigin href="/assets/socket-vendor-TjCxX7sJ.js">
|
||||||
<link rel="modulepreload" crossorigin href="/assets/redux-vendor-tbZCm13o.js">
|
<link rel="modulepreload" crossorigin href="/assets/redux-vendor-tbZCm13o.js">
|
||||||
<link rel="modulepreload" crossorigin href="/assets/router-vendor-B_rK4TXr.js">
|
<link rel="modulepreload" crossorigin href="/assets/router-vendor-DbXFJHwt.js">
|
||||||
<link rel="stylesheet" crossorigin href="/assets/index-XBJXaMj2.css">
|
<link rel="stylesheet" crossorigin href="/assets/index-Bap1UWaI.css">
|
||||||
</head>
|
</head>
|
||||||
|
|
||||||
<body>
|
<body>
|
||||||
|
|||||||
68
docs/CPC-CDC.md
Normal file
68
docs/CPC-CDC.md
Normal file
@ -0,0 +1,68 @@
|
|||||||
|
# CPC-CSD module (re-workflow)
|
||||||
|
|
||||||
|
This module (formerly referred to as CPC-CDC in code comments) covers **CPC/CSD document upload, OCR/extraction, validation against MSD payloads, audit history, dashboards, and Excel reports**. It was consolidated from the standalone **CPC-CSD** app into this backend.
|
||||||
|
|
||||||
|
## HTTP API
|
||||||
|
|
||||||
|
**CPC-CSD-compatible URLs** (same as `CPC-CSD/server/src/routes/index.js` + Postman `CPC-CSD-Full-Flow`): `POST /api/upload`, `GET /api/documents/*`, `POST /api/v1/ocr/validate`, `POST /api/v1/ocr/validate-upload` (field **`file`**), `POST /api/v1/ocr/upload` (field **`files`**, max 20), report downloads under `/api/v1/ocr/report/...`. Registered from `src/routes/cpc-csd-compat.mount.ts` before `/api/v1`; disable with **`CPC_LEGACY_COMPAT_ROUTES=false`**.
|
||||||
|
|
||||||
|
**Namespaced API** — canonical prefix **`/api/v1/cpc-csd`**; legacy alias **`/api/v1/cpc-cdc`** (`src/routes/cpc-cdc.routes.ts`) mounts the same handlers and auth.
|
||||||
|
|
||||||
|
| Method | Path (prefix **`/api`** or **`/api/v1/cpc-csd`** or legacy **`/api/v1/cpc-cdc`**) | Purpose |
|
||||||
|
|--------|------|---------|
|
||||||
|
| POST | `/upload` | GCS-only: multipart field **`file`** → `{ gcsUrl }` (compat: **`/api/upload`**) |
|
||||||
|
| POST | `/v1/ocr/validate` | JSON URL mode — returns **400** with legacy message (use validate-upload) |
|
||||||
|
| POST | `/v1/ocr/validate-upload` | Single file field **`file`** + `claim_id` / `msd_payload` / … |
|
||||||
|
| POST | `/v1/ocr/upload` | Bulk: field **`files`** (max 20) + `metadata_queue` or `msd_payload` / `document_type` |
|
||||||
|
| GET | `/documents/analytics` | Totals, pass rate, distribution, `dailyVolume`, `topMismatchFields` |
|
||||||
|
| GET | `/documents/history` | `claimId` query — attempts grouped |
|
||||||
|
| GET | `/documents/recent` | Paginated list; query: `page`, `limit`, `search`, `status`, `type`, `sortBy`, `order` |
|
||||||
|
| GET | `/documents/:id/file` | Authenticated file bytes for preview (browser cannot use `gs://` directly) |
|
||||||
|
| GET | `/documents/:id` | Document + audit logs + `field_results` |
|
||||||
|
| PUT | `/documents/:id/status` | Manual status / corrected fields |
|
||||||
|
| DELETE | `/documents/:id` | Remove document row |
|
||||||
|
| GET | `/v1/ocr/report/:claimId/download` | Per-claim Excel |
|
||||||
|
| GET | `/v1/ocr/report/all/download` | Master Excel (supports `search`, `status`, `type`) |
|
||||||
|
|
||||||
|
Compat paths are under **`/api/...`**; namespaced routes are **`/api/v1/cpc-csd/...`** with **`/api/v1/cpc-cdc/...`** as an alias (same path suffixes as in the table’s second column).
|
||||||
|
|
||||||
|
## Database
|
||||||
|
|
||||||
|
Sequelize models: **`CpcDocument`** (`cpc_documents`), **`CpcAuditLog`** (`cpc_audit_logs`). Migration: `src/migrations/2026041300-create-cpc-cdc-tables.ts`.
|
||||||
|
|
||||||
|
**Admin viewer list** is stored under `admin_configurations.config_key = CPC_CSD_ADMIN_CONFIG` (migration `20260416120000-rename-cpc-cdc-admin-config-key.ts` renames the legacy `CPC_CDC_ADMIN_CONFIG` row when applied).
|
||||||
|
|
||||||
|
On **application startup**, `ensureCpcCdcSchema()` runs after DB connect (`src/services/cpc-cdc/ensureCpcCdcSchema.ts`) so `CREATE TABLE IF NOT EXISTS` applies if migrations were skipped; still run `npm run migrate` for a full schema history.
|
||||||
|
|
||||||
|
Notable columns on `cpc_documents`: `booking_id`, `claim_id`, `attempt_no`, `document_type`, `document_gcp_url`, `provider`, JSONB `msd_payload`, `extracted_fields`, `field_confidence`, `validation_status`, `match_percentage`, `mismatch_reasons`, `field_results`, `ip_address`.
|
||||||
|
|
||||||
|
Unique index: `(claim_id, attempt_no, document_type)` — important when migrating legacy data with duplicates.
|
||||||
|
|
||||||
|
## Environment variables
|
||||||
|
|
||||||
|
Copy **`re-workflow-be/.env.example`** to `.env` and adjust. Typical keys (see `CpcCdcController` and `src/services/cpc-cdc/*`):
|
||||||
|
|
||||||
|
- **`GCP_PROJECT_ID`** — GCP project for Vertex / optional Document AI.
|
||||||
|
- **`VERTEX_AI_LOCATION`** — Vertex region (e.g. `asia-south1`).
|
||||||
|
- **`DOC_AI_PROCESSOR_ID`** — Optional; when set and valid, Document AI OCR may run before Gemini.
|
||||||
|
- **`GCP_LOCATION_DOC_AI`** — Document AI region (default `us`).
|
||||||
|
- **GCS** — Bucket/credentials as required by `CpcGcsService` (service account via `GOOGLE_APPLICATION_CREDENTIALS` or workload identity).
|
||||||
|
- **`CPC_ALLOW_DEGRADED_SAVE_WITHOUT_AI`** — **`true`**: always allow saving after failed/missing Vertex. **`false`**: in **production** only, disallow degraded saves. **Omitted in non-production**: degraded saves are **allowed** so local CPC works without GCP; set to **`false`** in dev to force strict Vertex. **Omitted in production**: strict (Vertex required unless `RULES` provider).
|
||||||
|
|
||||||
|
**Extraction behaviour (upload response):**
|
||||||
|
|
||||||
|
- **`extraction_source`: `vertex_gemini`** — Fields came from the Vertex Gemini API (document bytes + optional Document AI OCR text).
|
||||||
|
- **`extraction_source`: `rules_engine`** — Provider was **`RULES`**; fields come from `CpcRuleExtractService` on OCR text only (no Gemini).
|
||||||
|
- **`extraction_source`: `degraded_empty`** — Extraction was skipped, failed, or (in **non-production**) hit a **Vertex auth / ADC** problem; the row is still stored with empty `extracted_fields` so you can test DB/history. In production this only happens when **`CPC_ALLOW_DEGRADED_SAVE_WITHOUT_AI=true`** or missing `GCP_PROJECT_ID` with degraded policy.
|
||||||
|
|
||||||
|
## One-off data migration from legacy Prisma DB
|
||||||
|
|
||||||
|
If you still have the old **`Document`** / **`AuditLog`** tables (CPC-CSD Prisma schema) in PostgreSQL, run:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
npm run migrate:cpc-csd
|
||||||
|
```
|
||||||
|
|
||||||
|
Optional **`CPC_CSD_DATABASE_URL`**: if set, rows are read from that database and written to the database in **`DATABASE_URL`** (re-workflow). If unset, both read and write use **`DATABASE_URL`** (same cluster; both table sets must exist).
|
||||||
|
|
||||||
|
After migration, spot-check history, document detail, and Excel downloads, then decommission the legacy app.
|
||||||
137
docs/FORM16_CREDIT_DEBIT_PROCESS.md
Normal file
137
docs/FORM16_CREDIT_DEBIT_PROCESS.md
Normal file
@ -0,0 +1,137 @@
|
|||||||
|
# Form 16 – Full Process: Credit & Debit Notes (Incoming & Outgoing)
|
||||||
|
|
||||||
|
This document describes the end-to-end flow for Form 16 (Form 16A TDS Credit): 26AS reconciliation, credit/debit note creation, WFM/SAP incoming and outgoing file handling, and how users view SAP responses.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 1. High-Level Flow
|
||||||
|
|
||||||
|
- **26AS**: TDS entries are uploaded (RE) and aggregated by TAN + Financial Year + Quarter (Section 194Q, Booking F/O only).
|
||||||
|
- **Form 16A submission**: Dealer submits Form 16A (PDF). OCR extracts TAN, FY, Quarter, TDS amount, certificate number, etc.
|
||||||
|
- **Credit note**: When a submission is validated, the system matches it against the latest 26AS aggregate for that TAN/FY/Quarter. On match, a **credit note** is created, ledger updated, quarter marked **SETTLED**, and a CSV is pushed to WFM **INCOMING** for SAP (credit note generation).
|
||||||
|
- **Debit note**: When a new 26AS upload changes the quarter total and that quarter was already **SETTLED**, the system creates a **debit note** (reversing the earlier credit), updates ledger, sets quarter to **DEBIT_ISSUED_PENDING_FORM16**, and pushes a CSV to WFM **INCOMING** for SAP (debit note generation).
|
||||||
|
- **SAP responses**: SAP processes the INCOMING files and drops response CSVs in WFM **OUTGOING**. The backend ingests these (scheduler every 5 min or on-demand Pull), stores them in DB, and users can **View** (and for credit, **Download**) the SAP response.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 2. Paths (WFM Folder Structure)
|
||||||
|
|
||||||
|
All paths are relative to **WFM_BASE_PATH** (default `C:\WFM`). Can be overridden via `.env` (e.g. `WFM_BASE_PATH=D:\Form-16 Main`). The job also tries `<process.cwd()>\WFM-QRE\...` if the default path does not exist.
|
||||||
|
|
||||||
|
| Direction | Type | Default path (under WFM_BASE_PATH) |
|
||||||
|
|------------|--------|-------------------------------------|
|
||||||
|
| **INCOMING** | Credit | `WFM-QRE\INCOMING\WFM_MAIN\FORM16_CRDT` |
|
||||||
|
| **INCOMING** | Debit | `WFM-QRE\INCOMING\WFM_MAIN\FORM16_DEBT` |
|
||||||
|
| **OUTGOING** | Credit | `WFM-QRE\OUTGOING\WFM_SAP_MAIN\FORM16_CRDT` |
|
||||||
|
| **OUTGOING** | Debit | `WFM-QRE\OUTGOING\WFM_SAP_MAIN\FORM16_DBT` |
|
||||||
|
|
||||||
|
- **INCOMING** = files we **push** to WFM (for SAP to pick up and process).
|
||||||
|
- **OUTGOING** = files **SAP drops** (responses); we read and store them.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 3. Credit Note Flow
|
||||||
|
|
||||||
|
### 3.1 When is a credit note created?
|
||||||
|
|
||||||
|
- On **Form 16A submission validation** (after OCR and 26AS check).
|
||||||
|
- `run26asMatchAndCreditNote(submission)` is called (e.g. from submission validation flow).
|
||||||
|
- Conditions: TAN + FY + Quarter match latest 26AS aggregate (Section 194Q, F/O), amount within tolerance, quarter not already settled with same amount.
|
||||||
|
- On success: create `Form16CreditNote`, ledger entry (CREDIT), set quarter status **SETTLED**, then push **INCOMING** CSV.
|
||||||
|
|
||||||
|
### 3.2 Credit note – INCOMING (we push to WFM/SAP)
|
||||||
|
|
||||||
|
- **Path**: `WFM-QRE\INCOMING\WFM_MAIN\FORM16_CRDT`
|
||||||
|
- **When**: Immediately after credit note is created.
|
||||||
|
- **File name**: `{creditNoteNumber}.csv` (e.g. `CN00628226Q20001.csv`).
|
||||||
|
- **Content** (pipe `|` separated):
|
||||||
|
`TRNS_UNIQ_NO` (e.g. `F16-CN-{submissionId}-{creditNoteId}-{timestamp}`),
|
||||||
|
`TDS_TRNS_ID` (= credit note number),
|
||||||
|
`DEALER_CODE`, `TDS_TRNS_DOC_TYP`, `DLR_TAN_NO`, `FIN_YEAR & QUARTER`, `DOC_DATE`, `TDS_AMT`.
|
||||||
|
- **TDS_TRNS_ID** = credit note number (format: `CN` + 6-digit dealer code + 2-digit FY + quarter + 4-digit sequence, e.g. `CN00628226Q20001`).
|
||||||
|
- A copy is also written to the Form 16 credit archive path (INCOMING archive).
|
||||||
|
|
||||||
|
### 3.3 Credit note – OUTGOING (SAP response)
|
||||||
|
|
||||||
|
- **Path**: `WFM-QRE\OUTGOING\WFM_SAP_MAIN\FORM16_CRDT`
|
||||||
|
- **Who writes**: SAP (response CSVs placed here by SAP/WFM).
|
||||||
|
- **Who reads**: Backend **Form 16 SAP response job** (scheduler every 5 min + on **Pull** button).
|
||||||
|
- **What we do**: Read each CSV, parse first “real” data row, match to credit note by `TRNS_UNIQ_NO` or `creditNoteNumber` (TDS_TRNS_ID in response), upload file to storage, insert/update row in **`form16_sap_responses`** with `type = 'credit'`, `credit_note_id`, `storage_url`, etc.
|
||||||
|
- **User**: Credit notes list shows **View** when a response exists; **View** opens popup with SAP fields and **Download CSV**; **Pull** triggers ingestion and list refresh.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 4. Debit Note Flow
|
||||||
|
|
||||||
|
### 4.1 When is a debit note created?
|
||||||
|
|
||||||
|
- On **26AS upload** that changes the quarter aggregate for a quarter that is already **SETTLED** (had a credit note).
|
||||||
|
- `process26asUploadAggregation(uploadLogId)` is called after 26AS file upload (controller calls it when records are imported).
|
||||||
|
- For each (TAN, FY, Quarter) where new 26AS total ≠ previous snapshot and status is SETTLED: create `Form16DebitNote` (linked to the last credit note for that quarter), ledger entry (DEBIT), set quarter status **DEBIT_ISSUED_PENDING_FORM16**, then push **INCOMING** CSV.
|
||||||
|
|
||||||
|
### 4.2 Debit note – INCOMING (we push to WFM/SAP)
|
||||||
|
|
||||||
|
- **Path**: `WFM-QRE\INCOMING\WFM_MAIN\FORM16_DEBT`
|
||||||
|
- **When**: Immediately after debit note is created in `process26asUploadAggregation`.
|
||||||
|
- **File name**: `{debitNoteNumber}.csv` (e.g. `DN00628226Q20001.csv`).
|
||||||
|
- **Content** (pipe `|` separated):
|
||||||
|
`TRNS_UNIQ_NO` (e.g. `F16-DN-{creditNoteId}-{debitId}-{timestamp}`),
|
||||||
|
**`TDS_TRNS_ID`** = **credit note number** (not debit note number),
|
||||||
|
`DEALER_CODE`, `TDS_TRNS_DOC_TYP`, `Org.Document Number` (= debit id), `DLR_TAN_NO`, `FIN_YEAR & QUARTER`, `DOC_DATE`, `TDS_AMT`.
|
||||||
|
- **TDS_TRNS_ID** in debit incoming = credit note number (same format as credit, e.g. `CN00628226Q20001`). Debit note number = same string with `CN` replaced by `DN` (e.g. `DN00628226Q20001`).
|
||||||
|
- A copy is also written to the Form 16 debit archive path.
|
||||||
|
|
||||||
|
### 4.3 Debit note – OUTGOING (SAP response)
|
||||||
|
|
||||||
|
- **Path**: `WFM-QRE\OUTGOING\WFM_SAP_MAIN\FORM16_DBT`
|
||||||
|
- **Who writes**: SAP (response CSVs placed here).
|
||||||
|
- **Who reads**: Same **Form 16 SAP response job** (every 5 min + **Pull** on Debit Notes page).
|
||||||
|
- **What we do**: Read each CSV, parse, match to debit note by (in order):
|
||||||
|
(1) `TRNS_UNIQ_NO` → `form_16_debit_notes.trns_uniq_no`,
|
||||||
|
(2) `CLAIM_NUMBER` → `form_16_debit_notes.debit_note_number`,
|
||||||
|
(3) **filename (without .csv)** → `form_16_debit_notes.debit_note_number`.
|
||||||
|
Upload file to storage, insert/update row in **`form16_debit_note_sap_responses`** (separate table from credit) with `debit_note_id`, `storage_url`, etc.
|
||||||
|
- **User**: Debit notes list shows **View** when a response exists; **View** opens popup (no download); **Pull** triggers ingestion and list refresh.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 5. Database Tables for SAP Responses
|
||||||
|
|
||||||
|
| Table | Purpose |
|
||||||
|
|-----------------------------------|--------|
|
||||||
|
| **form16_sap_responses** | Credit note SAP responses only. Columns: `type` ('credit'), `file_name`, `credit_note_id`, `claim_number`, `sap_document_number`, `msg_typ`, `message`, `raw_row`, `storage_url`, timestamps. |
|
||||||
|
| **form16_debit_note_sap_responses**| Debit note SAP responses only. Columns: `file_name`, `debit_note_id`, `claim_number`, `sap_document_number`, `msg_typ`, `message`, `raw_row`, `storage_url`, timestamps. No `type` or `credit_note_id`. |
|
||||||
|
|
||||||
|
Credit and debit SAP responses are **not** mixed; each has its own table.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 6. Scheduler and Pull
|
||||||
|
|
||||||
|
- **Scheduler**: `startForm16SapResponseJob()` runs **every 5 minutes** (cron `*/5 * * * *`). It calls `runForm16SapResponseIngestionOnce()`, which:
|
||||||
|
- Scans **OUTGOING** credit dir (`FORM16_CRDT`) and **OUTGOING** debit dir (`FORM16_DBT`) for `.csv` files.
|
||||||
|
- For each file: parse, match to credit or debit note, upload to storage, write to `form16_sap_responses` (credit) or `form16_debit_note_sap_responses` (debit).
|
||||||
|
- **Pull button** (Credit Notes page and Debit Notes page): `POST /api/v1/form16/sap/pull` triggers the **same** `runForm16SapResponseIngestionOnce()`, then the frontend refetches the list. So Pull = one-off run of the same ingestion logic; no separate “pull-only” path.
|
||||||
|
- **View** appears when the corresponding table has a row for that note with a non-null `storage_url` (and for list, we check by `credit_note_id` / `debit_note_id`).
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 7. End-to-End Summary
|
||||||
|
|
||||||
|
| Step | Credit note | Debit note |
|
||||||
|
|------|-------------|------------|
|
||||||
|
| **Trigger** | Form 16A submission validated, 26AS match | 26AS upload changes total for a SETTLED quarter |
|
||||||
|
| **INCOMING (we push)** | CSV to `INCOMING\WFM_MAIN\FORM16_CRDT` | CSV to `INCOMING\WFM_MAIN\FORM16_DEBT` |
|
||||||
|
| **TDS_TRNS_ID in CSV** | Credit note number | Credit note number |
|
||||||
|
| **File name** | `{creditNoteNumber}.csv` | `{debitNoteNumber}.csv` |
|
||||||
|
| **OUTGOING (SAP writes)** | SAP drops response in `OUTGOING\WFM_SAP_MAIN\FORM16_CRDT` | SAP drops response in `OUTGOING\WFM_SAP_MAIN\FORM16_DBT` |
|
||||||
|
| **We read & store** | Job reads CSV, matches, stores in `form16_sap_responses` | Job reads CSV, matches, stores in `form16_debit_note_sap_responses` |
|
||||||
|
| **User action** | View / Download CSV (Pull to refresh) | View only (Pull to refresh) |
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 8. Env / Config (relevant)
|
||||||
|
|
||||||
|
- **WFM_BASE_PATH**: Base folder that contains `WFM-QRE` (e.g. `C:\WFM` or `D:\Form-16 Main`). If not set and default path missing, job tries `process.cwd()\WFM-QRE\...`.
|
||||||
|
- **WFM_FORM16_CREDIT_INCOMING_PATH**, **WFM_FORM16_DEBIT_INCOMING_PATH**: Override INCOMING paths.
|
||||||
|
- **WFM_FORM16_CREDIT_OUTGOING_PATH**, **WFM_FORM16_DEBIT_OUTGOING_PATH**: Override OUTGOING paths.
|
||||||
33
env.example
33
env.example
@ -73,6 +73,23 @@ RATE_LIMIT_MAX_REQUESTS=100
|
|||||||
MAX_FILE_SIZE_MB=10
|
MAX_FILE_SIZE_MB=10
|
||||||
ALLOWED_FILE_TYPES=pdf,doc,docx,xls,xlsx,ppt,pptx,jpg,jpeg,png,gif
|
ALLOWED_FILE_TYPES=pdf,doc,docx,xls,xlsx,ppt,pptx,jpg,jpeg,png,gif
|
||||||
|
|
||||||
|
# WFM Folder Structure Configuration
|
||||||
|
WFM_BASE_PATH=C:\\WFM
|
||||||
|
WFM_INCOMING_GST_CLAIMS_PATH=WFM-QRE\\INCOMING\\WFM_MAIN\\DLR_INC_CLAIMS_GST
|
||||||
|
WFM_INCOMING_NON_GST_CLAIMS_PATH=WFM-QRE\\INCOMING\\WFM_MAIN\\DLR_INC_CLAIMS_NON_GST
|
||||||
|
WFM_OUTGOING_GST_CLAIMS_PATH=WFM-QRE\\OUTGOING\\WFM_SAP_MAIN\\DLR_INC_CLAIMS_GST
|
||||||
|
WFM_OUTGOING_NON_GST_CLAIMS_PATH=WFM-QRE\\OUTGOING\\WFM_SAP_MAIN\\DLR_INC_CLAIMS_NON_GST
|
||||||
|
WFM_FORM16_CREDIT_INCOMING_PATH=WFM-QRE\\INCOMING\\WFM_MAIN\\FORM16_CRDT
|
||||||
|
WFM_FORM16_DEBIT_INCOMING_PATH=WFM-QRE\\INCOMING\\WFM_MAIN\\FORM16_DBT
|
||||||
|
WFM_FORM16_CREDIT_OUTGOING_PATH=WFM-QRE\\OUTGOING\\WFM_SAP_MAIN\\FORM16_CRDT
|
||||||
|
WFM_FORM16_DEBIT_OUTGOING_PATH=WFM-QRE\\OUTGOING\\WFM_SAP_MAIN\\FORM16_DBT
|
||||||
|
|
||||||
|
# WFM Archive Configuration (INCOMING)
|
||||||
|
WFM_ARCHIVE_GST_CLAIMS_PATH=WFM-QRE\\INCOMING\\WFM_ARACHIVE\\DLR_INC_CLAIMS_GST
|
||||||
|
WFM_ARCHIVE_NON_GST_CLAIMS_PATH=WFM-QRE\\INCOMING\\WFM_ARACHIVE\\DLR_INC_CLAIMS_NON_GST
|
||||||
|
WFM_FORM16_CREDIT_ARCHIVE_PATH=WFM-QRE\\INCOMING\\WFM_ARACHIVE\\FORM16_CRDT
|
||||||
|
WFM_FORM16_DEBIT_ARCHIVE_PATH=WFM-QRE\\INCOMING\\WFM_ARACHIVE\\FORM16_DBT
|
||||||
|
|
||||||
# TAT Monitoring
|
# TAT Monitoring
|
||||||
TAT_CHECK_INTERVAL_MINUTES=30
|
TAT_CHECK_INTERVAL_MINUTES=30
|
||||||
TAT_REMINDER_THRESHOLD_1=50
|
TAT_REMINDER_THRESHOLD_1=50
|
||||||
@ -106,3 +123,19 @@ SAP_REQUESTER=REFMS
|
|||||||
# WARNING: Only use in development/testing environments
|
# WARNING: Only use in development/testing environments
|
||||||
SAP_DISABLE_SSL_VERIFY=false
|
SAP_DISABLE_SSL_VERIFY=false
|
||||||
|
|
||||||
|
# WFM file paths (base path; dealer claims use DLR_INC_CLAIMS, Form 16 uses FORM16_CRDT / FORM16_DEBT)
|
||||||
|
# If unset: Windows defaults to C:\WFM; Linux/Mac defaults to <cwd>/wfm (paths are cross-platform).
|
||||||
|
# WFM_BASE_PATH=C:\WFM
|
||||||
|
# WFM_INCOMING_CLAIMS_PATH=WFM-QRE\INCOMING\WFM_MAIN\DLR_INC_CLAIMS
|
||||||
|
# WFM_OUTGOING_CLAIMS_PATH=WFM-QRE\OUTGOING\WFM_SAP_MAIN\DLR_INC_CLAIMS
|
||||||
|
# Form 16 credit note CSV (incoming): INCOMING/WFM_MAIN/FORM16_CRDT
|
||||||
|
# Form 16 debit note CSV (incoming): INCOMING/WFM_MAIN/FORM16_DBT
|
||||||
|
# Form 16 SAP responses (outgoing): OUTGOING/WFM_SAP_MAIN/FORM16_CRDT
|
||||||
|
# WFM_FORM16_CREDIT_INCOMING_PATH=WFM-QRE\INCOMING\WFM_MAIN\FORM16_CRDT
|
||||||
|
# WFM_FORM16_DEBIT_INCOMING_PATH=WFM-QRE\INCOMING\WFM_MAIN\FORM16_DBT
|
||||||
|
# WFM_FORM16_OUTGOING_PATH=WFM-QRE\OUTGOING\WFM_SAP_MAIN\FORM16_CRDT
|
||||||
|
|
||||||
|
# WFM Archive configuration examples (if overrides are needed)
|
||||||
|
# WFM_ARCHIVE_GST_CLAIMS_PATH=WFM-QRE\INCOMING\WFM_ARACHIVE\DLR_INC_CLAIMS_GST
|
||||||
|
# WFM_FORM16_CREDIT_ARCHIVE_PATH=WFM-QRE\INCOMING\WFM_ARACHIVE\FORM16_CRDT
|
||||||
|
#CREDIT_NOTE_SYNC_INTERVAL_MINUTES=1
|
||||||
|
|||||||
@ -14,7 +14,7 @@ module.exports = {
|
|||||||
coverageDirectory: 'coverage',
|
coverageDirectory: 'coverage',
|
||||||
coverageReporters: ['text', 'lcov', 'html'],
|
coverageReporters: ['text', 'lcov', 'html'],
|
||||||
setupFilesAfterEnv: ['<rootDir>/tests/setup.js'],
|
setupFilesAfterEnv: ['<rootDir>/tests/setup.js'],
|
||||||
moduleNameMapping: {
|
moduleNameMapper: {
|
||||||
'^@/(.*)$': '<rootDir>/src/$1',
|
'^@/(.*)$': '<rootDir>/src/$1',
|
||||||
'^@controllers/(.*)$': '<rootDir>/src/controllers/$1',
|
'^@controllers/(.*)$': '<rootDir>/src/controllers/$1',
|
||||||
'^@services/(.*)$': '<rootDir>/src/services/$1',
|
'^@services/(.*)$': '<rootDir>/src/services/$1',
|
||||||
@ -23,5 +23,6 @@ module.exports = {
|
|||||||
'^@utils/(.*)$': '<rootDir>/src/utils/$1',
|
'^@utils/(.*)$': '<rootDir>/src/utils/$1',
|
||||||
'^@types/(.*)$': '<rootDir>/src/types/$1',
|
'^@types/(.*)$': '<rootDir>/src/types/$1',
|
||||||
'^@config/(.*)$': '<rootDir>/src/config/$1',
|
'^@config/(.*)$': '<rootDir>/src/config/$1',
|
||||||
|
'^@validators/(.*)$': '<rootDir>/src/validators/$1',
|
||||||
},
|
},
|
||||||
};
|
};
|
||||||
|
|||||||
1028
package-lock.json
generated
1028
package-lock.json
generated
File diff suppressed because it is too large
Load Diff
19
package.json
19
package.json
@ -17,14 +17,26 @@
|
|||||||
"clean": "rm -rf dist",
|
"clean": "rm -rf dist",
|
||||||
"setup": "ts-node -r tsconfig-paths/register src/scripts/auto-setup.ts",
|
"setup": "ts-node -r tsconfig-paths/register src/scripts/auto-setup.ts",
|
||||||
"migrate": "ts-node -r tsconfig-paths/register src/scripts/migrate.ts",
|
"migrate": "ts-node -r tsconfig-paths/register src/scripts/migrate.ts",
|
||||||
|
"migrate:cpc-csd": "ts-node -r tsconfig-paths/register src/scripts/migrate-cpc-csd-to-cpc-tables.ts",
|
||||||
"seed:config": "ts-node -r tsconfig-paths/register src/scripts/seed-admin-config.ts",
|
"seed:config": "ts-node -r tsconfig-paths/register src/scripts/seed-admin-config.ts",
|
||||||
"seed:test-dealer": "ts-node -r tsconfig-paths/register src/scripts/seed-test-dealer.ts",
|
"seed:test-dealer": "ts-node -r tsconfig-paths/register src/scripts/seed-test-dealer.ts",
|
||||||
"cleanup:dealer-claims": "ts-node -r tsconfig-paths/register src/scripts/cleanup-dealer-claims.ts"
|
"seed:dealer-user": "ts-node -r tsconfig-paths/register src/scripts/seed-dealer-user.ts",
|
||||||
|
"seed:rohit-user": "ts-node -r tsconfig-paths/register src/scripts/seed-rohit-user.ts",
|
||||||
|
"seed:admin-user": "ts-node -r tsconfig-paths/register src/scripts/seed-admin-user.ts",
|
||||||
|
"seed:demo-requests": "ts-node -r tsconfig-paths/register src/scripts/seed-demo-requests.ts",
|
||||||
|
"seed:demo-dealers": "ts-node -r tsconfig-paths/register src/scripts/seed-demo-dealers.ts",
|
||||||
|
"cleanup:dealer-claims": "ts-node -r tsconfig-paths/register src/scripts/cleanup-dealer-claims.ts",
|
||||||
|
"redis:start": "docker run -d --name redis-workflow -p 6379:6379 redis:7-alpine",
|
||||||
|
"redis:stop": "docker rm -f redis-workflow",
|
||||||
|
"test": "jest --passWithNoTests --forceExit",
|
||||||
|
"test:ci": "jest --ci --coverage --passWithNoTests --forceExit"
|
||||||
},
|
},
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
|
"@google-cloud/documentai": "^9.6.0",
|
||||||
"@google-cloud/secret-manager": "^6.1.1",
|
"@google-cloud/secret-manager": "^6.1.1",
|
||||||
"@google-cloud/storage": "^7.18.0",
|
"@google-cloud/storage": "^7.18.0",
|
||||||
"@google-cloud/vertexai": "^1.10.0",
|
"@google-cloud/vertexai": "^1.10.0",
|
||||||
|
"@google/generative-ai": "^0.24.1",
|
||||||
"@types/nodemailer": "^7.0.4",
|
"@types/nodemailer": "^7.0.4",
|
||||||
"@types/uuid": "^8.3.4",
|
"@types/uuid": "^8.3.4",
|
||||||
"axios": "^1.7.9",
|
"axios": "^1.7.9",
|
||||||
@ -35,6 +47,7 @@
|
|||||||
"cors": "^2.8.5",
|
"cors": "^2.8.5",
|
||||||
"dayjs": "^1.11.19",
|
"dayjs": "^1.11.19",
|
||||||
"dotenv": "^16.4.7",
|
"dotenv": "^16.4.7",
|
||||||
|
"exceljs": "^4.4.0",
|
||||||
"express": "^4.21.2",
|
"express": "^4.21.2",
|
||||||
"express-rate-limit": "^7.5.0",
|
"express-rate-limit": "^7.5.0",
|
||||||
"fast-xml-parser": "^5.3.3",
|
"fast-xml-parser": "^5.3.3",
|
||||||
@ -48,6 +61,7 @@
|
|||||||
"openai": "^6.8.1",
|
"openai": "^6.8.1",
|
||||||
"passport": "^0.7.0",
|
"passport": "^0.7.0",
|
||||||
"passport-jwt": "^4.0.1",
|
"passport-jwt": "^4.0.1",
|
||||||
|
"pdf-parse": "^2.4.5",
|
||||||
"pg": "^8.13.1",
|
"pg": "^8.13.1",
|
||||||
"pg-hstore": "^2.3.4",
|
"pg-hstore": "^2.3.4",
|
||||||
"prom-client": "^15.1.3",
|
"prom-client": "^15.1.3",
|
||||||
@ -55,6 +69,7 @@
|
|||||||
"sanitize-html": "^2.17.1",
|
"sanitize-html": "^2.17.1",
|
||||||
"sequelize": "^6.37.5",
|
"sequelize": "^6.37.5",
|
||||||
"socket.io": "^4.8.1",
|
"socket.io": "^4.8.1",
|
||||||
|
"string-similarity": "^4.0.4",
|
||||||
"uuid": "^8.3.2",
|
"uuid": "^8.3.2",
|
||||||
"web-push": "^3.6.7",
|
"web-push": "^3.6.7",
|
||||||
"winston": "^3.17.0",
|
"winston": "^3.17.0",
|
||||||
@ -76,10 +91,12 @@
|
|||||||
"@types/passport-jwt": "^4.0.1",
|
"@types/passport-jwt": "^4.0.1",
|
||||||
"@types/pg": "^8.15.6",
|
"@types/pg": "^8.15.6",
|
||||||
"@types/sanitize-html": "^2.16.0",
|
"@types/sanitize-html": "^2.16.0",
|
||||||
|
"@types/string-similarity": "^4.0.2",
|
||||||
"@types/supertest": "^6.0.2",
|
"@types/supertest": "^6.0.2",
|
||||||
"@types/web-push": "^3.6.4",
|
"@types/web-push": "^3.6.4",
|
||||||
"@typescript-eslint/eslint-plugin": "^8.19.1",
|
"@typescript-eslint/eslint-plugin": "^8.19.1",
|
||||||
"@typescript-eslint/parser": "^8.19.1",
|
"@typescript-eslint/parser": "^8.19.1",
|
||||||
|
"concurrently": "^9.1.2",
|
||||||
"eslint": "^9.17.0",
|
"eslint": "^9.17.0",
|
||||||
"jest": "^29.7.0",
|
"jest": "^29.7.0",
|
||||||
"nodemon": "^3.1.9",
|
"nodemon": "^3.1.9",
|
||||||
|
|||||||
43
set-admin.ts
Normal file
43
set-admin.ts
Normal file
@ -0,0 +1,43 @@
|
|||||||
|
import { sequelize } from './src/config/database';
|
||||||
|
import { User } from './src/models/User';
|
||||||
|
|
||||||
|
async function makeAdmin() {
|
||||||
|
try {
|
||||||
|
const email = 'testuser11@eichergroup.com';
|
||||||
|
console.log(`Setting role to ADMIN for: ${email}`);
|
||||||
|
|
||||||
|
// Test connection first
|
||||||
|
await sequelize.authenticate();
|
||||||
|
console.log('Database connected.');
|
||||||
|
|
||||||
|
const [updatedRows] = await User.update(
|
||||||
|
{ role: 'ADMIN' },
|
||||||
|
{ where: { email: email } }
|
||||||
|
);
|
||||||
|
|
||||||
|
if (updatedRows > 0) {
|
||||||
|
console.log(`✅ Success! ${email} is now an ADMIN.`);
|
||||||
|
} else {
|
||||||
|
console.log(`⚠️ User not found in database: ${email}`);
|
||||||
|
console.log(`Creating user ${email} with ADMIN role...`);
|
||||||
|
|
||||||
|
const newUser = await User.create({
|
||||||
|
email: email,
|
||||||
|
oktaSub: `MANUAL_ADMIN_${Date.now()}`,
|
||||||
|
firstName: 'Test',
|
||||||
|
lastName: 'User 11',
|
||||||
|
displayName: 'Test User 11',
|
||||||
|
role: 'ADMIN',
|
||||||
|
isActive: true
|
||||||
|
});
|
||||||
|
|
||||||
|
console.log(`✅ Success! Created new ADMIN user: ${newUser.email}`);
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
console.error('❌ Error updating user:', error);
|
||||||
|
} finally {
|
||||||
|
await sequelize.close();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
makeAdmin();
|
||||||
120
src/__tests__/api/smoke.test.ts
Normal file
120
src/__tests__/api/smoke.test.ts
Normal file
@ -0,0 +1,120 @@
|
|||||||
|
/**
|
||||||
|
* API smoke tests for UAT / production readiness.
|
||||||
|
* Tests health, routing, and auth validation without requiring full DB/Redis in CI.
|
||||||
|
*
|
||||||
|
* Run: npm test -- smoke
|
||||||
|
*/
|
||||||
|
|
||||||
|
import request from 'supertest';
|
||||||
|
|
||||||
|
// Load app without starting server (server.ts is not imported)
|
||||||
|
// Suppress DB config console logs in test
|
||||||
|
const originalEnv = process.env.NODE_ENV;
|
||||||
|
process.env.NODE_ENV = process.env.NODE_ENV || 'test';
|
||||||
|
|
||||||
|
let app: import('express').Application;
|
||||||
|
beforeAll(() => {
|
||||||
|
app = require('../../app').default;
|
||||||
|
});
|
||||||
|
|
||||||
|
afterAll(() => {
|
||||||
|
process.env.NODE_ENV = originalEnv;
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('API Smoke – Health & Routing', () => {
|
||||||
|
it('SMK-01: GET /health returns 200 and status OK', async () => {
|
||||||
|
const res = await request(app).get('/health');
|
||||||
|
expect(res.status).toBe(200);
|
||||||
|
expect(res.body).toHaveProperty('status', 'OK');
|
||||||
|
expect(res.body).toHaveProperty('timestamp');
|
||||||
|
expect(res.body).toHaveProperty('uptime');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('SMK-02: GET /api/v1/health returns 200 and service name', async () => {
|
||||||
|
const res = await request(app).get('/api/v1/health');
|
||||||
|
expect(res.status).toBe(200);
|
||||||
|
expect(res.body).toHaveProperty('status', 'OK');
|
||||||
|
expect(res.body).toHaveProperty('service', 're-workflow-backend');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('SMK-03: GET /api/v1/health/db returns 200 (connected) or 503 (disconnected)', async () => {
|
||||||
|
const res = await request(app).get('/api/v1/health/db');
|
||||||
|
expect([200, 503]).toContain(res.status);
|
||||||
|
if (res.status === 200) {
|
||||||
|
expect(res.body).toHaveProperty('database', 'connected');
|
||||||
|
} else {
|
||||||
|
expect(res.body).toHaveProperty('database', 'disconnected');
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
it('SMK-04: Invalid API route returns 404 with JSON', async () => {
|
||||||
|
const res = await request(app).get('/api/v1/invalid-route-xyz');
|
||||||
|
expect(res.status).toBe(404);
|
||||||
|
expect(res.body).toHaveProperty('success', false);
|
||||||
|
expect(res.body).toHaveProperty('message');
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('API Smoke – Authentication', () => {
|
||||||
|
it('AUTH-01: POST /api/v1/auth/sso-callback with empty body returns 400', async () => {
|
||||||
|
const res = await request(app)
|
||||||
|
.post('/api/v1/auth/sso-callback')
|
||||||
|
.send({})
|
||||||
|
.set('Content-Type', 'application/json');
|
||||||
|
expect(res.status).toBe(400);
|
||||||
|
expect(res.body).toHaveProperty('success', false);
|
||||||
|
// Auth route validator returns "Request body validation failed"; legacy app route returns "email and oktaSub required"
|
||||||
|
expect(res.body.message).toMatch(/email|oktaSub|required|validation failed/i);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('AUTH-02: POST /api/v1/auth/sso-callback without oktaSub returns 400', async () => {
|
||||||
|
const res = await request(app)
|
||||||
|
.post('/api/v1/auth/sso-callback')
|
||||||
|
.send({ email: 'test@example.com' })
|
||||||
|
.set('Content-Type', 'application/json');
|
||||||
|
expect(res.status).toBe(400);
|
||||||
|
expect(res.body).toHaveProperty('success', false);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('AUTH-03: POST /api/v1/auth/sso-callback without email returns 400', async () => {
|
||||||
|
const res = await request(app)
|
||||||
|
.post('/api/v1/auth/sso-callback')
|
||||||
|
.send({ oktaSub: 'okta-123' })
|
||||||
|
.set('Content-Type', 'application/json');
|
||||||
|
expect(res.status).toBe(400);
|
||||||
|
expect(res.body).toHaveProperty('success', false);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('AUTH-04: GET /api/v1/users without token returns 401', async () => {
|
||||||
|
const res = await request(app).get('/api/v1/users');
|
||||||
|
expect(res.status).toBe(401);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('AUTH-05: GET /api/v1/users with invalid token returns 401', async () => {
|
||||||
|
const res = await request(app)
|
||||||
|
.get('/api/v1/users')
|
||||||
|
.set('Authorization', 'Bearer invalid-token');
|
||||||
|
expect(res.status).toBe(401);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('API Smoke – Security Headers (SEC-01, SEC-02, SEC-03)', () => {
|
||||||
|
it('SEC-01: Response includes Content-Security-Policy header', async () => {
|
||||||
|
const res = await request(app).get('/health');
|
||||||
|
expect(res.status).toBe(200);
|
||||||
|
expect(res.headers).toHaveProperty('content-security-policy');
|
||||||
|
expect(res.headers['content-security-policy']).toBeTruthy();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('SEC-02: Response includes X-Frame-Options (SAMEORIGIN or deny)', async () => {
|
||||||
|
const res = await request(app).get('/health');
|
||||||
|
expect(res.status).toBe(200);
|
||||||
|
expect(res.headers).toHaveProperty('x-frame-options');
|
||||||
|
expect(res.headers['x-frame-options'].toUpperCase()).toMatch(/SAMEORIGIN|DENY/);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('SEC-03: GET /metrics without admin auth returns 401', async () => {
|
||||||
|
const res = await request(app).get('/metrics');
|
||||||
|
expect(res.status).toBe(401);
|
||||||
|
});
|
||||||
|
});
|
||||||
80
src/__tests__/form16-permission.middleware.test.ts
Normal file
80
src/__tests__/form16-permission.middleware.test.ts
Normal file
@ -0,0 +1,80 @@
|
|||||||
|
import { NextFunction, Request, Response } from 'express';
|
||||||
|
import {
|
||||||
|
requireForm1626AsAccess,
|
||||||
|
requireForm16ReOnly,
|
||||||
|
requireForm16SubmissionAccess,
|
||||||
|
} from '../middlewares/form16Permission.middleware';
|
||||||
|
import { canView26As, canViewForm16Submission } from '../services/form16Permission.service';
|
||||||
|
import { getDealerCodeForUser } from '../services/form16.service';
|
||||||
|
|
||||||
|
jest.mock('../services/form16Permission.service', () => ({
|
||||||
|
canView26As: jest.fn(),
|
||||||
|
canViewForm16Submission: jest.fn(),
|
||||||
|
}));
|
||||||
|
|
||||||
|
jest.mock('../services/form16.service', () => ({
|
||||||
|
getDealerCodeForUser: jest.fn(),
|
||||||
|
}));
|
||||||
|
|
||||||
|
function createRes(): Response {
|
||||||
|
const res: Partial<Response> = {};
|
||||||
|
res.status = jest.fn().mockReturnValue(res);
|
||||||
|
res.json = jest.fn().mockReturnValue(res);
|
||||||
|
return res as Response;
|
||||||
|
}
|
||||||
|
|
||||||
|
describe('Form16 Permission Middlewares', () => {
|
||||||
|
const mockedCanView26As = canView26As as jest.Mock;
|
||||||
|
const mockedCanViewForm16Submission = canViewForm16Submission as jest.Mock;
|
||||||
|
const mockedGetDealerCodeForUser = getDealerCodeForUser as jest.Mock;
|
||||||
|
|
||||||
|
beforeEach(() => {
|
||||||
|
jest.clearAllMocks();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('allows ADMIN on 26AS middleware without config dependency', async () => {
|
||||||
|
const req = { user: { userId: 'a1', email: 'admin@royalenfield.com', role: 'ADMIN' } } as unknown as Request;
|
||||||
|
const res = createRes();
|
||||||
|
const next = jest.fn() as NextFunction;
|
||||||
|
|
||||||
|
await requireForm1626AsAccess(req, res, next);
|
||||||
|
|
||||||
|
expect(next).toHaveBeenCalledTimes(1);
|
||||||
|
expect(mockedCanView26As).not.toHaveBeenCalled();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('denies non-authorized user on 26AS middleware', async () => {
|
||||||
|
mockedCanView26As.mockResolvedValue(false);
|
||||||
|
const req = { user: { userId: 'u1', email: 'user@royalenfield.com', role: 'USER' } } as unknown as Request;
|
||||||
|
const res = createRes();
|
||||||
|
const next = jest.fn() as NextFunction;
|
||||||
|
|
||||||
|
await requireForm1626AsAccess(req, res, next);
|
||||||
|
|
||||||
|
expect(next).not.toHaveBeenCalled();
|
||||||
|
expect((res.status as jest.Mock).mock.calls[0][0]).toBe(403);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('denies dealer on RE-only middleware', async () => {
|
||||||
|
mockedGetDealerCodeForUser.mockResolvedValue('DLR001');
|
||||||
|
const req = { user: { userId: 'u2', email: 'dealer@royalenfield.com', role: 'USER' } } as unknown as Request;
|
||||||
|
const res = createRes();
|
||||||
|
const next = jest.fn() as NextFunction;
|
||||||
|
|
||||||
|
await requireForm16ReOnly(req, res, next);
|
||||||
|
|
||||||
|
expect(next).not.toHaveBeenCalled();
|
||||||
|
expect((res.status as jest.Mock).mock.calls[0][0]).toBe(403);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('allows submission middleware for authorized non-admin RE user', async () => {
|
||||||
|
mockedCanViewForm16Submission.mockResolvedValue(true);
|
||||||
|
const req = { user: { userId: 'u3', email: 'submission@royalenfield.com', role: 'USER' } } as unknown as Request;
|
||||||
|
const res = createRes();
|
||||||
|
const next = jest.fn() as NextFunction;
|
||||||
|
|
||||||
|
await requireForm16SubmissionAccess(req, res, next);
|
||||||
|
|
||||||
|
expect(next).toHaveBeenCalledTimes(1);
|
||||||
|
});
|
||||||
|
});
|
||||||
96
src/__tests__/form16-permission.service.test.ts
Normal file
96
src/__tests__/form16-permission.service.test.ts
Normal file
@ -0,0 +1,96 @@
|
|||||||
|
import { canView26As, canViewForm16Submission, getForm16ViewerConfig } from '../services/form16Permission.service';
|
||||||
|
import { sequelize } from '../config/database';
|
||||||
|
import { getDealerCodeForUser } from '../services/form16.service';
|
||||||
|
|
||||||
|
jest.mock('../config/database', () => ({
|
||||||
|
sequelize: {
|
||||||
|
query: jest.fn(),
|
||||||
|
},
|
||||||
|
}));
|
||||||
|
|
||||||
|
jest.mock('../services/form16.service', () => ({
|
||||||
|
getDealerCodeForUser: jest.fn(),
|
||||||
|
}));
|
||||||
|
|
||||||
|
describe('Form16 Permission Service (strict RBAC)', () => {
|
||||||
|
const mockedQuery = sequelize.query as jest.Mock;
|
||||||
|
const mockedGetDealerCodeForUser = getDealerCodeForUser as jest.Mock;
|
||||||
|
|
||||||
|
beforeEach(() => {
|
||||||
|
jest.clearAllMocks();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('returns normalized viewer lists from config', async () => {
|
||||||
|
mockedQuery.mockResolvedValue([
|
||||||
|
{
|
||||||
|
config_value: JSON.stringify({
|
||||||
|
submissionViewerEmails: [' User1@royalenfield.com '],
|
||||||
|
twentySixAsViewerEmails: ['USER2@royalenfield.com'],
|
||||||
|
}),
|
||||||
|
},
|
||||||
|
]);
|
||||||
|
|
||||||
|
const config = await getForm16ViewerConfig();
|
||||||
|
|
||||||
|
expect(config.submissionViewerEmails).toEqual(['user1@royalenfield.com']);
|
||||||
|
expect(config.twentySixAsViewerEmails).toEqual(['user2@royalenfield.com']);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('ADMIN always has submission and 26AS access', async () => {
|
||||||
|
expect(await canViewForm16Submission('admin@royalenfield.com', 'u-admin', 'ADMIN')).toBe(true);
|
||||||
|
expect(await canView26As('admin@royalenfield.com', 'ADMIN')).toBe(true);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('dealer always has submission access, but not implicit 26AS access', async () => {
|
||||||
|
mockedGetDealerCodeForUser.mockResolvedValue('DLR001');
|
||||||
|
mockedQuery.mockResolvedValue([{ config_value: JSON.stringify({ submissionViewerEmails: [], twentySixAsViewerEmails: [] }) }]);
|
||||||
|
|
||||||
|
expect(await canViewForm16Submission('dealer@royalenfield.com', 'u-dealer', 'USER')).toBe(true);
|
||||||
|
expect(await canView26As('dealer@royalenfield.com', 'USER')).toBe(false);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('non-admin RE user gets submission access only when listed in submission viewers', async () => {
|
||||||
|
mockedGetDealerCodeForUser.mockResolvedValue(null);
|
||||||
|
mockedQuery.mockResolvedValue([
|
||||||
|
{
|
||||||
|
config_value: JSON.stringify({
|
||||||
|
submissionViewerEmails: ['submissions@royalenfield.com'],
|
||||||
|
twentySixAsViewerEmails: [],
|
||||||
|
}),
|
||||||
|
},
|
||||||
|
]);
|
||||||
|
|
||||||
|
expect(await canViewForm16Submission('submissions@royalenfield.com', 'u1', 'USER')).toBe(true);
|
||||||
|
expect(await canViewForm16Submission('other@royalenfield.com', 'u2', 'USER')).toBe(false);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('26AS viewers implicitly have submission access', async () => {
|
||||||
|
mockedGetDealerCodeForUser.mockResolvedValue(null);
|
||||||
|
mockedQuery.mockResolvedValue([
|
||||||
|
{
|
||||||
|
config_value: JSON.stringify({
|
||||||
|
submissionViewerEmails: [],
|
||||||
|
twentySixAsViewerEmails: ['twentysix@royalenfield.com'],
|
||||||
|
}),
|
||||||
|
},
|
||||||
|
]);
|
||||||
|
|
||||||
|
expect(await canViewForm16Submission('twentysix@royalenfield.com', 'u3', 'USER')).toBe(true);
|
||||||
|
expect(await canView26As('twentysix@royalenfield.com', 'USER')).toBe(true);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('strict deny when viewer lists are empty for non-admin RE user', async () => {
|
||||||
|
mockedGetDealerCodeForUser.mockResolvedValue(null);
|
||||||
|
mockedQuery.mockResolvedValue([
|
||||||
|
{
|
||||||
|
config_value: JSON.stringify({
|
||||||
|
submissionViewerEmails: [],
|
||||||
|
twentySixAsViewerEmails: [],
|
||||||
|
}),
|
||||||
|
},
|
||||||
|
]);
|
||||||
|
|
||||||
|
expect(await canViewForm16Submission('re-user@royalenfield.com', 'u4', 'USER')).toBe(false);
|
||||||
|
expect(await canView26As('re-user@royalenfield.com', 'USER')).toBe(false);
|
||||||
|
});
|
||||||
|
});
|
||||||
67
src/__tests__/form16-reconciliation.test.ts
Normal file
67
src/__tests__/form16-reconciliation.test.ts
Normal file
@ -0,0 +1,67 @@
|
|||||||
|
/**
|
||||||
|
* Form 16 reconciliation tests.
|
||||||
|
* - Ledger: one CREDIT/DEBIT row per credit/debit note; no deletion (see docs/form16/LEDGER.md).
|
||||||
|
* - 26AS: only Section 194Q, Booking F/O; quarter aggregation; snapshot + auto-debit when total changes.
|
||||||
|
* - Form 16 match: latest 26AS aggregate only; reject mismatch/duplicate.
|
||||||
|
*
|
||||||
|
* Run: npm test -- form16-reconciliation
|
||||||
|
* Or: npm test -- --testPathPattern=form16
|
||||||
|
*
|
||||||
|
* Optional: FORM16_TEST_DB=1 to run integration tests (requires DB).
|
||||||
|
*/
|
||||||
|
|
||||||
|
import {
|
||||||
|
getLatest26asAggregatedForQuarter,
|
||||||
|
getLatest26asSnapshot,
|
||||||
|
getQuarterStatus,
|
||||||
|
process26asUploadAggregation,
|
||||||
|
upload26asFile,
|
||||||
|
parse26asTxtFile,
|
||||||
|
} from '../services/form16.service';
|
||||||
|
|
||||||
|
describe('Form 16 reconciliation', () => {
|
||||||
|
describe('parse26asTxtFile', () => {
|
||||||
|
it('parses 26AS official format (^ delimiter) and extracts sectionCode 194Q and statusOltas F', () => {
|
||||||
|
const header = '1^Deductor Name^TAN12345G^^^^^Total^Tax^TDS';
|
||||||
|
const line = '^1^194Q^30-Sep-2024^F^24-Oct-2024^-^1000^100^100';
|
||||||
|
const buffer = Buffer.from([header, line].join('\n'), 'utf8');
|
||||||
|
const { rows, errors } = parse26asTxtFile(buffer);
|
||||||
|
expect(errors).toEqual([]);
|
||||||
|
expect(rows.length).toBeGreaterThanOrEqual(1);
|
||||||
|
expect(rows[0].sectionCode).toBe('194Q');
|
||||||
|
expect(rows[0].statusOltas).toBe('F');
|
||||||
|
expect(rows[0].taxDeducted).toBe(100);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('returns empty rows for empty buffer', () => {
|
||||||
|
const { rows, errors } = parse26asTxtFile(Buffer.from('', 'utf8'));
|
||||||
|
expect(rows).toEqual([]);
|
||||||
|
expect(errors).toEqual([]);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('filters by section and booking status in aggregation (194Q, F/O only) – documented behavior', () => {
|
||||||
|
expect(parse26asTxtFile(Buffer.from('x', 'utf8'))).toBeDefined();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('aggregation and snapshot helpers', () => {
|
||||||
|
it('getLatest26asAggregatedForQuarter is a function', () => {
|
||||||
|
expect(typeof getLatest26asAggregatedForQuarter).toBe('function');
|
||||||
|
});
|
||||||
|
it('getLatest26asSnapshot is a function', () => {
|
||||||
|
expect(typeof getLatest26asSnapshot).toBe('function');
|
||||||
|
});
|
||||||
|
it('getQuarterStatus is a function', () => {
|
||||||
|
expect(typeof getQuarterStatus).toBe('function');
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('upload and process', () => {
|
||||||
|
it('upload26asFile accepts buffer and optional uploadLogId', () => {
|
||||||
|
expect(typeof upload26asFile).toBe('function');
|
||||||
|
});
|
||||||
|
it('process26asUploadAggregation returns snapshotsCreated and debitsCreated', () => {
|
||||||
|
expect(typeof process26asUploadAggregation).toBe('function');
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
92
src/__tests__/workflow-validator.test.ts
Normal file
92
src/__tests__/workflow-validator.test.ts
Normal file
@ -0,0 +1,92 @@
|
|||||||
|
/**
|
||||||
|
* Workflow validator (Zod schema) tests for UAT.
|
||||||
|
* Covers create and update workflow validation – WF-01 to WF-04.
|
||||||
|
*
|
||||||
|
* Run: npm test -- workflow-validator
|
||||||
|
*/
|
||||||
|
|
||||||
|
import {
|
||||||
|
createWorkflowSchema,
|
||||||
|
updateWorkflowSchema,
|
||||||
|
validateCreateWorkflow,
|
||||||
|
validateUpdateWorkflow,
|
||||||
|
} from '../validators/workflow.validator';
|
||||||
|
|
||||||
|
const validApprovalLevel = {
|
||||||
|
email: 'approver@example.com',
|
||||||
|
tatHours: 24,
|
||||||
|
};
|
||||||
|
|
||||||
|
describe('Workflow validator', () => {
|
||||||
|
describe('createWorkflowSchema (WF-01 to WF-04)', () => {
|
||||||
|
it('WF-01: rejects missing title', () => {
|
||||||
|
const data = {
|
||||||
|
templateType: 'CUSTOM',
|
||||||
|
description: 'Test description',
|
||||||
|
priority: 'STANDARD',
|
||||||
|
approvalLevels: [validApprovalLevel],
|
||||||
|
};
|
||||||
|
expect(() => createWorkflowSchema.parse(data)).toThrow();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('WF-02: rejects invalid priority', () => {
|
||||||
|
const data = {
|
||||||
|
templateType: 'CUSTOM',
|
||||||
|
title: 'Test',
|
||||||
|
description: 'Desc',
|
||||||
|
priority: 'INVALID_PRIORITY',
|
||||||
|
approvalLevels: [validApprovalLevel],
|
||||||
|
};
|
||||||
|
expect(() => createWorkflowSchema.parse(data)).toThrow();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('WF-03: rejects empty approval levels', () => {
|
||||||
|
const data = {
|
||||||
|
templateType: 'CUSTOM',
|
||||||
|
title: 'Test',
|
||||||
|
description: 'Desc',
|
||||||
|
priority: 'STANDARD',
|
||||||
|
approvalLevels: [],
|
||||||
|
};
|
||||||
|
expect(() => createWorkflowSchema.parse(data)).toThrow();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('WF-04: accepts valid minimal create payload', () => {
|
||||||
|
const data = {
|
||||||
|
templateType: 'CUSTOM',
|
||||||
|
title: 'Valid Title',
|
||||||
|
description: 'Valid description',
|
||||||
|
priority: 'STANDARD',
|
||||||
|
approvalLevels: [validApprovalLevel],
|
||||||
|
};
|
||||||
|
const result = validateCreateWorkflow(data);
|
||||||
|
expect(result.title).toBe('Valid Title');
|
||||||
|
expect(result.priority).toBe('STANDARD');
|
||||||
|
expect(result.approvalLevels).toHaveLength(1);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('accepts EXPRESS priority', () => {
|
||||||
|
const data = {
|
||||||
|
templateType: 'CUSTOM',
|
||||||
|
title: 'Express',
|
||||||
|
description: 'Desc',
|
||||||
|
priority: 'EXPRESS',
|
||||||
|
approvalLevels: [validApprovalLevel],
|
||||||
|
};
|
||||||
|
const result = createWorkflowSchema.parse(data);
|
||||||
|
expect(result.priority).toBe('EXPRESS');
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('updateWorkflowSchema', () => {
|
||||||
|
it('accepts partial update with valid status', () => {
|
||||||
|
const data = { status: 'APPROVED' };
|
||||||
|
const result = updateWorkflowSchema.parse(data);
|
||||||
|
expect(result.status).toBe('APPROVED');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('rejects invalid status', () => {
|
||||||
|
expect(() => updateWorkflowSchema.parse({ status: 'INVALID' })).toThrow();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
30
src/app.ts
30
src/app.ts
@ -6,11 +6,14 @@ import cookieParser from 'cookie-parser';
|
|||||||
import { UserService } from './services/user.service';
|
import { UserService } from './services/user.service';
|
||||||
import { SSOUserData } from './types/auth.types';
|
import { SSOUserData } from './types/auth.types';
|
||||||
import { sequelize } from './config/database';
|
import { sequelize } from './config/database';
|
||||||
|
import { ensureCpcCdcSchema } from './services/cpc-cdc/ensureCpcCdcSchema';
|
||||||
import { corsMiddleware } from './middlewares/cors.middleware';
|
import { corsMiddleware } from './middlewares/cors.middleware';
|
||||||
import { authenticateToken } from './middlewares/auth.middleware';
|
import { authenticateToken } from './middlewares/auth.middleware';
|
||||||
import { requireAdmin } from './middlewares/authorization.middleware';
|
import { requireAdmin } from './middlewares/authorization.middleware';
|
||||||
import { metricsMiddleware, createMetricsRouter } from './middlewares/metrics.middleware';
|
import { metricsMiddleware, createMetricsRouter } from './middlewares/metrics.middleware';
|
||||||
import routes from './routes/index';
|
import routes from './routes/index';
|
||||||
|
import { registerCpcCsdCompatRoutes } from './routes/cpc-csd-compat.mount';
|
||||||
|
import form16Routes from './routes/form16.routes';
|
||||||
import { ensureUploadDir, UPLOAD_DIR } from './config/storage';
|
import { ensureUploadDir, UPLOAD_DIR } from './config/storage';
|
||||||
import { initializeGoogleSecretManager } from './services/googleSecretManager.service';
|
import { initializeGoogleSecretManager } from './services/googleSecretManager.service';
|
||||||
import { sanitizationMiddleware } from './middlewares/sanitization.middleware';
|
import { sanitizationMiddleware } from './middlewares/sanitization.middleware';
|
||||||
@ -27,15 +30,25 @@ const app: express.Application = express();
|
|||||||
// 1. Security middleware - Manual "Gold Standard" CSP to ensure it survives 301/404/etc.
|
// 1. Security middleware - Manual "Gold Standard" CSP to ensure it survives 301/404/etc.
|
||||||
// This handles a specific Express/Helmet edge case where redirects lose headers.
|
// This handles a specific Express/Helmet edge case where redirects lose headers.
|
||||||
app.use((req: express.Request, res: express.Response, next: express.NextFunction) => {
|
app.use((req: express.Request, res: express.Response, next: express.NextFunction) => {
|
||||||
const isDev = process.env.NODE_ENV === 'development' || process.env.NODE_ENV === 'local';
|
// Match server.ts: anything except production is "dev" for local tooling (.env often uses NODE_ENV=dev)
|
||||||
|
const nodeEnv = (process.env.NODE_ENV || '').toLowerCase();
|
||||||
|
const isDev = nodeEnv !== 'production' && nodeEnv !== 'prod';
|
||||||
const frontendUrl = process.env.FRONTEND_URL || 'http://localhost:3000';
|
const frontendUrl = process.env.FRONTEND_URL || 'http://localhost:3000';
|
||||||
|
|
||||||
// Build connect-src dynamically
|
// Build connect-src dynamically
|
||||||
const connectSrc = ["'self'", "blob:", "data:"];
|
const connectSrc = ["'self'", "blob:", "data:"];
|
||||||
if (isDev) {
|
if (isDev) {
|
||||||
connectSrc.push("http://localhost:3000", "http://localhost:5000", "ws://localhost:3000", "ws://localhost:5000");
|
for (let port = 3000; port <= 3010; port++) {
|
||||||
if (frontendUrl.includes('localhost')) connectSrc.push(frontendUrl);
|
connectSrc.push(`http://localhost:${port}`, `http://127.0.0.1:${port}`);
|
||||||
|
connectSrc.push(`ws://localhost:${port}`, `ws://127.0.0.1:${port}`);
|
||||||
|
}
|
||||||
|
for (let port = 5000; port <= 5005; port++) {
|
||||||
|
connectSrc.push(`http://localhost:${port}`, `http://127.0.0.1:${port}`);
|
||||||
|
connectSrc.push(`ws://localhost:${port}`, `ws://127.0.0.1:${port}`);
|
||||||
|
}
|
||||||
|
if (frontendUrl.includes('localhost') || frontendUrl.includes('127.0.0.1')) connectSrc.push(frontendUrl);
|
||||||
} else if (frontendUrl && frontendUrl !== '*') {
|
} else if (frontendUrl && frontendUrl !== '*') {
|
||||||
|
|
||||||
const origins = frontendUrl.split(',').map(url => url.trim()).filter(Boolean);
|
const origins = frontendUrl.split(',').map(url => url.trim()).filter(Boolean);
|
||||||
connectSrc.push(...origins);
|
connectSrc.push(...origins);
|
||||||
}
|
}
|
||||||
@ -96,6 +109,7 @@ export const initializeAppDatabase = async () => {
|
|||||||
try {
|
try {
|
||||||
await sequelize.authenticate();
|
await sequelize.authenticate();
|
||||||
console.log('✅ App database connection established');
|
console.log('✅ App database connection established');
|
||||||
|
await ensureCpcCdcSchema();
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
console.error('❌ App database connection failed:', error);
|
console.error('❌ App database connection failed:', error);
|
||||||
throw error;
|
throw error;
|
||||||
@ -112,6 +126,11 @@ if (process.env.TRUST_PROXY === 'true' || process.env.NODE_ENV === 'production')
|
|||||||
app.set('trust proxy', 1);
|
app.set('trust proxy', 1);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Form 16 extract MUST be mounted BEFORE body parsers so multipart stream is not consumed
|
||||||
|
// (REform16 pattern: extract uses multer disk storage; mounting first guarantees raw stream for multer)
|
||||||
|
ensureUploadDir();
|
||||||
|
app.use('/api/v1/form16', form16Routes);
|
||||||
|
|
||||||
// Body parsing middleware
|
// Body parsing middleware
|
||||||
app.use(express.json({ limit: '10mb' }));
|
app.use(express.json({ limit: '10mb' }));
|
||||||
app.use(express.urlencoded({ extended: true, limit: '10mb' }));
|
app.use(express.urlencoded({ extended: true, limit: '10mb' }));
|
||||||
@ -141,7 +160,10 @@ app.get('/health', (_req: express.Request, res: express.Response) => {
|
|||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
// Mount API routes - MUST be before static file serving
|
// CPC-CSD-compatible paths (`/api/upload`, `/api/documents/*`, `/api/v1/ocr/*`) — same as `CPC-CSD/server` router
|
||||||
|
registerCpcCsdCompatRoutes(app);
|
||||||
|
|
||||||
|
// Mount API routes (form16 already mounted above before body parser)
|
||||||
app.use('/api/v1', routes);
|
app.use('/api/v1', routes);
|
||||||
|
|
||||||
// Serve uploaded files statically
|
// Serve uploaded files statically
|
||||||
|
|||||||
10
src/config/sessionPolicy.ts
Normal file
10
src/config/sessionPolicy.ts
Normal file
@ -0,0 +1,10 @@
|
|||||||
|
/**
|
||||||
|
* Centralized session policy for VAPT compliance.
|
||||||
|
* Keep strict constants (no environment overrides) to prevent accidental relaxation.
|
||||||
|
*/
|
||||||
|
|
||||||
|
export const ACCESS_TOKEN_TTL = '30m';
|
||||||
|
export const REFRESH_TOKEN_TTL = '30m';
|
||||||
|
export const ACCESS_TOKEN_TTL_MS = 30 * 60 * 1000;
|
||||||
|
export const REFRESH_TOKEN_TTL_MS = 30 * 60 * 1000;
|
||||||
|
|
||||||
@ -1,11 +1,13 @@
|
|||||||
import { SSOConfig, SSOUserData } from '../types/auth.types';
|
import { SSOConfig, SSOUserData } from '../types/auth.types';
|
||||||
|
import { ACCESS_TOKEN_TTL, REFRESH_TOKEN_TTL } from './sessionPolicy';
|
||||||
|
|
||||||
// Use getter functions to read from process.env dynamically
|
// Use getter functions to read from process.env dynamically
|
||||||
// This ensures values are read after secrets are loaded from Google Secret Manager
|
// This ensures values are read after secrets are loaded from Google Secret Manager
|
||||||
const ssoConfig: SSOConfig = {
|
const ssoConfig: SSOConfig = {
|
||||||
get jwtSecret() { return process.env.JWT_SECRET || ''; },
|
get jwtSecret() { return process.env.JWT_SECRET || ''; },
|
||||||
get jwtExpiry() { return process.env.JWT_EXPIRY || '24h'; },
|
// VAPT hard policy: no env-based override for token lifetimes.
|
||||||
get refreshTokenExpiry() { return process.env.REFRESH_TOKEN_EXPIRY || '7d'; },
|
get jwtExpiry() { return ACCESS_TOKEN_TTL; },
|
||||||
|
get refreshTokenExpiry() { return REFRESH_TOKEN_TTL; },
|
||||||
get sessionSecret() { return process.env.SESSION_SECRET || ''; },
|
get sessionSecret() { return process.env.SESSION_SECRET || ''; },
|
||||||
// Use only FRONTEND_URL from environment - no fallbacks
|
// Use only FRONTEND_URL from environment - no fallbacks
|
||||||
get allowedOrigins() {
|
get allowedOrigins() {
|
||||||
|
|||||||
@ -67,8 +67,8 @@ export const SYSTEM_CONFIG = {
|
|||||||
|
|
||||||
// Session & Security
|
// Session & Security
|
||||||
SECURITY: {
|
SECURITY: {
|
||||||
SESSION_TIMEOUT_MINUTES: parseInt(process.env.SESSION_TIMEOUT_MINUTES || '480', 10), // 8 hours
|
SESSION_TIMEOUT_MINUTES: parseInt(process.env.SESSION_TIMEOUT_MINUTES || '30', 10),
|
||||||
JWT_EXPIRY: process.env.JWT_EXPIRY || '8h',
|
JWT_EXPIRY: process.env.JWT_EXPIRY || '30m',
|
||||||
ENABLE_2FA: process.env.ENABLE_2FA === 'true',
|
ENABLE_2FA: process.env.ENABLE_2FA === 'true',
|
||||||
},
|
},
|
||||||
|
|
||||||
|
|||||||
1157
src/controllers/CpcCdcController.ts
Normal file
1157
src/controllers/CpcCdcController.ts
Normal file
File diff suppressed because it is too large
Load Diff
204
src/controllers/CpcReportController.ts
Normal file
204
src/controllers/CpcReportController.ts
Normal file
@ -0,0 +1,204 @@
|
|||||||
|
import { Request, Response } from 'express';
|
||||||
|
import { CpcHistoryService } from '../services/cpc-cdc/CpcHistoryService';
|
||||||
|
import { CpcDocument } from '../models/CpcDocument';
|
||||||
|
import { appendCpcDocumentFilters, cpcWhereFromAndParts } from '../services/cpc-cdc/utils';
|
||||||
|
import ExcelJS from 'exceljs';
|
||||||
|
import { ResponseHandler } from '../utils/responseHandler';
|
||||||
|
|
||||||
|
import { Op } from 'sequelize';
|
||||||
|
|
||||||
|
export class CpcReportController {
|
||||||
|
/**
|
||||||
|
* Download Excel report for a specific claim
|
||||||
|
*/
|
||||||
|
async downloadReport(req: Request, res: Response) {
|
||||||
|
try {
|
||||||
|
const { claimId } = req.params;
|
||||||
|
const { attempt } = req.query;
|
||||||
|
|
||||||
|
const where: any = {
|
||||||
|
[Op.or]: [
|
||||||
|
{ claimId: claimId },
|
||||||
|
{ bookingId: claimId }
|
||||||
|
]
|
||||||
|
};
|
||||||
|
if (attempt) where.attemptNo = parseInt(attempt as string);
|
||||||
|
|
||||||
|
const docs = await CpcDocument.findAll({
|
||||||
|
where,
|
||||||
|
order: [['createdAt', 'DESC']]
|
||||||
|
});
|
||||||
|
|
||||||
|
if (!docs || docs.length === 0) {
|
||||||
|
return ResponseHandler.error(res, "No records found for this claim", 404);
|
||||||
|
}
|
||||||
|
|
||||||
|
const workbook = new ExcelJS.Workbook();
|
||||||
|
const sheet = workbook.addWorksheet('Validation Report');
|
||||||
|
|
||||||
|
// HEADERS
|
||||||
|
const row1 = sheet.getRow(1);
|
||||||
|
row1.values = [
|
||||||
|
'Booking Type', 'Booking Number', 'Document Count', 'Document Name',
|
||||||
|
'Customer Name', '', '', '', '',
|
||||||
|
'PO Number /Authorisation Letter Number', '', '', '', '',
|
||||||
|
'Aadhar Number', '', '', '', '',
|
||||||
|
'PO Amount / Authorisation Letter Amount', '', '', '', '',
|
||||||
|
'Signature & Stamp Availability', '', '', '', '',
|
||||||
|
'Final Validation'
|
||||||
|
];
|
||||||
|
|
||||||
|
const row2 = sheet.getRow(2);
|
||||||
|
row2.values = [
|
||||||
|
'', '', '', '',
|
||||||
|
'Expected', 'OCR', 'Accuracy Matching %', 'Accuracy Criteria', 'Is Match the Accuracy',
|
||||||
|
'Expected', 'OCR', 'Accuracy Matching %', 'Accuracy Criteria', 'Is Match the Accuracy',
|
||||||
|
'Expected', 'OCR', 'Accuracy Matching %', 'Accuracy Criteria', 'Is Match the Accuracy',
|
||||||
|
'Expected', 'OCR', 'Accuracy Matching %', 'Accuracy Criteria', 'Is Match the Accuracy',
|
||||||
|
'Expected', 'OCR', 'Accuracy Matching Availability', 'Accuracy Criteria', 'Success Ratio',
|
||||||
|
''
|
||||||
|
];
|
||||||
|
|
||||||
|
sheet.mergeCells('E1:I1');
|
||||||
|
sheet.mergeCells('J1:N1');
|
||||||
|
sheet.mergeCells('O1:S1');
|
||||||
|
sheet.mergeCells('T1:X1');
|
||||||
|
sheet.mergeCells('Y1:AC1');
|
||||||
|
sheet.mergeCells('A1:A2'); sheet.mergeCells('B1:B2'); sheet.mergeCells('C1:C2'); sheet.mergeCells('D1:D2');
|
||||||
|
sheet.mergeCells('AD1:AD2');
|
||||||
|
|
||||||
|
[row1, row2].forEach((r: any) => {
|
||||||
|
r.font = { bold: true, size: 9 };
|
||||||
|
r.alignment = { vertical: 'middle', horizontal: 'center', wrapText: true };
|
||||||
|
r.eachCell((cell: any) => {
|
||||||
|
cell.fill = { type: 'pattern', pattern: 'solid', fgColor: { argb: 'FFD9D9D9' } };
|
||||||
|
cell.border = { top: { style: 'thin' }, left: { style: 'thin' }, bottom: { style: 'thin' }, right: { style: 'thin' } };
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
docs.forEach((doc: any, idx: number) => {
|
||||||
|
|
||||||
|
const rowData = CpcHistoryService.getSummaryRow(doc, idx);
|
||||||
|
const values = [
|
||||||
|
rowData.booking_type,
|
||||||
|
rowData.booking_number,
|
||||||
|
rowData.document_count,
|
||||||
|
rowData.document_name,
|
||||||
|
rowData.customer_name_group.msd, rowData.customer_name_group.ocr, rowData.customer_name_group.accuracy_pct, rowData.customer_name_group.criteria, rowData.customer_name_group.is_match,
|
||||||
|
rowData.po_or_auth_number_group.msd, rowData.po_or_auth_number_group.ocr, rowData.po_or_auth_number_group.accuracy_pct, rowData.po_or_auth_number_group.criteria, rowData.po_or_auth_number_group.is_match,
|
||||||
|
rowData.aadhaar_number_group.msd, rowData.aadhaar_number_group.ocr, rowData.aadhaar_number_group.accuracy_pct, rowData.aadhaar_number_group.criteria, rowData.aadhaar_number_group.is_match,
|
||||||
|
rowData.amount_group.msd, rowData.amount_group.ocr, rowData.amount_group.accuracy_pct, rowData.amount_group.criteria, rowData.amount_group.is_match,
|
||||||
|
rowData.stamp_group.msd, rowData.stamp_group.ocr, rowData.stamp_group.accuracy_pct, rowData.stamp_group.criteria, rowData.stamp_group.is_match,
|
||||||
|
rowData.final_validation
|
||||||
|
];
|
||||||
|
const row = sheet.addRow(values);
|
||||||
|
row.eachCell((cell: any, colNum: number) => {
|
||||||
|
cell.border = { top: { style: 'thin' }, left: { style: 'thin' }, bottom: { style: 'thin' }, right: { style: 'thin' } };
|
||||||
|
cell.font = { size: 8 };
|
||||||
|
cell.alignment = { vertical: 'middle', horizontal: 'center' };
|
||||||
|
|
||||||
|
if (cell.value === 'N.A.' && colNum > 4) {
|
||||||
|
cell.fill = { type: 'pattern', pattern: 'solid', fgColor: { argb: 'FFFF0000' } };
|
||||||
|
cell.font = { color: { argb: 'FFFFFFFF' }, size: 8, bold: true };
|
||||||
|
}
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
sheet.addRow([]);
|
||||||
|
sheet.addRow([]);
|
||||||
|
const detailHeader = sheet.addRow(['Detailed Field-Wise Comparison']);
|
||||||
|
detailHeader.font = { bold: true, size: 12 };
|
||||||
|
|
||||||
|
docs.forEach((doc: any) => {
|
||||||
|
const docHeader = sheet.addRow([`Document: ${doc.documentType?.replace(/_/g, ' ')}`]);
|
||||||
|
docHeader.font = { bold: true, size: 10 };
|
||||||
|
|
||||||
|
|
||||||
|
const subHeader = sheet.addRow(['Field', 'Expected', 'Extracted (OCR)', 'Accuracy %', 'Criteria', 'Status', 'Message']);
|
||||||
|
const finalResults = CpcHistoryService.getDetailedFieldResults(doc);
|
||||||
|
|
||||||
|
finalResults.forEach((f: any) => {
|
||||||
|
sheet.addRow([
|
||||||
|
f.field.replace(/_/g, ' '),
|
||||||
|
f.expected || '-',
|
||||||
|
f.extracted || 'Not extracted',
|
||||||
|
f.accuracy,
|
||||||
|
f.criteria,
|
||||||
|
f.pass ? 'PASS' : 'FAIL',
|
||||||
|
f.message
|
||||||
|
]);
|
||||||
|
});
|
||||||
|
|
||||||
|
sheet.addRow([]);
|
||||||
|
});
|
||||||
|
|
||||||
|
res.setHeader('Content-Type', 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet');
|
||||||
|
res.setHeader('Content-Disposition', `attachment; filename=Report_${claimId}.xlsx`);
|
||||||
|
await workbook.xlsx.write(res);
|
||||||
|
res.end();
|
||||||
|
} catch (error: any) {
|
||||||
|
return ResponseHandler.error(res, error.message || "Report generation failed", 500);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Download Master Audit Report for all filtered documents
|
||||||
|
*/
|
||||||
|
async downloadAllReport(req: Request, res: Response) {
|
||||||
|
try {
|
||||||
|
const { search, status, type } = req.query;
|
||||||
|
const andParts: Record<string, unknown>[] = [];
|
||||||
|
appendCpcDocumentFilters(andParts, {
|
||||||
|
type: type as string,
|
||||||
|
status: status as string,
|
||||||
|
search: search as string,
|
||||||
|
searchIncludeId: false
|
||||||
|
});
|
||||||
|
const where = cpcWhereFromAndParts(andParts);
|
||||||
|
|
||||||
|
const docs = await CpcDocument.findAll({
|
||||||
|
where,
|
||||||
|
order: [['createdAt', 'DESC']]
|
||||||
|
});
|
||||||
|
|
||||||
|
const workbook = new ExcelJS.Workbook();
|
||||||
|
const sheet = workbook.addWorksheet('Master Audit Trail');
|
||||||
|
|
||||||
|
const row1 = sheet.getRow(1);
|
||||||
|
row1.values = ['Booking Type', 'Booking Number', 'Doc ID', 'Document Name', 'Customer Name', '', '', '', '', 'PO Number /Authorisation Letter Number', '', '', '', '', 'Aadhar Number', '', '', '', '', 'PO Amount / Authorisation Letter Amount', '', '', '', '', 'Signature & Stamp Availability', '', '', '', '', 'Final Validation'];
|
||||||
|
|
||||||
|
const row2 = sheet.getRow(2);
|
||||||
|
row2.values = ['', '', '', '', 'Expected', 'OCR', 'Accuracy Matching %', 'Accuracy Criteria', 'Is Match the Accuracy', 'Expected', 'OCR', 'Accuracy Matching %', 'Accuracy Criteria', 'Is Match the Accuracy', 'Expected', 'OCR', 'Accuracy Matching %', 'Accuracy Criteria', 'Is Match the Accuracy', 'Expected', 'OCR', 'Accuracy Matching %', 'Accuracy Criteria', 'Is Match the Accuracy', 'Expected', 'OCR', 'Accuracy Matching Availability', 'Accuracy Criteria', 'Success Ratio', ''];
|
||||||
|
|
||||||
|
sheet.mergeCells('E1:I1'); sheet.mergeCells('J1:N1'); sheet.mergeCells('O1:S1'); sheet.mergeCells('T1:X1'); sheet.mergeCells('Y1:AC1'); sheet.mergeCells('A1:A2'); sheet.mergeCells('B1:B2'); sheet.mergeCells('C1:C2'); sheet.mergeCells('D1:D2'); sheet.mergeCells('AD1:AD2');
|
||||||
|
|
||||||
|
[row1, row2].forEach((r: any) => {
|
||||||
|
r.font = { bold: true, size: 9 };
|
||||||
|
r.alignment = { vertical: 'middle', horizontal: 'center', wrapText: true };
|
||||||
|
r.eachCell((cell: any) => { cell.fill = { type: 'pattern', pattern: 'solid', fgColor: { argb: 'FFD9D9D9' } }; cell.border = { top: { style: 'thin' }, left: { style: 'thin' }, bottom: { style: 'thin' }, right: { style: 'thin' } }; });
|
||||||
|
});
|
||||||
|
|
||||||
|
docs.forEach((doc: any, idx: number) => {
|
||||||
|
|
||||||
|
const rowData = CpcHistoryService.getSummaryRow(doc, idx);
|
||||||
|
const values = [
|
||||||
|
rowData.booking_type, rowData.booking_number, String(doc.id).slice(0, 8), rowData.document_name,
|
||||||
|
rowData.customer_name_group.msd, rowData.customer_name_group.ocr, rowData.customer_name_group.accuracy_pct, rowData.customer_name_group.criteria, rowData.customer_name_group.is_match,
|
||||||
|
rowData.po_or_auth_number_group.msd, rowData.po_or_auth_number_group.ocr, rowData.po_or_auth_number_group.accuracy_pct, rowData.po_or_auth_number_group.criteria, rowData.po_or_auth_number_group.is_match,
|
||||||
|
rowData.aadhaar_number_group.msd, rowData.aadhaar_number_group.ocr, rowData.aadhaar_number_group.accuracy_pct, rowData.aadhaar_number_group.criteria, rowData.aadhaar_number_group.is_match,
|
||||||
|
rowData.amount_group.msd, rowData.amount_group.ocr, rowData.amount_group.accuracy_pct, rowData.amount_group.criteria, rowData.amount_group.is_match,
|
||||||
|
rowData.stamp_group.msd, rowData.stamp_group.ocr, rowData.stamp_group.accuracy_pct, rowData.stamp_group.criteria, rowData.stamp_group.is_match,
|
||||||
|
rowData.final_validation
|
||||||
|
];
|
||||||
|
const row = sheet.addRow(values);
|
||||||
|
});
|
||||||
|
|
||||||
|
res.setHeader('Content-Type', 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet');
|
||||||
|
res.setHeader('Content-Disposition', `attachment; filename=Master_Audit_Report.xlsx`);
|
||||||
|
await workbook.xlsx.write(res);
|
||||||
|
res.end();
|
||||||
|
} catch (error: any) {
|
||||||
|
return ResponseHandler.error(res, error.message || "Master report failed", 500);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
@ -8,7 +8,12 @@ import logger from '@utils/logger';
|
|||||||
import { initializeHolidaysCache, clearWorkingHoursCache } from '@utils/tatTimeUtils';
|
import { initializeHolidaysCache, clearWorkingHoursCache } from '@utils/tatTimeUtils';
|
||||||
import { clearConfigCache } from '@services/configReader.service';
|
import { clearConfigCache } from '@services/configReader.service';
|
||||||
import { User, UserRole } from '@models/User';
|
import { User, UserRole } from '@models/User';
|
||||||
import { sanitizeHtml } from '@utils/sanitizer';
|
import { sanitizeHtml, sanitizeObject, isHtmlEmpty } from '@utils/sanitizer';
|
||||||
|
import {
|
||||||
|
CPC_CSD_ADMIN_CONFIG_KEY,
|
||||||
|
CPC_CDC_ADMIN_CONFIG_KEY_LEGACY,
|
||||||
|
selectCpcCsdAdminConfigValue,
|
||||||
|
} from '@utils/cpcCsdAdminConfigDb';
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Get all holidays (with optional year filter)
|
* Get all holidays (with optional year filter)
|
||||||
@ -125,7 +130,9 @@ export const createHoliday = async (req: Request, res: Response): Promise<void>
|
|||||||
logger.error('[Admin] Error creating holiday:', error);
|
logger.error('[Admin] Error creating holiday:', error);
|
||||||
res.status(500).json({
|
res.status(500).json({
|
||||||
success: false,
|
success: false,
|
||||||
error: error.message || 'Failed to create holiday'
|
message: 'Failed to create holiday',
|
||||||
|
error: error.message,
|
||||||
|
details: error.errors // Sequelize validation errors are usually in .errors
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
@ -172,7 +179,9 @@ export const updateHoliday = async (req: Request, res: Response): Promise<void>
|
|||||||
logger.error('[Admin] Error updating holiday:', error);
|
logger.error('[Admin] Error updating holiday:', error);
|
||||||
res.status(500).json({
|
res.status(500).json({
|
||||||
success: false,
|
success: false,
|
||||||
error: error.message || 'Failed to update holiday'
|
message: 'Failed to update holiday',
|
||||||
|
error: error.message,
|
||||||
|
details: error.errors
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
@ -403,10 +412,18 @@ export const updateConfiguration = async (req: Request, res: Response): Promise<
|
|||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
// Sanitize config value if it's likely to be rendered as HTML
|
// Sanitize config value using unified sanitizeObject
|
||||||
// We can be selective or just sanitize all strings for safety
|
// This will handle strings, numbers, and nested objects consistently
|
||||||
if (typeof configValue === 'string') {
|
const sanitizedObj = sanitizeObject({ [configKey]: configValue });
|
||||||
configValue = sanitizeHtml(configValue);
|
configValue = sanitizedObj[configKey];
|
||||||
|
|
||||||
|
// If it's a string, ensure it's not effectively empty after sanitization
|
||||||
|
if (typeof configValue === 'string' && isHtmlEmpty(configValue)) {
|
||||||
|
res.status(400).json({
|
||||||
|
success: false,
|
||||||
|
error: 'Config value is required and must contain valid content'
|
||||||
|
});
|
||||||
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
// Update configuration
|
// Update configuration
|
||||||
@ -512,6 +529,294 @@ export const resetConfiguration = async (req: Request, res: Response): Promise<v
|
|||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
|
/** Form 16 admin config – stored in same admin_configurations table as other workflow/admin configs (config_key = FORM16_ADMIN_CONFIG, config_value = JSON). */
|
||||||
|
const FORM16_CONFIG_KEY = 'FORM16_ADMIN_CONFIG';
|
||||||
|
|
||||||
|
/** Normalize run-at time to HH:mm (e.g. "9:0" -> "09:00"). */
|
||||||
|
function normalizeRunAtTime(s: string): string {
|
||||||
|
const [h, m] = s.split(':').map((x) => parseInt(x, 10));
|
||||||
|
if (Number.isNaN(h) || Number.isNaN(m)) return '09:00';
|
||||||
|
const hh = Math.max(0, Math.min(23, h));
|
||||||
|
const mm = Math.max(0, Math.min(59, m));
|
||||||
|
return `${String(hh).padStart(2, '0')}:${String(mm).padStart(2, '0')}`;
|
||||||
|
}
|
||||||
|
|
||||||
|
const defaultNotificationItem = (template: string) => ({ enabled: true, template });
|
||||||
|
/** 26AS data added: separate message for RE users and for dealers */
|
||||||
|
const default26AsNotification = () => ({
|
||||||
|
enabled: true,
|
||||||
|
templateRe: '26AS data has been added. Please review and use for matching dealer Form 16 submissions.',
|
||||||
|
templateDealers: 'New 26AS data has been uploaded. You can now submit your Form 16 for the relevant quarter if you haven’t already.',
|
||||||
|
});
|
||||||
|
const DEFAULT_FORM16_CONFIG = {
|
||||||
|
submissionViewerEmails: [] as string[],
|
||||||
|
twentySixAsViewerEmails: [] as string[],
|
||||||
|
reminderEnabled: true,
|
||||||
|
reminderDays: 7,
|
||||||
|
// Form 16 notification events (each: enabled + optional template(s))
|
||||||
|
notification26AsDataAdded: default26AsNotification(),
|
||||||
|
notificationForm16SuccessCreditNote: defaultNotificationItem('Form 16 submitted successfully. Credit note: [CreditNoteRef].'),
|
||||||
|
notificationForm16Unsuccessful: defaultNotificationItem('Form 16 submission was unsuccessful. Issue: [Issue].'),
|
||||||
|
alertSubmitForm16Enabled: true,
|
||||||
|
alertSubmitForm16FrequencyDays: 0,
|
||||||
|
alertSubmitForm16FrequencyHours: 24,
|
||||||
|
alertSubmitForm16RunAtTime: '09:00',
|
||||||
|
alertSubmitForm16Template: 'Dear [Name], please submit Form 16A for the pending period. Due: [DueDate].',
|
||||||
|
reminderNotificationEnabled: true,
|
||||||
|
reminderFrequencyDays: 0,
|
||||||
|
reminderFrequencyHours: 12,
|
||||||
|
reminderRunAtTime: '10:00',
|
||||||
|
reminderNotificationTemplate: 'Reminder: Dear [Name], your Form 16A submission is pending for request [Request ID]. Please complete it.',
|
||||||
|
};
|
||||||
|
|
||||||
|
const DEFAULT_CPC_CSD_CONFIG = {
|
||||||
|
viewerEmails: [] as string[],
|
||||||
|
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get Form 16 admin configuration (who can see submission data, 26AS, reminders)
|
||||||
|
*/
|
||||||
|
export const getForm16Config = async (req: Request, res: Response): Promise<void> => {
|
||||||
|
try {
|
||||||
|
const result = await sequelize.query<{ config_value: string }>(
|
||||||
|
`SELECT config_value FROM admin_configurations WHERE config_key = :configKey LIMIT 1`,
|
||||||
|
{ replacements: { configKey: FORM16_CONFIG_KEY }, type: QueryTypes.SELECT }
|
||||||
|
);
|
||||||
|
if (result && result.length > 0 && result[0].config_value) {
|
||||||
|
try {
|
||||||
|
const parsed = JSON.parse(result[0].config_value);
|
||||||
|
const mergeNotification = (key: keyof typeof DEFAULT_FORM16_CONFIG) => {
|
||||||
|
const val = parsed[key];
|
||||||
|
const def = DEFAULT_FORM16_CONFIG[key] as any;
|
||||||
|
if (val && typeof val === 'object' && typeof val.enabled === 'boolean') {
|
||||||
|
return { enabled: val.enabled, template: typeof val.template === 'string' ? val.template : (def?.template ?? '') };
|
||||||
|
}
|
||||||
|
return def;
|
||||||
|
};
|
||||||
|
const def26As = DEFAULT_FORM16_CONFIG.notification26AsDataAdded as { enabled: boolean; templateRe: string; templateDealers: string };
|
||||||
|
const merge26As = () => {
|
||||||
|
const val = parsed.notification26AsDataAdded;
|
||||||
|
if (val && typeof val === 'object' && typeof val.enabled === 'boolean') {
|
||||||
|
return {
|
||||||
|
enabled: val.enabled,
|
||||||
|
templateRe: typeof val.templateRe === 'string' ? val.templateRe : (typeof val.template === 'string' ? val.template : def26As.templateRe),
|
||||||
|
templateDealers: typeof val.templateDealers === 'string' ? val.templateDealers : def26As.templateDealers,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
return def26As;
|
||||||
|
};
|
||||||
|
res.json({
|
||||||
|
success: true,
|
||||||
|
data: {
|
||||||
|
submissionViewerEmails: Array.isArray(parsed.submissionViewerEmails) ? parsed.submissionViewerEmails : DEFAULT_FORM16_CONFIG.submissionViewerEmails,
|
||||||
|
twentySixAsViewerEmails: Array.isArray(parsed.twentySixAsViewerEmails) ? parsed.twentySixAsViewerEmails : DEFAULT_FORM16_CONFIG.twentySixAsViewerEmails,
|
||||||
|
reminderEnabled: typeof parsed.reminderEnabled === 'boolean' ? parsed.reminderEnabled : DEFAULT_FORM16_CONFIG.reminderEnabled,
|
||||||
|
reminderDays: typeof parsed.reminderDays === 'number' ? parsed.reminderDays : DEFAULT_FORM16_CONFIG.reminderDays,
|
||||||
|
notification26AsDataAdded: merge26As(),
|
||||||
|
notificationForm16SuccessCreditNote: mergeNotification('notificationForm16SuccessCreditNote'),
|
||||||
|
notificationForm16Unsuccessful: mergeNotification('notificationForm16Unsuccessful'),
|
||||||
|
alertSubmitForm16Enabled: typeof parsed.alertSubmitForm16Enabled === 'boolean' ? parsed.alertSubmitForm16Enabled : DEFAULT_FORM16_CONFIG.alertSubmitForm16Enabled,
|
||||||
|
alertSubmitForm16FrequencyDays: typeof parsed.alertSubmitForm16FrequencyDays === 'number' ? parsed.alertSubmitForm16FrequencyDays : DEFAULT_FORM16_CONFIG.alertSubmitForm16FrequencyDays,
|
||||||
|
alertSubmitForm16FrequencyHours: typeof parsed.alertSubmitForm16FrequencyHours === 'number' ? parsed.alertSubmitForm16FrequencyHours : DEFAULT_FORM16_CONFIG.alertSubmitForm16FrequencyHours,
|
||||||
|
alertSubmitForm16RunAtTime: typeof parsed.alertSubmitForm16RunAtTime === 'string' ? (parsed.alertSubmitForm16RunAtTime.trim() ? (/^\d{1,2}:\d{2}$/.test(parsed.alertSubmitForm16RunAtTime.trim()) ? normalizeRunAtTime(parsed.alertSubmitForm16RunAtTime.trim()) : DEFAULT_FORM16_CONFIG.alertSubmitForm16RunAtTime) : '') : DEFAULT_FORM16_CONFIG.alertSubmitForm16RunAtTime,
|
||||||
|
alertSubmitForm16Template: typeof parsed.alertSubmitForm16Template === 'string' ? parsed.alertSubmitForm16Template : DEFAULT_FORM16_CONFIG.alertSubmitForm16Template,
|
||||||
|
reminderNotificationEnabled: typeof parsed.reminderNotificationEnabled === 'boolean' ? parsed.reminderNotificationEnabled : DEFAULT_FORM16_CONFIG.reminderNotificationEnabled,
|
||||||
|
reminderFrequencyDays: typeof parsed.reminderFrequencyDays === 'number' ? parsed.reminderFrequencyDays : DEFAULT_FORM16_CONFIG.reminderFrequencyDays,
|
||||||
|
reminderFrequencyHours: typeof parsed.reminderFrequencyHours === 'number' ? parsed.reminderFrequencyHours : DEFAULT_FORM16_CONFIG.reminderFrequencyHours,
|
||||||
|
reminderRunAtTime: typeof parsed.reminderRunAtTime === 'string' ? (parsed.reminderRunAtTime.trim() ? (/^\d{1,2}:\d{2}$/.test(parsed.reminderRunAtTime.trim()) ? normalizeRunAtTime(parsed.reminderRunAtTime.trim()) : DEFAULT_FORM16_CONFIG.reminderRunAtTime) : '') : DEFAULT_FORM16_CONFIG.reminderRunAtTime,
|
||||||
|
reminderNotificationTemplate: typeof parsed.reminderNotificationTemplate === 'string' ? parsed.reminderNotificationTemplate : DEFAULT_FORM16_CONFIG.reminderNotificationTemplate,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
return;
|
||||||
|
} catch {
|
||||||
|
// fall through to default
|
||||||
|
}
|
||||||
|
}
|
||||||
|
res.json({ success: true, data: DEFAULT_FORM16_CONFIG });
|
||||||
|
} catch (error: any) {
|
||||||
|
logger.error('[Admin] Error fetching Form 16 config:', error);
|
||||||
|
res.status(500).json({
|
||||||
|
success: false,
|
||||||
|
error: error.message || 'Failed to fetch Form 16 configuration',
|
||||||
|
});
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Update Form 16 admin configuration
|
||||||
|
*/
|
||||||
|
export const putForm16Config = async (req: Request, res: Response): Promise<void> => {
|
||||||
|
try {
|
||||||
|
const userId = req.user?.userId;
|
||||||
|
if (!userId) {
|
||||||
|
res.status(401).json({ success: false, error: 'User not authenticated' });
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
const body = sanitizeObject(req.body as Record<string, unknown>);
|
||||||
|
const normalizeEmail = (e: unknown) => String(e ?? '').trim().toLowerCase();
|
||||||
|
const submissionViewerEmails = Array.isArray(body.submissionViewerEmails)
|
||||||
|
? body.submissionViewerEmails.map(normalizeEmail).filter(Boolean)
|
||||||
|
: DEFAULT_FORM16_CONFIG.submissionViewerEmails;
|
||||||
|
const twentySixAsViewerEmails = Array.isArray(body.twentySixAsViewerEmails)
|
||||||
|
? body.twentySixAsViewerEmails.map(normalizeEmail).filter(Boolean)
|
||||||
|
: DEFAULT_FORM16_CONFIG.twentySixAsViewerEmails;
|
||||||
|
const reminderEnabled = typeof body.reminderEnabled === 'boolean' ? body.reminderEnabled : DEFAULT_FORM16_CONFIG.reminderEnabled;
|
||||||
|
const reminderDays = typeof body.reminderDays === 'number' ? body.reminderDays : DEFAULT_FORM16_CONFIG.reminderDays;
|
||||||
|
const mergeNotif = (key: keyof typeof DEFAULT_FORM16_CONFIG) => {
|
||||||
|
const v = body[key];
|
||||||
|
const d = DEFAULT_FORM16_CONFIG[key] as any;
|
||||||
|
if (v && typeof v === 'object' && typeof (v as any).enabled === 'boolean') {
|
||||||
|
return { enabled: (v as any).enabled, template: typeof (v as any).template === 'string' ? (v as any).template : (d?.template ?? '') };
|
||||||
|
}
|
||||||
|
return d;
|
||||||
|
};
|
||||||
|
const d26As = DEFAULT_FORM16_CONFIG.notification26AsDataAdded as { enabled: boolean; templateRe: string; templateDealers: string };
|
||||||
|
const merge26As = () => {
|
||||||
|
const v = body.notification26AsDataAdded;
|
||||||
|
if (v && typeof v === 'object' && typeof (v as any).enabled === 'boolean') {
|
||||||
|
return {
|
||||||
|
enabled: (v as any).enabled,
|
||||||
|
templateRe: typeof (v as any).templateRe === 'string' ? (v as any).templateRe : d26As.templateRe,
|
||||||
|
templateDealers: typeof (v as any).templateDealers === 'string' ? (v as any).templateDealers : d26As.templateDealers,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
return d26As;
|
||||||
|
};
|
||||||
|
const configValue = JSON.stringify({
|
||||||
|
submissionViewerEmails,
|
||||||
|
twentySixAsViewerEmails,
|
||||||
|
reminderEnabled,
|
||||||
|
reminderDays,
|
||||||
|
notification26AsDataAdded: merge26As(),
|
||||||
|
notificationForm16SuccessCreditNote: mergeNotif('notificationForm16SuccessCreditNote'),
|
||||||
|
notificationForm16Unsuccessful: mergeNotif('notificationForm16Unsuccessful'),
|
||||||
|
alertSubmitForm16Enabled: typeof body.alertSubmitForm16Enabled === 'boolean' ? body.alertSubmitForm16Enabled : DEFAULT_FORM16_CONFIG.alertSubmitForm16Enabled,
|
||||||
|
alertSubmitForm16FrequencyDays: typeof body.alertSubmitForm16FrequencyDays === 'number' ? body.alertSubmitForm16FrequencyDays : DEFAULT_FORM16_CONFIG.alertSubmitForm16FrequencyDays,
|
||||||
|
alertSubmitForm16FrequencyHours: typeof body.alertSubmitForm16FrequencyHours === 'number' ? body.alertSubmitForm16FrequencyHours : DEFAULT_FORM16_CONFIG.alertSubmitForm16FrequencyHours,
|
||||||
|
alertSubmitForm16RunAtTime: typeof body.alertSubmitForm16RunAtTime === 'string' ? (String(body.alertSubmitForm16RunAtTime).trim() ? (/^\d{1,2}:\d{2}$/.test(String(body.alertSubmitForm16RunAtTime).trim()) ? normalizeRunAtTime(String(body.alertSubmitForm16RunAtTime).trim()) : DEFAULT_FORM16_CONFIG.alertSubmitForm16RunAtTime) : '') : DEFAULT_FORM16_CONFIG.alertSubmitForm16RunAtTime,
|
||||||
|
alertSubmitForm16Template: typeof body.alertSubmitForm16Template === 'string' ? body.alertSubmitForm16Template : DEFAULT_FORM16_CONFIG.alertSubmitForm16Template,
|
||||||
|
reminderNotificationEnabled: typeof body.reminderNotificationEnabled === 'boolean' ? body.reminderNotificationEnabled : DEFAULT_FORM16_CONFIG.reminderNotificationEnabled,
|
||||||
|
reminderFrequencyDays: typeof body.reminderFrequencyDays === 'number' ? body.reminderFrequencyDays : DEFAULT_FORM16_CONFIG.reminderFrequencyDays,
|
||||||
|
reminderFrequencyHours: typeof body.reminderFrequencyHours === 'number' ? body.reminderFrequencyHours : DEFAULT_FORM16_CONFIG.reminderFrequencyHours,
|
||||||
|
reminderRunAtTime: typeof body.reminderRunAtTime === 'string' ? (String(body.reminderRunAtTime).trim() ? (/^\d{1,2}:\d{2}$/.test(String(body.reminderRunAtTime).trim()) ? normalizeRunAtTime(String(body.reminderRunAtTime).trim()) : DEFAULT_FORM16_CONFIG.reminderRunAtTime) : '') : DEFAULT_FORM16_CONFIG.reminderRunAtTime,
|
||||||
|
reminderNotificationTemplate: typeof body.reminderNotificationTemplate === 'string' ? body.reminderNotificationTemplate : DEFAULT_FORM16_CONFIG.reminderNotificationTemplate,
|
||||||
|
});
|
||||||
|
|
||||||
|
await sequelize.query(
|
||||||
|
`INSERT INTO admin_configurations (
|
||||||
|
config_id, config_key, config_category, config_value, value_type, display_name, description, is_editable, is_sensitive, sort_order, created_at, updated_at, last_modified_by, last_modified_at
|
||||||
|
) VALUES (
|
||||||
|
gen_random_uuid(), :configKey, 'SYSTEM_SETTINGS', :configValue, 'JSON', 'Form 16 Admin Config', 'Form 16 visibility and reminder settings', true, false, 0, NOW(), NOW(), :userId, NOW()
|
||||||
|
)
|
||||||
|
ON CONFLICT (config_key) DO UPDATE SET
|
||||||
|
config_value = EXCLUDED.config_value,
|
||||||
|
last_modified_by = EXCLUDED.last_modified_by,
|
||||||
|
last_modified_at = NOW(),
|
||||||
|
updated_at = NOW()`,
|
||||||
|
{
|
||||||
|
replacements: { configKey: FORM16_CONFIG_KEY, configValue, userId },
|
||||||
|
type: QueryTypes.RAW,
|
||||||
|
}
|
||||||
|
);
|
||||||
|
clearConfigCache();
|
||||||
|
logger.info('[Admin] Form 16 configuration updated');
|
||||||
|
res.json({ success: true, message: 'Form 16 configuration saved' });
|
||||||
|
} catch (error: any) {
|
||||||
|
logger.error('[Admin] Error updating Form 16 config:', error);
|
||||||
|
res.status(500).json({
|
||||||
|
success: false,
|
||||||
|
error: error.message || 'Failed to save Form 16 configuration',
|
||||||
|
});
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get CPC-CSD admin configuration (who can access CPC-CSD module).
|
||||||
|
*/
|
||||||
|
export const getCpcCdcConfig = async (req: Request, res: Response): Promise<void> => {
|
||||||
|
try {
|
||||||
|
const raw = await selectCpcCsdAdminConfigValue();
|
||||||
|
|
||||||
|
if (raw) {
|
||||||
|
try {
|
||||||
|
const parsed = JSON.parse(raw);
|
||||||
|
res.json({
|
||||||
|
success: true,
|
||||||
|
data: {
|
||||||
|
viewerEmails: Array.isArray(parsed.viewerEmails) ? parsed.viewerEmails : DEFAULT_CPC_CSD_CONFIG.viewerEmails,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
return;
|
||||||
|
} catch {
|
||||||
|
// fall through to defaults
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
res.json({ success: true, data: DEFAULT_CPC_CSD_CONFIG });
|
||||||
|
} catch (error: any) {
|
||||||
|
logger.error('[Admin] Error fetching CPC-CSD config:', error);
|
||||||
|
res.status(500).json({
|
||||||
|
success: false,
|
||||||
|
error: error.message || 'Failed to fetch CPC-CSD configuration',
|
||||||
|
});
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Update CPC-CSD admin configuration.
|
||||||
|
*/
|
||||||
|
export const putCpcCdcConfig = async (req: Request, res: Response): Promise<void> => {
|
||||||
|
try {
|
||||||
|
const userId = req.user?.userId;
|
||||||
|
if (!userId) {
|
||||||
|
res.status(401).json({ success: false, error: 'User not authenticated' });
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const body = sanitizeObject(req.body as Record<string, unknown>);
|
||||||
|
const normalizeEmail = (e: unknown) => String(e ?? '').trim().toLowerCase();
|
||||||
|
const viewerEmails = Array.isArray(body.viewerEmails)
|
||||||
|
? body.viewerEmails.map(normalizeEmail).filter(Boolean)
|
||||||
|
: DEFAULT_CPC_CSD_CONFIG.viewerEmails;
|
||||||
|
|
||||||
|
const configValue = JSON.stringify({
|
||||||
|
viewerEmails,
|
||||||
|
});
|
||||||
|
|
||||||
|
await sequelize.query(
|
||||||
|
`INSERT INTO admin_configurations (
|
||||||
|
config_id, config_key, config_category, config_value, value_type, display_name, description, is_editable, is_sensitive, sort_order, created_at, updated_at, last_modified_by, last_modified_at
|
||||||
|
) VALUES (
|
||||||
|
gen_random_uuid(), :configKey, 'SYSTEM_SETTINGS', :configValue, 'JSON', 'CPC-CSD Admin Config', 'CPC-CSD module visibility settings', true, false, 0, NOW(), NOW(), :userId, NOW()
|
||||||
|
)
|
||||||
|
ON CONFLICT (config_key) DO UPDATE SET
|
||||||
|
config_value = EXCLUDED.config_value,
|
||||||
|
last_modified_by = EXCLUDED.last_modified_by,
|
||||||
|
last_modified_at = NOW(),
|
||||||
|
updated_at = NOW()`,
|
||||||
|
{
|
||||||
|
replacements: { configKey: CPC_CSD_ADMIN_CONFIG_KEY, configValue, userId },
|
||||||
|
type: QueryTypes.RAW,
|
||||||
|
}
|
||||||
|
);
|
||||||
|
|
||||||
|
await sequelize.query(
|
||||||
|
`DELETE FROM admin_configurations WHERE config_key = :legacy`,
|
||||||
|
{ replacements: { legacy: CPC_CDC_ADMIN_CONFIG_KEY_LEGACY }, type: QueryTypes.RAW }
|
||||||
|
);
|
||||||
|
|
||||||
|
clearConfigCache();
|
||||||
|
logger.info('[Admin] CPC-CSD configuration updated');
|
||||||
|
res.json({ success: true, message: 'CPC-CSD configuration saved' });
|
||||||
|
} catch (error: any) {
|
||||||
|
logger.error('[Admin] Error updating CPC-CSD config:', error);
|
||||||
|
res.status(500).json({
|
||||||
|
success: false,
|
||||||
|
error: error.message || 'Failed to save CPC-CSD configuration',
|
||||||
|
});
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* ============================================
|
* ============================================
|
||||||
* USER ROLE MANAGEMENT (RBAC)
|
* USER ROLE MANAGEMENT (RBAC)
|
||||||
@ -980,6 +1285,7 @@ export const createActivityType = async (req: Request, res: Response): Promise<v
|
|||||||
itemCode: itemCode || null,
|
itemCode: itemCode || null,
|
||||||
taxationType: taxationType || null,
|
taxationType: taxationType || null,
|
||||||
sapRefNo: sapRefNo || null,
|
sapRefNo: sapRefNo || null,
|
||||||
|
creditPostingOn: req.body.creditPostingOn || null,
|
||||||
createdBy: userId
|
createdBy: userId
|
||||||
});
|
});
|
||||||
|
|
||||||
|
|||||||
@ -6,14 +6,41 @@ import type { AuthenticatedRequest } from '../types/express';
|
|||||||
import logger from '../utils/logger';
|
import logger from '../utils/logger';
|
||||||
import { activityService, SYSTEM_EVENT_REQUEST_ID } from '../services/activity.service';
|
import { activityService, SYSTEM_EVENT_REQUEST_ID } from '../services/activity.service';
|
||||||
import { getRequestMetadata } from '../utils/requestUtils';
|
import { getRequestMetadata } from '../utils/requestUtils';
|
||||||
|
import { ACCESS_TOKEN_TTL_MS, REFRESH_TOKEN_TTL_MS } from '../config/sessionPolicy';
|
||||||
|
import crypto from 'crypto';
|
||||||
|
|
||||||
export class AuthController {
|
export class AuthController {
|
||||||
private authService: AuthService;
|
private authService: AuthService;
|
||||||
|
// One-time code usage guard (in-memory, per instance).
|
||||||
|
private readonly consumedAuthCodes = new Map<string, number>();
|
||||||
|
private readonly authCodeTtlMs = 10 * 60 * 1000;
|
||||||
|
|
||||||
constructor() {
|
constructor() {
|
||||||
this.authService = new AuthService();
|
this.authService = new AuthService();
|
||||||
}
|
}
|
||||||
|
|
||||||
|
private getCodeDigest(code: string): string {
|
||||||
|
return crypto.createHash('sha256').update(code).digest('hex');
|
||||||
|
}
|
||||||
|
|
||||||
|
private pruneConsumedCodes(now: number): void {
|
||||||
|
for (const [digest, ts] of this.consumedAuthCodes.entries()) {
|
||||||
|
if (now - ts > this.authCodeTtlMs) this.consumedAuthCodes.delete(digest);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private hasConsumedCode(code: string): boolean {
|
||||||
|
const now = Date.now();
|
||||||
|
this.pruneConsumedCodes(now);
|
||||||
|
return this.consumedAuthCodes.has(this.getCodeDigest(code));
|
||||||
|
}
|
||||||
|
|
||||||
|
private markCodeConsumed(code: string): void {
|
||||||
|
const now = Date.now();
|
||||||
|
this.pruneConsumedCodes(now);
|
||||||
|
this.consumedAuthCodes.set(this.getCodeDigest(code), now);
|
||||||
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Handle SSO callback from frontend
|
* Handle SSO callback from frontend
|
||||||
* POST /api/v1/auth/sso-callback
|
* POST /api/v1/auth/sso-callback
|
||||||
@ -23,7 +50,8 @@ export class AuthController {
|
|||||||
// Validate request body
|
// Validate request body
|
||||||
const validatedData = validateSSOCallback(req.body);
|
const validatedData = validateSSOCallback(req.body);
|
||||||
|
|
||||||
const result = await this.authService.handleSSOCallback(validatedData as any);
|
const userAgent = req.headers['user-agent'] || getRequestMetadata(req).userAgent;
|
||||||
|
const result = await this.authService.handleSSOCallback(validatedData as any, userAgent);
|
||||||
|
|
||||||
// Log login activity
|
// Log login activity
|
||||||
const requestMeta = getRequestMetadata(req);
|
const requestMeta = getRequestMetadata(req);
|
||||||
@ -128,7 +156,7 @@ export class AuthController {
|
|||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
const newAccessToken = await this.authService.refreshAccessToken(refreshToken);
|
const refreshResult = await this.authService.refreshAccessToken(refreshToken);
|
||||||
|
|
||||||
// Set new access token in cookie if using cookie-based auth
|
// Set new access token in cookie if using cookie-based auth
|
||||||
const isProduction = process.env.NODE_ENV === 'production';
|
const isProduction = process.env.NODE_ENV === 'production';
|
||||||
@ -139,10 +167,10 @@ export class AuthController {
|
|||||||
httpOnly: true,
|
httpOnly: true,
|
||||||
secure: isSecureEnv,
|
secure: isSecureEnv,
|
||||||
sameSite: isSecureEnv ? 'lax' as const : 'lax' as const, // 'lax' is safer and works on same-domain
|
sameSite: isSecureEnv ? 'lax' as const : 'lax' as const, // 'lax' is safer and works on same-domain
|
||||||
maxAge: 24 * 60 * 60 * 1000, // 24 hours
|
maxAge: Math.max(1000, refreshResult.accessTokenTtlMs),
|
||||||
};
|
};
|
||||||
|
|
||||||
res.cookie('accessToken', newAccessToken, cookieOptions);
|
res.cookie('accessToken', refreshResult.accessToken, cookieOptions);
|
||||||
|
|
||||||
// SECURITY: In production, don't return token in response body
|
// SECURITY: In production, don't return token in response body
|
||||||
// Token is securely stored in httpOnly cookie
|
// Token is securely stored in httpOnly cookie
|
||||||
@ -153,7 +181,7 @@ export class AuthController {
|
|||||||
} else {
|
} else {
|
||||||
// Dev: Include token for debugging
|
// Dev: Include token for debugging
|
||||||
ResponseHandler.success(res, {
|
ResponseHandler.success(res, {
|
||||||
accessToken: newAccessToken
|
accessToken: refreshResult.accessToken
|
||||||
}, 'Token refreshed successfully');
|
}, 'Token refreshed successfully');
|
||||||
}
|
}
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
@ -180,7 +208,8 @@ export class AuthController {
|
|||||||
const { code, redirectUri } = validateTokenExchange(req.body);
|
const { code, redirectUri } = validateTokenExchange(req.body);
|
||||||
logger.info('Tanflow token exchange validation passed', { redirectUri });
|
logger.info('Tanflow token exchange validation passed', { redirectUri });
|
||||||
|
|
||||||
const result = await this.authService.exchangeTanflowCodeForTokens(code, redirectUri);
|
const userAgent = req.headers['user-agent'] || getRequestMetadata(req).userAgent;
|
||||||
|
const result = await this.authService.exchangeTanflowCodeForTokens(code, redirectUri, userAgent);
|
||||||
|
|
||||||
// Log login activity
|
// Log login activity
|
||||||
const requestMeta = getRequestMetadata(req);
|
const requestMeta = getRequestMetadata(req);
|
||||||
@ -216,7 +245,7 @@ export class AuthController {
|
|||||||
httpOnly: true,
|
httpOnly: true,
|
||||||
secure: isSecureEnv,
|
secure: isSecureEnv,
|
||||||
sameSite: isSecureEnv ? ('lax' as const) : ('lax' as const),
|
sameSite: isSecureEnv ? ('lax' as const) : ('lax' as const),
|
||||||
maxAge: 24 * 60 * 60 * 1000, // 24 hours
|
maxAge: ACCESS_TOKEN_TTL_MS,
|
||||||
path: '/',
|
path: '/',
|
||||||
};
|
};
|
||||||
|
|
||||||
@ -269,7 +298,7 @@ export class AuthController {
|
|||||||
httpOnly: true,
|
httpOnly: true,
|
||||||
secure: isSecureEnv,
|
secure: isSecureEnv,
|
||||||
sameSite: isSecureEnv ? ('lax' as const) : ('lax' as const),
|
sameSite: isSecureEnv ? ('lax' as const) : ('lax' as const),
|
||||||
maxAge: 24 * 60 * 60 * 1000,
|
maxAge: ACCESS_TOKEN_TTL_MS,
|
||||||
path: '/',
|
path: '/',
|
||||||
};
|
};
|
||||||
|
|
||||||
@ -395,6 +424,13 @@ export class AuthController {
|
|||||||
// Clear all cookies using multiple methods
|
// Clear all cookies using multiple methods
|
||||||
clearCookiesCompletely();
|
clearCookiesCompletely();
|
||||||
|
|
||||||
|
if (userId !== 'unknown') {
|
||||||
|
const user = await this.authService.getUserProfile(userId);
|
||||||
|
if (user) {
|
||||||
|
await this.authService.updateUserProfile(userId, { sessionToken: null, lastLoginDevice: null });
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
logger.info('User logout successful - cookies cleared', {
|
logger.info('User logout successful - cookies cleared', {
|
||||||
userId: req.user?.userId || 'unknown',
|
userId: req.user?.userId || 'unknown',
|
||||||
email: req.user?.email || 'unknown',
|
email: req.user?.email || 'unknown',
|
||||||
@ -452,7 +488,8 @@ export class AuthController {
|
|||||||
|
|
||||||
const { username, password } = validatePasswordLogin(req.body);
|
const { username, password } = validatePasswordLogin(req.body);
|
||||||
|
|
||||||
const result = await this.authService.authenticateWithPassword(username, password);
|
const userAgent = req.headers['user-agent'] || getRequestMetadata(req).userAgent;
|
||||||
|
const result = await this.authService.authenticateWithPassword(username, password, userAgent);
|
||||||
|
|
||||||
// Log login activity
|
// Log login activity
|
||||||
const requestMeta = getRequestMetadata(req);
|
const requestMeta = getRequestMetadata(req);
|
||||||
@ -488,14 +525,14 @@ export class AuthController {
|
|||||||
httpOnly: true,
|
httpOnly: true,
|
||||||
secure: isSecureEnv,
|
secure: isSecureEnv,
|
||||||
sameSite: isSecureEnv ? 'lax' as const : 'lax' as const,
|
sameSite: isSecureEnv ? 'lax' as const : 'lax' as const,
|
||||||
maxAge: 24 * 60 * 60 * 1000, // 24 hours
|
maxAge: ACCESS_TOKEN_TTL_MS,
|
||||||
};
|
};
|
||||||
|
|
||||||
res.cookie('accessToken', result.accessToken, cookieOptions);
|
res.cookie('accessToken', result.accessToken, cookieOptions);
|
||||||
|
|
||||||
const refreshCookieOptions = {
|
const refreshCookieOptions = {
|
||||||
...cookieOptions,
|
...cookieOptions,
|
||||||
maxAge: 7 * 24 * 60 * 60 * 1000, // 7 days
|
maxAge: REFRESH_TOKEN_TTL_MS,
|
||||||
};
|
};
|
||||||
|
|
||||||
res.cookie('refreshToken', result.refreshToken, refreshCookieOptions);
|
res.cookie('refreshToken', result.refreshToken, refreshCookieOptions);
|
||||||
@ -535,7 +572,19 @@ export class AuthController {
|
|||||||
const { code, redirectUri } = validateTokenExchange(req.body);
|
const { code, redirectUri } = validateTokenExchange(req.body);
|
||||||
logger.info('Token exchange validation passed', { redirectUri });
|
logger.info('Token exchange validation passed', { redirectUri });
|
||||||
|
|
||||||
const result = await this.authService.exchangeCodeForTokens(code, redirectUri);
|
if (this.hasConsumedCode(code)) {
|
||||||
|
ResponseHandler.error(
|
||||||
|
res,
|
||||||
|
'Token exchange failed',
|
||||||
|
400,
|
||||||
|
'RELOGIN_REQUIRED'
|
||||||
|
);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
this.markCodeConsumed(code);
|
||||||
|
|
||||||
|
const userAgent = req.headers['user-agent'] || getRequestMetadata(req).userAgent;
|
||||||
|
const result = await this.authService.exchangeCodeForTokens(code, redirectUri, userAgent);
|
||||||
|
|
||||||
// Log login activity
|
// Log login activity
|
||||||
const requestMeta = getRequestMetadata(req);
|
const requestMeta = getRequestMetadata(req);
|
||||||
@ -571,14 +620,14 @@ export class AuthController {
|
|||||||
httpOnly: true,
|
httpOnly: true,
|
||||||
secure: isSecureEnv,
|
secure: isSecureEnv,
|
||||||
sameSite: isSecureEnv ? 'lax' as const : 'lax' as const, // 'lax' for same-domain
|
sameSite: isSecureEnv ? 'lax' as const : 'lax' as const, // 'lax' for same-domain
|
||||||
maxAge: 24 * 60 * 60 * 1000, // 24 hours for access token
|
maxAge: ACCESS_TOKEN_TTL_MS,
|
||||||
};
|
};
|
||||||
|
|
||||||
res.cookie('accessToken', result.accessToken, cookieOptions);
|
res.cookie('accessToken', result.accessToken, cookieOptions);
|
||||||
|
|
||||||
const refreshCookieOptions = {
|
const refreshCookieOptions = {
|
||||||
...cookieOptions,
|
...cookieOptions,
|
||||||
maxAge: 7 * 24 * 60 * 60 * 1000, // 7 days for refresh token
|
maxAge: REFRESH_TOKEN_TTL_MS,
|
||||||
};
|
};
|
||||||
|
|
||||||
res.cookie('refreshToken', result.refreshToken, refreshCookieOptions);
|
res.cookie('refreshToken', result.refreshToken, refreshCookieOptions);
|
||||||
@ -613,6 +662,14 @@ export class AuthController {
|
|||||||
} catch (error) {
|
} catch (error) {
|
||||||
logger.error('Token exchange failed:', error);
|
logger.error('Token exchange failed:', error);
|
||||||
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
|
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
|
||||||
|
const normalized = String(errorMessage || '').toLowerCase();
|
||||||
|
const isExpiredOrInvalidCode =
|
||||||
|
normalized.includes('authorization code is invalid or has expired') ||
|
||||||
|
normalized.includes('invalid_grant');
|
||||||
|
if (isExpiredOrInvalidCode) {
|
||||||
|
ResponseHandler.error(res, 'Token exchange failed', 400, 'RELOGIN_REQUIRED');
|
||||||
|
return;
|
||||||
|
}
|
||||||
ResponseHandler.error(res, 'Token exchange failed', 400, errorMessage);
|
ResponseHandler.error(res, 'Token exchange failed', 400, errorMessage);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@ -1,5 +1,6 @@
|
|||||||
import { Request, Response } from 'express';
|
import { Request, Response } from 'express';
|
||||||
import { WorkflowRequest, ApprovalLevel, WorkNote, Document, Activity, ConclusionRemark } from '@models/index';
|
import { WorkflowRequest, ApprovalLevel, WorkNote, Document, Activity, ConclusionRemark } from '@models/index';
|
||||||
|
import { isHtmlEmpty } from '../utils/sanitizer';
|
||||||
import { aiService } from '@services/ai.service';
|
import { aiService } from '@services/ai.service';
|
||||||
import { activityService } from '@services/activity.service';
|
import { activityService } from '@services/activity.service';
|
||||||
import logger from '@utils/logger';
|
import logger from '@utils/logger';
|
||||||
@ -227,8 +228,8 @@ export class ConclusionController {
|
|||||||
const { finalRemark } = req.body;
|
const { finalRemark } = req.body;
|
||||||
const userId = (req as any).user?.userId;
|
const userId = (req as any).user?.userId;
|
||||||
|
|
||||||
if (!finalRemark || typeof finalRemark !== 'string') {
|
if (isHtmlEmpty(finalRemark)) {
|
||||||
return res.status(400).json({ error: 'Final remark is required' });
|
return res.status(400).json({ error: 'A valid final remark is required. Please ensure the remark contains valid content.' });
|
||||||
}
|
}
|
||||||
|
|
||||||
// Fetch request
|
// Fetch request
|
||||||
|
|||||||
36
src/controllers/cpcPermission.controller.ts
Normal file
36
src/controllers/cpcPermission.controller.ts
Normal file
@ -0,0 +1,36 @@
|
|||||||
|
import { Request, Response } from 'express';
|
||||||
|
import { ResponseHandler } from '../utils/responseHandler';
|
||||||
|
import logger from '@utils/logger';
|
||||||
|
import { canAccessCpcCdc } from '../services/cpcPermission.service';
|
||||||
|
|
||||||
|
class CpcPermissionController {
|
||||||
|
/**
|
||||||
|
* GET /api/v1/cpc-csd/permissions (legacy: /api/v1/cpc-cdc/permissions)
|
||||||
|
* Returns CPC-CSD access permission for current user.
|
||||||
|
*/
|
||||||
|
async getPermissions(req: Request, res: Response): Promise<void> {
|
||||||
|
try {
|
||||||
|
const user = req.user;
|
||||||
|
if (!user?.userId || !user?.email) {
|
||||||
|
ResponseHandler.unauthorized(res, 'Authentication required');
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const role = (user as any).role as string | undefined;
|
||||||
|
const canViewCpcCsd = await canAccessCpcCdc(user.email, role);
|
||||||
|
|
||||||
|
ResponseHandler.success(
|
||||||
|
res,
|
||||||
|
{ canViewCpcCsd, canViewCpcCdc: canViewCpcCsd },
|
||||||
|
'CPC-CSD permissions'
|
||||||
|
);
|
||||||
|
} catch (error) {
|
||||||
|
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
|
||||||
|
logger.error('[CpcPermissionController] getPermissions error:', error);
|
||||||
|
ResponseHandler.error(res, 'Failed to get CPC-CSD permissions', 500, errorMessage);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export const cpcPermissionController = new CpcPermissionController();
|
||||||
|
|
||||||
@ -12,11 +12,16 @@ import { sapIntegrationService } from '../services/sapIntegration.service';
|
|||||||
import fs from 'fs';
|
import fs from 'fs';
|
||||||
import path from 'path';
|
import path from 'path';
|
||||||
import crypto from 'crypto';
|
import crypto from 'crypto';
|
||||||
import { WorkflowRequest } from '../models/WorkflowRequest';
|
|
||||||
import { DealerClaimDetails } from '../models/DealerClaimDetails';
|
import { DealerClaimDetails } from '../models/DealerClaimDetails';
|
||||||
import { ClaimInvoice } from '../models/ClaimInvoice';
|
import { ClaimInvoice } from '../models/ClaimInvoice';
|
||||||
import { ClaimInvoiceItem } from '../models/ClaimInvoiceItem';
|
import { ClaimInvoiceItem } from '../models/ClaimInvoiceItem';
|
||||||
|
import { ClaimCreditNote } from '../models/ClaimCreditNote';
|
||||||
|
import { ClaimCreditNoteItem } from '../models/ClaimCreditNoteItem';
|
||||||
import { ActivityType } from '../models/ActivityType';
|
import { ActivityType } from '../models/ActivityType';
|
||||||
|
import { Participant } from '../models/Participant';
|
||||||
|
import { sanitizeObject, sanitizePermissive } from '../utils/sanitizer';
|
||||||
|
import { buildWfmClaimCsvRow, padDealerCode, WFM_CLAIM_CSV_HEADERS } from '../utils/helpers';
|
||||||
|
import { costBreakupSchema, closedExpensesSchema, updateEInvoiceSchema, updateIOSchema } from '../validators/dealerClaim.validator';
|
||||||
|
|
||||||
export class DealerClaimController {
|
export class DealerClaimController {
|
||||||
private dealerClaimService = new DealerClaimService();
|
private dealerClaimService = new DealerClaimService();
|
||||||
@ -50,8 +55,25 @@ export class DealerClaimController {
|
|||||||
} = req.body;
|
} = req.body;
|
||||||
|
|
||||||
// Validation
|
// Validation
|
||||||
if (!activityName || !activityType || !dealerCode || !dealerName || !location || !requestDescription) {
|
const requiredFields = [
|
||||||
return ResponseHandler.error(res, 'Missing required fields', 400);
|
{ key: 'activityName', label: 'Activity Name' },
|
||||||
|
{ key: 'activityType', label: 'Activity Type' },
|
||||||
|
{ key: 'dealerCode', label: 'Dealer Code' },
|
||||||
|
{ key: 'dealerName', label: 'Dealer Name' },
|
||||||
|
{ key: 'location', label: 'Location' },
|
||||||
|
{ key: 'requestDescription', label: 'Request Description' },
|
||||||
|
];
|
||||||
|
|
||||||
|
const missingFields = requiredFields
|
||||||
|
.filter(field => !req.body[field.key])
|
||||||
|
.map(field => field.label);
|
||||||
|
|
||||||
|
if (missingFields.length > 0) {
|
||||||
|
return ResponseHandler.error(
|
||||||
|
res,
|
||||||
|
`Required fields are missing or contain invalid content: ${missingFields.join(', ')}`,
|
||||||
|
400
|
||||||
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
const claimRequest = await this.dealerClaimService.createClaimRequest(userId, {
|
const claimRequest = await this.dealerClaimService.createClaimRequest(userId, {
|
||||||
@ -76,9 +98,16 @@ export class DealerClaimController {
|
|||||||
message: 'Claim request created successfully'
|
message: 'Claim request created successfully'
|
||||||
}, 'Claim request created');
|
}, 'Claim request created');
|
||||||
} catch (error: any) {
|
} catch (error: any) {
|
||||||
// Handle approver validation errors
|
// Handle validation and business logic errors
|
||||||
if (error.message && error.message.includes('Approver')) {
|
const isValidationError = error.message && (
|
||||||
logger.warn('[DealerClaimController] Approver validation error:', { message: error.message });
|
error.message.includes('Approver') ||
|
||||||
|
error.message.includes('Valid content is required') ||
|
||||||
|
error.message.includes('invalid script') ||
|
||||||
|
error.message.includes('empty input detected')
|
||||||
|
);
|
||||||
|
|
||||||
|
if (isValidationError) {
|
||||||
|
logger.warn('[DealerClaimController] Validation error:', { message: error.message });
|
||||||
return ResponseHandler.error(res, error.message, 400);
|
return ResponseHandler.error(res, error.message, 400);
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -173,6 +202,8 @@ export class DealerClaimController {
|
|||||||
if (typeof costBreakup === 'string') {
|
if (typeof costBreakup === 'string') {
|
||||||
try {
|
try {
|
||||||
parsedCostBreakup = JSON.parse(costBreakup);
|
parsedCostBreakup = JSON.parse(costBreakup);
|
||||||
|
// Sanitize cost items
|
||||||
|
parsedCostBreakup = sanitizeObject(parsedCostBreakup);
|
||||||
} catch (parseError) {
|
} catch (parseError) {
|
||||||
logger.error('[DealerClaimController] Failed to parse costBreakup JSON:', parseError);
|
logger.error('[DealerClaimController] Failed to parse costBreakup JSON:', parseError);
|
||||||
return ResponseHandler.error(res, 'Invalid costBreakup format. Expected JSON array.', 400);
|
return ResponseHandler.error(res, 'Invalid costBreakup format. Expected JSON array.', 400);
|
||||||
@ -185,17 +216,15 @@ export class DealerClaimController {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Validate costBreakup is an array
|
|
||||||
if (!Array.isArray(parsedCostBreakup)) {
|
|
||||||
logger.error('[DealerClaimController] costBreakup is not an array after parsing:', parsedCostBreakup);
|
|
||||||
return ResponseHandler.error(res, 'costBreakup must be an array of cost items', 400);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Validate each cost item has required fields
|
// Validate costBreakup array using Zod schema
|
||||||
for (const item of parsedCostBreakup) {
|
const costValidation = costBreakupSchema.safeParse(parsedCostBreakup);
|
||||||
if (!item.description || item.amount === undefined || item.amount === null) {
|
if (!costValidation.success) {
|
||||||
return ResponseHandler.error(res, 'Each cost item must have description and amount', 400);
|
return ResponseHandler.error(
|
||||||
}
|
res,
|
||||||
|
`Invalid cost breakup data: ${costValidation.error.errors.map((e: any) => e.message).join(', ')}`,
|
||||||
|
400
|
||||||
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
// Handle file upload if present
|
// Handle file upload if present
|
||||||
@ -232,7 +261,7 @@ export class DealerClaimController {
|
|||||||
timelineMode: timelineMode || 'date',
|
timelineMode: timelineMode || 'date',
|
||||||
expectedCompletionDate: expectedCompletionDate ? new Date(expectedCompletionDate) : undefined,
|
expectedCompletionDate: expectedCompletionDate ? new Date(expectedCompletionDate) : undefined,
|
||||||
expectedCompletionDays: expectedCompletionDays ? parseInt(expectedCompletionDays) : undefined,
|
expectedCompletionDays: expectedCompletionDays ? parseInt(expectedCompletionDays) : undefined,
|
||||||
dealerComments: dealerComments || '',
|
dealerComments: dealerComments ? sanitizePermissive(dealerComments) : '',
|
||||||
});
|
});
|
||||||
|
|
||||||
return ResponseHandler.success(res, { message: 'Proposal submitted successfully' }, 'Proposal submitted');
|
return ResponseHandler.success(res, { message: 'Proposal submitted successfully' }, 'Proposal submitted');
|
||||||
@ -264,12 +293,24 @@ export class DealerClaimController {
|
|||||||
if (closedExpenses) {
|
if (closedExpenses) {
|
||||||
try {
|
try {
|
||||||
parsedClosedExpenses = typeof closedExpenses === 'string' ? JSON.parse(closedExpenses) : closedExpenses;
|
parsedClosedExpenses = typeof closedExpenses === 'string' ? JSON.parse(closedExpenses) : closedExpenses;
|
||||||
|
// Sanitize expenses
|
||||||
|
parsedClosedExpenses = sanitizeObject(parsedClosedExpenses);
|
||||||
} catch (e) {
|
} catch (e) {
|
||||||
logger.warn('[DealerClaimController] Failed to parse closedExpenses JSON:', e);
|
logger.warn('[DealerClaimController] Failed to parse closedExpenses JSON:', e);
|
||||||
parsedClosedExpenses = [];
|
parsedClosedExpenses = [];
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Validate closed expenses using Zod schema
|
||||||
|
const expenseValidation = closedExpensesSchema.safeParse(parsedClosedExpenses);
|
||||||
|
if (!expenseValidation.success) {
|
||||||
|
return ResponseHandler.error(
|
||||||
|
res,
|
||||||
|
`Invalid closed expenses: ${expenseValidation.error.errors.map((e: any) => e.message).join(', ')}`,
|
||||||
|
400
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
// Get files from multer
|
// Get files from multer
|
||||||
const files = req.files as { [fieldname: string]: Express.Multer.File[] } | undefined;
|
const files = req.files as { [fieldname: string]: Express.Multer.File[] } | undefined;
|
||||||
const completionDocumentsFiles = files?.completionDocuments || [];
|
const completionDocumentsFiles = files?.completionDocuments || [];
|
||||||
@ -547,7 +588,7 @@ export class DealerClaimController {
|
|||||||
totalClosedExpenses: totalClosedExpenses ? parseFloat(totalClosedExpenses) : 0,
|
totalClosedExpenses: totalClosedExpenses ? parseFloat(totalClosedExpenses) : 0,
|
||||||
invoicesReceipts: invoicesReceipts.length > 0 ? invoicesReceipts : undefined,
|
invoicesReceipts: invoicesReceipts.length > 0 ? invoicesReceipts : undefined,
|
||||||
attendanceSheet: attendanceSheet || undefined,
|
attendanceSheet: attendanceSheet || undefined,
|
||||||
completionDescription: completionDescription || undefined,
|
completionDescription: completionDescription ? sanitizePermissive(completionDescription) : undefined,
|
||||||
});
|
});
|
||||||
|
|
||||||
return ResponseHandler.success(res, { message: 'Completion documents submitted successfully' }, 'Completion submitted');
|
return ResponseHandler.success(res, { message: 'Completion documents submitted successfully' }, 'Completion submitted');
|
||||||
@ -624,8 +665,14 @@ export class DealerClaimController {
|
|||||||
return ResponseHandler.error(res, 'Invalid workflow request', 400);
|
return ResponseHandler.error(res, 'Invalid workflow request', 400);
|
||||||
}
|
}
|
||||||
|
|
||||||
if (!ioNumber) {
|
// Validate request body using Zod schema
|
||||||
return ResponseHandler.error(res, 'IO number is required', 400);
|
const ioValidation = updateIOSchema.safeParse(req.body);
|
||||||
|
if (!ioValidation.success) {
|
||||||
|
return ResponseHandler.error(
|
||||||
|
res,
|
||||||
|
`Invalid IO details: ${ioValidation.error.errors.map((e: any) => e.message).join(', ')}`,
|
||||||
|
400
|
||||||
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
const blockAmount = blockedAmount ? parseFloat(blockedAmount) : 0;
|
const blockAmount = blockedAmount ? parseFloat(blockedAmount) : 0;
|
||||||
@ -730,6 +777,16 @@ export class DealerClaimController {
|
|||||||
description,
|
description,
|
||||||
} = req.body;
|
} = req.body;
|
||||||
|
|
||||||
|
// Validate request body using Zod schema
|
||||||
|
const einvoiceValidation = updateEInvoiceSchema.safeParse(req.body);
|
||||||
|
if (!einvoiceValidation.success) {
|
||||||
|
return ResponseHandler.error(
|
||||||
|
res,
|
||||||
|
`Invalid e-invoice details: ${einvoiceValidation.error.errors.map((e: any) => e.message).join(', ')}`,
|
||||||
|
400
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
// Find workflow to get actual UUID
|
// Find workflow to get actual UUID
|
||||||
const workflow = await this.findWorkflowByIdentifier(identifier);
|
const workflow = await this.findWorkflowByIdentifier(identifier);
|
||||||
if (!workflow) {
|
if (!workflow) {
|
||||||
@ -784,6 +841,20 @@ export class DealerClaimController {
|
|||||||
return ResponseHandler.error(res, 'Invalid workflow request', 400);
|
return ResponseHandler.error(res, 'Invalid workflow request', 400);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Authorization Check
|
||||||
|
const userRole = (req as any).user?.role;
|
||||||
|
const userId = (req as any).user?.userId;
|
||||||
|
|
||||||
|
if (userRole !== 'ADMIN' && userRole !== 'MANAGEMENT') {
|
||||||
|
const participant = await Participant.findOne({
|
||||||
|
where: { requestId, userId, isActive: true }
|
||||||
|
});
|
||||||
|
|
||||||
|
if (!participant) {
|
||||||
|
return ResponseHandler.error(res, 'Access denied. You are not a participant in this workflow.', 403);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
const { ClaimInvoice } = await import('../models/ClaimInvoice');
|
const { ClaimInvoice } = await import('../models/ClaimInvoice');
|
||||||
let invoice = await ClaimInvoice.findOne({ where: { requestId } });
|
let invoice = await ClaimInvoice.findOne({ where: { requestId } });
|
||||||
|
|
||||||
@ -1005,6 +1076,24 @@ export class DealerClaimController {
|
|||||||
const requestId = (workflow as any).requestId || (workflow as any).request_id;
|
const requestId = (workflow as any).requestId || (workflow as any).request_id;
|
||||||
const requestNumber = (workflow as any).requestNumber || (workflow as any).request_number;
|
const requestNumber = (workflow as any).requestNumber || (workflow as any).request_number;
|
||||||
|
|
||||||
|
if (!requestId) {
|
||||||
|
return ResponseHandler.error(res, 'Invalid workflow request', 400);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Authorization Check
|
||||||
|
const userRole = (req as any).user?.role;
|
||||||
|
const userId = (req as any).user?.userId;
|
||||||
|
|
||||||
|
if (userRole !== 'ADMIN' && userRole !== 'MANAGEMENT') {
|
||||||
|
const participant = await Participant.findOne({
|
||||||
|
where: { requestId, userId, isActive: true }
|
||||||
|
});
|
||||||
|
|
||||||
|
if (!participant) {
|
||||||
|
return ResponseHandler.error(res, 'Access denied. You are not a participant in this workflow.', 403);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
// Fetch related data
|
// Fetch related data
|
||||||
logger.info(`[DealerClaimController] Preparing CSV for requestId: ${requestId}`);
|
logger.info(`[DealerClaimController] Preparing CSV for requestId: ${requestId}`);
|
||||||
const [invoice, items, claimDetails, internalOrder] = await Promise.all([
|
const [invoice, items, claimDetails, internalOrder] = await Promise.all([
|
||||||
@ -1024,52 +1113,55 @@ export class DealerClaimController {
|
|||||||
taxationType = activity?.taxationType || (claimDetails.activityType.toLowerCase().includes('non') ? 'Non GST' : 'GST');
|
taxationType = activity?.taxationType || (claimDetails.activityType.toLowerCase().includes('non') ? 'Non GST' : 'GST');
|
||||||
}
|
}
|
||||||
|
|
||||||
// Construct CSV
|
const isNonGst = taxationType === 'Non GST' || taxationType === 'Non-GST';
|
||||||
const headers = [
|
|
||||||
'TRNS_UNIQ_NO',
|
// Construct CSV with pipe separator
|
||||||
'CLAIM_NUMBER',
|
const headers = isNonGst
|
||||||
'INV_NUMBER',
|
? [
|
||||||
'DEALER_CODE',
|
'TRNS_UNIQ_NO',
|
||||||
'IO_NUMBER',
|
'CLAIM_NUMBER',
|
||||||
'CLAIM_DOC_TYP',
|
'INV_NUMBER',
|
||||||
'CLAIM_DATE',
|
'DEALER_CODE',
|
||||||
'CLAIM_AMT',
|
'IO_NUMBER',
|
||||||
'GST_AMT',
|
'CLAIM_DOC_TYP',
|
||||||
'GST_PERCENTAG'
|
'CLAIM_TYPE',
|
||||||
];
|
'CLAIM_DATE',
|
||||||
|
'CLAIM_AMT'
|
||||||
|
]
|
||||||
|
: [...WFM_CLAIM_CSV_HEADERS];
|
||||||
|
|
||||||
const rows = items.map(item => {
|
const rows = items.map(item => {
|
||||||
const isNonGst = taxationType === 'Non GST' || taxationType === 'Non-GST';
|
if (isNonGst) {
|
||||||
|
const d = new Date(invoice?.invoiceDate || invoice?.createdAt || new Date());
|
||||||
|
const claimDate = `${d.getFullYear()}${String(d.getMonth() + 1).padStart(2, '0')}${String(d.getDate()).padStart(2, '0')}`;
|
||||||
|
return [
|
||||||
|
item.transactionCode || '',
|
||||||
|
requestNumber,
|
||||||
|
invoice?.invoiceNumber || '',
|
||||||
|
padDealerCode(claimDetails?.dealerCode || ''),
|
||||||
|
internalOrder?.ioNumber || '',
|
||||||
|
sapRefNo,
|
||||||
|
claimDetails?.activityType || '',
|
||||||
|
claimDate,
|
||||||
|
item.assAmt
|
||||||
|
].join('|');
|
||||||
|
}
|
||||||
|
|
||||||
// For Non-GST, we hide HSN (often stored in transactionCode) and GST details
|
const row = buildWfmClaimCsvRow({
|
||||||
const trnsUniqNo = isNonGst ? '' : (item.transactionCode || '');
|
item: item as any,
|
||||||
const claimNumber = requestNumber;
|
requestNumber,
|
||||||
const invNumber = invoice?.invoiceNumber || '';
|
invoiceNumber: invoice?.invoiceNumber || '',
|
||||||
const dealerCode = claimDetails?.dealerCode || '';
|
invoiceDate: (invoice?.invoiceDate as Date) || (invoice?.createdAt as Date) || new Date(),
|
||||||
const ioNumber = internalOrder?.ioNumber || '';
|
dealerCode: claimDetails?.dealerCode || '',
|
||||||
const claimDocTyp = sapRefNo;
|
ioNumber: internalOrder?.ioNumber || '',
|
||||||
const claimDate = invoice?.createdAt ? new Date(invoice.createdAt).toISOString().split('T')[0] : '';
|
claimDocTyp: sapRefNo,
|
||||||
const claimAmt = item.assAmt;
|
claimType: claimDetails?.activityType || '',
|
||||||
|
});
|
||||||
|
|
||||||
// Zero out tax for Non-GST
|
return headers.map((key) => String((row as any)[key] ?? '')).join('|');
|
||||||
const totalTax = isNonGst ? 0 : (Number(item.igstAmt || 0) + Number(item.cgstAmt || 0) + Number(item.sgstAmt || 0) + Number(item.utgstAmt || 0));
|
|
||||||
const gstPercentag = isNonGst ? 0 : (item.gstRt || 0);
|
|
||||||
|
|
||||||
return [
|
|
||||||
trnsUniqNo,
|
|
||||||
claimNumber,
|
|
||||||
invNumber,
|
|
||||||
dealerCode,
|
|
||||||
ioNumber,
|
|
||||||
claimDocTyp,
|
|
||||||
claimDate,
|
|
||||||
claimAmt,
|
|
||||||
totalTax.toFixed(2),
|
|
||||||
gstPercentag
|
|
||||||
].join(',');
|
|
||||||
});
|
});
|
||||||
|
|
||||||
const csvContent = [headers.join(','), ...rows].join('\n');
|
const csvContent = [headers.join('|'), ...rows].join('\n');
|
||||||
|
|
||||||
res.setHeader('Content-Type', 'text/csv');
|
res.setHeader('Content-Type', 'text/csv');
|
||||||
res.setHeader('Content-Disposition', `attachment; filename="Invoice_${requestNumber}.csv"`);
|
res.setHeader('Content-Disposition', `attachment; filename="Invoice_${requestNumber}.csv"`);
|
||||||
@ -1090,16 +1182,16 @@ export class DealerClaimController {
|
|||||||
async retriggerWFMPush(req: Request, res: Response): Promise<void> {
|
async retriggerWFMPush(req: Request, res: Response): Promise<void> {
|
||||||
try {
|
try {
|
||||||
const { requestId: identifier } = req.params;
|
const { requestId: identifier } = req.params;
|
||||||
|
|
||||||
const workflow = await this.findWorkflowByIdentifier(identifier);
|
const workflow = await this.findWorkflowByIdentifier(identifier);
|
||||||
if (!workflow) {
|
if (!workflow) {
|
||||||
return ResponseHandler.error(res, 'Workflow request not found', 404);
|
return ResponseHandler.error(res, 'Workflow request not found', 404);
|
||||||
}
|
}
|
||||||
|
|
||||||
const requestId = (workflow as any).id || (workflow as any).requestId;
|
const requestId = (workflow as any).id || (workflow as any).requestId;
|
||||||
|
|
||||||
await this.dealerClaimService.pushWFMCSV(requestId);
|
await this.dealerClaimService.pushWFMCSV(requestId);
|
||||||
|
|
||||||
return ResponseHandler.success(res, {
|
return ResponseHandler.success(res, {
|
||||||
message: 'WFM CSV push re-triggered successfully'
|
message: 'WFM CSV push re-triggered successfully'
|
||||||
}, 'WFM push re-triggered');
|
}, 'WFM push re-triggered');
|
||||||
@ -1109,4 +1201,101 @@ export class DealerClaimController {
|
|||||||
return ResponseHandler.error(res, 'Failed to re-trigger WFM push', 500, errorMessage);
|
return ResponseHandler.error(res, 'Failed to re-trigger WFM push', 500, errorMessage);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Fetch parsed WFM credit note CSV from outgoing folder
|
||||||
|
* GET /api/v1/dealer-claims/:requestId/credit-note-wfm
|
||||||
|
*/
|
||||||
|
async fetchCreditNoteWfm(req: Request, res: Response): Promise<void> {
|
||||||
|
try {
|
||||||
|
const identifier = req.params.requestId;
|
||||||
|
|
||||||
|
const workflow = await this.findWorkflowByIdentifier(identifier);
|
||||||
|
if (!workflow) {
|
||||||
|
return ResponseHandler.error(res, 'Workflow request not found', 404);
|
||||||
|
}
|
||||||
|
|
||||||
|
const requestId = (workflow as any).requestId || (workflow as any).request_id;
|
||||||
|
const requestNumber = (workflow as any).requestNumber || (workflow as any).request_number;
|
||||||
|
|
||||||
|
const claimDetails = await DealerClaimDetails.findOne({ where: { requestId } });
|
||||||
|
if (!claimDetails) {
|
||||||
|
return ResponseHandler.error(res, 'Dealer claim details not found', 404);
|
||||||
|
}
|
||||||
|
|
||||||
|
let isNonGst = false;
|
||||||
|
if (claimDetails.activityType) {
|
||||||
|
const activity = await ActivityType.findOne({ where: { title: claimDetails.activityType } });
|
||||||
|
const taxationType = activity?.taxationType || (claimDetails.activityType.toLowerCase().includes('non') ? 'Non GST' : 'GST');
|
||||||
|
isNonGst = taxationType === 'Non GST' || taxationType === 'Non-GST';
|
||||||
|
}
|
||||||
|
|
||||||
|
const { wfmFileService } = await import('../services/wfmFile.service');
|
||||||
|
const existingCreditNote = await ClaimCreditNote.findOne({
|
||||||
|
where: { requestId },
|
||||||
|
include: [{
|
||||||
|
model: ClaimCreditNoteItem,
|
||||||
|
as: 'items',
|
||||||
|
attributes: ['transactionNo'],
|
||||||
|
order: [['slNo', 'ASC']]
|
||||||
|
}]
|
||||||
|
}) as any;
|
||||||
|
|
||||||
|
if (existingCreditNote?.sapDocumentNumber || existingCreditNote?.creditNoteNumber) {
|
||||||
|
let displayTxn = existingCreditNote.transactionNo || '';
|
||||||
|
const items = existingCreditNote.items || [];
|
||||||
|
if (items.length > 1) {
|
||||||
|
const first = items[0].transactionNo;
|
||||||
|
const last = items[items.length - 1].transactionNo;
|
||||||
|
if (first && last && first !== last) {
|
||||||
|
displayTxn = `${first} - ${last}`;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const payload = [{
|
||||||
|
TRNS_UNIQ_NO: displayTxn,
|
||||||
|
CLAIM_NUMBER: requestNumber,
|
||||||
|
DOC_NO: existingCreditNote.sapDocumentNumber || existingCreditNote.creditNoteNumber || '',
|
||||||
|
MSG_TYP: existingCreditNote.status || '',
|
||||||
|
MESSAGE: existingCreditNote.errorMessage || ''
|
||||||
|
}];
|
||||||
|
return ResponseHandler.success(res, payload, 'Credit note data fetched successfully');
|
||||||
|
}
|
||||||
|
|
||||||
|
const { filePath, data: creditNoteData } = await wfmFileService.getCreditNoteDetailsWithPath(
|
||||||
|
claimDetails.dealerCode,
|
||||||
|
requestNumber,
|
||||||
|
isNonGst
|
||||||
|
);
|
||||||
|
|
||||||
|
if (!creditNoteData.length) {
|
||||||
|
return ResponseHandler.success(res, [], 'Credit note data fetched successfully');
|
||||||
|
}
|
||||||
|
|
||||||
|
// Process the file using the sync service (handles grouping and transactions)
|
||||||
|
const { creditNoteSyncService } = await import('../services/creditNoteSync.service');
|
||||||
|
await creditNoteSyncService.processFile(filePath);
|
||||||
|
|
||||||
|
// Return unified row with range if multiple rows exist for this claim
|
||||||
|
const claimRows = creditNoteData.filter(row => row.CLAIM_NUMBER === requestNumber);
|
||||||
|
if (claimRows.length === 0) {
|
||||||
|
return ResponseHandler.success(res, [], 'Credit note data fetched successfully');
|
||||||
|
}
|
||||||
|
|
||||||
|
const claimRow = { ...claimRows[0] };
|
||||||
|
if (claimRows.length > 1) {
|
||||||
|
const first = claimRows[0].TRNS_UNIQ_NO;
|
||||||
|
const last = claimRows[claimRows.length - 1].TRNS_UNIQ_NO;
|
||||||
|
if (first && last && first !== last) {
|
||||||
|
claimRow.TRNS_UNIQ_NO = `${first} - ${last}`;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return ResponseHandler.success(res, [claimRow], 'Credit note data fetched successfully');
|
||||||
|
} catch (error) {
|
||||||
|
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
|
||||||
|
logger.error('[DealerClaimController] Error fetching credit note WFM data:', error);
|
||||||
|
return ResponseHandler.error(res, 'Failed to fetch credit note CSV data', 500, errorMessage);
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@ -7,6 +7,8 @@ import { User } from '@models/User';
|
|||||||
import { WorkflowRequest } from '@models/WorkflowRequest';
|
import { WorkflowRequest } from '@models/WorkflowRequest';
|
||||||
import { Participant } from '@models/Participant';
|
import { Participant } from '@models/Participant';
|
||||||
import { ApprovalLevel } from '@models/ApprovalLevel';
|
import { ApprovalLevel } from '@models/ApprovalLevel';
|
||||||
|
import { WorkNote } from '@models/WorkNote';
|
||||||
|
import { WorkNoteAttachment } from '@models/WorkNoteAttachment';
|
||||||
import { Op } from 'sequelize';
|
import { Op } from 'sequelize';
|
||||||
import { ResponseHandler } from '@utils/responseHandler';
|
import { ResponseHandler } from '@utils/responseHandler';
|
||||||
import { activityService } from '@services/activity.service';
|
import { activityService } from '@services/activity.service';
|
||||||
@ -17,6 +19,9 @@ import type { AuthenticatedRequest } from '../types/express';
|
|||||||
import { getRequestMetadata } from '@utils/requestUtils';
|
import { getRequestMetadata } from '@utils/requestUtils';
|
||||||
import { getConfigNumber, getConfigValue } from '@services/configReader.service';
|
import { getConfigNumber, getConfigValue } from '@services/configReader.service';
|
||||||
import { logDocumentEvent, logWithContext } from '@utils/logger';
|
import { logDocumentEvent, logWithContext } from '@utils/logger';
|
||||||
|
import { UPLOAD_DIR } from '../config/storage';
|
||||||
|
import { Storage } from '@google-cloud/storage';
|
||||||
|
import logger from '@utils/logger';
|
||||||
|
|
||||||
export class DocumentController {
|
export class DocumentController {
|
||||||
async upload(req: AuthenticatedRequest, res: Response): Promise<void> {
|
async upload(req: AuthenticatedRequest, res: Response): Promise<void> {
|
||||||
@ -517,6 +522,196 @@ export class DocumentController {
|
|||||||
ResponseHandler.error(res, 'Upload failed', 500, message);
|
ResponseHandler.error(res, 'Upload failed', 500, message);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Helper function to create proper Content-Disposition header
|
||||||
|
*/
|
||||||
|
private createContentDisposition(disposition: 'inline' | 'attachment', filename: string): string {
|
||||||
|
const cleanFilename = filename
|
||||||
|
.replace(/[<>:"|?*\x00-\x1F\x7F]/g, '_')
|
||||||
|
.replace(/\\/g, '_')
|
||||||
|
.trim();
|
||||||
|
|
||||||
|
const hasNonASCII = /[^\x00-\x7F]/.test(filename);
|
||||||
|
|
||||||
|
if (hasNonASCII) {
|
||||||
|
const encodedFilename = encodeURIComponent(filename);
|
||||||
|
return `${disposition}; filename="${cleanFilename}"; filename*=UTF-8''${encodedFilename}`;
|
||||||
|
} else {
|
||||||
|
return `${disposition}; filename="${cleanFilename}"`;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Preview or Download a standard workflow document
|
||||||
|
*/
|
||||||
|
async getWorkflowDocument(req: AuthenticatedRequest, res: Response, mode: 'preview' | 'download'): Promise<void> {
|
||||||
|
try {
|
||||||
|
const { documentId } = req.params;
|
||||||
|
const userRole = req.user?.role;
|
||||||
|
const userId = req.user?.userId;
|
||||||
|
|
||||||
|
const document = await Document.findOne({ where: { documentId } });
|
||||||
|
if (!document) {
|
||||||
|
ResponseHandler.error(res, 'Document not found', 404);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Authorization Check
|
||||||
|
if (userRole !== 'ADMIN' && userRole !== 'MANAGEMENT') {
|
||||||
|
const participant = await Participant.findOne({
|
||||||
|
where: { requestId: document.requestId, userId, isActive: true }
|
||||||
|
});
|
||||||
|
|
||||||
|
if (!participant) {
|
||||||
|
ResponseHandler.error(res, 'Access denied. You are not a participant in this workflow.', 403);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const canAccess = mode === 'download' ? participant.canDownloadDocuments : participant.canViewDocuments;
|
||||||
|
if (!canAccess) {
|
||||||
|
ResponseHandler.error(res, `Access denied. You do not have permission to ${mode} documents in this workflow.`, 403);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return this.serveFile(res, {
|
||||||
|
storageUrl: (document as any).storageUrl || (document as any).storage_url,
|
||||||
|
filePath: (document as any).filePath || (document as any).file_path,
|
||||||
|
fileName: (document as any).originalFileName || (document as any).original_file_name || (document as any).fileName,
|
||||||
|
mimeType: (document as any).mimeType || (document as any).mime_type,
|
||||||
|
mode
|
||||||
|
});
|
||||||
|
} catch (error) {
|
||||||
|
logger.error(`[DocumentController] Error getting workflow document:`, error);
|
||||||
|
ResponseHandler.error(res, 'Failed to access document', 500);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Preview or Download a work note attachment
|
||||||
|
*/
|
||||||
|
async getWorkNoteAttachment(req: AuthenticatedRequest, res: Response, mode: 'preview' | 'download'): Promise<void> {
|
||||||
|
try {
|
||||||
|
const { attachmentId } = req.params;
|
||||||
|
const userRole = req.user?.role;
|
||||||
|
const userId = req.user?.userId;
|
||||||
|
|
||||||
|
const attachment = await WorkNoteAttachment.findOne({ where: { attachmentId } });
|
||||||
|
if (!attachment) {
|
||||||
|
ResponseHandler.error(res, 'Attachment not found', 404);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const note = await WorkNote.findOne({ where: { noteId: attachment.noteId } });
|
||||||
|
if (!note) {
|
||||||
|
ResponseHandler.error(res, 'Associated work note not found', 404);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Authorization Check (Work note attachments follow general document permissions)
|
||||||
|
if (userRole !== 'ADMIN' && userRole !== 'MANAGEMENT') {
|
||||||
|
const participant = await Participant.findOne({
|
||||||
|
where: { requestId: note.requestId, userId, isActive: true }
|
||||||
|
});
|
||||||
|
|
||||||
|
if (!participant) {
|
||||||
|
ResponseHandler.error(res, 'Access denied. You are not a participant in this workflow.', 403);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const canAccess = mode === 'download' ? participant.canDownloadDocuments : participant.canViewDocuments;
|
||||||
|
if (!canAccess) {
|
||||||
|
ResponseHandler.error(res, `Access denied. You do not have permission to ${mode} documentation in this workflow.`, 403);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return this.serveFile(res, {
|
||||||
|
storageUrl: (attachment as any).storageUrl || (attachment as any).storage_url,
|
||||||
|
filePath: (attachment as any).filePath || (attachment as any).file_path,
|
||||||
|
fileName: (attachment as any).fileName || (attachment as any).file_name,
|
||||||
|
mimeType: (attachment as any).fileType || (attachment as any).file_type,
|
||||||
|
mode
|
||||||
|
});
|
||||||
|
} catch (error) {
|
||||||
|
logger.error(`[DocumentController] Error getting work note attachment:`, error);
|
||||||
|
ResponseHandler.error(res, 'Failed to access attachment', 500);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Common logic to serve files from GCS or local storage
|
||||||
|
*/
|
||||||
|
private async serveFile(res: Response, options: {
|
||||||
|
storageUrl?: string,
|
||||||
|
filePath?: string,
|
||||||
|
fileName: string,
|
||||||
|
mimeType?: string,
|
||||||
|
mode: 'preview' | 'download'
|
||||||
|
}): Promise<void> {
|
||||||
|
const { storageUrl, filePath, fileName, mimeType, mode } = options;
|
||||||
|
const isGcsUrl = storageUrl && (storageUrl.startsWith('https://storage.googleapis.com') || storageUrl.startsWith('gs://'));
|
||||||
|
|
||||||
|
// Set CORS and basic headers
|
||||||
|
res.setHeader('Access-Control-Expose-Headers', 'Content-Type, Content-Disposition');
|
||||||
|
const dispositionType = mode === 'download' ? 'attachment' : (mimeType?.includes('pdf') || mimeType?.includes('image') ? 'inline' : 'attachment');
|
||||||
|
res.setHeader('Content-Disposition', this.createContentDisposition(dispositionType, fileName));
|
||||||
|
res.contentType(mimeType || 'application/octet-stream');
|
||||||
|
|
||||||
|
if (isGcsUrl) {
|
||||||
|
res.redirect(storageUrl!);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Stream from GCS if filePath is a GCS path
|
||||||
|
if (!storageUrl && filePath && (filePath.startsWith('requests/') || filePath.startsWith('worknotes/'))) {
|
||||||
|
try {
|
||||||
|
const keyFilePath = process.env.GCP_KEY_FILE || '';
|
||||||
|
const bucketName = process.env.GCP_BUCKET_NAME || '';
|
||||||
|
const resolvedKeyPath = path.isAbsolute(keyFilePath) ? keyFilePath : path.resolve(process.cwd(), keyFilePath);
|
||||||
|
|
||||||
|
const storage = new Storage({
|
||||||
|
projectId: process.env.GCP_PROJECT_ID || '',
|
||||||
|
keyFilename: resolvedKeyPath,
|
||||||
|
});
|
||||||
|
|
||||||
|
const bucket = storage.bucket(bucketName);
|
||||||
|
const file = bucket.file(filePath);
|
||||||
|
|
||||||
|
const [exists] = await file.exists();
|
||||||
|
if (!exists) {
|
||||||
|
ResponseHandler.error(res, 'File not found in storage', 404);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
file.createReadStream()
|
||||||
|
.on('error', (err) => {
|
||||||
|
logger.error('[DocumentController] GCS Stream Error:', err);
|
||||||
|
if (!res.headersSent) ResponseHandler.error(res, 'Streaming failed', 500);
|
||||||
|
})
|
||||||
|
.pipe(res);
|
||||||
|
return;
|
||||||
|
} catch (err) {
|
||||||
|
logger.error('[DocumentController] GCS Access Error:', err);
|
||||||
|
ResponseHandler.error(res, 'Failed to access cloud storage', 500);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Local file handling
|
||||||
|
const absolutePath = filePath && !path.isAbsolute(filePath) ? path.join(UPLOAD_DIR, filePath) : filePath;
|
||||||
|
if (absolutePath && fs.existsSync(absolutePath)) {
|
||||||
|
res.sendFile(absolutePath, (err) => {
|
||||||
|
if (err && !res.headersSent) ResponseHandler.error(res, 'Failed to send file', 500);
|
||||||
|
});
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
ResponseHandler.error(res, 'File not found on server', 404);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
export const documentController = new DocumentController();
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
1075
src/controllers/form16.controller.ts
Normal file
1075
src/controllers/form16.controller.ts
Normal file
File diff suppressed because it is too large
Load Diff
24
src/controllers/form16Sap.controller.ts
Normal file
24
src/controllers/form16Sap.controller.ts
Normal file
@ -0,0 +1,24 @@
|
|||||||
|
import { Request, Response } from 'express';
|
||||||
|
import { ResponseHandler } from '../utils/responseHandler';
|
||||||
|
import logger from '../utils/logger';
|
||||||
|
import { runForm16SapResponseIngestionOnce } from '../jobs/form16SapResponseJob';
|
||||||
|
|
||||||
|
export class Form16SapController {
|
||||||
|
/**
|
||||||
|
* POST /api/v1/form16/sap/pull
|
||||||
|
* Trigger an immediate scan of the SAP OUTGOING directories for new Form 16 response CSVs.
|
||||||
|
* Safe to call multiple times; ingestion is idempotent by file name.
|
||||||
|
*/
|
||||||
|
async pull(req: Request, res: Response): Promise<void> {
|
||||||
|
try {
|
||||||
|
const result = await runForm16SapResponseIngestionOnce();
|
||||||
|
return ResponseHandler.success(res, result, 'Pulled SAP responses');
|
||||||
|
} catch (e: any) {
|
||||||
|
logger.error('[Form16SapController] pull error:', e);
|
||||||
|
return ResponseHandler.error(res, 'Failed to pull SAP responses', 500, e?.message || 'Unknown error');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export const form16SapController = new Form16SapController();
|
||||||
|
|
||||||
133
src/controllers/hsnSacCode.controller.ts
Normal file
133
src/controllers/hsnSacCode.controller.ts
Normal file
@ -0,0 +1,133 @@
|
|||||||
|
import { Request, Response } from 'express';
|
||||||
|
import { hsnSacCodeService } from '../services/hsnSacCode.service';
|
||||||
|
import { ResponseHandler } from '../utils/responseHandler';
|
||||||
|
import logger from '../utils/logger';
|
||||||
|
|
||||||
|
export class HsnSacCodeController {
|
||||||
|
/**
|
||||||
|
* Get HSN/SAC codes with pagination and search
|
||||||
|
*/
|
||||||
|
async getAllCodes(req: Request, res: Response): Promise<void> {
|
||||||
|
try {
|
||||||
|
const onlyActive = req.query.active === 'true';
|
||||||
|
const page = parseInt(req.query.page as string) || 1;
|
||||||
|
const limit = parseInt(req.query.limit as string) || 10;
|
||||||
|
const search = req.query.search as string;
|
||||||
|
|
||||||
|
const result = await hsnSacCodeService.getAllCodes(onlyActive, page, limit, search);
|
||||||
|
|
||||||
|
ResponseHandler.success(
|
||||||
|
res,
|
||||||
|
result.codes,
|
||||||
|
'HSN/SAC codes fetched successfully',
|
||||||
|
200,
|
||||||
|
result.pagination
|
||||||
|
);
|
||||||
|
} catch (error: any) {
|
||||||
|
logger.error('Error in getAllCodes controller:', error);
|
||||||
|
ResponseHandler.error(res, 'Failed to fetch HSN/SAC codes', 500, error.message);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get code by ID
|
||||||
|
*/
|
||||||
|
async getCodeById(req: Request, res: Response): Promise<void> {
|
||||||
|
try {
|
||||||
|
const { id } = req.params;
|
||||||
|
const code = await hsnSacCodeService.getCodeById(id);
|
||||||
|
if (!code) {
|
||||||
|
return ResponseHandler.error(res, 'HSN/SAC code not found', 404);
|
||||||
|
}
|
||||||
|
ResponseHandler.success(res, code, 'HSN/SAC code fetched successfully');
|
||||||
|
} catch (error: any) {
|
||||||
|
logger.error('Error in getCodeById controller:', error);
|
||||||
|
ResponseHandler.error(res, 'Failed to fetch HSN/SAC code', 500, error.message);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Create new code
|
||||||
|
*/
|
||||||
|
async createCode(req: Request, res: Response): Promise<void> {
|
||||||
|
try {
|
||||||
|
const { code, type, gstRate, description, isActive } = req.body;
|
||||||
|
|
||||||
|
if (!code || !type) {
|
||||||
|
return ResponseHandler.error(res, 'Code and type are required', 400);
|
||||||
|
}
|
||||||
|
|
||||||
|
const newCode = await hsnSacCodeService.createCode({
|
||||||
|
code,
|
||||||
|
type,
|
||||||
|
gstRate,
|
||||||
|
description,
|
||||||
|
isActive: isActive !== undefined ? isActive : true
|
||||||
|
});
|
||||||
|
|
||||||
|
ResponseHandler.success(res, newCode, 'HSN/SAC code created successfully', 201);
|
||||||
|
} catch (error: any) {
|
||||||
|
logger.error('Error in createCode controller:', error);
|
||||||
|
ResponseHandler.error(res, 'Failed to create HSN/SAC code', 500, error.message);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Update code
|
||||||
|
*/
|
||||||
|
async updateCode(req: Request, res: Response): Promise<void> {
|
||||||
|
try {
|
||||||
|
const { id } = req.params;
|
||||||
|
const updatedCode = await hsnSacCodeService.updateCode(id, req.body);
|
||||||
|
|
||||||
|
if (!updatedCode) {
|
||||||
|
return ResponseHandler.error(res, 'HSN/SAC code not found', 404);
|
||||||
|
}
|
||||||
|
|
||||||
|
ResponseHandler.success(res, updatedCode, 'HSN/SAC code updated successfully');
|
||||||
|
} catch (error: any) {
|
||||||
|
logger.error('Error in updateCode controller:', error);
|
||||||
|
ResponseHandler.error(res, 'Failed to update HSN/SAC code', 500, error.message);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Delete code
|
||||||
|
*/
|
||||||
|
async deleteCode(req: Request, res: Response): Promise<void> {
|
||||||
|
try {
|
||||||
|
const { id } = req.params;
|
||||||
|
const success = await hsnSacCodeService.deleteCode(id);
|
||||||
|
|
||||||
|
if (!success) {
|
||||||
|
return ResponseHandler.error(res, 'HSN/SAC code not found', 404);
|
||||||
|
}
|
||||||
|
|
||||||
|
ResponseHandler.success(res, null, 'HSN/SAC code deleted successfully');
|
||||||
|
} catch (error: any) {
|
||||||
|
logger.error('Error in deleteCode controller:', error);
|
||||||
|
ResponseHandler.error(res, 'Failed to delete HSN/SAC code', 500, error.message);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Toggle active status
|
||||||
|
*/
|
||||||
|
async toggleActive(req: Request, res: Response): Promise<void> {
|
||||||
|
try {
|
||||||
|
const { id } = req.params;
|
||||||
|
const updatedCode = await hsnSacCodeService.toggleActive(id);
|
||||||
|
|
||||||
|
if (!updatedCode) {
|
||||||
|
return ResponseHandler.error(res, 'HSN/SAC code not found', 404);
|
||||||
|
}
|
||||||
|
|
||||||
|
ResponseHandler.success(res, updatedCode, 'HSN/SAC code status toggled successfully');
|
||||||
|
} catch (error: any) {
|
||||||
|
logger.error('Error in toggleActive controller:', error);
|
||||||
|
ResponseHandler.error(res, 'Failed to toggle HSN/SAC code status', 500, error.message);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export const hsnSacCodeController = new HsnSacCodeController();
|
||||||
@ -14,11 +14,27 @@ import crypto from 'crypto';
|
|||||||
import { getRequestMetadata } from '@utils/requestUtils';
|
import { getRequestMetadata } from '@utils/requestUtils';
|
||||||
import { enrichApprovalLevels, enrichSpectators, validateInitiator, validateDealerUser } from '@services/userEnrichment.service';
|
import { enrichApprovalLevels, enrichSpectators, validateInitiator, validateDealerUser } from '@services/userEnrichment.service';
|
||||||
import { DealerClaimService } from '@services/dealerClaim.service';
|
import { DealerClaimService } from '@services/dealerClaim.service';
|
||||||
|
import { canViewForm16Submission } from '@services/form16Permission.service';
|
||||||
|
import { sanitizeObject, isHtmlEmpty } from '@utils/sanitizer';
|
||||||
import logger from '@utils/logger';
|
import logger from '@utils/logger';
|
||||||
|
|
||||||
const workflowService = new WorkflowService();
|
const workflowService = new WorkflowService();
|
||||||
const dealerClaimService = new DealerClaimService();
|
const dealerClaimService = new DealerClaimService();
|
||||||
|
|
||||||
|
/** Filter FORM_16 from list result when user does not have Form 16 submission access. Admin always sees all. */
|
||||||
|
async function filterForm16FromListIfNeeded<T extends { data: any[]; pagination: any }>(
|
||||||
|
req: Request,
|
||||||
|
result: T
|
||||||
|
): Promise<T> {
|
||||||
|
if (!req.user?.email || !req.user?.userId || !result?.data?.length) return result;
|
||||||
|
const role = (req.user as any).role;
|
||||||
|
if (role === 'ADMIN') return result;
|
||||||
|
const allowed = await canViewForm16Submission(req.user.email, req.user.userId, role);
|
||||||
|
if (allowed) return result;
|
||||||
|
const filtered = result.data.filter((w: any) => ((w.templateType || '').toString().toUpperCase() !== 'FORM_16'));
|
||||||
|
return { ...result, data: filtered } as T;
|
||||||
|
}
|
||||||
|
|
||||||
export class WorkflowController {
|
export class WorkflowController {
|
||||||
async createWorkflow(req: AuthenticatedRequest, res: Response): Promise<void> {
|
async createWorkflow(req: AuthenticatedRequest, res: Response): Promise<void> {
|
||||||
try {
|
try {
|
||||||
@ -124,7 +140,7 @@ export class WorkflowController {
|
|||||||
} catch (error) {
|
} catch (error) {
|
||||||
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
|
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
|
||||||
logger.error('[WorkflowController] Failed to create workflow:', error);
|
logger.error('[WorkflowController] Failed to create workflow:', error);
|
||||||
ResponseHandler.error(res, 'Failed to create workflow', 400, errorMessage);
|
ResponseHandler.error(res, errorMessage, 400);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -146,10 +162,24 @@ export class WorkflowController {
|
|||||||
let parsed;
|
let parsed;
|
||||||
try {
|
try {
|
||||||
parsed = JSON.parse(raw);
|
parsed = JSON.parse(raw);
|
||||||
|
// Explicitly sanitize the parsed object since multipart bypasses global middleware
|
||||||
|
parsed = sanitizeObject(parsed);
|
||||||
} catch (parseError) {
|
} catch (parseError) {
|
||||||
ResponseHandler.error(res, 'Invalid JSON in payload', 400, parseError instanceof Error ? parseError.message : 'JSON parse error');
|
ResponseHandler.error(res, 'Invalid JSON in payload', 400, parseError instanceof Error ? parseError.message : 'JSON parse error');
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Explicitly check for empty content after sanitization for non-drafts
|
||||||
|
if (parsed.isDraft !== true) {
|
||||||
|
if (!parsed.title || !parsed.title.trim()) {
|
||||||
|
ResponseHandler.error(res, 'A valid title is required. Please ensure the title contains valid content.', 400);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
if (isHtmlEmpty(parsed.description)) {
|
||||||
|
ResponseHandler.error(res, 'A valid description is required. Please ensure the description contains valid content.', 400);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
// Transform frontend format to backend format BEFORE validation
|
// Transform frontend format to backend format BEFORE validation
|
||||||
// Map 'approvers' -> 'approvalLevels' for backward compatibility
|
// Map 'approvers' -> 'approvalLevels' for backward compatibility
|
||||||
@ -435,7 +465,7 @@ export class WorkflowController {
|
|||||||
userId: req.user?.userId,
|
userId: req.user?.userId,
|
||||||
filesCount: (req as any).files?.length || 0,
|
filesCount: (req as any).files?.length || 0,
|
||||||
});
|
});
|
||||||
ResponseHandler.error(res, 'Failed to create workflow', 400, errorMessage);
|
ResponseHandler.error(res, errorMessage, 400);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -449,6 +479,20 @@ export class WorkflowController {
|
|||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
const templateType = (workflow as any).templateType || '';
|
||||||
|
if (templateType.toString().toUpperCase() === 'FORM_16') {
|
||||||
|
if (!req.user?.email || !req.user?.userId) {
|
||||||
|
ResponseHandler.forbidden(res, 'Authentication required to view Form 16 request');
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
const role = (req.user as any).role;
|
||||||
|
const allowed = await canViewForm16Submission(req.user.email, req.user.userId, role);
|
||||||
|
if (!allowed) {
|
||||||
|
ResponseHandler.forbidden(res, 'You do not have permission to view this Form 16 request');
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
ResponseHandler.success(res, workflow, 'Workflow retrieved successfully');
|
ResponseHandler.success(res, workflow, 'Workflow retrieved successfully');
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
|
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
|
||||||
@ -478,6 +522,15 @@ export class WorkflowController {
|
|||||||
ResponseHandler.notFound(res, 'Workflow not found');
|
ResponseHandler.notFound(res, 'Workflow not found');
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
const templateType = (result as any).templateType || (result as any).workflow?.templateType || '';
|
||||||
|
if (templateType.toString().toUpperCase() === 'FORM_16') {
|
||||||
|
const role = (req.user as any).role;
|
||||||
|
const allowed = await canViewForm16Submission(req.user!.email, req.user!.userId, role);
|
||||||
|
if (!allowed) {
|
||||||
|
ResponseHandler.forbidden(res, 'You do not have permission to view this Form 16 request');
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
}
|
||||||
ResponseHandler.success(res, result, 'Workflow details fetched');
|
ResponseHandler.success(res, result, 'Workflow details fetched');
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
|
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
|
||||||
@ -490,12 +543,14 @@ export class WorkflowController {
|
|||||||
const page = Math.max(parseInt(String(req.query.page || '1'), 10), 1);
|
const page = Math.max(parseInt(String(req.query.page || '1'), 10), 1);
|
||||||
const limit = Math.min(Math.max(parseInt(String(req.query.limit || '20'), 10), 1), 100);
|
const limit = Math.min(Math.max(parseInt(String(req.query.limit || '20'), 10), 1), 100);
|
||||||
|
|
||||||
// Extract filter parameters
|
// Extract filter parameters (financialYear, quarter for Form 16)
|
||||||
const filters = {
|
const filters = {
|
||||||
search: req.query.search as string | undefined,
|
search: req.query.search as string | undefined,
|
||||||
status: req.query.status as string | undefined,
|
status: req.query.status as string | undefined,
|
||||||
priority: req.query.priority as string | undefined,
|
priority: req.query.priority as string | undefined,
|
||||||
templateType: req.query.templateType as string | undefined,
|
templateType: req.query.templateType as string | undefined,
|
||||||
|
financialYear: req.query.financialYear as string | undefined,
|
||||||
|
quarter: req.query.quarter as string | undefined,
|
||||||
department: req.query.department as string | undefined,
|
department: req.query.department as string | undefined,
|
||||||
initiator: req.query.initiator as string | undefined,
|
initiator: req.query.initiator as string | undefined,
|
||||||
approver: req.query.approver as string | undefined,
|
approver: req.query.approver as string | undefined,
|
||||||
@ -506,7 +561,15 @@ export class WorkflowController {
|
|||||||
endDate: req.query.endDate as string | undefined,
|
endDate: req.query.endDate as string | undefined,
|
||||||
};
|
};
|
||||||
|
|
||||||
const result = await workflowService.listWorkflows(page, limit, filters);
|
let result = await workflowService.listWorkflows(page, limit, filters);
|
||||||
|
if (req.user?.email && req.user?.userId) {
|
||||||
|
const role = (req.user as any).role;
|
||||||
|
const allowed = await canViewForm16Submission(req.user.email, req.user.userId, role);
|
||||||
|
if (!allowed && result?.data?.length) {
|
||||||
|
const filtered = result.data.filter((w: any) => ((w.templateType || '').toString().toUpperCase() !== 'FORM_16'));
|
||||||
|
result = { ...result, data: filtered };
|
||||||
|
}
|
||||||
|
}
|
||||||
ResponseHandler.success(res, result, 'Workflows fetched');
|
ResponseHandler.success(res, result, 'Workflows fetched');
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
|
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
|
||||||
@ -535,7 +598,8 @@ export class WorkflowController {
|
|||||||
|
|
||||||
const filters = { search, status, priority, department, initiator, approver, approverType, slaCompliance, dateRange, startDate, endDate };
|
const filters = { search, status, priority, department, initiator, approver, approverType, slaCompliance, dateRange, startDate, endDate };
|
||||||
|
|
||||||
const result = await workflowService.listMyRequests(userId, page, limit, filters);
|
let result = await workflowService.listMyRequests(userId, page, limit, filters);
|
||||||
|
result = await filterForm16FromListIfNeeded(req, result);
|
||||||
ResponseHandler.success(res, result, 'My requests fetched');
|
ResponseHandler.success(res, result, 'My requests fetched');
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
|
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
|
||||||
@ -566,10 +630,13 @@ export class WorkflowController {
|
|||||||
const dateRange = req.query.dateRange as string | undefined;
|
const dateRange = req.query.dateRange as string | undefined;
|
||||||
const startDate = req.query.startDate as string | undefined;
|
const startDate = req.query.startDate as string | undefined;
|
||||||
const endDate = req.query.endDate as string | undefined;
|
const endDate = req.query.endDate as string | undefined;
|
||||||
|
const financialYear = req.query.financialYear as string | undefined;
|
||||||
|
const quarter = req.query.quarter as string | undefined;
|
||||||
|
|
||||||
const filters = { search, status, priority, templateType, department, initiator, approver, approverType, slaCompliance, dateRange, startDate, endDate };
|
const filters = { search, status, priority, templateType, financialYear, quarter, department, initiator, approver, approverType, slaCompliance, dateRange, startDate, endDate };
|
||||||
|
|
||||||
const result = await workflowService.listParticipantRequests(userId, page, limit, filters);
|
let result = await workflowService.listParticipantRequests(userId, page, limit, filters);
|
||||||
|
result = await filterForm16FromListIfNeeded(req, result);
|
||||||
ResponseHandler.success(res, result, 'Participant requests fetched');
|
ResponseHandler.success(res, result, 'Participant requests fetched');
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
|
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
|
||||||
@ -599,7 +666,8 @@ export class WorkflowController {
|
|||||||
|
|
||||||
const filters = { search, status, priority, templateType, department, slaCompliance, dateRange, startDate, endDate };
|
const filters = { search, status, priority, templateType, department, slaCompliance, dateRange, startDate, endDate };
|
||||||
|
|
||||||
const result = await workflowService.listMyInitiatedRequests(userId, page, limit, filters);
|
let result = await workflowService.listMyInitiatedRequests(userId, page, limit, filters);
|
||||||
|
result = await filterForm16FromListIfNeeded(req, result);
|
||||||
ResponseHandler.success(res, result, 'My initiated requests fetched');
|
ResponseHandler.success(res, result, 'My initiated requests fetched');
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
|
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
|
||||||
@ -613,19 +681,22 @@ export class WorkflowController {
|
|||||||
const page = Math.max(parseInt(String(req.query.page || '1'), 10), 1);
|
const page = Math.max(parseInt(String(req.query.page || '1'), 10), 1);
|
||||||
const limit = Math.min(Math.max(parseInt(String(req.query.limit || '20'), 10), 1), 100);
|
const limit = Math.min(Math.max(parseInt(String(req.query.limit || '20'), 10), 1), 100);
|
||||||
|
|
||||||
// Extract filter parameters
|
// Extract filter parameters (Form 16: financialYear, quarter when templateType is FORM_16)
|
||||||
const filters = {
|
const filters = {
|
||||||
search: req.query.search as string | undefined,
|
search: req.query.search as string | undefined,
|
||||||
status: req.query.status as string | undefined,
|
status: req.query.status as string | undefined,
|
||||||
priority: req.query.priority as string | undefined,
|
priority: req.query.priority as string | undefined,
|
||||||
templateType: req.query.templateType as string | undefined
|
templateType: req.query.templateType as string | undefined,
|
||||||
|
financialYear: req.query.financialYear as string | undefined,
|
||||||
|
quarter: req.query.quarter as string | undefined,
|
||||||
};
|
};
|
||||||
|
|
||||||
// Extract sorting parameters
|
// Extract sorting parameters
|
||||||
const sortBy = req.query.sortBy as string | undefined;
|
const sortBy = req.query.sortBy as string | undefined;
|
||||||
const sortOrder = (req.query.sortOrder as string | undefined) || 'desc';
|
const sortOrder = (req.query.sortOrder as string | undefined) || 'desc';
|
||||||
|
|
||||||
const result = await workflowService.listOpenForMe(userId, page, limit, filters, sortBy, sortOrder);
|
let result = await workflowService.listOpenForMe(userId, page, limit, filters, sortBy, sortOrder);
|
||||||
|
result = await filterForm16FromListIfNeeded(req, result);
|
||||||
ResponseHandler.success(res, result, 'Open requests for user fetched');
|
ResponseHandler.success(res, result, 'Open requests for user fetched');
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
|
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
|
||||||
@ -639,19 +710,22 @@ export class WorkflowController {
|
|||||||
const page = Math.max(parseInt(String(req.query.page || '1'), 10), 1);
|
const page = Math.max(parseInt(String(req.query.page || '1'), 10), 1);
|
||||||
const limit = Math.min(Math.max(parseInt(String(req.query.limit || '20'), 10), 1), 100);
|
const limit = Math.min(Math.max(parseInt(String(req.query.limit || '20'), 10), 1), 100);
|
||||||
|
|
||||||
// Extract filter parameters
|
// Extract filter parameters (Form 16: financialYear, quarter when templateType is FORM_16)
|
||||||
const filters = {
|
const filters = {
|
||||||
search: req.query.search as string | undefined,
|
search: req.query.search as string | undefined,
|
||||||
status: req.query.status as string | undefined,
|
status: req.query.status as string | undefined,
|
||||||
priority: req.query.priority as string | undefined,
|
priority: req.query.priority as string | undefined,
|
||||||
templateType: req.query.templateType as string | undefined
|
templateType: req.query.templateType as string | undefined,
|
||||||
|
financialYear: req.query.financialYear as string | undefined,
|
||||||
|
quarter: req.query.quarter as string | undefined,
|
||||||
};
|
};
|
||||||
|
|
||||||
// Extract sorting parameters
|
// Extract sorting parameters
|
||||||
const sortBy = req.query.sortBy as string | undefined;
|
const sortBy = req.query.sortBy as string | undefined;
|
||||||
const sortOrder = (req.query.sortOrder as string | undefined) || 'desc';
|
const sortOrder = (req.query.sortOrder as string | undefined) || 'desc';
|
||||||
|
|
||||||
const result = await workflowService.listClosedByMe(userId, page, limit, filters, sortBy, sortOrder);
|
let result = await workflowService.listClosedByMe(userId, page, limit, filters, sortBy, sortOrder);
|
||||||
|
result = await filterForm16FromListIfNeeded(req, result);
|
||||||
ResponseHandler.success(res, result, 'Closed requests by user fetched');
|
ResponseHandler.success(res, result, 'Closed requests by user fetched');
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
|
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
|
||||||
|
|||||||
@ -3,7 +3,7 @@
|
|||||||
*/
|
*/
|
||||||
|
|
||||||
import { ApprovalRequestData } from './types';
|
import { ApprovalRequestData } from './types';
|
||||||
import { getEmailFooter, getPrioritySection, getEmailHeader, HeaderStyles, getResponsiveStyles, wrapRichText, getEmailContainerStyles } from './helpers';
|
import { getEmailFooter, getPrioritySection, getEmailHeader, HeaderStyles, getResponsiveStyles, wrapRichText, getEmailContainerStyles, getCustomMessageSection } from './helpers';
|
||||||
import { getBrandedHeader } from './branding.config';
|
import { getBrandedHeader } from './branding.config';
|
||||||
|
|
||||||
export function getApprovalRequestEmail(data: ApprovalRequestData): string {
|
export function getApprovalRequestEmail(data: ApprovalRequestData): string {
|
||||||
@ -102,6 +102,9 @@ export function getApprovalRequestEmail(data: ApprovalRequestData): string {
|
|||||||
</tr>
|
</tr>
|
||||||
</table>
|
</table>
|
||||||
|
|
||||||
|
<!-- Custom Message Section -->
|
||||||
|
${getCustomMessageSection(data.customMessage)}
|
||||||
|
|
||||||
<!-- Description (supports rich text HTML including tables) -->
|
<!-- Description (supports rich text HTML including tables) -->
|
||||||
<div style="margin-bottom: 30px;">
|
<div style="margin-bottom: 30px;">
|
||||||
<h3 style="margin: 0 0 15px; color: #333333; font-size: 16px; font-weight: 600;">Description:</h3>
|
<h3 style="margin: 0 0 15px; color: #333333; font-size: 16px; font-weight: 600;">Description:</h3>
|
||||||
|
|||||||
@ -5,7 +5,7 @@
|
|||||||
*/
|
*/
|
||||||
|
|
||||||
import { DealerProposalRequiredData } from './types';
|
import { DealerProposalRequiredData } from './types';
|
||||||
import { getEmailFooter, getPrioritySection, getEmailHeader, HeaderStyles, getResponsiveStyles, wrapRichText, getEmailContainerStyles } from './helpers';
|
import { getEmailFooter, getPrioritySection, getEmailHeader, HeaderStyles, getResponsiveStyles, wrapRichText, getEmailContainerStyles, getCustomMessageSection } from './helpers';
|
||||||
import { getBrandedHeader } from './branding.config';
|
import { getBrandedHeader } from './branding.config';
|
||||||
|
|
||||||
export function getDealerCompletionRequiredEmail(data: DealerProposalRequiredData): string {
|
export function getDealerCompletionRequiredEmail(data: DealerProposalRequiredData): string {
|
||||||
@ -103,6 +103,9 @@ export function getDealerCompletionRequiredEmail(data: DealerProposalRequiredDat
|
|||||||
</tr>
|
</tr>
|
||||||
</table>
|
</table>
|
||||||
|
|
||||||
|
<!-- Custom Message Section -->
|
||||||
|
${getCustomMessageSection(data.customMessage)}
|
||||||
|
|
||||||
<div style="padding: 20px; background-color: #fff3cd; border-left: 4px solid #ffc107; border-radius: 4px; margin-bottom: 30px;">
|
<div style="padding: 20px; background-color: #fff3cd; border-left: 4px solid #ffc107; border-radius: 4px; margin-bottom: 30px;">
|
||||||
<h3 style="margin: 0 0 10px; color: #856404; font-size: 16px; font-weight: 600;">What You Need to Submit:</h3>
|
<h3 style="margin: 0 0 10px; color: #856404; font-size: 16px; font-weight: 600;">What You Need to Submit:</h3>
|
||||||
<ul style="margin: 0; padding: 0 0 0 20px; color: #666666; font-size: 14px; line-height: 1.6;">
|
<ul style="margin: 0; padding: 0 0 0 20px; color: #666666; font-size: 14px; line-height: 1.6;">
|
||||||
|
|||||||
@ -5,7 +5,7 @@
|
|||||||
*/
|
*/
|
||||||
|
|
||||||
import { DealerProposalRequiredData } from './types';
|
import { DealerProposalRequiredData } from './types';
|
||||||
import { getEmailFooter, getPrioritySection, getEmailHeader, HeaderStyles, getResponsiveStyles, wrapRichText, getEmailContainerStyles } from './helpers';
|
import { getEmailFooter, getPrioritySection, getEmailHeader, HeaderStyles, getResponsiveStyles, wrapRichText, getEmailContainerStyles, getCustomMessageSection } from './helpers';
|
||||||
import { getBrandedHeader } from './branding.config';
|
import { getBrandedHeader } from './branding.config';
|
||||||
|
|
||||||
export function getDealerProposalRequiredEmail(data: DealerProposalRequiredData): string {
|
export function getDealerProposalRequiredEmail(data: DealerProposalRequiredData): string {
|
||||||
@ -152,6 +152,9 @@ export function getDealerProposalRequiredEmail(data: DealerProposalRequiredData)
|
|||||||
</tr>
|
</tr>
|
||||||
</table>
|
</table>
|
||||||
|
|
||||||
|
<!-- Custom Message Section -->
|
||||||
|
${getCustomMessageSection(data.customMessage)}
|
||||||
|
|
||||||
<!-- Description (supports rich text HTML including tables) -->
|
<!-- Description (supports rich text HTML including tables) -->
|
||||||
${data.requestDescription ? `
|
${data.requestDescription ? `
|
||||||
<div style="margin-bottom: 30px;">
|
<div style="margin-bottom: 30px;">
|
||||||
|
|||||||
@ -32,7 +32,8 @@ export enum EmailNotificationType {
|
|||||||
COMPLETION_DOCUMENTS_SUBMITTED = 'completion_documents_submitted',
|
COMPLETION_DOCUMENTS_SUBMITTED = 'completion_documents_submitted',
|
||||||
EINVOICE_GENERATED = 'einvoice_generated',
|
EINVOICE_GENERATED = 'einvoice_generated',
|
||||||
CREDIT_NOTE_SENT = 'credit_note_sent',
|
CREDIT_NOTE_SENT = 'credit_note_sent',
|
||||||
ADDITIONAL_DOCUMENT_ADDED = 'additional_document_added'
|
ADDITIONAL_DOCUMENT_ADDED = 'additional_document_added',
|
||||||
|
RE_QUOTATION = 're_quotation',
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
|||||||
102
src/emailtemplates/form_16_email.template.ts
Normal file
102
src/emailtemplates/form_16_email.template.ts
Normal file
@ -0,0 +1,102 @@
|
|||||||
|
/**
|
||||||
|
* Form 16 Email Template (generic wrapper for Form 16 notification types)
|
||||||
|
*
|
||||||
|
* Used by notification.service.ts when payload.type starts with `form16_`.
|
||||||
|
* Payload body comes from Form 16 admin-config templates (plain text with placeholders already substituted).
|
||||||
|
*/
|
||||||
|
|
||||||
|
import { Form16EmailData } from './types';
|
||||||
|
import { getEmailFooter, getEmailHeader, getEmailContainerStyles, getResponsiveStyles, HeaderStyles } from './helpers';
|
||||||
|
import { getBrandedHeader } from './branding.config';
|
||||||
|
|
||||||
|
export function getForm16Email(data: Form16EmailData): string {
|
||||||
|
const headerStyle =
|
||||||
|
data.variant === 'success'
|
||||||
|
? HeaderStyles.success
|
||||||
|
: data.variant === 'warning'
|
||||||
|
? HeaderStyles.warning
|
||||||
|
: data.variant === 'error'
|
||||||
|
? HeaderStyles.error
|
||||||
|
: HeaderStyles.info;
|
||||||
|
|
||||||
|
const requestBlock = data.requestId
|
||||||
|
? `
|
||||||
|
<table role="presentation" style="width: 100%; border-collapse: collapse; background-color: #f8f9fa; border-radius: 6px; margin-bottom: 26px;" cellpadding="0" cellspacing="0">
|
||||||
|
<tr>
|
||||||
|
<td style="padding: 18px 20px;">
|
||||||
|
<table role="presentation" style="width: 100%; border-collapse: collapse;" cellpadding="0" cellspacing="0">
|
||||||
|
<tr>
|
||||||
|
<td style="padding: 4px 0; color: #666666; font-size: 13px; width: 110px;"><strong>Request ID:</strong></td>
|
||||||
|
<td style="padding: 4px 0; color: #333333; font-size: 13px;">${data.requestId}</td>
|
||||||
|
</tr>
|
||||||
|
</table>
|
||||||
|
</td>
|
||||||
|
</tr>
|
||||||
|
</table>
|
||||||
|
`
|
||||||
|
: '';
|
||||||
|
|
||||||
|
const ctaBlock = data.viewDetailsLink
|
||||||
|
? `
|
||||||
|
<table role="presentation" style="width: 100%; border-collapse: collapse; margin: 10px 0 6px;" cellpadding="0" cellspacing="0">
|
||||||
|
<tr>
|
||||||
|
<td style="text-align: center;">
|
||||||
|
<a href="${data.viewDetailsLink}" class="cta-button" style="display: inline-block; padding: 14px 34px; background-color: #1a1a1a; color: #ffffff; text-decoration: none; text-align: center; border-radius: 6px; font-size: 15px; font-weight: 600; box-shadow: 0 2px 4px rgba(0, 0, 0, 0.2); min-width: 200px;">
|
||||||
|
View Request Details
|
||||||
|
</a>
|
||||||
|
</td>
|
||||||
|
</tr>
|
||||||
|
</table>
|
||||||
|
`
|
||||||
|
: '';
|
||||||
|
|
||||||
|
return `
|
||||||
|
<!DOCTYPE html>
|
||||||
|
<html lang="en">
|
||||||
|
<head>
|
||||||
|
<meta charset="UTF-8">
|
||||||
|
<meta name="viewport" content="width=device-width, initial-scale=1.0, maximum-scale=1.0, user-scalable=no">
|
||||||
|
<meta http-equiv="X-UA-Compatible" content="IE=edge">
|
||||||
|
<meta name="format-detection" content="telephone=no">
|
||||||
|
<title>${data.title}</title>
|
||||||
|
${getResponsiveStyles()}
|
||||||
|
</head>
|
||||||
|
<body style="margin: 0; padding: 0; font-family: Arial, Helvetica, sans-serif; background-color: #f4f4f4;">
|
||||||
|
<table role="presentation" style="width: 100%; border-collapse: collapse; background-color: #f4f4f4;" cellpadding="0" cellspacing="0">
|
||||||
|
<tr>
|
||||||
|
<td style="padding: 40px 0;">
|
||||||
|
<table role="presentation" class="email-container" style="${getEmailContainerStyles()}" cellpadding="0" cellspacing="0">
|
||||||
|
${getEmailHeader(getBrandedHeader({ title: data.title, ...headerStyle }))}
|
||||||
|
|
||||||
|
<tr>
|
||||||
|
<td class="email-content" style="padding: 40px 30px;">
|
||||||
|
<p style="margin: 0 0 18px; color: #333333; font-size: 16px; line-height: 1.6;">
|
||||||
|
Dear <strong style="color: #667eea;">${data.recipientName}</strong>,
|
||||||
|
</p>
|
||||||
|
|
||||||
|
${requestBlock}
|
||||||
|
|
||||||
|
<div style="padding: 18px 18px; background-color: #ffffff; border: 1px solid #e9ecef; border-radius: 6px; margin-bottom: 24px;">
|
||||||
|
<div style="margin: 0; color: #333333; font-size: 14px; line-height: 1.8;">
|
||||||
|
${data.messageHtml}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
${ctaBlock}
|
||||||
|
|
||||||
|
<p style="margin: 18px 0 0; color: #666666; font-size: 13px; line-height: 1.6; text-align: center;">
|
||||||
|
Thank you for using the ${data.companyName} Workflow System.
|
||||||
|
</p>
|
||||||
|
</td>
|
||||||
|
</tr>
|
||||||
|
|
||||||
|
${getEmailFooter(data.companyName)}
|
||||||
|
</table>
|
||||||
|
</td>
|
||||||
|
</tr>
|
||||||
|
</table>
|
||||||
|
</body>
|
||||||
|
</html>
|
||||||
|
`;
|
||||||
|
}
|
||||||
|
|
||||||
@ -799,6 +799,22 @@ export function getRoleDescription(role: 'Approver' | 'Spectator'): string {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Generate custom message section (e.g., for re-quotation notes)
|
||||||
|
*/
|
||||||
|
export function getCustomMessageSection(message?: string): string {
|
||||||
|
if (!message) return '';
|
||||||
|
|
||||||
|
return `
|
||||||
|
<div style="margin-bottom: 30px; padding: 20px; background-color: #f0f7ff; border-left: 4px solid #667eea; border-radius: 4px;">
|
||||||
|
<h3 style="margin: 0 0 10px; color: #333333; font-size: 16px; font-weight: 600;">Note/Instructions:</h3>
|
||||||
|
<p style="margin: 0; color: #444444; font-size: 15px; line-height: 1.6; font-style: italic;">
|
||||||
|
"${message}"
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
`;
|
||||||
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Generate action required section for workflow resumed
|
* Generate action required section for workflow resumed
|
||||||
*/
|
*/
|
||||||
@ -882,6 +898,11 @@ export function getTemplateTypeLabel(templateType?: string): string {
|
|||||||
if (upper === 'DEALER CLAIM' || upper === 'DEALER_CLAIM' || upper === 'CLAIM-MANAGEMENT' || upper === 'CLAIM_MANAGEMENT') {
|
if (upper === 'DEALER CLAIM' || upper === 'DEALER_CLAIM' || upper === 'CLAIM-MANAGEMENT' || upper === 'CLAIM_MANAGEMENT') {
|
||||||
return 'Dealer Claim';
|
return 'Dealer Claim';
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Form 16 (Form 16A TDS)
|
||||||
|
if (upper === 'FORM_16' || upper === 'FORM16') {
|
||||||
|
return 'Form 16';
|
||||||
|
}
|
||||||
|
|
||||||
// Handle template type
|
// Handle template type
|
||||||
if (upper === 'TEMPLATE') {
|
if (upper === 'TEMPLATE') {
|
||||||
|
|||||||
@ -36,4 +36,5 @@ export { getCompletionDocumentsSubmittedEmail } from './completionDocumentsSubmi
|
|||||||
export { getEInvoiceGeneratedEmail } from './einvoiceGenerated.template';
|
export { getEInvoiceGeneratedEmail } from './einvoiceGenerated.template';
|
||||||
export { getCreditNoteSentEmail } from './creditNoteSent.template';
|
export { getCreditNoteSentEmail } from './creditNoteSent.template';
|
||||||
export { getAdditionalDocumentAddedEmail } from './additionalDocumentAdded.template';
|
export { getAdditionalDocumentAddedEmail } from './additionalDocumentAdded.template';
|
||||||
|
export { getForm16Email } from './form_16_email.template';
|
||||||
|
|
||||||
|
|||||||
@ -15,7 +15,7 @@ export function getRejectionNotificationEmail(data: RejectionNotificationData):
|
|||||||
<meta name="viewport" content="width=device-width, initial-scale=1.0, maximum-scale=1.0, user-scalable=no">
|
<meta name="viewport" content="width=device-width, initial-scale=1.0, maximum-scale=1.0, user-scalable=no">
|
||||||
<meta http-equiv="X-UA-Compatible" content="IE=edge">
|
<meta http-equiv="X-UA-Compatible" content="IE=edge">
|
||||||
<meta name="format-detection" content="telephone=no">
|
<meta name="format-detection" content="telephone=no">
|
||||||
<title>Request Rejected</title>
|
<title>${data.isReturnedForRevision ? 'Request Returned for Revision' : 'Request Rejected'}</title>
|
||||||
${getResponsiveStyles()}
|
${getResponsiveStyles()}
|
||||||
</head>
|
</head>
|
||||||
<body style="margin: 0; padding: 0; font-family: Arial, Helvetica, sans-serif; background-color: #f4f4f4;">
|
<body style="margin: 0; padding: 0; font-family: Arial, Helvetica, sans-serif; background-color: #f4f4f4;">
|
||||||
@ -24,63 +24,65 @@ export function getRejectionNotificationEmail(data: RejectionNotificationData):
|
|||||||
<td style="padding: 40px 0;">
|
<td style="padding: 40px 0;">
|
||||||
<table role="presentation" class="email-container" style="${getEmailContainerStyles()}" cellpadding="0" cellspacing="0">
|
<table role="presentation" class="email-container" style="${getEmailContainerStyles()}" cellpadding="0" cellspacing="0">
|
||||||
${getEmailHeader(getBrandedHeader({
|
${getEmailHeader(getBrandedHeader({
|
||||||
title: 'Request Rejected',
|
title: data.isReturnedForRevision ? 'Revision Required' : 'Request Rejected',
|
||||||
...HeaderStyles.error
|
...(data.isReturnedForRevision ? HeaderStyles.warning : HeaderStyles.error)
|
||||||
}))}
|
}))}
|
||||||
|
|
||||||
<tr>
|
<tr>
|
||||||
<td style="padding: 40px 30px;">
|
<td style="padding: 40px 30px;">
|
||||||
<p style="margin: 0 0 20px; color: #333333; font-size: 16px; line-height: 1.6;">
|
<p style="margin: 0 0 20px; color: #333333; font-size: 16px; line-height: 1.6;">
|
||||||
Dear <strong style="color: #dc3545;">${data.initiatorName}</strong>,
|
Dear <strong style="color: ${data.isReturnedForRevision ? '#856404' : '#dc3545'};">${data.initiatorName}</strong>,
|
||||||
</p>
|
</p>
|
||||||
|
|
||||||
<p style="margin: 0 0 30px; color: #666666; font-size: 16px; line-height: 1.6;">
|
<p style="margin: 0 0 30px; color: #666666; font-size: 16px; line-height: 1.6;">
|
||||||
We regret to inform you that your request has been <strong style="color: #dc3545;">rejected</strong> by <strong>${data.approverName}</strong>.
|
${data.isReturnedForRevision
|
||||||
|
? `Your request has been <strong>returned for revision</strong> by <strong>${data.approverName}</strong>.`
|
||||||
|
: `We regret to inform you that your request has been <strong style="color: #dc3545;">rejected</strong> by <strong>${data.approverName}</strong>.`}
|
||||||
</p>
|
</p>
|
||||||
|
|
||||||
<table role="presentation" style="width: 100%; border-collapse: collapse; background-color: #f8d7da; border: 1px solid #f5c6cb; border-radius: 6px; margin-bottom: 30px;" cellpadding="0" cellspacing="0">
|
<table role="presentation" style="width: 100%; border-collapse: collapse; background-color: ${data.isReturnedForRevision ? '#fff3cd' : '#f8d7da'}; border: 1px solid ${data.isReturnedForRevision ? '#ffeeba' : '#f5c6cb'}; border-radius: 6px; margin-bottom: 30px;" cellpadding="0" cellspacing="0">
|
||||||
<tr>
|
<tr>
|
||||||
<td style="padding: 25px;">
|
<td style="padding: 25px;">
|
||||||
<h2 style="margin: 0 0 20px; color: #721c24; font-size: 18px; font-weight: 600;">Request Summary</h2>
|
<h2 style="margin: 0 0 20px; color: ${data.isReturnedForRevision ? '#856404' : '#721c24'}; font-size: 18px; font-weight: 600;">Request Summary</h2>
|
||||||
|
|
||||||
<table role="presentation" style="width: 100%; border-collapse: collapse;" cellpadding="0" cellspacing="0">
|
<table role="presentation" style="width: 100%; border-collapse: collapse;" cellpadding="0" cellspacing="0">
|
||||||
<tr>
|
<tr>
|
||||||
<td style="padding: 8px 0; color: #721c24; font-size: 14px; width: 140px;">
|
<td style="padding: 8px 0; color: ${data.isReturnedForRevision ? '#856404' : '#721c24'}; font-size: 14px; width: 140px;">
|
||||||
<strong>Request ID:</strong>
|
<strong>Request ID:</strong>
|
||||||
</td>
|
</td>
|
||||||
<td style="padding: 8px 0; color: #721c24; font-size: 14px;">
|
<td style="padding: 8px 0; color: ${data.isReturnedForRevision ? '#856404' : '#721c24'}; font-size: 14px;">
|
||||||
${data.requestId}
|
${data.requestId}
|
||||||
</td>
|
</td>
|
||||||
</tr>
|
</tr>
|
||||||
<tr>
|
<tr>
|
||||||
<td style="padding: 8px 0; color: #721c24; font-size: 14px;">
|
<td style="padding: 8px 0; color: ${data.isReturnedForRevision ? '#856404' : '#721c24'}; font-size: 14px;">
|
||||||
<strong>Rejected By:</strong>
|
<strong>Action By:</strong>
|
||||||
</td>
|
</td>
|
||||||
<td style="padding: 8px 0; color: #721c24; font-size: 14px;">
|
<td style="padding: 8px 0; color: ${data.isReturnedForRevision ? '#856404' : '#721c24'}; font-size: 14px;">
|
||||||
${data.approverName}
|
${data.approverName}
|
||||||
</td>
|
</td>
|
||||||
</tr>
|
</tr>
|
||||||
<tr>
|
<tr>
|
||||||
<td style="padding: 8px 0; color: #721c24; font-size: 14px;">
|
<td style="padding: 8px 0; color: ${data.isReturnedForRevision ? '#856404' : '#721c24'}; font-size: 14px;">
|
||||||
<strong>Rejected On:</strong>
|
<strong>Action On:</strong>
|
||||||
</td>
|
</td>
|
||||||
<td style="padding: 8px 0; color: #721c24; font-size: 14px;">
|
<td style="padding: 8px 0; color: ${data.isReturnedForRevision ? '#856404' : '#721c24'}; font-size: 14px;">
|
||||||
${data.rejectionDate}
|
${data.rejectionDate}
|
||||||
</td>
|
</td>
|
||||||
</tr>
|
</tr>
|
||||||
<tr>
|
<tr>
|
||||||
<td style="padding: 8px 0; color: #721c24; font-size: 14px;">
|
<td style="padding: 8px 0; color: ${data.isReturnedForRevision ? '#856404' : '#721c24'}; font-size: 14px;">
|
||||||
<strong>Time:</strong>
|
<strong>Time:</strong>
|
||||||
</td>
|
</td>
|
||||||
<td style="padding: 8px 0; color: #721c24; font-size: 14px;">
|
<td style="padding: 8px 0; color: ${data.isReturnedForRevision ? '#856404' : '#721c24'}; font-size: 14px;">
|
||||||
${data.rejectionTime}
|
${data.rejectionTime}
|
||||||
</td>
|
</td>
|
||||||
</tr>
|
</tr>
|
||||||
<tr>
|
<tr>
|
||||||
<td style="padding: 8px 0; color: #721c24; font-size: 14px;">
|
<td style="padding: 8px 0; color: ${data.isReturnedForRevision ? '#856404' : '#721c24'}; font-size: 14px;">
|
||||||
<strong>Request Type:</strong>
|
<strong>Request Type:</strong>
|
||||||
</td>
|
</td>
|
||||||
<td style="padding: 8px 0; color: #721c24; font-size: 14px;">
|
<td style="padding: 8px 0; color: ${data.isReturnedForRevision ? '#856404' : '#721c24'}; font-size: 14px;">
|
||||||
${data.requestType}
|
${data.requestType}
|
||||||
</td>
|
</td>
|
||||||
</tr>
|
</tr>
|
||||||
@ -90,8 +92,8 @@ export function getRejectionNotificationEmail(data: RejectionNotificationData):
|
|||||||
</table>
|
</table>
|
||||||
|
|
||||||
<div style="margin-bottom: 30px;">
|
<div style="margin-bottom: 30px;">
|
||||||
<h3 style="margin: 0 0 15px; color: #333333; font-size: 16px; font-weight: 600;">Reason for Rejection:</h3>
|
<h3 style="margin: 0 0 15px; color: #333333; font-size: 16px; font-weight: 600;">${data.isReturnedForRevision ? 'Reason for Revision:' : 'Reason for Rejection:'}</h3>
|
||||||
<div style="padding: 15px; background-color: #f8f9fa; border-left: 4px solid #dc3545; border-radius: 4px; overflow-x: auto;">
|
<div style="padding: 15px; background-color: #f8f9fa; border-left: 4px solid ${data.isReturnedForRevision ? '#ffc107' : '#dc3545'}; border-radius: 4px; overflow-x: auto;">
|
||||||
${wrapRichText(data.rejectionReason)}
|
${wrapRichText(data.rejectionReason)}
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
@ -99,9 +101,13 @@ export function getRejectionNotificationEmail(data: RejectionNotificationData):
|
|||||||
<div style="padding: 20px; background-color: #fff3cd; border-left: 4px solid #ffc107; border-radius: 4px; margin-bottom: 30px;">
|
<div style="padding: 20px; background-color: #fff3cd; border-left: 4px solid #ffc107; border-radius: 4px; margin-bottom: 30px;">
|
||||||
<h3 style="margin: 0 0 10px; color: #856404; font-size: 16px; font-weight: 600;">What You Can Do:</h3>
|
<h3 style="margin: 0 0 10px; color: #856404; font-size: 16px; font-weight: 600;">What You Can Do:</h3>
|
||||||
<ul style="margin: 10px 0 0 0; padding-left: 20px; color: #856404; font-size: 14px; line-height: 1.8;">
|
<ul style="margin: 10px 0 0 0; padding-left: 20px; color: #856404; font-size: 14px; line-height: 1.8;">
|
||||||
<li>Review the rejection reason carefully</li>
|
${data.isReturnedForRevision
|
||||||
<li>Make necessary adjustments to your request</li>
|
? `<li>Review the requested changes carefully</li>
|
||||||
<li>Submit a new request with the required changes</li>
|
<li>Adjust the proposal or documents as needed</li>
|
||||||
|
<li>Resubmit the request for approval</li>`
|
||||||
|
: `<li>Review the rejection reason carefully</li>
|
||||||
|
<li>Make necessary adjustments to your request</li>
|
||||||
|
<li>Submit a new request with the required changes</li>`}
|
||||||
<li>Contact ${data.approverName} for more clarification if needed</li>
|
<li>Contact ${data.approverName} for more clarification if needed</li>
|
||||||
</ul>
|
</ul>
|
||||||
</div>
|
</div>
|
||||||
|
|||||||
@ -12,6 +12,22 @@ export interface BaseEmailData {
|
|||||||
companyName: string;
|
companyName: string;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
export interface Form16EmailData {
|
||||||
|
recipientName: string;
|
||||||
|
/** Email title shown in header + subject */
|
||||||
|
title: string;
|
||||||
|
/** Already-sanitized HTML (escaped) message body */
|
||||||
|
messageHtml: string;
|
||||||
|
/** Optional: request UUID for link + context */
|
||||||
|
requestId?: string;
|
||||||
|
/** Optional: deep link to /request/:requestId */
|
||||||
|
viewDetailsLink?: string;
|
||||||
|
/** Brand name */
|
||||||
|
companyName: string;
|
||||||
|
/** Controls header color */
|
||||||
|
variant?: 'info' | 'success' | 'warning' | 'error';
|
||||||
|
}
|
||||||
|
|
||||||
export interface RequestCreatedData extends BaseEmailData {
|
export interface RequestCreatedData extends BaseEmailData {
|
||||||
initiatorName: string;
|
initiatorName: string;
|
||||||
firstApproverName: string;
|
firstApproverName: string;
|
||||||
@ -31,6 +47,7 @@ export interface ApprovalRequestData extends BaseEmailData {
|
|||||||
requestType: string;
|
requestType: string;
|
||||||
requestDescription: string;
|
requestDescription: string;
|
||||||
priority: 'LOW' | 'MEDIUM' | 'HIGH' | 'CRITICAL';
|
priority: 'LOW' | 'MEDIUM' | 'HIGH' | 'CRITICAL';
|
||||||
|
customMessage?: string;
|
||||||
}
|
}
|
||||||
|
|
||||||
export interface MultiApproverRequestData extends ApprovalRequestData {
|
export interface MultiApproverRequestData extends ApprovalRequestData {
|
||||||
@ -64,6 +81,7 @@ export interface RejectionNotificationData extends BaseEmailData {
|
|||||||
rejectionTime: string;
|
rejectionTime: string;
|
||||||
requestType: string;
|
requestType: string;
|
||||||
rejectionReason: string;
|
rejectionReason: string;
|
||||||
|
isReturnedForRevision?: boolean;
|
||||||
}
|
}
|
||||||
|
|
||||||
export interface TATReminderData extends BaseEmailData {
|
export interface TATReminderData extends BaseEmailData {
|
||||||
@ -234,6 +252,7 @@ export interface DealerProposalRequiredData extends BaseEmailData {
|
|||||||
priority: 'LOW' | 'MEDIUM' | 'HIGH' | 'CRITICAL';
|
priority: 'LOW' | 'MEDIUM' | 'HIGH' | 'CRITICAL';
|
||||||
tatHours?: number;
|
tatHours?: number;
|
||||||
dueDate?: string;
|
dueDate?: string;
|
||||||
|
customMessage?: string;
|
||||||
}
|
}
|
||||||
|
|
||||||
export interface AdditionalDocumentAddedData extends BaseEmailData {
|
export interface AdditionalDocumentAddedData extends BaseEmailData {
|
||||||
|
|||||||
25
src/jobs/creditNoteSyncJob.ts
Normal file
25
src/jobs/creditNoteSyncJob.ts
Normal file
@ -0,0 +1,25 @@
|
|||||||
|
import { creditNoteSyncService } from '../services/creditNoteSync.service';
|
||||||
|
import logger from '../utils/logger';
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Main sync function to process all outgoing files
|
||||||
|
* Delegates to creditNoteSyncService
|
||||||
|
*/
|
||||||
|
export async function syncCreditNotes(): Promise<void> {
|
||||||
|
await creditNoteSyncService.syncCreditNotes();
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Starts the background sync job
|
||||||
|
*/
|
||||||
|
export function startCreditNoteSyncJob(): void {
|
||||||
|
const intervalMinutes = Number(process.env.CREDIT_NOTE_SYNC_INTERVAL_MINUTES) || 5;
|
||||||
|
logger.info(`[CreditNoteSyncJob] Background job initialized (Interval: ${intervalMinutes}m)`);
|
||||||
|
|
||||||
|
// Run once immediately on startup
|
||||||
|
syncCreditNotes().catch(err => logger.error('[CreditNoteSyncJob] Initial sync failed:', err));
|
||||||
|
|
||||||
|
setInterval(() => {
|
||||||
|
syncCreditNotes().catch(err => logger.error('[CreditNoteSyncJob] Periodic sync failed:', err));
|
||||||
|
}, intervalMinutes * 60 * 1000);
|
||||||
|
}
|
||||||
81
src/jobs/form16NotificationJob.ts
Normal file
81
src/jobs/form16NotificationJob.ts
Normal file
@ -0,0 +1,81 @@
|
|||||||
|
import { getForm16Config } from '../services/form16Config.service';
|
||||||
|
import { runForm16AlertSubmitJob, runForm16ReminderJob, runForm16Remind26AsUploadJob } from '../services/form16Notification.service';
|
||||||
|
import logger from '../utils/logger';
|
||||||
|
|
||||||
|
const TZ = process.env.TZ || 'Asia/Kolkata';
|
||||||
|
// 26AS reminder is quarter-based; we evaluate once daily at this fixed time.
|
||||||
|
const RE_26AS_REMINDER_CHECK_TIME = '08:30';
|
||||||
|
|
||||||
|
/** Last date (YYYY-MM-DD) we ran the alert job in the configured timezone. */
|
||||||
|
let lastAlertRunDate: string | null = null;
|
||||||
|
/** Last date (YYYY-MM-DD) we ran the reminder job in the configured timezone. */
|
||||||
|
let lastReminderRunDate: string | null = null;
|
||||||
|
/** Last date (YYYY-MM-DD) we ran the 26AS upload reminder job in the configured timezone. */
|
||||||
|
let last26AsReminderRunDate: string | null = null;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get current time in configured TZ as HH:mm (24h, zero-padded).
|
||||||
|
*/
|
||||||
|
function getCurrentTimeHHmm(): string {
|
||||||
|
const now = new Date();
|
||||||
|
const str = now.toLocaleTimeString('en-CA', { hour: '2-digit', minute: '2-digit', hour12: false, timeZone: TZ });
|
||||||
|
const [h, m] = str.split(':').map((x) => parseInt(x, 10));
|
||||||
|
if (Number.isNaN(h) || Number.isNaN(m)) return '00:00';
|
||||||
|
return `${String(h).padStart(2, '0')}:${String(m).padStart(2, '0')}`;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get current date in configured TZ as YYYY-MM-DD.
|
||||||
|
*/
|
||||||
|
function getCurrentDateString(): string {
|
||||||
|
const now = new Date();
|
||||||
|
const str = now.toLocaleDateString('en-CA', { timeZone: TZ });
|
||||||
|
return str;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Tick: run every minute; if current time matches config run-at time and we haven't run today, run the job.
|
||||||
|
*/
|
||||||
|
async function form16NotificationTick(): Promise<void> {
|
||||||
|
try {
|
||||||
|
const config = await getForm16Config();
|
||||||
|
const nowTime = getCurrentTimeHHmm();
|
||||||
|
const today = getCurrentDateString();
|
||||||
|
|
||||||
|
const alertTime = (config.alertSubmitForm16RunAtTime || '').trim();
|
||||||
|
if (config.alertSubmitForm16Enabled && alertTime && alertTime === nowTime && lastAlertRunDate !== today) {
|
||||||
|
lastAlertRunDate = today;
|
||||||
|
logger.info(`[Form16 Job] Running alert submit job (scheduled at ${alertTime})`);
|
||||||
|
await runForm16AlertSubmitJob();
|
||||||
|
}
|
||||||
|
|
||||||
|
const reminderTime = (config.reminderRunAtTime || '').trim();
|
||||||
|
if (config.reminderNotificationEnabled && reminderTime && reminderTime === nowTime && lastReminderRunDate !== today) {
|
||||||
|
lastReminderRunDate = today;
|
||||||
|
logger.info(`[Form16 Job] Running reminder job (scheduled at ${reminderTime})`);
|
||||||
|
await runForm16ReminderJob();
|
||||||
|
}
|
||||||
|
|
||||||
|
if (config.reminder26AsUploadEnabled && RE_26AS_REMINDER_CHECK_TIME === nowTime && last26AsReminderRunDate !== today) {
|
||||||
|
last26AsReminderRunDate = today;
|
||||||
|
logger.info(`[Form16 Job] Running 26AS upload reminder job (daily check at ${RE_26AS_REMINDER_CHECK_TIME})`);
|
||||||
|
await runForm16Remind26AsUploadJob();
|
||||||
|
}
|
||||||
|
} catch (e) {
|
||||||
|
logger.error('[Form16 Job] Tick error:', e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Start Form 16 scheduled notification jobs.
|
||||||
|
* Schedule is read from Form 16 admin config (alertSubmitForm16RunAtTime, reminderRunAtTime).
|
||||||
|
* A tick runs every minute; when server time (in configured TZ) matches the run-at time, the job runs once that day.
|
||||||
|
*/
|
||||||
|
export function startForm16NotificationJobs(): void {
|
||||||
|
const cron = require('node-cron');
|
||||||
|
cron.schedule('* * * * *', () => {
|
||||||
|
form16NotificationTick();
|
||||||
|
}, { timezone: TZ });
|
||||||
|
|
||||||
|
logger.info(`[Form16 Job] Form 16 notification jobs scheduled (config-driven run times, TZ: ${TZ})`);
|
||||||
|
}
|
||||||
193
src/jobs/form16SapResponseJob.ts
Normal file
193
src/jobs/form16SapResponseJob.ts
Normal file
@ -0,0 +1,193 @@
|
|||||||
|
import fs from 'fs';
|
||||||
|
import path from 'path';
|
||||||
|
import logger from '../utils/logger';
|
||||||
|
import { wfmFileService } from '../services/wfmFile.service';
|
||||||
|
import {
|
||||||
|
Form16CreditNote,
|
||||||
|
Form16DebitNote,
|
||||||
|
Form16SapResponse,
|
||||||
|
From16SapReadFile,
|
||||||
|
} from '../models';
|
||||||
|
|
||||||
|
type CsvRow = Record<string, string | undefined>;
|
||||||
|
|
||||||
|
function extractCsvFields(r: CsvRow) {
|
||||||
|
const trnsUniqNo = (r.TRNS_UNIQ_NO || r.TRNSUNIQNO || '').trim() || null;
|
||||||
|
const tdsTransId = (r.TDS_TRNS_ID || '').trim() || null;
|
||||||
|
const docNo = (r.DOC_NO || r.DOCNO || '').trim() || null;
|
||||||
|
const msgTyp = (r.MSG_TYP || r.MSGTYP || '').trim() || null;
|
||||||
|
const message = (r.MESSAGE || '').trim() || null;
|
||||||
|
return { trnsUniqNo, tdsTransId, docNo, msgTyp, message };
|
||||||
|
}
|
||||||
|
|
||||||
|
function isUsableRow(r: CsvRow): boolean {
|
||||||
|
const { tdsTransId } = extractCsvFields(r);
|
||||||
|
if (!tdsTransId) return false;
|
||||||
|
const upper = tdsTransId.toUpperCase();
|
||||||
|
if (upper === 'TDS_TRNS_ID' || upper === 'MSG_TYP' || upper === 'MESSAGE') return false;
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
async function saveRowsAndUpdateNotes(rows: CsvRow[]): Promise<{ totalRecords: number; totalCreditNotes: number; totalDebitNotes: number }> {
|
||||||
|
let totalRecords = 0;
|
||||||
|
let totalCreditNotes = 0;
|
||||||
|
let totalDebitNotes = 0;
|
||||||
|
|
||||||
|
for (const row of rows) {
|
||||||
|
if (!isUsableRow(row)) continue;
|
||||||
|
const parsed = extractCsvFields(row);
|
||||||
|
if (!parsed.tdsTransId) continue;
|
||||||
|
|
||||||
|
await (Form16SapResponse as any).create({
|
||||||
|
trnsUniqNo: parsed.trnsUniqNo,
|
||||||
|
tdsTransId: parsed.tdsTransId,
|
||||||
|
docNo: parsed.docNo,
|
||||||
|
msgTyp: parsed.msgTyp,
|
||||||
|
message: parsed.message,
|
||||||
|
createdAt: new Date(),
|
||||||
|
updatedAt: new Date(),
|
||||||
|
});
|
||||||
|
totalRecords++;
|
||||||
|
|
||||||
|
const idUpper = parsed.tdsTransId.toUpperCase();
|
||||||
|
if (idUpper.startsWith('CN')) {
|
||||||
|
totalCreditNotes++;
|
||||||
|
await (Form16CreditNote as any).update(
|
||||||
|
{
|
||||||
|
sapDocumentNumber: parsed.docNo,
|
||||||
|
status: 'completed',
|
||||||
|
},
|
||||||
|
{ where: { creditNoteNumber: parsed.tdsTransId } }
|
||||||
|
);
|
||||||
|
} else if (idUpper.startsWith('DN')) {
|
||||||
|
totalDebitNotes++;
|
||||||
|
await (Form16DebitNote as any).update(
|
||||||
|
{
|
||||||
|
sapDocumentNumber: parsed.docNo,
|
||||||
|
status: 'completed',
|
||||||
|
},
|
||||||
|
{ where: { debitNoteNumber: parsed.tdsTransId } }
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return { totalRecords, totalCreditNotes, totalDebitNotes };
|
||||||
|
}
|
||||||
|
|
||||||
|
async function processOutgoingFile(fileName: string, resolvedOutgoingDir: string): Promise<{ totalRecords: number; totalCreditNotes: number; totalDebitNotes: number } | null> {
|
||||||
|
const alreadyRead = await (From16SapReadFile as any).findOne({
|
||||||
|
where: { fileName },
|
||||||
|
attributes: ['id'],
|
||||||
|
});
|
||||||
|
if (alreadyRead) {
|
||||||
|
logger.debug(`[Form16 SAP Job] Skipping already-read file: ${fileName}`);
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
const rows = (await wfmFileService.readForm16OutgoingResponseByPath(path.join(resolvedOutgoingDir, fileName))) as CsvRow[];
|
||||||
|
const counts = await saveRowsAndUpdateNotes(rows || []);
|
||||||
|
|
||||||
|
await (From16SapReadFile as any).create({
|
||||||
|
fileName,
|
||||||
|
totalRecords: counts.totalRecords,
|
||||||
|
totalCreditNotes: counts.totalCreditNotes,
|
||||||
|
totalDebitNotes: counts.totalDebitNotes,
|
||||||
|
createdAt: new Date(),
|
||||||
|
updatedAt: new Date(),
|
||||||
|
});
|
||||||
|
|
||||||
|
// Delete source CSV only after successful DB persistence + read-marking.
|
||||||
|
// SAP team keeps a parallel archive copy, so main OUTGOING can be safely cleaned.
|
||||||
|
const sourcePath = path.join(resolvedOutgoingDir, fileName);
|
||||||
|
try {
|
||||||
|
if (fs.existsSync(sourcePath)) {
|
||||||
|
fs.unlinkSync(sourcePath);
|
||||||
|
logger.info(`[Form16 SAP Job] Deleted processed OUTGOING file: ${sourcePath}`);
|
||||||
|
}
|
||||||
|
} catch (e) {
|
||||||
|
// Keep processing successful even if cleanup fails; next pull will skip due to read marker.
|
||||||
|
logger.warn(`[Form16 SAP Job] Could not delete processed file: ${sourcePath}`, e);
|
||||||
|
}
|
||||||
|
|
||||||
|
return counts;
|
||||||
|
}
|
||||||
|
|
||||||
|
// ─── Public API (called by Pull button controller) ────────────────────────────
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Scan both OUTGOING dirs, read every CSV, match to a DB note via TDS_TRNS_ID (primary),
|
||||||
|
* TRNS_UNIQ_NO, CLAIM_NUMBER, or filename (fallbacks), save all known CSV columns to their
|
||||||
|
* own DB columns and any extra columns to raw_row.
|
||||||
|
*
|
||||||
|
* Called by POST /form16/sap/pull – no scheduler, Pull button is the only trigger.
|
||||||
|
*/
|
||||||
|
export async function runForm16SapResponseIngestionOnce(): Promise<{
|
||||||
|
processed: number;
|
||||||
|
creditProcessed: number;
|
||||||
|
debitProcessed: number;
|
||||||
|
filesProcessed: number;
|
||||||
|
}> {
|
||||||
|
let creditProcessed = 0;
|
||||||
|
let debitProcessed = 0;
|
||||||
|
let filesProcessed = 0;
|
||||||
|
|
||||||
|
const RELATIVE_FORM16_OUT = path.join('WFM-QRE', 'OUTGOING', 'WFM_SAP_MAIN', 'FORM16');
|
||||||
|
const resolvedDirs = [
|
||||||
|
path.dirname(wfmFileService.getForm16OutgoingPath('__probe__.csv', 'credit')),
|
||||||
|
path.dirname(wfmFileService.getForm16OutgoingPath('__probe__.csv', 'debit')),
|
||||||
|
];
|
||||||
|
const dirs: Array<{ dir: string; relSubdir: string }> = [...new Set(resolvedDirs)].map((d) => ({
|
||||||
|
dir: d,
|
||||||
|
relSubdir: RELATIVE_FORM16_OUT,
|
||||||
|
}));
|
||||||
|
|
||||||
|
try {
|
||||||
|
const base = process.env.WFM_BASE_PATH || 'C:\\WFM';
|
||||||
|
|
||||||
|
for (const { dir, relSubdir } of dirs) {
|
||||||
|
let abs = path.isAbsolute(dir) ? dir : path.join(base, dir);
|
||||||
|
|
||||||
|
if (!fs.existsSync(abs)) {
|
||||||
|
const cwdFallback = path.join(process.cwd(), relSubdir);
|
||||||
|
if (fs.existsSync(cwdFallback)) {
|
||||||
|
abs = cwdFallback;
|
||||||
|
logger.info(`[Form16 SAP Job] OUTGOING dir resolved via cwd: ${abs}`);
|
||||||
|
} else {
|
||||||
|
logger.warn(
|
||||||
|
`[Form16 SAP Job] OUTGOING dir not found. Tried: ${abs} | ${cwdFallback}. ` +
|
||||||
|
`Set WFM_BASE_PATH to the folder containing WFM-QRE.`
|
||||||
|
);
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const files = fs.readdirSync(abs).filter((f) => f.toLowerCase().endsWith('.csv'));
|
||||||
|
logger.info(`[Form16 SAP Job] OUTGOING dir: ${abs} → ${files.length} CSV file(s)${files.length ? ': ' + files.join(', ') : ''}`);
|
||||||
|
|
||||||
|
for (const f of files) {
|
||||||
|
try {
|
||||||
|
const counts = await processOutgoingFile(f, abs);
|
||||||
|
if (!counts) continue;
|
||||||
|
filesProcessed++;
|
||||||
|
creditProcessed += counts.totalCreditNotes;
|
||||||
|
debitProcessed += counts.totalDebitNotes;
|
||||||
|
} catch (e) {
|
||||||
|
logger.error(`[Form16 SAP Job] Error processing file ${f}:`, e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} catch (e) {
|
||||||
|
logger.error('[Form16 SAP Job] Ingestion error:', e);
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.info(
|
||||||
|
`[Form16 SAP Job] Pull complete – credit: ${creditProcessed}, debit: ${debitProcessed}, total: ${creditProcessed + debitProcessed}`
|
||||||
|
);
|
||||||
|
|
||||||
|
return {
|
||||||
|
processed: creditProcessed + debitProcessed,
|
||||||
|
creditProcessed,
|
||||||
|
debitProcessed,
|
||||||
|
filesProcessed,
|
||||||
|
};
|
||||||
|
}
|
||||||
@ -12,6 +12,7 @@ interface JwtPayload {
|
|||||||
employeeId: string;
|
employeeId: string;
|
||||||
email: string;
|
email: string;
|
||||||
role: string;
|
role: string;
|
||||||
|
sessionToken: string;
|
||||||
iat: number;
|
iat: number;
|
||||||
exp: number;
|
exp: number;
|
||||||
}
|
}
|
||||||
@ -70,6 +71,15 @@ export const authenticateToken = async (
|
|||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if (!decoded.sessionToken || decoded.sessionToken !== user.sessionToken) {
|
||||||
|
res.status(401).json({
|
||||||
|
success: false,
|
||||||
|
errorCode: 'SESSION_SUPERSEDED',
|
||||||
|
message: `You have been logged out because an active session was detected from ${user.lastLoginDevice || 'another device'}.`
|
||||||
|
});
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
// Attach user info to request object
|
// Attach user info to request object
|
||||||
req.user = {
|
req.user = {
|
||||||
userId: user.userId,
|
userId: user.userId,
|
||||||
@ -117,12 +127,14 @@ export const optionalAuth = async (
|
|||||||
const user = await User.findByPk(decoded.userId);
|
const user = await User.findByPk(decoded.userId);
|
||||||
|
|
||||||
if (user && user.isActive) {
|
if (user && user.isActive) {
|
||||||
req.user = {
|
if (!decoded.sessionToken || decoded.sessionToken === user.sessionToken) {
|
||||||
userId: user.userId,
|
req.user = {
|
||||||
email: user.email,
|
userId: user.userId,
|
||||||
employeeId: user.employeeId || null, // Optional - schema not finalized
|
email: user.email,
|
||||||
role: user.role // Keep uppercase: USER, MANAGEMENT, ADMIN
|
employeeId: user.employeeId || null, // Optional - schema not finalized
|
||||||
};
|
role: user.role // Keep uppercase: USER, MANAGEMENT, ADMIN
|
||||||
|
};
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@ -1,5 +1,14 @@
|
|||||||
import cors from 'cors';
|
import cors from 'cors';
|
||||||
|
|
||||||
|
/** Vite dev: localhost vs 127.0.0.1, and ports 3000–3010 when 3000/3001 are already taken. */
|
||||||
|
function getDevViteOrigins(): string[] {
|
||||||
|
const out: string[] = [];
|
||||||
|
for (let port = 3000; port <= 3010; port++) {
|
||||||
|
out.push(`http://localhost:${port}`, `http://127.0.0.1:${port}`);
|
||||||
|
}
|
||||||
|
return out;
|
||||||
|
}
|
||||||
|
|
||||||
// Configure allowed origins - uses only FRONTEND_URL from environment
|
// Configure allowed origins - uses only FRONTEND_URL from environment
|
||||||
const getAllowedOrigins = (): string[] | boolean => {
|
const getAllowedOrigins = (): string[] | boolean => {
|
||||||
const frontendUrl = process.env.FRONTEND_URL;
|
const frontendUrl = process.env.FRONTEND_URL;
|
||||||
@ -15,10 +24,9 @@ const getAllowedOrigins = (): string[] | boolean => {
|
|||||||
console.error(' Multiple origins: FRONTEND_URL=https://app1.com,https://app2.com');
|
console.error(' Multiple origins: FRONTEND_URL=https://app1.com,https://app2.com');
|
||||||
return [];
|
return [];
|
||||||
} else {
|
} else {
|
||||||
// Dev fallback: allow localhost:3000
|
console.warn('⚠️ WARNING: FRONTEND_URL not set. Defaulting Vite dev origins (localhost + 127.0.0.1).');
|
||||||
console.warn('⚠️ WARNING: FRONTEND_URL not set. Defaulting to http://localhost:3000 for development.');
|
console.warn(' Set FRONTEND_URL in .env if you use another host/port.');
|
||||||
console.warn(' To avoid this warning, set FRONTEND_URL=http://localhost:3000 in your .env file');
|
return getDevViteOrigins();
|
||||||
return ['http://localhost:3000'];
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -31,11 +39,18 @@ const getAllowedOrigins = (): string[] | boolean => {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Parse comma-separated URLs or use single URL
|
// Parse comma-separated URLs or use single URL
|
||||||
const origins = frontendUrl.split(',').map(url => url.trim()).filter(Boolean);
|
let origins = frontendUrl.split(',').map(url => url.trim()).filter(Boolean);
|
||||||
|
|
||||||
if (origins.length === 0) {
|
if (origins.length === 0) {
|
||||||
console.error('❌ ERROR: FRONTEND_URL is set but contains no valid URLs!');
|
console.error('❌ ERROR: FRONTEND_URL is set but contains no valid URLs!');
|
||||||
return isProduction ? [] : ['http://localhost:3000']; // Fallback for development
|
return isProduction ? [] : getDevViteOrigins(); // Fallback for development
|
||||||
|
}
|
||||||
|
|
||||||
|
// In development allow common Vite host/port combos (avoids CORS when Vite bumps to 3002+)
|
||||||
|
if (!isProduction) {
|
||||||
|
for (const o of getDevViteOrigins()) {
|
||||||
|
if (!origins.includes(o)) origins.push(o);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
console.log(`✅ CORS: Allowing origins from FRONTEND_URL: ${origins.join(', ')}`);
|
console.log(`✅ CORS: Allowing origins from FRONTEND_URL: ${origins.join(', ')}`);
|
||||||
|
|||||||
34
src/middlewares/cpcPermission.middleware.ts
Normal file
34
src/middlewares/cpcPermission.middleware.ts
Normal file
@ -0,0 +1,34 @@
|
|||||||
|
/**
|
||||||
|
* CPC-CSD permission middleware – enforces API-driven viewer list.
|
||||||
|
* Use after authenticateToken so req.user is available.
|
||||||
|
*/
|
||||||
|
|
||||||
|
import { Request, Response, NextFunction } from 'express';
|
||||||
|
import { ResponseHandler } from '../utils/responseHandler';
|
||||||
|
import { canAccessCpcCdc } from '../services/cpcPermission.service';
|
||||||
|
|
||||||
|
export const requireCpcCdcAccess = async (
|
||||||
|
req: Request,
|
||||||
|
res: Response,
|
||||||
|
next: NextFunction
|
||||||
|
): Promise<void> => {
|
||||||
|
try {
|
||||||
|
const user = req.user;
|
||||||
|
if (!user?.userId || !user?.email) {
|
||||||
|
ResponseHandler.unauthorized(res, 'Authentication required');
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const role = (user as any).role as string | undefined;
|
||||||
|
const allowed = await canAccessCpcCdc(user.email, role);
|
||||||
|
if (!allowed) {
|
||||||
|
ResponseHandler.forbidden(res, 'You do not have permission to access CPC-CSD');
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
next();
|
||||||
|
} catch (error) {
|
||||||
|
ResponseHandler.error(res, 'Permission check failed', 500, error instanceof Error ? error.message : 'Unknown error');
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
@ -2,7 +2,7 @@ import { Request, Response, NextFunction } from 'express';
|
|||||||
import logger from '../utils/logger';
|
import logger from '../utils/logger';
|
||||||
|
|
||||||
export const errorHandlerMiddleware = (
|
export const errorHandlerMiddleware = (
|
||||||
error: Error,
|
error: Error & { code?: string },
|
||||||
req: Request,
|
req: Request,
|
||||||
res: Response,
|
res: Response,
|
||||||
next: NextFunction
|
next: NextFunction
|
||||||
@ -15,6 +15,24 @@ export const errorHandlerMiddleware = (
|
|||||||
ip: req.ip,
|
ip: req.ip,
|
||||||
});
|
});
|
||||||
|
|
||||||
|
// Multer errors (e.g. LIMIT_FILE_SIZE, file filter) → 400
|
||||||
|
if (error.code === 'LIMIT_FILE_SIZE') {
|
||||||
|
res.status(400).json({
|
||||||
|
success: false,
|
||||||
|
message: 'File too large. Maximum size is 15MB.',
|
||||||
|
timestamp: new Date(),
|
||||||
|
});
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
if (error.message === 'Only PDF files are allowed' || (error as any).code === 'LIMIT_UNEXPECTED_FILE') {
|
||||||
|
res.status(400).json({
|
||||||
|
success: false,
|
||||||
|
message: error.message || 'Invalid file type.',
|
||||||
|
timestamp: new Date(),
|
||||||
|
});
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
res.status(500).json({
|
res.status(500).json({
|
||||||
success: false,
|
success: false,
|
||||||
message: 'Internal Server Error',
|
message: 'Internal Server Error',
|
||||||
|
|||||||
95
src/middlewares/form16Permission.middleware.ts
Normal file
95
src/middlewares/form16Permission.middleware.ts
Normal file
@ -0,0 +1,95 @@
|
|||||||
|
/**
|
||||||
|
* Form 16 permission middleware – enforces API-driven config (submission viewers, 26AS viewers).
|
||||||
|
* Use after authenticateToken so req.user is set.
|
||||||
|
*/
|
||||||
|
|
||||||
|
import { Request, Response, NextFunction } from 'express';
|
||||||
|
import { ResponseHandler } from '../utils/responseHandler';
|
||||||
|
import { canViewForm16Submission, canView26As } from '../services/form16Permission.service';
|
||||||
|
import { getDealerCodeForUser } from '../services/form16.service';
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Require RE user only (block dealers). Use for endpoints that are RE-only (e.g. withdraw credit note, non-submitted dealers).
|
||||||
|
* Call after authenticateToken; use before or with requireForm16SubmissionAccess as needed.
|
||||||
|
*/
|
||||||
|
export const requireForm16ReOnly = async (
|
||||||
|
req: Request,
|
||||||
|
res: Response,
|
||||||
|
next: NextFunction
|
||||||
|
): Promise<void> => {
|
||||||
|
try {
|
||||||
|
const user = req.user;
|
||||||
|
if (!user?.userId || !user?.email) {
|
||||||
|
ResponseHandler.unauthorized(res, 'Authentication required');
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
const dealerCode = await getDealerCodeForUser(user.userId);
|
||||||
|
if (dealerCode) {
|
||||||
|
ResponseHandler.forbidden(res, 'This action is only available to RE users, not dealers');
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
next();
|
||||||
|
} catch (error) {
|
||||||
|
ResponseHandler.error(res, 'Permission check failed', 500, error instanceof Error ? error.message : 'Unknown error');
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Require Form 16 submission data access.
|
||||||
|
* Admin has full access. Dealers are always allowed. RE users must be in submissionViewerEmails (or list empty).
|
||||||
|
*/
|
||||||
|
export const requireForm16SubmissionAccess = async (
|
||||||
|
req: Request,
|
||||||
|
res: Response,
|
||||||
|
next: NextFunction
|
||||||
|
): Promise<void> => {
|
||||||
|
try {
|
||||||
|
const user = req.user;
|
||||||
|
if (!user?.userId || !user?.email) {
|
||||||
|
ResponseHandler.unauthorized(res, 'Authentication required');
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
if ((user as any).role === 'ADMIN') {
|
||||||
|
next();
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
const allowed = await canViewForm16Submission(user.email, user.userId, (user as any).role);
|
||||||
|
if (!allowed) {
|
||||||
|
ResponseHandler.forbidden(res, 'You do not have permission to view Form 16 submission data');
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
next();
|
||||||
|
} catch (error) {
|
||||||
|
ResponseHandler.error(res, 'Permission check failed', 500, error instanceof Error ? error.message : 'Unknown error');
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Require 26AS access (view/upload/manage 26AS data).
|
||||||
|
* Admin has full access. Otherwise user must be in twentySixAsViewerEmails (or list empty).
|
||||||
|
*/
|
||||||
|
export const requireForm1626AsAccess = async (
|
||||||
|
req: Request,
|
||||||
|
res: Response,
|
||||||
|
next: NextFunction
|
||||||
|
): Promise<void> => {
|
||||||
|
try {
|
||||||
|
const user = req.user;
|
||||||
|
if (!user?.userId || !user?.email) {
|
||||||
|
ResponseHandler.unauthorized(res, 'Authentication required');
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
if ((user as any).role === 'ADMIN') {
|
||||||
|
next();
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
const allowed = await canView26As(user.email, (user as any).role);
|
||||||
|
if (!allowed) {
|
||||||
|
ResponseHandler.forbidden(res, 'You do not have permission to access 26AS data');
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
next();
|
||||||
|
} catch (error) {
|
||||||
|
ResponseHandler.error(res, 'Permission check failed', 500, error instanceof Error ? error.message : 'Unknown error');
|
||||||
|
}
|
||||||
|
};
|
||||||
@ -390,6 +390,10 @@ export function recordAIServiceCall(provider: string, operation: string, success
|
|||||||
// QUEUE METRICS COLLECTION
|
// QUEUE METRICS COLLECTION
|
||||||
// ============================================================================
|
// ============================================================================
|
||||||
|
|
||||||
|
// Throttle queue-metrics error logs when Redis is down (avoid flooding terminal)
|
||||||
|
const queueMetricsLastErrorLog = new Map<string, number>();
|
||||||
|
const QUEUE_METRICS_ERROR_LOG_INTERVAL_MS = 5 * 60 * 1000; // 5 minutes
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Update queue metrics for a specific queue
|
* Update queue metrics for a specific queue
|
||||||
* Call this periodically or on queue events
|
* Call this periodically or on queue events
|
||||||
@ -410,8 +414,13 @@ export async function updateQueueMetrics(queueName: string, queue: any): Promise
|
|||||||
queueJobsFailed.set({ queue_name: queueName }, failed);
|
queueJobsFailed.set({ queue_name: queueName }, failed);
|
||||||
queueJobsDelayed.set({ queue_name: queueName }, delayed);
|
queueJobsDelayed.set({ queue_name: queueName }, delayed);
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
// Silently fail to avoid breaking metrics collection
|
// Log at most once per queue per 5 min when Redis is down so terminal stays readable
|
||||||
console.error(`[Metrics] Failed to update queue metrics for ${queueName}:`, error);
|
const now = Date.now();
|
||||||
|
const last = queueMetricsLastErrorLog.get(queueName) ?? 0;
|
||||||
|
if (now - last >= QUEUE_METRICS_ERROR_LOG_INTERVAL_MS) {
|
||||||
|
queueMetricsLastErrorLog.set(queueName, now);
|
||||||
|
console.warn(`[Metrics] Queue metrics unavailable for ${queueName} (Redis may be down). Next log in 5 min.`);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@ -2,133 +2,15 @@
|
|||||||
* Sanitization Middleware
|
* Sanitization Middleware
|
||||||
* Sanitizes string inputs in req.body and req.query to prevent stored XSS.
|
* Sanitizes string inputs in req.body and req.query to prevent stored XSS.
|
||||||
*
|
*
|
||||||
* Uses TWO strategies:
|
* Uses the unified sanitizeObject utility from @utils/sanitizer.
|
||||||
* 1. STRICT — strips ALL HTML tags (for normal text fields like names, emails, titles)
|
|
||||||
* 2. PERMISSIVE — allows safe formatting tags (for rich text fields like description, message, comments)
|
|
||||||
*
|
*
|
||||||
* This middleware runs AFTER body parsing and BEFORE route handlers.
|
* This middleware runs AFTER body parsing and BEFORE route handlers.
|
||||||
* File upload routes (multipart) are skipped — those are handled
|
* File upload routes (multipart) are skipped — those are handled
|
||||||
* by the malwareScan middleware pipeline.
|
* by the malwareScan middleware pipeline (but can be manually sanitized in controllers).
|
||||||
*/
|
*/
|
||||||
|
|
||||||
import { Request, Response, NextFunction } from 'express';
|
import { Request, Response, NextFunction } from 'express';
|
||||||
import sanitizeHtml from 'sanitize-html';
|
import { sanitizeObject, sanitizeStrict } from '@utils/sanitizer';
|
||||||
|
|
||||||
/**
|
|
||||||
* Fields that intentionally store HTML from rich text editors.
|
|
||||||
* These get PERMISSIVE sanitization (safe formatting tags allowed).
|
|
||||||
* All other string fields get STRICT sanitization (all tags stripped).
|
|
||||||
*/
|
|
||||||
const RICH_TEXT_FIELDS = new Set([
|
|
||||||
'description',
|
|
||||||
'requestDescription',
|
|
||||||
'message',
|
|
||||||
'content',
|
|
||||||
'comments',
|
|
||||||
'rejectionReason',
|
|
||||||
'pauseReason',
|
|
||||||
'conclusionRemark',
|
|
||||||
'aiGeneratedRemark',
|
|
||||||
'finalRemark',
|
|
||||||
'closingRemarks',
|
|
||||||
'effectiveFinalRemark',
|
|
||||||
'keyDiscussionPoints',
|
|
||||||
'keyPoints',
|
|
||||||
'remarksText',
|
|
||||||
'remark',
|
|
||||||
'remarks',
|
|
||||||
'feedback',
|
|
||||||
'note',
|
|
||||||
'notes',
|
|
||||||
'skipReason',
|
|
||||||
]);
|
|
||||||
|
|
||||||
// Strict config: zero allowed tags, zero allowed attributes
|
|
||||||
const strictSanitizeConfig: sanitizeHtml.IOptions = {
|
|
||||||
allowedTags: [],
|
|
||||||
allowedAttributes: {},
|
|
||||||
allowedIframeHostnames: [],
|
|
||||||
disallowedTagsMode: 'discard',
|
|
||||||
nonTextTags: ['script', 'style', 'iframe', 'embed', 'object'],
|
|
||||||
};
|
|
||||||
|
|
||||||
// Permissive config: allow safe formatting tags from rich text editors
|
|
||||||
// Blocks dangerous elements (script, iframe, object, embed, form, input)
|
|
||||||
const permissiveSanitizeConfig: sanitizeHtml.IOptions = {
|
|
||||||
allowedTags: [
|
|
||||||
// Text formatting
|
|
||||||
'p', 'br', 'b', 'i', 'u', 'em', 'strong', 's', 'strike', 'del', 'sub', 'sup', 'mark', 'small',
|
|
||||||
// Headings
|
|
||||||
'h1', 'h2', 'h3', 'h4', 'h5', 'h6',
|
|
||||||
// Lists
|
|
||||||
'ul', 'ol', 'li',
|
|
||||||
// Block elements
|
|
||||||
'blockquote', 'pre', 'code', 'hr', 'div', 'span',
|
|
||||||
// Tables
|
|
||||||
'table', 'thead', 'tbody', 'tfoot', 'tr', 'th', 'td', 'caption', 'colgroup', 'col',
|
|
||||||
// Links (href checked below)
|
|
||||||
'a',
|
|
||||||
// Images (src checked below)
|
|
||||||
'img',
|
|
||||||
],
|
|
||||||
allowedAttributes: {
|
|
||||||
'a': ['href', 'title', 'target', 'rel'],
|
|
||||||
'img': ['src', 'alt', 'title', 'width', 'height'],
|
|
||||||
'td': ['colspan', 'rowspan', 'style'],
|
|
||||||
'th': ['colspan', 'rowspan', 'style'],
|
|
||||||
'span': ['class', 'style'],
|
|
||||||
'div': ['class', 'style'],
|
|
||||||
'pre': ['class', 'style'],
|
|
||||||
'code': ['class', 'style'],
|
|
||||||
'p': ['class', 'style'],
|
|
||||||
'h1': ['class', 'style'],
|
|
||||||
'h2': ['class', 'style'],
|
|
||||||
'h3': ['class', 'style'],
|
|
||||||
'h4': ['class', 'style'],
|
|
||||||
'h5': ['class', 'style'],
|
|
||||||
'h6': ['class', 'style'],
|
|
||||||
'ul': ['class', 'style'],
|
|
||||||
'ol': ['class', 'style', 'start', 'type'],
|
|
||||||
'li': ['class', 'style'],
|
|
||||||
'blockquote': ['class', 'style'],
|
|
||||||
'table': ['class', 'style'],
|
|
||||||
},
|
|
||||||
allowedSchemes: ['http', 'https', 'mailto'],
|
|
||||||
allowedIframeHostnames: [],
|
|
||||||
disallowedTagsMode: 'discard',
|
|
||||||
nonTextTags: ['script', 'style', 'iframe', 'embed', 'object', 'applet', 'form', 'input', 'textarea', 'select', 'button'],
|
|
||||||
};
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Recursively sanitize all string values in an object or array
|
|
||||||
* Uses the field key to decide strict vs permissive sanitization
|
|
||||||
*/
|
|
||||||
function sanitizeValue(value: any, fieldKey?: string): any {
|
|
||||||
if (typeof value === 'string') {
|
|
||||||
const isRichTextField = fieldKey && RICH_TEXT_FIELDS.has(fieldKey);
|
|
||||||
const config = isRichTextField ? permissiveSanitizeConfig : strictSanitizeConfig;
|
|
||||||
return sanitizeHtml(value, config);
|
|
||||||
}
|
|
||||||
if (Array.isArray(value)) {
|
|
||||||
return value.map((item) => sanitizeValue(item, fieldKey));
|
|
||||||
}
|
|
||||||
if (value !== null && typeof value === 'object') {
|
|
||||||
return sanitizeObject(value);
|
|
||||||
}
|
|
||||||
return value;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Sanitize all string properties of an object (recursively)
|
|
||||||
* Passes the key name to sanitizeValue so it can choose the right config
|
|
||||||
*/
|
|
||||||
function sanitizeObject(obj: Record<string, any>): Record<string, any> {
|
|
||||||
const sanitized: Record<string, any> = {};
|
|
||||||
for (const key of Object.keys(obj)) {
|
|
||||||
sanitized[key] = sanitizeValue(obj[key], key);
|
|
||||||
}
|
|
||||||
return sanitized;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Express middleware that sanitizes req.body and req.query
|
* Express middleware that sanitizes req.body and req.query
|
||||||
@ -137,6 +19,7 @@ function sanitizeObject(obj: Record<string, any>): Record<string, any> {
|
|||||||
export const sanitizationMiddleware = (req: Request, _res: Response, next: NextFunction): void => {
|
export const sanitizationMiddleware = (req: Request, _res: Response, next: NextFunction): void => {
|
||||||
try {
|
try {
|
||||||
// Skip multipart requests — file uploads are sanitized by the malware scan pipeline
|
// Skip multipart requests — file uploads are sanitized by the malware scan pipeline
|
||||||
|
// Note: Multipart payloads should be manually sanitized in the controller if used.
|
||||||
const contentType = req.headers['content-type'] || '';
|
const contentType = req.headers['content-type'] || '';
|
||||||
if (contentType.includes('multipart/form-data')) {
|
if (contentType.includes('multipart/form-data')) {
|
||||||
return next();
|
return next();
|
||||||
@ -153,7 +36,7 @@ export const sanitizationMiddleware = (req: Request, _res: Response, next: NextF
|
|||||||
for (const key of Object.keys(req.query)) {
|
for (const key of Object.keys(req.query)) {
|
||||||
const val = req.query[key];
|
const val = req.query[key];
|
||||||
if (typeof val === 'string') {
|
if (typeof val === 'string') {
|
||||||
strictQuery[key] = sanitizeHtml(val, strictSanitizeConfig);
|
strictQuery[key] = sanitizeStrict(val);
|
||||||
} else {
|
} else {
|
||||||
strictQuery[key] = val;
|
strictQuery[key] = val;
|
||||||
}
|
}
|
||||||
|
|||||||
@ -13,12 +13,8 @@ export const validateRequest = (schema: ZodSchema) => {
|
|||||||
next();
|
next();
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
if (error instanceof ZodError) {
|
if (error instanceof ZodError) {
|
||||||
const errorMessages = error.errors.map(err => ({
|
const errorMessage = error.errors.map(err => err.message).join(', ');
|
||||||
field: err.path.join('.'),
|
ResponseHandler.validationError(res, 'Validation failed', errorMessage);
|
||||||
message: err.message,
|
|
||||||
}));
|
|
||||||
|
|
||||||
ResponseHandler.validationError(res, 'Validation failed', errorMessages);
|
|
||||||
} else {
|
} else {
|
||||||
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
|
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
|
||||||
ResponseHandler.error(res, 'Validation error', 400, errorMessage);
|
ResponseHandler.error(res, 'Validation error', 400, errorMessage);
|
||||||
@ -34,12 +30,8 @@ export const validateBody = (schema: ZodSchema) => {
|
|||||||
next();
|
next();
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
if (error instanceof ZodError) {
|
if (error instanceof ZodError) {
|
||||||
const errorMessages = error.errors.map(err => ({
|
const errorMessage = error.errors.map(err => err.message).join(', ');
|
||||||
field: err.path.join('.'),
|
ResponseHandler.validationError(res, 'Request body validation failed', errorMessage);
|
||||||
message: err.message,
|
|
||||||
}));
|
|
||||||
|
|
||||||
ResponseHandler.validationError(res, 'Request body validation failed', errorMessages);
|
|
||||||
} else {
|
} else {
|
||||||
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
|
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
|
||||||
ResponseHandler.error(res, 'Validation error', 400, errorMessage);
|
ResponseHandler.error(res, 'Validation error', 400, errorMessage);
|
||||||
@ -55,12 +47,8 @@ export const validateQuery = (schema: ZodSchema) => {
|
|||||||
next();
|
next();
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
if (error instanceof ZodError) {
|
if (error instanceof ZodError) {
|
||||||
const errorMessages = error.errors.map(err => ({
|
const errorMessage = error.errors.map(err => err.message).join(', ');
|
||||||
field: err.path.join('.'),
|
ResponseHandler.validationError(res, 'Query parameters validation failed', errorMessage);
|
||||||
message: err.message,
|
|
||||||
}));
|
|
||||||
|
|
||||||
ResponseHandler.validationError(res, 'Query parameters validation failed', errorMessages);
|
|
||||||
} else {
|
} else {
|
||||||
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
|
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
|
||||||
ResponseHandler.error(res, 'Validation error', 400, errorMessage);
|
ResponseHandler.error(res, 'Validation error', 400, errorMessage);
|
||||||
@ -76,12 +64,8 @@ export const validateParams = (schema: ZodSchema) => {
|
|||||||
next();
|
next();
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
if (error instanceof ZodError) {
|
if (error instanceof ZodError) {
|
||||||
const errorMessages = error.errors.map(err => ({
|
const errorMessage = error.errors.map(err => err.message).join(', ');
|
||||||
field: err.path.join('.'),
|
ResponseHandler.validationError(res, 'URL parameters validation failed', errorMessage);
|
||||||
message: err.message,
|
|
||||||
}));
|
|
||||||
|
|
||||||
ResponseHandler.validationError(res, 'URL parameters validation failed', errorMessages);
|
|
||||||
} else {
|
} else {
|
||||||
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
|
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
|
||||||
ResponseHandler.error(res, 'Validation error', 400, errorMessage);
|
ResponseHandler.error(res, 'Validation error', 400, errorMessage);
|
||||||
|
|||||||
179
src/migrations/20260220-create-form16-tables.ts
Normal file
179
src/migrations/20260220-create-form16-tables.ts
Normal file
@ -0,0 +1,179 @@
|
|||||||
|
/**
|
||||||
|
* Form 16 integration: form16a_submissions (linked to workflow_requests) and form_16_credit_notes.
|
||||||
|
* Single workflow DB; Form16 submissions appear in Open/Closed requests via workflow_requests row.
|
||||||
|
*/
|
||||||
|
|
||||||
|
import { QueryInterface, DataTypes } from 'sequelize';
|
||||||
|
|
||||||
|
export async function up(queryInterface: QueryInterface): Promise<void> {
|
||||||
|
// 1. form16a_submissions: one row per Form 16A submission; request_id links to workflow_requests
|
||||||
|
await queryInterface.createTable('form16a_submissions', {
|
||||||
|
id: {
|
||||||
|
type: DataTypes.INTEGER,
|
||||||
|
primaryKey: true,
|
||||||
|
autoIncrement: true,
|
||||||
|
},
|
||||||
|
request_id: {
|
||||||
|
type: DataTypes.UUID,
|
||||||
|
allowNull: false,
|
||||||
|
references: { model: 'workflow_requests', key: 'request_id' },
|
||||||
|
onDelete: 'CASCADE',
|
||||||
|
unique: true,
|
||||||
|
},
|
||||||
|
dealer_code: {
|
||||||
|
type: DataTypes.STRING(50),
|
||||||
|
allowNull: false,
|
||||||
|
},
|
||||||
|
form16a_number: {
|
||||||
|
type: DataTypes.STRING(50),
|
||||||
|
allowNull: false,
|
||||||
|
unique: true,
|
||||||
|
},
|
||||||
|
financial_year: {
|
||||||
|
type: DataTypes.STRING(20),
|
||||||
|
allowNull: false,
|
||||||
|
},
|
||||||
|
quarter: {
|
||||||
|
type: DataTypes.STRING(10),
|
||||||
|
allowNull: false,
|
||||||
|
},
|
||||||
|
version: {
|
||||||
|
type: DataTypes.INTEGER,
|
||||||
|
allowNull: false,
|
||||||
|
defaultValue: 1,
|
||||||
|
},
|
||||||
|
tds_amount: {
|
||||||
|
type: DataTypes.DECIMAL(15, 2),
|
||||||
|
allowNull: false,
|
||||||
|
},
|
||||||
|
total_amount: {
|
||||||
|
type: DataTypes.DECIMAL(15, 2),
|
||||||
|
allowNull: false,
|
||||||
|
},
|
||||||
|
tan_number: {
|
||||||
|
type: DataTypes.STRING(20),
|
||||||
|
allowNull: false,
|
||||||
|
},
|
||||||
|
deductor_name: {
|
||||||
|
type: DataTypes.STRING(255),
|
||||||
|
allowNull: false,
|
||||||
|
},
|
||||||
|
document_url: {
|
||||||
|
type: DataTypes.TEXT,
|
||||||
|
allowNull: true,
|
||||||
|
},
|
||||||
|
status: {
|
||||||
|
type: DataTypes.STRING(20),
|
||||||
|
allowNull: false,
|
||||||
|
defaultValue: 'pending',
|
||||||
|
},
|
||||||
|
validation_status: {
|
||||||
|
type: DataTypes.STRING(20),
|
||||||
|
allowNull: true,
|
||||||
|
},
|
||||||
|
validation_notes: {
|
||||||
|
type: DataTypes.TEXT,
|
||||||
|
allowNull: true,
|
||||||
|
},
|
||||||
|
submitted_date: {
|
||||||
|
type: DataTypes.DATE,
|
||||||
|
allowNull: true,
|
||||||
|
},
|
||||||
|
processed_date: {
|
||||||
|
type: DataTypes.DATE,
|
||||||
|
allowNull: true,
|
||||||
|
},
|
||||||
|
processed_by: {
|
||||||
|
type: DataTypes.UUID,
|
||||||
|
allowNull: true,
|
||||||
|
references: { model: 'users', key: 'user_id' },
|
||||||
|
},
|
||||||
|
created_at: {
|
||||||
|
type: DataTypes.DATE,
|
||||||
|
allowNull: false,
|
||||||
|
defaultValue: DataTypes.NOW,
|
||||||
|
},
|
||||||
|
updated_at: {
|
||||||
|
type: DataTypes.DATE,
|
||||||
|
allowNull: false,
|
||||||
|
defaultValue: DataTypes.NOW,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
await queryInterface.addIndex('form16a_submissions', ['request_id'], { name: 'idx_form16a_submissions_request_id' });
|
||||||
|
await queryInterface.addIndex('form16a_submissions', ['dealer_code'], { name: 'idx_form16a_submissions_dealer_code' });
|
||||||
|
await queryInterface.addIndex('form16a_submissions', ['status'], { name: 'idx_form16a_submissions_status' });
|
||||||
|
await queryInterface.addIndex('form16a_submissions', ['financial_year', 'quarter'], { name: 'idx_form16a_submissions_fy_quarter' });
|
||||||
|
|
||||||
|
// 2. form_16_credit_notes: credit notes generated (e.g. via SAP); linked to form16a_submissions
|
||||||
|
await queryInterface.createTable('form_16_credit_notes', {
|
||||||
|
id: {
|
||||||
|
type: DataTypes.INTEGER,
|
||||||
|
primaryKey: true,
|
||||||
|
autoIncrement: true,
|
||||||
|
},
|
||||||
|
submission_id: {
|
||||||
|
type: DataTypes.INTEGER,
|
||||||
|
allowNull: false,
|
||||||
|
references: { model: 'form16a_submissions', key: 'id' },
|
||||||
|
onDelete: 'CASCADE',
|
||||||
|
},
|
||||||
|
credit_note_number: {
|
||||||
|
type: DataTypes.STRING(50),
|
||||||
|
allowNull: false,
|
||||||
|
unique: true,
|
||||||
|
},
|
||||||
|
sap_document_number: {
|
||||||
|
type: DataTypes.STRING(50),
|
||||||
|
allowNull: true,
|
||||||
|
},
|
||||||
|
amount: {
|
||||||
|
type: DataTypes.DECIMAL(15, 2),
|
||||||
|
allowNull: false,
|
||||||
|
},
|
||||||
|
issue_date: {
|
||||||
|
type: DataTypes.DATEONLY,
|
||||||
|
allowNull: false,
|
||||||
|
},
|
||||||
|
financial_year: {
|
||||||
|
type: DataTypes.STRING(20),
|
||||||
|
allowNull: false,
|
||||||
|
},
|
||||||
|
quarter: {
|
||||||
|
type: DataTypes.STRING(10),
|
||||||
|
allowNull: false,
|
||||||
|
},
|
||||||
|
status: {
|
||||||
|
type: DataTypes.STRING(20),
|
||||||
|
allowNull: false,
|
||||||
|
defaultValue: 'pending',
|
||||||
|
},
|
||||||
|
remarks: {
|
||||||
|
type: DataTypes.TEXT,
|
||||||
|
allowNull: true,
|
||||||
|
},
|
||||||
|
issued_by: {
|
||||||
|
type: DataTypes.UUID,
|
||||||
|
allowNull: true,
|
||||||
|
references: { model: 'users', key: 'user_id' },
|
||||||
|
},
|
||||||
|
created_at: {
|
||||||
|
type: DataTypes.DATE,
|
||||||
|
allowNull: false,
|
||||||
|
defaultValue: DataTypes.NOW,
|
||||||
|
},
|
||||||
|
updated_at: {
|
||||||
|
type: DataTypes.DATE,
|
||||||
|
allowNull: false,
|
||||||
|
defaultValue: DataTypes.NOW,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
await queryInterface.addIndex('form_16_credit_notes', ['submission_id'], { name: 'idx_form_16_credit_notes_submission_id' });
|
||||||
|
await queryInterface.addIndex('form_16_credit_notes', ['status'], { name: 'idx_form_16_credit_notes_status' });
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function down(queryInterface: QueryInterface): Promise<void> {
|
||||||
|
await queryInterface.dropTable('form_16_credit_notes');
|
||||||
|
await queryInterface.dropTable('form16a_submissions');
|
||||||
|
}
|
||||||
228
src/migrations/20260220000001-add-form16-ocr-extracted-data.ts
Normal file
228
src/migrations/20260220000001-add-form16-ocr-extracted-data.ts
Normal file
@ -0,0 +1,228 @@
|
|||||||
|
/**
|
||||||
|
* Add ocr_extracted_data (JSONB) to form16a_submissions for audit/support.
|
||||||
|
* Stores the raw OCR response when a submission is created from an extracted PDF.
|
||||||
|
*
|
||||||
|
* If form16a_submissions does not exist (e.g. 20260220-create-form16-tables was marked
|
||||||
|
* run but used the wrong module on a previous deploy), this migration creates the table
|
||||||
|
* and form_16_credit_notes first, then adds the column. Safe for UAT/fresh DBs.
|
||||||
|
*/
|
||||||
|
|
||||||
|
import { QueryInterface, DataTypes } from 'sequelize';
|
||||||
|
|
||||||
|
async function tableExists(queryInterface: QueryInterface, tableName: string): Promise<boolean> {
|
||||||
|
const [rows] = await queryInterface.sequelize.query(
|
||||||
|
`SELECT EXISTS (
|
||||||
|
SELECT 1 FROM information_schema.tables
|
||||||
|
WHERE table_schema = 'public' AND table_name = :tableName
|
||||||
|
) AS "exists"`,
|
||||||
|
{ replacements: { tableName } }
|
||||||
|
);
|
||||||
|
const val = (rows as { exists: boolean | string }[])?.[0]?.exists;
|
||||||
|
return val === true || val === 't';
|
||||||
|
}
|
||||||
|
|
||||||
|
async function columnExists(queryInterface: QueryInterface, tableName: string, columnName: string): Promise<boolean> {
|
||||||
|
const [rows] = await queryInterface.sequelize.query(
|
||||||
|
`SELECT EXISTS (
|
||||||
|
SELECT 1 FROM information_schema.columns
|
||||||
|
WHERE table_schema = 'public' AND table_name = :tableName AND column_name = :columnName
|
||||||
|
) AS "exists"`,
|
||||||
|
{ replacements: { tableName, columnName } }
|
||||||
|
);
|
||||||
|
const val = (rows as { exists: boolean | string }[])?.[0]?.exists;
|
||||||
|
return val === true || val === 't';
|
||||||
|
}
|
||||||
|
|
||||||
|
/** Create form16a_submissions and form_16_credit_notes if missing (same as 20260220-create-form16-tables). */
|
||||||
|
async function ensureForm16Tables(queryInterface: QueryInterface): Promise<void> {
|
||||||
|
const exists = await tableExists(queryInterface, 'form16a_submissions');
|
||||||
|
if (exists) return;
|
||||||
|
|
||||||
|
await queryInterface.createTable('form16a_submissions', {
|
||||||
|
id: {
|
||||||
|
type: DataTypes.INTEGER,
|
||||||
|
primaryKey: true,
|
||||||
|
autoIncrement: true,
|
||||||
|
},
|
||||||
|
request_id: {
|
||||||
|
type: DataTypes.UUID,
|
||||||
|
allowNull: false,
|
||||||
|
references: { model: 'workflow_requests', key: 'request_id' },
|
||||||
|
onDelete: 'CASCADE',
|
||||||
|
unique: true,
|
||||||
|
},
|
||||||
|
dealer_code: {
|
||||||
|
type: DataTypes.STRING(50),
|
||||||
|
allowNull: false,
|
||||||
|
},
|
||||||
|
form16a_number: {
|
||||||
|
type: DataTypes.STRING(50),
|
||||||
|
allowNull: false,
|
||||||
|
unique: true,
|
||||||
|
},
|
||||||
|
financial_year: {
|
||||||
|
type: DataTypes.STRING(20),
|
||||||
|
allowNull: false,
|
||||||
|
},
|
||||||
|
quarter: {
|
||||||
|
type: DataTypes.STRING(10),
|
||||||
|
allowNull: false,
|
||||||
|
},
|
||||||
|
version: {
|
||||||
|
type: DataTypes.INTEGER,
|
||||||
|
allowNull: false,
|
||||||
|
defaultValue: 1,
|
||||||
|
},
|
||||||
|
tds_amount: {
|
||||||
|
type: DataTypes.DECIMAL(15, 2),
|
||||||
|
allowNull: false,
|
||||||
|
},
|
||||||
|
total_amount: {
|
||||||
|
type: DataTypes.DECIMAL(15, 2),
|
||||||
|
allowNull: false,
|
||||||
|
},
|
||||||
|
tan_number: {
|
||||||
|
type: DataTypes.STRING(20),
|
||||||
|
allowNull: false,
|
||||||
|
},
|
||||||
|
deductor_name: {
|
||||||
|
type: DataTypes.STRING(255),
|
||||||
|
allowNull: false,
|
||||||
|
},
|
||||||
|
document_url: {
|
||||||
|
type: DataTypes.TEXT,
|
||||||
|
allowNull: true,
|
||||||
|
},
|
||||||
|
status: {
|
||||||
|
type: DataTypes.STRING(20),
|
||||||
|
allowNull: false,
|
||||||
|
defaultValue: 'pending',
|
||||||
|
},
|
||||||
|
validation_status: {
|
||||||
|
type: DataTypes.STRING(20),
|
||||||
|
allowNull: true,
|
||||||
|
},
|
||||||
|
validation_notes: {
|
||||||
|
type: DataTypes.TEXT,
|
||||||
|
allowNull: true,
|
||||||
|
},
|
||||||
|
submitted_date: {
|
||||||
|
type: DataTypes.DATE,
|
||||||
|
allowNull: true,
|
||||||
|
},
|
||||||
|
processed_date: {
|
||||||
|
type: DataTypes.DATE,
|
||||||
|
allowNull: true,
|
||||||
|
},
|
||||||
|
processed_by: {
|
||||||
|
type: DataTypes.UUID,
|
||||||
|
allowNull: true,
|
||||||
|
references: { model: 'users', key: 'user_id' },
|
||||||
|
},
|
||||||
|
created_at: {
|
||||||
|
type: DataTypes.DATE,
|
||||||
|
allowNull: false,
|
||||||
|
defaultValue: DataTypes.NOW,
|
||||||
|
},
|
||||||
|
updated_at: {
|
||||||
|
type: DataTypes.DATE,
|
||||||
|
allowNull: false,
|
||||||
|
defaultValue: DataTypes.NOW,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
await queryInterface.addIndex('form16a_submissions', ['request_id'], { name: 'idx_form16a_submissions_request_id' });
|
||||||
|
await queryInterface.addIndex('form16a_submissions', ['dealer_code'], { name: 'idx_form16a_submissions_dealer_code' });
|
||||||
|
await queryInterface.addIndex('form16a_submissions', ['status'], { name: 'idx_form16a_submissions_status' });
|
||||||
|
await queryInterface.addIndex('form16a_submissions', ['financial_year', 'quarter'], { name: 'idx_form16a_submissions_fy_quarter' });
|
||||||
|
|
||||||
|
if (!(await tableExists(queryInterface, 'form_16_credit_notes'))) {
|
||||||
|
await queryInterface.createTable('form_16_credit_notes', {
|
||||||
|
id: {
|
||||||
|
type: DataTypes.INTEGER,
|
||||||
|
primaryKey: true,
|
||||||
|
autoIncrement: true,
|
||||||
|
},
|
||||||
|
submission_id: {
|
||||||
|
type: DataTypes.INTEGER,
|
||||||
|
allowNull: false,
|
||||||
|
references: { model: 'form16a_submissions', key: 'id' },
|
||||||
|
onDelete: 'CASCADE',
|
||||||
|
},
|
||||||
|
credit_note_number: {
|
||||||
|
type: DataTypes.STRING(50),
|
||||||
|
allowNull: false,
|
||||||
|
unique: true,
|
||||||
|
},
|
||||||
|
sap_document_number: {
|
||||||
|
type: DataTypes.STRING(50),
|
||||||
|
allowNull: true,
|
||||||
|
},
|
||||||
|
amount: {
|
||||||
|
type: DataTypes.DECIMAL(15, 2),
|
||||||
|
allowNull: false,
|
||||||
|
},
|
||||||
|
issue_date: {
|
||||||
|
type: DataTypes.DATEONLY,
|
||||||
|
allowNull: false,
|
||||||
|
},
|
||||||
|
financial_year: {
|
||||||
|
type: DataTypes.STRING(20),
|
||||||
|
allowNull: false,
|
||||||
|
},
|
||||||
|
quarter: {
|
||||||
|
type: DataTypes.STRING(10),
|
||||||
|
allowNull: false,
|
||||||
|
},
|
||||||
|
status: {
|
||||||
|
type: DataTypes.STRING(20),
|
||||||
|
allowNull: false,
|
||||||
|
defaultValue: 'pending',
|
||||||
|
},
|
||||||
|
remarks: {
|
||||||
|
type: DataTypes.TEXT,
|
||||||
|
allowNull: true,
|
||||||
|
},
|
||||||
|
issued_by: {
|
||||||
|
type: DataTypes.UUID,
|
||||||
|
allowNull: true,
|
||||||
|
references: { model: 'users', key: 'user_id' },
|
||||||
|
},
|
||||||
|
created_at: {
|
||||||
|
type: DataTypes.DATE,
|
||||||
|
allowNull: false,
|
||||||
|
defaultValue: DataTypes.NOW,
|
||||||
|
},
|
||||||
|
updated_at: {
|
||||||
|
type: DataTypes.DATE,
|
||||||
|
allowNull: false,
|
||||||
|
defaultValue: DataTypes.NOW,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
await queryInterface.addIndex('form_16_credit_notes', ['submission_id'], { name: 'idx_form_16_credit_notes_submission_id' });
|
||||||
|
await queryInterface.addIndex('form_16_credit_notes', ['status'], { name: 'idx_form_16_credit_notes_status' });
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function up(queryInterface: QueryInterface): Promise<void> {
|
||||||
|
await ensureForm16Tables(queryInterface);
|
||||||
|
|
||||||
|
const hasColumn = await columnExists(queryInterface, 'form16a_submissions', 'ocr_extracted_data');
|
||||||
|
if (!hasColumn) {
|
||||||
|
await queryInterface.addColumn(
|
||||||
|
'form16a_submissions',
|
||||||
|
'ocr_extracted_data',
|
||||||
|
{
|
||||||
|
type: DataTypes.JSONB,
|
||||||
|
allowNull: true,
|
||||||
|
}
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function down(queryInterface: QueryInterface): Promise<void> {
|
||||||
|
const hasColumn = await columnExists(queryInterface, 'form16a_submissions', 'ocr_extracted_data');
|
||||||
|
if (hasColumn) {
|
||||||
|
await queryInterface.removeColumn('form16a_submissions', 'ocr_extracted_data');
|
||||||
|
}
|
||||||
|
}
|
||||||
97
src/migrations/20260222000001-create-tds-26as-entries.ts
Normal file
97
src/migrations/20260222000001-create-tds-26as-entries.ts
Normal file
@ -0,0 +1,97 @@
|
|||||||
|
/**
|
||||||
|
* Form 16 – 26AS TDS entries table for RE admin.
|
||||||
|
* Stores TDS credit data from 26AS (Income Tax portal) for validation against Form 16A submissions.
|
||||||
|
*/
|
||||||
|
|
||||||
|
import { QueryInterface, DataTypes } from 'sequelize';
|
||||||
|
|
||||||
|
/**
 * Creates `tds_26as_entries`: row-level TDS credit data imported from the
 * 26AS statement, later matched against Form 16A submissions.
 */
export async function up(queryInterface: QueryInterface): Promise<void> {
  await queryInterface.createTable('tds_26as_entries', {
    id: {
      type: DataTypes.INTEGER,
      primaryKey: true,
      autoIncrement: true,
    },
    tan_number: {
      type: DataTypes.STRING(20),
      allowNull: false,
      comment: 'TAN of deductor',
    },
    deductor_name: {
      type: DataTypes.STRING(255),
      allowNull: true,
    },
    quarter: {
      type: DataTypes.STRING(10),
      allowNull: false,
      comment: 'Q1, Q2, Q3, Q4',
    },
    assessment_year: {
      type: DataTypes.STRING(20),
      allowNull: true,
      comment: 'e.g. 2024-25',
    },
    financial_year: {
      type: DataTypes.STRING(20),
      allowNull: false,
      comment: 'e.g. 2024-25',
    },
    section_code: {
      type: DataTypes.STRING(20),
      allowNull: true,
      comment: 'e.g. 194C, 194A',
    },
    // Amounts use DECIMAL(15, 2) to avoid floating-point rounding on money.
    amount_paid: {
      type: DataTypes.DECIMAL(15, 2),
      allowNull: true,
    },
    tax_deducted: {
      type: DataTypes.DECIMAL(15, 2),
      allowNull: false,
      defaultValue: 0,
    },
    total_tds_deposited: {
      type: DataTypes.DECIMAL(15, 2),
      allowNull: true,
    },
    nature_of_payment: {
      type: DataTypes.STRING(255),
      allowNull: true,
    },
    transaction_date: {
      type: DataTypes.DATEONLY,
      allowNull: true,
    },
    date_of_booking: {
      type: DataTypes.DATEONLY,
      allowNull: true,
    },
    status_oltas: {
      type: DataTypes.STRING(50),
      allowNull: true,
      comment: 'Status of matching with OLTAS',
    },
    remarks: {
      type: DataTypes.TEXT,
      allowNull: true,
    },
    created_at: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW,
    },
    updated_at: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW,
    },
  });

  // Lookup indexes for the matching workflow.
  // NOTE(review): idx_tds_26as_fy is a leading prefix of idx_tds_26as_fy_quarter
  // and is likely redundant on PostgreSQL — confirm before removing.
  await queryInterface.addIndex('tds_26as_entries', ['tan_number'], { name: 'idx_tds_26as_tan' });
  await queryInterface.addIndex('tds_26as_entries', ['financial_year', 'quarter'], { name: 'idx_tds_26as_fy_quarter' });
  await queryInterface.addIndex('tds_26as_entries', ['financial_year'], { name: 'idx_tds_26as_fy' });
}

/** Drops `tds_26as_entries`; its indexes are removed together with the table. */
export async function down(queryInterface: QueryInterface): Promise<void> {
  await queryInterface.dropTable('tds_26as_entries');
}
|
||||||
75
src/migrations/20260223000001-create-form-16-debit-notes.ts
Normal file
75
src/migrations/20260223000001-create-form-16-debit-notes.ts
Normal file
@ -0,0 +1,75 @@
|
|||||||
|
/**
|
||||||
|
* Form 16 Debit Notes: issued when RE withdraws a credit note (e.g. duplicate/wrong).
|
||||||
|
* One debit note per withdrawn credit note. SAP document number filled when SAP API is integrated.
|
||||||
|
*/
|
||||||
|
|
||||||
|
import { QueryInterface, DataTypes } from 'sequelize';
|
||||||
|
|
||||||
|
export async function up(queryInterface: QueryInterface): Promise<void> {
|
||||||
|
await queryInterface.createTable('form_16_debit_notes', {
|
||||||
|
id: {
|
||||||
|
type: DataTypes.INTEGER,
|
||||||
|
primaryKey: true,
|
||||||
|
autoIncrement: true,
|
||||||
|
},
|
||||||
|
credit_note_id: {
|
||||||
|
type: DataTypes.INTEGER,
|
||||||
|
allowNull: false,
|
||||||
|
references: { model: 'form_16_credit_notes', key: 'id' },
|
||||||
|
onDelete: 'CASCADE',
|
||||||
|
unique: true,
|
||||||
|
},
|
||||||
|
debit_note_number: {
|
||||||
|
type: DataTypes.STRING(50),
|
||||||
|
allowNull: false,
|
||||||
|
unique: true,
|
||||||
|
},
|
||||||
|
sap_document_number: {
|
||||||
|
type: DataTypes.STRING(50),
|
||||||
|
allowNull: true,
|
||||||
|
},
|
||||||
|
amount: {
|
||||||
|
type: DataTypes.DECIMAL(15, 2),
|
||||||
|
allowNull: false,
|
||||||
|
},
|
||||||
|
issue_date: {
|
||||||
|
type: DataTypes.DATEONLY,
|
||||||
|
allowNull: false,
|
||||||
|
},
|
||||||
|
status: {
|
||||||
|
type: DataTypes.STRING(20),
|
||||||
|
allowNull: false,
|
||||||
|
defaultValue: 'pending',
|
||||||
|
},
|
||||||
|
reason: {
|
||||||
|
type: DataTypes.TEXT,
|
||||||
|
allowNull: true,
|
||||||
|
},
|
||||||
|
created_by: {
|
||||||
|
type: DataTypes.UUID,
|
||||||
|
allowNull: true,
|
||||||
|
references: { model: 'users', key: 'user_id' },
|
||||||
|
},
|
||||||
|
created_at: {
|
||||||
|
type: DataTypes.DATE,
|
||||||
|
allowNull: false,
|
||||||
|
defaultValue: DataTypes.NOW,
|
||||||
|
},
|
||||||
|
updated_at: {
|
||||||
|
type: DataTypes.DATE,
|
||||||
|
allowNull: false,
|
||||||
|
defaultValue: DataTypes.NOW,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
await queryInterface.addIndex('form_16_debit_notes', ['credit_note_id'], {
|
||||||
|
name: 'idx_form_16_debit_notes_credit_note_id',
|
||||||
|
});
|
||||||
|
await queryInterface.addIndex('form_16_debit_notes', ['status'], {
|
||||||
|
name: 'idx_form_16_debit_notes_status',
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function down(queryInterface: QueryInterface): Promise<void> {
|
||||||
|
await queryInterface.dropTable('form_16_debit_notes');
|
||||||
|
}
|
||||||
@ -0,0 +1,51 @@
|
|||||||
|
/**
|
||||||
|
* Form 16 – 26AS upload audit log.
|
||||||
|
* Records each 26AS file upload: who uploaded, when, and how many records were imported.
|
||||||
|
*/
|
||||||
|
|
||||||
|
import { QueryInterface, DataTypes } from 'sequelize';
|
||||||
|
|
||||||
|
export async function up(queryInterface: QueryInterface): Promise<void> {
|
||||||
|
await queryInterface.createTable('form_16_26as_upload_log', {
|
||||||
|
id: {
|
||||||
|
type: DataTypes.INTEGER,
|
||||||
|
primaryKey: true,
|
||||||
|
autoIncrement: true,
|
||||||
|
},
|
||||||
|
uploaded_at: {
|
||||||
|
type: DataTypes.DATE,
|
||||||
|
allowNull: false,
|
||||||
|
defaultValue: DataTypes.NOW,
|
||||||
|
},
|
||||||
|
uploaded_by: {
|
||||||
|
type: DataTypes.UUID,
|
||||||
|
allowNull: false,
|
||||||
|
references: { model: 'users', key: 'user_id' },
|
||||||
|
},
|
||||||
|
file_name: {
|
||||||
|
type: DataTypes.STRING(255),
|
||||||
|
allowNull: true,
|
||||||
|
},
|
||||||
|
records_imported: {
|
||||||
|
type: DataTypes.INTEGER,
|
||||||
|
allowNull: false,
|
||||||
|
defaultValue: 0,
|
||||||
|
},
|
||||||
|
errors_count: {
|
||||||
|
type: DataTypes.INTEGER,
|
||||||
|
allowNull: false,
|
||||||
|
defaultValue: 0,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
await queryInterface.addIndex('form_16_26as_upload_log', ['uploaded_at'], {
|
||||||
|
name: 'idx_form_16_26as_upload_log_uploaded_at',
|
||||||
|
});
|
||||||
|
await queryInterface.addIndex('form_16_26as_upload_log', ['uploaded_by'], {
|
||||||
|
name: 'idx_form_16_26as_upload_log_uploaded_by',
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function down(queryInterface: QueryInterface): Promise<void> {
|
||||||
|
await queryInterface.dropTable('form_16_26as_upload_log');
|
||||||
|
}
|
||||||
@ -0,0 +1,180 @@
|
|||||||
|
/**
|
||||||
|
* Form 16 critical changes:
|
||||||
|
* 1. Add upload_log_id to tds_26as_entries to link each record to an upload.
|
||||||
|
* 2. form_16_26as_quarter_snapshots: stores aggregated 26AS total per (tan, fy, quarter) per upload version.
|
||||||
|
* 3. form_16_quarter_status: current status per (tan, fy, quarter) - SETTLED | DEBIT_ISSUED_PENDING_FORM16.
|
||||||
|
* 4. form_16_ledger_entries: full audit trail of every credit/debit (no deletion).
|
||||||
|
*/
|
||||||
|
|
||||||
|
import { QueryInterface, DataTypes } from 'sequelize';
|
||||||
|
|
||||||
|
/**
 * Applies the four "critical changes" in creation order:
 * upload link column + snapshots + quarter status + ledger.
 * Each numbered step below matches the file header.
 */
export async function up(queryInterface: QueryInterface): Promise<void> {
  // 1. Add upload_log_id to tds_26as_entries (nullable for existing rows)
  await queryInterface.addColumn('tds_26as_entries', 'upload_log_id', {
    type: DataTypes.INTEGER,
    allowNull: true,
    references: { model: 'form_16_26as_upload_log', key: 'id' },
    onDelete: 'SET NULL',
  });
  await queryInterface.addIndex('tds_26as_entries', ['upload_log_id'], { name: 'idx_tds_26as_upload_log_id' });

  // 2. form_16_26as_quarter_snapshots: one row per "version" of 26AS aggregated total for (tan, fy, quarter)
  await queryInterface.createTable('form_16_26as_quarter_snapshots', {
    id: {
      type: DataTypes.INTEGER,
      primaryKey: true,
      autoIncrement: true,
    },
    tan_number: {
      type: DataTypes.STRING(20),
      allowNull: false,
    },
    financial_year: {
      type: DataTypes.STRING(20),
      allowNull: false,
    },
    quarter: {
      type: DataTypes.STRING(10),
      allowNull: false,
    },
    aggregated_amount: {
      type: DataTypes.DECIMAL(15, 2),
      allowNull: false,
      comment: 'Sum of tax_deducted for this tan+fy+quarter (Section 194Q, Booking F/O only)',
    },
    // Which upload produced this snapshot version; kept if the log row goes away.
    upload_log_id: {
      type: DataTypes.INTEGER,
      allowNull: true,
      references: { model: 'form_16_26as_upload_log', key: 'id' },
      onDelete: 'SET NULL',
    },
    created_at: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW,
    },
  });
  // Non-unique: multiple snapshot versions may exist per (tan, fy, quarter).
  await queryInterface.addIndex('form_16_26as_quarter_snapshots', ['tan_number', 'financial_year', 'quarter'], {
    name: 'idx_form16_26as_snap_tan_fy_qtr',
  });
  await queryInterface.addIndex('form_16_26as_quarter_snapshots', ['upload_log_id'], {
    name: 'idx_form16_26as_snap_upload_log_id',
  });

  // 3. form_16_quarter_status: current status per (tan, fy, quarter) for reverse-first logic
  await queryInterface.createTable('form_16_quarter_status', {
    id: {
      type: DataTypes.INTEGER,
      primaryKey: true,
      autoIncrement: true,
    },
    tan_number: {
      type: DataTypes.STRING(20),
      allowNull: false,
    },
    financial_year: {
      type: DataTypes.STRING(20),
      allowNull: false,
    },
    quarter: {
      type: DataTypes.STRING(10),
      allowNull: false,
    },
    status: {
      type: DataTypes.STRING(50),
      allowNull: false,
      comment: 'SETTLED | DEBIT_ISSUED_PENDING_FORM16',
    },
    // Most recent credit/debit notes that moved this quarter into its status.
    last_credit_note_id: {
      type: DataTypes.INTEGER,
      allowNull: true,
      references: { model: 'form_16_credit_notes', key: 'id' },
      onDelete: 'SET NULL',
    },
    last_debit_note_id: {
      type: DataTypes.INTEGER,
      allowNull: true,
      references: { model: 'form_16_debit_notes', key: 'id' },
      onDelete: 'SET NULL',
    },
    updated_at: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW,
    },
  });
  // Unique: exactly one current status row per (tan, fy, quarter).
  await queryInterface.addIndex('form_16_quarter_status', ['tan_number', 'financial_year', 'quarter'], {
    name: 'idx_form16_quarter_status_tan_fy_qtr',
    unique: true,
  });

  // 4. form_16_ledger_entries: full history of every credit and debit (no deletion)
  await queryInterface.createTable('form_16_ledger_entries', {
    id: {
      type: DataTypes.INTEGER,
      primaryKey: true,
      autoIncrement: true,
    },
    tan_number: {
      type: DataTypes.STRING(20),
      allowNull: false,
    },
    financial_year: {
      type: DataTypes.STRING(20),
      allowNull: false,
    },
    quarter: {
      type: DataTypes.STRING(10),
      allowNull: false,
    },
    entry_type: {
      type: DataTypes.STRING(10),
      allowNull: false,
      comment: 'CREDIT | DEBIT',
    },
    amount: {
      type: DataTypes.DECIMAL(15, 2),
      allowNull: false,
    },
    // All source links are SET NULL so ledger rows survive source deletion.
    credit_note_id: {
      type: DataTypes.INTEGER,
      allowNull: true,
      references: { model: 'form_16_credit_notes', key: 'id' },
      onDelete: 'SET NULL',
    },
    debit_note_id: {
      type: DataTypes.INTEGER,
      allowNull: true,
      references: { model: 'form_16_debit_notes', key: 'id' },
      onDelete: 'SET NULL',
    },
    form16_submission_id: {
      type: DataTypes.INTEGER,
      allowNull: true,
      references: { model: 'form16a_submissions', key: 'id' },
      onDelete: 'SET NULL',
    },
    snapshot_id: {
      type: DataTypes.INTEGER,
      allowNull: true,
      references: { model: 'form_16_26as_quarter_snapshots', key: 'id' },
      onDelete: 'SET NULL',
    },
    created_at: {
      type: DataTypes.DATE,
      allowNull: false,
      defaultValue: DataTypes.NOW,
    },
  });
  await queryInterface.addIndex('form_16_ledger_entries', ['tan_number', 'financial_year', 'quarter'], {
    name: 'idx_form16_ledger_tan_fy_qtr',
  });
}

/**
 * Reverses `up` in dependency order: tables referencing the snapshots drop
 * first, then the snapshots, then the added column/index on tds_26as_entries.
 */
export async function down(queryInterface: QueryInterface): Promise<void> {
  await queryInterface.dropTable('form_16_ledger_entries');
  await queryInterface.dropTable('form_16_quarter_status');
  await queryInterface.dropTable('form_16_26as_quarter_snapshots');
  await queryInterface.removeIndex('tds_26as_entries', 'idx_tds_26as_upload_log_id');
  await queryInterface.removeColumn('tds_26as_entries', 'upload_log_id');
}
|
||||||
@ -0,0 +1,45 @@
|
|||||||
|
/**
|
||||||
|
* Form 16 – non-submitted dealer notification log.
|
||||||
|
* Records each time an RE user sends a "submit Form 16" notification to a non-submitted dealer (per FY).
|
||||||
|
*/
|
||||||
|
|
||||||
|
import { QueryInterface, DataTypes } from 'sequelize';
|
||||||
|
|
||||||
|
export async function up(queryInterface: QueryInterface): Promise<void> {
|
||||||
|
await queryInterface.createTable('form16_non_submitted_notifications', {
|
||||||
|
id: {
|
||||||
|
type: DataTypes.INTEGER,
|
||||||
|
primaryKey: true,
|
||||||
|
autoIncrement: true,
|
||||||
|
},
|
||||||
|
dealer_code: {
|
||||||
|
type: DataTypes.STRING(50),
|
||||||
|
allowNull: false,
|
||||||
|
},
|
||||||
|
financial_year: {
|
||||||
|
type: DataTypes.STRING(20),
|
||||||
|
allowNull: false,
|
||||||
|
},
|
||||||
|
notified_at: {
|
||||||
|
type: DataTypes.DATE,
|
||||||
|
allowNull: false,
|
||||||
|
defaultValue: DataTypes.NOW,
|
||||||
|
},
|
||||||
|
notified_by: {
|
||||||
|
type: DataTypes.UUID,
|
||||||
|
allowNull: false,
|
||||||
|
references: { model: 'users', key: 'user_id' },
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
await queryInterface.addIndex('form16_non_submitted_notifications', ['dealer_code', 'financial_year'], {
|
||||||
|
name: 'idx_form16_ns_notif_dealer_fy',
|
||||||
|
});
|
||||||
|
await queryInterface.addIndex('form16_non_submitted_notifications', ['notified_at'], {
|
||||||
|
name: 'idx_form16_ns_notif_notified_at',
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function down(queryInterface: QueryInterface): Promise<void> {
|
||||||
|
await queryInterface.dropTable('form16_non_submitted_notifications');
|
||||||
|
}
|
||||||
36
src/migrations/20260225100001-add-form16-archived-at.ts
Normal file
36
src/migrations/20260225100001-add-form16-archived-at.ts
Normal file
@ -0,0 +1,36 @@
|
|||||||
|
/**
|
||||||
|
* Form 16 / 26AS data retention: add archived_at to keep last 5 FY active.
|
||||||
|
* Records with financial_year older than 5 years get archived_at set by scheduler (no deletion).
|
||||||
|
*/
|
||||||
|
|
||||||
|
import { QueryInterface, DataTypes } from 'sequelize';
|
||||||
|
|
||||||
|
const TABLES_WITH_FY = [
|
||||||
|
'tds_26as_entries',
|
||||||
|
'form_16_26as_quarter_snapshots',
|
||||||
|
'form_16_quarter_status',
|
||||||
|
'form_16_ledger_entries',
|
||||||
|
'form_16_credit_notes',
|
||||||
|
'form16a_submissions',
|
||||||
|
] as const;
|
||||||
|
|
||||||
|
export async function up(queryInterface: QueryInterface): Promise<void> {
|
||||||
|
for (const table of TABLES_WITH_FY) {
|
||||||
|
await queryInterface.addColumn(table, 'archived_at', {
|
||||||
|
type: DataTypes.DATE,
|
||||||
|
allowNull: true,
|
||||||
|
comment: 'Set when record is older than 5 financial years; active when NULL',
|
||||||
|
});
|
||||||
|
}
|
||||||
|
await queryInterface.addColumn('form_16_debit_notes', 'archived_at', {
|
||||||
|
type: DataTypes.DATE,
|
||||||
|
allowNull: true,
|
||||||
|
comment: 'Set when linked credit_note is archived; active when NULL',
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function down(queryInterface: QueryInterface): Promise<void> {
|
||||||
|
for (const table of [...TABLES_WITH_FY, 'form_16_debit_notes']) {
|
||||||
|
await queryInterface.removeColumn(table, 'archived_at');
|
||||||
|
}
|
||||||
|
}
|
||||||
33
src/migrations/20260303100001-drop-form16a-number-unique.ts
Normal file
33
src/migrations/20260303100001-drop-form16a-number-unique.ts
Normal file
@ -0,0 +1,33 @@
|
|||||||
|
/**
|
||||||
|
* Allow multiple Form 16A submissions with the same certificate number.
|
||||||
|
* Duplicate submission is only when a credit note already exists for that dealer/quarter/amount
|
||||||
|
* (enforced in run26asMatchAndCreditNote). Same certificate number without an existing
|
||||||
|
* credit note (e.g. resubmission after 26AS upload) is allowed.
|
||||||
|
*/
|
||||||
|
|
||||||
|
import { QueryInterface } from 'sequelize';
|
||||||
|
|
||||||
|
/**
 * Removes the uniqueness guarantee on form16a_submissions.form16a_number so
 * the same certificate number can be submitted more than once. Drops both
 * the table-level constraint and any stray unique index on the column.
 */
export async function up(queryInterface: QueryInterface): Promise<void> {
  const sequelize = queryInterface.sequelize;
  // Drop unique constraint (PostgreSQL names it tablename_columnname_key when created via CREATE TABLE)
  await sequelize.query(
    `ALTER TABLE form16a_submissions DROP CONSTRAINT IF EXISTS form16a_submissions_form16a_number_key;`
  );
  // If a unique index exists on form16a_number (e.g. from Sequelize), drop it
  const [indexRows] = (await sequelize.query(
    `SELECT indexname FROM pg_indexes WHERE tablename = 'form16a_submissions' AND indexdef LIKE '%form16a_number%' AND indexdef LIKE '%UNIQUE%';`
  )) as [{ indexname: string }[], unknown];
  for (const row of indexRows || []) {
    if (row?.indexname) {
      // indexname comes from the pg_indexes catalog (trusted), and is quoted as an identifier.
      await sequelize.query(`DROP INDEX IF EXISTS "${row.indexname}";`);
    }
  }
}

/** Restores the unique constraint under its conventional PostgreSQL name. */
export async function down(queryInterface: QueryInterface): Promise<void> {
  await queryInterface.addConstraint('form16a_submissions', {
    fields: ['form16a_number'],
    type: 'unique',
    name: 'form16a_submissions_form16a_number_key',
  });
}
|
||||||
38
src/migrations/20260316-update-holiday-type-enum.ts
Normal file
38
src/migrations/20260316-update-holiday-type-enum.ts
Normal file
@ -0,0 +1,38 @@
|
|||||||
|
import { QueryInterface } from 'sequelize';
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Migration to ensure 'ORGANIZATIONAL' exists in the holiday_type enum
|
||||||
|
 * and set 'ORGANIZATIONAL' as the default value for the holiday_type column.
|
||||||
|
*/
|
||||||
|
export async function up(queryInterface: QueryInterface): Promise<void> {
  // 1. Add 'ORGANIZATIONAL' to the enum_holidays_holiday_type enum type if it doesn't exist
  // PostgreSQL doesn't support IF NOT EXISTS for ALTER TYPE ADD VALUE,
  // so we check if it exists first using a PL/pgSQL block
  await queryInterface.sequelize.query(`
    DO $$
    BEGIN
      IF NOT EXISTS (
        SELECT 1 FROM pg_enum
        WHERE enumlabel = 'ORGANIZATIONAL'
        AND enumtypid = (SELECT oid FROM pg_type WHERE typname = 'enum_holidays_holiday_type')
      ) THEN
        ALTER TYPE enum_holidays_holiday_type ADD VALUE 'ORGANIZATIONAL';
      END IF;
    END$$;
  `);

  // 2. Set 'ORGANIZATIONAL' as the default value for the holiday_type column
  await queryInterface.sequelize.query(`
    ALTER TABLE "holidays" ALTER COLUMN "holiday_type" SET DEFAULT 'ORGANIZATIONAL';
  `);
}

export async function down(queryInterface: QueryInterface): Promise<void> {
  // PostgreSQL doesn't support removing enum values directly.
  // We only revert the holiday_type default back to 'NATIONAL'.
  await queryInterface.sequelize.query(`
    ALTER TABLE "holidays" ALTER COLUMN "holiday_type" SET DEFAULT 'NATIONAL';
  `);

  console.log('[Migration] Note: Cannot remove enum values in PostgreSQL. ORGANIZATIONAL will remain in enum_holidays_holiday_type.');
}
|
||||||
86
src/migrations/20260317-refactor-activity-types-columns.ts
Normal file
86
src/migrations/20260317-refactor-activity-types-columns.ts
Normal file
@ -0,0 +1,86 @@
|
|||||||
|
import { QueryInterface, DataTypes } from 'sequelize';
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Helper: returns true if the column exists in the table
|
||||||
|
*/
|
||||||
|
async function columnExists(
|
||||||
|
queryInterface: QueryInterface,
|
||||||
|
tableName: string,
|
||||||
|
columnName: string
|
||||||
|
): Promise<boolean> {
|
||||||
|
try {
|
||||||
|
const description = await queryInterface.describeTable(tableName);
|
||||||
|
return columnName in description;
|
||||||
|
} catch {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Migration: Refactor activity_types table
|
||||||
|
*
|
||||||
|
* Drops deprecated columns that will not be used going forward:
|
||||||
|
* hsn_code, sac_code, gst_rate, gl_code, credit_nature
|
||||||
|
*
|
||||||
|
* Adds new column:
|
||||||
|
* credit_posting_on VARCHAR(50) – indicates posting target (e.g. 'Spares', 'Vehicle')
|
||||||
|
*
|
||||||
|
* All drops are guarded so this migration is safe to run on a fresh database
|
||||||
|
* where these columns were never added.
|
||||||
|
*/
|
||||||
|
export async function up(queryInterface: QueryInterface): Promise<void> {
|
||||||
|
const TABLE = 'activity_types';
|
||||||
|
|
||||||
|
// ── Drop deprecated columns (safe: only if they exist) ──────────────────────
|
||||||
|
const columnsToDrop = ['hsn_code', 'sac_code', 'gst_rate', 'gl_code', 'credit_nature'];
|
||||||
|
|
||||||
|
for (const col of columnsToDrop) {
|
||||||
|
if (await columnExists(queryInterface, TABLE, col)) {
|
||||||
|
await queryInterface.removeColumn(TABLE, col);
|
||||||
|
console.log(`[Migration] Dropped column ${TABLE}.${col}`);
|
||||||
|
} else {
|
||||||
|
console.log(`[Migration] Column ${TABLE}.${col} does not exist – skipping drop`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ── Add new column ────────────────────────────────────────────────────────────
|
||||||
|
if (!(await columnExists(queryInterface, TABLE, 'credit_posting_on'))) {
|
||||||
|
await queryInterface.addColumn(TABLE, 'credit_posting_on', {
|
||||||
|
type: DataTypes.STRING(50),
|
||||||
|
allowNull: true,
|
||||||
|
defaultValue: null,
|
||||||
|
comment: 'Indicates what the credit note is posted against (e.g. "Spares", "Vehicle")'
|
||||||
|
});
|
||||||
|
console.log(`[Migration] Added column ${TABLE}.credit_posting_on`);
|
||||||
|
} else {
|
||||||
|
console.log(`[Migration] Column ${TABLE}.credit_posting_on already exists – skipping add`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rollback: re-add the dropped columns and remove credit_posting_on.
|
||||||
|
* Columns are restored as nullable so existing rows are unaffected.
|
||||||
|
*/
|
||||||
|
export async function down(queryInterface: QueryInterface): Promise<void> {
|
||||||
|
const TABLE = 'activity_types';
|
||||||
|
|
||||||
|
// Remove the newly added column
|
||||||
|
if (await columnExists(queryInterface, TABLE, 'credit_posting_on')) {
|
||||||
|
await queryInterface.removeColumn(TABLE, 'credit_posting_on');
|
||||||
|
}
|
||||||
|
|
||||||
|
// Restore dropped columns
|
||||||
|
const columnsToRestore: Record<string, any> = {
|
||||||
|
hsn_code: { type: DataTypes.STRING(20), allowNull: true, defaultValue: null },
|
||||||
|
sac_code: { type: DataTypes.STRING(20), allowNull: true, defaultValue: null },
|
||||||
|
gst_rate: { type: DataTypes.DECIMAL(5, 2), allowNull: true, defaultValue: null },
|
||||||
|
gl_code: { type: DataTypes.STRING(20), allowNull: true, defaultValue: null },
|
||||||
|
credit_nature: { type: DataTypes.STRING(50), allowNull: true, defaultValue: null }
|
||||||
|
};
|
||||||
|
|
||||||
|
for (const [col, spec] of Object.entries(columnsToRestore)) {
|
||||||
|
if (!(await columnExists(queryInterface, TABLE, col))) {
|
||||||
|
await queryInterface.addColumn(TABLE, col, spec);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
85
src/migrations/20260317100001-create-form16-sap-responses.ts
Normal file
85
src/migrations/20260317100001-create-form16-sap-responses.ts
Normal file
@ -0,0 +1,85 @@
|
|||||||
|
import type { QueryInterface } from 'sequelize';
|
||||||
|
import { DataTypes } from 'sequelize';
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Stores SAP/WFM outgoing response files for Form 16 credit/debit notes.
|
||||||
|
* Used for:
|
||||||
|
* - Showing "credit note is being generated, wait" until SAP response is received
|
||||||
|
* - Allowing users to download the SAP response file later
|
||||||
|
*/
|
||||||
|
module.exports = {
  /**
   * Creates `form16_sap_responses`: one row per SAP/WFM outgoing response
   * record for a Form 16 credit or debit note, with indexes for lookup by
   * type, note id, and claim number.
   */
  up: async (queryInterface: QueryInterface) => {
    await queryInterface.createTable('form16_sap_responses', {
      id: {
        type: DataTypes.INTEGER,
        autoIncrement: true,
        primaryKey: true,
      },
      type: {
        type: DataTypes.STRING(10),
        allowNull: false, // 'credit' | 'debit'
      },
      // Unique so the same SAP response file is never ingested twice.
      file_name: {
        type: DataTypes.STRING(255),
        allowNull: false,
        unique: true,
      },
      // Exactly one of credit_note_id / debit_note_id is expected per row,
      // depending on `type` — not enforced at the DB level.
      credit_note_id: {
        type: DataTypes.INTEGER,
        allowNull: true,
        references: { model: 'form_16_credit_notes', key: 'id' },
        onDelete: 'SET NULL',
      },
      debit_note_id: {
        type: DataTypes.INTEGER,
        allowNull: true,
        references: { model: 'form_16_debit_notes', key: 'id' },
        onDelete: 'SET NULL',
      },
      claim_number: {
        type: DataTypes.STRING(100),
        allowNull: true,
      },
      sap_document_number: {
        type: DataTypes.STRING(100),
        allowNull: true,
      },
      msg_typ: {
        type: DataTypes.STRING(20),
        allowNull: true,
      },
      message: {
        type: DataTypes.TEXT,
        allowNull: true,
      },
      // Original response row preserved verbatim for troubleshooting.
      raw_row: {
        type: DataTypes.JSONB,
        allowNull: true,
      },
      // Location of the stored response file for later download.
      storage_url: {
        type: DataTypes.STRING(500),
        allowNull: true,
      },
      created_at: {
        type: DataTypes.DATE,
        allowNull: false,
        defaultValue: DataTypes.NOW,
      },
      updated_at: {
        type: DataTypes.DATE,
        allowNull: false,
        defaultValue: DataTypes.NOW,
      },
    });

    // Index names are auto-generated by Sequelize (no explicit `name`);
    // they are removed implicitly when the table is dropped in `down`.
    await queryInterface.addIndex('form16_sap_responses', ['type']);
    await queryInterface.addIndex('form16_sap_responses', ['credit_note_id']);
    await queryInterface.addIndex('form16_sap_responses', ['debit_note_id']);
    await queryInterface.addIndex('form16_sap_responses', ['claim_number']);
  },

  // Rollback: drop the table (indexes go with it).
  down: async (queryInterface: QueryInterface) => {
    await queryInterface.dropTable('form16_sap_responses');
  },
};
|
||||||
|
|
||||||
26
src/migrations/20260317120001-add-form16-trns-uniq-no.ts
Normal file
26
src/migrations/20260317120001-add-form16-trns-uniq-no.ts
Normal file
@ -0,0 +1,26 @@
|
|||||||
|
import type { QueryInterface } from 'sequelize';
|
||||||
|
import { DataTypes } from 'sequelize';
|
||||||
|
|
||||||
|
const TRNS_COLUMN = 'trns_uniq_no';

// Tables that receive the column, paired with their index names.
// Order matters: `up` processes credit before debit; `down` reverses it.
const TRNS_TARGETS = [
  { table: 'form_16_credit_notes', index: 'idx_form16_credit_notes_trns_uniq_no' },
  { table: 'form_16_debit_notes', index: 'idx_form16_debit_notes_trns_uniq_no' },
] as const;

module.exports = {
  /** Adds a nullable trns_uniq_no column (plus index) to credit and debit notes. */
  up: async (queryInterface: QueryInterface) => {
    for (const { table } of TRNS_TARGETS) {
      await queryInterface.addColumn(table, TRNS_COLUMN, {
        type: DataTypes.STRING(120),
        allowNull: true,
      });
    }

    for (const { table, index } of TRNS_TARGETS) {
      await queryInterface.addIndex(table, [TRNS_COLUMN], { name: index });
    }
  },

  /** Reverses `up`: indexes first, then columns, debit table before credit. */
  down: async (queryInterface: QueryInterface) => {
    for (const { table, index } of [...TRNS_TARGETS].reverse()) {
      await queryInterface.removeIndex(table, index);
    }
    for (const { table } of [...TRNS_TARGETS].reverse()) {
      await queryInterface.removeColumn(table, TRNS_COLUMN);
    }
  },
};
|
||||||
|
|
||||||
@ -0,0 +1,70 @@
|
|||||||
|
import type { QueryInterface } from 'sequelize';
|
||||||
|
import { DataTypes } from 'sequelize';
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Separate table for Form 16 debit note SAP responses (OUTGOING FORM16_DBT).
|
||||||
|
* Credit note SAP responses remain in form16_sap_responses only.
|
||||||
|
*/
|
||||||
|
module.exports = {
|
||||||
|
up: async (queryInterface: QueryInterface) => {
|
||||||
|
await queryInterface.createTable('form16_debit_note_sap_responses', {
|
||||||
|
id: {
|
||||||
|
type: DataTypes.INTEGER,
|
||||||
|
autoIncrement: true,
|
||||||
|
primaryKey: true,
|
||||||
|
},
|
||||||
|
file_name: {
|
||||||
|
type: DataTypes.STRING(255),
|
||||||
|
allowNull: false,
|
||||||
|
unique: true,
|
||||||
|
},
|
||||||
|
debit_note_id: {
|
||||||
|
type: DataTypes.INTEGER,
|
||||||
|
allowNull: true,
|
||||||
|
references: { model: 'form_16_debit_notes', key: 'id' },
|
||||||
|
onDelete: 'SET NULL',
|
||||||
|
},
|
||||||
|
claim_number: {
|
||||||
|
type: DataTypes.STRING(100),
|
||||||
|
allowNull: true,
|
||||||
|
},
|
||||||
|
sap_document_number: {
|
||||||
|
type: DataTypes.STRING(100),
|
||||||
|
allowNull: true,
|
||||||
|
},
|
||||||
|
msg_typ: {
|
||||||
|
type: DataTypes.STRING(20),
|
||||||
|
allowNull: true,
|
||||||
|
},
|
||||||
|
message: {
|
||||||
|
type: DataTypes.TEXT,
|
||||||
|
allowNull: true,
|
||||||
|
},
|
||||||
|
raw_row: {
|
||||||
|
type: DataTypes.JSONB,
|
||||||
|
allowNull: true,
|
||||||
|
},
|
||||||
|
storage_url: {
|
||||||
|
type: DataTypes.STRING(500),
|
||||||
|
allowNull: true,
|
||||||
|
},
|
||||||
|
created_at: {
|
||||||
|
type: DataTypes.DATE,
|
||||||
|
allowNull: false,
|
||||||
|
defaultValue: DataTypes.NOW,
|
||||||
|
},
|
||||||
|
updated_at: {
|
||||||
|
type: DataTypes.DATE,
|
||||||
|
allowNull: false,
|
||||||
|
defaultValue: DataTypes.NOW,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
await queryInterface.addIndex('form16_debit_note_sap_responses', ['debit_note_id']);
|
||||||
|
await queryInterface.addIndex('form16_debit_note_sap_responses', ['claim_number']);
|
||||||
|
},
|
||||||
|
|
||||||
|
down: async (queryInterface: QueryInterface) => {
|
||||||
|
await queryInterface.dropTable('form16_debit_note_sap_responses');
|
||||||
|
},
|
||||||
|
};
|
||||||
41
src/migrations/20260318200001-add-sap-response-csv-fields.ts
Normal file
41
src/migrations/20260318200001-add-sap-response-csv-fields.ts
Normal file
@ -0,0 +1,41 @@
|
|||||||
|
import type { QueryInterface } from 'sequelize';
|
||||||
|
import { DataTypes } from 'sequelize';
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Add explicit CSV-column fields to both SAP response tables.
|
||||||
|
* Previously everything was dumped into raw_row; now each well-known SAP CSV column
|
||||||
|
* has its own DB column, and raw_row holds only unexpected/extra columns.
|
||||||
|
*
|
||||||
|
* New columns (both tables):
|
||||||
|
* trns_uniq_no – TRNS_UNIQ_NO from SAP response (our unique ID echoed back)
|
||||||
|
* tds_trns_id – TDS_TRNS_ID from SAP response (= credit note number we sent)
|
||||||
|
* doc_date – DOC_DATE (SAP document date)
|
||||||
|
* tds_amt – TDS_AMT (amount confirmed by SAP)
|
||||||
|
*/
|
||||||
|
module.exports = {
|
||||||
|
up: async (queryInterface: QueryInterface) => {
|
||||||
|
const commonColumns = [
|
||||||
|
['trns_uniq_no', { type: DataTypes.STRING(200), allowNull: true }],
|
||||||
|
['tds_trns_id', { type: DataTypes.STRING(200), allowNull: true }],
|
||||||
|
['doc_date', { type: DataTypes.STRING(20), allowNull: true }],
|
||||||
|
['tds_amt', { type: DataTypes.STRING(50), allowNull: true }],
|
||||||
|
] as const;
|
||||||
|
|
||||||
|
for (const [col, def] of commonColumns) {
|
||||||
|
await queryInterface.addColumn('form16_sap_responses', col, def).catch(() => {/* already exists */});
|
||||||
|
await queryInterface.addColumn('form16_debit_note_sap_responses', col, def).catch(() => {/* already exists */});
|
||||||
|
}
|
||||||
|
|
||||||
|
await queryInterface.addIndex('form16_sap_responses', ['trns_uniq_no'], { name: 'idx_f16_sap_resp_trns_uniq_no' }).catch(() => {});
|
||||||
|
await queryInterface.addIndex('form16_sap_responses', ['tds_trns_id'], { name: 'idx_f16_sap_resp_tds_trns_id' }).catch(() => {});
|
||||||
|
await queryInterface.addIndex('form16_debit_note_sap_responses', ['trns_uniq_no'], { name: 'idx_f16_dbt_sap_trns_uniq_no' }).catch(() => {});
|
||||||
|
await queryInterface.addIndex('form16_debit_note_sap_responses', ['tds_trns_id'], { name: 'idx_f16_dbt_sap_tds_trns_id' }).catch(() => {});
|
||||||
|
},
|
||||||
|
|
||||||
|
down: async (queryInterface: QueryInterface) => {
|
||||||
|
for (const col of ['trns_uniq_no', 'tds_trns_id', 'doc_date', 'tds_amt']) {
|
||||||
|
await queryInterface.removeColumn('form16_sap_responses', col).catch(() => {});
|
||||||
|
await queryInterface.removeColumn('form16_debit_note_sap_responses', col).catch(() => {});
|
||||||
|
}
|
||||||
|
},
|
||||||
|
};
|
||||||
@ -0,0 +1,87 @@
|
|||||||
|
import type { QueryInterface } from 'sequelize';
|
||||||
|
import { DataTypes } from 'sequelize';
|
||||||
|
|
||||||
|
module.exports = {
|
||||||
|
up: async (queryInterface: QueryInterface) => {
|
||||||
|
// 1) Create read-log table for processed SAP CSV files
|
||||||
|
await queryInterface.createTable('from16_sap_read_file', {
|
||||||
|
id: { type: DataTypes.INTEGER, autoIncrement: true, primaryKey: true },
|
||||||
|
file_name: { type: DataTypes.STRING(255), allowNull: false, unique: true },
|
||||||
|
total_records: { type: DataTypes.INTEGER, allowNull: false, defaultValue: 0 },
|
||||||
|
total_credit_notes: { type: DataTypes.INTEGER, allowNull: false, defaultValue: 0 },
|
||||||
|
total_debit_notes: { type: DataTypes.INTEGER, allowNull: false, defaultValue: 0 },
|
||||||
|
created_at: { type: DataTypes.DATE, allowNull: false, defaultValue: DataTypes.NOW },
|
||||||
|
updated_at: { type: DataTypes.DATE, allowNull: false, defaultValue: DataTypes.NOW },
|
||||||
|
}).catch(() => {});
|
||||||
|
|
||||||
|
// 2) Add required new fields to form16_sap_responses
|
||||||
|
await queryInterface.addColumn('form16_sap_responses', 'doc_no', {
|
||||||
|
type: DataTypes.STRING(200),
|
||||||
|
allowNull: true,
|
||||||
|
}).catch(() => {});
|
||||||
|
|
||||||
|
// 3) Drop old fields from form16_sap_responses (as requested)
|
||||||
|
for (const col of [
|
||||||
|
'type',
|
||||||
|
'file_name',
|
||||||
|
'credit_note_id',
|
||||||
|
'debit_note_id',
|
||||||
|
'claim_number',
|
||||||
|
'sap_document_number',
|
||||||
|
'doc_date',
|
||||||
|
'tds_amt',
|
||||||
|
'raw_row',
|
||||||
|
'storage_url',
|
||||||
|
]) {
|
||||||
|
await queryInterface.removeColumn('form16_sap_responses', col).catch(() => {});
|
||||||
|
}
|
||||||
|
|
||||||
|
// 4) Ensure required columns exist for new contract
|
||||||
|
await queryInterface.addColumn('form16_sap_responses', 'trns_uniq_no', {
|
||||||
|
type: DataTypes.STRING(200),
|
||||||
|
allowNull: true,
|
||||||
|
}).catch(() => {});
|
||||||
|
|
||||||
|
await queryInterface.addColumn('form16_sap_responses', 'tds_trns_id', {
|
||||||
|
type: DataTypes.STRING(200),
|
||||||
|
allowNull: true,
|
||||||
|
}).catch(() => {});
|
||||||
|
|
||||||
|
await queryInterface.addColumn('form16_sap_responses', 'msg_typ', {
|
||||||
|
type: DataTypes.STRING(20),
|
||||||
|
allowNull: true,
|
||||||
|
}).catch(() => {});
|
||||||
|
|
||||||
|
await queryInterface.addColumn('form16_sap_responses', 'message', {
|
||||||
|
type: DataTypes.TEXT,
|
||||||
|
allowNull: true,
|
||||||
|
}).catch(() => {});
|
||||||
|
|
||||||
|
await queryInterface.addIndex('form16_sap_responses', ['tds_trns_id'], {
|
||||||
|
name: 'idx_form16_sap_responses_tds_trns_id',
|
||||||
|
}).catch(() => {});
|
||||||
|
await queryInterface.addIndex('form16_sap_responses', ['trns_uniq_no'], {
|
||||||
|
name: 'idx_form16_sap_responses_trns_uniq_no',
|
||||||
|
}).catch(() => {});
|
||||||
|
await queryInterface.addIndex('from16_sap_read_file', ['file_name'], {
|
||||||
|
name: 'idx_from16_sap_read_file_name',
|
||||||
|
}).catch(() => {});
|
||||||
|
},
|
||||||
|
|
||||||
|
down: async (queryInterface: QueryInterface) => {
|
||||||
|
// Recreate old columns in form16_sap_responses
|
||||||
|
await queryInterface.addColumn('form16_sap_responses', 'type', { type: DataTypes.STRING(10), allowNull: false, defaultValue: 'credit' }).catch(() => {});
|
||||||
|
await queryInterface.addColumn('form16_sap_responses', 'file_name', { type: DataTypes.STRING(255), allowNull: true }).catch(() => {});
|
||||||
|
await queryInterface.addColumn('form16_sap_responses', 'credit_note_id', { type: DataTypes.INTEGER, allowNull: true }).catch(() => {});
|
||||||
|
await queryInterface.addColumn('form16_sap_responses', 'debit_note_id', { type: DataTypes.INTEGER, allowNull: true }).catch(() => {});
|
||||||
|
await queryInterface.addColumn('form16_sap_responses', 'claim_number', { type: DataTypes.STRING(100), allowNull: true }).catch(() => {});
|
||||||
|
await queryInterface.addColumn('form16_sap_responses', 'sap_document_number', { type: DataTypes.STRING(100), allowNull: true }).catch(() => {});
|
||||||
|
await queryInterface.addColumn('form16_sap_responses', 'doc_date', { type: DataTypes.STRING(20), allowNull: true }).catch(() => {});
|
||||||
|
await queryInterface.addColumn('form16_sap_responses', 'tds_amt', { type: DataTypes.STRING(50), allowNull: true }).catch(() => {});
|
||||||
|
await queryInterface.addColumn('form16_sap_responses', 'raw_row', { type: DataTypes.JSONB, allowNull: true }).catch(() => {});
|
||||||
|
await queryInterface.addColumn('form16_sap_responses', 'storage_url', { type: DataTypes.STRING(500), allowNull: true }).catch(() => {});
|
||||||
|
|
||||||
|
await queryInterface.removeColumn('form16_sap_responses', 'doc_no').catch(() => {});
|
||||||
|
await queryInterface.dropTable('from16_sap_read_file').catch(() => {});
|
||||||
|
},
|
||||||
|
};
|
||||||
18
src/migrations/20260324110001-add-pan-number-to-26as.ts
Normal file
18
src/migrations/20260324110001-add-pan-number-to-26as.ts
Normal file
@ -0,0 +1,18 @@
|
|||||||
|
import type { QueryInterface } from 'sequelize';
|
||||||
|
import { DataTypes } from 'sequelize';
|
||||||
|
|
||||||
|
module.exports = {
|
||||||
|
up: async (queryInterface: QueryInterface) => {
|
||||||
|
await queryInterface.addColumn('tds_26as_entries', 'pan_number', {
|
||||||
|
type: DataTypes.STRING(20),
|
||||||
|
allowNull: true,
|
||||||
|
comment: 'PAN from 26AS header (assessee PAN)',
|
||||||
|
});
|
||||||
|
await queryInterface.addIndex('tds_26as_entries', ['pan_number'], { name: 'idx_tds_26as_pan' });
|
||||||
|
},
|
||||||
|
|
||||||
|
down: async (queryInterface: QueryInterface) => {
|
||||||
|
await queryInterface.removeIndex('tds_26as_entries', 'idx_tds_26as_pan');
|
||||||
|
await queryInterface.removeColumn('tds_26as_entries', 'pan_number');
|
||||||
|
},
|
||||||
|
};
|
||||||
46
src/migrations/20260325090001-ensure-pan-number-in-26as.ts
Normal file
46
src/migrations/20260325090001-ensure-pan-number-in-26as.ts
Normal file
@ -0,0 +1,46 @@
|
|||||||
|
import type { QueryInterface } from 'sequelize';
|
||||||
|
import { DataTypes, QueryTypes } from 'sequelize';
|
||||||
|
|
||||||
|
module.exports = {
|
||||||
|
up: async (queryInterface: QueryInterface) => {
|
||||||
|
// Use information_schema so this migration is safe even if a previous run
|
||||||
|
// recorded as "executed" but didn't actually alter the schema.
|
||||||
|
const sequelize = (queryInterface as any).sequelize;
|
||||||
|
|
||||||
|
const [colRow] = await sequelize.query(
|
||||||
|
`SELECT CASE WHEN COUNT(*) > 0 THEN true ELSE false END AS exists
|
||||||
|
FROM information_schema.columns
|
||||||
|
WHERE table_name = 'tds_26as_entries' AND column_name = 'pan_number'`,
|
||||||
|
{ type: QueryTypes.SELECT }
|
||||||
|
);
|
||||||
|
|
||||||
|
if (!colRow?.exists) {
|
||||||
|
await queryInterface.addColumn('tds_26as_entries', 'pan_number', {
|
||||||
|
type: DataTypes.STRING(20),
|
||||||
|
allowNull: true,
|
||||||
|
comment: 'PAN from 26AS header (assessee PAN)',
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
const [idxRow] = await sequelize.query(
|
||||||
|
`SELECT CASE WHEN COUNT(*) > 0 THEN true ELSE false END AS exists
|
||||||
|
FROM pg_indexes
|
||||||
|
WHERE schemaname = 'public'
|
||||||
|
AND tablename = 'tds_26as_entries'
|
||||||
|
AND indexname = 'idx_tds_26as_pan'`,
|
||||||
|
{ type: QueryTypes.SELECT }
|
||||||
|
);
|
||||||
|
|
||||||
|
if (!idxRow?.exists) {
|
||||||
|
await queryInterface.addIndex('tds_26as_entries', ['pan_number'], { name: 'idx_tds_26as_pan' });
|
||||||
|
}
|
||||||
|
},
|
||||||
|
|
||||||
|
down: async (queryInterface: QueryInterface) => {
|
||||||
|
// Best-effort rollback. If column/index already absent, these may throw.
|
||||||
|
// We intentionally keep down strict because rollback isn't required for forward fixes.
|
||||||
|
await queryInterface.removeIndex('tds_26as_entries', 'idx_tds_26as_pan');
|
||||||
|
await queryInterface.removeColumn('tds_26as_entries', 'pan_number');
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
@ -0,0 +1,84 @@
|
|||||||
|
import { QueryInterface, DataTypes } from 'sequelize';
|
||||||
|
|
||||||
|
module.exports = {
|
||||||
|
up: async (queryInterface: QueryInterface) => {
|
||||||
|
// 1. Add sessionToken to users table
|
||||||
|
await queryInterface.addColumn('users', 'sessionToken', {
|
||||||
|
type: DataTypes.UUID,
|
||||||
|
allowNull: true,
|
||||||
|
comment: 'Unique token for active session to restrict concurrent logins'
|
||||||
|
});
|
||||||
|
|
||||||
|
// 2. Add lastLoginDevice to users table
|
||||||
|
await queryInterface.addColumn('users', 'lastLoginDevice', {
|
||||||
|
type: DataTypes.STRING(255),
|
||||||
|
allowNull: true,
|
||||||
|
comment: 'Browser/Device string from User-Agent of the active session'
|
||||||
|
});
|
||||||
|
|
||||||
|
// 3. Create hsn_sac_codes table
|
||||||
|
await queryInterface.createTable('hsn_sac_codes', {
|
||||||
|
id: {
|
||||||
|
type: DataTypes.UUID,
|
||||||
|
defaultValue: DataTypes.UUIDV4,
|
||||||
|
primaryKey: true,
|
||||||
|
},
|
||||||
|
code: {
|
||||||
|
type: DataTypes.STRING(100),
|
||||||
|
allowNull: false,
|
||||||
|
comment: 'The HSN or SAC code value'
|
||||||
|
},
|
||||||
|
type: {
|
||||||
|
type: DataTypes.ENUM('HSN', 'SAC'),
|
||||||
|
allowNull: false,
|
||||||
|
comment: 'Type of code: either HSN or SAC'
|
||||||
|
},
|
||||||
|
gstRate: {
|
||||||
|
type: DataTypes.DECIMAL(5, 2),
|
||||||
|
allowNull: true,
|
||||||
|
field: 'gst_rate',
|
||||||
|
comment: 'Associated GST rate percentage'
|
||||||
|
},
|
||||||
|
description: {
|
||||||
|
type: DataTypes.TEXT,
|
||||||
|
allowNull: true,
|
||||||
|
comment: 'Description of the code'
|
||||||
|
},
|
||||||
|
isActive: {
|
||||||
|
type: DataTypes.BOOLEAN,
|
||||||
|
defaultValue: true,
|
||||||
|
allowNull: false,
|
||||||
|
field: 'is_active'
|
||||||
|
},
|
||||||
|
createdAt: {
|
||||||
|
type: DataTypes.DATE,
|
||||||
|
allowNull: false,
|
||||||
|
defaultValue: DataTypes.NOW,
|
||||||
|
field: 'created_at'
|
||||||
|
},
|
||||||
|
updatedAt: {
|
||||||
|
type: DataTypes.DATE,
|
||||||
|
allowNull: false,
|
||||||
|
defaultValue: DataTypes.NOW,
|
||||||
|
field: 'updated_at'
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
// Add indexes to hsn_sac_codes
|
||||||
|
await queryInterface.addIndex('hsn_sac_codes', ['code']);
|
||||||
|
await queryInterface.addIndex('hsn_sac_codes', ['type']);
|
||||||
|
await queryInterface.addIndex('hsn_sac_codes', ['is_active']);
|
||||||
|
},
|
||||||
|
|
||||||
|
down: async (queryInterface: QueryInterface) => {
|
||||||
|
// Drop hsn_sac_codes table
|
||||||
|
await queryInterface.dropTable('hsn_sac_codes');
|
||||||
|
|
||||||
|
// Remove columns from users table
|
||||||
|
await queryInterface.removeColumn('users', 'lastLoginDevice');
|
||||||
|
await queryInterface.removeColumn('users', 'sessionToken');
|
||||||
|
|
||||||
|
// Also drop the ENUM type created for hsn_sac_codes type
|
||||||
|
await queryInterface.sequelize.query('DROP TYPE IF EXISTS "enum_hsn_sac_codes_type";');
|
||||||
|
}
|
||||||
|
};
|
||||||
@ -0,0 +1,166 @@
|
|||||||
|
import { QueryInterface, DataTypes } from 'sequelize';
|
||||||
|
|
||||||
|
module.exports = {
|
||||||
|
up: async (queryInterface: QueryInterface) => {
|
||||||
|
// 1. Update claim_credit_notes table with idempotency checks
|
||||||
|
const tableDefinition = await queryInterface.describeTable('claim_credit_notes');
|
||||||
|
|
||||||
|
if (!tableDefinition.transaction_no) {
|
||||||
|
await queryInterface.addColumn('claim_credit_notes', 'transaction_no', {
|
||||||
|
type: DataTypes.STRING(100),
|
||||||
|
allowNull: true,
|
||||||
|
comment: 'Overall PWC transaction unique number'
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!tableDefinition.tds_amount) {
|
||||||
|
await queryInterface.addColumn('claim_credit_notes', 'tds_amount', {
|
||||||
|
type: DataTypes.DECIMAL(15, 2),
|
||||||
|
allowNull: true,
|
||||||
|
defaultValue: 0,
|
||||||
|
comment: 'TDS amount for the credit note'
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!tableDefinition.credit_amount) {
|
||||||
|
await queryInterface.addColumn('claim_credit_notes', 'credit_amount', {
|
||||||
|
type: DataTypes.DECIMAL(15, 2),
|
||||||
|
allowNull: true,
|
||||||
|
defaultValue: 0,
|
||||||
|
comment: 'Final credit amount after TDS'
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
// Drop redundant columns if they exist
|
||||||
|
const columnsToDrop = [
|
||||||
|
'gst_rate', 'gst_amt', 'cgst_rate', 'cgst_amt',
|
||||||
|
'sgst_rate', 'sgst_amt', 'igst_rate', 'igst_amt',
|
||||||
|
'utgst_rate', 'utgst_amt', 'cess_rate', 'cess_amt',
|
||||||
|
'total_amt'
|
||||||
|
];
|
||||||
|
|
||||||
|
for (const column of columnsToDrop) {
|
||||||
|
if (tableDefinition[column]) {
|
||||||
|
await queryInterface.removeColumn('claim_credit_notes', column);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// 2. Create claim_credit_note_items table (Refined & Unified)
|
||||||
|
const allTables = await queryInterface.showAllTables();
|
||||||
|
const tableExists = allTables.some(t => {
|
||||||
|
const name = typeof t === 'string' ? t : (t as any).tableName;
|
||||||
|
return name.toLowerCase() === 'claim_credit_note_items';
|
||||||
|
});
|
||||||
|
|
||||||
|
if (!tableExists) {
|
||||||
|
await queryInterface.createTable('claim_credit_note_items', {
|
||||||
|
item_id: {
|
||||||
|
type: DataTypes.UUID,
|
||||||
|
defaultValue: DataTypes.UUIDV4,
|
||||||
|
primaryKey: true,
|
||||||
|
},
|
||||||
|
credit_note_id: {
|
||||||
|
type: DataTypes.UUID,
|
||||||
|
allowNull: false,
|
||||||
|
references: {
|
||||||
|
model: 'claim_credit_notes',
|
||||||
|
key: 'credit_note_id',
|
||||||
|
},
|
||||||
|
onDelete: 'CASCADE',
|
||||||
|
onUpdate: 'CASCADE',
|
||||||
|
},
|
||||||
|
sl_no: {
|
||||||
|
type: DataTypes.INTEGER,
|
||||||
|
allowNull: false,
|
||||||
|
},
|
||||||
|
transaction_no: {
|
||||||
|
type: DataTypes.STRING(100),
|
||||||
|
allowNull: true,
|
||||||
|
comment: 'Per-item TRNS_UNIQ_NO'
|
||||||
|
},
|
||||||
|
description: {
|
||||||
|
type: DataTypes.TEXT,
|
||||||
|
allowNull: true,
|
||||||
|
},
|
||||||
|
hsn_cd: {
|
||||||
|
type: DataTypes.STRING(20),
|
||||||
|
allowNull: true,
|
||||||
|
},
|
||||||
|
amount: {
|
||||||
|
type: DataTypes.DECIMAL(15, 2),
|
||||||
|
allowNull: false,
|
||||||
|
defaultValue: 0,
|
||||||
|
},
|
||||||
|
claim_amount: {
|
||||||
|
type: DataTypes.DECIMAL(15, 2),
|
||||||
|
allowNull: true,
|
||||||
|
defaultValue: 0,
|
||||||
|
},
|
||||||
|
tds_amount: {
|
||||||
|
type: DataTypes.DECIMAL(15, 2),
|
||||||
|
allowNull: true,
|
||||||
|
defaultValue: 0,
|
||||||
|
},
|
||||||
|
credit_amount: {
|
||||||
|
type: DataTypes.DECIMAL(15, 2),
|
||||||
|
allowNull: true,
|
||||||
|
defaultValue: 0,
|
||||||
|
},
|
||||||
|
created_at: {
|
||||||
|
type: DataTypes.DATE,
|
||||||
|
allowNull: false,
|
||||||
|
defaultValue: DataTypes.NOW,
|
||||||
|
},
|
||||||
|
updated_at: {
|
||||||
|
type: DataTypes.DATE,
|
||||||
|
allowNull: false,
|
||||||
|
defaultValue: DataTypes.NOW,
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
// Add index for performance
|
||||||
|
await queryInterface.addIndex('claim_credit_note_items', ['credit_note_id']);
|
||||||
|
await queryInterface.addIndex('claim_credit_note_items', ['transaction_no']);
|
||||||
|
}
|
||||||
|
},
|
||||||
|
|
||||||
|
down: async (queryInterface: QueryInterface) => {
|
||||||
|
// Drop the items table first
|
||||||
|
await queryInterface.dropTable('claim_credit_note_items');
|
||||||
|
|
||||||
|
// Re-add dropped columns to claim_credit_notes (if they were removed)
|
||||||
|
const tableDefinition = await queryInterface.describeTable('claim_credit_notes');
|
||||||
|
const columnsToReAdd = [
|
||||||
|
{ name: 'gst_rate', type: DataTypes.DECIMAL(5, 2) },
|
||||||
|
{ name: 'gst_amt', type: DataTypes.DECIMAL(15, 2) },
|
||||||
|
{ name: 'cgst_rate', type: DataTypes.DECIMAL(5, 2) },
|
||||||
|
{ name: 'cgst_amt', type: DataTypes.DECIMAL(15, 2) },
|
||||||
|
{ name: 'sgst_rate', type: DataTypes.DECIMAL(5, 2) },
|
||||||
|
{ name: 'sgst_amt', type: DataTypes.DECIMAL(15, 2) },
|
||||||
|
{ name: 'igst_rate', type: DataTypes.DECIMAL(5, 2) },
|
||||||
|
{ name: 'igst_amt', type: DataTypes.DECIMAL(15, 2) },
|
||||||
|
{ name: 'utgst_rate', type: DataTypes.DECIMAL(5, 2) },
|
||||||
|
{ name: 'utgst_amt', type: DataTypes.DECIMAL(15, 2) },
|
||||||
|
{ name: 'cess_rate', type: DataTypes.DECIMAL(5, 2) },
|
||||||
|
{ name: 'cess_amt', type: DataTypes.DECIMAL(15, 2) },
|
||||||
|
{ name: 'total_amt', type: DataTypes.DECIMAL(15, 2) }
|
||||||
|
];
|
||||||
|
|
||||||
|
for (const col of columnsToReAdd) {
|
||||||
|
if (!tableDefinition[col.name]) {
|
||||||
|
await queryInterface.addColumn('claim_credit_notes', col.name, {
|
||||||
|
type: col.type,
|
||||||
|
allowNull: true
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Remove added columns
|
||||||
|
const addedCols = ['credit_amount', 'tds_amount', 'transaction_no'];
|
||||||
|
for (const col of addedCols) {
|
||||||
|
if (tableDefinition[col]) {
|
||||||
|
await queryInterface.removeColumn('claim_credit_notes', col);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
};
|
||||||
130
src/migrations/2026041300-create-cpc-cdc-tables.ts
Normal file
130
src/migrations/2026041300-create-cpc-cdc-tables.ts
Normal file
@ -0,0 +1,130 @@
|
|||||||
|
import { QueryInterface, DataTypes } from 'sequelize';
|
||||||
|
|
||||||
|
export async function up(queryInterface: QueryInterface): Promise<void> {
|
||||||
|
// Create cpc_documents table
|
||||||
|
await queryInterface.createTable('cpc_documents', {
|
||||||
|
id: {
|
||||||
|
type: DataTypes.UUID,
|
||||||
|
defaultValue: DataTypes.UUIDV4,
|
||||||
|
primaryKey: true,
|
||||||
|
allowNull: false
|
||||||
|
},
|
||||||
|
booking_id: {
|
||||||
|
type: DataTypes.STRING(255),
|
||||||
|
allowNull: true
|
||||||
|
},
|
||||||
|
claim_id: {
|
||||||
|
type: DataTypes.STRING(255),
|
||||||
|
allowNull: true
|
||||||
|
},
|
||||||
|
attempt_no: {
|
||||||
|
type: DataTypes.INTEGER,
|
||||||
|
defaultValue: 1,
|
||||||
|
allowNull: false
|
||||||
|
},
|
||||||
|
document_type: {
|
||||||
|
type: DataTypes.STRING(255),
|
||||||
|
allowNull: true
|
||||||
|
},
|
||||||
|
document_gcp_url: {
|
||||||
|
type: DataTypes.TEXT,
|
||||||
|
allowNull: true
|
||||||
|
},
|
||||||
|
provider: {
|
||||||
|
type: DataTypes.STRING(255),
|
||||||
|
allowNull: true
|
||||||
|
},
|
||||||
|
msd_payload: {
|
||||||
|
type: DataTypes.JSONB,
|
||||||
|
allowNull: true
|
||||||
|
},
|
||||||
|
extracted_fields: {
|
||||||
|
type: DataTypes.JSONB,
|
||||||
|
allowNull: true
|
||||||
|
},
|
||||||
|
field_confidence: {
|
||||||
|
type: DataTypes.JSONB,
|
||||||
|
allowNull: true
|
||||||
|
},
|
||||||
|
validation_status: {
|
||||||
|
type: DataTypes.STRING(255),
|
||||||
|
allowNull: true
|
||||||
|
},
|
||||||
|
match_percentage: {
|
||||||
|
type: DataTypes.FLOAT,
|
||||||
|
allowNull: true
|
||||||
|
},
|
||||||
|
mismatch_reasons: {
|
||||||
|
type: DataTypes.JSONB,
|
||||||
|
allowNull: true
|
||||||
|
},
|
||||||
|
field_results: {
|
||||||
|
type: DataTypes.JSONB,
|
||||||
|
allowNull: true
|
||||||
|
},
|
||||||
|
ip_address: {
|
||||||
|
type: DataTypes.STRING(255),
|
||||||
|
allowNull: true
|
||||||
|
},
|
||||||
|
created_at: {
|
||||||
|
type: DataTypes.DATE,
|
||||||
|
defaultValue: DataTypes.NOW,
|
||||||
|
allowNull: false
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
// Create cpc_audit_logs table
|
||||||
|
await queryInterface.createTable('cpc_audit_logs', {
|
||||||
|
id: {
|
||||||
|
type: DataTypes.UUID,
|
||||||
|
defaultValue: DataTypes.UUIDV4,
|
||||||
|
primaryKey: true,
|
||||||
|
allowNull: false
|
||||||
|
},
|
||||||
|
document_id: {
|
||||||
|
type: DataTypes.UUID,
|
||||||
|
allowNull: false,
|
||||||
|
references: {
|
||||||
|
model: 'cpc_documents',
|
||||||
|
key: 'id'
|
||||||
|
},
|
||||||
|
onDelete: 'CASCADE'
|
||||||
|
},
|
||||||
|
action: {
|
||||||
|
type: DataTypes.STRING(255),
|
||||||
|
allowNull: false
|
||||||
|
},
|
||||||
|
previous_state: {
|
||||||
|
type: DataTypes.JSONB,
|
||||||
|
allowNull: true
|
||||||
|
},
|
||||||
|
new_state: {
|
||||||
|
type: DataTypes.JSONB,
|
||||||
|
allowNull: true
|
||||||
|
},
|
||||||
|
performed_by: {
|
||||||
|
type: DataTypes.STRING(255),
|
||||||
|
allowNull: true
|
||||||
|
},
|
||||||
|
remarks: {
|
||||||
|
type: DataTypes.TEXT,
|
||||||
|
allowNull: true
|
||||||
|
},
|
||||||
|
created_at: {
|
||||||
|
type: DataTypes.DATE,
|
||||||
|
defaultValue: DataTypes.NOW,
|
||||||
|
allowNull: false
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
// Unique index for the multi-attempt claim logic (idempotent for repeated startup migrations)
|
||||||
|
await queryInterface.sequelize.query(`
|
||||||
|
CREATE UNIQUE INDEX IF NOT EXISTS unique_cpc_document_attempt
|
||||||
|
ON cpc_documents (claim_id, attempt_no, document_type);
|
||||||
|
`);
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function down(queryInterface: QueryInterface): Promise<void> {
|
||||||
|
await queryInterface.dropTable('cpc_audit_logs');
|
||||||
|
await queryInterface.dropTable('cpc_documents');
|
||||||
|
}
|
||||||
50
src/migrations/20260414100000-ensure-cpc-cdc-tables-exist.ts
Normal file
50
src/migrations/20260414100000-ensure-cpc-cdc-tables-exist.ts
Normal file
@ -0,0 +1,50 @@
|
|||||||
|
import { QueryInterface } from 'sequelize';
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Idempotent CPC-CDC schema for environments where 2026041300 did not run or tables were dropped.
|
||||||
|
* Safe to run on top of an existing DB that already has these tables from the earlier migration.
|
||||||
|
*/
|
||||||
|
export async function up(queryInterface: QueryInterface): Promise<void> {
|
||||||
|
await queryInterface.sequelize.query(`
|
||||||
|
CREATE TABLE IF NOT EXISTS cpc_documents (
|
||||||
|
id UUID NOT NULL PRIMARY KEY,
|
||||||
|
booking_id VARCHAR(255),
|
||||||
|
claim_id VARCHAR(255),
|
||||||
|
attempt_no INTEGER NOT NULL DEFAULT 1,
|
||||||
|
document_type VARCHAR(255),
|
||||||
|
document_gcp_url TEXT,
|
||||||
|
provider VARCHAR(255),
|
||||||
|
msd_payload JSONB,
|
||||||
|
extracted_fields JSONB,
|
||||||
|
field_confidence JSONB,
|
||||||
|
validation_status VARCHAR(255),
|
||||||
|
match_percentage DOUBLE PRECISION,
|
||||||
|
mismatch_reasons JSONB,
|
||||||
|
field_results JSONB,
|
||||||
|
ip_address VARCHAR(255),
|
||||||
|
created_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW()
|
||||||
|
);
|
||||||
|
`);
|
||||||
|
|
||||||
|
await queryInterface.sequelize.query(`
|
||||||
|
CREATE TABLE IF NOT EXISTS cpc_audit_logs (
|
||||||
|
id UUID NOT NULL PRIMARY KEY,
|
||||||
|
document_id UUID NOT NULL REFERENCES cpc_documents(id) ON DELETE CASCADE,
|
||||||
|
action VARCHAR(255) NOT NULL,
|
||||||
|
previous_state JSONB,
|
||||||
|
new_state JSONB,
|
||||||
|
performed_by VARCHAR(255),
|
||||||
|
remarks TEXT,
|
||||||
|
created_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW()
|
||||||
|
);
|
||||||
|
`);
|
||||||
|
|
||||||
|
await queryInterface.sequelize.query(`
|
||||||
|
CREATE UNIQUE INDEX IF NOT EXISTS unique_cpc_document_attempt
|
||||||
|
ON cpc_documents (claim_id, attempt_no, booking_id);
|
||||||
|
`);
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function down(_queryInterface: QueryInterface): Promise<void> {
|
||||||
|
// Non-destructive: tables may contain production CPC data.
|
||||||
|
}
|
||||||
@ -0,0 +1,26 @@
|
|||||||
|
import { QueryInterface } from 'sequelize';
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Batch upload can include multiple files of the same document_type in one attempt.
|
||||||
|
* Replace unique(claim_id, attempt_no, document_type) with unique(claim_id, attempt_no, booking_id)
|
||||||
|
* because booking_id is distinct per file (e.g. CLAIM-1, CLAIM-2, ...).
|
||||||
|
*/
|
||||||
|
export async function up(queryInterface: QueryInterface): Promise<void> {
|
||||||
|
await queryInterface.sequelize.query(`
|
||||||
|
DROP INDEX IF EXISTS unique_cpc_document_attempt;
|
||||||
|
`);
|
||||||
|
await queryInterface.sequelize.query(`
|
||||||
|
CREATE UNIQUE INDEX IF NOT EXISTS unique_cpc_document_claim_attempt_booking
|
||||||
|
ON cpc_documents (claim_id, attempt_no, booking_id);
|
||||||
|
`);
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function down(queryInterface: QueryInterface): Promise<void> {
|
||||||
|
await queryInterface.sequelize.query(`
|
||||||
|
DROP INDEX IF EXISTS unique_cpc_document_claim_attempt_booking;
|
||||||
|
`);
|
||||||
|
await queryInterface.sequelize.query(`
|
||||||
|
CREATE UNIQUE INDEX IF NOT EXISTS unique_cpc_document_attempt
|
||||||
|
ON cpc_documents (claim_id, attempt_no, document_type);
|
||||||
|
`);
|
||||||
|
}
|
||||||
@ -0,0 +1,26 @@
|
|||||||
|
import { QueryInterface } from 'sequelize';
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rename CPC admin viewer-list config key from CPC_CDC_* to CPC_CSD_* (display name aligned).
|
||||||
|
*/
|
||||||
|
export async function up(queryInterface: QueryInterface): Promise<void> {
|
||||||
|
await queryInterface.sequelize.query(`
|
||||||
|
UPDATE admin_configurations
|
||||||
|
SET
|
||||||
|
config_key = 'CPC_CSD_ADMIN_CONFIG',
|
||||||
|
display_name = 'CPC-CSD Admin Config',
|
||||||
|
description = 'CPC-CSD module visibility settings'
|
||||||
|
WHERE config_key = 'CPC_CDC_ADMIN_CONFIG'
|
||||||
|
`);
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function down(queryInterface: QueryInterface): Promise<void> {
|
||||||
|
await queryInterface.sequelize.query(`
|
||||||
|
UPDATE admin_configurations
|
||||||
|
SET
|
||||||
|
config_key = 'CPC_CDC_ADMIN_CONFIG',
|
||||||
|
display_name = 'CPC-CDC Admin Config',
|
||||||
|
description = 'CPC-CDC module visibility settings'
|
||||||
|
WHERE config_key = 'CPC_CSD_ADMIN_CONFIG'
|
||||||
|
`);
|
||||||
|
}
|
||||||
@ -9,18 +9,14 @@ interface ActivityTypeAttributes {
|
|||||||
taxationType?: string;
|
taxationType?: string;
|
||||||
sapRefNo?: string;
|
sapRefNo?: string;
|
||||||
isActive: boolean;
|
isActive: boolean;
|
||||||
hsnCode?: string | null;
|
creditPostingOn?: string | null;
|
||||||
sacCode?: string | null;
|
|
||||||
gstRate?: number | null;
|
|
||||||
glCode?: string | null;
|
|
||||||
creditNature?: 'Commercial' | 'GST' | null;
|
|
||||||
createdBy: string;
|
createdBy: string;
|
||||||
updatedBy?: string;
|
updatedBy?: string;
|
||||||
createdAt: Date;
|
createdAt: Date;
|
||||||
updatedAt: Date;
|
updatedAt: Date;
|
||||||
}
|
}
|
||||||
|
|
||||||
interface ActivityTypeCreationAttributes extends Optional<ActivityTypeAttributes, 'activityTypeId' | 'itemCode' | 'taxationType' | 'sapRefNo' | 'isActive' | 'updatedBy' | 'createdAt' | 'updatedAt'> { }
|
interface ActivityTypeCreationAttributes extends Optional<ActivityTypeAttributes, 'activityTypeId' | 'itemCode' | 'taxationType' | 'sapRefNo' | 'isActive' | 'creditPostingOn' | 'updatedBy' | 'createdAt' | 'updatedAt'> { }
|
||||||
|
|
||||||
class ActivityType extends Model<ActivityTypeAttributes, ActivityTypeCreationAttributes> implements ActivityTypeAttributes {
|
class ActivityType extends Model<ActivityTypeAttributes, ActivityTypeCreationAttributes> implements ActivityTypeAttributes {
|
||||||
public activityTypeId!: string;
|
public activityTypeId!: string;
|
||||||
@ -29,11 +25,7 @@ class ActivityType extends Model<ActivityTypeAttributes, ActivityTypeCreationAtt
|
|||||||
public taxationType?: string;
|
public taxationType?: string;
|
||||||
public sapRefNo?: string;
|
public sapRefNo?: string;
|
||||||
public isActive!: boolean;
|
public isActive!: boolean;
|
||||||
public hsnCode?: string | null;
|
public creditPostingOn?: string | null;
|
||||||
public sacCode?: string | null;
|
|
||||||
public gstRate?: number | null;
|
|
||||||
public glCode?: string | null;
|
|
||||||
public creditNature?: 'Commercial' | 'GST' | null;
|
|
||||||
public createdBy!: string;
|
public createdBy!: string;
|
||||||
public updatedBy?: string;
|
public updatedBy?: string;
|
||||||
public createdAt!: Date;
|
public createdAt!: Date;
|
||||||
@ -81,30 +73,12 @@ ActivityType.init(
|
|||||||
defaultValue: true,
|
defaultValue: true,
|
||||||
field: 'is_active'
|
field: 'is_active'
|
||||||
},
|
},
|
||||||
hsnCode: {
|
creditPostingOn: {
|
||||||
type: DataTypes.STRING(20),
|
|
||||||
allowNull: true,
|
|
||||||
field: 'hsn_code'
|
|
||||||
},
|
|
||||||
sacCode: {
|
|
||||||
type: DataTypes.STRING(20),
|
|
||||||
allowNull: true,
|
|
||||||
field: 'sac_code'
|
|
||||||
},
|
|
||||||
gstRate: {
|
|
||||||
type: DataTypes.DECIMAL(5, 2),
|
|
||||||
allowNull: true,
|
|
||||||
field: 'gst_rate'
|
|
||||||
},
|
|
||||||
glCode: {
|
|
||||||
type: DataTypes.STRING(50),
|
type: DataTypes.STRING(50),
|
||||||
allowNull: true,
|
allowNull: true,
|
||||||
field: 'gl_code'
|
defaultValue: null,
|
||||||
},
|
field: 'credit_posting_on',
|
||||||
creditNature: {
|
comment: 'Indicates what the credit note is posted against (e.g. "Spares", "Vehicle")'
|
||||||
type: DataTypes.ENUM('Commercial', 'GST'),
|
|
||||||
allowNull: true,
|
|
||||||
field: 'credit_nature'
|
|
||||||
},
|
},
|
||||||
createdBy: {
|
createdBy: {
|
||||||
type: DataTypes.UUID,
|
type: DataTypes.UUID,
|
||||||
|
|||||||
@ -10,19 +10,9 @@ interface ClaimCreditNoteAttributes {
|
|||||||
creditNoteNumber?: string;
|
creditNoteNumber?: string;
|
||||||
creditNoteDate?: Date;
|
creditNoteDate?: Date;
|
||||||
creditNoteAmount: number;
|
creditNoteAmount: number;
|
||||||
gstRate?: number;
|
transactionNo?: string;
|
||||||
gstAmt?: number;
|
tdsAmount?: number;
|
||||||
cgstRate?: number;
|
creditAmount?: number;
|
||||||
cgstAmt?: number;
|
|
||||||
sgstRate?: number;
|
|
||||||
sgstAmt?: number;
|
|
||||||
igstRate?: number;
|
|
||||||
igstAmt?: number;
|
|
||||||
utgstRate?: number;
|
|
||||||
utgstAmt?: number;
|
|
||||||
cessRate?: number;
|
|
||||||
cessAmt?: number;
|
|
||||||
totalAmt?: number;
|
|
||||||
sapDocumentNumber?: string;
|
sapDocumentNumber?: string;
|
||||||
creditNoteFilePath?: string;
|
creditNoteFilePath?: string;
|
||||||
status?: string;
|
status?: string;
|
||||||
@ -35,7 +25,7 @@ interface ClaimCreditNoteAttributes {
|
|||||||
updatedAt: Date;
|
updatedAt: Date;
|
||||||
}
|
}
|
||||||
|
|
||||||
interface ClaimCreditNoteCreationAttributes extends Optional<ClaimCreditNoteAttributes, 'creditNoteId' | 'invoiceId' | 'creditNoteNumber' | 'creditNoteDate' | 'gstRate' | 'gstAmt' | 'cgstRate' | 'cgstAmt' | 'sgstRate' | 'sgstAmt' | 'igstRate' | 'igstAmt' | 'utgstRate' | 'utgstAmt' | 'cessRate' | 'cessAmt' | 'totalAmt' | 'sapDocumentNumber' | 'creditNoteFilePath' | 'status' | 'errorMessage' | 'confirmedBy' | 'confirmedAt' | 'reason' | 'description' | 'createdAt' | 'updatedAt'> { }
|
interface ClaimCreditNoteCreationAttributes extends Optional<ClaimCreditNoteAttributes, 'creditNoteId' | 'invoiceId' | 'creditNoteNumber' | 'creditNoteDate' | 'transactionNo' | 'tdsAmount' | 'creditAmount' | 'sapDocumentNumber' | 'creditNoteFilePath' | 'status' | 'errorMessage' | 'confirmedBy' | 'confirmedAt' | 'reason' | 'description' | 'createdAt' | 'updatedAt'> { }
|
||||||
|
|
||||||
class ClaimCreditNote extends Model<ClaimCreditNoteAttributes, ClaimCreditNoteCreationAttributes> implements ClaimCreditNoteAttributes {
|
class ClaimCreditNote extends Model<ClaimCreditNoteAttributes, ClaimCreditNoteCreationAttributes> implements ClaimCreditNoteAttributes {
|
||||||
public creditNoteId!: string;
|
public creditNoteId!: string;
|
||||||
@ -44,19 +34,9 @@ class ClaimCreditNote extends Model<ClaimCreditNoteAttributes, ClaimCreditNoteCr
|
|||||||
public creditNoteNumber?: string;
|
public creditNoteNumber?: string;
|
||||||
public creditNoteDate?: Date;
|
public creditNoteDate?: Date;
|
||||||
public creditNoteAmount!: number;
|
public creditNoteAmount!: number;
|
||||||
public gstRate?: number;
|
public transactionNo?: string;
|
||||||
public gstAmt?: number;
|
public tdsAmount?: number;
|
||||||
public cgstRate?: number;
|
public creditAmount?: number;
|
||||||
public cgstAmt?: number;
|
|
||||||
public sgstRate?: number;
|
|
||||||
public sgstAmt?: number;
|
|
||||||
public igstRate?: number;
|
|
||||||
public igstAmt?: number;
|
|
||||||
public utgstRate?: number;
|
|
||||||
public utgstAmt?: number;
|
|
||||||
public cessRate?: number;
|
|
||||||
public cessAmt?: number;
|
|
||||||
public totalAmt?: number;
|
|
||||||
public sapDocumentNumber?: string;
|
public sapDocumentNumber?: string;
|
||||||
public creditNoteFilePath?: string;
|
public creditNoteFilePath?: string;
|
||||||
public status?: string;
|
public status?: string;
|
||||||
@ -115,70 +95,22 @@ ClaimCreditNote.init(
|
|||||||
allowNull: false,
|
allowNull: false,
|
||||||
field: 'credit_amount'
|
field: 'credit_amount'
|
||||||
},
|
},
|
||||||
gstRate: {
|
transactionNo: {
|
||||||
type: DataTypes.DECIMAL(5, 2),
|
type: DataTypes.STRING(100),
|
||||||
allowNull: true,
|
allowNull: true,
|
||||||
field: 'gst_rate'
|
field: 'transaction_no',
|
||||||
},
|
},
|
||||||
gstAmt: {
|
tdsAmount: {
|
||||||
type: DataTypes.DECIMAL(15, 2),
|
type: DataTypes.DECIMAL(15, 2),
|
||||||
allowNull: true,
|
allowNull: true,
|
||||||
field: 'gst_amt'
|
defaultValue: 0,
|
||||||
|
field: 'tds_amount',
|
||||||
},
|
},
|
||||||
cgstRate: {
|
creditAmount: {
|
||||||
type: DataTypes.DECIMAL(5, 2),
|
|
||||||
allowNull: true,
|
|
||||||
field: 'cgst_rate'
|
|
||||||
},
|
|
||||||
cgstAmt: {
|
|
||||||
type: DataTypes.DECIMAL(15, 2),
|
type: DataTypes.DECIMAL(15, 2),
|
||||||
allowNull: true,
|
allowNull: true,
|
||||||
field: 'cgst_amt'
|
defaultValue: 0,
|
||||||
},
|
field: 'credit_amount',
|
||||||
sgstRate: {
|
|
||||||
type: DataTypes.DECIMAL(5, 2),
|
|
||||||
allowNull: true,
|
|
||||||
field: 'sgst_rate'
|
|
||||||
},
|
|
||||||
sgstAmt: {
|
|
||||||
type: DataTypes.DECIMAL(15, 2),
|
|
||||||
allowNull: true,
|
|
||||||
field: 'sgst_amt'
|
|
||||||
},
|
|
||||||
igstRate: {
|
|
||||||
type: DataTypes.DECIMAL(5, 2),
|
|
||||||
allowNull: true,
|
|
||||||
field: 'igst_rate'
|
|
||||||
},
|
|
||||||
igstAmt: {
|
|
||||||
type: DataTypes.DECIMAL(15, 2),
|
|
||||||
allowNull: true,
|
|
||||||
field: 'igst_amt'
|
|
||||||
},
|
|
||||||
utgstRate: {
|
|
||||||
type: DataTypes.DECIMAL(5, 2),
|
|
||||||
allowNull: true,
|
|
||||||
field: 'utgst_rate'
|
|
||||||
},
|
|
||||||
utgstAmt: {
|
|
||||||
type: DataTypes.DECIMAL(15, 2),
|
|
||||||
allowNull: true,
|
|
||||||
field: 'utgst_amt'
|
|
||||||
},
|
|
||||||
cessRate: {
|
|
||||||
type: DataTypes.DECIMAL(5, 2),
|
|
||||||
allowNull: true,
|
|
||||||
field: 'cess_rate'
|
|
||||||
},
|
|
||||||
cessAmt: {
|
|
||||||
type: DataTypes.DECIMAL(15, 2),
|
|
||||||
allowNull: true,
|
|
||||||
field: 'cess_amt'
|
|
||||||
},
|
|
||||||
totalAmt: {
|
|
||||||
type: DataTypes.DECIMAL(15, 2),
|
|
||||||
allowNull: true,
|
|
||||||
field: 'total_amt'
|
|
||||||
},
|
},
|
||||||
sapDocumentNumber: {
|
sapDocumentNumber: {
|
||||||
type: DataTypes.STRING(100),
|
type: DataTypes.STRING(100),
|
||||||
|
|||||||
83
src/models/ClaimCreditNoteItem.ts
Normal file
83
src/models/ClaimCreditNoteItem.ts
Normal file
@ -0,0 +1,83 @@
|
|||||||
|
import { Model, DataTypes } from 'sequelize';
|
||||||
|
import { sequelize } from '@config/database';
|
||||||
|
|
||||||
|
export class ClaimCreditNoteItem extends Model {
|
||||||
|
public itemId!: string;
|
||||||
|
public creditNoteId!: string;
|
||||||
|
public slNo!: number;
|
||||||
|
public transactionNo!: string | null;
|
||||||
|
public description!: string | null;
|
||||||
|
public hsnCd!: string | null;
|
||||||
|
public amount!: number;
|
||||||
|
public claimAmount!: number | null;
|
||||||
|
public tdsAmount!: number | null;
|
||||||
|
public creditAmount!: number | null;
|
||||||
|
public readonly createdAt!: Date;
|
||||||
|
public readonly updatedAt!: Date;
|
||||||
|
}
|
||||||
|
|
||||||
|
ClaimCreditNoteItem.init(
|
||||||
|
{
|
||||||
|
itemId: {
|
||||||
|
type: DataTypes.UUID,
|
||||||
|
defaultValue: DataTypes.UUIDV4,
|
||||||
|
primaryKey: true,
|
||||||
|
field: 'item_id',
|
||||||
|
},
|
||||||
|
creditNoteId: {
|
||||||
|
type: DataTypes.UUID,
|
||||||
|
allowNull: false,
|
||||||
|
field: 'credit_note_id',
|
||||||
|
},
|
||||||
|
slNo: {
|
||||||
|
type: DataTypes.INTEGER,
|
||||||
|
allowNull: false,
|
||||||
|
field: 'sl_no',
|
||||||
|
},
|
||||||
|
transactionNo: {
|
||||||
|
type: DataTypes.STRING(100),
|
||||||
|
allowNull: true,
|
||||||
|
field: 'transaction_no',
|
||||||
|
},
|
||||||
|
description: {
|
||||||
|
type: DataTypes.TEXT,
|
||||||
|
allowNull: true,
|
||||||
|
field: 'description',
|
||||||
|
},
|
||||||
|
hsnCd: {
|
||||||
|
type: DataTypes.STRING(20),
|
||||||
|
allowNull: true,
|
||||||
|
field: 'hsn_cd',
|
||||||
|
},
|
||||||
|
amount: {
|
||||||
|
type: DataTypes.DECIMAL(15, 2),
|
||||||
|
allowNull: false,
|
||||||
|
defaultValue: 0,
|
||||||
|
field: 'amount',
|
||||||
|
},
|
||||||
|
claimAmount: {
|
||||||
|
type: DataTypes.DECIMAL(15, 2),
|
||||||
|
allowNull: true,
|
||||||
|
defaultValue: 0,
|
||||||
|
field: 'claim_amount',
|
||||||
|
},
|
||||||
|
tdsAmount: {
|
||||||
|
type: DataTypes.DECIMAL(15, 2),
|
||||||
|
allowNull: true,
|
||||||
|
defaultValue: 0,
|
||||||
|
field: 'tds_amount',
|
||||||
|
},
|
||||||
|
creditAmount: {
|
||||||
|
type: DataTypes.DECIMAL(15, 2),
|
||||||
|
allowNull: true,
|
||||||
|
defaultValue: 0,
|
||||||
|
field: 'credit_amount',
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
sequelize,
|
||||||
|
tableName: 'claim_credit_note_items',
|
||||||
|
underscored: true,
|
||||||
|
timestamps: true,
|
||||||
|
}
|
||||||
|
);
|
||||||
89
src/models/CpcAuditLog.ts
Normal file
89
src/models/CpcAuditLog.ts
Normal file
@ -0,0 +1,89 @@
|
|||||||
|
import { DataTypes, Model, Optional } from 'sequelize';
|
||||||
|
import { sequelize } from '@config/database';
|
||||||
|
|
||||||
|
interface CpcAuditLogAttributes {
|
||||||
|
id: string;
|
||||||
|
documentId: string;
|
||||||
|
action: string;
|
||||||
|
previousState?: any;
|
||||||
|
newState?: any;
|
||||||
|
performedBy?: string;
|
||||||
|
remarks?: string;
|
||||||
|
createdAt?: Date;
|
||||||
|
}
|
||||||
|
|
||||||
|
interface CpcAuditLogCreationAttributes extends Optional<CpcAuditLogAttributes, 'id' | 'createdAt'> {}
|
||||||
|
|
||||||
|
class CpcAuditLog extends Model<CpcAuditLogAttributes, CpcAuditLogCreationAttributes> implements CpcAuditLogAttributes {
|
||||||
|
public id!: string;
|
||||||
|
public documentId!: string;
|
||||||
|
public action!: string;
|
||||||
|
public previousState?: any;
|
||||||
|
public newState?: any;
|
||||||
|
public performedBy?: string;
|
||||||
|
public remarks?: string;
|
||||||
|
public createdAt!: Date;
|
||||||
|
}
|
||||||
|
|
||||||
|
CpcAuditLog.init(
|
||||||
|
{
|
||||||
|
id: {
|
||||||
|
type: DataTypes.UUID,
|
||||||
|
defaultValue: DataTypes.UUIDV4,
|
||||||
|
primaryKey: true,
|
||||||
|
field: 'id'
|
||||||
|
},
|
||||||
|
documentId: {
|
||||||
|
type: DataTypes.UUID,
|
||||||
|
allowNull: false,
|
||||||
|
field: 'document_id',
|
||||||
|
references: {
|
||||||
|
model: 'cpc_documents',
|
||||||
|
key: 'id'
|
||||||
|
}
|
||||||
|
},
|
||||||
|
action: {
|
||||||
|
type: DataTypes.STRING(255),
|
||||||
|
allowNull: false
|
||||||
|
},
|
||||||
|
previousState: {
|
||||||
|
type: DataTypes.JSONB,
|
||||||
|
allowNull: true,
|
||||||
|
field: 'previous_state'
|
||||||
|
},
|
||||||
|
newState: {
|
||||||
|
type: DataTypes.JSONB,
|
||||||
|
allowNull: true,
|
||||||
|
field: 'new_state'
|
||||||
|
},
|
||||||
|
performedBy: {
|
||||||
|
type: DataTypes.STRING(255),
|
||||||
|
allowNull: true,
|
||||||
|
field: 'performed_by'
|
||||||
|
},
|
||||||
|
remarks: {
|
||||||
|
type: DataTypes.TEXT,
|
||||||
|
allowNull: true
|
||||||
|
},
|
||||||
|
createdAt: {
|
||||||
|
type: DataTypes.DATE,
|
||||||
|
defaultValue: DataTypes.NOW,
|
||||||
|
field: 'created_at'
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
sequelize,
|
||||||
|
modelName: 'CpcAuditLog',
|
||||||
|
tableName: 'cpc_audit_logs',
|
||||||
|
timestamps: false
|
||||||
|
}
|
||||||
|
);
|
||||||
|
|
||||||
|
CpcAuditLog.belongsTo(sequelize.models.CpcDocument, {
|
||||||
|
foreignKey: 'documentId',
|
||||||
|
targetKey: 'id',
|
||||||
|
as: 'document'
|
||||||
|
});
|
||||||
|
|
||||||
|
export { CpcAuditLog };
|
||||||
|
|
||||||
143
src/models/CpcDocument.ts
Normal file
143
src/models/CpcDocument.ts
Normal file
@ -0,0 +1,143 @@
|
|||||||
|
import { DataTypes, Model, Optional } from 'sequelize';
|
||||||
|
import { sequelize } from '@config/database';
|
||||||
|
|
||||||
|
interface CpcDocumentAttributes {
|
||||||
|
id: string;
|
||||||
|
bookingId?: string;
|
||||||
|
claimId?: string;
|
||||||
|
attemptNo?: number;
|
||||||
|
documentType?: string;
|
||||||
|
documentGcpUrl?: string;
|
||||||
|
provider?: string;
|
||||||
|
msdPayload?: any;
|
||||||
|
extractedFields?: any;
|
||||||
|
fieldConfidence?: any;
|
||||||
|
validationStatus?: string;
|
||||||
|
matchPercentage?: number;
|
||||||
|
mismatchReasons?: any;
|
||||||
|
fieldResults?: any;
|
||||||
|
ipAddress?: string;
|
||||||
|
createdAt?: Date;
|
||||||
|
}
|
||||||
|
|
||||||
|
interface CpcDocumentCreationAttributes extends Optional<CpcDocumentAttributes, 'id' | 'attemptNo' | 'createdAt'> {}
|
||||||
|
|
||||||
|
class CpcDocument extends Model<CpcDocumentAttributes, CpcDocumentCreationAttributes> implements CpcDocumentAttributes {
|
||||||
|
public id!: string;
|
||||||
|
public bookingId?: string;
|
||||||
|
public claimId?: string;
|
||||||
|
public attemptNo?: number;
|
||||||
|
public documentType?: string;
|
||||||
|
public documentGcpUrl?: string;
|
||||||
|
public provider?: string;
|
||||||
|
public msdPayload?: any;
|
||||||
|
public extractedFields?: any;
|
||||||
|
public fieldConfidence?: any;
|
||||||
|
public validationStatus?: string;
|
||||||
|
public matchPercentage?: number;
|
||||||
|
public mismatchReasons?: any;
|
||||||
|
public fieldResults?: any;
|
||||||
|
public ipAddress?: string;
|
||||||
|
public createdAt!: Date;
|
||||||
|
}
|
||||||
|
|
||||||
|
CpcDocument.init(
|
||||||
|
{
|
||||||
|
id: {
|
||||||
|
type: DataTypes.UUID,
|
||||||
|
defaultValue: DataTypes.UUIDV4,
|
||||||
|
primaryKey: true,
|
||||||
|
field: 'id'
|
||||||
|
},
|
||||||
|
bookingId: {
|
||||||
|
type: DataTypes.STRING(255),
|
||||||
|
allowNull: true,
|
||||||
|
field: 'booking_id'
|
||||||
|
},
|
||||||
|
claimId: {
|
||||||
|
type: DataTypes.STRING(255),
|
||||||
|
allowNull: true,
|
||||||
|
field: 'claim_id'
|
||||||
|
},
|
||||||
|
attemptNo: {
|
||||||
|
type: DataTypes.INTEGER,
|
||||||
|
defaultValue: 1,
|
||||||
|
field: 'attempt_no'
|
||||||
|
},
|
||||||
|
documentType: {
|
||||||
|
type: DataTypes.STRING(255),
|
||||||
|
allowNull: true,
|
||||||
|
field: 'document_type'
|
||||||
|
},
|
||||||
|
documentGcpUrl: {
|
||||||
|
type: DataTypes.TEXT,
|
||||||
|
allowNull: true,
|
||||||
|
field: 'document_gcp_url'
|
||||||
|
},
|
||||||
|
provider: {
|
||||||
|
type: DataTypes.STRING(255),
|
||||||
|
allowNull: true,
|
||||||
|
field: 'provider'
|
||||||
|
},
|
||||||
|
msdPayload: {
|
||||||
|
type: DataTypes.JSONB,
|
||||||
|
allowNull: true,
|
||||||
|
field: 'msd_payload'
|
||||||
|
},
|
||||||
|
extractedFields: {
|
||||||
|
type: DataTypes.JSONB,
|
||||||
|
allowNull: true,
|
||||||
|
field: 'extracted_fields'
|
||||||
|
},
|
||||||
|
fieldConfidence: {
|
||||||
|
type: DataTypes.JSONB,
|
||||||
|
allowNull: true,
|
||||||
|
field: 'field_confidence'
|
||||||
|
},
|
||||||
|
validationStatus: {
|
||||||
|
type: DataTypes.STRING(255),
|
||||||
|
allowNull: true,
|
||||||
|
field: 'validation_status'
|
||||||
|
},
|
||||||
|
matchPercentage: {
|
||||||
|
type: DataTypes.FLOAT,
|
||||||
|
allowNull: true,
|
||||||
|
field: 'match_percentage'
|
||||||
|
},
|
||||||
|
mismatchReasons: {
|
||||||
|
type: DataTypes.JSONB,
|
||||||
|
allowNull: true,
|
||||||
|
field: 'mismatch_reasons'
|
||||||
|
},
|
||||||
|
fieldResults: {
|
||||||
|
type: DataTypes.JSONB,
|
||||||
|
allowNull: true,
|
||||||
|
field: 'field_results'
|
||||||
|
},
|
||||||
|
ipAddress: {
|
||||||
|
type: DataTypes.STRING(255),
|
||||||
|
allowNull: true,
|
||||||
|
field: 'ip_address'
|
||||||
|
},
|
||||||
|
createdAt: {
|
||||||
|
type: DataTypes.DATE,
|
||||||
|
defaultValue: DataTypes.NOW,
|
||||||
|
field: 'created_at'
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
sequelize,
|
||||||
|
modelName: 'CpcDocument',
|
||||||
|
tableName: 'cpc_documents',
|
||||||
|
timestamps: false,
|
||||||
|
indexes: [
|
||||||
|
{
|
||||||
|
name: 'unique_cpc_document_claim_attempt_booking',
|
||||||
|
unique: true,
|
||||||
|
fields: ['claimId', 'attemptNo', 'bookingId']
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
);
|
||||||
|
|
||||||
|
export { CpcDocument };
|
||||||
86
src/models/Form1626asQuarterSnapshot.ts
Normal file
86
src/models/Form1626asQuarterSnapshot.ts
Normal file
@ -0,0 +1,86 @@
|
|||||||
|
import { DataTypes, Model, Optional } from 'sequelize';
|
||||||
|
import { sequelize } from '@config/database';
|
||||||
|
import { Form1626asUploadLog } from './Form1626asUploadLog';
|
||||||
|
|
||||||
|
export interface Form1626asQuarterSnapshotAttributes {
|
||||||
|
id: number;
|
||||||
|
tanNumber: string;
|
||||||
|
financialYear: string;
|
||||||
|
quarter: string;
|
||||||
|
aggregatedAmount: number;
|
||||||
|
uploadLogId?: number | null;
|
||||||
|
createdAt: Date;
|
||||||
|
}
|
||||||
|
|
||||||
|
interface Form1626asQuarterSnapshotCreationAttributes
|
||||||
|
extends Optional<Form1626asQuarterSnapshotAttributes, 'id' | 'uploadLogId' | 'createdAt'> {}
|
||||||
|
|
||||||
|
class Form1626asQuarterSnapshot
|
||||||
|
extends Model<Form1626asQuarterSnapshotAttributes, Form1626asQuarterSnapshotCreationAttributes>
|
||||||
|
implements Form1626asQuarterSnapshotAttributes
|
||||||
|
{
|
||||||
|
public id!: number;
|
||||||
|
public tanNumber!: string;
|
||||||
|
public financialYear!: string;
|
||||||
|
public quarter!: string;
|
||||||
|
public aggregatedAmount!: number;
|
||||||
|
public uploadLogId?: number | null;
|
||||||
|
public createdAt!: Date;
|
||||||
|
|
||||||
|
public uploadLog?: Form1626asUploadLog;
|
||||||
|
}
|
||||||
|
|
||||||
|
Form1626asQuarterSnapshot.init(
|
||||||
|
{
|
||||||
|
id: {
|
||||||
|
type: DataTypes.INTEGER,
|
||||||
|
autoIncrement: true,
|
||||||
|
primaryKey: true,
|
||||||
|
},
|
||||||
|
tanNumber: {
|
||||||
|
type: DataTypes.STRING(20),
|
||||||
|
allowNull: false,
|
||||||
|
field: 'tan_number',
|
||||||
|
},
|
||||||
|
financialYear: {
|
||||||
|
type: DataTypes.STRING(20),
|
||||||
|
allowNull: false,
|
||||||
|
field: 'financial_year',
|
||||||
|
},
|
||||||
|
quarter: {
|
||||||
|
type: DataTypes.STRING(10),
|
||||||
|
allowNull: false,
|
||||||
|
},
|
||||||
|
aggregatedAmount: {
|
||||||
|
type: DataTypes.DECIMAL(15, 2),
|
||||||
|
allowNull: false,
|
||||||
|
field: 'aggregated_amount',
|
||||||
|
},
|
||||||
|
uploadLogId: {
|
||||||
|
type: DataTypes.INTEGER,
|
||||||
|
allowNull: true,
|
||||||
|
field: 'upload_log_id',
|
||||||
|
references: { model: 'form_16_26as_upload_log', key: 'id' },
|
||||||
|
onDelete: 'SET NULL',
|
||||||
|
},
|
||||||
|
createdAt: {
|
||||||
|
type: DataTypes.DATE,
|
||||||
|
allowNull: false,
|
||||||
|
field: 'created_at',
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
sequelize,
|
||||||
|
tableName: 'form_16_26as_quarter_snapshots',
|
||||||
|
timestamps: false,
|
||||||
|
underscored: true,
|
||||||
|
}
|
||||||
|
);
|
||||||
|
|
||||||
|
Form1626asQuarterSnapshot.belongsTo(Form1626asUploadLog, {
|
||||||
|
as: 'uploadLog',
|
||||||
|
foreignKey: 'uploadLogId',
|
||||||
|
targetKey: 'id',
|
||||||
|
});
|
||||||
|
|
||||||
|
export { Form1626asQuarterSnapshot };
|
||||||
82
src/models/Form1626asUploadLog.ts
Normal file
82
src/models/Form1626asUploadLog.ts
Normal file
@ -0,0 +1,82 @@
|
|||||||
|
import { DataTypes, Model, Optional } from 'sequelize';
|
||||||
|
import { sequelize } from '@config/database';
|
||||||
|
import { User } from './User';
|
||||||
|
|
||||||
|
export interface Form1626asUploadLogAttributes {
|
||||||
|
id: number;
|
||||||
|
uploadedAt: Date;
|
||||||
|
uploadedBy: string;
|
||||||
|
fileName?: string | null;
|
||||||
|
recordsImported: number;
|
||||||
|
errorsCount: number;
|
||||||
|
}
|
||||||
|
|
||||||
|
interface Form1626asUploadLogCreationAttributes
|
||||||
|
extends Optional<Form1626asUploadLogAttributes, 'id' | 'fileName'> {}
|
||||||
|
|
||||||
|
class Form1626asUploadLog
|
||||||
|
extends Model<Form1626asUploadLogAttributes, Form1626asUploadLogCreationAttributes>
|
||||||
|
implements Form1626asUploadLogAttributes
|
||||||
|
{
|
||||||
|
public id!: number;
|
||||||
|
public uploadedAt!: Date;
|
||||||
|
public uploadedBy!: string;
|
||||||
|
public fileName?: string | null;
|
||||||
|
public recordsImported!: number;
|
||||||
|
public errorsCount!: number;
|
||||||
|
|
||||||
|
public uploadedByUser?: User;
|
||||||
|
}
|
||||||
|
|
||||||
|
Form1626asUploadLog.init(
|
||||||
|
{
|
||||||
|
id: {
|
||||||
|
type: DataTypes.INTEGER,
|
||||||
|
autoIncrement: true,
|
||||||
|
primaryKey: true,
|
||||||
|
},
|
||||||
|
uploadedAt: {
|
||||||
|
type: DataTypes.DATE,
|
||||||
|
allowNull: false,
|
||||||
|
defaultValue: DataTypes.NOW,
|
||||||
|
field: 'uploaded_at',
|
||||||
|
},
|
||||||
|
uploadedBy: {
|
||||||
|
type: DataTypes.UUID,
|
||||||
|
allowNull: false,
|
||||||
|
field: 'uploaded_by',
|
||||||
|
references: { model: 'users', key: 'user_id' },
|
||||||
|
},
|
||||||
|
fileName: {
|
||||||
|
type: DataTypes.STRING(255),
|
||||||
|
allowNull: true,
|
||||||
|
field: 'file_name',
|
||||||
|
},
|
||||||
|
recordsImported: {
|
||||||
|
type: DataTypes.INTEGER,
|
||||||
|
allowNull: false,
|
||||||
|
defaultValue: 0,
|
||||||
|
field: 'records_imported',
|
||||||
|
},
|
||||||
|
errorsCount: {
|
||||||
|
type: DataTypes.INTEGER,
|
||||||
|
allowNull: false,
|
||||||
|
defaultValue: 0,
|
||||||
|
field: 'errors_count',
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
sequelize,
|
||||||
|
tableName: 'form_16_26as_upload_log',
|
||||||
|
timestamps: false,
|
||||||
|
underscored: true,
|
||||||
|
}
|
||||||
|
);
|
||||||
|
|
||||||
|
Form1626asUploadLog.belongsTo(User, {
|
||||||
|
as: 'uploadedByUser',
|
||||||
|
foreignKey: 'uploadedBy',
|
||||||
|
targetKey: 'userId',
|
||||||
|
});
|
||||||
|
|
||||||
|
export { Form1626asUploadLog };
|
||||||
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue
Block a user