Compare commits

...

27 Commits

Author SHA1 Message Date
16deafd42d sap-client number conflict for new SAP user for IO fetch resolved 2026-04-29 08:26:16 +05:30
Aaditya Jaiswal
d01e248a35 Revert "add uploaded Form16 PDF APIs and bulk download support"
This reverts commit d25ffbaf7b.
2026-04-28 19:13:47 +05:30
Aaditya Jaiswal
d25ffbaf7b add uploaded Form16 PDF APIs and bulk download support
Add RE uploaded Form16 PDF listing and bulk download support in backend, including dependency and migration updates required by the new flow.

Made-with: Cursor
2026-04-28 18:58:16 +05:30
Devanshu Sonbhurra
8e40b73f65 Merge branch 'CSD_DEV' into 'laxman_dev'
Csd dev

See merge request sipl/re-workflow-be!1
2026-04-21 06:42:41 +00:00
Arjun Mehar
3c55404f18 added forntend build 2026-04-20 20:18:54 +05:30
Arjun Mehar
387d1881f7 FIX USER VALIDATION 2026-04-20 20:11:11 +05:30
Arjun Mehar
dfe2c1423a Frontend build deploy 2026-04-17 20:06:06 +05:30
Arjun Mehar
7d35a1d167 Merge branch 'laxman_dev' of http://10.10.1.3:2010/sipl/re-workflow-be into CSD_DEV 2026-04-17 19:59:56 +05:30
Arjun Mehar
657191ce2b Implemented CPC-CSD OCR 2026-04-17 19:58:45 +05:30
Aaditya Jaiswal
e739b8b5ee loal storage rename form 16 2026-04-16 15:23:52 +05:30
Aaditya Jaiswal
1a02781731 form 16 renaming logs added 2026-04-16 12:58:40 +05:30
Aaditya Jaiswal
44a19bbfea form 16 renaming done 2026-04-14 20:12:41 +05:30
Aaditya Jaiswal
872bda4731 octa reverted to UAT 2026-04-10 20:40:32 +05:30
Aaditya Jaiswal
80e28fb0eb octa change to production sso 2026-04-10 19:32:22 +05:30
Aaditya Jaiswal
729a0d2d26 auth logic 2026-04-09 19:03:25 +05:30
Aaditya Jaiswal
7828c8d463 frontend env fixed 2026-04-09 17:19:36 +05:30
Aaditya Jaiswal
3f94e4fe47 token exchange 2026-04-09 16:52:49 +05:30
Aaditya Jaiswal
876ec26e97 dashboard added 2026-04-09 15:39:13 +05:30
Aaditya Jaiswal
34c488ae16 log in issue fixed 2026-04-09 14:17:50 +05:30
Aaditya Jaiswal
f43251ac13 new Build added 2026-04-09 11:42:38 +05:30
Aaditya Jaiswal
c3e08ebfea VAPT issue fixed 2026-04-08 16:25:25 +05:30
Aaditya Jaiswal
42e6c2356b VAPT done 2026-03-31 19:38:13 +05:30
Aaditya Jaiswal
2b2a1bc6ce three issues fixed just backing up 2026-03-31 19:31:33 +05:30
Aaditya Jaiswal
d7f44057cc amount sign fixed 2026-03-30 12:39:53 +05:30
Aaditya Jaiswal
5e29adef1b last update - merging fixed 2026-03-27 18:26:53 +05:30
Aaditya Jaiswal
3c1c743df6 Merge origin/laxman_dev into laxman_dev.
Accept pulled backend updates and keep existing resolved functionality.

Made-with: Cursor
2026-03-27 18:12:40 +05:30
Aaditya Jaiswal
8e176cdf25 last updated on fixed 2026-03-27 18:09:13 +05:30
68 changed files with 8494 additions and 437 deletions

5
.env.docker Normal file
View File

@ -0,0 +1,5 @@
# Auto-loaded by `docker compose` for ${VAR} substitution in docker-compose.yml (not passed into every container).
# Okta SPA values (public) — same preview tenant as CPC-CSD client dev.
VITE_OKTA_DOMAIN=https://dev-830839.oktapreview.com
VITE_OKTA_CLIENT_ID=0oa2jgzvrpdwx2iqd0h8

File diff suppressed because one or more lines are too long

View File

@ -0,0 +1,219 @@
{
"id": "re-workflow-cpc-csd-api-env",
"name": "RE Workflow — CPC-CSD API (complete)",
"values": [
{
"key": "hostUrl",
"value": "http://localhost:5000",
"type": "default",
"enabled": true,
"description": "API origin only (scheme + host + port). No path. Node dev: 5000. Docker host-mapped API: often 5004. Nginx all-in-one: use 8080 only if you proxy everything through it."
},
{
"key": "apiRoot",
"value": "http://localhost:5000/api/v1",
"type": "default",
"enabled": true,
"description": "Must equal {{hostUrl}}/api/v1. All CPC-CSD REST calls in the bundled collection use {{apiRoot}}/cpc-csd/... Bare GCS upload uses POST {{hostUrl}}/api/upload (see collection folder 02). The SPA may still use {{hostUrl}}/api/documents/* — same handlers; see docs/CPC-CDC.md if you need those URLs."
},
{
"key": "accessToken",
"value": "",
"type": "secret",
"enabled": true,
"description": "JWT only (no Bearer prefix). From browser: DevTools → Application → Local Storage → access_token, or Network → Authorization header value after Bearer. Required for all CPC-CSD routes except GET /health."
},
{
"key": "refreshToken",
"value": "",
"type": "secret",
"enabled": true,
"description": "Optional. Only if you chain POST /auth/refresh from another collection."
},
{
"key": "ocrProvider",
"value": "GEMINI_VERTEX_DIRECT",
"type": "default",
"enabled": true,
"description": "Multipart field provider. GEMINI_VERTEX_DIRECT = Gemini on document bytes; skips Document AI OCR even if configured. GEMINI_VERTEX = optional Document AI then Gemini. RULES = rules engine on OCR text only, no Vertex."
},
{
"key": "claimIdCpc",
"value": "CPC-POSTMAN-0001",
"type": "default",
"enabled": true,
"description": "claim_id and booking_id for CPC runs (same pattern as Dashboard finalBookingId: CPC-{suffix}). Must be unique enough for your DB rules."
},
{
"key": "claimIdCsd",
"value": "CSD-POSTMAN-0001",
"type": "default",
"enabled": true,
"description": "claim_id and booking_id for CSD (PO) runs: CSD-{suffix}."
},
{
"key": "claimIdRetail",
"value": "CSD-RETAIL-0001",
"type": "default",
"enabled": true,
"description": "Optional booking/claim id for RETAIL_INVOICE tests (any string; booking_type often CSD in samples)."
},
{
"key": "cpcDocumentId",
"value": "",
"type": "default",
"enabled": true,
"description": "UUID from GET .../documents/recent (or history). Required for GET by id, GET file, PUT status, DELETE. Optional test script on recent can set this."
},
{
"key": "documentGcpUrl",
"value": "gs://your-bucket/path/document.pdf",
"type": "default",
"enabled": true,
"description": "For POST .../v1/ocr/validate JSON only. File must already exist in GCS."
},
{
"key": "reportAttemptQuery",
"value": "",
"type": "default",
"enabled": true,
"description": "Per-claim Excel: append empty or ?attempt=2 (full query string including ?)."
},
{
"key": "recentPage",
"value": "1",
"type": "default",
"enabled": true,
"description": "`GET .../documents/recent` — **page** (integer, **1-based**). Increment to fetch the next page; reset to `1` when you change `recentSearch`, `recentStatus`, or `recentType`."
},
{
"key": "recentLimit",
"value": "15",
"type": "default",
"enabled": true,
"description": "`GET .../documents/recent` — **limit** (page size, number of **document rows** per page). The SPA dashboard offers 10 / 15 / 30 / 50. Larger pages reduce the chance a multi-file CPC batch is split across pages."
},
{
"key": "recentSearch",
"value": "",
"type": "default",
"enabled": true,
"description": "Optional **`search`** query: case-insensitive substring on **`booking_id`**, **`claim_id`**, **`document_type`**, and document **`id`** (UUID). Examples: `CPC-114`, `POSTMAN`, part of a UUID. Leave **empty** to list without text filter (matches Dashboard debounced booking search)."
},
{
"key": "recentStatus",
"value": "",
"type": "default",
"enabled": true,
"description": "Optional **`status`** filter. **Empty** or omit in URL = all statuses.\n\n| Value | Server behaviour |\n|-------|------------------|\n| *(empty)* | No status filter — “All submissions”. |\n| `SUCCESSFUL` | `MATCH`, `SUCCESSFUL`, `APPROVED`. |\n| `UNSUCCESSFUL` | `MISMATCH`, `REJECTED`, `UNSUCCESSFUL`, `NEED_MANUAL` — use for **“Rejected / mismatch”** tab parity. |\n| `ALL` | Explicit no-op filter. |\n| Any other string | Treated as exact **`validation_status`** value. |\n\nImplementation: `appendCpcDocumentFilters` in `re-workflow-be/src/services/cpc-cdc/utils.ts`."
},
{
"key": "recentType",
"value": "",
"type": "default",
"enabled": true,
"description": "Optional **`type`** (document family). **Empty** = all types.\n\nSupported tokens include **`AADHAAR`**, **`CPC_AUTH`**, **`CSD_PO`**, **`RETAIL_INVOICE`**, **`ALL`** — server maps to `document_type` `ILIKE` patterns (see same `appendCpcDocumentFilters`)."
},
{
"key": "recentSortBy",
"value": "createdAt",
"type": "default",
"enabled": true,
"description": "`sortBy` query — must be one of: **`id`**, **`bookingId`**, **`createdAt`**, **`documentType`**, **`validationStatus`**, **`claimId`**, **`matchPercentage`**. Invalid values fall back to **`createdAt`** in the controller."
},
{
"key": "recentOrder",
"value": "desc",
"type": "default",
"enabled": true,
"description": "`order` query — **`asc`** or **`desc`** (case-insensitive). **`desc`** = newest first (dashboard default)."
},
{
"key": "masterReportSearch",
"value": "",
"type": "default",
"enabled": true,
"description": "GET .../report/all/download optional search query param."
},
{
"key": "masterReportStatus",
"value": "",
"type": "default",
"enabled": true,
"description": "Optional validation_status filter for master Excel."
},
{
"key": "masterReportType",
"value": "",
"type": "default",
"enabled": true,
"description": "Optional document_type filter for master Excel."
},
{
"key": "putStatusBodyJson",
"value": "{\n \"status\": \"APPROVED\",\n \"remarks\": \"Manual review via Postman\"\n}",
"type": "default",
"enabled": true,
"description": "Body for PUT .../documents/:id/status. Adjust status, remarks, optional correctedFields per API contract."
},
{
"key": "metadataQueueJsonCsdPo",
"value": "[{\"document_type\":\"CSD_PO\",\"msd_payload\":{\"customer_name\":\"Rahul Verma\",\"po_number\":\"PO-2024-001\",\"po_amount\":\"25000\",\"signature_and_stamp\":\"yes\"},\"expected_field_keys\":[\"customer_name\",\"po_number\",\"po_amount\",\"signature_and_stamp\"]}]",
"type": "default",
"enabled": true,
"description": "CSD (1 doc) PO — Purchase Order. JSON keys: `customer_name`, `po_number`, `po_amount`, `signature_and_stamp` (yes/no). Legacy keys still work. Stringify for `metadata_queue`."
},
{
"key": "metadataQueueJsonCpcTwoFiles",
"value": "[{\"document_type\":\"CPC_AUTH\",\"msd_payload\":{\"customer_name\":\"Amit Kumar\",\"letter_number\":\"AUTH-2024-77\",\"letter_amount\":\"45000\",\"signature_and_stamp\":\"yes\"},\"expected_field_keys\":[\"customer_name\",\"letter_number\",\"letter_amount\",\"signature_and_stamp\"]},{\"document_type\":\"AADHAAR\",\"msd_payload\":{\"customer_name\":\"Amit Kumar\",\"aadhar_number\":\"123412341234\"},\"expected_field_keys\":[\"customer_name\",\"aadhar_number\"]}]",
"type": "default",
"enabled": true,
"description": "CPC (2 docs), order = file order. Doc1: `customer_name`, `letter_number`, `letter_amount`, `signature_and_stamp`. Doc2: `customer_name`, `aadhar_number`. Legacy keys still work."
},
{
"key": "metadataQueueJsonCpcAuthOnly",
"value": "[{\"document_type\":\"CPC_AUTH\",\"msd_payload\":{\"customer_name\":\"Amit Kumar\",\"letter_number\":\"AUTH-99\",\"letter_amount\":\"10000\",\"signature_and_stamp\":\"yes\"},\"expected_field_keys\":[\"customer_name\",\"letter_number\",\"letter_amount\",\"signature_and_stamp\"]}]",
"type": "default",
"enabled": true,
"description": "Single CPC_AUTH upload (skip_min). Same keys as CPC doc1 (`customer_name`, `letter_number`, `letter_amount`, `signature_and_stamp`)."
},
{
"key": "metadataQueueJsonRetailInvoice",
"value": "[{\"document_type\":\"RETAIL_INVOICE\",\"msd_payload\":{\"vendor_name\":\"Royal Enfield Store\",\"order_or_authorisation_number\":\"INV-2024-1001\",\"invoice_value\":\"185000\",\"invoice_date\":\"15-01-2024\"},\"expected_field_keys\":[\"vendor_name\",\"order_or_authorisation_number\",\"invoice_value\",\"invoice_date\"]}]",
"type": "default",
"enabled": true,
"description": "Retail invoice: vendor, order, amount, and invoice date compared to the reference payload per validation policy."
},
{
"key": "msdPayloadCpcAuth",
"value": "{\"customer_name\":\"Amit Kumar\",\"letter_number\":\"AUTH-1\",\"letter_amount\":\"45000\",\"signature_and_stamp\":\"yes\"}",
"type": "default",
"enabled": true,
"description": "validate-upload: Authorization letter — `customer_name`, `letter_number`, `letter_amount`, `signature_and_stamp`."
},
{
"key": "msdPayloadAadhaar",
"value": "{\"customer_name\":\"Amit Kumar\",\"aadhar_number\":\"123412341234\"}",
"type": "default",
"enabled": true,
"description": "validate-upload: Aadhaar — `customer_name`, `aadhar_number` (12 digits)."
},
{
"key": "msdPayloadCsdPo",
"value": "{\"customer_name\":\"Rahul Verma\",\"po_number\":\"PO-001\",\"po_amount\":\"12000\",\"signature_and_stamp\":\"yes\"}",
"type": "default",
"enabled": true,
"description": "validate-upload: CSD PO — same keys as `metadataQueueJsonCsdPo`."
},
{
"key": "msdPayloadRetailInvoice",
"value": "{\"vendor_name\":\"RE Store\",\"order_or_authorisation_number\":\"INV-99\",\"invoice_value\":\"50000\",\"invoice_date\":\"01-04-2024\"}",
"type": "default",
"enabled": true,
"description": "validate-upload: msd_payload for RETAIL_INVOICE."
}
],
"_postman_variable_scope": "environment",
"_postman_exported_at": "2026-04-20T12:00:00.000Z",
"_postman_exported_using": "RE Workflow CPC-CSD bundle"
}

View File

@ -0,0 +1,90 @@
{
"info": {
"_postman_id": "re-workflow-cpc-csd-ocr-single-2026",
"name": "RE Workflow — CPC-CSD OCR (single POST)",
"description": "## What this collection is\nOne **multipart** request that runs the **full CPC-CSD OCR pipeline** used by the app: optional OCR text → Vertex/Gemini extraction → validation → **persist** `cpc_documents` rows.\n\nThis is **not** a different backend route — it is exactly:\n`POST {{apiRoot}}/cpc-csd/v1/ocr/upload`\n\n## Import\n1. Import **RE_Workflow_CPC_CDC_OCR_SingleRequest.postman_environment.json** (or merge variables into your existing env).\n2. Set **accessToken** (JWT, no `Bearer ` prefix).\n3. Select this environment in the dropdown.\n4. Open **POST Full OCR pipeline**, attach file(s), Send.\n\n## Auth\n- Collection **Bearer**: `{{accessToken}}`\n- User must be allowed for CPC-CSD (same as main RE Workflow collection).\n\n## Request (exact)\n| Item | Value |\n|------|--------|\n| Method | **POST** |\n| URL | `{{apiRoot}}/cpc-csd/v1/ocr/upload` |\n| Body mode | **form-data** (multipart) |\n| Content-Type | Let Postman set **multipart boundary** (do not set `application/json` on this request). |\n\n### Multipart text fields (always these keys)\n| Field name | Type | Required | Description |\n|------------|------|----------|-------------|\n| `claim_id` | text | yes | Claim id string; same family as Dashboard (`CPC-…` / `CSD-…`). |\n| `booking_id` | text | yes | In samples same as `claim_id`; backend accepts booking id pattern. |\n| `booking_type` | text | yes | **`CSD`** = one PO file. **`CPC`** = two files (auth + Aadhaar). |\n| `provider` | text | yes | e.g. `GEMINI_VERTEX_DIRECT` (see env `ocrProvider`). |\n| `metadata_queue` | text | yes | **Stringified JSON array** (not a Postman JSON body). Each element describes one uploaded file in order. |\n\n### Multipart file field(s)\n| Field name | Type | Count | Rule |\n|------------|------|-------|------|\n| `files` | file | **1** for CSD | One PO PDF/image. 
|\n| `files` | file | **2** for CPC | **Duplicate** the key `files` in Postman (two rows, same key `files`): first row = authorization letter, second = Aadhaar. Order **must** match `metadata_queue` array order. |\n\n### `metadata_queue` JSON shape (per array element)\nEach object **must** include:\n- `document_type`: `CSD_PO` | `CPC_AUTH` | `AADHAAR` | `RETAIL_INVOICE` (this collection documents CSD + CPC).\n- `msd_payload`: object — MSD/reference values for that file.\n- `expected_field_keys`: string array — **same keys** as in `msd_payload` you want validated (order preserved).\n\n**CSD_PO** keys (current canonical): `customer_name`, `po_number`, `po_amount`, `signature_and_stamp` (`yes`/`no`).\n\n**CPC_AUTH** (doc 1): `customer_name`, `letter_number`, `letter_amount`, `signature_and_stamp`.\n\n**AADHAAR** (doc 2): `customer_name`, `aadhar_number` (12 digits).\n\nUse env **`metadata_queue_json`** for CSD default, **`metadata_queue_json_cpc`** for CPC (set the `metadata_queue` field value to that variable when testing CPC).\n\n## Limits (server)\n- Max **20** `files` parts; **15 MB** per file; ZIP not allowed (same as main API).\n\n## Response\n- **200** JSON: per-file results with `document_id`, `validation_status`, `field_results`, etc. (same contract as main collection folder `03`/`04`).\n\n## Optional (not in this one-request collection)\n- `POST .../ocr/validate-upload` — single file validate without persisting as the same dashboard flow.\n- `POST .../ocr/validate` — JSON body + GCS URL.\n- `POST {{hostUrl}}/api/upload` — bare GCS staging without CPC metadata.",
"schema": "https://schema.getpostman.com/json/collection/v2.1.0/collection.json"
},
"auth": {
"type": "bearer",
"bearer": [
{
"key": "token",
"value": "{{accessToken}}",
"type": "string"
}
]
},
"variable": [
{
"key": "hostUrl",
"value": "http://localhost:5000"
},
{
"key": "apiRoot",
"value": "http://localhost:5000/api/v1"
},
{
"key": "accessToken",
"value": ""
},
{
"key": "ocrProvider",
"value": "GEMINI_VERTEX_DIRECT"
}
],
"item": [
{
"name": "POST Full OCR pipeline (multipart upload)",
"description": "**Single API** for end-to-end OCR on CPC-CSD: `POST {{apiRoot}}/cpc-csd/v1/ocr/upload`.\n\n**CSD (1 file):** `ocr_booking_type=CSD`, attach **one** `files` part, `metadata_queue` = `{{metadata_queue_json}}` (default CSD_PO).\n\n**CPC (2 files):** Set `ocr_booking_type` to `CPC`, set `metadata_queue` to `{{metadata_queue_json_cpc}}`, **add a second form row** with key `files` (duplicate key), attach auth PDF then Aadhaar PDF in that order.\n\n**claim_id / booking_id:** both use `{{ocr_claim_id}}` — change env when switching CSD vs CPC claim ids.",
"request": {
"method": "POST",
"header": [],
"body": {
"mode": "formdata",
"formdata": [
{
"key": "claim_id",
"value": "{{ocr_claim_id}}",
"type": "text",
"description": "Same as Dashboard claim id string."
},
{
"key": "booking_id",
"value": "{{ocr_claim_id}}",
"type": "text",
"description": "Samples use same value as claim_id; must match your booking/claim convention."
},
{
"key": "booking_type",
"value": "{{ocr_booking_type}}",
"type": "text",
"description": "CSD = 1 file. CPC = 2 files + CPC metadata array."
},
{
"key": "provider",
"value": "{{ocrProvider}}",
"type": "text",
"description": "Vertex/Rules mode; see env ocrProvider."
},
{
"key": "metadata_queue",
"value": "{{metadata_queue_json}}",
"type": "text",
"description": "Stringified JSON array. CSD default from env `metadata_queue_json`. For CPC switch value to {{metadata_queue_json_cpc}} in this field (or paste)."
},
{
"key": "files",
"type": "file",
"src": [],
"description": "CSD: attach PO here only. CPC: first file = authorization letter; add another `files` row below for Aadhaar."
}
]
},
"url": "{{apiRoot}}/cpc-csd/v1/ocr/upload",
"description": "Multipart form-data only. Do not set Content-Type manually."
},
"response": []
}
]
}

View File

@ -0,0 +1,65 @@
{
"id": "re-workflow-cpc-csd-ocr-single-env",
"name": "RE Workflow — CPC-CSD OCR (single POST)",
"values": [
{
"key": "hostUrl",
"value": "http://localhost:5000",
"type": "default",
"enabled": true,
"description": "API origin (scheme + host + port). Docker: often http://localhost:5004. No trailing slash."
},
{
"key": "apiRoot",
"value": "http://localhost:5000/api/v1",
"type": "default",
"enabled": true,
"description": "Must be {{hostUrl}}/api/v1. Used only by the OCR collection URL."
},
{
"key": "accessToken",
"value": "",
"type": "secret",
"enabled": true,
"description": "JWT only (no 'Bearer ' prefix). Required: collection uses Bearer auth with this variable."
},
{
"key": "ocrProvider",
"value": "GEMINI_VERTEX_DIRECT",
"type": "default",
"enabled": true,
"description": "Multipart text field `provider`. GEMINI_VERTEX_DIRECT = Gemini on file bytes. GEMINI_VERTEX = optional Document AI then Gemini. RULES = regex/rules on OCR text only (no Vertex)."
},
{
"key": "ocr_claim_id",
"value": "CSD-OCR-0001",
"type": "default",
"enabled": true,
"description": "Used for BOTH `claim_id` and `booking_id` form fields (same as Dashboard). For CPC use e.g. CPC-OCR-0001 and set ocr_booking_type=CPC."
},
{
"key": "ocr_booking_type",
"value": "CSD",
"type": "default",
"enabled": true,
"description": "Multipart `booking_type`: CSD (1 file, PO) or CPC (2 files: auth letter + Aadhaar)."
},
{
"key": "metadata_queue_json",
"value": "[{\"document_type\":\"CSD_PO\",\"msd_payload\":{\"customer_name\":\"Rahul Verma\",\"po_number\":\"PO-2024-001\",\"po_amount\":\"25000\",\"signature_and_stamp\":\"yes\"},\"expected_field_keys\":[\"customer_name\",\"po_number\",\"po_amount\",\"signature_and_stamp\"]}]",
"type": "default",
"enabled": true,
"description": "Default for CSD. Single-line JSON STRING for form field `metadata_queue`. For CPC: set Body `metadata_queue` to {{metadata_queue_json_cpc}} (or paste that value) and add a second `files` row."
},
{
"key": "metadata_queue_json_cpc",
"value": "[{\"document_type\":\"CPC_AUTH\",\"msd_payload\":{\"customer_name\":\"Amit Kumar\",\"letter_number\":\"AUTH-2024-77\",\"letter_amount\":\"45000\",\"signature_and_stamp\":\"yes\"},\"expected_field_keys\":[\"customer_name\",\"letter_number\",\"letter_amount\",\"signature_and_stamp\"]},{\"document_type\":\"AADHAAR\",\"msd_payload\":{\"customer_name\":\"Amit Kumar\",\"aadhar_number\":\"123412341234\"},\"expected_field_keys\":[\"customer_name\",\"aadhar_number\"]}]",
"type": "default",
"enabled": true,
"description": "CPC 2-file metadata_queue. Array order MUST match file order: [0]=first `files` part (auth letter), [1]=second `files` part (Aadhaar)."
}
],
"_postman_variable_scope": "environment",
"_postman_exported_at": "2026-04-15T15:00:00.000Z",
"_postman_exported_using": "RE Workflow CPC-CSD OCR single-request bundle"
}

View File

@ -19,7 +19,12 @@
"variable": [ "variable": [
{ {
"key": "baseUrl", "key": "baseUrl",
"value": "http://localhost:3000/api/v1", "value": "http://localhost:5000/api/v1",
"type": "string"
},
{
"key": "healthUrl",
"value": "http://localhost:5000/health",
"type": "string" "type": "string"
}, },
{ {
@ -101,7 +106,31 @@
] ]
}, },
{ {
"name": "Token Exchange (Development)", "name": "Token Exchange (Okta authorization code)",
"event": [
{
"listen": "test",
"script": {
"exec": [
"if (pm.response.code === 200) {",
" const jsonData = pm.response.json();",
" const data = jsonData.data || jsonData;",
" const token = data && (data.accessToken || data.access_token);",
" const refresh = data && (data.refreshToken || data.refresh_token);",
" if (token) {",
" pm.collectionVariables.set('accessToken', token);",
" pm.environment.set('accessToken', token);",
" }",
" if (refresh) {",
" pm.collectionVariables.set('refreshToken', refresh);",
" pm.environment.set('refreshToken', refresh);",
" }",
"}"
],
"type": "text/javascript"
}
}
],
"request": { "request": {
"method": "POST", "method": "POST",
"header": [ "header": [

View File

@ -1 +1 @@
import{a as s}from"./index-B4PRp9Lp.js";import"./radix-vendor-CLtqm-Ae.js";import"./charts-vendor-CmYZJIYl.js";import"./utils-vendor-BTBPSQfW.js";import"./ui-vendor-DgwXkk2Y.js";import"./socket-vendor-TjCxX7sJ.js";import"./redux-vendor-tbZCm13o.js";import"./router-vendor-HW_ujxKo.js";async function m(n){return(await s.post(`/conclusions/${n}/generate`)).data.data}async function f(n,t){return(await s.post(`/conclusions/${n}/finalize`,{finalRemark:t})).data.data}async function d(n){var t;try{return(await s.get(`/conclusions/${n}`)).data.data}catch(o){if(((t=o.response)==null?void 0:t.status)===404)return null;throw o}}export{f as finalizeConclusion,m as generateConclusion,d as getConclusion}; import{a as s}from"./index-r8G8cQlR.js";import"./radix-vendor-CLtqm-Ae.js";import"./charts-vendor-CmYZJIYl.js";import"./utils-vendor-BTBPSQfW.js";import"./ui-vendor-DgwXkk2Y.js";import"./socket-vendor-TjCxX7sJ.js";import"./redux-vendor-tbZCm13o.js";import"./router-vendor-DbXFJHwt.js";async function m(n){return(await s.post(`/conclusions/${n}/generate`)).data.data}async function f(n,t){return(await s.post(`/conclusions/${n}/finalize`,{finalRemark:t})).data.data}async function d(n){var t;try{return(await s.get(`/conclusions/${n}`)).data.data}catch(o){if(((t=o.response)==null?void 0:t.status)===404)return null;throw o}}export{f as finalizeConclusion,m as generateConclusion,d as getConclusion};

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@ -13,15 +13,15 @@
<!-- Preload essential fonts and icons --> <!-- Preload essential fonts and icons -->
<link rel="preconnect" href="https://fonts.googleapis.com"> <link rel="preconnect" href="https://fonts.googleapis.com">
<link rel="preconnect" href="https://fonts.gstatic.com" crossorigin> <link rel="preconnect" href="https://fonts.gstatic.com" crossorigin>
<script type="module" crossorigin src="/assets/index-B4PRp9Lp.js"></script> <script type="module" crossorigin src="/assets/index-r8G8cQlR.js"></script>
<link rel="modulepreload" crossorigin href="/assets/charts-vendor-CmYZJIYl.js"> <link rel="modulepreload" crossorigin href="/assets/charts-vendor-CmYZJIYl.js">
<link rel="modulepreload" crossorigin href="/assets/radix-vendor-CLtqm-Ae.js"> <link rel="modulepreload" crossorigin href="/assets/radix-vendor-CLtqm-Ae.js">
<link rel="modulepreload" crossorigin href="/assets/utils-vendor-BTBPSQfW.js"> <link rel="modulepreload" crossorigin href="/assets/utils-vendor-BTBPSQfW.js">
<link rel="modulepreload" crossorigin href="/assets/ui-vendor-DgwXkk2Y.js"> <link rel="modulepreload" crossorigin href="/assets/ui-vendor-DgwXkk2Y.js">
<link rel="modulepreload" crossorigin href="/assets/socket-vendor-TjCxX7sJ.js"> <link rel="modulepreload" crossorigin href="/assets/socket-vendor-TjCxX7sJ.js">
<link rel="modulepreload" crossorigin href="/assets/redux-vendor-tbZCm13o.js"> <link rel="modulepreload" crossorigin href="/assets/redux-vendor-tbZCm13o.js">
<link rel="modulepreload" crossorigin href="/assets/router-vendor-HW_ujxKo.js"> <link rel="modulepreload" crossorigin href="/assets/router-vendor-DbXFJHwt.js">
<link rel="stylesheet" crossorigin href="/assets/index-BNFD-0wA.css"> <link rel="stylesheet" crossorigin href="/assets/index-Bap1UWaI.css">
</head> </head>
<body> <body>

68
docs/CPC-CDC.md Normal file
View File

@ -0,0 +1,68 @@
# CPC-CSD module (re-workflow)
This module (formerly referred to as CPC-CDC in code comments) covers **CPC/CSD document upload, OCR/extraction, validation against MSD payloads, audit history, dashboards, and Excel reports**. It was consolidated from the standalone **CPC-CSD** app into this backend.
## HTTP API
**CPC-CSD-compatible URLs** (same as `CPC-CSD/server/src/routes/index.js` + Postman `CPC-CSD-Full-Flow`): `POST /api/upload`, `GET /api/documents/*`, `POST /api/v1/ocr/validate`, `POST /api/v1/ocr/validate-upload` (field **`file`**), `POST /api/v1/ocr/upload` (field **`files`**, max 20), report downloads under `/api/v1/ocr/report/...`. Registered from `src/routes/cpc-csd-compat.mount.ts` before `/api/v1`; disable with **`CPC_LEGACY_COMPAT_ROUTES=false`**.
**Namespaced API** — canonical prefix **`/api/v1/cpc-csd`**; legacy alias **`/api/v1/cpc-cdc`** (`src/routes/cpc-cdc.routes.ts`) mounts the same handlers and auth.
| Method | Path (prefix **`/api`** or **`/api/v1/cpc-csd`** or legacy **`/api/v1/cpc-cdc`**) | Purpose |
|--------|------|---------|
| POST | `/upload` | GCS-only: multipart field **`file`** → `{ gcsUrl }` (compat: **`/api/upload`**) |
| POST | `/v1/ocr/validate` | JSON URL mode — returns **400** with legacy message (use validate-upload) |
| POST | `/v1/ocr/validate-upload` | Single file field **`file`** + `claim_id` / `msd_payload` / … |
| POST | `/v1/ocr/upload` | Bulk: field **`files`** (max 20) + `metadata_queue` or `msd_payload` / `document_type` |
| GET | `/documents/analytics` | Totals, pass rate, distribution, `dailyVolume`, `topMismatchFields` |
| GET | `/documents/history` | `claimId` query — attempts grouped |
| GET | `/documents/recent` | Paginated list; query: `page`, `limit`, `search`, `status`, `type`, `sortBy`, `order` |
| GET | `/documents/:id/file` | Authenticated file bytes for preview (browser cannot use `gs://` directly) |
| GET | `/documents/:id` | Document + audit logs + `field_results` |
| PUT | `/documents/:id/status` | Manual status / corrected fields |
| DELETE | `/documents/:id` | Remove document row |
| GET | `/v1/ocr/report/:claimId/download` | Per-claim Excel |
| GET | `/v1/ocr/report/all/download` | Master Excel (supports `search`, `status`, `type`) |
Compat paths are under **`/api/...`**; namespaced routes are **`/api/v1/cpc-csd/...`** with **`/api/v1/cpc-cdc/...`** as an alias (same path suffixes as in the table's second column).
## Database
Sequelize models: **`CpcDocument`** (`cpc_documents`), **`CpcAuditLog`** (`cpc_audit_logs`). Migration: `src/migrations/2026041300-create-cpc-cdc-tables.ts`.
**Admin viewer list** is stored under `admin_configurations.config_key = CPC_CSD_ADMIN_CONFIG` (migration `20260416120000-rename-cpc-cdc-admin-config-key.ts` renames the legacy `CPC_CDC_ADMIN_CONFIG` row when applied).
On **application startup**, `ensureCpcCdcSchema()` runs after DB connect (`src/services/cpc-cdc/ensureCpcCdcSchema.ts`) so `CREATE TABLE IF NOT EXISTS` applies if migrations were skipped; still run `npm run migrate` for a full schema history.
Notable columns on `cpc_documents`: `booking_id`, `claim_id`, `attempt_no`, `document_type`, `document_gcp_url`, `provider`, JSONB `msd_payload`, `extracted_fields`, `field_confidence`, `validation_status`, `match_percentage`, `mismatch_reasons`, `field_results`, `ip_address`.
Unique index: `(claim_id, attempt_no, document_type)` — important when migrating legacy data with duplicates.
## Environment variables
Copy **`re-workflow-be/.env.example`** to `.env` and adjust. Typical keys (see `CpcCdcController` and `src/services/cpc-cdc/*`):
- **`GCP_PROJECT_ID`** — GCP project for Vertex / optional Document AI.
- **`VERTEX_AI_LOCATION`** — Vertex region (e.g. `asia-south1`).
- **`DOC_AI_PROCESSOR_ID`** — Optional; when set and valid, Document AI OCR may run before Gemini.
- **`GCP_LOCATION_DOC_AI`** — Document AI region (default `us`).
- **GCS** — Bucket/credentials as required by `CpcGcsService` (service account via `GOOGLE_APPLICATION_CREDENTIALS` or workload identity).
- **`CPC_ALLOW_DEGRADED_SAVE_WITHOUT_AI`** — **`true`**: always allow saving after failed/missing Vertex. **`false`**: in **production** only, disallow degraded saves. **Omitted in non-production**: degraded saves are **allowed** so local CPC works without GCP; set to **`false`** in dev to force strict Vertex. **Omitted in production**: strict (Vertex required unless `RULES` provider).
**Extraction behaviour (upload response):**
- **`extraction_source`: `vertex_gemini`** — Fields came from the Vertex Gemini API (document bytes + optional Document AI OCR text).
- **`extraction_source`: `rules_engine`** — Provider was **`RULES`**; fields come from `CpcRuleExtractService` on OCR text only (no Gemini).
- **`extraction_source`: `degraded_empty`** — Extraction was skipped, failed, or (in **non-production**) hit a **Vertex auth / ADC** problem; the row is still stored with empty `extracted_fields` so you can test DB/history. In production this only happens when **`CPC_ALLOW_DEGRADED_SAVE_WITHOUT_AI=true`** or missing `GCP_PROJECT_ID` with degraded policy.
## One-off data migration from legacy Prisma DB
If you still have the old **`Document`** / **`AuditLog`** tables (CPC-CSD Prisma schema) in PostgreSQL, run:
```bash
npm run migrate:cpc-csd
```
Optional **`CPC_CSD_DATABASE_URL`**: if set, rows are read from that database and written to the database in **`DATABASE_URL`** (re-workflow). If unset, both read and write use **`DATABASE_URL`** (same cluster; both table sets must exist).
After migration, spot-check history, document detail, and Excel downloads, then decommission the legacy app.

Binary file not shown.

801
package-lock.json generated

File diff suppressed because it is too large Load Diff

View File

@ -17,6 +17,7 @@
"clean": "rm -rf dist", "clean": "rm -rf dist",
"setup": "ts-node -r tsconfig-paths/register src/scripts/auto-setup.ts", "setup": "ts-node -r tsconfig-paths/register src/scripts/auto-setup.ts",
"migrate": "ts-node -r tsconfig-paths/register src/scripts/migrate.ts", "migrate": "ts-node -r tsconfig-paths/register src/scripts/migrate.ts",
"migrate:cpc-csd": "ts-node -r tsconfig-paths/register src/scripts/migrate-cpc-csd-to-cpc-tables.ts",
"seed:config": "ts-node -r tsconfig-paths/register src/scripts/seed-admin-config.ts", "seed:config": "ts-node -r tsconfig-paths/register src/scripts/seed-admin-config.ts",
"seed:test-dealer": "ts-node -r tsconfig-paths/register src/scripts/seed-test-dealer.ts", "seed:test-dealer": "ts-node -r tsconfig-paths/register src/scripts/seed-test-dealer.ts",
"seed:dealer-user": "ts-node -r tsconfig-paths/register src/scripts/seed-dealer-user.ts", "seed:dealer-user": "ts-node -r tsconfig-paths/register src/scripts/seed-dealer-user.ts",
@ -31,6 +32,7 @@
"test:ci": "jest --ci --coverage --passWithNoTests --forceExit" "test:ci": "jest --ci --coverage --passWithNoTests --forceExit"
}, },
"dependencies": { "dependencies": {
"@google-cloud/documentai": "^9.6.0",
"@google-cloud/secret-manager": "^6.1.1", "@google-cloud/secret-manager": "^6.1.1",
"@google-cloud/storage": "^7.18.0", "@google-cloud/storage": "^7.18.0",
"@google-cloud/vertexai": "^1.10.0", "@google-cloud/vertexai": "^1.10.0",
@ -45,6 +47,7 @@
"cors": "^2.8.5", "cors": "^2.8.5",
"dayjs": "^1.11.19", "dayjs": "^1.11.19",
"dotenv": "^16.4.7", "dotenv": "^16.4.7",
"exceljs": "^4.4.0",
"express": "^4.21.2", "express": "^4.21.2",
"express-rate-limit": "^7.5.0", "express-rate-limit": "^7.5.0",
"fast-xml-parser": "^5.3.3", "fast-xml-parser": "^5.3.3",
@ -66,6 +69,7 @@
"sanitize-html": "^2.17.1", "sanitize-html": "^2.17.1",
"sequelize": "^6.37.5", "sequelize": "^6.37.5",
"socket.io": "^4.8.1", "socket.io": "^4.8.1",
"string-similarity": "^4.0.4",
"uuid": "^8.3.2", "uuid": "^8.3.2",
"web-push": "^3.6.7", "web-push": "^3.6.7",
"winston": "^3.17.0", "winston": "^3.17.0",
@ -87,10 +91,12 @@
"@types/passport-jwt": "^4.0.1", "@types/passport-jwt": "^4.0.1",
"@types/pg": "^8.15.6", "@types/pg": "^8.15.6",
"@types/sanitize-html": "^2.16.0", "@types/sanitize-html": "^2.16.0",
"@types/string-similarity": "^4.0.2",
"@types/supertest": "^6.0.2", "@types/supertest": "^6.0.2",
"@types/web-push": "^3.6.4", "@types/web-push": "^3.6.4",
"@typescript-eslint/eslint-plugin": "^8.19.1", "@typescript-eslint/eslint-plugin": "^8.19.1",
"@typescript-eslint/parser": "^8.19.1", "@typescript-eslint/parser": "^8.19.1",
"concurrently": "^9.1.2",
"eslint": "^9.17.0", "eslint": "^9.17.0",
"jest": "^29.7.0", "jest": "^29.7.0",
"nodemon": "^3.1.9", "nodemon": "^3.1.9",

43
set-admin.ts Normal file
View File

@ -0,0 +1,43 @@
import { sequelize } from './src/config/database';
import { User } from './src/models/User';
/**
 * One-off maintenance script: promote a known account to ADMIN,
 * creating the user (with a synthetic okta subject) if it does not exist.
 *
 * Behavior is update-or-create: an UPDATE is attempted first; only when
 * zero rows match is a new user inserted.
 */
async function makeAdmin() {
  try {
    const email = 'testuser11@eichergroup.com';
    console.log(`Setting role to ADMIN for: ${email}`);

    // Fail fast with a clear error if the DB is unreachable.
    await sequelize.authenticate();
    console.log('Database connected.');

    const [updatedRows] = await User.update(
      { role: 'ADMIN' },
      { where: { email: email } }
    );

    if (updatedRows > 0) {
      console.log(`✅ Success! ${email} is now an ADMIN.`);
    } else {
      console.log(`⚠️ User not found in database: ${email}`);
      console.log(`Creating user ${email} with ADMIN role...`);
      // oktaSub must be unique; a timestamped marker keeps it identifiable
      // as a manually-seeded account.
      const newUser = await User.create({
        email: email,
        oktaSub: `MANUAL_ADMIN_${Date.now()}`,
        firstName: 'Test',
        lastName: 'User 11',
        displayName: 'Test User 11',
        role: 'ADMIN',
        isActive: true
      });
      console.log(`✅ Success! Created new ADMIN user: ${newUser.email}`);
    }
  } catch (error) {
    console.error('❌ Error updating user:', error);
    // Signal failure to callers/CI without aborting the finally block.
    process.exitCode = 1;
  } finally {
    // Always release the pool so the process can exit cleanly.
    await sequelize.close();
  }
}

// Fire-and-forget is intentional for a CLI script; errors are handled inside.
void makeAdmin();

View File

@ -0,0 +1,80 @@
import { NextFunction, Request, Response } from 'express';
import {
requireForm1626AsAccess,
requireForm16ReOnly,
requireForm16SubmissionAccess,
} from '../middlewares/form16Permission.middleware';
import { canView26As, canViewForm16Submission } from '../services/form16Permission.service';
import { getDealerCodeForUser } from '../services/form16.service';
jest.mock('../services/form16Permission.service', () => ({
canView26As: jest.fn(),
canViewForm16Submission: jest.fn(),
}));
jest.mock('../services/form16.service', () => ({
getDealerCodeForUser: jest.fn(),
}));
/**
 * Build a minimal Express Response stub whose `status` and `json`
 * mocks each return the stub itself, so chained calls
 * (`res.status(403).json(...)`) work inside the middlewares under test.
 */
function createRes(): Response {
  const stub: Partial<Response> = {};
  stub.status = jest.fn().mockReturnValue(stub);
  stub.json = jest.fn().mockReturnValue(stub);
  return stub as Response;
}
// Unit tests for the three Form16 authorization middlewares.
// The permission service and dealer lookup are fully mocked (see jest.mock
// calls above), so these tests exercise only the middleware branching:
// who gets next() vs. a 403 response.
describe('Form16 Permission Middlewares', () => {
const mockedCanView26As = canView26As as jest.Mock;
const mockedCanViewForm16Submission = canViewForm16Submission as jest.Mock;
const mockedGetDealerCodeForUser = getDealerCodeForUser as jest.Mock;
beforeEach(() => {
jest.clearAllMocks();
});
// ADMIN short-circuits before the config-backed service is consulted.
it('allows ADMIN on 26AS middleware without config dependency', async () => {
const req = { user: { userId: 'a1', email: 'admin@royalenfield.com', role: 'ADMIN' } } as unknown as Request;
const res = createRes();
const next = jest.fn() as NextFunction;
await requireForm1626AsAccess(req, res, next);
expect(next).toHaveBeenCalledTimes(1);
expect(mockedCanView26As).not.toHaveBeenCalled();
});
// Non-admin user not in the 26AS viewer list must be rejected with 403.
it('denies non-authorized user on 26AS middleware', async () => {
mockedCanView26As.mockResolvedValue(false);
const req = { user: { userId: 'u1', email: 'user@royalenfield.com', role: 'USER' } } as unknown as Request;
const res = createRes();
const next = jest.fn() as NextFunction;
await requireForm1626AsAccess(req, res, next);
expect(next).not.toHaveBeenCalled();
expect((res.status as jest.Mock).mock.calls[0][0]).toBe(403);
});
// A user mapped to a dealer code is a dealer, and dealers are excluded
// from the RE-only middleware regardless of other permissions.
it('denies dealer on RE-only middleware', async () => {
mockedGetDealerCodeForUser.mockResolvedValue('DLR001');
const req = { user: { userId: 'u2', email: 'dealer@royalenfield.com', role: 'USER' } } as unknown as Request;
const res = createRes();
const next = jest.fn() as NextFunction;
await requireForm16ReOnly(req, res, next);
expect(next).not.toHaveBeenCalled();
expect((res.status as jest.Mock).mock.calls[0][0]).toBe(403);
});
// Submission access for a plain RE user is granted purely by the
// (mocked) permission service returning true.
it('allows submission middleware for authorized non-admin RE user', async () => {
mockedCanViewForm16Submission.mockResolvedValue(true);
const req = { user: { userId: 'u3', email: 'submission@royalenfield.com', role: 'USER' } } as unknown as Request;
const res = createRes();
const next = jest.fn() as NextFunction;
await requireForm16SubmissionAccess(req, res, next);
expect(next).toHaveBeenCalledTimes(1);
});
});

View File

@ -0,0 +1,96 @@
import { canView26As, canViewForm16Submission, getForm16ViewerConfig } from '../services/form16Permission.service';
import { sequelize } from '../config/database';
import { getDealerCodeForUser } from '../services/form16.service';
jest.mock('../config/database', () => ({
sequelize: {
query: jest.fn(),
},
}));
jest.mock('../services/form16.service', () => ({
getDealerCodeForUser: jest.fn(),
}));
// Unit tests for the strict RBAC rules of the Form16 permission service.
// sequelize.query is mocked to return canned admin-config rows, and the
// dealer lookup is mocked, so only the pure decision logic is under test.
// Access matrix being verified:
//   ADMIN            -> submission + 26AS, unconditionally
//   dealer           -> submission always; 26AS never implicit
//   26AS viewer      -> 26AS + implicit submission access
//   submission viewer-> submission only
//   everyone else    -> denied (strict deny-by-default)
describe('Form16 Permission Service (strict RBAC)', () => {
const mockedQuery = sequelize.query as jest.Mock;
const mockedGetDealerCodeForUser = getDealerCodeForUser as jest.Mock;
beforeEach(() => {
jest.clearAllMocks();
});
// Config parsing should trim whitespace and lowercase every email.
it('returns normalized viewer lists from config', async () => {
mockedQuery.mockResolvedValue([
{
config_value: JSON.stringify({
submissionViewerEmails: [' User1@royalenfield.com '],
twentySixAsViewerEmails: ['USER2@royalenfield.com'],
}),
},
]);
const config = await getForm16ViewerConfig();
expect(config.submissionViewerEmails).toEqual(['user1@royalenfield.com']);
expect(config.twentySixAsViewerEmails).toEqual(['user2@royalenfield.com']);
});
// ADMIN bypasses both config and dealer lookups entirely.
it('ADMIN always has submission and 26AS access', async () => {
expect(await canViewForm16Submission('admin@royalenfield.com', 'u-admin', 'ADMIN')).toBe(true);
expect(await canView26As('admin@royalenfield.com', 'ADMIN')).toBe(true);
});
// Dealers see their own submissions, but 26AS requires an explicit grant.
it('dealer always has submission access, but not implicit 26AS access', async () => {
mockedGetDealerCodeForUser.mockResolvedValue('DLR001');
mockedQuery.mockResolvedValue([{ config_value: JSON.stringify({ submissionViewerEmails: [], twentySixAsViewerEmails: [] }) }]);
expect(await canViewForm16Submission('dealer@royalenfield.com', 'u-dealer', 'USER')).toBe(true);
expect(await canView26As('dealer@royalenfield.com', 'USER')).toBe(false);
});
// Non-dealer RE user: submission access is list-membership only.
it('non-admin RE user gets submission access only when listed in submission viewers', async () => {
mockedGetDealerCodeForUser.mockResolvedValue(null);
mockedQuery.mockResolvedValue([
{
config_value: JSON.stringify({
submissionViewerEmails: ['submissions@royalenfield.com'],
twentySixAsViewerEmails: [],
}),
},
]);
expect(await canViewForm16Submission('submissions@royalenfield.com', 'u1', 'USER')).toBe(true);
expect(await canViewForm16Submission('other@royalenfield.com', 'u2', 'USER')).toBe(false);
});
// 26AS is the broader permission: being a 26AS viewer also implies
// submission visibility.
it('26AS viewers implicitly have submission access', async () => {
mockedGetDealerCodeForUser.mockResolvedValue(null);
mockedQuery.mockResolvedValue([
{
config_value: JSON.stringify({
submissionViewerEmails: [],
twentySixAsViewerEmails: ['twentysix@royalenfield.com'],
}),
},
]);
expect(await canViewForm16Submission('twentysix@royalenfield.com', 'u3', 'USER')).toBe(true);
expect(await canView26As('twentysix@royalenfield.com', 'USER')).toBe(true);
});
// Deny-by-default: empty viewer lists grant nothing to plain RE users.
it('strict deny when viewer lists are empty for non-admin RE user', async () => {
mockedGetDealerCodeForUser.mockResolvedValue(null);
mockedQuery.mockResolvedValue([
{
config_value: JSON.stringify({
submissionViewerEmails: [],
twentySixAsViewerEmails: [],
}),
},
]);
expect(await canViewForm16Submission('re-user@royalenfield.com', 'u4', 'USER')).toBe(false);
expect(await canView26As('re-user@royalenfield.com', 'USER')).toBe(false);
});
});

View File

@ -6,11 +6,13 @@ import cookieParser from 'cookie-parser';
import { UserService } from './services/user.service'; import { UserService } from './services/user.service';
import { SSOUserData } from './types/auth.types'; import { SSOUserData } from './types/auth.types';
import { sequelize } from './config/database'; import { sequelize } from './config/database';
import { ensureCpcCdcSchema } from './services/cpc-cdc/ensureCpcCdcSchema';
import { corsMiddleware } from './middlewares/cors.middleware'; import { corsMiddleware } from './middlewares/cors.middleware';
import { authenticateToken } from './middlewares/auth.middleware'; import { authenticateToken } from './middlewares/auth.middleware';
import { requireAdmin } from './middlewares/authorization.middleware'; import { requireAdmin } from './middlewares/authorization.middleware';
import { metricsMiddleware, createMetricsRouter } from './middlewares/metrics.middleware'; import { metricsMiddleware, createMetricsRouter } from './middlewares/metrics.middleware';
import routes from './routes/index'; import routes from './routes/index';
import { registerCpcCsdCompatRoutes } from './routes/cpc-csd-compat.mount';
import form16Routes from './routes/form16.routes'; import form16Routes from './routes/form16.routes';
import { ensureUploadDir, UPLOAD_DIR } from './config/storage'; import { ensureUploadDir, UPLOAD_DIR } from './config/storage';
import { initializeGoogleSecretManager } from './services/googleSecretManager.service'; import { initializeGoogleSecretManager } from './services/googleSecretManager.service';
@ -28,15 +30,25 @@ const app: express.Application = express();
// 1. Security middleware - Manual "Gold Standard" CSP to ensure it survives 301/404/etc. // 1. Security middleware - Manual "Gold Standard" CSP to ensure it survives 301/404/etc.
// This handles a specific Express/Helmet edge case where redirects lose headers. // This handles a specific Express/Helmet edge case where redirects lose headers.
app.use((req: express.Request, res: express.Response, next: express.NextFunction) => { app.use((req: express.Request, res: express.Response, next: express.NextFunction) => {
const isDev = process.env.NODE_ENV === 'development' || process.env.NODE_ENV === 'local'; // Match server.ts: anything except production is "dev" for local tooling (.env often uses NODE_ENV=dev)
const nodeEnv = (process.env.NODE_ENV || '').toLowerCase();
const isDev = nodeEnv !== 'production' && nodeEnv !== 'prod';
const frontendUrl = process.env.FRONTEND_URL || 'http://localhost:3000'; const frontendUrl = process.env.FRONTEND_URL || 'http://localhost:3000';
// Build connect-src dynamically // Build connect-src dynamically
const connectSrc = ["'self'", "blob:", "data:"]; const connectSrc = ["'self'", "blob:", "data:"];
if (isDev) { if (isDev) {
connectSrc.push("http://localhost:3000", "http://localhost:5000", "ws://localhost:3000", "ws://localhost:5000"); for (let port = 3000; port <= 3010; port++) {
if (frontendUrl.includes('localhost')) connectSrc.push(frontendUrl); connectSrc.push(`http://localhost:${port}`, `http://127.0.0.1:${port}`);
connectSrc.push(`ws://localhost:${port}`, `ws://127.0.0.1:${port}`);
}
for (let port = 5000; port <= 5005; port++) {
connectSrc.push(`http://localhost:${port}`, `http://127.0.0.1:${port}`);
connectSrc.push(`ws://localhost:${port}`, `ws://127.0.0.1:${port}`);
}
if (frontendUrl.includes('localhost') || frontendUrl.includes('127.0.0.1')) connectSrc.push(frontendUrl);
} else if (frontendUrl && frontendUrl !== '*') { } else if (frontendUrl && frontendUrl !== '*') {
const origins = frontendUrl.split(',').map(url => url.trim()).filter(Boolean); const origins = frontendUrl.split(',').map(url => url.trim()).filter(Boolean);
connectSrc.push(...origins); connectSrc.push(...origins);
} }
@ -97,6 +109,7 @@ export const initializeAppDatabase = async () => {
try { try {
await sequelize.authenticate(); await sequelize.authenticate();
console.log('✅ App database connection established'); console.log('✅ App database connection established');
await ensureCpcCdcSchema();
} catch (error) { } catch (error) {
console.error('❌ App database connection failed:', error); console.error('❌ App database connection failed:', error);
throw error; throw error;
@ -147,6 +160,9 @@ app.get('/health', (_req: express.Request, res: express.Response) => {
}); });
}); });
// CPC-CSD-compatible paths (`/api/upload`, `/api/documents/*`, `/api/v1/ocr/*`) — same as `CPC-CSD/server` router
registerCpcCsdCompatRoutes(app);
// Mount API routes (form16 already mounted above before body parser) // Mount API routes (form16 already mounted above before body parser)
app.use('/api/v1', routes); app.use('/api/v1', routes);

View File

@ -0,0 +1,10 @@
/**
 * Centralized session policy for VAPT compliance.
 * Keep strict constants (no environment overrides) to prevent accidental relaxation.
 */

// Single source of truth: both the human-readable TTL strings (JWT
// "expiresIn" notation) and their millisecond equivalents derive from this
// one number, so the two representations can never drift apart.
const SESSION_TTL_MINUTES = 30;

/** Access-token lifetime ("expiresIn" string form). */
export const ACCESS_TOKEN_TTL = `${SESSION_TTL_MINUTES}m`;
/** Refresh-token lifetime ("expiresIn" string form). Same as access by policy. */
export const REFRESH_TOKEN_TTL = `${SESSION_TTL_MINUTES}m`;
/** Access-token lifetime in milliseconds (cookie maxAge). */
export const ACCESS_TOKEN_TTL_MS = SESSION_TTL_MINUTES * 60 * 1000;
/** Refresh-token lifetime in milliseconds (cookie maxAge). */
export const REFRESH_TOKEN_TTL_MS = SESSION_TTL_MINUTES * 60 * 1000;

View File

@ -1,11 +1,13 @@
import { SSOConfig, SSOUserData } from '../types/auth.types'; import { SSOConfig, SSOUserData } from '../types/auth.types';
import { ACCESS_TOKEN_TTL, REFRESH_TOKEN_TTL } from './sessionPolicy';
// Use getter functions to read from process.env dynamically // Use getter functions to read from process.env dynamically
// This ensures values are read after secrets are loaded from Google Secret Manager // This ensures values are read after secrets are loaded from Google Secret Manager
const ssoConfig: SSOConfig = { const ssoConfig: SSOConfig = {
get jwtSecret() { return process.env.JWT_SECRET || ''; }, get jwtSecret() { return process.env.JWT_SECRET || ''; },
get jwtExpiry() { return process.env.JWT_EXPIRY || '24h'; }, // VAPT hard policy: no env-based override for token lifetimes.
get refreshTokenExpiry() { return process.env.REFRESH_TOKEN_EXPIRY || '7d'; }, get jwtExpiry() { return ACCESS_TOKEN_TTL; },
get refreshTokenExpiry() { return REFRESH_TOKEN_TTL; },
get sessionSecret() { return process.env.SESSION_SECRET || ''; }, get sessionSecret() { return process.env.SESSION_SECRET || ''; },
// Use only FRONTEND_URL from environment - no fallbacks // Use only FRONTEND_URL from environment - no fallbacks
get allowedOrigins() { get allowedOrigins() {

View File

@ -67,8 +67,8 @@ export const SYSTEM_CONFIG = {
// Session & Security // Session & Security
SECURITY: { SECURITY: {
SESSION_TIMEOUT_MINUTES: parseInt(process.env.SESSION_TIMEOUT_MINUTES || '480', 10), // 8 hours SESSION_TIMEOUT_MINUTES: parseInt(process.env.SESSION_TIMEOUT_MINUTES || '30', 10),
JWT_EXPIRY: process.env.JWT_EXPIRY || '8h', JWT_EXPIRY: process.env.JWT_EXPIRY || '30m',
ENABLE_2FA: process.env.ENABLE_2FA === 'true', ENABLE_2FA: process.env.ENABLE_2FA === 'true',
}, },

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,204 @@
import { Request, Response } from 'express';
import { CpcHistoryService } from '../services/cpc-cdc/CpcHistoryService';
import { CpcDocument } from '../models/CpcDocument';
import { appendCpcDocumentFilters, cpcWhereFromAndParts } from '../services/cpc-cdc/utils';
import ExcelJS from 'exceljs';
import { ResponseHandler } from '../utils/responseHandler';
import { Op } from 'sequelize';
export class CpcReportController {
  /**
   * Download an Excel validation report for a single claim.
   *
   * Route param `claimId` is matched against either `claimId` or `bookingId`.
   * Optional query param `attempt` filters by attempt number (silently
   * ignored when not a valid integer, instead of poisoning the query with NaN).
   * Responds with an .xlsx attachment, or 404 when no documents match.
   */
  async downloadReport(req: Request, res: Response) {
    try {
      const { claimId } = req.params;
      const { attempt } = req.query;

      // A "claim" may be stored under either identifier, so match both.
      const where: any = {
        [Op.or]: [
          { claimId: claimId },
          { bookingId: claimId }
        ]
      };
      // Radix-10 parse with NaN guard: a NaN in the where-clause would
      // make the Sequelize query fail or match nothing unexpectedly.
      if (attempt) {
        const attemptNo = Number.parseInt(attempt as string, 10);
        if (Number.isFinite(attemptNo)) where.attemptNo = attemptNo;
      }

      const docs = await CpcDocument.findAll({
        where,
        order: [['createdAt', 'DESC']]
      });

      if (!docs || docs.length === 0) {
        return ResponseHandler.error(res, "No records found for this claim", 404);
      }

      const workbook = new ExcelJS.Workbook();
      const sheet = workbook.addWorksheet('Validation Report');

      // --- Two-row grouped header: row 1 holds group titles, row 2 the
      // five sub-columns of each group (Expected/OCR/accuracy/criteria/match).
      const row1 = sheet.getRow(1);
      row1.values = [
        'Booking Type', 'Booking Number', 'Document Count', 'Document Name',
        'Customer Name', '', '', '', '',
        'PO Number /Authorisation Letter Number', '', '', '', '',
        'Aadhar Number', '', '', '', '',
        'PO Amount / Authorisation Letter Amount', '', '', '', '',
        'Signature & Stamp Availability', '', '', '', '',
        'Final Validation'
      ];
      const row2 = sheet.getRow(2);
      row2.values = [
        '', '', '', '',
        'Expected', 'OCR', 'Accuracy Matching %', 'Accuracy Criteria', 'Is Match the Accuracy',
        'Expected', 'OCR', 'Accuracy Matching %', 'Accuracy Criteria', 'Is Match the Accuracy',
        'Expected', 'OCR', 'Accuracy Matching %', 'Accuracy Criteria', 'Is Match the Accuracy',
        'Expected', 'OCR', 'Accuracy Matching %', 'Accuracy Criteria', 'Is Match the Accuracy',
        'Expected', 'OCR', 'Accuracy Matching Availability', 'Accuracy Criteria', 'Success Ratio',
        ''
      ];
      // Merge each group title across its five sub-columns, and the
      // single-column headers vertically across both header rows.
      sheet.mergeCells('E1:I1');
      sheet.mergeCells('J1:N1');
      sheet.mergeCells('O1:S1');
      sheet.mergeCells('T1:X1');
      sheet.mergeCells('Y1:AC1');
      sheet.mergeCells('A1:A2'); sheet.mergeCells('B1:B2'); sheet.mergeCells('C1:C2'); sheet.mergeCells('D1:D2');
      sheet.mergeCells('AD1:AD2');

      [row1, row2].forEach((r: any) => {
        r.font = { bold: true, size: 9 };
        r.alignment = { vertical: 'middle', horizontal: 'center', wrapText: true };
        r.eachCell((cell: any) => {
          cell.fill = { type: 'pattern', pattern: 'solid', fgColor: { argb: 'FFD9D9D9' } };
          cell.border = { top: { style: 'thin' }, left: { style: 'thin' }, bottom: { style: 'thin' }, right: { style: 'thin' } };
        });
      });

      // One summary row per document; 'N.A.' cells in the data columns
      // (beyond the first four identity columns) are highlighted red.
      docs.forEach((doc: any, idx: number) => {
        const rowData = CpcHistoryService.getSummaryRow(doc, idx);
        const values = [
          rowData.booking_type,
          rowData.booking_number,
          rowData.document_count,
          rowData.document_name,
          rowData.customer_name_group.msd, rowData.customer_name_group.ocr, rowData.customer_name_group.accuracy_pct, rowData.customer_name_group.criteria, rowData.customer_name_group.is_match,
          rowData.po_or_auth_number_group.msd, rowData.po_or_auth_number_group.ocr, rowData.po_or_auth_number_group.accuracy_pct, rowData.po_or_auth_number_group.criteria, rowData.po_or_auth_number_group.is_match,
          rowData.aadhaar_number_group.msd, rowData.aadhaar_number_group.ocr, rowData.aadhaar_number_group.accuracy_pct, rowData.aadhaar_number_group.criteria, rowData.aadhaar_number_group.is_match,
          rowData.amount_group.msd, rowData.amount_group.ocr, rowData.amount_group.accuracy_pct, rowData.amount_group.criteria, rowData.amount_group.is_match,
          rowData.stamp_group.msd, rowData.stamp_group.ocr, rowData.stamp_group.accuracy_pct, rowData.stamp_group.criteria, rowData.stamp_group.is_match,
          rowData.final_validation
        ];
        const row = sheet.addRow(values);
        row.eachCell((cell: any, colNum: number) => {
          cell.border = { top: { style: 'thin' }, left: { style: 'thin' }, bottom: { style: 'thin' }, right: { style: 'thin' } };
          cell.font = { size: 8 };
          cell.alignment = { vertical: 'middle', horizontal: 'center' };
          if (cell.value === 'N.A.' && colNum > 4) {
            cell.fill = { type: 'pattern', pattern: 'solid', fgColor: { argb: 'FFFF0000' } };
            cell.font = { color: { argb: 'FFFFFFFF' }, size: 8, bold: true };
          }
        });
      });

      // --- Detailed per-field comparison appended below the summary table.
      sheet.addRow([]);
      sheet.addRow([]);
      const detailHeader = sheet.addRow(['Detailed Field-Wise Comparison']);
      detailHeader.font = { bold: true, size: 12 };

      docs.forEach((doc: any) => {
        const docHeader = sheet.addRow([`Document: ${doc.documentType?.replace(/_/g, ' ')}`]);
        docHeader.font = { bold: true, size: 10 };
        sheet.addRow(['Field', 'Expected', 'Extracted (OCR)', 'Accuracy %', 'Criteria', 'Status', 'Message']);
        const finalResults = CpcHistoryService.getDetailedFieldResults(doc);
        finalResults.forEach((f: any) => {
          sheet.addRow([
            f.field.replace(/_/g, ' '),
            f.expected || '-',
            f.extracted || 'Not extracted',
            f.accuracy,
            f.criteria,
            f.pass ? 'PASS' : 'FAIL',
            f.message
          ]);
        });
        sheet.addRow([]);
      });

      // Sanitize the user-supplied id so the Content-Disposition header
      // stays well-formed (no quotes, separators, or control characters).
      const safeClaimId = String(claimId).replace(/[^\w.-]/g, '_');
      res.setHeader('Content-Type', 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet');
      res.setHeader('Content-Disposition', `attachment; filename=Report_${safeClaimId}.xlsx`);
      await workbook.xlsx.write(res);
      res.end();
    } catch (error: unknown) {
      // catch variables are untyped at runtime; narrow before reading .message.
      const message = error instanceof Error && error.message ? error.message : "Report generation failed";
      return ResponseHandler.error(res, message, 500);
    }
  }

  /**
   * Download the Master Audit Report covering every document that matches
   * the optional `search` / `status` / `type` query filters.
   *
   * Layout mirrors the single-claim summary table (no per-field detail
   * section and no cell styling on data rows).
   */
  async downloadAllReport(req: Request, res: Response) {
    try {
      const { search, status, type } = req.query;

      // Build the AND-combined filter list via the shared CPC helpers so the
      // report uses the same filtering semantics as the history listing.
      const andParts: Record<string, unknown>[] = [];
      appendCpcDocumentFilters(andParts, {
        type: type as string,
        status: status as string,
        search: search as string,
        searchIncludeId: false
      });
      const where = cpcWhereFromAndParts(andParts);

      const docs = await CpcDocument.findAll({
        where,
        order: [['createdAt', 'DESC']]
      });

      const workbook = new ExcelJS.Workbook();
      const sheet = workbook.addWorksheet('Master Audit Trail');

      // Same grouped two-row header as downloadReport, but with a
      // truncated Doc ID column instead of Document Count.
      const row1 = sheet.getRow(1);
      row1.values = ['Booking Type', 'Booking Number', 'Doc ID', 'Document Name', 'Customer Name', '', '', '', '', 'PO Number /Authorisation Letter Number', '', '', '', '', 'Aadhar Number', '', '', '', '', 'PO Amount / Authorisation Letter Amount', '', '', '', '', 'Signature & Stamp Availability', '', '', '', '', 'Final Validation'];
      const row2 = sheet.getRow(2);
      row2.values = ['', '', '', '', 'Expected', 'OCR', 'Accuracy Matching %', 'Accuracy Criteria', 'Is Match the Accuracy', 'Expected', 'OCR', 'Accuracy Matching %', 'Accuracy Criteria', 'Is Match the Accuracy', 'Expected', 'OCR', 'Accuracy Matching %', 'Accuracy Criteria', 'Is Match the Accuracy', 'Expected', 'OCR', 'Accuracy Matching %', 'Accuracy Criteria', 'Is Match the Accuracy', 'Expected', 'OCR', 'Accuracy Matching Availability', 'Accuracy Criteria', 'Success Ratio', ''];
      sheet.mergeCells('E1:I1'); sheet.mergeCells('J1:N1'); sheet.mergeCells('O1:S1'); sheet.mergeCells('T1:X1'); sheet.mergeCells('Y1:AC1'); sheet.mergeCells('A1:A2'); sheet.mergeCells('B1:B2'); sheet.mergeCells('C1:C2'); sheet.mergeCells('D1:D2'); sheet.mergeCells('AD1:AD2');
      [row1, row2].forEach((r: any) => {
        r.font = { bold: true, size: 9 };
        r.alignment = { vertical: 'middle', horizontal: 'center', wrapText: true };
        r.eachCell((cell: any) => { cell.fill = { type: 'pattern', pattern: 'solid', fgColor: { argb: 'FFD9D9D9' } }; cell.border = { top: { style: 'thin' }, left: { style: 'thin' }, bottom: { style: 'thin' }, right: { style: 'thin' } }; });
      });

      docs.forEach((doc: any, idx: number) => {
        const rowData = CpcHistoryService.getSummaryRow(doc, idx);
        const values = [
          rowData.booking_type, rowData.booking_number, String(doc.id).slice(0, 8), rowData.document_name,
          rowData.customer_name_group.msd, rowData.customer_name_group.ocr, rowData.customer_name_group.accuracy_pct, rowData.customer_name_group.criteria, rowData.customer_name_group.is_match,
          rowData.po_or_auth_number_group.msd, rowData.po_or_auth_number_group.ocr, rowData.po_or_auth_number_group.accuracy_pct, rowData.po_or_auth_number_group.criteria, rowData.po_or_auth_number_group.is_match,
          rowData.aadhaar_number_group.msd, rowData.aadhaar_number_group.ocr, rowData.aadhaar_number_group.accuracy_pct, rowData.aadhaar_number_group.criteria, rowData.aadhaar_number_group.is_match,
          rowData.amount_group.msd, rowData.amount_group.ocr, rowData.amount_group.accuracy_pct, rowData.amount_group.criteria, rowData.amount_group.is_match,
          rowData.stamp_group.msd, rowData.stamp_group.ocr, rowData.stamp_group.accuracy_pct, rowData.stamp_group.criteria, rowData.stamp_group.is_match,
          rowData.final_validation
        ];
        // Return value intentionally unused: master rows get no styling.
        sheet.addRow(values);
      });

      res.setHeader('Content-Type', 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet');
      res.setHeader('Content-Disposition', `attachment; filename=Master_Audit_Report.xlsx`);
      await workbook.xlsx.write(res);
      res.end();
    } catch (error: unknown) {
      const message = error instanceof Error && error.message ? error.message : "Master report failed";
      return ResponseHandler.error(res, message, 500);
    }
  }
}

View File

@ -9,6 +9,11 @@ import { initializeHolidaysCache, clearWorkingHoursCache } from '@utils/tatTimeU
import { clearConfigCache } from '@services/configReader.service'; import { clearConfigCache } from '@services/configReader.service';
import { User, UserRole } from '@models/User'; import { User, UserRole } from '@models/User';
import { sanitizeHtml, sanitizeObject, isHtmlEmpty } from '@utils/sanitizer'; import { sanitizeHtml, sanitizeObject, isHtmlEmpty } from '@utils/sanitizer';
import {
CPC_CSD_ADMIN_CONFIG_KEY,
CPC_CDC_ADMIN_CONFIG_KEY_LEGACY,
selectCpcCsdAdminConfigValue,
} from '@utils/cpcCsdAdminConfigDb';
/** /**
* Get all holidays (with optional year filter) * Get all holidays (with optional year filter)
@ -564,6 +569,10 @@ const DEFAULT_FORM16_CONFIG = {
reminderNotificationTemplate: 'Reminder: Dear [Name], your Form 16A submission is pending for request [Request ID]. Please complete it.', reminderNotificationTemplate: 'Reminder: Dear [Name], your Form 16A submission is pending for request [Request ID]. Please complete it.',
}; };
const DEFAULT_CPC_CSD_CONFIG = {
viewerEmails: [] as string[],
};
/** /**
* Get Form 16 admin configuration (who can see submission data, 26AS, reminders) * Get Form 16 admin configuration (who can see submission data, 26AS, reminders)
*/ */
@ -721,6 +730,93 @@ export const putForm16Config = async (req: Request, res: Response): Promise<void
} }
}; };
/**
 * Get CPC-CSD admin configuration (who can access CPC-CSD module).
 *
 * Reads the stored JSON config value; on any read/parse failure falls back
 * to DEFAULT_CPC_CSD_CONFIG (empty viewer list) rather than erroring, so a
 * corrupt config row never locks admins out of the settings screen.
 */
export const getCpcCdcConfig = async (req: Request, res: Response): Promise<void> => {
try {
const raw = await selectCpcCsdAdminConfigValue();
if (raw) {
try {
const parsed = JSON.parse(raw);
res.json({
success: true,
data: {
// Only accept a well-formed array; anything else reverts to the default.
viewerEmails: Array.isArray(parsed.viewerEmails) ? parsed.viewerEmails : DEFAULT_CPC_CSD_CONFIG.viewerEmails,
},
});
return;
} catch {
// Malformed JSON in the stored row — fall through to defaults.
}
}
res.json({ success: true, data: DEFAULT_CPC_CSD_CONFIG });
} catch (error: any) {
logger.error('[Admin] Error fetching CPC-CSD config:', error);
res.status(500).json({
success: false,
error: error.message || 'Failed to fetch CPC-CSD configuration',
});
}
};
/**
 * Update CPC-CSD admin configuration.
 *
 * Normalizes the submitted viewer emails (trim + lowercase, drop empties),
 * upserts the JSON config row keyed by CPC_CSD_ADMIN_CONFIG_KEY, deletes any
 * row still stored under the legacy key, and clears the config cache so the
 * change takes effect immediately.
 */
export const putCpcCdcConfig = async (req: Request, res: Response): Promise<void> => {
try {
const userId = req.user?.userId;
if (!userId) {
res.status(401).json({ success: false, error: 'User not authenticated' });
return;
}
// Sanitize the whole body first to strip any embedded HTML/script content.
const body = sanitizeObject(req.body as Record<string, unknown>);
const normalizeEmail = (e: unknown) => String(e ?? '').trim().toLowerCase();
// Non-array input resets to the default (empty) viewer list.
const viewerEmails = Array.isArray(body.viewerEmails)
? body.viewerEmails.map(normalizeEmail).filter(Boolean)
: DEFAULT_CPC_CSD_CONFIG.viewerEmails;
const configValue = JSON.stringify({
viewerEmails,
});
// Upsert via ON CONFLICT on config_key; relies on gen_random_uuid()
// (pgcrypto / PG13+) for new rows.
await sequelize.query(
`INSERT INTO admin_configurations (
config_id, config_key, config_category, config_value, value_type, display_name, description, is_editable, is_sensitive, sort_order, created_at, updated_at, last_modified_by, last_modified_at
) VALUES (
gen_random_uuid(), :configKey, 'SYSTEM_SETTINGS', :configValue, 'JSON', 'CPC-CSD Admin Config', 'CPC-CSD module visibility settings', true, false, 0, NOW(), NOW(), :userId, NOW()
)
ON CONFLICT (config_key) DO UPDATE SET
config_value = EXCLUDED.config_value,
last_modified_by = EXCLUDED.last_modified_by,
last_modified_at = NOW(),
updated_at = NOW()`,
{
replacements: { configKey: CPC_CSD_ADMIN_CONFIG_KEY, configValue, userId },
type: QueryTypes.RAW,
}
);
// One-time cleanup: remove any config stored under the old (legacy) key
// so reads never pick up a stale duplicate.
await sequelize.query(
`DELETE FROM admin_configurations WHERE config_key = :legacy`,
{ replacements: { legacy: CPC_CDC_ADMIN_CONFIG_KEY_LEGACY }, type: QueryTypes.RAW }
);
clearConfigCache();
logger.info('[Admin] CPC-CSD configuration updated');
res.json({ success: true, message: 'CPC-CSD configuration saved' });
} catch (error: any) {
logger.error('[Admin] Error updating CPC-CSD config:', error);
res.status(500).json({
success: false,
error: error.message || 'Failed to save CPC-CSD configuration',
});
}
};
/** /**
* ============================================ * ============================================
* USER ROLE MANAGEMENT (RBAC) * USER ROLE MANAGEMENT (RBAC)

View File

@ -6,14 +6,41 @@ import type { AuthenticatedRequest } from '../types/express';
import logger from '../utils/logger'; import logger from '../utils/logger';
import { activityService, SYSTEM_EVENT_REQUEST_ID } from '../services/activity.service'; import { activityService, SYSTEM_EVENT_REQUEST_ID } from '../services/activity.service';
import { getRequestMetadata } from '../utils/requestUtils'; import { getRequestMetadata } from '../utils/requestUtils';
import { ACCESS_TOKEN_TTL_MS, REFRESH_TOKEN_TTL_MS } from '../config/sessionPolicy';
import crypto from 'crypto';
export class AuthController { export class AuthController {
private authService: AuthService; private authService: AuthService;
// One-time code usage guard (in-memory, per instance).
// Maps sha256(code) -> timestamp of first consumption; entries older than
// authCodeTtlMs are pruned lazily on each access.
// NOTE(review): per-process only — does not prevent replay across multiple
// instances/pods; confirm deployment is single-instance or add shared storage.
private readonly consumedAuthCodes = new Map<string, number>();
// Matches the typical OAuth authorization-code validity window (10 minutes).
private readonly authCodeTtlMs = 10 * 60 * 1000;
constructor() { constructor() {
this.authService = new AuthService(); this.authService = new AuthService();
} }
/** SHA-256 hex digest of the auth code, so plaintext codes are never kept in memory. */
private getCodeDigest(code: string): string {
return crypto.createHash('sha256').update(code).digest('hex');
}
/** Drop consumed-code entries older than authCodeTtlMs to bound map growth. */
private pruneConsumedCodes(now: number): void {
for (const [digest, ts] of this.consumedAuthCodes.entries()) {
if (now - ts > this.authCodeTtlMs) this.consumedAuthCodes.delete(digest);
}
}
/** True when this auth code was already consumed within the TTL window (prunes first). */
private hasConsumedCode(code: string): boolean {
const now = Date.now();
this.pruneConsumedCodes(now);
return this.consumedAuthCodes.has(this.getCodeDigest(code));
}
/** Record the auth code's digest as consumed at the current time (prunes first). */
private markCodeConsumed(code: string): void {
const now = Date.now();
this.pruneConsumedCodes(now);
this.consumedAuthCodes.set(this.getCodeDigest(code), now);
}
/** /**
* Handle SSO callback from frontend * Handle SSO callback from frontend
* POST /api/v1/auth/sso-callback * POST /api/v1/auth/sso-callback
@ -129,7 +156,7 @@ export class AuthController {
return; return;
} }
const newAccessToken = await this.authService.refreshAccessToken(refreshToken); const refreshResult = await this.authService.refreshAccessToken(refreshToken);
// Set new access token in cookie if using cookie-based auth // Set new access token in cookie if using cookie-based auth
const isProduction = process.env.NODE_ENV === 'production'; const isProduction = process.env.NODE_ENV === 'production';
@ -140,10 +167,10 @@ export class AuthController {
httpOnly: true, httpOnly: true,
secure: isSecureEnv, secure: isSecureEnv,
sameSite: isSecureEnv ? 'lax' as const : 'lax' as const, // 'lax' is safer and works on same-domain sameSite: isSecureEnv ? 'lax' as const : 'lax' as const, // 'lax' is safer and works on same-domain
maxAge: 24 * 60 * 60 * 1000, // 24 hours maxAge: Math.max(1000, refreshResult.accessTokenTtlMs),
}; };
res.cookie('accessToken', newAccessToken, cookieOptions); res.cookie('accessToken', refreshResult.accessToken, cookieOptions);
// SECURITY: In production, don't return token in response body // SECURITY: In production, don't return token in response body
// Token is securely stored in httpOnly cookie // Token is securely stored in httpOnly cookie
@ -154,7 +181,7 @@ export class AuthController {
} else { } else {
// Dev: Include token for debugging // Dev: Include token for debugging
ResponseHandler.success(res, { ResponseHandler.success(res, {
accessToken: newAccessToken accessToken: refreshResult.accessToken
}, 'Token refreshed successfully'); }, 'Token refreshed successfully');
} }
} catch (error) { } catch (error) {
@ -218,7 +245,7 @@ export class AuthController {
httpOnly: true, httpOnly: true,
secure: isSecureEnv, secure: isSecureEnv,
sameSite: isSecureEnv ? ('lax' as const) : ('lax' as const), sameSite: isSecureEnv ? ('lax' as const) : ('lax' as const),
maxAge: 24 * 60 * 60 * 1000, // 24 hours maxAge: ACCESS_TOKEN_TTL_MS,
path: '/', path: '/',
}; };
@ -271,7 +298,7 @@ export class AuthController {
httpOnly: true, httpOnly: true,
secure: isSecureEnv, secure: isSecureEnv,
sameSite: isSecureEnv ? ('lax' as const) : ('lax' as const), sameSite: isSecureEnv ? ('lax' as const) : ('lax' as const),
maxAge: 24 * 60 * 60 * 1000, maxAge: ACCESS_TOKEN_TTL_MS,
path: '/', path: '/',
}; };
@ -498,14 +525,14 @@ export class AuthController {
httpOnly: true, httpOnly: true,
secure: isSecureEnv, secure: isSecureEnv,
sameSite: isSecureEnv ? 'lax' as const : 'lax' as const, sameSite: isSecureEnv ? 'lax' as const : 'lax' as const,
maxAge: 24 * 60 * 60 * 1000, // 24 hours maxAge: ACCESS_TOKEN_TTL_MS,
}; };
res.cookie('accessToken', result.accessToken, cookieOptions); res.cookie('accessToken', result.accessToken, cookieOptions);
const refreshCookieOptions = { const refreshCookieOptions = {
...cookieOptions, ...cookieOptions,
maxAge: 7 * 24 * 60 * 60 * 1000, // 7 days maxAge: REFRESH_TOKEN_TTL_MS,
}; };
res.cookie('refreshToken', result.refreshToken, refreshCookieOptions); res.cookie('refreshToken', result.refreshToken, refreshCookieOptions);
@ -545,6 +572,17 @@ export class AuthController {
const { code, redirectUri } = validateTokenExchange(req.body); const { code, redirectUri } = validateTokenExchange(req.body);
logger.info('Token exchange validation passed', { redirectUri }); logger.info('Token exchange validation passed', { redirectUri });
if (this.hasConsumedCode(code)) {
ResponseHandler.error(
res,
'Token exchange failed',
400,
'RELOGIN_REQUIRED'
);
return;
}
this.markCodeConsumed(code);
const userAgent = req.headers['user-agent'] || getRequestMetadata(req).userAgent; const userAgent = req.headers['user-agent'] || getRequestMetadata(req).userAgent;
const result = await this.authService.exchangeCodeForTokens(code, redirectUri, userAgent); const result = await this.authService.exchangeCodeForTokens(code, redirectUri, userAgent);
@ -582,14 +620,14 @@ export class AuthController {
httpOnly: true, httpOnly: true,
secure: isSecureEnv, secure: isSecureEnv,
sameSite: isSecureEnv ? 'lax' as const : 'lax' as const, // 'lax' for same-domain sameSite: isSecureEnv ? 'lax' as const : 'lax' as const, // 'lax' for same-domain
maxAge: 24 * 60 * 60 * 1000, // 24 hours for access token maxAge: ACCESS_TOKEN_TTL_MS,
}; };
res.cookie('accessToken', result.accessToken, cookieOptions); res.cookie('accessToken', result.accessToken, cookieOptions);
const refreshCookieOptions = { const refreshCookieOptions = {
...cookieOptions, ...cookieOptions,
maxAge: 7 * 24 * 60 * 60 * 1000, // 7 days for refresh token maxAge: REFRESH_TOKEN_TTL_MS,
}; };
res.cookie('refreshToken', result.refreshToken, refreshCookieOptions); res.cookie('refreshToken', result.refreshToken, refreshCookieOptions);
@ -624,6 +662,14 @@ export class AuthController {
} catch (error) { } catch (error) {
logger.error('Token exchange failed:', error); logger.error('Token exchange failed:', error);
const errorMessage = error instanceof Error ? error.message : 'Unknown error'; const errorMessage = error instanceof Error ? error.message : 'Unknown error';
const normalized = String(errorMessage || '').toLowerCase();
const isExpiredOrInvalidCode =
normalized.includes('authorization code is invalid or has expired') ||
normalized.includes('invalid_grant');
if (isExpiredOrInvalidCode) {
ResponseHandler.error(res, 'Token exchange failed', 400, 'RELOGIN_REQUIRED');
return;
}
ResponseHandler.error(res, 'Token exchange failed', 400, errorMessage); ResponseHandler.error(res, 'Token exchange failed', 400, errorMessage);
} }
} }

View File

@ -0,0 +1,36 @@
import { Request, Response } from 'express';
import { ResponseHandler } from '../utils/responseHandler';
import logger from '@utils/logger';
import { canAccessCpcCdc } from '../services/cpcPermission.service';
class CpcPermissionController {
  /**
   * GET /api/v1/cpc-csd/permissions (legacy: /api/v1/cpc-cdc/permissions)
   * Responds with the CPC-CSD access flag for the authenticated user.
   */
  async getPermissions(req: Request, res: Response): Promise<void> {
    try {
      const currentUser = req.user;
      if (!currentUser?.userId || !currentUser?.email) {
        ResponseHandler.unauthorized(res, 'Authentication required');
        return;
      }
      const userRole = (currentUser as any).role as string | undefined;
      const allowed = await canAccessCpcCdc(currentUser.email, userRole);
      // Expose both keys: canViewCpcCsd (current name) and canViewCpcCdc (legacy alias).
      ResponseHandler.success(
        res,
        { canViewCpcCsd: allowed, canViewCpcCdc: allowed },
        'CPC-CSD permissions'
      );
    } catch (error) {
      logger.error('[CpcPermissionController] getPermissions error:', error);
      const errorMessage = error instanceof Error ? error.message : 'Unknown error';
      ResponseHandler.error(res, 'Failed to get CPC-CSD permissions', 500, errorMessage);
    }
  }
}
export const cpcPermissionController = new CpcPermissionController();

View File

@ -23,6 +23,26 @@ import { Dealer } from '@models/Dealer';
*/ */
export class Form16Controller { export class Form16Controller {
// Minimal PII masking for 26AS APIs — mask PAN in all responses
/** Mask a PAN for API responses: keep only the last 4 chars; fully mask short values. */
private maskPan(pan: unknown): string | undefined {
  // Nullish or blank input has nothing to mask.
  if (pan == null) return undefined;
  const normalized = String(pan).trim();
  if (normalized.length === 0) return undefined;
  // Too short to expose a meaningful suffix: mask everything.
  if (normalized.length <= 4) return 'XXXX';
  // A standard 10-char PAN renders as seven X's plus the last four characters.
  return 'XXXXXXX' + normalized.slice(-4);
}
/** Shallow-copy a 26AS row (Sequelize instance or plain object) with panNumber masked. */
private mask26asEntry(entry: any): any {
  if (!entry) return entry;
  // Sequelize rows expose toJSON(); plain objects pass through unchanged.
  const source = typeof entry.toJSON === 'function' ? entry.toJSON() : entry;
  const clone = { ...source };
  if (clone.panNumber) clone.panNumber = this.maskPan(clone.panNumber);
  return clone;
}
private toSapCsv(sap: { private toSapCsv(sap: {
trnsUniqNo?: string | null; trnsUniqNo?: string | null;
tdsTransId?: string | null; tdsTransId?: string | null;
@ -42,6 +62,61 @@ export class Form16Controller {
.join('|'); .join('|');
return `${header}\n${row}\n`; return `${header}\n${row}\n`;
} }
/**
 * Strict allow-list validation for 26AS uploads.
 * Accepts only non-empty plain-text .txt files: checks extension, MIME type,
 * known binary magic numbers, control-byte ratio, UTF-8 validity, and that at
 * least one non-blank row exists. Returns { ok: false, reason } with a
 * user-facing message on the first failed check.
 */
private isStrictTxt26asFile(file: { originalname?: string; mimetype?: string; buffer?: Buffer }): { ok: boolean; reason?: string } {
// Extension gate (case-insensitive via toLowerCase()).
const originalName = (file.originalname || '').trim();
const ext = path.extname(originalName).toLowerCase();
if (ext !== '.txt') {
return { ok: false, reason: 'Only .txt files are allowed for 26AS upload.' };
}
// MIME gate. NOTE(review): octet-stream is presumably allowed because some
// browsers send it for .txt — confirm against actual client behavior.
const mime = String(file.mimetype || '').toLowerCase();
const allowedMimes = new Set(['text/plain', 'text/csv', 'application/octet-stream']);
if (!allowedMimes.has(mime)) {
return { ok: false, reason: 'Invalid MIME type. Only plain text (.txt) is allowed for 26AS upload.' };
}
if (!file.buffer || file.buffer.length === 0) {
return { ok: false, reason: 'Uploaded file is empty. Please upload a valid 26AS .txt file.' };
}
const b = file.buffer;
// Magic-number gate: reject well-known binary signatures regardless of name/MIME.
if (
(b.length >= 4 && b[0] === 0x25 && b[1] === 0x50 && b[2] === 0x44 && b[3] === 0x46) || // PDF ("%PDF")
(b.length >= 4 && b[0] === 0x50 && b[1] === 0x4b && b[2] === 0x03 && b[3] === 0x04) || // ZIP/DOCX/XLSX ("PK\x03\x04")
(b.length >= 4 && b[0] === 0x89 && b[1] === 0x50 && b[2] === 0x4e && b[3] === 0x47) || // PNG
(b.length >= 3 && b[0] === 0xff && b[1] === 0xd8 && b[2] === 0xff) || // JPEG
(b.length >= 2 && b[0] === 0x4d && b[1] === 0x5a) // EXE ("MZ")
) {
return { ok: false, reason: 'Binary file signature detected. Only plain text 26AS .txt files are allowed.' };
}
// Byte scan: a single NUL byte is an immediate reject; every byte outside
// tab/LF/CR/printable-ASCII counts toward the suspicious ratio below.
let suspiciousControlCount = 0;
for (let i = 0; i < b.length; i++) {
const byte = b[i];
if (byte === 0x00) {
return { ok: false, reason: 'Invalid text content. Null bytes detected.' };
}
const isTabOrLfOrCr = byte === 0x09 || byte === 0x0a || byte === 0x0d;
const isPrintableAscii = byte >= 0x20 && byte <= 0x7e;
if (!isTabOrLfOrCr && !isPrintableAscii) suspiciousControlCount++;
}
// >1% suspicious bytes => treat as binary. NOTE: multi-byte UTF-8 characters
// also count toward this ratio, so heavily non-ASCII (even valid) text is rejected.
if (suspiciousControlCount / Math.max(b.length, 1) > 0.01) {
return { ok: false, reason: 'Invalid text content. File appears to contain binary data.' };
}
// U+FFFD is produced by Buffer.toString('utf8') on invalid byte sequences.
const text = b.toString('utf8');
if (text.includes('\uFFFD')) {
return { ok: false, reason: 'Invalid UTF-8 text content. Please upload a plain text .txt file.' };
}
// Require at least one non-blank line after trimming.
const lines = text.split(/\r?\n/).map((l) => l.trim()).filter(Boolean);
if (lines.length === 0) {
return { ok: false, reason: 'Uploaded file has no usable text rows.' };
}
return { ok: true };
}
/** /**
* GET /api/v1/form16/permissions * GET /api/v1/form16/permissions
* Returns Form 16 permissions for the current user (API-driven from admin config). * Returns Form 16 permissions for the current user (API-driven from admin config).
@ -257,11 +332,8 @@ export class Form16Controller {
limit, limit,
offset, offset,
}); });
return ResponseHandler.success( const entries = (result.rows || []).map((row: any) => this.mask26asEntry(row));
res, return ResponseHandler.success(res, { entries, total: result.total, summary: result.summary }, '26AS entries fetched');
{ entries: result.rows, total: result.total, summary: result.summary },
'26AS entries fetched'
);
} catch (error) { } catch (error) {
const errorMessage = error instanceof Error ? error.message : 'Unknown error'; const errorMessage = error instanceof Error ? error.message : 'Unknown error';
logger.error('[Form16Controller] list26as error:', error); logger.error('[Form16Controller] list26as error:', error);
@ -269,13 +341,28 @@ export class Form16Controller {
} }
} }
/**
 * GET /api/v1/form16/26as/dashboard
 * RE only. Aggregated Form16A dashboard (collection/submission status + year/zone breakdown).
 */
async get26asDashboard(req: Request, res: Response): Promise<void> {
  try {
    const dashboardData = await form16Service.getForm16DashboardData();
    return ResponseHandler.success(res, dashboardData, 'Form16A dashboard fetched');
  } catch (error) {
    logger.error('[Form16Controller] get26asDashboard error:', error);
    const message = error instanceof Error ? error.message : 'Unknown error';
    return ResponseHandler.error(res, 'Failed to fetch Form16A dashboard', 500, message);
  }
}
/** /**
* POST /api/v1/form16/26as * POST /api/v1/form16/26as
* RE only. Create a 26AS TDS entry. * RE only. Create a 26AS TDS entry.
*/ */
async create26as(req: Request, res: Response): Promise<void> { async create26as(req: Request, res: Response): Promise<void> {
try { try {
const body = req.body as Record<string, unknown>; const body = ((req.body ?? {}) as Record<string, unknown>);
const tanNumber = (body.tanNumber as string)?.trim(); const tanNumber = (body.tanNumber as string)?.trim();
const financialYear = (body.financialYear as string)?.trim(); const financialYear = (body.financialYear as string)?.trim();
const quarter = (body.quarter as string)?.trim(); const quarter = (body.quarter as string)?.trim();
@ -300,7 +387,8 @@ export class Form16Controller {
statusOltas: (body.statusOltas as string) || undefined, statusOltas: (body.statusOltas as string) || undefined,
remarks: (body.remarks as string) || undefined, remarks: (body.remarks as string) || undefined,
}); });
return ResponseHandler.success(res, { entry }, '26AS entry created'); const masked = this.mask26asEntry(entry);
return ResponseHandler.success(res, { entry: masked }, '26AS entry created');
} catch (error) { } catch (error) {
const errorMessage = error instanceof Error ? error.message : 'Unknown error'; const errorMessage = error instanceof Error ? error.message : 'Unknown error';
logger.error('[Form16Controller] create26as error:', error); logger.error('[Form16Controller] create26as error:', error);
@ -318,7 +406,7 @@ export class Form16Controller {
if (Number.isNaN(id)) { if (Number.isNaN(id)) {
return ResponseHandler.error(res, 'Invalid entry id', 400); return ResponseHandler.error(res, 'Invalid entry id', 400);
} }
const body = req.body as Record<string, unknown>; const body = ((req.body ?? {}) as Record<string, unknown>);
const updateData: Record<string, unknown> = {}; const updateData: Record<string, unknown> = {};
if (body.tanNumber !== undefined) updateData.tanNumber = body.tanNumber; if (body.tanNumber !== undefined) updateData.tanNumber = body.tanNumber;
if (body.panNumber !== undefined) updateData.panNumber = body.panNumber; if (body.panNumber !== undefined) updateData.panNumber = body.panNumber;
@ -339,7 +427,8 @@ export class Form16Controller {
if (!entry) { if (!entry) {
return ResponseHandler.error(res, '26AS entry not found', 404); return ResponseHandler.error(res, '26AS entry not found', 404);
} }
return ResponseHandler.success(res, { entry }, '26AS entry updated'); const masked = this.mask26asEntry(entry);
return ResponseHandler.success(res, { entry: masked }, '26AS entry updated');
} catch (error) { } catch (error) {
const errorMessage = error instanceof Error ? error.message : 'Unknown error'; const errorMessage = error instanceof Error ? error.message : 'Unknown error';
logger.error('[Form16Controller] update26as error:', error); logger.error('[Form16Controller] update26as error:', error);
@ -688,6 +777,14 @@ export class Form16Controller {
if (!file || !file.buffer) { if (!file || !file.buffer) {
return ResponseHandler.error(res, 'No file uploaded. Please upload a .txt file.', 400); return ResponseHandler.error(res, 'No file uploaded. Please upload a .txt file.', 400);
} }
const ext = path.extname(file.originalname || '').toLowerCase();
if (ext !== '.txt') {
return ResponseHandler.error(res, 'Only .txt files are allowed for 26AS upload.', 400);
}
const strictTxtValidation = this.isStrictTxt26asFile(file);
if (!strictTxtValidation.ok) {
return ResponseHandler.error(res, strictTxtValidation.reason || 'Invalid 26AS text file.', 400);
}
if (!userId) { if (!userId) {
return ResponseHandler.error(res, 'Authentication required', 401); return ResponseHandler.error(res, 'Authentication required', 401);
} }
@ -732,8 +829,9 @@ export class Form16Controller {
async get26asUploadHistory(req: Request, res: Response): Promise<void> { async get26asUploadHistory(req: Request, res: Response): Promise<void> {
try { try {
const limit = Math.min(Math.max(parseInt(String(req.query.limit || '50'), 10), 1), 200); const limit = Math.min(Math.max(parseInt(String(req.query.limit || '50'), 10), 1), 200);
const history = await form16Service.list26asUploadHistory(limit); const offset = Math.max(0, parseInt(String(req.query.offset ?? '0'), 10) || 0);
return ResponseHandler.success(res, { history }, '26AS upload history fetched'); const { rows: history, total } = await form16Service.list26asUploadHistory(limit, offset);
return ResponseHandler.success(res, { history, total }, '26AS upload history fetched');
} catch (error) { } catch (error) {
const errorMessage = error instanceof Error ? error.message : 'Unknown error'; const errorMessage = error instanceof Error ? error.message : 'Unknown error';
logger.error('[Form16Controller] get26asUploadHistory error:', error); logger.error('[Form16Controller] get26asUploadHistory error:', error);

View File

@ -96,6 +96,19 @@ async function processOutgoingFile(fileName: string, resolvedOutgoingDir: string
updatedAt: new Date(), updatedAt: new Date(),
}); });
// Delete source CSV only after successful DB persistence + read-marking.
// SAP team keeps a parallel archive copy, so main OUTGOING can be safely cleaned.
const sourcePath = path.join(resolvedOutgoingDir, fileName);
try {
if (fs.existsSync(sourcePath)) {
fs.unlinkSync(sourcePath);
logger.info(`[Form16 SAP Job] Deleted processed OUTGOING file: ${sourcePath}`);
}
} catch (e) {
// Keep processing successful even if cleanup fails; next pull will skip due to read marker.
logger.warn(`[Form16 SAP Job] Could not delete processed file: ${sourcePath}`, e);
}
return counts; return counts;
} }

View File

@ -1,5 +1,14 @@
import cors from 'cors'; import cors from 'cors';
/** Vite dev: localhost vs 127.0.0.1, and ports 3000-3010 when 3000/3001 are already taken. */
function getDevViteOrigins(): string[] {
  const origins: string[] = [];
  for (let port = 3000; port <= 3010; port++) {
    origins.push(`http://localhost:${port}`);
    origins.push(`http://127.0.0.1:${port}`);
  }
  return origins;
}
// Configure allowed origins - uses only FRONTEND_URL from environment // Configure allowed origins - uses only FRONTEND_URL from environment
const getAllowedOrigins = (): string[] | boolean => { const getAllowedOrigins = (): string[] | boolean => {
const frontendUrl = process.env.FRONTEND_URL; const frontendUrl = process.env.FRONTEND_URL;
@ -15,10 +24,9 @@ const getAllowedOrigins = (): string[] | boolean => {
console.error(' Multiple origins: FRONTEND_URL=https://app1.com,https://app2.com'); console.error(' Multiple origins: FRONTEND_URL=https://app1.com,https://app2.com');
return []; return [];
} else { } else {
// Dev fallback: allow localhost:3000 console.warn('⚠️ WARNING: FRONTEND_URL not set. Defaulting Vite dev origins (localhost + 127.0.0.1).');
console.warn('⚠️ WARNING: FRONTEND_URL not set. Defaulting to http://localhost:3000 for development.'); console.warn(' Set FRONTEND_URL in .env if you use another host/port.');
console.warn(' To avoid this warning, set FRONTEND_URL=http://localhost:3000 in your .env file'); return getDevViteOrigins();
return ['http://localhost:3000'];
} }
} }
@ -35,12 +43,14 @@ const getAllowedOrigins = (): string[] | boolean => {
if (origins.length === 0) { if (origins.length === 0) {
console.error('❌ ERROR: FRONTEND_URL is set but contains no valid URLs!'); console.error('❌ ERROR: FRONTEND_URL is set but contains no valid URLs!');
return isProduction ? [] : ['http://localhost:3000']; // Fallback for development return isProduction ? [] : getDevViteOrigins(); // Fallback for development
} }
// In development always allow localhost:3000 (Vite default) so frontend works even if FRONTEND_URL is 3001 // In development allow common Vite host/port combos (avoids CORS when Vite bumps to 3002+)
if (!isProduction && !origins.includes('http://localhost:3000')) { if (!isProduction) {
origins = ['http://localhost:3000', ...origins]; for (const o of getDevViteOrigins()) {
if (!origins.includes(o)) origins.push(o);
}
} }
console.log(`✅ CORS: Allowing origins from FRONTEND_URL: ${origins.join(', ')}`); console.log(`✅ CORS: Allowing origins from FRONTEND_URL: ${origins.join(', ')}`);

View File

@ -0,0 +1,34 @@
/**
* CPC-CSD permission middleware enforces API-driven viewer list.
* Use after authenticateToken so req.user is available.
*/
import { Request, Response, NextFunction } from 'express';
import { ResponseHandler } from '../utils/responseHandler';
import { canAccessCpcCdc } from '../services/cpcPermission.service';
/**
 * Express middleware gating CPC-CSD routes on the API-driven viewer list.
 * Responds 401 without an authenticated user, 403 when access is denied,
 * and 500 if the permission lookup itself fails; otherwise calls next().
 */
export const requireCpcCdcAccess = async (
  req: Request,
  res: Response,
  next: NextFunction
): Promise<void> => {
  try {
    const currentUser = req.user;
    if (!currentUser?.userId || !currentUser?.email) {
      ResponseHandler.unauthorized(res, 'Authentication required');
      return;
    }
    const userRole = (currentUser as any).role as string | undefined;
    const hasAccess = await canAccessCpcCdc(currentUser.email, userRole);
    if (hasAccess) {
      next();
      return;
    }
    ResponseHandler.forbidden(res, 'You do not have permission to access CPC-CSD');
  } catch (error) {
    const message = error instanceof Error ? error.message : 'Unknown error';
    ResponseHandler.error(res, 'Permission check failed', 500, message);
  }
};

View File

@ -0,0 +1,130 @@
import { QueryInterface, DataTypes } from 'sequelize';
export async function up(queryInterface: QueryInterface): Promise<void> {
// Create cpc_documents table
await queryInterface.createTable('cpc_documents', {
id: {
type: DataTypes.UUID,
defaultValue: DataTypes.UUIDV4,
primaryKey: true,
allowNull: false
},
booking_id: {
type: DataTypes.STRING(255),
allowNull: true
},
claim_id: {
type: DataTypes.STRING(255),
allowNull: true
},
attempt_no: {
type: DataTypes.INTEGER,
defaultValue: 1,
allowNull: false
},
document_type: {
type: DataTypes.STRING(255),
allowNull: true
},
document_gcp_url: {
type: DataTypes.TEXT,
allowNull: true
},
provider: {
type: DataTypes.STRING(255),
allowNull: true
},
msd_payload: {
type: DataTypes.JSONB,
allowNull: true
},
extracted_fields: {
type: DataTypes.JSONB,
allowNull: true
},
field_confidence: {
type: DataTypes.JSONB,
allowNull: true
},
validation_status: {
type: DataTypes.STRING(255),
allowNull: true
},
match_percentage: {
type: DataTypes.FLOAT,
allowNull: true
},
mismatch_reasons: {
type: DataTypes.JSONB,
allowNull: true
},
field_results: {
type: DataTypes.JSONB,
allowNull: true
},
ip_address: {
type: DataTypes.STRING(255),
allowNull: true
},
created_at: {
type: DataTypes.DATE,
defaultValue: DataTypes.NOW,
allowNull: false
}
});
// Create cpc_audit_logs table
await queryInterface.createTable('cpc_audit_logs', {
id: {
type: DataTypes.UUID,
defaultValue: DataTypes.UUIDV4,
primaryKey: true,
allowNull: false
},
document_id: {
type: DataTypes.UUID,
allowNull: false,
references: {
model: 'cpc_documents',
key: 'id'
},
onDelete: 'CASCADE'
},
action: {
type: DataTypes.STRING(255),
allowNull: false
},
previous_state: {
type: DataTypes.JSONB,
allowNull: true
},
new_state: {
type: DataTypes.JSONB,
allowNull: true
},
performed_by: {
type: DataTypes.STRING(255),
allowNull: true
},
remarks: {
type: DataTypes.TEXT,
allowNull: true
},
created_at: {
type: DataTypes.DATE,
defaultValue: DataTypes.NOW,
allowNull: false
}
});
// Unique index for the multi-attempt claim logic (idempotent for repeated startup migrations)
await queryInterface.sequelize.query(`
CREATE UNIQUE INDEX IF NOT EXISTS unique_cpc_document_attempt
ON cpc_documents (claim_id, attempt_no, document_type);
`);
}
export async function down(queryInterface: QueryInterface): Promise<void> {
await queryInterface.dropTable('cpc_audit_logs');
await queryInterface.dropTable('cpc_documents');
}

View File

@ -0,0 +1,50 @@
import { QueryInterface } from 'sequelize';

/**
 * Idempotent CPC-CDC schema for environments where 2026041300 did not run or tables were dropped.
 * Safe to run on top of an existing DB that already has these tables from the earlier migration.
 */
export async function up(queryInterface: QueryInterface): Promise<void> {
  // cpc_documents: one row per OCR-processed document attempt.
  await queryInterface.sequelize.query(`
    CREATE TABLE IF NOT EXISTS cpc_documents (
      id UUID NOT NULL PRIMARY KEY,
      booking_id VARCHAR(255),
      claim_id VARCHAR(255),
      attempt_no INTEGER NOT NULL DEFAULT 1,
      document_type VARCHAR(255),
      document_gcp_url TEXT,
      provider VARCHAR(255),
      msd_payload JSONB,
      extracted_fields JSONB,
      field_confidence JSONB,
      validation_status VARCHAR(255),
      match_percentage DOUBLE PRECISION,
      mismatch_reasons JSONB,
      field_results JSONB,
      ip_address VARCHAR(255),
      created_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW()
    );
  `);
  // cpc_audit_logs: append-only audit trail per document (cascades with its document).
  await queryInterface.sequelize.query(`
    CREATE TABLE IF NOT EXISTS cpc_audit_logs (
      id UUID NOT NULL PRIMARY KEY,
      document_id UUID NOT NULL REFERENCES cpc_documents(id) ON DELETE CASCADE,
      action VARCHAR(255) NOT NULL,
      previous_state JSONB,
      new_state JSONB,
      performed_by VARCHAR(255),
      remarks TEXT,
      created_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW()
    );
  `);
  // FIX: this index spans (claim_id, attempt_no, booking_id), so it must carry the
  // final name used by the replace-index migration: unique_cpc_document_claim_attempt_booking.
  // Re-creating it under the legacy name `unique_cpc_document_attempt` (historically the
  // (claim_id, attempt_no, document_type) index) would leave a stale-named duplicate
  // unique index on any DB where the rename migration has already run.
  await queryInterface.sequelize.query(`
    CREATE UNIQUE INDEX IF NOT EXISTS unique_cpc_document_claim_attempt_booking
    ON cpc_documents (claim_id, attempt_no, booking_id);
  `);
}

export async function down(_queryInterface: QueryInterface): Promise<void> {
  // Non-destructive: tables may contain production CPC data.
}

View File

@ -0,0 +1,26 @@
import { QueryInterface } from 'sequelize';
/**
 * Batch upload can include multiple files of the same document_type in one attempt.
 * Replace unique(claim_id, attempt_no, document_type) with unique(claim_id, attempt_no, booking_id)
 * because booking_id is distinct per file (e.g. CLAIM-1, CLAIM-2, ...).
 */
export async function up(queryInterface: QueryInterface): Promise<void> {
// Drop the legacy per-(claim, attempt, document_type) uniqueness first.
await queryInterface.sequelize.query(`
DROP INDEX IF EXISTS unique_cpc_document_attempt;
`);
// IF NOT EXISTS keeps this safe for repeated startup migrations.
await queryInterface.sequelize.query(`
CREATE UNIQUE INDEX IF NOT EXISTS unique_cpc_document_claim_attempt_booking
ON cpc_documents (claim_id, attempt_no, booking_id);
`);
}
// Rollback: restore the original per-(claim, attempt, document_type) uniqueness.
export async function down(queryInterface: QueryInterface): Promise<void> {
await queryInterface.sequelize.query(`
DROP INDEX IF EXISTS unique_cpc_document_claim_attempt_booking;
`);
await queryInterface.sequelize.query(`
CREATE UNIQUE INDEX IF NOT EXISTS unique_cpc_document_attempt
ON cpc_documents (claim_id, attempt_no, document_type);
`);
}

View File

@ -0,0 +1,26 @@
import { QueryInterface } from 'sequelize';
/**
 * Rename CPC admin viewer-list config key from CPC_CDC_* to CPC_CSD_* (display name aligned).
 */
export async function up(queryInterface: QueryInterface): Promise<void> {
// Plain UPDATE: a no-op when the CDC-named row is absent (already renamed or never seeded).
await queryInterface.sequelize.query(`
UPDATE admin_configurations
SET
config_key = 'CPC_CSD_ADMIN_CONFIG',
display_name = 'CPC-CSD Admin Config',
description = 'CPC-CSD module visibility settings'
WHERE config_key = 'CPC_CDC_ADMIN_CONFIG'
`);
}
// Rollback: restore the legacy CPC_CDC_* key and display strings.
export async function down(queryInterface: QueryInterface): Promise<void> {
await queryInterface.sequelize.query(`
UPDATE admin_configurations
SET
config_key = 'CPC_CDC_ADMIN_CONFIG',
display_name = 'CPC-CDC Admin Config',
description = 'CPC-CDC module visibility settings'
WHERE config_key = 'CPC_CSD_ADMIN_CONFIG'
`);
}

89
src/models/CpcAuditLog.ts Normal file
View File

@ -0,0 +1,89 @@
import { DataTypes, Model, Optional } from 'sequelize';
import { sequelize } from '@config/database';
interface CpcAuditLogAttributes {
id: string;
documentId: string;
action: string;
previousState?: any;
newState?: any;
performedBy?: string;
remarks?: string;
createdAt?: Date;
}
interface CpcAuditLogCreationAttributes extends Optional<CpcAuditLogAttributes, 'id' | 'createdAt'> {}
class CpcAuditLog extends Model<CpcAuditLogAttributes, CpcAuditLogCreationAttributes> implements CpcAuditLogAttributes {
public id!: string;
public documentId!: string;
public action!: string;
public previousState?: any;
public newState?: any;
public performedBy?: string;
public remarks?: string;
public createdAt!: Date;
}
CpcAuditLog.init(
{
id: {
type: DataTypes.UUID,
defaultValue: DataTypes.UUIDV4,
primaryKey: true,
field: 'id'
},
documentId: {
type: DataTypes.UUID,
allowNull: false,
field: 'document_id',
references: {
model: 'cpc_documents',
key: 'id'
}
},
action: {
type: DataTypes.STRING(255),
allowNull: false
},
previousState: {
type: DataTypes.JSONB,
allowNull: true,
field: 'previous_state'
},
newState: {
type: DataTypes.JSONB,
allowNull: true,
field: 'new_state'
},
performedBy: {
type: DataTypes.STRING(255),
allowNull: true,
field: 'performed_by'
},
remarks: {
type: DataTypes.TEXT,
allowNull: true
},
createdAt: {
type: DataTypes.DATE,
defaultValue: DataTypes.NOW,
field: 'created_at'
}
},
{
sequelize,
modelName: 'CpcAuditLog',
tableName: 'cpc_audit_logs',
timestamps: false
}
);
CpcAuditLog.belongsTo(sequelize.models.CpcDocument, {
foreignKey: 'documentId',
targetKey: 'id',
as: 'document'
});
export { CpcAuditLog };

143
src/models/CpcDocument.ts Normal file
View File

@ -0,0 +1,143 @@
import { DataTypes, Model, Optional } from 'sequelize';
import { sequelize } from '@config/database';

// Persisted shape of one OCR-processed CPC document attempt.
interface CpcDocumentAttributes {
  id: string;
  bookingId?: string;
  claimId?: string;
  attemptNo?: number;
  documentType?: string;
  documentGcpUrl?: string;
  provider?: string;
  msdPayload?: any;
  extractedFields?: any;
  fieldConfidence?: any;
  validationStatus?: string;
  matchPercentage?: number;
  mismatchReasons?: any;
  fieldResults?: any;
  ipAddress?: string;
  createdAt?: Date;
}

// id/attemptNo/createdAt have defaults, so creation payloads may omit them.
interface CpcDocumentCreationAttributes extends Optional<CpcDocumentAttributes, 'id' | 'attemptNo' | 'createdAt'> {}

class CpcDocument extends Model<CpcDocumentAttributes, CpcDocumentCreationAttributes> implements CpcDocumentAttributes {
  public id!: string;
  public bookingId?: string;
  public claimId?: string;
  public attemptNo?: number;
  public documentType?: string;
  public documentGcpUrl?: string;
  public provider?: string;
  public msdPayload?: any;
  public extractedFields?: any;
  public fieldConfidence?: any;
  public validationStatus?: string;
  public matchPercentage?: number;
  public mismatchReasons?: any;
  public fieldResults?: any;
  public ipAddress?: string;
  public createdAt!: Date;
}

CpcDocument.init(
  {
    id: {
      type: DataTypes.UUID,
      defaultValue: DataTypes.UUIDV4,
      primaryKey: true,
      field: 'id'
    },
    bookingId: {
      type: DataTypes.STRING(255),
      allowNull: true,
      field: 'booking_id'
    },
    claimId: {
      type: DataTypes.STRING(255),
      allowNull: true,
      field: 'claim_id'
    },
    attemptNo: {
      type: DataTypes.INTEGER,
      defaultValue: 1,
      field: 'attempt_no'
    },
    documentType: {
      type: DataTypes.STRING(255),
      allowNull: true,
      field: 'document_type'
    },
    documentGcpUrl: {
      type: DataTypes.TEXT,
      allowNull: true,
      field: 'document_gcp_url'
    },
    provider: {
      type: DataTypes.STRING(255),
      allowNull: true,
      field: 'provider'
    },
    msdPayload: {
      type: DataTypes.JSONB,
      allowNull: true,
      field: 'msd_payload'
    },
    extractedFields: {
      type: DataTypes.JSONB,
      allowNull: true,
      field: 'extracted_fields'
    },
    fieldConfidence: {
      type: DataTypes.JSONB,
      allowNull: true,
      field: 'field_confidence'
    },
    validationStatus: {
      type: DataTypes.STRING(255),
      allowNull: true,
      field: 'validation_status'
    },
    matchPercentage: {
      type: DataTypes.FLOAT,
      allowNull: true,
      field: 'match_percentage'
    },
    mismatchReasons: {
      type: DataTypes.JSONB,
      allowNull: true,
      field: 'mismatch_reasons'
    },
    fieldResults: {
      type: DataTypes.JSONB,
      allowNull: true,
      field: 'field_results'
    },
    ipAddress: {
      type: DataTypes.STRING(255),
      allowNull: true,
      field: 'ip_address'
    },
    createdAt: {
      type: DataTypes.DATE,
      defaultValue: DataTypes.NOW,
      field: 'created_at'
    }
  },
  {
    sequelize,
    modelName: 'CpcDocument',
    tableName: 'cpc_documents',
    // created_at is set via defaultValue; the table has no updated_at column.
    timestamps: false,
    indexes: [
      {
        // FIX: index fields must be physical column names — Sequelize does not map
        // attribute names through `field` in index definitions, so the previous
        // camelCase fields ('claimId', ...) would target nonexistent columns on sync().
        // Matches the migration-created unique_cpc_document_claim_attempt_booking index.
        name: 'unique_cpc_document_claim_attempt_booking',
        unique: true,
        fields: ['claim_id', 'attempt_no', 'booking_id']
      }
    ]
  }
);

export { CpcDocument };

View File

@ -42,6 +42,9 @@ import { Form16LedgerEntry } from './Form16LedgerEntry';
import { Form16SapResponse } from './Form16SapResponse'; import { Form16SapResponse } from './Form16SapResponse';
import { Form16DebitNoteSapResponse } from './Form16DebitNoteSapResponse'; import { Form16DebitNoteSapResponse } from './Form16DebitNoteSapResponse';
import { From16SapReadFile } from './From16SapReadFile'; import { From16SapReadFile } from './From16SapReadFile';
import { CpcDocument } from './CpcDocument';
import { CpcAuditLog } from './CpcAuditLog';
// Define associations // Define associations
const defineAssociations = () => { const defineAssociations = () => {
@ -189,6 +192,13 @@ const defineAssociations = () => {
// Note: belongsTo associations are defined in individual model files to avoid duplicate alias conflicts // Note: belongsTo associations are defined in individual model files to avoid duplicate alias conflicts
// Only hasMany associations from WorkflowRequest are defined here since they're one-way // Only hasMany associations from WorkflowRequest are defined here since they're one-way
// CPC-CSD associations
CpcDocument.hasMany(CpcAuditLog, {
as: 'auditLogs',
foreignKey: 'documentId',
sourceKey: 'id'
});
}; };
// Initialize associations // Initialize associations
@ -237,7 +247,9 @@ export {
Form16LedgerEntry, Form16LedgerEntry,
Form16SapResponse, Form16SapResponse,
Form16DebitNoteSapResponse, Form16DebitNoteSapResponse,
From16SapReadFile From16SapReadFile,
CpcDocument,
CpcAuditLog
}; };
// Export default sequelize instance // Export default sequelize instance

View File

@ -16,6 +16,7 @@ import {
updateActivityTypeSchema, updateActivityTypeSchema,
activityTypeParamsSchema, activityTypeParamsSchema,
updateForm16ConfigSchema, updateForm16ConfigSchema,
updateCpcCdcConfigSchema,
} from '../validators/admin.validator'; } from '../validators/admin.validator';
import { import {
getAllHolidays, getAllHolidays,
@ -29,6 +30,8 @@ import {
resetConfiguration, resetConfiguration,
getForm16Config, getForm16Config,
putForm16Config, putForm16Config,
getCpcCdcConfig,
putCpcCdcConfig,
updateUserRole, updateUserRole,
getUsersByRole, getUsersByRole,
getRoleStatistics, getRoleStatistics,
@ -139,6 +142,21 @@ router.get('/form16-config', getForm16Config);
*/ */
router.put('/form16-config', validateBody(updateForm16ConfigSchema), putForm16Config); router.put('/form16-config', validateBody(updateForm16ConfigSchema), putForm16Config);
/**
* @route GET /api/admin/cpc-csd-config
* @desc Get CPC-CSD admin config (viewer emails)
* @access Admin
*/
router.get('/cpc-csd-config', getCpcCdcConfig);
/**
* @route PUT /api/admin/cpc-csd-config
* @desc Update CPC-CSD admin config
* @body { viewerEmails? }
* @access Admin
*/
router.put('/cpc-csd-config', validateBody(updateCpcCdcConfigSchema), putCpcCdcConfig);
// ==================== User Role Management Routes (RBAC) ==================== // ==================== User Role Management Routes (RBAC) ====================
/** /**

View File

@ -0,0 +1,60 @@
import { Router } from 'express';
import multer from 'multer';
import { cpcCdcController } from '../controllers/CpcCdcController';
import { CpcReportController } from '../controllers/CpcReportController';
import { asyncHandler } from '../middlewares/errorHandler.middleware';
import { authenticateToken } from '../middlewares/auth.middleware';
import { requireCpcCdcAccess } from '../middlewares/cpcPermission.middleware';
const router = Router();
const cpcReportController = new CpcReportController();

/**
 * Multer file filter that rejects ZIP archives, matched either by the
 * `.zip` extension or by a mime type containing "zip". Everything else is
 * accepted here; deeper content validation happens in the controller.
 */
const rejectZipFiles: multer.Options['fileFilter'] = (_req, file, cb) => {
  const name = String(file.originalname || '').toLowerCase();
  const mime = String(file.mimetype || '').toLowerCase();
  if (name.endsWith('.zip') || mime.includes('zip')) {
    cb(new Error('ZIP files are not allowed for CPC-CSD validation'));
  } else {
    cb(null, true);
  }
};

// Uploads are buffered in memory because the buffers are forwarded to GCS/Gemini.
const upload = multer({
  storage: multer.memoryStorage(),
  limits: { fileSize: 15 * 1024 * 1024 }, // 15MB per file
  fileFilter: rejectZipFiles
});

// Every route below requires an authenticated caller with CPC-CSD access.
router.use(authenticateToken);
router.use(requireCpcCdcAccess);

// OCR / validation — bulk endpoint reads `files[]`, single upload reads `file`.
router
  .post('/v1/ocr/upload', upload.array('files', 20), asyncHandler(cpcCdcController.validateDocumentUpload.bind(cpcCdcController)))
  .post('/v1/ocr/validate-upload', upload.single('file'), asyncHandler(cpcCdcController.validateDocumentUpload.bind(cpcCdcController)))
  .post('/v1/ocr/validate', asyncHandler(cpcCdcController.validateDocumentByUrlStub.bind(cpcCdcController)));

// History and documents. Registration order matters: the static paths and the
// more specific `/:id/file` must be declared before the catch-all `/:id`.
router
  .get('/documents/analytics', asyncHandler(cpcCdcController.getAnalytics.bind(cpcCdcController)))
  .get('/documents/history', asyncHandler(cpcCdcController.getClaimHistory.bind(cpcCdcController)))
  .get('/documents/recent', asyncHandler(cpcCdcController.getRecentDocuments.bind(cpcCdcController)))
  .get('/documents/:id/file', asyncHandler(cpcCdcController.getDocumentFile.bind(cpcCdcController)))
  .get('/documents/:id', asyncHandler(cpcCdcController.getDocumentById.bind(cpcCdcController)))
  .put('/documents/:id/status', asyncHandler(cpcCdcController.updateDocumentStatus.bind(cpcCdcController)))
  .delete('/documents/:id', asyncHandler(cpcCdcController.deleteDocument.bind(cpcCdcController)));

// Report downloads: the literal `all` path must precede the `:claimId` param route.
router
  .get('/v1/ocr/report/all/download', asyncHandler(cpcReportController.downloadAllReport.bind(cpcReportController)))
  .get('/v1/ocr/report/:claimId/download', asyncHandler(cpcReportController.downloadReport.bind(cpcReportController)));

// Shorter aliases kept for backwards compatibility.
router
  .get('/report/all/download', asyncHandler(cpcReportController.downloadAllReport.bind(cpcReportController)))
  .get('/report/:claimId/download', asyncHandler(cpcReportController.downloadReport.bind(cpcReportController)));

export default router;

View File

@ -0,0 +1,89 @@
import express from 'express';
import multer from 'multer';
import { authenticateToken } from '../middlewares/auth.middleware';
import { asyncHandler } from '../middlewares/errorHandler.middleware';
import { generalApiLimiter } from '../middlewares/rateLimiter.middleware';
import { requireCpcCdcAccess } from '../middlewares/cpcPermission.middleware';
import { cpcCdcController } from '../controllers/CpcCdcController';
import { CpcReportController } from '../controllers/CpcReportController';
// In-memory upload handling for the compatibility routes. ZIP archives are
// rejected up-front (by extension or mime type) because the OCR/validation
// flow only accepts individual documents.
const memoryUpload = multer({
  storage: multer.memoryStorage(),
  limits: { fileSize: 15 * 1024 * 1024 }, // 15MB per file
  fileFilter: (_req, file, cb) => {
    const lowerName = String(file.originalname || '').toLowerCase();
    const lowerMime = String(file.mimetype || '').toLowerCase();
    const isZip = lowerName.endsWith('.zip') || lowerMime.includes('zip');
    if (isZip) {
      cb(new Error('ZIP files are not allowed for CPC-CSD validation'));
      return;
    }
    cb(null, true);
  }
});

const cpcReportController = new CpcReportController();

// Shared middleware chain applied to every compat route:
// authentication → CPC-CSD permission check → general rate limiting.
const authLim = [authenticateToken, requireCpcCdcAccess, generalApiLimiter];

/**
 * CPC-CSD (`CPC-CSD/server`) style URLs on re-workflow:
 * - `POST /api/upload`
 * - `GET /api/documents/...` (same order as legacy router)
 * - `POST /api/v1/ocr/validate` | `validate-upload` | `upload`
 * - `GET /api/v1/ocr/report/...`
 *
 * Disable with `CPC_LEGACY_COMPAT_ROUTES=false`.
 */
export function registerCpcCsdCompatRoutes(app: express.Application): void {
  // Opt-out switch: only the literal string 'false' (any case) disables the routes.
  if (String(process.env.CPC_LEGACY_COMPAT_ROUTES || '').toLowerCase() === 'false') {
    return;
  }

  // Bare single-file upload (legacy `POST /api/upload`).
  app.post(
    '/api/upload',
    ...authLim,
    memoryUpload.single('file'),
    asyncHandler(cpcCdcController.uploadBareFile.bind(cpcCdcController))
  );

  // Document listing/detail routes. Registration order matters: static paths
  // and the more specific `/:id/file` must precede the catch-all `/:id`.
  const documentsRouter = express.Router();
  documentsRouter.use(...authLim);
  documentsRouter.get('/analytics', asyncHandler(cpcCdcController.getAnalytics.bind(cpcCdcController)));
  documentsRouter.get('/history', asyncHandler(cpcCdcController.getClaimHistory.bind(cpcCdcController)));
  documentsRouter.get('/recent', asyncHandler(cpcCdcController.getRecentDocuments.bind(cpcCdcController)));
  documentsRouter.get('/:id/file', asyncHandler(cpcCdcController.getDocumentFile.bind(cpcCdcController)));
  documentsRouter.get('/:id', asyncHandler(cpcCdcController.getDocumentById.bind(cpcCdcController)));
  documentsRouter.put('/:id/status', asyncHandler(cpcCdcController.updateDocumentStatus.bind(cpcCdcController)));
  documentsRouter.delete('/:id', asyncHandler(cpcCdcController.deleteDocument.bind(cpcCdcController)));
  app.use('/api/documents', documentsRouter);

  // OCR endpoints: URL-based validation stub, single-file upload, bulk upload.
  app.post(
    '/api/v1/ocr/validate',
    ...authLim,
    asyncHandler(cpcCdcController.validateDocumentByUrlStub.bind(cpcCdcController))
  );
  app.post(
    '/api/v1/ocr/validate-upload',
    ...authLim,
    memoryUpload.single('file'),
    asyncHandler(cpcCdcController.validateDocumentUpload.bind(cpcCdcController))
  );
  app.post(
    '/api/v1/ocr/upload',
    ...authLim,
    memoryUpload.array('files', 20),
    asyncHandler(cpcCdcController.validateDocumentUpload.bind(cpcCdcController))
  );

  // Report downloads: literal `all` path registered before the `:claimId` route.
  app.get(
    '/api/v1/ocr/report/all/download',
    ...authLim,
    asyncHandler(cpcReportController.downloadAllReport.bind(cpcReportController))
  );
  app.get(
    '/api/v1/ocr/report/:claimId/download',
    ...authLim,
    asyncHandler(cpcReportController.downloadReport.bind(cpcReportController))
  );
}

View File

@ -0,0 +1,16 @@
import { Router } from 'express';
import { authenticateToken } from '../middlewares/auth.middleware';
import { asyncHandler } from '../middlewares/errorHandler.middleware';
import { cpcPermissionController } from '../controllers/cpcPermission.controller';
const router = Router();

// All permission lookups require an authenticated caller.
router.use(authenticateToken);

// GET /permissions — report the caller's CPC-CSD access flags.
router.get('/permissions', asyncHandler(cpcPermissionController.getPermissions.bind(cpcPermissionController)));

export default router;

View File

@ -1,4 +1,4 @@
import { Router } from 'express'; import { Router, json, urlencoded } from 'express';
import multer from 'multer'; import multer from 'multer';
import path from 'path'; import path from 'path';
import fs from 'fs'; import fs from 'fs';
@ -8,9 +8,15 @@ import { form16Controller } from '../controllers/form16.controller';
import { form16SapController } from '../controllers/form16Sap.controller'; import { form16SapController } from '../controllers/form16Sap.controller';
import { asyncHandler } from '../middlewares/errorHandler.middleware'; import { asyncHandler } from '../middlewares/errorHandler.middleware';
import { UPLOAD_DIR } from '../config/storage'; import { UPLOAD_DIR } from '../config/storage';
import { ResponseHandler } from '../utils/responseHandler';
const router = Router(); const router = Router();
// Form16 routes are mounted before global parsers in app.ts to preserve multipart streams.
// Add route-local parsers so JSON/x-www-form-urlencoded endpoints (e.g., /26as POST/PUT) still receive req.body.
router.use(json({ limit: '10mb' }));
router.use(urlencoded({ extended: true, limit: '10mb' }));
// REform16 pattern: disk storage to uploads dir (path.join(__dirname, '../../uploads') → we use UPLOAD_DIR/form16-extract) // REform16 pattern: disk storage to uploads dir (path.join(__dirname, '../../uploads') → we use UPLOAD_DIR/form16-extract)
const form16ExtractDir = path.join(UPLOAD_DIR, 'form16-extract'); const form16ExtractDir = path.join(UPLOAD_DIR, 'form16-extract');
if (!fs.existsSync(form16ExtractDir)) { if (!fs.existsSync(form16ExtractDir)) {
@ -45,13 +51,22 @@ const upload = multer({
limits: { fileSize: 15 * 1024 * 1024 }, limits: { fileSize: 15 * 1024 * 1024 },
}); });
// 26AS upload: .txt only, 5MB, memory storage (parse then bulk insert) // 26AS upload: .txt only, 40MB, memory storage (parse then bulk insert)
const upload26asTxt = multer({ const upload26asTxt = multer({
storage: multer.memoryStorage(), storage: multer.memoryStorage(),
limits: { fileSize: 5 * 1024 * 1024 }, limits: { fileSize: 40 * 1024 * 1024 },
fileFilter: (_req, file, cb) => { fileFilter: (_req, file, cb) => {
const ext = path.extname(file.originalname || '').toLowerCase(); const originalName = (file.originalname || '').trim();
const isTxt = ext === '.txt' || (file.mimetype && (file.mimetype === 'text/plain' || file.mimetype === 'application/octet-stream')); const ext = path.extname(originalName).toLowerCase();
const mime = String(file.mimetype || '').toLowerCase();
// Keep route-level filter strict and deterministic: only .txt name + known text mime types.
// Controller still performs deep buffer validation to block renamed binaries.
const allowedMimes = new Set([
'text/plain',
'text/csv',
'application/octet-stream',
]);
const isTxt = ext === '.txt' && allowedMimes.has(mime);
if (isTxt) { if (isTxt) {
cb(null, true); cb(null, true);
} else { } else {
@ -60,6 +75,18 @@ const upload26asTxt = multer({
}, },
}); });
const upload26asTxtSingle = (req: any, res: any, next: any) => {
upload26asTxt.single('file')(req, res, (err: any) => {
if (!err) return next();
const message =
err?.message ||
(err?.code === 'LIMIT_FILE_SIZE'
? 'File too large. Maximum allowed size is 40 MB for 26AS upload.'
: 'Invalid 26AS upload file. Only .txt files are allowed.');
return ResponseHandler.error(res, message, 400);
});
};
router.use(authenticateToken); router.use(authenticateToken);
// Permissions (API-driven from admin config; used by frontend to show/hide Form 16 and 26AS) // Permissions (API-driven from admin config; used by frontend to show/hide Form 16 and 26AS)
@ -186,35 +213,47 @@ router.post(
); );
// 26AS (who can see: twentySixAsViewerEmails from admin config) // 26AS (who can see: twentySixAsViewerEmails from admin config)
router.get(
'/26as/dashboard',
requireForm16ReOnly,
requireForm1626AsAccess,
asyncHandler(form16Controller.get26asDashboard.bind(form16Controller))
);
router.get( router.get(
'/26as', '/26as',
requireForm16ReOnly,
requireForm1626AsAccess, requireForm1626AsAccess,
asyncHandler(form16Controller.list26as.bind(form16Controller)) asyncHandler(form16Controller.list26as.bind(form16Controller))
); );
router.post( router.post(
'/26as', '/26as',
requireForm16ReOnly,
requireForm1626AsAccess, requireForm1626AsAccess,
asyncHandler(form16Controller.create26as.bind(form16Controller)) asyncHandler(form16Controller.create26as.bind(form16Controller))
); );
router.put( router.put(
'/26as/:id', '/26as/:id',
requireForm16ReOnly,
requireForm1626AsAccess, requireForm1626AsAccess,
asyncHandler(form16Controller.update26as.bind(form16Controller)) asyncHandler(form16Controller.update26as.bind(form16Controller))
); );
router.delete( router.delete(
'/26as/:id', '/26as/:id',
requireForm16ReOnly,
requireForm1626AsAccess, requireForm1626AsAccess,
asyncHandler(form16Controller.delete26as.bind(form16Controller)) asyncHandler(form16Controller.delete26as.bind(form16Controller))
); );
router.get( router.get(
'/26as/upload-history', '/26as/upload-history',
requireForm16ReOnly,
requireForm1626AsAccess, requireForm1626AsAccess,
asyncHandler(form16Controller.get26asUploadHistory.bind(form16Controller)) asyncHandler(form16Controller.get26asUploadHistory.bind(form16Controller))
); );
router.post( router.post(
'/26as/upload', '/26as/upload',
requireForm16ReOnly,
requireForm1626AsAccess, requireForm1626AsAccess,
upload26asTxt.single('file'), upload26asTxtSingle,
asyncHandler(form16Controller.upload26as.bind(form16Controller)) asyncHandler(form16Controller.upload26as.bind(form16Controller))
); );

View File

@ -35,6 +35,9 @@ import antivirusRoutes from './antivirus.routes';
import dealerExternalRoutes from './dealerExternal.routes'; import dealerExternalRoutes from './dealerExternal.routes';
import form16Routes from './form16.routes'; import form16Routes from './form16.routes';
import hsnSacCodeRoutes from './hsnSacCode.routes'; import hsnSacCodeRoutes from './hsnSacCode.routes';
import cpcCdcRoutes from './cpc-cdc.routes';
import cpcPermissionRoutes from './cpc-permission.routes';
const router = Router(); const router = Router();
@ -101,6 +104,10 @@ router.use('/dealers-external', generalApiLimiter, dealerExternalRoutes); // 200
router.use('/form16', uploadLimiter, form16Routes); // 50 req/15min (file uploads: extract, submissions, 26as) router.use('/form16', uploadLimiter, form16Routes); // 50 req/15min (file uploads: extract, submissions, 26as)
router.use('/api-tokens', authLimiter, apiTokenRoutes); // 20 req/15min (sensitive — same as auth) router.use('/api-tokens', authLimiter, apiTokenRoutes); // 20 req/15min (sensitive — same as auth)
router.use('/hsn-sac', generalApiLimiter, hsnSacCodeRoutes); // 200 req/15min router.use('/hsn-sac', generalApiLimiter, hsnSacCodeRoutes); // 200 req/15min
router.use('/cpc-csd', generalApiLimiter, cpcPermissionRoutes); // 200 req/15min (canonical)
router.use('/cpc-csd', generalApiLimiter, cpcCdcRoutes);
export default router; export default router;

View File

@ -0,0 +1,163 @@
/**
 * One-off migration: copies the CPC-CSD Prisma tables "Document" and "AuditLog"
 * into the re-workflow tables cpc_documents and cpc_audit_logs.
*
* Usage:
* DATABASE_URL=postgres://... npm run migrate:cpc-csd
*
* Optional CPC_CSD_DATABASE_URL: when set, rows are read from that database
* and written to DATABASE_URL. When unset, both use DATABASE_URL (same DB;
* Prisma legacy tables must still exist alongside cpc_* tables).
*/
import 'dotenv/config';
import { Sequelize, QueryTypes } from 'sequelize';
import { sequelize, CpcDocument, CpcAuditLog } from '../models';
// Loosely-typed row shapes read from the legacy Prisma tables; columns are
// accessed by name and mapped explicitly in the migrate* functions below.
type LegacyDoc = Record<string, any>;
type LegacyLog = Record<string, any>;
/**
 * Resolve the source connection for the migration.
 *
 * When CPC_CSD_DATABASE_URL is set, a dedicated Postgres connection to that
 * database is opened and `close()` tears it down. Otherwise the shared target
 * connection is reused and `close()` is a no-op — the caller (main) owns the
 * shared connection's lifecycle.
 */
async function openSource(): Promise<{ sequelize: Sequelize; close: () => Promise<void> }> {
  const sourceUrl = process.env.CPC_CSD_DATABASE_URL?.trim();
  if (!sourceUrl) {
    // Source and target are the same database; nothing extra to close.
    return { sequelize, close: async () => {} };
  }
  const sourceDb = new Sequelize(sourceUrl, {
    dialect: 'postgres',
    logging: false
  });
  return {
    sequelize: sourceDb,
    close: async () => {
      await sourceDb.close();
    }
  };
}
/**
 * Check whether a table exists in the `public` schema of the given connection.
 *
 * @param client connection to inspect
 * @param tableName exact (case-sensitive) table name, e.g. "Document"
 */
async function tableExists(client: Sequelize, tableName: string): Promise<boolean> {
  const result = (await client.query(
    `SELECT EXISTS (
      SELECT 1 FROM information_schema.tables
      WHERE table_schema = 'public' AND table_name = :tableName
    ) AS "exists"`,
    { replacements: { tableName }, type: QueryTypes.SELECT }
  )) as { exists: boolean }[];
  return result[0]?.exists === true;
}
/**
 * Copy every row from the legacy Prisma "Document" table into cpc_documents.
 *
 * Idempotent: rows whose id already exists in the target are skipped, so the
 * script can be re-run safely. Per-row failures are logged and skipped rather
 * than aborting the whole migration.
 *
 * @param source connection holding the legacy "Document" table
 * @returns number of newly inserted cpc_documents rows
 */
async function migrateDocuments(source: Sequelize): Promise<number> {
  if (!(await tableExists(source, 'Document'))) {
    console.warn('[migrate-cpc-csd] Table "Document" not found on source; skipping documents.');
    return 0;
  }
  const rows = (await source.query('SELECT * FROM "Document"', {
    type: QueryTypes.SELECT
  })) as LegacyDoc[];
  let inserted = 0;
  for (const r of rows) {
    // Rows without a primary key cannot be migrated.
    if (!r.id) continue;
    // Skip rows already present in the target (idempotent re-runs).
    const existing = await CpcDocument.findByPk(r.id);
    if (existing) continue;
    try {
      await CpcDocument.create({
        id: r.id,
        bookingId: r.bookingId ?? null,
        claimId: r.claimId ?? null,
        attemptNo: r.attemptNo ?? 1, // legacy rows without attemptNo default to the first attempt
        documentType: r.documentType ?? null,
        documentGcpUrl: r.documentGcpUrl ?? null,
        provider: r.provider ?? null,
        msdPayload: r.msdPayload ?? null,
        extractedFields: r.extractedFields ?? null,
        fieldConfidence: r.fieldConfidence ?? null,
        validationStatus: r.validationStatus ?? null,
        matchPercentage: r.matchPercentage ?? null,
        mismatchReasons: r.mismatchReasons ?? null,
        fieldResults: r.fieldResults ?? null,
        ipAddress: r.ipAddress ?? null,
        createdAt: r.createdAt ? new Date(r.createdAt) : new Date()
      });
      inserted += 1;
    } catch (err: any) {
      // Log and continue; one bad row must not block the rest of the migration.
      console.error(`[migrate-cpc-csd] Skip document ${r.id}:`, err?.message || err);
    }
  }
  return inserted;
}
/**
 * Copy every row from the legacy Prisma "AuditLog" table into cpc_audit_logs.
 *
 * Intended to run after migrateDocuments: logs whose parent document is not
 * present in cpc_documents are skipped to preserve parent/child integrity.
 * Idempotent — logs whose id already exists in the target are skipped.
 *
 * @param source connection holding the legacy "AuditLog" table
 * @returns number of newly inserted cpc_audit_logs rows
 */
async function migrateAuditLogs(source: Sequelize): Promise<number> {
  if (!(await tableExists(source, 'AuditLog'))) {
    console.warn('[migrate-cpc-csd] Table "AuditLog" not found on source; skipping audit logs.');
    return 0;
  }
  const rows = (await source.query('SELECT * FROM "AuditLog"', {
    type: QueryTypes.SELECT
  })) as LegacyLog[];
  let inserted = 0;
  for (const r of rows) {
    // Both the log id and its parent document id are required.
    if (!r.id || !r.documentId) continue;
    // Only migrate logs whose parent document was migrated successfully.
    const parent = await CpcDocument.findByPk(r.documentId);
    if (!parent) {
      console.warn(`[migrate-cpc-csd] Skip audit ${r.id}: parent document ${r.documentId} missing`);
      continue;
    }
    // Skip rows already present in the target (idempotent re-runs).
    const existingLog = await CpcAuditLog.findByPk(r.id);
    if (existingLog) continue;
    try {
      await CpcAuditLog.create({
        id: r.id,
        documentId: r.documentId,
        action: r.action,
        previousState: r.previousState ?? null,
        newState: r.newState ?? null,
        performedBy: r.performedBy ?? null,
        remarks: r.remarks ?? null,
        createdAt: r.createdAt ? new Date(r.createdAt) : new Date()
      });
      inserted += 1;
    } catch (err: any) {
      // Log and continue; one bad row must not block the rest of the migration.
      console.error(`[migrate-cpc-csd] Skip audit log ${r.id}:`, err?.message || err);
    }
  }
  return inserted;
}
/** Log the total row counts in the target tables after the migration ran. */
async function printCounts(): Promise<void> {
  const [docTotal, logTotal] = await Promise.all([CpcDocument.count(), CpcAuditLog.count()]);
  console.log(`[migrate-cpc-csd] Target counts: cpc_documents=${docTotal}, cpc_audit_logs=${logTotal}`);
}
/**
 * Entry point: verify both connections, copy documents before audit logs
 * (parent rows first), report totals, and always release connections —
 * even when a step fails.
 */
async function main(): Promise<void> {
  const source = await openSource();
  try {
    await sequelize.authenticate();
    await source.sequelize.authenticate();
    console.log('[migrate-cpc-csd] Connected to target (DATABASE_URL) and source.');

    const docCount = await migrateDocuments(source.sequelize);
    const logCount = await migrateAuditLogs(source.sequelize);
    console.log(`[migrate-cpc-csd] New cpc_documents rows: ${docCount}`);
    console.log(`[migrate-cpc-csd] New cpc_audit_logs rows: ${logCount}`);
    await printCounts();
  } finally {
    await source.close();
    await sequelize.close();
  }
}

main().catch((err) => {
  console.error('[migrate-cpc-csd] Failed:', err);
  process.exit(1);
});

View File

@ -75,6 +75,10 @@ import * as m67 from '../migrations/20260324110001-add-pan-number-to-26as';
import * as m68 from '../migrations/20260325090001-ensure-pan-number-in-26as'; import * as m68 from '../migrations/20260325090001-ensure-pan-number-in-26as';
import * as m69 from '../migrations/20260325094500-add-user-session-and-hsn-sac-codes'; import * as m69 from '../migrations/20260325094500-add-user-session-and-hsn-sac-codes';
import * as m70 from '../migrations/20260325175000-update-credit-notes-and-add-items'; import * as m70 from '../migrations/20260325175000-update-credit-notes-and-add-items';
import * as m71 from '../migrations/2026041300-create-cpc-cdc-tables';
import * as m72 from '../migrations/20260414100000-ensure-cpc-cdc-tables-exist';
import * as m73 from '../migrations/20260416120000-rename-cpc-cdc-admin-config-key';
interface Migration { interface Migration {
name: string; name: string;
@ -157,6 +161,9 @@ const migrations: Migration[] = [
{ name: '20260325090001-ensure-pan-number-in-26as', module: m68 }, { name: '20260325090001-ensure-pan-number-in-26as', module: m68 },
{ name: '20260325094500-add-user-session-and-hsn-sac-codes', module: m69 }, { name: '20260325094500-add-user-session-and-hsn-sac-codes', module: m69 },
{ name: '20260325175000-update-credit-notes-and-add-items', module: m70 }, { name: '20260325175000-update-credit-notes-and-add-items', module: m70 },
{ name: '2026041300-create-cpc-cdc-tables', module: m71 },
{ name: '20260414100000-ensure-cpc-cdc-tables-exist', module: m72 },
{ name: '20260416120000-rename-cpc-cdc-admin-config-key', module: m73 },
]; ];
/** /**

View File

@ -113,13 +113,6 @@ const startServer = async (): Promise<void> => {
console.error('⚠️ Activity type seeding error:', error); console.error('⚠️ Activity type seeding error:', error);
} }
// Ensure demo admin user exists (admin@example.com / Admin@123)
const { ensureDemoAdminUser } = require('./scripts/seed-admin-user');
try {
await ensureDemoAdminUser();
} catch (error) {
console.warn('⚠️ Demo admin user setup warning:', error);
}
// Initialize holidays cache for TAT calculations // Initialize holidays cache for TAT calculations
try { try {

View File

@ -5,9 +5,9 @@ import type { StringValue } from 'ms';
import { LoginResponse } from '../types/auth.types'; import { LoginResponse } from '../types/auth.types';
import logger, { logAuthEvent } from '../utils/logger'; import logger, { logAuthEvent } from '../utils/logger';
import axios from 'axios'; import axios from 'axios';
import bcrypt from 'bcryptjs';
import { v4 as uuidv4 } from 'uuid'; import { v4 as uuidv4 } from 'uuid';
import { emitToUser } from '../realtime/socket'; import { emitToUser } from '../realtime/socket';
import { ACCESS_TOKEN_TTL_MS } from '../config/sessionPolicy';
function parseDeviceFromUserAgent(ua?: string): string { function parseDeviceFromUserAgent(ua?: string): string {
if (!ua) return 'Unknown Device'; if (!ua) return 'Unknown Device';
@ -29,14 +29,54 @@ function parseDeviceFromUserAgent(ua?: string): string {
export class AuthService { export class AuthService {
private resolveOktaConfigForRedirectUri(redirectUri: string): {
domain: string;
clientId: string;
clientSecret: string;
apiToken: string;
profile: 'localhost' | 'default';
} {
let host = '';
try {
host = new URL(redirectUri).hostname.toLowerCase();
} catch {
host = '';
}
const isLocalhostRedirect = host === 'localhost' || host === '127.0.0.1';
if (isLocalhostRedirect) {
return {
domain: process.env.OKTA_DOMAIN_LOCALHOST || ssoConfig.oktaDomain,
clientId: process.env.OKTA_CLIENT_ID_LOCALHOST || ssoConfig.oktaClientId,
clientSecret: process.env.OKTA_CLIENT_SECRET_LOCALHOST || ssoConfig.oktaClientSecret,
apiToken: process.env.OKTA_API_TOKEN_LOCALHOST || ssoConfig.oktaApiToken || '',
profile: 'localhost',
};
}
return {
domain: ssoConfig.oktaDomain,
clientId: ssoConfig.oktaClientId,
clientSecret: ssoConfig.oktaClientSecret,
apiToken: ssoConfig.oktaApiToken || '',
profile: 'default',
};
}
/** /**
* Fetch user details from Okta Users API (full profile with manager, employeeID, etc.) * Fetch user details from Okta Users API (full profile with manager, employeeID, etc.)
* Falls back to userinfo endpoint if Users API fails or token is not configured * Falls back to userinfo endpoint if Users API fails or token is not configured
*/ */
private async fetchUserFromOktaUsersAPI(oktaSub: string, email: string, accessToken: string): Promise<any> { private async fetchUserFromOktaUsersAPI(
oktaSub: string,
email: string,
accessToken: string,
oktaDomainOverride?: string,
oktaApiTokenOverride?: string
): Promise<any> {
try { try {
const oktaDomain = oktaDomainOverride || ssoConfig.oktaDomain;
const oktaApiToken = oktaApiTokenOverride || ssoConfig.oktaApiToken;
// Check if API token is configured // Check if API token is configured
if (!ssoConfig.oktaApiToken || ssoConfig.oktaApiToken.trim() === '') { if (!oktaApiToken || oktaApiToken.trim() === '') {
logger.info('OKTA_API_TOKEN not configured, will use userinfo endpoint as fallback'); logger.info('OKTA_API_TOKEN not configured, will use userinfo endpoint as fallback');
return null; return null;
} }
@ -47,17 +87,17 @@ export class AuthService {
// First attempt: Use email (preferred method as shown in curl example) // First attempt: Use email (preferred method as shown in curl example)
if (email) { if (email) {
const usersApiEndpoint = `${ssoConfig.oktaDomain}/api/v1/users/${encodeURIComponent(email)}`; const usersApiEndpoint = `${oktaDomain}/api/v1/users/${encodeURIComponent(email)}`;
logger.info('Fetching user from Okta Users API (using email)', { logger.info('Fetching user from Okta Users API (using email)', {
endpoint: usersApiEndpoint.replace(email, email.substring(0, 5) + '...'), endpoint: usersApiEndpoint.replace(email, email.substring(0, 5) + '...'),
hasApiToken: !!ssoConfig.oktaApiToken, hasApiToken: !!oktaApiToken,
}); });
try { try {
const response = await axios.get(usersApiEndpoint, { const response = await axios.get(usersApiEndpoint, {
headers: { headers: {
'Authorization': `SSWS ${ssoConfig.oktaApiToken}`, 'Authorization': `SSWS ${oktaApiToken}`,
'Accept': 'application/json', 'Accept': 'application/json',
}, },
validateStatus: (status) => status < 500, // Don't throw on 4xx errors validateStatus: (status) => status < 500, // Don't throw on 4xx errors
@ -80,17 +120,17 @@ export class AuthService {
// Second attempt: Use oktaSub (user ID) if email lookup failed // Second attempt: Use oktaSub (user ID) if email lookup failed
if (oktaSub) { if (oktaSub) {
const usersApiEndpoint = `${ssoConfig.oktaDomain}/api/v1/users/${encodeURIComponent(oktaSub)}`; const usersApiEndpoint = `${oktaDomain}/api/v1/users/${encodeURIComponent(oktaSub)}`;
logger.info('Fetching user from Okta Users API (using oktaSub)', { logger.info('Fetching user from Okta Users API (using oktaSub)', {
endpoint: usersApiEndpoint.replace(oktaSub, oktaSub.substring(0, 10) + '...'), endpoint: usersApiEndpoint.replace(oktaSub, oktaSub.substring(0, 10) + '...'),
hasApiToken: !!ssoConfig.oktaApiToken, hasApiToken: !!oktaApiToken,
}); });
try { try {
const response = await axios.get(usersApiEndpoint, { const response = await axios.get(usersApiEndpoint, {
headers: { headers: {
'Authorization': `SSWS ${ssoConfig.oktaApiToken}`, 'Authorization': `SSWS ${oktaApiToken}`,
'Accept': 'application/json', 'Accept': 'application/json',
}, },
validateStatus: (status) => status < 500, validateStatus: (status) => status < 500,
@ -268,6 +308,20 @@ export class AuthService {
throw new Error('Email and Okta sub are required'); throw new Error('Email and Okta sub are required');
} }
const norm = (value?: unknown): string | undefined => {
const s = String(value ?? '').trim();
return s ? s : undefined;
};
const limit = (value: unknown, max: number): string | undefined => {
const s = norm(value);
return s ? s.slice(0, max) : undefined;
};
const normalizedEmail = norm(userData.email)?.toLowerCase();
const normalizedOktaSub = norm(userData.oktaSub);
if (!normalizedEmail || !normalizedOktaSub) {
throw new Error('Email and Okta sub are required');
}
// Prepare user data with defaults for missing fields // Prepare user data with defaults for missing fields
// If firstName/lastName are missing, try to extract from displayName // If firstName/lastName are missing, try to extract from displayName
let firstName = userData.firstName || ''; let firstName = userData.firstName || '';
@ -293,13 +347,17 @@ export class AuthService {
displayName = userData.email.split('@')[0] || 'User'; displayName = userData.email.split('@')[0] || 'User';
} }
firstName = limit(firstName, 100) || '';
lastName = limit(lastName, 100) || '';
displayName = limit(displayName, 200) || normalizedEmail.split('@')[0] || 'User';
const sessionToken = uuidv4(); const sessionToken = uuidv4();
const lastLoginDevice = parseDeviceFromUserAgent(userAgent); const lastLoginDevice = parseDeviceFromUserAgent(userAgent);
// Prepare update/create data - always include required fields // Prepare update/create data - always include required fields
const userUpdateData: any = { const userUpdateData: any = {
email: userData.email, email: normalizedEmail,
oktaSub: userData.oktaSub, oktaSub: normalizedOktaSub,
lastLogin: new Date(), lastLogin: new Date(),
sessionToken, sessionToken,
lastLoginDevice, lastLoginDevice,
@ -310,25 +368,36 @@ export class AuthService {
if (firstName) userUpdateData.firstName = firstName; if (firstName) userUpdateData.firstName = firstName;
if (lastName) userUpdateData.lastName = lastName; if (lastName) userUpdateData.lastName = lastName;
if (displayName) userUpdateData.displayName = displayName; if (displayName) userUpdateData.displayName = displayName;
if (userData.employeeId) userUpdateData.employeeId = userData.employeeId; // Optional if (limit(userData.employeeId, 50)) userUpdateData.employeeId = limit(userData.employeeId, 50); // Optional
if (userData.department) userUpdateData.department = userData.department; if (limit(userData.department, 100)) userUpdateData.department = limit(userData.department, 100);
if (userData.designation) userUpdateData.designation = userData.designation; if (limit(userData.designation, 100)) userUpdateData.designation = limit(userData.designation, 100);
if (userData.phone) userUpdateData.phone = userData.phone; if (limit(userData.phone, 20)) userUpdateData.phone = limit(userData.phone, 20);
if (userData.manager) userUpdateData.manager = userData.manager; // Manager name from SSO if (limit(userData.manager, 200)) userUpdateData.manager = limit(userData.manager, 200); // Manager name from SSO
if (userData.jobTitle) userUpdateData.jobTitle = userData.jobTitle; // Job title from SSO if (limit(userData.jobTitle, 3000)) userUpdateData.jobTitle = limit(userData.jobTitle, 3000); // Job title from SSO
if (userData.postalAddress) userUpdateData.postalAddress = userData.postalAddress; // Address from SSO if (limit(userData.postalAddress, 500)) userUpdateData.postalAddress = limit(userData.postalAddress, 500); // Address from SSO
if (userData.mobilePhone) userUpdateData.mobilePhone = userData.mobilePhone; // Mobile phone from SSO if (limit(userData.mobilePhone, 20)) userUpdateData.mobilePhone = limit(userData.mobilePhone, 20); // Mobile phone from SSO
if (userData.employeeNumber || userData.dealerCode) { if (limit(userData.secondEmail, 255)) userUpdateData.secondEmail = limit(userData.secondEmail, 255);
userUpdateData.employeeNumber = userData.employeeNumber || userData.dealerCode; const employeeNumber = limit(userData.employeeNumber || userData.dealerCode, 50);
if (employeeNumber) {
userUpdateData.employeeNumber = employeeNumber;
} }
if (userData.adGroups && Array.isArray(userData.adGroups) && userData.adGroups.length > 0) { if (userData.adGroups && Array.isArray(userData.adGroups) && userData.adGroups.length > 0) {
userUpdateData.adGroups = userData.adGroups; // Group memberships from SSO userUpdateData.adGroups = userData.adGroups
.map((group: unknown) => limit(group, 255))
.filter(Boolean)
.slice(0, 200); // Group memberships from SSO
} }
// Check if user exists by email (primary identifier) // Prefer matching by oktaSub, then fallback to email.
// This avoids collisions when email changes in IdP over time.
let user = await User.findOne({ let user = await User.findOne({
where: { email: userData.email } where: { oktaSub: normalizedOktaSub }
}); });
if (!user) {
user = await User.findOne({
where: { email: normalizedEmail }
});
}
if (user) { if (user) {
// Update existing user - update oktaSub if different, and other fields // Update existing user - update oktaSub if different, and other fields
@ -352,21 +421,22 @@ export class AuthService {
} else { } else {
// Create new user with required fields (email and oktaSub) // Create new user with required fields (email and oktaSub)
user = await User.create({ user = await User.create({
email: userData.email, email: normalizedEmail,
oktaSub: userData.oktaSub, oktaSub: normalizedOktaSub,
employeeId: userData.employeeId || null, // Optional employeeId: userData.employeeId || null, // Optional
firstName: firstName || null, firstName: firstName || null,
lastName: lastName || null, lastName: lastName || null,
displayName: displayName, displayName: displayName,
department: userData.department || null, department: limit(userData.department, 100) || null,
designation: userData.designation || null, designation: limit(userData.designation, 100) || null,
phone: userData.phone || null, phone: limit(userData.phone, 20) || null,
manager: userData.manager || null, // Manager name from SSO manager: limit(userData.manager, 200) || null, // Manager name from SSO
jobTitle: userData.jobTitle || null, // Job title from SSO jobTitle: limit(userData.jobTitle, 3000) || null, // Job title from SSO
postalAddress: userData.postalAddress || null, // Address from SSO postalAddress: limit(userData.postalAddress, 500) || null, // Address from SSO
mobilePhone: userData.mobilePhone || null, mobilePhone: limit(userData.mobilePhone, 20) || null,
adGroups: userData.adGroups && Array.isArray(userData.adGroups) && userData.adGroups.length > 0 ? userData.adGroups : null, secondEmail: limit(userData.secondEmail, 255) || null,
employeeNumber: userData.employeeNumber || userData.dealerCode || null, adGroups: userUpdateData.adGroups && Array.isArray(userUpdateData.adGroups) && userUpdateData.adGroups.length > 0 ? userUpdateData.adGroups : null,
employeeNumber: limit(userData.employeeNumber || userData.dealerCode, 50) || null,
isActive: true, isActive: true,
role: 'USER', role: 'USER',
lastLogin: new Date(), lastLogin: new Date(),
@ -417,7 +487,7 @@ export class AuthService {
/** /**
* Generate JWT access token * Generate JWT access token
*/ */
private generateAccessToken(user: User): string { private generateAccessToken(user: User, expiresIn?: StringValue | number): string {
if (!ssoConfig.jwtSecret) { if (!ssoConfig.jwtSecret) {
throw new Error('JWT secret is not configured'); throw new Error('JWT secret is not configured');
} }
@ -431,7 +501,7 @@ export class AuthService {
}; };
const options: SignOptions = { const options: SignOptions = {
expiresIn: ssoConfig.jwtExpiry as StringValue | number expiresIn: expiresIn ?? (ssoConfig.jwtExpiry as StringValue | number)
}; };
return jwt.sign(payload, ssoConfig.jwtSecret, options); return jwt.sign(payload, ssoConfig.jwtSecret, options);
@ -472,7 +542,7 @@ export class AuthService {
/** /**
* Refresh access token using refresh token * Refresh access token using refresh token
*/ */
async refreshAccessToken(refreshToken: string): Promise<string> { async refreshAccessToken(refreshToken: string): Promise<{ accessToken: string; accessTokenTtlMs: number }> {
try { try {
const decoded = jwt.verify(refreshToken, ssoConfig.jwtSecret) as any; const decoded = jwt.verify(refreshToken, ssoConfig.jwtSecret) as any;
@ -489,7 +559,23 @@ export class AuthService {
throw new Error('Session expired due to login from another device'); throw new Error('Session expired due to login from another device');
} }
return this.generateAccessToken(user); // Strict 30-minute session timeout from login time.
const lastLoginTime = user.lastLogin ? new Date(user.lastLogin).getTime() : 0;
if (!lastLoginTime || Number.isNaN(lastLoginTime)) {
throw new Error('Session expired');
}
const sessionAgeMs = Date.now() - lastLoginTime;
if (sessionAgeMs > ACCESS_TOKEN_TTL_MS) {
throw new Error('Session expired');
}
// Absolute session deadline: refreshed token must never outlive login + 30m.
const remainingSessionMs = ACCESS_TOKEN_TTL_MS - sessionAgeMs;
if (remainingSessionMs <= 0) {
throw new Error('Session expired');
}
const remainingSessionSeconds = Math.max(1, Math.floor(remainingSessionMs / 1000));
const accessToken = this.generateAccessToken(user, `${remainingSessionSeconds}s` as StringValue);
return { accessToken, accessTokenTtlMs: remainingSessionSeconds * 1000 };
} catch (error) { } catch (error) {
logAuthEvent('auth_failure', undefined, { logAuthEvent('auth_failure', undefined, {
action: 'token_refresh_failed', action: 'token_refresh_failed',
@ -529,174 +615,16 @@ export class AuthService {
} }
/** /**
* Authenticate user with username (email) and password via Okta API * Authenticate user with username (email) and password via Okta (Resource Owner Password grant).
* This is for direct API authentication (e.g., Postman, mobile apps) * For direct API clients (e.g. Postman) when Okta allows this grant; otherwise use token-exchange.
* * No local or demo password bypass.
* Flow:
* 1. Authenticate with Okta using username/password
* 2. Get access token from Okta
* 3. Fetch user info from Okta
* 4. Create/update user in our database if needed
* 5. Return our JWT tokens
*/ */
async authenticateWithPassword(username: string, password: string, userAgent?: string): Promise<LoginResponse> { async authenticateWithPassword(username: string, password: string, userAgent?: string): Promise<LoginResponse> {
// Demo admin: admin@example.com / Admin@123 (works with or without .env; for dev/demo only)
const DEMO_ADMIN_EMAIL = 'admin@example.com';
const DEFAULT_DEMO_ADMIN_HASH = '$2a$10$H4ikTC.HDZPM0iFxjBy2C./WlkbGbidipIiZlXIJx6QpcBazdf12K'; // bcrypt of "Admin@123"
const tryLocalAdminLogin = async (): Promise<LoginResponse | null> => {
const normalizedInput = username?.trim?.()?.toLowerCase?.() ?? '';
const adminEmail = process.env.LOCAL_ADMIN_EMAIL?.trim() || DEMO_ADMIN_EMAIL;
if (normalizedInput !== adminEmail.toLowerCase()) return null;
const hash = process.env.LOCAL_ADMIN_PASSWORD_HASH?.trim() || DEFAULT_DEMO_ADMIN_HASH;
const passwordMatch = await bcrypt.compare(password, hash);
if (!passwordMatch) return null;
let user = await User.findOne({ where: { email: adminEmail } });
const sessionToken = uuidv4();
const lastLoginDevice = parseDeviceFromUserAgent(userAgent);
if (!user) {
user = await User.create({
email: adminEmail,
oktaSub: 'local-ADMIN',
displayName: 'RE Admin',
firstName: 'RE',
lastName: 'Admin',
isActive: true,
role: 'ADMIN',
emailNotificationsEnabled: true,
pushNotificationsEnabled: true,
inAppNotificationsEnabled: true,
sessionToken,
lastLoginDevice,
lastLogin: new Date()
});
logger.info('Demo admin user created on first login', { email: adminEmail });
} else {
await user.update({ lastLogin: new Date(), sessionToken, lastLoginDevice });
}
logger.info('Demo admin login successful', { email: adminEmail });
const accessToken = this.generateAccessToken(user);
const refreshToken = this.generateRefreshToken(user);
return {
user: {
userId: user.userId,
employeeId: user.employeeId ?? null,
email: user.email,
firstName: user.firstName ?? null,
lastName: user.lastName ?? null,
displayName: user.displayName ?? null,
department: user.department ?? null,
designation: user.designation ?? null,
jobTitle: user.jobTitle ?? null,
role: user.role,
},
accessToken,
refreshToken,
};
};
// Helper: try local dealer login (TESTREFLOW) when ENABLE_LOCAL_DEALER_LOGIN is set (in scope for try and catch)
const tryLocalDealerLogin = async (): Promise<LoginResponse | null> => {
const enabled = process.env.ENABLE_LOCAL_DEALER_LOGIN?.toLowerCase()?.trim() === 'true';
const hash = process.env.LOCAL_DEALER_PASSWORD_HASH?.trim();
const localUsername = 'TESTREFLOW';
const normalizedUsername = username?.trim?.()?.toUpperCase?.() ?? '';
if (!enabled || !hash || normalizedUsername !== localUsername) return null;
const passwordMatch = await bcrypt.compare(password, hash);
if (!passwordMatch) return null;
logger.info('Local dealer login successful', { username: localUsername });
return this.handleSSOCallback({
oktaSub: 'local-TESTREFLOW',
email: 'testreflow@example.com',
displayName: 'Test Reflow Dealer',
firstName: 'Test',
lastName: 'Reflow',
}, userAgent);
};
// Fallback bcrypt hash for "Test@123" when .env hash is corrupted (dev only)
const ROHIT_DEALER_EMAIL = 'rohitm_ext@royalenfield.com';
const FALLBACK_HASH_TEST123 = '$2a$10$gQ34/Jt9rOFDBWJqVur2W.ZWlN0vqAzt2I/6HKBKOtggowY/R8W/C';
// Helper: try local login by email (e.g. rohitm_ext@royalenfield.com) when LOCAL_DEALER_2_* is set or known dealer
const tryLocalDealerLoginByEmail = async (): Promise<LoginResponse | null> => {
const envEmail = process.env.LOCAL_DEALER_2_EMAIL?.trim()?.toLowerCase();
const rawHash = process.env.LOCAL_DEALER_2_PASSWORD_HASH;
let hash = (typeof rawHash === 'string' ? rawHash.trim() : '') || '';
if (hash.length >= 2 && ((hash.startsWith('"') && hash.endsWith('"')) || (hash.startsWith("'") && hash.endsWith("'")))) hash = hash.slice(1, -1);
const normalizedInput = username?.trim?.()?.toLowerCase?.() ?? '';
const isRohitEmail = normalizedInput === ROHIT_DEALER_EMAIL;
const email = envEmail || (isRohitEmail ? ROHIT_DEALER_EMAIL : null);
const inputMatches = !!email && normalizedInput === email;
if (!inputMatches) {
logger.info('[Auth] Local dealer by email skip', {
hasEmail: !!envEmail,
hasHash: !!hash,
hashLen: hash.length,
inputMatch: inputMatches,
normalizedInput: normalizedInput ? `${normalizedInput.slice(0, 5)}...` : '',
});
return null;
}
let passwordMatch = false;
if (hash.length >= 50) {
passwordMatch = await bcrypt.compare(password, hash);
}
if (!passwordMatch && isRohitEmail) {
passwordMatch = await bcrypt.compare(password, FALLBACK_HASH_TEST123);
if (passwordMatch) logger.info('[Auth] Local dealer login used fallback hash for', { email: ROHIT_DEALER_EMAIL });
}
if (!passwordMatch) {
logger.warn('[Auth] Local dealer by email: password mismatch', { email });
return null;
}
const { Op } = await import('sequelize');
const user = await User.findOne({ where: { email: { [Op.iLike]: email } } });
if (!user) {
logger.warn('Local dealer login by email: user not found', { email });
return null;
}
const sessionToken = uuidv4();
const lastLoginDevice = parseDeviceFromUserAgent(userAgent);
await user.update({ lastLogin: new Date(), sessionToken, lastLoginDevice });
logger.info('Local dealer login by email successful', { email });
const accessToken = this.generateAccessToken(user);
const refreshToken = this.generateRefreshToken(user);
return {
user: {
userId: user.userId,
employeeId: user.employeeId ?? null,
email: user.email,
firstName: user.firstName ?? null,
lastName: user.lastName ?? null,
displayName: user.displayName ?? null,
department: user.department ?? null,
designation: user.designation ?? null,
jobTitle: user.jobTitle ?? null,
role: user.role,
},
accessToken,
refreshToken,
};
};
try { try {
logger.info('Authenticating user with username/password', { username }); logger.info('Authenticating user with username/password', { username });
// Demo admin (admin@example.com / Admin@123) and optional env-based local admin // Authenticate with Okta using Resource Owner Password flow
const adminResult = await tryLocalAdminLogin(); // Requires Okta Resource Owner Password grant when used; otherwise use SSO / token-exchange.
if (adminResult) return adminResult;
// Development-only: try local dealer login when enabled
const localResult = await tryLocalDealerLogin();
if (localResult) return localResult;
// Optional: local login by email (e.g. rohit.m.ext@royalenfield.com) when LOCAL_DEALER_2_* set
const localEmailResult = await tryLocalDealerLoginByEmail();
if (localEmailResult) return localEmailResult;
// Step 1: Authenticate with Okta using Resource Owner Password flow
// Note: This requires Okta to have Resource Owner Password grant type enabled
const tokenEndpoint = `${ssoConfig.oktaDomain}/oauth2/default/v1/token`; const tokenEndpoint = `${ssoConfig.oktaDomain}/oauth2/default/v1/token`;
const tokenResponse = await axios.post( const tokenResponse = await axios.post(
@ -807,21 +735,6 @@ export class AuthService {
oktaError: error.response?.data, oktaError: error.response?.data,
}); });
// When Okta does not allow password grant (e.g. only authorization_code), fall back to local logins
const msg = (error.message || '').toLowerCase();
if (msg.includes('grant type') || msg.includes('not authorized to use the provided grant type')) {
const adminFallback = await tryLocalAdminLogin();
if (adminFallback) {
logger.info('Local admin login used after Okta grant-type rejection');
return adminFallback;
}
const localResult = await tryLocalDealerLogin();
if (localResult) {
logger.info('Local dealer login used after Okta grant-type rejection');
return localResult;
}
}
if (error.response?.data) { if (error.response?.data) {
const errorData = error.response.data; const errorData = error.response.data;
if (typeof errorData === 'object' && !Array.isArray(errorData)) { if (typeof errorData === 'object' && !Array.isArray(errorData)) {
@ -843,11 +756,12 @@ export class AuthService {
*/ */
async exchangeCodeForTokens(code: string, redirectUri: string, userAgent?: string): Promise<LoginResponse> { async exchangeCodeForTokens(code: string, redirectUri: string, userAgent?: string): Promise<LoginResponse> {
try { try {
const oktaConfigForRequest = this.resolveOktaConfigForRedirectUri(redirectUri);
// Validate configuration // Validate configuration
if (!ssoConfig.oktaClientId || ssoConfig.oktaClientId.trim() === '') { if (!oktaConfigForRequest.clientId || oktaConfigForRequest.clientId.trim() === '') {
throw new Error('OKTA_CLIENT_ID is not configured. Please set it in your .env file.'); throw new Error('OKTA_CLIENT_ID is not configured. Please set it in your .env file.');
} }
if (!ssoConfig.oktaClientSecret || ssoConfig.oktaClientSecret.trim() === '' || ssoConfig.oktaClientSecret.includes('your_okta_client_secret')) { if (!oktaConfigForRequest.clientSecret || oktaConfigForRequest.clientSecret.trim() === '' || oktaConfigForRequest.clientSecret.includes('your_okta_client_secret')) {
throw new Error('OKTA_CLIENT_SECRET is not configured. Please set it in your .env file.'); throw new Error('OKTA_CLIENT_SECRET is not configured. Please set it in your .env file.');
} }
if (!code || code.trim() === '') { if (!code || code.trim() === '') {
@ -857,15 +771,46 @@ export class AuthService {
throw new Error('Redirect URI is required'); throw new Error('Redirect URI is required');
} }
const normalize = (s: string) => s.trim().replace(/\/+$/, '');
const providedRedirectUri = normalize(redirectUri);
const configuredFrontendBases = (process.env.FRONTEND_URL || '')
.split(',')
.map((s) => normalize(s))
.filter(Boolean);
const providedOrigin = (() => {
try {
return normalize(new URL(providedRedirectUri).origin);
} catch {
return '';
}
})();
const matchingConfiguredBase = configuredFrontendBases.find((base) => {
try {
return normalize(new URL(base).origin).toLowerCase() === providedOrigin.toLowerCase();
} catch {
return false;
}
});
const fallbackConfiguredBase = configuredFrontendBases[0] || '';
const selectedFrontendBase = matchingConfiguredBase || fallbackConfiguredBase;
const canonicalRedirectUri = selectedFrontendBase ? `${selectedFrontendBase}/login/callback` : providedRedirectUri;
const isSecureEnv = process.env.NODE_ENV === 'production' || process.env.NODE_ENV === 'uat';
const effectiveRedirectUri = isSecureEnv ? canonicalRedirectUri : providedRedirectUri;
logger.info('Exchanging code with Okta', { logger.info('Exchanging code with Okta', {
redirectUri, redirectUri: effectiveRedirectUri,
providedRedirectUri,
canonicalRedirectUri,
configuredFrontendBases,
selectedFrontendBase,
oktaProfile: oktaConfigForRequest.profile,
codePrefix: code.substring(0, 10) + '...', codePrefix: code.substring(0, 10) + '...',
oktaDomain: ssoConfig.oktaDomain, oktaDomain: oktaConfigForRequest.domain,
clientId: ssoConfig.oktaClientId, clientId: oktaConfigForRequest.clientId,
hasClientSecret: !!ssoConfig.oktaClientSecret && !ssoConfig.oktaClientSecret.includes('your_okta_client_secret'), hasClientSecret: !!oktaConfigForRequest.clientSecret && !oktaConfigForRequest.clientSecret.includes('your_okta_client_secret'),
}); });
const tokenEndpoint = `${ssoConfig.oktaDomain}/oauth2/default/v1/token`; const tokenEndpoint = `${oktaConfigForRequest.domain}/oauth2/default/v1/token`;
// Exchange authorization code for tokens // Exchange authorization code for tokens
// redirect_uri here must match the one used when requesting the authorization code // redirect_uri here must match the one used when requesting the authorization code
@ -874,9 +819,9 @@ export class AuthService {
new URLSearchParams({ new URLSearchParams({
grant_type: 'authorization_code', grant_type: 'authorization_code',
code, code,
redirect_uri: redirectUri, // Frontend URL (e.g., http://localhost:3000/login/callback) redirect_uri: effectiveRedirectUri, // Must match authorize request redirect_uri exactly
client_id: ssoConfig.oktaClientId, client_id: oktaConfigForRequest.clientId,
client_secret: ssoConfig.oktaClientSecret, client_secret: oktaConfigForRequest.clientSecret,
}), }),
{ {
headers: { headers: {
@ -931,7 +876,7 @@ export class AuthService {
// Step 1: Try to get user info from Okta Users API (full profile with manager, employeeID, etc.) // Step 1: Try to get user info from Okta Users API (full profile with manager, employeeID, etc.)
// First, get oktaSub from userinfo to use as user ID // First, get oktaSub from userinfo to use as user ID
const userInfoEndpoint = `${ssoConfig.oktaDomain}/oauth2/default/v1/userinfo`; const userInfoEndpoint = `${oktaConfigForRequest.domain}/oauth2/default/v1/userinfo`;
const userInfoResponse = await axios.get(userInfoEndpoint, { const userInfoResponse = await axios.get(userInfoEndpoint, {
headers: { headers: {
Authorization: `Bearer ${access_token}`, Authorization: `Bearer ${access_token}`,
@ -947,7 +892,13 @@ export class AuthService {
// Try Users API first (provides full profile including manager, employeeID, etc.) // Try Users API first (provides full profile including manager, employeeID, etc.)
let userData: SSOUserData | null = null; let userData: SSOUserData | null = null;
const usersApiResponse = await this.fetchUserFromOktaUsersAPI(oktaSub, oktaUserInfo.email || '', access_token); const usersApiResponse = await this.fetchUserFromOktaUsersAPI(
oktaSub,
oktaUserInfo.email || '',
access_token,
oktaConfigForRequest.domain,
oktaConfigForRequest.apiToken
);
if (usersApiResponse) { if (usersApiResponse) {
userData = this.extractUserDataFromUsersAPI(usersApiResponse, oktaSub); userData = this.extractUserDataFromUsersAPI(usersApiResponse, oktaSub);
@ -1036,8 +987,38 @@ export class AuthService {
throw new Error('Redirect URI is required'); throw new Error('Redirect URI is required');
} }
const normalize = (s: string) => s.trim().replace(/\/+$/, '');
const providedRedirectUri = normalize(redirectUri);
const configuredFrontendBases = (process.env.FRONTEND_URL || '')
.split(',')
.map((s) => normalize(s))
.filter(Boolean);
const providedOrigin = (() => {
try {
return normalize(new URL(providedRedirectUri).origin);
} catch {
return '';
}
})();
const matchingConfiguredBase = configuredFrontendBases.find((base) => {
try {
return normalize(new URL(base).origin).toLowerCase() === providedOrigin.toLowerCase();
} catch {
return false;
}
});
const fallbackConfiguredBase = configuredFrontendBases[0] || '';
const selectedFrontendBase = matchingConfiguredBase || fallbackConfiguredBase;
const canonicalRedirectUri = selectedFrontendBase ? `${selectedFrontendBase}/login/callback` : providedRedirectUri;
const isSecureEnv = process.env.NODE_ENV === 'production' || process.env.NODE_ENV === 'uat';
const effectiveRedirectUri = isSecureEnv ? canonicalRedirectUri : providedRedirectUri;
logger.info('Exchanging code with Tanflow', { logger.info('Exchanging code with Tanflow', {
redirectUri, redirectUri: effectiveRedirectUri,
providedRedirectUri,
canonicalRedirectUri,
configuredFrontendBases,
selectedFrontendBase,
codePrefix: code.substring(0, 10) + '...', codePrefix: code.substring(0, 10) + '...',
tanflowBaseUrl: ssoConfig.tanflowBaseUrl, tanflowBaseUrl: ssoConfig.tanflowBaseUrl,
clientId: ssoConfig.tanflowClientId, clientId: ssoConfig.tanflowClientId,
@ -1052,7 +1033,7 @@ export class AuthService {
new URLSearchParams({ new URLSearchParams({
grant_type: 'authorization_code', grant_type: 'authorization_code',
code, code,
redirect_uri: redirectUri, redirect_uri: effectiveRedirectUri,
client_id: ssoConfig.tanflowClientId!, client_id: ssoConfig.tanflowClientId!,
client_secret: ssoConfig.tanflowClientSecret!, client_secret: ssoConfig.tanflowClientSecret!,
}), }),

View File

@ -0,0 +1,71 @@
import { Storage } from "@google-cloud/storage";
import path from 'path';
import logger from "@utils/logger";
/** Optional layout for CPC/CSD objects (mirrors local `uploads/cpc-csd-files/...`). */
export type CpcGcsUploadOptions = {
  bucket?: string;
  /** Directory prefix inside the bucket, no leading slash, e.g. `cpc-csd/csd/BOOK-1/documents` */
  objectDir?: string;
  /** Final filename segment only (no path separators) */
  objectBaseName?: string;
};

/**
 * Thin wrapper around Google Cloud Storage for CPC/CSD document blobs.
 * Credentials and default bucket come from GCP_PROJECT_ID, GCP_KEY_FILE and
 * GCP_BUCKET_NAME environment variables.
 */
class CpcGcsService {
  private storage: Storage;
  private bucketName: string;

  constructor() {
    this.storage = new Storage({
      projectId: process.env.GCP_PROJECT_ID,
      keyFilename: process.env.GCP_KEY_FILE
    });
    // May be '' when GCP_BUCKET_NAME is unset; uploadToGcs validates before use.
    this.bucketName = process.env.GCP_BUCKET_NAME || '';
  }

  /**
   * Split a `gs://bucket/path/to/object` URL into its bucket and object path.
   * @throws Error('INVALID_DOCUMENT_URL') when the URL is not a well-formed gs:// URL.
   */
  parseGsUrl(gsUrl: string) {
    if (!gsUrl || !gsUrl.startsWith("gs://")) throw new Error("INVALID_DOCUMENT_URL");
    const s = gsUrl.slice(5);
    const [bucket, ...rest] = s.split("/");
    const objectPath = rest.join("/");
    if (!bucket || !objectPath) throw new Error("INVALID_DOCUMENT_URL");
    return { bucket, objectPath };
  }

  /** Download the full object referenced by a gs:// URL into memory. */
  async downloadFromGcs(gsUrl: string): Promise<Buffer> {
    const { bucket, objectPath } = this.parseGsUrl(gsUrl);
    const [buf] = await this.storage.bucket(bucket).file(objectPath).download();
    return buf;
  }

  /**
   * Upload a buffer to GCS and return its gs:// URL.
   * Third argument can be a legacy custom bucket string, or structured options for path layout.
   *
   * Fix vs previous version: backslashes are normalized to '/' BEFORE the
   * separator/'..' validation. Previously `objectBaseName` containing '\' passed
   * the `includes('/')` check and was converted to '/' only after the join,
   * letting a caller smuggle extra path segments into the object key.
   */
  async uploadToGcs(
    fileBuffer: Buffer,
    originalName: string,
    legacyBucketOrOpts?: string | CpcGcsUploadOptions
  ): Promise<string> {
    const opts: CpcGcsUploadOptions =
      typeof legacyBucketOrOpts === 'string' ? { bucket: legacyBucketOrOpts } : legacyBucketOrOpts || {};
    const targetBucket = opts.bucket || this.bucketName;
    if (!targetBucket) {
      // Fail fast with an actionable message instead of a cryptic client error.
      throw new Error('GCS bucket is not configured (set GCP_BUCKET_NAME or pass opts.bucket)');
    }
    // Normalize separators first so '\' cannot bypass the '/' check below.
    const requestedBase = (opts.objectBaseName || '').replace(/\\/g, '/');
    const base =
      requestedBase && !requestedBase.includes('/') && !requestedBase.includes('..')
        ? requestedBase
        : `${Date.now()}-${path.basename(originalName)}`;
    const requestedDir = (opts.objectDir || '').replace(/\\/g, '/');
    const dir =
      requestedDir && !requestedDir.includes('..')
        ? requestedDir.replace(/^\/+|\/+$/g, '')
        : 'cpc-csd/uploads';
    const fileName = `${dir}/${base}`;
    const bucket = this.storage.bucket(targetBucket);
    const file = bucket.file(fileName);
    await file.save(fileBuffer);
    logger.info(`[CpcGcsService] File uploaded to gs://${targetBucket}/${fileName}`);
    return `gs://${targetBucket}/${fileName}`;
  }
}

export const cpcGcsService = new CpcGcsService();

View File

@ -0,0 +1,301 @@
/**
* Utility to map OCR document data to the "Excel Screenshot Summary" format
* and ensure uniform detail field results for the CPC-CSD module.
*/
/**
 * Expected OCR/MSD field names per (loosely normalized) document type.
 * Used as a fallback when a document's MSD payload carries no keys of its own.
 * Keys cover known spelling variants (AADHAAR/ADHAAR, AUTHORITY_LETTER/AUTH_LETTER,
 * GENERIC_INVOICE/RETAIL_INVOICE/INVOICE) so lookups by raw type strings still hit.
 *
 * Typed precisely (was `any`) so indexing yields `string[]` and typos in the
 * values are caught by the compiler.
 */
const FIELD_DEFAULTS: Record<string, string[]> = {
  AADHAAR: ['customer_name', 'aadhar_number', 'name', 'dob', 'gender', 'address'],
  ADHAAR: ['customer_name', 'aadhar_number', 'name', 'dob', 'gender', 'address'],
  CSD_PO: ['customer_name', 'po_number', 'po_amount', 'signature_and_stamp'],
  GENERIC_INVOICE: ['customer_name', 'order_or_auth_number', 'invoice_value', 'invoice_date', 'tax_amount'],
  RETAIL_INVOICE: ['customer_name', 'order_or_auth_number', 'invoice_value', 'invoice_date', 'tax_amount'],
  INVOICE: ['customer_name', 'order_or_auth_number', 'invoice_value', 'invoice_date', 'tax_amount'],
  AUTHORITY_LETTER: [
    'customer_name',
    'letter_number',
    'letter_amount',
    'signature_and_stamp',
    'authorized_person_name',
    'order_or_authorisation_number',
    'invoice_value',
    'govt_signatory_and_stamp_present',
    'authority_grantor_name',
    'valid_until',
    'purpose',
    'date_of_issue'
  ],
  AUTH_LETTER: [
    'customer_name',
    'letter_number',
    'letter_amount',
    'signature_and_stamp',
    'authorized_person_name',
    'order_or_authorisation_number',
    'invoice_value',
    'govt_signatory_and_stamp_present',
    'authority_grantor_name',
    'valid_until',
    'purpose',
    'date_of_issue'
  ]
};
/**
 * Generic field -> criteria-label fallbacks, consulted only when no
 * doc-type-specific rule in getCriteriaLabel matches.
 * Typed precisely (was `any`) so lookups yield `string | undefined`.
 */
const CRITERIA_MAP: Record<string, string> = {
  // Fallbacks when doc-type-specific text is not applied (UI / reports only)
  aadhaar_number: 'Exact match',
  aadhar_number: 'Exact match',
  name: 'Text match',
  dob: 'Exact after normalization',
  gender: 'Exact (M/Male normalize)',
  address: 'Text match',
  customer_name: 'Text match',
  order_or_auth_number: 'Text match',
  order_or_authorisation_number: 'Text match',
  invoice_value: 'Amount comparison',
  invoice_date: 'Date comparison',
  tax_amount: 'Amount comparison',
  authorized_person_name: 'Text match',
  authority_grantor_name: 'Text match',
  letter_number: 'Text match',
  valid_until: 'Exact date match',
  purpose: 'Text match',
  date_of_issue: 'Exact match',
  mail_extraction: 'Email on document vs expected',
  stamp: 'Signature / stamp vs expected',
  signatory: 'Signature / stamp vs expected',
  govt_signatory_and_stamp_present: 'Signature / stamp vs expected',
  stamp_sign_present: 'Signature / stamp vs expected',
  signature_and_stamp: 'Signature / stamp vs expected',
  po_number: 'Exact match',
  po_amount: 'Amount comparison',
  letter_amount: 'Amount comparison'
};

/** Normalize document type for criteria copy (matches validation service naming). */
function normalizeCriteriaDocType(docType?: string): string {
  const u = String(docType || '').toUpperCase().trim();
  if (u.includes('AADHAAR') || u === 'ADHAAR') return 'AADHAAR';
  if (u.includes('CPC_AUTH') || u.includes('AUTHORITY')) return 'CPC_AUTH';
  if (u.includes('CSD_PO') || u.includes('PURCHASE') || (u.includes('PO') && u.includes('CSD'))) return 'CSD_PO';
  if (u.includes('RETAIL') || u.includes('INVOICE')) return 'RETAIL_INVOICE';
  return u;
}

/**
 * Human-readable accuracy criteria for reports / API field_results (shared with validation).
 * Doc-type-specific rules take precedence; otherwise the first CRITERIA_MAP key
 * contained in the field name wins (insertion order of CRITERIA_MAP matters).
 *
 * @param field   Field name as persisted in field_results (matched case-insensitively).
 * @param docType Raw document type string; normalized via normalizeCriteriaDocType.
 * @returns Short criteria label; 'Exact check' when nothing matches.
 */
export function getCriteriaLabel(field: string, docType?: string): string {
  if (!field) return 'Exact check';
  const f = field.toLowerCase();
  const dt = normalizeCriteriaDocType(docType);
  if ((f === 'order_or_authorisation_number' || f === 'po_number') && dt === 'CSD_PO') {
    return 'Exact match';
  }
  if (f === 'letter_number' && dt === 'CPC_AUTH') {
    return 'Text match';
  }
  if (f === 'aadhaar_number' || f === 'aadhar_number') {
    return 'Exact match';
  }
  if ((f === 'customer_name' || f === 'name' || f === 'authorized_person_name') && (dt === 'CSD_PO' || dt === 'CPC_AUTH' || dt === 'AADHAAR')) {
    return 'Text match';
  }
  if ((f === 'invoice_value' || f === 'po_amount') && dt === 'CSD_PO') {
    return 'Amount comparison';
  }
  if ((f === 'invoice_value' || f === 'letter_amount') && dt === 'CPC_AUTH') {
    return 'Amount comparison';
  }
  if (f === 'govt_signatory_and_stamp_present' || f === 'stamp_sign_present' || f === 'signature_and_stamp') {
    return 'Signature / stamp vs expected';
  }
  if (f === 'mail_extraction') {
    return 'Email on document vs expected';
  }
  // Substring fallback, e.g. 'customer_name' hits the 'name' entry.
  const key = Object.keys(CRITERIA_MAP).find((k: string) => f.includes(k.toLowerCase()));
  return CRITERIA_MAP[key || ''] || 'Exact check';
}
/**
 * Normalize the match percentage from a persisted validation row, accepting
 * either the snake_case (`match_percentage`) or camelCase (`matchPercentage`)
 * column variant.
 *
 * @param found Raw field-result row, possibly null/undefined.
 * @returns Rounded integer percentage, or null when absent / empty / non-numeric.
 */
function matchPctFromResult(found: Record<string, unknown> | null | undefined): number | null {
  if (!found || typeof found !== 'object') return null;
  const { match_percentage, matchPercentage } = found as {
    match_percentage?: unknown;
    matchPercentage?: unknown;
  };
  // Snake-case wins when both variants are present.
  const raw = match_percentage != null ? match_percentage : matchPercentage;
  if (raw == null || raw === '') return null;
  const parsed = Number(raw);
  return Number.isFinite(parsed) ? Math.round(parsed) : null;
}
export class CpcHistoryService {
/**
 * Transforms a document into a detailed field result array.
 * Ensures that if a field was expected but not extracted, it still shows up as a fail.
 *
 * @param doc Persisted OCR document row. Reads both camelCase and snake_case
 *            variants of each column (documentType/document_type, msdPayload/msd_payload,
 *            extractedFields/extracted_fields, fieldResults/field_results).
 * @returns One entry per expected field with expected/extracted values, match
 *          percentage (emitted under both snake and camel keys), accuracy string,
 *          criteria label, pass flag, status and message.
 */
static getDetailedFieldResults(doc: any) {
// Keep the raw (upper/underscored) type for criteria labels; use a
// lowercased, space-separated form for the fuzzy matching below.
const rawDocTypeUpper = String(doc.documentType || doc.document_type || '').trim();
const rawType = rawDocTypeUpper.toLowerCase().replace(/_/g, ' ');
// Normalize type for internal lookup
let type = 'UNKNOWN';
if (rawType.includes('aadhaar') || rawType.includes('adhaar')) type = 'AADHAAR';
else if (rawType.includes('authority') || rawType.includes('auth') || rawType.includes('cpc letter')) type = 'AUTHORITY_LETTER';
else if (rawType.includes('invoice')) type = 'GENERIC_INVOICE';
// NOTE(review): the bare 'po' substring also matches unrelated words — presumably
// acceptable for the known document-type vocabulary; confirm against producers.
else if (rawType.includes('purchase order') || rawType.includes('csd_po') || rawType.includes('po'))
type = 'CSD_PO';
const hardcodedKeys = FIELD_DEFAULTS[type] || [];
// Read expected/extracted values from ALL possible variant keys
const expectedObj = doc.msdPayload || doc.msd_payload || {};
const extractedObj = doc.extractedFields || doc.extracted_fields || {};
const payloadKeys = Object.keys(expectedObj);
// Prefer the MSD payload's own keys; fall back to the per-type defaults when empty.
const expectedKeys = payloadKeys.length > 0 ? payloadKeys : hardcodedKeys;
const rawFr = doc.fieldResults ?? doc.field_results;
const existingResults = Array.isArray(rawFr) ? rawFr : [];
const finalResults = expectedKeys.map((key: string) => {
// Case-insensitive join between expected keys and persisted field results.
const found = existingResults.find((r: any) => r.field?.toLowerCase() === key.toLowerCase());
// NOTE(review): '||' drops falsy payload values (0, '') and falls back to the
// stored result / '-'; verify that is intended for numeric zero amounts.
const msdVal = expectedObj[key] || (found ? found.expected : '-');
const ocrVal = extractedObj[key] || (found ? (found.extracted || found.actual) : '-');
if (found) {
const mp = matchPctFromResult(found);
const mpNum = mp != null ? mp : 0;
const st = String((found as { status?: string }).status || '');
// Pass when explicitly flagged, or when the stored status says so.
const pass =
(found as { pass?: boolean }).pass === true ||
st === 'SUCCESSFUL' ||
st === 'MATCH';
// Spread the stored row first so the normalized fields below override it.
return {
...found,
field: key,
expected: String(msdVal),
extracted: String(ocrVal),
status: st || (found as { status?: string }).status,
match_percentage: mpNum,
matchPercentage: mpNum,
accuracy:
(found as { accuracy?: string }).accuracy ||
(mp != null ? `${mp}%` : `${mpNum}%`),
criteria: (found as { criteria?: string }).criteria || getCriteriaLabel(key, rawDocTypeUpper),
pass,
message:
(found as { reason?: string }).reason ||
(found as { message?: string }).message ||
(pass ? 'Matched' : 'Mismatch detected')
};
}
// Expected field with no persisted result: surface it as an explicit fail.
return {
field: key,
expected: String(msdVal),
extracted: String(ocrVal),
match_percentage: 0,
matchPercentage: 0,
accuracy: '0%',
criteria: getCriteriaLabel(key, rawDocTypeUpper),
pass: false,
status: 'MISSING',
message: 'Not found'
};
});
return finalResults;
}
/**
 * Builds the "Excel Screenshot Summary" row for one document.
 * Groups f1..f5 map to fixed summary columns (name, order/auth no., Aadhaar,
 * amount, stamp); the f1..f5 and *_group keys are part of the API response shape.
 */
static getSummaryRow(doc: any, idx: number) {
  const docTypeText = String(doc.documentType || doc.document_type || '')
    .toLowerCase()
    .trim()
    .replace(/_/g, ' ');
  const fieldResults = this.getDetailedFieldResults(doc);
  // Shared "not applicable" cell group used for columns the doc type lacks.
  const NA_GROUP = { msd: 'N.A.', ocr: 'N.A.', accuracy_pct: 'N.A.', criteria: 'N.A.', is_match: 'N.A.', isNA: true };
  // Locate a field result by its canonical key, then by any alternate key.
  const toGroup = (fieldKey: string, altKeys: string[] = []) => {
    let hit =
      fieldResults.find((r: any) => r.field.toLowerCase() === fieldKey.toLowerCase()) || null;
    if (!hit && altKeys.length > 0) {
      hit = fieldResults.find((r: any) => altKeys.some(ak => r.field.toLowerCase() === ak.toLowerCase())) || null;
    }
    if (!hit) return NA_GROUP;
    return {
      msd: hit.expected,
      ocr: hit.extracted,
      accuracy_pct: hit.accuracy,
      criteria: hit.criteria,
      is_match: hit.pass ? 'Yes' : 'No',
      isNA: false
    };
  };
  // Claim IDs beginning with "CPC" are CPC bookings; everything else is CSD.
  const bookingTypeLabel = doc.claimId?.startsWith('CPC') ? 'CPC' : 'CSD';
  const bookingNumber = doc.bookingId || doc.claimId || 'N/A';
  let f1, f2, f3, f4, f5;
  // Branch order matters: e.g. 'auth' must be checked before the generic 'po' test.
  if (docTypeText.includes('aadhaar') || docTypeText.includes('adhaar')) {
    f1 = toGroup('customer_name', ['name', 'authorized_person_name']);
    f2 = NA_GROUP;
    f3 = toGroup('aadhar_number', ['aadhaar_number']);
    f4 = NA_GROUP;
    f5 = NA_GROUP;
  } else if (docTypeText.includes('authority') || docTypeText.includes('auth') || docTypeText.includes('cpc letter')) {
    f1 = toGroup('customer_name', ['authorized_person_name', 'name']);
    f2 = toGroup('letter_number', [
      'order_or_auth_number',
      'letter_no',
      'order_or_authorisation_number'
    ]);
    f3 = NA_GROUP;
    f4 = toGroup('letter_amount', ['invoice_value', 'amount']);
    f5 = toGroup('signature_and_stamp', ['govt_signatory_and_stamp_present', 'stamp', 'signatory', 'stamp_sign_present']);
  } else if (docTypeText.includes('purchase order') || docTypeText.includes('csd_po') || docTypeText.includes('po')) {
    f1 = toGroup('customer_name');
    f2 = toGroup('po_number', ['order_or_authorisation_number', 'order_or_auth_number']);
    f3 = NA_GROUP;
    f4 = toGroup('po_amount', ['invoice_value', 'amount']);
    f5 = toGroup('signature_and_stamp', ['govt_signatory_and_stamp_present', 'stamp', 'signatory', 'stamp_sign_present']);
  } else if (docTypeText.includes('invoice')) {
    f1 = toGroup('customer_name');
    f2 = toGroup('order_or_auth_number', ['order_or_authorisation_number']);
    f3 = NA_GROUP;
    f4 = toGroup('invoice_value', ['tax_amount']);
    f5 = toGroup('govt_signatory_and_stamp_present', ['stamp', 'signatory', 'stamp_sign_present']);
  } else {
    // Unknown type: use the widest alias sets.
    f1 = toGroup('customer_name', ['name', 'authorized_person_name']);
    f2 = toGroup('order_or_auth_number', ['aadhaar_number', 'order_or_authorisation_number']);
    f3 = NA_GROUP;
    f4 = toGroup('invoice_value', ['amount']);
    f5 = toGroup('govt_signatory_and_stamp_present', ['stamp', 'signatory', 'stamp_sign_present']);
  }
  const statusUpper = String(doc.validationStatus || '').toUpperCase();
  const final_validation =
    statusUpper === 'SUCCESSFUL' || statusUpper === 'MATCH' || statusUpper === 'APPROVED' ? 'Successful' : 'Unsuccessful';
  return {
    booking_type: bookingTypeLabel,
    booking_number: bookingNumber,
    document_count: idx + 1,
    document_name: docTypeText.toUpperCase(),
    f1, f2, f3, f4, f5,
    customer_name_group: f1,
    po_or_auth_number_group: f2,
    aadhaar_number_group: f3,
    amount_group: f4,
    stamp_group: f5,
    field_results: fieldResults,
    final_validation,
    createdAt: doc.createdAt
  };
}
}

View File

@ -0,0 +1,44 @@
import { DocumentProcessorServiceClient } from "@google-cloud/documentai";
import logger from "@utils/logger";
/**
 * Thin wrapper around Google Cloud Document AI used for CPC OCR.
 * The service-account key path is read from GCP_KEY_FILE at construction time.
 */
export class CpcOcrService {
  private client: DocumentProcessorServiceClient;

  constructor() {
    this.client = new DocumentProcessorServiceClient({
      keyFilename: process.env.GCP_KEY_FILE
    });
  }

  /**
   * Runs a raw file buffer through a Document AI processor and returns the
   * extracted plain text (empty string when the processor yields none).
   *
   * @param params.mimeType Defaults to "application/pdf" when omitted.
   * @throws Re-throws any Document AI error after logging it.
   */
  async runDocAIOcr(params: {
    projectId: string,
    location: string,
    processorId: string,
    fileBuffer: Buffer,
    mimeType?: string
  }) {
    const { projectId, location, processorId, fileBuffer, mimeType } = params;
    const processorName = `projects/${projectId}/locations/${location}/processors/${processorId}`;
    logger.info(`[CpcOcrService] Running Document AI OCR for processor: ${processorId}`);
    const request = {
      name: processorName,
      rawDocument: {
        content: fileBuffer.toString("base64"),
        mimeType: mimeType || "application/pdf",
      },
    };
    try {
      const [response] = await this.client.processDocument(request);
      return { text: response?.document?.text || "" };
    } catch (error) {
      logger.error(`[CpcOcrService] Document AI Error: ${error instanceof Error ? error.message : String(error)}`);
      throw error;
    }
  }
}
// Shared singleton used by the rest of the backend.
export const cpcOcrService = new CpcOcrService();

View File

@ -0,0 +1,375 @@
import { calculateMatch, normalizePersonNameExtract } from './utils';
/** Optional guidance passed into rule-based extraction to bias which text is picked up. */
export type RuleExtractHints = {
/** MSD fields typed in UI — used to find the same text inside the PDF (no "Name:" label needed). */
msdPayload?: Record<string, unknown>;
/** When `CSD_PO`, prefer buyer/beneficiary lines (Sold To, Bill To, …) over the first generic `Name:` (often supplier). */
documentType?: string;
};
/**
* Regex-based extraction logic for CPC-CSD documents.
* Provides a lightweight alternative to Gemini for common patterns.
* Field names align with MSD payloads from the CPC dashboard (e.g. authority_letter).
*/
/** Escapes every regex metacharacter in `s` so it can be embedded literally in a RegExp. */
function escapeRegExp(s: string): string {
  const META = /[.*+?^${}()|[\]\\]/g;
  return s.replace(META, '\\$&');
}
/**
 * If the MSD name appears verbatim in the PDF text (whitespace flexible),
 * return the matched span with whitespace collapsed; otherwise null.
 */
function matchMsdNameInBody(body: string, expected: string): string | null {
  const needle = String(expected || '').trim();
  if (needle.length < 2) return null;
  // Escape regex metacharacters, then allow any whitespace run to match any other.
  const escaped = needle.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
  const hit = body.match(new RegExp(escaped.replace(/\s+/g, '\\s+'), 'i'));
  return hit ? hit[0].replace(/\s+/g, ' ').trim() : null;
}
/**
 * Finds the MSD name as a standalone token on an OCR line (table cells,
 * "Customer Arjun …") where a strict substring match often fails.
 * Returns the token as printed in the document, or null.
 */
function findMsdNameTokenInOcr(body: string, expected: string): string | null {
  const target = String(expected || '').trim();
  if (target.length < 2 || !body.trim()) return null;
  const targetLower = target.toLowerCase();
  // Short column-header-ish words that must never count as a name.
  const NOISE = /^(qty|ref|date|page|gst|hsn|po|no|id|by|to|of|in|at|sl|sr|index|desc|amount|total)$/i;
  for (const rawLine of body.split(/\r?\n/)) {
    const line = rawLine.trim();
    if (!line || line.length > 160) continue;
    if (line.toLowerCase() === targetLower) return line;
    for (const token of line.split(/[\s,;:|/<>()[\]]+/).filter(Boolean)) {
      // Strip leading/trailing punctuation while keeping Devanagari letters.
      const cleaned = token.replace(/^[^A-Za-z\u0900-\u097F0-9]+|[^A-Za-z\u0900-\u097F0-9]+$/g, '');
      if (cleaned.length < 2 || NOISE.test(cleaned)) continue;
      if (cleaned.toLowerCase() === targetLower) return cleaned;
    }
  }
  return null;
}
/**
 * Picks the short OCR line whose fuzzy score against the MSD name is highest
 * and at least `minScore` (authority letters often put the name on its own line).
 */
function pickNameLineByMsd(body: string, expected: string, minScore = 52): string | null {
  const target = String(expected || '').trim();
  if (target.length < 2 || !body.trim()) return null;
  // Letterhead / header-ish lines are never name candidates.
  const SKIP =
    /^(page|ref|no\.?|date|subject|to|from|dear|sir|madam|annex|schedule|authority|letter|royal|enfield|\d+\s*\/\s*\d+)/i;
  let bestLine: string | null = null;
  let bestScore = -1;
  for (const raw of body.split(/\r?\n/)) {
    const line = raw.trim();
    if (line.length <= 2 || line.length >= 120 || SKIP.test(line)) continue;
    const score = calculateMatch(target, line, 'authorized_person_name');
    if (score >= minScore && score > bestScore) {
      bestScore = score;
      bestLine = line;
    }
  }
  return bestLine;
}
/** Canonicalizes a PAN string (uppercase, whitespace removed); null unless it matches AAAAA9999A. */
function normalizePan(s: string): string | null {
  const candidate = String(s || '').toUpperCase().replace(/\s/g, '');
  return /^[A-Z]{5}[0-9]{4}[A-Z]$/.test(candidate) ? candidate : null;
}
/**
 * If the MSD-provided PAN appears in the PDF text (OCR may split it with
 * spaces/dashes), return the canonical PAN; otherwise null.
 */
function panFromMsdHint(body: string, msdPan: unknown): string | null {
  const canonical = String(msdPan ?? '').toUpperCase().replace(/\s/g, '');
  if (!body || !/^[A-Z]{5}[0-9]{4}[A-Z]$/.test(canonical)) return null;
  // Compare against the body with all whitespace and dashes squeezed out.
  const haystack = body.toUpperCase().replace(/[\s-]/g, '');
  return haystack.includes(canonical) ? canonical : null;
}
/**
 * If the MSD amount's integer digits occur anywhere in the body, return the
 * normalized digit string (for ±range matching); otherwise null.
 */
function invoiceDigitsFromMsdHint(body: string, msdAmt: unknown): string | null {
  const digits = String(msdAmt ?? '').replace(/[^\d.]/g, '');
  if (!digits) return null;
  const [intPart] = digits.split('.');
  // Require at least 2 integer digits to avoid trivial single-digit hits.
  const bodyDigits = body.replace(/[^\d]/g, '');
  return intPart.length >= 2 && bodyDigits.includes(intPart) ? digits : null;
}
/** Supplier / letterhead lines — not the CSD customer individual name. */
const RE_COMPANY_NAME_HINT =
/\b(LIMITED|LTD\.?|L\.?\s*L\.?\s*P\.?|PVT\.?\s*LTD|PRIVATE\s+LIMITED|PVT|PTE|INC\.?|CORP|CORPORATION|INDIA\s+LTD|MOTORS?|AUTOMOBILES?|DEALERS?|ENTERPRISES?|SALES\s*(?:&|AND)?\s*SERVICE|WORKS|AGENCIES)\b/i;

/** Heuristic: does this line read as a company/letterhead rather than a person's name? */
function looksLikeCompanyLine(s: string): boolean {
  const line = String(s || '').trim();
  if (!line) return false;
  // Known corporate suffixes / trade words.
  if (RE_COMPANY_NAME_HINT.test(line)) return true;
  // Long single-spaced ALL-CAPS runs read as letterhead text.
  return /^[A-Z0-9.&\s\-]{14,}$/.test(line) && !/\s{2,}/.test(line);
}
/** Cleans a captured buyer line: strips leading punctuation and trailing contact/tax columns. */
function trimBuyerCapture(raw: string): string {
  const stripped = String(raw || '')
    .replace(/\r/g, '')
    .trim()
    .replace(/^[:\-–—\s]+/, '');
  // Drop everything from the first contact/tax column keyword onwards.
  const [head] = stripped.split(/\b(?:GSTIN|PAN|Phone|Tel|Email|E-?mail|Mob|Mobile|Address|Qty|Quantity|Part)\b/i);
  return (head ?? stripped).trim().replace(/\s+/g, ' ').trim();
}
/** True when the hinted document type denotes a CSD purchase order. */
function isCsdPoHints(hints?: RuleExtractHints): boolean {
  const docType = String(hints?.documentType || '').toUpperCase();
  return ['CSD_PO', 'PURCHASE_ORDER'].some((tag) => docType.includes(tag));
}
/** Many CSD PO line-items print: 16-digit card/UIN then customer name then plot no / address (Description column). */
const RE_VEHICLE_TOKENS =
/^(ROYAL|ENFIELD|METEOR|CLASSIC|BULLET|HIMALAYAN|INTERCEPTOR|CONTINENTAL|STELLAR|THUNDER|BS-?VI|BSVI|SUPER|VARIANT|MODEL|CC|HP|ABS|QTY|HSN)$/i;

/** Heuristic: could this PO "Description" fragment be a person's name (not a vehicle/company token)? */
function isPlausibleHumanNameFromPoDescription(s: string): boolean {
  const candidate = String(s || '').replace(/\s+/g, ' ').trim();
  if (candidate.length < 3 || candidate.length > 72) return false;
  const words = candidate.split(/\s+/).filter(Boolean);
  if (words.length < 1 || words.length > 6) return false;
  if (looksLikeCompanyLine(candidate)) return false;
  // Any vehicle-model token disqualifies the whole fragment.
  if (words.some((w) => RE_VEHICLE_TOKENS.test(w))) return false;
  // At least one alphabetic (Latin or Devanagari) word of length >= 2.
  return words.some((w) => /^[A-Za-z\u0900-\u097F]{2,}$/.test(w));
}
/**
 * Pattern: `5312423002619089 KALAIYARASAN K 71` — 16 digits (optional spaces in
 * groups of 4), then name tokens, then often a short plot/house number or
 * newline/address. Returns the first plausible human name, or null.
 */
function extractCsdPoNameInDescriptionColumn(body: string): string | null {
  const text = body.replace(/\r\n/g, '\n').replace(/\u00a0/g, ' ');
  const seenOffsets = new Set<number>();
  // Spaced 4-4-4-4 groups first, then a compact 16-digit run.
  for (const re of [/\b\d{4}\s+\d{4}\s+\d{4}\s+\d{4}\b/g, /\b\d{16}\b/g]) {
    for (const hit of text.matchAll(re)) {
      const offset = hit.index ?? -1;
      const compact = hit[0].replace(/\s/g, '');
      if (compact.length !== 16 || !/^\d{16}$/.test(compact)) continue;
      if (seenOffsets.has(offset)) continue;
      seenOffsets.add(offset);
      // Text right after the card number — name tokens up to a short number / EOL.
      const tail = text.slice(offset + hit[0].length).replace(/^\s+/, '');
      let nameMatch = tail.match(
        /^([A-Za-z\u0900-\u097F]+(?:\s+[A-Za-z\u0900-\u097F]+){0,5})(?=\s+\d{1,4}\b|\s*\n|\s*$)/i
      );
      if (!nameMatch?.[1]) {
        // Looser single-token fallback, validated before use.
        const loose = tail.match(/^([A-Za-z\u0900-\u097F]{2,25})\b/i);
        if (loose?.[1] && isPlausibleHumanNameFromPoDescription(loose[1])) nameMatch = loose;
      }
      if (!nameMatch?.[1]) continue;
      const candidate = nameMatch[1].replace(/\s+/g, ' ').trim();
      if (isPlausibleHumanNameFromPoDescription(candidate)) {
        return candidate;
      }
    }
  }
  return null;
}
/**
 * CSD / defence-style POs usually put the customer under Sold To / Bill To / card holder,
 * not under the first "Name:" (often dealer contact). Returns the first clean
 * capture from the label patterns below, or null.
 */
function extractCsdPoBuyerFromLabels(body: string): string | null {
  const text = body.replace(/\r\n/g, '\n');
  const labelPatterns: RegExp[] = [
    /(?:^|\n)\s*Sold\s*To\s*[:\-]?\s*\n*\s*([^\n\r]{2,120})/i,
    /(?:^|\n)\s*Bill\s*To\s*[:\-]?\s*\n*\s*([^\n\r]{2,120})/i,
    /(?:^|\n)\s*Ship\s*To\s*[:\-]?\s*\n*\s*([^\n\r]{2,120})/i,
    /(?:^|\n)\s*Consignee\s*[:\-]?\s*\n*\s*([^\n\r]{2,120})/i,
    /(?:^|\n)\s*(?:Buyer|Purchaser)\s*[:\-]?\s*\n*\s*([^\n\r]{2,120})/i,
    /(?:^|\n)\s*Customer\s*(?:Name|Details)?\s*[:\-]?\s*\n*\s*([^\n\r]{2,120})/i,
    /(?:^|\n)\s*CSD\s*Card(?:\s*Holder)?\s*[:\-]?\s*\n*\s*([^\n\r]{2,120})/i,
    /(?:^|\n)\s*Card\s*Holder(?:\s*Name)?\s*[:\-]?\s*\n*\s*([^\n\r]{2,120})/i,
    /(?:^|\n)\s*Beneficiary\s*[:\-]?\s*\n*\s*([^\n\r]{2,120})/i,
    /(?:^|\n)\s*Name\s*of\s*(?:the\s*)?(?:Purchaser|Buyer|Customer)\s*[:\-]?\s*\n*\s*([^\n\r]{2,120})/i,
    /(?:^|\n)\s*(?:Ordered|Order)\s*(?:By|Placed\s*By)\s*[:\-]?\s*\n*\s*([^\n\r]{2,120})/i
  ];
  for (const pattern of labelPatterns) {
    const captured = text.match(pattern)?.[1];
    if (!captured) continue;
    const line = trimBuyerCapture(captured);
    // Reject over/under-sized captures and header-ish leading words.
    if (line.length < 2 || line.length > 100) continue;
    if (/^(page|date|amount|total|ref|subject)\b/i.test(line)) continue;
    return line;
  }
  return null;
}
/**
 * Rule/regex-based field extraction for CPC-CSD documents — a deterministic,
 * lightweight alternative to the Gemini/Vertex extractor. Output keys are
 * duplicated under every alias the validator recognizes so either extractor
 * can feed the same comparison pipeline.
 */
export class CpcRuleExtractService {
  /**
   * If Vertex returned a supplier-style string but OCR shows a clear buyer line, prefer the buyer line.
   *
   * @param ocrText Full OCR text of the document.
   * @param customerName Candidate name produced upstream (may be supplier/letterhead).
   * @returns The better of the two candidates, or null when neither is usable.
   */
  static refineCsdPoCustomerName(ocrText: string, customerName: unknown): string | null {
    const cur = String(customerName ?? '').trim();
    const text = String(ocrText || '');
    const fromDesc = extractCsdPoNameInDescriptionColumn(text);
    const fromLabels = extractCsdPoBuyerFromLabels(text);
    const buyer = fromDesc || fromLabels;
    if (!buyer) return cur.length >= 2 ? cur : null;
    if (!cur) return buyer;
    // Only swap in the buyer line when the current value looks corporate and the buyer does not.
    if (looksLikeCompanyLine(cur) && !looksLikeCompanyLine(buyer)) return buyer;
    return cur;
  }

  /**
   * Extracts common CPC/CSD fields (name, PAN, order/auth number, amount,
   * Aadhaar number, stamp presence) from OCR text via regex heuristics,
   * optionally guided by MSD hints (`hints.msdPayload`).
   *
   * @returns `{ extracted_fields, field_confidence }` with validator-alias keys.
   */
  static extractWithRules(ocrText: string, hints?: RuleExtractHints) {
    const t = String(ocrText || "");
    const msd = hints?.msdPayload || {};
    const isCsdPo = isCsdPoHints(hints);
    // Matches a 12-digit Aadhaar (optional spaces between groups of 4).
    // FIX: the lookbehind/lookahead reject digit runs extending past 12 digits,
    // so the first 12 digits of a spaced 16-digit CSD card/UIN (e.g.
    // "5312 4230 0261 9089") are no longer misread as an Aadhaar number.
    const aadhaarMatch = t.match(/\b(?<!\d\s)\d{4}\s?\d{4}\s?\d{4}\b(?!\s?\d)/);
    // Matches currency patterns
    const invoiceMatch = t.match(/(?:₹|Rs\.?|INR)\s?[\d,]+(?:\.\d{1,2})?/i);
    // Matches common order/auth patterns
    const orderMatch = t.match(/\b(?:PO|ORDER|AUTH|AUTHORIZATION)\s*[:\-]?\s*([A-Z0-9\-\/]{4,})/i);
    // Matches "Name: [Value]" / "Authorised Person" / applicant-style labels
    const nameMatch = t.match(/\bName\s*[:\-]\s*([A-Za-z][A-Za-z0-9\s.'-]{2,79})/i);
    const authPersonMatch = t.match(
      /\b(?:authorized|authorised)\s+person\s*[:\-]\s*([A-Za-z][A-Za-z0-9\s.'-]{2,79})/i
    );
    const applicantMatch = t.match(
      /\b(?:applicant|holder|customer|borrower|dealer)\s*[:\-]\s*([A-Za-z][A-Za-z0-9\s.'-]{2,79})/i
    );
    // For CSD POs, the description-column / buyer-label heuristics come first.
    let displayNameRaw = isCsdPo
      ? extractCsdPoNameInDescriptionColumn(t) || extractCsdPoBuyerFromLabels(t) || ''
      : '';
    if (!displayNameRaw) {
      displayNameRaw = (authPersonMatch?.[1] || nameMatch?.[1] || applicantMatch?.[1] || '').trim();
    }
    // MSD-guided: name often appears in body exactly as user typed (no label) — same idea as manual compare in CPC-CSD UI flow
    if (!displayNameRaw) {
      const fromAuth = msd.customer_name ?? msd.authorized_person_name ?? msd.name;
      const hint = String(fromAuth ?? '').trim();
      if (hint) {
        // Short hints get a looser fuzzy floor.
        const minFuzzy = hint.length <= 10 ? 40 : 52;
        displayNameRaw =
          matchMsdNameInBody(t, hint) ||
          findMsdNameTokenInOcr(t, hint) ||
          pickNameLineByMsd(t, hint, minFuzzy) ||
          '';
      }
    }
    // Title / ALL CAPS line fallback — include short single names (e.g. "Arjun") skipped by older rules
    if (!displayNameRaw) {
      const lines = t.split(/\r?\n/).map((l) => l.trim()).filter(Boolean);
      const noiseLine = /^(qty|ref|date|page|gst|hsn|po|no|id|total|amount|index|desc|sl)$/i;
      for (const line of lines) {
        if (line.length < 3 || line.length > 80) continue;
        if (noiseLine.test(line)) continue;
        if (/^(ref|date|subject|to|from|dear|page|annex|authority|letter|royal|enfield|cpc|csd)\b/i.test(line)) {
          continue;
        }
        if (isCsdPo && looksLikeCompanyLine(line)) {
          continue;
        }
        const words = line.split(/\s+/).filter(Boolean);
        // A lone alphabetic word that isn't a vehicle token or company-looking.
        const singleName =
          words.length === 1 &&
          /^[A-Za-z\u0900-\u097F]{2,25}$/.test(words[0]) &&
          !RE_VEHICLE_TOKENS.test(words[0]) &&
          !looksLikeCompanyLine(words[0]);
        const multiAllCaps =
          /^[A-Z][A-Z0-9\s.'-]{4,70}$/.test(line) && words.length >= 2;
        if (singleName || multiAllCaps) {
          displayNameRaw = line;
          break;
        }
        // Title-case 1-4 word line with at least one lowercase/Devanagari char.
        const titleCaseName =
          words.length >= 1 &&
          words.length <= 4 &&
          words.every((w) => /^[A-Za-z\u0900-\u097F]{2,}$/.test(w)) &&
          !words.some((w) => RE_VEHICLE_TOKENS.test(w)) &&
          line[0] === line[0].toUpperCase() &&
          /[a-z\u0900-\u097F]/.test(line) &&
          !looksLikeCompanyLine(line);
        if (titleCaseName && line.length <= 48) {
          displayNameRaw = line;
          break;
        }
      }
    }
    let displayName = displayNameRaw.length >= 2 ? displayNameRaw.replace(/\s+/g, ' ').trim() : null;
    if (isCsdPo && displayName) {
      displayName = CpcRuleExtractService.refineCsdPoCustomerName(t, displayName) ?? displayName;
    }
    if (displayName) {
      const n = normalizePersonNameExtract(displayName);
      if (n) displayName = n;
    }
    // PAN (Indian format) + MSD hint (PDF may lack strict word boundaries)
    const panFromRegex = t.match(/\b([A-Z]{5}[0-9]{4}[A-Z])\b/i);
    let panVal = panFromRegex ? String(panFromRegex[1]).toUpperCase() : null;
    if (!panVal && msd.pan_number != null) {
      panVal = panFromMsdHint(t, msd.pan_number);
    }
    // Numeric amount for range matching against MSD invoice_value
    const amountDigits = invoiceMatch
      ? String(invoiceMatch[0]).replace(/[^\d.]/g, '').replace(/^\.+|\.+$/g, '')
      : null;
    let invoiceValueNormalized =
      amountDigits && amountDigits.length ? amountDigits : null;
    if (!invoiceValueNormalized) {
      // Fall back to MSD-provided amounts when OCR lacks a currency marker.
      invoiceValueNormalized =
        invoiceDigitsFromMsdHint(t, msd.po_amount) ||
        invoiceDigitsFromMsdHint(t, msd.letter_amount) ||
        invoiceDigitsFromMsdHint(t, msd.invoice_value);
    }
    // NOTE: govtStampPresent is true whenever stampPresent is — the trailing
    // `|| stampPresent` makes the stricter government-stamp regex advisory only.
    const stampPresent = /(stamp|seal|authorized signatory|signature)/i.test(t);
    const govtStampPresent = /(govt\.?\s*stamp|government\s*seal|govt\.?\s*signatory|official\s*stamp|authorized\s*signatory)/i.test(t) || stampPresent;
    const stampYesNo = govtStampPresent ? 'yes' : 'no';
    const poOrOrder = orderMatch ? orderMatch[1].trim() : null;
    const aadhaarDigits = aadhaarMatch ? aadhaarMatch[0].replace(/\s/g, '').trim() : null;
    return {
      extracted_fields: {
        authorized_person_name: displayName,
        customer_name: displayName,
        pan_number: panVal,
        order_or_authorisation_number: poOrOrder,
        po_number: poOrOrder,
        order_or_auth_number: poOrOrder,
        invoice_value: invoiceValueNormalized,
        po_amount: invoiceValueNormalized,
        letter_amount: invoiceValueNormalized,
        aadhaar_number: aadhaarDigits,
        aadhar_number: aadhaarDigits,
        stamp_or_signatory_present: stampPresent,
        stamp_sign_present: stampPresent,
        govt_signatory_and_stamp_present: stampYesNo,
        signature_and_stamp: stampYesNo
      },
      field_confidence: {
        authorized_person_name: displayName ? 0.65 : 0.2,
        customer_name: displayName ? 0.65 : 0.2,
        pan_number: panVal ? 0.85 : 0.2,
        order_or_authorisation_number: orderMatch ? 0.7 : 0.2,
        po_number: orderMatch ? 0.7 : 0.2,
        order_or_auth_number: orderMatch ? 0.7 : 0.2,
        invoice_value: invoiceValueNormalized ? 0.7 : 0.2,
        po_amount: invoiceValueNormalized ? 0.7 : 0.2,
        letter_amount: invoiceValueNormalized ? 0.7 : 0.2,
        aadhaar_number: aadhaarMatch ? 0.85 : 0.2,
        aadhar_number: aadhaarMatch ? 0.85 : 0.2,
        stamp_or_signatory_present: stampPresent ? 0.55 : 0.3,
        stamp_sign_present: stampPresent ? 0.55 : 0.3,
        govt_signatory_and_stamp_present: govtStampPresent ? 0.55 : 0.3,
        signature_and_stamp: govtStampPresent ? 0.55 : 0.3
      }
    };
  }
}

View File

@ -0,0 +1,820 @@
import fs from 'fs';
import path from 'path';
import { VertexAI } from '@google-cloud/vertexai';
import {
calculateMatch,
canonicalizeRuleFieldKey,
digitsOnly,
isPersonalHolderNameField,
normalizeMoney,
normalizePersonNameExtract
} from './utils';
import { getCriteriaLabel } from './CpcHistoryService';
import logger from '@utils/logger';
/**
 * Vertex SDK does not read `GCP_KEY_FILE` by itself — must pass keyFilename (critical in Docker).
 * Returns the first existing candidate path (GOOGLE_APPLICATION_CREDENTIALS first,
 * then GCP_KEY_FILE resolved against the working directory), or undefined.
 */
function resolveVertexServiceAccountPath(): string | undefined {
  const adcPath = (process.env.GOOGLE_APPLICATION_CREDENTIALS || '').trim();
  const keyFilePath = (process.env.GCP_KEY_FILE || '').trim();
  const candidates: string[] = [];
  if (adcPath) candidates.push(adcPath);
  if (keyFilePath) candidates.push(path.resolve(process.cwd(), keyFilePath));
  for (const candidate of candidates) {
    try {
      if (fs.existsSync(candidate)) return path.resolve(candidate);
    } catch {
      /* ignore unreadable paths */
    }
  }
  return undefined;
}
/**
 * Decide which printed script Gemini should prefer when the document shows the same
 * field in English and Hindi. Driven by the user's MSD/form string: whichever script
 * (Devanagari vs Latin) has more code points wins; empty/neutral input defaults to Latin.
 */
function preferScriptForMsdFieldValue(value: unknown): 'Devanagari' | 'Latin' {
  const text = String(value ?? '').trim();
  if (!text) return 'Latin';
  try {
    const devanagariCount = (text.match(/\p{Script=Devanagari}/gu) || []).length;
    const latinCount = (text.match(/\p{Script=Latin}/gu) || []).length;
    if (devanagariCount === 0 && latinCount === 0) return 'Latin';
    return devanagariCount >= latinCount ? 'Devanagari' : 'Latin';
  } catch {
    // Engines without Unicode property escapes: raw Devanagari range test.
    return /[\u0900-\u097F]/.test(text) ? 'Devanagari' : 'Latin';
  }
}
/**
 * JSON block appended to the Vertex prompt: per-field prefer_script derived from
 * the MSD input language. Empty string when there is nothing useful to hint.
 */
function buildMsdScriptPreferenceBlock(
  expectedFields: string[],
  msdReferencePayload?: Record<string, unknown>
): string {
  if (!msdReferencePayload || typeof msdReferencePayload !== 'object') return '';
  // UI-supplied keys win; otherwise use every non-blank MSD key.
  const explicitKeys = [...new Set((expectedFields || []).map((f) => String(f || '').trim()).filter(Boolean))];
  const fieldKeys =
    explicitKeys.length > 0
      ? explicitKeys
      : Object.keys(msdReferencePayload).filter((k) => {
          const v = msdReferencePayload[k];
          return v !== undefined && v !== null && String(v).trim() !== '';
        });
  if (fieldKeys.length === 0) return '';
  const hints: Record<string, { prefer_script: 'Devanagari' | 'Latin' }> = {};
  for (const fieldKey of fieldKeys) {
    const rawValue = msdReferencePayload[fieldKey];
    if (rawValue === undefined || rawValue === null) continue;
    if (String(rawValue).trim() === '') continue;
    hints[fieldKey] = { prefer_script: preferScriptForMsdFieldValue(rawValue) };
  }
  if (Object.keys(hints).length === 0) return '';
  return `MSD_SCRIPT_PREFERENCE (per field: infer input language from MSD; when the document shows the same field in both English and Hindi, extract ONLY the on-page text whose script matches prefer_script for that key — do not translate; do not swap languages):\n${JSON.stringify(hints, null, 2)}\n`;
}
// Canonical document types after normalization in validateSrs; anything else falls back to GENERIC.
const VALID_DOC_TYPES = ['CSD_PO', 'CPC_AUTH', 'AADHAAR', 'RETAIL_INVOICE'] as const;
/**
 * Per-document-type, per-field matching rules (threshold % + method).
 * Field rules aligned with RE / Softude mail (Feb–Apr 2026):
 * - Rahul: CSD PO # 100% exact, amounts ±5, per-field all-pass (no average-based gate).
 * - Rohit table: customer / order (where fuzzy) 95%, invoice 98% OR ±5, stamp 85% fuzzy,
 * Aadhaar 12-digit 100%, retail invoice # 95%, document date 90%.
 * A `threshold: null` means the method itself decides (e.g. ±5 amount range).
 */
const DOCUMENT_RULES: any = {
/** CPC claim doc 2 */
'AADHAAR': {
'name': { threshold: 90, method: 'fuzzy' },
'customer_name': { threshold: 90, method: 'fuzzy' },
'aadhaar_number': { threshold: 100, method: 'exact_length_12' },
'aadhar_number': { threshold: 100, method: 'exact_length_12' },
'gender': { threshold: 100, method: 'exact' },
'mail_extraction': { threshold: 90, method: 'fuzzy' }
},
/** CPC claim doc 1 — authorization letter */
'CPC_AUTH': {
'authorized_person_name': { threshold: 90, method: 'fuzzy' },
'customer_name': { threshold: 90, method: 'fuzzy' },
'authority_grantor_name': { threshold: 90, method: 'fuzzy' },
'letter_number': { threshold: 90, method: 'fuzzy' },
'invoice_value': { threshold: null, method: 'range_5_or_fuzzy_98' },
'letter_amount': { threshold: null, method: 'range_5_or_fuzzy_98' },
'amount': { threshold: null, method: 'range_5_or_fuzzy_98' },
'pan_number': { threshold: 95, method: 'fuzzy' },
'order_or_authorisation_number': { threshold: 95, method: 'fuzzy' },
'stamp_sign_present': { threshold: 85, method: 'boolean_fuzzy_85' },
'govt_signatory_and_stamp_present': { threshold: 85, method: 'boolean_fuzzy_85' },
'signature_and_stamp': { threshold: 85, method: 'boolean_fuzzy_85' },
'mail_extraction': { threshold: 90, method: 'fuzzy' }
},
/** CSD — Purchase order: PO# remains exact 100% per Rahul; other fuzzy thresholds per Rohit table. */
'CSD_PO': {
'customer_name': { threshold: 90, method: 'fuzzy' },
'name': { threshold: 90, method: 'fuzzy' },
'order_or_authorisation_number': { threshold: 100, method: 'exact' },
'po_number': { threshold: 100, method: 'exact' },
'invoice_value': { threshold: null, method: 'range_5_or_fuzzy_98' },
'po_amount': { threshold: null, method: 'range_5_or_fuzzy_98' },
'vendor_name': { threshold: 95, method: 'fuzzy' },
'govt_signatory_and_stamp_present': { threshold: 85, method: 'boolean_fuzzy_85' },
'signature_and_stamp': { threshold: 85, method: 'boolean_fuzzy_85' },
'mail_extraction': { threshold: 90, method: 'fuzzy' }
},
/** Retail invoice — customer / invoice # 95% fuzzy, amount ±5 or 98% fuzzy. */
'RETAIL_INVOICE': {
'customer_name': { threshold: 95, method: 'fuzzy' },
'order_or_authorisation_number': { threshold: 95, method: 'fuzzy' },
'invoice_value': { threshold: null, method: 'range_5_or_fuzzy_98' },
'invoice_date': { threshold: 90, method: 'fuzzy' },
'vendor_name': { threshold: 95, method: 'fuzzy' },
'mail_extraction': { threshold: 90, method: 'fuzzy' }
},
/** Fallback for unrecognized document types — single catch-all rule. */
'GENERIC': {
'default': { threshold: 95, method: 'fuzzy' }
}
};
/** Human-readable `field_results.threshold` for API/UI (no percentage figures). */
function apiThresholdLabel(rule: { method?: string; threshold?: number | null }): string {
  switch (rule?.method) {
    case 'range_5_or_fuzzy_98':
    case 'range_5':
      return 'Amount comparison';
    case 'boolean_fuzzy_85':
    case 'boolean':
      return 'Stamp / signature';
    case 'exact_length_12':
      return 'Aadhaar number';
    case 'exact':
    case 'exact_numeric':
      return 'Exact match';
    case 'fuzzy':
      return 'Text match';
    default:
      return 'N/A';
  }
}
/** UI display label for an MSD field key; amount fields are renamed per document type. */
function msdFieldDisplayName(fieldKey: string, docType?: string): string {
  // Amount fields carry document-type-specific labels.
  if (fieldKey === 'invoice_value' || fieldKey === 'po_amount') {
    if (docType === 'CSD_PO') return 'PO Amount';
    if (docType === 'CPC_AUTH') return 'Letter Amount';
  }
  if (fieldKey === 'letter_amount') return 'Letter Amount';
  const LABELS: Record<string, string> = {
    authorized_person_name: 'Customer Name',
    customer_name: 'Customer Name',
    name: 'Customer Name',
    letter_number: 'Letter Number',
    po_number: 'PO Number',
    order_or_authorisation_number: 'PO Number',
    invoice_value: 'Document Amount',
    po_amount: 'PO Amount',
    amount: 'Letter Amount',
    aadhaar_number: 'Aadhaar Number',
    aadhar_number: 'Aadhaar Number',
    govt_signatory_and_stamp_present: 'Signature & Stamp',
    signature_and_stamp: 'Signature & Stamp',
    stamp_sign_present: 'Signature & Stamp',
    mail_extraction: 'Mail extraction',
    pan_number: 'PAN',
    vendor_name: 'Supplier Name',
    authority_grantor_name: 'Authority Grantor',
    gender: 'Gender'
  };
  // Unknown keys: humanize by replacing underscores.
  return LABELS[fieldKey] || fieldKey.replace(/_/g, ' ');
}
/** MSD-style operator-facing message for a missing or mismatched field. */
function buildMsdStyleMessage(fieldKey: string, status: string, docType?: string): string {
  const label = msdFieldDisplayName(fieldKey, docType);
  return status === 'MISSING'
    ? `According to the expected record and the document, the "${label}" could not be read from the document.\nKindly upload the document again or update the expected value.`
    : `According to the expected record and the document, the "${label}" does not match.\nKindly upload the document again or update the expected value.`;
}
/** Picks the most specific (longest) rule key contained in the canonicalized field key; 'default' otherwise. */
function pickRuleForKey(rules: Record<string, unknown>, key: string): string {
  const canonical = canonicalizeRuleFieldKey(key).toLowerCase();
  // Longest rule keys first so e.g. 'aadhaar_number' beats 'number'.
  const byLengthDesc = Object.keys(rules)
    .filter((ruleKey) => ruleKey !== 'default')
    .sort((a, b) => b.length - a.length);
  const match = byLengthDesc.find((ruleKey) => canonical.includes(ruleKey.toLowerCase()));
  return match || 'default';
}
/** True when both values parse to numbers whose absolute difference is ≤ diff (default ±5). */
function isWithinRange(valA: any, valB: any, diff: number = 5): boolean {
  const numA = Number.parseFloat(String(valA).replace(/[^0-9.]/g, ""));
  const numB = Number.parseFloat(String(valB).replace(/[^0-9.]/g, ""));
  if (Number.isNaN(numA) || Number.isNaN(numB)) return false;
  return Math.abs(numA - numB) <= diff;
}
/**
 * Heuristically detects Vertex AI errors caused by missing model access or an
 * unknown publisher model (404-style), so callers can fall back gracefully.
 *
 * Note: `&&` binds tighter than `||`, so the original unparenthesized
 * `a || b && c || …` already grouped as `a || (b && c) || …`; the explicit
 * parentheses below only make that intent readable (no behavior change).
 */
function isVertexModelAccessIssue(err: unknown): boolean {
  const e = err as { message?: string; name?: string; code?: number | string };
  // Fold name/message/code into one lowercase haystack for substring checks.
  const blob = `${e?.name || ''} ${e?.message || ''} ${String(e?.code || '')}`.toLowerCase();
  return (
    blob.includes('publisher model') ||
    (blob.includes('model') && blob.includes('not found')) ||
    blob.includes('does not have access') ||
    blob.includes('status: 404') ||
    blob.includes('code":404')
  );
}
export class CpcValidationService {
/**
* @param expectedFieldKeys When set (e.g. from UI row order), every listed key is validated MSD values may be empty (fails with clear reason) and keys are not dropped. When omitted, keys come from `msdPayload` (non-blank key names only).
*/
static validateSrs(
msdPayload: any,
extractedFields: any,
fieldConfidence: any = {},
docTypeAttr: string = 'generic_invoice',
claimId: string | null = null,
attemptNo: number = 1,
expectedFieldKeys?: string[] | null
) {
let normalizedDocType = (docTypeAttr || "generic_invoice").toUpperCase();
if (normalizedDocType === 'AADHAAR_CARD' || normalizedDocType === 'ADHAAR') normalizedDocType = 'AADHAAR';
if (normalizedDocType === 'AUTHORITY_LETTER' || normalizedDocType === 'CPC_LETTER') normalizedDocType = 'CPC_AUTH';
if (normalizedDocType === 'PURCHASE_ORDER' || normalizedDocType === 'PO') normalizedDocType = 'CSD_PO';
if (normalizedDocType === 'INVOICE' || normalizedDocType === 'GENERIC_INVOICE') normalizedDocType = 'RETAIL_INVOICE';
if (!VALID_DOC_TYPES.includes(normalizedDocType as any) && normalizedDocType !== 'GENERIC') {
logger.warn(`[CpcValidation] Unknown doc type "${docTypeAttr}" → falling back to GENERIC`);
}
const rules = DOCUMENT_RULES[normalizedDocType] || DOCUMENT_RULES.GENERIC;
const fieldResults: any[] = [];
const mismatchReasons: string[] = [];
let totalMatchPercent = 0;
let totalFields = 0;
let matchedCount = 0;
let mismatchedCount = 0;
let missingCount = 0;
const globalThreshold = 95;
const findNormalizedValue = (obj: any, targetKey: string) => {
const norm = (k: string) => k.toLowerCase().replace(/[\s_]/g, '');
const normTarget = norm(targetKey);
if (obj[targetKey] !== undefined) return obj[targetKey];
/** MSD field → alternate keys produced by rules / Gemini */
const synonymSources: Record<string, string[]> = {
authorized_person_name: ['customer_name', 'name', 'authorized_person_name', 'account_holder_name'],
customer_name: ['customer_name', 'name', 'authorized_person_name', 'account_holder_name', 'customername'],
name: ['authorized_person_name', 'customer_name', 'customername'],
pan_number: ['pan_number', 'pan', 'panno'],
invoice_value: ['invoice_value', 'amount', 'total_amount', 'total_value', 'po_amount', 'letter_amount'],
po_amount: ['po_amount', 'invoice_value', 'amount', 'total_amount', 'total_value'],
letter_amount: ['letter_amount', 'invoice_value', 'amount', 'total_amount', 'total_value'],
aadhaar_number: ['aadhaar_number', 'aadhar_number', 'aadhaar', 'aadhaarnumber', 'id_number'],
aadhar_number: ['aadhar_number', 'aadhaar_number', 'aadhaar', 'aadhaarnumber', 'id_number'],
letter_number: ['letter_number', 'order_or_auth_number', 'auth_number', 'auth_no'],
order_or_authorisation_number: ['order_or_authorisation_number', 'order_or_auth_number', 'po_number', 'order_number'],
po_number: ['po_number', 'order_or_authorisation_number', 'order_or_auth_number', 'order_number'],
govt_signatory_and_stamp_present: [
'govt_signatory_and_stamp_present',
'signature_and_stamp',
'stamp_sign_present',
'stamp_or_signatory_present'
],
signature_and_stamp: [
'signature_and_stamp',
'govt_signatory_and_stamp_present',
'stamp_sign_present',
'stamp_or_signatory_present'
],
mail_extraction: ['mail_extraction', 'email', 'registered_email', 'contact_email', 'buyer_email', 'correspondence_email']
};
for (const alt of synonymSources[targetKey] || []) {
if (obj[alt] !== undefined && obj[alt] !== null && String(obj[alt]).trim() !== '') {
return obj[alt];
}
}
const aliases: any = {
name: ['customername', 'customer_name', 'full_name', 'authorized_person_name', 'account_holder_name'],
customer_name: ['customername', 'name', 'full_name', 'authorized_person_name', 'account_holder_name'],
aadhaar_number: ['aadhaarnumber', 'aadhar_number', 'aadhar', 'aadhaar', 'id_number'],
aadhar_number: ['aadhaarnumber', 'aadhaar_number', 'aadhaar', 'id_number'],
invoice_value: ['total_amount', 'amount', 'total_value', 'po_amount', 'letter_amount'],
po_amount: ['invoice_value', 'total_amount', 'amount', 'total_value'],
letter_amount: ['invoice_value', 'amount', 'total_value'],
letter_number: ['order_or_auth_number', 'auth_number', 'auth_no'],
order_or_authorisation_number: ['order_or_auth_number', 'po_number', 'order_number'],
po_number: ['order_or_authorisation_number', 'order_or_auth_number', 'order_number'],
govt_signatory_and_stamp_present: ['stamp_sign_present', 'stamp_or_signatory_present', 'signature_and_stamp'],
signature_and_stamp: ['govt_signatory_and_stamp_present', 'stamp_sign_present', 'stamp_or_signatory_present'],
mail_extraction: ['email', 'e_mail', 'contactemail', 'correspondenceemail']
};
for (const k of Object.keys(obj)) {
const normKey = norm(k);
if (normKey === normTarget) return obj[k];
for (const [canonical, list] of Object.entries(aliases)) {
if (
norm(canonical) === normTarget &&
(list as string[]).some((a) => norm(a) === normKey)
) {
return obj[k];
}
}
}
return undefined;
};
const fromUi = Array.isArray(expectedFieldKeys)
? [...new Set(expectedFieldKeys.map((k) => String(k || '').trim()).filter(Boolean))]
: [];
const expectedKeys =
fromUi.length > 0
? fromUi
: Object.keys(msdPayload || {}).filter((k) => k && String(k).trim() !== '');
for (const key of expectedKeys) {
totalFields++;
const rawExpected = msdPayload?.[key];
const expectedStr =
rawExpected === null || rawExpected === undefined ? '' : String(rawExpected);
const msdValueEmpty =
expectedStr.trim() === '' || expectedStr.trim().toLowerCase() === 'null';
if (msdValueEmpty) {
const foundPeek = findNormalizedValue(extractedFields, key);
const confidence = fieldConfidence[key] || 0;
const label = msdFieldDisplayName(key, normalizedDocType);
mismatchReasons.push(
`According to the expected record, "${label}" was not provided. Enter the expected value to validate against the document.`
);
fieldResults.push({
field: key,
expected: '(not provided)',
extracted: foundPeek ?? null,
status: 'UNSUCCESSFUL',
match_percentage: 0,
threshold: 'N/A',
match_method: 'n/a',
extraction_confidence: confidence,
reason: 'Expected value was empty — enter a value to compare with the document.',
criteria: getCriteriaLabel(key, normalizedDocType)
});
mismatchedCount++;
continue;
}
let expected = rawExpected;
let found = findNormalizedValue(extractedFields, key);
const confidence = fieldConfidence[key] || 0;
if (isPersonalHolderNameField(key)) {
const en = normalizePersonNameExtract(String(expected ?? ''));
if (en) expected = en as typeof rawExpected;
if (found !== undefined && found !== null) {
const fn = normalizePersonNameExtract(String(found));
if (fn) found = fn as typeof found;
}
}
const ruleKey = pickRuleForKey(rules as Record<string, unknown>, key);
const rule = rules[ruleKey] || rules.default || DOCUMENT_RULES.GENERIC.default;
let matchPercent = 0;
let isPass = false;
let status = "UNSUCCESSFUL";
let reason = null;
if (found === undefined || found === null || String(found).trim() === "" || String(found).toLowerCase() === "null") {
status = "MISSING";
reason = "Field not found in document";
missingCount++;
} else {
if (rule.method === 'exact_numeric') {
const numExp = parseFloat(String(expected).replace(/[^0-9.]/g, ''));
const numFnd = parseFloat(String(found).replace(/[^0-9.]/g, ''));
isPass = !isNaN(numExp) && !isNaN(numFnd) && Math.round(numExp) === Math.round(numFnd);
matchPercent = isPass ? 100 : 0;
} else if (rule.method === 'exact') {
const normExp = String(expected).trim().toLowerCase().replace(/[\s\-\/]+/g, '');
const normFnd = String(found).trim().toLowerCase().replace(/[\s\-\/]+/g, '');
isPass = normExp === normFnd;
matchPercent = isPass ? 100 : 0;
} else if (rule.method === 'range_5') {
isPass = isWithinRange(expected, found, 5);
matchPercent = isPass ? 100 : 0;
} else if (rule.method === 'range_5_or_fuzzy_98') {
const inRange = isWithinRange(expected, found, 5);
const expM = normalizeMoney(String(expected));
const fndM = normalizeMoney(String(found));
const fuzzyMoney =
expM && fndM ? calculateMatch(expM, fndM, key) : calculateMatch(String(expected), String(found), key);
isPass = inRange || fuzzyMoney >= 98;
matchPercent = inRange ? 100 : fuzzyMoney;
} else if (rule.method === 'boolean') {
const normBool = (v: unknown) => {
const t = String(v ?? '')
.toLowerCase()
.trim();
if (/\b(yes|true|1|present|available|signed)\b/.test(t)) return 'pos';
if (/\b(no|false|0|absent|not\s*available|unavailable|n\/a)\b/.test(t)) return 'neg';
return 'unk';
};
const ePol = normBool(expected);
const fPol = normBool(found);
if (ePol !== 'unk' && fPol !== 'unk') {
isPass = ePol === fPol;
} else {
isPass =
String(expected).trim().toLowerCase() === String(found).trim().toLowerCase();
}
matchPercent = isPass ? 100 : 0;
} else if (rule.method === 'boolean_fuzzy_85') {
const expand = (v: unknown) => {
const t = String(v ?? '').toLowerCase();
if (/\b(yes|true|1|present|available|signed)\b/.test(t)) return 'available';
if (/\b(no|false|0|absent|not\s*available|unavailable|n\/a)\b/.test(t)) return 'not available';
return String(v ?? '')
.trim()
.toLowerCase();
};
const ex = expand(expected);
const fd = expand(found);
matchPercent = calculateMatch(ex, fd, key);
isPass = matchPercent >= 85;
} else if (rule.method === 'exact_length_12') {
const dExp = String(expected).replace(/\D/g, "");
const dFnd = String(found).replace(/\D/g, "");
isPass = (dExp === dFnd && dFnd.length === 12);
matchPercent = isPass ? 100 : 0;
} else if (rule.threshold === 100) {
matchPercent = String(expected).trim().toLowerCase() === String(found).trim().toLowerCase() ? 100 : 0;
isPass = (matchPercent === 100);
} else {
matchPercent = calculateMatch(expected, found, key);
isPass = (matchPercent >= (rule.threshold || globalThreshold));
}
if (isPass) {
status = "SUCCESSFUL";
matchedCount++;
} else {
status = "UNSUCCESSFUL";
reason = 'Value does not match expected';
mismatchedCount++;
}
}
totalMatchPercent += matchPercent;
if (status !== "SUCCESSFUL") {
mismatchReasons.push(buildMsdStyleMessage(key, status, normalizedDocType));
}
fieldResults.push({
field: key,
expected: expected,
extracted: found || null,
status: status,
match_percentage: matchPercent,
threshold: apiThresholdLabel(rule),
match_method: rule.method,
extraction_confidence: confidence,
reason: reason,
criteria: getCriteriaLabel(key, normalizedDocType)
});
}
/** MSD: success only if every expected field passes its own rule (no averaging). */
const allFieldsPass =
totalFields > 0 && mismatchedCount === 0 && missingCount === 0 && matchedCount === totalFields;
const overallAccuracy = totalFields > 0 ? Math.round(totalMatchPercent / totalFields) : 0;
const displayMatchPercent = allFieldsPass ? 100 : overallAccuracy;
const hasMissing = missingCount > 0;
const overallValidationStatus = hasMissing
? "NEED_MANUAL"
: allFieldsPass
? "MATCH"
: "MISMATCH";
const overallStatus = overallValidationStatus === "MATCH" ? "SUCCESSFUL" : "UNSUCCESSFUL";
return {
claim_id: claimId,
attempt_no: attemptNo,
status: overallStatus,
validation_status: overallValidationStatus,
match_percentage: displayMatchPercent,
overall_match_percentage: displayMatchPercent,
threshold: 100,
all_fields_passed: allFieldsPass,
mismatch_summary: {
total_expected_fields: totalFields,
matched: matchedCount,
mismatched: mismatchedCount,
missing: missingCount,
all_fields_passed: allFieldsPass
},
mismatch_reasons: mismatchReasons,
field_results: fieldResults
};
}
/**
 * Extracts document fields via Gemini on Vertex AI.
 *
 * Builds the extraction prompt (optionally attaching the raw file inline so the
 * model can read the document directly), then tries the primary model/location
 * followed by one configurable fallback when the primary model is unavailable.
 * The parsed result is guaranteed to contain every `expectedFields` key
 * (missing ones are filled with null).
 *
 * Fix notes: removed the dead top-level `vertexInit` object (it was built but
 * never used — each attempt constructs its own init), and the destructured
 * `msdReferencePayload` is now used instead of re-reading `params`.
 *
 * @returns Parsed `{ extracted_fields, field_confidence }` from the model.
 * @throws The last Vertex/parsing error when every attempt fails.
 */
static async extractWithGemini(params: {
  projectId: string;
  location: string;
  modelName?: string;
  documentType: string;
  ocrText?: string;
  fileBuffer?: Buffer;
  mimeType?: string;
  expectedFields?: string[];
  /** MSD / form values — passed into prompt so Gemini aligns labels with user input (no secrets; same as document check). */
  msdReferencePayload?: Record<string, unknown>;
}) {
  const {
    projectId,
    location,
    modelName,
    documentType,
    ocrText,
    fileBuffer,
    mimeType,
    expectedFields = [],
    msdReferencePayload
  } = params;
  const saPath = resolveVertexServiceAccountPath();
  if (saPath) {
    logger.info(`[CpcValidation] Vertex AI using service account file: ${saPath}`);
  } else {
    logger.warn(
      '[CpcValidation] No GCP_KEY_FILE / GOOGLE_APPLICATION_CREDENTIALS on disk — Vertex uses ADC only (often empty inside Docker).'
    );
  }
  // Model resolution precedence: explicit argument > env overrides > default.
  const usedModel =
    (modelName && String(modelName).trim()) ||
    process.env.GEMINI_MODEL?.trim() ||
    process.env.VERTEX_AI_MODEL?.trim() ||
    'gemini-1.5-flash';
  const promptText = this.buildPrompt(documentType, ocrText || "", expectedFields, msdReferencePayload);
  const parts: any[] = [{ text: promptText }];
  if (fileBuffer) {
    // Attach the original document inline — OCR text may be partial or absent.
    parts.push({
      inlineData: {
        mimeType: mimeType || "application/pdf",
        data: fileBuffer.toString("base64")
      }
    });
  }
  const fallbackLocation = (process.env.CPC_VERTEX_FALLBACK_LOCATION || 'us-central1').trim();
  const fallbackModel = (process.env.CPC_VERTEX_FALLBACK_MODEL || 'gemini-2.0-flash-lite').trim();
  // De-duplicate so a fallback identical to the primary is not attempted twice.
  const attempts = [
    { location, model: usedModel, label: 'primary' },
    { location: fallbackLocation, model: fallbackModel, label: 'fallback' }
  ].filter((a, i, arr) => arr.findIndex((x) => x.location === a.location && x.model === a.model) === i);
  let lastErr: unknown;
  for (let idx = 0; idx < attempts.length; idx++) {
    const attempt = attempts[idx];
    const attemptVertexInit: ConstructorParameters<typeof VertexAI>[0] = {
      project: projectId,
      location: attempt.location
    };
    if (saPath) {
      (attemptVertexInit as { googleAuthOptions?: { keyFilename: string } }).googleAuthOptions = {
        keyFilename: saPath
      };
    }
    const vertexAI = new VertexAI(attemptVertexInit);
    const model = vertexAI.getGenerativeModel({ model: attempt.model });
    try {
      if (idx > 0) {
        logger.warn(
          `[CpcValidation] Retrying Vertex extraction using ${attempt.label} model/location (${attempt.model} @ ${attempt.location})`
        );
      }
      const resp = await model.generateContent({
        contents: [{ role: 'user', parts }],
        generationConfig: {
          temperature: 0.1,
          maxOutputTokens: Math.min(
            8192,
            parseInt(process.env.CPC_VERTEX_MAX_OUTPUT_TOKENS || '8192', 10) || 8192
          )
        }
      });
      const cand = resp?.response?.candidates?.[0] as { finishReason?: string; content?: { parts?: unknown[] } } | undefined;
      if (cand?.finishReason && cand.finishReason !== 'STOP') {
        // e.g. MAX_TOKENS / SAFETY — output may be truncated; log for triage.
        logger.warn(`[CpcValidation] Gemini finishReason=${cand.finishReason}`);
      }
      const out =
        cand?.content?.parts?.map((p: any) => (typeof p?.text === 'string' ? p.text : '')).join('') || '';
      if (!out) throw new Error('EMPTY_AI_RESPONSE');
      const parsed = this.parseJsonLoose(out);
      // Guarantee every user-requested key exists in the result (null when absent).
      const merged: Record<string, unknown> = { ...(parsed.extracted_fields || {}) };
      const lockKeys = [...new Set(expectedFields.map((k) => String(k || '').trim()).filter(Boolean))];
      for (const k of lockKeys) {
        if (!(k in merged)) merged[k] = null;
      }
      parsed.extracted_fields = merged;
      const keys = Object.keys(parsed.extracted_fields || {});
      if (keys.length === 0) {
        logger.warn('[CpcValidation] Gemini returned empty extracted_fields; raw head: ' + out.slice(0, 400));
      }
      return parsed;
    } catch (error) {
      lastErr = error;
      // Only fall through to the next attempt on model-access problems; other
      // failures (bad payload, parse errors) are terminal.
      const shouldRetry = idx < attempts.length - 1 && isVertexModelAccessIssue(error);
      if (shouldRetry) {
        logger.warn(
          `[CpcValidation] Vertex attempt failed for ${attempt.model} @ ${attempt.location}. Trying fallback...`,
          error
        );
        continue;
      }
      logger.error("Gemini Extraction Error:", error);
      throw error;
    }
  }
  throw lastErr || new Error('AI_EXTRACTION_FAILED: Vertex extraction failed');
}
/**
 * Builds the Gemini extraction prompt for one CPC/CSD document.
 *
 * Selects a JSON schema for the reply: user-supplied `expectedFields` override
 * everything; otherwise a per-document-type default (Aadhaar / CSD PO / retail
 * invoice / authority letter). The returned string embeds the schema, field
 * instructions, optional MSD reference values, and up to 20k chars of OCR text.
 *
 * @param documentType Raw doc-type string; matched case-insensitively below.
 * @param ocrText OCR text to embed (may be empty — model then reads the attachment).
 * @param expectedFields Keys the model MUST return ("locked" keys).
 * @param msdReferencePayload User-entered MSD/form values, serialized into the prompt.
 */
private static buildPrompt(
documentType: string,
ocrText: string,
expectedFields: string[] = [],
msdReferencePayload?: Record<string, unknown>
) {
// Lowercased copy for substring checks; rawDocType keeps original text for the regex tests.
const dt = documentType.toLowerCase();
const rawDocType = String(documentType || '');
const isAadhaar = dt.includes('aadhaar');
const isInvoice = dt.includes('invoice') || dt.includes('retail');
/** Avoid `includes('po')` — false positives on unrelated doc type strings. */
const isCsdPo =
/\bcsd[_\s-]*po\b/i.test(rawDocType) ||
/\bpurchase[_\s-]*order\b/i.test(rawDocType) ||
/^\s*PO\s*$/i.test(rawDocType.trim());
const isAuthorityDoc =
dt.includes('authority') ||
dt.includes('cpc_auth') ||
dt.includes('auth_letter') ||
dt.includes('authority_letter') ||
dt.includes('cpc_letter');
// Schema skeleton the model must echo back as JSON.
const schema: any = {
extracted_fields: {},
field_confidence: {}
};
// User-requested keys take precedence over per-doc-type defaults.
const userLockedKeys = [...new Set((expectedFields || []).map((f) => String(f || '').trim()).filter(Boolean))];
if (userLockedKeys.length > 0) {
userLockedKeys.forEach((f) => {
schema.extracted_fields[f] = 'string|null';
});
} else if (isAadhaar) {
// Aadhaar defaults — includes legacy aliases (name / aadhaar_number).
schema.extracted_fields = {
customer_name: 'string',
aadhar_number: 'string',
name: 'string|null',
dob: 'string',
gender: 'string',
address: 'string',
aadhaar_number: 'string|null'
};
} else if (isCsdPo) {
// CSD purchase-order defaults — legacy alias keys kept nullable.
schema.extracted_fields = {
customer_name: 'string',
po_number: 'string',
po_amount: 'string',
signature_and_stamp: 'string|boolean',
vendor_name: 'string',
invoice_date: 'string',
order_or_authorisation_number: 'string|null',
invoice_value: 'string|null',
govt_signatory_and_stamp_present: 'string|boolean|null'
};
} else if (isInvoice) {
schema.extracted_fields = {
customer_name: 'string',
order_or_authorisation_number: 'string',
invoice_value: 'string',
invoice_date: 'string',
vendor_name: 'string'
};
} else if (isAuthorityDoc) {
// Authority / CPC letter defaults — widest schema; many optional mirrors.
schema.extracted_fields = {
customer_name: 'string',
letter_number: 'string|null',
letter_amount: 'string|null',
signature_and_stamp: 'string|boolean|null',
authorized_person_name: 'string|null',
authority_grantor_name: 'string',
valid_until: 'string',
purpose: 'string',
date_of_issue: 'string',
pan_number: 'string|null',
order_or_authorisation_number: 'string|null',
amount: 'string|null',
invoice_value: 'string|null',
stamp_sign_present: 'string|boolean|null',
govt_signatory_and_stamp_present: 'string|boolean|null'
};
}
// Mirror every schema key into field_confidence so the model reports per-field confidence.
Object.keys(schema.extracted_fields).forEach(key => {
schema.field_confidence[key] = "number (0-1)";
});
// Serialize MSD reference values only when non-empty.
const msdRef =
msdReferencePayload &&
typeof msdReferencePayload === 'object' &&
Object.keys(msdReferencePayload).length > 0
? JSON.stringify(msdReferencePayload, null, 2)
: '';
const scriptPrefBlock = buildMsdScriptPreferenceBlock(userLockedKeys, msdReferencePayload);
// NOTE: template below is emitted verbatim to the model — do not reformat.
return `
Return ONLY valid JSON (no markdown).
Schema:
${JSON.stringify(schema, null, 2)}
Instructions:
Extract fields based on the provided document_type.
${userLockedKeys.length > 0
? `MANDATORY_KEYS: Your JSON property "extracted_fields" MUST contain exactly these keys (same spelling, no extras): ${userLockedKeys.join(', ')}. Use null only when that value is not visible on the document image/PDF.`
: ''}
${userLockedKeys.length > 0
? `EXTRACTION REQUEST: Extract only what is needed for those keys; do not invent keys outside the list.`
: ''}
${msdRef ? `REFERENCE_VALUES (from the user's form — use to locate the correct rows/labels on the document; values in extracted_fields must match what is visibly printed on the PDF/image, not invented):\n${msdRef}\n` : ''}
${scriptPrefBlock}
BILINGUAL_FORMS: Indian CPC/CSD forms often print the same label in English and Hindi. For each key in MSD_SCRIPT_PREFERENCE (if present), the MSD value shows which language the user entered prefer_script is Devanagari (Hindi script) vs Latin (English). When both languages appear for that field on the image/PDF, copy the value whose script matches prefer_script. When only one script is visible, extract that visible value. Never return the other language if both are printed and MSD is clearly single-script. Numeric-only fields (amounts, IDs): use digits as printed; script rule applies mainly to name and free-text fields.
For Aadhaar: customer_name (holder name), aadhar_number (12 digits, no spaces preferred), optional dob (DDMMYYYY), gender, address. You may also populate legacy keys name and aadhaar_number if visible.
NAME_LINE_VS_MSD: When the printed name includes a relation suffix (S/O, D/O, W/O, C/O, Son of, ) after the holder's name, if REFERENCE_VALUES show the same person's name without that suffix, return only that shorter holder name for customer_name / name / authorized_person_name (do not append the S/O clause).
HOLDER_NAME_NO_TITLES: For customer_name, name, and authorized_person_name only return the person's given name tokens as printed (Latin or Devanagari per script rules). Do NOT include salutations or ranks (Mr, Mrs, Ms, Dr, Prof, Sir, Shri, Smt, Kumari, Lt, Captain, Major, Colonel, General, Admiral, Wing Commander, Group Captain, etc.). Do NOT include relation lines (S/O, D/O, W/O, C/O, Son of, …) or father's name after the holder name; only the holder's own name span.
CRITICAL: For 'address', extract ONLY the physical location details.
${isCsdPo
? `For CSD Purchase Order: extract po_number (PO reference — exact text), po_amount (digits only, rupees), vendor_name (supplier/dealer company from letterhead or From/Supplier block), customer_name (the human buyer / beneficiary — NOT the dealer company name), invoice_date, signature_and_stamp as yes/no (official stamp or authorized signatory visible). Legacy keys order_or_authorisation_number, invoice_value, govt_signatory_and_stamp_present may be filled with the same values if present.
For customer_name, read the value beside or under labels such as: Sold To, Bill To, Ship To, Consignee, Buyer, Purchaser, Customer, CSD Card / Card Holder, Beneficiary, Name of Purchaser/Buyer, Ordered By. Do NOT use the first generic "Name:" on the page if it sits under supplier/dealer details or is clearly a sales contact.
Many CSD PO line tables put the beneficiary in the Description column as: a 16-digit number (card/UIN style) immediately followed by the person's name (then often a house/plot number and address). Prefer that name for customer_name when present.
${expectedFields.some((f) => String(f).toLowerCase() === 'customer_name') ? "CRITICAL: The JSON key customer_name must hold the printed buyer/beneficiary person name from the PO (what the user typed in customer_name). Put the supplying company's legal name only under vendor_name when that key exists; never put the dealer letterhead name in customer_name." : ''}`
: ''}
${isInvoice ? 'For Retail Invoice: customer name, invoice amount (numeric only, exclude currency symbol), order/authorisation number, vendor name, and date.' : ''}
${isAuthorityDoc
? 'For CPC / Authorization Letter: extract customer_name (person being authorized), letter_number, letter_amount (numeric), signature_and_stamp yes/no (stamp/signature visible). Also extract authority grantor, dates, purpose, PAN if visible when those keys exist in the schema. Legacy keys authorized_person_name, invoice_value, govt_signatory_and_stamp_present may mirror the same values.'
: ''}
${userLockedKeys.some((f) => String(f).toLowerCase() === 'mail_extraction')
? "If 'mail_extraction' is requested: extract the email address or mail reference line visible on the document (official correspondence / contact email). Put the primary value in extracted_fields.mail_extraction."
: ''}
If a field name like 'pan_number' is requested, look for a 10-character alphanumeric string (5 letters, 4 digits, 1 letter).
For 'govt_signatory_and_stamp_present' or 'signature_and_stamp', check if the document has an official stamp or authorized signatory mark and return "yes" or "no".
document_type: ${documentType}
OCR_TEXT:
"""${ocrText ? ocrText.slice(0, 20000) : "No OCR text provided. Please extract directly from the provided document image/PDF."}"""
`;
}
/**
 * Tolerantly parses the model's JSON reply: strips optional markdown fences,
 * extracts the outermost {...} span, and accepts either the documented
 * `{ extracted_fields, field_confidence }` envelope or a flat key/value object.
 *
 * @param text Raw LLM output.
 * @returns Normalized `{ extracted_fields, field_confidence }` pair.
 * @throws Error "AI_EXTRACTION_FAILED: ..." when no JSON object is present or it fails to parse.
 */
private static parseJsonLoose(text: string): { extracted_fields: Record<string, unknown>; field_confidence: Record<string, unknown> } {
  let s = String(text || '').trim();
  // Remove ```json ... ``` fencing some models wrap around the payload.
  s = s.replace(/^```(?:json)?\s*/i, '').replace(/\s*```\s*$/i, '');
  const a = s.indexOf('{');
  const b = s.lastIndexOf('}');
  // Guard both ends: previously only `a` was checked, so a missing or misplaced
  // '}' produced an empty slice and a misleading "Invalid JSON" error.
  if (a === -1 || b === -1 || b < a) {
    throw new Error('AI_EXTRACTION_FAILED: No JSON object found in LLM response');
  }
  let parsed: Record<string, unknown>;
  try {
    parsed = JSON.parse(s.slice(a, b + 1)) as Record<string, unknown>;
  } catch {
    throw new Error('AI_EXTRACTION_FAILED: Invalid JSON from model');
  }
  const nested = parsed.extracted_fields;
  if (nested && typeof nested === 'object' && !Array.isArray(nested)) {
    return {
      extracted_fields: nested as Record<string, unknown>,
      field_confidence:
        parsed.field_confidence && typeof parsed.field_confidence === 'object'
          ? (parsed.field_confidence as Record<string, unknown>)
          : {}
    };
  }
  // Model sometimes returns flat keys instead of { extracted_fields: { ... } }
  const fc =
    parsed.field_confidence && typeof parsed.field_confidence === 'object'
      ? (parsed.field_confidence as Record<string, unknown>)
      : {};
  const ef: Record<string, unknown> = { ...parsed };
  delete ef.field_confidence;
  delete ef.extracted_fields;
  return { extracted_fields: ef, field_confidence: fc };
}
}

View File

@ -0,0 +1,55 @@
import { sequelize } from '@config/database';
import logger from '@utils/logger';
/**
 * Idempotent startup guard for the CPC-CSD tables: creates cpc_documents,
 * cpc_audit_logs, and the unique claim/attempt/booking index when absent, so a
 * fresh database can still serve CPC routes even if the migrations runner was
 * skipped once. Failures are logged but never thrown — app boot must not block.
 */
export async function ensureCpcCdcSchema(): Promise<void> {
  // Statements execute strictly in order: tables first, then the index swap.
  const ddlStatements: string[] = [
    `
CREATE TABLE IF NOT EXISTS cpc_documents (
id UUID NOT NULL PRIMARY KEY,
booking_id VARCHAR(255),
claim_id VARCHAR(255),
attempt_no INTEGER NOT NULL DEFAULT 1,
document_type VARCHAR(255),
document_gcp_url TEXT,
provider VARCHAR(255),
msd_payload JSONB,
extracted_fields JSONB,
field_confidence JSONB,
validation_status VARCHAR(255),
match_percentage DOUBLE PRECISION,
mismatch_reasons JSONB,
field_results JSONB,
ip_address VARCHAR(255),
created_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW()
);
`,
    `
CREATE TABLE IF NOT EXISTS cpc_audit_logs (
id UUID NOT NULL PRIMARY KEY,
document_id UUID NOT NULL REFERENCES cpc_documents(id) ON DELETE CASCADE,
action VARCHAR(255) NOT NULL,
previous_state JSONB,
new_state JSONB,
performed_by VARCHAR(255),
remarks TEXT,
created_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW()
);
`,
    // Replace the legacy index name with the booking-aware unique index.
    `DROP INDEX IF EXISTS unique_cpc_document_attempt;`,
    `
CREATE UNIQUE INDEX IF NOT EXISTS unique_cpc_document_claim_attempt_booking
ON cpc_documents (claim_id, attempt_no, booking_id);
`
  ];
  try {
    for (const ddl of ddlStatements) {
      await sequelize.query(ddl);
    }
    logger.info('[CPC-CSD] Schema check complete (cpc_documents / cpc_audit_logs).');
  } catch (err) {
    logger.error('[CPC-CSD] ensureCpcCdcSchema failed — run `npm run migrate` in re-workflow-be.', err);
    // Do not block app boot; CPC routes will error until DB is fixed.
  }
}

View File

@ -0,0 +1,22 @@
/**
 * Pull plain text from a PDF buffer for CPC RULES / regex extraction when Document AI is off.
 *
 * Best-effort by design: returns '' for empty input or when pdf-parse is
 * unavailable/fails, so callers can fall back to image/LLM extraction without
 * special-casing errors.
 *
 * Fix note: removed leftover debug telemetry — fire-and-forget `fetch` calls to
 * a 127.0.0.1 agent-log endpoint with a hard-coded session id were left in from
 * a debugging session and must not ship.
 */
export async function extractPdfTextFromBuffer(buffer: Buffer): Promise<string> {
  if (!buffer?.length) return '';
  try {
    // Lazy import keeps pdf-parse off the startup path for deployments using Document AI.
    const { PDFParse } = await import('pdf-parse');
    const parser = new PDFParse({ data: new Uint8Array(buffer) });
    const textResult = await parser.getText();
    const text = textResult?.text ?? '';
    await parser.destroy();
    return typeof text === 'string' ? text : '';
  } catch {
    // Swallow parser failures by design — '' signals "no extractable text layer".
    return '';
  }
}

View File

@ -0,0 +1,401 @@
import stringSimilarity from 'string-similarity';
import { Op, cast, col, where as sqlWhere } from 'sequelize';
/** Shared list/report filters for CPC documents (parity with legacy CPC-CSD). */
export function appendCpcDocumentFilters(
  andParts: Record<string, unknown>[],
  opts: {
    type?: string;
    status?: string;
    search?: string;
    /** When true, `search` also matches document `id` (recent documents API). */
    searchIncludeId?: boolean;
  }
): void {
  const { type, status, search, searchIncludeId = false } = opts;
  // Known type buckets → the documentType ILIKE patterns that belong to each.
  const typePatterns: Record<string, string[]> = {
    AADHAAR: ['%AADHAAR%', '%ADHAAR%'],
    RETAIL_INVOICE: ['%RETAIL%', '%INVOICE%'],
    CPC_AUTH: ['%AUTHORITY%', '%CPC_AUTH%', '%AUTH%'],
    CSD_PO: ['%CSD_PO%', '%PURCHASE_ORDER%', '%PO%']
  };
  if (type && type !== 'ALL') {
    const patterns = typePatterns[type];
    if (patterns) {
      andParts.push({
        [Op.or]: patterns.map((p) => ({ documentType: { [Op.iLike]: p } }))
      });
    } else {
      // Unknown bucket: fall back to a plain substring match on the raw type.
      andParts.push({ documentType: { [Op.iLike]: `%${type}%` } });
    }
  }
  if (status && status !== 'ALL') {
    if (status === 'SUCCESSFUL') {
      andParts.push({
        validationStatus: { [Op.in]: ['SUCCESSFUL', 'MATCH', 'APPROVED'] }
      });
    } else if (status === 'UNSUCCESSFUL') {
      // Document-level "failed" outcomes. Per-field columns can still show green
      // for fields that passed. NEED_MANUAL = missing required extraction.
      andParts.push({
        validationStatus: {
          [Op.in]: ['UNSUCCESSFUL', 'MISMATCH', 'REJECTED', 'NEED_MANUAL']
        }
      });
    } else {
      andParts.push({ validationStatus: status });
    }
  }
  const term = String(search ?? '').trim();
  if (!term) return;
  const pattern = `%${term}%`;
  const orClause: object[] = [
    { bookingId: { [Op.iLike]: pattern } },
    { claimId: { [Op.iLike]: pattern } },
    { documentType: { [Op.iLike]: pattern } }
  ];
  if (searchIncludeId) {
    // Postgres: `uuid ILIKE '…'` is invalid — cast so id substring search works and does not break the whole OR.
    orClause.unshift(sqlWhere(cast(col('id'), 'TEXT'), { [Op.iLike]: pattern }));
  }
  andParts.push({ [Op.or]: orClause });
}
/** Collapses collected filter parts into a sequelize `where` (empty object when no filters). */
export function cpcWhereFromAndParts(andParts: Record<string, unknown>[]): Record<string, unknown> {
  return andParts.length > 0 ? { [Op.and]: andParts } : {};
}
/** Keeps only the decimal digits of `str`; nullish input yields ''. */
export function digitsOnly(str: string | null | undefined): string {
  const text = str == null ? '' : String(str);
  return text.replace(/[^0-9]/g, '');
}
/** Rupee-style value → whole-rupee digit string ('' when not numeric). Symbols and commas are dropped. */
export function normalizeMoney(str: string | null | undefined): string {
  const digitsAndDot = String(str ?? "").replace(/[^\d.]/g, "");
  if (!digitsAndDot) return "";
  const amount = Number(digitsAndDot);
  return Number.isNaN(amount) ? "" : String(Math.round(amount));
}
/** Compact key for rule lookup / money detection (spaces, hyphens, underscores removed). */
export function compactFieldKey(rawKey: string): string {
  const lowered = String(rawKey ?? '').trim().toLowerCase();
  return lowered.replace(/[\s_-]+/g, '');
}
/**
 * True for MSD/extraction keys that represent rupee amounts (commas / Indian grouping should be ignored).
 */
export function isMoneyFieldKey(rawKey: string): boolean {
  // Compact form: lowercase with spaces/underscores/hyphens removed (same shape compactFieldKey produces).
  const key = String(rawKey || '').trim().toLowerCase().replace(/[\s_-]+/g, '');
  if (!key) return false;
  const moneyMarkers = ['amount', 'invoicevalue', 'totalvalue', 'taxamount'];
  return moneyMarkers.some((marker) => key.includes(marker));
}
/**
 * Lowercase + spaces/hyphens → underscores for all keys; compact camelCase aliases **only for money keys**
 * (e.g. poAmount / Po Amount → po_amount). Non-money keys are unchanged except whitespace normalization.
 */
export function canonicalizeRuleFieldKey(rawKey: string): string {
  const snake = String(rawKey || '')
    .trim()
    .toLowerCase()
    .replace(/[\s-]+/g, '_');
  // Alias mapping only applies to amount-style keys.
  if (!isMoneyFieldKey(snake) && !isMoneyFieldKey(rawKey)) return snake;
  const amountAliases: Record<string, string> = {
    poamount: 'po_amount',
    letteramount: 'letter_amount',
    invoicevalue: 'invoice_value',
    taxamount: 'tax_amount',
    totalamount: 'total_amount'
  };
  return amountAliases[snake.replace(/_/g, '')] ?? snake;
}
/** Rename payload keys so money fields use canonical snake_case (e.g. poAmount → po_amount). Non-money keys untouched. */
export function canonicalizeMoneyFieldKeysInRecord(obj: Record<string, unknown> | null | undefined): Record<string, unknown> {
  if (!obj || typeof obj !== 'object' || Array.isArray(obj)) return (obj || {}) as Record<string, unknown>;
  const result = { ...obj };
  for (const originalKey of Object.keys(result)) {
    if (!isMoneyFieldKey(originalKey)) continue;
    const canonicalKey = canonicalizeRuleFieldKey(originalKey);
    if (canonicalKey === originalKey) continue;
    const value = result[originalKey];
    delete result[originalKey];
    // First writer wins — never clobber a value already under the canonical key.
    if (result[canonicalKey] === undefined) result[canonicalKey] = value;
  }
  return result;
}
/** Normalize money-type values to plain digit strings (no commas) for MSD / extracted payloads. */
export function sanitizeMoneyValuesInRecord(obj: Record<string, unknown> | null | undefined): Record<string, unknown> {
  if (!obj || typeof obj !== 'object' || Array.isArray(obj)) return (obj || {}) as Record<string, unknown>;
  const result: Record<string, unknown> = { ...obj };
  for (const [key, value] of Object.entries(result)) {
    if (!isMoneyFieldKey(key) || value == null) continue;
    const raw = String(value).trim();
    if (!raw) continue;
    const normalized = normalizeMoney(raw);
    // Leave the original value alone when it is not a parseable amount.
    if (normalized !== '') result[key] = normalized;
  }
  return result;
}
/**
 * Strip trailing relation / father-name suffix (S/O, W/O, …) so "Arjun Mehar S/O Radheshyam Mehar" → "Arjun Mehar".
 */
export function trimPatronymicSuffixFromName(s: string | null | undefined): string {
  // Local whitespace squash — same contract as cleanText().
  const squash = (v: string | null | undefined): string => String(v || '').trim().replace(/\s+/g, ' ');
  const base = squash(s);
  if (!base) return '';
  const relationRe = /\b(?:s\/o|w\/o|d\/o|c\/o|son\s+of|daughter\s+of|wife\s+of|husband\s+of|care\s+of)\b/i;
  // Keep only the span before the first relation marker, then before any comma/semicolon.
  const beforeRelation = base.split(relationRe)[0] ?? base;
  const beforePunct = beforeRelation.split(/[,;]/)[0] ?? beforeRelation;
  return squash(beforePunct);
}
/**
 * Multi-word military / rank prefixes at the start of a name line (longest first).
 * Each pattern is anchored at the start and consumes trailing whitespace so the
 * stripping loop can re-test the remaining string.
 */
const MULTI_TITLE_PREFIX_RES: RegExp[] = [
/^air\s+vice\s+marshal\s+/i,
/^air\s+commodore\s+/i,
/^vice\s+admiral\s+/i,
/^rear\s+admiral\s+/i,
/^group\s+captain\s+/i,
/^wing\s+commander\s+/i,
/^sqn\s+ldr\.?\s+/i,
/^flying\s+officer\s+/i,
/^fg\s+offr\.?\s+/i
];
/**
 * Single-token salutations / ranks at the start (repeat until none).
 * Each token must be followed by whitespace, so a bare trailing title with no
 * name after it is left untouched.
 */
const SINGLE_TITLE_PREFIX_RE =
/^(?:mr|mrs|ms|miss|dr\.?|doctor|prof\.?|sir|madam|shri|smt\.?|smti\.?|kumari|kum\.?|lt\.?|lieut\.?|lieutenant|leftenant|capt\.?|captain|maj\.?|major|col\.?|colonel|brig\.?|brigadier|gen\.?|general|cmdr|commander|cmde|commodore|adm\.?|admiral|hon\.?|honorable|honourable|retd\.?|svc)\s+/i;
/**
 * Removes every leading salutation / rank token from a name line: multi-word
 * ranks are tried first each pass, then single tokens, repeated until nothing
 * matches. Bounded at 24 passes as a guard against pathological inputs.
 */
function stripLeadingSalutationsAndTitles(s: string): string {
  let remaining = cleanText(s);
  for (let pass = 0; pass < 24; pass++) {
    const multiHit = MULTI_TITLE_PREFIX_RES.find((re) => re.test(remaining));
    if (multiHit) {
      remaining = remaining.replace(multiHit, '').trim();
      continue;
    }
    if (!SINGLE_TITLE_PREFIX_RE.test(remaining)) break;
    remaining = remaining.replace(SINGLE_TITLE_PREFIX_RE, '').trim();
  }
  return remaining;
}
/**
 * Holder-style person name for extraction / compare: no leading Dr./military rank tokens, no S/O-style suffixes.
 */
export function normalizePersonNameExtract(s: string | null | undefined): string {
  const raw = s == null ? '' : String(s);
  if (!raw.trim()) return '';
  const withoutTitles = stripLeadingSalutationsAndTitles(raw);
  return cleanText(trimPatronymicSuffixFromName(withoutTitles));
}
/** Strip salutations / relation clutter from holder name fields on an extracted / payload object. */
export function sanitizePersonNameFieldsInRecord(obj: Record<string, unknown> | null | undefined): Record<string, unknown> {
  if (!obj || typeof obj !== 'object' || Array.isArray(obj)) return (obj || {}) as Record<string, unknown>;
  const result = { ...obj };
  for (const [key, value] of Object.entries(result)) {
    if (value == null || !isPersonalHolderNameField(key)) continue;
    const cleaned = normalizePersonNameExtract(String(value));
    // Keep the original value when normalization collapses to '' (e.g. title-only input).
    if (cleaned) result[key] = cleaned;
  }
  return result;
}
/** Customer / holder person name fields (not supplier, grantor, or company). */
export function isPersonalHolderNameField(rawKey: string): boolean {
  // Compact form: lowercase with spaces/underscores/hyphens removed (same shape compactFieldKey produces).
  const key = String(rawKey || '').trim().toLowerCase().replace(/[\s_-]+/g, '');
  if (!key) return false;
  // Company-side name fields never count as the holder.
  if (/(vendor|grantor|supplier|dealer|company|business)/.test(key)) return false;
  const holderKeys = new Set(['name', 'customername', 'authorizedpersonname', 'accountholdername']);
  return holderKeys.has(key);
}
/** Trimmed text with internal whitespace runs collapsed to single spaces; nullish → ''. */
export function cleanText(str: string | null | undefined): string {
  if (str == null) return "";
  return str.trim().split(/\s+/).filter(Boolean).join(" ");
}
/** Lowercased alphabetic tokens of a name; digits/punctuation become separators. */
export function nameTokens(str: string | null | undefined): string[] {
  // Inline whitespace squash (same contract as cleanText()).
  const normalized = String(str || "").trim().replace(/\s+/g, " ");
  return normalized
    .toLowerCase()
    .replace(/[^a-z\s]/g, " ")
    .split(/\s+/)
    .filter(Boolean);
}
/**
 * Normalizes a date string to ISO `YYYY-MM-DD`, or null when unparseable.
 *
 * Accepts DDMMYYYY (with or without separators, e.g. "28-06-1990"),
 * YYYYMMDD / YYYY-MM-DD for 19xx/20xx years, and — as a last resort —
 * anything JavaScript's `Date` can parse.
 *
 * Fix notes:
 * - The old 8-digit branch returned unvalidated DDMMYYYY, so ISO input like
 *   "1990-06-28" (cleaned to "19900628") was mangled into "0628-90-19" and the
 *   YYYYMMDD branch below it was unreachable. Day/month are now range-checked
 *   and the ISO interpretation is tried second.
 * - The `Date` fallback used `toISOString()` (UTC), which shifted dates by one
 *   day in zones ahead of UTC (e.g. IST); local date components are used now.
 */
export function normalizeDate(str: string | null | undefined): string | null {
  if (!str) return null;
  // Validate and format year/month/day parts; null when out of plausible range.
  const toIso = (year: string, month: string, day: string): string | null => {
    const m = Number(month);
    const d = Number(day);
    if (m < 1 || m > 12 || d < 1 || d > 31) return null;
    return `${year}-${month}-${day}`;
  };
  // Remove non-alphanumeric chars (separators like - / . space).
  const clean = str.replace(/[^a-zA-Z0-9]/g, "");
  if (/^\d{8}$/.test(clean)) {
    // DDMMYYYY first (MSD's DOB convention) — covers "28-06-1990" after separator strip.
    const asDdmmyyyy = toIso(clean.substring(4, 8), clean.substring(2, 4), clean.substring(0, 2));
    if (asDdmmyyyy) return asDdmmyyyy;
    // Then ISO-like YYYYMMDD for 19xx/20xx years.
    if (clean.startsWith("19") || clean.startsWith("20")) {
      return toIso(clean.substring(0, 4), clean.substring(4, 6), clean.substring(6, 8));
    }
    return null;
  }
  // Last resort: native Date parsing, formatted from *local* components to avoid
  // the UTC day-shift that toISOString() introduced.
  const parsed = new Date(str);
  if (Number.isNaN(parsed.getTime())) return null;
  const y = String(parsed.getFullYear()).padStart(4, "0");
  const mo = String(parsed.getMonth() + 1).padStart(2, "0");
  const da = String(parsed.getDate()).padStart(2, "0");
  return `${y}-${mo}-${da}`;
}
/**
 * Strip relation prefixes ("S/O", "C/O", "D/O", "W/O", "care of", "son of",
 * "daughter of", "wife of") plus the name that follows them, up to the next
 * comma or newline, then trim.
 */
export function cleanAddress(str: string | null | undefined): string {
  if (!str) return "";
  const relationPattern = /(?:[scdw]\/o[:\s]|care\sof[:\s]|son\sof[:\s]|daughter\sof[:\s]|wife\sof[:\s])[^,\n]*(?:,|\n)?/gi;
  const withoutRelations = str.replace(relationPattern, "");
  return withoutRelations.trim();
}
/**
 * Fuzzy similarity score (0-100) between an expected value and a value found
 * in a document, with field-aware normalization driven by the field key.
 *
 * Handling by key (checked via substring/classifier on the lowercased key):
 * - date/dob keys: compare normalized ISO dates, then digits-only fallback.
 * - address keys: strip relation prefixes (S/O etc.) from both sides first.
 * - personal holder-name keys: strip patronymic suffixes and accept a
 *   whole-phrase match of the expected name inside the found text.
 * - money keys: compare normalized numeric rupees with small tolerance.
 * Otherwise: exact match, then string similarity, then token-overlap fallback.
 *
 * @param expected Reference value (e.g. from the master data source).
 * @param found    Value extracted from the document.
 * @param key      Field key used to select special handling; "" means generic.
 * @returns Integer 0-100; 100 means an exact or accepted match.
 */
export function calculateMatch(expected: string, found: string, key: string = ""): number {
  if (!expected || !found) return 0;
  const lowerKey = key.toLowerCase();
  let expStr = String(expected).trim().toLowerCase();
  let fndStr = String(found).trim().toLowerCase();
  // 1. Date Normalization Special Handling
  if (lowerKey.includes('dob') || lowerKey.includes('date')) {
    const normExp = normalizeDate(expStr);
    const normFnd = normalizeDate(fndStr);
    if (normExp && normFnd && normExp === normFnd) return 100;
    // Fallback to digits only for dates like "28-06-1990" vs "28061990"
    const dExp = expStr.replace(/\D/g, "");
    const dFnd = fndStr.replace(/\D/g, "");
    if (dExp !== "" && dExp === dFnd) return 100;
    // No early failure: a non-matching date still falls through to the
    // generic similarity scoring below.
  }
  // 2. Address Cleanup
  if (lowerKey.includes('address')) {
    fndStr = cleanAddress(fndStr).toLowerCase();
    expStr = cleanAddress(expStr).toLowerCase();
  }
  // 2a. Personal name (MSD): document may print "Arjun Mehar S/O Radheshyam Mehar" while MSD is "Arjun Mehar".
  // Strip S/O-style suffixes from the document side, then pass if the full MSD phrase appears as a whole phrase.
  if (isPersonalHolderNameField(lowerKey)) {
    const expTrim = trimPatronymicSuffixFromName(expStr).toLowerCase().replace(/\s+/g, ' ').trim();
    const fndTrim = trimPatronymicSuffixFromName(fndStr).toLowerCase().replace(/\s+/g, ' ').trim();
    if (expTrim.length >= 2 && fndTrim.length >= 2) {
      // True when `needle` occurs in `hay` as a whole phrase: either as a
      // prefix followed by a separator, or bounded by whitespace anywhere.
      const phraseOk = (hay: string, needle: string) => {
        if (hay === needle) return true;
        if (hay.startsWith(needle)) {
          if (hay.length === needle.length) return true;
          const next = hay.charAt(needle.length);
          return /\s|[,;/]/.test(next);
        }
        // Escape regex metacharacters before building the boundary pattern.
        const esc = needle.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
        return new RegExp(`(^|\\s)${esc}(\\s|$)`).test(hay);
      };
      if (expTrim.length >= 3 && phraseOk(fndTrim, expTrim)) {
        return 100;
      }
      // Keep the trimmed forms so generic scoring below compares clean names.
      expStr = expTrim;
      fndStr = fndTrim;
    }
  }
  // 2b. Money: ignore commas, ₹, spaces — compare numeric rupees (aligns browser vs API + Gemini "1,93,533")
  if (isMoneyFieldKey(lowerKey)) {
    const expM = normalizeMoney(expStr);
    const fndM = normalizeMoney(fndStr);
    if (expM && fndM && expM === fndM) return 100;
    const a = expM ? Number(expM) : NaN;
    const b = fndM ? Number(fndM) : NaN;
    if (!Number.isNaN(a) && !Number.isNaN(b)) {
      // Small absolute tolerance (rounding differences), then a percentage
      // score proportional to the relative gap.
      if (Math.abs(a - b) <= 5) return 100;
      const maxv = Math.max(Math.abs(a), Math.abs(b), 1);
      const pct = Math.round(100 - Math.min(100, (Math.abs(a - b) / maxv) * 100));
      return Math.max(0, pct);
    }
  }
  // 3. Exact match
  if (expStr === fndStr) return 100;
  // 4. String Similarity (Levenshtein/Dice)
  const similarity = stringSimilarity.compareTwoStrings(expStr, fndStr);
  const score = Math.round(similarity * 100);
  // 5. Token-based fallback (Good for names/addresses)
  const tokensA = nameTokens(expStr);
  const tokensB = nameTokens(fndStr);
  if (tokensA.length > 0 && tokensB.length > 0) {
    const setA = new Set(tokensA);
    const setB = new Set(tokensB);
    let intersection = 0;
    for (const t of setA) if (setB.has(t)) intersection++;
    // Overlap ratio against the larger token set; best of both scores wins.
    const tokenScore = Math.round((intersection / Math.max(setA.size, setB.size)) * 100);
    return Math.max(score, tokenScore);
  }
  return score > 0 ? score : 0;
}

View File

@ -0,0 +1,54 @@
/**
 * CPC-CSD permission service: API-driven access control based on admin configuration.
 * Reads viewerEmails from CPC_CSD_ADMIN_CONFIG (legacy CPC_CDC_ADMIN_CONFIG until migrated).
 */
import { selectCpcCsdAdminConfigValue } from '../utils/cpcCsdAdminConfigDb';
export interface CpcCdcViewerConfig {
viewerEmails: string[];
}
const emptyConfig: CpcCdcViewerConfig = {
viewerEmails: [],
};
/** Canonical email form for comparison: trimmed and lowercased; null-safe. */
function normalizeEmail(email: string): string {
  const value = email || '';
  return value.trim().toLowerCase();
}
/**
 * Load CPC-CSD viewer config from admin_configurations.
 * Returns an empty viewer list when the stored value is missing, is not valid
 * JSON, or does not contain a viewerEmails array.
 */
export async function getCpcCdcViewerConfig(): Promise<CpcCdcViewerConfig> {
  try {
    const raw = await selectCpcCsdAdminConfigValue();
    if (!raw) return emptyConfig;
    const parsed = JSON.parse(raw);
    if (!Array.isArray(parsed.viewerEmails)) return { viewerEmails: [] };
    const viewerEmails = parsed.viewerEmails
      .map((entry: unknown) => normalizeEmail(String(entry ?? '')))
      .filter(Boolean);
    return { viewerEmails };
  } catch {
    // Malformed JSON (or a null parse result) degrades to "nobody listed".
    return emptyConfig;
  }
}
/**
 * Check if user can access CPC-CSD section.
 * - Admin: always allowed.
 * - Otherwise: only emails on the configured viewer list are allowed
 *   (compared case-insensitively after trimming).
 *
 * @param userEmail Email of the requesting user.
 * @param role      Optional role; 'ADMIN' bypasses the viewer list entirely.
 * @returns true when access is granted.
 */
export async function canAccessCpcCdc(userEmail: string, role?: string): Promise<boolean> {
  if (role === 'ADMIN') return true;
  const email = normalizeEmail(userEmail);
  // Short-circuit before loading config: an empty email can never match,
  // so skip the admin_configurations lookup entirely.
  if (!email) return false;
  const config = await getCpcCdcViewerConfig();
  return config.viewerEmails.includes(email);
}

View File

@ -7,7 +7,7 @@
*/ */
import crypto from 'crypto'; import crypto from 'crypto';
import { Op, fn, col, QueryTypes } from 'sequelize'; import { Op, fn, col, QueryTypes, where as sqlWhere } from 'sequelize';
import { sequelize } from '../config/database'; import { sequelize } from '../config/database';
import { import {
Form16CreditNote, Form16CreditNote,
@ -67,6 +67,14 @@ export async function getDealerCodeForUser(userId: string, userEmail?: string |
const SECTION_26AS_194Q = '194Q'; const SECTION_26AS_194Q = '194Q';
const AMOUNT_MATCH_TOLERANCE = 1; const AMOUNT_MATCH_TOLERANCE = 1;
/**
 * Format a Form 16 INCOMING CSV `TDS_AMT` value: amount digits first, sign
 * character last — credit `123.45+`, debit `123.45-`.
 */
function formatForm16IncomingCsvTdsAmt(amount: number, kind: 'credit' | 'debit'): string {
  const magnitude = Math.abs(Number(amount)).toFixed(2);
  const sign = kind === 'credit' ? '+' : '-';
  return `${magnitude}${sign}`;
}
type Latest26asRow = { type Latest26asRow = {
panNumber: string | null; panNumber: string | null;
amountPaid: number | null; amountPaid: number | null;
@ -122,7 +130,7 @@ export async function getLatest26asAggregatedForQuarter(
const [row] = await sequelize.query<{ sum: string }>( const [row] = await sequelize.query<{ sum: string }>(
`WITH latest_upload AS ( `WITH latest_upload AS (
SELECT MAX(upload_log_id) AS mid FROM tds_26as_entries SELECT MAX(upload_log_id) AS mid FROM tds_26as_entries
WHERE UPPER(REGEXP_REPLACE(TRIM(COALESCE(tan_number, '')), '[^A-Z0-9]', '', 'g')) = :tan WHERE UPPER(REGEXP_REPLACE(TRIM(COALESCE(tan_number, '')), '[^a-zA-Z0-9]', '', 'g')) = :tan
AND financial_year = :fy AND quarter = :qtr AND financial_year = :fy AND quarter = :qtr
AND UPPER(TRIM(COALESCE(section_code, ''))) = :section AND UPPER(TRIM(COALESCE(section_code, ''))) = :section
AND UPPER(TRIM(COALESCE(status_oltas, ''))) IN ('F', 'O') AND UPPER(TRIM(COALESCE(status_oltas, ''))) IN ('F', 'O')
@ -130,7 +138,7 @@ export async function getLatest26asAggregatedForQuarter(
) )
SELECT COALESCE(SUM(e.tax_deducted), 0)::text AS sum SELECT COALESCE(SUM(e.tax_deducted), 0)::text AS sum
FROM tds_26as_entries e FROM tds_26as_entries e
WHERE UPPER(REGEXP_REPLACE(TRIM(COALESCE(e.tan_number, '')), '[^A-Z0-9]', '', 'g')) = :tan WHERE UPPER(REGEXP_REPLACE(TRIM(COALESCE(e.tan_number, '')), '[^a-zA-Z0-9]', '', 'g')) = :tan
AND e.financial_year = :fy AND e.quarter = :qtr AND e.financial_year = :fy AND e.quarter = :qtr
AND UPPER(TRIM(COALESCE(e.section_code, ''))) = :section AND UPPER(TRIM(COALESCE(e.section_code, ''))) = :section
AND UPPER(TRIM(COALESCE(e.status_oltas, ''))) IN ('F', 'O') AND UPPER(TRIM(COALESCE(e.status_oltas, ''))) IN ('F', 'O')
@ -165,7 +173,7 @@ async function getLatest26asRowsForQuarter(
}>( }>(
`WITH latest_upload AS ( `WITH latest_upload AS (
SELECT MAX(upload_log_id) AS mid FROM tds_26as_entries SELECT MAX(upload_log_id) AS mid FROM tds_26as_entries
WHERE UPPER(REGEXP_REPLACE(TRIM(COALESCE(tan_number, '')), '[^A-Z0-9]', '', 'g')) = :tan WHERE UPPER(REGEXP_REPLACE(TRIM(COALESCE(tan_number, '')), '[^a-zA-Z0-9]', '', 'g')) = :tan
AND financial_year = :fy AND quarter = :qtr AND financial_year = :fy AND quarter = :qtr
AND UPPER(TRIM(COALESCE(section_code, ''))) = :section AND UPPER(TRIM(COALESCE(section_code, ''))) = :section
AND UPPER(TRIM(COALESCE(status_oltas, ''))) IN ('F', 'O') AND UPPER(TRIM(COALESCE(status_oltas, ''))) IN ('F', 'O')
@ -179,7 +187,7 @@ async function getLatest26asRowsForQuarter(
e.transaction_date, e.transaction_date,
e.date_of_booking e.date_of_booking
FROM tds_26as_entries e FROM tds_26as_entries e
WHERE UPPER(REGEXP_REPLACE(TRIM(COALESCE(e.tan_number, '')), '[^A-Z0-9]', '', 'g')) = :tan WHERE UPPER(REGEXP_REPLACE(TRIM(COALESCE(e.tan_number, '')), '[^a-zA-Z0-9]', '', 'g')) = :tan
AND e.financial_year = :fy AND e.financial_year = :fy
AND e.quarter = :qtr AND e.quarter = :qtr
AND UPPER(TRIM(COALESCE(e.section_code, ''))) = :section AND UPPER(TRIM(COALESCE(e.section_code, ''))) = :section
@ -221,7 +229,7 @@ async function get26asCoverageDebug(tanNumber: string, financialYear: string, qu
END END
)::text AS matching_194q_f_o_rows )::text AS matching_194q_f_o_rows
FROM tds_26as_entries e FROM tds_26as_entries e
WHERE UPPER(REGEXP_REPLACE(TRIM(COALESCE(e.tan_number, '')), '[^A-Z0-9]', '', 'g')) = :tan WHERE UPPER(REGEXP_REPLACE(TRIM(COALESCE(e.tan_number, '')), '[^a-zA-Z0-9]', '', 'g')) = :tan
AND e.financial_year = :fy AND e.financial_year = :fy
AND e.quarter = :q`, AND e.quarter = :q`,
{ replacements: { tan: normalizedTan, fy, q, section: SECTION_26AS_194Q }, type: QueryTypes.SELECT } { replacements: { tan: normalizedTan, fy, q, section: SECTION_26AS_194Q }, type: QueryTypes.SELECT }
@ -234,7 +242,7 @@ async function get26asCoverageDebug(tanNumber: string, financialYear: string, qu
status_oltas, status_oltas,
COUNT(*)::text AS cnt COUNT(*)::text AS cnt
FROM tds_26as_entries e FROM tds_26as_entries e
WHERE UPPER(REGEXP_REPLACE(TRIM(COALESCE(e.tan_number, '')), '[^A-Z0-9]', '', 'g')) = :tan WHERE UPPER(REGEXP_REPLACE(TRIM(COALESCE(e.tan_number, '')), '[^a-zA-Z0-9]', '', 'g')) = :tan
AND e.financial_year = :fy AND e.financial_year = :fy
AND e.quarter = :q AND e.quarter = :q
GROUP BY section_code, status_oltas GROUP BY section_code, status_oltas
@ -273,6 +281,30 @@ function normalizeDateOnly(value: unknown): string | null {
return `${yyyy}-${mm}-${dd}`; return `${yyyy}-${mm}-${dd}`;
} }
// Handle OCR values like "13-Jan-2025" without timezone conversion.
const m2 = raw.match(/^(\d{1,2})[-\/]([A-Za-z]{3,9})[-\/](\d{4})$/);
if (m2) {
const dd = m2[1].padStart(2, '0');
const mon = m2[2].toLowerCase();
const yyyy = m2[3];
const monthMap: Record<string, string> = {
jan: '01', january: '01',
feb: '02', february: '02',
mar: '03', march: '03',
apr: '04', april: '04',
may: '05',
jun: '06', june: '06',
jul: '07', july: '07',
aug: '08', august: '08',
sep: '09', sept: '09', september: '09',
oct: '10', october: '10',
nov: '11', november: '11',
dec: '12', december: '12',
};
const mm = monthMap[mon];
if (mm) return `${yyyy}-${mm}-${dd}`;
}
const d = new Date(raw); const d = new Date(raw);
if (!Number.isNaN(d.getTime())) return d.toISOString().slice(0, 10); if (!Number.isNaN(d.getTime())) return d.toISOString().slice(0, 10);
return null; return null;
@ -646,6 +678,150 @@ function normalizeQuarter(raw: string): string {
return (raw || '').trim() || ''; return (raw || '').trim() || '';
} }
/**
 * Assessment Year from Financial Year (Indian income tax): FY 2024-25 -> AY 2025-26.
 * When the input does not normalize to "YYYY-YY", returns a filename-safe slug
 * of the raw value (or 'AY' when even that is empty).
 */
function financialYearToAssessmentYear(financialYear: string): string {
  const fy = normalizeFinancialYear(financialYear) || (financialYear || '').trim();
  const match = /^(\d{4})-(\d{2})$/.exec(fy);
  if (!match) {
    const slug = fy.replace(/[^\w.-]/g, '_').slice(0, 24);
    return slug || 'AY';
  }
  const startYear = parseInt(match[1], 10);
  const ayStart = startYear + 1;
  const ayEndTwoDigits = (startYear + 2) % 100;
  return `${ayStart}-${String(ayEndTwoDigits).padStart(2, '0')}`;
}
/**
 * Make a deductor name/address safe as a file-name segment: newlines become
 * spaces, filesystem-reserved and control characters are dropped, whitespace
 * is collapsed, and the result is capped at maxLen (re-trimmed after the cut).
 * Empty input yields the placeholder 'Deductor'.
 */
function sanitizeForm16PdfDeductorSegment(text: string, maxLen: number): string {
  const collapsed = String(text || '')
    .replace(/[\r\n]+/g, ' ')
    .replace(/[<>:"/\\|?*\x00-\x1f]/g, '')
    .replace(/\s+/g, ' ')
    .trim();
  if (!collapsed) return 'Deductor';
  if (collapsed.length <= maxLen) return collapsed;
  return collapsed.slice(0, maxLen).trim();
}
/** Certificate number as a file-name segment: alphanumerics, '_' and '-' only; 'CERT' when nothing survives. */
function sanitizeForm16PdfCertSegment(text: string): string {
  const cleaned = String(text || '').trim().replace(/[^A-Za-z0-9_-]/g, '');
  if (cleaned) return cleaned;
  return 'CERT';
}
/**
 * PDF file name after successful 26AS match + credit note:
 * [TAN]_[Assessment Year]_[Quarter]_[Name and address of deductor]_[Certificate].pdf
 *
 * The deductor segment prefers the OCR field `nameAndAddressOfDeductor`, then
 * falls back to OCR/submission deductor name plus OCR address, then to the
 * submission's deductorName alone. The base name is kept within 220 chars by
 * shrinking only the deductor segment, so the fixed TAN/AY/quarter/certificate
 * parts are preserved.
 *
 * @param sub Form 16A submission whose TAN, FY, quarter, OCR data, and
 *            certificate number feed the file name.
 * @returns File name ending in ".pdf".
 */
function buildForm16CreditNoteSuccessPdfFileName(sub: Form16aSubmission): string {
  // TAN: strip non-alphanumerics, uppercase; 'TAN' placeholder when empty.
  const tan = normalizeTanNumber(String(sub.tanNumber || ''))
    .replace(/[^A-Z0-9]/gi, '')
    .toUpperCase() || 'TAN';
  const fy = normalizeFinancialYear(String(sub.financialYear || '').trim()) || String(sub.financialYear || '').trim();
  const ay = financialYearToAssessmentYear(fy);
  const qRaw = String(sub.quarter || '').trim();
  const q = normalizeQuarter(qRaw) || qRaw || 'QX';
  const ocr = (sub.ocrExtractedData || {}) as Record<string, unknown>;
  // Deductor segment: combined OCR field first, then name + address, then
  // the submission's own deductorName, then a literal placeholder.
  let nameAddr = String(ocr.nameAndAddressOfDeductor || '').trim();
  if (!nameAddr) {
    const dn = String(ocr.deductorName || sub.deductorName || '').trim();
    const da = String(ocr.deductorAddress || '').trim();
    nameAddr = [dn, da].filter(Boolean).join(', ');
  }
  if (!nameAddr) nameAddr = String(sub.deductorName || 'Deductor').trim();
  let deductorSan = sanitizeForm16PdfDeductorSegment(nameAddr, 150);
  const certSan = sanitizeForm16PdfCertSegment(String(sub.form16aNumber || ''));
  let base = `${tan}_${ay}_${q}_${deductorSan}_${certSan}`;
  if (base.length > 220) {
    // Over budget: re-sanitize the deductor segment with a tighter cap
    // (never below 20 chars) and rebuild the base name.
    const over = base.length - 220;
    const shorter = Math.max(20, deductorSan.length - over - 5);
    deductorSan = sanitizeForm16PdfDeductorSegment(nameAddr, shorter);
    base = `${tan}_${ay}_${q}_${deductorSan}_${certSan}`;
  }
  return `${base}.pdf`;
}
/**
 * After a credit note is issued, rename the submission's stored Form 16A PDF
 * to the standardized success file name and update the submission's
 * documentUrl plus the matching documents row.
 *
 * Best-effort: every failure path logs and returns without throwing, so the
 * credit-note flow that calls this is never broken by a rename problem.
 *
 * @param params.submissionId    Form16aSubmission primary key.
 * @param params.requestId       Workflow request the document belongs to.
 * @param params.oldRelativePath Current storage path (must live under "requests/").
 */
async function renameForm16SubmissionPdfAfterCreditNote(params: {
  submissionId: number;
  requestId: string;
  oldRelativePath: string;
}): Promise<void> {
  const { submissionId, requestId, oldRelativePath } = params;
  logger.info('[Form16] PDF rename flow start', {
    submissionId,
    requestId,
    oldRelativePath,
    nodeEnv: process.env.NODE_ENV,
    gcpBucket: process.env.GCP_BUCKET_NAME || null,
  });
  // Normalize to forward slashes and reject traversal / out-of-tree paths.
  const oldPathNorm = String(oldRelativePath || '').replace(/\\/g, '/').trim();
  if (!oldPathNorm || oldPathNorm.includes('..') || !oldPathNorm.startsWith('requests/')) {
    logger.warn('[Form16] Skip PDF rename: invalid storage path', { oldPathNorm });
    return;
  }
  const sub = await Form16aSubmission.findByPk(submissionId);
  if (!sub) {
    logger.warn('[Form16] Skip PDF rename: submission not found', { submissionId, requestId });
    return;
  }
  const newFileName = buildForm16CreditNoteSuccessPdfFileName(sub);
  logger.info('[Form16] PDF rename target name computed', {
    submissionId,
    requestId,
    newFileName,
    tanNumber: (sub as any).tanNumber || null,
    financialYear: (sub as any).financialYear || null,
    quarter: (sub as any).quarter || null,
    form16aNumber: (sub as any).form16aNumber || null,
  });
  try {
    // Rename in storage first; DB rows are only updated on success.
    const result = await gcsStorageService.renameRequestDocumentFile({
      oldRelativePath: oldPathNorm,
      newFileName,
    });
    logger.info('[Form16] Storage rename success', {
      submissionId,
      requestId,
      oldPathNorm,
      renamedFilePath: result.filePath,
      renamedStorageUrlPrefix: String(result.storageUrl || '').slice(0, 120),
    });
    await sub.update({ documentUrl: result.storageUrl });
    logger.info('[Form16] Submission documentUrl updated after rename', {
      submissionId,
      requestId,
    });
    // Keep the documents table in sync with the renamed file, if a row exists.
    const doc = await Document.findOne({
      where: { requestId, filePath: oldPathNorm },
    });
    if (doc) {
      // Respect column length limits: truncate path at 500, drop storageUrl
      // entirely (undefined = leave unchanged) when it would not fit.
      const fp = result.filePath.length <= 500 ? result.filePath : result.filePath.slice(0, 500);
      const su =
        result.storageUrl.length <= 500 ? result.storageUrl : undefined;
      await doc.update({
        fileName: newFileName.slice(0, 255),
        originalFileName: newFileName.slice(0, 255),
        filePath: fp,
        storageUrl: su,
      });
      logger.info('[Form16] Document metadata updated after rename', {
        requestId,
        submissionId,
        documentId: (doc as any).id || null,
        oldPathNorm,
        newPath: fp,
      });
    } else {
      logger.warn('[Form16] PDF renamed; documents row not found for path', { requestId, oldPathNorm });
    }
    logger.info('[Form16] Form 16A PDF renamed after credit note', { submissionId, newFileName });
  } catch (e: any) {
    // Swallow deliberately: rename is cosmetic relative to the credit-note flow.
    logger.error('[Form16] Failed to rename Form 16 PDF after credit note:', e?.message || e);
  }
}
/** Compact FY for Form 16 note numbers: "2024-25" -> "24-25" */ /** Compact FY for Form 16 note numbers: "2024-25" -> "24-25" */
function form16FyCompact(financialYear: string): string { function form16FyCompact(financialYear: string): string {
const fy = normalizeFinancialYear(financialYear) || (financialYear || '').trim(); const fy = normalizeFinancialYear(financialYear) || (financialYear || '').trim();
@ -782,7 +958,20 @@ async function run26asMatchAndCreditNote(submission: Form16aSubmission): Promise
const submittedTaxDeducted = toNumberOrNull(extracted.totalTaxDeducted ?? sub.tdsAmount); const submittedTaxDeducted = toNumberOrNull(extracted.totalTaxDeducted ?? sub.tdsAmount);
const submittedTdsDeposited = toNumberOrNull(extracted.totalTdsDeposited ?? sub.tdsAmount); const submittedTdsDeposited = toNumberOrNull(extracted.totalTdsDeposited ?? sub.tdsAmount);
const submittedTransactionDate = normalizeDateOnly(extracted.transactionDate); const submittedTransactionDate = normalizeDateOnly(extracted.transactionDate);
const submittedBookingDate = normalizeDateOnly(extracted.dateOfBooking); const submittedLastUpdatedOn = normalizeDateOnly(extracted.certificateDate ?? extracted.lastUpdatedOn ?? extracted.lastUpdatedDate);
// Mandatory for matching: Form 16A "Last updated on" must be extracted and matched to 26AS booking date.
if (!submittedLastUpdatedOn) {
const msg = 'OCR could not extract "Last updated on" date from Form 16A. Please resubmit a clear document.';
await submission.update({
validationStatus: 'resubmission_needed',
validationNotes: msg,
});
return {
validationStatus: 'resubmission_needed',
validationNotes: msg,
};
}
// Latest 26AS upload rows for the same TAN + FY + Quarter. // Latest 26AS upload rows for the same TAN + FY + Quarter.
let latestRows = await getLatest26asRowsForQuarter(tanNumber, financialYear, quarter); let latestRows = await getLatest26asRowsForQuarter(tanNumber, financialYear, quarter);
@ -790,7 +979,7 @@ async function run26asMatchAndCreditNote(submission: Form16aSubmission): Promise
// If OCR extracted FY/Quarter incorrectly, derive FY/Quarter from OCR dates and retry. // If OCR extracted FY/Quarter incorrectly, derive FY/Quarter from OCR dates and retry.
if (latestRows.length === 0) { if (latestRows.length === 0) {
const derivedFromTx = deriveFyAndQuarterFromDateOnly(submittedTransactionDate); const derivedFromTx = deriveFyAndQuarterFromDateOnly(submittedTransactionDate);
const derivedFromBooking = deriveFyAndQuarterFromDateOnly(submittedBookingDate); const derivedFromBooking = deriveFyAndQuarterFromDateOnly(submittedLastUpdatedOn);
const derived = derivedFromTx || derivedFromBooking; const derived = derivedFromTx || derivedFromBooking;
if (derived && (derived.financialYear !== financialYear || derived.quarter !== quarter)) { if (derived && (derived.financialYear !== financialYear || derived.quarter !== quarter)) {
const altRows = await getLatest26asRowsForQuarter(tanNumber, derived.financialYear, derived.quarter); const altRows = await getLatest26asRowsForQuarter(tanNumber, derived.financialYear, derived.quarter);
@ -914,16 +1103,15 @@ async function run26asMatchAndCreditNote(submission: Form16aSubmission): Promise
return { validationStatus: 'failed', validationNotes: 'Transaction date mismatch with latest 26AS.' }; return { validationStatus: 'failed', validationNotes: 'Transaction date mismatch with latest 26AS.' };
} }
} }
if (submittedBookingDate) { // Match Form 16A "Last updated on" against 26AS "Date of Booking"
const hasBookingDate = latestRows.some((r) => normalizeDateOnly(r.dateOfBooking) === submittedBookingDate); const hasBookingDate = latestRows.some((r) => normalizeDateOnly(r.dateOfBooking) === submittedLastUpdatedOn);
if (!hasBookingDate) { if (!hasBookingDate) {
await submission.update({ await submission.update({
validationStatus: 'failed', validationStatus: 'failed',
validationNotes: validationNotes:
`Booking date mismatch with latest 26AS for TAN no - ${tanNumber}. No latest 26AS record found with booking date ${submittedBookingDate}.`, `Last updated on date mismatch with latest 26AS booking date for TAN no - ${tanNumber}. Form 16A last updated on: ${submittedLastUpdatedOn}.`,
}); });
return { validationStatus: 'failed', validationNotes: 'Booking date mismatch with latest 26AS.' }; return { validationStatus: 'failed', validationNotes: 'Last updated on date mismatch with latest 26AS booking date.' };
}
} }
if (Math.abs(tdsAmount - aggregated26as) > AMOUNT_MATCH_TOLERANCE) { if (Math.abs(tdsAmount - aggregated26as) > AMOUNT_MATCH_TOLERANCE) {
@ -1006,7 +1194,7 @@ async function run26asMatchAndCreditNote(submission: Form16aSubmission): Promise
DLR_TAN_NO: tanNumber, DLR_TAN_NO: tanNumber,
'FIN_YEAR&QUARTER': finYearAndQuarter, 'FIN_YEAR&QUARTER': finYearAndQuarter,
DOC_DATE: docDate, DOC_DATE: docDate,
TDS_AMT: `+${Number(Math.abs(tdsAmount)).toFixed(2)}`, TDS_AMT: formatForm16IncomingCsvTdsAmt(tdsAmount, 'credit'),
TDS_CERTIFICATE_NO: certificateNumber, TDS_CERTIFICATE_NO: certificateNumber,
}; };
const fileName = `${cnNumber}.csv`; const fileName = `${cnNumber}.csv`;
@ -1180,11 +1368,33 @@ export async function createSubmission(
); );
// When credit note is issued (completed), set workflow status to CLOSED so the request appears on Closed requests page // When credit note is issued (completed), set workflow status to CLOSED so the request appears on Closed requests page
if (validationStatus === 'success' && creditNoteNumber) { if (validationStatus === 'success' && creditNoteNumber) {
logger.info('[Form16] Success path reached; triggering PDF rename', {
requestId,
submissionId: submission.id,
creditNoteNumber,
uploadFilePath,
});
const workflow = await WorkflowRequest.findOne({ where: { requestId }, attributes: ['requestId', 'status'] }); const workflow = await WorkflowRequest.findOne({ where: { requestId }, attributes: ['requestId', 'status'] });
if (workflow && (workflow as any).status !== WorkflowStatus.CLOSED) { if (workflow && (workflow as any).status !== WorkflowStatus.CLOSED) {
await workflow.update({ status: WorkflowStatus.CLOSED }); await workflow.update({ status: WorkflowStatus.CLOSED });
logger.info(`[Form16] Request ${requestId} set to CLOSED (credit note issued).`); logger.info(`[Form16] Request ${requestId} set to CLOSED (credit note issued).`);
} }
await renameForm16SubmissionPdfAfterCreditNote({
submissionId: submission.id,
requestId,
oldRelativePath: uploadFilePath.replace(/\\/g, '/'),
});
logger.info('[Form16] PDF rename call completed', {
requestId,
submissionId: submission.id,
});
} else {
logger.info('[Form16] PDF rename not triggered (submission not successful)', {
requestId,
submissionId: submission.id,
validationStatus,
creditNoteNumber: creditNoteNumber || null,
});
} }
} catch (err: any) { } catch (err: any) {
logger.error( logger.error(
@ -2241,13 +2451,44 @@ export interface List26asSummary {
function build26asWhere(filters?: List26asFilters): Record<string, unknown> { function build26asWhere(filters?: List26asFilters): Record<string, unknown> {
const where: Record<string, unknown> = {}; const where: Record<string, unknown> = {};
if (filters?.financialYear) where.financialYear = filters.financialYear; const andClauses: unknown[] = [];
if (filters?.quarter) where.quarter = filters.quarter;
if (filters?.tanNumber) where.tanNumber = { [Op.iLike]: `%${filters.tanNumber}%` }; if (filters?.financialYear) where.financialYear = normalizeFinancialYear(filters.financialYear) || filters.financialYear;
if (filters?.search?.trim()) where.deductorName = { [Op.iLike]: `%${filters.search.trim()}%` }; if (filters?.quarter) where.quarter = normalizeQuarter(filters.quarter) || filters.quarter;
if (filters?.status) where.statusOltas = filters.status; if (filters?.status) where.statusOltas = filters.status;
if (filters?.assessmentYear) where.assessmentYear = filters.assessmentYear; if (filters?.assessmentYear) where.assessmentYear = filters.assessmentYear;
if (filters?.sectionCode) where.sectionCode = filters.sectionCode; if (filters?.sectionCode) where.sectionCode = filters.sectionCode;
if (filters?.tanNumber?.trim()) {
const normalizedTan = normalizeTanNumber(filters.tanNumber);
if (normalizedTan) {
andClauses.push(
sqlWhere(
fn('upper', fn('regexp_replace', fn('coalesce', col('tan_number'), ''), '[^a-zA-Z0-9]', '', 'g')),
{ [Op.like]: `%${normalizedTan}%` }
)
);
}
}
if (filters?.search?.trim()) {
const s = filters.search.trim();
const normalizedSearchTan = normalizeTanNumber(s);
const searchOr: unknown[] = [{ deductorName: { [Op.iLike]: `%${s}%` } }];
if (normalizedSearchTan) {
searchOr.push(
sqlWhere(
fn('upper', fn('regexp_replace', fn('coalesce', col('tan_number'), ''), '[^a-zA-Z0-9]', '', 'g')),
{ [Op.like]: `%${normalizedSearchTan}%` }
)
);
}
andClauses.push({ [Op.or]: searchOr });
}
if (andClauses.length > 0) {
(where as any)[Op.and] = andClauses;
}
return where; return where;
} }
@ -2257,7 +2498,8 @@ export async function list26asEntries(filters?: List26asFilters): Promise<{
summary: List26asSummary; summary: List26asSummary;
}> { }> {
const where = build26asWhere(filters); const where = build26asWhere(filters);
const hasWhere = Object.keys(where).length > 0; // Use Reflect.ownKeys so symbol keys like Op.and are counted.
const hasWhere = Reflect.ownKeys(where).length > 0;
const limit = Math.min(MAX_PAGE_SIZE, Math.max(1, filters?.limit ?? DEFAULT_PAGE_SIZE)); const limit = Math.min(MAX_PAGE_SIZE, Math.max(1, filters?.limit ?? DEFAULT_PAGE_SIZE));
const offset = Math.max(0, filters?.offset ?? 0); const offset = Math.max(0, filters?.offset ?? 0);
@ -2548,6 +2790,26 @@ export function parse26asTxtFile(buffer: Buffer): { rows: any[]; errors: string[
const errors: string[] = []; const errors: string[] = [];
if (rawLines.length === 0) return { rows: [], errors }; if (rawLines.length === 0) return { rows: [], errors };
// Lightweight, non-blocking sanity logging detect obviously suspicious uploads without rejecting them.
try {
const totalLines = rawLines.length;
const sampleLines = rawLines.slice(0, Math.min(200, totalLines));
const caretLines = sampleLines.filter((l) => (l.match(/\^/g) || []).length >= 5).length;
const hasDatePattern = sampleLines.some((l) => /\b\d{1,2}-[A-Za-z]{3}-\d{4}\b/.test(l));
const hasTanLike = sampleLines.some((l) => /\b[A-Z]{4}[A-Z0-9]{5}[A-Z]\b/i.test(l));
const suspicious =
totalLines < 5 ||
(caretLines === 0 && !hasDatePattern && !hasTanLike);
if (suspicious) {
logger.warn(
'[Form16] 26AS TXT upload appears suspicious (non-blocking): ' +
`lines=${totalLines}, caretLines=${caretLines}, hasDatePattern=${hasDatePattern}, hasTanLike=${hasTanLike}`
);
}
} catch {
// Never block parsing due to logging issues
}
const firstLine = rawLines[0]; const firstLine = rawLines[0];
let delimiter = detectDelimiter(firstLine); let delimiter = detectDelimiter(firstLine);
if (delimiter !== '^') { if (delimiter !== '^') {
@ -2751,7 +3013,7 @@ export async function process26asUploadAggregation(uploadLogId: number): Promise
DLR_TAN_NO: tanNumber, DLR_TAN_NO: tanNumber,
'FIN_YEAR&QUARTER': finYearAndQuarter, 'FIN_YEAR&QUARTER': finYearAndQuarter,
DOC_DATE: docDate, DOC_DATE: docDate,
TDS_AMT: `-${Math.abs(Number(amount)).toFixed(2)}`, TDS_AMT: formatForm16IncomingCsvTdsAmt(Number(amount), 'debit'),
TDS_CERTIFICATE_NO: creditNoteCertNumber, TDS_CERTIFICATE_NO: creditNoteCertNumber,
}; };
const fileName = `${debitNum}.csv`; const fileName = `${debitNum}.csv`;
@ -2803,13 +3065,18 @@ export interface Form1626asUploadLogRow {
} }
/** List 26AS upload history (most recent first) for management section. */ /** List 26AS upload history (most recent first) for management section. */
export async function list26asUploadHistory(limit: number = 50): Promise<Form1626asUploadLogRow[]> { export async function list26asUploadHistory(
const rows = await Form1626asUploadLog.findAll({ limit: number = 50,
offset: number = 0
): Promise<{ rows: Form1626asUploadLogRow[]; total: number }> {
const { rows, count } = await Form1626asUploadLog.findAndCountAll({
limit, limit,
offset,
order: [['uploadedAt', 'DESC']], order: [['uploadedAt', 'DESC']],
include: [{ model: User, as: 'uploadedByUser', attributes: ['email', 'displayName'], required: false }], include: [{ model: User, as: 'uploadedByUser', attributes: ['email', 'displayName'], required: false }],
distinct: true,
}); });
return rows.map((r) => { const mapped = rows.map((r) => {
const u = r as any; const u = r as any;
return { return {
id: u.id, id: u.id,
@ -2822,4 +3089,286 @@ export async function list26asUploadHistory(limit: number = 50): Promise<Form162
errorsCount: u.errorsCount ?? 0, errorsCount: u.errorsCount ?? 0,
}; };
}); });
return { rows: mapped, total: count };
}
/** Headline percentages (0-100) for the Form16A dashboard KPI tiles. */
export interface Form16DashboardKpi {
  collectionPct: number;
  pendingPct: number;
  submittedPct: number;
  submissionPendingPct: number;
}
/** Aggregate amounts and dealer counts across the whole dealer universe. */
export interface Form16DashboardOverall {
  totalAmount: number;
  submittedAmount: number;
  pendingAmount: number;
  totalDealers: number;
  submittedDealerCount: number;
  pendingDealerCount: number;
}
/** One breakdown row; label is the grouping key (financial year or zone name). */
export interface Form16DashboardBreakdownRow {
  label: string;
  totalAmount: number;
  dealerCount: number;
  submittedAmount: number;
  submittedDealerCount: number;
  pendingAmount: number;
  pendingDealerCount: number;
}
/** Full payload returned by getForm16DashboardData for the RE dashboard. */
export interface Form16DashboardData {
  kpi: Form16DashboardKpi;
  overall: Form16DashboardOverall;
  yearWise: Form16DashboardBreakdownRow[];
  zoneWise: Form16DashboardBreakdownRow[];
}
/**
* Form16A dashboard for RE users.
* Uses real DB data:
* - dealer universe from active dealers
* - latest submission per dealer+FY+quarter
* - submitted/credited via form_16_credit_notes
* Zone mapping follows dealer region code prefix: N* -> North, S* -> South, E* -> East, W* -> West, C* -> Central.
*/
export async function getForm16DashboardData(): Promise<Form16DashboardData> {
const toNum = (v: unknown): number => {
const n = Number(v ?? 0);
return Number.isFinite(n) ? n : 0;
};
// Overall KPI aggregation in a single round-trip. "Latest submission" means: per
// dealer/financial-year/quarter, the row with highest version, then latest
// submitted_date/created_at, then highest id (see ROW_NUMBER ordering below).
// Submitted amounts are summed from credit notes joined to those latest submissions.
// Dealer code is sales_code with dlrcode as fallback, trimmed; blank codes excluded.
const [overallRow] = await sequelize.query<{
total_amount: number | string | null;
submitted_amount: number | string | null;
total_dealers: number | string | null;
submitted_dealer_count: number | string | null;
}>(
`
WITH active_dealers AS (
SELECT DISTINCT
TRIM(COALESCE(NULLIF(d.sales_code, ''), NULLIF(d.dlrcode, ''))) AS dealer_code
FROM dealers d
WHERE d.is_active = true
AND TRIM(COALESCE(NULLIF(d.sales_code, ''), NULLIF(d.dlrcode, ''))) <> ''
),
latest_submissions AS (
SELECT
s.id,
s.dealer_code,
s.financial_year,
s.quarter,
COALESCE(s.total_amount, 0)::numeric AS total_amount,
ROW_NUMBER() OVER (
PARTITION BY s.dealer_code, s.financial_year, s.quarter
ORDER BY COALESCE(s.version, 1) DESC, COALESCE(s.submitted_date, s.created_at) DESC, s.id DESC
) AS rn
FROM form16a_submissions s
INNER JOIN active_dealers ad ON ad.dealer_code = s.dealer_code
),
latest_base AS (
SELECT id, dealer_code, financial_year, quarter, total_amount
FROM latest_submissions
WHERE rn = 1
),
submitted_by_dealer AS (
SELECT
lb.dealer_code,
SUM(COALESCE(cn.amount, 0))::numeric AS submitted_amount
FROM latest_base lb
LEFT JOIN form_16_credit_notes cn ON cn.submission_id = lb.id
GROUP BY lb.dealer_code
)
SELECT
COALESCE((SELECT SUM(lb.total_amount) FROM latest_base lb), 0) AS total_amount,
COALESCE((SELECT SUM(sbd.submitted_amount) FROM submitted_by_dealer sbd), 0) AS submitted_amount,
COALESCE((SELECT COUNT(*) FROM active_dealers), 0) AS total_dealers,
COALESCE((
SELECT COUNT(DISTINCT sbd.dealer_code)
FROM submitted_by_dealer sbd
WHERE sbd.submitted_amount > 0
), 0) AS submitted_dealer_count
`,
{ type: QueryTypes.SELECT }
);
// Driver may return SQL numerics as strings (hence the number|string|null typing) — normalize.
const totalAmount = toNum(overallRow?.total_amount);
const submittedAmount = toNum(overallRow?.submitted_amount);
// Counts are forced to non-negative integers.
const totalDealers = Math.max(0, Math.trunc(toNum(overallRow?.total_dealers)));
const submittedDealerCount = Math.max(0, Math.trunc(toNum(overallRow?.submitted_dealer_count)));
// Pending figures are derived in code, clamped at zero so inconsistent data never yields negatives.
const pendingDealerCount = Math.max(0, totalDealers - submittedDealerCount);
const pendingAmount = Math.max(0, totalAmount - submittedAmount);
/** Percentage of `part` within `whole`, rounded to an integer and clamped to [0, 100]; returns 0 for a falsy or non-positive `whole`. */
const toPct = (part: number, whole: number): number => {
  if (!whole || whole <= 0) {
    return 0;
  }
  const rounded = Math.round((part / whole) * 100);
  return Math.min(100, Math.max(0, rounded));
};
// Per-financial-year breakdown using the same active-dealer / latest-submission CTE
// shape as the overall query above; grouped on financial_year, newest label first.
const yearRowsRaw = await sequelize.query<{
label: string;
total_amount: number | string | null;
dealer_count: number | string | null;
submitted_amount: number | string | null;
submitted_dealer_count: number | string | null;
}>(
`
WITH active_dealers AS (
SELECT DISTINCT
TRIM(COALESCE(NULLIF(d.sales_code, ''), NULLIF(d.dlrcode, ''))) AS dealer_code
FROM dealers d
WHERE d.is_active = true
AND TRIM(COALESCE(NULLIF(d.sales_code, ''), NULLIF(d.dlrcode, ''))) <> ''
),
latest_submissions AS (
SELECT
s.id,
s.dealer_code,
s.financial_year,
s.quarter,
COALESCE(s.total_amount, 0)::numeric AS total_amount,
ROW_NUMBER() OVER (
PARTITION BY s.dealer_code, s.financial_year, s.quarter
ORDER BY COALESCE(s.version, 1) DESC, COALESCE(s.submitted_date, s.created_at) DESC, s.id DESC
) AS rn
FROM form16a_submissions s
INNER JOIN active_dealers ad ON ad.dealer_code = s.dealer_code
),
latest_base AS (
SELECT id, dealer_code, financial_year, quarter, total_amount
FROM latest_submissions
WHERE rn = 1
),
by_year AS (
SELECT
lb.financial_year AS label,
SUM(lb.total_amount)::numeric AS total_amount,
COUNT(DISTINCT lb.dealer_code) AS dealer_count,
SUM(COALESCE(cn.amount, 0))::numeric AS submitted_amount,
COUNT(DISTINCT CASE WHEN COALESCE(cn.amount, 0) > 0 THEN lb.dealer_code END) AS submitted_dealer_count
FROM latest_base lb
LEFT JOIN form_16_credit_notes cn ON cn.submission_id = lb.id
GROUP BY lb.financial_year
)
SELECT * FROM by_year
ORDER BY label DESC
`,
{ type: QueryTypes.SELECT }
);
// Per-zone breakdown. Zone is derived from the first letter of dealers.region
// (N/S/E/W/C → North/South/East/West/Central, else 'Unknown'). Unlike the year
// query, this starts from active_dealers and LEFT JOINs submissions, so zones
// with zero submissions still appear with their dealer counts. Output is ordered
// North, Central, West, East, South, then anything else (e.g. 'Unknown') last.
const zoneRowsRaw = await sequelize.query<{
label: string;
total_amount: number | string | null;
dealer_count: number | string | null;
submitted_amount: number | string | null;
submitted_dealer_count: number | string | null;
}>(
`
WITH active_dealers AS (
SELECT DISTINCT
TRIM(COALESCE(NULLIF(d.sales_code, ''), NULLIF(d.dlrcode, ''))) AS dealer_code,
CASE
WHEN UPPER(COALESCE(d.region, '')) LIKE 'N%' THEN 'North'
WHEN UPPER(COALESCE(d.region, '')) LIKE 'S%' THEN 'South'
WHEN UPPER(COALESCE(d.region, '')) LIKE 'E%' THEN 'East'
WHEN UPPER(COALESCE(d.region, '')) LIKE 'W%' THEN 'West'
WHEN UPPER(COALESCE(d.region, '')) LIKE 'C%' THEN 'Central'
ELSE 'Unknown'
END AS zone
FROM dealers d
WHERE d.is_active = true
AND TRIM(COALESCE(NULLIF(d.sales_code, ''), NULLIF(d.dlrcode, ''))) <> ''
),
latest_submissions AS (
SELECT
s.id,
s.dealer_code,
COALESCE(s.total_amount, 0)::numeric AS total_amount,
ROW_NUMBER() OVER (
PARTITION BY s.dealer_code, s.financial_year, s.quarter
ORDER BY COALESCE(s.version, 1) DESC, COALESCE(s.submitted_date, s.created_at) DESC, s.id DESC
) AS rn
FROM form16a_submissions s
INNER JOIN active_dealers ad ON ad.dealer_code = s.dealer_code
),
latest_base AS (
SELECT id, dealer_code, total_amount
FROM latest_submissions
WHERE rn = 1
),
by_zone AS (
SELECT
ad.zone AS label,
SUM(COALESCE(lb.total_amount, 0))::numeric AS total_amount,
COUNT(DISTINCT ad.dealer_code) AS dealer_count,
SUM(COALESCE(cn.amount, 0))::numeric AS submitted_amount,
COUNT(DISTINCT CASE WHEN COALESCE(cn.amount, 0) > 0 THEN ad.dealer_code END) AS submitted_dealer_count
FROM active_dealers ad
LEFT JOIN latest_base lb ON lb.dealer_code = ad.dealer_code
LEFT JOIN form_16_credit_notes cn ON cn.submission_id = lb.id
GROUP BY ad.zone
)
SELECT * FROM by_zone
ORDER BY CASE label
WHEN 'North' THEN 1
WHEN 'Central' THEN 2
WHEN 'West' THEN 3
WHEN 'East' THEN 4
WHEN 'South' THEN 5
ELSE 99
END, label
`,
{ type: QueryTypes.SELECT }
);
// Normalize per-financial-year rows into the API shape, deriving pending figures
// per row (clamped at zero so amounts/counts never go negative).
const yearWise = (yearRowsRaw || []).map((row) => {
  const rowTotal = toNum(row.total_amount);
  const rowSubmitted = toNum(row.submitted_amount);
  const rowDealers = Math.max(0, Math.trunc(toNum(row.dealer_count)));
  const rowSubmittedDealers = Math.max(0, Math.trunc(toNum(row.submitted_dealer_count)));
  return {
    label: row.label,
    totalAmount: rowTotal,
    dealerCount: rowDealers,
    submittedAmount: rowSubmitted,
    submittedDealerCount: rowSubmittedDealers,
    pendingAmount: Math.max(0, rowTotal - rowSubmitted),
    pendingDealerCount: Math.max(0, rowDealers - rowSubmittedDealers),
  };
});
// Normalize per-zone rows into the API shape; same derivation as the year-wise
// mapping (pending values are clamped at zero).
const zoneWise = (zoneRowsRaw || []).map((row) => {
  const rowTotal = toNum(row.total_amount);
  const rowSubmitted = toNum(row.submitted_amount);
  const rowDealers = Math.max(0, Math.trunc(toNum(row.dealer_count)));
  const rowSubmittedDealers = Math.max(0, Math.trunc(toNum(row.submitted_dealer_count)));
  return {
    label: row.label,
    totalAmount: rowTotal,
    dealerCount: rowDealers,
    submittedAmount: rowSubmitted,
    submittedDealerCount: rowSubmittedDealers,
    pendingAmount: Math.max(0, rowTotal - rowSubmitted),
    pendingDealerCount: Math.max(0, rowDealers - rowSubmittedDealers),
  };
});
// Assemble the dashboard response.
// NOTE: collectionPct/pendingPct are dealer-count based, while
// submittedPct/submissionPendingPct are amount based — the two pairs use
// different denominators by design.
return {
kpi: {
collectionPct: toPct(submittedDealerCount, totalDealers),
pendingPct: toPct(pendingDealerCount, totalDealers),
submittedPct: toPct(submittedAmount, totalAmount),
submissionPendingPct: toPct(pendingAmount, totalAmount),
},
overall: {
totalAmount,
submittedAmount,
pendingAmount,
totalDealers,
submittedDealerCount,
pendingDealerCount,
},
yearWise,
zoneWise,
};
} }

View File

@ -79,7 +79,7 @@ STEP 2 - Extract these fields. For amounts, look in TABLES: find rows or columns
8. statusOfMatchingOltas - "Status of matching with OLTAS" or "OLTAS". Single letter (F, O, M) or word like "Matched". Extract as shown. 8. statusOfMatchingOltas - "Status of matching with OLTAS" or "OLTAS". Single letter (F, O, M) or word like "Matched". Extract as shown.
9. dateOfBooking - "Date of booking" or "Date of deposit". DD-MM-YYYY or DD/MM/YYYY. 9. dateOfBooking - For this workflow, use Form 16A "Last updated on" (or "Date of certificate") as booking date. DD-MM-YYYY or DD/MM/YYYY.
10. assessmentYear - "Assessment Year" or "AY" from the form header. Format YYYY-YY (e.g. 2025-26). This is the Form 16A assessment year. 10. assessmentYear - "Assessment Year" or "AY" from the form header. Format YYYY-YY (e.g. 2025-26). This is the Form 16A assessment year.
@ -355,8 +355,13 @@ function extractAssessmentYear(text: string): string | null {
function extractCertificateDate(text: string): string | null { function extractCertificateDate(text: string): string | null {
const patterns = [ const patterns = [
/Certificate\s*No\.?[^\n\r]*?Last\s*updated\s*on[:\s]*([0-9]{1,2}[-\/][A-Za-z]{3,9}[-\/][0-9]{4})/i,
/Certificate\s*No\.?[^\n\r]*?Last\s*updated\s*on[:\s]*([0-9]{1,2}[-\/][0-9]{1,2}[-\/][0-9]{4})/i,
/Last\s*updated\s*on[:\s]*([0-9]{1,2}[-\/][A-Za-z]{3,9}[-\/][0-9]{4})/i,
/Last\s*updated\s*on[:\s]*([0-9]{1,2}[-\/][0-9]{1,2}[-\/][0-9]{4})/i,
/Certificate\s*Date[:\s]*([0-9]{1,2}[-/][0-9]{1,2}[-/][0-9]{4})/i, /Certificate\s*Date[:\s]*([0-9]{1,2}[-/][0-9]{1,2}[-/][0-9]{4})/i,
/Date[:\s]*([0-9]{1,2}[-/][0-9]{1,2}[-/][0-9]{4})/i, /Date\s+of\s+certificate[:\s]*([0-9]{1,2}[-\/][A-Za-z]{3,9}[-\/][0-9]{4})/i,
/Date\s+of\s+certificate[:\s]*([0-9]{1,2}[-\/][0-9]{1,2}[-\/][0-9]{4})/i,
/Issued\s*on[:\s]*([0-9]{1,2}[-/][0-9]{1,2}[-/][0-9]{4})/i, /Issued\s*on[:\s]*([0-9]{1,2}[-/][0-9]{1,2}[-/][0-9]{4})/i,
]; ];
for (const pattern of patterns) { for (const pattern of patterns) {
@ -393,7 +398,8 @@ function parseForm16ARawText(text: string): Form16AExtractedData {
const transactionDate = extractTransactionDate(fullText); const transactionDate = extractTransactionDate(fullText);
const statusOfMatchingOltas = extractOltasStatus(fullText); const statusOfMatchingOltas = extractOltasStatus(fullText);
const certificateDate = extractCertificateDate(fullText); const certificateDate = extractCertificateDate(fullText);
const dateOfBooking = extractDateOfBooking(fullText); // Business rule: Form 16A "Last updated on" is the booking date used for 26AS matching.
const dateOfBooking = certificateDate ?? extractDateOfBooking(fullText);
let financialYear = extractFinancialYear(fullText); let financialYear = extractFinancialYear(fullText);
if (!financialYear && assessmentYear) { if (!financialYear && assessmentYear) {
const parts = assessmentYear.split(/[-/]/).map((p) => parseInt(p, 10)); const parts = assessmentYear.split(/[-/]/).map((p) => parseInt(p, 10));
@ -524,7 +530,8 @@ function sanitizeAndCleanGeminiData(extracted: Record<string, unknown>): Form16A
natureOfPayment: getStr(extracted.natureOfPayment), natureOfPayment: getStr(extracted.natureOfPayment),
transactionDate: getStr(extracted.transactionDate), transactionDate: getStr(extracted.transactionDate),
statusOfMatchingOltas: getStr(extracted.statusOfMatchingOltas), statusOfMatchingOltas: getStr(extracted.statusOfMatchingOltas),
dateOfBooking: getStr(extracted.dateOfBooking), // Business rule: map "Last updated on" (certificateDate) as booking date for matching/UI.
dateOfBooking: getStr(extracted.certificateDate ?? (extracted as any).lastUpdatedOn ?? extracted.dateOfBooking),
assessmentYear: getStr(extracted.assessmentYear), assessmentYear: getStr(extracted.assessmentYear),
quarter, quarter,
form16aNumber, form16aNumber,
@ -586,6 +593,21 @@ async function extractWithVertexAI(filePath: string, fileBase64: string, mimeTyp
return await fallbackExtraction(filePath); return await fallbackExtraction(filePath);
} }
const data = sanitizeAndCleanGeminiData(extractedData); const data = sanitizeAndCleanGeminiData(extractedData);
// Deterministic safeguard: re-parse raw PDF text and prefer the header "Last updated on" date
// to avoid the model picking unrelated "Date" fields (e.g., verification/challan rows).
try {
const fallback = await fallbackExtraction(filePath);
const fallbackData = fallback.success ? (fallback.data as Form16AExtractedData | undefined) : undefined;
const fallbackCert = getStr(fallbackData?.certificateDate);
if (fallbackCert) {
data.certificateDate = fallbackCert;
data.dateOfBooking = fallbackCert;
}
} catch (overrideErr) {
logger.warn('[Form16 OCR] Could not apply fallback date override:', overrideErr);
}
logger.info('[Form16 OCR] Vertex AI extraction completed successfully'); logger.info('[Form16 OCR] Vertex AI extraction completed successfully');
return { return {
success: true, success: true,

View File

@ -27,7 +27,7 @@ function normalizeEmail(email: string): string {
/** /**
* Load Form 16 viewer config from admin_configurations (API-driven). * Load Form 16 viewer config from admin_configurations (API-driven).
* Returns empty arrays if no config or parse error (empty = allow all). * Returns empty arrays if no config or parse error.
*/ */
export async function getForm16ViewerConfig(): Promise<Form16ViewerConfig> { export async function getForm16ViewerConfig(): Promise<Form16ViewerConfig> {
try { try {
@ -55,8 +55,8 @@ export async function getForm16ViewerConfig(): Promise<Form16ViewerConfig> {
* Check if user can view Form 16 submission data (Credit Notes, Non-submitted Dealers, etc.). * Check if user can view Form 16 submission data (Credit Notes, Non-submitted Dealers, etc.).
* - Admin: always allowed (full access to everything). * - Admin: always allowed (full access to everything).
* - Dealers: always allowed (they see their own submissions). * - Dealers: always allowed (they see their own submissions).
* - RE users: allowed if submissionViewerEmails is empty, or user email is in submissionViewerEmails, * - RE users: allowed only if user email is in submissionViewerEmails
* or user email is in twentySixAsViewerEmails (26AS access implies submission access so sidebar shows both). * OR in twentySixAsViewerEmails (26AS access implies submission access).
*/ */
export async function canViewForm16Submission( export async function canViewForm16Submission(
userEmail: string, userEmail: string,
@ -69,7 +69,6 @@ export async function canViewForm16Submission(
const config = await getForm16ViewerConfig(); const config = await getForm16ViewerConfig();
const email = normalizeEmail(userEmail); const email = normalizeEmail(userEmail);
if (!email) return false; if (!email) return false;
if (config.submissionViewerEmails.length === 0 && config.twentySixAsViewerEmails.length === 0) return true;
if (config.submissionViewerEmails.includes(email)) return true; if (config.submissionViewerEmails.includes(email)) return true;
if (config.twentySixAsViewerEmails.includes(email)) return true; if (config.twentySixAsViewerEmails.includes(email)) return true;
return false; return false;
@ -78,12 +77,12 @@ export async function canViewForm16Submission(
/** /**
* Check if user can view 26AS page and 26AS data. * Check if user can view 26AS page and 26AS data.
* - Admin: always allowed (full access to everything). * - Admin: always allowed (full access to everything).
* - Otherwise: allowed if twentySixAsViewerEmails is empty, or user email is in the list. * - Otherwise: allowed only if user email is in twentySixAsViewerEmails.
*/ */
export async function canView26As(userEmail: string, role?: string): Promise<boolean> { export async function canView26As(userEmail: string, role?: string): Promise<boolean> {
if (role === 'ADMIN') return true; if (role === 'ADMIN') return true;
const config = await getForm16ViewerConfig(); const config = await getForm16ViewerConfig();
const email = normalizeEmail(userEmail); const email = normalizeEmail(userEmail);
if (config.twentySixAsViewerEmails.length === 0) return true; if (!email) return false;
return config.twentySixAsViewerEmails.includes(email); return config.twentySixAsViewerEmails.includes(email);
} }

View File

@ -13,6 +13,18 @@ interface UploadFileOptions {
fileType: 'documents' | 'attachments'; // Type of file: documents or attachments fileType: 'documents' | 'attachments'; // Type of file: documents or attachments
} }
/** CPC/CSD uploads — same GCS vs local rules as {@link uploadFileWithFallback}. */
export interface UploadCpcCsdFileOptions {
buffer: Buffer;
originalName: string;
mimeType: string;
channel: 'csd' | 'cpc';
/** Booking / claim id (caller may pre-sanitize; service sanitizes again) */
bookingSegment: string;
/** When set, used as the final filename (no path segments). Otherwise same pattern as workflow documents. */
fileName?: string;
}
interface UploadResult { interface UploadResult {
storageUrl: string; storageUrl: string;
filePath: string; // GCS path filePath: string; // GCS path
@ -24,6 +36,28 @@ class GCSStorageService {
private bucketName: string = ''; private bucketName: string = '';
private projectId: string = ''; private projectId: string = '';
private getAbsoluteUploadPath(relativePath: string): string {
return path.join(UPLOAD_DIR, ...relativePath.replace(/\\/g, '/').split('/').filter(Boolean));
}
private renameLocalStoredFile(oldNorm: string, newRelativePath: string, newFileName: string): {
storageUrl: string;
filePath: string;
fileName: string;
} {
const fullOld = this.getAbsoluteUploadPath(oldNorm);
const fullNew = this.getAbsoluteUploadPath(newRelativePath);
if (!fs.existsSync(fullOld)) {
throw new Error(`Local file not found: ${oldNorm}`);
}
fs.mkdirSync(path.dirname(fullNew), { recursive: true });
fs.renameSync(fullOld, fullNew);
const normalizedPath = newRelativePath.replace(/\\/g, '/');
const storageUrl = `/uploads/${normalizedPath}`;
logger.info('[GCS] Renamed local Form 16 document', { oldNorm, newRelativePath: normalizedPath });
return { storageUrl, filePath: normalizedPath, fileName: newFileName };
}
constructor() { constructor() {
// Check if Google Secret Manager should be used // Check if Google Secret Manager should be used
const useGoogleSecretManager = process.env.USE_GOOGLE_SECRET_MANAGER === 'true'; const useGoogleSecretManager = process.env.USE_GOOGLE_SECRET_MANAGER === 'true';
@ -322,6 +356,146 @@ class GCSStorageService {
} }
} }
private cpcCsdSanitizeBookingSegment(segment: string): string {
const s = String(segment || '').trim();
if (!s) return 'unknown-booking';
return s.replace(/[^a-zA-Z0-9._-]+/g, '_').replace(/_+/g, '_').slice(0, 120);
}
/** Same filename pattern as workflow `saveToLocalStorage` / `uploadFile`. */
private buildCpcCsdFileName(originalName: string, explicit?: string): string {
if (explicit && !explicit.includes('/') && !explicit.includes('..')) {
return explicit;
}
const timestamp = Date.now();
const randomHash = Math.random().toString(36).substring(2, 8);
const safeName = originalName.replace(/[^a-zA-Z0-9._-]/g, '_');
const extension = path.extname(originalName);
const nameWithoutExt = safeName.substring(0, Math.max(0, safeName.length - extension.length));
return `${nameWithoutExt}-${timestamp}-${randomHash}${extension}`;
}
/**
* Relative object path (same string in GCS and under {@link UPLOAD_DIR} for local fallback).
* Example: `cpc-csd-files/csd/BOOK-1/documents/scan-1713-abc.pdf`
*/
private cpcCsdRelativeObjectPath(channel: 'csd' | 'cpc', bookingSeg: string, fileName: string): string {
const ch = channel === 'cpc' ? 'cpc' : 'csd';
const b = this.cpcCsdSanitizeBookingSegment(bookingSeg);
return `cpc-csd-files/${ch}/${b}/documents/${fileName}`.replace(/\\/g, '/');
}
/**
* Upload CPC/CSD document to GCS (same bucket lifecycle as workflow requests).
*/
async uploadCpcCsdFile(options: UploadCpcCsdFileOptions): Promise<UploadResult> {
if (!this.storage) {
throw new Error('GCS storage not initialized. Check GCP configuration.');
}
const { buffer, originalName, mimeType, channel, bookingSegment } = options;
if (!buffer?.length) {
throw new Error('Buffer is required for CPC/CSD upload');
}
const fileName = this.buildCpcCsdFileName(originalName, options.fileName);
const gcsFilePath = this.cpcCsdRelativeObjectPath(channel, bookingSegment, fileName);
try {
await this.ensureBucketExists();
const bucket = this.storage.bucket(this.bucketName);
const file = bucket.file(gcsFilePath);
const uploadOptions: any = {
metadata: {
contentType: mimeType,
metadata: {
originalName,
uploadedAt: new Date().toISOString(),
cpcCsdChannel: channel,
},
},
};
await file.save(buffer, uploadOptions);
let publicUrl: string;
try {
await file.makePublic();
publicUrl = `https://storage.googleapis.com/${this.bucketName}/${gcsFilePath}`;
} catch (makePublicError: any) {
if (makePublicError?.code === 400 || makePublicError?.message?.includes('publicAccessPrevention')) {
logger.warn('[GCS] CPC/CSD file cannot be public; using signed URL.');
publicUrl = await this.getSignedUrl(gcsFilePath, 60 * 24 * 365);
} else {
throw makePublicError;
}
}
logger.info('[GCS] CPC/CSD file uploaded', { gcsPath: gcsFilePath, storageUrl: publicUrl });
return {
storageUrl: publicUrl,
filePath: gcsFilePath,
fileName,
};
} catch (error) {
logger.error('[GCS] CPC/CSD upload failed:', error);
throw new Error(`Failed to upload CPC/CSD file to GCS: ${error instanceof Error ? error.message : 'Unknown error'}`);
}
}
/**
* Local fallback for CPC/CSD mirrors folder layout used in GCS (`cpc-csd-files/...`).
*/
saveCpcCsdToLocalStorage(options: UploadCpcCsdFileOptions): UploadResult {
const { buffer, originalName, channel, bookingSegment } = options;
if (!buffer?.length) {
throw new Error('Buffer is required for CPC/CSD local storage');
}
const fileName = this.buildCpcCsdFileName(originalName, options.fileName);
const relativePath = this.cpcCsdRelativeObjectPath(channel, bookingSegment, fileName);
const segments = relativePath.split('/').filter(Boolean);
const localDir = path.join(UPLOAD_DIR, ...segments.slice(0, -1));
const localFilePath = path.join(UPLOAD_DIR, ...segments);
if (!fs.existsSync(localDir)) {
fs.mkdirSync(localDir, { recursive: true });
}
fs.writeFileSync(localFilePath, buffer);
const storageUrl = `/uploads/${relativePath}`;
logger.info('[GCS] CPC/CSD file saved to local storage (fallback)', {
originalName,
localPath: relativePath,
storageUrl,
});
return {
storageUrl,
filePath: relativePath,
fileName,
};
}
/**
* CPC/CSD: try GCS first, then fall back to local storage under `uploads/` — same behaviour as {@link uploadFileWithFallback}.
*/
async uploadCpcCsdFileWithFallback(options: UploadCpcCsdFileOptions): Promise<UploadResult> {
if (!this.isConfigured()) {
logger.info('[GCS] GCS not configured, using local storage for CPC/CSD');
return this.saveCpcCsdToLocalStorage(options);
}
try {
return await this.uploadCpcCsdFile(options);
} catch (gcsError) {
logger.warn('[GCS] CPC/CSD GCS upload failed, falling back to local storage', { error: gcsError });
return this.saveCpcCsdToLocalStorage(options);
}
}
/** /**
* Upload file with automatic fallback to local storage * Upload file with automatic fallback to local storage
* If GCS is configured and works, uploads to GCS. Otherwise, saves to local storage. * If GCS is configured and works, uploads to GCS. Otherwise, saves to local storage.
@ -345,6 +519,101 @@ class GCSStorageService {
} }
} }
/**
* Rename a file already stored under uploads/requests/{requestNumber}/... (same shape as upload).
* GCS: copy to new name in same folder, delete old object. Local: rename on disk.
*/
async renameRequestDocumentFile(options: {
oldRelativePath: string;
newFileName: string;
}): Promise<{ storageUrl: string; filePath: string; fileName: string }> {
const { oldRelativePath } = options;
let newFileName = path.basename(String(options.newFileName || '').trim());
logger.info('[GCS] renameRequestDocumentFile called', {
oldRelativePath,
requestedNewFileName: options.newFileName,
sanitizedNewFileName: newFileName,
nodeEnv: process.env.NODE_ENV,
bucket: this.bucketName || null,
useGoogleSecretManager: process.env.USE_GOOGLE_SECRET_MANAGER || null,
});
if (!newFileName || newFileName === '.' || newFileName === '..') {
throw new Error('Invalid new file name');
}
if (!oldRelativePath || oldRelativePath.includes('..')) {
throw new Error('Invalid old path');
}
const oldNorm = oldRelativePath.replace(/\\/g, '/');
const dir = path.posix.dirname(oldNorm);
const newRelativePath = `${dir}/${newFileName}`;
const localOldExists = fs.existsSync(this.getAbsoluteUploadPath(oldNorm));
logger.info('[GCS] renameRequestDocumentFile storage presence check', {
oldNorm,
newRelativePath,
localOldExists,
gcsConfigured: this.isConfigured(),
});
if (!this.isConfigured()) {
logger.info('[GCS] renameRequestDocumentFile using local storage mode');
return this.renameLocalStoredFile(oldNorm, newRelativePath, newFileName);
}
// Important: if upload previously fell back to local storage, rename must also happen locally
// even when GCS is configured in the current environment.
if (localOldExists) {
logger.info('[GCS] renameRequestDocumentFile detected local-stored file; using local rename');
return this.renameLocalStoredFile(oldNorm, newRelativePath, newFileName);
}
if (!this.storage) {
throw new Error('GCS storage not initialized');
}
try {
const bucket = this.storage.bucket(this.bucketName);
const oldFile = bucket.file(oldNorm);
const [exists] = await oldFile.exists();
logger.info('[GCS] renameRequestDocumentFile GCS existence check', {
oldNorm,
exists,
newRelativePath,
});
if (!exists) {
throw new Error(`GCS file not found: ${oldNorm}`);
}
const newFile = bucket.file(newRelativePath);
await oldFile.copy(newFile);
logger.info('[GCS] renameRequestDocumentFile copy success', { from: oldNorm, to: newRelativePath });
await oldFile.delete();
logger.info('[GCS] renameRequestDocumentFile delete old success', { oldNorm });
let publicUrl: string;
try {
await newFile.makePublic();
publicUrl = `https://storage.googleapis.com/${this.bucketName}/${newRelativePath}`;
} catch (makePublicError: any) {
if (makePublicError?.code === 400 || makePublicError?.message?.includes('publicAccessPrevention')) {
logger.warn('[GCS] Renamed file cannot be public; using signed URL.');
publicUrl = await this.getSignedUrl(newRelativePath, 60 * 24 * 365);
} else {
throw makePublicError;
}
}
logger.info('[GCS] Renamed document in bucket', { from: oldNorm, to: newRelativePath });
return {
storageUrl: publicUrl,
filePath: newRelativePath,
fileName: newFileName,
};
} catch (error) {
logger.error('[GCS] renameRequestDocumentFile failed:', error);
throw new Error(`Failed to rename file: ${error instanceof Error ? error.message : 'Unknown error'}`);
}
}
/** /**
* Check if GCS is properly configured * Check if GCS is properly configured
*/ */

View File

@ -36,8 +36,12 @@ class NotificationService {
logger.warn('VAPID keys are not configured. Push notifications are disabled.'); logger.warn('VAPID keys are not configured. Push notifications are disabled.');
return; return;
} }
webpush.setVapidDetails(contact, pub, priv); try {
logger.info('Web Push configured'); webpush.setVapidDetails(contact, pub, priv);
logger.info('Web Push configured');
} catch (error) {
logger.warn('Invalid VAPID keys. Push notifications are disabled.', error);
}
} }
async addSubscription(userId: string, subscription: PushSubscription, userAgent?: string) { async addSubscription(userId: string, subscription: PushSubscription, userAgent?: string) {

View File

@ -16,6 +16,7 @@ export class SAPIntegrationService {
private sapServiceName: string; // OData service name for IO validation (e.g., ZFI_BUDGET_CHECK_API_SRV) private sapServiceName: string; // OData service name for IO validation (e.g., ZFI_BUDGET_CHECK_API_SRV)
private sapBlockServiceName: string; // OData service name for budget blocking (e.g., ZFI_BUDGET_BLOCK_API_SRV) private sapBlockServiceName: string; // OData service name for budget blocking (e.g., ZFI_BUDGET_BLOCK_API_SRV)
private sapRequester: string; // Requester identifier for budget blocking private sapRequester: string; // Requester identifier for budget blocking
private sapClientNum?: string; // Optional SAP Client number
constructor() { constructor() {
this.sapBaseUrl = process.env.SAP_BASE_URL || ''; this.sapBaseUrl = process.env.SAP_BASE_URL || '';
@ -28,6 +29,7 @@ export class SAPIntegrationService {
this.sapBlockServiceName = process.env.SAP_BLOCK_SERVICE_NAME || 'ZFI_BUDGET_BLOCK_API_SRV'; this.sapBlockServiceName = process.env.SAP_BLOCK_SERVICE_NAME || 'ZFI_BUDGET_BLOCK_API_SRV';
// Requester identifier for budget blocking API // Requester identifier for budget blocking API
this.sapRequester = process.env.SAP_REQUESTER || 'REFMS'; this.sapRequester = process.env.SAP_REQUESTER || 'REFMS';
this.sapClientNum = process.env.SAP_CLIENT; // Only use if explicitly provided
} }
/** /**
@ -59,9 +61,11 @@ export class SAPIntegrationService {
try { try {
// Build service root URL with required query parameters // Build service root URL with required query parameters
const serviceRootUrl = `/sap/opu/odata/sap/${serviceName}/`; const serviceRootUrl = `/sap/opu/odata/sap/${serviceName}/`;
const queryParams = new URLSearchParams({ const params: Record<string, string> = { '$format': 'json' };
'$format': 'json' if (this.sapClientNum) {
}); params['sap-client'] = this.sapClientNum;
}
const queryParams = new URLSearchParams(params);
const fullUrl = `${this.sapBaseUrl}${serviceRootUrl}?${queryParams.toString()}`; const fullUrl = `${this.sapBaseUrl}${serviceRootUrl}?${queryParams.toString()}`;
logger.debug(`[SAP] Fetching CSRF token from service: ${serviceName}`); logger.debug(`[SAP] Fetching CSRF token from service: ${serviceName}`);
@ -275,12 +279,18 @@ export class SAPIntegrationService {
// $select: Select specific fields (Sender, ResponseDate, GetIODetailsSet01) // $select: Select specific fields (Sender, ResponseDate, GetIODetailsSet01)
// $expand: Expand the nested GetIODetailsSet01 entity set to get IO details // $expand: Expand the nested GetIODetailsSet01 entity set to get IO details
// $format: Explicitly request JSON format // $format: Explicitly request JSON format
const queryParams = new URLSearchParams({ const params: Record<string, string> = {
'$filter': `IONumber eq '${ioNumber}'`, '$filter': `IONumber eq '${ioNumber}'`,
'$select': 'Sender,ResponseDate,GetIODetailsSet01', '$select': 'Sender,ResponseDate,GetIODetailsSet01',
'$expand': 'GetIODetailsSet01', '$expand': 'GetIODetailsSet01',
'$format': 'json' '$format': 'json'
}); };
if (this.sapClientNum) {
params['sap-client'] = this.sapClientNum;
}
const queryParams = new URLSearchParams(params);
const fullUrl = `${endpoint}?${queryParams.toString()}`; const fullUrl = `${endpoint}?${queryParams.toString()}`;
@ -508,6 +518,7 @@ export class SAPIntegrationService {
const requestPayload = { const requestPayload = {
Request_Date_Time: requestDateTime, Request_Date_Time: requestDateTime,
Requester: this.sapRequester, Requester: this.sapRequester,
IODate: requestDateTime,
lt_io_input: [ lt_io_input: [
{ {
IONumber: ioNumber, IONumber: ioNumber,

View File

@ -0,0 +1,25 @@
/**
* CPC/CSD document module admin_configurations keys for viewer allow-list.
* Legacy key kept for reads until migrated or overwritten by admin save.
*/
import { QueryTypes } from 'sequelize';
import { sequelize } from '../config/database';
export const CPC_CSD_ADMIN_CONFIG_KEY = 'CPC_CSD_ADMIN_CONFIG';
export const CPC_CDC_ADMIN_CONFIG_KEY_LEGACY = 'CPC_CDC_ADMIN_CONFIG';
/** Prefer CPC_CSD_ADMIN_CONFIG row; fall back to legacy CPC_CDC_ADMIN_CONFIG if present. */
export async function selectCpcCsdAdminConfigValue(): Promise<string | null> {
const result = await sequelize.query<{ config_value: string }>(
`SELECT config_value FROM admin_configurations
WHERE config_key IN (:kCsd, :kLegacy)
ORDER BY CASE WHEN config_key = :kCsd THEN 0 ELSE 1 END
LIMIT 1`,
{
replacements: { kCsd: CPC_CSD_ADMIN_CONFIG_KEY, kLegacy: CPC_CDC_ADMIN_CONFIG_KEY_LEGACY },
type: QueryTypes.SELECT,
}
);
if (!result?.length || !result[0].config_value) return null;
return result[0].config_value;
}

View File

@ -101,3 +101,8 @@ export const updateForm16ConfigSchema = z.object({
reminderRunAtTime: z.string().regex(/^(\d{1,2}:\d{2})?$/, 'Time must be in HH:mm format').optional(), reminderRunAtTime: z.string().regex(/^(\d{1,2}:\d{2})?$/, 'Time must be in HH:mm format').optional(),
reminderNotificationTemplate: z.string().optional(), reminderNotificationTemplate: z.string().optional(),
}); });
// ── CPC-CSD Configuration Schemas ──
export const updateCpcCdcConfigSchema = z.object({
viewerEmails: z.array(z.string().email()).optional(),
});