Implemented CPC-CSD OCR
This commit is contained in:
parent
872bda4731
commit
657191ce2b
5
.env.docker
Normal file
5
.env.docker
Normal file
@ -0,0 +1,5 @@
|
||||
# Auto-loaded by `docker compose` for ${VAR} substitution in docker-compose.yml (not passed into every container).
|
||||
# Okta SPA values (public) — same preview tenant as CPC-CSD client dev.
|
||||
VITE_OKTA_DOMAIN=https://dev-830839.oktapreview.com
|
||||
VITE_OKTA_CLIENT_ID=0oa2jgzvrpdwx2iqd0h8
|
||||
|
||||
@ -0,0 +1,890 @@
|
||||
{
|
||||
"info": {
|
||||
"_postman_id": "re-workflow-cpc-csd-complete-2026",
|
||||
"name": "RE Workflow — CPC-CSD API (complete)",
|
||||
"description": "## Purpose\nCovers **all CPC-CSD HTTP APIs** used by the browser (Dashboard, History, reports) so Postman can replace manual UI testing once tokens and URLs are set.\n\n## Authentication\n1. Import **RE_Workflow_CPC_CDC_API** environment and select it.\n2. Paste JWT into **accessToken** (no `Bearer ` prefix).\n3. Run **01_Session_and_health → GET Auth me** — expect **200**. Then use **02_CPC_CSD_API** and onward.\n4. User must be **ADMIN** or listed in **CPC_CSD_ADMIN_CONFIG** viewer emails.\n\n## URL base\n- **apiRoot** = `{hostUrl}/api/v1` — all CPC-CSD requests in this collection use **`{apiRoot}/cpc-csd/...`** (canonical API; legacy **`{apiRoot}/cpc-cdc/...`** is the same).\n- The SPA may still call **`{hostUrl}/api/documents/...`** (legacy layout); behaviour is the same — see `docs/CPC-CDC.md` if you need those paths.\n- **Bare GCS staging** (no metadata): `POST {hostUrl}/api/upload` — single multipart field **`file`**.\n\n## Multipart (same as `Dashboard.jsx`)\n| Operation | Text fields | Files |\n|-----------|-------------|-------|\n| `POST .../v1/ocr/upload` | `claim_id`, `booking_id`, `booking_type` (CPC or CSD), `provider`, **`metadata_queue`** (JSON **string** of array) | **`files`** — repeat field name; order matches `metadata_queue` |\n| `POST .../v1/ocr/validate-upload` | `claim_id`, `booking_id`, `booking_type`, `document_type`, `provider`, **`msd_payload`** (JSON string), optional `skip_min_attachment_check=true` | **`file`** (single) |\n| `POST /api/upload` | — | **`file`** |\n\nEach `metadata_queue[]` item: `document_type`, `msd_payload` (object), `expected_field_keys` (unique list of keys to run rules on).\n\n## Metadata — business names vs JSON keys (`metadata_queue` / `msd_payload`)\nUse **these JSON property names** in each `msd_payload` and the same names in `expected_field_keys` (see env `metadataQueueJsonCsdPo`, `metadataQueueJsonCpcTwoFiles`). 
**Legacy keys** from older integrations are still accepted (`order_or_authorisation_number`, `invoice_value`, `govt_signatory_and_stamp_present`, `authorized_person_name`, `name`, `aadhaar_number`).\n\n### 1. CSD claim (1 document) — Purchase Order (PO)\n| Business name | JSON key | Rule |\n|----------------|----------|------|\n| Customer Name | `customer_name` | Accuracy between 90% – 100% |\n| PO Number | `po_number` | 100% accuracy required |\n| PO Amount | `po_amount` | Tolerance of ±5 rupees |\n| Signature & Stamp | `signature_and_stamp` | Binary check (Available / Not Available) |\n\n### 2. CPC claim (2 documents)\n**Document 1 — Authorization Letter**\n| Customer Name | `customer_name` | 90% – 100% |\n| Letter Number | `letter_number` | 90% – 100% |\n| Letter Amount | `letter_amount` | ±5 rupees |\n| Signature & Stamp | `signature_and_stamp` | Binary (Available / Not Available) |\n\n**Document 2 — Aadhaar card**\n| Customer Name | `customer_name` | 90% – 100% |\n| Aadhar Number | `aadhar_number` | 100% accuracy required |\n\n## Provider vs model\n- **ocrProvider** in env: pipeline mode (`GEMINI_VERTEX_DIRECT`, `GEMINI_VERTEX`, `RULES`).\n- **Gemini model id** (e.g. `gemini-2.0-flash-lite`) is **server** `GEMINI_MODEL` in `re-workflow-be/.env`, not Postman.\n\n## Limits\n- 15 MB per file; ZIP not allowed; max 20 files on bulk upload.\n\n## Reference\n- Repo: `re-workflow-be/docs/CPC-CDC.md`",
|
||||
"schema": "https://schema.getpostman.com/json/collection/v2.1.0/collection.json"
|
||||
},
|
||||
"auth": {
|
||||
"type": "bearer",
|
||||
"bearer": [
|
||||
{
|
||||
"key": "token",
|
||||
"value": "{{accessToken}}",
|
||||
"type": "string"
|
||||
}
|
||||
]
|
||||
},
|
||||
"variable": [
|
||||
{
|
||||
"key": "hostUrl",
|
||||
"value": "http://localhost:5000"
|
||||
},
|
||||
{
|
||||
"key": "apiRoot",
|
||||
"value": "http://localhost:5000/api/v1"
|
||||
}
|
||||
],
|
||||
"item": [
|
||||
{
|
||||
"name": "01_Session_and_health",
|
||||
"description": "Verify connectivity and JWT before CPC calls.",
|
||||
"item": [
|
||||
{
|
||||
"name": "GET Health (no auth)",
|
||||
"request": {
|
||||
"method": "GET",
|
||||
"header": [],
|
||||
"url": "{{hostUrl}}/health",
|
||||
"description": "Public liveness. No Bearer.",
|
||||
"auth": {
|
||||
"type": "noauth"
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "GET Auth me",
|
||||
"request": {
|
||||
"method": "GET",
|
||||
"header": [],
|
||||
"url": "{{apiRoot}}/auth/me",
|
||||
"description": "Confirms accessToken is accepted.",
|
||||
"auth": {
|
||||
"type": "bearer",
|
||||
"bearer": [
|
||||
{
|
||||
"key": "token",
|
||||
"value": "{{accessToken}}",
|
||||
"type": "string"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "02_CPC_CSD_API",
|
||||
"description": "Canonical routes: `{{apiRoot}}/cpc-csd/...` plus bare `POST /api/upload`.",
|
||||
"item": [
|
||||
{
|
||||
"name": "GET Permissions",
|
||||
"request": {
|
||||
"method": "GET",
|
||||
"header": [],
|
||||
"url": "{{apiRoot}}/cpc-csd/permissions",
|
||||
"auth": {
|
||||
"type": "bearer",
|
||||
"bearer": [
|
||||
{
|
||||
"key": "token",
|
||||
"value": "{{accessToken}}",
|
||||
"type": "string"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "GET Documents analytics",
|
||||
"request": {
|
||||
"method": "GET",
|
||||
"header": [],
|
||||
"url": "{{apiRoot}}/cpc-csd/documents/analytics",
|
||||
"auth": {
|
||||
"type": "bearer",
|
||||
"bearer": [
|
||||
{
|
||||
"key": "token",
|
||||
"value": "{{accessToken}}",
|
||||
"type": "string"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "GET Documents history by claim",
|
||||
"request": {
|
||||
"method": "GET",
|
||||
"header": [],
|
||||
"url": "{{apiRoot}}/cpc-csd/documents/history?claimId={{claimIdCpc}}",
|
||||
"auth": {
|
||||
"type": "bearer",
|
||||
"bearer": [
|
||||
{
|
||||
"key": "token",
|
||||
"value": "{{accessToken}}",
|
||||
"type": "string"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "GET Documents recent (paginated)",
|
||||
"request": {
|
||||
"method": "GET",
|
||||
"header": [],
|
||||
"url": "{{apiRoot}}/cpc-csd/documents/recent?page={{recentPage}}&limit={{recentLimit}}&search={{recentSearch}}&status={{recentStatus}}&type={{recentType}}&sortBy={{recentSortBy}}&order={{recentOrder}}",
|
||||
"description": "Optional: Test script saves first item id into **cpcDocumentId** for follow-up GET/PUT/DELETE.",
|
||||
"auth": {
|
||||
"type": "bearer",
|
||||
"bearer": [
|
||||
{
|
||||
"key": "token",
|
||||
"value": "{{accessToken}}",
|
||||
"type": "string"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"event": [
|
||||
{
|
||||
"listen": "test",
|
||||
"script": {
|
||||
"exec": [
|
||||
"if (pm.response.code === 200) {",
|
||||
" try {",
|
||||
" const j = pm.response.json();",
|
||||
" const items = j.items || [];",
|
||||
" if (items.length && items[0].id != null) {",
|
||||
" pm.environment.set('cpcDocumentId', String(items[0].id));",
|
||||
" }",
|
||||
" } catch (e) { /* ignore */ }",
|
||||
"}"
|
||||
],
|
||||
"type": "text/javascript"
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "GET Document by id",
|
||||
"request": {
|
||||
"method": "GET",
|
||||
"header": [],
|
||||
"url": "{{apiRoot}}/cpc-csd/documents/{{cpcDocumentId}}",
|
||||
"auth": {
|
||||
"type": "bearer",
|
||||
"bearer": [
|
||||
{
|
||||
"key": "token",
|
||||
"value": "{{accessToken}}",
|
||||
"type": "string"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "GET Document file binary",
|
||||
"request": {
|
||||
"method": "GET",
|
||||
"header": [],
|
||||
"url": "{{apiRoot}}/cpc-csd/documents/{{cpcDocumentId}}/file",
|
||||
"auth": {
|
||||
"type": "bearer",
|
||||
"bearer": [
|
||||
{
|
||||
"key": "token",
|
||||
"value": "{{accessToken}}",
|
||||
"type": "string"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "PUT Document status",
|
||||
"request": {
|
||||
"method": "PUT",
|
||||
"header": [
|
||||
{
|
||||
"key": "Content-Type",
|
||||
"value": "application/json"
|
||||
}
|
||||
],
|
||||
"body": {
|
||||
"mode": "raw",
|
||||
"raw": "{{putStatusBodyJson}}"
|
||||
},
|
||||
"url": "{{apiRoot}}/cpc-csd/documents/{{cpcDocumentId}}/status",
|
||||
"auth": {
|
||||
"type": "bearer",
|
||||
"bearer": [
|
||||
{
|
||||
"key": "token",
|
||||
"value": "{{accessToken}}",
|
||||
"type": "string"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "DELETE Document",
|
||||
"request": {
|
||||
"method": "DELETE",
|
||||
"header": [],
|
||||
"url": "{{apiRoot}}/cpc-csd/documents/{{cpcDocumentId}}",
|
||||
"auth": {
|
||||
"type": "bearer",
|
||||
"bearer": [
|
||||
{
|
||||
"key": "token",
|
||||
"value": "{{accessToken}}",
|
||||
"type": "string"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "GET Report Excel per claim",
|
||||
"request": {
|
||||
"method": "GET",
|
||||
"header": [],
|
||||
"url": "{{apiRoot}}/cpc-csd/v1/ocr/report/{{claimIdCpc}}/download{{reportAttemptQuery}}",
|
||||
"auth": {
|
||||
"type": "bearer",
|
||||
"bearer": [
|
||||
{
|
||||
"key": "token",
|
||||
"value": "{{accessToken}}",
|
||||
"type": "string"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "GET Report Excel master",
|
||||
"request": {
|
||||
"method": "GET",
|
||||
"header": [],
|
||||
"url": "{{apiRoot}}/cpc-csd/v1/ocr/report/all/download?search={{masterReportSearch}}&status={{masterReportStatus}}&type={{masterReportType}}",
|
||||
"auth": {
|
||||
"type": "bearer",
|
||||
"bearer": [
|
||||
{
|
||||
"key": "token",
|
||||
"value": "{{accessToken}}",
|
||||
"type": "string"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "GET Report per claim (alt path)",
|
||||
"request": {
|
||||
"method": "GET",
|
||||
"header": [],
|
||||
"url": "{{apiRoot}}/cpc-csd/report/{{claimIdCpc}}/download{{reportAttemptQuery}}",
|
||||
"auth": {
|
||||
"type": "bearer",
|
||||
"bearer": [
|
||||
{
|
||||
"key": "token",
|
||||
"value": "{{accessToken}}",
|
||||
"type": "string"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "GET Report master (alt path)",
|
||||
"request": {
|
||||
"method": "GET",
|
||||
"header": [],
|
||||
"url": "{{apiRoot}}/cpc-csd/report/all/download",
|
||||
"auth": {
|
||||
"type": "bearer",
|
||||
"bearer": [
|
||||
{
|
||||
"key": "token",
|
||||
"value": "{{accessToken}}",
|
||||
"type": "string"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "POST Bare file upload (GCS)",
|
||||
"request": {
|
||||
"method": "POST",
|
||||
"header": [],
|
||||
"body": {
|
||||
"mode": "formdata",
|
||||
"formdata": [
|
||||
{
|
||||
"key": "file",
|
||||
"type": "file",
|
||||
"src": []
|
||||
}
|
||||
]
|
||||
},
|
||||
"url": "{{hostUrl}}/api/upload",
|
||||
"description": "Returns `{ gcsUrl }`. Same as compat route; not under /cpc-csd prefix.",
|
||||
"auth": {
|
||||
"type": "bearer",
|
||||
"bearer": [
|
||||
{
|
||||
"key": "token",
|
||||
"value": "{{accessToken}}",
|
||||
"type": "string"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "03_Upload_CSD_PO",
|
||||
"description": "**CSD — 1 document — Purchase Order (PO).** One `files` part. `metadata_queue` = JSON array with one object: `document_type` CSD_PO, `msd_payload` + `expected_field_keys` using `customer_name`, `po_number`, `po_amount`, `signature_and_stamp` (see collection root).",
|
||||
"item": [
|
||||
{
|
||||
"name": "POST Upload CSD",
|
||||
"description": "**CSD PO upload.** Form-data `metadata_queue` must be a **stringified JSON** array (see env `metadataQueueJsonCsdPo`). Keys: `customer_name`, `po_number`, `po_amount`, `signature_and_stamp`.",
|
||||
"request": {
|
||||
"method": "POST",
|
||||
"header": [],
|
||||
"body": {
|
||||
"mode": "formdata",
|
||||
"formdata": [
|
||||
{
|
||||
"key": "claim_id",
|
||||
"value": "{{claimIdCsd}}",
|
||||
"type": "text"
|
||||
},
|
||||
{
|
||||
"key": "booking_id",
|
||||
"value": "{{claimIdCsd}}",
|
||||
"type": "text"
|
||||
},
|
||||
{
|
||||
"key": "booking_type",
|
||||
"value": "CSD",
|
||||
"type": "text"
|
||||
},
|
||||
{
|
||||
"key": "provider",
|
||||
"value": "{{ocrProvider}}",
|
||||
"type": "text"
|
||||
},
|
||||
{
|
||||
"key": "metadata_queue",
|
||||
"value": "{{metadataQueueJsonCsdPo}}",
|
||||
"type": "text",
|
||||
"description": "JSON string: 1× CSD_PO. Keys = `customer_name`, `po_number`, `po_amount`, `signature_and_stamp` (rules in collection description)."
|
||||
},
|
||||
{
|
||||
"key": "files",
|
||||
"type": "file",
|
||||
"src": []
|
||||
}
|
||||
]
|
||||
},
|
||||
"url": "{{apiRoot}}/cpc-csd/v1/ocr/upload",
|
||||
"auth": {
|
||||
"type": "bearer",
|
||||
"bearer": [
|
||||
{
|
||||
"key": "token",
|
||||
"value": "{{accessToken}}",
|
||||
"type": "string"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "04_Upload_CPC_two_files",
|
||||
"description": "**CPC — 2 documents.** Two `files` parts (same field name `files` twice). `metadata_queue` array order must match file order: **[1] Authorization letter (CPC_AUTH)** then **[2] Aadhaar (AADHAAR)** — field names per collection root description.",
|
||||
"item": [
|
||||
{
|
||||
"name": "POST Upload CPC (2 files)",
|
||||
"description": "**CPC two-file upload.** `metadata_queue` = env `metadataQueueJsonCpcTwoFiles`. Doc1: `customer_name`, `letter_number`, `letter_amount`, `signature_and_stamp`. Doc2: `customer_name`, `aadhar_number`.",
|
||||
"request": {
|
||||
"method": "POST",
|
||||
"header": [],
|
||||
"body": {
|
||||
"mode": "formdata",
|
||||
"formdata": [
|
||||
{
|
||||
"key": "claim_id",
|
||||
"value": "{{claimIdCpc}}",
|
||||
"type": "text"
|
||||
},
|
||||
{
|
||||
"key": "booking_id",
|
||||
"value": "{{claimIdCpc}}",
|
||||
"type": "text"
|
||||
},
|
||||
{
|
||||
"key": "booking_type",
|
||||
"value": "CPC",
|
||||
"type": "text"
|
||||
},
|
||||
{
|
||||
"key": "provider",
|
||||
"value": "{{ocrProvider}}",
|
||||
"type": "text"
|
||||
},
|
||||
{
|
||||
"key": "metadata_queue",
|
||||
"value": "{{metadataQueueJsonCpcTwoFiles}}",
|
||||
"type": "text",
|
||||
"description": "JSON string: [0]=CPC_AUTH (`customer_name`, `letter_number`, `letter_amount`, `signature_and_stamp`), [1]=AADHAAR (`customer_name`, `aadhar_number`). Order matches the two `files` rows."
|
||||
},
|
||||
{
|
||||
"key": "files",
|
||||
"type": "file",
|
||||
"src": []
|
||||
},
|
||||
{
|
||||
"key": "files",
|
||||
"type": "file",
|
||||
"src": []
|
||||
}
|
||||
]
|
||||
},
|
||||
"url": "{{apiRoot}}/cpc-csd/v1/ocr/upload",
|
||||
"auth": {
|
||||
"type": "bearer",
|
||||
"bearer": [
|
||||
{
|
||||
"key": "token",
|
||||
"value": "{{accessToken}}",
|
||||
"type": "string"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "05_Validate_upload_single_file",
|
||||
"description": "Single `file`; `document_type` + `msd_payload` JSON string.",
|
||||
"item": [
|
||||
{
|
||||
"name": "validate-upload CPC_AUTH",
|
||||
"request": {
|
||||
"method": "POST",
|
||||
"header": [],
|
||||
"body": {
|
||||
"mode": "formdata",
|
||||
"formdata": [
|
||||
{
|
||||
"key": "claim_id",
|
||||
"value": "{{claimIdCpc}}",
|
||||
"type": "text"
|
||||
},
|
||||
{
|
||||
"key": "booking_id",
|
||||
"value": "{{claimIdCpc}}",
|
||||
"type": "text"
|
||||
},
|
||||
{
|
||||
"key": "booking_type",
|
||||
"value": "CPC",
|
||||
"type": "text"
|
||||
},
|
||||
{
|
||||
"key": "document_type",
|
||||
"value": "CPC_AUTH",
|
||||
"type": "text"
|
||||
},
|
||||
{
|
||||
"key": "provider",
|
||||
"value": "{{ocrProvider}}",
|
||||
"type": "text"
|
||||
},
|
||||
{
|
||||
"key": "msd_payload",
|
||||
"value": "{{msdPayloadCpcAuth}}",
|
||||
"type": "text"
|
||||
},
|
||||
{
|
||||
"key": "skip_min_attachment_check",
|
||||
"value": "true",
|
||||
"type": "text"
|
||||
},
|
||||
{
|
||||
"key": "file",
|
||||
"type": "file",
|
||||
"src": []
|
||||
}
|
||||
]
|
||||
},
|
||||
"url": "{{apiRoot}}/cpc-csd/v1/ocr/validate-upload",
|
||||
"auth": {
|
||||
"type": "bearer",
|
||||
"bearer": [
|
||||
{
|
||||
"key": "token",
|
||||
"value": "{{accessToken}}",
|
||||
"type": "string"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "validate-upload AADHAAR",
|
||||
"request": {
|
||||
"method": "POST",
|
||||
"header": [],
|
||||
"body": {
|
||||
"mode": "formdata",
|
||||
"formdata": [
|
||||
{
|
||||
"key": "claim_id",
|
||||
"value": "{{claimIdCpc}}",
|
||||
"type": "text"
|
||||
},
|
||||
{
|
||||
"key": "booking_id",
|
||||
"value": "{{claimIdCpc}}",
|
||||
"type": "text"
|
||||
},
|
||||
{
|
||||
"key": "booking_type",
|
||||
"value": "CPC",
|
||||
"type": "text"
|
||||
},
|
||||
{
|
||||
"key": "document_type",
|
||||
"value": "AADHAAR",
|
||||
"type": "text"
|
||||
},
|
||||
{
|
||||
"key": "provider",
|
||||
"value": "{{ocrProvider}}",
|
||||
"type": "text"
|
||||
},
|
||||
{
|
||||
"key": "msd_payload",
|
||||
"value": "{{msdPayloadAadhaar}}",
|
||||
"type": "text"
|
||||
},
|
||||
{
|
||||
"key": "skip_min_attachment_check",
|
||||
"value": "true",
|
||||
"type": "text"
|
||||
},
|
||||
{
|
||||
"key": "file",
|
||||
"type": "file",
|
||||
"src": []
|
||||
}
|
||||
]
|
||||
},
|
||||
"url": "{{apiRoot}}/cpc-csd/v1/ocr/validate-upload",
|
||||
"auth": {
|
||||
"type": "bearer",
|
||||
"bearer": [
|
||||
{
|
||||
"key": "token",
|
||||
"value": "{{accessToken}}",
|
||||
"type": "string"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "validate-upload CSD_PO",
|
||||
"request": {
|
||||
"method": "POST",
|
||||
"header": [],
|
||||
"body": {
|
||||
"mode": "formdata",
|
||||
"formdata": [
|
||||
{
|
||||
"key": "claim_id",
|
||||
"value": "{{claimIdCsd}}",
|
||||
"type": "text"
|
||||
},
|
||||
{
|
||||
"key": "booking_id",
|
||||
"value": "{{claimIdCsd}}",
|
||||
"type": "text"
|
||||
},
|
||||
{
|
||||
"key": "booking_type",
|
||||
"value": "CSD",
|
||||
"type": "text"
|
||||
},
|
||||
{
|
||||
"key": "document_type",
|
||||
"value": "CSD_PO",
|
||||
"type": "text"
|
||||
},
|
||||
{
|
||||
"key": "provider",
|
||||
"value": "{{ocrProvider}}",
|
||||
"type": "text"
|
||||
},
|
||||
{
|
||||
"key": "msd_payload",
|
||||
"value": "{{msdPayloadCsdPo}}",
|
||||
"type": "text"
|
||||
},
|
||||
{
|
||||
"key": "skip_min_attachment_check",
|
||||
"value": "true",
|
||||
"type": "text"
|
||||
},
|
||||
{
|
||||
"key": "file",
|
||||
"type": "file",
|
||||
"src": []
|
||||
}
|
||||
]
|
||||
},
|
||||
"url": "{{apiRoot}}/cpc-csd/v1/ocr/validate-upload",
|
||||
"auth": {
|
||||
"type": "bearer",
|
||||
"bearer": [
|
||||
{
|
||||
"key": "token",
|
||||
"value": "{{accessToken}}",
|
||||
"type": "string"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "validate-upload RETAIL_INVOICE",
|
||||
"request": {
|
||||
"method": "POST",
|
||||
"header": [],
|
||||
"body": {
|
||||
"mode": "formdata",
|
||||
"formdata": [
|
||||
{
|
||||
"key": "claim_id",
|
||||
"value": "{{claimIdRetail}}",
|
||||
"type": "text"
|
||||
},
|
||||
{
|
||||
"key": "booking_id",
|
||||
"value": "{{claimIdRetail}}",
|
||||
"type": "text"
|
||||
},
|
||||
{
|
||||
"key": "booking_type",
|
||||
"value": "CSD",
|
||||
"type": "text"
|
||||
},
|
||||
{
|
||||
"key": "document_type",
|
||||
"value": "RETAIL_INVOICE",
|
||||
"type": "text"
|
||||
},
|
||||
{
|
||||
"key": "provider",
|
||||
"value": "{{ocrProvider}}",
|
||||
"type": "text"
|
||||
},
|
||||
{
|
||||
"key": "msd_payload",
|
||||
"value": "{{msdPayloadRetailInvoice}}",
|
||||
"type": "text"
|
||||
},
|
||||
{
|
||||
"key": "skip_min_attachment_check",
|
||||
"value": "true",
|
||||
"type": "text"
|
||||
},
|
||||
{
|
||||
"key": "file",
|
||||
"type": "file",
|
||||
"src": []
|
||||
}
|
||||
]
|
||||
},
|
||||
"url": "{{apiRoot}}/cpc-csd/v1/ocr/validate-upload",
|
||||
"auth": {
|
||||
"type": "bearer",
|
||||
"bearer": [
|
||||
{
|
||||
"key": "token",
|
||||
"value": "{{accessToken}}",
|
||||
"type": "string"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "06_Upload_CPC_single_skip_min",
|
||||
"description": "`skip_min_attachment_check=true` + one `files` part.",
|
||||
"item": [
|
||||
{
|
||||
"name": "POST CPC one file skip min",
|
||||
"request": {
|
||||
"method": "POST",
|
||||
"header": [],
|
||||
"body": {
|
||||
"mode": "formdata",
|
||||
"formdata": [
|
||||
{
|
||||
"key": "claim_id",
|
||||
"value": "{{claimIdCpc}}",
|
||||
"type": "text"
|
||||
},
|
||||
{
|
||||
"key": "booking_id",
|
||||
"value": "{{claimIdCpc}}",
|
||||
"type": "text"
|
||||
},
|
||||
{
|
||||
"key": "booking_type",
|
||||
"value": "CPC",
|
||||
"type": "text"
|
||||
},
|
||||
{
|
||||
"key": "provider",
|
||||
"value": "{{ocrProvider}}",
|
||||
"type": "text"
|
||||
},
|
||||
{
|
||||
"key": "metadata_queue",
|
||||
"value": "{{metadataQueueJsonCpcAuthOnly}}",
|
||||
"type": "text",
|
||||
"description": "JSON string: single CPC_AUTH — same keys as CPC doc1 (`customer_name`, `letter_number`, `letter_amount`, `signature_and_stamp`)."
|
||||
},
|
||||
{
|
||||
"key": "skip_min_attachment_check",
|
||||
"value": "true",
|
||||
"type": "text"
|
||||
},
|
||||
{
|
||||
"key": "files",
|
||||
"type": "file",
|
||||
"src": []
|
||||
}
|
||||
]
|
||||
},
|
||||
"url": "{{apiRoot}}/cpc-csd/v1/ocr/upload",
|
||||
"auth": {
|
||||
"type": "bearer",
|
||||
"bearer": [
|
||||
{
|
||||
"key": "token",
|
||||
"value": "{{accessToken}}",
|
||||
"type": "string"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "07_Upload_retail_invoice",
|
||||
"description": "Single retail invoice file.",
|
||||
"item": [
|
||||
{
|
||||
"name": "POST Upload RETAIL_INVOICE",
|
||||
"request": {
|
||||
"method": "POST",
|
||||
"header": [],
|
||||
"body": {
|
||||
"mode": "formdata",
|
||||
"formdata": [
|
||||
{
|
||||
"key": "claim_id",
|
||||
"value": "{{claimIdRetail}}",
|
||||
"type": "text"
|
||||
},
|
||||
{
|
||||
"key": "booking_id",
|
||||
"value": "{{claimIdRetail}}",
|
||||
"type": "text"
|
||||
},
|
||||
{
|
||||
"key": "booking_type",
|
||||
"value": "CSD",
|
||||
"type": "text"
|
||||
},
|
||||
{
|
||||
"key": "provider",
|
||||
"value": "{{ocrProvider}}",
|
||||
"type": "text"
|
||||
},
|
||||
{
|
||||
"key": "metadata_queue",
|
||||
"value": "{{metadataQueueJsonRetailInvoice}}",
|
||||
"type": "text",
|
||||
"description": "JSON string: RETAIL_INVOICE (not CSD/CPC PO flow — see env `metadataQueueJsonRetailInvoice`)."
|
||||
},
|
||||
{
|
||||
"key": "files",
|
||||
"type": "file",
|
||||
"src": []
|
||||
}
|
||||
]
|
||||
},
|
||||
"url": "{{apiRoot}}/cpc-csd/v1/ocr/upload",
|
||||
"auth": {
|
||||
"type": "bearer",
|
||||
"bearer": [
|
||||
{
|
||||
"key": "token",
|
||||
"value": "{{accessToken}}",
|
||||
"type": "string"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "08_JSON_validate_GCS_URL",
|
||||
"description": "Requires real `gs://` object.",
|
||||
"item": [
|
||||
{
|
||||
"name": "POST validate JSON (GCS URL)",
|
||||
"request": {
|
||||
"method": "POST",
|
||||
"header": [
|
||||
{
|
||||
"key": "Content-Type",
|
||||
"value": "application/json"
|
||||
}
|
||||
],
|
||||
"body": {
|
||||
"mode": "raw",
|
||||
"raw": "{\n \"claim_id\": \"{{claimIdCpc}}\",\n \"document_type\": \"CPC_AUTH\",\n \"document_gcp_url\": \"{{documentGcpUrl}}\",\n \"msd_payload\": {\n \"customer_name\": \"Amit Kumar\",\n \"letter_number\": \"AUTH-1\",\n \"letter_amount\": \"45000\",\n \"signature_and_stamp\": \"yes\"\n },\n \"provider\": \"{{ocrProvider}}\"\n}"
|
||||
},
|
||||
"url": "{{apiRoot}}/cpc-csd/v1/ocr/validate",
|
||||
"auth": {
|
||||
"type": "bearer",
|
||||
"bearer": [
|
||||
{
|
||||
"key": "token",
|
||||
"value": "{{accessToken}}",
|
||||
"type": "string"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
@ -0,0 +1,219 @@
|
||||
{
|
||||
"id": "re-workflow-cpc-csd-api-env",
|
||||
"name": "RE Workflow — CPC-CSD API (complete)",
|
||||
"values": [
|
||||
{
|
||||
"key": "hostUrl",
|
||||
"value": "http://localhost:5000",
|
||||
"type": "default",
|
||||
"enabled": true,
|
||||
"description": "API origin only (scheme + host + port). No path. Node dev: 5000. Docker host-mapped API: often 5004. Nginx all-in-one: use 8080 only if you proxy everything through it."
|
||||
},
|
||||
{
|
||||
"key": "apiRoot",
|
||||
"value": "http://localhost:5000/api/v1",
|
||||
"type": "default",
|
||||
"enabled": true,
|
||||
"description": "Must equal {{hostUrl}}/api/v1. All CPC-CSD REST calls in the bundled collection use {{apiRoot}}/cpc-csd/... Bare GCS upload uses POST {{hostUrl}}/api/upload (see collection folder 02). The SPA may still use {{hostUrl}}/api/documents/* — same handlers; see docs/CPC-CDC.md if you need those URLs."
|
||||
},
|
||||
{
|
||||
"key": "accessToken",
|
||||
"value": "",
|
||||
"type": "secret",
|
||||
"enabled": true,
|
||||
"description": "JWT only (no Bearer prefix). From browser: DevTools → Application → Local Storage → access_token, or Network → Authorization header value after Bearer. Required for all CPC-CSD routes except GET /health."
|
||||
},
|
||||
{
|
||||
"key": "refreshToken",
|
||||
"value": "",
|
||||
"type": "secret",
|
||||
"enabled": true,
|
||||
"description": "Optional. Only if you chain POST /auth/refresh from another collection."
|
||||
},
|
||||
{
|
||||
"key": "ocrProvider",
|
||||
"value": "GEMINI_VERTEX_DIRECT",
|
||||
"type": "default",
|
||||
"enabled": true,
|
||||
"description": "Multipart field provider. GEMINI_VERTEX_DIRECT = Gemini on document bytes; skips Document AI OCR even if configured. GEMINI_VERTEX = optional Document AI then Gemini. RULES = rules engine on OCR text only, no Vertex."
|
||||
},
|
||||
{
|
||||
"key": "claimIdCpc",
|
||||
"value": "CPC-POSTMAN-0001",
|
||||
"type": "default",
|
||||
"enabled": true,
|
||||
"description": "claim_id and booking_id for CPC runs (same pattern as Dashboard finalBookingId: CPC-{suffix}). Must be unique enough for your DB rules."
|
||||
},
|
||||
{
|
||||
"key": "claimIdCsd",
|
||||
"value": "CSD-POSTMAN-0001",
|
||||
"type": "default",
|
||||
"enabled": true,
|
||||
"description": "claim_id and booking_id for CSD (PO) runs: CSD-{suffix}."
|
||||
},
|
||||
{
|
||||
"key": "claimIdRetail",
|
||||
"value": "CSD-RETAIL-0001",
|
||||
"type": "default",
|
||||
"enabled": true,
|
||||
"description": "Optional booking/claim id for RETAIL_INVOICE tests (any string; booking_type often CSD in samples)."
|
||||
},
|
||||
{
|
||||
"key": "cpcDocumentId",
|
||||
"value": "",
|
||||
"type": "default",
|
||||
"enabled": true,
|
||||
"description": "UUID from GET .../documents/recent (or history). Required for GET by id, GET file, PUT status, DELETE. Optional test script on recent can set this."
|
||||
},
|
||||
{
|
||||
"key": "documentGcpUrl",
|
||||
"value": "gs://your-bucket/path/document.pdf",
|
||||
"type": "default",
|
||||
"enabled": true,
|
||||
"description": "For POST .../v1/ocr/validate JSON only. File must already exist in GCS."
|
||||
},
|
||||
{
|
||||
"key": "reportAttemptQuery",
|
||||
"value": "",
|
||||
"type": "default",
|
||||
"enabled": true,
|
||||
"description": "Per-claim Excel: append empty or ?attempt=2 (full query string including ?)."
|
||||
},
|
||||
{
|
||||
"key": "recentPage",
|
||||
"value": "1",
|
||||
"type": "default",
|
||||
"enabled": true,
|
||||
"description": "GET documents/recent — page (1-based)."
|
||||
},
|
||||
{
|
||||
"key": "recentLimit",
|
||||
"value": "30",
|
||||
"type": "default",
|
||||
"enabled": true,
|
||||
"description": "GET documents/recent — page size (max sensible for UI parity)."
|
||||
},
|
||||
{
|
||||
"key": "recentSearch",
|
||||
"value": "",
|
||||
"type": "default",
|
||||
"enabled": true,
|
||||
"description": "Optional: filter by booking/claim/type text and id (when API supports searchIncludeId)."
|
||||
},
|
||||
{
|
||||
"key": "recentStatus",
|
||||
"value": "",
|
||||
"type": "default",
|
||||
"enabled": true,
|
||||
"description": "Leave empty for no filter. Set SUCCESSFUL or UNSUCCESSFUL to match History page filters (backend maps to validation_status sets)."
|
||||
},
|
||||
{
|
||||
"key": "recentType",
|
||||
"value": "",
|
||||
"type": "default",
|
||||
"enabled": true,
|
||||
"description": "Leave empty for no filter. Else: AADHAAR | CPC_AUTH | CSD_PO | RETAIL_INVOICE | AUTHORITY_LETTER (see appendCpcDocumentFilters)."
|
||||
},
|
||||
{
|
||||
"key": "recentSortBy",
|
||||
"value": "createdAt",
|
||||
"type": "default",
|
||||
"enabled": true,
|
||||
"description": "Sort field: id | bookingId | createdAt | documentType | validationStatus | claimId | matchPercentage."
|
||||
},
|
||||
{
|
||||
"key": "recentOrder",
|
||||
"value": "DESC",
|
||||
"type": "default",
|
||||
"enabled": true,
|
||||
"description": "ASC or DESC."
|
||||
},
|
||||
{
|
||||
"key": "masterReportSearch",
|
||||
"value": "",
|
||||
"type": "default",
|
||||
"enabled": true,
|
||||
"description": "GET .../report/all/download optional search query param."
|
||||
},
|
||||
{
|
||||
"key": "masterReportStatus",
|
||||
"value": "",
|
||||
"type": "default",
|
||||
"enabled": true,
|
||||
"description": "Optional validation_status filter for master Excel."
|
||||
},
|
||||
{
|
||||
"key": "masterReportType",
|
||||
"value": "",
|
||||
"type": "default",
|
||||
"enabled": true,
|
||||
"description": "Optional document_type filter for master Excel."
|
||||
},
|
||||
{
|
||||
"key": "putStatusBodyJson",
|
||||
"value": "{\n \"status\": \"APPROVED\",\n \"remarks\": \"Manual review via Postman\"\n}",
|
||||
"type": "default",
|
||||
"enabled": true,
|
||||
"description": "Body for PUT .../documents/:id/status. Adjust status, remarks, optional correctedFields per API contract."
|
||||
},
|
||||
{
|
||||
"key": "metadataQueueJsonCsdPo",
|
||||
"value": "[{\"document_type\":\"CSD_PO\",\"msd_payload\":{\"customer_name\":\"Rahul Verma\",\"po_number\":\"PO-2024-001\",\"po_amount\":\"25000\",\"signature_and_stamp\":\"yes\"},\"expected_field_keys\":[\"customer_name\",\"po_number\",\"po_amount\",\"signature_and_stamp\"]}]",
|
||||
"type": "default",
|
||||
"enabled": true,
|
||||
"description": "CSD (1 doc) PO — Purchase Order. JSON keys: `customer_name`, `po_number`, `po_amount`, `signature_and_stamp` (yes/no). Legacy keys still work. Stringify for `metadata_queue`."
|
||||
},
|
||||
{
|
||||
"key": "metadataQueueJsonCpcTwoFiles",
|
||||
"value": "[{\"document_type\":\"CPC_AUTH\",\"msd_payload\":{\"customer_name\":\"Amit Kumar\",\"letter_number\":\"AUTH-2024-77\",\"letter_amount\":\"45000\",\"signature_and_stamp\":\"yes\"},\"expected_field_keys\":[\"customer_name\",\"letter_number\",\"letter_amount\",\"signature_and_stamp\"]},{\"document_type\":\"AADHAAR\",\"msd_payload\":{\"customer_name\":\"Amit Kumar\",\"aadhar_number\":\"123412341234\"},\"expected_field_keys\":[\"customer_name\",\"aadhar_number\"]}]",
|
||||
"type": "default",
|
||||
"enabled": true,
|
||||
"description": "CPC (2 docs), order = file order. Doc1: `customer_name`, `letter_number`, `letter_amount`, `signature_and_stamp`. Doc2: `customer_name`, `aadhar_number`. Legacy keys still work."
|
||||
},
|
||||
{
|
||||
"key": "metadataQueueJsonCpcAuthOnly",
|
||||
"value": "[{\"document_type\":\"CPC_AUTH\",\"msd_payload\":{\"customer_name\":\"Amit Kumar\",\"letter_number\":\"AUTH-99\",\"letter_amount\":\"10000\",\"signature_and_stamp\":\"yes\"},\"expected_field_keys\":[\"customer_name\",\"letter_number\",\"letter_amount\",\"signature_and_stamp\"]}]",
|
||||
"type": "default",
|
||||
"enabled": true,
|
||||
"description": "Single CPC_AUTH upload (skip_min). Same keys as CPC doc1 (`customer_name`, `letter_number`, `letter_amount`, `signature_and_stamp`)."
|
||||
},
|
||||
{
|
||||
"key": "metadataQueueJsonRetailInvoice",
|
||||
"value": "[{\"document_type\":\"RETAIL_INVOICE\",\"msd_payload\":{\"vendor_name\":\"Royal Enfield Store\",\"order_or_authorisation_number\":\"INV-2024-1001\",\"invoice_value\":\"185000\",\"invoice_date\":\"15-01-2024\"},\"expected_field_keys\":[\"vendor_name\",\"order_or_authorisation_number\",\"invoice_value\",\"invoice_date\"]}]",
|
||||
"type": "default",
|
||||
"enabled": true,
|
||||
"description": "Retail invoice: vendor, order, amount, and invoice date compared to the reference payload per validation policy."
|
||||
},
|
||||
{
|
||||
"key": "msdPayloadCpcAuth",
|
||||
"value": "{\"customer_name\":\"Amit Kumar\",\"letter_number\":\"AUTH-1\",\"letter_amount\":\"45000\",\"signature_and_stamp\":\"yes\"}",
|
||||
"type": "default",
|
||||
"enabled": true,
|
||||
"description": "validate-upload: Authorization letter — `customer_name`, `letter_number`, `letter_amount`, `signature_and_stamp`."
|
||||
},
|
||||
{
|
||||
"key": "msdPayloadAadhaar",
|
||||
"value": "{\"customer_name\":\"Amit Kumar\",\"aadhar_number\":\"123412341234\"}",
|
||||
"type": "default",
|
||||
"enabled": true,
|
||||
"description": "validate-upload: Aadhaar — `customer_name`, `aadhar_number` (12 digits)."
|
||||
},
|
||||
{
|
||||
"key": "msdPayloadCsdPo",
|
||||
"value": "{\"customer_name\":\"Rahul Verma\",\"po_number\":\"PO-001\",\"po_amount\":\"12000\",\"signature_and_stamp\":\"yes\"}",
|
||||
"type": "default",
|
||||
"enabled": true,
|
||||
"description": "validate-upload: CSD PO — same keys as `metadataQueueJsonCsdPo`."
|
||||
},
|
||||
{
|
||||
"key": "msdPayloadRetailInvoice",
|
||||
"value": "{\"vendor_name\":\"RE Store\",\"order_or_authorisation_number\":\"INV-99\",\"invoice_value\":\"50000\",\"invoice_date\":\"01-04-2024\"}",
|
||||
"type": "default",
|
||||
"enabled": true,
|
||||
"description": "validate-upload: msd_payload for RETAIL_INVOICE."
|
||||
}
|
||||
],
|
||||
"_postman_variable_scope": "environment",
|
||||
"_postman_exported_at": "2026-04-15T12:00:00.000Z",
|
||||
"_postman_exported_using": "RE Workflow CPC-CSD bundle"
|
||||
}
|
||||
@ -0,0 +1,90 @@
|
||||
{
|
||||
"info": {
|
||||
"_postman_id": "re-workflow-cpc-csd-ocr-single-2026",
|
||||
"name": "RE Workflow — CPC-CSD OCR (single POST)",
|
||||
"description": "## What this collection is\nOne **multipart** request that runs the **full CPC-CSD OCR pipeline** used by the app: optional OCR text → Vertex/Gemini extraction → validation → **persist** `cpc_documents` rows.\n\nThis is **not** a different backend route — it is exactly:\n`POST {{apiRoot}}/cpc-csd/v1/ocr/upload`\n\n## Import\n1. Import **RE_Workflow_CPC_CDC_OCR_SingleRequest.postman_environment.json** (or merge variables into your existing env).\n2. Set **accessToken** (JWT, no `Bearer ` prefix).\n3. Select this environment in the dropdown.\n4. Open **POST Full OCR pipeline**, attach file(s), Send.\n\n## Auth\n- Collection **Bearer**: `{{accessToken}}`\n- User must be allowed for CPC-CSD (same as main RE Workflow collection).\n\n## Request (exact)\n| Item | Value |\n|------|--------|\n| Method | **POST** |\n| URL | `{{apiRoot}}/cpc-csd/v1/ocr/upload` |\n| Body mode | **form-data** (multipart) |\n| Content-Type | Let Postman set **multipart boundary** (do not set `application/json` on this request). |\n\n### Multipart text fields (always these keys)\n| Field name | Type | Required | Description |\n|------------|------|----------|-------------|\n| `claim_id` | text | yes | Claim id string; same family as Dashboard (`CPC-…` / `CSD-…`). |\n| `booking_id` | text | yes | In samples same as `claim_id`; backend accepts booking id pattern. |\n| `booking_type` | text | yes | **`CSD`** = one PO file. **`CPC`** = two files (auth + Aadhaar). |\n| `provider` | text | yes | e.g. `GEMINI_VERTEX_DIRECT` (see env `ocrProvider`). |\n| `metadata_queue` | text | yes | **Stringified JSON array** (not a Postman JSON body). Each element describes one uploaded file in order. |\n\n### Multipart file field(s)\n| Field name | Type | Count | Rule |\n|------------|------|-------|------|\n| `files` | file | **1** for CSD | One PO PDF/image. 
|\n| `files` | file | **2** for CPC | **Duplicate** the key `files` in Postman (two rows, same key `files`): first row = authorization letter, second = Aadhaar. Order **must** match `metadata_queue` array order. |\n\n### `metadata_queue` JSON shape (per array element)\nEach object **must** include:\n- `document_type`: `CSD_PO` | `CPC_AUTH` | `AADHAAR` | `RETAIL_INVOICE` (this collection documents CSD + CPC).\n- `msd_payload`: object — MSD/reference values for that file.\n- `expected_field_keys`: string array — **same keys** as in `msd_payload` you want validated (order preserved).\n\n**CSD_PO** keys (current canonical): `customer_name`, `po_number`, `po_amount`, `signature_and_stamp` (`yes`/`no`).\n\n**CPC_AUTH** (doc 1): `customer_name`, `letter_number`, `letter_amount`, `signature_and_stamp`.\n\n**AADHAAR** (doc 2): `customer_name`, `aadhar_number` (12 digits).\n\nUse env **`metadata_queue_json`** for CSD default, **`metadata_queue_json_cpc`** for CPC (set the `metadata_queue` field value to that variable when testing CPC).\n\n## Limits (server)\n- Max **20** `files` parts; **15 MB** per file; ZIP not allowed (same as main API).\n\n## Response\n- **200** JSON: per-file results with `document_id`, `validation_status`, `field_results`, etc. (same contract as main collection folder `03`/`04`).\n\n## Optional (not in this one-request collection)\n- `POST .../ocr/validate-upload` — single file validate without persisting as the same dashboard flow.\n- `POST .../ocr/validate` — JSON body + GCS URL.\n- `POST {{hostUrl}}/api/upload` — bare GCS staging without CPC metadata.",
|
||||
"schema": "https://schema.getpostman.com/json/collection/v2.1.0/collection.json"
|
||||
},
|
||||
"auth": {
|
||||
"type": "bearer",
|
||||
"bearer": [
|
||||
{
|
||||
"key": "token",
|
||||
"value": "{{accessToken}}",
|
||||
"type": "string"
|
||||
}
|
||||
]
|
||||
},
|
||||
"variable": [
|
||||
{
|
||||
"key": "hostUrl",
|
||||
"value": "http://localhost:5000"
|
||||
},
|
||||
{
|
||||
"key": "apiRoot",
|
||||
"value": "http://localhost:5000/api/v1"
|
||||
},
|
||||
{
|
||||
"key": "accessToken",
|
||||
"value": ""
|
||||
},
|
||||
{
|
||||
"key": "ocrProvider",
|
||||
"value": "GEMINI_VERTEX_DIRECT"
|
||||
}
|
||||
],
|
||||
"item": [
|
||||
{
|
||||
"name": "POST Full OCR pipeline (multipart upload)",
|
||||
"description": "**Single API** for end-to-end OCR on CPC-CSD: `POST {{apiRoot}}/cpc-csd/v1/ocr/upload`.\n\n**CSD (1 file):** `ocr_booking_type=CSD`, attach **one** `files` part, `metadata_queue` = `{{metadata_queue_json}}` (default CSD_PO).\n\n**CPC (2 files):** Set `ocr_booking_type` to `CPC`, set `metadata_queue` to `{{metadata_queue_json_cpc}}`, **add a second form row** with key `files` (duplicate key), attach auth PDF then Aadhaar PDF in that order.\n\n**claim_id / booking_id:** both use `{{ocr_claim_id}}` — change env when switching CSD vs CPC claim ids.",
|
||||
"request": {
|
||||
"method": "POST",
|
||||
"header": [],
|
||||
"body": {
|
||||
"mode": "formdata",
|
||||
"formdata": [
|
||||
{
|
||||
"key": "claim_id",
|
||||
"value": "{{ocr_claim_id}}",
|
||||
"type": "text",
|
||||
"description": "Same as Dashboard claim id string."
|
||||
},
|
||||
{
|
||||
"key": "booking_id",
|
||||
"value": "{{ocr_claim_id}}",
|
||||
"type": "text",
|
||||
"description": "Samples use same value as claim_id; must match your booking/claim convention."
|
||||
},
|
||||
{
|
||||
"key": "booking_type",
|
||||
"value": "{{ocr_booking_type}}",
|
||||
"type": "text",
|
||||
"description": "CSD = 1 file. CPC = 2 files + CPC metadata array."
|
||||
},
|
||||
{
|
||||
"key": "provider",
|
||||
"value": "{{ocrProvider}}",
|
||||
"type": "text",
|
||||
"description": "Vertex/Rules mode; see env ocrProvider."
|
||||
},
|
||||
{
|
||||
"key": "metadata_queue",
|
||||
"value": "{{metadata_queue_json}}",
|
||||
"type": "text",
|
||||
"description": "Stringified JSON array. CSD default from env `metadata_queue_json`. For CPC switch value to {{metadata_queue_json_cpc}} in this field (or paste)."
|
||||
},
|
||||
{
|
||||
"key": "files",
|
||||
"type": "file",
|
||||
"src": [],
|
||||
"description": "CSD: attach PO here only. CPC: first file = authorization letter; add another `files` row below for Aadhaar."
|
||||
}
|
||||
]
|
||||
},
|
||||
"url": "{{apiRoot}}/cpc-csd/v1/ocr/upload",
|
||||
"description": "Multipart form-data only. Do not set Content-Type manually."
|
||||
},
|
||||
"response": []
|
||||
}
|
||||
]
|
||||
}
|
||||
@ -0,0 +1,65 @@
|
||||
{
|
||||
"id": "re-workflow-cpc-csd-ocr-single-env",
|
||||
"name": "RE Workflow — CPC-CSD OCR (single POST)",
|
||||
"values": [
|
||||
{
|
||||
"key": "hostUrl",
|
||||
"value": "http://localhost:5000",
|
||||
"type": "default",
|
||||
"enabled": true,
|
||||
"description": "API origin (scheme + host + port). Docker: often http://localhost:5004. No trailing slash."
|
||||
},
|
||||
{
|
||||
"key": "apiRoot",
|
||||
"value": "http://localhost:5000/api/v1",
|
||||
"type": "default",
|
||||
"enabled": true,
|
||||
"description": "Must be {{hostUrl}}/api/v1. Used only by the OCR collection URL."
|
||||
},
|
||||
{
|
||||
"key": "accessToken",
|
||||
"value": "",
|
||||
"type": "secret",
|
||||
"enabled": true,
|
||||
"description": "JWT only (no 'Bearer ' prefix). Required: collection uses Bearer auth with this variable."
|
||||
},
|
||||
{
|
||||
"key": "ocrProvider",
|
||||
"value": "GEMINI_VERTEX_DIRECT",
|
||||
"type": "default",
|
||||
"enabled": true,
|
||||
"description": "Multipart text field `provider`. GEMINI_VERTEX_DIRECT = Gemini on file bytes. GEMINI_VERTEX = optional Document AI then Gemini. RULES = regex/rules on OCR text only (no Vertex)."
|
||||
},
|
||||
{
|
||||
"key": "ocr_claim_id",
|
||||
"value": "CSD-OCR-0001",
|
||||
"type": "default",
|
||||
"enabled": true,
|
||||
"description": "Used for BOTH `claim_id` and `booking_id` form fields (same as Dashboard). For CPC use e.g. CPC-OCR-0001 and set ocr_booking_type=CPC."
|
||||
},
|
||||
{
|
||||
"key": "ocr_booking_type",
|
||||
"value": "CSD",
|
||||
"type": "default",
|
||||
"enabled": true,
|
||||
"description": "Multipart `booking_type`: CSD (1 file, PO) or CPC (2 files: auth letter + Aadhaar)."
|
||||
},
|
||||
{
|
||||
"key": "metadata_queue_json",
|
||||
"value": "[{\"document_type\":\"CSD_PO\",\"msd_payload\":{\"customer_name\":\"Rahul Verma\",\"po_number\":\"PO-2024-001\",\"po_amount\":\"25000\",\"signature_and_stamp\":\"yes\"},\"expected_field_keys\":[\"customer_name\",\"po_number\",\"po_amount\",\"signature_and_stamp\"]}]",
|
||||
"type": "default",
|
||||
"enabled": true,
|
||||
"description": "Default for CSD. Single-line JSON STRING for form field `metadata_queue`. For CPC: set Body `metadata_queue` to {{metadata_queue_json_cpc}} (or paste that value) and add a second `files` row."
|
||||
},
|
||||
{
|
||||
"key": "metadata_queue_json_cpc",
|
||||
"value": "[{\"document_type\":\"CPC_AUTH\",\"msd_payload\":{\"customer_name\":\"Amit Kumar\",\"letter_number\":\"AUTH-2024-77\",\"letter_amount\":\"45000\",\"signature_and_stamp\":\"yes\"},\"expected_field_keys\":[\"customer_name\",\"letter_number\",\"letter_amount\",\"signature_and_stamp\"]},{\"document_type\":\"AADHAAR\",\"msd_payload\":{\"customer_name\":\"Amit Kumar\",\"aadhar_number\":\"123412341234\"},\"expected_field_keys\":[\"customer_name\",\"aadhar_number\"]}]",
|
||||
"type": "default",
|
||||
"enabled": true,
|
||||
"description": "CPC 2-file metadata_queue. Array order MUST match file order: [0]=first `files` part (auth letter), [1]=second `files` part (Aadhaar)."
|
||||
}
|
||||
],
|
||||
"_postman_variable_scope": "environment",
|
||||
"_postman_exported_at": "2026-04-15T15:00:00.000Z",
|
||||
"_postman_exported_using": "RE Workflow CPC-CSD OCR single-request bundle"
|
||||
}
|
||||
@ -19,7 +19,12 @@
|
||||
"variable": [
|
||||
{
|
||||
"key": "baseUrl",
|
||||
"value": "http://localhost:3000/api/v1",
|
||||
"value": "http://localhost:5000/api/v1",
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"key": "healthUrl",
|
||||
"value": "http://localhost:5000/health",
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
@ -101,7 +106,31 @@
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "Token Exchange (Development)",
|
||||
"name": "Token Exchange (Okta authorization code)",
|
||||
"event": [
|
||||
{
|
||||
"listen": "test",
|
||||
"script": {
|
||||
"exec": [
|
||||
"if (pm.response.code === 200) {",
|
||||
" const jsonData = pm.response.json();",
|
||||
" const data = jsonData.data || jsonData;",
|
||||
" const token = data && (data.accessToken || data.access_token);",
|
||||
" const refresh = data && (data.refreshToken || data.refresh_token);",
|
||||
" if (token) {",
|
||||
" pm.collectionVariables.set('accessToken', token);",
|
||||
" pm.environment.set('accessToken', token);",
|
||||
" }",
|
||||
" if (refresh) {",
|
||||
" pm.collectionVariables.set('refreshToken', refresh);",
|
||||
" pm.environment.set('refreshToken', refresh);",
|
||||
" }",
|
||||
"}"
|
||||
],
|
||||
"type": "text/javascript"
|
||||
}
|
||||
}
|
||||
],
|
||||
"request": {
|
||||
"method": "POST",
|
||||
"header": [
|
||||
|
||||
68
docs/CPC-CDC.md
Normal file
68
docs/CPC-CDC.md
Normal file
@ -0,0 +1,68 @@
|
||||
# CPC-CSD module (re-workflow)
|
||||
|
||||
This module (formerly referred to as CPC-CDC in code comments) covers **CPC/CSD document upload, OCR/extraction, validation against MSD payloads, audit history, dashboards, and Excel reports**. It was consolidated from the standalone **CPC-CSD** app into this backend.
|
||||
|
||||
## HTTP API
|
||||
|
||||
**CPC-CSD-compatible URLs** (same as `CPC-CSD/server/src/routes/index.js` + Postman `CPC-CSD-Full-Flow`): `POST /api/upload`, `GET /api/documents/*`, `POST /api/v1/ocr/validate`, `POST /api/v1/ocr/validate-upload` (field **`file`**), `POST /api/v1/ocr/upload` (field **`files`**, max 20), report downloads under `/api/v1/ocr/report/...`. Registered from `src/routes/cpc-csd-compat.mount.ts` before `/api/v1`; disable with **`CPC_LEGACY_COMPAT_ROUTES=false`**.
|
||||
|
||||
**Namespaced API** — canonical prefix **`/api/v1/cpc-csd`**; legacy alias **`/api/v1/cpc-cdc`** (`src/routes/cpc-cdc.routes.ts`) mounts the same handlers and auth.
|
||||
|
||||
| Method | Path (prefix **`/api`** or **`/api/v1/cpc-csd`** or legacy **`/api/v1/cpc-cdc`**) | Purpose |
|
||||
|--------|------|---------|
|
||||
| POST | `/upload` | GCS-only: multipart field **`file`** → `{ gcsUrl }` (compat: **`/api/upload`**) |
|
||||
| POST | `/v1/ocr/validate` | JSON URL mode — returns **400** with legacy message (use validate-upload) |
|
||||
| POST | `/v1/ocr/validate-upload` | Single file field **`file`** + `claim_id` / `msd_payload` / … |
|
||||
| POST | `/v1/ocr/upload` | Bulk: field **`files`** (max 20) + `metadata_queue` or `msd_payload` / `document_type` |
|
||||
| GET | `/documents/analytics` | Totals, pass rate, distribution, `dailyVolume`, `topMismatchFields` |
|
||||
| GET | `/documents/history` | `claimId` query — attempts grouped |
|
||||
| GET | `/documents/recent` | Paginated list; query: `page`, `limit`, `search`, `status`, `type`, `sortBy`, `order` |
|
||||
| GET | `/documents/:id/file` | Authenticated file bytes for preview (browser cannot use `gs://` directly) |
|
||||
| GET | `/documents/:id` | Document + audit logs + `field_results` |
|
||||
| PUT | `/documents/:id/status` | Manual status / corrected fields |
|
||||
| DELETE | `/documents/:id` | Remove document row |
|
||||
| GET | `/v1/ocr/report/:claimId/download` | Per-claim Excel |
|
||||
| GET | `/v1/ocr/report/all/download` | Master Excel (supports `search`, `status`, `type`) |
|
||||
|
||||
Compat paths are under **`/api/...`**; namespaced routes are **`/api/v1/cpc-csd/...`** with **`/api/v1/cpc-cdc/...`** as an alias (same path suffixes as in the table’s second column).
|
||||
|
||||
## Database
|
||||
|
||||
Sequelize models: **`CpcDocument`** (`cpc_documents`), **`CpcAuditLog`** (`cpc_audit_logs`). Migration: `src/migrations/2026041300-create-cpc-cdc-tables.ts`.
|
||||
|
||||
**Admin viewer list** is stored under `admin_configurations.config_key = CPC_CSD_ADMIN_CONFIG` (migration `20260416120000-rename-cpc-cdc-admin-config-key.ts` renames the legacy `CPC_CDC_ADMIN_CONFIG` row when applied).
|
||||
|
||||
On **application startup**, `ensureCpcCdcSchema()` runs after DB connect (`src/services/cpc-cdc/ensureCpcCdcSchema.ts`) so `CREATE TABLE IF NOT EXISTS` applies if migrations were skipped; still run `npm run migrate` for a full schema history.
|
||||
|
||||
Notable columns on `cpc_documents`: `booking_id`, `claim_id`, `attempt_no`, `document_type`, `document_gcp_url`, `provider`, JSONB `msd_payload`, `extracted_fields`, `field_confidence`, `validation_status`, `match_percentage`, `mismatch_reasons`, `field_results`, `ip_address`.
|
||||
|
||||
Unique index: `(claim_id, attempt_no, document_type)` — important when migrating legacy data with duplicates.
|
||||
|
||||
## Environment variables
|
||||
|
||||
Copy **`re-workflow-be/.env.example`** to `.env` and adjust. Typical keys (see `CpcCdcController` and `src/services/cpc-cdc/*`):
|
||||
|
||||
- **`GCP_PROJECT_ID`** — GCP project for Vertex / optional Document AI.
|
||||
- **`VERTEX_AI_LOCATION`** — Vertex region (e.g. `asia-south1`).
|
||||
- **`DOC_AI_PROCESSOR_ID`** — Optional; when set and valid, Document AI OCR may run before Gemini.
|
||||
- **`GCP_LOCATION_DOC_AI`** — Document AI region (default `us`).
|
||||
- **GCS** — Bucket/credentials as required by `CpcGcsService` (service account via `GOOGLE_APPLICATION_CREDENTIALS` or workload identity).
|
||||
- **`CPC_ALLOW_DEGRADED_SAVE_WITHOUT_AI`** — **`true`**: always allow saving after failed/missing Vertex. **`false`**: in **production** only, disallow degraded saves. **Omitted in non-production**: degraded saves are **allowed** so local CPC works without GCP; set to **`false`** in dev to force strict Vertex. **Omitted in production**: strict (Vertex required unless `RULES` provider).
|
||||
|
||||
**Extraction behaviour (upload response):**
|
||||
|
||||
- **`extraction_source`: `vertex_gemini`** — Fields came from the Vertex Gemini API (document bytes + optional Document AI OCR text).
|
||||
- **`extraction_source`: `rules_engine`** — Provider was **`RULES`**; fields come from `CpcRuleExtractService` on OCR text only (no Gemini).
|
||||
- **`extraction_source`: `degraded_empty`** — Extraction was skipped, failed, or (in **non-production**) hit a **Vertex auth / ADC** problem; the row is still stored with empty `extracted_fields` so you can test DB/history. In production this only happens when **`CPC_ALLOW_DEGRADED_SAVE_WITHOUT_AI=true`** or missing `GCP_PROJECT_ID` with degraded policy.
|
||||
|
||||
## One-off data migration from legacy Prisma DB
|
||||
|
||||
If you still have the old **`Document`** / **`AuditLog`** tables (CPC-CSD Prisma schema) in PostgreSQL, run:
|
||||
|
||||
```bash
|
||||
npm run migrate:cpc-csd
|
||||
```
|
||||
|
||||
Optional **`CPC_CSD_DATABASE_URL`**: if set, rows are read from that database and written to the database in **`DATABASE_URL`** (re-workflow). If unset, both read and write use **`DATABASE_URL`** (same cluster; both table sets must exist).
|
||||
|
||||
After migration, spot-check history, document detail, and Excel downloads, then decommission the legacy app.
|
||||
795
package-lock.json
generated
795
package-lock.json
generated
File diff suppressed because it is too large
Load Diff
@ -17,6 +17,7 @@
|
||||
"clean": "rm -rf dist",
|
||||
"setup": "ts-node -r tsconfig-paths/register src/scripts/auto-setup.ts",
|
||||
"migrate": "ts-node -r tsconfig-paths/register src/scripts/migrate.ts",
|
||||
"migrate:cpc-csd": "ts-node -r tsconfig-paths/register src/scripts/migrate-cpc-csd-to-cpc-tables.ts",
|
||||
"seed:config": "ts-node -r tsconfig-paths/register src/scripts/seed-admin-config.ts",
|
||||
"seed:test-dealer": "ts-node -r tsconfig-paths/register src/scripts/seed-test-dealer.ts",
|
||||
"seed:dealer-user": "ts-node -r tsconfig-paths/register src/scripts/seed-dealer-user.ts",
|
||||
@ -31,6 +32,7 @@
|
||||
"test:ci": "jest --ci --coverage --passWithNoTests --forceExit"
|
||||
},
|
||||
"dependencies": {
|
||||
"@google-cloud/documentai": "^9.6.0",
|
||||
"@google-cloud/secret-manager": "^6.1.1",
|
||||
"@google-cloud/storage": "^7.18.0",
|
||||
"@google-cloud/vertexai": "^1.10.0",
|
||||
@ -45,6 +47,7 @@
|
||||
"cors": "^2.8.5",
|
||||
"dayjs": "^1.11.19",
|
||||
"dotenv": "^16.4.7",
|
||||
"exceljs": "^4.4.0",
|
||||
"express": "^4.21.2",
|
||||
"express-rate-limit": "^7.5.0",
|
||||
"fast-xml-parser": "^5.3.3",
|
||||
@ -66,6 +69,7 @@
|
||||
"sanitize-html": "^2.17.1",
|
||||
"sequelize": "^6.37.5",
|
||||
"socket.io": "^4.8.1",
|
||||
"string-similarity": "^4.0.4",
|
||||
"uuid": "^8.3.2",
|
||||
"web-push": "^3.6.7",
|
||||
"winston": "^3.17.0",
|
||||
@ -87,10 +91,12 @@
|
||||
"@types/passport-jwt": "^4.0.1",
|
||||
"@types/pg": "^8.15.6",
|
||||
"@types/sanitize-html": "^2.16.0",
|
||||
"@types/string-similarity": "^4.0.2",
|
||||
"@types/supertest": "^6.0.2",
|
||||
"@types/web-push": "^3.6.4",
|
||||
"@typescript-eslint/eslint-plugin": "^8.19.1",
|
||||
"@typescript-eslint/parser": "^8.19.1",
|
||||
"concurrently": "^9.1.2",
|
||||
"eslint": "^9.17.0",
|
||||
"jest": "^29.7.0",
|
||||
"nodemon": "^3.1.9",
|
||||
|
||||
43
set-admin.ts
Normal file
43
set-admin.ts
Normal file
@ -0,0 +1,43 @@
|
||||
import { sequelize } from './src/config/database';
|
||||
import { User } from './src/models/User';
|
||||
|
||||
async function makeAdmin() {
|
||||
try {
|
||||
const email = 'testuser11@eichergroup.com';
|
||||
console.log(`Setting role to ADMIN for: ${email}`);
|
||||
|
||||
// Test connection first
|
||||
await sequelize.authenticate();
|
||||
console.log('Database connected.');
|
||||
|
||||
const [updatedRows] = await User.update(
|
||||
{ role: 'ADMIN' },
|
||||
{ where: { email: email } }
|
||||
);
|
||||
|
||||
if (updatedRows > 0) {
|
||||
console.log(`✅ Success! ${email} is now an ADMIN.`);
|
||||
} else {
|
||||
console.log(`⚠️ User not found in database: ${email}`);
|
||||
console.log(`Creating user ${email} with ADMIN role...`);
|
||||
|
||||
const newUser = await User.create({
|
||||
email: email,
|
||||
oktaSub: `MANUAL_ADMIN_${Date.now()}`,
|
||||
firstName: 'Test',
|
||||
lastName: 'User 11',
|
||||
displayName: 'Test User 11',
|
||||
role: 'ADMIN',
|
||||
isActive: true
|
||||
});
|
||||
|
||||
console.log(`✅ Success! Created new ADMIN user: ${newUser.email}`);
|
||||
}
|
||||
} catch (error) {
|
||||
console.error('❌ Error updating user:', error);
|
||||
} finally {
|
||||
await sequelize.close();
|
||||
}
|
||||
}
|
||||
|
||||
makeAdmin();
|
||||
22
src/app.ts
22
src/app.ts
@ -6,11 +6,13 @@ import cookieParser from 'cookie-parser';
|
||||
import { UserService } from './services/user.service';
|
||||
import { SSOUserData } from './types/auth.types';
|
||||
import { sequelize } from './config/database';
|
||||
import { ensureCpcCdcSchema } from './services/cpc-cdc/ensureCpcCdcSchema';
|
||||
import { corsMiddleware } from './middlewares/cors.middleware';
|
||||
import { authenticateToken } from './middlewares/auth.middleware';
|
||||
import { requireAdmin } from './middlewares/authorization.middleware';
|
||||
import { metricsMiddleware, createMetricsRouter } from './middlewares/metrics.middleware';
|
||||
import routes from './routes/index';
|
||||
import { registerCpcCsdCompatRoutes } from './routes/cpc-csd-compat.mount';
|
||||
import form16Routes from './routes/form16.routes';
|
||||
import { ensureUploadDir, UPLOAD_DIR } from './config/storage';
|
||||
import { initializeGoogleSecretManager } from './services/googleSecretManager.service';
|
||||
@ -28,15 +30,25 @@ const app: express.Application = express();
|
||||
// 1. Security middleware - Manual "Gold Standard" CSP to ensure it survives 301/404/etc.
|
||||
// This handles a specific Express/Helmet edge case where redirects lose headers.
|
||||
app.use((req: express.Request, res: express.Response, next: express.NextFunction) => {
|
||||
const isDev = process.env.NODE_ENV === 'development' || process.env.NODE_ENV === 'local';
|
||||
// Match server.ts: anything except production is "dev" for local tooling (.env often uses NODE_ENV=dev)
|
||||
const nodeEnv = (process.env.NODE_ENV || '').toLowerCase();
|
||||
const isDev = nodeEnv !== 'production' && nodeEnv !== 'prod';
|
||||
const frontendUrl = process.env.FRONTEND_URL || 'http://localhost:3000';
|
||||
|
||||
// Build connect-src dynamically
|
||||
const connectSrc = ["'self'", "blob:", "data:"];
|
||||
if (isDev) {
|
||||
connectSrc.push("http://localhost:3000", "http://localhost:5000", "ws://localhost:3000", "ws://localhost:5000");
|
||||
if (frontendUrl.includes('localhost')) connectSrc.push(frontendUrl);
|
||||
for (let port = 3000; port <= 3010; port++) {
|
||||
connectSrc.push(`http://localhost:${port}`, `http://127.0.0.1:${port}`);
|
||||
connectSrc.push(`ws://localhost:${port}`, `ws://127.0.0.1:${port}`);
|
||||
}
|
||||
for (let port = 5000; port <= 5005; port++) {
|
||||
connectSrc.push(`http://localhost:${port}`, `http://127.0.0.1:${port}`);
|
||||
connectSrc.push(`ws://localhost:${port}`, `ws://127.0.0.1:${port}`);
|
||||
}
|
||||
if (frontendUrl.includes('localhost') || frontendUrl.includes('127.0.0.1')) connectSrc.push(frontendUrl);
|
||||
} else if (frontendUrl && frontendUrl !== '*') {
|
||||
|
||||
const origins = frontendUrl.split(',').map(url => url.trim()).filter(Boolean);
|
||||
connectSrc.push(...origins);
|
||||
}
|
||||
@ -97,6 +109,7 @@ export const initializeAppDatabase = async () => {
|
||||
try {
|
||||
await sequelize.authenticate();
|
||||
console.log('✅ App database connection established');
|
||||
await ensureCpcCdcSchema();
|
||||
} catch (error) {
|
||||
console.error('❌ App database connection failed:', error);
|
||||
throw error;
|
||||
@ -147,6 +160,9 @@ app.get('/health', (_req: express.Request, res: express.Response) => {
|
||||
});
|
||||
});
|
||||
|
||||
// CPC-CSD-compatible paths (`/api/upload`, `/api/documents/*`, `/api/v1/ocr/*`) — same as `CPC-CSD/server` router
|
||||
registerCpcCsdCompatRoutes(app);
|
||||
|
||||
// Mount API routes (form16 already mounted above before body parser)
|
||||
app.use('/api/v1', routes);
|
||||
|
||||
|
||||
1147
src/controllers/CpcCdcController.ts
Normal file
1147
src/controllers/CpcCdcController.ts
Normal file
File diff suppressed because it is too large
Load Diff
204
src/controllers/CpcReportController.ts
Normal file
204
src/controllers/CpcReportController.ts
Normal file
@ -0,0 +1,204 @@
|
||||
import { Request, Response } from 'express';
|
||||
import { CpcHistoryService } from '../services/cpc-cdc/CpcHistoryService';
|
||||
import { CpcDocument } from '../models/CpcDocument';
|
||||
import { appendCpcDocumentFilters, cpcWhereFromAndParts } from '../services/cpc-cdc/utils';
|
||||
import ExcelJS from 'exceljs';
|
||||
import { ResponseHandler } from '../utils/responseHandler';
|
||||
|
||||
import { Op } from 'sequelize';
|
||||
|
||||
export class CpcReportController {
|
||||
/**
|
||||
* Download Excel report for a specific claim
|
||||
*/
|
||||
async downloadReport(req: Request, res: Response) {
|
||||
try {
|
||||
const { claimId } = req.params;
|
||||
const { attempt } = req.query;
|
||||
|
||||
const where: any = {
|
||||
[Op.or]: [
|
||||
{ claimId: claimId },
|
||||
{ bookingId: claimId }
|
||||
]
|
||||
};
|
||||
if (attempt) where.attemptNo = parseInt(attempt as string);
|
||||
|
||||
const docs = await CpcDocument.findAll({
|
||||
where,
|
||||
order: [['createdAt', 'DESC']]
|
||||
});
|
||||
|
||||
if (!docs || docs.length === 0) {
|
||||
return ResponseHandler.error(res, "No records found for this claim", 404);
|
||||
}
|
||||
|
||||
const workbook = new ExcelJS.Workbook();
|
||||
const sheet = workbook.addWorksheet('Validation Report');
|
||||
|
||||
// HEADERS
|
||||
const row1 = sheet.getRow(1);
|
||||
row1.values = [
|
||||
'Booking Type', 'Booking Number', 'Document Count', 'Document Name',
|
||||
'Customer Name', '', '', '', '',
|
||||
'PO Number /Authorisation Letter Number', '', '', '', '',
|
||||
'Aadhar Number', '', '', '', '',
|
||||
'PO Amount / Authorisation Letter Amount', '', '', '', '',
|
||||
'Signature & Stamp Availability', '', '', '', '',
|
||||
'Final Validation'
|
||||
];
|
||||
|
||||
const row2 = sheet.getRow(2);
|
||||
row2.values = [
|
||||
'', '', '', '',
|
||||
'Expected', 'OCR', 'Accuracy Matching %', 'Accuracy Criteria', 'Is Match the Accuracy',
|
||||
'Expected', 'OCR', 'Accuracy Matching %', 'Accuracy Criteria', 'Is Match the Accuracy',
|
||||
'Expected', 'OCR', 'Accuracy Matching %', 'Accuracy Criteria', 'Is Match the Accuracy',
|
||||
'Expected', 'OCR', 'Accuracy Matching %', 'Accuracy Criteria', 'Is Match the Accuracy',
|
||||
'Expected', 'OCR', 'Accuracy Matching Availability', 'Accuracy Criteria', 'Success Ratio',
|
||||
''
|
||||
];
|
||||
|
||||
sheet.mergeCells('E1:I1');
|
||||
sheet.mergeCells('J1:N1');
|
||||
sheet.mergeCells('O1:S1');
|
||||
sheet.mergeCells('T1:X1');
|
||||
sheet.mergeCells('Y1:AC1');
|
||||
sheet.mergeCells('A1:A2'); sheet.mergeCells('B1:B2'); sheet.mergeCells('C1:C2'); sheet.mergeCells('D1:D2');
|
||||
sheet.mergeCells('AD1:AD2');
|
||||
|
||||
[row1, row2].forEach((r: any) => {
|
||||
r.font = { bold: true, size: 9 };
|
||||
r.alignment = { vertical: 'middle', horizontal: 'center', wrapText: true };
|
||||
r.eachCell((cell: any) => {
|
||||
cell.fill = { type: 'pattern', pattern: 'solid', fgColor: { argb: 'FFD9D9D9' } };
|
||||
cell.border = { top: { style: 'thin' }, left: { style: 'thin' }, bottom: { style: 'thin' }, right: { style: 'thin' } };
|
||||
});
|
||||
});
|
||||
|
||||
docs.forEach((doc: any, idx: number) => {
|
||||
|
||||
const rowData = CpcHistoryService.getSummaryRow(doc, idx);
|
||||
const values = [
|
||||
rowData.booking_type,
|
||||
rowData.booking_number,
|
||||
rowData.document_count,
|
||||
rowData.document_name,
|
||||
rowData.customer_name_group.msd, rowData.customer_name_group.ocr, rowData.customer_name_group.accuracy_pct, rowData.customer_name_group.criteria, rowData.customer_name_group.is_match,
|
||||
rowData.po_or_auth_number_group.msd, rowData.po_or_auth_number_group.ocr, rowData.po_or_auth_number_group.accuracy_pct, rowData.po_or_auth_number_group.criteria, rowData.po_or_auth_number_group.is_match,
|
||||
rowData.aadhaar_number_group.msd, rowData.aadhaar_number_group.ocr, rowData.aadhaar_number_group.accuracy_pct, rowData.aadhaar_number_group.criteria, rowData.aadhaar_number_group.is_match,
|
||||
rowData.amount_group.msd, rowData.amount_group.ocr, rowData.amount_group.accuracy_pct, rowData.amount_group.criteria, rowData.amount_group.is_match,
|
||||
rowData.stamp_group.msd, rowData.stamp_group.ocr, rowData.stamp_group.accuracy_pct, rowData.stamp_group.criteria, rowData.stamp_group.is_match,
|
||||
rowData.final_validation
|
||||
];
|
||||
const row = sheet.addRow(values);
|
||||
row.eachCell((cell: any, colNum: number) => {
|
||||
cell.border = { top: { style: 'thin' }, left: { style: 'thin' }, bottom: { style: 'thin' }, right: { style: 'thin' } };
|
||||
cell.font = { size: 8 };
|
||||
cell.alignment = { vertical: 'middle', horizontal: 'center' };
|
||||
|
||||
if (cell.value === 'N.A.' && colNum > 4) {
|
||||
cell.fill = { type: 'pattern', pattern: 'solid', fgColor: { argb: 'FFFF0000' } };
|
||||
cell.font = { color: { argb: 'FFFFFFFF' }, size: 8, bold: true };
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
sheet.addRow([]);
|
||||
sheet.addRow([]);
|
||||
const detailHeader = sheet.addRow(['Detailed Field-Wise Comparison']);
|
||||
detailHeader.font = { bold: true, size: 12 };
|
||||
|
||||
docs.forEach((doc: any) => {
|
||||
const docHeader = sheet.addRow([`Document: ${doc.documentType?.replace(/_/g, ' ')}`]);
|
||||
docHeader.font = { bold: true, size: 10 };
|
||||
|
||||
|
||||
const subHeader = sheet.addRow(['Field', 'Expected', 'Extracted (OCR)', 'Accuracy %', 'Criteria', 'Status', 'Message']);
|
||||
const finalResults = CpcHistoryService.getDetailedFieldResults(doc);
|
||||
|
||||
finalResults.forEach((f: any) => {
|
||||
sheet.addRow([
|
||||
f.field.replace(/_/g, ' '),
|
||||
f.expected || '-',
|
||||
f.extracted || 'Not extracted',
|
||||
f.accuracy,
|
||||
f.criteria,
|
||||
f.pass ? 'PASS' : 'FAIL',
|
||||
f.message
|
||||
]);
|
||||
});
|
||||
|
||||
sheet.addRow([]);
|
||||
});
|
||||
|
||||
res.setHeader('Content-Type', 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet');
|
||||
res.setHeader('Content-Disposition', `attachment; filename=Report_${claimId}.xlsx`);
|
||||
await workbook.xlsx.write(res);
|
||||
res.end();
|
||||
} catch (error: any) {
|
||||
return ResponseHandler.error(res, error.message || "Report generation failed", 500);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Download Master Audit Report for all filtered documents
|
||||
*/
|
||||
/**
 * Download the Master Audit Report (XLSX) covering every CPC document that
 * matches the optional `search` / `status` / `type` query filters.
 *
 * The workbook is streamed directly into the HTTP response. On failure we can
 * only return a JSON error if no bytes have been written yet; once the stream
 * has started the response is simply terminated.
 */
async downloadAllReport(req: Request, res: Response) {
  try {
    const { search, status, type } = req.query;
    const andParts: Record<string, unknown>[] = [];
    appendCpcDocumentFilters(andParts, {
      type: type as string,
      status: status as string,
      search: search as string,
      searchIncludeId: false
    });
    const where = cpcWhereFromAndParts(andParts);

    // Newest first, mirroring the History screen ordering.
    const docs = await CpcDocument.findAll({
      where,
      order: [['createdAt', 'DESC']]
    });

    const workbook = new ExcelJS.Workbook();
    const sheet = workbook.addWorksheet('Master Audit Trail');

    // Two-row grouped header: row 1 carries the field-group titles, row 2 the
    // per-group sub-columns (Expected / OCR / Accuracy % / Criteria / Match).
    const row1 = sheet.getRow(1);
    row1.values = ['Booking Type', 'Booking Number', 'Doc ID', 'Document Name', 'Customer Name', '', '', '', '', 'PO Number /Authorisation Letter Number', '', '', '', '', 'Aadhar Number', '', '', '', '', 'PO Amount / Authorisation Letter Amount', '', '', '', '', 'Signature & Stamp Availability', '', '', '', '', 'Final Validation'];

    const row2 = sheet.getRow(2);
    row2.values = ['', '', '', '', 'Expected', 'OCR', 'Accuracy Matching %', 'Accuracy Criteria', 'Is Match the Accuracy', 'Expected', 'OCR', 'Accuracy Matching %', 'Accuracy Criteria', 'Is Match the Accuracy', 'Expected', 'OCR', 'Accuracy Matching %', 'Accuracy Criteria', 'Is Match the Accuracy', 'Expected', 'OCR', 'Accuracy Matching %', 'Accuracy Criteria', 'Is Match the Accuracy', 'Expected', 'OCR', 'Accuracy Matching Availability', 'Accuracy Criteria', 'Success Ratio', ''];

    // Merge each group title across its five sub-columns (E1:I1 … Y1:AC1) and
    // the single-value headers vertically across both header rows.
    ['E1:I1', 'J1:N1', 'O1:S1', 'T1:X1', 'Y1:AC1', 'A1:A2', 'B1:B2', 'C1:C2', 'D1:D2', 'AD1:AD2']
      .forEach((range) => sheet.mergeCells(range));

    [row1, row2].forEach((r: any) => {
      r.font = { bold: true, size: 9 };
      r.alignment = { vertical: 'middle', horizontal: 'center', wrapText: true };
      r.eachCell((cell: any) => { cell.fill = { type: 'pattern', pattern: 'solid', fgColor: { argb: 'FFD9D9D9' } }; cell.border = { top: { style: 'thin' }, left: { style: 'thin' }, bottom: { style: 'thin' }, right: { style: 'thin' } }; });
    });

    // One spreadsheet row per document; column order must match row2 above.
    docs.forEach((doc: any, idx: number) => {
      const rowData = CpcHistoryService.getSummaryRow(doc, idx);
      sheet.addRow([
        rowData.booking_type, rowData.booking_number, String(doc.id).slice(0, 8), rowData.document_name,
        rowData.customer_name_group.msd, rowData.customer_name_group.ocr, rowData.customer_name_group.accuracy_pct, rowData.customer_name_group.criteria, rowData.customer_name_group.is_match,
        rowData.po_or_auth_number_group.msd, rowData.po_or_auth_number_group.ocr, rowData.po_or_auth_number_group.accuracy_pct, rowData.po_or_auth_number_group.criteria, rowData.po_or_auth_number_group.is_match,
        rowData.aadhaar_number_group.msd, rowData.aadhaar_number_group.ocr, rowData.aadhaar_number_group.accuracy_pct, rowData.aadhaar_number_group.criteria, rowData.aadhaar_number_group.is_match,
        rowData.amount_group.msd, rowData.amount_group.ocr, rowData.amount_group.accuracy_pct, rowData.amount_group.criteria, rowData.amount_group.is_match,
        rowData.stamp_group.msd, rowData.stamp_group.ocr, rowData.stamp_group.accuracy_pct, rowData.stamp_group.criteria, rowData.stamp_group.is_match,
        rowData.final_validation
      ]);
    });

    res.setHeader('Content-Type', 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet');
    res.setHeader('Content-Disposition', `attachment; filename=Master_Audit_Report.xlsx`);
    await workbook.xlsx.write(res);
    res.end();
  } catch (error: any) {
    // If the XLSX stream already started, headers are sent and a JSON error
    // would throw ERR_HTTP_HEADERS_SENT — terminate the stream instead.
    if (res.headersSent) {
      res.end();
      return;
    }
    return ResponseHandler.error(res, error.message || "Master report failed", 500);
  }
}
|
||||
}
|
||||
@ -9,6 +9,11 @@ import { initializeHolidaysCache, clearWorkingHoursCache } from '@utils/tatTimeU
|
||||
import { clearConfigCache } from '@services/configReader.service';
|
||||
import { User, UserRole } from '@models/User';
|
||||
import { sanitizeHtml, sanitizeObject, isHtmlEmpty } from '@utils/sanitizer';
|
||||
import {
|
||||
CPC_CSD_ADMIN_CONFIG_KEY,
|
||||
CPC_CDC_ADMIN_CONFIG_KEY_LEGACY,
|
||||
selectCpcCsdAdminConfigValue,
|
||||
} from '@utils/cpcCsdAdminConfigDb';
|
||||
|
||||
/**
|
||||
* Get all holidays (with optional year filter)
|
||||
@ -564,6 +569,10 @@ const DEFAULT_FORM16_CONFIG = {
|
||||
reminderNotificationTemplate: 'Reminder: Dear [Name], your Form 16A submission is pending for request [Request ID]. Please complete it.',
|
||||
};
|
||||
|
||||
const DEFAULT_CPC_CSD_CONFIG = {
|
||||
viewerEmails: [] as string[],
|
||||
};
|
||||
|
||||
/**
|
||||
* Get Form 16 admin configuration (who can see submission data, 26AS, reminders)
|
||||
*/
|
||||
@ -721,6 +730,93 @@ export const putForm16Config = async (req: Request, res: Response): Promise<void
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Get CPC-CSD admin configuration (who can access CPC-CSD module).
|
||||
*/
|
||||
export const getCpcCdcConfig = async (req: Request, res: Response): Promise<void> => {
|
||||
try {
|
||||
const raw = await selectCpcCsdAdminConfigValue();
|
||||
|
||||
if (raw) {
|
||||
try {
|
||||
const parsed = JSON.parse(raw);
|
||||
res.json({
|
||||
success: true,
|
||||
data: {
|
||||
viewerEmails: Array.isArray(parsed.viewerEmails) ? parsed.viewerEmails : DEFAULT_CPC_CSD_CONFIG.viewerEmails,
|
||||
},
|
||||
});
|
||||
return;
|
||||
} catch {
|
||||
// fall through to defaults
|
||||
}
|
||||
}
|
||||
|
||||
res.json({ success: true, data: DEFAULT_CPC_CSD_CONFIG });
|
||||
} catch (error: any) {
|
||||
logger.error('[Admin] Error fetching CPC-CSD config:', error);
|
||||
res.status(500).json({
|
||||
success: false,
|
||||
error: error.message || 'Failed to fetch CPC-CSD configuration',
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
/**
 * Update CPC-CSD admin configuration.
 *
 * Body: `{ viewerEmails?: string[] }`. Each entry is normalized (trimmed,
 * lower-cased, empties dropped) and the resulting JSON blob is upserted under
 * CPC_CSD_ADMIN_CONFIG; any row under the legacy CPC_CDC key is removed.
 */
export const putCpcCdcConfig = async (req: Request, res: Response): Promise<void> => {
  try {
    const userId = req.user?.userId;
    if (!userId) {
      res.status(401).json({ success: false, error: 'User not authenticated' });
      return;
    }

    // Strip HTML/script payloads from user-supplied values before persisting.
    const body = sanitizeObject(req.body as Record<string, unknown>);
    const normalizeEmail = (e: unknown) => String(e ?? '').trim().toLowerCase();
    // A missing/non-array viewerEmails resets the list to the (empty) default.
    const viewerEmails = Array.isArray(body.viewerEmails)
      ? body.viewerEmails.map(normalizeEmail).filter(Boolean)
      : DEFAULT_CPC_CSD_CONFIG.viewerEmails;

    const configValue = JSON.stringify({
      viewerEmails,
    });

    // Upsert keyed on config_key so repeated saves update the same row.
    await sequelize.query(
      `INSERT INTO admin_configurations (
        config_id, config_key, config_category, config_value, value_type, display_name, description, is_editable, is_sensitive, sort_order, created_at, updated_at, last_modified_by, last_modified_at
      ) VALUES (
        gen_random_uuid(), :configKey, 'SYSTEM_SETTINGS', :configValue, 'JSON', 'CPC-CSD Admin Config', 'CPC-CSD module visibility settings', true, false, 0, NOW(), NOW(), :userId, NOW()
      )
      ON CONFLICT (config_key) DO UPDATE SET
        config_value = EXCLUDED.config_value,
        last_modified_by = EXCLUDED.last_modified_by,
        last_modified_at = NOW(),
        updated_at = NOW()`,
      {
        replacements: { configKey: CPC_CSD_ADMIN_CONFIG_KEY, configValue, userId },
        type: QueryTypes.RAW,
      }
    );

    // The key was renamed CPC_CDC_* -> CPC_CSD_*; delete any stale legacy row
    // so reads never resolve to an outdated value.
    await sequelize.query(
      `DELETE FROM admin_configurations WHERE config_key = :legacy`,
      { replacements: { legacy: CPC_CDC_ADMIN_CONFIG_KEY_LEGACY }, type: QueryTypes.RAW }
    );

    // Invalidate the in-process config cache so the change is visible at once.
    clearConfigCache();
    logger.info('[Admin] CPC-CSD configuration updated');
    res.json({ success: true, message: 'CPC-CSD configuration saved' });
  } catch (error: any) {
    logger.error('[Admin] Error updating CPC-CSD config:', error);
    res.status(500).json({
      success: false,
      error: error.message || 'Failed to save CPC-CSD configuration',
    });
  }
};
|
||||
|
||||
/**
|
||||
* ============================================
|
||||
* USER ROLE MANAGEMENT (RBAC)
|
||||
|
||||
36
src/controllers/cpcPermission.controller.ts
Normal file
36
src/controllers/cpcPermission.controller.ts
Normal file
@ -0,0 +1,36 @@
|
||||
import { Request, Response } from 'express';
|
||||
import { ResponseHandler } from '../utils/responseHandler';
|
||||
import logger from '@utils/logger';
|
||||
import { canAccessCpcCdc } from '../services/cpcPermission.service';
|
||||
|
||||
class CpcPermissionController {
|
||||
/**
|
||||
* GET /api/v1/cpc-csd/permissions (legacy: /api/v1/cpc-cdc/permissions)
|
||||
* Returns CPC-CSD access permission for current user.
|
||||
*/
|
||||
async getPermissions(req: Request, res: Response): Promise<void> {
|
||||
try {
|
||||
const user = req.user;
|
||||
if (!user?.userId || !user?.email) {
|
||||
ResponseHandler.unauthorized(res, 'Authentication required');
|
||||
return;
|
||||
}
|
||||
|
||||
const role = (user as any).role as string | undefined;
|
||||
const canViewCpcCsd = await canAccessCpcCdc(user.email, role);
|
||||
|
||||
ResponseHandler.success(
|
||||
res,
|
||||
{ canViewCpcCsd, canViewCpcCdc: canViewCpcCsd },
|
||||
'CPC-CSD permissions'
|
||||
);
|
||||
} catch (error) {
|
||||
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
|
||||
logger.error('[CpcPermissionController] getPermissions error:', error);
|
||||
ResponseHandler.error(res, 'Failed to get CPC-CSD permissions', 500, errorMessage);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export const cpcPermissionController = new CpcPermissionController();
|
||||
|
||||
@ -1,5 +1,14 @@
|
||||
import cors from 'cors';
|
||||
|
||||
/** Vite dev: localhost vs 127.0.0.1, and ports 3000–3010 when 3000/3001 are already taken. */
|
||||
function getDevViteOrigins(): string[] {
|
||||
const out: string[] = [];
|
||||
for (let port = 3000; port <= 3010; port++) {
|
||||
out.push(`http://localhost:${port}`, `http://127.0.0.1:${port}`);
|
||||
}
|
||||
return out;
|
||||
}
|
||||
|
||||
// Configure allowed origins - uses only FRONTEND_URL from environment
|
||||
const getAllowedOrigins = (): string[] | boolean => {
|
||||
const frontendUrl = process.env.FRONTEND_URL;
|
||||
@ -15,10 +24,9 @@ const getAllowedOrigins = (): string[] | boolean => {
|
||||
console.error(' Multiple origins: FRONTEND_URL=https://app1.com,https://app2.com');
|
||||
return [];
|
||||
} else {
|
||||
// Dev fallback: allow localhost:3000
|
||||
console.warn('⚠️ WARNING: FRONTEND_URL not set. Defaulting to http://localhost:3000 for development.');
|
||||
console.warn(' To avoid this warning, set FRONTEND_URL=http://localhost:3000 in your .env file');
|
||||
return ['http://localhost:3000'];
|
||||
console.warn('⚠️ WARNING: FRONTEND_URL not set. Defaulting Vite dev origins (localhost + 127.0.0.1).');
|
||||
console.warn(' Set FRONTEND_URL in .env if you use another host/port.');
|
||||
return getDevViteOrigins();
|
||||
}
|
||||
}
|
||||
|
||||
@ -35,12 +43,14 @@ const getAllowedOrigins = (): string[] | boolean => {
|
||||
|
||||
if (origins.length === 0) {
|
||||
console.error('❌ ERROR: FRONTEND_URL is set but contains no valid URLs!');
|
||||
return isProduction ? [] : ['http://localhost:3000']; // Fallback for development
|
||||
return isProduction ? [] : getDevViteOrigins(); // Fallback for development
|
||||
}
|
||||
|
||||
// In development always allow localhost:3000 (Vite default) so frontend works even if FRONTEND_URL is 3001
|
||||
if (!isProduction && !origins.includes('http://localhost:3000')) {
|
||||
origins = ['http://localhost:3000', ...origins];
|
||||
// In development allow common Vite host/port combos (avoids CORS when Vite bumps to 3002+)
|
||||
if (!isProduction) {
|
||||
for (const o of getDevViteOrigins()) {
|
||||
if (!origins.includes(o)) origins.push(o);
|
||||
}
|
||||
}
|
||||
|
||||
console.log(`✅ CORS: Allowing origins from FRONTEND_URL: ${origins.join(', ')}`);
|
||||
|
||||
34
src/middlewares/cpcPermission.middleware.ts
Normal file
34
src/middlewares/cpcPermission.middleware.ts
Normal file
@ -0,0 +1,34 @@
|
||||
/**
|
||||
* CPC-CSD permission middleware – enforces API-driven viewer list.
|
||||
* Use after authenticateToken so req.user is available.
|
||||
*/
|
||||
|
||||
import { Request, Response, NextFunction } from 'express';
|
||||
import { ResponseHandler } from '../utils/responseHandler';
|
||||
import { canAccessCpcCdc } from '../services/cpcPermission.service';
|
||||
|
||||
export const requireCpcCdcAccess = async (
|
||||
req: Request,
|
||||
res: Response,
|
||||
next: NextFunction
|
||||
): Promise<void> => {
|
||||
try {
|
||||
const user = req.user;
|
||||
if (!user?.userId || !user?.email) {
|
||||
ResponseHandler.unauthorized(res, 'Authentication required');
|
||||
return;
|
||||
}
|
||||
|
||||
const role = (user as any).role as string | undefined;
|
||||
const allowed = await canAccessCpcCdc(user.email, role);
|
||||
if (!allowed) {
|
||||
ResponseHandler.forbidden(res, 'You do not have permission to access CPC-CSD');
|
||||
return;
|
||||
}
|
||||
|
||||
next();
|
||||
} catch (error) {
|
||||
ResponseHandler.error(res, 'Permission check failed', 500, error instanceof Error ? error.message : 'Unknown error');
|
||||
}
|
||||
};
|
||||
|
||||
130
src/migrations/2026041300-create-cpc-cdc-tables.ts
Normal file
130
src/migrations/2026041300-create-cpc-cdc-tables.ts
Normal file
@ -0,0 +1,130 @@
|
||||
import { QueryInterface, DataTypes } from 'sequelize';
|
||||
|
||||
/**
 * Create the CPC-CSD OCR tables: one row per uploaded document in
 * `cpc_documents`, plus an append-only `cpc_audit_logs` trail keyed to it.
 * The column set must stay in sync with the raw-SQL ensure migration
 * (20260414100000) that recreates these tables idempotently.
 */
export async function up(queryInterface: QueryInterface): Promise<void> {
  // Create cpc_documents table
  await queryInterface.createTable('cpc_documents', {
    id: {
      type: DataTypes.UUID,
      defaultValue: DataTypes.UUIDV4,
      primaryKey: true,
      allowNull: false
    },
    booking_id: {
      type: DataTypes.STRING(255),
      allowNull: true
    },
    claim_id: {
      type: DataTypes.STRING(255),
      allowNull: true
    },
    // Re-validation attempts for the same claim increment this counter.
    attempt_no: {
      type: DataTypes.INTEGER,
      defaultValue: 1,
      allowNull: false
    },
    document_type: {
      type: DataTypes.STRING(255),
      allowNull: true
    },
    document_gcp_url: {
      type: DataTypes.TEXT,
      allowNull: true
    },
    provider: {
      type: DataTypes.STRING(255),
      allowNull: true
    },
    // Expected values submitted by the client (MSD side of the comparison).
    msd_payload: {
      type: DataTypes.JSONB,
      allowNull: true
    },
    // Values produced by OCR, plus per-field confidence scores.
    extracted_fields: {
      type: DataTypes.JSONB,
      allowNull: true
    },
    field_confidence: {
      type: DataTypes.JSONB,
      allowNull: true
    },
    validation_status: {
      type: DataTypes.STRING(255),
      allowNull: true
    },
    match_percentage: {
      type: DataTypes.FLOAT,
      allowNull: true
    },
    mismatch_reasons: {
      type: DataTypes.JSONB,
      allowNull: true
    },
    field_results: {
      type: DataTypes.JSONB,
      allowNull: true
    },
    ip_address: {
      type: DataTypes.STRING(255),
      allowNull: true
    },
    created_at: {
      type: DataTypes.DATE,
      defaultValue: DataTypes.NOW,
      allowNull: false
    }
  });

  // Create cpc_audit_logs table
  await queryInterface.createTable('cpc_audit_logs', {
    id: {
      type: DataTypes.UUID,
      defaultValue: DataTypes.UUIDV4,
      primaryKey: true,
      allowNull: false
    },
    // Cascade delete: removing a document removes its audit trail.
    document_id: {
      type: DataTypes.UUID,
      allowNull: false,
      references: {
        model: 'cpc_documents',
        key: 'id'
      },
      onDelete: 'CASCADE'
    },
    action: {
      type: DataTypes.STRING(255),
      allowNull: false
    },
    // Before/after snapshots of the document for the given action.
    previous_state: {
      type: DataTypes.JSONB,
      allowNull: true
    },
    new_state: {
      type: DataTypes.JSONB,
      allowNull: true
    },
    performed_by: {
      type: DataTypes.STRING(255),
      allowNull: true
    },
    remarks: {
      type: DataTypes.TEXT,
      allowNull: true
    },
    created_at: {
      type: DataTypes.DATE,
      defaultValue: DataTypes.NOW,
      allowNull: false
    }
  });

  // Unique index for the multi-attempt claim logic (idempotent for repeated startup migrations)
  // NOTE(review): a later migration replaces this with an index over
  // (claim_id, attempt_no, booking_id) named unique_cpc_document_claim_attempt_booking.
  await queryInterface.sequelize.query(`
    CREATE UNIQUE INDEX IF NOT EXISTS unique_cpc_document_attempt
    ON cpc_documents (claim_id, attempt_no, document_type);
  `);
}
|
||||
|
||||
/** Drop the CPC tables; audit logs first to satisfy their FK to cpc_documents. */
export async function down(queryInterface: QueryInterface): Promise<void> {
  await queryInterface.dropTable('cpc_audit_logs');
  await queryInterface.dropTable('cpc_documents');
}
|
||||
50
src/migrations/20260414100000-ensure-cpc-cdc-tables-exist.ts
Normal file
50
src/migrations/20260414100000-ensure-cpc-cdc-tables-exist.ts
Normal file
@ -0,0 +1,50 @@
|
||||
import { QueryInterface } from 'sequelize';
|
||||
|
||||
/**
|
||||
* Idempotent CPC-CDC schema for environments where 2026041300 did not run or tables were dropped.
|
||||
* Safe to run on top of an existing DB that already has these tables from the earlier migration.
|
||||
*/
|
||||
export async function up(queryInterface: QueryInterface): Promise<void> {
|
||||
await queryInterface.sequelize.query(`
|
||||
CREATE TABLE IF NOT EXISTS cpc_documents (
|
||||
id UUID NOT NULL PRIMARY KEY,
|
||||
booking_id VARCHAR(255),
|
||||
claim_id VARCHAR(255),
|
||||
attempt_no INTEGER NOT NULL DEFAULT 1,
|
||||
document_type VARCHAR(255),
|
||||
document_gcp_url TEXT,
|
||||
provider VARCHAR(255),
|
||||
msd_payload JSONB,
|
||||
extracted_fields JSONB,
|
||||
field_confidence JSONB,
|
||||
validation_status VARCHAR(255),
|
||||
match_percentage DOUBLE PRECISION,
|
||||
mismatch_reasons JSONB,
|
||||
field_results JSONB,
|
||||
ip_address VARCHAR(255),
|
||||
created_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW()
|
||||
);
|
||||
`);
|
||||
|
||||
await queryInterface.sequelize.query(`
|
||||
CREATE TABLE IF NOT EXISTS cpc_audit_logs (
|
||||
id UUID NOT NULL PRIMARY KEY,
|
||||
document_id UUID NOT NULL REFERENCES cpc_documents(id) ON DELETE CASCADE,
|
||||
action VARCHAR(255) NOT NULL,
|
||||
previous_state JSONB,
|
||||
new_state JSONB,
|
||||
performed_by VARCHAR(255),
|
||||
remarks TEXT,
|
||||
created_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW()
|
||||
);
|
||||
`);
|
||||
|
||||
await queryInterface.sequelize.query(`
|
||||
CREATE UNIQUE INDEX IF NOT EXISTS unique_cpc_document_attempt
|
||||
ON cpc_documents (claim_id, attempt_no, booking_id);
|
||||
`);
|
||||
}
|
||||
|
||||
export async function down(_queryInterface: QueryInterface): Promise<void> {
|
||||
// Non-destructive: tables may contain production CPC data.
|
||||
}
|
||||
@ -0,0 +1,26 @@
|
||||
import { QueryInterface } from 'sequelize';
|
||||
|
||||
/**
 * Batch upload can include multiple files of the same document_type in one attempt.
 * Replace unique(claim_id, attempt_no, document_type) with unique(claim_id, attempt_no, booking_id)
 * because booking_id is distinct per file (e.g. CLAIM-1, CLAIM-2, ...).
 */
export async function up(queryInterface: QueryInterface): Promise<void> {
  // Drop the old per-document-type index first; IF EXISTS keeps this safe on
  // databases where it was never created.
  await queryInterface.sequelize.query(`
    DROP INDEX IF EXISTS unique_cpc_document_attempt;
  `);
  await queryInterface.sequelize.query(`
    CREATE UNIQUE INDEX IF NOT EXISTS unique_cpc_document_claim_attempt_booking
    ON cpc_documents (claim_id, attempt_no, booking_id);
  `);
}

/** Restore the original per-(claim, attempt, document_type) unique index. */
export async function down(queryInterface: QueryInterface): Promise<void> {
  await queryInterface.sequelize.query(`
    DROP INDEX IF EXISTS unique_cpc_document_claim_attempt_booking;
  `);
  await queryInterface.sequelize.query(`
    CREATE UNIQUE INDEX IF NOT EXISTS unique_cpc_document_attempt
    ON cpc_documents (claim_id, attempt_no, document_type);
  `);
}
|
||||
@ -0,0 +1,26 @@
|
||||
import { QueryInterface } from 'sequelize';
|
||||
|
||||
/**
 * Rename CPC admin viewer-list config key from CPC_CDC_* to CPC_CSD_* (display name aligned).
 * Plain UPDATE: if no legacy row exists this is a harmless no-op.
 */
export async function up(queryInterface: QueryInterface): Promise<void> {
  await queryInterface.sequelize.query(`
    UPDATE admin_configurations
    SET
      config_key = 'CPC_CSD_ADMIN_CONFIG',
      display_name = 'CPC-CSD Admin Config',
      description = 'CPC-CSD module visibility settings'
    WHERE config_key = 'CPC_CDC_ADMIN_CONFIG'
  `);
}

/** Reverse rename: restore the legacy CPC_CDC_* key and labels. */
export async function down(queryInterface: QueryInterface): Promise<void> {
  await queryInterface.sequelize.query(`
    UPDATE admin_configurations
    SET
      config_key = 'CPC_CDC_ADMIN_CONFIG',
      display_name = 'CPC-CDC Admin Config',
      description = 'CPC-CDC module visibility settings'
    WHERE config_key = 'CPC_CSD_ADMIN_CONFIG'
  `);
}
|
||||
89
src/models/CpcAuditLog.ts
Normal file
89
src/models/CpcAuditLog.ts
Normal file
@ -0,0 +1,89 @@
|
||||
import { DataTypes, Model, Optional } from 'sequelize';
|
||||
import { sequelize } from '@config/database';
|
||||
|
||||
/** Shape of a cpc_audit_logs row: one immutable action record per document event. */
interface CpcAuditLogAttributes {
  id: string;
  documentId: string;
  action: string;
  // JSONB snapshots of the document before/after the action.
  previousState?: any;
  newState?: any;
  performedBy?: string;
  remarks?: string;
  createdAt?: Date;
}

// id / createdAt are DB-generated, so callers may omit them on create.
interface CpcAuditLogCreationAttributes extends Optional<CpcAuditLogAttributes, 'id' | 'createdAt'> {}

/** Sequelize model for the append-only CPC-CSD audit trail (`cpc_audit_logs`). */
class CpcAuditLog extends Model<CpcAuditLogAttributes, CpcAuditLogCreationAttributes> implements CpcAuditLogAttributes {
  public id!: string;
  public documentId!: string;
  public action!: string;
  public previousState?: any;
  public newState?: any;
  public performedBy?: string;
  public remarks?: string;
  public createdAt!: Date;
}

CpcAuditLog.init(
  {
    id: {
      type: DataTypes.UUID,
      defaultValue: DataTypes.UUIDV4,
      primaryKey: true,
      field: 'id'
    },
    documentId: {
      type: DataTypes.UUID,
      allowNull: false,
      field: 'document_id',
      references: {
        model: 'cpc_documents',
        key: 'id'
      }
    },
    action: {
      type: DataTypes.STRING(255),
      allowNull: false
    },
    previousState: {
      type: DataTypes.JSONB,
      allowNull: true,
      field: 'previous_state'
    },
    newState: {
      type: DataTypes.JSONB,
      allowNull: true,
      field: 'new_state'
    },
    performedBy: {
      type: DataTypes.STRING(255),
      allowNull: true,
      field: 'performed_by'
    },
    remarks: {
      type: DataTypes.TEXT,
      allowNull: true
    },
    createdAt: {
      type: DataTypes.DATE,
      defaultValue: DataTypes.NOW,
      field: 'created_at'
    }
  },
  {
    sequelize,
    modelName: 'CpcAuditLog',
    // timestamps disabled: the table only has created_at, managed explicitly.
    tableName: 'cpc_audit_logs',
    timestamps: false
  }
);

// NOTE(review): sequelize.models.CpcDocument is only registered once
// CpcDocument.ts has executed; if this module is ever imported before it,
// this belongsTo receives undefined. Confirm models/index.ts import order
// (it currently imports CpcDocument before CpcAuditLog).
CpcAuditLog.belongsTo(sequelize.models.CpcDocument, {
  foreignKey: 'documentId',
  targetKey: 'id',
  as: 'document'
});

export { CpcAuditLog };
|
||||
|
||||
143
src/models/CpcDocument.ts
Normal file
143
src/models/CpcDocument.ts
Normal file
@ -0,0 +1,143 @@
|
||||
import { DataTypes, Model, Optional } from 'sequelize';
|
||||
import { sequelize } from '@config/database';
|
||||
|
||||
/** Shape of a cpc_documents row: one OCR-validated upload per (claim, attempt, booking). */
interface CpcDocumentAttributes {
  id: string;
  bookingId?: string;
  claimId?: string;
  // Re-validation attempts for the same claim increment this counter.
  attemptNo?: number;
  documentType?: string;
  documentGcpUrl?: string;
  provider?: string;
  // Expected values submitted by the client (MSD side of the comparison).
  msdPayload?: any;
  // OCR output and per-field confidence scores.
  extractedFields?: any;
  fieldConfidence?: any;
  validationStatus?: string;
  matchPercentage?: number;
  mismatchReasons?: any;
  fieldResults?: any;
  ipAddress?: string;
  createdAt?: Date;
}

// id / attemptNo / createdAt have DB defaults, so callers may omit them on create.
interface CpcDocumentCreationAttributes extends Optional<CpcDocumentAttributes, 'id' | 'attemptNo' | 'createdAt'> {}

/** Sequelize model for CPC-CSD OCR documents (`cpc_documents`). */
class CpcDocument extends Model<CpcDocumentAttributes, CpcDocumentCreationAttributes> implements CpcDocumentAttributes {
  public id!: string;
  public bookingId?: string;
  public claimId?: string;
  public attemptNo?: number;
  public documentType?: string;
  public documentGcpUrl?: string;
  public provider?: string;
  public msdPayload?: any;
  public extractedFields?: any;
  public fieldConfidence?: any;
  public validationStatus?: string;
  public matchPercentage?: number;
  public mismatchReasons?: any;
  public fieldResults?: any;
  public ipAddress?: string;
  public createdAt!: Date;
}
|
||||
|
||||
CpcDocument.init(
|
||||
{
|
||||
id: {
|
||||
type: DataTypes.UUID,
|
||||
defaultValue: DataTypes.UUIDV4,
|
||||
primaryKey: true,
|
||||
field: 'id'
|
||||
},
|
||||
bookingId: {
|
||||
type: DataTypes.STRING(255),
|
||||
allowNull: true,
|
||||
field: 'booking_id'
|
||||
},
|
||||
claimId: {
|
||||
type: DataTypes.STRING(255),
|
||||
allowNull: true,
|
||||
field: 'claim_id'
|
||||
},
|
||||
attemptNo: {
|
||||
type: DataTypes.INTEGER,
|
||||
defaultValue: 1,
|
||||
field: 'attempt_no'
|
||||
},
|
||||
documentType: {
|
||||
type: DataTypes.STRING(255),
|
||||
allowNull: true,
|
||||
field: 'document_type'
|
||||
},
|
||||
documentGcpUrl: {
|
||||
type: DataTypes.TEXT,
|
||||
allowNull: true,
|
||||
field: 'document_gcp_url'
|
||||
},
|
||||
provider: {
|
||||
type: DataTypes.STRING(255),
|
||||
allowNull: true,
|
||||
field: 'provider'
|
||||
},
|
||||
msdPayload: {
|
||||
type: DataTypes.JSONB,
|
||||
allowNull: true,
|
||||
field: 'msd_payload'
|
||||
},
|
||||
extractedFields: {
|
||||
type: DataTypes.JSONB,
|
||||
allowNull: true,
|
||||
field: 'extracted_fields'
|
||||
},
|
||||
fieldConfidence: {
|
||||
type: DataTypes.JSONB,
|
||||
allowNull: true,
|
||||
field: 'field_confidence'
|
||||
},
|
||||
validationStatus: {
|
||||
type: DataTypes.STRING(255),
|
||||
allowNull: true,
|
||||
field: 'validation_status'
|
||||
},
|
||||
matchPercentage: {
|
||||
type: DataTypes.FLOAT,
|
||||
allowNull: true,
|
||||
field: 'match_percentage'
|
||||
},
|
||||
mismatchReasons: {
|
||||
type: DataTypes.JSONB,
|
||||
allowNull: true,
|
||||
field: 'mismatch_reasons'
|
||||
},
|
||||
fieldResults: {
|
||||
type: DataTypes.JSONB,
|
||||
allowNull: true,
|
||||
field: 'field_results'
|
||||
},
|
||||
ipAddress: {
|
||||
type: DataTypes.STRING(255),
|
||||
allowNull: true,
|
||||
field: 'ip_address'
|
||||
},
|
||||
createdAt: {
|
||||
type: DataTypes.DATE,
|
||||
defaultValue: DataTypes.NOW,
|
||||
field: 'created_at'
|
||||
}
|
||||
},
|
||||
{
|
||||
sequelize,
|
||||
modelName: 'CpcDocument',
|
||||
tableName: 'cpc_documents',
|
||||
timestamps: false,
|
||||
indexes: [
|
||||
{
|
||||
name: 'unique_cpc_document_claim_attempt_booking',
|
||||
unique: true,
|
||||
fields: ['claimId', 'attemptNo', 'bookingId']
|
||||
}
|
||||
]
|
||||
}
|
||||
);
|
||||
|
||||
export { CpcDocument };
|
||||
@ -42,6 +42,9 @@ import { Form16LedgerEntry } from './Form16LedgerEntry';
|
||||
import { Form16SapResponse } from './Form16SapResponse';
|
||||
import { Form16DebitNoteSapResponse } from './Form16DebitNoteSapResponse';
|
||||
import { From16SapReadFile } from './From16SapReadFile';
|
||||
import { CpcDocument } from './CpcDocument';
|
||||
import { CpcAuditLog } from './CpcAuditLog';
|
||||
|
||||
|
||||
// Define associations
|
||||
const defineAssociations = () => {
|
||||
@ -189,6 +192,13 @@ const defineAssociations = () => {
|
||||
|
||||
// Note: belongsTo associations are defined in individual model files to avoid duplicate alias conflicts
|
||||
// Only hasMany associations from WorkflowRequest are defined here since they're one-way
|
||||
|
||||
// CPC-CSD associations
|
||||
CpcDocument.hasMany(CpcAuditLog, {
|
||||
as: 'auditLogs',
|
||||
foreignKey: 'documentId',
|
||||
sourceKey: 'id'
|
||||
});
|
||||
};
|
||||
|
||||
// Initialize associations
|
||||
@ -237,7 +247,9 @@ export {
|
||||
Form16LedgerEntry,
|
||||
Form16SapResponse,
|
||||
Form16DebitNoteSapResponse,
|
||||
From16SapReadFile
|
||||
From16SapReadFile,
|
||||
CpcDocument,
|
||||
CpcAuditLog
|
||||
};
|
||||
|
||||
// Export default sequelize instance
|
||||
|
||||
@ -16,6 +16,7 @@ import {
|
||||
updateActivityTypeSchema,
|
||||
activityTypeParamsSchema,
|
||||
updateForm16ConfigSchema,
|
||||
updateCpcCdcConfigSchema,
|
||||
} from '../validators/admin.validator';
|
||||
import {
|
||||
getAllHolidays,
|
||||
@ -29,6 +30,8 @@ import {
|
||||
resetConfiguration,
|
||||
getForm16Config,
|
||||
putForm16Config,
|
||||
getCpcCdcConfig,
|
||||
putCpcCdcConfig,
|
||||
updateUserRole,
|
||||
getUsersByRole,
|
||||
getRoleStatistics,
|
||||
@ -139,6 +142,21 @@ router.get('/form16-config', getForm16Config);
|
||||
*/
|
||||
router.put('/form16-config', validateBody(updateForm16ConfigSchema), putForm16Config);
|
||||
|
||||
/**
|
||||
* @route GET /api/admin/cpc-csd-config
|
||||
* @desc Get CPC-CSD admin config (viewer emails)
|
||||
* @access Admin
|
||||
*/
|
||||
router.get('/cpc-csd-config', getCpcCdcConfig);
|
||||
|
||||
/**
|
||||
* @route PUT /api/admin/cpc-csd-config
|
||||
* @desc Update CPC-CSD admin config
|
||||
* @body { viewerEmails? }
|
||||
* @access Admin
|
||||
*/
|
||||
router.put('/cpc-csd-config', validateBody(updateCpcCdcConfigSchema), putCpcCdcConfig);
|
||||
|
||||
// ==================== User Role Management Routes (RBAC) ====================
|
||||
|
||||
/**
|
||||
|
||||
60
src/routes/cpc-cdc.routes.ts
Normal file
60
src/routes/cpc-cdc.routes.ts
Normal file
@ -0,0 +1,60 @@
|
||||
import { Router } from 'express';
|
||||
import multer from 'multer';
|
||||
import { cpcCdcController } from '../controllers/CpcCdcController';
|
||||
import { CpcReportController } from '../controllers/CpcReportController';
|
||||
import { asyncHandler } from '../middlewares/errorHandler.middleware';
|
||||
import { authenticateToken } from '../middlewares/auth.middleware';
|
||||
import { requireCpcCdcAccess } from '../middlewares/cpcPermission.middleware';
|
||||
|
||||
// Router for the CPC-CSD module (mounted at /cpc-csd and legacy /cpc-cdc).
const router = Router();
const cpcReportController = new CpcReportController();
|
||||
|
||||
|
||||
const disallowZipUpload: multer.Options['fileFilter'] = (_req, file, cb) => {
|
||||
const lowerName = String(file.originalname || '').toLowerCase();
|
||||
const lowerMime = String(file.mimetype || '').toLowerCase();
|
||||
const isZip = lowerName.endsWith('.zip') || lowerMime.includes('zip');
|
||||
if (isZip) {
|
||||
cb(new Error('ZIP files are not allowed for CPC-CSD validation'));
|
||||
return;
|
||||
}
|
||||
cb(null, true);
|
||||
};
|
||||
|
||||
// Configure Multer for memory storage (buffers needed for GCS/Gemini)
const upload = multer({
  storage: multer.memoryStorage(),
  limits: { fileSize: 15 * 1024 * 1024 }, // 15MB limit
  fileFilter: disallowZipUpload
});

// All CPC-CSD routes require authentication (mounted at /cpc-csd and legacy /cpc-cdc)
router.use(authenticateToken);
router.use(requireCpcCdcAccess);

// OCR / Validation — mirror CPC-CSD: bulk uses `files[]`, single upload uses `file`
router.post('/v1/ocr/upload', upload.array('files', 20), asyncHandler(cpcCdcController.validateDocumentUpload.bind(cpcCdcController)));
router.post('/v1/ocr/validate-upload', upload.single('file'), asyncHandler(cpcCdcController.validateDocumentUpload.bind(cpcCdcController)));
router.post('/v1/ocr/validate', asyncHandler(cpcCdcController.validateDocumentByUrlStub.bind(cpcCdcController)));


// History and Documents (order aligned with CPC-CSD/server/src/routes/index.js)
// NOTE: the static /documents/analytics|history|recent routes must stay
// registered BEFORE '/documents/:id', or Express would match them as :id.
router.get('/documents/analytics', asyncHandler(cpcCdcController.getAnalytics.bind(cpcCdcController)));
router.get('/documents/history', asyncHandler(cpcCdcController.getClaimHistory.bind(cpcCdcController)));
router.get('/documents/recent', asyncHandler(cpcCdcController.getRecentDocuments.bind(cpcCdcController)));
router.get('/documents/:id/file', asyncHandler(cpcCdcController.getDocumentFile.bind(cpcCdcController)));
router.get('/documents/:id', asyncHandler(cpcCdcController.getDocumentById.bind(cpcCdcController)));
router.put('/documents/:id/status', asyncHandler(cpcCdcController.updateDocumentStatus.bind(cpcCdcController)));
router.delete('/documents/:id', asyncHandler(cpcCdcController.deleteDocument.bind(cpcCdcController)));

// Reports (Matching History.jsx exactly)
// 'all' is registered before ':claimId' so it is not captured as a claim id.
router.get('/v1/ocr/report/all/download', asyncHandler(cpcReportController.downloadAllReport.bind(cpcReportController)));
router.get('/v1/ocr/report/:claimId/download', asyncHandler(cpcReportController.downloadReport.bind(cpcReportController)));

// Backwards compatibility or alternative paths
router.get('/report/all/download', asyncHandler(cpcReportController.downloadAllReport.bind(cpcReportController)));
router.get('/report/:claimId/download', asyncHandler(cpcReportController.downloadReport.bind(cpcReportController)));

export default router;
|
||||
|
||||
|
||||
89
src/routes/cpc-csd-compat.mount.ts
Normal file
89
src/routes/cpc-csd-compat.mount.ts
Normal file
@ -0,0 +1,89 @@
|
||||
import express from 'express';
|
||||
import multer from 'multer';
|
||||
import { authenticateToken } from '../middlewares/auth.middleware';
|
||||
import { asyncHandler } from '../middlewares/errorHandler.middleware';
|
||||
import { generalApiLimiter } from '../middlewares/rateLimiter.middleware';
|
||||
import { requireCpcCdcAccess } from '../middlewares/cpcPermission.middleware';
|
||||
import { cpcCdcController } from '../controllers/CpcCdcController';
|
||||
import { CpcReportController } from '../controllers/CpcReportController';
|
||||
|
||||
const memoryUpload = multer({
|
||||
storage: multer.memoryStorage(),
|
||||
limits: { fileSize: 15 * 1024 * 1024 },
|
||||
fileFilter: (_req, file, cb) => {
|
||||
const lowerName = String(file.originalname || '').toLowerCase();
|
||||
const lowerMime = String(file.mimetype || '').toLowerCase();
|
||||
const isZip = lowerName.endsWith('.zip') || lowerMime.includes('zip');
|
||||
if (isZip) {
|
||||
cb(new Error('ZIP files are not allowed for CPC-CSD validation'));
|
||||
return;
|
||||
}
|
||||
cb(null, true);
|
||||
}
|
||||
});
|
||||
|
||||
// Report controller instance shared by the report routes below.
const cpcReportController = new CpcReportController();

// Common middleware chain, applied in order: JWT auth → CPC/CSD access check → rate limiter.
const authLim = [authenticateToken, requireCpcCdcAccess, generalApiLimiter];
|
||||
|
||||
/**
 * Registers CPC-CSD (`CPC-CSD/server`) style URLs on re-workflow so the legacy
 * SPA keeps working without a rewrite:
 * - `POST /api/upload`
 * - `GET /api/documents/...` (same order as legacy router)
 * - `POST /api/v1/ocr/validate` | `validate-upload` | `upload`
 * - `GET /api/v1/ocr/report/...`
 *
 * Disable with `CPC_LEGACY_COMPAT_ROUTES=false`.
 */
export function registerCpcCsdCompatRoutes(app: express.Application): void {
  // Opt-out switch: only the literal string "false" disables these routes.
  if (String(process.env.CPC_LEGACY_COMPAT_ROUTES || '').toLowerCase() === 'false') {
    return;
  }

  // Bare GCS staging upload — single multipart field `file`, no metadata.
  app.post(
    '/api/upload',
    ...authLim,
    memoryUpload.single('file'),
    asyncHandler(cpcCdcController.uploadBareFile.bind(cpcCdcController))
  );

  // Documents router. Registration order matters: static paths and `/:id/file`
  // must precede the `/:id` catch-all because Express matches in order.
  const documentsRouter = express.Router();
  documentsRouter.use(...authLim);
  documentsRouter.get('/analytics', asyncHandler(cpcCdcController.getAnalytics.bind(cpcCdcController)));
  documentsRouter.get('/history', asyncHandler(cpcCdcController.getClaimHistory.bind(cpcCdcController)));
  documentsRouter.get('/recent', asyncHandler(cpcCdcController.getRecentDocuments.bind(cpcCdcController)));
  documentsRouter.get('/:id/file', asyncHandler(cpcCdcController.getDocumentFile.bind(cpcCdcController)));
  documentsRouter.get('/:id', asyncHandler(cpcCdcController.getDocumentById.bind(cpcCdcController)));
  documentsRouter.put('/:id/status', asyncHandler(cpcCdcController.updateDocumentStatus.bind(cpcCdcController)));
  documentsRouter.delete('/:id', asyncHandler(cpcCdcController.deleteDocument.bind(cpcCdcController)));
  app.use('/api/documents', documentsRouter);

  // OCR endpoints — same handlers as the canonical /cpc-csd router.
  app.post(
    '/api/v1/ocr/validate',
    ...authLim,
    asyncHandler(cpcCdcController.validateDocumentByUrlStub.bind(cpcCdcController))
  );
  // Single-file validation: multipart field `file`.
  app.post(
    '/api/v1/ocr/validate-upload',
    ...authLim,
    memoryUpload.single('file'),
    asyncHandler(cpcCdcController.validateDocumentUpload.bind(cpcCdcController))
  );
  // Bulk validation: repeated multipart field `files`, max 20 per request.
  app.post(
    '/api/v1/ocr/upload',
    ...authLim,
    memoryUpload.array('files', 20),
    asyncHandler(cpcCdcController.validateDocumentUpload.bind(cpcCdcController))
  );

  // Reports — `all` registered before `:claimId` so it is not captured as a claim id.
  app.get(
    '/api/v1/ocr/report/all/download',
    ...authLim,
    asyncHandler(cpcReportController.downloadAllReport.bind(cpcReportController))
  );
  app.get(
    '/api/v1/ocr/report/:claimId/download',
    ...authLim,
    asyncHandler(cpcReportController.downloadReport.bind(cpcReportController))
  );
}
|
||||
16
src/routes/cpc-permission.routes.ts
Normal file
16
src/routes/cpc-permission.routes.ts
Normal file
@ -0,0 +1,16 @@
|
||||
import { Router } from 'express';
|
||||
import { authenticateToken } from '../middlewares/auth.middleware';
|
||||
import { asyncHandler } from '../middlewares/errorHandler.middleware';
|
||||
import { cpcPermissionController } from '../controllers/cpcPermission.controller';
|
||||
|
||||
const router = Router();

// Authentication only — no CPC/CSD access gate here.
// NOTE(review): presumably this endpoint returns the caller's CPC/CSD permission
// flags, so it must be reachable before access is granted; confirm in the controller.
router.use(authenticateToken);

router.get(
  '/permissions',
  asyncHandler(cpcPermissionController.getPermissions.bind(cpcPermissionController))
);

export default router;
|
||||
|
||||
@ -35,6 +35,9 @@ import antivirusRoutes from './antivirus.routes';
|
||||
import dealerExternalRoutes from './dealerExternal.routes';
|
||||
import form16Routes from './form16.routes';
|
||||
import hsnSacCodeRoutes from './hsnSacCode.routes';
|
||||
import cpcCdcRoutes from './cpc-cdc.routes';
|
||||
import cpcPermissionRoutes from './cpc-permission.routes';
|
||||
|
||||
|
||||
const router = Router();
|
||||
|
||||
@ -101,6 +104,10 @@ router.use('/dealers-external', generalApiLimiter, dealerExternalRoutes); // 200
|
||||
router.use('/form16', uploadLimiter, form16Routes); // 50 req/15min (file uploads: extract, submissions, 26as)
|
||||
router.use('/api-tokens', authLimiter, apiTokenRoutes); // 20 req/15min (sensitive — same as auth)
|
||||
router.use('/hsn-sac', generalApiLimiter, hsnSacCodeRoutes); // 200 req/15min
|
||||
router.use('/cpc-csd', generalApiLimiter, cpcPermissionRoutes); // 200 req/15min (canonical)
|
||||
router.use('/cpc-csd', generalApiLimiter, cpcCdcRoutes);
|
||||
|
||||
|
||||
|
||||
export default router;
|
||||
|
||||
|
||||
163
src/scripts/migrate-cpc-csd-to-cpc-tables.ts
Normal file
163
src/scripts/migrate-cpc-csd-to-cpc-tables.ts
Normal file
@ -0,0 +1,163 @@
|
||||
/**
|
||||
* One-off migration: CPC-CSD Prisma tables "Document" and "AuditLog" →
|
||||
* re-workflow tables cpc_documents and cpc_audit_logs.
|
||||
*
|
||||
* Usage:
|
||||
* DATABASE_URL=postgres://... npm run migrate:cpc-csd
|
||||
*
|
||||
* Optional CPC_CSD_DATABASE_URL: when set, rows are read from that database
|
||||
* and written to DATABASE_URL. When unset, both use DATABASE_URL (same DB;
|
||||
* Prisma legacy tables must still exist alongside cpc_* tables).
|
||||
*/
|
||||
import 'dotenv/config';
|
||||
import { Sequelize, QueryTypes } from 'sequelize';
|
||||
import { sequelize, CpcDocument, CpcAuditLog } from '../models';
|
||||
|
||||
// Raw row shapes returned by `SELECT *` from the legacy Prisma tables;
// columns are intentionally untyped — per-field fallbacks are applied on insert.
type LegacyDoc = Record<string, any>;
type LegacyLog = Record<string, any>;
|
||||
|
||||
async function openSource(): Promise<{ sequelize: Sequelize; close: () => Promise<void> }> {
|
||||
const url = process.env.CPC_CSD_DATABASE_URL?.trim();
|
||||
if (url) {
|
||||
const s = new Sequelize(url, {
|
||||
dialect: 'postgres',
|
||||
logging: false
|
||||
});
|
||||
return {
|
||||
sequelize: s,
|
||||
close: async () => {
|
||||
await s.close();
|
||||
}
|
||||
};
|
||||
}
|
||||
return {
|
||||
sequelize,
|
||||
close: async () => {}
|
||||
};
|
||||
}
|
||||
|
||||
async function tableExists(client: Sequelize, tableName: string): Promise<boolean> {
|
||||
const rows = (await client.query(
|
||||
`SELECT EXISTS (
|
||||
SELECT 1 FROM information_schema.tables
|
||||
WHERE table_schema = 'public' AND table_name = :tableName
|
||||
) AS "exists"`,
|
||||
{ replacements: { tableName }, type: QueryTypes.SELECT }
|
||||
)) as { exists: boolean }[];
|
||||
return Boolean(rows[0]?.exists);
|
||||
}
|
||||
|
||||
async function migrateDocuments(source: Sequelize): Promise<number> {
|
||||
if (!(await tableExists(source, 'Document'))) {
|
||||
console.warn('[migrate-cpc-csd] Table "Document" not found on source; skipping documents.');
|
||||
return 0;
|
||||
}
|
||||
|
||||
const rows = (await source.query('SELECT * FROM "Document"', {
|
||||
type: QueryTypes.SELECT
|
||||
})) as LegacyDoc[];
|
||||
|
||||
let inserted = 0;
|
||||
for (const r of rows) {
|
||||
if (!r.id) continue;
|
||||
const existing = await CpcDocument.findByPk(r.id);
|
||||
if (existing) continue;
|
||||
|
||||
try {
|
||||
await CpcDocument.create({
|
||||
id: r.id,
|
||||
bookingId: r.bookingId ?? null,
|
||||
claimId: r.claimId ?? null,
|
||||
attemptNo: r.attemptNo ?? 1,
|
||||
documentType: r.documentType ?? null,
|
||||
documentGcpUrl: r.documentGcpUrl ?? null,
|
||||
provider: r.provider ?? null,
|
||||
msdPayload: r.msdPayload ?? null,
|
||||
extractedFields: r.extractedFields ?? null,
|
||||
fieldConfidence: r.fieldConfidence ?? null,
|
||||
validationStatus: r.validationStatus ?? null,
|
||||
matchPercentage: r.matchPercentage ?? null,
|
||||
mismatchReasons: r.mismatchReasons ?? null,
|
||||
fieldResults: r.fieldResults ?? null,
|
||||
ipAddress: r.ipAddress ?? null,
|
||||
createdAt: r.createdAt ? new Date(r.createdAt) : new Date()
|
||||
});
|
||||
inserted += 1;
|
||||
} catch (err: any) {
|
||||
console.error(`[migrate-cpc-csd] Skip document ${r.id}:`, err?.message || err);
|
||||
}
|
||||
}
|
||||
|
||||
return inserted;
|
||||
}
|
||||
|
||||
async function migrateAuditLogs(source: Sequelize): Promise<number> {
|
||||
if (!(await tableExists(source, 'AuditLog'))) {
|
||||
console.warn('[migrate-cpc-csd] Table "AuditLog" not found on source; skipping audit logs.');
|
||||
return 0;
|
||||
}
|
||||
|
||||
const rows = (await source.query('SELECT * FROM "AuditLog"', {
|
||||
type: QueryTypes.SELECT
|
||||
})) as LegacyLog[];
|
||||
|
||||
let inserted = 0;
|
||||
for (const r of rows) {
|
||||
if (!r.id || !r.documentId) continue;
|
||||
const parent = await CpcDocument.findByPk(r.documentId);
|
||||
if (!parent) {
|
||||
console.warn(`[migrate-cpc-csd] Skip audit ${r.id}: parent document ${r.documentId} missing`);
|
||||
continue;
|
||||
}
|
||||
const existingLog = await CpcAuditLog.findByPk(r.id);
|
||||
if (existingLog) continue;
|
||||
|
||||
try {
|
||||
await CpcAuditLog.create({
|
||||
id: r.id,
|
||||
documentId: r.documentId,
|
||||
action: r.action,
|
||||
previousState: r.previousState ?? null,
|
||||
newState: r.newState ?? null,
|
||||
performedBy: r.performedBy ?? null,
|
||||
remarks: r.remarks ?? null,
|
||||
createdAt: r.createdAt ? new Date(r.createdAt) : new Date()
|
||||
});
|
||||
inserted += 1;
|
||||
} catch (err: any) {
|
||||
console.error(`[migrate-cpc-csd] Skip audit log ${r.id}:`, err?.message || err);
|
||||
}
|
||||
}
|
||||
|
||||
return inserted;
|
||||
}
|
||||
|
||||
async function printCounts(): Promise<void> {
|
||||
const docTotal = await CpcDocument.count();
|
||||
const logTotal = await CpcAuditLog.count();
|
||||
console.log(`[migrate-cpc-csd] Target counts: cpc_documents=${docTotal}, cpc_audit_logs=${logTotal}`);
|
||||
}
|
||||
|
||||
/**
 * Orchestrates the migration: connect to target and source, copy documents,
 * then audit logs (documents first — migrateAuditLogs skips any log whose
 * parent document is missing from cpc_documents), then report counts.
 * Connections are always closed, even on failure.
 */
async function main(): Promise<void> {
  const { sequelize: source, close } = await openSource();
  try {
    await sequelize.authenticate();
    await source.authenticate();
    console.log('[migrate-cpc-csd] Connected to target (DATABASE_URL) and source.');

    // Order matters: audit logs reference documents by id.
    const docInserted = await migrateDocuments(source);
    const logInserted = await migrateAuditLogs(source);

    console.log(`[migrate-cpc-csd] New cpc_documents rows: ${docInserted}`);
    console.log(`[migrate-cpc-csd] New cpc_audit_logs rows: ${logInserted}`);
    await printCounts();
  } finally {
    // `close` is a no-op when source and target share DATABASE_URL (see openSource).
    await close();
    await sequelize.close();
  }
}
|
||||
|
||||
main().catch((e) => {
|
||||
console.error('[migrate-cpc-csd] Failed:', e);
|
||||
process.exit(1);
|
||||
});
|
||||
@ -75,6 +75,10 @@ import * as m67 from '../migrations/20260324110001-add-pan-number-to-26as';
|
||||
import * as m68 from '../migrations/20260325090001-ensure-pan-number-in-26as';
|
||||
import * as m69 from '../migrations/20260325094500-add-user-session-and-hsn-sac-codes';
|
||||
import * as m70 from '../migrations/20260325175000-update-credit-notes-and-add-items';
|
||||
import * as m71 from '../migrations/2026041300-create-cpc-cdc-tables';
|
||||
import * as m72 from '../migrations/20260414100000-ensure-cpc-cdc-tables-exist';
|
||||
import * as m73 from '../migrations/20260416120000-rename-cpc-cdc-admin-config-key';
|
||||
|
||||
|
||||
interface Migration {
|
||||
name: string;
|
||||
@ -157,6 +161,9 @@ const migrations: Migration[] = [
|
||||
{ name: '20260325090001-ensure-pan-number-in-26as', module: m68 },
|
||||
{ name: '20260325094500-add-user-session-and-hsn-sac-codes', module: m69 },
|
||||
{ name: '20260325175000-update-credit-notes-and-add-items', module: m70 },
|
||||
{ name: '2026041300-create-cpc-cdc-tables', module: m71 },
|
||||
{ name: '20260414100000-ensure-cpc-cdc-tables-exist', module: m72 },
|
||||
{ name: '20260416120000-rename-cpc-cdc-admin-config-key', module: m73 },
|
||||
];
|
||||
|
||||
/**
|
||||
|
||||
@ -113,13 +113,6 @@ const startServer = async (): Promise<void> => {
|
||||
console.error('⚠️ Activity type seeding error:', error);
|
||||
}
|
||||
|
||||
// Ensure demo admin user exists (admin@example.com / Admin@123)
|
||||
const { ensureDemoAdminUser } = require('./scripts/seed-admin-user');
|
||||
try {
|
||||
await ensureDemoAdminUser();
|
||||
} catch (error) {
|
||||
console.warn('⚠️ Demo admin user setup warning:', error);
|
||||
}
|
||||
|
||||
// Initialize holidays cache for TAT calculations
|
||||
try {
|
||||
|
||||
@ -5,7 +5,6 @@ import type { StringValue } from 'ms';
|
||||
import { LoginResponse } from '../types/auth.types';
|
||||
import logger, { logAuthEvent } from '../utils/logger';
|
||||
import axios from 'axios';
|
||||
import bcrypt from 'bcryptjs';
|
||||
import { v4 as uuidv4 } from 'uuid';
|
||||
import { emitToUser } from '../realtime/socket';
|
||||
import { ACCESS_TOKEN_TTL_MS } from '../config/sessionPolicy';
|
||||
@ -616,174 +615,16 @@ export class AuthService {
|
||||
}
|
||||
|
||||
/**
|
||||
* Authenticate user with username (email) and password via Okta API
|
||||
* This is for direct API authentication (e.g., Postman, mobile apps)
|
||||
*
|
||||
* Flow:
|
||||
* 1. Authenticate with Okta using username/password
|
||||
* 2. Get access token from Okta
|
||||
* 3. Fetch user info from Okta
|
||||
* 4. Create/update user in our database if needed
|
||||
* 5. Return our JWT tokens
|
||||
* Authenticate user with username (email) and password via Okta (Resource Owner Password grant).
|
||||
* For direct API clients (e.g. Postman) when Okta allows this grant; otherwise use token-exchange.
|
||||
* No local or demo password bypass.
|
||||
*/
|
||||
async authenticateWithPassword(username: string, password: string, userAgent?: string): Promise<LoginResponse> {
|
||||
// Demo admin: admin@example.com / Admin@123 (works with or without .env; for dev/demo only)
|
||||
const DEMO_ADMIN_EMAIL = 'admin@example.com';
|
||||
const DEFAULT_DEMO_ADMIN_HASH = '$2a$10$H4ikTC.HDZPM0iFxjBy2C./WlkbGbidipIiZlXIJx6QpcBazdf12K'; // bcrypt of "Admin@123"
|
||||
const tryLocalAdminLogin = async (): Promise<LoginResponse | null> => {
|
||||
const normalizedInput = username?.trim?.()?.toLowerCase?.() ?? '';
|
||||
const adminEmail = process.env.LOCAL_ADMIN_EMAIL?.trim() || DEMO_ADMIN_EMAIL;
|
||||
if (normalizedInput !== adminEmail.toLowerCase()) return null;
|
||||
const hash = process.env.LOCAL_ADMIN_PASSWORD_HASH?.trim() || DEFAULT_DEMO_ADMIN_HASH;
|
||||
const passwordMatch = await bcrypt.compare(password, hash);
|
||||
if (!passwordMatch) return null;
|
||||
let user = await User.findOne({ where: { email: adminEmail } });
|
||||
const sessionToken = uuidv4();
|
||||
const lastLoginDevice = parseDeviceFromUserAgent(userAgent);
|
||||
|
||||
if (!user) {
|
||||
user = await User.create({
|
||||
email: adminEmail,
|
||||
oktaSub: 'local-ADMIN',
|
||||
displayName: 'RE Admin',
|
||||
firstName: 'RE',
|
||||
lastName: 'Admin',
|
||||
isActive: true,
|
||||
role: 'ADMIN',
|
||||
emailNotificationsEnabled: true,
|
||||
pushNotificationsEnabled: true,
|
||||
inAppNotificationsEnabled: true,
|
||||
sessionToken,
|
||||
lastLoginDevice,
|
||||
lastLogin: new Date()
|
||||
});
|
||||
logger.info('Demo admin user created on first login', { email: adminEmail });
|
||||
} else {
|
||||
await user.update({ lastLogin: new Date(), sessionToken, lastLoginDevice });
|
||||
}
|
||||
logger.info('Demo admin login successful', { email: adminEmail });
|
||||
const accessToken = this.generateAccessToken(user);
|
||||
const refreshToken = this.generateRefreshToken(user);
|
||||
return {
|
||||
user: {
|
||||
userId: user.userId,
|
||||
employeeId: user.employeeId ?? null,
|
||||
email: user.email,
|
||||
firstName: user.firstName ?? null,
|
||||
lastName: user.lastName ?? null,
|
||||
displayName: user.displayName ?? null,
|
||||
department: user.department ?? null,
|
||||
designation: user.designation ?? null,
|
||||
jobTitle: user.jobTitle ?? null,
|
||||
role: user.role,
|
||||
},
|
||||
accessToken,
|
||||
refreshToken,
|
||||
};
|
||||
};
|
||||
|
||||
// Helper: try local dealer login (TESTREFLOW) when ENABLE_LOCAL_DEALER_LOGIN is set (in scope for try and catch)
|
||||
const tryLocalDealerLogin = async (): Promise<LoginResponse | null> => {
|
||||
const enabled = process.env.ENABLE_LOCAL_DEALER_LOGIN?.toLowerCase()?.trim() === 'true';
|
||||
const hash = process.env.LOCAL_DEALER_PASSWORD_HASH?.trim();
|
||||
const localUsername = 'TESTREFLOW';
|
||||
const normalizedUsername = username?.trim?.()?.toUpperCase?.() ?? '';
|
||||
if (!enabled || !hash || normalizedUsername !== localUsername) return null;
|
||||
const passwordMatch = await bcrypt.compare(password, hash);
|
||||
if (!passwordMatch) return null;
|
||||
logger.info('Local dealer login successful', { username: localUsername });
|
||||
return this.handleSSOCallback({
|
||||
oktaSub: 'local-TESTREFLOW',
|
||||
email: 'testreflow@example.com',
|
||||
displayName: 'Test Reflow Dealer',
|
||||
firstName: 'Test',
|
||||
lastName: 'Reflow',
|
||||
}, userAgent);
|
||||
};
|
||||
|
||||
// Fallback bcrypt hash for "Test@123" when .env hash is corrupted (dev only)
|
||||
const ROHIT_DEALER_EMAIL = 'rohitm_ext@royalenfield.com';
|
||||
const FALLBACK_HASH_TEST123 = '$2a$10$gQ34/Jt9rOFDBWJqVur2W.ZWlN0vqAzt2I/6HKBKOtggowY/R8W/C';
|
||||
|
||||
// Helper: try local login by email (e.g. rohitm_ext@royalenfield.com) when LOCAL_DEALER_2_* is set or known dealer
|
||||
const tryLocalDealerLoginByEmail = async (): Promise<LoginResponse | null> => {
|
||||
const envEmail = process.env.LOCAL_DEALER_2_EMAIL?.trim()?.toLowerCase();
|
||||
const rawHash = process.env.LOCAL_DEALER_2_PASSWORD_HASH;
|
||||
let hash = (typeof rawHash === 'string' ? rawHash.trim() : '') || '';
|
||||
if (hash.length >= 2 && ((hash.startsWith('"') && hash.endsWith('"')) || (hash.startsWith("'") && hash.endsWith("'")))) hash = hash.slice(1, -1);
|
||||
const normalizedInput = username?.trim?.()?.toLowerCase?.() ?? '';
|
||||
const isRohitEmail = normalizedInput === ROHIT_DEALER_EMAIL;
|
||||
const email = envEmail || (isRohitEmail ? ROHIT_DEALER_EMAIL : null);
|
||||
const inputMatches = !!email && normalizedInput === email;
|
||||
if (!inputMatches) {
|
||||
logger.info('[Auth] Local dealer by email skip', {
|
||||
hasEmail: !!envEmail,
|
||||
hasHash: !!hash,
|
||||
hashLen: hash.length,
|
||||
inputMatch: inputMatches,
|
||||
normalizedInput: normalizedInput ? `${normalizedInput.slice(0, 5)}...` : '',
|
||||
});
|
||||
return null;
|
||||
}
|
||||
let passwordMatch = false;
|
||||
if (hash.length >= 50) {
|
||||
passwordMatch = await bcrypt.compare(password, hash);
|
||||
}
|
||||
if (!passwordMatch && isRohitEmail) {
|
||||
passwordMatch = await bcrypt.compare(password, FALLBACK_HASH_TEST123);
|
||||
if (passwordMatch) logger.info('[Auth] Local dealer login used fallback hash for', { email: ROHIT_DEALER_EMAIL });
|
||||
}
|
||||
if (!passwordMatch) {
|
||||
logger.warn('[Auth] Local dealer by email: password mismatch', { email });
|
||||
return null;
|
||||
}
|
||||
const { Op } = await import('sequelize');
|
||||
const user = await User.findOne({ where: { email: { [Op.iLike]: email } } });
|
||||
if (!user) {
|
||||
logger.warn('Local dealer login by email: user not found', { email });
|
||||
return null;
|
||||
}
|
||||
const sessionToken = uuidv4();
|
||||
const lastLoginDevice = parseDeviceFromUserAgent(userAgent);
|
||||
await user.update({ lastLogin: new Date(), sessionToken, lastLoginDevice });
|
||||
logger.info('Local dealer login by email successful', { email });
|
||||
const accessToken = this.generateAccessToken(user);
|
||||
const refreshToken = this.generateRefreshToken(user);
|
||||
return {
|
||||
user: {
|
||||
userId: user.userId,
|
||||
employeeId: user.employeeId ?? null,
|
||||
email: user.email,
|
||||
firstName: user.firstName ?? null,
|
||||
lastName: user.lastName ?? null,
|
||||
displayName: user.displayName ?? null,
|
||||
department: user.department ?? null,
|
||||
designation: user.designation ?? null,
|
||||
jobTitle: user.jobTitle ?? null,
|
||||
role: user.role,
|
||||
},
|
||||
accessToken,
|
||||
refreshToken,
|
||||
};
|
||||
};
|
||||
|
||||
try {
|
||||
logger.info('Authenticating user with username/password', { username });
|
||||
|
||||
// Demo admin (admin@example.com / Admin@123) and optional env-based local admin
|
||||
const adminResult = await tryLocalAdminLogin();
|
||||
if (adminResult) return adminResult;
|
||||
|
||||
// Development-only: try local dealer login when enabled
|
||||
const localResult = await tryLocalDealerLogin();
|
||||
if (localResult) return localResult;
|
||||
|
||||
// Optional: local login by email (e.g. rohit.m.ext@royalenfield.com) when LOCAL_DEALER_2_* set
|
||||
const localEmailResult = await tryLocalDealerLoginByEmail();
|
||||
if (localEmailResult) return localEmailResult;
|
||||
|
||||
// Step 1: Authenticate with Okta using Resource Owner Password flow
|
||||
// Note: This requires Okta to have Resource Owner Password grant type enabled
|
||||
// Authenticate with Okta using Resource Owner Password flow
|
||||
// Requires Okta Resource Owner Password grant when used; otherwise use SSO / token-exchange.
|
||||
const tokenEndpoint = `${ssoConfig.oktaDomain}/oauth2/default/v1/token`;
|
||||
|
||||
const tokenResponse = await axios.post(
|
||||
@ -894,21 +735,6 @@ export class AuthService {
|
||||
oktaError: error.response?.data,
|
||||
});
|
||||
|
||||
// When Okta does not allow password grant (e.g. only authorization_code), fall back to local logins
|
||||
const msg = (error.message || '').toLowerCase();
|
||||
if (msg.includes('grant type') || msg.includes('not authorized to use the provided grant type')) {
|
||||
const adminFallback = await tryLocalAdminLogin();
|
||||
if (adminFallback) {
|
||||
logger.info('Local admin login used after Okta grant-type rejection');
|
||||
return adminFallback;
|
||||
}
|
||||
const localResult = await tryLocalDealerLogin();
|
||||
if (localResult) {
|
||||
logger.info('Local dealer login used after Okta grant-type rejection');
|
||||
return localResult;
|
||||
}
|
||||
}
|
||||
|
||||
if (error.response?.data) {
|
||||
const errorData = error.response.data;
|
||||
if (typeof errorData === 'object' && !Array.isArray(errorData)) {
|
||||
|
||||
71
src/services/cpc-cdc/CpcGcsService.ts
Normal file
71
src/services/cpc-cdc/CpcGcsService.ts
Normal file
@ -0,0 +1,71 @@
|
||||
import { Storage } from "@google-cloud/storage";
|
||||
import path from 'path';
|
||||
import logger from "@utils/logger";
|
||||
|
||||
/** Optional layout for CPC/CSD objects (mirrors local `uploads/cpc-csd-files/...`). */
|
||||
export type CpcGcsUploadOptions = {
|
||||
bucket?: string;
|
||||
/** Directory prefix inside the bucket, no leading slash, e.g. `cpc-csd/csd/BOOK-1/documents` */
|
||||
objectDir?: string;
|
||||
/** Final filename segment only (no path separators) */
|
||||
objectBaseName?: string;
|
||||
};
|
||||
|
||||
class CpcGcsService {
|
||||
private storage: Storage;
|
||||
private bucketName: string;
|
||||
|
||||
constructor() {
|
||||
this.storage = new Storage({
|
||||
projectId: process.env.GCP_PROJECT_ID,
|
||||
keyFilename: process.env.GCP_KEY_FILE
|
||||
});
|
||||
this.bucketName = process.env.GCP_BUCKET_NAME || '';
|
||||
}
|
||||
|
||||
parseGsUrl(gsUrl: string) {
|
||||
if (!gsUrl || !gsUrl.startsWith("gs://")) throw new Error("INVALID_DOCUMENT_URL");
|
||||
const s = gsUrl.slice(5);
|
||||
const [bucket, ...rest] = s.split("/");
|
||||
const objectPath = rest.join("/");
|
||||
if (!bucket || !objectPath) throw new Error("INVALID_DOCUMENT_URL");
|
||||
return { bucket, objectPath };
|
||||
}
|
||||
|
||||
async downloadFromGcs(gsUrl: string): Promise<Buffer> {
|
||||
const { bucket, objectPath } = this.parseGsUrl(gsUrl);
|
||||
const [buf] = await this.storage.bucket(bucket).file(objectPath).download();
|
||||
return buf;
|
||||
}
|
||||
|
||||
/**
|
||||
* Third argument can be a legacy custom bucket string, or structured options for path layout.
|
||||
*/
|
||||
async uploadToGcs(
|
||||
fileBuffer: Buffer,
|
||||
originalName: string,
|
||||
legacyBucketOrOpts?: string | CpcGcsUploadOptions
|
||||
): Promise<string> {
|
||||
const opts: CpcGcsUploadOptions =
|
||||
typeof legacyBucketOrOpts === 'string' ? { bucket: legacyBucketOrOpts } : legacyBucketOrOpts || {};
|
||||
const targetBucket = opts.bucket || this.bucketName;
|
||||
const base =
|
||||
opts.objectBaseName && !opts.objectBaseName.includes('/') && !opts.objectBaseName.includes('..')
|
||||
? opts.objectBaseName
|
||||
: `${Date.now()}-${path.basename(originalName)}`;
|
||||
const dir =
|
||||
opts.objectDir && !opts.objectDir.includes('..')
|
||||
? opts.objectDir.replace(/^\/+|\/+$/g, '')
|
||||
: 'cpc-csd/uploads';
|
||||
const fileName = `${dir}/${base}`.replace(/\\/g, '/');
|
||||
const bucket = this.storage.bucket(targetBucket);
|
||||
const file = bucket.file(fileName);
|
||||
|
||||
await file.save(fileBuffer);
|
||||
|
||||
logger.info(`[CpcGcsService] File uploaded to gs://${targetBucket}/${fileName}`);
|
||||
return `gs://${targetBucket}/${fileName}`;
|
||||
}
|
||||
}
|
||||
|
||||
export const cpcGcsService = new CpcGcsService();
|
||||
301
src/services/cpc-cdc/CpcHistoryService.ts
Normal file
301
src/services/cpc-cdc/CpcHistoryService.ts
Normal file
@ -0,0 +1,301 @@
|
||||
/**
|
||||
* Utility to map OCR document data to the "Excel Screenshot Summary" format
|
||||
* and ensure uniform detail field results for the CPC-CSD module.
|
||||
*/
|
||||
|
||||
const FIELD_DEFAULTS: any = {
|
||||
AADHAAR: ['customer_name', 'aadhar_number', 'name', 'dob', 'gender', 'address'],
|
||||
ADHAAR: ['customer_name', 'aadhar_number', 'name', 'dob', 'gender', 'address'],
|
||||
CSD_PO: ['customer_name', 'po_number', 'po_amount', 'signature_and_stamp'],
|
||||
GENERIC_INVOICE: ['customer_name', 'order_or_auth_number', 'invoice_value', 'invoice_date', 'tax_amount'],
|
||||
RETAIL_INVOICE: ['customer_name', 'order_or_auth_number', 'invoice_value', 'invoice_date', 'tax_amount'],
|
||||
INVOICE: ['customer_name', 'order_or_auth_number', 'invoice_value', 'invoice_date', 'tax_amount'],
|
||||
AUTHORITY_LETTER: [
|
||||
'customer_name',
|
||||
'letter_number',
|
||||
'letter_amount',
|
||||
'signature_and_stamp',
|
||||
'authorized_person_name',
|
||||
'order_or_authorisation_number',
|
||||
'invoice_value',
|
||||
'govt_signatory_and_stamp_present',
|
||||
'authority_grantor_name',
|
||||
'valid_until',
|
||||
'purpose',
|
||||
'date_of_issue'
|
||||
],
|
||||
AUTH_LETTER: [
|
||||
'customer_name',
|
||||
'letter_number',
|
||||
'letter_amount',
|
||||
'signature_and_stamp',
|
||||
'authorized_person_name',
|
||||
'order_or_authorisation_number',
|
||||
'invoice_value',
|
||||
'govt_signatory_and_stamp_present',
|
||||
'authority_grantor_name',
|
||||
'valid_until',
|
||||
'purpose',
|
||||
'date_of_issue'
|
||||
]
|
||||
};
|
||||
|
||||
const CRITERIA_MAP: any = {
|
||||
// Fallbacks when doc-type–specific text is not applied (UI / reports only)
|
||||
aadhaar_number: 'Exact match',
|
||||
aadhar_number: 'Exact match',
|
||||
name: 'Text match',
|
||||
dob: 'Exact after normalization',
|
||||
gender: 'Exact (M/Male normalize)',
|
||||
address: 'Text match',
|
||||
|
||||
customer_name: 'Text match',
|
||||
order_or_auth_number: 'Text match',
|
||||
order_or_authorisation_number: 'Text match',
|
||||
invoice_value: 'Amount comparison',
|
||||
invoice_date: 'Date comparison',
|
||||
tax_amount: 'Amount comparison',
|
||||
|
||||
authorized_person_name: 'Text match',
|
||||
authority_grantor_name: 'Text match',
|
||||
letter_number: 'Text match',
|
||||
valid_until: 'Exact date match',
|
||||
purpose: 'Text match',
|
||||
date_of_issue: 'Exact match',
|
||||
|
||||
mail_extraction: 'Email on document vs expected',
|
||||
|
||||
stamp: 'Signature / stamp vs expected',
|
||||
signatory: 'Signature / stamp vs expected',
|
||||
govt_signatory_and_stamp_present: 'Signature / stamp vs expected',
|
||||
stamp_sign_present: 'Signature / stamp vs expected',
|
||||
signature_and_stamp: 'Signature / stamp vs expected',
|
||||
po_number: 'Exact match',
|
||||
po_amount: 'Amount comparison',
|
||||
letter_amount: 'Amount comparison'
|
||||
};
|
||||
|
||||
/** Normalize document type for criteria copy (matches validation service naming). */
|
||||
function normalizeCriteriaDocType(docType?: string): string {
|
||||
const u = String(docType || '').toUpperCase().trim();
|
||||
if (u.includes('AADHAAR') || u === 'ADHAAR') return 'AADHAAR';
|
||||
if (u.includes('CPC_AUTH') || u.includes('AUTHORITY')) return 'CPC_AUTH';
|
||||
if (u.includes('CSD_PO') || u.includes('PURCHASE') || (u.includes('PO') && u.includes('CSD'))) return 'CSD_PO';
|
||||
if (u.includes('RETAIL') || u.includes('INVOICE')) return 'RETAIL_INVOICE';
|
||||
return u;
|
||||
}
|
||||
|
||||
/** Human-readable accuracy criteria for reports / API field_results (shared with validation). */
|
||||
export function getCriteriaLabel(field: string, docType?: string): string {
|
||||
if (!field) return 'Exact check';
|
||||
const f = field.toLowerCase();
|
||||
const dt = normalizeCriteriaDocType(docType);
|
||||
|
||||
if ((f === 'order_or_authorisation_number' || f === 'po_number') && dt === 'CSD_PO') {
|
||||
return 'Exact match';
|
||||
}
|
||||
if (f === 'letter_number' && dt === 'CPC_AUTH') {
|
||||
return 'Text match';
|
||||
}
|
||||
if (f === 'aadhaar_number' || f === 'aadhar_number') {
|
||||
return 'Exact match';
|
||||
}
|
||||
if ((f === 'customer_name' || f === 'name' || f === 'authorized_person_name') && (dt === 'CSD_PO' || dt === 'CPC_AUTH' || dt === 'AADHAAR')) {
|
||||
return 'Text match';
|
||||
}
|
||||
if ((f === 'invoice_value' || f === 'po_amount') && dt === 'CSD_PO') {
|
||||
return 'Amount comparison';
|
||||
}
|
||||
if ((f === 'invoice_value' || f === 'letter_amount') && dt === 'CPC_AUTH') {
|
||||
return 'Amount comparison';
|
||||
}
|
||||
if (f === 'govt_signatory_and_stamp_present' || f === 'stamp_sign_present' || f === 'signature_and_stamp') {
|
||||
return 'Signature / stamp vs expected';
|
||||
}
|
||||
if (f === 'mail_extraction') {
|
||||
return 'Email on document vs expected';
|
||||
}
|
||||
|
||||
const key = Object.keys(CRITERIA_MAP).find((k: string) => f.includes(k.toLowerCase()));
|
||||
return CRITERIA_MAP[key || ''] || 'Exact check';
|
||||
}
|
||||
|
||||
/** Normalize match % from persisted validation row (snake or camel). */
|
||||
function matchPctFromResult(found: Record<string, unknown> | null | undefined): number | null {
|
||||
if (!found || typeof found !== 'object') return null;
|
||||
const row = found as { match_percentage?: unknown; matchPercentage?: unknown };
|
||||
const v = row.match_percentage != null ? row.match_percentage : row.matchPercentage;
|
||||
if (v == null || v === '') return null;
|
||||
const n = Number(v);
|
||||
if (!Number.isFinite(n)) return null;
|
||||
return Math.round(n);
|
||||
}
|
||||
|
||||
/**
 * Shapes stored CPC/CSD document rows into per-field validation results and
 * the Excel-style summary row used by History / report views.
 * Reads both snake_case and camelCase variants of every persisted property.
 */
export class CpcHistoryService {
  /**
   * Transforms a document into a detailed field result array.
   * Ensures that if a field was expected but not extracted, it still shows up as a fail.
   */
  static getDetailedFieldResults(doc: any) {
    // Raw document type, preserved for criteria labels; lowercased/despaced for matching below.
    const rawDocTypeUpper = String(doc.documentType || doc.document_type || '').trim();
    const rawType = rawDocTypeUpper.toLowerCase().replace(/_/g, ' ');

    // Normalize type for internal lookup
    let type = 'UNKNOWN';
    if (rawType.includes('aadhaar') || rawType.includes('adhaar')) type = 'AADHAAR';
    else if (rawType.includes('authority') || rawType.includes('auth') || rawType.includes('cpc letter')) type = 'AUTHORITY_LETTER';
    else if (rawType.includes('invoice')) type = 'GENERIC_INVOICE';
    else if (rawType.includes('purchase order') || rawType.includes('csd_po') || rawType.includes('po'))
      type = 'CSD_PO';

    // Default expected-field list per document type, used only when the MSD payload is empty.
    const hardcodedKeys = FIELD_DEFAULTS[type] || [];

    // Read expected/extracted values from ALL possible variant keys
    const expectedObj = doc.msdPayload || doc.msd_payload || {};
    const extractedObj = doc.extractedFields || doc.extracted_fields || {};

    const payloadKeys = Object.keys(expectedObj);
    const expectedKeys = payloadKeys.length > 0 ? payloadKeys : hardcodedKeys;

    const rawFr = doc.fieldResults ?? doc.field_results;
    const existingResults = Array.isArray(rawFr) ? rawFr : [];

    const finalResults = expectedKeys.map((key: string) => {
      // Case-insensitive lookup of the persisted result for this expected key.
      const found = existingResults.find((r: any) => r.field?.toLowerCase() === key.toLowerCase());

      // NOTE(review): `||` treats 0 / '' as missing and falls back to the stored
      // result or '-' — confirm falsy MSD/extracted values cannot legitimately occur.
      const msdVal = expectedObj[key] || (found ? found.expected : '-');
      const ocrVal = extractedObj[key] || (found ? (found.extracted || found.actual) : '-');

      if (found) {
        // Normalize percentage; null (unparseable/absent) renders as 0.
        const mp = matchPctFromResult(found);
        const mpNum = mp != null ? mp : 0;
        const st = String((found as { status?: string }).status || '');
        // A field passes on an explicit boolean flag or a passing status string.
        const pass =
          (found as { pass?: boolean }).pass === true ||
          st === 'SUCCESSFUL' ||
          st === 'MATCH';
        // Spread the stored row first, then overwrite with normalized variants
        // so both snake and camel consumers see consistent values.
        return {
          ...found,
          field: key,
          expected: String(msdVal),
          extracted: String(ocrVal),
          status: st || (found as { status?: string }).status,
          match_percentage: mpNum,
          matchPercentage: mpNum,
          accuracy:
            (found as { accuracy?: string }).accuracy ||
            (mp != null ? `${mp}%` : `${mpNum}%`),
          criteria: (found as { criteria?: string }).criteria || getCriteriaLabel(key, rawDocTypeUpper),
          pass,
          message:
            (found as { reason?: string }).reason ||
            (found as { message?: string }).message ||
            (pass ? 'Matched' : 'Mismatch detected')
        };
      }
      // Expected key with no persisted result: synthesize an explicit failure row.
      return {
        field: key,
        expected: String(msdVal),
        extracted: String(ocrVal),
        match_percentage: 0,
        matchPercentage: 0,
        accuracy: '0%',
        criteria: getCriteriaLabel(key, rawDocTypeUpper),
        pass: false,
        status: 'MISSING',
        message: 'Not found'
      };
    });

    return finalResults;
  }

  /**
   * Generates the "Excel Screenshot Summary" row for a document.
   * Groups field results into five fixed columns (f1..f5); columns that do not
   * apply to the document type are filled with the N.A. placeholder.
   */
  static getSummaryRow(doc: any, idx: number) {
    const rawType = String(doc.documentType || doc.document_type || '')
      .toLowerCase()
      .trim()
      .replace(/_/g, ' ');
    const results = this.getDetailedFieldResults(doc);
    // Case-insensitive result lookup by field key.
    const findRes = (key: string) => results.find((r: any) => r.field.toLowerCase() === key.toLowerCase()) || null;


    // Booking classification is inferred from the claim id prefix; everything else is CSD.
    const booking_type = doc.claimId?.startsWith('CPC') ? 'CPC' : 'CSD';
    const booking_number = doc.bookingId || doc.claimId || 'N/A';

    // Build one summary cell group from a primary key plus accepted aliases.
    const mapGroup = (fieldKey: string, altKeys: string[] = []) => {
      const res = findRes(fieldKey) || (altKeys.length > 0 ? results.find((r: any) => altKeys.some(ak => r.field.toLowerCase() === ak.toLowerCase())) : null);

      if (!res) return { msd: 'N.A.', ocr: 'N.A.', accuracy_pct: 'N.A.', criteria: 'N.A.', is_match: 'N.A.', isNA: true };
      return {
        msd: res.expected,
        ocr: res.extracted,
        accuracy_pct: res.accuracy,
        criteria: res.criteria,
        is_match: res.pass ? 'Yes' : 'No',
        isNA: false
      };
    };

    let f1, f2, f3, f4, f5;
    const na = { msd: 'N.A.', ocr: 'N.A.', accuracy_pct: 'N.A.', criteria: 'N.A.', is_match: 'N.A.', isNA: true };

    // Per-type column mapping: f1=name, f2=order/letter #, f3=Aadhaar, f4=amount, f5=stamp.
    if (rawType.includes('aadhaar') || rawType.includes('adhaar')) {
      f1 = mapGroup('customer_name', ['name', 'authorized_person_name']);
      f2 = na;
      f3 = mapGroup('aadhar_number', ['aadhaar_number']);
      f4 = na;
      f5 = na;
    } else if (rawType.includes('authority') || rawType.includes('auth') || rawType.includes('cpc letter')) {
      f1 = mapGroup('customer_name', ['authorized_person_name', 'name']);
      f2 = mapGroup('letter_number', [
        'order_or_auth_number',
        'letter_no',
        'order_or_authorisation_number'
      ]);
      f3 = na;
      f4 = mapGroup('letter_amount', ['invoice_value', 'amount']);
      f5 = mapGroup('signature_and_stamp', ['govt_signatory_and_stamp_present', 'stamp', 'signatory', 'stamp_sign_present']);
    } else if (rawType.includes('purchase order') || rawType.includes('csd_po') || rawType.includes('po')) {
      f1 = mapGroup('customer_name');
      f2 = mapGroup('po_number', ['order_or_authorisation_number', 'order_or_auth_number']);
      f3 = na;
      f4 = mapGroup('po_amount', ['invoice_value', 'amount']);
      f5 = mapGroup('signature_and_stamp', ['govt_signatory_and_stamp_present', 'stamp', 'signatory', 'stamp_sign_present']);
    } else if (rawType.includes('invoice')) {
      f1 = mapGroup('customer_name');
      f2 = mapGroup('order_or_auth_number', ['order_or_authorisation_number']);
      f3 = na;
      f4 = mapGroup('invoice_value', ['tax_amount']);
      f5 = mapGroup('govt_signatory_and_stamp_present', ['stamp', 'signatory', 'stamp_sign_present']);
    } else {
      // Unknown type: best-effort generic mapping.
      f1 = mapGroup('customer_name', ['name', 'authorized_person_name']);
      f2 = mapGroup('order_or_auth_number', ['aadhaar_number', 'order_or_authorisation_number']);
      f3 = na;
      f4 = mapGroup('invoice_value', ['amount']);
      f5 = mapGroup('govt_signatory_and_stamp_present', ['stamp', 'signatory', 'stamp_sign_present']);
    }

    // Overall verdict from the document-level status string only.
    const vs = String(doc.validationStatus || '').toUpperCase();
    const final_validation =
      vs === 'SUCCESSFUL' || vs === 'MATCH' || vs === 'APPROVED' ? 'Successful' : 'Unsuccessful';

    return {
      booking_type,
      booking_number,
      document_count: idx + 1,
      document_name: rawType.toUpperCase(),
      f1, f2, f3, f4, f5,
      // Named aliases of the positional groups for consumers that prefer them.
      customer_name_group: f1,
      po_or_auth_number_group: f2,
      aadhaar_number_group: f3,
      amount_group: f4,
      stamp_group: f5,
      field_results: results,
      final_validation,
      createdAt: doc.createdAt
    };
  }
}
|
||||
44
src/services/cpc-cdc/CpcOcrService.ts
Normal file
44
src/services/cpc-cdc/CpcOcrService.ts
Normal file
@ -0,0 +1,44 @@
|
||||
import { DocumentProcessorServiceClient } from "@google-cloud/documentai";
|
||||
import logger from "@utils/logger";
|
||||
|
||||
export class CpcOcrService {
|
||||
private client: DocumentProcessorServiceClient;
|
||||
|
||||
constructor() {
|
||||
this.client = new DocumentProcessorServiceClient({
|
||||
keyFilename: process.env.GCP_KEY_FILE
|
||||
});
|
||||
}
|
||||
|
||||
async runDocAIOcr(params: {
|
||||
projectId: string,
|
||||
location: string,
|
||||
processorId: string,
|
||||
fileBuffer: Buffer,
|
||||
mimeType?: string
|
||||
}) {
|
||||
const { projectId, location, processorId, fileBuffer, mimeType } = params;
|
||||
const name = `projects/${projectId}/locations/${location}/processors/${processorId}`;
|
||||
|
||||
logger.info(`[CpcOcrService] Running Document AI OCR for processor: ${processorId}`);
|
||||
|
||||
const request = {
|
||||
name,
|
||||
rawDocument: {
|
||||
content: fileBuffer.toString("base64"),
|
||||
mimeType: mimeType || "application/pdf",
|
||||
},
|
||||
};
|
||||
|
||||
try {
|
||||
const [result] = await this.client.processDocument(request);
|
||||
const text = result?.document?.text || "";
|
||||
return { text };
|
||||
} catch (error) {
|
||||
logger.error(`[CpcOcrService] Document AI Error: ${error instanceof Error ? error.message : String(error)}`);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export const cpcOcrService = new CpcOcrService();
|
||||
371
src/services/cpc-cdc/CpcRuleExtractService.ts
Normal file
371
src/services/cpc-cdc/CpcRuleExtractService.ts
Normal file
@ -0,0 +1,371 @@
|
||||
import { calculateMatch } from './utils';
|
||||
|
||||
/** Optional hints forwarded from the upload request to guide rule-based extraction. */
export type RuleExtractHints = {
  /** MSD fields typed in UI — used to find the same text inside the PDF (no "Name:" label needed). */
  msdPayload?: Record<string, unknown>;
  /** When `CSD_PO`, prefer buyer/beneficiary lines (Sold To, Bill To, …) over the first generic `Name:` (often supplier). */
  documentType?: string;
};
|
||||
|
||||
/**
|
||||
* Regex-based extraction logic for CPC-CSD documents.
|
||||
* Provides a lightweight alternative to Gemini for common patterns.
|
||||
* Field names align with MSD payloads from the CPC dashboard (e.g. authority_letter).
|
||||
*/
|
||||
function escapeRegExp(s: string): string {
|
||||
return s.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
|
||||
}
|
||||
|
||||
/** If MSD name appears verbatim (spacing flexible) in PDF text, return the matched span. */
|
||||
function matchMsdNameInBody(body: string, expected: string): string | null {
|
||||
const e = String(expected || '').trim();
|
||||
if (e.length < 2) return null;
|
||||
const flex = escapeRegExp(e).replace(/\s+/g, '\\s+');
|
||||
const m = body.match(new RegExp(flex, 'i'));
|
||||
return m ? m[0].replace(/\s+/g, ' ').trim() : null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Same word as MSD on a line with other text (table cells, "Customer Arjun …") — strict substring match often fails.
|
||||
*/
|
||||
function findMsdNameTokenInOcr(body: string, expected: string): string | null {
|
||||
const h = String(expected || '').trim();
|
||||
if (h.length < 2 || !body.trim()) return null;
|
||||
const hl = h.toLowerCase();
|
||||
const noise = /^(qty|ref|date|page|gst|hsn|po|no|id|by|to|of|in|at|sl|sr|index|desc|amount|total)$/i;
|
||||
const lines = body.split(/\r?\n/).map((l) => l.trim()).filter((l) => l.length > 0);
|
||||
for (const line of lines) {
|
||||
if (line.length > 160) continue;
|
||||
if (line.toLowerCase() === hl) return line;
|
||||
const parts = line.split(/[\s,;:|/<>()[\]]+/).filter(Boolean);
|
||||
for (const raw of parts) {
|
||||
const p = raw.replace(/^[^A-Za-z\u0900-\u097F0-9]+|[^A-Za-z\u0900-\u097F0-9]+$/g, '');
|
||||
if (!p || p.length < 2 || noise.test(p)) continue;
|
||||
if (p.toLowerCase() === hl) return p;
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
/** Pick a short line whose fuzzy score vs MSD is high (authority letters often put name on its own line). */
|
||||
function pickNameLineByMsd(body: string, expected: string, minScore = 52): string | null {
|
||||
const exp = String(expected || '').trim();
|
||||
if (exp.length < 2 || !body.trim()) return null;
|
||||
let best: { line: string; score: number } | null = null;
|
||||
const lines = body.split(/\r?\n/).map((l) => l.trim()).filter((l) => l.length > 2 && l.length < 120);
|
||||
for (const line of lines) {
|
||||
if (/^(page|ref|no\.?|date|subject|to|from|dear|sir|madam|annex|schedule|authority|letter|royal|enfield|\d+\s*\/\s*\d+)/i.test(line)) {
|
||||
continue;
|
||||
}
|
||||
const s = calculateMatch(exp, line, 'authorized_person_name');
|
||||
if (s >= minScore && (!best || s > best.score)) {
|
||||
best = { line, score: s };
|
||||
}
|
||||
}
|
||||
return best?.line ?? null;
|
||||
}
|
||||
|
||||
function normalizePan(s: string): string | null {
|
||||
const p = String(s || '')
|
||||
.toUpperCase()
|
||||
.replace(/\s/g, '');
|
||||
return /^[A-Z]{5}[0-9]{4}[A-Z]$/.test(p) ? p : null;
|
||||
}
|
||||
|
||||
/** If MSD PAN appears in PDF text, return canonical PAN (OCR may split with spaces). */
|
||||
function panFromMsdHint(body: string, msdPan: unknown): string | null {
|
||||
const p = normalizePan(String(msdPan ?? ''));
|
||||
if (!p || !body) return null;
|
||||
const compact = body.toUpperCase().replace(/[\s-]/g, '');
|
||||
return compact.includes(p) ? p : null;
|
||||
}
|
||||
|
||||
/** If MSD amount digits appear in body, return normalized digit string for range match. */
|
||||
function invoiceDigitsFromMsdHint(body: string, msdAmt: unknown): string | null {
|
||||
const d = String(msdAmt ?? '').replace(/[^\d.]/g, '');
|
||||
if (!d || d.length < 1) return null;
|
||||
const intPart = d.split('.')[0];
|
||||
if (intPart.length >= 2 && body.replace(/[^\d]/g, '').includes(intPart)) {
|
||||
return d;
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
/** Supplier / letterhead lines — not the CSD customer individual name. */
|
||||
const RE_COMPANY_NAME_HINT =
|
||||
/\b(LIMITED|LTD\.?|L\.?\s*L\.?\s*P\.?|PVT\.?\s*LTD|PRIVATE\s+LIMITED|PVT|PTE|INC\.?|CORP|CORPORATION|INDIA\s+LTD|MOTORS?|AUTOMOBILES?|DEALERS?|ENTERPRISES?|SALES\s*(?:&|AND)?\s*SERVICE|WORKS|AGENCIES)\b/i;
|
||||
|
||||
function looksLikeCompanyLine(s: string): boolean {
|
||||
const x = String(s || '').trim();
|
||||
if (!x) return false;
|
||||
if (RE_COMPANY_NAME_HINT.test(x)) return true;
|
||||
if (/^[A-Z0-9.&\s\-]{14,}$/.test(x) && !/\s{2,}/.test(x)) return true;
|
||||
return false;
|
||||
}
|
||||
|
||||
function trimBuyerCapture(raw: string): string {
|
||||
let s = String(raw || '').replace(/\r/g, '').trim();
|
||||
s = s.replace(/^[:\-–—\s]+/, '');
|
||||
const cut = s.split(/\b(?:GSTIN|PAN|Phone|Tel|Email|E-?mail|Mob|Mobile|Address|Qty|Quantity|Part)\b/i)[0];
|
||||
s = (cut ?? s).trim();
|
||||
return s.replace(/\s+/g, ' ').trim();
|
||||
}
|
||||
|
||||
function isCsdPoHints(hints?: RuleExtractHints): boolean {
|
||||
const dt = String(hints?.documentType || '').toUpperCase();
|
||||
return dt.includes('CSD_PO') || dt.includes('PURCHASE_ORDER');
|
||||
}
|
||||
|
||||
/** Many CSD PO line-items print: 16-digit card/UIN then customer name then plot no / address (Description column). */
|
||||
const RE_VEHICLE_TOKENS =
|
||||
/^(ROYAL|ENFIELD|METEOR|CLASSIC|BULLET|HIMALAYAN|INTERCEPTOR|CONTINENTAL|STELLAR|THUNDER|BS-?VI|BSVI|SUPER|VARIANT|MODEL|CC|HP|ABS|QTY|HSN)$/i;
|
||||
|
||||
function isPlausibleHumanNameFromPoDescription(s: string): boolean {
|
||||
const x = String(s || '')
|
||||
.replace(/\s+/g, ' ')
|
||||
.trim();
|
||||
if (x.length < 3 || x.length > 72) return false;
|
||||
const parts = x.split(/\s+/).filter(Boolean);
|
||||
if (parts.length < 1 || parts.length > 6) return false;
|
||||
if (looksLikeCompanyLine(x)) return false;
|
||||
for (const p of parts) {
|
||||
if (RE_VEHICLE_TOKENS.test(p)) return false;
|
||||
}
|
||||
return parts.some((p) => /^[A-Za-z\u0900-\u097F]{2,}$/.test(p));
|
||||
}
|
||||
|
||||
/**
 * Pattern: `5312423002619089 KALAIYARASAN K 71` — 16 digits (optional spaces in groups of 4),
 * then name tokens, then often a short plot/house number or newline/address.
 * Returns the first plausible person name found after a 16-digit card/UIN, else null.
 */
function extractCsdPoNameInDescriptionColumn(body: string): string | null {
  // Normalize CRLF and non-breaking spaces so the regexes below behave uniformly.
  const norm = body.replace(/\r\n/g, '\n').replace(/\u00a0/g, ' ');
  // Two printed shapes of the card number: grouped 4x4 with spaces, or one solid 16-digit run.
  const digitRes: RegExp[] = [/\b\d{4}\s+\d{4}\s+\d{4}\s+\d{4}\b/g, /\b\d{16}\b/g];
  // Start offsets already examined — both regexes can hit the same position.
  const seenAt = new Set<number>();

  for (const re of digitRes) {
    re.lastIndex = 0; // /g regexes are stateful; reset before each full scan
    let dm: RegExpExecArray | null;
    while ((dm = re.exec(norm)) !== null) {
      const compact = dm[0].replace(/\s/g, '');
      if (compact.length !== 16 || !/^\d{16}$/.test(compact)) continue;
      if (seenAt.has(dm.index)) continue;
      seenAt.add(dm.index);

      // Text immediately following the card number, leading whitespace dropped.
      const tail = norm.slice(dm.index + dm[0].length).replace(/^\s+/, '');
      // Strict capture: up to six alphabetic words terminated by a short number
      // (plot/house no.), a newline, or end of text.
      let nm = tail.match(
        /^([A-Za-z\u0900-\u097F]+(?:\s+[A-Za-z\u0900-\u097F]+){0,5})(?=\s+\d{1,4}\b|\s*\n|\s*$)/i
      );
      if (!nm?.[1]) {
        // Loose fallback: a single word, accepted only if it passes the plausibility filter.
        const loose = tail.match(/^([A-Za-z\u0900-\u097F]{2,25})\b/i);
        if (loose?.[1] && isPlausibleHumanNameFromPoDescription(loose[1])) nm = loose;
      }
      if (!nm?.[1]) continue;
      const candidate = nm[1].replace(/\s+/g, ' ').trim();
      if (isPlausibleHumanNameFromPoDescription(candidate)) {
        return candidate;
      }
    }
  }
  return null;
}
|
||||
|
||||
/**
|
||||
* CSD / defence-style POs usually put the customer under Sold To / Bill To / card holder,
|
||||
* not under the first "Name:" (often dealer contact).
|
||||
*/
|
||||
function extractCsdPoBuyerFromLabels(body: string): string | null {
|
||||
const norm = body.replace(/\r\n/g, '\n');
|
||||
const patterns: RegExp[] = [
|
||||
/(?:^|\n)\s*Sold\s*To\s*[:\-]?\s*\n*\s*([^\n\r]{2,120})/i,
|
||||
/(?:^|\n)\s*Bill\s*To\s*[:\-]?\s*\n*\s*([^\n\r]{2,120})/i,
|
||||
/(?:^|\n)\s*Ship\s*To\s*[:\-]?\s*\n*\s*([^\n\r]{2,120})/i,
|
||||
/(?:^|\n)\s*Consignee\s*[:\-]?\s*\n*\s*([^\n\r]{2,120})/i,
|
||||
/(?:^|\n)\s*(?:Buyer|Purchaser)\s*[:\-]?\s*\n*\s*([^\n\r]{2,120})/i,
|
||||
/(?:^|\n)\s*Customer\s*(?:Name|Details)?\s*[:\-]?\s*\n*\s*([^\n\r]{2,120})/i,
|
||||
/(?:^|\n)\s*CSD\s*Card(?:\s*Holder)?\s*[:\-]?\s*\n*\s*([^\n\r]{2,120})/i,
|
||||
/(?:^|\n)\s*Card\s*Holder(?:\s*Name)?\s*[:\-]?\s*\n*\s*([^\n\r]{2,120})/i,
|
||||
/(?:^|\n)\s*Beneficiary\s*[:\-]?\s*\n*\s*([^\n\r]{2,120})/i,
|
||||
/(?:^|\n)\s*Name\s*of\s*(?:the\s*)?(?:Purchaser|Buyer|Customer)\s*[:\-]?\s*\n*\s*([^\n\r]{2,120})/i,
|
||||
/(?:^|\n)\s*(?:Ordered|Order)\s*(?:By|Placed\s*By)\s*[:\-]?\s*\n*\s*([^\n\r]{2,120})/i
|
||||
];
|
||||
for (const re of patterns) {
|
||||
const m = norm.match(re);
|
||||
if (!m?.[1]) continue;
|
||||
const line = trimBuyerCapture(m[1]);
|
||||
if (line.length < 2 || line.length > 100) continue;
|
||||
if (/^(page|date|amount|total|ref|subject)\b/i.test(line)) continue;
|
||||
return line;
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
 * Regex/heuristic extraction of CPC-CSD document fields from raw OCR text,
 * optionally guided by the MSD values the user typed (RuleExtractHints).
 */
export class CpcRuleExtractService {
  /**
   * If Vertex returned a supplier-style string but OCR shows a clear buyer line, prefer the buyer line.
   */
  static refineCsdPoCustomerName(ocrText: string, customerName: unknown): string | null {
    const cur = String(customerName ?? '').trim();
    const text = String(ocrText || '');
    // Two independent buyer detectors: description-column pattern, then labelled fields.
    const fromDesc = extractCsdPoNameInDescriptionColumn(text);
    const fromLabels = extractCsdPoBuyerFromLabels(text);
    const buyer = fromDesc || fromLabels;
    // No OCR buyer found: keep the incoming name only if it is non-trivial.
    if (!buyer) return cur.length >= 2 ? cur : null;
    if (!cur) return buyer;
    // Swap only when the current value looks corporate and the OCR buyer does not.
    if (looksLikeCompanyLine(cur) && !looksLikeCompanyLine(buyer)) return buyer;
    return cur;
  }

  /**
   * Runs all regex/heuristic extractors over the OCR text and returns
   * extracted_fields (values under every key alias the validators read)
   * plus per-field heuristic confidence scores.
   */
  static extractWithRules(ocrText: string, hints?: RuleExtractHints) {
    const t = String(ocrText || "");
    const msd = hints?.msdPayload || {};
    const isCsdPo = isCsdPoHints(hints);

    // Matches 12 digit Aadhaar (with optional spaces)
    const aadhaarMatch = t.match(/\b\d{4}\s?\d{4}\s?\d{4}\b/);

    // Matches currency patterns
    const invoiceMatch = t.match(/(?:₹|Rs\.?|INR)\s?[\d,]+(?:\.\d{1,2})?/i);

    // Matches common order/auth patterns
    const orderMatch = t.match(/\b(?:PO|ORDER|AUTH|AUTHORIZATION)\s*[:\-]?\s*([A-Z0-9\-\/]{4,})/i);

    // Matches "Name: [Value]" / "Authorised Person" / applicant-style labels
    const nameMatch = t.match(/\bName\s*[:\-]\s*([A-Za-z][A-Za-z0-9\s.'-]{2,79})/i);
    const authPersonMatch = t.match(
      /\b(?:authorized|authorised)\s+person\s*[:\-]\s*([A-Za-z][A-Za-z0-9\s.'-]{2,79})/i
    );
    const applicantMatch = t.match(
      /\b(?:applicant|holder|customer|borrower|dealer)\s*[:\-]\s*([A-Za-z][A-Za-z0-9\s.'-]{2,79})/i
    );
    // Name resolution is a fallback chain; for CSD POs the buyer-specific
    // extractors run first because "Name:" is often the dealer contact.
    let displayNameRaw = isCsdPo
      ? extractCsdPoNameInDescriptionColumn(t) || extractCsdPoBuyerFromLabels(t) || ''
      : '';
    if (!displayNameRaw) {
      displayNameRaw = (authPersonMatch?.[1] || nameMatch?.[1] || applicantMatch?.[1] || '').trim();
    }

    // MSD-guided: name often appears in body exactly as user typed (no label) — same idea as manual compare in CPC-CSD UI flow
    if (!displayNameRaw) {
      const fromAuth = msd.customer_name ?? msd.authorized_person_name ?? msd.name;
      const hint = String(fromAuth ?? '').trim();
      if (hint) {
        // Short hints get a lower fuzzy threshold — less text to score against.
        const minFuzzy = hint.length <= 10 ? 40 : 52;
        displayNameRaw =
          matchMsdNameInBody(t, hint) ||
          findMsdNameTokenInOcr(t, hint) ||
          pickNameLineByMsd(t, hint, minFuzzy) ||
          '';
      }
    }

    // Title / ALL CAPS line fallback — include short single names (e.g. "Arjun") skipped by older rules
    if (!displayNameRaw) {
      const lines = t.split(/\r?\n/).map((l) => l.trim()).filter(Boolean);
      const noiseLine = /^(qty|ref|date|page|gst|hsn|po|no|id|total|amount|index|desc|sl)$/i;
      for (const line of lines) {
        if (line.length < 3 || line.length > 80) continue;
        if (noiseLine.test(line)) continue;
        // Letter boilerplate / brand lines are never name candidates.
        if (/^(ref|date|subject|to|from|dear|page|annex|authority|letter|royal|enfield|cpc|csd)\b/i.test(line)) {
          continue;
        }
        if (isCsdPo && looksLikeCompanyLine(line)) {
          continue;
        }
        const words = line.split(/\s+/).filter(Boolean);
        // One alphabetic word that is neither a vehicle token nor a company line.
        const singleName =
          words.length === 1 &&
          /^[A-Za-z\u0900-\u097F]{2,25}$/.test(words[0]) &&
          !RE_VEHICLE_TOKENS.test(words[0]) &&
          !looksLikeCompanyLine(words[0]);
        // Multi-word ALL-CAPS line (typical printed-name style).
        const multiAllCaps =
          /^[A-Z][A-Z0-9\s.'-]{4,70}$/.test(line) && words.length >= 2;
        if (singleName || multiAllCaps) {
          displayNameRaw = line;
          break;
        }
        // 1–4 alphabetic words, capitalized start, with at least one lowercase/Devanagari char.
        const titleCaseName =
          words.length >= 1 &&
          words.length <= 4 &&
          words.every((w) => /^[A-Za-z\u0900-\u097F]{2,}$/.test(w)) &&
          !words.some((w) => RE_VEHICLE_TOKENS.test(w)) &&
          line[0] === line[0].toUpperCase() &&
          /[a-z\u0900-\u097F]/.test(line) &&
          !looksLikeCompanyLine(line);
        if (titleCaseName && line.length <= 48) {
          displayNameRaw = line;
          break;
        }
      }
    }

    // Collapse whitespace; drop 1-char junk. CSD POs get a final buyer-preference pass.
    let displayName = displayNameRaw.length >= 2 ? displayNameRaw.replace(/\s+/g, ' ').trim() : null;
    if (isCsdPo && displayName) {
      displayName = CpcRuleExtractService.refineCsdPoCustomerName(t, displayName) ?? displayName;
    }

    // PAN (Indian format) + MSD hint (PDF may lack strict word boundaries)
    let panFromRegex = t.match(/\b([A-Z]{5}[0-9]{4}[A-Z])\b/i);
    let panVal = panFromRegex ? String(panFromRegex[1]).toUpperCase() : null;
    if (!panVal && msd.pan_number != null) {
      panVal = panFromMsdHint(t, msd.pan_number);
    }

    // Numeric amount for range matching against MSD invoice_value
    const amountDigits = invoiceMatch
      ? String(invoiceMatch[0]).replace(/[^\d.]/g, '').replace(/^\.+|\.+$/g, '')
      : null;
    let invoiceValueNormalized =
      amountDigits && amountDigits.length ? amountDigits : null;
    if (!invoiceValueNormalized) {
      // No labelled currency found — fall back to locating MSD amount digits in the body.
      invoiceValueNormalized =
        invoiceDigitsFromMsdHint(t, msd.po_amount) ||
        invoiceDigitsFromMsdHint(t, msd.letter_amount) ||
        invoiceDigitsFromMsdHint(t, msd.invoice_value);
    }

    // Keyword presence checks; govt variant also passes on the generic stamp hit.
    const stampPresent = /(stamp|seal|authorized signatory|signature)/i.test(t);
    const govtStampPresent = /(govt\.?\s*stamp|government\s*seal|govt\.?\s*signatory|official\s*stamp|authorized\s*signatory)/i.test(t) || stampPresent;
    const stampYesNo = govtStampPresent ? 'yes' : 'no';
    const poOrOrder = orderMatch ? orderMatch[1].trim() : null;
    const aadhaarDigits = aadhaarMatch ? aadhaarMatch[0].replace(/\s/g, '').trim() : null;

    // Each value is mirrored under every key alias the downstream validators may read.
    return {
      extracted_fields: {
        authorized_person_name: displayName,
        customer_name: displayName,
        pan_number: panVal,
        order_or_authorisation_number: poOrOrder,
        po_number: poOrOrder,
        order_or_auth_number: poOrOrder,
        invoice_value: invoiceValueNormalized,
        po_amount: invoiceValueNormalized,
        letter_amount: invoiceValueNormalized,
        aadhaar_number: aadhaarDigits,
        aadhar_number: aadhaarDigits,
        stamp_or_signatory_present: stampPresent,
        stamp_sign_present: stampPresent,
        govt_signatory_and_stamp_present: stampYesNo,
        signature_and_stamp: stampYesNo
      },
      // Fixed heuristic confidences: higher when the extractor actually hit.
      field_confidence: {
        authorized_person_name: displayName ? 0.65 : 0.2,
        customer_name: displayName ? 0.65 : 0.2,
        pan_number: panVal ? 0.85 : 0.2,
        order_or_authorisation_number: orderMatch ? 0.7 : 0.2,
        po_number: orderMatch ? 0.7 : 0.2,
        order_or_auth_number: orderMatch ? 0.7 : 0.2,
        invoice_value: invoiceValueNormalized ? 0.7 : 0.2,
        po_amount: invoiceValueNormalized ? 0.7 : 0.2,
        letter_amount: invoiceValueNormalized ? 0.7 : 0.2,
        aadhaar_number: aadhaarMatch ? 0.85 : 0.2,
        aadhar_number: aadhaarMatch ? 0.85 : 0.2,
        stamp_or_signatory_present: stampPresent ? 0.55 : 0.3,
        stamp_sign_present: stampPresent ? 0.55 : 0.3,
        govt_signatory_and_stamp_present: govtStampPresent ? 0.55 : 0.3,
        signature_and_stamp: govtStampPresent ? 0.55 : 0.3
      }
    };
  }
}
|
||||
|
||||
802
src/services/cpc-cdc/CpcValidationService.ts
Normal file
802
src/services/cpc-cdc/CpcValidationService.ts
Normal file
@ -0,0 +1,802 @@
|
||||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
import { VertexAI } from '@google-cloud/vertexai';
|
||||
import { calculateMatch, digitsOnly, normalizeMoney } from './utils';
|
||||
import { getCriteriaLabel } from './CpcHistoryService';
|
||||
import logger from '@utils/logger';
|
||||
|
||||
/** Vertex SDK does not read `GCP_KEY_FILE` by itself — must pass keyFilename (critical in Docker). */
|
||||
function resolveVertexServiceAccountPath(): string | undefined {
|
||||
const fromAdc = (process.env.GOOGLE_APPLICATION_CREDENTIALS || '').trim();
|
||||
const fromKeyFile = (process.env.GCP_KEY_FILE || '').trim();
|
||||
const candidates = [
|
||||
fromAdc,
|
||||
fromKeyFile ? path.resolve(process.cwd(), fromKeyFile) : ''
|
||||
].filter(Boolean);
|
||||
for (const p of candidates) {
|
||||
try {
|
||||
if (fs.existsSync(p)) return path.resolve(p);
|
||||
} catch {
|
||||
/* ignore */
|
||||
}
|
||||
}
|
||||
return undefined;
|
||||
}
|
||||
|
||||
/**
|
||||
* Decide which printed script Gemini should prefer when the document shows the same field in English and Hindi.
|
||||
* Driven by the user's MSD/form string (Devanagari vs Latin letter counts).
|
||||
*/
|
||||
function preferScriptForMsdFieldValue(value: unknown): 'Devanagari' | 'Latin' {
|
||||
const s = String(value ?? '').trim();
|
||||
if (!s) return 'Latin';
|
||||
try {
|
||||
const dev = (s.match(/\p{Script=Devanagari}/gu) || []).length;
|
||||
const lat = (s.match(/\p{Script=Latin}/gu) || []).length;
|
||||
if (dev === 0 && lat === 0) return 'Latin';
|
||||
return dev >= lat ? 'Devanagari' : 'Latin';
|
||||
} catch {
|
||||
return /[\u0900-\u097F]/.test(s) ? 'Devanagari' : 'Latin';
|
||||
}
|
||||
}
|
||||
|
||||
/** JSON block appended to Vertex prompt: per-field prefer_script from MSD input language. */
|
||||
function buildMsdScriptPreferenceBlock(
|
||||
expectedFields: string[],
|
||||
msdReferencePayload?: Record<string, unknown>
|
||||
): string {
|
||||
if (!msdReferencePayload || typeof msdReferencePayload !== 'object') return '';
|
||||
const uniq = [...new Set((expectedFields || []).map((f) => String(f || '').trim()).filter(Boolean))];
|
||||
const keys =
|
||||
uniq.length > 0
|
||||
? uniq
|
||||
: Object.keys(msdReferencePayload).filter((k) => {
|
||||
const v = msdReferencePayload[k];
|
||||
return v !== undefined && v !== null && String(v).trim() !== '';
|
||||
});
|
||||
if (keys.length === 0) return '';
|
||||
const hints: Record<string, { prefer_script: 'Devanagari' | 'Latin' }> = {};
|
||||
for (const key of keys) {
|
||||
const raw = msdReferencePayload[key];
|
||||
if (raw === undefined || raw === null) continue;
|
||||
const str = String(raw).trim();
|
||||
if (!str) continue;
|
||||
hints[key] = { prefer_script: preferScriptForMsdFieldValue(raw) };
|
||||
}
|
||||
if (Object.keys(hints).length === 0) return '';
|
||||
return `MSD_SCRIPT_PREFERENCE (per field: infer input language from MSD; when the document shows the same field in both English and Hindi, extract ONLY the on-page text whose script matches prefer_script for that key — do not translate; do not swap languages):\n${JSON.stringify(hints, null, 2)}\n`;
|
||||
}
|
||||
|
||||
// Canonical document-type identifiers the rule table below is keyed by.
const VALID_DOC_TYPES = ['CSD_PO', 'CPC_AUTH', 'AADHAAR', 'RETAIL_INVOICE'] as const;
|
||||
|
||||
/**
|
||||
* Field rules aligned with RE / Softude mail (Feb–Apr 2026):
|
||||
* - Rahul: CSD PO # 100% exact, amounts ±₹5, per-field all-pass (no average-based gate).
|
||||
* - Rohit table: customer / order (where fuzzy) ≥95%, invoice ≥98% OR ±₹5, stamp ≥85% fuzzy,
|
||||
* Aadhaar 12-digit 100%, retail invoice # ≥95%, document date ≥90%.
|
||||
*/
|
||||
const DOCUMENT_RULES: any = {
|
||||
/** CPC claim doc 2 */
|
||||
'AADHAAR': {
|
||||
'name': { threshold: 90, method: 'fuzzy' },
|
||||
'customer_name': { threshold: 90, method: 'fuzzy' },
|
||||
'aadhaar_number': { threshold: 100, method: 'exact_length_12' },
|
||||
'aadhar_number': { threshold: 100, method: 'exact_length_12' },
|
||||
'gender': { threshold: 100, method: 'exact' },
|
||||
'mail_extraction': { threshold: 90, method: 'fuzzy' }
|
||||
},
|
||||
/** CPC claim doc 1 — authorization letter */
|
||||
'CPC_AUTH': {
|
||||
'authorized_person_name': { threshold: 90, method: 'fuzzy' },
|
||||
'customer_name': { threshold: 90, method: 'fuzzy' },
|
||||
'authority_grantor_name': { threshold: 90, method: 'fuzzy' },
|
||||
'letter_number': { threshold: 90, method: 'fuzzy' },
|
||||
'invoice_value': { threshold: null, method: 'range_5_or_fuzzy_98' },
|
||||
'letter_amount': { threshold: null, method: 'range_5_or_fuzzy_98' },
|
||||
'amount': { threshold: null, method: 'range_5_or_fuzzy_98' },
|
||||
'pan_number': { threshold: 95, method: 'fuzzy' },
|
||||
'order_or_authorisation_number': { threshold: 95, method: 'fuzzy' },
|
||||
'stamp_sign_present': { threshold: 85, method: 'boolean_fuzzy_85' },
|
||||
'govt_signatory_and_stamp_present': { threshold: 85, method: 'boolean_fuzzy_85' },
|
||||
'signature_and_stamp': { threshold: 85, method: 'boolean_fuzzy_85' },
|
||||
'mail_extraction': { threshold: 90, method: 'fuzzy' }
|
||||
},
|
||||
/** CSD — Purchase order: PO# remains exact 100% per Rahul; other fuzzy thresholds per Rohit table. */
|
||||
'CSD_PO': {
|
||||
'customer_name': { threshold: 90, method: 'fuzzy' },
|
||||
'name': { threshold: 90, method: 'fuzzy' },
|
||||
'order_or_authorisation_number': { threshold: 100, method: 'exact' },
|
||||
'po_number': { threshold: 100, method: 'exact' },
|
||||
'invoice_value': { threshold: null, method: 'range_5_or_fuzzy_98' },
|
||||
'po_amount': { threshold: null, method: 'range_5_or_fuzzy_98' },
|
||||
'vendor_name': { threshold: 95, method: 'fuzzy' },
|
||||
'govt_signatory_and_stamp_present': { threshold: 85, method: 'boolean_fuzzy_85' },
|
||||
'signature_and_stamp': { threshold: 85, method: 'boolean_fuzzy_85' },
|
||||
'mail_extraction': { threshold: 90, method: 'fuzzy' }
|
||||
},
|
||||
'RETAIL_INVOICE': {
|
||||
'customer_name': { threshold: 95, method: 'fuzzy' },
|
||||
'order_or_authorisation_number': { threshold: 95, method: 'fuzzy' },
|
||||
'invoice_value': { threshold: null, method: 'range_5_or_fuzzy_98' },
|
||||
'invoice_date': { threshold: 90, method: 'fuzzy' },
|
||||
'vendor_name': { threshold: 95, method: 'fuzzy' },
|
||||
'mail_extraction': { threshold: 90, method: 'fuzzy' }
|
||||
},
|
||||
'GENERIC': {
|
||||
'default': { threshold: 95, method: 'fuzzy' }
|
||||
}
|
||||
};
|
||||
|
||||
/** Human-readable `field_results.threshold` for API/UI (no percentage figures). */
|
||||
function apiThresholdLabel(rule: { method?: string; threshold?: number | null }): string {
|
||||
const m = rule?.method;
|
||||
if (m === 'range_5_or_fuzzy_98' || m === 'range_5') return 'Amount comparison';
|
||||
if (m === 'boolean_fuzzy_85' || m === 'boolean') return 'Stamp / signature';
|
||||
if (m === 'exact_length_12') return 'Aadhaar number';
|
||||
if (m === 'exact' || m === 'exact_numeric') return 'Exact match';
|
||||
if (m === 'fuzzy') return 'Text match';
|
||||
return 'N/A';
|
||||
}
|
||||
|
||||
function msdFieldDisplayName(fieldKey: string, docType?: string): string {
|
||||
if (fieldKey === 'invoice_value' || fieldKey === 'po_amount') {
|
||||
if (docType === 'CSD_PO') return 'PO Amount';
|
||||
if (docType === 'CPC_AUTH') return 'Letter Amount';
|
||||
}
|
||||
if (fieldKey === 'letter_amount') return 'Letter Amount';
|
||||
const map: Record<string, string> = {
|
||||
authorized_person_name: 'Customer Name',
|
||||
customer_name: 'Customer Name',
|
||||
name: 'Customer Name',
|
||||
letter_number: 'Letter Number',
|
||||
po_number: 'PO Number',
|
||||
order_or_authorisation_number: 'PO Number',
|
||||
invoice_value: 'Document Amount',
|
||||
po_amount: 'PO Amount',
|
||||
amount: 'Letter Amount',
|
||||
aadhaar_number: 'Aadhaar Number',
|
||||
aadhar_number: 'Aadhaar Number',
|
||||
govt_signatory_and_stamp_present: 'Signature & Stamp',
|
||||
signature_and_stamp: 'Signature & Stamp',
|
||||
stamp_sign_present: 'Signature & Stamp',
|
||||
mail_extraction: 'Mail extraction',
|
||||
pan_number: 'PAN',
|
||||
vendor_name: 'Supplier Name',
|
||||
authority_grantor_name: 'Authority Grantor',
|
||||
gender: 'Gender'
|
||||
};
|
||||
return map[fieldKey] || fieldKey.replace(/_/g, ' ');
|
||||
}
|
||||
|
||||
function buildMsdStyleMessage(fieldKey: string, status: string, docType?: string): string {
|
||||
const label = msdFieldDisplayName(fieldKey, docType);
|
||||
if (status === 'MISSING') {
|
||||
return `According to the expected record and the document, the "${label}" could not be read from the document.\nKindly upload the document again or update the expected value.`;
|
||||
}
|
||||
return `According to the expected record and the document, the "${label}" does not match.\nKindly upload the document again or update the expected value.`;
|
||||
}
|
||||
|
||||
function pickRuleForKey(rules: Record<string, unknown>, key: string): string {
|
||||
const k = key.toLowerCase();
|
||||
const candidates = Object.keys(rules)
|
||||
.filter((rk) => rk !== 'default')
|
||||
.sort((a, b) => b.length - a.length);
|
||||
const hit = candidates.find((rk) => k.includes(rk.toLowerCase()));
|
||||
return hit || 'default';
|
||||
}
|
||||
|
||||
function isWithinRange(valA: any, valB: any, diff: number = 5): boolean {
|
||||
const a = parseFloat(String(valA).replace(/[^0-9.]/g, ""));
|
||||
const b = parseFloat(String(valB).replace(/[^0-9.]/g, ""));
|
||||
if (isNaN(a) || isNaN(b)) return false;
|
||||
return Math.abs(a - b) <= diff;
|
||||
}
|
||||
|
||||
function isVertexModelAccessIssue(err: unknown): boolean {
|
||||
const e = err as { message?: string; name?: string; code?: number | string };
|
||||
const blob = `${e?.name || ''} ${e?.message || ''} ${String(e?.code || '')}`.toLowerCase();
|
||||
return (
|
||||
blob.includes('publisher model') ||
|
||||
blob.includes('model') && blob.includes('not found') ||
|
||||
blob.includes('does not have access') ||
|
||||
blob.includes('status: 404') ||
|
||||
blob.includes('code":404')
|
||||
);
|
||||
}
|
||||
|
||||
/**
 * CPC-CSD OCR validation pipeline. Compares expected MSD (form) field values
 * against fields extracted from a document, and wraps Vertex AI (Gemini)
 * extraction with a model/location fallback. All members are static — the
 * class acts as a namespace for this service.
 */
export class CpcValidationService {
  /**
   * Validates extracted document fields against the expected MSD payload and
   * builds the per-field + overall result object returned to the API/UI.
   *
   * @param msdPayload Expected values keyed by MSD field name (user form input).
   * @param extractedFields Values extracted from the document (OCR / Gemini).
   * @param fieldConfidence Per-key extraction confidence; absent keys count as 0.
   * @param docTypeAttr Raw document type; normalized below to AADHAAR / CPC_AUTH /
   *   CSD_PO / RETAIL_INVOICE — anything else falls back to GENERIC rules.
   * @param claimId Echoed into the result payload (not used for matching).
   * @param attemptNo Echoed into the result payload.
   * @param expectedFieldKeys When set (e.g. from UI row order), every listed key is validated — MSD values may be empty (fails with clear reason) and keys are not dropped. When omitted, keys come from `msdPayload` (non-blank key names only).
   * @returns Object carrying `field_results`, `mismatch_reasons`, summary counts and
   *   an overall `validation_status` of MATCH / MISMATCH / NEED_MANUAL.
   */
  static validateSrs(
    msdPayload: any,
    extractedFields: any,
    fieldConfidence: any = {},
    docTypeAttr: string = 'generic_invoice',
    claimId: string | null = null,
    attemptNo: number = 1,
    expectedFieldKeys?: string[] | null
  ) {
    // Collapse the many caller-supplied spellings onto the canonical doc types.
    let normalizedDocType = (docTypeAttr || "generic_invoice").toUpperCase();
    if (normalizedDocType === 'AADHAAR_CARD' || normalizedDocType === 'ADHAAR') normalizedDocType = 'AADHAAR';
    if (normalizedDocType === 'AUTHORITY_LETTER' || normalizedDocType === 'CPC_LETTER') normalizedDocType = 'CPC_AUTH';
    if (normalizedDocType === 'PURCHASE_ORDER' || normalizedDocType === 'PO') normalizedDocType = 'CSD_PO';
    if (normalizedDocType === 'INVOICE' || normalizedDocType === 'GENERIC_INVOICE') normalizedDocType = 'RETAIL_INVOICE';
    if (!VALID_DOC_TYPES.includes(normalizedDocType as any) && normalizedDocType !== 'GENERIC') {
      logger.warn(`[CpcValidation] Unknown doc type "${docTypeAttr}" → falling back to GENERIC`);
    }

    // Per-field match rules for this document type (threshold + method name).
    const rules = DOCUMENT_RULES[normalizedDocType] || DOCUMENT_RULES.GENERIC;

    const fieldResults: any[] = [];
    const mismatchReasons: string[] = [];
    let totalMatchPercent = 0;
    let totalFields = 0;
    let matchedCount = 0;
    let mismatchedCount = 0;
    let missingCount = 0;

    // Fallback fuzzy threshold used when a rule carries no threshold of its own.
    const globalThreshold = 95;

    // Finds targetKey's value in obj, tolerating case/underscore/space variants
    // and known synonym key names (rules engine and Gemini emit different keys).
    const findNormalizedValue = (obj: any, targetKey: string) => {
      const norm = (k: string) => k.toLowerCase().replace(/[\s_]/g, '');
      const normTarget = norm(targetKey);
      // Fast path: exact key present.
      if (obj[targetKey] !== undefined) return obj[targetKey];

      /** MSD field → alternate keys produced by rules / Gemini */
      const synonymSources: Record<string, string[]> = {
        authorized_person_name: ['customer_name', 'name', 'authorized_person_name', 'account_holder_name'],
        customer_name: ['customer_name', 'name', 'authorized_person_name', 'account_holder_name', 'customername'],
        name: ['authorized_person_name', 'customer_name', 'customername'],
        pan_number: ['pan_number', 'pan', 'panno'],
        invoice_value: ['invoice_value', 'amount', 'total_amount', 'total_value', 'po_amount', 'letter_amount'],
        po_amount: ['po_amount', 'invoice_value', 'amount', 'total_amount', 'total_value'],
        letter_amount: ['letter_amount', 'invoice_value', 'amount', 'total_amount', 'total_value'],
        aadhaar_number: ['aadhaar_number', 'aadhar_number', 'aadhaar', 'aadhaarnumber', 'id_number'],
        aadhar_number: ['aadhar_number', 'aadhaar_number', 'aadhaar', 'aadhaarnumber', 'id_number'],
        letter_number: ['letter_number', 'order_or_auth_number', 'auth_number', 'auth_no'],
        order_or_authorisation_number: ['order_or_authorisation_number', 'order_or_auth_number', 'po_number', 'order_number'],
        po_number: ['po_number', 'order_or_authorisation_number', 'order_or_auth_number', 'order_number'],
        govt_signatory_and_stamp_present: [
          'govt_signatory_and_stamp_present',
          'signature_and_stamp',
          'stamp_sign_present',
          'stamp_or_signatory_present'
        ],
        signature_and_stamp: [
          'signature_and_stamp',
          'govt_signatory_and_stamp_present',
          'stamp_sign_present',
          'stamp_or_signatory_present'
        ],
        mail_extraction: ['mail_extraction', 'email', 'registered_email', 'contact_email', 'buyer_email', 'correspondence_email']
      };
      // Pass 1: synonym keys looked up verbatim; only non-blank values count.
      for (const alt of synonymSources[targetKey] || []) {
        if (obj[alt] !== undefined && obj[alt] !== null && String(obj[alt]).trim() !== '') {
          return obj[alt];
        }
      }

      // Pass 2 aliases are matched on normalized (case/underscore-free) names
      // against every key actually present in obj.
      const aliases: any = {
        name: ['customername', 'customer_name', 'full_name', 'authorized_person_name', 'account_holder_name'],
        customer_name: ['customername', 'name', 'full_name', 'authorized_person_name', 'account_holder_name'],
        aadhaar_number: ['aadhaarnumber', 'aadhar_number', 'aadhar', 'aadhaar', 'id_number'],
        aadhar_number: ['aadhaarnumber', 'aadhaar_number', 'aadhaar', 'id_number'],
        invoice_value: ['total_amount', 'amount', 'total_value', 'po_amount', 'letter_amount'],
        po_amount: ['invoice_value', 'total_amount', 'amount', 'total_value'],
        letter_amount: ['invoice_value', 'amount', 'total_value'],
        letter_number: ['order_or_auth_number', 'auth_number', 'auth_no'],
        order_or_authorisation_number: ['order_or_auth_number', 'po_number', 'order_number'],
        po_number: ['order_or_authorisation_number', 'order_or_auth_number', 'order_number'],
        govt_signatory_and_stamp_present: ['stamp_sign_present', 'stamp_or_signatory_present', 'signature_and_stamp'],
        signature_and_stamp: ['govt_signatory_and_stamp_present', 'stamp_sign_present', 'stamp_or_signatory_present'],
        mail_extraction: ['email', 'e_mail', 'contactemail', 'correspondenceemail']
      };

      for (const k of Object.keys(obj)) {
        const normKey = norm(k);
        if (normKey === normTarget) return obj[k];
        for (const [canonical, list] of Object.entries(aliases)) {
          if (
            norm(canonical) === normTarget &&
            (list as string[]).some((a) => norm(a) === normKey)
          ) {
            return obj[k];
          }
        }
      }
      return undefined;
    };

    // UI-supplied key order wins; keys are trimmed and de-duplicated.
    const fromUi = Array.isArray(expectedFieldKeys)
      ? [...new Set(expectedFieldKeys.map((k) => String(k || '').trim()).filter(Boolean))]
      : [];

    const expectedKeys =
      fromUi.length > 0
        ? fromUi
        : Object.keys(msdPayload || {}).filter((k) => k && String(k).trim() !== '');

    for (const key of expectedKeys) {
      totalFields++;
      const rawExpected = msdPayload?.[key];
      const expectedStr =
        rawExpected === null || rawExpected === undefined ? '' : String(rawExpected);
      // Blank or literal "null" MSD value counts as "not provided".
      const msdValueEmpty =
        expectedStr.trim() === '' || expectedStr.trim().toLowerCase() === 'null';

      // Empty expected value: record an UNSUCCESSFUL result (counted as a
      // mismatch, not missing) so the key is never silently dropped.
      if (msdValueEmpty) {
        const foundPeek = findNormalizedValue(extractedFields, key);
        const confidence = fieldConfidence[key] || 0;
        const label = msdFieldDisplayName(key, normalizedDocType);
        mismatchReasons.push(
          `According to the expected record, "${label}" was not provided. Enter the expected value to validate against the document.`
        );
        fieldResults.push({
          field: key,
          expected: '(not provided)',
          extracted: foundPeek ?? null,
          status: 'UNSUCCESSFUL',
          match_percentage: 0,
          threshold: 'N/A',
          match_method: 'n/a',
          extraction_confidence: confidence,
          reason: 'Expected value was empty — enter a value to compare with the document.',
          criteria: getCriteriaLabel(key, normalizedDocType)
        });
        mismatchedCount++;
        continue;
      }

      const expected = rawExpected;
      const found = findNormalizedValue(extractedFields, key);
      const confidence = fieldConfidence[key] || 0;

      // Select the matching rule for this key (longest rule-name substring wins).
      const ruleKey = pickRuleForKey(rules as Record<string, unknown>, key);
      const rule = rules[ruleKey] || rules.default || DOCUMENT_RULES.GENERIC.default;

      let matchPercent = 0;
      let isPass = false;
      let status = "UNSUCCESSFUL";
      let reason = null;

      // Blank / "null" extracted value → MISSING (drives NEED_MANUAL overall).
      if (found === undefined || found === null || String(found).trim() === "" || String(found).toLowerCase() === "null") {
        status = "MISSING";
        reason = "Field not found in document";
        missingCount++;
      } else {
        if (rule.method === 'exact_numeric') {
          // Strip non-numeric characters and compare rounded values.
          const numExp = parseFloat(String(expected).replace(/[^0-9.]/g, ''));
          const numFnd = parseFloat(String(found).replace(/[^0-9.]/g, ''));
          isPass = !isNaN(numExp) && !isNaN(numFnd) && Math.round(numExp) === Math.round(numFnd);
          matchPercent = isPass ? 100 : 0;
        } else if (rule.method === 'exact') {
          // Case-insensitive equality ignoring spaces, hyphens and slashes.
          const normExp = String(expected).trim().toLowerCase().replace(/[\s\-\/]+/g, '');
          const normFnd = String(found).trim().toLowerCase().replace(/[\s\-\/]+/g, '');
          isPass = normExp === normFnd;
          matchPercent = isPass ? 100 : 0;
        } else if (rule.method === 'range_5') {
          // Amounts within ±5 pass.
          isPass = isWithinRange(expected, found, 5);
          matchPercent = isPass ? 100 : 0;
        } else if (rule.method === 'range_5_or_fuzzy_98') {
          // Amount rule: pass when within ±5 OR fuzzy score ≥ 98 on
          // money-normalized strings (raw strings when normalization fails).
          const inRange = isWithinRange(expected, found, 5);
          const expM = normalizeMoney(String(expected));
          const fndM = normalizeMoney(String(found));
          const fuzzyMoney =
            expM && fndM ? calculateMatch(expM, fndM, key) : calculateMatch(String(expected), String(found), key);
          isPass = inRange || fuzzyMoney >= 98;
          matchPercent = inRange ? 100 : fuzzyMoney;
        } else if (rule.method === 'boolean') {
          // Compare yes/no polarity; fall back to literal equality when either
          // side's polarity cannot be determined.
          const normBool = (v: unknown) => {
            const t = String(v ?? '')
              .toLowerCase()
              .trim();
            if (/\b(yes|true|1|present|available|signed)\b/.test(t)) return 'pos';
            if (/\b(no|false|0|absent|not\s*available|unavailable|n\/a)\b/.test(t)) return 'neg';
            return 'unk';
          };
          const ePol = normBool(expected);
          const fPol = normBool(found);
          if (ePol !== 'unk' && fPol !== 'unk') {
            isPass = ePol === fPol;
          } else {
            isPass =
              String(expected).trim().toLowerCase() === String(found).trim().toLowerCase();
          }
          matchPercent = isPass ? 100 : 0;
        } else if (rule.method === 'boolean_fuzzy_85') {
          // Expand yes/no-style answers to 'available' / 'not available',
          // then fuzzy-match with an 85% pass bar.
          const expand = (v: unknown) => {
            const t = String(v ?? '').toLowerCase();
            if (/\b(yes|true|1|present|available|signed)\b/.test(t)) return 'available';
            if (/\b(no|false|0|absent|not\s*available|unavailable|n\/a)\b/.test(t)) return 'not available';
            return String(v ?? '')
              .trim()
              .toLowerCase();
          };
          const ex = expand(expected);
          const fd = expand(found);
          matchPercent = calculateMatch(ex, fd, key);
          isPass = matchPercent >= 85;
        } else if (rule.method === 'exact_length_12') {
          // Aadhaar: digits must be identical AND exactly 12 long.
          const dExp = String(expected).replace(/\D/g, "");
          const dFnd = String(found).replace(/\D/g, "");
          isPass = (dExp === dFnd && dFnd.length === 12);
          matchPercent = isPass ? 100 : 0;
        } else if (rule.threshold === 100) {
          // Threshold 100 with no special method → strict case-insensitive equality.
          matchPercent = String(expected).trim().toLowerCase() === String(found).trim().toLowerCase() ? 100 : 0;
          isPass = (matchPercent === 100);
        } else {
          // Default: fuzzy score against the rule's (or global) threshold.
          matchPercent = calculateMatch(expected, found, key);
          isPass = (matchPercent >= (rule.threshold || globalThreshold));
        }

        if (isPass) {
          status = "SUCCESSFUL";
          matchedCount++;
        } else {
          status = "UNSUCCESSFUL";
          reason = 'Value does not match expected';
          mismatchedCount++;
        }
      }

      totalMatchPercent += matchPercent;

      // Both MISSING and UNSUCCESSFUL contribute a user-facing mismatch line.
      if (status !== "SUCCESSFUL") {
        mismatchReasons.push(buildMsdStyleMessage(key, status, normalizedDocType));
      }

      fieldResults.push({
        field: key,
        expected: expected,
        // NOTE(review): `found || null` also nulls falsy extractions (0, "") —
        // confirm that is intended for numeric/boolean fields.
        extracted: found || null,
        status: status,
        match_percentage: matchPercent,
        threshold: apiThresholdLabel(rule),
        match_method: rule.method,
        extraction_confidence: confidence,
        reason: reason,
        criteria: getCriteriaLabel(key, normalizedDocType)
      });
    }

    /** MSD: success only if every expected field passes its own rule (no averaging). */
    const allFieldsPass =
      totalFields > 0 && mismatchedCount === 0 && missingCount === 0 && matchedCount === totalFields;
    const overallAccuracy = totalFields > 0 ? Math.round(totalMatchPercent / totalFields) : 0;
    // Show a clean 100 when all fields pass; otherwise the averaged accuracy.
    const displayMatchPercent = allFieldsPass ? 100 : overallAccuracy;
    const hasMissing = missingCount > 0;
    // Any missing field escalates to manual review, even if others matched.
    const overallValidationStatus = hasMissing
      ? "NEED_MANUAL"
      : allFieldsPass
        ? "MATCH"
        : "MISMATCH";
    const overallStatus = overallValidationStatus === "MATCH" ? "SUCCESSFUL" : "UNSUCCESSFUL";

    return {
      claim_id: claimId,
      attempt_no: attemptNo,
      status: overallStatus,
      validation_status: overallValidationStatus,
      match_percentage: displayMatchPercent,
      overall_match_percentage: displayMatchPercent,
      threshold: 100,
      all_fields_passed: allFieldsPass,
      mismatch_summary: {
        total_expected_fields: totalFields,
        matched: matchedCount,
        mismatched: mismatchedCount,
        missing: missingCount,
        all_fields_passed: allFieldsPass
      },
      mismatch_reasons: mismatchReasons,
      field_results: fieldResults
    };
  }

  /**
   * Runs Gemini (Vertex AI) extraction for one document, retrying once on a
   * fallback model/location when the primary model is inaccessible (as judged
   * by `isVertexModelAccessIssue`).
   *
   * @param params.projectId GCP project for Vertex AI.
   * @param params.location Primary Vertex region; fallback comes from
   *   CPC_VERTEX_FALLBACK_LOCATION (default 'us-central1').
   * @param params.modelName Optional model override; otherwise GEMINI_MODEL /
   *   VERTEX_AI_MODEL env vars, else 'gemini-1.5-flash'.
   * @param params.documentType Drives the extraction schema inside the prompt.
   * @param params.ocrText Optional OCR text included in the prompt.
   * @param params.fileBuffer Optional raw document bytes sent inline (base64).
   * @param params.mimeType MIME for fileBuffer; defaults to 'application/pdf'.
   * @param params.expectedFields Keys the model MUST return; any missing key is
   *   back-filled with null after parsing.
   * @param params.msdReferencePayload See inline doc below.
   * @returns `{ extracted_fields, field_confidence }` parsed from model output.
   * @throws Last Vertex error, or AI_EXTRACTION_FAILED when nothing succeeded.
   */
  static async extractWithGemini(params: {
    projectId: string;
    location: string;
    modelName?: string;
    documentType: string;
    ocrText?: string;
    fileBuffer?: Buffer;
    mimeType?: string;
    expectedFields?: string[];
    /** MSD / form values — passed into prompt so Gemini aligns labels with user input (no secrets; same as document check). */
    msdReferencePayload?: Record<string, unknown>;
  }) {
    const {
      projectId,
      location,
      modelName,
      documentType,
      ocrText,
      fileBuffer,
      mimeType,
      expectedFields = [],
      msdReferencePayload
    } = params;

    const saPath = resolveVertexServiceAccountPath();
    // NOTE(review): this `vertexInit` object is never passed to a VertexAI
    // constructor — only the saPath logging below has effect; each attempt in
    // the loop rebuilds its own `attemptVertexInit`. Candidate for removal.
    const vertexInit: ConstructorParameters<typeof VertexAI>[0] = {
      project: projectId,
      location
    };
    if (saPath) {
      (vertexInit as { googleAuthOptions?: { keyFilename: string } }).googleAuthOptions = {
        keyFilename: saPath
      };
      logger.info(`[CpcValidation] Vertex AI using service account file: ${saPath}`);
    } else {
      logger.warn(
        '[CpcValidation] No GCP_KEY_FILE / GOOGLE_APPLICATION_CREDENTIALS on disk — Vertex uses ADC only (often empty inside Docker).'
      );
    }

    // Model precedence: explicit arg → GEMINI_MODEL → VERTEX_AI_MODEL → default.
    const usedModel =
      (modelName && String(modelName).trim()) ||
      process.env.GEMINI_MODEL?.trim() ||
      process.env.VERTEX_AI_MODEL?.trim() ||
      'gemini-1.5-flash';

    const promptText = this.buildPrompt(documentType, ocrText || "", expectedFields, params.msdReferencePayload);
    const parts: any[] = [{ text: promptText }];

    // Attach the raw document inline so the model can read it directly.
    if (fileBuffer) {
      parts.push({
        inlineData: {
          mimeType: mimeType || "application/pdf",
          data: fileBuffer.toString("base64")
        }
      });
    }

    // Primary attempt plus one fallback; duplicates (same model+location) deduped.
    const fallbackLocation = (process.env.CPC_VERTEX_FALLBACK_LOCATION || 'us-central1').trim();
    const fallbackModel = (process.env.CPC_VERTEX_FALLBACK_MODEL || 'gemini-2.0-flash-lite').trim();
    const attempts = [
      { location, model: usedModel, label: 'primary' },
      { location: fallbackLocation, model: fallbackModel, label: 'fallback' }
    ].filter((a, i, arr) => arr.findIndex((x) => x.location === a.location && x.model === a.model) === i);

    let lastErr: unknown;
    for (let idx = 0; idx < attempts.length; idx++) {
      const attempt = attempts[idx];
      const attemptVertexInit: ConstructorParameters<typeof VertexAI>[0] = {
        project: projectId,
        location: attempt.location
      };
      if (saPath) {
        (attemptVertexInit as { googleAuthOptions?: { keyFilename: string } }).googleAuthOptions = {
          keyFilename: saPath
        };
      }
      const vertexAI = new VertexAI(attemptVertexInit);
      const model = vertexAI.getGenerativeModel({ model: attempt.model });
      try {
        if (idx > 0) {
          logger.warn(
            `[CpcValidation] Retrying Vertex extraction using ${attempt.label} model/location (${attempt.model} @ ${attempt.location})`
          );
        }
        const resp = await model.generateContent({
          contents: [{ role: 'user', parts }],
          generationConfig: {
            temperature: 0.1,
            // Env override capped at 8192; malformed env falls back to 8192.
            maxOutputTokens: Math.min(
              8192,
              parseInt(process.env.CPC_VERTEX_MAX_OUTPUT_TOKENS || '8192', 10) || 8192
            )
          }
        });

        const cand = resp?.response?.candidates?.[0] as { finishReason?: string; content?: { parts?: unknown[] } } | undefined;
        // Non-STOP finishes (e.g. token limit) are logged but still parsed.
        if (cand?.finishReason && cand.finishReason !== 'STOP') {
          logger.warn(`[CpcValidation] Gemini finishReason=${cand.finishReason}`);
        }

        const out =
          cand?.content?.parts?.map((p: any) => (typeof p?.text === 'string' ? p.text : '')).join('') || '';

        if (!out) throw new Error('EMPTY_AI_RESPONSE');

        const parsed = this.parseJsonLoose(out);
        // Guarantee every expected key exists in the result (null when absent).
        const merged: Record<string, unknown> = { ...(parsed.extracted_fields || {}) };
        const lockKeys = [...new Set(expectedFields.map((k) => String(k || '').trim()).filter(Boolean))];
        for (const k of lockKeys) {
          if (!(k in merged)) merged[k] = null;
        }
        parsed.extracted_fields = merged;
        const keys = Object.keys(parsed.extracted_fields || {});
        if (keys.length === 0) {
          logger.warn('[CpcValidation] Gemini returned empty extracted_fields; raw head: ' + out.slice(0, 400));
        }
        return parsed;
      } catch (error) {
        lastErr = error;
        // Only model-access errors trigger the fallback attempt; anything else rethrows.
        const shouldRetry = idx < attempts.length - 1 && isVertexModelAccessIssue(error);
        if (shouldRetry) {
          logger.warn(
            `[CpcValidation] Vertex attempt failed for ${attempt.model} @ ${attempt.location}. Trying fallback...`,
            error
          );
          continue;
        }
        logger.error("Gemini Extraction Error:", error);
        throw error;
      }
    }
    throw lastErr || new Error('AI_EXTRACTION_FAILED: Vertex extraction failed');
  }

  /**
   * Builds the Gemini prompt: a JSON-only instruction, the extraction schema
   * (driven by `expectedFields` when provided, else by the document type),
   * optional MSD reference values and script-preference block, bilingual-form
   * guidance, and the (truncated) OCR text. The returned template literal is
   * the runtime prompt — its wording is behavior, do not edit casually.
   */
  private static buildPrompt(
    documentType: string,
    ocrText: string,
    expectedFields: string[] = [],
    msdReferencePayload?: Record<string, unknown>
  ) {
    const dt = documentType.toLowerCase();
    const rawDocType = String(documentType || '');
    const isAadhaar = dt.includes('aadhaar');
    const isInvoice = dt.includes('invoice') || dt.includes('retail');
    /** Avoid `includes('po')` — false positives on unrelated doc type strings. */
    const isCsdPo =
      /\bcsd[_\s-]*po\b/i.test(rawDocType) ||
      /\bpurchase[_\s-]*order\b/i.test(rawDocType) ||
      /^\s*PO\s*$/i.test(rawDocType.trim());
    const isAuthorityDoc =
      dt.includes('authority') ||
      dt.includes('cpc_auth') ||
      dt.includes('auth_letter') ||
      dt.includes('authority_letter') ||
      dt.includes('cpc_letter');

    const schema: any = {
      extracted_fields: {},
      field_confidence: {}
    };

    const userLockedKeys = [...new Set((expectedFields || []).map((f) => String(f || '').trim()).filter(Boolean))];

    // User-supplied keys override the doc-type default schemas entirely.
    if (userLockedKeys.length > 0) {
      userLockedKeys.forEach((f) => {
        schema.extracted_fields[f] = 'string|null';
      });
    } else if (isAadhaar) {
      schema.extracted_fields = {
        customer_name: 'string',
        aadhar_number: 'string',
        name: 'string|null',
        dob: 'string',
        gender: 'string',
        address: 'string',
        aadhaar_number: 'string|null'
      };
    } else if (isCsdPo) {
      schema.extracted_fields = {
        customer_name: 'string',
        po_number: 'string',
        po_amount: 'string',
        signature_and_stamp: 'string|boolean',
        vendor_name: 'string',
        invoice_date: 'string',
        order_or_authorisation_number: 'string|null',
        invoice_value: 'string|null',
        govt_signatory_and_stamp_present: 'string|boolean|null'
      };
    } else if (isInvoice) {
      schema.extracted_fields = {
        customer_name: 'string',
        order_or_authorisation_number: 'string',
        invoice_value: 'string',
        invoice_date: 'string',
        vendor_name: 'string'
      };
    } else if (isAuthorityDoc) {
      schema.extracted_fields = {
        customer_name: 'string',
        letter_number: 'string|null',
        letter_amount: 'string|null',
        signature_and_stamp: 'string|boolean|null',
        authorized_person_name: 'string|null',
        authority_grantor_name: 'string',
        valid_until: 'string',
        purpose: 'string',
        date_of_issue: 'string',
        pan_number: 'string|null',
        order_or_authorisation_number: 'string|null',
        amount: 'string|null',
        invoice_value: 'string|null',
        stamp_sign_present: 'string|boolean|null',
        govt_signatory_and_stamp_present: 'string|boolean|null'
      };
    }

    // Ask for a 0-1 confidence for every schema key.
    Object.keys(schema.extracted_fields).forEach(key => {
      schema.field_confidence[key] = "number (0-1)";
    });

    const msdRef =
      msdReferencePayload &&
      typeof msdReferencePayload === 'object' &&
      Object.keys(msdReferencePayload).length > 0
        ? JSON.stringify(msdReferencePayload, null, 2)
        : '';

    const scriptPrefBlock = buildMsdScriptPreferenceBlock(userLockedKeys, msdReferencePayload);

    return `
Return ONLY valid JSON (no markdown).
Schema:
${JSON.stringify(schema, null, 2)}

Instructions:
Extract fields based on the provided document_type.
${userLockedKeys.length > 0
  ? `MANDATORY_KEYS: Your JSON property "extracted_fields" MUST contain exactly these keys (same spelling, no extras): ${userLockedKeys.join(', ')}. Use null only when that value is not visible on the document image/PDF.`
  : ''}
${userLockedKeys.length > 0
  ? `EXTRACTION REQUEST: Extract only what is needed for those keys; do not invent keys outside the list.`
  : ''}
${msdRef ? `REFERENCE_VALUES (from the user's form — use to locate the correct rows/labels on the document; values in extracted_fields must match what is visibly printed on the PDF/image, not invented):\n${msdRef}\n` : ''}
${scriptPrefBlock}
BILINGUAL_FORMS: Indian CPC/CSD forms often print the same label in English and Hindi. For each key in MSD_SCRIPT_PREFERENCE (if present), the MSD value shows which language the user entered — prefer_script is Devanagari (Hindi script) vs Latin (English). When both languages appear for that field on the image/PDF, copy the value whose script matches prefer_script. When only one script is visible, extract that visible value. Never return the other language if both are printed and MSD is clearly single-script. Numeric-only fields (amounts, IDs): use digits as printed; script rule applies mainly to name and free-text fields.

For Aadhaar: customer_name (holder name), aadhar_number (12 digits, no spaces preferred), optional dob (DDMMYYYY), gender, address. You may also populate legacy keys name and aadhaar_number if visible.
CRITICAL: For 'address', extract ONLY the physical location details.
${isCsdPo
  ? `For CSD Purchase Order: extract po_number (PO reference — exact text), po_amount (digits only, rupees), vendor_name (supplier/dealer company from letterhead or From/Supplier block), customer_name (the human buyer / beneficiary — NOT the dealer company name), invoice_date, signature_and_stamp as yes/no (official stamp or authorized signatory visible). Legacy keys order_or_authorisation_number, invoice_value, govt_signatory_and_stamp_present may be filled with the same values if present.
For customer_name, read the value beside or under labels such as: Sold To, Bill To, Ship To, Consignee, Buyer, Purchaser, Customer, CSD Card / Card Holder, Beneficiary, Name of Purchaser/Buyer, Ordered By. Do NOT use the first generic "Name:" on the page if it sits under supplier/dealer details or is clearly a sales contact.
Many CSD PO line tables put the beneficiary in the Description column as: a 16-digit number (card/UIN style) immediately followed by the person's name (then often a house/plot number and address). Prefer that name for customer_name when present.
${expectedFields.some((f) => String(f).toLowerCase() === 'customer_name') ? "CRITICAL: The JSON key customer_name must hold the printed buyer/beneficiary person name from the PO (what the user typed in customer_name). Put the supplying company's legal name only under vendor_name when that key exists; never put the dealer letterhead name in customer_name." : ''}`
  : ''}
${isInvoice ? 'For Retail Invoice: customer name, invoice amount (numeric only, exclude currency symbol), order/authorisation number, vendor name, and date.' : ''}
${isAuthorityDoc
  ? 'For CPC / Authorization Letter: extract customer_name (person being authorized), letter_number, letter_amount (numeric), signature_and_stamp yes/no (stamp/signature visible). Also extract authority grantor, dates, purpose, PAN if visible when those keys exist in the schema. Legacy keys authorized_person_name, invoice_value, govt_signatory_and_stamp_present may mirror the same values.'
  : ''}
${userLockedKeys.some((f) => String(f).toLowerCase() === 'mail_extraction')
  ? "If 'mail_extraction' is requested: extract the email address or mail reference line visible on the document (official correspondence / contact email). Put the primary value in extracted_fields.mail_extraction."
  : ''}
If a field name like 'pan_number' is requested, look for a 10-character alphanumeric string (5 letters, 4 digits, 1 letter).
For 'govt_signatory_and_stamp_present' or 'signature_and_stamp', check if the document has an official stamp or authorized signatory mark and return "yes" or "no".

document_type: ${documentType}

OCR_TEXT:
"""${ocrText ? ocrText.slice(0, 20000) : "No OCR text provided. Please extract directly from the provided document image/PDF."}"""
`;
  }

  /**
   * Tolerant JSON parse for LLM output: strips markdown code fences, slices
   * the outermost {...} span, and accepts either the nested
   * `{ extracted_fields, field_confidence }` shape or a flat object of fields
   * (confidence split out when present as `field_confidence`).
   * @throws Error('AI_EXTRACTION_FAILED: ...') when no JSON object is found
   *   or the sliced text does not parse.
   */
  private static parseJsonLoose(text: string): { extracted_fields: Record<string, unknown>; field_confidence: Record<string, unknown> } {
    let s = String(text || '').trim();
    // Remove a leading ```json fence and a trailing ``` fence, if present.
    s = s.replace(/^```(?:json)?\s*/i, '').replace(/\s*```\s*$/i, '');
    const a = s.indexOf('{');
    const b = s.lastIndexOf('}');
    if (a === -1) throw new Error('AI_EXTRACTION_FAILED: No JSON object found in LLM response');
    let parsed: Record<string, unknown>;
    try {
      parsed = JSON.parse(s.slice(a, b + 1)) as Record<string, unknown>;
    } catch {
      throw new Error('AI_EXTRACTION_FAILED: Invalid JSON from model');
    }
    const nested = parsed.extracted_fields;
    if (nested && typeof nested === 'object' && !Array.isArray(nested)) {
      return {
        extracted_fields: nested as Record<string, unknown>,
        field_confidence:
          parsed.field_confidence && typeof parsed.field_confidence === 'object'
            ? (parsed.field_confidence as Record<string, unknown>)
            : {}
      };
    }
    // Model sometimes returns flat keys instead of { extracted_fields: { ... } }
    const fc =
      parsed.field_confidence && typeof parsed.field_confidence === 'object'
        ? (parsed.field_confidence as Record<string, unknown>)
        : {};
    const ef: Record<string, unknown> = { ...parsed };
    delete ef.field_confidence;
    delete ef.extracted_fields;
    return { extracted_fields: ef, field_confidence: fc };
  }
}
|
||||
55
src/services/cpc-cdc/ensureCpcCdcSchema.ts
Normal file
55
src/services/cpc-cdc/ensureCpcCdcSchema.ts
Normal file
@ -0,0 +1,55 @@
|
||||
import { sequelize } from '@config/database';
|
||||
import logger from '@utils/logger';
|
||||
|
||||
/**
|
||||
* Ensures CPC-CSD tables exist (idempotent). Runs at app startup so a fresh DB
|
||||
* still serves CPC routes even if the migrations runner was skipped once.
|
||||
*/
|
||||
export async function ensureCpcCdcSchema(): Promise<void> {
|
||||
try {
|
||||
await sequelize.query(`
|
||||
CREATE TABLE IF NOT EXISTS cpc_documents (
|
||||
id UUID NOT NULL PRIMARY KEY,
|
||||
booking_id VARCHAR(255),
|
||||
claim_id VARCHAR(255),
|
||||
attempt_no INTEGER NOT NULL DEFAULT 1,
|
||||
document_type VARCHAR(255),
|
||||
document_gcp_url TEXT,
|
||||
provider VARCHAR(255),
|
||||
msd_payload JSONB,
|
||||
extracted_fields JSONB,
|
||||
field_confidence JSONB,
|
||||
validation_status VARCHAR(255),
|
||||
match_percentage DOUBLE PRECISION,
|
||||
mismatch_reasons JSONB,
|
||||
field_results JSONB,
|
||||
ip_address VARCHAR(255),
|
||||
created_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW()
|
||||
);
|
||||
`);
|
||||
|
||||
await sequelize.query(`
|
||||
CREATE TABLE IF NOT EXISTS cpc_audit_logs (
|
||||
id UUID NOT NULL PRIMARY KEY,
|
||||
document_id UUID NOT NULL REFERENCES cpc_documents(id) ON DELETE CASCADE,
|
||||
action VARCHAR(255) NOT NULL,
|
||||
previous_state JSONB,
|
||||
new_state JSONB,
|
||||
performed_by VARCHAR(255),
|
||||
remarks TEXT,
|
||||
created_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW()
|
||||
);
|
||||
`);
|
||||
|
||||
await sequelize.query(`DROP INDEX IF EXISTS unique_cpc_document_attempt;`);
|
||||
await sequelize.query(`
|
||||
CREATE UNIQUE INDEX IF NOT EXISTS unique_cpc_document_claim_attempt_booking
|
||||
ON cpc_documents (claim_id, attempt_no, booking_id);
|
||||
`);
|
||||
|
||||
logger.info('[CPC-CSD] Schema check complete (cpc_documents / cpc_audit_logs).');
|
||||
} catch (err) {
|
||||
logger.error('[CPC-CSD] ensureCpcCdcSchema failed — run `npm run migrate` in re-workflow-be.', err);
|
||||
// Do not block app boot; CPC routes will error until DB is fixed.
|
||||
}
|
||||
}
|
||||
22
src/services/cpc-cdc/extractPdfText.ts
Normal file
22
src/services/cpc-cdc/extractPdfText.ts
Normal file
@ -0,0 +1,22 @@
|
||||
/**
|
||||
* Pull plain text from a PDF buffer for CPC RULES / regex extraction when Document AI is off.
|
||||
*/
|
||||
export async function extractPdfTextFromBuffer(buffer: Buffer): Promise<string> {
|
||||
if (!buffer?.length) return '';
|
||||
try {
|
||||
const { PDFParse } = await import('pdf-parse');
|
||||
const parser = new PDFParse({ data: new Uint8Array(buffer) });
|
||||
const textResult = await parser.getText();
|
||||
const text = textResult?.text ?? '';
|
||||
await parser.destroy();
|
||||
// #region agent log
|
||||
fetch('http://127.0.0.1:7259/ingest/1bcd6134-2d07-4e57-96c5-9f7406df102e',{method:'POST',headers:{'Content-Type':'application/json','X-Debug-Session-Id':'5f3c70'},body:JSON.stringify({sessionId:'5f3c70',location:'extractPdfText.ts:success',message:'pdf-parse succeeded',data:{textLen:text.length},timestamp:Date.now(),hypothesisId:'B'})}).catch(()=>{});
|
||||
// #endregion
|
||||
return typeof text === 'string' ? text : '';
|
||||
} catch (pdfErr: any) {
|
||||
// #region agent log
|
||||
fetch('http://127.0.0.1:7259/ingest/1bcd6134-2d07-4e57-96c5-9f7406df102e',{method:'POST',headers:{'Content-Type':'application/json','X-Debug-Session-Id':'5f3c70'},body:JSON.stringify({sessionId:'5f3c70',location:'extractPdfText.ts:catch',message:'pdf-parse FAILED',data:{errorName:pdfErr?.name,errorMessage:pdfErr?.message?.slice(0,200)},timestamp:Date.now(),hypothesisId:'B'})}).catch(()=>{});
|
||||
// #endregion
|
||||
return '';
|
||||
}
|
||||
}
|
||||
195
src/services/cpc-cdc/utils.ts
Normal file
195
src/services/cpc-cdc/utils.ts
Normal file
@ -0,0 +1,195 @@
|
||||
import stringSimilarity from 'string-similarity';
|
||||
import { Op } from 'sequelize';
|
||||
|
||||
/** Shared list/report filters for CPC documents (parity with legacy CPC-CSD). */
|
||||
export function appendCpcDocumentFilters(
|
||||
andParts: Record<string, unknown>[],
|
||||
opts: {
|
||||
type?: string;
|
||||
status?: string;
|
||||
search?: string;
|
||||
/** When true, `search` also matches document `id` (recent documents API). */
|
||||
searchIncludeId?: boolean;
|
||||
}
|
||||
): void {
|
||||
const { type, status, search, searchIncludeId = false } = opts;
|
||||
|
||||
if (type && type !== 'ALL') {
|
||||
if (type === 'AADHAAR') {
|
||||
andParts.push({
|
||||
[Op.or]: [
|
||||
{ documentType: { [Op.iLike]: '%AADHAAR%' } },
|
||||
{ documentType: { [Op.iLike]: '%ADHAAR%' } }
|
||||
]
|
||||
});
|
||||
} else if (type === 'RETAIL_INVOICE') {
|
||||
andParts.push({
|
||||
[Op.or]: [
|
||||
{ documentType: { [Op.iLike]: '%RETAIL%' } },
|
||||
{ documentType: { [Op.iLike]: '%INVOICE%' } }
|
||||
]
|
||||
});
|
||||
} else if (type === 'CPC_AUTH') {
|
||||
andParts.push({
|
||||
[Op.or]: [
|
||||
{ documentType: { [Op.iLike]: '%AUTHORITY%' } },
|
||||
{ documentType: { [Op.iLike]: '%CPC_AUTH%' } },
|
||||
{ documentType: { [Op.iLike]: '%AUTH%' } }
|
||||
]
|
||||
});
|
||||
} else if (type === 'CSD_PO') {
|
||||
andParts.push({
|
||||
[Op.or]: [
|
||||
{ documentType: { [Op.iLike]: '%CSD_PO%' } },
|
||||
{ documentType: { [Op.iLike]: '%PURCHASE_ORDER%' } },
|
||||
{ documentType: { [Op.iLike]: '%PO%' } }
|
||||
]
|
||||
});
|
||||
} else {
|
||||
andParts.push({ documentType: { [Op.iLike]: `%${type}%` } });
|
||||
}
|
||||
}
|
||||
|
||||
if (status && status !== 'ALL') {
|
||||
if (status === 'SUCCESSFUL') {
|
||||
andParts.push({
|
||||
validationStatus: { [Op.in]: ['SUCCESSFUL', 'MATCH', 'APPROVED'] }
|
||||
});
|
||||
} else if (status === 'UNSUCCESSFUL') {
|
||||
// Document-level "failed" outcomes. Per-field columns can still show green for fields that passed.
|
||||
// NEED_MANUAL = missing required extraction; not MATCH/SUCCESSFUL/APPROVED.
|
||||
andParts.push({
|
||||
validationStatus: {
|
||||
[Op.in]: ['UNSUCCESSFUL', 'MISMATCH', 'REJECTED', 'NEED_MANUAL']
|
||||
}
|
||||
});
|
||||
} else {
|
||||
andParts.push({ validationStatus: status });
|
||||
}
|
||||
}
|
||||
|
||||
if (search) {
|
||||
const orClause: Record<string, unknown>[] = [
|
||||
{ bookingId: { [Op.iLike]: `%${search}%` } },
|
||||
{ claimId: { [Op.iLike]: `%${search}%` } },
|
||||
{ documentType: { [Op.iLike]: `%${search}%` } }
|
||||
];
|
||||
if (searchIncludeId) {
|
||||
orClause.unshift({ id: { [Op.iLike]: `%${search}%` } });
|
||||
}
|
||||
andParts.push({ [Op.or]: orClause });
|
||||
}
|
||||
}
|
||||
|
||||
export function cpcWhereFromAndParts(andParts: Record<string, unknown>[]): Record<string, unknown> {
|
||||
if (andParts.length === 0) return {};
|
||||
return { [Op.and]: andParts };
|
||||
}
|
||||
|
||||
export function digitsOnly(str: string | null | undefined): string {
|
||||
return String(str || "").replace(/\D/g, "");
|
||||
}
|
||||
|
||||
export function normalizeMoney(str: string | null | undefined): string {
|
||||
const cleaned = String(str || "").replace(/[^\d.]/g, "");
|
||||
const num = cleaned ? Number(cleaned) : NaN;
|
||||
if (Number.isNaN(num)) return "";
|
||||
return String(Math.round(num));
|
||||
}
|
||||
|
||||
export function cleanText(str: string | null | undefined): string {
|
||||
return String(str || "").trim().replace(/\s+/g, " ");
|
||||
}
|
||||
|
||||
export function nameTokens(str: string | null | undefined): string[] {
|
||||
return cleanText(str)
|
||||
.toLowerCase()
|
||||
.replace(/[^a-z\s]/g, " ")
|
||||
.split(/\s+/)
|
||||
.filter(Boolean);
|
||||
}
|
||||
|
||||
export function normalizeDate(str: string | null | undefined): string | null {
|
||||
if (!str) return null;
|
||||
// Remove non-alphanumeric chars
|
||||
const clean = str.replace(/[^a-zA-Z0-9]/g, "");
|
||||
|
||||
// Attempt to match common formats (DDMMYYYY, DD-MM-YYYY, YYYY-MM-DD)
|
||||
// 1. DDMMYYYY (8 digits)
|
||||
if (/^\d{8}$/.test(clean)) {
|
||||
const day = clean.substring(0, 2);
|
||||
const month = clean.substring(2, 4);
|
||||
const year = clean.substring(4, 8);
|
||||
return `${year}-${month}-${day}`;
|
||||
}
|
||||
// 2. Already ISO-like YYYYMMDD
|
||||
if (/^\d{4}\d{2}\d{2}$/.test(clean) && (clean.startsWith("19") || clean.startsWith("20"))) {
|
||||
const year = clean.substring(0, 4);
|
||||
const month = clean.substring(4, 6);
|
||||
const day = clean.substring(6, 8);
|
||||
return `${year}-${month}-${day}`;
|
||||
}
|
||||
|
||||
// Try Native Date parsing if it has separators
|
||||
try {
|
||||
const d = new Date(str);
|
||||
if (!isNaN(d.getTime())) {
|
||||
return d.toISOString().split('T')[0];
|
||||
}
|
||||
} catch (e) { }
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
export function cleanAddress(str: string | null | undefined): string {
|
||||
if (!str) return "";
|
||||
// Remove "S/O", "C/O", "D/O", "W/O" and following name until a comma or newline
|
||||
return str.replace(/(?:[scdw]\/o[:\s]|care\sof[:\s]|son\sof[:\s]|daughter\sof[:\s]|wife\sof[:\s])[^,\n]*(?:,|\n)?/gi, "").trim();
|
||||
}
|
||||
|
||||
/**
 * Score how well an OCR-extracted value matches the expected MSD value, 0-100.
 *
 * @param expected value from the MSD payload (source of truth)
 * @param found    value extracted from the document
 * @param key      field name; drives special handling ('dob'/'date' → date
 *                 normalization, 'address' → relationship-prefix stripping)
 * @returns 100 for an exact/normalized match; otherwise the better of a
 *          string-similarity score and a token-overlap score; 0 when either
 *          input is empty.
 */
export function calculateMatch(expected: string, found: string, key: string = ""): number {
  if (!expected || !found) return 0;

  const lowerKey = key.toLowerCase();
  let expStr = String(expected).trim().toLowerCase();
  let fndStr = String(found).trim().toLowerCase();

  // 1. Date Normalization Special Handling — equal ISO forms short-circuit to 100.
  if (lowerKey.includes('dob') || lowerKey.includes('date')) {
    const normExp = normalizeDate(expStr);
    const normFnd = normalizeDate(fndStr);
    if (normExp && normFnd && normExp === normFnd) return 100;

    // Fallback to digits only for dates like "28-06-1990" vs "28061990"
    const dExp = expStr.replace(/\D/g, "");
    const dFnd = fndStr.replace(/\D/g, "");
    if (dExp !== "" && dExp === dFnd) return 100;
  }

  // 2. Address Cleanup — strip "S/O …"-style prefixes from BOTH sides before comparing.
  if (lowerKey.includes('address')) {
    fndStr = cleanAddress(fndStr).toLowerCase();
    expStr = cleanAddress(expStr).toLowerCase();
  }

  // 3. Exact match (after the normalizations above)
  if (expStr === fndStr) return 100;

  // 4. String Similarity (string-similarity's Dice-coefficient comparison), scaled to 0-100.
  const similarity = stringSimilarity.compareTwoStrings(expStr, fndStr);
  const score = Math.round(similarity * 100);

  // 5. Token-based fallback (Good for names/addresses): unique-word overlap
  //    ratio; the final result is the better of the two scores.
  const tokensA = nameTokens(expStr);
  const tokensB = nameTokens(fndStr);
  if (tokensA.length > 0 && tokensB.length > 0) {
    const setA = new Set(tokensA);
    const setB = new Set(tokensB);
    let intersection = 0;
    for (const t of setA) if (setB.has(t)) intersection++;
    const tokenScore = Math.round((intersection / Math.max(setA.size, setB.size)) * 100);
    return Math.max(score, tokenScore);
  }

  // No alphabetic tokens on one side (e.g. pure numbers) — similarity score only.
  return score > 0 ? score : 0;
}
|
||||
54
src/services/cpcPermission.service.ts
Normal file
54
src/services/cpcPermission.service.ts
Normal file
@ -0,0 +1,54 @@
|
||||
/**
|
||||
* CPC-CSD permission service – API-driven access based on admin configuration.
|
||||
* Reads viewerEmails from CPC_CSD_ADMIN_CONFIG (legacy CPC_CDC_ADMIN_CONFIG until migrated).
|
||||
*/
|
||||
|
||||
import { selectCpcCsdAdminConfigValue } from '../utils/cpcCsdAdminConfigDb';
|
||||
|
||||
/** Parsed CPC-CSD viewer allow-list loaded from admin_configurations. */
export interface CpcCdcViewerConfig {
  // Trimmed, lower-cased emails permitted to view the CPC-CSD section.
  viewerEmails: string[];
}

/** Fallback when the config row is missing or unparsable: no non-admin access. */
const emptyConfig: CpcCdcViewerConfig = {
  viewerEmails: [],
};
|
||||
|
||||
function normalizeEmail(email: string): string {
|
||||
return (email || '').trim().toLowerCase();
|
||||
}
|
||||
|
||||
/**
|
||||
* Load CPC-CSD viewer config from admin_configurations.
|
||||
* Returns empty list if config is missing or invalid.
|
||||
*/
|
||||
export async function getCpcCdcViewerConfig(): Promise<CpcCdcViewerConfig> {
|
||||
try {
|
||||
const raw = await selectCpcCsdAdminConfigValue();
|
||||
if (!raw) {
|
||||
return emptyConfig;
|
||||
}
|
||||
|
||||
const parsed = JSON.parse(raw);
|
||||
const viewerEmails = Array.isArray(parsed.viewerEmails)
|
||||
? parsed.viewerEmails.map((e: unknown) => normalizeEmail(String(e ?? ''))).filter(Boolean)
|
||||
: [];
|
||||
|
||||
return { viewerEmails };
|
||||
} catch {
|
||||
return emptyConfig;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if user can access CPC-CSD section.
|
||||
* - Admin: always allowed.
|
||||
* - Otherwise: only listed emails are allowed.
|
||||
*/
|
||||
export async function canAccessCpcCdc(userEmail: string, role?: string): Promise<boolean> {
|
||||
if (role === 'ADMIN') return true;
|
||||
|
||||
const config = await getCpcCdcViewerConfig();
|
||||
const email = normalizeEmail(userEmail);
|
||||
if (!email) return false;
|
||||
return config.viewerEmails.includes(email);
|
||||
}
|
||||
@ -13,6 +13,18 @@ interface UploadFileOptions {
|
||||
fileType: 'documents' | 'attachments'; // Type of file: documents or attachments
|
||||
}
|
||||
|
||||
/** CPC/CSD uploads — same GCS vs local rules as {@link uploadFileWithFallback}. */
export interface UploadCpcCsdFileOptions {
  // Raw file bytes; upload methods throw when empty.
  buffer: Buffer;
  // Client-supplied name; used to derive the stored filename unless `fileName` is set.
  originalName: string;
  // Content type recorded as GCS object metadata.
  mimeType: string;
  // Selects the `cpc`/`csd` folder under `cpc-csd-files/`.
  channel: 'csd' | 'cpc';
  /** Booking / claim id (caller may pre-sanitize; service sanitizes again) */
  bookingSegment: string;
  /** When set, used as the final filename (no path segments). Otherwise same pattern as workflow documents. */
  fileName?: string;
}
|
||||
|
||||
interface UploadResult {
|
||||
storageUrl: string;
|
||||
filePath: string; // GCS path
|
||||
@ -322,6 +334,146 @@ class GCSStorageService {
|
||||
}
|
||||
}
|
||||
|
||||
/** Make a booking/claim id safe as a single path segment (never empty, max 120 chars). */
private cpcCsdSanitizeBookingSegment(segment: string): string {
  const trimmed = String(segment || '').trim();
  if (!trimmed) {
    return 'unknown-booking';
  }
  return trimmed
    .replace(/[^a-zA-Z0-9._-]+/g, '_')
    .replace(/_+/g, '_')
    .slice(0, 120);
}
|
||||
|
||||
/** Same filename pattern as workflow `saveToLocalStorage` / `uploadFile`. */
|
||||
private buildCpcCsdFileName(originalName: string, explicit?: string): string {
|
||||
if (explicit && !explicit.includes('/') && !explicit.includes('..')) {
|
||||
return explicit;
|
||||
}
|
||||
const timestamp = Date.now();
|
||||
const randomHash = Math.random().toString(36).substring(2, 8);
|
||||
const safeName = originalName.replace(/[^a-zA-Z0-9._-]/g, '_');
|
||||
const extension = path.extname(originalName);
|
||||
const nameWithoutExt = safeName.substring(0, Math.max(0, safeName.length - extension.length));
|
||||
return `${nameWithoutExt}-${timestamp}-${randomHash}${extension}`;
|
||||
}
|
||||
|
||||
/**
|
||||
* Relative object path (same string in GCS and under {@link UPLOAD_DIR} for local fallback).
|
||||
* Example: `cpc-csd-files/csd/BOOK-1/documents/scan-1713-abc.pdf`
|
||||
*/
|
||||
private cpcCsdRelativeObjectPath(channel: 'csd' | 'cpc', bookingSeg: string, fileName: string): string {
|
||||
const ch = channel === 'cpc' ? 'cpc' : 'csd';
|
||||
const b = this.cpcCsdSanitizeBookingSegment(bookingSeg);
|
||||
return `cpc-csd-files/${ch}/${b}/documents/${fileName}`.replace(/\\/g, '/');
|
||||
}
|
||||
|
||||
/**
|
||||
* Upload CPC/CSD document to GCS (same bucket lifecycle as workflow requests).
|
||||
*/
|
||||
async uploadCpcCsdFile(options: UploadCpcCsdFileOptions): Promise<UploadResult> {
|
||||
if (!this.storage) {
|
||||
throw new Error('GCS storage not initialized. Check GCP configuration.');
|
||||
}
|
||||
|
||||
const { buffer, originalName, mimeType, channel, bookingSegment } = options;
|
||||
if (!buffer?.length) {
|
||||
throw new Error('Buffer is required for CPC/CSD upload');
|
||||
}
|
||||
|
||||
const fileName = this.buildCpcCsdFileName(originalName, options.fileName);
|
||||
const gcsFilePath = this.cpcCsdRelativeObjectPath(channel, bookingSegment, fileName);
|
||||
|
||||
try {
|
||||
await this.ensureBucketExists();
|
||||
|
||||
const bucket = this.storage.bucket(this.bucketName);
|
||||
const file = bucket.file(gcsFilePath);
|
||||
|
||||
const uploadOptions: any = {
|
||||
metadata: {
|
||||
contentType: mimeType,
|
||||
metadata: {
|
||||
originalName,
|
||||
uploadedAt: new Date().toISOString(),
|
||||
cpcCsdChannel: channel,
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
await file.save(buffer, uploadOptions);
|
||||
|
||||
let publicUrl: string;
|
||||
try {
|
||||
await file.makePublic();
|
||||
publicUrl = `https://storage.googleapis.com/${this.bucketName}/${gcsFilePath}`;
|
||||
} catch (makePublicError: any) {
|
||||
if (makePublicError?.code === 400 || makePublicError?.message?.includes('publicAccessPrevention')) {
|
||||
logger.warn('[GCS] CPC/CSD file cannot be public; using signed URL.');
|
||||
publicUrl = await this.getSignedUrl(gcsFilePath, 60 * 24 * 365);
|
||||
} else {
|
||||
throw makePublicError;
|
||||
}
|
||||
}
|
||||
|
||||
logger.info('[GCS] CPC/CSD file uploaded', { gcsPath: gcsFilePath, storageUrl: publicUrl });
|
||||
|
||||
return {
|
||||
storageUrl: publicUrl,
|
||||
filePath: gcsFilePath,
|
||||
fileName,
|
||||
};
|
||||
} catch (error) {
|
||||
logger.error('[GCS] CPC/CSD upload failed:', error);
|
||||
throw new Error(`Failed to upload CPC/CSD file to GCS: ${error instanceof Error ? error.message : 'Unknown error'}`);
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Local fallback for CPC/CSD — mirrors folder layout used in GCS (`cpc-csd-files/...`).
 */
saveCpcCsdToLocalStorage(options: UploadCpcCsdFileOptions): UploadResult {
  const { buffer, originalName, channel, bookingSegment } = options;
  if (!buffer?.length) {
    throw new Error('Buffer is required for CPC/CSD local storage');
  }

  const fileName = this.buildCpcCsdFileName(originalName, options.fileName);
  const relativePath = this.cpcCsdRelativeObjectPath(channel, bookingSegment, fileName);

  // Mirror the GCS object path under UPLOAD_DIR on the local filesystem.
  const segments = relativePath.split('/').filter(Boolean);
  const targetDir = path.join(UPLOAD_DIR, ...segments.slice(0, -1));
  const targetFile = path.join(UPLOAD_DIR, ...segments);

  if (!fs.existsSync(targetDir)) {
    fs.mkdirSync(targetDir, { recursive: true });
  }
  fs.writeFileSync(targetFile, buffer);

  const storageUrl = `/uploads/${relativePath}`;
  logger.info('[GCS] CPC/CSD file saved to local storage (fallback)', {
    originalName,
    localPath: relativePath,
    storageUrl,
  });

  return {
    storageUrl,
    filePath: relativePath,
    fileName,
  };
}
|
||||
|
||||
/**
|
||||
* CPC/CSD: try GCS first, then local under `uploads/` — same behaviour as {@link uploadFileWithFallback}.
|
||||
*/
|
||||
async uploadCpcCsdFileWithFallback(options: UploadCpcCsdFileOptions): Promise<UploadResult> {
|
||||
if (!this.isConfigured()) {
|
||||
logger.info('[GCS] GCS not configured, using local storage for CPC/CSD');
|
||||
return this.saveCpcCsdToLocalStorage(options);
|
||||
}
|
||||
try {
|
||||
return await this.uploadCpcCsdFile(options);
|
||||
} catch (gcsError) {
|
||||
logger.warn('[GCS] CPC/CSD GCS upload failed, falling back to local storage', { error: gcsError });
|
||||
return this.saveCpcCsdToLocalStorage(options);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Upload file with automatic fallback to local storage
|
||||
* If GCS is configured and works, uploads to GCS. Otherwise, saves to local storage.
|
||||
|
||||
@ -36,8 +36,12 @@ class NotificationService {
|
||||
logger.warn('VAPID keys are not configured. Push notifications are disabled.');
|
||||
return;
|
||||
}
|
||||
webpush.setVapidDetails(contact, pub, priv);
|
||||
logger.info('Web Push configured');
|
||||
try {
|
||||
webpush.setVapidDetails(contact, pub, priv);
|
||||
logger.info('Web Push configured');
|
||||
} catch (error) {
|
||||
logger.warn('Invalid VAPID keys. Push notifications are disabled.', error);
|
||||
}
|
||||
}
|
||||
|
||||
async addSubscription(userId: string, subscription: PushSubscription, userAgent?: string) {
|
||||
|
||||
25
src/utils/cpcCsdAdminConfigDb.ts
Normal file
25
src/utils/cpcCsdAdminConfigDb.ts
Normal file
@ -0,0 +1,25 @@
|
||||
/**
|
||||
* CPC/CSD document module — admin_configurations keys for viewer allow-list.
|
||||
* Legacy key kept for reads until migrated or overwritten by admin save.
|
||||
*/
|
||||
import { QueryTypes } from 'sequelize';
|
||||
import { sequelize } from '../config/database';
|
||||
|
||||
// Canonical admin_configurations key for the CPC-CSD viewer allow-list.
export const CPC_CSD_ADMIN_CONFIG_KEY = 'CPC_CSD_ADMIN_CONFIG';
// Legacy key, still read at lower priority until data is migrated or re-saved.
export const CPC_CDC_ADMIN_CONFIG_KEY_LEGACY = 'CPC_CDC_ADMIN_CONFIG';
|
||||
|
||||
/** Prefer CPC_CSD_ADMIN_CONFIG row; fall back to legacy CPC_CDC_ADMIN_CONFIG if present. */
|
||||
export async function selectCpcCsdAdminConfigValue(): Promise<string | null> {
|
||||
const result = await sequelize.query<{ config_value: string }>(
|
||||
`SELECT config_value FROM admin_configurations
|
||||
WHERE config_key IN (:kCsd, :kLegacy)
|
||||
ORDER BY CASE WHEN config_key = :kCsd THEN 0 ELSE 1 END
|
||||
LIMIT 1`,
|
||||
{
|
||||
replacements: { kCsd: CPC_CSD_ADMIN_CONFIG_KEY, kLegacy: CPC_CDC_ADMIN_CONFIG_KEY_LEGACY },
|
||||
type: QueryTypes.SELECT,
|
||||
}
|
||||
);
|
||||
if (!result?.length || !result[0].config_value) return null;
|
||||
return result[0].config_value;
|
||||
}
|
||||
@ -101,3 +101,8 @@ export const updateForm16ConfigSchema = z.object({
|
||||
reminderRunAtTime: z.string().regex(/^(\d{1,2}:\d{2})?$/, 'Time must be in HH:mm format').optional(),
|
||||
reminderNotificationTemplate: z.string().optional(),
|
||||
});
|
||||
|
||||
// ── CPC-CSD Configuration Schemas ──
// Admin payload for saving the CPC-CSD viewer allow-list; each entry must be a
// valid email (zod-validated). Omitting viewerEmails leaves the list untouched.
export const updateCpcCdcConfigSchema = z.object({
  viewerEmails: z.array(z.string().email()).optional(),
});
|
||||
|
||||
Loading…
Reference in New Issue
Block a user