1148 lines
51 KiB
TypeScript
1148 lines
51 KiB
TypeScript
import { Request, Response } from 'express';
|
|
import fs from 'fs';
|
|
import path from 'path';
|
|
import { randomUUID } from 'crypto';
|
|
import { CpcDocument } from '@models/CpcDocument';
|
|
import { CpcAuditLog } from '@models/CpcAuditLog';
|
|
import { cpcOcrService } from '@services/cpc-cdc/CpcOcrService';
|
|
import { CpcValidationService } from '@services/cpc-cdc/CpcValidationService';
|
|
import { CpcHistoryService } from '@services/cpc-cdc/CpcHistoryService';
|
|
import { CpcRuleExtractService } from '@services/cpc-cdc/CpcRuleExtractService';
|
|
import { cpcGcsService } from '@services/cpc-cdc/CpcGcsService';
|
|
import { extractPdfTextFromBuffer } from '@services/cpc-cdc/extractPdfText';
|
|
import { appendCpcDocumentFilters, cpcWhereFromAndParts } from '@services/cpc-cdc/utils';
|
|
import { gcsStorageService } from '@services/gcsStorage.service';
|
|
|
|
import logger from '@utils/logger';
|
|
import { Op } from 'sequelize';
|
|
import { sequelize } from '@config/database';
|
|
import { UPLOAD_DIR } from '@config/storage';
|
|
|
|
/** Vertex / ADC not configured locally — do not fail the whole upload in non-production. */
|
|
function isLikelyVertexAuthOrCredentialFailure(err: unknown): boolean {
|
|
const e = err as { message?: string; name?: string; code?: string };
|
|
const blob = `${e?.name || ''} ${e?.message || ''} ${e?.code || ''}`.toLowerCase();
|
|
return (
|
|
blob.includes('googleauth') ||
|
|
blob.includes('unable to authenticate') ||
|
|
blob.includes('could not load the default credentials') ||
|
|
blob.includes('application default credentials') ||
|
|
blob.includes('invalid_grant') ||
|
|
(blob.includes('enotfound') && blob.includes('metadata.google.internal'))
|
|
);
|
|
}
|
|
|
|
// On-disk fallback root for CPC/CSD uploads when GCS is unavailable.
const CPC_LOCAL_UPLOAD_DIR = path.join(process.cwd(), 'uploads', 'cpc-csd-files');
// Scheme-style prefix marking documentGcpUrl values that point at local disk
// (resolved back to an absolute path by resolveCpcLocalDiskPath).
const CPC_LOCAL_URL_PREFIX = 'cpc-local:';
/** Max validation attempts per claim when `CPC_ENFORCE_MAX_ATTEMPTS` is not `false` (MSD may own attempts — then set env false). */
// Clamped to >= 1; falls back to 2 when CPC_MAX_ATTEMPTS is unset or not numeric.
const CPC_MAX_ATTEMPTS = Math.max(1, parseInt(process.env.CPC_MAX_ATTEMPTS || '2', 10) || 2);
|
|
|
|
function isCpcAttemptLimitEnforced(): boolean {
|
|
return String(process.env.CPC_ENFORCE_MAX_ATTEMPTS ?? 'true').toLowerCase() !== 'false';
|
|
}
|
|
|
|
function minCpcCdcAttachmentsForClaim(bookingIdRaw: unknown, bookingTypeRaw: unknown): number {
|
|
const bt = String(bookingTypeRaw || '').toUpperCase();
|
|
if (bt === 'CSD') return 1;
|
|
if (bt === 'CPC') return 2;
|
|
const bid = String(bookingIdRaw || '').toUpperCase();
|
|
if (bid.startsWith('CSD-') || bid.startsWith('CSD_')) return 1;
|
|
return 2;
|
|
}
|
|
|
|
function extForUploadedFile(mimetype: string, originalName: string): string {
|
|
const fromName = path.extname(originalName || '').toLowerCase();
|
|
if (fromName && fromName.length <= 12) return fromName;
|
|
if (mimetype === 'application/pdf') return '.pdf';
|
|
if (mimetype?.includes('jpeg') || mimetype === 'image/jpg') return '.jpg';
|
|
if (mimetype?.includes('png')) return '.png';
|
|
return '.bin';
|
|
}
|
|
|
|
/** Safe single path segment for disk/GCS (booking id, doc type token). */
|
|
function sanitizePathSegment(segment: string, maxLen = 120): string {
|
|
const s = String(segment || '').trim();
|
|
if (!s) return 'unknown-booking';
|
|
const cleaned = s.replace(/[^a-zA-Z0-9._-]+/g, '_').replace(/_+/g, '_');
|
|
return cleaned.slice(0, maxLen);
|
|
}
|
|
|
|
/** Folder key under `cpc-csd-files/{csd|cpc}/` (same relative path in GCS bucket and local `uploads/`). */
|
|
function deriveCpcCsdStorageChannel(bookingIdRaw: unknown, bookingTypeRaw: unknown): 'csd' | 'cpc' {
|
|
const bt = String(bookingTypeRaw || '').toUpperCase();
|
|
if (bt === 'CSD') return 'csd';
|
|
if (bt === 'CPC') return 'cpc';
|
|
const bid = String(bookingIdRaw || '').toUpperCase();
|
|
if (bid.startsWith('CSD-') || bid.startsWith('CSD_')) return 'csd';
|
|
return 'cpc';
|
|
}
|
|
|
|
/**
|
|
* Same shape as workflow uploads: time + short id + role fields + safe original stem + extension.
|
|
* Example: `1713350400123-a1b2c3d4e5f6-a1-PO-purchase_order.pdf`
|
|
*/
|
|
function buildCpcCsdStoredFileName(params: {
|
|
documentId: string;
|
|
attemptNo: number;
|
|
docType: string;
|
|
originalName: string;
|
|
mimetype: string;
|
|
fileIndex: number;
|
|
}): string {
|
|
const ext = extForUploadedFile(params.mimetype, params.originalName);
|
|
const ts = Date.now();
|
|
const shortId = params.documentId.replace(/-/g, '').slice(0, 12);
|
|
const stem = path.basename(params.originalName || 'file', path.extname(params.originalName || ''));
|
|
const origStem = sanitizePathSegment(stem, 72).toLowerCase() || 'file';
|
|
const typePart = sanitizePathSegment(params.docType, 36).toLowerCase() || 'doc';
|
|
return `${ts}-${shortId}-a${params.attemptNo}-i${params.fileIndex + 1}-${typePart}-${origStem}${ext}`;
|
|
}
|
|
|
|
function contentTypeFromPath(p: string): string {
|
|
const ext = path.extname(p).toLowerCase();
|
|
if (ext === '.pdf') return 'application/pdf';
|
|
if (ext === '.png') return 'image/png';
|
|
if (ext === '.jpg' || ext === '.jpeg') return 'image/jpeg';
|
|
if (ext === '.gif') return 'image/gif';
|
|
return 'application/octet-stream';
|
|
}
|
|
|
|
/**
 * Resolve a `cpc-local:`-prefixed document reference to an absolute path on
 * disk, or null if the reference is not local or fails containment checks.
 * Deliberately strict: any ".." substring is rejected, even inside a filename.
 */
function resolveCpcLocalDiskPath(documentGcpUrl: string): string | null {
  // Only handle refs stored with the local-disk scheme.
  if (!documentGcpUrl.startsWith(CPC_LOCAL_URL_PREFIX)) return null;
  // Strip the prefix and leading slashes; normalize Windows separators.
  const rel = documentGcpUrl.slice(CPC_LOCAL_URL_PREFIX.length).replace(/^\/+/, '').replace(/\\/g, '/');
  // Path-traversal guard #1: reject empty refs and anything containing "..".
  if (!rel || rel.includes('..')) return null;
  const segments = rel.split('/').filter(Boolean);
  // Path-traversal guard #2 (redundant with the substring check, kept as defense in depth).
  if (segments.some((s) => s === '..')) return null;
  const full = path.resolve(CPC_LOCAL_UPLOAD_DIR, ...segments);
  const base = path.resolve(CPC_LOCAL_UPLOAD_DIR);
  // Containment check: the resolved path must be the base dir itself or live
  // strictly under it (separator-terminated prefix avoids "/base-evil" matches).
  const baseSep = base.endsWith(path.sep) ? base : `${base}${path.sep}`;
  if (!full.startsWith(baseSep) && full !== base) return null;
  return full;
}
|
|
|
|
/** Workflow-style `/uploads/...` URLs stored when GCS is unavailable (UAT). */
// Mirrors resolveCpcLocalDiskPath but rooted at UPLOAD_DIR; returns null when
// the ref is not an /uploads/ URL or escapes the upload directory.
function resolveUploadsDirFromPublicUrl(storageRef: string): string | null {
  if (!storageRef.startsWith('/uploads/')) return null;
  // Strip the public prefix and leading slashes; normalize Windows separators.
  const rel = storageRef.slice('/uploads/'.length).replace(/^\/+/, '').replace(/\\/g, '/');
  // Path-traversal guard: reject empty refs and any ".." substring (strict by design).
  if (!rel || rel.includes('..')) return null;
  const segments = rel.split('/').filter(Boolean);
  if (segments.some((s) => s === '..')) return null;
  const full = path.resolve(UPLOAD_DIR, ...segments);
  const base = path.resolve(UPLOAD_DIR);
  // Containment check: resolved path must equal the base or be strictly inside it.
  const baseSep = base.endsWith(path.sep) ? base : `${base}${path.sep}`;
  if (!full.startsWith(baseSep) && full !== base) return null;
  return full;
}
|
|
|
|
export class CpcCdcController {
|
|
|
|
/**
 * Validate a single document upload (Legacy support or single file)
 *
 * Pipeline per file: optional Document AI OCR -> pdf-parse fallback ->
 * extraction (RULES engine, or Vertex Gemini with degraded-empty fallback) ->
 * rule-engine gap fill -> CSD-PO customer-name refinement -> SRS validation ->
 * persist file (GCS with local fallback) + CpcDocument row + audit log.
 * Responds with a single result object for one file, or {count, results} for many.
 */
async validateDocumentUpload(req: Request, res: Response) {
  try {
    // Correlation ids for audit logging; fall back to generated/anonymous values.
    const requestId = String(req.headers['x-request-id'] || randomUUID());
    const clientId = String(req.headers['x-client-id'] || (req as any).user?.email || 'unknown');
    // Accept both multi-file (req.files) and legacy single-file (req.file) multer shapes.
    const files = (req.files as Express.Multer.File[]) || (req.file ? [req.file] : []);
    if (files.length === 0) {
      return res.status(400).json({
        error_code: 'NO_FILE_UPLOADED',
        error_message: 'No files were provided in the request.',
        retryable: false
      });
    }

    const skipMinAttachmentCheck = String((req.body as any)?.skip_min_attachment_check || '').toLowerCase() === 'true';
    // Minimum-attachment rule only applies on the /ocr/upload route.
    const isUploadFlow = (req.path || req.originalUrl || '').includes('/ocr/upload');
    const { document_type, msd_payload, provider, booking_id, booking_type, claim_id, metadata_queue } = req.body;
    const minAttachments = minCpcCdcAttachmentsForClaim(booking_id || claim_id, booking_type);
    if (isUploadFlow && !skipMinAttachmentCheck && files.length < minAttachments) {
      const msg =
        minAttachments === 1
          ? 'CSD claims require at least 1 attachment (Purchase Order PDF/image).'
          : 'CPC claims require at least 2 attachments: Authorization Letter and Aadhaar (one file each, or more if you split pages).';
      return res.status(400).json({
        error_code: 'MIN_ATTACHMENTS_REQUIRED',
        error_message: msg,
        retryable: false
      });
    }

    // claim_id preferred; booking_id accepted as a legacy alias.
    const targetClaimId = claim_id || booking_id;

    if (!targetClaimId) {
      return res.status(400).json({
        error_code: 'MISSING_CLAIM_ID',
        error_message: 'claim_id or booking_id is required to track validation attempts.',
        retryable: false
      });
    }

    // Storage layout: cpc-csd-files/{csd|cpc}/<sanitized booking id>/...
    const storageChannel = deriveCpcCsdStorageChannel(targetClaimId, booking_type);
    const safeBookingSeg = sanitizePathSegment(targetClaimId);

    // Support both single payload and metadata queue
    // queue[i] supplies document_type + expected MSD payload for files[i].
    let queue: any[] = [];
    if (metadata_queue) {
      try {
        queue = typeof metadata_queue === 'string' ? JSON.parse(metadata_queue) : metadata_queue;
      } catch (e) {
        return res.status(400).json({
          error_code: 'INVALID_QUEUE',
          error_message: 'Invalid metadata_queue format.',
          retryable: false
        });
      }
    } else {
      let parsedPayload = {};
      try {
        // NOTE(review): when msd_payload is absent, parsedPayload becomes
        // undefined here; downstream `meta.msd_payload || {}` absorbs that.
        parsedPayload = typeof msd_payload === 'string' ? JSON.parse(msd_payload) : msd_payload;
      } catch (e) {
        return res.status(400).json({
          error_code: 'INVALID_PAYLOAD',
          error_message: 'Invalid msd_payload.',
          retryable: false
        });
      }
      queue = [{
        document_type: document_type || "GENERIC_INVOICE",
        msd_payload: parsedPayload
      }];
    }

    const results: any[] = [];
    const ipAddress = req.ip || req.headers['x-forwarded-for'] || req.socket.remoteAddress;
    // Production: real Vertex/Gemini only unless CPC_ALLOW_DEGRADED_SAVE_WITHOUT_AI=true.
    // Non-production: allow degraded saves by default so local CPC works without GCP; set env to "false" to force strict.
    const nodeEnv = (process.env.NODE_ENV || '').toLowerCase();
    const isProdRuntime = nodeEnv === 'production' || nodeEnv === 'prod';
    const degradedEnv = String(process.env.CPC_ALLOW_DEGRADED_SAVE_WITHOUT_AI || '').toLowerCase();
    const allowDegradedSave =
      degradedEnv === 'true' || (!isProdRuntime && degradedEnv !== 'false');

    // Caller may pin the attempt number (e.g. re-running attempt 1); otherwise derived below.
    const requestedAttemptNo = Number((req.body as any)?.attempt_no);
    const hasRequestedAttemptNo = Number.isFinite(requestedAttemptNo) && requestedAttemptNo > 0;

    // Distinct attempt numbers already recorded for this claim.
    const attemptRows = await CpcDocument.findAll({
      attributes: ['attemptNo'],
      where: { claimId: targetClaimId },
      group: ['attemptNo'],
      raw: true
    }) as Array<{ attemptNo?: number }>;
    const usedAttempts = new Set(
      attemptRows
        .map((r) => Number(r?.attemptNo || 0))
        .filter((n) => Number.isFinite(n) && n > 0)
    );

    if (isCpcAttemptLimitEnforced()) {
      // Explicit attempt number beyond the cap.
      if (hasRequestedAttemptNo && requestedAttemptNo > CPC_MAX_ATTEMPTS) {
        return res.status(422).json({
          error_code: 'MAX_ATTEMPTS_REACHED',
          error_message: `Only ${CPC_MAX_ATTEMPTS} validation attempts are allowed per claim.`,
          retryable: false
        });
      }

      // No explicit attempt and the claim already used every allowed attempt.
      if (!hasRequestedAttemptNo && usedAttempts.size >= CPC_MAX_ATTEMPTS) {
        return res.status(422).json({
          error_code: 'MAX_ATTEMPTS_REACHED',
          error_message: `Only ${CPC_MAX_ATTEMPTS} validation attempts are allowed per claim.`,
          retryable: false
        });
      }

      // Explicit NEW attempt number, but the quota is already exhausted
      // (re-using an existing attempt number remains allowed).
      if (hasRequestedAttemptNo && !usedAttempts.has(requestedAttemptNo) && usedAttempts.size >= CPC_MAX_ATTEMPTS) {
        return res.status(422).json({
          error_code: 'MAX_ATTEMPTS_REACHED',
          error_message: `Only ${CPC_MAX_ATTEMPTS} validation attempts are allowed per claim.`,
          retryable: false
        });
      }
    }

    const currentAttempt = hasRequestedAttemptNo ? requestedAttemptNo : (usedAttempts.size + 1);

    // Process every file independently; one failure does not abort the batch.
    for (let i = 0; i < files.length; i++) {
      const file = files[i];
      const meta = queue[i] || queue[0]; // Fallback to first meta if queue is shorter than files

      // NOTE(review): an empty parsed metadata_queue ([]) leaves meta undefined
      // and throws here; the per-file catch below converts that into an error result.
      const currentDocType = (meta.document_type || "GENERIC_INVOICE").toUpperCase();
      const expectedPayload = meta.msd_payload || {};
      // UI may pin the exact field keys to extract; otherwise use the payload's keys.
      const rawUiKeys = Array.isArray((meta as { expected_field_keys?: unknown }).expected_field_keys)
        ? (meta as { expected_field_keys: unknown[] }).expected_field_keys
        : Object.keys(expectedPayload);
      // De-duplicated, trimmed, non-empty field keys for the extraction pipeline.
      const expectedFieldKeysForPipeline = [
        ...new Set(
          rawUiKeys.map((k: unknown) => String(k ?? '').trim()).filter(Boolean)
        )
      ];

      try {
        // 1. OCR (Optional)
        let ocrText = "";
        const isDocAiConfigured = process.env.DOC_AI_PROCESSOR_ID && process.env.DOC_AI_PROCESSOR_ID !== "your-processor-id";
        if (isDocAiConfigured && provider !== "GEMINI_VERTEX_DIRECT") {
          try {
            const ocrResult = await cpcOcrService.runDocAIOcr({
              projectId: process.env.GCP_PROJECT_ID!,
              location: process.env.GCP_LOCATION_DOC_AI || 'us',
              processorId: process.env.DOC_AI_PROCESSOR_ID!,
              fileBuffer: file.buffer,
              mimeType: file.mimetype
            });
            ocrText = ocrResult.text;
          } catch (e) {
            // OCR is best-effort; extraction can proceed without it.
            logger.warn(`[CpcController] OCR failed for ${file.originalname}`, e);
          }
        }
        // 1b. PDF text-layer fallback when OCR produced nothing.
        if (!ocrText?.trim() && file.buffer?.length && file.mimetype === 'application/pdf') {
          try {
            const pdfText = await extractPdfTextFromBuffer(file.buffer);
            if (pdfText?.trim()) {
              ocrText = pdfText;
              logger.info(
                `[CpcController] PDF text fallback for ${file.originalname} (${pdfText.length} chars)`
              );
            }
          } catch (e) {
            logger.warn(`[CpcController] pdf-parse failed for ${file.originalname}`, e);
          }
        }
        // 2. Extraction: RULES = local parser on OCR text; otherwise Vertex Gemini (real API — no fake values).
        let extracted: Record<string, unknown> = {};
        let confidence: Record<string, unknown> = {};
        let extractionSource: 'rules_engine' | 'vertex_gemini' | 'degraded_empty' = 'degraded_empty';

        if (provider === "RULES") {
          const ruleOut = CpcRuleExtractService.extractWithRules(ocrText, {
            msdPayload: expectedPayload,
            documentType: currentDocType
          });
          extracted = ruleOut.extracted_fields;
          confidence = ruleOut.field_confidence;
          extractionSource = 'rules_engine';
        } else {
          const projectId = (process.env.GCP_PROJECT_ID || '').trim();
          // Treat placeholder ids like "your-project" as unconfigured.
          const hasVertexProject = Boolean(projectId && !/^your-?project/i.test(projectId));
          if (!hasVertexProject) {
            if (!allowDegradedSave) {
              results.push({
                filename: file.originalname,
                error_code: 'CPC_VERTEX_NOT_CONFIGURED',
                error_message:
                  'Vertex AI extraction requires a valid GCP_PROJECT_ID. Configure Vertex/Gemini, use provider RULES for OCR+rules-only, or set CPC_ALLOW_DEGRADED_SAVE_WITHOUT_AI=true for dev-only saves without AI.'
                ,
                retryable: false
              });
              continue;
            }
            logger.warn(
              `[CpcController] GCP_PROJECT_ID missing — degraded save allowed (CPC_ALLOW_DEGRADED_SAVE_WITHOUT_AI) for ${file.originalname}`
            );
            extracted = {};
            confidence = {};
            extractionSource = 'degraded_empty';
          } else {
            try {
              const geminiOut = await CpcValidationService.extractWithGemini({
                projectId,
                location: process.env.VERTEX_AI_LOCATION || 'asia-south1',
                documentType: currentDocType,
                ocrText,
                fileBuffer: file.buffer,
                mimeType: file.mimetype,
                expectedFields:
                  expectedFieldKeysForPipeline.length > 0
                    ? expectedFieldKeysForPipeline
                    : Object.keys(expectedPayload),
                msdReferencePayload: expectedPayload
              });
              extracted = geminiOut.extracted_fields || {};
              confidence = geminiOut.field_confidence || {};
              extractionSource = 'vertex_gemini';
            } catch (geminiErr: any) {
              // Non-production: never block upload on missing local GCP login / ADC (GoogleAuthError).
              const authOrCred = isLikelyVertexAuthOrCredentialFailure(geminiErr);
              const canDegrade =
                allowDegradedSave || (!isProdRuntime && authOrCred);
              if (!canDegrade) {
                results.push({
                  filename: file.originalname,
                  error_code: 'CPC_EXTRACTION_FAILED',
                  error_message:
                    authOrCred && isProdRuntime
                      ? 'Vertex AI could not authenticate (check GOOGLE_APPLICATION_CREDENTIALS or workload identity).'
                      : geminiErr?.message || 'Gemini/Vertex extraction failed'
                  ,
                  retryable: true
                });
                continue;
              }
              logger.warn(
                `[CpcController] Vertex/Gemini unavailable or failed — saving with empty extraction (${file.originalname})`,
                geminiErr?.message || geminiErr
              );
              extracted = {};
              confidence = {};
              extractionSource = 'degraded_empty';
            }
          }
        }

        // 2b. When not using RULES-only: fill gaps from rule engine on OCR/PDF text (Vertex may omit; Docker OCR often empty).
        if (extractionSource !== 'rules_engine' && ocrText?.trim()) {
          const ruleFill = CpcRuleExtractService.extractWithRules(ocrText, {
            msdPayload: expectedPayload,
            documentType: currentDocType
          });
          const ruleExtracted = ruleFill.extracted_fields as Record<string, unknown>;
          const ruleConfidence = ruleFill.field_confidence as Record<string, unknown>;
          for (const key of Object.keys(ruleExtracted)) {
            // Respect the UI-pinned key list when one was supplied.
            if (
              expectedFieldKeysForPipeline.length > 0 &&
              !expectedFieldKeysForPipeline.includes(key)
            ) {
              continue;
            }
            const rv = ruleExtracted[key];
            const ev = extracted[key];
            // "Empty" covers undefined/null, blank strings, and the literal string "null"
            // (Gemini sometimes returns "null" as text).
            const emptyEv =
              ev === undefined ||
              ev === null ||
              (typeof ev === 'string' && String(ev).trim() === '') ||
              String(ev).toLowerCase() === 'null';
            if (emptyEv && rv != null && String(rv).trim() !== '') {
              extracted[key] = rv;
              if (confidence[key] === undefined || confidence[key] === null) {
                confidence[key] = ruleConfidence[key];
              }
            }
          }
        }

        // 2c. CSD PO: Vertex sometimes returns supplier letterhead as customer_name; OCR often has Sold To / Bill To.
        if (String(currentDocType).toUpperCase().includes('CSD_PO') && ocrText?.trim()) {
          const refined = CpcRuleExtractService.refineCsdPoCustomerName(ocrText, extracted.customer_name);
          const prev = String(extracted.customer_name ?? '').trim();
          if (refined && refined !== prev) {
            extracted.customer_name = refined;
            extracted.authorized_person_name = refined;
            const cc = confidence as Record<string, unknown>;
            // Bump confidence to 0.72 unless it was already at/above 0.68.
            if (cc.customer_name === undefined || cc.customer_name === null || Number(cc.customer_name) < 0.68) {
              cc.customer_name = 0.72;
            }
          }
        }

        // 3. Validation
        const v = CpcValidationService.validateSrs(
          expectedPayload,
          extracted,
          confidence,
          currentDocType,
          targetClaimId,
          currentAttempt,
          expectedFieldKeysForPipeline.length > 0 ? expectedFieldKeysForPipeline : null
        );

        // UI shows validation_status, falling back to the raw status field.
        const uiStatus = v.validation_status || v.status;

        const documentId = randomUUID();
        const storedFileName = buildCpcCsdStoredFileName({
          documentId,
          attemptNo: currentAttempt,
          docType: currentDocType,
          originalName: file.originalname,
          mimetype: file.mimetype,
          fileIndex: i
        });

        // 4. Persist bytes: GCS with local-disk fallback inside the service.
        let docUrl: string;
        try {
          const uploadResult = await gcsStorageService.uploadCpcCsdFileWithFallback({
            buffer: file.buffer,
            originalName: file.originalname,
            mimeType: file.mimetype,
            channel: storageChannel,
            bookingSegment: safeBookingSeg,
            fileName: storedFileName
          });
          docUrl = uploadResult.storageUrl;
          const fp = uploadResult.filePath || '';
          // Sanity check that the service used the expected nested layout.
          const nestedOk =
            fp.includes('/csd/') ||
            fp.includes('/cpc/') ||
            docUrl.startsWith('https://') ||
            docUrl.startsWith('http://');
          if (!nestedOk) {
            logger.warn(
              '[CpcController] Unexpected CPC/CSD storage path (expected cpc-csd-files/csd|cpc/.../documents/). Check you are not hitting an old API process.',
              { filePath: fp, storageUrl: docUrl }
            );
          }
        } catch (e) {
          logger.error(
            `[CpcController] Could not persist CPC/CSD file for ${file.originalname}`,
            e
          );
          // Placeholder ref so the DB row can still be created; not previewable.
          docUrl = `local://temp/${file.originalname}`;
        }

        // 5. Persist the validation result row.
        const saved = await CpcDocument.create({
          id: documentId,
          // Multi-file uploads get per-file suffixed booking ids to stay unique.
          bookingId: files.length > 1 ? `${targetClaimId}-${i + 1}` : (booking_id || targetClaimId),
          claimId: targetClaimId,
          attemptNo: currentAttempt,
          documentType: currentDocType,
          documentGcpUrl: docUrl,
          provider: provider || "GEMINI_VERTEX",
          msdPayload: expectedPayload,
          extractedFields: extracted,
          fieldConfidence: confidence,
          validationStatus: uiStatus,
          matchPercentage: v.match_percentage,
          ipAddress: String(ipAddress || ''),
          mismatchReasons: v.mismatch_reasons,
          fieldResults: v.field_results
        });

        // 6. Audit trail for the upload.
        await CpcAuditLog.create({
          documentId: saved.id,
          action: 'UPLOADED',
          performedBy: clientId,
          newState: {
            status: uiStatus,
            match: v.match_percentage,
            attempt: currentAttempt,
            request_id: requestId,
            client_id: clientId,
            timestamp: new Date().toISOString()
          },
          remarks: `Document ${file.originalname} uploaded via unified pipeline (Attempt ${currentAttempt})`
        });

        results.push({
          document_id: saved.id,
          booking_id: saved.bookingId,
          claim_id: saved.claimId,
          attempt_no: saved.attemptNo,
          status: v.status,
          validation_status: v.validation_status,
          match_percentage: v.match_percentage,
          overall_match_percentage: v.match_percentage,
          threshold: v.threshold,
          mismatch_summary: v.mismatch_summary,
          mismatch_reasons: v.mismatch_reasons,
          extracted_fields: extracted,
          field_confidence: confidence,
          field_results: v.field_results,
          extraction_source: extractionSource
        });
      } catch (fileErr: any) {
        // Per-file failure: record it and keep processing the rest of the batch.
        logger.error(`[CpcController] Failed processing file ${file.originalname}`, fileErr);
        results.push({
          filename: file.originalname,
          error: fileErr?.message || 'Processing failed',
          error_code: 'INTERNAL_SERVER_ERROR',
          error_message: fileErr?.message || 'Processing failed',
          retryable: true
        });
      }
    }

    // Return legacy compatible bulk response if multiple files, or single object if one file
    if (files.length > 1) {
      return res.json({
        count: results.length,
        results: results
      });
    } else {
      const single = results[0];
      // A missing or error-shaped single result becomes a 422 with its own error fields.
      if (!single || (single as { error?: string }).error) {
        return res.status(422).json({
          error_code: (single as { error_code?: string })?.error_code || 'UPLOAD_FAILED',
          error_message: (single as { error_message?: string })?.error_message || (single as { error?: string })?.error || 'Upload failed',
          retryable: (single as { retryable?: boolean })?.retryable ?? true
        });
      }
      return res.json(single);
    }

  } catch (error: any) {
    logger.error("[CpcController] validateDocumentUpload Error:", error);
    return res.status(500).json({
      error_code: 'SERVER_ERROR',
      error_message: error.message || 'Internal Server Error',
      retryable: true
    });
  }
}
|
|
|
|
|
|
/**
|
|
* Get recent documents for the dashboard
|
|
*/
|
|
async getRecentDocuments(req: Request, res: Response) {
|
|
try {
|
|
const { search, status, type, limit = 50, page, sortBy, order } = req.query;
|
|
const take = parseInt(limit as string);
|
|
const pageNum = parseInt(page as string || '1');
|
|
const skip = (pageNum - 1) * take;
|
|
|
|
const andParts: Record<string, unknown>[] = [];
|
|
appendCpcDocumentFilters(andParts, {
|
|
type: type as string,
|
|
status: status as string,
|
|
search: search as string,
|
|
searchIncludeId: true
|
|
});
|
|
const where = cpcWhereFromAndParts(andParts);
|
|
|
|
const validSortFields = ['id', 'bookingId', 'createdAt', 'documentType', 'validationStatus', 'claimId', 'matchPercentage'];
|
|
const sortKey = typeof sortBy === 'string' && validSortFields.includes(sortBy) ? sortBy : 'createdAt';
|
|
const sortDir = order === 'asc' ? 'ASC' : 'DESC';
|
|
|
|
const { count, rows } = await CpcDocument.findAndCountAll({
|
|
where,
|
|
limit: take,
|
|
offset: skip,
|
|
order: [[sortKey, sortDir]]
|
|
});
|
|
|
|
const enriched = rows.map((doc: any, idx: number) => {
|
|
const docJson = doc.toJSON();
|
|
return {
|
|
...docJson,
|
|
summary: CpcHistoryService.getSummaryRow(docJson, skip + idx)
|
|
};
|
|
});
|
|
|
|
|
|
return res.json({
|
|
items: enriched,
|
|
meta: {
|
|
total: count,
|
|
page: pageNum,
|
|
limit: take,
|
|
pages: Math.ceil(count / take)
|
|
}
|
|
});
|
|
} catch (error: any) {
|
|
logger.error("[CpcController] getRecentDocuments Error:", error);
|
|
return res.status(500).json({
|
|
error_code: 'INTERNAL_SERVER_ERROR',
|
|
error_message: 'Failed to fetch documents',
|
|
retryable: true
|
|
});
|
|
}
|
|
}
|
|
|
|
/**
 * Stream original upload bytes (GCS or local fallback). Used by UI preview; gs:// is not loadable in a browser iframe.
 *
 * Resolution order for the stored ref: gs:// (server-side download) ->
 * /uploads/... on disk -> cpc-local: on disk -> http(s) redirect -> 404.
 */
async getDocumentFile(req: Request, res: Response) {
  try {
    const { id } = req.params;
    const document = await CpcDocument.findByPk(id);
    if (!document) {
      return res.status(404).json({
        error_code: 'DOCUMENT_NOT_FOUND',
        error_message: 'Document not found',
        retryable: false
      });
    }

    const ref = document.documentGcpUrl || '';

    // Case 1: canonical GCS object — download server-side and send inline.
    if (ref.startsWith('gs://')) {
      try {
        const buf = await cpcGcsService.downloadFromGcs(ref);
        const ct = contentTypeFromPath(ref);
        res.setHeader('Content-Type', ct);
        // Drop any query string before taking the basename for the filename hint.
        // NOTE(review): basename is embedded unescaped in the header value;
        // assumes stored names contain no quotes/CR/LF — confirm upstream sanitization.
        const base = path.basename(ref.split('?')[0] || '') || 'document';
        res.setHeader('Content-Disposition', `inline; filename="${base}"`);
        return res.status(200).send(buf);
      } catch (err: unknown) {
        logger.error('[CpcController] getDocumentFile GCS download failed', err);
        return res.status(502).json({
          error_code: 'DOCUMENT_FETCH_FAILED',
          error_message: 'Could not read file from storage',
          retryable: true
        });
      }
    }

    // Case 2: workflow-style /uploads/ URL stored when GCS was unavailable.
    const uploadsAbs = resolveUploadsDirFromPublicUrl(ref);
    if (uploadsAbs && fs.existsSync(uploadsAbs)) {
      res.setHeader('Content-Type', contentTypeFromPath(uploadsAbs));
      res.setHeader('Content-Disposition', 'inline');
      return res.status(200).sendFile(path.resolve(uploadsAbs));
    }

    // Case 3: cpc-local: scheme pointing into the local CPC upload dir.
    const localAbs = resolveCpcLocalDiskPath(ref);
    if (localAbs && fs.existsSync(localAbs)) {
      res.setHeader('Content-Type', contentTypeFromPath(localAbs));
      res.setHeader('Content-Disposition', 'inline');
      return res.status(200).sendFile(path.resolve(localAbs));
    }

    // Case 4: already a public http(s) URL — let the browser fetch it directly.
    if (ref.startsWith('http://') || ref.startsWith('https://')) {
      return res.redirect(302, ref);
    }

    // Nothing resolvable (e.g. the local://temp/ placeholder from a failed save).
    return res.status(404).json({
      error_code: 'NO_PREVIEWABLE_FILE',
      error_message: 'No previewable file for this document',
      retryable: false
    });
  } catch (error: unknown) {
    logger.error('[CpcController] getDocumentFile Error:', error);
    return res.status(500).json({
      error_code: 'INTERNAL_SERVER_ERROR',
      error_message: 'Failed to stream document',
      retryable: true
    });
  }
}
|
|
|
|
/**
|
|
* Get single document details including analytics breakdown
|
|
*/
|
|
async getDocumentById(req: Request, res: Response) {
|
|
try {
|
|
const { id } = req.params;
|
|
const document = await CpcDocument.findByPk(id, {
|
|
include: [{ model: CpcAuditLog, as: 'auditLogs' }]
|
|
});
|
|
|
|
if (!document) {
|
|
return res.status(404).json({
|
|
error_code: 'DOCUMENT_NOT_FOUND',
|
|
error_message: 'Document not found',
|
|
retryable: false
|
|
});
|
|
}
|
|
|
|
const docJson = document.toJSON();
|
|
const enriched = {
|
|
...docJson,
|
|
field_results: CpcHistoryService.getDetailedFieldResults(docJson)
|
|
};
|
|
|
|
return res.json(enriched);
|
|
} catch (error: any) {
|
|
logger.error("[CpcController] getDocumentById Error:", error);
|
|
return res.status(500).json({
|
|
error_code: 'INTERNAL_SERVER_ERROR',
|
|
error_message: 'Failed to fetch document',
|
|
retryable: true
|
|
});
|
|
}
|
|
}
|
|
|
|
/**
 * Get dashboard analytics
 *
 * Aggregates: status distribution + pass rate, top-5 mismatch fields from the
 * most recent 200 failed documents, and daily upload volume over the last 7 days.
 */
async getAnalytics(req: Request, res: Response) {
  try {
    // Count documents grouped by validation status.
    const statusCounts: any = await CpcDocument.findAll({
      attributes: [
        'validationStatus',
        [sequelize.fn('COUNT', sequelize.col('id')), 'count']
      ],
      group: ['validationStatus']
    });

    const distribution: any = {};
    let totalDocs = 0;
    statusCounts.forEach((item: any) => {
      const status = item.get('validationStatus');
      const count = parseInt(item.get('count'));
      distribution[status] = count;
      totalDocs += count;
    });

    // Pass rate treats MATCH/APPROVED/SUCCESSFUL as passing, rounded to whole percent.
    const matchCount = (distribution['MATCH'] || 0) + (distribution['APPROVED'] || 0) + (distribution['SUCCESSFUL'] || 0);
    const passRate = totalDocs > 0 ? Math.round((matchCount / totalDocs) * 100) : 0;

    // Sample the 200 most recent failed documents for mismatch-field stats.
    const mismatchDocs = await CpcDocument.findAll({
      where: {
        validationStatus: { [Op.in]: ['MISMATCH', 'REJECTED', 'UNSUCCESSFUL'] }
      },
      attributes: ['mismatchReasons'],
      limit: 200,
      order: [['createdAt', 'DESC']]
    });

    // Tally how often each field appears in mismatch reasons.
    const fieldErrors: Record<string, number> = {};
    mismatchDocs.forEach((doc: any) => {
      const reasons = doc.mismatchReasons;
      if (Array.isArray(reasons)) {
        reasons.forEach((reason: { field?: string }) => {
          const field = reason.field;
          if (field) {
            fieldErrors[field] = (fieldErrors[field] || 0) + 1;
          }
        });
      }
    });

    // Top 5 fields by mismatch count, descending.
    const topMismatchFields = Object.entries(fieldErrors)
      .sort(([, a], [, b]) => b - a)
      .slice(0, 5)
      .map(([field, count]) => ({ field, count }));

    const sevenDaysAgo = new Date();
    sevenDaysAgo.setDate(sevenDaysAgo.getDate() - 7);

    // Daily upload counts for the trailing week, grouped by calendar date.
    // NOTE(review): groups by the DB column `created_at` (snake_case mapping
    // of createdAt) — assumes underscored column naming; confirm model config.
    const dailyDocs: any = await CpcDocument.findAll({
      attributes: [
        [sequelize.fn('DATE', sequelize.col('created_at')), 'date'],
        [sequelize.fn('COUNT', sequelize.col('id')), 'count']
      ],
      where: { createdAt: { [Op.gte]: sevenDaysAgo } },
      group: [sequelize.fn('DATE', sequelize.col('created_at'))],
      order: [[sequelize.fn('DATE', sequelize.col('created_at')), 'ASC']]
    });

    const dailyVolume = dailyDocs.map((d: any) => ({
      date: d.get('date'),
      count: parseInt(d.get('count'))
    }));

    return res.json({
      totalDocs,
      passRate,
      distribution,
      topMismatchFields,
      dailyVolume
    });
  } catch (error: any) {
    logger.error("[CpcController] getAnalytics Error:", error);
    return res.status(500).json({
      error_code: 'INTERNAL_SERVER_ERROR',
      error_message: 'Failed to fetch analytics',
      retryable: true
    });
  }
}
|
|
|
|
/**
|
|
* Manually override validation status
|
|
*/
|
|
async updateDocumentStatus(req: Request, res: Response) {
|
|
try {
|
|
const { id } = req.params;
|
|
const { status, remarks, correctedFields } = req.body;
|
|
|
|
const document = await CpcDocument.findByPk(id);
|
|
if (!document) {
|
|
return res.status(404).json({
|
|
error_code: 'DOCUMENT_NOT_FOUND',
|
|
error_message: 'Document not found',
|
|
retryable: false
|
|
});
|
|
}
|
|
|
|
const previousStatus = document.validationStatus;
|
|
|
|
await document.update({
|
|
validationStatus: status,
|
|
extractedFields: correctedFields || document.extractedFields,
|
|
mismatchReasons: remarks ? [{ field: 'MANUAL_REVIEW', expected: '-', actual: remarks }] : document.mismatchReasons
|
|
});
|
|
|
|
const statusRequestId = String(req.headers['x-request-id'] || randomUUID());
|
|
const statusClientId = String(req.headers['x-client-id'] || (req as any).user?.email || 'unknown');
|
|
|
|
await CpcAuditLog.create({
|
|
documentId: id,
|
|
action: 'STATUS_UPDATED',
|
|
performedBy: statusClientId,
|
|
previousState: { status: previousStatus },
|
|
newState: {
|
|
status,
|
|
request_id: statusRequestId,
|
|
client_id: statusClientId,
|
|
timestamp: new Date().toISOString()
|
|
},
|
|
remarks: remarks || `Status manual update to ${status}`
|
|
});
|
|
|
|
return res.json(document);
|
|
} catch (error: any) {
|
|
logger.error("[CpcController] updateDocumentStatus Error:", error);
|
|
return res.status(500).json({
|
|
error_code: 'INTERNAL_SERVER_ERROR',
|
|
error_message: 'Failed to update status',
|
|
retryable: true
|
|
});
|
|
}
|
|
}
|
|
|
|
/**
|
|
* Delete document record
|
|
*/
|
|
async deleteDocument(req: Request, res: Response) {
|
|
try {
|
|
const { id } = req.params;
|
|
const document = await CpcDocument.findByPk(id);
|
|
if (!document) {
|
|
return res.status(404).json({
|
|
error_code: 'DOCUMENT_NOT_FOUND',
|
|
error_message: 'Document not found',
|
|
retryable: false
|
|
});
|
|
}
|
|
|
|
const ref = document.documentGcpUrl || '';
|
|
const uploadsAbs = resolveUploadsDirFromPublicUrl(ref);
|
|
if (uploadsAbs && fs.existsSync(uploadsAbs)) {
|
|
try {
|
|
fs.unlinkSync(uploadsAbs);
|
|
} catch (e) {
|
|
logger.warn('[CpcController] Could not delete CPC/CSD file under uploads', e);
|
|
}
|
|
}
|
|
const localAbs = resolveCpcLocalDiskPath(ref);
|
|
if (localAbs && fs.existsSync(localAbs)) {
|
|
try {
|
|
fs.unlinkSync(localAbs);
|
|
} catch (e) {
|
|
logger.warn('[CpcController] Could not delete local CPC file', e);
|
|
}
|
|
}
|
|
|
|
await document.destroy();
|
|
return res.json({ success: true, message: 'Document deleted successfully' });
|
|
} catch (error: any) {
|
|
logger.error("[CpcController] deleteDocument Error:", error);
|
|
return res.status(500).json({
|
|
error_code: 'INTERNAL_SERVER_ERROR',
|
|
error_message: 'Failed to delete document',
|
|
retryable: false
|
|
});
|
|
}
|
|
}
|
|
|
|
/**
|
|
* Fetch all validation attempts for a claim
|
|
*/
|
|
async getClaimHistory(req: Request, res: Response) {
|
|
try {
|
|
const { claimId } = req.query;
|
|
if (!claimId) {
|
|
return res.status(400).json({
|
|
error_code: 'MISSING_CLAIM_ID',
|
|
error_message: 'claimId is required',
|
|
retryable: false
|
|
});
|
|
}
|
|
|
|
const documents = await CpcDocument.findAll({
|
|
where: { [Op.or]: [{ claimId: claimId as string }, { bookingId: claimId as string }] },
|
|
order: [['attemptNo', 'ASC'], ['createdAt', 'DESC']]
|
|
});
|
|
|
|
const attemptsMap: any = {};
|
|
documents.forEach((doc: any) => {
|
|
const attNum = doc.attemptNo || 1;
|
|
if (!attemptsMap[attNum]) attemptsMap[attNum] = [];
|
|
attemptsMap[attNum].push(doc.toJSON());
|
|
});
|
|
|
|
const sortedAttempts = Object.keys(attemptsMap)
|
|
.sort((a, b) => parseInt(a) - parseInt(b))
|
|
.map((num: string) => {
|
|
const docsInAttempt = attemptsMap[num];
|
|
return {
|
|
attempt_no: parseInt(num),
|
|
created_at: docsInAttempt[0].createdAt,
|
|
documents: docsInAttempt.map((d: any) => ({
|
|
...d,
|
|
field_results: CpcHistoryService.getDetailedFieldResults(d)
|
|
})),
|
|
summary_report_rows: docsInAttempt.map((d: any, idx: number) => CpcHistoryService.getSummaryRow(d, idx))
|
|
};
|
|
});
|
|
|
|
|
|
return res.json({ claimId, attempts: sortedAttempts });
|
|
} catch (error: any) {
|
|
logger.error("[CpcController] getClaimHistory Error:", error);
|
|
return res.status(500).json({
|
|
error_code: 'INTERNAL_SERVER_ERROR',
|
|
error_message: 'Failed to fetch history',
|
|
retryable: true
|
|
});
|
|
}
|
|
}
|
|
|
|
/**
 * CPC-CSD `POST /api/v1/ocr/validate` — URL-based validation using
 * document_gcp_url + msd_payload.
 *
 * Downloads the document from GCS, wraps it as a synthetic multer file, then
 * delegates to the regular multipart flow (`validateDocumentUpload`).
 * Enforces the per-claim attempt cap when `isCpcAttemptLimitEnforced()`.
 */
async validateDocumentByUrlStub(req: Request, res: Response) {
  try {
    const {
      claim_id,
      booking_id,
      document_type,
      document_gcp_url,
      msd_payload,
      provider
    } = req.body || {};

    // A claim may be addressed by either identifier; claim_id wins when both given.
    const targetClaimId = claim_id || booking_id;
    if (!targetClaimId) {
      return res.status(400).json({
        error_code: 'MISSING_CLAIM_ID',
        error_message: 'claim_id or booking_id is required',
        retryable: false
      });
    }

    if (!document_gcp_url || typeof document_gcp_url !== 'string') {
      return res.status(400).json({
        error_code: 'INVALID_DOCUMENT_URL',
        error_message: 'document_gcp_url is required and must be a gs:// path',
        retryable: false
      });
    }

    // msd_payload may arrive as a JSON string (multipart forms) or an object.
    let payload = {};
    try {
      payload = typeof msd_payload === 'string' ? JSON.parse(msd_payload) : (msd_payload || {});
    } catch {
      return res.status(400).json({
        error_code: 'INVALID_PAYLOAD',
        error_message: 'msd_payload must be valid JSON',
        retryable: false
      });
    }

    const requestedAttemptNo = Number((req.body as any)?.attempt_no);
    const hasRequestedAttemptNo = Number.isFinite(requestedAttemptNo) && requestedAttemptNo > 0;
    // NOTE(review): this lookup filters on claimId only, while getClaimHistory
    // matches claimId OR bookingId — confirm documents are always persisted
    // with claimId = targetClaimId so the attempt count cannot diverge.
    const attemptRows = await CpcDocument.findAll({
      attributes: ['attemptNo'],
      where: { claimId: targetClaimId },
      group: ['attemptNo'],
      raw: true
    }) as Array<{ attemptNo?: number }>;
    const usedAttempts = new Set(
      attemptRows
        .map((r) => Number(r?.attemptNo || 0))
        .filter((n) => Number.isFinite(n) && n > 0)
    );

    if (isCpcAttemptLimitEnforced()) {
      // Consolidated from three branches that returned byte-identical 422
      // bodies. Reject when the caller explicitly requests an attempt number
      // beyond the cap, or would open a NEW attempt while the cap is already
      // reached. Re-running an already-used attempt number stays allowed.
      const wouldOpenNewAttempt = !hasRequestedAttemptNo || !usedAttempts.has(requestedAttemptNo);
      const exceedsCap =
        (hasRequestedAttemptNo && requestedAttemptNo > CPC_MAX_ATTEMPTS) ||
        (wouldOpenNewAttempt && usedAttempts.size >= CPC_MAX_ATTEMPTS);
      if (exceedsCap) {
        return res.status(422).json({
          error_code: 'MAX_ATTEMPTS_REACHED',
          error_message: `Only ${CPC_MAX_ATTEMPTS} validation attempts are allowed per claim`,
          retryable: false
        });
      }
    }

    // Explicit attempt_no is honoured; otherwise the next free slot is used.
    const currentAttempt = hasRequestedAttemptNo ? requestedAttemptNo : (usedAttempts.size + 1);

    let fileBuffer: Buffer;
    try {
      fileBuffer = await cpcGcsService.downloadFromGcs(String(document_gcp_url));
    } catch (error: any) {
      return res.status(422).json({
        error_code: error?.message === 'INVALID_DOCUMENT_URL' ? 'INVALID_DOCUMENT_URL' : 'DOCUMENT_FETCH_FAILED',
        error_message: error?.message || 'Unable to fetch document from GCS',
        retryable: true
      });
    }

    // Wrap the downloaded bytes as a synthetic multer file so the regular
    // upload handler can be reused unchanged.
    const tempFile: Express.Multer.File = {
      fieldname: 'file',
      originalname: path.basename(String(document_gcp_url)),
      encoding: '7bit',
      mimetype: contentTypeFromPath(String(document_gcp_url)),
      size: fileBuffer.length,
      buffer: fileBuffer,
      stream: undefined as any,
      destination: '',
      filename: '',
      path: ''
    };

    (req as any).file = tempFile;
    (req as any).files = [tempFile];
    req.body = {
      ...req.body,
      booking_id: booking_id || claim_id,
      claim_id: targetClaimId,
      attempt_no: currentAttempt,
      skip_min_attachment_check: 'true',
      provider: provider || 'GEMINI_VERTEX',
      document_type: document_type || 'GENERIC_INVOICE',
      msd_payload: payload
    };

    // Delegate to the standard multipart upload/validation flow.
    return this.validateDocumentUpload(req, res);
  } catch (error: any) {
    logger.error('[CpcController] validateDocumentByUrl Error:', error);
    return res.status(500).json({
      error_code: 'INTERNAL_SERVER_ERROR',
      error_message: error?.message || 'Internal server error',
      retryable: true
    });
  }
}
|
|
|
|
/** CPC-CSD `POST /api/upload` — same GCS vs local behaviour as workflow requests; returns `gcsUrl` + `storageUrl`. */
|
|
async uploadBareFile(req: Request, res: Response) {
|
|
try {
|
|
const file = req.file;
|
|
if (!file) {
|
|
return res.status(400).json({
|
|
error_code: 'NO_FILE_UPLOADED',
|
|
error_message: 'No file uploaded',
|
|
retryable: false
|
|
});
|
|
}
|
|
const uploadResult = await gcsStorageService.uploadCpcCsdFileWithFallback({
|
|
buffer: file.buffer,
|
|
originalName: file.originalname,
|
|
mimeType: file.mimetype,
|
|
channel: 'cpc',
|
|
bookingSegment: 'bare-upload'
|
|
});
|
|
const m = uploadResult.storageUrl.match(/^https:\/\/storage\.googleapis\.com\/([^/]+)\/(.+)$/);
|
|
const gcsUrl = m ? `gs://${m[1]}/${m[2]}` : uploadResult.storageUrl;
|
|
return res.json({
|
|
gcsUrl,
|
|
storageUrl: uploadResult.storageUrl,
|
|
filePath: uploadResult.filePath
|
|
});
|
|
} catch (error: unknown) {
|
|
const msg = error instanceof Error ? error.message : 'UPLOAD_FAILED';
|
|
return res.status(500).json({
|
|
error_code: 'UPLOAD_FAILED',
|
|
error_message: msg,
|
|
retryable: true
|
|
});
|
|
}
|
|
}
|
|
}
|
|
|
|
// Singleton controller instance wired into the CPC-CSD routes.
export const cpcCdcController = new CpcCdcController();
|