GCP-related changes for the preview

laxmanhalaki 2025-12-24 11:02:05 +05:30
parent 53302fea21
commit 90fe2c8e87
5 changed files with 454 additions and 38 deletions

View File

@@ -96,16 +96,84 @@ export class DocumentController {
}
}
const doc = await Document.create({
// Check if storageUrl exceeds database column limit (500 chars)
// GCS signed URLs can be very long (500-1000+ chars)
const MAX_STORAGE_URL_LENGTH = 500;
let finalStorageUrl = storageUrl;
if (storageUrl && storageUrl.length > MAX_STORAGE_URL_LENGTH) {
logWithContext('warn', 'Storage URL exceeds database column limit, truncating', {
originalLength: storageUrl.length,
maxLength: MAX_STORAGE_URL_LENGTH,
urlPrefix: storageUrl.substring(0, 100),
});
// For signed URLs, we can't truncate as it will break the URL
// Instead, store null and generate signed URLs on-demand when needed
// The filePath is sufficient to generate a new signed URL later
finalStorageUrl = null as any;
logWithContext('info', 'Storing null storageUrl - will generate signed URL on-demand', {
filePath: gcsFilePath,
reason: 'Signed URL too long for database column',
});
}
// Truncate file names if they exceed database column limits (255 chars)
const MAX_FILE_NAME_LENGTH = 255;
const originalFileName = file.originalname;
let truncatedOriginalFileName = originalFileName;
if (originalFileName.length > MAX_FILE_NAME_LENGTH) {
// Preserve file extension when truncating
const ext = path.extname(originalFileName);
const nameWithoutExt = path.basename(originalFileName, ext);
const maxNameLength = MAX_FILE_NAME_LENGTH - ext.length;
if (maxNameLength > 0) {
truncatedOriginalFileName = nameWithoutExt.substring(0, maxNameLength) + ext;
} else {
// If the extension alone exceeds the limit, fall back to a truncated extension
truncatedOriginalFileName = ext.substring(0, MAX_FILE_NAME_LENGTH);
}
logWithContext('warn', 'File name truncated to fit database column', {
originalLength: originalFileName.length,
truncatedLength: truncatedOriginalFileName.length,
originalName: originalFileName.substring(0, 100) + '...',
truncatedName: truncatedOriginalFileName,
});
}
// Generate fileName (basename of the generated file name in GCS)
const generatedFileName = path.basename(gcsFilePath);
let truncatedFileName = generatedFileName;
if (generatedFileName.length > MAX_FILE_NAME_LENGTH) {
const ext = path.extname(generatedFileName);
const nameWithoutExt = path.basename(generatedFileName, ext);
const maxNameLength = MAX_FILE_NAME_LENGTH - ext.length;
if (maxNameLength > 0) {
truncatedFileName = nameWithoutExt.substring(0, maxNameLength) + ext;
} else {
truncatedFileName = ext.substring(0, MAX_FILE_NAME_LENGTH);
}
logWithContext('warn', 'Generated file name truncated', {
originalLength: generatedFileName.length,
truncatedLength: truncatedFileName.length,
});
}
// Prepare document data
const documentData = {
requestId,
uploadedBy: userId,
fileName: path.basename(file.filename || file.originalname),
originalFileName: file.originalname,
fileName: truncatedFileName,
originalFileName: truncatedOriginalFileName,
fileType: extension,
fileExtension: extension,
fileSize: file.size,
filePath: gcsFilePath, // Store GCS path or local path
storageUrl: storageUrl, // Store GCS URL or local URL
storageUrl: finalStorageUrl, // Store GCS URL or local URL (null if too long)
mimeType: file.mimetype,
checksum,
isGoogleDoc: false,
@@ -115,7 +183,43 @@ export class DocumentController {
parentDocumentId: null as any,
isDeleted: false,
downloadCount: 0,
} as any);
};
logWithContext('info', 'Creating document record', {
requestId,
userId,
fileName: file.originalname,
filePath: gcsFilePath,
storageUrl: storageUrl,
documentData: JSON.stringify(documentData, null, 2),
});
let doc;
try {
doc = await Document.create(documentData as any);
logWithContext('info', 'Document record created successfully', {
documentId: doc.documentId,
requestId,
fileName: file.originalname,
});
} catch (createError) {
const createErrorMessage = createError instanceof Error ? createError.message : 'Unknown error';
const createErrorStack = createError instanceof Error ? createError.stack : undefined;
// Check if it's a Sequelize validation error
const sequelizeError = (createError as any)?.errors || (createError as any)?.parent;
logWithContext('error', 'Document.create() failed', {
error: createErrorMessage,
stack: createErrorStack,
sequelizeErrors: sequelizeError,
requestId,
userId,
fileName: file.originalname,
filePath: gcsFilePath,
storageUrl: storageUrl,
documentData: JSON.stringify(documentData, null, 2),
});
throw createError; // Re-throw to be caught by outer catch block
}
// Log document upload event
logDocumentEvent('uploaded', doc.documentId, {
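
The hunk above stores a null storageUrl when a signed URL would overflow the column and notes that filePath is enough to regenerate one later. A minimal sketch of that on-demand regeneration with @google-cloud/storage, reusing the same GCP_* environment variables this commit already reads (the helper name getSignedUrlForPath is illustrative, not part of this commit):

import { Storage } from '@google-cloud/storage';

const storage = new Storage({
  projectId: process.env.GCP_PROJECT_ID || '',
  keyFilename: process.env.GCP_KEY_FILE || '',
});

// Regenerate a short-lived V4 signed read URL from the stored GCS filePath.
async function getSignedUrlForPath(filePath: string): Promise<string> {
  const bucket = storage.bucket(process.env.GCP_BUCKET_NAME || '');
  const [url] = await bucket.file(filePath).getSignedUrl({
    version: 'v4',
    action: 'read',
    expires: Date.now() + 15 * 60 * 1000, // valid for 15 minutes
  });
  return url;
}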

View File

@@ -587,11 +587,28 @@ export class WorkflowController {
}
// Update workflow
const workflow = await workflowService.updateWorkflow(id, updateData);
let workflow;
try {
workflow = await workflowService.updateWorkflow(id, updateData);
if (!workflow) {
ResponseHandler.notFound(res, 'Workflow not found');
return;
}
logger.info('[WorkflowController] Workflow updated successfully', {
requestId: id,
workflowId: (workflow as any).requestId,
});
} catch (updateError) {
const updateErrorMessage = updateError instanceof Error ? updateError.message : 'Unknown error';
const updateErrorStack = updateError instanceof Error ? updateError.stack : undefined;
logger.error('[WorkflowController] updateWorkflow failed', {
error: updateErrorMessage,
stack: updateErrorStack,
requestId: id,
updateData: JSON.stringify(updateData, null, 2),
});
throw updateError; // Re-throw to be caught by outer catch block
}
// Attach new files as documents
const files = (req as any).files as Express.Multer.File[] | undefined;
@@ -627,23 +644,85 @@ export class WorkflowController {
}
}
logger.info('[Workflow] Creating document record', {
fileName: file.originalname,
// Truncate file names if they exceed database column limits (255 chars)
const MAX_FILE_NAME_LENGTH = 255;
const originalFileName = file.originalname;
let truncatedOriginalFileName = originalFileName;
if (originalFileName.length > MAX_FILE_NAME_LENGTH) {
// Preserve file extension when truncating
const ext = path.extname(originalFileName);
const nameWithoutExt = path.basename(originalFileName, ext);
const maxNameLength = MAX_FILE_NAME_LENGTH - ext.length;
if (maxNameLength > 0) {
truncatedOriginalFileName = nameWithoutExt.substring(0, maxNameLength) + ext;
} else {
// If the extension alone exceeds the limit, fall back to a truncated extension
truncatedOriginalFileName = ext.substring(0, MAX_FILE_NAME_LENGTH);
}
logger.warn('[Workflow] File name truncated to fit database column', {
originalLength: originalFileName.length,
truncatedLength: truncatedOriginalFileName.length,
originalName: originalFileName.substring(0, 100) + '...',
truncatedName: truncatedOriginalFileName,
});
}
// Generate fileName (basename of the generated file name in GCS)
const generatedFileName = path.basename(gcsFilePath);
let truncatedFileName = generatedFileName;
if (generatedFileName.length > MAX_FILE_NAME_LENGTH) {
const ext = path.extname(generatedFileName);
const nameWithoutExt = path.basename(generatedFileName, ext);
const maxNameLength = MAX_FILE_NAME_LENGTH - ext.length;
if (maxNameLength > 0) {
truncatedFileName = nameWithoutExt.substring(0, maxNameLength) + ext;
} else {
truncatedFileName = ext.substring(0, MAX_FILE_NAME_LENGTH);
}
logger.warn('[Workflow] Generated file name truncated', {
originalLength: generatedFileName.length,
truncatedLength: truncatedFileName.length,
});
}
// Check if storageUrl exceeds database column limit (500 chars)
const MAX_STORAGE_URL_LENGTH = 500;
let finalStorageUrl = storageUrl;
if (storageUrl && storageUrl.length > MAX_STORAGE_URL_LENGTH) {
logger.warn('[Workflow] Storage URL exceeds database column limit, storing null', {
originalLength: storageUrl.length,
maxLength: MAX_STORAGE_URL_LENGTH,
urlPrefix: storageUrl.substring(0, 100),
filePath: gcsFilePath,
storageUrl: storageUrl,
});
// For signed URLs, store null and generate on-demand later
finalStorageUrl = null as any;
}
logger.info('[Workflow] Creating document record', {
fileName: truncatedOriginalFileName,
filePath: gcsFilePath,
storageUrl: finalStorageUrl ? 'present' : 'null (too long)',
requestId: actualRequestId
});
try {
const doc = await Document.create({
requestId: actualRequestId,
uploadedBy: userId,
fileName: path.basename(file.filename || file.originalname),
originalFileName: file.originalname,
fileName: truncatedFileName,
originalFileName: truncatedOriginalFileName,
fileType: extension,
fileExtension: extension,
fileSize: file.size,
filePath: gcsFilePath, // Store GCS path or local path
storageUrl: storageUrl, // Store GCS URL or local URL
storageUrl: finalStorageUrl, // Store GCS URL or local URL (null if too long)
mimeType: file.mimetype,
checksum,
isGoogleDoc: false,
@@ -655,12 +734,39 @@ export class WorkflowController {
downloadCount: 0,
} as any);
docs.push(doc);
logger.info('[Workflow] Document record created successfully', {
documentId: doc.documentId,
fileName: file.originalname,
});
} catch (docError) {
const docErrorMessage = docError instanceof Error ? docError.message : 'Unknown error';
const docErrorStack = docError instanceof Error ? docError.stack : undefined;
logger.error('[Workflow] Failed to create document record', {
error: docErrorMessage,
stack: docErrorStack,
fileName: file.originalname,
requestId: actualRequestId,
filePath: gcsFilePath,
storageUrl: storageUrl,
});
// Continue with other files, but log the error
// Don't throw here - let the workflow update complete
}
}
}
ResponseHandler.success(res, { workflow, newDocuments: docs }, 'Workflow updated with documents', 200);
} catch (error) {
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
const errorStack = error instanceof Error ? error.stack : undefined;
logger.error('[WorkflowController] updateWorkflowMultipart failed', {
error: errorMessage,
stack: errorStack,
requestId: req.params.id,
userId: req.user?.userId,
hasFiles: !!(req as any).files && (req as any).files.length > 0,
fileCount: (req as any).files ? (req as any).files.length : 0,
});
ResponseHandler.error(res, 'Failed to update workflow', 400, errorMessage);
}
}
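
The extension-preserving truncation above is duplicated between DocumentController and WorkflowController; a small shared helper along these lines would capture the same technique (truncateFileName is a hypothetical name, not part of this commit):

import path from 'path';

// Truncate a file name to maxLength characters while preserving its extension.
// If the extension alone exceeds the limit, fall back to a truncated extension.
function truncateFileName(name: string, maxLength = 255): string {
  if (name.length <= maxLength) return name;
  const ext = path.extname(name);
  const base = path.basename(name, ext);
  const maxBaseLength = maxLength - ext.length;
  return maxBaseLength > 0
    ? base.substring(0, maxBaseLength) + ext
    : ext.substring(0, maxLength);
}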

View File

@@ -21,6 +21,33 @@ import { pauseController } from '../controllers/pause.controller';
import logger from '@utils/logger';
const router = Router();
/**
* Helper function to create proper Content-Disposition header
* Returns clean filename header that browsers handle correctly
*/
function createContentDisposition(disposition: 'inline' | 'attachment', filename: string): string {
// Clean filename: only remove truly problematic characters for HTTP headers
// Keep spaces, dots, hyphens, underscores - these are safe
const cleanFilename = filename
.replace(/[<>:"|?*\x00-\x1F\x7F]/g, '_') // Only replace truly problematic chars
.replace(/\\/g, '_') // Replace backslashes
.trim();
// For ASCII-only filenames, use simple format (browsers prefer this)
// Only use filename* for non-ASCII characters
const hasNonASCII = /[^\x00-\x7F]/.test(filename);
if (hasNonASCII) {
// Use RFC 5987 encoding for non-ASCII characters
const encodedFilename = encodeURIComponent(filename);
return `${disposition}; filename="${cleanFilename}"; filename*=UTF-8''${encodedFilename}`;
} else {
// Simple ASCII filename - use clean version (no filename* needed)
// This prevents browsers from showing both filename and filename*
return `${disposition}; filename="${cleanFilename}"`;
}
}
const workflowController = new WorkflowController();
const approvalController = new ApprovalController();
const workNoteController = new WorkNoteController();
@@ -223,6 +250,89 @@ router.get('/documents/:documentId/preview',
return;
}
// If storageUrl is null but filePath indicates GCS storage, stream file directly from GCS
if (!storageUrl && filePath && filePath.startsWith('requests/')) {
try {
// Use the existing GCS storage service instance
if (!gcsStorageService.isConfigured()) {
throw new Error('GCS not configured');
}
// Access the storage instance from the service
const { Storage } = require('@google-cloud/storage');
const keyFilePath = process.env.GCP_KEY_FILE || '';
const bucketName = process.env.GCP_BUCKET_NAME || '';
const path = require('path');
const resolvedKeyPath = path.isAbsolute(keyFilePath)
? keyFilePath
: path.resolve(process.cwd(), keyFilePath);
const storage = new Storage({
projectId: process.env.GCP_PROJECT_ID || '',
keyFilename: resolvedKeyPath,
});
const bucket = storage.bucket(bucketName);
const file = bucket.file(filePath);
// Check if file exists
const [exists] = await file.exists();
if (!exists) {
res.status(404).json({ success: false, error: 'File not found in GCS' });
return;
}
// Get file metadata for content type
const [metadata] = await file.getMetadata();
const contentType = metadata.contentType || fileType || 'application/octet-stream';
// Set CORS headers
const origin = req.headers.origin;
if (origin) {
res.setHeader('Access-Control-Allow-Origin', origin);
res.setHeader('Access-Control-Allow-Credentials', 'true');
}
res.setHeader('Access-Control-Expose-Headers', 'Content-Type, Content-Disposition');
res.setHeader('Content-Type', contentType);
// For images and PDFs, allow inline viewing
const isPreviewable = fileType && (fileType.includes('image') || fileType.includes('pdf'));
const disposition = isPreviewable ? 'inline' : 'attachment';
res.setHeader('Content-Disposition', createContentDisposition(disposition, fileName));
// Stream file from GCS to response
file.createReadStream()
.on('error', (streamError: Error) => {
const logger = require('../utils/logger').default;
logger.error('[Workflow] Failed to stream file from GCS', {
documentId,
filePath,
error: streamError.message,
});
if (!res.headersSent) {
res.status(500).json({
success: false,
error: 'Failed to stream file from storage'
});
}
})
.pipe(res);
return;
} catch (gcsError) {
const logger = require('../utils/logger').default;
logger.error('[Workflow] Failed to access GCS file for preview', {
documentId,
filePath,
error: gcsError instanceof Error ? gcsError.message : 'Unknown error',
});
res.status(500).json({
success: false,
error: 'Failed to access file. Please try again.'
});
return;
}
}
// Local file handling - check if storageUrl is a local path (starts with /uploads/)
if (storageUrl && storageUrl.startsWith('/uploads/')) {
// File is served by express.static middleware, redirect to the storage URL
@@ -296,6 +406,87 @@ router.get('/documents/:documentId/download',
return;
}
// If storageUrl is null but filePath indicates GCS storage, stream file directly from GCS
if (!storageUrl && filePath && filePath.startsWith('requests/')) {
try {
// Use the existing GCS storage service instance
if (!gcsStorageService.isConfigured()) {
throw new Error('GCS not configured');
}
// Access the storage instance from the service
const { Storage } = require('@google-cloud/storage');
const keyFilePath = process.env.GCP_KEY_FILE || '';
const bucketName = process.env.GCP_BUCKET_NAME || '';
const path = require('path');
const resolvedKeyPath = path.isAbsolute(keyFilePath)
? keyFilePath
: path.resolve(process.cwd(), keyFilePath);
const storage = new Storage({
projectId: process.env.GCP_PROJECT_ID || '',
keyFilename: resolvedKeyPath,
});
const bucket = storage.bucket(bucketName);
const file = bucket.file(filePath);
// Check if file exists
const [exists] = await file.exists();
if (!exists) {
res.status(404).json({ success: false, error: 'File not found in GCS' });
return;
}
// Get file metadata for content type
const [metadata] = await file.getMetadata();
const contentType = metadata.contentType || (document as any).mimeType || (document as any).mime_type || 'application/octet-stream';
// Set CORS headers
const origin = req.headers.origin;
if (origin) {
res.setHeader('Access-Control-Allow-Origin', origin);
res.setHeader('Access-Control-Allow-Credentials', 'true');
}
res.setHeader('Access-Control-Expose-Headers', 'Content-Type, Content-Disposition');
// Set headers for download
res.setHeader('Content-Type', contentType);
res.setHeader('Content-Disposition', createContentDisposition('attachment', fileName));
// Stream file from GCS to response
file.createReadStream()
.on('error', (streamError: Error) => {
const logger = require('../utils/logger').default;
logger.error('[Workflow] Failed to stream file from GCS for download', {
documentId,
filePath,
error: streamError.message,
});
if (!res.headersSent) {
res.status(500).json({
success: false,
error: 'Failed to stream file from storage'
});
}
})
.pipe(res);
return;
} catch (gcsError) {
const logger = require('../utils/logger').default;
logger.error('[Workflow] Failed to access GCS file for download', {
documentId,
filePath,
error: gcsError instanceof Error ? gcsError.message : 'Unknown error',
});
res.status(500).json({
success: false,
error: 'Failed to access file. Please try again.'
});
return;
}
}
// Local file handling - check if storageUrl is a local path (starts with /uploads/)
if (storageUrl && storageUrl.startsWith('/uploads/')) {
// File is served by express.static middleware, redirect to the storage URL
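
For reference, two illustrative calls to the createContentDisposition helper added earlier in this file, with the header strings they should produce under the ASCII and RFC 5987 branches as written:

// ASCII file name: only the plain filename parameter is emitted.
createContentDisposition('inline', 'proposal v2.pdf');
// => inline; filename="proposal v2.pdf"

// Non-ASCII file name: a sanitized filename plus an RFC 5987 filename* parameter.
createContentDisposition('attachment', 'résumé.pdf');
// => attachment; filename="résumé.pdf"; filename*=UTF-8''r%C3%A9sum%C3%A9.pdf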

View File

@@ -128,12 +128,15 @@ class GCSStorageService {
// Ensure bucket exists before uploading
await this.ensureBucketExists();
// Generate unique file name
// Generate unique file name with original name first for readability
// Format: originalName-timestamp-hash.ext (e.g., proposal-1766490022228-qjlojs.pdf)
const timestamp = Date.now();
const randomHash = Math.random().toString(36).substring(2, 8);
const safeName = originalName.replace(/[^a-zA-Z0-9._-]/g, '_');
const extension = path.extname(originalName);
const fileName = `${timestamp}-${randomHash}-${safeName}`;
// Extract name without extension, then add timestamp and hash before extension
const nameWithoutExt = safeName.substring(0, safeName.length - extension.length);
const fileName = `${nameWithoutExt}-${timestamp}-${randomHash}${extension}`;
// Build GCS path: requests/{requestNumber}/{fileType}/{fileName}
// Example: requests/REQ-2025-12-0001/documents/proposal.pdf
@@ -265,11 +268,15 @@ class GCSStorageService {
}
try {
// Generate unique file name (same format as GCS)
// Generate unique file name (same format as GCS) - original name first for readability
// Format: originalName-timestamp-hash.ext (e.g., proposal-1766490022228-qjlojs.pdf)
const timestamp = Date.now();
const randomHash = Math.random().toString(36).substring(2, 8);
const safeName = originalName.replace(/[^a-zA-Z0-9._-]/g, '_');
const fileName = `${timestamp}-${randomHash}-${safeName}`;
const extension = path.extname(originalName);
// Extract name without extension, then add timestamp and hash before extension
const nameWithoutExt = safeName.substring(0, safeName.length - extension.length);
const fileName = `${nameWithoutExt}-${timestamp}-${randomHash}${extension}`;
// Build local path: uploads/requests/{requestNumber}/{fileType}/{fileName}
// This matches the GCS structure: requests/{requestNumber}/{fileType}/{fileName}

View File

@@ -3074,8 +3074,16 @@ export class WorkflowService {
const refreshed = await WorkflowRequest.findByPk(actualRequestId);
return refreshed;
} catch (error) {
logger.error(`Failed to update workflow ${requestId}:`, error);
throw new Error('Failed to update workflow');
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
const errorStack = error instanceof Error ? error.stack : undefined;
logger.error(`Failed to update workflow ${requestId}:`, {
error: errorMessage,
stack: errorStack,
requestId,
updateData: JSON.stringify(updateData, null, 2),
});
// Preserve original error message for better debugging
throw new Error(`Failed to update workflow: ${errorMessage}`);
}
}