backend: proxy /api/github through the gateway and add git commit/summary/listing endpoints to the git-integration service

This commit is contained in:
Chandini 2025-09-29 15:42:35 +05:30
parent 84736d86a8
commit 96bf6062bb
2 changed files with 492 additions and 25 deletions

View File

@ -394,6 +394,66 @@ app.use('/api/templates',
} }
); );
// Git Integration Service - expose /api/github via gateway
console.log('🔧 Registering /api/github proxy route...');
app.use('/api/github',
  createServiceLimiter(300),
  // Unauthenticated GETs are allowed; auth for modifying routes can be
  // enforced downstream (git-integration service) or added here later.
  (req, res, next) => {
    const gitUrl = serviceTargets.GIT_INTEGRATION_URL;
    const targetUrl = `${gitUrl}${req.originalUrl}`;
    // FIX: the log line previously concatenated originalUrl and targetUrl
    // with no separator between them.
    console.log(`🔥 [GIT PROXY] ${req.method} ${req.originalUrl} -> ${targetUrl}`);

    // Guard against a hung upstream: answer 504 if nothing was sent in time.
    res.setTimeout(20000, () => {
      console.error('❌ [GIT PROXY] Response timeout');
      if (!res.headersSent) {
        res.status(504).json({ error: 'Gateway timeout', service: 'git-integration' });
      }
    });

    // Forward auth and user context only when actually present, so the
    // upstream never receives headers whose value is undefined.
    const headers = {
      'Content-Type': 'application/json',
      'User-Agent': 'API-Gateway/1.0',
      'Connection': 'keep-alive'
    };
    if (req.headers.authorization) {
      headers.Authorization = req.headers.authorization;
    }
    // FIX: the original fallback chain repeated req.headers['x-user-id'] twice.
    const userId = req.user?.id || req.user?.userId || req.headers['x-user-id'];
    if (userId) {
      headers['X-User-ID'] = userId;
    }

    axios({
      method: req.method,
      url: targetUrl,
      headers,
      timeout: 15000,             // upstream request timeout (< the 20s response timeout)
      validateStatus: () => true, // relay upstream status codes instead of throwing
      maxRedirects: 0,
      data: ['POST', 'PUT', 'PATCH'].includes(req.method) ? (req.body || {}) : undefined
    })
      .then(response => {
        console.log(`✅ [GIT PROXY] Response: ${response.status} for ${req.method} ${req.originalUrl}`);
        if (!res.headersSent) {
          res.status(response.status).json(response.data);
        }
      })
      .catch(error => {
        console.error(`❌ [GIT PROXY ERROR]:`, error.message);
        if (!res.headersSent) {
          if (error.response) {
            // Upstream answered with an error payload - relay it as-is.
            res.status(error.response.status).json(error.response.data);
          } else {
            // Network-level failure (ECONNREFUSED, timeout, DNS, ...).
            res.status(502).json({
              error: 'Git Integration service unavailable',
              message: error.code || error.message,
              service: 'git-integration'
            });
          }
        }
      });
  }
);
// Admin endpoints (Template Manager) - expose /api/admin via gateway // Admin endpoints (Template Manager) - expose /api/admin via gateway
console.log('🔧 Registering /api/admin proxy route...'); console.log('🔧 Registering /api/admin proxy route...');
app.use('/api/admin', app.use('/api/admin',

View File

@ -350,6 +350,251 @@ router.post('/attach-repository', async (req, res) => {
}); });
} }
}); });
// Get repository commit summary (latest commit + total commit count + branch/tag counts)
router.get('/repository/:id/commit-summary', async (req, res) => {
  try {
    const { id } = req.params;
    // Latest storage row wins if the repository was re-downloaded.
    const storageQ = `SELECT local_path FROM repository_storage WHERE repository_id = $1 ORDER BY created_at DESC LIMIT 1`;
    const storageRes = await database.query(storageQ, [id]);
    if (storageRes.rows.length === 0) {
      return res.status(404).json({ success: false, message: 'Local repository path not found' });
    }
    const localPath = storageRes.rows[0].local_path;
    const { execFileSync } = require('child_process');
    const opts = { encoding: 'utf8' };
    // execFileSync with an argv array: no shell, so localPath cannot be
    // interpreted as shell syntax (the old string-built execSync could).
    const git = (...args) => execFileSync('git', ['-C', localPath, ...args], opts);

    try { git('rev-parse', '--is-inside-work-tree'); } catch {
      return res.status(400).json({ success: false, message: 'Path is not a git repository' });
    }

    // Parse "hash|author|email|date|subject"; the subject may contain '|'.
    const parseCommitLine = (line) => {
      const [hash, author_name, author_email, committed_at, ...rest] = line.split('|');
      return {
        hash,
        short_hash: hash ? hash.substring(0, 7) : null,
        author_name,
        author_email,
        committed_at,
        message: rest.join('|')
      };
    };

    let last_commit = null;
    try {
      const lastRaw = git('log', '--pretty=format:%H|%an|%ae|%ad|%s', '-n', '1', '--date=iso').trim();
      if (lastRaw) last_commit = parseCommitLine(lastRaw);
    } catch (e) {
      console.warn('[commit-summary] git log failed:', e?.message);
    }
    if (!last_commit) {
      // Fallback: resolve HEAD directly and describe that commit.
      try {
        const head = git('rev-parse', 'HEAD').trim();
        if (head) {
          const show = git('show', '-s', '--format=%H|%an|%ae|%ad|%s', '--date=iso', head).trim();
          if (show) last_commit = parseCommitLine(show);
        }
      } catch (e2) {
        console.warn('[commit-summary] fallback rev-parse/show failed:', e2?.message);
      }
    }

    let total_commits = 0;
    try {
      total_commits = parseInt(git('rev-list', '--count', 'HEAD').trim(), 10) || 0;
    } catch { total_commits = 0; }

    // FIX: count lines in JS instead of piping through `wc -l` and coercing
    // with `* 1`, which required a shell and yielded NaN on padded output.
    const countLines = (out) => out.split('\n').map(s => s.trim()).filter(Boolean).length;
    let branch_count = 0, tag_count = 0;
    try { branch_count = countLines(git('branch', '--list')); } catch {}
    try { tag_count = countLines(git('tag', '--list')); } catch {}

    return res.json({ success: true, data: { last_commit, total_commits, branch_count, tag_count } });
  } catch (error) {
    console.error('Error getting commit summary:', error);
    res.status(500).json({ success: false, message: error.message || 'Failed to get commit summary' });
  }
});
// Get last commit that touched a given path
router.get('/repository/:id/path-commit', async (req, res) => {
  try {
    const { id } = req.params;
    const relPath = (req.query.path || '').toString();
    if (!relPath) return res.status(400).json({ success: false, message: 'path is required' });
    const storageQ = `SELECT local_path FROM repository_storage WHERE repository_id = $1 ORDER BY created_at DESC LIMIT 1`;
    const storageRes = await database.query(storageQ, [id]);
    if (storageRes.rows.length === 0) {
      return res.status(404).json({ success: false, message: 'Local repository path not found' });
    }
    const localPath = storageRes.rows[0].local_path;
    const { execFileSync } = require('child_process');
    const fs = require('fs');
    const path = require('path');
    const opts = { encoding: 'utf8' };

    // SECURITY: reject paths that escape the repository root (e.g. "../..").
    const repoRoot = path.resolve(localPath);
    const absRequested = path.resolve(repoRoot, relPath);
    if (absRequested !== repoRoot && !absRequested.startsWith(repoRoot + path.sep)) {
      return res.status(400).json({ success: false, message: 'Invalid path' });
    }

    // Walk the tree segment by segment, matching names case-insensitively.
    const resolveCaseInsensitive = (base, rel) => {
      const parts = rel.split('/').filter(Boolean);
      let cur = base, acc = [];
      for (const p of parts) {
        if (!fs.existsSync(cur)) return null;
        const entries = fs.readdirSync(cur);
        const match = entries.find(e => e.toLowerCase() === p.toLowerCase());
        if (!match) return null;
        acc.push(match);
        cur = path.join(cur, match);
      }
      return acc.join('/');
    };
    let resolvedRel = relPath;
    if (!fs.existsSync(absRequested)) {
      const fixed = resolveCaseInsensitive(repoRoot, relPath);
      if (fixed) resolvedRel = fixed;
      else return res.status(404).json({ success: false, message: 'Path not found' });
    }

    // SECURITY FIX: execFileSync with an argv array; the old string-built
    // execSync command allowed shell injection via the `path` query param.
    let raw = '';
    try {
      raw = execFileSync(
        'git',
        ['-C', localPath, 'log', '--pretty=format:%H|%an|%ae|%ad|%s', '-n', '1', '--date=iso', '--', resolvedRel],
        opts
      ).trim();
    } catch { raw = ''; }
    if (!raw) return res.json({ success: true, data: null });

    const [hash, author_name, author_email, committed_at, ...rest] = raw.split('|');
    const message = rest.join('|');
    return res.json({ success: true, data: { hash, short_hash: hash.substring(0, 7), author_name, author_email, committed_at, message, path: resolvedRel } });
  } catch (error) {
    console.error('Error getting path commit:', error);
    res.status(500).json({ success: false, message: error.message || 'Failed to get path commit' });
  }
});
// List commits with pagination and optional path filter (includes files changed)
router.get('/repository/:id/commits', async (req, res) => {
  try {
    const { id } = req.params;
    const page = Math.max(1, parseInt((req.query.page || '1').toString(), 10));
    const limit = Math.min(100, Math.max(1, parseInt((req.query.limit || '20').toString(), 10)));
    const relPath = req.query.path ? req.query.path.toString() : '';
    const storageQ = `SELECT local_path FROM repository_storage WHERE repository_id = $1 ORDER BY created_at DESC LIMIT 1`;
    const storageRes = await database.query(storageQ, [id]);
    if (storageRes.rows.length === 0) {
      return res.status(404).json({ success: false, message: 'Local repository path not found' });
    }
    const localPath = storageRes.rows[0].local_path;
    const { execFileSync } = require('child_process');
    const fs = require('fs');
    const path = require('path');
    const opts = { encoding: 'utf8' };
    // SECURITY FIX: argv-array execution; the old string-built execSync
    // commands allowed shell injection through the `path` query parameter.
    const git = (args) => execFileSync('git', ['-C', localPath, ...args], opts);

    if (relPath) {
      // Missing or out-of-repo paths yield an empty result set.
      const repoRoot = path.resolve(localPath);
      const candidate = path.resolve(repoRoot, relPath);
      const inside = candidate === repoRoot || candidate.startsWith(repoRoot + path.sep);
      if (!inside || !fs.existsSync(candidate)) {
        return res.json({ success: true, data: { items: [], page, limit, total: 0, has_next: false } });
      }
    }

    // Total commit count (optionally restricted to the path).
    let total = 0;
    try {
      const countArgs = ['rev-list', '--count', 'HEAD'];
      if (relPath) countArgs.push('--', relPath);
      total = parseInt(git(countArgs).trim(), 10) || 0;
    } catch { total = 0; }

    const skip = (page - 1) * limit;
    // Each commit block is prefixed with the record separator \x1e so blocks
    // can be split reliably regardless of the file-list lines that follow.
    const logArgs = [
      'log', '--date=iso', '--pretty=format:%x1e%H|%an|%ae|%ad|%s',
      '--name-status', '--numstat', '--no-color',
      '--skip', String(skip), '-n', String(limit)
    ];
    if (relPath) logArgs.push('--', relPath);
    let raw = '';
    try { raw = git(logArgs); } catch { raw = ''; }

    const blocks = raw.split('\x1e').map(b => b.trim()).filter(Boolean);
    const items = blocks.map(block => {
      const lines = block.split('\n').filter(Boolean);
      const header = lines.shift() || '';
      const [hash, author_name, author_email, committed_at, ...rest] = header.split('|');
      const message = rest.join('|');
      const fileMap = new Map();
      // FIX: the original used /^\d+|-$/ which matches "^\d+" OR "-$"
      // (so "12abc" or "abc-" passed); the grouped form checks the whole token.
      const isCount = (s) => /^(?:\d+|-)$/.test(s);
      for (const ln of lines) {
        // numstat line: "<additions>\t<deletions>\t<path>" ("-" for binaries)
        const numParts = ln.split('\t');
        if (numParts.length === 3 && isCount(numParts[0]) && isCount(numParts[1])) {
          const additions = numParts[0] === '-' ? null : parseInt(numParts[0], 10);
          const deletions = numParts[1] === '-' ? null : parseInt(numParts[1], 10);
          const fpath = numParts[2];
          const entry = fileMap.get(fpath) || { path: fpath };
          entry.additions = additions;
          entry.deletions = deletions;
          fileMap.set(fpath, entry);
          continue;
        }
        // name-status line: "M\tpath" or "R100\told\tnew" etc.
        const ns = ln.split('\t');
        if (ns.length >= 2) {
          const status = ns[0];
          let fpath = ns[1];
          if (status.startsWith('R') && ns.length >= 3) {
            // rename: report the new path
            fpath = ns[2];
          }
          const entry = fileMap.get(fpath) || { path: fpath };
          entry.change_type = status;
          fileMap.set(fpath, entry);
        }
      }
      const files = Array.from(fileMap.values());
      return { hash, short_hash: hash?.substring(0, 7), author_name, author_email, committed_at, message, files };
    });
    const has_next = skip + items.length < total;
    return res.json({ success: true, data: { items, page, limit, total, has_next } });
  } catch (error) {
    console.error('Error listing commits:', error);
    res.status(500).json({ success: false, message: error.message || 'Failed to list commits' });
  }
});
// Get a single commit by SHA (or ref name) with files changed
router.get('/repository/:id/commit/:sha', async (req, res) => {
  try {
    const { id, sha } = req.params;
    // SECURITY FIX: the old string-built execSync command interpolated :sha
    // directly into a shell command. We now (a) run git without a shell and
    // (b) reject values that look like options or contain odd characters,
    // while still accepting abbreviated/full SHAs and simple ref names.
    if (sha.startsWith('-') || !/^[\w./-]+$/.test(sha)) {
      return res.status(400).json({ success: false, message: 'Invalid commit SHA' });
    }
    const storageQ = `SELECT local_path FROM repository_storage WHERE repository_id = $1 ORDER BY created_at DESC LIMIT 1`;
    const storageRes = await database.query(storageQ, [id]);
    if (storageRes.rows.length === 0) {
      return res.status(404).json({ success: false, message: 'Local repository path not found' });
    }
    const localPath = storageRes.rows[0].local_path;
    const { execFileSync } = require('child_process');
    const opts = { encoding: 'utf8' };
    const git = (args) => execFileSync('git', ['-C', localPath, ...args], opts);

    const header = git(['show', '-s', '--format=%H|%an|%ae|%ad|%s', '--date=iso', sha]).trim();
    const [hash, author_name, author_email, committed_at, ...rest] = header.split('|');
    const message = rest.join('|');
    // Empty --format suppresses the commit header so only file lines remain.
    const filesRaw = git(['show', '--name-status', '--numstat', '--format=', sha]);
    const lines = filesRaw.split('\n').filter(Boolean);
    const fileMap = new Map();
    // FIX: grouped regex; /^\d+|-$/ matched "^\d+" OR "-$" (e.g. "12abc").
    const isCount = (s) => /^(?:\d+|-)$/.test(s);
    for (const ln of lines) {
      // numstat line: "<additions>\t<deletions>\t<path>" ("-" for binaries)
      const numParts = ln.split('\t');
      if (numParts.length === 3 && isCount(numParts[0]) && isCount(numParts[1])) {
        const additions = numParts[0] === '-' ? null : parseInt(numParts[0], 10);
        const deletions = numParts[1] === '-' ? null : parseInt(numParts[1], 10);
        const fpath = numParts[2];
        const entry = fileMap.get(fpath) || { path: fpath };
        entry.additions = additions;
        entry.deletions = deletions;
        fileMap.set(fpath, entry);
        continue;
      }
      // name-status line: "M\tpath" or "R100\told\tnew" etc.
      const ns = ln.split('\t');
      if (ns.length >= 2) {
        const status = ns[0];
        let fpath = ns[1];
        if (status.startsWith('R') && ns.length >= 3) {
          // rename: report the new path
          fpath = ns[2];
        }
        const entry = fileMap.get(fpath) || { path: fpath };
        entry.change_type = status;
        fileMap.set(fpath, entry);
      }
    }
    const files = Array.from(fileMap.values());
    return res.json({ success: true, data: { hash, short_hash: hash?.substring(0, 7), author_name, author_email, committed_at, message, files } });
  } catch (error) {
    console.error('Error getting commit by sha:', error);
    res.status(500).json({ success: false, message: error.message || 'Failed to get commit' });
  }
});
// Get repository diff between two SHAs (unified patch) // Get repository diff between two SHAs (unified patch)
router.get('/repository/:id/diff', async (req, res) => { router.get('/repository/:id/diff', async (req, res) => {
try { try {
@ -469,14 +714,68 @@ router.get('/repository/:id/structure', async (req, res) => {
}); });
} }
const structure = await fileStorageService.getRepositoryStructure(id, directoryPath); let structure = [];
try {
structure = await fileStorageService.getRepositoryStructure(id, directoryPath);
} catch (e) {
console.warn('[structure] storage service failed, will try FS fallback:', e?.message);
}
// Filesystem fallback when storage has no entries
if (!Array.isArray(structure) || structure.length === 0) {
try {
const storageQ = `SELECT local_path FROM repository_storage WHERE repository_id = $1 ORDER BY created_at DESC LIMIT 1`;
const storageRes = await database.query(storageQ, [id]);
if (storageRes.rows.length > 0) {
const base = storageRes.rows[0].local_path;
const fs = require('fs');
const pth = require('path');
const resolveCaseInsensitive = (baseDir, rel) => {
if (!rel) return baseDir;
const parts = rel.split('/').filter(Boolean);
let cur = baseDir;
for (const p of parts) {
if (!fs.existsSync(cur)) return null;
const entries = fs.readdirSync(cur);
const match = entries.find(e => e.toLowerCase() === p.toLowerCase());
if (!match) return null;
cur = pth.join(cur, match);
}
return cur;
};
const absDir = resolveCaseInsensitive(base, directoryPath || '');
if (absDir && fs.existsSync(absDir) && fs.statSync(absDir).isDirectory()) {
const items = fs.readdirSync(absDir);
structure = items.map(name => {
const absChild = pth.join(absDir, name);
const isDir = fs.statSync(absChild).isDirectory();
// compute relative path from base
const relPath = pth.relative(base, absChild).replace(/\\/g, '/');
return {
name,
path: relPath,
type: isDir ? 'directory' : 'file'
};
}).sort((a, b) => {
// directories first, then alphabetical
if (a.type !== b.type) return a.type === 'directory' ? -1 : 1;
return a.name.localeCompare(b.name);
});
}
}
} catch (fsErr) {
console.warn('[structure] FS fallback failed:', fsErr?.message);
}
}
res.json({ res.json({
success: true, success: true,
data: { data: {
repository_id: id, repository_id: id,
directory_path: directoryPath || '', directory_path: directoryPath || '',
structure: structure structure: structure || []
} }
}); });
@ -549,16 +848,9 @@ router.get('/repository/:id/file-content', async (req, res) => {
const result = await database.query(query, [id, file_path]); const result = await database.query(query, [id, file_path]);
if (result.rows.length === 0) { if (result.rows.length > 0) {
return res.status(404).json({
success: false,
message: 'File not found'
});
}
const file = result.rows[0]; const file = result.rows[0];
return res.json({
res.json({
success: true, success: true,
data: { data: {
file_info: { file_info: {
@ -577,6 +869,79 @@ router.get('/repository/:id/file-content', async (req, res) => {
preview: file.content_preview preview: file.content_preview
} }
}); });
}
// Fallback: read from filesystem using repository_storage.local_path
const storageQ = `SELECT local_path FROM repository_storage WHERE repository_id = $1 ORDER BY created_at DESC LIMIT 1`;
const storageRes = await database.query(storageQ, [id]);
if (storageRes.rows.length === 0) {
return res.status(404).json({ success: false, message: 'File not found (no storage path)' });
}
const localBase = storageRes.rows[0].local_path;
const pathJoin = require('path').join;
const fs = require('fs');
// Helper: case-insensitive resolution
const resolveCaseInsensitive = (base, rel) => {
const parts = rel.split('/').filter(Boolean);
let cur = base;
for (const p of parts) {
if (!fs.existsSync(cur)) return null;
const entries = fs.readdirSync(cur);
const match = entries.find(e => e.toLowerCase() === p.toLowerCase());
if (!match) return null;
cur = pathJoin(cur, match);
}
return cur;
};
let absPath = pathJoin(localBase, file_path);
if (!fs.existsSync(absPath)) {
absPath = resolveCaseInsensitive(localBase, file_path);
}
if (!absPath || !fs.existsSync(absPath)) {
return res.status(404).json({ success: false, message: 'File not found' });
}
// Disallow directories for file-content
const stat = fs.statSync(absPath);
if (stat.isDirectory()) {
return res.status(400).json({ success: false, message: 'Requested path is a directory' });
}
// Basic binary detection
let buffer = fs.readFileSync(absPath);
let hasNull = buffer.includes(0);
let isBinary = hasNull;
const filename = require('path').basename(absPath);
const ext = require('path').extname(absPath).replace(/^\./, '') || null;
// Relax detection for well-known text extensions
const textExts = new Set(['txt','md','markdown','json','yml','yaml','xml','csv','tsv','py','js','jsx','ts','tsx','java','go','rb','rs','php','c','h','cc','hh','cpp','hpp','cs','kt','swift','sql','ini','env','sh','bash','zsh','bat','ps1','toml','gradle','makefile','dockerfile']);
if (ext && textExts.has(ext.toLowerCase())) {
isBinary = false;
hasNull = false;
}
const contentText = isBinary ? null : buffer.toString('utf8');
return res.json({
success: true,
data: {
file_info: {
id: null,
filename: filename,
file_extension: ext,
relative_path: file_path,
file_size_bytes: stat.size,
mime_type: null,
is_binary: isBinary,
language_detected: null,
line_count: contentText ? contentText.split(/\r?\n/).length : null,
char_count: contentText ? contentText.length : stat.size
},
content: contentText,
preview: contentText ? contentText.slice(0, 500) : null
}
});
} catch (error) { } catch (error) {
console.error('Error fetching file content:', error); console.error('Error fetching file content:', error);
@ -791,7 +1156,49 @@ router.delete('/repository/:id', async (req, res) => {
console.error('Error removing repository:', error); console.error('Error removing repository:', error);
res.status(500).json({ res.status(500).json({
success: false, success: false,
message: error.message || 'Failed to remove repository'
});
}
});
// List all repositories for a user (by path param user_id)
router.get('/user/:user_id/repositories', async (req, res) => {
  try {
    const { user_id } = req.params;
    const query = `
      SELECT gr.*, rs.local_path, rs.storage_status, rs.total_files_count,
             rs.total_directories_count, rs.total_size_bytes, rs.download_completed_at
      FROM github_repositories gr
      LEFT JOIN repository_storage rs ON gr.id = rs.repository_id
      WHERE gr.user_id = $1
      ORDER BY gr.created_at DESC
    `;
    const result = await database.query(query, [user_id]);

    // jsonb columns may arrive already parsed (pg) or as raw strings; either
    // way normalize to a plain object, falling back to {} on bad JSON/null.
    const toObject = (value) => {
      if (value == null) return {};
      if (typeof value !== 'string') return value;
      try {
        return JSON.parse(value);
      } catch {
        return {};
      }
    };

    const repositories = [];
    for (const row of result.rows) {
      repositories.push({
        ...row,
        metadata: toObject(row.metadata),
        codebase_analysis: toObject(row.codebase_analysis)
      });
    }

    res.json({
      success: true,
      data: repositories
    });
  } catch (error) {
    console.error('Error fetching repositories:', error);
    res.status(500).json({
      success: false,
      message: error.message || 'Failed to fetch repositories'
    });
  }
});