1593 lines
56 KiB
JavaScript
1593 lines
56 KiB
JavaScript
// Updated routes/github-integration.js
|
|
const express = require('express');
|
|
const router = express.Router();
|
|
const GitHubIntegrationService = require('../services/github-integration.service');
|
|
const GitHubOAuthService = require('../services/github-oauth');
|
|
const FileStorageService = require('../services/file-storage.service');
|
|
const database = require('../config/database');
|
|
const fs = require('fs');
|
|
const path = require('path');
|
|
|
|
const githubService = new GitHubIntegrationService();
|
|
const oauthService = new GitHubOAuthService();
|
|
const fileStorageService = new FileStorageService();
|
|
|
|
// Helper function to generate authentication response.
//
// Builds a 401 response directing the client through the GitHub OAuth flow.
// The user id, repository URL and branch are folded into the OAuth `state`
// (pipe-delimited, URL-encoded values) so the OAuth callback can auto-attach
// the repository once authentication completes.
//
// @param {object} res - Express response; the reply is written to it.
// @param {string} repository_url - GitHub repository URL being attached.
// @param {string} [branch_name] - requested branch; payload defaults to 'main'.
// @param {string|null} [userId] - requesting user's id, when known.
// @returns the Express response: 401 with auth URLs, or 500 if URL generation throws.
const generateAuthResponse = (res, repository_url, branch_name, userId) => {
  try {
    console.log('🔧 [generateAuthResponse] Starting auth response generation...');

    const crypto = require('crypto');
    const { owner, repo } = githubService.parseGitHubUrl(repository_url);
    console.log('🔧 [generateAuthResponse] Parsed URL:', { owner, repo });

    // The OAuth `state` doubles as a CSRF token, so use cryptographically
    // secure randomness. (The previous Math.random().toString(36).substring(7)
    // nonce was predictable and only ~5 characters long.)
    const stateBase = crypto.randomBytes(16).toString('hex');
    const userIdForAuth = userId || null;
    const encodedRepoUrl = encodeURIComponent(repository_url);
    const encodedBranchName = encodeURIComponent(branch_name || '');
    const state = `${stateBase}|uid=${userIdForAuth || ''}|repo=${encodedRepoUrl}|branch=${encodedBranchName}`;

    console.log('🔧 [generateAuthResponse] Generated state:', state);

    const rawAuthUrl = oauthService.getAuthUrl(state, userIdForAuth);
    console.log('🔧 [generateAuthResponse] Generated raw auth URL:', rawAuthUrl);

    // Route the browser through the public API gateway endpoint rather than
    // linking GitHub directly, so the service can mediate the redirect.
    const gatewayBase = process.env.API_GATEWAY_PUBLIC_URL || 'https://backend.codenuk.com';
    const serviceRelative = '/api/github/auth/github';
    const serviceAuthUrl = `${gatewayBase}${serviceRelative}?redirect=1&state=${encodeURIComponent(state)}${userIdForAuth ? `&user_id=${encodeURIComponent(userIdForAuth)}` : ''}`;

    console.log('🔧 [generateAuthResponse] Generated service auth URL:', serviceAuthUrl);

    const response = {
      success: false,
      message: 'GitHub authentication required for private repository',
      requires_auth: true,
      auth_url: serviceAuthUrl,
      service_auth_url: rawAuthUrl,
      auth_error: false,
      repository_info: {
        owner,
        repo,
        repository_url,
        branch_name: branch_name || 'main'
      }
    };

    console.log('🔧 [generateAuthResponse] Sending response:', response);

    return res.status(401).json(response);
  } catch (error) {
    console.error('❌ [generateAuthResponse] Error:', error);
    return res.status(500).json({
      success: false,
      message: 'Error generating authentication URL',
      error: error.message
    });
  }
};
|
|
|
|
// Attach GitHub repository to template.
//
// POST /attach-repository
// Body: { repository_url (required), branch_name (optional) }
// User id taken from x-user-id header, query, body, or req.user.
//
// Flow: detect public/private → for private repos without stored credentials
// respond 401 with an OAuth URL (via generateAuthResponse) → fetch metadata →
// validate branch → analyze codebase → insert DB record → best-effort webhook
// → clone/sync (git first, API fallback) → respond 201 with the full record.
router.post('/attach-repository', async (req, res) => {
  try {
    const { repository_url, branch_name } = req.body;
    const userId = req.headers['x-user-id'] || req.query.user_id || req.body.user_id || (req.user && (req.user.id || req.user.userId));

    // Validate input
    if (!repository_url) {
      return res.status(400).json({
        success: false,
        message: 'Repository URL is required'
      });
    }

    // Parse GitHub URL
    const { owner, repo, branch } = githubService.parseGitHubUrl(repository_url);

    // Shape the metadata payload stored with the repository record.
    // (Previously duplicated as two identical object literals.)
    const buildRepositoryMetadata = (repoInfo, visibility) => ({
      full_name: repoInfo.full_name,
      description: repoInfo.description,
      language: repoInfo.language,
      visibility,
      stargazers_count: repoInfo.stargazers_count,
      forks_count: repoInfo.forks_count,
      default_branch: repoInfo.default_branch,
      size: repoInfo.size,
      updated_at: repoInfo.updated_at
    });

    // Step 1: Determine if repository is public or private
    let isPublicRepo = false;
    let repositoryData = null;
    let hasAuth = false;

    // Check if user has GitHub authentication first
    try {
      if (userId) {
        const userTokens = await oauthService.getUserTokens(userId);
        hasAuth = userTokens && userTokens.length > 0;
      } else {
        const authStatus = await oauthService.getAuthStatus();
        hasAuth = authStatus.connected;
      }
      console.log(`🔐 User authentication status: ${hasAuth ? 'Connected' : 'Not connected'}`);
    } catch (authError) {
      console.log(`❌ Error checking auth status: ${authError.message}`);
      hasAuth = false;
    }

    try {
      // Try to access the repository without authentication first (for public repos)
      const unauthenticatedOctokit = new (require('@octokit/rest')).Octokit({
        userAgent: 'CodeNuk-GitIntegration/1.0.0',
      });

      const { data: repoInfo } = await unauthenticatedOctokit.repos.get({ owner, repo });
      isPublicRepo = !repoInfo.private;
      repositoryData = buildRepositoryMetadata(repoInfo, repoInfo.private ? 'private' : 'public');

      console.log(`✅ Repository ${owner}/${repo} is ${isPublicRepo ? 'public' : 'private'}`);

      if (isPublicRepo) {
        console.log(`📥 Proceeding to clone public repository ${owner}/${repo}`);
        // Continue to cloning logic below
      } else if (!hasAuth) {
        // Private repo, no stored credentials: send the OAuth-required 401.
        // generateAuthResponse is synchronous, so calling it cannot hang the
        // request; the previous inline copies of its body were redundant.
        console.log(`🔒 Private repository requires authentication - generating OAuth URL`);
        return generateAuthResponse(res, repository_url, branch_name, userId);
      } else {
        console.log(`🔐 User has authentication for private repository - proceeding with authenticated access`);
        // Continue to authenticated cloning logic below
      }
    } catch (error) {
      // IMPORTANT: GitHub returns 404 for private repos when unauthenticated.
      // Do NOT immediately return 404 here; instead continue to check auth and treat as potentially private.
      if (error.status && error.status !== 404) {
        // For non-404 errors (e.g., rate-limit, network), surface a meaningful message
        console.warn(`Unauthenticated access failed with status ${error.status}: ${error.message}`);
      }

      // If we can't access it without auth (including 404), it's likely private
      console.log(`❌ Cannot access ${owner}/${repo} without authentication (status=${error.status || 'unknown'})`);
      console.log(`🔧 Debug: hasAuth = ${hasAuth}, userId = ${userId}`);

      if (!hasAuth) {
        console.log(`🔒 Repository appears to be private and user is not authenticated - generating OAuth URL`);
        return generateAuthResponse(res, repository_url, branch_name, userId);
      }
      console.log(`🔐 User has authentication - trying authenticated access for potentially private repository`);
      // Continue to authenticated access logic below
    }

    // Step 2: Handle authenticated access for private repositories
    if (!isPublicRepo && hasAuth) {
      try {
        const octokit = await githubService.getAuthenticatedOctokit();
        const { data: repoInfo } = await octokit.repos.get({ owner, repo });
        repositoryData = buildRepositoryMetadata(repoInfo, 'private');
        console.log(`✅ Private repository ${owner}/${repo} accessed with authentication`);
      } catch (authError) {
        console.log(`❌ Cannot access ${owner}/${repo} even with authentication: ${authError.message}`);
        return res.status(403).json({
          success: false,
          message: 'Repository not accessible - you may not have permission to access this repository'
        });
      }
    }

    // Step 3: Ensure we have repository data
    if (!repositoryData) {
      console.log(`❌ No repository data available - this should not happen`);
      return res.status(500).json({
        success: false,
        message: 'Failed to retrieve repository information'
      });
    }

    // Branch priority: 1) explicit branch_name, 2) branch from URL,
    // 3) repository's default branch, 4) 'main'.
    let actualBranch = branch_name || branch || repositoryData.default_branch || 'main';

    // Validate that the requested branch exists, fallback to default if not
    try {
      if (branch || branch_name) {
        // Use authenticated octokit for private repos, unauthenticated for public
        let octokit;
        if (isPublicRepo) {
          octokit = new (require('@octokit/rest')).Octokit({
            userAgent: 'CodeNuk-GitIntegration/1.0.0',
          });
        } else {
          octokit = await githubService.getAuthenticatedOctokit();
        }

        await octokit.git.getRef({ owner, repo, ref: `heads/${actualBranch}` });
      }
    } catch (error) {
      if (error.status === 404) {
        console.warn(`Branch ${actualBranch} not found, using default branch: ${repositoryData.default_branch}`);
        actualBranch = repositoryData.default_branch || 'main';
      } else {
        throw error;
      }
    }

    // Analyze the codebase
    const codebaseAnalysis = await githubService.analyzeCodebase(owner, repo, actualBranch, isPublicRepo);

    // Store everything in PostgreSQL (without template_id)
    const insertQuery = `
      INSERT INTO all_repositories (
        repository_url, repository_name, owner_name,
        branch_name, is_public, metadata, codebase_analysis, sync_status,
        requires_auth, user_id, provider_name
      ) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11)
      RETURNING *
    `;

    const insertValues = [
      repository_url,
      repo,
      owner,
      actualBranch,
      isPublicRepo,
      JSON.stringify(repositoryData),
      JSON.stringify(codebaseAnalysis),
      'syncing', // Start with syncing status
      !isPublicRepo, // requires_auth is true for private repos
      userId || null,
      'github' // provider_name
    ];

    const insertResult = await database.query(insertQuery, insertValues);
    const repositoryRecord = insertResult.rows && insertResult.rows[0];

    if (!repositoryRecord) {
      return res.status(500).json({
        success: false,
        message: 'Failed to create repository record in database'
      });
    }

    // Attempt to auto-create webhook on the attached repository using OAuth token (for all repos)
    let webhookResult = null;
    const publicBaseUrl = process.env.PUBLIC_BASE_URL || null; // e.g., your ngrok URL https://xxx.ngrok-free.app
    const callbackUrl = publicBaseUrl ? `${publicBaseUrl}/api/github/webhook` : null;
    if (callbackUrl) {
      webhookResult = await githubService.ensureRepositoryWebhook(owner, repo, callbackUrl);
      console.log(`🔗 Webhook creation result for ${owner}/${repo}:`, webhookResult);
    } else {
      console.warn(`⚠️ No PUBLIC_BASE_URL configured - webhook not created for ${owner}/${repo}`);
    }

    // Sync with fallback: try git first, then API
    console.log(`Syncing ${isPublicRepo ? 'public' : 'private'} repository (git first, API fallback)...`);
    const downloadResult = await githubService.syncRepositoryWithFallback(
      owner, repo, actualBranch, repositoryRecord.id, isPublicRepo
    );

    // Update sync status based on download result
    const finalSyncStatus = downloadResult.success ? 'synced' : 'error';
    await database.query(
      'UPDATE all_repositories SET sync_status = $1, updated_at = NOW() WHERE id = $2',
      [finalSyncStatus, repositoryRecord.id]
    );

    if (!downloadResult.success) {
      console.warn('Repository download failed:', downloadResult.error);
    } else {
      console.log(`✅ Repository ${owner}/${repo} synced successfully using ${downloadResult.method} method`);
    }

    // Get storage information
    const storageInfo = await githubService.getRepositoryStorage(repositoryRecord.id);

    res.status(201).json({
      success: true,
      message: `Repository attached and ${downloadResult.success ? 'synced' : 'partially synced'} successfully`,
      data: {
        repository_id: repositoryRecord.id,
        repository_name: repositoryRecord.repository_name,
        owner_name: repositoryRecord.owner_name,
        branch_name: repositoryRecord.branch_name,
        is_public: isPublicRepo,
        requires_auth: !isPublicRepo,
        sync_status: finalSyncStatus,
        metadata: repositoryData,
        codebase_analysis: codebaseAnalysis,
        storage_info: storageInfo,
        download_result: downloadResult,
        webhook_result: webhookResult,
        authentication_info: {
          is_public: isPublicRepo,
          authenticated: !isPublicRepo,
          github_username: null
        }
      }
    });

  } catch (error) {
    console.error('Error attaching repository:', error);
    res.status(500).json({
      success: false,
      message: error.message || 'Failed to attach repository'
    });
  }
});
|
|
|
|
// Get repository commit summary (latest commit + total commit count + branch/tag counts).
// GET /repository/:id/commit-summary — reads from the locally cloned working tree.
router.get('/repository/:id/commit-summary', async (req, res) => {
  try {
    const { id } = req.params;
    const storageQ = `SELECT local_path FROM repository_storage WHERE repository_id = $1 ORDER BY created_at DESC LIMIT 1`;
    const storageRes = await database.query(storageQ, [id]);
    if (storageRes.rows.length === 0) {
      return res.status(404).json({ success: false, message: 'Local repository path not found' });
    }
    const localPath = storageRes.rows[0].local_path;
    const { execFileSync } = require('child_process');
    const opts = { encoding: 'utf8' };
    // Run git via execFileSync with an argument vector (no shell): the `|`
    // characters in --pretty=format:%H|%an|... are passed through literally
    // instead of being parsed as shell pipes (which made the old execSync
    // string commands fail), and local_path cannot inject shell syntax.
    const git = (...args) => execFileSync('git', ['-C', localPath, ...args], opts);

    try { git('rev-parse', '--is-inside-work-tree'); } catch {
      return res.status(400).json({ success: false, message: 'Path is not a git repository' });
    }

    const FORMAT = '%H|%an|%ae|%ad|%s';
    // Split a "hash|author|email|date|subject" line; subject may itself contain '|'.
    const parseCommitLine = (line) => {
      const [hash, author_name, author_email, committed_at, ...rest] = line.split('|');
      return { hash, short_hash: hash ? hash.substring(0, 7) : null, author_name, author_email, committed_at, message: rest.join('|') };
    };

    let lastRaw = '';
    try {
      lastRaw = git('log', `--pretty=format:${FORMAT}`, '-n', '1', '--date=iso').trim();
    } catch (e) {
      console.warn('[commit-summary] git log failed:', e?.message);
      lastRaw = '';
    }
    let last_commit = null;
    if (lastRaw) {
      last_commit = parseCommitLine(lastRaw);
    } else {
      // Fallback: resolve HEAD directly and show that single commit.
      try {
        const head = git('rev-parse', 'HEAD').trim();
        if (head) {
          const show = git('show', '-s', `--format=${FORMAT}`, '--date=iso', head).trim();
          if (show) last_commit = parseCommitLine(show);
        }
      } catch (e2) {
        console.warn('[commit-summary] fallback rev-parse/show failed:', e2?.message);
      }
    }

    let total_commits = 0;
    try {
      total_commits = parseInt(git('rev-list', '--count', 'HEAD').trim(), 10) || 0;
    } catch { total_commits = 0; }

    // Count branches/tags by splitting lines in JS rather than piping through
    // `wc -l` (portable, shell-free, and no `.trim() * 1` coercion).
    const countLines = (out) => out.split('\n').filter((l) => l.trim().length > 0).length;
    let branch_count = 0, tag_count = 0;
    try { branch_count = countLines(git('branch', '--list')); } catch {}
    try { tag_count = countLines(git('tag', '--list')); } catch {}

    return res.json({ success: true, data: { last_commit, total_commits, branch_count, tag_count } });
  } catch (error) {
    console.error('Error getting commit summary:', error);
    res.status(500).json({ success: false, message: error.message || 'Failed to get commit summary' });
  }
});
|
|
|
|
// Get last commit that touched a given path.
// GET /repository/:id/path-commit?path=<repo-relative path>
// Resolves the path case-insensitively against the local clone when the exact
// casing does not exist on disk. Returns data: null when no commit touched it.
router.get('/repository/:id/path-commit', async (req, res) => {
  try {
    const { id } = req.params;
    const relPath = (req.query.path || '').toString();
    if (!relPath) return res.status(400).json({ success: false, message: 'path is required' });

    const storageQ = `SELECT local_path FROM repository_storage WHERE repository_id = $1 ORDER BY created_at DESC LIMIT 1`;
    const storageRes = await database.query(storageQ, [id]);
    if (storageRes.rows.length === 0) {
      return res.status(404).json({ success: false, message: 'Local repository path not found' });
    }
    const localPath = storageRes.rows[0].local_path;
    const { execFileSync } = require('child_process');
    const fs = require('fs');
    const path = require('path');
    const opts = { encoding: 'utf8' };

    // Walk `rel` one segment at a time, matching each component against the
    // directory listing case-insensitively; returns the corrected relative
    // path or null when any component is missing.
    const resolveCaseInsensitive = (base, rel) => {
      const parts = rel.split('/').filter(Boolean);
      let cur = base, acc = [];
      for (const p of parts) {
        if (!fs.existsSync(cur)) return null;
        const entries = fs.readdirSync(cur);
        const match = entries.find(e => e.toLowerCase() === p.toLowerCase());
        if (!match) return null;
        acc.push(match);
        cur = path.join(cur, match);
      }
      return acc.join('/');
    };

    // Containment guard: `path` comes from the query string, so reject values
    // that resolve outside the repository root (e.g. "../../etc").
    const repoRoot = path.resolve(localPath);
    const absCandidate = path.resolve(localPath, relPath);
    if (absCandidate !== repoRoot && !absCandidate.startsWith(repoRoot + path.sep)) {
      return res.status(404).json({ success: false, message: 'Path not found' });
    }

    let resolvedRel = relPath;
    if (!fs.existsSync(absCandidate)) {
      const fixed = resolveCaseInsensitive(localPath, relPath);
      if (fixed) resolvedRel = fixed; else return res.status(404).json({ success: false, message: 'Path not found' });
    }

    // execFileSync (no shell): the user-supplied path cannot inject shell
    // commands, and the '|' separators in the format stay literal (the old
    // string-built execSync command was injectable and its unquoted pipes
    // were parsed by the shell).
    let raw = '';
    try {
      raw = execFileSync(
        'git',
        ['-C', localPath, 'log', '--pretty=format:%H|%an|%ae|%ad|%s', '-n', '1', '--date=iso', '--', resolvedRel],
        opts
      ).trim();
    } catch { raw = ''; }
    if (!raw) return res.json({ success: true, data: null });

    const [hash, author_name, author_email, committed_at, ...rest] = raw.split('|');
    const message = rest.join('|');
    return res.json({ success: true, data: { hash, short_hash: hash.substring(0, 7), author_name, author_email, committed_at, message, path: resolvedRel } });
  } catch (error) {
    console.error('Error getting path commit:', error);
    res.status(500).json({ success: false, message: error.message || 'Failed to get path commit' });
  }
});
|
|
|
|
// List commits with pagination and optional path filter (includes files changed).
// GET /repository/:id/commits?page=<1-based>&limit=<1..100>&path=<optional filter>
router.get('/repository/:id/commits', async (req, res) => {
  try {
    const { id } = req.params;
    const page = Math.max(1, parseInt((req.query.page || '1').toString(), 10));
    const limit = Math.min(100, Math.max(1, parseInt((req.query.limit || '20').toString(), 10)));
    const relPath = req.query.path ? req.query.path.toString() : '';

    const storageQ = `SELECT local_path FROM repository_storage WHERE repository_id = $1 ORDER BY created_at DESC LIMIT 1`;
    const storageRes = await database.query(storageQ, [id]);
    if (storageRes.rows.length === 0) {
      return res.status(404).json({ success: false, message: 'Local repository path not found' });
    }
    const localPath = storageRes.rows[0].local_path;
    const { execFileSync } = require('child_process');
    const fs = require('fs');
    const path = require('path');
    const opts = { encoding: 'utf8' };
    // execFileSync with an argument vector (no shell): the user-supplied
    // `path` filter cannot inject shell commands, and the '|' separators in
    // the pretty format stay literal instead of becoming shell pipes.
    const git = (args) => execFileSync('git', ['-C', localPath, ...args], opts);

    if (relPath && !fs.existsSync(path.join(localPath, relPath))) {
      // Missing path: report zero commits rather than erroring.
      return res.json({ success: true, data: { items: [], page, limit, total: 0, has_next: false } });
    }

    // Count total commits, optionally restricted to the path filter.
    const countArgs = ['rev-list', '--count', 'HEAD'];
    if (relPath) countArgs.push('--', relPath);
    let total = 0;
    try { total = parseInt(git(countArgs).trim(), 10) || 0; } catch { total = 0; }

    const skip = (page - 1) * limit;
    // Use record separator \x1e to delimit each commit block.
    const logArgs = [
      'log', '--date=iso', '--pretty=format:%x1e%H|%an|%ae|%ad|%s',
      '--name-status', '--numstat', '--no-color',
      '--skip', String(skip), '-n', String(limit)
    ];
    if (relPath) logArgs.push('--', relPath);
    let raw = '';
    try { raw = git(logArgs); } catch { raw = ''; }

    // A numstat field is entirely digits, or '-' for binary files.
    // (Fixes the old /^\d+|-$/u regex: without grouping, the alternation meant
    // "starts with digits OR ends with '-'", not "whole field is digits or '-'".)
    const NUMSTAT_FIELD = /^(\d+|-)$/;

    const blocks = raw.split('\x1e').map(b => b.trim()).filter(Boolean);
    const items = blocks.map(block => {
      const lines = block.split('\n').filter(Boolean);
      const header = lines.shift() || '';
      const [hash, author_name, author_email, committed_at, ...rest] = header.split('|');
      const message = rest.join('|');
      const fileMap = new Map();
      for (const ln of lines) {
        const parts = ln.split('\t');
        // numstat line: additions\tdeletions\tpath
        if (parts.length === 3 && NUMSTAT_FIELD.test(parts[0]) && NUMSTAT_FIELD.test(parts[1])) {
          const fpath = parts[2];
          const entry = fileMap.get(fpath) || { path: fpath };
          entry.additions = parts[0] === '-' ? null : parseInt(parts[0], 10);
          entry.deletions = parts[1] === '-' ? null : parseInt(parts[1], 10);
          fileMap.set(fpath, entry);
          continue;
        }
        // name-status line: M\tpath, or R100\told\tnew for renames (keep new path)
        if (parts.length >= 2) {
          const status = parts[0];
          const fpath = (status.startsWith('R') && parts.length >= 3) ? parts[2] : parts[1];
          const entry = fileMap.get(fpath) || { path: fpath };
          entry.change_type = status;
          fileMap.set(fpath, entry);
        }
      }
      const files = Array.from(fileMap.values());
      return { hash, short_hash: hash?.substring(0, 7), author_name, author_email, committed_at, message, files };
    });

    const has_next = skip + items.length < total;
    return res.json({ success: true, data: { items, page, limit, total, has_next } });
  } catch (error) {
    console.error('Error listing commits:', error);
    res.status(500).json({ success: false, message: error.message || 'Failed to list commits' });
  }
});
|
|
|
|
// Get a single commit by SHA with files changed.
// GET /repository/:id/commit/:sha — :sha must be a (possibly abbreviated) hex commit id.
router.get('/repository/:id/commit/:sha', async (req, res) => {
  try {
    const { id, sha } = req.params;
    // :sha is forwarded to a git invocation; accept only hex commit ids so it
    // cannot smuggle git options or shell syntax. (The old code interpolated
    // it straight into an execSync shell string — command injection.)
    if (!/^[0-9a-fA-F]{4,40}$/.test(sha)) {
      return res.status(400).json({ success: false, message: 'Invalid commit SHA' });
    }

    const storageQ = `SELECT local_path FROM repository_storage WHERE repository_id = $1 ORDER BY created_at DESC LIMIT 1`;
    const storageRes = await database.query(storageQ, [id]);
    if (storageRes.rows.length === 0) {
      return res.status(404).json({ success: false, message: 'Local repository path not found' });
    }
    const localPath = storageRes.rows[0].local_path;
    const { execFileSync } = require('child_process');
    const opts = { encoding: 'utf8' };
    // execFileSync (no shell): '|' in the format string stays literal.
    const git = (args) => execFileSync('git', ['-C', localPath, ...args], opts);

    const header = git(['show', '-s', '--format=%H|%an|%ae|%ad|%s', '--date=iso', sha]).trim();
    const [hash, author_name, author_email, committed_at, ...rest] = header.split('|');
    const message = rest.join('|');

    const filesRaw = git(['show', '--name-status', '--numstat', '--format=', sha]);
    // A numstat field is entirely digits, or '-' for binary files
    // (fixes the old ungrouped /^\d+|-$/ alternation).
    const NUMSTAT_FIELD = /^(\d+|-)$/;
    const fileMap = new Map();
    for (const ln of filesRaw.split('\n').filter(Boolean)) {
      const parts = ln.split('\t');
      // numstat line: additions\tdeletions\tpath
      if (parts.length === 3 && NUMSTAT_FIELD.test(parts[0]) && NUMSTAT_FIELD.test(parts[1])) {
        const fpath = parts[2];
        const entry = fileMap.get(fpath) || { path: fpath };
        entry.additions = parts[0] === '-' ? null : parseInt(parts[0], 10);
        entry.deletions = parts[1] === '-' ? null : parseInt(parts[1], 10);
        fileMap.set(fpath, entry);
        continue;
      }
      // name-status line: M\tpath, or R100\told\tnew for renames (keep new path)
      if (parts.length >= 2) {
        const status = parts[0];
        const fpath = (status.startsWith('R') && parts.length >= 3) ? parts[2] : parts[1];
        const entry = fileMap.get(fpath) || { path: fpath };
        entry.change_type = status;
        fileMap.set(fpath, entry);
      }
    }
    const files = Array.from(fileMap.values());
    return res.json({ success: true, data: { hash, short_hash: hash?.substring(0, 7), author_name, author_email, committed_at, message, files } });
  } catch (error) {
    console.error('Error getting commit by sha:', error);
    res.status(500).json({ success: false, message: error.message || 'Failed to get commit' });
  }
});
|
|
// Get repository diff between two SHAs (unified patch).
// GET /repository/:id/diff?from=<base sha>&to=<target sha>
// Defaults: from → last_synced_commit_sha, to → HEAD.
router.get('/repository/:id/diff', async (req, res) => {
  try {
    const { id } = req.params;
    const { from, to } = req.query; // (dropped the unused `path` query read)

    const repoQuery = 'SELECT * FROM all_repositories WHERE id = $1';
    const repoResult = await database.query(repoQuery, [id]);
    if (repoResult.rows.length === 0) {
      return res.status(404).json({ success: false, message: 'Repository not found' });
    }
    const record = repoResult.rows[0];
    const { owner, repo } = githubService.parseGitHubUrl(record.repository_url);
    // Always use stored branch_name to avoid mismatches like master/main
    const targetBranch = record.branch_name || 'main';
    // Compute the effective range once instead of repeating the fallbacks.
    const fromSha = from || record.last_synced_commit_sha;
    const toSha = to || 'HEAD';
    const patch = await githubService.getRepositoryDiff(owner, repo, targetBranch, fromSha, toSha);
    res.json({ success: true, data: { patch, from: fromSha, to: toSha } });
  } catch (error) {
    console.error('Error getting diff:', error);
    res.status(500).json({ success: false, message: error.message || 'Failed to get diff' });
  }
});
|
|
|
|
// Get list of changed files since a SHA.
// GET /repository/:id/changes?since=<sha>
// When `since` is omitted, falls back to the record's last_synced_commit_sha.
router.get('/repository/:id/changes', async (req, res) => {
  try {
    const { id } = req.params;
    const { since } = req.query;

    const repoResult = await database.query('SELECT * FROM all_repositories WHERE id = $1', [id]);
    const record = repoResult.rows[0];
    if (!record) {
      return res.status(404).json({ success: false, message: 'Repository not found' });
    }

    const parsed = githubService.parseGitHubUrl(record.repository_url);

    const sinceSha = since || record.last_synced_commit_sha;
    if (!sinceSha) {
      return res.status(400).json({ success: false, message: 'since SHA is required or must be available as last_synced_commit_sha' });
    }

    // Prefer the branch embedded in the URL; otherwise the stored branch name.
    const targetBranch = parsed.branch || record.branch_name;
    const changes = await githubService.getRepositoryChangesSince(parsed.owner, parsed.repo, targetBranch, sinceSha);
    res.json({ success: true, data: { since: sinceSha, changes } });
  } catch (error) {
    console.error('Error getting changes:', error);
    res.status(500).json({ success: false, message: error.message || 'Failed to get changes' });
  }
});
|
|
|
|
// Get repository information for a template.
// GET /template/:id/repository — returns the most recently attached repository
// for the template, joined with its local storage stats.
router.get('/template/:id/repository', async (req, res) => {
  try {
    const { id } = req.params;

    const query = `
      SELECT gr.*, rs.local_path, rs.storage_status, rs.total_files_count,
             rs.total_directories_count, rs.total_size_bytes, rs.download_completed_at
      FROM all_repositories gr
      LEFT JOIN repository_storage rs ON gr.id = rs.repository_id
      WHERE gr.template_id = $1
      ORDER BY gr.created_at DESC
      LIMIT 1
    `;
    const result = await database.query(query, [id]);

    const repository = result.rows[0];
    if (!repository) {
      return res.status(404).json({
        success: false,
        message: 'No repository found for this template'
      });
    }

    // jsonb columns arrive as objects, text columns as JSON strings;
    // normalize both to plain objects ({} for null/invalid JSON).
    const asObject = (value) => {
      if (value == null) return {};
      if (typeof value !== 'string') return value;
      try {
        return JSON.parse(value);
      } catch {
        return {};
      }
    };

    res.json({
      success: true,
      data: {
        ...repository,
        metadata: asObject(repository.metadata),
        codebase_analysis: asObject(repository.codebase_analysis)
      }
    });

  } catch (error) {
    console.error('Error fetching repository:', error);
    res.status(500).json({
      success: false,
      message: error.message || 'Failed to fetch repository'
    });
  }
});
|
|
|
|
// Get repository file structure
// GET /repository/:id/structure?path=<dir>
// Returns the immediate children (files + subdirectories) of one directory.
// Strategy: query the database first (repository_files / repository_directories);
// if that yields nothing, fall back to reading the cloned repo from local disk.
router.get('/repository/:id/structure', async (req, res) => {
  try {
    const { id } = req.params;
    const { path: directoryPath } = req.query;

    // Get repository info (existence check only — row content is not used)
    const repoQuery = 'SELECT * FROM all_repositories WHERE id = $1';
    const repoResult = await database.query(repoQuery, [id]);

    if (repoResult.rows.length === 0) {
      return res.status(404).json({
        success: false,
        message: 'Repository not found'
      });
    }

    let structure = [];

    // Try to get files and directories from database first
    try {
      // Get files in the current directory.
      // NOTE(review): this compares file->>'relative_path' to the directory
      // path itself — assumes relative_path stores the containing directory,
      // not the full file path; confirm against how repository_files is written.
      const filesQuery = `
        SELECT
          file->>'filename' as filename,
          file->>'relative_path' as relative_path,
          (file->>'file_size_bytes')::bigint as file_size_bytes
        FROM repository_files rf,
             jsonb_array_elements(rf.files) as file
        WHERE rf.repository_id = $1 AND file->>'relative_path' = $2
        ORDER BY file->>'filename'
      `;

      const filesResult = await database.query(filesQuery, [id, directoryPath || '']);
      const files = filesResult.rows.map(file => ({
        name: file.filename,
        type: 'file',
        path: file.relative_path,
        size: file.file_size_bytes || 0
      }));

      // Get subdirectories whose parent row matches the requested directory
      const dirsQuery = `
        SELECT rd.directory_name, rd.relative_path, rd.total_size_bytes
        FROM repository_directories rd
        WHERE rd.repository_id = $1 AND rd.parent_directory_id = (
          SELECT id FROM repository_directories
          WHERE repository_id = $1 AND relative_path = $2
          LIMIT 1
        )
        ORDER BY rd.directory_name
      `;

      const dirsResult = await database.query(dirsQuery, [id, directoryPath || '']);
      const directories = dirsResult.rows
        .filter(dir => dir.directory_name !== '.git') // Exclude .git folder
        .map(dir => ({
          name: dir.directory_name,
          type: 'directory',
          path: dir.relative_path,
          size: dir.total_size_bytes || 0
        }));

      // Combine files and directories (directories listed first)
      structure = [...directories, ...files];
    } catch (dbErr) {
      // Non-fatal: fall through to the filesystem fallback below
      console.warn('[structure] Database query failed, trying FS fallback:', dbErr?.message);
    }

    // Filesystem fallback when database has no entries
    if (!Array.isArray(structure) || structure.length === 0) {
      try {
        const storageQ = `SELECT local_path FROM repository_storage WHERE repository_id = $1 ORDER BY created_at DESC LIMIT 1`;
        const storageRes = await database.query(storageQ, [id]);
        if (storageRes.rows.length > 0) {
          const base = storageRes.rows[0].local_path;
          const fs = require('fs');
          const pth = require('path');

          // Walk `rel` one '/'-separated segment at a time, matching each
          // segment case-insensitively against actual directory entries.
          // Returns the resolved absolute path, or null if a segment is missing.
          const resolveCaseInsensitive = (baseDir, rel) => {
            if (!rel) return baseDir;
            const parts = rel.split('/').filter(Boolean);
            let cur = baseDir;
            for (const p of parts) {
              if (!fs.existsSync(cur)) return null;
              const entries = fs.readdirSync(cur);
              const match = entries.find(e => e.toLowerCase() === p.toLowerCase());
              if (!match) return null;
              cur = pth.join(cur, match);
            }
            return cur;
          };

          const absDir = resolveCaseInsensitive(base, directoryPath || '');
          if (absDir && fs.existsSync(absDir) && fs.statSync(absDir).isDirectory()) {
            const items = fs.readdirSync(absDir);
            structure = items
              .filter(name => name !== '.git') // Exclude .git folder
              .map(name => {
                const absChild = pth.join(absDir, name);
                const isDir = fs.statSync(absChild).isDirectory();
                // compute relative path from base, normalized to forward slashes
                const relPath = pth.relative(base, absChild).replace(/\\/g, '/');
                return {
                  name,
                  path: relPath,
                  type: isDir ? 'directory' : 'file'
                };
              }).sort((a, b) => {
                // directories first, then alphabetical
                if (a.type !== b.type) return a.type === 'directory' ? -1 : 1;
                return a.name.localeCompare(b.name);
              });
          }
        }
      } catch (fsErr) {
        // Non-fatal: an empty structure is returned below
        console.warn('[structure] FS fallback failed:', fsErr?.message);
      }
    }

    res.json({
      success: true,
      data: {
        repository_id: id,
        directory_path: directoryPath || '',
        structure: structure || []
      }
    });

  } catch (error) {
    console.error('Error fetching repository structure:', error);
    res.status(500).json({
      success: false,
      message: error.message || 'Failed to fetch repository structure'
    });
  }
});
|
|
|
|
// Get files in a directory
// GET /repository/:id/files?directory_path=<dir>
// Lists files for one directory via FileStorageService, after confirming
// the repository row exists.
router.get('/repository/:id/files', async (req, res) => {
  const { id } = req.params;
  const { directory_path = '' } = req.query;

  try {
    // Existence check — 404 if the repository row is missing
    const repoLookup = await database.query(
      'SELECT * FROM all_repositories WHERE id = $1',
      [id]
    );

    if (repoLookup.rows.length === 0) {
      return res.status(404).json({
        success: false,
        message: 'Repository not found'
      });
    }

    // Delegate the actual listing to the storage service
    const directoryListing = await fileStorageService.getDirectoryFiles(id, directory_path);

    res.json({
      success: true,
      data: {
        repository_id: id,
        directory_path: directory_path,
        files: directoryListing
      }
    });
  } catch (error) {
    console.error('Error fetching directory files:', error);
    res.status(500).json({
      success: false,
      message: error.message || 'Failed to fetch directory files'
    });
  }
});
|
|
|
|
// Resolve repository path (case-insensitive path resolution)
// GET /repository/:id/resolve-path?file_path=<rel>
// Resolves a user-supplied relative path against the repository's local
// storage directory, retrying with case-insensitive segment matching when
// the exact path does not exist. Reports existence and directory-ness.
// Security: file_path is untrusted — the resolved path is verified to stay
// inside the repository's base directory (rejects `../` traversal).
router.get('/repository/:id/resolve-path', async (req, res) => {
  try {
    const { id } = req.params;
    const { file_path } = req.query;

    if (!file_path) {
      return res.status(400).json({
        success: false,
        message: 'file_path query parameter is required'
      });
    }

    // Get repository storage path (latest storage row wins)
    const storageQ = `SELECT local_path FROM repository_storage WHERE repository_id = $1 ORDER BY created_at DESC LIMIT 1`;
    const storageRes = await database.query(storageQ, [id]);

    if (storageRes.rows.length === 0) {
      return res.status(404).json({
        success: false,
        message: 'Repository not stored locally'
      });
    }

    const localBase = storageRes.rows[0].local_path;
    const pathModule = require('path');
    const fs = require('fs');

    // Helper: walk `rel` segment-by-segment, matching each segment
    // case-insensitively against real directory entries; null on miss.
    const resolveCaseInsensitive = (base, rel) => {
      const parts = rel.split('/').filter(Boolean);
      let cur = base;
      for (const p of parts) {
        if (!fs.existsSync(cur)) return null;
        const entries = fs.readdirSync(cur);
        const match = entries.find(e => e.toLowerCase() === p.toLowerCase());
        if (!match) return null;
        cur = pathModule.join(cur, match);
      }
      return cur;
    };

    // Helper: true iff `candidate` is the base directory or inside it.
    const isWithinBase = (candidate) => {
      const rel = pathModule.relative(pathModule.resolve(localBase), pathModule.resolve(candidate));
      return rel === '' || (!rel.startsWith('..') && !pathModule.isAbsolute(rel));
    };

    let absPath = pathModule.join(localBase, file_path);
    let exists = fs.existsSync(absPath);
    let isDirectory = false;

    if (!exists) {
      // Retry with case-insensitive resolution
      absPath = resolveCaseInsensitive(localBase, file_path);
      if (absPath) {
        exists = fs.existsSync(absPath);
        if (exists) {
          isDirectory = fs.statSync(absPath).isDirectory();
        }
      }
    } else {
      isDirectory = fs.statSync(absPath).isDirectory();
    }

    // Path-traversal guard: never report on paths outside the repo base
    if (absPath && !isWithinBase(absPath)) {
      return res.status(400).json({
        success: false,
        message: 'Invalid file path'
      });
    }

    res.json({
      success: true,
      data: {
        repository_id: id,
        local_path: localBase,
        requested_file_path: file_path,
        resolved_absolute_path: absPath,
        exists: exists,
        is_directory: isDirectory
      }
    });

  } catch (error) {
    console.error('Error resolving repository path:', error);
    res.status(500).json({
      success: false,
      message: error.message || 'Failed to resolve repository path'
    });
  }
});
|
|
|
|
// Get file content
// GET /repository/:id/file-content?file_path=<rel>
// Returns file metadata + content. Metadata comes from repository_files when
// a matching row exists; otherwise a filesystem fallback synthesizes it
// (basic binary sniffing via NUL bytes, relaxed for known text extensions).
// Content is always read from the local clone under repository_storage.
// Security: file_path is untrusted — the resolved path is verified to stay
// inside the repository's base directory (rejects `../` traversal).
router.get('/repository/:id/file-content', async (req, res) => {
  try {
    const { id } = req.params;
    const { file_path } = req.query;

    if (!file_path) {
      return res.status(400).json({
        success: false,
        message: 'File path is required'
      });
    }

    // Look up file metadata in repository_files (may be absent)
    const query = `
      SELECT
        file->>'filename' as filename,
        file->>'file_extension' as file_extension,
        file->>'relative_path' as relative_path,
        file->>'absolute_path' as absolute_path,
        (file->>'file_size_bytes')::bigint as file_size_bytes,
        (file->>'is_binary')::boolean as is_binary,
        file->>'mime_type' as mime_type
      FROM repository_files rf,
           jsonb_array_elements(rf.files) as file
      WHERE rf.repository_id = $1 AND file->>'relative_path' = $2
    `;
    const result = await database.query(query, [id, file_path]);
    const dbFile = result.rows.length > 0 ? result.rows[0] : null;

    // Locate the on-disk clone (latest storage row wins)
    const storageQ = `SELECT local_path FROM repository_storage WHERE repository_id = $1 ORDER BY created_at DESC LIMIT 1`;
    const storageRes = await database.query(storageQ, [id]);
    if (storageRes.rows.length === 0) {
      return res.status(404).json({ success: false, message: 'File not found (no storage path)' });
    }

    const localBase = storageRes.rows[0].local_path;
    const pathModule = require('path');
    const fs = require('fs');

    // Helper: walk `rel` segment-by-segment, matching each segment
    // case-insensitively against real directory entries; null on miss.
    const resolveCaseInsensitive = (base, rel) => {
      const parts = rel.split('/').filter(Boolean);
      let cur = base;
      for (const p of parts) {
        if (!fs.existsSync(cur)) return null;
        const entries = fs.readdirSync(cur);
        const match = entries.find(e => e.toLowerCase() === p.toLowerCase());
        if (!match) return null;
        cur = pathModule.join(cur, match);
      }
      return cur;
    };

    // Helper: true iff `candidate` is the base directory or inside it.
    const isWithinBase = (candidate) => {
      const rel = pathModule.relative(pathModule.resolve(localBase), pathModule.resolve(candidate));
      return rel === '' || (!rel.startsWith('..') && !pathModule.isAbsolute(rel));
    };

    let absPath = pathModule.join(localBase, file_path);
    if (!fs.existsSync(absPath)) {
      absPath = resolveCaseInsensitive(localBase, file_path);
    }

    // Path-traversal guard: never serve content from outside the repo base
    if (absPath && !isWithinBase(absPath)) {
      return res.status(400).json({ success: false, message: 'Invalid file path' });
    }

    if (!absPath || !fs.existsSync(absPath)) {
      // Preserve the original per-branch 404 messages
      return res.status(404).json({
        success: false,
        message: dbFile ? 'File not found on filesystem' : 'File not found'
      });
    }

    // Disallow directories for file-content (original per-branch messages kept)
    const stat = fs.statSync(absPath);
    if (stat.isDirectory()) {
      return res.status(400).json({
        success: false,
        message: dbFile ? 'Path is a directory, not a file' : 'Requested path is a directory'
      });
    }

    if (dbFile) {
      // Database-backed branch: trust the stored metadata, read text content
      let content = null;
      let preview = null;

      if (!dbFile.is_binary) {
        try {
          content = fs.readFileSync(absPath, 'utf8');
          // Create preview (first 500 characters)
          preview = content.length > 500 ? content.substring(0, 500) + '...' : content;
        } catch (readErr) {
          // Best-effort: metadata is still returned without content
          console.warn('Failed to read file content:', readErr.message);
        }
      }

      return res.json({
        success: true,
        data: {
          file_info: {
            id: dbFile.id,
            filename: dbFile.filename,
            file_extension: dbFile.file_extension,
            relative_path: dbFile.relative_path,
            file_size_bytes: dbFile.file_size_bytes,
            mime_type: dbFile.mime_type,
            is_binary: dbFile.is_binary,
            language_detected: dbFile.language_detected,
            line_count: dbFile.line_count,
            char_count: dbFile.char_count
          },
          content: content,
          preview: preview
        }
      });
    }

    // Filesystem fallback: synthesize metadata from the file itself.
    // Basic binary detection: any NUL byte => binary...
    let buffer = fs.readFileSync(absPath);
    let hasNull = buffer.includes(0);
    let isBinary = hasNull;
    const filename = pathModule.basename(absPath);
    const ext = pathModule.extname(absPath).replace(/^\./, '') || null;
    // ...relaxed for well-known text extensions
    const textExts = new Set(['txt','md','markdown','json','yml','yaml','xml','csv','tsv','py','js','jsx','ts','tsx','java','go','rb','rs','php','c','h','cc','hh','cpp','hpp','cs','kt','swift','sql','ini','env','sh','bash','zsh','bat','ps1','toml','gradle','makefile','dockerfile']);
    if (ext && textExts.has(ext.toLowerCase())) {
      isBinary = false;
      hasNull = false;
    }
    const contentText = isBinary ? null : buffer.toString('utf8');

    return res.json({
      success: true,
      data: {
        file_info: {
          id: null,
          filename: filename,
          file_extension: ext,
          relative_path: file_path,
          file_size_bytes: stat.size,
          mime_type: null,
          is_binary: isBinary,
          language_detected: null,
          line_count: contentText ? contentText.split(/\r?\n/).length : null,
          char_count: contentText ? contentText.length : stat.size
        },
        content: contentText,
        preview: contentText ? contentText.slice(0, 500) : null
      }
    });

  } catch (error) {
    console.error('Error fetching file content:', error);
    res.status(500).json({
      success: false,
      message: error.message || 'Failed to fetch file content'
    });
  }
});
|
|
// GitHub-like UI endpoint - Complete UI data for frontend
|
|
|
|
// Helper function to get file tree from local repository path
/**
 * Build the nested file-tree payload for the tree view of a repository.
 *
 * Reads the repository's completed local clone (repository_storage) and
 * recursively walks it, skipping `.git`.
 *
 * @param {string} repositoryId - Repository UUID/id used to look up storage.
 * @param {object} [options] - Currently unused (commit_sha/path are accepted
 *   by the caller but not applied here — TODO confirm intended behavior).
 * @returns {Promise<object>} `{ left_panel: { file_tree } }` where file_tree
 *   maps entry names to `{ type, path, children? }` nodes.
 * @throws {Error} If storage is missing/incomplete or the local path is gone.
 */
async function handleTreeView(repositoryId, options = {}) {
  const fs = require('fs');
  const pathModule = require('path');

  // Get repository storage path (only fully-synced clones are usable)
  const storageQuery = `
    SELECT local_path FROM repository_storage
    WHERE repository_id = $1 AND storage_status = 'completed'
  `;
  const result = await database.query(storageQuery, [repositoryId]);

  if (result.rows.length === 0) {
    throw new Error('Repository storage not found or not completed');
  }

  const localPath = result.rows[0].local_path;

  if (!fs.existsSync(localPath)) {
    throw new Error('Repository local path does not exist');
  }

  // Recursively build file tree.
  // Relative paths are assembled with explicit '/' (not pathModule.join) so
  // they are forward-slash separated on every platform — consistent with the
  // other endpoints in this file, which normalize via .replace(/\\/g, '/').
  function buildFileTree(dir, relativePath = '') {
    const items = fs.readdirSync(dir, { withFileTypes: true });
    const tree = {};

    for (const item of items) {
      // Skip .git directory
      if (item.name === '.git') continue;

      const itemPath = relativePath ? `${relativePath}/${item.name}` : item.name;
      const fullPath = pathModule.join(dir, item.name);

      if (item.isDirectory()) {
        tree[item.name] = {
          type: 'directory',
          path: itemPath,
          children: buildFileTree(fullPath, itemPath)
        };
      } else {
        tree[item.name] = {
          type: 'file',
          path: itemPath
        };
      }
    }

    return tree;
  }

  const fileTree = buildFileTree(localPath);

  return {
    left_panel: {
      file_tree: fileTree
    }
  };
}
|
|
|
|
// GET /repository/:id/ui-view
// Dispatches to one of three view builders (commit / tree / blob) and wraps
// the result with repository metadata for the frontend's GitHub-like UI.
router.get('/repository/:id/ui-view', async (req, res) => {
  try {
    const { id } = req.params;
    const {
      view_type = 'commit',
      commit_sha = 'latest',
      path = '',
      file_path = '',
      base_commit = '',
      target_commit = ''
    } = req.query;

    // Validate repository exists and pull its sync status in one query
    const repoQuery = `
      SELECT gr.*, rs.storage_status, rs.local_path
      FROM all_repositories gr
      LEFT JOIN repository_storage rs ON gr.id = rs.repository_id
      WHERE gr.id = $1
    `;
    const repoResult = await database.query(repoQuery, [id]);

    if (repoResult.rows.length === 0) {
      return res.status(404).json({
        success: false,
        message: 'Repository not found'
      });
    }

    const repository = repoResult.rows[0];

    // UI views require a fully-synced local clone
    if (repository.storage_status !== 'completed') {
      return res.status(400).json({
        success: false,
        message: 'Repository not fully synced. Please wait for sync to complete.',
        sync_status: repository.storage_status
      });
    }

    // Dispatch table instead of a switch — one builder per view type
    const viewBuilders = {
      commit: () => handleCommitView(id, { commit_sha, base_commit, target_commit }),
      tree: () => handleTreeView(id, { commit_sha, path }),
      blob: () => handleBlobView(id, { file_path, commit_sha })
    };

    const buildView = viewBuilders[view_type];
    if (!buildView) {
      return res.status(400).json({
        success: false,
        message: 'Invalid view_type. Must be: commit, tree, or blob'
      });
    }

    const uiData = await buildView();

    res.json({
      success: true,
      data: {
        repository_info: {
          id: repository.id,
          name: repository.repository_name,
          owner: repository.owner_name,
          branch: repository.branch_name,
          commit_sha: commit_sha,
          last_synced: repository.last_synced_at,
          repository_url: repository.repository_url
        },
        ui_data: uiData
      }
    });

  } catch (error) {
    console.error('Error fetching UI view:', error);
    res.status(500).json({
      success: false,
      message: error.message || 'Failed to fetch UI view'
    });
  }
});
|
|
|
|
|
|
|
|
|
|
|
|
// Search repository files
// GET /repository/:id/search?q=<query>
// Full-text search across the repository's stored file content.
router.get('/repository/:id/search', async (req, res) => {
  const { id } = req.params;
  const { q: query } = req.query;

  if (!query) {
    return res.status(400).json({
      success: false,
      message: 'Search query is required'
    });
  }

  try {
    const matches = await fileStorageService.searchFileContent(id, query);

    res.json({
      success: true,
      data: {
        repository_id: id,
        search_query: query,
        results: matches,
        total_results: matches.length
      }
    });
  } catch (error) {
    console.error('Error searching repository:', error);
    res.status(500).json({
      success: false,
      message: error.message || 'Failed to search repository'
    });
  }
});
|
|
|
|
// List all repositories for a template
// GET /template/:id/repositories
// Returns every repository row linked to the template, joined with its
// storage status. metadata/codebase_analysis are jsonb columns: node-postgres
// may hand them back as objects already, so they are normalized with the same
// parseMaybe helper used by the other list endpoints (raw JSON.parse would
// throw on a non-string and 500 the route).
router.get('/template/:id/repositories', async (req, res) => {
  try {
    const { id } = req.params;

    const query = `
      SELECT gr.*, rs.local_path, rs.storage_status, rs.total_files_count,
             rs.total_directories_count, rs.total_size_bytes, rs.download_completed_at
      FROM all_repositories gr
      LEFT JOIN repository_storage rs ON gr.id = rs.repository_id
      WHERE gr.template_id = $1
      ORDER BY gr.created_at DESC
    `;

    const result = await database.query(query, [id]);

    // Normalize a jsonb value: object passes through, string is parsed,
    // null/invalid becomes {}
    const parseMaybe = (v) => {
      if (v == null) return {};
      if (typeof v === 'string') { try { return JSON.parse(v); } catch { return {}; } }
      return v; // already object from jsonb
    };

    const repositories = result.rows.map(repo => ({
      ...repo,
      metadata: parseMaybe(repo.metadata),
      codebase_analysis: parseMaybe(repo.codebase_analysis)
    }));

    res.json({
      success: true,
      data: repositories
    });

  } catch (error) {
    console.error('Error fetching repositories:', error);
    res.status(500).json({
      success: false,
      message: error.message || 'Failed to fetch repositories'
    });
  }
});
|
|
|
|
// Download repository files (legacy endpoint for backward compatibility)
// POST /download  { repository_url, branch_name? }
// Branch precedence: branch embedded in the URL > explicit branch_name > 'main'.
router.post('/download', async (req, res) => {
  try {
    const { repository_url, branch_name } = req.body;

    // Guard clause: the URL is the only required input
    if (!repository_url) {
      return res.status(400).json({
        success: false,
        message: 'Repository URL is required'
      });
    }

    const { owner, repo, branch } = githubService.parseGitHubUrl(repository_url);
    const targetBranch = branch || branch_name || 'main';

    const downloadResult = await githubService.downloadRepository(owner, repo, targetBranch);

    if (!downloadResult.success) {
      return res.status(500).json({
        success: false,
        message: 'Failed to download repository',
        error: downloadResult.error
      });
    }

    res.json({
      success: true,
      message: 'Repository downloaded successfully',
      data: downloadResult
    });

  } catch (error) {
    console.error('Error downloading repository:', error);
    res.status(500).json({
      success: false,
      message: error.message || 'Failed to download repository'
    });
  }
});
|
|
|
|
// Re-sync repository (re-download and update database)
// POST /repository/:id/sync
// Wipes the current local storage, re-downloads the repo (git clone first,
// GitHub API as fallback), and records the resulting sync_status.
router.post('/repository/:id/sync', async (req, res) => {
  const { id } = req.params;

  try {
    // Load the repository row; 404 if it does not exist
    const repoLookup = await database.query(
      'SELECT * FROM all_repositories WHERE id = $1',
      [id]
    );

    if (repoLookup.rows.length === 0) {
      return res.status(404).json({
        success: false,
        message: 'Repository not found'
      });
    }

    const repoRow = repoLookup.rows[0];
    const { owner, repo, branch } = githubService.parseGitHubUrl(repoRow.repository_url);
    const effectiveBranch = branch || repoRow.branch_name;

    // Clean up existing storage before re-downloading
    await githubService.cleanupRepositoryStorage(id);

    // Re-sync with fallback (git first, API fallback)
    const syncResult = await githubService.syncRepositoryWithFallback(
      owner, repo, effectiveBranch, id
    );

    // Persist the outcome on the repository row
    const newStatus = syncResult.success ? 'synced' : 'error';
    await database.query(
      'UPDATE all_repositories SET sync_status = $1, updated_at = NOW() WHERE id = $2',
      [newStatus, id]
    );

    res.json({
      success: syncResult.success,
      message: syncResult.success ? 'Repository synced successfully' : 'Failed to sync repository',
      data: syncResult
    });

  } catch (error) {
    console.error('Error syncing repository:', error);
    res.status(500).json({
      success: false,
      message: error.message || 'Failed to sync repository'
    });
  }
});
|
|
|
|
// Remove repository from template
// DELETE /repository/:id
// Deletes the repository's local storage and its database row. The error
// message previously said 'Failed to fetch repositories' (copy-paste from the
// list endpoints) — corrected to describe the removal failure.
router.delete('/repository/:id', async (req, res) => {
  try {
    const { id } = req.params;

    // Get repository info before deletion (needed for the response payload)
    const getQuery = 'SELECT * FROM all_repositories WHERE id = $1';
    const getResult = await database.query(getQuery, [id]);

    if (getResult.rows.length === 0) {
      return res.status(404).json({
        success: false,
        message: 'Repository not found'
      });
    }

    const repository = getResult.rows[0];

    // Clean up file storage first, then delete the row
    await githubService.cleanupRepositoryStorage(id);

    // Delete repository record
    await database.query(
      'DELETE FROM all_repositories WHERE id = $1',
      [id]
    );

    res.json({
      success: true,
      message: 'Repository removed successfully',
      data: {
        removed_repository: repository.repository_name,
        template_id: repository.template_id
      }
    });

  } catch (error) {
    console.error('Error removing repository:', error);
    res.status(500).json({
      success: false,
      message: error.message || 'Failed to remove repository'
    });
  }
});
|
|
|
|
// List all repositories for a user (by path param user_id)
// GET /user/:user_id/repositories
router.get('/user/:user_id/repositories', async (req, res) => {
  const { user_id } = req.params;

  try {
    const listQuery = `
      SELECT gr.*, rs.local_path, rs.storage_status, rs.total_files_count,
             rs.total_directories_count, rs.total_size_bytes, rs.download_completed_at
      FROM all_repositories gr
      LEFT JOIN repository_storage rs ON gr.id = rs.repository_id
      WHERE gr.user_id = $1
      ORDER BY gr.created_at DESC
    `;

    const queryResult = await database.query(listQuery, [user_id]);

    // jsonb columns may arrive as objects or JSON strings; normalize either
    // form to a plain object, defaulting to {} on null or bad JSON.
    const toObject = (value) => {
      if (value == null) return {};
      if (typeof value !== 'string') return value; // already object from jsonb
      try {
        return JSON.parse(value);
      } catch {
        return {};
      }
    };

    const repositories = queryResult.rows.map((row) => {
      return {
        ...row,
        metadata: toObject(row.metadata),
        codebase_analysis: toObject(row.codebase_analysis)
      };
    });

    res.json({
      success: true,
      data: repositories
    });
  } catch (error) {
    console.error('Error fetching repositories:', error);
    res.status(500).json({
      success: false,
      message: error.message || 'Failed to fetch repositories'
    });
  }
});
|
|
|
|
module.exports = router; |