backend changes

Chandini 2025-09-17 11:23:55 +05:30
parent fdbcdcb338
commit f3077d53a7
7 changed files with 228 additions and 38 deletions

View File

@@ -41,7 +41,6 @@ services:
       retries: 5
       start_period: 30s
   mongodb:
     image: mongo:7
     container_name: pipeline_mongodb
@@ -89,6 +88,7 @@ services:
     working_dir: /app
     volumes:
       - ./:/app
+      - migration_state:/tmp  # Persistent volume for migration state
     environment:
       - POSTGRES_HOST=postgres
       - POSTGRES_PORT=5432
@@ -100,6 +100,7 @@ services:
       - REDIS_PASSWORD=redis_secure_2024
       - NODE_ENV=development
       - DATABASE_URL=postgresql://pipeline_admin:secure_pipeline_2024@postgres:5432/dev_pipeline
+      - ALLOW_DESTRUCTIVE_MIGRATIONS=false  # Safety flag for destructive operations
     entrypoint: ["/bin/sh", "-c", "chmod +x ./scripts/migrate-all.sh && ./scripts/migrate-all.sh"]
     depends_on:
       postgres:
@@ -535,7 +536,6 @@ services:
       start_period: 40s
     restart: unless-stopped
   template-manager:
     build: ./services/template-manager
     container_name: pipeline_template_manager
@@ -571,7 +571,6 @@ services:
       start_period: 40s
     restart: unless-stopped
   # AI Mockup / Wireframe Generation Service
   ai-mockup-service:
     build: ./services/ai-mockup-service
@@ -793,6 +792,8 @@ volumes:
     driver: local
   api_gateway_logs:
     driver: local
+  migration_state:
+    driver: local

 # =====================================
 # Networks
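
The new migration_state volume is what lets the completion marker written under /tmp survive container restarts, and ALLOW_DESTRUCTIVE_MIGRATIONS ships disabled. A minimal sketch for working with both from the host; the service name `migrations` and the volume's `pipeline` project prefix are assumptions, since neither appears in these hunks:

# List the named volume that now backs /tmp in the migration container
docker volume ls | grep migration_state

# Inspect where the volume lives on the host ("pipeline" prefix is assumed)
docker volume inspect pipeline_migration_state

# One-off run that permits DROP-containing migrations; "migrations" is a
# hypothetical service name for the container running migrate-all.sh
docker compose run --rm -e ALLOW_DESTRUCTIVE_MIGRATIONS=true migrations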

View File

@@ -46,6 +46,14 @@ if [ ${#missing_vars[@]} -gt 0 ]; then
   exit 1
 fi

+# Check if migrations have already been completed successfully
+MIGRATION_MARKER="/tmp/migrations-completed"
+if [ -f "${MIGRATION_MARKER}" ]; then
+  log "✅ Migrations already completed successfully (marker file exists)"
+  log "To force re-run migrations, delete: ${MIGRATION_MARKER}"
+  exit 0
+fi
+
 # Track failed services
 failed_services=()
@@ -95,4 +103,7 @@ if [ ${#failed_services[@]} -gt 0 ]; then
   exit 1
 else
   log "✅ All migrations completed successfully"
+  # Create marker file to indicate successful completion
+  touch "${MIGRATION_MARKER}"
+  log "📝 Created migration completion marker: ${MIGRATION_MARKER}"
 fi
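
With the marker in place, repeated `docker compose up` runs exit early instead of replaying every service's migrations. A sketch of forcing a re-run without the reset utility, assuming the same hypothetical `migrations` service name as above (the `--entrypoint` override is needed because the service's entrypoint is hard-wired to migrate-all.sh):

# Delete the marker inside the persistent volume, then bring the service up again
docker compose run --rm --entrypoint /bin/sh migrations -c 'rm -f /tmp/migrations-completed'
docker compose up migrations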

View File

@@ -0,0 +1,59 @@
+#!/usr/bin/env bash
+set -euo pipefail
+
+# ========================================
+# MIGRATION RESET UTILITY SCRIPT
+# ========================================
+
+log() {
+  echo "[$(date '+%Y-%m-%d %H:%M:%S')] $*"
+}
+
+log "🔄 Migration Reset Utility"
+log "This script will reset migration state to allow re-running migrations"
+
+# Check if DATABASE_URL is set
+if [ -z "${DATABASE_URL:-}" ]; then
+  log "ERROR: DATABASE_URL environment variable is required"
+  exit 1
+fi
+
+# Get confirmation from user
+echo ""
+echo "⚠️  WARNING: This will:"
+echo "   - Clear the schema_migrations table"
+echo "   - Remove the migration completion marker"
+echo "   - Allow migrations to run again on next docker compose up"
+echo ""
+echo "   This will NOT delete your actual data tables."
+echo ""
+read -p "Are you sure you want to proceed? (y/N): " -n 1 -r
+echo
+if [[ ! $REPLY =~ ^[Yy]$ ]]; then
+  log "Operation cancelled"
+  exit 0
+fi
+
+log "🗑️  Clearing migration state..."
+
+# Connect to database and clear migration tracking
+psql "${DATABASE_URL}" -c "
+  DROP TABLE IF EXISTS schema_migrations;
+  SELECT 'Migration tracking table dropped' as status;
+" || {
+  log "ERROR: Failed to clear database migration state"
+  exit 1
+}
+
+# Remove migration marker file
+MIGRATION_MARKER="/tmp/migrations-completed"
+if [ -f "${MIGRATION_MARKER}" ]; then
+  rm -f "${MIGRATION_MARKER}"
+  log "📝 Removed migration completion marker"
+else
+  log "📝 Migration completion marker not found (already clean)"
+fi
+
+log "✅ Migration state reset complete!"
+log "💡 Next 'docker compose up' will re-run all migrations"
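
Because the reset utility talks to Postgres through psql and DATABASE_URL directly, it has to run somewhere with the psql client and network access to the database. A usage sketch; the path scripts/reset-migrations.sh is a guess (the new file's name is not shown in this view), and the URL swaps the compose hostname `postgres` for `localhost` for running outside the compose network:

# Point at the database from the host, then run the new utility
export DATABASE_URL='postgresql://pipeline_admin:secure_pipeline_2024@localhost:5432/dev_pipeline'
./scripts/reset-migrations.sh   # hypothetical path for the script above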

View File

@@ -3,12 +3,41 @@ const fs = require('fs');
 const path = require('path');
 const database = require('../config/database');

+async function createMigrationsTable() {
+  await database.query(`
+    CREATE TABLE IF NOT EXISTS schema_migrations (
+      version VARCHAR(255) PRIMARY KEY,
+      applied_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+      service VARCHAR(100) DEFAULT 'template-manager'
+    )
+  `);
+}
+
+async function isMigrationApplied(version) {
+  const result = await database.query(
+    'SELECT version FROM schema_migrations WHERE version = $1 AND service = $2',
+    [version, 'template-manager']
+  );
+  return result.rows.length > 0;
+}
+
+async function markMigrationApplied(version) {
+  await database.query(
+    'INSERT INTO schema_migrations (version, service) VALUES ($1, $2) ON CONFLICT (version) DO NOTHING',
+    [version, 'template-manager']
+  );
+}
+
 async function runMigrations() {
-  console.log('🚀 Starting Template Manager database migration...');
+  console.log('🚀 Starting template-manager database migrations...');

   try {
+    // Create migrations tracking table first
+    await createMigrationsTable();
+    console.log('✅ Migration tracking table ready');
+
     // Get all migration files in order
-    let migrationFiles = [
+    const migrationFiles = [
       '001_initial_schema.sql',
       '002_admin_approval_workflow.sql',
       '003_custom_templates.sql',
@@ -19,27 +48,22 @@ async function runMigrations() {
       '008_feature_business_rules.sql'
     ];

-    // Safety: if core tables already exist, skip the destructive 001 file
-    try {
-      const existing = await database.query(`
-        SELECT table_name FROM information_schema.tables
-        WHERE table_schema = 'public' AND table_name IN ('templates','template_features')
-      `);
-      const hasCoreTables = existing.rows && existing.rows.length >= 1;
-      if (hasCoreTables) {
-        migrationFiles = migrationFiles.filter((f) => f !== '001_initial_schema.sql');
-        console.log('⚠️ Core tables detected; skipping 001_initial_schema.sql to avoid destructive drops.');
-      }
-    } catch (probeErr) {
-      console.warn('Could not probe existing tables; proceeding with full migration list:', probeErr.message);
-    }
+    let appliedCount = 0;
+    let skippedCount = 0;

     for (const migrationFile of migrationFiles) {
       const migrationPath = path.join(__dirname, migrationFile);

       // Check if migration file exists
       if (!fs.existsSync(migrationPath)) {
-        console.log(`Migration file not found: ${migrationFile}`);
+        console.log(`⚠️ Migration file not found: ${migrationFile}`);
         continue;
       }

+      // Check if migration was already applied
+      if (await isMigrationApplied(migrationFile)) {
+        console.log(`⏭️ Migration ${migrationFile} already applied, skipping...`);
+        skippedCount++;
+        continue;
+      }
@@ -50,18 +74,21 @@ async function runMigrations() {
       const allowDestructive = String(process.env.ALLOW_DESTRUCTIVE_MIGRATIONS || '').toLowerCase() === 'true';
       if (containsDrop && !allowDestructive) {
         console.log(`⏭️ Skipping potentially destructive migration (set ALLOW_DESTRUCTIVE_MIGRATIONS=true to run): ${migrationFile}`);
+        skippedCount++;
         continue;
       }

-      console.log(`Running migration: ${migrationFile}`);
+      console.log(`📄 Running migration: ${migrationFile}`);

       // Execute the migration
       await database.query(migrationSQL);
+      await markMigrationApplied(migrationFile);

-      console.log(`Migration ${migrationFile} completed successfully!`);
+      console.log(`✅ Migration ${migrationFile} completed!`);
+      appliedCount++;
     }

-    console.log('All migrations completed successfully!');
+    console.log(`📊 Migration summary: ${appliedCount} applied, ${skippedCount} skipped`);

     // Verify tables were created
     const result = await database.query(`
@@ -72,10 +99,11 @@ async function runMigrations() {
       ORDER BY table_name
     `);

-    console.log('Verified tables:', result.rows.map(row => row.table_name));
+    console.log('🔍 Verified tables:', result.rows.map(row => row.table_name));
   } catch (error) {
-    console.error('Migration failed:', error.message);
+    console.error('❌ Migration failed:', error.message);
+    console.error('📍 Error details:', error);
     process.exit(1);
   } finally {
     await database.close();
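
Each applied file is now recorded once per service in schema_migrations, which is what replaces the old table-probing heuristic and makes re-runs cheap no-ops. A quick audit sketch from any psql session pointed at the same DATABASE_URL:

# Show which template-manager migrations are recorded and when they ran
psql "$DATABASE_URL" -c \
  "SELECT version, applied_at FROM schema_migrations WHERE service = 'template-manager' ORDER BY applied_at;"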

View File

@@ -65,24 +65,72 @@ class Feature {
     return result.rowCount > 0
   }

-  // Get all features for a template
+  // Get all features for a template (from both template_features and custom_features tables)
   static async getByTemplateId(templateId) {
-    const query = `
-      SELECT * FROM template_features
-      WHERE template_id = $1
+    // Get features from template_features table
+    const templateFeaturesQuery = `
+      SELECT
+        tf.*,
+        fbr.business_rules AS additional_business_rules
+      FROM template_features tf
+      LEFT JOIN feature_business_rules fbr
+        ON tf.template_id = fbr.template_id
+        AND (
+          fbr.feature_id = (tf.id::text)
+          OR fbr.feature_id = tf.feature_id
+        )
+      WHERE tf.template_id = $1
       ORDER BY
-        CASE feature_type
+        CASE tf.feature_type
           WHEN 'essential' THEN 1
           WHEN 'suggested' THEN 2
           WHEN 'custom' THEN 3
         END,
-        display_order,
-        usage_count DESC,
-        name
+        tf.display_order,
+        tf.usage_count DESC,
+        tf.name
     `;
-    const result = await database.query(query, [templateId]);
-    return result.rows.map(row => new Feature(row));
+    const templateFeaturesResult = await database.query(templateFeaturesQuery, [templateId]);
+    const templateFeatures = templateFeaturesResult.rows;
+
+    // Get custom features from custom_features table
+    const customFeaturesQuery = `
+      SELECT
+        cf.id,
+        cf.template_id,
+        cf.name,
+        cf.description,
+        cf.complexity,
+        cf.business_rules,
+        cf.technical_requirements,
+        'custom' as feature_type,
+        999 as display_order,
+        cf.usage_count,
+        0 as user_rating,
+        false as is_default,
+        true as created_by_user,
+        cf.created_at,
+        cf.updated_at,
+        fbr.business_rules as additional_business_rules
+      FROM custom_features cf
+      LEFT JOIN feature_business_rules fbr
+        ON cf.template_id = fbr.template_id
+        AND (
+          fbr.feature_id = (cf.id::text)
+          OR fbr.feature_id = ('custom_' || cf.id::text)
+        )
+      WHERE cf.template_id = $1
+      ORDER BY cf.created_at DESC
+    `;
+    const customFeaturesResult = await database.query(customFeaturesQuery, [templateId]);
+    const customFeatures = customFeaturesResult.rows;
+
+    // Combine both types of features
+    const allFeatures = [...templateFeatures, ...customFeatures];
+    return allFeatures.map(row => new Feature(row));
   }

   // Get popular features across all templates
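
getByTemplateId now merges two sources: rows from template_features (ordered by type, display order, and usage) followed by rows from custom_features coerced into the same column shape, each left-joined against feature_business_rules under two possible feature_id spellings. A psql sketch for spot-checking one template against both tables; `<template-id>` is a placeholder standing in for the $1 parameter:

psql "$DATABASE_URL" <<'SQL'
-- Row counts per source for one template; replace <template-id> with a real id
SELECT 'template_features' AS source, count(*) FROM template_features WHERE template_id = '<template-id>'
UNION ALL
SELECT 'custom_features' AS source, count(*) FROM custom_features WHERE template_id = '<template-id>';
SQL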

View File

@@ -73,8 +73,33 @@ const fs = require('fs');
 const path = require('path');
 const database = require('../config/database');

+async function createMigrationsTable() {
+  await database.query(`
+    CREATE TABLE IF NOT EXISTS schema_migrations (
+      version VARCHAR(255) PRIMARY KEY,
+      applied_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+      service VARCHAR(100) DEFAULT 'user-auth'
+    )
+  `);
+}
+
+async function isMigrationApplied(version) {
+  const result = await database.query(
+    'SELECT version FROM schema_migrations WHERE version = $1 AND service = $2',
+    [version, 'user-auth']
+  );
+  return result.rows.length > 0;
+}
+
+async function markMigrationApplied(version) {
+  await database.query(
+    'INSERT INTO schema_migrations (version, service) VALUES ($1, $2) ON CONFLICT (version) DO NOTHING',
+    [version, 'user-auth']
+  );
+}
+
 async function runMigrations() {
-  console.log('🚀 Starting database migrations...');
+  console.log('🚀 Starting user-auth database migrations...');

   const migrations = [
     '001_user_auth_schema.sql',
@@ -87,6 +112,13 @@ async function runMigrations() {
   await database.query('CREATE EXTENSION IF NOT EXISTS "uuid-ossp";');
   console.log('✅ Extensions ready');

+  // Create migrations tracking table
+  await createMigrationsTable();
+  console.log('✅ Migration tracking table ready');
+
+  let appliedCount = 0;
+  let skippedCount = 0;
+
   for (const migrationFile of migrations) {
     const migrationPath = path.join(__dirname, migrationFile);
     if (!fs.existsSync(migrationPath)) {
@@ -94,13 +126,24 @@ async function runMigrations() {
       continue;
     }

+    // Check if migration was already applied
+    if (await isMigrationApplied(migrationFile)) {
+      console.log(`⏭️ Migration ${migrationFile} already applied, skipping...`);
+      skippedCount++;
+      continue;
+    }
+
     const migrationSQL = fs.readFileSync(migrationPath, 'utf8');
     console.log(`📄 Running migration: ${migrationFile}`);
     await database.query(migrationSQL);
+    await markMigrationApplied(migrationFile);
     console.log(`✅ Migration ${migrationFile} completed!`);
+    appliedCount++;
   }

+  console.log(`📊 Migration summary: ${appliedCount} applied, ${skippedCount} skipped`);
+
   // Verify all tables
   const result = await database.query(`
     SELECT
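
The user-auth service records its rows in the same schema_migrations table with service = 'user-auth'. Worth noting from the diff: version alone is the primary key and inserts use ON CONFLICT (version) DO NOTHING, so two services sharing a migration filename would silently collide; the filenames here all differ, so that stays theoretical. To see both services' history side by side:

# Applied-migration counts per service in the shared tracking table
psql "$DATABASE_URL" -c \
  "SELECT service, count(*) AS applied, max(applied_at) AS last_run FROM schema_migrations GROUP BY service;"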

View File

@@ -65,7 +65,7 @@ router.get('/verify-email', async (req, res) => {
     const { token } = req.query;
     await authService.verifyEmailToken(token);

-    const frontendUrl = process.env.FRONTEND_URL || 'http://192.168.1.20:3001';
+    const frontendUrl = process.env.FRONTEND_URL || 'http://192.168.1.31:3001';
     const redirectUrl = `${frontendUrl}/signin?verified=true`;

     // Prefer redirect by default; only return JSON if explicitly requested
     if (req.query.format === 'json') {
@@ -73,7 +73,7 @@ router.get('/verify-email', async (req, res) => {
     }
     return res.redirect(302, redirectUrl);
   } catch (error) {
-    const frontendUrl = process.env.FRONTEND_URL || 'http://192.168.1.20:3001';
+    const frontendUrl = process.env.FRONTEND_URL || 'http://192.168.1.31:3001';
     const redirectUrl = `${frontendUrl}/signin?error=${encodeURIComponent(error.message)}`;

     if (req.query.format === 'json') {
       return res.status(400).json({ success: false, message: error.message, redirect: redirectUrl });
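
Both branches still fall back to a hard-coded LAN address when FRONTEND_URL is unset, so this IP bump will need repeating whenever the dev machine's address changes. Setting the variable in the environment avoids touching code; the value below is only an example:

# In the service's compose environment or shell, rather than in the route handler
export FRONTEND_URL="http://localhost:3001"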