backend changes

parent fdbcdcb338
commit f3077d53a7
@@ -41,7 +41,6 @@ services:
       retries: 5
       start_period: 30s
 
-
   mongodb:
     image: mongo:7
     container_name: pipeline_mongodb
@@ -89,6 +88,7 @@ services:
     working_dir: /app
     volumes:
       - ./:/app
+      - migration_state:/tmp # Persistent volume for migration state
     environment:
       - POSTGRES_HOST=postgres
       - POSTGRES_PORT=5432
@@ -100,6 +100,7 @@ services:
       - REDIS_PASSWORD=redis_secure_2024
       - NODE_ENV=development
      - DATABASE_URL=postgresql://pipeline_admin:secure_pipeline_2024@postgres:5432/dev_pipeline
+      - ALLOW_DESTRUCTIVE_MIGRATIONS=false # Safety flag for destructive operations
     entrypoint: ["/bin/sh", "-c", "chmod +x ./scripts/migrate-all.sh && ./scripts/migrate-all.sh"]
     depends_on:
       postgres:
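
Note: the new migration_state volume keeps the job's /tmp persistent across container restarts, which the migrate-all.sh changes below rely on for their completion marker, so repeated `docker compose up` runs stay a no-op for migrations. A minimal shell sketch for forcing a full re-run; the volume prefix and the service name `migrations` are assumptions, since compose namespaces volumes by project and the service block's name is not shown in this diff:

    # Remove the persisted marker volume, then re-run the one-shot job
    # (project name "pipeline" is assumed from the container names above)
    docker volume rm pipeline_migration_state
    docker compose up migrations
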
@@ -535,7 +536,6 @@ services:
       start_period: 40s
     restart: unless-stopped
 
-
   template-manager:
     build: ./services/template-manager
     container_name: pipeline_template_manager
@@ -571,7 +571,6 @@ services:
       start_period: 40s
     restart: unless-stopped
 
-
   # AI Mockup / Wireframe Generation Service
   ai-mockup-service:
     build: ./services/ai-mockup-service
@@ -793,6 +792,8 @@ volumes:
     driver: local
   api_gateway_logs:
     driver: local
+  migration_state:
+    driver: local
 
 # =====================================
 # Networks
 
@@ -46,6 +46,14 @@ if [ ${#missing_vars[@]} -gt 0 ]; then
     exit 1
 fi
 
+# Check if migrations have already been completed successfully
+MIGRATION_MARKER="/tmp/migrations-completed"
+if [ -f "${MIGRATION_MARKER}" ]; then
+    log "✅ Migrations already completed successfully (marker file exists)"
+    log "To force re-run migrations, delete: ${MIGRATION_MARKER}"
+    exit 0
+fi
+
 # Track failed services
 failed_services=()
 
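
With this guard, migrate-all.sh exits before touching any service as soon as the marker file exists, making repeated runs idempotent. A sketch of the force-re-run path that the log message describes; the service name `migrations` is an assumption, and `--entrypoint` is needed because the compose entrypoint above is an inline shell command:

    # Delete the marker inside the job's filesystem, then run the job again
    docker compose run --rm --entrypoint "rm -f /tmp/migrations-completed" migrations
    docker compose up migrations
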
@@ -95,4 +103,7 @@ if [ ${#failed_services[@]} -gt 0 ]; then
     exit 1
 else
     log "✅ All migrations completed successfully"
+    # Create marker file to indicate successful completion
+    touch "${MIGRATION_MARKER}"
+    log "📝 Created migration completion marker: ${MIGRATION_MARKER}"
 fi
scripts/reset-migrations.sh (new file, 59 lines)
@@ -0,0 +1,59 @@
+#!/usr/bin/env bash
+
+set -euo pipefail
+
+# ========================================
+# MIGRATION RESET UTILITY SCRIPT
+# ========================================
+
+log() {
+    echo "[$(date '+%Y-%m-%d %H:%M:%S')] $*"
+}
+
+log "🔄 Migration Reset Utility"
+log "This script will reset migration state to allow re-running migrations"
+
+# Check if DATABASE_URL is set
+if [ -z "${DATABASE_URL:-}" ]; then
+    log "ERROR: DATABASE_URL environment variable is required"
+    exit 1
+fi
+
+# Get confirmation from user
+echo ""
+echo "⚠️ WARNING: This will:"
+echo "  - Clear the schema_migrations table"
+echo "  - Remove the migration completion marker"
+echo "  - Allow migrations to run again on next docker compose up"
+echo ""
+echo "  This will NOT delete your actual data tables."
+echo ""
+read -p "Are you sure you want to proceed? (y/N): " -n 1 -r
+echo
+if [[ ! $REPLY =~ ^[Yy]$ ]]; then
+    log "Operation cancelled"
+    exit 0
+fi
+
+log "🗑️ Clearing migration state..."
+
+# Connect to database and clear migration tracking
+psql "${DATABASE_URL}" -c "
+    DROP TABLE IF EXISTS schema_migrations;
+    SELECT 'Migration tracking table dropped' as status;
+" || {
+    log "ERROR: Failed to clear database migration state"
+    exit 1
+}
+
+# Remove migration marker file
+MIGRATION_MARKER="/tmp/migrations-completed"
+if [ -f "${MIGRATION_MARKER}" ]; then
+    rm -f "${MIGRATION_MARKER}"
+    log "📝 Removed migration completion marker"
+else
+    log "📝 Migration completion marker not found (already clean)"
+fi
+
+log "✅ Migration state reset complete!"
+log "💡 Next 'docker compose up' will re-run all migrations"
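
The reset script needs psql on its PATH and DATABASE_URL in its environment. One caveat: the `rm -f` only touches the filesystem the script runs in, so running it on the host clears the database state but not a marker stored in the container's migration_state volume. A usage sketch, assuming the Postgres port is published to the host:

    DATABASE_URL="postgresql://pipeline_admin:secure_pipeline_2024@localhost:5432/dev_pipeline" \
      ./scripts/reset-migrations.sh
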
@@ -3,12 +3,41 @@ const fs = require('fs');
 const path = require('path');
 const database = require('../config/database');
 
+async function createMigrationsTable() {
+  await database.query(`
+    CREATE TABLE IF NOT EXISTS schema_migrations (
+      version VARCHAR(255) PRIMARY KEY,
+      applied_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+      service VARCHAR(100) DEFAULT 'template-manager'
+    )
+  `);
+}
+
+async function isMigrationApplied(version) {
+  const result = await database.query(
+    'SELECT version FROM schema_migrations WHERE version = $1 AND service = $2',
+    [version, 'template-manager']
+  );
+  return result.rows.length > 0;
+}
+
+async function markMigrationApplied(version) {
+  await database.query(
+    'INSERT INTO schema_migrations (version, service) VALUES ($1, $2) ON CONFLICT (version) DO NOTHING',
+    [version, 'template-manager']
+  );
+}
+
 async function runMigrations() {
-  console.log('🚀 Starting Template Manager database migration...');
+  console.log('🚀 Starting template-manager database migrations...');
 
   try {
+    // Create migrations tracking table first
+    await createMigrationsTable();
+    console.log('✅ Migration tracking table ready');
+
     // Get all migration files in order
-    let migrationFiles = [
+    const migrationFiles = [
       '001_initial_schema.sql',
       '002_admin_approval_workflow.sql',
       '003_custom_templates.sql',
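
Both services now record applied migrations in one shared schema_migrations table, with rows distinguished by a service column. Because version alone is the primary key and the insert uses ON CONFLICT (version) DO NOTHING, two services that ever ship a migration file with the same name would collide: the second service's row would be silently dropped, and its migration re-run on every start. A possible hardening, shown only as a sketch and not part of this commit (the constraint name follows Postgres' default naming):

    -- Sketch: track (version, service) pairs independently per service.
    ALTER TABLE schema_migrations DROP CONSTRAINT schema_migrations_pkey;
    ALTER TABLE schema_migrations ADD PRIMARY KEY (version, service);
    -- The insert would then need ON CONFLICT (version, service) DO NOTHING.
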
@@ -19,27 +48,22 @@ async function runMigrations() {
       '008_feature_business_rules.sql'
     ];
 
-    // Safety: if core tables already exist, skip the destructive 001 file
-    try {
-      const existing = await database.query(`
-        SELECT table_name FROM information_schema.tables
-        WHERE table_schema = 'public' AND table_name IN ('templates','template_features')
-      `);
-      const hasCoreTables = existing.rows && existing.rows.length >= 1;
-      if (hasCoreTables) {
-        migrationFiles = migrationFiles.filter((f) => f !== '001_initial_schema.sql');
-        console.log('⚠️ Core tables detected; skipping 001_initial_schema.sql to avoid destructive drops.');
-      }
-    } catch (probeErr) {
-      console.warn('Could not probe existing tables; proceeding with full migration list:', probeErr.message);
-    }
+    let appliedCount = 0;
+    let skippedCount = 0;
 
     for (const migrationFile of migrationFiles) {
       const migrationPath = path.join(__dirname, migrationFile);
 
       // Check if migration file exists
       if (!fs.existsSync(migrationPath)) {
-        console.log(`Migration file not found: ${migrationFile}`);
+        console.log(`⚠️ Migration file not found: ${migrationFile}`);
         continue;
       }
 
+      // Check if migration was already applied
+      if (await isMigrationApplied(migrationFile)) {
+        console.log(`⏭️ Migration ${migrationFile} already applied, skipping...`);
+        skippedCount++;
+        continue;
+      }
+
@@ -50,18 +74,21 @@
       const allowDestructive = String(process.env.ALLOW_DESTRUCTIVE_MIGRATIONS || '').toLowerCase() === 'true';
       if (containsDrop && !allowDestructive) {
         console.log(`⏭️ Skipping potentially destructive migration (set ALLOW_DESTRUCTIVE_MIGRATIONS=true to run): ${migrationFile}`);
+        skippedCount++;
         continue;
       }
 
-      console.log(`Running migration: ${migrationFile}`);
+      console.log(`📄 Running migration: ${migrationFile}`);
 
       // Execute the migration
       await database.query(migrationSQL);
+      await markMigrationApplied(migrationFile);
 
-      console.log(`Migration ${migrationFile} completed successfully!`);
+      console.log(`✅ Migration ${migrationFile} completed!`);
+      appliedCount++;
     }
 
-    console.log('All migrations completed successfully!');
+    console.log(`📊 Migration summary: ${appliedCount} applied, ${skippedCount} skipped`);
 
     // Verify tables were created
     const result = await database.query(`
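
The destructive-migration guard keys off a containsDrop flag whose definition sits just above this hunk and is not shown in the diff. Note that a skipped destructive file is counted but never marked applied, so it is re-evaluated on every run until ALLOW_DESTRUCTIVE_MIGRATIONS=true. A plausible shape for the check, offered purely as an illustration of what the elided line might look like:

    // Illustrative only: the real containsDrop definition is elided from this diff.
    const containsDrop = /\bDROP\s+(TABLE|SCHEMA|DATABASE)\b/i.test(migrationSQL);
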
@@ -72,10 +99,11 @@
       ORDER BY table_name
     `);
 
-    console.log('Verified tables:', result.rows.map(row => row.table_name));
+    console.log('🔍 Verified tables:', result.rows.map(row => row.table_name));
 
   } catch (error) {
-    console.error('Migration failed:', error.message);
+    console.error('❌ Migration failed:', error.message);
+    console.error('📍 Error details:', error);
     process.exit(1);
   } finally {
     await database.close();
 
@@ -65,24 +65,72 @@ class Feature {
     return result.rowCount > 0
   }
 
-  // Get all features for a template
+  // Get all features for a template (from both template_features and custom_features tables)
   static async getByTemplateId(templateId) {
-    const query = `
-      SELECT * FROM template_features
-      WHERE template_id = $1
+    // Get features from template_features table
+    const templateFeaturesQuery = `
+      SELECT
+        tf.*,
+        fbr.business_rules AS additional_business_rules
+      FROM template_features tf
+      LEFT JOIN feature_business_rules fbr
+        ON tf.template_id = fbr.template_id
+        AND (
+          fbr.feature_id = (tf.id::text)
+          OR fbr.feature_id = tf.feature_id
+        )
+      WHERE tf.template_id = $1
       ORDER BY
-        CASE feature_type
+        CASE tf.feature_type
           WHEN 'essential' THEN 1
           WHEN 'suggested' THEN 2
           WHEN 'custom' THEN 3
         END,
-        display_order,
-        usage_count DESC,
-        name
+        tf.display_order,
+        tf.usage_count DESC,
+        tf.name
     `;
 
-    const result = await database.query(query, [templateId]);
-    return result.rows.map(row => new Feature(row));
+    const templateFeaturesResult = await database.query(templateFeaturesQuery, [templateId]);
+    const templateFeatures = templateFeaturesResult.rows;
+
+    // Get custom features from custom_features table
+    const customFeaturesQuery = `
+      SELECT
+        cf.id,
+        cf.template_id,
+        cf.name,
+        cf.description,
+        cf.complexity,
+        cf.business_rules,
+        cf.technical_requirements,
+        'custom' as feature_type,
+        999 as display_order,
+        cf.usage_count,
+        0 as user_rating,
+        false as is_default,
+        true as created_by_user,
+        cf.created_at,
+        cf.updated_at,
+        fbr.business_rules as additional_business_rules
+      FROM custom_features cf
+      LEFT JOIN feature_business_rules fbr
+        ON cf.template_id = fbr.template_id
+        AND (
+          fbr.feature_id = (cf.id::text)
+          OR fbr.feature_id = ('custom_' || cf.id::text)
+        )
+      WHERE cf.template_id = $1
+      ORDER BY cf.created_at DESC
+    `;
+
+    const customFeaturesResult = await database.query(customFeaturesQuery, [templateId]);
+    const customFeatures = customFeaturesResult.rows;
+
+    // Combine both types of features
+    const allFeatures = [...templateFeatures, ...customFeatures];
+
+    return allFeatures.map(row => new Feature(row));
   }
 
   // Get popular features across all templates
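
getByTemplateId now returns curated and user-created features as one array: the custom_features query aliases its columns to the template_features shape (feature_type = 'custom', display_order = 999, stub user_rating/is_default values), so callers need no branching, and template features keep their CASE/display_order ordering ahead of custom features newest-first. A usage sketch, assuming the Feature constructor simply copies the row's columns; the template id is illustrative:

    // Hypothetical template id; curated and custom rows come back as Feature instances.
    const features = await Feature.getByTemplateId('web-app');
    for (const f of features) {
      console.log(f.feature_type, f.name, f.additional_business_rules ?? '(no extra rules)');
    }
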
@@ -73,8 +73,33 @@ const fs = require('fs');
 const path = require('path');
 const database = require('../config/database');
 
+async function createMigrationsTable() {
+  await database.query(`
+    CREATE TABLE IF NOT EXISTS schema_migrations (
+      version VARCHAR(255) PRIMARY KEY,
+      applied_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+      service VARCHAR(100) DEFAULT 'user-auth'
+    )
+  `);
+}
+
+async function isMigrationApplied(version) {
+  const result = await database.query(
+    'SELECT version FROM schema_migrations WHERE version = $1 AND service = $2',
+    [version, 'user-auth']
+  );
+  return result.rows.length > 0;
+}
+
+async function markMigrationApplied(version) {
+  await database.query(
+    'INSERT INTO schema_migrations (version, service) VALUES ($1, $2) ON CONFLICT (version) DO NOTHING',
+    [version, 'user-auth']
+  );
+}
+
 async function runMigrations() {
-  console.log('🚀 Starting database migrations...');
+  console.log('🚀 Starting user-auth database migrations...');
 
   const migrations = [
     '001_user_auth_schema.sql',
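
The user-auth service gets a copy of the same tracking helpers, writing rows with service = 'user-auth' into the shared table, so one query shows what every service has applied:

    -- Run against dev_pipeline via psql to audit migration state across services.
    SELECT service, version, applied_at
    FROM schema_migrations
    ORDER BY service, applied_at;
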
@@ -87,20 +112,38 @@ async function runMigrations() {
     await database.query('CREATE EXTENSION IF NOT EXISTS "uuid-ossp";');
     console.log('✅ Extensions ready');
 
+    // Create migrations tracking table
+    await createMigrationsTable();
+    console.log('✅ Migration tracking table ready');
+
+    let appliedCount = 0;
+    let skippedCount = 0;
+
     for (const migrationFile of migrations) {
       const migrationPath = path.join(__dirname, migrationFile);
       if (!fs.existsSync(migrationPath)) {
         console.warn(`⚠️ Migration file ${migrationFile} not found, skipping...`);
         continue;
       }
 
+      // Check if migration was already applied
+      if (await isMigrationApplied(migrationFile)) {
+        console.log(`⏭️ Migration ${migrationFile} already applied, skipping...`);
+        skippedCount++;
+        continue;
+      }
+
       const migrationSQL = fs.readFileSync(migrationPath, 'utf8');
       console.log(`📄 Running migration: ${migrationFile}`);
 
       await database.query(migrationSQL);
+      await markMigrationApplied(migrationFile);
       console.log(`✅ Migration ${migrationFile} completed!`);
+      appliedCount++;
     }
 
+    console.log(`📊 Migration summary: ${appliedCount} applied, ${skippedCount} skipped`);
+
     // Verify all tables
     const result = await database.query(`
       SELECT
@@ -65,7 +65,7 @@ router.get('/verify-email', async (req, res) => {
     const { token } = req.query;
     await authService.verifyEmailToken(token);
 
-    const frontendUrl = process.env.FRONTEND_URL || 'http://192.168.1.20:3001';
+    const frontendUrl = process.env.FRONTEND_URL || 'http://192.168.1.31:3001';
     const redirectUrl = `${frontendUrl}/signin?verified=true`;
     // Prefer redirect by default; only return JSON if explicitly requested
     if (req.query.format === 'json') {
@@ -73,7 +73,7 @@ router.get('/verify-email', async (req, res) => {
     }
     return res.redirect(302, redirectUrl);
   } catch (error) {
-    const frontendUrl = process.env.FRONTEND_URL || 'http://192.168.1.20:3001';
+    const frontendUrl = process.env.FRONTEND_URL || 'http://192.168.1.31:3001';
     const redirectUrl = `${frontendUrl}/signin?error=${encodeURIComponent(error.message)}`;
     if (req.query.format === 'json') {
       return res.status(400).json({ success: false, message: error.message, redirect: redirectUrl });
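
Both branches of /verify-email fall back to a hard-coded LAN address when FRONTEND_URL is unset; this commit only moves that fallback from .20 to .31. Setting the variable in the environment avoids the next code edit when the address changes again. A compose sketch, with the service name assumed since its block is not part of this diff:

    # docker-compose.yml sketch (service name "user-auth" is an assumption)
      user-auth:
        environment:
          - FRONTEND_URL=http://192.168.1.31:3001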