# codenuk_backend_mine/scripts/migrate-clean.sh
# Snapshot metadata (code-hosting UI residue): 2025-10-03 10:13:06 +05:30,
# 177 lines, 5.7 KiB, Bash, executable file.
# NOTE(review): the hosting page also warned about invisible/confusable
# Unicode characters in this file (likely the emoji in log strings) —
# review them if the script misbehaves.
# NOTE(review): these header lines are page residue, not shell code; they
# are commented out here so they cannot be executed. In a standalone copy
# of this script the shebang below must be the first line of the file.
#!/usr/bin/env bash
# ========================================
# CLEAN DATABASE MIGRATION SYSTEM
# ========================================
# Drops and rebuilds the public schema, then re-applies the core schema
# and each service's migrations. Connection settings come from the
# POSTGRES_* environment variables, with container-friendly defaults.
#
# NOTE(review): the original used '#!/bin/sh' together with
# 'set -o pipefail', which is a bashism that fails on dash/ash shells;
# target bash explicitly instead.
set -euo pipefail

# Root directory (one level above this script)
ROOT_DIR="$(cd "$(dirname "$0")/.." && pwd)"

# Database connection parameters (overridable via environment)
DB_HOST=${POSTGRES_HOST:-postgres}
DB_PORT=${POSTGRES_PORT:-5432}
DB_NAME=${POSTGRES_DB:-dev_pipeline}
DB_USER=${POSTGRES_USER:-pipeline_admin}
# NOTE(review): hardcoded fallback credential — acceptable only for local
# dev; production must supply POSTGRES_PASSWORD.
DB_PASSWORD=${POSTGRES_PASSWORD:-secure_pipeline_2024}

# log MESSAGE... — print a timestamped line to stdout.
# printf is used instead of echo so messages containing backslashes or
# leading dashes are printed verbatim.
log() {
  printf '[%s] %s\n' "$(date '+%Y-%m-%d %H:%M:%S')" "$*"
}

log "🚀 Starting clean database migration system..."
# ========================================
# STEP 1: CLEAN EXISTING DATABASE
# ========================================
# Destructive reset: drops the whole public schema (all tables, views,
# functions) and recreates it, then restores the extensions and the
# migration-tracking ledger used by the later steps.
log "🧹 Step 1: Cleaning existing database..."
# The heredoc delimiter is intentionally unquoted so ${DB_USER} expands:
# the original hardcoded 'pipeline_admin' in the GRANT even though the
# user is configurable via POSTGRES_USER.
PGPASSWORD="$DB_PASSWORD" psql -h "$DB_HOST" -p "$DB_PORT" -U "$DB_USER" -d "$DB_NAME" <<EOF
-- Drop all existing tables to start fresh
DROP SCHEMA public CASCADE;
CREATE SCHEMA public;
GRANT ALL ON SCHEMA public TO ${DB_USER};
GRANT ALL ON SCHEMA public TO public;
-- Re-enable extensions
CREATE EXTENSION IF NOT EXISTS "uuid-ossp";
CREATE EXTENSION IF NOT EXISTS "pgcrypto";
CREATE EXTENSION IF NOT EXISTS "pg_stat_statements";
-- Create migration tracking table
CREATE TABLE IF NOT EXISTS schema_migrations (
  id SERIAL PRIMARY KEY,
  version VARCHAR(255) NOT NULL UNIQUE,
  service VARCHAR(100) NOT NULL,
  applied_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
  description TEXT
);
\echo '✅ Database cleaned and ready for migrations'
EOF
# ========================================
# STEP 2: APPLY CORE SCHEMA (from schemas.sql)
# ========================================
# Loads the shared base tables, then records that fact in the
# schema_migrations ledger (idempotent via ON CONFLICT).
log "📋 Step 2: Applying core schema..."
PGPASSWORD="$DB_PASSWORD" psql \
  -h "$DB_HOST" -p "$DB_PORT" \
  -U "$DB_USER" -d "$DB_NAME" \
  -f "${ROOT_DIR}/databases/scripts/schemas.sql"
# Mark the core schema as applied in the migration ledger.
PGPASSWORD="$DB_PASSWORD" psql \
  -h "$DB_HOST" -p "$DB_PORT" \
  -U "$DB_USER" -d "$DB_NAME" << 'EOF'
INSERT INTO schema_migrations (version, service, description)
VALUES ('001_core_schema', 'shared-schemas', 'Core pipeline tables from schemas.sql')
ON CONFLICT (version) DO NOTHING;
EOF
log "✅ Core schema applied"
# ========================================
# STEP 3: APPLY SERVICE-SPECIFIC MIGRATIONS
# ========================================
# For each service (in dependency order) this installs its dependencies,
# then runs its migration entry point: an npm "migrate" script for
# Node.js services, or migrate.py for Python services. Failures are
# collected in $failed_services (read by the final-summary section)
# instead of aborting the whole run.
log "🔧 Step 3: Applying service-specific migrations..."
# Define migration order (dependencies first)
migration_services="user-auth template-manager git-integration requirement-processor ai-mockup-service tech-stack-selector"
# Track failed services
# (space-separated list, consumed by the final summary to set exit code)
failed_services=""
# $migration_services is deliberately unquoted: word-splitting on spaces
# is how the list is iterated (names contain no whitespace).
for service in $migration_services; do
SERVICE_DIR="${ROOT_DIR}/services/${service}"
# Missing directory is not an error — the service may not be checked out.
if [ ! -d "${SERVICE_DIR}" ]; then
log "⚠️ Skipping ${service}: directory not found"
continue
fi
# Temporary: skip tech-stack-selector migrations in container (asyncpg build deps on Alpine)
if [ "$service" = "tech-stack-selector" ]; then
log "⏭️ Skipping ${service}: requires asyncpg build deps not available in this environment"
continue
fi
log "========================================"
log "🔄 Processing ${service}..."
log "========================================"
# Install dependencies if package.json exists
# npm ci is preferred when a lockfile is present (reproducible installs);
# both run in a subshell so the cwd of this script is unchanged.
if [ -f "${SERVICE_DIR}/package.json" ]; then
log "📦 Installing dependencies for ${service}..."
if [ -f "${SERVICE_DIR}/package-lock.json" ]; then
(cd "${SERVICE_DIR}" && npm ci --no-audit --no-fund --prefer-offline --silent)
else
(cd "${SERVICE_DIR}" && npm install --no-audit --no-fund --silent)
fi
fi
# Run migrations - check for both Node.js and Python services
# Node.js path: package.json must actually declare a "migrate" script.
if [ -f "${SERVICE_DIR}/package.json" ] && grep -q '"migrate":' "${SERVICE_DIR}/package.json"; then
log "🚀 Running Node.js migrations for ${service}..."
if (cd "${SERVICE_DIR}" && npm run -s migrate); then
log "${service}: migrations completed successfully"
else
# Record the failure but keep processing the remaining services.
log "${service}: migration failed"
failed_services="${failed_services} ${service}"
fi
elif [ -f "${SERVICE_DIR}/migrate.py" ]; then
log "🐍 Ensuring Python dependencies for ${service}..."
# Best-effort installs: '|| true' keeps set -e from aborting if pip fails
# (e.g. offline); the migration itself will surface any real problem.
if [ -f "${SERVICE_DIR}/requirements.txt" ]; then
(cd "${SERVICE_DIR}" && pip3 install --no-cache-dir -r requirements.txt >/dev/null 2>&1 || true)
fi
# Ensure asyncpg is available for services that require it
(pip3 install --no-cache-dir asyncpg >/dev/null 2>&1 || true)
log "🚀 Running Python migrations for ${service}..."
if (cd "${SERVICE_DIR}" && python3 migrate.py); then
log "${service}: migrations completed successfully"
else
log "${service}: migration failed"
failed_services="${failed_services} ${service}"
fi
else
# Neither an npm "migrate" script nor migrate.py — nothing to run.
log " ${service}: no migrate script found; skipping"
fi
done
# ========================================
# STEP 4: VERIFY FINAL STATE
# ========================================
# Read-only inspection pass: lists every table now present in the public
# schema and dumps the migration ledger so the run can be audited.
log "🔍 Step 4: Verifying final database state..."
PGPASSWORD="$DB_PASSWORD" psql \
  -h "$DB_HOST" -p "$DB_PORT" \
  -U "$DB_USER" -d "$DB_NAME" << 'EOF'
\echo '📋 Final database tables:'
SELECT schemaname, tablename, tableowner
  FROM pg_tables
 WHERE schemaname = 'public'
 ORDER BY tablename;
\echo '📊 Applied migrations:'
SELECT service, version, applied_at, description
  FROM schema_migrations
 ORDER BY applied_at;
\echo '✅ Database migration verification complete'
EOF
# ========================================
# FINAL SUMMARY
# ========================================
# Report the overall outcome; exit non-zero if any service's migration
# failed so callers (CI, compose healthchecks) can detect the problem.
log "========================================"
if [ -z "$failed_services" ]; then
  log "✅ ALL MIGRATIONS COMPLETED SUCCESSFULLY"
  log "Database is clean and ready for use"
else
  log "❌ MIGRATIONS COMPLETED WITH ERRORS"
  log "Failed services: $failed_services"
  exit 1
fi