Backend changes before tech stack selector

Parent: 4efbe6e2e6
Commit: 4f139af3c7
@@ -6,14 +6,14 @@
 // ========================================
 // LIVE PRODUCTION URLS (Currently Active)
 // ========================================
-const FRONTEND_URL = 'https://dashboard.codenuk.com';
+// const FRONTEND_URL = 'https://dashboard.codenuk.com';
-const BACKEND_URL = 'https://backend.codenuk.com';
+// const BACKEND_URL = 'https://backend.codenuk.com';

 // ========================================
 // LOCAL DEVELOPMENT URLS
 // ========================================
-// const FRONTEND_URL = 'http://192.168.1.17:3001';
+const FRONTEND_URL = 'http://192.168.1.13:3001';
-// const BACKEND_URL = 'http://192.168.1.17:8000';
+const BACKEND_URL = 'http://192.168.1.13:8000';

 // ========================================
 // CORS CONFIGURATION (Auto-generated)
@@ -95,6 +95,7 @@ services:
       - POSTGRES_DB=dev_pipeline
       - POSTGRES_USER=pipeline_admin
      - POSTGRES_PASSWORD=secure_pipeline_2024
+      - APPLY_SCHEMAS_SQL=true
       - REDIS_HOST=redis
       - REDIS_PORT=6379
       - REDIS_PASSWORD=redis_secure_2024
@@ -233,7 +234,7 @@ services:
       - NODE_ENV=development
       - PORT=8000
       - HOST=0.0.0.0
-      - CORS_ORIGINS=https://dashboard.codenuk.com
+      - CORS_ORIGINS=http://192.168.1.13:3001
       - CORS_METHODS=GET,POST,PUT,DELETE,PATCH,OPTIONS # Add this line
       - CORS_CREDENTIALS=true # Add this line
       # Database connections
@@ -301,6 +302,7 @@ services:
       - POSTGRES_DB=dev_pipeline
       - POSTGRES_USER=pipeline_admin
       - POSTGRES_PASSWORD=secure_pipeline_2024
+      - DATABASE_URL=postgresql://pipeline_admin:secure_pipeline_2024@postgres:5432/dev_pipeline
       - REDIS_HOST=redis
       - REDIS_PORT=6379
       - REDIS_PASSWORD=redis_secure_2024
@@ -490,7 +492,7 @@ services:
     ports:
       - "8011:8011"
     environment:
-      - FRONTEND_URL=https://dashboard.codenuk.com
+      - FRONTEND_URL=http://192.168.1.13:3001
       - PORT=8011
       - HOST=0.0.0.0
       - NODE_ENV=development
@@ -46,13 +46,9 @@ if [ ${#missing_vars[@]} -gt 0 ]; then
     exit 1
 fi

-# Check if migrations have already been completed successfully
-MIGRATION_MARKER="/tmp/migrations-completed"
-if [ -f "${MIGRATION_MARKER}" ]; then
-    log "✅ Migrations already completed successfully (marker file exists)"
-    log "To force re-run migrations, delete: ${MIGRATION_MARKER}"
-    exit 0
-fi
+# Always attempt to run migrations on startup.
+# Each service's migration script must be idempotent and skip already-applied versions.
+# The previous global marker skip is removed to allow new migrations to apply automatically.

 # Track failed services
 failed_services=()

@@ -103,7 +99,4 @@ if [ ${#failed_services[@]} -gt 0 ]; then
     exit 1
 else
     log "✅ All migrations completed successfully"
-    # Create marker file to indicate successful completion
-    touch "${MIGRATION_MARKER}"
-    log "📝 Created migration completion marker: ${MIGRATION_MARKER}"
 fi
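With the global completion marker gone, idempotency now has to live inside each service's own migration runner. A minimal sketch of the per-version skip pattern this comment assumes, against a hypothetical schema_migrations tracking table (the Python runner added below in migrations/migrate.py implements the same idea):

    for f in migrations/*.sql; do
        version="$(basename "$f")"
        # Skip versions already recorded in the tracking table
        applied=$(psql "$DATABASE_URL" -tAc "SELECT 1 FROM schema_migrations WHERE version = '$version'")
        if [ "$applied" = "1" ]; then
            echo "Skipping already applied: $version"
            continue
        fi
        psql "$DATABASE_URL" -v ON_ERROR_STOP=1 -f "$f"
        psql "$DATABASE_URL" -c "INSERT INTO schema_migrations(version) VALUES ('$version') ON CONFLICT DO NOTHING"
    done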
@@ -2,16 +2,16 @@
 NODE_ENV=development
 PORT=8000

-# Service Targets (for Docker services)
+# Service Targets (for local gateway, Docker services)
-USER_AUTH_URL=http://user-auth:8011
+USER_AUTH_URL=http://localhost:8011
-TEMPLATE_MANAGER_URL=http://template-manager:8009
+TEMPLATE_MANAGER_URL=http://localhost:8009
-REQUIREMENT_PROCESSOR_URL=http://requirement-processor:8001
+REQUIREMENT_PROCESSOR_URL=http://localhost:8001
-TECH_STACK_SELECTOR_URL=http://tech-stack-selector:8002
+TECH_STACK_SELECTOR_URL=http://localhost:8002
-ARCHITECTURE_DESIGNER_URL=http://architecture-designer:8003
+ARCHITECTURE_DESIGNER_URL=http://localhost:8003
-CODE_GENERATOR_URL=http://code-generator:8004
+CODE_GENERATOR_URL=http://localhost:8004
-TEST_GENERATOR_URL=http://test-generator:8005
+TEST_GENERATOR_URL=http://localhost:8005
-DEPLOYMENT_MANAGER_URL=http://deployment-manager:8006
+DEPLOYMENT_MANAGER_URL=http://localhost:8006
-DASHBOARD_URL=http://dashboard:8008
+DASHBOARD_URL=http://localhost:8008

 # Infrastructure
 REDIS_HOST=redis

@@ -28,10 +28,10 @@ RABBITMQ_USER=pipeline_admin
 RABBITMQ_PASSWORD=secure_rabbitmq_password

 # CORS
-FRONTEND_URL=https://dashboard.codenuk.com
+FRONTEND_URL=http://192.168.1.13:3001

 # CORS Configuration
-CORS_ORIGIN=https://dashboard.codenuk.com
+CORS_ORIGIN=http://192.168.1.13:3001
 CORS_METHODS=GET,POST,PUT,DELETE,PATCH,OPTIONS
 CORS_CREDENTIALS=true
@@ -27,4 +27,4 @@ HEALTHCHECK --interval=30s --timeout=10s --start-period=5s --retries=3 \
     CMD curl -f http://localhost:8000/health || exit 1

 # Start the application
 CMD ["npm", "start"]
@@ -7,10 +7,23 @@ const createServiceProxy = (targetUrl, serviceName, options = {}) => {
     target: targetUrl,
     changeOrigin: true,
     ws: true,
-    timeout: parseInt(process.env.PROXY_TIMEOUT) || 30000,
+    timeout: parseInt(process.env.PROXY_TIMEOUT) || 60000,
-    proxyTimeout: parseInt(process.env.PROXY_TIMEOUT) || 30000,
+    proxyTimeout: parseInt(process.env.PROXY_TIMEOUT) || 60000,
     pathRewrite: options.pathRewrite || {},
-    onProxyReq: logProxyRequest(serviceName, targetUrl),
+    followRedirects: true,
+    secure: false,
+    onProxyReq: (proxyReq, req, res) => {
+      // Log the proxy request
+      logProxyRequest(serviceName, targetUrl)(proxyReq, req, res);
+
+      // Ensure proper headers for JSON requests
+      if (req.headers['content-type'] === 'application/json') {
+        proxyReq.setHeader('Content-Type', 'application/json');
+      }
+
+      // Add connection keep-alive
+      proxyReq.setHeader('Connection', 'keep-alive');
+    },
     onProxyRes: logProxyResponse(serviceName),
     onError: (err, req, res) => {
       logProxyError(serviceName)(err, req, res);
@@ -55,9 +55,9 @@ global.io = io;
 // Service targets configuration
 const serviceTargets = {
   USER_AUTH_URL: process.env.USER_AUTH_URL || 'http://localhost:8011',
-  TEMPLATE_MANAGER_URL: process.env.TEMPLATE_MANAGER_URL || 'http://template-manager:8009',
+  TEMPLATE_MANAGER_URL: process.env.TEMPLATE_MANAGER_URL || 'http://192.168.1.13:8009',
   GIT_INTEGRATION_URL: process.env.GIT_INTEGRATION_URL || 'http://localhost:8012',
-  REQUIREMENT_PROCESSOR_URL: process.env.REQUIREMENT_PROCESSOR_URL || 'http://localhost:8001',
+  REQUIREMENT_PROCESSOR_URL: process.env.REQUIREMENT_PROCESSOR_URL || 'http://requirement-processor:8001',
   TECH_STACK_SELECTOR_URL: process.env.TECH_STACK_SELECTOR_URL || 'http://localhost:8002',
   ARCHITECTURE_DESIGNER_URL: process.env.ARCHITECTURE_DESIGNER_URL || 'http://localhost:8003',
   CODE_GENERATOR_URL: process.env.CODE_GENERATOR_URL || 'http://localhost:8004',
@@ -519,15 +519,70 @@ app.use('/api/requirements',

 // Questions (Requirement Processor) - expose /api/questions via gateway
 // Rewrites /api/questions/* -> /api/v1/* at the Requirement Processor
+console.log('🔧 Registering /api/questions proxy route...');
 app.use('/api/questions',
   createServiceLimiter(300),
   // Allow unauthenticated access for generating questions (public step in builder)
   (req, res, next) => next(),
-  serviceRouter.createServiceProxy(
-    serviceTargets.REQUIREMENT_PROCESSOR_URL,
-    'requirement-processor-questions',
-    { pathRewrite: { '^/api/questions': '/api/v1' } }
-  )
+  (req, res, next) => {
+    const requirementServiceUrl = serviceTargets.REQUIREMENT_PROCESSOR_URL;
+    // Rewrite path: /api/questions -> /api/v1
+    const rewrittenPath = req.originalUrl.replace(/^\/api\/questions/, '/api/v1');
+    const targetUrl = `${requirementServiceUrl}${rewrittenPath}`;
+    console.log(`🔥 [QUESTIONS PROXY] ${req.method} ${req.originalUrl} → ${targetUrl}`);
+
+    // Set response timeout to prevent hanging
+    res.setTimeout(30000, () => {
+      console.error('❌ [QUESTIONS PROXY] Response timeout');
+      if (!res.headersSent) {
+        res.status(504).json({ error: 'Gateway timeout', service: 'requirement-processor' });
+      }
+    });
+
+    const options = {
+      method: req.method,
+      url: targetUrl,
+      headers: {
+        'Content-Type': 'application/json',
+        'User-Agent': 'API-Gateway/1.0',
+        'Connection': 'keep-alive',
+        'Authorization': req.headers.authorization,
+        'X-User-ID': req.user?.id || req.user?.userId,
+        'X-User-Role': req.user?.role,
+      },
+      timeout: 25000,
+      validateStatus: () => true,
+      maxRedirects: 0
+    };
+
+    // Always include request body for POST/PUT/PATCH requests
+    if (req.method === 'POST' || req.method === 'PUT' || req.method === 'PATCH') {
+      options.data = req.body || {};
+      console.log(`📦 [QUESTIONS PROXY] Request body:`, JSON.stringify(req.body));
+    }
+
+    axios(options)
+      .then(response => {
+        console.log(`✅ [QUESTIONS PROXY] Response: ${response.status} for ${req.method} ${req.originalUrl}`);
+        if (!res.headersSent) {
+          res.status(response.status).json(response.data);
+        }
+      })
+      .catch(error => {
+        console.error(`❌ [QUESTIONS PROXY ERROR]:`, error.message);
+        if (!res.headersSent) {
+          if (error.response) {
+            res.status(error.response.status).json(error.response.data);
+          } else {
+            res.status(502).json({
+              error: 'Questions service unavailable',
+              message: error.code || error.message,
+              service: 'requirement-processor'
+            });
+          }
+        }
+      });
+  }
 );

 // Tech Stack Selector Service
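For reference, a request like the following is rewritten and forwarded by the block above (route and payload are illustrative; the concrete endpoints live in the Requirement Processor under /api/v1):

    curl -X POST http://localhost:8000/api/questions/generate-comprehensive-business-questions \
        -H 'Content-Type: application/json' \
        -d '{"features": []}'
    # The gateway strips '/api/questions' and calls
    # http://requirement-processor:8001/api/v1/generate-comprehensive-business-questions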
@@ -14,6 +14,9 @@ RUN pip install --no-cache-dir -r requirements.txt
 # Copy application code
 COPY src/ ./src/

+# Copy migrations
+COPY migrations/ ./migrations/
+
 # Expose port
 EXPOSE 8001

@@ -21,5 +24,13 @@ EXPOSE 8001
 HEALTHCHECK --interval=30s --timeout=10s --start-period=60s --retries=3 \
     CMD curl -f http://localhost:8001/health || exit 1

-# Start the application
-CMD ["uvicorn", "src.main:app", "--host", "0.0.0.0", "--port", "8001"]
+# Create startup script that runs migrations then starts the app
+RUN echo '#!/bin/bash\n\
+echo "Running database migrations..."\n\
+python migrations/migrate.py\n\
+echo "Starting application..."\n\
+exec uvicorn src.main:app --host 0.0.0.0 --port 8001' > /app/start.sh && \
+    chmod +x /app/start.sh
+
+# Start with migration and then application
+CMD ["/app/start.sh"]
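One way to verify the baked-in start.sh ordering, assuming the service directory layout used in this repo (image tag and connection string are illustrative):

    docker build -t requirement-processor ./services/requirement-processor
    docker run --rm -p 8001:8001 \
        -e DATABASE_URL='postgresql://pipeline_admin:secure_pipeline_2024@postgres:5432/dev_pipeline' \
        requirement-processor
    # Expected log order: "Running database migrations..." then "Starting application..."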
@@ -0,0 +1,63 @@
+-- Migration: 001_business_context_tables.sql
+-- Description: Add business context questions and responses tables to requirement processor
+-- Date: 2024-01-15
+
+-- Business Context Responses (Simple Structure)
+CREATE TABLE IF NOT EXISTS business_context_responses (
+    id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
+    user_id UUID NOT NULL,
+    template_id UUID,
+    project_id UUID REFERENCES projects(id) ON DELETE CASCADE,
+
+    -- Simple JSONB structure with questions array
+    questions JSONB NOT NULL DEFAULT '[]'::jsonb,
+
+    -- Metadata
+    status VARCHAR(50) DEFAULT 'in_progress',
+    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+    updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+
+    CONSTRAINT valid_status CHECK (status IN ('in_progress', 'completed', 'draft'))
+);
+
+-- Question Templates (Optional - for reusable question sets)
+CREATE TABLE IF NOT EXISTS question_templates (
+    id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
+    template_name VARCHAR(255) NOT NULL,
+    questions JSONB NOT NULL DEFAULT '[]'::jsonb,
+    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+    is_active BOOLEAN DEFAULT true
+);
+
+-- Create indexes
+CREATE INDEX IF NOT EXISTS idx_business_context_user_id ON business_context_responses(user_id);
+CREATE INDEX IF NOT EXISTS idx_business_context_project_id ON business_context_responses(project_id);
+CREATE INDEX IF NOT EXISTS idx_business_context_template_id ON business_context_responses(template_id);
+CREATE INDEX IF NOT EXISTS idx_business_context_questions ON business_context_responses USING GIN (questions);
+CREATE INDEX IF NOT EXISTS idx_question_templates_questions ON question_templates USING GIN (questions);
+
+-- Insert default question template
+INSERT INTO question_templates (template_name, questions) VALUES
+('Standard Business Context Questions', '[
+    {
+        "question": "How many local users will access your integrated restaurant Management System system across all detailed requirements?",
+        "answer": ""
+    },
+    {
+        "question": "How should Customer-Facing Features, Management Dashboard, Staff Operations interface features with their detailed requirements integrate and share data?",
+        "answer": ""
+    },
+    {
+        "question": "What are the workflow dependencies between detailed requirements?",
+        "answer": ""
+    },
+    {
+        "question": "Do you need real-time data synchronization across all detailed requirements?",
+        "answer": ""
+    },
+    {
+        "question": "How should data flow between these detailed requirements?",
+        "answer": ""
+    }
+]'::jsonb)
+ON CONFLICT DO NOTHING;
@@ -0,0 +1,14 @@
+-- Migration: 002_fix_foreign_key_constraint.sql
+-- Description: Remove foreign key constraint on project_id since projects table doesn't exist
+-- Date: 2024-09-22
+
+-- Drop the foreign key constraint on project_id
+ALTER TABLE business_context_responses
+DROP CONSTRAINT IF EXISTS business_context_responses_project_id_fkey;
+
+-- Make project_id nullable since it's just a reference field now
+ALTER TABLE business_context_responses
+ALTER COLUMN project_id DROP NOT NULL;
+
+-- Add a comment to clarify the field usage
+COMMENT ON COLUMN business_context_responses.project_id IS 'Template/Project identifier - not a foreign key constraint';
services/requirement-processor/migrations/README.md (new file, 62 lines)
@@ -0,0 +1,62 @@
+# Requirement Processor Migrations
+
+This directory contains database migrations for the requirement processor service.
+
+## Running Migrations
+
+### Option 1: Using Python Script
+```bash
+cd /home/tech4biz/Desktop/Projectsnew/CODENUK1/codenuk-backend-live/services/requirement-processor/migrations
+python migrate.py
+```
+
+### Option 2: Manual SQL Execution
+```bash
+# Connect to your database and run:
+psql -d dev_pipeline -f 001_business_context_tables.sql
+```
+
+### Option 3: Using Docker
+```bash
+# If using Docker Compose
+docker-compose exec postgres psql -U postgres -d dev_pipeline -f /migrations/001_business_context_tables.sql
+```
+
+## Migration Files
+
+- `001_business_context_tables.sql` - Creates business context tables with JSONB structure
+  - `business_context_responses` - Stores user responses with questions array
+  - `question_templates` - Reusable question templates
+
+## Database Schema
+
+### business_context_responses
+```sql
+- id: UUID (Primary Key)
+- user_id: UUID (Required)
+- template_id: UUID (Optional)
+- project_id: UUID (Foreign Key to projects)
+- questions: JSONB Array of {question, answer} objects
+- status: VARCHAR ('in_progress', 'completed', 'draft')
+- created_at, updated_at: TIMESTAMP
+```
+
+### question_templates
+```sql
+- id: UUID (Primary Key)
+- template_name: VARCHAR
+- questions: JSONB Array of question templates
+- is_active: BOOLEAN
+- created_at: TIMESTAMP
+```
+
+## Environment Variables
+
+Make sure these are set:
+```bash
+DATABASE_URL=postgresql://postgres:password@localhost:5432/dev_pipeline
+```
+
+## Integration with Requirement Processor
+
+The business context data will be available to your requirement processor service for enhanced analysis and better requirement understanding.
services/requirement-processor/migrations/migrate.py (new file, 93 lines)
@@ -0,0 +1,93 @@
+#!/usr/bin/env python3
+"""
+Migration runner for requirement processor service
+Run migrations in order to set up database schema
+"""
+
+import os
+import sys
+import asyncio
+import asyncpg
+from pathlib import Path
+from loguru import logger
+
+# Database connection settings
+DATABASE_URL = os.getenv('DATABASE_URL', 'postgresql://postgres:password@localhost:5432/dev_pipeline')
+
+SCHEMA_MIGRATIONS_TABLE_SQL = """
+CREATE TABLE IF NOT EXISTS schema_migrations (
+    version TEXT PRIMARY KEY,
+    applied_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
+);
+"""
+
+async def ensure_migrations_table(pool) -> None:
+    async with pool.acquire() as conn:
+        await conn.execute(SCHEMA_MIGRATIONS_TABLE_SQL)
+
+async def is_applied(pool, version: str) -> bool:
+    async with pool.acquire() as conn:
+        row = await conn.fetchrow("SELECT 1 FROM schema_migrations WHERE version = $1", version)
+        return row is not None
+
+async def mark_applied(pool, version: str) -> None:
+    async with pool.acquire() as conn:
+        await conn.execute("INSERT INTO schema_migrations(version) VALUES($1) ON CONFLICT (version) DO NOTHING", version)
+
+async def run_migration(pool, migration_file):
+    """Run a single migration file if not applied"""
+    version = migration_file.name
+    try:
+        if await is_applied(pool, version):
+            logger.info(f"⏭️ Skipping already applied migration: {version}")
+            return True
+
+        with open(migration_file, 'r') as f:
+            sql_content = f.read()
+
+        async with pool.acquire() as conn:
+            await conn.execute(sql_content)
+
+        await mark_applied(pool, version)
+        logger.info(f"✅ Migration completed: {version}")
+        return True
+    except Exception as e:
+        logger.error(f"❌ Migration failed: {version} - {e}")
+        return False
+
+async def run_migrations():
+    """Run all migrations in order"""
+    try:
+        # Connect to database
+        pool = await asyncpg.create_pool(DATABASE_URL)
+        logger.info("Connected to database")
+
+        # Ensure tracking table exists
+        await ensure_migrations_table(pool)
+
+        # Get migration files
+        migrations_dir = Path(__file__).parent
+        migration_files = sorted(migrations_dir.glob("*.sql"))
+
+        if not migration_files:
+            logger.info("No migration files found")
+            return
+
+        logger.info(f"Found {len(migration_files)} migration files")
+
+        # Run migrations
+        for migration_file in migration_files:
+            success = await run_migration(pool, migration_file)
+            if not success:
+                logger.error("Migration failed, stopping")
+                break
+
+        await pool.close()
+        logger.info("All migrations completed successfully")
+
+    except Exception as e:
+        logger.error(f"Migration runner failed: {e}")
+        sys.exit(1)
+
+if __name__ == "__main__":
+    asyncio.run(run_migrations())
@@ -673,12 +673,14 @@ import os
 import sys
 import json
 from datetime import datetime
-from typing import Dict, Any, Optional, Union
+from typing import Dict, Any, Optional, Union, List
 from pydantic import BaseModel
 from fastapi import FastAPI, HTTPException, Request
 from fastapi.middleware.cors import CORSMiddleware
 from loguru import logger
 import anthropic
+import asyncpg
+import uuid

 # Configure logging
 logger.remove()
@@ -694,6 +696,26 @@ except Exception as e:
     logger.warning(f"⚠️ Claude client not initialized: {e}")
     claude_client = None

+# Database connection configuration
+DATABASE_URL = os.getenv("DATABASE_URL", "postgresql://postgres:password@localhost:5432/codenuk_db")
+db_pool = None
+
+async def init_db_pool():
+    """Initialize database connection pool"""
+    global db_pool
+    try:
+        db_pool = await asyncpg.create_pool(DATABASE_URL)
+        logger.info("✅ Database connection pool initialized successfully")
+    except Exception as e:
+        logger.error(f"❌ Failed to initialize database pool: {e}")
+        db_pool = None
+
+async def get_db_connection():
+    """Get database connection from pool"""
+    if db_pool is None:
+        await init_db_pool()
+    return db_pool
+
 # ================================================================================================
 # FLEXIBLE MODELS
 # ================================================================================================
@@ -704,6 +726,19 @@ class FlexibleRequirementRequest(BaseModel):
     class Config:
         extra = "allow"  # Allow any additional fields

+class QuestionAnswer(BaseModel):
+    """Model for individual question-answer pair"""
+    question: str
+    answer: str
+
+class BusinessContextRequest(BaseModel):
+    """Model for storing business context responses"""
+    user_id: str
+    project_id: Optional[str] = None
+    template_id: Optional[str] = None
+    questions: List[QuestionAnswer]
+    status: Optional[str] = "completed"
+
 # ================================================================================================
 # FLEXIBLE FASTAPI APPLICATION
 # ================================================================================================
@@ -722,6 +757,11 @@ app.add_middleware(
     allow_headers=["*"],
 )

+@app.on_event("startup")
+async def startup_event():
+    """Initialize database connection on startup"""
+    await init_db_pool()
+
 @app.get("/health")
 async def health_check():
     return {
@@ -910,6 +950,143 @@ async def generate_comprehensive_business_questions(request: Request):
             "message": "Failed to generate comprehensive business questions"
         }

+@app.post("/api/v1/store-business-context")
+async def store_business_context(request: BusinessContextRequest):
+    """
+    Store business context questions and answers when user clicks 'Generate Technology Recommendations'
+    Input: {user_id, project_id?, template_id?, questions: [{question, answer}], status?}
+    Output: {success, data: {id, stored_questions_count}}
+    """
+    try:
+        logger.info(f"🗄️ Storing business context for user: {request.user_id}")
+        logger.info(f"📝 Questions to store: {len(request.questions)}")
+
+        # Get database connection
+        pool = await get_db_connection()
+        if not pool:
+            raise HTTPException(status_code=500, detail="Database connection not available")
+
+        # Convert questions to JSONB format
+        questions_json = [
+            {
+                "question": qa.question,
+                "answer": qa.answer
+            }
+            for qa in request.questions
+        ]
+
+        # Generate UUID for the record
+        record_id = str(uuid.uuid4())
+
+        # Store in database
+        async with pool.acquire() as connection:
+            await connection.execute("""
+                INSERT INTO business_context_responses
+                (id, user_id, project_id, template_id, questions, status, created_at, updated_at)
+                VALUES ($1, $2, $3, $4, $5, $6, $7, $8)
+            """,
+                record_id,
+                request.user_id,
+                request.project_id,
+                request.template_id,
+                json.dumps(questions_json),
+                request.status,
+                datetime.utcnow(),
+                datetime.utcnow()
+            )
+
+        logger.info(f"✅ Successfully stored {len(request.questions)} business context responses")
+        logger.info(f"📊 Record ID: {record_id}")
+
+        return {
+            "success": True,
+            "data": {
+                "id": record_id,
+                "stored_questions_count": len(request.questions),
+                "user_id": request.user_id,
+                "project_id": request.project_id,
+                "status": request.status,
+                "timestamp": datetime.utcnow().isoformat()
+            }
+        }
+
+    except Exception as e:
+        logger.error(f"❌ Failed to store business context: {e}")
+        return {
+            "success": False,
+            "error": str(e),
+            "message": "Failed to store business context responses"
+        }
+
+@app.get("/api/v1/business-context/{user_id}")
+async def get_business_context(user_id: str, project_id: Optional[str] = None):
+    """
+    Retrieve stored business context responses for a user
+    Optional project_id filter
+    """
+    try:
+        logger.info(f"🔍 Retrieving business context for user: {user_id}")
+
+        # Get database connection
+        pool = await get_db_connection()
+        if not pool:
+            raise HTTPException(status_code=500, detail="Database connection not available")
+
+        # Build query based on filters
+        if project_id:
+            query = """
+                SELECT id, user_id, project_id, template_id, questions, status, created_at, updated_at
+                FROM business_context_responses
+                WHERE user_id = $1 AND project_id = $2
+                ORDER BY created_at DESC
+            """
+            params = [user_id, project_id]
+        else:
+            query = """
+                SELECT id, user_id, project_id, template_id, questions, status, created_at, updated_at
+                FROM business_context_responses
+                WHERE user_id = $1
+                ORDER BY created_at DESC
+            """
+            params = [user_id]
+
+        async with pool.acquire() as connection:
+            rows = await connection.fetch(query, *params)
+
+        # Convert rows to response format
+        responses = []
+        for row in rows:
+            responses.append({
+                "id": str(row['id']),
+                "user_id": row['user_id'],
+                "project_id": row['project_id'],
+                "template_id": row['template_id'],
+                "questions": json.loads(row['questions']) if row['questions'] else [],
+                "status": row['status'],
+                "created_at": row['created_at'].isoformat(),
+                "updated_at": row['updated_at'].isoformat()
+            })
+
+        logger.info(f"✅ Retrieved {len(responses)} business context records")
+
+        return {
+            "success": True,
+            "data": {
+                "responses": responses,
+                "total_count": len(responses),
+                "user_id": user_id,
+                "project_id": project_id
+            }
+        }
+
+    except Exception as e:
+        logger.error(f"❌ Failed to retrieve business context: {e}")
+        return {
+            "success": False,
+            "error": str(e),
+            "message": "Failed to retrieve business context responses"
+        }
+
 async def generate_ai_business_questions(feature_name: str, description: str, requirements: list, complexity: str, logic_rules: list):
     """Generate business questions using Claude AI"""
     try:
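A quick smoke test for the two endpoints added above, calling the service directly (the UUID and payload are made-up examples):

    # Store a set of answered questions
    curl -X POST http://localhost:8001/api/v1/store-business-context \
        -H 'Content-Type: application/json' \
        -d '{"user_id": "11111111-1111-1111-1111-111111111111", "questions": [{"question": "How many users?", "answer": "About 50"}]}'

    # Read them back (optionally filter with ?project_id=...)
    curl http://localhost:8001/api/v1/business-context/11111111-1111-1111-1111-111111111111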
@@ -1396,75 +1573,6 @@ def extract_all_data(data: Dict[str, Any]) -> tuple[list, dict, dict]:

     return all_features, scale_info, complete_requirements

-async def intelligent_fallback_analysis(feature_name: str, description: str, requirements: list, project_type: str):
-    """Intelligent fallback analysis when Claude is not available"""
-
-    # Analyze complexity based on keywords
-    complexity_indicators = {
-        "high": ["encryption", "hipaa", "compliance", "security", "integration", "real-time", "ai", "machine learning", "blockchain"],
-        "medium": ["crud", "database", "api", "authentication", "validation", "search", "filter"],
-        "low": ["display", "show", "view", "list", "basic"]
-    }
-
-    text_to_analyze = f"{feature_name} {description} {' '.join(requirements)}".lower()
-
-    complexity = "medium"  # default
-    for level, keywords in complexity_indicators.items():
-        if any(keyword in text_to_analyze for keyword in keywords):
-            complexity = level
-            break
-
-    # Generate logical business rules based on project type and requirements
-    logic_rules = []
-
-    if project_type.lower() == "healthcare":
-        logic_rules.extend([
-            "Only authorized caregivers can access patient data",
-            "All patient data access must be logged for HIPAA compliance",
-            "Patient data must be encrypted at rest and in transit"
-        ])
-
-    if "crud" in text_to_analyze or "manage" in text_to_analyze:
-        logic_rules.append("Users can only modify data they have created or been granted access to")
-
-    if "patient" in text_to_analyze:
-        logic_rules.extend([
-            "Patient information can only be accessed by assigned caregivers",
-            "All patient data modifications require audit trail"
-        ])
-
-    # Remove duplicates
-    logic_rules = list(set(logic_rules))
-
-    analysis = {
-        "feature_name": feature_name or "Enhanced Feature",
-        "complexity": complexity,
-        "logicRules": logic_rules,
-        "implementation_details": [
-            f"Implement {feature_name} with proper validation",
-            "Add error handling and logging",
-            "Include unit and integration tests"
-        ],
-        "technical_requirements": [
-            "Database schema design",
-            "API endpoint implementation",
-            "Frontend component development"
-        ],
-        "estimated_effort": "2-3 weeks" if complexity == "high" else "1-2 weeks",
-        "dependencies": ["User authentication", "Database setup"],
-        "api_endpoints": [f"POST /api/{feature_name.lower().replace(' ', '-')}", f"GET /api/{feature_name.lower().replace(' ', '-')}"],
-        "database_tables": [f"{feature_name.lower().replace(' ', '_')}_table"],
-        "confidence_score": 0.75
-    }
-
-    logger.info(f"✅ Fallback analysis completed: {complexity} complexity with {len(logic_rules)} logic rules")
-
-    return {
-        "success": True,
-        "analysis": analysis
-    }
-
 if __name__ == "__main__":
     import uvicorn
@@ -5,6 +5,9 @@ WORKDIR /app
 # Install curl for health checks
 RUN apk add --no-cache curl

+# Ensure shared pipeline schema can be applied automatically when missing
+ENV APPLY_SCHEMAS_SQL=true
+
 # Copy package files
 COPY package*.json ./
@@ -115,36 +115,13 @@ app.post('/api/analyze-feature', async (req, res) => {
     console.log('📋 [Template Manager] Analyzing feature:', actualFeatureName);
     console.log('📋 [Template Manager] Project type:', actualProjectType);
     console.log('📋 [Template Manager] Requirements:', safeRequirements);
-    // Use Claude AI for intelligent analysis
-    let analysis;
-    const CLAUDE_API_KEY = process.env.CLAUDE_API_KEY || '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';
-    const CLAUDE_AVAILABLE = !!CLAUDE_API_KEY;
-
-    console.log('🔍 [Template Manager] Claude available:', CLAUDE_AVAILABLE);
-    console.log('🔍 [Template Manager] Claude API key present:', !!process.env.CLAUDE_API_KEY);
-
-    if (CLAUDE_AVAILABLE) {
-      try {
-        console.log('🤖 [Template Manager] Using Claude AI for analysis...');
-        analysis = await analyzeWithClaude(actualFeatureName, description, safeRequirements, actualProjectType);
-        console.log('✅ [Template Manager] Claude AI analysis completed successfully');
-      } catch (claudeError) {
-        console.warn('⚠️ [Template Manager] Claude AI failed, falling back to rule-based analysis:', claudeError.message);
-        console.error('❌ [Template Manager] Claude error details:', claudeError);
-        analysis = await analyzeWithRules(actualFeatureName, description, safeRequirements, actualProjectType);
-      }
-    } else {
-      console.log('📋 [Template Manager] Using rule-based analysis (Claude not available)');
-      analysis = await analyzeWithRules(actualFeatureName, description, safeRequirements, actualProjectType);
-    }
-
-    console.log('✅ [Template Manager] Analysis completed:', analysis.complexity, 'complexity');
-
-    res.json({
-      success: true,
-      analysis: analysis
-    });
-
+    // Always use Claude. No rule-based fallback.
+    console.log('🤖 [Template Manager] Using Claude AI for analysis (no fallback)...');
+    const analysis = await analyzeWithClaude(actualFeatureName, description, safeRequirements, actualProjectType);
+    console.log('✅ [Template Manager] Analysis completed:', analysis?.complexity, 'complexity');
+    console.log('🧩 [Template Manager] logicRules:', Array.isArray(analysis?.logicRules) ? analysis.logicRules : 'none');
+
+    res.json({ success: true, analysis });
+
   } catch (error) {
     console.error('❌ [Template Manager] AI Analysis error:', error);
@@ -207,7 +184,14 @@ Return ONLY the JSON object, no other text.`;
       model: 'claude-3-5-sonnet-20241022',
       max_tokens: 2000,
       temperature: 0.1,
-      messages: [{ role: 'user', content: prompt }]
+      messages: [
+        {
+          role: 'user',
+          content: [
+            { type: 'text', text: prompt }
+          ]
+        }
+      ]
     }, {
       headers: {
         'x-api-key': CLAUDE_API_KEY,
@@ -220,7 +204,7 @@ Return ONLY the JSON object, no other text.`;
     console.log('✅ [Template Manager] Claude API response received');
     console.log('🔍 [Template Manager] Response status:', response.status);

-    const responseText = response.data.content[0].text.trim();
+    const responseText = (response?.data?.content?.[0]?.text || '').trim();
     console.log('🔍 [Template Manager] Raw Claude response:', responseText.substring(0, 200) + '...');

     // Extract JSON from response
@@ -231,11 +215,13 @@ Return ONLY the JSON object, no other text.`;
       console.log('🔍 [Template Manager] Parsed analysis:', JSON.stringify(analysis, null, 2));
       return analysis;
     } else {
+      // Hard fail if Claude returns non-JSON; do not fallback
      console.error('❌ [Template Manager] No valid JSON found in Claude response');
       console.error('🔍 [Template Manager] Full response:', responseText);
       throw new Error('No valid JSON found in Claude response');
     }
   } catch (error) {
+    // Propagate error up; endpoint will return 500. No fallback.
     console.error('❌ [Template Manager] Claude API error:', error.message);
     console.error('🔍 [Template Manager] Error details:', {
       status: error.response?.status,
@@ -247,179 +233,7 @@ Return ONLY the JSON object, no other text.`;
   }
 }

-// Rule-based analysis function (fallback)
+// Removed rule-based fallback and helpers. Claude is mandatory.
-async function analyzeWithRules(featureName, description, requirements, projectType) {
-  const complexity = analyzeComplexity(description, requirements);
-  const logicRules = generateLogicRules(featureName, description, requirements, projectType);
-
-  return {
-    feature_name: featureName || 'Custom Feature',
-    complexity: complexity,
-    logicRules: logicRules,
-    implementation_details: [
-      `Implement ${featureName || 'Custom Feature'} with proper validation`,
-      'Add error handling and logging',
-      'Include unit and integration tests'
-    ],
-    technical_requirements: [
-      'Database schema design',
-      'API endpoint implementation',
-      'Frontend component development'
-    ],
-    estimated_effort: complexity === 'high' ? '3-4 weeks' : complexity === 'low' ? '1-2 weeks' : '2-3 weeks',
-    dependencies: ['User authentication', 'Database setup'],
-    api_endpoints: [
-      `POST /api/${(featureName || 'custom-feature').toLowerCase().replace(/\s+/g, '-')}`,
-      `GET /api/${(featureName || 'custom-feature').toLowerCase().replace(/\s+/g, '-')}`
-    ],
-    database_tables: [`${(featureName || 'custom_feature').toLowerCase().replace(/\s+/g, '_')}_table`],
-    confidence_score: 0.75
-  };
-}
-
-// Helper function to analyze complexity
-function analyzeComplexity(description, requirements) {
-  const safeRequirements = Array.isArray(requirements) ? requirements : [];
-  const text = `${description || ''} ${safeRequirements.join(' ')}`.toLowerCase();
-
-  const highComplexityKeywords = ['encryption', 'hipaa', 'compliance', 'security', 'integration', 'real-time', 'ai', 'machine learning', 'blockchain', 'payment', 'transaction'];
-  const mediumComplexityKeywords = ['crud', 'database', 'api', 'authentication', 'validation', 'search', 'filter', 'workflow', 'approval'];
-  const lowComplexityKeywords = ['display', 'show', 'view', 'list', 'basic', 'simple'];
-
-  if (highComplexityKeywords.some(keyword => text.includes(keyword))) {
-    return 'high';
-  } else if (mediumComplexityKeywords.some(keyword => text.includes(keyword))) {
-    return 'medium';
-  } else if (lowComplexityKeywords.some(keyword => text.includes(keyword))) {
-    return 'low';
-  }
-
-  return 'medium'; // default
-}
-
-// Helper function to generate logic rules
-function generateLogicRules(featureName, description, requirements, projectType) {
-  const rules = [];
-  const safeRequirements = Array.isArray(requirements) ? requirements : [];
-  const text = `${description || ''} ${safeRequirements.join(' ')}`.toLowerCase();
-  const featureText = `${featureName || ''}`.toLowerCase();
-
-  console.log('🔍 [Template Manager] Generating rules for:', featureName);
-  console.log('🔍 [Template Manager] Description:', description);
-  console.log('🔍 [Template Manager] Requirements:', safeRequirements);
-  console.log('🔍 [Template Manager] Project type:', projectType);
-
-  // Project type specific rules
-  if (projectType?.toLowerCase() === 'healthcare') {
-    rules.push('Only authorized caregivers can access patient data');
-    rules.push('All patient data access must be logged for HIPAA compliance');
-    rules.push('Patient data must be encrypted at rest and in transit');
-  }
-
-  if (projectType?.toLowerCase() === 'ecommerce') {
-    rules.push('Payment information must be PCI DSS compliant');
-    rules.push('Order status updates must be real-time');
-    rules.push('Inventory levels must be validated before purchase');
-  }
-
-  if (projectType?.toLowerCase() === 'finance' || projectType?.toLowerCase() === 'fintech') {
-    rules.push('All financial data must be encrypted and access-controlled');
-    rules.push('Transaction processing must include fraud detection');
-    rules.push('Audit trails must be maintained for all financial operations');
-  }
-
-  // Dynamic feature-specific rules based on content analysis
-  if (text.includes('user') || text.includes('account') || text.includes('profile')) {
-    rules.push(`User authentication is required to access ${featureName || 'this feature'}`);
-    rules.push('User data must be validated before storage');
-    rules.push('Users can only access their own data unless explicitly authorized');
-  }
-
-  if (text.includes('crud') || text.includes('manage') || text.includes('create') || text.includes('edit') || text.includes('delete')) {
-    rules.push(`Only authorized users can perform ${featureName || 'data'} operations`);
-    rules.push('All data modifications must be validated and sanitized');
-    rules.push('Delete operations should be soft deletes with audit trails');
-  }
-
-  if (text.includes('search') || text.includes('filter') || text.includes('query')) {
-    rules.push('Search queries must be sanitized to prevent injection attacks');
-    rules.push('Search results must respect user permissions and data visibility');
-    rules.push('Search performance should be optimized with proper indexing');
-  }
-
-  if (text.includes('upload') || text.includes('file') || text.includes('document')) {
-    rules.push('File uploads must be validated for type, size, and content');
-    rules.push('Uploaded files must be scanned for malware');
-    rules.push('File access must be controlled based on user permissions');
-  }
-
-  if (text.includes('notification') || text.includes('email') || text.includes('alert')) {
-    rules.push('Notification preferences must be configurable by users');
-    rules.push('Email notifications must comply with anti-spam regulations');
-    rules.push('Notification delivery must be reliable and trackable');
-  }
-
-  if (text.includes('report') || text.includes('analytics') || text.includes('dashboard')) {
-    rules.push('Report data must be filtered based on user access permissions');
-    rules.push('Analytics data must be anonymized where required');
-    rules.push('Report generation must be optimized for performance');
-  }
-
-  if (text.includes('api') || text.includes('integration') || text.includes('webhook')) {
-    rules.push('API endpoints must be secured with proper authentication');
-    rules.push('Rate limiting must be implemented to prevent abuse');
-    rules.push('API responses must not expose sensitive internal data');
-  }
-
-  if (text.includes('payment') || text.includes('transaction') || text.includes('billing')) {
-    rules.push('All financial transactions must be logged and auditable');
-    rules.push('Payment processing must include fraud detection');
-    rules.push('Transaction data must be encrypted and PCI compliant');
-  }
-
-  if (text.includes('approval') || text.includes('workflow') || text.includes('review')) {
-    rules.push('Approval workflows must have configurable escalation rules');
-    rules.push('All approval decisions must be logged with timestamps');
-    rules.push('Workflow states must be clearly defined and trackable');
-  }
-
-  if (text.includes('patient') || text.includes('medical') || text.includes('health')) {
-    rules.push('Patient information can only be accessed by assigned caregivers');
-    rules.push('All patient data modifications require audit trail');
-    rules.push('Medical data must comply with HIPAA regulations');
-  }
-
-  // Dynamic rules based on requirements content
-  safeRequirements.forEach((req, index) => {
-    const reqText = req.toLowerCase();
-    if (reqText.includes('security') || reqText.includes('secure')) {
-      rules.push(`Security requirement ${index + 1}: ${req} must be implemented with proper access controls`);
-    }
-    if (reqText.includes('performance') || reqText.includes('fast') || reqText.includes('speed')) {
-      rules.push(`Performance requirement ${index + 1}: ${req} must be optimized for scalability`);
-    }
-    if (reqText.includes('compliance') || reqText.includes('regulation')) {
-      rules.push(`Compliance requirement ${index + 1}: ${req} must be implemented according to regulatory standards`);
-    }
-  });
-
-  // Generic rules if no specific ones were added
-  if (rules.length === 0) {
-    rules.push(`${featureName || 'This feature'} must implement proper data validation on all inputs`);
-    rules.push(`User permissions must be verified before accessing ${featureName || 'feature'} functionality`);
-    rules.push(`All ${featureName || 'feature'} operations must be logged for audit and debugging purposes`);
-    rules.push(`${featureName || 'This feature'} must handle errors gracefully and provide meaningful feedback`);
-  }
-
-  // Ensure we always have at least 3-5 rules for better analysis
-  if (rules.length < 3) {
-    rules.push(`${featureName || 'This feature'} must be tested thoroughly before deployment`);
-    rules.push(`${featureName || 'This feature'} must follow established coding standards and best practices`);
-  }
-
-  console.log('✅ [Template Manager] Generated rules:', rules);
-  return rules;
-}

 // Root endpoint
 app.get('/', (req, res) => {
@@ -111,9 +111,4 @@ CREATE TRIGGER update_template_features_updated_at BEFORE UPDATE ON template_fea
 CREATE TRIGGER update_custom_features_updated_at BEFORE UPDATE ON custom_features
     FOR EACH ROW EXECUTE FUNCTION update_updated_at_column();

--- Insert success message
+-- Removed automatic seed row to avoid creating templates during migrations
-INSERT INTO templates (type, title, description, category)
-VALUES ('_migration_test', 'Migration Test', 'Schema created successfully', 'System')
-ON CONFLICT (type) DO NOTHING;
-
-SELECT 'Template Manager database schema created successfully!' as message;
@@ -117,9 +117,4 @@ BEGIN
     END IF;
 END $$;

--- 7. Insert success message
+-- Removed automatic seed row; avoid inserting into templates during migrations
-INSERT INTO templates (type, title, description, category)
-VALUES ('_admin_workflow_migration', 'Admin Workflow Migration', 'Admin approval workflow schema created successfully', 'System')
-ON CONFLICT (type) DO NOTHING;
-
-SELECT 'Admin approval workflow database schema created successfully!' as message;
@ -32,6 +32,36 @@ async function runMigrations() {
   console.log('🚀 Starting template-manager database migrations...');
 
   try {
+    // Optionally bootstrap shared pipeline schema if requested and missing
+    const applySchemas = String(process.env.APPLY_SCHEMAS_SQL || '').toLowerCase() === 'true';
+    if (applySchemas) {
+      try {
+        const probe = await database.query("SELECT to_regclass('public.projects') AS tbl");
+        const hasProjects = !!(probe.rows && probe.rows[0] && probe.rows[0].tbl);
+        if (!hasProjects) {
+          const schemasPath = path.join(__dirname, '../../../../databases/scripts/schemas.sql');
+          if (fs.existsSync(schemasPath)) {
+            console.log('📦 Applying shared pipeline schemas.sql (projects, tech_stack_decisions, etc.)...');
+            let schemasSQL = fs.readFileSync(schemasPath, 'utf8');
+            // Remove psql meta-commands like \c dev_pipeline that the driver cannot execute
+            schemasSQL = schemasSQL
+              .split('\n')
+              .filter(line => !/^\s*\\/.test(line))
+              .join('\n');
+            await database.query(schemasSQL);
+            console.log('✅ schemas.sql applied');
+          } else {
+            console.log('⚠️ schemas.sql not found at expected path, skipping');
+          }
+        } else {
+          console.log('⏭️ Shared pipeline schema already present (projects exists), skipping schemas.sql');
+        }
+      } catch (e) {
+        console.error('❌ Failed applying schemas.sql:', e.message);
+        throw e;
+      }
+    }
+
     // Create migrations tracking table first
     await createMigrationsTable();
     console.log('✅ Migration tracking table ready');
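
The backslash filter above exists because schemas.sql is written for the psql client: meta-commands such as \c dev_pipeline are client-side directives, not SQL, so sending them through the driver would fail. A standalone illustration of the same regex, with made-up input:

// Illustrative only; the real file lives under databases/scripts/schemas.sql.
const raw = '\\c dev_pipeline\nCREATE TABLE projects (id UUID PRIMARY KEY);';
const cleaned = raw
  .split('\n')
  .filter(line => !/^\s*\\/.test(line)) // drop lines that start with a backslash
  .join('\n');
console.log(cleaned); // -> CREATE TABLE projects (id UUID PRIMARY KEY);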
@ -70,13 +100,25 @@ async function runMigrations() {
 
       const migrationSQL = fs.readFileSync(migrationPath, 'utf8');
 
-      // Skip destructive migrations unless explicitly allowed
+      // Skip destructive migrations unless explicitly allowed.
+      // Exception: if this is the initial schema and base tables don't exist, it's safe to run.
       const containsDrop = /\bdrop\s+table\b/i.test(migrationSQL);
-      const allowDestructive = String(process.env.ALLOW_DESTRUCTIVE_MIGRATIONS || '').toLowerCase() === 'true';
-      if (containsDrop && !allowDestructive) {
-        console.log(`⏭️ Skipping potentially destructive migration (set ALLOW_DESTRUCTIVE_MIGRATIONS=true to run): ${migrationFile}`);
-        skippedCount++;
-        continue;
+      const allowDestructiveEnv = String(process.env.ALLOW_DESTRUCTIVE_MIGRATIONS || '').toLowerCase() === 'true';
+      if (containsDrop && !allowDestructiveEnv) {
+        let canSafelyRun = false;
+        if (migrationFile === '001_initial_schema.sql') {
+          // Probe for core tables; if missing, allow running the initial schema
+          const probe = await database.query("SELECT to_regclass('public.templates') AS tbl");
+          const hasTemplates = !!(probe.rows && probe.rows[0] && probe.rows[0].tbl);
+          canSafelyRun = !hasTemplates;
+        }
+
+        if (!canSafelyRun) {
+          console.log(`⏭️ Skipping potentially destructive migration (set ALLOW_DESTRUCTIVE_MIGRATIONS=true to run): ${migrationFile}`);
+          skippedCount++;
+          continue;
+        }
       }
 
       console.log(`📄 Running migration: ${migrationFile}`);
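
Net effect: a fresh database can still bootstrap itself from 001_initial_schema.sql, while a database that already has the templates table refuses DROP TABLE migrations unless the operator opts in via ALLOW_DESTRUCTIVE_MIGRATIONS=true. A distilled sketch of that decision as a pure function (the names here are illustrative, not part of the diff):

// skip === true means the migration is held back.
function shouldSkipMigration(containsDrop, allowEnv, isInitialSchema, hasBaseTables) {
  if (!containsDrop || allowEnv) return false;         // nothing destructive, or operator opted in
  if (isInitialSchema && !hasBaseTables) return false; // fresh database: initial schema is safe
  return true;                                         // otherwise skip and count it as skipped
}

// shouldSkipMigration(true, false, true, false) === false  (fresh install runs 001)
// shouldSkipMigration(true, false, false, true) === true   (guarded on a live database)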
@ -1,409 +0,0 @@
require('dotenv').config();
const database = require('../config/database');
const { v4: uuidv4 } = require('uuid');

// Template data from your current project.types.js
const TEMPLATE_DATA = {
  'Business & Enterprise': [
    {
      type: 'healthcare',
      title: 'Healthcare Platform',
      description: 'Patient management, appointments, medical records, telehealth',
      icon: '🏥',
      gradient: 'from-blue-50 to-blue-100',
      border: 'border-blue-200',
      text: 'text-blue-900',
      subtext: 'text-blue-700',
      features: [
        {
          feature_id: 'user_auth',
          name: 'User Authentication',
          description: 'Secure login and registration for caregivers and admins',
          feature_type: 'essential',
          complexity: 'medium',
          display_order: 1
        },
        {
          feature_id: 'patient_management',
          name: 'Patient Management',
          description: 'Create, edit, and manage patient profiles with medical history',
          feature_type: 'essential',
          complexity: 'high',
          display_order: 2
        },
        {
          feature_id: 'appointment_scheduling',
          name: 'Appointment Scheduling',
          description: 'Schedule and manage patient appointments',
          feature_type: 'essential',
          complexity: 'medium',
          display_order: 3
        },
        {
          feature_id: 'call_scheduling',
          name: 'Automated Call Scheduling',
          description: 'Schedule automated calls to patients via Retell AI',
          feature_type: 'suggested',
          complexity: 'high',
          display_order: 4
        },
        {
          feature_id: 'admin_dashboard',
          name: 'Admin Dashboard',
          description: 'Analytics and usage tracking for administrators',
          feature_type: 'suggested',
          complexity: 'medium',
          display_order: 5
        }
      ]
    },
    {
      type: 'ecommerce',
      title: 'E-commerce Platform',
      description: 'Online store, payments, inventory, customer management',
      icon: '🛒',
      gradient: 'from-green-50 to-green-100',
      border: 'border-green-200',
      text: 'text-green-900',
      subtext: 'text-green-700',
      features: [
        {
          feature_id: 'user_auth',
          name: 'User Authentication',
          description: 'Customer registration and login system',
          feature_type: 'essential',
          complexity: 'medium',
          display_order: 1
        },
        {
          feature_id: 'product_catalog',
          name: 'Product Catalog',
          description: 'Browse and search products with categories',
          feature_type: 'essential',
          complexity: 'medium',
          display_order: 2
        },
        {
          feature_id: 'shopping_cart',
          name: 'Shopping Cart',
          description: 'Add products to cart and manage quantities',
          feature_type: 'essential',
          complexity: 'medium',
          display_order: 3
        },
        {
          feature_id: 'payment_processing',
          name: 'Payment Processing',
          description: 'Secure payment integration with Stripe/PayPal',
          feature_type: 'essential',
          complexity: 'high',
          display_order: 4
        },
        {
          feature_id: 'inventory_management',
          name: 'Inventory Management',
          description: 'Track stock levels and manage inventory',
          feature_type: 'suggested',
          complexity: 'high',
          display_order: 5
        }
      ]
    },
    {
      type: 'business_crm',
      title: 'CRM System',
      description: 'Customer relationship management, sales pipeline, lead tracking',
      icon: '👥',
      gradient: 'from-purple-50 to-purple-100',
      border: 'border-purple-200',
      text: 'text-purple-900',
      subtext: 'text-purple-700',
      features: [
        {
          feature_id: 'lead_management',
          name: 'Lead Management',
          description: 'Capture, track and manage sales leads',
          feature_type: 'essential',
          complexity: 'medium',
          display_order: 1
        },
        {
          feature_id: 'sales_pipeline',
          name: 'Sales Pipeline',
          description: 'Visual sales pipeline with drag-and-drop functionality',
          feature_type: 'essential',
          complexity: 'high',
          display_order: 2
        },
        {
          feature_id: 'customer_analytics',
          name: 'Customer Analytics',
          description: 'Detailed analytics and reporting on customer behavior',
          feature_type: 'suggested',
          complexity: 'medium',
          display_order: 3
        },
        {
          feature_id: 'communication_tools',
          name: 'Communication Tools',
          description: 'Email integration and communication tracking',
          feature_type: 'suggested',
          complexity: 'medium',
          display_order: 4
        }
      ]
    }
  ],
  'Technology & Analytics': [
    {
      type: 'analytics_dashboard',
      title: 'Analytics Dashboard',
      description: 'Data visualization, business intelligence, custom reports',
      icon: '📊',
      gradient: 'from-blue-50 to-blue-100',
      border: 'border-blue-200',
      text: 'text-blue-900',
      subtext: 'text-blue-700',
      features: [
        {
          feature_id: 'data_visualization',
          name: 'Data Visualization',
          description: 'Interactive charts and graphs',
          feature_type: 'essential',
          complexity: 'high',
          display_order: 1
        },
        {
          feature_id: 'custom_reports',
          name: 'Custom Reports',
          description: 'Generate and schedule custom reports',
          feature_type: 'essential',
          complexity: 'medium',
          display_order: 2
        }
      ]
    },
    {
      type: 'ai_chatbot_platform',
      title: 'AI Chatbot Platform',
      description: 'Deploy multi-channel AI chatbots with analytics and workflows',
      icon: '🤖',
      gradient: 'from-teal-50 to-teal-100',
      border: 'border-teal-200',
      text: 'text-teal-900',
      subtext: 'text-teal-700',
      features: [
        {
          feature_id: 'nlp_engine',
          name: 'NLP Engine',
          description: 'Natural language processing for intent and entity extraction',
          feature_type: 'essential',
          complexity: 'high',
          display_order: 1
        },
        {
          feature_id: 'channel_integrations',
          name: 'Channel Integrations',
          description: 'Integrate with web, WhatsApp, and Slack',
          feature_type: 'essential',
          complexity: 'medium',
          display_order: 2
        },
        {
          feature_id: 'conversation_analytics',
          name: 'Conversation Analytics',
          description: 'Track KPIs: containment rate, CSAT, and response time',
          feature_type: 'suggested',
          complexity: 'medium',
          display_order: 3
        }
      ]
    }
  ],
  'Custom & Others': [
    {
      type: 'custom_project',
      title: 'Custom Project',
      description: 'Describe your unique project requirements and let AI help design it',
      icon: '🎨',
      gradient: 'from-gray-50 to-gray-100',
      border: 'border-gray-200',
      text: 'text-gray-900',
      subtext: 'text-gray-700',
      features: []
    },
    {
      type: 'project_management_tool',
      title: 'Project Management Tool',
      description: 'Tasks, Kanban boards, sprints, and team collaboration',
      icon: '🗂️',
      gradient: 'from-amber-50 to-amber-100',
      border: 'border-amber-200',
      text: 'text-amber-900',
      subtext: 'text-amber-700',
      features: [
        {
          feature_id: 'kanban_boards',
          name: 'Kanban Boards',
          description: 'Organize tasks into customizable columns',
          feature_type: 'essential',
          complexity: 'medium',
          display_order: 1
        },
        {
          feature_id: 'sprint_planning',
          name: 'Sprint Planning',
          description: 'Plan sprints, track velocity, and manage backlogs',
          feature_type: 'essential',
          complexity: 'high',
          display_order: 2
        },
        {
          feature_id: 'collaboration_tools',
          name: 'Collaboration Tools',
          description: 'Comments, mentions, and notifications',
          feature_type: 'suggested',
          complexity: 'medium',
          display_order: 3
        }
      ]
    }
  ]
};

async function seedDatabase() {
  console.log('🌱 Starting Template Manager database seeding...');

  const client = await database.getClient();

  try {
    await client.query('BEGIN');

    // Clear existing data
    console.log('🧹 Clearing existing template data...');
    await client.query('DELETE FROM feature_usage');
    await client.query('DELETE FROM custom_features');
    await client.query('DELETE FROM template_features');
    await client.query('DELETE FROM templates WHERE type != \'_migration_test\'');

    let totalTemplates = 0;
    let totalFeatures = 0;

    // Seed templates by category
    for (const [category, templates] of Object.entries(TEMPLATE_DATA)) {
      console.log(`📂 Seeding category: ${category}`);

      for (const templateData of templates) {
        // Insert template
        const templateId = uuidv4();
        const templateQuery = `
          INSERT INTO templates (
            id, type, title, description, icon, category,
            gradient, border, text, subtext
          ) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)
        `;

        await client.query(templateQuery, [
          templateId,
          templateData.type,
          templateData.title,
          templateData.description,
          templateData.icon,
          category,
          templateData.gradient,
          templateData.border,
          templateData.text,
          templateData.subtext
        ]);

        totalTemplates++;
        console.log(`  ✅ Template: ${templateData.title}`);

        // Insert features for this template
        for (const featureData of templateData.features) {
          const featureId = uuidv4();
          const featureQuery = `
            INSERT INTO template_features (
              id, template_id, feature_id, name, description,
              feature_type, complexity, display_order, is_default
            ) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9)
          `;

          await client.query(featureQuery, [
            featureId,
            templateId,
            featureData.feature_id,
            featureData.name,
            featureData.description,
            featureData.feature_type,
            featureData.complexity,
            featureData.display_order,
            true // is_default
          ]);

          totalFeatures++;
          console.log(`    🎯 Feature: ${featureData.name} (${featureData.feature_type})`);
        }
      }
    }

    // Add some sample usage data for demonstration
    console.log('📊 Adding sample usage data...');
    const sampleUsageQuery = `
      INSERT INTO feature_usage (template_id, feature_id, user_session, project_id)
      SELECT
        t.id,
        tf.id,
        'demo_session_' || (RANDOM() * 100)::int,
        'demo_project_' || (RANDOM() * 50)::int
      FROM templates t
      JOIN template_features tf ON t.id = tf.template_id
      WHERE RANDOM() < 0.3 -- 30% of features get sample usage
    `;

    const usageResult = await client.query(sampleUsageQuery);

    // Update usage counts based on sample data
    const updateUsageQuery = `
      UPDATE template_features
      SET usage_count = (
        SELECT COUNT(*)
        FROM feature_usage
        WHERE feature_id = template_features.id
      )
    `;

    await client.query(updateUsageQuery);

    await client.query('COMMIT');

    console.log('✅ Database seeding completed successfully!');
    console.log(`📊 Summary:`);
    console.log(`  - Templates created: ${totalTemplates}`);
    console.log(`  - Features created: ${totalFeatures}`);
    console.log(`  - Sample usage records: ${usageResult.rowCount}`);
    console.log(`  - Categories: ${Object.keys(TEMPLATE_DATA).length}`);

  } catch (error) {
    await client.query('ROLLBACK');
    console.error('❌ Database seeding failed:', error.message);
    throw error;
  } finally {
    client.release();
  }
}

// Run seeder if called directly
if (require.main === module) {
  seedDatabase()
    .then(() => {
      console.log('🎉 Seeding process completed!');
      process.exit(0);
    })
    .catch((error) => {
      console.error('💥 Seeding process failed:', error.message);
      process.exit(1);
    });
}

module.exports = { seedDatabase };
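
With this seeder deleted, template data is no longer created as a side effect of running migrations. If demo templates are still wanted, an explicit, operator-triggered run is the natural replacement — a sketch only, assuming a module with the same seedDatabase export is reintroduced under a scripts path of your choosing:

// Hypothetical entrypoint; the module path is assumed, not part of this commit.
const { seedDatabase } = require('./src/scripts/seed-templates');

seedDatabase()
  .then(() => console.log('On-demand seeding finished'))
  .catch((err) => {
    console.error('On-demand seeding failed:', err.message);
    process.exit(1);
  });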
@ -16,8 +16,8 @@ GMAIL_APP_PASSWORD=your-app-password
 # Service Configuration
 PORT=8011
 NODE_ENV=development
-FRONTEND_URL=https://dashboard.codenuk.com
-AUTH_PUBLIC_URL=https://backend.codenuk.com
+FRONTEND_URL=http://localhost:3000
+AUTH_PUBLIC_URL=http://localhost:8011
 
 # Database Configuration
 POSTGRES_HOST=postgres