#!/usr/bin/env python3
"""
Test data storage in all databases for the AI Analysis Service.

Checks PostgreSQL tables, Redis keys, MongoDB collections, and the on-disk
analysis reports, then prints a per-system summary.
"""

import json
import os

import psycopg2
import pymongo
import redis
from dotenv import load_dotenv

# Load environment variables from a local .env file, if present
load_dotenv()
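
# load_dotenv() above only matters if the connection settings are read from the
# environment; the checks below still use hardcoded values. A minimal sketch of
# env-driven settings (the variable names are assumptions, and the defaults
# match the values hardcoded in this script):
POSTGRES_SETTINGS = {
    'host': os.getenv('POSTGRES_HOST', 'localhost'),
    'port': int(os.getenv('POSTGRES_PORT', '5432')),
    'database': os.getenv('POSTGRES_DB', 'dev_pipeline'),
    'user': os.getenv('POSTGRES_USER', 'pipeline_admin'),
    'password': os.getenv('POSTGRES_PASSWORD', 'secure_pipeline_2024'),
}
# Usage sketch: psycopg2.connect(**POSTGRES_SETTINGS)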

def test_postgres_data_storage():
    """Test PostgreSQL data storage."""
    try:
        conn = psycopg2.connect(
            host='localhost',
            port=5432,
            database='dev_pipeline',
            user='pipeline_admin',
            password='secure_pipeline_2024'
        )

        cursor = conn.cursor()

        # Check repositories
        cursor.execute("SELECT COUNT(*) FROM all_repositories;")
        repo_count = cursor.fetchone()[0]

        # Check analysis sessions
        cursor.execute("SELECT COUNT(*) FROM analysis_sessions;")
        session_count = cursor.fetchone()[0]

        # Check file analysis history
        cursor.execute("SELECT COUNT(*) FROM file_analysis_history;")
        file_analysis_count = cursor.fetchone()[0]

        # Check code embeddings
        cursor.execute("SELECT COUNT(*) FROM code_embeddings;")
        embedding_count = cursor.fetchone()[0]

        cursor.close()
        conn.close()

        print("📊 PostgreSQL Data Storage:")
        print(f"   📁 Repositories: {repo_count}")
        print(f"   🔍 Analysis Sessions: {session_count}")
        print(f"   📄 File Analyses: {file_analysis_count}")
        print(f"   🧠 Code Embeddings: {embedding_count}")

        return True

    except Exception as e:
        print(f"❌ PostgreSQL data check failed: {e}")
        return False
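
# The counts above assume all four tables exist; if one is missing, the whole
# check fails with a single exception. A sketch of a per-table existence check
# via information_schema (standard PostgreSQL; not wired into the test above):
def table_exists(cursor, table_name):
    """Return True if `table_name` exists in the public schema."""
    cursor.execute(
        "SELECT EXISTS (SELECT 1 FROM information_schema.tables "
        "WHERE table_schema = 'public' AND table_name = %s);",
        (table_name,)
    )
    return cursor.fetchone()[0]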

def test_redis_data_storage():
    """Test Redis data storage."""
    try:
        r = redis.Redis(
            host='localhost',
            port=6380,
            password='redis_secure_2024',
            db=0,
            decode_responses=True
        )

        # Get database size
        dbsize = r.dbsize()

        # Get all keys (acceptable for a small test database)
        keys = r.keys('*')

        print("📊 Redis Data Storage:")
        print(f"   🔑 Total Keys: {dbsize}")
        if keys:
            print(f"   📋 Sample Keys: {keys[:5]}")
        else:
            print("   📋 No keys found")

        return True

    except Exception as e:
        print(f"❌ Redis data check failed: {e}")
        return False
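
# KEYS '*' above blocks Redis while it walks the whole keyspace, which is fine
# for a small test database but not for a busy instance. A minimal non-blocking
# alternative using redis-py's SCAN-based iterator (a sketch, not used above):
def sample_redis_keys(r, pattern='*', limit=5):
    """Return up to `limit` keys matching `pattern` without blocking the server."""
    sample = []
    for key in r.scan_iter(match=pattern, count=100):
        sample.append(key)
        if len(sample) >= limit:
            break
    return sample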

def test_mongodb_data_storage():
    """Test MongoDB data storage."""
    try:
        client = pymongo.MongoClient(
            'mongodb://pipeline_admin:mongo_secure_2024@localhost:27017/'
        )

        db = client['repo_analyzer']
        collections = db.list_collection_names()

        print("📊 MongoDB Data Storage:")
        print(f"   📁 Collections: {len(collections)}")

        # Count documents per collection
        total_docs = 0
        for collection_name in collections:
            collection = db[collection_name]
            doc_count = collection.count_documents({})
            total_docs += doc_count
            print(f"   📄 {collection_name}: {doc_count} documents")

        print(f"   📄 Total Documents: {total_docs}")

        return True

    except Exception as e:
        print(f"❌ MongoDB data check failed: {e}")
        return False

def test_analysis_reports():
    """Test analysis reports storage."""
    try:
        reports_dir = "/home/tech4biz/Desktop/prakash/codenuk/backend_new/codenuk_backend_mine/services/ai-analysis-service/reports"

        if not os.path.exists(reports_dir):
            print(f"❌ Reports directory not found: {reports_dir}")
            return False

        report_files = [f for f in os.listdir(reports_dir) if f.endswith('.json')]

        print("📊 Analysis Reports:")
        print(f"   📁 Reports Directory: {reports_dir}")
        print(f"   📄 Report Files: {len(report_files)}")

        if report_files:
            # Check the most recently created report
            latest_report = max(
                report_files,
                key=lambda x: os.path.getctime(os.path.join(reports_dir, x))
            )
            report_path = os.path.join(reports_dir, latest_report)

            with open(report_path, 'r') as f:
                report_data = json.load(f)

            print(f"   📋 Latest Report: {latest_report}")
            print(f"   📊 Repository ID: {report_data.get('repository_id', 'N/A')}")
            print(f"   📁 Total Files: {report_data.get('total_files', 'N/A')}")
            print(f"   📄 Total Lines: {report_data.get('total_lines', 'N/A')}")
            print(f"   🎯 Quality Score: {report_data.get('code_quality_score', 'N/A')}")

        return True

    except Exception as e:
        print(f"❌ Analysis reports check failed: {e}")
        return False
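
# The fields read above imply a report shape roughly like the following
# (a sketch inferred from the .get() calls in test_analysis_reports, not the
# full report schema):
EXAMPLE_REPORT_SHAPE = {
    "repository_id": "<repository identifier>",
    "total_files": 0,
    "total_lines": 0,
    "code_quality_score": 0.0,
}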

def main():
    """Test all data storage systems."""
    print("🔍 Testing Data Storage Systems...")
    print("=" * 60)

    postgres_ok = test_postgres_data_storage()
    print()

    redis_ok = test_redis_data_storage()
    print()

    mongodb_ok = test_mongodb_data_storage()
    print()

    reports_ok = test_analysis_reports()
    print()

    print("=" * 60)
    print("📊 Storage Summary:")
    print(f"   PostgreSQL: {'✅' if postgres_ok else '❌'}")
    print(f"   Redis: {'✅' if redis_ok else '❌'}")
    print(f"   MongoDB: {'✅' if mongodb_ok else '❌'}")
    print(f"   Reports: {'✅' if reports_ok else '❌'}")

    if all([postgres_ok, redis_ok, mongodb_ok, reports_ok]):
        print("🎉 All data storage systems working!")
    else:
        print("⚠️ Some data storage systems have issues")


if __name__ == "__main__":
    main()