# aibilly_backend_code/src/infrastructure/kafka/handlers/transcript-completed.py
# Exported 2026-03-10 16:44:04 +05:30 — 155 lines, 5.1 KiB, Python
from src.infrastructure.kafka.consumer.service import kafka_consumer
from src.infrastructure.observability.logger import logger
import os
"""
Event Handler Template
Auto-generated handler for topic: transcript.completed
This handler is scaffolded - implement your business logic in the handle method
"""
class TranscriptCompletedHandlerHandler:
    """Kafka event handler for the ``transcript.completed`` topic.

    Scaffolded consumer: the start/stop lifecycle is implemented; the
    actual business logic belongs in :meth:`handle` (and optionally
    :meth:`process_event`).

    NOTE(review): the class name carries a doubled "Handler" suffix from
    the code generator; kept as-is because the singleton below and any
    external callers reference it.
    """

    def __init__(self):
        # Topic this handler consumes.
        self.topic = 'transcript.completed'
        # Consumer-group id (fixed: the original `'x' or 'x'` expression was
        # redundant — `or` with a non-empty left operand never evaluates the right).
        self.group_id = 'test_project-transcript.completed-handler'
        # Guards against double-subscribe / double-disconnect.
        self.is_running = False

    async def start(self):
        """Subscribe to the topic and begin consuming events.

        Idempotent: a second call while running only logs a warning.

        Raises:
            Exception: re-raised after logging if the subscription fails,
                so the caller can decide how to recover.
        """
        try:
            if self.is_running:
                logger.warning('Event Handler: Already running', extra={'topic': self.topic})
                return
            await kafka_consumer.subscribe(
                self.group_id,
                [self.topic],
                self.handle,
                {
                    'fromBeginning': False,  # Start from latest offset
                    'sessionTimeout': 30000,
                    'heartbeatInterval': 3000
                }
            )
            self.is_running = True
            logger.info('Event Handler: Started', extra={'topic': self.topic, 'groupId': self.group_id})
        except Exception as e:
            logger.error('Event Handler: Start failed', extra={
                'topic': self.topic,
                'error': str(e)
            })
            raise

    async def stop(self):
        """Disconnect the consumer group and stop consuming events.

        Idempotent: a no-op when the handler is not running. Failures are
        logged but intentionally NOT re-raised (best-effort shutdown).
        """
        try:
            if not self.is_running:
                return
            await kafka_consumer.disconnect(self.group_id)
            self.is_running = False
            logger.info('Event Handler: Stopped', extra={'topic': self.topic})
        except Exception as e:
            logger.error('Event Handler: Stop failed', extra={
                'topic': self.topic,
                'error': str(e)
            })

    async def handle(self, message: dict):
        """Handle one incoming Kafka event.

        TODO: Implement your business logic here.

        Args:
            message: Kafka message dict with keys:
                - topic: Topic name
                - partition: Partition number
                - offset: Message offset
                - key: Message key
                - value: Parsed message value
                - headers: Message headers
                - timestamp: Message timestamp

        Raises:
            Exception: re-raised after logging so the Kafka layer can
                retry the message (see comment below for DLQ handling).
        """
        topic = message['topic']
        key = message.get('key')
        value = message.get('value', {})
        headers = message.get('headers', {})
        timestamp = message.get('timestamp')
        logger.info('Event Handler: Processing event', extra={
            'topic': topic,
            'key': key,
            # Producers may emit either 'id' or 'eventId'; accept both.
            'eventId': value.get('id') or value.get('eventId'),
            'timestamp': timestamp
        })
        try:
            # TODO: Implement your business logic here
            # Examples:
            # - Update related entities
            # - Send notifications
            # - Trigger workflows
            # - Update cache
            # - Write to database
            # Example implementation:
            # event_id = value.get('id')
            # event_name = value.get('name')
            # await self.process_event(event_id, event_name)
            logger.info('Event Handler: Event processed successfully', extra={
                'topic': topic,
                'key': key,
                'eventId': value.get('id') or value.get('eventId')
            })
        except Exception as e:
            logger.error('Event Handler: Processing failed', extra={
                'topic': topic,
                'key': key,
                'error': str(e)
            })
            # Re-throw to trigger Kafka retry mechanism
            # For DLQ handling, implement custom retry logic here
            raise

    async def process_event(self, event_id: str, event_data: dict):
        """Process a single event (scaffold — intentionally a no-op).

        TODO: Implement your business logic.

        Args:
            event_id: Event ID
            event_data: Event data
        """
        # Example: Send notification
        # await self.notification_service.notify_admins({
        #     'type': 'EVENT_RECEIVED',
        #     'eventId': event_id,
        #     'data': event_data
        # })
        # Example: Update cache
        # from src.infrastructure.redis.cache.service import cache_service
        # await cache_service.set(f'event:{event_id}', event_data, 3600)
        # Example: Write to audit log
        # from src.services.audit_log_service import audit_log_service
        # await audit_log_service.log({
        #     'action': 'EVENT_PROCESSED',
        #     'entityId': event_id,
        #     'metadata': event_data
        # })
        pass
# Singleton instance shared by the application.
transcriptCompletedHandler_handler = TranscriptCompletedHandlerHandler()

# Auto-start unless explicitly disabled via KAFKA_AUTO_START_HANDLERS=false.
if os.getenv('KAFKA_AUTO_START_HANDLERS', 'true').lower() != 'false':
    import asyncio
    try:
        # Bug fix: asyncio.create_task() requires a RUNNING event loop.
        # At plain import time there is none, so the original code raised
        # RuntimeError and aborted the module import. Only schedule the
        # task when a loop is actually running; otherwise leave startup
        # to the application (call `await ...start()` from its loop).
        asyncio.get_running_loop()
    except RuntimeError:
        logger.warning(
            'Event Handler: no running event loop at import time; '
            'call transcriptCompletedHandler_handler.start() from your app startup',
            extra={'topic': transcriptCompletedHandler_handler.topic}
        )
    else:
        asyncio.create_task(transcriptCompletedHandler_handler.start())