# NOTE(review): file-viewer metadata residue ("155 lines / 5.1 KiB / Python",
# repeated twice) was left at the top of this file; preserved here as a comment
# because bare text is a syntax error in a Python module.
from src.infrastructure.kafka.consumer.service import kafka_consumer
|
|
from src.infrastructure.observability.logger import logger
|
|
import os
|
|
|
|
"""
|
|
Event Handler Template
|
|
Auto-generated handler for topic: claim.submitted
|
|
|
|
This handler is scaffolded - implement your business logic in the handle method
|
|
"""
|
|
|
|
class ClaimSubmittedHandlerHandler:
    """Kafka event handler for the ``claim.submitted`` topic.

    Scaffolded handler: ``start()`` subscribes the consumer group,
    ``handle()`` receives each message (business logic TODO), and
    ``stop()`` disconnects the consumer.

    NOTE(review): the doubled "Handler" in the class name looks like a
    generator artifact; kept because the module-level singleton references
    this name.
    """

    def __init__(self):
        # Topic this handler consumes and the consumer-group id it joins.
        self.topic = 'claim.submitted'
        # Fixed: original assigned `'…' or '…'` with two identical literals —
        # a redundant expression that always evaluated to the first operand.
        self.group_id = 'test_project-claim.submitted-handler'
        # Guards against double-subscribe / double-disconnect.
        self.is_running = False

    async def start(self):
        """Initialize and start consuming events.

        Idempotent: a second call while running only logs a warning.

        Raises:
            Exception: re-raises any failure from the underlying
                ``kafka_consumer.subscribe`` call after logging it.
        """
        try:
            if self.is_running:
                logger.warning('Event Handler: Already running', extra={'topic': self.topic})
                return

            await kafka_consumer.subscribe(
                self.group_id,
                [self.topic],
                self.handle,
                {
                    'fromBeginning': False,  # Start from latest offset
                    'sessionTimeout': 30000,
                    'heartbeatInterval': 3000
                }
            )

            self.is_running = True
            logger.info('Event Handler: Started', extra={'topic': self.topic, 'groupId': self.group_id})
        except Exception as e:
            logger.error('Event Handler: Start failed', extra={
                'topic': self.topic,
                'error': str(e)
            })
            raise

    async def stop(self):
        """Stop consuming events.

        Best-effort: unlike ``start()``, failures are logged but NOT
        re-raised, so shutdown paths never blow up. No-op when not running.
        """
        try:
            if not self.is_running:
                return

            await kafka_consumer.disconnect(self.group_id)
            self.is_running = False
            logger.info('Event Handler: Stopped', extra={'topic': self.topic})
        except Exception as e:
            logger.error('Event Handler: Stop failed', extra={
                'topic': self.topic,
                'error': str(e)
            })

    async def handle(self, message: dict):
        """Handle one incoming event.

        TODO: Implement your business logic here.

        Args:
            message: Kafka message dict with keys:
                - topic: Topic name
                - partition: Partition number
                - offset: Message offset
                - key: Message key
                - value: Parsed message value
                - headers: Message headers
                - timestamp: Message timestamp

        Raises:
            Exception: re-raised after logging so the Kafka retry
                mechanism sees the failure.
        """
        topic = message['topic']
        key = message.get('key')
        # Fixed: `.get('value', {})` only defaults when the key is ABSENT;
        # a present-but-None value would crash on `value.get(...)` below.
        value = message.get('value') or {}
        headers = message.get('headers') or {}
        timestamp = message.get('timestamp')

        logger.info('Event Handler: Processing event', extra={
            'topic': topic,
            'key': key,
            'eventId': value.get('id') or value.get('eventId'),
            'timestamp': timestamp
        })

        try:
            # TODO: Implement your business logic here
            # Examples:
            # - Update related entities
            # - Send notifications
            # - Trigger workflows
            # - Update cache
            # - Write to database

            # Example implementation:
            # event_id = value.get('id')
            # event_name = value.get('name')
            # await self.process_event(event_id, event_name)

            logger.info('Event Handler: Event processed successfully', extra={
                'topic': topic,
                'key': key,
                'eventId': value.get('id') or value.get('eventId')
            })
        except Exception as e:
            logger.error('Event Handler: Processing failed', extra={
                'topic': topic,
                'key': key,
                'error': str(e)
            })

            # Re-throw to trigger Kafka retry mechanism
            # For DLQ handling, implement custom retry logic here
            raise

    async def process_event(self, event_id: str, event_data: dict):
        """Process one event.

        TODO: Implement your business logic.

        Args:
            event_id: Event ID
            event_data: Event data
        """
        # Example: Send notification
        # await self.notification_service.notify_admins({
        #     'type': 'EVENT_RECEIVED',
        #     'eventId': event_id,
        #     'data': event_data
        # })

        # Example: Update cache
        # from src.infrastructure.redis.cache.service import cache_service
        # await cache_service.set(f'event:{event_id}', event_data, 3600)

        # Example: Write to audit log
        # from src.services.audit_log_service import audit_log_service
        # await audit_log_service.log({
        #     'action': 'EVENT_PROCESSED',
        #     'entityId': event_id,
        #     'metadata': event_data
        # })
        pass
|
|
|
|
# Singleton instance
claimSubmittedHandler_handler = ClaimSubmittedHandlerHandler()

# Auto-start if enabled (set KAFKA_AUTO_START_HANDLERS=false to disable).
if os.getenv('KAFKA_AUTO_START_HANDLERS', 'true').lower() != 'false':
    import asyncio

    try:
        # Fixed: asyncio.create_task() requires a RUNNING event loop; at
        # plain module-import time there is none, so the original call
        # raised RuntimeError and broke the import.
        asyncio.get_running_loop()
    except RuntimeError:
        # No loop yet — the application must await start() explicitly
        # (e.g. from its startup hook).
        logger.warning(
            'Event Handler: no running event loop at import; call start() explicitly',
            extra={'topic': claimSubmittedHandler_handler.topic}
        )
    else:
        asyncio.create_task(claimSubmittedHandler_handler.start())
|
|
|