Initial commit - production deployment
This commit is contained in:
407
services/alert_processor/app/repositories/event_repository.py
Normal file
407
services/alert_processor/app/repositories/event_repository.py
Normal file
@@ -0,0 +1,407 @@
|
||||
"""
|
||||
Event repository for database operations.
|
||||
"""
|
||||
|
||||
from datetime import datetime, timedelta, timezone
from typing import Any, Dict, List, Optional
from uuid import UUID

import structlog
from sqlalchemy import and_, desc, func, select
from sqlalchemy.dialects.postgresql import insert
from sqlalchemy.ext.asyncio import AsyncSession

from app.models.events import Event
from app.schemas.events import EnrichedEvent, EventSummary, EventResponse, I18nContent, SmartAction
|
||||
|
||||
logger = structlog.get_logger()
|
||||
|
||||
|
||||
class EventRepository:
    """Repository for event database operations.

    Wraps an ``AsyncSession`` and provides persistence, filtered listing,
    semantic duplicate detection, dashboard summary counts, and status
    transitions for ``Event`` rows. Write methods commit immediately, so
    callers share this session's transaction scope.
    """

    # Entity-link keys compared when deciding whether two events refer to the
    # same business object (batch, PO, ingredient, ...).
    _ENTITY_LINK_KEYS = (
        'production_batch', 'purchase_order', 'ingredient', 'supplier', 'equipment',
    )
    # Metadata identifier fields used for generic duplicate matching.
    _METADATA_ID_KEYS = (
        'batch_id', 'po_id', 'ingredient_id', 'supplier_id', 'equipment_id',
    )

    def __init__(self, session: AsyncSession):
        self.session = session

    async def create_event(self, enriched_event: EnrichedEvent) -> Event:
        """
        Store enriched event in database.

        Args:
            enriched_event: Enriched event with all context

        Returns:
            Stored Event model (refreshed so DB-generated defaults are loaded)
        """
        # NOTE(review): .dict() is the pydantic v1 API; change to .model_dump()
        # if/when the project migrates to pydantic v2 — confirm version.
        event = Event(
            id=enriched_event.id,
            tenant_id=UUID(enriched_event.tenant_id),
            event_class=enriched_event.event_class,
            event_domain=enriched_event.event_domain,
            event_type=enriched_event.event_type,
            service=enriched_event.service,

            # i18n content
            i18n_title_key=enriched_event.i18n.title_key,
            i18n_title_params=enriched_event.i18n.title_params,
            i18n_message_key=enriched_event.i18n.message_key,
            i18n_message_params=enriched_event.i18n.message_params,

            # Priority
            priority_score=enriched_event.priority_score,
            priority_level=enriched_event.priority_level,
            type_class=enriched_event.type_class,

            # Enrichment contexts (optional sub-models serialized to JSON)
            orchestrator_context=enriched_event.orchestrator_context.dict() if enriched_event.orchestrator_context else None,
            business_impact=enriched_event.business_impact.dict() if enriched_event.business_impact else None,
            urgency=enriched_event.urgency.dict() if enriched_event.urgency else None,
            user_agency=enriched_event.user_agency.dict() if enriched_event.user_agency else None,
            trend_context=enriched_event.trend_context,

            # Smart actions
            smart_actions=[action.dict() for action in enriched_event.smart_actions],

            # AI reasoning
            ai_reasoning_summary_key=enriched_event.ai_reasoning_summary_key,
            ai_reasoning_summary_params=enriched_event.ai_reasoning_summary_params,
            ai_reasoning_details=enriched_event.ai_reasoning_details,
            confidence_score=enriched_event.confidence_score,

            # Entity links
            entity_links=enriched_event.entity_links,

            # Status
            status=enriched_event.status,

            # Metadata
            event_metadata=enriched_event.event_metadata,
        )

        self.session.add(event)
        await self.session.commit()
        await self.session.refresh(event)

        logger.info("event_stored", event_id=event.id, event_type=event.event_type)

        return event

    async def get_events(
        self,
        tenant_id: UUID,
        event_class: Optional[str] = None,
        priority_level: Optional[List[str]] = None,
        status: Optional[List[str]] = None,
        event_domain: Optional[str] = None,
        limit: int = 50,
        offset: int = 0
    ) -> List[Event]:
        """
        Get filtered list of events.

        Args:
            tenant_id: Tenant UUID
            event_class: Filter by event class (alert, notification, recommendation)
            priority_level: Filter by priority levels
            status: Filter by status values
            event_domain: Filter by domain
            limit: Max results
            offset: Pagination offset

        Returns:
            List of Event models, highest priority first, newest first within
            equal priority
        """
        query = select(Event).where(Event.tenant_id == tenant_id)

        # Apply optional filters (falsy values mean "no filter")
        if event_class:
            query = query.where(Event.event_class == event_class)
        if priority_level:
            query = query.where(Event.priority_level.in_(priority_level))
        if status:
            query = query.where(Event.status.in_(status))
        if event_domain:
            query = query.where(Event.event_domain == event_domain)

        # Order by priority, then recency; paginate last
        query = (
            query.order_by(desc(Event.priority_score), desc(Event.created_at))
            .limit(limit)
            .offset(offset)
        )

        result = await self.session.execute(query)
        return list(result.scalars().all())

    async def get_event_by_id(self, event_id: UUID) -> Optional[Event]:
        """Get single event by ID, or None if it does not exist."""
        result = await self.session.execute(select(Event).where(Event.id == event_id))
        return result.scalar_one_or_none()

    async def check_duplicate_alert(self, tenant_id: UUID, event_type: str, entity_links: Dict, event_metadata: Dict, time_window_hours: int = 24) -> Optional[Event]:
        """
        Check if a similar alert already exists within the time window.

        Args:
            tenant_id: Tenant UUID
            event_type: Type of event (e.g., 'production_delay', 'critical_stock_shortage')
            entity_links: Entity references (e.g., batch_id, po_id, ingredient_id)
            event_metadata: Event metadata for comparison
            time_window_hours: Time window in hours to check for duplicates

        Returns:
            Existing event if duplicate found, None otherwise
        """
        # timedelta is imported at module level — no local re-import needed.
        time_threshold = datetime.now(timezone.utc) - timedelta(hours=time_window_hours)

        # Candidates: same tenant/type, still active, within the window
        query = select(Event).where(
            and_(
                Event.tenant_id == tenant_id,
                Event.event_type == event_type,
                Event.status == "active",  # Only check active alerts
                Event.created_at >= time_threshold
            )
        )
        result = await self.session.execute(query)

        # Type-specific semantic matcher; unknown types fall back to the
        # generic metadata-identifier comparison. Resolved once, outside the loop.
        matcher = {
            "production_delay": self._production_delay_match,
            "critical_stock_shortage": self._stock_shortage_match,
            "delivery_overdue": self._delivery_overdue_match,
        }.get(event_type, self._metadata_match)

        for event in result.scalars():
            # Entity links must refer to the same business object first
            if self._entities_match(event.entity_links, entity_links):
                if matcher(event.event_metadata, event_metadata):
                    return event

        return None

    @staticmethod
    def _shares_identifier(existing: Optional[Dict], new: Optional[Dict], keys: tuple) -> bool:
        """Return True if any key in *keys* has the same non-None value in both dicts.

        None-safe: either dict missing/empty means no match. Requiring a
        non-None value prevents two events that both LACK an identifier from
        comparing None == None and being wrongly treated as duplicates.
        """
        if not existing or not new:
            return False
        for key in keys:
            value = existing.get(key)
            if value is not None and value == new.get(key):
                return True
        return False

    def _entities_match(self, existing_links: Dict, new_links: Dict) -> bool:
        """Check if entity links match between two events."""
        return self._shares_identifier(existing_links, new_links, self._ENTITY_LINK_KEYS)

    def _production_delay_match(self, existing_meta: Dict, new_meta: Dict) -> bool:
        """Check if production delay alerts refer to the same batch and product."""
        if not existing_meta or not new_meta:
            return False
        batch_id = existing_meta.get('batch_id')
        # A real batch_id is required on both sides; previously two events both
        # missing the key matched via None == None.
        return (
            batch_id is not None
            and batch_id == new_meta.get('batch_id')
            and existing_meta.get('product_name') == new_meta.get('product_name')
        )

    def _stock_shortage_match(self, existing_meta: Dict, new_meta: Dict) -> bool:
        """Check if stock shortage alerts refer to the same ingredient."""
        return self._shares_identifier(existing_meta, new_meta, ('ingredient_id',))

    def _delivery_overdue_match(self, existing_meta: Dict, new_meta: Dict) -> bool:
        """Check if delivery overdue alerts refer to the same purchase order."""
        return self._shares_identifier(existing_meta, new_meta, ('po_id',))

    def _metadata_match(self, existing_meta: Dict, new_meta: Dict) -> bool:
        """Generic metadata matching for other alert types."""
        return self._shares_identifier(existing_meta, new_meta, self._METADATA_ID_KEYS)

    async def _count_grouped(self, tenant_id: UUID, column: Any, active_only: bool = False) -> Dict[str, int]:
        """Count this tenant's events grouped by *column*.

        Args:
            tenant_id: Tenant UUID
            column: Event column to group by (e.g. Event.status)
            active_only: If True, restrict to events with status == "active"

        Returns:
            Mapping of column value -> count
        """
        conditions = [Event.tenant_id == tenant_id]
        if active_only:
            conditions.append(Event.status == "active")

        query = (
            select(column, func.count(Event.id).label('count'))
            .where(and_(*conditions))
            .group_by(column)
        )
        result = await self.session.execute(query)
        return {key: count for key, count in result}

    async def get_summary(self, tenant_id: UUID) -> EventSummary:
        """
        Get summary statistics for dashboard.

        Args:
            tenant_id: Tenant UUID

        Returns:
            EventSummary with counts and statistics
        """
        # Status counts include ALL statuses; the remaining breakdowns cover
        # active events only (matching the original per-query filters).
        status_counts = await self._count_grouped(tenant_id, Event.status)
        priority_counts = await self._count_grouped(tenant_id, Event.priority_level, active_only=True)
        domain_counts = await self._count_grouped(tenant_id, Event.event_domain, active_only=True)
        type_class_counts = await self._count_grouped(tenant_id, Event.type_class, active_only=True)

        return EventSummary(
            total_active=status_counts.get("active", 0),
            total_acknowledged=status_counts.get("acknowledged", 0),
            total_resolved=status_counts.get("resolved", 0),
            by_priority=priority_counts,
            by_domain=domain_counts,
            by_type_class=type_class_counts,
            critical_alerts=priority_counts.get("critical", 0),
            important_alerts=priority_counts.get("important", 0)
        )

    async def _set_status(self, event_id: UUID, status: str, timestamp_attr: Optional[str], log_event: str) -> Event:
        """Transition an event to *status*, optionally stamping a timestamp field.

        Args:
            event_id: Event UUID
            status: New status value
            timestamp_attr: Event attribute to set to now (UTC), or None
            log_event: structlog event name to emit

        Raises:
            ValueError: if no event with *event_id* exists
        """
        event = await self.get_event_by_id(event_id)
        if not event:
            raise ValueError(f"Event {event_id} not found")

        event.status = status
        if timestamp_attr:
            setattr(event, timestamp_attr, datetime.now(timezone.utc))

        await self.session.commit()
        await self.session.refresh(event)

        logger.info(log_event, event_id=event_id)
        return event

    async def acknowledge_event(self, event_id: UUID) -> Event:
        """Mark event as acknowledged"""
        return await self._set_status(event_id, "acknowledged", "acknowledged_at", "event_acknowledged")

    async def resolve_event(self, event_id: UUID) -> Event:
        """Mark event as resolved"""
        return await self._set_status(event_id, "resolved", "resolved_at", "event_resolved")

    async def dismiss_event(self, event_id: UUID) -> Event:
        """Mark event as dismissed (no timestamp column is stamped)"""
        return await self._set_status(event_id, "dismissed", None, "event_dismissed")

    def _event_to_response(self, event: Event) -> EventResponse:
        """Convert Event model to EventResponse API schema"""
        return EventResponse(
            id=event.id,
            tenant_id=event.tenant_id,
            created_at=event.created_at,
            event_class=event.event_class,
            event_domain=event.event_domain,
            event_type=event.event_type,
            i18n=I18nContent(
                title_key=event.i18n_title_key,
                title_params=event.i18n_title_params,
                message_key=event.i18n_message_key,
                message_params=event.i18n_message_params
            ),
            priority_score=event.priority_score,
            priority_level=event.priority_level,
            type_class=event.type_class,
            smart_actions=[SmartAction(**action) for action in event.smart_actions],
            status=event.status,
            orchestrator_context=event.orchestrator_context,
            business_impact=event.business_impact,
            urgency=event.urgency,
            user_agency=event.user_agency,
            ai_reasoning_summary_key=event.ai_reasoning_summary_key,
            ai_reasoning_summary_params=event.ai_reasoning_summary_params,
            ai_reasoning_details=event.ai_reasoning_details,
            confidence_score=event.confidence_score,
            entity_links=event.entity_links,
            event_metadata=event.event_metadata
        )
|
||||
Reference in New Issue
Block a user