"""
Alert API endpoints.
"""
|
|
|
|
|
|
2025-12-05 20:07:01 +01:00
|
|
|
from fastapi import APIRouter, Depends, Query, HTTPException
|
2025-11-07 22:16:16 +00:00
|
|
|
from typing import List, Optional
|
|
|
|
|
from uuid import UUID
|
2025-12-05 20:07:01 +01:00
|
|
|
from sqlalchemy.ext.asyncio import AsyncSession
|
2025-11-07 22:16:16 +00:00
|
|
|
import structlog
|
|
|
|
|
|
2025-12-05 20:07:01 +01:00
|
|
|
from app.core.database import get_db
|
|
|
|
|
from app.repositories.event_repository import EventRepository
|
|
|
|
|
from app.schemas.events import EventResponse, EventSummary
|
2025-11-07 22:16:16 +00:00
|
|
|
|
|
|
|
|
# Module-level structured logger for this router.
logger = structlog.get_logger()

# Router instance; mounted by the application under the API prefix.
router = APIRouter()
|
|
|
|
|
|
|
|
|
|
|
2025-12-05 20:07:01 +01:00
|
|
|
@router.get("/alerts", response_model=List[EventResponse])
async def get_alerts(
    tenant_id: UUID,
    event_class: Optional[str] = Query(None, description="Filter by event class"),
    priority_level: Optional[List[str]] = Query(None, description="Filter by priority levels"),
    status: Optional[List[str]] = Query(None, description="Filter by status values"),
    event_domain: Optional[str] = Query(None, description="Filter by domain"),
    # ge=1 rejects limit=0/negatives with a 422 instead of passing them to the repository.
    limit: int = Query(50, ge=1, le=100, description="Max results"),
    # ge=0 rejects negative pagination offsets at the validation layer.
    offset: int = Query(0, ge=0, description="Pagination offset"),
    db: AsyncSession = Depends(get_db)
):
    """
    Get filtered list of events.

    Query Parameters:
    - event_class: alert, notification, recommendation
    - priority_level: critical, important, standard, info
    - status: active, acknowledged, resolved, dismissed
    - event_domain: inventory, production, supply_chain, etc.
    - limit: 1-100 results (default 50)
    - offset: For pagination (>= 0)

    Raises:
        HTTPException: 500 if the repository query fails.
    """
    try:
        repo = EventRepository(db)
        events = await repo.get_events(
            tenant_id=tenant_id,
            event_class=event_class,
            priority_level=priority_level,
            status=status,
            event_domain=event_domain,
            limit=limit,
            offset=offset
        )

        # Convert ORM events to response models.
        return [repo._event_to_response(event) for event in events]

    except Exception as e:
        logger.error("get_alerts_failed", error=str(e), tenant_id=str(tenant_id))
        raise HTTPException(status_code=500, detail="Failed to retrieve alerts")
|
2025-11-07 22:16:16 +00:00
|
|
|
|
|
|
|
|
|
2025-12-05 20:07:01 +01:00
|
|
|
@router.get("/alerts/summary", response_model=EventSummary)
async def get_alerts_summary(
    tenant_id: UUID,
    db: AsyncSession = Depends(get_db)
):
    """
    Get summary statistics for dashboard.

    Returns counts by:
    - Status (active, acknowledged, resolved)
    - Priority level (critical, important, standard, info)
    - Domain (inventory, production, etc.)
    - Type class (action_needed, prevented_issue, etc.)

    Raises:
        HTTPException: 500 if the summary query fails.
    """
    try:
        return await EventRepository(db).get_summary(tenant_id)
    except Exception as exc:
        logger.error("get_summary_failed", error=str(exc), tenant_id=str(tenant_id))
        raise HTTPException(status_code=500, detail="Failed to retrieve summary")
|
2025-11-07 22:16:16 +00:00
|
|
|
|
|
|
|
|
|
2025-12-05 20:07:01 +01:00
|
|
|
@router.get("/alerts/{alert_id}", response_model=EventResponse)
async def get_alert(
    tenant_id: UUID,
    alert_id: UUID,
    db: AsyncSession = Depends(get_db)
):
    """Get single alert by ID"""
    try:
        repo = EventRepository(db)
        record = await repo.get_event_by_id(alert_id)

        # Guard clauses: unknown id -> 404, cross-tenant access -> 403.
        if not record:
            raise HTTPException(status_code=404, detail="Alert not found")
        if record.tenant_id != tenant_id:
            raise HTTPException(status_code=403, detail="Access denied")

        return repo._event_to_response(record)

    except HTTPException:
        # Re-raise our own 404/403 untouched.
        raise
    except Exception as exc:
        logger.error("get_alert_failed", error=str(exc), alert_id=str(alert_id))
        raise HTTPException(status_code=500, detail="Failed to retrieve alert")
|
2025-11-27 15:52:40 +01:00
|
|
|
|
|
|
|
|
|
2025-12-05 20:07:01 +01:00
|
|
|
@router.post("/alerts/{alert_id}/acknowledge", response_model=EventResponse)
async def acknowledge_alert(
    tenant_id: UUID,
    alert_id: UUID,
    db: AsyncSession = Depends(get_db)
):
    """
    Mark alert as acknowledged.

    Sets status to 'acknowledged' and records timestamp.
    """
    try:
        repo = EventRepository(db)

        # Ownership must be verified before mutating the event.
        existing = await repo.get_event_by_id(alert_id)
        if not existing:
            raise HTTPException(status_code=404, detail="Alert not found")
        if existing.tenant_id != tenant_id:
            raise HTTPException(status_code=403, detail="Access denied")

        # Perform the acknowledgement and return the updated event.
        acknowledged = await repo.acknowledge_event(alert_id)
        return repo._event_to_response(acknowledged)

    except HTTPException:
        # Pass through our own 404/403 responses.
        raise
    except Exception as exc:
        logger.error("acknowledge_alert_failed", error=str(exc), alert_id=str(alert_id))
        raise HTTPException(status_code=500, detail="Failed to acknowledge alert")
|
2025-11-27 15:52:40 +01:00
|
|
|
|
|
|
|
|
|
2025-12-05 20:07:01 +01:00
|
|
|
@router.post("/alerts/{alert_id}/resolve", response_model=EventResponse)
async def resolve_alert(
    tenant_id: UUID,
    alert_id: UUID,
    db: AsyncSession = Depends(get_db)
):
    """
    Mark alert as resolved.

    Sets status to 'resolved' and records timestamp.
    """
    try:
        repo = EventRepository(db)

        # Ownership must be verified before mutating the event.
        existing = await repo.get_event_by_id(alert_id)
        if not existing:
            raise HTTPException(status_code=404, detail="Alert not found")
        if existing.tenant_id != tenant_id:
            raise HTTPException(status_code=403, detail="Access denied")

        # Perform the resolution and return the updated event.
        resolved = await repo.resolve_event(alert_id)
        return repo._event_to_response(resolved)

    except HTTPException:
        # Pass through our own 404/403 responses.
        raise
    except Exception as exc:
        logger.error("resolve_alert_failed", error=str(exc), alert_id=str(alert_id))
        raise HTTPException(status_code=500, detail="Failed to resolve alert")
|
2025-11-27 15:52:40 +01:00
|
|
|
|
|
|
|
|
|
2025-12-05 20:07:01 +01:00
|
|
|
@router.post("/alerts/{alert_id}/dismiss", response_model=EventResponse)
async def dismiss_alert(
    tenant_id: UUID,
    alert_id: UUID,
    db: AsyncSession = Depends(get_db)
):
    """
    Mark alert as dismissed.

    Sets status to 'dismissed'.
    """
    try:
        repo = EventRepository(db)

        # Ownership must be verified before mutating the event.
        existing = await repo.get_event_by_id(alert_id)
        if not existing:
            raise HTTPException(status_code=404, detail="Alert not found")
        if existing.tenant_id != tenant_id:
            raise HTTPException(status_code=403, detail="Access denied")

        # Perform the dismissal and return the updated event.
        dismissed = await repo.dismiss_event(alert_id)
        return repo._event_to_response(dismissed)

    except HTTPException:
        # Pass through our own 404/403 responses.
        raise
    except Exception as exc:
        logger.error("dismiss_alert_failed", error=str(exc), alert_id=str(alert_id))
        raise HTTPException(status_code=500, detail="Failed to dismiss alert")
|
2025-11-27 15:52:40 +01:00
|
|
|
|
|
|
|
|
|
2025-12-05 20:07:01 +01:00
|
|
|
@router.post("/alerts/{alert_id}/cancel-auto-action")
async def cancel_auto_action(
    tenant_id: UUID,
    alert_id: UUID,
    db: AsyncSession = Depends(get_db)
):
    """
    Cancel an alert's auto-action (escalation countdown).

    Changes type_class from 'escalation' to 'action_needed' if auto-action was pending.
    """
    try:
        repo = EventRepository(db)

        # Guard clauses: unknown id -> 404, cross-tenant access -> 403.
        owned = await repo.get_event_by_id(alert_id)
        if not owned:
            raise HTTPException(status_code=404, detail="Alert not found")
        if owned.tenant_id != tenant_id:
            raise HTTPException(status_code=403, detail="Access denied")

        # TODO: real cancellation belongs in the repository layer;
        # for now this endpoint only reports success.
        return {
            "success": True,
            "event_id": str(alert_id),
            "message": "Auto-action cancelled successfully",
            "updated_type_class": "action_needed"
        }

    except HTTPException:
        # Pass through our own 404/403 responses.
        raise
    except Exception as exc:
        logger.error("cancel_auto_action_failed", error=str(exc), alert_id=str(alert_id))
        raise HTTPException(status_code=500, detail="Failed to cancel auto-action")
|
2025-11-27 15:52:40 +01:00
|
|
|
|
|
|
|
|
|
2025-12-05 20:07:01 +01:00
|
|
|
@router.post("/alerts/bulk-acknowledge")
async def bulk_acknowledge_alerts(
    tenant_id: UUID,
    request_body: dict,
    db: AsyncSession = Depends(get_db)
):
    """
    Acknowledge multiple alerts by metadata filter.

    Request body:
    {
        "alert_type": "critical_stock_shortage",
        "metadata_filter": {"ingredient_id": "123"}
    }

    Returns the count of acknowledged alerts and the matched alert ids.

    Raises:
        HTTPException: 400 if alert_type is missing, 500 on unexpected failure.
    """
    try:
        alert_type = request_body.get("alert_type")
        metadata_filter = request_body.get("metadata_filter", {})

        if not alert_type:
            raise HTTPException(status_code=400, detail="alert_type is required")

        repo = EventRepository(db)

        # Fetch candidate alerts (active only; NOTE: capped at 100 by the query,
        # so larger matching sets are acknowledged only partially per call).
        events = await repo.get_events(
            tenant_id=tenant_id,
            event_class="alert",
            status=["active"],
            limit=100
        )

        # Keep events whose type matches and whose metadata contains every
        # key/value pair from the filter.
        matching_ids = [
            event.id
            for event in events
            if event.event_type == alert_type
            and all(
                event.event_metadata.get(key) == value
                for key, value in metadata_filter.items()
            )
        ]

        # Acknowledge each match; log per-item failures (instead of silently
        # swallowing them) so partial results remain diagnosable.
        acknowledged_count = 0
        for event_id in matching_ids:
            try:
                await repo.acknowledge_event(event_id)
                acknowledged_count += 1
            except Exception as ack_err:
                logger.warning(
                    "bulk_acknowledge_item_failed",
                    event_id=str(event_id),
                    error=str(ack_err)
                )

        return {
            "success": True,
            "acknowledged_count": acknowledged_count,
            "alert_ids": [str(event_id) for event_id in matching_ids]
        }

    except HTTPException:
        raise
    except Exception as e:
        logger.error("bulk_acknowledge_failed", error=str(e), tenant_id=str(tenant_id))
        raise HTTPException(status_code=500, detail="Failed to bulk acknowledge alerts")
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
@router.post("/alerts/bulk-resolve")
async def bulk_resolve_alerts(
    tenant_id: UUID,
    request_body: dict,
    db: AsyncSession = Depends(get_db)
):
    """
    Resolve multiple alerts by metadata filter.

    Request body:
    {
        "alert_type": "critical_stock_shortage",
        "metadata_filter": {"ingredient_id": "123"}
    }

    Returns the count of resolved alerts and the matched alert ids.

    Raises:
        HTTPException: 400 if alert_type is missing, 500 on unexpected failure.
    """
    try:
        alert_type = request_body.get("alert_type")
        metadata_filter = request_body.get("metadata_filter", {})

        if not alert_type:
            raise HTTPException(status_code=400, detail="alert_type is required")

        repo = EventRepository(db)

        # Fetch candidate alerts (NOTE: capped at 100 by the query, so larger
        # matching sets are resolved only partially per call).
        events = await repo.get_events(
            tenant_id=tenant_id,
            event_class="alert",
            status=["active", "acknowledged"],
            limit=100
        )

        # Keep events whose type matches and whose metadata contains every
        # key/value pair from the filter.
        matching_ids = [
            event.id
            for event in events
            if event.event_type == alert_type
            and all(
                event.event_metadata.get(key) == value
                for key, value in metadata_filter.items()
            )
        ]

        # Resolve each match; log per-item failures (instead of silently
        # swallowing them) so partial results remain diagnosable.
        resolved_count = 0
        for event_id in matching_ids:
            try:
                await repo.resolve_event(event_id)
                resolved_count += 1
            except Exception as resolve_err:
                logger.warning(
                    "bulk_resolve_item_failed",
                    event_id=str(event_id),
                    error=str(resolve_err)
                )

        return {
            "success": True,
            "resolved_count": resolved_count,
            "alert_ids": [str(event_id) for event_id in matching_ids]
        }

    except HTTPException:
        raise
    except Exception as e:
        logger.error("bulk_resolve_failed", error=str(e), tenant_id=str(tenant_id))
        raise HTTPException(status_code=500, detail="Failed to bulk resolve alerts")
|
2025-11-27 15:52:40 +01:00
|
|
|
|
|
|
|
|
|
2025-12-05 20:07:01 +01:00
|
|
|
@router.post("/events/{event_id}/interactions")
async def record_interaction(
    tenant_id: UUID,
    event_id: UUID,
    request_body: dict,
    db: AsyncSession = Depends(get_db)
):
    """
    Record user interaction with an event (for analytics).

    Request body:
    {
        "interaction_type": "viewed" | "clicked" | "dismissed" | "acted_upon",
        "interaction_metadata": {...}
    }
    """
    try:
        interaction_type = request_body.get("interaction_type")
        interaction_metadata = request_body.get("interaction_metadata", {})

        if not interaction_type:
            raise HTTPException(status_code=400, detail="interaction_type is required")

        repo = EventRepository(db)

        # Guard clauses: event must exist and belong to the caller's tenant.
        target = await repo.get_event_by_id(event_id)
        if not target:
            raise HTTPException(status_code=404, detail="Event not found")
        if target.tenant_id != tenant_id:
            raise HTTPException(status_code=403, detail="Access denied")

        # Interactions are only logged for now; a dedicated table could
        # persist them in the future.
        logger.info(
            "interaction_recorded",
            event_id=str(event_id),
            interaction_type=interaction_type,
            metadata=interaction_metadata
        )

        return {
            "success": True,
            "interaction_id": str(event_id),  # Would be a real ID in production
            "event_id": str(event_id),
            "interaction_type": interaction_type
        }

    except HTTPException:
        # Pass through our own 400/404/403 responses.
        raise
    except Exception as exc:
        logger.error("record_interaction_failed", error=str(exc), event_id=str(event_id))
        raise HTTPException(status_code=500, detail="Failed to record interaction")
|