"""
Alert Analytics API Endpoints
"""
from fastapi import APIRouter, Depends, HTTPException, Path, Body, Query
from typing import List, Dict, Any, Optional
from uuid import UUID
from pydantic import BaseModel, Field
import structlog
from shared.auth.decorators import get_current_user_dep
from shared.auth.access_control import service_only_access
logger = structlog.get_logger()
router = APIRouter()
# Schemas
class InteractionCreate(BaseModel):
    """Request schema for recording a single user interaction with an alert."""
    # Identifier of the alert being interacted with (kept as a plain string here;
    # the path parameter on the endpoint is the UUID-validated source of truth)
    alert_id: str = Field(..., description="Alert ID")
    # Expected values: acknowledged, resolved, snoozed, dismissed — not enforced
    # by this schema; validated downstream by the repository
    interaction_type: str = Field(..., description="Type of interaction: acknowledged, resolved, snoozed, dismissed")
    # Optional free-form context stored alongside the interaction
    metadata: Optional[Dict[str, Any]] = Field(None, description="Additional metadata")
class InteractionBatchCreate(BaseModel):
    """Request schema for recording several alert interactions in one call (e.g. offline sync)."""
    # Each dict is passed through to the repository; the endpoint injects
    # 'user_id' into every entry before persisting
    interactions: List[Dict[str, Any]] = Field(..., description="List of interactions to create")
class AnalyticsResponse(BaseModel):
    """Response schema for the aggregated alert analytics endpoint.

    NOTE(review): field names are camelCase — presumably mirroring a frontend
    contract; confirm before renaming to snake_case.
    """
    # Per-day alert counts with severity breakdown
    trends: List[Dict[str, Any]]
    # Average time-to-acknowledgment, in seconds (integer-truncated)
    averageResponseTime: int
    # Top alert categories by volume
    topCategories: List[Dict[str, Any]]
    totalAlerts: int
    resolvedAlerts: int
    activeAlerts: int
    # Percentage (0-100) of alerts resolved in the analyzed window
    resolutionRate: int
    predictedDailyAverage: int
    # Day-of-week name with the highest alert volume
    busiestDay: str
async def get_analytics_repository(current_user: dict = Depends(get_current_user_dep)):
    """FastAPI dependency yielding an AlertAnalyticsRepository bound to a fresh session.

    Bug fix: the previous implementation built an inner async generator
    function and *returned the function object*, so ``Depends(get_analytics_repository)``
    resolved to a callable rather than a repository. Declaring the dependency
    itself as an async generator (dependency-with-yield) makes FastAPI open the
    session, hand the repository to the endpoint, and close the session after
    the response.

    Args:
        current_user: Injected authenticated user; required so the dependency
            enforces authentication even though the user is not used directly.

    Yields:
        AlertAnalyticsRepository: repository wrapping a live database session.
    """
    from app.repositories.analytics_repository import AlertAnalyticsRepository
    from app.config import AlertProcessorConfig
    from shared.database.base import create_database_manager

    config = AlertProcessorConfig()
    # NOTE(review): a database manager is created per request here (and in the
    # endpoints below) — consider caching it at module scope if it owns a pool.
    db_manager = create_database_manager(config.DATABASE_URL, "alert-processor")
    async with db_manager.get_session() as session:
        yield AlertAnalyticsRepository(session)
@router.post(
    "/api/v1/tenants/{tenant_id}/alerts/{alert_id}/interactions",
    response_model=Dict[str, Any],
    summary="Track alert interaction"
)
async def create_interaction(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    alert_id: UUID = Path(..., description="Alert ID"),
    interaction: InteractionCreate = Body(...),
    current_user: dict = Depends(get_current_user_dep)
):
    """
    Track a user interaction with an alert
    - **acknowledged**: User has seen and acknowledged the alert
    - **resolved**: User has resolved the alert
    - **snoozed**: User has snoozed the alert
    - **dismissed**: User has dismissed the alert
    """
    from app.repositories.analytics_repository import AlertAnalyticsRepository
    from app.config import AlertProcessorConfig
    from shared.database.base import create_database_manager

    try:
        settings = AlertProcessorConfig()
        manager = create_database_manager(settings.DATABASE_URL, "alert-processor")
        async with manager.get_session() as session:
            repository = AlertAnalyticsRepository(session)
            # Persist the interaction, attributing it to the authenticated user
            record = await repository.create_interaction(
                tenant_id=tenant_id,
                alert_id=alert_id,
                user_id=UUID(current_user['user_id']),
                interaction_type=interaction.interaction_type,
                metadata=interaction.metadata
            )
            # Serialize DB types (UUID, datetime) into a JSON-safe payload
            payload = {
                'id': str(record.id),
                'alert_id': str(record.alert_id),
                'interaction_type': record.interaction_type,
                'interacted_at': record.interacted_at.isoformat(),
                'response_time_seconds': record.response_time_seconds
            }
            return payload
    except ValueError as e:
        # Repository signals an unknown alert (or bad value) via ValueError -> 404
        logger.error("Invalid alert interaction", error=str(e), alert_id=str(alert_id))
        raise HTTPException(status_code=404, detail=str(e))
    except Exception as e:
        logger.error("Failed to create alert interaction", error=str(e), alert_id=str(alert_id))
        raise HTTPException(status_code=500, detail=f"Failed to create interaction: {str(e)}")
@router.post(
    "/api/v1/tenants/{tenant_id}/alerts/interactions/batch",
    response_model=Dict[str, Any],
    summary="Track multiple alert interactions"
)
async def create_interactions_batch(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    batch: InteractionBatchCreate = Body(...),
    current_user: dict = Depends(get_current_user_dep)
):
    """
    Track multiple alert interactions in a single request
    Useful for offline sync or bulk operations
    """
    from app.repositories.analytics_repository import AlertAnalyticsRepository
    from app.config import AlertProcessorConfig
    from shared.database.base import create_database_manager

    try:
        settings = AlertProcessorConfig()
        manager = create_database_manager(settings.DATABASE_URL, "alert-processor")
        async with manager.get_session() as session:
            repository = AlertAnalyticsRepository(session)
            # Stamp the authenticated user onto every interaction before persisting
            uid = current_user['user_id']
            for item in batch.interactions:
                item['user_id'] = uid
            created = await repository.create_interactions_batch(
                tenant_id=tenant_id,
                interactions=batch.interactions
            )
            summaries = [
                {
                    'id': str(row.id),
                    'alert_id': str(row.alert_id),
                    'interaction_type': row.interaction_type,
                    'interacted_at': row.interacted_at.isoformat()
                }
                for row in created
            ]
            return {
                'created_count': len(created),
                'interactions': summaries
            }
    except Exception as e:
        logger.error("Failed to create batch interactions", error=str(e), tenant_id=str(tenant_id))
        raise HTTPException(status_code=500, detail=f"Failed to create batch interactions: {str(e)}")
@router.get(
    "/api/v1/tenants/{tenant_id}/alerts/analytics",
    response_model=AnalyticsResponse,
    summary="Get alert analytics"
)
async def get_analytics(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    days: int = Query(7, ge=1, le=90, description="Number of days to analyze"),
    current_user: dict = Depends(get_current_user_dep)
):
    """
    Get comprehensive analytics for alerts
    Returns:
    - 7-day trend chart with severity breakdown
    - Average response time (time to acknowledgment)
    - Top 3 alert categories
    - Total alerts, resolved, active counts
    - Resolution rate percentage
    - Predicted daily average
    - Busiest day of week
    """
    from app.repositories.analytics_repository import AlertAnalyticsRepository
    from app.config import AlertProcessorConfig
    from shared.database.base import create_database_manager

    try:
        settings = AlertProcessorConfig()
        manager = create_database_manager(settings.DATABASE_URL, "alert-processor")
        async with manager.get_session() as session:
            repository = AlertAnalyticsRepository(session)
            # Repository returns a dict matching the AnalyticsResponse shape
            return await repository.get_full_analytics(tenant_id=tenant_id, days=days)
    except Exception as e:
        logger.error("Failed to get alert analytics", error=str(e), tenant_id=str(tenant_id))
        raise HTTPException(status_code=500, detail=f"Failed to get analytics: {str(e)}")
@router.get(
    "/api/v1/tenants/{tenant_id}/alerts/analytics/trends",
    response_model=List[Dict[str, Any]],
    summary="Get alert trends"
)
async def get_trends(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    days: int = Query(7, ge=1, le=90, description="Number of days to analyze"),
    current_user: dict = Depends(get_current_user_dep)
):
    """Get alert trends over time with severity breakdown"""
    from app.repositories.analytics_repository import AlertAnalyticsRepository
    from app.config import AlertProcessorConfig
    from shared.database.base import create_database_manager

    try:
        settings = AlertProcessorConfig()
        manager = create_database_manager(settings.DATABASE_URL, "alert-processor")
        async with manager.get_session() as session:
            repository = AlertAnalyticsRepository(session)
            # One entry per day, each with a per-severity count breakdown
            return await repository.get_analytics_trends(tenant_id=tenant_id, days=days)
    except Exception as e:
        logger.error("Failed to get alert trends", error=str(e), tenant_id=str(tenant_id))
        raise HTTPException(status_code=500, detail=f"Failed to get trends: {str(e)}")
# ============================================================================
# Tenant Data Deletion Operations (Internal Service Only)
# ============================================================================
@router.delete(
    "/api/v1/alerts/tenant/{tenant_id}",
    response_model=dict
)
@service_only_access
async def delete_tenant_data(
    tenant_id: str = Path(..., description="Tenant ID to delete data for"),
    current_user: dict = Depends(get_current_user_dep)
):
    """
    Delete all alert data for a tenant (Internal service only)
    This endpoint is called by the orchestrator during tenant deletion.
    It permanently deletes all alert-related data including:
    - Alerts (all types and severities)
    - Alert interactions
    - Audit logs
    **WARNING**: This operation is irreversible!
    Returns:
        Deletion summary with counts of deleted records
    """
    from app.services.tenant_deletion_service import AlertProcessorTenantDeletionService
    from app.config import AlertProcessorConfig
    from shared.database.base import create_database_manager

    try:
        logger.info("alert_processor.tenant_deletion.api_called", tenant_id=tenant_id)
        settings = AlertProcessorConfig()
        manager = create_database_manager(settings.DATABASE_URL, "alert-processor")
        async with manager.get_session() as session:
            service = AlertProcessorTenantDeletionService(session)
            outcome = await service.safe_delete_tenant_data(tenant_id)
            # Partial/failed deletions surface as a 500 with the collected errors
            if not outcome.success:
                raise HTTPException(
                    status_code=500,
                    detail=f"Tenant data deletion failed: {', '.join(outcome.errors)}"
                )
            return {
                "message": "Tenant data deletion completed successfully",
                "summary": outcome.to_dict()
            }
    except HTTPException:
        # Preserve deliberate HTTP errors instead of wrapping them in a generic 500
        raise
    except Exception as e:
        logger.error("alert_processor.tenant_deletion.api_error",
                     tenant_id=tenant_id,
                     error=str(e),
                     exc_info=True)
        raise HTTPException(
            status_code=500,
            detail=f"Failed to delete tenant data: {str(e)}"
        )
@router.get(
    "/api/v1/alerts/tenant/{tenant_id}/deletion-preview",
    response_model=dict
)
@service_only_access
async def preview_tenant_data_deletion(
    tenant_id: str = Path(..., description="Tenant ID to preview deletion for"),
    current_user: dict = Depends(get_current_user_dep)
):
    """
    Preview what data would be deleted for a tenant (dry-run)
    This endpoint shows counts of all data that would be deleted
    without actually deleting anything. Useful for:
    - Confirming deletion scope before execution
    - Auditing and compliance
    - Troubleshooting
    Returns:
        Dictionary with entity names and their counts
    """
    from app.services.tenant_deletion_service import AlertProcessorTenantDeletionService
    from app.config import AlertProcessorConfig
    from shared.database.base import create_database_manager

    try:
        logger.info("alert_processor.tenant_deletion.preview_called", tenant_id=tenant_id)
        config = AlertProcessorConfig()
        db_manager = create_database_manager(config.DATABASE_URL, "alert-processor")
        async with db_manager.get_session() as session:
            deletion_service = AlertProcessorTenantDeletionService(session)
            # Counts only — nothing is deleted by this call
            preview = await deletion_service.get_tenant_data_preview(tenant_id)
            total_records = sum(preview.values())
            return {
                "tenant_id": tenant_id,
                "service": "alert_processor",
                "preview": preview,
                "total_records": total_records,
                "warning": "These records will be permanently deleted and cannot be recovered"
            }
    except HTTPException:
        # Consistency fix: mirror delete_tenant_data and pass deliberate HTTP
        # errors through instead of re-wrapping them as a generic 500 below
        raise
    except Exception as e:
        logger.error("alert_processor.tenant_deletion.preview_error",
                     tenant_id=tenant_id,
                     error=str(e),
                     exc_info=True)
        raise HTTPException(
            status_code=500,
            detail=f"Failed to preview tenant data deletion: {str(e)}"
        )