Improve the frontend

This commit is contained in:
Urtzi Alfaro
2025-10-21 19:50:07 +02:00
parent 05da20357d
commit 8d30172483
105 changed files with 14699 additions and 4630 deletions

View File

@@ -0,0 +1,7 @@
"""
Alert Processor API Endpoints
"""
from .analytics import router as analytics_router
__all__ = ['analytics_router']

View File

@@ -0,0 +1,238 @@
"""
Alert Analytics API Endpoints
"""
from fastapi import APIRouter, Depends, HTTPException, Path, Body, Query
from typing import List, Dict, Any, Optional
from uuid import UUID
from pydantic import BaseModel, Field
import structlog
from shared.auth.decorators import get_current_user_dep
logger = structlog.get_logger()
router = APIRouter()
# Schemas
class InteractionCreate(BaseModel):
"""Schema for creating an alert interaction"""
alert_id: str = Field(..., description="Alert ID")
interaction_type: str = Field(..., description="Type of interaction: acknowledged, resolved, snoozed, dismissed")
metadata: Optional[Dict[str, Any]] = Field(None, description="Additional metadata")
class InteractionBatchCreate(BaseModel):
"""Schema for creating multiple interactions"""
interactions: List[Dict[str, Any]] = Field(..., description="List of interactions to create")
class AnalyticsResponse(BaseModel):
"""Schema for analytics response"""
trends: List[Dict[str, Any]]
averageResponseTime: int
topCategories: List[Dict[str, Any]]
totalAlerts: int
resolvedAlerts: int
activeAlerts: int
resolutionRate: int
predictedDailyAverage: int
busiestDay: str
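# Illustrative AnalyticsResponse payload (values are made up for documentation):
# {
#   "trends": [{"date": "2025-10-20", "count": 5, "urgentCount": 1, ...}],
#   "averageResponseTime": 12,          # minutes to acknowledgement
#   "topCategories": [{"category": "inventory", "count": 9, "percentage": 45}],
#   "totalAlerts": 20, "resolvedAlerts": 15, "activeAlerts": 5,
#   "resolutionRate": 75,               # percent
#   "predictedDailyAverage": 3,
#   "busiestDay": "Monday"
# }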
async def get_analytics_repository(current_user: dict = Depends(get_current_user_dep)):
    """Dependency that yields an AlertAnalyticsRepository bound to a fresh session.
    Written as an async generator so FastAPI can resolve it directly via
    Depends(); returning an inner function would hand callers the function
    object rather than a repository.
    """
    from app.repositories.analytics_repository import AlertAnalyticsRepository
    from app.config import AlertProcessorConfig
    from shared.database.base import create_database_manager
    config = AlertProcessorConfig()
    db_manager = create_database_manager(config.DATABASE_URL, "alert-processor")
    async with db_manager.get_session() as session:
        yield AlertAnalyticsRepository(session)
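# Example wiring (a sketch; the routes below currently build their own sessions
# inline instead of using this dependency). A hypothetical endpoint could do:
#
#     @router.get("/api/v1/tenants/{tenant_id}/alerts/analytics/summary")
#     async def summary(repo: AlertAnalyticsRepository = Depends(get_analytics_repository)):
#         ...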
@router.post(
"/api/v1/tenants/{tenant_id}/alerts/{alert_id}/interactions",
response_model=Dict[str, Any],
summary="Track alert interaction"
)
async def create_interaction(
tenant_id: UUID = Path(..., description="Tenant ID"),
alert_id: UUID = Path(..., description="Alert ID"),
interaction: InteractionCreate = Body(...),
current_user: dict = Depends(get_current_user_dep)
):
"""
Track a user interaction with an alert
- **acknowledged**: User has seen and acknowledged the alert
- **resolved**: User has resolved the alert
- **snoozed**: User has snoozed the alert
- **dismissed**: User has dismissed the alert
"""
from app.repositories.analytics_repository import AlertAnalyticsRepository
from app.config import AlertProcessorConfig
from shared.database.base import create_database_manager
try:
config = AlertProcessorConfig()
db_manager = create_database_manager(config.DATABASE_URL, "alert-processor")
async with db_manager.get_session() as session:
repo = AlertAnalyticsRepository(session)
alert_interaction = await repo.create_interaction(
tenant_id=tenant_id,
alert_id=alert_id,
user_id=UUID(current_user['user_id']),
interaction_type=interaction.interaction_type,
metadata=interaction.metadata
)
return {
'id': str(alert_interaction.id),
'alert_id': str(alert_interaction.alert_id),
'interaction_type': alert_interaction.interaction_type,
'interacted_at': alert_interaction.interacted_at.isoformat(),
'response_time_seconds': alert_interaction.response_time_seconds
}
except ValueError as e:
logger.error("Invalid alert interaction", error=str(e), alert_id=str(alert_id))
raise HTTPException(status_code=404, detail=str(e))
except Exception as e:
logger.error("Failed to create alert interaction", error=str(e), alert_id=str(alert_id))
raise HTTPException(status_code=500, detail=f"Failed to create interaction: {str(e)}")
@router.post(
"/api/v1/tenants/{tenant_id}/alerts/interactions/batch",
response_model=Dict[str, Any],
summary="Track multiple alert interactions"
)
async def create_interactions_batch(
tenant_id: UUID = Path(..., description="Tenant ID"),
batch: InteractionBatchCreate = Body(...),
current_user: dict = Depends(get_current_user_dep)
):
"""
    Track multiple alert interactions in a single request.
    Useful for offline sync or bulk operations.
"""
from app.repositories.analytics_repository import AlertAnalyticsRepository
from app.config import AlertProcessorConfig
from shared.database.base import create_database_manager
try:
config = AlertProcessorConfig()
db_manager = create_database_manager(config.DATABASE_URL, "alert-processor")
async with db_manager.get_session() as session:
repo = AlertAnalyticsRepository(session)
# Add user_id to each interaction
for interaction in batch.interactions:
interaction['user_id'] = current_user['user_id']
created_interactions = await repo.create_interactions_batch(
tenant_id=tenant_id,
interactions=batch.interactions
)
return {
'created_count': len(created_interactions),
'interactions': [
{
'id': str(i.id),
'alert_id': str(i.alert_id),
'interaction_type': i.interaction_type,
'interacted_at': i.interacted_at.isoformat()
}
for i in created_interactions
]
}
except Exception as e:
logger.error("Failed to create batch interactions", error=str(e), tenant_id=str(tenant_id))
raise HTTPException(status_code=500, detail=f"Failed to create batch interactions: {str(e)}")
@router.get(
"/api/v1/tenants/{tenant_id}/alerts/analytics",
response_model=AnalyticsResponse,
summary="Get alert analytics"
)
async def get_analytics(
tenant_id: UUID = Path(..., description="Tenant ID"),
days: int = Query(7, ge=1, le=90, description="Number of days to analyze"),
current_user: dict = Depends(get_current_user_dep)
):
"""
    Get comprehensive analytics for alerts.

    Returns:
    - Daily trend data with severity breakdown for the requested window
    - Average response time in minutes (time to acknowledgement)
    - Top 3 alert categories with share percentages
    - Total, resolved, and active alert counts
    - Resolution rate percentage
    - Predicted daily average alert volume
    - Busiest day of the week
"""
from app.repositories.analytics_repository import AlertAnalyticsRepository
from app.config import AlertProcessorConfig
from shared.database.base import create_database_manager
try:
config = AlertProcessorConfig()
db_manager = create_database_manager(config.DATABASE_URL, "alert-processor")
async with db_manager.get_session() as session:
repo = AlertAnalyticsRepository(session)
analytics = await repo.get_full_analytics(
tenant_id=tenant_id,
days=days
)
return analytics
except Exception as e:
logger.error("Failed to get alert analytics", error=str(e), tenant_id=str(tenant_id))
raise HTTPException(status_code=500, detail=f"Failed to get analytics: {str(e)}")
@router.get(
"/api/v1/tenants/{tenant_id}/alerts/analytics/trends",
response_model=List[Dict[str, Any]],
summary="Get alert trends"
)
async def get_trends(
tenant_id: UUID = Path(..., description="Tenant ID"),
days: int = Query(7, ge=1, le=90, description="Number of days to analyze"),
current_user: dict = Depends(get_current_user_dep)
):
"""Get alert trends over time with severity breakdown"""
from app.repositories.analytics_repository import AlertAnalyticsRepository
from app.config import AlertProcessorConfig
from shared.database.base import create_database_manager
try:
config = AlertProcessorConfig()
db_manager = create_database_manager(config.DATABASE_URL, "alert-processor")
async with db_manager.get_session() as session:
repo = AlertAnalyticsRepository(session)
trends = await repo.get_analytics_trends(
tenant_id=tenant_id,
days=days
)
return trends
except Exception as e:
logger.error("Failed to get alert trends", error=str(e), tenant_id=str(tenant_id))
raise HTTPException(status_code=500, detail=f"Failed to get trends: {str(e)}")

View File

@@ -0,0 +1,84 @@
"""
Alert Processor API Server
Provides REST API endpoints for alert analytics
"""
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
import structlog
from app.config import AlertProcessorConfig
from app.api import analytics_router
from shared.database.base import create_database_manager
logger = structlog.get_logger()
# Create FastAPI app
app = FastAPI(
title="Alert Processor API",
description="API for alert analytics and interaction tracking",
version="1.0.0"
)
# CORS middleware
app.add_middleware(
CORSMiddleware,
allow_origins=["*"],
allow_credentials=True,
allow_methods=["*"],
allow_headers=["*"],
)
# Include routers
app.include_router(analytics_router, tags=["analytics"])
# Initialize database
config = AlertProcessorConfig()
db_manager = create_database_manager(config.DATABASE_URL, "alert-processor-api")
@app.on_event("startup")
async def startup():
"""Initialize on startup"""
logger.info("Alert Processor API starting up")
# Create tables
try:
from app.models.alerts import Base
await db_manager.create_tables(Base.metadata)
logger.info("Database tables ensured")
except Exception as e:
logger.error("Failed to create tables", error=str(e))
@app.on_event("shutdown")
async def shutdown():
"""Cleanup on shutdown"""
logger.info("Alert Processor API shutting down")
await db_manager.close_connections()
@app.get("/health")
async def health_check():
"""Health check endpoint"""
return {"status": "healthy", "service": "alert-processor-api"}
@app.get("/")
async def root():
"""Root endpoint"""
return {
"service": "Alert Processor API",
"version": "1.0.0",
"endpoints": {
"health": "/health",
"docs": "/docs",
"analytics": "/api/v1/tenants/{tenant_id}/alerts/analytics",
"interactions": "/api/v1/tenants/{tenant_id}/alerts/{alert_id}/interactions"
}
}
if __name__ == "__main__":
import uvicorn
uvicorn.run(app, host="0.0.0.0", port=8010)
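
Once the server is running (e.g. via the uvicorn call above), a quick local smoke check might look like this; host and port follow the defaults in this file:

import httpx

resp = httpx.get("http://localhost:8010/health")
assert resp.status_code == 200
print(resp.json())  # {"status": "healthy", "service": "alert-processor-api"}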

View File

@@ -55,14 +55,14 @@ class NotificationServiceClient(BaseServiceClient):
"""Send notification via notification service"""
try:
response = await self.post(
"/api/v1/notifications/send",
json={
"notifications/send",
data={
"tenant_id": tenant_id,
"notification": notification,
"channels": channels
}
)
-            return response
+            return response if response else {"status": "failed", "error": "No response from notification service"}
except Exception as e:
logger.error("Failed to send notification", error=str(e), tenant_id=tenant_id)
return {"status": "failed", "error": str(e)}
@@ -187,8 +187,8 @@ class AlertProcessorService:
},
channels=channels
)
-            if notification_result.get('status') == 'success':
+            if notification_result and notification_result.get('status') == 'success':
self.notifications_sent += 1
# Stream to SSE for real-time dashboard (always)
@@ -255,16 +255,27 @@ class AlertProcessorService:
return alert_dict
async def _cache_active_alerts(self, tenant_id: str):
"""Cache all active alerts for a tenant in Redis for quick SSE access"""
"""
Cache today's active alerts for a tenant in Redis for quick SSE access
Only caches alerts from today (00:00 UTC onwards) to avoid flooding
the dashboard with historical alerts on initial connection.
Analytics endpoints should query the database directly for historical data.
"""
try:
from app.models.alerts import Alert, AlertStatus
from sqlalchemy import select
async with self.db_manager.get_session() as session:
-                # Query all active alerts for this tenant
+                # Calculate start of today (UTC) to filter only today's alerts
+                today_start = datetime.utcnow().replace(hour=0, minute=0, second=0, microsecond=0)
+                # Query only today's active alerts for this tenant
+                # This prevents showing yesterday's alerts on dashboard initial load
                 query = select(Alert).where(
                     Alert.tenant_id == tenant_id,
-                    Alert.status == AlertStatus.ACTIVE
+                    Alert.status == AlertStatus.ACTIVE,
+                    Alert.created_at >= today_start  # Only today's alerts
                 ).order_by(Alert.created_at.desc()).limit(50)
result = await session.execute(query)
@@ -294,9 +305,10 @@ class AlertProcessorService:
json.dumps(active_items)
)
logger.debug("Cached active alerts in Redis",
logger.debug("Cached today's active alerts in Redis",
tenant_id=tenant_id,
count=len(active_items))
count=len(active_items),
filter_date=today_start.isoformat())
except Exception as e:
logger.error("Failed to cache active alerts",

View File

@@ -3,15 +3,20 @@
Alert models for the alert processor service
"""
-from sqlalchemy import Column, String, Text, DateTime, JSON, Enum
-from sqlalchemy.dialects.postgresql import UUID
-from datetime import datetime
+from sqlalchemy import Column, String, Text, DateTime, JSON, Enum, Integer, ForeignKey
+from sqlalchemy.dialects.postgresql import UUID, JSONB
+from datetime import datetime, timezone
import uuid
import enum
from shared.database.base import Base
def utc_now():
"""Return current UTC time as timezone-aware datetime"""
return datetime.now(timezone.utc)
class AlertStatus(enum.Enum):
"""Alert status values"""
ACTIVE = "active"
@@ -28,6 +33,14 @@ class AlertSeverity(enum.Enum):
URGENT = "urgent"
class InteractionType(enum.Enum):
"""Alert interaction types"""
ACKNOWLEDGED = "acknowledged"
RESOLVED = "resolved"
SNOOZED = "snoozed"
DISMISSED = "dismissed"
class Alert(Base):
"""Alert records for the alert processor service"""
__tablename__ = "alerts"
@@ -51,6 +64,27 @@ class Alert(Base):
alert_metadata = Column(JSON, nullable=True) # Additional alert-specific data
# Timestamps
-    created_at = Column(DateTime, default=datetime.utcnow, index=True)
-    updated_at = Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow)
-    resolved_at = Column(DateTime, nullable=True)
+    created_at = Column(DateTime(timezone=True), default=utc_now, index=True)
+    updated_at = Column(DateTime(timezone=True), default=utc_now, onupdate=utc_now)
+    resolved_at = Column(DateTime(timezone=True), nullable=True)
class AlertInteraction(Base):
"""Alert interaction tracking for analytics"""
__tablename__ = "alert_interactions"
id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
tenant_id = Column(UUID(as_uuid=True), nullable=False, index=True)
alert_id = Column(UUID(as_uuid=True), ForeignKey('alerts.id', ondelete='CASCADE'), nullable=False)
user_id = Column(UUID(as_uuid=True), nullable=False, index=True)
# Interaction details
interaction_type = Column(String(50), nullable=False, index=True)
interacted_at = Column(DateTime(timezone=True), nullable=False, default=utc_now, index=True)
response_time_seconds = Column(Integer, nullable=True)
# Context
interaction_metadata = Column(JSONB, nullable=True)
# Timestamps
created_at = Column(DateTime(timezone=True), nullable=False, default=utc_now)
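
A quick illustration of why these models switched to utc_now and DateTime(timezone=True): mixing a naive datetime.utcnow() with the timezone-aware values these columns now return raises a TypeError on subtraction, while two aware datetimes subtract cleanly:

from datetime import datetime, timezone

aware_created_at = datetime.now(timezone.utc)  # what DateTime(timezone=True) columns yield
aware_now = datetime.now(timezone.utc)
elapsed = (aware_now - aware_created_at).total_seconds()  # safe: both aware
# (aware_now - datetime.utcnow()) would raise TypeError: naive vs aware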

View File

@@ -0,0 +1,7 @@
"""
Alert Processor Repositories
"""
from .analytics_repository import AlertAnalyticsRepository
__all__ = ['AlertAnalyticsRepository']

View File

@@ -0,0 +1,382 @@
"""
Alert Analytics Repository
Handles all database operations for alert analytics
"""
from typing import List, Dict, Any, Optional
from datetime import datetime, timedelta, timezone
from uuid import UUID
from sqlalchemy import select, func, and_, extract, case
from sqlalchemy.ext.asyncio import AsyncSession
import structlog
from app.models.alerts import Alert, AlertInteraction, AlertSeverity, AlertStatus
logger = structlog.get_logger()
class AlertAnalyticsRepository:
"""Repository for alert analytics operations"""
def __init__(self, session: AsyncSession):
self.session = session
async def create_interaction(
self,
tenant_id: UUID,
alert_id: UUID,
user_id: UUID,
interaction_type: str,
metadata: Optional[Dict[str, Any]] = None
) -> AlertInteraction:
"""Create a new alert interaction"""
# Get alert to calculate response time
alert_query = select(Alert).where(Alert.id == alert_id)
result = await self.session.execute(alert_query)
alert = result.scalar_one_or_none()
if not alert:
raise ValueError(f"Alert {alert_id} not found")
        # Calculate response time with an aware datetime; created_at is
        # DateTime(timezone=True), and naive datetime.utcnow() would raise here
        now = datetime.now(timezone.utc)
        response_time_seconds = int((now - alert.created_at).total_seconds())
# Create interaction
interaction = AlertInteraction(
tenant_id=tenant_id,
alert_id=alert_id,
user_id=user_id,
interaction_type=interaction_type,
interacted_at=now,
response_time_seconds=response_time_seconds,
interaction_metadata=metadata or {}
)
self.session.add(interaction)
# Update alert status if applicable
if interaction_type == 'acknowledged' and alert.status == AlertStatus.ACTIVE:
alert.status = AlertStatus.ACKNOWLEDGED
elif interaction_type == 'resolved':
alert.status = AlertStatus.RESOLVED
alert.resolved_at = now
elif interaction_type == 'dismissed':
alert.status = AlertStatus.IGNORED
await self.session.commit()
await self.session.refresh(interaction)
logger.info(
"Alert interaction created",
alert_id=str(alert_id),
interaction_type=interaction_type,
response_time=response_time_seconds
)
return interaction
async def create_interactions_batch(
self,
tenant_id: UUID,
interactions: List[Dict[str, Any]]
) -> List[AlertInteraction]:
"""Create multiple interactions in batch"""
created_interactions = []
for interaction_data in interactions:
try:
interaction = await self.create_interaction(
tenant_id=tenant_id,
alert_id=UUID(interaction_data['alert_id']),
user_id=UUID(interaction_data['user_id']),
interaction_type=interaction_data['interaction_type'],
metadata=interaction_data.get('metadata')
)
created_interactions.append(interaction)
except Exception as e:
logger.error(
"Failed to create interaction in batch",
error=str(e),
alert_id=interaction_data.get('alert_id')
)
continue
return created_interactions
async def get_analytics_trends(
self,
tenant_id: UUID,
days: int = 7
) -> List[Dict[str, Any]]:
"""Get alert trends for the last N days"""
        start_date = datetime.now(timezone.utc) - timedelta(days=days)
# Query alerts grouped by date and severity
query = (
select(
func.date(Alert.created_at).label('date'),
func.count(Alert.id).label('total_count'),
func.sum(
case((Alert.severity == AlertSeverity.URGENT, 1), else_=0)
).label('urgent_count'),
func.sum(
case((Alert.severity == AlertSeverity.HIGH, 1), else_=0)
).label('high_count'),
func.sum(
case((Alert.severity == AlertSeverity.MEDIUM, 1), else_=0)
).label('medium_count'),
func.sum(
case((Alert.severity == AlertSeverity.LOW, 1), else_=0)
).label('low_count')
)
.where(
and_(
Alert.tenant_id == tenant_id,
Alert.created_at >= start_date
)
)
.group_by(func.date(Alert.created_at))
.order_by(func.date(Alert.created_at))
)
result = await self.session.execute(query)
rows = result.all()
# Fill in missing dates with zeros
trends = []
current_date = start_date.date()
        end_date = datetime.now(timezone.utc).date()
# Create a dict for quick lookup
data_by_date = {row.date: row for row in rows}
while current_date <= end_date:
date_str = current_date.isoformat()
row = data_by_date.get(current_date)
trends.append({
'date': date_str,
'count': int(row.total_count) if row else 0,
'urgentCount': int(row.urgent_count) if row else 0,
'highCount': int(row.high_count) if row else 0,
'mediumCount': int(row.medium_count) if row else 0,
'lowCount': int(row.low_count) if row else 0,
})
current_date += timedelta(days=1)
return trends
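    # Illustrative return shape for days=2 with no alerts on the first day
    # (values made up):
    #   [{'date': '2025-10-19', 'count': 0, 'urgentCount': 0, 'highCount': 0,
    #     'mediumCount': 0, 'lowCount': 0},
    #    {'date': '2025-10-20', 'count': 5, 'urgentCount': 1, 'highCount': 2,
    #     'mediumCount': 2, 'lowCount': 0}]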
async def get_average_response_time(
self,
tenant_id: UUID,
days: int = 7
) -> int:
"""Get average response time in minutes for acknowledged alerts"""
        start_date = datetime.now(timezone.utc) - timedelta(days=days)
query = (
select(func.avg(AlertInteraction.response_time_seconds))
.where(
and_(
AlertInteraction.tenant_id == tenant_id,
AlertInteraction.interaction_type == 'acknowledged',
AlertInteraction.interacted_at >= start_date,
AlertInteraction.response_time_seconds < 86400 # Less than 24 hours
)
)
)
result = await self.session.execute(query)
avg_seconds = result.scalar_one_or_none()
if avg_seconds is None:
return 0
# Convert to minutes
return round(avg_seconds / 60)
async def get_top_categories(
self,
tenant_id: UUID,
days: int = 7,
limit: int = 3
) -> List[Dict[str, Any]]:
"""Get top alert categories"""
        start_date = datetime.now(timezone.utc) - timedelta(days=days)
query = (
select(
Alert.alert_type,
func.count(Alert.id).label('count')
)
.where(
and_(
Alert.tenant_id == tenant_id,
Alert.created_at >= start_date
)
)
.group_by(Alert.alert_type)
.order_by(func.count(Alert.id).desc())
.limit(limit)
)
result = await self.session.execute(query)
rows = result.all()
# Calculate total for percentages
total_query = (
select(func.count(Alert.id))
.where(
and_(
Alert.tenant_id == tenant_id,
Alert.created_at >= start_date
)
)
)
total_result = await self.session.execute(total_query)
total = total_result.scalar_one() or 1
categories = []
for row in rows:
percentage = round((row.count / total) * 100) if total > 0 else 0
categories.append({
'category': row.alert_type,
'count': row.count,
'percentage': percentage
})
return categories
async def get_resolution_stats(
self,
tenant_id: UUID,
days: int = 7
) -> Dict[str, Any]:
"""Get resolution statistics"""
        start_date = datetime.now(timezone.utc) - timedelta(days=days)
# Total alerts
total_query = (
select(func.count(Alert.id))
.where(
and_(
Alert.tenant_id == tenant_id,
Alert.created_at >= start_date
)
)
)
total_result = await self.session.execute(total_query)
total_alerts = total_result.scalar_one() or 0
# Resolved alerts
resolved_query = (
select(func.count(Alert.id))
.where(
and_(
Alert.tenant_id == tenant_id,
Alert.created_at >= start_date,
Alert.status == AlertStatus.RESOLVED
)
)
)
resolved_result = await self.session.execute(resolved_query)
resolved_alerts = resolved_result.scalar_one() or 0
# Active alerts
active_query = (
select(func.count(Alert.id))
.where(
and_(
Alert.tenant_id == tenant_id,
Alert.created_at >= start_date,
Alert.status == AlertStatus.ACTIVE
)
)
)
active_result = await self.session.execute(active_query)
active_alerts = active_result.scalar_one() or 0
resolution_rate = round((resolved_alerts / total_alerts) * 100) if total_alerts > 0 else 0
return {
'totalAlerts': total_alerts,
'resolvedAlerts': resolved_alerts,
'activeAlerts': active_alerts,
'resolutionRate': resolution_rate
}
async def get_busiest_day(
self,
tenant_id: UUID,
days: int = 7
) -> str:
"""Get busiest day of week"""
        start_date = datetime.now(timezone.utc) - timedelta(days=days)
query = (
select(
extract('dow', Alert.created_at).label('day_of_week'),
func.count(Alert.id).label('count')
)
.where(
and_(
Alert.tenant_id == tenant_id,
Alert.created_at >= start_date
)
)
.group_by(extract('dow', Alert.created_at))
.order_by(func.count(Alert.id).desc())
.limit(1)
)
result = await self.session.execute(query)
row = result.first()
if not row:
return 'N/A'
        # PostgreSQL extract('dow') numbers days 0=Sunday .. 6=Saturday
        day_names = ['Sunday', 'Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday']
        return day_names[int(row.day_of_week)]
async def get_predicted_daily_average(
self,
tenant_id: UUID,
days: int = 7
) -> int:
"""Calculate predicted daily average based on trends"""
trends = await self.get_analytics_trends(tenant_id, days)
if not trends:
return 0
total_count = sum(trend['count'] for trend in trends)
return round(total_count / len(trends))
async def get_full_analytics(
self,
tenant_id: UUID,
days: int = 7
) -> Dict[str, Any]:
"""Get complete analytics data"""
trends = await self.get_analytics_trends(tenant_id, days)
avg_response_time = await self.get_average_response_time(tenant_id, days)
top_categories = await self.get_top_categories(tenant_id, days)
resolution_stats = await self.get_resolution_stats(tenant_id, days)
busiest_day = await self.get_busiest_day(tenant_id, days)
predicted_avg = await self.get_predicted_daily_average(tenant_id, days)
return {
'trends': trends,
'averageResponseTime': avg_response_time,
'topCategories': top_categories,
'totalAlerts': resolution_stats['totalAlerts'],
'resolvedAlerts': resolution_stats['resolvedAlerts'],
'activeAlerts': resolution_stats['activeAlerts'],
'resolutionRate': resolution_stats['resolutionRate'],
'predictedDailyAverage': predicted_avg,
'busiestDay': busiest_day
}
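
Taken together, a minimal end-to-end usage sketch for this repository (assuming the same shared database manager used elsewhere in this commit):

import asyncio
from uuid import UUID

from app.config import AlertProcessorConfig
from app.repositories.analytics_repository import AlertAnalyticsRepository
from shared.database.base import create_database_manager

async def main():
    config = AlertProcessorConfig()
    db_manager = create_database_manager(config.DATABASE_URL, "alert-processor")
    async with db_manager.get_session() as session:
        repo = AlertAnalyticsRepository(session)
        analytics = await repo.get_full_analytics(
            tenant_id=UUID("00000000-0000-0000-0000-000000000000"),  # placeholder
            days=7,
        )
        print(analytics["resolutionRate"], analytics["busiestDay"])

asyncio.run(main())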