Delete legacy alerts
@@ -6,7 +6,6 @@ Repository implementations for forecasting service
from .base import ForecastingBaseRepository
from .forecast_repository import ForecastRepository
from .prediction_batch_repository import PredictionBatchRepository
from .forecast_alert_repository import ForecastAlertRepository
from .performance_metric_repository import PerformanceMetricRepository
from .prediction_cache_repository import PredictionCacheRepository

@@ -14,7 +13,6 @@ __all__ = [
    "ForecastingBaseRepository",
    "ForecastRepository",
    "PredictionBatchRepository",
    "ForecastAlertRepository",
    "PerformanceMetricRepository",
    "PredictionCacheRepository"
]
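
For reference, a minimal sketch of the forecasting repositories package __init__.py as it stands after this commit, assuming nothing else in the module changes beyond the two hunks above:

from .base import ForecastingBaseRepository
from .forecast_repository import ForecastRepository
from .prediction_batch_repository import PredictionBatchRepository
from .performance_metric_repository import PerformanceMetricRepository
from .prediction_cache_repository import PredictionCacheRepository

# ForecastAlertRepository is no longer imported or exported after this commit
__all__ = [
    "ForecastingBaseRepository",
    "ForecastRepository",
    "PredictionBatchRepository",
    "PerformanceMetricRepository",
    "PredictionCacheRepository"
]
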
@@ -1,375 +0,0 @@
"""
Forecast Alert Repository

Repository for forecast alert operations
"""

from typing import Optional, List, Dict, Any
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import text
from datetime import datetime, timedelta
import structlog

from .base import ForecastingBaseRepository
from app.models.forecasts import ForecastAlert
from shared.database.exceptions import DatabaseError, ValidationError

logger = structlog.get_logger()


class ForecastAlertRepository(ForecastingBaseRepository):
    """Repository for forecast alert operations"""

    def __init__(self, session: AsyncSession, cache_ttl: Optional[int] = 300):
        # Alerts change frequently, shorter cache time (5 minutes)
        super().__init__(ForecastAlert, session, cache_ttl)

    async def create_alert(self, alert_data: Dict[str, Any]) -> ForecastAlert:
        """Create a new forecast alert"""
        try:
            # Validate alert data
            validation_result = self._validate_forecast_data(
                alert_data,
                ["tenant_id", "forecast_id", "alert_type", "message"]
            )

            if not validation_result["is_valid"]:
                raise ValidationError(f"Invalid alert data: {validation_result['errors']}")

            # Set default values
            if "severity" not in alert_data:
                alert_data["severity"] = "medium"
            if "is_active" not in alert_data:
                alert_data["is_active"] = True
            if "notification_sent" not in alert_data:
                alert_data["notification_sent"] = False

            alert = await self.create(alert_data)

            logger.info("Forecast alert created",
                        alert_id=alert.id,
                        tenant_id=alert.tenant_id,
                        alert_type=alert.alert_type,
                        severity=alert.severity)

            return alert

        except ValidationError:
            raise
        except Exception as e:
            logger.error("Failed to create forecast alert",
                         tenant_id=alert_data.get("tenant_id"),
                         error=str(e))
            raise DatabaseError(f"Failed to create alert: {str(e)}")

    async def get_active_alerts(
        self,
        tenant_id: str,
        alert_type: str = None,
        severity: str = None
    ) -> List[ForecastAlert]:
        """Get active alerts for a tenant"""
        try:
            filters = {
                "tenant_id": tenant_id,
                "is_active": True
            }

            if alert_type:
                filters["alert_type"] = alert_type
            if severity:
                filters["severity"] = severity

            return await self.get_multi(
                filters=filters,
                order_by="created_at",
                order_desc=True
            )

        except Exception as e:
            logger.error("Failed to get active alerts",
                         tenant_id=tenant_id,
                         error=str(e))
            return []

    async def acknowledge_alert(
        self,
        alert_id: str,
        acknowledged_by: str = None
    ) -> Optional[ForecastAlert]:
        """Acknowledge an alert"""
        try:
            update_data = {
                "acknowledged_at": datetime.utcnow()
            }

            if acknowledged_by:
                # Store in message or create a new field if needed
                current_alert = await self.get_by_id(alert_id)
                if current_alert:
                    update_data["message"] = f"{current_alert.message} (Acknowledged by: {acknowledged_by})"

            updated_alert = await self.update(alert_id, update_data)

            logger.info("Alert acknowledged",
                        alert_id=alert_id,
                        acknowledged_by=acknowledged_by)

            return updated_alert

        except Exception as e:
            logger.error("Failed to acknowledge alert",
                         alert_id=alert_id,
                         error=str(e))
            raise DatabaseError(f"Failed to acknowledge alert: {str(e)}")

    async def resolve_alert(
        self,
        alert_id: str,
        resolved_by: str = None
    ) -> Optional[ForecastAlert]:
        """Resolve an alert"""
        try:
            update_data = {
                "resolved_at": datetime.utcnow(),
                "is_active": False
            }

            if resolved_by:
                current_alert = await self.get_by_id(alert_id)
                if current_alert:
                    update_data["message"] = f"{current_alert.message} (Resolved by: {resolved_by})"

            updated_alert = await self.update(alert_id, update_data)

            logger.info("Alert resolved",
                        alert_id=alert_id,
                        resolved_by=resolved_by)

            return updated_alert

        except Exception as e:
            logger.error("Failed to resolve alert",
                         alert_id=alert_id,
                         error=str(e))
            raise DatabaseError(f"Failed to resolve alert: {str(e)}")

    async def mark_notification_sent(
        self,
        alert_id: str,
        notification_method: str
    ) -> Optional[ForecastAlert]:
        """Mark alert notification as sent"""
        try:
            update_data = {
                "notification_sent": True,
                "notification_method": notification_method
            }

            updated_alert = await self.update(alert_id, update_data)

            logger.debug("Alert notification marked as sent",
                         alert_id=alert_id,
                         method=notification_method)

            return updated_alert

        except Exception as e:
            logger.error("Failed to mark notification as sent",
                         alert_id=alert_id,
                         error=str(e))
            return None

    async def get_unnotified_alerts(self, tenant_id: str = None) -> List[ForecastAlert]:
        """Get alerts that haven't been notified yet"""
        try:
            filters = {
                "is_active": True,
                "notification_sent": False
            }

            if tenant_id:
                filters["tenant_id"] = tenant_id

            return await self.get_multi(
                filters=filters,
                order_by="created_at",
                order_desc=False  # Oldest first for notification
            )

        except Exception as e:
            logger.error("Failed to get unnotified alerts",
                         tenant_id=tenant_id,
                         error=str(e))
            return []

    async def get_alert_statistics(self, tenant_id: str) -> Dict[str, Any]:
        """Get alert statistics for a tenant"""
        try:
            # Get counts by type
            type_query = text("""
                SELECT alert_type, COUNT(*) as count
                FROM forecast_alerts
                WHERE tenant_id = :tenant_id
                GROUP BY alert_type
                ORDER BY count DESC
            """)

            result = await self.session.execute(type_query, {"tenant_id": tenant_id})
            alerts_by_type = {row.alert_type: row.count for row in result.fetchall()}

            # Get counts by severity
            severity_query = text("""
                SELECT severity, COUNT(*) as count
                FROM forecast_alerts
                WHERE tenant_id = :tenant_id
                GROUP BY severity
                ORDER BY count DESC
            """)

            severity_result = await self.session.execute(severity_query, {"tenant_id": tenant_id})
            alerts_by_severity = {row.severity: row.count for row in severity_result.fetchall()}

            # Get status counts
            total_alerts = await self.count(filters={"tenant_id": tenant_id})
            active_alerts = await self.count(filters={
                "tenant_id": tenant_id,
                "is_active": True
            })
            acknowledged_alerts = await self.count(filters={
                "tenant_id": tenant_id,
                "acknowledged_at": "IS NOT NULL"  # This won't work with our current filters
            })

            # Get recent activity (alerts in last 7 days)
            seven_days_ago = datetime.utcnow() - timedelta(days=7)
            recent_alerts = len(await self.get_by_date_range(
                tenant_id, seven_days_ago, datetime.utcnow(), limit=1000
            ))

            # Calculate response metrics
            response_query = text("""
                SELECT
                    AVG(EXTRACT(EPOCH FROM (acknowledged_at - created_at))/60) as avg_acknowledgment_time_minutes,
                    AVG(EXTRACT(EPOCH FROM (resolved_at - created_at))/60) as avg_resolution_time_minutes,
                    COUNT(CASE WHEN acknowledged_at IS NOT NULL THEN 1 END) as acknowledged_count,
                    COUNT(CASE WHEN resolved_at IS NOT NULL THEN 1 END) as resolved_count
                FROM forecast_alerts
                WHERE tenant_id = :tenant_id
            """)

            response_result = await self.session.execute(response_query, {"tenant_id": tenant_id})
            response_row = response_result.fetchone()

            return {
                "total_alerts": total_alerts,
                "active_alerts": active_alerts,
                "resolved_alerts": total_alerts - active_alerts,
                "alerts_by_type": alerts_by_type,
                "alerts_by_severity": alerts_by_severity,
                "recent_alerts_7d": recent_alerts,
                "response_metrics": {
                    "avg_acknowledgment_time_minutes": float(response_row.avg_acknowledgment_time_minutes or 0),
                    "avg_resolution_time_minutes": float(response_row.avg_resolution_time_minutes or 0),
                    "acknowledgment_rate": round((response_row.acknowledged_count / max(total_alerts, 1)) * 100, 2),
                    "resolution_rate": round((response_row.resolved_count / max(total_alerts, 1)) * 100, 2)
                } if response_row else {
                    "avg_acknowledgment_time_minutes": 0.0,
                    "avg_resolution_time_minutes": 0.0,
                    "acknowledgment_rate": 0.0,
                    "resolution_rate": 0.0
                }
            }

        except Exception as e:
            logger.error("Failed to get alert statistics",
                         tenant_id=tenant_id,
                         error=str(e))
            return {
                "total_alerts": 0,
                "active_alerts": 0,
                "resolved_alerts": 0,
                "alerts_by_type": {},
                "alerts_by_severity": {},
                "recent_alerts_7d": 0,
                "response_metrics": {
                    "avg_acknowledgment_time_minutes": 0.0,
                    "avg_resolution_time_minutes": 0.0,
                    "acknowledgment_rate": 0.0,
                    "resolution_rate": 0.0
                }
            }

    async def cleanup_old_alerts(self, days_old: int = 90) -> int:
        """Clean up old resolved alerts"""
        try:
            cutoff_date = datetime.utcnow() - timedelta(days=days_old)

            query_text = """
                DELETE FROM forecast_alerts
                WHERE is_active = false
                AND resolved_at IS NOT NULL
                AND resolved_at < :cutoff_date
            """

            result = await self.session.execute(text(query_text), {"cutoff_date": cutoff_date})
            deleted_count = result.rowcount

            logger.info("Cleaned up old forecast alerts",
                        deleted_count=deleted_count,
                        days_old=days_old)

            return deleted_count

        except Exception as e:
            logger.error("Failed to cleanup old alerts",
                         error=str(e))
            raise DatabaseError(f"Alert cleanup failed: {str(e)}")

    async def bulk_resolve_alerts(
        self,
        tenant_id: str,
        alert_type: str = None,
        older_than_hours: int = 24
    ) -> int:
        """Bulk resolve old alerts"""
        try:
            cutoff_time = datetime.utcnow() - timedelta(hours=older_than_hours)

            conditions = [
                "tenant_id = :tenant_id",
                "is_active = true",
                "created_at < :cutoff_time"
            ]
            params = {
                "tenant_id": tenant_id,
                "cutoff_time": cutoff_time
            }

            if alert_type:
                conditions.append("alert_type = :alert_type")
                params["alert_type"] = alert_type

            query_text = f"""
                UPDATE forecast_alerts
                SET is_active = false, resolved_at = :resolved_at
                WHERE {' AND '.join(conditions)}
            """

            params["resolved_at"] = datetime.utcnow()

            result = await self.session.execute(text(query_text), params)
            resolved_count = result.rowcount

            logger.info("Bulk resolved old alerts",
                        tenant_id=tenant_id,
                        alert_type=alert_type,
                        resolved_count=resolved_count,
                        older_than_hours=older_than_hours)

            return resolved_count

        except Exception as e:
            logger.error("Failed to bulk resolve alerts",
                         tenant_id=tenant_id,
                         error=str(e))
            raise DatabaseError(f"Bulk resolve failed: {str(e)}")