"""
Internal Demo Cloning API for Alert Processor Service

Service-to-service endpoint for cloning alert data
"""
|
|
|
|
from fastapi import APIRouter, Depends, HTTPException, Header
|
|
from sqlalchemy.ext.asyncio import AsyncSession
|
|
from sqlalchemy import select, delete, func
|
|
import structlog
|
|
import uuid
|
|
from datetime import datetime, timezone, timedelta
|
|
from typing import Optional, Dict, Any
|
|
import os
|
|
|
|
from app.repositories.alerts_repository import AlertsRepository
|
|
from app.models.events import Alert, AlertStatus, AlertTypeClass
|
|
from app.config import AlertProcessorConfig
|
|
import sys
|
|
from pathlib import Path
|
|
|
|
# Add shared utilities to path
|
|
sys.path.insert(0, str(Path(__file__).parent.parent.parent.parent))
|
|
from shared.utils.demo_dates import adjust_date_for_demo, BASE_REFERENCE_DATE
|
|
from shared.database.base import create_database_manager
|
|
|
|
# Module-level structured logger (structlog carries key=value context per event)
logger = structlog.get_logger()

# All routes below are internal-only, mounted under /internal/demo
router = APIRouter(prefix="/internal/demo", tags=["internal"])

# Internal API key for service-to-service auth.
# NOTE(review): the fallback value is a development placeholder — production
# deployments must set the INTERNAL_API_KEY environment variable explicitly.
INTERNAL_API_KEY = os.getenv("INTERNAL_API_KEY", "dev-internal-key-change-in-production")

# Database manager for this module; the second argument names the pool/consumer
# so this module's connections are distinguishable in observability tooling.
config = AlertProcessorConfig()
db_manager = create_database_manager(config.DATABASE_URL, "alert-processor-internal-demo")
|
|
|
|
# Dependency to get database session
|
|
async def get_db():
    """Yield an AsyncSession scoped to a single internal demo request.

    FastAPI dependency: the session is opened by the module-level
    ``db_manager`` and closed automatically when the request finishes.
    """
    async with db_manager.get_session() as db_session:
        yield db_session
|
|
|
|
# Base demo tenant IDs
|
|
DEMO_TENANT_SAN_PABLO = "a1b2c3d4-e5f6-47a8-b9c0-d1e2f3a4b5c6"
|
|
DEMO_TENANT_LA_ESPIGA = "b2c3d4e5-f6a7-48b9-c0d1-e2f3a4b5c6d7"
|
|
|
|
|
|
def verify_internal_api_key(x_internal_api_key: Optional[str] = Header(None)):
    """Verify internal API key for service-to-service communication.

    Reads the ``X-Internal-Api-Key`` header; returns True on a match and
    raises 403 otherwise.
    """
    if x_internal_api_key == INTERNAL_API_KEY:
        return True
    logger.warning("Unauthorized internal API access attempted")
    raise HTTPException(status_code=403, detail="Invalid internal API key")
|
|
|
|
|
|
def _parse_iso_datetime(value) -> Optional[datetime]:
    """Parse an ISO-8601 timestamp string, tolerating a trailing 'Z' (UTC).

    Returns None when the value is missing or cannot be parsed.
    """
    try:
        return datetime.fromisoformat(value.replace('Z', '+00:00'))
    except (ValueError, AttributeError, TypeError):
        return None


@router.post("/clone")
async def clone_demo_data(
    base_tenant_id: str,
    virtual_tenant_id: str,
    demo_account_type: str,
    session_id: Optional[str] = None,
    session_created_at: Optional[str] = None,
    db: AsyncSession = Depends(get_db),
    _: bool = Depends(verify_internal_api_key)
):
    """
    Clone alert service data for a virtual demo tenant.

    Clones:
    - Action-needed alerts (PO approvals, delivery tracking, low stock warnings, production delays)
    - Prevented-issue alerts (AI interventions with financial impact)
    - Historical trend data over past 7 days

    Args:
        base_tenant_id: Template tenant UUID to clone from
        virtual_tenant_id: Target virtual tenant UUID
        demo_account_type: Type of demo account
        session_id: Originating session ID for tracing
        session_created_at: Session creation timestamp for date adjustment

    Returns:
        Cloning status and record counts. On unexpected failure this returns a
        ``status: failed`` payload (best-effort contract for the orchestrator)
        rather than raising; only a malformed UUID raises (400).
    """
    start_time = datetime.now(timezone.utc)

    # Anchor all cloned timestamps to the session creation time so demo data
    # looks "fresh" relative to when the demo session began. Fall back to now
    # when the caller omitted the timestamp or sent something unparseable.
    session_time = _parse_iso_datetime(session_created_at) if session_created_at else None
    if session_time is None:
        session_time = start_time

    logger.info(
        "Starting alert data cloning",
        base_tenant_id=base_tenant_id,
        virtual_tenant_id=virtual_tenant_id,
        demo_account_type=demo_account_type,
        session_id=session_id,
        session_created_at=session_created_at
    )

    try:
        # Validate UUIDs up front — a ValueError here is surfaced as a 400.
        base_uuid = uuid.UUID(base_tenant_id)
        virtual_uuid = uuid.UUID(virtual_tenant_id)

        def _shift(value: Optional[datetime]) -> Optional[datetime]:
            # Re-anchor a template timestamp relative to the session start;
            # None/empty stays None.
            if not value:
                return None
            return adjust_date_for_demo(value, session_time, BASE_REFERENCE_DATE)

        # Track cloning statistics
        stats = {
            "alerts": 0,
            "action_needed": 0,
            "prevented_issues": 0,
            "historical_alerts": 0
        }

        # Clone Alerts
        result = await db.execute(
            select(Alert).where(Alert.tenant_id == base_uuid)
        )
        base_alerts = result.scalars().all()

        logger.info(
            "Found alerts to clone",
            count=len(base_alerts),
            base_tenant=str(base_uuid)
        )

        for alert in base_alerts:
            # created_at/updated_at must never end up None on the clone (they
            # feed the "historical" comparison below), so default to the
            # session time even if the shift itself yields nothing.
            adjusted_created_at = _shift(alert.created_at) or session_time
            adjusted_updated_at = _shift(alert.updated_at) or session_time
            adjusted_resolved_at = _shift(alert.resolved_at)
            adjusted_action_created_at = _shift(alert.action_created_at)
            adjusted_scheduled_send_time = _shift(alert.scheduled_send_time)

            # Update urgency context with adjusted dates if present. Copy so
            # the template row's JSON payload is never mutated in place.
            urgency_context = alert.urgency_context.copy() if alert.urgency_context else {}
            if urgency_context.get("expected_delivery"):
                original_delivery = _parse_iso_datetime(urgency_context["expected_delivery"])
                if original_delivery is not None:
                    adjusted_delivery = adjust_date_for_demo(original_delivery, session_time, BASE_REFERENCE_DATE)
                    urgency_context["expected_delivery"] = adjusted_delivery.isoformat() if adjusted_delivery else None
                # else: keep original value when parsing fails

            new_alert = Alert(
                id=uuid.uuid4(),
                tenant_id=virtual_uuid,
                item_type=alert.item_type,
                alert_type=alert.alert_type,
                service=alert.service,
                title=alert.title,
                message=alert.message,
                status=alert.status,
                priority_score=alert.priority_score,
                priority_level=alert.priority_level,
                type_class=alert.type_class,
                orchestrator_context=alert.orchestrator_context,
                business_impact=alert.business_impact,
                urgency_context=urgency_context,
                user_agency=alert.user_agency,
                trend_context=alert.trend_context,
                smart_actions=alert.smart_actions,
                ai_reasoning_summary=alert.ai_reasoning_summary,
                confidence_score=alert.confidence_score,
                timing_decision=alert.timing_decision,
                scheduled_send_time=adjusted_scheduled_send_time,
                placement=alert.placement,
                action_created_at=adjusted_action_created_at,
                superseded_by_action_id=None,  # Don't clone superseded relationships
                hidden_from_ui=alert.hidden_from_ui,
                alert_metadata=alert.alert_metadata,
                created_at=adjusted_created_at,
                updated_at=adjusted_updated_at,
                resolved_at=adjusted_resolved_at
            )
            db.add(new_alert)
            stats["alerts"] += 1

            # Track by type_class
            # NOTE(review): compared as plain strings; assumes type_class is
            # stored as str (not the AlertTypeClass enum) — confirm with model.
            if alert.type_class == "action_needed":
                stats["action_needed"] += 1
            elif alert.type_class == "prevented_issue":
                stats["prevented_issues"] += 1

            # Track historical (older than 1 day relative to the session)
            if adjusted_created_at < session_time - timedelta(days=1):
                stats["historical_alerts"] += 1

        # Commit cloned data
        await db.commit()

        total_records = stats["alerts"]
        duration_ms = int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000)

        logger.info(
            "Alert data cloning completed",
            virtual_tenant_id=virtual_tenant_id,
            total_records=total_records,
            stats=stats,
            duration_ms=duration_ms
        )

        return {
            "service": "alert_processor",
            "status": "completed",
            "records_cloned": total_records,
            "duration_ms": duration_ms,
            "details": stats
        }

    except ValueError as e:
        logger.error("Invalid UUID format", error=str(e))
        raise HTTPException(status_code=400, detail=f"Invalid UUID: {str(e)}")

    except Exception as e:
        logger.error(
            "Failed to clone alert data",
            error=str(e),
            virtual_tenant_id=virtual_tenant_id,
            exc_info=True
        )

        # Rollback on error; report failure instead of raising so the
        # orchestrator can treat cloning as best-effort per service.
        await db.rollback()

        return {
            "service": "alert_processor",
            "status": "failed",
            "records_cloned": 0,
            "duration_ms": int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000),
            "error": str(e)
        }
|
|
|
|
|
|
@router.get("/clone/health")
async def clone_health_check(_: bool = Depends(verify_internal_api_key)):
    """
    Health check for internal cloning endpoint
    Used by orchestrator to verify service availability
    """
    health_payload = {
        "service": "alert_processor",
        "clone_endpoint": "available",
        "version": "2.0.0",
    }
    return health_payload
|
|
|
|
|
|
@router.delete("/tenant/{virtual_tenant_id}")
async def delete_demo_data(
    virtual_tenant_id: str,
    db: AsyncSession = Depends(get_db),
    _: bool = Depends(verify_internal_api_key)
):
    """Delete all alert data for a virtual demo tenant.

    Args:
        virtual_tenant_id: Virtual tenant UUID whose alerts are purged.

    Returns:
        Deletion status with record counts and timing.

    Raises:
        HTTPException: 400 for a malformed UUID (consistent with the clone
            endpoint), 500 for database failures.
    """
    logger.info("Deleting alert data for virtual tenant", virtual_tenant_id=virtual_tenant_id)
    start_time = datetime.now(timezone.utc)

    # Validate the UUID outside the main try block so a client error is
    # reported as 400 rather than being swallowed into the generic 500 path.
    try:
        virtual_uuid = uuid.UUID(virtual_tenant_id)
    except ValueError as e:
        logger.error("Invalid UUID format", error=str(e))
        raise HTTPException(status_code=400, detail=f"Invalid UUID: {str(e)}")

    try:
        # Count records first so the response can report what was removed.
        alert_count = await db.scalar(
            select(func.count(Alert.id)).where(Alert.tenant_id == virtual_uuid)
        )

        # Delete alerts
        await db.execute(delete(Alert).where(Alert.tenant_id == virtual_uuid))
        await db.commit()

        duration_ms = int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000)
        logger.info(
            "Alert data deleted successfully",
            virtual_tenant_id=virtual_tenant_id,
            duration_ms=duration_ms
        )

        return {
            "service": "alert_processor",
            "status": "deleted",
            "virtual_tenant_id": virtual_tenant_id,
            "records_deleted": {
                "alerts": alert_count,
                "total": alert_count
            },
            "duration_ms": duration_ms
        }
    except Exception as e:
        logger.error("Failed to delete alert data", error=str(e), exc_info=True)
        await db.rollback()
        raise HTTPException(status_code=500, detail=str(e))
|