Add user role
@@ -9,12 +9,14 @@ import structlog
from fastapi import APIRouter, Depends, HTTPException, status, Query, Path
from sqlalchemy.ext.asyncio import AsyncSession
from typing import List, Optional
from datetime import date
from datetime import date, datetime
from sqlalchemy import select, delete, func
import uuid

from app.core.database import get_db
from shared.auth.decorators import (
    get_current_user_dep,
    get_current_tenant_id_dep,
    require_admin_role
)
from app.services.forecasting_service import ForecastingService
from app.schemas.forecasts import (
@@ -22,6 +24,7 @@ from app.schemas.forecasts import (
    BatchForecastResponse, AlertResponse
)
from app.models.forecasts import Forecast, PredictionBatch, ForecastAlert
from app.services.messaging import publish_forecasts_deleted_event

logger = structlog.get_logger()
router = APIRouter()
@@ -318,4 +321,197 @@ async def acknowledge_alert(
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Internal server error"
        )

@router.delete("/forecasts/tenant/{tenant_id}")
async def delete_tenant_forecasts_complete(
    tenant_id: str,
    current_user = Depends(get_current_user_dep),
    _admin_check = Depends(require_admin_role),
    db: AsyncSession = Depends(get_db)
):
    """
    Delete all forecasts and predictions for a tenant.

    **WARNING: This operation is irreversible!**

    This endpoint:
    1. Cancels any active prediction batches
    2. Clears the prediction cache
    3. Deletes all forecast records
    4. Deletes prediction batch records
    5. Deletes model performance metrics
    6. Publishes a deletion event

    Used by the admin user deletion process to clean up all forecasting data.
    """
    try:
        tenant_uuid = uuid.UUID(tenant_id)
    except ValueError:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail="Invalid tenant ID format"
        )

    try:
        from app.models.forecasts import Forecast, PredictionBatch
        from app.models.predictions import ModelPerformanceMetric, PredictionCache

        deletion_stats = {
            "tenant_id": tenant_id,
            "deleted_at": datetime.utcnow().isoformat(),
            "batches_cancelled": 0,
            "forecasts_deleted": 0,
            "prediction_batches_deleted": 0,
            "performance_metrics_deleted": 0,
            "cache_entries_deleted": 0,
            "errors": []
        }

        # Step 1: Cancel active prediction batches
        try:
            active_batches_query = select(PredictionBatch).where(
                PredictionBatch.tenant_id == tenant_uuid,
                PredictionBatch.status.in_(["pending", "processing"])
            )
            active_batches_result = await db.execute(active_batches_query)
            active_batches = active_batches_result.scalars().all()

            for batch in active_batches:
                batch.status = "cancelled"
                batch.completed_at = datetime.utcnow()
                deletion_stats["batches_cancelled"] += 1

            if active_batches:
                await db.commit()
                logger.info("Cancelled active prediction batches",
                            tenant_id=tenant_id,
                            count=len(active_batches))

        except Exception as e:
            error_msg = f"Error cancelling prediction batches: {str(e)}"
            deletion_stats["errors"].append(error_msg)
            logger.error(error_msg)

        # Step 2: Delete prediction cache
        try:
            cache_count_query = select(func.count(PredictionCache.id)).where(
                PredictionCache.tenant_id == tenant_uuid
            )
            cache_count_result = await db.execute(cache_count_query)
            cache_count = cache_count_result.scalar()

            cache_delete_query = delete(PredictionCache).where(
                PredictionCache.tenant_id == tenant_uuid
            )
            await db.execute(cache_delete_query)
            deletion_stats["cache_entries_deleted"] = cache_count

            logger.info("Deleted prediction cache entries",
                        tenant_id=tenant_id,
                        count=cache_count)

        except Exception as e:
            error_msg = f"Error deleting prediction cache: {str(e)}"
            deletion_stats["errors"].append(error_msg)
            logger.error(error_msg)

        # Step 3: Delete model performance metrics
        try:
            metrics_count_query = select(func.count(ModelPerformanceMetric.id)).where(
                ModelPerformanceMetric.tenant_id == tenant_uuid
            )
            metrics_count_result = await db.execute(metrics_count_query)
            metrics_count = metrics_count_result.scalar()

            metrics_delete_query = delete(ModelPerformanceMetric).where(
                ModelPerformanceMetric.tenant_id == tenant_uuid
            )
            await db.execute(metrics_delete_query)
            deletion_stats["performance_metrics_deleted"] = metrics_count

            logger.info("Deleted performance metrics",
                        tenant_id=tenant_id,
                        count=metrics_count)

        except Exception as e:
            error_msg = f"Error deleting performance metrics: {str(e)}"
            deletion_stats["errors"].append(error_msg)
            logger.error(error_msg)

        # Step 4: Delete prediction batches
        try:
            batches_count_query = select(func.count(PredictionBatch.id)).where(
                PredictionBatch.tenant_id == tenant_uuid
            )
            batches_count_result = await db.execute(batches_count_query)
            batches_count = batches_count_result.scalar()

            batches_delete_query = delete(PredictionBatch).where(
                PredictionBatch.tenant_id == tenant_uuid
            )
            await db.execute(batches_delete_query)
            deletion_stats["prediction_batches_deleted"] = batches_count

            logger.info("Deleted prediction batches",
                        tenant_id=tenant_id,
                        count=batches_count)

        except Exception as e:
            error_msg = f"Error deleting prediction batches: {str(e)}"
            deletion_stats["errors"].append(error_msg)
            logger.error(error_msg)

        # Step 5: Delete forecasts (main data)
        try:
            forecasts_count_query = select(func.count(Forecast.id)).where(
                Forecast.tenant_id == tenant_uuid
            )
            forecasts_count_result = await db.execute(forecasts_count_query)
            forecasts_count = forecasts_count_result.scalar()

            forecasts_delete_query = delete(Forecast).where(
                Forecast.tenant_id == tenant_uuid
            )
            await db.execute(forecasts_delete_query)
            deletion_stats["forecasts_deleted"] = forecasts_count

            await db.commit()

            logger.info("Deleted forecasts",
                        tenant_id=tenant_id,
                        count=forecasts_count)

        except Exception as e:
            await db.rollback()
            error_msg = f"Error deleting forecasts: {str(e)}"
            deletion_stats["errors"].append(error_msg)
            logger.error(error_msg)
            raise HTTPException(
                status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
                detail=error_msg
            )

        # Step 6: Publish deletion event
        try:
            await publish_forecasts_deleted_event(tenant_id, deletion_stats)
        except Exception as e:
            logger.warning("Failed to publish forecasts deletion event", error=str(e))

        return {
            "success": True,
            "message": f"All forecasting data for tenant {tenant_id} deleted successfully",
            "deletion_details": deletion_stats
        }

    except HTTPException:
        raise
    except Exception as e:
        logger.error("Unexpected error deleting tenant forecasts",
                     tenant_id=tenant_id,
                     error=str(e))
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Failed to delete tenant forecasts: {str(e)}"
        )
@@ -9,6 +9,7 @@ import structlog
import json
from typing import Dict, Any
import asyncio
from datetime import datetime

from shared.messaging.rabbitmq import RabbitMQClient
from shared.messaging.events import (
@@ -132,4 +133,20 @@ async def handle_weather_updated(data: Dict[str, Any]):
            # Could trigger re-forecasting if needed

    except Exception as e:
        logger.error("Error handling weather updated event", error=str(e))


async def publish_forecasts_deleted_event(tenant_id: str, deletion_stats: Dict[str, Any]):
    """Publish forecasts deletion event to message queue"""
    try:
        await rabbitmq_client.publish_event(
            exchange="forecasting_events",
            routing_key="forecasting.tenant.deleted",
            message={
                "event_type": "tenant_forecasts_deleted",
                "tenant_id": tenant_id,
                "timestamp": datetime.utcnow().isoformat(),
                "deletion_stats": deletion_stats
            }
        )
    except Exception as e:
        logger.error("Failed to publish forecasts deletion event", error=str(e))
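For reference, a minimal sketch of how the admin user deletion process might call the new endpoint. The base URL, bearer token, and tenant ID are placeholders, and the path assumes the router is mounted without an extra prefix; the response keys (`deletion_details`, `forecasts_deleted`, `errors`) follow the payload returned by `delete_tenant_forecasts_complete` above.

```python
# Usage sketch only: FORECASTING_BASE_URL and ADMIN_TOKEN are illustrative
# placeholders, not values defined anywhere in this repository.
import asyncio

import httpx

FORECASTING_BASE_URL = "http://localhost:8000"   # placeholder service URL
ADMIN_TOKEN = "eyJ...admin-jwt..."               # placeholder admin bearer token


async def delete_tenant_forecasting_data(tenant_id: str) -> dict:
    """Call DELETE /forecasts/tenant/{tenant_id} and return its deletion stats."""
    async with httpx.AsyncClient(base_url=FORECASTING_BASE_URL, timeout=30.0) as client:
        resp = await client.delete(
            f"/forecasts/tenant/{tenant_id}",
            headers={"Authorization": f"Bearer {ADMIN_TOKEN}"},
        )
        # 400 -> invalid tenant UUID, 500 -> forecast deletion failure
        resp.raise_for_status()
        return resp.json()["deletion_details"]


if __name__ == "__main__":
    stats = asyncio.run(
        delete_tenant_forecasting_data("11111111-2222-3333-4444-555555555555")
    )
    print(stats["forecasts_deleted"], "forecasts removed;",
          len(stats["errors"]), "step errors")
```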
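On the consuming side, other services can bind to the `forecasting.tenant.deleted` routing key on the `forecasting_events` exchange. The sketch below uses `aio_pika` directly rather than the shared `RabbitMQClient`, whose API is not shown in this diff; the AMQP URL, queue name, and topic/durable exchange settings are assumptions and must match whatever the shared client actually declares.

```python
# Consumer sketch under stated assumptions: aio_pika stands in for the shared
# RabbitMQClient, and the exchange is assumed to be a durable topic exchange.
import asyncio
import json

import aio_pika

AMQP_URL = "amqp://guest:guest@localhost/"        # placeholder broker URL
QUEUE_NAME = "audit.forecasts_deleted"            # placeholder queue name


async def consume_tenant_deletions() -> None:
    connection = await aio_pika.connect_robust(AMQP_URL)
    async with connection:
        channel = await connection.channel()
        exchange = await channel.declare_exchange(
            "forecasting_events", aio_pika.ExchangeType.TOPIC, durable=True
        )
        queue = await channel.declare_queue(QUEUE_NAME, durable=True)
        await queue.bind(exchange, routing_key="forecasting.tenant.deleted")

        async with queue.iterator() as messages:
            async for message in messages:
                async with message.process():   # ack on successful handling
                    event = json.loads(message.body)
                    # Payload shape mirrors publish_forecasts_deleted_event above
                    print(event["tenant_id"],
                          event["deletion_stats"]["forecasts_deleted"],
                          "forecasts deleted")


if __name__ == "__main__":
    asyncio.run(consume_tenant_deletions())
```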