# services/forecasting/app/api/forecasting_operations.py
"""
Forecasting Operations API - Business operations for forecast generation and predictions
"""
import structlog
from fastapi import APIRouter, Depends, HTTPException, status, Query, Path, Request
from typing import List, Dict, Any, Optional
from datetime import date, datetime, timezone
import uuid

from app.services.forecasting_service import EnhancedForecastingService
from app.services.prediction_service import PredictionService
from app.services.forecast_cache import get_forecast_cache_service
from app.schemas.forecasts import (
    ForecastRequest, ForecastResponse, BatchForecastRequest,
    BatchForecastResponse, MultiDayForecastResponse
)
from shared.auth.decorators import get_current_user_dep
from shared.database.base import create_database_manager
from shared.monitoring.decorators import track_execution_time
from shared.monitoring.metrics import get_metrics_collector
from app.core.config import settings
from shared.routing import RouteBuilder
from shared.auth.access_control import require_user_role
from shared.security import create_audit_logger, create_rate_limiter, AuditSeverity, AuditAction
from shared.subscription.plans import get_forecast_quota, get_forecast_horizon_limit
from shared.redis_utils import get_redis_client

route_builder = RouteBuilder('forecasting')
logger = structlog.get_logger()
router = APIRouter(tags=["forecasting-operations"])

# Initialize audit logger
audit_logger = create_audit_logger("forecasting-service")


async def get_rate_limiter():
    """Dependency for rate limiter"""
    redis_client = await get_redis_client()
    return create_rate_limiter(redis_client)


def get_enhanced_forecasting_service():
    """Dependency injection for EnhancedForecastingService"""
    database_manager = create_database_manager(settings.DATABASE_URL, "forecasting-service")
    return EnhancedForecastingService(database_manager)


def get_enhanced_prediction_service():
    """Dependency injection for enhanced PredictionService"""
    database_manager = create_database_manager(settings.DATABASE_URL, "forecasting-service")
    return PredictionService(database_manager)
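
# NOTE: The providers above build a fresh database manager on every call and are wired in
# via FastAPI's dependency injection. A minimal sketch of how they can be swapped out in
# tests using FastAPI's standard dependency_overrides mechanism; `app` is the FastAPI
# instance defined elsewhere in this service, and FakeForecastingService is a hypothetical
# stand-in, not part of this codebase:
#
#     app.dependency_overrides[get_enhanced_forecasting_service] = lambda: FakeForecastingService()
#     app.dependency_overrides[get_enhanced_prediction_service] = lambda: FakePredictionService()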


@router.post(
    route_builder.build_operations_route("single"),
    response_model=ForecastResponse
)
@require_user_role(['viewer', 'member', 'admin', 'owner'])
@track_execution_time("enhanced_single_forecast_duration_seconds", "forecasting-service")
async def generate_single_forecast(
    request: ForecastRequest,
    tenant_id: str = Path(..., description="Tenant ID"),
    request_obj: Request = None,
    current_user: dict = Depends(get_current_user_dep),
    enhanced_forecasting_service: EnhancedForecastingService = Depends(get_enhanced_forecasting_service)
):
    """Generate a single product forecast with caching support"""
    metrics = get_metrics_collector(request_obj)
    try:
        logger.info("Generating single forecast",
                    tenant_id=tenant_id,
                    inventory_product_id=request.inventory_product_id,
                    forecast_date=request.forecast_date.isoformat())

        if metrics:
            metrics.increment_counter("single_forecasts_total")

        # Initialize cache service
        cache_service = get_forecast_cache_service(settings.REDIS_URL)

        # Check cache first
        cached_forecast = await cache_service.get_cached_forecast(
            tenant_id=uuid.UUID(tenant_id),
            product_id=uuid.UUID(request.inventory_product_id),
            forecast_date=request.forecast_date
        )
        if cached_forecast:
            if metrics:
                metrics.increment_counter("forecast_cache_hits_total")
            logger.info("Returning cached forecast",
                        tenant_id=tenant_id,
                        forecast_id=cached_forecast.get('id'))
            return ForecastResponse(**cached_forecast)

        # Cache miss - generate forecast
        if metrics:
            metrics.increment_counter("forecast_cache_misses_total")

        forecast = await enhanced_forecasting_service.generate_forecast(
            tenant_id=tenant_id,
            request=request
        )

        # Cache the result
        await cache_service.cache_forecast(
            tenant_id=uuid.UUID(tenant_id),
            product_id=uuid.UUID(request.inventory_product_id),
            forecast_date=request.forecast_date,
            forecast_data=forecast.dict()
        )

        if metrics:
            metrics.increment_counter("single_forecasts_success_total")

        logger.info("Single forecast generated successfully",
                    tenant_id=tenant_id,
                    forecast_id=forecast.id)
        return forecast

    except ValueError as e:
        if metrics:
            metrics.increment_counter("forecast_validation_errors_total")
        logger.error("Forecast validation error", error=str(e), tenant_id=tenant_id)
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=str(e)
        )
    except Exception as e:
        if metrics:
            metrics.increment_counter("single_forecasts_errors_total")
        logger.error("Single forecast generation failed", error=str(e), tenant_id=tenant_id)
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Forecast generation failed"
        )
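
# Illustrative request body for the single-forecast endpoint above. The field names are
# inferred from how `request` is used in this module (inventory_product_id must parse as a
# UUID, forecast_date is a date, forecast_days is read by the multi-day endpoint); the
# authoritative schema is app.schemas.forecasts.ForecastRequest and may differ:
#
#     {
#         "inventory_product_id": "<uuid>",
#         "forecast_date": "2025-10-10",
#         "forecast_days": 7
#     }
#
# The handler follows a cache-aside pattern: it checks Redis first and only calls the
# forecasting service (then repopulates the cache) on a miss.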


@router.post(
    route_builder.build_operations_route("multi-day"),
    response_model=MultiDayForecastResponse
)
@require_user_role(['viewer', 'member', 'admin', 'owner'])
@track_execution_time("enhanced_multi_day_forecast_duration_seconds", "forecasting-service")
async def generate_multi_day_forecast(
    request: ForecastRequest,
    tenant_id: str = Path(..., description="Tenant ID"),
    request_obj: Request = None,
    current_user: dict = Depends(get_current_user_dep),
    enhanced_forecasting_service: EnhancedForecastingService = Depends(get_enhanced_forecasting_service)
):
    """Generate multiple daily forecasts for the specified period"""
    metrics = get_metrics_collector(request_obj)
    try:
        logger.info("Generating multi-day forecast",
                    tenant_id=tenant_id,
                    inventory_product_id=request.inventory_product_id,
                    forecast_days=request.forecast_days,
                    forecast_date=request.forecast_date.isoformat())

        if metrics:
            metrics.increment_counter("multi_day_forecasts_total")

        if request.forecast_days <= 0 or request.forecast_days > 30:
            raise ValueError("forecast_days must be between 1 and 30")

        forecast_result = await enhanced_forecasting_service.generate_multi_day_forecast(
            tenant_id=tenant_id,
            request=request
        )

        if metrics:
            metrics.increment_counter("multi_day_forecasts_success_total")

        logger.info("Multi-day forecast generated successfully",
                    tenant_id=tenant_id,
                    inventory_product_id=request.inventory_product_id,
                    forecast_days=len(forecast_result.get("forecasts", [])))
        return forecast_result

    except ValueError as e:
        if metrics:
            metrics.increment_counter("forecast_validation_errors_total")
        logger.error("Multi-day forecast validation error", error=str(e), tenant_id=tenant_id)
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=str(e)
        )
    except Exception as e:
        if metrics:
            metrics.increment_counter("multi_day_forecasts_errors_total")
        logger.error("Multi-day forecast generation failed", error=str(e), tenant_id=tenant_id)
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Multi-day forecast generation failed"
        )


@router.post(
    route_builder.build_operations_route("batch"),
    response_model=BatchForecastResponse
)
@require_user_role(['admin', 'owner'])
@track_execution_time("enhanced_batch_forecast_duration_seconds", "forecasting-service")
async def generate_batch_forecast(
    request: BatchForecastRequest,
    tenant_id: str = Path(..., description="Tenant ID"),
    request_obj: Request = None,
    current_user: dict = Depends(get_current_user_dep),
    enhanced_forecasting_service: EnhancedForecastingService = Depends(get_enhanced_forecasting_service),
    rate_limiter = Depends(get_rate_limiter)
):
    """Generate forecasts for multiple products in batch (Admin+ only, quota enforced)"""
    metrics = get_metrics_collector(request_obj)
    try:
        logger.info("Generating batch forecast",
                    tenant_id=tenant_id,
                    product_count=len(request.inventory_product_ids))

        if metrics:
            metrics.increment_counter("batch_forecasts_total")

        if not request.inventory_product_ids:
            raise ValueError("inventory_product_ids cannot be empty")

        # Get subscription tier and enforce quotas
        tier = current_user.get('subscription_tier', 'starter')

        # Check daily quota for forecast generation
        quota_limit = get_forecast_quota(tier)
        quota_result = await rate_limiter.check_and_increment_quota(
            tenant_id,
            "forecast_generation",
            quota_limit,
            period=86400  # 24 hours
        )

        # Validate forecast horizon if specified
        if request.horizon_days:
            await rate_limiter.validate_forecast_horizon(
                tenant_id, request.horizon_days, tier
            )

        batch_result = await enhanced_forecasting_service.generate_batch_forecast(
            tenant_id=tenant_id,
            request=request
        )

        if metrics:
            metrics.increment_counter("batch_forecasts_success_total")

        logger.info("Batch forecast generated successfully",
                    tenant_id=tenant_id,
                    total_forecasts=batch_result.total_forecasts)
        return batch_result

    except ValueError as e:
        if metrics:
            metrics.increment_counter("forecast_validation_errors_total")
        logger.error("Batch forecast validation error", error=str(e), tenant_id=tenant_id)
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=str(e)
        )
    except Exception as e:
        if metrics:
            metrics.increment_counter("batch_forecasts_errors_total")
        logger.error("Batch forecast generation failed", error=str(e), tenant_id=tenant_id)
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Batch forecast generation failed"
        )
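
# Quota-enforcement sketch for the batch endpoint above. The exact semantics live in the
# shared.security rate limiter and shared.subscription.plans; the behaviour described here
# is inferred from the call sites, not guaranteed by this module:
#
#   - get_forecast_quota(tier) maps the tenant's subscription tier (defaulting to 'starter')
#     to a daily cap, and check_and_increment_quota counts this request against that cap
#     over a rolling 86400-second (24 h) window. The returned quota_result is not inspected
#     here, so the limiter is presumably expected to raise when the cap is exceeded.
#   - validate_forecast_horizon applies the per-tier horizon limit whenever the request
#     specifies horizon_days.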


@router.post(
    route_builder.build_operations_route("realtime")
)
@require_user_role(['viewer', 'member', 'admin', 'owner'])
@track_execution_time("enhanced_realtime_prediction_duration_seconds", "forecasting-service")
async def generate_realtime_prediction(
    prediction_request: Dict[str, Any],
    tenant_id: str = Path(..., description="Tenant ID"),
    request_obj: Request = None,
    current_user: dict = Depends(get_current_user_dep),
    prediction_service: PredictionService = Depends(get_enhanced_prediction_service)
):
    """Generate real-time prediction"""
    metrics = get_metrics_collector(request_obj)
    try:
        logger.info("Generating real-time prediction",
                    tenant_id=tenant_id,
                    inventory_product_id=prediction_request.get("inventory_product_id"))

        if metrics:
            metrics.increment_counter("realtime_predictions_total")

        required_fields = ["inventory_product_id", "model_id", "features"]
        missing_fields = [field for field in required_fields if field not in prediction_request]
        if missing_fields:
            raise HTTPException(
                status_code=status.HTTP_400_BAD_REQUEST,
                detail=f"Missing required fields: {missing_fields}"
            )

        prediction_result = await prediction_service.predict_with_weather_forecast(
            model_id=prediction_request["model_id"],
            model_path=prediction_request.get("model_path", ""),
            features=prediction_request["features"],
            tenant_id=tenant_id,
            days=prediction_request.get("days", 7),
            confidence_level=prediction_request.get("confidence_level", 0.8)
        )

        if metrics:
            metrics.increment_counter("realtime_predictions_success_total")

        logger.info("Real-time prediction generated successfully",
                    tenant_id=tenant_id,
                    days=len(prediction_result))

        return {
            "tenant_id": tenant_id,
            "inventory_product_id": prediction_request["inventory_product_id"],
            "model_id": prediction_request["model_id"],
            "predictions": prediction_result,
            "days": len(prediction_result),
            "timestamp": datetime.now(timezone.utc).isoformat()
        }

    except HTTPException:
        raise
    except ValueError as e:
        if metrics:
            metrics.increment_counter("prediction_validation_errors_total")
        logger.error("Prediction validation error", error=str(e), tenant_id=tenant_id)
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=str(e)
        )
    except Exception as e:
        if metrics:
            metrics.increment_counter("realtime_predictions_errors_total")
        logger.error("Real-time prediction failed", error=str(e), tenant_id=tenant_id)
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Real-time prediction failed"
        )
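
# Illustrative payload for the realtime endpoint above (and for each item sent to the
# batch-predictions endpoint below). The keys mirror what the handler reads out of the raw
# dict; the shape of "features" is whatever PredictionService expects and is not defined
# in this module:
#
#     {
#         "inventory_product_id": "<uuid>",
#         "model_id": "<model identifier>",
#         "model_path": "",            # optional, defaults to ""
#         "features": {...},           # model input features
#         "days": 7,                   # optional, defaults to 7
#         "confidence_level": 0.8      # optional, defaults to 0.8
#     }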


@router.post(
    route_builder.build_operations_route("batch-predictions")
)
@require_user_role(['viewer', 'member', 'admin', 'owner'])
async def generate_batch_predictions(
    predictions_request: List[Dict[str, Any]],
    tenant_id: str = Path(..., description="Tenant ID"),
    current_user: dict = Depends(get_current_user_dep),
    prediction_service: PredictionService = Depends(get_enhanced_prediction_service)
):
    """Generate batch predictions"""
    try:
        logger.info("Generating batch predictions", tenant_id=tenant_id, count=len(predictions_request))

        results = []
        for pred_request in predictions_request:
            try:
                prediction_result = await prediction_service.predict_with_weather_forecast(
                    model_id=pred_request["model_id"],
                    model_path=pred_request.get("model_path", ""),
                    features=pred_request["features"],
                    tenant_id=tenant_id,
                    days=pred_request.get("days", 7),
                    confidence_level=pred_request.get("confidence_level", 0.8)
                )
                results.append({
                    "inventory_product_id": pred_request.get("inventory_product_id"),
                    "predictions": prediction_result,
                    "success": True
                })
            except Exception as e:
                results.append({
                    "inventory_product_id": pred_request.get("inventory_product_id"),
                    "error": str(e),
                    "success": False
                })

        return {"predictions": results, "total": len(results)}

    except Exception as e:
        logger.error("Batch predictions failed", error=str(e), tenant_id=tenant_id)
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Batch predictions failed"
        )


@router.post(
    route_builder.build_operations_route("validate-predictions")
)
async def validate_predictions(
    tenant_id: str = Path(..., description="Tenant ID"),
    start_date: date = Query(...),
    end_date: date = Query(...),
    enhanced_forecasting_service: EnhancedForecastingService = Depends(get_enhanced_forecasting_service)
):
    """Validate predictions against actual sales data"""
    try:
        logger.info("Validating predictions", tenant_id=tenant_id)
        validation_results = await enhanced_forecasting_service.validate_predictions(
            tenant_id=tenant_id,
            start_date=start_date,
            end_date=end_date
        )
        return validation_results
    except Exception as e:
        logger.error("Prediction validation failed", error=str(e), tenant_id=tenant_id)
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Prediction validation failed"
        )


@router.get(
    route_builder.build_operations_route("statistics")
)
async def get_forecast_statistics(
    tenant_id: str = Path(..., description="Tenant ID"),
    start_date: Optional[date] = Query(None),
    end_date: Optional[date] = Query(None),
    enhanced_forecasting_service: EnhancedForecastingService = Depends(get_enhanced_forecasting_service)
):
    """Get forecast statistics"""
    try:
        logger.info("Getting forecast statistics", tenant_id=tenant_id)
        stats = await enhanced_forecasting_service.get_forecast_statistics(
            tenant_id=tenant_id,
            start_date=start_date,
            end_date=end_date
        )
        return stats
    except Exception as e:
        logger.error("Failed to get forecast statistics", error=str(e), tenant_id=tenant_id)
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Failed to retrieve forecast statistics"
        )


@router.delete(
    route_builder.build_operations_route("cache")
)
async def clear_prediction_cache(
    tenant_id: str = Path(..., description="Tenant ID"),
    prediction_service: PredictionService = Depends(get_enhanced_prediction_service)
):
    """Clear prediction cache"""
    try:
        logger.info("Clearing prediction cache", tenant_id=tenant_id)
        await prediction_service.clear_cache(tenant_id=tenant_id)
        return {"message": "Prediction cache cleared successfully"}
    except Exception as e:
        logger.error("Failed to clear prediction cache", error=str(e), tenant_id=tenant_id)
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Failed to clear prediction cache"
        )
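

# Minimal client sketch for exercising the single-forecast endpoint. The concrete URL
# depends on how RouteBuilder('forecasting').build_operations_route("single") expands and
# on where this router is mounted, so the path below is a placeholder assumption, as is
# the bearer token and base URL:
#
#     import httpx
#
#     async def example_single_forecast(tenant_id: str, token: str) -> None:
#         async with httpx.AsyncClient(base_url="http://localhost:8000") as client:
#             resp = await client.post(
#                 f"/<operations-route-for-single>/{tenant_id}",  # placeholder path
#                 headers={"Authorization": f"Bearer {token}"},
#                 json={
#                     "inventory_product_id": "<uuid>",
#                     "forecast_date": "2025-10-10",
#                     "forecast_days": 1,
#                 },
#             )
#             resp.raise_for_status()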