REFACTOR ALL APIs
This commit is contained in:
@@ -4,13 +4,12 @@ HTTP endpoints for demand forecasting and prediction operations
|
||||
"""
|
||||
|
||||
from .forecasts import router as forecasts_router
from .predictions import router as predictions_router
from .forecasting_operations import router as forecasting_operations_router
from .analytics import router as analytics_router

# Public re-exports: one APIRouter per API module, mounted by the service app.
__all__ = [
    "forecasts_router",
    "predictions_router",
    "forecasting_operations_router",
    "analytics_router",
]
|
||||
53
services/forecasting/app/api/analytics.py
Normal file
53
services/forecasting/app/api/analytics.py
Normal file
@@ -0,0 +1,53 @@
|
||||
# services/forecasting/app/api/analytics.py
|
||||
"""
|
||||
Forecasting Analytics API - Reporting, statistics, and insights
|
||||
"""
|
||||
|
||||
import structlog
|
||||
from fastapi import APIRouter, Depends, HTTPException, status, Query, Path
|
||||
from datetime import date
|
||||
from typing import Optional
|
||||
|
||||
from app.services.prediction_service import PredictionService
|
||||
from shared.database.base import create_database_manager
|
||||
from app.core.config import settings
|
||||
from shared.routing import RouteBuilder
|
||||
|
||||
# Module-level singletons shared by every endpoint in this module.
route_builder = RouteBuilder('forecasting')  # builds tenant-scoped URL paths
logger = structlog.get_logger()
router = APIRouter(tags=["forecasting-analytics"])
|
||||
|
||||
|
||||
def get_enhanced_prediction_service():
    """Dependency injection for enhanced PredictionService"""
    # Build the database manager inline; one fresh service per request.
    return PredictionService(
        create_database_manager(settings.DATABASE_URL, "forecasting-service")
    )
|
||||
|
||||
|
||||
@router.get(
    route_builder.build_analytics_route("predictions-performance")
)
async def get_predictions_performance(
    tenant_id: str = Path(..., description="Tenant ID"),
    start_date: Optional[date] = Query(None),
    end_date: Optional[date] = Query(None),
    prediction_service: PredictionService = Depends(get_enhanced_prediction_service)
):
    """Get predictions performance analytics"""
    try:
        logger.info("Getting predictions performance", tenant_id=tenant_id)
        # Delegate straight to the service; the date filters are optional.
        return await prediction_service.get_performance_metrics(
            tenant_id=tenant_id,
            start_date=start_date,
            end_date=end_date,
        )
    except Exception as exc:
        logger.error("Failed to get predictions performance", error=str(exc), tenant_id=tenant_id)
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Failed to retrieve predictions performance"
        )
|
||||
414
services/forecasting/app/api/forecasting_operations.py
Normal file
414
services/forecasting/app/api/forecasting_operations.py
Normal file
@@ -0,0 +1,414 @@
|
||||
# services/forecasting/app/api/forecasting_operations.py
|
||||
"""
|
||||
Forecasting Operations API - Business operations for forecast generation and predictions
|
||||
"""
|
||||
|
||||
import structlog
|
||||
from fastapi import APIRouter, Depends, HTTPException, status, Query, Path, Request
|
||||
from typing import List, Dict, Any, Optional
|
||||
from datetime import date, datetime
|
||||
import uuid
|
||||
|
||||
from app.services.forecasting_service import EnhancedForecastingService
|
||||
from app.services.prediction_service import PredictionService
|
||||
from app.schemas.forecasts import (
|
||||
ForecastRequest, ForecastResponse, BatchForecastRequest,
|
||||
BatchForecastResponse, MultiDayForecastResponse
|
||||
)
|
||||
from shared.auth.decorators import get_current_user_dep
|
||||
from shared.database.base import create_database_manager
|
||||
from shared.monitoring.decorators import track_execution_time
|
||||
from shared.monitoring.metrics import get_metrics_collector
|
||||
from app.core.config import settings
|
||||
from shared.routing import RouteBuilder
|
||||
from shared.auth.access_control import require_user_role
|
||||
|
||||
# Module-level singletons shared by every endpoint in this module.
route_builder = RouteBuilder('forecasting')  # builds tenant-scoped URL paths
logger = structlog.get_logger()
router = APIRouter(tags=["forecasting-operations"])
|
||||
|
||||
|
||||
def get_enhanced_forecasting_service():
    """Dependency injection for EnhancedForecastingService"""
    # A fresh service instance per request, wired to the service database.
    db_manager = create_database_manager(settings.DATABASE_URL, "forecasting-service")
    return EnhancedForecastingService(db_manager)
|
||||
|
||||
|
||||
def get_enhanced_prediction_service():
    """Dependency injection for enhanced PredictionService"""
    # Construct the database manager inline rather than via a local variable.
    return PredictionService(
        create_database_manager(settings.DATABASE_URL, "forecasting-service")
    )
|
||||
|
||||
|
||||
@router.post(
    route_builder.build_operations_route("single"),
    response_model=ForecastResponse
)
@require_user_role(['viewer', 'member', 'admin', 'owner'])
@track_execution_time("enhanced_single_forecast_duration_seconds", "forecasting-service")
async def generate_single_forecast(
    request: ForecastRequest,
    tenant_id: str = Path(..., description="Tenant ID"),
    request_obj: Request = None,
    enhanced_forecasting_service: EnhancedForecastingService = Depends(get_enhanced_forecasting_service)
):
    """Generate a single product forecast"""
    metrics = get_metrics_collector(request_obj)

    def _bump(counter_name):
        # Metrics collector may be absent (request_obj defaults to None).
        if metrics:
            metrics.increment_counter(counter_name)

    try:
        logger.info("Generating single forecast",
                   tenant_id=tenant_id,
                   inventory_product_id=request.inventory_product_id,
                   forecast_date=request.forecast_date.isoformat())
        _bump("single_forecasts_total")

        result = await enhanced_forecasting_service.generate_forecast(
            tenant_id=tenant_id, request=request
        )

        _bump("single_forecasts_success_total")
        logger.info("Single forecast generated successfully",
                   tenant_id=tenant_id,
                   forecast_id=result.id)
        return result

    except ValueError as exc:
        # Bad request data from the caller -> 400 with the validation message.
        _bump("forecast_validation_errors_total")
        logger.error("Forecast validation error", error=str(exc), tenant_id=tenant_id)
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=str(exc)
        )
    except Exception as exc:
        # Anything else is an internal failure -> opaque 500.
        _bump("single_forecasts_errors_total")
        logger.error("Single forecast generation failed", error=str(exc), tenant_id=tenant_id)
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Forecast generation failed"
        )
|
||||
|
||||
|
||||
@router.post(
    route_builder.build_operations_route("multi-day"),
    response_model=MultiDayForecastResponse
)
@require_user_role(['viewer', 'member', 'admin', 'owner'])
@track_execution_time("enhanced_multi_day_forecast_duration_seconds", "forecasting-service")
async def generate_multi_day_forecast(
    request: ForecastRequest,
    tenant_id: str = Path(..., description="Tenant ID"),
    request_obj: Request = None,
    enhanced_forecasting_service: EnhancedForecastingService = Depends(get_enhanced_forecasting_service)
):
    """Generate multiple daily forecasts for the specified period"""
    metrics = get_metrics_collector(request_obj)

    def _bump(counter_name):
        # Metrics collector may be absent (request_obj defaults to None).
        if metrics:
            metrics.increment_counter(counter_name)

    try:
        logger.info("Generating multi-day forecast",
                   tenant_id=tenant_id,
                   inventory_product_id=request.inventory_product_id,
                   forecast_days=request.forecast_days,
                   forecast_date=request.forecast_date.isoformat())
        _bump("multi_day_forecasts_total")

        # Reject nonsensical horizons before doing any work.
        if request.forecast_days <= 0 or request.forecast_days > 30:
            raise ValueError("forecast_days must be between 1 and 30")

        outcome = await enhanced_forecasting_service.generate_multi_day_forecast(
            tenant_id=tenant_id, request=request
        )

        _bump("multi_day_forecasts_success_total")
        logger.info("Multi-day forecast generated successfully",
                   tenant_id=tenant_id,
                   inventory_product_id=request.inventory_product_id,
                   forecast_days=len(outcome.get("forecasts", [])))
        return outcome

    except ValueError as exc:
        _bump("forecast_validation_errors_total")
        logger.error("Multi-day forecast validation error", error=str(exc), tenant_id=tenant_id)
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=str(exc)
        )
    except Exception as exc:
        _bump("multi_day_forecasts_errors_total")
        logger.error("Multi-day forecast generation failed", error=str(exc), tenant_id=tenant_id)
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Multi-day forecast generation failed"
        )
|
||||
|
||||
|
||||
@router.post(
    route_builder.build_operations_route("batch"),
    response_model=BatchForecastResponse
)
@require_user_role(['viewer', 'member', 'admin', 'owner'])
@track_execution_time("enhanced_batch_forecast_duration_seconds", "forecasting-service")
async def generate_batch_forecast(
    request: BatchForecastRequest,
    tenant_id: str = Path(..., description="Tenant ID"),
    request_obj: Request = None,
    enhanced_forecasting_service: EnhancedForecastingService = Depends(get_enhanced_forecasting_service)
):
    """Generate forecasts for multiple products in batch"""
    metrics = get_metrics_collector(request_obj)

    def _bump(counter_name):
        # Metrics collector may be absent (request_obj defaults to None).
        if metrics:
            metrics.increment_counter(counter_name)

    try:
        logger.info("Generating batch forecast",
                   tenant_id=tenant_id,
                   product_count=len(request.inventory_product_ids))
        _bump("batch_forecasts_total")

        # An empty product list is a caller error, not an internal one.
        if not request.inventory_product_ids:
            raise ValueError("inventory_product_ids cannot be empty")

        outcome = await enhanced_forecasting_service.generate_batch_forecast(
            tenant_id=tenant_id, request=request
        )

        _bump("batch_forecasts_success_total")
        logger.info("Batch forecast generated successfully",
                   tenant_id=tenant_id,
                   total_forecasts=outcome.total_forecasts)
        return outcome

    except ValueError as exc:
        _bump("forecast_validation_errors_total")
        logger.error("Batch forecast validation error", error=str(exc), tenant_id=tenant_id)
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=str(exc)
        )
    except Exception as exc:
        _bump("batch_forecasts_errors_total")
        logger.error("Batch forecast generation failed", error=str(exc), tenant_id=tenant_id)
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Batch forecast generation failed"
        )
|
||||
|
||||
|
||||
@router.post(
    route_builder.build_operations_route("realtime")
)
@require_user_role(['viewer', 'member', 'admin', 'owner'])
@track_execution_time("enhanced_realtime_prediction_duration_seconds", "forecasting-service")
async def generate_realtime_prediction(
    prediction_request: Dict[str, Any],
    tenant_id: str = Path(..., description="Tenant ID"),
    request_obj: Request = None,
    prediction_service: PredictionService = Depends(get_enhanced_prediction_service)
):
    """Generate real-time prediction.

    The payload must contain ``inventory_product_id``, ``model_id`` and
    ``features``; ``model_path`` (default ``""``) and ``confidence_level``
    (default ``0.8``) are optional.

    Raises:
        HTTPException 400: required fields missing or validation failure.
        HTTPException 500: any other prediction error.
    """
    # Local import keeps the utcnow() fix self-contained in this block.
    from datetime import timezone

    metrics = get_metrics_collector(request_obj)

    try:
        logger.info("Generating real-time prediction",
                   tenant_id=tenant_id,
                   inventory_product_id=prediction_request.get("inventory_product_id"))

        if metrics:
            metrics.increment_counter("realtime_predictions_total")

        # Validate payload shape up front so callers get a 400, not a 500.
        required_fields = ["inventory_product_id", "model_id", "features"]
        missing_fields = [field for field in required_fields if field not in prediction_request]
        if missing_fields:
            raise HTTPException(
                status_code=status.HTTP_400_BAD_REQUEST,
                detail=f"Missing required fields: {missing_fields}"
            )

        prediction_result = await prediction_service.predict(
            model_id=prediction_request["model_id"],
            model_path=prediction_request.get("model_path", ""),
            features=prediction_request["features"],
            confidence_level=prediction_request.get("confidence_level", 0.8)
        )

        if metrics:
            metrics.increment_counter("realtime_predictions_success_total")

        logger.info("Real-time prediction generated successfully",
                   tenant_id=tenant_id,
                   prediction_value=prediction_result.get("prediction"))

        return {
            "tenant_id": tenant_id,
            "inventory_product_id": prediction_request["inventory_product_id"],
            "model_id": prediction_request["model_id"],
            "prediction": prediction_result.get("prediction"),
            "confidence": prediction_result.get("confidence"),
            # FIX: datetime.utcnow() is deprecated (Python 3.12+). Take an
            # aware UTC "now" and strip tzinfo so the emitted string keeps the
            # exact naive-ISO format the old code produced.
            "timestamp": datetime.now(timezone.utc).replace(tzinfo=None).isoformat()
        }

    except HTTPException:
        # Re-raise the 400 built above without remapping it to a 500.
        raise
    except ValueError as e:
        if metrics:
            metrics.increment_counter("prediction_validation_errors_total")
        logger.error("Prediction validation error", error=str(e), tenant_id=tenant_id)
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=str(e)
        )
    except Exception as e:
        if metrics:
            metrics.increment_counter("realtime_predictions_errors_total")
        logger.error("Real-time prediction failed", error=str(e), tenant_id=tenant_id)
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Real-time prediction failed"
        )
|
||||
|
||||
|
||||
@router.post(
    route_builder.build_operations_route("batch-predictions")
)
@require_user_role(['viewer', 'member', 'admin', 'owner'])
async def generate_batch_predictions(
    predictions_request: List[Dict[str, Any]],
    tenant_id: str = Path(..., description="Tenant ID"),
    prediction_service: PredictionService = Depends(get_enhanced_prediction_service)
):
    """Generate batch predictions"""
    try:
        logger.info("Generating batch predictions", tenant_id=tenant_id, count=len(predictions_request))

        outcomes = []
        for item in predictions_request:
            try:
                predicted = await prediction_service.predict(
                    model_id=item["model_id"],
                    model_path=item.get("model_path", ""),
                    features=item["features"],
                    confidence_level=item.get("confidence_level", 0.8)
                )
                outcomes.append({
                    "inventory_product_id": item.get("inventory_product_id"),
                    "prediction": predicted.get("prediction"),
                    "confidence": predicted.get("confidence"),
                    "success": True
                })
            except Exception as item_error:
                # One bad item must not abort the whole batch; record and move on.
                outcomes.append({
                    "inventory_product_id": item.get("inventory_product_id"),
                    "error": str(item_error),
                    "success": False
                })

        return {"predictions": outcomes, "total": len(outcomes)}

    except Exception as exc:
        logger.error("Batch predictions failed", error=str(exc), tenant_id=tenant_id)
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Batch predictions failed"
        )
|
||||
|
||||
|
||||
@router.post(
    route_builder.build_operations_route("validate-predictions")
)
async def validate_predictions(
    tenant_id: str = Path(..., description="Tenant ID"),
    start_date: date = Query(...),
    end_date: date = Query(...),
    enhanced_forecasting_service: EnhancedForecastingService = Depends(get_enhanced_forecasting_service)
):
    """Validate predictions against actual sales data"""
    try:
        logger.info("Validating predictions", tenant_id=tenant_id)
        # Return the service result directly; both dates are required.
        return await enhanced_forecasting_service.validate_predictions(
            tenant_id=tenant_id,
            start_date=start_date,
            end_date=end_date,
        )
    except Exception as exc:
        logger.error("Prediction validation failed", error=str(exc), tenant_id=tenant_id)
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Prediction validation failed"
        )
|
||||
|
||||
|
||||
@router.get(
    route_builder.build_operations_route("statistics")
)
async def get_forecast_statistics(
    tenant_id: str = Path(..., description="Tenant ID"),
    start_date: Optional[date] = Query(None),
    end_date: Optional[date] = Query(None),
    enhanced_forecasting_service: EnhancedForecastingService = Depends(get_enhanced_forecasting_service)
):
    """Get forecast statistics"""
    try:
        logger.info("Getting forecast statistics", tenant_id=tenant_id)
        # Optional date filters are forwarded as-is (None means "no filter").
        return await enhanced_forecasting_service.get_forecast_statistics(
            tenant_id=tenant_id,
            start_date=start_date,
            end_date=end_date,
        )
    except Exception as exc:
        logger.error("Failed to get forecast statistics", error=str(exc), tenant_id=tenant_id)
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Failed to retrieve forecast statistics"
        )
|
||||
|
||||
|
||||
@router.delete(
    route_builder.build_operations_route("cache")
)
async def clear_prediction_cache(
    tenant_id: str = Path(..., description="Tenant ID"),
    prediction_service: PredictionService = Depends(get_enhanced_prediction_service)
):
    """Clear prediction cache"""
    try:
        logger.info("Clearing prediction cache", tenant_id=tenant_id)
        await prediction_service.clear_cache(tenant_id=tenant_id)
    except Exception as exc:
        logger.error("Failed to clear prediction cache", error=str(exc), tenant_id=tenant_id)
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Failed to clear prediction cache"
        )
    return {"message": "Prediction cache cleared successfully"}
|
||||
@@ -1,444 +1,145 @@
|
||||
# services/forecasting/app/api/forecasts.py
|
||||
"""
|
||||
Enhanced Forecast API Endpoints with Repository Pattern
|
||||
Updated to use repository pattern with dependency injection and improved error handling
|
||||
Forecasts API - Atomic CRUD operations on Forecast model
|
||||
"""
|
||||
|
||||
import structlog
|
||||
from fastapi import APIRouter, Depends, HTTPException, status, Query, Path, Request
|
||||
from fastapi import APIRouter, Depends, HTTPException, status, Query, Path
|
||||
from typing import List, Optional
|
||||
from datetime import date, datetime
|
||||
import uuid
|
||||
|
||||
from app.services.forecasting_service import EnhancedForecastingService
|
||||
from app.schemas.forecasts import (
|
||||
ForecastRequest, ForecastResponse, BatchForecastRequest,
|
||||
BatchForecastResponse, MultiDayForecastResponse
|
||||
)
|
||||
from shared.auth.decorators import (
|
||||
get_current_user_dep,
|
||||
require_admin_role
|
||||
)
|
||||
from app.schemas.forecasts import ForecastResponse
|
||||
from shared.database.base import create_database_manager
|
||||
from shared.monitoring.decorators import track_execution_time
|
||||
from shared.monitoring.metrics import get_metrics_collector
|
||||
from app.core.config import settings
|
||||
from shared.routing import RouteBuilder
|
||||
|
||||
route_builder = RouteBuilder('forecasting')
|
||||
logger = structlog.get_logger()
|
||||
router = APIRouter(tags=["enhanced-forecasts"])
|
||||
router = APIRouter(tags=["forecasts"])
|
||||
|
||||
|
||||
def get_enhanced_forecasting_service():
    """Dependency injection for EnhancedForecastingService"""
    # Wire the service to this deployment's database in a single expression.
    return EnhancedForecastingService(
        create_database_manager(settings.DATABASE_URL, "forecasting-service")
    )
|
||||
|
||||
@router.post("/tenants/{tenant_id}/forecasts/single", response_model=ForecastResponse)
|
||||
@track_execution_time("enhanced_single_forecast_duration_seconds", "forecasting-service")
|
||||
async def create_enhanced_single_forecast(
|
||||
request: ForecastRequest,
|
||||
|
||||
@router.get(
|
||||
route_builder.build_base_route("forecasts"),
|
||||
response_model=List[ForecastResponse]
|
||||
)
|
||||
async def list_forecasts(
|
||||
tenant_id: str = Path(..., description="Tenant ID"),
|
||||
request_obj: Request = None,
|
||||
enhanced_forecasting_service: EnhancedForecastingService = Depends(get_enhanced_forecasting_service)
|
||||
):
|
||||
"""Generate a single product forecast using enhanced repository pattern"""
|
||||
metrics = get_metrics_collector(request_obj)
|
||||
|
||||
try:
|
||||
logger.info("Generating enhanced single forecast",
|
||||
tenant_id=tenant_id,
|
||||
inventory_product_id=request.inventory_product_id,
|
||||
forecast_date=request.forecast_date.isoformat())
|
||||
|
||||
# Record metrics
|
||||
if metrics:
|
||||
metrics.increment_counter("enhanced_single_forecasts_total")
|
||||
|
||||
# Generate forecast using enhanced service
|
||||
forecast = await enhanced_forecasting_service.generate_forecast(
|
||||
tenant_id=tenant_id,
|
||||
request=request
|
||||
)
|
||||
|
||||
if metrics:
|
||||
metrics.increment_counter("enhanced_single_forecasts_success_total")
|
||||
|
||||
logger.info("Enhanced single forecast generated successfully",
|
||||
tenant_id=tenant_id,
|
||||
forecast_id=forecast.id)
|
||||
|
||||
return forecast
|
||||
|
||||
except ValueError as e:
|
||||
if metrics:
|
||||
metrics.increment_counter("enhanced_forecast_validation_errors_total")
|
||||
logger.error("Enhanced forecast validation error",
|
||||
error=str(e),
|
||||
tenant_id=tenant_id)
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_400_BAD_REQUEST,
|
||||
detail=str(e)
|
||||
)
|
||||
except Exception as e:
|
||||
if metrics:
|
||||
metrics.increment_counter("enhanced_single_forecasts_errors_total")
|
||||
logger.error("Enhanced single forecast generation failed",
|
||||
error=str(e),
|
||||
tenant_id=tenant_id)
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail="Enhanced forecast generation failed"
|
||||
)
|
||||
|
||||
|
||||
@router.post("/tenants/{tenant_id}/forecasts/multi-day", response_model=MultiDayForecastResponse)
@track_execution_time("enhanced_multi_day_forecast_duration_seconds", "forecasting-service")
async def create_enhanced_multi_day_forecast(
    request: ForecastRequest,
    tenant_id: str = Path(..., description="Tenant ID"),
    request_obj: Request = None,
    enhanced_forecasting_service: EnhancedForecastingService = Depends(get_enhanced_forecasting_service)
):
    """Generate multiple daily forecasts for the specified period using enhanced repository pattern"""
    metrics = get_metrics_collector(request_obj)

    def _bump(counter_name):
        # Metrics collector may be absent (request_obj defaults to None).
        if metrics:
            metrics.increment_counter(counter_name)

    try:
        logger.info("Generating enhanced multi-day forecast",
                   tenant_id=tenant_id,
                   inventory_product_id=request.inventory_product_id,
                   forecast_days=request.forecast_days,
                   forecast_date=request.forecast_date.isoformat())
        _bump("enhanced_multi_day_forecasts_total")

        # Reject nonsensical horizons before calling the service.
        if request.forecast_days <= 0 or request.forecast_days > 30:
            raise ValueError("forecast_days must be between 1 and 30")

        forecast_result = await enhanced_forecasting_service.generate_multi_day_forecast(
            tenant_id=tenant_id, request=request
        )

        _bump("enhanced_multi_day_forecasts_success_total")
        logger.info("Enhanced multi-day forecast generated successfully",
                   tenant_id=tenant_id,
                   inventory_product_id=request.inventory_product_id,
                   forecast_days=len(forecast_result.get("forecasts", [])))

        # Service returns a plain dict; validate into the response model.
        return MultiDayForecastResponse(**forecast_result)

    except ValueError as e:
        _bump("enhanced_multi_day_forecast_validation_errors_total")
        logger.error("Enhanced multi-day forecast validation error",
                    error=str(e),
                    tenant_id=tenant_id)
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=str(e)
        )
    except Exception as e:
        _bump("enhanced_multi_day_forecasts_errors_total")
        logger.error("Enhanced multi-day forecast generation failed",
                    error=str(e),
                    tenant_id=tenant_id)
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Enhanced multi-day forecast generation failed"
        )
|
||||
|
||||
|
||||
@router.post("/tenants/{tenant_id}/forecasts/batch", response_model=BatchForecastResponse)
@track_execution_time("enhanced_batch_forecast_duration_seconds", "forecasting-service")
async def create_enhanced_batch_forecast(
    request: BatchForecastRequest,
    tenant_id: str = Path(..., description="Tenant ID"),
    request_obj: Request = None,
    enhanced_forecasting_service: EnhancedForecastingService = Depends(get_enhanced_forecasting_service)
):
    """Generate batch forecasts using enhanced repository pattern"""
    metrics = get_metrics_collector(request_obj)

    def _bump(counter_name):
        # Metrics collector may be absent (request_obj defaults to None).
        if metrics:
            metrics.increment_counter(counter_name)

    try:
        logger.info("Generating enhanced batch forecasts",
                   tenant_id=tenant_id,
                   products_count=len(request.inventory_product_ids),
                   forecast_dates_count=request.forecast_days)

        _bump("enhanced_batch_forecasts_total")
        if metrics:
            # Track batch size distribution alongside the request counter.
            metrics.histogram("enhanced_batch_forecast_products_count", len(request.inventory_product_ids))

        batch_result = await enhanced_forecasting_service.generate_batch_forecasts(
            tenant_id=tenant_id, request=request
        )

        _bump("enhanced_batch_forecasts_success_total")
        logger.info("Enhanced batch forecasts generated successfully",
                   tenant_id=tenant_id,
                   batch_id=batch_result.get("batch_id"),
                   forecasts_generated=len(batch_result.get("forecasts", [])))

        # Service returns a plain dict; validate into the response model.
        return BatchForecastResponse(**batch_result)

    except ValueError as e:
        _bump("enhanced_batch_forecast_validation_errors_total")
        logger.error("Enhanced batch forecast validation error",
                    error=str(e),
                    tenant_id=tenant_id)
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=str(e)
        )
    except Exception as e:
        _bump("enhanced_batch_forecasts_errors_total")
        logger.error("Enhanced batch forecast generation failed",
                    error=str(e),
                    tenant_id=tenant_id)
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Enhanced batch forecast generation failed"
        )
|
||||
|
||||
|
||||
@router.get("/tenants/{tenant_id}/forecasts")
|
||||
@track_execution_time("enhanced_get_forecasts_duration_seconds", "forecasting-service")
|
||||
async def get_enhanced_tenant_forecasts(
|
||||
tenant_id: str = Path(..., description="Tenant ID"),
|
||||
inventory_product_id: Optional[str] = Query(None, description="Filter by inventory product ID"),
|
||||
inventory_product_id: Optional[str] = Query(None, description="Filter by product ID"),
|
||||
start_date: Optional[date] = Query(None, description="Start date filter"),
|
||||
end_date: Optional[date] = Query(None, description="End date filter"),
|
||||
skip: int = Query(0, description="Number of records to skip"),
|
||||
limit: int = Query(100, description="Number of records to return"),
|
||||
request_obj: Request = None,
|
||||
limit: int = Query(50, ge=1, le=1000),
|
||||
offset: int = Query(0, ge=0),
|
||||
enhanced_forecasting_service: EnhancedForecastingService = Depends(get_enhanced_forecasting_service)
|
||||
):
|
||||
"""Get tenant forecasts with enhanced filtering using repository pattern"""
|
||||
metrics = get_metrics_collector(request_obj)
|
||||
|
||||
"""List forecasts with optional filters"""
|
||||
try:
|
||||
# Record metrics
|
||||
if metrics:
|
||||
metrics.increment_counter("enhanced_get_forecasts_total")
|
||||
|
||||
# Get forecasts using enhanced service
|
||||
forecasts = await enhanced_forecasting_service.get_tenant_forecasts(
|
||||
logger.info("Listing forecasts", tenant_id=tenant_id)
|
||||
|
||||
forecasts = await enhanced_forecasting_service.list_forecasts(
|
||||
tenant_id=tenant_id,
|
||||
inventory_product_id=inventory_product_id,
|
||||
start_date=start_date,
|
||||
end_date=end_date,
|
||||
skip=skip,
|
||||
limit=limit
|
||||
limit=limit,
|
||||
offset=offset
|
||||
)
|
||||
|
||||
if metrics:
|
||||
metrics.increment_counter("enhanced_get_forecasts_success_total")
|
||||
|
||||
return {
|
||||
"tenant_id": tenant_id,
|
||||
"forecasts": forecasts,
|
||||
"total_returned": len(forecasts),
|
||||
"filters": {
|
||||
"inventory_product_id": inventory_product_id,
|
||||
"start_date": start_date.isoformat() if start_date else None,
|
||||
"end_date": end_date.isoformat() if end_date else None
|
||||
},
|
||||
"pagination": {
|
||||
"skip": skip,
|
||||
"limit": limit
|
||||
},
|
||||
"enhanced_features": True,
|
||||
"repository_integration": True
|
||||
}
|
||||
|
||||
|
||||
return forecasts
|
||||
|
||||
except Exception as e:
|
||||
if metrics:
|
||||
metrics.increment_counter("enhanced_get_forecasts_errors_total")
|
||||
logger.error("Failed to get enhanced tenant forecasts",
|
||||
tenant_id=tenant_id,
|
||||
error=str(e))
|
||||
logger.error("Failed to list forecasts", error=str(e), tenant_id=tenant_id)
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail="Failed to get tenant forecasts"
|
||||
detail="Failed to retrieve forecasts"
|
||||
)
|
||||
|
||||
|
||||
|
||||
|
||||
# NOTE(review): this span contained unresolved merge-conflict residue — the
# legacy handler (get_enhanced_forecast_by_id with metrics/track_execution_time)
# interleaved with the refactored one. Resolved in favor of the refactored
# route_builder version, consistent with the rest of this commit.
@router.get(
    route_builder.build_resource_detail_route("forecasts", "forecast_id"),
    response_model=ForecastResponse
)
async def get_forecast(
    tenant_id: str = Path(..., description="Tenant ID"),
    forecast_id: str = Path(..., description="Forecast ID"),
    enhanced_forecasting_service: EnhancedForecastingService = Depends(get_enhanced_forecasting_service)
):
    """Get a specific forecast by ID.

    Responses:
        200: the forecast payload.
        400: ``forecast_id`` is not a valid UUID.
        404: no forecast with this ID for the tenant.
        500: unexpected service failure.
    """
    try:
        logger.info("Getting forecast", tenant_id=tenant_id, forecast_id=forecast_id)

        # uuid.UUID(...) raises ValueError for malformed IDs; mapped to 400 below.
        forecast = await enhanced_forecasting_service.get_forecast(
            tenant_id=tenant_id,
            forecast_id=uuid.UUID(forecast_id)
        )

        if not forecast:
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND,
                detail="Forecast not found"
            )

        return forecast

    except HTTPException:
        # Re-raise untouched so the 404 above is not converted into a 500.
        raise
    except ValueError as e:
        logger.error("Invalid forecast ID", error=str(e))
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail="Invalid forecast ID format"
        )
    except Exception as e:
        logger.error("Failed to get forecast", error=str(e), tenant_id=tenant_id)
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Failed to retrieve forecast"
        )
|
||||
|
||||
|
||||
# NOTE(review): unresolved merge-conflict residue (legacy delete_enhanced_forecast
# with metrics interleaved with the refactored handler) resolved in favor of the
# refactored route_builder version, consistent with the rest of this commit.
@router.delete(
    route_builder.build_resource_detail_route("forecasts", "forecast_id")
)
async def delete_forecast(
    tenant_id: str = Path(..., description="Tenant ID"),
    forecast_id: str = Path(..., description="Forecast ID"),
    enhanced_forecasting_service: EnhancedForecastingService = Depends(get_enhanced_forecasting_service)
):
    """Delete a specific forecast.

    Responses:
        200: ``{"message": "Forecast deleted successfully"}``.
        400: ``forecast_id`` is not a valid UUID.
        404: no forecast with this ID for the tenant.
        500: unexpected service failure.
    """
    try:
        logger.info("Deleting forecast", tenant_id=tenant_id, forecast_id=forecast_id)

        # uuid.UUID(...) raises ValueError for malformed IDs; mapped to 400 below.
        success = await enhanced_forecasting_service.delete_forecast(
            tenant_id=tenant_id,
            forecast_id=uuid.UUID(forecast_id)
        )

        if not success:
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND,
                detail="Forecast not found"
            )

        return {"message": "Forecast deleted successfully"}

    except HTTPException:
        # Re-raise untouched so the 404 above is not converted into a 500.
        raise
    except ValueError as e:
        logger.error("Invalid forecast ID", error=str(e))
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail="Invalid forecast ID format"
        )
    except Exception as e:
        logger.error("Failed to delete forecast", error=str(e), tenant_id=tenant_id)
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Failed to delete forecast"
        )
|
||||
|
||||
|
||||
# NOTE(review): this static path shares the prefix of the "{forecast_id}" detail
# route. FastAPI matches routes in registration order, so if the detail route is
# registered first, the literal "statistics" segment is captured as a
# forecast_id — confirm registration order or register this route earlier.
@router.get("/tenants/{tenant_id}/forecasts/statistics")
@track_execution_time("enhanced_forecast_statistics_duration_seconds", "forecasting-service")
async def get_enhanced_forecast_statistics(
    tenant_id: str = Path(..., description="Tenant ID"),
    request_obj: Request = None,
    enhanced_forecasting_service: EnhancedForecastingService = Depends(get_enhanced_forecasting_service)
):
    """Get comprehensive forecast statistics using enhanced repository pattern.

    Returns the service's statistics payload augmented with feature flags;
    maps a service-reported ``error`` key to a 500 response.
    """
    metrics = get_metrics_collector(request_obj)

    try:
        # Record metrics (collector may be absent, e.g. in tests).
        if metrics:
            metrics.increment_counter("enhanced_forecast_statistics_total")

        # Get statistics using enhanced service
        statistics = await enhanced_forecasting_service.get_tenant_forecast_statistics(tenant_id)

        # The service signals failure in-band via an "error" key.
        if statistics.get("error"):
            raise HTTPException(
                status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
                detail=statistics["error"]
            )

        if metrics:
            metrics.increment_counter("enhanced_forecast_statistics_success_total")

        return {
            **statistics,
            "enhanced_features": True,
            "repository_integration": True
        }

    except HTTPException:
        # Re-raise untouched so the in-band error above keeps its detail.
        raise
    except Exception as e:
        if metrics:
            metrics.increment_counter("enhanced_forecast_statistics_errors_total")
        logger.error("Failed to get enhanced forecast statistics",
                     tenant_id=tenant_id,
                     error=str(e))
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Failed to get forecast statistics"
        )
|
||||
|
||||
|
||||
@router.get("/health")
async def enhanced_health_check():
    """Enhanced health check endpoint for the forecasting service.

    Always reports healthy; the feature list is a static capability
    advertisement, not a live probe of the listed subsystems.
    """
    return {
        "status": "healthy",
        "service": "enhanced-forecasting-service",
        "version": "2.0.0",
        "features": [
            "repository-pattern",
            "dependency-injection",
            "enhanced-error-handling",
            "metrics-tracking",
            "transactional-operations",
            "batch-processing"
        ],
        # NOTE(review): naive local time — consider datetime.now(timezone.utc)
        # for an unambiguous timestamp.
        "timestamp": datetime.now().isoformat()
    }
|
||||
@@ -1,413 +0,0 @@
|
||||
"""
|
||||
Enhanced Predictions API Endpoints with Repository Pattern
|
||||
Real-time prediction capabilities using repository pattern with dependency injection
|
||||
"""
|
||||
|
||||
import structlog
|
||||
from fastapi import APIRouter, Depends, HTTPException, status, Query, Path, Request
|
||||
from typing import List, Dict, Any, Optional
|
||||
from datetime import date, datetime, timedelta
|
||||
import uuid
|
||||
|
||||
from app.services.prediction_service import PredictionService
|
||||
from app.services.forecasting_service import EnhancedForecastingService
|
||||
from app.schemas.forecasts import ForecastRequest
|
||||
from shared.auth.decorators import (
|
||||
get_current_user_dep,
|
||||
require_admin_role
|
||||
)
|
||||
from shared.database.base import create_database_manager
|
||||
from shared.monitoring.decorators import track_execution_time
|
||||
from shared.monitoring.metrics import get_metrics_collector
|
||||
from app.core.config import settings
|
||||
|
||||
logger = structlog.get_logger()
|
||||
router = APIRouter(tags=["enhanced-predictions"])
|
||||
|
||||
def get_enhanced_prediction_service():
    """Dependency injection for enhanced PredictionService.

    NOTE(review): builds a fresh database manager on every call (i.e. per
    request when used as a FastAPI dependency) — confirm that
    create_database_manager pools/caches connections.
    """
    database_manager = create_database_manager(settings.DATABASE_URL, "forecasting-service")
    return PredictionService(database_manager)
|
||||
|
||||
def get_enhanced_forecasting_service():
    """Dependency injection for EnhancedForecastingService.

    NOTE(review): builds a fresh database manager on every call (i.e. per
    request when used as a FastAPI dependency) — confirm that
    create_database_manager pools/caches connections.
    """
    database_manager = create_database_manager(settings.DATABASE_URL, "forecasting-service")
    return EnhancedForecastingService(database_manager)
|
||||
|
||||
@router.post("/tenants/{tenant_id}/predictions/realtime")
@track_execution_time("enhanced_realtime_prediction_duration_seconds", "forecasting-service")
async def generate_enhanced_realtime_prediction(
    prediction_request: Dict[str, Any],
    tenant_id: str = Path(..., description="Tenant ID"),
    request_obj: Request = None,
    prediction_service: PredictionService = Depends(get_enhanced_prediction_service)
):
    """Generate real-time prediction using enhanced repository pattern.

    Body must contain ``inventory_product_id``, ``model_id`` and ``features``;
    ``model_path`` and ``confidence_level`` (default 0.8) are optional.

    Responses:
        400: missing required fields or service-level validation error.
        500: unexpected prediction failure.
    """
    metrics = get_metrics_collector(request_obj)

    try:
        logger.info("Generating enhanced real-time prediction",
                    tenant_id=tenant_id,
                    inventory_product_id=prediction_request.get("inventory_product_id"))

        # Record metrics (collector may be absent, e.g. in tests).
        if metrics:
            metrics.increment_counter("enhanced_realtime_predictions_total")

        # Validate required fields
        required_fields = ["inventory_product_id", "model_id", "features"]
        missing_fields = [field for field in required_fields if field not in prediction_request]
        if missing_fields:
            raise HTTPException(
                status_code=status.HTTP_400_BAD_REQUEST,
                detail=f"Missing required fields: {missing_fields}"
            )

        # Generate prediction using enhanced service
        prediction_result = await prediction_service.predict(
            model_id=prediction_request["model_id"],
            model_path=prediction_request.get("model_path", ""),
            features=prediction_request["features"],
            confidence_level=prediction_request.get("confidence_level", 0.8)
        )

        if metrics:
            metrics.increment_counter("enhanced_realtime_predictions_success_total")

        logger.info("Enhanced real-time prediction generated successfully",
                    tenant_id=tenant_id,
                    prediction_value=prediction_result.get("prediction"))

        return {
            "tenant_id": tenant_id,
            "inventory_product_id": prediction_request["inventory_product_id"],
            "model_id": prediction_request["model_id"],
            "prediction": prediction_result,
            "generated_at": datetime.now().isoformat(),
            "enhanced_features": True,
            "repository_integration": True
        }

    except HTTPException:
        # BUGFIX: the 400 raised above was previously swallowed by the broad
        # `except Exception` below and re-raised as a generic 500. Re-raise
        # HTTP errors untouched so clients see the intended status and detail.
        raise
    except ValueError as e:
        if metrics:
            metrics.increment_counter("enhanced_prediction_validation_errors_total")
        logger.error("Enhanced prediction validation error",
                     error=str(e),
                     tenant_id=tenant_id)
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=str(e)
        )
    except Exception as e:
        if metrics:
            metrics.increment_counter("enhanced_realtime_predictions_errors_total")
        logger.error("Enhanced real-time prediction failed",
                     error=str(e),
                     tenant_id=tenant_id)
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Enhanced real-time prediction failed"
        )
|
||||
|
||||
|
||||
@router.post("/tenants/{tenant_id}/predictions/batch")
@track_execution_time("enhanced_batch_prediction_duration_seconds", "forecasting-service")
async def generate_enhanced_batch_predictions(
    batch_request: Dict[str, Any],
    tenant_id: str = Path(..., description="Tenant ID"),
    request_obj: Request = None,
    enhanced_forecasting_service: EnhancedForecastingService = Depends(get_enhanced_forecasting_service)
):
    """Generate batch predictions using enhanced repository pattern.

    Body must contain a non-empty ``predictions`` array; the service result is
    returned augmented with feature flags.

    Responses:
        400: missing/empty ``predictions`` or service-level validation error.
        500: unexpected batch failure.
    """
    metrics = get_metrics_collector(request_obj)

    try:
        logger.info("Generating enhanced batch predictions",
                    tenant_id=tenant_id,
                    predictions_count=len(batch_request.get("predictions", [])))

        # Record metrics (collector may be absent, e.g. in tests).
        if metrics:
            metrics.increment_counter("enhanced_batch_predictions_total")
            metrics.histogram("enhanced_batch_predictions_count", len(batch_request.get("predictions", [])))

        # Validate batch request
        if "predictions" not in batch_request or not batch_request["predictions"]:
            raise HTTPException(
                status_code=status.HTTP_400_BAD_REQUEST,
                detail="Batch request must contain 'predictions' array"
            )

        # Generate batch predictions using enhanced service
        batch_result = await enhanced_forecasting_service.generate_batch_predictions(
            tenant_id=tenant_id,
            batch_request=batch_request
        )

        if metrics:
            metrics.increment_counter("enhanced_batch_predictions_success_total")

        logger.info("Enhanced batch predictions generated successfully",
                    tenant_id=tenant_id,
                    batch_id=batch_result.get("batch_id"),
                    predictions_generated=len(batch_result.get("predictions", [])))

        return {
            **batch_result,
            "enhanced_features": True,
            "repository_integration": True
        }

    except HTTPException:
        # BUGFIX: the 400 raised above was previously swallowed by the broad
        # `except Exception` below and re-raised as a generic 500. Re-raise
        # HTTP errors untouched so clients see the intended status and detail.
        raise
    except ValueError as e:
        if metrics:
            metrics.increment_counter("enhanced_batch_prediction_validation_errors_total")
        logger.error("Enhanced batch prediction validation error",
                     error=str(e),
                     tenant_id=tenant_id)
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=str(e)
        )
    except Exception as e:
        if metrics:
            metrics.increment_counter("enhanced_batch_predictions_errors_total")
        logger.error("Enhanced batch predictions failed",
                     error=str(e),
                     tenant_id=tenant_id)
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Enhanced batch predictions failed"
        )
|
||||
|
||||
|
||||
@router.get("/tenants/{tenant_id}/predictions/cache")
@track_execution_time("enhanced_get_prediction_cache_duration_seconds", "forecasting-service")
async def get_enhanced_prediction_cache(
    tenant_id: str = Path(..., description="Tenant ID"),
    inventory_product_id: Optional[str] = Query(None, description="Filter by inventory product ID"),
    skip: int = Query(0, description="Number of records to skip"),
    limit: int = Query(100, description="Number of records to return"),
    request_obj: Request = None,
    enhanced_forecasting_service: EnhancedForecastingService = Depends(get_enhanced_forecasting_service)
):
    """Get cached predictions using enhanced repository pattern.

    Returns the cached entries with the applied filters and pagination echoed
    back in the response envelope.
    """
    metrics = get_metrics_collector(request_obj)

    try:
        # Record metrics (collector may be absent, e.g. in tests).
        if metrics:
            metrics.increment_counter("enhanced_get_prediction_cache_total")

        # Get cached predictions using enhanced service
        cached_predictions = await enhanced_forecasting_service.get_cached_predictions(
            tenant_id=tenant_id,
            inventory_product_id=inventory_product_id,
            skip=skip,
            limit=limit
        )

        if metrics:
            metrics.increment_counter("enhanced_get_prediction_cache_success_total")

        return {
            "tenant_id": tenant_id,
            "cached_predictions": cached_predictions,
            "total_returned": len(cached_predictions),
            "filters": {
                "inventory_product_id": inventory_product_id
            },
            "pagination": {
                "skip": skip,
                "limit": limit
            },
            "enhanced_features": True,
            "repository_integration": True
        }

    except Exception as e:
        if metrics:
            metrics.increment_counter("enhanced_get_prediction_cache_errors_total")
        logger.error("Failed to get enhanced prediction cache",
                     tenant_id=tenant_id,
                     error=str(e))
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Failed to get prediction cache"
        )
|
||||
|
||||
|
||||
@router.delete("/tenants/{tenant_id}/predictions/cache")
@track_execution_time("enhanced_clear_prediction_cache_duration_seconds", "forecasting-service")
async def clear_enhanced_prediction_cache(
    tenant_id: str = Path(..., description="Tenant ID"),
    inventory_product_id: Optional[str] = Query(None, description="Clear cache for specific inventory product ID"),
    request_obj: Request = None,
    enhanced_forecasting_service: EnhancedForecastingService = Depends(get_enhanced_forecasting_service)
):
    """Clear prediction cache using enhanced repository pattern.

    Clears the whole tenant cache, or only entries for
    ``inventory_product_id`` when given; reports how many entries were cleared.
    """
    metrics = get_metrics_collector(request_obj)

    try:
        # Record metrics (collector may be absent, e.g. in tests).
        if metrics:
            metrics.increment_counter("enhanced_clear_prediction_cache_total")

        # Clear cache using enhanced service
        cleared_count = await enhanced_forecasting_service.clear_prediction_cache(
            tenant_id=tenant_id,
            inventory_product_id=inventory_product_id
        )

        if metrics:
            metrics.increment_counter("enhanced_clear_prediction_cache_success_total")
            metrics.histogram("enhanced_cache_cleared_count", cleared_count)

        logger.info("Enhanced prediction cache cleared",
                    tenant_id=tenant_id,
                    inventory_product_id=inventory_product_id,
                    cleared_count=cleared_count)

        return {
            "message": "Prediction cache cleared successfully",
            "tenant_id": tenant_id,
            "inventory_product_id": inventory_product_id,
            "cleared_count": cleared_count,
            "enhanced_features": True,
            "repository_integration": True
        }

    except Exception as e:
        if metrics:
            metrics.increment_counter("enhanced_clear_prediction_cache_errors_total")
        logger.error("Failed to clear enhanced prediction cache",
                     tenant_id=tenant_id,
                     error=str(e))
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Failed to clear prediction cache"
        )
|
||||
|
||||
|
||||
@router.get("/tenants/{tenant_id}/predictions/performance")
@track_execution_time("enhanced_get_prediction_performance_duration_seconds", "forecasting-service")
async def get_enhanced_prediction_performance(
    tenant_id: str = Path(..., description="Tenant ID"),
    model_id: Optional[str] = Query(None, description="Filter by model ID"),
    start_date: Optional[date] = Query(None, description="Start date filter"),
    end_date: Optional[date] = Query(None, description="End date filter"),
    request_obj: Request = None,
    enhanced_forecasting_service: EnhancedForecastingService = Depends(get_enhanced_forecasting_service)
):
    """Get prediction performance metrics using enhanced repository pattern.

    Optional model/date filters are forwarded to the service and echoed back
    (dates ISO-formatted) in the response envelope.
    """
    metrics = get_metrics_collector(request_obj)

    try:
        # Record metrics (collector may be absent, e.g. in tests).
        if metrics:
            metrics.increment_counter("enhanced_get_prediction_performance_total")

        # Get performance metrics using enhanced service
        performance = await enhanced_forecasting_service.get_prediction_performance(
            tenant_id=tenant_id,
            model_id=model_id,
            start_date=start_date,
            end_date=end_date
        )

        if metrics:
            metrics.increment_counter("enhanced_get_prediction_performance_success_total")

        return {
            "tenant_id": tenant_id,
            "performance_metrics": performance,
            "filters": {
                "model_id": model_id,
                "start_date": start_date.isoformat() if start_date else None,
                "end_date": end_date.isoformat() if end_date else None
            },
            "enhanced_features": True,
            "repository_integration": True
        }

    except Exception as e:
        if metrics:
            metrics.increment_counter("enhanced_get_prediction_performance_errors_total")
        logger.error("Failed to get enhanced prediction performance",
                     tenant_id=tenant_id,
                     error=str(e))
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Failed to get prediction performance"
        )
|
||||
|
||||
|
||||
@router.post("/tenants/{tenant_id}/predictions/validate")
@track_execution_time("enhanced_validate_prediction_duration_seconds", "forecasting-service")
async def validate_enhanced_prediction_request(
    validation_request: Dict[str, Any],
    tenant_id: str = Path(..., description="Tenant ID"),
    request_obj: Request = None,
    prediction_service: PredictionService = Depends(get_enhanced_prediction_service)
):
    """Validate prediction request without generating prediction.

    Returns the service's validation verdict; success/failure counters are
    split on the result's ``is_valid`` flag.
    """
    metrics = get_metrics_collector(request_obj)

    try:
        # Record metrics (collector may be absent, e.g. in tests).
        if metrics:
            metrics.increment_counter("enhanced_validate_prediction_total")

        # Validate prediction request
        validation_result = await prediction_service.validate_prediction_request(
            validation_request
        )

        if metrics:
            if validation_result.get("is_valid"):
                metrics.increment_counter("enhanced_validate_prediction_success_total")
            else:
                metrics.increment_counter("enhanced_validate_prediction_failed_total")

        return {
            "tenant_id": tenant_id,
            "validation_result": validation_result,
            "enhanced_features": True,
            "repository_integration": True
        }

    except Exception as e:
        if metrics:
            metrics.increment_counter("enhanced_validate_prediction_errors_total")
        logger.error("Failed to validate enhanced prediction request",
                     tenant_id=tenant_id,
                     error=str(e))
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Failed to validate prediction request"
        )
|
||||
|
||||
|
||||
@router.get("/health")
async def enhanced_predictions_health_check():
    """Enhanced health check endpoint for predictions.

    Always reports healthy; the feature list is a static capability
    advertisement, not a live probe of the listed subsystems.
    """
    return {
        "status": "healthy",
        "service": "enhanced-predictions-service",
        "version": "2.0.0",
        "features": [
            "repository-pattern",
            "dependency-injection",
            "realtime-predictions",
            "batch-predictions",
            "prediction-caching",
            "performance-metrics",
            "request-validation"
        ],
        # NOTE(review): naive local time — consider datetime.now(timezone.utc)
        # for an unambiguous timestamp.
        "timestamp": datetime.now().isoformat()
    }
|
||||
Reference in New Issue
Block a user