Fix new services implementation 3

Urtzi Alfaro
2025-08-14 16:47:34 +02:00
parent 0951547e92
commit 03737430ee
51 changed files with 657 additions and 982 deletions

View File

@@ -56,7 +56,7 @@ async def create_enhanced_single_forecast(

     logger.info("Generating enhanced single forecast",
                 tenant_id=tenant_id,
-                product_name=request.product_name,
+                inventory_product_id=request.inventory_product_id,
                 forecast_date=request.forecast_date.isoformat())

     # Record metrics
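The single-forecast request now carries the inventory identifier instead of a display name. A minimal sketch of the implied request model; the class name and anything not visible in this hunk are assumptions:

```python
# Hypothetical model implied by this hunk; only inventory_product_id and
# forecast_date are confirmed by the diff, the rest is assumed.
from datetime import date
from pydantic import BaseModel

class SingleForecastRequest(BaseModel):
    inventory_product_id: str  # replaces the former product_name field
    forecast_date: date        # serialized via .isoformat() when logged
```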
@@ -124,13 +124,13 @@ async def create_enhanced_batch_forecast(

     logger.info("Generating enhanced batch forecasts",
                 tenant_id=tenant_id,
-                products_count=len(request.products),
-                forecast_dates_count=len(request.forecast_dates))
+                products_count=len(request.inventory_product_ids),
+                forecast_dates_count=request.forecast_days)

     # Record metrics
     if metrics:
         metrics.increment_counter("enhanced_batch_forecasts_total")
-        metrics.histogram("enhanced_batch_forecast_products_count", len(request.products))
+        metrics.histogram("enhanced_batch_forecast_products_count", len(request.inventory_product_ids))

     # Generate batch forecasts using enhanced service
     batch_result = await enhanced_forecasting_service.generate_batch_forecasts(
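The batch request changes shape here as well: a list of inventory IDs plus an integer horizon replaces the old `products` and `forecast_dates` lists. A hedged sketch of the implied model (class name assumed):

```python
# Implied batch request shape; the class name is an assumption.
from typing import List
from pydantic import BaseModel

class BatchForecastRequest(BaseModel):
    inventory_product_ids: List[str]  # was: products
    forecast_days: int                # was: forecast_dates (explicit date list)
```

One side effect worth noting: `forecast_dates_count` is now logged as the integer `request.forecast_days` rather than a list length, so the log field name no longer matches what it records.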
@@ -174,7 +174,7 @@ async def create_enhanced_batch_forecast(
 @track_execution_time("enhanced_get_forecasts_duration_seconds", "forecasting-service")
 async def get_enhanced_tenant_forecasts(
     tenant_id: str = Path(..., description="Tenant ID"),
-    product_name: Optional[str] = Query(None, description="Filter by product name"),
+    inventory_product_id: Optional[str] = Query(None, description="Filter by inventory product ID"),
     start_date: Optional[date] = Query(None, description="Start date filter"),
     end_date: Optional[date] = Query(None, description="End date filter"),
     skip: int = Query(0, description="Number of records to skip"),
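Callers filtering this endpoint must switch query parameters. An illustrative client call; the host and route are assumptions inferred from the handler name:

```python
# Illustrative request; only the query parameter names come from the
# signature above, the URL is assumed.
import httpx

response = httpx.get(
    "http://localhost:8000/tenants/tenant-123/forecasts",
    params={
        "inventory_product_id": "inv-42",  # replaces product_name
        "start_date": "2025-08-01",
        "end_date": "2025-08-14",
        "skip": 0,
        "limit": 50,
    },
)
```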
@@ -203,7 +203,7 @@ async def get_enhanced_tenant_forecasts(
     # Get forecasts using enhanced service
     forecasts = await enhanced_forecasting_service.get_tenant_forecasts(
         tenant_id=tenant_id,
-        product_name=product_name,
+        inventory_product_id=inventory_product_id,
         start_date=start_date,
         end_date=end_date,
         skip=skip,
@@ -218,7 +218,7 @@ async def get_enhanced_tenant_forecasts(
         "forecasts": forecasts,
         "total_returned": len(forecasts),
         "filters": {
-            "product_name": product_name,
+            "inventory_product_id": inventory_product_id,
             "start_date": start_date.isoformat() if start_date else None,
             "end_date": end_date.isoformat() if end_date else None
         },

View File

@@ -59,14 +59,14 @@ async def generate_enhanced_realtime_prediction(

     logger.info("Generating enhanced real-time prediction",
                 tenant_id=tenant_id,
-                product_name=prediction_request.get("product_name"))
+                inventory_product_id=prediction_request.get("inventory_product_id"))

     # Record metrics
     if metrics:
         metrics.increment_counter("enhanced_realtime_predictions_total")

     # Validate required fields
-    required_fields = ["product_name", "model_id", "features"]
+    required_fields = ["inventory_product_id", "model_id", "features"]
     missing_fields = [field for field in required_fields if field not in prediction_request]
     if missing_fields:
         raise HTTPException(
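Under the updated validation, a minimal payload that passes the required-fields check looks roughly like this; the contents of `features` and the ID values are illustrative:

```python
# Minimal payload satisfying the required-fields check above; all values
# are illustrative assumptions.
prediction_request = {
    "inventory_product_id": "inv-42",   # was: product_name
    "model_id": "model-2025-08",
    "features": {"day_of_week": 4, "is_holiday": False},
}

required_fields = ["inventory_product_id", "model_id", "features"]
missing_fields = [f for f in required_fields if f not in prediction_request]
assert not missing_fields  # an omission here triggers the HTTPException above
```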
@@ -91,7 +91,7 @@ async def generate_enhanced_realtime_prediction(

     return {
         "tenant_id": tenant_id,
-        "product_name": prediction_request["product_name"],
+        "inventory_product_id": prediction_request["inventory_product_id"],
         "model_id": prediction_request["model_id"],
         "prediction": prediction_result,
         "generated_at": datetime.now().isoformat(),
@@ -205,7 +205,7 @@ async def generate_enhanced_batch_predictions(
 @track_execution_time("enhanced_get_prediction_cache_duration_seconds", "forecasting-service")
 async def get_enhanced_prediction_cache(
     tenant_id: str = Path(..., description="Tenant ID"),
-    product_name: Optional[str] = Query(None, description="Filter by product name"),
+    inventory_product_id: Optional[str] = Query(None, description="Filter by inventory product ID"),
     skip: int = Query(0, description="Number of records to skip"),
     limit: int = Query(100, description="Number of records to return"),
     request_obj: Request = None,
@@ -232,7 +232,7 @@ async def get_enhanced_prediction_cache(
     # Get cached predictions using enhanced service
     cached_predictions = await enhanced_forecasting_service.get_cached_predictions(
         tenant_id=tenant_id,
-        product_name=product_name,
+        inventory_product_id=inventory_product_id,
         skip=skip,
         limit=limit
     )
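The same rename applies on the cache-lookup path. A sketch of the updated service call from an async context; only the keyword names are confirmed by the diff:

```python
# Sketch of the updated service call; in the handler the service object is
# injected via Depends, here it is just a parameter.
async def fetch_cached_predictions(service, tenant_id: str):
    return await service.get_cached_predictions(
        tenant_id=tenant_id,
        inventory_product_id="inv-42",  # was: product_name
        skip=0,
        limit=100,
    )
```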
@@ -245,7 +245,7 @@ async def get_enhanced_prediction_cache(
         "cached_predictions": cached_predictions,
         "total_returned": len(cached_predictions),
         "filters": {
-            "product_name": product_name
+            "inventory_product_id": inventory_product_id
         },
         "pagination": {
             "skip": skip,
@@ -271,7 +271,7 @@ async def get_enhanced_prediction_cache(
 @track_execution_time("enhanced_clear_prediction_cache_duration_seconds", "forecasting-service")
 async def clear_enhanced_prediction_cache(
     tenant_id: str = Path(..., description="Tenant ID"),
-    product_name: Optional[str] = Query(None, description="Clear cache for specific product"),
+    inventory_product_id: Optional[str] = Query(None, description="Clear cache for specific inventory product ID"),
     request_obj: Request = None,
     current_tenant: str = Depends(get_current_tenant_id_dep),
     enhanced_forecasting_service: EnhancedForecastingService = Depends(get_enhanced_forecasting_service)
@@ -296,7 +296,7 @@ async def clear_enhanced_prediction_cache(
     # Clear cache using enhanced service
     cleared_count = await enhanced_forecasting_service.clear_prediction_cache(
         tenant_id=tenant_id,
-        product_name=product_name
+        inventory_product_id=inventory_product_id
     )

     if metrics:
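Cache invalidation is now scoped by inventory ID. Since the query parameter above is optional, the service presumably supports both a scoped and a tenant-wide clear; a sketch of the two implied invocations (the None-clears-all behavior is an assumption, not confirmed by the diff):

```python
# Implied usage of clear_prediction_cache; None-as-clear-all is assumed
# from the Optional query parameter above.
async def clear_cache_examples(service):
    one_product = await service.clear_prediction_cache(
        tenant_id="tenant-123",
        inventory_product_id="inv-42",  # clear a single product's entries
    )
    whole_tenant = await service.clear_prediction_cache(
        tenant_id="tenant-123",
        inventory_product_id=None,      # assumed: clears every cached entry
    )
    return one_product, whole_tenant
```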
@@ -305,13 +305,13 @@ async def clear_enhanced_prediction_cache(

     logger.info("Enhanced prediction cache cleared",
                 tenant_id=tenant_id,
-                product_name=product_name,
+                inventory_product_id=inventory_product_id,
                 cleared_count=cleared_count)

     return {
         "message": "Prediction cache cleared successfully",
         "tenant_id": tenant_id,
-        "product_name": product_name,
+        "inventory_product_id": inventory_product_id,
         "cleared_count": cleared_count,
         "enhanced_features": True,
         "repository_integration": True