Fix new services implementation 3

Urtzi Alfaro
2025-08-14 16:47:34 +02:00
parent 0951547e92
commit 03737430ee
51 changed files with 657 additions and 982 deletions


@@ -59,14 +59,14 @@ async def generate_enhanced_realtime_prediction(
logger.info("Generating enhanced real-time prediction",
tenant_id=tenant_id,
- product_name=prediction_request.get("product_name"))
+ inventory_product_id=prediction_request.get("inventory_product_id"))
# Record metrics
if metrics:
metrics.increment_counter("enhanced_realtime_predictions_total")
# Validate required fields
required_fields = ["product_name", "model_id", "features"]
required_fields = ["inventory_product_id", "model_id", "features"]
missing_fields = [field for field in required_fields if field not in prediction_request]
if missing_fields:
raise HTTPException(
@@ -91,7 +91,7 @@ async def generate_enhanced_realtime_prediction(
return {
"tenant_id": tenant_id,
"product_name": prediction_request["product_name"],
"inventory_product_id": prediction_request["inventory_product_id"],
"model_id": prediction_request["model_id"],
"prediction": prediction_result,
"generated_at": datetime.now().isoformat(),
@@ -205,7 +205,7 @@ async def generate_enhanced_batch_predictions(
@track_execution_time("enhanced_get_prediction_cache_duration_seconds", "forecasting-service")
async def get_enhanced_prediction_cache(
tenant_id: str = Path(..., description="Tenant ID"),
- product_name: Optional[str] = Query(None, description="Filter by product name"),
+ inventory_product_id: Optional[str] = Query(None, description="Filter by inventory product ID"),
skip: int = Query(0, description="Number of records to skip"),
limit: int = Query(100, description="Number of records to return"),
request_obj: Request = None,
@@ -232,7 +232,7 @@ async def get_enhanced_prediction_cache(
# Get cached predictions using enhanced service
cached_predictions = await enhanced_forecasting_service.get_cached_predictions(
tenant_id=tenant_id,
- product_name=product_name,
+ inventory_product_id=inventory_product_id,
skip=skip,
limit=limit
)
@@ -245,7 +245,7 @@ async def get_enhanced_prediction_cache(
"cached_predictions": cached_predictions,
"total_returned": len(cached_predictions),
"filters": {
"product_name": product_name
"inventory_product_id": inventory_product_id
},
"pagination": {
"skip": skip,
@@ -271,7 +271,7 @@ async def get_enhanced_prediction_cache(
@track_execution_time("enhanced_clear_prediction_cache_duration_seconds", "forecasting-service")
async def clear_enhanced_prediction_cache(
tenant_id: str = Path(..., description="Tenant ID"),
- product_name: Optional[str] = Query(None, description="Clear cache for specific product"),
+ inventory_product_id: Optional[str] = Query(None, description="Clear cache for specific inventory product ID"),
request_obj: Request = None,
current_tenant: str = Depends(get_current_tenant_id_dep),
enhanced_forecasting_service: EnhancedForecastingService = Depends(get_enhanced_forecasting_service)
@@ -296,7 +296,7 @@ async def clear_enhanced_prediction_cache(
# Clear cache using enhanced service
cleared_count = await enhanced_forecasting_service.clear_prediction_cache(
tenant_id=tenant_id,
- product_name=product_name
+ inventory_product_id=inventory_product_id
)
if metrics:
@@ -305,13 +305,13 @@ async def clear_enhanced_prediction_cache(
logger.info("Enhanced prediction cache cleared",
tenant_id=tenant_id,
- product_name=product_name,
+ inventory_product_id=inventory_product_id,
cleared_count=cleared_count)
return {
"message": "Prediction cache cleared successfully",
"tenant_id": tenant_id,
"product_name": product_name,
"inventory_product_id": inventory_product_id,
"cleared_count": cleared_count,
"enhanced_features": True,
"repository_integration": True