Add forecasting demand insights trigger + fix RabbitMQ cleanup

Issue 1: Forecasting demand insights not triggered in demo workflow
- Created internal ML endpoint: /forecasting/internal/ml/generate-demand-insights
- Added trigger_demand_insights_internal() to ForecastServiceClient (client sketch below)
- Integrated forecasting insights into the demo session post-clone workflow
- Now triggers 4 AI insight types: price, safety stock, yield, and demand
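
The ForecastServiceClient change itself is not part of the diff excerpt below. A minimal sketch of what trigger_demand_insights_internal() might look like, assuming an httpx-based client; the constructor shape, base_url, timeout, and absence of auth headers are illustrative assumptions, not the actual implementation in shared/clients/forecast_client.py:

    import httpx

    class ForecastServiceClient:
        """Hypothetical sketch; the real client in shared/clients/forecast_client.py may differ."""

        def __init__(self, base_url: str):
            self.base_url = base_url  # forecasting service URL (assumed)

        async def trigger_demand_insights_internal(self, tenant_id: str) -> dict:
            # POST to the internal endpoint added in this commit and return its JSON body
            url = (
                f"{self.base_url}/api/v1/tenants/{tenant_id}"
                "/forecasting/internal/ml/generate-demand-insights"
            )
            async with httpx.AsyncClient(timeout=120.0) as client:
                response = await client.post(url)
                response.raise_for_status()
                return response.json()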

Issue 2: RabbitMQ client cleanup error in procurement service
- Error: "'RabbitMQClient' object has no attribute 'close'"
- Fixed: rabbitmq_client.close() → rabbitmq_client.disconnect()
- Added proper cleanup in the exception handler (sketch below)
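
For reference, a sketch of the corrected cleanup shape; only the close() → disconnect() rename comes from this commit, while the surrounding handler structure and the awaitability of disconnect() are assumptions:

    async def seed_demo_data(rabbitmq_client) -> None:
        """Illustrative shape only; the real handler is in services/procurement/app/api/internal_demo.py."""
        try:
            ...  # demo seeding work that publishes events via rabbitmq_client
            await rabbitmq_client.disconnect()
        except Exception:
            # Cleanup in the exception path too.
            # Was: rabbitmq_client.close()
            # -> AttributeError: 'RabbitMQClient' object has no attribute 'close'
            await rabbitmq_client.disconnect()
            raise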

Files modified:
- services/forecasting/app/api/ml_insights.py (new internal_router)
- services/forecasting/app/main.py (register internal router)
- shared/clients/forecast_client.py (new trigger method)
- services/demo_session/app/services/clone_orchestrator.py (+ demand insights)
- services/procurement/app/api/internal_demo.py (fix disconnect)

Expected impact:
- Demo sessions will now generate demand forecasting insights
- No more RabbitMQ cleanup errors in logs
- AI insights count should increase from 1 to 2-3 per session

🤖 Generated with Claude Code

Co-Authored-By: Claude Sonnet 4.5 <noreply@anthropic.com>
Author: Urtzi Alfaro
Date: 2025-12-16 11:28:04 +01:00
Parent: b461d620ab
Commit: 4418ff0876
5 changed files with 244 additions and 2 deletions

services/forecasting/app/api/ml_insights.py

@@ -767,3 +767,172 @@ async def ml_insights_health():
            "POST /ml/insights/analyze-business-rules"
        ]
    }


# ================================================================
# INTERNAL ML INSIGHTS ENDPOINTS (for demo session service)
# ================================================================

internal_router = APIRouter(tags=["Internal ML"])


@internal_router.post("/api/v1/tenants/{tenant_id}/forecasting/internal/ml/generate-demand-insights")
async def trigger_demand_insights_internal(
    tenant_id: str,
    request: Request,
    db: AsyncSession = Depends(get_db)
):
    """
    Internal endpoint to trigger demand forecasting insights for a tenant.

    This endpoint is called by the demo-session service after cloning to generate
    AI insights from the seeded forecast data.

    Args:
        tenant_id: Tenant UUID
        request: FastAPI request object to access app state
        db: Database session

    Returns:
        Dict with insights generation results
    """
    logger.info(
        "Internal demand insights generation triggered",
        tenant_id=tenant_id
    )

    try:
        # Import ML orchestrator and clients
        from app.ml.demand_insights_orchestrator import DemandInsightsOrchestrator
        from shared.clients.sales_client import SalesServiceClient
        from shared.clients.inventory_client import InventoryServiceClient
        from app.core.config import settings

        # Get event publisher from app state
        event_publisher = getattr(request.app.state, 'event_publisher', None)

        # Initialize orchestrator and clients
        orchestrator = DemandInsightsOrchestrator(event_publisher=event_publisher)
        inventory_client = InventoryServiceClient(settings)

        # Get all products for tenant (limit to 10 for performance)
        all_products = await inventory_client.get_all_ingredients(tenant_id=tenant_id)
        products = all_products[:10] if all_products else []

        logger.info(
            "Retrieved products from inventory service",
            tenant_id=tenant_id,
            product_count=len(products)
        )

        if not products:
            return {
                "success": False,
                "message": "No products found for analysis",
                "tenant_id": tenant_id,
                "products_analyzed": 0,
                "insights_posted": 0
            }

        # Initialize sales client
        sales_client = SalesServiceClient(config=settings, calling_service_name="forecasting")

        # Calculate date range (90 days lookback)
        end_date = datetime.utcnow()
        start_date = end_date - timedelta(days=90)

        # Process each product
        total_insights_generated = 0
        total_insights_posted = 0

        for product in products:
            try:
                product_id = str(product['id'])
                product_name = product.get('name', 'Unknown Product')

                logger.debug(
                    "Analyzing demand for product",
                    tenant_id=tenant_id,
                    product_id=product_id,
                    product_name=product_name
                )

                # Fetch historical sales data
                sales_data_raw = await sales_client.get_product_sales(
                    tenant_id=tenant_id,
                    product_id=product_id,
                    start_date=start_date,
                    end_date=end_date
                )

                if not sales_data_raw or len(sales_data_raw) < 10:
                    logger.debug(
                        "Insufficient sales data for product",
                        product_id=product_id,
                        sales_records=len(sales_data_raw) if sales_data_raw else 0
                    )
                    continue

                # Convert to DataFrame
                sales_df = pd.DataFrame(sales_data_raw)

                # Run demand insights orchestrator
                insights = await orchestrator.analyze_and_generate_insights(
                    tenant_id=tenant_id,
                    product_id=product_id,
                    product_name=product_name,
                    sales_data=sales_df,
                    lookback_days=90,
                    db=db
                )

                if insights:
                    total_insights_generated += len(insights)
                    total_insights_posted += len(insights)

                    logger.info(
                        "Demand insights generated for product",
                        tenant_id=tenant_id,
                        product_id=product_id,
                        insights_count=len(insights)
                    )

            except Exception as e:
                logger.warning(
                    "Failed to analyze product demand (non-fatal)",
                    tenant_id=tenant_id,
                    product_id=product_id,
                    error=str(e)
                )
                continue

        logger.info(
            "Internal demand insights generation complete",
            tenant_id=tenant_id,
            products_analyzed=len(products),
            insights_generated=total_insights_generated,
            insights_posted=total_insights_posted
        )

        return {
            "success": True,
            "message": f"Generated {total_insights_posted} demand forecasting insights",
            "tenant_id": tenant_id,
            "products_analyzed": len(products),
            "insights_posted": total_insights_posted
        }

    except Exception as e:
        logger.error(
            "Internal demand insights generation failed",
            tenant_id=tenant_id,
            error=str(e),
            exc_info=True
        )
        return {
            "success": False,
            "message": f"Demand insights generation failed: {str(e)}",
            "tenant_id": tenant_id,
            "products_analyzed": 0,
            "insights_posted": 0
        }
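
For illustration only (not part of the diff): triggering the endpoint manually and the response shape it returns, where the host, port, and missing auth headers are assumptions for local testing:

    import asyncio
    import httpx

    async def main() -> None:
        tenant_id = "00000000-0000-0000-0000-000000000000"  # placeholder UUID
        url = (
            f"http://localhost:8000/api/v1/tenants/{tenant_id}"
            "/forecasting/internal/ml/generate-demand-insights"
        )
        async with httpx.AsyncClient(timeout=120.0) as client:
            resp = await client.post(url)
            print(resp.json())
            # On success (matches the return dict above), roughly:
            # {"success": True, "message": "Generated N demand forecasting insights",
            #  "tenant_id": "...", "products_analyzed": 10, "insights_posted": N}

    asyncio.run(main())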

services/forecasting/app/main.py

@@ -193,6 +193,7 @@ service.add_router(analytics.router)
service.add_router(scenario_operations.router)
service.add_router(internal_demo.router, tags=["internal-demo"])
service.add_router(ml_insights.router) # ML insights endpoint
service.add_router(ml_insights.internal_router) # Internal ML insights endpoint
service.add_router(validation.router) # Validation endpoint
service.add_router(historical_validation.router) # Historical validation endpoint
service.add_router(webhooks.router) # Webhooks endpoint