Add AI insights feature
@@ -358,13 +358,66 @@ async def clone_demo_data(
         except KeyError:
             process_stage_value = None
 
+        # Transform foreign key references (product_id, recipe_id, order_id, forecast_id)
+        transformed_product_id = None
+        if batch_data.get('product_id'):
+            try:
+                transformed_product_id = str(transform_id(batch_data['product_id'], virtual_uuid))
+            except Exception as e:
+                logger.warning("Failed to transform product_id",
+                               product_id=batch_data.get('product_id'),
+                               error=str(e))
+
+        transformed_recipe_id = None
+        if batch_data.get('recipe_id'):
+            try:
+                transformed_recipe_id = str(transform_id(batch_data['recipe_id'], virtual_uuid))
+            except Exception as e:
+                logger.warning("Failed to transform recipe_id",
+                               recipe_id=batch_data.get('recipe_id'),
+                               error=str(e))
+
+        transformed_order_id = None
+        if batch_data.get('order_id'):
+            try:
+                transformed_order_id = str(transform_id(batch_data['order_id'], virtual_uuid))
+            except Exception as e:
+                logger.warning("Failed to transform order_id",
+                               order_id=batch_data.get('order_id'),
+                               error=str(e))
+
+        transformed_forecast_id = None
+        if batch_data.get('forecast_id'):
+            try:
+                transformed_forecast_id = str(transform_id(batch_data['forecast_id'], virtual_uuid))
+            except Exception as e:
+                logger.warning("Failed to transform forecast_id",
+                               forecast_id=batch_data.get('forecast_id'),
+                               error=str(e))
+
+        # Transform equipment_used array
+        transformed_equipment = []
+        if batch_data.get('equipment_used'):
+            for equip_id in batch_data['equipment_used']:
+                try:
+                    transformed_equipment.append(str(transform_id(equip_id, virtual_uuid)))
+                except Exception as e:
+                    logger.warning("Failed to transform equipment_id",
+                                   equipment_id=equip_id,
+                                   error=str(e))
+
+        # staff_assigned contains user IDs - these should NOT be transformed
+        # because they reference actual user accounts which are NOT cloned.
+        # The demo uses the same user accounts across all virtual tenants.
+        staff_assigned = batch_data.get('staff_assigned', [])
 
         new_batch = ProductionBatch(
             id=str(transformed_id),
             tenant_id=virtual_uuid,
             batch_number=f"{session_id[:8]}-{batch_data.get('batch_number', f'BATCH-{uuid.uuid4().hex[:8].upper()}')}",
-            product_id=batch_data.get('product_id'),
+            product_id=transformed_product_id,
             product_name=batch_data.get('product_name'),
-            recipe_id=batch_data.get('recipe_id'),
+            recipe_id=transformed_recipe_id,
             planned_start_time=adjusted_planned_start,
             planned_end_time=adjusted_planned_end,
             planned_quantity=batch_data.get('planned_quantity'),
@@ -389,11 +442,11 @@ async def clone_demo_data(
             waste_quantity=batch_data.get('waste_quantity'),
             defect_quantity=batch_data.get('defect_quantity'),
             waste_defect_type=batch_data.get('waste_defect_type'),
-            equipment_used=batch_data.get('equipment_used'),
-            staff_assigned=batch_data.get('staff_assigned'),
+            equipment_used=transformed_equipment,
+            staff_assigned=staff_assigned,
             station_id=batch_data.get('station_id'),
-            order_id=batch_data.get('order_id'),
-            forecast_id=batch_data.get('forecast_id'),
+            order_id=transformed_order_id,
+            forecast_id=transformed_forecast_id,
             is_rush_order=batch_data.get('is_rush_order', False),
             is_special_recipe=batch_data.get('is_special_recipe', False),
             is_ai_assisted=batch_data.get('is_ai_assisted', False),
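Both hunks above call transform_id(original_id, virtual_uuid) without showing its definition. A minimal sketch of what such a helper could look like, assuming a deterministic uuid5-based mapping (the function name and call shape come from the diff; the body and parameter names are guesses):

    import uuid

    def transform_id(original_id, virtual_tenant_id):
        """Map a template UUID into a virtual tenant's ID space (hypothetical sketch).

        uuid5 is deterministic, so the same template row always maps to the
        same cloned row within a virtual tenant, and rows from different
        tenants cannot collide because the tenant UUID is the namespace.
        """
        if not original_id:
            raise ValueError("original_id must be a non-empty UUID or string")
        return uuid.uuid5(uuid.UUID(str(virtual_tenant_id)), str(original_id))

The callers wrap the result in str(...) and log a warning instead of raising, so one unparsable reference degrades to None rather than aborting the whole demo-data clone.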
@@ -7,7 +7,7 @@ Provides endpoints to trigger ML insight generation for:
 - Process efficiency analysis
 """
 
-from fastapi import APIRouter, Depends, HTTPException
+from fastapi import APIRouter, Depends, HTTPException, Request
 from pydantic import BaseModel, Field
 from typing import Optional, List
 from uuid import UUID
@@ -71,6 +71,7 @@ class YieldPredictionResponse(BaseModel):
 async def trigger_yield_prediction(
     tenant_id: str,
     request_data: YieldPredictionRequest,
+    request: Request,
     db: AsyncSession = Depends(get_db)
 ):
     """
@@ -81,10 +82,12 @@ async def trigger_yield_prediction(
     2. Runs the YieldInsightsOrchestrator to predict yields
     3. Generates insights about yield optimization opportunities
     4. Posts insights to AI Insights Service
+    5. Publishes recommendation events to RabbitMQ
 
     Args:
         tenant_id: Tenant UUID
         request_data: Prediction parameters
+        request: FastAPI request (for app state access)
         db: Database session
 
     Returns:
@@ -103,8 +106,13 @@ async def trigger_yield_prediction(
     from shared.clients.recipes_client import RecipesServiceClient
     from app.core.config import settings
 
+    # Get event publisher from app state (if available)
+    event_publisher = getattr(request.app.state, 'event_publisher', None) if hasattr(request, 'app') else None
+
     # Initialize orchestrator and recipes client
-    orchestrator = YieldInsightsOrchestrator()
+    orchestrator = YieldInsightsOrchestrator(
+        event_publisher=event_publisher
+    )
     recipes_client = RecipesServiceClient(settings)
 
     # Get recipes to analyze from recipes service via API
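The handler only reads event_publisher from app.state and tolerates its absence. Something in service startup must set it; a minimal sketch of that wiring, assuming a FastAPI lifespan hook and an assumed connect_event_publisher() helper (neither appears in this diff):

    from contextlib import asynccontextmanager
    from fastapi import FastAPI

    @asynccontextmanager
    async def lifespan(app: FastAPI):
        # Assumed helper: opens the RabbitMQ connection used for
        # recommendation events. If startup never sets this attribute,
        # the handler above falls back to None and skips publishing.
        app.state.event_publisher = await connect_event_publisher()
        yield
        await app.state.event_publisher.close()

    app = FastAPI(lifespan=lifespan)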
@@ -186,12 +194,18 @@ async def trigger_yield_prediction(
                 continue  # Skip batches without complete data
 
             production_data.append({
                 'production_date': batch.actual_start_time,
+                'production_run_id': str(batch.id),  # Required: unique identifier for each production run
+                'recipe_id': str(batch.recipe_id),  # Required: recipe identifier
                 'started_at': batch.actual_start_time,
                 'completed_at': batch.actual_end_time,  # Optional but useful for duration analysis
                 'batch_size': float(batch.planned_quantity),  # Use planned_quantity as batch_size
                 'planned_quantity': float(batch.planned_quantity),
                 'actual_quantity': float(batch.actual_quantity),
                 'yield_percentage': yield_pct,
-                'worker_id': batch.notes or 'unknown',  # Use notes field or default
-                'batch_number': batch.batch_number
+                'staff_assigned': batch.staff_assigned if batch.staff_assigned else ['unknown'],
+                'batch_number': batch.batch_number,
+                'equipment_id': batch.equipment_used[0] if batch.equipment_used and len(batch.equipment_used) > 0 else None,
+                'notes': batch.quality_notes  # Optional quality notes
             })
 
         if not production_data:
@@ -202,6 +216,14 @@ async def trigger_yield_prediction(
 
         production_history = pd.DataFrame(production_data)
 
+        # Debug: Log DataFrame columns and sample data
+        logger.debug(
+            "Production history DataFrame created",
+            recipe_id=recipe_id,
+            columns=list(production_history.columns),
+            sample_data=production_history.head(1).to_dict('records') if len(production_history) > 0 else None
+        )
+
         # Run yield analysis
         results = await orchestrator.analyze_and_post_insights(
             tenant_id=tenant_id,
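For reference, head(1).to_dict('records') yields a list of at most one row dict, so the debug payload stays small no matter how long the production history is. A standalone illustration, using a few of the column names built in the loop above:

    import pandas as pd

    df = pd.DataFrame([
        {'recipe_id': 'r-1', 'planned_quantity': 100.0,
         'actual_quantity': 92.0, 'yield_percentage': 92.0},
        {'recipe_id': 'r-1', 'planned_quantity': 80.0,
         'actual_quantity': 75.0, 'yield_percentage': 93.75},
    ])
    print(list(df.columns))
    # ['recipe_id', 'planned_quantity', 'actual_quantity', 'yield_percentage']
    print(df.head(1).to_dict('records'))
    # [{'recipe_id': 'r-1', 'planned_quantity': 100.0, 'actual_quantity': 92.0, 'yield_percentage': 92.0}]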
@@ -291,8 +313,6 @@ async def ml_insights_health():
 # INTERNAL ENDPOINTS (for demo-session service)
 # ================================================================
 
-from fastapi import Request
-
 # Create a separate router for internal endpoints to avoid the tenant prefix
 internal_router = APIRouter(
     tags=["ML Insights - Internal"]
@@ -347,6 +367,7 @@ async def generate_yield_insights_internal(
     result = await trigger_yield_prediction(
         tenant_id=tenant_id,
         request_data=request_data,
+        request=request,
         db=db
     )
 
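The internal route delegates to trigger_yield_prediction directly, so the public and internal entry points share one code path. How the two routers might be mounted so that internal_router escapes the tenant prefix, assuming the module's public router is named router and an illustrative prefix (the actual prefixes are not shown in this diff):

    from fastapi import FastAPI

    app = FastAPI()
    # Tenant-facing routes sit behind a tenant-scoped prefix (assumed value).
    app.include_router(router, prefix="/api/v1/tenants/{tenant_id}")
    # Internal routes for the demo-session service are mounted without it.
    app.include_router(internal_router)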