demo seed change 4

This commit is contained in:
Urtzi Alfaro
2025-12-14 19:05:37 +01:00
parent 4ae5356ad1
commit 82f9622411
16 changed files with 532 additions and 55 deletions

View File

@@ -251,26 +251,44 @@ async def clone_demo_data(
# Set default location if not provided in seed data
location = forecast_data.get('location') or "Main Bakery"
# Get or calculate forecast date
forecast_date = forecast_data.get('forecast_date')
if not forecast_date:
forecast_date = session_time
# Calculate day_of_week from forecast_date if not provided
# day_of_week should be 0-6 (Monday=0, Sunday=6)
day_of_week = forecast_data.get('day_of_week')
if day_of_week is None and forecast_date:
day_of_week = forecast_date.weekday()
# Calculate is_weekend from day_of_week if not provided
is_weekend = forecast_data.get('is_weekend')
if is_weekend is None and day_of_week is not None:
is_weekend = day_of_week >= 5 # Saturday=5, Sunday=6
else:
is_weekend = False
new_forecast = Forecast(
id=transformed_id,
tenant_id=virtual_uuid,
inventory_product_id=inventory_product_id,
product_name=forecast_data.get('product_name'),
location=location,
forecast_date=forecast_data.get('forecast_date'),
forecast_date=forecast_date,
created_at=forecast_data.get('created_at', session_time),
predicted_demand=predicted_demand,
confidence_lower=forecast_data.get('confidence_lower', max(0.0, float(predicted_demand or 0.0) * 0.8)),
confidence_upper=forecast_data.get('confidence_upper', max(0.0, float(predicted_demand or 0.0) * 1.2)),
confidence_level=forecast_data.get('confidence_level', 0.8),
model_id=forecast_data.get('model_id'),
model_version=forecast_data.get('model_version'),
model_id=forecast_data.get('model_id') or 'default-fallback-model',
model_version=forecast_data.get('model_version') or '1.0',
algorithm=forecast_data.get('algorithm', 'prophet'),
business_type=forecast_data.get('business_type', 'individual'),
day_of_week=forecast_data.get('day_of_week'),
day_of_week=day_of_week,
is_holiday=forecast_data.get('is_holiday', False),
is_weekend=forecast_data.get('is_weekend', False),
is_weekend=is_weekend,
weather_temperature=forecast_data.get('weather_temperature'),
weather_precipitation=forecast_data.get('weather_precipitation'),
weather_description=forecast_data.get('weather_description'),

View File

@@ -385,7 +385,7 @@ class EnhancedForecastingService:
"confidence_lower": adjusted_prediction.get('lower_bound', max(0.0, float(adjusted_prediction.get('prediction') or 0.0) * 0.8)),
"confidence_upper": adjusted_prediction.get('upper_bound', max(0.0, float(adjusted_prediction.get('prediction') or 0.0) * 1.2)),
"confidence_level": request.confidence_level,
"model_id": model_data['model_id'],
"model_id": model_data.get('model_id') or 'default-fallback-model',
"model_version": str(model_data.get('version', '1.0')),
"algorithm": model_data.get('algorithm', 'prophet'),
"business_type": features.get('business_type', 'individual'),

View File

@@ -157,21 +157,18 @@ async def trigger_safety_stock_optimization(
try:
# Fetch sales data for this product
sales_response = await sales_client.get_sales_data(
sales_data = await sales_client.get_sales_data(
tenant_id=tenant_id,
product_id=product_id,
start_date=start_date.strftime('%Y-%m-%d'),
end_date=end_date.strftime('%Y-%m-%d')
)
if not sales_response or not sales_response.get('sales'):
if not sales_data:
logger.warning(
f"No sales history for product {product_id}, skipping"
)
continue
# Convert sales data to daily demand
sales_data = sales_response.get('sales', [])
demand_data = []
for sale in sales_data:

View File

@@ -179,8 +179,21 @@ class InventoryScheduler:
for shortage in stock_shortages:
try:
ingredient_id = UUID(shortage["ingredient_id"])
tenant_id = UUID(shortage["tenant_id"])
# Handle asyncpg UUID objects properly
ingredient_id_val = shortage["ingredient_id"]
tenant_id_val = shortage["tenant_id"]
# Convert asyncpg UUID to string first, then to UUID
if hasattr(ingredient_id_val, 'hex'):
ingredient_id = UUID(hex=ingredient_id_val.hex)
else:
ingredient_id = UUID(str(ingredient_id_val))
if hasattr(tenant_id_val, 'hex'):
tenant_id = UUID(hex=tenant_id_val.hex)
else:
tenant_id = UUID(str(tenant_id_val))
current_quantity = float(shortage["current_quantity"])
required_quantity = float(shortage["required_quantity"])
shortage_amount = float(shortage["shortage_amount"])
@@ -515,7 +528,12 @@ class InventoryScheduler:
for shortage in critical_shortages:
try:
ingredient_id = UUID(str(shortage["id"])) # Use 'id' instead of 'ingredient_id'
# Handle asyncpg UUID objects properly
ingredient_id_val = shortage["id"]
if hasattr(ingredient_id_val, 'hex'):
ingredient_id = UUID(hex=ingredient_id_val.hex)
else:
ingredient_id = UUID(str(ingredient_id_val))
# Extract values with defaults
current_quantity = float(shortage.get("current_stock", 0))
@@ -732,8 +750,19 @@ class InventoryScheduler:
for shortage in critical_shortages:
try:
ingredient_id = UUID(str(shortage["id"]))
tenant_id = UUID(shortage["tenant_id"])
# Handle asyncpg UUID objects properly
ingredient_id_val = shortage["id"]
tenant_id_val = shortage["tenant_id"]
if hasattr(ingredient_id_val, 'hex'):
ingredient_id = UUID(hex=ingredient_id_val.hex)
else:
ingredient_id = UUID(str(ingredient_id_val))
if hasattr(tenant_id_val, 'hex'):
tenant_id = UUID(hex=tenant_id_val.hex)
else:
tenant_id = UUID(str(tenant_id_val))
# Extract values with defaults
current_quantity = float(shortage.get("current_stock", 0))

View File

@@ -9,7 +9,7 @@ from sqlalchemy import select, delete, func
import structlog
import uuid
from datetime import datetime, timezone, timedelta, date
from typing import Optional
from typing import Optional, Dict, Any
import os
import json
from pathlib import Path
@@ -26,6 +26,7 @@ from shared.schemas.reasoning_types import (
create_po_reasoning_supplier_contract
)
from app.core.config import settings
from shared.clients.suppliers_client import SuppliersServiceClient
logger = structlog.get_logger()
router = APIRouter(prefix="/internal/demo", tags=["internal"])
@@ -42,6 +43,155 @@ def verify_internal_api_key(x_internal_api_key: Optional[str] = Header(None)):
return True
async def _emit_po_approval_alerts_for_demo(
    virtual_tenant_id: uuid.UUID,
    pending_pos: list[PurchaseOrder]
) -> int:
    """
    Emit alerts for pending approval POs during demo cloning.

    Creates clients internally to avoid dependency injection issues.
    Best-effort: per-PO failures are logged and skipped, and any top-level
    failure is swallowed so the demo cloning process never fails here.

    Args:
        virtual_tenant_id: Tenant UUID the cloned demo data belongs to.
        pending_pos: Purchase orders in 'pending_approval' status to alert on.

    Returns:
        The number of alerts successfully emitted (0 on total failure).
    """
    if not pending_pos:
        return 0
    alerts_emitted = 0
    try:
        # Initialize clients locally for this operation
        from shared.clients.suppliers_client import SuppliersServiceClient
        from shared.messaging import RabbitMQClient
        # Use the existing settings instead of creating a new config
        # This avoids issues with property-based configuration
        suppliers_client = SuppliersServiceClient(settings, "procurement-service")
        rabbitmq_client = RabbitMQClient(settings.RABBITMQ_URL, "procurement-service")
        # Connect to RabbitMQ
        await rabbitmq_client.connect()
        try:
            logger.info(
                "Emitting PO approval alerts for demo",
                pending_po_count=len(pending_pos),
                virtual_tenant_id=str(virtual_tenant_id)
            )
            # Emit alerts for each pending PO
            for po in pending_pos:
                try:
                    # Get supplier details
                    supplier_details = await suppliers_client.get_supplier_by_id(
                        tenant_id=str(virtual_tenant_id),
                        supplier_id=str(po.supplier_id)
                    )
                    # Skip if supplier not found
                    if not supplier_details:
                        logger.warning(
                            "Supplier not found for PO, skipping alert",
                            po_id=str(po.id),
                            supplier_id=str(po.supplier_id)
                        )
                        continue
                    # Calculate urgency fields.
                    # NOTE(review): datetime.utcnow() is naive; assumes
                    # required_delivery_date is also naive UTC — confirm.
                    now = datetime.utcnow()
                    hours_until_consequence = None
                    deadline = None
                    if po.required_delivery_date:
                        supplier_lead_time_days = supplier_details.get('standard_lead_time', 7)
                        # The PO must be approved early enough to cover the
                        # supplier's lead time before the delivery date.
                        approval_deadline = po.required_delivery_date - timedelta(days=supplier_lead_time_days)
                        deadline = approval_deadline
                        hours_until_consequence = (approval_deadline - now).total_seconds() / 3600
                    # Prepare alert payload
                    alert_data = {
                        'id': str(uuid.uuid4()),
                        'tenant_id': str(virtual_tenant_id),
                        'service': 'procurement',
                        'type': 'po_approval_needed',
                        'alert_type': 'po_approval_needed',
                        'type_class': 'action_needed',
                        'severity': 'high' if po.priority == 'critical' else 'medium',
                        'title': '',
                        'message': '',
                        # Reuse `now` so the timestamp matches the urgency math
                        'timestamp': now.isoformat(),
                        'metadata': {
                            'po_id': str(po.id),
                            'po_number': po.po_number,
                            'supplier_id': str(po.supplier_id),
                            'supplier_name': supplier_details.get('name', ''),
                            'total_amount': float(po.total_amount),
                            'currency': po.currency,
                            'priority': po.priority,
                            'required_delivery_date': po.required_delivery_date.isoformat() if po.required_delivery_date else None,
                            'created_at': po.created_at.isoformat(),
                            'financial_impact': float(po.total_amount),
                            'urgency_score': 85,
                            'deadline': deadline.isoformat() if deadline else None,
                            # `is not None` so a deadline of exactly now (0.0h)
                            # is not silently collapsed to None
                            'hours_until_consequence': round(hours_until_consequence, 1) if hours_until_consequence is not None else None,
                            'reasoning_data': po.reasoning_data or {}
                        },
                        'message_params': {
                            'po_number': po.po_number,
                            'supplier_name': supplier_details.get('name', ''),
                            'total_amount': float(po.total_amount),
                            'currency': po.currency,
                            'priority': po.priority,
                            'required_delivery_date': po.required_delivery_date.isoformat() if po.required_delivery_date else None,
                            'items_count': 0,
                            'created_at': po.created_at.isoformat()
                        },
                        'actions': ['approve_po', 'reject_po', 'modify_po'],
                        'item_type': 'alert'
                    }
                    # Publish to RabbitMQ
                    await rabbitmq_client.publish_event(
                        exchange_name='alerts.exchange',
                        routing_key=f'alert.{alert_data["severity"]}.procurement',
                        event_data=alert_data
                    )
                    alerts_emitted += 1
                    logger.debug(
                        "PO approval alert emitted",
                        po_id=str(po.id),
                        po_number=po.po_number
                    )
                except Exception as po_error:
                    logger.warning(
                        "Failed to emit alert for PO",
                        po_id=str(po.id),
                        po_number=po.po_number,
                        error=str(po_error)
                    )
                    # Continue with other POs
        finally:
            # Always close the connection, even if the loop raises —
            # otherwise a failure mid-loop leaks the RabbitMQ connection
            await rabbitmq_client.close()
        logger.info(
            "PO approval alerts emission completed",
            alerts_emitted=alerts_emitted,
            total_pending=len(pending_pos)
        )
        return alerts_emitted
    except Exception as e:
        logger.error(
            "Failed to emit PO approval alerts",
            error=str(e),
            virtual_tenant_id=str(virtual_tenant_id),
            exc_info=True
        )
        # Don't fail the cloning process
        return alerts_emitted
@router.post("/clone")
async def clone_demo_data(
base_tenant_id: str,
@@ -420,6 +570,39 @@ async def clone_demo_data(
# Commit all loaded data
await db.commit()
# Emit alerts for pending approval POs (CRITICAL for demo dashboard)
alerts_emitted = 0
try:
# Get all pending approval POs that were just created
pending_approval_pos = await db.execute(
select(PurchaseOrder).where(
PurchaseOrder.tenant_id == virtual_uuid,
PurchaseOrder.status == 'pending_approval'
)
)
pending_pos = pending_approval_pos.scalars().all()
logger.info(
"Found pending approval POs for alert emission",
count=len(pending_pos),
virtual_tenant_id=virtual_tenant_id
)
# Emit alerts using refactored function
if pending_pos:
alerts_emitted = await _emit_po_approval_alerts_for_demo(
virtual_tenant_id=virtual_uuid,
pending_pos=pending_pos
)
except Exception as e:
logger.error(
"Failed to emit PO approval alerts during demo cloning",
error=str(e),
virtual_tenant_id=virtual_tenant_id
)
# Don't fail the entire cloning process if alert emission fails
# Calculate total records
total_records = (stats["procurement_plans"] + stats["procurement_requirements"] +
stats["purchase_orders"] + stats["purchase_order_items"] +
@@ -439,7 +622,8 @@ async def clone_demo_data(
"status": "completed",
"records_cloned": total_records,
"duration_ms": duration_ms,
"details": stats
"details": stats,
"alerts_emitted": alerts_emitted
}
except ValueError as e:

View File

@@ -188,6 +188,9 @@ async def clone_demo_data(
"recipe_ingredients": 0
}
# First, build recipe ID map by processing all recipes
recipe_id_map = {}
# Create Recipes
for recipe_data in seed_data.get('recipes', []):
# Transform recipe ID using XOR
@@ -263,8 +266,8 @@ async def clone_demo_data(
db.add(new_recipe)
stats["recipes"] += 1
# Map recipe ID for ingredients
recipe_id_map = {recipe_data['id']: str(transformed_id)}
# Add recipe ID to map for ingredients
recipe_id_map[recipe_data['id']] = str(transformed_id)
# Create Recipe Ingredients
for recipe_ingredient_data in seed_data.get('recipe_ingredients', []):