Improve the control panel logic
@@ -56,6 +56,7 @@ class Settings(BaseSettings):
    ORDERS_SERVICE_URL: str = os.getenv("ORDERS_SERVICE_URL", "http://orders-service:8000")
    PRODUCTION_SERVICE_URL: str = os.getenv("PRODUCTION_SERVICE_URL", "http://production-service:8000")
    SUPPLIERS_SERVICE_URL: str = os.getenv("SUPPLIERS_SERVICE_URL", "http://suppliers-service:8000")
    ORCHESTRATOR_SERVICE_URL: str = os.getenv("ORCHESTRATOR_SERVICE_URL", "http://orchestrator-service:8000")

    # Logging
    LOG_LEVEL: str = os.getenv("LOG_LEVEL", "INFO")

@@ -95,6 +95,12 @@ class CloneOrchestrator:
                required=False,  # Optional - provides procurement and purchase orders
                timeout=25.0  # Longer - clones many procurement entities
            ),
            ServiceDefinition(
                name="orchestrator",
                url=os.getenv("ORCHESTRATOR_SERVICE_URL", "http://orchestrator-service:8000"),
                required=False,  # Optional - provides orchestration run history
                timeout=15.0  # Standard timeout for orchestration data
            ),
        ]

    async def clone_all_services(

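For context, the entries above imply a ServiceDefinition container roughly like the following; the field names come from the call sites, but the definition itself is an inference, not code from this commit:

from dataclasses import dataclass

@dataclass
class ServiceDefinition:
    name: str       # service key reported in clone results
    url: str        # base URL, overridable via environment variable
    required: bool  # whether a failure should abort the whole clone (assumed semantics)
    timeout: float  # per-service HTTP timeout in seconds
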
@@ -54,6 +54,74 @@ DEMO_TENANT_LA_ESPIGA = uuid.UUID("b2c3d4e5-f6a7-48b9-c0d1-e2f3a4b5c6d7")
# Base reference date for demo data (all relative dates calculated from this)
BASE_REFERENCE_DATE = datetime(2025, 1, 15, 12, 0, 0, tzinfo=timezone.utc)

# Daily consumption rates (kg/day) - aligned with procurement seed script
# Used to create realistic stock levels that trigger appropriate PO scenarios
DAILY_CONSUMPTION_RATES = {
    "HAR-T55-001": 50.0,   # Harina de Trigo T55
    "HAR-INT-001": 15.0,   # Harina Integral Ecológica
    "MAN-SAL-001": 8.0,    # Mantequilla sin Sal 82% MG
    "HUE-FRE-001": 100.0,  # Huevos Frescos (units, modeled as kg)
    "LEV-SEC-001": 2.5,    # Levadura Seca
    "SAL-FIN-001": 3.0,    # Sal Fina
    "ACE-OLI-001": 5.0,    # Aceite de Oliva Virgen
    "AZU-MOR-001": 6.0,    # Azúcar Moreno
    "SEM-GIR-001": 2.0,    # Semillas de Girasol
    "MIE-AZA-001": 1.5,    # Miel de Azahar
    "CHO-NEG-001": 4.0,    # Chocolate Negro 70%
    "NUE-PEL-001": 3.5,    # Nueces Peladas
    "PAS-SUL-001": 2.5     # Pasas Sultanas
}

# Reorder points (kg) - when to trigger PO
REORDER_POINTS_BY_SKU = {
    "HAR-T55-001": 150.0,  # Critical ingredient
    "HAR-INT-001": 50.0,
    "MAN-SAL-001": 25.0,
    "HUE-FRE-001": 300.0,
    "LEV-SEC-001": 10.0,
    "SAL-FIN-001": 20.0,
    "ACE-OLI-001": 15.0,
    "AZU-MOR-001": 20.0,
    "SEM-GIR-001": 10.0,
    "MIE-AZA-001": 5.0,
    "CHO-NEG-001": 15.0,
    "NUE-PEL-001": 12.0,
    "PAS-SUL-001": 10.0
}

def calculate_realistic_stock_level(
    ingredient_sku: str,
    make_critical: bool = False,
    variability_factor: float = 0.2
) -> float:
    """
    Calculate realistic stock level based on consumption rates

    Args:
        ingredient_sku: SKU of the ingredient
        make_critical: If True, create critically low stock (< 1 day supply)
        variability_factor: Random variation (default 20%)

    Returns:
        Realistic stock level in kg
    """
    daily_consumption = DAILY_CONSUMPTION_RATES.get(ingredient_sku, 5.0)

    if make_critical:
        # Critical: 0.5-6 hours worth of stock
        days_of_supply = random.uniform(0.02, 0.25)
    else:
        # Normal: 5-15 days worth of stock (healthy inventory levels)
        # This prevents all ingredients from triggering alerts
        days_of_supply = random.uniform(5.0, 15.0)

    stock_level = daily_consumption * days_of_supply

    # Add realistic variability
    stock_level *= random.uniform(1 - variability_factor, 1 + variability_factor)

    return max(0.1, stock_level)  # Minimum 0.1 kg

# Load configuration from JSON
def load_stock_config():
    """Load stock configuration from JSON file"""

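As a sanity check on the ranges above (illustrative, using the constants from this hunk): for "HAR-T55-001" at 50.0 kg/day, the critical branch draws 0.02-0.25 days of supply, roughly 1-12.5 kg before the ±20% variability, so the result always sits well under one day's consumption:

critical = calculate_realistic_stock_level("HAR-T55-001", make_critical=True)
assert critical < DAILY_CONSUMPTION_RATES["HAR-T55-001"]  # under one day of supply
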
@@ -126,13 +194,25 @@ async def create_stock_batches_for_ingredient(
    stocks = []
    num_batches = random.randint(1, 2)  # Reduced from 3-5 for faster demo loading

    # Calculate target total stock for this ingredient
    # Use 40-80% of max_stock_level to allow for realistic variation
    # If max_stock_level is not set, use reorder_point * 3 as a reasonable target
    if ingredient.max_stock_level:
        target_total_stock = float(ingredient.max_stock_level) * random.uniform(0.4, 0.8)
    # CRITICAL DEMO SCENARIO: Create consumption-aware stock levels
    # This creates realistic scenarios that trigger intelligent PO reasoning
    critical_low_stock_skus = ["HAR-T55-001", "LEV-SEC-001", "MAN-SAL-001"]
    is_critical_low = ingredient.sku in critical_low_stock_skus

    # Calculate target total stock using consumption-aware logic
    if is_critical_low:
        # Critical low: < 1 day supply (triggers urgent/critical PO reasoning)
        target_total_stock = calculate_realistic_stock_level(
            ingredient.sku,
            make_critical=True
        )
        num_batches = 1  # Single nearly-empty batch for critical items
    else:
        target_total_stock = float(ingredient.reorder_point or 50.0) * 3.0
        # Normal low stock: 1-4 days supply (creates urgency but not critical)
        target_total_stock = calculate_realistic_stock_level(
            ingredient.sku,
            make_critical=False
        )

    # Distribute total stock across batches
    batch_quantities = []

@@ -325,6 +325,9 @@ async def get_orchestration_summary(
    try:
        po_data = await procurement_client.get_pending_purchase_orders(tenant_id, limit=10)
        if po_data and isinstance(po_data, list):
            # Override stale orchestration count with actual real-time PO count
            summary["purchaseOrdersCreated"] = len(po_data)
            summary["userActionsRequired"] = len(po_data)  # Update actions required to match actual pending POs
            summary["purchaseOrdersSummary"] = [
                PurchaseOrderSummary(
                    supplierName=po.get("supplier_name", "Unknown"),

@@ -341,6 +344,8 @@ async def get_orchestration_summary(
        batch_data = await production_client.get_todays_batches(tenant_id)
        if batch_data:
            batches = batch_data.get("batches", [])
            # Override stale orchestration count with actual real-time batch count
            summary["productionBatchesCreated"] = len(batches)
            summary["productionBatchesSummary"] = [
                ProductionBatchSummary(
                    productName=batch.get("product_name", "Unknown"),

264 services/orchestrator/app/api/internal_demo.py Normal file
@@ -0,0 +1,264 @@
"""
Internal Demo API Endpoints for Orchestrator Service
Used by demo_session service to clone data for virtual demo tenants
"""

from fastapi import APIRouter, Depends, HTTPException, Header
from typing import Dict, Any
from uuid import UUID
import structlog
import os

from app.core.database import get_db
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select, delete, func
from app.models.orchestration_run import OrchestrationRun
import uuid
from datetime import datetime, timezone, timedelta
from typing import Optional

router = APIRouter()
logger = structlog.get_logger()

# Internal API key for service-to-service communication
INTERNAL_API_KEY = os.getenv("INTERNAL_API_KEY", "dev-internal-key-change-in-production")


def verify_internal_api_key(x_internal_api_key: str = Header(...)):
    """Verify internal API key for service-to-service communication"""
    if x_internal_api_key != INTERNAL_API_KEY:
        raise HTTPException(status_code=403, detail="Invalid internal API key")
    return True


@router.post("/internal/demo/clone")
async def clone_demo_data(
    base_tenant_id: str,
    virtual_tenant_id: str,
    demo_account_type: str,
    session_id: Optional[str] = None,
    session_created_at: Optional[str] = None,
    db: AsyncSession = Depends(get_db),
    _: bool = Depends(verify_internal_api_key)
):
    """
    Clone orchestration run demo data from base tenant to virtual tenant

    This endpoint is called by the demo_session service during session initialization.
    It clones orchestration runs with date adjustments to make them appear recent.
    """

    start_time = datetime.now(timezone.utc)

    # Parse session_created_at or use current time
    if session_created_at:
        try:
            reference_time = datetime.fromisoformat(session_created_at.replace('Z', '+00:00'))
        except ValueError:  # fall back if the timestamp is malformed
            reference_time = datetime.now(timezone.utc)
    else:
        reference_time = datetime.now(timezone.utc)

    logger.info(
        "Starting orchestration runs cloning with date adjustment",
        base_tenant_id=base_tenant_id,
        virtual_tenant_id=virtual_tenant_id,
        demo_account_type=demo_account_type,
        session_id=session_id,
        reference_time=reference_time.isoformat()
    )

    try:
        base_uuid = uuid.UUID(base_tenant_id)
        virtual_uuid = uuid.UUID(virtual_tenant_id)

        # Fetch base tenant orchestration runs
        # Get all completed and partial_success runs from the base tenant
        result = await db.execute(
            select(OrchestrationRun)
            .where(OrchestrationRun.tenant_id == base_uuid)
            .order_by(OrchestrationRun.started_at.desc())
            .limit(10)  # Clone last 10 runs for demo
        )
        base_runs = list(result.scalars().all())

        runs_cloned = 0

        # Clone each orchestration run with date adjustment
        for base_run in base_runs:
            # Calculate the time offset: how old this run was relative to the fixed reference date
            # All timestamps are then re-expressed relative to the session creation time
            if base_run.started_at:
                # Use a fixed reference date for consistency
                reference_date = datetime(2025, 1, 15, 12, 0, 0, tzinfo=timezone.utc)
                time_offset = base_run.started_at - reference_date

                # Apply this offset to the current reference time
                new_started_at = reference_time + time_offset
            else:
                new_started_at = reference_time - timedelta(hours=2)

            # Adjust completed_at if it exists
            if base_run.completed_at and base_run.started_at:
                duration = base_run.completed_at - base_run.started_at
                new_completed_at = new_started_at + duration
            else:
                new_completed_at = None

            # Adjust all step timestamps proportionally
            def adjust_timestamp(original_timestamp):
                if not original_timestamp or not base_run.started_at:
                    return None
                step_offset = original_timestamp - base_run.started_at
                return new_started_at + step_offset

            # Create new orchestration run for virtual tenant
            new_run = OrchestrationRun(
                id=uuid.uuid4(),
                tenant_id=virtual_uuid,
                run_number=f"{base_run.run_number}-DEMO",
                status=base_run.status,
                run_type=base_run.run_type,
                priority=base_run.priority,
                started_at=new_started_at,
                completed_at=new_completed_at,
                duration_seconds=base_run.duration_seconds,

                # Forecasting step
                forecasting_started_at=adjust_timestamp(base_run.forecasting_started_at),
                forecasting_completed_at=adjust_timestamp(base_run.forecasting_completed_at),
                forecasting_status=base_run.forecasting_status,
                forecasting_error=base_run.forecasting_error,

                # Production step
                production_started_at=adjust_timestamp(base_run.production_started_at),
                production_completed_at=adjust_timestamp(base_run.production_completed_at),
                production_status=base_run.production_status,
                production_error=base_run.production_error,

                # Procurement step
                procurement_started_at=adjust_timestamp(base_run.procurement_started_at),
                procurement_completed_at=adjust_timestamp(base_run.procurement_completed_at),
                procurement_status=base_run.procurement_status,
                procurement_error=base_run.procurement_error,

                # Notification step
                notification_started_at=adjust_timestamp(base_run.notification_started_at),
                notification_completed_at=adjust_timestamp(base_run.notification_completed_at),
                notification_status=base_run.notification_status,
                notification_error=base_run.notification_error,

                # AI Insights (if present on the model)
                ai_insights_started_at=adjust_timestamp(base_run.ai_insights_started_at) if hasattr(base_run, 'ai_insights_started_at') else None,
                ai_insights_completed_at=adjust_timestamp(base_run.ai_insights_completed_at) if hasattr(base_run, 'ai_insights_completed_at') else None,
                ai_insights_status=base_run.ai_insights_status if hasattr(base_run, 'ai_insights_status') else None,
                ai_insights_generated=base_run.ai_insights_generated if hasattr(base_run, 'ai_insights_generated') else None,
                ai_insights_posted=base_run.ai_insights_posted if hasattr(base_run, 'ai_insights_posted') else None,

                # Results summary
                forecasts_generated=base_run.forecasts_generated,
                production_batches_created=base_run.production_batches_created,
                procurement_plans_created=base_run.procurement_plans_created,
                purchase_orders_created=base_run.purchase_orders_created,
                notifications_sent=base_run.notifications_sent,

                # Performance metrics
                fulfillment_rate=base_run.fulfillment_rate,
                on_time_delivery_rate=base_run.on_time_delivery_rate,
                cost_accuracy=base_run.cost_accuracy,
                quality_score=base_run.quality_score,

                # Data
                forecast_data=base_run.forecast_data,
                run_metadata=base_run.run_metadata,

                # Metadata
                triggered_by=base_run.triggered_by,
                created_at=reference_time,
                updated_at=reference_time
            )

            db.add(new_run)
            await db.flush()
            runs_cloned += 1

        await db.commit()

        duration_ms = int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000)

        logger.info(
            "Orchestration runs cloned successfully",
            virtual_tenant_id=str(virtual_tenant_id),
            runs_cloned=runs_cloned,
            duration_ms=duration_ms
        )

        return {
            "service": "orchestrator",
            "status": "completed",
            "success": True,
            "records_cloned": runs_cloned,
            "runs_cloned": runs_cloned,
            "duration_ms": duration_ms
        }

    except Exception as e:
        logger.error("Failed to clone orchestration runs", error=str(e), exc_info=True)
        await db.rollback()
        raise HTTPException(status_code=500, detail=f"Failed to clone orchestration runs: {str(e)}")


@router.delete("/internal/demo/tenant/{virtual_tenant_id}")
async def delete_demo_data(
    virtual_tenant_id: str,
    db: AsyncSession = Depends(get_db),
    _: bool = Depends(verify_internal_api_key)
):
    """Delete all orchestration runs for a virtual demo tenant"""
    logger.info("Deleting orchestration runs for virtual tenant", virtual_tenant_id=virtual_tenant_id)
    start_time = datetime.now(timezone.utc)

    try:
        virtual_uuid = uuid.UUID(virtual_tenant_id)

        # Count records
        run_count = await db.scalar(
            select(func.count(OrchestrationRun.id))
            .where(OrchestrationRun.tenant_id == virtual_uuid)
        )

        # Delete orchestration runs
        await db.execute(
            delete(OrchestrationRun)
            .where(OrchestrationRun.tenant_id == virtual_uuid)
        )
        await db.commit()

        duration_ms = int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000)
        logger.info(
            "Orchestration runs deleted successfully",
            virtual_tenant_id=virtual_tenant_id,
            duration_ms=duration_ms
        )

        return {
            "service": "orchestrator",
            "status": "deleted",
            "virtual_tenant_id": virtual_tenant_id,
            "records_deleted": {
                "orchestration_runs": run_count,
                "total": run_count
            },
            "duration_ms": duration_ms
        }
    except Exception as e:
        logger.error("Failed to delete orchestration runs", error=str(e), exc_info=True)
        await db.rollback()
        raise HTTPException(status_code=500, detail=str(e))


@router.get("/internal/demo/clone/health")
async def health_check(_: bool = Depends(verify_internal_api_key)):
    """Health check for demo cloning endpoint"""
    return {"status": "healthy", "service": "orchestrator"}
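For reference, a sketch of how the demo_session service might call these endpoints; the header name follows FastAPI's underscore-to-hyphen mapping of the x_internal_api_key parameter, and the demo_account_type value is a placeholder:

import os
import httpx

async def trigger_orchestrator_clone(base_tenant_id: str, virtual_tenant_id: str) -> dict:
    base_url = os.getenv("ORCHESTRATOR_SERVICE_URL", "http://orchestrator-service:8000")
    async with httpx.AsyncClient(timeout=15.0) as client:
        resp = await client.post(
            f"{base_url}/internal/demo/clone",
            # bare str parameters on the endpoint are query parameters in FastAPI
            params={
                "base_tenant_id": base_tenant_id,
                "virtual_tenant_id": virtual_tenant_id,
                "demo_account_type": "bakery",  # placeholder value
            },
            headers={"x-internal-api-key": os.getenv("INTERNAL_API_KEY", "dev-internal-key-change-in-production")},
        )
        resp.raise_for_status()
        return resp.json()
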
@@ -99,8 +99,8 @@ service.add_router(orchestration_router)
service.add_router(dashboard_router)

# INTERNAL: Service-to-service endpoints
# from app.api import internal_demo
# service.add_router(internal_demo.router)
from app.api import internal_demo
service.add_router(internal_demo.router)


@app.middleware("http")

@@ -417,14 +417,38 @@ class DashboardService:
        # Get reasoning type and convert to i18n key
        reasoning_type = reasoning_data.get('type', 'inventory_replenishment')
        reasoning_type_i18n_key = self._get_reasoning_type_i18n_key(reasoning_type, context="purchaseOrder")

        # Check if enhanced mode (has product_details with supply chain intelligence)
        is_enhanced_mode = reasoning_data.get('metadata', {}).get('enhanced_mode', False)

        # Use enhanced i18n key if available
        if is_enhanced_mode and reasoning_type == 'low_stock_detection':
            reasoning_type_i18n_key = "reasoning.purchaseOrder.low_stock_detection_detailed"
        else:
            reasoning_type_i18n_key = self._get_reasoning_type_i18n_key(reasoning_type, context="purchaseOrder")

        # Preprocess parameters for i18n - MUST create a copy to avoid modifying immutable database objects
        params = dict(reasoning_data.get('parameters', {}))

        # Convert product_names array to product_names_joined string
        if 'product_names' in params and isinstance(params['product_names'], list):
            params['product_names_joined'] = ', '.join(params['product_names'])

        # Convert critical_products array to indexed params and joined string for i18n
        if 'critical_products' in params and isinstance(params['critical_products'], list):
            critical_prods = params['critical_products']
            # Add indexed params for select/plural statements
            for i, prod in enumerate(critical_prods[:3]):  # Limit to first 3
                params[f'critical_products_{i}'] = prod
            params['critical_products_joined'] = ', '.join(critical_prods)

        # Convert affected_batches array to indexed params for i18n
        if 'affected_batches' in params and isinstance(params['affected_batches'], list):
            batches = params['affected_batches']
            for i, batch in enumerate(batches[:3]):  # Limit to first 3
                params[f'affected_batches_{i}'] = batch
            params['affected_batches_joined'] = ', '.join(batches)

        actions.append({
            "id": po["id"],
            "type": ActionType.APPROVE_PO,

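The indexed-parameter convention above flattens arrays into keys that ICU-style select/plural messages can reference directly. For example (illustrative values):

params = {"critical_products": ["Harina de Trigo T55", "Levadura Seca"]}
# after preprocessing:
#   params["critical_products_0"]      == "Harina de Trigo T55"
#   params["critical_products_1"]      == "Levadura Seca"
#   params["critical_products_joined"] == "Harina de Trigo T55, Levadura Seca"
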
@@ -594,7 +618,8 @@ class DashboardService:
            if actual_start and planned_end:
                total_duration = (planned_end - actual_start).total_seconds()
                elapsed = (now - actual_start).total_seconds()
                progress = min(int((elapsed / total_duration) * 100), 99)
                # Ensure progress is never negative (defensive programming)
                progress = max(0, min(int((elapsed / total_duration) * 100), 99))
            else:
                progress = 50
            status_icon = "🔄"

@@ -604,10 +629,12 @@
                "params": {}
            }
        else:
            # PENDING, SCHEDULED, or any other status
            progress = 0
            status_icon = "⏰"
            status_text = "PENDING"
            status_text = status  # Use actual status
            status_i18n = {
                "key": "production.status.pending",
                "key": f"production.status.{status.lower()}",
                "params": {}
            }

@@ -301,6 +301,7 @@ async def clone_demo_data(
        notes=order.notes if hasattr(order, 'notes') else None,
        internal_notes=order.internal_notes if hasattr(order, 'internal_notes') else None,
        terms_and_conditions=order.terms_and_conditions if hasattr(order, 'terms_and_conditions') else None,
        reasoning_data=order.reasoning_data if hasattr(order, 'reasoning_data') else None,  # Clone reasoning for JTBD dashboard
        created_at=session_time,
        updated_at=session_time,
        created_by=system_user_id,

@@ -21,6 +21,7 @@ import random
from datetime import datetime, timezone, timedelta, date
from pathlib import Path
from decimal import Decimal
from typing import List, Dict, Any

# Add app to path
sys.path.insert(0, str(Path(__file__).parent.parent.parent))
@@ -60,6 +61,48 @@ BASE_SUPPLIER_IDS = [
    uuid.UUID("40000000-0000-0000-0000-000000000005"),  # Lesaffre Ibérica (low trust)
]

# Supplier lead times (days) for realistic supply chain modeling
SUPPLIER_LEAD_TIMES = {
    "Molinos San José S.L.": 2,   # 2-day delivery (trusted, local)
    "Lácteos del Valle S.A.": 3,  # 3-day delivery (regional)
    "Lesaffre Ibérica": 4         # 4-day delivery (national)
}

# Daily consumption rates (kg/day) for realistic stock depletion modeling
# These match real bakery production needs
DAILY_CONSUMPTION_RATES = {
    "Harina de Trigo T55": 50.0,
    "Harina Integral Ecológica": 15.0,
    "Mantequilla sin Sal 82% MG": 8.0,
    "Huevos Frescos Categoría A": 100.0,  # units, not kg, but modeled as kg for consistency
    "Levadura Seca": 2.5,
    "Sal Fina": 3.0,
    "Aceite de Oliva Virgen": 5.0,
    "Azúcar Moreno": 6.0,
    "Semillas de Girasol": 2.0,
    "Miel de Azahar": 1.5,
    "Chocolate Negro 70%": 4.0,
    "Nueces Peladas": 3.5,
    "Pasas Sultanas": 2.5
}

# Reorder points (kg) - when to trigger PO
REORDER_POINTS = {
    "Harina de Trigo T55": 150.0,  # Critical ingredient
    "Harina Integral Ecológica": 50.0,
    "Mantequilla sin Sal 82% MG": 25.0,
    "Huevos Frescos Categoría A": 300.0,
    "Levadura Seca": 10.0,
    "Sal Fina": 20.0,
    "Aceite de Oliva Virgen": 15.0,
    "Azúcar Moreno": 20.0,
    "Semillas de Girasol": 10.0,
    "Miel de Azahar": 5.0,
    "Chocolate Negro 70%": 15.0,
    "Nueces Peladas": 12.0,
    "Pasas Sultanas": 10.0
}

def get_demo_supplier_ids(tenant_id: uuid.UUID):
    """
    Generate tenant-specific supplier IDs using XOR strategy with hardcoded base IDs.
@@ -96,6 +139,106 @@ def get_demo_supplier_ids(tenant_id: uuid.UUID):
    return suppliers

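# The XOR derivation itself is elided by the hunk above; a sketch of the
# general idea (an assumption, not this script's actual code) would be:
#
#     derived_id = uuid.UUID(int=base_id.int ^ tenant_id.int)
#
# which yields deterministic per-tenant supplier IDs from the hardcoded base IDs.
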
def get_simulated_stock_level(product_name: str, make_critical: bool = False) -> float:
    """
    Simulate current stock level for demo purposes

    Args:
        product_name: Name of the product
        make_critical: If True, create critically low stock (< 1 day)

    Returns:
        Simulated current stock in kg
    """
    daily_consumption = DAILY_CONSUMPTION_RATES.get(product_name, 5.0)

    if make_critical:
        # Critical: 0.5-6 hours worth of stock
        return round(daily_consumption * random.uniform(0.02, 0.25), 2)
    else:
        # Normal low stock: 1-3 days worth
        return round(daily_consumption * random.uniform(1.0, 3.0), 2)


def calculate_product_urgency(
    product_name: str,
    current_stock: float,
    supplier_lead_time_days: int,
    reorder_point: float = None
) -> Dict[str, Any]:
    """
    Calculate urgency metrics for a product based on supply chain dynamics

    Args:
        product_name: Name of the product
        current_stock: Current stock level in kg
        supplier_lead_time_days: Supplier delivery lead time in days
        reorder_point: Reorder point threshold (optional)

    Returns:
        Dictionary with urgency metrics
    """
    daily_consumption = DAILY_CONSUMPTION_RATES.get(product_name, 5.0)
    reorder_pt = reorder_point or REORDER_POINTS.get(product_name, 50.0)

    # Calculate days until depletion
    if daily_consumption > 0:
        days_until_depletion = current_stock / daily_consumption
    else:
        days_until_depletion = 999.0

    # Calculate safety margin (days until depletion - supplier lead time)
    safety_margin_days = days_until_depletion - supplier_lead_time_days

    # Determine criticality based on safety margin
    if safety_margin_days <= 0:
        criticality = "critical"  # Already late or will run out before delivery!
        order_urgency_reason = f"Stock depletes in {round(days_until_depletion, 1)} days, but delivery takes {supplier_lead_time_days} days"
    elif safety_margin_days <= 0.5:
        criticality = "urgent"  # Must order TODAY
        order_urgency_reason = f"Only {round(safety_margin_days * 24, 1)} hours margin before stockout"
    elif safety_margin_days <= 1:
        criticality = "important"  # Should order today
        order_urgency_reason = f"Only {round(safety_margin_days, 1)} day margin"
    else:
        criticality = "normal"
        order_urgency_reason = "Standard replenishment"

    return {
        "product_name": product_name,
        "current_stock_kg": round(current_stock, 2),
        "daily_consumption_kg": round(daily_consumption, 2),
        "days_until_depletion": round(days_until_depletion, 2),
        "reorder_point_kg": round(reorder_pt, 2),
        "safety_stock_days": 3,  # Standard 3-day safety stock
        "safety_margin_days": round(safety_margin_days, 2),
        "criticality": criticality,
        "urgency_reason": order_urgency_reason
    }

def determine_overall_po_urgency(product_details: List[Dict[str, Any]]) -> str:
    """
    Determine overall PO urgency based on most critical product

    Args:
        product_details: List of product urgency dictionaries

    Returns:
        Overall urgency: "critical", "urgent", "important", or "normal"
    """
    criticalities = [p.get("criticality", "normal") for p in product_details]

    if "critical" in criticalities:
        return "critical"
    elif "urgent" in criticalities:
        return "urgent"
    elif "important" in criticalities:
        return "important"
    else:
        return "normal"


async def create_purchase_order(
    db: AsyncSession,
    tenant_id: uuid.UUID,

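To see the urgency thresholds in action (hypothetical numbers, using the constants defined above): 50 kg of "Harina de Trigo T55" consumed at 50 kg/day depletes in exactly 1.0 day; with the 2-day lead time of Molinos San José S.L., the safety margin is -1.0 day, which lands in the "critical" branch:

info = calculate_product_urgency("Harina de Trigo T55", current_stock=50.0, supplier_lead_time_days=2)
assert info["criticality"] == "critical"
assert info["safety_margin_days"] == -1.0
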
@@ -131,7 +274,7 @@ async def create_purchase_order(
    # Generate reasoning for JTBD dashboard (if columns exist after migration)
    days_until_delivery = (required_delivery - created_at).days

    # Generate structured reasoning_data for i18n support
    # Generate structured reasoning_data with supply chain intelligence
    reasoning_data = None

    try:
@@ -142,18 +285,57 @@ async def create_purchase_order(
        if not product_names:
            product_names = ["Demo Product"]

        # Get supplier lead time
        supplier_lead_time = SUPPLIER_LEAD_TIMES.get(supplier.name, 3)

        if status == PurchaseOrderStatus.pending_approval:
            # Low stock detection reasoning
            days_until_stockout = days_until_delivery + 2
            # Enhanced low stock detection with per-product urgency analysis
            product_details = []
            estimated_loss = 0.0

            for i, item in enumerate(items_list):
                product_name = item.get('name', item.get('product_name', f"Product {i+1}"))

                # Simulate current stock - make the first item critical for demo impact
                make_critical = (i == 0) and (priority == "urgent")
                current_stock = get_simulated_stock_level(product_name, make_critical=make_critical)

                # Calculate product-specific urgency
                urgency_info = calculate_product_urgency(
                    product_name=product_name,
                    current_stock=current_stock,
                    supplier_lead_time_days=supplier_lead_time,
                    reorder_point=item.get('reorder_point')
                )

                product_details.append(urgency_info)

                # Estimate production loss for critical items
                if urgency_info["criticality"] in ["critical", "urgent"]:
                    # Rough estimate: lost production value
                    estimated_loss += item.get("unit_price", 1.0) * item.get("quantity", 10) * 1.5

            # Determine overall urgency
            overall_urgency = determine_overall_po_urgency(product_details)

            # Find affected production batches (demo: simulate batch names)
            affected_batches = []
            critical_products = [p for p in product_details if p["criticality"] in ["critical", "urgent"]]
            if critical_products:
                # Simulate batch numbers that would be affected
                affected_batches = ["BATCH-TODAY-001", "BATCH-TODAY-002"] if overall_urgency == "critical" else \
                                   ["BATCH-TOMORROW-001"] if overall_urgency == "urgent" else []

            # Create enhanced reasoning with detailed supply chain intelligence
            reasoning_data = create_po_reasoning_low_stock(
                supplier_name=supplier.name,
                product_names=product_names,
                current_stock=random.uniform(20, 50),  # Demo: low stock
                required_stock=random.uniform(100, 200),  # Demo: needed stock
                days_until_stockout=days_until_stockout,
                threshold_percentage=20,
                affected_products=product_names[:2] if len(product_names) > 1 else product_names,
                estimated_lost_orders=random.randint(5, 15) if days_until_stockout <= 3 else None
                product_names=product_names,  # Legacy compatibility
                # Enhanced parameters
                product_details=product_details,
                supplier_lead_time_days=supplier_lead_time,
                order_urgency=overall_urgency,
                affected_production_batches=affected_batches,
                estimated_production_loss_eur=estimated_loss if estimated_loss > 0 else None
            )
        elif auto_approved:
            # Supplier contract/auto-approval reasoning

@@ -165,6 +347,7 @@ async def create_purchase_order(
            )
        except Exception as e:
            logger.warning(f"Failed to generate reasoning_data: {e}")
            logger.exception(e)
            pass

    # Create PO

@@ -298,17 +481,17 @@ async def seed_purchase_orders_for_tenant(db: AsyncSession, tenant_id: uuid.UUID
    )
    pos_created.append(po2)

    # 3. PENDING_APPROVAL - Large amount (created yesterday)
    # 3. PENDING_APPROVAL - URGENT: Critical stock for tomorrow's Croissant production
    po3 = await create_purchase_order(
        db, tenant_id, supplier_medium_trust,
        db, tenant_id, supplier_high_trust,
        PurchaseOrderStatus.pending_approval,
        Decimal("250.00"),
        created_offset_days=-1,
        priority="normal",
        Decimal("450.00"),
        created_offset_days=0,
        priority="urgent",
        items_data=[
            {"name": "Harina de Fuerza T65", "quantity": 500, "unit_price": 0.95, "uom": "kg"},
            {"name": "Mantequilla Premium", "quantity": 80, "unit_price": 5.20, "uom": "kg"},
            {"name": "Huevos Categoría A", "quantity": 600, "unit_price": 0.22, "uom": "unidad"}
            {"name": "Harina de Trigo T55", "quantity": 100, "unit_price": 0.85, "uom": "kg"},
            {"name": "Mantequilla sin Sal 82% MG", "quantity": 30, "unit_price": 6.50, "uom": "kg"},
            {"name": "Huevos Frescos Categoría A", "quantity": 200, "unit_price": 0.25, "uom": "unidad"}
        ]
    )
    pos_created.append(po3)

@@ -593,6 +593,105 @@
      "production_notes": "Planificado para mañana",
      "quality_notes": null,
      "equipment_used": ["50000000-0000-0000-0000-000000000001"]
    },
    {
      "id": "40000000-0000-0000-0000-999999999001",
      "batch_number": "BATCH-TODAY-001",
      "product_id": "20000000-0000-0000-0000-000000000002",
      "product_name": "Croissant de Mantequilla Artesanal",
      "recipe_id": "30000000-0000-0000-0000-000000000002",
      "planned_start_offset_days": 0,
      "planned_start_hour": 6,
      "planned_start_minute": 0,
      "planned_duration_minutes": 240,
      "planned_quantity": 120.0,
      "actual_quantity": null,
      "status": "PENDING",
      "priority": "HIGH",
      "current_process_stage": null,
      "yield_percentage": null,
      "quality_score": null,
      "waste_quantity": null,
      "defect_quantity": null,
      "waste_defect_type": null,
      "estimated_cost": 280.00,
      "actual_cost": null,
      "labor_cost": null,
      "material_cost": null,
      "overhead_cost": null,
      "station_id": "STATION-02",
      "is_rush_order": false,
      "is_special_recipe": false,
      "is_ai_assisted": true,
      "production_notes": "Lote programado para hoy - Demanda prevista alta",
      "quality_notes": null,
      "equipment_used": ["50000000-0000-0000-0000-000000000002", "50000000-0000-0000-0000-000000000001"]
    },
    {
      "id": "40000000-0000-0000-0000-999999999002",
      "batch_number": "BATCH-TODAY-002",
      "product_id": "20000000-0000-0000-0000-000000000001",
      "product_name": "Baguette Francesa Tradicional",
      "recipe_id": "30000000-0000-0000-0000-000000000001",
      "planned_start_offset_days": 0,
      "planned_start_hour": 8,
      "planned_start_minute": 30,
      "planned_duration_minutes": 165,
      "planned_quantity": 100.0,
      "actual_quantity": null,
      "status": "PENDING",
      "priority": "MEDIUM",
      "current_process_stage": null,
      "yield_percentage": null,
      "quality_score": null,
      "waste_quantity": null,
      "defect_quantity": null,
      "waste_defect_type": null,
      "estimated_cost": 150.00,
      "actual_cost": null,
      "labor_cost": null,
      "material_cost": null,
      "overhead_cost": null,
      "station_id": "STATION-01",
      "is_rush_order": false,
      "is_special_recipe": false,
      "is_ai_assisted": true,
      "production_notes": "Producción diaria programada",
      "quality_notes": null,
      "equipment_used": ["50000000-0000-0000-0000-000000000001"]
    },
    {
      "id": "40000000-0000-0000-0000-999999999003",
      "batch_number": "BATCH-TODAY-003",
      "product_id": "20000000-0000-0000-0000-000000000003",
      "product_name": "Pan de Pueblo con Masa Madre",
      "recipe_id": "30000000-0000-0000-0000-000000000003",
      "planned_start_offset_days": 0,
      "planned_start_hour": 10,
      "planned_start_minute": 0,
      "planned_duration_minutes": 300,
      "planned_quantity": 60.0,
      "actual_quantity": null,
      "status": "PENDING",
      "priority": "MEDIUM",
      "current_process_stage": null,
      "yield_percentage": null,
      "quality_score": null,
      "waste_quantity": null,
      "defect_quantity": null,
      "waste_defect_type": null,
      "estimated_cost": 180.00,
      "actual_cost": null,
      "labor_cost": null,
      "material_cost": null,
      "overhead_cost": null,
      "station_id": "STATION-01",
      "is_rush_order": false,
      "is_special_recipe": true,
      "is_ai_assisted": true,
      "production_notes": "Masa madre preparada ayer - Listo para horneado",
      "quality_notes": null,
      "equipment_used": ["50000000-0000-0000-0000-000000000001"]
    }
  ]
}

@@ -37,6 +37,8 @@ DEMO_TENANT_SAN_PABLO = uuid.UUID("a1b2c3d4-e5f6-47a8-b9c0-d1e2f3a4b5c6") # Ind
DEMO_TENANT_LA_ESPIGA = uuid.UUID("b2c3d4e5-f6a7-48b9-c0d1-e2f3a4b5c6d7")  # Central bakery

# Base reference date for date calculations
# MUST match shared/utils/demo_dates.py for proper demo session cloning
# This fixed date allows demo sessions to adjust all dates relative to session creation time
BASE_REFERENCE_DATE = datetime(2025, 1, 15, 12, 0, 0, tzinfo=timezone.utc)

@@ -140,7 +142,19 @@ async def seed_batches_for_tenant(
        actual_end = actual_start + timedelta(minutes=actual_duration)
        completed_at = actual_end
    elif batch_data["status"] == "IN_PROGRESS":
        actual_start = planned_start
        # For IN_PROGRESS batches, set actual_start to a recent time to ensure a valid progress calculation
        # If planned_start is in the past, derive a start that puts the batch ~30% through; otherwise start it 30 minutes ago
        now = datetime.now(timezone.utc)
        if planned_start < now:
            # Pick an elapsed time that makes the batch ~30% complete, capped by the real elapsed time
            elapsed_time_minutes = min(
                int(batch_data["planned_duration_minutes"] * 0.3),
                int((now - planned_start).total_seconds() / 60)
            )
            actual_start = now - timedelta(minutes=elapsed_time_minutes)
        else:
            # If planned_start is in the future, start the batch 30 minutes ago
            actual_start = now - timedelta(minutes=30)
        actual_duration = None
        actual_end = None

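A quick check of the ~30% rule above (hypothetical values): for a 240-minute batch whose planned start was two hours ago, elapsed_time_minutes = min(int(240 * 0.3), 120) = 72, so actual_start lands 72 minutes before now and the dashboard's progress calculation comes out near 30%:

from datetime import datetime, timezone, timedelta

now = datetime.now(timezone.utc)
planned_start = now - timedelta(hours=2)  # hypothetical
planned_duration_minutes = 240

elapsed = min(int(planned_duration_minutes * 0.3),
              int((now - planned_start).total_seconds() / 60))
assert elapsed == 72  # 30% of 240 minutes, capped by the actual elapsed time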