demo seed change

Urtzi Alfaro
2025-12-13 23:57:54 +01:00
parent f3688dfb04
commit ff830a3415
299 changed files with 20328 additions and 19485 deletions

View File

@@ -3,11 +3,9 @@
from .procurement_plans import router as procurement_plans_router
from .purchase_orders import router as purchase_orders_router
from .replenishment import router as replenishment_router
from .internal_demo import router as internal_demo_router
__all__ = [
"procurement_plans_router",
"purchase_orders_router",
"replenishment_router",
"internal_demo_router"
"procurement_plans_router",
"purchase_orders_router",
"replenishment_router"
]

View File

@@ -91,8 +91,11 @@ async def get_expected_deliveries(
# Add date filters
if include_overdue:
# Include any delivery from past until end_date
# Include deliveries from last 48 hours (recent overdue) until end_date
# This ensures we only show truly recent overdue deliveries, not ancient history
start_date = now - timedelta(hours=48)
query = query.where(
PurchaseOrder.expected_delivery_date >= start_date,
PurchaseOrder.expected_delivery_date <= end_date
)
else:
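The net effect of the new filter: a purchase order is surfaced only if its expected delivery falls between 48 hours ago and the query horizon. A minimal standalone sketch of the same window as a predicate, assuming timezone-aware datetimes and the days_ahead parameter used elsewhere in this service:

from datetime import datetime, timedelta, timezone

def in_delivery_window(expected: datetime, days_ahead: int = 7) -> bool:
    # Mirrors the query filter: at most 48h overdue, up to the look-ahead horizon.
    now = datetime.now(timezone.utc)
    return now - timedelta(hours=48) <= expected <= now + timedelta(days=days_ahead)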
@@ -149,13 +152,22 @@ async def get_expected_deliveries(
# Default delivery window is 4 hours
delivery_window_hours = 4
# Ensure expected delivery date is timezone-aware and in UTC format
expected_delivery_utc = po.expected_delivery_date
if expected_delivery_utc and expected_delivery_utc.tzinfo is None:
# If naive datetime, assume it's UTC (this shouldn't happen with proper DB setup)
expected_delivery_utc = expected_delivery_utc.replace(tzinfo=timezone.utc)
elif expected_delivery_utc and expected_delivery_utc.tzinfo is not None:
# Convert to UTC if it's in another timezone
expected_delivery_utc = expected_delivery_utc.astimezone(timezone.utc)
delivery_dict = {
"po_id": str(po.id),
"po_number": po.po_number,
"supplier_id": str(po.supplier_id),
"supplier_name": supplier_name,
"supplier_phone": supplier_phone,
"expected_delivery_date": po.expected_delivery_date.isoformat(),
"expected_delivery_date": expected_delivery_utc.isoformat() if expected_delivery_utc else None,
"delivery_window_hours": delivery_window_hours,
"status": po.status.value,
"line_items": line_items,
@@ -187,4 +199,4 @@ async def get_expected_deliveries(
tenant_id=tenant_id,
exc_info=True
)
raise HTTPException(status_code=500, detail="Internal server error")
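The normalization above (treat naive datetimes as UTC, convert aware ones) is self-contained enough to factor out; a minimal helper sketch:

from datetime import datetime, timezone
from typing import Optional

def to_utc(dt: Optional[datetime]) -> Optional[datetime]:
    # Naive datetimes are assumed to already be in UTC; aware ones are converted.
    if dt is None:
        return None
    if dt.tzinfo is None:
        return dt.replace(tzinfo=timezone.utc)
    return dt.astimezone(timezone.utc)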

View File

@@ -162,7 +162,7 @@ async def get_expected_deliveries(
"supplier_id": str(po.supplier_id),
"supplier_name": supplier_name,
"supplier_phone": supplier_phone,
"expected_delivery_date": po.expected_delivery_date.isoformat(),
"expected_delivery_date": po.expected_delivery_date.isoformat() if po.expected_delivery_date else None,
"delivery_window_hours": delivery_window_hours,
"status": po.status.value,
"line_items": line_items,

View File

@@ -3,6 +3,9 @@ Internal API for triggering delivery tracking alerts.
Used by demo session cloning to generate realistic late delivery alerts.
Moved from orchestrator service to procurement service (domain ownership).
URL Pattern: /api/v1/tenants/{tenant_id}/procurement/internal/delivery-tracking/trigger
This follows the tenant-scoped pattern so the gateway can proxy correctly.
"""
from fastapi import APIRouter, HTTPException, Request, Path
@@ -14,7 +17,8 @@ logger = structlog.get_logger()
router = APIRouter()
@router.post("/api/internal/delivery-tracking/trigger/{tenant_id}")
# New URL pattern: tenant-scoped so gateway proxies to procurement service correctly
@router.post("/api/v1/tenants/{tenant_id}/procurement/internal/delivery-tracking/trigger")
async def trigger_delivery_tracking(
tenant_id: UUID = Path(..., description="Tenant ID to check deliveries for"),
request: Request = None
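With the tenant-scoped pattern the trigger is reachable through the same gateway prefix as other procurement routes. A hypothetical invocation (host and port are assumptions; the tenant ID is the professional demo tenant defined in the deleted seeding script further down):

import httpx

tenant_id = "a1b2c3d4-e5f6-47a8-b9c0-d1e2f3a4b5c6"
url = f"http://localhost:8000/api/v1/tenants/{tenant_id}/procurement/internal/delivery-tracking/trigger"
response = httpx.post(url)
print(response.status_code, response.json())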

View File

@@ -11,12 +11,14 @@ import uuid
from datetime import datetime, timezone, timedelta, date
from typing import Optional
import os
import json
from pathlib import Path
from app.core.database import get_db
from app.models.procurement_plan import ProcurementPlan, ProcurementRequirement
from app.models.purchase_order import PurchaseOrder, PurchaseOrderItem
from app.models.replenishment import ReplenishmentPlan, ReplenishmentPlanItem
from shared.utils.demo_dates import adjust_date_for_demo, BASE_REFERENCE_DATE
from shared.utils.demo_dates import adjust_date_for_demo, BASE_REFERENCE_DATE, resolve_time_marker
from shared.messaging import RabbitMQClient, UnifiedEventPublisher
from sqlalchemy.orm import selectinload
from shared.schemas.reasoning_types import (
@@ -53,10 +55,10 @@ async def clone_demo_data(
"""
Clone procurement service data for a virtual demo tenant
Clones:
- Procurement plans with requirements
Loads seed data from JSON files and creates:
- Purchase orders with line items
- Replenishment plans with items
- Procurement plans with requirements (if in seed data)
- Replenishment plans with items (if in seed data)
- Adjusts dates to recent timeframe
Args:
@@ -80,7 +82,7 @@ async def clone_demo_data(
session_time = start_time
logger.info(
"Starting procurement data cloning",
"Starting procurement data cloning from seed files",
base_tenant_id=base_tenant_id,
virtual_tenant_id=virtual_tenant_id,
demo_account_type=demo_account_type,
@@ -103,536 +105,332 @@ async def clone_demo_data(
"replenishment_items": 0
}
# Clone Procurement Plans with Requirements
result = await db.execute(
select(ProcurementPlan).where(ProcurementPlan.tenant_id == base_uuid)
)
base_plans = result.scalars().all()
logger.info(
"Found procurement plans to clone",
count=len(base_plans),
base_tenant=str(base_uuid)
)
# Calculate date offset for procurement
if base_plans:
max_plan_date = max(plan.plan_date for plan in base_plans if plan.plan_date)
today_date = date.today()
days_diff = (today_date - max_plan_date).days
plan_date_offset = timedelta(days=days_diff)
else:
plan_date_offset = timedelta(days=0)
plan_id_map = {}
for plan in base_plans:
new_plan_id = uuid.uuid4()
plan_id_map[plan.id] = new_plan_id
new_plan = ProcurementPlan(
id=new_plan_id,
tenant_id=virtual_uuid,
plan_number=f"PROC-{uuid.uuid4().hex[:8].upper()}",
plan_date=plan.plan_date + plan_date_offset if plan.plan_date else None,
plan_period_start=plan.plan_period_start + plan_date_offset if plan.plan_period_start else None,
plan_period_end=plan.plan_period_end + plan_date_offset if plan.plan_period_end else None,
planning_horizon_days=plan.planning_horizon_days,
status=plan.status,
plan_type=plan.plan_type,
priority=plan.priority,
business_model=plan.business_model,
procurement_strategy=plan.procurement_strategy,
total_requirements=plan.total_requirements,
total_estimated_cost=plan.total_estimated_cost,
total_approved_cost=plan.total_approved_cost,
cost_variance=plan.cost_variance,
created_at=session_time,
updated_at=session_time
)
db.add(new_plan)
stats["procurement_plans"] += 1
# Clone Procurement Requirements
for old_plan_id, new_plan_id in plan_id_map.items():
result = await db.execute(
select(ProcurementRequirement).where(ProcurementRequirement.plan_id == old_plan_id)
)
requirements = result.scalars().all()
for req in requirements:
new_req = ProcurementRequirement(
id=uuid.uuid4(),
plan_id=new_plan_id,
requirement_number=req.requirement_number,
product_id=req.product_id,
product_name=req.product_name,
product_sku=req.product_sku,
product_category=req.product_category,
product_type=req.product_type,
required_quantity=req.required_quantity,
unit_of_measure=req.unit_of_measure,
safety_stock_quantity=req.safety_stock_quantity,
total_quantity_needed=req.total_quantity_needed,
current_stock_level=req.current_stock_level,
reserved_stock=req.reserved_stock,
available_stock=req.available_stock,
net_requirement=req.net_requirement,
order_demand=req.order_demand,
production_demand=req.production_demand,
forecast_demand=req.forecast_demand,
buffer_demand=req.buffer_demand,
preferred_supplier_id=req.preferred_supplier_id,
backup_supplier_id=req.backup_supplier_id,
supplier_name=req.supplier_name,
supplier_lead_time_days=req.supplier_lead_time_days,
minimum_order_quantity=req.minimum_order_quantity,
estimated_unit_cost=req.estimated_unit_cost,
estimated_total_cost=req.estimated_total_cost,
last_purchase_cost=req.last_purchase_cost,
cost_variance=req.cost_variance,
required_by_date=req.required_by_date + plan_date_offset if req.required_by_date else None,
lead_time_buffer_days=req.lead_time_buffer_days,
suggested_order_date=req.suggested_order_date + plan_date_offset if req.suggested_order_date else None,
latest_order_date=req.latest_order_date + plan_date_offset if req.latest_order_date else None,
quality_specifications=req.quality_specifications,
special_requirements=req.special_requirements,
storage_requirements=req.storage_requirements,
shelf_life_days=req.shelf_life_days,
status=req.status,
priority=req.priority,
risk_level=req.risk_level,
purchase_order_id=req.purchase_order_id,
purchase_order_number=req.purchase_order_number,
ordered_quantity=req.ordered_quantity,
ordered_at=req.ordered_at,
expected_delivery_date=req.expected_delivery_date + plan_date_offset if req.expected_delivery_date else None,
actual_delivery_date=req.actual_delivery_date + plan_date_offset if req.actual_delivery_date else None,
received_quantity=req.received_quantity,
delivery_status=req.delivery_status,
fulfillment_rate=req.fulfillment_rate,
on_time_delivery=req.on_time_delivery,
quality_rating=req.quality_rating,
source_orders=req.source_orders,
source_production_batches=req.source_production_batches,
demand_analysis=req.demand_analysis,
approved_quantity=req.approved_quantity,
approved_cost=req.approved_cost,
approved_at=req.approved_at,
approved_by=req.approved_by,
procurement_notes=req.procurement_notes,
supplier_communication=req.supplier_communication,
requirement_metadata=req.requirement_metadata,
created_at=session_time,
updated_at=session_time
def parse_date_field(date_value, field_name="date"):
"""Parse date field, handling both ISO strings and BASE_TS markers"""
if not date_value:
return None
# Check if it's a BASE_TS marker
if isinstance(date_value, str) and date_value.startswith("BASE_TS"):
try:
return resolve_time_marker(date_value, session_time)
except ValueError as e:
logger.warning(
f"Invalid BASE_TS marker in {field_name}",
marker=date_value,
error=str(e)
)
return None
# Handle regular ISO date strings
try:
return adjust_date_for_demo(
datetime.fromisoformat(date_value.replace('Z', '+00:00')),
session_time,
BASE_REFERENCE_DATE
)
db.add(new_req)
stats["procurement_requirements"] += 1
except (ValueError, AttributeError) as e:
logger.warning(
f"Invalid date format in {field_name}",
date_value=date_value,
error=str(e)
)
return None
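Read contiguously (the diff interleaves two removed lines from the old cloning loop into its body), the new helper is:

def parse_date_field(date_value, field_name="date"):
    """Parse date field, handling both ISO strings and BASE_TS markers"""
    if not date_value:
        return None
    # BASE_TS markers are resolved relative to the demo session start
    if isinstance(date_value, str) and date_value.startswith("BASE_TS"):
        try:
            return resolve_time_marker(date_value, session_time)
        except ValueError as e:
            logger.warning(f"Invalid BASE_TS marker in {field_name}", marker=date_value, error=str(e))
            return None
    # Regular ISO date strings are shifted from BASE_REFERENCE_DATE to session time
    try:
        return adjust_date_for_demo(
            datetime.fromisoformat(date_value.replace('Z', '+00:00')),
            session_time,
            BASE_REFERENCE_DATE
        )
    except (ValueError, AttributeError) as e:
        logger.warning(f"Invalid date format in {field_name}", date_value=date_value, error=str(e))
        return None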
# Clone Purchase Orders with Line Items
result = await db.execute(
select(PurchaseOrder).where(PurchaseOrder.tenant_id == base_uuid)
)
base_orders = result.scalars().all()
# Load seed data from JSON files
try:
from shared.utils.seed_data_paths import get_seed_data_path
if demo_account_type == "professional":
json_file = get_seed_data_path("professional", "07-procurement.json")
elif demo_account_type == "enterprise":
json_file = get_seed_data_path("enterprise", "07-procurement.json")
else:
raise ValueError(f"Invalid demo account type: {demo_account_type}")
except ImportError:
# Fallback to original path
seed_data_dir = Path(__file__).parent.parent.parent.parent / "infrastructure" / "seed-data"
if demo_account_type == "professional":
json_file = seed_data_dir / "professional" / "07-procurement.json"
elif demo_account_type == "enterprise":
json_file = seed_data_dir / "enterprise" / "parent" / "07-procurement.json"
else:
raise ValueError(f"Invalid demo account type: {demo_account_type}")
if not json_file.exists():
raise HTTPException(
status_code=404,
detail=f"Seed data file not found: {json_file}"
)
# Load JSON data
with open(json_file, 'r', encoding='utf-8') as f:
seed_data = json.load(f)
logger.info(
"Found purchase orders to clone",
count=len(base_orders),
base_tenant=str(base_uuid)
"Loaded procurement seed data",
purchase_orders=len(seed_data.get('purchase_orders', [])),
purchase_order_items=len(seed_data.get('purchase_order_items', [])),
procurement_plans=len(seed_data.get('procurement_plans', []))
)
# Load Purchase Orders from seed data
order_id_map = {}
for po_data in seed_data.get('purchase_orders', []):
# Transform IDs using XOR
from shared.utils.demo_id_transformer import transform_id
try:
logger.debug("Processing purchase order", po_id=po_data.get('id'), po_number=po_data.get('po_number'))
po_uuid = uuid.UUID(po_data['id'])
transformed_id = transform_id(po_data['id'], virtual_uuid)
except ValueError as e:
logger.error("Failed to parse purchase order UUID",
po_id=po_data.get('id'),
po_number=po_data.get('po_number'),
error=str(e))
continue
for order in base_orders:
new_order_id = uuid.uuid4()
order_id_map[order.id] = new_order_id
order_id_map[uuid.UUID(po_data['id'])] = transformed_id
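transform_id comes from shared.utils.demo_id_transformer and its body is not part of this diff. Going by the "Transform IDs using XOR" comment here and the XOR derivation in the deleted seeding script below, a plausible sketch (an assumption, not the actual implementation) would be:

import uuid

def transform_id(source_id: str, virtual_tenant_id: uuid.UUID) -> uuid.UUID:
    # XOR is deterministic and reversible: the same seed row always maps to the
    # same per-tenant UUID, and XOR-ing again recovers the original seed ID.
    return uuid.UUID(int=uuid.UUID(source_id).int ^ virtual_tenant_id.int)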
# Adjust dates using demo_dates utility
adjusted_order_date = adjust_date_for_demo(
order.order_date, session_time, BASE_REFERENCE_DATE
)
adjusted_required_delivery = adjust_date_for_demo(
order.required_delivery_date, session_time, BASE_REFERENCE_DATE
)
adjusted_estimated_delivery = adjust_date_for_demo(
order.estimated_delivery_date, session_time, BASE_REFERENCE_DATE
)
adjusted_supplier_confirmation = adjust_date_for_demo(
order.supplier_confirmation_date, session_time, BASE_REFERENCE_DATE
)
adjusted_approved_at = adjust_date_for_demo(
order.approved_at, session_time, BASE_REFERENCE_DATE
)
adjusted_sent_to_supplier_at = adjust_date_for_demo(
order.sent_to_supplier_at, session_time, BASE_REFERENCE_DATE
)
# Adjust dates relative to session creation time
# FIX: Use current UTC time for future dates (expected delivery)
current_time = datetime.now(timezone.utc)
logger.debug("Parsing dates for PO",
po_number=po_data.get('po_number'),
order_date_raw=po_data.get('order_date') or po_data.get('order_date_offset_days'),
required_delivery_raw=po_data.get('required_delivery_date') or po_data.get('required_delivery_date_offset_days'))
# Handle both direct dates and offset-based dates
if 'order_date_offset_days' in po_data:
adjusted_order_date = session_time + timedelta(days=po_data['order_date_offset_days'])
else:
adjusted_order_date = parse_date_field(po_data.get('order_date'), "order_date") or session_time
if 'required_delivery_date_offset_days' in po_data:
adjusted_required_delivery = session_time + timedelta(days=po_data['required_delivery_date_offset_days'])
else:
adjusted_required_delivery = parse_date_field(po_data.get('required_delivery_date'), "required_delivery_date")
if 'estimated_delivery_date_offset_days' in po_data:
adjusted_estimated_delivery = session_time + timedelta(days=po_data['estimated_delivery_date_offset_days'])
else:
adjusted_estimated_delivery = parse_date_field(po_data.get('estimated_delivery_date'), "estimated_delivery_date")
# Calculate expected delivery date (use estimated delivery if not specified separately)
# FIX: Use current UTC time for future delivery dates
if 'expected_delivery_date_offset_days' in po_data:
adjusted_expected_delivery = current_time + timedelta(days=po_data['expected_delivery_date_offset_days'])
else:
adjusted_expected_delivery = adjusted_estimated_delivery # Fallback to estimated delivery
logger.debug("Dates parsed successfully",
po_number=po_data.get('po_number'),
order_date=adjusted_order_date,
required_delivery=adjusted_required_delivery)
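The convention at work here: a seed record may carry either a relative *_offset_days field (anchored to session time) or an absolute date string, with the offset taking precedence. Condensed into one helper over the same names, as a sketch:

from datetime import datetime, timedelta

def resolve_seed_date(po_data: dict, key: str, session_time: datetime):
    # Relative offsets win over absolute ISO dates; both are optional.
    if f"{key}_offset_days" in po_data:
        return session_time + timedelta(days=po_data[f"{key}_offset_days"])
    return parse_date_field(po_data.get(key), key)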
# Generate a system user UUID for audit fields (demo purposes)
system_user_id = uuid.uuid4()
# For demo sessions: Adjust expected_delivery_date if it exists
# This ensures the ExecutionProgressTracker shows realistic delivery data
expected_delivery = None
if hasattr(order, 'expected_delivery_date') and order.expected_delivery_date:
# Adjust the existing expected_delivery_date to demo session time
expected_delivery = adjust_date_for_demo(
order.expected_delivery_date, session_time, BASE_REFERENCE_DATE
# Use status directly from JSON - JSON files should contain valid enum values
# Valid values: draft, pending_approval, approved, sent_to_supplier, confirmed,
# partially_received, completed, cancelled, disputed
raw_status = po_data.get('status', 'draft')
# Validate that the status is a valid enum value
valid_statuses = {'draft', 'pending_approval', 'approved', 'sent_to_supplier',
'confirmed', 'partially_received', 'completed', 'cancelled', 'disputed'}
if raw_status not in valid_statuses:
logger.warning(
"Invalid status value in seed data, using default 'draft'",
invalid_status=raw_status,
po_number=po_data.get('po_number'),
valid_options=sorted(valid_statuses)
)
elif order.status in ['approved', 'sent_to_supplier', 'confirmed']:
# If no expected_delivery_date but order is in delivery status, use estimated_delivery_date
expected_delivery = adjusted_estimated_delivery
# Create new PurchaseOrder - add expected_delivery_date only if column exists (after migration)
raw_status = 'draft'
# Create new PurchaseOrder
new_order = PurchaseOrder(
id=new_order_id,
id=str(transformed_id),
tenant_id=virtual_uuid,
po_number=f"PO-{uuid.uuid4().hex[:8].upper()}", # New PO number
reference_number=order.reference_number,
supplier_id=order.supplier_id,
procurement_plan_id=plan_id_map.get(order.procurement_plan_id) if hasattr(order, 'procurement_plan_id') and order.procurement_plan_id else None,
po_number=f"{session_id[:8]}-{po_data.get('po_number', f'PO-{uuid.uuid4().hex[:8].upper()}')}",
supplier_id=po_data.get('supplier_id'),
order_date=adjusted_order_date,
required_delivery_date=adjusted_required_delivery,
estimated_delivery_date=adjusted_estimated_delivery,
status=order.status,
priority=order.priority,
subtotal=order.subtotal,
tax_amount=order.tax_amount,
discount_amount=order.discount_amount,
shipping_cost=order.shipping_cost,
total_amount=order.total_amount,
currency=order.currency,
delivery_address=order.delivery_address if hasattr(order, 'delivery_address') else None,
delivery_instructions=order.delivery_instructions if hasattr(order, 'delivery_instructions') else None,
delivery_contact=order.delivery_contact if hasattr(order, 'delivery_contact') else None,
delivery_phone=order.delivery_phone if hasattr(order, 'delivery_phone') else None,
requires_approval=order.requires_approval if hasattr(order, 'requires_approval') else False,
approved_by=order.approved_by if hasattr(order, 'approved_by') else None,
approved_at=adjusted_approved_at,
rejection_reason=order.rejection_reason if hasattr(order, 'rejection_reason') else None,
auto_approved=order.auto_approved if hasattr(order, 'auto_approved') else False,
auto_approval_rule_id=order.auto_approval_rule_id if hasattr(order, 'auto_approval_rule_id') else None,
sent_to_supplier_at=adjusted_sent_to_supplier_at,
supplier_confirmation_date=adjusted_supplier_confirmation,
supplier_reference=order.supplier_reference if hasattr(order, 'supplier_reference') else None,
notes=order.notes if hasattr(order, 'notes') else None,
internal_notes=order.internal_notes if hasattr(order, 'internal_notes') else None,
terms_and_conditions=order.terms_and_conditions if hasattr(order, 'terms_and_conditions') else None,
reasoning_data=order.reasoning_data if hasattr(order, 'reasoning_data') else None, # Clone reasoning for JTBD dashboard
expected_delivery_date=adjusted_expected_delivery,
status=raw_status,
priority=po_data.get('priority', 'normal').lower() if po_data.get('priority') else 'normal',
subtotal=po_data.get('subtotal', 0.0),
tax_amount=po_data.get('tax_amount', 0.0),
shipping_cost=po_data.get('shipping_cost', 0.0),
discount_amount=po_data.get('discount_amount', 0.0),
total_amount=po_data.get('total_amount', 0.0),
currency=po_data.get('currency', 'EUR'),
delivery_address=po_data.get('delivery_address'),
delivery_instructions=po_data.get('delivery_instructions'),
delivery_contact=po_data.get('delivery_contact'),
delivery_phone=po_data.get('delivery_phone'),
requires_approval=po_data.get('requires_approval', False),
auto_approved=po_data.get('auto_approved', False),
auto_approval_rule_id=po_data.get('auto_approval_rule_id') if po_data.get('auto_approval_rule_id') and len(po_data.get('auto_approval_rule_id', '')) >= 32 else None,
rejection_reason=po_data.get('rejection_reason'),
sent_to_supplier_at=parse_date_field(po_data.get('sent_to_supplier_at'), "sent_to_supplier_at"),
supplier_confirmation_date=parse_date_field(po_data.get('supplier_confirmation_date'), "supplier_confirmation_date"),
supplier_reference=po_data.get('supplier_reference'),
notes=po_data.get('notes'),
internal_notes=po_data.get('internal_notes'),
terms_and_conditions=po_data.get('terms_and_conditions'),
reasoning_data=po_data.get('reasoning_data'),
created_at=session_time,
updated_at=session_time,
created_by=system_user_id,
updated_by=system_user_id
)
# Add expected_delivery_date if the model supports it (after migration)
# Add expected_delivery_date if the model supports it
if hasattr(PurchaseOrder, 'expected_delivery_date'):
if 'expected_delivery_date_offset_days' in po_data:
# Handle offset-based expected delivery dates
expected_delivery = adjusted_order_date + timedelta(
days=po_data['expected_delivery_date_offset_days']
)
else:
expected_delivery = adjusted_estimated_delivery
new_order.expected_delivery_date = expected_delivery
db.add(new_order)
stats["purchase_orders"] += 1
# Clone Purchase Order Items
for old_order_id, new_order_id in order_id_map.items():
result = await db.execute(
select(PurchaseOrderItem).where(PurchaseOrderItem.purchase_order_id == old_order_id)
# Load Purchase Order Items from seed data
for po_item_data in seed_data.get('purchase_order_items', []):
# Transform IDs
from shared.utils.demo_id_transformer import transform_id
try:
item_uuid = uuid.UUID(po_item_data['id'])
transformed_id = transform_id(po_item_data['id'], virtual_uuid)
except ValueError as e:
logger.error("Failed to parse purchase order item UUID",
item_id=po_item_data['id'],
error=str(e))
continue
# Map purchase_order_id if it exists in our map
po_id_value = po_item_data.get('purchase_order_id')
if po_id_value:
po_id_value = order_id_map.get(uuid.UUID(po_id_value), uuid.UUID(po_id_value))
new_item = PurchaseOrderItem(
id=str(transformed_id),
tenant_id=virtual_uuid,
purchase_order_id=str(po_id_value) if po_id_value else None,
inventory_product_id=po_item_data.get('inventory_product_id'),
product_name=po_item_data.get('product_name'),
product_code=po_item_data.get('product_code'), # Use product_code directly from JSON
ordered_quantity=po_item_data.get('ordered_quantity', 0.0),
unit_of_measure=po_item_data.get('unit_of_measure'),
unit_price=po_item_data.get('unit_price', 0.0),
line_total=po_item_data.get('line_total', 0.0),
received_quantity=po_item_data.get('received_quantity', 0.0),
remaining_quantity=po_item_data.get('remaining_quantity', po_item_data.get('ordered_quantity', 0.0)),
quality_requirements=po_item_data.get('quality_requirements'),
item_notes=po_item_data.get('item_notes'),
created_at=session_time,
updated_at=session_time
)
order_items = result.scalars().all()
db.add(new_item)
stats["purchase_order_items"] += 1
for item in order_items:
new_item = PurchaseOrderItem(
id=uuid.uuid4(),
tenant_id=virtual_uuid,
purchase_order_id=new_order_id,
procurement_requirement_id=item.procurement_requirement_id if hasattr(item, 'procurement_requirement_id') else None,
inventory_product_id=item.inventory_product_id,
product_code=item.product_code if hasattr(item, 'product_code') else None,
product_name=item.product_name,
supplier_price_list_id=item.supplier_price_list_id if hasattr(item, 'supplier_price_list_id') else None,
ordered_quantity=item.ordered_quantity,
unit_of_measure=item.unit_of_measure,
unit_price=item.unit_price,
line_total=item.line_total,
received_quantity=item.received_quantity if hasattr(item, 'received_quantity') else 0,
remaining_quantity=item.remaining_quantity if hasattr(item, 'remaining_quantity') else item.ordered_quantity,
quality_requirements=item.quality_requirements if hasattr(item, 'quality_requirements') else None,
item_notes=item.item_notes if hasattr(item, 'item_notes') else None,
created_at=session_time,
updated_at=session_time
)
db.add(new_item)
stats["purchase_order_items"] += 1
# Load Procurement Plans from seed data (if any)
for plan_data in seed_data.get('procurement_plans', []):
# Transform IDs
from shared.utils.demo_id_transformer import transform_id
try:
plan_uuid = uuid.UUID(plan_data['id'])
transformed_id = transform_id(plan_data['id'], virtual_uuid)
except ValueError as e:
logger.error("Failed to parse procurement plan UUID",
plan_id=plan_data['id'],
error=str(e))
continue
# Clone Replenishment Plans with Items
result = await db.execute(
select(ReplenishmentPlan).where(ReplenishmentPlan.tenant_id == base_uuid)
)
base_replenishment_plans = result.scalars().all()
# Adjust dates
adjusted_plan_date = parse_date_field(plan_data.get('plan_date'), "plan_date")
logger.info(
"Found replenishment plans to clone",
count=len(base_replenishment_plans),
base_tenant=str(base_uuid)
)
new_plan = ProcurementPlan(
id=str(transformed_id),
tenant_id=virtual_uuid,
plan_number=plan_data.get('plan_number', f"PROC-{uuid.uuid4().hex[:8].upper()}"),
plan_date=adjusted_plan_date,
plan_period_start=parse_date_field(plan_data.get('plan_period_start'), "plan_period_start"),
plan_period_end=parse_date_field(plan_data.get('plan_period_end'), "plan_period_end"),
planning_horizon_days=plan_data.get('planning_horizon_days'),
status=plan_data.get('status', 'draft'),
plan_type=plan_data.get('plan_type'),
priority=plan_data.get('priority', 'normal'),
business_model=plan_data.get('business_model'),
procurement_strategy=plan_data.get('procurement_strategy'),
total_requirements=plan_data.get('total_requirements', 0),
total_estimated_cost=plan_data.get('total_estimated_cost', 0.0),
total_approved_cost=plan_data.get('total_approved_cost', 0.0),
cost_variance=plan_data.get('cost_variance', 0.0),
created_at=session_time,
updated_at=session_time
)
db.add(new_plan)
stats["procurement_plans"] += 1
replan_id_map = {}
# Load Replenishment Plans from seed data (if any)
for replan_data in seed_data.get('replenishment_plans', []):
# Transform IDs
from shared.utils.demo_id_transformer import transform_id
try:
replan_uuid = uuid.UUID(replan_data['id'])
transformed_id = transform_id(replan_data['id'], virtual_uuid)
except ValueError as e:
logger.error("Failed to parse replenishment plan UUID",
replan_id=replan_data['id'],
error=str(e))
continue
for replan in base_replenishment_plans:
new_replan_id = uuid.uuid4()
replan_id_map[replan.id] = new_replan_id
# Adjust dates
adjusted_plan_date = parse_date_field(replan_data.get('plan_date'), "plan_date")
new_replan = ReplenishmentPlan(
id=new_replan_id,
id=str(transformed_id),
tenant_id=virtual_uuid,
plan_number=f"REPL-{uuid.uuid4().hex[:8].upper()}",
plan_date=replan.plan_date + plan_date_offset if replan.plan_date else None,
plan_period_start=replan.plan_period_start + plan_date_offset if replan.plan_period_start else None,
plan_period_end=replan.plan_period_end + plan_date_offset if replan.plan_period_end else None,
planning_horizon_days=replan.planning_horizon_days,
status=replan.status,
plan_type=replan.plan_type,
priority=replan.priority,
business_model=replan.business_model,
total_items=replan.total_items,
total_estimated_cost=replan.total_estimated_cost,
plan_number=replan_data.get('plan_number', f"REPL-{uuid.uuid4().hex[:8].upper()}"),
plan_date=adjusted_plan_date,
plan_period_start=parse_date_field(replan_data.get('plan_period_start'), "plan_period_start"),
plan_period_end=parse_date_field(replan_data.get('plan_period_end'), "plan_period_end"),
planning_horizon_days=replan_data.get('planning_horizon_days'),
status=replan_data.get('status', 'draft'),
plan_type=replan_data.get('plan_type'),
priority=replan_data.get('priority', 'normal'),
business_model=replan_data.get('business_model'),
total_items=replan_data.get('total_items', 0),
total_estimated_cost=replan_data.get('total_estimated_cost', 0.0),
created_at=session_time,
updated_at=session_time
)
db.add(new_replan)
stats["replenishment_plans"] += 1
# Clone Replenishment Plan Items
for old_replan_id, new_replan_id in replan_id_map.items():
result = await db.execute(
select(ReplenishmentPlanItem).where(ReplenishmentPlanItem.plan_id == old_replan_id)
)
replan_items = result.scalars().all()
for item in replan_items:
new_item = ReplenishmentPlanItem(
id=uuid.uuid4(),
plan_id=new_replan_id,
product_id=item.product_id,
product_name=item.product_name,
product_sku=item.product_sku,
required_quantity=item.required_quantity,
unit_of_measure=item.unit_of_measure,
current_stock_level=item.current_stock_level,
safety_stock_quantity=item.safety_stock_quantity,
suggested_order_quantity=item.suggested_order_quantity,
supplier_id=item.supplier_id,
supplier_name=item.supplier_name,
estimated_delivery_days=item.estimated_delivery_days,
required_by_date=item.required_by_date + plan_date_offset if item.required_by_date else None,
status=item.status,
priority=item.priority,
notes=item.notes,
created_at=session_time,
updated_at=session_time
)
db.add(new_item)
stats["replenishment_items"] += 1
# Commit cloned data
# Commit all loaded data
await db.commit()
total_records = sum(stats.values())
# FIX DELIVERY ALERT TIMING - Adjust specific POs to guarantee delivery alerts
# After cloning, some POs need their expected_delivery_date adjusted relative to session time
# to ensure they trigger delivery tracking alerts (arriving soon, overdue, etc.)
logger.info("Adjusting delivery PO dates for guaranteed alert triggering")
# Query for sent_to_supplier POs that have expected_delivery_date
result = await db.execute(
select(PurchaseOrder)
.where(
PurchaseOrder.tenant_id == virtual_uuid,
PurchaseOrder.status == 'sent_to_supplier',
PurchaseOrder.expected_delivery_date.isnot(None)
)
.limit(5) # Adjust first 5 POs with delivery dates
)
delivery_pos = result.scalars().all()
if len(delivery_pos) >= 2:
# PO 1: Set to OVERDUE (5 hours ago) - will trigger overdue alert
delivery_pos[0].expected_delivery_date = session_time - timedelta(hours=5)
delivery_pos[0].required_delivery_date = session_time - timedelta(hours=5)
delivery_pos[0].notes = "🔴 OVERDUE: Expected delivery was 5 hours ago - Contact supplier immediately"
logger.info(f"Set PO {delivery_pos[0].po_number} to overdue (5 hours ago)")
# PO 2: Set to ARRIVING SOON (1 hour from now) - will trigger arriving soon alert
delivery_pos[1].expected_delivery_date = session_time + timedelta(hours=1)
delivery_pos[1].required_delivery_date = session_time + timedelta(hours=1)
delivery_pos[1].notes = "📦 ARRIVING SOON: Delivery expected in 1 hour - Prepare for stock receipt"
logger.info(f"Set PO {delivery_pos[1].po_number} to arriving soon (1 hour)")
if len(delivery_pos) >= 4:
# PO 3: Set to TODAY AFTERNOON (6 hours from now) - visible in dashboard
delivery_pos[2].expected_delivery_date = session_time + timedelta(hours=6)
delivery_pos[2].required_delivery_date = session_time + timedelta(hours=6)
delivery_pos[2].notes = "📅 TODAY: Delivery scheduled for this afternoon"
logger.info(f"Set PO {delivery_pos[2].po_number} to today afternoon (6 hours)")
# PO 4: Set to TOMORROW MORNING (18 hours from now)
delivery_pos[3].expected_delivery_date = session_time + timedelta(hours=18)
delivery_pos[3].required_delivery_date = session_time + timedelta(hours=18)
delivery_pos[3].notes = "📅 TOMORROW: Morning delivery scheduled"
logger.info(f"Set PO {delivery_pos[3].po_number} to tomorrow morning (18 hours)")
# Commit the adjusted delivery dates
await db.commit()
logger.info(f"Adjusted {len(delivery_pos)} POs for delivery alert triggering")
# EMIT ALERTS FOR PENDING APPROVAL POs
# After cloning, emit PO approval alerts for any pending_approval POs
# This ensures the action queue is populated when the demo session starts
pending_pos_for_alerts = []
for order_id in order_id_map.values():
result = await db.execute(
select(PurchaseOrder)
.options(selectinload(PurchaseOrder.items))
.where(
PurchaseOrder.id == order_id,
PurchaseOrder.status == 'pending_approval'
)
)
po = result.scalar_one_or_none()
if po:
pending_pos_for_alerts.append(po)
logger.info(
"Emitting PO approval alerts for cloned pending POs",
pending_po_count=len(pending_pos_for_alerts),
virtual_tenant_id=virtual_tenant_id
)
# Initialize RabbitMQ client for alert emission using UnifiedEventPublisher
alerts_emitted = 0
if pending_pos_for_alerts:
rabbitmq_client = RabbitMQClient(settings.RABBITMQ_URL, "procurement")
try:
await rabbitmq_client.connect()
event_publisher = UnifiedEventPublisher(rabbitmq_client, "procurement")
for po in pending_pos_for_alerts:
try:
# Get deadline for urgency calculation
now_utc = datetime.now(timezone.utc)
if po.required_delivery_date:
deadline = po.required_delivery_date
if deadline.tzinfo is None:
deadline = deadline.replace(tzinfo=timezone.utc)
else:
days_until = 3 if po.priority == 'critical' else 7
deadline = now_utc + timedelta(days=days_until)
hours_until = (deadline - now_utc).total_seconds() / 3600
# Check for reasoning data and generate if missing
reasoning_data = po.reasoning_data
if not reasoning_data:
try:
# Generate synthetic reasoning data for demo purposes
product_names = [item.product_name for item in po.items] if po.items else ["Assorted Bakery Supplies"]
supplier_name = f"Supplier-{str(po.supplier_id)[:8]}" # Fallback name
# Create realistic looking reasoning based on PO data
reasoning_data = create_po_reasoning_low_stock(
supplier_name=supplier_name,
product_names=product_names,
current_stock=15.5, # Simulated
required_stock=100.0, # Simulated
days_until_stockout=2, # Simulated urgent
threshold_percentage=20,
affected_products=product_names[:2],
estimated_lost_orders=12
)
logger.info("Generated synthetic reasoning data for demo alert", po_id=str(po.id))
except Exception as e:
logger.warning("Failed to generate synthetic reasoning data, using ultimate fallback", error=str(e))
# Ultimate fallback: Create minimal valid reasoning data structure
reasoning_data = {
"type": "low_stock_detection",
"parameters": {
"supplier_name": supplier_name,
"product_names": ["Assorted Bakery Supplies"],
"product_count": 1,
"current_stock": 10.0,
"required_stock": 50.0,
"days_until_stockout": 2
},
"consequence": {
"type": "stockout_risk",
"severity": "medium",
"impact_days": 2
},
"metadata": {
"trigger_source": "demo_fallback",
"ai_assisted": False
}
}
logger.info("Used ultimate fallback reasoning_data structure", po_id=str(po.id))
# Prepare metadata for the alert
severity = 'high' if po.priority == 'critical' else 'medium'
metadata = {
'po_id': str(po.id),
'po_number': po.po_number,
'supplier_id': str(po.supplier_id),
'supplier_name': f'Supplier-{po.supplier_id}', # Simplified for demo
'total_amount': float(po.total_amount),
'currency': po.currency,
'priority': po.priority,
'severity': severity,
'required_delivery_date': po.required_delivery_date.isoformat() if po.required_delivery_date else None,
'created_at': po.created_at.isoformat(),
'financial_impact': float(po.total_amount),
'deadline': deadline.isoformat(),
'hours_until_consequence': int(hours_until),
'reasoning_data': reasoning_data, # For enrichment service
}
# Use UnifiedEventPublisher.publish_alert() which handles MinimalEvent format automatically
success = await event_publisher.publish_alert(
event_type='supply_chain.po_approval_needed', # domain.event_type format
tenant_id=virtual_uuid,
severity=severity,
data=metadata
)
if success:
alerts_emitted += 1
logger.info(
"PO approval alert emitted during cloning",
po_id=str(po.id),
po_number=po.po_number,
tenant_id=str(virtual_uuid)
)
except Exception as e:
logger.error(
"Failed to emit PO approval alert during cloning",
po_id=str(po.id),
error=str(e),
exc_info=True
)
# Continue with other POs
continue
finally:
await rabbitmq_client.disconnect()
stats["alerts_emitted"] = alerts_emitted
# Calculate total records
total_records = (stats["procurement_plans"] + stats["procurement_requirements"] +
stats["purchase_orders"] + stats["purchase_order_items"] +
stats["replenishment_plans"] + stats["replenishment_items"])
duration_ms = int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000)
logger.info(
"Procurement data cloning completed",
"Procurement data loading from seed files completed",
virtual_tenant_id=virtual_tenant_id,
total_records=total_records,
alerts_emitted=alerts_emitted,
stats=stats,
duration_ms=duration_ms
)
@@ -651,7 +449,7 @@ async def clone_demo_data(
except Exception as e:
logger.error(
"Failed to clone procurement data",
"Failed to load procurement seed data",
error=str(e),
virtual_tenant_id=virtual_tenant_id,
exc_info=True
@@ -696,14 +494,12 @@ async def delete_demo_data(
virtual_uuid = uuid.UUID(virtual_tenant_id)
# Count records
po_count = await db.scalar(select(func.count(PurchaseOrder.id)).where(PurchaseOrder.tenant_id == virtual_uuid))
item_count = await db.scalar(select(func.count(PurchaseOrderItem.id)).where(PurchaseOrderItem.tenant_id == virtual_uuid))
plan_count = await db.scalar(select(func.count(ProcurementPlan.id)).where(ProcurementPlan.tenant_id == virtual_uuid))
req_count = await db.scalar(select(func.count(ProcurementRequirement.id)).where(ProcurementRequirement.tenant_id == virtual_uuid))
replan_count = await db.scalar(select(func.count(ReplenishmentPlan.id)).where(ReplenishmentPlan.tenant_id == virtual_uuid))
replan_item_count = await db.scalar(select(func.count(ReplenishmentPlanItem.id)).where(ReplenishmentPlanItem.tenant_id == virtual_uuid))
po_count = await db.scalar(select(func.count(PurchaseOrder.id)).where(PurchaseOrder.tenant_id == virtual_uuid))
po_item_count = await db.scalar(select(func.count(PurchaseOrderItem.id)).where(PurchaseOrderItem.tenant_id == virtual_uuid))
plan_count = await db.scalar(select(func.count(ProcurementPlan.id)).where(ProcurementPlan.tenant_id == virtual_uuid))
replan_count = await db.scalar(select(func.count(ReplenishmentPlan.id)).where(ReplenishmentPlan.tenant_id == virtual_uuid))
# Delete in order (respecting foreign key constraints)
# Delete in order
await db.execute(delete(PurchaseOrderItem).where(PurchaseOrderItem.tenant_id == virtual_uuid))
await db.execute(delete(PurchaseOrder).where(PurchaseOrder.tenant_id == virtual_uuid))
await db.execute(delete(ProcurementRequirement).where(ProcurementRequirement.tenant_id == virtual_uuid))
@@ -721,16 +517,14 @@ async def delete_demo_data(
"virtual_tenant_id": virtual_tenant_id,
"records_deleted": {
"purchase_orders": po_count,
"purchase_order_items": item_count,
"purchase_order_items": po_item_count,
"procurement_plans": plan_count,
"procurement_requirements": req_count,
"replenishment_plans": replan_count,
"replenishment_items": replan_item_count,
"total": po_count + item_count + plan_count + req_count + replan_count + replan_item_count
"total": po_count + po_item_count + plan_count + replan_count
},
"duration_ms": duration_ms
}
except Exception as e:
logger.error("Failed to delete procurement data", error=str(e), exc_info=True)
await db.rollback()
raise HTTPException(status_code=500, detail=str(e))

View File

@@ -97,7 +97,11 @@ class ProcurementService(StandardFastAPIService):
# Start delivery tracking service (APScheduler with leader election)
from app.services.delivery_tracking_service import DeliveryTrackingService
self.delivery_tracking_service = DeliveryTrackingService(self.event_publisher, settings)
self.delivery_tracking_service = DeliveryTrackingService(
event_publisher=self.event_publisher,
config=settings,
database_manager=self.database_manager
)
await self.delivery_tracking_service.start()
self.logger.info("Delivery tracking service started")
@@ -159,9 +163,9 @@ from app.api.purchase_orders import router as purchase_orders_router
from app.api import internal_transfer # Internal Transfer Routes
from app.api import replenishment # Enhanced Replenishment Planning Routes
from app.api import analytics # Procurement Analytics Routes
from app.api import internal_demo
from app.api import internal_delivery # Internal Delivery Tracking Routes
from app.api import ml_insights # ML insights endpoint
from app.api import internal_demo # Internal demo data cloning
from app.api.expected_deliveries import router as expected_deliveries_router # Expected Deliveries Routes
from app.api.internal_delivery_tracking import router as internal_delivery_tracking_router # NEW: Internal trigger endpoint
@@ -170,10 +174,11 @@ service.add_router(purchase_orders_router)
service.add_router(internal_transfer.router, tags=["internal-transfer"]) # Internal transfer routes
service.add_router(replenishment.router, tags=["replenishment"]) # RouteBuilder already includes full path
service.add_router(analytics.router, tags=["analytics"]) # RouteBuilder already includes full path
service.add_router(internal_demo.router)
service.add_router(internal_demo.router, tags=["internal-demo"]) # Internal demo data cloning
service.add_router(internal_delivery.router, tags=["internal-delivery"]) # Internal delivery tracking
service.add_router(internal_delivery_tracking_router, tags=["internal-delivery-tracking"]) # NEW: Delivery alert trigger
service.add_router(ml_insights.router) # ML insights endpoint
service.add_router(ml_insights.internal_router) # Internal ML insights endpoint
service.add_router(expected_deliveries_router, tags=["expected-deliveries"]) # Expected deliveries endpoint

View File

@@ -33,9 +33,10 @@ class DeliveryTrackingService:
Only one pod executes checks (others skip if not leader).
"""
def __init__(self, event_publisher: UnifiedEventPublisher, config):
def __init__(self, event_publisher: UnifiedEventPublisher, config, database_manager=None):
self.publisher = event_publisher
self.config = config
self.database_manager = database_manager
self.scheduler = AsyncIOScheduler()
self.is_leader = False
self.instance_id = str(uuid4())[:8] # Short instance ID for logging
@@ -144,7 +145,7 @@ class DeliveryTrackingService:
Returns list of tenant UUIDs that have purchase orders.
"""
try:
async with self.config.database_manager.get_session() as session:
async with self.database_manager.get_session() as session:
# Get distinct tenant_ids that have purchase orders
query = select(PurchaseOrder.tenant_id).distinct()
result = await session.execute(query)
@@ -260,7 +261,7 @@ class DeliveryTrackingService:
List of delivery dicts with same structure as API endpoint
"""
try:
async with self.config.database_manager.get_session() as session:
async with self.database_manager.get_session() as session:
# Calculate date range
now = datetime.now(timezone.utc)
end_date = now + timedelta(days=days_ahead)
@@ -339,7 +340,7 @@ class DeliveryTrackingService:
"supplier_id": str(po.supplier_id),
"supplier_name": supplier_name,
"supplier_phone": supplier_phone,
"expected_delivery_date": po.expected_delivery_date.isoformat(),
"expected_delivery_date": po.expected_delivery_date.isoformat() if po.expected_delivery_date else None,
"delivery_window_hours": 4, # Default
"status": po.status.value,
"line_items": line_items,

View File

@@ -1034,7 +1034,7 @@ class ProcurementService:
async def _get_supplier_by_id(self, tenant_id, supplier_id):
"""Get supplier details by ID"""
try:
return await self.suppliers_client.get_supplier(str(tenant_id), str(supplier_id))
return await self.suppliers_client.get_supplier_by_id(str(tenant_id), str(supplier_id))
except Exception as e:
logger.warning(f"Failed to get supplier {supplier_id}: {e}")
return None

View File

@@ -1017,7 +1017,7 @@ class PurchaseOrderService:
async def _get_and_validate_supplier(self, tenant_id: uuid.UUID, supplier_id: uuid.UUID) -> Dict[str, Any]:
"""Get and validate supplier from Suppliers Service"""
try:
supplier = await self.suppliers_client.get_supplier(str(tenant_id), str(supplier_id))
supplier = await self.suppliers_client.get_supplier_by_id(str(tenant_id), str(supplier_id))
if not supplier:
raise ValueError("Supplier not found")
@@ -1048,7 +1048,7 @@ class PurchaseOrderService:
cache_key = f"{tenant_id}:{supplier_id}"
if cache_key not in self._supplier_cache:
supplier = await self.suppliers_client.get_supplier(str(tenant_id), str(supplier_id))
supplier = await self.suppliers_client.get_supplier_by_id(str(tenant_id), str(supplier_id))
self._supplier_cache[cache_key] = supplier
logger.debug("Supplier cache MISS", tenant_id=str(tenant_id), supplier_id=str(supplier_id))
else:

View File

@@ -1,680 +0,0 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Demo Procurement Plans Seeding Script for Procurement Service
Creates realistic procurement plans for demo template tenants using pre-defined UUIDs
This script runs as a Kubernetes init job inside the procurement-service container.
It populates the template tenants with comprehensive procurement plans.
Usage:
python /app/scripts/demo/seed_demo_procurement_plans.py
Environment Variables Required:
PROCUREMENT_DATABASE_URL - PostgreSQL connection string for procurement database
DEMO_MODE - Set to 'production' for production seeding
LOG_LEVEL - Logging level (default: INFO)
Note: No database lookups needed - all IDs are pre-defined in the JSON file
"""
import asyncio
import uuid
import sys
import os
import json
from datetime import datetime, timezone, timedelta, date
from pathlib import Path
import random
from decimal import Decimal
# Add app to path
sys.path.insert(0, str(Path(__file__).parent.parent.parent))
from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine
from sqlalchemy.orm import sessionmaker
from sqlalchemy import select, text
import structlog
from app.models.procurement_plan import ProcurementPlan, ProcurementRequirement
# Add shared path for demo utilities
sys.path.insert(0, str(Path(__file__).parent.parent.parent.parent))
from shared.utils.demo_dates import BASE_REFERENCE_DATE
# Configure logging
structlog.configure(
processors=[
structlog.stdlib.add_log_level,
structlog.processors.TimeStamper(fmt="iso"),
structlog.dev.ConsoleRenderer()
]
)
logger = structlog.get_logger()
# Fixed Demo Tenant IDs (must match tenant service)
DEMO_TENANT_PROFESSIONAL = uuid.UUID("a1b2c3d4-e5f6-47a8-b9c0-d1e2f3a4b5c6") # Individual bakery
DEMO_TENANT_ENTERPRISE_CHAIN = uuid.UUID("c3d4e5f6-a7b8-49c0-d1e2-f3a4b5c6d7e8") # Enterprise parent (Obrador)
# Hardcoded SKU to Ingredient ID mapping (no database lookups needed!)
INGREDIENT_ID_MAP = {
"HAR-T55-001": "10000000-0000-0000-0000-000000000001",
"HAR-T65-002": "10000000-0000-0000-0000-000000000002",
"HAR-FUE-003": "10000000-0000-0000-0000-000000000003",
"HAR-INT-004": "10000000-0000-0000-0000-000000000004",
"HAR-CEN-005": "10000000-0000-0000-0000-000000000005",
"HAR-ESP-006": "10000000-0000-0000-0000-000000000006",
"LAC-MAN-001": "10000000-0000-0000-0000-000000000011",
"LAC-LEC-002": "10000000-0000-0000-0000-000000000012",
"LAC-NAT-003": "10000000-0000-0000-0000-000000000013",
"LAC-HUE-004": "10000000-0000-0000-0000-000000000014",
"LEV-FRE-001": "10000000-0000-0000-0000-000000000021",
"LEV-SEC-002": "10000000-0000-0000-0000-000000000022",
"BAS-SAL-001": "10000000-0000-0000-0000-000000000031",
"BAS-AZU-002": "10000000-0000-0000-0000-000000000032",
"ESP-CHO-001": "10000000-0000-0000-0000-000000000041",
"ESP-ALM-002": "10000000-0000-0000-0000-000000000042",
"ESP-VAI-004": "10000000-0000-0000-0000-000000000044",
"ESP-CRE-005": "10000000-0000-0000-0000-000000000045",
}
# Ingredient costs (for requirement generation)
INGREDIENT_COSTS = {
"HAR-T55-001": 0.85,
"HAR-T65-002": 0.95,
"HAR-FUE-003": 1.15,
"HAR-INT-004": 1.20,
"HAR-CEN-005": 1.30,
"HAR-ESP-006": 2.45,
"LAC-MAN-001": 6.50,
"LAC-LEC-002": 0.95,
"LAC-NAT-003": 3.20,
"LAC-HUE-004": 0.25,
"LEV-FRE-001": 4.80,
"LEV-SEC-002": 12.50,
"BAS-SAL-001": 0.60,
"BAS-AZU-002": 0.90,
"ESP-CHO-001": 15.50,
"ESP-ALM-002": 8.90,
"ESP-VAI-004": 3.50,
"ESP-CRE-005": 7.20,
}
def calculate_date_from_offset(offset_days: int) -> date:
"""Calculate a date based on offset from BASE_REFERENCE_DATE"""
return (BASE_REFERENCE_DATE + timedelta(days=offset_days)).date()
def calculate_datetime_from_offset(offset_days: int) -> datetime:
"""Calculate a datetime based on offset from BASE_REFERENCE_DATE"""
return BASE_REFERENCE_DATE + timedelta(days=offset_days)
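For illustration, with BASE_REFERENCE_DATE set to an arbitrary example value (the real constant lives in shared.utils.demo_dates):

from datetime import datetime, timezone, timedelta

BASE_REFERENCE_DATE = datetime(2025, 6, 1, tzinfo=timezone.utc)  # illustrative value only
print((BASE_REFERENCE_DATE + timedelta(days=-15)).date())  # 2025-05-17: an old "completed" plan
print((BASE_REFERENCE_DATE + timedelta(days=5)).date())    # 2025-06-06: a future "draft" plan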
def weighted_choice(choices: list) -> dict:
"""Make a weighted random choice from list of dicts with 'weight' key"""
total_weight = sum(c.get("weight", 1.0) for c in choices)
r = random.uniform(0, total_weight)
cumulative = 0
for choice in choices:
cumulative += choice.get("weight", 1.0)
if r <= cumulative:
return choice
return choices[-1]
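Usage with the plan-type weights defined in the config below; roughly 70% of draws come back "regular":

plan_types = [
    {"type": "regular", "weight": 0.7},
    {"type": "seasonal", "weight": 0.2},
    {"type": "emergency", "weight": 0.1},
]
chosen = weighted_choice(plan_types)["type"]  # "regular" ~70% of the time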
def generate_plan_number(tenant_id: uuid.UUID, index: int, plan_type: str) -> str:
"""Generate a unique plan number"""
tenant_prefix = "SP" if tenant_id == DEMO_TENANT_PROFESSIONAL else "LE"
type_code = plan_type[0:3].upper()
return f"PROC-{tenant_prefix}-{type_code}-{BASE_REFERENCE_DATE.year}-{index:03d}"
async def generate_procurement_for_tenant(
db: AsyncSession,
tenant_id: uuid.UUID,
tenant_name: str,
business_model: str,
config: dict
) -> dict:
"""Generate procurement plans and requirements for a specific tenant"""
logger.info("" * 80)
logger.info(f"Generating procurement data for: {tenant_name}")
logger.info(f"Tenant ID: {tenant_id}")
logger.info("" * 80)
# Check if procurement plans already exist
result = await db.execute(
select(ProcurementPlan).where(ProcurementPlan.tenant_id == tenant_id).limit(1)
)
existing = result.scalar_one_or_none()
if existing:
logger.info(f" ⏭️ Procurement plans already exist for {tenant_name}, skipping seed")
return {
"tenant_id": str(tenant_id),
"plans_created": 0,
"requirements_created": 0,
"skipped": True
}
proc_config = config["procurement_config"]
total_plans = proc_config["plans_per_tenant"]
plans_created = 0
requirements_created = 0
for i in range(total_plans):
# Determine temporal distribution
rand_temporal = random.random()
cumulative = 0
temporal_category = None
for category, details in proc_config["temporal_distribution"].items():
cumulative += details["percentage"]
if rand_temporal <= cumulative:
temporal_category = details
break
if not temporal_category:
temporal_category = proc_config["temporal_distribution"]["completed"]
# Calculate plan date
offset_days = random.randint(
temporal_category["offset_days_min"],
temporal_category["offset_days_max"]
)
plan_date = calculate_date_from_offset(offset_days)
# Select status
status = random.choice(temporal_category["statuses"])
# Select plan type
plan_type_choice = weighted_choice(proc_config["plan_types"])
plan_type = plan_type_choice["type"]
# Select priority
priority_rand = random.random()
cumulative_priority = 0
priority = "normal"
for p, weight in proc_config["priorities"].items():
cumulative_priority += weight
if priority_rand <= cumulative_priority:
priority = p
break
# Select procurement strategy
strategy_choice = weighted_choice(proc_config["procurement_strategies"])
procurement_strategy = strategy_choice["strategy"]
# Select supply risk level
risk_rand = random.random()
cumulative_risk = 0
supply_risk_level = "low"
for risk, weight in proc_config["risk_levels"].items():
cumulative_risk += weight
if risk_rand <= cumulative_risk:
supply_risk_level = risk
break
# Calculate planning horizon
planning_horizon = proc_config["planning_horizon_days"][business_model]
# Calculate period dates
period_start = plan_date
period_end = plan_date + timedelta(days=planning_horizon)
# Generate plan number
plan_number = generate_plan_number(tenant_id, i + 1, plan_type)
# Calculate safety stock buffer
safety_stock_buffer = Decimal(str(random.uniform(
proc_config["safety_stock_percentage"]["min"],
proc_config["safety_stock_percentage"]["max"]
)))
# Calculate approval/execution dates based on status
approved_at = None
execution_started_at = None
execution_completed_at = None
approved_by = None
if status in ["approved", "in_execution", "completed"]:
approved_at = calculate_datetime_from_offset(offset_days - 1)
approved_by = uuid.uuid4() # Would be actual user ID
if status in ["in_execution", "completed"]:
execution_started_at = calculate_datetime_from_offset(offset_days)
if status == "completed":
execution_completed_at = calculate_datetime_from_offset(offset_days + planning_horizon)
# Calculate performance metrics for completed plans
fulfillment_rate = None
on_time_delivery_rate = None
cost_accuracy = None
quality_score = None
if status == "completed":
metrics = proc_config["performance_metrics"]
fulfillment_rate = Decimal(str(random.uniform(
metrics["fulfillment_rate"]["min"],
metrics["fulfillment_rate"]["max"]
)))
on_time_delivery_rate = Decimal(str(random.uniform(
metrics["on_time_delivery"]["min"],
metrics["on_time_delivery"]["max"]
)))
cost_accuracy = Decimal(str(random.uniform(
metrics["cost_accuracy"]["min"],
metrics["cost_accuracy"]["max"]
)))
quality_score = Decimal(str(random.uniform(
metrics["quality_score"]["min"],
metrics["quality_score"]["max"]
)))
# Create procurement plan
plan = ProcurementPlan(
id=uuid.uuid4(),
tenant_id=tenant_id,
plan_number=plan_number,
plan_date=plan_date,
plan_period_start=period_start,
plan_period_end=period_end,
planning_horizon_days=planning_horizon,
status=status,
plan_type=plan_type,
priority=priority,
business_model=business_model,
procurement_strategy=procurement_strategy,
total_requirements=0, # Will update after adding requirements
total_estimated_cost=Decimal("0.00"), # Will calculate
total_approved_cost=Decimal("0.00"),
safety_stock_buffer=safety_stock_buffer,
supply_risk_level=supply_risk_level,
demand_forecast_confidence=Decimal(str(random.uniform(7.0, 9.5))),
approved_at=approved_at,
approved_by=approved_by,
execution_started_at=execution_started_at,
execution_completed_at=execution_completed_at,
fulfillment_rate=fulfillment_rate,
on_time_delivery_rate=on_time_delivery_rate,
cost_accuracy=cost_accuracy,
quality_score=quality_score,
created_at=calculate_datetime_from_offset(offset_days - 2),
updated_at=calculate_datetime_from_offset(offset_days)
)
db.add(plan)
await db.flush() # Get plan ID
# Generate requirements for this plan
num_requirements = random.randint(
proc_config["requirements_per_plan"]["min"],
proc_config["requirements_per_plan"]["max"]
)
# Select random ingredients
selected_ingredients = random.sample(
list(INGREDIENT_ID_MAP.keys()),
min(num_requirements, len(INGREDIENT_ID_MAP))
)
total_estimated_cost = Decimal("0.00")
for req_num, ingredient_sku in enumerate(selected_ingredients, 1):
# Get ingredient ID from hardcoded mapping
ingredient_id_str = INGREDIENT_ID_MAP.get(ingredient_sku)
if not ingredient_id_str:
logger.warning(f" ⚠️ Ingredient SKU not in mapping: {ingredient_sku}")
continue
# Generate tenant-specific ingredient ID
base_ingredient_id = uuid.UUID(ingredient_id_str)
tenant_int = int(tenant_id.hex, 16)
ingredient_id = uuid.UUID(int=tenant_int ^ int(base_ingredient_id.hex, 16))
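# XOR with the tenant ID is deterministic and reversible: the same base ingredient
# always yields the same per-tenant UUID, and XOR-ing again recovers the base ID.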
# Get quantity range for category
category = ingredient_sku.split("-")[0] # HAR, LAC, LEV, BAS, ESP
cantidad_range = proc_config["quantity_ranges"].get(
category,
{"min": 50.0, "max": 200.0}
)
# Calculate required quantity
required_quantity = Decimal(str(random.uniform(
cantidad_range["min"],
cantidad_range["max"]
)))
# Calculate safety stock
safety_stock_quantity = required_quantity * (safety_stock_buffer / 100)
# Total quantity needed
total_quantity_needed = required_quantity + safety_stock_quantity
# Current stock simulation
current_stock_level = required_quantity * Decimal(str(random.uniform(0.1, 0.4)))
reserved_stock = current_stock_level * Decimal(str(random.uniform(0.0, 0.3)))
available_stock = current_stock_level - reserved_stock
# Net requirement
net_requirement = total_quantity_needed - available_stock
# Demand breakdown
order_demand = required_quantity * Decimal(str(random.uniform(0.5, 0.7)))
production_demand = required_quantity * Decimal(str(random.uniform(0.2, 0.4)))
forecast_demand = required_quantity * Decimal(str(random.uniform(0.05, 0.15)))
buffer_demand = safety_stock_quantity
# Pricing
estimated_unit_cost = Decimal(str(INGREDIENT_COSTS.get(ingredient_sku, 1.0))) * Decimal(str(random.uniform(0.95, 1.05)))
estimated_total_cost = estimated_unit_cost * net_requirement
# Timing
lead_time_days = random.randint(1, 5)
required_by_date = period_start + timedelta(days=random.randint(3, planning_horizon - 2))
lead_time_buffer_days = random.randint(1, 2)
suggested_order_date = required_by_date - timedelta(days=lead_time_days + lead_time_buffer_days)
latest_order_date = required_by_date - timedelta(days=lead_time_days)
# Requirement status based on plan status
if status == "draft":
req_status = "pending"
elif status == "pending_approval":
req_status = "pending"
elif status == "approved":
req_status = "approved"
elif status == "in_execution":
req_status = random.choice(["ordered", "partially_received"])
elif status == "completed":
req_status = "received"
else:
req_status = "pending"
# Requirement priority
if priority == "critical":
req_priority = "critical"
elif priority == "high":
req_priority = random.choice(["high", "critical"])
else:
req_priority = random.choice(["normal", "high"])
# Risk level
if supply_risk_level == "critical":
req_risk_level = random.choice(["high", "critical"])
elif supply_risk_level == "high":
req_risk_level = random.choice(["medium", "high"])
else:
req_risk_level = "low"
# Create requirement
requirement = ProcurementRequirement(
id=uuid.uuid4(),
plan_id=plan.id,
requirement_number=f"{plan_number}-REQ-{req_num:03d}",
product_id=ingredient_id,
product_name=f"Ingrediente {ingredient_sku}",
product_sku=ingredient_sku,
product_category=category,
product_type="ingredient",
required_quantity=required_quantity,
unit_of_measure="kg",
safety_stock_quantity=safety_stock_quantity,
total_quantity_needed=total_quantity_needed,
current_stock_level=current_stock_level,
reserved_stock=reserved_stock,
available_stock=available_stock,
net_requirement=net_requirement,
order_demand=order_demand,
production_demand=production_demand,
forecast_demand=forecast_demand,
buffer_demand=buffer_demand,
supplier_lead_time_days=lead_time_days,
minimum_order_quantity=Decimal(str(random.choice([1, 5, 10, 25]))),
estimated_unit_cost=estimated_unit_cost,
estimated_total_cost=estimated_total_cost,
required_by_date=required_by_date,
lead_time_buffer_days=lead_time_buffer_days,
suggested_order_date=suggested_order_date,
latest_order_date=latest_order_date,
shelf_life_days=random.choice([30, 60, 90, 180, 365]),
status=req_status,
priority=req_priority,
risk_level=req_risk_level,
created_at=plan.created_at,
updated_at=plan.updated_at
)
db.add(requirement)
total_estimated_cost += estimated_total_cost
plan_requirement_count += 1
requirements_created += 1
# Update plan totals with the number of requirements actually created,
# not the requested count (unmapped SKUs may have been skipped)
plan.total_requirements = plan_requirement_count
plan.total_estimated_cost = total_estimated_cost
if status in ["approved", "in_execution", "completed"]:
plan.total_approved_cost = total_estimated_cost * Decimal(str(random.uniform(0.95, 1.05)))
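# Approved cost is jittered +/-5% around the estimate, giving executed plans a
# small, realistic estimate-vs-approved variance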
plans_created += 1
await db.commit()
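# Single commit per tenant keeps the seed atomic: either all of this tenant's
# plans and requirements land, or none do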
logger.info(f" 📊 Successfully created {plans_created} plans with {requirements_created} requirements for {tenant_name}")
logger.info("")
return {
"tenant_id": str(tenant_id),
"plans_created": plans_created,
"requirements_created": requirements_created,
"skipped": False
}
async def seed_all(db: AsyncSession):
"""Seed all demo tenants with procurement data"""
logger.info("=" * 80)
logger.info("🚚 Starting Demo Procurement Plans Seeding")
logger.info("=" * 80)
# Load configuration
config = {
"procurement_config": {
"plans_per_tenant": 8,
"requirements_per_plan": {"min": 3, "max": 8},
"planning_horizon_days": {
"individual_bakery": 30,
"central_bakery": 45,
"enterprise_chain": 45 # Enterprise parent uses same horizon as central bakery
},
"safety_stock_percentage": {"min": 15.0, "max": 25.0},
"temporal_distribution": {
"completed": {
"percentage": 0.3,
"offset_days_min": -15,
"offset_days_max": -1,
"statuses": ["completed"]
},
"in_execution": {
"percentage": 0.2,
"offset_days_min": -5,
"offset_days_max": 2,
"statuses": ["in_execution", "partially_received"]
},
"approved": {
"percentage": 0.2,
"offset_days_min": -2,
"offset_days_max": 1,
"statuses": ["approved"]
},
"pending_approval": {
"percentage": 0.15,
"offset_days_min": 0,
"offset_days_max": 3,
"statuses": ["pending_approval"]
},
"draft": {
"percentage": 0.15,
"offset_days_min": 0,
"offset_days_max": 5,
"statuses": ["draft"]
}
},
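# Note: the bucket percentages above sum to 1.0
# (0.30 + 0.20 + 0.20 + 0.15 + 0.15), so each plan falls into exactly one bucket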
"plan_types": [
{"type": "regular", "weight": 0.7},
{"type": "seasonal", "weight": 0.2},
{"type": "emergency", "weight": 0.1}
],
"priorities": {
"normal": 0.7,
"high": 0.25,
"critical": 0.05
},
"procurement_strategies": [
{"strategy": "just_in_time", "weight": 0.6},
{"strategy": "bulk", "weight": 0.3},
{"strategy": "mixed", "weight": 0.1}
],
"risk_levels": {
"low": 0.6,
"medium": 0.3,
"high": 0.08,
"critical": 0.02
},
"quantity_ranges": {
"HAR": {"min": 50.0, "max": 500.0}, # Harinas
"LAC": {"min": 20.0, "max": 200.0}, # Lácteos
"LEV": {"min": 5.0, "max": 50.0}, # Levaduras
"BAS": {"min": 10.0, "max": 100.0}, # Básicos
"ESP": {"min": 1.0, "max": 20.0} # Especiales
},
"performance_metrics": {
"fulfillment_rate": {"min": 85.0, "max": 98.0},
"on_time_delivery": {"min": 80.0, "max": 95.0},
"cost_accuracy": {"min": 90.0, "max": 99.0},
"quality_score": {"min": 7.0, "max": 9.5}
}
}
}
results = []
# Seed Professional Bakery (single location)
result_professional = await generate_procurement_for_tenant(
db,
DEMO_TENANT_PROFESSIONAL,
"Panadería Artesana Madrid (Professional)",
"individual_bakery",
config
)
results.append(result_professional)
# Seed Enterprise Parent (central production - Obrador) with scaled procurement
result_enterprise_parent = await generate_procurement_for_tenant(
db,
DEMO_TENANT_ENTERPRISE_CHAIN,
"Panadería Central - Obrador Madrid (Enterprise Parent)",
"enterprise_chain",
config
)
results.append(result_enterprise_parent)
total_plans = sum(r["plans_created"] for r in results)
total_requirements = sum(r["requirements_created"] for r in results)
logger.info("=" * 80)
logger.info("✅ Demo Procurement Plans Seeding Completed")
logger.info("=" * 80)
return {
"results": results,
"total_plans_created": total_plans,
"total_requirements_created": total_requirements,
"status": "completed"
}
async def main():
"""Main execution function"""
logger.info("Demo Procurement Plans Seeding Script Starting")
logger.info("Mode: %s", os.getenv("DEMO_MODE", "development"))
logger.info("Log Level: %s", os.getenv("LOG_LEVEL", "INFO"))
# Get database URL from environment
database_url = os.getenv("PROCUREMENT_DATABASE_URL") or os.getenv("DATABASE_URL")
if not database_url:
logger.error("❌ PROCUREMENT_DATABASE_URL or DATABASE_URL environment variable must be set")
return 1
# Ensure asyncpg driver
if database_url.startswith("postgresql://"):
database_url = database_url.replace("postgresql://", "postgresql+asyncpg://", 1)
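# e.g. postgresql://user:pass@host/db -> postgresql+asyncpg://user:pass@host/db
# (the explicit async driver is required; a bare postgresql:// URL would make
# SQLAlchemy fall back to a sync driver such as psycopg2)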
logger.info("Connecting to procurement database")
# Create async engine
engine = create_async_engine(
database_url,
echo=False,
pool_pre_ping=True,
pool_size=5,
max_overflow=10
)
async_session = sessionmaker(
engine,
class_=AsyncSession,
expire_on_commit=False
)
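# Note: on SQLAlchemy 2.0+, async_sessionmaker is the preferred factory;
# sessionmaker(class_=AsyncSession) shown here remains supported and equivalent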
try:
async with async_session() as session:
result = await seed_all(session)
logger.info("")
logger.info("📊 Seeding Summary:")
logger.info(f" ✅ Total Plans: {result['total_plans_created']}")
logger.info(f" ✅ Total Requirements: {result['total_requirements_created']}")
logger.info(f" ✅ Status: {result['status']}")
logger.info("")
# Print per-tenant details
for tenant_result in result["results"]:
tenant_id = tenant_result["tenant_id"]
plans = tenant_result["plans_created"]
requirements = tenant_result["requirements_created"]
skipped = tenant_result.get("skipped", False)
status = "SKIPPED (already exists)" if skipped else f"CREATED {plans} plans, {requirements} requirements"
logger.info(f" Tenant {tenant_id}: {status}")
logger.info("")
logger.info("🎉 Success! Procurement plans are ready for demo sessions.")
logger.info("")
logger.info("Plans created:")
logger.info(" • 8 Regular procurement plans per tenant")
logger.info(" • 3-8 Requirements per plan")
logger.info(" • Various statuses: draft, pending, approved, in execution, completed")
logger.info(" • Different priorities and risk levels")
logger.info("")
logger.info("Note: All IDs are pre-defined and hardcoded for cross-service consistency")
logger.info("")
return 0
except Exception as e:
logger.error("=" * 80)
logger.error("❌ Demo Procurement Plans Seeding Failed")
logger.error("=" * 80)
logger.error("Error: %s", str(e))
logger.error("", exc_info=True)
return 1
finally:
await engine.dispose()
if __name__ == "__main__":
exit_code = asyncio.run(main())
sys.exit(exit_code)
