demo seed change

This commit is contained in:
Urtzi Alfaro
2025-12-13 23:57:54 +01:00
parent f3688dfb04
commit ff830a3415
299 changed files with 20328 additions and 19485 deletions

View File

@@ -3,11 +3,9 @@
from .procurement_plans import router as procurement_plans_router
from .purchase_orders import router as purchase_orders_router
from .replenishment import router as replenishment_router
from .internal_demo import router as internal_demo_router
__all__ = [
"procurement_plans_router",
"purchase_orders_router",
"replenishment_router",
"internal_demo_router"
"procurement_plans_router",
"purchase_orders_router",
"replenishment_router"
]

View File

@@ -91,8 +91,11 @@ async def get_expected_deliveries(
# Add date filters
if include_overdue:
# Include any delivery from past until end_date
# Include deliveries from last 48 hours (recent overdue) until end_date
# This ensures we only show truly recent overdue deliveries, not ancient history
start_date = now - timedelta(hours=48)
query = query.where(
PurchaseOrder.expected_delivery_date >= start_date,
PurchaseOrder.expected_delivery_date <= end_date
)
else:
@@ -149,13 +152,22 @@ async def get_expected_deliveries(
# Default delivery window is 4 hours
delivery_window_hours = 4
# Ensure expected delivery date is timezone-aware and in UTC format
expected_delivery_utc = po.expected_delivery_date
if expected_delivery_utc and expected_delivery_utc.tzinfo is None:
# If naive datetime, assume it's UTC (this shouldn't happen with proper DB setup)
expected_delivery_utc = expected_delivery_utc.replace(tzinfo=timezone.utc)
elif expected_delivery_utc and expected_delivery_utc.tzinfo is not None:
# Convert to UTC if it's in another timezone
expected_delivery_utc = expected_delivery_utc.astimezone(timezone.utc)
delivery_dict = {
"po_id": str(po.id),
"po_number": po.po_number,
"supplier_id": str(po.supplier_id),
"supplier_name": supplier_name,
"supplier_phone": supplier_phone,
"expected_delivery_date": po.expected_delivery_date.isoformat(),
"expected_delivery_date": expected_delivery_utc.isoformat() if expected_delivery_utc else None,
"delivery_window_hours": delivery_window_hours,
"status": po.status.value,
"line_items": line_items,
@@ -187,4 +199,4 @@ async def get_expected_deliveries(
tenant_id=tenant_id,
exc_info=True
)
raise HTTPException(status_code=500, detail="Internal server error")
raise HTTPException(status_code=500, detail="Internal server error")

View File

@@ -162,7 +162,7 @@ async def get_expected_deliveries(
"supplier_id": str(po.supplier_id),
"supplier_name": supplier_name,
"supplier_phone": supplier_phone,
"expected_delivery_date": po.expected_delivery_date.isoformat(),
"expected_delivery_date": po.expected_delivery_date.isoformat() if po.expected_delivery_date else None,
"delivery_window_hours": delivery_window_hours,
"status": po.status.value,
"line_items": line_items,

View File

@@ -3,6 +3,9 @@ Internal API for triggering delivery tracking alerts.
Used by demo session cloning to generate realistic late delivery alerts.
Moved from orchestrator service to procurement service (domain ownership).
URL Pattern: /api/v1/tenants/{tenant_id}/procurement/internal/delivery-tracking/trigger
This follows the tenant-scoped pattern so gateway can proxy correctly.
"""
from fastapi import APIRouter, HTTPException, Request, Path
@@ -14,7 +17,8 @@ logger = structlog.get_logger()
router = APIRouter()
@router.post("/api/internal/delivery-tracking/trigger/{tenant_id}")
# New URL pattern: tenant-scoped so gateway proxies to procurement service correctly
@router.post("/api/v1/tenants/{tenant_id}/procurement/internal/delivery-tracking/trigger")
async def trigger_delivery_tracking(
tenant_id: UUID = Path(..., description="Tenant ID to check deliveries for"),
request: Request = None

View File

@@ -11,12 +11,14 @@ import uuid
from datetime import datetime, timezone, timedelta, date
from typing import Optional
import os
import json
from pathlib import Path
from app.core.database import get_db
from app.models.procurement_plan import ProcurementPlan, ProcurementRequirement
from app.models.purchase_order import PurchaseOrder, PurchaseOrderItem
from app.models.replenishment import ReplenishmentPlan, ReplenishmentPlanItem
from shared.utils.demo_dates import adjust_date_for_demo, BASE_REFERENCE_DATE
from shared.utils.demo_dates import adjust_date_for_demo, BASE_REFERENCE_DATE, resolve_time_marker
from shared.messaging import RabbitMQClient, UnifiedEventPublisher
from sqlalchemy.orm import selectinload
from shared.schemas.reasoning_types import (
@@ -53,10 +55,10 @@ async def clone_demo_data(
"""
Clone procurement service data for a virtual demo tenant
Clones:
- Procurement plans with requirements
Loads seed data from JSON files and creates:
- Purchase orders with line items
- Replenishment plans with items
- Procurement plans with requirements (if in seed data)
- Replenishment plans with items (if in seed data)
- Adjusts dates to recent timeframe
Args:
@@ -80,7 +82,7 @@ async def clone_demo_data(
session_time = start_time
logger.info(
"Starting procurement data cloning",
"Starting procurement data cloning from seed files",
base_tenant_id=base_tenant_id,
virtual_tenant_id=virtual_tenant_id,
demo_account_type=demo_account_type,
@@ -103,536 +105,332 @@ async def clone_demo_data(
"replenishment_items": 0
}
# Clone Procurement Plans with Requirements
result = await db.execute(
select(ProcurementPlan).where(ProcurementPlan.tenant_id == base_uuid)
)
base_plans = result.scalars().all()
logger.info(
"Found procurement plans to clone",
count=len(base_plans),
base_tenant=str(base_uuid)
)
# Calculate date offset for procurement
if base_plans:
max_plan_date = max(plan.plan_date for plan in base_plans if plan.plan_date)
today_date = date.today()
days_diff = (today_date - max_plan_date).days
plan_date_offset = timedelta(days=days_diff)
else:
plan_date_offset = timedelta(days=0)
plan_id_map = {}
for plan in base_plans:
new_plan_id = uuid.uuid4()
plan_id_map[plan.id] = new_plan_id
new_plan = ProcurementPlan(
id=new_plan_id,
tenant_id=virtual_uuid,
plan_number=f"PROC-{uuid.uuid4().hex[:8].upper()}",
plan_date=plan.plan_date + plan_date_offset if plan.plan_date else None,
plan_period_start=plan.plan_period_start + plan_date_offset if plan.plan_period_start else None,
plan_period_end=plan.plan_period_end + plan_date_offset if plan.plan_period_end else None,
planning_horizon_days=plan.planning_horizon_days,
status=plan.status,
plan_type=plan.plan_type,
priority=plan.priority,
business_model=plan.business_model,
procurement_strategy=plan.procurement_strategy,
total_requirements=plan.total_requirements,
total_estimated_cost=plan.total_estimated_cost,
total_approved_cost=plan.total_approved_cost,
cost_variance=plan.cost_variance,
created_at=session_time,
updated_at=session_time
)
db.add(new_plan)
stats["procurement_plans"] += 1
# Clone Procurement Requirements
for old_plan_id, new_plan_id in plan_id_map.items():
result = await db.execute(
select(ProcurementRequirement).where(ProcurementRequirement.plan_id == old_plan_id)
)
requirements = result.scalars().all()
for req in requirements:
new_req = ProcurementRequirement(
id=uuid.uuid4(),
plan_id=new_plan_id,
requirement_number=req.requirement_number,
product_id=req.product_id,
product_name=req.product_name,
product_sku=req.product_sku,
product_category=req.product_category,
product_type=req.product_type,
required_quantity=req.required_quantity,
unit_of_measure=req.unit_of_measure,
safety_stock_quantity=req.safety_stock_quantity,
total_quantity_needed=req.total_quantity_needed,
current_stock_level=req.current_stock_level,
reserved_stock=req.reserved_stock,
available_stock=req.available_stock,
net_requirement=req.net_requirement,
order_demand=req.order_demand,
production_demand=req.production_demand,
forecast_demand=req.forecast_demand,
buffer_demand=req.buffer_demand,
preferred_supplier_id=req.preferred_supplier_id,
backup_supplier_id=req.backup_supplier_id,
supplier_name=req.supplier_name,
supplier_lead_time_days=req.supplier_lead_time_days,
minimum_order_quantity=req.minimum_order_quantity,
estimated_unit_cost=req.estimated_unit_cost,
estimated_total_cost=req.estimated_total_cost,
last_purchase_cost=req.last_purchase_cost,
cost_variance=req.cost_variance,
required_by_date=req.required_by_date + plan_date_offset if req.required_by_date else None,
lead_time_buffer_days=req.lead_time_buffer_days,
suggested_order_date=req.suggested_order_date + plan_date_offset if req.suggested_order_date else None,
latest_order_date=req.latest_order_date + plan_date_offset if req.latest_order_date else None,
quality_specifications=req.quality_specifications,
special_requirements=req.special_requirements,
storage_requirements=req.storage_requirements,
shelf_life_days=req.shelf_life_days,
status=req.status,
priority=req.priority,
risk_level=req.risk_level,
purchase_order_id=req.purchase_order_id,
purchase_order_number=req.purchase_order_number,
ordered_quantity=req.ordered_quantity,
ordered_at=req.ordered_at,
expected_delivery_date=req.expected_delivery_date + plan_date_offset if req.expected_delivery_date else None,
actual_delivery_date=req.actual_delivery_date + plan_date_offset if req.actual_delivery_date else None,
received_quantity=req.received_quantity,
delivery_status=req.delivery_status,
fulfillment_rate=req.fulfillment_rate,
on_time_delivery=req.on_time_delivery,
quality_rating=req.quality_rating,
source_orders=req.source_orders,
source_production_batches=req.source_production_batches,
demand_analysis=req.demand_analysis,
approved_quantity=req.approved_quantity,
approved_cost=req.approved_cost,
approved_at=req.approved_at,
approved_by=req.approved_by,
procurement_notes=req.procurement_notes,
supplier_communication=req.supplier_communication,
requirement_metadata=req.requirement_metadata,
created_at=session_time,
updated_at=session_time
def parse_date_field(date_value, field_name="date"):
"""Parse date field, handling both ISO strings and BASE_TS markers"""
if not date_value:
return None
# Check if it's a BASE_TS marker
if isinstance(date_value, str) and date_value.startswith("BASE_TS"):
try:
return resolve_time_marker(date_value, session_time)
except ValueError as e:
logger.warning(
f"Invalid BASE_TS marker in {field_name}",
marker=date_value,
error=str(e)
)
return None
# Handle regular ISO date strings
try:
return adjust_date_for_demo(
datetime.fromisoformat(date_value.replace('Z', '+00:00')),
session_time,
BASE_REFERENCE_DATE
)
db.add(new_req)
stats["procurement_requirements"] += 1
except (ValueError, AttributeError) as e:
logger.warning(
f"Invalid date format in {field_name}",
date_value=date_value,
error=str(e)
)
return None
# Clone Purchase Orders with Line Items
result = await db.execute(
select(PurchaseOrder).where(PurchaseOrder.tenant_id == base_uuid)
)
base_orders = result.scalars().all()
# Load seed data from JSON files
try:
from shared.utils.seed_data_paths import get_seed_data_path
if demo_account_type == "professional":
json_file = get_seed_data_path("professional", "07-procurement.json")
elif demo_account_type == "enterprise":
json_file = get_seed_data_path("enterprise", "07-procurement.json")
else:
raise ValueError(f"Invalid demo account type: {demo_account_type}")
except ImportError:
# Fallback to original path
seed_data_dir = Path(__file__).parent.parent.parent.parent / "infrastructure" / "seed-data"
if demo_account_type == "professional":
json_file = seed_data_dir / "professional" / "07-procurement.json"
elif demo_account_type == "enterprise":
json_file = seed_data_dir / "enterprise" / "parent" / "07-procurement.json"
else:
raise ValueError(f"Invalid demo account type: {demo_account_type}")
if not json_file.exists():
raise HTTPException(
status_code=404,
detail=f"Seed data file not found: {json_file}"
)
# Load JSON data
with open(json_file, 'r', encoding='utf-8') as f:
seed_data = json.load(f)
logger.info(
"Found purchase orders to clone",
count=len(base_orders),
base_tenant=str(base_uuid)
"Loaded procurement seed data",
purchase_orders=len(seed_data.get('purchase_orders', [])),
purchase_order_items=len(seed_data.get('purchase_order_items', [])),
procurement_plans=len(seed_data.get('procurement_plans', []))
)
# Load Purchase Orders from seed data
order_id_map = {}
for po_data in seed_data.get('purchase_orders', []):
# Transform IDs using XOR
from shared.utils.demo_id_transformer import transform_id
try:
logger.debug("Processing purchase order", po_id=po_data.get('id'), po_number=po_data.get('po_number'))
po_uuid = uuid.UUID(po_data['id'])
transformed_id = transform_id(po_data['id'], virtual_uuid)
except ValueError as e:
logger.error("Failed to parse purchase order UUID",
po_id=po_data.get('id'),
po_number=po_data.get('po_number'),
error=str(e))
continue
for order in base_orders:
new_order_id = uuid.uuid4()
order_id_map[order.id] = new_order_id
order_id_map[uuid.UUID(po_data['id'])] = transformed_id
# Adjust dates using demo_dates utility
adjusted_order_date = adjust_date_for_demo(
order.order_date, session_time, BASE_REFERENCE_DATE
)
adjusted_required_delivery = adjust_date_for_demo(
order.required_delivery_date, session_time, BASE_REFERENCE_DATE
)
adjusted_estimated_delivery = adjust_date_for_demo(
order.estimated_delivery_date, session_time, BASE_REFERENCE_DATE
)
adjusted_supplier_confirmation = adjust_date_for_demo(
order.supplier_confirmation_date, session_time, BASE_REFERENCE_DATE
)
adjusted_approved_at = adjust_date_for_demo(
order.approved_at, session_time, BASE_REFERENCE_DATE
)
adjusted_sent_to_supplier_at = adjust_date_for_demo(
order.sent_to_supplier_at, session_time, BASE_REFERENCE_DATE
)
# Adjust dates relative to session creation time
# FIX: Use current UTC time for future dates (expected delivery)
current_time = datetime.now(timezone.utc)
logger.debug("Parsing dates for PO",
po_number=po_data.get('po_number'),
order_date_raw=po_data.get('order_date') or po_data.get('order_date_offset_days'),
required_delivery_raw=po_data.get('required_delivery_date') or po_data.get('required_delivery_date_offset_days'))
# Handle both direct dates and offset-based dates
if 'order_date_offset_days' in po_data:
adjusted_order_date = session_time + timedelta(days=po_data['order_date_offset_days'])
else:
adjusted_order_date = parse_date_field(po_data.get('order_date'), "order_date") or session_time
if 'required_delivery_date_offset_days' in po_data:
adjusted_required_delivery = session_time + timedelta(days=po_data['required_delivery_date_offset_days'])
else:
adjusted_required_delivery = parse_date_field(po_data.get('required_delivery_date'), "required_delivery_date")
if 'estimated_delivery_date_offset_days' in po_data:
adjusted_estimated_delivery = session_time + timedelta(days=po_data['estimated_delivery_date_offset_days'])
else:
adjusted_estimated_delivery = parse_date_field(po_data.get('estimated_delivery_date'), "estimated_delivery_date")
# Calculate expected delivery date (use estimated delivery if not specified separately)
# FIX: Use current UTC time for future delivery dates
if 'expected_delivery_date_offset_days' in po_data:
adjusted_expected_delivery = current_time + timedelta(days=po_data['expected_delivery_date_offset_days'])
else:
adjusted_expected_delivery = adjusted_estimated_delivery # Fallback to estimated delivery
logger.debug("Dates parsed successfully",
po_number=po_data.get('po_number'),
order_date=adjusted_order_date,
required_delivery=adjusted_required_delivery)
# Generate a system user UUID for audit fields (demo purposes)
system_user_id = uuid.uuid4()
# For demo sessions: Adjust expected_delivery_date if it exists
# This ensures the ExecutionProgressTracker shows realistic delivery data
expected_delivery = None
if hasattr(order, 'expected_delivery_date') and order.expected_delivery_date:
# Adjust the existing expected_delivery_date to demo session time
expected_delivery = adjust_date_for_demo(
order.expected_delivery_date, session_time, BASE_REFERENCE_DATE
# Use status directly from JSON - JSON files should contain valid enum values
# Valid values: draft, pending_approval, approved, sent_to_supplier, confirmed,
# partially_received, completed, cancelled, disputed
raw_status = po_data.get('status', 'draft')
# Validate that the status is a valid enum value
valid_statuses = {'draft', 'pending_approval', 'approved', 'sent_to_supplier',
'confirmed', 'partially_received', 'completed', 'cancelled', 'disputed'}
if raw_status not in valid_statuses:
logger.warning(
"Invalid status value in seed data, using default 'draft'",
invalid_status=raw_status,
po_number=po_data.get('po_number'),
valid_options=sorted(valid_statuses)
)
elif order.status in ['approved', 'sent_to_supplier', 'confirmed']:
# If no expected_delivery_date but order is in delivery status, use estimated_delivery_date
expected_delivery = adjusted_estimated_delivery
# Create new PurchaseOrder - add expected_delivery_date only if column exists (after migration)
raw_status = 'draft'
# Create new PurchaseOrder
new_order = PurchaseOrder(
id=new_order_id,
id=str(transformed_id),
tenant_id=virtual_uuid,
po_number=f"PO-{uuid.uuid4().hex[:8].upper()}", # New PO number
reference_number=order.reference_number,
supplier_id=order.supplier_id,
procurement_plan_id=plan_id_map.get(order.procurement_plan_id) if hasattr(order, 'procurement_plan_id') and order.procurement_plan_id else None,
po_number=f"{session_id[:8]}-{po_data.get('po_number', f'PO-{uuid.uuid4().hex[:8].upper()}')}",
supplier_id=po_data.get('supplier_id'),
order_date=adjusted_order_date,
required_delivery_date=adjusted_required_delivery,
estimated_delivery_date=adjusted_estimated_delivery,
status=order.status,
priority=order.priority,
subtotal=order.subtotal,
tax_amount=order.tax_amount,
discount_amount=order.discount_amount,
shipping_cost=order.shipping_cost,
total_amount=order.total_amount,
currency=order.currency,
delivery_address=order.delivery_address if hasattr(order, 'delivery_address') else None,
delivery_instructions=order.delivery_instructions if hasattr(order, 'delivery_instructions') else None,
delivery_contact=order.delivery_contact if hasattr(order, 'delivery_contact') else None,
delivery_phone=order.delivery_phone if hasattr(order, 'delivery_phone') else None,
requires_approval=order.requires_approval if hasattr(order, 'requires_approval') else False,
approved_by=order.approved_by if hasattr(order, 'approved_by') else None,
approved_at=adjusted_approved_at,
rejection_reason=order.rejection_reason if hasattr(order, 'rejection_reason') else None,
auto_approved=order.auto_approved if hasattr(order, 'auto_approved') else False,
auto_approval_rule_id=order.auto_approval_rule_id if hasattr(order, 'auto_approval_rule_id') else None,
sent_to_supplier_at=adjusted_sent_to_supplier_at,
supplier_confirmation_date=adjusted_supplier_confirmation,
supplier_reference=order.supplier_reference if hasattr(order, 'supplier_reference') else None,
notes=order.notes if hasattr(order, 'notes') else None,
internal_notes=order.internal_notes if hasattr(order, 'internal_notes') else None,
terms_and_conditions=order.terms_and_conditions if hasattr(order, 'terms_and_conditions') else None,
reasoning_data=order.reasoning_data if hasattr(order, 'reasoning_data') else None, # Clone reasoning for JTBD dashboard
expected_delivery_date=adjusted_expected_delivery,
status=raw_status,
priority=po_data.get('priority', 'normal').lower() if po_data.get('priority') else 'normal',
subtotal=po_data.get('subtotal', 0.0),
tax_amount=po_data.get('tax_amount', 0.0),
shipping_cost=po_data.get('shipping_cost', 0.0),
discount_amount=po_data.get('discount_amount', 0.0),
total_amount=po_data.get('total_amount', 0.0),
currency=po_data.get('currency', 'EUR'),
delivery_address=po_data.get('delivery_address'),
delivery_instructions=po_data.get('delivery_instructions'),
delivery_contact=po_data.get('delivery_contact'),
delivery_phone=po_data.get('delivery_phone'),
requires_approval=po_data.get('requires_approval', False),
auto_approved=po_data.get('auto_approved', False),
auto_approval_rule_id=po_data.get('auto_approval_rule_id') if po_data.get('auto_approval_rule_id') and len(po_data.get('auto_approval_rule_id', '')) >= 32 else None,
rejection_reason=po_data.get('rejection_reason'),
sent_to_supplier_at=parse_date_field(po_data.get('sent_to_supplier_at'), "sent_to_supplier_at"),
supplier_confirmation_date=parse_date_field(po_data.get('supplier_confirmation_date'), "supplier_confirmation_date"),
supplier_reference=po_data.get('supplier_reference'),
notes=po_data.get('notes'),
internal_notes=po_data.get('internal_notes'),
terms_and_conditions=po_data.get('terms_and_conditions'),
reasoning_data=po_data.get('reasoning_data'),
created_at=session_time,
updated_at=session_time,
created_by=system_user_id,
updated_by=system_user_id
)
# Add expected_delivery_date if the model supports it (after migration)
# Add expected_delivery_date if the model supports it
if hasattr(PurchaseOrder, 'expected_delivery_date'):
if 'expected_delivery_date_offset_days' in po_data:
# Handle offset-based expected delivery dates
expected_delivery = adjusted_order_date + timedelta(
days=po_data['expected_delivery_date_offset_days']
)
else:
expected_delivery = adjusted_estimated_delivery
new_order.expected_delivery_date = expected_delivery
db.add(new_order)
stats["purchase_orders"] += 1
# Clone Purchase Order Items
for old_order_id, new_order_id in order_id_map.items():
result = await db.execute(
select(PurchaseOrderItem).where(PurchaseOrderItem.purchase_order_id == old_order_id)
# Load Purchase Order Items from seed data
for po_item_data in seed_data.get('purchase_order_items', []):
# Transform IDs
from shared.utils.demo_id_transformer import transform_id
try:
item_uuid = uuid.UUID(po_item_data['id'])
transformed_id = transform_id(po_item_data['id'], virtual_uuid)
except ValueError as e:
logger.error("Failed to parse purchase order item UUID",
item_id=po_item_data['id'],
error=str(e))
continue
# Map purchase_order_id if it exists in our map
po_id_value = po_item_data.get('purchase_order_id')
if po_id_value:
po_id_value = order_id_map.get(uuid.UUID(po_id_value), uuid.UUID(po_id_value))
new_item = PurchaseOrderItem(
id=str(transformed_id),
tenant_id=virtual_uuid,
purchase_order_id=str(po_id_value) if po_id_value else None,
inventory_product_id=po_item_data.get('inventory_product_id'),
product_name=po_item_data.get('product_name'),
product_code=po_item_data.get('product_code'), # Use product_code directly from JSON
ordered_quantity=po_item_data.get('ordered_quantity', 0.0),
unit_of_measure=po_item_data.get('unit_of_measure'),
unit_price=po_item_data.get('unit_price', 0.0),
line_total=po_item_data.get('line_total', 0.0),
received_quantity=po_item_data.get('received_quantity', 0.0),
remaining_quantity=po_item_data.get('remaining_quantity', po_item_data.get('ordered_quantity', 0.0)),
quality_requirements=po_item_data.get('quality_requirements'),
item_notes=po_item_data.get('item_notes'),
created_at=session_time,
updated_at=session_time
)
order_items = result.scalars().all()
db.add(new_item)
stats["purchase_order_items"] += 1
for item in order_items:
new_item = PurchaseOrderItem(
id=uuid.uuid4(),
tenant_id=virtual_uuid,
purchase_order_id=new_order_id,
procurement_requirement_id=item.procurement_requirement_id if hasattr(item, 'procurement_requirement_id') else None,
inventory_product_id=item.inventory_product_id,
product_code=item.product_code if hasattr(item, 'product_code') else None,
product_name=item.product_name,
supplier_price_list_id=item.supplier_price_list_id if hasattr(item, 'supplier_price_list_id') else None,
ordered_quantity=item.ordered_quantity,
unit_of_measure=item.unit_of_measure,
unit_price=item.unit_price,
line_total=item.line_total,
received_quantity=item.received_quantity if hasattr(item, 'received_quantity') else 0,
remaining_quantity=item.remaining_quantity if hasattr(item, 'remaining_quantity') else item.ordered_quantity,
quality_requirements=item.quality_requirements if hasattr(item, 'quality_requirements') else None,
item_notes=item.item_notes if hasattr(item, 'item_notes') else None,
created_at=session_time,
updated_at=session_time
)
db.add(new_item)
stats["purchase_order_items"] += 1
# Load Procurement Plans from seed data (if any)
for plan_data in seed_data.get('procurement_plans', []):
# Transform IDs
from shared.utils.demo_id_transformer import transform_id
try:
plan_uuid = uuid.UUID(plan_data['id'])
transformed_id = transform_id(plan_data['id'], virtual_uuid)
except ValueError as e:
logger.error("Failed to parse procurement plan UUID",
plan_id=plan_data['id'],
error=str(e))
continue
# Clone Replenishment Plans with Items
result = await db.execute(
select(ReplenishmentPlan).where(ReplenishmentPlan.tenant_id == base_uuid)
)
base_replenishment_plans = result.scalars().all()
# Adjust dates
adjusted_plan_date = parse_date_field(plan_data.get('plan_date'), "plan_date")
logger.info(
"Found replenishment plans to clone",
count=len(base_replenishment_plans),
base_tenant=str(base_uuid)
)
new_plan = ProcurementPlan(
id=str(transformed_id),
tenant_id=virtual_uuid,
plan_number=plan_data.get('plan_number', f"PROC-{uuid.uuid4().hex[:8].upper()}"),
plan_date=adjusted_plan_date,
plan_period_start=parse_date_field(plan_data.get('plan_period_start'), "plan_period_start"),
plan_period_end=parse_date_field(plan_data.get('plan_period_end'), "plan_period_end"),
planning_horizon_days=plan_data.get('planning_horizon_days'),
status=plan_data.get('status', 'draft'),
plan_type=plan_data.get('plan_type'),
priority=plan_data.get('priority', 'normal'),
business_model=plan_data.get('business_model'),
procurement_strategy=plan_data.get('procurement_strategy'),
total_requirements=plan_data.get('total_requirements', 0),
total_estimated_cost=plan_data.get('total_estimated_cost', 0.0),
total_approved_cost=plan_data.get('total_approved_cost', 0.0),
cost_variance=plan_data.get('cost_variance', 0.0),
created_at=session_time,
updated_at=session_time
)
db.add(new_plan)
stats["procurement_plans"] += 1
replan_id_map = {}
# Load Replenishment Plans from seed data (if any)
for replan_data in seed_data.get('replenishment_plans', []):
# Transform IDs
from shared.utils.demo_id_transformer import transform_id
try:
replan_uuid = uuid.UUID(replan_data['id'])
transformed_id = transform_id(replan_data['id'], virtual_uuid)
except ValueError as e:
logger.error("Failed to parse replenishment plan UUID",
replan_id=replan_data['id'],
error=str(e))
continue
for replan in base_replenishment_plans:
new_replan_id = uuid.uuid4()
replan_id_map[replan.id] = new_replan_id
# Adjust dates
adjusted_plan_date = parse_date_field(replan_data.get('plan_date'), "plan_date")
new_replan = ReplenishmentPlan(
id=new_replan_id,
id=str(transformed_id),
tenant_id=virtual_uuid,
plan_number=f"REPL-{uuid.uuid4().hex[:8].upper()}",
plan_date=replan.plan_date + plan_date_offset if replan.plan_date else None,
plan_period_start=replan.plan_period_start + plan_date_offset if replan.plan_period_start else None,
plan_period_end=replan.plan_period_end + plan_date_offset if replan.plan_period_end else None,
planning_horizon_days=replan.planning_horizon_days,
status=replan.status,
plan_type=replan.plan_type,
priority=replan.priority,
business_model=replan.business_model,
total_items=replan.total_items,
total_estimated_cost=replan.total_estimated_cost,
plan_number=replan_data.get('plan_number', f"REPL-{uuid.uuid4().hex[:8].upper()}"),
plan_date=adjusted_plan_date,
plan_period_start=parse_date_field(replan_data.get('plan_period_start'), "plan_period_start"),
plan_period_end=parse_date_field(replan_data.get('plan_period_end'), "plan_period_end"),
planning_horizon_days=replan_data.get('planning_horizon_days'),
status=replan_data.get('status', 'draft'),
plan_type=replan_data.get('plan_type'),
priority=replan_data.get('priority', 'normal'),
business_model=replan_data.get('business_model'),
total_items=replan_data.get('total_items', 0),
total_estimated_cost=replan_data.get('total_estimated_cost', 0.0),
created_at=session_time,
updated_at=session_time
)
db.add(new_replan)
stats["replenishment_plans"] += 1
# Clone Replenishment Plan Items
for old_replan_id, new_replan_id in replan_id_map.items():
result = await db.execute(
select(ReplenishmentPlanItem).where(ReplenishmentPlanItem.plan_id == old_replan_id)
)
replan_items = result.scalars().all()
for item in replan_items:
new_item = ReplenishmentPlanItem(
id=uuid.uuid4(),
plan_id=new_replan_id,
product_id=item.product_id,
product_name=item.product_name,
product_sku=item.product_sku,
required_quantity=item.required_quantity,
unit_of_measure=item.unit_of_measure,
current_stock_level=item.current_stock_level,
safety_stock_quantity=item.safety_stock_quantity,
suggested_order_quantity=item.suggested_order_quantity,
supplier_id=item.supplier_id,
supplier_name=item.supplier_name,
estimated_delivery_days=item.estimated_delivery_days,
required_by_date=item.required_by_date + plan_date_offset if item.required_by_date else None,
status=item.status,
priority=item.priority,
notes=item.notes,
created_at=session_time,
updated_at=session_time
)
db.add(new_item)
stats["replenishment_items"] += 1
# Commit cloned data
# Commit all loaded data
await db.commit()
total_records = sum(stats.values())
# FIX DELIVERY ALERT TIMING - Adjust specific POs to guarantee delivery alerts
# After cloning, some POs need their expected_delivery_date adjusted relative to session time
# to ensure they trigger delivery tracking alerts (arriving soon, overdue, etc.)
# NOTE(review): assumes session_time is timezone-aware UTC — confirm at the assignment site.
logger.info("Adjusting delivery PO dates for guaranteed alert triggering")
# Query for sent_to_supplier POs that have expected_delivery_date
result = await db.execute(
    select(PurchaseOrder)
    .where(
        PurchaseOrder.tenant_id == virtual_uuid,
        PurchaseOrder.status == 'sent_to_supplier',
        PurchaseOrder.expected_delivery_date.isnot(None)
    )
    .limit(5)  # Adjust first 5 POs with delivery dates
)
delivery_pos = result.scalars().all()
if len(delivery_pos) >= 2:
    # PO 1: Set to OVERDUE (5 hours ago) - will trigger overdue alert
    delivery_pos[0].expected_delivery_date = session_time - timedelta(hours=5)
    delivery_pos[0].required_delivery_date = session_time - timedelta(hours=5)
    delivery_pos[0].notes = "🔴 OVERDUE: Expected delivery was 5 hours ago - Contact supplier immediately"
    logger.info(f"Set PO {delivery_pos[0].po_number} to overdue (5 hours ago)")
    # PO 2: Set to ARRIVING SOON (1 hour from now) - will trigger arriving soon alert
    delivery_pos[1].expected_delivery_date = session_time + timedelta(hours=1)
    delivery_pos[1].required_delivery_date = session_time + timedelta(hours=1)
    delivery_pos[1].notes = "📦 ARRIVING SOON: Delivery expected in 1 hour - Prepare for stock receipt"
    logger.info(f"Set PO {delivery_pos[1].po_number} to arriving soon (1 hour)")
if len(delivery_pos) >= 4:
    # PO 3: Set to TODAY AFTERNOON (6 hours from now) - visible in dashboard
    delivery_pos[2].expected_delivery_date = session_time + timedelta(hours=6)
    delivery_pos[2].required_delivery_date = session_time + timedelta(hours=6)
    delivery_pos[2].notes = "📅 TODAY: Delivery scheduled for this afternoon"
    logger.info(f"Set PO {delivery_pos[2].po_number} to today afternoon (6 hours)")
    # PO 4: Set to TOMORROW MORNING (18 hours from now)
    delivery_pos[3].expected_delivery_date = session_time + timedelta(hours=18)
    delivery_pos[3].required_delivery_date = session_time + timedelta(hours=18)
    delivery_pos[3].notes = "📅 TOMORROW: Morning delivery scheduled"
    logger.info(f"Set PO {delivery_pos[3].po_number} to tomorrow morning (18 hours)")
# Commit the adjusted delivery dates
await db.commit()
logger.info(f"Adjusted {len(delivery_pos)} POs for delivery alert triggering")
# EMIT ALERTS FOR PENDING APPROVAL POs
# After cloning, emit PO approval alerts for any pending_approval POs
# This ensures the action queue is populated when the demo session starts
pending_pos_for_alerts = []
for order_id in order_id_map.values():
result = await db.execute(
select(PurchaseOrder)
.options(selectinload(PurchaseOrder.items))
.where(
PurchaseOrder.id == order_id,
PurchaseOrder.status == 'pending_approval'
)
)
po = result.scalar_one_or_none()
if po:
pending_pos_for_alerts.append(po)
logger.info(
"Emitting PO approval alerts for cloned pending POs",
pending_po_count=len(pending_pos_for_alerts),
virtual_tenant_id=virtual_tenant_id
)
# Initialize RabbitMQ client for alert emission using UnifiedEventPublisher
alerts_emitted = 0
if pending_pos_for_alerts:
rabbitmq_client = RabbitMQClient(settings.RABBITMQ_URL, "procurement")
try:
await rabbitmq_client.connect()
event_publisher = UnifiedEventPublisher(rabbitmq_client, "procurement")
for po in pending_pos_for_alerts:
try:
# Get deadline for urgency calculation
now_utc = datetime.now(timezone.utc)
if po.required_delivery_date:
deadline = po.required_delivery_date
if deadline.tzinfo is None:
deadline = deadline.replace(tzinfo=timezone.utc)
else:
days_until = 3 if po.priority == 'critical' else 7
deadline = now_utc + timedelta(days=days_until)
hours_until = (deadline - now_utc).total_seconds() / 3600
# Check for reasoning data and generate if missing
reasoning_data = po.reasoning_data
if not reasoning_data:
try:
# Generate synthetic reasoning data for demo purposes
product_names = [item.product_name for item in po.items] if po.items else ["Assorted Bakery Supplies"]
supplier_name = f"Supplier-{str(po.supplier_id)[:8]}" # Fallback name
# Create realistic looking reasoning based on PO data
reasoning_data = create_po_reasoning_low_stock(
supplier_name=supplier_name,
product_names=product_names,
current_stock=15.5, # Simulated
required_stock=100.0, # Simulated
days_until_stockout=2, # Simulated urgent
threshold_percentage=20,
affected_products=product_names[:2],
estimated_lost_orders=12
)
logger.info("Generated synthetic reasoning data for demo alert", po_id=str(po.id))
except Exception as e:
logger.warning("Failed to generate synthetic reasoning data, using ultimate fallback", error=str(e))
# Ultimate fallback: Create minimal valid reasoning data structure
reasoning_data = {
"type": "low_stock_detection",
"parameters": {
"supplier_name": supplier_name,
"product_names": ["Assorted Bakery Supplies"],
"product_count": 1,
"current_stock": 10.0,
"required_stock": 50.0,
"days_until_stockout": 2
},
"consequence": {
"type": "stockout_risk",
"severity": "medium",
"impact_days": 2
},
"metadata": {
"trigger_source": "demo_fallback",
"ai_assisted": False
}
}
logger.info("Used ultimate fallback reasoning_data structure", po_id=str(po.id))
# Prepare metadata for the alert
severity = 'high' if po.priority == 'critical' else 'medium'
metadata = {
'po_id': str(po.id),
'po_number': po.po_number,
'supplier_id': str(po.supplier_id),
'supplier_name': f'Supplier-{po.supplier_id}', # Simplified for demo
'total_amount': float(po.total_amount),
'currency': po.currency,
'priority': po.priority,
'severity': severity,
'required_delivery_date': po.required_delivery_date.isoformat() if po.required_delivery_date else None,
'created_at': po.created_at.isoformat(),
'financial_impact': float(po.total_amount),
'deadline': deadline.isoformat(),
'hours_until_consequence': int(hours_until),
'reasoning_data': reasoning_data, # For enrichment service
}
# Use UnifiedEventPublisher.publish_alert() which handles MinimalEvent format automatically
success = await event_publisher.publish_alert(
event_type='supply_chain.po_approval_needed', # domain.event_type format
tenant_id=virtual_uuid,
severity=severity,
data=metadata
)
if success:
alerts_emitted += 1
logger.info(
"PO approval alert emitted during cloning",
po_id=str(po.id),
po_number=po.po_number,
tenant_id=str(virtual_uuid)
)
except Exception as e:
logger.error(
"Failed to emit PO approval alert during cloning",
po_id=str(po.id),
error=str(e),
exc_info=True
)
# Continue with other POs
continue
finally:
await rabbitmq_client.disconnect()
stats["alerts_emitted"] = alerts_emitted
# Calculate total records (alerts_emitted is reported separately, not counted here)
total_records = (stats["procurement_plans"] + stats["procurement_requirements"] +
                 stats["purchase_orders"] + stats["purchase_order_items"] +
                 stats["replenishment_plans"] + stats["replenishment_items"])
duration_ms = int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000)
# Single event string: the merged diff left two positional messages here,
# which structlog/stdlib logging would reject.
logger.info(
    "Procurement data loading from seed files completed",
    virtual_tenant_id=virtual_tenant_id,
    total_records=total_records,
    alerts_emitted=alerts_emitted,
    stats=stats,
    duration_ms=duration_ms
)
@@ -651,7 +449,7 @@ async def clone_demo_data(
except Exception as e:
logger.error(
"Failed to clone procurement data",
"Failed to load procurement seed data",
error=str(e),
virtual_tenant_id=virtual_tenant_id,
exc_info=True
@@ -696,14 +494,12 @@ async def delete_demo_data(
virtual_uuid = uuid.UUID(virtual_tenant_id)
# Count records
po_count = await db.scalar(select(func.count(PurchaseOrder.id)).where(PurchaseOrder.tenant_id == virtual_uuid))
item_count = await db.scalar(select(func.count(PurchaseOrderItem.id)).where(PurchaseOrderItem.tenant_id == virtual_uuid))
plan_count = await db.scalar(select(func.count(ProcurementPlan.id)).where(ProcurementPlan.tenant_id == virtual_uuid))
req_count = await db.scalar(select(func.count(ProcurementRequirement.id)).where(ProcurementRequirement.tenant_id == virtual_uuid))
replan_count = await db.scalar(select(func.count(ReplenishmentPlan.id)).where(ReplenishmentPlan.tenant_id == virtual_uuid))
replan_item_count = await db.scalar(select(func.count(ReplenishmentPlanItem.id)).where(ReplenishmentPlanItem.tenant_id == virtual_uuid))
po_count = await db.scalar(func.count(PurchaseOrder.id).where(PurchaseOrder.tenant_id == virtual_uuid))
po_item_count = await db.scalar(func.count(PurchaseOrderItem.id).where(PurchaseOrderItem.tenant_id == virtual_uuid))
plan_count = await db.scalar(func.count(ProcurementPlan.id).where(ProcurementPlan.tenant_id == virtual_uuid))
replan_count = await db.scalar(func.count(ReplenishmentPlan.id).where(ReplenishmentPlan.tenant_id == virtual_uuid))
# Delete in order (respecting foreign key constraints)
# Delete in order
await db.execute(delete(PurchaseOrderItem).where(PurchaseOrderItem.tenant_id == virtual_uuid))
await db.execute(delete(PurchaseOrder).where(PurchaseOrder.tenant_id == virtual_uuid))
await db.execute(delete(ProcurementRequirement).where(ProcurementRequirement.tenant_id == virtual_uuid))
@@ -721,16 +517,14 @@ async def delete_demo_data(
"virtual_tenant_id": virtual_tenant_id,
"records_deleted": {
"purchase_orders": po_count,
"purchase_order_items": item_count,
"purchase_order_items": po_item_count,
"procurement_plans": plan_count,
"procurement_requirements": req_count,
"replenishment_plans": replan_count,
"replenishment_items": replan_item_count,
"total": po_count + item_count + plan_count + req_count + replan_count + replan_item_count
"total": po_count + po_item_count + plan_count + replan_count
},
"duration_ms": duration_ms
}
except Exception as e:
logger.error("Failed to delete procurement data", error=str(e), exc_info=True)
await db.rollback()
raise HTTPException(status_code=500, detail=str(e))
raise HTTPException(status_code=500, detail=str(e))