bakery-ia/services/procurement/app/api/internal_demo.py
"""
Internal Demo Cloning API for Procurement Service
Service-to-service endpoint for cloning procurement and purchase order data
"""
from fastapi import APIRouter, Depends, HTTPException, Header
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select, delete, func
import structlog
import uuid
from datetime import datetime, timezone, timedelta, date
from typing import Optional
import os
from app.core.database import get_db
from app.models.procurement_plan import ProcurementPlan, ProcurementRequirement
from app.models.purchase_order import PurchaseOrder, PurchaseOrderItem
from app.models.replenishment import ReplenishmentPlan, ReplenishmentPlanItem
from shared.utils.demo_dates import adjust_date_for_demo, BASE_REFERENCE_DATE
from shared.messaging import RabbitMQClient, UnifiedEventPublisher
from sqlalchemy.orm import selectinload
from shared.schemas.reasoning_types import (
create_po_reasoning_low_stock,
create_po_reasoning_supplier_contract
)
from app.core.config import settings
logger = structlog.get_logger()
router = APIRouter(prefix="/internal/demo", tags=["internal"])
# Base demo tenant ID
DEMO_TENANT_PROFESSIONAL = "a1b2c3d4-e5f6-47a8-b9c0-d1e2f3a4b5c6"
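# NOTE: this constant is not referenced elsewhere in this module; the clone endpoint
# receives the template tenant explicitly via its base_tenant_id parameter.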
def verify_internal_api_key(x_internal_api_key: Optional[str] = Header(None)):
"""Verify internal API key for service-to-service communication"""
if x_internal_api_key != settings.INTERNAL_API_KEY:
logger.warning("Unauthorized internal API access attempted")
raise HTTPException(status_code=403, detail="Invalid internal API key")
return True
@router.post("/clone")
async def clone_demo_data(
base_tenant_id: str,
virtual_tenant_id: str,
demo_account_type: str,
session_id: Optional[str] = None,
session_created_at: Optional[str] = None,
db: AsyncSession = Depends(get_db),
_: bool = Depends(verify_internal_api_key)
):
"""
Clone procurement service data for a virtual demo tenant
Clones:
- Procurement plans with requirements
- Purchase orders with line items
- Replenishment plans with items
- Adjusts dates to recent timeframe
Args:
base_tenant_id: Template tenant UUID to clone from
virtual_tenant_id: Target virtual tenant UUID
demo_account_type: Type of demo account
session_id: Originating session ID for tracing
Returns:
Cloning status and record counts
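Example (illustrative only; host, port and key depend on the deployment):
curl -X POST "http://procurement:8000/internal/demo/clone?base_tenant_id=<uuid>&virtual_tenant_id=<uuid>&demo_account_type=professional" \
-H "X-Internal-API-Key: <key>"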
"""
start_time = datetime.now(timezone.utc)
# Parse session creation time for date adjustment
if session_created_at:
try:
session_time = datetime.fromisoformat(session_created_at.replace('Z', '+00:00'))
except (ValueError, AttributeError):
session_time = start_time
else:
session_time = start_time
logger.info(
"Starting procurement data cloning",
base_tenant_id=base_tenant_id,
virtual_tenant_id=virtual_tenant_id,
demo_account_type=demo_account_type,
session_id=session_id,
session_created_at=session_created_at
)
try:
# Validate UUIDs
base_uuid = uuid.UUID(base_tenant_id)
virtual_uuid = uuid.UUID(virtual_tenant_id)
# Track cloning statistics
stats = {
"procurement_plans": 0,
"procurement_requirements": 0,
"purchase_orders": 0,
"purchase_order_items": 0,
"replenishment_plans": 0,
"replenishment_items": 0
}
# Clone Procurement Plans with Requirements
result = await db.execute(
select(ProcurementPlan).where(ProcurementPlan.tenant_id == base_uuid)
)
base_plans = result.scalars().all()
logger.info(
"Found procurement plans to clone",
count=len(base_plans),
base_tenant=str(base_uuid)
)
# Calculate date offset for procurement
if base_plans:
max_plan_date = max(plan.plan_date for plan in base_plans if plan.plan_date)
today_date = date.today()
days_diff = (today_date - max_plan_date).days
plan_date_offset = timedelta(days=days_diff)
else:
plan_date_offset = timedelta(days=0)
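# The offset anchors the newest template plan_date to today's server date (not
# session_time) and shifts every cloned plan/requirement date by the same number
# of days, so the relative spacing between plans is preserved.
# Example: if the newest template plan is dated 90 days ago, all plan dates move forward 90 days.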
plan_id_map = {}
for plan in base_plans:
new_plan_id = uuid.uuid4()
plan_id_map[plan.id] = new_plan_id
new_plan = ProcurementPlan(
id=new_plan_id,
tenant_id=virtual_uuid,
plan_number=f"PROC-{uuid.uuid4().hex[:8].upper()}",
plan_date=plan.plan_date + plan_date_offset if plan.plan_date else None,
plan_period_start=plan.plan_period_start + plan_date_offset if plan.plan_period_start else None,
plan_period_end=plan.plan_period_end + plan_date_offset if plan.plan_period_end else None,
planning_horizon_days=plan.planning_horizon_days,
status=plan.status,
plan_type=plan.plan_type,
priority=plan.priority,
business_model=plan.business_model,
procurement_strategy=plan.procurement_strategy,
total_requirements=plan.total_requirements,
total_estimated_cost=plan.total_estimated_cost,
total_approved_cost=plan.total_approved_cost,
cost_variance=plan.cost_variance,
created_at=session_time,
updated_at=session_time
)
db.add(new_plan)
stats["procurement_plans"] += 1
# Clone Procurement Requirements
for old_plan_id, new_plan_id in plan_id_map.items():
result = await db.execute(
select(ProcurementRequirement).where(ProcurementRequirement.plan_id == old_plan_id)
)
requirements = result.scalars().all()
for req in requirements:
new_req = ProcurementRequirement(
id=uuid.uuid4(),
plan_id=new_plan_id,
requirement_number=req.requirement_number,
product_id=req.product_id,
product_name=req.product_name,
product_sku=req.product_sku,
product_category=req.product_category,
product_type=req.product_type,
required_quantity=req.required_quantity,
unit_of_measure=req.unit_of_measure,
safety_stock_quantity=req.safety_stock_quantity,
total_quantity_needed=req.total_quantity_needed,
current_stock_level=req.current_stock_level,
reserved_stock=req.reserved_stock,
available_stock=req.available_stock,
net_requirement=req.net_requirement,
order_demand=req.order_demand,
production_demand=req.production_demand,
forecast_demand=req.forecast_demand,
buffer_demand=req.buffer_demand,
preferred_supplier_id=req.preferred_supplier_id,
backup_supplier_id=req.backup_supplier_id,
supplier_name=req.supplier_name,
supplier_lead_time_days=req.supplier_lead_time_days,
minimum_order_quantity=req.minimum_order_quantity,
estimated_unit_cost=req.estimated_unit_cost,
estimated_total_cost=req.estimated_total_cost,
last_purchase_cost=req.last_purchase_cost,
cost_variance=req.cost_variance,
required_by_date=req.required_by_date + plan_date_offset if req.required_by_date else None,
lead_time_buffer_days=req.lead_time_buffer_days,
suggested_order_date=req.suggested_order_date + plan_date_offset if req.suggested_order_date else None,
latest_order_date=req.latest_order_date + plan_date_offset if req.latest_order_date else None,
quality_specifications=req.quality_specifications,
special_requirements=req.special_requirements,
storage_requirements=req.storage_requirements,
shelf_life_days=req.shelf_life_days,
status=req.status,
priority=req.priority,
risk_level=req.risk_level,
purchase_order_id=req.purchase_order_id,
purchase_order_number=req.purchase_order_number,
ordered_quantity=req.ordered_quantity,
ordered_at=req.ordered_at,
expected_delivery_date=req.expected_delivery_date + plan_date_offset if req.expected_delivery_date else None,
actual_delivery_date=req.actual_delivery_date + plan_date_offset if req.actual_delivery_date else None,
received_quantity=req.received_quantity,
delivery_status=req.delivery_status,
fulfillment_rate=req.fulfillment_rate,
on_time_delivery=req.on_time_delivery,
quality_rating=req.quality_rating,
source_orders=req.source_orders,
source_production_batches=req.source_production_batches,
demand_analysis=req.demand_analysis,
approved_quantity=req.approved_quantity,
approved_cost=req.approved_cost,
approved_at=req.approved_at,
approved_by=req.approved_by,
procurement_notes=req.procurement_notes,
supplier_communication=req.supplier_communication,
requirement_metadata=req.requirement_metadata,
created_at=session_time,
updated_at=session_time
)
db.add(new_req)
stats["procurement_requirements"] += 1
# Clone Purchase Orders with Line Items
result = await db.execute(
select(PurchaseOrder).where(PurchaseOrder.tenant_id == base_uuid)
)
base_orders = result.scalars().all()
logger.info(
"Found purchase orders to clone",
count=len(base_orders),
base_tenant=str(base_uuid)
)
order_id_map = {}
for order in base_orders:
new_order_id = uuid.uuid4()
order_id_map[order.id] = new_order_id
# Adjust dates using demo_dates utility
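# adjust_date_for_demo (shared.utils.demo_dates) is assumed to shift a template
# timestamp by the delta between session_time and BASE_REFERENCE_DATE, keeping the
# cloned order timeline aligned with the demo session clock.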
adjusted_order_date = adjust_date_for_demo(
order.order_date, session_time, BASE_REFERENCE_DATE
)
adjusted_required_delivery = adjust_date_for_demo(
order.required_delivery_date, session_time, BASE_REFERENCE_DATE
)
adjusted_estimated_delivery = adjust_date_for_demo(
order.estimated_delivery_date, session_time, BASE_REFERENCE_DATE
)
adjusted_supplier_confirmation = adjust_date_for_demo(
order.supplier_confirmation_date, session_time, BASE_REFERENCE_DATE
)
adjusted_approved_at = adjust_date_for_demo(
order.approved_at, session_time, BASE_REFERENCE_DATE
)
adjusted_sent_to_supplier_at = adjust_date_for_demo(
order.sent_to_supplier_at, session_time, BASE_REFERENCE_DATE
)
# Generate a system user UUID for audit fields (demo purposes)
system_user_id = uuid.uuid4()
# For demo sessions: Adjust expected_delivery_date if it exists
# This ensures the ExecutionProgressTracker shows realistic delivery data
expected_delivery = None
if hasattr(order, 'expected_delivery_date') and order.expected_delivery_date:
# Adjust the existing expected_delivery_date to demo session time
expected_delivery = adjust_date_for_demo(
order.expected_delivery_date, session_time, BASE_REFERENCE_DATE
)
elif order.status in ['approved', 'sent_to_supplier', 'confirmed']:
# If no expected_delivery_date but order is in delivery status, use estimated_delivery_date
expected_delivery = adjusted_estimated_delivery
# Create new PurchaseOrder - add expected_delivery_date only if column exists (after migration)
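# The hasattr() guards below let the clone run against schema versions in which
# some optional PurchaseOrder columns may not exist yet.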
new_order = PurchaseOrder(
id=new_order_id,
tenant_id=virtual_uuid,
po_number=f"PO-{uuid.uuid4().hex[:8].upper()}", # New PO number
reference_number=order.reference_number,
supplier_id=order.supplier_id,
procurement_plan_id=plan_id_map.get(order.procurement_plan_id) if hasattr(order, 'procurement_plan_id') and order.procurement_plan_id else None,
order_date=adjusted_order_date,
required_delivery_date=adjusted_required_delivery,
estimated_delivery_date=adjusted_estimated_delivery,
status=order.status,
priority=order.priority,
subtotal=order.subtotal,
tax_amount=order.tax_amount,
discount_amount=order.discount_amount,
shipping_cost=order.shipping_cost,
total_amount=order.total_amount,
currency=order.currency,
delivery_address=order.delivery_address if hasattr(order, 'delivery_address') else None,
delivery_instructions=order.delivery_instructions if hasattr(order, 'delivery_instructions') else None,
delivery_contact=order.delivery_contact if hasattr(order, 'delivery_contact') else None,
delivery_phone=order.delivery_phone if hasattr(order, 'delivery_phone') else None,
requires_approval=order.requires_approval if hasattr(order, 'requires_approval') else False,
approved_by=order.approved_by if hasattr(order, 'approved_by') else None,
approved_at=adjusted_approved_at,
rejection_reason=order.rejection_reason if hasattr(order, 'rejection_reason') else None,
auto_approved=order.auto_approved if hasattr(order, 'auto_approved') else False,
auto_approval_rule_id=order.auto_approval_rule_id if hasattr(order, 'auto_approval_rule_id') else None,
sent_to_supplier_at=adjusted_sent_to_supplier_at,
supplier_confirmation_date=adjusted_supplier_confirmation,
supplier_reference=order.supplier_reference if hasattr(order, 'supplier_reference') else None,
notes=order.notes if hasattr(order, 'notes') else None,
internal_notes=order.internal_notes if hasattr(order, 'internal_notes') else None,
terms_and_conditions=order.terms_and_conditions if hasattr(order, 'terms_and_conditions') else None,
reasoning_data=order.reasoning_data if hasattr(order, 'reasoning_data') else None, # Clone reasoning for JTBD dashboard
created_at=session_time,
updated_at=session_time,
created_by=system_user_id,
updated_by=system_user_id
)
# Add expected_delivery_date if the model supports it (after migration)
if hasattr(PurchaseOrder, 'expected_delivery_date'):
new_order.expected_delivery_date = expected_delivery
db.add(new_order)
stats["purchase_orders"] += 1
# Clone Purchase Order Items
for old_order_id, new_order_id in order_id_map.items():
result = await db.execute(
select(PurchaseOrderItem).where(PurchaseOrderItem.purchase_order_id == old_order_id)
)
order_items = result.scalars().all()
for item in order_items:
new_item = PurchaseOrderItem(
id=uuid.uuid4(),
tenant_id=virtual_uuid,
purchase_order_id=new_order_id,
procurement_requirement_id=item.procurement_requirement_id if hasattr(item, 'procurement_requirement_id') else None,
inventory_product_id=item.inventory_product_id,
product_code=item.product_code if hasattr(item, 'product_code') else None,
product_name=item.product_name,
supplier_price_list_id=item.supplier_price_list_id if hasattr(item, 'supplier_price_list_id') else None,
ordered_quantity=item.ordered_quantity,
unit_of_measure=item.unit_of_measure,
unit_price=item.unit_price,
line_total=item.line_total,
received_quantity=item.received_quantity if hasattr(item, 'received_quantity') else 0,
remaining_quantity=item.remaining_quantity if hasattr(item, 'remaining_quantity') else item.ordered_quantity,
quality_requirements=item.quality_requirements if hasattr(item, 'quality_requirements') else None,
item_notes=item.item_notes if hasattr(item, 'item_notes') else None,
created_at=session_time,
updated_at=session_time
)
db.add(new_item)
stats["purchase_order_items"] += 1
# Clone Replenishment Plans with Items
result = await db.execute(
select(ReplenishmentPlan).where(ReplenishmentPlan.tenant_id == base_uuid)
)
base_replenishment_plans = result.scalars().all()
logger.info(
"Found replenishment plans to clone",
count=len(base_replenishment_plans),
base_tenant=str(base_uuid)
)
replan_id_map = {}
for replan in base_replenishment_plans:
new_replan_id = uuid.uuid4()
replan_id_map[replan.id] = new_replan_id
new_replan = ReplenishmentPlan(
id=new_replan_id,
tenant_id=virtual_uuid,
plan_number=f"REPL-{uuid.uuid4().hex[:8].upper()}",
plan_date=replan.plan_date + plan_date_offset if replan.plan_date else None,
plan_period_start=replan.plan_period_start + plan_date_offset if replan.plan_period_start else None,
plan_period_end=replan.plan_period_end + plan_date_offset if replan.plan_period_end else None,
planning_horizon_days=replan.planning_horizon_days,
status=replan.status,
plan_type=replan.plan_type,
priority=replan.priority,
business_model=replan.business_model,
total_items=replan.total_items,
total_estimated_cost=replan.total_estimated_cost,
created_at=session_time,
updated_at=session_time
)
db.add(new_replan)
stats["replenishment_plans"] += 1
# Clone Replenishment Plan Items
for old_replan_id, new_replan_id in replan_id_map.items():
result = await db.execute(
select(ReplenishmentPlanItem).where(ReplenishmentPlanItem.plan_id == old_replan_id)
)
replan_items = result.scalars().all()
for item in replan_items:
new_item = ReplenishmentPlanItem(
id=uuid.uuid4(),
plan_id=new_replan_id,
product_id=item.product_id,
product_name=item.product_name,
product_sku=item.product_sku,
required_quantity=item.required_quantity,
unit_of_measure=item.unit_of_measure,
current_stock_level=item.current_stock_level,
safety_stock_quantity=item.safety_stock_quantity,
suggested_order_quantity=item.suggested_order_quantity,
supplier_id=item.supplier_id,
supplier_name=item.supplier_name,
estimated_delivery_days=item.estimated_delivery_days,
required_by_date=item.required_by_date + plan_date_offset if item.required_by_date else None,
status=item.status,
priority=item.priority,
notes=item.notes,
created_at=session_time,
updated_at=session_time
)
db.add(new_item)
stats["replenishment_items"] += 1
# Commit cloned data
await db.commit()
total_records = sum(stats.values())
# FIX DELIVERY ALERT TIMING - Adjust specific POs to guarantee delivery alerts
# After cloning, some POs need their expected_delivery_date adjusted relative to session time
# to ensure they trigger delivery tracking alerts (arriving soon, overdue, etc.)
logger.info("Adjusting delivery PO dates for guaranteed alert triggering")
# Query for sent_to_supplier POs that have expected_delivery_date
result = await db.execute(
select(PurchaseOrder)
.where(
PurchaseOrder.tenant_id == virtual_uuid,
PurchaseOrder.status == 'sent_to_supplier',
PurchaseOrder.expected_delivery_date.isnot(None)
)
.limit(5) # Take up to 5 candidate POs; the first 4 are adjusted below
)
delivery_pos = result.scalars().all()
if len(delivery_pos) >= 2:
# PO 1: Set to OVERDUE (5 hours ago) - will trigger overdue alert
delivery_pos[0].expected_delivery_date = session_time - timedelta(hours=5)
delivery_pos[0].required_delivery_date = session_time - timedelta(hours=5)
delivery_pos[0].notes = "🔴 OVERDUE: Expected delivery was 5 hours ago - Contact supplier immediately"
logger.info(f"Set PO {delivery_pos[0].po_number} to overdue (5 hours ago)")
# PO 2: Set to ARRIVING SOON (1 hour from now) - will trigger arriving soon alert
delivery_pos[1].expected_delivery_date = session_time + timedelta(hours=1)
delivery_pos[1].required_delivery_date = session_time + timedelta(hours=1)
delivery_pos[1].notes = "📦 ARRIVING SOON: Delivery expected in 1 hour - Prepare for stock receipt"
logger.info(f"Set PO {delivery_pos[1].po_number} to arriving soon (1 hour)")
if len(delivery_pos) >= 4:
# PO 3: Set to TODAY AFTERNOON (6 hours from now) - visible in dashboard
delivery_pos[2].expected_delivery_date = session_time + timedelta(hours=6)
delivery_pos[2].required_delivery_date = session_time + timedelta(hours=6)
delivery_pos[2].notes = "📅 TODAY: Delivery scheduled for this afternoon"
logger.info(f"Set PO {delivery_pos[2].po_number} to today afternoon (6 hours)")
# PO 4: Set to TOMORROW MORNING (18 hours from now)
delivery_pos[3].expected_delivery_date = session_time + timedelta(hours=18)
delivery_pos[3].required_delivery_date = session_time + timedelta(hours=18)
delivery_pos[3].notes = "📅 TOMORROW: Morning delivery scheduled"
logger.info(f"Set PO {delivery_pos[3].po_number} to tomorrow morning (18 hours)")
# Commit the adjusted delivery dates
await db.commit()
logger.info(f"Adjusted {len(delivery_pos)} POs for delivery alert triggering")
# EMIT ALERTS FOR PENDING APPROVAL POs
# After cloning, emit PO approval alerts for any pending_approval POs
# This ensures the action queue is populated when the demo session starts
pending_pos_for_alerts = []
for order_id in order_id_map.values():
result = await db.execute(
select(PurchaseOrder)
.options(selectinload(PurchaseOrder.items))
.where(
PurchaseOrder.id == order_id,
PurchaseOrder.status == 'pending_approval'
)
)
po = result.scalar_one_or_none()
if po:
pending_pos_for_alerts.append(po)
logger.info(
"Emitting PO approval alerts for cloned pending POs",
pending_po_count=len(pending_pos_for_alerts),
virtual_tenant_id=virtual_tenant_id
)
# Initialize RabbitMQ client for alert emission using UnifiedEventPublisher
alerts_emitted = 0
if pending_pos_for_alerts:
rabbitmq_client = RabbitMQClient(settings.RABBITMQ_URL, "procurement")
try:
await rabbitmq_client.connect()
event_publisher = UnifiedEventPublisher(rabbitmq_client, "procurement")
for po in pending_pos_for_alerts:
try:
# Get deadline for urgency calculation
now_utc = datetime.now(timezone.utc)
if po.required_delivery_date:
deadline = po.required_delivery_date
if deadline.tzinfo is None:
deadline = deadline.replace(tzinfo=timezone.utc)
else:
days_until = 3 if po.priority == 'critical' else 7
deadline = now_utc + timedelta(days=days_until)
hours_until = (deadline - now_utc).total_seconds() / 3600
# Check for reasoning data and generate if missing
reasoning_data = po.reasoning_data
if not reasoning_data:
try:
# Generate synthetic reasoning data for demo purposes
product_names = [item.product_name for item in po.items] if po.items else ["Assorted Bakery Supplies"]
supplier_name = f"Supplier-{str(po.supplier_id)[:8]}" # Fallback name
# Create realistic looking reasoning based on PO data
reasoning_data = create_po_reasoning_low_stock(
supplier_name=supplier_name,
product_names=product_names,
current_stock=15.5, # Simulated
required_stock=100.0, # Simulated
days_until_stockout=2, # Simulated urgent
threshold_percentage=20,
affected_products=product_names[:2],
estimated_lost_orders=12
)
logger.info("Generated synthetic reasoning data for demo alert", po_id=str(po.id))
except Exception as e:
logger.warning("Failed to generate synthetic reasoning data, using ultimate fallback", error=str(e))
# Ultimate fallback: Create minimal valid reasoning data structure
reasoning_data = {
"type": "low_stock_detection",
"parameters": {
"supplier_name": supplier_name,
"product_names": ["Assorted Bakery Supplies"],
"product_count": 1,
"current_stock": 10.0,
"required_stock": 50.0,
"days_until_stockout": 2
},
"consequence": {
"type": "stockout_risk",
"severity": "medium",
"impact_days": 2
},
"metadata": {
"trigger_source": "demo_fallback",
"ai_assisted": False
}
}
logger.info("Used ultimate fallback reasoning_data structure", po_id=str(po.id))
# Prepare metadata for the alert
severity = 'high' if po.priority == 'critical' else 'medium'
metadata = {
'po_id': str(po.id),
'po_number': po.po_number,
'supplier_id': str(po.supplier_id),
'supplier_name': f'Supplier-{po.supplier_id}', # Simplified for demo
'total_amount': float(po.total_amount),
'currency': po.currency,
'priority': po.priority,
'severity': severity,
'required_delivery_date': po.required_delivery_date.isoformat() if po.required_delivery_date else None,
'created_at': po.created_at.isoformat(),
'financial_impact': float(po.total_amount),
'deadline': deadline.isoformat(),
'hours_until_consequence': int(hours_until),
'reasoning_data': reasoning_data, # For enrichment service
}
# Use UnifiedEventPublisher.publish_alert() which handles MinimalEvent format automatically
success = await event_publisher.publish_alert(
event_type='supply_chain.po_approval_needed', # domain.event_type format
tenant_id=virtual_uuid,
severity=severity,
data=metadata
)
if success:
alerts_emitted += 1
logger.info(
"PO approval alert emitted during cloning",
po_id=str(po.id),
po_number=po.po_number,
tenant_id=str(virtual_uuid)
)
except Exception as e:
logger.error(
"Failed to emit PO approval alert during cloning",
po_id=str(po.id),
error=str(e),
exc_info=True
)
# Continue with other POs
continue
finally:
await rabbitmq_client.disconnect()
stats["alerts_emitted"] = alerts_emitted
duration_ms = int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000)
logger.info(
"Procurement data cloning completed",
virtual_tenant_id=virtual_tenant_id,
total_records=total_records,
alerts_emitted=alerts_emitted,
stats=stats,
duration_ms=duration_ms
)
return {
"service": "procurement",
"status": "completed",
"records_cloned": total_records,
"duration_ms": duration_ms,
"details": stats
}
except ValueError as e:
logger.error("Invalid UUID format", error=str(e))
raise HTTPException(status_code=400, detail=f"Invalid UUID: {str(e)}")
except Exception as e:
logger.error(
"Failed to clone procurement data",
error=str(e),
virtual_tenant_id=virtual_tenant_id,
exc_info=True
)
# Rollback on error
await db.rollback()
return {
"service": "procurement",
"status": "failed",
"records_cloned": 0,
"duration_ms": int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000),
"error": str(e)
}
@router.get("/clone/health")
async def clone_health_check(_: bool = Depends(verify_internal_api_key)):
"""
Health check for internal cloning endpoint
Used by orchestrator to verify service availability
"""
return {
"service": "procurement",
"clone_endpoint": "available",
"version": "2.0.0"
}
@router.delete("/tenant/{virtual_tenant_id}")
async def delete_demo_data(
virtual_tenant_id: str,
db: AsyncSession = Depends(get_db),
_: bool = Depends(verify_internal_api_key)
):
"""Delete all procurement data for a virtual demo tenant"""
logger.info("Deleting procurement data for virtual tenant", virtual_tenant_id=virtual_tenant_id)
start_time = datetime.now(timezone.utc)
try:
virtual_uuid = uuid.UUID(virtual_tenant_id)
# Count records
po_count = await db.scalar(select(func.count(PurchaseOrder.id)).where(PurchaseOrder.tenant_id == virtual_uuid))
item_count = await db.scalar(select(func.count(PurchaseOrderItem.id)).where(PurchaseOrderItem.tenant_id == virtual_uuid))
plan_count = await db.scalar(select(func.count(ProcurementPlan.id)).where(ProcurementPlan.tenant_id == virtual_uuid))
req_count = await db.scalar(select(func.count(ProcurementRequirement.id)).where(ProcurementRequirement.tenant_id == virtual_uuid))
replan_count = await db.scalar(select(func.count(ReplenishmentPlan.id)).where(ReplenishmentPlan.tenant_id == virtual_uuid))
replan_item_count = await db.scalar(select(func.count(ReplenishmentPlanItem.id)).where(ReplenishmentPlanItem.tenant_id == virtual_uuid))
# Delete in order (respecting foreign key constraints)
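# Child tables first (order items, requirements, plan items) so their parent rows
# (orders, plans) can be removed without violating foreign keys.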
await db.execute(delete(PurchaseOrderItem).where(PurchaseOrderItem.tenant_id == virtual_uuid))
await db.execute(delete(PurchaseOrder).where(PurchaseOrder.tenant_id == virtual_uuid))
await db.execute(delete(ProcurementRequirement).where(ProcurementRequirement.tenant_id == virtual_uuid))
await db.execute(delete(ProcurementPlan).where(ProcurementPlan.tenant_id == virtual_uuid))
await db.execute(delete(ReplenishmentPlanItem).where(ReplenishmentPlanItem.tenant_id == virtual_uuid))
await db.execute(delete(ReplenishmentPlan).where(ReplenishmentPlan.tenant_id == virtual_uuid))
await db.commit()
duration_ms = int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000)
logger.info("Procurement data deleted successfully", virtual_tenant_id=virtual_tenant_id, duration_ms=duration_ms)
return {
"service": "procurement",
"status": "deleted",
"virtual_tenant_id": virtual_tenant_id,
"records_deleted": {
"purchase_orders": po_count,
"purchase_order_items": item_count,
"procurement_plans": plan_count,
"procurement_requirements": req_count,
"replenishment_plans": replan_count,
"replenishment_items": replan_item_count,
"total": po_count + item_count + plan_count + req_count + replan_count + replan_item_count
},
"duration_ms": duration_ms
}
except Exception as e:
logger.error("Failed to delete procurement data", error=str(e), exc_info=True)
await db.rollback()
raise HTTPException(status_code=500, detail=str(e))