"""
Internal Demo Cloning API for Orders Service
Service-to-service endpoint for cloning order and procurement data
"""
from fastapi import APIRouter, Depends, HTTPException, Header
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select, delete, func
import structlog
import uuid
from datetime import datetime, timezone, timedelta, date
from typing import Optional
import os
from decimal import Decimal
from app.core.database import get_db
from app.models.order import CustomerOrder, OrderItem
from app.models.procurement import ProcurementPlan, ProcurementRequirement
from app.models.customer import Customer
from shared.utils.demo_dates import adjust_date_for_demo, BASE_REFERENCE_DATE
logger = structlog.get_logger()
router = APIRouter(prefix="/internal/demo", tags=["internal"])
# Internal API key for service-to-service auth
INTERNAL_API_KEY = os.getenv("INTERNAL_API_KEY", "dev-internal-key-change-in-production")
# Base demo tenant IDs
DEMO_TENANT_SAN_PABLO = "a1b2c3d4-e5f6-47a8-b9c0-d1e2f3a4b5c6"
DEMO_TENANT_LA_ESPIGA = "b2c3d4e5-f6a7-48b9-c0d1-e2f3a4b5c6d7"
def verify_internal_api_key(x_internal_api_key: Optional[str] = Header(None)):
"""Verify internal API key for service-to-service communication"""
if x_internal_api_key != INTERNAL_API_KEY:
logger.warning("Unauthorized internal API access attempted")
raise HTTPException(status_code=403, detail="Invalid internal API key")
return True
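# Note: FastAPI binds the "X-Internal-API-Key" request header to the
# x_internal_api_key parameter above; callers that omit or mistype the header
# receive a 403. Example header a calling service would send (the value shown is
# the dev default and only an assumption for local setups):
#
#   X-Internal-API-Key: dev-internal-key-change-in-production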
@router.post("/clone")
async def clone_demo_data(
    base_tenant_id: str,
    virtual_tenant_id: str,
    demo_account_type: str,
    session_id: Optional[str] = None,
    session_created_at: Optional[str] = None,
    db: AsyncSession = Depends(get_db),
    _: bool = Depends(verify_internal_api_key)
):
"""
Clone orders service data for a virtual demo tenant
Clones:
- Customers
- Customer orders with line items
- Procurement plans with requirements
- Adjusts dates to recent timeframe
Args:
base_tenant_id: Template tenant UUID to clone from
virtual_tenant_id: Target virtual tenant UUID
demo_account_type: Type of demo account
session_id: Originating session ID for tracing
Returns:
Cloning status and record counts
"""
    start_time = datetime.now(timezone.utc)
    # Parse session creation time for date adjustment
    if session_created_at:
        try:
            session_time = datetime.fromisoformat(session_created_at.replace('Z', '+00:00'))
        except (ValueError, AttributeError):
            session_time = start_time
    else:
        session_time = start_time
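    # The 'Z' -> '+00:00' substitution above is needed because
    # datetime.fromisoformat() only accepts a trailing 'Z' from Python 3.11 onward;
    # on a parse failure the clone simply falls back to the current time.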
    logger.info(
        "Starting orders data cloning",
        base_tenant_id=base_tenant_id,
        virtual_tenant_id=virtual_tenant_id,
        demo_account_type=demo_account_type,
        session_id=session_id,
        session_created_at=session_created_at
    )
    try:
        # Validate UUIDs
        base_uuid = uuid.UUID(base_tenant_id)
        virtual_uuid = uuid.UUID(virtual_tenant_id)
        # Track cloning statistics
        stats = {
            "customers": 0,
            "customer_orders": 0,
            "order_line_items": 0,
            "procurement_plans": 0,
            "procurement_requirements": 0,
            "alerts_generated": 0
        }
        # Customer ID mapping (old -> new)
        customer_id_map = {}
        # Clone Customers
        result = await db.execute(
            select(Customer).where(Customer.tenant_id == base_uuid)
        )
        base_customers = result.scalars().all()
        logger.info(
            "Found customers to clone",
            count=len(base_customers),
            base_tenant=str(base_uuid)
        )
        for customer in base_customers:
            new_customer_id = uuid.uuid4()
            customer_id_map[customer.id] = new_customer_id
            new_customer = Customer(
                id=new_customer_id,
                tenant_id=virtual_uuid,
                customer_code=customer.customer_code,
                name=customer.name,
                business_name=customer.business_name,
                customer_type=customer.customer_type,
                tax_id=customer.tax_id,
                email=customer.email,
                phone=customer.phone,
                address_line1=customer.address_line1,
                address_line2=customer.address_line2,
                city=customer.city,
                state=customer.state,
                postal_code=customer.postal_code,
                country=customer.country,
                business_license=customer.business_license,
                is_active=customer.is_active,
                preferred_delivery_method=customer.preferred_delivery_method,
                payment_terms=customer.payment_terms,
                credit_limit=customer.credit_limit,
                discount_percentage=customer.discount_percentage,
                customer_segment=customer.customer_segment,
                priority_level=customer.priority_level,
                special_instructions=customer.special_instructions,
                delivery_preferences=customer.delivery_preferences,
                product_preferences=customer.product_preferences,
                total_orders=customer.total_orders,
                total_spent=customer.total_spent,
                average_order_value=customer.average_order_value,
                last_order_date=customer.last_order_date,
                created_at=session_time,
                updated_at=session_time
            )
            db.add(new_customer)
            stats["customers"] += 1
        # Clone Customer Orders with Line Items
        result = await db.execute(
            select(CustomerOrder).where(CustomerOrder.tenant_id == base_uuid)
        )
        base_orders = result.scalars().all()
        logger.info(
            "Found customer orders to clone",
            count=len(base_orders),
            base_tenant=str(base_uuid)
        )
        order_id_map = {}
        for order in base_orders:
            new_order_id = uuid.uuid4()
            order_id_map[order.id] = new_order_id
            # Adjust dates using demo_dates utility
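            # (Assumed behaviour of the shared helper: adjust_date_for_demo shifts a
            # template date by the offset between session_time and BASE_REFERENCE_DATE,
            # so cloned orders look recent. Illustrative only: with a reference date of
            # 2025-01-01 and a session started on 2025-03-15, a template order dated
            # 2024-12-30 would land on 2025-03-13. See shared.utils.demo_dates for the
            # actual rules.)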
            adjusted_order_date = adjust_date_for_demo(
                order.order_date, session_time, BASE_REFERENCE_DATE
            )
            adjusted_requested_delivery = adjust_date_for_demo(
                order.requested_delivery_date, session_time, BASE_REFERENCE_DATE
            )
            adjusted_confirmed_delivery = adjust_date_for_demo(
                order.confirmed_delivery_date, session_time, BASE_REFERENCE_DATE
            )
            adjusted_actual_delivery = adjust_date_for_demo(
                order.actual_delivery_date, session_time, BASE_REFERENCE_DATE
            )
            adjusted_window_start = adjust_date_for_demo(
                order.delivery_window_start, session_time, BASE_REFERENCE_DATE
            )
            adjusted_window_end = adjust_date_for_demo(
                order.delivery_window_end, session_time, BASE_REFERENCE_DATE
            )
            new_order = CustomerOrder(
                id=new_order_id,
                tenant_id=virtual_uuid,
                order_number=f"ORD-{uuid.uuid4().hex[:8].upper()}",  # New order number
                customer_id=customer_id_map.get(order.customer_id, order.customer_id),
                status=order.status,
                order_type=order.order_type,
                priority=order.priority,
                order_date=adjusted_order_date,
                requested_delivery_date=adjusted_requested_delivery,
                confirmed_delivery_date=adjusted_confirmed_delivery,
                actual_delivery_date=adjusted_actual_delivery,
                delivery_method=order.delivery_method,
                delivery_address=order.delivery_address,
                delivery_instructions=order.delivery_instructions,
                delivery_window_start=adjusted_window_start,
                delivery_window_end=adjusted_window_end,
                subtotal=order.subtotal,
                tax_amount=order.tax_amount,
                discount_amount=order.discount_amount,
                discount_percentage=order.discount_percentage,
                delivery_fee=order.delivery_fee,
                total_amount=order.total_amount,
                payment_status=order.payment_status,
                payment_method=order.payment_method,
                payment_terms=order.payment_terms,
                payment_due_date=order.payment_due_date,
                special_instructions=order.special_instructions,
                order_source=order.order_source,
                sales_channel=order.sales_channel,
                created_at=session_time,
                updated_at=session_time
            )
            db.add(new_order)
            stats["customer_orders"] += 1
        # Clone Order Items
        for old_order_id, new_order_id in order_id_map.items():
            result = await db.execute(
                select(OrderItem).where(OrderItem.order_id == old_order_id)
            )
            order_items = result.scalars().all()
            for item in order_items:
                new_item = OrderItem(
                    id=uuid.uuid4(),
                    order_id=new_order_id,
                    product_id=item.product_id,
                    product_name=item.product_name,
                    product_sku=item.product_sku,
                    quantity=item.quantity,
                    unit_of_measure=item.unit_of_measure,
                    unit_price=item.unit_price,
                    line_discount=item.line_discount,
                    line_total=item.line_total,
                    status=item.status
                )
                db.add(new_item)
                stats["order_line_items"] += 1
        # Clone Procurement Plans with Requirements
        result = await db.execute(
            select(ProcurementPlan).where(ProcurementPlan.tenant_id == base_uuid)
        )
        base_plans = result.scalars().all()
        logger.info(
            "Found procurement plans to clone",
            count=len(base_plans),
            base_tenant=str(base_uuid)
        )
        # Calculate date offset for procurement
        if base_plans:
            max_plan_date = max(plan.plan_date for plan in base_plans)
            today_date = date.today()
            days_diff = (today_date - max_plan_date).days
            plan_date_offset = timedelta(days=days_diff)
        else:
            plan_date_offset = timedelta(days=0)
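        # Illustrative arithmetic (numbers are made up): if the newest template
        # plan_date is 2025-09-30 and today is 2025-10-24, days_diff is 24, so every
        # plan and requirement date below shifts forward by 24 days; older template
        # dates keep their relative distance from the newest plan.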
        plan_id_map = {}
        for plan in base_plans:
            new_plan_id = uuid.uuid4()
            plan_id_map[plan.id] = new_plan_id
            new_plan = ProcurementPlan(
                id=new_plan_id,
                tenant_id=virtual_uuid,
                plan_number=f"PROC-{uuid.uuid4().hex[:8].upper()}",
                plan_date=plan.plan_date + plan_date_offset if plan.plan_date else None,
                plan_period_start=plan.plan_period_start + plan_date_offset if plan.plan_period_start else None,
                plan_period_end=plan.plan_period_end + plan_date_offset if plan.plan_period_end else None,
                planning_horizon_days=plan.planning_horizon_days,
                status=plan.status,
                plan_type=plan.plan_type,
                priority=plan.priority,
                business_model=plan.business_model,
                procurement_strategy=plan.procurement_strategy,
                total_requirements=plan.total_requirements,
                total_estimated_cost=plan.total_estimated_cost,
                total_approved_cost=plan.total_approved_cost,
                cost_variance=plan.cost_variance,
                created_at=datetime.now(timezone.utc),
                updated_at=datetime.now(timezone.utc)
            )
            db.add(new_plan)
            stats["procurement_plans"] += 1
        # Clone Procurement Requirements
        for old_plan_id, new_plan_id in plan_id_map.items():
            result = await db.execute(
                select(ProcurementRequirement).where(ProcurementRequirement.plan_id == old_plan_id)
            )
            requirements = result.scalars().all()
            for req in requirements:
                new_req = ProcurementRequirement(
                    id=uuid.uuid4(),
                    plan_id=new_plan_id,
                    requirement_number=req.requirement_number,
                    product_id=req.product_id,
                    product_name=req.product_name,
                    product_sku=req.product_sku,
                    product_category=req.product_category,
                    product_type=req.product_type,
                    required_quantity=req.required_quantity,
                    unit_of_measure=req.unit_of_measure,
                    safety_stock_quantity=req.safety_stock_quantity,
                    total_quantity_needed=req.total_quantity_needed,
                    current_stock_level=req.current_stock_level,
                    reserved_stock=req.reserved_stock,
                    available_stock=req.available_stock,
                    net_requirement=req.net_requirement,
                    order_demand=req.order_demand,
                    production_demand=req.production_demand,
                    forecast_demand=req.forecast_demand,
                    buffer_demand=req.buffer_demand,
                    preferred_supplier_id=req.preferred_supplier_id,
                    backup_supplier_id=req.backup_supplier_id,
                    supplier_name=req.supplier_name,
                    supplier_lead_time_days=req.supplier_lead_time_days,
                    minimum_order_quantity=req.minimum_order_quantity,
                    estimated_unit_cost=req.estimated_unit_cost,
                    estimated_total_cost=req.estimated_total_cost,
                    last_purchase_cost=req.last_purchase_cost,
                    cost_variance=req.cost_variance,
                    required_by_date=req.required_by_date + plan_date_offset if req.required_by_date else None,
                    lead_time_buffer_days=req.lead_time_buffer_days,
                    suggested_order_date=req.suggested_order_date + plan_date_offset if req.suggested_order_date else None,
                    latest_order_date=req.latest_order_date + plan_date_offset if req.latest_order_date else None,
                    quality_specifications=req.quality_specifications,
                    special_requirements=req.special_requirements,
                    storage_requirements=req.storage_requirements,
                    shelf_life_days=req.shelf_life_days,
                    status=req.status,
                    priority=req.priority,
                    risk_level=req.risk_level,
                    purchase_order_id=req.purchase_order_id,
                    purchase_order_number=req.purchase_order_number,
                    ordered_quantity=req.ordered_quantity,
                    ordered_at=req.ordered_at,
                    expected_delivery_date=req.expected_delivery_date + plan_date_offset if req.expected_delivery_date else None,
                    actual_delivery_date=req.actual_delivery_date + plan_date_offset if req.actual_delivery_date else None,
                    received_quantity=req.received_quantity,
                    delivery_status=req.delivery_status,
                    fulfillment_rate=req.fulfillment_rate,
                    on_time_delivery=req.on_time_delivery,
                    quality_rating=req.quality_rating,
                    source_orders=req.source_orders,
                    source_production_batches=req.source_production_batches,
                    demand_analysis=req.demand_analysis,
                    approved_quantity=req.approved_quantity,
                    approved_cost=req.approved_cost,
                    approved_at=req.approved_at,
                    approved_by=req.approved_by,
                    procurement_notes=req.procurement_notes,
                    supplier_communication=req.supplier_communication,
                    requirement_metadata=req.requirement_metadata,
                    created_at=datetime.now(timezone.utc),
                    updated_at=datetime.now(timezone.utc)
                )
                db.add(new_req)
                stats["procurement_requirements"] += 1
        # Commit cloned data
        await db.commit()
        # NOTE: Alert generation removed - alerts are now generated automatically by the
        # respective alert services which run scheduled checks at appropriate intervals.
        # This eliminates duplicate alerts and provides a more realistic demo experience.
        stats["alerts_generated"] = 0
        total_records = (
            stats["customers"]
            + stats["customer_orders"]
            + stats["order_line_items"]
            + stats["procurement_plans"]
            + stats["procurement_requirements"]
        )
        duration_ms = int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000)
        logger.info(
            "Orders data cloning completed",
            virtual_tenant_id=virtual_tenant_id,
            total_records=total_records,
            stats=stats,
            duration_ms=duration_ms
        )
        return {
            "service": "orders",
            "status": "completed",
            "records_cloned": total_records,
            "duration_ms": duration_ms,
            "details": stats
        }
    except ValueError as e:
        logger.error("Invalid UUID format", error=str(e))
        raise HTTPException(status_code=400, detail=f"Invalid UUID: {str(e)}")
    except Exception as e:
        logger.error(
            "Failed to clone orders data",
            error=str(e),
            virtual_tenant_id=virtual_tenant_id,
            exc_info=True
        )
        # Rollback on error
        await db.rollback()
        return {
            "service": "orders",
            "status": "failed",
            "records_cloned": 0,
            "duration_ms": int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000),
            "error": str(e)
        }
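# Illustrative request/response for the clone endpoint (host, port and values are
# placeholders, not configuration from this repository; the scalar parameters above
# are plain FastAPI query parameters):
#
#   POST http://orders:8000/internal/demo/clone
#        ?base_tenant_id=<uuid>&virtual_tenant_id=<uuid>&demo_account_type=<type>
#   X-Internal-API-Key: <INTERNAL_API_KEY>
#
#   200 -> {"service": "orders", "status": "completed", "records_cloned": <n>,
#           "duration_ms": <ms>, "details": {"customers": <n>, ...}}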
@router.get("/clone/health")
async def clone_health_check(_: bool = Depends(verify_internal_api_key)):
"""
Health check for internal cloning endpoint
Used by orchestrator to verify service availability
"""
return {
"service": "orders",
"clone_endpoint": "available",
"version": "2.0.0"
}
@router.delete("/tenant/{virtual_tenant_id}")
async def delete_demo_data(
    virtual_tenant_id: str,
    db: AsyncSession = Depends(get_db),
    _: bool = Depends(verify_internal_api_key)
):
"""Delete all order data for a virtual demo tenant"""
logger.info("Deleting order data for virtual tenant", virtual_tenant_id=virtual_tenant_id)
start_time = datetime.now(timezone.utc)
try:
virtual_uuid = uuid.UUID(virtual_tenant_id)
# Count records
order_count = await db.scalar(select(func.count(CustomerOrder.id)).where(CustomerOrder.tenant_id == virtual_uuid))
item_count = await db.scalar(select(func.count(OrderItem.id)).where(OrderItem.tenant_id == virtual_uuid))
customer_count = await db.scalar(select(func.count(Customer.id)).where(Customer.tenant_id == virtual_uuid))
procurement_count = await db.scalar(select(func.count(ProcurementPlan.id)).where(ProcurementPlan.tenant_id == virtual_uuid))
# Delete in order
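        # (children first: line items and requirements, then orders and plans,
        # then customers, so foreign-key constraints are not violated mid-delete)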
        await db.execute(delete(OrderItem).where(OrderItem.tenant_id == virtual_uuid))
        await db.execute(delete(CustomerOrder).where(CustomerOrder.tenant_id == virtual_uuid))
        await db.execute(delete(ProcurementRequirement).where(ProcurementRequirement.tenant_id == virtual_uuid))
        await db.execute(delete(ProcurementPlan).where(ProcurementPlan.tenant_id == virtual_uuid))
        await db.execute(delete(Customer).where(Customer.tenant_id == virtual_uuid))
        await db.commit()
        duration_ms = int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000)
        logger.info("Order data deleted successfully", virtual_tenant_id=virtual_tenant_id, duration_ms=duration_ms)
        return {
            "service": "orders",
            "status": "deleted",
            "virtual_tenant_id": virtual_tenant_id,
            "records_deleted": {
                "orders": order_count,
                "items": item_count,
                "customers": customer_count,
                "procurement": procurement_count,
                "total": order_count + item_count + customer_count + procurement_count
            },
            "duration_ms": duration_ms
        }
    except Exception as e:
        logger.error("Failed to delete order data", error=str(e), exc_info=True)
        await db.rollback()
        raise HTTPException(status_code=500, detail=str(e))
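# Illustrative cleanup call (host and port are placeholders; the orchestrator is assumed
# to issue this when a demo session expires):
#
#   DELETE http://orders:8000/internal/demo/tenant/<virtual_tenant_id>
#   X-Internal-API-Key: <INTERNAL_API_KEY>
#
#   200 -> {"service": "orders", "status": "deleted", "records_deleted": {...}, ...}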