bakery-ia/services/orders/app/api/internal_demo.py
"""
Internal Demo Cloning API for Orders Service
Service-to-service endpoint for cloning order and customer data
"""
from fastapi import APIRouter, Depends, HTTPException, Header
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select, delete, func
import structlog
import uuid
from datetime import datetime, timezone, timedelta, date
from typing import Optional
import os
from decimal import Decimal
import json
from pathlib import Path
from app.core.database import get_db
from app.models.order import CustomerOrder, OrderItem
from app.models.customer import Customer
from shared.utils.demo_dates import adjust_date_for_demo, resolve_time_marker, get_next_workday
from app.core.config import settings
logger = structlog.get_logger()
router = APIRouter(prefix="/internal/demo", tags=["internal"])
# Base demo tenant IDs
DEMO_TENANT_PROFESSIONAL = "a1b2c3d4-e5f6-47a8-b9c0-d1e2f3a4b5c6"
def parse_date_field(date_value, session_time: datetime, field_name: str = "date") -> Optional[datetime]:
"""
Parse date field, handling both ISO strings and BASE_TS markers.
Supports:
- BASE_TS markers: "BASE_TS + 1h30m", "BASE_TS - 2d"
- ISO 8601 strings: "2025-01-15T06:00:00Z"
- None values (returns None)
Returns timezone-aware datetime or None.
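Illustrative examples (assuming resolve_time_marker applies the offset to
session_time, here 2025-06-02T06:00:00+00:00):
    parse_date_field("BASE_TS + 1h30m", session_time)  -> 2025-06-02 07:30:00+00:00
    parse_date_field(None, session_time)                -> None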
"""
if not date_value:
return None
# Check if it's a BASE_TS marker
if isinstance(date_value, str) and date_value.startswith("BASE_TS"):
try:
return resolve_time_marker(date_value, session_time)
except ValueError as e:
logger.warning(
f"Invalid BASE_TS marker in {field_name}",
marker=date_value,
error=str(e)
)
return None
# Handle regular ISO date strings
try:
if isinstance(date_value, str):
original_date = datetime.fromisoformat(date_value.replace('Z', '+00:00'))
elif hasattr(date_value, 'isoformat'):
original_date = date_value
else:
logger.warning(f"Unsupported date format in {field_name}", date_value=date_value)
return None
return adjust_date_for_demo(original_date, session_time)
except (ValueError, AttributeError) as e:
logger.warning(
f"Invalid date format in {field_name}",
date_value=date_value,
error=str(e)
)
return None
def ensure_workday(target_date: datetime) -> datetime:
"""Ensure delivery date falls on a workday (Monday-Friday)"""
if target_date and target_date.weekday() >= 5: # Saturday or Sunday
return get_next_workday(target_date)
return target_date
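# Illustrative: assuming get_next_workday rolls forward to the next Monday-Friday date,
# a Saturday target such as 2025-06-07 becomes Monday 2025-06-09; weekday dates pass through unchanged.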
@router.post("/clone")
async def clone_demo_data(
base_tenant_id: str,
virtual_tenant_id: str,
demo_account_type: str,
session_id: Optional[str] = None,
session_created_at: Optional[str] = None,
db: AsyncSession = Depends(get_db)
):
"""
Clone orders service data for a virtual demo tenant
Clones:
- Customers
- Customer orders with line items
Dates are adjusted to a recent timeframe relative to the session creation time.
Args:
base_tenant_id: Template tenant UUID to clone from
virtual_tenant_id: Target virtual tenant UUID
demo_account_type: Type of demo account
session_id: Originating session ID for tracing
session_created_at: ISO 8601 timestamp of session creation, used as the anchor time for date adjustment
Returns:
Cloning status and record counts
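Example response (illustrative counts):
    {"service": "orders", "status": "completed", "records_cloned": 128,
     "duration_ms": 240, "details": {"customers": 12, "customer_orders": 36, ...}}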
"""
start_time = datetime.now(timezone.utc)
# Parse session creation time for date adjustment
if session_created_at:
try:
session_time = datetime.fromisoformat(session_created_at.replace('Z', '+00:00'))
except (ValueError, AttributeError):
session_time = start_time
else:
session_time = start_time
logger.info(
"Starting orders data cloning",
base_tenant_id=base_tenant_id,
virtual_tenant_id=virtual_tenant_id,
demo_account_type=demo_account_type,
session_id=session_id,
session_created_at=session_created_at
)
try:
# Validate UUIDs
base_uuid = uuid.UUID(base_tenant_id)
virtual_uuid = uuid.UUID(virtual_tenant_id)
# Track cloning statistics
stats = {
"customers": 0,
"customer_orders": 0,
"order_line_items": 0,
"alerts_generated": 0
}
# Customer ID mapping (old -> new)
customer_id_map = {}
# Load Customers from seed data
try:
from shared.utils.seed_data_paths import get_seed_data_path
if demo_account_type == "professional":
json_file = get_seed_data_path("professional", "08-orders.json")
elif demo_account_type == "enterprise":
json_file = get_seed_data_path("enterprise", "08-orders.json")
elif demo_account_type == "enterprise_child":
json_file = get_seed_data_path("enterprise", "08-orders.json", child_id=base_tenant_id)
else:
raise ValueError(f"Invalid demo account type: {demo_account_type}")
except ImportError:
# Fallback to original path
seed_data_dir = Path(__file__).parent.parent.parent.parent / "infrastructure" / "seed-data"
if demo_account_type == "professional":
json_file = seed_data_dir / "professional" / "08-orders.json"
elif demo_account_type == "enterprise":
json_file = seed_data_dir / "enterprise" / "parent" / "08-orders.json"
elif demo_account_type == "enterprise_child":
json_file = seed_data_dir / "enterprise" / "children" / base_tenant_id / "08-orders.json"
else:
raise ValueError(f"Invalid demo account type: {demo_account_type}")
if not json_file.exists():
raise HTTPException(
status_code=404,
detail=f"Seed data file not found: {json_file}"
)
# Load JSON data
with open(json_file, 'r', encoding='utf-8') as f:
seed_data = json.load(f)
logger.info(
"Loaded orders seed data",
customers=len(seed_data.get('customers', [])),
orders=len(seed_data.get('customer_orders', []))
)
# Load Customers from seed data
for customer_data in seed_data.get('customers', []):
# Transform IDs using XOR
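# (transform_id XORs the seed UUID with the virtual tenant UUID, so the mapping is
#  deterministic per virtual tenant)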
from shared.utils.demo_id_transformer import transform_id
try:
customer_uuid = uuid.UUID(customer_data['id'])
transformed_id = transform_id(customer_data['id'], virtual_uuid)
except ValueError as e:
logger.error("Failed to parse customer UUID",
customer_id=customer_data['id'],
error=str(e))
continue
customer_id_map[customer_uuid] = transformed_id
new_customer = Customer(
id=transformed_id,
tenant_id=virtual_uuid,
customer_code=customer_data.get('customer_code'),
name=customer_data.get('name'),
business_name=customer_data.get('business_name'),
customer_type=customer_data.get('customer_type'),
tax_id=customer_data.get('tax_id'),
email=customer_data.get('email'),
phone=customer_data.get('phone'),
address_line1=customer_data.get('address_line1'),
address_line2=customer_data.get('address_line2'),
city=customer_data.get('city'),
state=customer_data.get('state'),
postal_code=customer_data.get('postal_code'),
country=customer_data.get('country'),
business_license=customer_data.get('business_license'),
is_active=customer_data.get('is_active', True),
preferred_delivery_method=customer_data.get('preferred_delivery_method'),
payment_terms=customer_data.get('payment_terms'),
credit_limit=customer_data.get('credit_limit', 0.0),
discount_percentage=customer_data.get('discount_percentage', 0.0),
customer_segment=customer_data.get('customer_segment'),
priority_level=customer_data.get('priority_level'),
special_instructions=customer_data.get('special_instructions'),
delivery_preferences=customer_data.get('delivery_preferences'),
product_preferences=customer_data.get('product_preferences'),
total_orders=customer_data.get('total_orders', 0),
total_spent=customer_data.get('total_spent', 0.0),
average_order_value=customer_data.get('average_order_value', 0.0),
last_order_date=parse_date_field(
customer_data.get('last_order_date'),
session_time,
"last_order_date"
),
created_at=session_time,
updated_at=session_time
)
db.add(new_customer)
stats["customers"] += 1
# Load Customer Orders from seed data
order_id_map = {}
for order_data in seed_data.get('customer_orders', []):
# Transform IDs using XOR
from shared.utils.demo_id_transformer import transform_id
try:
order_uuid = uuid.UUID(order_data['id'])
transformed_id = transform_id(order_data['id'], virtual_uuid)
except ValueError as e:
logger.error("Failed to parse order UUID",
order_id=order_data['id'],
error=str(e))
continue
order_id_map[order_uuid] = transformed_id
# Map customer_id if it exists in our map
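# (falls back to the original seed UUID if that customer was skipped during cloning)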
customer_id_value = order_data.get('customer_id')
if customer_id_value:
customer_id_value = customer_id_map.get(uuid.UUID(customer_id_value), uuid.UUID(customer_id_value))
# Parse date fields (supports BASE_TS markers and ISO timestamps)
adjusted_order_date = parse_date_field(
order_data.get('order_date'),
session_time,
"order_date"
) or session_time
# Handle delivery date - JSON uses 'delivery_date', database uses 'requested_delivery_date'
# Fallback to order_date + 2 hours if no delivery date is provided
delivery_date_value = order_data.get('delivery_date') or order_data.get('requested_delivery_date')
adjusted_requested_delivery = parse_date_field(
delivery_date_value,
session_time,
"requested_delivery_date"
)
# Ensure requested_delivery_date is never None - fallback to order_date + 2 hours
if adjusted_requested_delivery is None:
adjusted_requested_delivery = adjusted_order_date + timedelta(hours=2)
# Create new order from seed data
# Generate unique order number by appending tenant-specific suffix
base_order_number = order_data.get('order_number', f"ORD-{uuid.uuid4().hex[:8].upper()}")
# Add tenant-specific suffix to ensure global uniqueness
tenant_suffix = virtual_uuid.hex[:4].upper() # Use first 4 chars of tenant ID
unique_order_number = f"{base_order_number}-{tenant_suffix}"
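# e.g. (illustrative values) base "ORD-1001" with tenant prefix "A1B2" -> "ORD-1001-A1B2"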
new_order = CustomerOrder(
id=str(transformed_id),
tenant_id=virtual_uuid,
order_number=unique_order_number,
customer_id=str(customer_id_value) if customer_id_value else None,
status=order_data.get('status', 'pending'),
order_type=order_data.get('order_type', 'standard'),
priority=order_data.get('priority', 'normal'),
order_date=adjusted_order_date,
requested_delivery_date=adjusted_requested_delivery,
delivery_method=order_data.get('delivery_method'),
delivery_address=order_data.get('delivery_address'),
delivery_instructions=order_data.get('delivery_instructions'),
subtotal=order_data.get('subtotal', 0.0),
tax_amount=order_data.get('tax_amount', 0.0),
discount_amount=order_data.get('discount_amount', 0.0),
discount_percentage=order_data.get('discount_percentage', 0.0),
delivery_fee=order_data.get('delivery_fee', 0.0),
total_amount=order_data.get('total_amount', 0.0),
payment_status=order_data.get('payment_status', 'pending'),
payment_method=order_data.get('payment_method'),
payment_terms=order_data.get('payment_terms'),
special_instructions=order_data.get('special_instructions'),
order_source=order_data.get('order_source', 'manual'),
sales_channel=order_data.get('sales_channel', 'direct'),
created_at=session_time,
updated_at=session_time
)
db.add(new_order)
stats["customer_orders"] += 1
# Clone Order Items
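# (unlike customers and orders above, line items are copied from existing OrderItem rows
#  in the database, looked up by the original seed order IDs of the base template tenant)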
for old_order_id, new_order_id in order_id_map.items():
result = await db.execute(
select(OrderItem).where(OrderItem.order_id == old_order_id)
)
order_items = result.scalars().all()
for item in order_items:
new_item = OrderItem(
id=uuid.uuid4(),
order_id=new_order_id,
product_id=item.product_id,
product_name=item.product_name,
product_sku=item.product_sku,
quantity=item.quantity,
unit_of_measure=item.unit_of_measure,
unit_price=item.unit_price,
line_discount=item.line_discount,
line_total=item.line_total,
status=item.status
)
db.add(new_item)
stats["order_line_items"] += 1
# Commit cloned data
await db.commit()
# NOTE: Alert generation removed - alerts are now generated automatically by the
# respective alert services which run scheduled checks at appropriate intervals.
# This eliminates duplicate alerts and provides a more realistic demo experience.
stats["alerts_generated"] = 0
total_records = stats["customers"] + stats["customer_orders"] + stats["order_line_items"]
duration_ms = int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000)
logger.info(
"Orders data cloning completed",
virtual_tenant_id=virtual_tenant_id,
total_records=total_records,
stats=stats,
duration_ms=duration_ms
)
return {
"service": "orders",
"status": "completed",
"records_cloned": total_records,
"duration_ms": duration_ms,
"details": stats
}
except ValueError as e:
logger.error("Invalid UUID format", error=str(e))
raise HTTPException(status_code=400, detail=f"Invalid UUID: {str(e)}")
except Exception as e:
logger.error(
"Failed to clone orders data",
error=str(e),
virtual_tenant_id=virtual_tenant_id,
exc_info=True
)
# Rollback on error
await db.rollback()
return {
"service": "orders",
"status": "failed",
"records_cloned": 0,
"duration_ms": int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000),
"error": str(e)
}
@router.get("/clone/health")
async def clone_health_check():
"""
Health check for internal cloning endpoint
Used by orchestrator to verify service availability
"""
return {
"service": "orders",
"clone_endpoint": "available",
"version": "2.0.0"
}
@router.delete("/tenant/{virtual_tenant_id}")
async def delete_demo_data(
virtual_tenant_id: str,
db: AsyncSession = Depends(get_db)
):
"""Delete all order data for a virtual demo tenant"""
logger.info("Deleting order data for virtual tenant", virtual_tenant_id=virtual_tenant_id)
start_time = datetime.now(timezone.utc)
try:
virtual_uuid = uuid.UUID(virtual_tenant_id)
# Count records
order_count = await db.scalar(select(func.count(CustomerOrder.id)).where(CustomerOrder.tenant_id == virtual_uuid))
item_count = await db.scalar(select(func.count(OrderItem.id)).join(CustomerOrder).where(CustomerOrder.tenant_id == virtual_uuid))
customer_count = await db.scalar(select(func.count(Customer.id)).where(Customer.tenant_id == virtual_uuid))
# Delete children before parents to satisfy foreign-key constraints
await db.execute(delete(OrderItem).where(OrderItem.order_id.in_(
select(CustomerOrder.id).where(CustomerOrder.tenant_id == virtual_uuid)
)))
await db.execute(delete(CustomerOrder).where(CustomerOrder.tenant_id == virtual_uuid))
await db.execute(delete(Customer).where(Customer.tenant_id == virtual_uuid))
await db.commit()
duration_ms = int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000)
logger.info("Order data deleted successfully", virtual_tenant_id=virtual_tenant_id, duration_ms=duration_ms)
return {
"service": "orders",
"status": "deleted",
"virtual_tenant_id": virtual_tenant_id,
"records_deleted": {
"orders": order_count,
"items": item_count,
"customers": customer_count,
"total": order_count + item_count + customer_count
},
"duration_ms": duration_ms
}
except Exception as e:
logger.error("Failed to delete order data", error=str(e), exc_info=True)
await db.rollback()
raise HTTPException(status_code=500, detail=str(e))