# bakery-ia/services/orders/app/api/internal_demo.py
"""
Internal Demo Cloning API for Orders Service
Service-to-service endpoints for cloning and deleting demo order and customer data
"""
from fastapi import APIRouter, Depends, HTTPException, Header
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select, delete, func
import structlog
import uuid
from datetime import datetime, timezone, timedelta, date
from typing import Optional
import os
from decimal import Decimal
import json
from pathlib import Path
from app.core.database import get_db
from app.models.order import CustomerOrder, OrderItem
from app.models.customer import Customer
from shared.utils.demo_dates import adjust_date_for_demo, resolve_time_marker, get_next_workday
from app.core.config import settings
logger = structlog.get_logger()
router = APIRouter(prefix="/internal/demo", tags=["internal"])
# Base demo tenant ID (professional template)
DEMO_TENANT_PROFESSIONAL = "a1b2c3d4-e5f6-47a8-b9c0-d1e2f3a4b5c6"
def verify_internal_api_key(x_internal_api_key: Optional[str] = Header(None)):
"""Verify internal API key for service-to-service communication"""
if x_internal_api_key != settings.INTERNAL_API_KEY:
logger.warning("Unauthorized internal API access attempted")
raise HTTPException(status_code=403, detail="Invalid internal API key")
return True
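# Illustrative internal call (placeholder names; assumes the demo orchestrator uses httpx or
# similar). FastAPI maps the x_internal_api_key parameter to the "x-internal-api-key" request
# header, and the scalar parameters of /clone below are sent as query parameters:
#   httpx.post(
#       f"{ORDERS_SERVICE_URL}/internal/demo/clone",
#       params={"base_tenant_id": base_id, "virtual_tenant_id": virt_id,
#               "demo_account_type": "professional", "session_id": sid},
#       headers={"x-internal-api-key": settings.INTERNAL_API_KEY},
#   )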
def parse_date_field(date_value, session_time: datetime, field_name: str = "date") -> Optional[datetime]:
"""
Parse date field, handling both ISO strings and BASE_TS markers.
Supports:
- BASE_TS markers: "BASE_TS + 1h30m", "BASE_TS - 2d"
- ISO 8601 strings: "2025-01-15T06:00:00Z"
- None values (returns None)
Returns timezone-aware datetime or None.
"""
if not date_value:
return None
# Check if it's a BASE_TS marker
if isinstance(date_value, str) and date_value.startswith("BASE_TS"):
try:
return resolve_time_marker(date_value, session_time)
except ValueError as e:
logger.warning(
f"Invalid BASE_TS marker in {field_name}",
marker=date_value,
error=str(e)
)
return None
# Handle regular ISO date strings
try:
if isinstance(date_value, str):
original_date = datetime.fromisoformat(date_value.replace('Z', '+00:00'))
elif hasattr(date_value, 'isoformat'):
original_date = date_value
else:
logger.warning(f"Unsupported date format in {field_name}", date_value=date_value)
return None
return adjust_date_for_demo(original_date, session_time)
except (ValueError, AttributeError) as e:
logger.warning(
f"Invalid date format in {field_name}",
date_value=date_value,
error=str(e)
)
return None
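# Illustrative behaviour (assuming resolve_time_marker offsets from the session time and
# adjust_date_for_demo re-bases historical dates into the demo window):
#   parse_date_field("BASE_TS + 1d", session_time)         -> session_time + 1 day
#   parse_date_field("2025-01-15T06:00:00Z", session_time)  -> that instant, shifted near session_time
#   parse_date_field(None, session_time)                    -> None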
def ensure_workday(target_date: datetime) -> datetime:
"""Ensure delivery date falls on a workday (Monday-Friday)"""
if target_date and target_date.weekday() >= 5: # Saturday or Sunday
return get_next_workday(target_date)
return target_date
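# e.g. a Saturday or Sunday delivery date rolls forward to the next workday
# (assuming get_next_workday returns the following Monday for weekend inputs).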
@router.post("/clone")
async def clone_demo_data(
base_tenant_id: str,
virtual_tenant_id: str,
demo_account_type: str,
session_id: Optional[str] = None,
session_created_at: Optional[str] = None,
db: AsyncSession = Depends(get_db),
_: bool = Depends(verify_internal_api_key)
):
"""
Clone orders service data for a virtual demo tenant
Clones:
- Customers
- Customer orders with line items
- Adjusts dates to recent timeframe
Args:
base_tenant_id: Template tenant UUID to clone from
virtual_tenant_id: Target virtual tenant UUID
demo_account_type: Type of demo account
session_id: Originating session ID for tracing
session_created_at: ISO-8601 session creation timestamp used as the BASE_TS reference when adjusting seed dates (defaults to the request time if omitted)
Returns:
Cloning status and record counts
"""
start_time = datetime.now(timezone.utc)
# Parse session creation time for date adjustment
if session_created_at:
try:
session_time = datetime.fromisoformat(session_created_at.replace('Z', '+00:00'))
except (ValueError, AttributeError):
session_time = start_time
else:
session_time = start_time
logger.info(
"Starting orders data cloning",
base_tenant_id=base_tenant_id,
virtual_tenant_id=virtual_tenant_id,
demo_account_type=demo_account_type,
session_id=session_id,
session_created_at=session_created_at
)
try:
# Validate UUIDs
base_uuid = uuid.UUID(base_tenant_id)
virtual_uuid = uuid.UUID(virtual_tenant_id)
# Track cloning statistics
stats = {
"customers": 0,
"customer_orders": 0,
"order_line_items": 0,
"alerts_generated": 0
}
# Customer ID mapping (old -> new)
customer_id_map = {}
# Resolve the seed data file for this demo account type
try:
from shared.utils.seed_data_paths import get_seed_data_path
if demo_account_type == "professional":
json_file = get_seed_data_path("professional", "08-orders.json")
elif demo_account_type == "enterprise":
json_file = get_seed_data_path("enterprise", "08-orders.json")
else:
raise ValueError(f"Invalid demo account type: {demo_account_type}")
except ImportError:
# Fallback to original path
seed_data_dir = Path(__file__).parent.parent.parent.parent / "infrastructure" / "seed-data"
if demo_account_type == "professional":
json_file = seed_data_dir / "professional" / "08-orders.json"
elif demo_account_type == "enterprise":
json_file = seed_data_dir / "enterprise" / "parent" / "08-orders.json"
else:
raise ValueError(f"Invalid demo account type: {demo_account_type}")
if not json_file.exists():
raise HTTPException(
status_code=404,
detail=f"Seed data file not found: {json_file}"
)
# Load JSON data
with open(json_file, 'r', encoding='utf-8') as f:
seed_data = json.load(f)
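# Illustrative (assumed) shape of 08-orders.json, based on the fields read below:
#   {
#     "customers": [{"id": "<uuid>", "customer_code": "...", "name": "...", ...}],
#     "orders": [{"id": "<uuid>", "customer_id": "<uuid>", "order_date": "BASE_TS - 3d",
#                 "requested_delivery_date": "BASE_TS + 1d", "total_amount": 125.0, ...}]
#   }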
logger.info(
"Loaded orders seed data",
customers=len(seed_data.get('customers', [])),
orders=len(seed_data.get('orders', []))
)
# Load Customers from seed data
for customer_data in seed_data.get('customers', []):
# Transform the seed ID into a per-tenant ID (XOR with the virtual tenant UUID gives a deterministic mapping)
from shared.utils.demo_id_transformer import transform_id
try:
customer_uuid = uuid.UUID(customer_data['id'])
transformed_id = transform_id(customer_data['id'], virtual_uuid)
except ValueError as e:
logger.error("Failed to parse customer UUID",
customer_id=customer_data['id'],
error=str(e))
continue
customer_id_map[customer_uuid] = transformed_id
new_customer = Customer(
id=transformed_id,
tenant_id=virtual_uuid,
customer_code=customer_data.get('customer_code'),
name=customer_data.get('name'),
business_name=customer_data.get('business_name'),
customer_type=customer_data.get('customer_type'),
tax_id=customer_data.get('tax_id'),
email=customer_data.get('email'),
phone=customer_data.get('phone'),
address_line1=customer_data.get('address_line1'),
address_line2=customer_data.get('address_line2'),
city=customer_data.get('city'),
state=customer_data.get('state'),
postal_code=customer_data.get('postal_code'),
country=customer_data.get('country'),
business_license=customer_data.get('business_license'),
is_active=customer_data.get('is_active', True),
preferred_delivery_method=customer_data.get('preferred_delivery_method'),
payment_terms=customer_data.get('payment_terms'),
credit_limit=customer_data.get('credit_limit', 0.0),
discount_percentage=customer_data.get('discount_percentage', 0.0),
customer_segment=customer_data.get('customer_segment'),
priority_level=customer_data.get('priority_level'),
special_instructions=customer_data.get('special_instructions'),
delivery_preferences=customer_data.get('delivery_preferences'),
product_preferences=customer_data.get('product_preferences'),
total_orders=customer_data.get('total_orders', 0),
total_spent=customer_data.get('total_spent', 0.0),
average_order_value=customer_data.get('average_order_value', 0.0),
last_order_date=parse_date_field(
customer_data.get('last_order_date'),
session_time,
"last_order_date"
),
created_at=session_time,
updated_at=session_time
)
db.add(new_customer)
stats["customers"] += 1
# Load Customer Orders from seed data
order_id_map = {}
for order_data in seed_data.get('orders', []):
# Transform IDs using XOR
from shared.utils.demo_id_transformer import transform_id
try:
order_uuid = uuid.UUID(order_data['id'])
transformed_id = transform_id(order_data['id'], virtual_uuid)
except ValueError as e:
logger.error("Failed to parse order UUID",
order_id=order_data['id'],
error=str(e))
continue
order_id_map[order_uuid] = transformed_id
# Map customer_id if it exists in our map
customer_id_value = order_data.get('customer_id')
if customer_id_value:
customer_id_value = customer_id_map.get(uuid.UUID(customer_id_value), uuid.UUID(customer_id_value))
# Parse date fields (supports BASE_TS markers and ISO timestamps)
adjusted_order_date = parse_date_field(
order_data.get('order_date'),
session_time,
"order_date"
) or session_time
adjusted_requested_delivery = parse_date_field(
order_data.get('requested_delivery_date'),
session_time,
"requested_delivery_date"
)
# Create new order from seed data
new_order = CustomerOrder(
id=str(transformed_id),
tenant_id=virtual_uuid,
order_number=order_data.get('order_number', f"ORD-{uuid.uuid4().hex[:8].upper()}"),
customer_id=str(customer_id_value) if customer_id_value else None,
status=order_data.get('status', 'pending'),
order_type=order_data.get('order_type', 'standard'),
priority=order_data.get('priority', 'normal'),
order_date=adjusted_order_date,
requested_delivery_date=adjusted_requested_delivery,
delivery_method=order_data.get('delivery_method'),
delivery_address=order_data.get('delivery_address'),
delivery_instructions=order_data.get('delivery_instructions'),
subtotal=order_data.get('subtotal', 0.0),
tax_amount=order_data.get('tax_amount', 0.0),
discount_amount=order_data.get('discount_amount', 0.0),
discount_percentage=order_data.get('discount_percentage', 0.0),
delivery_fee=order_data.get('delivery_fee', 0.0),
total_amount=order_data.get('total_amount', 0.0),
payment_status=order_data.get('payment_status', 'pending'),
payment_method=order_data.get('payment_method'),
payment_terms=order_data.get('payment_terms'),
special_instructions=order_data.get('special_instructions'),
order_source=order_data.get('order_source', 'demo'),
sales_channel=order_data.get('sales_channel', 'direct'),
created_at=session_time,
updated_at=session_time
)
db.add(new_order)
stats["customer_orders"] += 1
# Clone Order Items
for old_order_id, new_order_id in order_id_map.items():
result = await db.execute(
select(OrderItem).where(OrderItem.order_id == old_order_id)
)
order_items = result.scalars().all()
for item in order_items:
new_item = OrderItem(
id=uuid.uuid4(),
order_id=new_order_id,
tenant_id=virtual_uuid,  # scope items to the virtual tenant so tenant-wide deletion below can find them
product_id=item.product_id,
product_name=item.product_name,
product_sku=item.product_sku,
quantity=item.quantity,
unit_of_measure=item.unit_of_measure,
unit_price=item.unit_price,
line_discount=item.line_discount,
line_total=item.line_total,
status=item.status
)
db.add(new_item)
stats["order_line_items"] += 1
# Commit cloned data
await db.commit()
# NOTE: Alert generation removed - alerts are now generated automatically by the
# respective alert services which run scheduled checks at appropriate intervals.
# This eliminates duplicate alerts and provides a more realistic demo experience.
stats["alerts_generated"] = 0
total_records = stats["customers"] + stats["customer_orders"] + stats["order_line_items"]
duration_ms = int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000)
logger.info(
"Orders data cloning completed",
virtual_tenant_id=virtual_tenant_id,
total_records=total_records,
stats=stats,
duration_ms=duration_ms
)
return {
"service": "orders",
"status": "completed",
"records_cloned": total_records,
"duration_ms": duration_ms,
"details": stats
}
except ValueError as e:
logger.error("Invalid UUID format", error=str(e))
raise HTTPException(status_code=400, detail=f"Invalid UUID: {str(e)}")
except Exception as e:
logger.error(
"Failed to clone orders data",
error=str(e),
virtual_tenant_id=virtual_tenant_id,
exc_info=True
)
# Rollback on error
await db.rollback()
return {
"service": "orders",
"status": "failed",
"records_cloned": 0,
"duration_ms": int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000),
"error": str(e)
}
@router.get("/clone/health")
async def clone_health_check(_: bool = Depends(verify_internal_api_key)):
"""
Health check for internal cloning endpoint
Used by orchestrator to verify service availability
"""
return {
"service": "orders",
"clone_endpoint": "available",
"version": "2.0.0"
}
@router.delete("/tenant/{virtual_tenant_id}")
async def delete_demo_data(
virtual_tenant_id: str,
db: AsyncSession = Depends(get_db),
_: bool = Depends(verify_internal_api_key)
):
"""Delete all order data for a virtual demo tenant"""
logger.info("Deleting order data for virtual tenant", virtual_tenant_id=virtual_tenant_id)
start_time = datetime.now(timezone.utc)
try:
virtual_uuid = uuid.UUID(virtual_tenant_id)
# Count records
order_count = await db.scalar(select(func.count(CustomerOrder.id)).where(CustomerOrder.tenant_id == virtual_uuid))
item_count = await db.scalar(select(func.count(OrderItem.id)).where(OrderItem.tenant_id == virtual_uuid))
customer_count = await db.scalar(select(func.count(Customer.id)).where(Customer.tenant_id == virtual_uuid))
# Delete children before parents (items -> orders -> customers) to respect foreign key constraints
await db.execute(delete(OrderItem).where(OrderItem.tenant_id == virtual_uuid))
await db.execute(delete(CustomerOrder).where(CustomerOrder.tenant_id == virtual_uuid))
await db.execute(delete(Customer).where(Customer.tenant_id == virtual_uuid))
await db.commit()
duration_ms = int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000)
logger.info("Order data deleted successfully", virtual_tenant_id=virtual_tenant_id, duration_ms=duration_ms)
return {
"service": "orders",
"status": "deleted",
"virtual_tenant_id": virtual_tenant_id,
"records_deleted": {
"orders": order_count,
"items": item_count,
"customers": customer_count,
"total": order_count + item_count + customer_count
},
"duration_ms": duration_ms
}
except Exception as e:
logger.error("Failed to delete order data", error=str(e), exc_info=True)
await db.rollback()
raise HTTPException(status_code=500, detail=str(e))