bakery-ia/services/distribution/app/api/internal_demo.py

"""
Internal Demo API for Distribution Service
Handles internal demo setup for enterprise tier
"""
from fastapi import APIRouter, Depends, HTTPException, Header
from typing import Dict, Any, List, Optional
import structlog
from datetime import datetime
import uuid
import json
import time
from app.services.distribution_service import DistributionService
from app.api.dependencies import get_distribution_service
from app.core.config import settings
logger = structlog.get_logger()
router = APIRouter()


async def verify_internal_api_key(x_internal_api_key: Optional[str] = Header(None)):
    """Verify internal API key for service-to-service communication"""
    required_key = settings.INTERNAL_API_KEY
    # Reject when the configured key is missing as well as when it does not match,
    # so an unset INTERNAL_API_KEY never allows unauthenticated access.
    if not required_key or x_internal_api_key != required_key:
        logger.warning("Unauthorized internal API access attempted")
        raise HTTPException(status_code=403, detail="Invalid internal API key")
    return True


# Legacy /internal/demo/setup and /internal/demo/cleanup endpoints removed.
# Distribution now uses the standard /internal/demo/clone pattern like all other
# services; data is cloned from base template tenants via DataCloner.


@router.get("/internal/health")
async def internal_health_check(
    _: bool = Depends(verify_internal_api_key)
):
    """
    Internal health check endpoint
    """
    return {
        "service": "distribution-service",
        "endpoint": "internal-demo",
        "status": "healthy",
        "timestamp": datetime.utcnow().isoformat()
    }
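

# Example call from a sibling service (illustrative only; the hostname, port and
# path prefix are assumptions, not taken from this repository's deployment config):
#   curl -H "X-Internal-API-Key: $INTERNAL_API_KEY" \
#        http://distribution-service:8000/internal/health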


@router.post("/internal/demo/clone")
async def clone_demo_data(
    base_tenant_id: str,
    virtual_tenant_id: str,
    demo_account_type: str,
    session_id: Optional[str] = None,
    session_created_at: Optional[str] = None,
    session_metadata: Optional[str] = None,
    distribution_service: DistributionService = Depends(get_distribution_service),
    _: bool = Depends(verify_internal_api_key)
):
    """
    Clone distribution data from a base tenant to a virtual tenant.

    This follows the standard cloning pattern used by the other services:
    1. Query base tenant data (routes, shipments, schedules)
    2. Clone it to the virtual tenant with ID substitution and date adjustment
    3. Return the number of records cloned

    Args:
        base_tenant_id: Template tenant UUID to clone from
        virtual_tenant_id: Target virtual tenant UUID
        demo_account_type: Type of demo account
        session_id: Originating session ID for tracing
        session_created_at: ISO timestamp when the demo session was created
            (used for date adjustment)
        session_metadata: Optional JSON string carrying enterprise child tenant
            mappings (child_configs and child_tenant_ids)
    """
    try:
        if not all([base_tenant_id, virtual_tenant_id, session_id]):
            raise HTTPException(
                status_code=400,
                detail="Missing required parameters: base_tenant_id, virtual_tenant_id, session_id"
            )

        logger.info("Cloning distribution data from base tenant",
                    base_tenant_id=base_tenant_id,
                    virtual_tenant_id=virtual_tenant_id,
                    session_id=session_id)

        # Clean up any existing demo data for this virtual tenant to prevent conflicts
        logger.info("Cleaning up existing demo data for virtual tenant", virtual_tenant_id=virtual_tenant_id)
        deleted_routes = await distribution_service.route_repository.delete_demo_routes_for_tenant(virtual_tenant_id)
        deleted_shipments = await distribution_service.shipment_repository.delete_demo_shipments_for_tenant(virtual_tenant_id)
        if deleted_routes > 0 or deleted_shipments > 0:
            logger.info("Cleaned up existing demo data",
                        virtual_tenant_id=virtual_tenant_id,
                        deleted_routes=deleted_routes,
                        deleted_shipments=deleted_shipments)

        # Generate a single timestamp suffix for this cloning operation to ensure uniqueness
        timestamp_suffix = str(int(time.time()))[-6:]  # Last 6 digits of the timestamp

        # Parse the session creation date for date adjustment
        from datetime import datetime, timezone
        from dateutil import parser as date_parser
        from shared.utils.demo_dates import BASE_REFERENCE_DATE, adjust_date_for_demo

        if session_created_at:
            if isinstance(session_created_at, str):
                session_dt = date_parser.parse(session_created_at)
            else:
                session_dt = session_created_at
        else:
            session_dt = datetime.now(timezone.utc)
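        # adjust_date_for_demo lives in shared.utils and is not shown here; it is
        # assumed to shift each template date by the offset between session_dt and
        # BASE_REFERENCE_DATE, so a route dated two days after the reference date
        # lands two days after the session creation date in the cloned tenant.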

        # Parse session_metadata to extract child tenant mappings for enterprise demos
        child_tenant_id_map = {}
        if session_metadata:
            try:
                metadata_dict = json.loads(session_metadata)
                child_configs = metadata_dict.get("child_configs", [])
                child_tenant_ids = metadata_dict.get("child_tenant_ids", [])
                # Build mapping: base_child_id -> virtual_child_id
                for idx, child_config in enumerate(child_configs):
                    if idx < len(child_tenant_ids):
                        base_child_id = child_config.get("base_tenant_id")
                        virtual_child_id = child_tenant_ids[idx]
                        if base_child_id and virtual_child_id:
                            child_tenant_id_map[base_child_id] = virtual_child_id
                logger.info(
                    "Built child tenant ID mapping for enterprise demo",
                    mapping_count=len(child_tenant_id_map),
                    session_id=session_id,
                    mappings=child_tenant_id_map
                )
            except Exception as e:
                logger.warning("Failed to parse session_metadata", error=str(e), session_id=session_id)

        # Clone delivery routes from the base tenant
        base_routes = await distribution_service.route_repository.get_all_routes_for_tenant(base_tenant_id)
        routes_cloned = 0
        route_id_map = {}  # Map old route IDs to new route IDs
        for base_route in base_routes:
            # Adjust route_date relative to session creation
            adjusted_route_date = adjust_date_for_demo(
                base_route.get('route_date'),
                session_dt,
                BASE_REFERENCE_DATE
            )

            # Map child tenant IDs in route_sequence
            route_sequence = base_route.get('route_sequence', [])
            if child_tenant_id_map and route_sequence:
                mapped_sequence = []
                for stop in route_sequence:
                    if isinstance(stop, dict) and 'child_tenant_id' in stop:
                        base_child_id = str(stop['child_tenant_id'])
                        if base_child_id in child_tenant_id_map:
                            stop = {**stop, 'child_tenant_id': child_tenant_id_map[base_child_id]}
                            logger.debug(
                                "Mapped child_tenant_id in route_sequence",
                                base_child_id=base_child_id,
                                virtual_child_id=child_tenant_id_map[base_child_id],
                                session_id=session_id
                            )
                    mapped_sequence.append(stop)
                route_sequence = mapped_sequence

            # Generate a unique route number for the virtual tenant to avoid duplicates
            base_route_number = base_route.get('route_number')
            if base_route_number and base_route_number.startswith('DEMO-'):
                # For demo routes, append characters from the virtual tenant UUID plus
                # a timestamp component to reduce the risk of collisions.
                # virtual_tenant_id may arrive as a string or a UUID object.
                try:
                    tenant_uuid = uuid.UUID(virtual_tenant_id) if isinstance(virtual_tenant_id, str) else virtual_tenant_id
                except (ValueError, TypeError):
                    # If parsing fails, fall back to the value as provided
                    tenant_uuid = virtual_tenant_id
                tenant_suffix = str(tenant_uuid).replace('-', '')[:16]
                # Use the single timestamp suffix generated at the start of the operation
                route_number = f"{base_route_number}-{tenant_suffix}-{timestamp_suffix}"
            else:
                # For non-demo routes, keep the original route number
                route_number = base_route_number

            new_route = await distribution_service.route_repository.create_route({
                'tenant_id': uuid.UUID(virtual_tenant_id),
                'route_number': route_number,
                'route_date': adjusted_route_date,
                'vehicle_id': base_route.get('vehicle_id'),
                'driver_id': base_route.get('driver_id'),
                'total_distance_km': base_route.get('total_distance_km'),
                'estimated_duration_minutes': base_route.get('estimated_duration_minutes'),
                'route_sequence': route_sequence,
                'status': base_route.get('status')
            })
            routes_cloned += 1
            # Map the old route ID to the new route ID returned by the repository
            route_id_map[base_route.get('id')] = new_route['id']

        # Clone shipments from the base tenant
        base_shipments = await distribution_service.shipment_repository.get_all_shipments_for_tenant(base_tenant_id)
        shipments_cloned = 0
        for base_shipment in base_shipments:
            # Adjust shipment_date relative to session creation
            adjusted_shipment_date = adjust_date_for_demo(
                base_shipment.get('shipment_date'),
                session_dt,
                BASE_REFERENCE_DATE
            )

            # Map delivery_route_id to the newly created route ID
            old_route_id = base_shipment.get('delivery_route_id')
            new_route_id = route_id_map.get(old_route_id) if old_route_id else None

            # Generate a unique shipment number for the virtual tenant to avoid duplicates
            base_shipment_number = base_shipment.get('shipment_number')
            if base_shipment_number and base_shipment_number.startswith('DEMO'):
                # For demo shipments, append characters from the virtual tenant UUID plus
                # a timestamp component to reduce the risk of collisions.
                # virtual_tenant_id may arrive as a string or a UUID object.
                try:
                    tenant_uuid = uuid.UUID(virtual_tenant_id) if isinstance(virtual_tenant_id, str) else virtual_tenant_id
                except (ValueError, TypeError):
                    # If parsing fails, fall back to the value as provided
                    tenant_uuid = virtual_tenant_id
                tenant_suffix = str(tenant_uuid).replace('-', '')[:16]
                # Use the single timestamp suffix generated at the start of the operation
                shipment_number = f"{base_shipment_number}-{tenant_suffix}-{timestamp_suffix}"
            else:
                # For non-demo shipments, keep the original shipment number
                shipment_number = base_shipment_number

            # Map child_tenant_id to the virtual child ID (the key fix for enterprise demos)
            base_child_id = base_shipment.get('child_tenant_id')
            virtual_child_id = None
            if base_child_id:
                base_child_id_str = str(base_child_id)
                if child_tenant_id_map and base_child_id_str in child_tenant_id_map:
                    virtual_child_id = uuid.UUID(child_tenant_id_map[base_child_id_str])
                    logger.debug(
                        "Mapped child tenant ID for shipment",
                        base_child_id=base_child_id_str,
                        virtual_child_id=str(virtual_child_id),
                        shipment_number=shipment_number,
                        session_id=session_id
                    )
                else:
                    virtual_child_id = base_child_id  # Fall back to the original ID

            await distribution_service.shipment_repository.create_shipment({
                'id': uuid.uuid4(),
                'tenant_id': uuid.UUID(virtual_tenant_id),
                'parent_tenant_id': uuid.UUID(virtual_tenant_id),
                'child_tenant_id': virtual_child_id,  # Mapped child tenant ID
                'delivery_route_id': new_route_id,
                'shipment_number': shipment_number,
                'shipment_date': adjusted_shipment_date,
                'status': base_shipment.get('status'),
                'total_weight_kg': base_shipment.get('total_weight_kg'),
                'total_volume_m3': base_shipment.get('total_volume_m3'),
                'delivery_notes': base_shipment.get('delivery_notes')
            })
            shipments_cloned += 1

        # Clone delivery schedules from the base tenant
        base_schedules = await distribution_service.schedule_repository.get_schedules_by_tenant(base_tenant_id)
        schedules_cloned = 0
        for base_schedule in base_schedules:
            # Map child_tenant_id to the virtual child ID
            base_child_id = base_schedule.get('child_tenant_id')
            virtual_child_id = None
            if base_child_id:
                base_child_id_str = str(base_child_id)
                if child_tenant_id_map and base_child_id_str in child_tenant_id_map:
                    virtual_child_id = uuid.UUID(child_tenant_id_map[base_child_id_str])
                    logger.debug(
                        "Mapped child tenant ID for delivery schedule",
                        base_child_id=base_child_id_str,
                        virtual_child_id=str(virtual_child_id),
                        session_id=session_id
                    )
                else:
                    virtual_child_id = base_child_id  # Fall back to the original ID

            await distribution_service.schedule_repository.create_schedule({
                'id': uuid.uuid4(),
                'parent_tenant_id': uuid.UUID(virtual_tenant_id),
                'child_tenant_id': virtual_child_id,  # Mapped child tenant ID
                'schedule_name': base_schedule.get('schedule_name'),
                'delivery_days': base_schedule.get('delivery_days'),
                'delivery_time': base_schedule.get('delivery_time'),
                'auto_generate_orders': base_schedule.get('auto_generate_orders'),
                'lead_time_days': base_schedule.get('lead_time_days'),
                'is_active': base_schedule.get('is_active')
            })
            schedules_cloned += 1

        total_records = routes_cloned + shipments_cloned + schedules_cloned
        logger.info(
            "Distribution cloning completed successfully",
            session_id=session_id,
            routes_cloned=routes_cloned,
            shipments_cloned=shipments_cloned,
            schedules_cloned=schedules_cloned,
            total_records=total_records,
            child_mappings_applied=len(child_tenant_id_map),
            is_enterprise=len(child_tenant_id_map) > 0
        )
        return {
            "service": "distribution",
            "status": "completed",
            "records_cloned": total_records,
            "routes_cloned": routes_cloned,
            "shipments_cloned": shipments_cloned,
            "schedules_cloned": schedules_cloned
        }
    except HTTPException:
        # Let request validation errors (e.g. missing parameters) propagate unchanged
        raise
    except Exception as e:
        logger.error(f"Error cloning distribution data: {e}", exc_info=True)
        # Don't fail the overall demo setup if distribution cloning fails;
        # report the failure in the response body instead.
        error_msg = f"Distribution cloning failed: {str(e)}"
        return {
            "service": "distribution",
            "status": "failed",
            "error": error_msg,
            "records_cloned": 0,
            "routes_cloned": 0,
            "shipments_cloned": 0,
            "schedules_cloned": 0
        }


@router.delete("/internal/demo/tenant/{virtual_tenant_id}")
async def delete_demo_data(
    virtual_tenant_id: str,
    distribution_service: DistributionService = Depends(get_distribution_service),
    _: bool = Depends(verify_internal_api_key)
):
    """Delete all distribution data for a virtual demo tenant"""
    try:
        logger.info("Deleting distribution data", virtual_tenant_id=virtual_tenant_id)

        # Reuse the existing cleanup logic
        deleted_routes = await distribution_service.route_repository.delete_demo_routes_for_tenant(
            tenant_id=virtual_tenant_id
        )
        deleted_shipments = await distribution_service.shipment_repository.delete_demo_shipments_for_tenant(
            tenant_id=virtual_tenant_id
        )
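        # NOTE: only routes and shipments are removed here; the delivery schedules
        # created by the clone endpoint have no matching cleanup call in this module.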

        return {
            "service": "distribution",
            "status": "deleted",
            "virtual_tenant_id": virtual_tenant_id,
            "records_deleted": {
                "routes": deleted_routes,
                "shipments": deleted_shipments
            }
        }
    except Exception as e:
        logger.error(f"Error deleting distribution data: {e}", exc_info=True)
        raise HTTPException(status_code=500, detail=str(e))