"""
|
|
Internal Demo Cloning API for Distribution Service
|
|
Service-to-service endpoint for cloning distribution data
|
|
"""
|
|
|
|
from fastapi import APIRouter, Depends, HTTPException, Header
|
|
from sqlalchemy.ext.asyncio import AsyncSession
|
|
from sqlalchemy import select, delete, func
|
|
import structlog
|
|
import uuid
|
|
from datetime import datetime, timezone, timedelta
|
|
from typing import Optional
|
|
import os
|
|
import json
|
|
from pathlib import Path
|
|
|
|
from app.core.database import get_db
|
|
from app.models.distribution import DeliveryRoute, Shipment
|
|
from shared.utils.demo_dates import adjust_date_for_demo, resolve_time_marker
|
|
|
|
from app.core.config import settings
|
|
|
|
logger = structlog.get_logger()
|
|
router = APIRouter(prefix="/internal/demo", tags=["internal"])
|
|
|
|
|
|
def parse_date_field(date_value, session_time: datetime, field_name: str = "date") -> Optional[datetime]:
    """
    Parse date field, handling both ISO strings and BASE_TS markers.

    Supports:
    - BASE_TS markers: "BASE_TS + 1h30m", "BASE_TS - 2d"
    - ISO 8601 strings: "2025-01-15T06:00:00Z"
    - None values (returns None)

    Returns timezone-aware datetime or None.
    """
    if not date_value:
        return None

    # Check if it's a BASE_TS marker
    if isinstance(date_value, str) and date_value.startswith("BASE_TS"):
        try:
            return resolve_time_marker(date_value, session_time)
        except ValueError as e:
            logger.warning(
                f"Invalid BASE_TS marker in {field_name}",
                marker=date_value,
                error=str(e)
            )
            return None

    # Handle regular ISO date strings
    try:
        if isinstance(date_value, str):
            original_date = datetime.fromisoformat(date_value.replace('Z', '+00:00'))
        elif hasattr(date_value, 'isoformat'):
            original_date = date_value
        else:
            logger.warning(f"Unsupported date format in {field_name}", date_value=date_value)
            return None

        return adjust_date_for_demo(original_date, session_time)
    except (ValueError, AttributeError) as e:
        logger.warning(
            f"Invalid date format in {field_name}",
            date_value=date_value,
            error=str(e)
        )
        return None
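
# Illustrative usage only (not executed at runtime; assumes resolve_time_marker
# anchors BASE_TS on the session creation time, per shared.utils.demo_dates):
#
#     parse_date_field("BASE_TS + 1h30m", session_time, "route_date")
#         -> session_time plus 1 hour 30 minutes
#     parse_date_field("2025-01-15T06:00:00Z", session_time, "route_date")
#         -> the ISO timestamp shifted into the demo window by adjust_date_for_demo
#     parse_date_field(None, session_time)
#         -> None (invalid values also log a warning and return None)

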
@router.post("/clone")
async def clone_demo_data(
    base_tenant_id: str,
    virtual_tenant_id: str,
    demo_account_type: str,
    session_id: Optional[str] = None,
    session_created_at: Optional[str] = None,
    db: AsyncSession = Depends(get_db)
):
    """
    Clone distribution service data for a virtual demo tenant.

    Clones:
    - Delivery routes
    - Shipments

    Dates are adjusted to a recent timeframe relative to the demo session.

    Args:
        base_tenant_id: Template tenant UUID to clone from
        virtual_tenant_id: Target virtual tenant UUID
        demo_account_type: Type of demo account
        session_id: Originating session ID for tracing
        session_created_at: ISO 8601 timestamp of session creation, used to anchor date adjustment

    Returns:
        Cloning status and record counts
    """
    start_time = datetime.now(timezone.utc)

    # Parse session creation time for date adjustment
    if session_created_at:
        try:
            session_time = datetime.fromisoformat(session_created_at.replace('Z', '+00:00'))
        except (ValueError, AttributeError):
            session_time = start_time
    else:
        session_time = start_time
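    # Illustrative example (an assumed typical input, not taken from seed data):
    # session_created_at="2025-06-01T09:30:00Z" anchors session_time at that instant;
    # a missing or unparsable value falls back to the request's start_time.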

    logger.info(
        "Starting distribution data cloning",
        base_tenant_id=base_tenant_id,
        virtual_tenant_id=virtual_tenant_id,
        demo_account_type=demo_account_type,
        session_id=session_id,
        session_created_at=session_created_at
    )

    try:
        # Validate UUIDs
        base_uuid = uuid.UUID(base_tenant_id)
        virtual_uuid = uuid.UUID(virtual_tenant_id)

        # Track cloning statistics
        stats = {
            "delivery_routes": 0,
            "shipments": 0,
            "alerts_generated": 0
        }

        # Load seed data from JSON files
        from shared.utils.seed_data_paths import get_seed_data_path

        if demo_account_type == "professional":
            json_file = get_seed_data_path("professional", "12-distribution.json")
        elif demo_account_type == "enterprise":
            json_file = get_seed_data_path("enterprise", "12-distribution.json")
        elif demo_account_type == "enterprise_child":
            # Child outlets don't have their own distribution data
            # Distribution is managed centrally by the parent tenant
            # Child locations are delivery destinations, not distribution hubs
            logger.info(
                "Skipping distribution cloning for child outlet - distribution managed by parent",
                base_tenant_id=base_tenant_id,
                virtual_tenant_id=virtual_tenant_id,
                session_id=session_id
            )
            duration_ms = int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000)
            return {
                "service": "distribution",
                "status": "completed",
                "records_cloned": 0,
                "duration_ms": duration_ms,
                "details": {
                    "note": "Child outlets don't manage distribution - handled by parent tenant"
                }
            }
        else:
            raise ValueError(f"Invalid demo account type: {demo_account_type}")

        # Load JSON data
        with open(json_file, 'r', encoding='utf-8') as f:
            seed_data = json.load(f)

        logger.info(
            "Loaded distribution seed data",
            delivery_routes=len(seed_data.get('delivery_routes', [])),
            shipments=len(seed_data.get('shipments', []))
        )

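        # For orientation only — a minimal sketch of the seed file shape consumed below
        # (keys taken from the lookups in this function; real seed files may carry more):
        #   {
        #     "delivery_routes": [{"id": "...", "route_number": "ROUTE-001",
        #                          "route_date": "BASE_TS - 1d",
        #                          "route_sequence": [{"estimated_arrival": "BASE_TS + 2h", ...}]}],
        #     "shipments": [{"id": "...", "shipment_number": "SHIP-001",
        #                    "delivery_route_id": "...", "items": [...], ...}]
        #   }
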
        # Clone Delivery Routes
        # Transform IDs using XOR; transform_id is also reused for shipments below
        from shared.utils.demo_id_transformer import transform_id

        for route_data in seed_data.get('delivery_routes', []):
            try:
                route_uuid = uuid.UUID(route_data['id'])  # validate seed UUID format
                transformed_id = transform_id(route_data['id'], virtual_uuid)
            except ValueError as e:
                logger.error("Failed to parse route UUID",
                             route_id=route_data['id'],
                             error=str(e))
                continue

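            # transform_id maps the seed UUID into this virtual tenant's ID space; the
            # mapping is deterministic, so shipments that reference this route by its
            # seed UUID (delivery_route_id below) resolve to the same transformed ID.
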
            # Parse date fields
            route_date = parse_date_field(
                route_data.get('route_date'),
                session_time,
                "route_date"
            ) or session_time

            # Parse route sequence dates
            parsed_sequence = []
            for stop in route_data.get('route_sequence', []):
                estimated_arrival = parse_date_field(
                    stop.get('estimated_arrival'),
                    session_time,
                    "estimated_arrival"
                )
                actual_arrival = parse_date_field(
                    stop.get('actual_arrival'),
                    session_time,
                    "actual_arrival"
                )

                parsed_sequence.append({
                    **stop,
                    "estimated_arrival": estimated_arrival.isoformat() if estimated_arrival else None,
                    "actual_arrival": actual_arrival.isoformat() if actual_arrival else None
                })

            # Make route_number unique per virtual tenant to prevent conflicts across demo sessions
            # Append last 6 chars of virtual_tenant_id to ensure uniqueness
            base_route_number = route_data.get('route_number', 'ROUTE-001')
            unique_route_number = f"{base_route_number}-{str(virtual_uuid)[-6:]}"

            # Create new delivery route
            new_route = DeliveryRoute(
                id=transformed_id,
                tenant_id=virtual_uuid,
                route_number=unique_route_number,
                route_date=route_date,
                vehicle_id=route_data.get('vehicle_id'),
                driver_id=route_data.get('driver_id'),
                total_distance_km=route_data.get('total_distance_km'),
                estimated_duration_minutes=route_data.get('estimated_duration_minutes'),
                route_sequence=parsed_sequence,
                notes=route_data.get('notes'),
                status=route_data.get('status', 'planned'),
                created_at=session_time,
                updated_at=session_time,
                created_by=base_uuid,
                updated_by=base_uuid
            )
            db.add(new_route)
            stats["delivery_routes"] += 1

        # Clone Shipments
        for shipment_data in seed_data.get('shipments', []):
            # Transform IDs using XOR
            try:
                shipment_uuid = uuid.UUID(shipment_data['id'])  # validate seed UUID format
                transformed_id = transform_id(shipment_data['id'], virtual_uuid)
            except ValueError as e:
                logger.error("Failed to parse shipment UUID",
                             shipment_id=shipment_data['id'],
                             error=str(e))
                continue

            # Parse date fields
            shipment_date = parse_date_field(
                shipment_data.get('shipment_date'),
                session_time,
                "shipment_date"
            ) or session_time

            # Note: The Shipment model doesn't have estimated_delivery_time
            # Only actual_delivery_time is stored
            actual_delivery_time = parse_date_field(
                shipment_data.get('actual_delivery_time'),
                session_time,
                "actual_delivery_time"
            )

            # Transform purchase_order_id if present (links to internal transfer PO)
            purchase_order_id = None
            if shipment_data.get('purchase_order_id'):
                try:
                    po_uuid = uuid.UUID(shipment_data['purchase_order_id'])  # validate format
                    purchase_order_id = transform_id(shipment_data['purchase_order_id'], virtual_uuid)
                except ValueError:
                    logger.warning(
                        "Invalid purchase_order_id format",
                        purchase_order_id=shipment_data.get('purchase_order_id')
                    )

            # Transform delivery_route_id (CRITICAL: must reference transformed route)
            delivery_route_id = None
            if shipment_data.get('delivery_route_id'):
                try:
                    route_uuid = uuid.UUID(shipment_data['delivery_route_id'])  # validate format
                    delivery_route_id = transform_id(shipment_data['delivery_route_id'], virtual_uuid)
                except ValueError:
                    logger.warning(
                        "Invalid delivery_route_id format",
                        delivery_route_id=shipment_data.get('delivery_route_id')
                    )

            # Store items in delivery_notes as JSON for demo purposes
            # (In production, items are in the linked purchase order)
            items_json = json.dumps(shipment_data.get('items', [])) if shipment_data.get('items') else None

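            # Illustrative: when items are present, delivery_notes below becomes
            # "<original notes>\nItems: [{...}]", letting the demo UI show line items
            # without a real purchase order behind the shipment.
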
            # Make shipment_number unique per virtual tenant to prevent conflicts across demo sessions
            # Append last 6 chars of virtual_tenant_id to ensure uniqueness
            base_shipment_number = shipment_data.get('shipment_number', 'SHIP-001')
            unique_shipment_number = f"{base_shipment_number}-{str(virtual_uuid)[-6:]}"

            # Create new shipment
            new_shipment = Shipment(
                id=transformed_id,
                tenant_id=virtual_uuid,
                parent_tenant_id=virtual_uuid,  # Parent is the same as tenant for demo
                child_tenant_id=shipment_data.get('child_tenant_id'),
                purchase_order_id=purchase_order_id,  # Link to internal transfer PO
                delivery_route_id=delivery_route_id,  # MUST use transformed ID
                shipment_number=unique_shipment_number,
                shipment_date=shipment_date,
                status=shipment_data.get('status', 'pending'),
                total_weight_kg=shipment_data.get('total_weight_kg'),
                actual_delivery_time=actual_delivery_time,
                # Store items info in delivery_notes for demo display
                delivery_notes=f"{shipment_data.get('notes', '')}\nItems: {items_json}" if items_json else shipment_data.get('notes'),
                created_at=session_time,
                updated_at=session_time,
                created_by=base_uuid,
                updated_by=base_uuid
            )
            db.add(new_shipment)
            stats["shipments"] += 1

        # Commit cloned data
        await db.commit()

        total_records = stats["delivery_routes"] + stats["shipments"]
        duration_ms = int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000)

        logger.info(
            "Distribution data cloning completed",
            virtual_tenant_id=virtual_tenant_id,
            total_records=total_records,
            stats=stats,
            duration_ms=duration_ms
        )

        return {
            "service": "distribution",
            "status": "completed",
            "records_cloned": total_records,
            "duration_ms": duration_ms,
            "details": stats
        }

    except ValueError as e:
        # Raised for malformed UUIDs, an unsupported demo_account_type, or malformed seed JSON
        logger.error("Invalid input for distribution cloning", error=str(e))
        raise HTTPException(status_code=400, detail=f"Invalid input: {str(e)}")

    except Exception as e:
        logger.error(
            "Failed to clone distribution data",
            error=str(e),
            virtual_tenant_id=virtual_tenant_id,
            exc_info=True
        )

        # Rollback on error
        await db.rollback()

        return {
            "service": "distribution",
            "status": "failed",
            "records_cloned": 0,
            "duration_ms": int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000),
            "error": str(e)
        }

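
# Illustrative only — a rough sketch of how an orchestrator might call this
# service-to-service endpoint (client, base URL, and auth are assumptions; the
# parameters mirror the query parameters declared on clone_demo_data above):
#
#     import httpx
#
#     async def clone_distribution(base_url: str, **params) -> dict:
#         async with httpx.AsyncClient(base_url=base_url, timeout=30.0) as client:
#             resp = await client.post("/internal/demo/clone", params=params)
#             resp.raise_for_status()
#             return resp.json()  # e.g. {"service": "distribution", "status": "completed", ...}
#
#     # await clone_distribution("http://<distribution-host>",
#     #                          base_tenant_id=..., virtual_tenant_id=...,
#     #                          demo_account_type="professional",
#     #                          session_id=..., session_created_at=...)

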
@router.get("/clone/health")
async def clone_health_check():
    """
    Health check for internal cloning endpoint
    Used by orchestrator to verify service availability
    """
    return {
        "service": "distribution",
        "clone_endpoint": "available",
        "version": "1.0.0"
    }


@router.delete("/tenant/{virtual_tenant_id}")
async def delete_demo_data(
    virtual_tenant_id: str,
    db: AsyncSession = Depends(get_db)
):
    """Delete all distribution data for a virtual demo tenant"""
    logger.info("Deleting distribution data for virtual tenant", virtual_tenant_id=virtual_tenant_id)
    start_time = datetime.now(timezone.utc)

    try:
        virtual_uuid = uuid.UUID(virtual_tenant_id)

        # Count records before deleting so the response can report what was removed
        route_count = await db.scalar(select(func.count(DeliveryRoute.id)).where(DeliveryRoute.tenant_id == virtual_uuid))
        shipment_count = await db.scalar(select(func.count(Shipment.id)).where(Shipment.tenant_id == virtual_uuid))

        # Delete in dependency order (shipments reference delivery routes)
        await db.execute(delete(Shipment).where(Shipment.tenant_id == virtual_uuid))
        await db.execute(delete(DeliveryRoute).where(DeliveryRoute.tenant_id == virtual_uuid))
        await db.commit()

        duration_ms = int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000)
        logger.info("Distribution data deleted successfully", virtual_tenant_id=virtual_tenant_id, duration_ms=duration_ms)

        return {
            "service": "distribution",
            "status": "deleted",
            "virtual_tenant_id": virtual_tenant_id,
            "records_deleted": {
                "delivery_routes": route_count,
                "shipments": shipment_count,
                "total": route_count + shipment_count
            },
            "duration_ms": duration_ms
        }
    except Exception as e:
        logger.error("Failed to delete distribution data", error=str(e), exc_info=True)
        await db.rollback()
        raise HTTPException(status_code=500, detail=str(e))
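
# Illustrative only — session cleanup is a single call per service (client and base
# URL are assumptions):
#
#     DELETE {base_url}/internal/demo/tenant/{virtual_tenant_id}
#
# which removes all shipments and delivery routes created for the virtual tenant
# and returns the per-table record counts that were deleted.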