demo seed change

Urtzi Alfaro
2025-12-13 23:57:54 +01:00
parent f3688dfb04
commit ff830a3415
299 changed files with 20328 additions and 19485 deletions


@@ -1,382 +0,0 @@
"""
Internal Demo API for Distribution Service
Handles internal demo setup for enterprise tier
"""
from fastapi import APIRouter, Depends, HTTPException, Header
from typing import Dict, Any, List, Optional
import structlog
from datetime import datetime
import uuid
import json
import time
from app.services.distribution_service import DistributionService
from app.api.dependencies import get_distribution_service
from app.core.config import settings
logger = structlog.get_logger()
router = APIRouter()
async def verify_internal_api_key(x_internal_api_key: str = Header(None)):
"""Verify internal API key for service-to-service communication"""
required_key = settings.INTERNAL_API_KEY
if x_internal_api_key != required_key:
logger.warning("Unauthorized internal API access attempted")
raise HTTPException(status_code=403, detail="Invalid internal API key")
return True
# Legacy /internal/demo/setup and /internal/demo/cleanup endpoints removed
# Distribution now uses the standard /internal/demo/clone pattern like all other services
# Data is cloned from base template tenants via DataCloner
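# Illustrative only: how an internal caller (e.g. the demo orchestrator) might hit
# the clone endpoint defined below. The httpx client and the service URL are
# assumptions for illustration, not part of this module; parameters travel as query
# params and the internal key goes in the X-Internal-API-Key header.
#
#   import httpx
#
#   async def request_distribution_clone(base_id: str, virtual_id: str, session_id: str) -> dict:
#       async with httpx.AsyncClient() as client:
#           resp = await client.post(
#               "http://distribution-service:8000/internal/demo/clone",
#               params={
#                   "base_tenant_id": base_id,
#                   "virtual_tenant_id": virtual_id,
#                   "demo_account_type": "enterprise",
#                   "session_id": session_id,
#               },
#               headers={"X-Internal-API-Key": settings.INTERNAL_API_KEY},
#           )
#           resp.raise_for_status()
#           return resp.json()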
@router.get("/internal/health")
async def internal_health_check(
_: bool = Depends(verify_internal_api_key)
):
"""
Internal health check endpoint
"""
return {
"service": "distribution-service",
"endpoint": "internal-demo",
"status": "healthy",
"timestamp": datetime.utcnow().isoformat()
}
@router.post("/internal/demo/clone")
async def clone_demo_data(
base_tenant_id: str,
virtual_tenant_id: str,
demo_account_type: str,
session_id: Optional[str] = None,
session_created_at: Optional[str] = None,
session_metadata: Optional[str] = None,
distribution_service: DistributionService = Depends(get_distribution_service),
_: bool = Depends(verify_internal_api_key)
):
"""
Clone distribution data from base tenant to virtual tenant
This follows the standard cloning pattern used by other services:
1. Query base tenant data (routes, shipments, schedules)
2. Clone to virtual tenant with ID substitution and date adjustment
3. Return records cloned count
Args:
base_tenant_id: Template tenant UUID to clone from
virtual_tenant_id: Target virtual tenant UUID
demo_account_type: Type of demo account
session_id: Originating session ID for tracing
session_created_at: ISO timestamp when demo session was created (for date adjustment)
session_metadata: Optional JSON string with child tenant mappings for enterprise demos
"""
try:
if not all([base_tenant_id, virtual_tenant_id, session_id]):
raise HTTPException(
status_code=400,
detail="Missing required parameters: base_tenant_id, virtual_tenant_id, session_id"
)
logger.info("Cloning distribution data from base tenant",
base_tenant_id=base_tenant_id,
virtual_tenant_id=virtual_tenant_id,
session_id=session_id)
# Clean up any existing demo data for this virtual tenant to prevent conflicts
logger.info("Cleaning up existing demo data for virtual tenant", virtual_tenant_id=virtual_tenant_id)
deleted_routes = await distribution_service.route_repository.delete_demo_routes_for_tenant(virtual_tenant_id)
deleted_shipments = await distribution_service.shipment_repository.delete_demo_shipments_for_tenant(virtual_tenant_id)
if deleted_routes > 0 or deleted_shipments > 0:
logger.info("Cleaned up existing demo data",
virtual_tenant_id=virtual_tenant_id,
deleted_routes=deleted_routes,
deleted_shipments=deleted_shipments)
# Generate a single timestamp suffix for this cloning operation to ensure uniqueness
timestamp_suffix = str(int(time.time()))[-6:] # Last 6 digits of timestamp
# Parse session creation date for date adjustment
from datetime import date, datetime, timezone
from dateutil import parser as date_parser
from shared.utils.demo_dates import BASE_REFERENCE_DATE, adjust_date_for_demo
if session_created_at:
if isinstance(session_created_at, str):
session_dt = date_parser.parse(session_created_at)
else:
session_dt = session_created_at
else:
session_dt = datetime.now(timezone.utc)
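# Worked example of the intended shift (assuming adjust_date_for_demo offsets each
# template date by the gap between session_dt and BASE_REFERENCE_DATE; dates below
# are illustrative):
#   BASE_REFERENCE_DATE = 2025-12-01, session_dt = 2025-12-13 (offset = +12 days)
#   template route_date 2025-11-28 -> cloned route_date 2025-12-10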
# Parse session_metadata to extract child tenant mappings for enterprise demos
child_tenant_id_map = {}
if session_metadata:
try:
metadata_dict = json.loads(session_metadata)
child_configs = metadata_dict.get("child_configs", [])
child_tenant_ids = metadata_dict.get("child_tenant_ids", [])
# Build mapping: base_child_id -> virtual_child_id
for idx, child_config in enumerate(child_configs):
if idx < len(child_tenant_ids):
base_child_id = child_config.get("base_tenant_id")
virtual_child_id = child_tenant_ids[idx]
if base_child_id and virtual_child_id:
child_tenant_id_map[base_child_id] = virtual_child_id
logger.info(
"Built child tenant ID mapping for enterprise demo",
mapping_count=len(child_tenant_id_map),
session_id=session_id,
mappings=child_tenant_id_map
)
except Exception as e:
logger.warning("Failed to parse session_metadata", error=str(e), session_id=session_id)
# Clone delivery routes from base tenant
base_routes = await distribution_service.route_repository.get_all_routes_for_tenant(base_tenant_id)
routes_cloned = 0
route_id_map = {} # Map old route IDs to new route IDs
for base_route in base_routes:
# Adjust route_date relative to session creation
adjusted_route_date = adjust_date_for_demo(
base_route.get('route_date'),
session_dt,
BASE_REFERENCE_DATE
)
# Map child tenant IDs in route_sequence
route_sequence = base_route.get('route_sequence', [])
if child_tenant_id_map and route_sequence:
mapped_sequence = []
for stop in route_sequence:
if isinstance(stop, dict) and 'child_tenant_id' in stop:
base_child_id = str(stop['child_tenant_id'])
if base_child_id in child_tenant_id_map:
stop = {**stop, 'child_tenant_id': child_tenant_id_map[base_child_id]}
logger.debug(
"Mapped child_tenant_id in route_sequence",
base_child_id=base_child_id,
virtual_child_id=child_tenant_id_map[base_child_id],
session_id=session_id
)
mapped_sequence.append(stop)
route_sequence = mapped_sequence
# Generate unique route number for the virtual tenant to avoid duplicates
base_route_number = base_route.get('route_number')
if base_route_number and base_route_number.startswith('DEMO-'):
# For demo routes, append the virtual tenant ID to ensure uniqueness
# Use more characters from UUID and include a timestamp component to reduce collision risk
# Handle both string and UUID inputs for virtual_tenant_id
try:
tenant_uuid = uuid.UUID(virtual_tenant_id) if isinstance(virtual_tenant_id, str) else virtual_tenant_id
except (ValueError, TypeError):
# If the value cannot be parsed as a UUID, fall back to using it as-is
tenant_uuid = virtual_tenant_id
# Take 16 hex characters of the tenant UUID to keep collisions unlikely
tenant_suffix = str(tenant_uuid).replace('-', '')[:16]
# Use the single timestamp suffix generated at the start of the operation
route_number = f"{base_route_number}-{tenant_suffix}-{timestamp_suffix}"
else:
# For non-demo routes, use original route number
route_number = base_route_number
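# Resulting identifier, for illustration (values are placeholders):
#   "DEMO-20251201-001" -> "DEMO-20251201-001-c3d4e5f6a7b849c0-584321"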
new_route = await distribution_service.route_repository.create_route({
'tenant_id': uuid.UUID(virtual_tenant_id),
'route_number': route_number,
'route_date': adjusted_route_date,
'vehicle_id': base_route.get('vehicle_id'),
'driver_id': base_route.get('driver_id'),
'total_distance_km': base_route.get('total_distance_km'),
'estimated_duration_minutes': base_route.get('estimated_duration_minutes'),
'route_sequence': route_sequence,
'status': base_route.get('status')
})
routes_cloned += 1
# Map old route ID to the new route ID returned by the repository
route_id_map[base_route.get('id')] = new_route['id']
# Clone shipments from base tenant
base_shipments = await distribution_service.shipment_repository.get_all_shipments_for_tenant(base_tenant_id)
shipments_cloned = 0
for base_shipment in base_shipments:
# Adjust shipment_date relative to session creation
adjusted_shipment_date = adjust_date_for_demo(
base_shipment.get('shipment_date'),
session_dt,
BASE_REFERENCE_DATE
)
# Map delivery_route_id to new route ID
old_route_id = base_shipment.get('delivery_route_id')
new_route_id = route_id_map.get(old_route_id) if old_route_id else None
# Generate unique shipment number for the virtual tenant to avoid duplicates
base_shipment_number = base_shipment.get('shipment_number')
if base_shipment_number and base_shipment_number.startswith('DEMO'):
# For demo shipments, append the virtual tenant ID to ensure uniqueness
# Use more characters from UUID and include a timestamp component to reduce collision risk
# Handle both string and UUID inputs for virtual_tenant_id
try:
tenant_uuid = uuid.UUID(virtual_tenant_id) if isinstance(virtual_tenant_id, str) else virtual_tenant_id
except (ValueError, TypeError):
# If the value cannot be parsed as a UUID, fall back to using it as-is
tenant_uuid = virtual_tenant_id
# Take 16 hex characters of the tenant UUID to keep collisions unlikely
tenant_suffix = str(tenant_uuid).replace('-', '')[:16]
# Use the single timestamp suffix generated at the start of the operation
shipment_number = f"{base_shipment_number}-{tenant_suffix}-{timestamp_suffix}"
else:
# For non-demo shipments, use original shipment number
shipment_number = base_shipment_number
# Map child_tenant_id to virtual child ID (THE KEY FIX)
base_child_id = base_shipment.get('child_tenant_id')
virtual_child_id = None
if base_child_id:
base_child_id_str = str(base_child_id)
if child_tenant_id_map and base_child_id_str in child_tenant_id_map:
virtual_child_id = uuid.UUID(child_tenant_id_map[base_child_id_str])
logger.debug(
"Mapped child tenant ID for shipment",
base_child_id=base_child_id_str,
virtual_child_id=str(virtual_child_id),
shipment_number=shipment_number,
session_id=session_id
)
else:
virtual_child_id = base_child_id # Fallback to original
else:
virtual_child_id = None
new_shipment = await distribution_service.shipment_repository.create_shipment({
'id': uuid.uuid4(),
'tenant_id': uuid.UUID(virtual_tenant_id),
'parent_tenant_id': uuid.UUID(virtual_tenant_id),
'child_tenant_id': virtual_child_id, # Mapped child tenant ID
'delivery_route_id': new_route_id,
'shipment_number': shipment_number,
'shipment_date': adjusted_shipment_date,
'status': base_shipment.get('status'),
'total_weight_kg': base_shipment.get('total_weight_kg'),
'total_volume_m3': base_shipment.get('total_volume_m3'),
'delivery_notes': base_shipment.get('delivery_notes')
})
shipments_cloned += 1
# Clone delivery schedules from base tenant
base_schedules = await distribution_service.schedule_repository.get_schedules_by_tenant(base_tenant_id)
schedules_cloned = 0
for base_schedule in base_schedules:
# Map child_tenant_id to virtual child ID
base_child_id = base_schedule.get('child_tenant_id')
virtual_child_id = None
if base_child_id:
base_child_id_str = str(base_child_id)
if child_tenant_id_map and base_child_id_str in child_tenant_id_map:
virtual_child_id = uuid.UUID(child_tenant_id_map[base_child_id_str])
logger.debug(
"Mapped child tenant ID for delivery schedule",
base_child_id=base_child_id_str,
virtual_child_id=str(virtual_child_id),
session_id=session_id
)
else:
virtual_child_id = base_child_id # Fallback to original
else:
virtual_child_id = None
new_schedule = await distribution_service.schedule_repository.create_schedule({
'id': uuid.uuid4(),
'parent_tenant_id': uuid.UUID(virtual_tenant_id),
'child_tenant_id': virtual_child_id, # Mapped child tenant ID
'schedule_name': base_schedule.get('schedule_name'),
'delivery_days': base_schedule.get('delivery_days'),
'delivery_time': base_schedule.get('delivery_time'),
'auto_generate_orders': base_schedule.get('auto_generate_orders'),
'lead_time_days': base_schedule.get('lead_time_days'),
'is_active': base_schedule.get('is_active')
})
schedules_cloned += 1
total_records = routes_cloned + shipments_cloned + schedules_cloned
logger.info(
"Distribution cloning completed successfully",
session_id=session_id,
routes_cloned=routes_cloned,
shipments_cloned=shipments_cloned,
schedules_cloned=schedules_cloned,
total_records=total_records,
child_mappings_applied=len(child_tenant_id_map),
is_enterprise=len(child_tenant_id_map) > 0
)
return {
"service": "distribution",
"status": "completed",
"records_cloned": total_records,
"routes_cloned": routes_cloned,
"shipments_cloned": shipments_cloned,
"schedules_cloned": schedules_cloned
}
except Exception as e:
logger.error(f"Error cloning distribution data: {e}", exc_info=True)
# Return a failed status instead of raising so the orchestrator's overall demo setup can continue; add context to the error
error_msg = f"Distribution cloning failed: {str(e)}"
logger.warning(f"Distribution cloning partially failed but continuing: {error_msg}")
return {
"service": "distribution",
"status": "failed",
"error": error_msg,
"records_cloned": 0,
"routes_cloned": 0,
"shipments_cloned": 0,
"schedules_cloned": 0
}
@router.delete("/internal/demo/tenant/{virtual_tenant_id}")
async def delete_demo_data(
virtual_tenant_id: str,
distribution_service: DistributionService = Depends(get_distribution_service),
_: bool = Depends(verify_internal_api_key)
):
"""Delete all distribution data for a virtual demo tenant"""
try:
logger.info("Deleting distribution data", virtual_tenant_id=virtual_tenant_id)
# Reuse existing cleanup logic
deleted_routes = await distribution_service.route_repository.delete_demo_routes_for_tenant(
tenant_id=virtual_tenant_id
)
deleted_shipments = await distribution_service.shipment_repository.delete_demo_shipments_for_tenant(
tenant_id=virtual_tenant_id
)
return {
"service": "distribution",
"status": "deleted",
"virtual_tenant_id": virtual_tenant_id,
"records_deleted": {
"routes": deleted_routes,
"shipments": deleted_shipments
}
}
except Exception as e:
logger.error(f"Error deleting distribution data: {e}", exc_info=True)
raise HTTPException(status_code=500, detail=str(e))


@@ -8,7 +8,7 @@ from app.core.config import settings
from app.core.database import database_manager
from app.api.routes import router as distribution_router
from app.api.shipments import router as shipments_router
from app.api.internal_demo import router as internal_demo_router
# from app.api.internal_demo import router as internal_demo_router # REMOVED: Replaced by script-based seed data loading
from shared.service_base import StandardFastAPIService
@@ -122,4 +122,4 @@ service.setup_standard_endpoints()
# Note: Routes now use RouteBuilder which includes full paths, so no prefix needed
service.add_router(distribution_router, tags=["distribution"])
service.add_router(shipments_router, tags=["shipments"])
service.add_router(internal_demo_router, tags=["internal-demo"])
# service.add_router(internal_demo_router, tags=["internal-demo"]) # REMOVED: Replaced by script-based seed data loading


@@ -1,300 +0,0 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Demo Distribution History Seeding Script for Distribution Service
Creates 30 days of historical delivery routes and shipments for enterprise demo
This is the CRITICAL missing piece that connects parent (Obrador) to children (retail outlets).
It populates the template with realistic VRP-optimized delivery routes.
Usage:
python /app/scripts/demo/seed_demo_distribution_history.py
Environment Variables Required:
DISTRIBUTION_DATABASE_URL - PostgreSQL connection string
DEMO_MODE - Set to 'production' for production seeding
"""
import asyncio
import uuid
import sys
import os
import random
from datetime import datetime, timezone, timedelta
from pathlib import Path
from decimal import Decimal
# Add app to path
sys.path.insert(0, str(Path(__file__).parent.parent.parent))
# Add shared to path
sys.path.insert(0, str(Path(__file__).parent.parent.parent.parent.parent))
from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine
from sqlalchemy.orm import sessionmaker
from sqlalchemy import select
import structlog
from shared.utils.demo_dates import BASE_REFERENCE_DATE
from app.models import DeliveryRoute, Shipment, DeliveryRouteStatus, ShipmentStatus
structlog.configure(
processors=[
structlog.stdlib.add_log_level,
structlog.processors.TimeStamper(fmt="iso"),
structlog.dev.ConsoleRenderer()
]
)
logger = structlog.get_logger()
# Fixed Demo Tenant IDs
DEMO_TENANT_ENTERPRISE_CHAIN = uuid.UUID("c3d4e5f6-a7b8-49c0-d1e2-f3a4b5c6d7e8") # Parent (Obrador)
DEMO_TENANT_CHILD_1 = uuid.UUID("d4e5f6a7-b8c9-40d1-e2f3-a4b5c6d7e8f9") # Madrid Centro
DEMO_TENANT_CHILD_2 = uuid.UUID("e5f6a7b8-c9d0-41e2-f3a4-b5c6d7e8f9a0") # Barcelona Gràcia
DEMO_TENANT_CHILD_3 = uuid.UUID("f6a7b8c9-d0e1-42f3-a4b5-c6d7e8f9a0b1") # Valencia Ruzafa
CHILD_TENANTS = [
(DEMO_TENANT_CHILD_1, "Madrid Centro", 150.0),
(DEMO_TENANT_CHILD_2, "Barcelona Gràcia", 120.0),
(DEMO_TENANT_CHILD_3, "Valencia Ruzafa", 100.0)
]
# Delivery schedule: Mon/Wed/Fri (as per distribution service)
DELIVERY_WEEKDAYS = [0, 2, 4] # Monday, Wednesday, Friday
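# For reference, Python's date.weekday() returns 0 for Monday, so e.g.
# date(2025, 3, 17).weekday() == 0 marks a Monday delivery day.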
async def seed_distribution_history(db: AsyncSession):
"""
Seed ~30 days of distribution data (routes + shipments) centered on BASE_REFERENCE_DATE
Creates delivery routes on a Mon/Wed/Fri pattern spanning the 31-day window from 15 days before to 15 days after BASE_REFERENCE_DATE.
This ensures data exists for "today" when BASE_REFERENCE_DATE is set to the current date.
"""
logger.info("=" * 80)
logger.info("🚚 Starting Demo Distribution History Seeding")
logger.info("=" * 80)
logger.info(f"Parent Tenant: {DEMO_TENANT_ENTERPRISE_CHAIN} (Obrador Madrid)")
logger.info(f"Child Tenants: {len(CHILD_TENANTS)}")
logger.info(f"Delivery Pattern: Mon/Wed/Fri (3x per week)")
logger.info(f"Date Range: {(BASE_REFERENCE_DATE - timedelta(days=15)).strftime('%Y-%m-%d')} to {(BASE_REFERENCE_DATE + timedelta(days=15)).strftime('%Y-%m-%d')}")
logger.info(f"Reference Date (today): {BASE_REFERENCE_DATE.strftime('%Y-%m-%d')}")
logger.info("")
routes_created = 0
shipments_created = 0
# Generate 30 days of routes centered around BASE_REFERENCE_DATE (-15 to +15 days)
# This ensures we have past data, current data, and future data
# Range is inclusive of start, exclusive of end, so -15 to 16 gives -15..15
for days_offset in range(-15, 16): # -15 to +15 = 31 days total
delivery_date = BASE_REFERENCE_DATE + timedelta(days=days_offset)
# Only create routes for Mon/Wed/Fri
if delivery_date.weekday() not in DELIVERY_WEEKDAYS:
continue
# Check if route already exists
result = await db.execute(
select(DeliveryRoute).where(
DeliveryRoute.tenant_id == DEMO_TENANT_ENTERPRISE_CHAIN,
DeliveryRoute.route_date == delivery_date
).limit(1)
)
existing_route = result.scalar_one_or_none()
if existing_route:
logger.debug(f"Route already exists for {delivery_date.strftime('%Y-%m-%d')}, skipping")
continue
# Create delivery route
route_number = f"DEMO-{delivery_date.strftime('%Y%m%d')}-001"
# Realistic VRP metrics for 3-stop route
# Distance: Madrid Centro (closest) + Barcelona Gràcia (medium) + Valencia Ruzafa (farthest)
total_distance_km = random.uniform(75.0, 95.0) # Realistic for 3 retail outlets in region
estimated_duration_minutes = random.randint(180, 240) # 3-4 hours for 3 stops
# Route sequence (order of deliveries) with full GPS coordinates for map display
# Determine status based on date
is_past = delivery_date < BASE_REFERENCE_DATE
point_status = "delivered" if is_past else "pending"
route_sequence = [
{
"tenant_id": str(DEMO_TENANT_CHILD_1),
"name": "Madrid Centro",
"address": "Calle Gran Vía 28, 28013 Madrid, Spain",
"latitude": 40.4168,
"longitude": -3.7038,
"status": point_status,
"id": str(uuid.uuid4()),
"sequence": 1
},
{
"tenant_id": str(DEMO_TENANT_CHILD_2),
"name": "Barcelona Gràcia",
"address": "Carrer Gran de Gràcia 15, 08012 Barcelona, Spain",
"latitude": 41.4036,
"longitude": 2.1561,
"status": point_status,
"id": str(uuid.uuid4()),
"sequence": 2
},
{
"tenant_id": str(DEMO_TENANT_CHILD_3),
"name": "Valencia Ruzafa",
"address": "Carrer de Sueca 51, 46006 Valencia, Spain",
"latitude": 39.4647,
"longitude": -0.3679,
"status": point_status,
"id": str(uuid.uuid4()),
"sequence": 3
}
]
# Route status (already determined is_past above)
route_status = DeliveryRouteStatus.completed if is_past else DeliveryRouteStatus.planned
route = DeliveryRoute(
id=uuid.uuid4(),
tenant_id=DEMO_TENANT_ENTERPRISE_CHAIN,
route_number=route_number,
route_date=delivery_date,
total_distance_km=Decimal(str(round(total_distance_km, 2))),
estimated_duration_minutes=estimated_duration_minutes,
route_sequence=route_sequence,
status=route_status,
driver_id=uuid.uuid4(), # Use a random UUID for the driver_id
vehicle_id=f"VEH-{random.choice(['001', '002', '003'])}",
created_at=delivery_date - timedelta(days=1), # Routes created day before
updated_at=delivery_date,
created_by=uuid.uuid4(), # Add required audit field
updated_by=uuid.uuid4() # Add required audit field
)
db.add(route)
routes_created += 1
# Create shipments for each child tenant on this route
for child_tenant_id, child_name, avg_weight_kg in CHILD_TENANTS:
# Vary weight slightly
shipment_weight = avg_weight_kg * random.uniform(0.9, 1.1)
shipment_number = f"DEMOSHP-{delivery_date.strftime('%Y%m%d')}-{child_name.split()[0].upper()[:3]}"
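# e.g. "DEMOSHP-20251201-MAD" for Madrid Centro (date illustrative)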
# Determine shipment status based on date
shipment_status = ShipmentStatus.delivered if is_past else ShipmentStatus.pending
shipment = Shipment(
id=uuid.uuid4(),
tenant_id=DEMO_TENANT_ENTERPRISE_CHAIN,
parent_tenant_id=DEMO_TENANT_ENTERPRISE_CHAIN,
child_tenant_id=child_tenant_id,
shipment_number=shipment_number,
shipment_date=delivery_date,
status=shipment_status,
total_weight_kg=Decimal(str(round(shipment_weight, 2))),
delivery_route_id=route.id,
delivery_notes=f"Entrega regular a {child_name}",
created_at=delivery_date - timedelta(days=1),
updated_at=delivery_date,
created_by=uuid.uuid4(), # Add required audit field
updated_by=uuid.uuid4() # Add required audit field
)
db.add(shipment)
shipments_created += 1
logger.debug(
f"{delivery_date.strftime('%a %Y-%m-%d')}: "
f"Route {route_number} with {len(CHILD_TENANTS)} shipments"
)
# Commit all changes
await db.commit()
logger.info("")
logger.info("=" * 80)
logger.info("✅ Demo Distribution History Seeding Completed")
logger.info("=" * 80)
logger.info(f" 📊 Routes created: {routes_created}")
logger.info(f" 📦 Shipments created: {shipments_created}")
logger.info("")
logger.info("Distribution characteristics:")
logger.info(" ✓ 30 days of historical data")
logger.info(" ✓ Mon/Wed/Fri delivery schedule (3x per week)")
logger.info(" ✓ VRP-optimized route sequencing")
logger.info(" ✓ ~13 routes (30 days ÷ 7 days/week × 3 delivery days)")
logger.info(" ✓ ~39 shipments (13 routes × 3 children)")
logger.info(" ✓ Realistic distances and durations")
logger.info("")
return {
"service": "distribution",
"routes_created": routes_created,
"shipments_created": shipments_created
}
async def main():
"""Main execution function"""
logger.info("Demo Distribution History Seeding Script Starting")
logger.info("Mode: %s", os.getenv("DEMO_MODE", "development"))
# Get database URL from environment
database_url = os.getenv("DISTRIBUTION_DATABASE_URL") or os.getenv("DATABASE_URL")
if not database_url:
logger.error("❌ DISTRIBUTION_DATABASE_URL or DATABASE_URL environment variable must be set")
return 1
# Convert to async URL if needed
if database_url.startswith("postgresql://"):
database_url = database_url.replace("postgresql://", "postgresql+asyncpg://", 1)
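# Example rewrite (credentials illustrative):
#   postgresql://demo:demo@db:5432/distribution -> postgresql+asyncpg://demo:demo@db:5432/distribution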
logger.info("Connecting to distribution database")
# Create engine and session
engine = create_async_engine(
database_url,
echo=False,
pool_pre_ping=True,
pool_size=5,
max_overflow=10
)
async_session = sessionmaker(
engine,
class_=AsyncSession,
expire_on_commit=False
)
try:
async with async_session() as session:
result = await seed_distribution_history(session)
logger.info("🎉 Success! Distribution history is ready for cloning.")
logger.info("")
logger.info("Next steps:")
logger.info(" 1. Create Kubernetes job YAMLs for all child scripts")
logger.info(" 2. Update kustomization.yaml with proper execution order")
logger.info(" 3. Test enterprise demo end-to-end")
logger.info("")
return 0
except Exception as e:
logger.error("=" * 80)
logger.error("❌ Demo Distribution History Seeding Failed")
logger.error("=" * 80)
logger.error("Error: %s", str(e))
logger.error("", exc_info=True)
return 1
finally:
await engine.dispose()
if __name__ == "__main__":
exit_code = asyncio.run(main())
sys.exit(exit_code)