demo seed change 5

This commit is contained in:
Urtzi Alfaro
2025-12-14 20:13:59 +01:00
parent 82f9622411
commit 56a1821256
4 changed files with 462 additions and 173 deletions

View File

@@ -58,6 +58,107 @@ def verify_internal_api_key(x_internal_api_key: str = Header(...)):
return True
async def load_fixture_data_for_tenant(
    db: AsyncSession,
    tenant_uuid: UUID,
    demo_account_type: str,
    reference_time: datetime
) -> int:
    """
    Load orchestration run data from a JSON fixture directly into the virtual tenant.

    Args:
        db: Async database session; the new run is added and flushed, but NOT committed
            (the caller owns the transaction).
        tenant_uuid: UUID of the virtual tenant that receives the fixture run.
        demo_account_type: Demo account type; selects the fixture subdirectory.
        reference_time: Demo session creation time; all fixture dates are shifted
            relative to it so the run appears recent.

    Returns:
        The number of runs created: 0 when the fixture file or its
        "orchestration_run" section is missing, otherwise 1.
    """
    from shared.utils.seed_data_paths import get_seed_data_path
    from shared.utils.demo_dates import resolve_time_marker, adjust_date_for_demo

    # Locate the fixture file; fall back to the legacy on-disk layout when the
    # path helper cannot be imported.
    try:
        json_file = get_seed_data_path(demo_account_type, "11-orchestrator.json")
    except ImportError:
        seed_data_dir = Path(__file__).parent.parent.parent.parent / "shared" / "demo" / "fixtures"
        json_file = seed_data_dir / demo_account_type / "11-orchestrator.json"

    if not json_file.exists():
        logger.warning("Orchestrator fixture file not found", file=str(json_file))
        return 0

    with open(json_file, 'r', encoding='utf-8') as f:
        fixture_data = json.load(f)

    orchestration_run_data = fixture_data.get("orchestration_run")
    if not orchestration_run_data:
        logger.warning("No orchestration_run data in fixture")
        return 0

    # Hoist the results sub-dict once instead of re-fetching it per field below.
    results = fixture_data.get("orchestration_results", {})

    # Parse fixture time markers and shift them to reference_time so the run
    # appears recent relative to session creation. Fall back to a synthetic
    # 2h-ago start / 15-minute duration when the fixture omits them.
    base_started_at = resolve_time_marker(orchestration_run_data.get("started_at"))
    base_completed_at = resolve_time_marker(orchestration_run_data.get("completed_at"))
    started_at = (
        adjust_date_for_demo(base_started_at, reference_time)
        if base_started_at
        else reference_time - timedelta(hours=2)
    )
    completed_at = (
        adjust_date_for_demo(base_completed_at, reference_time)
        if base_completed_at
        else started_at + timedelta(minutes=15)
    )

    # Unique run number: current year plus a random 8-hex-char suffix so
    # concurrent demo sessions don't collide.
    current_year = reference_time.year
    unique_suffix = str(uuid.uuid4())[:8].upper()
    run_number = f"ORCH-DEMO-PROF-{current_year}-001-{unique_suffix}"

    new_run = OrchestrationRun(
        id=uuid.uuid4(),  # fresh UUID so one fixture can seed many tenants
        tenant_id=tenant_uuid,
        run_number=run_number,
        # NOTE(review): raises KeyError if the fixture carries an unknown
        # status name — assumed to be a deliberate fail-fast on bad fixtures.
        status=OrchestrationStatus[orchestration_run_data["status"]],
        run_type=orchestration_run_data.get("run_type", "daily"),
        priority="normal",
        started_at=started_at,
        completed_at=completed_at,
        duration_seconds=orchestration_run_data.get("duration_seconds", 900),
        # Step statuses: fixed offsets carve the run into four sequential
        # pipeline phases (forecasting -> production -> procurement -> notify).
        forecasting_status="success",
        forecasting_started_at=started_at,
        forecasting_completed_at=started_at + timedelta(minutes=2),
        production_status="success",
        production_started_at=started_at + timedelta(minutes=2),
        production_completed_at=started_at + timedelta(minutes=5),
        procurement_status="success",
        procurement_started_at=started_at + timedelta(minutes=5),
        procurement_completed_at=started_at + timedelta(minutes=8),
        notification_status="success",
        notification_started_at=started_at + timedelta(minutes=8),
        notification_completed_at=completed_at,
        # Result counts from the fixture's orchestration_results section,
        # with demo-plausible defaults.
        forecasts_generated=results.get("forecasts_generated", 10),
        production_batches_created=results.get("production_batches_created", 18),
        procurement_plans_created=0,
        purchase_orders_created=results.get("purchase_orders_created", 6),
        notifications_sent=results.get("notifications_sent", 8),
        # Metadata
        triggered_by="system",
        created_at=started_at,
        updated_at=completed_at
    )
    db.add(new_run)
    await db.flush()

    logger.info(
        "Loaded orchestration run from fixture",
        tenant_id=str(tenant_uuid),
        run_number=new_run.run_number,
        started_at=started_at.isoformat()
    )
    return 1
@router.post("/internal/demo/clone")
async def clone_demo_data(
base_tenant_id: str,
@@ -73,6 +174,8 @@ async def clone_demo_data(
This endpoint is called by the demo_session service during session initialization.
It clones orchestration runs with date adjustments to make them appear recent.
If the base tenant has no orchestration runs, it will first seed them from the fixture.
"""
start_time = datetime.now(timezone.utc)
@@ -96,150 +199,24 @@ async def clone_demo_data(
)
try:
base_uuid = uuid.UUID(base_tenant_id)
virtual_uuid = uuid.UUID(virtual_tenant_id)
# Fetch base tenant orchestration runs
# Get all completed and partial_success runs from the base tenant
result = await db.execute(
select(OrchestrationRun)
.where(OrchestrationRun.tenant_id == base_uuid)
.order_by(OrchestrationRun.started_at.desc())
.limit(10) # Clone last 10 runs for demo
# Load fixture data directly into virtual tenant (no base tenant cloning)
runs_created = await load_fixture_data_for_tenant(
db,
virtual_uuid,
demo_account_type,
reference_time
)
base_runs = list(result.scalars().all())
runs_cloned = 0
# Clone each orchestration run with date adjustment
for base_run in base_runs:
# Use the shared date adjustment utility to ensure dates are always in the past
# This calculates the offset from BASE_REFERENCE_DATE and applies it to session creation time
if base_run.started_at:
new_started_at = adjust_date_for_demo(
base_run.started_at, reference_time
)
else:
new_started_at = reference_time - timedelta(hours=2)
# Adjust completed_at using the same utility
if base_run.completed_at:
new_completed_at = adjust_date_for_demo(
base_run.completed_at, reference_time
)
# Ensure completion is after start (in case of edge cases)
if new_completed_at and new_started_at and new_completed_at < new_started_at:
# Preserve original duration
duration = base_run.completed_at - base_run.started_at
new_completed_at = new_started_at + duration
else:
new_completed_at = None
# Adjust all step timestamps using the shared utility
def adjust_timestamp(original_timestamp):
if not original_timestamp:
return None
return adjust_date_for_demo(original_timestamp, reference_time)
# Create new orchestration run for virtual tenant
# Update run_number to have current year instead of original year, and make it unique
current_year = reference_time.year
# Extract type from original run number and create new format
parts = base_run.run_number.split('-')
if len(parts) >= 4:
tenant_prefix = parts[1] if len(parts) > 1 else "DEMO"
type_code = parts[2] if len(parts) > 2 else "TST"
original_index = parts[3] if len(parts) > 3 else "001"
# Generate a more robust unique suffix to avoid collisions
# Use UUID instead of just session_id substring to ensure uniqueness
unique_suffix = str(uuid.uuid4())[:8].upper()
proposed_run_number = f"ORCH-{tenant_prefix}-{type_code}-{current_year}-{original_index}-{unique_suffix}"
else:
unique_suffix = str(uuid.uuid4())[:12].upper()
proposed_run_number = f"{base_run.run_number}-{unique_suffix}"
# Ensure the run number is truly unique by checking against existing entries
# This prevents collisions especially in high-concurrency scenarios
run_number = await ensure_unique_run_number(db, proposed_run_number)
new_run = OrchestrationRun(
id=uuid.uuid4(),
tenant_id=virtual_uuid,
run_number=run_number,
status=base_run.status,
run_type=base_run.run_type,
priority=base_run.priority,
started_at=new_started_at,
completed_at=new_completed_at,
duration_seconds=base_run.duration_seconds,
# Forecasting step
forecasting_started_at=adjust_timestamp(base_run.forecasting_started_at),
forecasting_completed_at=adjust_timestamp(base_run.forecasting_completed_at),
forecasting_status=base_run.forecasting_status,
forecasting_error=base_run.forecasting_error,
# Production step
production_started_at=adjust_timestamp(base_run.production_started_at),
production_completed_at=adjust_timestamp(base_run.production_completed_at),
production_status=base_run.production_status,
production_error=base_run.production_error,
# Procurement step
procurement_started_at=adjust_timestamp(base_run.procurement_started_at),
procurement_completed_at=adjust_timestamp(base_run.procurement_completed_at),
procurement_status=base_run.procurement_status,
procurement_error=base_run.procurement_error,
# Notification step
notification_started_at=adjust_timestamp(base_run.notification_started_at),
notification_completed_at=adjust_timestamp(base_run.notification_completed_at),
notification_status=base_run.notification_status,
notification_error=base_run.notification_error,
# AI Insights (if exists)
ai_insights_started_at=adjust_timestamp(base_run.ai_insights_started_at) if hasattr(base_run, 'ai_insights_started_at') else None,
ai_insights_completed_at=adjust_timestamp(base_run.ai_insights_completed_at) if hasattr(base_run, 'ai_insights_completed_at') else None,
ai_insights_status=base_run.ai_insights_status if hasattr(base_run, 'ai_insights_status') else None,
ai_insights_generated=base_run.ai_insights_generated if hasattr(base_run, 'ai_insights_generated') else None,
ai_insights_posted=base_run.ai_insights_posted if hasattr(base_run, 'ai_insights_posted') else None,
# Results summary
forecasts_generated=base_run.forecasts_generated,
production_batches_created=base_run.production_batches_created,
procurement_plans_created=base_run.procurement_plans_created,
purchase_orders_created=base_run.purchase_orders_created,
notifications_sent=base_run.notifications_sent,
# Performance metrics
fulfillment_rate=base_run.fulfillment_rate,
on_time_delivery_rate=base_run.on_time_delivery_rate,
cost_accuracy=base_run.cost_accuracy,
quality_score=base_run.quality_score,
# Data
forecast_data=base_run.forecast_data,
run_metadata=base_run.run_metadata,
# Metadata
triggered_by=base_run.triggered_by,
created_at=reference_time,
updated_at=reference_time
)
db.add(new_run)
await db.flush()
runs_cloned += 1
await db.commit()
duration_ms = int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000)
logger.info(
"Orchestration runs cloned successfully",
"Orchestration runs loaded from fixture successfully",
virtual_tenant_id=str(virtual_tenant_id),
runs_cloned=runs_cloned,
runs_created=runs_created,
duration_ms=duration_ms
)
@@ -247,8 +224,8 @@ async def clone_demo_data(
"service": "orchestrator",
"status": "completed",
"success": True,
"records_cloned": runs_cloned,
"runs_cloned": runs_cloned,
"records_cloned": runs_created,
"runs_cloned": runs_created,
"duration_ms": duration_ms
}