Files
bakery-ia/services/production/app/api/internal_demo.py
2025-12-17 13:03:52 +01:00

808 lines
36 KiB
Python

"""
Internal Demo Cloning API for Production Service
Service-to-service endpoint for cloning production data
"""
from fastapi import APIRouter, Depends, HTTPException, Header
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select, delete, func
import structlog
import uuid
from uuid import UUID
from datetime import datetime, timezone, timedelta
from typing import Optional, Dict, Any
import os
import json
from pathlib import Path
from app.core.database import get_db
from app.models.production import (
ProductionBatch, ProductionSchedule, ProductionCapacity,
QualityCheckTemplate, QualityCheck, Equipment,
ProductionStatus, ProductionPriority, ProcessStage,
EquipmentStatus, EquipmentType
)
from shared.utils.demo_dates import (
adjust_date_for_demo, resolve_time_marker
)
from app.core.config import settings
logger = structlog.get_logger()
router = APIRouter()
# Base demo tenant IDs
DEMO_TENANT_PROFESSIONAL = "a1b2c3d4-e5f6-47a8-b9c0-d1e2f3a4b5c6"
def verify_internal_api_key(x_internal_api_key: Optional[str] = Header(None)):
"""Verify internal API key for service-to-service communication"""
if x_internal_api_key != settings.INTERNAL_API_KEY:
logger.warning("Unauthorized internal API access attempted")
raise HTTPException(status_code=403, detail="Invalid internal API key")
return True
@router.post("/internal/demo/clone")
async def clone_demo_data(
base_tenant_id: str,
virtual_tenant_id: str,
demo_account_type: str,
session_id: Optional[str] = None,
session_created_at: Optional[str] = None,
db: AsyncSession = Depends(get_db),
_: bool = Depends(verify_internal_api_key)
):
"""
Clone production service data for a virtual demo tenant
Clones:
- Production batches (historical production runs)
- Production schedules (daily planning)
- Production capacity records
- Quality check templates
- Quality checks (inspection records)
- Equipment (machines and tools)
Args:
base_tenant_id: Template tenant UUID to clone from
virtual_tenant_id: Target virtual tenant UUID
demo_account_type: Type of demo account
session_id: Originating session ID for tracing
session_created_at: Session creation timestamp for date adjustment
Returns:
Cloning status and record counts
"""
start_time = datetime.now(timezone.utc)
# Parse session creation time for date adjustment
if session_created_at:
try:
session_time = datetime.fromisoformat(session_created_at.replace('Z', '+00:00'))
except (ValueError, AttributeError):
session_time = start_time
else:
session_time = start_time
logger.info(
"Starting production data cloning",
base_tenant_id=base_tenant_id,
virtual_tenant_id=virtual_tenant_id,
demo_account_type=demo_account_type,
session_id=session_id,
session_created_at=session_created_at
)
try:
# Validate UUIDs
virtual_uuid = uuid.UUID(virtual_tenant_id)
# Track cloning statistics
stats = {
"batches": 0,
"production_schedules": 0,
"production_capacity": 0,
"quality_check_templates": 0,
"quality_checks": 0,
"equipment": 0,
"alerts_generated": 0
}
def parse_date_field(date_value, session_time, field_name="date"):
"""Parse date field, handling both ISO strings and BASE_TS markers"""
if not date_value:
return None
# Check if it's a BASE_TS marker
if isinstance(date_value, str) and date_value.startswith("BASE_TS"):
try:
return resolve_time_marker(date_value, session_time)
except ValueError as e:
logger.warning(
f"Invalid BASE_TS marker in {field_name}",
marker=date_value,
error=str(e)
)
return None
# Handle regular ISO date strings
try:
return adjust_date_for_demo(
datetime.fromisoformat(date_value.replace('Z', '+00:00')),
session_time
)
except (ValueError, AttributeError) as e:
logger.warning(
f"Invalid date format in {field_name}",
date_value=date_value,
error=str(e)
)
return None
# Load seed data from JSON files
from shared.utils.seed_data_paths import get_seed_data_path
if demo_account_type == "professional":
json_file = get_seed_data_path("professional", "06-production.json")
elif demo_account_type == "enterprise":
json_file = get_seed_data_path("enterprise", "06-production.json")
elif demo_account_type == "enterprise_child":
json_file = get_seed_data_path("enterprise", "06-production.json", child_id=base_tenant_id)
else:
raise ValueError(f"Invalid demo account type: {demo_account_type}")
# Load JSON data
with open(json_file, 'r', encoding='utf-8') as f:
seed_data = json.load(f)
# Create Equipment first (no dependencies)
for equipment_data in seed_data.get('equipment', []):
# Transform equipment ID using XOR
from shared.utils.demo_id_transformer import transform_id
try:
equipment_uuid = UUID(equipment_data['id'])
transformed_id = transform_id(equipment_data['id'], virtual_uuid)
except ValueError as e:
logger.error("Failed to parse equipment UUID",
equipment_id=equipment_data['id'],
error=str(e))
raise HTTPException(
status_code=400,
detail=f"Invalid UUID format in equipment data: {str(e)}"
)
# Parse date fields (supports BASE_TS markers and ISO timestamps)
adjusted_install_date = parse_date_field(
equipment_data.get('install_date'),
session_time,
"install_date"
)
adjusted_last_maintenance = parse_date_field(
equipment_data.get('last_maintenance_date'),
session_time,
"last_maintenance_date"
)
adjusted_next_maintenance = parse_date_field(
equipment_data.get('next_maintenance_date'),
session_time,
"next_maintenance_date"
)
adjusted_created_at = parse_date_field(
equipment_data.get('created_at'),
session_time,
"created_at"
)
adjusted_updated_at = parse_date_field(
equipment_data.get('updated_at'),
session_time,
"updated_at"
)
new_equipment = Equipment(
id=str(transformed_id),
tenant_id=virtual_uuid,
name=equipment_data['name'],
type=equipment_data['type'],
model=equipment_data['model'],
serial_number=equipment_data.get('serial_number'),
location=equipment_data['location'],
status=equipment_data['status'],
install_date=adjusted_install_date,
last_maintenance_date=adjusted_last_maintenance,
next_maintenance_date=adjusted_next_maintenance,
maintenance_interval_days=equipment_data.get('maintenance_interval_days'),
efficiency_percentage=equipment_data.get('efficiency_percentage'),
uptime_percentage=equipment_data.get('uptime_percentage'),
energy_usage_kwh=equipment_data.get('energy_usage_kwh'),
power_kw=equipment_data.get('power_kw'),
capacity=equipment_data.get('capacity'),
weight_kg=equipment_data.get('weight_kg'),
current_temperature=equipment_data.get('current_temperature'),
target_temperature=equipment_data.get('target_temperature'),
is_active=equipment_data.get('is_active', True),
notes=equipment_data.get('notes'),
created_at=adjusted_created_at,
updated_at=adjusted_updated_at
)
db.add(new_equipment)
stats["equipment"] += 1
# Flush to get equipment IDs
await db.flush()
# Clone Quality Check Templates from seed data
template_id_map = {}
for template_data in seed_data.get('quality_check_templates', []):
# Transform template ID using XOR
from shared.utils.demo_id_transformer import transform_id
try:
template_uuid = UUID(template_data['id'])
transformed_id = transform_id(template_data['id'], virtual_uuid)
except ValueError as e:
logger.error("Failed to parse template UUID",
template_id=template_data['id'],
error=str(e))
continue
template_id_map[UUID(template_data['id'])] = transformed_id
# Parse date fields (supports BASE_TS markers and ISO timestamps)
adjusted_created_at = parse_date_field(
template_data.get('created_at'),
session_time,
"created_at"
) or session_time
adjusted_updated_at = parse_date_field(
template_data.get('updated_at'),
session_time,
"updated_at"
) or adjusted_created_at
new_template = QualityCheckTemplate(
id=str(transformed_id),
tenant_id=virtual_uuid,
name=template_data.get('name'),
template_code=template_data.get('template_code'),
check_type=template_data.get('check_type'),
category=template_data.get('category'),
description=template_data.get('description'),
instructions=template_data.get('instructions'),
parameters=template_data.get('parameters'),
thresholds=template_data.get('thresholds'),
scoring_criteria=template_data.get('scoring_criteria'),
is_active=template_data.get('is_active', True),
is_required=template_data.get('is_required', False),
is_critical=template_data.get('is_critical', False),
weight=template_data.get('weight', 1.0),
min_value=template_data.get('min_value'),
max_value=template_data.get('max_value'),
target_value=template_data.get('target_value'),
unit=template_data.get('unit'),
tolerance_percentage=template_data.get('tolerance_percentage'),
applicable_stages=template_data.get('applicable_stages'),
created_by=template_data.get('created_by'),
created_at=adjusted_created_at,
updated_at=adjusted_updated_at
)
db.add(new_template)
stats["quality_check_templates"] += 1
# Flush to get template IDs
await db.flush()
# Clone Production Batches from seed data
batch_id_map = {}
for batch_data in seed_data.get('batches', []):
# Transform batch ID using XOR
from shared.utils.demo_id_transformer import transform_id
try:
batch_uuid = UUID(batch_data['id'])
transformed_id = transform_id(batch_data['id'], virtual_uuid)
except ValueError as e:
logger.error("Failed to parse batch UUID",
batch_id=batch_data['id'],
error=str(e))
raise HTTPException(
status_code=400,
detail=f"Invalid UUID format in batch data: {str(e)}"
)
batch_id_map[UUID(batch_data['id'])] = transformed_id
# Adjust dates relative to session creation time
adjusted_planned_start = parse_date_field(batch_data.get('planned_start_time'), session_time, "planned_start_time")
adjusted_planned_end = parse_date_field(batch_data.get('planned_end_time'), session_time, "planned_end_time")
adjusted_actual_start = parse_date_field(batch_data.get('actual_start_time'), session_time, "actual_start_time")
adjusted_actual_end = parse_date_field(batch_data.get('actual_end_time'), session_time, "actual_end_time")
adjusted_completed = parse_date_field(batch_data.get('completed_at'), session_time, "completed_at")
adjusted_created_at = parse_date_field(batch_data.get('created_at'), session_time, "created_at") or session_time
adjusted_updated_at = parse_date_field(batch_data.get('updated_at'), session_time, "updated_at") or adjusted_created_at
# Map status and priority enums
status_value = batch_data.get('status', 'PENDING')
if isinstance(status_value, str):
try:
status_value = ProductionStatus[status_value]
except KeyError:
status_value = ProductionStatus.PENDING
priority_value = batch_data.get('priority', 'MEDIUM')
if isinstance(priority_value, str):
try:
priority_value = ProductionPriority[priority_value]
except KeyError:
priority_value = ProductionPriority.MEDIUM
# Map process stage enum
process_stage_value = batch_data.get('current_process_stage')
if process_stage_value and isinstance(process_stage_value, str):
try:
process_stage_value = ProcessStage[process_stage_value]
except KeyError:
process_stage_value = None
# Transform foreign key references (product_id, recipe_id, order_id, forecast_id)
transformed_product_id = None
if batch_data.get('product_id'):
try:
transformed_product_id = str(transform_id(batch_data['product_id'], virtual_uuid))
except (ValueError, Exception) as e:
logger.warning("Failed to transform product_id",
product_id=batch_data.get('product_id'),
error=str(e))
transformed_recipe_id = None
if batch_data.get('recipe_id'):
try:
transformed_recipe_id = str(transform_id(batch_data['recipe_id'], virtual_uuid))
except (ValueError, Exception) as e:
logger.warning("Failed to transform recipe_id",
recipe_id=batch_data.get('recipe_id'),
error=str(e))
transformed_order_id = None
if batch_data.get('order_id'):
try:
transformed_order_id = str(transform_id(batch_data['order_id'], virtual_uuid))
except (ValueError, Exception) as e:
logger.warning("Failed to transform order_id",
order_id=batch_data.get('order_id'),
error=str(e))
transformed_forecast_id = None
if batch_data.get('forecast_id'):
try:
transformed_forecast_id = str(transform_id(batch_data['forecast_id'], virtual_uuid))
except (ValueError, Exception) as e:
logger.warning("Failed to transform forecast_id",
forecast_id=batch_data.get('forecast_id'),
error=str(e))
# Transform equipment_used array
transformed_equipment = []
if batch_data.get('equipment_used'):
for equip_id in batch_data['equipment_used']:
try:
transformed_equipment.append(str(transform_id(equip_id, virtual_uuid)))
except (ValueError, Exception) as e:
logger.warning("Failed to transform equipment_id",
equipment_id=equip_id,
error=str(e))
# staff_assigned contains user IDs - these should NOT be transformed
# because they reference actual user accounts which are NOT cloned
# The demo uses the same user accounts across all virtual tenants
staff_assigned = batch_data.get('staff_assigned', [])
new_batch = ProductionBatch(
id=str(transformed_id),
tenant_id=virtual_uuid,
batch_number=f"{session_id[:8]}-{batch_data.get('batch_number', f'BATCH-{uuid.uuid4().hex[:8].upper()}')}",
product_id=transformed_product_id,
product_name=batch_data.get('product_name'),
recipe_id=transformed_recipe_id,
planned_start_time=adjusted_planned_start,
planned_end_time=adjusted_planned_end,
planned_quantity=batch_data.get('planned_quantity'),
planned_duration_minutes=batch_data.get('planned_duration_minutes'),
actual_start_time=adjusted_actual_start,
actual_end_time=adjusted_actual_end,
actual_quantity=batch_data.get('actual_quantity'),
actual_duration_minutes=batch_data.get('actual_duration_minutes'),
status=status_value,
priority=priority_value,
current_process_stage=process_stage_value,
process_stage_history=batch_data.get('process_stage_history'),
pending_quality_checks=batch_data.get('pending_quality_checks'),
completed_quality_checks=batch_data.get('completed_quality_checks'),
estimated_cost=batch_data.get('estimated_cost'),
actual_cost=batch_data.get('actual_cost'),
labor_cost=batch_data.get('labor_cost'),
material_cost=batch_data.get('material_cost'),
overhead_cost=batch_data.get('overhead_cost'),
yield_percentage=batch_data.get('yield_percentage'),
quality_score=batch_data.get('quality_score'),
waste_quantity=batch_data.get('waste_quantity'),
defect_quantity=batch_data.get('defect_quantity'),
waste_defect_type=batch_data.get('waste_defect_type'),
equipment_used=transformed_equipment,
staff_assigned=staff_assigned,
station_id=batch_data.get('station_id'),
order_id=transformed_order_id,
forecast_id=transformed_forecast_id,
is_rush_order=batch_data.get('is_rush_order', False),
is_special_recipe=batch_data.get('is_special_recipe', False),
is_ai_assisted=batch_data.get('is_ai_assisted', False),
production_notes=batch_data.get('production_notes'),
quality_notes=batch_data.get('quality_notes'),
delay_reason=batch_data.get('delay_reason'),
cancellation_reason=batch_data.get('cancellation_reason'),
created_at=adjusted_created_at,
updated_at=adjusted_updated_at,
completed_at=adjusted_completed
)
db.add(new_batch)
stats["batches"] += 1
# Flush to get batch IDs
await db.flush()
# Clone Quality Checks from seed data (if any)
for check_data in seed_data.get('quality_checks', []):
# Transform IDs
from shared.utils.demo_id_transformer import transform_id
try:
check_uuid = UUID(check_data['id'])
transformed_id = transform_id(check_data['id'], virtual_uuid)
except ValueError as e:
logger.error("Failed to parse check UUID",
check_id=check_data['id'],
error=str(e))
continue
# Map batch_id if it exists in our map
batch_id_value = check_data.get('batch_id')
if batch_id_value:
batch_id_value = batch_id_map.get(UUID(batch_id_value), UUID(batch_id_value))
# Map template_id if it exists
template_id_value = check_data.get('template_id')
if template_id_value:
template_id_value = template_id_map.get(UUID(template_id_value), UUID(template_id_value))
# Parse date fields (supports BASE_TS markers and ISO timestamps)
adjusted_check_time = parse_date_field(
check_data.get('check_time'),
session_time,
"check_time"
)
adjusted_created_at = parse_date_field(
check_data.get('created_at'),
session_time,
"created_at"
)
adjusted_updated_at = parse_date_field(
check_data.get('updated_at'),
session_time,
"updated_at"
) or adjusted_created_at
new_check = QualityCheck(
id=str(transformed_id),
tenant_id=virtual_uuid,
batch_id=str(batch_id_value) if batch_id_value else None,
template_id=str(template_id_value) if template_id_value else None,
check_type=check_data.get('check_type'),
process_stage=check_data.get('process_stage'),
check_time=adjusted_check_time,
checker_id=check_data.get('checker_id'),
quality_score=check_data.get('quality_score'),
pass_fail=check_data.get('pass_fail'),
defect_count=check_data.get('defect_count'),
defect_types=check_data.get('defect_types'),
measured_weight=check_data.get('measured_weight'),
measured_temperature=check_data.get('measured_temperature'),
measured_moisture=check_data.get('measured_moisture'),
measured_dimensions=check_data.get('measured_dimensions'),
stage_specific_data=check_data.get('stage_specific_data'),
target_weight=check_data.get('target_weight'),
target_temperature=check_data.get('target_temperature'),
target_moisture=check_data.get('target_moisture'),
tolerance_percentage=check_data.get('tolerance_percentage'),
within_tolerance=check_data.get('within_tolerance'),
corrective_action_needed=check_data.get('corrective_action_needed'),
corrective_actions=check_data.get('corrective_actions'),
template_results=check_data.get('template_results'),
criteria_scores=check_data.get('criteria_scores'),
check_notes=check_data.get('check_notes'),
photos_urls=check_data.get('photos_urls'),
certificate_url=check_data.get('certificate_url'),
created_at=adjusted_created_at,
updated_at=adjusted_updated_at
)
db.add(new_check)
stats["quality_checks"] += 1
# Clone Production Schedules from seed data (if any)
for schedule_data in seed_data.get('production_schedules', []):
# Transform IDs
from shared.utils.demo_id_transformer import transform_id
try:
schedule_uuid = UUID(schedule_data['id'])
transformed_id = transform_id(schedule_data['id'], virtual_uuid)
except ValueError as e:
logger.error("Failed to parse schedule UUID",
schedule_id=schedule_data['id'],
error=str(e))
continue
# Parse date fields (supports BASE_TS markers and ISO timestamps)
adjusted_schedule_date = parse_date_field(
schedule_data.get('schedule_date'),
session_time,
"schedule_date"
)
adjusted_shift_start = parse_date_field(
schedule_data.get('shift_start'),
session_time,
"shift_start"
)
adjusted_shift_end = parse_date_field(
schedule_data.get('shift_end'),
session_time,
"shift_end"
)
adjusted_finalized = parse_date_field(
schedule_data.get('finalized_at'),
session_time,
"finalized_at"
)
adjusted_created_at = parse_date_field(
schedule_data.get('created_at'),
session_time,
"created_at"
)
adjusted_updated_at = parse_date_field(
schedule_data.get('updated_at'),
session_time,
"updated_at"
) or adjusted_created_at
new_schedule = ProductionSchedule(
id=str(transformed_id),
tenant_id=virtual_uuid,
schedule_date=adjusted_schedule_date,
shift_start=adjusted_shift_start,
shift_end=adjusted_shift_end,
total_capacity_hours=schedule_data.get('total_capacity_hours'),
planned_capacity_hours=schedule_data.get('planned_capacity_hours'),
actual_capacity_hours=schedule_data.get('actual_capacity_hours'),
overtime_hours=schedule_data.get('overtime_hours', 0.0),
staff_count=schedule_data.get('staff_count'),
equipment_capacity=schedule_data.get('equipment_capacity'),
station_assignments=schedule_data.get('station_assignments'),
total_batches_planned=schedule_data.get('total_batches_planned', 0),
total_batches_completed=schedule_data.get('total_batches_completed', 0),
total_quantity_planned=schedule_data.get('total_quantity_planned', 0.0),
total_quantity_produced=schedule_data.get('total_quantity_produced', 0.0),
is_finalized=schedule_data.get('is_finalized', False),
is_active=schedule_data.get('is_active', True),
efficiency_percentage=schedule_data.get('efficiency_percentage'),
utilization_percentage=schedule_data.get('utilization_percentage'),
on_time_completion_rate=schedule_data.get('on_time_completion_rate'),
schedule_notes=schedule_data.get('schedule_notes'),
schedule_adjustments=schedule_data.get('schedule_adjustments'),
created_at=adjusted_created_at,
updated_at=adjusted_updated_at,
finalized_at=adjusted_finalized
)
db.add(new_schedule)
stats["production_schedules"] += 1
# Clone Production Capacity from seed data (if any)
for capacity_data in seed_data.get('production_capacity', []):
# Transform IDs
from shared.utils.demo_id_transformer import transform_id
try:
capacity_uuid = UUID(capacity_data['id'])
transformed_id = transform_id(capacity_data['id'], virtual_uuid)
except ValueError as e:
logger.error("Failed to parse capacity UUID",
capacity_id=capacity_data['id'],
error=str(e))
continue
# Parse date fields (supports BASE_TS markers and ISO timestamps)
adjusted_date = parse_date_field(
capacity_data.get('date'),
session_time,
"date"
)
adjusted_start_time = parse_date_field(
capacity_data.get('start_time'),
session_time,
"start_time"
)
adjusted_end_time = parse_date_field(
capacity_data.get('end_time'),
session_time,
"end_time"
)
adjusted_last_maintenance = parse_date_field(
capacity_data.get('last_maintenance_date'),
session_time,
"last_maintenance_date"
)
adjusted_created_at = parse_date_field(
capacity_data.get('created_at'),
session_time,
"created_at"
)
adjusted_updated_at = parse_date_field(
capacity_data.get('updated_at'),
session_time,
"updated_at"
) or adjusted_created_at
new_capacity = ProductionCapacity(
id=str(transformed_id),
tenant_id=virtual_uuid,
resource_type=capacity_data.get('resource_type'),
resource_id=capacity_data.get('resource_id'),
resource_name=capacity_data.get('resource_name'),
date=adjusted_date,
start_time=adjusted_start_time,
end_time=adjusted_end_time,
total_capacity_units=capacity_data.get('total_capacity_units'),
allocated_capacity_units=capacity_data.get('allocated_capacity_units'),
remaining_capacity_units=capacity_data.get('remaining_capacity_units'),
is_available=capacity_data.get('is_available'),
is_maintenance=capacity_data.get('is_maintenance'),
is_reserved=capacity_data.get('is_reserved'),
equipment_type=capacity_data.get('equipment_type'),
max_batch_size=capacity_data.get('max_batch_size'),
min_batch_size=capacity_data.get('min_batch_size'),
setup_time_minutes=capacity_data.get('setup_time_minutes'),
cleanup_time_minutes=capacity_data.get('cleanup_time_minutes'),
efficiency_rating=capacity_data.get('efficiency_rating'),
maintenance_status=capacity_data.get('maintenance_status'),
last_maintenance_date=adjusted_last_maintenance,
notes=capacity_data.get('notes'),
restrictions=capacity_data.get('restrictions'),
created_at=adjusted_created_at,
updated_at=adjusted_updated_at
)
db.add(new_capacity)
stats["production_capacity"] += 1
# Note: Edge cases are now handled exclusively through JSON seed data
# The seed data files already contain comprehensive edge cases including:
# - Overdue batches (should have started 2 hours ago)
# - In-progress batches (currently being processed)
# - Upcoming batches (scheduled for later today/tomorrow)
# This ensures standardization and single source of truth for demo data
logger.info(
"Edge cases handled by JSON seed data - no manual creation needed",
seed_data_edge_cases="overdue_batches, in_progress_batches, upcoming_batches"
)
# Commit cloned data
await db.commit()
# NOTE: Alert generation removed - alerts are now generated automatically by the
# production alert service which runs scheduled checks at appropriate intervals.
# This eliminates duplicate alerts and provides a more realistic demo experience.
stats["alerts_generated"] = 0
# Calculate total from non-alert stats
total_records = (stats["equipment"] + stats["batches"] + stats["production_schedules"] +
stats["quality_check_templates"] + stats["quality_checks"] +
stats["production_capacity"])
duration_ms = int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000)
logger.info(
"Production data cloning completed",
virtual_tenant_id=virtual_tenant_id,
total_records=total_records,
stats=stats,
duration_ms=duration_ms
)
return {
"service": "production",
"status": "completed",
"records_cloned": total_records,
"duration_ms": duration_ms,
"details": stats
}
except ValueError as e:
logger.error("Invalid UUID format", error=str(e))
raise HTTPException(status_code=400, detail=f"Invalid UUID: {str(e)}")
except Exception as e:
logger.error(
"Failed to clone production data",
error=str(e),
virtual_tenant_id=virtual_tenant_id,
exc_info=True
)
# Rollback on error
await db.rollback()
return {
"service": "production",
"status": "failed",
"records_cloned": 0,
"duration_ms": int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000),
"error": str(e)
}
@router.get("/clone/health")
async def clone_health_check(_: bool = Depends(verify_internal_api_key)):
"""
Health check for internal cloning endpoint
Used by orchestrator to verify service availability
"""
return {
"service": "production",
"clone_endpoint": "available",
"version": "2.0.0"
}
@router.delete("/tenant/{virtual_tenant_id}")
async def delete_demo_data(
virtual_tenant_id: str,
db: AsyncSession = Depends(get_db),
_: bool = Depends(verify_internal_api_key)
):
"""Delete all production data for a virtual demo tenant"""
logger.info("Deleting production data for virtual tenant", virtual_tenant_id=virtual_tenant_id)
start_time = datetime.now(timezone.utc)
try:
virtual_uuid = uuid.UUID(virtual_tenant_id)
# Count records
batch_count = await db.scalar(select(func.count(ProductionBatch.id)).where(ProductionBatch.tenant_id == virtual_uuid))
schedule_count = await db.scalar(select(func.count(ProductionSchedule.id)).where(ProductionSchedule.tenant_id == virtual_uuid))
quality_count = await db.scalar(select(func.count(QualityCheck.id)).where(QualityCheck.tenant_id == virtual_uuid))
equipment_count = await db.scalar(select(func.count(Equipment.id)).where(Equipment.tenant_id == virtual_uuid))
# Delete in order
await db.execute(delete(QualityCheck).where(QualityCheck.tenant_id == virtual_uuid))
await db.execute(delete(ProductionBatch).where(ProductionBatch.tenant_id == virtual_uuid))
await db.execute(delete(ProductionSchedule).where(ProductionSchedule.tenant_id == virtual_uuid))
await db.execute(delete(QualityCheckTemplate).where(QualityCheckTemplate.tenant_id == virtual_uuid))
await db.execute(delete(Equipment).where(Equipment.tenant_id == virtual_uuid))
await db.execute(delete(ProductionCapacity).where(ProductionCapacity.tenant_id == virtual_uuid))
await db.commit()
duration_ms = int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000)
logger.info("Production data deleted successfully", virtual_tenant_id=virtual_tenant_id, duration_ms=duration_ms)
return {
"service": "production",
"status": "deleted",
"virtual_tenant_id": virtual_tenant_id,
"records_deleted": {
"batches": batch_count,
"schedules": schedule_count,
"quality_checks": quality_count,
"equipment": equipment_count,
"total": batch_count + schedule_count + quality_count + equipment_count
},
"duration_ms": duration_ms
}
except Exception as e:
logger.error("Failed to delete production data", error=str(e), exc_info=True)
await db.rollback()
raise HTTPException(status_code=500, detail=str(e))