"""
Internal Demo Cloning API for Production Service

Service-to-service endpoint for cloning production data
"""

from fastapi import APIRouter, Depends, HTTPException, Header
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select, delete, func
import structlog
import uuid
from uuid import UUID
from datetime import datetime, timezone, timedelta
from typing import Optional, Dict, Any
import os
import json
from pathlib import Path

from app.core.database import get_db
from app.models.production import (
    ProductionBatch, ProductionSchedule, ProductionCapacity,
    QualityCheckTemplate, QualityCheck, Equipment,
    ProductionStatus, ProductionPriority, ProcessStage,
    EquipmentStatus, EquipmentType
)
from shared.utils.demo_dates import (
    adjust_date_for_demo, resolve_time_marker
)
from app.core.config import settings

logger = structlog.get_logger()
router = APIRouter()

# Base demo tenant IDs
DEMO_TENANT_PROFESSIONAL = "a1b2c3d4-e5f6-47a8-b9c0-d1e2f3a4b5c6"


def verify_internal_api_key(x_internal_api_key: Optional[str] = Header(None)):
    """Verify internal API key for service-to-service communication"""
    if x_internal_api_key != settings.INTERNAL_API_KEY:
        logger.warning("Unauthorized internal API access attempted")
        raise HTTPException(status_code=403, detail="Invalid internal API key")
    return True
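

# Illustrative caller sketch (not part of this module): how the demo orchestrator
# might invoke the clone endpoint below. The "X-Internal-API-Key" header name
# follows FastAPI's default underscore-to-hyphen mapping of the
# x_internal_api_key parameter; the host and URL prefix depend on how this
# router is mounted and are placeholders here.
#
#   import httpx
#   resp = httpx.post(
#       "http://production-service/internal/demo/clone",
#       params={
#           "base_tenant_id": DEMO_TENANT_PROFESSIONAL,
#           "virtual_tenant_id": "<virtual tenant uuid>",
#           "demo_account_type": "professional",
#       },
#       headers={"X-Internal-API-Key": "<shared secret>"},
#   )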


@router.post("/internal/demo/clone")
async def clone_demo_data(
    base_tenant_id: str,
    virtual_tenant_id: str,
    demo_account_type: str,
    session_id: Optional[str] = None,
    session_created_at: Optional[str] = None,
    db: AsyncSession = Depends(get_db),
    _: bool = Depends(verify_internal_api_key)
):
    """
    Clone production service data for a virtual demo tenant

    Clones:
    - Production batches (historical production runs)
    - Production schedules (daily planning)
    - Production capacity records
    - Quality check templates
    - Quality checks (inspection records)
    - Equipment (machines and tools)

    Args:
        base_tenant_id: Template tenant UUID to clone from
        virtual_tenant_id: Target virtual tenant UUID
        demo_account_type: Type of demo account
        session_id: Originating session ID for tracing
        session_created_at: Session creation timestamp for date adjustment

    Returns:
        Cloning status and record counts
    """
    start_time = datetime.now(timezone.utc)

    # Parse session creation time for date adjustment
    if session_created_at:
        try:
            session_time = datetime.fromisoformat(session_created_at.replace('Z', '+00:00'))
        except (ValueError, AttributeError):
            session_time = start_time
    else:
        session_time = start_time

    logger.info(
        "Starting production data cloning",
        base_tenant_id=base_tenant_id,
        virtual_tenant_id=virtual_tenant_id,
        demo_account_type=demo_account_type,
        session_id=session_id,
        session_created_at=session_created_at
    )

    try:
        # Validate UUIDs
        virtual_uuid = uuid.UUID(virtual_tenant_id)

        # Track cloning statistics
        stats = {
            "batches": 0,
            "production_schedules": 0,
            "production_capacity": 0,
            "quality_check_templates": 0,
            "quality_checks": 0,
            "equipment": 0,
            "alerts_generated": 0
        }

        def parse_date_field(date_value, session_time, field_name="date"):
            """Parse date field, handling both ISO strings and BASE_TS markers"""
            if not date_value:
                return None

            # Check if it's a BASE_TS marker
            if isinstance(date_value, str) and date_value.startswith("BASE_TS"):
                try:
                    return resolve_time_marker(date_value, session_time)
                except ValueError as e:
                    logger.warning(
                        f"Invalid BASE_TS marker in {field_name}",
                        marker=date_value,
                        error=str(e)
                    )
                    return None

            # Handle regular ISO date strings
            try:
                return adjust_date_for_demo(
                    datetime.fromisoformat(date_value.replace('Z', '+00:00')),
                    session_time
                )
            except (ValueError, AttributeError) as e:
                logger.warning(
                    f"Invalid date format in {field_name}",
                    date_value=date_value,
                    error=str(e)
                )
                return None
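
        # Illustrative inputs for the helper above (the exact BASE_TS marker grammar
        # is owned by shared.utils.demo_dates.resolve_time_marker; the "BASE_TS-2h"
        # form below is an assumed example, not a documented format):
        #   parse_date_field("2025-01-15T08:00:00Z", session_time)  # ISO string, shifted for the demo
        #   parse_date_field("BASE_TS-2h", session_time)            # marker resolved against session_time
        #   parse_date_field(None, session_time)                    # returns None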

        # Load seed data from JSON files
        try:
            from shared.utils.seed_data_paths import get_seed_data_path

            if demo_account_type == "professional":
                json_file = get_seed_data_path("professional", "06-production.json")
            elif demo_account_type == "enterprise":
                json_file = get_seed_data_path("enterprise", "06-production.json")
            else:
                raise ValueError(f"Invalid demo account type: {demo_account_type}")

        except ImportError:
            # Fallback to original path
            seed_data_dir = Path(__file__).parent.parent.parent.parent / "infrastructure" / "seed-data"
            if demo_account_type == "professional":
                json_file = seed_data_dir / "professional" / "06-production.json"
            elif demo_account_type == "enterprise":
                json_file = seed_data_dir / "enterprise" / "parent" / "06-production.json"
            else:
                raise ValueError(f"Invalid demo account type: {demo_account_type}")

        if not json_file.exists():
            raise HTTPException(
                status_code=404,
                detail=f"Seed data file not found: {json_file}"
            )

        # Load JSON data
        with open(json_file, 'r', encoding='utf-8') as f:
            seed_data = json.load(f)

        # Create Equipment first (no dependencies)
        for equipment_data in seed_data.get('equipment', []):
            # Transform equipment ID using XOR
            from shared.utils.demo_id_transformer import transform_id
            try:
                equipment_uuid = UUID(equipment_data['id'])
                transformed_id = transform_id(equipment_data['id'], virtual_uuid)
            except ValueError as e:
                logger.error("Failed to parse equipment UUID",
                             equipment_id=equipment_data['id'],
                             error=str(e))
                raise HTTPException(
                    status_code=400,
                    detail=f"Invalid UUID format in equipment data: {str(e)}"
                )
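
            # transform_id is assumed to be a deterministic mapping (XOR with the
            # virtual tenant UUID, per the comment above), so the same seed UUID
            # always yields the same virtual UUID. That is what keeps the
            # references transformed later in this endpoint (product_id, recipe_id,
            # order_id, forecast_id, equipment_used) consistent without a shared
            # lookup table.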

            # Parse date fields (supports BASE_TS markers and ISO timestamps)
            adjusted_install_date = parse_date_field(
                equipment_data.get('install_date'),
                session_time,
                "install_date"
            )
            adjusted_last_maintenance = parse_date_field(
                equipment_data.get('last_maintenance_date'),
                session_time,
                "last_maintenance_date"
            )
            adjusted_next_maintenance = parse_date_field(
                equipment_data.get('next_maintenance_date'),
                session_time,
                "next_maintenance_date"
            )
            adjusted_created_at = parse_date_field(
                equipment_data.get('created_at'),
                session_time,
                "created_at"
            )
            adjusted_updated_at = parse_date_field(
                equipment_data.get('updated_at'),
                session_time,
                "updated_at"
            )

            new_equipment = Equipment(
                id=str(transformed_id),
                tenant_id=virtual_uuid,
                name=equipment_data['name'],
                type=equipment_data['type'],
                model=equipment_data['model'],
                serial_number=equipment_data.get('serial_number'),
                location=equipment_data['location'],
                status=equipment_data['status'],
                install_date=adjusted_install_date,
                last_maintenance_date=adjusted_last_maintenance,
                next_maintenance_date=adjusted_next_maintenance,
                maintenance_interval_days=equipment_data.get('maintenance_interval_days'),
                efficiency_percentage=equipment_data.get('efficiency_percentage'),
                uptime_percentage=equipment_data.get('uptime_percentage'),
                energy_usage_kwh=equipment_data.get('energy_usage_kwh'),
                power_kw=equipment_data.get('power_kw'),
                capacity=equipment_data.get('capacity'),
                weight_kg=equipment_data.get('weight_kg'),
                current_temperature=equipment_data.get('current_temperature'),
                target_temperature=equipment_data.get('target_temperature'),
                is_active=equipment_data.get('is_active', True),
                notes=equipment_data.get('notes'),
                created_at=adjusted_created_at,
                updated_at=adjusted_updated_at
            )
            db.add(new_equipment)
            stats["equipment"] += 1

        # Flush to get equipment IDs
        await db.flush()

        # Clone Quality Check Templates from seed data
        template_id_map = {}

        for template_data in seed_data.get('quality_check_templates', []):
            # Transform template ID using XOR
            from shared.utils.demo_id_transformer import transform_id
            try:
                template_uuid = UUID(template_data['id'])
                transformed_id = transform_id(template_data['id'], virtual_uuid)
            except ValueError as e:
                logger.error("Failed to parse template UUID",
                             template_id=template_data['id'],
                             error=str(e))
                continue

            template_id_map[UUID(template_data['id'])] = transformed_id

            # Parse date fields (supports BASE_TS markers and ISO timestamps)
            adjusted_created_at = parse_date_field(
                template_data.get('created_at'),
                session_time,
                "created_at"
            ) or session_time
            adjusted_updated_at = parse_date_field(
                template_data.get('updated_at'),
                session_time,
                "updated_at"
            ) or adjusted_created_at

            new_template = QualityCheckTemplate(
                id=str(transformed_id),
                tenant_id=virtual_uuid,
                name=template_data.get('name'),
                template_code=template_data.get('template_code'),
                check_type=template_data.get('check_type'),
                category=template_data.get('category'),
                description=template_data.get('description'),
                instructions=template_data.get('instructions'),
                parameters=template_data.get('parameters'),
                thresholds=template_data.get('thresholds'),
                scoring_criteria=template_data.get('scoring_criteria'),
                is_active=template_data.get('is_active', True),
                is_required=template_data.get('is_required', False),
                is_critical=template_data.get('is_critical', False),
                weight=template_data.get('weight', 1.0),
                min_value=template_data.get('min_value'),
                max_value=template_data.get('max_value'),
                target_value=template_data.get('target_value'),
                unit=template_data.get('unit'),
                tolerance_percentage=template_data.get('tolerance_percentage'),
                applicable_stages=template_data.get('applicable_stages'),
                created_by=template_data.get('created_by'),
                created_at=adjusted_created_at,
                updated_at=adjusted_updated_at
            )
            db.add(new_template)
            stats["quality_check_templates"] += 1

        # Flush to get template IDs
        await db.flush()

        # Clone Production Batches from seed data
        batch_id_map = {}
        for batch_data in seed_data.get('batches', []):
            # Transform batch ID using XOR
            from shared.utils.demo_id_transformer import transform_id
            try:
                batch_uuid = UUID(batch_data['id'])
                transformed_id = transform_id(batch_data['id'], virtual_uuid)
            except ValueError as e:
                logger.error("Failed to parse batch UUID",
                             batch_id=batch_data['id'],
                             error=str(e))
                raise HTTPException(
                    status_code=400,
                    detail=f"Invalid UUID format in batch data: {str(e)}"
                )

            batch_id_map[UUID(batch_data['id'])] = transformed_id

            # Adjust dates relative to session creation time
            adjusted_planned_start = parse_date_field(batch_data.get('planned_start_time'), session_time, "planned_start_time")
            adjusted_planned_end = parse_date_field(batch_data.get('planned_end_time'), session_time, "planned_end_time")
            adjusted_actual_start = parse_date_field(batch_data.get('actual_start_time'), session_time, "actual_start_time")
            adjusted_actual_end = parse_date_field(batch_data.get('actual_end_time'), session_time, "actual_end_time")
            adjusted_completed = parse_date_field(batch_data.get('completed_at'), session_time, "completed_at")
            adjusted_created_at = parse_date_field(batch_data.get('created_at'), session_time, "created_at") or session_time
            adjusted_updated_at = parse_date_field(batch_data.get('updated_at'), session_time, "updated_at") or adjusted_created_at

            # Map status and priority enums
            status_value = batch_data.get('status', 'PENDING')
            if isinstance(status_value, str):
                try:
                    status_value = ProductionStatus[status_value]
                except KeyError:
                    status_value = ProductionStatus.PENDING

            priority_value = batch_data.get('priority', 'MEDIUM')
            if isinstance(priority_value, str):
                try:
                    priority_value = ProductionPriority[priority_value]
                except KeyError:
                    priority_value = ProductionPriority.MEDIUM

            # Map process stage enum
            process_stage_value = batch_data.get('current_process_stage')
            if process_stage_value and isinstance(process_stage_value, str):
                try:
                    process_stage_value = ProcessStage[process_stage_value]
                except KeyError:
                    process_stage_value = None

            # Transform foreign key references (product_id, recipe_id, order_id, forecast_id)
            transformed_product_id = None
            if batch_data.get('product_id'):
                try:
                    transformed_product_id = str(transform_id(batch_data['product_id'], virtual_uuid))
                except (ValueError, Exception) as e:
                    logger.warning("Failed to transform product_id",
                                   product_id=batch_data.get('product_id'),
                                   error=str(e))

            transformed_recipe_id = None
            if batch_data.get('recipe_id'):
                try:
                    transformed_recipe_id = str(transform_id(batch_data['recipe_id'], virtual_uuid))
                except (ValueError, Exception) as e:
                    logger.warning("Failed to transform recipe_id",
                                   recipe_id=batch_data.get('recipe_id'),
                                   error=str(e))

            transformed_order_id = None
            if batch_data.get('order_id'):
                try:
                    transformed_order_id = str(transform_id(batch_data['order_id'], virtual_uuid))
                except (ValueError, Exception) as e:
                    logger.warning("Failed to transform order_id",
                                   order_id=batch_data.get('order_id'),
                                   error=str(e))

            transformed_forecast_id = None
            if batch_data.get('forecast_id'):
                try:
                    transformed_forecast_id = str(transform_id(batch_data['forecast_id'], virtual_uuid))
                except (ValueError, Exception) as e:
                    logger.warning("Failed to transform forecast_id",
                                   forecast_id=batch_data.get('forecast_id'),
                                   error=str(e))

            # Transform equipment_used array
            transformed_equipment = []
            if batch_data.get('equipment_used'):
                for equip_id in batch_data['equipment_used']:
                    try:
                        transformed_equipment.append(str(transform_id(equip_id, virtual_uuid)))
                    except (ValueError, Exception) as e:
                        logger.warning("Failed to transform equipment_id",
                                       equipment_id=equip_id,
                                       error=str(e))

            # staff_assigned contains user IDs - these should NOT be transformed
            # because they reference actual user accounts which are NOT cloned
            # The demo uses the same user accounts across all virtual tenants
            staff_assigned = batch_data.get('staff_assigned', [])

            new_batch = ProductionBatch(
                id=str(transformed_id),
                tenant_id=virtual_uuid,
                batch_number=f"{session_id[:8]}-{batch_data.get('batch_number', f'BATCH-{uuid.uuid4().hex[:8].upper()}')}",
                product_id=transformed_product_id,
                product_name=batch_data.get('product_name'),
                recipe_id=transformed_recipe_id,
                planned_start_time=adjusted_planned_start,
                planned_end_time=adjusted_planned_end,
                planned_quantity=batch_data.get('planned_quantity'),
                planned_duration_minutes=batch_data.get('planned_duration_minutes'),
                actual_start_time=adjusted_actual_start,
                actual_end_time=adjusted_actual_end,
                actual_quantity=batch_data.get('actual_quantity'),
                actual_duration_minutes=batch_data.get('actual_duration_minutes'),
                status=status_value,
                priority=priority_value,
                current_process_stage=process_stage_value,
                process_stage_history=batch_data.get('process_stage_history'),
                pending_quality_checks=batch_data.get('pending_quality_checks'),
                completed_quality_checks=batch_data.get('completed_quality_checks'),
                estimated_cost=batch_data.get('estimated_cost'),
                actual_cost=batch_data.get('actual_cost'),
                labor_cost=batch_data.get('labor_cost'),
                material_cost=batch_data.get('material_cost'),
                overhead_cost=batch_data.get('overhead_cost'),
                yield_percentage=batch_data.get('yield_percentage'),
                quality_score=batch_data.get('quality_score'),
                waste_quantity=batch_data.get('waste_quantity'),
                defect_quantity=batch_data.get('defect_quantity'),
                waste_defect_type=batch_data.get('waste_defect_type'),
                equipment_used=transformed_equipment,
                staff_assigned=staff_assigned,
                station_id=batch_data.get('station_id'),
                order_id=transformed_order_id,
                forecast_id=transformed_forecast_id,
                is_rush_order=batch_data.get('is_rush_order', False),
                is_special_recipe=batch_data.get('is_special_recipe', False),
                is_ai_assisted=batch_data.get('is_ai_assisted', False),
                production_notes=batch_data.get('production_notes'),
                quality_notes=batch_data.get('quality_notes'),
                delay_reason=batch_data.get('delay_reason'),
                cancellation_reason=batch_data.get('cancellation_reason'),
                created_at=adjusted_created_at,
                updated_at=adjusted_updated_at,
                completed_at=adjusted_completed
            )
            db.add(new_batch)
            stats["batches"] += 1

        # Flush to get batch IDs
        await db.flush()

        # Clone Quality Checks from seed data (if any)
        for check_data in seed_data.get('quality_checks', []):
            # Transform IDs
            from shared.utils.demo_id_transformer import transform_id
            try:
                check_uuid = UUID(check_data['id'])
                transformed_id = transform_id(check_data['id'], virtual_uuid)
            except ValueError as e:
                logger.error("Failed to parse check UUID",
                             check_id=check_data['id'],
                             error=str(e))
                continue

            # Map batch_id if it exists in our map
            batch_id_value = check_data.get('batch_id')
            if batch_id_value:
                batch_id_value = batch_id_map.get(UUID(batch_id_value), UUID(batch_id_value))

            # Map template_id if it exists
            template_id_value = check_data.get('template_id')
            if template_id_value:
                template_id_value = template_id_map.get(UUID(template_id_value), UUID(template_id_value))

            # Parse date fields (supports BASE_TS markers and ISO timestamps)
            adjusted_check_time = parse_date_field(
                check_data.get('check_time'),
                session_time,
                "check_time"
            )
            adjusted_created_at = parse_date_field(
                check_data.get('created_at'),
                session_time,
                "created_at"
            )
            adjusted_updated_at = parse_date_field(
                check_data.get('updated_at'),
                session_time,
                "updated_at"
            ) or adjusted_created_at

            new_check = QualityCheck(
                id=str(transformed_id),
                tenant_id=virtual_uuid,
                batch_id=str(batch_id_value) if batch_id_value else None,
                template_id=str(template_id_value) if template_id_value else None,
                check_type=check_data.get('check_type'),
                process_stage=check_data.get('process_stage'),
                check_time=adjusted_check_time,
                checker_id=check_data.get('checker_id'),
                quality_score=check_data.get('quality_score'),
                pass_fail=check_data.get('pass_fail'),
                defect_count=check_data.get('defect_count'),
                defect_types=check_data.get('defect_types'),
                measured_weight=check_data.get('measured_weight'),
                measured_temperature=check_data.get('measured_temperature'),
                measured_moisture=check_data.get('measured_moisture'),
                measured_dimensions=check_data.get('measured_dimensions'),
                stage_specific_data=check_data.get('stage_specific_data'),
                target_weight=check_data.get('target_weight'),
                target_temperature=check_data.get('target_temperature'),
                target_moisture=check_data.get('target_moisture'),
                tolerance_percentage=check_data.get('tolerance_percentage'),
                within_tolerance=check_data.get('within_tolerance'),
                corrective_action_needed=check_data.get('corrective_action_needed'),
                corrective_actions=check_data.get('corrective_actions'),
                template_results=check_data.get('template_results'),
                criteria_scores=check_data.get('criteria_scores'),
                check_notes=check_data.get('check_notes'),
                photos_urls=check_data.get('photos_urls'),
                certificate_url=check_data.get('certificate_url'),
                created_at=adjusted_created_at,
                updated_at=adjusted_updated_at
            )
            db.add(new_check)
            stats["quality_checks"] += 1

        # Clone Production Schedules from seed data (if any)
        for schedule_data in seed_data.get('production_schedules', []):
            # Transform IDs
            from shared.utils.demo_id_transformer import transform_id
            try:
                schedule_uuid = UUID(schedule_data['id'])
                transformed_id = transform_id(schedule_data['id'], virtual_uuid)
            except ValueError as e:
                logger.error("Failed to parse schedule UUID",
                             schedule_id=schedule_data['id'],
                             error=str(e))
                continue

            # Parse date fields (supports BASE_TS markers and ISO timestamps)
            adjusted_schedule_date = parse_date_field(
                schedule_data.get('schedule_date'),
                session_time,
                "schedule_date"
            )
            adjusted_shift_start = parse_date_field(
                schedule_data.get('shift_start'),
                session_time,
                "shift_start"
            )
            adjusted_shift_end = parse_date_field(
                schedule_data.get('shift_end'),
                session_time,
                "shift_end"
            )
            adjusted_finalized = parse_date_field(
                schedule_data.get('finalized_at'),
                session_time,
                "finalized_at"
            )
            adjusted_created_at = parse_date_field(
                schedule_data.get('created_at'),
                session_time,
                "created_at"
            )
            adjusted_updated_at = parse_date_field(
                schedule_data.get('updated_at'),
                session_time,
                "updated_at"
            ) or adjusted_created_at

            new_schedule = ProductionSchedule(
                id=str(transformed_id),
                tenant_id=virtual_uuid,
                schedule_date=adjusted_schedule_date,
                shift_start=adjusted_shift_start,
                shift_end=adjusted_shift_end,
                total_capacity_hours=schedule_data.get('total_capacity_hours'),
                planned_capacity_hours=schedule_data.get('planned_capacity_hours'),
                actual_capacity_hours=schedule_data.get('actual_capacity_hours'),
                overtime_hours=schedule_data.get('overtime_hours', 0.0),
                staff_count=schedule_data.get('staff_count'),
                equipment_capacity=schedule_data.get('equipment_capacity'),
                station_assignments=schedule_data.get('station_assignments'),
                total_batches_planned=schedule_data.get('total_batches_planned', 0),
                total_batches_completed=schedule_data.get('total_batches_completed', 0),
                total_quantity_planned=schedule_data.get('total_quantity_planned', 0.0),
                total_quantity_produced=schedule_data.get('total_quantity_produced', 0.0),
                is_finalized=schedule_data.get('is_finalized', False),
                is_active=schedule_data.get('is_active', True),
                efficiency_percentage=schedule_data.get('efficiency_percentage'),
                utilization_percentage=schedule_data.get('utilization_percentage'),
                on_time_completion_rate=schedule_data.get('on_time_completion_rate'),
                schedule_notes=schedule_data.get('schedule_notes'),
                schedule_adjustments=schedule_data.get('schedule_adjustments'),
                created_at=adjusted_created_at,
                updated_at=adjusted_updated_at,
                finalized_at=adjusted_finalized
            )
            db.add(new_schedule)
            stats["production_schedules"] += 1

        # Clone Production Capacity from seed data (if any)
        for capacity_data in seed_data.get('production_capacity', []):
            # Transform IDs
            from shared.utils.demo_id_transformer import transform_id
            try:
                capacity_uuid = UUID(capacity_data['id'])
                transformed_id = transform_id(capacity_data['id'], virtual_uuid)
            except ValueError as e:
                logger.error("Failed to parse capacity UUID",
                             capacity_id=capacity_data['id'],
                             error=str(e))
                continue

            # Parse date fields (supports BASE_TS markers and ISO timestamps)
            adjusted_date = parse_date_field(
                capacity_data.get('date'),
                session_time,
                "date"
            )
            adjusted_start_time = parse_date_field(
                capacity_data.get('start_time'),
                session_time,
                "start_time"
            )
            adjusted_end_time = parse_date_field(
                capacity_data.get('end_time'),
                session_time,
                "end_time"
            )
            adjusted_last_maintenance = parse_date_field(
                capacity_data.get('last_maintenance_date'),
                session_time,
                "last_maintenance_date"
            )
            adjusted_created_at = parse_date_field(
                capacity_data.get('created_at'),
                session_time,
                "created_at"
            )
            adjusted_updated_at = parse_date_field(
                capacity_data.get('updated_at'),
                session_time,
                "updated_at"
            ) or adjusted_created_at

            new_capacity = ProductionCapacity(
                id=str(transformed_id),
                tenant_id=virtual_uuid,
                resource_type=capacity_data.get('resource_type'),
                resource_id=capacity_data.get('resource_id'),
                resource_name=capacity_data.get('resource_name'),
                date=adjusted_date,
                start_time=adjusted_start_time,
                end_time=adjusted_end_time,
                total_capacity_units=capacity_data.get('total_capacity_units'),
                allocated_capacity_units=capacity_data.get('allocated_capacity_units'),
                remaining_capacity_units=capacity_data.get('remaining_capacity_units'),
                is_available=capacity_data.get('is_available'),
                is_maintenance=capacity_data.get('is_maintenance'),
                is_reserved=capacity_data.get('is_reserved'),
                equipment_type=capacity_data.get('equipment_type'),
                max_batch_size=capacity_data.get('max_batch_size'),
                min_batch_size=capacity_data.get('min_batch_size'),
                setup_time_minutes=capacity_data.get('setup_time_minutes'),
                cleanup_time_minutes=capacity_data.get('cleanup_time_minutes'),
                efficiency_rating=capacity_data.get('efficiency_rating'),
                maintenance_status=capacity_data.get('maintenance_status'),
                last_maintenance_date=adjusted_last_maintenance,
                notes=capacity_data.get('notes'),
                restrictions=capacity_data.get('restrictions'),
                created_at=adjusted_created_at,
                updated_at=adjusted_updated_at
            )
            db.add(new_capacity)
            stats["production_capacity"] += 1

        # Note: Edge cases are now handled exclusively through JSON seed data
        # The seed data files already contain comprehensive edge cases including:
        # - Overdue batches (should have started 2 hours ago)
        # - In-progress batches (currently being processed)
        # - Upcoming batches (scheduled for later today/tomorrow)
        # This ensures standardization and single source of truth for demo data

        logger.info(
            "Edge cases handled by JSON seed data - no manual creation needed",
            seed_data_edge_cases="overdue_batches, in_progress_batches, upcoming_batches"
        )

        # Commit cloned data
        await db.commit()

        # NOTE: Alert generation removed - alerts are now generated automatically by the
        # production alert service which runs scheduled checks at appropriate intervals.
        # This eliminates duplicate alerts and provides a more realistic demo experience.
        stats["alerts_generated"] = 0

        # Calculate total from non-alert stats
        total_records = (stats["equipment"] + stats["batches"] + stats["production_schedules"] +
                         stats["quality_check_templates"] + stats["quality_checks"] +
                         stats["production_capacity"])
        duration_ms = int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000)

        logger.info(
            "Production data cloning completed",
            virtual_tenant_id=virtual_tenant_id,
            total_records=total_records,
            stats=stats,
            duration_ms=duration_ms
        )

        return {
            "service": "production",
            "status": "completed",
            "records_cloned": total_records,
            "duration_ms": duration_ms,
            "details": stats
        }

    except ValueError as e:
        logger.error("Invalid UUID format", error=str(e))
        raise HTTPException(status_code=400, detail=f"Invalid UUID: {str(e)}")

    except Exception as e:
        logger.error(
            "Failed to clone production data",
            error=str(e),
            virtual_tenant_id=virtual_tenant_id,
            exc_info=True
        )

        # Rollback on error
        await db.rollback()

        return {
            "service": "production",
            "status": "failed",
            "records_cloned": 0,
            "duration_ms": int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000),
            "error": str(e)
        }


@router.get("/clone/health")
async def clone_health_check(_: bool = Depends(verify_internal_api_key)):
    """
    Health check for internal cloning endpoint

    Used by orchestrator to verify service availability
    """
    return {
        "service": "production",
        "clone_endpoint": "available",
        "version": "2.0.0"
    }


@router.delete("/tenant/{virtual_tenant_id}")
async def delete_demo_data(
    virtual_tenant_id: str,
    db: AsyncSession = Depends(get_db),
    _: bool = Depends(verify_internal_api_key)
):
    """Delete all production data for a virtual demo tenant"""
    logger.info("Deleting production data for virtual tenant", virtual_tenant_id=virtual_tenant_id)
    start_time = datetime.now(timezone.utc)

    try:
        virtual_uuid = uuid.UUID(virtual_tenant_id)

        # Count records
        batch_count = await db.scalar(select(func.count(ProductionBatch.id)).where(ProductionBatch.tenant_id == virtual_uuid))
        schedule_count = await db.scalar(select(func.count(ProductionSchedule.id)).where(ProductionSchedule.tenant_id == virtual_uuid))
        quality_count = await db.scalar(select(func.count(QualityCheck.id)).where(QualityCheck.tenant_id == virtual_uuid))
        equipment_count = await db.scalar(select(func.count(Equipment.id)).where(Equipment.tenant_id == virtual_uuid))

        # Delete in order
        await db.execute(delete(QualityCheck).where(QualityCheck.tenant_id == virtual_uuid))
        await db.execute(delete(ProductionBatch).where(ProductionBatch.tenant_id == virtual_uuid))
        await db.execute(delete(ProductionSchedule).where(ProductionSchedule.tenant_id == virtual_uuid))
        await db.execute(delete(QualityCheckTemplate).where(QualityCheckTemplate.tenant_id == virtual_uuid))
        await db.execute(delete(Equipment).where(Equipment.tenant_id == virtual_uuid))
        await db.execute(delete(ProductionCapacity).where(ProductionCapacity.tenant_id == virtual_uuid))
        await db.commit()

        duration_ms = int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000)
        logger.info("Production data deleted successfully", virtual_tenant_id=virtual_tenant_id, duration_ms=duration_ms)

        return {
            "service": "production",
            "status": "deleted",
            "virtual_tenant_id": virtual_tenant_id,
            "records_deleted": {
                "batches": batch_count,
                "schedules": schedule_count,
                "quality_checks": quality_count,
                "equipment": equipment_count,
                "total": batch_count + schedule_count + quality_count + equipment_count
            },
            "duration_ms": duration_ms
        }
    except Exception as e:
        logger.error("Failed to delete production data", error=str(e), exc_info=True)
        await db.rollback()
        raise HTTPException(status_code=500, detail=str(e))