bakery-ia/services/production/app/api/internal_demo.py

"""
Internal Demo Cloning API for Production Service
Service-to-service endpoint for cloning production data
"""
from fastapi import APIRouter, Depends, HTTPException, Header
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select, delete, func
import structlog
import uuid
from uuid import UUID
from datetime import datetime, timezone, timedelta
from typing import Optional, Dict, Any
import os
import json
from pathlib import Path
from app.core.database import get_db
from app.models.production import (
ProductionBatch, ProductionSchedule, ProductionCapacity,
QualityCheckTemplate, QualityCheck, Equipment,
ProductionStatus, ProductionPriority, ProcessStage,
EquipmentStatus, EquipmentType
)
from shared.utils.demo_dates import (
    adjust_date_for_demo, resolve_time_marker, calculate_edge_case_times
)
from shared.utils.demo_id_transformer import transform_id
from app.core.config import settings
logger = structlog.get_logger()
router = APIRouter()
# Base demo tenant ID (professional template)
DEMO_TENANT_PROFESSIONAL = "a1b2c3d4-e5f6-47a8-b9c0-d1e2f3a4b5c6"
def verify_internal_api_key(x_internal_api_key: Optional[str] = Header(None)):
"""Verify internal API key for service-to-service communication"""
if x_internal_api_key != settings.INTERNAL_API_KEY:
logger.warning("Unauthorized internal API access attempted")
raise HTTPException(status_code=403, detail="Invalid internal API key")
return True
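
# Illustrative sketch of how a calling service is assumed to invoke the clone
# endpoint below. Host, port and any router prefix are assumptions; only the
# X-Internal-API-Key header and the query parameter names mirror this module.
#
#   import httpx
#   response = httpx.post(
#       "http://production-service:8000/internal/demo/clone",  # hypothetical URL
#       params={
#           "base_tenant_id": DEMO_TENANT_PROFESSIONAL,
#           "virtual_tenant_id": "7f3e...",                     # target virtual tenant UUID
#           "demo_account_type": "professional",
#           "session_id": "demo-session-1",                     # hypothetical session id
#           "session_created_at": "2025-01-10T06:00:00Z",
#       },
#       headers={"X-Internal-API-Key": settings.INTERNAL_API_KEY},
#   )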
@router.post("/internal/demo/clone")
async def clone_demo_data(
base_tenant_id: str,
virtual_tenant_id: str,
demo_account_type: str,
session_id: Optional[str] = None,
session_created_at: Optional[str] = None,
db: AsyncSession = Depends(get_db),
_: bool = Depends(verify_internal_api_key)
):
"""
Clone production service data for a virtual demo tenant
Clones:
- Production batches (historical production runs)
- Production schedules (daily planning)
- Production capacity records
- Quality check templates
- Quality checks (inspection records)
- Equipment (machines and tools)
Args:
base_tenant_id: Template tenant UUID to clone from
virtual_tenant_id: Target virtual tenant UUID
        demo_account_type: Type of demo account ("professional" or "enterprise")
session_id: Originating session ID for tracing
session_created_at: Session creation timestamp for date adjustment
Returns:
Cloning status and record counts
"""
start_time = datetime.now(timezone.utc)
# Parse session creation time for date adjustment
if session_created_at:
try:
session_time = datetime.fromisoformat(session_created_at.replace('Z', '+00:00'))
except (ValueError, AttributeError):
session_time = start_time
else:
session_time = start_time
logger.info(
"Starting production data cloning",
base_tenant_id=base_tenant_id,
virtual_tenant_id=virtual_tenant_id,
demo_account_type=demo_account_type,
session_id=session_id,
session_created_at=session_created_at
)
try:
# Validate UUIDs
        virtual_uuid = uuid.UUID(virtual_tenant_id)
        # Prefix used to namespace cloned batch numbers; falls back to the
        # virtual tenant ID because session_id is optional and may be None
        batch_prefix = (session_id or virtual_tenant_id)[:8]
        # Track cloning statistics
        stats = {
            "batches": 0,
            "production_schedules": 0,
            "production_capacity": 0,
            "quality_check_templates": 0,
            "quality_checks": 0,
            "equipment": 0,
            "alerts_generated": 0
        }
def parse_date_field(date_value, session_time, field_name="date"):
"""Parse date field, handling both ISO strings and BASE_TS markers"""
if not date_value:
return None
# Check if it's a BASE_TS marker
if isinstance(date_value, str) and date_value.startswith("BASE_TS"):
try:
return resolve_time_marker(date_value, session_time)
except ValueError as e:
logger.warning(
f"Invalid BASE_TS marker in {field_name}",
marker=date_value,
error=str(e)
)
return None
# Handle regular ISO date strings
try:
return adjust_date_for_demo(
datetime.fromisoformat(date_value.replace('Z', '+00:00')),
session_time
)
except (ValueError, AttributeError) as e:
logger.warning(
f"Invalid date format in {field_name}",
date_value=date_value,
error=str(e)
)
return None
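        # Illustrative parse_date_field behaviour (the exact BASE_TS marker grammar
        # is defined by resolve_time_marker in shared.utils.demo_dates; the marker
        # spelling below is an assumption):
        #
        #   parse_date_field("2025-01-10T06:00:00Z", session_time, "planned_start_time")
        #       -> ISO timestamp re-anchored to the demo session via adjust_date_for_demo
        #   parse_date_field("BASE_TS+2h", session_time, "check_time")
        #       -> resolved relative to session creation time via resolve_time_marker
        #   parse_date_field(None, session_time)
        #       -> None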
# Load seed data from JSON files
try:
from shared.utils.seed_data_paths import get_seed_data_path
if demo_account_type == "professional":
json_file = get_seed_data_path("professional", "06-production.json")
elif demo_account_type == "enterprise":
json_file = get_seed_data_path("enterprise", "06-production.json")
else:
raise ValueError(f"Invalid demo account type: {demo_account_type}")
except ImportError:
# Fallback to original path
seed_data_dir = Path(__file__).parent.parent.parent.parent / "infrastructure" / "seed-data"
if demo_account_type == "professional":
json_file = seed_data_dir / "professional" / "06-production.json"
elif demo_account_type == "enterprise":
json_file = seed_data_dir / "enterprise" / "parent" / "06-production.json"
else:
raise ValueError(f"Invalid demo account type: {demo_account_type}")
if not json_file.exists():
raise HTTPException(
status_code=404,
detail=f"Seed data file not found: {json_file}"
)
# Load JSON data
with open(json_file, 'r', encoding='utf-8') as f:
seed_data = json.load(f)
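        # Based on the keys read below, the seed file is expected to look roughly like:
        #
        #   {
        #     "equipment": [...],
        #     "batches": [...],
        #     "quality_checks": [...],
        #     "production_schedules": [...],
        #     "production_capacity": [...]
        #   }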
# Create Equipment first (no dependencies)
for equipment_data in seed_data.get('equipment', []):
# Transform equipment ID using XOR
try:
equipment_uuid = UUID(equipment_data['id'])
transformed_id = transform_id(equipment_data['id'], virtual_uuid)
except ValueError as e:
logger.error("Failed to parse equipment UUID",
equipment_id=equipment_data['id'],
error=str(e))
raise HTTPException(
status_code=400,
detail=f"Invalid UUID format in equipment data: {str(e)}"
)
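            # Conceptual sketch of the XOR transform (the real implementation is
            # shared.utils.demo_id_transformer.transform_id; this is an assumption
            # about its behaviour, not its actual code): XOR-ing the seed UUID's
            # bytes with the virtual tenant's bytes yields a deterministic,
            # reversible per-tenant ID, so the same seed row always maps to the
            # same UUID for a given virtual tenant, e.g.:
            #
            #   def _xor_uuid(seed: UUID, tenant: UUID) -> UUID:
            #       return UUID(bytes=bytes(a ^ b for a, b in zip(seed.bytes, tenant.bytes)))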
            # Parse date fields (supports BASE_TS markers and ISO timestamps)
            adjusted_install_date = parse_date_field(equipment_data.get('install_date'), session_time, "install_date")
            adjusted_last_maintenance = parse_date_field(equipment_data.get('last_maintenance_date'), session_time, "last_maintenance_date")
            adjusted_next_maintenance = parse_date_field(equipment_data.get('next_maintenance_date'), session_time, "next_maintenance_date")
            adjusted_created_at = parse_date_field(equipment_data.get('created_at'), session_time, "created_at")
            adjusted_updated_at = parse_date_field(equipment_data.get('updated_at'), session_time, "updated_at")
new_equipment = Equipment(
id=str(transformed_id),
tenant_id=virtual_uuid,
name=equipment_data['name'],
type=equipment_data['type'],
model=equipment_data['model'],
serial_number=equipment_data.get('serial_number'),
location=equipment_data['location'],
status=equipment_data['status'],
install_date=adjusted_install_date,
last_maintenance_date=adjusted_last_maintenance,
next_maintenance_date=adjusted_next_maintenance,
maintenance_interval_days=equipment_data.get('maintenance_interval_days'),
efficiency_percentage=equipment_data.get('efficiency_percentage'),
uptime_percentage=equipment_data.get('uptime_percentage'),
energy_usage_kwh=equipment_data.get('energy_usage_kwh'),
power_kw=equipment_data.get('power_kw'),
capacity=equipment_data.get('capacity'),
weight_kg=equipment_data.get('weight_kg'),
current_temperature=equipment_data.get('current_temperature'),
target_temperature=equipment_data.get('target_temperature'),
is_active=equipment_data.get('is_active', True),
notes=equipment_data.get('notes'),
created_at=adjusted_created_at,
updated_at=adjusted_updated_at
)
db.add(new_equipment)
stats["equipment"] += 1
# Flush to get equipment IDs
await db.flush()
# Clone Quality Check Templates
# Note: Quality check templates are not included in seed data
# They would need to be added to the production seed data if needed
template_id_map = {}
base_templates = []
logger.info(
"No quality check templates to clone (not in seed data)",
count=len(base_templates)
)
# Only create templates if they exist in base templates
for template in base_templates:
new_template_id = uuid.uuid4()
template_id_map[template.id] = new_template_id
new_template = QualityCheckTemplate(
id=new_template_id,
tenant_id=virtual_uuid,
name=template.name,
template_code=template.template_code,
check_type=template.check_type,
category=template.category,
description=template.description,
instructions=template.instructions,
parameters=template.parameters,
thresholds=template.thresholds,
scoring_criteria=template.scoring_criteria,
is_active=template.is_active,
is_required=template.is_required,
is_critical=template.is_critical,
weight=template.weight,
min_value=template.min_value,
max_value=template.max_value,
target_value=template.target_value,
unit=template.unit,
tolerance_percentage=template.tolerance_percentage,
applicable_stages=template.applicable_stages,
created_by=template.created_by,
created_at=session_time,
updated_at=session_time
)
db.add(new_template)
stats["quality_check_templates"] += 1
# Flush to get template IDs
await db.flush()
# Clone Production Batches from seed data
batch_id_map = {}
for batch_data in seed_data.get('batches', []):
# Transform batch ID using XOR
try:
batch_uuid = UUID(batch_data['id'])
transformed_id = transform_id(batch_data['id'], virtual_uuid)
except ValueError as e:
logger.error("Failed to parse batch UUID",
batch_id=batch_data['id'],
error=str(e))
raise HTTPException(
status_code=400,
detail=f"Invalid UUID format in batch data: {str(e)}"
)
batch_id_map[UUID(batch_data['id'])] = transformed_id
# Adjust dates relative to session creation time
adjusted_planned_start = parse_date_field(batch_data.get('planned_start_time'), session_time, "planned_start_time")
adjusted_planned_end = parse_date_field(batch_data.get('planned_end_time'), session_time, "planned_end_time")
adjusted_actual_start = parse_date_field(batch_data.get('actual_start_time'), session_time, "actual_start_time")
adjusted_actual_end = parse_date_field(batch_data.get('actual_end_time'), session_time, "actual_end_time")
adjusted_completed = parse_date_field(batch_data.get('completed_at'), session_time, "completed_at")
adjusted_created_at = parse_date_field(batch_data.get('created_at'), session_time, "created_at") or session_time
adjusted_updated_at = parse_date_field(batch_data.get('updated_at'), session_time, "updated_at") or adjusted_created_at
# Map status and priority enums
status_value = batch_data.get('status', 'PENDING')
if isinstance(status_value, str):
try:
status_value = ProductionStatus[status_value]
except KeyError:
status_value = ProductionStatus.PENDING
priority_value = batch_data.get('priority', 'MEDIUM')
if isinstance(priority_value, str):
try:
priority_value = ProductionPriority[priority_value]
except KeyError:
priority_value = ProductionPriority.MEDIUM
# Map process stage enum
process_stage_value = batch_data.get('current_process_stage')
if process_stage_value and isinstance(process_stage_value, str):
try:
process_stage_value = ProcessStage[process_stage_value]
except KeyError:
process_stage_value = None
new_batch = ProductionBatch(
id=str(transformed_id),
tenant_id=virtual_uuid,
batch_number=f"{session_id[:8]}-{batch_data.get('batch_number', f'BATCH-{uuid.uuid4().hex[:8].upper()}')}",
product_id=batch_data.get('product_id'),
product_name=batch_data.get('product_name'),
recipe_id=batch_data.get('recipe_id'),
planned_start_time=adjusted_planned_start,
planned_end_time=adjusted_planned_end,
planned_quantity=batch_data.get('planned_quantity'),
planned_duration_minutes=batch_data.get('planned_duration_minutes'),
actual_start_time=adjusted_actual_start,
actual_end_time=adjusted_actual_end,
actual_quantity=batch_data.get('actual_quantity'),
actual_duration_minutes=batch_data.get('actual_duration_minutes'),
status=status_value,
priority=priority_value,
current_process_stage=process_stage_value,
process_stage_history=batch_data.get('process_stage_history'),
pending_quality_checks=batch_data.get('pending_quality_checks'),
completed_quality_checks=batch_data.get('completed_quality_checks'),
estimated_cost=batch_data.get('estimated_cost'),
actual_cost=batch_data.get('actual_cost'),
labor_cost=batch_data.get('labor_cost'),
material_cost=batch_data.get('material_cost'),
overhead_cost=batch_data.get('overhead_cost'),
yield_percentage=batch_data.get('yield_percentage'),
quality_score=batch_data.get('quality_score'),
waste_quantity=batch_data.get('waste_quantity'),
defect_quantity=batch_data.get('defect_quantity'),
equipment_used=batch_data.get('equipment_used'),
staff_assigned=batch_data.get('staff_assigned'),
station_id=batch_data.get('station_id'),
order_id=batch_data.get('order_id'),
forecast_id=batch_data.get('forecast_id'),
is_rush_order=batch_data.get('is_rush_order', False),
is_special_recipe=batch_data.get('is_special_recipe', False),
production_notes=batch_data.get('production_notes'),
quality_notes=batch_data.get('quality_notes'),
delay_reason=batch_data.get('delay_reason'),
cancellation_reason=batch_data.get('cancellation_reason'),
created_at=adjusted_created_at,
updated_at=adjusted_updated_at,
completed_at=adjusted_completed
)
db.add(new_batch)
stats["batches"] += 1
# Flush to get batch IDs
await db.flush()
# Clone Quality Checks from seed data (if any)
for check_data in seed_data.get('quality_checks', []):
# Transform IDs
try:
check_uuid = UUID(check_data['id'])
transformed_id = transform_id(check_data['id'], virtual_uuid)
except ValueError as e:
logger.error("Failed to parse check UUID",
check_id=check_data['id'],
error=str(e))
continue
# Map batch_id if it exists in our map
batch_id_value = check_data.get('batch_id')
if batch_id_value:
batch_id_value = batch_id_map.get(UUID(batch_id_value), UUID(batch_id_value))
# Map template_id if it exists
template_id_value = check_data.get('template_id')
if template_id_value:
template_id_value = template_id_map.get(UUID(template_id_value), UUID(template_id_value))
            # Parse date fields (supports BASE_TS markers and ISO timestamps)
            adjusted_check_time = parse_date_field(check_data.get('check_time'), session_time, "check_time")
            adjusted_created_at = parse_date_field(check_data.get('created_at'), session_time, "created_at")
            adjusted_updated_at = parse_date_field(check_data.get('updated_at'), session_time, "updated_at") or adjusted_created_at
new_check = QualityCheck(
id=str(transformed_id),
tenant_id=virtual_uuid,
batch_id=str(batch_id_value) if batch_id_value else None,
template_id=str(template_id_value) if template_id_value else None,
check_type=check_data.get('check_type'),
process_stage=check_data.get('process_stage'),
check_time=adjusted_check_time,
checker_id=check_data.get('checker_id'),
quality_score=check_data.get('quality_score'),
pass_fail=check_data.get('pass_fail'),
defect_count=check_data.get('defect_count'),
defect_types=check_data.get('defect_types'),
measured_weight=check_data.get('measured_weight'),
measured_temperature=check_data.get('measured_temperature'),
measured_moisture=check_data.get('measured_moisture'),
measured_dimensions=check_data.get('measured_dimensions'),
stage_specific_data=check_data.get('stage_specific_data'),
target_weight=check_data.get('target_weight'),
target_temperature=check_data.get('target_temperature'),
target_moisture=check_data.get('target_moisture'),
tolerance_percentage=check_data.get('tolerance_percentage'),
within_tolerance=check_data.get('within_tolerance'),
corrective_action_needed=check_data.get('corrective_action_needed'),
corrective_actions=check_data.get('corrective_actions'),
template_results=check_data.get('template_results'),
criteria_scores=check_data.get('criteria_scores'),
check_notes=check_data.get('check_notes'),
photos_urls=check_data.get('photos_urls'),
certificate_url=check_data.get('certificate_url'),
created_at=adjusted_created_at,
updated_at=adjusted_updated_at
)
db.add(new_check)
stats["quality_checks"] += 1
# Clone Production Schedules from seed data (if any)
for schedule_data in seed_data.get('production_schedules', []):
# Transform IDs
try:
schedule_uuid = UUID(schedule_data['id'])
transformed_id = transform_id(schedule_data['id'], virtual_uuid)
except ValueError as e:
logger.error("Failed to parse schedule UUID",
schedule_id=schedule_data['id'],
error=str(e))
continue
            # Parse date fields (supports BASE_TS markers and ISO timestamps)
            adjusted_schedule_date = parse_date_field(schedule_data.get('schedule_date'), session_time, "schedule_date")
            adjusted_shift_start = parse_date_field(schedule_data.get('shift_start'), session_time, "shift_start")
            adjusted_shift_end = parse_date_field(schedule_data.get('shift_end'), session_time, "shift_end")
            adjusted_finalized = parse_date_field(schedule_data.get('finalized_at'), session_time, "finalized_at")
            adjusted_created_at = parse_date_field(schedule_data.get('created_at'), session_time, "created_at")
            adjusted_updated_at = parse_date_field(schedule_data.get('updated_at'), session_time, "updated_at") or adjusted_created_at
new_schedule = ProductionSchedule(
id=str(transformed_id),
tenant_id=virtual_uuid,
schedule_date=adjusted_schedule_date,
shift_start=adjusted_shift_start,
shift_end=adjusted_shift_end,
total_capacity_hours=schedule_data.get('total_capacity_hours'),
planned_capacity_hours=schedule_data.get('planned_capacity_hours'),
actual_capacity_hours=schedule_data.get('actual_capacity_hours'),
overtime_hours=schedule_data.get('overtime_hours', 0.0),
staff_count=schedule_data.get('staff_count'),
equipment_capacity=schedule_data.get('equipment_capacity'),
station_assignments=schedule_data.get('station_assignments'),
total_batches_planned=schedule_data.get('total_batches_planned', 0),
total_batches_completed=schedule_data.get('total_batches_completed', 0),
total_quantity_planned=schedule_data.get('total_quantity_planned', 0.0),
total_quantity_produced=schedule_data.get('total_quantity_produced', 0.0),
is_finalized=schedule_data.get('is_finalized', False),
is_active=schedule_data.get('is_active', True),
efficiency_percentage=schedule_data.get('efficiency_percentage'),
utilization_percentage=schedule_data.get('utilization_percentage'),
on_time_completion_rate=schedule_data.get('on_time_completion_rate'),
schedule_notes=schedule_data.get('schedule_notes'),
schedule_adjustments=schedule_data.get('schedule_adjustments'),
created_at=adjusted_created_at,
updated_at=adjusted_updated_at,
finalized_at=adjusted_finalized
)
db.add(new_schedule)
stats["production_schedules"] += 1
# Clone Production Capacity from seed data (if any)
for capacity_data in seed_data.get('production_capacity', []):
# Transform IDs
try:
capacity_uuid = UUID(capacity_data['id'])
transformed_id = transform_id(capacity_data['id'], virtual_uuid)
except ValueError as e:
logger.error("Failed to parse capacity UUID",
capacity_id=capacity_data['id'],
error=str(e))
continue
            # Parse date fields (supports BASE_TS markers and ISO timestamps)
            adjusted_date = parse_date_field(capacity_data.get('date'), session_time, "date")
            adjusted_start_time = parse_date_field(capacity_data.get('start_time'), session_time, "start_time")
            adjusted_end_time = parse_date_field(capacity_data.get('end_time'), session_time, "end_time")
            adjusted_last_maintenance = parse_date_field(capacity_data.get('last_maintenance_date'), session_time, "last_maintenance_date")
            adjusted_created_at = parse_date_field(capacity_data.get('created_at'), session_time, "created_at")
            adjusted_updated_at = parse_date_field(capacity_data.get('updated_at'), session_time, "updated_at") or adjusted_created_at
new_capacity = ProductionCapacity(
id=str(transformed_id),
tenant_id=virtual_uuid,
resource_type=capacity_data.get('resource_type'),
resource_id=capacity_data.get('resource_id'),
resource_name=capacity_data.get('resource_name'),
date=adjusted_date,
start_time=adjusted_start_time,
end_time=adjusted_end_time,
total_capacity_units=capacity_data.get('total_capacity_units'),
allocated_capacity_units=capacity_data.get('allocated_capacity_units'),
remaining_capacity_units=capacity_data.get('remaining_capacity_units'),
is_available=capacity_data.get('is_available'),
is_maintenance=capacity_data.get('is_maintenance'),
is_reserved=capacity_data.get('is_reserved'),
equipment_type=capacity_data.get('equipment_type'),
max_batch_size=capacity_data.get('max_batch_size'),
min_batch_size=capacity_data.get('min_batch_size'),
setup_time_minutes=capacity_data.get('setup_time_minutes'),
cleanup_time_minutes=capacity_data.get('cleanup_time_minutes'),
efficiency_rating=capacity_data.get('efficiency_rating'),
maintenance_status=capacity_data.get('maintenance_status'),
last_maintenance_date=adjusted_last_maintenance,
notes=capacity_data.get('notes'),
restrictions=capacity_data.get('restrictions'),
created_at=adjusted_created_at,
updated_at=adjusted_updated_at
)
db.add(new_capacity)
stats["production_capacity"] += 1
# Add deterministic edge case batches
edge_times = calculate_edge_case_times(session_time)
# Get a sample product_id from existing batches for edge cases
sample_product_id = None
if seed_data.get('batches'):
sample_product_id = seed_data['batches'][0].get('product_id')
if sample_product_id:
# Edge Case 1: Overdue Batch (should have started 2 hours ago)
overdue_batch = ProductionBatch(
id=str(uuid.uuid4()),
tenant_id=virtual_uuid,
batch_number=f"{session_id[:8]}-EDGE-OVERDUE",
product_id=sample_product_id,
product_name="Pan Integral (Edge Case)",
planned_start_time=edge_times["overdue_batch_planned_start"],
planned_end_time=edge_times["overdue_batch_planned_start"] + timedelta(hours=3),
planned_quantity=50.0,
planned_duration_minutes=180,
actual_start_time=None,
actual_end_time=None,
actual_quantity=None,
status=ProductionStatus.PENDING,
priority=ProductionPriority.URGENT,
current_process_stage=None,
production_notes="⚠️ EDGE CASE: Should have started 2 hours ago - triggers yellow alert for delayed production",
created_at=session_time,
updated_at=session_time
)
db.add(overdue_batch)
stats["batches"] += 1
# Edge Case 2: In-Progress Batch (started 1h45m ago)
in_progress_batch = ProductionBatch(
id=str(uuid.uuid4()),
tenant_id=virtual_uuid,
batch_number=f"{session_id[:8]}-EDGE-INPROGRESS",
product_id=sample_product_id,
product_name="Croissant de Mantequilla (Edge Case)",
planned_start_time=edge_times["in_progress_batch_actual_start"],
planned_end_time=edge_times["upcoming_batch_planned_start"],
planned_quantity=100.0,
planned_duration_minutes=195,
actual_start_time=edge_times["in_progress_batch_actual_start"],
actual_end_time=None,
actual_quantity=None,
status=ProductionStatus.IN_PROGRESS,
priority=ProductionPriority.HIGH,
current_process_stage=ProcessStage.BAKING,
production_notes="⚠️ EDGE CASE: Currently in progress - visible in active production dashboard",
created_at=session_time,
updated_at=session_time
)
db.add(in_progress_batch)
stats["batches"] += 1
# Edge Case 3: Upcoming Batch (starts in 1.5 hours)
upcoming_batch = ProductionBatch(
id=str(uuid.uuid4()),
tenant_id=virtual_uuid,
batch_number=f"{session_id[:8]}-EDGE-UPCOMING",
product_id=sample_product_id,
product_name="Baguette Tradicional (Edge Case)",
planned_start_time=edge_times["upcoming_batch_planned_start"],
planned_end_time=edge_times["upcoming_batch_planned_start"] + timedelta(hours=2),
planned_quantity=75.0,
planned_duration_minutes=120,
actual_start_time=None,
actual_end_time=None,
actual_quantity=None,
status=ProductionStatus.PENDING,
priority=ProductionPriority.MEDIUM,
current_process_stage=None,
production_notes="⚠️ EDGE CASE: Starting in 1.5 hours - visible in upcoming production schedule",
created_at=session_time,
updated_at=session_time
)
db.add(upcoming_batch)
stats["batches"] += 1
# Edge Case 4: Evening Batch (starts at 17:00 today)
evening_batch = ProductionBatch(
id=str(uuid.uuid4()),
tenant_id=virtual_uuid,
batch_number=f"{session_id[:8]}-EDGE-EVENING",
product_id=sample_product_id,
product_name="Pan de Molde (Edge Case)",
planned_start_time=edge_times["evening_batch_planned_start"],
planned_end_time=edge_times["evening_batch_planned_start"] + timedelta(hours=2, minutes=30),
planned_quantity=60.0,
planned_duration_minutes=150,
actual_start_time=None,
actual_end_time=None,
actual_quantity=None,
status=ProductionStatus.PENDING,
priority=ProductionPriority.MEDIUM,
current_process_stage=None,
production_notes="⚠️ EDGE CASE: Evening shift production - scheduled for 17:00",
created_at=session_time,
updated_at=session_time
)
db.add(evening_batch)
stats["batches"] += 1
# Edge Case 5: Tomorrow Morning Batch (starts at 05:00 tomorrow)
tomorrow_batch = ProductionBatch(
id=str(uuid.uuid4()),
tenant_id=virtual_uuid,
batch_number=f"{session_id[:8]}-EDGE-TOMORROW",
product_id=sample_product_id,
product_name="Bollería Variada (Edge Case)",
planned_start_time=edge_times["tomorrow_morning_planned_start"],
planned_end_time=edge_times["tomorrow_morning_planned_start"] + timedelta(hours=4),
planned_quantity=120.0,
planned_duration_minutes=240,
actual_start_time=None,
actual_end_time=None,
actual_quantity=None,
status=ProductionStatus.PENDING,
priority=ProductionPriority.MEDIUM,
current_process_stage=None,
production_notes="⚠️ EDGE CASE: Tomorrow morning production - scheduled for 05:00",
created_at=session_time,
updated_at=session_time
)
db.add(tomorrow_batch)
stats["batches"] += 1
logger.info(
"Added deterministic edge case batches",
edge_cases_added=5,
overdue=edge_times["overdue_batch_planned_start"].isoformat(),
in_progress=edge_times["in_progress_batch_actual_start"].isoformat(),
upcoming=edge_times["upcoming_batch_planned_start"].isoformat()
)
# Commit cloned data
await db.commit()
# NOTE: Alert generation removed - alerts are now generated automatically by the
# production alert service which runs scheduled checks at appropriate intervals.
# This eliminates duplicate alerts and provides a more realistic demo experience.
stats["alerts_generated"] = 0
# Calculate total from non-alert stats
total_records = (stats["equipment"] + stats["batches"] + stats["production_schedules"] +
stats["quality_check_templates"] + stats["quality_checks"] +
stats["production_capacity"])
duration_ms = int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000)
logger.info(
"Production data cloning completed",
virtual_tenant_id=virtual_tenant_id,
total_records=total_records,
stats=stats,
duration_ms=duration_ms
)
return {
"service": "production",
"status": "completed",
"records_cloned": total_records,
"duration_ms": duration_ms,
"details": stats
}
except ValueError as e:
logger.error("Invalid UUID format", error=str(e))
raise HTTPException(status_code=400, detail=f"Invalid UUID: {str(e)}")
except Exception as e:
logger.error(
"Failed to clone production data",
error=str(e),
virtual_tenant_id=virtual_tenant_id,
exc_info=True
)
# Rollback on error
await db.rollback()
return {
"service": "production",
"status": "failed",
"records_cloned": 0,
"duration_ms": int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000),
"error": str(e)
}
@router.get("/clone/health")
async def clone_health_check(_: bool = Depends(verify_internal_api_key)):
"""
Health check for internal cloning endpoint
Used by orchestrator to verify service availability
"""
return {
"service": "production",
"clone_endpoint": "available",
"version": "2.0.0"
}
@router.delete("/tenant/{virtual_tenant_id}")
async def delete_demo_data(
virtual_tenant_id: str,
db: AsyncSession = Depends(get_db),
_: bool = Depends(verify_internal_api_key)
):
"""Delete all production data for a virtual demo tenant"""
logger.info("Deleting production data for virtual tenant", virtual_tenant_id=virtual_tenant_id)
start_time = datetime.now(timezone.utc)
try:
virtual_uuid = uuid.UUID(virtual_tenant_id)
        # Count records before deletion (all tables touched below)
        batch_count = await db.scalar(select(func.count(ProductionBatch.id)).where(ProductionBatch.tenant_id == virtual_uuid))
        schedule_count = await db.scalar(select(func.count(ProductionSchedule.id)).where(ProductionSchedule.tenant_id == virtual_uuid))
        quality_count = await db.scalar(select(func.count(QualityCheck.id)).where(QualityCheck.tenant_id == virtual_uuid))
        template_count = await db.scalar(select(func.count(QualityCheckTemplate.id)).where(QualityCheckTemplate.tenant_id == virtual_uuid))
        equipment_count = await db.scalar(select(func.count(Equipment.id)).where(Equipment.tenant_id == virtual_uuid))
        capacity_count = await db.scalar(select(func.count(ProductionCapacity.id)).where(ProductionCapacity.tenant_id == virtual_uuid))
# Delete in order
await db.execute(delete(QualityCheck).where(QualityCheck.tenant_id == virtual_uuid))
await db.execute(delete(ProductionBatch).where(ProductionBatch.tenant_id == virtual_uuid))
await db.execute(delete(ProductionSchedule).where(ProductionSchedule.tenant_id == virtual_uuid))
await db.execute(delete(QualityCheckTemplate).where(QualityCheckTemplate.tenant_id == virtual_uuid))
await db.execute(delete(Equipment).where(Equipment.tenant_id == virtual_uuid))
await db.execute(delete(ProductionCapacity).where(ProductionCapacity.tenant_id == virtual_uuid))
await db.commit()
duration_ms = int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000)
logger.info("Production data deleted successfully", virtual_tenant_id=virtual_tenant_id, duration_ms=duration_ms)
return {
"service": "production",
"status": "deleted",
"virtual_tenant_id": virtual_tenant_id,
"records_deleted": {
"batches": batch_count,
"schedules": schedule_count,
"quality_checks": quality_count,
"equipment": equipment_count,
"total": batch_count + schedule_count + quality_count + equipment_count
},
"duration_ms": duration_ms
}
except Exception as e:
logger.error("Failed to delete production data", error=str(e), exc_info=True)
await db.rollback()
raise HTTPException(status_code=500, detail=str(e))
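
# Illustrative cleanup call for the delete endpoint above (sketch; host, port and
# any router prefix are assumptions):
#
#   import httpx
#   httpx.delete(
#       f"http://production-service:8000/tenant/{virtual_tenant_id}",
#       headers={"X-Internal-API-Key": settings.INTERNAL_API_KEY},
#   )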