demo seed change

Urtzi Alfaro
2025-12-13 23:57:54 +01:00
parent f3688dfb04
commit ff830a3415
299 changed files with 20328 additions and 19485 deletions


@@ -8,9 +8,12 @@ from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select, delete, func
import structlog
import uuid
+from uuid import UUID
from datetime import datetime, timezone, timedelta
from typing import Optional, Dict, Any
import os
+import json
+from pathlib import Path
from app.core.database import get_db
from app.models.production import (
@@ -19,12 +22,12 @@ from app.models.production import (
ProductionStatus, ProductionPriority, ProcessStage,
EquipmentStatus, EquipmentType
)
-from shared.utils.demo_dates import adjust_date_for_demo, BASE_REFERENCE_DATE
+from shared.utils.demo_dates import adjust_date_for_demo, BASE_REFERENCE_DATE, resolve_time_marker
from app.core.config import settings
logger = structlog.get_logger()
-router = APIRouter(prefix="/internal/demo", tags=["internal"])
+router = APIRouter()
# Base demo tenant IDs
DEMO_TENANT_PROFESSIONAL = "a1b2c3d4-e5f6-47a8-b9c0-d1e2f3a4b5c6"
@@ -38,7 +41,7 @@ def verify_internal_api_key(x_internal_api_key: Optional[str] = Header(None)):
return True
@router.post("/clone")
@router.post("/internal/demo/clone")
async def clone_demo_data(
base_tenant_id: str,
virtual_tenant_id: str,
@@ -91,12 +94,11 @@ async def clone_demo_data(
try:
# Validate UUIDs
base_uuid = uuid.UUID(base_tenant_id)
virtual_uuid = uuid.UUID(virtual_tenant_id)
# Track cloning statistics
stats = {
"production_batches": 0,
"batches": 0,
"production_schedules": 0,
"production_capacity": 0,
"quality_check_templates": 0,
@@ -105,63 +107,137 @@ async def clone_demo_data(
"alerts_generated": 0
}
-# ID mappings
-batch_id_map = {}
-template_id_map = {}
-equipment_id_map = {}
+def parse_date_field(date_value, field_name="date"):
+"""Parse date field, handling both ISO strings and BASE_TS markers"""
+if not date_value:
+return None
+# Check if it's a BASE_TS marker
+if isinstance(date_value, str) and date_value.startswith("BASE_TS"):
+try:
+return resolve_time_marker(date_value, session_time)
+except ValueError as e:
+logger.warning(
+f"Invalid BASE_TS marker in {field_name}",
+marker=date_value,
+error=str(e)
+)
+return None
+# Handle regular ISO date strings
+try:
+return adjust_date_for_demo(
+datetime.fromisoformat(date_value.replace('Z', '+00:00')),
+session_time,
+BASE_REFERENCE_DATE
+)
+except (ValueError, AttributeError) as e:
+logger.warning(
+f"Invalid date format in {field_name}",
+date_value=date_value,
+error=str(e)
+)
+return None
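Aside: `resolve_time_marker` is imported above but its implementation is not part of this diff. A minimal sketch of the assumed behavior, where a marker like "BASE_TS+3d" or "BASE_TS-90m" is an offset from the session start (the marker grammar is an assumption, not confirmed by this commit):

    import re
    from datetime import datetime, timedelta

    _MARKER_RE = re.compile(r"^BASE_TS(?:([+-])(\d+)([mhd]))?$")
    _UNITS = {"m": "minutes", "h": "hours", "d": "days"}

    def resolve_time_marker(marker: str, session_time: datetime) -> datetime:
        # Hypothetical sketch; the real helper lives in shared/utils/demo_dates.py.
        m = _MARKER_RE.match(marker)
        if not m:
            raise ValueError(f"Unrecognized time marker: {marker!r}")  # caught by parse_date_field
        sign, amount, unit = m.groups()
        if sign is None:
            return session_time  # bare BASE_TS resolves to the session start
        delta = timedelta(**{_UNITS[unit]: int(amount)})
        return session_time + delta if sign == "+" else session_time - delta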
-# Clone Equipment first (no dependencies)
-result = await db.execute(
-select(Equipment).where(Equipment.tenant_id == base_uuid)
-)
-base_equipment = result.scalars().all()
+# Load seed data from JSON files
+try:
+from shared.utils.seed_data_paths import get_seed_data_path
+if demo_account_type == "professional":
+json_file = get_seed_data_path("professional", "06-production.json")
+elif demo_account_type == "enterprise":
+json_file = get_seed_data_path("enterprise", "06-production.json")
+else:
+raise ValueError(f"Invalid demo account type: {demo_account_type}")
-logger.info(
-"Found equipment to clone",
-count=len(base_equipment),
-base_tenant=str(base_uuid)
-)
+except ImportError:
+# Fallback to original path
+seed_data_dir = Path(__file__).parent.parent.parent.parent / "infrastructure" / "seed-data"
+if demo_account_type == "professional":
+json_file = seed_data_dir / "professional" / "06-production.json"
+elif demo_account_type == "enterprise":
+json_file = seed_data_dir / "enterprise" / "parent" / "06-production.json"
+else:
+raise ValueError(f"Invalid demo account type: {demo_account_type}")
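The directory layout that `get_seed_data_path` resolves can be inferred from this fallback; a sketch consistent with both branches (hypothetical, the real helper is in shared/utils/seed_data_paths.py):

    from pathlib import Path

    SEED_DATA_ROOT = Path(__file__).resolve().parents[3] / "infrastructure" / "seed-data"

    def get_seed_data_path(account_type: str, filename: str) -> Path:
        # Hypothetical sketch; enterprise seed files sit under a parent/ subdirectory.
        if account_type == "enterprise":
            return SEED_DATA_ROOT / "enterprise" / "parent" / filename
        return SEED_DATA_ROOT / account_type / filename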
-for equipment in base_equipment:
-new_equipment_id = uuid.uuid4()
-equipment_id_map[equipment.id] = new_equipment_id
+if not json_file.exists():
+raise HTTPException(
+status_code=404,
+detail=f"Seed data file not found: {json_file}"
+)
+# Load JSON data
+with open(json_file, 'r', encoding='utf-8') as f:
+seed_data = json.load(f)
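For orientation, a trimmed, hypothetical excerpt of what 06-production.json is expected to contain, written as the Python structure json.load produces (top-level keys mirror the seed_data.get(...) calls in this file; all values are invented; batch timestamps may be ISO strings or BASE_TS markers, both handled by parse_date_field above):

    seed_data = {
        "equipment": [{
            "id": "7c9e6679-7425-40de-944b-e07fc1f90ae7",
            "name": "Deck Oven 1",
            "type": "oven",
            "model": "DO-200",
            "location": "Main floor",
            "status": "operational",
            "install_date": "2024-01-15T08:00:00Z",
            "last_maintenance_date": "2025-05-01T08:00:00Z",
            "next_maintenance_date": "2025-11-01T08:00:00Z",
            "created_at": "2024-01-15T08:00:00Z",
            "updated_at": "2025-05-01T08:00:00Z",
        }],
        "batches": [{
            "id": "0a1b2c3d-4e5f-4671-8899-aabbccddeeff",
            "batch_number": "BATCH-0001",
            "product_name": "Sourdough",
            "status": "IN_PROGRESS",                      # enum member name, see mapping below
            "priority": "HIGH",
            "planned_start_time": "BASE_TS-2h",           # marker form
            "planned_end_time": "2025-06-01T10:00:00Z",   # ISO form
        }],
        "quality_checks": [],
        "production_schedules": [],
        "production_capacity": [],
    }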
+# Create Equipment first (no dependencies)
+for equipment_data in seed_data.get('equipment', []):
+# Transform equipment ID using XOR
+from shared.utils.demo_id_transformer import transform_id
+try:
+equipment_uuid = UUID(equipment_data['id'])
+transformed_id = transform_id(equipment_data['id'], virtual_uuid)
+except ValueError as e:
+logger.error("Failed to parse equipment UUID",
+equipment_id=equipment_data['id'],
+error=str(e))
+raise HTTPException(
+status_code=400,
+detail=f"Invalid UUID format in equipment data: {str(e)}"
+)
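The XOR transform keeps cloning deterministic: the same seed row always maps to the same per-tenant ID, so foreign keys can be remapped without storing state. A sketch of `transform_id` under the assumption that it XORs the seed UUID's bytes with the virtual tenant UUID (XOR is stated in the comment above; the exact byte scheme is assumed):

    from uuid import UUID

    def transform_id(seed_id: str, virtual_uuid: UUID) -> UUID:
        # Hypothetical sketch of shared/utils/demo_id_transformer.transform_id.
        # XOR is self-inverse: applying it again recovers the original seed ID.
        seed_bytes = UUID(seed_id).bytes
        tenant_bytes = virtual_uuid.bytes
        # Note: a raw XOR can scramble the RFC 4122 version/variant bits;
        # the real implementation may re-set them.
        return UUID(bytes=bytes(a ^ b for a, b in zip(seed_bytes, tenant_bytes)))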
# Adjust dates relative to session creation time
adjusted_install_date = adjust_date_for_demo(
-equipment.install_date, session_time, BASE_REFERENCE_DATE
+datetime.fromisoformat(equipment_data['install_date'].replace('Z', '+00:00')),
+session_time,
+BASE_REFERENCE_DATE
)
adjusted_last_maintenance = adjust_date_for_demo(
-equipment.last_maintenance_date, session_time, BASE_REFERENCE_DATE
+datetime.fromisoformat(equipment_data['last_maintenance_date'].replace('Z', '+00:00')),
+session_time,
+BASE_REFERENCE_DATE
)
adjusted_next_maintenance = adjust_date_for_demo(
-equipment.next_maintenance_date, session_time, BASE_REFERENCE_DATE
+datetime.fromisoformat(equipment_data['next_maintenance_date'].replace('Z', '+00:00')),
+session_time,
+BASE_REFERENCE_DATE
)
+adjusted_created_at = adjust_date_for_demo(
+datetime.fromisoformat(equipment_data['created_at'].replace('Z', '+00:00')),
+session_time,
+BASE_REFERENCE_DATE
+)
+adjusted_updated_at = adjust_date_for_demo(
+datetime.fromisoformat(equipment_data['updated_at'].replace('Z', '+00:00')),
+session_time,
+BASE_REFERENCE_DATE
+)
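All of these adjustments shift seed timestamps by one common offset, preserving the relative spacing between events. A plausible sketch of `adjust_date_for_demo` (the real helper is in shared/utils/demo_dates.py and is not shown in this commit):

    from datetime import datetime

    def adjust_date_for_demo(original: datetime,
                             session_time: datetime,
                             base_reference: datetime) -> datetime:
        # Translate the seed timeline so base_reference lands on session_time:
        # equipment installed 30 days before BASE_REFERENCE_DATE appears as
        # installed 30 days before the demo session started.
        return original + (session_time - base_reference)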
new_equipment = Equipment(
-id=new_equipment_id,
+id=str(transformed_id),
tenant_id=virtual_uuid,
-name=equipment.name,
-type=equipment.type,
-model=equipment.model,
-serial_number=equipment.serial_number,
-location=equipment.location,
-status=equipment.status,
+name=equipment_data['name'],
+type=equipment_data['type'],
+model=equipment_data['model'],
+serial_number=equipment_data.get('serial_number'),
+location=equipment_data['location'],
+status=equipment_data['status'],
install_date=adjusted_install_date,
last_maintenance_date=adjusted_last_maintenance,
next_maintenance_date=adjusted_next_maintenance,
-maintenance_interval_days=equipment.maintenance_interval_days,
-efficiency_percentage=equipment.efficiency_percentage,
-uptime_percentage=equipment.uptime_percentage,
-energy_usage_kwh=equipment.energy_usage_kwh,
-power_kw=equipment.power_kw,
-capacity=equipment.capacity,
-weight_kg=equipment.weight_kg,
-current_temperature=equipment.current_temperature,
-target_temperature=equipment.target_temperature,
-is_active=equipment.is_active,
-notes=equipment.notes,
-created_at=session_time,
-updated_at=session_time
+maintenance_interval_days=equipment_data.get('maintenance_interval_days'),
+efficiency_percentage=equipment_data.get('efficiency_percentage'),
+uptime_percentage=equipment_data.get('uptime_percentage'),
+energy_usage_kwh=equipment_data.get('energy_usage_kwh'),
+power_kw=equipment_data.get('power_kw'),
+capacity=equipment_data.get('capacity'),
+weight_kg=equipment_data.get('weight_kg'),
+current_temperature=equipment_data.get('current_temperature'),
+target_temperature=equipment_data.get('target_temperature'),
+is_active=equipment_data.get('is_active', True),
+notes=equipment_data.get('notes'),
+created_at=adjusted_created_at,
+updated_at=adjusted_updated_at
)
db.add(new_equipment)
stats["equipment"] += 1
@@ -170,17 +246,17 @@ async def clone_demo_data(
await db.flush()
-# Clone Quality Check Templates
-result = await db.execute(
-select(QualityCheckTemplate).where(QualityCheckTemplate.tenant_id == base_uuid)
-)
-base_templates = result.scalars().all()
+# Note: Quality check templates are not included in seed data
+# They would need to be added to the production seed data if needed
+template_id_map = {}
+base_templates = []
logger.info(
"Found quality check templates to clone",
count=len(base_templates),
base_tenant=str(base_uuid)
"No quality check templates to clone (not in seed data)",
count=len(base_templates)
)
+# Only create templates if they exist in base templates
for template in base_templates:
new_template_id = uuid.uuid4()
template_id_map[template.id] = new_template_id
@@ -217,253 +293,333 @@ async def clone_demo_data(
# Flush to get template IDs
await db.flush()
-# Clone Production Batches
-result = await db.execute(
-select(ProductionBatch).where(ProductionBatch.tenant_id == base_uuid)
-)
-base_batches = result.scalars().all()
+# Clone Production Batches from seed data
+batch_id_map = {}
+for batch_data in seed_data.get('batches', []):
+# Transform batch ID using XOR
+from shared.utils.demo_id_transformer import transform_id
+try:
+batch_uuid = UUID(batch_data['id'])
+transformed_id = transform_id(batch_data['id'], virtual_uuid)
+except ValueError as e:
+logger.error("Failed to parse batch UUID",
+batch_id=batch_data['id'],
+error=str(e))
+raise HTTPException(
+status_code=400,
+detail=f"Invalid UUID format in batch data: {str(e)}"
+)
-logger.info(
-"Found production batches to clone",
-count=len(base_batches),
-base_tenant=str(base_uuid)
-)
-for batch in base_batches:
-new_batch_id = uuid.uuid4()
-batch_id_map[batch.id] = new_batch_id
+batch_id_map[UUID(batch_data['id'])] = transformed_id
# Adjust dates relative to session creation time
-adjusted_planned_start = adjust_date_for_demo(
-batch.planned_start_time, session_time, BASE_REFERENCE_DATE
-) if batch.planned_start_time else None
-adjusted_planned_end = adjust_date_for_demo(
-batch.planned_end_time, session_time, BASE_REFERENCE_DATE
-) if batch.planned_end_time else None
-adjusted_actual_start = adjust_date_for_demo(
-batch.actual_start_time, session_time, BASE_REFERENCE_DATE
-) if batch.actual_start_time else None
-adjusted_actual_end = adjust_date_for_demo(
-batch.actual_end_time, session_time, BASE_REFERENCE_DATE
-) if batch.actual_end_time else None
-adjusted_completed = adjust_date_for_demo(
-batch.completed_at, session_time, BASE_REFERENCE_DATE
-) if batch.completed_at else None
+adjusted_planned_start = parse_date_field(batch_data.get('planned_start_time'), "planned_start_time")
+adjusted_planned_end = parse_date_field(batch_data.get('planned_end_time'), "planned_end_time")
+adjusted_actual_start = parse_date_field(batch_data.get('actual_start_time'), "actual_start_time")
+adjusted_actual_end = parse_date_field(batch_data.get('actual_end_time'), "actual_end_time")
+adjusted_completed = parse_date_field(batch_data.get('completed_at'), "completed_at")
+adjusted_created_at = parse_date_field(batch_data.get('created_at'), "created_at") or session_time
+adjusted_updated_at = parse_date_field(batch_data.get('updated_at'), "updated_at") or adjusted_created_at
+# Map status and priority enums
+status_value = batch_data.get('status', 'PENDING')
+if isinstance(status_value, str):
+try:
+status_value = ProductionStatus[status_value]
+except KeyError:
+status_value = ProductionStatus.PENDING
+priority_value = batch_data.get('priority', 'MEDIUM')
+if isinstance(priority_value, str):
+try:
+priority_value = ProductionPriority[priority_value]
+except KeyError:
+priority_value = ProductionPriority.MEDIUM
+# Map process stage enum
+process_stage_value = batch_data.get('current_process_stage')
+if process_stage_value and isinstance(process_stage_value, str):
+try:
+process_stage_value = ProcessStage[process_stage_value]
+except KeyError:
+process_stage_value = None
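Note that `ProductionStatus[status_value]` is a lookup by member name ("IN_PROGRESS"), not by value; `ProductionStatus(...)` would be the value lookup, and an unknown name raises KeyError, which is what the fallbacks above catch. A standalone illustration (the enum members shown are assumptions):

    from enum import Enum

    class ProductionStatus(Enum):   # members invented for illustration
        PENDING = "pending"
        IN_PROGRESS = "in_progress"
        COMPLETED = "completed"

    raw = "IN_PROGRESS"
    try:
        status = ProductionStatus[raw]        # by name -> ProductionStatus.IN_PROGRESS
    except KeyError:
        status = ProductionStatus.PENDING     # safe default for unknown seed values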
new_batch = ProductionBatch(
-id=new_batch_id,
+id=str(transformed_id),
tenant_id=virtual_uuid,
batch_number=f"BATCH-{uuid.uuid4().hex[:8].upper()}", # New batch number
product_id=batch.product_id, # Keep product reference
product_name=batch.product_name,
recipe_id=batch.recipe_id, # Keep recipe reference
batch_number=f"{session_id[:8]}-{batch_data.get('batch_number', f'BATCH-{uuid.uuid4().hex[:8].upper()}')}",
product_id=batch_data.get('product_id'),
product_name=batch_data.get('product_name'),
recipe_id=batch_data.get('recipe_id'),
planned_start_time=adjusted_planned_start,
planned_end_time=adjusted_planned_end,
-planned_quantity=batch.planned_quantity,
-planned_duration_minutes=batch.planned_duration_minutes,
+planned_quantity=batch_data.get('planned_quantity'),
+planned_duration_minutes=batch_data.get('planned_duration_minutes'),
actual_start_time=adjusted_actual_start,
actual_end_time=adjusted_actual_end,
-actual_quantity=batch.actual_quantity,
-actual_duration_minutes=batch.actual_duration_minutes,
-status=batch.status,
-priority=batch.priority,
-current_process_stage=batch.current_process_stage,
-process_stage_history=batch.process_stage_history,
-pending_quality_checks=batch.pending_quality_checks,
-completed_quality_checks=batch.completed_quality_checks,
-estimated_cost=batch.estimated_cost,
-actual_cost=batch.actual_cost,
-labor_cost=batch.labor_cost,
-material_cost=batch.material_cost,
-overhead_cost=batch.overhead_cost,
-yield_percentage=batch.yield_percentage,
-quality_score=batch.quality_score,
-waste_quantity=batch.waste_quantity,
-defect_quantity=batch.defect_quantity,
-equipment_used=batch.equipment_used,
-staff_assigned=batch.staff_assigned,
-station_id=batch.station_id,
-order_id=batch.order_id,
-forecast_id=batch.forecast_id,
-is_rush_order=batch.is_rush_order,
-is_special_recipe=batch.is_special_recipe,
-production_notes=batch.production_notes,
-quality_notes=batch.quality_notes,
-delay_reason=batch.delay_reason,
-cancellation_reason=batch.cancellation_reason,
-created_at=session_time,
-updated_at=session_time,
+actual_quantity=batch_data.get('actual_quantity'),
+actual_duration_minutes=batch_data.get('actual_duration_minutes'),
+status=status_value,
+priority=priority_value,
+current_process_stage=process_stage_value,
+process_stage_history=batch_data.get('process_stage_history'),
+pending_quality_checks=batch_data.get('pending_quality_checks'),
+completed_quality_checks=batch_data.get('completed_quality_checks'),
+estimated_cost=batch_data.get('estimated_cost'),
+actual_cost=batch_data.get('actual_cost'),
+labor_cost=batch_data.get('labor_cost'),
+material_cost=batch_data.get('material_cost'),
+overhead_cost=batch_data.get('overhead_cost'),
+yield_percentage=batch_data.get('yield_percentage'),
+quality_score=batch_data.get('quality_score'),
+waste_quantity=batch_data.get('waste_quantity'),
+defect_quantity=batch_data.get('defect_quantity'),
+equipment_used=batch_data.get('equipment_used'),
+staff_assigned=batch_data.get('staff_assigned'),
+station_id=batch_data.get('station_id'),
+order_id=batch_data.get('order_id'),
+forecast_id=batch_data.get('forecast_id'),
+is_rush_order=batch_data.get('is_rush_order', False),
+is_special_recipe=batch_data.get('is_special_recipe', False),
+production_notes=batch_data.get('production_notes'),
+quality_notes=batch_data.get('quality_notes'),
+delay_reason=batch_data.get('delay_reason'),
+cancellation_reason=batch_data.get('cancellation_reason'),
+created_at=adjusted_created_at,
+updated_at=adjusted_updated_at,
completed_at=adjusted_completed
)
db.add(new_batch)
stats["production_batches"] += 1
stats["batches"] += 1
# Flush to get batch IDs
await db.flush()
-# Clone Quality Checks
-result = await db.execute(
-select(QualityCheck).where(QualityCheck.tenant_id == base_uuid)
-)
-base_checks = result.scalars().all()
+# Clone Quality Checks from seed data (if any)
+for check_data in seed_data.get('quality_checks', []):
+# Transform IDs
+from shared.utils.demo_id_transformer import transform_id
+try:
+check_uuid = UUID(check_data['id'])
+transformed_id = transform_id(check_data['id'], virtual_uuid)
+except ValueError as e:
+logger.error("Failed to parse check UUID",
+check_id=check_data['id'],
+error=str(e))
+continue
-logger.info(
-"Found quality checks to clone",
-count=len(base_checks),
-base_tenant=str(base_uuid)
-)
+# Map batch_id if it exists in our map
+batch_id_value = check_data.get('batch_id')
+if batch_id_value:
+batch_id_value = batch_id_map.get(UUID(batch_id_value), UUID(batch_id_value))
-for check in base_checks:
-new_batch_id = batch_id_map.get(check.batch_id, check.batch_id)
-new_template_id = template_id_map.get(check.template_id, check.template_id) if check.template_id else None
+# Map template_id if it exists
+template_id_value = check_data.get('template_id')
+if template_id_value:
+template_id_value = template_id_map.get(UUID(template_id_value), UUID(template_id_value))
# Adjust check time relative to session creation time
adjusted_check_time = adjust_date_for_demo(
-check.check_time, session_time, BASE_REFERENCE_DATE
-) if check.check_time else None
+datetime.fromisoformat(check_data['check_time'].replace('Z', '+00:00')),
+session_time,
+BASE_REFERENCE_DATE
+) if check_data.get('check_time') else None
+adjusted_created_at = adjust_date_for_demo(
+datetime.fromisoformat(check_data['created_at'].replace('Z', '+00:00')),
+session_time,
+BASE_REFERENCE_DATE
+)
+adjusted_updated_at = adjust_date_for_demo(
+datetime.fromisoformat(check_data['updated_at'].replace('Z', '+00:00')),
+session_time,
+BASE_REFERENCE_DATE
+) if check_data.get('updated_at') else adjusted_created_at
new_check = QualityCheck(
-id=uuid.uuid4(),
+id=str(transformed_id),
tenant_id=virtual_uuid,
-batch_id=new_batch_id,
-template_id=new_template_id,
-check_type=check.check_type,
-process_stage=check.process_stage,
+batch_id=str(batch_id_value) if batch_id_value else None,
+template_id=str(template_id_value) if template_id_value else None,
+check_type=check_data.get('check_type'),
+process_stage=check_data.get('process_stage'),
check_time=adjusted_check_time,
-checker_id=check.checker_id,
-quality_score=check.quality_score,
-pass_fail=check.pass_fail,
-defect_count=check.defect_count,
-defect_types=check.defect_types,
-measured_weight=check.measured_weight,
-measured_temperature=check.measured_temperature,
-measured_moisture=check.measured_moisture,
-measured_dimensions=check.measured_dimensions,
-stage_specific_data=check.stage_specific_data,
-target_weight=check.target_weight,
-target_temperature=check.target_temperature,
-target_moisture=check.target_moisture,
-tolerance_percentage=check.tolerance_percentage,
-within_tolerance=check.within_tolerance,
-corrective_action_needed=check.corrective_action_needed,
-corrective_actions=check.corrective_actions,
-template_results=check.template_results,
-criteria_scores=check.criteria_scores,
-check_notes=check.check_notes,
-photos_urls=check.photos_urls,
-certificate_url=check.certificate_url,
-created_at=session_time,
-updated_at=session_time
+checker_id=check_data.get('checker_id'),
+quality_score=check_data.get('quality_score'),
+pass_fail=check_data.get('pass_fail'),
+defect_count=check_data.get('defect_count'),
+defect_types=check_data.get('defect_types'),
+measured_weight=check_data.get('measured_weight'),
+measured_temperature=check_data.get('measured_temperature'),
+measured_moisture=check_data.get('measured_moisture'),
+measured_dimensions=check_data.get('measured_dimensions'),
+stage_specific_data=check_data.get('stage_specific_data'),
+target_weight=check_data.get('target_weight'),
+target_temperature=check_data.get('target_temperature'),
+target_moisture=check_data.get('target_moisture'),
+tolerance_percentage=check_data.get('tolerance_percentage'),
+within_tolerance=check_data.get('within_tolerance'),
+corrective_action_needed=check_data.get('corrective_action_needed'),
+corrective_actions=check_data.get('corrective_actions'),
+template_results=check_data.get('template_results'),
+criteria_scores=check_data.get('criteria_scores'),
+check_notes=check_data.get('check_notes'),
+photos_urls=check_data.get('photos_urls'),
+certificate_url=check_data.get('certificate_url'),
+created_at=adjusted_created_at,
+updated_at=adjusted_updated_at
)
db.add(new_check)
stats["quality_checks"] += 1
-# Clone Production Schedules
-result = await db.execute(
-select(ProductionSchedule).where(ProductionSchedule.tenant_id == base_uuid)
-)
-base_schedules = result.scalars().all()
+# Clone Production Schedules from seed data (if any)
+for schedule_data in seed_data.get('production_schedules', []):
+# Transform IDs
+from shared.utils.demo_id_transformer import transform_id
+try:
+schedule_uuid = UUID(schedule_data['id'])
+transformed_id = transform_id(schedule_data['id'], virtual_uuid)
+except ValueError as e:
+logger.error("Failed to parse schedule UUID",
+schedule_id=schedule_data['id'],
+error=str(e))
+continue
-logger.info(
-"Found production schedules to clone",
-count=len(base_schedules),
-base_tenant=str(base_uuid)
-)
-for schedule in base_schedules:
# Adjust schedule dates relative to session creation time
adjusted_schedule_date = adjust_date_for_demo(
-schedule.schedule_date, session_time, BASE_REFERENCE_DATE
-) if schedule.schedule_date else None
+datetime.fromisoformat(schedule_data['schedule_date'].replace('Z', '+00:00')),
+session_time,
+BASE_REFERENCE_DATE
+) if schedule_data.get('schedule_date') else None
adjusted_shift_start = adjust_date_for_demo(
-schedule.shift_start, session_time, BASE_REFERENCE_DATE
-) if schedule.shift_start else None
+datetime.fromisoformat(schedule_data['shift_start'].replace('Z', '+00:00')),
+session_time,
+BASE_REFERENCE_DATE
+) if schedule_data.get('shift_start') else None
adjusted_shift_end = adjust_date_for_demo(
-schedule.shift_end, session_time, BASE_REFERENCE_DATE
-) if schedule.shift_end else None
+datetime.fromisoformat(schedule_data['shift_end'].replace('Z', '+00:00')),
+session_time,
+BASE_REFERENCE_DATE
+) if schedule_data.get('shift_end') else None
adjusted_finalized = adjust_date_for_demo(
-schedule.finalized_at, session_time, BASE_REFERENCE_DATE
-) if schedule.finalized_at else None
+datetime.fromisoformat(schedule_data['finalized_at'].replace('Z', '+00:00')),
+session_time,
+BASE_REFERENCE_DATE
+) if schedule_data.get('finalized_at') else None
+adjusted_created_at = adjust_date_for_demo(
+datetime.fromisoformat(schedule_data['created_at'].replace('Z', '+00:00')),
+session_time,
+BASE_REFERENCE_DATE
+)
+adjusted_updated_at = adjust_date_for_demo(
+datetime.fromisoformat(schedule_data['updated_at'].replace('Z', '+00:00')),
+session_time,
+BASE_REFERENCE_DATE
+) if schedule_data.get('updated_at') else adjusted_created_at
new_schedule = ProductionSchedule(
-id=uuid.uuid4(),
+id=str(transformed_id),
tenant_id=virtual_uuid,
schedule_date=adjusted_schedule_date,
shift_start=adjusted_shift_start,
shift_end=adjusted_shift_end,
-total_capacity_hours=schedule.total_capacity_hours,
-planned_capacity_hours=schedule.planned_capacity_hours,
-actual_capacity_hours=schedule.actual_capacity_hours,
-overtime_hours=schedule.overtime_hours,
-staff_count=schedule.staff_count,
-equipment_capacity=schedule.equipment_capacity,
-station_assignments=schedule.station_assignments,
-total_batches_planned=schedule.total_batches_planned,
-total_batches_completed=schedule.total_batches_completed,
-total_quantity_planned=schedule.total_quantity_planned,
-total_quantity_produced=schedule.total_quantity_produced,
-is_finalized=schedule.is_finalized,
-is_active=schedule.is_active,
-efficiency_percentage=schedule.efficiency_percentage,
-utilization_percentage=schedule.utilization_percentage,
-on_time_completion_rate=schedule.on_time_completion_rate,
-schedule_notes=schedule.schedule_notes,
-schedule_adjustments=schedule.schedule_adjustments,
-created_at=session_time,
-updated_at=session_time,
+total_capacity_hours=schedule_data.get('total_capacity_hours'),
+planned_capacity_hours=schedule_data.get('planned_capacity_hours'),
+actual_capacity_hours=schedule_data.get('actual_capacity_hours'),
+overtime_hours=schedule_data.get('overtime_hours', 0.0),
+staff_count=schedule_data.get('staff_count'),
+equipment_capacity=schedule_data.get('equipment_capacity'),
+station_assignments=schedule_data.get('station_assignments'),
+total_batches_planned=schedule_data.get('total_batches_planned', 0),
+total_batches_completed=schedule_data.get('total_batches_completed', 0),
+total_quantity_planned=schedule_data.get('total_quantity_planned', 0.0),
+total_quantity_produced=schedule_data.get('total_quantity_produced', 0.0),
+is_finalized=schedule_data.get('is_finalized', False),
+is_active=schedule_data.get('is_active', True),
+efficiency_percentage=schedule_data.get('efficiency_percentage'),
+utilization_percentage=schedule_data.get('utilization_percentage'),
+on_time_completion_rate=schedule_data.get('on_time_completion_rate'),
+schedule_notes=schedule_data.get('schedule_notes'),
+schedule_adjustments=schedule_data.get('schedule_adjustments'),
+created_at=adjusted_created_at,
+updated_at=adjusted_updated_at,
finalized_at=adjusted_finalized
)
db.add(new_schedule)
stats["production_schedules"] += 1
-# Clone Production Capacity
-result = await db.execute(
-select(ProductionCapacity).where(ProductionCapacity.tenant_id == base_uuid)
-)
-base_capacity = result.scalars().all()
+# Clone Production Capacity from seed data (if any)
+for capacity_data in seed_data.get('production_capacity', []):
+# Transform IDs
+from shared.utils.demo_id_transformer import transform_id
+try:
+capacity_uuid = UUID(capacity_data['id'])
+transformed_id = transform_id(capacity_data['id'], virtual_uuid)
+except ValueError as e:
+logger.error("Failed to parse capacity UUID",
+capacity_id=capacity_data['id'],
+error=str(e))
+continue
-for capacity in base_capacity:
# Adjust capacity dates relative to session creation time
adjusted_date = adjust_date_for_demo(
-capacity.date, session_time, BASE_REFERENCE_DATE
-) if capacity.date else None
+datetime.fromisoformat(capacity_data['date'].replace('Z', '+00:00')),
+session_time,
+BASE_REFERENCE_DATE
+) if capacity_data.get('date') else None
adjusted_start_time = adjust_date_for_demo(
-capacity.start_time, session_time, BASE_REFERENCE_DATE
-) if capacity.start_time else None
+datetime.fromisoformat(capacity_data['start_time'].replace('Z', '+00:00')),
+session_time,
+BASE_REFERENCE_DATE
+) if capacity_data.get('start_time') else None
adjusted_end_time = adjust_date_for_demo(
-capacity.end_time, session_time, BASE_REFERENCE_DATE
-) if capacity.end_time else None
+datetime.fromisoformat(capacity_data['end_time'].replace('Z', '+00:00')),
+session_time,
+BASE_REFERENCE_DATE
+) if capacity_data.get('end_time') else None
adjusted_last_maintenance = adjust_date_for_demo(
-capacity.last_maintenance_date, session_time, BASE_REFERENCE_DATE
-) if capacity.last_maintenance_date else None
+datetime.fromisoformat(capacity_data['last_maintenance_date'].replace('Z', '+00:00')),
+session_time,
+BASE_REFERENCE_DATE
+) if capacity_data.get('last_maintenance_date') else None
+adjusted_created_at = adjust_date_for_demo(
+datetime.fromisoformat(capacity_data['created_at'].replace('Z', '+00:00')),
+session_time,
+BASE_REFERENCE_DATE
+)
+adjusted_updated_at = adjust_date_for_demo(
+datetime.fromisoformat(capacity_data['updated_at'].replace('Z', '+00:00')),
+session_time,
+BASE_REFERENCE_DATE
+) if capacity_data.get('updated_at') else adjusted_created_at
new_capacity = ProductionCapacity(
-id=uuid.uuid4(),
+id=str(transformed_id),
tenant_id=virtual_uuid,
-resource_type=capacity.resource_type,
-resource_id=capacity.resource_id,
-resource_name=capacity.resource_name,
+resource_type=capacity_data.get('resource_type'),
+resource_id=capacity_data.get('resource_id'),
+resource_name=capacity_data.get('resource_name'),
date=adjusted_date,
start_time=adjusted_start_time,
end_time=adjusted_end_time,
-total_capacity_units=capacity.total_capacity_units,
-allocated_capacity_units=capacity.allocated_capacity_units,
-remaining_capacity_units=capacity.remaining_capacity_units,
-is_available=capacity.is_available,
-is_maintenance=capacity.is_maintenance,
-is_reserved=capacity.is_reserved,
-equipment_type=capacity.equipment_type,
-max_batch_size=capacity.max_batch_size,
-min_batch_size=capacity.min_batch_size,
-setup_time_minutes=capacity.setup_time_minutes,
-cleanup_time_minutes=capacity.cleanup_time_minutes,
-efficiency_rating=capacity.efficiency_rating,
-maintenance_status=capacity.maintenance_status,
+total_capacity_units=capacity_data.get('total_capacity_units'),
+allocated_capacity_units=capacity_data.get('allocated_capacity_units'),
+remaining_capacity_units=capacity_data.get('remaining_capacity_units'),
+is_available=capacity_data.get('is_available'),
+is_maintenance=capacity_data.get('is_maintenance'),
+is_reserved=capacity_data.get('is_reserved'),
+equipment_type=capacity_data.get('equipment_type'),
+max_batch_size=capacity_data.get('max_batch_size'),
+min_batch_size=capacity_data.get('min_batch_size'),
+setup_time_minutes=capacity_data.get('setup_time_minutes'),
+cleanup_time_minutes=capacity_data.get('cleanup_time_minutes'),
+efficiency_rating=capacity_data.get('efficiency_rating'),
+maintenance_status=capacity_data.get('maintenance_status'),
last_maintenance_date=adjusted_last_maintenance,
-notes=capacity.notes,
-restrictions=capacity.restrictions,
-created_at=session_time,
-updated_at=session_time
+notes=capacity_data.get('notes'),
+restrictions=capacity_data.get('restrictions'),
+created_at=adjusted_created_at,
+updated_at=adjusted_updated_at
)
db.add(new_capacity)
stats["production_capacity"] += 1
@@ -477,7 +633,7 @@ async def clone_demo_data(
stats["alerts_generated"] = 0
# Calculate total from non-alert stats
total_records = (stats["equipment"] + stats["production_batches"] + stats["production_schedules"] +
total_records = (stats["equipment"] + stats["batches"] + stats["production_schedules"] +
stats["quality_check_templates"] + stats["quality_checks"] +
stats["production_capacity"])
duration_ms = int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000)
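End-to-end, the route change (an explicit "/internal/demo/clone" path instead of a router-level prefix) leaves the effective URL unchanged. A hedged example of calling the endpoint (host, key, and query-string parameter passing are assumptions for illustration; httpx is used here, but any HTTP client works):

    import httpx

    resp = httpx.post(
        "http://production-service:8000/internal/demo/clone",
        params={
            "base_tenant_id": "a1b2c3d4-e5f6-47a8-b9c0-d1e2f3a4b5c6",  # DEMO_TENANT_PROFESSIONAL
            "virtual_tenant_id": "1f2e3d4c-5b6a-4798-8190-a1b2c3d4e5f6",  # invented
            "demo_account_type": "professional",
        },
        headers={"X-Internal-API-Key": "..."},  # checked by verify_internal_api_key
        timeout=60.0,
    )
    resp.raise_for_status()
    print(resp.json())  # expected to include the per-entity stats dict built above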