demo seed change

This commit is contained in:
Urtzi Alfaro
2025-12-13 23:57:54 +01:00
parent f3688dfb04
commit ff830a3415
299 changed files with 20328 additions and 19485 deletions


@@ -1,345 +0,0 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Demo Production Batches Seeding Script for Production Service

Creates production batches for demo template tenants.
This script runs as a Kubernetes init job inside the production-service container.
"""
import asyncio
import uuid
import sys
import os
import json
from datetime import datetime, timedelta
from pathlib import Path
from typing import Optional

# Add the service root to the path so `app.*` imports resolve
sys.path.insert(0, str(Path(__file__).parent.parent.parent))
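# (Path layout assumption: with a layout like <repo>/services/production/scripts/demo/<script>.py,
# the three .parent hops above reach the service root so `app.*` resolves, and the fourth hop
# below reaches the repo root for `shared.*`. The exact directory layout is inferred, not given.)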

from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine
from sqlalchemy.orm import sessionmaker
from sqlalchemy import select
import structlog

from app.models.production import ProductionBatch, ProductionStatus, ProductionPriority, ProcessStage

# Add the repo root to the path for shared utilities
sys.path.insert(0, str(Path(__file__).parent.parent.parent.parent))
# Import reasoning helper functions for i18n support
from shared.schemas.reasoning_types import create_batch_reasoning_forecast_demand, create_batch_reasoning_regular_schedule
from shared.utils.demo_dates import BASE_REFERENCE_DATE

# Configure logging
logger = structlog.get_logger()

# Base demo tenant IDs
DEMO_TENANT_PROFESSIONAL = uuid.UUID("a1b2c3d4-e5f6-47a8-b9c0-d1e2f3a4b5c6")  # Individual bakery
DEMO_TENANT_ENTERPRISE_CHAIN = uuid.UUID("c3d4e5f6-a7b8-49c0-d1e2-f3a4b5c6d7e8")  # Enterprise parent (Obrador)

def load_batches_data():
    """Load production batches data from JSON file."""
    data_file = Path(__file__).parent / "lotes_produccion_es.json"
    if not data_file.exists():
        raise FileNotFoundError(f"Production batches data file not found: {data_file}")
    with open(data_file, 'r', encoding='utf-8') as f:
        return json.load(f)


def calculate_datetime_from_offset(offset_days: int, hour: int, minute: int) -> datetime:
    """Calculate a datetime offset from BASE_REFERENCE_DATE at the given time of day."""
    base_date = BASE_REFERENCE_DATE.replace(hour=hour, minute=minute, second=0, microsecond=0)
    return base_date + timedelta(days=offset_days)
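
# Worked example (hypothetical reference date): with BASE_REFERENCE_DATE at 2025-01-15 00:00,
# calculate_datetime_from_offset(-2, 6, 30) returns 2025-01-13 06:30, i.e. two days before the
# reference date at 06:30. Offsets keep the demo data stable relative to a fixed "today".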

def map_status(status_str: str) -> ProductionStatus:
    """Map a status string to its enum, defaulting to PENDING for unknown values."""
    mapping = {
        "PENDING": ProductionStatus.PENDING,
        "IN_PROGRESS": ProductionStatus.IN_PROGRESS,
        "COMPLETED": ProductionStatus.COMPLETED,
        "CANCELLED": ProductionStatus.CANCELLED,
        "ON_HOLD": ProductionStatus.ON_HOLD,
        "QUALITY_CHECK": ProductionStatus.QUALITY_CHECK,
        "FAILED": ProductionStatus.FAILED,
    }
    return mapping.get(status_str, ProductionStatus.PENDING)


def map_priority(priority_str: str) -> ProductionPriority:
    """Map a priority string to its enum, defaulting to MEDIUM for unknown values."""
    mapping = {
        "LOW": ProductionPriority.LOW,
        "MEDIUM": ProductionPriority.MEDIUM,
        "HIGH": ProductionPriority.HIGH,
        "URGENT": ProductionPriority.URGENT,
    }
    return mapping.get(priority_str, ProductionPriority.MEDIUM)


def map_process_stage(stage_str: str) -> Optional[ProcessStage]:
    """Map a process stage string to its enum; returns None for empty or unknown stages."""
    if not stage_str:
        return None
    mapping = {
        "mixing": ProcessStage.MIXING,
        "proofing": ProcessStage.PROOFING,
        "shaping": ProcessStage.SHAPING,
        "baking": ProcessStage.BAKING,
        "cooling": ProcessStage.COOLING,
        "packaging": ProcessStage.PACKAGING,
        "finishing": ProcessStage.FINISHING,
    }
    return mapping.get(stage_str)
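
# Note the .get() defaults make the mappers total over arbitrary input, e.g.
# map_status("UNKNOWN") returns ProductionStatus.PENDING, so a typo in the seed JSON
# degrades gracefully instead of raising a KeyError.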

async def seed_batches_for_tenant(
    db: AsyncSession,
    tenant_id: uuid.UUID,
    tenant_name: str,
    batches_list: list,
):
    """Seed production batches for a specific tenant."""
    logger.info(f"Seeding production batches for: {tenant_name}", tenant_id=str(tenant_id))

    # Check if batches already exist
    result = await db.execute(
        select(ProductionBatch).where(ProductionBatch.tenant_id == tenant_id).limit(1)
    )
    existing = result.scalar_one_or_none()
    if existing:
        logger.info(f"Production batches already exist for {tenant_name}, skipping seed")
        return {"tenant_id": str(tenant_id), "batches_created": 0, "skipped": True}
    count = 0
    for batch_data in batches_list:
        # Calculate planned start and end times
        planned_start = calculate_datetime_from_offset(
            batch_data["planned_start_offset_days"],
            batch_data["planned_start_hour"],
            batch_data["planned_start_minute"],
        )
        planned_end = planned_start + timedelta(minutes=batch_data["planned_duration_minutes"])

        # Calculate actual times for completed batches
        actual_start = None
        actual_end = None
        completed_at = None
        actual_duration = None
        if batch_data["status"] in ["COMPLETED", "QUALITY_CHECK"]:
            actual_start = planned_start  # Assume the batch started on time
            actual_duration = batch_data["planned_duration_minutes"]
            actual_end = actual_start + timedelta(minutes=actual_duration)
            completed_at = actual_end
        elif batch_data["status"] == "IN_PROGRESS":
            # For IN_PROGRESS batches, set actual_start to a recent time so the progress
            # calculation stays valid. Use BASE_REFERENCE_DATE as "now" for consistent demo data.
            now = BASE_REFERENCE_DATE
            if planned_start < now:
                # Planned start is in the past: pick an elapsed time that leaves the batch
                # ~30% complete, capped at the actual minutes since the planned start
                elapsed_time_minutes = min(
                    int(batch_data["planned_duration_minutes"] * 0.3),
                    int((now - planned_start).total_seconds() / 60),
                )
                actual_start = now - timedelta(minutes=elapsed_time_minutes)
            else:
                # Planned start is in the future: pretend the batch started 30 minutes ago
                actual_start = now - timedelta(minutes=30)
            actual_duration = None
            actual_end = None
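
        # Worked example: a 120-minute batch whose planned start is well in the past gets
        # elapsed = min(int(120 * 0.3), minutes since planned start) = 36, so actual_start
        # lands 36 minutes before "now" and the batch reports roughly 30% progress.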

        # The professional tenant keeps the original IDs from the JSON; the enterprise
        # tenant gets new, deterministic UUIDs derived from them
        if tenant_id == DEMO_TENANT_PROFESSIONAL:
            batch_id = uuid.UUID(batch_data["id"])
        else:
            base_uuid = uuid.UUID(batch_data["id"])
            # Add a fixed offset to create a unique but deterministic ID
            batch_id = uuid.UUID(int=base_uuid.int + 0x10000000000000000000000000000000)
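            # The constant is 2**124, which bumps the most-significant hex digit of the
            # derived UUID by one. This assumes no seed ID starts with the hex digit F;
            # otherwise uuid.UUID(int=...) would overflow 128 bits and raise ValueError.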

        # Map enums
        status = map_status(batch_data["status"])
        priority = map_priority(batch_data["priority"])
        current_stage = map_process_stage(batch_data.get("current_process_stage"))

        # Create a batch number that is unique per tenant
        if tenant_id == DEMO_TENANT_PROFESSIONAL:
            batch_number = batch_data["batch_number"]
        else:
            # For the enterprise tenant, append a suffix so batch numbers stay unique
            batch_number = batch_data["batch_number"] + "-LE"

        # Generate structured reasoning_data for i18n support
        reasoning_data = None
        try:
            # Use forecast-demand reasoning for AI-assisted or high-priority batches
            if batch_data.get("is_ai_assisted") or priority in [ProductionPriority.HIGH, ProductionPriority.URGENT]:
                reasoning_data = create_batch_reasoning_forecast_demand(
                    product_name=batch_data["product_name"],
                    predicted_demand=batch_data["planned_quantity"],
                    current_stock=int(batch_data["planned_quantity"] * 0.3),  # Demo: assume 30% current stock
                    production_needed=batch_data["planned_quantity"],
                    target_date=planned_start.date().isoformat(),
                    confidence_score=0.85 if batch_data.get("is_ai_assisted") else 0.75,
                )
            else:
                # Regular schedule reasoning for standard batches
                reasoning_data = create_batch_reasoning_regular_schedule(
                    product_name=batch_data["product_name"],
                    schedule_frequency="daily",
                    batch_size=batch_data["planned_quantity"],
                )
        except Exception as e:
            logger.warning(f"Failed to generate reasoning_data for batch {batch_number}: {e}")

        # Create production batch
        batch = ProductionBatch(
            id=batch_id,
            tenant_id=tenant_id,
            batch_number=batch_number,
            product_id=uuid.UUID(batch_data["product_id"]),
            product_name=batch_data["product_name"],
            recipe_id=uuid.UUID(batch_data["recipe_id"]) if batch_data.get("recipe_id") else None,
            planned_start_time=planned_start,
            planned_end_time=planned_end,
            planned_quantity=batch_data["planned_quantity"],
            planned_duration_minutes=batch_data["planned_duration_minutes"],
            actual_start_time=actual_start,
            actual_end_time=actual_end,
            actual_quantity=batch_data.get("actual_quantity"),
            actual_duration_minutes=actual_duration,
            status=status,
            priority=priority,
            current_process_stage=current_stage,
            yield_percentage=batch_data.get("yield_percentage"),
            quality_score=batch_data.get("quality_score"),
            waste_quantity=batch_data.get("waste_quantity"),
            defect_quantity=batch_data.get("defect_quantity"),
            estimated_cost=batch_data.get("estimated_cost"),
            actual_cost=batch_data.get("actual_cost"),
            labor_cost=batch_data.get("labor_cost"),
            material_cost=batch_data.get("material_cost"),
            overhead_cost=batch_data.get("overhead_cost"),
            equipment_used=batch_data.get("equipment_used"),
            station_id=batch_data.get("station_id"),
            is_rush_order=batch_data.get("is_rush_order", False),
            is_special_recipe=batch_data.get("is_special_recipe", False),
            is_ai_assisted=batch_data.get("is_ai_assisted", False),
            waste_defect_type=batch_data.get("waste_defect_type"),
            production_notes=batch_data.get("production_notes"),
            quality_notes=batch_data.get("quality_notes"),
            reasoning_data=reasoning_data,  # Structured reasoning for i18n support
            created_at=BASE_REFERENCE_DATE,
            updated_at=BASE_REFERENCE_DATE,
            completed_at=completed_at,
        )
        db.add(batch)
        count += 1
        logger.debug(f"Created production batch: {batch.batch_number}", batch_id=str(batch.id))

    await db.commit()
    logger.info(f"Successfully created {count} production batches for {tenant_name}")
    return {
        "tenant_id": str(tenant_id),
        "batches_created": count,
        "skipped": False,
    }

async def seed_all(db: AsyncSession):
    """Seed all demo tenants with production batches."""
    logger.info("Starting demo production batches seed process")

    data = load_batches_data()
    results = []

    # Seed the Professional bakery (single location)
    result_professional = await seed_batches_for_tenant(
        db,
        DEMO_TENANT_PROFESSIONAL,
        "Panadería Artesana Madrid (Professional)",
        data["lotes_produccion"],
    )
    results.append(result_professional)

    # Seed the Enterprise parent (central production - Obrador) with the same batch data
    result_enterprise_parent = await seed_batches_for_tenant(
        db,
        DEMO_TENANT_ENTERPRISE_CHAIN,
        "Panadería Central - Obrador Madrid (Enterprise Parent)",
        data["lotes_produccion"],
    )
    results.append(result_enterprise_parent)

    total_created = sum(r["batches_created"] for r in results)
    return {
        "results": results,
        "total_batches_created": total_created,
        "status": "completed",
    }

async def main():
    """Main execution function."""
    # Get the database URL from the environment
    database_url = os.getenv("PRODUCTION_DATABASE_URL")
    if not database_url:
        logger.error("PRODUCTION_DATABASE_URL environment variable must be set")
        return 1

    # Ensure the asyncpg driver is used
    if database_url.startswith("postgresql://"):
        database_url = database_url.replace("postgresql://", "postgresql+asyncpg://", 1)
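    # e.g. postgresql://user:pass@host:5432/production
    #  ->  postgresql+asyncpg://user:pass@host:5432/production
    # (count=1 in replace() swaps only the scheme and leaves the rest of the URL intact)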

    # Create the async engine and session factory
    engine = create_async_engine(database_url, echo=False)
    async_session = sessionmaker(engine, class_=AsyncSession, expire_on_commit=False)

    try:
        async with async_session() as session:
            result = await seed_all(session)
            logger.info(
                "Production batches seed completed successfully!",
                total_batches=result["total_batches_created"],
                status=result["status"],
            )

            # Print summary
            print("\n" + "=" * 60)
            print("DEMO PRODUCTION BATCHES SEED SUMMARY")
            print("=" * 60)
            for tenant_result in result["results"]:
                tenant_id = tenant_result["tenant_id"]
                count = tenant_result["batches_created"]
                skipped = tenant_result.get("skipped", False)
                status = "SKIPPED (already exists)" if skipped else f"CREATED {count} batches"
                print(f"Tenant {tenant_id}: {status}")
            print(f"\nTotal Batches Created: {result['total_batches_created']}")
            print("=" * 60 + "\n")
            return 0
    except Exception as e:
        logger.error(f"Production batches seed failed: {str(e)}", exc_info=True)
        return 1
    finally:
        await engine.dispose()


if __name__ == "__main__":
    exit_code = asyncio.run(main())
    sys.exit(exit_code)


@@ -1,243 +0,0 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Demo Equipment Seeding Script for Production Service

Creates production equipment for demo template tenants.
This script runs as a Kubernetes init job inside the production-service container.
"""
import asyncio
import uuid
import sys
import os
import json
from datetime import datetime, timedelta
from pathlib import Path

# Add the service root to the path so `app.*` imports resolve
sys.path.insert(0, str(Path(__file__).parent.parent.parent))

from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine
from sqlalchemy.orm import sessionmaker
from sqlalchemy import select
import structlog

from app.models.production import Equipment, EquipmentType, EquipmentStatus

# Add the repo root to the path for shared demo utilities
sys.path.insert(0, str(Path(__file__).parent.parent.parent.parent))
from shared.utils.demo_dates import BASE_REFERENCE_DATE

# Configure logging
logger = structlog.get_logger()

# Base demo tenant IDs
DEMO_TENANT_PROFESSIONAL = uuid.UUID("a1b2c3d4-e5f6-47a8-b9c0-d1e2f3a4b5c6")  # Individual bakery
DEMO_TENANT_ENTERPRISE_CHAIN = uuid.UUID("c3d4e5f6-a7b8-49c0-d1e2-f3a4b5c6d7e8")  # Enterprise parent (Obrador)

def load_equipment_data():
    """Load equipment data from JSON file."""
    data_file = Path(__file__).parent / "equipos_es.json"
    if not data_file.exists():
        raise FileNotFoundError(f"Equipment data file not found: {data_file}")
    with open(data_file, 'r', encoding='utf-8') as f:
        return json.load(f)


def calculate_date_from_offset(offset_days: int) -> datetime:
    """Calculate a date offset from BASE_REFERENCE_DATE."""
    return BASE_REFERENCE_DATE + timedelta(days=offset_days)

async def seed_equipment_for_tenant(
    db: AsyncSession,
    tenant_id: uuid.UUID,
    tenant_name: str,
    equipment_list: list,
):
    """Seed equipment for a specific tenant."""
    logger.info(f"Seeding equipment for: {tenant_name}", tenant_id=str(tenant_id))

    # Check if equipment already exists (same idempotency probe as the batch seeder)
    result = await db.execute(
        select(Equipment).where(Equipment.tenant_id == tenant_id).limit(1)
    )
    existing = result.scalar_one_or_none()
    if existing:
        logger.info(f"Equipment already exists for {tenant_name}, skipping seed")
        return {"tenant_id": str(tenant_id), "equipment_created": 0, "skipped": True}

    count = 0
    for equip_data in equipment_list:
        # Calculate dates from offsets
        install_date = None
        if "install_date_offset_days" in equip_data:
            install_date = calculate_date_from_offset(equip_data["install_date_offset_days"])

        last_maintenance_date = None
        if "last_maintenance_offset_days" in equip_data:
            last_maintenance_date = calculate_date_from_offset(equip_data["last_maintenance_offset_days"])

        # Next maintenance falls one interval after the last maintenance
        next_maintenance_date = None
        if last_maintenance_date and equip_data.get("maintenance_interval_days"):
            next_maintenance_date = last_maintenance_date + timedelta(
                days=equip_data["maintenance_interval_days"]
            )
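
        # Worked example: last_maintenance_offset_days=-30 with maintenance_interval_days=90
        # puts the last service 30 days before the reference date and the next one due
        # 60 days after it.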

        # Map status string to enum, defaulting to OPERATIONAL
        status_mapping = {
            "operational": EquipmentStatus.OPERATIONAL,
            "warning": EquipmentStatus.WARNING,
            "maintenance": EquipmentStatus.MAINTENANCE,
            "down": EquipmentStatus.DOWN,
        }
        status = status_mapping.get(equip_data["status"], EquipmentStatus.OPERATIONAL)

        # Map type string to enum, defaulting to OTHER
        type_mapping = {
            "oven": EquipmentType.OVEN,
            "mixer": EquipmentType.MIXER,
            "proofer": EquipmentType.PROOFER,
            "freezer": EquipmentType.FREEZER,
            "packaging": EquipmentType.PACKAGING,
            "other": EquipmentType.OTHER,
        }
        equipment_type = type_mapping.get(equip_data["type"], EquipmentType.OTHER)

        # Generate a tenant-specific equipment ID by XOR-ing the base ID with the tenant ID
        base_equipment_id = uuid.UUID(equip_data["id"])
        equipment_id = uuid.UUID(int=base_equipment_id.int ^ tenant_id.int)
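        # The XOR mapping is deterministic, collision-free within a tenant, and reversible:
        # equipment_id.int ^ tenant_id.int recovers the base ID. Unlike the fixed-offset
        # scheme in the batch seeder, XOR stays within the 128-bit UUID space by construction.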

        # Create equipment
        equipment = Equipment(
            id=equipment_id,
            tenant_id=tenant_id,
            name=equip_data["name"],
            type=equipment_type,
            model=equip_data.get("model"),
            serial_number=equip_data.get("serial_number"),
            location=equip_data.get("location"),
            status=status,
            power_kw=equip_data.get("power_kw"),
            capacity=equip_data.get("capacity"),
            efficiency_percentage=equip_data.get("efficiency_percentage"),
            current_temperature=equip_data.get("current_temperature"),
            target_temperature=equip_data.get("target_temperature"),
            maintenance_interval_days=equip_data.get("maintenance_interval_days"),
            last_maintenance_date=last_maintenance_date,
            next_maintenance_date=next_maintenance_date,
            install_date=install_date,
            notes=equip_data.get("notes"),
            created_at=BASE_REFERENCE_DATE,
            updated_at=BASE_REFERENCE_DATE,
        )
        db.add(equipment)
        count += 1
        logger.debug(f"Created equipment: {equipment.name}", equipment_id=str(equipment.id))

    await db.commit()
    logger.info(f"Successfully created {count} equipment items for {tenant_name}")
    return {
        "tenant_id": str(tenant_id),
        "equipment_created": count,
        "skipped": False,
    }

async def seed_all(db: AsyncSession):
    """Seed all demo tenants with equipment."""
    logger.info("Starting demo equipment seed process")

    data = load_equipment_data()
    results = []

    # Seed the Professional bakery (single location)
    result_professional = await seed_equipment_for_tenant(
        db,
        DEMO_TENANT_PROFESSIONAL,
        "Panadería Artesana Madrid (Professional)",
        data["equipos_individual_bakery"],
    )
    results.append(result_professional)

    # Seed the Enterprise parent (central production - Obrador), preferring the
    # enterprise equipment list and falling back to the individual bakery list
    enterprise_equipment_key = "equipos_enterprise_chain" if "equipos_enterprise_chain" in data else "equipos_individual_bakery"
    result_enterprise_parent = await seed_equipment_for_tenant(
        db,
        DEMO_TENANT_ENTERPRISE_CHAIN,
        "Panadería Central - Obrador Madrid (Enterprise Parent)",
        data[enterprise_equipment_key],
    )
    results.append(result_enterprise_parent)

    total_created = sum(r["equipment_created"] for r in results)
    return {
        "results": results,
        "total_equipment_created": total_created,
        "status": "completed",
    }

async def main():
    """Main execution function."""
    # Get the database URL from the environment
    database_url = os.getenv("PRODUCTION_DATABASE_URL")
    if not database_url:
        logger.error("PRODUCTION_DATABASE_URL environment variable must be set")
        return 1

    # Ensure the asyncpg driver is used (same scheme swap as the batch seeder)
    if database_url.startswith("postgresql://"):
        database_url = database_url.replace("postgresql://", "postgresql+asyncpg://", 1)

    # Create the async engine and session factory
    engine = create_async_engine(database_url, echo=False)
    async_session = sessionmaker(engine, class_=AsyncSession, expire_on_commit=False)

    try:
        async with async_session() as session:
            result = await seed_all(session)
            logger.info(
                "Equipment seed completed successfully!",
                total_equipment=result["total_equipment_created"],
                status=result["status"],
            )

            # Print summary
            print("\n" + "=" * 60)
            print("DEMO EQUIPMENT SEED SUMMARY")
            print("=" * 60)
            for tenant_result in result["results"]:
                tenant_id = tenant_result["tenant_id"]
                count = tenant_result["equipment_created"]
                skipped = tenant_result.get("skipped", False)
                status = "SKIPPED (already exists)" if skipped else f"CREATED {count} items"
                print(f"Tenant {tenant_id}: {status}")
            print(f"\nTotal Equipment Created: {result['total_equipment_created']}")
            print("=" * 60 + "\n")
            return 0
    except Exception as e:
        logger.error(f"Equipment seed failed: {str(e)}", exc_info=True)
        return 1
    finally:
        await engine.dispose()


if __name__ == "__main__":
    exit_code = asyncio.run(main())
    sys.exit(exit_code)


@@ -1,218 +0,0 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Demo Quality Templates Seeding Script for Production Service

Creates quality check templates for demo template tenants.
This script runs as a Kubernetes init job inside the production-service container.
"""
import asyncio
import uuid
import sys
import os
import json
from pathlib import Path

# Add the service root to the path so `app.*` imports resolve
sys.path.insert(0, str(Path(__file__).parent.parent.parent))

from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine
from sqlalchemy.orm import sessionmaker
from sqlalchemy import select
import structlog

from app.models.production import QualityCheckTemplate

# Add the repo root to the path for shared demo utilities
sys.path.insert(0, str(Path(__file__).parent.parent.parent.parent))
from shared.utils.demo_dates import BASE_REFERENCE_DATE

# Configure logging
logger = structlog.get_logger()

# Base demo tenant IDs
DEMO_TENANT_PROFESSIONAL = uuid.UUID("a1b2c3d4-e5f6-47a8-b9c0-d1e2f3a4b5c6")  # Individual bakery
DEMO_TENANT_ENTERPRISE_CHAIN = uuid.UUID("c3d4e5f6-a7b8-49c0-d1e2-f3a4b5c6d7e8")  # Enterprise parent (Obrador)

# System user ID (first admin user from the auth service)
SYSTEM_USER_ID = uuid.UUID("50000000-0000-0000-0000-000000000004")

def load_quality_templates_data():
    """Load quality templates data from JSON file."""
    data_file = Path(__file__).parent / "plantillas_calidad_es.json"
    if not data_file.exists():
        raise FileNotFoundError(f"Quality templates data file not found: {data_file}")
    with open(data_file, 'r', encoding='utf-8') as f:
        return json.load(f)


# The model stores check types and stages as plain strings, so no enum mapping is needed

async def seed_quality_templates_for_tenant(
    db: AsyncSession,
    tenant_id: uuid.UUID,
    tenant_name: str,
    templates_list: list,
):
    """Seed quality templates for a specific tenant."""
    logger.info(f"Seeding quality templates for: {tenant_name}", tenant_id=str(tenant_id))

    # Check if templates already exist (same idempotency probe as the other seeders)
    result = await db.execute(
        select(QualityCheckTemplate).where(QualityCheckTemplate.tenant_id == tenant_id).limit(1)
    )
    existing = result.scalar_one_or_none()
    if existing:
        logger.info(f"Quality templates already exist for {tenant_name}, skipping seed")
        return {"tenant_id": str(tenant_id), "templates_created": 0, "skipped": True}

    count = 0
    for template_data in templates_list:
        # Use strings directly (the model doesn't use enums)
        check_type = template_data["check_type"]
        applicable_stages = template_data.get("applicable_stages", [])

        # The professional tenant keeps the original IDs from the JSON; the enterprise
        # tenant gets new, deterministic UUIDs derived from them
        if tenant_id == DEMO_TENANT_PROFESSIONAL:
            template_id = uuid.UUID(template_data["id"])
        else:
            base_uuid = uuid.UUID(template_data["id"])
            # Add a fixed offset to create a unique but deterministic ID
            template_id = uuid.UUID(int=base_uuid.int + 0x10000000000000000000000000000000)
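            # Same fixed-offset (2**124) derivation as the batch seeder, with the same
            # caveat: it assumes no seed ID starts with the hex digit F.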

        # Create quality check template
        template = QualityCheckTemplate(
            id=template_id,
            tenant_id=tenant_id,
            name=template_data["name"],
            template_code=template_data["template_code"],
            check_type=check_type,
            category=template_data.get("category"),
            description=template_data.get("description"),
            instructions=template_data.get("instructions"),
            parameters=template_data.get("parameters"),
            thresholds=template_data.get("thresholds"),
            scoring_criteria=template_data.get("scoring_criteria"),
            is_active=template_data.get("is_active", True),
            is_required=template_data.get("is_required", False),
            is_critical=template_data.get("is_critical", False),
            weight=template_data.get("weight", 1.0),
            min_value=template_data.get("min_value"),
            max_value=template_data.get("max_value"),
            target_value=template_data.get("target_value"),
            unit=template_data.get("unit"),
            tolerance_percentage=template_data.get("tolerance_percentage"),
            applicable_stages=applicable_stages,
            created_by=SYSTEM_USER_ID,
            created_at=BASE_REFERENCE_DATE,
            updated_at=BASE_REFERENCE_DATE,
        )
        db.add(template)
        count += 1
        logger.debug(f"Created quality template: {template.name}", template_id=str(template.id))

    await db.commit()
    logger.info(f"Successfully created {count} quality templates for {tenant_name}")
    return {
        "tenant_id": str(tenant_id),
        "templates_created": count,
        "skipped": False,
    }

async def seed_all(db: AsyncSession):
    """Seed all demo tenants with quality templates."""
    logger.info("Starting demo quality templates seed process")

    data = load_quality_templates_data()
    results = []

    # Seed the Professional bakery (single location)
    result_professional = await seed_quality_templates_for_tenant(
        db,
        DEMO_TENANT_PROFESSIONAL,
        "Panadería Artesana Madrid (Professional)",
        data["plantillas_calidad"],
    )
    results.append(result_professional)

    # Seed the Enterprise parent (central production - Obrador) with the same templates
    result_enterprise_parent = await seed_quality_templates_for_tenant(
        db,
        DEMO_TENANT_ENTERPRISE_CHAIN,
        "Panadería Central - Obrador Madrid (Enterprise Parent)",
        data["plantillas_calidad"],
    )
    results.append(result_enterprise_parent)

    total_created = sum(r["templates_created"] for r in results)
    return {
        "results": results,
        "total_templates_created": total_created,
        "status": "completed",
    }

async def main():
    """Main execution function."""
    # Get the database URL from the environment
    database_url = os.getenv("PRODUCTION_DATABASE_URL")
    if not database_url:
        logger.error("PRODUCTION_DATABASE_URL environment variable must be set")
        return 1

    # Ensure the asyncpg driver is used (same scheme swap as the other seeders)
    if database_url.startswith("postgresql://"):
        database_url = database_url.replace("postgresql://", "postgresql+asyncpg://", 1)

    # Create the async engine and session factory
    engine = create_async_engine(database_url, echo=False)
    async_session = sessionmaker(engine, class_=AsyncSession, expire_on_commit=False)

    try:
        async with async_session() as session:
            result = await seed_all(session)
            logger.info(
                "Quality templates seed completed successfully!",
                total_templates=result["total_templates_created"],
                status=result["status"],
            )

            # Print summary
            print("\n" + "=" * 60)
            print("DEMO QUALITY TEMPLATES SEED SUMMARY")
            print("=" * 60)
            for tenant_result in result["results"]:
                tenant_id = tenant_result["tenant_id"]
                count = tenant_result["templates_created"]
                skipped = tenant_result.get("skipped", False)
                status = "SKIPPED (already exists)" if skipped else f"CREATED {count} templates"
                print(f"Tenant {tenant_id}: {status}")
            print(f"\nTotal Templates Created: {result['total_templates_created']}")
            print("=" * 60 + "\n")
            return 0
    except Exception as e:
        logger.error(f"Quality templates seed failed: {str(e)}", exc_info=True)
        return 1
    finally:
        await engine.dispose()


if __name__ == "__main__":
    exit_code = asyncio.run(main())
    sys.exit(exit_code)