demo seed change
This commit is contained in:
@@ -1,392 +0,0 @@
|
||||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
Demo Recipes Seeding Script for Recipes Service
|
||||
Creates realistic Spanish recipes for demo template tenants
|
||||
|
||||
This script runs as a Kubernetes init job inside the recipes-service container.
|
||||
It populates the template tenants with a comprehensive catalog of recipes using pre-defined UUIDs.
|
||||
|
||||
Usage:
|
||||
python /app/scripts/demo/seed_demo_recipes.py
|
||||
|
||||
Environment Variables Required:
|
||||
RECIPES_DATABASE_URL - PostgreSQL connection string for recipes database
|
||||
DEMO_MODE - Set to 'production' for production seeding
|
||||
LOG_LEVEL - Logging level (default: INFO)
|
||||
"""
|
||||
|
||||
import asyncio
|
||||
import uuid
|
||||
import sys
|
||||
import os
|
||||
import json
|
||||
from datetime import datetime, timezone, timedelta
|
||||
from pathlib import Path
|
||||
import random
|
||||
|
||||
# Add app to path
|
||||
sys.path.insert(0, str(Path(__file__).parent.parent.parent))
|
||||
# Add shared to path for demo utilities
|
||||
sys.path.insert(0, str(Path(__file__).parent.parent.parent.parent.parent))
|
||||
|
||||
from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine
|
||||
from sqlalchemy.orm import sessionmaker
|
||||
from sqlalchemy import select
|
||||
import structlog
|
||||
|
||||
from shared.utils.demo_dates import BASE_REFERENCE_DATE
|
||||
|
||||
from app.models.recipes import (
|
||||
Recipe, RecipeIngredient, ProductionBatch,
|
||||
RecipeStatus, ProductionStatus, ProductionPriority, MeasurementUnit
|
||||
)
|
||||
|
||||
# Configure structured logging: log level + ISO-8601 timestamps, rendered
# human-readably for kubectl logs (this runs as a K8s init job).
structlog.configure(
    processors=[
        structlog.stdlib.add_log_level,
        structlog.processors.TimeStamper(fmt="iso"),
        structlog.dev.ConsoleRenderer()
    ]
)

logger = structlog.get_logger()

# Fixed Demo Tenant IDs (must match tenant service) — hardcoded so the same
# tenants are addressable across all demo seeding scripts/services.
DEMO_TENANT_PROFESSIONAL = uuid.UUID("a1b2c3d4-e5f6-47a8-b9c0-d1e2f3a4b5c6")
DEMO_TENANT_ENTERPRISE_CHAIN = uuid.UUID("c3d4e5f6-a7b8-49c0-d1e2-f3a4b5c6d7e8")  # Enterprise parent (Obrador)
|
||||
|
||||
|
||||
def load_recipes_data():
    """
    Load the demo recipe catalog from ``recetas_es.json``.

    The data file is expected to ship alongside this script inside the
    container image.

    Returns:
        list: Recipe dictionaries found under the JSON "recetas" key.

    Raises:
        FileNotFoundError: If recetas_es.json is not next to this script.
    """
    data_file = Path(__file__).parent / "recetas_es.json"

    # Fail fast with an actionable message rather than a raw open() error.
    if not data_file.exists():
        raise FileNotFoundError(
            f"Recipes data file not found: {data_file}. "
            "Make sure recetas_es.json is in the same directory as this script."
        )

    logger.info("Loading recipes data", file=str(data_file))

    data = json.loads(data_file.read_text(encoding='utf-8'))

    recipes = data.get("recetas", [])
    logger.info(f"Loaded {len(recipes)} recipes from JSON")
    return recipes
|
||||
|
||||
|
||||
async def seed_recipes_for_tenant(
    db: AsyncSession,
    tenant_id: uuid.UUID,
    tenant_name: str,
    recipes_data: list
) -> dict:
    """
    Seed recipes for a specific tenant using pre-defined UUIDs

    Recipe, finished-product, and ingredient IDs are derived deterministically
    by XOR-ing each catalog entry's base UUID with the tenant's UUID, so the
    same catalog entry maps to a stable, tenant-unique ID across services.
    The function is idempotent: recipes that already exist are skipped.

    Args:
        db: Database session
        tenant_id: UUID of the tenant
        tenant_name: Name of the tenant (for logging)
        recipes_data: List of recipe dictionaries with pre-defined IDs

    Returns:
        Dict with seeding statistics
    """
    logger.info("─" * 80)
    logger.info(f"Seeding recipes for: {tenant_name}")
    logger.info(f"Tenant ID: {tenant_id}")
    logger.info("─" * 80)

    # Counters for the returned statistics dict.
    created_recipes = 0
    skipped_recipes = 0
    created_ingredients = 0
    created_batches = 0

    for recipe_data in recipes_data:
        recipe_name = recipe_data["name"]

        # Generate tenant-specific UUIDs (same approach as inventory):
        # XOR the 128-bit base UUID with the 128-bit tenant UUID.
        base_recipe_id = uuid.UUID(recipe_data["id"])
        base_product_id = uuid.UUID(recipe_data["finished_product_id"])
        tenant_int = int(tenant_id.hex, 16)

        recipe_id = uuid.UUID(int=tenant_int ^ int(base_recipe_id.hex, 16))
        finished_product_id = uuid.UUID(int=tenant_int ^ int(base_product_id.hex, 16))

        # Check if recipe already exists (makes re-running the job a no-op
        # for previously seeded recipes)
        result = await db.execute(
            select(Recipe).where(
                Recipe.tenant_id == tenant_id,
                Recipe.id == recipe_id
            )
        )
        existing_recipe = result.scalars().first()

        if existing_recipe:
            logger.debug(f" ⏭️ Skipping recipe (exists): {recipe_name}")
            skipped_recipes += 1
            continue

        # Create recipe using pre-defined UUID.
        # NOTE(review): recipe_code is numbered from created_recipes, so a
        # partial re-run (where some recipes are skipped) can assign codes
        # that differ from a fresh seed — confirm this is acceptable for
        # demo data.
        recipe = Recipe(
            id=recipe_id,
            tenant_id=tenant_id,
            name=recipe_name,
            recipe_code=f"REC-{created_recipes + 1:03d}",
            version="1.0",
            finished_product_id=finished_product_id,
            description=recipe_data.get("description"),
            category=recipe_data.get("category"),
            cuisine_type=recipe_data.get("cuisine_type"),
            difficulty_level=recipe_data.get("difficulty_level", 1),
            yield_quantity=recipe_data.get("yield_quantity"),
            yield_unit=MeasurementUnit(recipe_data.get("yield_unit", "units")),
            prep_time_minutes=recipe_data.get("prep_time_minutes"),
            cook_time_minutes=recipe_data.get("cook_time_minutes"),
            total_time_minutes=recipe_data.get("total_time_minutes"),
            rest_time_minutes=recipe_data.get("rest_time_minutes"),
            instructions=recipe_data.get("instructions"),
            preparation_notes=recipe_data.get("preparation_notes"),
            storage_instructions=recipe_data.get("storage_instructions"),
            quality_check_configuration=recipe_data.get("quality_check_configuration"),
            status=RecipeStatus.ACTIVE,
            is_seasonal=recipe_data.get("is_seasonal", False),
            is_signature_item=recipe_data.get("is_signature_item", False),
            created_at=datetime.now(timezone.utc),
            updated_at=datetime.now(timezone.utc)
        )

        db.add(recipe)
        created_recipes += 1
        logger.debug(f" ✅ Created recipe: {recipe_name}")

        # Create recipe ingredients using tenant-specific ingredient IDs
        # (JSON key "ingredientes" is Spanish, matching the source catalog)
        for ing_data in recipe_data.get("ingredientes", []):
            base_ingredient_id = uuid.UUID(ing_data["ingredient_id"])
            ingredient_id = uuid.UUID(int=tenant_int ^ int(base_ingredient_id.hex, 16))

            # Parse unit; unknown unit strings fall back to grams.
            unit_str = ing_data.get("unit", "g")
            try:
                unit = MeasurementUnit(unit_str)
            except ValueError:
                logger.warning(f" ⚠️ Invalid unit: {unit_str}, using GRAMS")
                unit = MeasurementUnit.GRAMS

            recipe_ingredient = RecipeIngredient(
                id=uuid.uuid4(),
                tenant_id=tenant_id,
                recipe_id=recipe_id,
                ingredient_id=ingredient_id,
                quantity=ing_data["quantity"],
                unit=unit,
                preparation_method=ing_data.get("preparation_method"),
                ingredient_order=ing_data.get("ingredient_order", 1),
                ingredient_group=ing_data.get("ingredient_group")
            )

            db.add(recipe_ingredient)
            created_ingredients += 1

        # Create some sample production batches (historical data)
        num_batches = random.randint(3, 8)
        for i in range(num_batches):
            # Random date in the past 30 days (relative to BASE_REFERENCE_DATE,
            # not "now", so demo data is stable regardless of when the job runs)
            days_ago = random.randint(1, 30)
            production_date = BASE_REFERENCE_DATE - timedelta(days=days_ago)

            # Random multiplier and quantity; actual yield varies ±5% of plan
            multiplier = random.choice([0.5, 1.0, 1.5, 2.0])
            planned_qty = recipe_data.get("yield_quantity", 10) * multiplier
            actual_qty = planned_qty * random.uniform(0.95, 1.05)

            batch = ProductionBatch(
                id=uuid.uuid4(),
                tenant_id=tenant_id,
                recipe_id=recipe_id,
                batch_number=f"BATCH-{tenant_id.hex[:8].upper()}-{i+1:04d}",
                production_date=production_date,
                planned_quantity=planned_qty,
                actual_quantity=actual_qty,
                yield_percentage=(actual_qty / planned_qty * 100) if planned_qty > 0 else 100,
                batch_size_multiplier=multiplier,
                status=ProductionStatus.COMPLETED,
                priority=ProductionPriority.NORMAL,
                quality_score=random.uniform(7.5, 9.5),
                created_at=production_date,
                updated_at=production_date
            )

            db.add(batch)
            created_batches += 1

    # Commit all changes for this tenant in a single transaction
    await db.commit()

    logger.info(f" 📊 Recipes: {created_recipes}, Ingredients: {created_ingredients}, Batches: {created_batches}")
    logger.info("")

    return {
        "tenant_id": str(tenant_id),
        "tenant_name": tenant_name,
        "recipes_created": created_recipes,
        "recipes_skipped": skipped_recipes,
        "recipe_ingredients_created": created_ingredients,
        "production_batches_created": created_batches,
        "total_recipes": len(recipes_data)
    }
|
||||
|
||||
|
||||
async def seed_recipes(db: AsyncSession):
    """
    Seed the demo recipe catalog into every demo template tenant.

    Args:
        db: Database session shared across all tenant seeding runs.

    Returns:
        Dict with aggregate seeding statistics plus per-tenant results.
    """
    logger.info("=" * 80)
    logger.info("📚 Starting Demo Recipes Seeding")
    logger.info("=" * 80)

    # Load the shared catalog once; a missing data file aborts the job loudly.
    try:
        recipes_data = load_recipes_data()
    except FileNotFoundError as err:
        logger.error(str(err))
        raise

    # Template tenants to populate, in seeding order:
    # single-location Professional, then the Enterprise central obrador.
    tenant_targets = (
        (DEMO_TENANT_PROFESSIONAL, "Panadería Artesana Madrid (Professional)"),
        (DEMO_TENANT_ENTERPRISE_CHAIN, "Panadería Central - Obrador Madrid (Enterprise Parent)"),
    )

    results = []
    for demo_tenant_id, demo_tenant_name in tenant_targets:
        logger.info("")
        results.append(
            await seed_recipes_for_tenant(db, demo_tenant_id, demo_tenant_name, recipes_data)
        )

    logger.info("=" * 80)
    logger.info("✅ Demo Recipes Seeding Completed")
    logger.info("=" * 80)

    return {
        "service": "recipes",
        "tenants_seeded": len(results),
        "total_recipes_created": sum(r["recipes_created"] for r in results),
        "total_recipe_ingredients_created": sum(r["recipe_ingredients_created"] for r in results),
        "total_production_batches_created": sum(r["production_batches_created"] for r in results),
        "total_skipped": sum(r["recipes_skipped"] for r in results),
        "results": results
    }
|
||||
|
||||
|
||||
async def main():
    """
    Entry point for the seeding job.

    Reads the database URL from the environment, builds an async engine,
    runs the seeding routine, and logs a human-readable summary.

    Returns:
        int: Process exit code — 0 on success, 1 on failure.
    """

    logger.info("Demo Recipes Seeding Script Starting")
    # Use f-strings for interpolation, consistent with the rest of this file:
    # the configured structlog processor chain has no
    # PositionalArgumentsFormatter, so stdlib-style "%s" lazy arguments are
    # not reliably interpolated here.
    logger.info(f"Mode: {os.getenv('DEMO_MODE', 'development')}")
    logger.info(f"Log Level: {os.getenv('LOG_LEVEL', 'INFO')}")

    # Get database URL from environment (service-specific variable wins).
    database_url = os.getenv("RECIPES_DATABASE_URL") or os.getenv("DATABASE_URL")
    if not database_url:
        logger.error("❌ RECIPES_DATABASE_URL or DATABASE_URL environment variable must be set")
        return 1

    # Convert a plain sync URL to the asyncpg driver form if needed.
    if database_url.startswith("postgresql://"):
        database_url = database_url.replace("postgresql://", "postgresql+asyncpg://", 1)

    logger.info("Connecting to recipes database")

    # Create engine and session factory.
    engine = create_async_engine(
        database_url,
        echo=False,
        pool_pre_ping=True,  # validate pooled connections before handing them out
        pool_size=5,
        max_overflow=10
    )

    session_maker = sessionmaker(
        engine,
        class_=AsyncSession,
        expire_on_commit=False
    )

    try:
        async with session_maker() as session:
            result = await seed_recipes(session)

        logger.info("")
        logger.info("📊 Seeding Summary:")
        logger.info(f" ✅ Tenants seeded: {result['tenants_seeded']}")
        logger.info(f" ✅ Recipes created: {result['total_recipes_created']}")
        logger.info(f" ✅ Recipe ingredients: {result['total_recipe_ingredients_created']}")
        logger.info(f" ✅ Production batches: {result['total_production_batches_created']}")
        logger.info(f" ⏭️ Skipped: {result['total_skipped']}")
        logger.info("")

        # Print per-tenant details
        for tenant_result in result['results']:
            logger.info(
                f" {tenant_result['tenant_name']}: "
                f"{tenant_result['recipes_created']} recipes, "
                f"{tenant_result['recipe_ingredients_created']} ingredients, "
                f"{tenant_result['production_batches_created']} batches"
            )

        logger.info("")
        logger.info("🎉 Success! Recipe catalog is ready for cloning.")
        logger.info("")
        # NOTE(review): this is a hardcoded highlight list, not derived from
        # recetas_es.json — it can drift from the actual catalog contents.
        logger.info("Recipes created:")
        logger.info(" • Baguette Francesa Tradicional")
        logger.info(" • Croissant de Mantequilla Artesanal")
        logger.info(" • Pan de Pueblo con Masa Madre")
        logger.info(" • Napolitana de Chocolate")
        logger.info("")
        logger.info("Note: All IDs are pre-defined and hardcoded for cross-service consistency")
        logger.info("")

        return 0

    except Exception as e:
        logger.error("=" * 80)
        logger.error("❌ Demo Recipes Seeding Failed")
        logger.error("=" * 80)
        logger.error(f"Error: {e}")
        logger.error("", exc_info=True)  # attach the full traceback to the log
        return 1

    finally:
        # Always release pooled connections, success or failure.
        await engine.dispose()
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Run the async entry point and propagate its exit code to the shell
    # (Kubernetes uses it to mark the init job as succeeded or failed).
    sys.exit(asyncio.run(main()))
|
||||
Reference in New Issue
Block a user