demo seed change
@@ -8,10 +8,12 @@ from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select, delete, func
import structlog
import uuid
from uuid import UUID
from datetime import datetime, timezone, timedelta
from typing import Optional
import os
import sys
import json
from pathlib import Path

sys.path.insert(0, str(Path(__file__).parent.parent.parent.parent))
@@ -26,7 +28,7 @@ from app.models.recipes import (
from app.core.config import settings

logger = structlog.get_logger()
router = APIRouter(prefix="/internal/demo", tags=["internal"])
router = APIRouter()

# Base demo tenant IDs
DEMO_TENANT_PROFESSIONAL = "a1b2c3d4-e5f6-47a8-b9c0-d1e2f3a4b5c6"
@@ -40,7 +42,7 @@ def verify_internal_api_key(x_internal_api_key: Optional[str] = Header(None)):
    return True


@router.post("/clone")
@router.post("/internal/demo/clone")
async def clone_demo_data(
    base_tenant_id: str,
    virtual_tenant_id: str,
@@ -53,376 +55,238 @@ async def clone_demo_data(
    """
    Clone recipes service data for a virtual demo tenant

    Clones:
    - Recipes (master recipe definitions)
    - Recipe ingredients (with measurements)
    - Production batches (historical production runs)
    - Production ingredient consumption (actual usage tracking)

    This endpoint creates fresh demo data by:
    1. Loading seed data from JSON files
    2. Applying XOR-based ID transformation
    3. Adjusting dates relative to session creation time
    4. Creating records in the virtual tenant

    Args:
        base_tenant_id: Template tenant UUID to clone from
        base_tenant_id: Template tenant UUID (for reference)
        virtual_tenant_id: Target virtual tenant UUID
        demo_account_type: Type of demo account
        session_id: Originating session ID for tracing
        session_created_at: ISO timestamp when demo session was created (for date adjustment)
        session_created_at: Session creation timestamp for date adjustment

    Returns:
        Cloning status and record counts
    """
    start_time = datetime.now(timezone.utc)

    # Parse session_created_at or fall back to now
    if session_created_at:
        try:
            session_time = datetime.fromisoformat(session_created_at.replace('Z', '+00:00'))
        except (ValueError, AttributeError) as e:
            logger.warning(
                "Invalid session_created_at format, using current time",
                session_created_at=session_created_at,
                error=str(e)
            )
            session_time = datetime.now(timezone.utc)
    else:
        logger.warning("session_created_at not provided, using current time")
        session_time = datetime.now(timezone.utc)

    logger.info(
        "Starting recipes data cloning",
        base_tenant_id=base_tenant_id,
        virtual_tenant_id=virtual_tenant_id,
        demo_account_type=demo_account_type,
        session_id=session_id,
        session_time=session_time.isoformat()
    )

    try:
        # Validate UUIDs
        base_uuid = uuid.UUID(base_tenant_id)
        virtual_uuid = uuid.UUID(virtual_tenant_id)

        # Parse session creation time for date adjustment
        if session_created_at:
            try:
                session_time = datetime.fromisoformat(session_created_at.replace('Z', '+00:00'))
            except (ValueError, AttributeError):
                session_time = start_time
        else:
            session_time = start_time

        logger.info(
            "Starting recipes data cloning",
            base_tenant_id=base_tenant_id,
            virtual_tenant_id=virtual_tenant_id,
            demo_account_type=demo_account_type,
            session_id=session_id,
            session_created_at=session_created_at
        )
        # Load seed data from JSON files
        try:
            from shared.utils.seed_data_paths import get_seed_data_path

            if demo_account_type == "professional":
                json_file = get_seed_data_path("professional", "04-recipes.json")
            elif demo_account_type == "enterprise":
                json_file = get_seed_data_path("enterprise", "04-recipes.json")
            else:
                raise ValueError(f"Invalid demo account type: {demo_account_type}")

        except ImportError:
            # Fallback to original path
            seed_data_dir = Path(__file__).parent.parent.parent.parent / "infrastructure" / "seed-data"
            if demo_account_type == "professional":
                json_file = seed_data_dir / "professional" / "04-recipes.json"
            elif demo_account_type == "enterprise":
                json_file = seed_data_dir / "enterprise" / "parent" / "04-recipes.json"
            else:
                raise ValueError(f"Invalid demo account type: {demo_account_type}")

        if not json_file.exists():
            raise HTTPException(
                status_code=404,
                detail=f"Seed data file not found: {json_file}"
            )

        # Load JSON data
        with open(json_file, 'r', encoding='utf-8') as f:
            seed_data = json.load(f)

        # Track cloning statistics
        stats = {
            "recipes": 0,
            "recipe_ingredients": 0,
            "production_batches": 0,
            "ingredient_consumptions": 0
            "recipe_ingredients": 0
        }

        # Recipe ID mapping (old -> new)
        recipe_id_map = {}
        recipe_ingredient_map = {}

        # Clone Recipes
        logger.info("Starting to clone recipes", base_tenant=str(base_uuid))
        result = await db.execute(
            select(Recipe).where(Recipe.tenant_id == base_uuid)
        )
        base_recipes = result.scalars().all()

        logger.info(
            "Found recipes to clone",
            count=len(base_recipes),
            base_tenant=str(base_uuid)
        )

        for recipe in base_recipes:
            new_recipe_id = uuid.uuid4()
            recipe_id_map[recipe.id] = new_recipe_id

            # Validate required fields before creating new recipe
            if recipe.finished_product_id is None:
                logger.warning(
                    "Recipe has null finished_product_id, skipping clone",
                    recipe_id=recipe.id,
                    recipe_name=recipe.name
        # Create Recipes
        for recipe_data in seed_data.get('recipes', []):
            # Transform recipe ID using XOR (pass the parsed UUID, matching
            # the recipe-ingredient branch below; the raw string was a bug)
            from shared.utils.demo_id_transformer import transform_id
            try:
                recipe_uuid = uuid.UUID(recipe_data['id'])
                transformed_id = transform_id(recipe_uuid, virtual_uuid)
            except ValueError as e:
                logger.error("Failed to parse recipe UUID",
                             recipe_id=recipe_data['id'],
                             error=str(e))
                raise HTTPException(
                    status_code=400,
                    detail=f"Invalid UUID format in recipe data: {str(e)}"
                )
                continue  # Skip recipes with null required field

            # Generate a unique recipe code to avoid potential duplicates
            recipe_code = f"REC-{uuid.uuid4().hex[:8].upper()}"
            # Adjust dates relative to session creation time
            adjusted_created_at = adjust_date_for_demo(
                datetime.fromisoformat(recipe_data['created_at'].replace('Z', '+00:00')),
                session_time,
                BASE_REFERENCE_DATE
            )
            adjusted_updated_at = adjust_date_for_demo(
                datetime.fromisoformat(recipe_data['updated_at'].replace('Z', '+00:00')),
                session_time,
                BASE_REFERENCE_DATE
            )
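            # A sketch of the assumed adjust_date_for_demo contract: seed
            # timestamps are authored against BASE_REFERENCE_DATE, and each is
            # shifted by the offset between the demo session start and that
            # reference, e.g.
            #
            #     def adjust_date_for_demo(original, session_time, reference):
            #         # Preserve the seed date's distance from the reference
            #         # date, re-anchored to the session creation time.
            #         return session_time + (original - reference)
            #
            # (Assumed behavior of shared.utils.demo_dates, not its source.)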

            # Map field names from seed data to model fields
            # Handle yield_quantity/yield_unit (may be named finished_product_quantity/unit in seed data)
            yield_quantity = recipe_data.get('yield_quantity') or recipe_data.get('finished_product_quantity', 1.0)
            yield_unit_str = recipe_data.get('yield_unit') or recipe_data.get('finished_product_unit', 'UNITS')

            # Convert yield_unit string to enum if needed
            if isinstance(yield_unit_str, str):
                try:
                    yield_unit = MeasurementUnit[yield_unit_str.upper()]
                except KeyError:
                    yield_unit = MeasurementUnit.UNITS
            else:
                yield_unit = yield_unit_str

            # Convert status string to enum if needed
            status = recipe_data.get('status', 'ACTIVE')
            if isinstance(status, str):
                try:
                    status = RecipeStatus[status.upper()]
                except KeyError:
                    status = RecipeStatus.ACTIVE

            new_recipe = Recipe(
                id=new_recipe_id,
                id=str(transformed_id),
                tenant_id=virtual_uuid,
                name=recipe.name,
                recipe_code=recipe_code,  # New unique code
                version=recipe.version,
                finished_product_id=recipe.finished_product_id,  # Keep product reference
                description=recipe.description,
                category=recipe.category,
                cuisine_type=recipe.cuisine_type,
                difficulty_level=recipe.difficulty_level,
                yield_quantity=recipe.yield_quantity,
                yield_unit=recipe.yield_unit,
                prep_time_minutes=recipe.prep_time_minutes,
                cook_time_minutes=recipe.cook_time_minutes,
                total_time_minutes=recipe.total_time_minutes,
                rest_time_minutes=recipe.rest_time_minutes,
                estimated_cost_per_unit=recipe.estimated_cost_per_unit,
                last_calculated_cost=recipe.last_calculated_cost,
                cost_calculation_date=recipe.cost_calculation_date,
                target_margin_percentage=recipe.target_margin_percentage,
                suggested_selling_price=recipe.suggested_selling_price,
                instructions=recipe.instructions,
                preparation_notes=recipe.preparation_notes,
                storage_instructions=recipe.storage_instructions,
                serves_count=recipe.serves_count,
                nutritional_info=recipe.nutritional_info,
                allergen_info=recipe.allergen_info,
                dietary_tags=recipe.dietary_tags,
                batch_size_multiplier=recipe.batch_size_multiplier,
                minimum_batch_size=recipe.minimum_batch_size,
                maximum_batch_size=recipe.maximum_batch_size,
                optimal_production_temperature=recipe.optimal_production_temperature,
                optimal_humidity=recipe.optimal_humidity,
                quality_check_configuration=recipe.quality_check_configuration,
                status=recipe.status,
                is_seasonal=recipe.is_seasonal,
                season_start_month=recipe.season_start_month,
                season_end_month=recipe.season_end_month,
                is_signature_item=recipe.is_signature_item,
                created_at=session_time,
                updated_at=session_time,
                created_by=recipe.created_by,
                updated_by=recipe.updated_by
                name=recipe_data['name'],
                description=recipe_data.get('description'),
                recipe_code=recipe_data.get('recipe_code'),
                version=recipe_data.get('version', '1.0'),
                status=status,
                finished_product_id=recipe_data['finished_product_id'],
                yield_quantity=yield_quantity,
                yield_unit=yield_unit,
                category=recipe_data.get('category'),
                difficulty_level=recipe_data.get('difficulty_level', 1),
                prep_time_minutes=recipe_data.get('prep_time_minutes') or recipe_data.get('preparation_time_minutes'),
                cook_time_minutes=recipe_data.get('cook_time_minutes') or recipe_data.get('baking_time_minutes'),
                total_time_minutes=recipe_data.get('total_time_minutes'),
                rest_time_minutes=recipe_data.get('rest_time_minutes') or recipe_data.get('cooling_time_minutes'),
                instructions=recipe_data.get('instructions'),
                preparation_notes=recipe_data.get('notes') or recipe_data.get('preparation_notes'),
                created_at=adjusted_created_at,
                updated_at=adjusted_updated_at
            )
            # Add to session
            db.add(new_recipe)
            stats["recipes"] += 1

            # Flush to get recipe IDs for foreign keys
            logger.debug("Flushing recipe changes to get IDs")
            await db.flush()
            # Map recipe ID for ingredients (item assignment, so earlier
            # recipes are not overwritten on each iteration)
            recipe_id_map[recipe_data['id']] = str(transformed_id)

        # Clone Recipe Ingredients
        logger.info("Cloning recipe ingredients", recipe_ingredients_count=len(recipe_id_map))
        for old_recipe_id, new_recipe_id in recipe_id_map.items():
            result = await db.execute(
                select(RecipeIngredient).where(RecipeIngredient.recipe_id == old_recipe_id)
            )
            recipe_ingredients = result.scalars().all()

            for ingredient in recipe_ingredients:
                new_ingredient_id = uuid.uuid4()
                recipe_ingredient_map[ingredient.id] = new_ingredient_id

                new_ingredient = RecipeIngredient(
                    id=new_ingredient_id,
                    tenant_id=virtual_uuid,
                    recipe_id=new_recipe_id,
                    ingredient_id=ingredient.ingredient_id,  # Keep ingredient reference
                    quantity=ingredient.quantity,
                    unit=ingredient.unit,
                    quantity_in_base_unit=ingredient.quantity_in_base_unit,
                    alternative_quantity=ingredient.alternative_quantity,
                    alternative_unit=ingredient.alternative_unit,
                    preparation_method=ingredient.preparation_method,
                    ingredient_notes=ingredient.ingredient_notes,
                    is_optional=ingredient.is_optional,
                    ingredient_order=ingredient.ingredient_order,
                    ingredient_group=ingredient.ingredient_group,
                    substitution_options=ingredient.substitution_options,
                    substitution_ratio=ingredient.substitution_ratio,
                    unit_cost=ingredient.unit_cost,
                    total_cost=ingredient.total_cost,
                    cost_updated_at=ingredient.cost_updated_at
        # Create Recipe Ingredients
        for recipe_ingredient_data in seed_data.get('recipe_ingredients', []):
            # Transform ingredient ID using XOR
            try:
                ingredient_uuid = uuid.UUID(recipe_ingredient_data['id'])
                transformed_id = transform_id(ingredient_uuid, virtual_uuid)
            except ValueError as e:
                logger.error("Failed to parse recipe ingredient UUID",
                             ingredient_id=recipe_ingredient_data['id'],
                             error=str(e))
                raise HTTPException(
                    status_code=400,
                    detail=f"Invalid UUID format in recipe ingredient data: {str(e)}"
                )
                db.add(new_ingredient)
                stats["recipe_ingredients"] += 1

        # Flush to get recipe ingredient IDs
        logger.debug("Flushing recipe ingredient changes to get IDs")
        await db.flush()

        # Clone Production Batches
        logger.info("Starting to clone production batches", base_tenant=str(base_uuid))
        result = await db.execute(
            select(ProductionBatch).where(ProductionBatch.tenant_id == base_uuid)
        )
        base_batches = result.scalars().all()

        logger.info(
            "Found production batches to clone",
            count=len(base_batches),
            base_tenant=str(base_uuid)
        )

        batch_id_map = {}

        for batch in base_batches:
            new_batch_id = uuid.uuid4()
            batch_id_map[batch.id] = new_batch_id

            # Get the new recipe ID (this might be None if the recipe was skipped due to null finished_product_id)
            new_recipe_id = recipe_id_map.get(batch.recipe_id)
            if new_recipe_id is None:
                logger.warning(
                    "Skipping production batch with no corresponding recipe",
                    batch_id=batch.id,
                    original_recipe_id=batch.recipe_id
                )
            # Get the transformed recipe ID
            recipe_id = recipe_id_map.get(recipe_ingredient_data['recipe_id'])
            if not recipe_id:
                logger.error("Recipe not found for ingredient",
                             recipe_id=recipe_ingredient_data['recipe_id'])
                continue

            # Adjust all date fields using the shared utility
            adjusted_production_date = adjust_date_for_demo(
                batch.production_date,
                session_time,
                BASE_REFERENCE_DATE
            ) if batch.production_date else None
            adjusted_planned_start = adjust_date_for_demo(
                batch.planned_start_time,
                session_time,
                BASE_REFERENCE_DATE
            ) if batch.planned_start_time else None
            adjusted_actual_start = adjust_date_for_demo(
                batch.actual_start_time,
                session_time,
                BASE_REFERENCE_DATE
            ) if batch.actual_start_time else None
            adjusted_planned_end = adjust_date_for_demo(
                batch.planned_end_time,
                session_time,
                BASE_REFERENCE_DATE
            ) if batch.planned_end_time else None
            adjusted_actual_end = adjust_date_for_demo(
                batch.actual_end_time,
                session_time,
                BASE_REFERENCE_DATE
            ) if batch.actual_end_time else None
            # Convert unit string to enum if needed
            unit_str = recipe_ingredient_data.get('unit', 'KILOGRAMS')
            if isinstance(unit_str, str):
                try:
                    unit = MeasurementUnit[unit_str.upper()]
                except KeyError:
                    # Try without 'S' for singular forms
                    try:
                        unit = MeasurementUnit[unit_str.upper().rstrip('S')]
                    except KeyError:
                        unit = MeasurementUnit.KILOGRAMS
            else:
                unit = unit_str

            new_batch = ProductionBatch(
                id=new_batch_id,
            new_recipe_ingredient = RecipeIngredient(
                id=str(transformed_id),
                tenant_id=virtual_uuid,
                recipe_id=new_recipe_id,
                batch_number=f"BATCH-{uuid.uuid4().hex[:8].upper()}",  # New batch number
                production_date=adjusted_production_date,
                planned_start_time=adjusted_planned_start,
                actual_start_time=adjusted_actual_start,
                planned_end_time=adjusted_planned_end,
                actual_end_time=adjusted_actual_end,
                planned_quantity=batch.planned_quantity,
                actual_quantity=batch.actual_quantity,
                yield_percentage=batch.yield_percentage,
                batch_size_multiplier=batch.batch_size_multiplier,
                status=batch.status,
                priority=batch.priority,
                assigned_staff=batch.assigned_staff,
                production_notes=batch.production_notes,
                quality_score=batch.quality_score,
                quality_notes=batch.quality_notes,
                defect_rate=batch.defect_rate,
                rework_required=batch.rework_required,
                planned_material_cost=batch.planned_material_cost,
                actual_material_cost=batch.actual_material_cost,
                labor_cost=batch.labor_cost,
                overhead_cost=batch.overhead_cost,
                total_production_cost=batch.total_production_cost,
                cost_per_unit=batch.cost_per_unit,
                production_temperature=batch.production_temperature,
                production_humidity=batch.production_humidity,
                oven_temperature=batch.oven_temperature,
                baking_time_minutes=batch.baking_time_minutes,
                waste_quantity=batch.waste_quantity,
                waste_reason=batch.waste_reason,
                efficiency_percentage=batch.efficiency_percentage,
                customer_order_reference=batch.customer_order_reference,
                pre_order_quantity=batch.pre_order_quantity,
                shelf_quantity=batch.shelf_quantity,
                created_at=session_time,
                updated_at=session_time,
                created_by=batch.created_by,
                completed_by=batch.completed_by
                recipe_id=recipe_id,
                ingredient_id=recipe_ingredient_data['ingredient_id'],
                quantity=recipe_ingredient_data['quantity'],
                unit=unit,
                unit_cost=recipe_ingredient_data.get('cost_per_unit') or recipe_ingredient_data.get('unit_cost', 0.0),
                total_cost=recipe_ingredient_data.get('total_cost'),
                ingredient_order=recipe_ingredient_data.get('sequence') or recipe_ingredient_data.get('ingredient_order', 1),
                is_optional=recipe_ingredient_data.get('is_optional', False),
                ingredient_notes=recipe_ingredient_data.get('notes') or recipe_ingredient_data.get('ingredient_notes')
            )
            db.add(new_batch)
            stats["production_batches"] += 1
            db.add(new_recipe_ingredient)
            stats["recipe_ingredients"] += 1

        # Flush to get batch IDs
        logger.debug("Flushing production batch changes to get IDs")
        await db.flush()

        # Clone Production Ingredient Consumption
        logger.info("Cloning production ingredient consumption")
        for old_batch_id, new_batch_id in batch_id_map.items():
            # No membership check is needed here: old_batch_id always comes
            # from batch_id_map.items(), so it can never be missing from the
            # map, and skipped batches were never added to it in the first place.

            result = await db.execute(
                select(ProductionIngredientConsumption).where(
                    ProductionIngredientConsumption.production_batch_id == old_batch_id
                )
            )
            consumptions = result.scalars().all()

            for consumption in consumptions:
                # Get the new recipe ingredient ID (skip if original ingredient's recipe was skipped)
                new_recipe_ingredient_id = recipe_ingredient_map.get(
                    consumption.recipe_ingredient_id
                )
                if new_recipe_ingredient_id is None:
                    logger.warning(
                        "Skipping consumption with no corresponding recipe ingredient",
                        consumption_id=consumption.id,
                        original_recipe_ingredient_id=consumption.recipe_ingredient_id
                    )
                    continue

                adjusted_consumption_time = adjust_date_for_demo(
                    consumption.consumption_time,
                    session_time,
                    BASE_REFERENCE_DATE
                ) if consumption.consumption_time else None

                new_consumption = ProductionIngredientConsumption(
                    id=uuid.uuid4(),
                    tenant_id=virtual_uuid,
                    production_batch_id=new_batch_id,
                    recipe_ingredient_id=new_recipe_ingredient_id,
                    ingredient_id=consumption.ingredient_id,  # Keep ingredient reference
                    stock_id=None,  # Don't clone stock references
                    planned_quantity=consumption.planned_quantity,
                    actual_quantity=consumption.actual_quantity,
                    unit=consumption.unit,
                    variance_quantity=consumption.variance_quantity,
                    variance_percentage=consumption.variance_percentage,
                    unit_cost=consumption.unit_cost,
                    total_cost=consumption.total_cost,
                    consumption_time=adjusted_consumption_time,
                    consumption_notes=consumption.consumption_notes,
                    staff_member=consumption.staff_member,
                    ingredient_condition=consumption.ingredient_condition,
                    quality_impact=consumption.quality_impact,
                    substitution_used=consumption.substitution_used,
                    substitution_details=consumption.substitution_details
                )
                db.add(new_consumption)
                stats["ingredient_consumptions"] += 1

        # Commit all changes
        logger.debug("Committing all cloned changes")
        await db.commit()

        total_records = sum(stats.values())
        duration_ms = int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000)

        logger.info(
            "Recipes data cloning completed",
            "Recipes data cloned successfully",
            virtual_tenant_id=virtual_tenant_id,
            total_records=total_records,
            stats=stats,
            records_cloned=stats,
            duration_ms=duration_ms
        )

        return {
            "service": "recipes",
            "status": "completed",
            "records_cloned": total_records,
            "records_cloned": sum(stats.values()),
            "duration_ms": duration_ms,
            "details": stats
            "details": {
                "recipes": stats["recipes"],
                "recipe_ingredients": stats["recipe_ingredients"],
                "virtual_tenant_id": str(virtual_tenant_id)
            }
        }

    except ValueError as e:
        logger.error("Invalid UUID format", error=str(e))
        logger.error("Invalid UUID format", error=str(e), virtual_tenant_id=virtual_tenant_id)
        raise HTTPException(status_code=400, detail=f"Invalid UUID: {str(e)}")

    except Exception as e:
@@ -459,80 +323,68 @@ async def clone_health_check(_: bool = Depends(verify_internal_api_key)):


@router.delete("/tenant/{virtual_tenant_id}")
async def delete_demo_data(
    virtual_tenant_id: str,
async def delete_demo_tenant_data(
    virtual_tenant_id: UUID,
    db: AsyncSession = Depends(get_db),
    _: bool = Depends(verify_internal_api_key)
):
    """
    Delete all recipe data for a virtual demo tenant

    Called by the demo session cleanup service to remove ephemeral data
    when demo sessions expire or are destroyed.
    Delete all demo data for a virtual tenant.
    This endpoint is idempotent - safe to call multiple times.
    """
    logger.info(
        "Deleting recipe data for virtual tenant",
        virtual_tenant_id=virtual_tenant_id
    )

    start_time = datetime.now(timezone.utc)
    start_time = datetime.now()

    records_deleted = {
        "recipes": 0,
        "recipe_ingredients": 0,
        "total": 0
    }

    try:
        virtual_uuid = uuid.UUID(virtual_tenant_id)
        # Delete in reverse dependency order

        # 1. Delete recipe ingredients (depends on recipes)
        result = await db.execute(
            delete(RecipeIngredient)
            .where(RecipeIngredient.tenant_id == virtual_tenant_id)
        )
        records_deleted["recipe_ingredients"] = result.rowcount

        # Count records before deletion
        recipe_count = await db.scalar(
            select(func.count(Recipe.id)).where(Recipe.tenant_id == virtual_uuid)
        )
        ingredient_count = await db.scalar(
            select(func.count(RecipeIngredient.id)).where(RecipeIngredient.tenant_id == virtual_uuid)
        # 2. Delete recipes
        result = await db.execute(
            delete(Recipe)
            .where(Recipe.tenant_id == virtual_tenant_id)
        )
        records_deleted["recipes"] = result.rowcount

        # Delete in correct order (RecipeIngredient references Recipe)
        await db.execute(
            delete(RecipeIngredient).where(RecipeIngredient.tenant_id == virtual_uuid)
        )
        await db.execute(
            delete(Recipe).where(Recipe.tenant_id == virtual_uuid)
        )
        records_deleted["total"] = sum(records_deleted.values())

        await db.commit()

        duration_ms = int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000)

        logger.info(
            "Recipe data deleted successfully",
            virtual_tenant_id=virtual_tenant_id,
            recipes_deleted=recipe_count,
            ingredients_deleted=ingredient_count,
            duration_ms=duration_ms
            "demo_data_deleted",
            service="recipes",
            virtual_tenant_id=str(virtual_tenant_id),
            records_deleted=records_deleted
        )

        return {
            "service": "recipes",
            "status": "deleted",
            "virtual_tenant_id": virtual_tenant_id,
            "records_deleted": {
                "recipes": recipe_count,
                "recipe_ingredients": ingredient_count,
                "total": recipe_count + ingredient_count
            },
            "duration_ms": duration_ms
            "virtual_tenant_id": str(virtual_tenant_id),
            "records_deleted": records_deleted,
            "duration_ms": int((datetime.now() - start_time).total_seconds() * 1000)
        }

    except ValueError as e:
        logger.error("Invalid UUID format", error=str(e))
        raise HTTPException(status_code=400, detail=f"Invalid UUID: {str(e)}")

    except Exception as e:
        logger.error(
            "Failed to delete recipe data",
            virtual_tenant_id=virtual_tenant_id,
            error=str(e),
            exc_info=True
        )
        await db.rollback()
        logger.error(
            "demo_data_deletion_failed",
            service="recipes",
            virtual_tenant_id=str(virtual_tenant_id),
            error=str(e)
        )
        raise HTTPException(
            status_code=500,
            detail=f"Failed to delete recipe data: {str(e)}"
        )
            detail=f"Failed to delete demo data: {str(e)}"
        )
@@ -14,7 +14,7 @@ from .core.database import db_manager
from shared.service_base import StandardFastAPIService

# Import API routers
from .api import recipes, recipe_quality_configs, recipe_operations, internal_demo, audit
from .api import recipes, recipe_quality_configs, recipe_operations, audit, internal_demo

# Import models to register them with SQLAlchemy metadata
from .models import recipes as recipe_models
@@ -121,7 +121,7 @@ service.add_router(audit.router)
service.add_router(recipes.router)
service.add_router(recipe_quality_configs.router)
service.add_router(recipe_operations.router)
service.add_router(internal_demo.router)
service.add_router(internal_demo.router, tags=["internal-demo"])


if __name__ == "__main__":
@@ -1,392 +0,0 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Demo Recipes Seeding Script for Recipes Service
Creates realistic Spanish recipes for demo template tenants

This script runs as a Kubernetes init job inside the recipes-service container.
It populates the template tenants with a comprehensive catalog of recipes using pre-defined UUIDs.

Usage:
    python /app/scripts/demo/seed_demo_recipes.py

Environment Variables Required:
    RECIPES_DATABASE_URL - PostgreSQL connection string for recipes database
    DEMO_MODE - Set to 'production' for production seeding
    LOG_LEVEL - Logging level (default: INFO)
"""

import asyncio
import uuid
import sys
import os
import json
from datetime import datetime, timezone, timedelta
from pathlib import Path
import random

# Add app to path
sys.path.insert(0, str(Path(__file__).parent.parent.parent))
# Add shared to path for demo utilities
sys.path.insert(0, str(Path(__file__).parent.parent.parent.parent.parent))

from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine
from sqlalchemy.orm import sessionmaker
from sqlalchemy import select
import structlog

from shared.utils.demo_dates import BASE_REFERENCE_DATE

from app.models.recipes import (
    Recipe, RecipeIngredient, ProductionBatch,
    RecipeStatus, ProductionStatus, ProductionPriority, MeasurementUnit
)

# Configure logging
structlog.configure(
    processors=[
        structlog.stdlib.add_log_level,
        structlog.processors.TimeStamper(fmt="iso"),
        structlog.dev.ConsoleRenderer()
    ]
)

logger = structlog.get_logger()

# Fixed Demo Tenant IDs (must match tenant service)
DEMO_TENANT_PROFESSIONAL = uuid.UUID("a1b2c3d4-e5f6-47a8-b9c0-d1e2f3a4b5c6")
DEMO_TENANT_ENTERPRISE_CHAIN = uuid.UUID("c3d4e5f6-a7b8-49c0-d1e2-f3a4b5c6d7e8")  # Enterprise parent (Obrador)


def load_recipes_data():
    """Load recipes data from JSON file"""
    # Look for data file in the same directory as this script
    data_file = Path(__file__).parent / "recetas_es.json"

    if not data_file.exists():
        raise FileNotFoundError(
            f"Recipes data file not found: {data_file}. "
            "Make sure recetas_es.json is in the same directory as this script."
        )

    logger.info("Loading recipes data", file=str(data_file))

    with open(data_file, 'r', encoding='utf-8') as f:
        data = json.load(f)

    recipes = data.get("recetas", [])
    logger.info(f"Loaded {len(recipes)} recipes from JSON")
    return recipes


async def seed_recipes_for_tenant(
    db: AsyncSession,
    tenant_id: uuid.UUID,
    tenant_name: str,
    recipes_data: list
) -> dict:
    """
    Seed recipes for a specific tenant using pre-defined UUIDs

    Args:
        db: Database session
        tenant_id: UUID of the tenant
        tenant_name: Name of the tenant (for logging)
        recipes_data: List of recipe dictionaries with pre-defined IDs

    Returns:
        Dict with seeding statistics
    """
    logger.info("─" * 80)
    logger.info(f"Seeding recipes for: {tenant_name}")
    logger.info(f"Tenant ID: {tenant_id}")
    logger.info("─" * 80)

    created_recipes = 0
    skipped_recipes = 0
    created_ingredients = 0
    created_batches = 0

    for recipe_data in recipes_data:
        recipe_name = recipe_data["name"]

        # Generate tenant-specific UUIDs (same approach as inventory)
        base_recipe_id = uuid.UUID(recipe_data["id"])
        base_product_id = uuid.UUID(recipe_data["finished_product_id"])
        tenant_int = int(tenant_id.hex, 16)

        recipe_id = uuid.UUID(int=tenant_int ^ int(base_recipe_id.hex, 16))
        finished_product_id = uuid.UUID(int=tenant_int ^ int(base_product_id.hex, 16))
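        # Because XOR is an involution, the derivation above is reversible:
        # XOR-ing a derived ID with the same tenant_int recovers the base seed
        # ID, e.g.
        #
        #     assert uuid.UUID(int=recipe_id.int ^ tenant_int) == base_recipe_id
        #
        # This determinism is what keeps IDs consistent across services that
        # seed from the same JSON files.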

        # Check if recipe already exists
        result = await db.execute(
            select(Recipe).where(
                Recipe.tenant_id == tenant_id,
                Recipe.id == recipe_id
            )
        )
        existing_recipe = result.scalars().first()

        if existing_recipe:
            logger.debug(f"  ⏭️  Skipping recipe (exists): {recipe_name}")
            skipped_recipes += 1
            continue

        # Create recipe using pre-defined UUID
        recipe = Recipe(
            id=recipe_id,
            tenant_id=tenant_id,
            name=recipe_name,
            recipe_code=f"REC-{created_recipes + 1:03d}",
            version="1.0",
            finished_product_id=finished_product_id,
            description=recipe_data.get("description"),
            category=recipe_data.get("category"),
            cuisine_type=recipe_data.get("cuisine_type"),
            difficulty_level=recipe_data.get("difficulty_level", 1),
            yield_quantity=recipe_data.get("yield_quantity"),
            yield_unit=MeasurementUnit(recipe_data.get("yield_unit", "units")),
            prep_time_minutes=recipe_data.get("prep_time_minutes"),
            cook_time_minutes=recipe_data.get("cook_time_minutes"),
            total_time_minutes=recipe_data.get("total_time_minutes"),
            rest_time_minutes=recipe_data.get("rest_time_minutes"),
            instructions=recipe_data.get("instructions"),
            preparation_notes=recipe_data.get("preparation_notes"),
            storage_instructions=recipe_data.get("storage_instructions"),
            quality_check_configuration=recipe_data.get("quality_check_configuration"),
            status=RecipeStatus.ACTIVE,
            is_seasonal=recipe_data.get("is_seasonal", False),
            is_signature_item=recipe_data.get("is_signature_item", False),
            created_at=datetime.now(timezone.utc),
            updated_at=datetime.now(timezone.utc)
        )

        db.add(recipe)
        created_recipes += 1
        logger.debug(f"  ✅ Created recipe: {recipe_name}")

        # Create recipe ingredients using tenant-specific ingredient IDs
        for ing_data in recipe_data.get("ingredientes", []):
            base_ingredient_id = uuid.UUID(ing_data["ingredient_id"])
            ingredient_id = uuid.UUID(int=tenant_int ^ int(base_ingredient_id.hex, 16))

            # Parse unit
            unit_str = ing_data.get("unit", "g")
            try:
                unit = MeasurementUnit(unit_str)
            except ValueError:
                logger.warning(f"  ⚠️  Invalid unit: {unit_str}, using GRAMS")
                unit = MeasurementUnit.GRAMS

            recipe_ingredient = RecipeIngredient(
                id=uuid.uuid4(),
                tenant_id=tenant_id,
                recipe_id=recipe_id,
                ingredient_id=ingredient_id,
                quantity=ing_data["quantity"],
                unit=unit,
                preparation_method=ing_data.get("preparation_method"),
                ingredient_order=ing_data.get("ingredient_order", 1),
                ingredient_group=ing_data.get("ingredient_group")
            )

            db.add(recipe_ingredient)
            created_ingredients += 1

        # Create some sample production batches (historical data)
        num_batches = random.randint(3, 8)
        for i in range(num_batches):
            # Random date in the past 30 days (relative to BASE_REFERENCE_DATE)
            days_ago = random.randint(1, 30)
            production_date = BASE_REFERENCE_DATE - timedelta(days=days_ago)

            # Random multiplier and quantity
            multiplier = random.choice([0.5, 1.0, 1.5, 2.0])
            planned_qty = recipe_data.get("yield_quantity", 10) * multiplier
            actual_qty = planned_qty * random.uniform(0.95, 1.05)

            batch = ProductionBatch(
                id=uuid.uuid4(),
                tenant_id=tenant_id,
                recipe_id=recipe_id,
                batch_number=f"BATCH-{tenant_id.hex[:8].upper()}-{i+1:04d}",
                production_date=production_date,
                planned_quantity=planned_qty,
                actual_quantity=actual_qty,
                yield_percentage=(actual_qty / planned_qty * 100) if planned_qty > 0 else 100,
                batch_size_multiplier=multiplier,
                status=ProductionStatus.COMPLETED,
                priority=ProductionPriority.NORMAL,
                quality_score=random.uniform(7.5, 9.5),
                created_at=production_date,
                updated_at=production_date
            )

            db.add(batch)
            created_batches += 1

    # Commit all changes for this tenant
    await db.commit()

    logger.info(f"  📊 Recipes: {created_recipes}, Ingredients: {created_ingredients}, Batches: {created_batches}")
    logger.info("")

    return {
        "tenant_id": str(tenant_id),
        "tenant_name": tenant_name,
        "recipes_created": created_recipes,
        "recipes_skipped": skipped_recipes,
        "recipe_ingredients_created": created_ingredients,
        "production_batches_created": created_batches,
        "total_recipes": len(recipes_data)
    }


async def seed_recipes(db: AsyncSession):
    """
    Seed recipes for all demo template tenants

    Args:
        db: Database session

    Returns:
        Dict with overall seeding statistics
    """
    logger.info("=" * 80)
    logger.info("📚 Starting Demo Recipes Seeding")
    logger.info("=" * 80)

    # Load recipes data once
    try:
        recipes_data = load_recipes_data()
    except FileNotFoundError as e:
        logger.error(str(e))
        raise

    results = []

    # Seed for Professional Bakery (single location)
    logger.info("")
    result_professional = await seed_recipes_for_tenant(
        db,
        DEMO_TENANT_PROFESSIONAL,
        "Panadería Artesana Madrid (Professional)",
        recipes_data
    )
    results.append(result_professional)

    # Seed for Enterprise Parent (central production - Obrador)
    logger.info("")
    result_enterprise_parent = await seed_recipes_for_tenant(
        db,
        DEMO_TENANT_ENTERPRISE_CHAIN,
        "Panadería Central - Obrador Madrid (Enterprise Parent)",
        recipes_data
    )
    results.append(result_enterprise_parent)

    # Calculate totals
    total_recipes = sum(r["recipes_created"] for r in results)
    total_ingredients = sum(r["recipe_ingredients_created"] for r in results)
    total_batches = sum(r["production_batches_created"] for r in results)
    total_skipped = sum(r["recipes_skipped"] for r in results)

    logger.info("=" * 80)
    logger.info("✅ Demo Recipes Seeding Completed")
    logger.info("=" * 80)

    return {
        "service": "recipes",
        "tenants_seeded": len(results),
        "total_recipes_created": total_recipes,
        "total_recipe_ingredients_created": total_ingredients,
        "total_production_batches_created": total_batches,
        "total_skipped": total_skipped,
        "results": results
    }


async def main():
    """Main execution function"""

    logger.info("Demo Recipes Seeding Script Starting")
    logger.info("Mode: %s", os.getenv("DEMO_MODE", "development"))
    logger.info("Log Level: %s", os.getenv("LOG_LEVEL", "INFO"))

    # Get database URLs from environment
    database_url = os.getenv("RECIPES_DATABASE_URL") or os.getenv("DATABASE_URL")
    if not database_url:
        logger.error("❌ RECIPES_DATABASE_URL or DATABASE_URL environment variable must be set")
        return 1

    # Convert to async URL if needed
    if database_url.startswith("postgresql://"):
        database_url = database_url.replace("postgresql://", "postgresql+asyncpg://", 1)

    logger.info("Connecting to recipes database")

    # Create engine and session
    engine = create_async_engine(
        database_url,
        echo=False,
        pool_pre_ping=True,
        pool_size=5,
        max_overflow=10
    )

    session_maker = sessionmaker(
        engine,
        class_=AsyncSession,
        expire_on_commit=False
    )

    try:
        async with session_maker() as session:
            result = await seed_recipes(session)

            logger.info("")
            logger.info("📊 Seeding Summary:")
            logger.info(f"  ✅ Tenants seeded: {result['tenants_seeded']}")
            logger.info(f"  ✅ Recipes created: {result['total_recipes_created']}")
            logger.info(f"  ✅ Recipe ingredients: {result['total_recipe_ingredients_created']}")
            logger.info(f"  ✅ Production batches: {result['total_production_batches_created']}")
            logger.info(f"  ⏭️  Skipped: {result['total_skipped']}")
            logger.info("")

            # Print per-tenant details
            for tenant_result in result['results']:
                logger.info(
                    f"  {tenant_result['tenant_name']}: "
                    f"{tenant_result['recipes_created']} recipes, "
                    f"{tenant_result['recipe_ingredients_created']} ingredients, "
                    f"{tenant_result['production_batches_created']} batches"
                )

            logger.info("")
            logger.info("🎉 Success! Recipe catalog is ready for cloning.")
            logger.info("")
            logger.info("Recipes created:")
            logger.info("  • Baguette Francesa Tradicional")
            logger.info("  • Croissant de Mantequilla Artesanal")
            logger.info("  • Pan de Pueblo con Masa Madre")
            logger.info("  • Napolitana de Chocolate")
            logger.info("")
            logger.info("Note: All IDs are pre-defined and hardcoded for cross-service consistency")
            logger.info("")

            return 0

    except Exception as e:
        logger.error("=" * 80)
        logger.error("❌ Demo Recipes Seeding Failed")
        logger.error("=" * 80)
        logger.error("Error: %s", str(e))
        logger.error("", exc_info=True)
        return 1

    finally:
        await engine.dispose()


if __name__ == "__main__":
    exit_code = asyncio.run(main())
    sys.exit(exit_code)