# bakery-ia/services/recipes/app/api/internal_demo.py

"""
Internal Demo Cloning API for Recipes Service
Service-to-service endpoint for cloning recipe and production data
"""
from fastapi import APIRouter, Depends, HTTPException, Header
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select, delete, func
import structlog
import uuid
from datetime import datetime, timezone, timedelta
from typing import Optional
import os
import sys
from pathlib import Path
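# Make the repository root importable so the shared demo-date utilities below
# resolve regardless of the working directory the service is launched from.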
sys.path.insert(0, str(Path(__file__).parent.parent.parent.parent))
from shared.utils.demo_dates import adjust_date_for_demo, BASE_REFERENCE_DATE
from app.core.database import get_db
from app.models.recipes import (
Recipe, RecipeIngredient, ProductionBatch, ProductionIngredientConsumption,
RecipeStatus, ProductionStatus, MeasurementUnit, ProductionPriority
)
from app.core.config import settings
logger = structlog.get_logger()
router = APIRouter(prefix="/internal/demo", tags=["internal"])
# Base demo tenant IDs
DEMO_TENANT_PROFESSIONAL = "a1b2c3d4-e5f6-47a8-b9c0-d1e2f3a4b5c6"
def verify_internal_api_key(x_internal_api_key: Optional[str] = Header(None)):
"""Verify internal API key for service-to-service communication"""
if x_internal_api_key != settings.INTERNAL_API_KEY:
logger.warning("Unauthorized internal API access attempted")
raise HTTPException(status_code=403, detail="Invalid internal API key")
return True
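# Illustrative caller sketch (not part of this module). The orchestrator is
# assumed to send the shared secret in the X-Internal-Api-Key header, which
# FastAPI derives from the x_internal_api_key parameter above; the endpoint's
# scalar arguments arrive as query parameters. Host, port, and key values here
# are placeholders:
#
#   import httpx
#   resp = httpx.post(
#       "http://recipes:8000/internal/demo/clone",  # hypothetical service URL
#       params={
#           "base_tenant_id": "<template tenant UUID>",
#           "virtual_tenant_id": "<virtual tenant UUID>",
#           "demo_account_type": "professional",
#           "session_created_at": "2025-01-01T00:00:00Z",
#       },
#       headers={"X-Internal-Api-Key": "<INTERNAL_API_KEY value>"},
#   )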
@router.post("/clone")
async def clone_demo_data(
base_tenant_id: str,
virtual_tenant_id: str,
demo_account_type: str,
session_id: Optional[str] = None,
session_created_at: Optional[str] = None,
db: AsyncSession = Depends(get_db),
_: bool = Depends(verify_internal_api_key)
):
"""
Clone recipes service data for a virtual demo tenant
Clones:
- Recipes (master recipe definitions)
- Recipe ingredients (with measurements)
- Production batches (historical production runs)
- Production ingredient consumption (actual usage tracking)
Args:
base_tenant_id: Template tenant UUID to clone from
virtual_tenant_id: Target virtual tenant UUID
demo_account_type: Type of demo account
session_id: Originating session ID for tracing
session_created_at: ISO timestamp when demo session was created (for date adjustment)
Returns:
Cloning status and record counts
"""
start_time = datetime.now(timezone.utc)
    # Parse session_created_at, or fall back to the current time
if session_created_at:
try:
session_time = datetime.fromisoformat(session_created_at.replace('Z', '+00:00'))
except (ValueError, AttributeError) as e:
logger.warning(
"Invalid session_created_at format, using current time",
session_created_at=session_created_at,
error=str(e)
)
session_time = datetime.now(timezone.utc)
else:
logger.warning("session_created_at not provided, using current time")
session_time = datetime.now(timezone.utc)
logger.info(
"Starting recipes data cloning",
base_tenant_id=base_tenant_id,
virtual_tenant_id=virtual_tenant_id,
demo_account_type=demo_account_type,
session_id=session_id,
session_time=session_time.isoformat()
)
try:
# Validate UUIDs
base_uuid = uuid.UUID(base_tenant_id)
virtual_uuid = uuid.UUID(virtual_tenant_id)
# Track cloning statistics
stats = {
"recipes": 0,
"recipe_ingredients": 0,
"production_batches": 0,
"ingredient_consumptions": 0
}
# Recipe ID mapping (old -> new)
recipe_id_map = {}
recipe_ingredient_map = {}
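        # These maps translate template-row primary keys to the freshly generated
        # UUIDs, so cloned child rows (ingredients, batches, consumption) point
        # at cloned parents rather than at the template tenant's data.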
# Clone Recipes
logger.info("Starting to clone recipes", base_tenant=str(base_uuid))
result = await db.execute(
select(Recipe).where(Recipe.tenant_id == base_uuid)
)
base_recipes = result.scalars().all()
logger.info(
"Found recipes to clone",
count=len(base_recipes),
base_tenant=str(base_uuid)
)
        for recipe in base_recipes:
            # Validate required fields before cloning. Skip first, so that a
            # skipped recipe never enters recipe_id_map; otherwise dependent
            # batches would be remapped onto a recipe that was never created.
            if recipe.finished_product_id is None:
                logger.warning(
                    "Recipe has null finished_product_id, skipping clone",
                    recipe_id=recipe.id,
                    recipe_name=recipe.name
                )
                continue
            new_recipe_id = uuid.uuid4()
            recipe_id_map[recipe.id] = new_recipe_id
            # Generate a unique recipe code to avoid potential duplicates
            recipe_code = f"REC-{uuid.uuid4().hex[:8].upper()}"
new_recipe = Recipe(
id=new_recipe_id,
tenant_id=virtual_uuid,
name=recipe.name,
recipe_code=recipe_code, # New unique code
version=recipe.version,
finished_product_id=recipe.finished_product_id, # Keep product reference
description=recipe.description,
category=recipe.category,
cuisine_type=recipe.cuisine_type,
difficulty_level=recipe.difficulty_level,
yield_quantity=recipe.yield_quantity,
yield_unit=recipe.yield_unit,
prep_time_minutes=recipe.prep_time_minutes,
cook_time_minutes=recipe.cook_time_minutes,
total_time_minutes=recipe.total_time_minutes,
rest_time_minutes=recipe.rest_time_minutes,
estimated_cost_per_unit=recipe.estimated_cost_per_unit,
last_calculated_cost=recipe.last_calculated_cost,
cost_calculation_date=recipe.cost_calculation_date,
target_margin_percentage=recipe.target_margin_percentage,
suggested_selling_price=recipe.suggested_selling_price,
instructions=recipe.instructions,
preparation_notes=recipe.preparation_notes,
storage_instructions=recipe.storage_instructions,
serves_count=recipe.serves_count,
nutritional_info=recipe.nutritional_info,
allergen_info=recipe.allergen_info,
dietary_tags=recipe.dietary_tags,
batch_size_multiplier=recipe.batch_size_multiplier,
minimum_batch_size=recipe.minimum_batch_size,
maximum_batch_size=recipe.maximum_batch_size,
optimal_production_temperature=recipe.optimal_production_temperature,
optimal_humidity=recipe.optimal_humidity,
quality_check_configuration=recipe.quality_check_configuration,
status=recipe.status,
is_seasonal=recipe.is_seasonal,
season_start_month=recipe.season_start_month,
season_end_month=recipe.season_end_month,
is_signature_item=recipe.is_signature_item,
created_at=session_time,
updated_at=session_time,
created_by=recipe.created_by,
updated_by=recipe.updated_by
)
# Add to session
db.add(new_recipe)
stats["recipes"] += 1
# Flush to get recipe IDs for foreign keys
logger.debug("Flushing recipe changes to get IDs")
await db.flush()
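        # flush() sends the pending INSERTs within the open transaction without
        # committing: the new recipe rows become referenceable for the foreign
        # keys below, while the clone stays atomic until the final commit.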
# Clone Recipe Ingredients
logger.info("Cloning recipe ingredients", recipe_ingredients_count=len(recipe_id_map))
for old_recipe_id, new_recipe_id in recipe_id_map.items():
result = await db.execute(
select(RecipeIngredient).where(RecipeIngredient.recipe_id == old_recipe_id)
)
recipe_ingredients = result.scalars().all()
for ingredient in recipe_ingredients:
new_ingredient_id = uuid.uuid4()
recipe_ingredient_map[ingredient.id] = new_ingredient_id
new_ingredient = RecipeIngredient(
id=new_ingredient_id,
tenant_id=virtual_uuid,
recipe_id=new_recipe_id,
ingredient_id=ingredient.ingredient_id, # Keep ingredient reference
quantity=ingredient.quantity,
unit=ingredient.unit,
quantity_in_base_unit=ingredient.quantity_in_base_unit,
alternative_quantity=ingredient.alternative_quantity,
alternative_unit=ingredient.alternative_unit,
preparation_method=ingredient.preparation_method,
ingredient_notes=ingredient.ingredient_notes,
is_optional=ingredient.is_optional,
ingredient_order=ingredient.ingredient_order,
ingredient_group=ingredient.ingredient_group,
substitution_options=ingredient.substitution_options,
substitution_ratio=ingredient.substitution_ratio,
unit_cost=ingredient.unit_cost,
total_cost=ingredient.total_cost,
cost_updated_at=ingredient.cost_updated_at
)
db.add(new_ingredient)
stats["recipe_ingredients"] += 1
# Flush to get recipe ingredient IDs
logger.debug("Flushing recipe ingredient changes to get IDs")
await db.flush()
# Clone Production Batches
logger.info("Starting to clone production batches", base_tenant=str(base_uuid))
result = await db.execute(
select(ProductionBatch).where(ProductionBatch.tenant_id == base_uuid)
)
base_batches = result.scalars().all()
logger.info(
"Found production batches to clone",
count=len(base_batches),
base_tenant=str(base_uuid)
)
batch_id_map = {}
for batch in base_batches:
new_batch_id = uuid.uuid4()
batch_id_map[batch.id] = new_batch_id
# Get the new recipe ID (this might be None if the recipe was skipped due to null finished_product_id)
new_recipe_id = recipe_id_map.get(batch.recipe_id)
if new_recipe_id is None:
logger.warning(
"Skipping production batch with no corresponding recipe",
batch_id=batch.id,
original_recipe_id=batch.recipe_id
)
continue
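            # The implementation of adjust_date_for_demo lives in
            # shared/utils/demo_dates.py; the assumption here is that it shifts
            # a stored timestamp by the offset between the demo session start
            # and BASE_REFERENCE_DATE, roughly:
            #   adjusted = original + (session_time - BASE_REFERENCE_DATE)
            # so cloned production history stays positioned relative to "today".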
# Adjust all date fields using the shared utility
adjusted_production_date = adjust_date_for_demo(
batch.production_date,
session_time,
BASE_REFERENCE_DATE
) if batch.production_date else None
adjusted_planned_start = adjust_date_for_demo(
batch.planned_start_time,
session_time,
BASE_REFERENCE_DATE
) if batch.planned_start_time else None
adjusted_actual_start = adjust_date_for_demo(
batch.actual_start_time,
session_time,
BASE_REFERENCE_DATE
) if batch.actual_start_time else None
adjusted_planned_end = adjust_date_for_demo(
batch.planned_end_time,
session_time,
BASE_REFERENCE_DATE
) if batch.planned_end_time else None
adjusted_actual_end = adjust_date_for_demo(
batch.actual_end_time,
session_time,
BASE_REFERENCE_DATE
) if batch.actual_end_time else None
new_batch = ProductionBatch(
id=new_batch_id,
tenant_id=virtual_uuid,
recipe_id=new_recipe_id,
batch_number=f"BATCH-{uuid.uuid4().hex[:8].upper()}", # New batch number
production_date=adjusted_production_date,
planned_start_time=adjusted_planned_start,
actual_start_time=adjusted_actual_start,
planned_end_time=adjusted_planned_end,
actual_end_time=adjusted_actual_end,
planned_quantity=batch.planned_quantity,
actual_quantity=batch.actual_quantity,
yield_percentage=batch.yield_percentage,
batch_size_multiplier=batch.batch_size_multiplier,
status=batch.status,
priority=batch.priority,
assigned_staff=batch.assigned_staff,
production_notes=batch.production_notes,
quality_score=batch.quality_score,
quality_notes=batch.quality_notes,
defect_rate=batch.defect_rate,
rework_required=batch.rework_required,
planned_material_cost=batch.planned_material_cost,
actual_material_cost=batch.actual_material_cost,
labor_cost=batch.labor_cost,
overhead_cost=batch.overhead_cost,
total_production_cost=batch.total_production_cost,
cost_per_unit=batch.cost_per_unit,
production_temperature=batch.production_temperature,
production_humidity=batch.production_humidity,
oven_temperature=batch.oven_temperature,
baking_time_minutes=batch.baking_time_minutes,
waste_quantity=batch.waste_quantity,
waste_reason=batch.waste_reason,
efficiency_percentage=batch.efficiency_percentage,
customer_order_reference=batch.customer_order_reference,
pre_order_quantity=batch.pre_order_quantity,
shelf_quantity=batch.shelf_quantity,
created_at=session_time,
updated_at=session_time,
created_by=batch.created_by,
completed_by=batch.completed_by
)
db.add(new_batch)
stats["production_batches"] += 1
# Flush to get batch IDs
logger.debug("Flushing production batch changes to get IDs")
await db.flush()
# Clone Production Ingredient Consumption
logger.info("Cloning production ingredient consumption")
        for old_batch_id, new_batch_id in batch_id_map.items():
            # batch_id_map only contains batches that were actually cloned, so
            # consumption rows belonging to skipped batches are never visited.
result = await db.execute(
select(ProductionIngredientConsumption).where(
ProductionIngredientConsumption.production_batch_id == old_batch_id
)
)
consumptions = result.scalars().all()
for consumption in consumptions:
# Get the new recipe ingredient ID (skip if original ingredient's recipe was skipped)
new_recipe_ingredient_id = recipe_ingredient_map.get(
consumption.recipe_ingredient_id
)
if new_recipe_ingredient_id is None:
logger.warning(
"Skipping consumption with no corresponding recipe ingredient",
consumption_id=consumption.id,
original_recipe_ingredient_id=consumption.recipe_ingredient_id
)
continue
adjusted_consumption_time = adjust_date_for_demo(
consumption.consumption_time,
session_time,
BASE_REFERENCE_DATE
) if consumption.consumption_time else None
new_consumption = ProductionIngredientConsumption(
id=uuid.uuid4(),
tenant_id=virtual_uuid,
production_batch_id=new_batch_id,
recipe_ingredient_id=new_recipe_ingredient_id,
ingredient_id=consumption.ingredient_id, # Keep ingredient reference
stock_id=None, # Don't clone stock references
planned_quantity=consumption.planned_quantity,
actual_quantity=consumption.actual_quantity,
unit=consumption.unit,
variance_quantity=consumption.variance_quantity,
variance_percentage=consumption.variance_percentage,
unit_cost=consumption.unit_cost,
total_cost=consumption.total_cost,
consumption_time=adjusted_consumption_time,
consumption_notes=consumption.consumption_notes,
staff_member=consumption.staff_member,
ingredient_condition=consumption.ingredient_condition,
quality_impact=consumption.quality_impact,
substitution_used=consumption.substitution_used,
substitution_details=consumption.substitution_details
)
db.add(new_consumption)
stats["ingredient_consumptions"] += 1
# Commit all changes
logger.debug("Committing all cloned changes")
await db.commit()
total_records = sum(stats.values())
duration_ms = int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000)
logger.info(
"Recipes data cloning completed",
virtual_tenant_id=virtual_tenant_id,
total_records=total_records,
stats=stats,
duration_ms=duration_ms
)
return {
"service": "recipes",
"status": "completed",
"records_cloned": total_records,
"duration_ms": duration_ms,
"details": stats
}
except ValueError as e:
logger.error("Invalid UUID format", error=str(e))
raise HTTPException(status_code=400, detail=f"Invalid UUID: {str(e)}")
except Exception as e:
logger.error(
"Failed to clone recipes data",
error=str(e),
virtual_tenant_id=virtual_tenant_id,
exc_info=True
)
# Rollback on error
await db.rollback()
return {
"service": "recipes",
"status": "failed",
"records_cloned": 0,
"duration_ms": int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000),
"error": str(e)
}
@router.get("/clone/health")
async def clone_health_check(_: bool = Depends(verify_internal_api_key)):
"""
Health check for internal cloning endpoint
Used by orchestrator to verify service availability
"""
return {
"service": "recipes",
"clone_endpoint": "available",
"version": "2.0.0"
}
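# A probe sketch for this endpoint (hypothetical host, same header as above):
#
#   resp = httpx.get(
#       "http://recipes:8000/internal/demo/clone/health",
#       headers={"X-Internal-Api-Key": "<INTERNAL_API_KEY value>"},
#   )
#   assert resp.json()["clone_endpoint"] == "available"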
@router.delete("/tenant/{virtual_tenant_id}")
async def delete_demo_data(
virtual_tenant_id: str,
db: AsyncSession = Depends(get_db),
_: bool = Depends(verify_internal_api_key)
):
"""
    Delete all recipe and production data for a virtual demo tenant
Called by demo session cleanup service to remove ephemeral data
when demo sessions expire or are destroyed.
"""
logger.info(
"Deleting recipe data for virtual tenant",
virtual_tenant_id=virtual_tenant_id
)
start_time = datetime.now(timezone.utc)
try:
virtual_uuid = uuid.UUID(virtual_tenant_id)
        # Count records before deletion (the clone step also creates production data)
        recipe_count = await db.scalar(
            select(func.count(Recipe.id)).where(Recipe.tenant_id == virtual_uuid)
        )
        ingredient_count = await db.scalar(
            select(func.count(RecipeIngredient.id)).where(RecipeIngredient.tenant_id == virtual_uuid)
        )
        batch_count = await db.scalar(
            select(func.count(ProductionBatch.id)).where(ProductionBatch.tenant_id == virtual_uuid)
        )
        consumption_count = await db.scalar(
            select(func.count(ProductionIngredientConsumption.id)).where(
                ProductionIngredientConsumption.tenant_id == virtual_uuid
            )
        )
        # Delete children before parents to satisfy foreign keys:
        # consumption -> batches / recipe ingredients -> recipes
        await db.execute(delete(ProductionIngredientConsumption).where(
            ProductionIngredientConsumption.tenant_id == virtual_uuid
        ))
        await db.execute(delete(ProductionBatch).where(ProductionBatch.tenant_id == virtual_uuid))
        await db.execute(delete(RecipeIngredient).where(RecipeIngredient.tenant_id == virtual_uuid))
        await db.execute(delete(Recipe).where(Recipe.tenant_id == virtual_uuid))
await db.commit()
duration_ms = int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000)
        logger.info(
            "Recipe data deleted successfully",
            virtual_tenant_id=virtual_tenant_id,
            recipes_deleted=recipe_count,
            ingredients_deleted=ingredient_count,
            batches_deleted=batch_count,
            consumptions_deleted=consumption_count,
            duration_ms=duration_ms
        )
        return {
            "service": "recipes",
            "status": "deleted",
            "virtual_tenant_id": virtual_tenant_id,
            "records_deleted": {
                "recipes": recipe_count,
                "recipe_ingredients": ingredient_count,
                "production_batches": batch_count,
                "ingredient_consumptions": consumption_count,
                "total": recipe_count + ingredient_count + batch_count + consumption_count
            },
            "duration_ms": duration_ms
        }
except ValueError as e:
logger.error("Invalid UUID format", error=str(e))
raise HTTPException(status_code=400, detail=f"Invalid UUID: {str(e)}")
except Exception as e:
logger.error(
"Failed to delete recipe data",
virtual_tenant_id=virtual_tenant_id,
error=str(e),
exc_info=True
)
await db.rollback()
raise HTTPException(
status_code=500,
detail=f"Failed to delete recipe data: {str(e)}"
)
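# Cleanup sketch (hypothetical host): the demo session cleanup service is
# assumed to issue the DELETE with the same internal header, e.g.
#
#   httpx.delete(
#       f"http://recipes:8000/internal/demo/tenant/{virtual_tenant_id}",
#       headers={"X-Internal-Api-Key": "<INTERNAL_API_KEY value>"},
#   )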