"""
|
|
Internal Demo Cloning API for Recipes Service
|
|
Service-to-service endpoint for cloning recipe and production data
|
|
"""
|
|
|
|
from fastapi import APIRouter, Depends, HTTPException, Header
|
|
from sqlalchemy.ext.asyncio import AsyncSession
|
|
from sqlalchemy import select, delete, func
|
|
import structlog
|
|
import uuid
|
|
from datetime import datetime, timezone, timedelta
|
|
from typing import Optional
|
|
import os
|
|
import sys
|
|
from pathlib import Path
|
|
|
|
sys.path.insert(0, str(Path(__file__).parent.parent.parent.parent))
|
|
from shared.utils.demo_dates import adjust_date_for_demo, BASE_REFERENCE_DATE
|
|
|
|
from app.core.database import get_db
|
|
from app.models.recipes import (
|
|
Recipe, RecipeIngredient, ProductionBatch, ProductionIngredientConsumption,
|
|
RecipeStatus, ProductionStatus, MeasurementUnit, ProductionPriority
|
|
)
|
|
|
|
logger = structlog.get_logger()
|
|
router = APIRouter(prefix="/internal/demo", tags=["internal"])
|
|
|
|
# Internal API key for service-to-service auth
|
|
INTERNAL_API_KEY = os.getenv("INTERNAL_API_KEY", "dev-internal-key-change-in-production")
|
|
|
|
# Base demo tenant IDs
|
|
DEMO_TENANT_SAN_PABLO = "a1b2c3d4-e5f6-47a8-b9c0-d1e2f3a4b5c6"
|
|
DEMO_TENANT_LA_ESPIGA = "b2c3d4e5-f6a7-48b9-c0d1-e2f3a4b5c6d7"
|
|
|
|
|
|
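

# Callers authenticate by sending the key in the x-internal-api-key request
# header; FastAPI maps the x_internal_api_key parameter below to that header
# name. Illustrative call shape (the host is an assumption, not defined here):
#   POST http://recipes-service/internal/demo/clone   x-internal-api-key: <INTERNAL_API_KEY>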
def verify_internal_api_key(x_internal_api_key: Optional[str] = Header(None)):
    """Verify the internal API key for service-to-service communication."""
    if x_internal_api_key != INTERNAL_API_KEY:
        logger.warning("Unauthorized internal API access attempted")
        raise HTTPException(status_code=403, detail="Invalid internal API key")
    return True


@router.post("/clone")
async def clone_demo_data(
    base_tenant_id: str,
    virtual_tenant_id: str,
    demo_account_type: str,
    session_id: Optional[str] = None,
    session_created_at: Optional[str] = None,
    db: AsyncSession = Depends(get_db),
    _: bool = Depends(verify_internal_api_key)
):
    """
    Clone recipes service data for a virtual demo tenant.

    Clones:
    - Recipes (master recipe definitions)
    - Recipe ingredients (with measurements)
    - Production batches (historical production runs)
    - Production ingredient consumption (actual usage tracking)

    Args:
        base_tenant_id: Template tenant UUID to clone from
        virtual_tenant_id: Target virtual tenant UUID
        demo_account_type: Type of demo account
        session_id: Originating session ID for tracing
        session_created_at: ISO 8601 timestamp of demo session creation (used for date adjustment)

    Returns:
        Cloning status and record counts
    """
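    # Illustrative caller sketch (not part of this module): the demo
    # orchestrator is assumed to invoke this endpoint roughly as follows; the
    # host name and the parameter values are placeholders.
    #
    #   async with httpx.AsyncClient() as client:
    #       resp = await client.post(
    #           "http://recipes-service/internal/demo/clone",
    #           params={
    #               "base_tenant_id": DEMO_TENANT_SAN_PABLO,
    #               "virtual_tenant_id": str(uuid.uuid4()),
    #               "demo_account_type": "bakery",
    #               "session_id": "sess-123",
    #               "session_created_at": datetime.now(timezone.utc).isoformat(),
    #           },
    #           headers={"x-internal-api-key": INTERNAL_API_KEY},
    #       )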
    start_time = datetime.now(timezone.utc)

    # Parse session_created_at, or fall back to the current time
    if session_created_at:
        try:
            session_time = datetime.fromisoformat(session_created_at.replace('Z', '+00:00'))
        except (ValueError, AttributeError) as e:
            logger.warning(
                "Invalid session_created_at format, using current time",
                session_created_at=session_created_at,
                error=str(e)
            )
            session_time = datetime.now(timezone.utc)
    else:
        logger.warning("session_created_at not provided, using current time")
        session_time = datetime.now(timezone.utc)

    logger.info(
        "Starting recipes data cloning",
        base_tenant_id=base_tenant_id,
        virtual_tenant_id=virtual_tenant_id,
        demo_account_type=demo_account_type,
        session_id=session_id,
        session_time=session_time.isoformat()
    )
    try:
        # Validate UUIDs
        base_uuid = uuid.UUID(base_tenant_id)
        virtual_uuid = uuid.UUID(virtual_tenant_id)

        # Track cloning statistics
        stats = {
            "recipes": 0,
            "recipe_ingredients": 0,
            "production_batches": 0,
            "ingredient_consumptions": 0
        }

        # Recipe ID mapping (old -> new)
        recipe_id_map = {}
        recipe_ingredient_map = {}

        # Clone Recipes
        result = await db.execute(
            select(Recipe).where(Recipe.tenant_id == base_uuid)
        )
        base_recipes = result.scalars().all()

        logger.info(
            "Found recipes to clone",
            count=len(base_recipes),
            base_tenant=str(base_uuid)
        )
        for recipe in base_recipes:
            new_recipe_id = uuid.uuid4()
            recipe_id_map[recipe.id] = new_recipe_id

            new_recipe = Recipe(
                id=new_recipe_id,
                tenant_id=virtual_uuid,
                name=recipe.name,
                recipe_code=f"REC-{uuid.uuid4().hex[:8].upper()}",  # New unique code
                version=recipe.version,
                finished_product_id=recipe.finished_product_id,  # Keep product reference
                description=recipe.description,
                category=recipe.category,
                cuisine_type=recipe.cuisine_type,
                difficulty_level=recipe.difficulty_level,
                yield_quantity=recipe.yield_quantity,
                yield_unit=recipe.yield_unit,
                prep_time_minutes=recipe.prep_time_minutes,
                cook_time_minutes=recipe.cook_time_minutes,
                total_time_minutes=recipe.total_time_minutes,
                rest_time_minutes=recipe.rest_time_minutes,
                estimated_cost_per_unit=recipe.estimated_cost_per_unit,
                last_calculated_cost=recipe.last_calculated_cost,
                cost_calculation_date=recipe.cost_calculation_date,
                target_margin_percentage=recipe.target_margin_percentage,
                suggested_selling_price=recipe.suggested_selling_price,
                instructions=recipe.instructions,
                preparation_notes=recipe.preparation_notes,
                storage_instructions=recipe.storage_instructions,
                serves_count=recipe.serves_count,
                nutritional_info=recipe.nutritional_info,
                allergen_info=recipe.allergen_info,
                dietary_tags=recipe.dietary_tags,
                batch_size_multiplier=recipe.batch_size_multiplier,
                minimum_batch_size=recipe.minimum_batch_size,
                maximum_batch_size=recipe.maximum_batch_size,
                optimal_production_temperature=recipe.optimal_production_temperature,
                optimal_humidity=recipe.optimal_humidity,
                quality_check_configuration=recipe.quality_check_configuration,
                status=recipe.status,
                is_seasonal=recipe.is_seasonal,
                season_start_month=recipe.season_start_month,
                season_end_month=recipe.season_end_month,
                is_signature_item=recipe.is_signature_item,
                created_at=session_time,
                updated_at=session_time,
                created_by=recipe.created_by,
                updated_by=recipe.updated_by
            )
            db.add(new_recipe)
            stats["recipes"] += 1

        # Flush to get recipe IDs for foreign keys
        await db.flush()
        # Clone Recipe Ingredients
        for old_recipe_id, new_recipe_id in recipe_id_map.items():
            result = await db.execute(
                select(RecipeIngredient).where(RecipeIngredient.recipe_id == old_recipe_id)
            )
            recipe_ingredients = result.scalars().all()

            for ingredient in recipe_ingredients:
                new_ingredient_id = uuid.uuid4()
                recipe_ingredient_map[ingredient.id] = new_ingredient_id

                new_ingredient = RecipeIngredient(
                    id=new_ingredient_id,
                    tenant_id=virtual_uuid,
                    recipe_id=new_recipe_id,
                    ingredient_id=ingredient.ingredient_id,  # Keep ingredient reference
                    quantity=ingredient.quantity,
                    unit=ingredient.unit,
                    quantity_in_base_unit=ingredient.quantity_in_base_unit,
                    alternative_quantity=ingredient.alternative_quantity,
                    alternative_unit=ingredient.alternative_unit,
                    preparation_method=ingredient.preparation_method,
                    ingredient_notes=ingredient.ingredient_notes,
                    is_optional=ingredient.is_optional,
                    ingredient_order=ingredient.ingredient_order,
                    ingredient_group=ingredient.ingredient_group,
                    substitution_options=ingredient.substitution_options,
                    substitution_ratio=ingredient.substitution_ratio,
                    unit_cost=ingredient.unit_cost,
                    total_cost=ingredient.total_cost,
                    cost_updated_at=ingredient.cost_updated_at
                )
                db.add(new_ingredient)
                stats["recipe_ingredients"] += 1

        # Flush to get recipe ingredient IDs
        await db.flush()
        # Clone Production Batches
        result = await db.execute(
            select(ProductionBatch).where(ProductionBatch.tenant_id == base_uuid)
        )
        base_batches = result.scalars().all()

        logger.info(
            "Found production batches to clone",
            count=len(base_batches),
            base_tenant=str(base_uuid)
        )

        batch_id_map = {}

        for batch in base_batches:
            new_batch_id = uuid.uuid4()
            batch_id_map[batch.id] = new_batch_id

            # Get the new recipe ID
            new_recipe_id = recipe_id_map.get(batch.recipe_id, batch.recipe_id)
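            # Fallback note: if a batch points at a recipe outside the template
            # set, the original recipe_id is kept rather than dropped.
            #
            # Date-shift sketch (an assumption about shared.utils.demo_dates, not
            # verified here): every timestamp is expected to move by the same
            # offset, delta = session_time - BASE_REFERENCE_DATE, so a batch baked
            # three days before BASE_REFERENCE_DATE shows up three days before the
            # demo session started:
            #   adjust_date_for_demo(ts, session_time, BASE_REFERENCE_DATE)
            #       ~= ts + (session_time - BASE_REFERENCE_DATE)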
            # Adjust all date fields using the shared utility
            adjusted_production_date = adjust_date_for_demo(
                batch.production_date,
                session_time,
                BASE_REFERENCE_DATE
            ) if batch.production_date else None
            adjusted_planned_start = adjust_date_for_demo(
                batch.planned_start_time,
                session_time,
                BASE_REFERENCE_DATE
            ) if batch.planned_start_time else None
            adjusted_actual_start = adjust_date_for_demo(
                batch.actual_start_time,
                session_time,
                BASE_REFERENCE_DATE
            ) if batch.actual_start_time else None
            adjusted_planned_end = adjust_date_for_demo(
                batch.planned_end_time,
                session_time,
                BASE_REFERENCE_DATE
            ) if batch.planned_end_time else None
            adjusted_actual_end = adjust_date_for_demo(
                batch.actual_end_time,
                session_time,
                BASE_REFERENCE_DATE
            ) if batch.actual_end_time else None

            new_batch = ProductionBatch(
                id=new_batch_id,
                tenant_id=virtual_uuid,
                recipe_id=new_recipe_id,
                batch_number=f"BATCH-{uuid.uuid4().hex[:8].upper()}",  # New batch number
                production_date=adjusted_production_date,
                planned_start_time=adjusted_planned_start,
                actual_start_time=adjusted_actual_start,
                planned_end_time=adjusted_planned_end,
                actual_end_time=adjusted_actual_end,
                planned_quantity=batch.planned_quantity,
                actual_quantity=batch.actual_quantity,
                yield_percentage=batch.yield_percentage,
                batch_size_multiplier=batch.batch_size_multiplier,
                status=batch.status,
                priority=batch.priority,
                assigned_staff=batch.assigned_staff,
                production_notes=batch.production_notes,
                quality_score=batch.quality_score,
                quality_notes=batch.quality_notes,
                defect_rate=batch.defect_rate,
                rework_required=batch.rework_required,
                planned_material_cost=batch.planned_material_cost,
                actual_material_cost=batch.actual_material_cost,
                labor_cost=batch.labor_cost,
                overhead_cost=batch.overhead_cost,
                total_production_cost=batch.total_production_cost,
                cost_per_unit=batch.cost_per_unit,
                production_temperature=batch.production_temperature,
                production_humidity=batch.production_humidity,
                oven_temperature=batch.oven_temperature,
                baking_time_minutes=batch.baking_time_minutes,
                waste_quantity=batch.waste_quantity,
                waste_reason=batch.waste_reason,
                efficiency_percentage=batch.efficiency_percentage,
                customer_order_reference=batch.customer_order_reference,
                pre_order_quantity=batch.pre_order_quantity,
                shelf_quantity=batch.shelf_quantity,
                created_at=session_time,
                updated_at=session_time,
                created_by=batch.created_by,
                completed_by=batch.completed_by
            )
            db.add(new_batch)
            stats["production_batches"] += 1

        # Flush to get batch IDs
        await db.flush()
        # Clone Production Ingredient Consumption
        for old_batch_id, new_batch_id in batch_id_map.items():
            result = await db.execute(
                select(ProductionIngredientConsumption).where(
                    ProductionIngredientConsumption.production_batch_id == old_batch_id
                )
            )
            consumptions = result.scalars().all()

            for consumption in consumptions:
                # Get the new recipe ingredient ID
                new_recipe_ingredient_id = recipe_ingredient_map.get(
                    consumption.recipe_ingredient_id,
                    consumption.recipe_ingredient_id
                )

                adjusted_consumption_time = adjust_date_for_demo(
                    consumption.consumption_time,
                    session_time,
                    BASE_REFERENCE_DATE
                ) if consumption.consumption_time else None

                new_consumption = ProductionIngredientConsumption(
                    id=uuid.uuid4(),
                    tenant_id=virtual_uuid,
                    production_batch_id=new_batch_id,
                    recipe_ingredient_id=new_recipe_ingredient_id,
                    ingredient_id=consumption.ingredient_id,  # Keep ingredient reference
                    stock_id=None,  # Don't clone stock references
                    planned_quantity=consumption.planned_quantity,
                    actual_quantity=consumption.actual_quantity,
                    unit=consumption.unit,
                    variance_quantity=consumption.variance_quantity,
                    variance_percentage=consumption.variance_percentage,
                    unit_cost=consumption.unit_cost,
                    total_cost=consumption.total_cost,
                    consumption_time=adjusted_consumption_time,
                    consumption_notes=consumption.consumption_notes,
                    staff_member=consumption.staff_member,
                    ingredient_condition=consumption.ingredient_condition,
                    quality_impact=consumption.quality_impact,
                    substitution_used=consumption.substitution_used,
                    substitution_details=consumption.substitution_details
                )
                db.add(new_consumption)
                stats["ingredient_consumptions"] += 1
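        # Note on transactionality: nothing has been written permanently yet;
        # the whole clone goes through a single transaction, and the exception
        # handler below rolls the session back, so a virtual tenant is never
        # left with a partial clone.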
        # Commit all changes
        await db.commit()
        total_records = sum(stats.values())
        duration_ms = int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000)

        logger.info(
            "Recipes data cloning completed",
            virtual_tenant_id=virtual_tenant_id,
            total_records=total_records,
            stats=stats,
            duration_ms=duration_ms
        )

        return {
            "service": "recipes",
            "status": "completed",
            "records_cloned": total_records,
            "duration_ms": duration_ms,
            "details": stats
        }

    except ValueError as e:
        logger.error("Invalid UUID format", error=str(e))
        raise HTTPException(status_code=400, detail=f"Invalid UUID: {str(e)}")

    except Exception as e:
        logger.error(
            "Failed to clone recipes data",
            error=str(e),
            virtual_tenant_id=virtual_tenant_id,
            exc_info=True
        )

        # Rollback on error
        await db.rollback()

        return {
            "service": "recipes",
            "status": "failed",
            "records_cloned": 0,
            "duration_ms": int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000),
            "error": str(e)
        }
@router.get("/clone/health")
|
|
async def clone_health_check(_: bool = Depends(verify_internal_api_key)):
|
|
"""
|
|
Health check for internal cloning endpoint
|
|
Used by orchestrator to verify service availability
|
|
"""
|
|
return {
|
|
"service": "recipes",
|
|
"clone_endpoint": "available",
|
|
"version": "2.0.0"
|
|
}
|
|
|
|
|
|


@router.delete("/tenant/{virtual_tenant_id}")
async def delete_demo_data(
    virtual_tenant_id: str,
    db: AsyncSession = Depends(get_db),
    _: bool = Depends(verify_internal_api_key)
):
    """
    Delete all recipe and production data for a virtual demo tenant.

    Called by the demo session cleanup service to remove ephemeral data
    when demo sessions expire or are destroyed.
    """
    logger.info(
        "Deleting recipe data for virtual tenant",
        virtual_tenant_id=virtual_tenant_id
    )

    start_time = datetime.now(timezone.utc)

    try:
        virtual_uuid = uuid.UUID(virtual_tenant_id)

        # Count records before deletion
        recipe_count = await db.scalar(
            select(func.count(Recipe.id)).where(Recipe.tenant_id == virtual_uuid)
        )
        ingredient_count = await db.scalar(
            select(func.count(RecipeIngredient.id)).where(RecipeIngredient.tenant_id == virtual_uuid)
        )
        batch_count = await db.scalar(
            select(func.count(ProductionBatch.id)).where(ProductionBatch.tenant_id == virtual_uuid)
        )
        consumption_count = await db.scalar(
            select(func.count(ProductionIngredientConsumption.id)).where(
                ProductionIngredientConsumption.tenant_id == virtual_uuid
            )
        )

        # Delete in dependency order (consumptions reference batches and recipe
        # ingredients; batches and recipe ingredients reference recipes)
        await db.execute(
            delete(ProductionIngredientConsumption).where(
                ProductionIngredientConsumption.tenant_id == virtual_uuid
            )
        )
        await db.execute(
            delete(ProductionBatch).where(ProductionBatch.tenant_id == virtual_uuid)
        )
        await db.execute(
            delete(RecipeIngredient).where(RecipeIngredient.tenant_id == virtual_uuid)
        )
        await db.execute(
            delete(Recipe).where(Recipe.tenant_id == virtual_uuid)
        )

        await db.commit()

        duration_ms = int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000)

        logger.info(
            "Recipe data deleted successfully",
            virtual_tenant_id=virtual_tenant_id,
            recipes_deleted=recipe_count,
            ingredients_deleted=ingredient_count,
            batches_deleted=batch_count,
            consumptions_deleted=consumption_count,
            duration_ms=duration_ms
        )

        return {
            "service": "recipes",
            "status": "deleted",
            "virtual_tenant_id": virtual_tenant_id,
            "records_deleted": {
                "recipes": recipe_count,
                "recipe_ingredients": ingredient_count,
                "production_batches": batch_count,
                "ingredient_consumptions": consumption_count,
                "total": recipe_count + ingredient_count + batch_count + consumption_count
            },
            "duration_ms": duration_ms
        }

    except ValueError as e:
        logger.error("Invalid UUID format", error=str(e))
        raise HTTPException(status_code=400, detail=f"Invalid UUID: {str(e)}")

    except Exception as e:
        logger.error(
            "Failed to delete recipe data",
            virtual_tenant_id=virtual_tenant_id,
            error=str(e),
            exc_info=True
        )
        await db.rollback()
        raise HTTPException(
            status_code=500,
            detail=f"Failed to delete recipe data: {str(e)}"
        )
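

# Wiring note (assumption, not shown in this file): this router is expected to
# be registered on the service's FastAPI application, e.g.
#   app.include_router(router)  # exposes the /internal/demo/* endpoints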