bakery-ia/services/recipes/app/api/internal_demo.py

"""
Internal Demo Cloning API for Recipes Service
Service-to-service endpoints for cloning and deleting demo recipe data
"""
from fastapi import APIRouter, Depends, HTTPException, Header
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select, delete, func
import structlog
import uuid
from uuid import UUID
from datetime import datetime, timezone, timedelta
from typing import Any, Optional
import os
import sys
import json
from pathlib import Path
sys.path.insert(0, str(Path(__file__).parent.parent.parent.parent))
from shared.utils.demo_dates import adjust_date_for_demo, resolve_time_marker
from app.core.database import get_db
from app.models.recipes import (
Recipe, RecipeIngredient, ProductionBatch, ProductionIngredientConsumption,
RecipeStatus, ProductionStatus, MeasurementUnit, ProductionPriority
)
from app.core.config import settings
logger = structlog.get_logger()
router = APIRouter()
# Base demo tenant IDs
DEMO_TENANT_PROFESSIONAL = "a1b2c3d4-e5f6-47a8-b9c0-d1e2f3a4b5c6"
def parse_date_field(
    field_value: Any,
session_time: datetime,
field_name: str = "date"
) -> Optional[datetime]:
"""
Parse a date field from JSON, supporting BASE_TS markers and ISO timestamps.
Args:
field_value: The date field value (can be BASE_TS marker, ISO string, or None)
session_time: Session creation time (timezone-aware UTC)
field_name: Name of the field (for logging)
Returns:
Timezone-aware UTC datetime or None
"""
if field_value is None:
return None
# Handle BASE_TS markers
if isinstance(field_value, str) and field_value.startswith("BASE_TS"):
try:
return resolve_time_marker(field_value, session_time)
except (ValueError, AttributeError) as e:
logger.warning(
"Failed to resolve BASE_TS marker",
field_name=field_name,
marker=field_value,
error=str(e)
)
return None
# Handle ISO timestamps (legacy format - convert to absolute datetime)
if isinstance(field_value, str) and ('T' in field_value or 'Z' in field_value):
try:
parsed_date = datetime.fromisoformat(field_value.replace('Z', '+00:00'))
# Adjust relative to session time
return adjust_date_for_demo(parsed_date, session_time)
except (ValueError, AttributeError) as e:
logger.warning(
"Failed to parse ISO timestamp",
field_name=field_name,
value=field_value,
error=str(e)
)
return None
logger.warning(
"Unknown date format",
field_name=field_name,
value=field_value,
value_type=type(field_value).__name__
)
return None
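# Illustrative calls (a sketch only; the exact BASE_TS marker grammar is defined
# by shared.utils.demo_dates.resolve_time_marker, so the marker shown here is an
# assumption):
#
#   session_time = datetime(2025, 1, 10, 8, 0, tzinfo=timezone.utc)
#   parse_date_field(None, session_time)                      # -> None
#   parse_date_field("2025-01-01T06:00:00Z", session_time)    # ISO string, shifted via adjust_date_for_demo
#   parse_date_field("BASE_TS", session_time, "created_at")   # marker resolved relative to session_time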
def verify_internal_api_key(x_internal_api_key: Optional[str] = Header(None)):
"""Verify internal API key for service-to-service communication"""
if x_internal_api_key != settings.INTERNAL_API_KEY:
logger.warning("Unauthorized internal API access attempted")
raise HTTPException(status_code=403, detail="Invalid internal API key")
return True
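# Callers (e.g. the demo orchestrator) are expected to pass the shared secret in
# the header FastAPI derives from the parameter name above:
#
#   X-Internal-API-Key: <settings.INTERNAL_API_KEY>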
@router.post("/internal/demo/clone")
async def clone_demo_data(
base_tenant_id: str,
virtual_tenant_id: str,
demo_account_type: str,
session_id: Optional[str] = None,
session_created_at: Optional[str] = None,
db: AsyncSession = Depends(get_db),
_: bool = Depends(verify_internal_api_key)
):
"""
Clone recipes service data for a virtual demo tenant
This endpoint creates fresh demo data by:
1. Loading seed data from JSON files
2. Applying XOR-based ID transformation
3. Adjusting dates relative to session creation time
4. Creating records in the virtual tenant
Args:
base_tenant_id: Template tenant UUID (for reference)
virtual_tenant_id: Target virtual tenant UUID
demo_account_type: Type of demo account
session_id: Originating session ID for tracing
session_created_at: Session creation timestamp for date adjustment
Returns:
Cloning status and record counts
"""
start_time = datetime.now(timezone.utc)
try:
# Validate UUIDs
virtual_uuid = uuid.UUID(virtual_tenant_id)
# Parse session creation time for date adjustment
if session_created_at:
try:
session_time = datetime.fromisoformat(session_created_at.replace('Z', '+00:00'))
except (ValueError, AttributeError):
session_time = start_time
else:
session_time = start_time
logger.info(
"Starting recipes data cloning",
base_tenant_id=base_tenant_id,
virtual_tenant_id=virtual_tenant_id,
demo_account_type=demo_account_type,
session_id=session_id,
session_created_at=session_created_at
)
# Load seed data from JSON files
try:
from shared.utils.seed_data_paths import get_seed_data_path
if demo_account_type == "professional":
json_file = get_seed_data_path("professional", "04-recipes.json")
elif demo_account_type == "enterprise":
json_file = get_seed_data_path("enterprise", "04-recipes.json")
else:
raise ValueError(f"Invalid demo account type: {demo_account_type}")
except ImportError:
# Fallback to original path
seed_data_dir = Path(__file__).parent.parent.parent.parent / "infrastructure" / "seed-data"
if demo_account_type == "professional":
json_file = seed_data_dir / "professional" / "04-recipes.json"
elif demo_account_type == "enterprise":
json_file = seed_data_dir / "enterprise" / "parent" / "04-recipes.json"
else:
raise ValueError(f"Invalid demo account type: {demo_account_type}")
if not json_file.exists():
raise HTTPException(
status_code=404,
detail=f"Seed data file not found: {json_file}"
)
# Load JSON data
with open(json_file, 'r', encoding='utf-8') as f:
seed_data = json.load(f)
# Track cloning statistics
stats = {
"recipes": 0,
"recipe_ingredients": 0
}
        # Create recipes, keeping a map of original -> transformed IDs so the
        # recipe ingredients below can be re-linked to the cloned recipes
        from shared.utils.demo_id_transformer import transform_id

        recipe_id_map = {}
        for recipe_data in seed_data.get('recipes', []):
            # Transform recipe ID using XOR
            try:
                recipe_uuid = uuid.UUID(recipe_data['id'])
                transformed_id = transform_id(recipe_uuid, virtual_uuid)
except ValueError as e:
logger.error("Failed to parse recipe UUID",
recipe_id=recipe_data['id'],
error=str(e))
raise HTTPException(
status_code=400,
detail=f"Invalid UUID format in recipe data: {str(e)}"
)
# Parse date fields (supports BASE_TS markers and ISO timestamps)
adjusted_created_at = parse_date_field(
recipe_data.get('created_at'),
session_time,
"created_at"
)
adjusted_updated_at = parse_date_field(
recipe_data.get('updated_at'),
session_time,
"updated_at"
)
# Map field names from seed data to model fields
# Handle yield_quantity/yield_unit (may be named finished_product_quantity/unit in seed data)
yield_quantity = recipe_data.get('yield_quantity') or recipe_data.get('finished_product_quantity', 1.0)
yield_unit_str = recipe_data.get('yield_unit') or recipe_data.get('finished_product_unit', 'UNITS')
# Convert yield_unit string to enum if needed
if isinstance(yield_unit_str, str):
try:
yield_unit = MeasurementUnit[yield_unit_str.upper()]
except KeyError:
yield_unit = MeasurementUnit.UNITS
else:
yield_unit = yield_unit_str
# Convert status string to enum if needed
status = recipe_data.get('status', 'ACTIVE')
if isinstance(status, str):
try:
status = RecipeStatus[status.upper()]
except KeyError:
status = RecipeStatus.ACTIVE
new_recipe = Recipe(
id=str(transformed_id),
tenant_id=virtual_uuid,
name=recipe_data['name'],
description=recipe_data.get('description'),
recipe_code=recipe_data.get('recipe_code'),
version=recipe_data.get('version', '1.0'),
status=status,
finished_product_id=recipe_data['finished_product_id'],
yield_quantity=yield_quantity,
yield_unit=yield_unit,
category=recipe_data.get('category'),
difficulty_level=recipe_data.get('difficulty_level', 1),
prep_time_minutes=recipe_data.get('prep_time_minutes') or recipe_data.get('preparation_time_minutes'),
cook_time_minutes=recipe_data.get('cook_time_minutes') or recipe_data.get('baking_time_minutes'),
total_time_minutes=recipe_data.get('total_time_minutes'),
rest_time_minutes=recipe_data.get('rest_time_minutes') or recipe_data.get('cooling_time_minutes'),
instructions=recipe_data.get('instructions'),
preparation_notes=recipe_data.get('notes') or recipe_data.get('preparation_notes'),
created_at=adjusted_created_at,
updated_at=adjusted_updated_at
)
db.add(new_recipe)
stats["recipes"] += 1
            # Map original recipe ID to its transformed ID for ingredient linking
            recipe_id_map[recipe_data['id']] = str(transformed_id)
# Create Recipe Ingredients
for recipe_ingredient_data in seed_data.get('recipe_ingredients', []):
# Transform ingredient ID using XOR
try:
ingredient_uuid = uuid.UUID(recipe_ingredient_data['id'])
transformed_id = transform_id(ingredient_uuid, virtual_uuid)
except ValueError as e:
logger.error("Failed to parse recipe ingredient UUID",
ingredient_id=recipe_ingredient_data['id'],
error=str(e))
raise HTTPException(
status_code=400,
detail=f"Invalid UUID format in recipe ingredient data: {str(e)}"
)
# Get the transformed recipe ID
recipe_id = recipe_id_map.get(recipe_ingredient_data['recipe_id'])
if not recipe_id:
logger.error("Recipe not found for ingredient",
recipe_id=recipe_ingredient_data['recipe_id'])
continue
# Convert unit string to enum if needed
unit_str = recipe_ingredient_data.get('unit', 'KILOGRAMS')
if isinstance(unit_str, str):
try:
unit = MeasurementUnit[unit_str.upper()]
except KeyError:
# Try without 'S' for singular forms
try:
unit = MeasurementUnit[unit_str.upper().rstrip('S')]
except KeyError:
unit = MeasurementUnit.KILOGRAMS
else:
unit = unit_str
new_recipe_ingredient = RecipeIngredient(
id=str(transformed_id),
tenant_id=virtual_uuid,
recipe_id=recipe_id,
ingredient_id=recipe_ingredient_data['ingredient_id'],
quantity=recipe_ingredient_data['quantity'],
unit=unit,
unit_cost=recipe_ingredient_data.get('cost_per_unit') or recipe_ingredient_data.get('unit_cost', 0.0),
total_cost=recipe_ingredient_data.get('total_cost'),
ingredient_order=recipe_ingredient_data.get('sequence') or recipe_ingredient_data.get('ingredient_order', 1),
is_optional=recipe_ingredient_data.get('is_optional', False),
ingredient_notes=recipe_ingredient_data.get('notes') or recipe_ingredient_data.get('ingredient_notes')
)
db.add(new_recipe_ingredient)
stats["recipe_ingredients"] += 1
await db.commit()
duration_ms = int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000)
logger.info(
"Recipes data cloned successfully",
virtual_tenant_id=virtual_tenant_id,
records_cloned=stats,
duration_ms=duration_ms
)
return {
"service": "recipes",
"status": "completed",
"records_cloned": sum(stats.values()),
"duration_ms": duration_ms,
"details": {
"recipes": stats["recipes"],
"recipe_ingredients": stats["recipe_ingredients"],
"virtual_tenant_id": str(virtual_tenant_id)
}
}
    except HTTPException:
        # Re-raise explicit HTTP errors so the generic handler below does not
        # swallow them (e.g. the 404 for a missing seed data file)
        await db.rollback()
        raise
    except ValueError as e:
        logger.error("Invalid clone request data", error=str(e), virtual_tenant_id=virtual_tenant_id)
        raise HTTPException(status_code=400, detail=f"Invalid request data: {str(e)}")
except Exception as e:
logger.error(
"Failed to clone recipes data",
error=str(e),
virtual_tenant_id=virtual_tenant_id,
exc_info=True
)
# Rollback on error
await db.rollback()
return {
"service": "recipes",
"status": "failed",
"records_cloned": 0,
"duration_ms": int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000),
"error": str(e)
}
@router.get("/clone/health")
async def clone_health_check(_: bool = Depends(verify_internal_api_key)):
"""
Health check for internal cloning endpoint
Used by orchestrator to verify service availability
"""
return {
"service": "recipes",
"clone_endpoint": "available",
"version": "2.0.0"
}
@router.delete("/tenant/{virtual_tenant_id}")
async def delete_demo_tenant_data(
virtual_tenant_id: UUID,
db: AsyncSession = Depends(get_db),
_: bool = Depends(verify_internal_api_key)
):
"""
Delete all demo data for a virtual tenant.
This endpoint is idempotent - safe to call multiple times.
"""
start_time = datetime.now(timezone.utc)
records_deleted = {
"recipes": 0,
"recipe_ingredients": 0,
"total": 0
}
try:
# Delete in reverse dependency order
# 1. Delete recipe ingredients (depends on recipes)
result = await db.execute(
delete(RecipeIngredient)
.where(RecipeIngredient.tenant_id == virtual_tenant_id)
)
records_deleted["recipe_ingredients"] = result.rowcount
# 2. Delete recipes
result = await db.execute(
delete(Recipe)
.where(Recipe.tenant_id == virtual_tenant_id)
)
records_deleted["recipes"] = result.rowcount
records_deleted["total"] = sum(records_deleted.values())
await db.commit()
logger.info(
"demo_data_deleted",
service="recipes",
virtual_tenant_id=str(virtual_tenant_id),
records_deleted=records_deleted
)
return {
"service": "recipes",
"status": "deleted",
"virtual_tenant_id": str(virtual_tenant_id),
"records_deleted": records_deleted,
"duration_ms": int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000)
}
except Exception as e:
await db.rollback()
logger.error(
"demo_data_deletion_failed",
service="recipes",
virtual_tenant_id=str(virtual_tenant_id),
error=str(e)
)
raise HTTPException(
status_code=500,
detail=f"Failed to delete demo data: {str(e)}"
)