Initial commit - production deployment
This commit is contained in:
426
services/recipes/app/api/internal_demo.py
Normal file
426
services/recipes/app/api/internal_demo.py
Normal file
@@ -0,0 +1,426 @@
|
||||
"""
|
||||
Internal Demo Cloning API for Recipes Service
|
||||
Service-to-service endpoint for cloning recipe and production data
|
||||
"""
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, Header
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from sqlalchemy import select, delete, func
|
||||
import structlog
|
||||
import uuid
|
||||
from uuid import UUID
|
||||
from datetime import datetime, timezone, timedelta
|
||||
from typing import Optional
|
||||
import os
|
||||
import sys
|
||||
import json
|
||||
from pathlib import Path
|
||||
|
||||
sys.path.insert(0, str(Path(__file__).parent.parent.parent.parent))
|
||||
from shared.utils.demo_dates import adjust_date_for_demo, resolve_time_marker
|
||||
|
||||
from app.core.database import get_db
|
||||
from app.models.recipes import (
|
||||
Recipe, RecipeIngredient, ProductionBatch, ProductionIngredientConsumption,
|
||||
RecipeStatus, ProductionStatus, MeasurementUnit, ProductionPriority
|
||||
)
|
||||
|
||||
from app.core.config import settings
|
||||
|
||||
# Module-level structured logger for this API module.
logger = structlog.get_logger()

# All routes in this module are service-internal (see the health-check
# docstring: called by the demo orchestrator), mounted under /internal/demo.
router = APIRouter(prefix="/internal/demo", tags=["internal"])

# Base demo tenant IDs
# Template tenant UUID for the "professional" demo dataset.
DEMO_TENANT_PROFESSIONAL = "a1b2c3d4-e5f6-47a8-b9c0-d1e2f3a4b5c6"
|
||||
|
||||
|
||||
def parse_date_field(
    field_value: object,  # was annotated `any` (the builtin function, not a type)
    session_time: datetime,
    field_name: str = "date"
) -> Optional[datetime]:
    """
    Parse a date field from JSON, supporting BASE_TS markers and ISO timestamps.

    Three input shapes are recognized:
      * ``None``                          -> returns ``None``
      * ``"BASE_TS..."`` marker strings   -> resolved via ``resolve_time_marker``
      * ISO-8601 strings (contain T or Z) -> parsed, then shifted relative to
        the session via ``adjust_date_for_demo``
    Anything else is logged and treated as missing.

    Args:
        field_value: The date field value (BASE_TS marker, ISO string, or None)
        session_time: Session creation time (timezone-aware UTC)
        field_name: Name of the field (for logging)

    Returns:
        Timezone-aware UTC datetime, or None when the value is absent or
        cannot be parsed (this function never raises for bad input).
    """
    if field_value is None:
        return None

    # Handle BASE_TS markers
    if isinstance(field_value, str) and field_value.startswith("BASE_TS"):
        try:
            return resolve_time_marker(field_value, session_time)
        except (ValueError, AttributeError) as e:
            logger.warning(
                "Failed to resolve BASE_TS marker",
                field_name=field_name,
                marker=field_value,
                error=str(e)
            )
            return None

    # Handle ISO timestamps (legacy format - convert to absolute datetime)
    if isinstance(field_value, str) and ('T' in field_value or 'Z' in field_value):
        try:
            # fromisoformat() on older Pythons rejects a trailing 'Z', so
            # normalize it to an explicit UTC offset first.
            parsed_date = datetime.fromisoformat(field_value.replace('Z', '+00:00'))
            # Adjust relative to session time
            return adjust_date_for_demo(parsed_date, session_time)
        except (ValueError, AttributeError) as e:
            logger.warning(
                "Failed to parse ISO timestamp",
                field_name=field_name,
                value=field_value,
                error=str(e)
            )
            return None

    # Unrecognized shape (e.g. ints, dicts, date-only strings): log and skip.
    logger.warning(
        "Unknown date format",
        field_name=field_name,
        value=field_value,
        value_type=type(field_value).__name__
    )
    return None
|
||||
|
||||
|
||||
@router.post("/clone")
async def clone_demo_data(
    base_tenant_id: str,
    virtual_tenant_id: str,
    demo_account_type: str,
    session_id: Optional[str] = None,
    session_created_at: Optional[str] = None,
    db: AsyncSession = Depends(get_db)
):
    """
    Clone recipes service data for a virtual demo tenant.

    This endpoint creates fresh demo data by:
    1. Loading seed data from JSON files
    2. Applying XOR-based ID transformation
    3. Adjusting dates relative to session creation time
    4. Creating records in the virtual tenant

    Args:
        base_tenant_id: Template tenant UUID (for reference; also selects the
            child dataset for "enterprise_child" accounts)
        virtual_tenant_id: Target virtual tenant UUID
        demo_account_type: "professional", "enterprise", or "enterprise_child"
        session_id: Originating session ID for tracing
        session_created_at: Session creation timestamp for date adjustment

    Returns:
        Cloning status and record counts. On unexpected errors the session is
        rolled back and a "failed" payload is returned (HTTP 200) so the
        orchestrator can aggregate per-service results.

    Raises:
        HTTPException: 400 for invalid UUIDs in the request or seed data.
    """
    start_time = datetime.now(timezone.utc)

    try:
        # Validate the target tenant UUID up front.
        virtual_uuid = uuid.UUID(virtual_tenant_id)

        # Parse session creation time for date adjustment; fall back to
        # "now" when the timestamp is missing or malformed.
        if session_created_at:
            try:
                session_time = datetime.fromisoformat(session_created_at.replace('Z', '+00:00'))
            except (ValueError, AttributeError):
                session_time = start_time
        else:
            session_time = start_time

        logger.info(
            "Starting recipes data cloning",
            base_tenant_id=base_tenant_id,
            virtual_tenant_id=virtual_tenant_id,
            demo_account_type=demo_account_type,
            session_id=session_id,
            session_created_at=session_created_at
        )

        # Load seed data from JSON files
        from shared.utils.seed_data_paths import get_seed_data_path
        # Hoisted out of the recipes loop: the ingredient loop below also
        # needs transform_id, and the original per-iteration import left it
        # undefined whenever the seed file had ingredients but no recipes.
        from shared.utils.demo_id_transformer import transform_id

        if demo_account_type == "professional":
            json_file = get_seed_data_path("professional", "04-recipes.json")
        elif demo_account_type == "enterprise":
            json_file = get_seed_data_path("enterprise", "04-recipes.json")
        elif demo_account_type == "enterprise_child":
            json_file = get_seed_data_path("enterprise", "04-recipes.json", child_id=base_tenant_id)
        else:
            raise ValueError(f"Invalid demo account type: {demo_account_type}")

        # Load JSON data
        with open(json_file, 'r', encoding='utf-8') as f:
            seed_data = json.load(f)

        # Track cloning statistics
        stats = {
            "recipes": 0,
            "recipe_ingredients": 0
        }

        # Maps seed recipe IDs -> transformed IDs so that cloned ingredients
        # can be re-linked to their cloned parent recipes.
        recipe_id_map = {}

        # Create Recipes
        for recipe_data in seed_data.get('recipes', []):
            # Transform recipe ID using XOR. Pass the parsed UUID (not the
            # raw string) for consistency with the ingredient loop below.
            try:
                recipe_uuid = uuid.UUID(recipe_data['id'])
                transformed_id = transform_id(recipe_uuid, virtual_uuid)
            except ValueError as e:
                logger.error("Failed to parse recipe UUID",
                             recipe_id=recipe_data['id'],
                             error=str(e))
                raise HTTPException(
                    status_code=400,
                    detail=f"Invalid UUID format in recipe data: {str(e)}"
                )

            # Parse date fields (supports BASE_TS markers and ISO timestamps)
            adjusted_created_at = parse_date_field(
                recipe_data.get('created_at'),
                session_time,
                "created_at"
            )
            adjusted_updated_at = parse_date_field(
                recipe_data.get('updated_at'),
                session_time,
                "updated_at"
            )

            # Map field names from seed data to model fields
            # Handle yield_quantity/yield_unit (may be named finished_product_quantity/unit in seed data)
            yield_quantity = recipe_data.get('yield_quantity') or recipe_data.get('finished_product_quantity', 1.0)
            yield_unit_str = recipe_data.get('yield_unit') or recipe_data.get('finished_product_unit', 'UNITS')

            # Convert yield_unit string to enum if needed
            if isinstance(yield_unit_str, str):
                try:
                    yield_unit = MeasurementUnit[yield_unit_str.upper()]
                except KeyError:
                    yield_unit = MeasurementUnit.UNITS
            else:
                yield_unit = yield_unit_str

            # Convert status string to enum if needed
            status = recipe_data.get('status', 'ACTIVE')
            if isinstance(status, str):
                try:
                    status = RecipeStatus[status.upper()]
                except KeyError:
                    status = RecipeStatus.ACTIVE

            new_recipe = Recipe(
                id=str(transformed_id),
                tenant_id=virtual_uuid,
                name=recipe_data['name'],
                description=recipe_data.get('description'),
                recipe_code=recipe_data.get('recipe_code'),
                version=recipe_data.get('version', '1.0'),
                status=status,
                finished_product_id=recipe_data['finished_product_id'],
                yield_quantity=yield_quantity,
                yield_unit=yield_unit,
                category=recipe_data.get('category'),
                difficulty_level=recipe_data.get('difficulty_level', 1),
                prep_time_minutes=recipe_data.get('prep_time_minutes') or recipe_data.get('preparation_time_minutes'),
                cook_time_minutes=recipe_data.get('cook_time_minutes') or recipe_data.get('baking_time_minutes'),
                total_time_minutes=recipe_data.get('total_time_minutes'),
                rest_time_minutes=recipe_data.get('rest_time_minutes') or recipe_data.get('cooling_time_minutes'),
                instructions=recipe_data.get('instructions'),
                preparation_notes=recipe_data.get('notes') or recipe_data.get('preparation_notes'),
                created_at=adjusted_created_at,
                updated_at=adjusted_updated_at
            )
            db.add(new_recipe)
            stats["recipes"] += 1

            # Add recipe ID to map for ingredients
            recipe_id_map[recipe_data['id']] = str(transformed_id)

        # Create Recipe Ingredients
        for recipe_ingredient_data in seed_data.get('recipe_ingredients', []):
            # Transform ingredient ID using XOR
            try:
                ingredient_uuid = uuid.UUID(recipe_ingredient_data['id'])
                transformed_id = transform_id(ingredient_uuid, virtual_uuid)
            except ValueError as e:
                logger.error("Failed to parse recipe ingredient UUID",
                             ingredient_id=recipe_ingredient_data['id'],
                             error=str(e))
                raise HTTPException(
                    status_code=400,
                    detail=f"Invalid UUID format in recipe ingredient data: {str(e)}"
                )

            # Get the transformed recipe ID; skip orphans rather than abort.
            recipe_id = recipe_id_map.get(recipe_ingredient_data['recipe_id'])
            if not recipe_id:
                logger.error("Recipe not found for ingredient",
                             recipe_id=recipe_ingredient_data['recipe_id'])
                continue

            # Convert unit string to enum if needed
            unit_str = recipe_ingredient_data.get('unit', 'KILOGRAMS')
            if isinstance(unit_str, str):
                try:
                    unit = MeasurementUnit[unit_str.upper()]
                except KeyError:
                    # Try without 'S' for singular forms
                    try:
                        unit = MeasurementUnit[unit_str.upper().rstrip('S')]
                    except KeyError:
                        unit = MeasurementUnit.KILOGRAMS
            else:
                unit = unit_str

            new_recipe_ingredient = RecipeIngredient(
                id=str(transformed_id),
                tenant_id=virtual_uuid,
                recipe_id=recipe_id,
                ingredient_id=recipe_ingredient_data['ingredient_id'],
                quantity=recipe_ingredient_data['quantity'],
                unit=unit,
                unit_cost=recipe_ingredient_data.get('cost_per_unit') or recipe_ingredient_data.get('unit_cost', 0.0),
                total_cost=recipe_ingredient_data.get('total_cost'),
                ingredient_order=recipe_ingredient_data.get('sequence') or recipe_ingredient_data.get('ingredient_order', 1),
                is_optional=recipe_ingredient_data.get('is_optional', False),
                ingredient_notes=recipe_ingredient_data.get('notes') or recipe_ingredient_data.get('ingredient_notes')
            )
            db.add(new_recipe_ingredient)
            stats["recipe_ingredients"] += 1

        await db.commit()

        duration_ms = int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000)

        logger.info(
            "Recipes data cloned successfully",
            virtual_tenant_id=virtual_tenant_id,
            records_cloned=stats,
            duration_ms=duration_ms
        )

        return {
            "service": "recipes",
            "status": "completed",
            "records_cloned": sum(stats.values()),
            "duration_ms": duration_ms,
            "details": {
                "recipes": stats["recipes"],
                "recipe_ingredients": stats["recipe_ingredients"],
                "virtual_tenant_id": str(virtual_tenant_id)
            }
        }

    except HTTPException:
        # Propagate the explicit 400s raised for bad seed data; previously
        # they were swallowed by the blanket handler below and turned into
        # a misleading 200 "failed" payload.
        await db.rollback()
        raise

    except ValueError as e:
        logger.error("Invalid UUID format", error=str(e), virtual_tenant_id=virtual_tenant_id)
        raise HTTPException(status_code=400, detail=f"Invalid UUID: {str(e)}")

    except Exception as e:
        logger.error(
            "Failed to clone recipes data",
            error=str(e),
            virtual_tenant_id=virtual_tenant_id,
            exc_info=True
        )

        # Rollback on error
        await db.rollback()

        # Deliberately return a "failed" body (not a 5xx) so the caller can
        # aggregate per-service clone results.
        return {
            "service": "recipes",
            "status": "failed",
            "records_cloned": 0,
            "duration_ms": int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000),
            "error": str(e)
        }
|
||||
|
||||
|
||||
@router.get("/clone/health")
async def clone_health_check():
    """
    Health check for the internal cloning endpoint.

    Used by orchestrator to verify service availability.
    """
    status_payload = dict(
        service="recipes",
        clone_endpoint="available",
        version="2.0.0",
    )
    return status_payload
|
||||
|
||||
|
||||
@router.delete("/tenant/{virtual_tenant_id}")
async def delete_demo_tenant_data(
    virtual_tenant_id: UUID,
    db: AsyncSession = Depends(get_db)
):
    """
    Delete all demo data for a virtual tenant.

    This endpoint is idempotent - safe to call multiple times.

    Args:
        virtual_tenant_id: Virtual tenant whose rows should be removed.
        db: Async database session (injected).

    Returns:
        Deletion status with per-table and total row counts.

    Raises:
        HTTPException: 500 when deletion fails (after rolling back).
    """
    start_time = datetime.now(timezone.utc)

    records_deleted = {
        "recipes": 0,
        "recipe_ingredients": 0,
        "total": 0
    }

    try:
        # Delete in reverse dependency order

        # 1. Delete recipe ingredients (depends on recipes)
        result = await db.execute(
            delete(RecipeIngredient)
            .where(RecipeIngredient.tenant_id == virtual_tenant_id)
        )
        # rowcount can be None on some drivers; clamp so the arithmetic
        # below never raises TypeError.
        records_deleted["recipe_ingredients"] = result.rowcount or 0

        # 2. Delete recipes
        result = await db.execute(
            delete(Recipe)
            .where(Recipe.tenant_id == virtual_tenant_id)
        )
        records_deleted["recipes"] = result.rowcount or 0

        # Sum the per-table counters explicitly; summing dict.values()
        # would fold in the "total" accumulator key itself.
        records_deleted["total"] = (
            records_deleted["recipes"] + records_deleted["recipe_ingredients"]
        )

        await db.commit()

        logger.info(
            "demo_data_deleted",
            service="recipes",
            virtual_tenant_id=str(virtual_tenant_id),
            records_deleted=records_deleted
        )

        return {
            "service": "recipes",
            "status": "deleted",
            "virtual_tenant_id": str(virtual_tenant_id),
            "records_deleted": records_deleted,
            "duration_ms": int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000)
        }

    except Exception as e:
        await db.rollback()
        logger.error(
            "demo_data_deletion_failed",
            service="recipes",
            virtual_tenant_id=str(virtual_tenant_id),
            error=str(e)
        )
        raise HTTPException(
            status_code=500,
            detail=f"Failed to delete demo data: {str(e)}"
        )
|
||||
|
||||
|
||||
Reference in New Issue
Block a user