Files
bakery-ia/services/inventory/app/api/internal_demo.py
2025-12-13 23:57:54 +01:00

486 lines
19 KiB
Python

"""
Internal Demo Cloning API for Inventory Service
Handles internal demo data cloning operations
"""
from fastapi import APIRouter, Depends, HTTPException, Header
from sqlalchemy.ext.asyncio import AsyncSession
from typing import Optional
import structlog
import json
from pathlib import Path
from datetime import datetime
import uuid
from uuid import UUID
from app.core.database import get_db
from app.core.config import settings
from app.models import Ingredient, Stock, ProductType
logger = structlog.get_logger()
router = APIRouter()
async def verify_internal_api_key(x_internal_api_key: str = Header(None)):
"""Verify internal API key for service-to-service communication"""
required_key = settings.INTERNAL_API_KEY
if x_internal_api_key != required_key:
logger.warning("Unauthorized internal API access attempted")
raise HTTPException(status_code=403, detail="Invalid internal API key")
return True
@router.post("/internal/demo/clone")
async def clone_demo_data_internal(
base_tenant_id: str,
virtual_tenant_id: str,
demo_account_type: str,
session_id: Optional[str] = None,
session_created_at: Optional[str] = None,
db: AsyncSession = Depends(get_db),
_: bool = Depends(verify_internal_api_key)
):
"""
Clone inventory service data for a virtual demo tenant
This endpoint creates fresh demo data by:
1. Loading seed data from JSON files
2. Applying XOR-based ID transformation
3. Adjusting dates relative to session creation time
4. Creating records in the virtual tenant
Args:
base_tenant_id: Template tenant UUID (for reference)
virtual_tenant_id: Target virtual tenant UUID
demo_account_type: Type of demo account
session_id: Originating session ID for tracing
session_created_at: Session creation timestamp for date adjustment
db: Database session
Returns:
Dictionary with cloning results
Raises:
HTTPException: On validation or cloning errors
"""
start_time = datetime.now()
try:
# Validate UUIDs
virtual_uuid = UUID(virtual_tenant_id)
# Parse session creation time for date adjustment
if session_created_at:
try:
session_time = datetime.fromisoformat(session_created_at.replace('Z', '+00:00'))
except (ValueError, AttributeError):
session_time = start_time
else:
session_time = start_time
# Debug logging for UUID values
logger.debug("Received UUID values", base_tenant_id=base_tenant_id, virtual_tenant_id=virtual_tenant_id)
if not all([base_tenant_id, virtual_tenant_id, session_id]):
raise HTTPException(
status_code=400,
detail="Missing required parameters: base_tenant_id, virtual_tenant_id, session_id"
)
# Validate UUID format before processing
try:
UUID(base_tenant_id)
UUID(virtual_tenant_id)
except ValueError as e:
logger.error("Invalid UUID format in request",
base_tenant_id=base_tenant_id,
virtual_tenant_id=virtual_tenant_id,
error=str(e))
raise HTTPException(
status_code=400,
detail=f"Invalid UUID format: {str(e)}"
)
# Parse session creation time
if session_created_at:
try:
session_created_at_parsed = datetime.fromisoformat(session_created_at.replace('Z', '+00:00'))
except (ValueError, AttributeError):
session_created_at_parsed = datetime.now()
else:
session_created_at_parsed = datetime.now()
# Determine profile based on demo_account_type
if demo_account_type == "enterprise":
profile = "enterprise"
else:
profile = "professional"
logger.info(
"Starting inventory data cloning with date adjustment",
base_tenant_id=base_tenant_id,
virtual_tenant_id=virtual_tenant_id,
demo_account_type=demo_account_type,
session_id=session_id,
session_time=session_created_at_parsed.isoformat()
)
# Load seed data using shared utility
try:
from shared.utils.seed_data_paths import get_seed_data_path
if profile == "professional":
json_file = get_seed_data_path("professional", "03-inventory.json")
elif profile == "enterprise":
json_file = get_seed_data_path("enterprise", "03-inventory.json")
else:
raise ValueError(f"Invalid profile: {profile}")
except ImportError:
# Fallback to original path
seed_data_dir = Path(__file__).parent.parent.parent.parent / "infrastructure" / "seed-data"
if profile == "professional":
json_file = seed_data_dir / "professional" / "03-inventory.json"
elif profile == "enterprise":
json_file = seed_data_dir / "enterprise" / "parent" / "03-inventory.json"
else:
raise ValueError(f"Invalid profile: {profile}")
if not json_file.exists():
raise HTTPException(
status_code=404,
detail=f"Seed data file not found: {json_file}"
)
# Load JSON data
with open(json_file, 'r', encoding='utf-8') as f:
seed_data = json.load(f)
# Check if data already exists for this virtual tenant (idempotency)
from sqlalchemy import select, delete
existing_check = await db.execute(
select(Ingredient).where(Ingredient.tenant_id == virtual_tenant_id).limit(1)
)
existing_ingredient = existing_check.scalar_one_or_none()
if existing_ingredient:
logger.warning(
"Demo data already exists, skipping clone",
virtual_tenant_id=virtual_tenant_id
)
return {
"status": "skipped",
"reason": "Data already exists",
"records_cloned": 0
}
# Transform and insert data
records_cloned = 0
# Clone ingredients
for ingredient_data in seed_data.get('ingredients', []):
# Transform ID
from shared.utils.demo_id_transformer import transform_id
try:
ingredient_uuid = UUID(ingredient_data['id'])
tenant_uuid = UUID(virtual_tenant_id)
transformed_id = transform_id(ingredient_data['id'], tenant_uuid)
except ValueError as e:
logger.error("Failed to parse UUIDs for ID transformation",
ingredient_id=ingredient_data['id'],
virtual_tenant_id=virtual_tenant_id,
error=str(e))
raise HTTPException(
status_code=400,
detail=f"Invalid UUID format in ingredient data: {str(e)}"
)
# Transform dates
from shared.utils.demo_dates import adjust_date_for_demo
for date_field in ['expiration_date', 'received_date', 'created_at', 'updated_at']:
if date_field in ingredient_data:
try:
date_value = ingredient_data[date_field]
# Handle both string dates and date objects
if isinstance(date_value, str):
original_date = datetime.fromisoformat(date_value)
elif hasattr(date_value, 'isoformat'):
# Already a date/datetime object
original_date = date_value
else:
# Skip if not a valid date format
logger.warning("Skipping invalid date format",
date_field=date_field,
date_value=date_value)
continue
adjusted_date = adjust_date_for_demo(
original_date,
session_created_at_parsed
)
ingredient_data[date_field] = adjusted_date
except (ValueError, AttributeError) as e:
logger.warning("Failed to parse date, skipping",
date_field=date_field,
date_value=ingredient_data[date_field],
error=str(e))
# Remove invalid date to avoid model errors
ingredient_data.pop(date_field, None)
# Map category field to ingredient_category enum
if 'category' in ingredient_data:
category_value = ingredient_data.pop('category')
# Convert category string to IngredientCategory enum
from app.models.inventory import IngredientCategory
try:
ingredient_data['ingredient_category'] = IngredientCategory[category_value.upper()]
except KeyError:
# If category not found in enum, use OTHER
ingredient_data['ingredient_category'] = IngredientCategory.OTHER
# Map unit_of_measure string to enum
if 'unit_of_measure' in ingredient_data:
from app.models.inventory import UnitOfMeasure
unit_mapping = {
'kilograms': UnitOfMeasure.KILOGRAMS,
'grams': UnitOfMeasure.GRAMS,
'liters': UnitOfMeasure.LITERS,
'milliliters': UnitOfMeasure.MILLILITERS,
'units': UnitOfMeasure.UNITS,
'pieces': UnitOfMeasure.PIECES,
'packages': UnitOfMeasure.PACKAGES,
'bags': UnitOfMeasure.BAGS,
'boxes': UnitOfMeasure.BOXES
}
unit_str = ingredient_data['unit_of_measure']
if unit_str in unit_mapping:
ingredient_data['unit_of_measure'] = unit_mapping[unit_str]
else:
# Default to units if not found
ingredient_data['unit_of_measure'] = UnitOfMeasure.UNITS
logger.warning("Unknown unit_of_measure, defaulting to UNITS",
original_unit=unit_str)
# Note: All seed data fields now match the model schema exactly
# No field filtering needed
# Remove original id and tenant_id from ingredient_data to avoid conflict
ingredient_data.pop('id', None)
ingredient_data.pop('tenant_id', None)
# Create ingredient
ingredient = Ingredient(
id=str(transformed_id),
tenant_id=str(virtual_tenant_id),
**ingredient_data
)
db.add(ingredient)
records_cloned += 1
# Clone stock batches
for stock_data in seed_data.get('stock_batches', []):
# Transform ID - handle both UUID and string IDs
from shared.utils.demo_id_transformer import transform_id
try:
# Try to parse as UUID first
stock_uuid = UUID(stock_data['id'])
tenant_uuid = UUID(virtual_tenant_id)
transformed_id = transform_id(stock_data['id'], tenant_uuid)
except ValueError:
# If not a UUID, generate a deterministic UUID from the string ID
import hashlib
stock_id_string = stock_data['id']
tenant_uuid = UUID(virtual_tenant_id)
# Create a deterministic UUID from the string ID and tenant ID
combined = f"{stock_id_string}-{tenant_uuid}"
hash_obj = hashlib.sha256(combined.encode('utf-8'))
transformed_id = UUID(hash_obj.hexdigest()[:32])
logger.info("Generated UUID for non-UUID stock ID",
original_id=stock_id_string,
generated_id=str(transformed_id))
# Transform dates - handle both timestamp dictionaries and ISO strings
for date_field in ['received_date', 'expiration_date', 'best_before_date', 'original_expiration_date', 'transformation_date', 'final_expiration_date', 'created_at', 'updated_at']:
if date_field in stock_data:
try:
date_value = stock_data[date_field]
# Handle timestamp dictionaries (offset_days, hour, minute)
if isinstance(date_value, dict) and 'offset_days' in date_value:
from shared.utils.demo_dates import calculate_demo_datetime
original_date = calculate_demo_datetime(
offset_days=date_value.get('offset_days', 0),
hour=date_value.get('hour', 0),
minute=date_value.get('minute', 0),
session_created_at=session_created_at_parsed
)
elif isinstance(date_value, str):
# ISO string
original_date = datetime.fromisoformat(date_value)
elif hasattr(date_value, 'isoformat'):
# Already a date/datetime object
original_date = date_value
else:
# Skip if not a valid date format
logger.warning("Skipping invalid date format",
date_field=date_field,
date_value=date_value)
continue
adjusted_stock_date = adjust_date_for_demo(
original_date,
session_created_at_parsed
)
stock_data[date_field] = adjusted_stock_date
except (ValueError, AttributeError) as e:
logger.warning("Failed to parse date, skipping",
date_field=date_field,
date_value=stock_data[date_field],
error=str(e))
# Remove invalid date to avoid model errors
stock_data.pop(date_field, None)
# Remove original id and tenant_id from stock_data to avoid conflict
stock_data.pop('id', None)
stock_data.pop('tenant_id', None)
# Create stock batch
stock = Stock(
id=str(transformed_id),
tenant_id=str(virtual_tenant_id),
**stock_data
)
db.add(stock)
records_cloned += 1
await db.commit()
duration_ms = int((datetime.now() - start_time).total_seconds() * 1000)
logger.info(
"Inventory data cloned successfully",
virtual_tenant_id=virtual_tenant_id,
records_cloned=records_cloned,
duration_ms=duration_ms,
ingredients_cloned=len(seed_data.get('ingredients', [])),
stock_batches_cloned=len(seed_data.get('stock_batches', []))
)
return {
"service": "inventory",
"status": "completed",
"records_cloned": records_cloned,
"duration_ms": duration_ms,
"details": {
"ingredients": len(seed_data.get('ingredients', [])),
"stock_batches": len(seed_data.get('stock_batches', [])),
"virtual_tenant_id": str(virtual_tenant_id)
}
}
except ValueError as e:
logger.error("Invalid UUID format", error=str(e), virtual_tenant_id=virtual_tenant_id)
raise HTTPException(status_code=400, detail=f"Invalid UUID: {str(e)}")
except Exception as e:
logger.error(
"Failed to clone inventory data",
error=str(e),
virtual_tenant_id=virtual_tenant_id,
exc_info=True
)
# Rollback on error
await db.rollback()
return {
"service": "inventory",
"status": "failed",
"records_cloned": 0,
"duration_ms": int((datetime.now() - start_time).total_seconds() * 1000),
"error": str(e)
}
@router.get("/clone/health")
async def clone_health_check(_: bool = Depends(verify_internal_api_key)):
"""
Health check for internal cloning endpoint
Used by orchestrator to verify service availability
"""
return {
"service": "inventory",
"clone_endpoint": "available",
"version": "2.0.0"
}
@router.delete("/tenant/{virtual_tenant_id}")
async def delete_demo_tenant_data(
virtual_tenant_id: UUID,
db: AsyncSession = Depends(get_db),
_: bool = Depends(verify_internal_api_key)
):
"""
Delete all demo data for a virtual tenant.
This endpoint is idempotent - safe to call multiple times.
"""
start_time = datetime.now()
records_deleted = {
"ingredients": 0,
"stock": 0,
"total": 0
}
try:
# Delete in reverse dependency order
# 1. Delete stock batches (depends on ingredients)
result = await db.execute(
delete(Stock)
.where(Stock.tenant_id == virtual_tenant_id)
)
records_deleted["stock"] = result.rowcount
# 2. Delete ingredients
result = await db.execute(
delete(Ingredient)
.where(Ingredient.tenant_id == virtual_tenant_id)
)
records_deleted["ingredients"] = result.rowcount
records_deleted["total"] = sum(records_deleted.values())
await db.commit()
logger.info(
"demo_data_deleted",
service="inventory",
virtual_tenant_id=str(virtual_tenant_id),
records_deleted=records_deleted
)
return {
"service": "inventory",
"status": "deleted",
"virtual_tenant_id": str(virtual_tenant_id),
"records_deleted": records_deleted,
"duration_ms": int((datetime.now() - start_time).total_seconds() * 1000)
}
except Exception as e:
await db.rollback()
logger.error(
"demo_data_deletion_failed",
service="inventory",
virtual_tenant_id=str(virtual_tenant_id),
error=str(e)
)
raise HTTPException(
status_code=500,
detail=f"Failed to delete demo data: {str(e)}"
)