# bakery-ia/services/inventory/app/api/internal_demo.py
"""
Internal Demo Cloning API for Inventory Service
2025-12-13 23:57:54 +01:00
Handles internal demo data cloning operations
"""
import json
import uuid
from datetime import datetime, timezone, timedelta
from pathlib import Path
from typing import Optional
from uuid import UUID

import structlog
from fastapi import APIRouter, Depends, HTTPException, Header
from sqlalchemy import delete
from sqlalchemy.ext.asyncio import AsyncSession

from app.core.config import settings
from app.core.database import get_db
from app.models import Ingredient, Stock, ProductType
from shared.utils.demo_dates import adjust_date_for_demo, resolve_time_marker, calculate_edge_case_times
logger = structlog.get_logger()

router = APIRouter()

async def verify_internal_api_key(x_internal_api_key: str = Header(None)):
"""Verify internal API key for service-to-service communication"""
2025-12-13 23:57:54 +01:00
required_key = settings.INTERNAL_API_KEY
if x_internal_api_key != required_key:
logger.warning("Unauthorized internal API access attempted")
raise HTTPException(status_code=403, detail="Invalid internal API key")
return True
2025-12-14 11:58:14 +01:00
def parse_date_field(date_value, session_time: datetime, field_name: str = "date") -> Optional[datetime]:
"""
Parse date field, handling both ISO strings and BASE_TS markers.
Supports:
- BASE_TS markers: "BASE_TS + 1h30m", "BASE_TS - 2d"
- ISO 8601 strings: "2025-01-15T06:00:00Z"
- None values (returns None)
Returns timezone-aware datetime or None.
"""
if not date_value:
return None
# Check if it's a BASE_TS marker
if isinstance(date_value, str) and date_value.startswith("BASE_TS"):
try:
return resolve_time_marker(date_value, session_time)
except ValueError as e:
logger.warning(
f"Invalid BASE_TS marker in {field_name}",
marker=date_value,
error=str(e)
)
return None
# Handle regular ISO date strings
try:
if isinstance(date_value, str):
original_date = datetime.fromisoformat(date_value.replace('Z', '+00:00'))
elif hasattr(date_value, 'isoformat'):
original_date = date_value
else:
logger.warning(f"Unsupported date format in {field_name}", date_value=date_value)
return None
return adjust_date_for_demo(original_date, session_time)
except (ValueError, AttributeError) as e:
logger.warning(
f"Invalid date format in {field_name}",
date_value=date_value,
error=str(e)
)
return None
2025-12-13 23:57:54 +01:00
@router.post("/internal/demo/clone")
async def clone_demo_data_internal(
base_tenant_id: str,
virtual_tenant_id: str,
demo_account_type: str,
session_id: Optional[str] = None,
session_created_at: Optional[str] = None,
db: AsyncSession = Depends(get_db),
_: bool = Depends(verify_internal_api_key)
):
"""
Clone inventory service data for a virtual demo tenant
2025-12-13 23:57:54 +01:00
This endpoint creates fresh demo data by:
1. Loading seed data from JSON files
2. Applying XOR-based ID transformation
3. Adjusting dates relative to session creation time
4. Creating records in the virtual tenant
Args:
2025-12-13 23:57:54 +01:00
base_tenant_id: Template tenant UUID (for reference)
virtual_tenant_id: Target virtual tenant UUID
demo_account_type: Type of demo account
session_id: Originating session ID for tracing
2025-12-13 23:57:54 +01:00
session_created_at: Session creation timestamp for date adjustment
db: Database session
2025-12-14 11:58:14 +01:00
Returns:
2025-12-13 23:57:54 +01:00
Dictionary with cloning results
2025-12-14 11:58:14 +01:00
2025-12-13 23:57:54 +01:00
Raises:
HTTPException: On validation or cloning errors
"""
2025-12-14 11:58:14 +01:00
start_time = datetime.now(timezone.utc)
2025-12-13 23:57:54 +01:00
try:
# Validate UUIDs
2025-12-13 23:57:54 +01:00
virtual_uuid = UUID(virtual_tenant_id)
# Parse session creation time for date adjustment
if session_created_at:
try:
session_time = datetime.fromisoformat(session_created_at.replace('Z', '+00:00'))
except (ValueError, AttributeError):
session_time = start_time
else:
session_time = start_time
# Debug logging for UUID values
logger.debug("Received UUID values", base_tenant_id=base_tenant_id, virtual_tenant_id=virtual_tenant_id)
if not all([base_tenant_id, virtual_tenant_id, session_id]):
raise HTTPException(
status_code=400,
detail="Missing required parameters: base_tenant_id, virtual_tenant_id, session_id"
2025-10-24 13:05:04 +02:00
)
2025-12-13 23:57:54 +01:00
# Validate UUID format before processing
try:
UUID(base_tenant_id)
UUID(virtual_tenant_id)
except ValueError as e:
logger.error("Invalid UUID format in request",
base_tenant_id=base_tenant_id,
virtual_tenant_id=virtual_tenant_id,
error=str(e))
raise HTTPException(
status_code=400,
detail=f"Invalid UUID format: {str(e)}"
2025-10-24 13:05:04 +02:00
)
2025-12-13 23:57:54 +01:00
# Parse session creation time
if session_created_at:
try:
session_created_at_parsed = datetime.fromisoformat(session_created_at.replace('Z', '+00:00'))
except (ValueError, AttributeError):
2025-12-14 11:58:14 +01:00
session_created_at_parsed = datetime.now(timezone.utc)
2025-12-13 23:57:54 +01:00
else:
2025-12-14 11:58:14 +01:00
session_created_at_parsed = datetime.now(timezone.utc)
2025-12-13 23:57:54 +01:00
# Determine profile based on demo_account_type
if demo_account_type == "enterprise":
profile = "enterprise"
else:
profile = "professional"
logger.info(
2025-12-13 23:57:54 +01:00
"Starting inventory data cloning with date adjustment",
base_tenant_id=base_tenant_id,
virtual_tenant_id=virtual_tenant_id,
demo_account_type=demo_account_type,
session_id=session_id,
session_time=session_created_at_parsed.isoformat()
)
2025-12-13 23:57:54 +01:00
# Load seed data using shared utility
try:
from shared.utils.seed_data_paths import get_seed_data_path
if profile == "professional":
json_file = get_seed_data_path("professional", "03-inventory.json")
elif profile == "enterprise":
json_file = get_seed_data_path("enterprise", "03-inventory.json")
else:
raise ValueError(f"Invalid profile: {profile}")
except ImportError:
# Fallback to original path
seed_data_dir = Path(__file__).parent.parent.parent.parent / "infrastructure" / "seed-data"
if profile == "professional":
json_file = seed_data_dir / "professional" / "03-inventory.json"
elif profile == "enterprise":
json_file = seed_data_dir / "enterprise" / "parent" / "03-inventory.json"
else:
raise ValueError(f"Invalid profile: {profile}")
if not json_file.exists():
raise HTTPException(
status_code=404,
detail=f"Seed data file not found: {json_file}"
)
2025-10-17 07:31:14 +02:00
2025-12-13 23:57:54 +01:00
# Load JSON data
with open(json_file, 'r', encoding='utf-8') as f:
seed_data = json.load(f)
2025-10-17 07:31:14 +02:00
2025-12-13 23:57:54 +01:00
# Check if data already exists for this virtual tenant (idempotency)
from sqlalchemy import select, delete
existing_check = await db.execute(
select(Ingredient).where(Ingredient.tenant_id == virtual_tenant_id).limit(1)
2025-10-17 07:31:14 +02:00
)
2025-12-13 23:57:54 +01:00
existing_ingredient = existing_check.scalar_one_or_none()
2025-10-17 07:31:14 +02:00
2025-12-13 23:57:54 +01:00
if existing_ingredient:
logger.warning(
"Demo data already exists, skipping clone",
virtual_tenant_id=virtual_tenant_id
2025-10-17 07:31:14 +02:00
)
2025-12-13 23:57:54 +01:00
return {
"status": "skipped",
"reason": "Data already exists",
"records_cloned": 0
}
# Transform and insert data
records_cloned = 0
# Clone ingredients
for ingredient_data in seed_data.get('ingredients', []):
# Transform ID
from shared.utils.demo_id_transformer import transform_id
try:
ingredient_uuid = UUID(ingredient_data['id'])
tenant_uuid = UUID(virtual_tenant_id)
transformed_id = transform_id(ingredient_data['id'], tenant_uuid)
except ValueError as e:
logger.error("Failed to parse UUIDs for ID transformation",
ingredient_id=ingredient_data['id'],
virtual_tenant_id=virtual_tenant_id,
error=str(e))
raise HTTPException(
status_code=400,
detail=f"Invalid UUID format in ingredient data: {str(e)}"
2025-10-24 13:05:04 +02:00
)
2025-12-13 23:57:54 +01:00
2025-12-14 11:58:14 +01:00
# Transform dates using standardized helper
ingredient_data['created_at'] = parse_date_field(
ingredient_data.get('created_at'), session_time, 'created_at'
) or session_time
ingredient_data['updated_at'] = parse_date_field(
ingredient_data.get('updated_at'), session_time, 'updated_at'
) or session_time
2025-12-13 23:57:54 +01:00
# Map category field to ingredient_category enum
if 'category' in ingredient_data:
category_value = ingredient_data.pop('category')
# Convert category string to IngredientCategory enum
from app.models.inventory import IngredientCategory
try:
ingredient_data['ingredient_category'] = IngredientCategory[category_value.upper()]
except KeyError:
# If category not found in enum, use OTHER
ingredient_data['ingredient_category'] = IngredientCategory.OTHER
# Map unit_of_measure string to enum
if 'unit_of_measure' in ingredient_data:
from app.models.inventory import UnitOfMeasure
unit_mapping = {
'kilograms': UnitOfMeasure.KILOGRAMS,
'grams': UnitOfMeasure.GRAMS,
'liters': UnitOfMeasure.LITERS,
'milliliters': UnitOfMeasure.MILLILITERS,
'units': UnitOfMeasure.UNITS,
'pieces': UnitOfMeasure.PIECES,
'packages': UnitOfMeasure.PACKAGES,
'bags': UnitOfMeasure.BAGS,
'boxes': UnitOfMeasure.BOXES
}
2025-12-14 11:58:14 +01:00
# Also support uppercase versions
unit_mapping.update({
'KILOGRAMS': UnitOfMeasure.KILOGRAMS,
'GRAMS': UnitOfMeasure.GRAMS,
'LITERS': UnitOfMeasure.LITERS,
'MILLILITERS': UnitOfMeasure.MILLILITERS,
'UNITS': UnitOfMeasure.UNITS,
'PIECES': UnitOfMeasure.PIECES,
'PACKAGES': UnitOfMeasure.PACKAGES,
'BAGS': UnitOfMeasure.BAGS,
'BOXES': UnitOfMeasure.BOXES
})
2025-12-13 23:57:54 +01:00
unit_str = ingredient_data['unit_of_measure']
if unit_str in unit_mapping:
ingredient_data['unit_of_measure'] = unit_mapping[unit_str]
else:
# Default to units if not found
ingredient_data['unit_of_measure'] = UnitOfMeasure.UNITS
2025-12-14 11:58:14 +01:00
logger.warning("Unknown unit_of_measure, defaulting to UNITS",
2025-12-13 23:57:54 +01:00
original_unit=unit_str)
# Note: All seed data fields now match the model schema exactly
# No field filtering needed
# Remove original id and tenant_id from ingredient_data to avoid conflict
ingredient_data.pop('id', None)
ingredient_data.pop('tenant_id', None)
# Create ingredient
ingredient = Ingredient(
id=str(transformed_id),
tenant_id=str(virtual_tenant_id),
**ingredient_data
2025-10-24 13:05:04 +02:00
)
2025-12-13 23:57:54 +01:00
db.add(ingredient)
records_cloned += 1
# Clone stock batches
2025-12-14 16:04:16 +01:00
for stock_data in seed_data.get('stock', []):
2025-12-13 23:57:54 +01:00
# Transform ID - handle both UUID and string IDs
from shared.utils.demo_id_transformer import transform_id
try:
# Try to parse as UUID first
stock_uuid = UUID(stock_data['id'])
tenant_uuid = UUID(virtual_tenant_id)
transformed_id = transform_id(stock_data['id'], tenant_uuid)
except ValueError:
# If not a UUID, generate a deterministic UUID from the string ID
import hashlib
stock_id_string = stock_data['id']
tenant_uuid = UUID(virtual_tenant_id)
# Create a deterministic UUID from the string ID and tenant ID
combined = f"{stock_id_string}-{tenant_uuid}"
hash_obj = hashlib.sha256(combined.encode('utf-8'))
transformed_id = UUID(hash_obj.hexdigest()[:32])
logger.info("Generated UUID for non-UUID stock ID",
original_id=stock_id_string,
generated_id=str(transformed_id))
2025-12-14 11:58:14 +01:00
# Transform dates using standardized helper
stock_data['received_date'] = parse_date_field(
stock_data.get('received_date'), session_time, 'received_date'
)
stock_data['expiration_date'] = parse_date_field(
stock_data.get('expiration_date'), session_time, 'expiration_date'
)
stock_data['best_before_date'] = parse_date_field(
stock_data.get('best_before_date'), session_time, 'best_before_date'
)
stock_data['created_at'] = parse_date_field(
stock_data.get('created_at'), session_time, 'created_at'
) or session_time
stock_data['updated_at'] = parse_date_field(
stock_data.get('updated_at'), session_time, 'updated_at'
) or session_time
2025-12-13 23:57:54 +01:00
# Remove original id and tenant_id from stock_data to avoid conflict
stock_data.pop('id', None)
stock_data.pop('tenant_id', None)
2025-12-14 16:04:16 +01:00
# Remove notes field as it doesn't exist in the Stock model
stock_data.pop('notes', None)
# Transform ingredient_id to match transformed ingredient IDs
if 'ingredient_id' in stock_data:
ingredient_id_str = stock_data['ingredient_id']
try:
ingredient_uuid = UUID(ingredient_id_str)
transformed_ingredient_id = transform_id(ingredient_id_str, tenant_uuid)
stock_data['ingredient_id'] = str(transformed_ingredient_id)
except ValueError as e:
logger.error("Failed to transform ingredient_id",
original_ingredient_id=ingredient_id_str,
error=str(e))
raise HTTPException(
status_code=400,
detail=f"Invalid ingredient_id format: {str(e)}"
)
# Transform supplier_id if present
if 'supplier_id' in stock_data:
supplier_id_str = stock_data['supplier_id']
try:
supplier_uuid = UUID(supplier_id_str)
transformed_supplier_id = transform_id(supplier_id_str, tenant_uuid)
stock_data['supplier_id'] = str(transformed_supplier_id)
except ValueError as e:
logger.error("Failed to transform supplier_id",
original_supplier_id=supplier_id_str,
error=str(e))
raise HTTPException(
status_code=400,
detail=f"Invalid supplier_id format: {str(e)}"
)
2025-12-13 23:57:54 +01:00
# Create stock batch
stock = Stock(
id=str(transformed_id),
tenant_id=str(virtual_tenant_id),
**stock_data
)
db.add(stock)
records_cloned += 1
2025-10-24 13:05:04 +02:00
2025-12-14 16:04:16 +01:00
# Note: Edge cases are now handled exclusively through JSON seed data
# The seed data files already contain comprehensive edge cases including:
# - Low stock items below reorder points
# - Items expiring soon
# - Freshly received stock
# This ensures standardization and single source of truth for demo data
2025-12-14 11:58:14 +01:00
logger.info(
2025-12-14 16:04:16 +01:00
"Edge cases handled by JSON seed data - no manual creation needed",
seed_data_edge_cases="low_stock, expiring_soon, fresh_stock"
2025-12-14 11:58:14 +01:00
)
await db.commit()
2025-12-14 11:58:14 +01:00
duration_ms = int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000)
logger.info(
2025-12-13 23:57:54 +01:00
"Inventory data cloned successfully",
virtual_tenant_id=virtual_tenant_id,
2025-12-13 23:57:54 +01:00
records_cloned=records_cloned,
duration_ms=duration_ms,
ingredients_cloned=len(seed_data.get('ingredients', [])),
2025-12-14 16:04:16 +01:00
stock_batches_cloned=len(seed_data.get('stock', []))
)
return {
"service": "inventory",
"status": "completed",
2025-12-13 23:57:54 +01:00
"records_cloned": records_cloned,
"duration_ms": duration_ms,
2025-12-13 23:57:54 +01:00
"details": {
"ingredients": len(seed_data.get('ingredients', [])),
2025-12-14 16:04:16 +01:00
"stock": len(seed_data.get('stock', [])),
2025-12-13 23:57:54 +01:00
"virtual_tenant_id": str(virtual_tenant_id)
}
}
except ValueError as e:
2025-12-13 23:57:54 +01:00
logger.error("Invalid UUID format", error=str(e), virtual_tenant_id=virtual_tenant_id)
raise HTTPException(status_code=400, detail=f"Invalid UUID: {str(e)}")
except Exception as e:
logger.error(
"Failed to clone inventory data",
error=str(e),
virtual_tenant_id=virtual_tenant_id,
exc_info=True
)
# Rollback on error
await db.rollback()
return {
"service": "inventory",
"status": "failed",
"records_cloned": 0,
2025-12-14 11:58:14 +01:00
"duration_ms": int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000),
"error": str(e)
}
@router.get("/clone/health")
async def clone_health_check(_: bool = Depends(verify_internal_api_key)):
"""
Health check for internal cloning endpoint
Used by orchestrator to verify service availability
"""
return {
"service": "inventory",
"clone_endpoint": "available",
"version": "2.0.0"
}
2025-10-24 13:05:04 +02:00
@router.delete("/tenant/{virtual_tenant_id}")
2025-12-13 23:57:54 +01:00
async def delete_demo_tenant_data(
virtual_tenant_id: UUID,
2025-10-24 13:05:04 +02:00
db: AsyncSession = Depends(get_db),
_: bool = Depends(verify_internal_api_key)
):
"""
2025-12-13 23:57:54 +01:00
Delete all demo data for a virtual tenant.
This endpoint is idempotent - safe to call multiple times.
2025-10-24 13:05:04 +02:00
"""
2025-12-14 11:58:14 +01:00
start_time = datetime.now(timezone.utc)
2025-12-13 23:57:54 +01:00
records_deleted = {
"ingredients": 0,
"stock": 0,
"total": 0
}
2025-10-24 13:05:04 +02:00
try:
2025-12-13 23:57:54 +01:00
# Delete in reverse dependency order
# 1. Delete stock batches (depends on ingredients)
result = await db.execute(
delete(Stock)
.where(Stock.tenant_id == virtual_tenant_id)
2025-10-24 13:05:04 +02:00
)
2025-12-13 23:57:54 +01:00
records_deleted["stock"] = result.rowcount
2025-10-24 13:05:04 +02:00
2025-12-13 23:57:54 +01:00
# 2. Delete ingredients
result = await db.execute(
delete(Ingredient)
.where(Ingredient.tenant_id == virtual_tenant_id)
2025-10-24 13:05:04 +02:00
)
2025-12-13 23:57:54 +01:00
records_deleted["ingredients"] = result.rowcount
2025-10-24 13:05:04 +02:00
2025-12-13 23:57:54 +01:00
records_deleted["total"] = sum(records_deleted.values())
2025-10-24 13:05:04 +02:00
await db.commit()
logger.info(
2025-12-13 23:57:54 +01:00
"demo_data_deleted",
service="inventory",
virtual_tenant_id=str(virtual_tenant_id),
records_deleted=records_deleted
2025-10-24 13:05:04 +02:00
)
return {
"service": "inventory",
"status": "deleted",
2025-12-13 23:57:54 +01:00
"virtual_tenant_id": str(virtual_tenant_id),
"records_deleted": records_deleted,
2025-12-14 11:58:14 +01:00
"duration_ms": int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000)
2025-10-24 13:05:04 +02:00
}
except Exception as e:
2025-12-13 23:57:54 +01:00
await db.rollback()
2025-10-24 13:05:04 +02:00
logger.error(
2025-12-13 23:57:54 +01:00
"demo_data_deletion_failed",
service="inventory",
virtual_tenant_id=str(virtual_tenant_id),
error=str(e)
2025-10-24 13:05:04 +02:00
)
raise HTTPException(
status_code=500,
2025-12-13 23:57:54 +01:00
detail=f"Failed to delete demo data: {str(e)}"
)