"""
Internal Demo Cloning API for Inventory Service

Handles internal demo data cloning operations
"""
|
|
|
|
|
|
|
|
|
|
import hashlib
import json
import secrets
import uuid
from datetime import datetime, timezone, timedelta
from pathlib import Path
from typing import Optional
from uuid import UUID

import structlog
from fastapi import APIRouter, Depends, HTTPException, Header
from sqlalchemy.ext.asyncio import AsyncSession

from app.core.config import settings
from app.core.database import get_db
from app.models import Ingredient, Stock, ProductType
from shared.utils.demo_dates import adjust_date_for_demo, resolve_time_marker, calculate_edge_case_times
|
2025-10-12 18:47:33 +02:00
|
|
|
|
|
|
|
|
logger = structlog.get_logger()
|
2025-12-13 23:57:54 +01:00
|
|
|
router = APIRouter()
|
2025-10-12 18:47:33 +02:00
|
|
|
|
|
|
|
|
|
2025-12-13 23:57:54 +01:00
|
|
|
async def verify_internal_api_key(x_internal_api_key: str = Header(None)):
    """Verify the internal API key for service-to-service communication.

    Args:
        x_internal_api_key: Value of the ``X-Internal-Api-Key`` request header.

    Returns:
        True when the header matches the configured ``INTERNAL_API_KEY``.

    Raises:
        HTTPException: 403 when the header is missing or does not match.
    """
    required_key = settings.INTERNAL_API_KEY

    # Use a constant-time comparison to avoid leaking key material through
    # timing side channels. A missing header is always rejected — the old
    # plain `!=` check would have accepted a missing header whenever the
    # configured key was also unset.
    if not x_internal_api_key or not secrets.compare_digest(x_internal_api_key, required_key):
        logger.warning("Unauthorized internal API access attempted")
        raise HTTPException(status_code=403, detail="Invalid internal API key")
    return True
|
|
|
|
|
|
|
|
|
|
|
2025-12-14 11:58:14 +01:00
|
|
|
def parse_date_field(date_value, session_time: datetime, field_name: str = "date") -> Optional[datetime]:
    """
    Parse a date field, handling both ISO strings and BASE_TS markers.

    Supported inputs:
    - BASE_TS markers: "BASE_TS + 1h30m", "BASE_TS - 2d"
    - ISO 8601 strings: "2025-01-15T06:00:00Z"
    - datetime-like objects (anything exposing ``isoformat``)
    - falsy values (returns None)

    Args:
        date_value: Raw value from seed data (string, datetime, or None).
        session_time: Session creation time that markers/dates are shifted
            relative to.
        field_name: Field label used in warning logs.

    Returns:
        Timezone-aware datetime adjusted for the demo session, or None when
        the value is missing or unparseable (warnings are logged, never raised).
    """
    if not date_value:
        return None

    # BASE_TS markers are resolved relative to the session creation time.
    if isinstance(date_value, str) and date_value.startswith("BASE_TS"):
        try:
            return resolve_time_marker(date_value, session_time)
        except ValueError as exc:
            logger.warning(
                f"Invalid BASE_TS marker in {field_name}",
                marker=date_value,
                error=str(exc),
            )
            return None

    # Everything else is treated as an ISO string or a datetime-like object.
    try:
        if isinstance(date_value, str):
            parsed = datetime.fromisoformat(date_value.replace('Z', '+00:00'))
        elif hasattr(date_value, 'isoformat'):
            parsed = date_value
        else:
            logger.warning(f"Unsupported date format in {field_name}", date_value=date_value)
            return None
        return adjust_date_for_demo(parsed, session_time)
    except (ValueError, AttributeError) as exc:
        logger.warning(
            f"Invalid date format in {field_name}",
            date_value=date_value,
            error=str(exc),
        )
        return None
|
|
|
|
|
|
|
|
|
|
|
2025-12-13 23:57:54 +01:00
|
|
|
@router.post("/internal/demo/clone")
async def clone_demo_data_internal(
    base_tenant_id: str,
    virtual_tenant_id: str,
    demo_account_type: str,
    session_id: Optional[str] = None,
    session_created_at: Optional[str] = None,
    db: AsyncSession = Depends(get_db),
    _: bool = Depends(verify_internal_api_key)
):
    """
    Clone inventory service data for a virtual demo tenant.

    Creates fresh demo data by:
    1. Loading seed data from JSON files
    2. Applying XOR-based ID transformation
    3. Adjusting dates relative to session creation time
    4. Creating records in the virtual tenant

    Args:
        base_tenant_id: Template tenant UUID (for reference)
        virtual_tenant_id: Target virtual tenant UUID
        demo_account_type: Type of demo account ("professional",
            "enterprise" or "enterprise_child")
        session_id: Originating session ID for tracing
        session_created_at: Session creation timestamp for date adjustment
        db: Database session

    Returns:
        Dictionary with cloning results. On unexpected errors a
        ``status: "failed"`` payload is returned (transaction rolled back).

    Raises:
        HTTPException: On validation errors (missing parameters, malformed
            UUIDs, unknown demo account type, bad seed-data IDs).
    """
    # Deferred imports: helpers/models used only by this endpoint.
    from shared.utils.seed_data_paths import get_seed_data_path
    from shared.utils.demo_id_transformer import transform_id
    from sqlalchemy import select
    from app.models.inventory import IngredientCategory, UnitOfMeasure, StockMovement

    start_time = datetime.now(timezone.utc)

    try:
        # Parse the session creation time once; all cloned dates are shifted
        # relative to it. Fall back to "now" on missing/invalid input.
        session_time = start_time
        if session_created_at:
            try:
                session_time = datetime.fromisoformat(session_created_at.replace('Z', '+00:00'))
            except (ValueError, AttributeError):
                session_time = start_time

        # Debug logging for UUID values
        logger.debug("Received UUID values", base_tenant_id=base_tenant_id, virtual_tenant_id=virtual_tenant_id)

        if not all([base_tenant_id, virtual_tenant_id, session_id]):
            raise HTTPException(
                status_code=400,
                detail="Missing required parameters: base_tenant_id, virtual_tenant_id, session_id"
            )

        # Validate UUID format before processing.
        try:
            UUID(base_tenant_id)
            tenant_uuid = UUID(virtual_tenant_id)
        except ValueError as e:
            logger.error("Invalid UUID format in request",
                         base_tenant_id=base_tenant_id,
                         virtual_tenant_id=virtual_tenant_id,
                         error=str(e))
            raise HTTPException(
                status_code=400,
                detail=f"Invalid UUID format: {str(e)}"
            )

        logger.info(
            "Starting inventory data cloning with date adjustment",
            base_tenant_id=base_tenant_id,
            virtual_tenant_id=virtual_tenant_id,
            demo_account_type=demo_account_type,
            session_id=session_id,
            session_time=session_time.isoformat()
        )

        # Resolve the seed-data file for the requested demo account type.
        if demo_account_type == "professional":
            json_file = get_seed_data_path("professional", "03-inventory.json")
        elif demo_account_type == "enterprise":
            json_file = get_seed_data_path("enterprise", "03-inventory.json")
        elif demo_account_type == "enterprise_child":
            json_file = get_seed_data_path("enterprise", "03-inventory.json", child_id=base_tenant_id)
        else:
            # Previously raised a bare ValueError, which the outer handler
            # reported as "Invalid UUID: ..." — raise a precise 400 instead.
            raise HTTPException(
                status_code=400,
                detail=f"Invalid demo account type: {demo_account_type}"
            )

        with open(json_file, 'r', encoding='utf-8') as f:
            seed_data = json.load(f)

        # Idempotency: skip cloning when the virtual tenant already has data.
        existing_check = await db.execute(
            select(Ingredient).where(Ingredient.tenant_id == virtual_tenant_id).limit(1)
        )
        if existing_check.scalar_one_or_none():
            logger.warning(
                "Demo data already exists, skipping clone",
                virtual_tenant_id=virtual_tenant_id
            )
            return {
                "status": "skipped",
                "reason": "Data already exists",
                "records_cloned": 0
            }

        records_cloned = 0

        # Seed unit strings mapped to the UnitOfMeasure enum; built once
        # (was rebuilt for every ingredient). Uppercase spellings from
        # older seed files are accepted too.
        unit_mapping = {
            'kilograms': UnitOfMeasure.KILOGRAMS,
            'grams': UnitOfMeasure.GRAMS,
            'liters': UnitOfMeasure.LITERS,
            'milliliters': UnitOfMeasure.MILLILITERS,
            'units': UnitOfMeasure.UNITS,
            'pieces': UnitOfMeasure.PIECES,
            'packages': UnitOfMeasure.PACKAGES,
            'bags': UnitOfMeasure.BAGS,
            'boxes': UnitOfMeasure.BOXES,
        }
        unit_mapping.update({key.upper(): value for key, value in list(unit_mapping.items())})

        # ---- Clone ingredients ------------------------------------------
        for ingredient_data in seed_data.get('ingredients', []):
            # Transform the seed UUID deterministically for this tenant.
            try:
                UUID(ingredient_data['id'])  # validate format first
                transformed_id = transform_id(ingredient_data['id'], tenant_uuid)
            except ValueError as e:
                logger.error("Failed to parse UUIDs for ID transformation",
                             ingredient_id=ingredient_data['id'],
                             virtual_tenant_id=virtual_tenant_id,
                             error=str(e))
                raise HTTPException(
                    status_code=400,
                    detail=f"Invalid UUID format in ingredient data: {str(e)}"
                )

            # Shift timestamps relative to the session creation time.
            ingredient_data['created_at'] = parse_date_field(
                ingredient_data.get('created_at'), session_time, 'created_at'
            ) or session_time
            ingredient_data['updated_at'] = parse_date_field(
                ingredient_data.get('updated_at'), session_time, 'updated_at'
            ) or session_time

            # Map the seed "category" string to the IngredientCategory enum,
            # defaulting to OTHER for unknown values.
            if 'category' in ingredient_data:
                category_value = ingredient_data.pop('category')
                try:
                    ingredient_data['ingredient_category'] = IngredientCategory[category_value.upper()]
                except KeyError:
                    ingredient_data['ingredient_category'] = IngredientCategory.OTHER

            # Map unit_of_measure string to the enum, defaulting to UNITS.
            if 'unit_of_measure' in ingredient_data:
                unit_str = ingredient_data['unit_of_measure']
                if unit_str in unit_mapping:
                    ingredient_data['unit_of_measure'] = unit_mapping[unit_str]
                else:
                    ingredient_data['unit_of_measure'] = UnitOfMeasure.UNITS
                    logger.warning("Unknown unit_of_measure, defaulting to UNITS",
                                   original_unit=unit_str)

            # Remove original id and tenant_id so they don't conflict with
            # the explicitly-set values below.
            ingredient_data.pop('id', None)
            ingredient_data.pop('tenant_id', None)

            db.add(Ingredient(
                id=str(transformed_id),
                tenant_id=str(virtual_tenant_id),
                **ingredient_data
            ))
            records_cloned += 1

        # Flush (not commit) so stock rows can reference the new ingredient
        # rows while everything stays in one transaction.
        await db.flush()

        # ---- Clone stock batches ----------------------------------------
        for stock_data in seed_data.get('stock', []):
            # Transform the ID; non-UUID string IDs get a deterministic UUID
            # derived from the string ID and the tenant ID.
            try:
                UUID(stock_data['id'])  # validate format first
                transformed_id = transform_id(stock_data['id'], tenant_uuid)
            except ValueError:
                stock_id_string = stock_data['id']
                combined = f"{stock_id_string}-{tenant_uuid}"
                hash_obj = hashlib.sha256(combined.encode('utf-8'))
                transformed_id = UUID(hash_obj.hexdigest()[:32])
                logger.info("Generated UUID for non-UUID stock ID",
                            original_id=stock_id_string,
                            generated_id=str(transformed_id))

            # Shift all date fields relative to the session creation time;
            # the pure date fields may legitimately stay None.
            stock_data['received_date'] = parse_date_field(
                stock_data.get('received_date'), session_time, 'received_date'
            )
            stock_data['expiration_date'] = parse_date_field(
                stock_data.get('expiration_date'), session_time, 'expiration_date'
            )
            stock_data['best_before_date'] = parse_date_field(
                stock_data.get('best_before_date'), session_time, 'best_before_date'
            )
            stock_data['created_at'] = parse_date_field(
                stock_data.get('created_at'), session_time, 'created_at'
            ) or session_time
            stock_data['updated_at'] = parse_date_field(
                stock_data.get('updated_at'), session_time, 'updated_at'
            ) or session_time

            stock_data.pop('id', None)
            stock_data.pop('tenant_id', None)
            # The Stock model has no "notes" column.
            stock_data.pop('notes', None)

            # Re-point foreign keys at the transformed ingredient/supplier IDs.
            if 'ingredient_id' in stock_data:
                ingredient_id_str = stock_data['ingredient_id']
                try:
                    UUID(ingredient_id_str)  # validate format first
                    stock_data['ingredient_id'] = str(transform_id(ingredient_id_str, tenant_uuid))
                except ValueError as e:
                    logger.error("Failed to transform ingredient_id",
                                 original_ingredient_id=ingredient_id_str,
                                 error=str(e))
                    raise HTTPException(
                        status_code=400,
                        detail=f"Invalid ingredient_id format: {str(e)}"
                    )

            if 'supplier_id' in stock_data:
                supplier_id_str = stock_data['supplier_id']
                try:
                    UUID(supplier_id_str)  # validate format first
                    stock_data['supplier_id'] = str(transform_id(supplier_id_str, tenant_uuid))
                except ValueError as e:
                    logger.error("Failed to transform supplier_id",
                                 original_supplier_id=supplier_id_str,
                                 error=str(e))
                    raise HTTPException(
                        status_code=400,
                        detail=f"Invalid supplier_id format: {str(e)}"
                    )

            db.add(Stock(
                id=str(transformed_id),
                tenant_id=str(virtual_tenant_id),
                **stock_data
            ))
            records_cloned += 1

        # ---- Clone stock movements (waste tracking / sustainability) ----
        for movement_data in seed_data.get('stock_movements', []):
            try:
                UUID(movement_data['id'])  # validate format first
                transformed_id = transform_id(movement_data['id'], tenant_uuid)
            except ValueError:
                # Non-UUID string IDs: derive a deterministic UUID as above.
                combined = f"{movement_data['id']}-{tenant_uuid}"
                hash_obj = hashlib.sha256(combined.encode('utf-8'))
                transformed_id = UUID(hash_obj.hexdigest()[:32])

            movement_data['movement_date'] = parse_date_field(
                movement_data.get('movement_date'), session_time, 'movement_date'
            ) or session_time
            movement_data['created_at'] = parse_date_field(
                movement_data.get('created_at'), session_time, 'created_at'
            ) or session_time

            # ingredient_id must transform cleanly (hard FK requirement)...
            if 'ingredient_id' in movement_data:
                ingredient_id_str = movement_data['ingredient_id']
                try:
                    movement_data['ingredient_id'] = str(transform_id(ingredient_id_str, tenant_uuid))
                except ValueError as e:
                    logger.error("Failed to transform ingredient_id in movement",
                                 original_id=ingredient_id_str, error=str(e))
                    raise HTTPException(status_code=400, detail=f"Invalid ingredient_id: {str(e)}")

            # ...while the optional references degrade to None on failure.
            for optional_fk in ('stock_id', 'supplier_id', 'created_by'):
                if movement_data.get(optional_fk):
                    try:
                        movement_data[optional_fk] = str(transform_id(movement_data[optional_fk], tenant_uuid))
                    except ValueError:
                        movement_data[optional_fk] = None

            movement_data.pop('id', None)
            movement_data.pop('tenant_id', None)

            db.add(StockMovement(
                id=str(transformed_id),
                tenant_id=str(virtual_tenant_id),
                **movement_data
            ))
            records_cloned += 1

        # Edge cases (low stock, expiring soon, fresh stock, waste movements)
        # come exclusively from the JSON seed data — single source of truth.
        logger.info(
            "Edge cases handled by JSON seed data - no manual creation needed",
            seed_data_edge_cases="low_stock, expiring_soon, fresh_stock, waste_movements"
        )

        await db.commit()

        duration_ms = int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000)

        logger.info(
            "Inventory data cloned successfully",
            virtual_tenant_id=virtual_tenant_id,
            records_cloned=records_cloned,
            duration_ms=duration_ms,
            ingredients_cloned=len(seed_data.get('ingredients', [])),
            stock_batches_cloned=len(seed_data.get('stock', [])),
            stock_movements_cloned=len(seed_data.get('stock_movements', []))
        )

        return {
            "service": "inventory",
            "status": "completed",
            "records_cloned": records_cloned,
            "duration_ms": duration_ms,
            "details": {
                "ingredients": len(seed_data.get('ingredients', [])),
                "stock": len(seed_data.get('stock', [])),
                "virtual_tenant_id": str(virtual_tenant_id)
            }
        }

    except HTTPException:
        # BUGFIX: the generic handler below used to swallow the deliberate
        # 4xx errors raised above and return them as a 200 "failed" payload.
        # Roll back any partial work and let FastAPI deliver the error.
        await db.rollback()
        raise

    except ValueError as e:
        logger.error("Invalid UUID format", error=str(e), virtual_tenant_id=virtual_tenant_id)
        raise HTTPException(status_code=400, detail=f"Invalid UUID: {str(e)}")

    except Exception as e:
        logger.error(
            "Failed to clone inventory data",
            error=str(e),
            virtual_tenant_id=virtual_tenant_id,
            exc_info=True
        )
        # Rollback on error; report failure in-band for the orchestrator.
        await db.rollback()
        return {
            "service": "inventory",
            "status": "failed",
            "records_cloned": 0,
            "duration_ms": int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000),
            "error": str(e)
        }
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
@router.get("/clone/health")
async def clone_health_check(_: bool = Depends(verify_internal_api_key)):
    """
    Health check for internal cloning endpoint.

    Used by orchestrator to verify service availability.
    """
    payload = {
        "service": "inventory",
        "clone_endpoint": "available",
        "version": "2.0.0",
    }
    return payload
|
2025-10-24 13:05:04 +02:00
|
|
|
|
|
|
|
|
|
|
|
|
|
@router.delete("/tenant/{virtual_tenant_id}")
async def delete_demo_tenant_data(
    virtual_tenant_id: UUID,
    db: AsyncSession = Depends(get_db),
    _: bool = Depends(verify_internal_api_key)
):
    """
    Delete all demo data for a virtual tenant.

    This endpoint is idempotent - safe to call multiple times.

    Args:
        virtual_tenant_id: Virtual tenant whose inventory data is removed.
        db: Database session.

    Returns:
        Dictionary with per-table and total deletion counts plus duration.

    Raises:
        HTTPException: 500 when deletion fails (transaction rolled back).
    """
    # BUGFIX: "delete" was previously only imported inside
    # clone_demo_data_internal's function scope, so this endpoint raised
    # NameError at runtime.
    from sqlalchemy import delete
    from app.models.inventory import StockMovement

    start_time = datetime.now(timezone.utc)

    records_deleted = {
        "stock_movements": 0,
        "ingredients": 0,
        "stock": 0,
        "total": 0
    }

    try:
        # Delete in reverse dependency order.

        # 1. Stock movements (created by the clone endpoint; they reference
        #    ingredients/stock, so they must go first — previously they were
        #    never deleted at all).
        result = await db.execute(
            delete(StockMovement)
            .where(StockMovement.tenant_id == virtual_tenant_id)
        )
        records_deleted["stock_movements"] = result.rowcount

        # 2. Stock batches (depend on ingredients)
        result = await db.execute(
            delete(Stock)
            .where(Stock.tenant_id == virtual_tenant_id)
        )
        records_deleted["stock"] = result.rowcount

        # 3. Ingredients
        result = await db.execute(
            delete(Ingredient)
            .where(Ingredient.tenant_id == virtual_tenant_id)
        )
        records_deleted["ingredients"] = result.rowcount

        records_deleted["total"] = (
            records_deleted["stock_movements"]
            + records_deleted["stock"]
            + records_deleted["ingredients"]
        )

        await db.commit()

        logger.info(
            "demo_data_deleted",
            service="inventory",
            virtual_tenant_id=str(virtual_tenant_id),
            records_deleted=records_deleted
        )

        return {
            "service": "inventory",
            "status": "deleted",
            "virtual_tenant_id": str(virtual_tenant_id),
            "records_deleted": records_deleted,
            "duration_ms": int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000)
        }

    except Exception as e:
        await db.rollback()
        logger.error(
            "demo_data_deletion_failed",
            service="inventory",
            virtual_tenant_id=str(virtual_tenant_id),
            error=str(e)
        )
        raise HTTPException(
            status_code=500,
            detail=f"Failed to delete demo data: {str(e)}"
        )
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
@router.get("/internal/count")
async def get_ingredient_count(
    tenant_id: str,
    db: AsyncSession = Depends(get_db),
    _: bool = Depends(verify_internal_api_key)
):
    """
    Get count of active ingredients for onboarding status check.

    Internal endpoint for tenant service.
    """
    try:
        from sqlalchemy import select, func

        # Count only active ingredients belonging to the given tenant.
        stmt = (
            select(func.count())
            .select_from(Ingredient)
            .where(
                Ingredient.tenant_id == UUID(tenant_id),
                Ingredient.is_active == True
            )
        )
        total = await db.scalar(stmt)

        return {
            "count": total or 0,
            "tenant_id": tenant_id
        }

    except Exception as e:
        logger.error("Failed to get ingredient count", tenant_id=tenant_id, error=str(e))
        raise HTTPException(status_code=500, detail=f"Failed to get ingredient count: {str(e)}")
|