"""
Internal Demo Cloning API for Inventory Service

Service-to-service endpoint for cloning inventory data with date adjustment.
"""
|
import hmac
import os
import sys
import uuid
from datetime import datetime, timezone
from pathlib import Path
from typing import Optional

import structlog
from fastapi import APIRouter, Depends, Header, HTTPException
from sqlalchemy import delete, func, select
from sqlalchemy.ext.asyncio import AsyncSession

# Add shared path so `shared.*` modules resolve when running inside the service
sys.path.insert(0, str(Path(__file__).parent.parent.parent.parent.parent))

from app.core.database import get_db
from app.models.inventory import Ingredient, Stock, StockMovement
from shared.utils.demo_dates import adjust_date_for_demo, BASE_REFERENCE_DATE
logger = structlog.get_logger()
|
|
|
|
|
router = APIRouter(prefix="/internal/demo", tags=["internal"])
|
|
|
|
|
|
|
|
|
|
# Internal API key for service-to-service auth
|
|
|
|
|
INTERNAL_API_KEY = os.getenv("INTERNAL_API_KEY", "dev-internal-key-change-in-production")
|
|
|
|
|
|
|
|
|
|
# Base demo tenant IDs
|
|
|
|
|
DEMO_TENANT_SAN_PABLO = "a1b2c3d4-e5f6-47a8-b9c0-d1e2f3a4b5c6"
|
|
|
|
|
DEMO_TENANT_LA_ESPIGA = "b2c3d4e5-f6a7-48b9-c0d1-e2f3a4b5c6d7"
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def verify_internal_api_key(x_internal_api_key: Optional[str] = Header(None)):
|
|
|
|
|
"""Verify internal API key for service-to-service communication"""
|
|
|
|
|
if x_internal_api_key != INTERNAL_API_KEY:
|
|
|
|
|
logger.warning("Unauthorized internal API access attempted")
|
|
|
|
|
raise HTTPException(status_code=403, detail="Invalid internal API key")
|
|
|
|
|
return True
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
@router.post("/clone")
|
|
|
|
|
async def clone_demo_data(
|
|
|
|
|
base_tenant_id: str,
|
|
|
|
|
virtual_tenant_id: str,
|
|
|
|
|
demo_account_type: str,
|
|
|
|
|
session_id: Optional[str] = None,
|
2025-11-27 15:52:40 +01:00
|
|
|
session_created_at: Optional[str] = None,
|
2025-10-12 18:47:33 +02:00
|
|
|
db: AsyncSession = Depends(get_db),
|
|
|
|
|
_: bool = Depends(verify_internal_api_key)
|
|
|
|
|
):
|
|
|
|
|
"""
|
|
|
|
|
Clone inventory service data for a virtual demo tenant
|
|
|
|
|
|
|
|
|
|
Clones:
|
|
|
|
|
- Ingredients from template tenant
|
2025-10-17 07:31:14 +02:00
|
|
|
- Stock batches with date-adjusted expiration dates
|
|
|
|
|
- Generates inventory alerts based on stock status
|
2025-10-12 18:47:33 +02:00
|
|
|
|
|
|
|
|
Args:
|
|
|
|
|
base_tenant_id: Template tenant UUID to clone from
|
|
|
|
|
virtual_tenant_id: Target virtual tenant UUID
|
|
|
|
|
demo_account_type: Type of demo account
|
|
|
|
|
session_id: Originating session ID for tracing
|
2025-11-27 15:52:40 +01:00
|
|
|
session_created_at: ISO timestamp when demo session was created (for date adjustment)
|
2025-10-12 18:47:33 +02:00
|
|
|
|
|
|
|
|
Returns:
|
|
|
|
|
Cloning status and record counts
|
|
|
|
|
"""
|
|
|
|
|
start_time = datetime.now(timezone.utc)
|
2025-11-27 15:52:40 +01:00
|
|
|
|
|
|
|
|
# Parse session_created_at or fallback to now
|
|
|
|
|
if session_created_at:
|
|
|
|
|
try:
|
|
|
|
|
session_time = datetime.fromisoformat(session_created_at.replace('Z', '+00:00'))
|
|
|
|
|
except (ValueError, AttributeError) as e:
|
|
|
|
|
logger.warning(
|
|
|
|
|
"Invalid session_created_at format, using current time",
|
|
|
|
|
session_created_at=session_created_at,
|
|
|
|
|
error=str(e)
|
|
|
|
|
)
|
|
|
|
|
session_time = datetime.now(timezone.utc)
|
|
|
|
|
else:
|
|
|
|
|
logger.warning("session_created_at not provided, using current time")
|
|
|
|
|
session_time = datetime.now(timezone.utc)
|
2025-10-12 18:47:33 +02:00
|
|
|
|
|
|
|
|
logger.info(
|
2025-10-17 07:31:14 +02:00
|
|
|
"Starting inventory data cloning with date adjustment",
|
2025-10-12 18:47:33 +02:00
|
|
|
base_tenant_id=base_tenant_id,
|
|
|
|
|
virtual_tenant_id=virtual_tenant_id,
|
|
|
|
|
demo_account_type=demo_account_type,
|
2025-10-17 07:31:14 +02:00
|
|
|
session_id=session_id,
|
2025-11-27 15:52:40 +01:00
|
|
|
session_time=session_time.isoformat()
|
2025-10-12 18:47:33 +02:00
|
|
|
)
|
|
|
|
|
|
|
|
|
|
try:
|
|
|
|
|
# Validate UUIDs
|
|
|
|
|
base_uuid = uuid.UUID(base_tenant_id)
|
|
|
|
|
virtual_uuid = uuid.UUID(virtual_tenant_id)
|
|
|
|
|
|
2025-10-24 13:05:04 +02:00
|
|
|
# Check if data already exists for this virtual tenant (idempotency)
|
|
|
|
|
existing_check = await db.execute(
|
|
|
|
|
select(Ingredient).where(Ingredient.tenant_id == virtual_uuid).limit(1)
|
|
|
|
|
)
|
|
|
|
|
existing_ingredient = existing_check.scalars().first()
|
|
|
|
|
|
|
|
|
|
if existing_ingredient:
|
|
|
|
|
logger.warning(
|
|
|
|
|
"Data already exists for virtual tenant - cleaning before re-clone",
|
|
|
|
|
virtual_tenant_id=virtual_tenant_id,
|
|
|
|
|
base_tenant_id=base_tenant_id
|
|
|
|
|
)
|
|
|
|
|
# Clean up existing data first to ensure fresh clone
|
|
|
|
|
from sqlalchemy import delete
|
|
|
|
|
|
|
|
|
|
await db.execute(
|
|
|
|
|
delete(StockMovement).where(StockMovement.tenant_id == virtual_uuid)
|
|
|
|
|
)
|
|
|
|
|
await db.execute(
|
|
|
|
|
delete(Stock).where(Stock.tenant_id == virtual_uuid)
|
|
|
|
|
)
|
|
|
|
|
await db.execute(
|
|
|
|
|
delete(Ingredient).where(Ingredient.tenant_id == virtual_uuid)
|
|
|
|
|
)
|
|
|
|
|
await db.commit()
|
|
|
|
|
|
|
|
|
|
logger.info(
|
|
|
|
|
"Existing data cleaned, proceeding with fresh clone",
|
|
|
|
|
virtual_tenant_id=virtual_tenant_id
|
|
|
|
|
)
|
|
|
|
|
|
2025-10-12 18:47:33 +02:00
|
|
|
# Track cloning statistics
|
|
|
|
|
stats = {
|
|
|
|
|
"ingredients": 0,
|
2025-10-17 07:31:14 +02:00
|
|
|
"stock_batches": 0,
|
2025-10-24 13:05:04 +02:00
|
|
|
"stock_movements": 0,
|
2025-10-17 07:31:14 +02:00
|
|
|
"alerts_generated": 0
|
2025-10-12 18:47:33 +02:00
|
|
|
}
|
|
|
|
|
|
2025-10-17 07:31:14 +02:00
|
|
|
# Mapping from base ingredient ID to virtual ingredient ID
|
|
|
|
|
ingredient_id_mapping = {}
|
2025-10-24 13:05:04 +02:00
|
|
|
# Mapping from base stock ID to virtual stock ID
|
|
|
|
|
stock_id_mapping = {}
|
2025-10-17 07:31:14 +02:00
|
|
|
|
2025-10-12 18:47:33 +02:00
|
|
|
# Clone Ingredients
|
|
|
|
|
result = await db.execute(
|
|
|
|
|
select(Ingredient).where(Ingredient.tenant_id == base_uuid)
|
|
|
|
|
)
|
|
|
|
|
base_ingredients = result.scalars().all()
|
|
|
|
|
|
|
|
|
|
logger.info(
|
|
|
|
|
"Found ingredients to clone",
|
|
|
|
|
count=len(base_ingredients),
|
|
|
|
|
base_tenant=str(base_uuid)
|
|
|
|
|
)
|
|
|
|
|
|
|
|
|
|
for ingredient in base_ingredients:
|
2025-10-21 19:50:07 +02:00
|
|
|
# Transform ingredient ID using XOR to ensure consistency across services
|
|
|
|
|
# This formula matches the suppliers service ID transformation
|
|
|
|
|
# Formula: virtual_ingredient_id = virtual_tenant_id XOR base_ingredient_id
|
|
|
|
|
|
|
|
|
|
base_ingredient_int = int(ingredient.id.hex, 16)
|
|
|
|
|
virtual_tenant_int = int(virtual_uuid.hex, 16)
|
|
|
|
|
base_tenant_int = int(base_uuid.hex, 16)
|
|
|
|
|
|
|
|
|
|
# Reverse the original XOR to get the base ingredient ID
|
|
|
|
|
# base_ingredient = base_tenant ^ base_ingredient_id
|
|
|
|
|
# So: base_ingredient_id = base_tenant ^ base_ingredient
|
|
|
|
|
base_ingredient_id_int = base_tenant_int ^ base_ingredient_int
|
|
|
|
|
|
|
|
|
|
# Now apply virtual tenant XOR to get the new ingredient ID
|
|
|
|
|
new_ingredient_id = uuid.UUID(int=virtual_tenant_int ^ base_ingredient_id_int)
|
|
|
|
|
|
|
|
|
|
logger.debug(
|
|
|
|
|
"Transforming ingredient ID using XOR",
|
|
|
|
|
base_ingredient_id=str(ingredient.id),
|
|
|
|
|
new_ingredient_id=str(new_ingredient_id),
|
|
|
|
|
ingredient_sku=ingredient.sku,
|
|
|
|
|
ingredient_name=ingredient.name
|
|
|
|
|
)
|
|
|
|
|
|
2025-10-12 18:47:33 +02:00
|
|
|
new_ingredient = Ingredient(
|
2025-10-17 07:31:14 +02:00
|
|
|
id=new_ingredient_id,
|
2025-10-12 18:47:33 +02:00
|
|
|
tenant_id=virtual_uuid,
|
|
|
|
|
name=ingredient.name,
|
|
|
|
|
sku=ingredient.sku,
|
|
|
|
|
barcode=ingredient.barcode,
|
|
|
|
|
product_type=ingredient.product_type,
|
|
|
|
|
ingredient_category=ingredient.ingredient_category,
|
|
|
|
|
product_category=ingredient.product_category,
|
|
|
|
|
subcategory=ingredient.subcategory,
|
|
|
|
|
description=ingredient.description,
|
|
|
|
|
brand=ingredient.brand,
|
|
|
|
|
unit_of_measure=ingredient.unit_of_measure,
|
|
|
|
|
package_size=ingredient.package_size,
|
|
|
|
|
average_cost=ingredient.average_cost,
|
|
|
|
|
last_purchase_price=ingredient.last_purchase_price,
|
|
|
|
|
standard_cost=ingredient.standard_cost,
|
|
|
|
|
low_stock_threshold=ingredient.low_stock_threshold,
|
|
|
|
|
reorder_point=ingredient.reorder_point,
|
|
|
|
|
reorder_quantity=ingredient.reorder_quantity,
|
|
|
|
|
max_stock_level=ingredient.max_stock_level,
|
|
|
|
|
shelf_life_days=ingredient.shelf_life_days,
|
2025-10-17 07:31:14 +02:00
|
|
|
display_life_hours=ingredient.display_life_hours,
|
|
|
|
|
best_before_hours=ingredient.best_before_hours,
|
|
|
|
|
storage_instructions=ingredient.storage_instructions,
|
2025-10-12 18:47:33 +02:00
|
|
|
is_perishable=ingredient.is_perishable,
|
|
|
|
|
is_active=ingredient.is_active,
|
2025-10-17 07:31:14 +02:00
|
|
|
allergen_info=ingredient.allergen_info,
|
|
|
|
|
nutritional_info=ingredient.nutritional_info
|
2025-10-12 18:47:33 +02:00
|
|
|
)
|
|
|
|
|
db.add(new_ingredient)
|
|
|
|
|
stats["ingredients"] += 1
|
|
|
|
|
|
2025-10-17 07:31:14 +02:00
|
|
|
# Store mapping for stock cloning
|
|
|
|
|
ingredient_id_mapping[ingredient.id] = new_ingredient_id
|
|
|
|
|
|
|
|
|
|
await db.flush() # Ensure ingredients are persisted before stock
|
|
|
|
|
|
|
|
|
|
# Clone Stock batches with date adjustment
|
|
|
|
|
result = await db.execute(
|
|
|
|
|
select(Stock).where(Stock.tenant_id == base_uuid)
|
|
|
|
|
)
|
|
|
|
|
base_stocks = result.scalars().all()
|
|
|
|
|
|
|
|
|
|
logger.info(
|
|
|
|
|
"Found stock batches to clone",
|
|
|
|
|
count=len(base_stocks),
|
|
|
|
|
base_tenant=str(base_uuid)
|
|
|
|
|
)
|
|
|
|
|
|
|
|
|
|
for stock in base_stocks:
|
|
|
|
|
# Map ingredient ID
|
|
|
|
|
new_ingredient_id = ingredient_id_mapping.get(stock.ingredient_id)
|
|
|
|
|
if not new_ingredient_id:
|
|
|
|
|
logger.warning(
|
|
|
|
|
"Stock references non-existent ingredient, skipping",
|
|
|
|
|
stock_id=str(stock.id),
|
|
|
|
|
ingredient_id=str(stock.ingredient_id)
|
|
|
|
|
)
|
|
|
|
|
continue
|
|
|
|
|
|
|
|
|
|
# Adjust dates relative to session creation
|
|
|
|
|
adjusted_expiration = adjust_date_for_demo(
|
|
|
|
|
stock.expiration_date,
|
2025-11-27 15:52:40 +01:00
|
|
|
session_time,
|
2025-10-17 07:31:14 +02:00
|
|
|
BASE_REFERENCE_DATE
|
|
|
|
|
)
|
|
|
|
|
adjusted_received = adjust_date_for_demo(
|
|
|
|
|
stock.received_date,
|
2025-11-27 15:52:40 +01:00
|
|
|
session_time,
|
2025-10-17 07:31:14 +02:00
|
|
|
BASE_REFERENCE_DATE
|
|
|
|
|
)
|
|
|
|
|
adjusted_best_before = adjust_date_for_demo(
|
|
|
|
|
stock.best_before_date,
|
2025-11-27 15:52:40 +01:00
|
|
|
session_time,
|
2025-10-17 07:31:14 +02:00
|
|
|
BASE_REFERENCE_DATE
|
|
|
|
|
)
|
|
|
|
|
adjusted_created = adjust_date_for_demo(
|
|
|
|
|
stock.created_at,
|
2025-11-27 15:52:40 +01:00
|
|
|
session_time,
|
2025-10-17 07:31:14 +02:00
|
|
|
BASE_REFERENCE_DATE
|
2025-11-27 15:52:40 +01:00
|
|
|
) or session_time
|
2025-10-17 07:31:14 +02:00
|
|
|
|
2025-10-24 13:05:04 +02:00
|
|
|
# Create new stock batch with new ID
|
|
|
|
|
new_stock_id = uuid.uuid4()
|
|
|
|
|
|
2025-10-17 07:31:14 +02:00
|
|
|
new_stock = Stock(
|
2025-10-24 13:05:04 +02:00
|
|
|
id=new_stock_id,
|
2025-10-17 07:31:14 +02:00
|
|
|
tenant_id=virtual_uuid,
|
|
|
|
|
ingredient_id=new_ingredient_id,
|
|
|
|
|
supplier_id=stock.supplier_id,
|
|
|
|
|
batch_number=stock.batch_number,
|
|
|
|
|
lot_number=stock.lot_number,
|
|
|
|
|
supplier_batch_ref=stock.supplier_batch_ref,
|
|
|
|
|
production_stage=stock.production_stage,
|
|
|
|
|
current_quantity=stock.current_quantity,
|
|
|
|
|
reserved_quantity=stock.reserved_quantity,
|
|
|
|
|
available_quantity=stock.available_quantity,
|
|
|
|
|
received_date=adjusted_received,
|
|
|
|
|
expiration_date=adjusted_expiration,
|
|
|
|
|
best_before_date=adjusted_best_before,
|
|
|
|
|
unit_cost=stock.unit_cost,
|
|
|
|
|
total_cost=stock.total_cost,
|
|
|
|
|
storage_location=stock.storage_location,
|
|
|
|
|
warehouse_zone=stock.warehouse_zone,
|
|
|
|
|
shelf_position=stock.shelf_position,
|
|
|
|
|
requires_refrigeration=stock.requires_refrigeration,
|
|
|
|
|
requires_freezing=stock.requires_freezing,
|
|
|
|
|
storage_temperature_min=stock.storage_temperature_min,
|
|
|
|
|
storage_temperature_max=stock.storage_temperature_max,
|
|
|
|
|
storage_humidity_max=stock.storage_humidity_max,
|
|
|
|
|
shelf_life_days=stock.shelf_life_days,
|
|
|
|
|
storage_instructions=stock.storage_instructions,
|
|
|
|
|
is_available=stock.is_available,
|
|
|
|
|
is_expired=stock.is_expired,
|
|
|
|
|
quality_status=stock.quality_status,
|
|
|
|
|
created_at=adjusted_created,
|
2025-11-27 15:52:40 +01:00
|
|
|
updated_at=session_time
|
2025-10-17 07:31:14 +02:00
|
|
|
)
|
|
|
|
|
db.add(new_stock)
|
|
|
|
|
stats["stock_batches"] += 1
|
|
|
|
|
|
2025-10-24 13:05:04 +02:00
|
|
|
# Store mapping for movement cloning
|
|
|
|
|
stock_id_mapping[stock.id] = new_stock_id
|
|
|
|
|
|
|
|
|
|
await db.flush() # Ensure stock is persisted before movements
|
|
|
|
|
|
|
|
|
|
# Clone Stock Movements with date adjustment
|
|
|
|
|
result = await db.execute(
|
|
|
|
|
select(StockMovement).where(StockMovement.tenant_id == base_uuid)
|
|
|
|
|
)
|
|
|
|
|
base_movements = result.scalars().all()
|
|
|
|
|
|
|
|
|
|
logger.info(
|
|
|
|
|
"Found stock movements to clone",
|
|
|
|
|
count=len(base_movements),
|
|
|
|
|
base_tenant=str(base_uuid)
|
|
|
|
|
)
|
|
|
|
|
|
|
|
|
|
for movement in base_movements:
|
|
|
|
|
# Map ingredient ID and stock ID
|
|
|
|
|
new_ingredient_id = ingredient_id_mapping.get(movement.ingredient_id)
|
|
|
|
|
new_stock_id = stock_id_mapping.get(movement.stock_id) if movement.stock_id else None
|
|
|
|
|
|
|
|
|
|
if not new_ingredient_id:
|
|
|
|
|
logger.warning(
|
|
|
|
|
"Movement references non-existent ingredient, skipping",
|
|
|
|
|
movement_id=str(movement.id),
|
|
|
|
|
ingredient_id=str(movement.ingredient_id)
|
|
|
|
|
)
|
|
|
|
|
continue
|
|
|
|
|
|
|
|
|
|
# Adjust movement date relative to session creation
|
|
|
|
|
adjusted_movement_date = adjust_date_for_demo(
|
|
|
|
|
movement.movement_date,
|
2025-11-27 15:52:40 +01:00
|
|
|
session_time,
|
2025-10-24 13:05:04 +02:00
|
|
|
BASE_REFERENCE_DATE
|
2025-11-27 15:52:40 +01:00
|
|
|
) or session_time
|
2025-10-24 13:05:04 +02:00
|
|
|
|
|
|
|
|
adjusted_created_at = adjust_date_for_demo(
|
|
|
|
|
movement.created_at,
|
2025-11-27 15:52:40 +01:00
|
|
|
session_time,
|
2025-10-24 13:05:04 +02:00
|
|
|
BASE_REFERENCE_DATE
|
2025-11-27 15:52:40 +01:00
|
|
|
) or session_time
|
2025-10-24 13:05:04 +02:00
|
|
|
|
|
|
|
|
# Create new stock movement
|
|
|
|
|
new_movement = StockMovement(
|
|
|
|
|
id=uuid.uuid4(),
|
|
|
|
|
tenant_id=virtual_uuid,
|
|
|
|
|
ingredient_id=new_ingredient_id,
|
|
|
|
|
stock_id=new_stock_id,
|
|
|
|
|
movement_type=movement.movement_type,
|
|
|
|
|
quantity=movement.quantity,
|
|
|
|
|
unit_cost=movement.unit_cost,
|
|
|
|
|
total_cost=movement.total_cost,
|
|
|
|
|
quantity_before=movement.quantity_before,
|
|
|
|
|
quantity_after=movement.quantity_after,
|
|
|
|
|
reference_number=movement.reference_number,
|
|
|
|
|
supplier_id=movement.supplier_id,
|
|
|
|
|
notes=movement.notes,
|
|
|
|
|
reason_code=movement.reason_code,
|
|
|
|
|
movement_date=adjusted_movement_date,
|
|
|
|
|
created_at=adjusted_created_at,
|
|
|
|
|
created_by=movement.created_by
|
|
|
|
|
)
|
|
|
|
|
db.add(new_movement)
|
|
|
|
|
stats["stock_movements"] += 1
|
|
|
|
|
|
2025-10-12 18:47:33 +02:00
|
|
|
# Commit all changes
|
|
|
|
|
await db.commit()
|
|
|
|
|
|
2025-10-23 07:44:54 +02:00
|
|
|
# NOTE: Alert generation removed - alerts are now generated automatically by the
|
|
|
|
|
# inventory_alert_service which runs scheduled checks every 2-5 minutes.
|
|
|
|
|
# This eliminates duplicate alerts and provides a more realistic demo experience.
|
|
|
|
|
stats["alerts_generated"] = 0
|
|
|
|
|
|
|
|
|
|
total_records = stats["ingredients"] + stats["stock_batches"]
|
2025-10-12 18:47:33 +02:00
|
|
|
duration_ms = int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000)
|
|
|
|
|
|
|
|
|
|
logger.info(
|
2025-10-17 07:31:14 +02:00
|
|
|
"Inventory data cloning completed with date adjustment",
|
2025-10-12 18:47:33 +02:00
|
|
|
virtual_tenant_id=virtual_tenant_id,
|
|
|
|
|
total_records=total_records,
|
|
|
|
|
stats=stats,
|
|
|
|
|
duration_ms=duration_ms
|
|
|
|
|
)
|
|
|
|
|
|
|
|
|
|
return {
|
|
|
|
|
"service": "inventory",
|
|
|
|
|
"status": "completed",
|
|
|
|
|
"records_cloned": total_records,
|
|
|
|
|
"duration_ms": duration_ms,
|
|
|
|
|
"details": stats
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
except ValueError as e:
|
|
|
|
|
logger.error("Invalid UUID format", error=str(e))
|
|
|
|
|
raise HTTPException(status_code=400, detail=f"Invalid UUID: {str(e)}")
|
|
|
|
|
|
|
|
|
|
except Exception as e:
|
|
|
|
|
logger.error(
|
|
|
|
|
"Failed to clone inventory data",
|
|
|
|
|
error=str(e),
|
|
|
|
|
virtual_tenant_id=virtual_tenant_id,
|
|
|
|
|
exc_info=True
|
|
|
|
|
)
|
|
|
|
|
|
|
|
|
|
# Rollback on error
|
|
|
|
|
await db.rollback()
|
|
|
|
|
|
|
|
|
|
return {
|
|
|
|
|
"service": "inventory",
|
|
|
|
|
"status": "failed",
|
|
|
|
|
"records_cloned": 0,
|
|
|
|
|
"duration_ms": int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000),
|
|
|
|
|
"error": str(e)
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
@router.get("/clone/health")
|
|
|
|
|
async def clone_health_check(_: bool = Depends(verify_internal_api_key)):
|
|
|
|
|
"""
|
|
|
|
|
Health check for internal cloning endpoint
|
|
|
|
|
Used by orchestrator to verify service availability
|
|
|
|
|
"""
|
|
|
|
|
return {
|
|
|
|
|
"service": "inventory",
|
|
|
|
|
"clone_endpoint": "available",
|
|
|
|
|
"version": "2.0.0"
|
|
|
|
|
}
|
2025-10-24 13:05:04 +02:00
|
|
|
|
|
|
|
|
|
|
|
|
|
@router.delete("/tenant/{virtual_tenant_id}")
|
|
|
|
|
async def delete_demo_data(
|
|
|
|
|
virtual_tenant_id: str,
|
|
|
|
|
db: AsyncSession = Depends(get_db),
|
|
|
|
|
_: bool = Depends(verify_internal_api_key)
|
|
|
|
|
):
|
|
|
|
|
"""
|
|
|
|
|
Delete all inventory data for a virtual demo tenant
|
|
|
|
|
|
|
|
|
|
Called by demo session cleanup service to remove ephemeral data
|
|
|
|
|
when demo sessions expire or are destroyed.
|
|
|
|
|
|
|
|
|
|
Args:
|
|
|
|
|
virtual_tenant_id: Virtual tenant UUID to delete
|
|
|
|
|
|
|
|
|
|
Returns:
|
|
|
|
|
Deletion status and count of records deleted
|
|
|
|
|
"""
|
|
|
|
|
from sqlalchemy import delete
|
|
|
|
|
|
|
|
|
|
logger.info(
|
|
|
|
|
"Deleting inventory data for virtual tenant",
|
|
|
|
|
virtual_tenant_id=virtual_tenant_id
|
|
|
|
|
)
|
|
|
|
|
|
|
|
|
|
start_time = datetime.now(timezone.utc)
|
|
|
|
|
|
|
|
|
|
try:
|
|
|
|
|
virtual_uuid = uuid.UUID(virtual_tenant_id)
|
|
|
|
|
|
|
|
|
|
# Count records before deletion for reporting
|
|
|
|
|
stock_count = await db.scalar(
|
|
|
|
|
select(func.count(Stock.id)).where(Stock.tenant_id == virtual_uuid)
|
|
|
|
|
)
|
|
|
|
|
ingredient_count = await db.scalar(
|
|
|
|
|
select(func.count(Ingredient.id)).where(Ingredient.tenant_id == virtual_uuid)
|
|
|
|
|
)
|
|
|
|
|
movement_count = await db.scalar(
|
|
|
|
|
select(func.count(StockMovement.id)).where(StockMovement.tenant_id == virtual_uuid)
|
|
|
|
|
)
|
|
|
|
|
|
|
|
|
|
# Delete in correct order to respect foreign key constraints
|
|
|
|
|
# 1. Delete StockMovements (references Stock)
|
|
|
|
|
await db.execute(
|
|
|
|
|
delete(StockMovement).where(StockMovement.tenant_id == virtual_uuid)
|
|
|
|
|
)
|
|
|
|
|
|
|
|
|
|
# 2. Delete Stock batches (references Ingredient)
|
|
|
|
|
await db.execute(
|
|
|
|
|
delete(Stock).where(Stock.tenant_id == virtual_uuid)
|
|
|
|
|
)
|
|
|
|
|
|
|
|
|
|
# 3. Delete Ingredients
|
|
|
|
|
await db.execute(
|
|
|
|
|
delete(Ingredient).where(Ingredient.tenant_id == virtual_uuid)
|
|
|
|
|
)
|
|
|
|
|
|
|
|
|
|
await db.commit()
|
|
|
|
|
|
|
|
|
|
duration_ms = int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000)
|
|
|
|
|
|
|
|
|
|
logger.info(
|
|
|
|
|
"Inventory data deleted successfully",
|
|
|
|
|
virtual_tenant_id=virtual_tenant_id,
|
|
|
|
|
stocks_deleted=stock_count,
|
|
|
|
|
ingredients_deleted=ingredient_count,
|
|
|
|
|
movements_deleted=movement_count,
|
|
|
|
|
duration_ms=duration_ms
|
|
|
|
|
)
|
|
|
|
|
|
|
|
|
|
return {
|
|
|
|
|
"service": "inventory",
|
|
|
|
|
"status": "deleted",
|
|
|
|
|
"virtual_tenant_id": virtual_tenant_id,
|
|
|
|
|
"records_deleted": {
|
|
|
|
|
"stock_batches": stock_count,
|
|
|
|
|
"ingredients": ingredient_count,
|
|
|
|
|
"stock_movements": movement_count,
|
|
|
|
|
"total": stock_count + ingredient_count + movement_count
|
|
|
|
|
},
|
|
|
|
|
"duration_ms": duration_ms
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
except ValueError as e:
|
|
|
|
|
logger.error("Invalid UUID format", error=str(e))
|
|
|
|
|
raise HTTPException(status_code=400, detail=f"Invalid UUID: {str(e)}")
|
|
|
|
|
|
|
|
|
|
except Exception as e:
|
|
|
|
|
logger.error(
|
|
|
|
|
"Failed to delete inventory data",
|
|
|
|
|
virtual_tenant_id=virtual_tenant_id,
|
|
|
|
|
error=str(e),
|
|
|
|
|
exc_info=True
|
|
|
|
|
)
|
|
|
|
|
await db.rollback()
|
|
|
|
|
raise HTTPException(
|
|
|
|
|
status_code=500,
|
|
|
|
|
detail=f"Failed to delete inventory data: {str(e)}"
|
|
|
|
|
)
|