demo seed change

Urtzi Alfaro
2025-12-13 23:57:54 +01:00
parent f3688dfb04
commit ff830a3415
299 changed files with 20328 additions and 19485 deletions


@@ -0,0 +1,87 @@
# services/inventory/app/api/internal_alert_trigger.py
"""
Internal API for triggering inventory alerts.
Used by demo session cloning to generate realistic inventory alerts.
URL Pattern: /api/v1/tenants/{tenant_id}/inventory/internal/alerts/trigger
This follows the tenant-scoped pattern so gateway can proxy correctly.
"""
from fastapi import APIRouter, HTTPException, Request, Path
from uuid import UUID
import structlog
logger = structlog.get_logger()
router = APIRouter()
# New URL pattern: tenant-scoped so gateway proxies to inventory service correctly
@router.post("/api/v1/tenants/{tenant_id}/inventory/internal/alerts/trigger")
async def trigger_inventory_alerts(
    tenant_id: UUID = Path(..., description="Tenant ID to check inventory for"),
    request: Request = None
) -> dict:
    """
    Trigger comprehensive inventory alert checks for a specific tenant (internal use only).
    This endpoint is called by the demo session cloning process after inventory
    data is seeded to generate realistic inventory alerts, including:
    - Critical stock shortages
    - Expiring ingredients
    - Overstock situations
    Security: Protected by X-Internal-Service header check.
    """
    try:
        # Verify internal service header
        if not request or request.headers.get("X-Internal-Service") not in ["demo-session", "internal"]:
            logger.warning("Unauthorized internal API call", tenant_id=str(tenant_id))
            raise HTTPException(
                status_code=403,
                detail="This endpoint is for internal service use only"
            )
        # Get inventory scheduler from app state
        inventory_scheduler = getattr(request.app.state, 'inventory_scheduler', None)
        if not inventory_scheduler:
            logger.error("Inventory scheduler not initialized")
            raise HTTPException(
                status_code=500,
                detail="Inventory scheduler not available"
            )
        # Trigger comprehensive inventory alert checks for the specific tenant
        logger.info("Triggering comprehensive inventory alert checks", tenant_id=str(tenant_id))
        # Call the scheduler's manual trigger method
        result = await inventory_scheduler.trigger_manual_check(tenant_id)
        if result.get("success", False):
            logger.info(
                "Inventory alert checks completed successfully",
                tenant_id=str(tenant_id),
                alerts_generated=result.get("alerts_generated", 0)
            )
        else:
            logger.error(
                "Inventory alert checks failed",
                tenant_id=str(tenant_id),
                error=result.get("error", "Unknown error")
            )
        return result
    except HTTPException:
        raise
    except Exception as e:
        logger.error(
            "Error triggering inventory alerts",
            tenant_id=str(tenant_id),
            error=str(e),
            exc_info=True
        )
        raise HTTPException(
            status_code=500,
            detail=f"Failed to trigger inventory alerts: {str(e)}"
        )
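
For context, this is roughly how the demo-session service might invoke the endpoint above. The host name and the use of httpx are assumptions for illustration; the URL path and header values come from the handler itself.

# sketch: calling the internal alert trigger from the demo-session service
import httpx

async def trigger_alerts_for_tenant(tenant_id: str) -> dict:
    # Placeholder service URL; a real deployment would route via the gateway
    url = f"http://inventory-service:8000/api/v1/tenants/{tenant_id}/inventory/internal/alerts/trigger"
    async with httpx.AsyncClient() as client:
        # The handler accepts either "demo-session" or "internal" in this header
        response = await client.post(url, headers={"X-Internal-Service": "demo-session"})
        response.raise_for_status()
        return response.json()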


@@ -1,44 +1,37 @@
"""
Internal Demo Cloning API for Inventory Service
Service-to-service endpoint for cloning inventory data with date adjustment
Handles internal demo data cloning operations
"""
from fastapi import APIRouter, Depends, HTTPException, Header
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select, delete
import structlog
import json
import os
import sys
from pathlib import Path
# Add shared path
sys.path.insert(0, str(Path(__file__).parent.parent.parent.parent.parent))
from datetime import datetime
from uuid import UUID
from app.core.database import get_db
from app.core.config import settings
from app.models import Ingredient, Stock, ProductType
logger = structlog.get_logger()
# Base demo tenant IDs
DEMO_TENANT_PROFESSIONAL = "a1b2c3d4-e5f6-47a8-b9c0-d1e2f3a4b5c6"
router = APIRouter()
async def verify_internal_api_key(x_internal_api_key: str = Header(None)):
    """Verify internal API key for service-to-service communication"""
    required_key = settings.INTERNAL_API_KEY
    if x_internal_api_key != required_key:
        logger.warning("Unauthorized internal API access attempted")
        raise HTTPException(status_code=403, detail="Invalid internal API key")
    return True
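# Illustrative caller (the host name and the use of httpx are assumptions; only
# the header name and the settings attribute come from this module):
#   httpx.post("http://inventory-service:8000/internal/demo/clone",
#              params={"base_tenant_id": ..., "virtual_tenant_id": ...},
#              headers={"X-Internal-API-Key": settings.INTERNAL_API_KEY})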
@router.post("/clone")
async def clone_demo_data(
@router.post("/internal/demo/clone")
async def clone_demo_data_internal(
base_tenant_id: str,
virtual_tenant_id: str,
demo_account_type: str,
@@ -50,350 +43,346 @@ async def clone_demo_data(
"""
Clone inventory service data for a virtual demo tenant
Clones:
- Ingredients from template tenant
- Stock batches with date-adjusted expiration dates
- Generates inventory alerts based on stock status
This endpoint creates fresh demo data by:
1. Loading seed data from JSON files
2. Applying XOR-based ID transformation
3. Adjusting dates relative to session creation time
4. Creating records in the virtual tenant
Args:
base_tenant_id: Template tenant UUID to clone from
base_tenant_id: Template tenant UUID (for reference)
virtual_tenant_id: Target virtual tenant UUID
demo_account_type: Type of demo account
session_id: Originating session ID for tracing
session_created_at: ISO timestamp when demo session was created (for date adjustment)
session_created_at: Session creation timestamp for date adjustment
db: Database session
Returns:
Cloning status and record counts
Dictionary with cloning results
Raises:
HTTPException: On validation or cloning errors
"""
    start_time = datetime.now()
    try:
        # Debug logging for UUID values
        logger.debug("Received UUID values", base_tenant_id=base_tenant_id, virtual_tenant_id=virtual_tenant_id)
        if not all([base_tenant_id, virtual_tenant_id, session_id]):
            raise HTTPException(
                status_code=400,
                detail="Missing required parameters: base_tenant_id, virtual_tenant_id, session_id"
            )
        # Validate UUID format before processing
        try:
            UUID(base_tenant_id)
            virtual_uuid = UUID(virtual_tenant_id)
        except ValueError as e:
            logger.error("Invalid UUID format in request",
                         base_tenant_id=base_tenant_id,
                         virtual_tenant_id=virtual_tenant_id,
                         error=str(e))
            raise HTTPException(
                status_code=400,
                detail=f"Invalid UUID format: {str(e)}"
            )
        # Parse session creation time for date adjustment
        if session_created_at:
            try:
                session_created_at_parsed = datetime.fromisoformat(session_created_at.replace('Z', '+00:00'))
            except (ValueError, AttributeError):
                session_created_at_parsed = datetime.now()
        else:
            session_created_at_parsed = datetime.now()
        # Determine profile based on demo_account_type
        if demo_account_type == "enterprise":
            profile = "enterprise"
        else:
            profile = "professional"
        logger.info(
            "Starting inventory data cloning with date adjustment",
            base_tenant_id=base_tenant_id,
            virtual_tenant_id=virtual_tenant_id,
            demo_account_type=demo_account_type,
            session_id=session_id,
            session_time=session_created_at_parsed.isoformat()
        )
        # Load seed data using shared utility
        try:
            from shared.utils.seed_data_paths import get_seed_data_path
            if profile == "professional":
                json_file = get_seed_data_path("professional", "03-inventory.json")
            elif profile == "enterprise":
                json_file = get_seed_data_path("enterprise", "03-inventory.json")
            else:
                raise ValueError(f"Invalid profile: {profile}")
        except ImportError:
            # Fallback to original path
            seed_data_dir = Path(__file__).parent.parent.parent.parent / "infrastructure" / "seed-data"
            if profile == "professional":
                json_file = seed_data_dir / "professional" / "03-inventory.json"
            elif profile == "enterprise":
                json_file = seed_data_dir / "enterprise" / "parent" / "03-inventory.json"
            else:
                raise ValueError(f"Invalid profile: {profile}")
        if not json_file.exists():
            raise HTTPException(
                status_code=404,
                detail=f"Seed data file not found: {json_file}"
            )
        # Load JSON data
        with open(json_file, 'r', encoding='utf-8') as f:
            seed_data = json.load(f)
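        # For orientation, the seed file is expected to look roughly like this
        # (shape inferred from the handling below, not copied from the repo):
        # {
        #   "ingredients": [
        #     {"id": "<uuid>", "name": "...", "category": "flour",
        #      "unit_of_measure": "kilograms", "expiration_date": "2025-01-04T00:00:00"}
        #   ],
        #   "stock_batches": [
        #     {"id": "BATCH-001",
        #      "received_date": {"offset_days": -3, "hour": 8, "minute": 0}}
        #   ]
        # }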
        # Check if data already exists for this virtual tenant (idempotency)
        existing_check = await db.execute(
            select(Ingredient).where(Ingredient.tenant_id == virtual_tenant_id).limit(1)
        )
        existing_ingredient = existing_check.scalar_one_or_none()
        if existing_ingredient:
            logger.info(
                "Demo data already exists, skipping clone",
                virtual_tenant_id=virtual_tenant_id
            )
            return {
                "status": "skipped",
                "reason": "Data already exists",
                "records_cloned": 0
            }
        # Transform and insert data
        records_cloned = 0
        # Clone ingredients
        for ingredient_data in seed_data.get('ingredients', []):
            # Transform ID
            from shared.utils.demo_id_transformer import transform_id
            try:
                ingredient_uuid = UUID(ingredient_data['id'])
                tenant_uuid = UUID(virtual_tenant_id)
                transformed_id = transform_id(ingredient_data['id'], tenant_uuid)
            except ValueError as e:
                logger.error("Failed to parse UUIDs for ID transformation",
                             ingredient_id=ingredient_data['id'],
                             virtual_tenant_id=virtual_tenant_id,
                             error=str(e))
                raise HTTPException(
                    status_code=400,
                    detail=f"Invalid UUID format in ingredient data: {str(e)}"
                )
            # Transform dates
            from shared.utils.demo_dates import adjust_date_for_demo
            for date_field in ['expiration_date', 'received_date', 'created_at', 'updated_at']:
                if date_field in ingredient_data:
                    try:
                        date_value = ingredient_data[date_field]
                        # Handle both string dates and date objects
                        if isinstance(date_value, str):
                            original_date = datetime.fromisoformat(date_value)
                        elif hasattr(date_value, 'isoformat'):
                            # Already a date/datetime object
                            original_date = date_value
                        else:
                            # Skip if not a valid date format
                            logger.warning("Skipping invalid date format",
                                           date_field=date_field,
                                           date_value=date_value)
                            continue
                        adjusted_date = adjust_date_for_demo(
                            original_date,
                            session_created_at_parsed
                        )
                        ingredient_data[date_field] = adjusted_date
                    except (ValueError, AttributeError) as e:
                        logger.warning("Failed to parse date, skipping",
                                       date_field=date_field,
                                       date_value=ingredient_data[date_field],
                                       error=str(e))
                        # Remove invalid date to avoid model errors
                        ingredient_data.pop(date_field, None)
            # Map category field to ingredient_category enum
            if 'category' in ingredient_data:
                category_value = ingredient_data.pop('category')
                # Convert category string to IngredientCategory enum
                from app.models.inventory import IngredientCategory
                try:
                    ingredient_data['ingredient_category'] = IngredientCategory[category_value.upper()]
                except KeyError:
                    # If category not found in enum, use OTHER
                    ingredient_data['ingredient_category'] = IngredientCategory.OTHER
            # Map unit_of_measure string to enum
            if 'unit_of_measure' in ingredient_data:
                from app.models.inventory import UnitOfMeasure
                unit_mapping = {
                    'kilograms': UnitOfMeasure.KILOGRAMS,
                    'grams': UnitOfMeasure.GRAMS,
                    'liters': UnitOfMeasure.LITERS,
                    'milliliters': UnitOfMeasure.MILLILITERS,
                    'units': UnitOfMeasure.UNITS,
                    'pieces': UnitOfMeasure.PIECES,
                    'packages': UnitOfMeasure.PACKAGES,
                    'bags': UnitOfMeasure.BAGS,
                    'boxes': UnitOfMeasure.BOXES
                }
                unit_str = ingredient_data['unit_of_measure']
                if unit_str in unit_mapping:
                    ingredient_data['unit_of_measure'] = unit_mapping[unit_str]
                else:
                    # Default to units if not found
                    ingredient_data['unit_of_measure'] = UnitOfMeasure.UNITS
                    logger.warning("Unknown unit_of_measure, defaulting to UNITS",
                                   original_unit=unit_str)
            # Note: all seed data fields now match the model schema exactly,
            # so no field filtering is needed
            # Remove original id and tenant_id from ingredient_data to avoid conflicts
            ingredient_data.pop('id', None)
            ingredient_data.pop('tenant_id', None)
            # Create ingredient
            ingredient = Ingredient(
                id=str(transformed_id),
                tenant_id=str(virtual_tenant_id),
                **ingredient_data
            )
            db.add(ingredient)
            records_cloned += 1
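        # Worked illustration of the XOR scheme (values invented): if transform_id
        # computes virtual_id = seed_id XOR virtual_tenant_id over the 128-bit UUID
        # integers, the same seed record yields a different ID in every demo session
        # but an identical ID in every service that applies the same formula.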
        # Clone stock batches
        for stock_data in seed_data.get('stock_batches', []):
            # Transform ID - handle both UUID and string IDs
            from shared.utils.demo_id_transformer import transform_id
            try:
                # Try to parse as UUID first
                stock_uuid = UUID(stock_data['id'])
                tenant_uuid = UUID(virtual_tenant_id)
                transformed_id = transform_id(stock_data['id'], tenant_uuid)
            except ValueError:
                # If not a UUID, generate a deterministic UUID from the string ID
                import hashlib
                stock_id_string = stock_data['id']
                tenant_uuid = UUID(virtual_tenant_id)
                # Create a deterministic UUID from the string ID and tenant ID
                combined = f"{stock_id_string}-{tenant_uuid}"
                hash_obj = hashlib.sha256(combined.encode('utf-8'))
                transformed_id = UUID(hash_obj.hexdigest()[:32])
                logger.info("Generated UUID for non-UUID stock ID",
                            original_id=stock_id_string,
                            generated_id=str(transformed_id))
            # Transform dates - handle both timestamp dictionaries and ISO strings
            # (imported here as well so this loop does not depend on the ingredient loop)
            from shared.utils.demo_dates import adjust_date_for_demo
            for date_field in ['received_date', 'expiration_date', 'best_before_date',
                               'original_expiration_date', 'transformation_date',
                               'final_expiration_date', 'created_at', 'updated_at']:
                if date_field in stock_data:
                    try:
                        date_value = stock_data[date_field]
                        # Handle timestamp dictionaries (offset_days, hour, minute)
                        if isinstance(date_value, dict) and 'offset_days' in date_value:
                            from shared.utils.demo_dates import calculate_demo_datetime
                            original_date = calculate_demo_datetime(
                                offset_days=date_value.get('offset_days', 0),
                                hour=date_value.get('hour', 0),
                                minute=date_value.get('minute', 0),
                                session_created_at=session_created_at_parsed
                            )
                        elif isinstance(date_value, str):
                            # ISO string
                            original_date = datetime.fromisoformat(date_value)
                        elif hasattr(date_value, 'isoformat'):
                            # Already a date/datetime object
                            original_date = date_value
                        else:
                            # Skip if not a valid date format
                            logger.warning("Skipping invalid date format",
                                           date_field=date_field,
                                           date_value=date_value)
                            continue
                        adjusted_stock_date = adjust_date_for_demo(
                            original_date,
                            session_created_at_parsed
                        )
                        stock_data[date_field] = adjusted_stock_date
                    except (ValueError, AttributeError) as e:
                        logger.warning("Failed to parse date, skipping",
                                       date_field=date_field,
                                       date_value=stock_data[date_field],
                                       error=str(e))
                        # Remove invalid date to avoid model errors
                        stock_data.pop(date_field, None)
            # Remove original id and tenant_id from stock_data to avoid conflicts
            stock_data.pop('id', None)
            stock_data.pop('tenant_id', None)
            # Create stock batch
            stock = Stock(
                id=str(transformed_id),
                tenant_id=str(virtual_tenant_id),
                **stock_data
            )
            db.add(stock)
            records_cloned += 1
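        # Note on the fallback above: hashing "<string-id>-<tenant-uuid>" with
        # SHA-256 and keeping the first 32 hex digits yields the same UUID on
        # every re-clone of the same tenant, so string-keyed seed batches get
        # stable IDs across retries.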
        # Commit all changes
        await db.commit()
        # NOTE: alert generation happens separately - alerts are generated by the
        # inventory_alert_service, which runs scheduled checks every 2-5 minutes.
        # This eliminates duplicate alerts and provides a more realistic demo experience.
        duration_ms = int((datetime.now() - start_time).total_seconds() * 1000)
        logger.info(
            "Inventory data cloned successfully",
            virtual_tenant_id=virtual_tenant_id,
            records_cloned=records_cloned,
            duration_ms=duration_ms,
            ingredients_cloned=len(seed_data.get('ingredients', [])),
            stock_batches_cloned=len(seed_data.get('stock_batches', []))
        )
        return {
            "service": "inventory",
            "status": "completed",
            "records_cloned": records_cloned,
            "duration_ms": duration_ms,
            "details": {
                "ingredients": len(seed_data.get('ingredients', [])),
                "stock_batches": len(seed_data.get('stock_batches', [])),
                "virtual_tenant_id": str(virtual_tenant_id)
            }
        }
    except ValueError as e:
        logger.error("Invalid UUID format", error=str(e), virtual_tenant_id=virtual_tenant_id)
        raise HTTPException(status_code=400, detail=f"Invalid UUID: {str(e)}")
except Exception as e:
@@ -411,7 +400,7 @@ async def clone_demo_data(
"service": "inventory",
"status": "failed",
"records_cloned": 0,
"duration_ms": int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000),
"duration_ms": int((datetime.now() - start_time).total_seconds() * 1000),
"error": str(e)
}
@@ -430,101 +419,68 @@ async def clone_health_check(_: bool = Depends(verify_internal_api_key)):
@router.delete("/tenant/{virtual_tenant_id}")
async def delete_demo_data(
virtual_tenant_id: str,
async def delete_demo_tenant_data(
virtual_tenant_id: UUID,
db: AsyncSession = Depends(get_db),
_: bool = Depends(verify_internal_api_key)
):
"""
Delete all inventory data for a virtual demo tenant
Called by demo session cleanup service to remove ephemeral data
when demo sessions expire or are destroyed.
Args:
virtual_tenant_id: Virtual tenant UUID to delete
Returns:
Deletion status and count of records deleted
Delete all demo data for a virtual tenant.
This endpoint is idempotent - safe to call multiple times.
"""
from sqlalchemy import delete
logger.info(
"Deleting inventory data for virtual tenant",
virtual_tenant_id=virtual_tenant_id
)
start_time = datetime.now(timezone.utc)
start_time = datetime.now()
records_deleted = {
"ingredients": 0,
"stock": 0,
"total": 0
}
try:
virtual_uuid = uuid.UUID(virtual_tenant_id)
        # Delete in reverse dependency order
        # 1. Delete stock batches (depends on ingredients)
        result = await db.execute(
            delete(Stock)
            .where(Stock.tenant_id == virtual_tenant_id)
        )
        records_deleted["stock"] = result.rowcount
        # 2. Delete ingredients
        result = await db.execute(
            delete(Ingredient)
            .where(Ingredient.tenant_id == virtual_tenant_id)
        )
        records_deleted["ingredients"] = result.rowcount
        records_deleted["total"] = sum(records_deleted.values())
        await db.commit()
        logger.info(
            "demo_data_deleted",
            service="inventory",
            virtual_tenant_id=str(virtual_tenant_id),
            records_deleted=records_deleted
        )
        return {
            "service": "inventory",
            "status": "deleted",
            "virtual_tenant_id": str(virtual_tenant_id),
            "records_deleted": records_deleted,
            "duration_ms": int((datetime.now() - start_time).total_seconds() * 1000)
        }
    except Exception as e:
        await db.rollback()
        logger.error(
            "demo_data_deletion_failed",
            service="inventory",
            virtual_tenant_id=str(virtual_tenant_id),
            error=str(e)
        )
        raise HTTPException(
            status_code=500,
            detail=f"Failed to delete demo data: {str(e)}"
        )
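
The shared transform_id helper referenced throughout is not part of this diff. A minimal sketch consistent with the XOR-based scheme the docstring describes might look like the following; the module path, signature, and ValueError behavior are assumptions inferred from the call sites above.

# shared/utils/demo_id_transformer.py (assumed sketch, not from this commit)
from uuid import UUID

def transform_id(seed_id: str, virtual_tenant_id: UUID) -> UUID:
    # Raises ValueError for non-UUID seed IDs, which the stock-batch
    # call site above catches to fall back to SHA-256 derivation
    seed_uuid = UUID(seed_id)
    # XOR the 128-bit integers: deterministic per (seed record, tenant) pair
    return UUID(int=seed_uuid.int ^ virtual_tenant_id.int)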


@@ -319,3 +319,89 @@ async def ml_insights_health():
"POST /ml/insights/optimize-safety-stock"
]
}
# ================================================================
# INTERNAL ENDPOINTS (for demo-session service)
# ================================================================
from fastapi import Request
# Create a separate router for internal endpoints to avoid the tenant prefix
internal_router = APIRouter(
    tags=["ML Insights - Internal"]
)
@internal_router.post("/api/v1/tenants/{tenant_id}/inventory/internal/ml/generate-safety-stock-insights")
async def generate_safety_stock_insights_internal(
    tenant_id: str,
    request: Request,
    db: AsyncSession = Depends(get_db)
):
    """
    Internal endpoint to trigger safety stock insights generation for demo sessions.
    This endpoint is called by the demo-session service after cloning data.
    It uses the same ML logic as the public endpoint but with optimized defaults.
    Security: Protected by X-Internal-Service header check.
    Args:
        tenant_id: The tenant UUID
        request: FastAPI request object
        db: Database session
    Returns:
        {
            "insights_posted": int,
            "tenant_id": str,
            "status": str
        }
    """
    # Verify internal service header
    if not request or request.headers.get("X-Internal-Service") not in ["demo-session", "internal"]:
        logger.warning("Unauthorized internal API call", tenant_id=tenant_id)
        raise HTTPException(
            status_code=403,
            detail="This endpoint is for internal service use only"
        )
    logger.info("Internal safety stock insights generation triggered", tenant_id=tenant_id)
    try:
        # Use the existing safety stock optimization logic with sensible defaults
        request_data = SafetyStockOptimizationRequest(
            product_ids=None,     # Analyze all products
            lookback_days=90,     # 3 months of history
            min_history_days=30   # Minimum 30 days required
        )
        # Call the existing safety stock optimization endpoint logic
        result = await trigger_safety_stock_optimization(
            tenant_id=tenant_id,
            request_data=request_data,
            db=db
        )
        # Return simplified response for internal use
        return {
            "insights_posted": result.total_insights_posted,
            "tenant_id": tenant_id,
            "status": "success" if result.success else "failed",
            "message": result.message,
            "products_optimized": result.products_optimized,
            "total_cost_savings": result.total_cost_savings
        }
    except Exception as e:
        logger.error(
            "Internal safety stock insights generation failed",
            tenant_id=tenant_id,
            error=str(e),
            exc_info=True
        )
        raise HTTPException(
            status_code=500,
            detail=f"Internal safety stock insights generation failed: {str(e)}"
        )
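
As with the alert trigger earlier in this commit, the demo-session service would call this endpoint with the internal header. A minimal caller sketch, with the host name and the use of httpx assumed:

# sketch: triggering safety stock insights after cloning (illustrative)
import httpx

async def generate_demo_insights(tenant_id: str) -> dict:
    # Placeholder host; the path and header come from the handler above
    url = (f"http://inventory-service:8000/api/v1/tenants/{tenant_id}"
           "/inventory/internal/ml/generate-safety-stock-insights")
    async with httpx.AsyncClient(timeout=60.0) as client:
        response = await client.post(url, headers={"X-Internal-Service": "demo-session"})
        response.raise_for_status()
        return response.json()  # {"insights_posted": ..., "status": ...}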