"""
Internal Demo Cloning API for Inventory Service
Service-to-service endpoint for cloning inventory data with date adjustment
"""
from fastapi import APIRouter, Depends, HTTPException, Header
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select
import structlog
import uuid
from datetime import datetime, timezone
from typing import Optional
import os
import sys
from pathlib import Path
# Make the repository root importable so the shared/ package can be found
sys.path.insert(0, str(Path(__file__).parent.parent.parent.parent.parent))
from app.core.database import get_db
from app.models.inventory import Ingredient, Stock
from shared.utils.demo_dates import adjust_date_for_demo, BASE_REFERENCE_DATE
from shared.messaging.rabbitmq import RabbitMQClient
logger = structlog.get_logger()
router = APIRouter(prefix="/internal/demo", tags=["internal"])
# Internal API key for service-to-service auth
INTERNAL_API_KEY = os.getenv("INTERNAL_API_KEY", "dev-internal-key-change-in-production")
# Base demo tenant IDs
DEMO_TENANT_SAN_PABLO = "a1b2c3d4-e5f6-47a8-b9c0-d1e2f3a4b5c6"
DEMO_TENANT_LA_ESPIGA = "b2c3d4e5-f6a7-48b9-c0d1-e2f3a4b5c6d7"
def verify_internal_api_key(x_internal_api_key: Optional[str] = Header(None)):
"""Verify internal API key for service-to-service communication"""
if x_internal_api_key != INTERNAL_API_KEY:
logger.warning("Unauthorized internal API access attempted")
raise HTTPException(status_code=403, detail="Invalid internal API key")
return True
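# Hypothetical example of how the orchestrator might call this service
# (illustration only -- the real client lives in the orchestrator; the host
# name, any app-level path prefix, and the demo_account_type value are made up):
#
#   async with httpx.AsyncClient() as client:
#       resp = await client.post(
#           "http://inventory-service/internal/demo/clone",
#           params={
#               "base_tenant_id": DEMO_TENANT_SAN_PABLO,
#               "virtual_tenant_id": str(uuid.uuid4()),
#               "demo_account_type": "standard",
#           },
#           headers={"X-Internal-API-Key": INTERNAL_API_KEY},
#       )
#       resp.raise_for_status()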
@router.post("/clone")
async def clone_demo_data(
base_tenant_id: str,
virtual_tenant_id: str,
demo_account_type: str,
session_id: Optional[str] = None,
db: AsyncSession = Depends(get_db),
_: bool = Depends(verify_internal_api_key)
):
"""
Clone inventory service data for a virtual demo tenant
Clones:
- Ingredients from the template tenant
- Stock batches with date-adjusted expiration dates
It also generates inventory alerts based on the cloned stock status.
Args:
base_tenant_id: Template tenant UUID to clone from
virtual_tenant_id: Target virtual tenant UUID
demo_account_type: Type of demo account
session_id: Originating session ID for tracing
Returns:
Cloning status and record counts
"""
start_time = datetime.now(timezone.utc)
session_created_at = datetime.now(timezone.utc)
logger.info(
"Starting inventory data cloning with date adjustment",
base_tenant_id=base_tenant_id,
virtual_tenant_id=virtual_tenant_id,
demo_account_type=demo_account_type,
session_id=session_id,
session_created_at=session_created_at.isoformat()
)
try:
# Validate UUIDs
base_uuid = uuid.UUID(base_tenant_id)
virtual_uuid = uuid.UUID(virtual_tenant_id)
# Track cloning statistics
stats = {
"ingredients": 0,
"stock_batches": 0,
"alerts_generated": 0
}
# Mapping from base ingredient ID to virtual ingredient ID
ingredient_id_mapping = {}
# Clone Ingredients
result = await db.execute(
select(Ingredient).where(Ingredient.tenant_id == base_uuid)
)
base_ingredients = result.scalars().all()
logger.info(
"Found ingredients to clone",
count=len(base_ingredients),
base_tenant=str(base_uuid)
)
for ingredient in base_ingredients:
# Transform the ingredient ID with XOR so every service derives the same
# virtual IDs independently (matches the suppliers service transformation):
# virtual_ingredient_id = virtual_tenant_id XOR (base_tenant_id XOR stored_ingredient_id)
base_ingredient_int = int(ingredient.id.hex, 16)
virtual_tenant_int = int(virtual_uuid.hex, 16)
base_tenant_int = int(base_uuid.hex, 16)
# Undo the template tenant's XOR first: the stored ID was produced as
#   stored_id = base_tenant_id ^ ingredient_id
# and XOR is self-inverse, so XORing with base_tenant_id recovers ingredient_id
base_ingredient_id_int = base_tenant_int ^ base_ingredient_int
# Now apply virtual tenant XOR to get the new ingredient ID
new_ingredient_id = uuid.UUID(int=virtual_tenant_int ^ base_ingredient_id_int)
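# Worked example with tiny made-up values (real IDs are 128-bit UUID integers;
# shown only to illustrate the round trip):
#   base_tenant = 0b1100, virtual_tenant = 0b0011, stored_id = 0b0110
#   original_id = base_tenant ^ stored_id      = 0b1010
#   virtual_id  = virtual_tenant ^ original_id = 0b1001
# Because XOR is self-inverse, any service applying the same two XORs to the
# same template row derives the same virtual ingredient UUID without a lookup.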
logger.debug(
"Transforming ingredient ID using XOR",
base_ingredient_id=str(ingredient.id),
new_ingredient_id=str(new_ingredient_id),
ingredient_sku=ingredient.sku,
ingredient_name=ingredient.name
)
new_ingredient = Ingredient(
id=new_ingredient_id,
tenant_id=virtual_uuid,
name=ingredient.name,
sku=ingredient.sku,
barcode=ingredient.barcode,
product_type=ingredient.product_type,
ingredient_category=ingredient.ingredient_category,
product_category=ingredient.product_category,
subcategory=ingredient.subcategory,
description=ingredient.description,
brand=ingredient.brand,
unit_of_measure=ingredient.unit_of_measure,
package_size=ingredient.package_size,
average_cost=ingredient.average_cost,
last_purchase_price=ingredient.last_purchase_price,
standard_cost=ingredient.standard_cost,
low_stock_threshold=ingredient.low_stock_threshold,
reorder_point=ingredient.reorder_point,
reorder_quantity=ingredient.reorder_quantity,
max_stock_level=ingredient.max_stock_level,
shelf_life_days=ingredient.shelf_life_days,
display_life_hours=ingredient.display_life_hours,
best_before_hours=ingredient.best_before_hours,
storage_instructions=ingredient.storage_instructions,
is_perishable=ingredient.is_perishable,
is_active=ingredient.is_active,
allergen_info=ingredient.allergen_info,
nutritional_info=ingredient.nutritional_info
)
db.add(new_ingredient)
stats["ingredients"] += 1
# Store mapping for stock cloning
ingredient_id_mapping[ingredient.id] = new_ingredient_id
await db.flush() # Ensure ingredients are persisted before stock
# Clone Stock batches with date adjustment
result = await db.execute(
select(Stock).where(Stock.tenant_id == base_uuid)
)
base_stocks = result.scalars().all()
logger.info(
"Found stock batches to clone",
count=len(base_stocks),
base_tenant=str(base_uuid)
)
for stock in base_stocks:
# Map ingredient ID
new_ingredient_id = ingredient_id_mapping.get(stock.ingredient_id)
if not new_ingredient_id:
logger.warning(
"Stock references non-existent ingredient, skipping",
stock_id=str(stock.id),
ingredient_id=str(stock.ingredient_id)
)
continue
# Adjust dates relative to session creation
adjusted_expiration = adjust_date_for_demo(
stock.expiration_date,
session_created_at,
BASE_REFERENCE_DATE
)
adjusted_received = adjust_date_for_demo(
stock.received_date,
session_created_at,
BASE_REFERENCE_DATE
)
adjusted_best_before = adjust_date_for_demo(
stock.best_before_date,
session_created_at,
BASE_REFERENCE_DATE
)
adjusted_created = adjust_date_for_demo(
stock.created_at,
session_created_at,
BASE_REFERENCE_DATE
) or session_created_at
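# Assumption for this illustration: adjust_date_for_demo shifts each date by
# the offset between session_created_at and BASE_REFERENCE_DATE (see
# shared.utils.demo_dates for the actual rule). Under that assumption, a batch
# whose template expiration_date is 3 days after BASE_REFERENCE_DATE will
# expire 3 days after the demo session was created, so "expiring soon" demo
# data stays fresh for every new session.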
# Create new stock batch
new_stock = Stock(
id=uuid.uuid4(),
tenant_id=virtual_uuid,
ingredient_id=new_ingredient_id,
supplier_id=stock.supplier_id,
batch_number=stock.batch_number,
lot_number=stock.lot_number,
supplier_batch_ref=stock.supplier_batch_ref,
production_stage=stock.production_stage,
current_quantity=stock.current_quantity,
reserved_quantity=stock.reserved_quantity,
available_quantity=stock.available_quantity,
received_date=adjusted_received,
expiration_date=adjusted_expiration,
best_before_date=adjusted_best_before,
unit_cost=stock.unit_cost,
total_cost=stock.total_cost,
storage_location=stock.storage_location,
warehouse_zone=stock.warehouse_zone,
shelf_position=stock.shelf_position,
requires_refrigeration=stock.requires_refrigeration,
requires_freezing=stock.requires_freezing,
storage_temperature_min=stock.storage_temperature_min,
storage_temperature_max=stock.storage_temperature_max,
storage_humidity_max=stock.storage_humidity_max,
shelf_life_days=stock.shelf_life_days,
storage_instructions=stock.storage_instructions,
is_available=stock.is_available,
is_expired=stock.is_expired,
quality_status=stock.quality_status,
created_at=adjusted_created,
updated_at=session_created_at
)
db.add(new_stock)
stats["stock_batches"] += 1
# Commit all changes
await db.commit()
# Generate inventory alerts with RabbitMQ publishing
rabbitmq_client = None
try:
from shared.utils.alert_generator import generate_inventory_alerts
# Initialize RabbitMQ client for alert publishing
rabbitmq_host = os.getenv("RABBITMQ_HOST", "rabbitmq-service")
rabbitmq_user = os.getenv("RABBITMQ_USER", "bakery")
rabbitmq_password = os.getenv("RABBITMQ_PASSWORD", "forecast123")
rabbitmq_port = os.getenv("RABBITMQ_PORT", "5672")
rabbitmq_vhost = os.getenv("RABBITMQ_VHOST", "/")
rabbitmq_url = f"amqp://{rabbitmq_user}:{rabbitmq_password}@{rabbitmq_host}:{rabbitmq_port}{rabbitmq_vhost}"
rabbitmq_client = RabbitMQClient(rabbitmq_url, service_name="inventory")
await rabbitmq_client.connect()
# Generate alerts and publish to RabbitMQ
alerts_count = await generate_inventory_alerts(
db,
virtual_uuid,
session_created_at,
rabbitmq_client=rabbitmq_client
)
stats["alerts_generated"] = alerts_count
await db.commit()
logger.info(f"Generated {alerts_count} inventory alerts", virtual_tenant_id=virtual_tenant_id)
except Exception as e:
logger.warning(f"Failed to generate alerts: {str(e)}", exc_info=True)
stats["alerts_generated"] = 0
finally:
# Clean up RabbitMQ connection
if rabbitmq_client:
try:
await rabbitmq_client.disconnect()
except Exception as cleanup_error:
logger.warning(f"Error disconnecting RabbitMQ: {cleanup_error}")
total_records = sum(stats.values())
duration_ms = int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000)
logger.info(
"Inventory data cloning completed with date adjustment",
virtual_tenant_id=virtual_tenant_id,
total_records=total_records,
stats=stats,
duration_ms=duration_ms
)
return {
"service": "inventory",
"status": "completed",
"records_cloned": total_records,
"duration_ms": duration_ms,
"details": stats
}
except ValueError as e:
logger.error("Invalid UUID format", error=str(e))
raise HTTPException(status_code=400, detail=f"Invalid UUID: {str(e)}")
except Exception as e:
logger.error(
"Failed to clone inventory data",
error=str(e),
virtual_tenant_id=virtual_tenant_id,
exc_info=True
)
# Rollback on error
await db.rollback()
return {
"service": "inventory",
"status": "failed",
"records_cloned": 0,
"duration_ms": int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000),
"error": str(e)
}
@router.get("/clone/health")
async def clone_health_check(_: bool = Depends(verify_internal_api_key)):
"""
Health check for internal cloning endpoint
Used by orchestrator to verify service availability
"""
return {
"service": "inventory",
"clone_endpoint": "available",
"version": "2.0.0"
}
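# Hypothetical smoke test (assumes the service listens on localhost:8000 and
# the router is mounted without an extra path prefix):
#
#   curl -H "X-Internal-API-Key: $INTERNAL_API_KEY" \
#        http://localhost:8000/internal/demo/clone/health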