demo seed change
This commit is contained in:
87
services/inventory/app/api/internal_alert_trigger.py
Normal file
@@ -0,0 +1,87 @@
# services/inventory/app/api/internal_alert_trigger.py
"""
Internal API for triggering inventory alerts.
Used by demo session cloning to generate realistic inventory alerts.

URL Pattern: /api/v1/tenants/{tenant_id}/inventory/internal/alerts/trigger
This follows the tenant-scoped pattern so the gateway can proxy correctly.
"""

from fastapi import APIRouter, HTTPException, Request, Path
from uuid import UUID
import structlog

logger = structlog.get_logger()

router = APIRouter()


# New URL pattern: tenant-scoped so the gateway proxies to the inventory service correctly
@router.post("/api/v1/tenants/{tenant_id}/inventory/internal/alerts/trigger")
async def trigger_inventory_alerts(
    tenant_id: UUID = Path(..., description="Tenant ID to check inventory for"),
    request: Request = None
) -> dict:
    """
    Trigger comprehensive inventory alert checks for a specific tenant (internal use only).

    This endpoint is called by the demo session cloning process after inventory
    data is seeded to generate realistic inventory alerts including:
    - Critical stock shortages
    - Expiring ingredients
    - Overstock situations

    Security: Protected by X-Internal-Service header check.
    """
    try:
        # Verify internal service header
        if not request or request.headers.get("X-Internal-Service") not in ["demo-session", "internal"]:
            logger.warning("Unauthorized internal API call", tenant_id=str(tenant_id))
            raise HTTPException(
                status_code=403,
                detail="This endpoint is for internal service use only"
            )

        # Get inventory scheduler from app state
        inventory_scheduler = getattr(request.app.state, 'inventory_scheduler', None)

        if not inventory_scheduler:
            logger.error("Inventory scheduler not initialized")
            raise HTTPException(
                status_code=500,
                detail="Inventory scheduler not available"
            )

        # Trigger comprehensive inventory alert checks for the specific tenant
        logger.info("Triggering comprehensive inventory alert checks", tenant_id=str(tenant_id))

        # Call the scheduler's manual trigger method
        result = await inventory_scheduler.trigger_manual_check(tenant_id)

        if result.get("success", False):
            logger.info(
                "Inventory alert checks completed successfully",
                tenant_id=str(tenant_id),
                alerts_generated=result.get("alerts_generated", 0)
            )
        else:
            logger.error(
                "Inventory alert checks failed",
                tenant_id=str(tenant_id),
                error=result.get("error", "Unknown error")
            )

        return result

    except HTTPException:
        raise
    except Exception as e:
        logger.error(
            "Error triggering inventory alerts",
            tenant_id=str(tenant_id),
            error=str(e),
            exc_info=True
        )
        raise HTTPException(
            status_code=500,
            detail=f"Failed to trigger inventory alerts: {str(e)}"
        )
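A minimal caller sketch (illustrative, not part of this commit; the `inventory` host name and the httpx client usage are assumptions) showing how the demo-session service might invoke the trigger endpoint with the expected header:

    import httpx

    async def trigger_demo_alerts(tenant_id: str) -> dict:
        # The X-Internal-Service header must be "demo-session" or "internal";
        # anything else receives a 403 from the endpoint above.
        url = f"http://inventory:8000/api/v1/tenants/{tenant_id}/inventory/internal/alerts/trigger"
        async with httpx.AsyncClient() as client:
            response = await client.post(url, headers={"X-Internal-Service": "demo-session"})
            response.raise_for_status()
            return response.json()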
@@ -1,44 +1,37 @@
services/inventory/app/api/internal_demo.py

"""
Internal Demo Cloning API for Inventory Service
Service-to-service endpoint for cloning inventory data with date adjustment
Handles internal demo data cloning operations
"""

from fastapi import APIRouter, Depends, HTTPException, Header
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select, func
import structlog
import uuid
from datetime import datetime, timezone
from typing import Optional
import os
import sys
import structlog
import json
from pathlib import Path

# Add shared path
sys.path.insert(0, str(Path(__file__).parent.parent.parent.parent.parent))
from datetime import datetime
import uuid
from uuid import UUID

from app.core.database import get_db
from app.models.inventory import Ingredient, Stock, StockMovement
from shared.utils.demo_dates import adjust_date_for_demo, BASE_REFERENCE_DATE
from app.core.config import settings
from app.models import Ingredient, Stock, ProductType

logger = structlog.get_logger()
router = APIRouter(prefix="/internal/demo", tags=["internal"])

# Base demo tenant IDs
DEMO_TENANT_PROFESSIONAL = "a1b2c3d4-e5f6-47a8-b9c0-d1e2f3a4b5c6"
router = APIRouter()


def verify_internal_api_key(x_internal_api_key: Optional[str] = Header(None)):
async def verify_internal_api_key(x_internal_api_key: str = Header(None)):
    """Verify internal API key for service-to-service communication"""
    from app.core.config import settings
    if x_internal_api_key != settings.INTERNAL_API_KEY:
    required_key = settings.INTERNAL_API_KEY
    if x_internal_api_key != required_key:
        logger.warning("Unauthorized internal API access attempted")
        raise HTTPException(status_code=403, detail="Invalid internal API key")
    return True


@router.post("/clone")
async def clone_demo_data(
@router.post("/internal/demo/clone")
async def clone_demo_data_internal(
    base_tenant_id: str,
    virtual_tenant_id: str,
    demo_account_type: str,
@@ -50,350 +43,346 @@ async def clone_demo_data(
    """
    Clone inventory service data for a virtual demo tenant

    Clones:
    - Ingredients from template tenant
    - Stock batches with date-adjusted expiration dates
    - Generates inventory alerts based on stock status
    This endpoint creates fresh demo data by:
    1. Loading seed data from JSON files
    2. Applying XOR-based ID transformation
    3. Adjusting dates relative to session creation time
    4. Creating records in the virtual tenant

    Args:
        base_tenant_id: Template tenant UUID to clone from
        base_tenant_id: Template tenant UUID (for reference)
        virtual_tenant_id: Target virtual tenant UUID
        demo_account_type: Type of demo account
        session_id: Originating session ID for tracing
        session_created_at: ISO timestamp when demo session was created (for date adjustment)

        session_created_at: Session creation timestamp for date adjustment
        db: Database session

    Returns:
        Cloning status and record counts
        Dictionary with cloning results

    Raises:
        HTTPException: On validation or cloning errors
    """
    start_time = datetime.now(timezone.utc)

    # Parse session_created_at or fallback to now
    if session_created_at:
        try:
            session_time = datetime.fromisoformat(session_created_at.replace('Z', '+00:00'))
        except (ValueError, AttributeError) as e:
            logger.warning(
                "Invalid session_created_at format, using current time",
                session_created_at=session_created_at,
                error=str(e)
            )
            session_time = datetime.now(timezone.utc)
    else:
        logger.warning("session_created_at not provided, using current time")
        session_time = datetime.now(timezone.utc)

    logger.info(
        "Starting inventory data cloning with date adjustment",
        base_tenant_id=base_tenant_id,
        virtual_tenant_id=virtual_tenant_id,
        demo_account_type=demo_account_type,
        session_id=session_id,
        session_time=session_time.isoformat()
    )

    start_time = datetime.now()

    try:
        # Validate UUIDs
        base_uuid = uuid.UUID(base_tenant_id)
        virtual_uuid = uuid.UUID(virtual_tenant_id)
        virtual_uuid = UUID(virtual_tenant_id)

        # Parse session creation time for date adjustment
        if session_created_at:
            try:
                session_time = datetime.fromisoformat(session_created_at.replace('Z', '+00:00'))
            except (ValueError, AttributeError):
                session_time = start_time
        else:
            session_time = start_time

        # Debug logging for UUID values
        logger.debug("Received UUID values", base_tenant_id=base_tenant_id, virtual_tenant_id=virtual_tenant_id)

        if not all([base_tenant_id, virtual_tenant_id, session_id]):
            raise HTTPException(
                status_code=400,
                detail="Missing required parameters: base_tenant_id, virtual_tenant_id, session_id"
            )

        # Validate UUID format before processing
        try:
            UUID(base_tenant_id)
            UUID(virtual_tenant_id)
        except ValueError as e:
            logger.error("Invalid UUID format in request",
                         base_tenant_id=base_tenant_id,
                         virtual_tenant_id=virtual_tenant_id,
                         error=str(e))
            raise HTTPException(
                status_code=400,
                detail=f"Invalid UUID format: {str(e)}"
            )

        # Parse session creation time
        if session_created_at:
            try:
                session_created_at_parsed = datetime.fromisoformat(session_created_at.replace('Z', '+00:00'))
            except (ValueError, AttributeError):
                session_created_at_parsed = datetime.now()
        else:
            session_created_at_parsed = datetime.now()

        # Determine profile based on demo_account_type
        if demo_account_type == "enterprise":
            profile = "enterprise"
        else:
            profile = "professional"

        logger.info(
            "Starting inventory data cloning with date adjustment",
            base_tenant_id=base_tenant_id,
            virtual_tenant_id=virtual_tenant_id,
            demo_account_type=demo_account_type,
            session_id=session_id,
            session_time=session_created_at_parsed.isoformat()
        )

        # Load seed data using shared utility
        try:
            from shared.utils.seed_data_paths import get_seed_data_path

            if profile == "professional":
                json_file = get_seed_data_path("professional", "03-inventory.json")
            elif profile == "enterprise":
                json_file = get_seed_data_path("enterprise", "03-inventory.json")
            else:
                raise ValueError(f"Invalid profile: {profile}")

        except ImportError:
            # Fallback to original path
            seed_data_dir = Path(__file__).parent.parent.parent.parent / "infrastructure" / "seed-data"
            if profile == "professional":
                json_file = seed_data_dir / "professional" / "03-inventory.json"
            elif profile == "enterprise":
                json_file = seed_data_dir / "enterprise" / "parent" / "03-inventory.json"
            else:
                raise ValueError(f"Invalid profile: {profile}")

        if not json_file.exists():
            raise HTTPException(
                status_code=404,
                detail=f"Seed data file not found: {json_file}"
            )

        # Load JSON data
        with open(json_file, 'r', encoding='utf-8') as f:
            seed_data = json.load(f)

        # Check if data already exists for this virtual tenant (idempotency)
        from sqlalchemy import select, delete
        existing_check = await db.execute(
            select(Ingredient).where(Ingredient.tenant_id == virtual_uuid).limit(1)
            select(Ingredient).where(Ingredient.tenant_id == virtual_tenant_id).limit(1)
        )
        existing_ingredient = existing_check.scalars().first()
        existing_ingredient = existing_check.scalar_one_or_none()

        if existing_ingredient:
            logger.warning(
                "Data already exists for virtual tenant - cleaning before re-clone",
                virtual_tenant_id=virtual_tenant_id,
                base_tenant_id=base_tenant_id
            )
            # Clean up existing data first to ensure fresh clone
            from sqlalchemy import delete

            await db.execute(
                delete(StockMovement).where(StockMovement.tenant_id == virtual_uuid)
            )
            await db.execute(
                delete(Stock).where(Stock.tenant_id == virtual_uuid)
            )
            await db.execute(
                delete(Ingredient).where(Ingredient.tenant_id == virtual_uuid)
            )
            await db.commit()

            logger.info(
                "Existing data cleaned, proceeding with fresh clone",
                "Demo data already exists, skipping clone",
                virtual_tenant_id=virtual_tenant_id
            )
            return {
                "status": "skipped",
                "reason": "Data already exists",
                "records_cloned": 0
            }

        # Track cloning statistics
        stats = {
            "ingredients": 0,
            "stock_batches": 0,
            "stock_movements": 0,
            "alerts_generated": 0
        }

        # Mapping from base ingredient ID to virtual ingredient ID
        ingredient_id_mapping = {}
        # Mapping from base stock ID to virtual stock ID
        stock_id_mapping = {}

        # Clone Ingredients
        result = await db.execute(
            select(Ingredient).where(Ingredient.tenant_id == base_uuid)
        )
        base_ingredients = result.scalars().all()

        logger.info(
            "Found ingredients to clone",
            count=len(base_ingredients),
            base_tenant=str(base_uuid)
        )

        for ingredient in base_ingredients:
            # Transform ingredient ID using XOR to ensure consistency across services
            # This formula matches the suppliers service ID transformation
            # Formula: virtual_ingredient_id = virtual_tenant_id XOR base_ingredient_id

            base_ingredient_int = int(ingredient.id.hex, 16)
            virtual_tenant_int = int(virtual_uuid.hex, 16)
            base_tenant_int = int(base_uuid.hex, 16)

            # Reverse the original XOR to get the base ingredient ID
            # base_ingredient = base_tenant ^ base_ingredient_id
            # So: base_ingredient_id = base_tenant ^ base_ingredient
            base_ingredient_id_int = base_tenant_int ^ base_ingredient_int

            # Now apply virtual tenant XOR to get the new ingredient ID
            new_ingredient_id = uuid.UUID(int=virtual_tenant_int ^ base_ingredient_id_int)
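            # Illustrative sketch (not part of this diff): the XOR transform above is
            # self-inverse, so the same helper can map base -> virtual and back.
            # The helper name below is hypothetical.
            #
            #   def xor_transform(entity_id: uuid.UUID, tenant_id: uuid.UUID) -> uuid.UUID:
            #       """XOR a 128-bit entity ID with a tenant ID; applying it twice
            #       with the same tenant returns the original ID."""
            #       return uuid.UUID(int=int(entity_id.hex, 16) ^ int(tenant_id.hex, 16))
            #
            #   assert xor_transform(xor_transform(some_id, tenant), tenant) == some_id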

            logger.debug(
                "Transforming ingredient ID using XOR",
                base_ingredient_id=str(ingredient.id),
                new_ingredient_id=str(new_ingredient_id),
                ingredient_sku=ingredient.sku,
                ingredient_name=ingredient.name
            )

            new_ingredient = Ingredient(
                id=new_ingredient_id,
                tenant_id=virtual_uuid,
                name=ingredient.name,
                sku=ingredient.sku,
                barcode=ingredient.barcode,
                product_type=ingredient.product_type,
                ingredient_category=ingredient.ingredient_category,
                product_category=ingredient.product_category,
                subcategory=ingredient.subcategory,
                description=ingredient.description,
                brand=ingredient.brand,
                unit_of_measure=ingredient.unit_of_measure,
                package_size=ingredient.package_size,
                average_cost=ingredient.average_cost,
                last_purchase_price=ingredient.last_purchase_price,
                standard_cost=ingredient.standard_cost,
                low_stock_threshold=ingredient.low_stock_threshold,
                reorder_point=ingredient.reorder_point,
                reorder_quantity=ingredient.reorder_quantity,
                max_stock_level=ingredient.max_stock_level,
                shelf_life_days=ingredient.shelf_life_days,
                display_life_hours=ingredient.display_life_hours,
                best_before_hours=ingredient.best_before_hours,
                storage_instructions=ingredient.storage_instructions,
                is_perishable=ingredient.is_perishable,
                is_active=ingredient.is_active,
                allergen_info=ingredient.allergen_info,
                nutritional_info=ingredient.nutritional_info
            )
            db.add(new_ingredient)
            stats["ingredients"] += 1

            # Store mapping for stock cloning
            ingredient_id_mapping[ingredient.id] = new_ingredient_id

        await db.flush()  # Ensure ingredients are persisted before stock

        # Clone Stock batches with date adjustment
        result = await db.execute(
            select(Stock).where(Stock.tenant_id == base_uuid)
        )
        base_stocks = result.scalars().all()

        logger.info(
            "Found stock batches to clone",
            count=len(base_stocks),
            base_tenant=str(base_uuid)
        )

        for stock in base_stocks:
            # Map ingredient ID
            new_ingredient_id = ingredient_id_mapping.get(stock.ingredient_id)
            if not new_ingredient_id:
                logger.warning(
                    "Stock references non-existent ingredient, skipping",
                    stock_id=str(stock.id),
                    ingredient_id=str(stock.ingredient_id)
        # Transform and insert data
        records_cloned = 0

        # Clone ingredients
        for ingredient_data in seed_data.get('ingredients', []):
            # Transform ID
            from shared.utils.demo_id_transformer import transform_id
            try:
                ingredient_uuid = UUID(ingredient_data['id'])
                tenant_uuid = UUID(virtual_tenant_id)
                transformed_id = transform_id(ingredient_data['id'], tenant_uuid)
            except ValueError as e:
                logger.error("Failed to parse UUIDs for ID transformation",
                             ingredient_id=ingredient_data['id'],
                             virtual_tenant_id=virtual_tenant_id,
                             error=str(e))
                raise HTTPException(
                    status_code=400,
                    detail=f"Invalid UUID format in ingredient data: {str(e)}"
                )
                continue

            # Adjust dates relative to session creation
            adjusted_expiration = adjust_date_for_demo(
                stock.expiration_date,
                session_time,
                BASE_REFERENCE_DATE

            # Transform dates
            from shared.utils.demo_dates import adjust_date_for_demo
            for date_field in ['expiration_date', 'received_date', 'created_at', 'updated_at']:
                if date_field in ingredient_data:
                    try:
                        date_value = ingredient_data[date_field]
                        # Handle both string dates and date objects
                        if isinstance(date_value, str):
                            original_date = datetime.fromisoformat(date_value)
                        elif hasattr(date_value, 'isoformat'):
                            # Already a date/datetime object
                            original_date = date_value
                        else:
                            # Skip if not a valid date format
                            logger.warning("Skipping invalid date format",
                                           date_field=date_field,
                                           date_value=date_value)
                            continue

                        adjusted_date = adjust_date_for_demo(
                            original_date,
                            session_created_at_parsed
                        )
                        ingredient_data[date_field] = adjusted_date
                    except (ValueError, AttributeError) as e:
                        logger.warning("Failed to parse date, skipping",
                                       date_field=date_field,
                                       date_value=ingredient_data[date_field],
                                       error=str(e))
                        # Remove invalid date to avoid model errors
                        ingredient_data.pop(date_field, None)
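            # Illustrative sketch (an assumption, not from this diff): adjust_date_for_demo
            # plausibly shifts a seeded date by the gap between the demo session start and
            # a fixed reference date, preserving relative timelines. A default third
            # argument would reconcile the two-argument and three-argument call sites
            # seen in this file; the real shared.utils.demo_dates code may differ.
            #
            #   def adjust_date_for_demo(original, session_time, base_reference=None):
            #       """Shift `original` so seeded dates stay realistic at session start."""
            #       if original is None:
            #           return None
            #       reference = base_reference or BASE_REFERENCE_DATE
            #       return original + (session_time - reference)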

            # Map category field to ingredient_category enum
            if 'category' in ingredient_data:
                category_value = ingredient_data.pop('category')
                # Convert category string to IngredientCategory enum
                from app.models.inventory import IngredientCategory
                try:
                    ingredient_data['ingredient_category'] = IngredientCategory[category_value.upper()]
                except KeyError:
                    # If category not found in enum, use OTHER
                    ingredient_data['ingredient_category'] = IngredientCategory.OTHER

            # Map unit_of_measure string to enum
            if 'unit_of_measure' in ingredient_data:
                from app.models.inventory import UnitOfMeasure
                unit_mapping = {
                    'kilograms': UnitOfMeasure.KILOGRAMS,
                    'grams': UnitOfMeasure.GRAMS,
                    'liters': UnitOfMeasure.LITERS,
                    'milliliters': UnitOfMeasure.MILLILITERS,
                    'units': UnitOfMeasure.UNITS,
                    'pieces': UnitOfMeasure.PIECES,
                    'packages': UnitOfMeasure.PACKAGES,
                    'bags': UnitOfMeasure.BAGS,
                    'boxes': UnitOfMeasure.BOXES
                }

                unit_str = ingredient_data['unit_of_measure']
                if unit_str in unit_mapping:
                    ingredient_data['unit_of_measure'] = unit_mapping[unit_str]
                else:
                    # Default to units if not found
                    ingredient_data['unit_of_measure'] = UnitOfMeasure.UNITS
                    logger.warning("Unknown unit_of_measure, defaulting to UNITS",
                                   original_unit=unit_str)

            # Note: All seed data fields now match the model schema exactly
            # No field filtering needed

            # Remove original id and tenant_id from ingredient_data to avoid conflict
            ingredient_data.pop('id', None)
            ingredient_data.pop('tenant_id', None)

            # Create ingredient
            ingredient = Ingredient(
                id=str(transformed_id),
                tenant_id=str(virtual_tenant_id),
                **ingredient_data
            )
            adjusted_received = adjust_date_for_demo(
                stock.received_date,
                session_time,
                BASE_REFERENCE_DATE
            db.add(ingredient)
            records_cloned += 1

        # Clone stock batches
        for stock_data in seed_data.get('stock_batches', []):
            # Transform ID - handle both UUID and string IDs
            from shared.utils.demo_id_transformer import transform_id
            try:
                # Try to parse as UUID first
                stock_uuid = UUID(stock_data['id'])
                tenant_uuid = UUID(virtual_tenant_id)
                transformed_id = transform_id(stock_data['id'], tenant_uuid)
            except ValueError:
                # If not a UUID, generate a deterministic UUID from the string ID
                import hashlib
                stock_id_string = stock_data['id']
                tenant_uuid = UUID(virtual_tenant_id)

                # Create a deterministic UUID from the string ID and tenant ID
                combined = f"{stock_id_string}-{tenant_uuid}"
                hash_obj = hashlib.sha256(combined.encode('utf-8'))
                transformed_id = UUID(hash_obj.hexdigest()[:32])

                logger.info("Generated UUID for non-UUID stock ID",
                            original_id=stock_id_string,
                            generated_id=str(transformed_id))
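                # Worked example (illustrative): a sha256 hexdigest is 64 hex characters,
                # and the first 32 hex characters are exactly the 128 bits UUID() expects,
                # so the fallback is deterministic for a given (string ID, tenant) pair:
                #
                #   import hashlib
                #   from uuid import UUID
                #   digest = hashlib.sha256(b"BATCH-001-a1b2c3d4").hexdigest()
                #   UUID(digest[:32])  # same input always yields the same UUID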

            # Transform dates - handle both timestamp dictionaries and ISO strings
            for date_field in ['received_date', 'expiration_date', 'best_before_date', 'original_expiration_date', 'transformation_date', 'final_expiration_date', 'created_at', 'updated_at']:
                if date_field in stock_data:
                    try:
                        date_value = stock_data[date_field]

                        # Handle timestamp dictionaries (offset_days, hour, minute)
                        if isinstance(date_value, dict) and 'offset_days' in date_value:
                            from shared.utils.demo_dates import calculate_demo_datetime
                            original_date = calculate_demo_datetime(
                                offset_days=date_value.get('offset_days', 0),
                                hour=date_value.get('hour', 0),
                                minute=date_value.get('minute', 0),
                                session_created_at=session_created_at_parsed
                            )
                        elif isinstance(date_value, str):
                            # ISO string
                            original_date = datetime.fromisoformat(date_value)
                        elif hasattr(date_value, 'isoformat'):
                            # Already a date/datetime object
                            original_date = date_value
                        else:
                            # Skip if not a valid date format
                            logger.warning("Skipping invalid date format",
                                           date_field=date_field,
                                           date_value=date_value)
                            continue

                        adjusted_stock_date = adjust_date_for_demo(
                            original_date,
                            session_created_at_parsed
                        )
                        stock_data[date_field] = adjusted_stock_date
                    except (ValueError, AttributeError) as e:
                        logger.warning("Failed to parse date, skipping",
                                       date_field=date_field,
                                       date_value=stock_data[date_field],
                                       error=str(e))
                        # Remove invalid date to avoid model errors
                        stock_data.pop(date_field, None)
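            # Example seed entry (illustrative) showing the two date encodings the loop
            # above accepts -- a relative timestamp dictionary and an ISO string:
            #
            #   {
            #       "batch_number": "BATCH-001",
            #       "received_date": {"offset_days": -3, "hour": 8, "minute": 30},
            #       "expiration_date": "2025-01-15T00:00:00"
            #   }
            #
            # The offset form resolves relative to the session creation time, so a
            # freshly cloned demo always has batches received "3 days ago".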

            # Remove original id and tenant_id from stock_data to avoid conflict
            stock_data.pop('id', None)
            stock_data.pop('tenant_id', None)

            # Create stock batch
            stock = Stock(
                id=str(transformed_id),
                tenant_id=str(virtual_tenant_id),
                **stock_data
            )
            adjusted_best_before = adjust_date_for_demo(
                stock.best_before_date,
                session_time,
                BASE_REFERENCE_DATE
            )
            adjusted_created = adjust_date_for_demo(
                stock.created_at,
                session_time,
                BASE_REFERENCE_DATE
            ) or session_time
            db.add(stock)
            records_cloned += 1

            # Create new stock batch with new ID
            new_stock_id = uuid.uuid4()

            new_stock = Stock(
                id=new_stock_id,
                tenant_id=virtual_uuid,
                ingredient_id=new_ingredient_id,
                supplier_id=stock.supplier_id,
                batch_number=stock.batch_number,
                lot_number=stock.lot_number,
                supplier_batch_ref=stock.supplier_batch_ref,
                production_stage=stock.production_stage,
                current_quantity=stock.current_quantity,
                reserved_quantity=stock.reserved_quantity,
                available_quantity=stock.available_quantity,
                received_date=adjusted_received,
                expiration_date=adjusted_expiration,
                best_before_date=adjusted_best_before,
                unit_cost=stock.unit_cost,
                total_cost=stock.total_cost,
                storage_location=stock.storage_location,
                warehouse_zone=stock.warehouse_zone,
                shelf_position=stock.shelf_position,
                requires_refrigeration=stock.requires_refrigeration,
                requires_freezing=stock.requires_freezing,
                storage_temperature_min=stock.storage_temperature_min,
                storage_temperature_max=stock.storage_temperature_max,
                storage_humidity_max=stock.storage_humidity_max,
                shelf_life_days=stock.shelf_life_days,
                storage_instructions=stock.storage_instructions,
                is_available=stock.is_available,
                is_expired=stock.is_expired,
                quality_status=stock.quality_status,
                created_at=adjusted_created,
                updated_at=session_time
            )
            db.add(new_stock)
            stats["stock_batches"] += 1

            # Store mapping for movement cloning
            stock_id_mapping[stock.id] = new_stock_id

        await db.flush()  # Ensure stock is persisted before movements

        # Clone Stock Movements with date adjustment
        result = await db.execute(
            select(StockMovement).where(StockMovement.tenant_id == base_uuid)
        )
        base_movements = result.scalars().all()

        logger.info(
            "Found stock movements to clone",
            count=len(base_movements),
            base_tenant=str(base_uuid)
        )

        for movement in base_movements:
            # Map ingredient ID and stock ID
            new_ingredient_id = ingredient_id_mapping.get(movement.ingredient_id)
            new_stock_id = stock_id_mapping.get(movement.stock_id) if movement.stock_id else None

            if not new_ingredient_id:
                logger.warning(
                    "Movement references non-existent ingredient, skipping",
                    movement_id=str(movement.id),
                    ingredient_id=str(movement.ingredient_id)
                )
                continue

            # Adjust movement date relative to session creation
            adjusted_movement_date = adjust_date_for_demo(
                movement.movement_date,
                session_time,
                BASE_REFERENCE_DATE
            ) or session_time

            adjusted_created_at = adjust_date_for_demo(
                movement.created_at,
                session_time,
                BASE_REFERENCE_DATE
            ) or session_time

            # Create new stock movement
            new_movement = StockMovement(
                id=uuid.uuid4(),
                tenant_id=virtual_uuid,
                ingredient_id=new_ingredient_id,
                stock_id=new_stock_id,
                movement_type=movement.movement_type,
                quantity=movement.quantity,
                unit_cost=movement.unit_cost,
                total_cost=movement.total_cost,
                quantity_before=movement.quantity_before,
                quantity_after=movement.quantity_after,
                reference_number=movement.reference_number,
                supplier_id=movement.supplier_id,
                notes=movement.notes,
                reason_code=movement.reason_code,
                movement_date=adjusted_movement_date,
                created_at=adjusted_created_at,
                created_by=movement.created_by
            )
            db.add(new_movement)
            stats["stock_movements"] += 1

        # Commit all changes
        await db.commit()

        # NOTE: Alert generation removed - alerts are now generated automatically by the
        # inventory_alert_service which runs scheduled checks every 2-5 minutes.
        # This eliminates duplicate alerts and provides a more realistic demo experience.
        stats["alerts_generated"] = 0

        total_records = stats["ingredients"] + stats["stock_batches"]
        duration_ms = int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000)
        duration_ms = int((datetime.now() - start_time).total_seconds() * 1000)

        logger.info(
            "Inventory data cloning completed with date adjustment",
            "Inventory data cloned successfully",
            virtual_tenant_id=virtual_tenant_id,
            total_records=total_records,
            stats=stats,
            duration_ms=duration_ms
            records_cloned=records_cloned,
            duration_ms=duration_ms,
            ingredients_cloned=len(seed_data.get('ingredients', [])),
            stock_batches_cloned=len(seed_data.get('stock_batches', []))
        )

        return {
            "service": "inventory",
            "status": "completed",
            "records_cloned": total_records,
            "records_cloned": records_cloned,
            "duration_ms": duration_ms,
            "details": stats
            "details": {
                "ingredients": len(seed_data.get('ingredients', [])),
                "stock_batches": len(seed_data.get('stock_batches', [])),
                "virtual_tenant_id": str(virtual_tenant_id)
            }
        }

    except ValueError as e:
        logger.error("Invalid UUID format", error=str(e))
        logger.error("Invalid UUID format", error=str(e), virtual_tenant_id=virtual_tenant_id)
        raise HTTPException(status_code=400, detail=f"Invalid UUID: {str(e)}")

    except Exception as e:
@@ -411,7 +400,7 @@ async def clone_demo_data(
        "service": "inventory",
        "status": "failed",
        "records_cloned": 0,
        "duration_ms": int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000),
        "duration_ms": int((datetime.now() - start_time).total_seconds() * 1000),
        "error": str(e)
    }

@@ -430,101 +419,68 @@ async def clone_health_check(_: bool = Depends(verify_internal_api_key)):


@router.delete("/tenant/{virtual_tenant_id}")
async def delete_demo_data(
    virtual_tenant_id: str,
async def delete_demo_tenant_data(
    virtual_tenant_id: UUID,
    db: AsyncSession = Depends(get_db),
    _: bool = Depends(verify_internal_api_key)
):
    """
    Delete all inventory data for a virtual demo tenant

    Called by demo session cleanup service to remove ephemeral data
    when demo sessions expire or are destroyed.

    Args:
        virtual_tenant_id: Virtual tenant UUID to delete

    Returns:
        Deletion status and count of records deleted
    Delete all demo data for a virtual tenant.
    This endpoint is idempotent - safe to call multiple times.
    """
    from sqlalchemy import delete

    logger.info(
        "Deleting inventory data for virtual tenant",
        virtual_tenant_id=virtual_tenant_id
    )

    start_time = datetime.now(timezone.utc)
    start_time = datetime.now()

    records_deleted = {
        "ingredients": 0,
        "stock": 0,
        "total": 0
    }

    try:
        virtual_uuid = uuid.UUID(virtual_tenant_id)
        # Delete in reverse dependency order

        # 1. Delete stock batches (depends on ingredients)
        result = await db.execute(
            delete(Stock)
            .where(Stock.tenant_id == virtual_tenant_id)
        )
        records_deleted["stock"] = result.rowcount

        # Count records before deletion for reporting
        stock_count = await db.scalar(
            select(func.count(Stock.id)).where(Stock.tenant_id == virtual_uuid)
        )
        ingredient_count = await db.scalar(
            select(func.count(Ingredient.id)).where(Ingredient.tenant_id == virtual_uuid)
        )
        movement_count = await db.scalar(
            select(func.count(StockMovement.id)).where(StockMovement.tenant_id == virtual_uuid)
        # 2. Delete ingredients
        result = await db.execute(
            delete(Ingredient)
            .where(Ingredient.tenant_id == virtual_tenant_id)
        )
        records_deleted["ingredients"] = result.rowcount

        # Delete in correct order to respect foreign key constraints
        # 1. Delete StockMovements (references Stock)
        await db.execute(
            delete(StockMovement).where(StockMovement.tenant_id == virtual_uuid)
        )

        # 2. Delete Stock batches (references Ingredient)
        await db.execute(
            delete(Stock).where(Stock.tenant_id == virtual_uuid)
        )

        # 3. Delete Ingredients
        await db.execute(
            delete(Ingredient).where(Ingredient.tenant_id == virtual_uuid)
        )
        records_deleted["total"] = sum(records_deleted.values())

        await db.commit()

        duration_ms = int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000)

        logger.info(
            "Inventory data deleted successfully",
            virtual_tenant_id=virtual_tenant_id,
            stocks_deleted=stock_count,
            ingredients_deleted=ingredient_count,
            movements_deleted=movement_count,
            duration_ms=duration_ms
            "demo_data_deleted",
            service="inventory",
            virtual_tenant_id=str(virtual_tenant_id),
            records_deleted=records_deleted
        )

        return {
            "service": "inventory",
            "status": "deleted",
            "virtual_tenant_id": virtual_tenant_id,
            "records_deleted": {
                "stock_batches": stock_count,
                "ingredients": ingredient_count,
                "stock_movements": movement_count,
                "total": stock_count + ingredient_count + movement_count
            },
            "duration_ms": duration_ms
            "virtual_tenant_id": str(virtual_tenant_id),
            "records_deleted": records_deleted,
            "duration_ms": int((datetime.now() - start_time).total_seconds() * 1000)
        }

    except ValueError as e:
        logger.error("Invalid UUID format", error=str(e))
        raise HTTPException(status_code=400, detail=f"Invalid UUID: {str(e)}")

    except Exception as e:
        logger.error(
            "Failed to delete inventory data",
            virtual_tenant_id=virtual_tenant_id,
            error=str(e),
            exc_info=True
        )
        await db.rollback()
        logger.error(
            "demo_data_deletion_failed",
            service="inventory",
            virtual_tenant_id=str(virtual_tenant_id),
            error=str(e)
        )
        raise HTTPException(
            status_code=500,
            detail=f"Failed to delete inventory data: {str(e)}"
        )
            detail=f"Failed to delete demo data: {str(e)}"
        )
@@ -319,3 +319,89 @@ async def ml_insights_health():
services/inventory/app/api/ml_insights.py

            "POST /ml/insights/optimize-safety-stock"
        ]
    }


# ================================================================
# INTERNAL ENDPOINTS (for demo-session service)
# ================================================================

from fastapi import Request

# Create a separate router for internal endpoints to avoid the tenant prefix
internal_router = APIRouter(
    tags=["ML Insights - Internal"]
)


@internal_router.post("/api/v1/tenants/{tenant_id}/inventory/internal/ml/generate-safety-stock-insights")
async def generate_safety_stock_insights_internal(
    tenant_id: str,
    request: Request,
    db: AsyncSession = Depends(get_db)
):
    """
    Internal endpoint to trigger safety stock insights generation for demo sessions.

    This endpoint is called by the demo-session service after cloning data.
    It uses the same ML logic as the public endpoint but with optimized defaults.

    Security: Protected by X-Internal-Service header check.

    Args:
        tenant_id: The tenant UUID
        request: FastAPI request object
        db: Database session

    Returns:
        {
            "insights_posted": int,
            "tenant_id": str,
            "status": str
        }
    """
    # Verify internal service header
    if not request or request.headers.get("X-Internal-Service") not in ["demo-session", "internal"]:
        logger.warning("Unauthorized internal API call", tenant_id=tenant_id)
        raise HTTPException(
            status_code=403,
            detail="This endpoint is for internal service use only"
        )

    logger.info("Internal safety stock insights generation triggered", tenant_id=tenant_id)

    try:
        # Use the existing safety stock optimization logic with sensible defaults
        request_data = SafetyStockOptimizationRequest(
            product_ids=None,  # Analyze all products
            lookback_days=90,  # 3 months of history
            min_history_days=30  # Minimum 30 days required
        )

        # Call the existing safety stock optimization endpoint logic
        result = await trigger_safety_stock_optimization(
            tenant_id=tenant_id,
            request_data=request_data,
            db=db
        )

        # Return simplified response for internal use
        return {
            "insights_posted": result.total_insights_posted,
            "tenant_id": tenant_id,
            "status": "success" if result.success else "failed",
            "message": result.message,
            "products_optimized": result.products_optimized,
            "total_cost_savings": result.total_cost_savings
        }

    except Exception as e:
        logger.error(
            "Internal safety stock insights generation failed",
            tenant_id=tenant_id,
            error=str(e),
            exc_info=True
        )
        raise HTTPException(
            status_code=500,
            detail=f"Internal safety stock insights generation failed: {str(e)}"
        )

@@ -11,12 +11,14 @@ from sqlalchemy import text
services/inventory/app/main.py

from app.core.config import settings
from app.core.database import database_manager
from app.services.inventory_alert_service import InventoryAlertService
from app.services.inventory_scheduler import InventoryScheduler
from app.consumers.delivery_event_consumer import DeliveryEventConsumer
from shared.service_base import StandardFastAPIService
from shared.messaging import UnifiedEventPublisher
import asyncio

from app.api import (
    internal_demo,
    batch,
    ingredients,
    stock_entries,
@@ -29,10 +31,11 @@ from app.api import (
    dashboard,
    analytics,
    sustainability,
    internal_demo,
    audit,
    ml_insights
)
from app.api.internal_alert_trigger import router as internal_alert_trigger_router
from app.api.internal_demo import router as internal_demo_router


class InventoryService(StandardFastAPIService):
@@ -115,8 +118,14 @@ class InventoryService(StandardFastAPIService):
            await alert_service.start()
            self.logger.info("Inventory alert service started")

            # Store alert service in app state
            # Initialize inventory scheduler with alert service and database manager
            inventory_scheduler = InventoryScheduler(alert_service, self.database_manager)
            await inventory_scheduler.start()
            self.logger.info("Inventory scheduler started")

            # Store services in app state
            app.state.alert_service = alert_service
            app.state.inventory_scheduler = inventory_scheduler  # Store scheduler for manual triggering
        else:
            self.logger.error("Event publisher not initialized, alert service unavailable")

@@ -136,6 +145,11 @@ class InventoryService(StandardFastAPIService):

    async def on_shutdown(self, app: FastAPI):
        """Custom shutdown logic for inventory service"""
        # Stop inventory scheduler
        if hasattr(app.state, 'inventory_scheduler') and app.state.inventory_scheduler:
            await app.state.inventory_scheduler.stop()
            self.logger.info("Inventory scheduler stopped")

        # Cancel delivery consumer task
        if self.delivery_consumer_task and not self.delivery_consumer_task.done():
            self.delivery_consumer_task.cancel()
@@ -198,8 +212,10 @@ service.add_router(food_safety_operations.router)
service.add_router(dashboard.router)
service.add_router(analytics.router)
service.add_router(sustainability.router)
service.add_router(internal_demo.router)
service.add_router(internal_demo.router, tags=["internal-demo"])
service.add_router(ml_insights.router)  # ML insights endpoint
service.add_router(ml_insights.internal_router)  # Internal ML insights endpoint for demo cloning
service.add_router(internal_alert_trigger_router)  # Internal alert trigger for demo cloning


if __name__ == "__main__":
@@ -211,4 +227,4 @@ if __name__ == "__main__":
        port=8000,
        reload=os.getenv("RELOAD", "false").lower() == "true",
        log_level="info"
    )
    )

@@ -277,3 +277,22 @@ class FoodSafetyRepository:
        except Exception as e:
            logger.error("Failed to validate ingredient", error=str(e))
            raise

    async def mark_temperature_alert_triggered(self, log_id: UUID) -> None:
        """
        Mark a temperature log as having triggered an alert
        """
        try:
            query = text("""
                UPDATE temperature_logs
                SET alert_triggered = true
                WHERE id = :id
            """)

            await self.session.execute(query, {"id": log_id})
            await self.session.commit()

        except Exception as e:
            await self.session.rollback()
            logger.error("Failed to mark temperature alert", error=str(e), log_id=str(log_id))
            raise

@@ -1,301 +0,0 @@
# services/inventory/app/repositories/inventory_alert_repository.py
"""
Inventory Alert Repository
Data access layer for inventory alert detection and analysis
"""

from typing import List, Dict, Any
from uuid import UUID
from sqlalchemy import text
from sqlalchemy.ext.asyncio import AsyncSession
import structlog

logger = structlog.get_logger()


class InventoryAlertRepository:
    """Repository for inventory alert data access"""

    def __init__(self, session: AsyncSession):
        self.session = session

    async def get_stock_issues(self, tenant_id: UUID) -> List[Dict[str, Any]]:
        """
        Get stock level issues with CTE analysis
        Returns list of critical, low, and overstock situations
        """
        try:
            query = text("""
                WITH stock_analysis AS (
                    SELECT
                        i.id, i.name, i.tenant_id,
                        COALESCE(SUM(s.current_quantity), 0) as current_stock,
                        i.low_stock_threshold as minimum_stock,
                        i.max_stock_level as maximum_stock,
                        i.reorder_point,
                        0 as tomorrow_needed,
                        0 as avg_daily_usage,
                        7 as lead_time_days,
                        CASE
                            WHEN COALESCE(SUM(s.current_quantity), 0) < i.low_stock_threshold THEN 'critical'
                            WHEN COALESCE(SUM(s.current_quantity), 0) < i.low_stock_threshold * 1.2 THEN 'low'
                            WHEN i.max_stock_level IS NOT NULL AND COALESCE(SUM(s.current_quantity), 0) > i.max_stock_level THEN 'overstock'
                            ELSE 'normal'
                        END as status,
                        GREATEST(0, i.low_stock_threshold - COALESCE(SUM(s.current_quantity), 0)) as shortage_amount
                    FROM ingredients i
                    LEFT JOIN stock s ON s.ingredient_id = i.id AND s.is_available = true
                    WHERE i.tenant_id = :tenant_id AND i.is_active = true
                    GROUP BY i.id, i.name, i.tenant_id, i.low_stock_threshold, i.max_stock_level, i.reorder_point
                )
                SELECT * FROM stock_analysis WHERE status != 'normal'
                ORDER BY
                    CASE status
                        WHEN 'critical' THEN 1
                        WHEN 'low' THEN 2
                        WHEN 'overstock' THEN 3
                    END,
                    shortage_amount DESC
            """)

            result = await self.session.execute(query, {"tenant_id": tenant_id})
            return [dict(row._mapping) for row in result.fetchall()]

        except Exception as e:
            logger.error("Failed to get stock issues", error=str(e), tenant_id=str(tenant_id))
            raise

    async def get_expiring_products(self, tenant_id: UUID, days_threshold: int = 7) -> List[Dict[str, Any]]:
        """
        Get products expiring soon or already expired
        """
        try:
            query = text("""
                SELECT
                    i.id as ingredient_id,
                    i.name as ingredient_name,
                    s.id as stock_id,
                    s.batch_number,
                    s.expiration_date,
                    s.current_quantity,
                    i.unit_of_measure,
                    s.unit_cost,
                    (s.current_quantity * s.unit_cost) as total_value,
                    CASE
                        WHEN s.expiration_date < CURRENT_DATE THEN 'expired'
                        WHEN s.expiration_date <= CURRENT_DATE + INTERVAL '1 day' THEN 'expires_today'
                        WHEN s.expiration_date <= CURRENT_DATE + INTERVAL '3 days' THEN 'expires_soon'
                        ELSE 'warning'
                    END as urgency,
                    EXTRACT(DAY FROM (s.expiration_date - CURRENT_DATE)) as days_until_expiry
                FROM stock s
                JOIN ingredients i ON s.ingredient_id = i.id
                WHERE i.tenant_id = :tenant_id
                    AND s.is_available = true
                    AND s.expiration_date <= CURRENT_DATE + (INTERVAL '1 day' * :days_threshold)
                ORDER BY s.expiration_date ASC, total_value DESC
            """)

            result = await self.session.execute(query, {
                "tenant_id": tenant_id,
                "days_threshold": days_threshold
            })
            return [dict(row._mapping) for row in result.fetchall()]

        except Exception as e:
            logger.error("Failed to get expiring products", error=str(e), tenant_id=str(tenant_id))
            raise

    async def get_temperature_breaches(self, tenant_id: UUID, hours_back: int = 24) -> List[Dict[str, Any]]:
        """
        Get temperature monitoring breaches
        """
        try:
            query = text("""
                SELECT
                    tl.id,
                    tl.equipment_id,
                    tl.equipment_name,
                    tl.storage_type,
                    tl.temperature_celsius,
                    tl.min_threshold,
                    tl.max_threshold,
                    tl.is_within_range,
                    tl.recorded_at,
                    tl.alert_triggered,
                    EXTRACT(EPOCH FROM (NOW() - tl.recorded_at))/3600 as hours_ago,
                    CASE
                        WHEN tl.temperature_celsius < tl.min_threshold
                            THEN tl.min_threshold - tl.temperature_celsius
                        WHEN tl.temperature_celsius > tl.max_threshold
                            THEN tl.temperature_celsius - tl.max_threshold
                        ELSE 0
                    END as deviation
                FROM temperature_logs tl
                WHERE tl.tenant_id = :tenant_id
                    AND tl.is_within_range = false
                    AND tl.recorded_at > NOW() - (INTERVAL '1 hour' * :hours_back)
                    AND tl.alert_triggered = false
                ORDER BY deviation DESC, tl.recorded_at DESC
            """)

            result = await self.session.execute(query, {
                "tenant_id": tenant_id,
                "hours_back": hours_back
            })
            return [dict(row._mapping) for row in result.fetchall()]

        except Exception as e:
            logger.error("Failed to get temperature breaches", error=str(e), tenant_id=str(tenant_id))
            raise

    async def mark_temperature_alert_triggered(self, log_id: UUID) -> None:
        """
        Mark a temperature log as having triggered an alert
        """
        try:
            query = text("""
                UPDATE temperature_logs
                SET alert_triggered = true
                WHERE id = :id
            """)

            await self.session.execute(query, {"id": log_id})
            await self.session.commit()

        except Exception as e:
            logger.error("Failed to mark temperature alert", error=str(e), log_id=str(log_id))
            raise

    async def get_waste_opportunities(self, tenant_id: UUID) -> List[Dict[str, Any]]:
        """
        Identify waste reduction opportunities
        """
        try:
            query = text("""
                WITH waste_analysis AS (
                    SELECT
                        i.id as ingredient_id,
                        i.name as ingredient_name,
                        i.ingredient_category,
                        COUNT(sm.id) as waste_incidents,
                        SUM(sm.quantity) as total_waste_quantity,
                        SUM(sm.total_cost) as total_waste_cost,
                        AVG(sm.quantity) as avg_waste_per_incident,
                        MAX(sm.movement_date) as last_waste_date
                    FROM stock_movements sm
                    JOIN ingredients i ON sm.ingredient_id = i.id
                    WHERE i.tenant_id = :tenant_id
                        AND sm.movement_type = 'WASTE'
                        AND sm.movement_date > NOW() - INTERVAL '30 days'
                    GROUP BY i.id, i.name, i.ingredient_category
                    HAVING COUNT(sm.id) >= 3 OR SUM(sm.total_cost) > 50
                )
                SELECT * FROM waste_analysis
                ORDER BY total_waste_cost DESC, waste_incidents DESC
                LIMIT 20
            """)

            result = await self.session.execute(query, {"tenant_id": tenant_id})
            return [dict(row._mapping) for row in result.fetchall()]

        except Exception as e:
            logger.error("Failed to get waste opportunities", error=str(e), tenant_id=str(tenant_id))
            raise

    async def get_reorder_recommendations(self, tenant_id: UUID) -> List[Dict[str, Any]]:
        """
        Get ingredients that need reordering based on stock levels and usage
        """
        try:
            query = text("""
                WITH usage_analysis AS (
                    SELECT
                        i.id,
                        i.name,
                        COALESCE(SUM(s.current_quantity), 0) as current_stock,
                        i.reorder_point,
                        i.low_stock_threshold,
                        COALESCE(SUM(sm.quantity) FILTER (WHERE sm.movement_date > NOW() - INTERVAL '7 days'), 0) / 7 as daily_usage,
                        i.preferred_supplier_id,
                        i.standard_order_quantity
                    FROM ingredients i
                    LEFT JOIN stock s ON s.ingredient_id = i.id AND s.is_available = true
                    LEFT JOIN stock_movements sm ON sm.ingredient_id = i.id
                        AND sm.movement_type = 'PRODUCTION_USE'
                        AND sm.movement_date > NOW() - INTERVAL '7 days'
                    WHERE i.tenant_id = :tenant_id
                        AND i.is_active = true
                    GROUP BY i.id, i.name, i.reorder_point, i.low_stock_threshold,
                        i.preferred_supplier_id, i.standard_order_quantity
                )
                SELECT *,
                    CASE
                        WHEN daily_usage > 0 THEN FLOOR(current_stock / NULLIF(daily_usage, 0))
                        ELSE 999
                    END as days_of_stock,
                    GREATEST(
                        standard_order_quantity,
                        CEIL(daily_usage * 14)
                    ) as recommended_order_quantity
                FROM usage_analysis
                WHERE current_stock <= reorder_point
                ORDER BY days_of_stock ASC, current_stock ASC
                LIMIT 50
            """)

            result = await self.session.execute(query, {"tenant_id": tenant_id})
            return [dict(row._mapping) for row in result.fetchall()]

        except Exception as e:
            logger.error("Failed to get reorder recommendations", error=str(e), tenant_id=str(tenant_id))
            raise

    async def get_active_tenant_ids(self) -> List[UUID]:
        """
        Get list of active tenant IDs from ingredients table
        """
        try:
            query = text("SELECT DISTINCT tenant_id FROM ingredients WHERE is_active = true")
            result = await self.session.execute(query)

            tenant_ids = []
            for row in result.fetchall():
                tenant_id = row.tenant_id
                # Convert to UUID if it's not already
                if isinstance(tenant_id, UUID):
                    tenant_ids.append(tenant_id)
                else:
                    tenant_ids.append(UUID(str(tenant_id)))
            return tenant_ids

        except Exception as e:
            logger.error("Failed to get active tenant IDs", error=str(e))
            raise

    async def get_stock_after_order(self, ingredient_id: str, order_quantity: float) -> Dict[str, Any]:
        """
        Get stock information after hypothetical order
        """
        try:
            query = text("""
                SELECT i.id, i.name,
                    COALESCE(SUM(s.current_quantity), 0) as current_stock,
                    i.low_stock_threshold as minimum_stock,
                    (COALESCE(SUM(s.current_quantity), 0) - :order_quantity) as remaining
                FROM ingredients i
                LEFT JOIN stock s ON s.ingredient_id = i.id AND s.is_available = true
                WHERE i.id = :ingredient_id
                GROUP BY i.id, i.name, i.low_stock_threshold
            """)

            result = await self.session.execute(query, {
                "ingredient_id": ingredient_id,
                "order_quantity": order_quantity
            })
            row = result.fetchone()
            return dict(row._mapping) if row else None

        except Exception as e:
            logger.error("Failed to get stock after order", error=str(e), ingredient_id=ingredient_id)
            raise
@@ -745,4 +745,176 @@ class StockRepository(BaseRepository[Stock, StockCreate, StockUpdate], BatchCoun
|
||||
error=str(e),
|
||||
stock_id=str(stock_id),
|
||||
tenant_id=str(tenant_id))
|
||||
raise
|
||||
|
||||
async def get_expiring_products(self, tenant_id: UUID, days_threshold: int = 7) -> List[Dict[str, Any]]:
|
||||
"""
|
||||
Get products expiring soon or already expired
|
||||
"""
|
||||
try:
|
||||
from sqlalchemy import text
|
||||
query = text("""
|
||||
SELECT
|
||||
i.id as ingredient_id,
|
||||
i.name as ingredient_name,
|
||||
s.id as stock_id,
|
||||
s.batch_number,
|
||||
s.expiration_date,
|
||||
s.current_quantity,
|
||||
i.unit_of_measure,
|
||||
s.unit_cost,
|
||||
(s.current_quantity * s.unit_cost) as total_value,
|
||||
CASE
|
||||
WHEN s.expiration_date < CURRENT_DATE THEN 'expired'
|
||||
WHEN s.expiration_date <= CURRENT_DATE + INTERVAL '1 day' THEN 'expires_today'
|
||||
WHEN s.expiration_date <= CURRENT_DATE + INTERVAL '3 days' THEN 'expires_soon'
|
||||
ELSE 'warning'
|
||||
END as urgency,
|
||||
EXTRACT(DAY FROM (s.expiration_date - CURRENT_DATE)) as days_until_expiry
|
||||
FROM stock s
|
||||
JOIN ingredients i ON s.ingredient_id = i.id
|
||||
WHERE i.tenant_id = :tenant_id
|
||||
AND s.is_available = true
|
||||
AND s.expiration_date <= CURRENT_DATE + (INTERVAL '1 day' * :days_threshold)
|
||||
ORDER BY s.expiration_date ASC, total_value DESC
|
||||
""")
|
||||
|
||||
result = await self.session.execute(query, {
|
||||
"tenant_id": tenant_id,
|
||||
"days_threshold": days_threshold
|
||||
})
|
||||
return [dict(row._mapping) for row in result.fetchall()]
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Failed to get expiring products", error=str(e), tenant_id=str(tenant_id))
|
||||
raise
|
||||
|
||||
async def get_temperature_breaches(self, tenant_id: UUID, hours_back: int = 24) -> List[Dict[str, Any]]:
|
||||
"""
|
||||
Get temperature monitoring breaches
|
||||
"""
|
||||
try:
|
||||
from sqlalchemy import text
|
||||
query = text("""
|
||||
SELECT
|
||||
tl.id,
|
||||
tl.equipment_id,
|
||||
tl.equipment_name,
|
||||
tl.storage_type,
|
||||
tl.temperature_celsius,
|
||||
tl.min_threshold,
|
||||
tl.max_threshold,
|
||||
tl.is_within_range,
|
||||
tl.recorded_at,
|
||||
tl.alert_triggered,
|
||||
EXTRACT(EPOCH FROM (NOW() - tl.recorded_at))/3600 as hours_ago,
|
||||
CASE
|
||||
WHEN tl.temperature_celsius < tl.min_threshold
|
||||
THEN tl.min_threshold - tl.temperature_celsius
|
||||
WHEN tl.temperature_celsius > tl.max_threshold
|
||||
THEN tl.temperature_celsius - tl.max_threshold
|
||||
ELSE 0
|
||||
END as deviation
|
||||
FROM temperature_logs tl
|
||||
WHERE tl.tenant_id = :tenant_id
|
||||
AND tl.is_within_range = false
|
||||
AND tl.recorded_at > NOW() - (INTERVAL '1 hour' * :hours_back)
|
||||
AND tl.alert_triggered = false
|
||||
ORDER BY deviation DESC, tl.recorded_at DESC
|
||||
""")
|
||||
|
||||
result = await self.session.execute(query, {
|
||||
"tenant_id": tenant_id,
|
||||
"hours_back": hours_back
|
||||
})
|
||||
return [dict(row._mapping) for row in result.fetchall()]
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Failed to get temperature breaches", error=str(e), tenant_id=str(tenant_id))
|
||||
raise

async def get_waste_opportunities(self, tenant_id: UUID) -> List[Dict[str, Any]]:
    """
    Identify waste reduction opportunities
    """
    try:
        from sqlalchemy import text
        query = text("""
            WITH waste_analysis AS (
                SELECT
                    i.id as ingredient_id,
                    i.name as ingredient_name,
                    i.ingredient_category,
                    COUNT(sm.id) as waste_incidents,
                    SUM(sm.quantity) as total_waste_quantity,
                    SUM(sm.total_cost) as total_waste_cost,
                    AVG(sm.quantity) as avg_waste_per_incident,
                    MAX(sm.movement_date) as last_waste_date
                FROM stock_movements sm
                JOIN ingredients i ON sm.ingredient_id = i.id
                WHERE i.tenant_id = :tenant_id
                    AND sm.movement_type = 'WASTE'
                    AND sm.movement_date > NOW() - INTERVAL '30 days'
                GROUP BY i.id, i.name, i.ingredient_category
                HAVING COUNT(sm.id) >= 3 OR SUM(sm.total_cost) > 50
            )
            SELECT * FROM waste_analysis
            ORDER BY total_waste_cost DESC, waste_incidents DESC
            LIMIT 20
        """)

        result = await self.session.execute(query, {"tenant_id": tenant_id})
        return [dict(row._mapping) for row in result.fetchall()]

    except Exception as e:
        logger.error("Failed to get waste opportunities", error=str(e), tenant_id=str(tenant_id))
        raise
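
# Illustrative sketch (not part of the original commit): the HAVING clause above keeps
# an ingredient only if it logged at least 3 waste incidents OR more than 50 (currency
# units) of waste cost in the last 30 days. The same filter in plain Python:
def _qualifies_as_waste_opportunity_example(waste_incidents: int, total_waste_cost: float) -> bool:
    return waste_incidents >= 3 or total_waste_cost > 50
# e.g. (2 incidents, cost 80.0) -> True; (2 incidents, cost 12.5) -> False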

async def get_reorder_recommendations(self, tenant_id: UUID) -> List[Dict[str, Any]]:
    """
    Get ingredients that need reordering based on stock levels and usage
    """
    try:
        from sqlalchemy import text
        query = text("""
            WITH usage_analysis AS (
                SELECT
                    i.id,
                    i.name,
                    COALESCE(SUM(s.current_quantity), 0) as current_stock,
                    i.reorder_point,
                    i.low_stock_threshold,
                    COALESCE(SUM(sm.quantity) FILTER (WHERE sm.movement_date > NOW() - INTERVAL '7 days'), 0) / 7 as daily_usage,
                    i.preferred_supplier_id,
                    i.standard_order_quantity
                FROM ingredients i
                LEFT JOIN stock s ON s.ingredient_id = i.id AND s.is_available = true
                LEFT JOIN stock_movements sm ON sm.ingredient_id = i.id
                    AND sm.movement_type = 'PRODUCTION_USE'
                    AND sm.movement_date > NOW() - INTERVAL '7 days'
                WHERE i.tenant_id = :tenant_id
                    AND i.is_active = true
                GROUP BY i.id, i.name, i.reorder_point, i.low_stock_threshold,
                    i.preferred_supplier_id, i.standard_order_quantity
            )
            SELECT *,
                CASE
                    WHEN daily_usage > 0 THEN FLOOR(current_stock / NULLIF(daily_usage, 0))
                    ELSE 999
                END as days_of_stock,
                GREATEST(
                    standard_order_quantity,
                    CEIL(daily_usage * 14)
                ) as recommended_order_quantity
            FROM usage_analysis
            WHERE current_stock <= reorder_point
            ORDER BY days_of_stock ASC, current_stock ASC
            LIMIT 50
        """)

        result = await self.session.execute(query, {"tenant_id": tenant_id})
        return [dict(row._mapping) for row in result.fetchall()]

    except Exception as e:
        logger.error("Failed to get reorder recommendations", error=str(e), tenant_id=str(tenant_id))
        raise
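
# Illustrative sketch (not part of the original commit): the derived columns above,
# worked through in plain Python with hypothetical numbers. days_of_stock is
# floor(stock / daily usage), and the recommended order covers about 14 days of
# usage, floored at the ingredient's standard order quantity.
import math

def _reorder_example(current_stock: float, daily_usage: float, standard_order_quantity: float):
    days_of_stock = math.floor(current_stock / daily_usage) if daily_usage > 0 else 999
    recommended = max(standard_order_quantity, math.ceil(daily_usage * 14))
    return days_of_stock, recommended
# e.g. _reorder_example(30, 5, 25) -> (6, 70): six days of stock left, order 70 units.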

@@ -12,7 +12,6 @@ from datetime import datetime
import structlog

from shared.messaging import UnifiedEventPublisher, EVENT_TYPES
from app.repositories.inventory_alert_repository import InventoryAlertRepository

logger = structlog.get_logger()

@@ -188,10 +187,9 @@ class InventoryAlertService:

            await self.publisher.publish_alert(
                tenant_id=tenant_id,
                event_type="expired_products",
                event_domain="inventory",
                event_type="inventory.expired_products",
                severity="urgent",
                metadata=metadata
                data=metadata
            )

            logger.info(
@@ -222,10 +220,9 @@ class InventoryAlertService:

            await self.publisher.publish_alert(
                tenant_id=tenant_id,
                event_type="urgent_expiry",
                event_domain="inventory",
                event_type="inventory.urgent_expiry",
                severity="high",
                metadata=metadata
                data=metadata
            )

            logger.info(
@@ -256,10 +253,9 @@ class InventoryAlertService:

            await self.publisher.publish_alert(
                tenant_id=tenant_id,
                event_type="overstock_warning",
                event_domain="inventory",
                event_type="inventory.overstock_warning",
                severity="medium",
                metadata=metadata
                data=metadata
            )

            logger.info(
@@ -287,10 +283,9 @@ class InventoryAlertService:

            await self.publisher.publish_alert(
                tenant_id=tenant_id,
                event_type="expired_batches_auto_processed",
                event_domain="inventory",
                event_type="inventory.expired_batches_auto_processed",
                severity="medium",
                metadata=metadata
                data=metadata
            )

            logger.info(
1046
services/inventory/app/services/inventory_scheduler.py
Normal file
File diff suppressed because it is too large
@@ -16,7 +16,7 @@ from sqlalchemy import text
from sqlalchemy.ext.asyncio import AsyncSession
from app.core.config import settings
from app.repositories.stock_movement_repository import StockMovementRepository
from app.repositories.inventory_alert_repository import InventoryAlertRepository
from app.repositories.food_safety_repository import FoodSafetyRepository
from shared.clients.production_client import create_production_client

logger = structlog.get_logger()

@@ -320,9 +320,9 @@ class SustainabilityService:
            'damaged_inventory': inventory_waste * 0.3,  # Estimate: 30% damaged
        }

        # Get waste incidents from inventory alert repository
        alert_repo = InventoryAlertRepository(db)
        waste_opportunities = await alert_repo.get_waste_opportunities(tenant_id)
        # Get waste incidents from food safety repository
        food_safety_repo = FoodSafetyRepository(db)
        waste_opportunities = await food_safety_repo.get_waste_opportunities(tenant_id)

        # Sum up all waste incidents for the period
        total_waste_incidents = sum(item['waste_incidents'] for item in waste_opportunities) if waste_opportunities else 0

@@ -1,330 +0,0 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Demo Inventory Seeding Script for Inventory Service
Creates realistic Spanish ingredients for demo template tenants

This script runs as a Kubernetes init job inside the inventory-service container.
It populates the template tenants with a comprehensive catalog of ingredients.

Usage:
    python /app/scripts/demo/seed_demo_inventory.py

Environment Variables Required:
    INVENTORY_DATABASE_URL - PostgreSQL connection string for inventory database
    DEMO_MODE - Set to 'production' for production seeding
    LOG_LEVEL - Logging level (default: INFO)
"""

import asyncio
import uuid
import sys
import os
import json
from datetime import datetime, timezone
from pathlib import Path

# Add app to path
sys.path.insert(0, str(Path(__file__).parent.parent.parent))

from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine
from sqlalchemy.orm import sessionmaker
from sqlalchemy import select
import structlog

from app.models.inventory import Ingredient

# Configure logging
structlog.configure(
    processors=[
        structlog.stdlib.add_log_level,
        structlog.processors.TimeStamper(fmt="iso"),
        structlog.dev.ConsoleRenderer()
    ]
)

logger = structlog.get_logger()

# Fixed Demo Tenant IDs (must match tenant service)
DEMO_TENANT_PROFESSIONAL = uuid.UUID("a1b2c3d4-e5f6-47a8-b9c0-d1e2f3a4b5c6")
DEMO_TENANT_ENTERPRISE_CHAIN = uuid.UUID("c3d4e5f6-a7b8-49c0-d1e2-f3a4b5c6d7e8")  # Enterprise parent (Obrador)


def load_ingredients_data():
    """Load ingredients data from JSON file"""
    # Look for data file in the same directory as this script
    data_file = Path(__file__).parent / "ingredientes_es.json"

    if not data_file.exists():
        raise FileNotFoundError(
            f"Ingredients data file not found: {data_file}. "
            "Make sure ingredientes_es.json is in the same directory as this script."
        )

    logger.info("Loading ingredients data", file=str(data_file))

    with open(data_file, 'r', encoding='utf-8') as f:
        data = json.load(f)

    # Flatten all ingredient categories into a single list
    all_ingredients = []
    for category_name, ingredients in data.items():
        logger.debug(f"Loading category: {category_name} ({len(ingredients)} items)")
        all_ingredients.extend(ingredients)

    logger.info(f"Loaded {len(all_ingredients)} ingredients from JSON")
    return all_ingredients


async def seed_ingredients_for_tenant(
    db: AsyncSession,
    tenant_id: uuid.UUID,
    tenant_name: str,
    ingredients_data: list
) -> dict:
    """
    Seed ingredients for a specific tenant using pre-defined UUIDs

    Args:
        db: Database session
        tenant_id: UUID of the tenant
        tenant_name: Name of the tenant (for logging)
        ingredients_data: List of ingredient dictionaries with pre-defined IDs

    Returns:
        Dict with seeding statistics
    """
    logger.info("─" * 80)
    logger.info(f"Seeding ingredients for: {tenant_name}")
    logger.info(f"Tenant ID: {tenant_id}")
    logger.info("─" * 80)

    created_count = 0
    skipped_count = 0

    for ing_data in ingredients_data:
        sku = ing_data["sku"]
        name = ing_data["name"]

        # Check if ingredient already exists for this tenant with this SKU
        result = await db.execute(
            select(Ingredient).where(
                Ingredient.tenant_id == tenant_id,
                Ingredient.sku == sku
            )
        )
        existing_ingredient = result.scalars().first()

        if existing_ingredient:
            logger.debug(f"  ⏭️ Skipping (exists): {sku} - {name}")
            skipped_count += 1
            continue

        # Generate tenant-specific UUID by combining base UUID with tenant ID
        # This ensures each tenant has unique IDs but they're deterministic (same on re-run)
        base_id = uuid.UUID(ing_data["id"])
        # XOR the base ID with the tenant ID to create a tenant-specific ID
        tenant_int = int(tenant_id.hex, 16)
        base_int = int(base_id.hex, 16)
        ingredient_id = uuid.UUID(int=tenant_int ^ base_int)

        # Create new ingredient
        ingredient = Ingredient(
            id=ingredient_id,
            tenant_id=tenant_id,
            name=name,
            sku=sku,
            barcode=None,  # Could generate EAN-13 barcodes if needed
            product_type=ing_data["product_type"],
            ingredient_category=ing_data["ingredient_category"],
            product_category=ing_data["product_category"],
            subcategory=ing_data.get("subcategory"),
            description=ing_data["description"],
            brand=ing_data.get("brand"),
            unit_of_measure=ing_data["unit_of_measure"],
            package_size=None,
            average_cost=ing_data["average_cost"],
            last_purchase_price=ing_data["average_cost"],
            standard_cost=ing_data["average_cost"],
            low_stock_threshold=ing_data.get("low_stock_threshold", 10.0),
            reorder_point=ing_data.get("reorder_point", 20.0),
            reorder_quantity=ing_data.get("reorder_point", 20.0) * 2,
            max_stock_level=ing_data.get("reorder_point", 20.0) * 5,
            shelf_life_days=ing_data.get("shelf_life_days"),
            is_perishable=ing_data.get("is_perishable", False),
            is_active=True,
            allergen_info=ing_data.get("allergen_info") if ing_data.get("allergen_info") else None,
            # NEW: Local production support (Sprint 5)
            produced_locally=ing_data.get("produced_locally", False),
            recipe_id=uuid.UUID(ing_data["recipe_id"]) if ing_data.get("recipe_id") else None,
            created_at=datetime.now(timezone.utc),
            updated_at=datetime.now(timezone.utc)
        )

        db.add(ingredient)
        created_count += 1

        logger.debug(f"  ✅ Created: {sku} - {name}")

    # Commit all changes for this tenant
    await db.commit()

    logger.info(f"  📊 Created: {created_count}, Skipped: {skipped_count}")
    logger.info("")

    return {
        "tenant_id": str(tenant_id),
        "tenant_name": tenant_name,
        "created": created_count,
        "skipped": skipped_count,
        "total": len(ingredients_data)
    }
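
# Illustrative sketch (not part of the original commit): the XOR derivation above is
# deterministic and reversible — re-running the seed yields the same IDs, and XOR-ing
# a derived ID with the tenant ID recovers the catalog's base ID. UUID value is hypothetical.
def _derive_tenant_scoped_id_example():
    base_id = uuid.UUID("00000000-0000-4000-8000-000000000001")       # catalog base ID
    tenant_id = DEMO_TENANT_PROFESSIONAL
    derived = uuid.UUID(int=int(tenant_id.hex, 16) ^ int(base_id.hex, 16))
    recovered = uuid.UUID(int=int(tenant_id.hex, 16) ^ derived.int)   # undoes the XOR
    assert recovered == base_id
    return derived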


async def seed_inventory(db: AsyncSession):
    """
    Seed inventory for all demo template tenants

    Args:
        db: Database session

    Returns:
        Dict with overall seeding statistics
    """
    logger.info("=" * 80)
    logger.info("📦 Starting Demo Inventory Seeding")
    logger.info("=" * 80)

    # Load ingredients data once
    try:
        ingredients_data = load_ingredients_data()
    except FileNotFoundError as e:
        logger.error(str(e))
        raise

    results = []

    # Seed for Professional Bakery (single location)
    logger.info("")
    result_professional = await seed_ingredients_for_tenant(
        db,
        DEMO_TENANT_PROFESSIONAL,
        "Panadería Artesana Madrid (Professional)",
        ingredients_data
    )
    results.append(result_professional)

    # Seed for Enterprise Parent (central production - Obrador)
    logger.info("")
    result_enterprise_parent = await seed_ingredients_for_tenant(
        db,
        DEMO_TENANT_ENTERPRISE_CHAIN,
        "Panadería Central - Obrador Madrid (Enterprise Parent)",
        ingredients_data
    )
    results.append(result_enterprise_parent)

    # Calculate totals
    total_created = sum(r["created"] for r in results)
    total_skipped = sum(r["skipped"] for r in results)

    logger.info("=" * 80)
    logger.info("✅ Demo Inventory Seeding Completed")
    logger.info("=" * 80)

    return {
        "service": "inventory",
        "tenants_seeded": len(results),
        "total_created": total_created,
        "total_skipped": total_skipped,
        "results": results
    }


async def main():
    """Main execution function"""

    logger.info("Demo Inventory Seeding Script Starting")
    logger.info("Mode: %s", os.getenv("DEMO_MODE", "development"))
    logger.info("Log Level: %s", os.getenv("LOG_LEVEL", "INFO"))

    # Get database URL from environment
    database_url = os.getenv("INVENTORY_DATABASE_URL") or os.getenv("DATABASE_URL")
    if not database_url:
        logger.error("❌ INVENTORY_DATABASE_URL or DATABASE_URL environment variable must be set")
        return 1

    # Convert to async URL if needed
    if database_url.startswith("postgresql://"):
        database_url = database_url.replace("postgresql://", "postgresql+asyncpg://", 1)

    logger.info("Connecting to inventory database")

    # Create engine and session
    engine = create_async_engine(
        database_url,
        echo=False,
        pool_pre_ping=True,
        pool_size=5,
        max_overflow=10
    )

    async_session = sessionmaker(
        engine,
        class_=AsyncSession,
        expire_on_commit=False
    )

    try:
        async with async_session() as session:
            result = await seed_inventory(session)

            logger.info("")
            logger.info("📊 Seeding Summary:")
            logger.info(f"  ✅ Tenants seeded: {result['tenants_seeded']}")
            logger.info(f"  ✅ Total created: {result['total_created']}")
            logger.info(f"  ⏭️ Total skipped: {result['total_skipped']}")
            logger.info("")

            # Print per-tenant details
            for tenant_result in result['results']:
                logger.info(
                    f"  {tenant_result['tenant_name']}: "
                    f"{tenant_result['created']} created, {tenant_result['skipped']} skipped"
                )

            logger.info("")
            logger.info("🎉 Success! Ingredient catalog is ready for cloning.")
            logger.info("")
            logger.info("Ingredients by category:")
            logger.info("  • Harinas: 6 tipos (T55, T65, Fuerza, Integral, Centeno, Espelta)")
            logger.info("  • Lácteos: 4 tipos (Mantequilla, Leche, Nata, Huevos)")
            logger.info("  • Levaduras: 3 tipos (Fresca, Seca, Masa Madre)")
            logger.info("  • Básicos: 3 tipos (Sal, Azúcar, Agua)")
            logger.info("  • Especiales: 5 tipos (Chocolate, Almendras, etc.)")
            logger.info("  • Productos: 3 referencias")
            logger.info("")
            logger.info("Next steps:")
            logger.info("  1. Run seed jobs for other services (recipes, suppliers, etc.)")
            logger.info("  2. Verify ingredient data in database")
            logger.info("  3. Test demo session creation with inventory cloning")
            logger.info("")

            return 0

    except Exception as e:
        logger.error("=" * 80)
        logger.error("❌ Demo Inventory Seeding Failed")
        logger.error("=" * 80)
        logger.error("Error: %s", str(e))
        logger.error("", exc_info=True)
        return 1

    finally:
        await engine.dispose()


if __name__ == "__main__":
    exit_code = asyncio.run(main())
    sys.exit(exit_code)
@@ -1,347 +0,0 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Demo Inventory Retail Seeding Script for Inventory Service
Creates finished product inventory for enterprise child tenants (retail outlets)

This script runs as a Kubernetes init job inside the inventory-service container.
It populates the child retail tenants with FINISHED PRODUCTS ONLY (no raw ingredients).

Usage:
    python /app/scripts/demo/seed_demo_inventory_retail.py

Environment Variables Required:
    INVENTORY_DATABASE_URL - PostgreSQL connection string for inventory database
    DEMO_MODE - Set to 'production' for production seeding
    LOG_LEVEL - Logging level (default: INFO)
"""

import asyncio
import uuid
import sys
import os
import json
from datetime import datetime, timezone
from pathlib import Path

# Add app to path
sys.path.insert(0, str(Path(__file__).parent.parent.parent))
# Add shared to path for demo utilities
sys.path.insert(0, str(Path(__file__).parent.parent.parent.parent.parent))

from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine
from sqlalchemy.orm import sessionmaker
from sqlalchemy import select
import structlog

from shared.utils.demo_dates import BASE_REFERENCE_DATE

from app.models.inventory import Ingredient, ProductType

# Configure logging
structlog.configure(
    processors=[
        structlog.stdlib.add_log_level,
        structlog.processors.TimeStamper(fmt="iso"),
        structlog.dev.ConsoleRenderer()
    ]
)

logger = structlog.get_logger()

# Fixed Demo Tenant IDs (must match tenant service)
DEMO_TENANT_ENTERPRISE_CHAIN = uuid.UUID("c3d4e5f6-a7b8-49c0-d1e2-f3a4b5c6d7e8")  # Enterprise parent (Obrador)
DEMO_TENANT_CHILD_1 = uuid.UUID("d4e5f6a7-b8c9-40d1-e2f3-a4b5c6d7e8f9")  # Madrid Centro
DEMO_TENANT_CHILD_2 = uuid.UUID("e5f6a7b8-c9d0-41e2-f3a4-b5c6d7e8f9a0")  # Barcelona Gràcia
DEMO_TENANT_CHILD_3 = uuid.UUID("f6a7b8c9-d0e1-42f3-a4b5-c6d7e8f9a0b1")  # Valencia Ruzafa

# Child tenant configurations
CHILD_TENANTS = [
    (DEMO_TENANT_CHILD_1, "Madrid Centro"),
    (DEMO_TENANT_CHILD_2, "Barcelona Gràcia"),
    (DEMO_TENANT_CHILD_3, "Valencia Ruzafa")
]


def load_finished_products_data():
    """Load ONLY finished products from JSON file (no raw ingredients)"""
    # Look for data file in the same directory as this script
    data_file = Path(__file__).parent / "ingredientes_es.json"

    if not data_file.exists():
        raise FileNotFoundError(
            f"Ingredients data file not found: {data_file}. "
            "Make sure ingredientes_es.json is in the same directory as this script."
        )

    logger.info("Loading finished products data", file=str(data_file))

    with open(data_file, 'r', encoding='utf-8') as f:
        data = json.load(f)

    # Extract ONLY finished products (not raw ingredients)
    finished_products = data.get("productos_terminados", [])

    logger.info(f"Loaded {len(finished_products)} finished products from JSON")
    logger.info("NOTE: Raw ingredients (flour, yeast, etc.) are NOT seeded for retail outlets")

    return finished_products


async def seed_retail_inventory_for_tenant(
    db: AsyncSession,
    tenant_id: uuid.UUID,
    parent_tenant_id: uuid.UUID,
    tenant_name: str,
    products_data: list
) -> dict:
    """
    Seed finished product inventory for a child retail tenant using XOR ID transformation

    This ensures retail outlets have the same product catalog as their parent (central production),
    using deterministic UUIDs that map correctly across tenants.

    Args:
        db: Database session
        tenant_id: UUID of the child tenant
        parent_tenant_id: UUID of the parent tenant (for XOR transformation)
        tenant_name: Name of the tenant (for logging)
        products_data: List of finished product dictionaries with pre-defined IDs

    Returns:
        Dict with seeding statistics
    """
    logger.info("─" * 80)
    logger.info(f"Seeding retail inventory for: {tenant_name}")
    logger.info(f"Child Tenant ID: {tenant_id}")
    logger.info(f"Parent Tenant ID: {parent_tenant_id}")
    logger.info("─" * 80)

    created_count = 0
    skipped_count = 0

    for product_data in products_data:
        sku = product_data["sku"]
        name = product_data["name"]

        # Check if product already exists for this tenant with this SKU
        result = await db.execute(
            select(Ingredient).where(
                Ingredient.tenant_id == tenant_id,
                Ingredient.sku == sku
            )
        )
        existing_product = result.scalars().first()

        if existing_product:
            logger.debug(f"  ⏭️ Skipping (exists): {sku} - {name}")
            skipped_count += 1
            continue

        # Generate tenant-specific UUID using XOR transformation
        # This ensures the child's product IDs map to the parent's product IDs
        base_id = uuid.UUID(product_data["id"])
        tenant_int = int(tenant_id.hex, 16)
        base_int = int(base_id.hex, 16)
        product_id = uuid.UUID(int=tenant_int ^ base_int)

        # Create new finished product for retail outlet
        product = Ingredient(
            id=product_id,
            tenant_id=tenant_id,
            name=name,
            sku=sku,
            barcode=None,  # Could be set by retail outlet
            product_type=ProductType.FINISHED_PRODUCT,  # CRITICAL: Only finished products
            ingredient_category=None,  # Not applicable for finished products
            product_category=product_data["product_category"],  # BREAD, CROISSANTS, PASTRIES, etc.
            subcategory=product_data.get("subcategory"),
            description=product_data["description"],
            brand="Obrador Madrid",  # Branded from central production
            unit_of_measure=product_data["unit_of_measure"],
            package_size=None,
            average_cost=product_data["average_cost"],  # Transfer price from central production
            last_purchase_price=product_data["average_cost"],
            standard_cost=product_data["average_cost"],
            # Retail outlets typically don't manage reorder points - they order from parent
            low_stock_threshold=None,
            reorder_point=None,
            reorder_quantity=None,
            max_stock_level=None,
            shelf_life_days=product_data.get("shelf_life_days"),
            is_perishable=product_data.get("is_perishable", True),  # Bakery products are perishable
            is_active=True,
            allergen_info=product_data.get("allergen_info") if product_data.get("allergen_info") else None,
            # Retail outlets receive products, don't produce them locally
            produced_locally=False,
            recipe_id=None,  # Recipes belong to central production, not retail
            created_at=BASE_REFERENCE_DATE,
            updated_at=BASE_REFERENCE_DATE
        )

        db.add(product)
        created_count += 1

        logger.debug(f"  ✅ Created: {sku} - {name}")

    # Commit all changes for this tenant
    await db.commit()

    logger.info(f"  📊 Created: {created_count}, Skipped: {skipped_count}")
    logger.info("")

    return {
        "tenant_id": str(tenant_id),
        "tenant_name": tenant_name,
        "created": created_count,
        "skipped": skipped_count,
        "total": len(products_data)
    }
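
# Illustrative sketch (not part of the original commit): because parent and child IDs
# are both derived from the same catalog base ID via XOR, a child's product ID can be
# mapped back to the parent's equivalent ID without any lookup table.
def _map_child_id_to_parent_example(child_product_id: uuid.UUID,
                                    child_tenant_id: uuid.UUID,
                                    parent_tenant_id: uuid.UUID) -> uuid.UUID:
    base_int = child_product_id.int ^ child_tenant_id.int   # undo the child derivation
    return uuid.UUID(int=base_int ^ parent_tenant_id.int)   # re-derive for the parent
# e.g. _map_child_id_to_parent_example(pid, DEMO_TENANT_CHILD_1, DEMO_TENANT_ENTERPRISE_CHAIN)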


async def seed_retail_inventory(db: AsyncSession):
    """
    Seed retail inventory for all child tenant templates

    Args:
        db: Database session

    Returns:
        Dict with overall seeding statistics
    """
    logger.info("=" * 80)
    logger.info("🏪 Starting Demo Retail Inventory Seeding")
    logger.info("=" * 80)
    logger.info("NOTE: Seeding FINISHED PRODUCTS ONLY for child retail outlets")
    logger.info("Raw ingredients (flour, yeast, etc.) are NOT seeded for retail tenants")
    logger.info("")

    # Load finished products data once
    try:
        products_data = load_finished_products_data()
    except FileNotFoundError as e:
        logger.error(str(e))
        raise

    results = []

    # Seed for each child retail outlet
    for child_tenant_id, child_tenant_name in CHILD_TENANTS:
        logger.info("")
        result = await seed_retail_inventory_for_tenant(
            db,
            child_tenant_id,
            DEMO_TENANT_ENTERPRISE_CHAIN,
            f"{child_tenant_name} (Retail Outlet)",
            products_data
        )
        results.append(result)

    # Calculate totals
    total_created = sum(r["created"] for r in results)
    total_skipped = sum(r["skipped"] for r in results)

    logger.info("=" * 80)
    logger.info("✅ Demo Retail Inventory Seeding Completed")
    logger.info("=" * 80)

    return {
        "service": "inventory_retail",
        "tenants_seeded": len(results),
        "total_created": total_created,
        "total_skipped": total_skipped,
        "results": results
    }


async def main():
    """Main execution function"""

    logger.info("Demo Retail Inventory Seeding Script Starting")
    logger.info("Mode: %s", os.getenv("DEMO_MODE", "development"))
    logger.info("Log Level: %s", os.getenv("LOG_LEVEL", "INFO"))

    # Get database URL from environment
    database_url = os.getenv("INVENTORY_DATABASE_URL") or os.getenv("DATABASE_URL")
    if not database_url:
        logger.error("❌ INVENTORY_DATABASE_URL or DATABASE_URL environment variable must be set")
        return 1

    # Convert to async URL if needed
    if database_url.startswith("postgresql://"):
        database_url = database_url.replace("postgresql://", "postgresql+asyncpg://", 1)

    logger.info("Connecting to inventory database")

    # Create engine and session
    engine = create_async_engine(
        database_url,
        echo=False,
        pool_pre_ping=True,
        pool_size=5,
        max_overflow=10
    )

    async_session = sessionmaker(
        engine,
        class_=AsyncSession,
        expire_on_commit=False
    )

    try:
        async with async_session() as session:
            result = await seed_retail_inventory(session)

            logger.info("")
            logger.info("📊 Retail Inventory Seeding Summary:")
            logger.info(f"  ✅ Retail outlets seeded: {result['tenants_seeded']}")
            logger.info(f"  ✅ Total products created: {result['total_created']}")
            logger.info(f"  ⏭️ Total skipped: {result['total_skipped']}")
            logger.info("")

            # Print per-tenant details
            for tenant_result in result['results']:
                logger.info(
                    f"  {tenant_result['tenant_name']}: "
                    f"{tenant_result['created']} products created, {tenant_result['skipped']} skipped"
                )

            logger.info("")
            logger.info("🎉 Success! Retail inventory catalog is ready for cloning.")
            logger.info("")
            logger.info("Finished products seeded:")
            logger.info("  • Baguette Tradicional")
            logger.info("  • Croissant de Mantequilla")
            logger.info("  • Pan de Pueblo")
            logger.info("  • Napolitana de Chocolate")
            logger.info("")
            logger.info("Key points:")
            logger.info("  ✓ Only finished products seeded (no raw ingredients)")
            logger.info("  ✓ Product IDs use XOR transformation to match parent catalog")
            logger.info("  ✓ All products marked as produced_locally=False (received from parent)")
            logger.info("  ✓ Retail outlets will receive stock from central production via distribution")
            logger.info("")
            logger.info("Next steps:")
            logger.info("  1. Seed retail stock levels (initial inventory)")
            logger.info("  2. Seed retail sales history")
            logger.info("  3. Seed customer data and orders")
            logger.info("  4. Test enterprise demo session creation")
            logger.info("")

            return 0

    except Exception as e:
        logger.error("=" * 80)
        logger.error("❌ Demo Retail Inventory Seeding Failed")
        logger.error("=" * 80)
        logger.error("Error: %s", str(e))
        logger.error("", exc_info=True)
        return 1

    finally:
        await engine.dispose()


if __name__ == "__main__":
    exit_code = asyncio.run(main())
    sys.exit(exit_code)
File diff suppressed because it is too large
@@ -1,394 +0,0 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Demo Retail Stock Seeding Script for Inventory Service
Creates realistic stock levels for finished products at child retail outlets

This script runs as a Kubernetes init job inside the inventory-service container.
It populates child retail tenants with stock levels for FINISHED PRODUCTS ONLY.

Usage:
    python /app/scripts/demo/seed_demo_stock_retail.py

Environment Variables Required:
    INVENTORY_DATABASE_URL - PostgreSQL connection string for inventory database
    DEMO_MODE - Set to 'production' for production seeding
    LOG_LEVEL - Logging level (default: INFO)
"""

import asyncio
import uuid
import sys
import os
import random
from datetime import datetime, timezone, timedelta
from pathlib import Path
from decimal import Decimal

# Add app to path
sys.path.insert(0, str(Path(__file__).parent.parent.parent))
# Add shared to path for demo utilities
sys.path.insert(0, str(Path(__file__).parent.parent.parent.parent.parent))

from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine
from sqlalchemy.orm import sessionmaker
from sqlalchemy import select
import structlog

from shared.utils.demo_dates import BASE_REFERENCE_DATE

from app.models.inventory import Ingredient, Stock, ProductType

# Configure logging
structlog.configure(
    processors=[
        structlog.stdlib.add_log_level,
        structlog.processors.TimeStamper(fmt="iso"),
        structlog.dev.ConsoleRenderer()
    ]
)

logger = structlog.get_logger()

# Fixed Demo Tenant IDs (must match tenant service)
DEMO_TENANT_ENTERPRISE_CHAIN = uuid.UUID("c3d4e5f6-a7b8-49c0-d1e2-f3a4b5c6d7e8")  # Enterprise parent (Obrador)
DEMO_TENANT_CHILD_1 = uuid.UUID("d4e5f6a7-b8c9-40d1-e2f3-a4b5c6d7e8f9")  # Madrid Centro
DEMO_TENANT_CHILD_2 = uuid.UUID("e5f6a7b8-c9d0-41e2-f3a4-b5c6d7e8f9a0")  # Barcelona Gràcia
DEMO_TENANT_CHILD_3 = uuid.UUID("f6a7b8c9-d0e1-42f3-a4b5-c6d7e8f9a0b1")  # Valencia Ruzafa

# Child tenant configurations
CHILD_TENANTS = [
    (DEMO_TENANT_CHILD_1, "Madrid Centro", 1.2),     # Larger store, 20% more stock
    (DEMO_TENANT_CHILD_2, "Barcelona Gràcia", 1.0),  # Medium store, baseline stock
    (DEMO_TENANT_CHILD_3, "Valencia Ruzafa", 0.8)    # Smaller store, 20% less stock
]

# Retail stock configuration for finished products
# Daily sales estimates (units per day) for each product type
DAILY_SALES_BY_SKU = {
    "PRO-BAG-001": 80,  # Baguette Tradicional - high volume
    "PRO-CRO-001": 50,  # Croissant de Mantequilla - popular breakfast item
    "PRO-PUE-001": 30,  # Pan de Pueblo - specialty item
    "PRO-NAP-001": 40   # Napolitana de Chocolate - pastry item
}

# Storage locations for retail outlets
RETAIL_STORAGE_LOCATIONS = ["Display Case", "Back Room", "Cooling Shelf", "Storage Area"]


def generate_retail_batch_number(tenant_id: uuid.UUID, product_sku: str, days_ago: int) -> str:
    """Generate a realistic batch number for retail stock"""
    tenant_short = str(tenant_id).split('-')[0].upper()[:4]
    date_code = (BASE_REFERENCE_DATE - timedelta(days=days_ago)).strftime("%Y%m%d")
    return f"RET-{tenant_short}-{product_sku}-{date_code}"


def calculate_retail_stock_quantity(
    product_sku: str,
    size_multiplier: float,
    create_some_low_stock: bool = False
) -> float:
    """
    Calculate realistic retail stock quantity based on daily sales

    Args:
        product_sku: SKU of the finished product
        size_multiplier: Store size multiplier (0.8 for small, 1.0 for medium, 1.2 for large)
        create_some_low_stock: If True, 20% chance of low stock scenario

    Returns:
        Stock quantity in units
    """
    daily_sales = DAILY_SALES_BY_SKU.get(product_sku, 20)

    # Retail outlets typically stock 1-3 days worth (fresh bakery products)
    if create_some_low_stock and random.random() < 0.2:
        # Low stock: 0.3-0.8 days worth (need restock soon)
        days_of_supply = random.uniform(0.3, 0.8)
    else:
        # Normal: 1-2.5 days worth
        days_of_supply = random.uniform(1.0, 2.5)

    quantity = daily_sales * days_of_supply * size_multiplier

    # Add realistic variability
    quantity *= random.uniform(0.85, 1.15)

    return max(5.0, round(quantity))  # Minimum 5 units
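
# Illustrative sketch (not part of the original commit): the expected (mean) quantity
# the helper above produces, worked through with hypothetical inputs. For the baguette
# SKU at a large store (daily sales 80, multiplier 1.2), a normal draw averages about
# 1.75 days of supply: 80 * 1.75 * 1.2 ≈ 168 units before the ±15% variability applies.
def _expected_stock_quantity_example(daily_sales: float = 80, size_multiplier: float = 1.2) -> float:
    mean_days_of_supply = (1.0 + 2.5) / 2  # midpoint of the normal supply range
    return daily_sales * mean_days_of_supply * size_multiplier  # -> 168.0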


async def seed_retail_stock_for_tenant(
    db: AsyncSession,
    tenant_id: uuid.UUID,
    tenant_name: str,
    size_multiplier: float
) -> dict:
    """
    Seed realistic stock levels for a child retail tenant

    Creates multiple stock batches per product with varied freshness levels,
    simulating realistic retail bakery inventory with:
    - Fresh stock from today's/yesterday's delivery
    - Some expiring soon items
    - Varied batch sizes and locations

    Args:
        db: Database session
        tenant_id: UUID of the child tenant
        tenant_name: Name of the tenant (for logging)
        size_multiplier: Store size multiplier for stock quantities

    Returns:
        Dict with seeding statistics
    """
    logger.info("─" * 80)
    logger.info(f"Seeding retail stock for: {tenant_name}")
    logger.info(f"Tenant ID: {tenant_id}")
    logger.info(f"Size Multiplier: {size_multiplier}x")
    logger.info("─" * 80)

    # Get all finished products for this tenant
    result = await db.execute(
        select(Ingredient).where(
            Ingredient.tenant_id == tenant_id,
            Ingredient.product_type == ProductType.FINISHED_PRODUCT,
            Ingredient.is_active == True
        )
    )
    products = result.scalars().all()

    if not products:
        logger.warning(f"No finished products found for tenant {tenant_id}")
        return {
            "tenant_id": str(tenant_id),
            "tenant_name": tenant_name,
            "stock_batches_created": 0,
            "products_stocked": 0
        }

    created_batches = 0

    for product in products:
        # Create 2-4 batches per product (simulating multiple deliveries/batches)
        num_batches = random.randint(2, 4)

        for batch_index in range(num_batches):
            # Vary delivery dates (0-2 days ago for fresh bakery products)
            days_ago = random.randint(0, 2)
            received_date = BASE_REFERENCE_DATE - timedelta(days=days_ago)

            # Calculate expiration based on shelf life
            shelf_life_days = product.shelf_life_days or 2  # Default 2 days for bakery
            expiration_date = received_date + timedelta(days=shelf_life_days)

            # Calculate quantity for this batch
            # Split total quantity across batches with variation
            batch_quantity_factor = random.uniform(0.3, 0.7)  # Each batch is 30-70% of average
            quantity = calculate_retail_stock_quantity(
                product.sku,
                size_multiplier,
                create_some_low_stock=(batch_index == 0)  # First batch might be low
            ) * batch_quantity_factor

            # Determine if product is still good
            days_until_expiration = (expiration_date - BASE_REFERENCE_DATE).days
            is_expired = days_until_expiration < 0
            is_available = not is_expired
            quality_status = "expired" if is_expired else "good"

            # Random storage location
            storage_location = random.choice(RETAIL_STORAGE_LOCATIONS)

            # Create stock batch
            stock_batch = Stock(
                id=uuid.uuid4(),
                tenant_id=tenant_id,
                ingredient_id=product.id,
                supplier_id=DEMO_TENANT_ENTERPRISE_CHAIN,  # Supplied by parent (Obrador)
                batch_number=generate_retail_batch_number(tenant_id, product.sku, days_ago),
                lot_number=f"LOT-{BASE_REFERENCE_DATE.strftime('%Y%m%d')}-{batch_index+1:02d}",
                supplier_batch_ref=f"OBRADOR-{received_date.strftime('%Y%m%d')}-{random.randint(1000, 9999)}",
                production_stage="fully_baked",  # Retail receives fully baked products
                transformation_reference=None,
                current_quantity=quantity,
                reserved_quantity=0.0,
                available_quantity=quantity if is_available else 0.0,
                received_date=received_date,
                expiration_date=expiration_date,
                best_before_date=expiration_date - timedelta(hours=12) if shelf_life_days == 1 else None,
                original_expiration_date=None,
                transformation_date=None,
                final_expiration_date=expiration_date,
                unit_cost=Decimal(str(product.average_cost or 0.5)),
                total_cost=Decimal(str(product.average_cost or 0.5)) * Decimal(str(quantity)),
                storage_location=storage_location,
                warehouse_zone=None,  # Retail outlets don't have warehouse zones
                shelf_position=None,
                requires_refrigeration=False,  # Most bakery products don't require refrigeration
                requires_freezing=False,
                storage_temperature_min=None,
                storage_temperature_max=25.0 if product.is_perishable else None,  # Room temp
                storage_humidity_max=65.0 if product.is_perishable else None,
                shelf_life_days=shelf_life_days,
                storage_instructions=product.storage_instructions if hasattr(product, 'storage_instructions') else None,
                is_available=is_available,
                is_expired=is_expired,
                quality_status=quality_status,
                created_at=received_date,
                updated_at=BASE_REFERENCE_DATE
            )

            db.add(stock_batch)
            created_batches += 1

            logger.debug(
                f"  ✅ Created stock batch: {product.name} - "
                f"{quantity:.0f} units, expires in {days_until_expiration} days"
            )

    # Commit all changes for this tenant
    await db.commit()

    logger.info(f"  📊 Stock batches created: {created_batches} across {len(products)} products")
    logger.info("")

    return {
        "tenant_id": str(tenant_id),
        "tenant_name": tenant_name,
        "stock_batches_created": created_batches,
        "products_stocked": len(products)
    }


async def seed_retail_stock(db: AsyncSession):
    """
    Seed retail stock for all child tenant templates

    Args:
        db: Database session

    Returns:
        Dict with overall seeding statistics
    """
    logger.info("=" * 80)
    logger.info("📦 Starting Demo Retail Stock Seeding")
    logger.info("=" * 80)
    logger.info("Creating stock levels for finished products at retail outlets")
    logger.info("")

    results = []

    # Seed for each child retail outlet
    for child_tenant_id, child_tenant_name, size_multiplier in CHILD_TENANTS:
        logger.info("")
        result = await seed_retail_stock_for_tenant(
            db,
            child_tenant_id,
            f"{child_tenant_name} (Retail Outlet)",
            size_multiplier
        )
        results.append(result)

    # Calculate totals
    total_batches = sum(r["stock_batches_created"] for r in results)
    total_products = sum(r["products_stocked"] for r in results)

    logger.info("=" * 80)
    logger.info("✅ Demo Retail Stock Seeding Completed")
    logger.info("=" * 80)

    return {
        "service": "inventory_stock_retail",
        "tenants_seeded": len(results),
        "total_batches_created": total_batches,
        "total_products_stocked": total_products,
        "results": results
    }


async def main():
    """Main execution function"""

    logger.info("Demo Retail Stock Seeding Script Starting")
    logger.info("Mode: %s", os.getenv("DEMO_MODE", "development"))
    logger.info("Log Level: %s", os.getenv("LOG_LEVEL", "INFO"))

    # Get database URL from environment
    database_url = os.getenv("INVENTORY_DATABASE_URL") or os.getenv("DATABASE_URL")
    if not database_url:
        logger.error("❌ INVENTORY_DATABASE_URL or DATABASE_URL environment variable must be set")
        return 1

    # Convert to async URL if needed
    if database_url.startswith("postgresql://"):
        database_url = database_url.replace("postgresql://", "postgresql+asyncpg://", 1)

    logger.info("Connecting to inventory database")

    # Create engine and session
    engine = create_async_engine(
        database_url,
        echo=False,
        pool_pre_ping=True,
        pool_size=5,
        max_overflow=10
    )

    async_session = sessionmaker(
        engine,
        class_=AsyncSession,
        expire_on_commit=False
    )

    try:
        async with async_session() as session:
            result = await seed_retail_stock(session)

            logger.info("")
            logger.info("📊 Retail Stock Seeding Summary:")
            logger.info(f"  ✅ Retail outlets seeded: {result['tenants_seeded']}")
            logger.info(f"  ✅ Total stock batches: {result['total_batches_created']}")
            logger.info(f"  ✅ Products stocked: {result['total_products_stocked']}")
            logger.info("")

            # Print per-tenant details
            for tenant_result in result['results']:
                logger.info(
                    f"  {tenant_result['tenant_name']}: "
                    f"{tenant_result['stock_batches_created']} batches, "
                    f"{tenant_result['products_stocked']} products"
                )

            logger.info("")
            logger.info("🎉 Success! Retail stock levels are ready for cloning.")
            logger.info("")
            logger.info("Stock characteristics:")
            logger.info("  ✓ Multiple batches per product (2-4 batches)")
            logger.info("  ✓ Varied freshness levels (0-2 days old)")
            logger.info("  ✓ Realistic quantities based on store size")
            logger.info("  ✓ Some low-stock scenarios for demo alerts")
            logger.info("  ✓ Expiration tracking enabled")
            logger.info("")
            logger.info("Next steps:")
            logger.info("  1. Seed retail sales history")
            logger.info("  2. Seed customer data")
            logger.info("  3. Test stock alerts and reorder triggers")
            logger.info("")

            return 0

    except Exception as e:
        logger.error("=" * 80)
        logger.error("❌ Demo Retail Stock Seeding Failed")
        logger.error("=" * 80)
        logger.error("Error: %s", str(e))
        logger.error("", exc_info=True)
        return 1

    finally:
        await engine.dispose()


if __name__ == "__main__":
    exit_code = asyncio.run(main())
    sys.exit(exit_code)