demo seed change

services/inventory/app/api/internal_alert_trigger.py — new file, 87 lines

@@ -0,0 +1,87 @@
# services/inventory/app/api/internal_alert_trigger.py
"""
Internal API for triggering inventory alerts.
Used by demo session cloning to generate realistic inventory alerts.

URL Pattern: /api/v1/tenants/{tenant_id}/inventory/internal/alerts/trigger
This follows the tenant-scoped pattern so gateway can proxy correctly.
"""

from fastapi import APIRouter, HTTPException, Request, Path
from uuid import UUID
import structlog

logger = structlog.get_logger()

router = APIRouter()

# New URL pattern: tenant-scoped so gateway proxies to inventory service correctly
@router.post("/api/v1/tenants/{tenant_id}/inventory/internal/alerts/trigger")
async def trigger_inventory_alerts(
    tenant_id: UUID = Path(..., description="Tenant ID to check inventory for"),
    request: Request = None
) -> dict:
    """
    Trigger comprehensive inventory alert checks for a specific tenant (internal use only).

    This endpoint is called by the demo session cloning process after inventory
    data is seeded to generate realistic inventory alerts including:
    - Critical stock shortages
    - Expiring ingredients
    - Overstock situations

    Security: Protected by X-Internal-Service header check.
    """
    try:
        # Verify internal service header
        if not request or request.headers.get("X-Internal-Service") not in ["demo-session", "internal"]:
            logger.warning("Unauthorized internal API call", tenant_id=str(tenant_id))
            raise HTTPException(
                status_code=403,
                detail="This endpoint is for internal service use only"
            )

        # Get inventory scheduler from app state
        inventory_scheduler = getattr(request.app.state, 'inventory_scheduler', None)

        if not inventory_scheduler:
            logger.error("Inventory scheduler not initialized")
            raise HTTPException(
                status_code=500,
                detail="Inventory scheduler not available"
            )

        # Trigger comprehensive inventory alert checks for the specific tenant
        logger.info("Triggering comprehensive inventory alert checks", tenant_id=str(tenant_id))

        # Call the scheduler's manual trigger method
        result = await inventory_scheduler.trigger_manual_check(tenant_id)

        if result.get("success", False):
            logger.info(
                "Inventory alert checks completed successfully",
                tenant_id=str(tenant_id),
                alerts_generated=result.get("alerts_generated", 0)
            )
        else:
            logger.error(
                "Inventory alert checks failed",
                tenant_id=str(tenant_id),
                error=result.get("error", "Unknown error")
            )

        return result

    except HTTPException:
        raise
    except Exception as e:
        logger.error(
            "Error triggering inventory alerts",
            tenant_id=str(tenant_id),
            error=str(e),
            exc_info=True
        )
        raise HTTPException(
            status_code=500,
            detail=f"Failed to trigger inventory alerts: {str(e)}"
        )
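For context, a minimal sketch of how a caller such as the demo-session service might invoke this endpoint. The service host, port, and use of httpx here are assumptions for illustration, not part of this commit:

# Illustrative only: host/port and httpx are assumptions, not part of this diff.
import httpx

async def trigger_demo_alerts(tenant_id: str) -> dict:
    url = f"http://inventory:8000/api/v1/tenants/{tenant_id}/inventory/internal/alerts/trigger"
    async with httpx.AsyncClient() as client:
        # The X-Internal-Service header is what the endpoint checks.
        response = await client.post(url, headers={"X-Internal-Service": "demo-session"})
        response.raise_for_status()
        return response.json()  # e.g. {"success": true, "alerts_generated": 12}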
@@ -1,44 +1,37 @@
"""
Internal Demo Cloning API for Inventory Service
Service-to-service endpoint for cloning inventory data with date adjustment
Handles internal demo data cloning operations
"""

from fastapi import APIRouter, Depends, HTTPException, Header
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select, func
import structlog
import uuid
from datetime import datetime, timezone
from typing import Optional
import os
import sys
import structlog
import json
from pathlib import Path

# Add shared path
sys.path.insert(0, str(Path(__file__).parent.parent.parent.parent.parent))
from datetime import datetime
import uuid
from uuid import UUID

from app.core.database import get_db
from app.models.inventory import Ingredient, Stock, StockMovement
from shared.utils.demo_dates import adjust_date_for_demo, BASE_REFERENCE_DATE
from app.core.config import settings
from app.models import Ingredient, Stock, ProductType

logger = structlog.get_logger()
router = APIRouter(prefix="/internal/demo", tags=["internal"])

# Base demo tenant IDs
DEMO_TENANT_PROFESSIONAL = "a1b2c3d4-e5f6-47a8-b9c0-d1e2f3a4b5c6"
router = APIRouter()


def verify_internal_api_key(x_internal_api_key: Optional[str] = Header(None)):
async def verify_internal_api_key(x_internal_api_key: str = Header(None)):
    """Verify internal API key for service-to-service communication"""
    from app.core.config import settings
    if x_internal_api_key != settings.INTERNAL_API_KEY:
    required_key = settings.INTERNAL_API_KEY
    if x_internal_api_key != required_key:
        logger.warning("Unauthorized internal API access attempted")
        raise HTTPException(status_code=403, detail="Invalid internal API key")
    return True


@router.post("/clone")
async def clone_demo_data(
@router.post("/internal/demo/clone")
async def clone_demo_data_internal(
    base_tenant_id: str,
    virtual_tenant_id: str,
    demo_account_type: str,
@@ -50,350 +43,346 @@ async def clone_demo_data(
    """
    Clone inventory service data for a virtual demo tenant

    Clones:
    - Ingredients from template tenant
    - Stock batches with date-adjusted expiration dates
    - Generates inventory alerts based on stock status
    This endpoint creates fresh demo data by:
    1. Loading seed data from JSON files
    2. Applying XOR-based ID transformation
    3. Adjusting dates relative to session creation time
    4. Creating records in the virtual tenant

    Args:
        base_tenant_id: Template tenant UUID to clone from
        base_tenant_id: Template tenant UUID (for reference)
        virtual_tenant_id: Target virtual tenant UUID
        demo_account_type: Type of demo account
        session_id: Originating session ID for tracing
        session_created_at: ISO timestamp when demo session was created (for date adjustment)

        session_created_at: Session creation timestamp for date adjustment
        db: Database session

    Returns:
        Cloning status and record counts
        Dictionary with cloning results

    Raises:
        HTTPException: On validation or cloning errors
    """
    start_time = datetime.now(timezone.utc)

    # Parse session_created_at or fallback to now
    if session_created_at:
        try:
            session_time = datetime.fromisoformat(session_created_at.replace('Z', '+00:00'))
        except (ValueError, AttributeError) as e:
            logger.warning(
                "Invalid session_created_at format, using current time",
                session_created_at=session_created_at,
                error=str(e)
            )
            session_time = datetime.now(timezone.utc)
    else:
        logger.warning("session_created_at not provided, using current time")
        session_time = datetime.now(timezone.utc)

    logger.info(
        "Starting inventory data cloning with date adjustment",
        base_tenant_id=base_tenant_id,
        virtual_tenant_id=virtual_tenant_id,
        demo_account_type=demo_account_type,
        session_id=session_id,
        session_time=session_time.isoformat()
    )

    start_time = datetime.now()

    try:
        # Validate UUIDs
        base_uuid = uuid.UUID(base_tenant_id)
        virtual_uuid = uuid.UUID(virtual_tenant_id)
        virtual_uuid = UUID(virtual_tenant_id)

        # Parse session creation time for date adjustment
        if session_created_at:
            try:
                session_time = datetime.fromisoformat(session_created_at.replace('Z', '+00:00'))
            except (ValueError, AttributeError):
                session_time = start_time
        else:
            session_time = start_time

        # Debug logging for UUID values
        logger.debug("Received UUID values", base_tenant_id=base_tenant_id, virtual_tenant_id=virtual_tenant_id)

        if not all([base_tenant_id, virtual_tenant_id, session_id]):
            raise HTTPException(
                status_code=400,
                detail="Missing required parameters: base_tenant_id, virtual_tenant_id, session_id"
            )

        # Validate UUID format before processing
        try:
            UUID(base_tenant_id)
            UUID(virtual_tenant_id)
        except ValueError as e:
            logger.error("Invalid UUID format in request",
                         base_tenant_id=base_tenant_id,
                         virtual_tenant_id=virtual_tenant_id,
                         error=str(e))
            raise HTTPException(
                status_code=400,
                detail=f"Invalid UUID format: {str(e)}"
            )

        # Parse session creation time
        if session_created_at:
            try:
                session_created_at_parsed = datetime.fromisoformat(session_created_at.replace('Z', '+00:00'))
            except (ValueError, AttributeError):
                session_created_at_parsed = datetime.now()
        else:
            session_created_at_parsed = datetime.now()

        # Determine profile based on demo_account_type
        if demo_account_type == "enterprise":
            profile = "enterprise"
        else:
            profile = "professional"

        logger.info(
            "Starting inventory data cloning with date adjustment",
            base_tenant_id=base_tenant_id,
            virtual_tenant_id=virtual_tenant_id,
            demo_account_type=demo_account_type,
            session_id=session_id,
            session_time=session_created_at_parsed.isoformat()
        )

        # Load seed data using shared utility
        try:
            from shared.utils.seed_data_paths import get_seed_data_path

            if profile == "professional":
                json_file = get_seed_data_path("professional", "03-inventory.json")
            elif profile == "enterprise":
                json_file = get_seed_data_path("enterprise", "03-inventory.json")
            else:
                raise ValueError(f"Invalid profile: {profile}")

        except ImportError:
            # Fallback to original path
            seed_data_dir = Path(__file__).parent.parent.parent.parent / "infrastructure" / "seed-data"
            if profile == "professional":
                json_file = seed_data_dir / "professional" / "03-inventory.json"
            elif profile == "enterprise":
                json_file = seed_data_dir / "enterprise" / "parent" / "03-inventory.json"
            else:
                raise ValueError(f"Invalid profile: {profile}")

        if not json_file.exists():
            raise HTTPException(
                status_code=404,
                detail=f"Seed data file not found: {json_file}"
            )

        # Load JSON data
        with open(json_file, 'r', encoding='utf-8') as f:
            seed_data = json.load(f)

        # Check if data already exists for this virtual tenant (idempotency)
        from sqlalchemy import select, delete
        existing_check = await db.execute(
            select(Ingredient).where(Ingredient.tenant_id == virtual_uuid).limit(1)
            select(Ingredient).where(Ingredient.tenant_id == virtual_tenant_id).limit(1)
        )
        existing_ingredient = existing_check.scalars().first()
        existing_ingredient = existing_check.scalar_one_or_none()

        if existing_ingredient:
            logger.warning(
                "Data already exists for virtual tenant - cleaning before re-clone",
                virtual_tenant_id=virtual_tenant_id,
                base_tenant_id=base_tenant_id
            )
            # Clean up existing data first to ensure fresh clone
            from sqlalchemy import delete

            await db.execute(
                delete(StockMovement).where(StockMovement.tenant_id == virtual_uuid)
            )
            await db.execute(
                delete(Stock).where(Stock.tenant_id == virtual_uuid)
            )
            await db.execute(
                delete(Ingredient).where(Ingredient.tenant_id == virtual_uuid)
            )
            await db.commit()

            logger.info(
                "Existing data cleaned, proceeding with fresh clone",
                "Demo data already exists, skipping clone",
                virtual_tenant_id=virtual_tenant_id
            )
            return {
                "status": "skipped",
                "reason": "Data already exists",
                "records_cloned": 0
            }

        # Track cloning statistics
        stats = {
            "ingredients": 0,
            "stock_batches": 0,
            "stock_movements": 0,
            "alerts_generated": 0
        }

        # Mapping from base ingredient ID to virtual ingredient ID
        ingredient_id_mapping = {}
        # Mapping from base stock ID to virtual stock ID
        stock_id_mapping = {}

        # Clone Ingredients
        result = await db.execute(
            select(Ingredient).where(Ingredient.tenant_id == base_uuid)
        )
        base_ingredients = result.scalars().all()

        logger.info(
            "Found ingredients to clone",
            count=len(base_ingredients),
            base_tenant=str(base_uuid)
        )

        for ingredient in base_ingredients:
            # Transform ingredient ID using XOR to ensure consistency across services
            # This formula matches the suppliers service ID transformation
            # Formula: virtual_ingredient_id = virtual_tenant_id XOR base_ingredient_id

            base_ingredient_int = int(ingredient.id.hex, 16)
            virtual_tenant_int = int(virtual_uuid.hex, 16)
            base_tenant_int = int(base_uuid.hex, 16)

            # Reverse the original XOR to get the base ingredient ID
            # base_ingredient = base_tenant ^ base_ingredient_id
            # So: base_ingredient_id = base_tenant ^ base_ingredient
            base_ingredient_id_int = base_tenant_int ^ base_ingredient_int

            # Now apply virtual tenant XOR to get the new ingredient ID
            new_ingredient_id = uuid.UUID(int=virtual_tenant_int ^ base_ingredient_id_int)

            logger.debug(
                "Transforming ingredient ID using XOR",
                base_ingredient_id=str(ingredient.id),
                new_ingredient_id=str(new_ingredient_id),
                ingredient_sku=ingredient.sku,
                ingredient_name=ingredient.name
            )

            new_ingredient = Ingredient(
                id=new_ingredient_id,
                tenant_id=virtual_uuid,
                name=ingredient.name,
                sku=ingredient.sku,
                barcode=ingredient.barcode,
                product_type=ingredient.product_type,
                ingredient_category=ingredient.ingredient_category,
                product_category=ingredient.product_category,
                subcategory=ingredient.subcategory,
                description=ingredient.description,
                brand=ingredient.brand,
                unit_of_measure=ingredient.unit_of_measure,
                package_size=ingredient.package_size,
                average_cost=ingredient.average_cost,
                last_purchase_price=ingredient.last_purchase_price,
                standard_cost=ingredient.standard_cost,
                low_stock_threshold=ingredient.low_stock_threshold,
                reorder_point=ingredient.reorder_point,
                reorder_quantity=ingredient.reorder_quantity,
                max_stock_level=ingredient.max_stock_level,
                shelf_life_days=ingredient.shelf_life_days,
                display_life_hours=ingredient.display_life_hours,
                best_before_hours=ingredient.best_before_hours,
                storage_instructions=ingredient.storage_instructions,
                is_perishable=ingredient.is_perishable,
                is_active=ingredient.is_active,
                allergen_info=ingredient.allergen_info,
                nutritional_info=ingredient.nutritional_info
            )
            db.add(new_ingredient)
            stats["ingredients"] += 1

            # Store mapping for stock cloning
            ingredient_id_mapping[ingredient.id] = new_ingredient_id

        await db.flush()  # Ensure ingredients are persisted before stock
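The XOR re-tenanting above relies on XOR being its own inverse: base_tenant ^ (base_tenant ^ entity) == entity, so any service that knows the tenant IDs can recover the shared entity ID and re-derive the same virtual ID. A self-contained check of that round trip (the UUIDs here are arbitrary examples, not real tenant IDs):

# Round-trip check of the XOR ID transformation; example UUIDs only.
import uuid

base_tenant = uuid.UUID("a1b2c3d4-e5f6-47a8-b9c0-d1e2f3a4b5c6")
virtual_tenant = uuid.uuid4()
entity_id = uuid.uuid4()  # the "base ingredient id" before any tenant XOR

stored_id = uuid.UUID(int=base_tenant.int ^ entity_id.int)   # as seeded in the template tenant
recovered = base_tenant.int ^ stored_id.int                  # undo the template-tenant XOR
virtual_id = uuid.UUID(int=virtual_tenant.int ^ recovered)   # re-apply for the virtual tenant

assert recovered == entity_id.int  # XOR round trip holds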

        # Clone Stock batches with date adjustment
        result = await db.execute(
            select(Stock).where(Stock.tenant_id == base_uuid)
        )
        base_stocks = result.scalars().all()

        logger.info(
            "Found stock batches to clone",
            count=len(base_stocks),
            base_tenant=str(base_uuid)
        )

        for stock in base_stocks:
            # Map ingredient ID
            new_ingredient_id = ingredient_id_mapping.get(stock.ingredient_id)
            if not new_ingredient_id:
                logger.warning(
                    "Stock references non-existent ingredient, skipping",
                    stock_id=str(stock.id),
                    ingredient_id=str(stock.ingredient_id)
        # Transform and insert data
        records_cloned = 0

        # Clone ingredients
        for ingredient_data in seed_data.get('ingredients', []):
            # Transform ID
            from shared.utils.demo_id_transformer import transform_id
            try:
                ingredient_uuid = UUID(ingredient_data['id'])
                tenant_uuid = UUID(virtual_tenant_id)
                transformed_id = transform_id(ingredient_data['id'], tenant_uuid)
            except ValueError as e:
                logger.error("Failed to parse UUIDs for ID transformation",
                             ingredient_id=ingredient_data['id'],
                             virtual_tenant_id=virtual_tenant_id,
                             error=str(e))
                raise HTTPException(
                    status_code=400,
                    detail=f"Invalid UUID format in ingredient data: {str(e)}"
                )
                continue

            # Adjust dates relative to session creation
            adjusted_expiration = adjust_date_for_demo(
                stock.expiration_date,
                session_time,
                BASE_REFERENCE_DATE

            # Transform dates
            from shared.utils.demo_dates import adjust_date_for_demo
            for date_field in ['expiration_date', 'received_date', 'created_at', 'updated_at']:
                if date_field in ingredient_data:
                    try:
                        date_value = ingredient_data[date_field]
                        # Handle both string dates and date objects
                        if isinstance(date_value, str):
                            original_date = datetime.fromisoformat(date_value)
                        elif hasattr(date_value, 'isoformat'):
                            # Already a date/datetime object
                            original_date = date_value
                        else:
                            # Skip if not a valid date format
                            logger.warning("Skipping invalid date format",
                                           date_field=date_field,
                                           date_value=date_value)
                            continue

                        adjusted_date = adjust_date_for_demo(
                            original_date,
                            session_created_at_parsed
                        )
                        ingredient_data[date_field] = adjusted_date
                    except (ValueError, AttributeError) as e:
                        logger.warning("Failed to parse date, skipping",
                                       date_field=date_field,
                                       date_value=ingredient_data[date_field],
                                       error=str(e))
                        # Remove invalid date to avoid model errors
                        ingredient_data.pop(date_field, None)

            # Map category field to ingredient_category enum
            if 'category' in ingredient_data:
                category_value = ingredient_data.pop('category')
                # Convert category string to IngredientCategory enum
                from app.models.inventory import IngredientCategory
                try:
                    ingredient_data['ingredient_category'] = IngredientCategory[category_value.upper()]
                except KeyError:
                    # If category not found in enum, use OTHER
                    ingredient_data['ingredient_category'] = IngredientCategory.OTHER

            # Map unit_of_measure string to enum
            if 'unit_of_measure' in ingredient_data:
                from app.models.inventory import UnitOfMeasure
                unit_mapping = {
                    'kilograms': UnitOfMeasure.KILOGRAMS,
                    'grams': UnitOfMeasure.GRAMS,
                    'liters': UnitOfMeasure.LITERS,
                    'milliliters': UnitOfMeasure.MILLILITERS,
                    'units': UnitOfMeasure.UNITS,
                    'pieces': UnitOfMeasure.PIECES,
                    'packages': UnitOfMeasure.PACKAGES,
                    'bags': UnitOfMeasure.BAGS,
                    'boxes': UnitOfMeasure.BOXES
                }

                unit_str = ingredient_data['unit_of_measure']
                if unit_str in unit_mapping:
                    ingredient_data['unit_of_measure'] = unit_mapping[unit_str]
                else:
                    # Default to units if not found
                    ingredient_data['unit_of_measure'] = UnitOfMeasure.UNITS
                    logger.warning("Unknown unit_of_measure, defaulting to UNITS",
                                   original_unit=unit_str)

            # Note: All seed data fields now match the model schema exactly
            # No field filtering needed

            # Remove original id and tenant_id from ingredient_data to avoid conflict
            ingredient_data.pop('id', None)
            ingredient_data.pop('tenant_id', None)

            # Create ingredient
            ingredient = Ingredient(
                id=str(transformed_id),
                tenant_id=str(virtual_tenant_id),
                **ingredient_data
            )
            adjusted_received = adjust_date_for_demo(
                stock.received_date,
                session_time,
                BASE_REFERENCE_DATE
            db.add(ingredient)
            records_cloned += 1

        # Clone stock batches
        for stock_data in seed_data.get('stock_batches', []):
            # Transform ID - handle both UUID and string IDs
            from shared.utils.demo_id_transformer import transform_id
            try:
                # Try to parse as UUID first
                stock_uuid = UUID(stock_data['id'])
                tenant_uuid = UUID(virtual_tenant_id)
                transformed_id = transform_id(stock_data['id'], tenant_uuid)
            except ValueError:
                # If not a UUID, generate a deterministic UUID from the string ID
                import hashlib
                stock_id_string = stock_data['id']
                tenant_uuid = UUID(virtual_tenant_id)

                # Create a deterministic UUID from the string ID and tenant ID
                combined = f"{stock_id_string}-{tenant_uuid}"
                hash_obj = hashlib.sha256(combined.encode('utf-8'))
                transformed_id = UUID(hash_obj.hexdigest()[:32])

                logger.info("Generated UUID for non-UUID stock ID",
                            original_id=stock_id_string,
                            generated_id=str(transformed_id))
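The SHA-256 fallback above is deterministic, so re-cloning the same seed file into the same virtual tenant yields the same stock IDs. A quick illustration of that property (the stock ID strings are hypothetical):

# Deterministic-UUID property check; "flour-batch-001" is a made-up seed ID.
import hashlib
from uuid import UUID

def stable_stock_uuid(stock_id_string: str, tenant_uuid: UUID) -> UUID:
    combined = f"{stock_id_string}-{tenant_uuid}"
    return UUID(hashlib.sha256(combined.encode("utf-8")).hexdigest()[:32])

tenant = UUID("a1b2c3d4-e5f6-47a8-b9c0-d1e2f3a4b5c6")
assert stable_stock_uuid("flour-batch-001", tenant) == stable_stock_uuid("flour-batch-001", tenant)
assert stable_stock_uuid("flour-batch-001", tenant) != stable_stock_uuid("flour-batch-002", tenant)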

            # Transform dates - handle both timestamp dictionaries and ISO strings
            for date_field in ['received_date', 'expiration_date', 'best_before_date', 'original_expiration_date', 'transformation_date', 'final_expiration_date', 'created_at', 'updated_at']:
                if date_field in stock_data:
                    try:
                        date_value = stock_data[date_field]

                        # Handle timestamp dictionaries (offset_days, hour, minute)
                        if isinstance(date_value, dict) and 'offset_days' in date_value:
                            from shared.utils.demo_dates import calculate_demo_datetime
                            original_date = calculate_demo_datetime(
                                offset_days=date_value.get('offset_days', 0),
                                hour=date_value.get('hour', 0),
                                minute=date_value.get('minute', 0),
                                session_created_at=session_created_at_parsed
                            )
                        elif isinstance(date_value, str):
                            # ISO string
                            original_date = datetime.fromisoformat(date_value)
                        elif hasattr(date_value, 'isoformat'):
                            # Already a date/datetime object
                            original_date = date_value
                        else:
                            # Skip if not a valid date format
                            logger.warning("Skipping invalid date format",
                                           date_field=date_field,
                                           date_value=date_value)
                            continue

                        adjusted_stock_date = adjust_date_for_demo(
                            original_date,
                            session_created_at_parsed
                        )
                        stock_data[date_field] = adjusted_stock_date
                    except (ValueError, AttributeError) as e:
                        logger.warning("Failed to parse date, skipping",
                                       date_field=date_field,
                                       date_value=stock_data[date_field],
                                       error=str(e))
                        # Remove invalid date to avoid model errors
                        stock_data.pop(date_field, None)

            # Remove original id and tenant_id from stock_data to avoid conflict
            stock_data.pop('id', None)
            stock_data.pop('tenant_id', None)

            # Create stock batch
            stock = Stock(
                id=str(transformed_id),
                tenant_id=str(virtual_tenant_id),
                **stock_data
            )
            adjusted_best_before = adjust_date_for_demo(
                stock.best_before_date,
                session_time,
                BASE_REFERENCE_DATE
            )
            adjusted_created = adjust_date_for_demo(
                stock.created_at,
                session_time,
                BASE_REFERENCE_DATE
            ) or session_time
            db.add(stock)
            records_cloned += 1

            # Create new stock batch with new ID
            new_stock_id = uuid.uuid4()

            new_stock = Stock(
                id=new_stock_id,
                tenant_id=virtual_uuid,
                ingredient_id=new_ingredient_id,
                supplier_id=stock.supplier_id,
                batch_number=stock.batch_number,
                lot_number=stock.lot_number,
                supplier_batch_ref=stock.supplier_batch_ref,
                production_stage=stock.production_stage,
                current_quantity=stock.current_quantity,
                reserved_quantity=stock.reserved_quantity,
                available_quantity=stock.available_quantity,
                received_date=adjusted_received,
                expiration_date=adjusted_expiration,
                best_before_date=adjusted_best_before,
                unit_cost=stock.unit_cost,
                total_cost=stock.total_cost,
                storage_location=stock.storage_location,
                warehouse_zone=stock.warehouse_zone,
                shelf_position=stock.shelf_position,
                requires_refrigeration=stock.requires_refrigeration,
                requires_freezing=stock.requires_freezing,
                storage_temperature_min=stock.storage_temperature_min,
                storage_temperature_max=stock.storage_temperature_max,
                storage_humidity_max=stock.storage_humidity_max,
                shelf_life_days=stock.shelf_life_days,
                storage_instructions=stock.storage_instructions,
                is_available=stock.is_available,
                is_expired=stock.is_expired,
                quality_status=stock.quality_status,
                created_at=adjusted_created,
                updated_at=session_time
            )
            db.add(new_stock)
            stats["stock_batches"] += 1

            # Store mapping for movement cloning
            stock_id_mapping[stock.id] = new_stock_id

        await db.flush()  # Ensure stock is persisted before movements

        # Clone Stock Movements with date adjustment
        result = await db.execute(
            select(StockMovement).where(StockMovement.tenant_id == base_uuid)
        )
        base_movements = result.scalars().all()

        logger.info(
            "Found stock movements to clone",
            count=len(base_movements),
            base_tenant=str(base_uuid)
        )

        for movement in base_movements:
            # Map ingredient ID and stock ID
            new_ingredient_id = ingredient_id_mapping.get(movement.ingredient_id)
            new_stock_id = stock_id_mapping.get(movement.stock_id) if movement.stock_id else None

            if not new_ingredient_id:
                logger.warning(
                    "Movement references non-existent ingredient, skipping",
                    movement_id=str(movement.id),
                    ingredient_id=str(movement.ingredient_id)
                )
                continue

            # Adjust movement date relative to session creation
            adjusted_movement_date = adjust_date_for_demo(
                movement.movement_date,
                session_time,
                BASE_REFERENCE_DATE
            ) or session_time

            adjusted_created_at = adjust_date_for_demo(
                movement.created_at,
                session_time,
                BASE_REFERENCE_DATE
            ) or session_time

            # Create new stock movement
            new_movement = StockMovement(
                id=uuid.uuid4(),
                tenant_id=virtual_uuid,
                ingredient_id=new_ingredient_id,
                stock_id=new_stock_id,
                movement_type=movement.movement_type,
                quantity=movement.quantity,
                unit_cost=movement.unit_cost,
                total_cost=movement.total_cost,
                quantity_before=movement.quantity_before,
                quantity_after=movement.quantity_after,
                reference_number=movement.reference_number,
                supplier_id=movement.supplier_id,
                notes=movement.notes,
                reason_code=movement.reason_code,
                movement_date=adjusted_movement_date,
                created_at=adjusted_created_at,
                created_by=movement.created_by
            )
            db.add(new_movement)
            stats["stock_movements"] += 1

        # Commit all changes
        await db.commit()

        # NOTE: Alert generation removed - alerts are now generated automatically by the
        # inventory_alert_service which runs scheduled checks every 2-5 minutes.
        # This eliminates duplicate alerts and provides a more realistic demo experience.
        stats["alerts_generated"] = 0

        total_records = stats["ingredients"] + stats["stock_batches"]
        duration_ms = int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000)
        duration_ms = int((datetime.now() - start_time).total_seconds() * 1000)

        logger.info(
            "Inventory data cloning completed with date adjustment",
            "Inventory data cloned successfully",
            virtual_tenant_id=virtual_tenant_id,
            total_records=total_records,
            stats=stats,
            duration_ms=duration_ms
            records_cloned=records_cloned,
            duration_ms=duration_ms,
            ingredients_cloned=len(seed_data.get('ingredients', [])),
            stock_batches_cloned=len(seed_data.get('stock_batches', []))
        )

        return {
            "service": "inventory",
            "status": "completed",
            "records_cloned": total_records,
            "records_cloned": records_cloned,
            "duration_ms": duration_ms,
            "details": stats
            "details": {
                "ingredients": len(seed_data.get('ingredients', [])),
                "stock_batches": len(seed_data.get('stock_batches', [])),
                "virtual_tenant_id": str(virtual_tenant_id)
            }
        }

    except ValueError as e:
        logger.error("Invalid UUID format", error=str(e))
        logger.error("Invalid UUID format", error=str(e), virtual_tenant_id=virtual_tenant_id)
        raise HTTPException(status_code=400, detail=f"Invalid UUID: {str(e)}")

    except Exception as e:

@@ -411,7 +400,7 @@ async def clone_demo_data(
            "service": "inventory",
            "status": "failed",
            "records_cloned": 0,
            "duration_ms": int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000),
            "duration_ms": int((datetime.now() - start_time).total_seconds() * 1000),
            "error": str(e)
        }
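For reference, the timestamp-dictionary form handled by the stock-batch loop above presumably looks like this in the seed JSON; the field names follow the code, but the values and the batch itself are invented for illustration:

# Hypothetical seed entry showing the offset_days timestamp form (values invented).
seed_stock_batch = {
    "id": "flour-batch-001",
    "batch_number": "B-1001",
    "current_quantity": 12.5,
    "received_date": {"offset_days": -3, "hour": 6, "minute": 30},   # 3 days before session creation
    "expiration_date": {"offset_days": 4, "hour": 0, "minute": 0},   # 4 days after session creation
}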

@@ -430,101 +419,68 @@ async def clone_health_check(_: bool = Depends(verify_internal_api_key)):


@router.delete("/tenant/{virtual_tenant_id}")
async def delete_demo_data(
    virtual_tenant_id: str,
async def delete_demo_tenant_data(
    virtual_tenant_id: UUID,
    db: AsyncSession = Depends(get_db),
    _: bool = Depends(verify_internal_api_key)
):
    """
    Delete all inventory data for a virtual demo tenant

    Called by demo session cleanup service to remove ephemeral data
    when demo sessions expire or are destroyed.

    Args:
        virtual_tenant_id: Virtual tenant UUID to delete

    Returns:
        Deletion status and count of records deleted
    Delete all demo data for a virtual tenant.
    This endpoint is idempotent - safe to call multiple times.
    """
    from sqlalchemy import delete

    logger.info(
        "Deleting inventory data for virtual tenant",
        virtual_tenant_id=virtual_tenant_id
    )

    start_time = datetime.now(timezone.utc)
    start_time = datetime.now()

    records_deleted = {
        "ingredients": 0,
        "stock": 0,
        "total": 0
    }

    try:
        virtual_uuid = uuid.UUID(virtual_tenant_id)
        # Delete in reverse dependency order

        # 1. Delete stock batches (depends on ingredients)
        result = await db.execute(
            delete(Stock)
            .where(Stock.tenant_id == virtual_tenant_id)
        )
        records_deleted["stock"] = result.rowcount

        # Count records before deletion for reporting
        stock_count = await db.scalar(
            select(func.count(Stock.id)).where(Stock.tenant_id == virtual_uuid)
        )
        ingredient_count = await db.scalar(
            select(func.count(Ingredient.id)).where(Ingredient.tenant_id == virtual_uuid)
        )
        movement_count = await db.scalar(
            select(func.count(StockMovement.id)).where(StockMovement.tenant_id == virtual_uuid)
        # 2. Delete ingredients
        result = await db.execute(
            delete(Ingredient)
            .where(Ingredient.tenant_id == virtual_tenant_id)
        )
        records_deleted["ingredients"] = result.rowcount

        # Delete in correct order to respect foreign key constraints
        # 1. Delete StockMovements (references Stock)
        await db.execute(
            delete(StockMovement).where(StockMovement.tenant_id == virtual_uuid)
        )

        # 2. Delete Stock batches (references Ingredient)
        await db.execute(
            delete(Stock).where(Stock.tenant_id == virtual_uuid)
        )

        # 3. Delete Ingredients
        await db.execute(
            delete(Ingredient).where(Ingredient.tenant_id == virtual_uuid)
        )
        records_deleted["total"] = sum(records_deleted.values())

        await db.commit()

        duration_ms = int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000)

        logger.info(
            "Inventory data deleted successfully",
            virtual_tenant_id=virtual_tenant_id,
            stocks_deleted=stock_count,
            ingredients_deleted=ingredient_count,
            movements_deleted=movement_count,
            duration_ms=duration_ms
            "demo_data_deleted",
            service="inventory",
            virtual_tenant_id=str(virtual_tenant_id),
            records_deleted=records_deleted
        )

        return {
            "service": "inventory",
            "status": "deleted",
            "virtual_tenant_id": virtual_tenant_id,
            "records_deleted": {
                "stock_batches": stock_count,
                "ingredients": ingredient_count,
                "stock_movements": movement_count,
                "total": stock_count + ingredient_count + movement_count
            },
            "duration_ms": duration_ms
            "virtual_tenant_id": str(virtual_tenant_id),
            "records_deleted": records_deleted,
            "duration_ms": int((datetime.now() - start_time).total_seconds() * 1000)
        }

    except ValueError as e:
        logger.error("Invalid UUID format", error=str(e))
        raise HTTPException(status_code=400, detail=f"Invalid UUID: {str(e)}")

    except Exception as e:
        logger.error(
            "Failed to delete inventory data",
            virtual_tenant_id=virtual_tenant_id,
            error=str(e),
            exc_info=True
        )
        await db.rollback()
        logger.error(
            "demo_data_deletion_failed",
            service="inventory",
            virtual_tenant_id=str(virtual_tenant_id),
            error=str(e)
        )
        raise HTTPException(
            status_code=500,
            detail=f"Failed to delete inventory data: {str(e)}"
        )
            detail=f"Failed to delete demo data: {str(e)}"
        )
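A sketch of how the cleanup service might call this endpoint. The final mount path depends on how the router is registered (the old code used APIRouter(prefix="/internal/demo")), and the host and key handling here are assumptions:

# Illustrative only: URL and key wiring are assumptions, not part of this diff.
import httpx

async def cleanup_demo_tenant(virtual_tenant_id: str, internal_api_key: str) -> dict:
    url = f"http://inventory:8000/internal/demo/tenant/{virtual_tenant_id}"
    async with httpx.AsyncClient() as client:
        # FastAPI's Header(None) for x_internal_api_key maps to this header name.
        response = await client.delete(url, headers={"X-Internal-API-Key": internal_api_key})
        response.raise_for_status()
        return response.json()  # e.g. {"service": "inventory", "status": "deleted", ...}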

@@ -319,3 +319,89 @@ async def ml_insights_health():
            "POST /ml/insights/optimize-safety-stock"
        ]
    }


# ================================================================
# INTERNAL ENDPOINTS (for demo-session service)
# ================================================================

from fastapi import Request

# Create a separate router for internal endpoints to avoid the tenant prefix
internal_router = APIRouter(
    tags=["ML Insights - Internal"]
)


@internal_router.post("/api/v1/tenants/{tenant_id}/inventory/internal/ml/generate-safety-stock-insights")
async def generate_safety_stock_insights_internal(
    tenant_id: str,
    request: Request,
    db: AsyncSession = Depends(get_db)
):
    """
    Internal endpoint to trigger safety stock insights generation for demo sessions.

    This endpoint is called by the demo-session service after cloning data.
    It uses the same ML logic as the public endpoint but with optimized defaults.

    Security: Protected by X-Internal-Service header check.

    Args:
        tenant_id: The tenant UUID
        request: FastAPI request object
        db: Database session

    Returns:
        {
            "insights_posted": int,
            "tenant_id": str,
            "status": str
        }
    """
    # Verify internal service header
    if not request or request.headers.get("X-Internal-Service") not in ["demo-session", "internal"]:
        logger.warning("Unauthorized internal API call", tenant_id=tenant_id)
        raise HTTPException(
            status_code=403,
            detail="This endpoint is for internal service use only"
        )

    logger.info("Internal safety stock insights generation triggered", tenant_id=tenant_id)

    try:
        # Use the existing safety stock optimization logic with sensible defaults
        request_data = SafetyStockOptimizationRequest(
            product_ids=None,  # Analyze all products
            lookback_days=90,  # 3 months of history
            min_history_days=30  # Minimum 30 days required
        )

        # Call the existing safety stock optimization endpoint logic
        result = await trigger_safety_stock_optimization(
            tenant_id=tenant_id,
            request_data=request_data,
            db=db
        )

        # Return simplified response for internal use
        return {
            "insights_posted": result.total_insights_posted,
            "tenant_id": tenant_id,
            "status": "success" if result.success else "failed",
            "message": result.message,
            "products_optimized": result.products_optimized,
            "total_cost_savings": result.total_cost_savings
        }

    except Exception as e:
        logger.error(
            "Internal safety stock insights generation failed",
            tenant_id=tenant_id,
            error=str(e),
            exc_info=True
        )
        raise HTTPException(
            status_code=500,
            detail=f"Internal safety stock insights generation failed: {str(e)}"
        )

@@ -11,12 +11,14 @@ from sqlalchemy import text
from app.core.config import settings
from app.core.database import database_manager
from app.services.inventory_alert_service import InventoryAlertService
from app.services.inventory_scheduler import InventoryScheduler
from app.consumers.delivery_event_consumer import DeliveryEventConsumer
from shared.service_base import StandardFastAPIService
from shared.messaging import UnifiedEventPublisher
import asyncio

from app.api import (
    internal_demo,
    batch,
    ingredients,
    stock_entries,
@@ -29,10 +31,11 @@ from app.api import (
    dashboard,
    analytics,
    sustainability,
    internal_demo,
    audit,
    ml_insights
)
from app.api.internal_alert_trigger import router as internal_alert_trigger_router
from app.api.internal_demo import router as internal_demo_router


class InventoryService(StandardFastAPIService):
@@ -115,8 +118,14 @@ class InventoryService(StandardFastAPIService):
            await alert_service.start()
            self.logger.info("Inventory alert service started")

            # Store alert service in app state
            # Initialize inventory scheduler with alert service and database manager
            inventory_scheduler = InventoryScheduler(alert_service, self.database_manager)
            await inventory_scheduler.start()
            self.logger.info("Inventory scheduler started")

            # Store services in app state
            app.state.alert_service = alert_service
            app.state.inventory_scheduler = inventory_scheduler  # Store scheduler for manual triggering
        else:
            self.logger.error("Event publisher not initialized, alert service unavailable")

@@ -136,6 +145,11 @@ class InventoryService(StandardFastAPIService):

    async def on_shutdown(self, app: FastAPI):
        """Custom shutdown logic for inventory service"""
        # Stop inventory scheduler
        if hasattr(app.state, 'inventory_scheduler') and app.state.inventory_scheduler:
            await app.state.inventory_scheduler.stop()
            self.logger.info("Inventory scheduler stopped")

        # Cancel delivery consumer task
        if self.delivery_consumer_task and not self.delivery_consumer_task.done():
            self.delivery_consumer_task.cancel()
@@ -198,8 +212,10 @@ service.add_router(food_safety_operations.router)
service.add_router(dashboard.router)
service.add_router(analytics.router)
service.add_router(sustainability.router)
service.add_router(internal_demo.router)
service.add_router(internal_demo.router, tags=["internal-demo"])
service.add_router(ml_insights.router)  # ML insights endpoint
service.add_router(ml_insights.internal_router)  # Internal ML insights endpoint for demo cloning
service.add_router(internal_alert_trigger_router)  # Internal alert trigger for demo cloning


if __name__ == "__main__":
@@ -211,4 +227,4 @@ if __name__ == "__main__":
        port=8000,
        reload=os.getenv("RELOAD", "false").lower() == "true",
        log_level="info"
    )
    )

@@ -277,3 +277,22 @@ class FoodSafetyRepository:
        except Exception as e:
            logger.error("Failed to validate ingredient", error=str(e))
            raise

    async def mark_temperature_alert_triggered(self, log_id: UUID) -> None:
        """
        Mark a temperature log as having triggered an alert
        """
        try:
            query = text("""
                UPDATE temperature_logs
                SET alert_triggered = true
                WHERE id = :id
            """)

            await self.session.execute(query, {"id": log_id})
            await self.session.commit()

        except Exception as e:
            await self.session.rollback()
            logger.error("Failed to mark temperature alert", error=str(e), log_id=str(log_id))
            raise

@@ -1,301 +0,0 @@
# services/inventory/app/repositories/inventory_alert_repository.py
"""
Inventory Alert Repository
Data access layer for inventory alert detection and analysis
"""

from typing import List, Dict, Any
from uuid import UUID
from sqlalchemy import text
from sqlalchemy.ext.asyncio import AsyncSession
import structlog

logger = structlog.get_logger()


class InventoryAlertRepository:
    """Repository for inventory alert data access"""

    def __init__(self, session: AsyncSession):
        self.session = session

    async def get_stock_issues(self, tenant_id: UUID) -> List[Dict[str, Any]]:
        """
        Get stock level issues with CTE analysis
        Returns list of critical, low, and overstock situations
        """
        try:
            query = text("""
                WITH stock_analysis AS (
                    SELECT
                        i.id, i.name, i.tenant_id,
                        COALESCE(SUM(s.current_quantity), 0) as current_stock,
                        i.low_stock_threshold as minimum_stock,
                        i.max_stock_level as maximum_stock,
                        i.reorder_point,
                        0 as tomorrow_needed,
                        0 as avg_daily_usage,
                        7 as lead_time_days,
                        CASE
                            WHEN COALESCE(SUM(s.current_quantity), 0) < i.low_stock_threshold THEN 'critical'
                            WHEN COALESCE(SUM(s.current_quantity), 0) < i.low_stock_threshold * 1.2 THEN 'low'
                            WHEN i.max_stock_level IS NOT NULL AND COALESCE(SUM(s.current_quantity), 0) > i.max_stock_level THEN 'overstock'
                            ELSE 'normal'
                        END as status,
                        GREATEST(0, i.low_stock_threshold - COALESCE(SUM(s.current_quantity), 0)) as shortage_amount
                    FROM ingredients i
                    LEFT JOIN stock s ON s.ingredient_id = i.id AND s.is_available = true
                    WHERE i.tenant_id = :tenant_id AND i.is_active = true
                    GROUP BY i.id, i.name, i.tenant_id, i.low_stock_threshold, i.max_stock_level, i.reorder_point
                )
                SELECT * FROM stock_analysis WHERE status != 'normal'
                ORDER BY
                    CASE status
                        WHEN 'critical' THEN 1
                        WHEN 'low' THEN 2
                        WHEN 'overstock' THEN 3
                    END,
                    shortage_amount DESC
            """)

            result = await self.session.execute(query, {"tenant_id": tenant_id})
            return [dict(row._mapping) for row in result.fetchall()]

        except Exception as e:
            logger.error("Failed to get stock issues", error=str(e), tenant_id=str(tenant_id))
            raise
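Concretely, with the thresholds in this CTE: an ingredient with low_stock_threshold = 10 is 'critical' below 10 units, 'low' from 10 up to but not including 12 (10 × 1.2), 'overstock' above its max_stock_level when one is set, and 'normal' otherwise. A pure-Python restatement of the CASE logic, for illustration only:

# Mirror of the SQL CASE expression above; not part of the committed code.
from typing import Optional

def stock_status(current: float, low_threshold: float, max_level: Optional[float]) -> str:
    if current < low_threshold:
        return "critical"
    if current < low_threshold * 1.2:
        return "low"
    if max_level is not None and current > max_level:
        return "overstock"
    return "normal"

assert stock_status(8, 10, 100) == "critical"
assert stock_status(11, 10, 100) == "low"
assert stock_status(150, 10, 100) == "overstock"
assert stock_status(50, 10, 100) == "normal"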

    async def get_expiring_products(self, tenant_id: UUID, days_threshold: int = 7) -> List[Dict[str, Any]]:
        """
        Get products expiring soon or already expired
        """
        try:
            query = text("""
                SELECT
                    i.id as ingredient_id,
                    i.name as ingredient_name,
                    s.id as stock_id,
                    s.batch_number,
                    s.expiration_date,
                    s.current_quantity,
                    i.unit_of_measure,
                    s.unit_cost,
                    (s.current_quantity * s.unit_cost) as total_value,
                    CASE
                        WHEN s.expiration_date < CURRENT_DATE THEN 'expired'
                        WHEN s.expiration_date <= CURRENT_DATE + INTERVAL '1 day' THEN 'expires_today'
                        WHEN s.expiration_date <= CURRENT_DATE + INTERVAL '3 days' THEN 'expires_soon'
                        ELSE 'warning'
                    END as urgency,
                    EXTRACT(DAY FROM (s.expiration_date - CURRENT_DATE)) as days_until_expiry
                FROM stock s
                JOIN ingredients i ON s.ingredient_id = i.id
                WHERE i.tenant_id = :tenant_id
                    AND s.is_available = true
                    AND s.expiration_date <= CURRENT_DATE + (INTERVAL '1 day' * :days_threshold)
                ORDER BY s.expiration_date ASC, total_value DESC
            """)

            result = await self.session.execute(query, {
                "tenant_id": tenant_id,
                "days_threshold": days_threshold
            })
            return [dict(row._mapping) for row in result.fetchall()]

        except Exception as e:
            logger.error("Failed to get expiring products", error=str(e), tenant_id=str(tenant_id))
            raise

    async def get_temperature_breaches(self, tenant_id: UUID, hours_back: int = 24) -> List[Dict[str, Any]]:
        """
        Get temperature monitoring breaches
        """
        try:
            query = text("""
                SELECT
                    tl.id,
                    tl.equipment_id,
                    tl.equipment_name,
                    tl.storage_type,
                    tl.temperature_celsius,
                    tl.min_threshold,
                    tl.max_threshold,
                    tl.is_within_range,
                    tl.recorded_at,
                    tl.alert_triggered,
                    EXTRACT(EPOCH FROM (NOW() - tl.recorded_at))/3600 as hours_ago,
                    CASE
                        WHEN tl.temperature_celsius < tl.min_threshold
                        THEN tl.min_threshold - tl.temperature_celsius
                        WHEN tl.temperature_celsius > tl.max_threshold
                        THEN tl.temperature_celsius - tl.max_threshold
                        ELSE 0
                    END as deviation
                FROM temperature_logs tl
                WHERE tl.tenant_id = :tenant_id
                    AND tl.is_within_range = false
                    AND tl.recorded_at > NOW() - (INTERVAL '1 hour' * :hours_back)
                    AND tl.alert_triggered = false
                ORDER BY deviation DESC, tl.recorded_at DESC
            """)

            result = await self.session.execute(query, {
                "tenant_id": tenant_id,
                "hours_back": hours_back
            })
            return [dict(row._mapping) for row in result.fetchall()]

        except Exception as e:
            logger.error("Failed to get temperature breaches", error=str(e), tenant_id=str(tenant_id))
            raise

    async def mark_temperature_alert_triggered(self, log_id: UUID) -> None:
        """
        Mark a temperature log as having triggered an alert
        """
        try:
            query = text("""
                UPDATE temperature_logs
                SET alert_triggered = true
                WHERE id = :id
            """)

            await self.session.execute(query, {"id": log_id})
            await self.session.commit()

        except Exception as e:
            logger.error("Failed to mark temperature alert", error=str(e), log_id=str(log_id))
            raise

    async def get_waste_opportunities(self, tenant_id: UUID) -> List[Dict[str, Any]]:
        """
        Identify waste reduction opportunities
        """
        try:
            query = text("""
                WITH waste_analysis AS (
                    SELECT
                        i.id as ingredient_id,
                        i.name as ingredient_name,
                        i.ingredient_category,
                        COUNT(sm.id) as waste_incidents,
                        SUM(sm.quantity) as total_waste_quantity,
                        SUM(sm.total_cost) as total_waste_cost,
                        AVG(sm.quantity) as avg_waste_per_incident,
                        MAX(sm.movement_date) as last_waste_date
                    FROM stock_movements sm
                    JOIN ingredients i ON sm.ingredient_id = i.id
                    WHERE i.tenant_id = :tenant_id
                        AND sm.movement_type = 'WASTE'
                        AND sm.movement_date > NOW() - INTERVAL '30 days'
                    GROUP BY i.id, i.name, i.ingredient_category
                    HAVING COUNT(sm.id) >= 3 OR SUM(sm.total_cost) > 50
                )
                SELECT * FROM waste_analysis
                ORDER BY total_waste_cost DESC, waste_incidents DESC
                LIMIT 20
            """)

            result = await self.session.execute(query, {"tenant_id": tenant_id})
            return [dict(row._mapping) for row in result.fetchall()]

        except Exception as e:
            logger.error("Failed to get waste opportunities", error=str(e), tenant_id=str(tenant_id))
            raise

    async def get_reorder_recommendations(self, tenant_id: UUID) -> List[Dict[str, Any]]:
        """
        Get ingredients that need reordering based on stock levels and usage
        """
        try:
            query = text("""
                WITH usage_analysis AS (
                    SELECT
                        i.id,
                        i.name,
                        COALESCE(SUM(s.current_quantity), 0) as current_stock,
                        i.reorder_point,
                        i.low_stock_threshold,
                        COALESCE(SUM(sm.quantity) FILTER (WHERE sm.movement_date > NOW() - INTERVAL '7 days'), 0) / 7 as daily_usage,
                        i.preferred_supplier_id,
                        i.standard_order_quantity
                    FROM ingredients i
                    LEFT JOIN stock s ON s.ingredient_id = i.id AND s.is_available = true
                    LEFT JOIN stock_movements sm ON sm.ingredient_id = i.id
                        AND sm.movement_type = 'PRODUCTION_USE'
                        AND sm.movement_date > NOW() - INTERVAL '7 days'
                    WHERE i.tenant_id = :tenant_id
                        AND i.is_active = true
                    GROUP BY i.id, i.name, i.reorder_point, i.low_stock_threshold,
                        i.preferred_supplier_id, i.standard_order_quantity
                )
                SELECT *,
                    CASE
                        WHEN daily_usage > 0 THEN FLOOR(current_stock / NULLIF(daily_usage, 0))
                        ELSE 999
                    END as days_of_stock,
                    GREATEST(
                        standard_order_quantity,
                        CEIL(daily_usage * 14)
                    ) as recommended_order_quantity
                FROM usage_analysis
                WHERE current_stock <= reorder_point
                ORDER BY days_of_stock ASC, current_stock ASC
                LIMIT 50
            """)

            result = await self.session.execute(query, {"tenant_id": tenant_id})
            return [dict(row._mapping) for row in result.fetchall()]

        except Exception as e:
            logger.error("Failed to get reorder recommendations", error=str(e), tenant_id=str(tenant_id))
            raise

    async def get_active_tenant_ids(self) -> List[UUID]:
        """
        Get list of active tenant IDs from ingredients table
        """
        try:
            query = text("SELECT DISTINCT tenant_id FROM ingredients WHERE is_active = true")
            result = await self.session.execute(query)

            tenant_ids = []
            for row in result.fetchall():
                tenant_id = row.tenant_id
                # Convert to UUID if it's not already
                if isinstance(tenant_id, UUID):
                    tenant_ids.append(tenant_id)
                else:
                    tenant_ids.append(UUID(str(tenant_id)))
            return tenant_ids

        except Exception as e:
            logger.error("Failed to get active tenant IDs", error=str(e))
            raise

    async def get_stock_after_order(self, ingredient_id: str, order_quantity: float) -> Dict[str, Any]:
        """
        Get stock information after hypothetical order
        """
        try:
            query = text("""
                SELECT i.id, i.name,
                    COALESCE(SUM(s.current_quantity), 0) as current_stock,
                    i.low_stock_threshold as minimum_stock,
                    (COALESCE(SUM(s.current_quantity), 0) - :order_quantity) as remaining
                FROM ingredients i
                LEFT JOIN stock s ON s.ingredient_id = i.id AND s.is_available = true
                WHERE i.id = :ingredient_id
                GROUP BY i.id, i.name, i.low_stock_threshold
            """)

            result = await self.session.execute(query, {
                "ingredient_id": ingredient_id,
                "order_quantity": order_quantity
            })
            row = result.fetchone()
            return dict(row._mapping) if row else None

        except Exception as e:
            logger.error("Failed to get stock after order", error=str(e), ingredient_id=ingredient_id)
            raise
@@ -745,4 +745,176 @@ class StockRepository(BaseRepository[Stock, StockCreate, StockUpdate], BatchCoun
                         error=str(e),
                         stock_id=str(stock_id),
                         tenant_id=str(tenant_id))
            raise

    async def get_expiring_products(self, tenant_id: UUID, days_threshold: int = 7) -> List[Dict[str, Any]]:
        """
        Get products expiring soon or already expired
        """
        try:
            from sqlalchemy import text
            query = text("""
                SELECT
                    i.id as ingredient_id,
                    i.name as ingredient_name,
                    s.id as stock_id,
                    s.batch_number,
                    s.expiration_date,
                    s.current_quantity,
                    i.unit_of_measure,
                    s.unit_cost,
                    (s.current_quantity * s.unit_cost) as total_value,
                    CASE
                        WHEN s.expiration_date < CURRENT_DATE THEN 'expired'
                        WHEN s.expiration_date <= CURRENT_DATE + INTERVAL '1 day' THEN 'expires_today'
                        WHEN s.expiration_date <= CURRENT_DATE + INTERVAL '3 days' THEN 'expires_soon'
                        ELSE 'warning'
                    END as urgency,
                    EXTRACT(DAY FROM (s.expiration_date - CURRENT_DATE)) as days_until_expiry
                FROM stock s
                JOIN ingredients i ON s.ingredient_id = i.id
                WHERE i.tenant_id = :tenant_id
                    AND s.is_available = true
                    AND s.expiration_date <= CURRENT_DATE + (INTERVAL '1 day' * :days_threshold)
                ORDER BY s.expiration_date ASC, total_value DESC
            """)

            result = await self.session.execute(query, {
                "tenant_id": tenant_id,
                "days_threshold": days_threshold
            })
            return [dict(row._mapping) for row in result.fetchall()]

        except Exception as e:
            logger.error("Failed to get expiring products", error=str(e), tenant_id=str(tenant_id))
            raise

    async def get_temperature_breaches(self, tenant_id: UUID, hours_back: int = 24) -> List[Dict[str, Any]]:
        """
        Get temperature monitoring breaches
        """
        try:
            from sqlalchemy import text
            query = text("""
                SELECT
                    tl.id,
                    tl.equipment_id,
                    tl.equipment_name,
                    tl.storage_type,
                    tl.temperature_celsius,
                    tl.min_threshold,
                    tl.max_threshold,
                    tl.is_within_range,
                    tl.recorded_at,
                    tl.alert_triggered,
                    EXTRACT(EPOCH FROM (NOW() - tl.recorded_at))/3600 as hours_ago,
                    CASE
                        WHEN tl.temperature_celsius < tl.min_threshold
                        THEN tl.min_threshold - tl.temperature_celsius
                        WHEN tl.temperature_celsius > tl.max_threshold
                        THEN tl.temperature_celsius - tl.max_threshold
                        ELSE 0
                    END as deviation
                FROM temperature_logs tl
                WHERE tl.tenant_id = :tenant_id
                    AND tl.is_within_range = false
                    AND tl.recorded_at > NOW() - (INTERVAL '1 hour' * :hours_back)
                    AND tl.alert_triggered = false
                ORDER BY deviation DESC, tl.recorded_at DESC
            """)

            result = await self.session.execute(query, {
                "tenant_id": tenant_id,
                "hours_back": hours_back
            })
            return [dict(row._mapping) for row in result.fetchall()]

        except Exception as e:
            logger.error("Failed to get temperature breaches", error=str(e), tenant_id=str(tenant_id))
            raise
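The deviation column measures how far outside the allowed band a reading sits, in degrees, so the worst breaches sort first: a 10.5 °C reading against a 2–8 °C band deviates by 2.5, while a -1.0 °C reading against the same band deviates by 3.0 and sorts ahead of it. A small restatement, for illustration only:

# Mirror of the SQL deviation CASE above; not part of the committed code.
def breach_deviation(temp_c: float, min_c: float, max_c: float) -> float:
    if temp_c < min_c:
        return min_c - temp_c
    if temp_c > max_c:
        return temp_c - max_c
    return 0.0

assert breach_deviation(10.5, 2.0, 8.0) == 2.5
assert breach_deviation(-1.0, 2.0, 8.0) == 3.0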

    async def get_waste_opportunities(self, tenant_id: UUID) -> List[Dict[str, Any]]:
        """
        Identify waste reduction opportunities
        """
        try:
            from sqlalchemy import text
            query = text("""
                WITH waste_analysis AS (
                    SELECT
                        i.id as ingredient_id,
                        i.name as ingredient_name,
                        i.ingredient_category,
                        COUNT(sm.id) as waste_incidents,
                        SUM(sm.quantity) as total_waste_quantity,
                        SUM(sm.total_cost) as total_waste_cost,
                        AVG(sm.quantity) as avg_waste_per_incident,
                        MAX(sm.movement_date) as last_waste_date
                    FROM stock_movements sm
                    JOIN ingredients i ON sm.ingredient_id = i.id
                    WHERE i.tenant_id = :tenant_id
                        AND sm.movement_type = 'WASTE'
                        AND sm.movement_date > NOW() - INTERVAL '30 days'
                    GROUP BY i.id, i.name, i.ingredient_category
                    HAVING COUNT(sm.id) >= 3 OR SUM(sm.total_cost) > 50
                )
                SELECT * FROM waste_analysis
                ORDER BY total_waste_cost DESC, waste_incidents DESC
                LIMIT 20
            """)

            result = await self.session.execute(query, {"tenant_id": tenant_id})
            return [dict(row._mapping) for row in result.fetchall()]

        except Exception as e:
            logger.error("Failed to get waste opportunities", error=str(e), tenant_id=str(tenant_id))
            raise

    async def get_reorder_recommendations(self, tenant_id: UUID) -> List[Dict[str, Any]]:
        """
        Get ingredients that need reordering based on stock levels and usage
        """
        try:
            from sqlalchemy import text
            query = text("""
                WITH usage_analysis AS (
                    SELECT
                        i.id,
                        i.name,
                        COALESCE(SUM(s.current_quantity), 0) as current_stock,
                        i.reorder_point,
                        i.low_stock_threshold,
                        COALESCE(SUM(sm.quantity) FILTER (WHERE sm.movement_date > NOW() - INTERVAL '7 days'), 0) / 7 as daily_usage,
                        i.preferred_supplier_id,
                        i.standard_order_quantity
                    FROM ingredients i
                    LEFT JOIN stock s ON s.ingredient_id = i.id AND s.is_available = true
                    LEFT JOIN stock_movements sm ON sm.ingredient_id = i.id
                        AND sm.movement_type = 'PRODUCTION_USE'
                        AND sm.movement_date > NOW() - INTERVAL '7 days'
                    WHERE i.tenant_id = :tenant_id
                        AND i.is_active = true
                    GROUP BY i.id, i.name, i.reorder_point, i.low_stock_threshold,
                        i.preferred_supplier_id, i.standard_order_quantity
                )
                SELECT *,
                    CASE
                        WHEN daily_usage > 0 THEN FLOOR(current_stock / NULLIF(daily_usage, 0))
                        ELSE 999
                    END as days_of_stock,
                    GREATEST(
                        standard_order_quantity,
                        CEIL(daily_usage * 14)
                    ) as recommended_order_quantity
                FROM usage_analysis
                WHERE current_stock <= reorder_point
                ORDER BY days_of_stock ASC, current_stock ASC
                LIMIT 50
            """)

            result = await self.session.execute(query, {"tenant_id": tenant_id})
            return [dict(row._mapping) for row in result.fetchall()]

        except Exception as e:
            logger.error("Failed to get reorder recommendations", error=str(e), tenant_id=str(tenant_id))
            raise
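The derived columns work out like this: with 20 units on hand and 35 units used over the past 7 days, daily_usage is 5, days_of_stock is FLOOR(20 / 5) = 4, and recommended_order_quantity is GREATEST(standard_order_quantity, CEIL(5 × 14)) = at least 70, i.e. roughly two weeks of cover. A small restatement, for illustration only:

# Mirror of the SQL days_of_stock / recommended_order_quantity math; not part of the committed code.
import math

def reorder_math(current_stock: float, used_last_7_days: float, standard_order_quantity: float):
    daily_usage = used_last_7_days / 7
    days_of_stock = math.floor(current_stock / daily_usage) if daily_usage > 0 else 999
    recommended = max(standard_order_quantity, math.ceil(daily_usage * 14))
    return days_of_stock, recommended

assert reorder_math(20, 35, 40) == (4, 70)    # two weeks of cover beats the standard order
assert reorder_math(20, 35, 100) == (4, 100)  # standard order wins when larger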

@@ -12,7 +12,6 @@ from datetime import datetime
import structlog

from shared.messaging import UnifiedEventPublisher, EVENT_TYPES
from app.repositories.inventory_alert_repository import InventoryAlertRepository

logger = structlog.get_logger()

@@ -188,10 +187,9 @@ class InventoryAlertService:

        await self.publisher.publish_alert(
            tenant_id=tenant_id,
            event_type="expired_products",
            event_domain="inventory",
            event_type="inventory.expired_products",
            severity="urgent",
            metadata=metadata
            data=metadata
        )

        logger.info(
@@ -222,10 +220,9 @@ class InventoryAlertService:

        await self.publisher.publish_alert(
            tenant_id=tenant_id,
            event_type="urgent_expiry",
            event_domain="inventory",
            event_type="inventory.urgent_expiry",
            severity="high",
            metadata=metadata
            data=metadata
        )

        logger.info(
@@ -256,10 +253,9 @@ class InventoryAlertService:

        await self.publisher.publish_alert(
            tenant_id=tenant_id,
            event_type="overstock_warning",
            event_domain="inventory",
            event_type="inventory.overstock_warning",
            severity="medium",
            metadata=metadata
            data=metadata
        )

        logger.info(
@@ -287,10 +283,9 @@ class InventoryAlertService:

        await self.publisher.publish_alert(
            tenant_id=tenant_id,
            event_type="expired_batches_auto_processed",
            event_domain="inventory",
            event_type="inventory.expired_batches_auto_processed",
            severity="medium",
            metadata=metadata
            data=metadata
        )

        logger.info(

services/inventory/app/services/inventory_scheduler.py — new file, 1046 lines (file diff suppressed because it is too large)

@@ -16,7 +16,7 @@ from sqlalchemy import text
from sqlalchemy.ext.asyncio import AsyncSession
from app.core.config import settings
from app.repositories.stock_movement_repository import StockMovementRepository
from app.repositories.inventory_alert_repository import InventoryAlertRepository
from app.repositories.food_safety_repository import FoodSafetyRepository
from shared.clients.production_client import create_production_client

logger = structlog.get_logger()
@@ -320,9 +320,9 @@ class SustainabilityService:
            'damaged_inventory': inventory_waste * 0.3,  # Estimate: 30% damaged
        }

        # Get waste incidents from inventory alert repository
        alert_repo = InventoryAlertRepository(db)
        waste_opportunities = await alert_repo.get_waste_opportunities(tenant_id)
        # Get waste incidents from food safety repository
        food_safety_repo = FoodSafetyRepository(db)
        waste_opportunities = await food_safety_repo.get_waste_opportunities(tenant_id)

        # Sum up all waste incidents for the period
        total_waste_incidents = sum(item['waste_incidents'] for item in waste_opportunities) if waste_opportunities else 0