Initial commit - production deployment
0
services/inventory/app/__init__.py
Normal file
0
services/inventory/app/api/__init__.py
Normal file
314
services/inventory/app/api/analytics.py
Normal file
@@ -0,0 +1,314 @@
# services/inventory/app/api/analytics.py
"""
Analytics API endpoints for Inventory Service
Following standardized URL structure: /api/v1/tenants/{tenant_id}/inventory/analytics/{operation}
Requires: Professional or Enterprise subscription tier
"""

from datetime import datetime, timedelta
from typing import List, Optional
from uuid import UUID
from fastapi import APIRouter, Depends, HTTPException, Query, Path, status
from sqlalchemy.ext.asyncio import AsyncSession
import structlog

from shared.auth.decorators import get_current_user_dep
from shared.auth.access_control import analytics_tier_required
from app.core.database import get_db
from app.services.inventory_service import InventoryService
from app.services.dashboard_service import DashboardService
from app.services.food_safety_service import FoodSafetyService
from app.schemas.dashboard import (
    InventoryAnalytics,
    BusinessModelInsights,
)
from shared.routing import RouteBuilder

logger = structlog.get_logger()

# Create route builder for consistent URL structure
route_builder = RouteBuilder('inventory')

router = APIRouter(tags=["inventory-analytics"])


# ===== Dependency Injection =====

async def get_dashboard_service(db: AsyncSession = Depends(get_db)) -> DashboardService:
    """Get dashboard service with dependencies"""
    return DashboardService(
        inventory_service=InventoryService(),
        food_safety_service=FoodSafetyService()
    )


# ===== ANALYTICS ENDPOINTS (Professional/Enterprise Only) =====

@router.get(
    route_builder.build_analytics_route("inventory-insights"),
    response_model=InventoryAnalytics
)
@analytics_tier_required
async def get_inventory_analytics(
    tenant_id: UUID = Path(...),
    days_back: int = Query(30, ge=1, le=365, description="Number of days to analyze"),
    current_user: dict = Depends(get_current_user_dep),
    dashboard_service: DashboardService = Depends(get_dashboard_service),
    db: AsyncSession = Depends(get_db)
):
    """
    Get advanced inventory analytics (Professional/Enterprise only)

    Provides:
    - Stock turnover rates
    - Inventory valuation trends
    - ABC analysis
    - Stockout risk predictions
    - Seasonal patterns
    """
    try:
        analytics = await dashboard_service.get_inventory_analytics(db, tenant_id, days_back)

        logger.info("Inventory analytics retrieved",
                    tenant_id=str(tenant_id),
                    days_analyzed=days_back,
                    user_id=current_user.get('user_id'))

        return analytics

    except Exception as e:
        logger.error("Error getting inventory analytics",
                     tenant_id=str(tenant_id),
                     error=str(e))
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Failed to retrieve inventory analytics"
        )


@router.get(
    route_builder.build_analytics_route("business-model"),
    response_model=BusinessModelInsights
)
@analytics_tier_required
async def get_business_model_insights(
    tenant_id: UUID = Path(...),
    current_user: dict = Depends(get_current_user_dep),
    dashboard_service: DashboardService = Depends(get_dashboard_service),
    db: AsyncSession = Depends(get_db)
):
    """
    Get business model insights based on inventory patterns (Professional/Enterprise only)

    Analyzes inventory patterns to provide insights on:
    - Detected business model (retail, wholesale, production, etc.)
    - Product mix recommendations
    - Inventory optimization suggestions
    """
    try:
        insights = await dashboard_service.get_business_model_insights(db, tenant_id)

        logger.info("Business model insights retrieved",
                    tenant_id=str(tenant_id),
                    detected_model=insights.detected_model,
                    user_id=current_user.get('user_id'))

        return insights

    except Exception as e:
        logger.error("Error getting business model insights",
                     tenant_id=str(tenant_id),
                     error=str(e))
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Failed to retrieve business model insights"
        )


@router.get(
    route_builder.build_analytics_route("turnover-rate"),
    response_model=dict
)
@analytics_tier_required
async def get_inventory_turnover_rate(
    tenant_id: UUID = Path(...),
    start_date: Optional[datetime] = Query(None, description="Start date for analysis"),
    end_date: Optional[datetime] = Query(None, description="End date for analysis"),
    category: Optional[str] = Query(None, description="Filter by category"),
    current_user: dict = Depends(get_current_user_dep),
    db: AsyncSession = Depends(get_db)
):
    """
    Calculate inventory turnover rate (Professional/Enterprise only)

    Metrics:
    - Overall turnover rate
    - By category
    - By product
    - Trend analysis
    """
    try:
        service = InventoryService()

        # Set default dates if not provided
        if not end_date:
            end_date = datetime.now()
        if not start_date:
            start_date = end_date - timedelta(days=90)

        # Calculate turnover metrics
        turnover_data = await service.calculate_turnover_rate(
            tenant_id,
            start_date,
            end_date,
            category
        )

        logger.info("Turnover rate calculated",
                    tenant_id=str(tenant_id),
                    category=category,
                    user_id=current_user.get('user_id'))

        return turnover_data

    except Exception as e:
        logger.error("Error calculating turnover rate",
                     tenant_id=str(tenant_id),
                     error=str(e))
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Failed to calculate turnover rate"
        )


@router.get(
    route_builder.build_analytics_route("abc-analysis"),
    response_model=dict
)
@analytics_tier_required
async def get_abc_analysis(
    tenant_id: UUID = Path(...),
    days_back: int = Query(90, ge=30, le=365, description="Days to analyze"),
    current_user: dict = Depends(get_current_user_dep),
    db: AsyncSession = Depends(get_db)
):
    """
    Perform ABC analysis on inventory (Professional/Enterprise only)

    Categorizes inventory items by:
    - A: High-value items requiring tight control
    - B: Moderate-value items with moderate control
    - C: Low-value items with simple control
    """
    try:
        service = InventoryService()

        abc_analysis = await service.perform_abc_analysis(tenant_id, days_back)

        logger.info("ABC analysis completed",
                    tenant_id=str(tenant_id),
                    days_analyzed=days_back,
                    user_id=current_user.get('user_id'))

        return abc_analysis

    except Exception as e:
        logger.error("Error performing ABC analysis",
                     tenant_id=str(tenant_id),
                     error=str(e))
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Failed to perform ABC analysis"
        )


@router.get(
    route_builder.build_analytics_route("stockout-predictions"),
    response_model=dict
)
@analytics_tier_required
async def get_stockout_predictions(
    tenant_id: UUID = Path(...),
    forecast_days: int = Query(30, ge=7, le=90, description="Days to forecast"),
    current_user: dict = Depends(get_current_user_dep),
    db: AsyncSession = Depends(get_db)
):
    """
    Predict potential stockouts (Professional/Enterprise only)

    Provides:
    - Items at risk of stockout
    - Predicted stockout dates
    - Recommended reorder quantities
    - Lead time considerations
    """
    try:
        service = InventoryService()

        predictions = await service.predict_stockouts(tenant_id, forecast_days)

        logger.info("Stockout predictions generated",
                    tenant_id=str(tenant_id),
                    forecast_days=forecast_days,
                    at_risk_items=len(predictions.get('items_at_risk', [])),
                    user_id=current_user.get('user_id'))

        return predictions

    except Exception as e:
        logger.error("Error predicting stockouts",
                     tenant_id=str(tenant_id),
                     error=str(e))
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Failed to predict stockouts"
        )


@router.get(
    route_builder.build_analytics_route("waste-analysis"),
    response_model=dict
)
@analytics_tier_required
async def get_waste_analysis(
    tenant_id: UUID = Path(...),
    start_date: Optional[datetime] = Query(None, description="Start date"),
    end_date: Optional[datetime] = Query(None, description="End date"),
    current_user: dict = Depends(get_current_user_dep),
    db: AsyncSession = Depends(get_db)
):
    """
    Analyze inventory waste and expiration (Professional/Enterprise only)

    Metrics:
    - Total waste value
    - Waste by category
    - Expiration patterns
    - Optimization recommendations
    """
    try:
        service = InventoryService()

        # Set default dates
        if not end_date:
            end_date = datetime.now()
        if not start_date:
            start_date = end_date - timedelta(days=30)

        waste_analysis = await service.analyze_waste(tenant_id, start_date, end_date)

        logger.info("Waste analysis completed",
                    tenant_id=str(tenant_id),
                    total_waste_value=waste_analysis.get('total_waste_value', 0),
                    user_id=current_user.get('user_id'))

        return waste_analysis

    except Exception as e:
        logger.error("Error analyzing waste",
                     tenant_id=str(tenant_id),
                     error=str(e))
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Failed to analyze waste"
        )
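Note: every route above is registered through shared.routing.RouteBuilder rather than a hard-coded prefix. The builder itself is not part of this commit, so the following is only a sketch of the contract the decorators appear to assume, derived from the URL structure documented in the module docstring; the class and method bodies are illustrative, not the shipped implementation.

# Hypothetical sketch of the RouteBuilder contract assumed by analytics.py.
class RouteBuilderSketch:
    def __init__(self, service: str):
        self.service = service  # e.g. 'inventory'

    def build_analytics_route(self, operation: str) -> str:
        # Mirrors the documented structure:
        # /api/v1/tenants/{tenant_id}/inventory/analytics/{operation}
        return f"/api/v1/tenants/{{tenant_id}}/{self.service}/analytics/{operation}"

assert RouteBuilderSketch('inventory').build_analytics_route("abc-analysis") == \
    "/api/v1/tenants/{tenant_id}/inventory/analytics/abc-analysis"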
237
services/inventory/app/api/audit.py
Normal file
@@ -0,0 +1,237 @@
# services/inventory/app/api/audit.py
"""
Audit Logs API - Retrieve audit trail for inventory service
"""

from fastapi import APIRouter, Depends, HTTPException, Query, Path, status
from typing import Optional, Dict, Any
from uuid import UUID
from datetime import datetime
import structlog
from sqlalchemy import select, func, and_
from sqlalchemy.ext.asyncio import AsyncSession

from app.models import AuditLog
from shared.auth.decorators import get_current_user_dep
from shared.auth.access_control import require_user_role
from shared.routing import RouteBuilder
from shared.models.audit_log_schemas import (
    AuditLogResponse,
    AuditLogListResponse,
    AuditLogStatsResponse
)
from app.core.database import database_manager

route_builder = RouteBuilder('inventory')
router = APIRouter(tags=["audit-logs"])
logger = structlog.get_logger()


async def get_db():
    """Database session dependency"""
    async with database_manager.get_session() as session:
        yield session


@router.get(
    route_builder.build_base_route("audit-logs"),
    response_model=AuditLogListResponse
)
@require_user_role(['admin', 'owner'])
async def get_audit_logs(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    start_date: Optional[datetime] = Query(None, description="Filter logs from this date"),
    end_date: Optional[datetime] = Query(None, description="Filter logs until this date"),
    user_id: Optional[UUID] = Query(None, description="Filter by user ID"),
    action: Optional[str] = Query(None, description="Filter by action type"),
    resource_type: Optional[str] = Query(None, description="Filter by resource type"),
    severity: Optional[str] = Query(None, description="Filter by severity level"),
    search: Optional[str] = Query(None, description="Search in description field"),
    limit: int = Query(100, ge=1, le=1000, description="Number of records to return"),
    offset: int = Query(0, ge=0, description="Number of records to skip"),
    current_user: Dict[str, Any] = Depends(get_current_user_dep),
    db: AsyncSession = Depends(get_db)
):
    """
    Get audit logs for inventory service.
    Requires admin or owner role.
    """
    try:
        logger.info(
            "Retrieving audit logs",
            tenant_id=tenant_id,
            user_id=current_user.get("user_id"),
            filters={
                "start_date": start_date,
                "end_date": end_date,
                "action": action,
                "resource_type": resource_type,
                "severity": severity
            }
        )

        # Build query filters
        filters = [AuditLog.tenant_id == tenant_id]

        if start_date:
            filters.append(AuditLog.created_at >= start_date)
        if end_date:
            filters.append(AuditLog.created_at <= end_date)
        if user_id:
            filters.append(AuditLog.user_id == user_id)
        if action:
            filters.append(AuditLog.action == action)
        if resource_type:
            filters.append(AuditLog.resource_type == resource_type)
        if severity:
            filters.append(AuditLog.severity == severity)
        if search:
            filters.append(AuditLog.description.ilike(f"%{search}%"))

        # Count total matching records
        count_query = select(func.count()).select_from(AuditLog).where(and_(*filters))
        total_result = await db.execute(count_query)
        total = total_result.scalar() or 0

        # Fetch paginated results
        query = (
            select(AuditLog)
            .where(and_(*filters))
            .order_by(AuditLog.created_at.desc())
            .limit(limit)
            .offset(offset)
        )

        result = await db.execute(query)
        audit_logs = result.scalars().all()

        # Convert to response models
        items = [AuditLogResponse.from_orm(log) for log in audit_logs]

        logger.info(
            "Successfully retrieved audit logs",
            tenant_id=tenant_id,
            total=total,
            returned=len(items)
        )

        return AuditLogListResponse(
            items=items,
            total=total,
            limit=limit,
            offset=offset,
            has_more=(offset + len(items)) < total
        )

    except Exception as e:
        logger.error(
            "Failed to retrieve audit logs",
            error=str(e),
            tenant_id=tenant_id
        )
        raise HTTPException(
            status_code=500,
            detail=f"Failed to retrieve audit logs: {str(e)}"
        )


@router.get(
    route_builder.build_base_route("audit-logs/stats"),
    response_model=AuditLogStatsResponse
)
@require_user_role(['admin', 'owner'])
async def get_audit_log_stats(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    start_date: Optional[datetime] = Query(None, description="Filter logs from this date"),
    end_date: Optional[datetime] = Query(None, description="Filter logs until this date"),
    current_user: Dict[str, Any] = Depends(get_current_user_dep),
    db: AsyncSession = Depends(get_db)
):
    """
    Get audit log statistics for inventory service.
    Requires admin or owner role.
    """
    try:
        logger.info(
            "Retrieving audit log statistics",
            tenant_id=tenant_id,
            user_id=current_user.get("user_id")
        )

        # Build base filters
        filters = [AuditLog.tenant_id == tenant_id]
        if start_date:
            filters.append(AuditLog.created_at >= start_date)
        if end_date:
            filters.append(AuditLog.created_at <= end_date)

        # Total events
        count_query = select(func.count()).select_from(AuditLog).where(and_(*filters))
        total_result = await db.execute(count_query)
        total_events = total_result.scalar() or 0

        # Events by action
        action_query = (
            select(AuditLog.action, func.count().label('count'))
            .where(and_(*filters))
            .group_by(AuditLog.action)
        )
        action_result = await db.execute(action_query)
        events_by_action = {row.action: row.count for row in action_result}

        # Events by severity
        severity_query = (
            select(AuditLog.severity, func.count().label('count'))
            .where(and_(*filters))
            .group_by(AuditLog.severity)
        )
        severity_result = await db.execute(severity_query)
        events_by_severity = {row.severity: row.count for row in severity_result}

        # Events by resource type
        resource_query = (
            select(AuditLog.resource_type, func.count().label('count'))
            .where(and_(*filters))
            .group_by(AuditLog.resource_type)
        )
        resource_result = await db.execute(resource_query)
        events_by_resource_type = {row.resource_type: row.count for row in resource_result}

        # Date range
        date_range_query = (
            select(
                func.min(AuditLog.created_at).label('min_date'),
                func.max(AuditLog.created_at).label('max_date')
            )
            .where(and_(*filters))
        )
        date_result = await db.execute(date_range_query)
        date_row = date_result.one()

        logger.info(
            "Successfully retrieved audit log statistics",
            tenant_id=tenant_id,
            total_events=total_events
        )

        return AuditLogStatsResponse(
            total_events=total_events,
            events_by_action=events_by_action,
            events_by_severity=events_by_severity,
            events_by_resource_type=events_by_resource_type,
            date_range={
                "min": date_row.min_date,
                "max": date_row.max_date
            }
        )

    except Exception as e:
        logger.error(
            "Failed to retrieve audit log statistics",
            error=str(e),
            tenant_id=tenant_id
        )
        raise HTTPException(
            status_code=500,
            detail=f"Failed to retrieve audit log statistics: {str(e)}"
        )
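Note: a caller-side sketch of the audit-log listing endpoint above. The base URL and token are placeholders, and the assumption that build_base_route("audit-logs") expands to /api/v1/tenants/{tenant_id}/inventory/audit-logs follows the URL convention documented in analytics.py, not anything confirmed by this diff.

# Hypothetical client call against the audit-log endpoint.
import httpx

async def fetch_critical_audit_logs(tenant_id: str, token: str) -> dict:
    # Assumed expansion of build_base_route("audit-logs"); adjust to the real mount.
    url = f"http://localhost:8000/api/v1/tenants/{tenant_id}/inventory/audit-logs"
    async with httpx.AsyncClient() as client:
        resp = await client.get(
            url,
            params={"severity": "critical", "limit": 50, "offset": 0},
            headers={"Authorization": f"Bearer {token}"},
        )
    resp.raise_for_status()
    # AuditLogListResponse shape: items, total, limit, offset, has_more
    return resp.json()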
149
services/inventory/app/api/batch.py
Normal file
@@ -0,0 +1,149 @@
# services/inventory/app/api/batch.py
"""
Inventory Batch API - Batch operations for enterprise dashboards

Phase 2 optimization: Eliminate N+1 query patterns by fetching inventory data
for multiple tenants in a single request.
"""

from fastapi import APIRouter, Depends, HTTPException, Body
from typing import List, Dict, Any
from uuid import UUID
from pydantic import BaseModel, Field
from sqlalchemy.ext.asyncio import AsyncSession
import structlog
import asyncio

from app.core.database import get_db
from app.services.dashboard_service import DashboardService
from app.services.inventory_service import InventoryService
from shared.auth.decorators import get_current_user_dep

router = APIRouter(tags=["inventory-batch"])
logger = structlog.get_logger()


class InventorySummaryBatchRequest(BaseModel):
    """Request model for batch inventory summary"""
    tenant_ids: List[str] = Field(..., description="List of tenant IDs", max_length=100)


class InventorySummary(BaseModel):
    """Inventory summary for a single tenant"""
    tenant_id: str
    total_value: float
    out_of_stock_count: int
    low_stock_count: int
    adequate_stock_count: int
    total_ingredients: int


@router.post("/batch/inventory-summary", response_model=Dict[str, InventorySummary])
async def get_inventory_summary_batch(
    request: InventorySummaryBatchRequest = Body(...),
    current_user: Dict[str, Any] = Depends(get_current_user_dep),
    db: AsyncSession = Depends(get_db)
):
    """
    Get inventory summary for multiple tenants in a single request.

    Optimized for enterprise dashboards to eliminate N+1 query patterns.
    Fetches inventory data for all tenants in parallel.

    Args:
        request: Batch request with tenant IDs

    Returns:
        Dictionary mapping tenant_id -> inventory summary

    Example:
        POST /api/v1/inventory/batch/inventory-summary
        {
            "tenant_ids": ["tenant-1", "tenant-2", "tenant-3"]
        }

    Response:
        {
            "tenant-1": {"tenant_id": "tenant-1", "total_value": 15000, ...},
            "tenant-2": {"tenant_id": "tenant-2", "total_value": 12000, ...},
            "tenant-3": {"tenant_id": "tenant-3", "total_value": 18000, ...}
        }
    """
    try:
        if len(request.tenant_ids) > 100:
            raise HTTPException(
                status_code=400,
                detail="Maximum 100 tenant IDs allowed per batch request"
            )

        if not request.tenant_ids:
            return {}

        logger.info(
            "Batch fetching inventory summaries",
            tenant_count=len(request.tenant_ids)
        )

        async def fetch_tenant_inventory(tenant_id: str) -> tuple[str, InventorySummary]:
            """Fetch inventory summary for a single tenant"""
            try:
                tenant_uuid = UUID(tenant_id)
                dashboard_service = DashboardService(
                    inventory_service=InventoryService(),
                    food_safety_service=None
                )

                overview = await dashboard_service.get_inventory_overview(db, tenant_uuid)

                return tenant_id, InventorySummary(
                    tenant_id=tenant_id,
                    total_value=float(overview.get('total_value', 0)),
                    out_of_stock_count=int(overview.get('out_of_stock_count', 0)),
                    low_stock_count=int(overview.get('low_stock_count', 0)),
                    adequate_stock_count=int(overview.get('adequate_stock_count', 0)),
                    total_ingredients=int(overview.get('total_ingredients', 0))
                )
            except Exception as e:
                logger.warning(
                    "Failed to fetch inventory for tenant in batch",
                    tenant_id=tenant_id,
                    error=str(e)
                )
                return tenant_id, InventorySummary(
                    tenant_id=tenant_id,
                    total_value=0.0,
                    out_of_stock_count=0,
                    low_stock_count=0,
                    adequate_stock_count=0,
                    total_ingredients=0
                )

        # Fetch all tenant inventory data in parallel
        tasks = [fetch_tenant_inventory(tid) for tid in request.tenant_ids]
        results = await asyncio.gather(*tasks, return_exceptions=True)

        # Build result dictionary
        result_dict = {}
        for result in results:
            if isinstance(result, Exception):
                logger.error("Exception in batch inventory fetch", error=str(result))
                continue
            tenant_id, summary = result
            result_dict[tenant_id] = summary

        logger.info(
            "Batch inventory summaries retrieved",
            requested_count=len(request.tenant_ids),
            successful_count=len(result_dict)
        )

        return result_dict

    except HTTPException:
        raise
    except Exception as e:
        logger.error("Error in batch inventory summary", error=str(e), exc_info=True)
        raise HTTPException(
            status_code=500,
            detail=f"Failed to fetch batch inventory summaries: {str(e)}"
        )
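Note: the docstring above already documents the request/response shape; a caller-side sketch for completeness (the path is taken from that docstring, host and token are placeholders). One caveat worth flagging: all of the gathered fetch_tenant_inventory tasks share the single request-scoped AsyncSession, and SQLAlchemy's AsyncSession is not safe for concurrent use, so achieving true parallelism here would likely require one session per task.

# Caller-side sketch for the batch endpoint.
import httpx

def fetch_network_summaries(tenant_ids: list[str], token: str) -> dict:
    resp = httpx.post(
        "http://localhost:8000/api/v1/inventory/batch/inventory-summary",
        json={"tenant_ids": tenant_ids},
        headers={"Authorization": f"Bearer {token}"},
        timeout=30.0,
    )
    resp.raise_for_status()
    # Maps tenant_id -> {tenant_id, total_value, out_of_stock_count, ...}
    return resp.json()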
498
services/inventory/app/api/dashboard.py
Normal file
@@ -0,0 +1,498 @@
# ================================================================
# services/inventory/app/api/dashboard.py
# ================================================================
"""
Dashboard API endpoints for Inventory Service
"""

from datetime import datetime, timedelta
from typing import List, Optional
from uuid import UUID
from fastapi import APIRouter, Depends, HTTPException, Query, Path, status
from sqlalchemy.ext.asyncio import AsyncSession
import structlog

from shared.auth.decorators import get_current_user_dep
from shared.auth.access_control import require_user_role, analytics_tier_required
from shared.routing import RouteBuilder
from app.core.database import get_db
from app.services.inventory_service import InventoryService
from app.services.food_safety_service import FoodSafetyService
from app.services.dashboard_service import DashboardService
from app.schemas.dashboard import (
    InventoryDashboardSummary,
    FoodSafetyDashboard,
    BusinessModelInsights,
    InventoryAnalytics,
    DashboardFilter,
    AlertsFilter,
    StockStatusSummary,
    AlertSummary,
    RecentActivity
)
from app.utils.cache import get_cached, set_cached, make_cache_key

logger = structlog.get_logger()

# Create route builder for consistent URL structure
route_builder = RouteBuilder('inventory')

router = APIRouter(tags=["dashboard"])


# ===== Dependency Injection =====

async def get_dashboard_service(db: AsyncSession = Depends(get_db)) -> DashboardService:
    """Get dashboard service with dependencies"""
    return DashboardService(
        inventory_service=InventoryService(),
        food_safety_service=FoodSafetyService()
    )


# ===== Main Dashboard Endpoints =====

@router.get(
    route_builder.build_dashboard_route("summary"),
    response_model=InventoryDashboardSummary
)
async def get_inventory_dashboard_summary(
    tenant_id: UUID = Path(...),
    filters: Optional[DashboardFilter] = None,
    current_user: dict = Depends(get_current_user_dep),
    dashboard_service: DashboardService = Depends(get_dashboard_service),
    db: AsyncSession = Depends(get_db)
):
    """Get comprehensive inventory dashboard summary with caching (30s TTL)"""
    try:
        # PHASE 2: Check cache first (only if no filters applied)
        cache_key = None
        if filters is None:
            cache_key = make_cache_key("inventory_dashboard", str(tenant_id))
            cached_result = await get_cached(cache_key)
            if cached_result is not None:
                logger.debug("Cache hit for inventory dashboard", cache_key=cache_key, tenant_id=str(tenant_id))
                return InventoryDashboardSummary(**cached_result)

        # Cache miss or filters applied - fetch from database
        summary = await dashboard_service.get_inventory_dashboard_summary(db, tenant_id, filters)

        # PHASE 2: Cache the result (30s TTL for inventory levels)
        if cache_key:
            await set_cached(cache_key, summary.model_dump(), ttl=30)
            logger.debug("Cached inventory dashboard", cache_key=cache_key, ttl=30, tenant_id=str(tenant_id))

        logger.info("Dashboard summary retrieved",
                    tenant_id=str(tenant_id),
                    total_ingredients=summary.total_ingredients)

        return summary

    except Exception as e:
        logger.error("Error getting dashboard summary",
                     tenant_id=str(tenant_id),
                     error=str(e))
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Failed to retrieve dashboard summary"
        )


@router.get(
    route_builder.build_dashboard_route("overview")
)
async def get_inventory_dashboard_overview(
    tenant_id: UUID = Path(...),
    current_user: dict = Depends(get_current_user_dep),
    dashboard_service: DashboardService = Depends(get_dashboard_service),
    db: AsyncSession = Depends(get_db)
):
    """
    Get lightweight inventory dashboard overview for health checks.

    This endpoint is optimized for frequent polling by the orchestrator service
    for dashboard health-status checks. It returns only essential metrics needed
    to determine inventory health status.
    """
    try:
        overview = await dashboard_service.get_inventory_overview(db, tenant_id)

        logger.info("Inventory dashboard overview retrieved",
                    tenant_id=str(tenant_id),
                    out_of_stock_count=overview.get('out_of_stock_count', 0))

        return overview

    except Exception as e:
        logger.error("Error getting inventory dashboard overview",
                     tenant_id=str(tenant_id),
                     error=str(e))
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Failed to retrieve inventory dashboard overview"
        )


@router.get(
    route_builder.build_dashboard_route("food-safety"),
    response_model=FoodSafetyDashboard
)
async def get_food_safety_dashboard(
    tenant_id: UUID = Path(...),
    current_user: dict = Depends(get_current_user_dep),
    food_safety_service: FoodSafetyService = Depends(lambda: FoodSafetyService()),
    db: AsyncSession = Depends(get_db)
):
    """Get food safety dashboard data"""
    try:
        dashboard = await food_safety_service.get_food_safety_dashboard(db, tenant_id)

        logger.info("Food safety dashboard retrieved",
                    tenant_id=str(tenant_id),
                    compliance_percentage=dashboard.compliance_percentage)

        return dashboard

    except Exception as e:
        logger.error("Error getting food safety dashboard",
                     tenant_id=str(tenant_id),
                     error=str(e))
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Failed to retrieve food safety dashboard"
        )


@router.get(
    route_builder.build_dashboard_route("analytics"),
    response_model=InventoryAnalytics
)
async def get_inventory_analytics(
    tenant_id: UUID = Path(...),
    days_back: int = Query(30, ge=1, le=365, description="Number of days to analyze"),
    current_user: dict = Depends(get_current_user_dep),
    dashboard_service: DashboardService = Depends(get_dashboard_service),
    db: AsyncSession = Depends(get_db)
):
    """Get advanced inventory analytics"""
    try:
        analytics = await dashboard_service.get_inventory_analytics(db, tenant_id, days_back)

        logger.info("Inventory analytics retrieved",
                    tenant_id=str(tenant_id),
                    days_analyzed=days_back)

        return analytics

    except Exception as e:
        logger.error("Error getting inventory analytics",
                     tenant_id=str(tenant_id),
                     error=str(e))
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Failed to retrieve inventory analytics"
        )


@router.get(
    route_builder.build_dashboard_route("business-model"),
    response_model=BusinessModelInsights
)
async def get_business_model_insights(
    tenant_id: UUID = Path(...),
    current_user: dict = Depends(get_current_user_dep),
    dashboard_service: DashboardService = Depends(get_dashboard_service),
    db: AsyncSession = Depends(get_db)
):
    """Get business model insights based on inventory patterns"""
    try:
        insights = await dashboard_service.get_business_model_insights(db, tenant_id)

        logger.info("Business model insights retrieved",
                    tenant_id=str(tenant_id),
                    detected_model=insights.detected_model)

        return insights

    except Exception as e:
        logger.error("Error getting business model insights",
                     tenant_id=str(tenant_id),
                     error=str(e))
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Failed to retrieve business model insights"
        )


# ===== Detailed Dashboard Data Endpoints =====

@router.get(
    route_builder.build_dashboard_route("stock-status"),
    response_model=List[StockStatusSummary]
)
async def get_stock_status_by_category(
    tenant_id: UUID = Path(...),
    current_user: dict = Depends(get_current_user_dep),
    dashboard_service: DashboardService = Depends(get_dashboard_service),
    db: AsyncSession = Depends(get_db)
):
    """Get stock status breakdown by category"""
    try:
        stock_status = await dashboard_service.get_stock_status_by_category(db, tenant_id)

        return stock_status

    except Exception as e:
        logger.error("Error getting stock status by category",
                     tenant_id=str(tenant_id),
                     error=str(e))
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Failed to retrieve stock status by category"
        )


@router.get(
    route_builder.build_dashboard_route("alerts-summary"),
    response_model=List[AlertSummary]
)
async def get_alerts_summary(
    tenant_id: UUID = Path(...),
    filters: Optional[AlertsFilter] = None,
    current_user: dict = Depends(get_current_user_dep),
    dashboard_service: DashboardService = Depends(get_dashboard_service),
    db: AsyncSession = Depends(get_db)
):
    """Get alerts summary by type and severity"""
    try:
        alerts_summary = await dashboard_service.get_alerts_summary(db, tenant_id, filters)

        return alerts_summary

    except Exception as e:
        logger.error("Error getting alerts summary",
                     tenant_id=str(tenant_id),
                     error=str(e))
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Failed to retrieve alerts summary"
        )


@router.get(
    route_builder.build_dashboard_route("recent-activity"),
    response_model=List[RecentActivity]
)
async def get_recent_activity(
    tenant_id: UUID = Path(...),
    limit: int = Query(20, ge=1, le=100, description="Number of activities to return"),
    activity_types: Optional[List[str]] = Query(None, description="Filter by activity types"),
    current_user: dict = Depends(get_current_user_dep),
    dashboard_service: DashboardService = Depends(get_dashboard_service),
    db: AsyncSession = Depends(get_db)
):
    """Get recent inventory activity"""
    try:
        activities = await dashboard_service.get_recent_activity(
            db, tenant_id, limit, activity_types
        )

        return activities

    except Exception as e:
        logger.error("Error getting recent activity",
                     tenant_id=str(tenant_id),
                     error=str(e))
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Failed to retrieve recent activity"
        )


# ===== Real-time Data Endpoints =====

@router.get(
    route_builder.build_dashboard_route("live-metrics")
)
async def get_live_metrics(
    tenant_id: UUID = Path(...),
    current_user: dict = Depends(get_current_user_dep),
    dashboard_service: DashboardService = Depends(get_dashboard_service),
    db: AsyncSession = Depends(get_db)
):
    """Get real-time inventory metrics"""
    try:
        metrics = await dashboard_service.get_live_metrics(db, tenant_id)

        return {
            "timestamp": datetime.now().isoformat(),
            "metrics": metrics,
            "cache_ttl": 60  # Seconds
        }

    except Exception as e:
        logger.error("Error getting live metrics",
                     tenant_id=str(tenant_id),
                     error=str(e))
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Failed to retrieve live metrics"
        )


@router.get(
    route_builder.build_dashboard_route("temperature-status")
)
async def get_temperature_monitoring_status(
    tenant_id: UUID = Path(...),
    current_user: dict = Depends(get_current_user_dep),
    food_safety_service: FoodSafetyService = Depends(lambda: FoodSafetyService()),
    db: AsyncSession = Depends(get_db)
):
    """Get current temperature monitoring status"""
    try:
        temp_status = await food_safety_service.get_temperature_monitoring_status(db, tenant_id)

        return {
            "timestamp": datetime.now().isoformat(),
            "temperature_monitoring": temp_status
        }

    except Exception as e:
        logger.error("Error getting temperature status",
                     tenant_id=str(tenant_id),
                     error=str(e))
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Failed to retrieve temperature monitoring status"
        )


# ===== Dashboard Configuration Endpoints =====

@router.get(
    route_builder.build_dashboard_route("config")
)
async def get_dashboard_config(
    tenant_id: UUID = Path(...),
    current_user: dict = Depends(get_current_user_dep)
):
    """Get dashboard configuration and settings"""
    try:
        from app.core.config import settings

        config = {
            "refresh_intervals": {
                "dashboard_cache_ttl": settings.DASHBOARD_CACHE_TTL,
                "alerts_refresh_interval": settings.ALERTS_REFRESH_INTERVAL,
                "temperature_log_interval": settings.TEMPERATURE_LOG_INTERVAL
            },
            "features": {
                "food_safety_enabled": settings.FOOD_SAFETY_ENABLED,
                "temperature_monitoring_enabled": settings.TEMPERATURE_MONITORING_ENABLED,
                "business_model_detection": settings.ENABLE_BUSINESS_MODEL_DETECTION
            },
            "thresholds": {
                "low_stock_default": settings.DEFAULT_LOW_STOCK_THRESHOLD,
                "reorder_point_default": settings.DEFAULT_REORDER_POINT,
                "expiration_warning_days": settings.EXPIRATION_WARNING_DAYS,
                "critical_expiration_hours": settings.CRITICAL_EXPIRATION_HOURS
            },
            "business_model_thresholds": {
                "central_bakery_ingredients": settings.CENTRAL_BAKERY_THRESHOLD_INGREDIENTS,
                "individual_bakery_ingredients": settings.INDIVIDUAL_BAKERY_THRESHOLD_INGREDIENTS
            }
        }

        return config

    except Exception as e:
        logger.error("Error getting dashboard config",
                     tenant_id=str(tenant_id),
                     error=str(e))
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Failed to retrieve dashboard configuration"
        )


# ===== Export and Reporting Endpoints =====

@router.get(
    route_builder.build_operations_route("export/summary")
)
async def export_dashboard_summary(
    tenant_id: UUID = Path(...),
    format: str = Query("json", description="Export format: json, csv, excel"),
    date_from: Optional[datetime] = Query(None, description="Start date for data export"),
    date_to: Optional[datetime] = Query(None, description="End date for data export"),
    current_user: dict = Depends(get_current_user_dep),
    dashboard_service: DashboardService = Depends(get_dashboard_service),
    db: AsyncSession = Depends(get_db)
):
    """Export dashboard summary data"""
    try:
        if format.lower() not in ["json", "csv", "excel"]:
            raise HTTPException(
                status_code=status.HTTP_400_BAD_REQUEST,
                detail="Unsupported export format. Use: json, csv, excel"
            )

        export_data = await dashboard_service.export_dashboard_data(
            db, tenant_id, format, date_from, date_to
        )

        logger.info("Dashboard data exported",
                    tenant_id=str(tenant_id),
                    format=format)

        return export_data

    except HTTPException:
        # Re-raise so the 400 above is not swallowed by the generic handler
        raise
    except ValueError as e:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=str(e)
        )
    except Exception as e:
        logger.error("Error exporting dashboard data",
                     tenant_id=str(tenant_id),
                     error=str(e))
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Failed to export dashboard data"
        )


# ===== Health and Status Endpoints =====

@router.get(
    route_builder.build_base_route("health")
)
async def get_dashboard_health(
    tenant_id: UUID = Path(...),
    current_user: dict = Depends(get_current_user_dep)
):
    """Get dashboard service health status"""
    try:
        return {
            "service": "inventory-dashboard",
            "status": "healthy",
            "timestamp": datetime.now().isoformat(),
            "tenant_id": str(tenant_id),
            "features": {
                "food_safety": "enabled",
                "temperature_monitoring": "enabled",
                "business_model_detection": "enabled",
                "real_time_alerts": "enabled"
            }
        }

    except Exception as e:
        logger.error("Error getting dashboard health",
                     tenant_id=str(tenant_id),
                     error=str(e))
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Failed to get dashboard health status"
        )
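Note: the summary endpoint above depends on get_cached/set_cached/make_cache_key from app.utils.cache, which is not included in this diff. A minimal in-process sketch of the interface it assumes follows; a real deployment would likely back this with Redis so cached entries are shared across workers and survive restarts.

# Minimal in-process sketch of the cache interface assumed by dashboard.py.
import time
from typing import Any, Optional

_store: dict[str, tuple[float, Any]] = {}

def make_cache_key(*parts: str) -> str:
    return ":".join(parts)  # e.g. "inventory_dashboard:<tenant_id>"

async def get_cached(key: str) -> Optional[Any]:
    entry = _store.get(key)
    if entry is None:
        return None
    expires_at, value = entry
    if time.monotonic() >= expires_at:
        _store.pop(key, None)  # lazily evict expired entries
        return None
    return value

async def set_cached(key: str, value: Any, ttl: int) -> None:
    _store[key] = (time.monotonic() + ttl, value)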
314
services/inventory/app/api/enterprise_inventory.py
Normal file
@@ -0,0 +1,314 @@
|
||||
"""
|
||||
Enterprise Inventory API Endpoints
|
||||
APIs for enterprise-level inventory management across outlets
|
||||
"""
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, Query
|
||||
from typing import List, Optional
|
||||
from datetime import date
|
||||
from pydantic import BaseModel, Field
|
||||
import structlog
|
||||
|
||||
from app.services.enterprise_inventory_service import EnterpriseInventoryService
|
||||
from shared.auth.tenant_access import verify_tenant_permission_dep
|
||||
from shared.clients import get_inventory_client, get_tenant_client
|
||||
from app.core.config import settings
|
||||
|
||||
logger = structlog.get_logger()
|
||||
router = APIRouter()
|
||||
|
||||
|
||||
# Pydantic models for request/response
|
||||
class InventoryCoverageResponse(BaseModel):
|
||||
outlet_id: str = Field(..., description="Outlet tenant ID")
|
||||
outlet_name: str = Field(..., description="Outlet name")
|
||||
overall_coverage: float = Field(..., description="Overall inventory coverage percentage (0-100)")
|
||||
critical_items_count: int = Field(..., description="Number of items at critical stock levels")
|
||||
high_risk_items_count: int = Field(..., description="Number of items at high risk of stockout")
|
||||
medium_risk_items_count: int = Field(..., description="Number of items at medium risk")
|
||||
low_risk_items_count: int = Field(..., description="Number of items at low risk")
|
||||
fulfillment_rate: float = Field(..., description="Order fulfillment rate percentage (0-100)")
|
||||
last_updated: str = Field(..., description="Last inventory update timestamp")
|
||||
status: str = Field(..., description="Overall status: normal, warning, critical")
|
||||
|
||||
|
||||
class ProductCoverageDetail(BaseModel):
|
||||
product_id: str = Field(..., description="Product ID")
|
||||
product_name: str = Field(..., description="Product name")
|
||||
current_stock: int = Field(..., description="Current stock quantity")
|
||||
safety_stock: int = Field(..., description="Safety stock threshold")
|
||||
coverage_percentage: float = Field(..., description="Coverage percentage (current/safety)")
|
||||
risk_level: str = Field(..., description="Risk level: critical, high, medium, low")
|
||||
days_until_stockout: Optional[int] = Field(None, description="Estimated days until stockout")
|
||||
|
||||
|
||||
class OutletInventoryDetailResponse(BaseModel):
|
||||
outlet_id: str = Field(..., description="Outlet tenant ID")
|
||||
outlet_name: str = Field(..., description="Outlet name")
|
||||
overall_coverage: float = Field(..., description="Overall inventory coverage percentage")
|
||||
products: List[ProductCoverageDetail] = Field(..., description="Product-level inventory details")
|
||||
last_updated: str = Field(..., description="Last update timestamp")
|
||||
|
||||
|
||||
class NetworkInventorySummary(BaseModel):
|
||||
total_outlets: int = Field(..., description="Total number of outlets")
|
||||
average_coverage: float = Field(..., description="Network average inventory coverage")
|
||||
average_fulfillment_rate: float = Field(..., description="Network average fulfillment rate")
|
||||
critical_outlets: int = Field(..., description="Number of outlets with critical status")
|
||||
warning_outlets: int = Field(..., description="Number of outlets with warning status")
|
||||
normal_outlets: int = Field(..., description="Number of outlets with normal status")
|
||||
total_critical_items: int = Field(..., description="Total critical items across network")
|
||||
network_health_score: float = Field(..., description="Overall network health score (0-100)")
|
||||
|
||||
|
||||
class InventoryAlert(BaseModel):
|
||||
alert_id: str = Field(..., description="Alert ID")
|
||||
outlet_id: str = Field(..., description="Outlet ID")
|
||||
outlet_name: str = Field(..., description="Outlet name")
|
||||
product_id: Optional[str] = Field(None, description="Product ID if applicable")
|
||||
product_name: Optional[str] = Field(None, description="Product name if applicable")
|
||||
alert_type: str = Field(..., description="Type of alert: stockout_risk, low_coverage, etc.")
|
||||
severity: str = Field(..., description="Severity: critical, high, medium, low")
|
||||
current_coverage: float = Field(..., description="Current inventory coverage percentage")
|
||||
threshold: float = Field(..., description="Threshold that triggered alert")
|
||||
timestamp: str = Field(..., description="Alert timestamp")
|
||||
message: str = Field(..., description="Alert message")
|
||||
|
||||
|
||||
async def get_enterprise_inventory_service() -> "EnterpriseInventoryService":
|
||||
"""Dependency injection for EnterpriseInventoryService"""
|
||||
inventory_client = get_inventory_client(settings, "inventory-service")
|
||||
tenant_client = get_tenant_client(settings, "inventory-service")
|
||||
return EnterpriseInventoryService(
|
||||
inventory_client=inventory_client,
|
||||
tenant_client=tenant_client
|
||||
)
|
||||
|
||||
|
||||
@router.get("/tenants/{parent_id}/outlets/inventory-coverage",
|
||||
response_model=List[InventoryCoverageResponse],
|
||||
summary="Get inventory coverage for all outlets in network")
|
||||
async def get_outlet_inventory_coverage(
|
||||
parent_id: str,
|
||||
min_coverage: Optional[float] = Query(None, description="Filter outlets with coverage below this threshold"),
|
||||
risk_level: Optional[str] = Query(None, description="Filter by risk level: critical, high, medium, low"),
|
||||
enterprise_inventory_service: EnterpriseInventoryService = Depends(get_enterprise_inventory_service),
|
||||
verified_tenant: str = Depends(verify_tenant_permission_dep)
|
||||
):
|
||||
"""
|
||||
Get inventory coverage metrics for all child outlets in a parent tenant's network
|
||||
|
||||
This endpoint provides a comprehensive view of inventory health across all outlets,
|
||||
enabling enterprise managers to identify stockout risks and prioritize inventory transfers.
|
||||
"""
|
||||
try:
|
||||
# Verify this is a parent tenant
|
||||
tenant_info = await enterprise_inventory_service.tenant_client.get_tenant(parent_id)
|
||||
if tenant_info.get('tenant_type') != 'parent':
|
||||
raise HTTPException(
|
||||
status_code=403,
|
||||
detail="Only parent tenants can access outlet inventory coverage"
|
||||
)
|
||||
|
||||
# Get all child outlets for this parent
|
||||
child_outlets = await enterprise_inventory_service.get_child_outlets(parent_id)
|
||||
|
||||
if not child_outlets:
|
||||
return []
|
||||
|
||||
# Get inventory coverage for each outlet
|
||||
coverage_data = []
|
||||
for outlet in child_outlets:
|
||||
outlet_id = outlet['id']
|
||||
|
||||
# Get inventory coverage data
|
||||
coverage = await enterprise_inventory_service.get_inventory_coverage(outlet_id)
|
||||
|
||||
if coverage:
|
||||
# Apply filters if specified
|
||||
if min_coverage is not None and coverage['overall_coverage'] >= min_coverage:
|
||||
continue
|
||||
if risk_level is not None and coverage.get('status') != risk_level:
|
||||
continue
|
||||
|
||||
coverage_data.append(coverage)
|
||||
|
||||
# Sort by coverage (lowest first) to prioritize critical outlets
|
||||
coverage_data.sort(key=lambda x: x['overall_coverage'])
|
||||
|
||||
return coverage_data
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Failed to get outlet inventory coverage", error=str(e))
|
||||
raise HTTPException(status_code=500, detail=f"Failed to get inventory coverage: {str(e)}")
|
||||
|
||||
|
||||
@router.get("/tenants/{parent_id}/outlets/inventory-summary",
|
||||
response_model=NetworkInventorySummary,
|
||||
summary="Get network-wide inventory summary")
|
||||
async def get_network_inventory_summary(
|
||||
parent_id: str,
|
||||
enterprise_inventory_service: EnterpriseInventoryService = Depends(get_enterprise_inventory_service),
|
||||
verified_tenant: str = Depends(verify_tenant_permission_dep)
|
||||
):
|
||||
"""
|
||||
Get aggregated inventory summary across the entire network
|
||||
|
||||
Provides key metrics for network health monitoring and decision making.
|
||||
"""
|
||||
try:
|
||||
# Verify this is a parent tenant
|
||||
tenant_info = await enterprise_inventory_service.tenant_client.get_tenant(parent_id)
|
||||
if tenant_info.get('tenant_type') != 'parent':
|
||||
raise HTTPException(
|
||||
status_code=403,
|
||||
detail="Only parent tenants can access network inventory summary"
|
||||
)
|
||||
|
||||
return await enterprise_inventory_service.get_network_inventory_summary(parent_id)
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Failed to get network inventory summary", error=str(e))
|
||||
raise HTTPException(status_code=500, detail=f"Failed to get inventory summary: {str(e)}")
|
||||
|
||||
|
||||
@router.get("/tenants/{parent_id}/outlets/{outlet_id}/inventory-details",
|
||||
response_model=OutletInventoryDetailResponse,
|
||||
summary="Get detailed inventory for specific outlet")
|
||||
async def get_outlet_inventory_details(
|
||||
parent_id: str,
|
||||
outlet_id: str,
|
||||
product_id: Optional[str] = Query(None, description="Filter by specific product ID"),
|
||||
risk_level: Optional[str] = Query(None, description="Filter products by risk level"),
|
||||
enterprise_inventory_service: EnterpriseInventoryService = Depends(get_enterprise_inventory_service),
|
||||
verified_tenant: str = Depends(verify_tenant_permission_dep)
|
||||
):
|
||||
"""
|
||||
Get detailed product-level inventory data for a specific outlet
|
||||
|
||||
Enables drill-down analysis of inventory issues at the product level.
|
||||
"""
|
||||
try:
|
||||
# Verify parent-child relationship
|
||||
await enterprise_inventory_service.verify_parent_child_relationship(parent_id, outlet_id)
|
||||
|
||||
return await enterprise_inventory_service.get_outlet_inventory_details(outlet_id, product_id, risk_level)
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Failed to get outlet inventory details", error=str(e))
|
||||
raise HTTPException(status_code=500, detail=f"Failed to get inventory details: {str(e)}")
|
||||
|
||||
|
||||
@router.get("/tenants/{parent_id}/inventory-alerts",
|
||||
response_model=List[InventoryAlert],
|
||||
summary="Get real-time inventory alerts across network")
|
||||
async def get_network_inventory_alerts(
|
||||
parent_id: str,
|
||||
severity: Optional[str] = Query(None, description="Filter by severity: critical, high, medium, low"),
|
||||
alert_type: Optional[str] = Query(None, description="Filter by alert type"),
|
||||
limit: int = Query(50, description="Maximum number of alerts to return"),
|
||||
enterprise_inventory_service: EnterpriseInventoryService = Depends(get_enterprise_inventory_service),
|
||||
verified_tenant: str = Depends(verify_tenant_permission_dep)
|
||||
):
|
||||
"""
|
||||
Get real-time inventory alerts across all outlets
|
||||
|
||||
Provides actionable alerts for inventory management and stockout prevention.
|
||||
"""
|
||||
try:
|
||||
# Verify this is a parent tenant
|
||||
tenant_info = await enterprise_inventory_service.tenant_client.get_tenant(parent_id)
|
||||
if tenant_info.get('tenant_type') != 'parent':
|
||||
raise HTTPException(
|
||||
status_code=403,
|
||||
detail="Only parent tenants can access network inventory alerts"
|
||||
)
|
||||
|
||||
alerts = await enterprise_inventory_service.get_inventory_alerts(parent_id)
|
||||
|
||||
# Apply filters
|
||||
if severity:
|
||||
alerts = [alert for alert in alerts if alert.get('severity') == severity]
|
||||
if alert_type:
|
||||
alerts = [alert for alert in alerts if alert.get('alert_type') == alert_type]
|
||||
|
||||
# Sort by severity (critical first) and timestamp (newest first)
|
||||
severity_order = {'critical': 1, 'high': 2, 'medium': 3, 'low': 4}
|
||||
alerts.sort(key=lambda x: (severity_order.get(x.get('severity', 'low'), 5), -int(x.get('timestamp', 0))))
|
||||
|
||||
return alerts[:limit]
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Failed to get inventory alerts", error=str(e))
|
||||
raise HTTPException(status_code=500, detail=f"Failed to get inventory alerts: {str(e)}")
|
||||
|
||||
|
||||
@router.post("/tenants/{parent_id}/inventory-transfers/recommend",
|
||||
summary="Get inventory transfer recommendations")
|
||||
async def get_inventory_transfer_recommendations(
|
||||
parent_id: str,
|
||||
urgency: str = Query("medium", description="Urgency level: low, medium, high, critical"),
|
||||
enterprise_inventory_service: EnterpriseInventoryService = Depends(get_enterprise_inventory_service),
|
||||
verified_tenant: str = Depends(verify_tenant_permission_dep)
|
||||
):
|
||||
"""
|
||||
Get AI-powered inventory transfer recommendations
|
||||
|
||||
Analyzes inventory levels across outlets and suggests optimal transfers
|
||||
to prevent stockouts and balance inventory.
|
||||
"""
|
||||
try:
|
||||
# Verify this is a parent tenant
|
||||
tenant_info = await enterprise_inventory_service.tenant_client.get_tenant(parent_id)
|
||||
if tenant_info.get('tenant_type') != 'parent':
|
||||
raise HTTPException(
|
||||
status_code=403,
|
||||
detail="Only parent tenants can request transfer recommendations"
|
||||
)
|
||||
|
||||
recommendations = await enterprise_inventory_service.get_transfer_recommendations(parent_id, urgency)
|
||||
|
||||
return {
|
||||
'success': True,
|
||||
'recommendations': recommendations,
|
||||
'message': f'Generated {len(recommendations)} transfer recommendations'
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Failed to get transfer recommendations", error=str(e))
|
||||
raise HTTPException(status_code=500, detail=f"Failed to get recommendations: {str(e)}")


@router.get("/tenants/{parent_id}/inventory/coverage-trends",
            summary="Get inventory coverage trends over time")
async def get_inventory_coverage_trends(
    parent_id: str,
    days: int = Query(30, ge=1, le=365, description="Number of days to analyze"),
    enterprise_inventory_service: EnterpriseInventoryService = Depends(get_enterprise_inventory_service),
    verified_tenant: str = Depends(verify_tenant_permission_dep)
):
    """
    Get historical inventory coverage trends

    Enables analysis of inventory performance over time.
    """
    try:
        # Verify this is a parent tenant
        tenant_info = await enterprise_inventory_service.tenant_client.get_tenant(parent_id)
        if tenant_info.get('tenant_type') != 'parent':
            raise HTTPException(
                status_code=403,
                detail="Only parent tenants can access coverage trends"
            )

        trends = await enterprise_inventory_service.get_coverage_trends(parent_id, days)

        return {
            'success': True,
            'trends': trends,
            'period': f'Last {days} days'
        }

    except HTTPException:
        raise
    except Exception as e:
        logger.error("Failed to get coverage trends", error=str(e))
        raise HTTPException(status_code=500, detail=f"Failed to get coverage trends: {str(e)}")
262
services/inventory/app/api/food_safety_alerts.py
Normal file
@@ -0,0 +1,262 @@
# services/inventory/app/api/food_safety_alerts.py
"""
Food Safety Alerts API - ATOMIC CRUD operations on the FoodSafetyAlert model
"""

from typing import List, Optional
from uuid import UUID
from fastapi import APIRouter, Depends, HTTPException, Query, Path, status
from sqlalchemy import text  # required to execute raw SQL strings with the async session
from sqlalchemy.ext.asyncio import AsyncSession
import structlog

from shared.auth.decorators import get_current_user_dep
from shared.auth.access_control import require_user_role
from shared.routing import RouteBuilder
from app.core.database import get_db
from app.services.food_safety_service import FoodSafetyService
from app.schemas.food_safety import (
    FoodSafetyAlertCreate,
    FoodSafetyAlertUpdate,
    FoodSafetyAlertResponse
)

logger = structlog.get_logger()
route_builder = RouteBuilder('inventory')
router = APIRouter(tags=["food-safety-alerts"])


async def get_food_safety_service() -> FoodSafetyService:
    """Get food safety service instance"""
    return FoodSafetyService()


@router.post(
    route_builder.build_base_route("food-safety/alerts"),
    response_model=FoodSafetyAlertResponse,
    status_code=status.HTTP_201_CREATED
)
@require_user_role(['admin', 'owner', 'member'])
async def create_food_safety_alert(
    alert_data: FoodSafetyAlertCreate,
    tenant_id: UUID = Path(...),
    current_user: dict = Depends(get_current_user_dep),
    food_safety_service: FoodSafetyService = Depends(get_food_safety_service),
    db: AsyncSession = Depends(get_db)
):
    """Create a food safety alert"""
    try:
        alert_data.tenant_id = tenant_id

        alert = await food_safety_service.create_food_safety_alert(
            db,
            alert_data,
            user_id=UUID(current_user["user_id"])
        )

        logger.info("Food safety alert created",
                    alert_id=str(alert.id),
                    alert_type=alert.alert_type)

        return alert

    except Exception as e:
        logger.error("Error creating food safety alert", error=str(e))
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Failed to create food safety alert"
        )


@router.get(
    route_builder.build_base_route("food-safety/alerts"),
    response_model=List[FoodSafetyAlertResponse]
)
async def get_food_safety_alerts(
    tenant_id: UUID = Path(...),
    alert_type: Optional[str] = Query(None, description="Filter by alert type"),
    severity: Optional[str] = Query(None, description="Filter by severity"),
    status_filter: Optional[str] = Query(None, description="Filter by status"),
    unresolved_only: bool = Query(True, description="Show only unresolved alerts"),
    skip: int = Query(0, ge=0, description="Number of alerts to skip"),
    limit: int = Query(100, ge=1, le=1000, description="Number of alerts to return"),
    current_user: dict = Depends(get_current_user_dep),
    db: AsyncSession = Depends(get_db)
):
    """Get food safety alerts with filtering"""
    try:
        # Column names in the WHERE clause are fixed in code; filter values
        # only ever travel as bound parameters
        where_conditions = ["tenant_id = :tenant_id"]
        params = {"tenant_id": tenant_id}

        if alert_type:
            where_conditions.append("alert_type = :alert_type")
            params["alert_type"] = alert_type

        if severity:
            where_conditions.append("severity = :severity")
            params["severity"] = severity

        if status_filter:
            where_conditions.append("status = :status")
            params["status"] = status_filter
        elif unresolved_only:
            where_conditions.append("status NOT IN ('resolved', 'dismissed')")

        where_clause = " AND ".join(where_conditions)

        query = text(f"""
            SELECT * FROM food_safety_alerts
            WHERE {where_clause}
            ORDER BY created_at DESC
            LIMIT :limit OFFSET :skip
        """)
        params.update({"limit": limit, "skip": skip})

        result = await db.execute(query, params)
        alerts = result.fetchall()

        # Row objects expose their columns via ._mapping in SQLAlchemy 1.4+
        return [
            FoodSafetyAlertResponse(**dict(alert._mapping))
            for alert in alerts
        ]

    except Exception as e:
        logger.error("Error getting food safety alerts", error=str(e))
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Failed to retrieve food safety alerts"
        )
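

# Illustrative sketch (not part of the service code): the filter-to-SQL pattern
# above, reduced to a pure helper. Only values are bound as parameters, which
# is what keeps the dynamically assembled WHERE clause injection-safe.
def _example_build_where(tenant_id, alert_type=None, severity=None):
    conditions = ["tenant_id = :tenant_id"]
    params = {"tenant_id": tenant_id}
    if alert_type:
        conditions.append("alert_type = :alert_type")
        params["alert_type"] = alert_type
    if severity:
        conditions.append("severity = :severity")
        params["severity"] = severity
    return " AND ".join(conditions), params

# _example_build_where("t1", severity="high") ->
#   ("tenant_id = :tenant_id AND severity = :severity", {"tenant_id": "t1", "severity": "high"})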


@router.get(
    route_builder.build_resource_detail_route("food-safety/alerts", "alert_id"),
    response_model=FoodSafetyAlertResponse
)
async def get_food_safety_alert(
    alert_id: UUID = Path(...),
    tenant_id: UUID = Path(...),
    current_user: dict = Depends(get_current_user_dep),
    db: AsyncSession = Depends(get_db)
):
    """Get a specific food safety alert"""
    try:
        query = text("SELECT * FROM food_safety_alerts WHERE id = :alert_id AND tenant_id = :tenant_id")
        result = await db.execute(query, {"alert_id": alert_id, "tenant_id": tenant_id})
        alert = result.fetchone()

        if not alert:
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND,
                detail="Food safety alert not found"
            )

        return FoodSafetyAlertResponse(**dict(alert._mapping))

    except HTTPException:
        raise
    except Exception as e:
        logger.error("Error getting food safety alert", error=str(e))
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Failed to retrieve food safety alert"
        )


@router.put(
    route_builder.build_resource_detail_route("food-safety/alerts", "alert_id"),
    response_model=FoodSafetyAlertResponse
)
@require_user_role(['admin', 'owner', 'member'])
async def update_food_safety_alert(
    alert_data: FoodSafetyAlertUpdate,
    tenant_id: UUID = Path(...),
    alert_id: UUID = Path(...),
    current_user: dict = Depends(get_current_user_dep),
    db: AsyncSession = Depends(get_db)
):
    """Update a food safety alert"""
    try:
        alert_query = text("SELECT * FROM food_safety_alerts WHERE id = :alert_id AND tenant_id = :tenant_id")
        result = await db.execute(alert_query, {"alert_id": alert_id, "tenant_id": tenant_id})
        alert_record = result.fetchone()

        if not alert_record:
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND,
                detail="Food safety alert not found"
            )

        update_fields = alert_data.dict(exclude_unset=True)
        if update_fields:
            # Field names come from the Pydantic schema, not from user input,
            # so interpolating them into the SET clause is safe
            set_clauses = []
            params = {"alert_id": alert_id, "tenant_id": tenant_id}

            for field, value in update_fields.items():
                set_clauses.append(f"{field} = :{field}")
                params[field] = value

            set_clauses.append("updated_at = NOW()")
            set_clauses.append("updated_by = :updated_by")
            params["updated_by"] = UUID(current_user["user_id"])

            update_query = text(f"""
                UPDATE food_safety_alerts
                SET {', '.join(set_clauses)}
                WHERE id = :alert_id AND tenant_id = :tenant_id
            """)

            await db.execute(update_query, params)
            await db.commit()

        result = await db.execute(alert_query, {"alert_id": alert_id, "tenant_id": tenant_id})
        updated_alert = result.fetchone()

        logger.info("Food safety alert updated",
                    alert_id=str(alert_id))

        return FoodSafetyAlertResponse(**dict(updated_alert._mapping))

    except HTTPException:
        raise
    except Exception as e:
        logger.error("Error updating food safety alert",
                     alert_id=str(alert_id),
                     error=str(e))
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Failed to update food safety alert"
        )
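

# Illustrative sketch (not part of the service code): why dict(exclude_unset=True)
# matters for partial updates. Only fields the client actually sent are written,
# so an omitted field is not overwritten with None. Assumes Pydantic v1-style
# models, matching the .dict() call above.
def _example_exclude_unset():
    from typing import Optional
    from pydantic import BaseModel

    class AlertPatch(BaseModel):
        severity: Optional[str] = None
        status: Optional[str] = None

    patch = AlertPatch(severity="high")
    # 'status' was never set by the caller, so it is excluded from the update
    assert patch.dict(exclude_unset=True) == {"severity": "high"}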


@router.delete(
    route_builder.build_resource_detail_route("food-safety/alerts", "alert_id"),
    status_code=status.HTTP_204_NO_CONTENT
)
@require_user_role(['admin', 'owner'])
async def delete_food_safety_alert(
    alert_id: UUID = Path(...),
    tenant_id: UUID = Path(...),
    current_user: dict = Depends(get_current_user_dep),
    db: AsyncSession = Depends(get_db)
):
    """Delete a food safety alert"""
    try:
        query = text("DELETE FROM food_safety_alerts WHERE id = :alert_id AND tenant_id = :tenant_id")
        result = await db.execute(query, {"alert_id": alert_id, "tenant_id": tenant_id})

        if result.rowcount == 0:
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND,
                detail="Food safety alert not found"
            )

        await db.commit()
        return None

    except HTTPException:
        raise
    except Exception as e:
        logger.error("Error deleting food safety alert", error=str(e))
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Failed to delete food safety alert"
        )
302
services/inventory/app/api/food_safety_compliance.py
Normal file
@@ -0,0 +1,302 @@
# services/inventory/app/api/food_safety_compliance.py
"""
Food Safety Compliance API - ATOMIC CRUD operations on the FoodSafetyCompliance model
"""

from typing import List, Optional
from uuid import UUID
from fastapi import APIRouter, Depends, HTTPException, Query, Path, status
from sqlalchemy import text  # required to execute raw SQL strings with the async session
from sqlalchemy.ext.asyncio import AsyncSession
import structlog

from shared.auth.decorators import get_current_user_dep
from shared.auth.access_control import require_user_role
from shared.routing import RouteBuilder
from app.core.database import get_db
from app.services.food_safety_service import FoodSafetyService
from app.models import AuditLog
from app.schemas.food_safety import (
    FoodSafetyComplianceCreate,
    FoodSafetyComplianceUpdate,
    FoodSafetyComplianceResponse
)

logger = structlog.get_logger()
route_builder = RouteBuilder('inventory')
router = APIRouter(tags=["food-safety-compliance"])


async def get_food_safety_service() -> FoodSafetyService:
    """Get food safety service instance"""
    return FoodSafetyService()


@router.post(
    route_builder.build_base_route("food-safety/compliance"),
    response_model=FoodSafetyComplianceResponse,
    status_code=status.HTTP_201_CREATED
)
@require_user_role(['admin', 'owner', 'member'])
async def create_compliance_record(
    compliance_data: FoodSafetyComplianceCreate,
    tenant_id: UUID = Path(...),
    current_user: dict = Depends(get_current_user_dep),
    food_safety_service: FoodSafetyService = Depends(get_food_safety_service),
    db: AsyncSession = Depends(get_db)
):
    """Create a new food safety compliance record"""
    try:
        compliance_data.tenant_id = tenant_id

        compliance = await food_safety_service.create_compliance_record(
            db,
            compliance_data,
            user_id=UUID(current_user["user_id"])
        )

        logger.info("Compliance record created",
                    compliance_id=str(compliance.id),
                    standard=compliance.standard)

        return compliance

    except ValueError as e:
        logger.warning("Invalid compliance data", error=str(e))
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=str(e)
        )
    except Exception as e:
        logger.error("Error creating compliance record", error=str(e))
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Failed to create compliance record"
        )


@router.get(
    route_builder.build_base_route("food-safety/compliance"),
    response_model=List[FoodSafetyComplianceResponse]
)
async def get_compliance_records(
    tenant_id: UUID = Path(...),
    ingredient_id: Optional[UUID] = Query(None, description="Filter by ingredient ID"),
    standard: Optional[str] = Query(None, description="Filter by compliance standard"),
    status_filter: Optional[str] = Query(None, description="Filter by compliance status"),
    skip: int = Query(0, ge=0, description="Number of records to skip"),
    limit: int = Query(100, ge=1, le=1000, description="Number of records to return"),
    current_user: dict = Depends(get_current_user_dep),
    db: AsyncSession = Depends(get_db)
):
    """Get compliance records with filtering"""
    try:
        filters = {}
        if ingredient_id:
            filters["ingredient_id"] = ingredient_id
        if standard:
            filters["standard"] = standard
        if status_filter:
            filters["compliance_status"] = status_filter

        # Filter keys are fixed column names chosen in code above; only the
        # values are bound as parameters
        query = """
            SELECT * FROM food_safety_compliance
            WHERE tenant_id = :tenant_id AND is_active = true
        """
        params = {"tenant_id": tenant_id}

        for key, value in filters.items():
            query += f" AND {key} = :{key}"
            params[key] = value

        query += " ORDER BY created_at DESC LIMIT :limit OFFSET :skip"
        params.update({"limit": limit, "skip": skip})

        result = await db.execute(text(query), params)
        records = result.fetchall()

        return [
            FoodSafetyComplianceResponse(**dict(record._mapping))
            for record in records
        ]

    except Exception as e:
        logger.error("Error getting compliance records", error=str(e))
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Failed to retrieve compliance records"
        )


@router.get(
    route_builder.build_resource_detail_route("food-safety/compliance", "compliance_id"),
    response_model=FoodSafetyComplianceResponse
)
async def get_compliance_record(
    compliance_id: UUID = Path(...),
    tenant_id: UUID = Path(...),
    current_user: dict = Depends(get_current_user_dep),
    db: AsyncSession = Depends(get_db)
):
    """Get a specific compliance record"""
    try:
        query = text("SELECT * FROM food_safety_compliance WHERE id = :compliance_id AND tenant_id = :tenant_id")
        result = await db.execute(query, {"compliance_id": compliance_id, "tenant_id": tenant_id})
        record = result.fetchone()

        if not record:
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND,
                detail="Compliance record not found"
            )

        return FoodSafetyComplianceResponse(**dict(record._mapping))

    except HTTPException:
        raise
    except Exception as e:
        logger.error("Error getting compliance record", error=str(e))
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Failed to retrieve compliance record"
        )


@router.put(
    route_builder.build_resource_detail_route("food-safety/compliance", "compliance_id"),
    response_model=FoodSafetyComplianceResponse
)
@require_user_role(['admin', 'owner', 'member'])
async def update_compliance_record(
    compliance_data: FoodSafetyComplianceUpdate,
    tenant_id: UUID = Path(...),
    compliance_id: UUID = Path(...),
    current_user: dict = Depends(get_current_user_dep),
    food_safety_service: FoodSafetyService = Depends(get_food_safety_service),
    db: AsyncSession = Depends(get_db)
):
    """Update an existing compliance record"""
    try:
        compliance = await food_safety_service.update_compliance_record(
            db,
            compliance_id,
            tenant_id,
            compliance_data,
            user_id=UUID(current_user["user_id"])
        )

        if not compliance:
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND,
                detail="Compliance record not found"
            )

        logger.info("Compliance record updated",
                    compliance_id=str(compliance.id))

        return compliance

    except HTTPException:
        raise
    except Exception as e:
        logger.error("Error updating compliance record",
                     compliance_id=str(compliance_id),
                     error=str(e))
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Failed to update compliance record"
        )


@router.delete(
    route_builder.build_resource_detail_route("food-safety/compliance", "compliance_id"),
    status_code=status.HTTP_403_FORBIDDEN
)
@require_user_role(['admin', 'owner'])
async def delete_compliance_record(
    compliance_id: UUID = Path(...),
    tenant_id: UUID = Path(...),
    current_user: dict = Depends(get_current_user_dep),
    db: AsyncSession = Depends(get_db)
):
    """
    Compliance records CANNOT be deleted, for regulatory reasons.
    Use the archive endpoint to mark records as inactive instead.
    """
    raise HTTPException(
        status_code=status.HTTP_403_FORBIDDEN,
        detail={
            "error": "compliance_records_cannot_be_deleted",
            "message": "Compliance records cannot be deleted for regulatory compliance. Use PUT /food-safety/compliance/{id}/archive to archive records instead.",
            "reason": "Food safety compliance records must be retained for regulatory audits",
            "alternative_endpoint": f"/api/v1/tenants/{tenant_id}/inventory/food-safety/compliance/{compliance_id}/archive"
        }
    )
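

# Illustrative sketch (not part of the service code): what a client migration
# from DELETE to the archive endpoint looks like. Host and token are placeholders.
async def _example_archive_instead_of_delete(tenant_id, compliance_id):
    import httpx
    async with httpx.AsyncClient(base_url="http://gateway.local") as client:  # placeholder host
        resp = await client.put(
            f"/api/v1/tenants/{tenant_id}/inventory/food-safety/compliance/{compliance_id}/archive",
            headers={"Authorization": "Bearer <token>"},  # placeholder credential
        )
        resp.raise_for_status()
        return resp.json()  # e.g. {"message": ..., "archived": True, ...}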


@router.put(
    route_builder.build_nested_resource_route("food-safety/compliance", "compliance_id", "archive"),
    response_model=dict
)
@require_user_role(['admin', 'owner'])
async def archive_compliance_record(
    compliance_id: UUID = Path(...),
    tenant_id: UUID = Path(...),
    current_user: dict = Depends(get_current_user_dep),
    db: AsyncSession = Depends(get_db)
):
    """Archive (soft delete) a compliance record - marks it inactive but retains it for audit"""
    try:
        query = text("""
            UPDATE food_safety_compliance
            SET is_active = false, updated_at = NOW(), updated_by = :user_id
            WHERE id = :compliance_id AND tenant_id = :tenant_id
        """)
        result = await db.execute(query, {
            "compliance_id": compliance_id,
            "tenant_id": tenant_id,
            "user_id": UUID(current_user["user_id"])
        })

        if result.rowcount == 0:
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND,
                detail="Compliance record not found"
            )

        await db.commit()

        # Log an audit event for the archival; audit failures are logged but
        # never block the operation itself
        try:
            from shared.security import create_audit_logger, AuditSeverity, AuditAction
            audit_logger = create_audit_logger("inventory-service", AuditLog)
            await audit_logger.log_event(
                db_session=db,
                tenant_id=str(tenant_id),
                user_id=current_user["user_id"],
                action="archive",
                resource_type="compliance_record",
                resource_id=str(compliance_id),
                severity=AuditSeverity.HIGH.value,
                description="Archived compliance record (retained for regulatory compliance)",
                endpoint=f"/food-safety/compliance/{compliance_id}/archive",
                method="PUT"
            )
        except Exception as audit_error:
            logger.warning("Failed to log audit event", error=str(audit_error))

        return {
            "message": "Compliance record archived successfully",
            "compliance_id": str(compliance_id),
            "archived": True,
            "note": "Record retained for regulatory compliance audits"
        }

    except HTTPException:
        raise
    except Exception as e:
        logger.error("Error archiving compliance record", error=str(e))
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Failed to archive compliance record"
        )
287
services/inventory/app/api/food_safety_operations.py
Normal file
@@ -0,0 +1,287 @@
# services/inventory/app/api/food_safety_operations.py
"""
Food Safety Operations API - Business operations for food safety management
"""

from datetime import datetime
from typing import Optional
from decimal import Decimal
from uuid import UUID
from fastapi import APIRouter, Depends, HTTPException, Query, Path, status
from sqlalchemy import text  # required to execute raw SQL strings with the async session
from sqlalchemy.ext.asyncio import AsyncSession
import structlog

from shared.auth.decorators import get_current_user_dep
from shared.auth.access_control import require_user_role, analytics_tier_required
from shared.routing import RouteBuilder
from app.core.database import get_db
from app.services.food_safety_service import FoodSafetyService
from app.schemas.food_safety import FoodSafetyMetrics

logger = structlog.get_logger()
route_builder = RouteBuilder('inventory')
router = APIRouter(tags=["food-safety-operations"])


async def get_food_safety_service() -> FoodSafetyService:
    """Get food safety service instance"""
    return FoodSafetyService()


@router.post(
    route_builder.build_nested_resource_route("food-safety/alerts", "alert_id", "acknowledge"),
    response_model=dict
)
@require_user_role(['admin', 'owner', 'member'])
async def acknowledge_alert(
    tenant_id: UUID = Path(...),
    alert_id: UUID = Path(...),
    notes: Optional[str] = Query(None, description="Acknowledgment notes"),
    current_user: dict = Depends(get_current_user_dep),
    db: AsyncSession = Depends(get_db)
):
    """Acknowledge a food safety alert"""
    try:
        # COALESCE keeps any existing notes and appends the acknowledgment line
        update_query = text("""
            UPDATE food_safety_alerts
            SET status = 'acknowledged',
                acknowledged_at = NOW(),
                acknowledged_by = :user_id,
                investigation_notes = COALESCE(investigation_notes, '') || :notes,
                updated_at = NOW(),
                updated_by = :user_id
            WHERE id = :alert_id AND tenant_id = :tenant_id
        """)

        result = await db.execute(update_query, {
            "alert_id": alert_id,
            "tenant_id": tenant_id,
            "user_id": UUID(current_user["user_id"]),
            "notes": f"\nAcknowledged: {notes}" if notes else "\nAcknowledged"
        })

        if result.rowcount == 0:
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND,
                detail="Food safety alert not found"
            )

        await db.commit()

        logger.info("Food safety alert acknowledged",
                    alert_id=str(alert_id))

        return {"message": "Alert acknowledged successfully"}

    except HTTPException:
        raise
    except Exception as e:
        logger.error("Error acknowledging alert",
                     alert_id=str(alert_id),
                     error=str(e))
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Failed to acknowledge alert"
        )
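

# Illustrative sketch (not part of the service code): acknowledging an alert
# from a client, passing the optional notes as a query parameter. Host and
# token are placeholders, and the URL assumes the RouteBuilder expands to the
# tenant-scoped path shown in this repository's module docstrings.
async def _example_acknowledge_alert(tenant_id, alert_id):
    import httpx
    async with httpx.AsyncClient(base_url="http://gateway.local") as client:  # placeholder host
        resp = await client.post(
            f"/api/v1/tenants/{tenant_id}/inventory/food-safety/alerts/{alert_id}/acknowledge",
            params={"notes": "Checked the walk-in freezer; compressor replaced"},
            headers={"Authorization": "Bearer <token>"},  # placeholder credential
        )
        resp.raise_for_status()
        return resp.json()["message"]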


@router.get(
    route_builder.build_analytics_route("food-safety-metrics"),
    response_model=FoodSafetyMetrics
)
async def get_food_safety_metrics(
    tenant_id: UUID = Path(...),
    days_back: int = Query(30, ge=1, le=365, description="Number of days to analyze"),
    current_user: dict = Depends(get_current_user_dep),
    db: AsyncSession = Depends(get_db)
):
    """Get food safety performance metrics"""
    try:
        compliance_query = text("""
            SELECT
                COUNT(*) as total,
                COUNT(CASE WHEN compliance_status = 'compliant' THEN 1 END) as compliant
            FROM food_safety_compliance
            WHERE tenant_id = :tenant_id AND is_active = true
        """)

        result = await db.execute(compliance_query, {"tenant_id": tenant_id})
        compliance_stats = result.fetchone()

        compliance_rate = 0.0
        if compliance_stats.total > 0:
            compliance_rate = (compliance_stats.compliant / compliance_stats.total) * 100

        # days_back is bound as a parameter and multiplied with a fixed
        # interval, rather than interpolated into the SQL string
        temp_query = text("""
            SELECT
                COUNT(*) as total_readings,
                COUNT(CASE WHEN is_within_range THEN 1 END) as compliant_readings
            FROM temperature_logs
            WHERE tenant_id = :tenant_id
            AND recorded_at > NOW() - (:days_back * INTERVAL '1 day')
        """)

        result = await db.execute(temp_query, {"tenant_id": tenant_id, "days_back": days_back})
        temp_stats = result.fetchone()

        temp_compliance_rate = 0.0
        if temp_stats.total_readings > 0:
            temp_compliance_rate = (temp_stats.compliant_readings / temp_stats.total_readings) * 100

        alert_query = text("""
            SELECT
                COUNT(*) as total_alerts,
                COUNT(CASE WHEN is_recurring THEN 1 END) as recurring_alerts,
                COUNT(CASE WHEN regulatory_action_required THEN 1 END) as regulatory_violations,
                AVG(CASE WHEN response_time_minutes IS NOT NULL THEN response_time_minutes END) as avg_response_time,
                AVG(CASE WHEN resolution_time_minutes IS NOT NULL THEN resolution_time_minutes END) as avg_resolution_time
            FROM food_safety_alerts
            WHERE tenant_id = :tenant_id
            AND created_at > NOW() - (:days_back * INTERVAL '1 day')
        """)

        result = await db.execute(alert_query, {"tenant_id": tenant_id, "days_back": days_back})
        alert_stats = result.fetchone()

        return FoodSafetyMetrics(
            compliance_rate=Decimal(str(compliance_rate)),
            temperature_compliance_rate=Decimal(str(temp_compliance_rate)),
            alert_response_time_avg=Decimal(str(alert_stats.avg_response_time or 0)),
            alert_resolution_time_avg=Decimal(str(alert_stats.avg_resolution_time or 0)),
            recurring_issues_count=alert_stats.recurring_alerts or 0,
            regulatory_violations=alert_stats.regulatory_violations or 0,
            certification_coverage=Decimal(str(compliance_rate)),
            audit_score_avg=Decimal("85.0"),  # hardcoded constant; not yet derived from data
            risk_score=Decimal("3.2")  # hardcoded constant; not yet derived from data
        )

    except Exception as e:
        logger.error("Error getting food safety metrics", error=str(e))
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Failed to retrieve food safety metrics"
        )
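

# Illustrative sketch (not part of the service code): binding a day count into
# a PostgreSQL interval. Multiplying INTERVAL '1 day' by a bound parameter
# keeps user input out of the SQL text entirely, unlike the string-formatted
# "INTERVAL '%s days'" % n approach this replaces.
from sqlalchemy import text as _sql_text

_EXAMPLE_WINDOW_QUERY = _sql_text("""
    SELECT COUNT(*) FROM temperature_logs
    WHERE tenant_id = :tenant_id
    AND recorded_at > NOW() - (:days_back * INTERVAL '1 day')
""")
# Usage: await db.execute(_EXAMPLE_WINDOW_QUERY, {"tenant_id": tid, "days_back": 30})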


@router.get(
    route_builder.build_operations_route("food-safety/status")
)
async def get_food_safety_status(
    tenant_id: UUID = Path(...),
    current_user: dict = Depends(get_current_user_dep)
):
    """Get food safety service status"""
    try:
        return {
            "service": "food-safety",
            "status": "healthy",
            "timestamp": datetime.now().isoformat(),
            "tenant_id": str(tenant_id),
            "features": {
                "compliance_tracking": "enabled",
                "temperature_monitoring": "enabled",
                "automated_alerts": "enabled",
                "regulatory_reporting": "enabled"
            }
        }

    except Exception as e:
        logger.error("Error getting food safety status", error=str(e))
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Failed to get food safety status"
        )


@router.get(
    route_builder.build_operations_route("food-safety/temperature/violations")
)
async def get_temperature_violations(
    tenant_id: UUID = Path(...),
    days_back: int = Query(7, ge=1, le=90, description="Days to analyze"),
    current_user: dict = Depends(get_current_user_dep),
    db: AsyncSession = Depends(get_db)
):
    """Get a temperature violations summary"""
    try:
        query = text("""
            SELECT
                COUNT(*) as total_violations,
                COUNT(DISTINCT storage_location) as affected_locations,
                COUNT(DISTINCT equipment_id) as affected_equipment,
                AVG(ABS(temperature_celsius - (min_temp_celsius + max_temp_celsius)/2)) as avg_deviation
            FROM temperature_logs
            WHERE tenant_id = :tenant_id
            AND is_within_range = false
            AND recorded_at > NOW() - (:days_back * INTERVAL '1 day')
        """)

        result = await db.execute(query, {"tenant_id": tenant_id, "days_back": days_back})
        stats = result.fetchone()

        return {
            "period_days": days_back,
            "total_violations": stats.total_violations or 0,
            "affected_locations": stats.affected_locations or 0,
            "affected_equipment": stats.affected_equipment or 0,
            "average_deviation_celsius": float(stats.avg_deviation or 0)
        }

    except Exception as e:
        logger.error("Error getting temperature violations", error=str(e))
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Failed to get temperature violations"
        )


@router.get(
    route_builder.build_operations_route("food-safety/compliance/summary")
)
async def get_compliance_summary(
    tenant_id: UUID = Path(...),
    current_user: dict = Depends(get_current_user_dep),
    db: AsyncSession = Depends(get_db)
):
    """Get a compliance summary grouped by standard"""
    try:
        query = text("""
            SELECT
                standard,
                COUNT(*) as total,
                COUNT(CASE WHEN compliance_status = 'compliant' THEN 1 END) as compliant,
                COUNT(CASE WHEN compliance_status = 'non_compliant' THEN 1 END) as non_compliant,
                COUNT(CASE WHEN compliance_status = 'pending' THEN 1 END) as pending
            FROM food_safety_compliance
            WHERE tenant_id = :tenant_id AND is_active = true
            GROUP BY standard
            ORDER BY standard
        """)

        result = await db.execute(query, {"tenant_id": tenant_id})
        records = result.fetchall()

        summary = []
        for record in records:
            compliance_rate = (record.compliant / record.total * 100) if record.total > 0 else 0
            summary.append({
                "standard": record.standard,
                "total_items": record.total,
                "compliant": record.compliant,
                "non_compliant": record.non_compliant,
                "pending": record.pending,
                "compliance_rate": round(compliance_rate, 2)
            })

        return {
            "tenant_id": str(tenant_id),
            "standards": summary,
            "total_standards": len(summary)
        }

    except Exception as e:
        logger.error("Error getting compliance summary", error=str(e))
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Failed to get compliance summary"
        )
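

# Illustrative sketch (not part of the service code): the per-standard rate
# computation above, as a pure function over (standard, total, compliant) rows.
def _example_summarize(rows):
    return [
        {
            "standard": standard,
            "compliance_rate": round(compliant / total * 100, 2) if total else 0,
        }
        for standard, total, compliant in rows
    ]

# _example_summarize([("HACCP", 4, 3)]) -> [{"standard": "HACCP", "compliance_rate": 75.0}]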
556
services/inventory/app/api/ingredients.py
Normal file
@@ -0,0 +1,556 @@
# services/inventory/app/api/ingredients.py
"""
Base CRUD operations for inventory ingredients resources
Following standardized URL structure: /api/v1/tenants/{tenant_id}/inventory/{resource}
"""

from typing import List, Optional
from uuid import UUID
from fastapi import APIRouter, Depends, HTTPException, Query, Path, status
from sqlalchemy.ext.asyncio import AsyncSession
import httpx
import structlog

from app.core.database import get_db
from app.services.inventory_service import InventoryService
from app.models import AuditLog
from app.schemas.inventory import (
    IngredientCreate,
    IngredientUpdate,
    IngredientResponse,
    StockResponse,
    StockCreate,
    StockUpdate,
    BulkIngredientCreate,
    BulkIngredientResponse,
    BulkIngredientResult,
)
from shared.auth.decorators import get_current_user_dep
from shared.auth.access_control import require_user_role, admin_role_required, owner_role_required
from shared.routing import RouteBuilder
from shared.security import create_audit_logger, AuditSeverity, AuditAction

logger = structlog.get_logger()

# Create route builder for consistent URL structure
route_builder = RouteBuilder('inventory')

router = APIRouter(tags=["ingredients"])

# Initialize audit logger
audit_logger = create_audit_logger("inventory-service", AuditLog)


# Helper to extract the user ID from the current user context
def get_current_user_id(current_user: dict = Depends(get_current_user_dep)) -> UUID:
    """Extract user ID from current user context"""
    user_id = current_user.get('user_id')
    if not user_id:
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="User ID not found in context"
        )
    return UUID(user_id)


# ===== INGREDIENTS ENDPOINTS =====

@router.post(
    route_builder.build_base_route("ingredients"),
    response_model=IngredientResponse,
    status_code=status.HTTP_201_CREATED
)
@require_user_role(['admin', 'owner'])
async def create_ingredient(
    ingredient_data: IngredientCreate,
    tenant_id: UUID = Path(..., description="Tenant ID"),
    current_user: dict = Depends(get_current_user_dep),
    db: AsyncSession = Depends(get_db)
):
    """Create a new ingredient (Admin/Owner only)"""
    try:
        # CRITICAL: Check the subscription limit before creating. The check
        # fails open: if the tenant service is unreachable, creation proceeds.
        from app.core.config import settings

        async with httpx.AsyncClient(timeout=5.0) as client:
            try:
                limit_check_response = await client.get(
                    f"{settings.TENANT_SERVICE_URL}/api/v1/tenants/{tenant_id}/subscription/limits/products",
                    headers={
                        "x-user-id": str(current_user.get('user_id')),
                        "x-tenant-id": str(tenant_id)
                    }
                )

                if limit_check_response.status_code == 200:
                    limit_check = limit_check_response.json()

                    if not limit_check.get('can_add', False):
                        logger.warning(
                            "Product limit exceeded",
                            tenant_id=str(tenant_id),
                            current=limit_check.get('current_count'),
                            max=limit_check.get('max_allowed'),
                            reason=limit_check.get('reason')
                        )
                        raise HTTPException(
                            status_code=status.HTTP_402_PAYMENT_REQUIRED,
                            detail={
                                "error": "product_limit_exceeded",
                                "message": limit_check.get('reason', 'Product limit exceeded'),
                                "current_count": limit_check.get('current_count'),
                                "max_allowed": limit_check.get('max_allowed'),
                                "upgrade_required": True
                            }
                        )
                else:
                    logger.warning(
                        "Failed to check product limit, allowing creation",
                        tenant_id=str(tenant_id),
                        status_code=limit_check_response.status_code
                    )
            except httpx.TimeoutException:
                logger.warning(
                    "Timeout checking product limit, allowing creation",
                    tenant_id=str(tenant_id)
                )
            except httpx.RequestError as e:
                logger.warning(
                    "Error checking product limit, allowing creation",
                    tenant_id=str(tenant_id),
                    error=str(e)
                )

        # Extract the user ID - service tokens carry no user UUID
        raw_user_id = current_user.get('user_id')
        if current_user.get('type') == 'service':
            user_id = None
        else:
            try:
                user_id = UUID(raw_user_id)
            except (ValueError, TypeError):
                user_id = None

        service = InventoryService()
        ingredient = await service.create_ingredient(ingredient_data, tenant_id, user_id)

        logger.info(
            "Ingredient created successfully",
            tenant_id=str(tenant_id),
            ingredient_id=str(ingredient.id),
            ingredient_name=ingredient.name
        )

        return ingredient
    except HTTPException:
        raise
    except ValueError as e:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=str(e)
        )
    except Exception as e:
        logger.error(
            "Failed to create ingredient",
            tenant_id=str(tenant_id),
            error=str(e)
        )
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Failed to create ingredient"
        )
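

# Illustrative sketch (not part of the service code): the fail-open limit
# check above, reduced to its essence. The deliberate design choice is that
# billing enforcement degrades gracefully - an unreachable tenant service
# should not block day-to-day inventory work. URL and headers mirror the
# call above.
async def _example_can_add_product(base_url: str, tenant_id, user_id) -> bool:
    import httpx
    try:
        async with httpx.AsyncClient(timeout=5.0) as client:
            resp = await client.get(
                f"{base_url}/api/v1/tenants/{tenant_id}/subscription/limits/products",
                headers={"x-user-id": str(user_id), "x-tenant-id": str(tenant_id)},
            )
        if resp.status_code == 200:
            return resp.json().get("can_add", False)
        return True  # non-200 response: fail open
    except httpx.HTTPError:
        return True  # timeout or network error: fail open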


@router.post(
    route_builder.build_base_route("ingredients/bulk"),
    response_model=BulkIngredientResponse,
    status_code=status.HTTP_201_CREATED
)
@require_user_role(['admin', 'owner'])
async def bulk_create_ingredients(
    bulk_data: BulkIngredientCreate,
    tenant_id: UUID = Path(..., description="Tenant ID"),
    current_user: dict = Depends(get_current_user_dep),
    db: AsyncSession = Depends(get_db)
):
    """Create multiple ingredients in a single request (Admin/Owner only)"""
    import uuid
    transaction_id = str(uuid.uuid4())

    try:
        # CRITICAL: Check the subscription limit ONCE before creating any ingredients
        from app.core.config import settings
        total_requested = len(bulk_data.ingredients)

        async with httpx.AsyncClient(timeout=5.0) as client:
            try:
                # Check whether we can add this many products
                limit_check_response = await client.get(
                    f"{settings.TENANT_SERVICE_URL}/api/v1/tenants/{tenant_id}/subscription/limits/products",
                    headers={
                        "x-user-id": str(current_user.get('user_id')),
                        "x-tenant-id": str(tenant_id)
                    }
                )

                if limit_check_response.status_code == 200:
                    limit_check = limit_check_response.json()

                    if not limit_check.get('can_add', False):
                        logger.warning(
                            "Bulk product limit exceeded",
                            tenant_id=str(tenant_id),
                            requested=total_requested,
                            current=limit_check.get('current_count'),
                            max=limit_check.get('max_allowed'),
                            reason=limit_check.get('reason')
                        )
                        raise HTTPException(
                            status_code=status.HTTP_402_PAYMENT_REQUIRED,
                            detail={
                                "error": "product_limit_exceeded",
                                "message": limit_check.get('reason', 'Product limit exceeded'),
                                "requested": total_requested,
                                "current_count": limit_check.get('current_count'),
                                "max_allowed": limit_check.get('max_allowed'),
                                "upgrade_required": True
                            }
                        )
                else:
                    logger.warning(
                        "Failed to check product limit, allowing bulk creation",
                        tenant_id=str(tenant_id),
                        status_code=limit_check_response.status_code
                    )
            except httpx.TimeoutException:
                logger.warning(
                    "Timeout checking product limit, allowing bulk creation",
                    tenant_id=str(tenant_id)
                )
            except httpx.RequestError as e:
                logger.warning(
                    "Error checking product limit, allowing bulk creation",
                    tenant_id=str(tenant_id),
                    error=str(e)
                )

        # Extract the user ID - service tokens carry no user UUID
        raw_user_id = current_user.get('user_id')
        if current_user.get('type') == 'service':
            user_id = None
        else:
            try:
                user_id = UUID(raw_user_id)
            except (ValueError, TypeError):
                user_id = None

        # Create the ingredients one by one; a failure on one item is recorded
        # in its result entry and does not abort the rest of the batch
        service = InventoryService()
        results: List[BulkIngredientResult] = []
        total_created = 0
        total_failed = 0

        for index, ingredient_data in enumerate(bulk_data.ingredients):
            try:
                ingredient = await service.create_ingredient(ingredient_data, tenant_id, user_id)
                results.append(BulkIngredientResult(
                    index=index,
                    success=True,
                    ingredient=IngredientResponse.from_orm(ingredient),
                    error=None
                ))
                total_created += 1

                logger.debug(
                    "Ingredient created in bulk operation",
                    tenant_id=str(tenant_id),
                    ingredient_id=str(ingredient.id),
                    ingredient_name=ingredient.name,
                    index=index,
                    transaction_id=transaction_id
                )
            except Exception as e:
                results.append(BulkIngredientResult(
                    index=index,
                    success=False,
                    ingredient=None,
                    error=str(e)
                ))
                total_failed += 1

                logger.warning(
                    "Failed to create ingredient in bulk operation",
                    tenant_id=str(tenant_id),
                    index=index,
                    error=str(e),
                    transaction_id=transaction_id
                )

        logger.info(
            "Bulk ingredient creation completed",
            tenant_id=str(tenant_id),
            total_requested=total_requested,
            total_created=total_created,
            total_failed=total_failed,
            transaction_id=transaction_id
        )

        return BulkIngredientResponse(
            total_requested=total_requested,
            total_created=total_created,
            total_failed=total_failed,
            results=results,
            transaction_id=transaction_id
        )
    except HTTPException:
        raise
    except Exception as e:
        logger.error(
            "Failed to process bulk ingredient creation",
            tenant_id=str(tenant_id),
            error=str(e),
            transaction_id=transaction_id
        )
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Failed to process bulk ingredient creation"
        )
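

# Illustrative sketch (not part of the service code): a client consuming the
# bulk response. Because items fail independently, callers should inspect the
# per-item results rather than treating HTTP 201 as all-or-nothing.
def _example_split_bulk_results(response_json: dict):
    created = [r["ingredient"] for r in response_json["results"] if r["success"]]
    failed = [(r["index"], r["error"]) for r in response_json["results"] if not r["success"]]
    return created, failed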


@router.get(
    route_builder.build_base_route("ingredients/count"),
    response_model=dict
)
async def count_ingredients(
    tenant_id: UUID = Path(...),
    current_user: dict = Depends(get_current_user_dep),
    db: AsyncSession = Depends(get_db)
):
    """Get the count of ingredients for a tenant (all users)"""
    try:
        service = InventoryService()
        count = await service.count_ingredients_by_tenant(tenant_id)

        return {
            "tenant_id": str(tenant_id),
            "ingredient_count": count
        }

    except Exception as e:
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Failed to count ingredients: {str(e)}"
        )


@router.get(
    route_builder.build_resource_detail_route("ingredients", "ingredient_id"),
    response_model=IngredientResponse
)
async def get_ingredient(
    ingredient_id: UUID,
    tenant_id: UUID = Path(..., description="Tenant ID"),
    current_user: dict = Depends(get_current_user_dep),
    db: AsyncSession = Depends(get_db)
):
    """Get an ingredient by ID (all users)"""
    try:
        service = InventoryService()
        ingredient = await service.get_ingredient(ingredient_id, tenant_id)

        if not ingredient:
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND,
                detail="Ingredient not found"
            )

        return ingredient
    except HTTPException:
        raise
    except Exception as e:
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Failed to get ingredient"
        )


@router.put(
    route_builder.build_resource_detail_route("ingredients", "ingredient_id"),
    response_model=IngredientResponse
)
@require_user_role(['admin', 'owner', 'member'])
async def update_ingredient(
    ingredient_id: UUID,
    ingredient_data: IngredientUpdate,
    tenant_id: UUID = Path(..., description="Tenant ID"),
    current_user: dict = Depends(get_current_user_dep),
    db: AsyncSession = Depends(get_db)
):
    """Update an ingredient (Admin/Owner/Member)"""
    try:
        service = InventoryService()
        ingredient = await service.update_ingredient(ingredient_id, ingredient_data, tenant_id)

        if not ingredient:
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND,
                detail="Ingredient not found"
            )

        return ingredient
    except ValueError as e:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=str(e)
        )
    except HTTPException:
        raise
    except Exception as e:
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Failed to update ingredient"
        )


@router.get(
    route_builder.build_base_route("ingredients"),
    response_model=List[IngredientResponse]
)
async def list_ingredients(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    skip: int = Query(0, ge=0, description="Number of records to skip"),
    limit: int = Query(100, ge=1, le=1000, description="Number of records to return"),
    category: Optional[str] = Query(None, description="Filter by category"),
    product_type: Optional[str] = Query(None, description="Filter by product type"),
    is_active: Optional[bool] = Query(None, description="Filter by active status"),
    is_low_stock: Optional[bool] = Query(None, description="Filter by low stock status"),
    needs_reorder: Optional[bool] = Query(None, description="Filter by reorder needed"),
    search: Optional[str] = Query(None, description="Search in name, SKU, or barcode"),
    current_user: dict = Depends(get_current_user_dep),
    db: AsyncSession = Depends(get_db)
):
    """List ingredients with filtering (all users)"""
    try:
        service = InventoryService()

        # Build filters; booleans are compared against None so that
        # explicit False values are still applied
        filters = {}
        if category:
            filters['category'] = category
        if product_type:
            filters['product_type'] = product_type
        if is_active is not None:
            filters['is_active'] = is_active
        if is_low_stock is not None:
            filters['is_low_stock'] = is_low_stock
        if needs_reorder is not None:
            filters['needs_reorder'] = needs_reorder
        if search:
            filters['search'] = search

        ingredients = await service.get_ingredients(tenant_id, skip, limit, filters)
        return ingredients
    except Exception as e:
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Failed to list ingredients"
        )


@router.delete(
    route_builder.build_resource_detail_route("ingredients", "ingredient_id"),
    status_code=status.HTTP_204_NO_CONTENT
)
@admin_role_required
async def soft_delete_ingredient(
    ingredient_id: UUID,
    tenant_id: UUID = Path(..., description="Tenant ID"),
    current_user: dict = Depends(get_current_user_dep),
    db: AsyncSession = Depends(get_db)
):
    """Soft delete an ingredient - marks it inactive (Admin only)"""
    try:
        service = InventoryService()
        await service.soft_delete_ingredient(ingredient_id, tenant_id)
        return None
    except ValueError as e:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=str(e)
        )
    except Exception as e:
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Failed to soft delete ingredient"
        )


@router.delete(
    route_builder.build_nested_resource_route("ingredients", "ingredient_id", "hard"),
    response_model=dict
)
@admin_role_required
async def hard_delete_ingredient(
    ingredient_id: UUID,
    tenant_id: UUID = Path(..., description="Tenant ID"),
    current_user: dict = Depends(get_current_user_dep),
    db: AsyncSession = Depends(get_db)
):
    """Hard delete an ingredient and all associated data (Admin only)"""
    try:
        service = InventoryService()
        deletion_summary = await service.hard_delete_ingredient(ingredient_id, tenant_id)

        # Log an audit event for the hard deletion; audit failures are logged
        # but never block the operation itself
        try:
            await audit_logger.log_deletion(
                db_session=db,
                tenant_id=str(tenant_id),
                user_id=current_user["user_id"],
                resource_type="ingredient",
                resource_id=str(ingredient_id),
                resource_data=deletion_summary,
                description="Hard deleted ingredient and all associated data",
                endpoint=f"/ingredients/{ingredient_id}/hard",
                method="DELETE"
            )
        except Exception as audit_error:
            logger.warning("Failed to log audit event", error=str(audit_error))

        return deletion_summary
    except ValueError as e:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=str(e)
        )
    except Exception as e:
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Failed to hard delete ingredient"
        )


@router.get(
    route_builder.build_nested_resource_route("ingredients", "ingredient_id", "stock"),
    response_model=List[StockResponse]
)
async def get_ingredient_stock(
    ingredient_id: UUID,
    tenant_id: UUID = Path(..., description="Tenant ID"),
    include_unavailable: bool = Query(False, description="Include unavailable stock"),
    current_user: dict = Depends(get_current_user_dep),
    db: AsyncSession = Depends(get_db)
):
    """Get stock entries for an ingredient (all users)"""
    try:
        service = InventoryService()
        stock_entries = await service.get_stock_by_ingredient(
            ingredient_id, tenant_id, include_unavailable
        )
        return stock_entries
    except Exception as e:
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Failed to get ingredient stock"
        )
46
services/inventory/app/api/internal.py
Normal file
@@ -0,0 +1,46 @@
"""
Internal API for Inventory Service
Handles internal service-to-service operations
"""

from fastapi import APIRouter, Depends, HTTPException, Header
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select, func
from uuid import UUID
import structlog

from app.core.database import get_db
from app.core.config import settings
from app.models import Ingredient

logger = structlog.get_logger()
router = APIRouter(prefix="/internal", tags=["internal"])


@router.get("/count")
async def get_ingredient_count(
    tenant_id: str,
    db: AsyncSession = Depends(get_db)
):
    """
    Get the count of active ingredients for the onboarding status check.
    Internal endpoint for the tenant service.
    """
    try:
        count = await db.scalar(
            select(func.count()).select_from(Ingredient)
            .where(
                Ingredient.tenant_id == UUID(tenant_id),
                Ingredient.is_active == True
            )
        )

        return {
            "count": count or 0,
            "tenant_id": tenant_id
        }

    except Exception as e:
        logger.error("Failed to get ingredient count", tenant_id=tenant_id, error=str(e))
        raise HTTPException(status_code=500, detail=f"Failed to get ingredient count: {str(e)}")
87
services/inventory/app/api/internal_alert_trigger.py
Normal file
@@ -0,0 +1,87 @@
# services/inventory/app/api/internal_alert_trigger.py
"""
Internal API for triggering inventory alerts.
Used by demo session cloning to generate realistic inventory alerts.

URL Pattern: /api/v1/tenants/{tenant_id}/inventory/internal/alerts/trigger
This follows the tenant-scoped pattern so the gateway can proxy correctly.
"""

from fastapi import APIRouter, HTTPException, Request, Path
from uuid import UUID
import structlog

logger = structlog.get_logger()

router = APIRouter()


# Tenant-scoped URL pattern so the gateway proxies to the inventory service correctly
@router.post("/api/v1/tenants/{tenant_id}/inventory/internal/alerts/trigger")
async def trigger_inventory_alerts(
    request: Request,
    tenant_id: UUID = Path(..., description="Tenant ID to check inventory for")
) -> dict:
    """
    Trigger comprehensive inventory alert checks for a specific tenant (internal use only).

    This endpoint is called by the demo session cloning process after inventory
    data is seeded to generate realistic inventory alerts, including:
    - Critical stock shortages
    - Expiring ingredients
    - Overstock situations

    Security: Protected by an x-internal-service header check.
    """
    try:
        # Verify the internal service header; FastAPI always injects Request,
        # so no None check is needed
        if request.headers.get("x-internal-service") not in ["demo-session", "internal"]:
            logger.warning("Unauthorized internal API call", tenant_id=str(tenant_id))
            raise HTTPException(
                status_code=403,
                detail="This endpoint is for internal service use only"
            )

        # Get the inventory scheduler from app state
        inventory_scheduler = getattr(request.app.state, 'inventory_scheduler', None)

        if not inventory_scheduler:
            logger.error("Inventory scheduler not initialized")
            raise HTTPException(
                status_code=500,
                detail="Inventory scheduler not available"
            )

        # Trigger comprehensive inventory alert checks for the specific tenant
        logger.info("Triggering comprehensive inventory alert checks", tenant_id=str(tenant_id))

        # Call the scheduler's manual trigger method
        result = await inventory_scheduler.trigger_manual_check(tenant_id)

        if result.get("success", False):
            logger.info(
                "Inventory alert checks completed successfully",
                tenant_id=str(tenant_id),
                alerts_generated=result.get("alerts_generated", 0)
            )
        else:
            logger.error(
                "Inventory alert checks failed",
                tenant_id=str(tenant_id),
                error=result.get("error", "Unknown error")
            )

        return result

    except HTTPException:
        raise
    except Exception as e:
        logger.error(
            "Error triggering inventory alerts",
            tenant_id=str(tenant_id),
            error=str(e),
            exc_info=True
        )
        raise HTTPException(
            status_code=500,
            detail=f"Failed to trigger inventory alerts: {str(e)}"
        )
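

# Illustrative sketch (not part of the service code): how the demo-session
# cloner would invoke this trigger. The host is a placeholder; the
# x-internal-service header value must be one of the two accepted by the
# check above.
async def _example_trigger_alerts(tenant_id):
    import httpx
    async with httpx.AsyncClient(base_url="http://inventory.local") as client:  # placeholder host
        resp = await client.post(
            f"/api/v1/tenants/{tenant_id}/inventory/internal/alerts/trigger",
            headers={"x-internal-service": "demo-session"},
        )
        resp.raise_for_status()
        return resp.json().get("alerts_generated", 0)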
602
services/inventory/app/api/internal_demo.py
Normal file
@@ -0,0 +1,602 @@
"""
Internal Demo Cloning API for Inventory Service
Handles internal demo data cloning operations
"""

from fastapi import APIRouter, Depends, HTTPException, Header
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select, delete
from typing import Optional
import structlog
import json
from pathlib import Path
from datetime import datetime, timezone, timedelta
import uuid
from uuid import UUID

from app.core.database import get_db
from app.core.config import settings
from app.models import Ingredient, Stock, ProductType
from shared.utils.demo_dates import adjust_date_for_demo, resolve_time_marker, calculate_edge_case_times

logger = structlog.get_logger()
router = APIRouter(prefix="/internal/demo", tags=["internal"])


def parse_date_field(date_value, session_time: datetime, field_name: str = "date") -> Optional[datetime]:
    """
    Parse a date field, handling both ISO strings and BASE_TS markers.

    Supports:
    - BASE_TS markers: "BASE_TS + 1h30m", "BASE_TS - 2d"
    - ISO 8601 strings: "2025-01-15T06:00:00Z"
    - None values (returns None)

    Returns a timezone-aware datetime, or None.
    """
    if not date_value:
        return None

    # Check whether it's a BASE_TS marker
    if isinstance(date_value, str) and date_value.startswith("BASE_TS"):
        try:
            return resolve_time_marker(date_value, session_time)
        except ValueError as e:
            logger.warning(
                f"Invalid BASE_TS marker in {field_name}",
                marker=date_value,
                error=str(e)
            )
            return None

    # Handle regular ISO date strings
    try:
        if isinstance(date_value, str):
            original_date = datetime.fromisoformat(date_value.replace('Z', '+00:00'))
        elif hasattr(date_value, 'isoformat'):
            original_date = date_value
        else:
            logger.warning(f"Unsupported date format in {field_name}", date_value=date_value)
            return None

        return adjust_date_for_demo(original_date, session_time)
    except (ValueError, AttributeError) as e:
        logger.warning(
            f"Invalid date format in {field_name}",
            date_value=date_value,
            error=str(e)
        )
        return None
|
||||
|
||||
|
||||
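# Illustrative only: how parse_date_field treats its supported inputs, assuming
# resolve_time_marker applies "+/-" offsets such as "1h30m" or "2d" relative to
# session_time (that helper's implementation is not part of this commit):
#
#   parse_date_field("BASE_TS + 1h30m", session_time)  # ~ session_time + 1h 30m
#   parse_date_field("BASE_TS - 2d", session_time)     # ~ session_time - 2 days
#   parse_date_field("2025-01-15T06:00:00Z", session_time)
#       # parsed, then shifted by adjust_date_for_demo relative to session_time
#   parse_date_field(None, session_time)               # None

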
@router.post("/clone")
async def clone_demo_data_internal(
    base_tenant_id: str,
    virtual_tenant_id: str,
    demo_account_type: str,
    session_id: Optional[str] = None,
    session_created_at: Optional[str] = None,
    db: AsyncSession = Depends(get_db)
):
    """
    Clone inventory service data for a virtual demo tenant

    This endpoint creates fresh demo data by:
    1. Loading seed data from JSON files
    2. Applying XOR-based ID transformation
    3. Adjusting dates relative to session creation time
    4. Creating records in the virtual tenant

    Args:
        base_tenant_id: Template tenant UUID (for reference)
        virtual_tenant_id: Target virtual tenant UUID
        demo_account_type: Type of demo account
        session_id: Originating session ID for tracing
        session_created_at: Session creation timestamp for date adjustment
        db: Database session

    Returns:
        Dictionary with cloning results

    Raises:
        HTTPException: On validation or cloning errors
    """
    start_time = datetime.now(timezone.utc)

    try:
        if not all([base_tenant_id, virtual_tenant_id, session_id]):
            raise HTTPException(
                status_code=400,
                detail="Missing required parameters: base_tenant_id, virtual_tenant_id, session_id"
            )

        # Debug logging for UUID values
        logger.debug("Received UUID values", base_tenant_id=base_tenant_id, virtual_tenant_id=virtual_tenant_id)

        # Validate UUID format before processing
        try:
            UUID(base_tenant_id)
            UUID(virtual_tenant_id)
        except ValueError as e:
            logger.error("Invalid UUID format in request",
                         base_tenant_id=base_tenant_id,
                         virtual_tenant_id=virtual_tenant_id,
                         error=str(e))
            raise HTTPException(
                status_code=400,
                detail=f"Invalid UUID format: {str(e)}"
            )

        # Parse session creation time for date adjustment (falls back to now)
        if session_created_at:
            try:
                session_time = datetime.fromisoformat(session_created_at.replace('Z', '+00:00'))
            except (ValueError, AttributeError):
                session_time = start_time
        else:
            session_time = start_time

        logger.info(
            "Starting inventory data cloning with date adjustment",
            base_tenant_id=base_tenant_id,
            virtual_tenant_id=virtual_tenant_id,
            demo_account_type=demo_account_type,
            session_id=session_id,
            session_time=session_time.isoformat()
        )

        # Load seed data from JSON files
        from shared.utils.seed_data_paths import get_seed_data_path

        if demo_account_type == "professional":
            json_file = get_seed_data_path("professional", "03-inventory.json")
        elif demo_account_type == "enterprise":
            json_file = get_seed_data_path("enterprise", "03-inventory.json")
        elif demo_account_type == "enterprise_child":
            json_file = get_seed_data_path("enterprise", "03-inventory.json", child_id=base_tenant_id)
        else:
            raise ValueError(f"Invalid demo account type: {demo_account_type}")

        # Load JSON data
        with open(json_file, 'r', encoding='utf-8') as f:
            seed_data = json.load(f)

        # Check if data already exists for this virtual tenant (idempotency)
        existing_check = await db.execute(
            select(Ingredient).where(Ingredient.tenant_id == virtual_tenant_id).limit(1)
        )
        existing_ingredient = existing_check.scalar_one_or_none()

        if existing_ingredient:
            logger.warning(
                "Demo data already exists, skipping clone",
                virtual_tenant_id=virtual_tenant_id
            )
            return {
                "status": "skipped",
                "reason": "Data already exists",
                "records_cloned": 0
            }

        # Transform and insert data
        records_cloned = 0

        from shared.utils.demo_id_transformer import transform_id
        tenant_uuid = UUID(virtual_tenant_id)

        # Clone ingredients
        for ingredient_data in seed_data.get('ingredients', []):
            # Transform ID
            try:
                UUID(ingredient_data['id'])  # validate format
                transformed_id = transform_id(ingredient_data['id'], tenant_uuid)
            except ValueError as e:
                logger.error("Failed to parse UUIDs for ID transformation",
                             ingredient_id=ingredient_data['id'],
                             virtual_tenant_id=virtual_tenant_id,
                             error=str(e))
                raise HTTPException(
                    status_code=400,
                    detail=f"Invalid UUID format in ingredient data: {str(e)}"
                )

            # Transform dates using standardized helper
            ingredient_data['created_at'] = parse_date_field(
                ingredient_data.get('created_at'), session_time, 'created_at'
            ) or session_time
            ingredient_data['updated_at'] = parse_date_field(
                ingredient_data.get('updated_at'), session_time, 'updated_at'
            ) or session_time

            # Map category field to ingredient_category enum
            if 'category' in ingredient_data:
                category_value = ingredient_data.pop('category')
                # Convert category string to IngredientCategory enum
                from app.models.inventory import IngredientCategory
                try:
                    ingredient_data['ingredient_category'] = IngredientCategory[category_value.upper()]
                except KeyError:
                    # If category not found in enum, use OTHER
                    ingredient_data['ingredient_category'] = IngredientCategory.OTHER

            # Map unit_of_measure string to enum
            if 'unit_of_measure' in ingredient_data:
                from app.models.inventory import UnitOfMeasure
                unit_mapping = {
                    'kilograms': UnitOfMeasure.KILOGRAMS,
                    'grams': UnitOfMeasure.GRAMS,
                    'liters': UnitOfMeasure.LITERS,
                    'milliliters': UnitOfMeasure.MILLILITERS,
                    'units': UnitOfMeasure.UNITS,
                    'pieces': UnitOfMeasure.PIECES,
                    'packages': UnitOfMeasure.PACKAGES,
                    'bags': UnitOfMeasure.BAGS,
                    'boxes': UnitOfMeasure.BOXES
                }
                # Also support uppercase versions
                unit_mapping.update({k.upper(): v for k, v in unit_mapping.items()})

                unit_str = ingredient_data['unit_of_measure']
                if unit_str in unit_mapping:
                    ingredient_data['unit_of_measure'] = unit_mapping[unit_str]
                else:
                    # Default to units if not found
                    ingredient_data['unit_of_measure'] = UnitOfMeasure.UNITS
                    logger.warning("Unknown unit_of_measure, defaulting to UNITS",
                                   original_unit=unit_str)

            # Note: All seed data fields now match the model schema exactly,
            # so no field filtering is needed.

            # Remove original id and tenant_id from ingredient_data to avoid conflict
            ingredient_data.pop('id', None)
            ingredient_data.pop('tenant_id', None)

            # Create ingredient
            ingredient = Ingredient(
                id=str(transformed_id),
                tenant_id=str(virtual_tenant_id),
                **ingredient_data
            )
            db.add(ingredient)
            records_cloned += 1

        # Flush (not commit) so stock rows can reference the new ingredient IDs
        # while keeping everything in a single transaction
        await db.flush()

        # Clone stock batches
        for stock_data in seed_data.get('stock', []):
            # Transform ID - handle both UUID and string IDs
            try:
                # Try to parse as UUID first
                UUID(stock_data['id'])
                transformed_id = transform_id(stock_data['id'], tenant_uuid)
            except ValueError:
                # If not a UUID, generate a deterministic UUID from the string ID
                import hashlib
                stock_id_string = stock_data['id']

                # Create a deterministic UUID from the string ID and tenant ID
                combined = f"{stock_id_string}-{tenant_uuid}"
                hash_obj = hashlib.sha256(combined.encode('utf-8'))
                transformed_id = UUID(hash_obj.hexdigest()[:32])

                logger.info("Generated UUID for non-UUID stock ID",
                            original_id=stock_id_string,
                            generated_id=str(transformed_id))

            # Transform dates using standardized helper
            stock_data['received_date'] = parse_date_field(
                stock_data.get('received_date'), session_time, 'received_date'
            )
            stock_data['expiration_date'] = parse_date_field(
                stock_data.get('expiration_date'), session_time, 'expiration_date'
            )
            stock_data['best_before_date'] = parse_date_field(
                stock_data.get('best_before_date'), session_time, 'best_before_date'
            )
            stock_data['created_at'] = parse_date_field(
                stock_data.get('created_at'), session_time, 'created_at'
            ) or session_time
            stock_data['updated_at'] = parse_date_field(
                stock_data.get('updated_at'), session_time, 'updated_at'
            ) or session_time

            # Remove original id and tenant_id from stock_data to avoid conflict
            stock_data.pop('id', None)
            stock_data.pop('tenant_id', None)
            # Remove notes field as it doesn't exist in the Stock model
            stock_data.pop('notes', None)

            # Transform ingredient_id to match transformed ingredient IDs
            if 'ingredient_id' in stock_data:
                ingredient_id_str = stock_data['ingredient_id']
                try:
                    UUID(ingredient_id_str)
                    transformed_ingredient_id = transform_id(ingredient_id_str, tenant_uuid)
                    stock_data['ingredient_id'] = str(transformed_ingredient_id)
                except ValueError as e:
                    logger.error("Failed to transform ingredient_id",
                                 original_ingredient_id=ingredient_id_str,
                                 error=str(e))
                    raise HTTPException(
                        status_code=400,
                        detail=f"Invalid ingredient_id format: {str(e)}"
                    )

            # Transform supplier_id if present
            if 'supplier_id' in stock_data:
                supplier_id_str = stock_data['supplier_id']
                try:
                    UUID(supplier_id_str)
                    transformed_supplier_id = transform_id(supplier_id_str, tenant_uuid)
                    stock_data['supplier_id'] = str(transformed_supplier_id)
                except ValueError as e:
                    logger.error("Failed to transform supplier_id",
                                 original_supplier_id=supplier_id_str,
                                 error=str(e))
                    raise HTTPException(
                        status_code=400,
                        detail=f"Invalid supplier_id format: {str(e)}"
                    )

            # Create stock batch
            stock = Stock(
                id=str(transformed_id),
                tenant_id=str(virtual_tenant_id),
                **stock_data
            )
            db.add(stock)
            records_cloned += 1

        # Clone stock movements (for waste tracking and sustainability metrics)
        from app.models.inventory import StockMovement, StockMovementType

        for movement_data in seed_data.get('stock_movements', []):
            # Transform ID
            try:
                UUID(movement_data['id'])
                transformed_id = transform_id(movement_data['id'], tenant_uuid)
            except ValueError:
                import hashlib
                movement_id_string = movement_data['id']
                combined = f"{movement_id_string}-{tenant_uuid}"
                hash_obj = hashlib.sha256(combined.encode('utf-8'))
                transformed_id = UUID(hash_obj.hexdigest()[:32])

            # Transform dates
            movement_data['movement_date'] = parse_date_field(
                movement_data.get('movement_date'), session_time, 'movement_date'
            ) or session_time
            movement_data['created_at'] = parse_date_field(
                movement_data.get('created_at'), session_time, 'created_at'
            ) or session_time

            # Transform related IDs
            if 'ingredient_id' in movement_data:
                ingredient_id_str = movement_data['ingredient_id']
                try:
                    transformed_ingredient_id = transform_id(ingredient_id_str, tenant_uuid)
                    movement_data['ingredient_id'] = str(transformed_ingredient_id)
                except ValueError as e:
                    logger.error("Failed to transform ingredient_id in movement",
                                 original_id=ingredient_id_str, error=str(e))
                    raise HTTPException(status_code=400, detail=f"Invalid ingredient_id: {str(e)}")

            if 'stock_id' in movement_data and movement_data['stock_id']:
                stock_id_str = movement_data['stock_id']
                try:
                    transformed_stock_id = transform_id(stock_id_str, tenant_uuid)
                    movement_data['stock_id'] = str(transformed_stock_id)
                except ValueError:
                    # If stock_id doesn't exist or can't be transformed, set to None
                    movement_data['stock_id'] = None

            if 'supplier_id' in movement_data and movement_data['supplier_id']:
                supplier_id_str = movement_data['supplier_id']
                try:
                    transformed_supplier_id = transform_id(supplier_id_str, tenant_uuid)
                    movement_data['supplier_id'] = str(transformed_supplier_id)
                except ValueError:
                    movement_data['supplier_id'] = None

            if 'created_by' in movement_data and movement_data['created_by']:
                created_by_str = movement_data['created_by']
                try:
                    transformed_created_by = transform_id(created_by_str, tenant_uuid)
                    movement_data['created_by'] = str(transformed_created_by)
                except ValueError:
                    movement_data['created_by'] = None

            # Remove original id and tenant_id
            movement_data.pop('id', None)
            movement_data.pop('tenant_id', None)

            # Create stock movement
            stock_movement = StockMovement(
                id=str(transformed_id),
                tenant_id=str(virtual_tenant_id),
                **movement_data
            )
            db.add(stock_movement)
            records_cloned += 1

        # Note: Edge cases are handled exclusively through the JSON seed data.
        # The seed files already contain comprehensive edge cases, including:
        # - Low stock items below reorder points
        # - Items expiring soon
        # - Freshly received stock
        # - Waste movements for sustainability tracking
        # This keeps a single source of truth for demo data.

        logger.info(
            "Edge cases handled by JSON seed data - no manual creation needed",
            seed_data_edge_cases="low_stock, expiring_soon, fresh_stock, waste_movements"
        )

        await db.commit()

        duration_ms = int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000)

        logger.info(
            "Inventory data cloned successfully",
            virtual_tenant_id=virtual_tenant_id,
            records_cloned=records_cloned,
            duration_ms=duration_ms,
            ingredients_cloned=len(seed_data.get('ingredients', [])),
            stock_batches_cloned=len(seed_data.get('stock', [])),
            stock_movements_cloned=len(seed_data.get('stock_movements', []))
        )

        return {
            "service": "inventory",
            "status": "completed",
            "records_cloned": records_cloned,
            "duration_ms": duration_ms,
            "details": {
                "ingredients": len(seed_data.get('ingredients', [])),
                "stock": len(seed_data.get('stock', [])),
                "virtual_tenant_id": str(virtual_tenant_id)
            }
        }

    except ValueError as e:
        logger.error("Invalid UUID format", error=str(e), virtual_tenant_id=virtual_tenant_id)
        raise HTTPException(status_code=400, detail=f"Invalid UUID: {str(e)}")

    except Exception as e:
        logger.error(
            "Failed to clone inventory data",
            error=str(e),
            virtual_tenant_id=virtual_tenant_id,
            exc_info=True
        )

        # Rollback on error
        await db.rollback()

        return {
            "service": "inventory",
            "status": "failed",
            "records_cloned": 0,
            "duration_ms": int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000),
            "error": str(e)
        }

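# Sketch only: transform_id from shared.utils.demo_id_transformer is not shown
# in this commit. Given the "XOR-based ID transformation" described above, a
# plausible assumed implementation would XOR the 128-bit seed UUID with the
# tenant UUID, which is deterministic per tenant and self-inverse (applying it
# twice restores the original ID):
def _xor_transform_id_sketch(seed_id: str, tenant_uuid: UUID) -> UUID:
    return UUID(int=UUID(seed_id).int ^ tenant_uuid.int)

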
@router.get("/clone/health")
async def clone_health_check():
    """
    Health check for internal cloning endpoint
    Used by orchestrator to verify service availability
    """
    return {
        "service": "inventory",
        "clone_endpoint": "available",
        "version": "2.0.0"
    }


@router.delete("/tenant/{virtual_tenant_id}")
async def delete_demo_tenant_data(
    virtual_tenant_id: UUID,
    db: AsyncSession = Depends(get_db)
):
    """
    Delete all demo data for a virtual tenant.
    This endpoint is idempotent - safe to call multiple times.
    """
    start_time = datetime.now(timezone.utc)

    from app.models.inventory import StockMovement

    records_deleted = {
        "stock_movements": 0,
        "stock": 0,
        "ingredients": 0,
        "total": 0
    }

    try:
        # Delete in reverse dependency order

        # 1. Delete stock movements (depends on stock and ingredients)
        result = await db.execute(
            delete(StockMovement)
            .where(StockMovement.tenant_id == virtual_tenant_id)
        )
        records_deleted["stock_movements"] = result.rowcount

        # 2. Delete stock batches (depends on ingredients)
        result = await db.execute(
            delete(Stock)
            .where(Stock.tenant_id == virtual_tenant_id)
        )
        records_deleted["stock"] = result.rowcount

        # 3. Delete ingredients
        result = await db.execute(
            delete(Ingredient)
            .where(Ingredient.tenant_id == virtual_tenant_id)
        )
        records_deleted["ingredients"] = result.rowcount

        records_deleted["total"] = (
            records_deleted["stock_movements"]
            + records_deleted["stock"]
            + records_deleted["ingredients"]
        )

        await db.commit()

        logger.info(
            "demo_data_deleted",
            service="inventory",
            virtual_tenant_id=str(virtual_tenant_id),
            records_deleted=records_deleted
        )

        return {
            "service": "inventory",
            "status": "deleted",
            "virtual_tenant_id": str(virtual_tenant_id),
            "records_deleted": records_deleted,
            "duration_ms": int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000)
        }

    except Exception as e:
        await db.rollback()
        logger.error(
            "demo_data_deletion_failed",
            service="inventory",
            virtual_tenant_id=str(virtual_tenant_id),
            error=str(e)
        )
        raise HTTPException(
            status_code=500,
            detail=f"Failed to delete demo data: {str(e)}"
        )
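
For context, a minimal sketch of how an orchestrator might drive this router. The base URL is an assumption; the query-parameter call style follows the scalar parameters of clone_demo_data_internal above.

import httpx

async def clone_inventory_demo(base_url: str, base_tenant_id: str,
                               virtual_tenant_id: str, session_id: str) -> dict:
    """POST /internal/demo/clone with the parameters the endpoint expects."""
    params = {
        "base_tenant_id": base_tenant_id,
        "virtual_tenant_id": virtual_tenant_id,
        "demo_account_type": "professional",
        "session_id": session_id,
    }
    async with httpx.AsyncClient(base_url=base_url) as client:
        resp = await client.post("/internal/demo/clone", params=params)
        resp.raise_for_status()
        return resp.json()
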
747
services/inventory/app/api/inventory_operations.py
Normal file
@@ -0,0 +1,747 @@
# services/inventory/app/api/inventory_operations.py
"""
Inventory Operations API - Business operations for inventory management
"""

from typing import List, Optional, Dict, Any
from uuid import UUID, uuid4
from fastapi import APIRouter, Depends, HTTPException, Query, Path, status
from sqlalchemy.ext.asyncio import AsyncSession
from pydantic import BaseModel, Field
import structlog

from app.core.database import get_db
from app.services.inventory_service import InventoryService
from app.services.product_classifier import ProductClassifierService, get_product_classifier
from shared.auth.decorators import get_current_user_dep
from shared.auth.access_control import require_user_role
from shared.routing import RouteBuilder

logger = structlog.get_logger()
route_builder = RouteBuilder('inventory')
router = APIRouter(tags=["inventory-operations"])


def get_current_user_id(current_user: dict = Depends(get_current_user_dep)) -> Optional[UUID]:
    """Extract the user ID from the current user context (None for service callers)."""
    user_id = current_user.get('user_id')
    if not user_id:
        if current_user.get('type') == 'service':
            return None
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="User ID not found in context"
        )
    try:
        return UUID(user_id)
    except (ValueError, TypeError):
        return None

# ===== Stock Operations =====

@router.post(
    route_builder.build_operations_route("consume-stock"),
    response_model=dict
)
@require_user_role(['admin', 'owner', 'member'])
async def consume_stock(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    ingredient_id: UUID = Query(..., description="Ingredient ID to consume"),
    quantity: float = Query(..., gt=0, description="Quantity to consume"),
    reference_number: Optional[str] = Query(None, description="Reference number"),
    notes: Optional[str] = Query(None, description="Additional notes"),
    fifo: bool = Query(True, description="Use FIFO method"),
    current_user: dict = Depends(get_current_user_dep),
    db: AsyncSession = Depends(get_db)
):
    """Consume stock for production"""
    try:
        user_id = get_current_user_id(current_user)
        service = InventoryService()
        consumed_items = await service.consume_stock(
            ingredient_id, quantity, tenant_id, user_id, reference_number, notes, fifo
        )
        return {
            "ingredient_id": str(ingredient_id),
            "total_quantity_consumed": quantity,
            "consumed_items": consumed_items,
            "method": "FIFO" if fifo else "LIFO"
        }
    except ValueError as e:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=str(e)
        )
    except Exception as e:
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Failed to consume stock"
        )

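# Sketch only: InventoryService.consume_stock is defined elsewhere; the fifo
# flag above suggests consumption walks batches oldest-first. A minimal
# stand-alone illustration of that allocation (the batch dict shape here is
# hypothetical, not the service's actual model):
def _fifo_allocation_sketch(batches: List[Dict[str, Any]], quantity: float) -> List[Dict[str, Any]]:
    consumed = []
    remaining = quantity
    for batch in sorted(batches, key=lambda b: b['received_date']):  # oldest first
        if remaining <= 0:
            break
        take = min(batch['quantity'], remaining)
        consumed.append({'batch_id': batch['id'], 'quantity': take})
        remaining -= take
    return consumed

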
@router.get(
    route_builder.build_operations_route("stock/expiring"),
    response_model=List[dict]
)
async def get_expiring_stock(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    days_ahead: int = Query(7, ge=1, le=365, description="Days ahead to check"),
    current_user: dict = Depends(get_current_user_dep),
    db: AsyncSession = Depends(get_db)
):
    """Get stock items expiring within the specified number of days"""
    try:
        service = InventoryService()
        expiring_items = await service.check_expiration_alerts(tenant_id, days_ahead)
        return expiring_items
    except Exception as e:
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Failed to get expiring stock"
        )


@router.get(
    route_builder.build_operations_route("stock/low-stock"),
    response_model=List[dict]
)
async def get_low_stock(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    current_user: dict = Depends(get_current_user_dep),
    db: AsyncSession = Depends(get_db)
):
    """Get ingredients with low stock levels"""
    try:
        service = InventoryService()
        low_stock_items = await service.check_low_stock_alerts(tenant_id)
        return low_stock_items
    except Exception as e:
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Failed to get low stock items"
        )


@router.get(
    route_builder.build_operations_route("stock/summary"),
    response_model=dict
)
async def get_stock_summary(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    current_user: dict = Depends(get_current_user_dep),
    db: AsyncSession = Depends(get_db)
):
    """Get stock summary for tenant"""
    try:
        service = InventoryService()
        summary = await service.get_inventory_summary(tenant_id)
        return summary.dict()
    except Exception as e:
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Failed to get stock summary"
        )


# ===== Product Classification Operations =====

class ProductClassificationRequest(BaseModel):
    """Request for single product classification"""
    product_name: str = Field(..., description="Product name to classify")
    sales_volume: Optional[float] = Field(None, description="Total sales volume for context")
    sales_data: Dict[str, Any] = Field(default_factory=dict, description="Additional sales context")

class BatchClassificationRequest(BaseModel):
    """Request for batch product classification"""
    products: List[ProductClassificationRequest] = Field(..., description="Products to classify")


class ProductSuggestionResponse(BaseModel):
    """Response with product classification suggestion"""
    suggestion_id: str
    original_name: str
    suggested_name: str
    product_type: str
    category: str
    unit_of_measure: str
    confidence_score: float
    estimated_shelf_life_days: Optional[int] = None
    requires_refrigeration: bool = False
    requires_freezing: bool = False
    is_seasonal: bool = False
    suggested_supplier: Optional[str] = None
    notes: Optional[str] = None


class BusinessModelAnalysisResponse(BaseModel):
    """Response with business model analysis"""
    model: str
    confidence: float
    ingredient_count: int
    finished_product_count: int
    ingredient_ratio: float
    recommendations: List[str]


class BatchClassificationResponse(BaseModel):
    """Response for batch classification"""
    suggestions: List[ProductSuggestionResponse]
    business_model_analysis: BusinessModelAnalysisResponse
    total_products: int
    high_confidence_count: int
    low_confidence_count: int


@router.post(
    route_builder.build_operations_route("classify-product"),
    response_model=ProductSuggestionResponse
)
async def classify_single_product(
    request: ProductClassificationRequest,
    tenant_id: UUID = Path(..., description="Tenant ID"),
    current_user: Dict[str, Any] = Depends(get_current_user_dep),
    classifier: ProductClassifierService = Depends(get_product_classifier)
):
    """Classify a single product for inventory creation"""
    try:
        suggestion = classifier.classify_product(
            request.product_name,
            request.sales_volume
        )

        response = ProductSuggestionResponse(
            suggestion_id=str(uuid4()),
            original_name=suggestion.original_name,
            suggested_name=suggestion.suggested_name,
            product_type=suggestion.product_type.value,
            category=suggestion.category,
            unit_of_measure=suggestion.unit_of_measure.value,
            confidence_score=suggestion.confidence_score,
            estimated_shelf_life_days=suggestion.estimated_shelf_life_days,
            requires_refrigeration=suggestion.requires_refrigeration,
            requires_freezing=suggestion.requires_freezing,
            is_seasonal=suggestion.is_seasonal,
            suggested_supplier=suggestion.suggested_supplier,
            notes=suggestion.notes
        )

        logger.info("Classified single product",
                    product=request.product_name,
                    classification=suggestion.product_type.value,
                    confidence=suggestion.confidence_score,
                    tenant_id=tenant_id)

        return response

    except Exception as e:
        logger.error("Failed to classify product",
                     error=str(e), product=request.product_name, tenant_id=tenant_id)
        raise HTTPException(status_code=500, detail=f"Classification failed: {str(e)}")


@router.post(
    route_builder.build_operations_route("classify-products-batch"),
    response_model=BatchClassificationResponse
)
async def classify_products_batch(
    request: BatchClassificationRequest,
    tenant_id: UUID = Path(..., description="Tenant ID"),
    current_user: Dict[str, Any] = Depends(get_current_user_dep),
    classifier: ProductClassifierService = Depends(get_product_classifier)
):
    """Classify multiple products for onboarding automation"""
    try:
        if not request.products:
            raise HTTPException(status_code=400, detail="No products provided for classification")

        product_names = [p.product_name for p in request.products]
        sales_volumes = {p.product_name: p.sales_volume for p in request.products if p.sales_volume}

        suggestions = classifier.classify_products_batch(product_names, sales_volumes)

        suggestion_responses = []
        for suggestion in suggestions:
            suggestion_responses.append(ProductSuggestionResponse(
                suggestion_id=str(uuid4()),
                original_name=suggestion.original_name,
                suggested_name=suggestion.suggested_name,
                product_type=suggestion.product_type.value,
                category=suggestion.category,
                unit_of_measure=suggestion.unit_of_measure.value,
                confidence_score=suggestion.confidence_score,
                estimated_shelf_life_days=suggestion.estimated_shelf_life_days,
                requires_refrigeration=suggestion.requires_refrigeration,
                requires_freezing=suggestion.requires_freezing,
                is_seasonal=suggestion.is_seasonal,
                suggested_supplier=suggestion.suggested_supplier,
                notes=suggestion.notes
            ))

        # Analyze business model
        ingredient_count = sum(1 for s in suggestions if s.product_type.value == 'INGREDIENT')
        finished_count = sum(1 for s in suggestions if s.product_type.value == 'FINISHED_PRODUCT')
        semi_finished_count = sum(
            1 for s in suggestions
            if 'semi' in s.suggested_name.lower()
            or 'frozen' in s.suggested_name.lower()
            or 'pre' in s.suggested_name.lower()
        )
        total = len(suggestions)
        ingredient_ratio = ingredient_count / total if total > 0 else 0
        semi_finished_ratio = semi_finished_count / total if total > 0 else 0

        if ingredient_ratio >= 0.7:
            model = 'individual_bakery'
        elif ingredient_ratio <= 0.2 and semi_finished_ratio >= 0.3:
            model = 'central_baker_satellite'
        elif ingredient_ratio <= 0.3:
            model = 'retail_bakery'
        else:
            model = 'hybrid_bakery'

        if model == 'individual_bakery':
            confidence = min(ingredient_ratio * 1.2, 0.95)
        elif model == 'central_baker_satellite':
            confidence = min((semi_finished_ratio + (1 - ingredient_ratio)) / 2 * 1.2, 0.95)
        else:
            confidence = max(abs(ingredient_ratio - 0.5) * 2, 0.1)

        recommendations = {
            'individual_bakery': [
                'Set up raw ingredient inventory management',
                'Configure recipe cost calculation and production planning',
                'Enable supplier relationships for flour, yeast, sugar, etc.',
                'Set up full production workflow with proofing and baking schedules',
                'Enable waste tracking for overproduction'
            ],
            'central_baker_satellite': [
                'Configure central baker delivery schedules',
                'Set up semi-finished product inventory (frozen dough, par-baked items)',
                'Enable finish-baking workflow and timing optimization',
                'Track freshness and shelf-life for received products',
                'Focus on customer demand forecasting for final products'
            ],
            'retail_bakery': [
                'Set up finished product supplier relationships',
                'Configure delivery schedule tracking',
                'Enable freshness monitoring and expiration management',
                'Focus on sales forecasting and customer preferences'
            ],
            'hybrid_bakery': [
                'Configure both ingredient and semi-finished product management',
                'Set up flexible production workflows',
                'Enable both supplier and central baker relationships',
                'Configure multi-tier inventory categories'
            ]
        }

        business_model_analysis = BusinessModelAnalysisResponse(
            model=model,
            confidence=confidence,
            ingredient_count=ingredient_count,
            finished_product_count=finished_count,
            ingredient_ratio=ingredient_ratio,
            recommendations=recommendations.get(model, [])
        )

        high_confidence_count = sum(1 for s in suggestions if s.confidence_score >= 0.7)
        low_confidence_count = sum(1 for s in suggestions if s.confidence_score < 0.6)

        response = BatchClassificationResponse(
            suggestions=suggestion_responses,
            business_model_analysis=business_model_analysis,
            total_products=len(suggestions),
            high_confidence_count=high_confidence_count,
            low_confidence_count=low_confidence_count
        )

        logger.info("Batch classification complete",
                    total_products=len(suggestions),
                    business_model=model,
                    high_confidence=high_confidence_count,
                    low_confidence=low_confidence_count,
                    tenant_id=tenant_id)

        return response

    except HTTPException:
        raise
    except Exception as e:
        logger.error("Failed batch classification",
                     error=str(e), products_count=len(request.products), tenant_id=tenant_id)
        raise HTTPException(status_code=500, detail=f"Batch classification failed: {str(e)}")

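# Worked example of the heuristic above: 15 products with 12 classified as
# ingredients gives ingredient_ratio = 0.8 >= 0.7, so model = 'individual_bakery'
# and confidence = min(0.8 * 1.2, 0.95) = 0.95. With 1 ingredient and 6
# semi-finished items out of 15 (ratios 0.067 and 0.4), the first branch fails,
# ingredient_ratio <= 0.2 and semi_finished_ratio >= 0.3 both hold, and the
# model becomes 'central_baker_satellite'.

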
class BatchProductResolutionRequest(BaseModel):
    """Request for batch product resolution or creation"""
    products: List[Dict[str, Any]] = Field(..., description="Products to resolve or create")


class BatchProductResolutionResponse(BaseModel):
    """Response with product name to inventory ID mappings"""
    product_mappings: Dict[str, str] = Field(..., description="Product name to inventory product ID mapping")
    created_count: int = Field(..., description="Number of products created")
    resolved_count: int = Field(..., description="Number of existing products resolved")
    failed_count: int = Field(0, description="Number of products that failed")


@router.post(
    route_builder.build_operations_route("resolve-or-create-products-batch"),
    response_model=BatchProductResolutionResponse
)
async def resolve_or_create_products_batch(
    request: BatchProductResolutionRequest,
    tenant_id: UUID = Path(..., description="Tenant ID"),
    current_user: Dict[str, Any] = Depends(get_current_user_dep),
    db: AsyncSession = Depends(get_db),
    classifier: ProductClassifierService = Depends(get_product_classifier)
):
    """Resolve or create multiple products in a single optimized operation for sales import"""
    try:
        if not request.products:
            raise HTTPException(status_code=400, detail="No products provided")

        service = InventoryService()
        product_mappings = {}
        created_count = 0
        resolved_count = 0
        failed_count = 0

        for product_data in request.products:
            product_name = product_data.get('name', product_data.get('product_name', ''))
            if not product_name:
                failed_count += 1
                continue

            try:
                existing = await service.search_ingredients_by_name(product_name, tenant_id, db)

                if existing:
                    product_mappings[product_name] = str(existing.id)
                    resolved_count += 1
                    logger.debug("Resolved existing product", product=product_name, tenant_id=tenant_id)
                else:
                    # Use the product classifier to determine the appropriate type
                    suggestion = classifier.classify_product(product_name)
                    category = product_data.get('category', suggestion.category if hasattr(suggestion, 'category') else 'general')

                    ingredient_data = {
                        'name': product_name,
                        'type': suggestion.product_type.value if hasattr(suggestion, 'product_type') else 'finished_product',
                        'unit': suggestion.unit_of_measure.value if hasattr(suggestion, 'unit_of_measure') else 'unit',
                        'current_stock': 0,
                        'reorder_point': 0,
                        'cost_per_unit': 0,
                        'category': category
                    }

                    created = await service.create_ingredient_fast(ingredient_data, tenant_id, db)
                    product_mappings[product_name] = str(created.id)
                    created_count += 1
                    logger.debug("Created new product", product=product_name,
                                 product_type=ingredient_data['type'], tenant_id=tenant_id)

            except Exception as e:
                logger.warning("Failed to resolve/create product",
                               product=product_name, error=str(e), tenant_id=tenant_id)
                failed_count += 1
                continue

        logger.info("Batch product resolution complete",
                    total=len(request.products),
                    created=created_count,
                    resolved=resolved_count,
                    failed=failed_count,
                    tenant_id=tenant_id)

        return BatchProductResolutionResponse(
            product_mappings=product_mappings,
            created_count=created_count,
            resolved_count=resolved_count,
            failed_count=failed_count
        )

    except HTTPException:
        raise
    except Exception as e:
        logger.error("Batch product resolution failed",
                     error=str(e), tenant_id=tenant_id)
        raise HTTPException(status_code=500, detail=f"Batch resolution failed: {str(e)}")


# ================================================================
# NEW: BATCH API ENDPOINTS FOR ORCHESTRATOR
# ================================================================

class BatchIngredientsRequest(BaseModel):
    """Request for batch ingredient fetching"""
    ingredient_ids: List[UUID] = Field(..., description="List of ingredient IDs to fetch")


class BatchIngredientsResponse(BaseModel):
    """Response with ingredient data"""
    ingredients: List[Dict[str, Any]] = Field(..., description="List of ingredient data")
    found_count: int = Field(..., description="Number of ingredients found")
    missing_ids: List[str] = Field(default_factory=list, description="IDs not found")


@router.post(
    route_builder.build_operations_route("ingredients/batch"),
    response_model=BatchIngredientsResponse
)
async def get_ingredients_batch(
    request: BatchIngredientsRequest,
    tenant_id: UUID = Path(..., description="Tenant ID"),
    current_user: Dict[str, Any] = Depends(get_current_user_dep),
    db: AsyncSession = Depends(get_db)
):
    """
    Fetch multiple ingredients in a single request (for the Orchestrator).

    This endpoint reduces N API calls to 1, improving performance when
    the orchestrator needs ingredient data for production/procurement planning.
    """
    try:
        if not request.ingredient_ids:
            return BatchIngredientsResponse(
                ingredients=[],
                found_count=0,
                missing_ids=[]
            )

        service = InventoryService()
        ingredients = []
        found_ids = set()

        for ingredient_id in request.ingredient_ids:
            try:
                ingredient = await service.get_ingredient_by_id(ingredient_id, tenant_id, db)
                if ingredient:
                    ingredients.append({
                        'id': str(ingredient.id),
                        'name': ingredient.name,
                        'type': ingredient.type,
                        'unit': ingredient.unit,
                        'current_stock': float(ingredient.current_stock) if ingredient.current_stock else 0,
                        'reorder_point': float(ingredient.reorder_point) if ingredient.reorder_point else 0,
                        'cost_per_unit': float(ingredient.cost_per_unit) if ingredient.cost_per_unit else 0,
                        'category': ingredient.category,
                        'is_active': ingredient.is_active,
                        'shelf_life_days': ingredient.shelf_life_days
                    })
                    found_ids.add(str(ingredient_id))
            except Exception as e:
                logger.warning(
                    "Failed to fetch ingredient in batch",
                    ingredient_id=str(ingredient_id),
                    error=str(e)
                )
                continue

        missing_ids = [str(ingredient_id) for ingredient_id in request.ingredient_ids
                       if str(ingredient_id) not in found_ids]

        logger.info(
            "Batch ingredient fetch complete",
            requested=len(request.ingredient_ids),
            found=len(ingredients),
            missing=len(missing_ids),
            tenant_id=str(tenant_id)
        )

        return BatchIngredientsResponse(
            ingredients=ingredients,
            found_count=len(ingredients),
            missing_ids=missing_ids
        )

    except Exception as e:
        logger.error(
            "Batch ingredient fetch failed",
            error=str(e),
            tenant_id=str(tenant_id)
        )
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Batch ingredient fetch failed: {str(e)}"
        )

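# Sketch only: how a caller such as the orchestrator might use the batch route.
# The concrete path comes from route_builder.build_operations_route, so the URL
# shown here is an assumption for illustration:
#
#   POST /api/v1/tenants/{tenant_id}/inventory/operations/ingredients/batch
#   {"ingredient_ids": ["<uuid-1>", "<uuid-2>"]}
#
# One request replaces N sequential lookups; IDs that cannot be resolved come
# back in missing_ids rather than failing the whole batch.

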
class BatchStockLevelsRequest(BaseModel):
    """Request for batch stock level fetching"""
    ingredient_ids: List[UUID] = Field(..., description="List of ingredient IDs")


class BatchStockLevelsResponse(BaseModel):
    """Response with stock level data"""
    stock_levels: Dict[str, float] = Field(..., description="Ingredient ID to stock level mapping")
    found_count: int = Field(..., description="Number of stock levels found")


@router.post(
    route_builder.build_operations_route("stock-levels/batch"),
    response_model=BatchStockLevelsResponse
)
async def get_stock_levels_batch(
    request: BatchStockLevelsRequest,
    tenant_id: UUID = Path(..., description="Tenant ID"),
    current_user: Dict[str, Any] = Depends(get_current_user_dep),
    db: AsyncSession = Depends(get_db)
):
    """
    Fetch stock levels for multiple ingredients in a single request.

    Optimized endpoint for the Orchestrator to quickly check inventory levels
    without making individual API calls per ingredient.
    """
    try:
        if not request.ingredient_ids:
            return BatchStockLevelsResponse(
                stock_levels={},
                found_count=0
            )

        service = InventoryService()
        stock_levels = {}

        for ingredient_id in request.ingredient_ids:
            try:
                ingredient = await service.get_ingredient_by_id(ingredient_id, tenant_id, db)
                if ingredient:
                    stock_levels[str(ingredient_id)] = float(ingredient.current_stock) if ingredient.current_stock else 0.0
            except Exception as e:
                logger.warning(
                    "Failed to fetch stock level in batch",
                    ingredient_id=str(ingredient_id),
                    error=str(e)
                )
                continue

        logger.info(
            "Batch stock level fetch complete",
            requested=len(request.ingredient_ids),
            found=len(stock_levels),
            tenant_id=str(tenant_id)
        )

        return BatchStockLevelsResponse(
            stock_levels=stock_levels,
            found_count=len(stock_levels)
        )

    except Exception as e:
        logger.error(
            "Batch stock level fetch failed",
            error=str(e),
            tenant_id=str(tenant_id)
        )
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Batch stock level fetch failed: {str(e)}"
        )


# ============================================================================
# Tenant Data Deletion Operations (Internal Service Only)
# ============================================================================

from shared.auth.access_control import service_only_access
from shared.services.tenant_deletion import TenantDataDeletionResult
from app.services.tenant_deletion_service import InventoryTenantDeletionService


@router.delete(
    route_builder.build_base_route("tenant/{tenant_id}", include_tenant_prefix=False),
    response_model=dict
)
@service_only_access
async def delete_tenant_data(
    tenant_id: str = Path(..., description="Tenant ID to delete data for"),
    current_user: dict = Depends(get_current_user_dep),
    db: AsyncSession = Depends(get_db)
):
    """
    Delete all inventory data for a tenant (internal service only).

    This endpoint is called by the orchestrator during tenant deletion.
    It permanently deletes all inventory-related data.

    **WARNING**: This operation is irreversible!

    Returns:
        Deletion summary with counts of deleted records
    """
    try:
        logger.info("inventory.tenant_deletion.api_called", tenant_id=tenant_id)

        deletion_service = InventoryTenantDeletionService(db)
        result = await deletion_service.safe_delete_tenant_data(tenant_id)

        if not result.success:
            raise HTTPException(
                status_code=500,
                detail=f"Tenant data deletion failed: {', '.join(result.errors)}"
            )

        return {
            "message": "Tenant data deletion completed successfully",
            "summary": result.to_dict()
        }

    except HTTPException:
        raise
    except Exception as e:
        logger.error("inventory.tenant_deletion.api_error",
                     tenant_id=tenant_id,
                     error=str(e),
                     exc_info=True)
        raise HTTPException(
            status_code=500,
            detail=f"Failed to delete tenant data: {str(e)}"
        )


@router.get(
    route_builder.build_base_route("tenant/{tenant_id}/deletion-preview", include_tenant_prefix=False),
    response_model=dict
)
@service_only_access
async def preview_tenant_data_deletion(
    tenant_id: str = Path(..., description="Tenant ID to preview deletion for"),
    current_user: dict = Depends(get_current_user_dep),
    db: AsyncSession = Depends(get_db)
):
    """
    Preview what data would be deleted for a tenant (dry-run).

    This endpoint shows counts of all data that would be deleted
    without actually deleting anything.

    Returns:
        Preview with counts of records to be deleted
    """
    try:
        logger.info("inventory.tenant_deletion.preview_called", tenant_id=tenant_id)

        deletion_service = InventoryTenantDeletionService(db)
        preview_data = await deletion_service.get_tenant_data_preview(tenant_id)
        result = TenantDataDeletionResult(tenant_id=tenant_id, service_name=deletion_service.service_name)
        result.deleted_counts = preview_data
        result.success = True

        return {
            "tenant_id": tenant_id,
            "service": "inventory-service",
            "data_counts": result.deleted_counts,
            "total_items": sum(result.deleted_counts.values())
        }

    except HTTPException:
        raise
    except Exception as e:
        logger.error("inventory.tenant_deletion.preview_error",
                     tenant_id=tenant_id,
                     error=str(e),
                     exc_info=True)
        raise HTTPException(
            status_code=500,
            detail=f"Failed to preview tenant data deletion: {str(e)}"
        )
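
A minimal sketch of exercising the dry-run route before committing to deletion. The base URL and the contents of the auth headers are assumptions; service_only_access defines the real requirement, and the exact paths come from route_builder.build_base_route.

import httpx

def preview_then_delete(base_url: str, tenant_id: str, headers: dict) -> dict:
    """GET the deletion preview; only DELETE if the caller accepts the counts."""
    with httpx.Client(base_url=base_url, headers=headers) as client:
        preview = client.get(f"/tenant/{tenant_id}/deletion-preview")
        preview.raise_for_status()
        counts = preview.json()["data_counts"]
        if sum(counts.values()) == 0:
            return {"skipped": True, "counts": counts}
        resp = client.delete(f"/tenant/{tenant_id}")
        resp.raise_for_status()
        return resp.json()
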
413
services/inventory/app/api/ml_insights.py
Normal file
@@ -0,0 +1,413 @@
"""
|
||||
ML Insights API Endpoints for Inventory Service
|
||||
|
||||
Provides endpoints to trigger ML insight generation for:
|
||||
- Safety stock optimization
|
||||
- Inventory level recommendations
|
||||
- Demand pattern analysis
|
||||
"""
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, Request
|
||||
from pydantic import BaseModel, Field
|
||||
from typing import Optional, List
|
||||
from uuid import UUID
|
||||
from datetime import datetime, timedelta
|
||||
import structlog
|
||||
import pandas as pd
|
||||
|
||||
from app.core.database import get_db
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
logger = structlog.get_logger()
|
||||
|
||||
router = APIRouter(
|
||||
prefix="/api/v1/tenants/{tenant_id}/inventory/ml/insights",
|
||||
tags=["ML Insights"]
|
||||
)
|
||||
|
||||
|
||||
# ================================================================
|
||||
# REQUEST/RESPONSE SCHEMAS
|
||||
# ================================================================
|
||||
|
||||
class SafetyStockOptimizationRequest(BaseModel):
|
||||
"""Request schema for safety stock optimization"""
|
||||
product_ids: Optional[List[str]] = Field(
|
||||
None,
|
||||
description="Specific product IDs to optimize. If None, optimizes all products"
|
||||
)
|
||||
lookback_days: int = Field(
|
||||
90,
|
||||
description="Days of historical demand to analyze",
|
||||
ge=30,
|
||||
le=365
|
||||
)
|
||||
min_history_days: int = Field(
|
||||
30,
|
||||
description="Minimum days of history required",
|
||||
ge=7,
|
||||
le=180
|
||||
)
|
||||
|
||||
|
||||
class SafetyStockOptimizationResponse(BaseModel):
|
||||
"""Response schema for safety stock optimization"""
|
||||
success: bool
|
||||
message: str
|
||||
tenant_id: str
|
||||
products_optimized: int
|
||||
total_insights_generated: int
|
||||
total_insights_posted: int
|
||||
total_cost_savings: float
|
||||
insights_by_product: dict
|
||||
errors: List[str] = []
|
||||
|
||||
|
||||
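# Sketch only: the SafetyStockInsightsOrchestrator's method is defined elsewhere
# and is not part of this file. A common textbook formulation the optimizer may
# resemble (an assumption, not the orchestrator's actual algorithm):
# safety stock = z * sigma_d * sqrt(lead_time_days), where z is the service-level
# factor (about 1.65 for 95%) and sigma_d the standard deviation of daily demand.
def _safety_stock_sketch(daily_demand_std: float, lead_time_days: float,
                         z: float = 1.65) -> float:
    import math
    return z * daily_demand_std * math.sqrt(lead_time_days)

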
# ================================================================
# API ENDPOINTS
# ================================================================

@router.post("/optimize-safety-stock", response_model=SafetyStockOptimizationResponse)
async def trigger_safety_stock_optimization(
    tenant_id: str,
    request_data: SafetyStockOptimizationRequest,
    request: Request,
    db: AsyncSession = Depends(get_db)
):
    """
    Trigger safety stock optimization for inventory products.

    This endpoint:
    1. Fetches historical demand data for specified products
    2. Runs the SafetyStockInsightsOrchestrator to optimize levels
    3. Generates insights about safety stock recommendations
    4. Posts insights to the AI Insights Service
    5. Publishes recommendation events to RabbitMQ

    Args:
        tenant_id: Tenant UUID
        request_data: Optimization parameters
        request: FastAPI request (for app state access)
        db: Database session

    Returns:
        SafetyStockOptimizationResponse with optimization results
    """
    logger.info(
        "ML insights safety stock optimization requested",
        tenant_id=tenant_id,
        product_ids=request_data.product_ids,
        lookback_days=request_data.lookback_days
    )

    try:
        # Import ML orchestrator
        from app.ml.safety_stock_insights_orchestrator import SafetyStockInsightsOrchestrator
        from app.models.inventory import Ingredient
        from sqlalchemy import select

        # Get event publisher from app state (if available)
        event_publisher = getattr(request.app.state, 'event_publisher', None) if hasattr(request, 'app') else None

        # Initialize orchestrator
        orchestrator = SafetyStockInsightsOrchestrator(
            event_publisher=event_publisher
        )

        # Get products to optimize
        if request_data.product_ids:
            query = select(Ingredient).where(
                Ingredient.tenant_id == UUID(tenant_id),
                Ingredient.id.in_([UUID(pid) for pid in request_data.product_ids])
            )
        else:
            query = select(Ingredient).where(
                Ingredient.tenant_id == UUID(tenant_id)
            ).limit(10)  # Limit to prevent timeout

        result = await db.execute(query)
        products = result.scalars().all()

        if not products:
            return SafetyStockOptimizationResponse(
                success=False,
                message="No products found for optimization",
                tenant_id=tenant_id,
                products_optimized=0,
                total_insights_generated=0,
                total_insights_posted=0,
                total_cost_savings=0.0,
                insights_by_product={},
                errors=["No products found"]
            )

        # Calculate date range for demand history
        end_date = datetime.utcnow()
        start_date = end_date - timedelta(days=request_data.lookback_days)

        # Process each product
        total_insights_generated = 0
        total_insights_posted = 0
        total_cost_savings = 0.0
        insights_by_product = {}
        errors = []

        for product in products:
            try:
                product_id = str(product.id)
                logger.info(f"Optimizing safety stock for {product.name} ({product_id})")

                # Fetch real sales/demand history from the sales service
                from shared.clients.sales_client import SalesServiceClient
                from app.core.config import settings

                sales_client = SalesServiceClient(settings)

                try:
                    # Fetch sales data for this product
                    sales_data = await sales_client.get_sales_data(
                        tenant_id=tenant_id,
                        product_id=product_id,
                        start_date=start_date.strftime('%Y-%m-%d'),
                        end_date=end_date.strftime('%Y-%m-%d')
                    )

                    if not sales_data:
                        logger.warning(
                            f"No sales history for product {product_id}, skipping"
                        )
                        continue

                    demand_data = []
                    for sale in sales_data:
                        demand_data.append({
                            'date': pd.to_datetime(sale.get('date') or sale.get('sale_date')),
                            'quantity': float(sale.get('quantity', 0))
                        })

                    if not demand_data:
                        logger.warning(
                            f"No valid demand data for product {product_id}, skipping"
                        )
                        continue

                    demand_history = pd.DataFrame(demand_data)

                    # Aggregate by date if there are multiple sales per day
                    demand_history = demand_history.groupby('date').agg({
                        'quantity': 'sum'
                    }).reset_index()

                    if len(demand_history) < request_data.min_history_days:
                        logger.warning(
                            f"Insufficient demand history for product {product_id}: "
                            f"{len(demand_history)} days < {request_data.min_history_days} required"
                        )
                        continue

                except Exception as e:
                    logger.error(
                        f"Error fetching sales data for product {product_id}: {e}",
                        exc_info=True
                    )
                    continue

                # Get lead time from supplier if available
                lead_time_days = 7  # Default fallback
                if product.supplier_id:
                    try:
                        from shared.clients.suppliers_client import SuppliersClient
                        suppliers_client = SuppliersClient()
                        supplier_data = await suppliers_client.get_supplier_by_id(
                            tenant_id=str(tenant_id),
                            supplier_id=str(product.supplier_id)
                        )
                        if supplier_data and 'standard_lead_time' in supplier_data:
                            lead_time_days = supplier_data['standard_lead_time']
                            logger.debug(
                                f"Using supplier lead time for product {product_id}",
                                lead_time=lead_time_days,
                                supplier_id=str(product.supplier_id)
                            )
                    except Exception as e:
                        logger.warning(
                            f"Failed to fetch supplier lead time for product {product_id}, using default",
                            error=str(e),
                            supplier_id=str(product.supplier_id)
                        )

                # Product characteristics
                product_characteristics = {
                    'lead_time_days': lead_time_days,
                    'shelf_life_days': 30 if product.is_perishable else 365,
                    'perishable': product.is_perishable
                }

                # Run optimization
                results = await orchestrator.optimize_and_post_insights(
                    tenant_id=tenant_id,
                    inventory_product_id=product_id,
                    demand_history=demand_history,
                    product_characteristics=product_characteristics,
                    min_history_days=request_data.min_history_days
                )

                # Track results
                total_insights_generated += results['insights_generated']
                total_insights_posted += results['insights_posted']
                if results.get('cost_savings'):
                    total_cost_savings += results['cost_savings']

                insights_by_product[product_id] = {
                    'product_name': product.name,
                    'insights_posted': results['insights_posted'],
                    'optimal_safety_stock': results.get('optimal_safety_stock'),
                    'cost_savings': results.get('cost_savings', 0.0)
                }

                logger.info(
                    f"Product {product_id} optimization complete",
                    insights_posted=results['insights_posted'],
                    cost_savings=results.get('cost_savings', 0)
                )

            except Exception as e:
                error_msg = f"Error optimizing product {product_id}: {str(e)}"
                logger.error(error_msg, exc_info=True)
                errors.append(error_msg)

        # Close orchestrator
        await orchestrator.close()

        # Build response
        response = SafetyStockOptimizationResponse(
            success=total_insights_posted > 0,
            message=f"Successfully optimized {len(products)} products, generated {total_insights_posted} insights",
            tenant_id=tenant_id,
            products_optimized=len(products),
            total_insights_generated=total_insights_generated,
            total_insights_posted=total_insights_posted,
            total_cost_savings=round(total_cost_savings, 2),
            insights_by_product=insights_by_product,
            errors=errors
        )

        logger.info(
            "ML insights safety stock optimization complete",
            tenant_id=tenant_id,
            total_insights=total_insights_posted,
            total_savings=total_cost_savings
        )

        return response

    except Exception as e:
        logger.error(
            "ML insights safety stock optimization failed",
            tenant_id=tenant_id,
|
||||
error=str(e),
|
||||
exc_info=True
|
||||
)
|
||||
raise HTTPException(
|
||||
status_code=500,
|
||||
detail=f"Safety stock optimization failed: {str(e)}"
|
||||
)
|
||||
|
||||
|
||||
@router.get("/health")
|
||||
async def ml_insights_health():
|
||||
"""Health check for ML insights endpoints"""
|
||||
return {
|
||||
"status": "healthy",
|
||||
"service": "inventory-ml-insights",
|
||||
"endpoints": [
|
||||
"POST /ml/insights/optimize-safety-stock"
|
||||
]
|
||||
}
|
||||
|
||||
|
||||
# ================================================================
|
||||
# INTERNAL ENDPOINTS (for demo-session service)
|
||||
# ================================================================
|
||||
|
||||
from fastapi import Request
|
||||
|
||||
# Create a separate router for internal endpoints to avoid the tenant prefix
|
||||
internal_router = APIRouter(
|
||||
tags=["ML Insights - Internal"]
|
||||
)
|
||||
|
||||
|
||||
@internal_router.post("/api/v1/tenants/{tenant_id}/inventory/internal/ml/generate-safety-stock-insights")
|
||||
async def generate_safety_stock_insights_internal(
|
||||
tenant_id: str,
|
||||
request: Request,
|
||||
db: AsyncSession = Depends(get_db)
|
||||
):
|
||||
"""
|
||||
Internal endpoint to trigger safety stock insights generation for demo sessions.
|
||||
|
||||
This endpoint is called by the demo-session service after cloning data.
|
||||
It uses the same ML logic as the public endpoint but with optimized defaults.
|
||||
|
||||
Security: Protected by x-internal-service header check.
|
||||
|
||||
Args:
|
||||
tenant_id: The tenant UUID
|
||||
request: FastAPI request object
|
||||
db: Database session
|
||||
|
||||
Returns:
|
||||
{
|
||||
"insights_posted": int,
|
||||
"tenant_id": str,
|
||||
"status": str
|
||||
}
|
||||
"""
|
||||
# Verify internal service header
|
||||
if not request or request.headers.get("x-internal-service") not in ["demo-session", "internal"]:
|
||||
logger.warning("Unauthorized internal API call", tenant_id=tenant_id)
|
||||
raise HTTPException(
|
||||
status_code=403,
|
||||
detail="This endpoint is for internal service use only"
|
||||
)
|
||||
|
||||
logger.info("Internal safety stock insights generation triggered", tenant_id=tenant_id)
|
||||
|
||||
try:
|
||||
# Use the existing safety stock optimization logic with sensible defaults
|
||||
request_data = SafetyStockOptimizationRequest(
|
||||
product_ids=None, # Analyze all products
|
||||
lookback_days=90, # 3 months of history
|
||||
min_history_days=30 # Minimum 30 days required
|
||||
)
|
||||
|
||||
# Call the existing safety stock optimization endpoint logic
|
||||
result = await trigger_safety_stock_optimization(
|
||||
tenant_id=tenant_id,
|
||||
request_data=request_data,
|
||||
request=request,
|
||||
db=db
|
||||
)
|
||||
|
||||
# Return simplified response for internal use
|
||||
return {
|
||||
"insights_posted": result.total_insights_posted,
|
||||
"tenant_id": tenant_id,
|
||||
"status": "success" if result.success else "failed",
|
||||
"message": result.message,
|
||||
"products_optimized": result.products_optimized,
|
||||
"total_cost_savings": result.total_cost_savings
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
"Internal safety stock insights generation failed",
|
||||
tenant_id=tenant_id,
|
||||
error=str(e),
|
||||
exc_info=True
|
||||
)
|
||||
raise HTTPException(
|
||||
status_code=500,
|
||||
detail=f"Internal safety stock insights generation failed: {str(e)}"
|
||||
)
|
||||
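A minimal sketch of how a caller such as the demo-session service might invoke the internal endpoint above. The URL path and the x-internal-service header come from the route definition; the host, port, timeout, and httpx usage are illustrative assumptions, not part of this commit.

# Hypothetical caller sketch (not part of this service)
import asyncio
import httpx

async def trigger_demo_insights(tenant_id: str) -> dict:
    # Path and header mirror the internal route above; host/port are assumed
    url = (
        "http://inventory:8000"
        f"/api/v1/tenants/{tenant_id}/inventory/internal/ml/generate-safety-stock-insights"
    )
    async with httpx.AsyncClient(timeout=120.0) as client:
        resp = await client.post(url, headers={"x-internal-service": "demo-session"})
        resp.raise_for_status()  # calls without the header are rejected with 403
        return resp.json()

# asyncio.run(trigger_demo_insights("<tenant-uuid>"))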
334
services/inventory/app/api/stock_entries.py
Normal file
@@ -0,0 +1,334 @@
# services/inventory/app/api/stock_entries.py
"""
Stock Entries API - ATOMIC CRUD operations on Stock model
"""

from typing import List, Optional
from uuid import UUID
from fastapi import APIRouter, Depends, HTTPException, Query, Path, status
from sqlalchemy.ext.asyncio import AsyncSession
import structlog

from app.core.database import get_db
from app.services.inventory_service import InventoryService
from app.schemas.inventory import (
    StockCreate,
    StockUpdate,
    StockResponse,
    StockMovementCreate,
    StockMovementResponse,
    BulkStockCreate,
    BulkStockResponse
)
from shared.auth.decorators import get_current_user_dep
from shared.auth.access_control import require_user_role, admin_role_required
from shared.routing import RouteBuilder

logger = structlog.get_logger()
route_builder = RouteBuilder('inventory')
router = APIRouter(tags=["stock-entries"])


def get_current_user_id(current_user: dict = Depends(get_current_user_dep)) -> Optional[UUID]:
    """Extract user ID from current user context (None for service tokens)"""
    user_id = current_user.get('user_id')
    if not user_id:
        if current_user.get('type') == 'service':
            return None
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="User ID not found in context"
        )
    try:
        return UUID(user_id)
    except (ValueError, TypeError):
        return None


@router.post(
    route_builder.build_base_route("stock"),
    response_model=StockResponse,
    status_code=status.HTTP_201_CREATED
)
@require_user_role(['admin', 'owner', 'member'])
async def add_stock(
    stock_data: StockCreate,
    tenant_id: UUID = Path(..., description="Tenant ID"),
    current_user: dict = Depends(get_current_user_dep),
    db: AsyncSession = Depends(get_db)
):
    """Add new stock entry"""
    try:
        user_id = get_current_user_id(current_user)
        service = InventoryService()
        stock = await service.add_stock(stock_data, tenant_id, user_id)
        return stock
    except ValueError as e:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=str(e)
        )
    except Exception as e:
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Failed to add stock"
        )


@router.post(
    route_builder.build_base_route("stock/bulk"),
    response_model=BulkStockResponse,
    status_code=status.HTTP_201_CREATED
)
@require_user_role(['admin', 'owner', 'member'])
async def bulk_add_stock(
    bulk_data: BulkStockCreate,
    tenant_id: UUID = Path(..., description="Tenant ID"),
    current_user: dict = Depends(get_current_user_dep),
    db: AsyncSession = Depends(get_db)
):
    """Bulk add stock entries for efficient batch operations"""
    try:
        user_id = get_current_user_id(current_user)
        service = InventoryService()
        result = await service.bulk_add_stock(bulk_data, tenant_id, user_id)
        return result
    except ValueError as e:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=str(e)
        )
    except Exception as e:
        logger.error("Failed to bulk add stock", error=str(e))
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Failed to bulk add stock"
        )


@router.get(
    route_builder.build_base_route("stock"),
    response_model=List[StockResponse]
)
async def get_stock(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    skip: int = Query(0, ge=0, description="Number of records to skip"),
    limit: int = Query(100, ge=1, le=1000, description="Number of records to return"),
    ingredient_id: Optional[UUID] = Query(None, description="Filter by ingredient"),
    available_only: bool = Query(True, description="Show only available stock"),
    current_user: dict = Depends(get_current_user_dep),
    db: AsyncSession = Depends(get_db)
):
    """Get stock entries with filtering"""
    try:
        service = InventoryService()
        stock_entries = await service.get_stock(
            tenant_id, skip, limit, ingredient_id, available_only
        )
        return stock_entries
    except Exception as e:
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Failed to get stock entries"
        )


# ===== STOCK MOVEMENTS ROUTES (must come before stock/{stock_id} route) =====

@router.get(
    route_builder.build_base_route("stock/movements"),
    response_model=List[StockMovementResponse]
)
async def get_stock_movements(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    skip: int = Query(0, ge=0, description="Number of records to skip"),
    limit: int = Query(100, ge=1, le=1000, description="Number of records to return"),
    ingredient_id: Optional[str] = Query(None, description="Filter by ingredient"),
    movement_type: Optional[str] = Query(None, description="Filter by movement type"),
    current_user: dict = Depends(get_current_user_dep),
    db: AsyncSession = Depends(get_db)
):
    """Get stock movements with filtering"""
    logger.info("Stock movements endpoint called",
                tenant_id=str(tenant_id),
                ingredient_id=ingredient_id,
                skip=skip,
                limit=limit,
                movement_type=movement_type)

    # Validate and convert ingredient_id if provided
    ingredient_uuid = None
    if ingredient_id:
        try:
            ingredient_uuid = UUID(ingredient_id)
            logger.info("Ingredient ID validated", ingredient_id=str(ingredient_uuid))
        except (ValueError, AttributeError) as e:
            logger.error("Invalid ingredient_id format",
                         ingredient_id=ingredient_id,
                         error=str(e))
            raise HTTPException(
                status_code=status.HTTP_400_BAD_REQUEST,
                detail=f"Invalid ingredient_id format: {ingredient_id}. Must be a valid UUID."
            )

    try:
        service = InventoryService()
        movements = await service.get_stock_movements(
            tenant_id, skip, limit, ingredient_uuid, movement_type
        )
        logger.info("Successfully retrieved stock movements",
                    count=len(movements),
                    tenant_id=str(tenant_id))
        return movements
    except ValueError as e:
        logger.error("Validation error in stock movements",
                     error=str(e),
                     tenant_id=str(tenant_id),
                     ingredient_id=ingredient_id)
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=str(e)
        )
    except Exception as e:
        logger.error("Failed to get stock movements",
                     error=str(e),
                     error_type=type(e).__name__,
                     tenant_id=str(tenant_id),
                     ingredient_id=ingredient_id)
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Failed to get stock movements: {str(e)}"
        )


@router.post(
    route_builder.build_base_route("stock/movements"),
    response_model=StockMovementResponse,
    status_code=status.HTTP_201_CREATED
)
@require_user_role(['admin', 'owner', 'member'])
async def create_stock_movement(
    movement_data: StockMovementCreate,
    tenant_id: UUID = Path(..., description="Tenant ID"),
    current_user: dict = Depends(get_current_user_dep),
    db: AsyncSession = Depends(get_db)
):
    """Create stock movement record"""
    try:
        user_id = get_current_user_id(current_user)
        service = InventoryService()
        movement = await service.create_stock_movement(movement_data, tenant_id, user_id)
        return movement
    except ValueError as e:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=str(e)
        )
    except Exception as e:
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Failed to create stock movement"
        )


# ===== STOCK DETAIL ROUTES (must come after stock/movements routes) =====

@router.get(
    route_builder.build_resource_detail_route("stock", "stock_id"),
    response_model=StockResponse
)
async def get_stock_entry(
    stock_id: UUID = Path(..., description="Stock entry ID"),
    tenant_id: UUID = Path(..., description="Tenant ID"),
    current_user: dict = Depends(get_current_user_dep),
    db: AsyncSession = Depends(get_db)
):
    """Get specific stock entry"""
    try:
        service = InventoryService()
        stock = await service.get_stock_entry(stock_id, tenant_id)

        if not stock:
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND,
                detail="Stock entry not found"
            )

        return stock
    except HTTPException:
        raise
    except Exception as e:
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Failed to get stock entry"
        )


@router.put(
    route_builder.build_resource_detail_route("stock", "stock_id"),
    response_model=StockResponse
)
@require_user_role(['admin', 'owner', 'member'])
async def update_stock(
    stock_data: StockUpdate,
    stock_id: UUID = Path(..., description="Stock entry ID"),
    tenant_id: UUID = Path(..., description="Tenant ID"),
    current_user: dict = Depends(get_current_user_dep),
    db: AsyncSession = Depends(get_db)
):
    """Update stock entry"""
    try:
        service = InventoryService()
        stock = await service.update_stock(stock_id, stock_data, tenant_id)

        if not stock:
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND,
                detail="Stock entry not found"
            )

        return stock
    except ValueError as e:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=str(e)
        )
    except HTTPException:
        raise
    except Exception as e:
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Failed to update stock entry"
        )


@router.delete(
    route_builder.build_resource_detail_route("stock", "stock_id"),
    status_code=status.HTTP_204_NO_CONTENT
)
@admin_role_required
async def delete_stock(
    stock_id: UUID = Path(..., description="Stock entry ID"),
    tenant_id: UUID = Path(..., description="Tenant ID"),
    current_user: dict = Depends(get_current_user_dep),
    db: AsyncSession = Depends(get_db)
):
    """Delete stock entry"""
    try:
        service = InventoryService()
        deleted = await service.delete_stock(stock_id, tenant_id)

        if not deleted:
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND,
                detail="Stock entry not found"
            )

        return None
    except HTTPException:
        raise
    except Exception as e:
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Failed to delete stock entry"
        )
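The route-ordering comments in this file matter because FastAPI matches routes in registration order. A self-contained sketch, independent of this service's RouteBuilder and models, showing why the literal stock/movements path must be registered before the parameterized stock/{stock_id} path:

from fastapi import FastAPI

app = FastAPI()

@app.get("/stock/movements")
async def movements():
    # Registered first, so GET /stock/movements lands here
    return {"route": "movements"}

@app.get("/stock/{stock_id}")
async def detail(stock_id: str):
    # Registered second; catches everything else under /stock/
    return {"route": "detail", "stock_id": stock_id}

# Reversing the registration order would send GET /stock/movements to detail()
# with stock_id="movements", shadowing the movements endpoint.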
459
services/inventory/app/api/stock_receipts.py
Normal file
@@ -0,0 +1,459 @@
"""
|
||||
Stock Receipt API Endpoints
|
||||
|
||||
Handles delivery receipt confirmation with lot-level tracking.
|
||||
Critical for food safety compliance - captures expiration dates per lot.
|
||||
"""
|
||||
|
||||
from fastapi import APIRouter, HTTPException, Depends, status
|
||||
from pydantic import BaseModel, Field, validator
|
||||
from typing import List, Optional
|
||||
from uuid import UUID
|
||||
from datetime import datetime, date
|
||||
from decimal import Decimal
|
||||
|
||||
import structlog
|
||||
from sqlalchemy import select
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.models.stock_receipt import StockReceipt, StockReceiptLineItem, StockLot, ReceiptStatus
|
||||
from app.models.inventory import Stock, StockMovement, StockMovementType
|
||||
from shared.database.dependencies import get_db
|
||||
from shared.security import get_current_user
|
||||
|
||||
logger = structlog.get_logger()
|
||||
|
||||
router = APIRouter(prefix="/stock-receipts", tags=["stock-receipts"])
|
||||
|
||||
|
||||
# ============================================================
|
||||
# Request/Response Models
|
||||
# ============================================================
|
||||
|
||||
class LotInput(BaseModel):
|
||||
"""Individual lot details within a line item"""
|
||||
lot_number: Optional[str] = None
|
||||
supplier_lot_number: Optional[str] = None
|
||||
quantity: Decimal = Field(..., gt=0)
|
||||
unit_of_measure: str
|
||||
expiration_date: date = Field(..., description="Required for food safety")
|
||||
best_before_date: Optional[date] = None
|
||||
warehouse_location: Optional[str] = None
|
||||
storage_zone: Optional[str] = None
|
||||
quality_notes: Optional[str] = None
|
||||
|
||||
|
||||
class LineItemInput(BaseModel):
|
||||
"""Line item input for stock receipt"""
|
||||
ingredient_id: UUID
|
||||
ingredient_name: Optional[str] = None
|
||||
po_line_id: Optional[UUID] = None
|
||||
expected_quantity: Decimal
|
||||
actual_quantity: Decimal
|
||||
unit_of_measure: str
|
||||
discrepancy_reason: Optional[str] = None
|
||||
unit_cost: Optional[Decimal] = None
|
||||
lots: List[LotInput] = Field(..., min_items=1, description="At least one lot required")
|
||||
|
||||
@validator('lots')
|
||||
def validate_lot_totals(cls, lots, values):
|
||||
"""Ensure lot quantities sum to actual quantity"""
|
||||
if 'actual_quantity' not in values:
|
||||
return lots
|
||||
|
||||
total_lot_qty = sum(lot.quantity for lot in lots)
|
||||
actual_qty = values['actual_quantity']
|
||||
|
||||
if abs(total_lot_qty - actual_qty) > Decimal('0.01'): # Allow small floating point errors
|
||||
raise ValueError(
|
||||
f"Lot quantities ({total_lot_qty}) must sum to actual quantity ({actual_qty})"
|
||||
)
|
||||
|
||||
return lots
|
||||
|
||||
|
||||
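# Example of the invariant validate_lot_totals enforces (values hypothetical):
# a line item with actual_quantity=Decimal("25.0") passes with lots of
# 10.0 + 15.0, but raises ValueError with lots of 10.0 + 14.0, because the
# difference (1.0) exceeds the Decimal('0.01') tolerance.
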
class CreateStockReceiptRequest(BaseModel):
    """Create draft stock receipt"""
    tenant_id: UUID
    po_id: UUID
    po_number: Optional[str] = None
    received_by_user_id: UUID
    supplier_id: Optional[UUID] = None
    supplier_name: Optional[str] = None
    notes: Optional[str] = None
    line_items: List[LineItemInput] = Field(..., min_items=1)


class UpdateStockReceiptRequest(BaseModel):
    """Update draft stock receipt"""
    notes: Optional[str] = None
    line_items: Optional[List[LineItemInput]] = None


class ConfirmStockReceiptRequest(BaseModel):
    """Confirm stock receipt and update inventory"""
    confirmed_by_user_id: UUID


# ============================================================
# API Endpoints
# ============================================================

@router.post("/", status_code=status.HTTP_201_CREATED)
async def create_stock_receipt(
    request: CreateStockReceiptRequest,
    db: AsyncSession = Depends(get_db),
    current_user: dict = Depends(get_current_user)
):
    """
    Create a draft stock receipt from a delivery.

    Workflow:
    1. User clicks "Mark as Received" on delivery alert
    2. This endpoint creates draft receipt
    3. Frontend opens StockReceiptModal with draft data
    4. User fills in lot details
    5. User saves draft (PUT endpoint) or confirms (POST /confirm)
    """
    try:
        # Create receipt
        receipt = StockReceipt(
            tenant_id=request.tenant_id,
            po_id=request.po_id,
            po_number=request.po_number,
            received_at=datetime.utcnow(),
            received_by_user_id=request.received_by_user_id,
            status=ReceiptStatus.DRAFT,
            supplier_id=request.supplier_id,
            supplier_name=request.supplier_name,
            notes=request.notes,
            has_discrepancies=False
        )

        db.add(receipt)
        await db.flush()  # Get receipt ID

        # Create line items and lots
        for line_input in request.line_items:
            has_discrepancy = abs(line_input.expected_quantity - line_input.actual_quantity) > Decimal('0.01')

            if has_discrepancy:
                receipt.has_discrepancies = True

            line_item = StockReceiptLineItem(
                tenant_id=request.tenant_id,
                receipt_id=receipt.id,
                ingredient_id=line_input.ingredient_id,
                ingredient_name=line_input.ingredient_name,
                po_line_id=line_input.po_line_id,
                expected_quantity=line_input.expected_quantity,
                actual_quantity=line_input.actual_quantity,
                unit_of_measure=line_input.unit_of_measure,
                has_discrepancy=has_discrepancy,
                discrepancy_reason=line_input.discrepancy_reason,
                unit_cost=line_input.unit_cost,
                total_cost=line_input.unit_cost * line_input.actual_quantity if line_input.unit_cost else None
            )

            db.add(line_item)
            await db.flush()  # Get line item ID

            # Create lots
            for lot_input in line_input.lots:
                lot = StockLot(
                    tenant_id=request.tenant_id,
                    line_item_id=line_item.id,
                    lot_number=lot_input.lot_number,
                    supplier_lot_number=lot_input.supplier_lot_number,
                    quantity=lot_input.quantity,
                    unit_of_measure=lot_input.unit_of_measure,
                    expiration_date=lot_input.expiration_date,
                    best_before_date=lot_input.best_before_date,
                    warehouse_location=lot_input.warehouse_location,
                    storage_zone=lot_input.storage_zone,
                    quality_notes=lot_input.quality_notes
                )
                db.add(lot)

        await db.commit()
        await db.refresh(receipt)

        logger.info(
            "Stock receipt created",
            receipt_id=str(receipt.id),
            po_id=str(request.po_id),
            line_items=len(request.line_items),
            tenant_id=str(request.tenant_id)
        )

        return receipt.to_dict()

    except Exception as e:
        await db.rollback()
        logger.error(
            "Failed to create stock receipt",
            error=str(e),
            po_id=str(request.po_id)
        )
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Failed to create stock receipt: {str(e)}"
        )


@router.get("/{receipt_id}")
async def get_stock_receipt(
    receipt_id: UUID,
    db: AsyncSession = Depends(get_db),
    current_user: dict = Depends(get_current_user)
):
    """
    Retrieve stock receipt with all line items and lots.

    Used to resume editing a draft receipt.
    """
    try:
        stmt = select(StockReceipt).where(
            StockReceipt.id == receipt_id,
            StockReceipt.tenant_id == current_user['tenant_id']
        )
        result = await db.execute(stmt)
        receipt = result.scalar_one_or_none()

        if not receipt:
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND,
                detail="Stock receipt not found"
            )

        return receipt.to_dict()

    except HTTPException:
        raise
    except Exception as e:
        logger.error(
            "Failed to retrieve stock receipt",
            receipt_id=str(receipt_id),
            error=str(e)
        )
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Failed to retrieve stock receipt: {str(e)}"
        )


@router.put("/{receipt_id}")
async def update_stock_receipt(
    receipt_id: UUID,
    request: UpdateStockReceiptRequest,
    db: AsyncSession = Depends(get_db),
    current_user: dict = Depends(get_current_user)
):
    """
    Update draft stock receipt.

    Allows user to save progress while filling in lot details.
    """
    try:
        stmt = select(StockReceipt).where(
            StockReceipt.id == receipt_id,
            StockReceipt.tenant_id == current_user['tenant_id'],
            StockReceipt.status == ReceiptStatus.DRAFT
        )
        result = await db.execute(stmt)
        receipt = result.scalar_one_or_none()

        if not receipt:
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND,
                detail="Draft stock receipt not found"
            )

        # Update notes if provided
        if request.notes is not None:
            receipt.notes = request.notes

        # Update line items if provided
        if request.line_items:
            # Delete existing line items (cascade deletes lots)
            for line_item in receipt.line_items:
                await db.delete(line_item)

            # Create new line items
            for line_input in request.line_items:
                has_discrepancy = abs(line_input.expected_quantity - line_input.actual_quantity) > Decimal('0.01')

                line_item = StockReceiptLineItem(
                    tenant_id=current_user['tenant_id'],
                    receipt_id=receipt.id,
                    ingredient_id=line_input.ingredient_id,
                    ingredient_name=line_input.ingredient_name,
                    po_line_id=line_input.po_line_id,
                    expected_quantity=line_input.expected_quantity,
                    actual_quantity=line_input.actual_quantity,
                    unit_of_measure=line_input.unit_of_measure,
                    has_discrepancy=has_discrepancy,
                    discrepancy_reason=line_input.discrepancy_reason,
                    unit_cost=line_input.unit_cost,
                    total_cost=line_input.unit_cost * line_input.actual_quantity if line_input.unit_cost else None
                )

                db.add(line_item)
                await db.flush()

                # Create lots
                for lot_input in line_input.lots:
                    lot = StockLot(
                        tenant_id=current_user['tenant_id'],
                        line_item_id=line_item.id,
                        lot_number=lot_input.lot_number,
                        supplier_lot_number=lot_input.supplier_lot_number,
                        quantity=lot_input.quantity,
                        unit_of_measure=lot_input.unit_of_measure,
                        expiration_date=lot_input.expiration_date,
                        best_before_date=lot_input.best_before_date,
                        warehouse_location=lot_input.warehouse_location,
                        storage_zone=lot_input.storage_zone,
                        quality_notes=lot_input.quality_notes
                    )
                    db.add(lot)

        await db.commit()
        await db.refresh(receipt)

        logger.info(
            "Stock receipt updated",
            receipt_id=str(receipt_id),
            tenant_id=str(current_user['tenant_id'])
        )

        return receipt.to_dict()

    except HTTPException:
        raise
    except Exception as e:
        await db.rollback()
        logger.error(
            "Failed to update stock receipt",
            receipt_id=str(receipt_id),
            error=str(e)
        )
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Failed to update stock receipt: {str(e)}"
        )


@router.post("/{receipt_id}/confirm")
async def confirm_stock_receipt(
    receipt_id: UUID,
    request: ConfirmStockReceiptRequest,
    db: AsyncSession = Depends(get_db),
    current_user: dict = Depends(get_current_user)
):
    """
    Confirm stock receipt and update inventory.

    This finalizes the receipt:
    1. Creates Stock records for each lot
    2. Creates StockMovement records (PURCHASE type)
    3. Marks receipt as CONFIRMED
    4. Updates PO status to RECEIVED (via procurement service)
    """
    try:
        stmt = select(StockReceipt).where(
            StockReceipt.id == receipt_id,
            StockReceipt.tenant_id == current_user['tenant_id'],
            StockReceipt.status == ReceiptStatus.DRAFT
        )
        result = await db.execute(stmt)
        receipt = result.scalar_one_or_none()

        if not receipt:
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND,
                detail="Draft stock receipt not found"
            )

        # Process each line item and its lots
        for line_item in receipt.line_items:
            for lot in line_item.lots:
                # Create Stock record
                stock = Stock(
                    tenant_id=current_user['tenant_id'],
                    ingredient_id=line_item.ingredient_id,
                    supplier_id=receipt.supplier_id,
                    batch_number=f"RCV-{receipt_id}-{lot.id}",
                    lot_number=lot.lot_number,
                    supplier_batch_ref=lot.supplier_lot_number,
                    production_stage='raw_ingredient',
                    current_quantity=float(lot.quantity),
                    reserved_quantity=0.0,
                    available_quantity=float(lot.quantity),
                    received_date=receipt.received_at,
                    expiration_date=datetime.combine(lot.expiration_date, datetime.min.time()),
                    best_before_date=datetime.combine(lot.best_before_date, datetime.min.time()) if lot.best_before_date else None,
                    unit_cost=line_item.unit_cost,
                    total_cost=line_item.unit_cost * lot.quantity if line_item.unit_cost else None,
                    storage_location=lot.warehouse_location,
                    warehouse_zone=lot.storage_zone,
                    is_available=True,
                    is_expired=False,
                    quality_status="good"
                )
                db.add(stock)
                await db.flush()

                # Link lot to stock
                lot.stock_id = stock.id

                # Create StockMovement record
                movement = StockMovement(
                    tenant_id=current_user['tenant_id'],
                    ingredient_id=line_item.ingredient_id,
                    stock_id=stock.id,
                    movement_type=StockMovementType.PURCHASE,
                    quantity=float(lot.quantity),
                    unit_cost=line_item.unit_cost,
                    total_cost=line_item.unit_cost * lot.quantity if line_item.unit_cost else None,
                    quantity_before=0.0,
                    quantity_after=float(lot.quantity),
                    reference_number=receipt.po_number,
                    supplier_id=receipt.supplier_id,
                    notes=f"Stock receipt {receipt_id}",
                    movement_date=receipt.received_at
                )
                db.add(movement)

        # Mark receipt as confirmed
        receipt.status = ReceiptStatus.CONFIRMED
        receipt.confirmed_at = datetime.utcnow()

        await db.commit()

        logger.info(
            "Stock receipt confirmed",
            receipt_id=str(receipt_id),
            po_id=str(receipt.po_id),
            tenant_id=str(current_user['tenant_id'])
        )

        return {
            "status": "success",
            "receipt_id": str(receipt_id),
            "message": "Stock receipt confirmed and inventory updated"
        }

    except HTTPException:
        raise
    except Exception as e:
        await db.rollback()
        logger.error(
            "Failed to confirm stock receipt",
            receipt_id=str(receipt_id),
            error=str(e)
        )
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Failed to confirm stock receipt: {str(e)}"
        )
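A hedged sketch of a request body for POST /stock-receipts, assembled only from the pydantic models above; every concrete ID, quantity, and date is invented for illustration. The lot quantities sum to actual_quantity so validate_lot_totals accepts the line item.

# Illustrative CreateStockReceiptRequest payload (all values hypothetical)
example_receipt = {
    "tenant_id": "11111111-1111-1111-1111-111111111111",
    "po_id": "22222222-2222-2222-2222-222222222222",
    "po_number": "PO-2024-001",
    "received_by_user_id": "33333333-3333-3333-3333-333333333333",
    "line_items": [
        {
            "ingredient_id": "44444444-4444-4444-4444-444444444444",
            "expected_quantity": "25.0",
            "actual_quantity": "25.0",
            "unit_of_measure": "kg",
            "lots": [
                # 10.0 + 15.0 == 25.0, matching actual_quantity
                {"quantity": "10.0", "unit_of_measure": "kg", "expiration_date": "2026-07-01"},
                {"quantity": "15.0", "unit_of_measure": "kg", "expiration_date": "2026-07-15"},
            ],
        }
    ],
}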
398
services/inventory/app/api/sustainability.py
Normal file
@@ -0,0 +1,398 @@
# ================================================================
# services/inventory/app/api/sustainability.py
# ================================================================
"""
Inventory Sustainability API - Microservices Architecture
Provides inventory-specific sustainability metrics (waste tracking, expiry alerts)
Following microservices principles: each service owns its domain data
"""

from datetime import datetime, timedelta
from typing import Optional
from uuid import UUID
from fastapi import APIRouter, Depends, HTTPException, Query, Path, status
from sqlalchemy.ext.asyncio import AsyncSession
import structlog

from shared.auth.decorators import get_current_user_dep
from app.core.database import get_db
from app.repositories.stock_movement_repository import StockMovementRepository
from app.repositories.stock_repository import StockRepository

logger = structlog.get_logger()

router = APIRouter(tags=["sustainability"])


# ===== INVENTORY SUSTAINABILITY ENDPOINTS =====

@router.get(
    "/api/v1/tenants/{tenant_id}/inventory/sustainability/waste-metrics",
    summary="Get Inventory Waste Metrics",
    description="Get inventory-specific waste metrics from stock movements and expired items"
)
async def get_inventory_waste_metrics(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    start_date: Optional[datetime] = Query(None, description="Start date for metrics (default: 30 days ago)"),
    end_date: Optional[datetime] = Query(None, description="End date for metrics (default: now)"),
    current_user: dict = Depends(get_current_user_dep),
    db: AsyncSession = Depends(get_db)
):
    """
    Get inventory waste metrics including:
    - Waste from stock movements (expired, damaged, contaminated, spillage)
    - Total waste quantity and cost
    - Breakdown by waste reason
    - Number of waste incidents

    **Domain**: Inventory Service owns this data
    **Use case**: Frontend aggregates with production service waste metrics
    """
    try:
        # Default to last 30 days
        if not end_date:
            end_date = datetime.now()
        if not start_date:
            start_date = end_date - timedelta(days=30)

        # Get inventory waste from stock movements
        stock_movement_repo = StockMovementRepository(db)

        # Get waste movements using explicit date range
        waste_movements = await stock_movement_repo.get_waste_movements(
            tenant_id=tenant_id,
            start_date=start_date,
            end_date=end_date,
            limit=1000
        )

        # Calculate period days
        days_back = (end_date - start_date).days

        # Calculate totals
        total_waste_kg = 0.0
        total_waste_cost_eur = 0.0
        waste_by_reason = {
            'expired': 0.0,
            'damaged': 0.0,
            'contaminated': 0.0,
            'spillage': 0.0,
            'other': 0.0
        }

        for movement in (waste_movements or []):
            quantity = float(movement.quantity) if movement.quantity else 0.0
            total_waste_kg += quantity

            # Add to cost if available
            if movement.total_cost:
                total_waste_cost_eur += float(movement.total_cost)

            # Categorize by reason
            reason = movement.reason_code or 'other'
            if reason in waste_by_reason:
                waste_by_reason[reason] += quantity
            else:
                waste_by_reason['other'] += quantity

        result = {
            'inventory_waste_kg': round(total_waste_kg, 2),
            'waste_cost_eur': round(total_waste_cost_eur, 2),
            'waste_by_reason': {
                key: round(val, 2) for key, val in waste_by_reason.items()
            },
            'waste_movements_count': len(waste_movements) if waste_movements else 0,
            'period': {
                'start_date': start_date.isoformat(),
                'end_date': end_date.isoformat(),
                'days': days_back
            }
        }

        logger.info(
            "Inventory waste metrics retrieved",
            tenant_id=str(tenant_id),
            waste_kg=result['inventory_waste_kg'],
            movements=result['waste_movements_count']
        )

        return result

    except Exception as e:
        logger.error(
            "Error getting inventory waste metrics",
            tenant_id=str(tenant_id),
            error=str(e)
        )
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Failed to retrieve inventory waste metrics: {str(e)}"
        )


@router.get(
    "/api/v1/tenants/{tenant_id}/inventory/sustainability/expiry-alerts",
    summary="Get Expiry Alerts",
    description="Get items at risk of expiring soon (waste prevention opportunities)"
)
async def get_expiry_alerts(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    days_ahead: int = Query(7, ge=1, le=30, description="Days ahead to check for expiry"),
    current_user: dict = Depends(get_current_user_dep),
    db: AsyncSession = Depends(get_db)
):
    """
    Get items at risk of expiring within the specified time window.

    **Purpose**: Waste prevention and FIFO compliance
    **Returns**:
    - Items expiring soon
    - Potential waste value
    - Recommended actions
    """
    try:
        stock_repo = StockRepository(db)

        # Get stock items expiring soon
        expiring_soon = await stock_repo.get_expiring_stock(
            tenant_id=tenant_id,
            days_ahead=days_ahead
        )

        at_risk_items = []
        total_at_risk_kg = 0.0
        total_at_risk_value_eur = 0.0

        for stock in (expiring_soon or []):
            quantity = float(stock.quantity) if stock.quantity else 0.0
            unit_cost = float(stock.unit_cost) if stock.unit_cost else 0.0
            total_value = quantity * unit_cost

            total_at_risk_kg += quantity
            total_at_risk_value_eur += total_value

            at_risk_items.append({
                'stock_id': str(stock.id),
                'ingredient_id': str(stock.ingredient_id),
                'ingredient_name': stock.ingredient.name if stock.ingredient else 'Unknown',
                'quantity': round(quantity, 2),
                'unit': stock.unit,
                'expiry_date': stock.expiry_date.isoformat() if stock.expiry_date else None,
                'days_until_expiry': (stock.expiry_date - datetime.now()).days if stock.expiry_date else None,
                'value_eur': round(total_value, 2),
                'location': stock.location or 'unspecified'
            })

        result = {
            'at_risk_items': at_risk_items,
            'total_items': len(at_risk_items),
            'total_at_risk_kg': round(total_at_risk_kg, 2),
            'total_at_risk_value_eur': round(total_at_risk_value_eur, 2),
            'alert_window_days': days_ahead,
            'checked_at': datetime.now().isoformat()
        }

        logger.info(
            "Expiry alerts retrieved",
            tenant_id=str(tenant_id),
            at_risk_items=result['total_items'],
            at_risk_value=result['total_at_risk_value_eur']
        )

        return result

    except Exception as e:
        logger.error(
            "Error getting expiry alerts",
            tenant_id=str(tenant_id),
            error=str(e)
        )
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Failed to retrieve expiry alerts: {str(e)}"
        )


@router.get(
    "/api/v1/tenants/{tenant_id}/inventory/sustainability/waste-events",
    summary="Get Waste Event Log",
    description="Get detailed waste event history with reasons, costs, and timestamps"
)
async def get_waste_events(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    limit: int = Query(50, ge=1, le=500, description="Maximum number of events to return"),
    offset: int = Query(0, ge=0, description="Number of events to skip"),
    start_date: Optional[datetime] = Query(None, description="Start date filter"),
    end_date: Optional[datetime] = Query(None, description="End date filter"),
    reason_code: Optional[str] = Query(None, description="Filter by reason code (expired, damaged, etc.)"),
    current_user: dict = Depends(get_current_user_dep),
    db: AsyncSession = Depends(get_db)
):
    """
    Get detailed waste event log for trend analysis and auditing.

    **Use cases**:
    - Root cause analysis
    - Waste trend identification
    - Compliance auditing
    - Process improvement
    """
    try:
        stock_movement_repo = StockMovementRepository(db)

        # Default to last 90 days if no date range
        if not end_date:
            end_date = datetime.now()
        if not start_date:
            start_date = end_date - timedelta(days=90)

        days_back = (end_date - start_date).days

        # Get waste movements
        waste_movements = await stock_movement_repo.get_waste_movements(
            tenant_id=tenant_id,
            days_back=days_back,
            limit=limit + offset  # Get extra for offset handling
        )

        # Filter by reason if specified
        if reason_code and waste_movements:
            waste_movements = [
                m for m in waste_movements
                if m.reason_code == reason_code
            ]

        # Apply pagination
        total_count = len(waste_movements) if waste_movements else 0
        paginated_movements = (waste_movements or [])[offset:offset + limit]

        # Format events
        events = []
        for movement in paginated_movements:
            events.append({
                'event_id': str(movement.id),
                'ingredient_id': str(movement.ingredient_id),
                'ingredient_name': movement.ingredient.name if movement.ingredient else 'Unknown',
                'quantity': float(movement.quantity) if movement.quantity else 0.0,
                'unit': movement.unit,
                'reason_code': movement.reason_code,
                'total_cost_eur': float(movement.total_cost) if movement.total_cost else 0.0,
                'movement_date': movement.movement_date.isoformat() if movement.movement_date else None,
                'notes': movement.notes or '',
                'created_by': movement.created_by
            })

        result = {
            'events': events,
            'total_count': total_count,
            'returned_count': len(events),
            'offset': offset,
            'limit': limit,
            'period': {
                'start_date': start_date.isoformat(),
                'end_date': end_date.isoformat()
            },
            'filter': {
                'reason_code': reason_code
            }
        }

        logger.info(
            "Waste events retrieved",
            tenant_id=str(tenant_id),
            total_events=total_count,
            returned=len(events)
        )

        return result

    except Exception as e:
        logger.error(
            "Error getting waste events",
            tenant_id=str(tenant_id),
            error=str(e)
        )
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Failed to retrieve waste events: {str(e)}"
        )


@router.get(
    "/api/v1/tenants/{tenant_id}/inventory/sustainability/summary",
    summary="Get Inventory Sustainability Summary",
    description="Get condensed inventory sustainability data for dashboard widgets"
)
async def get_inventory_sustainability_summary(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    days: int = Query(30, ge=1, le=365, description="Number of days to analyze"),
    current_user: dict = Depends(get_current_user_dep),
    db: AsyncSession = Depends(get_db)
):
    """
    Get summary of inventory sustainability metrics optimized for widgets.

    **Returns**: Condensed version of waste metrics and expiry alerts
    **Use case**: Dashboard widgets, quick overview cards
    """
    try:
        end_date = datetime.now()
        start_date = end_date - timedelta(days=days)

        # Get waste metrics
        stock_movement_repo = StockMovementRepository(db)
        waste_movements = await stock_movement_repo.get_waste_movements(
            tenant_id=tenant_id,
            days_back=days,
            limit=1000
        )

        total_waste_kg = sum(
            float(m.quantity) for m in (waste_movements or [])
            if m.quantity
        )

        total_waste_cost = sum(
            float(m.total_cost) for m in (waste_movements or [])
            if m.total_cost
        )

        # Get expiry alerts
        stock_repo = StockRepository(db)
        expiring_soon = await stock_repo.get_expiring_stock(
            tenant_id=tenant_id,
            days_ahead=7
        )

        at_risk_count = len(expiring_soon) if expiring_soon else 0

        result = {
            'inventory_waste_kg': round(total_waste_kg, 2),
            'waste_cost_eur': round(total_waste_cost, 2),
            'waste_incidents': len(waste_movements) if waste_movements else 0,
            'items_at_risk_expiry': at_risk_count,
            'period_days': days,
            'period': {
                'start_date': start_date.isoformat(),
                'end_date': end_date.isoformat()
            }
        }

        logger.info(
            "Inventory sustainability summary retrieved",
            tenant_id=str(tenant_id),
            waste_kg=result['inventory_waste_kg']
        )

        return result

    except Exception as e:
        logger.error(
            "Error getting inventory sustainability summary",
            tenant_id=str(tenant_id),
            error=str(e)
        )
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Failed to retrieve inventory sustainability summary: {str(e)}"
        )
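The waste-metrics endpoint above reduces movements to totals and a per-reason breakdown. A minimal standalone sketch of the same aggregation over plain dicts, with invented data (field names mirror the movement attributes used above):

from collections import defaultdict

movements = [
    {"quantity": 3.5, "total_cost": 12.0, "reason_code": "expired"},
    {"quantity": 1.2, "total_cost": 4.5, "reason_code": "damaged"},
    {"quantity": 0.8, "total_cost": None, "reason_code": "mystery"},  # falls into 'other'
]

known_reasons = {"expired", "damaged", "contaminated", "spillage"}
waste_by_reason = defaultdict(float)
total_kg = total_eur = 0.0

for m in movements:
    qty = float(m["quantity"] or 0.0)
    total_kg += qty
    if m["total_cost"]:
        total_eur += float(m["total_cost"])
    reason = m["reason_code"] if m["reason_code"] in known_reasons else "other"
    waste_by_reason[reason] += qty

print(round(total_kg, 2), round(total_eur, 2), dict(waste_by_reason))
# 5.5 16.5 {'expired': 3.5, 'damaged': 1.2, 'other': 0.8}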
240
services/inventory/app/api/temperature_logs.py
Normal file
@@ -0,0 +1,240 @@
# services/inventory/app/api/temperature_logs.py
|
||||
"""
|
||||
Temperature Logs API - ATOMIC CRUD operations on TemperatureLog model
|
||||
"""
|
||||
|
||||
from datetime import datetime
|
||||
from typing import List, Optional
|
||||
from uuid import UUID
|
||||
from fastapi import APIRouter, Depends, HTTPException, Query, Path, status
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
import structlog
|
||||
|
||||
from shared.auth.decorators import get_current_user_dep
|
||||
from shared.auth.access_control import require_user_role
|
||||
from shared.routing import RouteBuilder
|
||||
from app.core.database import get_db
|
||||
from app.services.food_safety_service import FoodSafetyService
|
||||
from app.schemas.food_safety import (
|
||||
TemperatureLogCreate,
|
||||
TemperatureLogResponse,
|
||||
BulkTemperatureLogCreate
|
||||
)
|
||||
|
||||
logger = structlog.get_logger()
|
||||
route_builder = RouteBuilder('inventory')
|
||||
router = APIRouter(tags=["temperature-logs"])
|
||||
|
||||
|
||||
async def get_food_safety_service() -> FoodSafetyService:
|
||||
"""Get food safety service instance"""
|
||||
return FoodSafetyService()
|
||||
|
||||
|
||||
@router.post(
|
||||
route_builder.build_base_route("food-safety/temperature"),
|
||||
response_model=TemperatureLogResponse,
|
||||
status_code=status.HTTP_201_CREATED
|
||||
)
|
||||
@require_user_role(['admin', 'owner', 'member'])
|
||||
async def log_temperature(
|
||||
temp_data: TemperatureLogCreate,
|
||||
tenant_id: UUID = Path(...),
|
||||
current_user: dict = Depends(get_current_user_dep),
|
||||
food_safety_service: FoodSafetyService = Depends(get_food_safety_service),
|
||||
db: AsyncSession = Depends(get_db)
|
||||
):
|
||||
"""Log a temperature reading"""
|
||||
try:
|
||||
temp_data.tenant_id = tenant_id
|
||||
|
||||
temp_log = await food_safety_service.log_temperature(
|
||||
db,
|
||||
temp_data,
|
||||
user_id=UUID(current_user["user_id"])
|
||||
)
|
||||
|
||||
logger.info("Temperature logged",
|
||||
location=temp_data.storage_location,
|
||||
temperature=temp_data.temperature_celsius)
|
||||
|
||||
return temp_log
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Error logging temperature", error=str(e))
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail="Failed to log temperature"
|
||||
)
|
||||
|
||||
|
||||
@router.post(
|
||||
route_builder.build_base_route("food-safety/temperature/bulk"),
|
||||
response_model=List[TemperatureLogResponse],
|
||||
status_code=status.HTTP_201_CREATED
|
||||
)
|
||||
@require_user_role(['admin', 'owner', 'member'])
|
||||
async def bulk_log_temperatures(
|
||||
bulk_data: BulkTemperatureLogCreate,
|
||||
tenant_id: UUID = Path(...),
|
||||
current_user: dict = Depends(get_current_user_dep),
|
||||
food_safety_service: FoodSafetyService = Depends(get_food_safety_service),
|
||||
db: AsyncSession = Depends(get_db)
|
||||
):
|
||||
"""Bulk log temperature readings"""
|
||||
try:
|
||||
for reading in bulk_data.readings:
|
||||
reading.tenant_id = tenant_id
|
||||
|
||||
temp_logs = await food_safety_service.bulk_log_temperatures(
|
||||
db,
|
||||
bulk_data.readings,
|
||||
user_id=UUID(current_user["user_id"])
|
||||
)
|
||||
|
||||
logger.info("Bulk temperature logging completed",
|
||||
count=len(bulk_data.readings))
|
||||
|
||||
return temp_logs
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Error bulk logging temperatures", error=str(e))
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail="Failed to bulk log temperatures"
|
||||
)
|
||||
|
||||
|
||||
@router.get(
|
||||
route_builder.build_base_route("food-safety/temperature"),
|
||||
response_model=List[TemperatureLogResponse]
|
||||
)
|
||||
async def get_temperature_logs(
|
||||
tenant_id: UUID = Path(...),
|
||||
location: Optional[str] = Query(None, description="Filter by storage location"),
|
||||
equipment_id: Optional[str] = Query(None, description="Filter by equipment ID"),
|
||||
date_from: Optional[datetime] = Query(None, description="Start date for filtering"),
|
||||
date_to: Optional[datetime] = Query(None, description="End date for filtering"),
|
||||
violations_only: bool = Query(False, description="Show only temperature violations"),
|
||||
skip: int = Query(0, ge=0, description="Number of records to skip"),
|
||||
limit: int = Query(100, ge=1, le=1000, description="Number of records to return"),
|
||||
current_user: dict = Depends(get_current_user_dep),
|
||||
db: AsyncSession = Depends(get_db)
|
||||
):
|
||||
"""Get temperature logs with filtering"""
|
||||
try:
|
||||
where_conditions = ["tenant_id = :tenant_id"]
|
||||
params = {"tenant_id": tenant_id}
|
||||
|
||||
if location:
|
||||
where_conditions.append("storage_location ILIKE :location")
|
||||
params["location"] = f"%{location}%"
|
||||
|
||||
if equipment_id:
|
||||
where_conditions.append("equipment_id = :equipment_id")
|
||||
params["equipment_id"] = equipment_id
|
||||
|
||||
if date_from:
|
||||
where_conditions.append("recorded_at >= :date_from")
|
||||
params["date_from"] = date_from
|
||||
|
||||
if date_to:
|
||||
where_conditions.append("recorded_at <= :date_to")
|
||||
params["date_to"] = date_to
|
||||
|
||||
if violations_only:
|
||||
where_conditions.append("is_within_range = false")
|
||||
|
||||
where_clause = " AND ".join(where_conditions)
|
||||
|
||||
query = f"""
|
||||
SELECT * FROM temperature_logs
|
||||
WHERE {where_clause}
|
||||
ORDER BY recorded_at DESC
|
||||
LIMIT :limit OFFSET :skip
|
||||
"""
|
||||
params.update({"limit": limit, "skip": skip})
|
||||
|
||||
result = await db.execute(query, params)
|
||||
logs = result.fetchall()
|
||||
|
||||
return [
|
||||
TemperatureLogResponse(**dict(log))
|
||||
for log in logs
|
||||
]
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Error getting temperature logs", error=str(e))
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail="Failed to retrieve temperature logs"
|
||||
)
|
||||
|
||||
|
||||
@router.get(
|
||||
route_builder.build_resource_detail_route("food-safety/temperature", "log_id"),
|
||||
response_model=TemperatureLogResponse
|
||||
)
|
||||
async def get_temperature_log(
|
||||
log_id: UUID = Path(...),
|
||||
tenant_id: UUID = Path(...),
|
||||
current_user: dict = Depends(get_current_user_dep),
|
||||
db: AsyncSession = Depends(get_db)
|
||||
):
|
||||
"""Get specific temperature log"""
|
||||
try:
|
||||
query = "SELECT * FROM temperature_logs WHERE id = :log_id AND tenant_id = :tenant_id"
|
||||
result = await db.execute(query, {"log_id": log_id, "tenant_id": tenant_id})
|
||||
log = result.fetchone()
|
||||
|
||||
if not log:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_404_NOT_FOUND,
|
||||
detail="Temperature log not found"
|
||||
)
|
||||
|
||||
return TemperatureLogResponse(**dict(log))
|
||||
|
||||
except HTTPException:
|
||||
raise
|
||||
except Exception as e:
|
||||
logger.error("Error getting temperature log", error=str(e))
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail="Failed to retrieve temperature log"
|
||||
)
|
||||
|
||||
|
||||
@router.delete(
|
||||
route_builder.build_resource_detail_route("food-safety/temperature", "log_id"),
|
||||
status_code=status.HTTP_204_NO_CONTENT
|
||||
)
|
||||
@require_user_role(['admin', 'owner'])
|
||||
async def delete_temperature_log(
|
||||
log_id: UUID = Path(...),
|
||||
tenant_id: UUID = Path(...),
|
||||
current_user: dict = Depends(get_current_user_dep),
|
||||
db: AsyncSession = Depends(get_db)
|
||||
):
|
||||
"""Delete temperature log"""
|
||||
try:
|
||||
query = "DELETE FROM temperature_logs WHERE id = :log_id AND tenant_id = :tenant_id"
|
||||
result = await db.execute(query, {"log_id": log_id, "tenant_id": tenant_id})
|
||||
|
||||
if result.rowcount == 0:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_404_NOT_FOUND,
|
||||
detail="Temperature log not found"
|
||||
)
|
||||
|
||||
await db.commit()
|
||||
return None
|
||||
|
||||
except HTTPException:
|
||||
raise
|
||||
except Exception as e:
|
||||
logger.error("Error deleting temperature log", error=str(e))
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail="Failed to delete temperature log"
|
||||
)
|
||||
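# Illustrative sketch (not part of the original file): how the dynamic WHERE-clause
# pattern above composes filters into one parameterized query. Values shown are
# hypothetical; only the column names come from the endpoint code.
#
#     where_conditions = ["tenant_id = :tenant_id", "storage_location ILIKE :location"]
#     params = {"tenant_id": "...", "location": "%walk-in%", "limit": 50, "skip": 0}
#     # -> SELECT * FROM temperature_logs
#     #    WHERE tenant_id = :tenant_id AND storage_location ILIKE :location
#     #    ORDER BY recorded_at DESC LIMIT :limit OFFSET :skip
#
# Because every user-supplied value travels as a bound parameter, the f-string only
# interpolates fixed condition fragments, never user input.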
222
services/inventory/app/api/transformations.py
Normal file
@@ -0,0 +1,222 @@
# services/inventory/app/api/transformations.py
"""
API endpoints for product transformations
Following standardized URL structure with role-based access control
"""

from typing import List, Optional
from uuid import UUID
from fastapi import APIRouter, Depends, HTTPException, Query, Path, status
from sqlalchemy.ext.asyncio import AsyncSession
import structlog

from app.core.database import get_db
from app.services.transformation_service import TransformationService
from app.schemas.inventory import (
    ProductTransformationCreate,
    ProductTransformationResponse
)
from app.models.inventory import ProductionStage
from shared.auth.decorators import get_current_user_dep
from shared.auth.access_control import require_user_role, admin_role_required
from shared.routing import RouteBuilder

logger = structlog.get_logger()

# Create route builder for consistent URL structure
route_builder = RouteBuilder('inventory')

router = APIRouter(tags=["transformations"])


# Helper function to extract the user ID from the user object
def get_current_user_id(current_user: dict = Depends(get_current_user_dep)) -> Optional[UUID]:
    """Extract the user ID from the current user context; returns None for service tokens."""
    user_id = current_user.get('user_id')
    if not user_id:
        # Handle service tokens that don't have UUID user_ids
        if current_user.get('type') == 'service':
            return None
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="User ID not found in context"
        )
    try:
        return UUID(user_id)
    except (ValueError, TypeError):
        return None
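# Illustrative sketch (not in the original file): the paths through
# get_current_user_id, with hypothetical payloads.
#
#     get_current_user_id({'user_id': '7f9c...-uuid'})   # -> UUID('7f9c...')
#     get_current_user_id({'type': 'service'})           # -> None (service token)
#     get_current_user_id({'user_id': 'not-a-uuid'})     # -> None (unparseable ID)
#     get_current_user_id({})                            # -> raises 401
#
# Downstream code therefore treats a None creator as "performed by a service".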

@router.post(
    route_builder.build_base_route("transformations"),
    response_model=ProductTransformationResponse,
    status_code=status.HTTP_201_CREATED
)
@require_user_role(['admin', 'owner', 'member'])
async def create_transformation(
    transformation_data: ProductTransformationCreate,
    tenant_id: UUID = Path(..., description="Tenant ID"),
    current_user: dict = Depends(get_current_user_dep),
    db: AsyncSession = Depends(get_db)
):
    """Create a new product transformation (e.g., par-baked to fully baked)"""
    try:
        # Extract user ID - handle service tokens
        user_id = get_current_user_id(current_user)

        service = TransformationService()
        transformation = await service.create_transformation(transformation_data, tenant_id, user_id)
        return transformation
    except ValueError as e:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=str(e)
        )
    except Exception as e:
        logger.error("Failed to create transformation", error=str(e), tenant_id=str(tenant_id))
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Failed to create transformation"
        )


@router.get(
    route_builder.build_base_route("transformations"),
    response_model=List[ProductTransformationResponse]
)
async def get_transformations(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    skip: int = Query(0, ge=0, description="Number of records to skip"),
    limit: int = Query(100, ge=1, le=1000, description="Number of records to return"),
    ingredient_id: Optional[UUID] = Query(None, description="Filter by ingredient (source or target)"),
    source_stage: Optional[ProductionStage] = Query(None, description="Filter by source production stage"),
    target_stage: Optional[ProductionStage] = Query(None, description="Filter by target production stage"),
    days_back: Optional[int] = Query(None, ge=1, le=365, description="Filter by days back from today"),
    db: AsyncSession = Depends(get_db)
):
    """Get product transformations with filtering"""
    try:
        service = TransformationService()
        transformations = await service.get_transformations(
            tenant_id, skip, limit, ingredient_id, source_stage, target_stage, days_back
        )
        return transformations
    except Exception as e:
        logger.error("Failed to get transformations", error=str(e), tenant_id=str(tenant_id))
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Failed to get transformations"
        )


@router.get(
    route_builder.build_resource_detail_route("transformations", "transformation_id"),
    response_model=ProductTransformationResponse
)
async def get_transformation(
    transformation_id: UUID = Path(..., description="Transformation ID"),
    tenant_id: UUID = Path(..., description="Tenant ID"),
    db: AsyncSession = Depends(get_db)
):
    """Get a specific transformation by ID"""
    try:
        service = TransformationService()
        transformation = await service.get_transformation(transformation_id, tenant_id)

        if not transformation:
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND,
                detail="Transformation not found"
            )

        return transformation
    except HTTPException:
        raise
    except Exception as e:
        logger.error("Failed to get transformation", error=str(e), transformation_id=str(transformation_id))
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Failed to get transformation"
        )


@router.get(
    route_builder.build_base_route("transformations/summary"),
    response_model=dict
)
async def get_transformation_summary(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    days_back: int = Query(30, ge=1, le=365, description="Days back for summary"),
    db: AsyncSession = Depends(get_db)
):
    """Get transformation summary for the dashboard"""
    try:
        service = TransformationService()
        summary = await service.get_transformation_summary(tenant_id, days_back)
        return summary
    except Exception as e:
        logger.error("Failed to get transformation summary", error=str(e), tenant_id=str(tenant_id))
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Failed to get transformation summary"
        )


@router.post(
    route_builder.build_operations_route("transformations/par-bake-to-fresh"),
    response_model=dict
)
@require_user_role(['admin', 'owner', 'member'])
async def create_par_bake_transformation(
    source_ingredient_id: UUID = Query(..., description="Par-baked ingredient ID"),
    target_ingredient_id: UUID = Query(..., description="Fresh baked ingredient ID"),
    quantity: float = Query(..., gt=0, description="Quantity to transform"),
    target_batch_number: Optional[str] = Query(None, description="Target batch number"),
    expiration_hours: int = Query(24, ge=1, le=72, description="Hours until expiration after baking"),
    notes: Optional[str] = Query(None, description="Process notes"),
    tenant_id: UUID = Path(..., description="Tenant ID"),
    current_user: dict = Depends(get_current_user_dep),
    db: AsyncSession = Depends(get_db)
):
    """Convenience endpoint for the par-baked to fresh transformation"""
    try:
        # Extract user ID - handle service tokens
        user_id = get_current_user_id(current_user)

        # Create transformation data for par-baked to fully baked
        transformation_data = ProductTransformationCreate(
            source_ingredient_id=str(source_ingredient_id),
            target_ingredient_id=str(target_ingredient_id),
            source_stage=ProductionStage.PAR_BAKED,
            target_stage=ProductionStage.FULLY_BAKED,
            source_quantity=quantity,
            target_quantity=quantity,  # Assume 1:1 ratio for par-baked goods
            expiration_calculation_method="days_from_transformation",
            expiration_days_offset=max(1, expiration_hours // 24),  # Convert hours to days, minimum 1 day
            process_notes=notes,
            target_batch_number=target_batch_number
        )

        service = TransformationService()
        transformation = await service.create_transformation(transformation_data, tenant_id, user_id)

        return {
            "transformation_id": transformation.id,
            "transformation_reference": transformation.transformation_reference,
            "source_quantity": transformation.source_quantity,
            "target_quantity": transformation.target_quantity,
            "expiration_date": transformation.transformation_date,
            "message": f"Successfully transformed {quantity} units from par-baked to fresh baked"
        }

    except ValueError as e:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=str(e)
        )
    except Exception as e:
        logger.error("Failed to create par-bake transformation", error=str(e), tenant_id=str(tenant_id))
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Failed to create par-bake transformation"
        )
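# Illustrative usage sketch (not in the original file). The exact path segments come
# from RouteBuilder at runtime; the URL below assumes the documented pattern
# /api/v1/tenants/{tenant_id}/inventory/operations/{operation} for operations routes.
#
#     import httpx
#
#     async def bake_off(client: httpx.AsyncClient, tenant_id: str, token: str):
#         resp = await client.post(
#             f"/api/v1/tenants/{tenant_id}/inventory/operations/transformations/par-bake-to-fresh",
#             params={
#                 "source_ingredient_id": "…",   # par-baked ingredient UUID
#                 "target_ingredient_id": "…",   # fresh-baked ingredient UUID
#                 "quantity": 40,
#                 "expiration_hours": 24,
#             },
#             headers={"Authorization": f"Bearer {token}"},
#         )
#         resp.raise_for_status()
#         return resp.json()["transformation_id"]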
6
services/inventory/app/consumers/__init__.py
Normal file
@@ -0,0 +1,6 @@
"""
Event consumers for inventory service
"""
from .delivery_event_consumer import DeliveryEventConsumer

__all__ = ["DeliveryEventConsumer"]
272
services/inventory/app/consumers/delivery_event_consumer.py
Normal file
@@ -0,0 +1,272 @@
"""
Delivery Event Consumer

Listens for delivery.received events from the procurement service
and automatically updates inventory stock levels.
"""
import json
import uuid
from datetime import datetime, timezone
from typing import Dict, Any
from decimal import Decimal
import structlog

from shared.messaging import RabbitMQClient
from app.core.database import database_manager
from app.repositories.stock_repository import StockRepository
from app.repositories.stock_movement_repository import StockMovementRepository

logger = structlog.get_logger()


class DeliveryEventConsumer:
    """
    Consumes delivery.received events and updates inventory stock.

    When a delivery is recorded in the procurement service, this consumer:
    1. Listens for the delivery.received event
    2. Creates stock entries for each delivered item
    3. Updates stock levels (quantity_available)
    4. Records batch numbers and expiry dates
    """
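    # Illustrative event shape (reconstructed from the field accesses below, not
    # from a published schema): the consumer reads data.tenant_id, delivery_id,
    # po_id, received_by, received_at, and a list of items.
    #
    #     {
    #         "event_id": "…",
    #         "data": {
    #             "tenant_id": "…", "delivery_id": "…", "po_id": "…",
    #             "received_by": "…", "received_at": "2025-01-15T09:30:00Z",
    #             "items": [{
    #                 "inventory_product_id": "…",   # same as ingredient_id
    #                 "accepted_quantity": "12.5",
    #                 "unit_cost": "1.85",           # or unit_price / price
    #                 "batch_lot_number": "LOT-0042",
    #                 "expiry_date": "2025-02-01T00:00:00Z"
    #             }]
    #         }
    #     }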

    def __init__(self):
        """Initialize delivery event consumer"""
        self.service_name = "inventory"

    async def consume_delivery_received_events(
        self,
        rabbitmq_client: RabbitMQClient
    ):
        """
        Start consuming delivery.received events from RabbitMQ

        Args:
            rabbitmq_client: RabbitMQ client instance
        """
        async def process_message(message):
            """Process a single delivery.received event message"""
            try:
                async with message.process():
                    # Parse event data
                    event_data = json.loads(message.body.decode())
                    logger.info(
                        "Received delivery.received event",
                        event_id=event_data.get('event_id'),
                        delivery_id=event_data.get('data', {}).get('delivery_id')
                    )

                    # Process the delivery and update stock
                    success = await self.process_delivery_stock_update(event_data)

                    if success:
                        logger.info(
                            "Successfully processed delivery stock update",
                            delivery_id=event_data.get('data', {}).get('delivery_id')
                        )
                    else:
                        logger.error(
                            "Failed to process delivery stock update",
                            delivery_id=event_data.get('data', {}).get('delivery_id')
                        )

            except Exception as e:
                logger.error(
                    "Error processing delivery.received event",
                    error=str(e),
                    exc_info=True
                )

        # Start consuming events
        await rabbitmq_client.consume_events(
            exchange_name="procurement.events",
            queue_name="inventory.delivery.received",
            routing_key="delivery.received",
            callback=process_message
        )

        logger.info("Started consuming delivery.received events")

    async def process_delivery_stock_update(self, event_data: Dict[str, Any]) -> bool:
        """
        Process a delivery event and update stock levels.

        Args:
            event_data: Full event payload from RabbitMQ

        Returns:
            bool: True if stock updated successfully
        """
        # Extract the payload before the try block so the error handler below can
        # always reference it safely
        data = event_data.get('data', {})
        try:
            # Extract delivery information
            tenant_id = uuid.UUID(data.get('tenant_id'))
            delivery_id = uuid.UUID(data.get('delivery_id'))
            po_id = uuid.UUID(data.get('po_id'))
            items = data.get('items', [])
            received_by = data.get('received_by')
            received_at = data.get('received_at')

            if not items:
                logger.warning(
                    "No items in delivery event, skipping stock update",
                    delivery_id=str(delivery_id)
                )
                return False

            # Process each item
            async with database_manager.get_session() as session:
                stock_repo = StockRepository(session)
                movement_repo = StockMovementRepository(session)

                for item in items:
                    try:
                        # inventory_product_id is the same as ingredient_id: the
                        # ingredients table serves as a unified catalog for both
                        # raw materials and products
                        ingredient_id = uuid.UUID(item.get('inventory_product_id'))
                        accepted_quantity = Decimal(str(item.get('accepted_quantity', 0)))

                        # Only process if quantity was accepted
                        if accepted_quantity <= 0:
                            logger.debug(
                                "Skipping item with zero accepted quantity",
                                ingredient_id=str(ingredient_id)
                            )
                            continue

                        # Create a new stock batch entry for this delivery.
                        # The Stock model uses batch tracking - each delivery
                        # creates a new batch entry.
                        # Extract the unit cost from the delivery item
                        unit_cost = Decimal('0')
                        try:
                            if 'unit_cost' in item:
                                unit_cost = Decimal(str(item['unit_cost']))
                            elif 'unit_price' in item:
                                unit_cost = Decimal(str(item['unit_price']))
                            elif 'price' in item:
                                unit_cost = Decimal(str(item['price']))
                        except (ValueError, TypeError, KeyError) as e:
                            logger.warning("Could not extract unit cost from delivery item",
                                           item_id=item.get('id'),
                                           error=str(e))

                        # Calculate total cost
                        total_cost = unit_cost * accepted_quantity

                        stock_data = {
                            'tenant_id': tenant_id,
                            'ingredient_id': ingredient_id,
                            'batch_number': item.get('batch_lot_number'),
                            'lot_number': item.get('batch_lot_number'),  # Use same as batch_number
                            'supplier_batch_ref': item.get('batch_lot_number'),

                            # Quantities
                            'current_quantity': float(accepted_quantity),
                            'reserved_quantity': 0.0,
                            'available_quantity': float(accepted_quantity),

                            # Dates
                            'received_date': datetime.fromisoformat(received_at.replace('Z', '+00:00')) if received_at else datetime.now(timezone.utc),
                            'expiration_date': datetime.fromisoformat(item.get('expiry_date').replace('Z', '+00:00')) if item.get('expiry_date') else None,

                            # Cost - extracted from the delivery item
                            'unit_cost': unit_cost,
                            'total_cost': total_cost,

                            # Production stage - default to raw ingredient for deliveries
                            'production_stage': 'raw_ingredient',

                            # Status
                            'is_available': True,
                            'quality_status': 'GOOD'
                        }

                        from app.schemas.inventory import StockCreate
                        stock_create = StockCreate(**stock_data)
                        stock = await stock_repo.create_stock_entry(stock_create, tenant_id)

                        logger.info(
                            "Created new stock batch from delivery",
                            ingredient_id=str(ingredient_id),
                            stock_id=str(stock.id),
                            batch_number=item.get('batch_lot_number'),
                            quantity=float(accepted_quantity),
                            delivery_id=str(delivery_id)
                        )

                        # Create a stock movement record for the audit trail,
                        # reusing the unit_cost extracted above
                        from app.models.inventory import StockMovementType
                        from app.schemas.inventory import StockMovementCreate

                        movement_data = StockMovementCreate(
                            ingredient_id=ingredient_id,
                            stock_id=stock.id,
                            movement_type=StockMovementType.PURCHASE,
                            quantity=float(accepted_quantity),
                            unit_cost=unit_cost,
                            reference_number=f"DEL-{delivery_id}",
                            reason_code='delivery',
                            notes=f"Delivery received from PO {po_id}. Batch: {item.get('batch_lot_number', 'N/A')}",
                            movement_date=datetime.fromisoformat(received_at.replace('Z', '+00:00')) if received_at else datetime.now(timezone.utc)
                        )

                        movement = await movement_repo.create_movement(
                            movement_data=movement_data,
                            tenant_id=tenant_id,
                            created_by=uuid.UUID(received_by) if received_by else None,
                            quantity_before=0.0,  # New batch starts at 0
                            quantity_after=float(accepted_quantity)
                        )

                        logger.info(
                            "Created stock movement for delivery",
                            movement_id=str(movement.id),
                            ingredient_id=str(ingredient_id),
                            quantity=float(accepted_quantity),
                            batch=item.get('batch_lot_number')
                        )

                    except Exception as item_error:
                        logger.error(
                            "Error processing delivery item",
                            error=str(item_error),
                            item=item,
                            exc_info=True
                        )
                        # Continue processing other items even if one fails
                        continue

                # Commit all changes
                await session.commit()

            logger.info(
                "Successfully processed delivery stock update",
                delivery_id=str(delivery_id),
                items_processed=len(items)
            )

            return True

        except Exception as e:
            logger.error(
                "Error in delivery stock update",
                error=str(e),
                delivery_id=data.get('delivery_id'),
                exc_info=True
            )
            return False
256
services/inventory/app/consumers/inventory_transfer_consumer.py
Normal file
@@ -0,0 +1,256 @@
"""
Inventory Transfer Event Consumer
Listens for completed internal transfers and handles inventory ownership transfer
"""

import json
from datetime import datetime  # used by health_check below
from typing import Dict, Any

import structlog

from app.services.internal_transfer_service import InternalTransferInventoryService
from shared.messaging import RabbitMQClient

logger = structlog.get_logger()


class InventoryTransferEventConsumer:
    """
    Consumer for inventory transfer events triggered by internal transfers
    """

    def __init__(
        self,
        internal_transfer_service: InternalTransferInventoryService,
        rabbitmq_client: RabbitMQClient
    ):
        self.internal_transfer_service = internal_transfer_service
        self.rabbitmq_client = rabbitmq_client
        self.is_running = False

    async def start_consuming(self):
        """
        Start consuming inventory transfer events
        """
        logger.info("Starting inventory transfer event consumer")
        self.is_running = True

        # Declare exchange and queue for internal transfer events
        await self.rabbitmq_client.declare_exchange("internal_transfers", "topic")
        await self.rabbitmq_client.declare_queue("inventory_service_internal_transfers")
        await self.rabbitmq_client.bind_queue_to_exchange(
            queue_name="inventory_service_internal_transfers",
            exchange_name="internal_transfers",
            routing_key="internal_transfer.completed"
        )

        # Start consuming
        await self.rabbitmq_client.consume(
            queue_name="inventory_service_internal_transfers",
            callback=self.handle_internal_transfer_completed,
            auto_ack=False
        )

        logger.info("Inventory transfer event consumer started")
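    # Illustrative event shape (reconstructed from the field accesses in the
    # handler below, not from a published schema):
    #
    #     {
    #         "shipment_id": "…",
    #         "parent_tenant_id": "…",
    #         "child_tenant_id": "…",
    #         "items": [
    #             {"product_id": "…", "delivered_quantity": 25.0}
    #         ]
    #     }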
    async def handle_internal_transfer_completed(self, message):
        """
        Handle an internal transfer completed event.
        This means a shipment has been delivered and inventory ownership should transfer.
        """
        try:
            event_data = json.loads(message.body.decode())
            logger.info("Processing internal transfer completed event", event_data=event_data)

            # Extract data from the event
            shipment_id = event_data.get('shipment_id')
            parent_tenant_id = event_data.get('parent_tenant_id')
            child_tenant_id = event_data.get('child_tenant_id')
            items = event_data.get('items', [])

            if not all([shipment_id, parent_tenant_id, child_tenant_id, items]):
                logger.error("Missing required data in internal transfer event", event_data=event_data)
                await message.nack(requeue=False)  # Don't retry invalid messages
                return

            # Process the inventory transfer for each item
            transfer_results = []
            errors = []

            for item in items:
                product_id = item.get('product_id')
                delivered_quantity = item.get('delivered_quantity')

                if not all([product_id, delivered_quantity]):
                    errors.append({
                        'error': 'Missing product_id or delivered_quantity',
                        'item': item
                    })
                    continue

                try:
                    # Deduct from parent inventory
                    await self._transfer_inventory_from_parent(
                        parent_tenant_id=parent_tenant_id,
                        product_id=product_id,
                        quantity=delivered_quantity
                    )

                    # Add to child inventory
                    await self._transfer_inventory_to_child(
                        child_tenant_id=child_tenant_id,
                        product_id=product_id,
                        quantity=delivered_quantity
                    )

                    transfer_results.append({
                        'product_id': product_id,
                        'quantity': delivered_quantity,
                        'status': 'completed'
                    })

                    logger.info(
                        "Inventory transferred successfully",
                        parent_tenant_id=parent_tenant_id,
                        child_tenant_id=child_tenant_id,
                        product_id=product_id,
                        quantity=delivered_quantity
                    )

                except Exception as item_error:
                    logger.error(
                        "Failed to transfer inventory for item",
                        parent_tenant_id=parent_tenant_id,
                        child_tenant_id=child_tenant_id,
                        product_id=product_id,
                        error=str(item_error)
                    )
                    errors.append({
                        'product_id': product_id,
                        'quantity': delivered_quantity,
                        'error': str(item_error)
                    })

            # Acknowledge message after processing
            await message.ack()

            logger.info(
                "Internal transfer processed",
                shipment_id=shipment_id,
                parent_tenant_id=parent_tenant_id,
                child_tenant_id=child_tenant_id,
                successful_transfers=len(transfer_results),
                failed_transfers=len(errors)
            )

        except Exception as e:
            logger.error("Error processing internal transfer event", error=str(e), exc_info=True)
            # Nack with requeue=True to retry on transient errors
            await message.nack(requeue=True)

    async def _transfer_inventory_from_parent(
        self,
        parent_tenant_id: str,
        product_id: str,
        quantity: float
    ):
        """
        Deduct inventory from the parent tenant
        """
        try:
            # Create a stock movement to reduce parent inventory
            stock_movement_data = {
                "product_id": product_id,
                "movement_type": "internal_transfer_out",
                "quantity": -float(quantity),  # Negative for outflow
                "reference_type": "internal_transfer",
                "reference_id": f"transfer_{parent_tenant_id}_to_{product_id}",  # Would have actual transfer ID
                "source_tenant_id": parent_tenant_id,
                "destination_tenant_id": None,  # Will be set when we know the child
                "notes": "Internal transfer to child tenant"
            }

            # Call inventory service to process the movement
            await self.internal_transfer_service.inventory_client.create_stock_movement(
                tenant_id=parent_tenant_id,
                movement_data=stock_movement_data
            )

            logger.info(
                "Inventory deducted from parent tenant",
                parent_tenant_id=parent_tenant_id,
                product_id=product_id,
                quantity=quantity
            )

        except Exception as e:
            logger.error(
                "Error deducting inventory from parent",
                parent_tenant_id=parent_tenant_id,
                product_id=product_id,
                error=str(e)
            )
            raise

    async def _transfer_inventory_to_child(
        self,
        child_tenant_id: str,
        product_id: str,
        quantity: float
    ):
        """
        Add inventory to the child tenant
        """
        try:
            # Create a stock movement to increase child inventory
            stock_movement_data = {
                "product_id": product_id,
                "movement_type": "internal_transfer_in",
                "quantity": float(quantity),  # Positive for inflow
                "reference_type": "internal_transfer",
                "reference_id": f"transfer_from_parent_{product_id}_to_{child_tenant_id}",  # Would have actual transfer ID
                "source_tenant_id": None,  # Will be set when we know the parent
                "destination_tenant_id": child_tenant_id,
                "notes": "Internal transfer from parent tenant"
            }

            # Call inventory service to process the movement
            await self.internal_transfer_service.inventory_client.create_stock_movement(
                tenant_id=child_tenant_id,
                movement_data=stock_movement_data
            )

            logger.info(
                "Inventory added to child tenant",
                child_tenant_id=child_tenant_id,
                product_id=product_id,
                quantity=quantity
            )

        except Exception as e:
            logger.error(
                "Error adding inventory to child",
                child_tenant_id=child_tenant_id,
                product_id=product_id,
                error=str(e)
            )
            raise

    async def stop_consuming(self):
        """
        Stop consuming inventory transfer events
        """
        logger.info("Stopping inventory transfer event consumer")
        self.is_running = False
        # In a real implementation, we would close the RabbitMQ connection
        logger.info("Inventory transfer event consumer stopped")

    async def health_check(self) -> Dict[str, Any]:
        """
        Health check for the consumer
        """
        return {
            "consumer": "inventory_transfer_event_consumer",
            "status": "running" if self.is_running else "stopped",
            "timestamp": datetime.utcnow().isoformat()
        }
0
services/inventory/app/core/__init__.py
Normal file
124
services/inventory/app/core/config.py
Normal file
@@ -0,0 +1,124 @@
# services/inventory/app/core/config.py
"""
Inventory Service Configuration
"""

import os
from typing import List
from pydantic import Field
from shared.config.base import BaseServiceSettings


class Settings(BaseServiceSettings):
    """Inventory service settings extending base configuration"""

    # Override service-specific settings
    SERVICE_NAME: str = "inventory-service"
    VERSION: str = "1.0.0"
    APP_NAME: str = "Bakery Inventory Service"
    DESCRIPTION: str = "Inventory and stock management service"

    # API Configuration
    API_V1_STR: str = "/api/v1"

    # Database configuration (built from individual components so the full
    # credential string does not have to live in a single environment variable)
    @property
    def DATABASE_URL(self) -> str:
        """Build the database URL from its components"""
        # Try a complete URL first (for backward compatibility)
        complete_url = os.getenv("INVENTORY_DATABASE_URL")
        if complete_url:
            return complete_url

        # Build from components; the fallbacks are development-only defaults
        user = os.getenv("INVENTORY_DB_USER", "inventory_user")
        password = os.getenv("INVENTORY_DB_PASSWORD", "inventory_pass123")
        host = os.getenv("INVENTORY_DB_HOST", "localhost")
        port = os.getenv("INVENTORY_DB_PORT", "5432")
        name = os.getenv("INVENTORY_DB_NAME", "inventory_db")

        return f"postgresql+asyncpg://{user}:{password}@{host}:{port}/{name}"
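    # Illustrative sketch (not in the original file): with only the component
    # variables set, e.g.
    #     INVENTORY_DB_USER=inventory_user INVENTORY_DB_HOST=db.internal
    # the property yields
    #     postgresql+asyncpg://inventory_user:…@db.internal:5432/inventory_db
    # whereas setting INVENTORY_DATABASE_URL short-circuits the assembly entirely.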

    # Inventory-specific Redis database
    REDIS_DB: int = Field(default=3, env="INVENTORY_REDIS_DB")

    # File upload configuration
    MAX_UPLOAD_SIZE: int = 10 * 1024 * 1024  # 10MB
    UPLOAD_PATH: str = Field(default="/tmp/uploads", env="INVENTORY_UPLOAD_PATH")
    ALLOWED_FILE_EXTENSIONS: List[str] = [".csv", ".xlsx", ".xls", ".png", ".jpg", ".jpeg"]

    # Pagination
    DEFAULT_PAGE_SIZE: int = 50
    MAX_PAGE_SIZE: int = 1000

    # Stock validation
    MIN_QUANTITY: float = 0.0
    MAX_QUANTITY: float = 100000.0
    MIN_PRICE: float = 0.01
    MAX_PRICE: float = 10000.0

    # Inventory-specific cache TTL
    INVENTORY_CACHE_TTL: int = 180  # 3 minutes for real-time stock
    INGREDIENT_CACHE_TTL: int = 600  # 10 minutes
    SUPPLIER_CACHE_TTL: int = 1800  # 30 minutes

    # Low stock thresholds
    DEFAULT_LOW_STOCK_THRESHOLD: int = 10
    DEFAULT_REORDER_POINT: int = 20
    DEFAULT_REORDER_QUANTITY: int = 50

    # Expiration alert thresholds (in days)
    EXPIRING_SOON_DAYS: int = 7
    EXPIRED_ALERT_DAYS: int = 1

    # Barcode/QR configuration
    BARCODE_FORMAT: str = "Code128"
    QR_CODE_VERSION: int = 1

    # Food safety and compliance settings
    FOOD_SAFETY_ENABLED: bool = Field(default=True, env="FOOD_SAFETY_ENABLED")
    TEMPERATURE_MONITORING_ENABLED: bool = Field(default=True, env="TEMPERATURE_MONITORING_ENABLED")
    AUTOMATIC_COMPLIANCE_CHECKS: bool = Field(default=True, env="AUTOMATIC_COMPLIANCE_CHECKS")

    # Temperature monitoring thresholds (Celsius)
    REFRIGERATION_TEMP_MIN: float = Field(default=1.0, env="REFRIGERATION_TEMP_MIN")
    REFRIGERATION_TEMP_MAX: float = Field(default=4.0, env="REFRIGERATION_TEMP_MAX")
    FREEZER_TEMP_MIN: float = Field(default=-20.0, env="FREEZER_TEMP_MIN")
    FREEZER_TEMP_MAX: float = Field(default=-15.0, env="FREEZER_TEMP_MAX")
    ROOM_TEMP_MIN: float = Field(default=18.0, env="ROOM_TEMP_MIN")
    ROOM_TEMP_MAX: float = Field(default=25.0, env="ROOM_TEMP_MAX")

    # Temperature alert thresholds
    TEMP_DEVIATION_ALERT_MINUTES: int = Field(default=15, env="TEMP_DEVIATION_ALERT_MINUTES")
    CRITICAL_TEMP_DEVIATION_MINUTES: int = Field(default=5, env="CRITICAL_TEMP_DEVIATION_MINUTES")
    TEMP_SENSOR_OFFLINE_ALERT_MINUTES: int = Field(default=30, env="TEMP_SENSOR_OFFLINE_ALERT_MINUTES")

    # Food safety alert thresholds
    EXPIRATION_WARNING_DAYS: int = Field(default=3, env="EXPIRATION_WARNING_DAYS")
    CRITICAL_EXPIRATION_HOURS: int = Field(default=24, env="CRITICAL_EXPIRATION_HOURS")
    QUALITY_SCORE_THRESHOLD: float = Field(default=8.0, env="QUALITY_SCORE_THRESHOLD")

    # Compliance monitoring
    AUDIT_REMINDER_DAYS: int = Field(default=30, env="AUDIT_REMINDER_DAYS")
    CERTIFICATION_EXPIRY_WARNING_DAYS: int = Field(default=60, env="CERTIFICATION_EXPIRY_WARNING_DAYS")
    COMPLIANCE_CHECK_FREQUENCY_HOURS: int = Field(default=24, env="COMPLIANCE_CHECK_FREQUENCY_HOURS")

    # Dashboard refresh intervals
    DASHBOARD_CACHE_TTL: int = Field(default=300, env="DASHBOARD_CACHE_TTL")  # 5 minutes
    ALERTS_REFRESH_INTERVAL: int = Field(default=60, env="ALERTS_REFRESH_INTERVAL")  # 1 minute
    TEMPERATURE_LOG_INTERVAL: int = Field(default=300, env="TEMPERATURE_LOG_INTERVAL")  # 5 minutes

    # Alert notification settings
    ENABLE_EMAIL_ALERTS: bool = Field(default=True, env="ENABLE_EMAIL_ALERTS")
    ENABLE_SMS_ALERTS: bool = Field(default=True, env="ENABLE_SMS_ALERTS")
    ENABLE_WHATSAPP_ALERTS: bool = Field(default=True, env="ENABLE_WHATSAPP_ALERTS")
    REGULATORY_NOTIFICATION_ENABLED: bool = Field(default=False, env="REGULATORY_NOTIFICATION_ENABLED")

    # Business model detection for inventory
    ENABLE_BUSINESS_MODEL_DETECTION: bool = Field(default=True, env="ENABLE_BUSINESS_MODEL_DETECTION")
    CENTRAL_BAKERY_THRESHOLD_INGREDIENTS: int = Field(default=50, env="CENTRAL_BAKERY_THRESHOLD_INGREDIENTS")
    INDIVIDUAL_BAKERY_THRESHOLD_INGREDIENTS: int = Field(default=20, env="INDIVIDUAL_BAKERY_THRESHOLD_INGREDIENTS")


# Global settings instance
settings = Settings()
86
services/inventory/app/core/database.py
Normal file
@@ -0,0 +1,86 @@
# services/inventory/app/core/database.py
"""
Inventory Service Database Configuration using the shared database manager
"""

import structlog
from contextlib import asynccontextmanager
from typing import AsyncGenerator

from app.core.config import settings
from shared.database.base import DatabaseManager, Base

logger = structlog.get_logger()

# Create database manager instance
database_manager = DatabaseManager(
    database_url=settings.DATABASE_URL,
    service_name="inventory-service",
    pool_size=settings.DB_POOL_SIZE,
    max_overflow=settings.DB_MAX_OVERFLOW,
    pool_recycle=settings.DB_POOL_RECYCLE,
    echo=settings.DB_ECHO
)


async def get_db():
    """
    Database dependency for FastAPI - using the shared database manager
    """
    async for session in database_manager.get_db():
        yield session
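# Illustrative usage sketch (not in the original file): how a route consumes the
# get_db dependency. The endpoint path is hypothetical.
#
#     from fastapi import Depends
#     from sqlalchemy.ext.asyncio import AsyncSession
#
#     @router.get("/example")
#     async def example_endpoint(db: AsyncSession = Depends(get_db)):
#         ...  # the session is opened per-request and closed when the generator resumes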

async def init_db():
    """Initialize database tables using the shared database manager"""
    try:
        logger.info("Initializing Inventory Service database...")

        # Import all models to ensure they're registered
        from app.models import inventory  # noqa: F401

        # Create all tables using the database manager
        await database_manager.create_tables(Base.metadata)

        logger.info("Inventory Service database initialized successfully")

    except Exception as e:
        logger.error("Failed to initialize database", error=str(e))
        raise


async def close_db():
    """Close database connections using the shared database manager"""
    try:
        await database_manager.close_connections()
        logger.info("Database connections closed")
    except Exception as e:
        logger.error("Error closing database connections", error=str(e))


@asynccontextmanager
async def get_db_transaction():
    """
    Context manager for database transactions using the shared database manager
    """
    async with database_manager.get_session() as session:
        try:
            async with session.begin():
                yield session
        except Exception as e:
            logger.error("Transaction error", error=str(e))
            raise


@asynccontextmanager
async def get_background_session():
    """
    Context manager for background tasks using the shared database manager
    """
    async with database_manager.get_background_session() as session:
        yield session


async def health_check():
    """Database health check using the shared database manager"""
    return await database_manager.health_check()
239
services/inventory/app/main.py
Normal file
@@ -0,0 +1,239 @@
# services/inventory/app/main.py
"""
Inventory Service FastAPI Application
"""

import os
import asyncio
from fastapi import FastAPI
from sqlalchemy import text

# Import core modules
from app.core.config import settings
from app.core.database import database_manager
from app.services.inventory_alert_service import InventoryAlertService
from app.services.inventory_scheduler import InventoryScheduler
from app.consumers.delivery_event_consumer import DeliveryEventConsumer
from shared.service_base import StandardFastAPIService
from shared.messaging import UnifiedEventPublisher

from app.api import (
    internal_demo,
    batch,
    ingredients,
    stock_entries,
    transformations,
    inventory_operations,
    food_safety_compliance,
    temperature_logs,
    food_safety_alerts,
    food_safety_operations,
    dashboard,
    analytics,
    sustainability,
    audit,
    ml_insights,
    enterprise_inventory,
    internal
)
from app.api.internal_alert_trigger import router as internal_alert_trigger_router


class InventoryService(StandardFastAPIService):
    """Inventory Service with standardized setup"""

    expected_migration_version = "20251123_unified_initial_schema"

    async def verify_migrations(self):
        """Verify the database schema matches the latest migrations."""
        try:
            async with self.database_manager.get_session() as session:
                result = await session.execute(text("SELECT version_num FROM alembic_version"))
                version = result.scalar()
                if version != self.expected_migration_version:
                    self.logger.error(f"Migration version mismatch: expected {self.expected_migration_version}, got {version}")
                    raise RuntimeError(f"Migration version mismatch: expected {self.expected_migration_version}, got {version}")
                self.logger.info(f"Migration verification successful: {version}")
        except Exception as e:
            self.logger.error(f"Migration verification failed: {e}")
            raise

    def __init__(self):
        # Define expected database tables for health checks
        inventory_expected_tables = [
            'ingredients', 'stock', 'stock_movements', 'product_transformations',
            'stock_alerts', 'food_safety_compliance', 'temperature_logs', 'food_safety_alerts'
        ]

        # Initialize delivery consumer and rabbitmq client
        self.delivery_consumer = None
        self.delivery_consumer_task = None
        self.rabbitmq_client = None

        super().__init__(
            service_name="inventory-service",
            app_name=settings.APP_NAME,
            description=settings.DESCRIPTION,
            version=settings.VERSION,
            log_level=settings.LOG_LEVEL,
            cors_origins=settings.CORS_ORIGINS,
            api_prefix="",  # Empty because RouteBuilder already includes /api/v1
            database_manager=database_manager,
            expected_tables=inventory_expected_tables,
            enable_messaging=True  # Enable RabbitMQ for event consumption
        )

    async def _setup_messaging(self):
        """Setup messaging for the inventory service"""
        from shared.messaging import RabbitMQClient
        try:
            self.rabbitmq_client = RabbitMQClient(settings.RABBITMQ_URL, service_name="inventory-service")
            await self.rabbitmq_client.connect()
            # Create event publisher
            self.event_publisher = UnifiedEventPublisher(self.rabbitmq_client, "inventory-service")
            self.logger.info("Inventory service messaging setup completed")
        except Exception as e:
            self.logger.error("Failed to setup inventory messaging", error=str(e))
            raise

    async def _cleanup_messaging(self):
        """Cleanup messaging for the inventory service"""
        try:
            if self.rabbitmq_client:
                await self.rabbitmq_client.disconnect()
            self.logger.info("Inventory service messaging cleanup completed")
        except Exception as e:
            self.logger.error("Error during inventory messaging cleanup", error=str(e))

    async def on_startup(self, app: FastAPI):
        """Custom startup logic for the inventory service"""
        # Verify migrations first
        await self.verify_migrations()

        # Call parent startup (includes database, messaging, etc.)
        await super().on_startup(app)

        # Initialize alert service with EventPublisher
        if self.event_publisher:
            alert_service = InventoryAlertService(self.event_publisher)
            await alert_service.start()
            self.logger.info("Inventory alert service started")

            # Initialize inventory scheduler with alert service, database manager,
            # and Redis URL for leader election
            inventory_scheduler = InventoryScheduler(
                alert_service,
                self.database_manager,
                redis_url=settings.REDIS_URL  # Leader election in multi-replica deployments
            )
            await inventory_scheduler.start()
            self.logger.info("Inventory scheduler started")

            # Store services in app state
            app.state.alert_service = alert_service
            app.state.inventory_scheduler = inventory_scheduler  # For manual triggering
            app.state.event_publisher = self.event_publisher  # For ML insights
        else:
            self.logger.error("Event publisher not initialized, alert service unavailable")

        # Initialize and start the delivery event consumer
        self.delivery_consumer = DeliveryEventConsumer()

        # Start consuming delivery.received events in the background
        if self.rabbitmq_client and self.rabbitmq_client.connected:
            self.delivery_consumer_task = asyncio.create_task(
                self.delivery_consumer.consume_delivery_received_events(self.rabbitmq_client)
            )
            self.logger.info("Delivery event consumer started successfully")
        else:
            self.logger.warning("RabbitMQ not connected, delivery event consumer not started")

        app.state.delivery_consumer = self.delivery_consumer

    async def on_shutdown(self, app: FastAPI):
        """Custom shutdown logic for the inventory service"""
        # Stop inventory scheduler
        if hasattr(app.state, 'inventory_scheduler') and app.state.inventory_scheduler:
            await app.state.inventory_scheduler.stop()
            self.logger.info("Inventory scheduler stopped")

        # Cancel delivery consumer task
        if self.delivery_consumer_task and not self.delivery_consumer_task.done():
            self.delivery_consumer_task.cancel()
            try:
                await self.delivery_consumer_task
            except asyncio.CancelledError:
                self.logger.info("Delivery event consumer task cancelled")

        # Stop alert service
        if hasattr(app.state, 'alert_service'):
            await app.state.alert_service.stop()
            self.logger.info("Alert service stopped")

        await super().on_shutdown(app)

    def get_service_features(self):
        """Return inventory-specific features"""
        return [
            "ingredient_management",
            "stock_tracking",
            "expiration_alerts",
            "low_stock_alerts",
            "batch_tracking",
            "fifo_consumption",
            "barcode_support",
            "food_safety_compliance",
            "temperature_monitoring",
            "dashboard_analytics",
            "business_model_detection",
            "real_time_alerts",
            "regulatory_reporting",
            "sustainability_tracking",
            "sdg_compliance",
            "environmental_impact",
            "grant_reporting"
        ]


# Create service instance
service = InventoryService()

# Create FastAPI app with standardized setup
app = service.create_app()

# Setup standard endpoints
service.setup_standard_endpoints()

# Include the standardized routers
# IMPORTANT: Register the audit router FIRST to avoid route matching conflicts
service.add_router(audit.router)
service.add_router(batch.router)
service.add_router(ingredients.router)
service.add_router(stock_entries.router)
service.add_router(transformations.router)
service.add_router(inventory_operations.router)
service.add_router(food_safety_compliance.router)
service.add_router(temperature_logs.router)
service.add_router(food_safety_alerts.router)
service.add_router(food_safety_operations.router)
service.add_router(dashboard.router)
service.add_router(analytics.router)
service.add_router(sustainability.router)
service.add_router(internal_demo.router, tags=["internal-demo"])
service.add_router(internal.router)
service.add_router(ml_insights.router)  # ML insights endpoint
service.add_router(ml_insights.internal_router)  # Internal ML insights endpoint for demo cloning
service.add_router(internal_alert_trigger_router)  # Internal alert trigger for demo cloning
service.add_router(enterprise_inventory.router)  # Enterprise inventory endpoints


if __name__ == "__main__":
    import uvicorn

    uvicorn.run(
        "app.main:app",
        host="0.0.0.0",
        port=8000,
        reload=os.getenv("RELOAD", "false").lower() == "true",
        log_level="info"
    )
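# Illustrative smoke-test sketch (not in the original file). It assumes
# setup_standard_endpoints() exposes a /health route, which is not shown in
# this excerpt.
#
#     import httpx
#
#     async def smoke_test():
#         async with httpx.AsyncClient(base_url="http://localhost:8000") as client:
#             resp = await client.get("/health")
#             assert resp.status_code == 200, resp.text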
443
services/inventory/app/ml/safety_stock_insights_orchestrator.py
Normal file
@@ -0,0 +1,443 @@
"""
Safety Stock Insights Orchestrator
Coordinates safety stock optimization and insight posting
"""

import pandas as pd
from typing import Dict, List, Any, Optional
import structlog
from datetime import datetime
from uuid import UUID
import sys
import os

# Add shared clients to path
sys.path.append(os.path.join(os.path.dirname(__file__), '../../../..'))
from shared.clients.ai_insights_client import AIInsightsClient
from shared.messaging import UnifiedEventPublisher

from app.ml.safety_stock_optimizer import SafetyStockOptimizer

logger = structlog.get_logger()


class SafetyStockInsightsOrchestrator:
    """
    Orchestrates safety stock optimization and insight generation workflow.

    Workflow:
    1. Optimize safety stock from demand history and cost parameters
    2. Generate insights comparing optimal vs hardcoded approach
    3. Post insights to AI Insights Service
    4. Publish recommendation events to RabbitMQ
    5. Provide optimized safety stock levels for inventory management
    """
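    # Illustrative usage sketch (not in the original file): driving a single-product
    # optimization run. Column names in the DataFrame are hypothetical; the
    # optimizer's actual expected schema lives in SafetyStockOptimizer.
    #
    #     import pandas as pd
    #
    #     async def run_once(orchestrator: "SafetyStockInsightsOrchestrator"):
    #         demand = pd.DataFrame({
    #             "date": pd.date_range("2025-01-01", periods=120, freq="D"),
    #             "quantity": [42.0] * 120,  # stand-in demand series
    #         })
    #         return await orchestrator.optimize_and_post_insights(
    #             tenant_id="…",
    #             inventory_product_id="…",
    #             demand_history=demand,
    #             product_characteristics={"ingredient_name": "flour"},
    #         )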

    def __init__(
        self,
        ai_insights_base_url: str = "http://ai-insights-service:8000",
        event_publisher: Optional[UnifiedEventPublisher] = None
    ):
        self.optimizer = SafetyStockOptimizer()
        self.ai_insights_client = AIInsightsClient(ai_insights_base_url)
        self.event_publisher = event_publisher

    async def optimize_and_post_insights(
        self,
        tenant_id: str,
        inventory_product_id: str,
        demand_history: pd.DataFrame,
        product_characteristics: Dict[str, Any],
        cost_parameters: Optional[Dict[str, float]] = None,
        supplier_reliability: Optional[float] = None,
        min_history_days: int = 90
    ) -> Dict[str, Any]:
        """
        Complete workflow: optimize safety stock and post insights.

        Args:
            tenant_id: Tenant identifier
            inventory_product_id: Product identifier
            demand_history: Historical demand data
            product_characteristics: Product properties
            cost_parameters: Optional cost parameters
            supplier_reliability: Optional supplier on-time rate
            min_history_days: Minimum days of history required

        Returns:
            Workflow results with optimization and posted insights
        """
        logger.info(
            "Starting safety stock optimization workflow",
            tenant_id=tenant_id,
            inventory_product_id=inventory_product_id,
            history_days=len(demand_history)
        )

        # Step 1: Optimize safety stock
        optimization_results = await self.optimizer.optimize_safety_stock(
            tenant_id=tenant_id,
            inventory_product_id=inventory_product_id,
            demand_history=demand_history,
            product_characteristics=product_characteristics,
            cost_parameters=cost_parameters,
            supplier_reliability=supplier_reliability,
            min_history_days=min_history_days
        )

        logger.info(
            "Safety stock optimization complete",
            inventory_product_id=inventory_product_id,
            optimal_stock=optimization_results.get('optimal_result', {}).get('safety_stock'),
            insights_generated=len(optimization_results.get('insights', []))
        )

        # Step 2: Enrich insights with tenant_id and product context
        enriched_insights = self._enrich_insights(
            optimization_results.get('insights', []),
            tenant_id,
            inventory_product_id
        )

        # Step 3: Post insights to the AI Insights Service
        if enriched_insights:
            post_results = await self.ai_insights_client.create_insights_bulk(
                tenant_id=UUID(tenant_id),
                insights=enriched_insights
            )

            logger.info(
                "Safety stock insights posted to AI Insights Service",
                inventory_product_id=inventory_product_id,
                total=post_results['total'],
                successful=post_results['successful'],
                failed=post_results['failed']
            )

            # Step 4: Publish recommendation events to RabbitMQ
            created_insights = post_results.get('created_insights', [])
            if created_insights:
                product_context = product_characteristics.copy() if product_characteristics else {}
                product_context['inventory_product_id'] = inventory_product_id
                await self._publish_insight_events(
                    tenant_id=tenant_id,
                    insights=created_insights,
                    product_context=product_context
                )
        else:
            post_results = {'total': 0, 'successful': 0, 'failed': 0}
            logger.info("No insights to post for product", inventory_product_id=inventory_product_id)

        # Step 5: Return comprehensive results
        return {
            'tenant_id': tenant_id,
            'inventory_product_id': inventory_product_id,
            'optimized_at': optimization_results['optimized_at'],
            'history_days': optimization_results['history_days'],
            'optimal_safety_stock': optimization_results.get('optimal_result', {}).get('safety_stock'),
            'optimal_service_level': optimization_results.get('optimal_result', {}).get('service_level'),
            'cost_savings': optimization_results.get('comparison', {}).get('annual_holding_cost_savings'),
            'insights_generated': len(enriched_insights),
            'insights_posted': post_results['successful'],
            'insights_failed': post_results['failed'],
            'created_insights': post_results.get('created_insights', [])
        }

    def _enrich_insights(
        self,
        insights: List[Dict[str, Any]],
        tenant_id: str,
        inventory_product_id: str
    ) -> List[Dict[str, Any]]:
        """
        Enrich insights with the required fields for the AI Insights Service.

        Args:
            insights: Raw insights from the optimizer
            tenant_id: Tenant identifier
            inventory_product_id: Product identifier

        Returns:
            Enriched insights ready for posting
        """
        enriched = []

        for insight in insights:
            # Add required tenant_id
            enriched_insight = insight.copy()
            enriched_insight['tenant_id'] = tenant_id

            # Add product context to metrics
            if 'metrics_json' not in enriched_insight:
                enriched_insight['metrics_json'] = {}

            enriched_insight['metrics_json']['inventory_product_id'] = inventory_product_id

            # Add source metadata
            enriched_insight['source_service'] = 'inventory'
            enriched_insight['source_model'] = 'safety_stock_optimizer'
            enriched_insight['detected_at'] = datetime.utcnow().isoformat()

            enriched.append(enriched_insight)

        return enriched
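    # Illustrative before/after sketch (not in the original file): _enrich_insights
    # adds the fields below to whatever the optimizer emitted; values are hypothetical.
    #
    #     raw   = {"insight_type": "safety_stock_reduction", "confidence": 82}
    #     after = {
    #         "insight_type": "safety_stock_reduction",
    #         "confidence": 82,
    #         "tenant_id": "…",
    #         "metrics_json": {"inventory_product_id": "…"},
    #         "source_service": "inventory",
    #         "source_model": "safety_stock_optimizer",
    #         "detected_at": "2025-01-15T09:30:00",
    #     }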
|
||||
async def _publish_insight_events(
|
||||
self,
|
||||
tenant_id: str,
|
||||
insights: List[Dict[str, Any]],
|
||||
product_context: Optional[Dict[str, Any]] = None
|
||||
) -> None:
|
||||
"""
|
||||
Publish recommendation events to RabbitMQ for each insight.
|
||||
|
||||
Args:
|
||||
tenant_id: Tenant identifier
|
||||
insights: List of created insights (with insight_id from AI Insights Service)
|
||||
product_context: Optional product context (name, id, etc.)
|
||||
"""
|
||||
if not self.event_publisher:
|
||||
logger.warning("Event publisher not configured, skipping event publication")
|
||||
return
|
||||
|
||||
for insight in insights:
|
||||
try:
|
||||
# Determine severity based on confidence and priority
|
||||
confidence = insight.get('confidence', 0)
|
||||
priority = insight.get('priority', 'medium')
|
||||
|
||||
if priority == 'urgent' or confidence >= 90:
|
||||
severity = 'urgent'
|
||||
elif priority == 'high' or confidence >= 70:
|
||||
severity = 'high'
|
||||
elif priority == 'medium' or confidence >= 50:
|
||||
severity = 'medium'
|
||||
else:
|
||||
severity = 'low'
|
||||
|
||||
# Build event metadata
|
||||
event_metadata = {
|
||||
'insight_id': insight.get('id'),
|
||||
'insight_type': insight.get('insight_type'),
|
||||
'inventory_product_id': insight.get('metrics_json', {}).get('inventory_product_id'),
|
||||
'ingredient_name': product_context.get('ingredient_name') if product_context else None,
|
||||
'suggested_safety_stock': insight.get('metrics_json', {}).get('suggested_safety_stock'),
|
||||
'current_safety_stock': insight.get('metrics_json', {}).get('current_safety_stock'),
|
||||
'estimated_savings': insight.get('impact_value'),
|
||||
'confidence': confidence,
|
||||
'recommendation': insight.get('recommendation'),
|
||||
'impact_type': insight.get('impact_type'),
|
||||
'source_service': 'inventory',
|
||||
'source_model': 'safety_stock_optimizer'
|
||||
}
|
||||
|
||||
# Remove None values
|
||||
event_metadata = {k: v for k, v in event_metadata.items() if v is not None}
|
||||
|
||||
# Publish recommendation event
|
||||
await self.event_publisher.publish_recommendation(
|
||||
event_type='ai_safety_stock_optimization',
|
||||
tenant_id=tenant_id,
|
||||
severity=severity,
|
||||
data=event_metadata
|
||||
)
|
||||
|
||||
logger.info(
|
||||
"Published safety stock insight recommendation event",
|
||||
tenant_id=tenant_id,
|
||||
insight_id=insight.get('id'),
|
||||
insight_type=insight.get('insight_type'),
|
||||
severity=severity
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
"Failed to publish insight event",
|
||||
tenant_id=tenant_id,
|
||||
insight_id=insight.get('id'),
|
||||
error=str(e),
|
||||
exc_info=True
|
||||
)
|
||||
# Don't raise - we don't want to fail the whole workflow if event publishing fails
|
||||
|
||||
    async def optimize_all_products(
        self,
        tenant_id: str,
        products_data: Dict[str, Dict[str, Any]],
        min_history_days: int = 90
    ) -> Dict[str, Any]:
        """
        Optimize safety stock for all products for a tenant.

        Args:
            tenant_id: Tenant identifier
            products_data: Dict of {inventory_product_id: {
                'demand_history': DataFrame,
                'product_characteristics': dict,
                'cost_parameters': dict (optional),
                'supplier_reliability': float (optional)
            }}
            min_history_days: Minimum days of history required

        Returns:
            Comprehensive optimization results
        """
        logger.info(
            "Optimizing safety stock for all products",
            tenant_id=tenant_id,
            products=len(products_data)
        )

        all_results = []
        total_insights_posted = 0
        total_cost_savings = 0.0

        # Optimize each product
        for inventory_product_id, product_data in products_data.items():
            try:
                results = await self.optimize_and_post_insights(
                    tenant_id=tenant_id,
                    inventory_product_id=inventory_product_id,
                    demand_history=product_data['demand_history'],
                    product_characteristics=product_data['product_characteristics'],
                    cost_parameters=product_data.get('cost_parameters'),
                    supplier_reliability=product_data.get('supplier_reliability'),
                    min_history_days=min_history_days
                )

                all_results.append(results)
                total_insights_posted += results['insights_posted']

                if results.get('cost_savings'):
                    total_cost_savings += results['cost_savings']

            except Exception as e:
                logger.error(
                    "Error optimizing product",
                    inventory_product_id=inventory_product_id,
                    error=str(e)
                )

        # Generate summary insight
        if total_cost_savings > 0:
            summary_insight = self._generate_portfolio_summary_insight(
                tenant_id, all_results, total_cost_savings
            )

            if summary_insight:
                enriched_summary = self._enrich_insights(
                    [summary_insight], tenant_id, 'all_products'
                )

                post_results = await self.ai_insights_client.create_insights_bulk(
                    tenant_id=UUID(tenant_id),
                    insights=enriched_summary
                )

                total_insights_posted += post_results['successful']

        logger.info(
            "All products optimization complete",
            tenant_id=tenant_id,
            products_optimized=len(all_results),
            total_insights_posted=total_insights_posted,
            total_annual_savings=total_cost_savings
        )

        return {
            'tenant_id': tenant_id,
            'optimized_at': datetime.utcnow().isoformat(),
            'products_optimized': len(all_results),
            'product_results': all_results,
            'total_insights_posted': total_insights_posted,
            'total_annual_cost_savings': round(total_cost_savings, 2)
        }
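    # Illustrative caller sketch (editorial, hypothetical values — not from the
    # original source). The per-product payload mirrors the docstring above:
    #
    #     products_data = {
    #         'prod-123': {
    #             'demand_history': demand_df,          # pandas DataFrame, one row per day
    #             'product_characteristics': {
    #                 'criticality': 'high',
    #                 'shelf_life_days': 3,
    #                 'unit_cost': 2.40,
    #                 'avg_daily_demand': 18.0,
    #             },
    #             'cost_parameters': {
    #                 'holding_cost_per_unit_per_day': 0.02,
    #                 'stockout_cost_per_unit': 10.0,
    #             },
    #             'supplier_reliability': 0.93,
    #         },
    #     }
    #     summary = await service.optimize_all_products(tenant_id, products_data)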
    def _generate_portfolio_summary_insight(
        self,
        tenant_id: str,
        all_results: List[Dict[str, Any]],
        total_cost_savings: float
    ) -> Optional[Dict[str, Any]]:
        """
        Generate portfolio-level summary insight.

        Args:
            tenant_id: Tenant identifier
            all_results: All product optimization results
            total_cost_savings: Total annual cost savings

        Returns:
            Summary insight or None
        """
        if total_cost_savings < 100:  # Only if savings are meaningful
            return None

        products_optimized = len(all_results)
        products_with_savings = len([r for r in all_results if r.get('cost_savings', 0) > 0])

        return {
            'type': 'optimization',
            'priority': 'high' if total_cost_savings > 1000 else 'medium',
            'category': 'inventory',
            'title': f'Portfolio Safety Stock Optimization: €{total_cost_savings:.0f}/year Savings',
            'description': f'Optimized safety stock across {products_optimized} products. {products_with_savings} products are over-stocked. Implementing optimal levels saves €{total_cost_savings:.2f} annually in holding costs while maintaining or improving service levels.',
            'impact_type': 'cost_savings',
            'impact_value': total_cost_savings,
            'impact_unit': 'euros_per_year',
            'confidence': 85,
            'metrics_json': {
                'products_optimized': products_optimized,
                'products_with_savings': products_with_savings,
                'total_annual_savings': round(total_cost_savings, 2)
            },
            'actionable': True,
            'recommendation_actions': [
                {
                    'label': 'Apply All Optimizations',
                    'action': 'apply_all_safety_stock_optimizations',
                    'params': {'tenant_id': tenant_id}
                },
                {
                    'label': 'Review Individual Products',
                    'action': 'review_safety_stock_insights',
                    'params': {'tenant_id': tenant_id}
                }
            ],
            'source_service': 'inventory',
            'source_model': 'safety_stock_optimizer'
        }
    async def get_optimal_safety_stock(
        self,
        inventory_product_id: str
    ) -> Optional[float]:
        """
        Get cached optimal safety stock for a product.

        Args:
            inventory_product_id: Product identifier

        Returns:
            Optimal safety stock or None if not optimized
        """
        return self.optimizer.get_optimal_safety_stock(inventory_product_id)

    async def get_learned_service_level(
        self,
        inventory_product_id: str
    ) -> Optional[float]:
        """
        Get learned optimal service level for a product.

        Args:
            inventory_product_id: Product identifier

        Returns:
            Optimal service level (0-1) or None if not learned
        """
        return self.optimizer.get_learned_service_level(inventory_product_id)

    async def close(self):
        """Close HTTP client connections."""
        await self.ai_insights_client.close()
755
services/inventory/app/ml/safety_stock_optimizer.py
Normal file
@@ -0,0 +1,755 @@
"""
Safety Stock Optimizer
Replaces hardcoded 95% service level with learned optimal safety stock levels
Optimizes based on product characteristics, demand variability, and cost trade-offs
"""

import pandas as pd
import numpy as np
from typing import Dict, List, Any, Optional, Tuple
import structlog
from datetime import datetime, timedelta
from scipy import stats
from scipy.optimize import minimize_scalar
import warnings
warnings.filterwarnings('ignore')

logger = structlog.get_logger()


class SafetyStockOptimizer:
    """
    Optimizes safety stock levels for inventory management.

    Current problem: Hardcoded 95% service level for all products
    Solution: Learn optimal service levels based on:
    - Product characteristics (shelf life, criticality)
    - Demand variability (coefficient of variation)
    - Cost trade-offs (holding cost vs stockout cost)
    - Historical stockout patterns
    - Supplier reliability

    Approaches:
    1. Statistical approach: Based on demand variability and lead time
    2. Cost-based optimization: Minimize total cost (holding + stockout)
    3. Service level optimization: Product-specific target service levels
    4. Dynamic adjustment: Seasonality and trend awareness
    """

    def __init__(self):
        self.optimal_stocks = {}
        self.learned_service_levels = {}
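    # Usage sketch (editorial illustration, hypothetical names — not from the
    # original source). The optimizer is stateful: each call caches results in
    # self.optimal_stocks / self.learned_service_levels for later lookup.
    #
    #     optimizer = SafetyStockOptimizer()
    #     result = await optimizer.optimize_safety_stock(
    #         tenant_id='tenant-1',
    #         inventory_product_id='prod-123',
    #         demand_history=demand_df,                 # >= 90 daily rows by default
    #         product_characteristics={'criticality': 'high', 'shelf_life_days': 3,
    #                                  'unit_cost': 2.4, 'avg_daily_demand': 18.0},
    #     )
    #     cached = optimizer.get_optimal_safety_stock('prod-123')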
    async def optimize_safety_stock(
        self,
        tenant_id: str,
        inventory_product_id: str,
        demand_history: pd.DataFrame,
        product_characteristics: Dict[str, Any],
        cost_parameters: Optional[Dict[str, float]] = None,
        supplier_reliability: Optional[float] = None,
        min_history_days: int = 90
    ) -> Dict[str, Any]:
        """
        Calculate optimal safety stock for a product.

        Args:
            tenant_id: Tenant identifier
            inventory_product_id: Product identifier
            demand_history: Historical demand data with columns:
                - date
                - demand_quantity
                - stockout (bool, optional)
                - lead_time_days (optional)
            product_characteristics: Product properties:
                - shelf_life_days: int
                - criticality: str (high, medium, low)
                - unit_cost: float
                - avg_daily_demand: float
            cost_parameters: Optional cost params:
                - holding_cost_per_unit_per_day: float
                - stockout_cost_per_unit: float
            supplier_reliability: Supplier on-time rate (0-1)
            min_history_days: Minimum days of history required

        Returns:
            Dictionary with optimal safety stock and insights
        """
        logger.info(
            "Optimizing safety stock",
            tenant_id=tenant_id,
            inventory_product_id=inventory_product_id,
            history_days=len(demand_history)
        )

        # Validate input
        if len(demand_history) < min_history_days:
            logger.warning(
                "Insufficient demand history",
                inventory_product_id=inventory_product_id,
                days=len(demand_history),
                required=min_history_days
            )
            return self._insufficient_data_response(
                tenant_id, inventory_product_id, product_characteristics
            )

        # Calculate demand statistics
        demand_stats = self._calculate_demand_statistics(demand_history)

        # Calculate optimal safety stock using multiple methods
        statistical_result = self._calculate_statistical_safety_stock(
            demand_stats,
            product_characteristics,
            supplier_reliability
        )

        # Cost-based optimization if cost parameters provided
        if cost_parameters:
            cost_based_result = self._calculate_cost_optimal_safety_stock(
                demand_stats,
                product_characteristics,
                cost_parameters,
                demand_history
            )
        else:
            cost_based_result = None

        # Service level optimization
        service_level_result = self._calculate_service_level_optimal_stock(
            demand_stats,
            product_characteristics,
            demand_history
        )

        # Combine methods and select optimal
        optimal_result = self._select_optimal_safety_stock(
            statistical_result,
            cost_based_result,
            service_level_result,
            product_characteristics
        )

        # Compare with current hardcoded approach (95% service level)
        hardcoded_result = self._calculate_hardcoded_safety_stock(
            demand_stats,
            service_level=0.95
        )

        comparison = self._compare_with_hardcoded(
            optimal_result,
            hardcoded_result,
            cost_parameters
        )

        # Generate insights
        insights = self._generate_safety_stock_insights(
            tenant_id,
            inventory_product_id,
            optimal_result,
            hardcoded_result,
            comparison,
            demand_stats,
            product_characteristics
        )

        # Store optimal stock
        self.optimal_stocks[inventory_product_id] = optimal_result['safety_stock']
        self.learned_service_levels[inventory_product_id] = optimal_result['service_level']

        logger.info(
            "Safety stock optimization complete",
            inventory_product_id=inventory_product_id,
            optimal_stock=optimal_result['safety_stock'],
            optimal_service_level=optimal_result['service_level'],
            improvement_vs_hardcoded=comparison.get('cost_savings_pct', 0)
        )

        return {
            'tenant_id': tenant_id,
            'inventory_product_id': inventory_product_id,
            'optimized_at': datetime.utcnow().isoformat(),
            'history_days': len(demand_history),
            'demand_stats': demand_stats,
            'optimal_result': optimal_result,
            'hardcoded_result': hardcoded_result,
            'comparison': comparison,
            'insights': insights
        }
    def _calculate_demand_statistics(
        self,
        demand_history: pd.DataFrame
    ) -> Dict[str, float]:
        """
        Calculate comprehensive demand statistics.

        Args:
            demand_history: Historical demand data

        Returns:
            Dictionary of demand statistics
        """
        # Work on a copy so the caller's DataFrame is never mutated
        demand_history = demand_history.copy()

        # Ensure date column
        if 'date' not in demand_history.columns:
            demand_history['date'] = pd.to_datetime(demand_history.index)

        demand_history['date'] = pd.to_datetime(demand_history['date'])

        # Basic statistics
        mean_demand = demand_history['demand_quantity'].mean()
        std_demand = demand_history['demand_quantity'].std()
        cv_demand = std_demand / mean_demand if mean_demand > 0 else 0

        # Lead time statistics (if available)
        if 'lead_time_days' in demand_history.columns:
            mean_lead_time = demand_history['lead_time_days'].mean()
            std_lead_time = demand_history['lead_time_days'].std()
        else:
            mean_lead_time = 3.0  # Default assumption
            std_lead_time = 0.5

        # Stockout rate (if available)
        if 'stockout' in demand_history.columns:
            stockout_rate = demand_history['stockout'].mean()
            stockout_frequency = demand_history['stockout'].sum()
        else:
            stockout_rate = 0.05  # Assume 5% if not tracked
            stockout_frequency = 0

        # Demand distribution characteristics
        skewness = demand_history['demand_quantity'].skew()
        kurtosis = demand_history['demand_quantity'].kurtosis()

        # Recent trend (last 30 days vs overall)
        if len(demand_history) >= 60:
            recent_mean = demand_history.tail(30)['demand_quantity'].mean()
            trend = (recent_mean - mean_demand) / mean_demand if mean_demand > 0 else 0
        else:
            trend = 0

        return {
            'mean_demand': float(mean_demand),
            'std_demand': float(std_demand),
            'cv_demand': float(cv_demand),
            'min_demand': float(demand_history['demand_quantity'].min()),
            'max_demand': float(demand_history['demand_quantity'].max()),
            'mean_lead_time': float(mean_lead_time),
            'std_lead_time': float(std_lead_time),
            'stockout_rate': float(stockout_rate),
            'stockout_frequency': int(stockout_frequency),
            'skewness': float(skewness),
            'kurtosis': float(kurtosis),
            'trend': float(trend),
            'data_points': int(len(demand_history))
        }
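    # Worked illustration (editorial, made-up numbers): for a daily demand series
    # with mean 20 and standard deviation 5, cv_demand = 5 / 20 = 0.25 — fairly
    # stable. A CV above 0.5 is treated as "high variability" by the insight
    # generation further down, since safety stock scales linearly with the demand
    # standard deviation.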
    def _calculate_statistical_safety_stock(
        self,
        demand_stats: Dict[str, float],
        product_characteristics: Dict[str, Any],
        supplier_reliability: Optional[float] = None
    ) -> Dict[str, Any]:
        """
        Calculate safety stock using the statistical approach (classic formula).

        Formula: SS = Z * sqrt(LT * σ_d² + d_avg² * σ_LT²)
        Where:
        - Z: Z-score for desired service level
        - LT: Mean lead time
        - σ_d: Standard deviation of demand
        - d_avg: Average demand
        - σ_LT: Standard deviation of lead time
        """
        # Determine target service level based on product criticality
        criticality = product_characteristics.get('criticality', 'medium').lower()

        if criticality == 'high':
            target_service_level = 0.98  # 98% for critical products
        elif criticality == 'medium':
            target_service_level = 0.95  # 95% for medium
        else:
            target_service_level = 0.90  # 90% for low criticality

        # Adjust for supplier reliability
        if supplier_reliability is not None and supplier_reliability < 0.9:
            # Less reliable suppliers need higher safety stock
            target_service_level = min(0.99, target_service_level + 0.03)

        # Calculate Z-score for target service level
        z_score = stats.norm.ppf(target_service_level)

        # Calculate safety stock
        mean_demand = demand_stats['mean_demand']
        std_demand = demand_stats['std_demand']
        mean_lead_time = demand_stats['mean_lead_time']
        std_lead_time = demand_stats['std_lead_time']

        # Safety stock formula
        variance_component = (
            mean_lead_time * (std_demand ** 2) +
            (mean_demand ** 2) * (std_lead_time ** 2)
        )

        safety_stock = z_score * np.sqrt(variance_component)

        # Ensure non-negative
        safety_stock = max(0, safety_stock)

        return {
            'method': 'statistical',
            'safety_stock': round(safety_stock, 2),
            'service_level': target_service_level,
            'z_score': round(z_score, 2),
            'rationale': f'Based on {target_service_level*100:.0f}% service level for {criticality} criticality product'
        }
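    # Worked example (editorial, made-up numbers): take mean demand 20/day,
    # σ_d = 5, mean lead time 3 days, σ_LT = 0.5, criticality 'medium'
    # (95% → Z ≈ 1.645). Then:
    #
    #     variance = 3 * 5² + 20² * 0.5² = 75 + 100 = 175
    #     SS ≈ 1.645 * sqrt(175) ≈ 1.645 * 13.23 ≈ 21.8 units
    #
    # Note that the lead-time-variability term (20² * 0.5²) contributes more here
    # than demand variability itself — unreliable lead times are expensive.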
    def _calculate_cost_optimal_safety_stock(
        self,
        demand_stats: Dict[str, float],
        product_characteristics: Dict[str, Any],
        cost_parameters: Dict[str, float],
        demand_history: pd.DataFrame
    ) -> Dict[str, Any]:
        """
        Calculate safety stock that minimizes total cost (holding + stockout).

        Total Cost = (Holding Cost × Safety Stock) + (Stockout Cost × Stockout Frequency)
        """
        holding_cost = cost_parameters.get('holding_cost_per_unit_per_day', 0.01)
        stockout_cost = cost_parameters.get('stockout_cost_per_unit', 10.0)

        mean_demand = demand_stats['mean_demand']
        std_demand = demand_stats['std_demand']
        mean_lead_time = demand_stats['mean_lead_time']

        def total_cost(safety_stock):
            """Calculate total cost for given safety stock level."""
            # Holding cost (annual)
            annual_holding_cost = holding_cost * safety_stock * 365

            # Stockout probability and expected stockouts:
            # demand during lead time follows a normal distribution
            demand_during_lt_mean = mean_demand * mean_lead_time
            demand_during_lt_std = std_demand * np.sqrt(mean_lead_time)

            # Service level achieved with this safety stock
            if demand_during_lt_std > 0:
                z_score = safety_stock / demand_during_lt_std
                service_level = stats.norm.cdf(z_score)
            else:
                service_level = 0.99

            # Stockout probability
            stockout_prob = 1 - service_level

            # Expected annual stockouts (simplified)
            orders_per_year = 365 / mean_lead_time
            expected_stockouts = stockout_prob * orders_per_year * mean_demand

            # Stockout cost (annual)
            annual_stockout_cost = expected_stockouts * stockout_cost

            return annual_holding_cost + annual_stockout_cost

        # Optimize to find minimum total cost
        # Search range: 0 to 5 * mean demand during lead time
        max_search = 5 * mean_demand * mean_lead_time

        result = minimize_scalar(
            total_cost,
            bounds=(0, max_search),
            method='bounded'
        )

        optimal_safety_stock = result.x
        optimal_cost = result.fun

        # Calculate achieved service level
        demand_during_lt_std = std_demand * np.sqrt(mean_lead_time)
        if demand_during_lt_std > 0:
            z_score = optimal_safety_stock / demand_during_lt_std
            achieved_service_level = stats.norm.cdf(z_score)
        else:
            achieved_service_level = 0.99

        return {
            'method': 'cost_optimization',
            'safety_stock': round(optimal_safety_stock, 2),
            'service_level': round(achieved_service_level, 4),
            'annual_total_cost': round(optimal_cost, 2),
            'rationale': f'Minimizes total cost (holding + stockout): €{optimal_cost:.2f}/year'
        }
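    # Worked illustration of the trade-off (editorial, made-up numbers): with
    # holding cost €0.02/unit/day, stockout cost €10/unit, mean demand 20/day and
    # lead time 3 days, a safety stock of 20 units costs roughly
    # 0.02 * 20 * 365 ≈ €146/year to hold, while the residual ~1% stockout
    # probability contributes on the order of €250/year — so the optimizer keeps
    # moving until the marginal holding cost balances the marginal stockout cost.
    # minimize_scalar handles this directly because total_cost is a smooth
    # one-dimensional function of safety_stock.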
    def _calculate_service_level_optimal_stock(
        self,
        demand_stats: Dict[str, float],
        product_characteristics: Dict[str, Any],
        demand_history: pd.DataFrame
    ) -> Dict[str, Any]:
        """
        Calculate safety stock based on empirical service level optimization.

        Uses historical stockout data to find optimal service level.
        """
        # If we have stockout history, learn from it
        if 'stockout' in demand_history.columns and demand_history['stockout'].sum() > 0:
            current_stockout_rate = demand_stats['stockout_rate']

            # Target: halve the stockout rate, capped at a 5% stockout rate —
            # i.e. aim for the higher of (1 - rate/2) and a 95% service level
            target_stockout_rate = min(current_stockout_rate * 0.5, 0.05)
            target_service_level = 1 - target_stockout_rate

        else:
            # No stockout data, use criticality-based default
            criticality = product_characteristics.get('criticality', 'medium').lower()
            target_service_level = {
                'high': 0.98,
                'medium': 0.95,
                'low': 0.90
            }.get(criticality, 0.95)

        # Calculate safety stock for target service level
        z_score = stats.norm.ppf(target_service_level)
        mean_demand = demand_stats['mean_demand']
        std_demand = demand_stats['std_demand']
        mean_lead_time = demand_stats['mean_lead_time']

        safety_stock = z_score * std_demand * np.sqrt(mean_lead_time)
        safety_stock = max(0, safety_stock)

        return {
            'method': 'service_level_optimization',
            'safety_stock': round(safety_stock, 2),
            'service_level': target_service_level,
            'rationale': f'Achieves {target_service_level*100:.0f}% service level based on historical performance'
        }
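    # Example of the learned target (editorial, made-up numbers): a product that
    # stocks out on 12% of days gets target_stockout_rate = min(0.06, 0.05) = 0.05
    # → a 95% service level; one stocking out on 4% of days gets
    # min(0.02, 0.05) = 0.02 → a 98% service level. Products that already perform
    # well are pushed to a stricter target.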
    def _select_optimal_safety_stock(
        self,
        statistical_result: Dict[str, Any],
        cost_based_result: Optional[Dict[str, Any]],
        service_level_result: Dict[str, Any],
        product_characteristics: Dict[str, Any]
    ) -> Dict[str, Any]:
        """
        Select optimal safety stock from multiple methods.

        Priority:
        1. Cost-based if available and product value is high
        2. Statistical for general case
        3. Service level as validation
        """
        # If cost data available and product is valuable, use cost optimization
        if cost_based_result and product_characteristics.get('unit_cost', 0) > 5:
            selected = cost_based_result
            logger.info("Selected cost-based safety stock (high-value product)")

        # Otherwise use statistical approach
        else:
            selected = statistical_result
            logger.info("Selected statistical safety stock")

        # Add shelf life constraint
        shelf_life = product_characteristics.get('shelf_life_days')
        if shelf_life:
            max_safe_stock = product_characteristics.get('avg_daily_demand', 0) * (shelf_life * 0.5)
            if selected['safety_stock'] > max_safe_stock:
                logger.warning(
                    "Safety stock exceeds shelf life constraint",
                    calculated=selected['safety_stock'],
                    max_allowed=max_safe_stock
                )
                selected['safety_stock'] = round(max_safe_stock, 2)
                selected['constrained_by'] = 'shelf_life'

        return selected
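    # Shelf-life cap, worked through (editorial, made-up numbers): a pastry with
    # avg_daily_demand = 20 and shelf_life_days = 4 may hold at most
    # 20 * (4 * 0.5) = 40 units of safety stock — half the shelf life's worth of
    # demand — so even a cost-optimal figure of 55 units would be clamped to 40
    # and tagged constrained_by='shelf_life'.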
    def _calculate_hardcoded_safety_stock(
        self,
        demand_stats: Dict[str, float],
        service_level: float = 0.95
    ) -> Dict[str, Any]:
        """
        Calculate safety stock using the current hardcoded 95% service level.

        Args:
            demand_stats: Demand statistics
            service_level: Hardcoded service level (default 0.95)

        Returns:
            Safety stock result with hardcoded approach
        """
        z_score = stats.norm.ppf(service_level)
        mean_demand = demand_stats['mean_demand']
        std_demand = demand_stats['std_demand']
        mean_lead_time = demand_stats['mean_lead_time']

        safety_stock = z_score * std_demand * np.sqrt(mean_lead_time)
        safety_stock = max(0, safety_stock)

        return {
            'method': 'hardcoded_95_service_level',
            'safety_stock': round(safety_stock, 2),
            'service_level': service_level,
            'rationale': 'Current hardcoded 95% service level for all products'
        }
    def _compare_with_hardcoded(
        self,
        optimal_result: Dict[str, Any],
        hardcoded_result: Dict[str, Any],
        cost_parameters: Optional[Dict[str, float]]
    ) -> Dict[str, Any]:
        """
        Compare optimal safety stock with the hardcoded approach.

        Args:
            optimal_result: Optimal safety stock result
            hardcoded_result: Hardcoded approach result
            cost_parameters: Optional cost parameters for savings calculation

        Returns:
            Comparison metrics
        """
        optimal_stock = optimal_result['safety_stock']
        hardcoded_stock = hardcoded_result['safety_stock']

        stock_difference = optimal_stock - hardcoded_stock
        stock_difference_pct = (stock_difference / hardcoded_stock * 100) if hardcoded_stock > 0 else 0

        comparison = {
            'stock_difference': round(stock_difference, 2),
            'stock_difference_pct': round(stock_difference_pct, 2),
            'optimal_service_level': optimal_result['service_level'],
            'hardcoded_service_level': hardcoded_result['service_level'],
            'service_level_difference': round(
                (optimal_result['service_level'] - hardcoded_result['service_level']) * 100, 2
            )
        }

        # Calculate cost savings if cost data available
        if cost_parameters:
            holding_cost = cost_parameters.get('holding_cost_per_unit_per_day', 0.01)
            annual_holding_savings = stock_difference * holding_cost * 365

            comparison['annual_holding_cost_savings'] = round(annual_holding_savings, 2)
            if hardcoded_stock > 0:
                comparison['cost_savings_pct'] = round(
                    (annual_holding_savings / (hardcoded_stock * holding_cost * 365)) * 100, 2
                )

        return comparison
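    # Sign convention, made concrete (editorial, made-up numbers): optimal 18 units
    # vs hardcoded 24 gives stock_difference = -6 (-25%); at €0.01/unit/day the
    # annual_holding_cost_savings field is -6 * 0.01 * 365 ≈ -€21.9. A negative
    # value therefore means money saved by holding less stock, and the insight
    # text below surfaces it via abs().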
    def _generate_safety_stock_insights(
        self,
        tenant_id: str,
        inventory_product_id: str,
        optimal_result: Dict[str, Any],
        hardcoded_result: Dict[str, Any],
        comparison: Dict[str, Any],
        demand_stats: Dict[str, float],
        product_characteristics: Dict[str, Any]
    ) -> List[Dict[str, Any]]:
        """
        Generate actionable insights from safety stock optimization.

        Args:
            tenant_id: Tenant ID
            inventory_product_id: Product ID
            optimal_result: Optimal safety stock result
            hardcoded_result: Hardcoded result
            comparison: Comparison metrics
            demand_stats: Demand statistics
            product_characteristics: Product characteristics

        Returns:
            List of insights
        """
        insights = []

        stock_diff_pct = comparison['stock_difference_pct']

        # Insight 1: Over-stocking reduction opportunity
        if stock_diff_pct < -10:  # Optimal is >10% lower
            cost_savings = comparison.get('annual_holding_cost_savings', 0)

            insights.append({
                'type': 'optimization',
                'priority': 'high' if abs(stock_diff_pct) > 25 else 'medium',
                'category': 'inventory',
                'title': f'Reduce Safety Stock by {abs(stock_diff_pct):.0f}%',
                'description': f'Product {inventory_product_id} is over-stocked. Optimal safety stock is {optimal_result["safety_stock"]:.1f} units vs current {hardcoded_result["safety_stock"]:.1f}. Reducing to the optimal level saves €{abs(cost_savings):.2f}/year in holding costs while maintaining a {optimal_result["service_level"]*100:.1f}% service level.',
                'impact_type': 'cost_savings',
                'impact_value': abs(cost_savings),
                'impact_unit': 'euros_per_year',
                'confidence': 85,
                'metrics_json': {
                    'inventory_product_id': inventory_product_id,
                    'current_safety_stock': round(hardcoded_result['safety_stock'], 2),
                    'optimal_safety_stock': round(optimal_result['safety_stock'], 2),
                    'reduction_pct': round(abs(stock_diff_pct), 2),
                    'annual_savings': round(abs(cost_savings), 2),
                    'optimal_service_level': round(optimal_result['service_level'] * 100, 2)
                },
                'actionable': True,
                'recommendation_actions': [
                    {
                        'label': 'Update Safety Stock',
                        'action': 'update_safety_stock',
                        'params': {
                            'inventory_product_id': inventory_product_id,
                            'new_safety_stock': round(optimal_result['safety_stock'], 2)
                        }
                    }
                ],
                'source_service': 'inventory',
                'source_model': 'safety_stock_optimizer'
            })

        # Insight 2: Under-stocking risk
        elif stock_diff_pct > 10:  # Optimal is >10% higher
            insights.append({
                'type': 'alert',
                'priority': 'high' if stock_diff_pct > 25 else 'medium',
                'category': 'inventory',
                'title': f'Increase Safety Stock by {stock_diff_pct:.0f}%',
                'description': f'Product {inventory_product_id} safety stock is too low. The current {hardcoded_result["safety_stock"]:.1f} units provide only a {hardcoded_result["service_level"]*100:.0f}% service level. Increase to {optimal_result["safety_stock"]:.1f} for the optimal {optimal_result["service_level"]*100:.1f}% service level.',
                'impact_type': 'stockout_risk_reduction',
                'impact_value': stock_diff_pct,
                'impact_unit': 'percentage',
                'confidence': 85,
                'metrics_json': {
                    'inventory_product_id': inventory_product_id,
                    'current_safety_stock': round(hardcoded_result['safety_stock'], 2),
                    'optimal_safety_stock': round(optimal_result['safety_stock'], 2),
                    'increase_pct': round(stock_diff_pct, 2),
                    'current_service_level': round(hardcoded_result['service_level'] * 100, 2),
                    'optimal_service_level': round(optimal_result['service_level'] * 100, 2),
                    'historical_stockout_rate': round(demand_stats['stockout_rate'] * 100, 2)
                },
                'actionable': True,
                'recommendation_actions': [
                    {
                        'label': 'Update Safety Stock',
                        'action': 'update_safety_stock',
                        'params': {
                            'inventory_product_id': inventory_product_id,
                            'new_safety_stock': round(optimal_result['safety_stock'], 2)
                        }
                    }
                ],
                'source_service': 'inventory',
                'source_model': 'safety_stock_optimizer'
            })

        # Insight 3: High demand variability
        if demand_stats['cv_demand'] > 0.5:  # Coefficient of variation > 0.5
            insights.append({
                'type': 'insight',
                'priority': 'medium',
                'category': 'inventory',
                'title': 'High Demand Variability Detected',
                'description': f'Product {inventory_product_id} has high demand variability (CV={demand_stats["cv_demand"]:.2f}). This increases safety stock requirements. Consider demand smoothing strategies or more frequent orders.',
                'impact_type': 'operational_insight',
                'impact_value': demand_stats['cv_demand'],
                'impact_unit': 'coefficient_of_variation',
                'confidence': 90,
                'metrics_json': {
                    'inventory_product_id': inventory_product_id,
                    'cv_demand': round(demand_stats['cv_demand'], 2),
                    'mean_demand': round(demand_stats['mean_demand'], 2),
                    'std_demand': round(demand_stats['std_demand'], 2)
                },
                'actionable': True,
                'recommendation_actions': [
                    {
                        'label': 'Review Demand Patterns',
                        'action': 'analyze_demand_patterns',
                        'params': {'inventory_product_id': inventory_product_id}
                    }
                ],
                'source_service': 'inventory',
                'source_model': 'safety_stock_optimizer'
            })

        # Insight 4: Frequent stockouts
        if demand_stats['stockout_rate'] > 0.1:  # More than 10% stockout rate
            insights.append({
                'type': 'alert',
                'priority': 'critical' if demand_stats['stockout_rate'] > 0.2 else 'high',
                'category': 'inventory',
                'title': f'Frequent Stockouts: {demand_stats["stockout_rate"]*100:.1f}%',
                'description': f'Product {inventory_product_id} experiences frequent stockouts ({demand_stats["stockout_rate"]*100:.1f}% of days). The optimal safety stock of {optimal_result["safety_stock"]:.1f} units should reduce this significantly.',
                'impact_type': 'stockout_frequency',
                'impact_value': demand_stats['stockout_rate'] * 100,
                'impact_unit': 'percentage',
                'confidence': 95,
                'metrics_json': {
                    'inventory_product_id': inventory_product_id,
                    'stockout_rate': round(demand_stats['stockout_rate'] * 100, 2),
                    'stockout_frequency': demand_stats['stockout_frequency'],
                    'optimal_safety_stock': round(optimal_result['safety_stock'], 2)
                },
                'actionable': True,
                'recommendation_actions': [
                    {
                        'label': 'URGENT: Update Safety Stock',
                        'action': 'update_safety_stock',
                        'params': {
                            'inventory_product_id': inventory_product_id,
                            'new_safety_stock': round(optimal_result['safety_stock'], 2)
                        }
                    },
                    {
                        'label': 'Review Supplier Reliability',
                        'action': 'review_supplier',
                        'params': {'inventory_product_id': inventory_product_id}
                    }
                ],
                'source_service': 'inventory',
                'source_model': 'safety_stock_optimizer'
            })

        return insights
    def _insufficient_data_response(
        self,
        tenant_id: str,
        inventory_product_id: str,
        product_characteristics: Dict[str, Any]
    ) -> Dict[str, Any]:
        """Return response when insufficient data available."""
        # Use simple heuristic based on criticality
        criticality = product_characteristics.get('criticality', 'medium').lower()
        avg_daily_demand = product_characteristics.get('avg_daily_demand', 10)

        # Simple rule: 7 days of demand for high, 5 for medium, 3 for low
        safety_stock_days = {'high': 7, 'medium': 5, 'low': 3}.get(criticality, 5)
        fallback_safety_stock = avg_daily_demand * safety_stock_days

        return {
            'tenant_id': tenant_id,
            'inventory_product_id': inventory_product_id,
            'optimized_at': datetime.utcnow().isoformat(),
            'history_days': 0,
            'demand_stats': {},
            'optimal_result': {
                'method': 'fallback_heuristic',
                'safety_stock': round(fallback_safety_stock, 2),
                'service_level': 0.95,
                'rationale': f'Insufficient data. Using {safety_stock_days} days of demand for {criticality} criticality.'
            },
            'hardcoded_result': None,
            'comparison': {},
            'insights': []
        }
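    # Fallback arithmetic, spelled out (editorial, made-up numbers): a 'high'
    # criticality product with avg_daily_demand = 12 falls back to
    # 7 * 12 = 84 units — a crude days-of-cover rule, but safe when fewer than
    # min_history_days of demand rows exist.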
    def get_optimal_safety_stock(self, inventory_product_id: str) -> Optional[float]:
        """Get cached optimal safety stock for a product."""
        return self.optimal_stocks.get(inventory_product_id)

    def get_learned_service_level(self, inventory_product_id: str) -> Optional[float]:
        """Get learned optimal service level for a product."""
        return self.learned_service_levels.get(inventory_product_id)
74
services/inventory/app/models/__init__.py
Normal file
@@ -0,0 +1,74 @@
"""
Inventory Service Models Package

Import all models to ensure they are registered with SQLAlchemy Base.
"""

# Import AuditLog model for this service
from shared.security import create_audit_log_model
from shared.database.base import Base

# Create audit log model for this service
AuditLog = create_audit_log_model(Base)

# Import all models to register them with the Base metadata
from .inventory import (
    Ingredient,
    Stock,
    StockMovement,
    ProductTransformation,
    StockAlert,
    UnitOfMeasure,
    IngredientCategory,
    ProductCategory,
    ProductType,
    ProductionStage,
    StockMovementType,
)

from .food_safety import (
    FoodSafetyCompliance,
    TemperatureLog,
    FoodSafetyAlert,
    FoodSafetyStandard,
    ComplianceStatus,
    FoodSafetyAlertType,
)

from .stock_receipt import (
    StockReceipt,
    StockReceiptLineItem,
    StockLot,
    ReceiptStatus,
)

# List all models for easier access
__all__ = [
    # Inventory models
    "Ingredient",
    "Stock",
    "StockMovement",
    "ProductTransformation",
    "StockAlert",
    # Inventory enums
    "UnitOfMeasure",
    "IngredientCategory",
    "ProductCategory",
    "ProductType",
    "ProductionStage",
    "StockMovementType",
    # Food safety models
    "FoodSafetyCompliance",
    "TemperatureLog",
    "FoodSafetyAlert",
    # Food safety enums
    "FoodSafetyStandard",
    "ComplianceStatus",
    "FoodSafetyAlertType",
    # Stock receipt models
    "StockReceipt",
    "StockReceiptLineItem",
    "StockLot",
    "ReceiptStatus",
    "AuditLog",
]
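# Editorial note (not from the original source): because importing this package
# registers every table on the shared Base metadata, a migration or test harness
# only needs something like:
#
#     import app.models                       # side effect: tables registered
#     Base.metadata.create_all(bind=engine)   # 'engine' is a hypothetical SQLAlchemy engine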
369
services/inventory/app/models/food_safety.py
Normal file
@@ -0,0 +1,369 @@
# ================================================================
# services/inventory/app/models/food_safety.py
# ================================================================
"""
Food safety and compliance models for Inventory Service
"""

import uuid
import enum
from datetime import datetime
from typing import Dict, Any, Optional
from sqlalchemy import Column, String, DateTime, Float, Integer, Text, Boolean, Numeric, ForeignKey, Enum as SQLEnum
from sqlalchemy.dialects.postgresql import UUID, JSONB
from sqlalchemy.orm import relationship
from sqlalchemy.sql import func

from shared.database.base import Base


class FoodSafetyStandard(enum.Enum):
    """Food safety standards and certifications"""
    HACCP = "haccp"
    FDA = "fda"
    USDA = "usda"
    FSMA = "fsma"
    SQF = "sqf"
    BRC = "brc"
    IFS = "ifs"
    ISO22000 = "iso22000"
    ORGANIC = "organic"
    NON_GMO = "non_gmo"
    ALLERGEN_FREE = "allergen_free"
    KOSHER = "kosher"
    HALAL = "halal"


class ComplianceStatus(enum.Enum):
    """Compliance status for food safety requirements"""
    COMPLIANT = "compliant"
    NON_COMPLIANT = "non_compliant"
    PENDING_REVIEW = "pending_review"
    EXPIRED = "expired"
    WARNING = "warning"


class FoodSafetyAlertType(enum.Enum):
    """Types of food safety alerts"""
    TEMPERATURE_VIOLATION = "temperature_violation"
    EXPIRATION_WARNING = "expiration_warning"
    EXPIRED_PRODUCT = "expired_product"
    CONTAMINATION_RISK = "contamination_risk"
    ALLERGEN_CROSS_CONTAMINATION = "allergen_cross_contamination"
    STORAGE_VIOLATION = "storage_violation"
    QUALITY_DEGRADATION = "quality_degradation"
    RECALL_NOTICE = "recall_notice"
    CERTIFICATION_EXPIRY = "certification_expiry"
    SUPPLIER_COMPLIANCE_ISSUE = "supplier_compliance_issue"
class FoodSafetyCompliance(Base):
    """Food safety compliance tracking for ingredients and products"""
    __tablename__ = "food_safety_compliance"

    # Primary identification
    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    tenant_id = Column(UUID(as_uuid=True), nullable=False, index=True)
    ingredient_id = Column(UUID(as_uuid=True), ForeignKey("ingredients.id"), nullable=False, index=True)

    # Compliance standard
    standard = Column(SQLEnum(FoodSafetyStandard), nullable=False, index=True)
    compliance_status = Column(SQLEnum(ComplianceStatus), nullable=False, default=ComplianceStatus.PENDING_REVIEW)

    # Certification details
    certification_number = Column(String(100), nullable=True)
    certifying_body = Column(String(200), nullable=True)
    certification_date = Column(DateTime(timezone=True), nullable=True)
    expiration_date = Column(DateTime(timezone=True), nullable=True, index=True)

    # Compliance requirements
    requirements = Column(JSONB, nullable=True)  # Specific requirements for this standard
    compliance_notes = Column(Text, nullable=True)
    documentation_url = Column(String(500), nullable=True)

    # Audit information
    last_audit_date = Column(DateTime(timezone=True), nullable=True)
    next_audit_date = Column(DateTime(timezone=True), nullable=True, index=True)
    auditor_name = Column(String(200), nullable=True)
    audit_score = Column(Float, nullable=True)  # 0-100 score

    # Risk assessment
    risk_level = Column(String(20), nullable=False, default="medium")  # low, medium, high, critical
    risk_factors = Column(JSONB, nullable=True)  # List of identified risk factors
    mitigation_measures = Column(JSONB, nullable=True)  # Implemented mitigation measures

    # Status tracking
    is_active = Column(Boolean, nullable=False, default=True)
    requires_monitoring = Column(Boolean, nullable=False, default=True)
    monitoring_frequency_days = Column(Integer, nullable=True)  # How often to check compliance

    # Audit fields
    created_at = Column(DateTime(timezone=True), server_default=func.now(), nullable=False)
    updated_at = Column(DateTime(timezone=True), server_default=func.now(), onupdate=func.now(), nullable=False)
    created_by = Column(UUID(as_uuid=True), nullable=True)
    updated_by = Column(UUID(as_uuid=True), nullable=True)

    def to_dict(self) -> Dict[str, Any]:
        """Convert model to dictionary for API responses"""
        return {
            'id': str(self.id),
            'tenant_id': str(self.tenant_id),
            'ingredient_id': str(self.ingredient_id),
            'standard': self.standard.value if self.standard else None,
            'compliance_status': self.compliance_status.value if self.compliance_status else None,
            'certification_number': self.certification_number,
            'certifying_body': self.certifying_body,
            'certification_date': self.certification_date.isoformat() if self.certification_date else None,
            'expiration_date': self.expiration_date.isoformat() if self.expiration_date else None,
            'requirements': self.requirements,
            'compliance_notes': self.compliance_notes,
            'documentation_url': self.documentation_url,
            'last_audit_date': self.last_audit_date.isoformat() if self.last_audit_date else None,
            'next_audit_date': self.next_audit_date.isoformat() if self.next_audit_date else None,
            'auditor_name': self.auditor_name,
            'audit_score': self.audit_score,
            'risk_level': self.risk_level,
            'risk_factors': self.risk_factors,
            'mitigation_measures': self.mitigation_measures,
            'is_active': self.is_active,
            'requires_monitoring': self.requires_monitoring,
            'monitoring_frequency_days': self.monitoring_frequency_days,
            'created_at': self.created_at.isoformat() if self.created_at else None,
            'updated_at': self.updated_at.isoformat() if self.updated_at else None,
            'created_by': str(self.created_by) if self.created_by else None,
            'updated_by': str(self.updated_by) if self.updated_by else None,
        }
class TemperatureLog(Base):
    """Temperature monitoring logs for storage areas"""
    __tablename__ = "temperature_logs"

    # Primary identification
    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    tenant_id = Column(UUID(as_uuid=True), nullable=False, index=True)

    # Location information
    storage_location = Column(String(100), nullable=False, index=True)
    warehouse_zone = Column(String(50), nullable=True)
    equipment_id = Column(String(100), nullable=True)  # Freezer/refrigerator ID

    # Temperature readings
    temperature_celsius = Column(Float, nullable=False)
    humidity_percentage = Column(Float, nullable=True)
    target_temperature_min = Column(Float, nullable=True)
    target_temperature_max = Column(Float, nullable=True)

    # Status and alerts
    is_within_range = Column(Boolean, nullable=False, default=True)
    alert_triggered = Column(Boolean, nullable=False, default=False)
    deviation_minutes = Column(Integer, nullable=True)  # How long outside range

    # Measurement details
    measurement_method = Column(String(50), nullable=False, default="manual")  # manual, automatic, sensor
    device_id = Column(String(100), nullable=True)
    calibration_date = Column(DateTime(timezone=True), nullable=True)

    # Timestamp
    recorded_at = Column(DateTime(timezone=True), server_default=func.now(), nullable=False, index=True)

    # Audit fields
    created_at = Column(DateTime(timezone=True), server_default=func.now(), nullable=False)
    recorded_by = Column(UUID(as_uuid=True), nullable=True)

    def to_dict(self) -> Dict[str, Any]:
        """Convert model to dictionary for API responses"""
        return {
            'id': str(self.id),
            'tenant_id': str(self.tenant_id),
            'storage_location': self.storage_location,
            'warehouse_zone': self.warehouse_zone,
            'equipment_id': self.equipment_id,
            'temperature_celsius': self.temperature_celsius,
            'humidity_percentage': self.humidity_percentage,
            'target_temperature_min': self.target_temperature_min,
            'target_temperature_max': self.target_temperature_max,
            'is_within_range': self.is_within_range,
            'alert_triggered': self.alert_triggered,
            'deviation_minutes': self.deviation_minutes,
            'measurement_method': self.measurement_method,
            'device_id': self.device_id,
            'calibration_date': self.calibration_date.isoformat() if self.calibration_date else None,
            'recorded_at': self.recorded_at.isoformat() if self.recorded_at else None,
            'created_at': self.created_at.isoformat() if self.created_at else None,
            'recorded_by': str(self.recorded_by) if self.recorded_by else None,
        }
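# Usage sketch (editorial, hypothetical values — not from the original source):
# the model stores both the reading and its allowed band, so the range check is
# computed by the caller before insert, e.g.:
#
#     reading = TemperatureLog(
#         tenant_id=tenant_id,
#         storage_location='walk-in-fridge-1',
#         temperature_celsius=6.2,
#         target_temperature_min=1.0,
#         target_temperature_max=5.0,
#     )
#     reading.is_within_range = (
#         reading.target_temperature_min <= reading.temperature_celsius
#         <= reading.target_temperature_max
#     )
#     reading.alert_triggered = not reading.is_within_range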
class FoodSafetyAlert(Base):
    """Food safety alerts and notifications"""
    __tablename__ = "food_safety_alerts"

    # Primary identification
    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    tenant_id = Column(UUID(as_uuid=True), nullable=False, index=True)
    alert_code = Column(String(50), nullable=False, index=True)

    # Alert classification
    alert_type = Column(SQLEnum(FoodSafetyAlertType), nullable=False, index=True)
    severity = Column(String(20), nullable=False, default="medium", index=True)  # low, medium, high, critical
    risk_level = Column(String(20), nullable=False, default="medium")

    # Source information
    source_entity_type = Column(String(50), nullable=False)  # ingredient, stock, temperature_log, compliance
    source_entity_id = Column(UUID(as_uuid=True), nullable=False, index=True)
    ingredient_id = Column(UUID(as_uuid=True), ForeignKey("ingredients.id"), nullable=True, index=True)
    stock_id = Column(UUID(as_uuid=True), ForeignKey("stock.id"), nullable=True, index=True)

    # Alert content
    title = Column(String(200), nullable=False)
    description = Column(Text, nullable=False)
    detailed_message = Column(Text, nullable=True)

    # Regulatory and compliance context
    regulatory_requirement = Column(String(100), nullable=True)
    compliance_standard = Column(SQLEnum(FoodSafetyStandard), nullable=True)
    regulatory_action_required = Column(Boolean, nullable=False, default=False)

    # Alert conditions and triggers
    trigger_condition = Column(String(200), nullable=True)
    threshold_value = Column(Numeric(15, 4), nullable=True)
    actual_value = Column(Numeric(15, 4), nullable=True)

    # Context data
    alert_data = Column(JSONB, nullable=True)  # Additional context-specific data
    environmental_factors = Column(JSONB, nullable=True)  # Temperature, humidity, etc.
    affected_products = Column(JSONB, nullable=True)  # List of affected product IDs

    # Risk assessment
    public_health_risk = Column(Boolean, nullable=False, default=False)
    business_impact = Column(Text, nullable=True)
    estimated_loss = Column(Numeric(12, 2), nullable=True)

    # Alert status and lifecycle
    status = Column(String(50), nullable=False, default="active", index=True)
    # Status values: active, acknowledged, investigating, resolved, dismissed, escalated

    alert_state = Column(String(50), nullable=False, default="new")  # new, escalated, recurring

    # Response and resolution
    immediate_actions_taken = Column(JSONB, nullable=True)  # Actions taken immediately
    investigation_notes = Column(Text, nullable=True)
    resolution_action = Column(String(200), nullable=True)
    resolution_notes = Column(Text, nullable=True)
    corrective_actions = Column(JSONB, nullable=True)  # List of corrective actions
    preventive_measures = Column(JSONB, nullable=True)  # Preventive measures implemented

    # Timing and escalation
    first_occurred_at = Column(DateTime(timezone=True), nullable=False, index=True)
    last_occurred_at = Column(DateTime(timezone=True), nullable=False)
    acknowledged_at = Column(DateTime(timezone=True), nullable=True)
    resolved_at = Column(DateTime(timezone=True), nullable=True)
    escalation_deadline = Column(DateTime(timezone=True), nullable=True)

    # Occurrence tracking
    occurrence_count = Column(Integer, nullable=False, default=1)
    is_recurring = Column(Boolean, nullable=False, default=False)
    recurrence_pattern = Column(String(100), nullable=True)

    # Responsibility and assignment
    assigned_to = Column(UUID(as_uuid=True), nullable=True)
    assigned_role = Column(String(50), nullable=True)  # food_safety_manager, quality_assurance, etc.
    escalated_to = Column(UUID(as_uuid=True), nullable=True)
    escalation_level = Column(Integer, nullable=False, default=0)

    # Notification tracking
    notification_sent = Column(Boolean, nullable=False, default=False)
    notification_methods = Column(JSONB, nullable=True)  # [email, sms, whatsapp, dashboard]
    notification_recipients = Column(JSONB, nullable=True)  # List of recipients
    regulatory_notification_required = Column(Boolean, nullable=False, default=False)
    regulatory_notification_sent = Column(Boolean, nullable=False, default=False)

    # Documentation and audit trail
    documentation = Column(JSONB, nullable=True)  # Links to documentation, photos, etc.
    audit_trail = Column(JSONB, nullable=True)  # Changes and actions taken
    external_reference = Column(String(100), nullable=True)  # External system reference

    # Performance tracking
    detection_time = Column(DateTime(timezone=True), nullable=True)  # When issue was detected
    response_time_minutes = Column(Integer, nullable=True)  # Time to acknowledge
    resolution_time_minutes = Column(Integer, nullable=True)  # Time to resolve

    # Quality and feedback
    alert_accuracy = Column(Boolean, nullable=True)  # Was this a valid alert?
    false_positive = Column(Boolean, nullable=False, default=False)
    feedback_notes = Column(Text, nullable=True)

    # Audit fields
    created_at = Column(DateTime(timezone=True), server_default=func.now(), nullable=False)
    updated_at = Column(DateTime(timezone=True), server_default=func.now(), onupdate=func.now(), nullable=False)
    created_by = Column(UUID(as_uuid=True), nullable=True)
    updated_by = Column(UUID(as_uuid=True), nullable=True)

    def to_dict(self) -> Dict[str, Any]:
        """Convert model to dictionary for API responses"""
        return {
            'id': str(self.id),
            'tenant_id': str(self.tenant_id),
            'alert_code': self.alert_code,
            'alert_type': self.alert_type.value if self.alert_type else None,
            'severity': self.severity,
            'risk_level': self.risk_level,
            'source_entity_type': self.source_entity_type,
            'source_entity_id': str(self.source_entity_id),
            'ingredient_id': str(self.ingredient_id) if self.ingredient_id else None,
            'stock_id': str(self.stock_id) if self.stock_id else None,
            'title': self.title,
            'description': self.description,
            'detailed_message': self.detailed_message,
            'regulatory_requirement': self.regulatory_requirement,
            'compliance_standard': self.compliance_standard.value if self.compliance_standard else None,
            'regulatory_action_required': self.regulatory_action_required,
            'trigger_condition': self.trigger_condition,
            # Compare against None so a legitimate zero value is not serialized as null
            'threshold_value': float(self.threshold_value) if self.threshold_value is not None else None,
            'actual_value': float(self.actual_value) if self.actual_value is not None else None,
            'alert_data': self.alert_data,
            'environmental_factors': self.environmental_factors,
            'affected_products': self.affected_products,
            'public_health_risk': self.public_health_risk,
            'business_impact': self.business_impact,
            'estimated_loss': float(self.estimated_loss) if self.estimated_loss is not None else None,
            'status': self.status,
            'alert_state': self.alert_state,
            'immediate_actions_taken': self.immediate_actions_taken,
            'investigation_notes': self.investigation_notes,
            'resolution_action': self.resolution_action,
            'resolution_notes': self.resolution_notes,
            'corrective_actions': self.corrective_actions,
            'preventive_measures': self.preventive_measures,
            'first_occurred_at': self.first_occurred_at.isoformat() if self.first_occurred_at else None,
            'last_occurred_at': self.last_occurred_at.isoformat() if self.last_occurred_at else None,
            'acknowledged_at': self.acknowledged_at.isoformat() if self.acknowledged_at else None,
            'resolved_at': self.resolved_at.isoformat() if self.resolved_at else None,
            'escalation_deadline': self.escalation_deadline.isoformat() if self.escalation_deadline else None,
            'occurrence_count': self.occurrence_count,
            'is_recurring': self.is_recurring,
            'recurrence_pattern': self.recurrence_pattern,
            'assigned_to': str(self.assigned_to) if self.assigned_to else None,
            'assigned_role': self.assigned_role,
            'escalated_to': str(self.escalated_to) if self.escalated_to else None,
            'escalation_level': self.escalation_level,
            'notification_sent': self.notification_sent,
            'notification_methods': self.notification_methods,
            'notification_recipients': self.notification_recipients,
            'regulatory_notification_required': self.regulatory_notification_required,
            'regulatory_notification_sent': self.regulatory_notification_sent,
            'documentation': self.documentation,
            'audit_trail': self.audit_trail,
            'external_reference': self.external_reference,
            'detection_time': self.detection_time.isoformat() if self.detection_time else None,
            'response_time_minutes': self.response_time_minutes,
            'resolution_time_minutes': self.resolution_time_minutes,
            'alert_accuracy': self.alert_accuracy,
            'false_positive': self.false_positive,
            'feedback_notes': self.feedback_notes,
            'created_at': self.created_at.isoformat() if self.created_at else None,
            'updated_at': self.updated_at.isoformat() if self.updated_at else None,
            'created_by': str(self.created_by) if self.created_by else None,
            'updated_by': str(self.updated_by) if self.updated_by else None,
        }
564
services/inventory/app/models/inventory.py
Normal file
@@ -0,0 +1,564 @@
|
||||
# services/inventory/app/models/inventory.py
"""
Inventory management models for Inventory Service
Comprehensive inventory tracking, ingredient management, and supplier integration
"""

from sqlalchemy import Column, String, DateTime, Float, Integer, Text, Index, Boolean, Numeric, ForeignKey, Enum as SQLEnum
from sqlalchemy.dialects.postgresql import UUID, JSONB
from sqlalchemy.orm import relationship
import uuid
import enum
from datetime import datetime, timezone
from typing import Dict, Any, Optional

from shared.database.base import Base

class UnitOfMeasure(enum.Enum):
    """Standard units of measure for ingredients"""
    KILOGRAMS = "KILOGRAMS"
    GRAMS = "GRAMS"
    LITERS = "LITERS"
    MILLILITERS = "MILLILITERS"
    UNITS = "UNITS"
    PIECES = "PIECES"
    PACKAGES = "PACKAGES"
    BAGS = "BAGS"
    BOXES = "BOXES"


class IngredientCategory(enum.Enum):
    """Bakery ingredient categories"""
    FLOUR = "FLOUR"
    YEAST = "YEAST"
    DAIRY = "DAIRY"
    EGGS = "EGGS"
    SUGAR = "SUGAR"
    FATS = "FATS"
    SALT = "SALT"
    SPICES = "SPICES"
    ADDITIVES = "ADDITIVES"
    PACKAGING = "PACKAGING"
    CLEANING = "CLEANING"
    OTHER = "OTHER"


class ProductCategory(enum.Enum):
    """Finished bakery product categories for retail/distribution model"""
    BREAD = "BREAD"
    CROISSANTS = "CROISSANTS"
    PASTRIES = "PASTRIES"
    CAKES = "CAKES"
    COOKIES = "COOKIES"
    MUFFINS = "MUFFINS"
    SANDWICHES = "SANDWICHES"
    SEASONAL = "SEASONAL"
    BEVERAGES = "BEVERAGES"
    OTHER_PRODUCTS = "OTHER_PRODUCTS"


class ProductType(enum.Enum):
    """Type of product in inventory"""
    INGREDIENT = "INGREDIENT"  # Raw materials (flour, yeast, etc.)
    FINISHED_PRODUCT = "FINISHED_PRODUCT"  # Ready-to-sell items (bread, croissants, etc.)


class ProductionStage(enum.Enum):
    """Production stages for bakery products"""
    RAW_INGREDIENT = "raw_ingredient"  # Basic ingredients (flour, yeast)
    PAR_BAKED = "par_baked"  # Pre-baked items needing final baking
    FULLY_BAKED = "fully_baked"  # Completed products ready for sale
    PREPARED_DOUGH = "prepared_dough"  # Prepared but unbaked dough
    FROZEN_PRODUCT = "frozen_product"  # Frozen intermediate products


class StockMovementType(enum.Enum):
    """Types of inventory movements"""
    PURCHASE = "PURCHASE"
    PRODUCTION_USE = "PRODUCTION_USE"
    TRANSFORMATION = "TRANSFORMATION"  # Converting between production stages
    ADJUSTMENT = "ADJUSTMENT"
    WASTE = "WASTE"
    TRANSFER = "TRANSFER"
    RETURN = "RETURN"
    INITIAL_STOCK = "INITIAL_STOCK"

class Ingredient(Base):
    """Master catalog for ingredients and finished products"""
    __tablename__ = "ingredients"

    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    tenant_id = Column(UUID(as_uuid=True), nullable=False, index=True)

    # Product identification
    name = Column(String(255), nullable=False, index=True)
    sku = Column(String(100), nullable=True, index=True)
    barcode = Column(String(50), nullable=True, index=True)

    # Product type and categories
    product_type = Column(SQLEnum(ProductType), nullable=False, default=ProductType.INGREDIENT, index=True)
    ingredient_category = Column(SQLEnum(IngredientCategory), nullable=True, index=True)  # For ingredients
    product_category = Column(SQLEnum(ProductCategory), nullable=True, index=True)  # For finished products
    subcategory = Column(String(100), nullable=True)

    # Product details
    description = Column(Text, nullable=True)
    brand = Column(String(100), nullable=True)  # Brand or central baker name
    unit_of_measure = Column(SQLEnum(UnitOfMeasure), nullable=False)
    package_size = Column(Float, nullable=True)  # Size per package/unit

    # Pricing and costs
    average_cost = Column(Numeric(10, 2), nullable=True)
    last_purchase_price = Column(Numeric(10, 2), nullable=True)
    standard_cost = Column(Numeric(10, 2), nullable=True)

    # Stock management - optional to simplify onboarding;
    # these can be configured later based on actual usage patterns
    low_stock_threshold = Column(Float, nullable=True, default=None)
    reorder_point = Column(Float, nullable=True, default=None)
    reorder_quantity = Column(Float, nullable=True, default=None)
    max_stock_level = Column(Float, nullable=True)

    # Shelf life (critical for finished products) - default values only
    shelf_life_days = Column(Integer, nullable=True)  # Default shelf life - actual per batch
    display_life_hours = Column(Integer, nullable=True)  # How long the product can be displayed (for fresh products)
    best_before_hours = Column(Integer, nullable=True)  # Hours until best before (for same-day products)
    storage_instructions = Column(Text, nullable=True)

    # Finished product specific fields
    central_baker_product_code = Column(String(100), nullable=True)  # Central baker's product code
    delivery_days = Column(String(20), nullable=True)  # Days of week delivered (e.g., "Mon,Wed,Fri")
    minimum_order_quantity = Column(Float, nullable=True)  # Minimum order from central baker
    pack_size = Column(Integer, nullable=True)  # How many pieces per pack

    # Status
    is_active = Column(Boolean, default=True)
    is_perishable = Column(Boolean, default=False)
    allergen_info = Column(JSONB, nullable=True)  # JSON array of allergens
    nutritional_info = Column(JSONB, nullable=True)  # Nutritional information for finished products

    # Local production support (for procurement service integration)
    produced_locally = Column(Boolean, default=False, nullable=False)  # If true, ingredient is produced in-house
    recipe_id = Column(UUID(as_uuid=True), nullable=True)  # Links to recipe for BOM explosion

    # Audit fields
    created_at = Column(DateTime(timezone=True), default=lambda: datetime.now(timezone.utc))
    updated_at = Column(DateTime(timezone=True),
                        default=lambda: datetime.now(timezone.utc),
                        onupdate=lambda: datetime.now(timezone.utc))
    created_by = Column(UUID(as_uuid=True), nullable=True)

    # Relationships
    stock_items = relationship("Stock", back_populates="ingredient", cascade="all, delete-orphan")
    movement_items = relationship("StockMovement", back_populates="ingredient", cascade="all, delete-orphan")

    __table_args__ = (
        Index('idx_ingredients_tenant_name', 'tenant_id', 'name', unique=True),
        Index('idx_ingredients_tenant_sku', 'tenant_id', 'sku'),
        Index('idx_ingredients_barcode', 'barcode'),
        Index('idx_ingredients_product_type', 'tenant_id', 'product_type', 'is_active'),
        Index('idx_ingredients_ingredient_category', 'tenant_id', 'ingredient_category', 'is_active'),
        Index('idx_ingredients_product_category', 'tenant_id', 'product_category', 'is_active'),
        Index('idx_ingredients_stock_levels', 'tenant_id', 'low_stock_threshold', 'reorder_point'),
    )

    def to_dict(self) -> Dict[str, Any]:
        """Convert model to dictionary for API responses"""
        # Map to response schema format - use the category matching the product type
        if self.product_type == ProductType.FINISHED_PRODUCT and self.product_category:
            category = self.product_category.value
        elif self.product_type == ProductType.INGREDIENT and self.ingredient_category:
            category = self.ingredient_category.value
        elif self.ingredient_category and self.ingredient_category != IngredientCategory.OTHER:
            # Prefer a specific ingredient_category over a generic fallback
            category = self.ingredient_category.value
        elif self.product_category:
            # Fall back to product_category if available
            category = self.product_category.value
        else:
            # Final fallback
            category = "other"

        return {
            'id': str(self.id),
            'tenant_id': str(self.tenant_id),
            'name': self.name,
            'sku': self.sku,
            'barcode': self.barcode,
            'product_type': self.product_type.value if self.product_type else None,
            'category': category,  # Map to what IngredientResponse expects
            'ingredient_category': self.ingredient_category.value if self.ingredient_category else None,
            'product_category': self.product_category.value if self.product_category else None,
            'subcategory': self.subcategory,
            'description': self.description,
            'brand': self.brand,
            'unit_of_measure': self.unit_of_measure.value if self.unit_of_measure else None,
            'package_size': self.package_size,
            'average_cost': float(self.average_cost) if self.average_cost is not None else None,
            'last_purchase_price': float(self.last_purchase_price) if self.last_purchase_price is not None else None,
            'standard_cost': float(self.standard_cost) if self.standard_cost is not None else None,
            'low_stock_threshold': self.low_stock_threshold,
            'reorder_point': self.reorder_point,
            'reorder_quantity': self.reorder_quantity,
            'max_stock_level': self.max_stock_level,
            'shelf_life_days': self.shelf_life_days,
            'display_life_hours': self.display_life_hours,
            'best_before_hours': self.best_before_hours,
            'storage_instructions': self.storage_instructions,
            'central_baker_product_code': self.central_baker_product_code,
            'delivery_days': self.delivery_days,
            'minimum_order_quantity': self.minimum_order_quantity,
            'pack_size': self.pack_size,
            'is_active': self.is_active if self.is_active is not None else True,
            'is_perishable': self.is_perishable if self.is_perishable is not None else False,
            'allergen_info': self.allergen_info,
            'nutritional_info': self.nutritional_info,
            # Local production support
            'produced_locally': self.produced_locally if self.produced_locally is not None else False,
            'recipe_id': str(self.recipe_id) if self.recipe_id else None,
            'created_at': self.created_at.isoformat() if self.created_at else None,
            'updated_at': self.updated_at.isoformat() if self.updated_at else datetime.now(timezone.utc).isoformat(),
            'created_by': str(self.created_by) if self.created_by else None,
        }

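# Usage sketch (illustrative only, not part of the committed API): the category
# mapping in Ingredient.to_dict() prefers the category that matches
# product_type, so a finished product that also carries an ingredient_category
# still reports its product_category.
def _category_mapping_example() -> str:
    croissant = Ingredient(
        tenant_id=uuid.uuid4(),
        name="Butter Croissant",
        product_type=ProductType.FINISHED_PRODUCT,
        product_category=ProductCategory.CROISSANTS,
        ingredient_category=IngredientCategory.OTHER,
        unit_of_measure=UnitOfMeasure.PIECES,
    )
    return croissant.to_dict()['category']  # -> "CROISSANTS"
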
class Stock(Base):
    """Current stock levels and batch tracking"""
    __tablename__ = "stock"

    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    tenant_id = Column(UUID(as_uuid=True), nullable=False, index=True)
    ingredient_id = Column(UUID(as_uuid=True), ForeignKey('ingredients.id'), nullable=False, index=True)

    # Supplier association
    supplier_id = Column(UUID(as_uuid=True), nullable=True, index=True)

    # Stock identification
    batch_number = Column(String(100), nullable=True, index=True)
    lot_number = Column(String(100), nullable=True, index=True)
    supplier_batch_ref = Column(String(100), nullable=True)

    # Production stage tracking
    production_stage = Column(SQLEnum('raw_ingredient', 'par_baked', 'fully_baked', 'prepared_dough', 'frozen_product', name='productionstage', create_type=False), nullable=False, default='raw_ingredient', index=True)
    transformation_reference = Column(String(100), nullable=True, index=True)  # Links related transformations

    # Quantities
    current_quantity = Column(Float, nullable=False, default=0.0)
    reserved_quantity = Column(Float, nullable=False, default=0.0)  # Reserved for production
    available_quantity = Column(Float, nullable=False, default=0.0)  # current - reserved

    # Dates
    received_date = Column(DateTime(timezone=True), nullable=True)
    expiration_date = Column(DateTime(timezone=True), nullable=True, index=True)
    best_before_date = Column(DateTime(timezone=True), nullable=True)

    # Stage-specific expiration tracking
    original_expiration_date = Column(DateTime(timezone=True), nullable=True)  # Original batch expiration (for par-baked)
    transformation_date = Column(DateTime(timezone=True), nullable=True)  # When product was transformed
    final_expiration_date = Column(DateTime(timezone=True), nullable=True)  # Final product expiration after transformation

    # Cost tracking
    unit_cost = Column(Numeric(10, 2), nullable=True)
    total_cost = Column(Numeric(10, 2), nullable=True)

    # Location
    storage_location = Column(String(100), nullable=True)
    warehouse_zone = Column(String(50), nullable=True)
    shelf_position = Column(String(50), nullable=True)

    # Batch-specific storage requirements
    requires_refrigeration = Column(Boolean, default=False)
    requires_freezing = Column(Boolean, default=False)
    storage_temperature_min = Column(Float, nullable=True)  # Celsius
    storage_temperature_max = Column(Float, nullable=True)  # Celsius
    storage_humidity_max = Column(Float, nullable=True)  # Percentage
    shelf_life_days = Column(Integer, nullable=True)  # Batch-specific shelf life
    storage_instructions = Column(Text, nullable=True)  # Batch-specific instructions

    # Status
    is_available = Column(Boolean, default=True)
    is_expired = Column(Boolean, default=False, index=True)
    quality_status = Column(String(20), default="good")  # good, damaged, expired, quarantined

    # Audit fields
    created_at = Column(DateTime(timezone=True), default=lambda: datetime.now(timezone.utc))
    updated_at = Column(DateTime(timezone=True),
                        default=lambda: datetime.now(timezone.utc),
                        onupdate=lambda: datetime.now(timezone.utc))

    # Relationships
    ingredient = relationship("Ingredient", back_populates="stock_items")

    __table_args__ = (
        Index('idx_stock_tenant_ingredient', 'tenant_id', 'ingredient_id'),
        Index('idx_stock_expiration', 'tenant_id', 'expiration_date', 'is_available'),
        Index('idx_stock_batch', 'tenant_id', 'batch_number'),
        Index('idx_stock_low_levels', 'tenant_id', 'current_quantity', 'is_available'),
        Index('idx_stock_quality', 'tenant_id', 'quality_status', 'is_available'),
        Index('idx_stock_production_stage', 'tenant_id', 'production_stage', 'is_available'),
        Index('idx_stock_transformation', 'tenant_id', 'transformation_reference'),
        Index('idx_stock_final_expiration', 'tenant_id', 'final_expiration_date', 'is_available'),
    )

    def to_dict(self) -> Dict[str, Any]:
        """Convert model to dictionary for API responses"""
        return {
            'id': str(self.id),
            'tenant_id': str(self.tenant_id),
            'ingredient_id': str(self.ingredient_id),
            'supplier_id': str(self.supplier_id) if self.supplier_id else None,
            'batch_number': self.batch_number,
            'lot_number': self.lot_number,
            'supplier_batch_ref': self.supplier_batch_ref,
            'production_stage': self.production_stage if self.production_stage else None,
            'transformation_reference': self.transformation_reference,
            'current_quantity': self.current_quantity,
            'reserved_quantity': self.reserved_quantity,
            'available_quantity': self.available_quantity,
            'received_date': self.received_date.isoformat() if self.received_date else None,
            'expiration_date': self.expiration_date.isoformat() if self.expiration_date else None,
            'best_before_date': self.best_before_date.isoformat() if self.best_before_date else None,
            'original_expiration_date': self.original_expiration_date.isoformat() if self.original_expiration_date else None,
            'transformation_date': self.transformation_date.isoformat() if self.transformation_date else None,
            'final_expiration_date': self.final_expiration_date.isoformat() if self.final_expiration_date else None,
            'unit_cost': float(self.unit_cost) if self.unit_cost is not None else None,
            'total_cost': float(self.total_cost) if self.total_cost is not None else None,
            'storage_location': self.storage_location,
            'warehouse_zone': self.warehouse_zone,
            'shelf_position': self.shelf_position,
            'requires_refrigeration': self.requires_refrigeration,
            'requires_freezing': self.requires_freezing,
            'storage_temperature_min': self.storage_temperature_min,
            'storage_temperature_max': self.storage_temperature_max,
            'storage_humidity_max': self.storage_humidity_max,
            'shelf_life_days': self.shelf_life_days,
            'storage_instructions': self.storage_instructions,
            'is_available': self.is_available,
            'is_expired': self.is_expired,
            'quality_status': self.quality_status,
            'created_at': self.created_at.isoformat() if self.created_at else None,
            'updated_at': self.updated_at.isoformat() if self.updated_at else None,
        }

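# Sketch (illustrative; assumes callers maintain the derived column themselves,
# since the model does not recompute it automatically): keep available_quantity
# consistent whenever current or reserved quantities change.
def recompute_availability(stock: Stock) -> None:
    """Recompute the derived available quantity (current minus reserved)."""
    stock.available_quantity = max(stock.current_quantity - stock.reserved_quantity, 0.0)
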
class StockMovement(Base):
    """Track all stock movements for audit trail"""
    __tablename__ = "stock_movements"

    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    tenant_id = Column(UUID(as_uuid=True), nullable=False, index=True)
    ingredient_id = Column(UUID(as_uuid=True), ForeignKey('ingredients.id'), nullable=False, index=True)
    stock_id = Column(UUID(as_uuid=True), ForeignKey('stock.id'), nullable=True, index=True)

    # Movement details
    movement_type = Column(SQLEnum(StockMovementType, name='stockmovementtype', create_type=False), nullable=False, index=True)
    quantity = Column(Float, nullable=False)
    unit_cost = Column(Numeric(10, 2), nullable=True)
    total_cost = Column(Numeric(10, 2), nullable=True)

    # Balance tracking
    quantity_before = Column(Float, nullable=True)
    quantity_after = Column(Float, nullable=True)

    # References
    reference_number = Column(String(100), nullable=True, index=True)  # PO number, production order, etc.
    supplier_id = Column(UUID(as_uuid=True), nullable=True, index=True)

    # Additional details
    notes = Column(Text, nullable=True)
    reason_code = Column(String(50), nullable=True)  # spoilage, damage, theft, etc.

    # Timestamp
    movement_date = Column(DateTime(timezone=True), nullable=False,
                           default=lambda: datetime.now(timezone.utc), index=True)

    # Audit fields
    created_at = Column(DateTime(timezone=True), default=lambda: datetime.now(timezone.utc))
    created_by = Column(UUID(as_uuid=True), nullable=True)

    # Relationships
    ingredient = relationship("Ingredient", back_populates="movement_items")

    __table_args__ = (
        Index('idx_movements_tenant_date', 'tenant_id', 'movement_date'),
        Index('idx_movements_tenant_ingredient', 'tenant_id', 'ingredient_id', 'movement_date'),
        Index('idx_movements_type', 'tenant_id', 'movement_type', 'movement_date'),
        Index('idx_movements_reference', 'reference_number'),
        Index('idx_movements_supplier', 'supplier_id', 'movement_date'),
    )

    def to_dict(self) -> Dict[str, Any]:
        """Convert model to dictionary for API responses"""
        return {
            'id': str(self.id),
            'tenant_id': str(self.tenant_id),
            'ingredient_id': str(self.ingredient_id),
            'stock_id': str(self.stock_id) if self.stock_id else None,
            # Serialize the enum value so the payload stays JSON-safe
            'movement_type': self.movement_type.value if self.movement_type else None,
            'quantity': self.quantity,
            'unit_cost': float(self.unit_cost) if self.unit_cost is not None else None,
            'total_cost': float(self.total_cost) if self.total_cost is not None else None,
            'quantity_before': self.quantity_before,
            'quantity_after': self.quantity_after,
            'reference_number': self.reference_number,
            'supplier_id': str(self.supplier_id) if self.supplier_id else None,
            'notes': self.notes,
            'reason_code': self.reason_code,
            'movement_date': self.movement_date.isoformat() if self.movement_date else None,
            'created_at': self.created_at.isoformat() if self.created_at else None,
            'created_by': str(self.created_by) if self.created_by else None,
        }

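# Sketch of recording a purchase with balance tracking (illustrative; the
# actual service-layer helper is not part of this file). quantity_before and
# quantity_after capture the stock level around the movement for the audit trail.
def build_purchase_movement(stock: Stock, quantity: float, unit_cost: float) -> StockMovement:
    return StockMovement(
        tenant_id=stock.tenant_id,
        ingredient_id=stock.ingredient_id,
        stock_id=stock.id,
        movement_type=StockMovementType.PURCHASE,
        quantity=quantity,
        unit_cost=unit_cost,
        total_cost=quantity * unit_cost,
        quantity_before=stock.current_quantity,
        quantity_after=stock.current_quantity + quantity,
    )
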
class ProductTransformation(Base):
    """Track product transformations (e.g., par-baked to fully baked)"""
    __tablename__ = "product_transformations"

    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    tenant_id = Column(UUID(as_uuid=True), nullable=False, index=True)

    # Transformation details
    transformation_reference = Column(String(100), nullable=False, index=True)
    source_ingredient_id = Column(UUID(as_uuid=True), ForeignKey('ingredients.id'), nullable=False)
    target_ingredient_id = Column(UUID(as_uuid=True), ForeignKey('ingredients.id'), nullable=False)

    # Stage transformation
    source_stage = Column(SQLEnum('raw_ingredient', 'par_baked', 'fully_baked', 'prepared_dough', 'frozen_product', name='productionstage', create_type=False), nullable=False)
    target_stage = Column(SQLEnum('raw_ingredient', 'par_baked', 'fully_baked', 'prepared_dough', 'frozen_product', name='productionstage', create_type=False), nullable=False)

    # Quantities and conversion
    source_quantity = Column(Float, nullable=False)  # Input quantity
    target_quantity = Column(Float, nullable=False)  # Output quantity
    conversion_ratio = Column(Float, nullable=False, default=1.0)  # target/source ratio

    # Expiration logic
    expiration_calculation_method = Column(String(50), nullable=False, default="days_from_transformation")  # days_from_transformation, preserve_original
    expiration_days_offset = Column(Integer, nullable=True)  # Days from transformation date

    # Process tracking
    transformation_date = Column(DateTime(timezone=True), nullable=False, default=lambda: datetime.now(timezone.utc))
    process_notes = Column(Text, nullable=True)
    performed_by = Column(UUID(as_uuid=True), nullable=True)

    # Batch tracking
    source_batch_numbers = Column(Text, nullable=True)  # JSON array of source batch numbers
    target_batch_number = Column(String(100), nullable=True)

    # Status
    is_completed = Column(Boolean, default=True)
    is_reversed = Column(Boolean, default=False)

    # Audit fields
    created_at = Column(DateTime(timezone=True), default=lambda: datetime.now(timezone.utc))
    created_by = Column(UUID(as_uuid=True), nullable=True)

    __table_args__ = (
        Index('idx_transformations_tenant_date', 'tenant_id', 'transformation_date'),
        Index('idx_transformations_reference', 'transformation_reference'),
        Index('idx_transformations_source', 'tenant_id', 'source_ingredient_id'),
        Index('idx_transformations_target', 'tenant_id', 'target_ingredient_id'),
        Index('idx_transformations_stages', 'source_stage', 'target_stage'),
    )

    def to_dict(self) -> Dict[str, Any]:
        """Convert model to dictionary for API responses"""
        return {
            'id': str(self.id),
            'tenant_id': str(self.tenant_id),
            'transformation_reference': self.transformation_reference,
            'source_ingredient_id': str(self.source_ingredient_id),
            'target_ingredient_id': str(self.target_ingredient_id),
            'source_stage': self.source_stage if self.source_stage else None,
            'target_stage': self.target_stage if self.target_stage else None,
            'source_quantity': self.source_quantity,
            'target_quantity': self.target_quantity,
            'conversion_ratio': self.conversion_ratio,
            'expiration_calculation_method': self.expiration_calculation_method,
            'expiration_days_offset': self.expiration_days_offset,
            'transformation_date': self.transformation_date.isoformat() if self.transformation_date else None,
            'process_notes': self.process_notes,
            'performed_by': str(self.performed_by) if self.performed_by else None,
            'source_batch_numbers': self.source_batch_numbers,
            'target_batch_number': self.target_batch_number,
            'is_completed': self.is_completed,
            'is_reversed': self.is_reversed,
            'created_at': self.created_at.isoformat() if self.created_at else None,
            'created_by': str(self.created_by) if self.created_by else None,
        }

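# Sketch of the expiration logic these columns describe (illustrative, assuming
# the service layer interprets expiration_calculation_method this way):
# "days_from_transformation" starts a fresh clock at transformation time,
# while "preserve_original" keeps the source batch's expiration.
from datetime import timedelta

def resolve_final_expiration(t: ProductTransformation, original_expiration: datetime) -> datetime:
    if t.expiration_calculation_method == "days_from_transformation" and t.expiration_days_offset:
        return t.transformation_date + timedelta(days=t.expiration_days_offset)
    return original_expiration
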
class StockAlert(Base):
    """Automated stock alerts for low stock, expiration, etc."""
    __tablename__ = "stock_alerts"

    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    tenant_id = Column(UUID(as_uuid=True), nullable=False, index=True)
    ingredient_id = Column(UUID(as_uuid=True), ForeignKey('ingredients.id'), nullable=False, index=True)
    stock_id = Column(UUID(as_uuid=True), ForeignKey('stock.id'), nullable=True, index=True)

    # Alert details
    alert_type = Column(String(50), nullable=False, index=True)  # low_stock, expiring_soon, expired, reorder
    severity = Column(String(20), nullable=False, default="medium")  # low, medium, high, critical
    title = Column(String(255), nullable=False)
    message = Column(Text, nullable=False)

    # Alert data
    current_quantity = Column(Float, nullable=True)
    threshold_value = Column(Float, nullable=True)
    expiration_date = Column(DateTime(timezone=True), nullable=True)

    # Status
    is_active = Column(Boolean, default=True)
    is_acknowledged = Column(Boolean, default=False)
    acknowledged_by = Column(UUID(as_uuid=True), nullable=True)
    acknowledged_at = Column(DateTime(timezone=True), nullable=True)

    # Resolution
    is_resolved = Column(Boolean, default=False)
    resolved_by = Column(UUID(as_uuid=True), nullable=True)
    resolved_at = Column(DateTime(timezone=True), nullable=True)
    resolution_notes = Column(Text, nullable=True)

    # Audit fields
    created_at = Column(DateTime(timezone=True), default=lambda: datetime.now(timezone.utc))
    updated_at = Column(DateTime(timezone=True),
                        default=lambda: datetime.now(timezone.utc),
                        onupdate=lambda: datetime.now(timezone.utc))

    __table_args__ = (
        Index('idx_alerts_tenant_active', 'tenant_id', 'is_active', 'created_at'),
        Index('idx_alerts_type_severity', 'alert_type', 'severity', 'is_active'),
        Index('idx_alerts_ingredient', 'ingredient_id', 'is_active'),
        Index('idx_alerts_unresolved', 'tenant_id', 'is_resolved', 'is_active'),
    )

    def to_dict(self) -> Dict[str, Any]:
        """Convert model to dictionary for API responses"""
        return {
            'id': str(self.id),
            'tenant_id': str(self.tenant_id),
            'ingredient_id': str(self.ingredient_id),
            'stock_id': str(self.stock_id) if self.stock_id else None,
            'alert_type': self.alert_type,
            'severity': self.severity,
            'title': self.title,
            'message': self.message,
            'current_quantity': self.current_quantity,
            'threshold_value': self.threshold_value,
            'expiration_date': self.expiration_date.isoformat() if self.expiration_date else None,
            'is_active': self.is_active,
            'is_acknowledged': self.is_acknowledged,
            'acknowledged_by': str(self.acknowledged_by) if self.acknowledged_by else None,
            'acknowledged_at': self.acknowledged_at.isoformat() if self.acknowledged_at else None,
            'is_resolved': self.is_resolved,
            'resolved_by': str(self.resolved_by) if self.resolved_by else None,
            'resolved_at': self.resolved_at.isoformat() if self.resolved_at else None,
            'resolution_notes': self.resolution_notes,
            'created_at': self.created_at.isoformat() if self.created_at else None,
            'updated_at': self.updated_at.isoformat() if self.updated_at else None,
        }

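# Lifecycle sketch (illustrative helpers, not part of the committed model):
# an alert is acknowledged first, then resolved; both steps stamp who acted
# and when, matching the status and resolution columns above.
def acknowledge_alert(alert: StockAlert, user_id: uuid.UUID) -> None:
    alert.is_acknowledged = True
    alert.acknowledged_by = user_id
    alert.acknowledged_at = datetime.now(timezone.utc)

def resolve_alert(alert: StockAlert, user_id: uuid.UUID, notes: str) -> None:
    alert.is_resolved = True
    alert.resolved_by = user_id
    alert.resolved_at = datetime.now(timezone.utc)
    alert.resolution_notes = notes
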
233
services/inventory/app/models/stock_receipt.py
Normal file
@@ -0,0 +1,233 @@
"""
|
||||
Stock Receipt Models for Inventory Service
|
||||
Lot-level tracking for deliveries with expiration dates
|
||||
"""
|
||||
|
||||
from sqlalchemy import Column, String, DateTime, Float, Integer, Text, Index, Boolean, Numeric, ForeignKey, Enum as SQLEnum, Date
|
||||
from sqlalchemy.dialects.postgresql import UUID, JSONB
|
||||
from sqlalchemy.orm import relationship
|
||||
import uuid
|
||||
import enum
|
||||
from datetime import datetime, timezone, date
|
||||
from typing import Dict, Any, Optional, List
|
||||
from decimal import Decimal
|
||||
|
||||
from shared.database.base import Base
|
||||
|
||||
|
||||
class ReceiptStatus(enum.Enum):
|
||||
"""Stock receipt status values"""
|
||||
DRAFT = "draft"
|
||||
CONFIRMED = "confirmed"
|
||||
CANCELLED = "cancelled"
|
||||
|
||||
|
||||
class StockReceipt(Base):
    """
    Stock receipt tracking for purchase order deliveries
    Captures lot-level details and expiration dates
    """
    __tablename__ = "stock_receipts"

    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    tenant_id = Column(UUID(as_uuid=True), nullable=False, index=True)

    # Purchase order reference
    po_id = Column(UUID(as_uuid=True), nullable=False, index=True)
    po_number = Column(String(100), nullable=True)  # Denormalized for quick reference

    # Receipt details
    received_at = Column(DateTime(timezone=True), nullable=False, default=lambda: datetime.now(timezone.utc))
    received_by_user_id = Column(UUID(as_uuid=True), nullable=True)

    # Status
    status = Column(
        SQLEnum(ReceiptStatus, name='receiptstatus', create_type=True),
        nullable=False,
        default=ReceiptStatus.DRAFT,
        index=True
    )

    # Supplier information (denormalized)
    supplier_id = Column(UUID(as_uuid=True), nullable=True, index=True)
    supplier_name = Column(String(255), nullable=True)

    # Overall notes
    notes = Column(Text, nullable=True)
    has_discrepancies = Column(Boolean, default=False, nullable=False)

    # Timestamps
    created_at = Column(DateTime(timezone=True), default=lambda: datetime.now(timezone.utc), nullable=False)
    updated_at = Column(DateTime(timezone=True),
                        default=lambda: datetime.now(timezone.utc),
                        onupdate=lambda: datetime.now(timezone.utc))
    confirmed_at = Column(DateTime(timezone=True), nullable=True)

    # Relationships
    line_items = relationship("StockReceiptLineItem", back_populates="receipt", cascade="all, delete-orphan")

    __table_args__ = (
        Index('idx_stock_receipts_tenant_status', 'tenant_id', 'status'),
        Index('idx_stock_receipts_po', 'po_id'),
        Index('idx_stock_receipts_received_at', 'tenant_id', 'received_at'),
        Index('idx_stock_receipts_supplier', 'supplier_id', 'received_at'),
    )

    def to_dict(self) -> Dict[str, Any]:
        """Convert model to dictionary for API responses"""
        return {
            'id': str(self.id),
            'tenant_id': str(self.tenant_id),
            'po_id': str(self.po_id),
            'po_number': self.po_number,
            'received_at': self.received_at.isoformat() if self.received_at else None,
            'received_by_user_id': str(self.received_by_user_id) if self.received_by_user_id else None,
            'status': self.status.value if isinstance(self.status, enum.Enum) else self.status,
            'supplier_id': str(self.supplier_id) if self.supplier_id else None,
            'supplier_name': self.supplier_name,
            'notes': self.notes,
            'has_discrepancies': self.has_discrepancies,
            'created_at': self.created_at.isoformat() if self.created_at else None,
            'updated_at': self.updated_at.isoformat() if self.updated_at else None,
            'confirmed_at': self.confirmed_at.isoformat() if self.confirmed_at else None,
            'line_items': [item.to_dict() for item in self.line_items] if self.line_items else [],
        }

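# Illustrative sketch only (this helper is not part of the committed model):
# confirming a draft receipt stamps the confirmation time, flags line items
# whose actual quantity differs from the expected one, and rolls the result
# up to the receipt-level has_discrepancies flag.
def confirm_receipt(receipt: StockReceipt) -> None:
    for line in receipt.line_items:
        line.has_discrepancy = line.actual_quantity != line.expected_quantity
    receipt.has_discrepancies = any(line.has_discrepancy for line in receipt.line_items)
    receipt.status = ReceiptStatus.CONFIRMED
    receipt.confirmed_at = datetime.now(timezone.utc)
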
class StockReceiptLineItem(Base):
    """
    Individual line items in a stock receipt
    One line item per product, with multiple lots possible
    """
    __tablename__ = "stock_receipt_line_items"

    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    tenant_id = Column(UUID(as_uuid=True), nullable=False, index=True)
    receipt_id = Column(UUID(as_uuid=True), ForeignKey('stock_receipts.id', ondelete='CASCADE'), nullable=False, index=True)

    # Product information
    ingredient_id = Column(UUID(as_uuid=True), nullable=False, index=True)
    ingredient_name = Column(String(255), nullable=True)  # Denormalized

    # PO line reference
    po_line_id = Column(UUID(as_uuid=True), nullable=True)

    # Quantities
    expected_quantity = Column(Numeric(10, 2), nullable=False)
    actual_quantity = Column(Numeric(10, 2), nullable=False)
    unit_of_measure = Column(String(20), nullable=False)

    # Discrepancy tracking
    has_discrepancy = Column(Boolean, default=False, nullable=False)
    discrepancy_reason = Column(Text, nullable=True)

    # Cost tracking
    unit_cost = Column(Numeric(10, 2), nullable=True)
    total_cost = Column(Numeric(10, 2), nullable=True)

    # Timestamps
    created_at = Column(DateTime(timezone=True), default=lambda: datetime.now(timezone.utc))
    updated_at = Column(DateTime(timezone=True),
                        default=lambda: datetime.now(timezone.utc),
                        onupdate=lambda: datetime.now(timezone.utc))

    # Relationships
    receipt = relationship("StockReceipt", back_populates="line_items")
    lots = relationship("StockLot", back_populates="line_item", cascade="all, delete-orphan")

    __table_args__ = (
        Index('idx_line_items_receipt', 'receipt_id'),
        Index('idx_line_items_ingredient', 'ingredient_id'),
        Index('idx_line_items_discrepancy', 'tenant_id', 'has_discrepancy'),
    )

    def to_dict(self) -> Dict[str, Any]:
        """Convert model to dictionary for API responses"""
        return {
            'id': str(self.id),
            'tenant_id': str(self.tenant_id),
            'receipt_id': str(self.receipt_id),
            'ingredient_id': str(self.ingredient_id),
            'ingredient_name': self.ingredient_name,
            'po_line_id': str(self.po_line_id) if self.po_line_id else None,
            # Guard on None so a legitimate zero quantity survives serialization
            'expected_quantity': float(self.expected_quantity) if self.expected_quantity is not None else None,
            'actual_quantity': float(self.actual_quantity) if self.actual_quantity is not None else None,
            'unit_of_measure': self.unit_of_measure,
            'has_discrepancy': self.has_discrepancy,
            'discrepancy_reason': self.discrepancy_reason,
            'unit_cost': float(self.unit_cost) if self.unit_cost is not None else None,
            'total_cost': float(self.total_cost) if self.total_cost is not None else None,
            'created_at': self.created_at.isoformat() if self.created_at else None,
            'updated_at': self.updated_at.isoformat() if self.updated_at else None,
            'lots': [lot.to_dict() for lot in self.lots] if self.lots else [],
        }

class StockLot(Base):
    """
    Individual lots within a line item
    Critical for tracking expiration dates when deliveries are split
    """
    __tablename__ = "stock_lots"

    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    tenant_id = Column(UUID(as_uuid=True), nullable=False, index=True)
    line_item_id = Column(UUID(as_uuid=True), ForeignKey('stock_receipt_line_items.id', ondelete='CASCADE'), nullable=False, index=True)

    # Links to stock table (created on confirmation)
    stock_id = Column(UUID(as_uuid=True), nullable=True, index=True)

    # Lot identification
    lot_number = Column(String(100), nullable=True)
    supplier_lot_number = Column(String(100), nullable=True)

    # Quantity for this lot
    quantity = Column(Numeric(10, 2), nullable=False)
    unit_of_measure = Column(String(20), nullable=False)

    # Critical: Expiration tracking
    expiration_date = Column(Date, nullable=False, index=True)
    best_before_date = Column(Date, nullable=True)

    # Storage location
    warehouse_location = Column(String(100), nullable=True)
    storage_zone = Column(String(50), nullable=True)

    # Quality notes
    quality_notes = Column(Text, nullable=True)

    # Timestamps
    created_at = Column(DateTime(timezone=True), default=lambda: datetime.now(timezone.utc))
    updated_at = Column(DateTime(timezone=True),
                        default=lambda: datetime.now(timezone.utc),
                        onupdate=lambda: datetime.now(timezone.utc))

    # Relationships
    line_item = relationship("StockReceiptLineItem", back_populates="lots")

    __table_args__ = (
        Index('idx_lots_line_item', 'line_item_id'),
        Index('idx_lots_stock', 'stock_id'),
        Index('idx_lots_expiration', 'tenant_id', 'expiration_date'),
        Index('idx_lots_lot_number', 'tenant_id', 'lot_number'),
    )

    def to_dict(self) -> Dict[str, Any]:
        """Convert model to dictionary for API responses"""
        return {
            'id': str(self.id),
            'tenant_id': str(self.tenant_id),
            'line_item_id': str(self.line_item_id),
            'stock_id': str(self.stock_id) if self.stock_id else None,
            'lot_number': self.lot_number,
            'supplier_lot_number': self.supplier_lot_number,
            'quantity': float(self.quantity) if self.quantity is not None else None,
            'unit_of_measure': self.unit_of_measure,
            'expiration_date': self.expiration_date.isoformat() if self.expiration_date else None,
            'best_before_date': self.best_before_date.isoformat() if self.best_before_date else None,
            'warehouse_location': self.warehouse_location,
            'storage_zone': self.storage_zone,
            'quality_notes': self.quality_notes,
            'created_at': self.created_at.isoformat() if self.created_at else None,
            'updated_at': self.updated_at.isoformat() if self.updated_at else None,
        }

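# Illustrative sketch only: a delivery of one product that arrives with two
# different expiration dates becomes two StockLot rows under a single line
# item, which keeps first-expired-first-out picking accurate.
def split_into_lots(line_item: StockReceiptLineItem, portions) -> List[StockLot]:
    """portions: iterable of (quantity, expiration_date) pairs."""
    return [
        StockLot(
            tenant_id=line_item.tenant_id,
            line_item_id=line_item.id,
            quantity=qty,
            unit_of_measure=line_item.unit_of_measure,
            expiration_date=exp,
        )
        for qty, exp in portions
    ]
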
0
services/inventory/app/repositories/__init__.py
Normal file
464
services/inventory/app/repositories/dashboard_repository.py
Normal file
@@ -0,0 +1,464 @@
# services/inventory/app/repositories/dashboard_repository.py
"""
Dashboard Repository for complex dashboard queries
"""

from typing import List, Optional, Dict, Any
from uuid import UUID
from datetime import datetime, timezone
from decimal import Decimal
from sqlalchemy import text
from sqlalchemy.ext.asyncio import AsyncSession
import structlog

logger = structlog.get_logger()


class DashboardRepository:
    """Repository for dashboard-specific database queries"""

    def __init__(self, session: AsyncSession):
        self.session = session

    async def get_business_model_metrics(self, tenant_id: UUID) -> Dict[str, Any]:
        """Get ingredient metrics for business model detection"""
        try:
            query = text("""
                SELECT
                    COUNT(*) as total_ingredients,
                    COUNT(CASE WHEN product_type::text = 'FINISHED_PRODUCT' THEN 1 END) as finished_products,
                    COUNT(CASE WHEN product_type::text = 'INGREDIENT' THEN 1 END) as raw_ingredients,
                    COUNT(DISTINCT st.supplier_id) as supplier_count,
                    AVG(CASE WHEN s.available_quantity IS NOT NULL THEN s.available_quantity ELSE 0 END) as avg_stock_level
                FROM ingredients i
                LEFT JOIN (
                    SELECT ingredient_id, SUM(available_quantity) as available_quantity
                    FROM stock WHERE tenant_id = :tenant_id GROUP BY ingredient_id
                ) s ON i.id = s.ingredient_id
                LEFT JOIN (
                    SELECT ingredient_id, supplier_id
                    FROM stock WHERE tenant_id = :tenant_id AND supplier_id IS NOT NULL
                    GROUP BY ingredient_id, supplier_id
                ) st ON i.id = st.ingredient_id
                WHERE i.tenant_id = :tenant_id AND i.is_active = true
            """)

            result = await self.session.execute(query, {"tenant_id": tenant_id})
            row = result.fetchone()

            if not row:
                return {
                    "total_ingredients": 0,
                    "finished_products": 0,
                    "raw_ingredients": 0,
                    "supplier_count": 0,
                    "avg_stock_level": 0
                }

            return {
                "total_ingredients": row.total_ingredients,
                "finished_products": row.finished_products,
                "raw_ingredients": row.raw_ingredients,
                "supplier_count": row.supplier_count,
                "avg_stock_level": float(row.avg_stock_level) if row.avg_stock_level else 0
            }

        except Exception as e:
            logger.error("Failed to get business model metrics", error=str(e), tenant_id=str(tenant_id))
            raise

    async def get_stock_by_category(self, tenant_id: UUID) -> Dict[str, Dict[str, Any]]:
        """Get stock breakdown by category"""
        try:
            query = text("""
                SELECT
                    COALESCE(i.ingredient_category::text, i.product_category::text, 'other') as category,
                    COUNT(*) as count,
                    COALESCE(SUM(s.available_quantity * s.unit_cost), 0) as total_value
                FROM ingredients i
                LEFT JOIN (
                    SELECT ingredient_id, SUM(available_quantity) as available_quantity, AVG(unit_cost) as unit_cost
                    FROM stock WHERE tenant_id = :tenant_id GROUP BY ingredient_id
                ) s ON i.id = s.ingredient_id
                WHERE i.tenant_id = :tenant_id AND i.is_active = true
                GROUP BY category
            """)

            result = await self.session.execute(query, {"tenant_id": tenant_id})
            categories = {}

            for row in result.fetchall():
                categories[row.category] = {
                    "count": row.count,
                    "total_value": float(row.total_value)
                }

            return categories

        except Exception as e:
            logger.error("Failed to get stock by category", error=str(e), tenant_id=str(tenant_id))
            raise

    async def get_alerts_by_severity(self, tenant_id: UUID) -> Dict[str, int]:
        """Get active alerts breakdown by severity"""
        try:
            query = text("""
                SELECT severity, COUNT(*) as count
                FROM food_safety_alerts
                WHERE tenant_id = :tenant_id AND status = 'active'
                GROUP BY severity
            """)

            result = await self.session.execute(query, {"tenant_id": tenant_id})
            alerts = {"critical": 0, "high": 0, "medium": 0, "low": 0}

            for row in result.fetchall():
                alerts[row.severity] = row.count

            return alerts

        except Exception as e:
            logger.error("Failed to get alerts by severity", error=str(e), tenant_id=str(tenant_id))
            raise

    async def get_movements_by_type(self, tenant_id: UUID, days: int = 7) -> Dict[str, int]:
        """Get stock movements breakdown by type for recent period"""
        try:
            # Bind the lookback window as a parameter so the `days` argument
            # is honored instead of a hardcoded 7-day interval.
            query = text("""
                SELECT sm.movement_type, COUNT(*) as count
                FROM stock_movements sm
                JOIN ingredients i ON sm.ingredient_id = i.id
                WHERE i.tenant_id = :tenant_id
                AND sm.movement_date > NOW() - make_interval(days => :days)
                GROUP BY sm.movement_type
            """)

            result = await self.session.execute(query, {"tenant_id": tenant_id, "days": days})
            movements = {}

            for row in result.fetchall():
                movements[row.movement_type] = row.count

            return movements

        except Exception as e:
            logger.error("Failed to get movements by type", error=str(e), tenant_id=str(tenant_id))
            raise

    async def get_alert_trend(self, tenant_id: UUID, days: int = 30) -> List[Dict[str, Any]]:
        """Get alert trend over time"""
        try:
            # Pass the window as a bound parameter instead of interpolating it
            # into the SQL string.
            query = text("""
                SELECT
                    DATE(created_at) as alert_date,
                    COUNT(*) as alert_count,
                    COUNT(CASE WHEN severity IN ('high', 'critical') THEN 1 END) as high_severity_count
                FROM food_safety_alerts
                WHERE tenant_id = :tenant_id
                AND created_at > NOW() - make_interval(days => :days)
                GROUP BY DATE(created_at)
                ORDER BY alert_date
            """)

            result = await self.session.execute(query, {"tenant_id": tenant_id, "days": days})

            return [
                {
                    "date": row.alert_date.isoformat(),
                    "total_alerts": row.alert_count,
                    "high_severity_alerts": row.high_severity_count
                }
                for row in result.fetchall()
            ]

        except Exception as e:
            logger.error("Failed to get alert trend", error=str(e), tenant_id=str(tenant_id))
            raise

    async def get_recent_stock_movements(
        self,
        tenant_id: UUID,
        limit: int = 20
    ) -> List[Dict[str, Any]]:
        """Get recent stock movements"""
        try:
            query = text("""
                SELECT
                    'stock_movement' as activity_type,
                    CASE
                        WHEN movement_type = 'PURCHASE' THEN 'Stock added: ' || i.name || ' (' || sm.quantity || ' ' || i.unit_of_measure::text || ')'
                        WHEN movement_type = 'PRODUCTION_USE' THEN 'Stock consumed: ' || i.name || ' (' || sm.quantity || ' ' || i.unit_of_measure::text || ')'
                        WHEN movement_type = 'WASTE' THEN 'Stock wasted: ' || i.name || ' (' || sm.quantity || ' ' || i.unit_of_measure::text || ')'
                        WHEN movement_type = 'ADJUSTMENT' THEN 'Stock adjusted: ' || i.name || ' (' || sm.quantity || ' ' || i.unit_of_measure::text || ')'
                        ELSE 'Stock movement: ' || i.name
                    END as description,
                    sm.movement_date as timestamp,
                    sm.created_by as user_id,
                    CASE
                        WHEN movement_type = 'WASTE' THEN 'high'
                        WHEN movement_type = 'ADJUSTMENT' THEN 'medium'
                        ELSE 'low'
                    END as impact_level,
                    sm.id as entity_id,
                    'stock_movement' as entity_type
                FROM stock_movements sm
                JOIN ingredients i ON sm.ingredient_id = i.id
                WHERE i.tenant_id = :tenant_id
                ORDER BY sm.movement_date DESC
                LIMIT :limit
            """)

            result = await self.session.execute(query, {"tenant_id": tenant_id, "limit": limit})

            return [
                {
                    "activity_type": row.activity_type,
                    "description": row.description,
                    "timestamp": row.timestamp,
                    "user_id": row.user_id,
                    "impact_level": row.impact_level,
                    "entity_id": row.entity_id,
                    "entity_type": row.entity_type
                }
                for row in result.fetchall()
            ]

        except Exception as e:
            logger.error("Failed to get recent stock movements", error=str(e), tenant_id=str(tenant_id))
            raise

    async def get_recent_food_safety_alerts(
        self,
        tenant_id: UUID,
        limit: int = 20
    ) -> List[Dict[str, Any]]:
        """Get recent food safety alerts"""
        try:
            query = text("""
                SELECT
                    'food_safety_alert' as activity_type,
                    title as description,
                    created_at as timestamp,
                    created_by as user_id,
                    CASE
                        WHEN severity = 'critical' THEN 'high'
                        WHEN severity = 'high' THEN 'medium'
                        ELSE 'low'
                    END as impact_level,
                    id as entity_id,
                    'food_safety_alert' as entity_type
                FROM food_safety_alerts
                WHERE tenant_id = :tenant_id
                ORDER BY created_at DESC
                LIMIT :limit
            """)

            result = await self.session.execute(query, {"tenant_id": tenant_id, "limit": limit})

            return [
                {
                    "activity_type": row.activity_type,
                    "description": row.description,
                    "timestamp": row.timestamp,
                    "user_id": row.user_id,
                    "impact_level": row.impact_level,
                    "entity_id": row.entity_id,
                    "entity_type": row.entity_type
                }
                for row in result.fetchall()
            ]

        except Exception as e:
            logger.error("Failed to get recent food safety alerts", error=str(e), tenant_id=str(tenant_id))
            raise

    async def get_live_metrics(self, tenant_id: UUID) -> Dict[str, Any]:
        """Get real-time inventory metrics"""
        try:
            query = text("""
                SELECT
                    COUNT(DISTINCT i.id) as total_ingredients,
                    COUNT(CASE WHEN s.available_quantity > i.low_stock_threshold THEN 1 END) as in_stock,
                    COUNT(CASE WHEN s.available_quantity <= i.low_stock_threshold THEN 1 END) as low_stock,
                    COUNT(CASE WHEN s.available_quantity = 0 THEN 1 END) as out_of_stock,
                    COALESCE(SUM(s.available_quantity * s.unit_cost), 0) as total_value,
                    COUNT(CASE WHEN s.expiration_date < NOW() THEN 1 END) as expired_items,
                    COUNT(CASE WHEN s.expiration_date BETWEEN NOW() AND NOW() + INTERVAL '7 days' THEN 1 END) as expiring_soon
                FROM ingredients i
                LEFT JOIN stock s ON i.id = s.ingredient_id AND s.is_available = true
                WHERE i.tenant_id = :tenant_id AND i.is_active = true
            """)

            result = await self.session.execute(query, {"tenant_id": tenant_id})
            metrics = result.fetchone()

            if not metrics:
                return {
                    "total_ingredients": 0,
                    "in_stock": 0,
                    "low_stock": 0,
                    "out_of_stock": 0,
                    "total_value": 0.0,
                    "expired_items": 0,
                    "expiring_soon": 0,
                    "last_updated": datetime.now(timezone.utc).isoformat()
                }

            return {
                "total_ingredients": metrics.total_ingredients,
                "in_stock": metrics.in_stock,
                "low_stock": metrics.low_stock,
                "out_of_stock": metrics.out_of_stock,
                "total_value": float(metrics.total_value),
                "expired_items": metrics.expired_items,
                "expiring_soon": metrics.expiring_soon,
                "last_updated": datetime.now(timezone.utc).isoformat()
            }

        except Exception as e:
            logger.error("Failed to get live metrics", error=str(e), tenant_id=str(tenant_id))
            raise

    async def get_stock_status_by_category(
        self,
        tenant_id: UUID
    ) -> List[Dict[str, Any]]:
        """Get stock status breakdown by category"""
        try:
            query = text("""
                SELECT
                    COALESCE(i.ingredient_category::text, i.product_category::text, 'other') as category,
                    COUNT(DISTINCT i.id) as total_ingredients,
                    COUNT(CASE WHEN s.available_quantity > i.low_stock_threshold THEN 1 END) as in_stock,
                    COUNT(CASE WHEN s.available_quantity <= i.low_stock_threshold AND s.available_quantity > 0 THEN 1 END) as low_stock,
                    COUNT(CASE WHEN COALESCE(s.available_quantity, 0) = 0 THEN 1 END) as out_of_stock,
                    COALESCE(SUM(s.available_quantity * s.unit_cost), 0) as total_value
                FROM ingredients i
                LEFT JOIN (
                    SELECT
                        ingredient_id,
                        SUM(available_quantity) as available_quantity,
                        AVG(unit_cost) as unit_cost
                    FROM stock
                    WHERE tenant_id = :tenant_id AND is_available = true
                    GROUP BY ingredient_id
                ) s ON i.id = s.ingredient_id
                WHERE i.tenant_id = :tenant_id AND i.is_active = true
                GROUP BY category
                ORDER BY total_value DESC
            """)

            result = await self.session.execute(query, {"tenant_id": tenant_id})

            return [
                {
                    "category": row.category,
                    "total_ingredients": row.total_ingredients,
                    "in_stock": row.in_stock,
                    "low_stock": row.low_stock,
                    "out_of_stock": row.out_of_stock,
                    "total_value": float(row.total_value)
                }
                for row in result.fetchall()
            ]

        except Exception as e:
            logger.error("Failed to get stock status by category", error=str(e), tenant_id=str(tenant_id))
            raise

    async def get_alerts_summary(
        self,
        tenant_id: UUID,
        alert_types: Optional[List[str]] = None,
        severities: Optional[List[str]] = None,
        date_from: Optional[datetime] = None,
        date_to: Optional[datetime] = None
    ) -> List[Dict[str, Any]]:
        """Get alerts summary by type and severity with filters"""
        try:
            # Build query with filters; fragments are fixed strings and all
            # user-supplied values are bound as parameters
            where_conditions = ["tenant_id = :tenant_id", "status = 'active'"]
            params = {"tenant_id": tenant_id}

            if alert_types:
                where_conditions.append("alert_type = ANY(:alert_types)")
                params["alert_types"] = alert_types

            if severities:
                where_conditions.append("severity = ANY(:severities)")
                params["severities"] = severities

            if date_from:
                where_conditions.append("created_at >= :date_from")
                params["date_from"] = date_from

            if date_to:
                where_conditions.append("created_at <= :date_to")
                params["date_to"] = date_to

            where_clause = " AND ".join(where_conditions)

            query = text(f"""
                SELECT
                    alert_type,
                    severity,
                    COUNT(*) as count,
                    MIN(EXTRACT(EPOCH FROM (NOW() - created_at))/3600)::int as oldest_alert_age_hours,
                    AVG(CASE WHEN resolved_at IS NOT NULL
                        THEN EXTRACT(EPOCH FROM (resolved_at - created_at))/3600
                        ELSE NULL END)::int as avg_resolution_hours
                FROM food_safety_alerts
                WHERE {where_clause}
                GROUP BY alert_type, severity
                ORDER BY severity DESC, count DESC
            """)

            result = await self.session.execute(query, params)

            return [
                {
                    "alert_type": row.alert_type,
                    "severity": row.severity,
                    "count": row.count,
                    "oldest_alert_age_hours": row.oldest_alert_age_hours,
                    "average_resolution_time_hours": row.avg_resolution_hours
                }
                for row in result.fetchall()
            ]

        except Exception as e:
            logger.error("Failed to get alerts summary", error=str(e), tenant_id=str(tenant_id))
            raise

    async def get_ingredient_stock_levels(self, tenant_id: UUID) -> Dict[str, float]:
        """
        Get current stock levels for all ingredients

        Args:
            tenant_id: Tenant UUID

        Returns:
            Dictionary mapping ingredient_id to current stock level
        """
        try:
            stock_query = text("""
                SELECT
                    i.id as ingredient_id,
                    COALESCE(SUM(s.available_quantity), 0) as current_stock
                FROM ingredients i
                LEFT JOIN stock s ON i.id = s.ingredient_id AND s.is_available = true
                WHERE i.tenant_id = :tenant_id AND i.is_active = true
                GROUP BY i.id
            """)

            result = await self.session.execute(stock_query, {"tenant_id": tenant_id})
            stock_levels = {}

            for row in result.fetchall():
                stock_levels[str(row.ingredient_id)] = float(row.current_stock)

            return stock_levels

        except Exception as e:
            logger.error("Failed to get ingredient stock levels", error=str(e), tenant_id=str(tenant_id))
            raise

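# Usage sketch (illustrative; `async_session` stands in for the application's
# AsyncSession factory, which is defined elsewhere in the service).
async def print_live_metrics(tenant_id: UUID) -> None:
    async with async_session() as session:  # assumed session factory
        repo = DashboardRepository(session)
        metrics = await repo.get_live_metrics(tenant_id)
        print(metrics["low_stock"], metrics["total_value"])
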
298
services/inventory/app/repositories/food_safety_repository.py
Normal file
@@ -0,0 +1,298 @@
# services/inventory/app/repositories/food_safety_repository.py
"""
Food Safety Repository
Data access layer for food safety compliance and monitoring
"""

from typing import List, Optional, Dict, Any
from uuid import UUID
from datetime import datetime
from sqlalchemy import text, select
from sqlalchemy.ext.asyncio import AsyncSession
import structlog

from app.models.food_safety import (
    FoodSafetyCompliance,
    FoodSafetyAlert,
    TemperatureLog,
    ComplianceStatus
)

logger = structlog.get_logger()


class FoodSafetyRepository:
    """Repository for food safety data access"""

    def __init__(self, session: AsyncSession):
        self.session = session

    # ===== COMPLIANCE METHODS =====

    async def create_compliance(self, compliance: FoodSafetyCompliance) -> FoodSafetyCompliance:
        """
        Create a new compliance record

        Args:
            compliance: FoodSafetyCompliance instance

        Returns:
            Created FoodSafetyCompliance instance
        """
        self.session.add(compliance)
        await self.session.flush()
        await self.session.refresh(compliance)
        return compliance

    async def get_compliance_by_id(
        self,
        compliance_id: UUID,
        tenant_id: UUID
    ) -> Optional[FoodSafetyCompliance]:
        """
        Get compliance record by ID

        Args:
            compliance_id: Compliance record UUID
            tenant_id: Tenant UUID for authorization

        Returns:
            FoodSafetyCompliance or None
        """
        compliance = await self.session.get(FoodSafetyCompliance, compliance_id)
        if compliance and compliance.tenant_id == tenant_id:
            return compliance
        return None

    async def update_compliance(
        self,
        compliance: FoodSafetyCompliance
    ) -> FoodSafetyCompliance:
        """
        Update compliance record

        Args:
            compliance: FoodSafetyCompliance instance with updates

        Returns:
            Updated FoodSafetyCompliance instance
        """
        await self.session.flush()
        await self.session.refresh(compliance)
        return compliance

async def get_compliance_stats(self, tenant_id: UUID) -> Dict[str, int]:
|
||||
"""
|
||||
Get compliance statistics for dashboard
|
||||
|
||||
Args:
|
||||
tenant_id: Tenant UUID
|
||||
|
||||
Returns:
|
||||
Dictionary with compliance counts by status
|
||||
"""
|
||||
try:
|
||||
query = text("""
|
||||
SELECT
|
||||
COUNT(*) as total,
|
||||
COUNT(CASE WHEN compliance_status = 'COMPLIANT' THEN 1 END) as compliant,
|
||||
COUNT(CASE WHEN compliance_status = 'NON_COMPLIANT' THEN 1 END) as non_compliant,
|
||||
COUNT(CASE WHEN compliance_status = 'PENDING_REVIEW' THEN 1 END) as pending_review
|
||||
FROM food_safety_compliance
|
||||
WHERE tenant_id = :tenant_id AND is_active = true
|
||||
""")
|
||||
|
||||
result = await self.session.execute(query, {"tenant_id": tenant_id})
|
||||
row = result.fetchone()
|
||||
|
||||
if not row:
|
||||
return {
|
||||
"total": 0,
|
||||
"compliant": 0,
|
||||
"non_compliant": 0,
|
||||
"pending_review": 0
|
||||
}
|
||||
|
||||
return {
|
||||
"total": row.total or 0,
|
||||
"compliant": row.compliant or 0,
|
||||
"non_compliant": row.non_compliant or 0,
|
||||
"pending_review": row.pending_review or 0
|
||||
}
|
||||
except Exception as e:
|
||||
logger.error("Failed to get compliance stats", error=str(e), tenant_id=str(tenant_id))
|
||||
raise
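
# Illustrative consumption of this dict (hypothetical caller code):
#   stats = await repo.get_compliance_stats(tenant_id)
#   rate = stats['compliant'] / stats['total'] if stats['total'] else 0.0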
|
||||
|
||||
# ===== TEMPERATURE MONITORING METHODS =====
|
||||
|
||||
async def get_temperature_stats(self, tenant_id: UUID) -> Dict[str, Any]:
|
||||
"""
|
||||
Get temperature monitoring statistics
|
||||
|
||||
Args:
|
||||
tenant_id: Tenant UUID
|
||||
|
||||
Returns:
|
||||
Dictionary with temperature monitoring stats
|
||||
"""
|
||||
try:
|
||||
query = text("""
|
||||
SELECT
|
||||
COUNT(DISTINCT CASE WHEN recorded_at > NOW() - INTERVAL '1 hour' THEN equipment_id END) as sensors_online,
|
||||
COUNT(CASE WHEN NOT is_within_range THEN 1 END) as violations_24h
|
||||
FROM temperature_logs
|
||||
WHERE tenant_id = :tenant_id AND recorded_at > NOW() - INTERVAL '24 hours'
|
||||
""")
|
||||
|
||||
result = await self.session.execute(query, {"tenant_id": tenant_id})
|
||||
row = result.fetchone()
|
||||
|
||||
if not row:
|
||||
return {
|
||||
"sensors_online": 0,
|
||||
"violations_24h": 0
|
||||
}
|
||||
|
||||
return {
|
||||
"sensors_online": row.sensors_online or 0,
|
||||
"violations_24h": row.violations_24h or 0
|
||||
}
|
||||
except Exception as e:
|
||||
logger.error("Failed to get temperature stats", error=str(e), tenant_id=str(tenant_id))
|
||||
raise
|
||||
|
||||
# ===== EXPIRATION TRACKING METHODS =====
|
||||
|
||||
async def get_expiration_stats(self, tenant_id: UUID) -> Dict[str, int]:
|
||||
"""
|
||||
Get expiration tracking statistics
|
||||
|
||||
Args:
|
||||
tenant_id: Tenant UUID
|
||||
|
||||
Returns:
|
||||
Dictionary with expiration counts
|
||||
"""
|
||||
try:
|
||||
query = text("""
|
||||
SELECT
|
||||
COUNT(CASE WHEN expiration_date::date = CURRENT_DATE THEN 1 END) as expiring_today,
|
||||
COUNT(CASE WHEN expiration_date BETWEEN CURRENT_DATE AND CURRENT_DATE + INTERVAL '7 days' THEN 1 END) as expiring_week,
|
||||
COUNT(CASE WHEN expiration_date < CURRENT_DATE AND is_available THEN 1 END) as expired_requiring_action
|
||||
FROM stock s
|
||||
JOIN ingredients i ON s.ingredient_id = i.id
|
||||
WHERE i.tenant_id = :tenant_id AND s.is_available = true
|
||||
""")
|
||||
|
||||
result = await self.session.execute(query, {"tenant_id": tenant_id})
|
||||
row = result.fetchone()
|
||||
|
||||
if not row:
|
||||
return {
|
||||
"expiring_today": 0,
|
||||
"expiring_week": 0,
|
||||
"expired_requiring_action": 0
|
||||
}
|
||||
|
||||
return {
|
||||
"expiring_today": row.expiring_today or 0,
|
||||
"expiring_week": row.expiring_week or 0,
|
||||
"expired_requiring_action": row.expired_requiring_action or 0
|
||||
}
|
||||
except Exception as e:
|
||||
logger.error("Failed to get expiration stats", error=str(e), tenant_id=str(tenant_id))
|
||||
raise
|
||||
|
||||
# ===== ALERT METHODS =====
|
||||
|
||||
async def get_alert_stats(self, tenant_id: UUID) -> Dict[str, int]:
|
||||
"""
|
||||
Get food safety alert statistics
|
||||
|
||||
Args:
|
||||
tenant_id: Tenant UUID
|
||||
|
||||
Returns:
|
||||
Dictionary with alert counts by severity
|
||||
"""
|
||||
try:
|
||||
query = text("""
|
||||
SELECT
|
||||
COUNT(CASE WHEN severity IN ('high', 'critical') THEN 1 END) as high_risk,
|
||||
COUNT(CASE WHEN severity = 'critical' THEN 1 END) as critical,
|
||||
COUNT(CASE WHEN regulatory_action_required = true AND resolved_at IS NULL THEN 1 END) as regulatory_pending
|
||||
FROM food_safety_alerts
|
||||
WHERE tenant_id = :tenant_id AND status = 'active'
|
||||
""")
|
||||
|
||||
result = await self.session.execute(query, {"tenant_id": tenant_id})
|
||||
row = result.fetchone()
|
||||
|
||||
if not row:
|
||||
return {
|
||||
"high_risk": 0,
|
||||
"critical": 0,
|
||||
"regulatory_pending": 0
|
||||
}
|
||||
|
||||
return {
|
||||
"high_risk": row.high_risk or 0,
|
||||
"critical": row.critical or 0,
|
||||
"regulatory_pending": row.regulatory_pending or 0
|
||||
}
|
||||
except Exception as e:
|
||||
logger.error("Failed to get alert stats", error=str(e), tenant_id=str(tenant_id))
|
||||
raise
|
||||
|
||||
# ===== VALIDATION METHODS =====
|
||||
|
||||
async def validate_ingredient_exists(
|
||||
self,
|
||||
ingredient_id: UUID,
|
||||
tenant_id: UUID
|
||||
) -> bool:
|
||||
"""
|
||||
Validate that an ingredient exists for a tenant
|
||||
|
||||
Args:
|
||||
ingredient_id: Ingredient UUID
|
||||
tenant_id: Tenant UUID
|
||||
|
||||
Returns:
|
||||
True if ingredient exists, False otherwise
|
||||
"""
|
||||
try:
|
||||
query = text("""
|
||||
SELECT id
|
||||
FROM ingredients
|
||||
WHERE id = :ingredient_id AND tenant_id = :tenant_id
|
||||
""")
|
||||
|
||||
result = await self.session.execute(query, {
|
||||
"ingredient_id": ingredient_id,
|
||||
"tenant_id": tenant_id
|
||||
})
|
||||
|
||||
return result.fetchone() is not None
|
||||
except Exception as e:
|
||||
logger.error("Failed to validate ingredient", error=str(e))
|
||||
raise
|
||||
|
||||
async def mark_temperature_alert_triggered(self, log_id: UUID) -> None:
|
||||
"""
|
||||
Mark a temperature log as having triggered an alert
|
||||
"""
|
||||
try:
|
||||
query = text("""
|
||||
UPDATE temperature_logs
|
||||
SET alert_triggered = true
|
||||
WHERE id = :id
|
||||
""")
|
||||
|
||||
await self.session.execute(query, {"id": log_id})
|
||||
await self.session.commit()
|
||||
|
||||
except Exception as e:
|
||||
await self.session.rollback()
|
||||
logger.error("Failed to mark temperature alert", error=str(e), log_id=str(log_id))
|
||||
raise
|
||||
668
services/inventory/app/repositories/ingredient_repository.py
Normal file
@@ -0,0 +1,668 @@
|
||||
# services/inventory/app/repositories/ingredient_repository.py
|
||||
"""
|
||||
Ingredient Repository using Repository Pattern
|
||||
"""
|
||||
|
||||
from typing import List, Optional, Dict, Any
|
||||
from uuid import UUID
|
||||
from datetime import datetime
|
||||
from sqlalchemy import select, func, and_, or_, desc, asc
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
import structlog
|
||||
|
||||
from app.models.inventory import Ingredient, Stock
|
||||
from app.schemas.inventory import IngredientCreate, IngredientUpdate
|
||||
from shared.database.repository import BaseRepository
|
||||
|
||||
logger = structlog.get_logger()
|
||||
|
||||
|
||||
class IngredientRepository(BaseRepository[Ingredient, IngredientCreate, IngredientUpdate]):
|
||||
"""Repository for ingredient operations"""
|
||||
|
||||
def __init__(self, session: AsyncSession):
|
||||
super().__init__(Ingredient, session)
|
||||
|
||||
async def create_ingredient(self, ingredient_data: IngredientCreate, tenant_id: UUID) -> Ingredient:
|
||||
"""Create a new ingredient"""
|
||||
try:
|
||||
# Prepare data and map schema fields to model fields
|
||||
create_data = ingredient_data.model_dump()
|
||||
create_data['tenant_id'] = tenant_id
|
||||
|
||||
# Handle product_type enum conversion
|
||||
product_type_value = create_data.get('product_type')
|
||||
|
||||
# Log warning if product_type is missing (should be provided by frontend)
|
||||
if not product_type_value:
|
||||
logger.warning(
|
||||
"product_type not provided, defaulting to 'ingredient'",
|
||||
ingredient_name=create_data.get('name'),
|
||||
tenant_id=tenant_id
|
||||
)
|
||||
product_type_value = 'INGREDIENT'
|
||||
create_data['product_type'] = product_type_value  # apply the default so the enum conversion below resolves it
|
||||
|
||||
if 'product_type' in create_data:
|
||||
from app.models.inventory import ProductType
|
||||
try:
|
||||
# Convert string to enum object
|
||||
if isinstance(product_type_value, str):
|
||||
for enum_member in ProductType:
|
||||
if enum_member.value == product_type_value or enum_member.name == product_type_value:
|
||||
create_data['product_type'] = enum_member
|
||||
break
|
||||
else:
|
||||
# If not found, default to INGREDIENT
|
||||
logger.warning(
|
||||
"Invalid product_type value, defaulting to INGREDIENT",
|
||||
invalid_value=product_type_value,
|
||||
tenant_id=tenant_id
|
||||
)
|
||||
create_data['product_type'] = ProductType.INGREDIENT
|
||||
# If it's already an enum, keep it
|
||||
except Exception as e:
|
||||
# Fallback to INGREDIENT if any issues
|
||||
logger.error(
|
||||
"Error converting product_type to enum, defaulting to INGREDIENT",
|
||||
error=str(e),
|
||||
tenant_id=tenant_id
|
||||
)
|
||||
create_data['product_type'] = ProductType.INGREDIENT
|
||||
|
||||
# Handle category mapping based on product type
|
||||
if 'category' in create_data:
|
||||
category_value = create_data.pop('category')
|
||||
|
||||
if product_type_value == 'FINISHED_PRODUCT':
|
||||
# Map to product_category for finished products
|
||||
from app.models.inventory import ProductCategory
|
||||
if category_value:
|
||||
try:
|
||||
# Find the enum member by value
|
||||
for enum_member in ProductCategory:
|
||||
if enum_member.value == category_value:
|
||||
create_data['product_category'] = enum_member
|
||||
break
|
||||
else:
|
||||
# If not found, default to OTHER_PRODUCTS
|
||||
create_data['product_category'] = ProductCategory.OTHER_PRODUCTS
|
||||
except Exception:
|
||||
# Fallback to OTHER_PRODUCTS if any issues
|
||||
create_data['product_category'] = ProductCategory.OTHER_PRODUCTS
|
||||
else:
|
||||
# Map to ingredient_category for ingredients
|
||||
from app.models.inventory import IngredientCategory
|
||||
if category_value:
|
||||
try:
|
||||
# Find the enum member by value
|
||||
for enum_member in IngredientCategory:
|
||||
if enum_member.value == category_value:
|
||||
create_data['ingredient_category'] = enum_member
|
||||
break
|
||||
else:
|
||||
# If not found, default to OTHER
|
||||
create_data['ingredient_category'] = IngredientCategory.OTHER
|
||||
except Exception:
|
||||
# Fallback to OTHER if any issues
|
||||
create_data['ingredient_category'] = IngredientCategory.OTHER
|
||||
|
||||
# Convert unit_of_measure string to enum object
|
||||
if 'unit_of_measure' in create_data:
|
||||
unit_value = create_data['unit_of_measure']
|
||||
from app.models.inventory import UnitOfMeasure
|
||||
try:
|
||||
# Find the enum member by value
|
||||
for enum_member in UnitOfMeasure:
|
||||
if enum_member.value == unit_value:
|
||||
create_data['unit_of_measure'] = enum_member
|
||||
break
|
||||
else:
|
||||
# If not found, default to UNITS
|
||||
create_data['unit_of_measure'] = UnitOfMeasure.UNITS
|
||||
except Exception:
|
||||
# Fallback to UNITS if any issues
|
||||
create_data['unit_of_measure'] = UnitOfMeasure.UNITS
|
||||
|
||||
# Create record
|
||||
record = await self.create(create_data)
|
||||
logger.info(
|
||||
"Created ingredient",
|
||||
ingredient_id=record.id,
|
||||
name=record.name,
|
||||
ingredient_category=record.ingredient_category.value if record.ingredient_category else None,
|
||||
tenant_id=tenant_id
|
||||
)
|
||||
return record
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Failed to create ingredient", error=str(e), tenant_id=tenant_id)
|
||||
raise
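
# NOTE: The three string-to-enum loops above share one pattern. A hedged sketch of a
# generic helper that could replace them (hypothetical refactor, not part of this module):
#
#     from enum import Enum
#     from typing import Any, Optional, Type, TypeVar
#
#     E = TypeVar("E", bound=Enum)
#
#     def coerce_enum(value: Any, enum_cls: Type[E], default: Optional[E] = None) -> Optional[E]:
#         """Resolve a string to an enum member by value or name, else return default."""
#         if isinstance(value, enum_cls):
#             return value
#         if isinstance(value, str):
#             for member in enum_cls:
#                 if member.value == value or member.name == value:
#                     return member
#         return default
#
# Usage: create_data['product_type'] = coerce_enum(product_type_value, ProductType, ProductType.INGREDIENT)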
|
||||
|
||||
async def update(self, record_id: Any, obj_in: IngredientUpdate, **kwargs) -> Optional[Ingredient]:
|
||||
"""Override update to handle product_type and category enum conversions"""
|
||||
try:
|
||||
# Prepare data and map schema fields to model fields
|
||||
update_data = obj_in.model_dump(exclude_unset=True)
|
||||
|
||||
# Handle product_type enum conversion
|
||||
if 'product_type' in update_data:
|
||||
product_type_value = update_data['product_type']
|
||||
from app.models.inventory import ProductType
|
||||
try:
|
||||
# Convert string to enum object
|
||||
if isinstance(product_type_value, str):
|
||||
for enum_member in ProductType:
|
||||
if enum_member.value == product_type_value or enum_member.name == product_type_value:
|
||||
update_data['product_type'] = enum_member
|
||||
break
|
||||
else:
|
||||
# If not found, keep original value (don't update)
|
||||
del update_data['product_type']
|
||||
# If it's already an enum, keep it
|
||||
except Exception:
|
||||
# Remove invalid product_type to avoid update
|
||||
del update_data['product_type']
|
||||
|
||||
# Handle category mapping based on product type
|
||||
if 'category' in update_data:
|
||||
category_value = update_data.pop('category')
|
||||
product_type_value = update_data.get('product_type', 'INGREDIENT')
|
||||
# May already be an enum after the conversion above; normalize to its name for the comparison below
|
||||
if hasattr(product_type_value, 'name'):
|
||||
product_type_value = product_type_value.name
|
||||
|
||||
# Get current product if we need to determine type
|
||||
if 'product_type' not in update_data:
|
||||
current_record = await self.get_by_id(record_id)
|
||||
if current_record:
|
||||
product_type_value = current_record.product_type.name if current_record.product_type else 'INGREDIENT'
|
||||
|
||||
if product_type_value == 'FINISHED_PRODUCT':
|
||||
# Map to product_category for finished products
|
||||
from app.models.inventory import ProductCategory
|
||||
if category_value:
|
||||
try:
|
||||
for enum_member in ProductCategory:
|
||||
if enum_member.value == category_value:
|
||||
update_data['product_category'] = enum_member
|
||||
# Clear ingredient_category when setting product_category
|
||||
update_data['ingredient_category'] = None
|
||||
break
|
||||
except Exception:
|
||||
pass
|
||||
else:
|
||||
# Map to ingredient_category for ingredients
|
||||
from app.models.inventory import IngredientCategory
|
||||
if category_value:
|
||||
try:
|
||||
for enum_member in IngredientCategory:
|
||||
if enum_member.value == category_value:
|
||||
update_data['ingredient_category'] = enum_member
|
||||
# Clear product_category when setting ingredient_category
|
||||
update_data['product_category'] = None
|
||||
break
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
# Handle unit_of_measure enum conversion
|
||||
if 'unit_of_measure' in update_data:
|
||||
unit_value = update_data['unit_of_measure']
|
||||
from app.models.inventory import UnitOfMeasure
|
||||
try:
|
||||
if isinstance(unit_value, str):
|
||||
for enum_member in UnitOfMeasure:
|
||||
if enum_member.value == unit_value:
|
||||
update_data['unit_of_measure'] = enum_member
|
||||
break
|
||||
else:
|
||||
# If not found, keep original value
|
||||
del update_data['unit_of_measure']
|
||||
except Exception:
|
||||
del update_data['unit_of_measure']
|
||||
|
||||
# Call parent update method
|
||||
return await super().update(record_id, update_data, **kwargs)
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Failed to update ingredient", error=str(e), record_id=record_id)
|
||||
raise
|
||||
|
||||
async def get_ingredients_by_tenant(
|
||||
self,
|
||||
tenant_id: UUID,
|
||||
skip: int = 0,
|
||||
limit: int = 100,
|
||||
filters: Optional[Dict[str, Any]] = None
|
||||
) -> List[Ingredient]:
|
||||
"""Get ingredients for a tenant with filtering"""
|
||||
try:
|
||||
# Handle search filter separately since it requires special query logic
|
||||
if filters and filters.get('search'):
|
||||
search_term = filters['search']
|
||||
logger.info(f"Searching ingredients with term: '{search_term}'", tenant_id=tenant_id)
|
||||
return await self.search_ingredients(tenant_id, search_term, skip, limit)
|
||||
|
||||
# Handle other filters with standard multi-get
|
||||
query_filters = {'tenant_id': tenant_id}
|
||||
if filters:
|
||||
if filters.get('category'):
|
||||
query_filters['category'] = filters['category']
|
||||
if filters.get('product_type'):
|
||||
# Convert string to enum object
|
||||
from app.models.inventory import ProductType
|
||||
product_type_value = filters['product_type']
|
||||
try:
|
||||
# Find the enum member by value
|
||||
for enum_member in ProductType:
|
||||
if enum_member.value == product_type_value:
|
||||
query_filters['product_type'] = enum_member
|
||||
break
|
||||
else:
|
||||
# If not found, skip this filter
|
||||
logger.warning(f"Invalid product_type value: {product_type_value}")
|
||||
except Exception as e:
|
||||
logger.warning(f"Error converting product_type: {e}")
|
||||
# Skip invalid product_type filter
|
||||
if filters.get('is_active') is not None:
|
||||
query_filters['is_active'] = filters['is_active']
|
||||
if filters.get('is_perishable') is not None:
|
||||
query_filters['is_perishable'] = filters['is_perishable']
|
||||
|
||||
ingredients = await self.get_multi(
|
||||
skip=skip,
|
||||
limit=limit,
|
||||
filters=query_filters,
|
||||
order_by='name'
|
||||
)
|
||||
return ingredients
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Failed to get ingredients", error=str(e), tenant_id=tenant_id)
|
||||
raise
|
||||
|
||||
async def search_ingredients(
|
||||
self,
|
||||
tenant_id: UUID,
|
||||
search_term: str,
|
||||
skip: int = 0,
|
||||
limit: int = 50
|
||||
) -> List[Ingredient]:
|
||||
"""Search ingredients by name, sku, or barcode"""
|
||||
try:
|
||||
# Add tenant filter to search
|
||||
query = select(self.model).where(
|
||||
and_(
|
||||
self.model.tenant_id == tenant_id,
|
||||
or_(
|
||||
self.model.name.ilike(f"%{search_term}%"),
|
||||
self.model.sku.ilike(f"%{search_term}%"),
|
||||
self.model.barcode.ilike(f"%{search_term}%"),
|
||||
self.model.brand.ilike(f"%{search_term}%")
|
||||
)
|
||||
)
|
||||
).offset(skip).limit(limit)
|
||||
|
||||
result = await self.session.execute(query)
|
||||
return result.scalars().all()
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Failed to search ingredients", error=str(e), tenant_id=tenant_id)
|
||||
raise
|
||||
|
||||
async def get_low_stock_ingredients(self, tenant_id: UUID) -> List[Dict[str, Any]]:
|
||||
"""Get ingredients with low stock levels"""
|
||||
try:
|
||||
# Query ingredients with their current stock levels
|
||||
query = select(
|
||||
Ingredient,
|
||||
func.coalesce(func.sum(Stock.available_quantity), 0).label('current_stock')
|
||||
).outerjoin(
|
||||
Stock, and_(
|
||||
Stock.ingredient_id == Ingredient.id,
|
||||
Stock.is_available == True
|
||||
)
|
||||
).where(
|
||||
Ingredient.tenant_id == tenant_id
|
||||
).group_by(Ingredient.id).having(
|
||||
func.coalesce(func.sum(Stock.available_quantity), 0) <= Ingredient.low_stock_threshold
|
||||
)
|
||||
|
||||
result = await self.session.execute(query)
|
||||
results = []
|
||||
|
||||
for ingredient, current_stock in result:
|
||||
results.append({
|
||||
'ingredient': ingredient,
|
||||
'current_stock': float(current_stock) if current_stock else 0.0,
|
||||
'threshold': ingredient.low_stock_threshold,
|
||||
'needs_reorder': (
|
||||
current_stock <= ingredient.reorder_point
|
||||
if current_stock and ingredient.reorder_point is not None else True
|
||||
)
|
||||
})
|
||||
|
||||
return results
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Failed to get low stock ingredients", error=str(e), tenant_id=tenant_id)
|
||||
raise
|
||||
|
||||
async def get_ingredients_needing_reorder(self, tenant_id: UUID) -> List[Dict[str, Any]]:
|
||||
"""Get ingredients that need reordering"""
|
||||
try:
|
||||
query = select(
|
||||
Ingredient,
|
||||
func.coalesce(func.sum(Stock.available_quantity), 0).label('current_stock')
|
||||
).outerjoin(
|
||||
Stock, and_(
|
||||
Stock.ingredient_id == Ingredient.id,
|
||||
Stock.is_available == True
|
||||
)
|
||||
).where(
|
||||
and_(
|
||||
Ingredient.tenant_id == tenant_id,
|
||||
Ingredient.is_active == True
|
||||
)
|
||||
).group_by(Ingredient.id).having(
|
||||
func.coalesce(func.sum(Stock.available_quantity), 0) <= Ingredient.reorder_point
|
||||
)
|
||||
|
||||
result = await self.session.execute(query)
|
||||
results = []
|
||||
|
||||
for ingredient, current_stock in result:
|
||||
results.append({
|
||||
'ingredient': ingredient,
|
||||
'current_stock': float(current_stock) if current_stock else 0.0,
|
||||
'reorder_point': ingredient.reorder_point,
|
||||
'reorder_quantity': ingredient.reorder_quantity
|
||||
})
|
||||
|
||||
return results
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Failed to get ingredients needing reorder", error=str(e), tenant_id=tenant_id)
|
||||
raise
|
||||
|
||||
async def get_by_sku(self, tenant_id: UUID, sku: str) -> Optional[Ingredient]:
|
||||
"""Get ingredient by SKU"""
|
||||
try:
|
||||
result = await self.session.execute(
|
||||
select(self.model).where(
|
||||
and_(
|
||||
self.model.tenant_id == tenant_id,
|
||||
self.model.sku == sku
|
||||
)
|
||||
)
|
||||
)
|
||||
return result.scalar_one_or_none()
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Failed to get ingredient by SKU", error=str(e), sku=sku, tenant_id=tenant_id)
|
||||
raise
|
||||
|
||||
async def get_by_barcode(self, tenant_id: UUID, barcode: str) -> Optional[Ingredient]:
|
||||
"""Get ingredient by barcode"""
|
||||
try:
|
||||
result = await self.session.execute(
|
||||
select(self.model).where(
|
||||
and_(
|
||||
self.model.tenant_id == tenant_id,
|
||||
self.model.barcode == barcode
|
||||
)
|
||||
)
|
||||
)
|
||||
return result.scalar_one_or_none()
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Failed to get ingredient by barcode", error=str(e), barcode=barcode, tenant_id=tenant_id)
|
||||
raise
|
||||
|
||||
async def update_last_purchase_price(self, ingredient_id: UUID, price: float) -> Optional[Ingredient]:
|
||||
"""Update the last purchase price for an ingredient"""
|
||||
try:
|
||||
from app.schemas.inventory import IngredientUpdate
|
||||
update_data = IngredientUpdate(last_purchase_price=price)
|
||||
return await self.update(ingredient_id, update_data)
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Failed to update last purchase price", error=str(e), ingredient_id=ingredient_id)
|
||||
raise
|
||||
|
||||
async def update_weighted_average_cost(
|
||||
self,
|
||||
ingredient_id: UUID,
|
||||
current_stock_quantity: float,
|
||||
new_purchase_quantity: float,
|
||||
new_unit_cost: float
|
||||
) -> Optional[Ingredient]:
|
||||
"""
|
||||
Update the average cost using weighted average calculation.
|
||||
|
||||
Formula:
|
||||
new_average_cost = (current_stock_qty × current_avg_cost + new_qty × new_cost) / (current_stock_qty + new_qty)
|
||||
|
||||
Args:
|
||||
ingredient_id: ID of the ingredient
|
||||
current_stock_quantity: Current stock quantity before this purchase
|
||||
new_purchase_quantity: Quantity being purchased
|
||||
new_unit_cost: Unit cost of the new purchase
|
||||
|
||||
Returns:
|
||||
Updated ingredient or None if not found
|
||||
"""
|
||||
try:
|
||||
# Get current ingredient data
|
||||
ingredient = await self.get_by_id(ingredient_id)
|
||||
if not ingredient:
|
||||
logger.warning("Ingredient not found for average cost update", ingredient_id=ingredient_id)
|
||||
return None
|
||||
|
||||
from decimal import Decimal
|
||||
|
||||
# Get current average cost (default to new cost if not set)
|
||||
current_avg_cost = float(ingredient.average_cost) if ingredient.average_cost else float(new_unit_cost)
|
||||
|
||||
# Calculate weighted average
|
||||
# If no current stock, just use the new purchase price
|
||||
if current_stock_quantity <= 0:
|
||||
new_average_cost = Decimal(str(new_unit_cost))
|
||||
else:
|
||||
# Weighted average formula
|
||||
total_cost = (current_stock_quantity * current_avg_cost) + (new_purchase_quantity * new_unit_cost)
|
||||
total_quantity = current_stock_quantity + new_purchase_quantity
|
||||
new_average_cost = Decimal(str(total_cost / total_quantity))
|
||||
|
||||
# Update the ingredient
|
||||
from app.schemas.inventory import IngredientUpdate
|
||||
update_data = IngredientUpdate(average_cost=new_average_cost)
|
||||
updated_ingredient = await self.update(ingredient_id, update_data)
|
||||
|
||||
logger.info(
|
||||
"Updated weighted average cost",
|
||||
ingredient_id=ingredient_id,
|
||||
old_average_cost=current_avg_cost,
|
||||
new_average_cost=float(new_average_cost),
|
||||
current_stock_qty=current_stock_quantity,
|
||||
new_purchase_qty=new_purchase_quantity,
|
||||
new_unit_cost=new_unit_cost
|
||||
)
|
||||
|
||||
return updated_ingredient
|
||||
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
"Failed to update weighted average cost",
|
||||
error=str(e),
|
||||
ingredient_id=ingredient_id
|
||||
)
|
||||
raise
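
# Worked example of the weighted-average formula above (illustrative numbers):
#   current stock: 10 units at average cost 2.00
#   new purchase:   5 units at unit cost 3.50
#   new average = (10 * 2.00 + 5 * 3.50) / (10 + 5) = 37.50 / 15 = 2.50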
|
||||
|
||||
async def get_ingredients_by_category(self, tenant_id: UUID, category: str) -> List[Ingredient]:
|
||||
"""Get all ingredients in a specific category"""
|
||||
try:
|
||||
result = await self.session.execute(
|
||||
select(self.model).where(
|
||||
and_(
|
||||
self.model.tenant_id == tenant_id,
|
||||
self.model.category == category,
|
||||
self.model.is_active == True
|
||||
)
|
||||
).order_by(self.model.name)
|
||||
)
|
||||
return result.scalars().all()
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Failed to get ingredients by category", error=str(e), category=category, tenant_id=tenant_id)
|
||||
raise
|
||||
|
||||
async def delete_by_id(self, ingredient_id: UUID, tenant_id: UUID) -> bool:
|
||||
"""Hard delete an ingredient by ID"""
|
||||
try:
|
||||
from sqlalchemy import delete
|
||||
|
||||
# Delete the ingredient
|
||||
stmt = delete(self.model).where(
|
||||
and_(
|
||||
self.model.id == ingredient_id,
|
||||
self.model.tenant_id == tenant_id
|
||||
)
|
||||
)
|
||||
|
||||
result = await self.session.execute(stmt)
|
||||
await self.session.commit()
|
||||
|
||||
# Return True if a row was deleted
|
||||
return result.rowcount > 0
|
||||
|
||||
except Exception as e:
|
||||
await self.session.rollback()
|
||||
logger.error("Failed to hard delete ingredient", error=str(e), ingredient_id=ingredient_id, tenant_id=tenant_id)
|
||||
raise
|
||||
|
||||
async def get_active_tenants(self) -> List[UUID]:
|
||||
"""Get list of active tenant IDs from ingredients table"""
|
||||
try:
|
||||
result = await self.session.execute(
|
||||
select(func.distinct(Ingredient.tenant_id))
|
||||
.where(Ingredient.is_active == True)
|
||||
)
|
||||
|
||||
tenant_ids = []
|
||||
for row in result.fetchall():
|
||||
tenant_id = row[0]
|
||||
# Convert to UUID if it's not already
|
||||
if isinstance(tenant_id, UUID):
|
||||
tenant_ids.append(tenant_id)
|
||||
else:
|
||||
tenant_ids.append(UUID(str(tenant_id)))
|
||||
|
||||
logger.info("Retrieved active tenants from ingredients", count=len(tenant_ids))
|
||||
return tenant_ids
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Failed to get active tenants from ingredients", error=str(e))
|
||||
return []
|
||||
|
||||
async def get_critical_stock_shortages(self) -> List[Dict[str, Any]]:
|
||||
"""
|
||||
Get critical stock shortages across all tenants using CTE analysis.
|
||||
Returns ingredients that are critically low on stock.
|
||||
"""
|
||||
try:
|
||||
from sqlalchemy import text
|
||||
query = text("""
|
||||
WITH stock_analysis AS (
|
||||
SELECT
|
||||
i.id as ingredient_id,
|
||||
i.name as ingredient_name,
|
||||
i.tenant_id,
|
||||
i.reorder_point,
|
||||
COALESCE(SUM(s.current_quantity), 0) as current_quantity,
|
||||
i.low_stock_threshold,
|
||||
GREATEST(0, i.low_stock_threshold - COALESCE(SUM(s.current_quantity), 0)) as shortage_amount,
|
||||
CASE
|
||||
WHEN COALESCE(SUM(s.current_quantity), 0) < i.low_stock_threshold THEN 'critical'
|
||||
WHEN COALESCE(SUM(s.current_quantity), 0) < i.low_stock_threshold * 1.2 THEN 'low'
|
||||
ELSE 'normal'
|
||||
END as status
|
||||
FROM ingredients i
|
||||
LEFT JOIN stock s ON s.ingredient_id = i.id AND s.is_available = true
|
||||
WHERE i.is_active = true
|
||||
GROUP BY i.id, i.name, i.tenant_id, i.reorder_point, i.low_stock_threshold
|
||||
)
|
||||
SELECT
|
||||
ingredient_id,
|
||||
ingredient_name,
|
||||
tenant_id,
|
||||
current_quantity,
|
||||
reorder_point,
|
||||
shortage_amount
|
||||
FROM stock_analysis
|
||||
WHERE status = 'critical'
|
||||
ORDER BY shortage_amount DESC
|
||||
""")
|
||||
|
||||
result = await self.session.execute(query)
|
||||
rows = result.fetchall()
|
||||
|
||||
shortages = []
|
||||
for row in rows:
|
||||
shortages.append({
|
||||
'ingredient_id': row.ingredient_id,
|
||||
'ingredient_name': row.ingredient_name,
|
||||
'tenant_id': row.tenant_id,
|
||||
'current_quantity': float(row.current_quantity) if row.current_quantity else 0,
|
||||
'required_quantity': float(row.reorder_point) if row.reorder_point else 0,
|
||||
'shortage_amount': float(row.shortage_amount) if row.shortage_amount else 0
|
||||
})
|
||||
|
||||
return shortages
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Failed to get critical stock shortages", error=str(e))
|
||||
raise
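
# Pure-Python mirror of the CASE classification in the CTE above, handy for
# unit-testing the thresholds (illustrative sketch, not used by this repository):
#
#     def classify_stock(current_qty: float, low_threshold: float) -> str:
#         if current_qty < low_threshold:
#             return 'critical'
#         if current_qty < low_threshold * 1.2:
#             return 'low'
#         return 'normal'
#
#     assert classify_stock(3, 10) == 'critical'
#     assert classify_stock(11, 10) == 'low'
#     assert classify_stock(13, 10) == 'normal'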
|
||||
|
||||
async def get_stock_issues(self, tenant_id: UUID) -> List[Dict[str, Any]]:
|
||||
"""
|
||||
Get stock level issues with CTE analysis for a specific tenant
|
||||
Returns list of critical, low, and overstock situations
|
||||
"""
|
||||
try:
|
||||
from sqlalchemy import text
|
||||
query = text("""
|
||||
WITH stock_analysis AS (
|
||||
SELECT
|
||||
i.id, i.name, i.tenant_id,
|
||||
COALESCE(SUM(s.current_quantity), 0) as current_stock,
|
||||
i.low_stock_threshold as minimum_stock,
|
||||
i.max_stock_level as maximum_stock,
|
||||
i.reorder_point,
|
||||
0 as tomorrow_needed,
|
||||
0 as avg_daily_usage,
|
||||
7 as lead_time_days,
|
||||
CASE
|
||||
WHEN COALESCE(SUM(s.current_quantity), 0) < i.low_stock_threshold THEN 'critical'
|
||||
WHEN COALESCE(SUM(s.current_quantity), 0) < i.low_stock_threshold * 1.2 THEN 'low'
|
||||
WHEN i.max_stock_level IS NOT NULL AND COALESCE(SUM(s.current_quantity), 0) > i.max_stock_level THEN 'overstock'
|
||||
ELSE 'normal'
|
||||
END as status,
|
||||
GREATEST(0, i.low_stock_threshold - COALESCE(SUM(s.current_quantity), 0)) as shortage_amount
|
||||
FROM ingredients i
|
||||
LEFT JOIN stock s ON s.ingredient_id = i.id AND s.is_available = true
|
||||
WHERE i.tenant_id = :tenant_id AND i.is_active = true
|
||||
GROUP BY i.id, i.name, i.tenant_id, i.low_stock_threshold, i.max_stock_level, i.reorder_point
|
||||
)
|
||||
SELECT * FROM stock_analysis WHERE status != 'normal'
|
||||
ORDER BY
|
||||
CASE status
|
||||
WHEN 'critical' THEN 1
|
||||
WHEN 'low' THEN 2
|
||||
WHEN 'overstock' THEN 3
|
||||
END,
|
||||
shortage_amount DESC
|
||||
""")
|
||||
|
||||
result = await self.session.execute(query, {"tenant_id": tenant_id})
|
||||
return [dict(row._mapping) for row in result.fetchall()]
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Failed to get stock issues", error=str(e), tenant_id=str(tenant_id))
|
||||
raise
|
||||
557
services/inventory/app/repositories/stock_movement_repository.py
Normal file
@@ -0,0 +1,557 @@
|
||||
# services/inventory/app/repositories/stock_movement_repository.py
|
||||
"""
|
||||
Stock Movement Repository using Repository Pattern
|
||||
"""
|
||||
|
||||
from typing import List, Optional, Dict, Any
|
||||
from uuid import UUID
|
||||
from datetime import datetime, timedelta
|
||||
from decimal import Decimal
|
||||
from sqlalchemy import select, func, and_, or_, desc, asc
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
import structlog
|
||||
|
||||
from app.models.inventory import StockMovement, Ingredient, StockMovementType
|
||||
from app.schemas.inventory import StockMovementCreate
|
||||
from shared.database.repository import BaseRepository
|
||||
|
||||
logger = structlog.get_logger()
|
||||
|
||||
|
||||
class StockMovementRepository(BaseRepository[StockMovement, StockMovementCreate, dict]):
|
||||
"""Repository for stock movement operations"""
|
||||
|
||||
def __init__(self, session: AsyncSession):
|
||||
super().__init__(StockMovement, session)
|
||||
|
||||
async def create_movement(
|
||||
self,
|
||||
movement_data: StockMovementCreate,
|
||||
tenant_id: UUID,
|
||||
created_by: Optional[UUID] = None,
|
||||
quantity_before: Optional[float] = None,
|
||||
quantity_after: Optional[float] = None
|
||||
) -> StockMovement:
|
||||
"""Create a new stock movement record"""
|
||||
try:
|
||||
# Prepare data
|
||||
create_data = movement_data.model_dump()
|
||||
create_data['tenant_id'] = tenant_id
|
||||
create_data['created_by'] = created_by
|
||||
|
||||
# Add quantity_before and quantity_after if provided
|
||||
if quantity_before is not None:
|
||||
create_data['quantity_before'] = quantity_before
|
||||
if quantity_after is not None:
|
||||
create_data['quantity_after'] = quantity_after
|
||||
|
||||
# Ensure movement_type is properly converted to enum value
|
||||
if 'movement_type' in create_data:
|
||||
movement_type = create_data['movement_type']
|
||||
if hasattr(movement_type, 'value'):
|
||||
# It's an enum object, use its value
|
||||
create_data['movement_type'] = movement_type.value
|
||||
elif isinstance(movement_type, str):
|
||||
# It's already a string, ensure it's uppercase for database
|
||||
create_data['movement_type'] = movement_type.upper()
|
||||
|
||||
# Set movement date if not provided
|
||||
if not create_data.get('movement_date'):
|
||||
create_data['movement_date'] = datetime.now()
|
||||
|
||||
# Calculate total cost if unit cost provided
|
||||
if create_data.get('unit_cost') and create_data.get('quantity'):
|
||||
unit_cost = create_data['unit_cost']
|
||||
quantity = Decimal(str(create_data['quantity']))
|
||||
create_data['total_cost'] = unit_cost * quantity
|
||||
|
||||
# Create record
|
||||
record = await self.create(create_data)
|
||||
logger.info(
|
||||
"Created stock movement",
|
||||
movement_id=record.id,
|
||||
ingredient_id=record.ingredient_id,
|
||||
movement_type=record.movement_type if record.movement_type else None,
|
||||
quantity=record.quantity,
|
||||
quantity_before=record.quantity_before,
|
||||
quantity_after=record.quantity_after,
|
||||
tenant_id=tenant_id
|
||||
)
|
||||
return record
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Failed to create stock movement", error=str(e), tenant_id=tenant_id)
|
||||
raise
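
# Why Decimal(str(...)) above: feeding a float directly to Decimal preserves binary
# noise, while the str() round-trip keeps the human-readable value (illustrative):
#   Decimal(str(0.1)) * 3   -> Decimal('0.3')
#   Decimal(0.1) * 3        -> Decimal('0.30000000000000001665...')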
|
||||
|
||||
async def get_movements_by_ingredient(
|
||||
self,
|
||||
tenant_id: UUID,
|
||||
ingredient_id: UUID,
|
||||
skip: int = 0,
|
||||
limit: int = 100,
|
||||
days_back: Optional[int] = None
|
||||
) -> List[StockMovement]:
|
||||
"""Get stock movements for a specific ingredient"""
|
||||
try:
|
||||
query = select(self.model).where(
|
||||
and_(
|
||||
self.model.tenant_id == tenant_id,
|
||||
self.model.ingredient_id == ingredient_id
|
||||
)
|
||||
)
|
||||
|
||||
# Filter by date range if specified
|
||||
if days_back:
|
||||
start_date = datetime.now() - timedelta(days=days_back)
|
||||
query = query.where(self.model.movement_date >= start_date)
|
||||
|
||||
query = query.order_by(desc(self.model.movement_date)).offset(skip).limit(limit)
|
||||
|
||||
result = await self.session.execute(query)
|
||||
return result.scalars().all()
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Failed to get movements by ingredient", error=str(e), ingredient_id=ingredient_id)
|
||||
raise
|
||||
|
||||
async def get_movements_by_type(
|
||||
self,
|
||||
tenant_id: UUID,
|
||||
movement_type: StockMovementType,
|
||||
skip: int = 0,
|
||||
limit: int = 100,
|
||||
days_back: Optional[int] = None
|
||||
) -> List[StockMovement]:
|
||||
"""Get stock movements by type"""
|
||||
try:
|
||||
query = select(self.model).where(
|
||||
and_(
|
||||
self.model.tenant_id == tenant_id,
|
||||
self.model.movement_type == movement_type
|
||||
)
|
||||
)
|
||||
|
||||
# Filter by date range if specified
|
||||
if days_back:
|
||||
start_date = datetime.now() - timedelta(days=days_back)
|
||||
query = query.where(self.model.movement_date >= start_date)
|
||||
|
||||
query = query.order_by(desc(self.model.movement_date)).offset(skip).limit(limit)
|
||||
|
||||
result = await self.session.execute(query)
|
||||
return result.scalars().all()
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Failed to get movements by type", error=str(e), movement_type=movement_type)
|
||||
raise
|
||||
|
||||
async def get_recent_movements(
|
||||
self,
|
||||
tenant_id: UUID,
|
||||
limit: int = 50
|
||||
) -> List[StockMovement]:
|
||||
"""Get recent stock movements for dashboard"""
|
||||
try:
|
||||
result = await self.session.execute(
|
||||
select(self.model)
|
||||
.where(self.model.tenant_id == tenant_id)
|
||||
.order_by(desc(self.model.movement_date))
|
||||
.limit(limit)
|
||||
)
|
||||
return result.scalars().all()
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Failed to get recent movements", error=str(e), tenant_id=tenant_id)
|
||||
raise
|
||||
|
||||
async def get_movements(
|
||||
self,
|
||||
tenant_id: UUID,
|
||||
skip: int = 0,
|
||||
limit: int = 100,
|
||||
ingredient_id: Optional[UUID] = None,
|
||||
movement_type: Optional[str] = None
|
||||
) -> List[StockMovement]:
|
||||
"""Get stock movements with filtering"""
|
||||
logger.info("🔍 Repository getting movements",
|
||||
tenant_id=tenant_id,
|
||||
ingredient_id=ingredient_id,
|
||||
skip=skip,
|
||||
limit=limit)
|
||||
try:
|
||||
query = select(self.model).where(self.model.tenant_id == tenant_id)
|
||||
|
||||
# Add filters
|
||||
if ingredient_id:
|
||||
query = query.where(self.model.ingredient_id == ingredient_id)
|
||||
logger.info("🎯 Filtering by ingredient_id", ingredient_id=ingredient_id)
|
||||
|
||||
if movement_type:
|
||||
# Convert string to enum
|
||||
try:
|
||||
movement_type_enum = StockMovementType(movement_type)
|
||||
query = query.where(self.model.movement_type == movement_type_enum)
|
||||
logger.info("🏷️ Filtering by movement_type", movement_type=movement_type)
|
||||
except ValueError:
|
||||
logger.warning("⚠️ Invalid movement type", movement_type=movement_type)
|
||||
# Invalid movement type, skip filter
|
||||
pass
|
||||
|
||||
# Order by date (newest first) and apply pagination
|
||||
query = query.order_by(desc(self.model.movement_date)).offset(skip).limit(limit)
|
||||
|
||||
result = await self.session.execute(query)
|
||||
movements = result.scalars().all()
|
||||
|
||||
logger.info("🔢 Repository found movements", count=len(movements))
|
||||
return movements
|
||||
|
||||
except Exception as e:
|
||||
logger.error("❌ Repository failed to get movements", error=str(e), tenant_id=tenant_id)
|
||||
raise
|
||||
|
||||
async def get_movements_by_reference(
|
||||
self,
|
||||
tenant_id: UUID,
|
||||
reference_number: str
|
||||
) -> List[StockMovement]:
|
||||
"""Get stock movements by reference number (e.g., purchase order)"""
|
||||
try:
|
||||
result = await self.session.execute(
|
||||
select(self.model).where(
|
||||
and_(
|
||||
self.model.tenant_id == tenant_id,
|
||||
self.model.reference_number == reference_number
|
||||
)
|
||||
).order_by(desc(self.model.movement_date))
|
||||
)
|
||||
return result.scalars().all()
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Failed to get movements by reference", error=str(e), reference_number=reference_number)
|
||||
raise
|
||||
|
||||
async def get_movement_summary_by_period(
|
||||
self,
|
||||
tenant_id: UUID,
|
||||
days_back: int = 30
|
||||
) -> Dict[str, Any]:
|
||||
"""Get movement summary for specified period"""
|
||||
try:
|
||||
start_date = datetime.now() - timedelta(days=days_back)
|
||||
|
||||
# Get movement counts by type
|
||||
result = await self.session.execute(
|
||||
select(
|
||||
self.model.movement_type,
|
||||
func.count(self.model.id).label('count'),
|
||||
func.coalesce(func.sum(self.model.quantity), 0).label('total_quantity'),
|
||||
func.coalesce(func.sum(self.model.total_cost), 0).label('total_cost')
|
||||
).where(
|
||||
and_(
|
||||
self.model.tenant_id == tenant_id,
|
||||
self.model.movement_date >= start_date
|
||||
)
|
||||
).group_by(self.model.movement_type)
|
||||
)
|
||||
|
||||
summary = {}
|
||||
for row in result:
|
||||
movement_type = row.movement_type if row.movement_type else "unknown"
|
||||
summary[movement_type] = {
|
||||
'count': row.count,
|
||||
'total_quantity': float(row.total_quantity),
|
||||
'total_cost': float(row.total_cost) if row.total_cost else 0.0
|
||||
}
|
||||
|
||||
# Get total movements count
|
||||
total_result = await self.session.execute(
|
||||
select(func.count(self.model.id)).where(
|
||||
and_(
|
||||
self.model.tenant_id == tenant_id,
|
||||
self.model.movement_date >= start_date
|
||||
)
|
||||
)
|
||||
)
|
||||
|
||||
summary['total_movements'] = total_result.scalar() or 0
|
||||
summary['period_days'] = days_back
|
||||
|
||||
return summary
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Failed to get movement summary", error=str(e), tenant_id=tenant_id)
|
||||
raise
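
# Shape of the returned summary (values illustrative):
#   {'PURCHASE': {'count': 4, 'total_quantity': 120.0, 'total_cost': 310.5},
#    'WASTE':    {'count': 1, 'total_quantity': 2.5,   'total_cost': 6.0},
#    'total_movements': 5, 'period_days': 30}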
|
||||
|
||||
async def get_waste_movements(
|
||||
self,
|
||||
tenant_id: UUID,
|
||||
days_back: Optional[int] = None,
|
||||
start_date: Optional[datetime] = None,
|
||||
end_date: Optional[datetime] = None,
|
||||
skip: int = 0,
|
||||
limit: int = 100
|
||||
) -> List[StockMovement]:
|
||||
"""Get waste-related movements"""
|
||||
try:
|
||||
query = select(self.model).where(
|
||||
and_(
|
||||
self.model.tenant_id == tenant_id,
|
||||
self.model.movement_type == StockMovementType.WASTE
|
||||
)
|
||||
)
|
||||
|
||||
# Prefer explicit date range over days_back
|
||||
if start_date and end_date:
|
||||
query = query.where(
|
||||
and_(
|
||||
self.model.movement_date >= start_date,
|
||||
self.model.movement_date <= end_date
|
||||
)
|
||||
)
|
||||
elif days_back:
|
||||
calculated_start = datetime.now() - timedelta(days=days_back)
|
||||
query = query.where(self.model.movement_date >= calculated_start)
|
||||
|
||||
query = query.order_by(desc(self.model.movement_date)).offset(skip).limit(limit)
|
||||
|
||||
result = await self.session.execute(query)
|
||||
return result.scalars().all()
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Failed to get waste movements", error=str(e), tenant_id=tenant_id)
|
||||
raise
|
||||
|
||||
async def get_purchase_movements(
|
||||
self,
|
||||
tenant_id: UUID,
|
||||
days_back: Optional[int] = None,
|
||||
skip: int = 0,
|
||||
limit: int = 100
|
||||
) -> List[StockMovement]:
|
||||
"""Get purchase-related movements"""
|
||||
try:
|
||||
query = select(self.model).where(
|
||||
and_(
|
||||
self.model.tenant_id == tenant_id,
|
||||
self.model.movement_type == StockMovementType.PURCHASE
|
||||
)
|
||||
)
|
||||
|
||||
if days_back:
|
||||
start_date = datetime.now() - timedelta(days=days_back)
|
||||
query = query.where(self.model.movement_date >= start_date)
|
||||
|
||||
query = query.order_by(desc(self.model.movement_date)).offset(skip).limit(limit)
|
||||
|
||||
result = await self.session.execute(query)
|
||||
return result.scalars().all()
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Failed to get purchase movements", error=str(e), tenant_id=tenant_id)
|
||||
raise
|
||||
|
||||
async def calculate_ingredient_usage(
|
||||
self,
|
||||
tenant_id: UUID,
|
||||
ingredient_id: UUID,
|
||||
days_back: int = 30
|
||||
) -> Dict[str, float]:
|
||||
"""Calculate ingredient usage statistics"""
|
||||
try:
|
||||
start_date = datetime.now() - timedelta(days=days_back)
|
||||
|
||||
# Get production usage
|
||||
production_result = await self.session.execute(
|
||||
select(func.coalesce(func.sum(self.model.quantity), 0)).where(
|
||||
and_(
|
||||
self.model.tenant_id == tenant_id,
|
||||
self.model.ingredient_id == ingredient_id,
|
||||
self.model.movement_type == StockMovementType.PRODUCTION_USE,
|
||||
self.model.movement_date >= start_date
|
||||
)
|
||||
)
|
||||
)
|
||||
|
||||
# Get waste quantity
|
||||
waste_result = await self.session.execute(
|
||||
select(func.coalesce(func.sum(self.model.quantity), 0)).where(
|
||||
and_(
|
||||
self.model.tenant_id == tenant_id,
|
||||
self.model.ingredient_id == ingredient_id,
|
||||
self.model.movement_type == StockMovementType.WASTE,
|
||||
self.model.movement_date >= start_date
|
||||
)
|
||||
)
|
||||
)
|
||||
|
||||
# Get purchases
|
||||
purchase_result = await self.session.execute(
|
||||
select(func.coalesce(func.sum(self.model.quantity), 0)).where(
|
||||
and_(
|
||||
self.model.tenant_id == tenant_id,
|
||||
self.model.ingredient_id == ingredient_id,
|
||||
self.model.movement_type == StockMovementType.PURCHASE,
|
||||
self.model.movement_date >= start_date
|
||||
)
|
||||
)
|
||||
)
|
||||
|
||||
production_usage = float(production_result.scalar() or 0)
|
||||
waste_quantity = float(waste_result.scalar() or 0)
|
||||
purchase_quantity = float(purchase_result.scalar() or 0)
|
||||
|
||||
# Calculate usage rate per day
|
||||
usage_per_day = production_usage / days_back if days_back > 0 else 0
|
||||
waste_percentage = (waste_quantity / purchase_quantity * 100) if purchase_quantity > 0 else 0
|
||||
|
||||
return {
|
||||
'production_usage': production_usage,
|
||||
'waste_quantity': waste_quantity,
|
||||
'purchase_quantity': purchase_quantity,
|
||||
'usage_per_day': usage_per_day,
|
||||
'waste_percentage': waste_percentage,
|
||||
'period_days': days_back
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Failed to calculate ingredient usage", error=str(e), ingredient_id=ingredient_id)
|
||||
raise
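
# Worked example (30-day window, illustrative numbers):
#   production_usage = 60, waste_quantity = 6, purchase_quantity = 90
#   usage_per_day    = 60 / 30       = 2.0
#   waste_percentage = 6 / 90 * 100  = 6.67 (approximately)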
|
||||
|
||||
async def delete_by_ingredient(self, ingredient_id: UUID, tenant_id: UUID) -> int:
|
||||
"""Delete all stock movements for a specific ingredient"""
|
||||
try:
|
||||
from sqlalchemy import delete
|
||||
from app.models.inventory import StockMovement
|
||||
|
||||
stmt = delete(StockMovement).where(
|
||||
and_(
|
||||
StockMovement.ingredient_id == ingredient_id,
|
||||
StockMovement.tenant_id == tenant_id
|
||||
)
|
||||
)
|
||||
|
||||
result = await self.session.execute(stmt)
|
||||
deleted_count = result.rowcount
|
||||
|
||||
logger.info(
|
||||
"Deleted stock movements for ingredient",
|
||||
ingredient_id=str(ingredient_id),
|
||||
tenant_id=str(tenant_id),
|
||||
deleted_count=deleted_count
|
||||
)
|
||||
|
||||
return deleted_count
|
||||
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
"Failed to delete stock movements for ingredient",
|
||||
error=str(e),
|
||||
ingredient_id=str(ingredient_id),
|
||||
tenant_id=str(tenant_id)
|
||||
)
|
||||
raise
|
||||
|
||||
async def create_automatic_waste_movement(
|
||||
self,
|
||||
tenant_id: UUID,
|
||||
ingredient_id: UUID,
|
||||
stock_id: UUID,
|
||||
quantity: float,
|
||||
unit_cost: Optional[float],
|
||||
batch_number: Optional[str],
|
||||
expiration_date: datetime,
|
||||
created_by: Optional[UUID] = None
|
||||
) -> StockMovement:
|
||||
"""Create an automatic waste movement for expired batches"""
|
||||
try:
|
||||
# Calculate total cost
|
||||
total_cost = None
|
||||
if unit_cost and quantity:
|
||||
total_cost = Decimal(str(unit_cost)) * Decimal(str(quantity))
|
||||
|
||||
# Generate reference number
|
||||
reference_number = f"AUTO-EXPIRE-{batch_number or stock_id}"
|
||||
|
||||
# Create movement data (without quantity_before/quantity_after - these will be calculated by the caller)
|
||||
movement_data = {
|
||||
'tenant_id': tenant_id,
|
||||
'ingredient_id': ingredient_id,
|
||||
'stock_id': stock_id,
|
||||
'movement_type': StockMovementType.WASTE.value,
|
||||
'quantity': quantity,
|
||||
'unit_cost': Decimal(str(unit_cost)) if unit_cost else None,
|
||||
'total_cost': total_cost,
|
||||
'reference_number': reference_number,
|
||||
'reason_code': 'expired',
|
||||
'notes': f"Batch automatically marked as expired. Expiration date: {expiration_date.strftime('%Y-%m-%d')}",
|
||||
'movement_date': datetime.now(),
|
||||
'created_by': created_by
|
||||
}
|
||||
|
||||
# Create the movement record
|
||||
movement = await self.create(movement_data)
|
||||
|
||||
logger.info("Created automatic waste movement for expired batch",
|
||||
movement_id=str(movement.id),
|
||||
tenant_id=str(tenant_id),
|
||||
ingredient_id=str(ingredient_id),
|
||||
stock_id=str(stock_id),
|
||||
quantity=quantity,
|
||||
batch_number=batch_number,
|
||||
reference_number=reference_number)
|
||||
|
||||
return movement
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Failed to create automatic waste movement",
|
||||
error=str(e),
|
||||
tenant_id=str(tenant_id),
|
||||
ingredient_id=str(ingredient_id),
|
||||
stock_id=str(stock_id))
|
||||
raise
|
||||
|
||||
async def get_inventory_waste_total(
|
||||
self,
|
||||
tenant_id: UUID,
|
||||
start_date: datetime,
|
||||
end_date: datetime
|
||||
) -> float:
|
||||
"""
|
||||
Get total inventory waste for sustainability reporting
|
||||
|
||||
Args:
|
||||
tenant_id: Tenant UUID
|
||||
start_date: Start date for period
|
||||
end_date: End date for period
|
||||
|
||||
Returns:
|
||||
Total waste quantity
|
||||
"""
|
||||
try:
|
||||
from sqlalchemy import text
|
||||
|
||||
query = text("""
|
||||
SELECT COALESCE(SUM(sm.quantity), 0) as total_inventory_waste
|
||||
FROM stock_movements sm
|
||||
JOIN ingredients i ON sm.ingredient_id = i.id
|
||||
WHERE i.tenant_id = :tenant_id
|
||||
AND sm.movement_type = 'WASTE'
|
||||
AND sm.movement_date BETWEEN :start_date AND :end_date
|
||||
""")
|
||||
|
||||
result = await self.session.execute(
|
||||
query,
|
||||
{
|
||||
'tenant_id': tenant_id,
|
||||
'start_date': start_date,
|
||||
'end_date': end_date
|
||||
}
|
||||
)
|
||||
row = result.fetchone()
|
||||
|
||||
return float(row.total_inventory_waste or 0)
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Failed to get inventory waste total", error=str(e), tenant_id=str(tenant_id))
|
||||
raise
|
||||
920
services/inventory/app/repositories/stock_repository.py
Normal file
@@ -0,0 +1,920 @@
|
||||
# services/inventory/app/repositories/stock_repository.py
|
||||
"""
|
||||
Stock Repository using Repository Pattern
|
||||
"""
|
||||
|
||||
from typing import List, Optional, Dict, Any, Tuple
|
||||
from uuid import UUID
|
||||
from datetime import datetime, timedelta
|
||||
from decimal import Decimal
|
||||
from sqlalchemy import select, func, and_, or_, desc, asc, update, exists
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
import structlog
|
||||
|
||||
from app.models.inventory import Stock, Ingredient
|
||||
from app.schemas.inventory import StockCreate, StockUpdate
|
||||
from shared.database.repository import BaseRepository
|
||||
from shared.utils.batch_generator import BatchCountProvider
|
||||
|
||||
logger = structlog.get_logger()
|
||||
|
||||
|
||||
class StockRepository(BaseRepository[Stock, StockCreate, StockUpdate], BatchCountProvider):
|
||||
"""Repository for stock operations"""
|
||||
|
||||
def __init__(self, session: AsyncSession):
|
||||
super().__init__(Stock, session)
|
||||
|
||||
async def create_stock_entry(self, stock_data: StockCreate, tenant_id: UUID) -> Stock:
|
||||
"""Create a new stock entry"""
|
||||
try:
|
||||
# Prepare data
|
||||
create_data = stock_data.model_dump()
|
||||
create_data['tenant_id'] = tenant_id
|
||||
|
||||
# Ensure production_stage is properly converted to enum value
|
||||
if 'production_stage' in create_data:
|
||||
if hasattr(create_data['production_stage'], 'value'):
|
||||
create_data['production_stage'] = create_data['production_stage'].value
|
||||
elif isinstance(create_data['production_stage'], str):
|
||||
# If it's a string, ensure it's the correct enum value
|
||||
from app.models.inventory import ProductionStage
|
||||
try:
|
||||
enum_obj = ProductionStage[create_data['production_stage']]
|
||||
create_data['production_stage'] = enum_obj.value
|
||||
except KeyError:
|
||||
# If it's already the value, keep it as is
|
||||
pass
|
||||
|
||||
# Calculate available quantity
|
||||
available_qty = create_data['current_quantity'] - create_data.get('reserved_quantity', 0)
|
||||
create_data['available_quantity'] = max(0, available_qty)
|
||||
|
||||
# Calculate total cost if unit cost provided
|
||||
if create_data.get('unit_cost') and create_data.get('current_quantity'):
|
||||
unit_cost = create_data['unit_cost']
|
||||
current_quantity = Decimal(str(create_data['current_quantity']))
|
||||
create_data['total_cost'] = unit_cost * current_quantity
|
||||
|
||||
# Create record
|
||||
record = await self.create(create_data)
|
||||
logger.info(
|
||||
"Created stock entry",
|
||||
stock_id=record.id,
|
||||
ingredient_id=record.ingredient_id,
|
||||
quantity=record.current_quantity,
|
||||
tenant_id=tenant_id
|
||||
)
|
||||
return record
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Failed to create stock entry", error=str(e), tenant_id=tenant_id)
|
||||
raise
|
||||
|
||||
async def get_stock_by_ingredient(
|
||||
self,
|
||||
tenant_id: UUID,
|
||||
ingredient_id: UUID,
|
||||
include_unavailable: bool = False
|
||||
) -> List[Stock]:
|
||||
"""Get all stock entries for a specific ingredient"""
|
||||
try:
|
||||
query = select(self.model).where(
|
||||
and_(
|
||||
self.model.tenant_id == tenant_id,
|
||||
self.model.ingredient_id == ingredient_id
|
||||
)
|
||||
)
|
||||
|
||||
if not include_unavailable:
|
||||
query = query.where(self.model.is_available == True)
|
||||
|
||||
query = query.order_by(asc(self.model.expiration_date))
|
||||
|
||||
result = await self.session.execute(query)
|
||||
return result.scalars().all()
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Failed to get stock by ingredient", error=str(e), ingredient_id=ingredient_id)
|
||||
raise
|
||||
|
||||
async def get_stock_by_product(
|
||||
self,
|
||||
tenant_id: UUID,
|
||||
inventory_product_id: UUID,
|
||||
include_unavailable: bool = False
|
||||
) -> List[Stock]:
|
||||
"""
|
||||
Get all stock entries for a specific product.
|
||||
|
||||
Note: inventory_product_id and ingredient_id refer to the same entity.
|
||||
The 'ingredients' table is used as a unified catalog for both raw ingredients
|
||||
and finished products, distinguished by the product_type field.
|
||||
|
||||
This method is an alias for get_stock_by_ingredient for clarity when called
|
||||
from contexts that use 'product' terminology (e.g., procurement service).
|
||||
"""
|
||||
return await self.get_stock_by_ingredient(
|
||||
tenant_id=tenant_id,
|
||||
ingredient_id=inventory_product_id,
|
||||
include_unavailable=include_unavailable
|
||||
)
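
# Hedged usage sketch from a procurement-style caller (names hypothetical):
#   batches = await stock_repo.get_stock_by_product(
#       tenant_id=tenant_id,
#       inventory_product_id=product_id,  # same UUID space as ingredient_id
#   )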
|
||||
|
||||
async def get_total_stock_by_ingredient(self, tenant_id: UUID, ingredient_id: UUID) -> Dict[str, float]:
|
||||
"""Get total stock quantities for an ingredient"""
|
||||
try:
|
||||
result = await self.session.execute(
|
||||
select(
|
||||
func.coalesce(func.sum(Stock.current_quantity), 0).label('total_quantity'),
|
||||
func.coalesce(func.sum(Stock.reserved_quantity), 0).label('total_reserved'),
|
||||
func.coalesce(func.sum(Stock.available_quantity), 0).label('total_available')
|
||||
).where(
|
||||
and_(
|
||||
Stock.tenant_id == tenant_id,
|
||||
Stock.ingredient_id == ingredient_id,
|
||||
Stock.is_available == True
|
||||
)
|
||||
)
|
||||
)
|
||||
|
||||
row = result.first()
|
||||
return {
|
||||
'total_quantity': float(row.total_quantity) if row.total_quantity else 0.0,
|
||||
'total_reserved': float(row.total_reserved) if row.total_reserved else 0.0,
|
||||
'total_available': float(row.total_available) if row.total_available else 0.0
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Failed to get total stock", error=str(e), ingredient_id=ingredient_id)
|
||||
raise
|
||||
|
||||
    async def get_expiring_stock(
        self,
        tenant_id: UUID,
        days_ahead: int = 7
    ) -> List[Tuple[Stock, Ingredient]]:
        """Get stock items expiring within specified days using state-dependent expiration logic"""
        try:
            expiry_date = datetime.now() + timedelta(days=days_ahead)

            # Use final_expiration_date if available (for transformed products),
            # otherwise use regular expiration_date
            result = await self.session.execute(
                select(Stock, Ingredient)
                .join(Ingredient, Stock.ingredient_id == Ingredient.id)
                .where(
                    and_(
                        Stock.tenant_id == tenant_id,
                        Stock.is_available == True,
                        or_(
                            and_(
                                Stock.final_expiration_date.isnot(None),
                                Stock.final_expiration_date <= expiry_date
                            ),
                            and_(
                                Stock.final_expiration_date.is_(None),
                                Stock.expiration_date.isnot(None),
                                Stock.expiration_date <= expiry_date
                            )
                        )
                    )
                )
                .order_by(
                    asc(
                        func.coalesce(Stock.final_expiration_date, Stock.expiration_date)
                    )
                )
            )

            return result.all()

        except Exception as e:
            logger.error("Failed to get expiring stock", error=str(e), tenant_id=tenant_id)
            raise

    async def get_expired_stock(self, tenant_id: UUID) -> List[Tuple[Stock, Ingredient]]:
        """Get stock items that have expired using state-dependent expiration logic"""
        try:
            current_date = datetime.now()

            # Use final_expiration_date if available (for transformed products),
            # otherwise use regular expiration_date
            result = await self.session.execute(
                select(Stock, Ingredient)
                .join(Ingredient, Stock.ingredient_id == Ingredient.id)
                .where(
                    and_(
                        Stock.tenant_id == tenant_id,
                        Stock.is_available == True,
                        or_(
                            and_(
                                Stock.final_expiration_date.isnot(None),
                                Stock.final_expiration_date < current_date
                            ),
                            and_(
                                Stock.final_expiration_date.is_(None),
                                Stock.expiration_date.isnot(None),
                                Stock.expiration_date < current_date
                            )
                        )
                    )
                )
                .order_by(
                    desc(
                        func.coalesce(Stock.final_expiration_date, Stock.expiration_date)
                    )
                )
            )

            return result.all()

        except Exception as e:
            logger.error("Failed to get expired stock", error=str(e), tenant_id=tenant_id)
            raise
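
    # Note on the state-dependent expiration filter above (illustrative): a
    # transformed batch is judged by final_expiration_date, an untransformed
    # one by expiration_date. For rows where at least one date is set, the
    # WHERE clause is equivalent to this simpler coalesce form:
    #
    #     effective_expiration = func.coalesce(
    #         Stock.final_expiration_date, Stock.expiration_date
    #     )
    #     stmt = select(Stock).where(effective_expiration <= expiry_date)
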
    async def reserve_stock(
        self,
        tenant_id: UUID,
        ingredient_id: UUID,
        quantity: float,
        fifo: bool = True
    ) -> List[Dict[str, Any]]:
        """Reserve stock using FIFO/LIFO method with state-dependent expiration"""
        try:
            # Order by appropriate expiration date based on transformation status
            effective_expiration = func.coalesce(Stock.final_expiration_date, Stock.expiration_date)
            order_clause = asc(effective_expiration) if fifo else desc(effective_expiration)

            result = await self.session.execute(
                select(Stock).where(
                    and_(
                        Stock.tenant_id == tenant_id,
                        Stock.ingredient_id == ingredient_id,
                        Stock.is_available == True,
                        Stock.available_quantity > 0
                    )
                ).order_by(order_clause)
            )

            stock_items = result.scalars().all()
            reservations = []
            remaining_qty = quantity

            for stock_item in stock_items:
                if remaining_qty <= 0:
                    break

                available = stock_item.available_quantity
                to_reserve = min(remaining_qty, available)

                # Update stock reservation
                new_reserved = stock_item.reserved_quantity + to_reserve
                new_available = stock_item.current_quantity - new_reserved

                await self.session.execute(
                    update(Stock)
                    .where(Stock.id == stock_item.id)
                    .values(
                        reserved_quantity=new_reserved,
                        available_quantity=new_available
                    )
                )

                reservations.append({
                    'stock_id': stock_item.id,
                    'reserved_quantity': to_reserve,
                    'batch_number': stock_item.batch_number,
                    'expiration_date': stock_item.expiration_date
                })

                remaining_qty -= to_reserve

            if remaining_qty > 0:
                logger.warning(
                    "Insufficient stock for reservation",
                    ingredient_id=ingredient_id,
                    requested=quantity,
                    unfulfilled=remaining_qty
                )

            return reservations

        except Exception as e:
            logger.error("Failed to reserve stock", error=str(e), ingredient_id=ingredient_id)
            raise
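
    # Illustrative usage (hypothetical IDs; assumes a repository bound to an
    # active AsyncSession and that the caller commits afterwards):
    #
    #     reservations = await repo.reserve_stock(
    #         tenant_id=tenant_id,
    #         ingredient_id=flour_id,
    #         quantity=25.0,
    #         fifo=True,  # earliest effective expiry is reserved first
    #     )
    #     total_reserved = sum(r['reserved_quantity'] for r in reservations)
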
    async def release_stock_reservation(
        self,
        stock_id: UUID,
        quantity: float
    ) -> Optional[Stock]:
        """Release reserved stock"""
        try:
            stock_item = await self.get_by_id(stock_id)
            if not stock_item:
                return None

            # Calculate new quantities
            new_reserved = max(0, stock_item.reserved_quantity - quantity)
            new_available = stock_item.current_quantity - new_reserved

            # Update stock
            await self.session.execute(
                update(Stock)
                .where(Stock.id == stock_id)
                .values(
                    reserved_quantity=new_reserved,
                    available_quantity=new_available
                )
            )

            # Refresh and return updated stock
            await self.session.refresh(stock_item)
            return stock_item

        except Exception as e:
            logger.error("Failed to release stock reservation", error=str(e), stock_id=stock_id)
            raise

    async def consume_stock(
        self,
        stock_id: UUID,
        quantity: float,
        from_reserved: bool = True
    ) -> Optional[Stock]:
        """Consume stock (reduce current quantity)"""
        try:
            stock_item = await self.get_by_id(stock_id)
            if not stock_item:
                return None

            if from_reserved:
                # Reduce from reserved quantity
                new_reserved = max(0, stock_item.reserved_quantity - quantity)
                new_current = max(0, stock_item.current_quantity - quantity)
                new_available = new_current - new_reserved
            else:
                # Reduce from available quantity
                new_current = max(0, stock_item.current_quantity - quantity)
                new_available = max(0, stock_item.available_quantity - quantity)
                new_reserved = stock_item.reserved_quantity

            # Update stock
            await self.session.execute(
                update(Stock)
                .where(Stock.id == stock_id)
                .values(
                    current_quantity=new_current,
                    reserved_quantity=new_reserved,
                    available_quantity=new_available,
                    is_available=new_current > 0
                )
            )

            # Refresh and return updated stock
            await self.session.refresh(stock_item)
            return stock_item

        except Exception as e:
            logger.error("Failed to consume stock", error=str(e), stock_id=stock_id)
            raise
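
    # Illustrative end-to-end flow (hypothetical IDs; the caller is assumed
    # to commit the session once both steps succeed):
    #
    #     reservations = await repo.reserve_stock(tenant_id, flour_id, 10.0)
    #     for r in reservations:
    #         await repo.consume_stock(
    #             stock_id=r['stock_id'],
    #             quantity=r['reserved_quantity'],
    #             from_reserved=True,  # releases the reservation as it consumes
    #         )
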
    async def get_stock_summary_by_tenant(self, tenant_id: UUID) -> Dict[str, Any]:
        """Get stock summary for tenant dashboard"""
        try:
            # Basic stock summary
            basic_result = await self.session.execute(
                select(
                    func.count(Stock.id).label('total_stock_items'),
                    func.coalesce(func.sum(Stock.total_cost), 0).label('total_stock_value'),
                    func.count(func.distinct(Stock.ingredient_id)).label('unique_ingredients')
                ).where(
                    and_(
                        Stock.tenant_id == tenant_id,
                        Stock.is_available == True
                    )
                )
            )
            basic_summary = basic_result.first()

            # Count expired items
            expired_result = await self.session.execute(
                select(func.count(Stock.id)).where(
                    and_(
                        Stock.tenant_id == tenant_id,
                        Stock.is_available == True,
                        Stock.expiration_date < datetime.now()
                    )
                )
            )
            expired_count = expired_result.scalar() or 0

            # Count expiring soon items
            expiring_result = await self.session.execute(
                select(func.count(Stock.id)).where(
                    and_(
                        Stock.tenant_id == tenant_id,
                        Stock.is_available == True,
                        Stock.expiration_date.isnot(None),
                        Stock.expiration_date <= datetime.now() + timedelta(days=7)
                    )
                )
            )
            expiring_count = expiring_result.scalar() or 0

            # Count out of stock items (ingredients with no available stock)
            out_of_stock_result = await self.session.execute(
                select(func.count(Ingredient.id)).where(
                    and_(
                        Ingredient.tenant_id == tenant_id,
                        ~exists(
                            select(1).where(
                                and_(
                                    Stock.ingredient_id == Ingredient.id,
                                    Stock.tenant_id == tenant_id,
                                    Stock.is_available == True,
                                    Stock.available_quantity > 0
                                )
                            )
                        )
                    )
                )
            )
            out_of_stock_count = out_of_stock_result.scalar() or 0

            return {
                'total_stock_items': basic_summary.total_stock_items or 0,
                'total_stock_value': float(basic_summary.total_stock_value) if basic_summary.total_stock_value else 0.0,
                'unique_ingredients': basic_summary.unique_ingredients or 0,
                'expired_items': expired_count,
                'expiring_soon_items': expiring_count,
                'out_of_stock_count': out_of_stock_count
            }

        except Exception as e:
            logger.error("Failed to get stock summary", error=str(e), tenant_id=tenant_id)
            raise

    async def mark_expired_stock(self, tenant_id: UUID) -> int:
        """Mark expired stock items as expired using state-dependent expiration logic"""
        try:
            current_date = datetime.now()

            # Mark items as expired based on final_expiration_date or expiration_date
            result = await self.session.execute(
                update(Stock)
                .where(
                    and_(
                        Stock.tenant_id == tenant_id,
                        Stock.is_expired == False,
                        or_(
                            and_(
                                Stock.final_expiration_date.isnot(None),
                                Stock.final_expiration_date < current_date
                            ),
                            and_(
                                Stock.final_expiration_date.is_(None),
                                Stock.expiration_date.isnot(None),
                                Stock.expiration_date < current_date
                            )
                        )
                    )
                )
                .values(is_expired=True, quality_status="expired")
            )

            expired_count = result.rowcount
            logger.info(f"Marked {expired_count} stock items as expired using state-dependent logic", tenant_id=tenant_id)

            return expired_count

        except Exception as e:
            logger.error("Failed to mark expired stock", error=str(e), tenant_id=tenant_id)
            raise

    async def get_stock_by_production_stage(
        self,
        tenant_id: UUID,
        production_stage: 'ProductionStage',
        ingredient_id: Optional[UUID] = None
    ) -> List['Stock']:
        """Get stock items by production stage"""
        try:
            conditions = [
                Stock.tenant_id == tenant_id,
                Stock.production_stage == production_stage,
                Stock.is_available == True
            ]

            if ingredient_id:
                conditions.append(Stock.ingredient_id == ingredient_id)

            result = await self.session.execute(
                select(Stock)
                .where(and_(*conditions))
                .order_by(asc(func.coalesce(Stock.final_expiration_date, Stock.expiration_date)))
            )

            return result.scalars().all()

        except Exception as e:
            logger.error("Failed to get stock by production stage", error=str(e), production_stage=production_stage)
            raise

    async def get_stock_entries(
        self,
        tenant_id: UUID,
        skip: int = 0,
        limit: int = 100,
        ingredient_id: Optional[UUID] = None,
        available_only: bool = True
    ) -> List[Stock]:
        """Get stock entries with filtering and pagination"""
        try:
            conditions = [Stock.tenant_id == tenant_id]

            if available_only:
                conditions.append(Stock.is_available == True)

            if ingredient_id:
                conditions.append(Stock.ingredient_id == ingredient_id)

            query = (
                select(Stock)
                .where(and_(*conditions))
                .order_by(desc(Stock.created_at))
                .offset(skip)
                .limit(limit)
            )

            result = await self.session.execute(query)
            return result.scalars().all()

        except Exception as e:
            logger.error("Failed to get stock entries", error=str(e), tenant_id=tenant_id)
            raise

    async def delete_by_ingredient(self, ingredient_id: UUID, tenant_id: UUID) -> int:
        """Delete all stock entries for a specific ingredient"""
        try:
            from sqlalchemy import delete

            stmt = delete(Stock).where(
                and_(
                    Stock.ingredient_id == ingredient_id,
                    Stock.tenant_id == tenant_id
                )
            )

            result = await self.session.execute(stmt)
            deleted_count = result.rowcount

            logger.info(
                "Deleted stock entries for ingredient",
                ingredient_id=str(ingredient_id),
                tenant_id=str(tenant_id),
                deleted_count=deleted_count
            )

            return deleted_count

        except Exception as e:
            logger.error(
                "Failed to delete stock entries for ingredient",
                error=str(e),
                ingredient_id=str(ingredient_id),
                tenant_id=str(tenant_id)
            )
            raise

    async def get_daily_batch_count(
        self,
        tenant_id: str,
        date_start: datetime,
        date_end: datetime,
        prefix: Optional[str] = None
    ) -> int:
        """Count batches created within [date_start, date_end] for the tenant
        (typically the current day), optionally filtered by batch-number prefix."""
        try:
            conditions = [
                Stock.tenant_id == tenant_id,
                Stock.created_at >= date_start,
                Stock.created_at <= date_end
            ]

            if prefix:
                conditions.append(Stock.batch_number.like(f"{prefix}-%"))

            stmt = select(func.count(Stock.id)).where(and_(*conditions))
            result = await self.session.execute(stmt)
            count = result.scalar() or 0

            logger.debug(
                "Retrieved daily batch count",
                tenant_id=tenant_id,
                prefix=prefix,
                count=count,
                date_start=date_start,
                date_end=date_end
            )

            return count

        except Exception as e:
            logger.error(
                "Failed to get daily batch count",
                error=str(e),
                tenant_id=tenant_id,
                prefix=prefix
            )
            raise
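
    # Sketch of how a caller might turn this count into a sequential batch
    # number (hypothetical naming convention, not defined by this repository):
    #
    #     start = datetime.now().replace(hour=0, minute=0, second=0, microsecond=0)
    #     end = start + timedelta(days=1)
    #     seq = await repo.get_daily_batch_count(tenant_id, start, end, prefix="RCV") + 1
    #     batch_number = f"RCV-{start:%Y%m%d}-{seq:03d}"
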
    async def get_expired_batches_for_processing(self, tenant_id: UUID) -> List[Tuple[Stock, Ingredient]]:
        """Get expired batches that haven't been processed yet (for automatic processing)"""
        try:
            current_date = datetime.now()

            # Find expired batches that are still available and not yet marked as expired
            result = await self.session.execute(
                select(Stock, Ingredient)
                .join(Ingredient, Stock.ingredient_id == Ingredient.id)
                .where(
                    and_(
                        Stock.tenant_id == tenant_id,
                        Stock.is_available == True,
                        Stock.is_expired == False,
                        Stock.current_quantity > 0,
                        or_(
                            and_(
                                Stock.final_expiration_date.isnot(None),
                                Stock.final_expiration_date <= current_date
                            ),
                            and_(
                                Stock.final_expiration_date.is_(None),
                                Stock.expiration_date.isnot(None),
                                Stock.expiration_date <= current_date
                            )
                        )
                    )
                )
                .order_by(
                    asc(func.coalesce(Stock.final_expiration_date, Stock.expiration_date))
                )
            )

            expired_batches = result.all()
            logger.info("Found expired batches for processing",
                        tenant_id=str(tenant_id),
                        count=len(expired_batches))

            return expired_batches

        except Exception as e:
            logger.error("Failed to get expired batches for processing",
                         error=str(e), tenant_id=tenant_id)
            raise

    async def mark_batch_as_expired(self, stock_id: UUID, tenant_id: UUID) -> bool:
        """Mark a specific batch as expired and unavailable"""
        try:
            result = await self.session.execute(
                update(Stock)
                .where(
                    and_(
                        Stock.id == stock_id,
                        Stock.tenant_id == tenant_id
                    )
                )
                .values(
                    is_expired=True,
                    is_available=False,
                    quality_status="expired",
                    updated_at=datetime.now()
                )
            )

            if result.rowcount > 0:
                logger.info("Marked batch as expired",
                            stock_id=str(stock_id),
                            tenant_id=str(tenant_id))
                return True
            else:
                logger.warning("No batch found to mark as expired",
                               stock_id=str(stock_id),
                               tenant_id=str(tenant_id))
                return False

        except Exception as e:
            logger.error("Failed to mark batch as expired",
                         error=str(e),
                         stock_id=str(stock_id),
                         tenant_id=str(tenant_id))
            raise

    async def update_stock_to_zero(self, stock_id: UUID, tenant_id: UUID) -> bool:
        """Update stock quantities to zero after moving to waste"""
        try:
            result = await self.session.execute(
                update(Stock)
                .where(
                    and_(
                        Stock.id == stock_id,
                        Stock.tenant_id == tenant_id
                    )
                )
                .values(
                    current_quantity=0,
                    available_quantity=0,
                    updated_at=datetime.now()
                )
            )

            if result.rowcount > 0:
                logger.info("Updated stock quantities to zero",
                            stock_id=str(stock_id),
                            tenant_id=str(tenant_id))
                return True
            else:
                logger.warning("No stock found to update to zero",
                               stock_id=str(stock_id),
                               tenant_id=str(tenant_id))
                return False

        except Exception as e:
            logger.error("Failed to update stock to zero",
                         error=str(e),
                         stock_id=str(stock_id),
                         tenant_id=str(tenant_id))
            raise

    async def get_expiring_products(self, tenant_id: UUID, days_threshold: int = 7) -> List[Dict[str, Any]]:
        """Get products expiring soon or already expired"""
        try:
            from sqlalchemy import text
            query = text("""
                SELECT
                    i.id as ingredient_id,
                    i.name as ingredient_name,
                    s.id as stock_id,
                    s.batch_number,
                    s.expiration_date,
                    s.current_quantity,
                    i.unit_of_measure,
                    s.unit_cost,
                    (s.current_quantity * s.unit_cost) as total_value,
                    CASE
                        WHEN s.expiration_date < CURRENT_DATE THEN 'expired'
                        WHEN s.expiration_date <= CURRENT_DATE + INTERVAL '1 day' THEN 'expires_today'
                        WHEN s.expiration_date <= CURRENT_DATE + INTERVAL '3 days' THEN 'expires_soon'
                        ELSE 'warning'
                    END as urgency,
                    EXTRACT(DAY FROM (s.expiration_date - CURRENT_DATE)) as days_until_expiry
                FROM stock s
                JOIN ingredients i ON s.ingredient_id = i.id
                WHERE i.tenant_id = :tenant_id
                AND s.is_available = true
                AND s.expiration_date <= CURRENT_DATE + (INTERVAL '1 day' * :days_threshold)
                ORDER BY s.expiration_date ASC, total_value DESC
            """)

            result = await self.session.execute(query, {
                "tenant_id": tenant_id,
                "days_threshold": days_threshold
            })
            return [dict(row._mapping) for row in result.fetchall()]

        except Exception as e:
            logger.error("Failed to get expiring products", error=str(e), tenant_id=str(tenant_id))
            raise

    async def get_temperature_breaches(self, tenant_id: UUID, hours_back: int = 24) -> List[Dict[str, Any]]:
        """Get temperature monitoring breaches"""
        try:
            from sqlalchemy import text
            query = text("""
                SELECT
                    tl.id,
                    tl.equipment_id,
                    tl.equipment_name,
                    tl.storage_type,
                    tl.temperature_celsius,
                    tl.min_threshold,
                    tl.max_threshold,
                    tl.is_within_range,
                    tl.recorded_at,
                    tl.alert_triggered,
                    EXTRACT(EPOCH FROM (NOW() - tl.recorded_at))/3600 as hours_ago,
                    CASE
                        WHEN tl.temperature_celsius < tl.min_threshold
                            THEN tl.min_threshold - tl.temperature_celsius
                        WHEN tl.temperature_celsius > tl.max_threshold
                            THEN tl.temperature_celsius - tl.max_threshold
                        ELSE 0
                    END as deviation
                FROM temperature_logs tl
                WHERE tl.tenant_id = :tenant_id
                AND tl.is_within_range = false
                AND tl.recorded_at > NOW() - (INTERVAL '1 hour' * :hours_back)
                AND tl.alert_triggered = false
                ORDER BY deviation DESC, tl.recorded_at DESC
            """)

            result = await self.session.execute(query, {
                "tenant_id": tenant_id,
                "hours_back": hours_back
            })
            return [dict(row._mapping) for row in result.fetchall()]

        except Exception as e:
            logger.error("Failed to get temperature breaches", error=str(e), tenant_id=str(tenant_id))
            raise

    async def get_waste_opportunities(self, tenant_id: UUID) -> List[Dict[str, Any]]:
        """Identify waste reduction opportunities"""
        try:
            from sqlalchemy import text
            query = text("""
                WITH waste_analysis AS (
                    SELECT
                        i.id as ingredient_id,
                        i.name as ingredient_name,
                        i.ingredient_category,
                        COUNT(sm.id) as waste_incidents,
                        SUM(sm.quantity) as total_waste_quantity,
                        SUM(sm.total_cost) as total_waste_cost,
                        AVG(sm.quantity) as avg_waste_per_incident,
                        MAX(sm.movement_date) as last_waste_date
                    FROM stock_movements sm
                    JOIN ingredients i ON sm.ingredient_id = i.id
                    WHERE i.tenant_id = :tenant_id
                    AND sm.movement_type = 'WASTE'
                    AND sm.movement_date > NOW() - INTERVAL '30 days'
                    GROUP BY i.id, i.name, i.ingredient_category
                    HAVING COUNT(sm.id) >= 3 OR SUM(sm.total_cost) > 50
                )
                SELECT * FROM waste_analysis
                ORDER BY total_waste_cost DESC, waste_incidents DESC
                LIMIT 20
            """)

            result = await self.session.execute(query, {"tenant_id": tenant_id})
            return [dict(row._mapping) for row in result.fetchall()]

        except Exception as e:
            logger.error("Failed to get waste opportunities", error=str(e), tenant_id=str(tenant_id))
            raise

    async def get_reorder_recommendations(self, tenant_id: UUID) -> List[Dict[str, Any]]:
        """Get ingredients that need reordering based on stock levels and usage"""
        try:
            from sqlalchemy import text
            query = text("""
                WITH usage_analysis AS (
                    SELECT
                        i.id,
                        i.name,
                        COALESCE(SUM(s.current_quantity), 0) as current_stock,
                        i.reorder_point,
                        i.low_stock_threshold,
                        COALESCE(SUM(sm.quantity) FILTER (WHERE sm.movement_date > NOW() - INTERVAL '7 days'), 0) / 7 as daily_usage,
                        i.preferred_supplier_id,
                        i.standard_order_quantity
                    FROM ingredients i
                    LEFT JOIN stock s ON s.ingredient_id = i.id AND s.is_available = true
                    LEFT JOIN stock_movements sm ON sm.ingredient_id = i.id
                        AND sm.movement_type = 'PRODUCTION_USE'
                        AND sm.movement_date > NOW() - INTERVAL '7 days'
                    WHERE i.tenant_id = :tenant_id
                    AND i.is_active = true
                    GROUP BY i.id, i.name, i.reorder_point, i.low_stock_threshold,
                             i.preferred_supplier_id, i.standard_order_quantity
                )
                SELECT *,
                    CASE
                        WHEN daily_usage > 0 THEN FLOOR(current_stock / NULLIF(daily_usage, 0))
                        ELSE 999
                    END as days_of_stock,
                    GREATEST(
                        standard_order_quantity,
                        CEIL(daily_usage * 14)
                    ) as recommended_order_quantity
                FROM usage_analysis
                WHERE current_stock <= reorder_point
                ORDER BY days_of_stock ASC, current_stock ASC
                LIMIT 50
            """)

            result = await self.session.execute(query, {"tenant_id": tenant_id})
            return [dict(row._mapping) for row in result.fetchall()]

        except Exception as e:
            logger.error("Failed to get reorder recommendations", error=str(e), tenant_id=str(tenant_id))
            raise
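
    # Illustrative dashboard assembly over the raw-SQL helpers above (assumes
    # an async caller; each helper returns plain dict rows):
    #
    #     expiring = await repo.get_expiring_products(tenant_id, days_threshold=7)
    #     breaches = await repo.get_temperature_breaches(tenant_id, hours_back=24)
    #     reorders = await repo.get_reorder_recommendations(tenant_id)
    #     urgent = [p for p in expiring if p['urgency'] in ('expired', 'expires_today')]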
257
services/inventory/app/repositories/transformation_repository.py
Normal file
@@ -0,0 +1,257 @@
# services/inventory/app/repositories/transformation_repository.py
"""
Product Transformation Repository using Repository Pattern
"""

from typing import List, Optional, Dict, Any
from uuid import UUID
from datetime import datetime, timedelta
from sqlalchemy import select, func, and_, or_, desc, asc
from sqlalchemy.ext.asyncio import AsyncSession
import structlog
import json
import uuid

from app.models.inventory import ProductTransformation, Ingredient, ProductionStage
from app.schemas.inventory import ProductTransformationCreate
from shared.database.repository import BaseRepository

logger = structlog.get_logger()


class TransformationRepository(BaseRepository[ProductTransformation, ProductTransformationCreate, dict]):
    """Repository for product transformation operations"""

    def __init__(self, session: AsyncSession):
        super().__init__(ProductTransformation, session)

    async def create_transformation(
        self,
        transformation_data: ProductTransformationCreate,
        tenant_id: UUID,
        created_by: Optional[UUID] = None,
        source_batch_numbers: Optional[List[str]] = None
    ) -> ProductTransformation:
        """Create a new product transformation record"""
        try:
            # Generate transformation reference
            transformation_ref = f"TRANS-{datetime.now().strftime('%Y%m%d')}-{str(uuid.uuid4())[:8].upper()}"

            # Prepare data
            create_data = transformation_data.model_dump()
            create_data['tenant_id'] = tenant_id
            create_data['created_by'] = created_by
            create_data['transformation_reference'] = transformation_ref

            # Calculate conversion ratio if not provided
            if not create_data.get('conversion_ratio'):
                create_data['conversion_ratio'] = create_data['target_quantity'] / create_data['source_quantity']

            # Store source batch numbers as JSON
            if source_batch_numbers:
                create_data['source_batch_numbers'] = json.dumps(source_batch_numbers)

            # Create record
            record = await self.create(create_data)
            logger.info(
                "Created product transformation",
                transformation_id=record.id,
                reference=record.transformation_reference,
                source_stage=record.source_stage.value,
                target_stage=record.target_stage.value,
                source_quantity=record.source_quantity,
                target_quantity=record.target_quantity,
                tenant_id=tenant_id
            )
            return record

        except Exception as e:
            logger.error("Failed to create transformation", error=str(e), tenant_id=tenant_id)
            raise
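
    # Worked example of the ratio fallback above (illustrative values): a
    # dough-to-baked transformation with source_quantity=50.0 and
    # target_quantity=45.0 and no explicit ratio gets
    # conversion_ratio == 45.0 / 50.0 == 0.9, i.e. a 10% process loss.
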
    async def get_transformations_by_ingredient(
        self,
        tenant_id: UUID,
        ingredient_id: UUID,
        is_source: bool = True,
        skip: int = 0,
        limit: int = 100,
        days_back: Optional[int] = None
    ) -> List[ProductTransformation]:
        """Get transformations for a specific ingredient"""
        try:
            if is_source:
                query = select(self.model).where(
                    and_(
                        self.model.tenant_id == tenant_id,
                        self.model.source_ingredient_id == ingredient_id
                    )
                )
            else:
                query = select(self.model).where(
                    and_(
                        self.model.tenant_id == tenant_id,
                        self.model.target_ingredient_id == ingredient_id
                    )
                )

            # Filter by date range if specified
            if days_back:
                start_date = datetime.now() - timedelta(days=days_back)
                query = query.where(self.model.transformation_date >= start_date)

            query = query.order_by(desc(self.model.transformation_date)).offset(skip).limit(limit)

            result = await self.session.execute(query)
            return result.scalars().all()

        except Exception as e:
            logger.error("Failed to get transformations by ingredient", error=str(e), ingredient_id=ingredient_id)
            raise

    async def get_transformations_by_stage(
        self,
        tenant_id: UUID,
        source_stage: Optional[ProductionStage] = None,
        target_stage: Optional[ProductionStage] = None,
        skip: int = 0,
        limit: int = 100,
        days_back: Optional[int] = None
    ) -> List[ProductTransformation]:
        """Get transformations by production stage"""
        try:
            conditions = [self.model.tenant_id == tenant_id]

            if source_stage:
                conditions.append(self.model.source_stage == source_stage)
            if target_stage:
                conditions.append(self.model.target_stage == target_stage)

            query = select(self.model).where(and_(*conditions))

            # Filter by date range if specified
            if days_back:
                start_date = datetime.now() - timedelta(days=days_back)
                query = query.where(self.model.transformation_date >= start_date)

            query = query.order_by(desc(self.model.transformation_date)).offset(skip).limit(limit)

            result = await self.session.execute(query)
            return result.scalars().all()

        except Exception as e:
            logger.error("Failed to get transformations by stage", error=str(e))
            raise

    async def get_transformation_by_reference(
        self,
        tenant_id: UUID,
        transformation_reference: str
    ) -> Optional[ProductTransformation]:
        """Get transformation by reference number"""
        try:
            result = await self.session.execute(
                select(self.model).where(
                    and_(
                        self.model.tenant_id == tenant_id,
                        self.model.transformation_reference == transformation_reference
                    )
                )
            )
            return result.scalar_one_or_none()

        except Exception as e:
            logger.error("Failed to get transformation by reference", error=str(e), reference=transformation_reference)
            raise

    async def get_transformation_summary_by_period(
        self,
        tenant_id: UUID,
        days_back: int = 30
    ) -> Dict[str, Any]:
        """Get transformation summary for specified period"""
        try:
            start_date = datetime.now() - timedelta(days=days_back)

            # Get transformation counts by stage combination
            result = await self.session.execute(
                select(
                    self.model.source_stage,
                    self.model.target_stage,
                    func.count(self.model.id).label('count'),
                    func.coalesce(func.sum(self.model.source_quantity), 0).label('total_source_quantity'),
                    func.coalesce(func.sum(self.model.target_quantity), 0).label('total_target_quantity')
                ).where(
                    and_(
                        self.model.tenant_id == tenant_id,
                        self.model.transformation_date >= start_date
                    )
                ).group_by(self.model.source_stage, self.model.target_stage)
            )

            summary = {}
            total_transformations = 0

            for row in result:
                source_stage = row.source_stage.value if row.source_stage else "unknown"
                target_stage = row.target_stage.value if row.target_stage else "unknown"

                stage_key = f"{source_stage}_to_{target_stage}"
                summary[stage_key] = {
                    'count': row.count,
                    'total_source_quantity': float(row.total_source_quantity),
                    'total_target_quantity': float(row.total_target_quantity),
                    'average_conversion_ratio': float(row.total_target_quantity) / float(row.total_source_quantity) if row.total_source_quantity > 0 else 0
                }
                total_transformations += row.count

            summary['total_transformations'] = total_transformations
            summary['period_days'] = days_back

            return summary

        except Exception as e:
            logger.error("Failed to get transformation summary", error=str(e), tenant_id=tenant_id)
            raise

    async def calculate_transformation_efficiency(
        self,
        tenant_id: UUID,
        source_ingredient_id: UUID,
        target_ingredient_id: UUID,
        days_back: int = 30
    ) -> Dict[str, float]:
        """Calculate transformation efficiency between ingredients"""
        try:
            start_date = datetime.now() - timedelta(days=days_back)

            result = await self.session.execute(
                select(
                    func.count(self.model.id).label('transformation_count'),
                    func.coalesce(func.sum(self.model.source_quantity), 0).label('total_source'),
                    func.coalesce(func.sum(self.model.target_quantity), 0).label('total_target'),
                    func.coalesce(func.avg(self.model.conversion_ratio), 0).label('avg_conversion_ratio')
                ).where(
                    and_(
                        self.model.tenant_id == tenant_id,
                        self.model.source_ingredient_id == source_ingredient_id,
                        self.model.target_ingredient_id == target_ingredient_id,
                        self.model.transformation_date >= start_date
                    )
                )
            )

            row = result.first()

            return {
                'transformation_count': row.transformation_count or 0,
                'total_source_quantity': float(row.total_source) if row.total_source else 0.0,
                'total_target_quantity': float(row.total_target) if row.total_target else 0.0,
                'average_conversion_ratio': float(row.avg_conversion_ratio) if row.avg_conversion_ratio else 0.0,
                'efficiency_percentage': (float(row.total_target) / float(row.total_source) * 100) if row.total_source and row.total_source > 0 else 0.0,
                'period_days': days_back
            }

        except Exception as e:
            logger.error("Failed to calculate transformation efficiency", error=str(e))
            raise
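
    # Worked example (illustrative): with total_source=200.0 and
    # total_target=170.0 over the period, efficiency_percentage is
    # 170.0 / 200.0 * 100 == 85.0. Note that average_conversion_ratio averages
    # the per-transformation ratios, so it can differ from this aggregate
    # figure when batch sizes vary.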
0
services/inventory/app/schemas/__init__.py
Normal file
250
services/inventory/app/schemas/dashboard.py
Normal file
@@ -0,0 +1,250 @@
# ================================================================
# services/inventory/app/schemas/dashboard.py
# ================================================================
"""
Dashboard and analytics schemas for Inventory Service
"""

from datetime import datetime
from decimal import Decimal
from typing import List, Optional, Dict, Any
from uuid import UUID
from pydantic import BaseModel, Field


# ===== Dashboard Summary Schemas =====

class InventoryDashboardSummary(BaseModel):
    """Comprehensive dashboard summary for inventory management"""

    # Current inventory metrics
    total_ingredients: int
    active_ingredients: int
    total_stock_value: Decimal
    total_stock_items: int

    # Stock status breakdown
    in_stock_items: int
    low_stock_items: int
    out_of_stock_items: int
    expired_items: int
    expiring_soon_items: int

    # Food safety metrics
    food_safety_alerts_active: int
    temperature_violations_today: int
    compliance_issues: int
    certifications_expiring_soon: int

    # Recent activity
    recent_stock_movements: int
    recent_purchases: int
    recent_waste: int
    recent_adjustments: int

    # Business model context
    business_model: Optional[str] = None  # individual_bakery, central_bakery
    business_model_confidence: Optional[Decimal] = None

    # Category breakdown
    stock_by_category: Dict[str, Any]
    alerts_by_severity: Dict[str, int]
    movements_by_type: Dict[str, int]

    # Performance indicators
    inventory_turnover_ratio: Optional[Decimal] = None
    waste_percentage: Optional[Decimal] = None
    compliance_score: Optional[Decimal] = None
    cost_per_unit_avg: Optional[Decimal] = None

    # Trending data
    stock_value_trend: List[Dict[str, Any]] = []
    alert_trend: List[Dict[str, Any]] = []

    class Config:
        from_attributes = True


class StockStatusSummary(BaseModel):
    """Summary of stock status by category"""
    category: str
    total_ingredients: int
    in_stock: int
    low_stock: int
    out_of_stock: int
    total_value: Decimal
    percentage_of_total: Decimal


class AlertSummary(BaseModel):
    """Summary of alerts by type and severity"""
    alert_type: str
    severity: str
    count: int
    oldest_alert_age_hours: Optional[int] = None
    average_resolution_time_hours: Optional[int] = None


class RecentActivity(BaseModel):
    """Recent activity item for dashboard"""
    activity_type: str  # stock_added, stock_consumed, alert_created, etc.
    description: str
    timestamp: datetime
    user_name: Optional[str] = None
    impact_level: str = Field(default="low")  # low, medium, high
    entity_id: Optional[UUID] = None
    entity_type: Optional[str] = None


# ===== Food Safety Dashboard Schemas =====

class FoodSafetyDashboard(BaseModel):
    """Food safety specific dashboard metrics"""

    # Compliance overview
    total_compliance_items: int
    compliant_items: int
    non_compliant_items: int
    pending_review_items: int
    compliance_percentage: Decimal

    # Temperature monitoring
    temperature_sensors_online: int
    temperature_sensors_total: int
    temperature_violations_24h: int
    current_temperature_status: str  # all_good, warnings, violations

    # Expiration tracking
    items_expiring_today: int
    items_expiring_this_week: int
    expired_items_requiring_action: int

    # Audit and certification status
    upcoming_audits: int
    overdue_audits: int
    certifications_valid: int
    certifications_expiring_soon: int

    # Risk assessment
    high_risk_items: int
    critical_alerts: int
    regulatory_notifications_pending: int

    # Recent safety events
    recent_safety_incidents: List[RecentActivity] = []

    class Config:
        from_attributes = True


class TemperatureMonitoringStatus(BaseModel):
    """Current temperature monitoring status"""
    location: str
    equipment_id: Optional[str] = None
    current_temperature: Decimal
    target_min: Decimal
    target_max: Decimal
    status: str  # normal, warning, critical
    last_reading: datetime
    hours_since_last_reading: Decimal
    alert_active: bool = False


class ComplianceStatusSummary(BaseModel):
    """Compliance status summary by standard"""
    standard: str
    standard_name: str
    total_items: int
    compliant: int
    non_compliant: int
    pending_review: int
    expired: int
    compliance_rate: Decimal
    next_audit_date: Optional[datetime] = None


# ===== Analytics and Reporting Schemas =====

class InventoryAnalytics(BaseModel):
    """Advanced analytics for inventory management"""

    # Turnover analysis
    inventory_turnover_rate: Decimal
    fast_moving_items: List[Dict[str, Any]]
    slow_moving_items: List[Dict[str, Any]]
    dead_stock_items: List[Dict[str, Any]]

    # Cost analysis
    total_inventory_cost: Decimal
    cost_by_category: Dict[str, Decimal]
    average_unit_cost_trend: List[Dict[str, Any]]
    waste_cost_analysis: Dict[str, Any]

    # Efficiency metrics
    stockout_frequency: Dict[str, int]
    overstock_frequency: Dict[str, int]
    reorder_accuracy: Decimal
    forecast_accuracy: Decimal

    # Quality and safety metrics
    quality_incidents_rate: Decimal
    food_safety_score: Decimal
    compliance_score_by_standard: Dict[str, Decimal]
    temperature_compliance_rate: Decimal

    # Supplier performance
    supplier_performance: List[Dict[str, Any]]
    delivery_reliability: Decimal
    quality_consistency: Decimal

    class Config:
        from_attributes = True


class BusinessModelInsights(BaseModel):
    """Business model insights based on inventory patterns"""
    detected_model: str  # individual_bakery, central_bakery, mixed
    confidence_score: Decimal

    # Model characteristics
    total_ingredient_types: int
    average_stock_per_ingredient: Decimal
    finished_product_ratio: Decimal
    supplier_diversity: int

    # Operational patterns
    order_frequency_pattern: str
    seasonal_variation: bool
    bulk_purchasing_indicator: Decimal
    production_scale_indicator: str

    # Recommendations
    business_model_specific_recommendations: List[str]
    optimization_opportunities: List[str]

    class Config:
        from_attributes = True


# ===== Request/Filter Schemas =====

class DashboardFilter(BaseModel):
    """Filtering options for dashboard data"""
    date_from: Optional[datetime] = None
    date_to: Optional[datetime] = None
    categories: Optional[List[str]] = None
    severity_levels: Optional[List[str]] = None
    alert_types: Optional[List[str]] = None
    business_model: Optional[str] = None
    include_inactive: bool = False


class AlertsFilter(BaseModel):
    """Filtering options for alerts dashboard"""
    alert_types: Optional[List[str]] = None
    severities: Optional[List[str]] = None
    statuses: Optional[List[str]] = None
    date_from: Optional[datetime] = None
    date_to: Optional[datetime] = None
    assigned_to: Optional[UUID] = None
    unresolved_only: bool = True
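

# Illustrative construction (hypothetical values):
#
#     filters = DashboardFilter(
#         date_from=datetime(2024, 1, 1),
#         categories=["flour", "dairy"],
#         severity_levels=["high", "critical"],
#         include_inactive=False,
#     )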
283
services/inventory/app/schemas/food_safety.py
Normal file
@@ -0,0 +1,283 @@
# ================================================================
# services/inventory/app/schemas/food_safety.py
# ================================================================
"""
Food safety schemas for Inventory Service
"""

from datetime import datetime
from decimal import Decimal
from typing import List, Optional, Dict, Any
from uuid import UUID
from pydantic import BaseModel, Field, validator


# ===== Food Safety Compliance Schemas =====

class FoodSafetyComplianceBase(BaseModel):
    ingredient_id: UUID
    standard: str
    compliance_status: str = Field(default="pending_review")
    certification_number: Optional[str] = None
    certifying_body: Optional[str] = None
    certification_date: Optional[datetime] = None
    expiration_date: Optional[datetime] = None
    requirements: Optional[Dict[str, Any]] = None
    compliance_notes: Optional[str] = None
    documentation_url: Optional[str] = None
    last_audit_date: Optional[datetime] = None
    next_audit_date: Optional[datetime] = None
    auditor_name: Optional[str] = None
    audit_score: Optional[float] = Field(None, ge=0, le=100)
    risk_level: str = Field(default="medium")
    risk_factors: Optional[List[str]] = None
    mitigation_measures: Optional[List[str]] = None
    requires_monitoring: bool = Field(default=True)
    monitoring_frequency_days: Optional[int] = Field(None, gt=0)


class FoodSafetyComplianceCreate(FoodSafetyComplianceBase):
    tenant_id: UUID


class FoodSafetyComplianceUpdate(BaseModel):
    compliance_status: Optional[str] = None
    certification_number: Optional[str] = None
    certifying_body: Optional[str] = None
    certification_date: Optional[datetime] = None
    expiration_date: Optional[datetime] = None
    requirements: Optional[Dict[str, Any]] = None
    compliance_notes: Optional[str] = None
    documentation_url: Optional[str] = None
    last_audit_date: Optional[datetime] = None
    next_audit_date: Optional[datetime] = None
    auditor_name: Optional[str] = None
    audit_score: Optional[float] = Field(None, ge=0, le=100)
    risk_level: Optional[str] = None
    risk_factors: Optional[List[str]] = None
    mitigation_measures: Optional[List[str]] = None
    requires_monitoring: Optional[bool] = None
    monitoring_frequency_days: Optional[int] = Field(None, gt=0)


class FoodSafetyComplianceResponse(FoodSafetyComplianceBase):
    id: UUID
    tenant_id: UUID
    is_active: bool
    created_at: datetime
    updated_at: datetime
    created_by: Optional[UUID] = None
    updated_by: Optional[UUID] = None

    class Config:
        from_attributes = True


# ===== Temperature Monitoring Schemas =====

class TemperatureLogBase(BaseModel):
    storage_location: str = Field(..., min_length=1, max_length=100)
    warehouse_zone: Optional[str] = Field(None, max_length=50)
    equipment_id: Optional[str] = Field(None, max_length=100)
    temperature_celsius: float
    humidity_percentage: Optional[float] = Field(None, ge=0, le=100)
    target_temperature_min: Optional[float] = None
    target_temperature_max: Optional[float] = None
    measurement_method: str = Field(default="manual")
    device_id: Optional[str] = Field(None, max_length=100)
    calibration_date: Optional[datetime] = None


class TemperatureLogCreate(TemperatureLogBase):
    tenant_id: UUID


class TemperatureLogResponse(TemperatureLogBase):
    id: UUID
    tenant_id: UUID
    is_within_range: bool
    alert_triggered: bool
    deviation_minutes: Optional[int] = None
    recorded_at: datetime
    created_at: datetime
    recorded_by: Optional[UUID] = None

    class Config:
        from_attributes = True


# ===== Food Safety Alert Schemas =====

class FoodSafetyAlertBase(BaseModel):
    alert_type: str
    severity: str = Field(default="medium")
    risk_level: str = Field(default="medium")
    source_entity_type: str
    source_entity_id: UUID
    ingredient_id: Optional[UUID] = None
    stock_id: Optional[UUID] = None
    title: str = Field(..., min_length=1, max_length=200)
    description: str = Field(..., min_length=1)
    detailed_message: Optional[str] = None
    regulatory_requirement: Optional[str] = Field(None, max_length=100)
    compliance_standard: Optional[str] = None
    regulatory_action_required: bool = Field(default=False)
    trigger_condition: Optional[str] = Field(None, max_length=200)
    threshold_value: Optional[Decimal] = None
    actual_value: Optional[Decimal] = None
    alert_data: Optional[Dict[str, Any]] = None
    environmental_factors: Optional[Dict[str, Any]] = None
    affected_products: Optional[List[UUID]] = None
    public_health_risk: bool = Field(default=False)
    business_impact: Optional[str] = None
    estimated_loss: Optional[Decimal] = Field(None, ge=0)


class FoodSafetyAlertCreate(FoodSafetyAlertBase):
    tenant_id: UUID
    alert_code: str = Field(..., min_length=1, max_length=50)


class FoodSafetyAlertUpdate(BaseModel):
    status: Optional[str] = None
    alert_state: Optional[str] = None
    immediate_actions_taken: Optional[List[str]] = None
    investigation_notes: Optional[str] = None
    resolution_action: Optional[str] = Field(None, max_length=200)
    resolution_notes: Optional[str] = None
    corrective_actions: Optional[List[str]] = None
    preventive_measures: Optional[List[str]] = None
    assigned_to: Optional[UUID] = None
    assigned_role: Optional[str] = Field(None, max_length=50)
    escalated_to: Optional[UUID] = None
    escalation_deadline: Optional[datetime] = None
    documentation: Optional[Dict[str, Any]] = None


class FoodSafetyAlertResponse(FoodSafetyAlertBase):
    id: UUID
    tenant_id: UUID
    alert_code: str
    status: str
    alert_state: str
    immediate_actions_taken: Optional[List[str]] = None
    investigation_notes: Optional[str] = None
    resolution_action: Optional[str] = None
    resolution_notes: Optional[str] = None
    corrective_actions: Optional[List[str]] = None
    preventive_measures: Optional[List[str]] = None
    first_occurred_at: datetime
    last_occurred_at: datetime
    acknowledged_at: Optional[datetime] = None
    resolved_at: Optional[datetime] = None
    escalation_deadline: Optional[datetime] = None
    occurrence_count: int
    is_recurring: bool
    recurrence_pattern: Optional[str] = None
    assigned_to: Optional[UUID] = None
    assigned_role: Optional[str] = None
    escalated_to: Optional[UUID] = None
    escalation_level: int
    notification_sent: bool
    notification_methods: Optional[List[str]] = None
    notification_recipients: Optional[List[str]] = None
    regulatory_notification_required: bool
    regulatory_notification_sent: bool
    documentation: Optional[Dict[str, Any]] = None
    audit_trail: Optional[List[Dict[str, Any]]] = None
    external_reference: Optional[str] = None
    detection_time: Optional[datetime] = None
    response_time_minutes: Optional[int] = None
    resolution_time_minutes: Optional[int] = None
    alert_accuracy: Optional[bool] = None
    false_positive: bool
    feedback_notes: Optional[str] = None
    created_at: datetime
    updated_at: datetime
    created_by: Optional[UUID] = None
    updated_by: Optional[UUID] = None

    class Config:
        from_attributes = True


# ===== Bulk Operations Schemas =====

class BulkTemperatureLogCreate(BaseModel):
    """Schema for bulk temperature logging"""
    tenant_id: UUID
    readings: List[TemperatureLogBase] = Field(..., min_items=1, max_items=100)


class BulkComplianceUpdate(BaseModel):
    """Schema for bulk compliance updates"""
    tenant_id: UUID
    updates: List[Dict[str, Any]] = Field(..., min_items=1, max_items=50)


# ===== Filter and Query Schemas =====

class FoodSafetyFilter(BaseModel):
    """Filtering options for food safety data"""
    compliance_standards: Optional[List[str]] = None
    compliance_statuses: Optional[List[str]] = None
    risk_levels: Optional[List[str]] = None
    alert_types: Optional[List[str]] = None
    severities: Optional[List[str]] = None
    date_from: Optional[datetime] = None
    date_to: Optional[datetime] = None
    assigned_to: Optional[UUID] = None
    include_resolved: bool = False
    regulatory_action_required: Optional[bool] = None


class TemperatureMonitoringFilter(BaseModel):
    """Filtering options for temperature monitoring"""
    storage_locations: Optional[List[str]] = None
    equipment_ids: Optional[List[str]] = None
    date_from: Optional[datetime] = None
    date_to: Optional[datetime] = None
    violations_only: bool = False
    alerts_only: bool = False


# ===== Analytics Schemas =====

class FoodSafetyMetrics(BaseModel):
    """Food safety performance metrics"""
    compliance_rate: Decimal = Field(..., ge=0, le=100)
    temperature_compliance_rate: Decimal = Field(..., ge=0, le=100)
    alert_response_time_avg: Optional[Decimal] = None
    alert_resolution_time_avg: Optional[Decimal] = None
    recurring_issues_count: int
    regulatory_violations: int
    certification_coverage: Decimal = Field(..., ge=0, le=100)
    audit_score_avg: Optional[Decimal] = Field(None, ge=0, le=100)
    risk_score: Decimal = Field(..., ge=0, le=10)


class TemperatureAnalytics(BaseModel):
    """Temperature monitoring analytics"""
    total_readings: int
    violations_count: int
    violation_rate: Decimal = Field(..., ge=0, le=100)
    average_temperature: Decimal
    temperature_range: Dict[str, Decimal]
    longest_violation_hours: Optional[int] = None
    equipment_performance: List[Dict[str, Any]]
    location_performance: List[Dict[str, Any]]


# ===== Notification Schemas =====

class AlertNotificationPreferences(BaseModel):
    """User preferences for alert notifications"""
    email_enabled: bool = True
    sms_enabled: bool = False
    whatsapp_enabled: bool = False
    dashboard_enabled: bool = True
    severity_threshold: str = Field(default="medium")  # Only notify for this severity and above
    alert_types: Optional[List[str]] = None  # Specific alert types to receive
    quiet_hours_start: Optional[str] = Field(None, pattern=r"^([0-1]?[0-9]|2[0-3]):[0-5][0-9]$")
    quiet_hours_end: Optional[str] = Field(None, pattern=r"^([0-1]?[0-9]|2[0-3]):[0-5][0-9]$")
    weekend_notifications: bool = True
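

# Illustrative example (hypothetical values): the quiet-hours fields take
# 24-hour "HH:MM" strings, enforced by the regex pattern above.
#
#     prefs = AlertNotificationPreferences(
#         sms_enabled=True,
#         severity_threshold="high",
#         quiet_hours_start="22:00",
#         quiet_hours_end="06:00",
#     )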
631
services/inventory/app/schemas/inventory.py
Normal file
@@ -0,0 +1,631 @@
# services/inventory/app/schemas/inventory.py
"""
Pydantic schemas for inventory API requests and responses
"""

from typing import Optional, List, Dict, Any
from datetime import datetime
from decimal import Decimal
from uuid import UUID
from pydantic import BaseModel, Field, validator
from typing import Generic, TypeVar
from enum import Enum

from app.models.inventory import UnitOfMeasure, IngredientCategory, StockMovementType, ProductType, ProductCategory, ProductionStage

T = TypeVar('T')


# ===== BASE SCHEMAS =====

class InventoryBaseSchema(BaseModel):
    """Base schema for inventory models"""

    class Config:
        from_attributes = True
        use_enum_values = True
        json_encoders = {
            datetime: lambda v: v.isoformat() if v else None,
            # Explicit None check so Decimal('0') serializes as 0.0, not None
            Decimal: lambda v: float(v) if v is not None else None
        }


# ===== INGREDIENT SCHEMAS =====

class IngredientCreate(InventoryBaseSchema):
    """Schema for creating ingredients and finished products"""
    name: str = Field(..., max_length=255, description="Product name")
    product_type: ProductType = Field(ProductType.INGREDIENT, description="Type of product (ingredient or finished_product)")
    sku: Optional[str] = Field(None, max_length=100, description="SKU code")
    barcode: Optional[str] = Field(None, max_length=50, description="Barcode")
    category: Optional[str] = Field(None, description="Product category (ingredient or finished product category)")
    subcategory: Optional[str] = Field(None, max_length=100, description="Subcategory")
    description: Optional[str] = Field(None, description="Ingredient description")
    brand: Optional[str] = Field(None, max_length=100, description="Brand name")
    unit_of_measure: UnitOfMeasure = Field(..., description="Unit of measure")
    package_size: Optional[float] = Field(None, gt=0, description="Package size")

    # Pricing
    # Note: average_cost is calculated automatically from purchases (not set on create)
    # All cost fields are optional - can be added later after onboarding
    standard_cost: Optional[Decimal] = Field(None, ge=0, description="Standard/target cost per unit for budgeting")

    # Stock management - all optional with sensible defaults for onboarding
    # These can be configured later based on actual usage patterns
    low_stock_threshold: Optional[float] = Field(None, ge=0, description="Low stock alert threshold")
    reorder_point: Optional[float] = Field(None, ge=0, description="Reorder point")
    reorder_quantity: Optional[float] = Field(None, gt=0, description="Default reorder quantity")
    max_stock_level: Optional[float] = Field(None, gt=0, description="Maximum stock level")

    # Shelf life (default value only - actual per batch)
    shelf_life_days: Optional[int] = Field(None, gt=0, description="Default shelf life in days")

    # Properties
    is_perishable: bool = Field(False, description="Is perishable")
    allergen_info: Optional[Dict[str, Any]] = Field(None, description="Allergen information")

    # NEW: Local production support
    produced_locally: bool = Field(False, description="If true, ingredient is produced in-house")
    recipe_id: Optional[str] = Field(None, description="Recipe ID for BOM explosion (if produced locally)")

    @validator('reorder_point')
    def validate_reorder_point(cls, v, values):
        # Only validate if both values are provided and not None
        low_stock = values.get('low_stock_threshold')
        if v is not None and low_stock is not None:
            try:
                if v <= low_stock:
                    raise ValueError('Reorder point must be greater than low stock threshold')
            except TypeError:
                # Skip validation if comparison fails due to type mismatch
                pass
        return v
|
||||
|
||||
|
||||
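

# Hedged, illustrative only (not part of the committed schema): how the
# reorder-point validator above behaves in practice. `UnitOfMeasure.KILOGRAM`
# is an assumed enum member name used purely for the example.
def _demo_reorder_point_validation():
    from pydantic import ValidationError

    # Valid: reorder point strictly above the low-stock threshold
    IngredientCreate(
        name="Flour T55",
        unit_of_measure=UnitOfMeasure.KILOGRAM,  # assumed member name
        low_stock_threshold=10,
        reorder_point=25,
    )

    try:
        IngredientCreate(
            name="Flour T55",
            unit_of_measure=UnitOfMeasure.KILOGRAM,  # assumed member name
            low_stock_threshold=10,
            reorder_point=5,  # rejected by validate_reorder_point
        )
    except ValidationError as exc:
        print(exc)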


class IngredientUpdate(InventoryBaseSchema):
    """Schema for updating ingredients and finished products"""
    name: Optional[str] = Field(None, max_length=255, description="Product name")
    product_type: Optional[ProductType] = Field(None, description="Type of product (ingredient or finished_product)")
    sku: Optional[str] = Field(None, max_length=100, description="SKU code")
    barcode: Optional[str] = Field(None, max_length=50, description="Barcode")
    category: Optional[str] = Field(None, description="Product category")
    subcategory: Optional[str] = Field(None, max_length=100, description="Subcategory")
    description: Optional[str] = Field(None, description="Ingredient description")
    brand: Optional[str] = Field(None, max_length=100, description="Brand name")
    unit_of_measure: Optional[UnitOfMeasure] = Field(None, description="Unit of measure")
    package_size: Optional[float] = Field(None, gt=0, description="Package size")

    # Pricing
    average_cost: Optional[Decimal] = Field(None, ge=0, description="Average cost per unit")
    standard_cost: Optional[Decimal] = Field(None, ge=0, description="Standard cost per unit")

    # Stock management
    low_stock_threshold: Optional[float] = Field(None, ge=0, description="Low stock alert threshold")
    reorder_point: Optional[float] = Field(None, ge=0, description="Reorder point")
    reorder_quantity: Optional[float] = Field(None, gt=0, description="Default reorder quantity")
    max_stock_level: Optional[float] = Field(None, gt=0, description="Maximum stock level")

    # Shelf life (default value only - actual per batch)
    shelf_life_days: Optional[int] = Field(None, gt=0, description="Default shelf life in days")

    # Properties
    is_active: Optional[bool] = Field(None, description="Is active")
    is_perishable: Optional[bool] = Field(None, description="Is perishable")
    allergen_info: Optional[Dict[str, Any]] = Field(None, description="Allergen information")

    # NEW: Local production support
    produced_locally: Optional[bool] = Field(None, description="If true, ingredient is produced in-house")
    recipe_id: Optional[str] = Field(None, description="Recipe ID for BOM explosion (if produced locally)")


class IngredientResponse(InventoryBaseSchema):
    """Schema for ingredient and finished product API responses"""
    id: str
    tenant_id: str
    name: str
    product_type: ProductType
    sku: Optional[str]
    barcode: Optional[str]
    category: Optional[str]  # Will be populated from ingredient_category or product_category
    subcategory: Optional[str]
    description: Optional[str]
    brand: Optional[str]
    unit_of_measure: UnitOfMeasure
    package_size: Optional[float]
    average_cost: Optional[float]
    last_purchase_price: Optional[float]
    standard_cost: Optional[float]
    low_stock_threshold: Optional[float]  # Now optional
    reorder_point: Optional[float]  # Now optional
    reorder_quantity: Optional[float]  # Now optional
    max_stock_level: Optional[float]
    shelf_life_days: Optional[int]  # Default value only
    is_active: bool
    is_perishable: bool
    allergen_info: Optional[Dict[str, Any]]

    # NEW: Local production support
    produced_locally: bool = False
    recipe_id: Optional[str] = None

    created_at: datetime
    updated_at: datetime
    created_by: Optional[str]

    # Computed fields
    current_stock: Optional[float] = None
    is_low_stock: Optional[bool] = None
    needs_reorder: Optional[bool] = None

    @validator('allergen_info', pre=True)
    def validate_allergen_info(cls, v):
        """Convert empty lists or lists to empty dict, handle None"""
        if v is None:
            return None
        if isinstance(v, list):
            # If it's an empty list, return None; if it's a non-empty list, convert to dict format
            return {"allergens": v} if v else None
        if isinstance(v, dict):
            return v
        # For any other type including invalid ones, return None
        return None


# ===== BULK INGREDIENT SCHEMAS =====

class BulkIngredientCreate(InventoryBaseSchema):
    """Schema for bulk creating ingredients"""
    ingredients: List[IngredientCreate] = Field(..., description="List of ingredients to create")


class BulkIngredientResult(InventoryBaseSchema):
    """Schema for individual result in bulk operation"""
    index: int = Field(..., description="Index of the ingredient in the original request")
    success: bool = Field(..., description="Whether the creation succeeded")
    ingredient: Optional[IngredientResponse] = Field(None, description="Created ingredient (if successful)")
    error: Optional[str] = Field(None, description="Error message (if failed)")


class BulkIngredientResponse(InventoryBaseSchema):
    """Schema for bulk ingredient creation response"""
    total_requested: int = Field(..., description="Total number of ingredients requested")
    total_created: int = Field(..., description="Number of ingredients successfully created")
    total_failed: int = Field(..., description="Number of ingredients that failed")
    results: List[BulkIngredientResult] = Field(..., description="Detailed results for each ingredient")
    transaction_id: str = Field(..., description="Transaction ID for audit trail")
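

# Hedged sketch (assumed service-side logic, not part of this commit) of how
# the bulk schemas compose: each row is attempted independently, so one bad
# row does not fail the batch, and the totals are derived from the per-row
# results. The audit-id choice is an assumption.
def _summarize_bulk(results: List[BulkIngredientResult]) -> BulkIngredientResponse:
    import uuid

    created = sum(1 for r in results if r.success)
    return BulkIngredientResponse(
        total_requested=len(results),
        total_created=created,
        total_failed=len(results) - created,
        results=results,
        transaction_id=str(uuid.uuid4()),  # assumption: any unique id serves as audit trail
    )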


# ===== STOCK SCHEMAS =====

class StockCreate(InventoryBaseSchema):
    """Schema for creating stock entries"""
    ingredient_id: str = Field(..., description="Ingredient ID")
    supplier_id: Optional[str] = Field(None, description="Supplier ID")
    batch_number: Optional[str] = Field(None, max_length=100, description="Batch number")
    lot_number: Optional[str] = Field(None, max_length=100, description="Lot number")
    supplier_batch_ref: Optional[str] = Field(None, max_length=100, description="Supplier batch reference")

    # Production stage tracking
    production_stage: ProductionStage = Field(default=ProductionStage.RAW_INGREDIENT, description="Production stage of the stock")
    transformation_reference: Optional[str] = Field(None, max_length=100, description="Transformation reference ID")

    current_quantity: float = Field(..., ge=0, description="Current quantity")
    received_date: Optional[datetime] = Field(None, description="Date received")
    expiration_date: Optional[datetime] = Field(None, description="Expiration date")
    best_before_date: Optional[datetime] = Field(None, description="Best before date")

    # Stage-specific expiration fields
    original_expiration_date: Optional[datetime] = Field(None, description="Original batch expiration (for par-baked items)")
    transformation_date: Optional[datetime] = Field(None, description="Date when product was transformed")
    final_expiration_date: Optional[datetime] = Field(None, description="Final expiration after transformation")

    unit_cost: Optional[Decimal] = Field(None, ge=0, description="Unit cost")
    storage_location: Optional[str] = Field(None, max_length=100, description="Storage location")
    warehouse_zone: Optional[str] = Field(None, max_length=50, description="Warehouse zone")
    shelf_position: Optional[str] = Field(None, max_length=50, description="Shelf position")

    quality_status: str = Field("good", description="Quality status")

    # Batch-specific storage requirements
    requires_refrigeration: bool = Field(False, description="Requires refrigeration")
    requires_freezing: bool = Field(False, description="Requires freezing")
    storage_temperature_min: Optional[float] = Field(None, description="Min storage temperature (°C)")
    storage_temperature_max: Optional[float] = Field(None, description="Max storage temperature (°C)")
    storage_humidity_max: Optional[float] = Field(None, ge=0, le=100, description="Max humidity (%)")
    shelf_life_days: Optional[int] = Field(None, gt=0, description="Batch-specific shelf life in days")
    storage_instructions: Optional[str] = Field(None, description="Batch-specific storage instructions")

    @validator('ingredient_id')
    def validate_ingredient_id(cls, v):
        """Validate ingredient_id is a valid UUID"""
        if not v:
            raise ValueError("ingredient_id is required")
        if isinstance(v, str):
            try:
                # Validate it's a proper UUID
                UUID(v)
                return v
            except (ValueError, AttributeError):
                raise ValueError(f"ingredient_id must be a valid UUID string, got: {v}")
        return str(v)

    @validator('supplier_id')
    def validate_supplier_id(cls, v):
        """Convert empty string to None for optional UUID field"""
        if isinstance(v, str) and v.strip() == '':
            return None
        return v

    @validator('storage_temperature_max')
    def validate_temperature_range(cls, v, values):
        # Only validate if both values are provided and not None
        min_temp = values.get('storage_temperature_min')
        if v is not None and min_temp is not None:
            try:
                if v <= min_temp:
                    raise ValueError('Max temperature must be greater than min temperature')
            except TypeError:
                # Skip validation if comparison fails due to type mismatch
                pass
        return v
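

# Hedged, illustrative only: the temperature-range validator above rejects a
# maximum at or below the minimum. The UUID value is an arbitrary valid
# example, not a real record.
def _demo_temperature_range_validation():
    from pydantic import ValidationError

    try:
        StockCreate(
            ingredient_id="0c6c1b3a-6a0f-4b63-9a93-2a9d3b5f7c11",  # any valid UUID string
            current_quantity=5,
            storage_temperature_min=4.0,
            storage_temperature_max=2.0,  # rejected: max must exceed min
        )
    except ValidationError as exc:
        print(exc)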


class StockUpdate(InventoryBaseSchema):
    """Schema for updating stock entries"""
    supplier_id: Optional[str] = Field(None, description="Supplier ID")
    batch_number: Optional[str] = Field(None, max_length=100, description="Batch number")
    lot_number: Optional[str] = Field(None, max_length=100, description="Lot number")
    supplier_batch_ref: Optional[str] = Field(None, max_length=100, description="Supplier batch reference")

    # Production stage tracking
    production_stage: Optional[ProductionStage] = Field(None, description="Production stage of the stock")
    transformation_reference: Optional[str] = Field(None, max_length=100, description="Transformation reference ID")

    current_quantity: Optional[float] = Field(None, ge=0, description="Current quantity")
    reserved_quantity: Optional[float] = Field(None, ge=0, description="Reserved quantity")
    received_date: Optional[datetime] = Field(None, description="Date received")
    expiration_date: Optional[datetime] = Field(None, description="Expiration date")
    best_before_date: Optional[datetime] = Field(None, description="Best before date")

    # Stage-specific expiration fields
    original_expiration_date: Optional[datetime] = Field(None, description="Original batch expiration (for par-baked items)")
    transformation_date: Optional[datetime] = Field(None, description="Date when product was transformed")
    final_expiration_date: Optional[datetime] = Field(None, description="Final expiration after transformation")

    unit_cost: Optional[Decimal] = Field(None, ge=0, description="Unit cost")
    storage_location: Optional[str] = Field(None, max_length=100, description="Storage location")
    warehouse_zone: Optional[str] = Field(None, max_length=50, description="Warehouse zone")
    shelf_position: Optional[str] = Field(None, max_length=50, description="Shelf position")

    is_available: Optional[bool] = Field(None, description="Is available")
    quality_status: Optional[str] = Field(None, description="Quality status")

    # Batch-specific storage requirements
    requires_refrigeration: Optional[bool] = Field(None, description="Requires refrigeration")
    requires_freezing: Optional[bool] = Field(None, description="Requires freezing")
    storage_temperature_min: Optional[float] = Field(None, description="Min storage temperature (°C)")
    storage_temperature_max: Optional[float] = Field(None, description="Max storage temperature (°C)")
    storage_humidity_max: Optional[float] = Field(None, ge=0, le=100, description="Max humidity (%)")
    shelf_life_days: Optional[int] = Field(None, gt=0, description="Batch-specific shelf life in days")
    storage_instructions: Optional[str] = Field(None, description="Batch-specific storage instructions")

    @validator('supplier_id')
    def validate_supplier_id(cls, v):
        """Convert empty string to None for optional UUID field"""
        if isinstance(v, str) and v.strip() == '':
            return None
        return v


class StockResponse(InventoryBaseSchema):
    """Schema for stock API responses"""
    id: str
    tenant_id: str
    ingredient_id: str
    supplier_id: Optional[str]
    batch_number: Optional[str]
    lot_number: Optional[str]
    supplier_batch_ref: Optional[str]

    # Production stage tracking
    production_stage: ProductionStage
    transformation_reference: Optional[str]

    current_quantity: float
    reserved_quantity: float
    available_quantity: float
    received_date: Optional[datetime]
    expiration_date: Optional[datetime]
    best_before_date: Optional[datetime]

    # Stage-specific expiration fields
    original_expiration_date: Optional[datetime]
    transformation_date: Optional[datetime]
    final_expiration_date: Optional[datetime]

    unit_cost: Optional[float]
    total_cost: Optional[float]
    storage_location: Optional[str]
    warehouse_zone: Optional[str]
    shelf_position: Optional[str]
    is_available: bool
    is_expired: bool
    quality_status: str

    # Batch-specific storage requirements
    requires_refrigeration: bool
    requires_freezing: bool
    storage_temperature_min: Optional[float]
    storage_temperature_max: Optional[float]
    storage_humidity_max: Optional[float]
    shelf_life_days: Optional[int]
    storage_instructions: Optional[str]
    created_at: datetime
    updated_at: datetime

    # Related data
    ingredient: Optional[IngredientResponse] = None


# ===== BULK STOCK SCHEMAS =====

class BulkStockCreate(InventoryBaseSchema):
    """Schema for bulk creating stock entries"""
    stocks: List[StockCreate] = Field(..., description="List of stock entries to create")


class BulkStockResult(InventoryBaseSchema):
    """Schema for individual result in bulk stock operation"""
    index: int = Field(..., description="Index of the stock in the original request")
    success: bool = Field(..., description="Whether the creation succeeded")
    stock: Optional[StockResponse] = Field(None, description="Created stock (if successful)")
    error: Optional[str] = Field(None, description="Error message (if failed)")


class BulkStockResponse(InventoryBaseSchema):
    """Schema for bulk stock creation response"""
    total_requested: int = Field(..., description="Total number of stock entries requested")
    total_created: int = Field(..., description="Number of stock entries successfully created")
    total_failed: int = Field(..., description="Number of stock entries that failed")
    results: List[BulkStockResult] = Field(..., description="Detailed results for each stock entry")
    transaction_id: str = Field(..., description="Transaction ID for audit trail")


# ===== STOCK MOVEMENT SCHEMAS =====

class StockMovementCreate(InventoryBaseSchema):
    """Schema for creating stock movements"""
    ingredient_id: str = Field(..., description="Ingredient ID")
    stock_id: Optional[str] = Field(None, description="Stock ID")
    movement_type: StockMovementType = Field(..., description="Movement type")
    quantity: float = Field(..., description="Quantity moved")

    unit_cost: Optional[Decimal] = Field(None, ge=0, description="Unit cost")
    reference_number: Optional[str] = Field(None, max_length=100, description="Reference number")
    supplier_id: Optional[str] = Field(None, description="Supplier ID")

    notes: Optional[str] = Field(None, description="Movement notes")
    reason_code: Optional[str] = Field(None, max_length=50, description="Reason code")
    movement_date: Optional[datetime] = Field(None, description="Movement date")

    @validator('ingredient_id')
    def validate_ingredient_id(cls, v):
        """Validate ingredient_id is a valid UUID"""
        if not v:
            raise ValueError("ingredient_id is required")
        if isinstance(v, str):
            try:
                # Validate it's a proper UUID
                UUID(v)
                return v
            except (ValueError, AttributeError):
                raise ValueError(f"ingredient_id must be a valid UUID string, got: {v}")
        return str(v)


class StockMovementResponse(InventoryBaseSchema):
    """Schema for stock movement API responses"""
    id: str
    tenant_id: str
    ingredient_id: str
    stock_id: Optional[str]
    movement_type: StockMovementType
    quantity: float
    unit_cost: Optional[float]
    total_cost: Optional[float]
    quantity_before: Optional[float]
    quantity_after: Optional[float]
    reference_number: Optional[str]
    supplier_id: Optional[str]
    notes: Optional[str]
    reason_code: Optional[str]
    movement_date: datetime
    created_at: datetime
    created_by: Optional[str]

    # Related data
    ingredient: Optional[IngredientResponse] = None


# ===== PRODUCT TRANSFORMATION SCHEMAS =====

class ProductTransformationCreate(InventoryBaseSchema):
    """Schema for creating product transformations"""
    source_ingredient_id: str = Field(..., description="Source ingredient ID")
    target_ingredient_id: str = Field(..., description="Target ingredient ID")
    source_stage: ProductionStage = Field(..., description="Source production stage")
    target_stage: ProductionStage = Field(..., description="Target production stage")

    source_quantity: float = Field(..., gt=0, description="Input quantity")
    target_quantity: float = Field(..., gt=0, description="Output quantity")
    conversion_ratio: Optional[float] = Field(None, gt=0, description="Conversion ratio (auto-calculated if not provided)")

    # Expiration handling
    expiration_calculation_method: str = Field("days_from_transformation", description="How to calculate expiration")
    expiration_days_offset: Optional[int] = Field(1, description="Days from transformation date for expiration")

    # Process details
    process_notes: Optional[str] = Field(None, description="Process notes")
    target_batch_number: Optional[str] = Field(None, max_length=100, description="Target batch number")

    # Source stock selection (optional - if not provided, uses FIFO)
    source_stock_ids: Optional[List[str]] = Field(None, description="Specific source stock IDs to transform")

    @validator('source_ingredient_id', 'target_ingredient_id')
    def validate_ingredient_ids(cls, v):
        """Validate ingredient IDs are valid UUIDs"""
        if not v:
            raise ValueError("ingredient_id is required")
        if isinstance(v, str):
            try:
                # Validate it's a proper UUID
                UUID(v)
                return v
            except (ValueError, AttributeError):
                raise ValueError(f"ingredient_id must be a valid UUID string, got: {v}")
        return str(v)
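

# Hedged sketch of the presumed service-side fallback when conversion_ratio is
# omitted (the actual calculation lives outside this file and is not shown in
# this commit): the ratio follows directly from the two quantities.
def _derive_conversion_ratio(payload: ProductTransformationCreate) -> float:
    if payload.conversion_ratio is not None:
        return payload.conversion_ratio
    # e.g. 100 par-baked units in, 95 finished units out -> ratio 0.95
    return payload.target_quantity / payload.source_quantity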


class ProductTransformationResponse(InventoryBaseSchema):
    """Schema for product transformation responses"""
    id: str
    tenant_id: str
    transformation_reference: str
    source_ingredient_id: str
    target_ingredient_id: str
    source_stage: ProductionStage
    target_stage: ProductionStage
    source_quantity: float
    target_quantity: float
    conversion_ratio: float
    expiration_calculation_method: str
    expiration_days_offset: Optional[int]
    transformation_date: datetime
    process_notes: Optional[str]
    performed_by: Optional[str]
    source_batch_numbers: Optional[str]
    target_batch_number: Optional[str]
    is_completed: bool
    is_reversed: bool
    created_at: datetime
    created_by: Optional[str]

    # Related data
    source_ingredient: Optional[IngredientResponse] = None
    target_ingredient: Optional[IngredientResponse] = None


# ===== ALERT SCHEMAS =====

class StockAlertResponse(InventoryBaseSchema):
    """Schema for stock alert API responses"""
    id: str
    tenant_id: str
    ingredient_id: str
    stock_id: Optional[str]
    alert_type: str
    severity: str
    title: str
    message: str
    current_quantity: Optional[float]
    threshold_value: Optional[float]
    expiration_date: Optional[datetime]
    is_active: bool
    is_acknowledged: bool
    acknowledged_by: Optional[str]
    acknowledged_at: Optional[datetime]
    is_resolved: bool
    resolved_by: Optional[str]
    resolved_at: Optional[datetime]
    resolution_notes: Optional[str]
    created_at: datetime
    updated_at: datetime

    # Related data
    ingredient: Optional[IngredientResponse] = None


# ===== DASHBOARD AND SUMMARY SCHEMAS =====

class InventorySummary(InventoryBaseSchema):
    """Inventory dashboard summary"""
    total_ingredients: int
    total_stock_value: float
    low_stock_alerts: int
    expiring_soon_items: int
    expired_items: int
    out_of_stock_items: int

    # By category
    stock_by_category: Dict[str, Dict[str, Any]]

    # Recent activity
    recent_movements: int
    recent_purchases: int
    recent_waste: int


class StockLevelSummary(InventoryBaseSchema):
    """Stock level summary for an ingredient"""
    ingredient_id: str
    ingredient_name: str
    unit_of_measure: str
    total_quantity: float
    available_quantity: float
    reserved_quantity: float

    # Status indicators
    is_low_stock: bool
    needs_reorder: bool
    has_expired_stock: bool

    # Batch information
    total_batches: int
    oldest_batch_date: Optional[datetime]
    newest_batch_date: Optional[datetime]
    next_expiration_date: Optional[datetime]

    # Cost information
    average_unit_cost: Optional[float]
    total_stock_value: Optional[float]


# ===== REQUEST/RESPONSE WRAPPER SCHEMAS =====

class PaginatedResponse(BaseModel, Generic[T]):
    """Generic paginated response"""
    items: List[T]
    total: int
    page: int
    size: int
    pages: int

    class Config:
        from_attributes = True
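

# Hedged usage sketch: the generic wrapper is parameterized per resource (see
# the type aliases at the end of this file), and `pages` is the ceiling of
# total / size. The numbers here are illustrative.
def _demo_pagination() -> "PaginatedResponse[IngredientResponse]":
    import math

    return PaginatedResponse[IngredientResponse](
        items=[],                   # would hold IngredientResponse objects
        total=42,
        page=1,
        size=20,
        pages=math.ceil(42 / 20),   # -> 3
    )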


class InventoryFilter(BaseModel):
    """Inventory filtering parameters"""
    category: Optional[IngredientCategory] = None
    is_active: Optional[bool] = None
    is_low_stock: Optional[bool] = None
    needs_reorder: Optional[bool] = None
    search: Optional[str] = None


class StockFilter(BaseModel):
    """Stock filtering parameters"""
    ingredient_id: Optional[str] = None
    production_stage: Optional[ProductionStage] = None
    transformation_reference: Optional[str] = None
    is_available: Optional[bool] = None
    is_expired: Optional[bool] = None
    expiring_within_days: Optional[int] = None
    storage_location: Optional[str] = None
    quality_status: Optional[str] = None


# Type aliases for paginated responses
IngredientListResponse = PaginatedResponse[IngredientResponse]
StockListResponse = PaginatedResponse[StockResponse]
StockMovementListResponse = PaginatedResponse[StockMovementResponse]
StockAlertListResponse = PaginatedResponse[StockAlertResponse]
217
services/inventory/app/schemas/sustainability.py
Normal file
@@ -0,0 +1,217 @@
# ================================================================
# services/inventory/app/schemas/sustainability.py
# ================================================================
"""
Sustainability Schemas - Environmental Impact & SDG Compliance
"""

from datetime import datetime
from typing import Dict, Any, List, Optional
from pydantic import BaseModel, Field


class PeriodInfo(BaseModel):
    """Time period for metrics"""
    start_date: str
    end_date: str
    days: int


class WasteMetrics(BaseModel):
    """Waste tracking metrics"""
    total_waste_kg: float = Field(description="Total waste in kilograms")
    production_waste_kg: float = Field(description="Waste from production processes")
    expired_waste_kg: float = Field(description="Waste from expired inventory")
    waste_percentage: float = Field(description="Waste as percentage of total production")
    waste_by_reason: Dict[str, float] = Field(description="Breakdown by waste reason")


class CO2Emissions(BaseModel):
    """CO2 emission metrics"""
    kg: float = Field(description="CO2 emissions in kilograms")
    tons: float = Field(description="CO2 emissions in tons")
    trees_to_offset: float = Field(description="Equivalent trees needed to offset emissions")


class WaterFootprint(BaseModel):
    """Water usage metrics"""
    liters: float = Field(description="Water footprint in liters")
    cubic_meters: float = Field(description="Water footprint in cubic meters")


class LandUse(BaseModel):
    """Land use metrics"""
    square_meters: float = Field(description="Land use in square meters")
    hectares: float = Field(description="Land use in hectares")


class HumanEquivalents(BaseModel):
    """Human-relatable equivalents for impact"""
    car_km_equivalent: float = Field(description="Equivalent kilometers driven by car")
    smartphone_charges: float = Field(description="Equivalent smartphone charges")
    showers_equivalent: float = Field(description="Equivalent showers taken")
    trees_planted: float = Field(description="Equivalent trees planted")


class EnvironmentalImpact(BaseModel):
    """Environmental impact of food waste"""
    co2_emissions: CO2Emissions
    water_footprint: WaterFootprint
    land_use: LandUse
    human_equivalents: HumanEquivalents


class SDG123Metrics(BaseModel):
    """UN SDG 12.3 specific metrics"""
    baseline_waste_percentage: float = Field(description="Baseline waste percentage")
    current_waste_percentage: float = Field(description="Current waste percentage")
    reduction_achieved: float = Field(description="Reduction achieved from baseline (%)")
    target_reduction: float = Field(description="Target reduction (50%)", default=50.0)
    progress_to_target: float = Field(description="Progress toward target (%)")
    status: str = Field(description="Status code: sdg_compliant, on_track, progressing, baseline")
    status_label: str = Field(description="Human-readable status")
    target_waste_percentage: float = Field(description="Target waste percentage to achieve")


class SDGCompliance(BaseModel):
    """SDG compliance assessment"""
    sdg_12_3: SDG123Metrics
    baseline_period: str = Field(description="Period used for baseline calculation")
    certification_ready: bool = Field(description="Ready for SDG certification")
    improvement_areas: List[str] = Field(description="Identified areas for improvement")
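

# Hedged sketch of how the SDG 12.3 figures presumably relate to one another
# (the actual service-side formula is not shown in this excerpt): reduction is
# measured relative to the baseline, and progress is that reduction measured
# against the 50% target.
def _sdg_12_3_progress(baseline_pct: float, current_pct: float,
                       target_reduction: float = 50.0):
    # Reduction achieved relative to baseline, in percent
    reduction = (baseline_pct - current_pct) / baseline_pct * 100 if baseline_pct else 0.0
    # Progress toward the target reduction, capped at 100%
    progress = min(100.0, reduction / target_reduction * 100)
    # Waste percentage that would satisfy the target
    target_waste = baseline_pct * (1 - target_reduction / 100)
    return reduction, progress, target_waste

# e.g. baseline 12% waste, current 9% -> 25% reduction, 50% of the way to the
# 50% target, with a 6% target waste percentage:
# _sdg_12_3_progress(12.0, 9.0) -> (25.0, 50.0, 6.0)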


class EnvironmentalImpactAvoided(BaseModel):
    """Environmental impact avoided through AI"""
    co2_kg: float = Field(description="CO2 emissions avoided (kg)")
    water_liters: float = Field(description="Water saved (liters)")


class AvoidedWaste(BaseModel):
    """Waste avoided through AI predictions"""
    waste_avoided_kg: float = Field(description="Waste avoided in kilograms")
    ai_assisted_batches: int = Field(description="Number of AI-assisted batches")
    environmental_impact_avoided: EnvironmentalImpactAvoided
    methodology: str = Field(description="Calculation methodology")


class FinancialImpact(BaseModel):
    """Financial impact of waste"""
    waste_cost_eur: float = Field(description="Cost of waste in euros")
    cost_per_kg: float = Field(description="Average cost per kg")
    potential_monthly_savings: float = Field(description="Potential monthly savings")
    annual_projection: float = Field(description="Annual cost projection")


class GrantProgramEligibility(BaseModel):
    """Eligibility for a specific grant program"""
    eligible: bool = Field(description="Whether eligible for this grant")
    confidence: str = Field(description="Confidence level: high, medium, low")
    requirements_met: bool = Field(description="Whether requirements are met")
    funding_eur: float = Field(description="Available funding in euros")
    deadline: str = Field(description="Application deadline")
    program_type: str = Field(description="Type: grant, loan, or certification")
    sector_specific: Optional[str] = Field(None, description="Sector if specific: bakery, retail, etc.")


class SpainCompliance(BaseModel):
    """Spain-specific legal compliance"""
    law_1_2025: bool = Field(description="Compliance with Spanish Law 1/2025 on food waste")
    circular_economy_strategy: bool = Field(description="Aligned with Spanish Circular Economy Strategy")


class GrantReadiness(BaseModel):
    """Grant application readiness assessment"""
    overall_readiness_percentage: float = Field(description="Overall readiness percentage")
    grant_programs: Dict[str, GrantProgramEligibility] = Field(description="Eligibility by program")
    recommended_applications: List[str] = Field(description="Recommended grant programs to apply for")
    spain_compliance: SpainCompliance = Field(description="Spain-specific compliance status")


class SustainabilityMetrics(BaseModel):
    """Complete sustainability metrics response"""
    period: PeriodInfo
    waste_metrics: WasteMetrics
    environmental_impact: EnvironmentalImpact
    sdg_compliance: SDGCompliance
    avoided_waste: AvoidedWaste
    financial_impact: FinancialImpact
    grant_readiness: GrantReadiness


class BaselineComparison(BaseModel):
    """Baseline comparison for grants"""
    baseline: float
    current: float
    improvement: float


class SupportingData(BaseModel):
    """Supporting data for grant applications"""
    baseline_comparison: BaselineComparison
    environmental_benefits: EnvironmentalImpact
    financial_benefits: FinancialImpact


class Certifications(BaseModel):
    """Certification status"""
    sdg_12_3_compliant: bool
    grant_programs_eligible: List[str]


class ExecutiveSummary(BaseModel):
    """Executive summary for grant reports"""
    total_waste_reduced_kg: float
    waste_reduction_percentage: float
    co2_emissions_avoided_kg: float
    financial_savings_eur: float
    sdg_compliance_status: str


class ReportMetadata(BaseModel):
    """Report metadata"""
    generated_at: str
    report_type: str
    period: PeriodInfo
    tenant_id: str


class GrantReport(BaseModel):
    """Complete grant application report"""
    report_metadata: ReportMetadata
    executive_summary: ExecutiveSummary
    detailed_metrics: SustainabilityMetrics
    certifications: Certifications
    supporting_data: SupportingData


# Request schemas

class SustainabilityMetricsRequest(BaseModel):
    """Request for sustainability metrics"""
    start_date: Optional[datetime] = Field(None, description="Start date for metrics")
    end_date: Optional[datetime] = Field(None, description="End date for metrics")


class GrantReportRequest(BaseModel):
    """Request for grant report export"""
    grant_type: str = Field("general", description="Type of grant: general, eu_horizon, farm_to_fork, etc.")
    start_date: Optional[datetime] = Field(None, description="Start date for report")
    end_date: Optional[datetime] = Field(None, description="End date for report")
    format: str = Field("json", description="Export format: json, pdf, csv")


# Widget/Dashboard schemas

class SustainabilityWidgetData(BaseModel):
    """Simplified data for dashboard widgets"""
    total_waste_kg: float
    waste_reduction_percentage: float
    co2_saved_kg: float
    water_saved_liters: float
    trees_equivalent: float
    sdg_status: str
    sdg_progress: float
    grant_programs_ready: int
    financial_savings_eur: float
0
services/inventory/app/services/__init__.py
Normal file
1156
services/inventory/app/services/dashboard_service.py
Normal file
File diff suppressed because it is too large
473
services/inventory/app/services/enterprise_inventory_service.py
Normal file
@@ -0,0 +1,473 @@
"""
|
||||
Enterprise Inventory Service
|
||||
Business logic for enterprise-level inventory management across outlets
|
||||
"""
|
||||
|
||||
from typing import List, Dict, Any, Optional
|
||||
from datetime import datetime, timedelta
|
||||
import uuid
|
||||
import structlog
|
||||
|
||||
logger = structlog.get_logger()
|
||||
|
||||
|
||||
class EnterpriseInventoryService:
|
||||
"""
|
||||
Service for managing inventory across enterprise networks
|
||||
"""
|
||||
|
||||
def __init__(self, inventory_client, tenant_client):
|
||||
self.inventory_client = inventory_client
|
||||
self.tenant_client = tenant_client
|
||||
|
||||

    async def get_child_outlets(self, parent_id: str) -> List[Dict[str, Any]]:
        """
        Get all child outlets for a parent tenant
        """
        try:
            # Get child tenants from tenant service
            children = await self.tenant_client.get_child_tenants(parent_id)

            # Enrich with location data
            enriched_outlets = []
            for child in children:
                # Get location data for this outlet
                locations = await self.tenant_client.get_tenant_locations(child['id'])

                outlet_info = {
                    'id': child['id'],
                    'name': child['name'],
                    'subdomain': child.get('subdomain'),
                    'location': locations[0] if locations else None
                }
                enriched_outlets.append(outlet_info)

            return enriched_outlets

        except Exception as e:
            logger.error("Failed to get child outlets", parent_id=parent_id, error=str(e))
            raise Exception(f"Failed to get child outlets: {str(e)}")

    async def get_inventory_coverage(self, outlet_id: str) -> Optional[Dict[str, Any]]:
        """
        Get inventory coverage metrics for a specific outlet.
        Returns None when the outlet has no inventory data.
        """
        try:
            # Get current inventory data
            inventory_data = await self.inventory_client.get_current_inventory(outlet_id)

            if not inventory_data or not inventory_data.get('items'):
                return None

            # Calculate coverage metrics
            total_items = len(inventory_data['items'])
            critical_count = 0
            high_risk_count = 0
            medium_risk_count = 0
            low_risk_count = 0
            total_coverage = 0

            for item in inventory_data['items']:
                current_stock = item.get('current_stock', 0)
                safety_stock = item.get('safety_stock', 1)  # Avoid division by zero

                if safety_stock <= 0:
                    safety_stock = 1

                coverage = min(100, (current_stock / safety_stock) * 100)
                total_coverage += coverage

                # Determine risk level
                if coverage < 30:
                    critical_count += 1
                elif coverage < 50:
                    high_risk_count += 1
                elif coverage < 70:
                    medium_risk_count += 1
                else:
                    low_risk_count += 1

            # Calculate average coverage
            avg_coverage = total_coverage / total_items if total_items > 0 else 0

            # Get fulfillment rate (simplified - in real implementation this would come from orders service)
            fulfillment_rate = await self._calculate_fulfillment_rate(outlet_id)

            # Determine overall status
            status = self._determine_inventory_status(critical_count, high_risk_count, avg_coverage)

            return {
                'outlet_id': outlet_id,
                'outlet_name': inventory_data.get('tenant_name', f'Outlet {outlet_id}'),
                'overall_coverage': round(avg_coverage, 1),
                'critical_items_count': critical_count,
                'high_risk_items_count': high_risk_count,
                'medium_risk_items_count': medium_risk_count,
                'low_risk_items_count': low_risk_count,
                'fulfillment_rate': round(fulfillment_rate, 1),
                'last_updated': datetime.now().isoformat(),
                'status': status
            }

        except Exception as e:
            logger.error("Failed to get inventory coverage", outlet_id=outlet_id, error=str(e))
            raise Exception(f"Failed to get inventory coverage: {str(e)}")
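
    # A hedged worked example (illustration only, not part of the committed
    # file) of the bucketing used in get_inventory_coverage above: coverage is
    # capped at 100% of safety stock, then classified by threshold.
    #
    #     def classify_item(current_stock, safety_stock):
    #         safety_stock = safety_stock if safety_stock > 0 else 1
    #         coverage = min(100, (current_stock / safety_stock) * 100)
    #         if coverage < 30:
    #             return coverage, 'critical'
    #         if coverage < 50:
    #             return coverage, 'high'
    #         if coverage < 70:
    #             return coverage, 'medium'
    #         return coverage, 'low'
    #
    #     classify_item(12, 50)   # -> (24.0, 'critical')
    #     classify_item(80, 50)   # -> (100, 'low'), capped at 100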

    async def _calculate_fulfillment_rate(self, outlet_id: str) -> float:
        """
        Calculate fulfillment rate for an outlet (simplified)
        In a real implementation, this would query the orders service
        """
        # This is a placeholder - real implementation would:
        # 1. Get recent orders from orders service
        # 2. Calculate % successfully fulfilled
        # 3. Return the rate

        # For demo purposes, return a reasonable default
        return 95.0

    def _determine_inventory_status(self, critical_count: int, high_risk_count: int, avg_coverage: float) -> str:
        """
        Determine overall inventory status based on risk factors
        """
        if critical_count > 5 or (critical_count > 0 and avg_coverage < 40):
            return 'critical'
        elif high_risk_count > 3 or (high_risk_count > 0 and avg_coverage < 60):
            return 'warning'
        else:
            return 'normal'

    async def get_network_inventory_summary(self, parent_id: str) -> Dict[str, Any]:
        """
        Get aggregated inventory summary across the entire network
        """
        try:
            # Get all child outlets
            child_outlets = await self.get_child_outlets(parent_id)

            if not child_outlets:
                return {
                    'total_outlets': 0,
                    'average_coverage': 0,
                    'average_fulfillment_rate': 0,
                    'critical_outlets': 0,
                    'warning_outlets': 0,
                    'normal_outlets': 0,
                    'total_critical_items': 0,
                    'network_health_score': 0
                }

            # Get coverage for each outlet
            coverage_data = []
            for outlet in child_outlets:
                coverage = await self.get_inventory_coverage(outlet['id'])
                if coverage:
                    coverage_data.append(coverage)

            if not coverage_data:
                return {
                    'total_outlets': len(child_outlets),
                    'average_coverage': 0,
                    'average_fulfillment_rate': 0,
                    'critical_outlets': 0,
                    'warning_outlets': 0,
                    'normal_outlets': len(child_outlets),
                    'total_critical_items': 0,
                    'network_health_score': 0
                }

            # Calculate network metrics
            total_coverage = sum(c['overall_coverage'] for c in coverage_data)
            total_fulfillment = sum(c['fulfillment_rate'] for c in coverage_data)

            avg_coverage = total_coverage / len(coverage_data)
            avg_fulfillment = total_fulfillment / len(coverage_data)

            critical_outlets = sum(1 for c in coverage_data if c['status'] == 'critical')
            warning_outlets = sum(1 for c in coverage_data if c['status'] == 'warning')
            normal_outlets = sum(1 for c in coverage_data if c['status'] == 'normal')

            total_critical_items = sum(c['critical_items_count'] for c in coverage_data)

            # Calculate network health score (weighted average)
            network_health = round(avg_coverage * 0.6 + avg_fulfillment * 0.4, 1)

            return {
                'total_outlets': len(child_outlets),
                'average_coverage': round(avg_coverage, 1),
                'average_fulfillment_rate': round(avg_fulfillment, 1),
                'critical_outlets': critical_outlets,
                'warning_outlets': warning_outlets,
                'normal_outlets': normal_outlets,
                'total_critical_items': total_critical_items,
                'network_health_score': network_health
            }

        except Exception as e:
            logger.error("Failed to get network inventory summary", parent_id=parent_id, error=str(e))
            raise Exception(f"Failed to get network inventory summary: {str(e)}")
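
    # Hedged arithmetic check of the weighted health score computed above
    # (coverage weighted 0.6, fulfillment 0.4):
    #
    #     avg_coverage, avg_fulfillment = 78.0, 95.0
    #     round(avg_coverage * 0.6 + avg_fulfillment * 0.4, 1)   # -> 84.8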

    async def get_outlet_inventory_details(self, outlet_id: str, product_id: Optional[str] = None, risk_level: Optional[str] = None) -> Dict[str, Any]:
        """
        Get detailed product-level inventory data for a specific outlet
        """
        try:
            # Get current inventory data
            inventory_data = await self.inventory_client.get_current_inventory(outlet_id)

            if not inventory_data or not inventory_data.get('items'):
                # Guard the name lookup: inventory_data may be None here
                outlet_name = inventory_data.get('tenant_name', f'Outlet {outlet_id}') if inventory_data else f'Outlet {outlet_id}'
                return {
                    'outlet_id': outlet_id,
                    'outlet_name': outlet_name,
                    'overall_coverage': 0,
                    'products': [],
                    'last_updated': datetime.now().isoformat()
                }

            # Process product details
            products = []
            total_coverage = 0
            counted_items = 0

            for item in inventory_data['items']:
                # Filter by product_id if specified
                if product_id and item.get('product_id') != product_id:
                    continue

                current_stock = item.get('current_stock', 0)
                safety_stock = item.get('safety_stock', 1)

                if safety_stock <= 0:
                    safety_stock = 1

                coverage = min(100, (current_stock / safety_stock) * 100)
                total_coverage += coverage
                counted_items += 1

                # Determine risk level
                if coverage < 30:
                    risk = 'critical'
                elif coverage < 50:
                    risk = 'high'
                elif coverage < 70:
                    risk = 'medium'
                else:
                    risk = 'low'

                # Filter by risk level if specified
                if risk_level and risk != risk_level:
                    continue

                # Calculate days until stockout (simplified)
                daily_usage = item.get('average_daily_usage', 1)
                days_until_stockout = None

                if daily_usage > 0:
                    # max(0, ...) already clamps negative values
                    days_until_stockout = max(0, int((current_stock - safety_stock) / daily_usage))

                product_detail = {
                    'product_id': item.get('product_id'),
                    'product_name': item.get('product_name', 'Unknown Product'),
                    'current_stock': current_stock,
                    'safety_stock': safety_stock,
                    'coverage_percentage': round(coverage, 1),
                    'risk_level': risk,
                    'days_until_stockout': days_until_stockout
                }

                products.append(product_detail)

            # Average coverage over the items actually evaluated (respects the product filter)
            avg_coverage = total_coverage / counted_items if counted_items else 0

            return {
                'outlet_id': outlet_id,
                'outlet_name': inventory_data.get('tenant_name', f'Outlet {outlet_id}'),
                'overall_coverage': round(avg_coverage, 1),
                'products': products,
                'last_updated': datetime.now().isoformat()
            }

        except Exception as e:
            logger.error("Failed to get outlet inventory details", outlet_id=outlet_id, error=str(e))
            raise Exception(f"Failed to get outlet inventory details: {str(e)}")

    async def get_inventory_alerts(self, parent_id: str) -> List[Dict[str, Any]]:
        """
        Get real-time inventory alerts across all outlets
        """
        try:
            # Get all child outlets
            child_outlets = await self.get_child_outlets(parent_id)

            alerts = []

            for outlet in child_outlets:
                outlet_id = outlet['id']
                outlet_name = outlet['name']

                # Get inventory coverage for this outlet
                coverage = await self.get_inventory_coverage(outlet_id)

                if coverage:
                    # Create alerts for critical items
                    if coverage['critical_items_count'] > 0:
                        alerts.append({
                            'alert_id': str(uuid.uuid4()),
                            'outlet_id': outlet_id,
                            'outlet_name': outlet_name,
                            'product_id': None,
                            'product_name': None,
                            'alert_type': 'low_coverage',
                            'severity': 'critical',
                            'current_coverage': coverage['overall_coverage'],
                            'threshold': 30,
                            'timestamp': datetime.now().isoformat(),
                            'message': f"Critical inventory coverage: {coverage['overall_coverage']}% (threshold: 30%)"
                        })

                    # Create alerts for high risk items
                    if coverage['high_risk_items_count'] > 0:
                        alerts.append({
                            'alert_id': str(uuid.uuid4()),
                            'outlet_id': outlet_id,
                            'outlet_name': outlet_name,
                            'product_id': None,
                            'product_name': None,
                            'alert_type': 'stockout_risk',
                            'severity': 'high',
                            'current_coverage': coverage['overall_coverage'],
                            'threshold': 50,
                            'timestamp': datetime.now().isoformat(),
                            'message': f"High stockout risk: {coverage['overall_coverage']}% coverage"
                        })

            return alerts

        except Exception as e:
            logger.error("Failed to get inventory alerts", parent_id=parent_id, error=str(e))
            raise Exception(f"Failed to get inventory alerts: {str(e)}")

    async def get_transfer_recommendations(self, parent_id: str, urgency: str = "medium") -> List[Dict[str, Any]]:
        """
        Get AI-powered inventory transfer recommendations
        """
        try:
            # Get inventory coverage for all outlets
            child_outlets = await self.get_child_outlets(parent_id)
            coverage_data = []

            for outlet in child_outlets:
                coverage = await self.get_inventory_coverage(outlet['id'])
                if coverage:
                    coverage_data.append(coverage)

            # Simple recommendation algorithm (in real implementation, this would be more sophisticated)
            recommendations = []

            # Find outlets with surplus and deficit
            surplus_outlets = [c for c in coverage_data if c['overall_coverage'] > 85]
            deficit_outlets = [c for c in coverage_data if c['overall_coverage'] < 60]

            # Generate transfer recommendations
            for deficit in deficit_outlets:
                for surplus in surplus_outlets:
                    # Transfer 2 percentage points per point below the 60% line, capped at 10
                    transfer_amount = min(10, (60 - deficit['overall_coverage']) * 2)

                    if transfer_amount > 0:
                        recommendations.append({
                            'recommendation_id': str(uuid.uuid4()),
                            'from_outlet_id': surplus['outlet_id'],
                            'from_outlet_name': surplus['outlet_name'],
                            'to_outlet_id': deficit['outlet_id'],
                            'to_outlet_name': deficit['outlet_name'],
                            'transfer_amount': transfer_amount,
                            'priority': self._calculate_priority(deficit, urgency),
                            'reason': f"Balance inventory: {surplus['outlet_name']} has {surplus['overall_coverage']}% coverage, {deficit['outlet_name']} has {deficit['overall_coverage']}% coverage",
                            'estimated_impact': f"Improve {deficit['outlet_name']} coverage by ~{transfer_amount}%"
                        })

            # Sort by priority
            recommendations.sort(key=lambda x: x['priority'], reverse=True)

            return recommendations

        except Exception as e:
            logger.error("Failed to get transfer recommendations", parent_id=parent_id, error=str(e))
            raise Exception(f"Failed to get transfer recommendations: {str(e)}")
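
    # Hedged check of the transfer sizing above: an outlet at 55% coverage sits
    # 5 points below the 60% deficit line, so it is offered
    # min(10, 5 * 2) = 10 percentage points of stock.
    #
    #     deficit_coverage = 55.0
    #     min(10, (60 - deficit_coverage) * 2)   # -> 10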

    def _calculate_priority(self, deficit_coverage: Dict[str, Any], urgency: str) -> int:
        """
        Calculate priority score for transfer recommendation
        """
        priority_scores = {
            'critical': 4,
            'high': 3,
            'medium': 2,
            'low': 1
        }

        urgency_score = priority_scores.get(urgency, 2)

        # Higher priority for lower coverage
        coverage_score = max(1, 5 - int(deficit_coverage['overall_coverage'] / 20))

        return urgency_score * coverage_score

    async def get_coverage_trends(self, parent_id: str, days: int = 30) -> List[Dict[str, Any]]:
        """
        Get historical inventory coverage trends
        """
        try:
            # In a real implementation, this would query historical data
            # For demo purposes, generate some sample trend data

            trends = []
            end_date = datetime.now()

            for i in range(days):
                date = end_date - timedelta(days=i)

                # Generate sample data with some variation
                base_coverage = 75
                variation = (i % 7) - 3  # Weekly pattern
                daily_variation = (i % 3) - 1  # Daily noise

                coverage = max(50, min(95, base_coverage + variation + daily_variation))

                trends.append({
                    'date': date.strftime('%Y-%m-%d'),
                    'average_coverage': round(coverage, 1),
                    'min_coverage': max(40, coverage - 15),
                    'max_coverage': min(95, coverage + 10)
                })

            # Sort by date (oldest first)
            trends.sort(key=lambda x: x['date'])

            return trends

        except Exception as e:
            logger.error("Failed to get coverage trends", parent_id=parent_id, error=str(e))
            raise Exception(f"Failed to get coverage trends: {str(e)}")

    async def verify_parent_child_relationship(self, parent_id: str, child_id: str) -> bool:
        """
        Verify that a child tenant belongs to a parent tenant
        """
        try:
            # Get child tenant info
            child_info = await self.tenant_client.get_tenant(child_id)

            if child_info.get('parent_tenant_id') != parent_id:
                raise HTTPException(
                    status_code=403,
                    detail="Child tenant does not belong to specified parent"
                )

            return True

        except HTTPException:
            # Preserve the 403 instead of wrapping it in a generic Exception
            raise
        except Exception as e:
            logger.error("Failed to verify parent-child relationship", parent_id=parent_id, child_id=child_id, error=str(e))
            raise Exception(f"Failed to verify relationship: {str(e)}")
599
services/inventory/app/services/food_safety_service.py
Normal file
@@ -0,0 +1,599 @@
# ================================================================
# services/inventory/app/services/food_safety_service.py
# ================================================================
"""
Food Safety Service - Business logic for food safety and compliance
"""

import uuid
from datetime import datetime, timedelta
from decimal import Decimal
from typing import List, Optional, Dict, Any
from uuid import UUID
import structlog

from shared.database.transactions import transactional

from app.core.config import settings
from app.models.food_safety import (
    FoodSafetyCompliance,
    TemperatureLog,
    FoodSafetyAlert,
    FoodSafetyStandard,
    ComplianceStatus,
    FoodSafetyAlertType
)
from app.repositories.food_safety_repository import FoodSafetyRepository
from app.schemas.food_safety import (
    FoodSafetyComplianceCreate,
    FoodSafetyComplianceUpdate,
    FoodSafetyComplianceResponse,
    TemperatureLogCreate,
    TemperatureLogResponse,
    FoodSafetyAlertCreate,
    FoodSafetyAlertUpdate,
    FoodSafetyAlertResponse,
    FoodSafetyMetrics,
    TemperatureAnalytics
)
from app.schemas.dashboard import FoodSafetyDashboard, TemperatureMonitoringStatus

logger = structlog.get_logger()


class FoodSafetyService:
    """Service for food safety and compliance operations"""

    def __init__(self):
        pass

    def _get_repository(self, db) -> FoodSafetyRepository:
        """Get repository instance for the current database session"""
        return FoodSafetyRepository(db)

    # ===== COMPLIANCE MANAGEMENT =====

    @transactional
    async def create_compliance_record(
        self,
        db,
        compliance_data: FoodSafetyComplianceCreate,
        user_id: Optional[UUID] = None
    ) -> FoodSafetyComplianceResponse:
        """Create a new food safety compliance record"""
        try:
            logger.info("Creating compliance record",
                        ingredient_id=str(compliance_data.ingredient_id),
                        standard=compliance_data.standard)

            # Validate compliance data
            await self._validate_compliance_data(db, compliance_data)

            # Create compliance record
            compliance = FoodSafetyCompliance(
                tenant_id=compliance_data.tenant_id,
                ingredient_id=compliance_data.ingredient_id,
                standard=FoodSafetyStandard(compliance_data.standard),
                compliance_status=ComplianceStatus(compliance_data.compliance_status),
                certification_number=compliance_data.certification_number,
                certifying_body=compliance_data.certifying_body,
                certification_date=compliance_data.certification_date,
                expiration_date=compliance_data.expiration_date,
                requirements=compliance_data.requirements,
                compliance_notes=compliance_data.compliance_notes,
                documentation_url=compliance_data.documentation_url,
                last_audit_date=compliance_data.last_audit_date,
                next_audit_date=compliance_data.next_audit_date,
                auditor_name=compliance_data.auditor_name,
                audit_score=compliance_data.audit_score,
                risk_level=compliance_data.risk_level,
                risk_factors=compliance_data.risk_factors,
                mitigation_measures=compliance_data.mitigation_measures,
                requires_monitoring=compliance_data.requires_monitoring,
                monitoring_frequency_days=compliance_data.monitoring_frequency_days,
                created_by=user_id,
                updated_by=user_id
            )

            # Create compliance record using repository
            repo = self._get_repository(db)
            compliance = await repo.create_compliance(compliance)

            # Check for compliance alerts
            await self._check_compliance_alerts(db, compliance)

            logger.info("Compliance record created",
                        compliance_id=str(compliance.id))

            return FoodSafetyComplianceResponse(**compliance.to_dict())

        except Exception as e:
            logger.error("Failed to create compliance record", error=str(e))
            raise
@transactional
|
||||
async def update_compliance_record(
|
||||
self,
|
||||
db,
|
||||
compliance_id: UUID,
|
||||
tenant_id: UUID,
|
||||
compliance_data: FoodSafetyComplianceUpdate,
|
||||
user_id: Optional[UUID] = None
|
||||
) -> Optional[FoodSafetyComplianceResponse]:
|
||||
"""Update an existing compliance record"""
|
||||
try:
|
||||
# Get existing compliance record using repository
|
||||
repo = self._get_repository(db)
|
||||
compliance = await repo.get_compliance_by_id(compliance_id, tenant_id)
|
||||
if not compliance:
|
||||
return None
|
||||
|
||||
# Update fields
|
||||
update_fields = compliance_data.dict(exclude_unset=True)
|
||||
for field, value in update_fields.items():
|
||||
if hasattr(compliance, field):
|
||||
if field in ['compliance_status'] and value:
|
||||
setattr(compliance, field, ComplianceStatus(value))
|
||||
else:
|
||||
setattr(compliance, field, value)
|
||||
|
||||
compliance.updated_by = user_id
|
||||
|
||||
# Update compliance record using repository
|
||||
compliance = await repo.update_compliance(compliance)
|
||||
|
||||
# Check for compliance alerts after update
|
||||
await self._check_compliance_alerts(db, compliance)
|
||||
|
||||
logger.info("Compliance record updated",
|
||||
compliance_id=str(compliance.id))
|
||||
|
||||
return FoodSafetyComplianceResponse(**compliance.to_dict())
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Failed to update compliance record",
|
||||
compliance_id=str(compliance_id),
|
||||
error=str(e))
|
||||
raise
|
||||
|
||||
# ===== TEMPERATURE MONITORING =====
|
||||
|
||||
@transactional
|
||||
async def log_temperature(
|
||||
self,
|
||||
db,
|
||||
temp_data: TemperatureLogCreate,
|
||||
user_id: Optional[UUID] = None
|
||||
) -> TemperatureLogResponse:
|
||||
"""Log a temperature reading"""
|
||||
try:
|
||||
# Determine if temperature is within range
|
||||
is_within_range = self._is_temperature_within_range(
|
||||
temp_data.temperature_celsius,
|
||||
temp_data.target_temperature_min,
|
||||
temp_data.target_temperature_max,
|
||||
temp_data.storage_location
|
||||
)
|
||||
|
||||
# Create temperature log
|
||||
temp_log = TemperatureLog(
|
||||
tenant_id=temp_data.tenant_id,
|
||||
storage_location=temp_data.storage_location,
|
||||
warehouse_zone=temp_data.warehouse_zone,
|
||||
equipment_id=temp_data.equipment_id,
|
||||
temperature_celsius=temp_data.temperature_celsius,
|
||||
humidity_percentage=temp_data.humidity_percentage,
|
||||
target_temperature_min=temp_data.target_temperature_min,
|
||||
target_temperature_max=temp_data.target_temperature_max,
|
||||
is_within_range=is_within_range,
|
||||
alert_triggered=not is_within_range,
|
||||
measurement_method=temp_data.measurement_method,
|
||||
device_id=temp_data.device_id,
|
||||
calibration_date=temp_data.calibration_date,
|
||||
recorded_by=user_id
|
||||
)
|
||||
|
||||
db.add(temp_log)
|
||||
await db.flush()
|
||||
await db.refresh(temp_log)
|
||||
|
||||
# Create alert if temperature is out of range
|
||||
if not is_within_range:
|
||||
await self._create_temperature_alert(db, temp_log)
|
||||
|
||||
logger.info("Temperature logged",
|
||||
location=temp_data.storage_location,
|
||||
temperature=temp_data.temperature_celsius,
|
||||
within_range=is_within_range)
|
||||
|
||||
return TemperatureLogResponse(**temp_log.to_dict())
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Failed to log temperature", error=str(e))
|
||||
raise
|
||||
|
||||
@transactional
|
||||
async def bulk_log_temperatures(
|
||||
self,
|
||||
db,
|
||||
temp_readings: List[TemperatureLogCreate],
|
||||
user_id: Optional[UUID] = None
|
||||
) -> List[TemperatureLogResponse]:
|
||||
"""Bulk log temperature readings"""
|
||||
try:
|
||||
results = []
|
||||
alerts_to_create = []
|
||||
|
||||
for temp_data in temp_readings:
|
||||
# Determine if temperature is within range
|
||||
is_within_range = self._is_temperature_within_range(
|
||||
temp_data.temperature_celsius,
|
||||
temp_data.target_temperature_min,
|
||||
temp_data.target_temperature_max,
|
||||
temp_data.storage_location
|
||||
)
|
||||
|
||||
# Create temperature log
|
||||
temp_log = TemperatureLog(
|
||||
tenant_id=temp_data.tenant_id,
|
||||
storage_location=temp_data.storage_location,
|
||||
warehouse_zone=temp_data.warehouse_zone,
|
||||
equipment_id=temp_data.equipment_id,
|
||||
temperature_celsius=temp_data.temperature_celsius,
|
||||
humidity_percentage=temp_data.humidity_percentage,
|
||||
target_temperature_min=temp_data.target_temperature_min,
|
||||
target_temperature_max=temp_data.target_temperature_max,
|
||||
is_within_range=is_within_range,
|
||||
alert_triggered=not is_within_range,
|
||||
measurement_method=temp_data.measurement_method,
|
||||
device_id=temp_data.device_id,
|
||||
calibration_date=temp_data.calibration_date,
|
||||
recorded_by=user_id
|
||||
)
|
||||
|
||||
db.add(temp_log)
|
||||
|
||||
if not is_within_range:
|
||||
alerts_to_create.append(temp_log)
|
||||
|
||||
results.append(TemperatureLogResponse(**temp_log.to_dict()))
|
||||
|
||||
await db.flush()
|
||||
|
||||
# Create alerts for out-of-range temperatures
|
||||
for temp_log in alerts_to_create:
|
||||
await self._create_temperature_alert(db, temp_log)
|
||||
|
||||
logger.info("Bulk temperature logging completed",
|
||||
count=len(temp_readings),
|
||||
violations=len(alerts_to_create))
|
||||
|
||||
return results
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Failed to bulk log temperatures", error=str(e))
|
||||
raise
|
||||
|
||||
# ===== ALERT MANAGEMENT =====
|
||||
|
||||
@transactional
|
||||
async def create_food_safety_alert(
|
||||
self,
|
||||
db,
|
||||
alert_data: FoodSafetyAlertCreate,
|
||||
user_id: Optional[UUID] = None
|
||||
) -> FoodSafetyAlertResponse:
|
||||
"""Create a food safety alert"""
|
||||
try:
|
||||
alert = FoodSafetyAlert(
|
||||
tenant_id=alert_data.tenant_id,
|
||||
alert_code=alert_data.alert_code,
|
||||
alert_type=FoodSafetyAlertType(alert_data.alert_type),
|
||||
severity=alert_data.severity,
|
||||
risk_level=alert_data.risk_level,
|
||||
source_entity_type=alert_data.source_entity_type,
|
||||
source_entity_id=alert_data.source_entity_id,
|
||||
ingredient_id=alert_data.ingredient_id,
|
||||
stock_id=alert_data.stock_id,
|
||||
title=alert_data.title,
|
||||
description=alert_data.description,
|
||||
detailed_message=alert_data.detailed_message,
|
||||
regulatory_requirement=alert_data.regulatory_requirement,
|
||||
compliance_standard=FoodSafetyStandard(alert_data.compliance_standard) if alert_data.compliance_standard else None,
|
||||
regulatory_action_required=alert_data.regulatory_action_required,
|
||||
trigger_condition=alert_data.trigger_condition,
|
||||
threshold_value=alert_data.threshold_value,
|
||||
actual_value=alert_data.actual_value,
|
||||
alert_data=alert_data.alert_data,
|
||||
environmental_factors=alert_data.environmental_factors,
|
||||
affected_products=alert_data.affected_products,
|
||||
public_health_risk=alert_data.public_health_risk,
|
||||
business_impact=alert_data.business_impact,
|
||||
estimated_loss=alert_data.estimated_loss,
|
||||
first_occurred_at=datetime.now(),
|
||||
last_occurred_at=datetime.now(),
|
||||
created_by=user_id
|
||||
)
|
||||
|
||||
db.add(alert)
|
||||
await db.flush()
|
||||
await db.refresh(alert)
|
||||
|
||||
# Send notifications
|
||||
await self._send_alert_notifications(alert)
|
||||
|
||||
logger.info("Food safety alert created",
|
||||
alert_id=str(alert.id),
|
||||
alert_type=alert_data.alert_type,
|
||||
severity=alert_data.severity)
|
||||
|
||||
return FoodSafetyAlertResponse(**alert.to_dict())
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Failed to create food safety alert", error=str(e))
|
||||
raise
|
||||
|
||||
# ===== DASHBOARD AND ANALYTICS =====
|
||||
|
||||
async def get_food_safety_dashboard(
|
||||
self,
|
||||
db,
|
||||
tenant_id: UUID
|
||||
) -> FoodSafetyDashboard:
|
||||
"""Get food safety dashboard data"""
|
||||
try:
|
||||
# Get repository instance
|
||||
repo = self._get_repository(db)
|
||||
|
||||
# Get compliance overview using repository
|
||||
compliance_stats = await repo.get_compliance_stats(tenant_id)
|
||||
total_compliance = compliance_stats["total"]
|
||||
compliant_items = compliance_stats["compliant"]
|
||||
compliance_percentage = (compliant_items / total_compliance * 100) if total_compliance > 0 else 0
|
||||
|
||||
# Get temperature monitoring status using repository
|
||||
temp_stats = await repo.get_temperature_stats(tenant_id)
|
||||
|
||||
# Get expiration tracking using repository
|
||||
expiration_stats = await repo.get_expiration_stats(tenant_id)
|
||||
|
||||
# Get alert counts using repository
|
||||
alert_stats = await repo.get_alert_stats(tenant_id)
|
||||
|
||||
return FoodSafetyDashboard(
|
||||
total_compliance_items=total_compliance,
|
||||
compliant_items=compliant_items,
|
||||
non_compliant_items=compliance_stats["non_compliant"],
|
||||
pending_review_items=compliance_stats["pending_review"],
|
||||
compliance_percentage=Decimal(str(compliance_percentage)),
|
||||
temperature_sensors_online=temp_stats["sensors_online"],
|
||||
temperature_sensors_total=temp_stats["sensors_online"], # Would need actual count
|
||||
temperature_violations_24h=temp_stats["violations_24h"],
|
||||
current_temperature_status="normal", # Would need to calculate
|
||||
items_expiring_today=expiration_stats["expiring_today"],
|
||||
items_expiring_this_week=expiration_stats["expiring_week"],
|
||||
expired_items_requiring_action=expiration_stats["expired_requiring_action"],
|
||||
upcoming_audits=0, # Would need to calculate
|
||||
overdue_audits=0, # Would need to calculate
|
||||
certifications_valid=compliant_items,
|
||||
certifications_expiring_soon=0, # Would need to calculate
|
||||
high_risk_items=alert_stats["high_risk"],
|
||||
critical_alerts=alert_stats["critical"],
|
||||
regulatory_notifications_pending=alert_stats["regulatory_pending"],
|
||||
recent_safety_incidents=[] # Would need to get recent incidents
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Failed to get food safety dashboard", error=str(e))
|
||||
raise
|
||||
|
||||
# ===== PRIVATE HELPER METHODS =====
|
||||
|
||||
async def _validate_compliance_data(self, db, compliance_data: FoodSafetyComplianceCreate):
|
||||
"""Validate compliance data for business rules"""
|
||||
# Check if ingredient exists using repository
|
||||
repo = self._get_repository(db)
|
||||
ingredient_exists = await repo.validate_ingredient_exists(
|
||||
compliance_data.ingredient_id,
|
||||
compliance_data.tenant_id
|
||||
)
|
||||
|
||||
if not ingredient_exists:
|
||||
raise ValueError("Ingredient not found")
|
||||
|
||||
# Validate standard
|
||||
try:
|
||||
FoodSafetyStandard(compliance_data.standard)
|
||||
except ValueError:
|
||||
raise ValueError(f"Invalid food safety standard: {compliance_data.standard}")
|
||||
|
||||
# Validate compliance status
|
||||
try:
|
||||
ComplianceStatus(compliance_data.compliance_status)
|
||||
except ValueError:
|
||||
raise ValueError(f"Invalid compliance status: {compliance_data.compliance_status}")
|
||||
|
||||
def _is_temperature_within_range(
|
||||
self,
|
||||
temperature: float,
|
||||
target_min: Optional[float],
|
||||
target_max: Optional[float],
|
||||
location: str
|
||||
) -> bool:
|
||||
"""Check if temperature is within acceptable range"""
|
||||
# Use target ranges if provided, otherwise use default ranges
|
||||
if target_min is not None and target_max is not None:
|
||||
return target_min <= temperature <= target_max
|
||||
|
||||
# Default ranges based on location type
|
||||
if "freezer" in location.lower():
|
||||
return settings.FREEZER_TEMP_MIN <= temperature <= settings.FREEZER_TEMP_MAX
|
||||
elif "refrigerat" in location.lower() or "fridge" in location.lower():
|
||||
return settings.REFRIGERATION_TEMP_MIN <= temperature <= settings.REFRIGERATION_TEMP_MAX
|
||||
else:
|
||||
return settings.ROOM_TEMP_MIN <= temperature <= settings.ROOM_TEMP_MAX
|
||||
|
||||
async def _create_temperature_alert(self, db, temp_log: TemperatureLog):
|
||||
"""Create an alert for temperature violation"""
|
||||
try:
|
||||
alert_code = f"TEMP-{uuid.uuid4().hex[:8].upper()}"
|
||||
|
||||
# Determine severity based on deviation
|
||||
target_min = temp_log.target_temperature_min or 0
|
||||
target_max = temp_log.target_temperature_max or 25
|
||||
deviation = max(
|
||||
abs(temp_log.temperature_celsius - target_min),
|
||||
abs(temp_log.temperature_celsius - target_max)
|
||||
)
|
||||
|
||||
if deviation > 10:
|
||||
severity = "critical"
|
||||
elif deviation > 5:
|
||||
severity = "high"
|
||||
else:
|
||||
severity = "medium"
|
||||
|
||||
alert = FoodSafetyAlert(
|
||||
tenant_id=temp_log.tenant_id,
|
||||
alert_code=alert_code,
|
||||
alert_type=FoodSafetyAlertType.TEMPERATURE_VIOLATION,
|
||||
severity=severity,
|
||||
risk_level="high" if severity == "critical" else "medium",
|
||||
source_entity_type="temperature_log",
|
||||
source_entity_id=temp_log.id,
|
||||
title=f"Temperature violation in {temp_log.storage_location}",
|
||||
description=f"Temperature reading of {temp_log.temperature_celsius}°C is outside acceptable range",
|
||||
regulatory_action_required=severity == "critical",
|
||||
trigger_condition="temperature_out_of_range",
|
||||
threshold_value=target_max,
|
||||
actual_value=temp_log.temperature_celsius,
|
||||
alert_data={
|
||||
"location": temp_log.storage_location,
|
||||
"equipment_id": temp_log.equipment_id,
|
||||
"target_range": f"{target_min}°C - {target_max}°C"
|
||||
},
|
||||
environmental_factors={
|
||||
"temperature": temp_log.temperature_celsius,
|
||||
"humidity": temp_log.humidity_percentage
|
||||
},
|
||||
first_occurred_at=datetime.now(),
|
||||
last_occurred_at=datetime.now()
|
||||
)
|
||||
|
||||
db.add(alert)
|
||||
await db.flush()
|
||||
|
||||
# Send notifications
|
||||
await self._send_alert_notifications(alert)
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Failed to create temperature alert", error=str(e))
|
||||
|
||||
async def _check_compliance_alerts(self, db, compliance: FoodSafetyCompliance):
|
||||
"""Check for compliance-related alerts"""
|
||||
try:
|
||||
alerts_to_create = []
|
||||
|
||||
# Check for expiring certifications
|
||||
if compliance.expiration_date:
|
||||
days_to_expiry = (compliance.expiration_date - datetime.now()).days
|
||||
if days_to_expiry <= settings.CERTIFICATION_EXPIRY_WARNING_DAYS:
|
||||
alert_code = f"CERT-{uuid.uuid4().hex[:8].upper()}"
|
||||
severity = "critical" if days_to_expiry <= 7 else "high"
|
||||
|
||||
alert = FoodSafetyAlert(
|
||||
tenant_id=compliance.tenant_id,
|
||||
alert_code=alert_code,
|
||||
alert_type=FoodSafetyAlertType.CERTIFICATION_EXPIRY,
|
||||
severity=severity,
|
||||
risk_level="high",
|
||||
source_entity_type="compliance",
|
||||
source_entity_id=compliance.id,
|
||||
ingredient_id=compliance.ingredient_id,
|
||||
title=f"Certification expiring soon - {compliance.standard.value}",
|
||||
description=f"Certification expires in {days_to_expiry} days",
|
||||
regulatory_action_required=True,
|
||||
compliance_standard=compliance.standard,
|
||||
first_occurred_at=datetime.now(),
|
||||
last_occurred_at=datetime.now()
|
||||
)
|
||||
alerts_to_create.append(alert)
|
||||
|
||||
# Check for overdue audits
|
||||
if compliance.next_audit_date and compliance.next_audit_date < datetime.now():
|
||||
alert_code = f"AUDIT-{uuid.uuid4().hex[:8].upper()}"
|
||||
|
||||
alert = FoodSafetyAlert(
|
||||
tenant_id=compliance.tenant_id,
|
||||
alert_code=alert_code,
|
||||
alert_type=FoodSafetyAlertType.CERTIFICATION_EXPIRY,
|
||||
severity="high",
|
||||
risk_level="medium",
|
||||
source_entity_type="compliance",
|
||||
source_entity_id=compliance.id,
|
||||
ingredient_id=compliance.ingredient_id,
|
||||
title=f"Audit overdue - {compliance.standard.value}",
|
||||
description="Scheduled audit is overdue",
|
||||
regulatory_action_required=True,
|
||||
compliance_standard=compliance.standard,
|
||||
first_occurred_at=datetime.now(),
|
||||
last_occurred_at=datetime.now()
|
||||
)
|
||||
alerts_to_create.append(alert)
|
||||
|
||||
# Add alerts to database
|
||||
for alert in alerts_to_create:
|
||||
db.add(alert)
|
||||
|
||||
if alerts_to_create:
|
||||
await db.flush()
|
||||
|
||||
# Send notifications
|
||||
for alert in alerts_to_create:
|
||||
await self._send_alert_notifications(alert)
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Failed to check compliance alerts", error=str(e))
|
||||
|
||||
async def _send_alert_notifications(self, alert: FoodSafetyAlert):
|
||||
"""Send notifications for food safety alerts"""
|
||||
try:
|
||||
if not settings.ENABLE_EMAIL_ALERTS:
|
||||
return
|
||||
|
||||
# Determine notification methods based on severity
|
||||
notification_methods = ["dashboard"]
|
||||
|
||||
if alert.severity in ["high", "critical"]:
|
||||
notification_methods.extend(["email"])
|
||||
|
||||
if settings.ENABLE_SMS_ALERTS and alert.severity == "critical":
|
||||
notification_methods.append("sms")
|
||||
|
||||
if settings.ENABLE_WHATSAPP_ALERTS and alert.public_health_risk:
|
||||
notification_methods.append("whatsapp")
|
||||
|
||||
# Send notification via notification service
|
||||
if self.notification_client:
|
||||
await self.notification_client.send_alert(
|
||||
str(alert.tenant_id),
|
||||
{
|
||||
"alert_id": str(alert.id),
|
||||
"alert_type": alert.alert_type.value,
|
||||
"severity": alert.severity,
|
||||
"title": alert.title,
|
||||
"description": alert.description,
|
||||
"methods": notification_methods,
|
||||
"regulatory_action_required": alert.regulatory_action_required,
|
||||
"public_health_risk": alert.public_health_risk
|
||||
}
|
||||
)
|
||||
|
||||
# Update alert with notification status
|
||||
alert.notification_sent = True
|
||||
alert.notification_methods = notification_methods
|
||||
|
||||
except Exception as e:
|
||||
logger.warning("Failed to send alert notifications",
|
||||
alert_id=str(alert.id),
|
||||
error=str(e))
|
||||
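
The deviation-to-severity banding in _create_temperature_alert is easy to sanity-check in isolation; a minimal sketch, assuming the same 0-25 °C fallback band and the 5/10-degree thresholds used above:

# Standalone sketch of the severity banding; thresholds mirror _create_temperature_alert.
def severity_for(temperature: float, target_min: float = 0.0, target_max: float = 25.0) -> str:
    # Distance outside the acceptable band; zero means the reading is in range
    deviation = max(target_min - temperature, temperature - target_max, 0.0)
    if deviation > 10:
        return "critical"
    elif deviation > 5:
        return "high"
    return "medium"

# A freezer reading of 8 °C against a -22..-18 °C band is 26 degrees over the max
print(severity_for(8.0, target_min=-22.0, target_max=-18.0))  # critical
print(severity_for(27.0))                                     # medium (2 degrees over)
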
484
services/inventory/app/services/internal_transfer_service.py
Normal file
@@ -0,0 +1,484 @@
"""
|
||||
Internal Transfer Service for Inventory Management
|
||||
Handles inventory ownership changes during internal transfers
|
||||
"""
|
||||
|
||||
import logging
|
||||
from typing import Dict, Any, List
|
||||
from datetime import datetime
|
||||
from decimal import Decimal
|
||||
import uuid
|
||||
|
||||
from shared.clients.tenant_client import TenantServiceClient
|
||||
from shared.clients.inventory_client import InventoryServiceClient
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class InternalTransferInventoryService:
|
||||
"""
|
||||
Service for handling inventory transfers during enterprise internal transfers
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
tenant_client: TenantServiceClient,
|
||||
inventory_client: InventoryServiceClient
|
||||
):
|
||||
self.tenant_client = tenant_client
|
||||
self.inventory_client = inventory_client
|
||||
|
||||
async def process_internal_delivery(
|
||||
self,
|
||||
parent_tenant_id: str,
|
||||
child_tenant_id: str,
|
||||
shipment_items: List[Dict[str, Any]],
|
||||
shipment_id: str
|
||||
) -> Dict[str, Any]:
|
||||
"""
|
||||
Process inventory ownership transfer when internal shipment is delivered
|
||||
|
||||
Args:
|
||||
parent_tenant_id: Source tenant (central production)
|
||||
child_tenant_id: Destination tenant (retail outlet)
|
||||
shipment_items: List of items being transferred with quantities
|
||||
shipment_id: ID of the shipment for reference
|
||||
|
||||
Returns:
|
||||
Dict with transfer results
|
||||
"""
|
||||
try:
|
||||
logger.info(
|
||||
"Processing internal inventory transfer",
|
||||
parent_tenant_id=parent_tenant_id,
|
||||
child_tenant_id=child_tenant_id,
|
||||
shipment_id=shipment_id,
|
||||
item_count=len(shipment_items)
|
||||
)
|
||||
|
||||
# Process each item in the shipment
|
||||
successful_transfers = []
|
||||
failed_transfers = []
|
||||
|
||||
for item in shipment_items:
|
||||
product_id = item.get('product_id')
|
||||
quantity = Decimal(str(item.get('delivered_quantity', item.get('quantity', 0))))
|
||||
|
||||
if not product_id or quantity <= 0:
|
||||
logger.warning(
|
||||
"Skipping invalid transfer item",
|
||||
product_id=product_id,
|
||||
quantity=quantity
|
||||
)
|
||||
continue
|
||||
|
||||
try:
|
||||
# Step 1: Deduct inventory from parent (central production)
|
||||
parent_subtraction_result = await self._subtract_from_parent_inventory(
|
||||
parent_tenant_id=parent_tenant_id,
|
||||
product_id=product_id,
|
||||
quantity=quantity,
|
||||
shipment_id=shipment_id
|
||||
)
|
||||
|
||||
# Step 2: Add inventory to child (retail outlet)
|
||||
child_addition_result = await self._add_to_child_inventory(
|
||||
child_tenant_id=child_tenant_id,
|
||||
product_id=product_id,
|
||||
quantity=quantity,
|
||||
shipment_id=shipment_id
|
||||
)
|
||||
|
||||
successful_transfers.append({
|
||||
'product_id': product_id,
|
||||
'quantity': float(quantity),
|
||||
'parent_result': parent_subtraction_result,
|
||||
'child_result': child_addition_result
|
||||
})
|
||||
|
||||
logger.info(
|
||||
"Internal inventory transfer completed",
|
||||
product_id=product_id,
|
||||
quantity=float(quantity),
|
||||
parent_tenant_id=parent_tenant_id,
|
||||
child_tenant_id=child_tenant_id
|
||||
)
|
||||
|
||||
except Exception as item_error:
|
||||
logger.error(
|
||||
"Failed to process inventory transfer for item",
|
||||
product_id=product_id,
|
||||
quantity=float(quantity),
|
||||
error=str(item_error),
|
||||
exc_info=True
|
||||
)
|
||||
|
||||
failed_transfers.append({
|
||||
'product_id': product_id,
|
||||
'quantity': float(quantity),
|
||||
'error': str(item_error)
|
||||
})
|
||||
|
||||
# Update shipment status in inventory records to reflect completed transfer
|
||||
await self._mark_shipment_as_completed_in_inventory(
|
||||
parent_tenant_id=parent_tenant_id,
|
||||
child_tenant_id=child_tenant_id,
|
||||
shipment_id=shipment_id
|
||||
)
|
||||
|
||||
total_transferred = sum(item['quantity'] for item in successful_transfers)
|
||||
|
||||
result = {
|
||||
'shipment_id': shipment_id,
|
||||
'parent_tenant_id': parent_tenant_id,
|
||||
'child_tenant_id': child_tenant_id,
|
||||
'transfers_completed': len(successful_transfers),
|
||||
'transfers_failed': len(failed_transfers),
|
||||
'total_quantity_transferred': total_transferred,
|
||||
'successful_transfers': successful_transfers,
|
||||
'failed_transfers': failed_transfers,
|
||||
'status': 'completed' if failed_transfers == 0 else 'partial_success',
|
||||
'processed_at': datetime.utcnow().isoformat()
|
||||
}
|
||||
|
||||
logger.info(
|
||||
"Internal inventory transfer processing completed",
|
||||
shipment_id=shipment_id,
|
||||
successfully_processed=len(successful_transfers),
|
||||
failed_count=len(failed_transfers)
|
||||
)
|
||||
|
||||
return result
|
||||
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
"Error processing internal inventory transfer",
|
||||
parent_tenant_id=parent_tenant_id,
|
||||
child_tenant_id=child_tenant_id,
|
||||
shipment_id=shipment_id,
|
||||
error=str(e),
|
||||
exc_info=True
|
||||
)
|
||||
raise
|
||||
|
||||
async def _subtract_from_parent_inventory(
|
||||
self,
|
||||
parent_tenant_id: str,
|
||||
product_id: str,
|
||||
quantity: Decimal,
|
||||
shipment_id: str
|
||||
) -> Dict[str, Any]:
|
||||
"""
|
||||
Subtract inventory from parent tenant (central production)
|
||||
"""
|
||||
try:
|
||||
# Check current inventory level in parent
|
||||
parent_stock = await self.inventory_client.get_product_stock(
|
||||
tenant_id=parent_tenant_id,
|
||||
product_id=product_id
|
||||
)
|
||||
|
||||
current_stock = Decimal(str(parent_stock.get('available_quantity', 0)))
|
||||
|
||||
if current_stock < quantity:
|
||||
raise ValueError(
|
||||
f"Insufficient inventory in parent tenant {parent_tenant_id}. "
|
||||
f"Required: {quantity}, Available: {current_stock}"
|
||||
)
|
||||
|
||||
# Create stock movement record with negative quantity
|
||||
stock_movement_data = {
|
||||
'product_id': product_id,
|
||||
'movement_type': 'INTERNAL_TRANSFER_OUT',
|
||||
'quantity': float(-quantity), # Negative for outbound
|
||||
'reference_type': 'internal_transfer',
|
||||
'reference_id': shipment_id,
|
||||
'source_tenant_id': parent_tenant_id,
|
||||
'destination_tenant_id': parent_tenant_id, # Self-reference for tracking
|
||||
'notes': f'Shipment to child tenant #{shipment_id}'
|
||||
}
|
||||
|
||||
# Execute the stock movement
|
||||
movement_result = await self.inventory_client.create_stock_movement(
|
||||
tenant_id=parent_tenant_id,
|
||||
movement_data=stock_movement_data
|
||||
)
|
||||
|
||||
logger.info(
|
||||
"Inventory subtracted from parent",
|
||||
parent_tenant_id=parent_tenant_id,
|
||||
product_id=product_id,
|
||||
quantity=float(quantity),
|
||||
movement_id=movement_result.get('id')
|
||||
)
|
||||
|
||||
return {
|
||||
'movement_id': movement_result.get('id'),
|
||||
'quantity_subtracted': float(quantity),
|
||||
'new_balance': float(current_stock - quantity),
|
||||
'status': 'success'
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
"Error subtracting from parent inventory",
|
||||
parent_tenant_id=parent_tenant_id,
|
||||
product_id=product_id,
|
||||
quantity=float(quantity),
|
||||
error=str(e)
|
||||
)
|
||||
raise
|
||||
|
||||
async def _add_to_child_inventory(
|
||||
self,
|
||||
child_tenant_id: str,
|
||||
product_id: str,
|
||||
quantity: Decimal,
|
||||
shipment_id: str
|
||||
) -> Dict[str, Any]:
|
||||
"""
|
||||
Add inventory to child tenant (retail outlet)
|
||||
"""
|
||||
try:
|
||||
# Create stock movement record with positive quantity
|
||||
stock_movement_data = {
|
||||
'product_id': product_id,
|
||||
'movement_type': 'INTERNAL_TRANSFER_IN',
|
||||
'quantity': float(quantity), # Positive for inbound
|
||||
'reference_type': 'internal_transfer',
|
||||
'reference_id': shipment_id,
|
||||
'source_tenant_id': child_tenant_id, # Self-reference from parent
|
||||
'destination_tenant_id': child_tenant_id,
|
||||
'notes': f'Internal transfer from parent tenant shipment #{shipment_id}'
|
||||
}
|
||||
|
||||
# Execute the stock movement
|
||||
movement_result = await self.inventory_client.create_stock_movement(
|
||||
tenant_id=child_tenant_id,
|
||||
movement_data=stock_movement_data
|
||||
)
|
||||
|
||||
logger.info(
|
||||
"Inventory added to child",
|
||||
child_tenant_id=child_tenant_id,
|
||||
product_id=product_id,
|
||||
quantity=float(quantity),
|
||||
movement_id=movement_result.get('id')
|
||||
)
|
||||
|
||||
return {
|
||||
'movement_id': movement_result.get('id'),
|
||||
'quantity_added': float(quantity),
|
||||
'status': 'success'
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
"Error adding to child inventory",
|
||||
child_tenant_id=child_tenant_id,
|
||||
product_id=product_id,
|
||||
quantity=float(quantity),
|
||||
error=str(e)
|
||||
)
|
||||
raise
|
||||
|
||||
async def _mark_shipment_as_completed_in_inventory(
|
||||
self,
|
||||
parent_tenant_id: str,
|
||||
child_tenant_id: str,
|
||||
shipment_id: str
|
||||
):
|
||||
"""
|
||||
Update inventory records to mark shipment as completed
|
||||
"""
|
||||
try:
|
||||
# In a real implementation, this would update inventory tracking records
|
||||
# to reflect that the internal transfer is complete
|
||||
# For now, we'll just log that we're tracking this
|
||||
|
||||
logger.info(
|
||||
"Marked internal transfer as completed in inventory tracking",
|
||||
parent_tenant_id=parent_tenant_id,
|
||||
child_tenant_id=child_tenant_id,
|
||||
shipment_id=shipment_id
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
"Error updating inventory completion status",
|
||||
parent_tenant_id=parent_tenant_id,
|
||||
child_tenant_id=child_tenant_id,
|
||||
shipment_id=shipment_id,
|
||||
error=str(e)
|
||||
)
|
||||
# This is not critical enough to fail the entire operation
|
||||
|
||||
async def get_internal_transfer_history(
|
||||
self,
|
||||
parent_tenant_id: str,
|
||||
child_tenant_id: str = None,
|
||||
start_date: str = None,
|
||||
end_date: str = None,
|
||||
limit: int = 100
|
||||
) -> List[Dict[str, Any]]:
|
||||
"""
|
||||
Get history of internal inventory transfers
|
||||
|
||||
Args:
|
||||
parent_tenant_id: Parent tenant ID
|
||||
child_tenant_id: Optional child tenant ID to filter by
|
||||
start_date: Optional start date filter
|
||||
end_date: Optional end date filter
|
||||
limit: Max results to return
|
||||
|
||||
Returns:
|
||||
List of internal transfer records
|
||||
"""
|
||||
try:
|
||||
# Build filter conditions
|
||||
filters = {
|
||||
'reference_type': 'internal_transfer'
|
||||
}
|
||||
|
||||
if child_tenant_id:
|
||||
filters['destination_tenant_id'] = child_tenant_id
|
||||
if start_date:
|
||||
filters['created_after'] = start_date
|
||||
if end_date:
|
||||
filters['created_before'] = end_date
|
||||
|
||||
# Query inventory movements for internal transfers
|
||||
parent_movements = await self.inventory_client.get_stock_movements(
|
||||
tenant_id=parent_tenant_id,
|
||||
filters=filters,
|
||||
limit=limit
|
||||
)
|
||||
|
||||
# Filter for outbound transfers (negative values)
|
||||
outbound_transfers = [m for m in parent_movements if m.get('quantity', 0) < 0]
|
||||
|
||||
# Also get inbound transfers for the children if specified
|
||||
all_transfers = outbound_transfers
|
||||
|
||||
if child_tenant_id:
|
||||
child_movements = await self.inventory_client.get_stock_movements(
|
||||
tenant_id=child_tenant_id,
|
||||
filters=filters,
|
||||
limit=limit
|
||||
)
|
||||
# Filter for inbound transfers (positive values)
|
||||
inbound_transfers = [m for m in child_movements if m.get('quantity', 0) > 0]
|
||||
all_transfers.extend(inbound_transfers)
|
||||
|
||||
# Sort by creation date (most recent first)
|
||||
all_transfers.sort(key=lambda x: x.get('created_at', ''), reverse=True)
|
||||
|
||||
return all_transfers[:limit]
|
||||
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
"Error getting internal transfer history",
|
||||
parent_tenant_id=parent_tenant_id,
|
||||
child_tenant_id=child_tenant_id,
|
||||
error=str(e)
|
||||
)
|
||||
raise
|
||||
|
||||
async def validate_internal_transfer_eligibility(
|
||||
self,
|
||||
parent_tenant_id: str,
|
||||
child_tenant_id: str,
|
||||
items: List[Dict[str, Any]]
|
||||
) -> Dict[str, Any]:
|
||||
"""
|
||||
Validate that internal transfer is possible (sufficient inventory, etc.)
|
||||
|
||||
Args:
|
||||
parent_tenant_id: Parent tenant ID (supplier)
|
||||
child_tenant_id: Child tenant ID (recipient)
|
||||
items: List of items to transfer
|
||||
|
||||
Returns:
|
||||
Dict with validation results
|
||||
"""
|
||||
try:
|
||||
logger.info(
|
||||
"Validating internal transfer eligibility",
|
||||
parent_tenant_id=parent_tenant_id,
|
||||
child_tenant_id=child_tenant_id,
|
||||
item_count=len(items)
|
||||
)
|
||||
|
||||
validation_results = {
|
||||
'eligible': True,
|
||||
'errors': [],
|
||||
'warnings': [],
|
||||
'inventory_check': []
|
||||
}
|
||||
|
||||
for item in items:
|
||||
product_id = item.get('product_id')
|
||||
quantity = Decimal(str(item.get('quantity', 0)))
|
||||
|
||||
if quantity <= 0:
|
||||
validation_results['errors'].append({
|
||||
'product_id': product_id,
|
||||
'error': 'Quantity must be greater than 0',
|
||||
'quantity': float(quantity)
|
||||
})
|
||||
continue
|
||||
|
||||
# Check if parent has sufficient inventory
|
||||
try:
|
||||
parent_stock = await self.inventory_client.get_product_stock(
|
||||
tenant_id=parent_tenant_id,
|
||||
product_id=product_id
|
||||
)
|
||||
|
||||
available_quantity = Decimal(str(parent_stock.get('available_quantity', 0)))
|
||||
|
||||
if available_quantity < quantity:
|
||||
validation_results['errors'].append({
|
||||
'product_id': product_id,
|
||||
'error': 'Insufficient inventory in parent tenant',
|
||||
'available': float(available_quantity),
|
||||
'requested': float(quantity)
|
||||
})
|
||||
else:
|
||||
validation_results['inventory_check'].append({
|
||||
'product_id': product_id,
|
||||
'available': float(available_quantity),
|
||||
'requested': float(quantity),
|
||||
'sufficient': True
|
||||
})
|
||||
|
||||
except Exception as stock_error:
|
||||
logger.error(
|
||||
"Error checking parent inventory for validation",
|
||||
product_id=product_id,
|
||||
error=str(stock_error)
|
||||
)
|
||||
validation_results['errors'].append({
|
||||
'product_id': product_id,
|
||||
'error': f'Error checking inventory: {str(stock_error)}'
|
||||
})
|
||||
|
||||
# Overall eligibility based on errors
|
||||
validation_results['eligible'] = len(validation_results['errors']) == 0
|
||||
|
||||
logger.info(
|
||||
"Internal transfer validation completed",
|
||||
eligible=validation_results['eligible'],
|
||||
error_count=len(validation_results['errors'])
|
||||
)
|
||||
|
||||
return validation_results
|
||||
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
"Error validating internal transfer eligibility",
|
||||
parent_tenant_id=parent_tenant_id,
|
||||
child_tenant_id=child_tenant_id,
|
||||
error=str(e)
|
||||
)
|
||||
raise
|
||||
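
A hypothetical end-to-end call sequence for the service above; the client instances, IDs, and payload keys here are placeholders rather than the real shared.clients signatures:

async def run_transfer(tenant_client, inventory_client):
    # tenant_client / inventory_client are assumed to be already-configured
    # TenantServiceClient / InventoryServiceClient instances
    service = InternalTransferInventoryService(tenant_client, inventory_client)

    items = [{"product_id": "prod-123", "quantity": 40}]

    # Validate first so a doomed delivery fails fast
    check = await service.validate_internal_transfer_eligibility(
        parent_tenant_id="parent-uuid", child_tenant_id="child-uuid", items=items
    )
    if not check["eligible"]:
        return check["errors"]

    # Deducts from the parent and credits the child, one movement pair per item
    return await service.process_internal_delivery(
        parent_tenant_id="parent-uuid",
        child_tenant_id="child-uuid",
        shipment_items=items,
        shipment_id="ship-001",
    )
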
376
services/inventory/app/services/inventory_alert_service.py
Normal file
@@ -0,0 +1,376 @@
"""
|
||||
Inventory Alert Service - Simplified
|
||||
|
||||
Emits minimal events using EventPublisher.
|
||||
All enrichment handled by alert_processor.
|
||||
"""
|
||||
|
||||
import asyncio
|
||||
from typing import List, Dict, Any, Optional
|
||||
from uuid import UUID
|
||||
from datetime import datetime
|
||||
import structlog
|
||||
|
||||
from shared.messaging import UnifiedEventPublisher, EVENT_TYPES
|
||||
|
||||
logger = structlog.get_logger()
|
||||
|
||||
|
||||
class InventoryAlertService:
|
||||
"""Simplified inventory alert service using EventPublisher"""
|
||||
|
||||
def __init__(self, event_publisher: UnifiedEventPublisher):
|
||||
self.publisher = event_publisher
|
||||
|
||||
async def start(self):
|
||||
"""Start the inventory alert service"""
|
||||
logger.info("InventoryAlertService started")
|
||||
# Add any initialization logic here if needed
|
||||
|
||||
async def stop(self):
|
||||
"""Stop the inventory alert service"""
|
||||
logger.info("InventoryAlertService stopped")
|
||||
# Add any cleanup logic here if needed
|
||||
|
||||
async def emit_critical_stock_shortage(
|
||||
self,
|
||||
tenant_id: UUID,
|
||||
ingredient_id: UUID,
|
||||
ingredient_name: str,
|
||||
current_stock: float,
|
||||
required_stock: float,
|
||||
shortage_amount: float,
|
||||
minimum_stock: float,
|
||||
tomorrow_needed: Optional[float] = None,
|
||||
supplier_name: Optional[str] = None,
|
||||
supplier_phone: Optional[str] = None,
|
||||
lead_time_days: Optional[int] = None,
|
||||
hours_until_stockout: Optional[int] = None
|
||||
):
|
||||
"""Emit minimal critical stock shortage event"""
|
||||
|
||||
metadata = {
|
||||
"ingredient_id": str(ingredient_id),
|
||||
"ingredient_name": ingredient_name,
|
||||
"current_stock": current_stock,
|
||||
"required_stock": required_stock,
|
||||
"shortage_amount": shortage_amount,
|
||||
"minimum_stock": minimum_stock
|
||||
}
|
||||
|
||||
# Add optional fields if present
|
||||
if tomorrow_needed:
|
||||
metadata["tomorrow_needed"] = tomorrow_needed
|
||||
if supplier_name:
|
||||
metadata["supplier_name"] = supplier_name
|
||||
if supplier_phone:
|
||||
metadata["supplier_contact"] = supplier_phone
|
||||
if lead_time_days:
|
||||
metadata["lead_time_days"] = lead_time_days
|
||||
if hours_until_stockout:
|
||||
metadata["hours_until"] = hours_until_stockout
|
||||
|
||||
await self.publisher.publish_alert(
|
||||
event_type="inventory.critical_stock_shortage",
|
||||
tenant_id=tenant_id,
|
||||
severity="urgent",
|
||||
data=metadata
|
||||
)
|
||||
|
||||
logger.info(
|
||||
"critical_stock_shortage_emitted",
|
||||
tenant_id=str(tenant_id),
|
||||
ingredient_name=ingredient_name,
|
||||
shortage_amount=shortage_amount
|
||||
)
|
||||
|
||||
async def emit_low_stock_warning(
|
||||
self,
|
||||
tenant_id: UUID,
|
||||
ingredient_id: UUID,
|
||||
ingredient_name: str,
|
||||
current_stock: float,
|
||||
minimum_stock: float,
|
||||
supplier_name: Optional[str] = None,
|
||||
supplier_phone: Optional[str] = None
|
||||
):
|
||||
"""Emit low stock warning event"""
|
||||
|
||||
metadata = {
|
||||
"ingredient_id": str(ingredient_id),
|
||||
"ingredient_name": ingredient_name,
|
||||
"current_stock": current_stock,
|
||||
"minimum_stock": minimum_stock
|
||||
}
|
||||
|
||||
if supplier_name:
|
||||
metadata["supplier_name"] = supplier_name
|
||||
if supplier_phone:
|
||||
metadata["supplier_contact"] = supplier_phone
|
||||
|
||||
await self.publisher.publish_alert(
|
||||
event_type="inventory.low_stock_warning",
|
||||
tenant_id=tenant_id,
|
||||
severity="high",
|
||||
data=metadata
|
||||
)
|
||||
|
||||
logger.info(
|
||||
"low_stock_warning_emitted",
|
||||
tenant_id=str(tenant_id),
|
||||
ingredient_name=ingredient_name
|
||||
)
|
||||
|
||||
async def emit_temperature_breach(
|
||||
self,
|
||||
tenant_id: UUID,
|
||||
sensor_id: str,
|
||||
location: str,
|
||||
temperature: float,
|
||||
max_threshold: float,
|
||||
duration_minutes: int
|
||||
):
|
||||
"""Emit temperature breach event"""
|
||||
|
||||
# Determine severity based on duration
|
||||
if duration_minutes > 120:
|
||||
severity = "urgent"
|
||||
elif duration_minutes > 60:
|
||||
severity = "high"
|
||||
else:
|
||||
severity = "medium"
|
||||
|
||||
metadata = {
|
||||
"sensor_id": sensor_id,
|
||||
"location": location,
|
||||
"temperature": temperature,
|
||||
"max_threshold": max_threshold,
|
||||
"duration_minutes": duration_minutes
|
||||
}
|
||||
|
||||
await self.publisher.publish_alert(
|
||||
event_type="inventory.temperature_breach",
|
||||
tenant_id=tenant_id,
|
||||
severity=severity,
|
||||
data=metadata
|
||||
)
|
||||
|
||||
logger.info(
|
||||
"temperature_breach_emitted",
|
||||
tenant_id=str(tenant_id),
|
||||
location=location,
|
||||
temperature=temperature
|
||||
)
|
||||
|
||||
async def emit_expired_products(
|
||||
self,
|
||||
tenant_id: UUID,
|
||||
expired_items: List[Dict[str, Any]]
|
||||
):
|
||||
"""Emit expired products alert"""
|
||||
|
||||
metadata = {
|
||||
"expired_count": len(expired_items),
|
||||
"total_quantity_kg": sum(item["quantity"] for item in expired_items),
|
||||
"total_value": sum(item.get("value", 0) for item in expired_items),
|
||||
"expired_items": [
|
||||
{
|
||||
"id": str(item["id"]),
|
||||
"name": item["name"],
|
||||
"stock_id": str(item["stock_id"]),
|
||||
"quantity": float(item["quantity"]),
|
||||
"days_expired": item.get("days_expired", 0)
|
||||
}
|
||||
for item in expired_items
|
||||
]
|
||||
}
|
||||
|
||||
await self.publisher.publish_alert(
|
||||
tenant_id=tenant_id,
|
||||
event_type="inventory.expired_products",
|
||||
severity="urgent",
|
||||
data=metadata
|
||||
)
|
||||
|
||||
logger.info(
|
||||
"expired_products_emitted",
|
||||
tenant_id=str(tenant_id),
|
||||
expired_count=len(expired_items)
|
||||
)
|
||||
|
||||
async def emit_urgent_expiry(
|
||||
self,
|
||||
tenant_id: UUID,
|
||||
ingredient_id: UUID,
|
||||
ingredient_name: str,
|
||||
stock_id: UUID,
|
||||
days_to_expiry: int,
|
||||
quantity: float
|
||||
):
|
||||
"""Emit urgent expiry alert (1-2 days)"""
|
||||
|
||||
metadata = {
|
||||
"ingredient_id": str(ingredient_id),
|
||||
"ingredient_name": ingredient_name,
|
||||
"stock_id": str(stock_id),
|
||||
"days_to_expiry": days_to_expiry,
|
||||
"days_until_expiry": days_to_expiry, # Alias for urgency analyzer
|
||||
"quantity": quantity
|
||||
}
|
||||
|
||||
await self.publisher.publish_alert(
|
||||
tenant_id=tenant_id,
|
||||
event_type="inventory.urgent_expiry",
|
||||
severity="high",
|
||||
data=metadata
|
||||
)
|
||||
|
||||
logger.info(
|
||||
"urgent_expiry_emitted",
|
||||
tenant_id=str(tenant_id),
|
||||
ingredient_name=ingredient_name,
|
||||
days_to_expiry=days_to_expiry
|
||||
)
|
||||
|
||||
async def emit_overstock_warning(
|
||||
self,
|
||||
tenant_id: UUID,
|
||||
ingredient_id: UUID,
|
||||
ingredient_name: str,
|
||||
current_stock: float,
|
||||
maximum_stock: float,
|
||||
waste_risk_kg: float = 0
|
||||
):
|
||||
"""Emit overstock warning"""
|
||||
|
||||
metadata = {
|
||||
"ingredient_id": str(ingredient_id),
|
||||
"ingredient_name": ingredient_name,
|
||||
"current_stock": current_stock,
|
||||
"maximum_stock": maximum_stock,
|
||||
"waste_risk_kg": waste_risk_kg
|
||||
}
|
||||
|
||||
await self.publisher.publish_alert(
|
||||
tenant_id=tenant_id,
|
||||
event_type="inventory.overstock_warning",
|
||||
severity="medium",
|
||||
data=metadata
|
||||
)
|
||||
|
||||
logger.info(
|
||||
"overstock_warning_emitted",
|
||||
tenant_id=str(tenant_id),
|
||||
ingredient_name=ingredient_name
|
||||
)
|
||||
|
||||
async def emit_expired_batches_processed(
|
||||
self,
|
||||
tenant_id: UUID,
|
||||
total_batches: int,
|
||||
total_quantity: float,
|
||||
affected_ingredients: List[Dict[str, Any]]
|
||||
):
|
||||
"""Emit alert for automatically processed expired batches"""
|
||||
|
||||
metadata = {
|
||||
"total_batches_processed": total_batches,
|
||||
"total_quantity_wasted": total_quantity,
|
||||
"processing_date": datetime.utcnow().isoformat(),
|
||||
"affected_ingredients": affected_ingredients,
|
||||
"automation_source": "daily_expired_batch_check"
|
||||
}
|
||||
|
||||
await self.publisher.publish_alert(
|
||||
tenant_id=tenant_id,
|
||||
event_type="inventory.expired_batches_auto_processed",
|
||||
severity="medium",
|
||||
data=metadata
|
||||
)
|
||||
|
||||
logger.info(
|
||||
"expired_batches_processed_emitted",
|
||||
tenant_id=str(tenant_id),
|
||||
total_batches=total_batches,
|
||||
total_quantity=total_quantity
|
||||
)
|
||||
|
||||
# Recommendation methods
|
||||
|
||||
async def emit_inventory_optimization(
|
||||
self,
|
||||
tenant_id: UUID,
|
||||
ingredient_id: UUID,
|
||||
ingredient_name: str,
|
||||
recommendation_type: str,
|
||||
current_max: Optional[float] = None,
|
||||
suggested_max: Optional[float] = None,
|
||||
current_min: Optional[float] = None,
|
||||
suggested_min: Optional[float] = None,
|
||||
avg_daily_usage: Optional[float] = None
|
||||
):
|
||||
"""Emit inventory optimization recommendation"""
|
||||
|
||||
metadata = {
|
||||
"ingredient_id": str(ingredient_id),
|
||||
"ingredient_name": ingredient_name,
|
||||
"recommendation_type": recommendation_type
|
||||
}
|
||||
|
||||
if current_max:
|
||||
metadata["current_max"] = current_max
|
||||
if suggested_max:
|
||||
metadata["suggested_max"] = suggested_max
|
||||
if current_min:
|
||||
metadata["current_min"] = current_min
|
||||
if suggested_min:
|
||||
metadata["suggested_min"] = suggested_min
|
||||
if avg_daily_usage:
|
||||
metadata["avg_daily_usage"] = avg_daily_usage
|
||||
|
||||
await self.publisher.publish_recommendation(
|
||||
event_type="inventory.inventory_optimization",
|
||||
tenant_id=tenant_id,
|
||||
data=metadata
|
||||
)
|
||||
|
||||
logger.info(
|
||||
"inventory_optimization_emitted",
|
||||
tenant_id=str(tenant_id),
|
||||
ingredient_name=ingredient_name,
|
||||
recommendation_type=recommendation_type
|
||||
)
|
||||
|
||||
async def emit_waste_reduction_recommendation(
|
||||
self,
|
||||
tenant_id: UUID,
|
||||
ingredient_id: UUID,
|
||||
ingredient_name: str,
|
||||
total_waste_30d: float,
|
||||
waste_incidents: int,
|
||||
waste_reason: str,
|
||||
estimated_reduction_percent: float
|
||||
):
|
||||
"""Emit waste reduction recommendation"""
|
||||
|
||||
metadata = {
|
||||
"ingredient_id": str(ingredient_id),
|
||||
"ingredient_name": ingredient_name,
|
||||
"total_waste_30d": total_waste_30d,
|
||||
"waste_incidents": waste_incidents,
|
||||
"waste_reason": waste_reason,
|
||||
"estimated_reduction_percent": estimated_reduction_percent
|
||||
}
|
||||
|
||||
await self.publisher.publish_recommendation(
|
||||
event_type="inventory.waste_reduction",
|
||||
tenant_id=tenant_id,
|
||||
data=metadata
|
||||
)
|
||||
|
||||
logger.info(
|
||||
"waste_reduction_recommendation_emitted",
|
||||
tenant_id=str(tenant_id),
|
||||
ingredient_name=ingredient_name,
|
||||
total_waste=total_waste_30d
|
||||
)
|
||||
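
A minimal smoke test for the duration-based severity banding in emit_temperature_breach; the stub publisher below is a placeholder that only mirrors the publish_alert keyword interface used above, not the real UnifiedEventPublisher:

import asyncio
from uuid import uuid4

class _StubPublisher:
    # Placeholder exposing the same publish_alert keyword interface as above
    async def publish_alert(self, event_type, tenant_id, severity, data):
        print(event_type, severity, data["duration_minutes"])

async def demo():
    svc = InventoryAlertService(_StubPublisher())
    # 130 minutes out of range crosses the >120 band, so severity is "urgent"
    await svc.emit_temperature_breach(
        tenant_id=uuid4(), sensor_id="walk-in-1", location="main freezer",
        temperature=-9.0, max_threshold=-18.0, duration_minutes=130,
    )

asyncio.run(demo())
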
@@ -0,0 +1,170 @@
"""
|
||||
Inventory Notification Service - Simplified
|
||||
|
||||
Emits minimal events using EventPublisher.
|
||||
All enrichment handled by alert_processor.
|
||||
|
||||
These are NOTIFICATIONS (not alerts) - informational state changes that don't require user action.
|
||||
"""
|
||||
|
||||
from datetime import datetime, timezone
|
||||
from typing import Optional, Dict, Any
|
||||
from uuid import UUID
|
||||
import structlog
|
||||
|
||||
from shared.messaging import UnifiedEventPublisher
|
||||
|
||||
logger = structlog.get_logger()
|
||||
|
||||
|
||||
class InventoryNotificationService:
|
||||
"""
|
||||
Service for emitting inventory notifications using EventPublisher.
|
||||
"""
|
||||
|
||||
def __init__(self, event_publisher: UnifiedEventPublisher):
|
||||
self.publisher = event_publisher
|
||||
|
||||
async def emit_stock_received_notification(
|
||||
self,
|
||||
tenant_id: UUID,
|
||||
stock_receipt_id: str,
|
||||
ingredient_id: str,
|
||||
ingredient_name: str,
|
||||
quantity_received: float,
|
||||
unit: str,
|
||||
supplier_name: Optional[str] = None,
|
||||
delivery_id: Optional[str] = None,
|
||||
) -> None:
|
||||
"""
|
||||
Emit notification when stock is received.
|
||||
"""
|
||||
message = f"Received {quantity_received} {unit} of {ingredient_name}"
|
||||
if supplier_name:
|
||||
message += f" from {supplier_name}"
|
||||
|
||||
metadata = {
|
||||
"stock_receipt_id": stock_receipt_id,
|
||||
"ingredient_id": ingredient_id,
|
||||
"ingredient_name": ingredient_name,
|
||||
"quantity_received": float(quantity_received),
|
||||
"unit": unit,
|
||||
"supplier_name": supplier_name,
|
||||
"delivery_id": delivery_id,
|
||||
"received_at": datetime.now(timezone.utc).isoformat(),
|
||||
}
|
||||
|
||||
await self.publisher.publish_notification(
|
||||
event_type="inventory.stock_received",
|
||||
tenant_id=tenant_id,
|
||||
data=metadata
|
||||
)
|
||||
|
||||
logger.info(
|
||||
"stock_received_notification_emitted",
|
||||
tenant_id=str(tenant_id),
|
||||
ingredient_name=ingredient_name,
|
||||
quantity_received=quantity_received
|
||||
)
|
||||
|
||||
async def emit_stock_movement_notification(
|
||||
self,
|
||||
tenant_id: UUID,
|
||||
movement_id: str,
|
||||
ingredient_id: str,
|
||||
ingredient_name: str,
|
||||
quantity: float,
|
||||
unit: str,
|
||||
movement_type: str, # 'transfer', 'adjustment', 'waste', 'return'
|
||||
from_location: Optional[str] = None,
|
||||
to_location: Optional[str] = None,
|
||||
reason: Optional[str] = None,
|
||||
) -> None:
|
||||
"""
|
||||
Emit notification for stock movements (transfers, adjustments, waste).
|
||||
"""
|
||||
# Build message based on movement type
|
||||
if movement_type == "transfer":
|
||||
message = f"Transferred {quantity} {unit} of {ingredient_name}"
|
||||
if from_location and to_location:
|
||||
message += f" from {from_location} to {to_location}"
|
||||
elif movement_type == "adjustment":
|
||||
message = f"Adjusted {ingredient_name} by {quantity} {unit}"
|
||||
if reason:
|
||||
message += f" - {reason}"
|
||||
elif movement_type == "waste":
|
||||
message = f"Waste recorded: {quantity} {unit} of {ingredient_name}"
|
||||
if reason:
|
||||
message += f" - {reason}"
|
||||
elif movement_type == "return":
|
||||
message = f"Returned {quantity} {unit} of {ingredient_name}"
|
||||
else:
|
||||
message = f"Stock movement: {quantity} {unit} of {ingredient_name}"
|
||||
|
||||
metadata = {
|
||||
"movement_id": movement_id,
|
||||
"ingredient_id": ingredient_id,
|
||||
"ingredient_name": ingredient_name,
|
||||
"quantity": float(quantity),
|
||||
"unit": unit,
|
||||
"movement_type": movement_type,
|
||||
"from_location": from_location,
|
||||
"to_location": to_location,
|
||||
"reason": reason,
|
||||
"moved_at": datetime.now(timezone.utc).isoformat(),
|
||||
}
|
||||
|
||||
await self.publisher.publish_notification(
|
||||
event_type="inventory.stock_movement",
|
||||
tenant_id=tenant_id,
|
||||
data=metadata
|
||||
)
|
||||
|
||||
logger.info(
|
||||
"stock_movement_notification_emitted",
|
||||
tenant_id=str(tenant_id),
|
||||
movement_type=movement_type,
|
||||
ingredient_name=ingredient_name
|
||||
)
|
||||
|
||||
async def emit_stock_updated_notification(
|
||||
self,
|
||||
tenant_id: UUID,
|
||||
ingredient_id: str,
|
||||
ingredient_name: str,
|
||||
old_quantity: float,
|
||||
new_quantity: float,
|
||||
unit: str,
|
||||
update_reason: str,
|
||||
) -> None:
|
||||
"""
|
||||
Emit notification when stock is updated.
|
||||
"""
|
||||
quantity_change = new_quantity - old_quantity
|
||||
change_direction = "increased" if quantity_change > 0 else "decreased"
|
||||
|
||||
message = f"Stock {change_direction} by {abs(quantity_change)} {unit} - {update_reason}"
|
||||
|
||||
metadata = {
|
||||
"ingredient_id": ingredient_id,
|
||||
"ingredient_name": ingredient_name,
|
||||
"old_quantity": float(old_quantity),
|
||||
"new_quantity": float(new_quantity),
|
||||
"quantity_change": float(quantity_change),
|
||||
"unit": unit,
|
||||
"update_reason": update_reason,
|
||||
"updated_at": datetime.now(timezone.utc).isoformat(),
|
||||
}
|
||||
|
||||
await self.publisher.publish_notification(
|
||||
event_type="inventory.stock_updated",
|
||||
tenant_id=tenant_id,
|
||||
data=metadata
|
||||
)
|
||||
|
||||
logger.info(
|
||||
"stock_updated_notification_emitted",
|
||||
tenant_id=str(tenant_id),
|
||||
ingredient_name=ingredient_name,
|
||||
quantity_change=quantity_change
|
||||
)
|
||||
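
The movement-type branching above is easiest to see with concrete inputs; a hypothetical call, assuming an already-configured UnifiedEventPublisher and placeholder IDs:

from uuid import uuid4

async def record_waste(publisher):
    svc = InventoryNotificationService(publisher)
    # movement_type "waste" plus a reason produces the payload message:
    # "Waste recorded: 2.5 kg of Butter - expired batch"
    await svc.emit_stock_movement_notification(
        tenant_id=uuid4(),
        movement_id="mov-42",
        ingredient_id="ing-7",
        ingredient_name="Butter",
        quantity=2.5,
        unit="kg",
        movement_type="waste",
        reason="expired batch",
    )
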
1192
services/inventory/app/services/inventory_scheduler.py
Normal file
File diff suppressed because it is too large
Load Diff
1199
services/inventory/app/services/inventory_service.py
Normal file
File diff suppressed because it is too large
Load Diff
467
services/inventory/app/services/product_classifier.py
Normal file
@@ -0,0 +1,467 @@
# services/inventory/app/services/product_classifier.py
"""
AI Product Classification Service
Automatically classifies products from sales data during onboarding
"""

import re
import structlog
from typing import Dict, Any, List, Optional, Tuple
from enum import Enum
from dataclasses import dataclass

from app.models.inventory import ProductType, IngredientCategory, ProductCategory, UnitOfMeasure

logger = structlog.get_logger()


@dataclass
class ProductSuggestion:
    """Suggested inventory item from sales data analysis"""
    original_name: str
    suggested_name: str
    product_type: ProductType
    category: str  # ingredient_category or product_category
    unit_of_measure: UnitOfMeasure
    confidence_score: float  # 0.0 to 1.0
    estimated_shelf_life_days: Optional[int] = None
    requires_refrigeration: bool = False
    requires_freezing: bool = False
    is_seasonal: bool = False
    suggested_supplier: Optional[str] = None
    notes: Optional[str] = None


class ProductClassifierService:
    """AI-powered product classification for onboarding automation"""

    def __init__(self):
        self._load_classification_rules()

    def _load_classification_rules(self):
        """Load classification patterns and rules"""

        # Ingredient patterns with high confidence
        self.ingredient_patterns = {
            IngredientCategory.FLOUR: {
                'patterns': [
                    r'harina', r'flour', r'trigo', r'wheat', r'integral', r'whole.*wheat',
                    r'centeno', r'rye', r'avena', r'oat', r'maiz', r'corn'
                ],
                'unit': UnitOfMeasure.KILOGRAMS,
                'shelf_life': 365,
                'supplier_hints': ['molinos', 'harinera', 'mill']
            },
            IngredientCategory.YEAST: {
                'patterns': [
                    r'levadura', r'yeast', r'fermento', r'baker.*yeast', r'instant.*yeast'
                ],
                'unit': UnitOfMeasure.GRAMS,
                'shelf_life': 730,
                'refrigeration': True
            },
            IngredientCategory.DAIRY: {
                'patterns': [
                    r'leche', r'milk', r'nata', r'cream', r'mantequilla', r'butter',
                    r'queso', r'cheese', r'yogur', r'yogurt'
                ],
                'unit': UnitOfMeasure.LITERS,
                'shelf_life': 7,
                'refrigeration': True
            },
            IngredientCategory.EGGS: {
                'patterns': [
                    r'huevo', r'egg', r'clara', r'white', r'yema', r'yolk'
                ],
                'unit': UnitOfMeasure.UNITS,
                'shelf_life': 28,
                'refrigeration': True
            },
            IngredientCategory.SUGAR: {
                'patterns': [
                    r'azucar', r'sugar', r'edulcorante', r'sweetener', r'miel', r'honey',
                    r'jarabe', r'syrup', r'mascabado', r'brown.*sugar'
                ],
                'unit': UnitOfMeasure.KILOGRAMS,
                'shelf_life': 730
            },
            IngredientCategory.FATS: {
                'patterns': [
                    r'aceite', r'oil', r'grasa', r'fat', r'margarina', r'margarine',
                    r'manteca', r'lard', r'oliva', r'olive'
                ],
                'unit': UnitOfMeasure.LITERS,
                'shelf_life': 365
            },
            IngredientCategory.SALT: {
                'patterns': [
                    r'sal', r'salt', r'sodium', r'sodio'
                ],
                'unit': UnitOfMeasure.KILOGRAMS,
                'shelf_life': 1825  # 5 years
            },
            IngredientCategory.SPICES: {
                'patterns': [
                    r'canela', r'cinnamon', r'vainilla', r'vanilla', r'cacao', r'cocoa',
                    r'chocolate', r'anis', r'anise', r'cardamomo', r'cardamom',
                    r'jengibre', r'ginger', r'nuez.*moscada', r'nutmeg'
                ],
                'unit': UnitOfMeasure.GRAMS,
                'shelf_life': 730
            },
            IngredientCategory.ADDITIVES: {
                'patterns': [
                    r'polvo.*hornear', r'baking.*powder', r'bicarbonato', r'soda',
                    r'cremor.*tartaro', r'cream.*tartar', r'lecitina', r'lecithin',
                    r'conservante', r'preservative', r'emulsificante', r'emulsifier'
                ],
                'unit': UnitOfMeasure.GRAMS,
                'shelf_life': 730
            },
            IngredientCategory.PACKAGING: {
                'patterns': [
                    r'bolsa', r'bag', r'envase', r'container', r'papel', r'paper',
                    r'plastico', r'plastic', r'carton', r'cardboard'
                ],
                'unit': UnitOfMeasure.UNITS,
                'shelf_life': 1825
            }
        }

        # Finished product patterns
        self.product_patterns = {
            ProductCategory.BREAD: {
                'patterns': [
                    r'pan\b', r'bread', r'baguette', r'hogaza', r'loaf', r'molde',
                    r'integral', r'whole.*grain', r'centeno', r'rye.*bread'
                ],
                'unit': UnitOfMeasure.UNITS,
                'shelf_life': 3,
                'display_life': 24  # hours
            },
            ProductCategory.CROISSANTS: {
                'patterns': [
                    r'croissant', r'cruasan', r'napolitana', r'palmera', r'palmier'
                ],
                'unit': UnitOfMeasure.UNITS,
                'shelf_life': 2,
                'display_life': 12
            },
            ProductCategory.PASTRIES: {
                'patterns': [
                    r'pastel', r'pastry', r'hojaldre', r'puff.*pastry', r'empanada',
                    r'milhojas', r'napoleon', r'eclair', r'profiterol'
                ],
                'unit': UnitOfMeasure.UNITS,
                'shelf_life': 2,
                'display_life': 24,
                'refrigeration': True
            },
            ProductCategory.CAKES: {
                'patterns': [
                    r'tarta', r'cake', r'bizcocho', r'sponge', r'cheesecake',
                    r'tiramisu', r'mousse', r'torta'
                ],
                'unit': UnitOfMeasure.UNITS,
                'shelf_life': 3,
                'refrigeration': True
            },
            ProductCategory.COOKIES: {
                'patterns': [
                    r'galleta', r'cookie', r'biscuit', r'mantecada', r'madeleine'
                ],
                'unit': UnitOfMeasure.UNITS,
                'shelf_life': 14
            },
            ProductCategory.MUFFINS: {
                'patterns': [
                    r'muffin', r'magdalena', r'cupcake', r'fairy.*cake'
                ],
                'unit': UnitOfMeasure.UNITS,
                'shelf_life': 3
            },
            ProductCategory.SANDWICHES: {
                'patterns': [
                    r'sandwich', r'bocadillo', r'tostada', r'toast', r'bagel'
                ],
                'unit': UnitOfMeasure.UNITS,
                'shelf_life': 1,
                'display_life': 6,
                'refrigeration': True
            },
            ProductCategory.BEVERAGES: {
                'patterns': [
                    r'cafe', r'coffee', r'te\b', r'tea', r'chocolate.*caliente',
                    r'hot.*chocolate', r'zumo', r'juice', r'batido', r'smoothie'
                ],
                'unit': UnitOfMeasure.UNITS,
                'shelf_life': 1
            }
        }

        # Seasonal indicators
        self.seasonal_patterns = {
            'christmas': [r'navidad', r'christmas', r'turron', r'polvoron', r'roscon'],
            'easter': [r'pascua', r'easter', r'mona', r'torrija'],
            'summer': [r'helado', r'ice.*cream', r'granizado', r'sorbete']
        }

    def classify_product(self, product_name: str, sales_volume: Optional[float] = None) -> ProductSuggestion:
        """Classify a single product name into inventory suggestion"""

        # Normalize product name for analysis
        normalized_name = self._normalize_name(product_name)

        # Try to classify as ingredient first
        ingredient_result = self._classify_as_ingredient(normalized_name, product_name)
        if ingredient_result and ingredient_result.confidence_score >= 0.7:
            return ingredient_result

        # Try to classify as finished product
        product_result = self._classify_as_finished_product(normalized_name, product_name)
        if product_result:
            return product_result

        # Fallback: create generic finished product with low confidence
        return self._create_fallback_suggestion(product_name, normalized_name)

    def classify_products_batch(self, product_names: List[str],
                                sales_volumes: Optional[Dict[str, float]] = None) -> List[ProductSuggestion]:
        """Classify multiple products and detect business model"""

        suggestions = []
        for name in product_names:
            volume = sales_volumes.get(name) if sales_volumes else None
            suggestion = self.classify_product(name, volume)
            suggestions.append(suggestion)

        # Analyze business model based on classification results
        self._analyze_business_model(suggestions)

        return suggestions

    def _normalize_name(self, name: str) -> str:
        """Normalize product name for pattern matching"""
        if not name:
            return ""

        # Convert to lowercase
        normalized = name.lower().strip()

        # Remove common Spanish articles from the start of the name
        prefixes_to_remove = ['el ', 'la ', 'los ', 'las ', 'un ', 'una ']
        for prefix in prefixes_to_remove:
            if normalized.startswith(prefix):
                normalized = normalized[len(prefix):]

        # Remove special characters but keep spaces and accents
        normalized = re.sub(r'[^\w\sáéíóúñü]', ' ', normalized)

        # Normalize multiple spaces
        normalized = re.sub(r'\s+', ' ', normalized).strip()

        return normalized

    def _classify_as_ingredient(self, normalized_name: str, original_name: str) -> Optional[ProductSuggestion]:
        """Try to classify as ingredient"""

        best_match = None
        best_score = 0.0

        for category, config in self.ingredient_patterns.items():
            for pattern in config['patterns']:
|
||||
if re.search(pattern, normalized_name, re.IGNORECASE):
|
||||
# Calculate confidence based on pattern specificity
|
||||
score = self._calculate_confidence_score(pattern, normalized_name)
|
||||
if score > best_score:
|
||||
best_score = score
|
||||
best_match = (category, config)
|
||||
|
||||
if best_match and best_score >= 0.6:
|
||||
category, config = best_match
|
||||
|
||||
return ProductSuggestion(
|
||||
original_name=original_name,
|
||||
suggested_name=self._suggest_clean_name(original_name, normalized_name),
|
||||
product_type=ProductType.INGREDIENT,
|
||||
category=category.value,
|
||||
unit_of_measure=config['unit'],
|
||||
confidence_score=best_score,
|
||||
estimated_shelf_life_days=config.get('shelf_life'),
|
||||
requires_refrigeration=config.get('refrigeration', False),
|
||||
requires_freezing=config.get('freezing', False),
|
||||
suggested_supplier=self._suggest_supplier(normalized_name, config.get('supplier_hints', [])),
|
||||
notes=f"Auto-classified as {category.value} ingredient"
|
||||
)
|
||||
|
||||
return None
|
||||
|
||||
def _classify_as_finished_product(self, normalized_name: str, original_name: str) -> Optional[ProductSuggestion]:
|
||||
"""Try to classify as finished product"""
|
||||
|
||||
best_match = None
|
||||
best_score = 0.0
|
||||
|
||||
for category, config in self.product_patterns.items():
|
||||
for pattern in config['patterns']:
|
||||
if re.search(pattern, normalized_name, re.IGNORECASE):
|
||||
score = self._calculate_confidence_score(pattern, normalized_name)
|
||||
if score > best_score:
|
||||
best_score = score
|
||||
best_match = (category, config)
|
||||
|
||||
if best_match:
|
||||
category, config = best_match
|
||||
|
||||
# Check if seasonal
|
||||
is_seasonal = self._is_seasonal_product(normalized_name)
|
||||
|
||||
return ProductSuggestion(
|
||||
original_name=original_name,
|
||||
suggested_name=self._suggest_clean_name(original_name, normalized_name),
|
||||
product_type=ProductType.FINISHED_PRODUCT,
|
||||
category=category.value,
|
||||
unit_of_measure=config['unit'],
|
||||
confidence_score=best_score,
|
||||
estimated_shelf_life_days=config.get('shelf_life'),
|
||||
requires_refrigeration=config.get('refrigeration', False),
|
||||
requires_freezing=config.get('freezing', False),
|
||||
is_seasonal=is_seasonal,
|
||||
notes=f"Auto-classified as {category.value}"
|
||||
)
|
||||
|
||||
return None
|
||||
|
||||
def _create_fallback_suggestion(self, original_name: str, normalized_name: str) -> ProductSuggestion:
|
||||
"""Create a fallback suggestion for unclassified products"""
|
||||
|
||||
return ProductSuggestion(
|
||||
original_name=original_name,
|
||||
suggested_name=self._suggest_clean_name(original_name, normalized_name),
|
||||
product_type=ProductType.FINISHED_PRODUCT,
|
||||
category=ProductCategory.OTHER_PRODUCTS.value,
|
||||
unit_of_measure=UnitOfMeasure.UNITS,
|
||||
confidence_score=0.3,
|
||||
estimated_shelf_life_days=3,
|
||||
notes="Needs manual classification - defaulted to finished product"
|
||||
)
|
||||
|
||||
def _calculate_confidence_score(self, pattern: str, normalized_name: str) -> float:
|
||||
"""Calculate confidence score for pattern match"""
|
||||
|
||||
# Base score for match
|
||||
base_score = 0.8
|
||||
|
||||
# Boost score for exact matches
|
||||
if pattern.lower() == normalized_name:
|
||||
return 0.95
|
||||
|
||||
# Boost score for word boundary matches
|
||||
if re.search(r'\b' + pattern + r'\b', normalized_name, re.IGNORECASE):
|
||||
base_score += 0.1
|
||||
|
||||
# Reduce score for partial matches
|
||||
if len(pattern) < len(normalized_name) / 2:
|
||||
base_score -= 0.2
|
||||
|
||||
return min(0.95, max(0.3, base_score))
|
||||
|
||||
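    # Worked examples for the scoring rules above (figures illustrative):
    #   r'croissant' vs "croissant"               -> exact match short-circuits to 0.95
    #   r'cruasan'   vs "cruasan grande"          -> word-boundary bonus: 0.8 + 0.1 = 0.9
    #   r'pan\b'     vs "pan integral de centeno" -> bonus, then the short-pattern
    #                                                penalty: 0.8 + 0.1 - 0.2 = 0.7
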
    def _suggest_clean_name(self, original_name: str, normalized_name: str) -> str:
        """Suggest a cleaned-up version of the product name"""

        # Capitalize properly
        words = original_name.split()
        cleaned = []

        for word in words:
            if len(word) > 0:
                # Keep the original casing for abbreviations
                if word.isupper() and len(word) <= 3:
                    cleaned.append(word)
                else:
                    cleaned.append(word.capitalize())

        return ' '.join(cleaned)

    def _suggest_supplier(self, normalized_name: str, supplier_hints: List[str]) -> Optional[str]:
        """Suggest a potential supplier based on the product type"""

        for hint in supplier_hints:
            if hint in normalized_name:
                return f"Suggested: {hint.title()}"

        return None

    def _is_seasonal_product(self, normalized_name: str) -> bool:
        """Check if a product appears to be seasonal"""

        for season, patterns in self.seasonal_patterns.items():
            for pattern in patterns:
                if re.search(pattern, normalized_name, re.IGNORECASE):
                    return True

        return False

    def _analyze_business_model(self, suggestions: List[ProductSuggestion]) -> Dict[str, Any]:
        """Analyze the business model based on product classifications"""

        ingredient_count = sum(1 for s in suggestions if s.product_type == ProductType.INGREDIENT)
        finished_count = sum(1 for s in suggestions if s.product_type == ProductType.FINISHED_PRODUCT)
        total = len(suggestions)

        if total == 0:
            return {"model": "unknown", "confidence": 0.0}

        ingredient_ratio = ingredient_count / total

        if ingredient_ratio >= 0.7:
            model = "production"  # Production bakery
        elif ingredient_ratio <= 0.3:
            model = "retail"  # Retail/distribution bakery
        else:
            model = "hybrid"  # Mixed model

        confidence = max(abs(ingredient_ratio - 0.5) * 2, 0.1)

        logger.info("Business model analysis",
                    model=model, confidence=confidence,
                    ingredient_count=ingredient_count,
                    finished_count=finished_count)

        return {
            "model": model,
            "confidence": confidence,
            "ingredient_ratio": ingredient_ratio,
            "recommendations": self._get_model_recommendations(model)
        }

    def _get_model_recommendations(self, model: str) -> List[str]:
        """Get recommendations based on the detected business model"""

        recommendations = {
            "production": [
                "Focus on ingredient inventory management",
                "Set up recipe cost calculation",
                "Configure supplier relationships",
                "Enable production planning features"
            ],
            "retail": [
                "Configure central baker relationships",
                "Set up delivery schedule tracking",
                "Enable finished product freshness monitoring",
                "Focus on sales forecasting"
            ],
            "hybrid": [
                "Configure both ingredient and finished product management",
                "Set up flexible inventory categories",
                "Enable both production and retail features"
            ]
        }

        return recommendations.get(model, [])


# Dependency injection
def get_product_classifier() -> ProductClassifierService:
    """Get a product classifier service instance"""
    return ProductClassifierService()
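A minimal usage sketch for the classifier above (illustrative only; the import path is an assumption based on this service's layout, and the printed fields follow the ProductSuggestion schema used in this file):

    from app.services.product_classifier_service import get_product_classifier  # assumed path

    classifier = get_product_classifier()
    suggestions = classifier.classify_products_batch(["Leche entera", "Croissant"])
    for s in suggestions:
        # "Leche entera" matches the DAIRY ingredient patterns (score 0.7);
        # "Croissant" is an exact finished-product match (score 0.95).
        print(s.product_type, s.category, round(s.confidence_score, 2))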
882
services/inventory/app/services/sustainability_service.py
Normal file
@@ -0,0 +1,882 @@
# ================================================================
# services/inventory/app/services/sustainability_service.py
# ================================================================
"""
Sustainability Service - Environmental Impact & SDG Compliance Tracking
Aligned with UN SDG 12.3 and EU Farm to Fork Strategy
"""

from datetime import datetime, timedelta
from decimal import Decimal
from typing import Dict, Any, Optional, List
from uuid import UUID
import structlog

from sqlalchemy import text
from sqlalchemy.ext.asyncio import AsyncSession
from app.core.config import settings
from app.repositories.stock_movement_repository import StockMovementRepository
from app.repositories.food_safety_repository import FoodSafetyRepository
from shared.clients.production_client import create_production_client

logger = structlog.get_logger()


# Environmental Impact Constants (research-based averages for bakery products)
class EnvironmentalConstants:
    """Environmental impact factors for bakery production"""

    # CO2 equivalent per kg of food waste (kg CO2e/kg)
    # Source: EU Commission, average for baked goods
    CO2_PER_KG_WASTE = 1.9

    # Water footprint (liters per kg of ingredient)
    WATER_FOOTPRINT = {
        'flour': 1827,   # Wheat flour
        'dairy': 1020,   # Average dairy products
        'eggs': 3265,    # Eggs
        'sugar': 1782,   # Sugar
        'yeast': 500,    # Estimated for yeast
        'fats': 1600,    # Butter/oils average
        'default': 1500  # Conservative default
    }

    # Land use per kg (m² per kg)
    LAND_USE_PER_KG = 3.4

    # Average trees needed to offset 1 ton of CO2
    TREES_PER_TON_CO2 = 50

    # EU bakery waste baseline (average industry waste %)
    EU_BAKERY_BASELINE_WASTE = 0.25  # 25% average

    # UN SDG 12.3 target: 50% reduction by 2030
    SDG_TARGET_REDUCTION = 0.50

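# A quick worked example of how these factors combine (figures illustrative):
# for 10 kg of wasted product,
#   CO2:   10 * CO2_PER_KG_WASTE           = 10 * 1.9  = 19 kg CO2e
#   water: 10 * WATER_FOOTPRINT['default'] = 10 * 1500 = 15,000 L
#   land:  10 * LAND_USE_PER_KG            = 10 * 3.4  = 34 m²
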
class SustainabilityService:
    """Service for calculating environmental impact and SDG compliance"""

    def __init__(self):
        pass

    async def get_sustainability_metrics(
        self,
        db: AsyncSession,
        tenant_id: UUID,
        start_date: Optional[datetime] = None,
        end_date: Optional[datetime] = None
    ) -> Dict[str, Any]:
        """
        Get comprehensive sustainability metrics for a tenant

        Returns metrics aligned with:
        - UN SDG 12.3 (food waste reduction)
        - EU Farm to Fork Strategy
        - Green Deal objectives
        """
        try:
            # Default to the last 30 days if no date range is provided
            if not end_date:
                end_date = datetime.now()
            if not start_date:
                start_date = end_date - timedelta(days=30)

            # Get waste data from production and inventory
            waste_data = await self._get_waste_data(db, tenant_id, start_date, end_date)

            # Check if there's sufficient data for meaningful calculations
            # Minimum: 50 kg production to avoid false metrics on empty accounts
            total_production = waste_data['total_production_kg']
            has_sufficient_data = total_production >= 50.0

            logger.info(
                "Checking data sufficiency for sustainability metrics",
                tenant_id=str(tenant_id),
                total_production=total_production,
                has_sufficient_data=has_sufficient_data
            )

            # If insufficient data, return a special "collecting data" state
            if not has_sufficient_data:
                return self._get_insufficient_data_response(start_date, end_date, waste_data)

            # Calculate environmental impact
            environmental_impact = self._calculate_environmental_impact(waste_data)

            # Calculate SDG compliance
            sdg_compliance = await self._calculate_sdg_compliance(
                db, tenant_id, waste_data, start_date, end_date
            )

            # Calculate avoided waste (through AI predictions)
            avoided_waste = await self._calculate_avoided_waste(
                db, tenant_id, start_date, end_date
            )

            # Calculate financial impact
            financial_impact = self._calculate_financial_impact(waste_data)

            return {
                'period': {
                    'start_date': start_date.isoformat(),
                    'end_date': end_date.isoformat(),
                    'days': (end_date - start_date).days
                },
                'waste_metrics': {
                    'total_waste_kg': waste_data['total_waste_kg'],
                    'production_waste_kg': waste_data['production_waste_kg'],
                    'expired_waste_kg': waste_data['expired_waste_kg'],
                    'waste_percentage': waste_data['waste_percentage'],
                    'waste_by_reason': waste_data['waste_by_reason']
                },
                'environmental_impact': environmental_impact,
                'sdg_compliance': sdg_compliance,
                'avoided_waste': avoided_waste,
                'financial_impact': financial_impact,
                'grant_readiness': self._assess_grant_readiness(sdg_compliance),
                'data_sufficient': True
            }

        except Exception as e:
            logger.error("Failed to calculate sustainability metrics",
                         tenant_id=str(tenant_id), error=str(e))
            raise

    def _get_insufficient_data_response(
        self,
        start_date: datetime,
        end_date: datetime,
        waste_data: Dict[str, Any]
    ) -> Dict[str, Any]:
        """
        Return a response for tenants with insufficient data

        This prevents showing a misleading "100% compliant" status for empty accounts
        """
        return {
            'period': {
                'start_date': start_date.isoformat(),
                'end_date': end_date.isoformat(),
                'days': (end_date - start_date).days
            },
            'waste_metrics': {
                'total_waste_kg': 0.0,
                'production_waste_kg': 0.0,
                'expired_waste_kg': 0.0,
                'waste_percentage': 0.0,
                'waste_by_reason': {}
            },
            'environmental_impact': {
                'co2_emissions': {
                    'kg': 0.0,
                    'tons': 0.0,
                    'trees_to_offset': 0.0
                },
                'water_footprint': {
                    'liters': 0.0,
                    'cubic_meters': 0.0
                },
                'land_use': {
                    'square_meters': 0.0,
                    'hectares': 0.0
                },
                'human_equivalents': {
                    'car_km_equivalent': 0.0,
                    'smartphone_charges': 0.0,
                    'showers_equivalent': 0.0,
                    'trees_planted': 0.0
                }
            },
            'sdg_compliance': {
                'sdg_12_3': {
                    'baseline_waste_percentage': 0.0,
                    'current_waste_percentage': 0.0,
                    'reduction_achieved': 0.0,
                    'target_reduction': 50.0,
                    'progress_to_target': 0.0,
                    'status': 'insufficient_data',
                    'status_label': 'Collecting Baseline Data',
                    'target_waste_percentage': 0.0
                },
                'baseline_period': 'not_available',
                'certification_ready': False,
                'improvement_areas': ['start_production_tracking']
            },
            'avoided_waste': {
                'waste_avoided_kg': 0.0,
                'ai_assisted_batches': 0,
                'environmental_impact_avoided': {
                    'co2_kg': 0.0,
                    'water_liters': 0.0
                },
                'methodology': 'insufficient_data'
            },
            'financial_impact': {
                'waste_cost_eur': 0.0,
                'cost_per_kg': 3.50,
                'potential_monthly_savings': 0.0,
                'annual_projection': 0.0
            },
            'grant_readiness': {
                'overall_readiness_percentage': 0.0,
                'grant_programs': {
                    'life_circular_economy': {
                        'eligible': False,
                        'confidence': 'low',
                        'requirements_met': False,
                        'funding_eur': 73_000_000,
                        'deadline': '2025-09-23',
                        'program_type': 'grant'
                    },
                    'horizon_europe_cluster_6': {
                        'eligible': False,
                        'confidence': 'low',
                        'requirements_met': False,
                        'funding_eur': 880_000_000,
                        'deadline': 'rolling_2025',
                        'program_type': 'grant'
                    },
                    'fedima_sustainability_grant': {
                        'eligible': False,
                        'confidence': 'low',
                        'requirements_met': False,
                        'funding_eur': 20_000,
                        'deadline': '2025-06-30',
                        'program_type': 'grant',
                        'sector_specific': 'bakery'
                    },
                    'eit_food_retail': {
                        'eligible': False,
                        'confidence': 'low',
                        'requirements_met': False,
                        'funding_eur': 45_000,
                        'deadline': 'rolling',
                        'program_type': 'grant',
                        'sector_specific': 'retail'
                    },
                    'un_sdg_certified': {
                        'eligible': False,
                        'confidence': 'low',
                        'requirements_met': False,
                        'funding_eur': 0,
                        'deadline': 'ongoing',
                        'program_type': 'certification'
                    }
                },
                'recommended_applications': [],
                'spain_compliance': {
                    'law_1_2025': False,
                    'circular_economy_strategy': False
                }
            },
            'data_sufficient': False,
            'minimum_production_required_kg': 50.0,
            'current_production_kg': waste_data['total_production_kg']
        }

    async def _get_waste_data(
        self,
        db: AsyncSession,
        tenant_id: UUID,
        start_date: datetime,
        end_date: datetime
    ) -> Dict[str, Any]:
        """Get waste data from the production service and inventory"""
        try:
            # Get production waste data via HTTP call to the production service
            production_waste_data = await self._get_production_waste_data(
                tenant_id, start_date, end_date
            )

            prod_data = production_waste_data if production_waste_data else {
                'total_production_waste': 0,
                'total_defects': 0,
                'total_planned': 0,
                'total_actual': 0
            }

            # Query inventory waste using the repository
            stock_movement_repo = StockMovementRepository(db)
            inventory_waste = await stock_movement_repo.get_inventory_waste_total(
                tenant_id=tenant_id,
                start_date=start_date,
                end_date=end_date
            )

            # Calculate totals
            production_waste = float(prod_data.get('total_production_waste', 0) or 0)
            defect_waste = float(prod_data.get('total_defects', 0) or 0)
            total_waste = production_waste + defect_waste + inventory_waste

            total_production = float(prod_data.get('total_planned', 0) or 0)
            waste_percentage = (total_waste / total_production * 100) if total_production > 0 else 0

            # Categorize waste by reason
            waste_by_reason = {
                'production_defects': defect_waste,
                'production_waste': production_waste - defect_waste,
                'expired_inventory': inventory_waste * 0.7,  # Estimate: 70% expires
                'damaged_inventory': inventory_waste * 0.3,  # Estimate: 30% damaged
            }

            # Count waste incidents from stock movements
            total_waste_incidents = 0
            try:
                # Derive the look-back window from the requested date range
                days_back = (end_date - start_date).days if start_date and end_date else 30
                waste_movements = await stock_movement_repo.get_waste_movements(
                    tenant_id=tenant_id,
                    days_back=days_back,
                    limit=1000  # Get all waste movements
                )
                total_waste_incidents = len(waste_movements) if waste_movements else 0
            except Exception as e:
                logger.warning("Could not get waste incidents count", error=str(e))
                total_waste_incidents = 0

            return {
                'total_waste_kg': total_waste,
                'production_waste_kg': production_waste + defect_waste,
                'expired_waste_kg': inventory_waste,
                'waste_percentage': waste_percentage,
                'total_production_kg': total_production,
                'waste_by_reason': waste_by_reason,
                'waste_incidents': total_waste_incidents
            }

        except Exception as e:
            logger.error("Failed to get waste data", error=str(e))
            raise

    async def _get_production_waste_data(
        self,
        tenant_id: UUID,
        start_date: datetime,
        end_date: datetime
    ) -> Optional[Dict[str, Any]]:
        """Get production waste data from the production service using the shared client"""
        try:
            # Use the shared production client with proper authentication and resilience
            production_client = create_production_client(settings)

            data = await production_client.get_waste_analytics(
                str(tenant_id),
                start_date.isoformat(),
                end_date.isoformat()
            )

            if data:
                logger.info(
                    "Retrieved production waste data via production client",
                    tenant_id=str(tenant_id),
                    total_waste=data.get('total_production_waste', 0)
                )
                return data
            else:
                # The client returned None; return zeros as a fallback
                logger.warning(
                    "Production waste analytics returned None, using zeros",
                    tenant_id=str(tenant_id)
                )
                return {
                    'total_production_waste': 0,
                    'total_defects': 0,
                    'total_planned': 0,
                    'total_actual': 0
                }

        except Exception as e:
            logger.error(
                "Error calling production service for waste data via client",
                error=str(e),
                tenant_id=str(tenant_id)
            )
            # Return zeros on error so the metrics flow is not broken
            return {
                'total_production_waste': 0,
                'total_defects': 0,
                'total_planned': 0,
                'total_actual': 0
            }

    def _calculate_environmental_impact(self, waste_data: Dict[str, Any]) -> Dict[str, Any]:
        """Calculate the environmental impact of food waste"""
        try:
            total_waste_kg = waste_data['total_waste_kg']

            # CO2 emissions
            co2_emissions_kg = total_waste_kg * EnvironmentalConstants.CO2_PER_KG_WASTE
            co2_emissions_tons = co2_emissions_kg / 1000

            # Equivalent trees to offset
            trees_equivalent = co2_emissions_tons * EnvironmentalConstants.TREES_PER_TON_CO2

            # Water footprint (using the average for bakery products)
            water_liters = total_waste_kg * EnvironmentalConstants.WATER_FOOTPRINT['default']

            # Land use
            land_use_m2 = total_waste_kg * EnvironmentalConstants.LAND_USE_PER_KG

            # Human-readable equivalents for marketing
            equivalents = {
                'car_km': co2_emissions_kg / 0.12,  # Average car emits 120 g CO2/km (0.12 kg/km)
                'smartphone_charges': (co2_emissions_kg * 1000) / 8,  # 8 g CO2 per charge
                'showers': water_liters / 65,  # Average shower uses 65 L
                'trees_year_growth': trees_equivalent
            }

            return {
                'co2_emissions': {
                    'kg': round(co2_emissions_kg, 2),
                    'tons': round(co2_emissions_tons, 4),
                    'trees_to_offset': round(trees_equivalent, 1)
                },
                'water_footprint': {
                    'liters': round(water_liters, 2),
                    'cubic_meters': round(water_liters / 1000, 2)
                },
                'land_use': {
                    'square_meters': round(land_use_m2, 2),
                    'hectares': round(land_use_m2 / 10000, 4)
                },
                'human_equivalents': {
                    'car_km_equivalent': round(equivalents['car_km'], 0),
                    'smartphone_charges': round(equivalents['smartphone_charges'], 0),
                    'showers_equivalent': round(equivalents['showers'], 0),
                    'trees_planted': round(equivalents['trees_year_growth'], 1)
                }
            }

        except Exception as e:
            logger.error("Failed to calculate environmental impact", error=str(e))
            raise

    async def _calculate_sdg_compliance(
        self,
        db: AsyncSession,
        tenant_id: UUID,
        waste_data: Dict[str, Any],
        start_date: datetime,
        end_date: datetime
    ) -> Dict[str, Any]:
        """
        Calculate compliance with UN SDG 12.3
        Target: halve per capita global food waste by 2030

        IMPORTANT: This method assumes sufficient-data validation was done upstream.
        It should only be called when waste_data has meaningful production volumes.
        """
        try:
            # Get the baseline (first 90 days of operation)
            baseline = await self._get_baseline_waste(db, tenant_id)

            current_waste_percentage = waste_data['waste_percentage']
            total_production = waste_data['total_production_kg']

            # Check if we have a real baseline from production history
            has_real_baseline = baseline.get('data_available', False)
            baseline_percentage = baseline.get('waste_percentage', 0.0)

            # With no real baseline AND insufficient current production, no comparison is possible
            if not has_real_baseline and total_production < 50:
                logger.warning(
                    "Cannot calculate SDG compliance without baseline or sufficient production",
                    tenant_id=str(tenant_id),
                    total_production=total_production
                )
                return self._get_insufficient_sdg_data()

            # If we have no real baseline but do have current production, use it as the baseline
            if not has_real_baseline:
                logger.info(
                    "Using current period as baseline (no historical data available)",
                    tenant_id=str(tenant_id),
                    current_waste_percentage=current_waste_percentage
                )
                baseline_percentage = current_waste_percentage
                # Set reduction to 0 since we're establishing the baseline
                reduction_percentage = 0
                progress_to_target = 0
                status = 'baseline'
                status_label = 'Establishing Baseline'
            else:
                # We have a real baseline - calculate the actual reduction.
                # If current waste is higher than baseline, show a negative reduction (worse);
                # if current waste is lower than baseline, show a positive reduction (better).
                if baseline_percentage > 0:
                    reduction_percentage = ((baseline_percentage - current_waste_percentage) / baseline_percentage) * 100
                else:
                    reduction_percentage = 0

                # Calculate progress toward the 50% reduction target.
                # The target is a 50% reduction from baseline, so if the baseline
                # is 25%, the target is to reach 12.5% (25% * 0.5).
                target_reduction_percentage = 50.0
                target_waste_percentage = baseline_percentage * (1 - (target_reduction_percentage / 100))

                # Progress: how much of the 50% target has been achieved.
                # Reducing from 25% to 19.28% achieves
                # (25 - 19.28) / (25 - 12.5) = 5.72 / 12.5 = 45.8% of the target.
                if baseline_percentage > target_waste_percentage:
                    max_possible_reduction = baseline_percentage - target_waste_percentage
                    actual_reduction = baseline_percentage - current_waste_percentage
                    progress_to_target = (actual_reduction / max_possible_reduction) * 100 if max_possible_reduction > 0 else 0
                else:
                    # If current is already better than the target
                    progress_to_target = 100.0 if current_waste_percentage <= target_waste_percentage else 0.0

                # Ensure progress doesn't exceed 100%
                progress_to_target = min(progress_to_target, 100.0)

                # Status assessment based on the actual reduction achieved
                if reduction_percentage >= 50:
                    status = 'sdg_compliant'
                    status_label = 'SDG 12.3 Compliant'
                elif reduction_percentage >= 30:
                    status = 'on_track'
                    status_label = 'On Track to Compliance'
                elif reduction_percentage >= 10:
                    status = 'progressing'
                    status_label = 'Making Progress'
                elif reduction_percentage > 0:
                    status = 'improving'
                    status_label = 'Improving'
                elif reduction_percentage < 0:
                    status = 'above_baseline'
                    status_label = 'Above Baseline'
                else:
                    status = 'baseline'
                    status_label = 'At Baseline'

            return {
                'sdg_12_3': {
                    'baseline_waste_percentage': round(baseline_percentage, 2),
                    'current_waste_percentage': round(current_waste_percentage, 2),
                    'reduction_achieved': round(reduction_percentage, 2),
                    'target_reduction': 50.0,
                    'progress_to_target': round(max(progress_to_target, 0), 1),  # Ensure non-negative
                    'status': status,
                    'status_label': status_label,
                    'target_waste_percentage': round(baseline_percentage * 0.5, 2) if baseline_percentage > 0 else 0.0
                },
                'baseline_period': baseline.get('period', 'current_period'),
                'certification_ready': reduction_percentage >= 50 if has_real_baseline else False,
                'improvement_areas': self._identify_improvement_areas(waste_data)
            }

        except Exception as e:
            logger.error("Failed to calculate SDG compliance", error=str(e))
            raise

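    # Worked example of the progress computation above (figures illustrative):
    # with baseline = 25.0% and current = 19.28%,
    #   target_waste_percentage = 25.0 * (1 - 0.5)            = 12.5%
    #   max_possible_reduction  = 25.0 - 12.5                 = 12.5
    #   actual_reduction        = 25.0 - 19.28                = 5.72
    #   progress_to_target      = 5.72 / 12.5 * 100           ≈ 45.8%
    # while reduction_achieved  = (25.0 - 19.28) / 25.0 * 100 ≈ 22.9%,
    # which lands in the 'progressing' band (>= 10% but < 30%).
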
    def _get_insufficient_sdg_data(self) -> Dict[str, Any]:
        """Return the SDG compliance structure for the insufficient-data case"""
        return {
            'sdg_12_3': {
                'baseline_waste_percentage': 0.0,
                'current_waste_percentage': 0.0,
                'reduction_achieved': 0.0,
                'target_reduction': 50.0,
                'progress_to_target': 0.0,
                'status': 'insufficient_data',
                'status_label': 'Collecting Baseline Data',
                'target_waste_percentage': 0.0
            },
            'baseline_period': 'not_available',
            'certification_ready': False,
            'improvement_areas': ['start_production_tracking']
        }

    async def _get_baseline_waste(
        self,
        db: AsyncSession,
        tenant_id: UUID
    ) -> Dict[str, Any]:
        """Get the baseline waste percentage from the production service using the shared client"""
        try:
            # Use the shared production client with proper authentication and resilience
            production_client = create_production_client(settings)

            baseline_data = await production_client.get_baseline(str(tenant_id))

            if baseline_data and baseline_data.get('data_available', False):
                # Production service has real baseline data
                logger.info(
                    "Retrieved baseline from production service via client",
                    tenant_id=str(tenant_id),
                    baseline_percentage=baseline_data.get('waste_percentage', 0)
                )
                return {
                    'data_available': True,  # checked by _calculate_sdg_compliance
                    'waste_percentage': baseline_data['waste_percentage'],
                    'period': baseline_data['period'].get('type', 'first_90_days'),
                    'total_production_kg': baseline_data.get('total_production_kg', 0),
                    'total_waste_kg': baseline_data.get('total_waste_kg', 0)
                }
            else:
                # Production service doesn't have enough data yet
                logger.info(
                    "Production service baseline not available, using industry average",
                    tenant_id=str(tenant_id)
                )
                return {
                    'data_available': False,
                    'waste_percentage': EnvironmentalConstants.EU_BAKERY_BASELINE_WASTE * 100,
                    'period': 'industry_average',
                    'note': 'Using EU bakery industry average of 25% as baseline'
                }

        except Exception as e:
            logger.warning(
                "Error calling production service for baseline via client, using industry average",
                error=str(e),
                tenant_id=str(tenant_id)
            )

            # Fall back to the industry average
            return {
                'data_available': False,
                'waste_percentage': EnvironmentalConstants.EU_BAKERY_BASELINE_WASTE * 100,
                'period': 'industry_average',
                'note': 'Using EU bakery industry average of 25% as baseline'
            }

    async def _calculate_avoided_waste(
        self,
        db: AsyncSession,
        tenant_id: UUID,
        start_date: datetime,
        end_date: datetime
    ) -> Dict[str, Any]:
        """
        Calculate waste avoided through AI predictions and smart planning

        This is a KEY metric for marketing and grant applications
        """
        try:
            # Get AI-assisted batch data from the production service
            production_data = await self._get_production_waste_data(tenant_id, start_date, end_date)

            # Extract data with AI batch tracking
            total_planned = production_data.get('total_planned', 0) if production_data else 0
            total_waste = production_data.get('total_production_waste', 0) if production_data else 0
            ai_assisted_batches = production_data.get('ai_assisted_batches', 0) if production_data else 0

            # Estimate waste avoided by comparing to the industry average
            if total_planned > 0:
                # Industry average waste: 25%
                # Current actual waste from production
                industry_expected_waste = total_planned * EnvironmentalConstants.EU_BAKERY_BASELINE_WASTE
                actual_waste = total_waste
                estimated_avoided = max(0, industry_expected_waste - actual_waste)

                # Calculate the environmental impact of the avoided waste
                avoided_co2 = estimated_avoided * EnvironmentalConstants.CO2_PER_KG_WASTE
                avoided_water = estimated_avoided * EnvironmentalConstants.WATER_FOOTPRINT['default']

                return {
                    'waste_avoided_kg': round(estimated_avoided, 2),
                    'ai_assisted_batches': ai_assisted_batches,
                    'environmental_impact_avoided': {
                        'co2_kg': round(avoided_co2, 2),
                        'water_liters': round(avoided_water, 2)
                    },
                    'methodology': 'compared_to_industry_baseline'
                }
            else:
                return {
                    'waste_avoided_kg': 0,
                    'ai_assisted_batches': 0,
                    'environmental_impact_avoided': {
                        'co2_kg': 0,
                        'water_liters': 0
                    },
                    'methodology': 'insufficient_data'
                }

        except Exception as e:
            logger.error("Failed to calculate avoided waste", error=str(e))
            return {
                'waste_avoided_kg': 0,
                'ai_assisted_batches': 0,
                'environmental_impact_avoided': {
                    'co2_kg': 0,
                    'water_liters': 0
                },
                'methodology': 'error_occurred'
            }

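    # Worked example for the avoided-waste estimate above (figures illustrative):
    # with total_planned = 1000 kg and total_waste = 120 kg,
    #   industry_expected_waste = 1000 * 0.25       = 250 kg
    #   estimated_avoided       = max(0, 250 - 120) = 130 kg
    #   avoided_co2             = 130 * 1.9         = 247 kg CO2e
    #   avoided_water           = 130 * 1500        = 195,000 L
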
    def _calculate_financial_impact(self, waste_data: Dict[str, Any]) -> Dict[str, Any]:
        """Calculate the financial impact of food waste"""
        # Average cost per kg of bakery products: €3.50
        avg_cost_per_kg = 3.50

        total_waste_kg = waste_data['total_waste_kg']
        waste_cost = total_waste_kg * avg_cost_per_kg

        # Potential savings if waste were reduced by 30%
        potential_savings = waste_cost * 0.30

        return {
            'waste_cost_eur': round(waste_cost, 2),
            'cost_per_kg': avg_cost_per_kg,
            'potential_monthly_savings': round(potential_savings, 2),
            'annual_projection': round(waste_cost * 12, 2)  # Assumes the reporting period is roughly one month
        }

    def _identify_improvement_areas(self, waste_data: Dict[str, Any]) -> List[str]:
        """Identify areas for improvement based on waste data"""
        areas = []

        waste_by_reason = waste_data.get('waste_by_reason', {})

        if waste_by_reason.get('production_defects', 0) > waste_data['total_waste_kg'] * 0.3:
            areas.append('quality_control_in_production')

        if waste_by_reason.get('expired_inventory', 0) > waste_data['total_waste_kg'] * 0.4:
            areas.append('inventory_rotation_management')

        if waste_data.get('waste_percentage', 0) > 20:
            areas.append('demand_forecasting_accuracy')

        if not areas:
            areas.append('maintain_current_practices')

        return areas

    def _assess_grant_readiness(self, sdg_compliance: Dict[str, Any]) -> Dict[str, Any]:
        """
        Assess readiness for EU grant programs accessible to Spanish bakeries and retail.
        Based on verified research into 2026 programs; last updated December 2025.
        """
        reduction = sdg_compliance['sdg_12_3']['reduction_achieved']

        grants = {
            'horizon_europe_food_systems': {
                'eligible': reduction >= 20,
                'confidence': 'high' if reduction >= 35 else 'medium' if reduction >= 20 else 'low',
                'requirements_met': reduction >= 20,
                'funding_eur': 12_000_000,  # €3-12M per project
                'deadline': '2026-02-18',
                'program_type': 'grant',
                'category': 'European Union'
            },
            'horizon_europe_circular_sme': {
                'eligible': reduction >= 15,
                'confidence': 'high' if reduction >= 25 else 'medium' if reduction >= 15 else 'low',
                'requirements_met': reduction >= 15,
                'funding_eur': 10_000_000,  # €10M total program
                'deadline': '2026-02-18',
                'program_type': 'grant',
                'category': 'European Union'
            },
            'eit_food_impact_2026': {
                'eligible': reduction >= 15,
                'confidence': 'high' if reduction >= 25 else 'medium' if reduction >= 15 else 'low',
                'requirements_met': reduction >= 15,
                'funding_eur': 1_000_000,  # €50K-1M range
                'deadline': 'rolling_2026',
                'program_type': 'grant',
                'category': 'European Union'
            },
            'eib_circular_economy': {
                'eligible': reduction >= 10,
                'confidence': 'high' if reduction >= 20 else 'medium' if reduction >= 10 else 'low',
                'requirements_met': reduction >= 10,
                'funding_eur': 12_500_000,  # Up to €12.5M loans
                'deadline': 'ongoing_2026',
                'program_type': 'loan',
                'category': 'European Union'
            },
            'circular_economy_perte': {
                'eligible': reduction >= 15,
                'confidence': 'high' if reduction >= 25 else 'medium' if reduction >= 15 else 'low',
                'requirements_met': reduction >= 15,
                'funding_eur': 10_000_000,  # €150K-10M range
                'deadline': 'rolling_until_2026',
                'program_type': 'grant',
                'category': 'Spain'
            },
            'planes_turismo_2026': {
                'eligible': reduction >= 10,
                'confidence': 'medium',
                'requirements_met': reduction >= 10,
                'funding_eur': 500_000,  # Variable by region
                'deadline': '2026-12-31',
                'program_type': 'grant',
                'category': 'Spain',
                'sector_specific': 'tourism'
            },
            'un_sdg_certified': {
                'eligible': reduction >= 50,
                'confidence': 'high' if reduction >= 50 else 'low',
                'requirements_met': reduction >= 50,
                'funding_eur': 0,  # Certification, not funding
                'deadline': 'ongoing',
                'program_type': 'certification',
                'category': 'International'
            }
        }

        overall_readiness = sum(1 for g in grants.values() if g['eligible']) / len(grants) * 100

        return {
            'overall_readiness_percentage': round(overall_readiness, 1),
            'grant_programs': grants,
            'recommended_applications': [
                name for name, details in grants.items() if details['eligible']
            ],
            'spain_compliance': {
                'law_1_2025': True,  # Spanish food waste prevention law
                'circular_economy_strategy': True  # Spanish Circular Economy Strategy
            }
        }

    async def export_grant_report(
        self,
        db: AsyncSession,
        tenant_id: UUID,
        grant_type: str = 'general',
        start_date: Optional[datetime] = None,
        end_date: Optional[datetime] = None
    ) -> Dict[str, Any]:
        """
        Generate an export-ready report for grant applications

        Formats data according to common grant application requirements
        """
        try:
            metrics = await self.get_sustainability_metrics(
                db, tenant_id, start_date, end_date
            )

            # Format for grant applications
            report = {
                'report_metadata': {
                    'generated_at': datetime.now().isoformat(),
                    'report_type': grant_type,
                    'period': metrics['period'],
                    'tenant_id': str(tenant_id)
                },
                'executive_summary': {
                    'total_waste_reduced_kg': metrics['waste_metrics']['total_waste_kg'],
                    'waste_reduction_percentage': metrics['sdg_compliance']['sdg_12_3']['reduction_achieved'],
                    'co2_emissions_avoided_kg': metrics['environmental_impact']['co2_emissions']['kg'],
                    'financial_savings_eur': metrics['financial_impact']['waste_cost_eur'],
                    'sdg_compliance_status': metrics['sdg_compliance']['sdg_12_3']['status_label']
                },
                'detailed_metrics': metrics,
                'certifications': {
                    'sdg_12_3_compliant': metrics['sdg_compliance']['certification_ready'],
                    'grant_programs_eligible': metrics['grant_readiness']['recommended_applications']
                },
                'supporting_data': {
                    'baseline_comparison': {
                        'baseline': metrics['sdg_compliance']['sdg_12_3']['baseline_waste_percentage'],
                        'current': metrics['sdg_compliance']['sdg_12_3']['current_waste_percentage'],
                        'improvement': metrics['sdg_compliance']['sdg_12_3']['reduction_achieved']
                    },
                    'environmental_benefits': metrics['environmental_impact'],
                    'financial_benefits': metrics['financial_impact']
                }
            }

            return report

        except Exception as e:
            logger.error("Failed to generate grant report", error=str(e))
            raise
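A minimal sketch of driving the exporter above (illustrative only; the session factory is borrowed from this service's transformation module, and the tenant UUID is a placeholder):

    import asyncio
    from uuid import UUID

    from app.core.database import get_db_transaction
    from app.services.sustainability_service import SustainabilityService

    async def main() -> None:
        service = SustainabilityService()
        async with get_db_transaction() as db:
            report = await service.export_grant_report(
                db, tenant_id=UUID("00000000-0000-0000-0000-000000000001")
            )
        print(report['executive_summary'])

    asyncio.run(main())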
98
services/inventory/app/services/tenant_deletion_service.py
Normal file
@@ -0,0 +1,98 @@
"""
Inventory Service - Tenant Data Deletion
Handles deletion of all inventory-related data for a tenant
"""
from typing import Dict
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select, delete, func
import structlog

from shared.services.tenant_deletion import BaseTenantDataDeletionService, TenantDataDeletionResult

logger = structlog.get_logger()


class InventoryTenantDeletionService(BaseTenantDataDeletionService):
    """Service for deleting all inventory-related data for a tenant"""

    def __init__(self, db_session: AsyncSession):
        super().__init__("inventory-service")
        self.db = db_session

    async def get_tenant_data_preview(self, tenant_id: str) -> Dict[str, int]:
        """Get counts of what would be deleted"""

        try:
            preview = {}

            # Import models here to avoid circular imports
            from app.models.inventory import InventoryItem, InventoryTransaction

            # Count inventory items
            item_count = await self.db.scalar(
                select(func.count(InventoryItem.id)).where(InventoryItem.tenant_id == tenant_id)
            )
            preview["inventory_items"] = item_count or 0

            # Count inventory transactions
            transaction_count = await self.db.scalar(
                select(func.count(InventoryTransaction.id)).where(InventoryTransaction.tenant_id == tenant_id)
            )
            preview["inventory_transactions"] = transaction_count or 0

            return preview

        except Exception as e:
            logger.error("Error getting deletion preview",
                         tenant_id=tenant_id,
                         error=str(e))
            return {}

    async def delete_tenant_data(self, tenant_id: str) -> TenantDataDeletionResult:
        """Delete all data for a tenant"""

        result = TenantDataDeletionResult(tenant_id, self.service_name)

        try:
            # Import models here to avoid circular imports
            from app.models.inventory import InventoryItem, InventoryTransaction

            # Delete inventory transactions
            try:
                trans_delete = await self.db.execute(
                    delete(InventoryTransaction).where(InventoryTransaction.tenant_id == tenant_id)
                )
                result.add_deleted_items("inventory_transactions", trans_delete.rowcount)
            except Exception as e:
                logger.error("Error deleting inventory transactions",
                             tenant_id=tenant_id,
                             error=str(e))
                result.add_error(f"Inventory transaction deletion: {str(e)}")

            # Delete inventory items
            try:
                item_delete = await self.db.execute(
                    delete(InventoryItem).where(InventoryItem.tenant_id == tenant_id)
                )
                result.add_deleted_items("inventory_items", item_delete.rowcount)
            except Exception as e:
                logger.error("Error deleting inventory items",
                             tenant_id=tenant_id,
                             error=str(e))
                result.add_error(f"Inventory item deletion: {str(e)}")

            # Commit all deletions
            await self.db.commit()

            logger.info("Tenant data deletion completed",
                        tenant_id=tenant_id,
                        deleted_counts=result.deleted_counts)

        except Exception as e:
            logger.error("Fatal error during tenant data deletion",
                         tenant_id=tenant_id,
                         error=str(e))
            await self.db.rollback()
            result.add_error(f"Fatal error: {str(e)}")

        return result
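A minimal sketch of the preview-then-delete flow above (illustrative only; the session factory name is an assumption reused from this service's other modules, and the tenant ID is a placeholder):

    import asyncio

    from app.core.database import get_db_transaction
    from app.services.tenant_deletion_service import InventoryTenantDeletionService

    async def wipe_tenant(tenant_id: str) -> None:
        async with get_db_transaction() as db:
            service = InventoryTenantDeletionService(db)
            preview = await service.get_tenant_data_preview(tenant_id)
            print("About to delete:", preview)
            result = await service.delete_tenant_data(tenant_id)
            print("Deleted:", result.deleted_counts)

    asyncio.run(wipe_tenant("11111111-1111-1111-1111-111111111111"))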
346
services/inventory/app/services/transformation_service.py
Normal file
@@ -0,0 +1,346 @@
# services/inventory/app/services/transformation_service.py
"""
Product Transformation Service - Business Logic Layer
"""

from typing import List, Optional, Dict, Any, Tuple
from uuid import UUID
from datetime import datetime, timedelta
import structlog
import json

from app.models.inventory import ProductTransformation, Stock, StockMovement, StockMovementType, ProductionStage
from app.repositories.transformation_repository import TransformationRepository
from app.repositories.ingredient_repository import IngredientRepository
from app.repositories.stock_repository import StockRepository
from app.repositories.stock_movement_repository import StockMovementRepository
from app.schemas.inventory import (
    ProductTransformationCreate, ProductTransformationResponse,
    StockCreate, StockMovementCreate,
    IngredientResponse
)
from app.core.database import get_db_transaction
from shared.database.exceptions import DatabaseError

logger = structlog.get_logger()


class TransformationService:
    """Service layer for product transformation operations"""

    def __init__(self):
        pass

    async def create_transformation(
        self,
        transformation_data: ProductTransformationCreate,
        tenant_id: UUID,
        user_id: Optional[UUID] = None
    ) -> ProductTransformationResponse:
        """Create a product transformation with stock movements"""
        try:
            async with get_db_transaction() as db:
                transformation_repo = TransformationRepository(db)
                ingredient_repo = IngredientRepository(db)
                stock_repo = StockRepository(db)
                movement_repo = StockMovementRepository(db)

                # Validate that both ingredients exist
                source_ingredient = await ingredient_repo.get_by_id(UUID(transformation_data.source_ingredient_id))
                target_ingredient = await ingredient_repo.get_by_id(UUID(transformation_data.target_ingredient_id))

                if not source_ingredient or source_ingredient.tenant_id != tenant_id:
                    raise ValueError("Source ingredient not found")
                if not target_ingredient or target_ingredient.tenant_id != tenant_id:
                    raise ValueError("Target ingredient not found")

                # Reserve source stock using FIFO by default
                source_reservations = await stock_repo.reserve_stock(
                    tenant_id,
                    UUID(transformation_data.source_ingredient_id),
                    transformation_data.source_quantity,
                    fifo=True
                )

                if not source_reservations:
                    raise ValueError(f"Insufficient stock available for transformation. Required: {transformation_data.source_quantity}")

                # Create the transformation record
                source_batch_numbers = [res.get('batch_number') for res in source_reservations if res.get('batch_number')]
                transformation = await transformation_repo.create_transformation(
                    transformation_data,
                    tenant_id,
                    user_id,
                    source_batch_numbers
                )

                # Calculate the expiration date for the target product
                target_expiration_date = self._calculate_target_expiration(
                    transformation_data.expiration_calculation_method,
                    transformation_data.expiration_days_offset,
                    source_reservations
                )

                # Get the current stock level before source consumption
                current_source_stock = await stock_repo.get_total_stock_by_ingredient(tenant_id, UUID(transformation_data.source_ingredient_id))
                running_stock_level = current_source_stock['total_available']

                # Consume source stock and create movements with progressive tracking
                consumed_items = []
                for reservation in source_reservations:
                    stock_id = UUID(reservation['stock_id'])
                    reserved_qty = reservation['reserved_quantity']

                    # Calculate before/after for this specific batch
                    batch_quantity_before = running_stock_level
                    batch_quantity_after = running_stock_level - reserved_qty
                    running_stock_level = batch_quantity_after  # Update for the next iteration

                    # Consume from the reserved stock
                    await stock_repo.consume_stock(stock_id, reserved_qty, from_reserved=True)

                    # Create a movement record for the source consumption with progressive tracking
                    movement_data = StockMovementCreate(
                        ingredient_id=transformation_data.source_ingredient_id,
                        stock_id=str(stock_id),
                        movement_type=StockMovementType.TRANSFORMATION,
                        quantity=reserved_qty,
                        reference_number=transformation.transformation_reference,
                        notes=f"Transformation: {transformation_data.source_stage.value} → {transformation_data.target_stage.value}"
                    )
                    await movement_repo.create_movement(movement_data, tenant_id, user_id, batch_quantity_before, batch_quantity_after)

                    consumed_items.append({
                        'stock_id': str(stock_id),
                        'quantity_consumed': reserved_qty,
                        'batch_number': reservation.get('batch_number')
                    })

                # Create the target stock entry
                target_stock_data = StockCreate(
                    ingredient_id=transformation_data.target_ingredient_id,
                    production_stage=transformation_data.target_stage,
                    transformation_reference=transformation.transformation_reference,
                    current_quantity=transformation_data.target_quantity,
                    batch_number=transformation_data.target_batch_number or f"TRANS-{transformation.transformation_reference}",
                    expiration_date=target_expiration_date['expiration_date'],
                    original_expiration_date=target_expiration_date.get('original_expiration_date'),
                    transformation_date=transformation.transformation_date,
                    final_expiration_date=target_expiration_date['expiration_date'],
                    unit_cost=self._calculate_target_unit_cost(consumed_items, transformation_data.target_quantity),
                    quality_status="good"
                )

                target_stock = await stock_repo.create_stock_entry(target_stock_data, tenant_id)

                # Get the current stock level before the target addition
                current_target_stock = await stock_repo.get_total_stock_by_ingredient(tenant_id, UUID(transformation_data.target_ingredient_id))
                target_quantity_before = current_target_stock['total_available']
                target_quantity_after = target_quantity_before + transformation_data.target_quantity

                # Create the target stock movement
                target_movement_data = StockMovementCreate(
                    ingredient_id=transformation_data.target_ingredient_id,
                    stock_id=str(target_stock.id),
                    movement_type=StockMovementType.TRANSFORMATION,
                    quantity=transformation_data.target_quantity,
                    reference_number=transformation.transformation_reference,
                    notes=f"Transformation result: {transformation_data.source_stage.value} → {transformation_data.target_stage.value}"
                )
                await movement_repo.create_movement(target_movement_data, tenant_id, user_id, target_quantity_before, target_quantity_after)

                # Convert to the response schema
                response = ProductTransformationResponse(**transformation.to_dict())
                response.source_ingredient = IngredientResponse(**source_ingredient.to_dict())
                response.target_ingredient = IngredientResponse(**target_ingredient.to_dict())

                logger.info(
                    "Transformation completed successfully",
                    transformation_id=transformation.id,
                    reference=transformation.transformation_reference,
                    source_quantity=transformation_data.source_quantity,
                    target_quantity=transformation_data.target_quantity
                )
                return response

        except Exception as e:
            logger.error("Failed to create transformation", error=str(e), tenant_id=tenant_id)
            raise

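    # Worked example of the progressive before/after tracking above (figures
    # illustrative): consuming 30 units FIFO across two batches when 100 units
    # are available tenant-wide:
    #   batch 1 (25 reserved): before = 100, after = 75
    #   batch 2 (5 reserved):  before = 75,  after = 70
    # so each movement row records the tenant-wide stock level at that instant.
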
async def get_transformation(
|
||||
self,
|
||||
transformation_id: UUID,
|
||||
tenant_id: UUID
|
||||
) -> Optional[ProductTransformationResponse]:
|
||||
"""Get transformation by ID"""
|
||||
try:
|
||||
async with get_db_transaction() as db:
|
||||
transformation_repo = TransformationRepository(db)
|
||||
ingredient_repo = IngredientRepository(db)
|
||||
|
||||
transformation = await transformation_repo.get_by_id(transformation_id)
|
||||
if not transformation or transformation.tenant_id != tenant_id:
|
||||
return None
|
||||
|
||||
# Get related ingredients
|
||||
source_ingredient = await ingredient_repo.get_by_id(transformation.source_ingredient_id)
|
||||
target_ingredient = await ingredient_repo.get_by_id(transformation.target_ingredient_id)
|
||||
|
||||
response = ProductTransformationResponse(**transformation.to_dict())
|
||||
if source_ingredient:
|
||||
response.source_ingredient = IngredientResponse(**source_ingredient.to_dict())
|
||||
if target_ingredient:
|
||||
response.target_ingredient = IngredientResponse(**target_ingredient.to_dict())
|
||||
|
||||
return response
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Failed to get transformation", error=str(e), transformation_id=transformation_id)
|
||||
raise
|
||||
|
||||
    async def get_transformations(
        self,
        tenant_id: UUID,
        skip: int = 0,
        limit: int = 100,
        ingredient_id: Optional[UUID] = None,
        source_stage: Optional[ProductionStage] = None,
        target_stage: Optional[ProductionStage] = None,
        days_back: Optional[int] = None
    ) -> List[ProductTransformationResponse]:
        """Get transformations with filtering"""
        try:
            async with get_db_transaction() as db:
                transformation_repo = TransformationRepository(db)
                ingredient_repo = IngredientRepository(db)

                if ingredient_id:
                    # Get transformations where the ingredient is either source or target
                    source_transformations = await transformation_repo.get_transformations_by_ingredient(
                        tenant_id, ingredient_id, is_source=True, skip=0, limit=limit // 2, days_back=days_back
                    )
                    target_transformations = await transformation_repo.get_transformations_by_ingredient(
                        tenant_id, ingredient_id, is_source=False, skip=0, limit=limit // 2, days_back=days_back
                    )
                    transformations = source_transformations + target_transformations
                    # Remove duplicates, sort by date (newest first), then paginate
                    unique_transformations = {t.id: t for t in transformations}.values()
                    transformations = sorted(unique_transformations, key=lambda x: x.transformation_date, reverse=True)
                    transformations = transformations[skip:skip + limit]
                else:
                    transformations = await transformation_repo.get_transformations_by_stage(
                        tenant_id, source_stage, target_stage, skip, limit, days_back
                    )

                responses = []
                for transformation in transformations:
                    # Get related ingredients
                    source_ingredient = await ingredient_repo.get_by_id(transformation.source_ingredient_id)
                    target_ingredient = await ingredient_repo.get_by_id(transformation.target_ingredient_id)

                    response = ProductTransformationResponse(**transformation.to_dict())
                    if source_ingredient:
                        response.source_ingredient = IngredientResponse(**source_ingredient.to_dict())
                    if target_ingredient:
                        response.target_ingredient = IngredientResponse(**target_ingredient.to_dict())

                    responses.append(response)

                return responses

        except Exception as e:
            logger.error("Failed to get transformations", error=str(e), tenant_id=tenant_id)
            raise

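    # Hedged usage sketch: fetch the 20 most recent transformations that touch a
    # single ingredient over the last week (identifiers illustrative). Note that
    # the ingredient-filtered path above pulls up to limit // 2 records for each
    # of the source and target roles before deduplicating and paginating:
    #
    #     recent = await service.get_transformations(
    #         tenant_id=tenant_id,
    #         ingredient_id=dough_id,
    #         limit=20,
    #         days_back=7,
    #     )
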
    def _calculate_target_expiration(
        self,
        calculation_method: str,
        expiration_days_offset: Optional[int],
        source_reservations: List[Dict[str, Any]]
    ) -> Dict[str, Optional[datetime]]:
        """Calculate expiration date for the target product"""
        current_time = datetime.now()

        if calculation_method == "days_from_transformation":
            # Expiration is the transformation date plus the configured offset
            if expiration_days_offset:
                expiration_date = current_time + timedelta(days=expiration_days_offset)
            else:
                expiration_date = current_time + timedelta(days=1)  # Default: 1 day for fresh baked goods

            # Use the earliest source expiration as the original
            original_expiration = None
            if source_reservations:
                source_expirations = [res.get('expiration_date') for res in source_reservations if res.get('expiration_date')]
                if source_expirations:
                    original_expiration = min(source_expirations)

            return {
                'expiration_date': expiration_date,
                'original_expiration_date': original_expiration
            }

        elif calculation_method == "preserve_original":
            # Use the earliest expiration date from the source stock
            if source_reservations:
                source_expirations = [res.get('expiration_date') for res in source_reservations if res.get('expiration_date')]
                if source_expirations:
                    expiration_date = min(source_expirations)
                    return {
                        'expiration_date': expiration_date,
                        'original_expiration_date': expiration_date
                    }

            # Fallback when no source expiration is available
            return {
                'expiration_date': current_time + timedelta(days=7),
                'original_expiration_date': None
            }

        else:
            # Default fallback for unknown calculation methods
            return {
                'expiration_date': current_time + timedelta(days=1),
                'original_expiration_date': None
            }

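    # Hedged worked example for _calculate_target_expiration (values illustrative):
    #
    #     result = self._calculate_target_expiration(
    #         calculation_method="days_from_transformation",
    #         expiration_days_offset=2,
    #         source_reservations=[{'expiration_date': datetime(2024, 3, 5)}],
    #     )
    #     # result['expiration_date'] is now + 2 days
    #     # result['original_expiration_date'] == datetime(2024, 3, 5)
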
    def _calculate_target_unit_cost(
        self,
        consumed_items: List[Dict[str, Any]],
        target_quantity: float
    ) -> Optional[float]:
        """Calculate unit cost for the target product based on consumed items"""
        # This is a simplified calculation - a fuller model would also account
        # for additional costs such as labor and energy.
        total_source_cost = 0.0
        total_source_quantity = 0.0

        for item in consumed_items:
            quantity = item.get('quantity_consumed', 0)
            # A full implementation would fetch the unit cost from the stock
            # items; here an optional 'unit_cost' field is read if the caller
            # supplied one (assumed field name), otherwise it contributes zero.
            total_source_cost += quantity * item.get('unit_cost', 0.0)
            total_source_quantity += quantity

        if total_source_cost > 0 and target_quantity > 0:
            # Simple cost transfer based on the quantity ratio
            return total_source_cost / target_quantity

        # No cost information available
        return None

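    # Hedged worked example: two consumed lots carrying unit costs - 5.0 kg at
    # 2.00 and 3.0 kg at 4.00 - give a total source cost of 22.00; producing
    # 8.0 units of the target yields 22.00 / 8.0 = 2.75 per unit.
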
    async def get_transformation_summary(
        self,
        tenant_id: UUID,
        days_back: int = 30
    ) -> Dict[str, Any]:
        """Get transformation summary for the dashboard"""
        try:
            async with get_db_transaction() as db:
                transformation_repo = TransformationRepository(db)
                summary = await transformation_repo.get_transformation_summary_by_period(tenant_id, days_back)
                return summary

        except Exception as e:
            logger.error("Failed to get transformation summary", error=str(e), tenant_id=tenant_id)
            raise
26
services/inventory/app/utils/__init__.py
Normal file
@@ -0,0 +1,26 @@
# services/inventory/app/utils/__init__.py
"""
Utility modules for the inventory service
"""

from .cache import (
    get_redis_client,
    close_redis,
    get_cached,
    set_cached,
    delete_cached,
    delete_pattern,
    cache_response,
    make_cache_key,
)

__all__ = [
    'get_redis_client',
    'close_redis',
    'get_cached',
    'set_cached',
    'delete_cached',
    'delete_pattern',
    'cache_response',
    'make_cache_key',
]
265
services/inventory/app/utils/cache.py
Normal file
@@ -0,0 +1,265 @@
# services/inventory/app/utils/cache.py
"""
Redis caching utilities for dashboard endpoints
"""

import json
import redis.asyncio as redis
from typing import Optional, Any, Callable
from functools import wraps
import structlog
from app.core.config import settings
from pydantic import BaseModel

logger = structlog.get_logger()

# Redis client instance
_redis_client: Optional[redis.Redis] = None


async def get_redis_client() -> Optional[redis.Redis]:
    """Get or create the Redis client; returns None if Redis is unreachable"""
    global _redis_client

    if _redis_client is None:
        try:
            # Check whether TLS is enabled - convert the string setting to a boolean
            redis_tls_str = str(getattr(settings, 'REDIS_TLS_ENABLED', 'false')).lower()
            redis_tls_enabled = redis_tls_str in ('true', '1', 'yes', 'on')

            connection_kwargs = {
                'host': str(getattr(settings, 'REDIS_HOST', 'localhost')),
                'port': int(getattr(settings, 'REDIS_PORT', 6379)),
                'db': int(getattr(settings, 'REDIS_DB', 0)),
                'decode_responses': True,
                'socket_connect_timeout': 5,
                'socket_timeout': 5
            }

            # Add password if configured
            redis_password = getattr(settings, 'REDIS_PASSWORD', None)
            if redis_password:
                connection_kwargs['password'] = redis_password

            # Add SSL/TLS support if enabled
            if redis_tls_enabled:
                import ssl
                connection_kwargs['ssl'] = True
                connection_kwargs['ssl_cert_reqs'] = ssl.CERT_NONE
                logger.debug(f"Redis TLS enabled - connecting with SSL to {connection_kwargs['host']}:{connection_kwargs['port']}")

            _redis_client = redis.Redis(**connection_kwargs)

            # Test the connection
            await _redis_client.ping()
            logger.info(f"Redis client connected successfully (TLS: {redis_tls_enabled})")
        except Exception as e:
            logger.warning(f"Failed to connect to Redis: {e}. Caching will be disabled.")
            _redis_client = None

    return _redis_client

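# Hedged configuration sketch: the settings read via getattr above suggest a
# config surface like the following; the exact variable names on `settings`
# are assumptions drawn from this function, not a documented contract:
#
#     REDIS_HOST=redis.internal
#     REDIS_PORT=6380
#     REDIS_DB=0
#     REDIS_PASSWORD=...
#     REDIS_TLS_ENABLED=true
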
async def close_redis():
    """Close the Redis connection"""
    global _redis_client
    if _redis_client:
        await _redis_client.close()
        _redis_client = None
        logger.info("Redis connection closed")

async def get_cached(key: str) -> Optional[Any]:
    """
    Get a cached value by key

    Args:
        key: Cache key

    Returns:
        Cached value (deserialized from JSON), or None if not found or on error
    """
    try:
        client = await get_redis_client()
        if not client:
            return None

        cached = await client.get(key)
        if cached:
            logger.debug(f"Cache hit: {key}")
            return json.loads(cached)
        else:
            logger.debug(f"Cache miss: {key}")
            return None
    except Exception as e:
        logger.warning(f"Cache get error for key {key}: {e}")
        return None

def _serialize_value(value: Any) -> Any:
    """
    Recursively serialize values for JSON storage, handling Pydantic models properly.

    Args:
        value: Value to serialize

    Returns:
        JSON-serializable value
    """
    if isinstance(value, BaseModel):
        # Convert a Pydantic model to a dictionary
        return value.model_dump()
    elif isinstance(value, (list, tuple)):
        # Recursively serialize list/tuple elements
        return [_serialize_value(item) for item in value]
    elif isinstance(value, dict):
        # Recursively serialize dictionary values
        return {key: _serialize_value(val) for key, val in value.items()}
    else:
        # For other types, rely on default JSON serialization
        return value

async def set_cached(key: str, value: Any, ttl: int = 60) -> bool:
    """
    Set a cached value with a TTL

    Args:
        key: Cache key
        value: Value to cache (will be JSON serialized)
        ttl: Time to live in seconds

    Returns:
        True if successful, False otherwise
    """
    try:
        client = await get_redis_client()
        if not client:
            return False

        # Serialize the value properly before JSON encoding
        serialized_value = _serialize_value(value)
        serialized = json.dumps(serialized_value)
        await client.setex(key, ttl, serialized)
        logger.debug(f"Cache set: {key} (TTL: {ttl}s)")
        return True
    except Exception as e:
        logger.warning(f"Cache set error for key {key}: {e}")
        return False

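# Hedged round-trip sketch for the get/set pair above; the payload is
# illustrative, and Pydantic models pass through _serialize_value first:
#
#     ok = await set_cached("dashboard:health:tenant-1", {"status": "ok"}, ttl=30)
#     snapshot = await get_cached("dashboard:health:tenant-1")  # {'status': 'ok'} or None
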
async def delete_cached(key: str) -> bool:
    """
    Delete a cached value

    Args:
        key: Cache key

    Returns:
        True if successful, False otherwise
    """
    try:
        client = await get_redis_client()
        if not client:
            return False

        await client.delete(key)
        logger.debug(f"Cache deleted: {key}")
        return True
    except Exception as e:
        logger.warning(f"Cache delete error for key {key}: {e}")
        return False

async def delete_pattern(pattern: str) -> int:
    """
    Delete all keys matching a pattern

    Args:
        pattern: Redis key pattern (e.g., "dashboard:*")

    Returns:
        Number of keys deleted
    """
    try:
        client = await get_redis_client()
        if not client:
            return 0

        keys = []
        async for key in client.scan_iter(match=pattern):
            keys.append(key)

        if keys:
            deleted = await client.delete(*keys)
            logger.info(f"Deleted {deleted} keys matching pattern: {pattern}")
            return deleted
        return 0
    except Exception as e:
        logger.warning(f"Cache delete pattern error for {pattern}: {e}")
        return 0

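# Hedged invalidation sketch: after a write that changes a tenant's dashboard
# data, all of that tenant's keys can be dropped in one call (key layout
# illustrative, matching the f"{key_prefix}:{tenant_id}" scheme used below):
#
#     removed = await delete_pattern(f"dashboard:*:{tenant_id}")
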
def cache_response(key_prefix: str, ttl: int = 60):
    """
    Decorator to cache endpoint responses

    Args:
        key_prefix: Prefix for the cache key (combined with tenant_id)
        ttl: Time to live in seconds

    Usage:
        @cache_response("dashboard:health", ttl=30)
        async def get_health(tenant_id: str):
            ...
    """
    def decorator(func: Callable):
        @wraps(func)
        async def wrapper(*args, **kwargs):
            # Extract tenant_id from kwargs or args
            tenant_id = kwargs.get('tenant_id')
            if not tenant_id and args:
                # Fall back to the first positional argument
                tenant_id = args[0]

            if not tenant_id:
                # No tenant_id, skip caching
                return await func(*args, **kwargs)

            # Build the cache key
            cache_key = f"{key_prefix}:{tenant_id}"

            # Try the cache first
            cached_value = await get_cached(cache_key)
            if cached_value is not None:
                return cached_value

            # Execute the wrapped function
            result = await func(*args, **kwargs)

            # Cache the result
            await set_cached(cache_key, result, ttl)

            return result

        return wrapper
    return decorator

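# Usage note (hedged): the wrapper treats a None cache read as a miss, so an
# endpoint that legitimately returns None is simply re-executed on every call;
# misses always fall through to the real handler, so behavior stays correct.
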
def make_cache_key(prefix: str, tenant_id: str, **params) -> str:
    """
    Create a cache key with optional parameters

    Args:
        prefix: Key prefix
        tenant_id: Tenant ID
        **params: Additional parameters to include in the key

    Returns:
        Cache key string
    """
    key_parts = [prefix, tenant_id]
    for k, v in sorted(params.items()):
        if v is not None:
            key_parts.append(f"{k}:{v}")
    return ":".join(key_parts)
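# Hedged example: parameters are sorted by name and None values are skipped, so
# equivalent calls produce identical keys:
#
#     make_cache_key("inventory:analytics", "tenant-1", days_back=30, stage=None)
#     # -> "inventory:analytics:tenant-1:days_back:30"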