Initial commit - production deployment

2026-01-21 17:17:16 +01:00
commit c23d00dd92
2289 changed files with 638440 additions and 0 deletions

@@ -0,0 +1,314 @@
# services/inventory/app/api/analytics.py
"""
Analytics API endpoints for Inventory Service
Following standardized URL structure: /api/v1/tenants/{tenant_id}/inventory/analytics/{operation}
Requires: Professional or Enterprise subscription tier
"""
from datetime import datetime, timedelta
from typing import List, Optional
from uuid import UUID
from fastapi import APIRouter, Depends, HTTPException, Query, Path, status
from sqlalchemy.ext.asyncio import AsyncSession
import structlog
from shared.auth.decorators import get_current_user_dep
from shared.auth.access_control import analytics_tier_required
from app.core.database import get_db
from app.services.inventory_service import InventoryService
from app.services.dashboard_service import DashboardService
from app.services.food_safety_service import FoodSafetyService
from app.schemas.dashboard import (
InventoryAnalytics,
BusinessModelInsights,
)
from shared.routing import RouteBuilder
logger = structlog.get_logger()
# Create route builder for consistent URL structure
route_builder = RouteBuilder('inventory')
router = APIRouter(tags=["inventory-analytics"])
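# Illustrative note (assumption, not part of the original file): given the standardized
# URL structure documented in the module docstring, a call such as
#   route_builder.build_analytics_route("inventory-insights")
# would be expected to resolve to something like
#   /api/v1/tenants/{tenant_id}/inventory/analytics/inventory-insights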
# ===== Dependency Injection =====
async def get_dashboard_service(db: AsyncSession = Depends(get_db)) -> DashboardService:
"""Get dashboard service with dependencies"""
return DashboardService(
inventory_service=InventoryService(),
food_safety_service=FoodSafetyService()
)
# ===== ANALYTICS ENDPOINTS (Professional/Enterprise Only) =====
@router.get(
route_builder.build_analytics_route("inventory-insights"),
response_model=InventoryAnalytics
)
@analytics_tier_required
async def get_inventory_analytics(
tenant_id: UUID = Path(...),
days_back: int = Query(30, ge=1, le=365, description="Number of days to analyze"),
current_user: dict = Depends(get_current_user_dep),
dashboard_service: DashboardService = Depends(get_dashboard_service),
db: AsyncSession = Depends(get_db)
):
"""
Get advanced inventory analytics (Professional/Enterprise only)
Provides:
- Stock turnover rates
- Inventory valuation trends
- ABC analysis
- Stockout risk predictions
- Seasonal patterns
"""
try:
analytics = await dashboard_service.get_inventory_analytics(db, tenant_id, days_back)
logger.info("Inventory analytics retrieved",
tenant_id=str(tenant_id),
days_analyzed=days_back,
user_id=current_user.get('user_id'))
return analytics
except Exception as e:
logger.error("Error getting inventory analytics",
tenant_id=str(tenant_id),
error=str(e))
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail="Failed to retrieve inventory analytics"
)
@router.get(
route_builder.build_analytics_route("business-model"),
response_model=BusinessModelInsights
)
@analytics_tier_required
async def get_business_model_insights(
tenant_id: UUID = Path(...),
current_user: dict = Depends(get_current_user_dep),
dashboard_service: DashboardService = Depends(get_dashboard_service),
db: AsyncSession = Depends(get_db)
):
"""
Get business model insights based on inventory patterns (Professional/Enterprise only)
Analyzes inventory patterns to provide insights on:
- Detected business model (retail, wholesale, production, etc.)
- Product mix recommendations
- Inventory optimization suggestions
"""
try:
insights = await dashboard_service.get_business_model_insights(db, tenant_id)
logger.info("Business model insights retrieved",
tenant_id=str(tenant_id),
detected_model=insights.detected_model,
user_id=current_user.get('user_id'))
return insights
except Exception as e:
logger.error("Error getting business model insights",
tenant_id=str(tenant_id),
error=str(e))
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail="Failed to retrieve business model insights"
)
@router.get(
route_builder.build_analytics_route("turnover-rate"),
response_model=dict
)
@analytics_tier_required
async def get_inventory_turnover_rate(
tenant_id: UUID = Path(...),
start_date: Optional[datetime] = Query(None, description="Start date for analysis"),
end_date: Optional[datetime] = Query(None, description="End date for analysis"),
category: Optional[str] = Query(None, description="Filter by category"),
current_user: dict = Depends(get_current_user_dep),
db: AsyncSession = Depends(get_db)
):
"""
Calculate inventory turnover rate (Professional/Enterprise only)
Metrics:
- Overall turnover rate
- By category
- By product
- Trend analysis
"""
try:
service = InventoryService()
# Set default dates if not provided
if not end_date:
end_date = datetime.now()
if not start_date:
start_date = end_date - timedelta(days=90)
# Calculate turnover metrics
turnover_data = await service.calculate_turnover_rate(
tenant_id,
start_date,
end_date,
category
)
logger.info("Turnover rate calculated",
tenant_id=str(tenant_id),
category=category,
user_id=current_user.get('user_id'))
return turnover_data
except Exception as e:
logger.error("Error calculating turnover rate",
tenant_id=str(tenant_id),
error=str(e))
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail="Failed to calculate turnover rate"
)
@router.get(
route_builder.build_analytics_route("abc-analysis"),
response_model=dict
)
@analytics_tier_required
async def get_abc_analysis(
tenant_id: UUID = Path(...),
days_back: int = Query(90, ge=30, le=365, description="Days to analyze"),
current_user: dict = Depends(get_current_user_dep),
db: AsyncSession = Depends(get_db)
):
"""
Perform ABC analysis on inventory (Professional/Enterprise only)
Categorizes inventory items by:
- A: High-value items requiring tight control
- B: Moderate-value items with moderate control
- C: Low-value items with simple control
"""
try:
service = InventoryService()
abc_analysis = await service.perform_abc_analysis(tenant_id, days_back)
logger.info("ABC analysis completed",
tenant_id=str(tenant_id),
days_analyzed=days_back,
user_id=current_user.get('user_id'))
return abc_analysis
except Exception as e:
logger.error("Error performing ABC analysis",
tenant_id=str(tenant_id),
error=str(e))
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail="Failed to perform ABC analysis"
)
@router.get(
route_builder.build_analytics_route("stockout-predictions"),
response_model=dict
)
@analytics_tier_required
async def get_stockout_predictions(
tenant_id: UUID = Path(...),
forecast_days: int = Query(30, ge=7, le=90, description="Days to forecast"),
current_user: dict = Depends(get_current_user_dep),
db: AsyncSession = Depends(get_db)
):
"""
Predict potential stockouts (Professional/Enterprise only)
Provides:
- Items at risk of stockout
- Predicted stockout dates
- Recommended reorder quantities
- Lead time considerations
"""
try:
service = InventoryService()
predictions = await service.predict_stockouts(tenant_id, forecast_days)
logger.info("Stockout predictions generated",
tenant_id=str(tenant_id),
forecast_days=forecast_days,
at_risk_items=len(predictions.get('items_at_risk', [])),
user_id=current_user.get('user_id'))
return predictions
except Exception as e:
logger.error("Error predicting stockouts",
tenant_id=str(tenant_id),
error=str(e))
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail="Failed to predict stockouts"
)
@router.get(
route_builder.build_analytics_route("waste-analysis"),
response_model=dict
)
@analytics_tier_required
async def get_waste_analysis(
tenant_id: UUID = Path(...),
start_date: Optional[datetime] = Query(None, description="Start date"),
end_date: Optional[datetime] = Query(None, description="End date"),
current_user: dict = Depends(get_current_user_dep),
db: AsyncSession = Depends(get_db)
):
"""
Analyze inventory waste and expiration (Professional/Enterprise only)
Metrics:
- Total waste value
- Waste by category
- Expiration patterns
- Optimization recommendations
"""
try:
service = InventoryService()
# Set default dates
if not end_date:
end_date = datetime.now()
if not start_date:
start_date = end_date - timedelta(days=30)
waste_analysis = await service.analyze_waste(tenant_id, start_date, end_date)
logger.info("Waste analysis completed",
tenant_id=str(tenant_id),
total_waste_value=waste_analysis.get('total_waste_value', 0),
user_id=current_user.get('user_id'))
return waste_analysis
except Exception as e:
logger.error("Error analyzing waste",
tenant_id=str(tenant_id),
error=str(e))
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail="Failed to analyze waste"
)

@@ -0,0 +1,237 @@
# services/inventory/app/api/audit.py
"""
Audit Logs API - Retrieve audit trail for inventory service
"""
from fastapi import APIRouter, Depends, HTTPException, Query, Path, status
from typing import Optional, Dict, Any
from uuid import UUID
from datetime import datetime
import structlog
from sqlalchemy import select, func, and_
from sqlalchemy.ext.asyncio import AsyncSession
from app.models import AuditLog
from shared.auth.decorators import get_current_user_dep
from shared.auth.access_control import require_user_role
from shared.routing import RouteBuilder
from shared.models.audit_log_schemas import (
AuditLogResponse,
AuditLogListResponse,
AuditLogStatsResponse
)
from app.core.database import database_manager
route_builder = RouteBuilder('inventory')
router = APIRouter(tags=["audit-logs"])
logger = structlog.get_logger()
async def get_db():
"""Database session dependency"""
async with database_manager.get_session() as session:
yield session
@router.get(
route_builder.build_base_route("audit-logs"),
response_model=AuditLogListResponse
)
@require_user_role(['admin', 'owner'])
async def get_audit_logs(
tenant_id: UUID = Path(..., description="Tenant ID"),
start_date: Optional[datetime] = Query(None, description="Filter logs from this date"),
end_date: Optional[datetime] = Query(None, description="Filter logs until this date"),
user_id: Optional[UUID] = Query(None, description="Filter by user ID"),
action: Optional[str] = Query(None, description="Filter by action type"),
resource_type: Optional[str] = Query(None, description="Filter by resource type"),
severity: Optional[str] = Query(None, description="Filter by severity level"),
search: Optional[str] = Query(None, description="Search in description field"),
limit: int = Query(100, ge=1, le=1000, description="Number of records to return"),
offset: int = Query(0, ge=0, description="Number of records to skip"),
current_user: Dict[str, Any] = Depends(get_current_user_dep),
db: AsyncSession = Depends(get_db)
):
"""
Get audit logs for inventory service.
Requires admin or owner role.
"""
try:
logger.info(
"Retrieving audit logs",
tenant_id=tenant_id,
user_id=current_user.get("user_id"),
filters={
"start_date": start_date,
"end_date": end_date,
"action": action,
"resource_type": resource_type,
"severity": severity
}
)
# Build query filters
filters = [AuditLog.tenant_id == tenant_id]
if start_date:
filters.append(AuditLog.created_at >= start_date)
if end_date:
filters.append(AuditLog.created_at <= end_date)
if user_id:
filters.append(AuditLog.user_id == user_id)
if action:
filters.append(AuditLog.action == action)
if resource_type:
filters.append(AuditLog.resource_type == resource_type)
if severity:
filters.append(AuditLog.severity == severity)
if search:
filters.append(AuditLog.description.ilike(f"%{search}%"))
# Count total matching records
count_query = select(func.count()).select_from(AuditLog).where(and_(*filters))
total_result = await db.execute(count_query)
total = total_result.scalar() or 0
# Fetch paginated results
query = (
select(AuditLog)
.where(and_(*filters))
.order_by(AuditLog.created_at.desc())
.limit(limit)
.offset(offset)
)
result = await db.execute(query)
audit_logs = result.scalars().all()
# Convert to response models
items = [AuditLogResponse.from_orm(log) for log in audit_logs]
logger.info(
"Successfully retrieved audit logs",
tenant_id=tenant_id,
total=total,
returned=len(items)
)
return AuditLogListResponse(
items=items,
total=total,
limit=limit,
offset=offset,
has_more=(offset + len(items)) < total
)
except Exception as e:
logger.error(
"Failed to retrieve audit logs",
error=str(e),
tenant_id=tenant_id
)
raise HTTPException(
status_code=500,
detail=f"Failed to retrieve audit logs: {str(e)}"
)
@router.get(
route_builder.build_base_route("audit-logs/stats"),
response_model=AuditLogStatsResponse
)
@require_user_role(['admin', 'owner'])
async def get_audit_log_stats(
tenant_id: UUID = Path(..., description="Tenant ID"),
start_date: Optional[datetime] = Query(None, description="Filter logs from this date"),
end_date: Optional[datetime] = Query(None, description="Filter logs until this date"),
current_user: Dict[str, Any] = Depends(get_current_user_dep),
db: AsyncSession = Depends(get_db)
):
"""
Get audit log statistics for inventory service.
Requires admin or owner role.
"""
try:
logger.info(
"Retrieving audit log statistics",
tenant_id=tenant_id,
user_id=current_user.get("user_id")
)
# Build base filters
filters = [AuditLog.tenant_id == tenant_id]
if start_date:
filters.append(AuditLog.created_at >= start_date)
if end_date:
filters.append(AuditLog.created_at <= end_date)
# Total events
count_query = select(func.count()).select_from(AuditLog).where(and_(*filters))
total_result = await db.execute(count_query)
total_events = total_result.scalar() or 0
# Events by action
action_query = (
select(AuditLog.action, func.count().label('count'))
.where(and_(*filters))
.group_by(AuditLog.action)
)
action_result = await db.execute(action_query)
events_by_action = {row.action: row.count for row in action_result}
# Events by severity
severity_query = (
select(AuditLog.severity, func.count().label('count'))
.where(and_(*filters))
.group_by(AuditLog.severity)
)
severity_result = await db.execute(severity_query)
events_by_severity = {row.severity: row.count for row in severity_result}
# Events by resource type
resource_query = (
select(AuditLog.resource_type, func.count().label('count'))
.where(and_(*filters))
.group_by(AuditLog.resource_type)
)
resource_result = await db.execute(resource_query)
events_by_resource_type = {row.resource_type: row.count for row in resource_result}
# Date range
date_range_query = (
select(
func.min(AuditLog.created_at).label('min_date'),
func.max(AuditLog.created_at).label('max_date')
)
.where(and_(*filters))
)
date_result = await db.execute(date_range_query)
date_row = date_result.one()
logger.info(
"Successfully retrieved audit log statistics",
tenant_id=tenant_id,
total_events=total_events
)
return AuditLogStatsResponse(
total_events=total_events,
events_by_action=events_by_action,
events_by_severity=events_by_severity,
events_by_resource_type=events_by_resource_type,
date_range={
"min": date_row.min_date,
"max": date_row.max_date
}
)
except Exception as e:
logger.error(
"Failed to retrieve audit log statistics",
error=str(e),
tenant_id=tenant_id
)
raise HTTPException(
status_code=500,
detail=f"Failed to retrieve audit log statistics: {str(e)}"
)

@@ -0,0 +1,149 @@
# services/inventory/app/api/batch.py
"""
Inventory Batch API - Batch operations for enterprise dashboards
Phase 2 optimization: Eliminate N+1 query patterns by fetching inventory data
for multiple tenants in a single request.
"""
from fastapi import APIRouter, Depends, HTTPException, Body
from typing import List, Dict, Any
from uuid import UUID
from pydantic import BaseModel, Field
from sqlalchemy.ext.asyncio import AsyncSession
import structlog
import asyncio
from app.core.database import get_db
from app.services.dashboard_service import DashboardService
from app.services.inventory_service import InventoryService
from shared.auth.decorators import get_current_user_dep
router = APIRouter(tags=["inventory-batch"])
logger = structlog.get_logger()
class InventorySummaryBatchRequest(BaseModel):
"""Request model for batch inventory summary"""
tenant_ids: List[str] = Field(..., description="List of tenant IDs", max_length=100)
class InventorySummary(BaseModel):
"""Inventory summary for a single tenant"""
tenant_id: str
total_value: float
out_of_stock_count: int
low_stock_count: int
adequate_stock_count: int
total_ingredients: int
@router.post("/batch/inventory-summary", response_model=Dict[str, InventorySummary])
async def get_inventory_summary_batch(
request: InventorySummaryBatchRequest = Body(...),
current_user: Dict[str, Any] = Depends(get_current_user_dep),
db: AsyncSession = Depends(get_db)
):
"""
Get inventory summary for multiple tenants in a single request.
Optimized for enterprise dashboards to eliminate N+1 query patterns.
Fetches inventory data for all tenants in parallel.
Args:
request: Batch request with tenant IDs
Returns:
Dictionary mapping tenant_id -> inventory summary
Example:
POST /api/v1/inventory/batch/inventory-summary
{
"tenant_ids": ["tenant-1", "tenant-2", "tenant-3"]
}
Response:
{
"tenant-1": {"tenant_id": "tenant-1", "total_value": 15000, ...},
"tenant-2": {"tenant_id": "tenant-2", "total_value": 12000, ...},
"tenant-3": {"tenant_id": "tenant-3", "total_value": 18000, ...}
}
"""
try:
if len(request.tenant_ids) > 100:
raise HTTPException(
status_code=400,
detail="Maximum 100 tenant IDs allowed per batch request"
)
if not request.tenant_ids:
return {}
logger.info(
"Batch fetching inventory summaries",
tenant_count=len(request.tenant_ids)
)
async def fetch_tenant_inventory(tenant_id: str) -> tuple[str, InventorySummary]:
"""Fetch inventory summary for a single tenant"""
try:
tenant_uuid = UUID(tenant_id)
dashboard_service = DashboardService(
inventory_service=InventoryService(),
food_safety_service=None
)
overview = await dashboard_service.get_inventory_overview(db, tenant_uuid)
return tenant_id, InventorySummary(
tenant_id=tenant_id,
total_value=float(overview.get('total_value', 0)),
out_of_stock_count=int(overview.get('out_of_stock_count', 0)),
low_stock_count=int(overview.get('low_stock_count', 0)),
adequate_stock_count=int(overview.get('adequate_stock_count', 0)),
total_ingredients=int(overview.get('total_ingredients', 0))
)
except Exception as e:
logger.warning(
"Failed to fetch inventory for tenant in batch",
tenant_id=tenant_id,
error=str(e)
)
return tenant_id, InventorySummary(
tenant_id=tenant_id,
total_value=0.0,
out_of_stock_count=0,
low_stock_count=0,
adequate_stock_count=0,
total_ingredients=0
)
# Fetch all tenant inventory data in parallel
tasks = [fetch_tenant_inventory(tid) for tid in request.tenant_ids]
results = await asyncio.gather(*tasks, return_exceptions=True)
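        # Note: return_exceptions=True makes gather() return raised exceptions as values
        # instead of cancelling the remaining tasks, so one failing tenant does not abort
        # the whole batch; exceptions are filtered out when building the result below.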
# Build result dictionary
result_dict = {}
for result in results:
if isinstance(result, Exception):
logger.error("Exception in batch inventory fetch", error=str(result))
continue
tenant_id, summary = result
result_dict[tenant_id] = summary
logger.info(
"Batch inventory summaries retrieved",
requested_count=len(request.tenant_ids),
successful_count=len(result_dict)
)
return result_dict
except HTTPException:
raise
except Exception as e:
logger.error("Error in batch inventory summary", error=str(e), exc_info=True)
raise HTTPException(
status_code=500,
detail=f"Failed to fetch batch inventory summaries: {str(e)}"
)

@@ -0,0 +1,498 @@
# ================================================================
# services/inventory/app/api/dashboard.py
# ================================================================
"""
Dashboard API endpoints for Inventory Service
"""
from datetime import datetime, timedelta
from typing import List, Optional
from uuid import UUID
from fastapi import APIRouter, Depends, HTTPException, Query, Path, status
from sqlalchemy.ext.asyncio import AsyncSession
import structlog
from shared.auth.decorators import get_current_user_dep
from shared.auth.access_control import require_user_role, analytics_tier_required
from shared.routing import RouteBuilder
from app.core.database import get_db
from app.services.inventory_service import InventoryService
from app.services.food_safety_service import FoodSafetyService
from app.services.dashboard_service import DashboardService
from app.schemas.dashboard import (
InventoryDashboardSummary,
FoodSafetyDashboard,
BusinessModelInsights,
InventoryAnalytics,
DashboardFilter,
AlertsFilter,
StockStatusSummary,
AlertSummary,
RecentActivity
)
from app.utils.cache import get_cached, set_cached, make_cache_key
logger = structlog.get_logger()
# Create route builder for consistent URL structure
route_builder = RouteBuilder('inventory')
router = APIRouter(tags=["dashboard"])
# ===== Dependency Injection =====
async def get_dashboard_service(db: AsyncSession = Depends(get_db)) -> DashboardService:
"""Get dashboard service with dependencies"""
return DashboardService(
inventory_service=InventoryService(),
food_safety_service=FoodSafetyService()
)
# ===== Main Dashboard Endpoints =====
@router.get(
route_builder.build_dashboard_route("summary"),
response_model=InventoryDashboardSummary
)
async def get_inventory_dashboard_summary(
tenant_id: UUID = Path(...),
filters: Optional[DashboardFilter] = None,
current_user: dict = Depends(get_current_user_dep),
dashboard_service: DashboardService = Depends(get_dashboard_service),
db: AsyncSession = Depends(get_db)
):
"""Get comprehensive inventory dashboard summary with caching (30s TTL)"""
try:
# PHASE 2: Check cache first (only if no filters applied)
cache_key = None
if filters is None:
cache_key = make_cache_key("inventory_dashboard", str(tenant_id))
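            # Assumption: make_cache_key namespaces the key by prefix and tenant, e.g.
            # "inventory_dashboard:<tenant_id>", so cached summaries cannot leak across tenants.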
cached_result = await get_cached(cache_key)
if cached_result is not None:
logger.debug("Cache hit for inventory dashboard", cache_key=cache_key, tenant_id=str(tenant_id))
return InventoryDashboardSummary(**cached_result)
# Cache miss or filters applied - fetch from database
summary = await dashboard_service.get_inventory_dashboard_summary(db, tenant_id, filters)
# PHASE 2: Cache the result (30s TTL for inventory levels)
if cache_key:
await set_cached(cache_key, summary.model_dump(), ttl=30)
logger.debug("Cached inventory dashboard", cache_key=cache_key, ttl=30, tenant_id=str(tenant_id))
logger.info("Dashboard summary retrieved",
tenant_id=str(tenant_id),
total_ingredients=summary.total_ingredients)
return summary
except Exception as e:
logger.error("Error getting dashboard summary",
tenant_id=str(tenant_id),
error=str(e))
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail="Failed to retrieve dashboard summary"
)
@router.get(
route_builder.build_dashboard_route("overview")
)
async def get_inventory_dashboard_overview(
tenant_id: UUID = Path(...),
current_user: dict = Depends(get_current_user_dep),
dashboard_service: DashboardService = Depends(get_dashboard_service),
db: AsyncSession = Depends(get_db)
):
"""
Get lightweight inventory dashboard overview for health checks.
This endpoint is optimized for frequent polling by the orchestrator service
for dashboard health-status checks. It returns only essential metrics needed
to determine inventory health status.
"""
try:
overview = await dashboard_service.get_inventory_overview(db, tenant_id)
logger.info("Inventory dashboard overview retrieved",
tenant_id=str(tenant_id),
out_of_stock_count=overview.get('out_of_stock_count', 0))
return overview
except Exception as e:
logger.error("Error getting inventory dashboard overview",
tenant_id=str(tenant_id),
error=str(e))
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail="Failed to retrieve inventory dashboard overview"
)
@router.get(
route_builder.build_dashboard_route("food-safety"),
response_model=FoodSafetyDashboard
)
async def get_food_safety_dashboard(
tenant_id: UUID = Path(...),
current_user: dict = Depends(get_current_user_dep),
food_safety_service: FoodSafetyService = Depends(lambda: FoodSafetyService()),
db: AsyncSession = Depends(get_db)
):
"""Get food safety dashboard data"""
try:
dashboard = await food_safety_service.get_food_safety_dashboard(db, tenant_id)
logger.info("Food safety dashboard retrieved",
tenant_id=str(tenant_id),
compliance_percentage=dashboard.compliance_percentage)
return dashboard
except Exception as e:
logger.error("Error getting food safety dashboard",
tenant_id=str(tenant_id),
error=str(e))
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail="Failed to retrieve food safety dashboard"
)
@router.get(
route_builder.build_dashboard_route("analytics"),
response_model=InventoryAnalytics
)
async def get_inventory_analytics(
tenant_id: UUID = Path(...),
days_back: int = Query(30, ge=1, le=365, description="Number of days to analyze"),
current_user: dict = Depends(get_current_user_dep),
dashboard_service: DashboardService = Depends(get_dashboard_service),
db: AsyncSession = Depends(get_db)
):
"""Get advanced inventory analytics"""
try:
analytics = await dashboard_service.get_inventory_analytics(db, tenant_id, days_back)
logger.info("Inventory analytics retrieved",
tenant_id=str(tenant_id),
days_analyzed=days_back)
return analytics
except Exception as e:
logger.error("Error getting inventory analytics",
tenant_id=str(tenant_id),
error=str(e))
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail="Failed to retrieve inventory analytics"
)
@router.get(
route_builder.build_dashboard_route("business-model"),
response_model=BusinessModelInsights
)
async def get_business_model_insights(
tenant_id: UUID = Path(...),
current_user: dict = Depends(get_current_user_dep),
dashboard_service: DashboardService = Depends(get_dashboard_service),
db: AsyncSession = Depends(get_db)
):
"""Get business model insights based on inventory patterns"""
try:
insights = await dashboard_service.get_business_model_insights(db, tenant_id)
logger.info("Business model insights retrieved",
tenant_id=str(tenant_id),
detected_model=insights.detected_model)
return insights
except Exception as e:
logger.error("Error getting business model insights",
tenant_id=str(tenant_id),
error=str(e))
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail="Failed to retrieve business model insights"
)
# ===== Detailed Dashboard Data Endpoints =====
@router.get(
route_builder.build_dashboard_route("stock-status"),
response_model=List[StockStatusSummary]
)
async def get_stock_status_by_category(
tenant_id: UUID = Path(...),
current_user: dict = Depends(get_current_user_dep),
dashboard_service: DashboardService = Depends(get_dashboard_service),
db: AsyncSession = Depends(get_db)
):
"""Get stock status breakdown by category"""
try:
stock_status = await dashboard_service.get_stock_status_by_category(db, tenant_id)
return stock_status
except Exception as e:
logger.error("Error getting stock status by category",
tenant_id=str(tenant_id),
error=str(e))
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail="Failed to retrieve stock status by category"
)
@router.get(
route_builder.build_dashboard_route("alerts-summary"),
response_model=List[AlertSummary]
)
async def get_alerts_summary(
tenant_id: UUID = Path(...),
filters: Optional[AlertsFilter] = None,
current_user: dict = Depends(get_current_user_dep),
dashboard_service: DashboardService = Depends(get_dashboard_service),
db: AsyncSession = Depends(get_db)
):
"""Get alerts summary by type and severity"""
try:
alerts_summary = await dashboard_service.get_alerts_summary(db, tenant_id, filters)
return alerts_summary
except Exception as e:
logger.error("Error getting alerts summary",
tenant_id=str(tenant_id),
error=str(e))
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail="Failed to retrieve alerts summary"
)
@router.get(
route_builder.build_dashboard_route("recent-activity"),
response_model=List[RecentActivity]
)
async def get_recent_activity(
tenant_id: UUID = Path(...),
limit: int = Query(20, ge=1, le=100, description="Number of activities to return"),
activity_types: Optional[List[str]] = Query(None, description="Filter by activity types"),
current_user: dict = Depends(get_current_user_dep),
dashboard_service: DashboardService = Depends(get_dashboard_service),
db: AsyncSession = Depends(get_db)
):
"""Get recent inventory activity"""
try:
activities = await dashboard_service.get_recent_activity(
db, tenant_id, limit, activity_types
)
return activities
except Exception as e:
logger.error("Error getting recent activity",
tenant_id=str(tenant_id),
error=str(e))
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail="Failed to retrieve recent activity"
)
# ===== Real-time Data Endpoints =====
@router.get(
route_builder.build_dashboard_route("live-metrics")
)
async def get_live_metrics(
tenant_id: UUID = Path(...),
current_user: dict = Depends(get_current_user_dep),
dashboard_service: DashboardService = Depends(get_dashboard_service),
db: AsyncSession = Depends(get_db)
):
"""Get real-time inventory metrics"""
try:
metrics = await dashboard_service.get_live_metrics(db, tenant_id)
return {
"timestamp": datetime.now().isoformat(),
"metrics": metrics,
"cache_ttl": 60 # Seconds
}
except Exception as e:
logger.error("Error getting live metrics",
tenant_id=str(tenant_id),
error=str(e))
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail="Failed to retrieve live metrics"
)
@router.get(
route_builder.build_dashboard_route("temperature-status")
)
async def get_temperature_monitoring_status(
tenant_id: UUID = Path(...),
current_user: dict = Depends(get_current_user_dep),
food_safety_service: FoodSafetyService = Depends(lambda: FoodSafetyService()),
db: AsyncSession = Depends(get_db)
):
"""Get current temperature monitoring status"""
try:
temp_status = await food_safety_service.get_temperature_monitoring_status(db, tenant_id)
return {
"timestamp": datetime.now().isoformat(),
"temperature_monitoring": temp_status
}
except Exception as e:
logger.error("Error getting temperature status",
tenant_id=str(tenant_id),
error=str(e))
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail="Failed to retrieve temperature monitoring status"
)
# ===== Dashboard Configuration Endpoints =====
@router.get(
route_builder.build_dashboard_route("config")
)
async def get_dashboard_config(
tenant_id: UUID = Path(...),
current_user: dict = Depends(get_current_user_dep)
):
"""Get dashboard configuration and settings"""
try:
from app.core.config import settings
config = {
"refresh_intervals": {
"dashboard_cache_ttl": settings.DASHBOARD_CACHE_TTL,
"alerts_refresh_interval": settings.ALERTS_REFRESH_INTERVAL,
"temperature_log_interval": settings.TEMPERATURE_LOG_INTERVAL
},
"features": {
"food_safety_enabled": settings.FOOD_SAFETY_ENABLED,
"temperature_monitoring_enabled": settings.TEMPERATURE_MONITORING_ENABLED,
"business_model_detection": settings.ENABLE_BUSINESS_MODEL_DETECTION
},
"thresholds": {
"low_stock_default": settings.DEFAULT_LOW_STOCK_THRESHOLD,
"reorder_point_default": settings.DEFAULT_REORDER_POINT,
"expiration_warning_days": settings.EXPIRATION_WARNING_DAYS,
"critical_expiration_hours": settings.CRITICAL_EXPIRATION_HOURS
},
"business_model_thresholds": {
"central_bakery_ingredients": settings.CENTRAL_BAKERY_THRESHOLD_INGREDIENTS,
"individual_bakery_ingredients": settings.INDIVIDUAL_BAKERY_THRESHOLD_INGREDIENTS
}
}
return config
except Exception as e:
logger.error("Error getting dashboard config",
tenant_id=str(tenant_id),
error=str(e))
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail="Failed to retrieve dashboard configuration"
)
# ===== Export and Reporting Endpoints =====
@router.get(
route_builder.build_operations_route("export/summary")
)
async def export_dashboard_summary(
tenant_id: UUID = Path(...),
format: str = Query("json", description="Export format: json, csv, excel"),
date_from: Optional[datetime] = Query(None, description="Start date for data export"),
date_to: Optional[datetime] = Query(None, description="End date for data export"),
current_user: dict = Depends(get_current_user_dep),
dashboard_service: DashboardService = Depends(get_dashboard_service),
db: AsyncSession = Depends(get_db)
):
"""Export dashboard summary data"""
try:
if format.lower() not in ["json", "csv", "excel"]:
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail="Unsupported export format. Use: json, csv, excel"
)
export_data = await dashboard_service.export_dashboard_data(
db, tenant_id, format, date_from, date_to
)
logger.info("Dashboard data exported",
tenant_id=str(tenant_id),
format=format)
return export_data
    except HTTPException:
        raise
    except ValueError as e:
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail=str(e)
)
except Exception as e:
logger.error("Error exporting dashboard data",
tenant_id=str(tenant_id),
error=str(e))
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail="Failed to export dashboard data"
)
# ===== Health and Status Endpoints =====
@router.get(
route_builder.build_base_route("health")
)
async def get_dashboard_health(
tenant_id: UUID = Path(...),
current_user: dict = Depends(get_current_user_dep)
):
"""Get dashboard service health status"""
try:
return {
"service": "inventory-dashboard",
"status": "healthy",
"timestamp": datetime.now().isoformat(),
"tenant_id": str(tenant_id),
"features": {
"food_safety": "enabled",
"temperature_monitoring": "enabled",
"business_model_detection": "enabled",
"real_time_alerts": "enabled"
}
}
except Exception as e:
logger.error("Error getting dashboard health",
tenant_id=str(tenant_id),
error=str(e))
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail="Failed to get dashboard health status"
)

@@ -0,0 +1,314 @@
"""
Enterprise Inventory API Endpoints
APIs for enterprise-level inventory management across outlets
"""
from fastapi import APIRouter, Depends, HTTPException, Query
from typing import List, Optional
from datetime import date
from pydantic import BaseModel, Field
import structlog
from app.services.enterprise_inventory_service import EnterpriseInventoryService
from shared.auth.tenant_access import verify_tenant_permission_dep
from shared.clients import get_inventory_client, get_tenant_client
from app.core.config import settings
logger = structlog.get_logger()
router = APIRouter()
# Pydantic models for request/response
class InventoryCoverageResponse(BaseModel):
outlet_id: str = Field(..., description="Outlet tenant ID")
outlet_name: str = Field(..., description="Outlet name")
overall_coverage: float = Field(..., description="Overall inventory coverage percentage (0-100)")
critical_items_count: int = Field(..., description="Number of items at critical stock levels")
high_risk_items_count: int = Field(..., description="Number of items at high risk of stockout")
medium_risk_items_count: int = Field(..., description="Number of items at medium risk")
low_risk_items_count: int = Field(..., description="Number of items at low risk")
fulfillment_rate: float = Field(..., description="Order fulfillment rate percentage (0-100)")
last_updated: str = Field(..., description="Last inventory update timestamp")
status: str = Field(..., description="Overall status: normal, warning, critical")
class ProductCoverageDetail(BaseModel):
product_id: str = Field(..., description="Product ID")
product_name: str = Field(..., description="Product name")
current_stock: int = Field(..., description="Current stock quantity")
safety_stock: int = Field(..., description="Safety stock threshold")
coverage_percentage: float = Field(..., description="Coverage percentage (current/safety)")
risk_level: str = Field(..., description="Risk level: critical, high, medium, low")
days_until_stockout: Optional[int] = Field(None, description="Estimated days until stockout")
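# Illustrative relationships (assumption, derived from the field descriptions above):
#   coverage_percentage = current_stock / safety_stock * 100
#   days_until_stockout ~= current_stock / average_daily_demand
# The exact formulas live in EnterpriseInventoryService and are not shown in this file.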
class OutletInventoryDetailResponse(BaseModel):
outlet_id: str = Field(..., description="Outlet tenant ID")
outlet_name: str = Field(..., description="Outlet name")
overall_coverage: float = Field(..., description="Overall inventory coverage percentage")
products: List[ProductCoverageDetail] = Field(..., description="Product-level inventory details")
last_updated: str = Field(..., description="Last update timestamp")
class NetworkInventorySummary(BaseModel):
total_outlets: int = Field(..., description="Total number of outlets")
average_coverage: float = Field(..., description="Network average inventory coverage")
average_fulfillment_rate: float = Field(..., description="Network average fulfillment rate")
critical_outlets: int = Field(..., description="Number of outlets with critical status")
warning_outlets: int = Field(..., description="Number of outlets with warning status")
normal_outlets: int = Field(..., description="Number of outlets with normal status")
total_critical_items: int = Field(..., description="Total critical items across network")
network_health_score: float = Field(..., description="Overall network health score (0-100)")
class InventoryAlert(BaseModel):
alert_id: str = Field(..., description="Alert ID")
outlet_id: str = Field(..., description="Outlet ID")
outlet_name: str = Field(..., description="Outlet name")
product_id: Optional[str] = Field(None, description="Product ID if applicable")
product_name: Optional[str] = Field(None, description="Product name if applicable")
alert_type: str = Field(..., description="Type of alert: stockout_risk, low_coverage, etc.")
severity: str = Field(..., description="Severity: critical, high, medium, low")
current_coverage: float = Field(..., description="Current inventory coverage percentage")
threshold: float = Field(..., description="Threshold that triggered alert")
timestamp: str = Field(..., description="Alert timestamp")
message: str = Field(..., description="Alert message")
async def get_enterprise_inventory_service() -> "EnterpriseInventoryService":
"""Dependency injection for EnterpriseInventoryService"""
inventory_client = get_inventory_client(settings, "inventory-service")
tenant_client = get_tenant_client(settings, "inventory-service")
return EnterpriseInventoryService(
inventory_client=inventory_client,
tenant_client=tenant_client
)
@router.get("/tenants/{parent_id}/outlets/inventory-coverage",
response_model=List[InventoryCoverageResponse],
summary="Get inventory coverage for all outlets in network")
async def get_outlet_inventory_coverage(
parent_id: str,
min_coverage: Optional[float] = Query(None, description="Filter outlets with coverage below this threshold"),
risk_level: Optional[str] = Query(None, description="Filter by risk level: critical, high, medium, low"),
enterprise_inventory_service: EnterpriseInventoryService = Depends(get_enterprise_inventory_service),
verified_tenant: str = Depends(verify_tenant_permission_dep)
):
"""
Get inventory coverage metrics for all child outlets in a parent tenant's network
This endpoint provides a comprehensive view of inventory health across all outlets,
enabling enterprise managers to identify stockout risks and prioritize inventory transfers.
"""
try:
# Verify this is a parent tenant
tenant_info = await enterprise_inventory_service.tenant_client.get_tenant(parent_id)
if tenant_info.get('tenant_type') != 'parent':
raise HTTPException(
status_code=403,
detail="Only parent tenants can access outlet inventory coverage"
)
# Get all child outlets for this parent
child_outlets = await enterprise_inventory_service.get_child_outlets(parent_id)
if not child_outlets:
return []
# Get inventory coverage for each outlet
coverage_data = []
for outlet in child_outlets:
outlet_id = outlet['id']
# Get inventory coverage data
coverage = await enterprise_inventory_service.get_inventory_coverage(outlet_id)
if coverage:
# Apply filters if specified
if min_coverage is not None and coverage['overall_coverage'] >= min_coverage:
continue
if risk_level is not None and coverage.get('status') != risk_level:
continue
coverage_data.append(coverage)
# Sort by coverage (lowest first) to prioritize critical outlets
coverage_data.sort(key=lambda x: x['overall_coverage'])
return coverage_data
    except HTTPException:
        raise
    except Exception as e:
logger.error("Failed to get outlet inventory coverage", error=str(e))
raise HTTPException(status_code=500, detail=f"Failed to get inventory coverage: {str(e)}")
@router.get("/tenants/{parent_id}/outlets/inventory-summary",
response_model=NetworkInventorySummary,
summary="Get network-wide inventory summary")
async def get_network_inventory_summary(
parent_id: str,
enterprise_inventory_service: EnterpriseInventoryService = Depends(get_enterprise_inventory_service),
verified_tenant: str = Depends(verify_tenant_permission_dep)
):
"""
Get aggregated inventory summary across the entire network
Provides key metrics for network health monitoring and decision making.
"""
try:
# Verify this is a parent tenant
tenant_info = await enterprise_inventory_service.tenant_client.get_tenant(parent_id)
if tenant_info.get('tenant_type') != 'parent':
raise HTTPException(
status_code=403,
detail="Only parent tenants can access network inventory summary"
)
return await enterprise_inventory_service.get_network_inventory_summary(parent_id)
    except HTTPException:
        raise
    except Exception as e:
logger.error("Failed to get network inventory summary", error=str(e))
raise HTTPException(status_code=500, detail=f"Failed to get inventory summary: {str(e)}")
@router.get("/tenants/{parent_id}/outlets/{outlet_id}/inventory-details",
response_model=OutletInventoryDetailResponse,
summary="Get detailed inventory for specific outlet")
async def get_outlet_inventory_details(
parent_id: str,
outlet_id: str,
product_id: Optional[str] = Query(None, description="Filter by specific product ID"),
risk_level: Optional[str] = Query(None, description="Filter products by risk level"),
enterprise_inventory_service: EnterpriseInventoryService = Depends(get_enterprise_inventory_service),
verified_tenant: str = Depends(verify_tenant_permission_dep)
):
"""
Get detailed product-level inventory data for a specific outlet
Enables drill-down analysis of inventory issues at the product level.
"""
try:
# Verify parent-child relationship
await enterprise_inventory_service.verify_parent_child_relationship(parent_id, outlet_id)
return await enterprise_inventory_service.get_outlet_inventory_details(outlet_id, product_id, risk_level)
    except HTTPException:
        raise
    except Exception as e:
logger.error("Failed to get outlet inventory details", error=str(e))
raise HTTPException(status_code=500, detail=f"Failed to get inventory details: {str(e)}")
@router.get("/tenants/{parent_id}/inventory-alerts",
response_model=List[InventoryAlert],
summary="Get real-time inventory alerts across network")
async def get_network_inventory_alerts(
parent_id: str,
severity: Optional[str] = Query(None, description="Filter by severity: critical, high, medium, low"),
alert_type: Optional[str] = Query(None, description="Filter by alert type"),
limit: int = Query(50, description="Maximum number of alerts to return"),
enterprise_inventory_service: EnterpriseInventoryService = Depends(get_enterprise_inventory_service),
verified_tenant: str = Depends(verify_tenant_permission_dep)
):
"""
Get real-time inventory alerts across all outlets
Provides actionable alerts for inventory management and stockout prevention.
"""
try:
# Verify this is a parent tenant
tenant_info = await enterprise_inventory_service.tenant_client.get_tenant(parent_id)
if tenant_info.get('tenant_type') != 'parent':
raise HTTPException(
status_code=403,
detail="Only parent tenants can access network inventory alerts"
)
alerts = await enterprise_inventory_service.get_inventory_alerts(parent_id)
# Apply filters
if severity:
alerts = [alert for alert in alerts if alert.get('severity') == severity]
if alert_type:
alerts = [alert for alert in alerts if alert.get('alert_type') == alert_type]
# Sort by severity (critical first) and timestamp (newest first)
severity_order = {'critical': 1, 'high': 2, 'medium': 3, 'low': 4}
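        # Assumption: timestamp values are numeric epoch seconds; if the service emits
        # ISO-8601 strings instead, the int() conversion below would need to be replaced
        # with datetime parsing before sorting.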
alerts.sort(key=lambda x: (severity_order.get(x.get('severity', 'low'), 5), -int(x.get('timestamp', 0))))
return alerts[:limit]
    except HTTPException:
        raise
    except Exception as e:
logger.error("Failed to get inventory alerts", error=str(e))
raise HTTPException(status_code=500, detail=f"Failed to get inventory alerts: {str(e)}")
@router.post("/tenants/{parent_id}/inventory-transfers/recommend",
summary="Get inventory transfer recommendations")
async def get_inventory_transfer_recommendations(
parent_id: str,
urgency: str = Query("medium", description="Urgency level: low, medium, high, critical"),
enterprise_inventory_service: EnterpriseInventoryService = Depends(get_enterprise_inventory_service),
verified_tenant: str = Depends(verify_tenant_permission_dep)
):
"""
Get AI-powered inventory transfer recommendations
Analyzes inventory levels across outlets and suggests optimal transfers
to prevent stockouts and balance inventory.
"""
try:
# Verify this is a parent tenant
tenant_info = await enterprise_inventory_service.tenant_client.get_tenant(parent_id)
if tenant_info.get('tenant_type') != 'parent':
raise HTTPException(
status_code=403,
detail="Only parent tenants can request transfer recommendations"
)
recommendations = await enterprise_inventory_service.get_transfer_recommendations(parent_id, urgency)
return {
'success': True,
'recommendations': recommendations,
'message': f'Generated {len(recommendations)} transfer recommendations'
}
    except HTTPException:
        raise
    except Exception as e:
logger.error("Failed to get transfer recommendations", error=str(e))
raise HTTPException(status_code=500, detail=f"Failed to get recommendations: {str(e)}")
@router.get("/tenants/{parent_id}/inventory/coverage-trends",
summary="Get inventory coverage trends over time")
async def get_inventory_coverage_trends(
parent_id: str,
days: int = Query(30, description="Number of days to analyze"),
enterprise_inventory_service: EnterpriseInventoryService = Depends(get_enterprise_inventory_service),
verified_tenant: str = Depends(verify_tenant_permission_dep)
):
"""
Get historical inventory coverage trends
Enables analysis of inventory performance over time.
"""
try:
# Verify this is a parent tenant
tenant_info = await enterprise_inventory_service.tenant_client.get_tenant(parent_id)
if tenant_info.get('tenant_type') != 'parent':
raise HTTPException(
status_code=403,
detail="Only parent tenants can access coverage trends"
)
trends = await enterprise_inventory_service.get_coverage_trends(parent_id, days)
return {
'success': True,
'trends': trends,
'period': f'Last {days} days'
}
    except HTTPException:
        raise
    except Exception as e:
logger.error("Failed to get coverage trends", error=str(e))
raise HTTPException(status_code=500, detail=f"Failed to get coverage trends: {str(e)}")

@@ -0,0 +1,262 @@
# services/inventory/app/api/food_safety_alerts.py
"""
Food Safety Alerts API - ATOMIC CRUD operations on FoodSafetyAlert model
"""
from typing import List, Optional
from uuid import UUID
from fastapi import APIRouter, Depends, HTTPException, Query, Path, status
from sqlalchemy import text
from sqlalchemy.ext.asyncio import AsyncSession
import structlog
from shared.auth.decorators import get_current_user_dep
from shared.auth.access_control import require_user_role
from shared.routing import RouteBuilder
from app.core.database import get_db
from app.services.food_safety_service import FoodSafetyService
from app.schemas.food_safety import (
FoodSafetyAlertCreate,
FoodSafetyAlertUpdate,
FoodSafetyAlertResponse
)
logger = structlog.get_logger()
route_builder = RouteBuilder('inventory')
router = APIRouter(tags=["food-safety-alerts"])
async def get_food_safety_service() -> FoodSafetyService:
"""Get food safety service instance"""
return FoodSafetyService()
@router.post(
route_builder.build_base_route("food-safety/alerts"),
response_model=FoodSafetyAlertResponse,
status_code=status.HTTP_201_CREATED
)
@require_user_role(['admin', 'owner', 'member'])
async def create_food_safety_alert(
alert_data: FoodSafetyAlertCreate,
tenant_id: UUID = Path(...),
current_user: dict = Depends(get_current_user_dep),
food_safety_service: FoodSafetyService = Depends(get_food_safety_service),
db: AsyncSession = Depends(get_db)
):
"""Create a food safety alert"""
try:
alert_data.tenant_id = tenant_id
alert = await food_safety_service.create_food_safety_alert(
db,
alert_data,
user_id=UUID(current_user["user_id"])
)
logger.info("Food safety alert created",
alert_id=str(alert.id),
alert_type=alert.alert_type)
return alert
except Exception as e:
logger.error("Error creating food safety alert", error=str(e))
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail="Failed to create food safety alert"
)
@router.get(
route_builder.build_base_route("food-safety/alerts"),
response_model=List[FoodSafetyAlertResponse]
)
async def get_food_safety_alerts(
tenant_id: UUID = Path(...),
alert_type: Optional[str] = Query(None, description="Filter by alert type"),
severity: Optional[str] = Query(None, description="Filter by severity"),
status_filter: Optional[str] = Query(None, description="Filter by status"),
unresolved_only: bool = Query(True, description="Show only unresolved alerts"),
skip: int = Query(0, ge=0, description="Number of alerts to skip"),
limit: int = Query(100, ge=1, le=1000, description="Number of alerts to return"),
current_user: dict = Depends(get_current_user_dep),
db: AsyncSession = Depends(get_db)
):
"""Get food safety alerts with filtering"""
try:
where_conditions = ["tenant_id = :tenant_id"]
params = {"tenant_id": tenant_id}
if alert_type:
where_conditions.append("alert_type = :alert_type")
params["alert_type"] = alert_type
if severity:
where_conditions.append("severity = :severity")
params["severity"] = severity
if status_filter:
where_conditions.append("status = :status")
params["status"] = status_filter
elif unresolved_only:
where_conditions.append("status NOT IN ('resolved', 'dismissed')")
where_clause = " AND ".join(where_conditions)
        query = text(f"""
            SELECT * FROM food_safety_alerts
            WHERE {where_clause}
            ORDER BY created_at DESC
            LIMIT :limit OFFSET :skip
        """)
        params.update({"limit": limit, "skip": skip})
        result = await db.execute(query, params)
        alerts = result.fetchall()
        return [
            FoodSafetyAlertResponse(**dict(alert._mapping))
            for alert in alerts
        ]
except Exception as e:
logger.error("Error getting food safety alerts", error=str(e))
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail="Failed to retrieve food safety alerts"
)
@router.get(
route_builder.build_resource_detail_route("food-safety/alerts", "alert_id"),
response_model=FoodSafetyAlertResponse
)
async def get_food_safety_alert(
alert_id: UUID = Path(...),
tenant_id: UUID = Path(...),
current_user: dict = Depends(get_current_user_dep),
db: AsyncSession = Depends(get_db)
):
"""Get specific food safety alert"""
try:
query = "SELECT * FROM food_safety_alerts WHERE id = :alert_id AND tenant_id = :tenant_id"
result = await db.execute(query, {"alert_id": alert_id, "tenant_id": tenant_id})
alert = result.fetchone()
if not alert:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
detail="Food safety alert not found"
)
return FoodSafetyAlertResponse(**dict(alert))
except HTTPException:
raise
except Exception as e:
logger.error("Error getting food safety alert", error=str(e))
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail="Failed to retrieve food safety alert"
)
@router.put(
route_builder.build_resource_detail_route("food-safety/alerts", "alert_id"),
response_model=FoodSafetyAlertResponse
)
@require_user_role(['admin', 'owner', 'member'])
async def update_food_safety_alert(
alert_data: FoodSafetyAlertUpdate,
tenant_id: UUID = Path(...),
alert_id: UUID = Path(...),
current_user: dict = Depends(get_current_user_dep),
db: AsyncSession = Depends(get_db)
):
"""Update a food safety alert"""
try:
        alert_query = text("SELECT * FROM food_safety_alerts WHERE id = :alert_id AND tenant_id = :tenant_id")
        result = await db.execute(alert_query, {"alert_id": alert_id, "tenant_id": tenant_id})
alert_record = result.fetchone()
if not alert_record:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
detail="Food safety alert not found"
)
update_fields = alert_data.dict(exclude_unset=True)
if update_fields:
set_clauses = []
params = {"alert_id": alert_id, "tenant_id": tenant_id}
for field, value in update_fields.items():
set_clauses.append(f"{field} = :{field}")
params[field] = value
set_clauses.append("updated_at = NOW()")
set_clauses.append("updated_by = :updated_by")
params["updated_by"] = UUID(current_user["user_id"])
            update_query = text(f"""
                UPDATE food_safety_alerts
                SET {', '.join(set_clauses)}
                WHERE id = :alert_id AND tenant_id = :tenant_id
            """)
await db.execute(update_query, params)
await db.commit()
result = await db.execute(alert_query, {"alert_id": alert_id, "tenant_id": tenant_id})
updated_alert = result.fetchone()
logger.info("Food safety alert updated",
alert_id=str(alert_id))
        return FoodSafetyAlertResponse(**dict(updated_alert._mapping))
except HTTPException:
raise
except Exception as e:
logger.error("Error updating food safety alert",
alert_id=str(alert_id),
error=str(e))
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail="Failed to update food safety alert"
)
@router.delete(
route_builder.build_resource_detail_route("food-safety/alerts", "alert_id"),
status_code=status.HTTP_204_NO_CONTENT
)
@require_user_role(['admin', 'owner'])
async def delete_food_safety_alert(
alert_id: UUID = Path(...),
tenant_id: UUID = Path(...),
current_user: dict = Depends(get_current_user_dep),
db: AsyncSession = Depends(get_db)
):
"""Delete food safety alert"""
try:
query = "DELETE FROM food_safety_alerts WHERE id = :alert_id AND tenant_id = :tenant_id"
result = await db.execute(query, {"alert_id": alert_id, "tenant_id": tenant_id})
if result.rowcount == 0:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
detail="Food safety alert not found"
)
await db.commit()
return None
except HTTPException:
raise
except Exception as e:
logger.error("Error deleting food safety alert", error=str(e))
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail="Failed to delete food safety alert"
)

@@ -0,0 +1,302 @@
# services/inventory/app/api/food_safety_compliance.py
"""
Food Safety Compliance API - ATOMIC CRUD operations on FoodSafetyCompliance model
"""
from typing import List, Optional
from uuid import UUID
from fastapi import APIRouter, Depends, HTTPException, Query, Path, status
from sqlalchemy import text
from sqlalchemy.ext.asyncio import AsyncSession
import structlog
from shared.auth.decorators import get_current_user_dep
from shared.auth.access_control import require_user_role
from shared.routing import RouteBuilder
from app.core.database import get_db
from app.services.food_safety_service import FoodSafetyService
from app.models import AuditLog
from app.schemas.food_safety import (
FoodSafetyComplianceCreate,
FoodSafetyComplianceUpdate,
FoodSafetyComplianceResponse
)
logger = structlog.get_logger()
route_builder = RouteBuilder('inventory')
router = APIRouter(tags=["food-safety-compliance"])
async def get_food_safety_service() -> FoodSafetyService:
"""Get food safety service instance"""
return FoodSafetyService()
@router.post(
route_builder.build_base_route("food-safety/compliance"),
response_model=FoodSafetyComplianceResponse,
status_code=status.HTTP_201_CREATED
)
@require_user_role(['admin', 'owner', 'member'])
async def create_compliance_record(
compliance_data: FoodSafetyComplianceCreate,
tenant_id: UUID = Path(...),
current_user: dict = Depends(get_current_user_dep),
food_safety_service: FoodSafetyService = Depends(get_food_safety_service),
db: AsyncSession = Depends(get_db)
):
"""Create a new food safety compliance record"""
try:
compliance_data.tenant_id = tenant_id
compliance = await food_safety_service.create_compliance_record(
db,
compliance_data,
user_id=UUID(current_user["user_id"])
)
logger.info("Compliance record created",
compliance_id=str(compliance.id),
standard=compliance.standard)
return compliance
except ValueError as e:
logger.warning("Invalid compliance data", error=str(e))
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail=str(e)
)
except Exception as e:
logger.error("Error creating compliance record", error=str(e))
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail="Failed to create compliance record"
)
@router.get(
route_builder.build_base_route("food-safety/compliance"),
response_model=List[FoodSafetyComplianceResponse]
)
async def get_compliance_records(
tenant_id: UUID = Path(...),
ingredient_id: Optional[UUID] = Query(None, description="Filter by ingredient ID"),
standard: Optional[str] = Query(None, description="Filter by compliance standard"),
status_filter: Optional[str] = Query(None, description="Filter by compliance status"),
skip: int = Query(0, ge=0, description="Number of records to skip"),
limit: int = Query(100, ge=1, le=1000, description="Number of records to return"),
current_user: dict = Depends(get_current_user_dep),
db: AsyncSession = Depends(get_db)
):
"""Get compliance records with filtering"""
try:
filters = {}
if ingredient_id:
filters["ingredient_id"] = ingredient_id
if standard:
filters["standard"] = standard
if status_filter:
filters["compliance_status"] = status_filter
query = """
SELECT * FROM food_safety_compliance
WHERE tenant_id = :tenant_id AND is_active = true
"""
params = {"tenant_id": tenant_id}
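        # Filter keys are limited to the fixed set built above (ingredient_id, standard,
        # compliance_status), so interpolating them as column names below is safe;
        # the values themselves are still bound as query parameters.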
if filters:
for key, value in filters.items():
query += f" AND {key} = :{key}"
params[key] = value
query += " ORDER BY created_at DESC LIMIT :limit OFFSET :skip"
params.update({"limit": limit, "skip": skip})
        result = await db.execute(text(query), params)
records = result.fetchall()
return [
            FoodSafetyComplianceResponse(**dict(record._mapping))
for record in records
]
except Exception as e:
logger.error("Error getting compliance records", error=str(e))
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail="Failed to retrieve compliance records"
)
@router.get(
route_builder.build_resource_detail_route("food-safety/compliance", "compliance_id"),
response_model=FoodSafetyComplianceResponse
)
async def get_compliance_record(
compliance_id: UUID = Path(...),
tenant_id: UUID = Path(...),
current_user: dict = Depends(get_current_user_dep),
db: AsyncSession = Depends(get_db)
):
"""Get specific compliance record"""
try:
query = "SELECT * FROM food_safety_compliance WHERE id = :compliance_id AND tenant_id = :tenant_id"
result = await db.execute(query, {"compliance_id": compliance_id, "tenant_id": tenant_id})
record = result.fetchone()
if not record:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
detail="Compliance record not found"
)
        return FoodSafetyComplianceResponse(**dict(record._mapping))
except HTTPException:
raise
except Exception as e:
logger.error("Error getting compliance record", error=str(e))
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail="Failed to retrieve compliance record"
)
@router.put(
route_builder.build_resource_detail_route("food-safety/compliance", "compliance_id"),
response_model=FoodSafetyComplianceResponse
)
@require_user_role(['admin', 'owner', 'member'])
async def update_compliance_record(
compliance_data: FoodSafetyComplianceUpdate,
tenant_id: UUID = Path(...),
compliance_id: UUID = Path(...),
current_user: dict = Depends(get_current_user_dep),
food_safety_service: FoodSafetyService = Depends(get_food_safety_service),
db: AsyncSession = Depends(get_db)
):
"""Update an existing compliance record"""
try:
compliance = await food_safety_service.update_compliance_record(
db,
compliance_id,
tenant_id,
compliance_data,
user_id=UUID(current_user["user_id"])
)
if not compliance:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
detail="Compliance record not found"
)
logger.info("Compliance record updated",
compliance_id=str(compliance.id))
return compliance
except HTTPException:
raise
except Exception as e:
logger.error("Error updating compliance record",
compliance_id=str(compliance_id),
error=str(e))
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail="Failed to update compliance record"
)
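# NOTE: this DELETE route deliberately declares 403 as its status code; deletion is always
# rejected so compliance records are retained for regulatory audits, and callers are pointed
# to the archive endpoint defined below.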
@router.delete(
route_builder.build_resource_detail_route("food-safety/compliance", "compliance_id"),
status_code=status.HTTP_403_FORBIDDEN
)
@require_user_role(['admin', 'owner'])
async def delete_compliance_record(
compliance_id: UUID = Path(...),
tenant_id: UUID = Path(...),
current_user: dict = Depends(get_current_user_dep),
db: AsyncSession = Depends(get_db)
):
"""
Compliance records CANNOT be deleted for regulatory compliance.
Use the archive endpoint to mark records as inactive.
"""
raise HTTPException(
status_code=status.HTTP_403_FORBIDDEN,
detail={
"error": "compliance_records_cannot_be_deleted",
"message": "Compliance records cannot be deleted for regulatory compliance. Use PUT /food-safety/compliance/{id}/archive to archive records instead.",
"reason": "Food safety compliance records must be retained for regulatory audits",
"alternative_endpoint": f"/api/v1/tenants/{tenant_id}/inventory/food-safety/compliance/{compliance_id}/archive"
}
)
@router.put(
route_builder.build_nested_resource_route("food-safety/compliance", "compliance_id", "archive"),
response_model=dict
)
@require_user_role(['admin', 'owner'])
async def archive_compliance_record(
compliance_id: UUID = Path(...),
tenant_id: UUID = Path(...),
current_user: dict = Depends(get_current_user_dep),
db: AsyncSession = Depends(get_db)
):
"""Archive (soft delete) compliance record - marks as inactive but retains for audit"""
try:
query = """
UPDATE food_safety_compliance
SET is_active = false, updated_at = NOW(), updated_by = :user_id
WHERE id = :compliance_id AND tenant_id = :tenant_id
"""
        result = await db.execute(text(query), {
"compliance_id": compliance_id,
"tenant_id": tenant_id,
"user_id": UUID(current_user["user_id"])
})
if result.rowcount == 0:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
detail="Compliance record not found"
)
await db.commit()
# Log audit event for archiving compliance record
try:
from shared.security import create_audit_logger, AuditSeverity, AuditAction
audit_logger = create_audit_logger("inventory-service", AuditLog)
await audit_logger.log_event(
db_session=db,
tenant_id=str(tenant_id),
user_id=current_user["user_id"],
action="archive",
resource_type="compliance_record",
resource_id=str(compliance_id),
severity=AuditSeverity.HIGH.value,
description=f"Archived compliance record (retained for regulatory compliance)",
endpoint=f"/food-safety/compliance/{compliance_id}/archive",
method="PUT"
)
except Exception as audit_error:
logger.warning("Failed to log audit event", error=str(audit_error))
return {
"message": "Compliance record archived successfully",
"compliance_id": str(compliance_id),
"archived": True,
"note": "Record retained for regulatory compliance audits"
}
except HTTPException:
raise
except Exception as e:
logger.error("Error archiving compliance record", error=str(e))
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail="Failed to archive compliance record"
)

View File

@@ -0,0 +1,287 @@
# services/inventory/app/api/food_safety_operations.py
"""
Food Safety Operations API - Business operations for food safety management
"""
from datetime import datetime
from typing import Optional
from decimal import Decimal
from uuid import UUID
from fastapi import APIRouter, Depends, HTTPException, Query, Path, status
from sqlalchemy import text
from sqlalchemy.ext.asyncio import AsyncSession
import structlog
from shared.auth.decorators import get_current_user_dep
from shared.auth.access_control import require_user_role, analytics_tier_required
from shared.routing import RouteBuilder
from app.core.database import get_db
from app.services.food_safety_service import FoodSafetyService
from app.schemas.food_safety import FoodSafetyMetrics
logger = structlog.get_logger()
route_builder = RouteBuilder('inventory')
router = APIRouter(tags=["food-safety-operations"])
async def get_food_safety_service() -> FoodSafetyService:
"""Get food safety service instance"""
return FoodSafetyService()
@router.post(
route_builder.build_nested_resource_route("food-safety/alerts", "alert_id", "acknowledge"),
response_model=dict
)
@require_user_role(['admin', 'owner', 'member'])
async def acknowledge_alert(
tenant_id: UUID = Path(...),
alert_id: UUID = Path(...),
notes: Optional[str] = Query(None, description="Acknowledgment notes"),
current_user: dict = Depends(get_current_user_dep),
db: AsyncSession = Depends(get_db)
):
"""Acknowledge a food safety alert"""
try:
update_query = """
UPDATE food_safety_alerts
SET status = 'acknowledged',
acknowledged_at = NOW(),
acknowledged_by = :user_id,
investigation_notes = COALESCE(investigation_notes, '') || :notes,
updated_at = NOW(),
updated_by = :user_id
WHERE id = :alert_id AND tenant_id = :tenant_id
"""
        result = await db.execute(text(update_query), {
"alert_id": alert_id,
"tenant_id": tenant_id,
"user_id": UUID(current_user["user_id"]),
"notes": f"\nAcknowledged: {notes}" if notes else "\nAcknowledged"
})
if result.rowcount == 0:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
detail="Food safety alert not found"
)
await db.commit()
logger.info("Food safety alert acknowledged",
alert_id=str(alert_id))
return {"message": "Alert acknowledged successfully"}
except HTTPException:
raise
except Exception as e:
logger.error("Error acknowledging alert",
alert_id=str(alert_id),
error=str(e))
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail="Failed to acknowledge alert"
)
@router.get(
route_builder.build_analytics_route("food-safety-metrics"),
response_model=FoodSafetyMetrics
)
async def get_food_safety_metrics(
tenant_id: UUID = Path(...),
days_back: int = Query(30, ge=1, le=365, description="Number of days to analyze"),
current_user: dict = Depends(get_current_user_dep),
db: AsyncSession = Depends(get_db)
):
"""Get food safety performance metrics"""
try:
compliance_query = """
SELECT
COUNT(*) as total,
COUNT(CASE WHEN compliance_status = 'compliant' THEN 1 END) as compliant
FROM food_safety_compliance
WHERE tenant_id = :tenant_id AND is_active = true
"""
        result = await db.execute(text(compliance_query), {"tenant_id": tenant_id})
compliance_stats = result.fetchone()
compliance_rate = 0.0
if compliance_stats.total > 0:
compliance_rate = (compliance_stats.compliant / compliance_stats.total) * 100
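        # days_back is an int bounded by the Query validator (1-365), so interpolating it into
        # the INTERVAL literal below is not an injection risk; other values stay bound parameters.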
temp_query = """
SELECT
COUNT(*) as total_readings,
COUNT(CASE WHEN is_within_range THEN 1 END) as compliant_readings
FROM temperature_logs
WHERE tenant_id = :tenant_id
AND recorded_at > NOW() - INTERVAL '%s days'
""" % days_back
        result = await db.execute(text(temp_query), {"tenant_id": tenant_id})
temp_stats = result.fetchone()
temp_compliance_rate = 0.0
if temp_stats.total_readings > 0:
temp_compliance_rate = (temp_stats.compliant_readings / temp_stats.total_readings) * 100
alert_query = """
SELECT
COUNT(*) as total_alerts,
COUNT(CASE WHEN is_recurring THEN 1 END) as recurring_alerts,
COUNT(CASE WHEN regulatory_action_required THEN 1 END) as regulatory_violations,
AVG(CASE WHEN response_time_minutes IS NOT NULL THEN response_time_minutes END) as avg_response_time,
AVG(CASE WHEN resolution_time_minutes IS NOT NULL THEN resolution_time_minutes END) as avg_resolution_time
FROM food_safety_alerts
WHERE tenant_id = :tenant_id
AND created_at > NOW() - INTERVAL '%s days'
""" % days_back
        result = await db.execute(text(alert_query), {"tenant_id": tenant_id})
alert_stats = result.fetchone()
return FoodSafetyMetrics(
compliance_rate=Decimal(str(compliance_rate)),
temperature_compliance_rate=Decimal(str(temp_compliance_rate)),
alert_response_time_avg=Decimal(str(alert_stats.avg_response_time or 0)),
alert_resolution_time_avg=Decimal(str(alert_stats.avg_resolution_time or 0)),
recurring_issues_count=alert_stats.recurring_alerts or 0,
regulatory_violations=alert_stats.regulatory_violations or 0,
certification_coverage=Decimal(str(compliance_rate)),
audit_score_avg=Decimal("85.0"),
risk_score=Decimal("3.2")
)
except Exception as e:
logger.error("Error getting food safety metrics", error=str(e))
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail="Failed to retrieve food safety metrics"
)
@router.get(
route_builder.build_operations_route("food-safety/status")
)
async def get_food_safety_status(
tenant_id: UUID = Path(...),
current_user: dict = Depends(get_current_user_dep)
):
"""Get food safety service status"""
try:
return {
"service": "food-safety",
"status": "healthy",
"timestamp": datetime.now().isoformat(),
"tenant_id": str(tenant_id),
"features": {
"compliance_tracking": "enabled",
"temperature_monitoring": "enabled",
"automated_alerts": "enabled",
"regulatory_reporting": "enabled"
}
}
except Exception as e:
logger.error("Error getting food safety status", error=str(e))
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail="Failed to get food safety status"
)
@router.get(
route_builder.build_operations_route("food-safety/temperature/violations")
)
async def get_temperature_violations(
tenant_id: UUID = Path(...),
days_back: int = Query(7, ge=1, le=90, description="Days to analyze"),
current_user: dict = Depends(get_current_user_dep),
db: AsyncSession = Depends(get_db)
):
"""Get temperature violations summary"""
try:
query = """
SELECT
COUNT(*) as total_violations,
COUNT(DISTINCT storage_location) as affected_locations,
COUNT(DISTINCT equipment_id) as affected_equipment,
AVG(ABS(temperature_celsius - (min_temp_celsius + max_temp_celsius)/2)) as avg_deviation
FROM temperature_logs
WHERE tenant_id = :tenant_id
AND is_within_range = false
AND recorded_at > NOW() - INTERVAL '%s days'
""" % days_back
result = await db.execute(query, {"tenant_id": tenant_id})
stats = result.fetchone()
return {
"period_days": days_back,
"total_violations": stats.total_violations or 0,
"affected_locations": stats.affected_locations or 0,
"affected_equipment": stats.affected_equipment or 0,
"average_deviation_celsius": float(stats.avg_deviation or 0)
}
except Exception as e:
logger.error("Error getting temperature violations", error=str(e))
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail="Failed to get temperature violations"
)
@router.get(
route_builder.build_operations_route("food-safety/compliance/summary")
)
async def get_compliance_summary(
tenant_id: UUID = Path(...),
current_user: dict = Depends(get_current_user_dep),
db: AsyncSession = Depends(get_db)
):
"""Get compliance summary by standard"""
try:
query = """
SELECT
standard,
COUNT(*) as total,
COUNT(CASE WHEN compliance_status = 'compliant' THEN 1 END) as compliant,
COUNT(CASE WHEN compliance_status = 'non_compliant' THEN 1 END) as non_compliant,
COUNT(CASE WHEN compliance_status = 'pending' THEN 1 END) as pending
FROM food_safety_compliance
WHERE tenant_id = :tenant_id AND is_active = true
GROUP BY standard
ORDER BY standard
"""
result = await db.execute(query, {"tenant_id": tenant_id})
records = result.fetchall()
summary = []
for record in records:
compliance_rate = (record.compliant / record.total * 100) if record.total > 0 else 0
summary.append({
"standard": record.standard,
"total_items": record.total,
"compliant": record.compliant,
"non_compliant": record.non_compliant,
"pending": record.pending,
"compliance_rate": round(compliance_rate, 2)
})
return {
"tenant_id": str(tenant_id),
"standards": summary,
"total_standards": len(summary)
}
except Exception as e:
logger.error("Error getting compliance summary", error=str(e))
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail="Failed to get compliance summary"
)

View File

@@ -0,0 +1,556 @@
# services/inventory/app/api/ingredients.py
"""
Base CRUD operations for inventory ingredient resources
Following standardized URL structure: /api/v1/tenants/{tenant_id}/inventory/{resource}
"""
from typing import List, Optional
from uuid import UUID
from fastapi import APIRouter, Depends, HTTPException, Query, Path, status
from sqlalchemy.ext.asyncio import AsyncSession
import httpx
import structlog
from app.core.database import get_db
from app.services.inventory_service import InventoryService
from app.models import AuditLog
from app.schemas.inventory import (
IngredientCreate,
IngredientUpdate,
IngredientResponse,
StockResponse,
StockCreate,
StockUpdate,
BulkIngredientCreate,
BulkIngredientResponse,
BulkIngredientResult,
)
from shared.auth.decorators import get_current_user_dep
from shared.auth.access_control import require_user_role, admin_role_required, owner_role_required
from shared.routing import RouteBuilder
from shared.security import create_audit_logger, AuditSeverity, AuditAction
logger = structlog.get_logger()
# Create route builder for consistent URL structure
route_builder = RouteBuilder('inventory')
router = APIRouter(tags=["ingredients"])
# Initialize audit logger
audit_logger = create_audit_logger("inventory-service", AuditLog)
# Helper function to extract user ID from user object
def get_current_user_id(current_user: dict = Depends(get_current_user_dep)) -> UUID:
"""Extract user ID from current user context"""
user_id = current_user.get('user_id')
if not user_id:
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail="User ID not found in context"
)
return UUID(user_id)
# ===== INGREDIENTS ENDPOINTS =====
@router.post(
route_builder.build_base_route("ingredients"),
response_model=IngredientResponse,
status_code=status.HTTP_201_CREATED
)
@require_user_role(['admin', 'owner'])
async def create_ingredient(
ingredient_data: IngredientCreate,
tenant_id: UUID = Path(..., description="Tenant ID"),
current_user: dict = Depends(get_current_user_dep),
db: AsyncSession = Depends(get_db)
):
"""Create a new ingredient (Admin/Manager only)"""
try:
# CRITICAL: Check subscription limit before creating
from app.core.config import settings
async with httpx.AsyncClient(timeout=5.0) as client:
try:
limit_check_response = await client.get(
f"{settings.TENANT_SERVICE_URL}/api/v1/tenants/{tenant_id}/subscription/limits/products",
headers={
"x-user-id": str(current_user.get('user_id')),
"x-tenant-id": str(tenant_id)
}
)
if limit_check_response.status_code == 200:
limit_check = limit_check_response.json()
if not limit_check.get('can_add', False):
logger.warning(
"Product limit exceeded",
tenant_id=str(tenant_id),
current=limit_check.get('current_count'),
max=limit_check.get('max_allowed'),
reason=limit_check.get('reason')
)
raise HTTPException(
status_code=status.HTTP_402_PAYMENT_REQUIRED,
detail={
"error": "product_limit_exceeded",
"message": limit_check.get('reason', 'Product limit exceeded'),
"current_count": limit_check.get('current_count'),
"max_allowed": limit_check.get('max_allowed'),
"upgrade_required": True
}
)
else:
logger.warning(
"Failed to check product limit, allowing creation",
tenant_id=str(tenant_id),
status_code=limit_check_response.status_code
)
except httpx.TimeoutException:
logger.warning(
"Timeout checking product limit, allowing creation",
tenant_id=str(tenant_id)
)
except httpx.RequestError as e:
logger.warning(
"Error checking product limit, allowing creation",
tenant_id=str(tenant_id),
error=str(e)
)
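        # The subscription-limit check above intentionally fails open: if the tenant service
        # times out or errors, ingredient creation proceeds rather than blocking the user.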
# Extract user ID - handle service tokens
raw_user_id = current_user.get('user_id')
if current_user.get('type') == 'service':
user_id = None
else:
try:
user_id = UUID(raw_user_id)
except (ValueError, TypeError):
user_id = None
service = InventoryService()
ingredient = await service.create_ingredient(ingredient_data, tenant_id, user_id)
logger.info(
"Ingredient created successfully",
tenant_id=str(tenant_id),
ingredient_id=str(ingredient.id),
ingredient_name=ingredient.name
)
return ingredient
except HTTPException:
raise
except ValueError as e:
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail=str(e)
)
except Exception as e:
logger.error(
"Failed to create ingredient",
tenant_id=str(tenant_id),
error=str(e)
)
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail="Failed to create ingredient"
)
@router.post(
route_builder.build_base_route("ingredients/bulk"),
response_model=BulkIngredientResponse,
status_code=status.HTTP_201_CREATED
)
@require_user_role(['admin', 'owner'])
async def bulk_create_ingredients(
bulk_data: BulkIngredientCreate,
tenant_id: UUID = Path(..., description="Tenant ID"),
current_user: dict = Depends(get_current_user_dep),
db: AsyncSession = Depends(get_db)
):
"""Create multiple ingredients in a single transaction (Admin/Manager only)"""
import uuid
transaction_id = str(uuid.uuid4())
try:
# CRITICAL: Check subscription limit ONCE before creating any ingredients
from app.core.config import settings
total_requested = len(bulk_data.ingredients)
async with httpx.AsyncClient(timeout=5.0) as client:
try:
# Check if we can add this many products
limit_check_response = await client.get(
f"{settings.TENANT_SERVICE_URL}/api/v1/tenants/{tenant_id}/subscription/limits/products",
headers={
"x-user-id": str(current_user.get('user_id')),
"x-tenant-id": str(tenant_id)
}
)
if limit_check_response.status_code == 200:
limit_check = limit_check_response.json()
if not limit_check.get('can_add', False):
logger.warning(
"Bulk product limit exceeded",
tenant_id=str(tenant_id),
requested=total_requested,
current=limit_check.get('current_count'),
max=limit_check.get('max_allowed'),
reason=limit_check.get('reason')
)
raise HTTPException(
status_code=status.HTTP_402_PAYMENT_REQUIRED,
detail={
"error": "product_limit_exceeded",
"message": limit_check.get('reason', 'Product limit exceeded'),
"requested": total_requested,
"current_count": limit_check.get('current_count'),
"max_allowed": limit_check.get('max_allowed'),
"upgrade_required": True
}
)
else:
logger.warning(
"Failed to check product limit, allowing bulk creation",
tenant_id=str(tenant_id),
status_code=limit_check_response.status_code
)
except httpx.TimeoutException:
logger.warning(
"Timeout checking product limit, allowing bulk creation",
tenant_id=str(tenant_id)
)
except httpx.RequestError as e:
logger.warning(
"Error checking product limit, allowing bulk creation",
tenant_id=str(tenant_id),
error=str(e)
)
# Extract user ID - handle service tokens
raw_user_id = current_user.get('user_id')
if current_user.get('type') == 'service':
user_id = None
else:
try:
user_id = UUID(raw_user_id)
except (ValueError, TypeError):
user_id = None
# Create all ingredients
service = InventoryService()
results: List[BulkIngredientResult] = []
total_created = 0
total_failed = 0
for index, ingredient_data in enumerate(bulk_data.ingredients):
try:
ingredient = await service.create_ingredient(ingredient_data, tenant_id, user_id)
results.append(BulkIngredientResult(
index=index,
success=True,
ingredient=IngredientResponse.from_orm(ingredient),
error=None
))
total_created += 1
logger.debug(
"Ingredient created in bulk operation",
tenant_id=str(tenant_id),
ingredient_id=str(ingredient.id),
ingredient_name=ingredient.name,
index=index,
transaction_id=transaction_id
)
except Exception as e:
results.append(BulkIngredientResult(
index=index,
success=False,
ingredient=None,
error=str(e)
))
total_failed += 1
logger.warning(
"Failed to create ingredient in bulk operation",
tenant_id=str(tenant_id),
index=index,
error=str(e),
transaction_id=transaction_id
)
logger.info(
"Bulk ingredient creation completed",
tenant_id=str(tenant_id),
total_requested=total_requested,
total_created=total_created,
total_failed=total_failed,
transaction_id=transaction_id
)
return BulkIngredientResponse(
total_requested=total_requested,
total_created=total_created,
total_failed=total_failed,
results=results,
transaction_id=transaction_id
)
except HTTPException:
raise
except Exception as e:
logger.error(
"Failed to process bulk ingredient creation",
tenant_id=str(tenant_id),
error=str(e),
transaction_id=transaction_id
)
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail="Failed to process bulk ingredient creation"
)
@router.get(
route_builder.build_base_route("ingredients/count"),
response_model=dict
)
async def count_ingredients(
tenant_id: UUID = Path(...),
current_user: dict = Depends(get_current_user_dep),
db: AsyncSession = Depends(get_db)
):
"""Get count of ingredients for a tenant (All users)"""
try:
service = InventoryService()
count = await service.count_ingredients_by_tenant(tenant_id)
return {
"tenant_id": str(tenant_id),
"ingredient_count": count
}
except Exception as e:
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail=f"Failed to count ingredients: {str(e)}"
)
@router.get(
route_builder.build_resource_detail_route("ingredients", "ingredient_id"),
response_model=IngredientResponse
)
async def get_ingredient(
ingredient_id: UUID,
tenant_id: UUID = Path(..., description="Tenant ID"),
current_user: dict = Depends(get_current_user_dep),
db: AsyncSession = Depends(get_db)
):
"""Get ingredient by ID (All users)"""
try:
service = InventoryService()
ingredient = await service.get_ingredient(ingredient_id, tenant_id)
if not ingredient:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
detail="Ingredient not found"
)
return ingredient
except HTTPException:
raise
except Exception as e:
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail="Failed to get ingredient"
)
@router.put(
route_builder.build_resource_detail_route("ingredients", "ingredient_id"),
response_model=IngredientResponse
)
@require_user_role(['admin', 'owner', 'member'])
async def update_ingredient(
ingredient_id: UUID,
ingredient_data: IngredientUpdate,
tenant_id: UUID = Path(..., description="Tenant ID"),
current_user: dict = Depends(get_current_user_dep),
db: AsyncSession = Depends(get_db)
):
"""Update ingredient (Admin/Manager/User)"""
try:
service = InventoryService()
ingredient = await service.update_ingredient(ingredient_id, ingredient_data, tenant_id)
if not ingredient:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
detail="Ingredient not found"
)
return ingredient
except ValueError as e:
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail=str(e)
)
except HTTPException:
raise
except Exception as e:
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail="Failed to update ingredient"
)
@router.get(
route_builder.build_base_route("ingredients"),
response_model=List[IngredientResponse]
)
async def list_ingredients(
tenant_id: UUID = Path(..., description="Tenant ID"),
skip: int = Query(0, ge=0, description="Number of records to skip"),
limit: int = Query(100, ge=1, le=1000, description="Number of records to return"),
category: Optional[str] = Query(None, description="Filter by category"),
product_type: Optional[str] = Query(None, description="Filter by product type"),
is_active: Optional[bool] = Query(None, description="Filter by active status"),
is_low_stock: Optional[bool] = Query(None, description="Filter by low stock status"),
needs_reorder: Optional[bool] = Query(None, description="Filter by reorder needed"),
search: Optional[str] = Query(None, description="Search in name, SKU, or barcode"),
current_user: dict = Depends(get_current_user_dep),
db: AsyncSession = Depends(get_db)
):
"""List ingredients with filtering (All users)"""
try:
service = InventoryService()
# Build filters
filters = {}
if category:
filters['category'] = category
if product_type:
filters['product_type'] = product_type
if is_active is not None:
filters['is_active'] = is_active
if is_low_stock is not None:
filters['is_low_stock'] = is_low_stock
if needs_reorder is not None:
filters['needs_reorder'] = needs_reorder
if search:
filters['search'] = search
ingredients = await service.get_ingredients(tenant_id, skip, limit, filters)
return ingredients
except Exception as e:
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail="Failed to list ingredients"
)
@router.delete(
route_builder.build_resource_detail_route("ingredients", "ingredient_id"),
status_code=status.HTTP_204_NO_CONTENT
)
@admin_role_required
async def soft_delete_ingredient(
ingredient_id: UUID,
tenant_id: UUID = Path(..., description="Tenant ID"),
current_user: dict = Depends(get_current_user_dep),
db: AsyncSession = Depends(get_db)
):
"""Soft delete ingredient - mark as inactive (Admin only)"""
try:
service = InventoryService()
result = await service.soft_delete_ingredient(ingredient_id, tenant_id)
return None
except ValueError as e:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
detail=str(e)
)
except Exception as e:
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail="Failed to soft delete ingredient"
)
@router.delete(
route_builder.build_nested_resource_route("ingredients", "ingredient_id", "hard"),
response_model=dict
)
@admin_role_required
async def hard_delete_ingredient(
ingredient_id: UUID,
tenant_id: UUID = Path(..., description="Tenant ID"),
current_user: dict = Depends(get_current_user_dep),
db: AsyncSession = Depends(get_db)
):
"""Hard delete ingredient and all associated data (Admin only)"""
try:
service = InventoryService()
deletion_summary = await service.hard_delete_ingredient(ingredient_id, tenant_id)
# Log audit event for hard deletion
try:
await audit_logger.log_deletion(
db_session=db,
tenant_id=str(tenant_id),
user_id=current_user["user_id"],
resource_type="ingredient",
resource_id=str(ingredient_id),
resource_data=deletion_summary,
description=f"Hard deleted ingredient and all associated data",
endpoint=f"/ingredients/{ingredient_id}/hard",
method="DELETE"
)
except Exception as audit_error:
            logger.warning("Failed to log audit event", error=str(audit_error))
return deletion_summary
except ValueError as e:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
detail=str(e)
)
except Exception as e:
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail="Failed to hard delete ingredient"
)
@router.get(
route_builder.build_nested_resource_route("ingredients", "ingredient_id", "stock"),
response_model=List[StockResponse]
)
async def get_ingredient_stock(
ingredient_id: UUID,
tenant_id: UUID = Path(..., description="Tenant ID"),
include_unavailable: bool = Query(False, description="Include unavailable stock"),
current_user: dict = Depends(get_current_user_dep),
db: AsyncSession = Depends(get_db)
):
"""Get stock entries for an ingredient (All users)"""
try:
service = InventoryService()
stock_entries = await service.get_stock_by_ingredient(
ingredient_id, tenant_id, include_unavailable
)
return stock_entries
except Exception as e:
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail="Failed to get ingredient stock"
)

View File

@@ -0,0 +1,46 @@
"""
Internal API for Inventory Service
Handles internal service-to-service operations
"""
from fastapi import APIRouter, Depends, HTTPException, Header
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select, func
from uuid import UUID
import structlog
from app.core.database import get_db
from app.core.config import settings
from app.models import Ingredient
logger = structlog.get_logger()
router = APIRouter(prefix="/internal", tags=["internal"])
@router.get("/count")
async def get_ingredient_count(
tenant_id: str,
db: AsyncSession = Depends(get_db)
):
"""
Get count of active ingredients for onboarding status check.
Internal endpoint for tenant service.
"""
try:
count = await db.scalar(
select(func.count()).select_from(Ingredient)
.where(
Ingredient.tenant_id == UUID(tenant_id),
Ingredient.is_active == True
)
)
return {
"count": count or 0,
"tenant_id": tenant_id
}
except Exception as e:
logger.error("Failed to get ingredient count", tenant_id=tenant_id, error=str(e))
raise HTTPException(status_code=500, detail=f"Failed to get ingredient count: {str(e)}")

View File

@@ -0,0 +1,87 @@
# services/inventory/app/api/internal_alert_trigger.py
"""
Internal API for triggering inventory alerts.
Used by demo session cloning to generate realistic inventory alerts.
URL Pattern: /api/v1/tenants/{tenant_id}/inventory/internal/alerts/trigger
This follows the tenant-scoped pattern so gateway can proxy correctly.
"""
from fastapi import APIRouter, HTTPException, Request, Path
from uuid import UUID
import structlog
logger = structlog.get_logger()
router = APIRouter()
# New URL pattern: tenant-scoped so gateway proxies to inventory service correctly
@router.post("/api/v1/tenants/{tenant_id}/inventory/internal/alerts/trigger")
async def trigger_inventory_alerts(
tenant_id: UUID = Path(..., description="Tenant ID to check inventory for"),
request: Request = None
) -> dict:
"""
Trigger comprehensive inventory alert checks for a specific tenant (internal use only).
This endpoint is called by the demo session cloning process after inventory
data is seeded to generate realistic inventory alerts including:
- Critical stock shortages
- Expiring ingredients
- Overstock situations
Security: Protected by x-internal-service header check.
"""
try:
# Verify internal service header
if not request or request.headers.get("x-internal-service") not in ["demo-session", "internal"]:
logger.warning("Unauthorized internal API call", tenant_id=str(tenant_id))
raise HTTPException(
status_code=403,
detail="This endpoint is for internal service use only"
)
# Get inventory scheduler from app state
inventory_scheduler = getattr(request.app.state, 'inventory_scheduler', None)
if not inventory_scheduler:
logger.error("Inventory scheduler not initialized")
raise HTTPException(
status_code=500,
detail="Inventory scheduler not available"
)
# Trigger comprehensive inventory alert checks for the specific tenant
logger.info("Triggering comprehensive inventory alert checks", tenant_id=str(tenant_id))
# Call the scheduler's manual trigger method
result = await inventory_scheduler.trigger_manual_check(tenant_id)
if result.get("success", False):
logger.info(
"Inventory alert checks completed successfully",
tenant_id=str(tenant_id),
alerts_generated=result.get("alerts_generated", 0)
)
else:
logger.error(
"Inventory alert checks failed",
tenant_id=str(tenant_id),
error=result.get("error", "Unknown error")
)
return result
except HTTPException:
raise
except Exception as e:
logger.error(
"Error triggering inventory alerts",
tenant_id=str(tenant_id),
error=str(e),
exc_info=True
)
raise HTTPException(
status_code=500,
detail=f"Failed to trigger inventory alerts: {str(e)}"
)

View File

@@ -0,0 +1,602 @@
"""
Internal Demo Cloning API for Inventory Service
Handles internal demo data cloning operations
"""
from fastapi import APIRouter, Depends, HTTPException, Header
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select, delete
from typing import Optional
import structlog
import json
from pathlib import Path
from datetime import datetime, timezone, timedelta
import uuid
from uuid import UUID
from app.core.database import get_db
from app.core.config import settings
from app.models import Ingredient, Stock, ProductType
from shared.utils.demo_dates import adjust_date_for_demo, resolve_time_marker, calculate_edge_case_times
logger = structlog.get_logger()
router = APIRouter(prefix="/internal/demo", tags=["internal"])
def parse_date_field(date_value, session_time: datetime, field_name: str = "date") -> Optional[datetime]:
"""
Parse date field, handling both ISO strings and BASE_TS markers.
Supports:
- BASE_TS markers: "BASE_TS + 1h30m", "BASE_TS - 2d"
- ISO 8601 strings: "2025-01-15T06:00:00Z"
- None values (returns None)
Returns timezone-aware datetime or None.
"""
if not date_value:
return None
# Check if it's a BASE_TS marker
if isinstance(date_value, str) and date_value.startswith("BASE_TS"):
try:
return resolve_time_marker(date_value, session_time)
except ValueError as e:
logger.warning(
f"Invalid BASE_TS marker in {field_name}",
marker=date_value,
error=str(e)
)
return None
# Handle regular ISO date strings
try:
if isinstance(date_value, str):
original_date = datetime.fromisoformat(date_value.replace('Z', '+00:00'))
elif hasattr(date_value, 'isoformat'):
original_date = date_value
else:
logger.warning(f"Unsupported date format in {field_name}", date_value=date_value)
return None
return adjust_date_for_demo(original_date, session_time)
except (ValueError, AttributeError) as e:
logger.warning(
f"Invalid date format in {field_name}",
date_value=date_value,
error=str(e)
)
return None
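# Illustrative usage (marker semantics assumed from resolve_time_marker / adjust_date_for_demo):
#   parse_date_field("BASE_TS + 2d", session_time)          -> session_time + 2 days
#   parse_date_field("2025-01-15T06:00:00Z", session_time)  -> that timestamp shifted relative
#                                                              to the demo session creation time
#   parse_date_field(None, session_time)                     -> None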
@router.post("/clone")
async def clone_demo_data_internal(
base_tenant_id: str,
virtual_tenant_id: str,
demo_account_type: str,
session_id: Optional[str] = None,
session_created_at: Optional[str] = None,
db: AsyncSession = Depends(get_db)
):
"""
Clone inventory service data for a virtual demo tenant
This endpoint creates fresh demo data by:
1. Loading seed data from JSON files
2. Applying XOR-based ID transformation
3. Adjusting dates relative to session creation time
4. Creating records in the virtual tenant
Args:
base_tenant_id: Template tenant UUID (for reference)
virtual_tenant_id: Target virtual tenant UUID
demo_account_type: Type of demo account
session_id: Originating session ID for tracing
session_created_at: Session creation timestamp for date adjustment
db: Database session
Returns:
Dictionary with cloning results
Raises:
HTTPException: On validation or cloning errors
"""
start_time = datetime.now(timezone.utc)
try:
# Validate UUIDs
virtual_uuid = UUID(virtual_tenant_id)
# Parse session creation time for date adjustment
if session_created_at:
try:
session_time = datetime.fromisoformat(session_created_at.replace('Z', '+00:00'))
except (ValueError, AttributeError):
session_time = start_time
else:
session_time = start_time
# Debug logging for UUID values
logger.debug("Received UUID values", base_tenant_id=base_tenant_id, virtual_tenant_id=virtual_tenant_id)
if not all([base_tenant_id, virtual_tenant_id, session_id]):
raise HTTPException(
status_code=400,
detail="Missing required parameters: base_tenant_id, virtual_tenant_id, session_id"
)
# Validate UUID format before processing
try:
UUID(base_tenant_id)
UUID(virtual_tenant_id)
except ValueError as e:
logger.error("Invalid UUID format in request",
base_tenant_id=base_tenant_id,
virtual_tenant_id=virtual_tenant_id,
error=str(e))
raise HTTPException(
status_code=400,
detail=f"Invalid UUID format: {str(e)}"
)
        logger.info(
            "Starting inventory data cloning with date adjustment",
            base_tenant_id=base_tenant_id,
            virtual_tenant_id=virtual_tenant_id,
            demo_account_type=demo_account_type,
            session_id=session_id,
            session_time=session_time.isoformat()
)
# Load seed data from JSON files
from shared.utils.seed_data_paths import get_seed_data_path
if demo_account_type == "professional":
json_file = get_seed_data_path("professional", "03-inventory.json")
elif demo_account_type == "enterprise":
json_file = get_seed_data_path("enterprise", "03-inventory.json")
elif demo_account_type == "enterprise_child":
json_file = get_seed_data_path("enterprise", "03-inventory.json", child_id=base_tenant_id)
else:
raise ValueError(f"Invalid demo account type: {demo_account_type}")
# Load JSON data
with open(json_file, 'r', encoding='utf-8') as f:
seed_data = json.load(f)
# Check if data already exists for this virtual tenant (idempotency)
existing_check = await db.execute(
select(Ingredient).where(Ingredient.tenant_id == virtual_tenant_id).limit(1)
)
existing_ingredient = existing_check.scalar_one_or_none()
if existing_ingredient:
logger.warning(
"Demo data already exists, skipping clone",
virtual_tenant_id=virtual_tenant_id
)
return {
"status": "skipped",
"reason": "Data already exists",
"records_cloned": 0
}
# Transform and insert data
records_cloned = 0
# Clone ingredients
for ingredient_data in seed_data.get('ingredients', []):
# Transform ID
from shared.utils.demo_id_transformer import transform_id
try:
ingredient_uuid = UUID(ingredient_data['id'])
tenant_uuid = UUID(virtual_tenant_id)
transformed_id = transform_id(ingredient_data['id'], tenant_uuid)
except ValueError as e:
logger.error("Failed to parse UUIDs for ID transformation",
ingredient_id=ingredient_data['id'],
virtual_tenant_id=virtual_tenant_id,
error=str(e))
raise HTTPException(
status_code=400,
detail=f"Invalid UUID format in ingredient data: {str(e)}"
)
# Transform dates using standardized helper
ingredient_data['created_at'] = parse_date_field(
ingredient_data.get('created_at'), session_time, 'created_at'
) or session_time
ingredient_data['updated_at'] = parse_date_field(
ingredient_data.get('updated_at'), session_time, 'updated_at'
) or session_time
# Map category field to ingredient_category enum
if 'category' in ingredient_data:
category_value = ingredient_data.pop('category')
# Convert category string to IngredientCategory enum
from app.models.inventory import IngredientCategory
try:
ingredient_data['ingredient_category'] = IngredientCategory[category_value.upper()]
except KeyError:
# If category not found in enum, use OTHER
ingredient_data['ingredient_category'] = IngredientCategory.OTHER
# Map unit_of_measure string to enum
if 'unit_of_measure' in ingredient_data:
from app.models.inventory import UnitOfMeasure
unit_mapping = {
'kilograms': UnitOfMeasure.KILOGRAMS,
'grams': UnitOfMeasure.GRAMS,
'liters': UnitOfMeasure.LITERS,
'milliliters': UnitOfMeasure.MILLILITERS,
'units': UnitOfMeasure.UNITS,
'pieces': UnitOfMeasure.PIECES,
'packages': UnitOfMeasure.PACKAGES,
'bags': UnitOfMeasure.BAGS,
'boxes': UnitOfMeasure.BOXES
}
# Also support uppercase versions
unit_mapping.update({
'KILOGRAMS': UnitOfMeasure.KILOGRAMS,
'GRAMS': UnitOfMeasure.GRAMS,
'LITERS': UnitOfMeasure.LITERS,
'MILLILITERS': UnitOfMeasure.MILLILITERS,
'UNITS': UnitOfMeasure.UNITS,
'PIECES': UnitOfMeasure.PIECES,
'PACKAGES': UnitOfMeasure.PACKAGES,
'BAGS': UnitOfMeasure.BAGS,
'BOXES': UnitOfMeasure.BOXES
})
unit_str = ingredient_data['unit_of_measure']
if unit_str in unit_mapping:
ingredient_data['unit_of_measure'] = unit_mapping[unit_str]
else:
# Default to units if not found
ingredient_data['unit_of_measure'] = UnitOfMeasure.UNITS
logger.warning("Unknown unit_of_measure, defaulting to UNITS",
original_unit=unit_str)
# Note: All seed data fields now match the model schema exactly
# No field filtering needed
# Remove original id and tenant_id from ingredient_data to avoid conflict
ingredient_data.pop('id', None)
ingredient_data.pop('tenant_id', None)
# Create ingredient
ingredient = Ingredient(
id=str(transformed_id),
tenant_id=str(virtual_tenant_id),
**ingredient_data
)
db.add(ingredient)
records_cloned += 1
# Commit ingredients before creating stock to ensure foreign key references exist
await db.flush() # Use flush instead of commit to maintain transaction while continuing
# Clone stock batches
for stock_data in seed_data.get('stock', []):
# Transform ID - handle both UUID and string IDs
from shared.utils.demo_id_transformer import transform_id
try:
# Try to parse as UUID first
stock_uuid = UUID(stock_data['id'])
tenant_uuid = UUID(virtual_tenant_id)
transformed_id = transform_id(stock_data['id'], tenant_uuid)
except ValueError:
# If not a UUID, generate a deterministic UUID from the string ID
import hashlib
stock_id_string = stock_data['id']
tenant_uuid = UUID(virtual_tenant_id)
# Create a deterministic UUID from the string ID and tenant ID
combined = f"{stock_id_string}-{tenant_uuid}"
hash_obj = hashlib.sha256(combined.encode('utf-8'))
transformed_id = UUID(hash_obj.hexdigest()[:32])
logger.info("Generated UUID for non-UUID stock ID",
original_id=stock_id_string,
generated_id=str(transformed_id))
# Transform dates using standardized helper
stock_data['received_date'] = parse_date_field(
stock_data.get('received_date'), session_time, 'received_date'
)
stock_data['expiration_date'] = parse_date_field(
stock_data.get('expiration_date'), session_time, 'expiration_date'
)
stock_data['best_before_date'] = parse_date_field(
stock_data.get('best_before_date'), session_time, 'best_before_date'
)
stock_data['created_at'] = parse_date_field(
stock_data.get('created_at'), session_time, 'created_at'
) or session_time
stock_data['updated_at'] = parse_date_field(
stock_data.get('updated_at'), session_time, 'updated_at'
) or session_time
# Remove original id and tenant_id from stock_data to avoid conflict
stock_data.pop('id', None)
stock_data.pop('tenant_id', None)
# Remove notes field as it doesn't exist in the Stock model
stock_data.pop('notes', None)
# Transform ingredient_id to match transformed ingredient IDs
if 'ingredient_id' in stock_data:
ingredient_id_str = stock_data['ingredient_id']
try:
ingredient_uuid = UUID(ingredient_id_str)
transformed_ingredient_id = transform_id(ingredient_id_str, tenant_uuid)
stock_data['ingredient_id'] = str(transformed_ingredient_id)
except ValueError as e:
logger.error("Failed to transform ingredient_id",
original_ingredient_id=ingredient_id_str,
error=str(e))
raise HTTPException(
status_code=400,
detail=f"Invalid ingredient_id format: {str(e)}"
)
# Transform supplier_id if present
if 'supplier_id' in stock_data:
supplier_id_str = stock_data['supplier_id']
try:
supplier_uuid = UUID(supplier_id_str)
transformed_supplier_id = transform_id(supplier_id_str, tenant_uuid)
stock_data['supplier_id'] = str(transformed_supplier_id)
except ValueError as e:
logger.error("Failed to transform supplier_id",
original_supplier_id=supplier_id_str,
error=str(e))
raise HTTPException(
status_code=400,
detail=f"Invalid supplier_id format: {str(e)}"
)
# Create stock batch
stock = Stock(
id=str(transformed_id),
tenant_id=str(virtual_tenant_id),
**stock_data
)
db.add(stock)
records_cloned += 1
# Clone stock movements (for waste tracking and sustainability metrics)
from app.models.inventory import StockMovement, StockMovementType
for movement_data in seed_data.get('stock_movements', []):
# Transform ID
from shared.utils.demo_id_transformer import transform_id
try:
movement_uuid = UUID(movement_data['id'])
tenant_uuid = UUID(virtual_tenant_id)
transformed_id = transform_id(movement_data['id'], tenant_uuid)
except ValueError:
import hashlib
movement_id_string = movement_data['id']
tenant_uuid = UUID(virtual_tenant_id)
combined = f"{movement_id_string}-{tenant_uuid}"
hash_obj = hashlib.sha256(combined.encode('utf-8'))
transformed_id = UUID(hash_obj.hexdigest()[:32])
# Transform dates
movement_data['movement_date'] = parse_date_field(
movement_data.get('movement_date'), session_time, 'movement_date'
) or session_time
movement_data['created_at'] = parse_date_field(
movement_data.get('created_at'), session_time, 'created_at'
) or session_time
# Transform related IDs
if 'ingredient_id' in movement_data:
ingredient_id_str = movement_data['ingredient_id']
try:
transformed_ingredient_id = transform_id(ingredient_id_str, tenant_uuid)
movement_data['ingredient_id'] = str(transformed_ingredient_id)
except ValueError as e:
logger.error("Failed to transform ingredient_id in movement",
original_id=ingredient_id_str, error=str(e))
raise HTTPException(status_code=400, detail=f"Invalid ingredient_id: {str(e)}")
if 'stock_id' in movement_data and movement_data['stock_id']:
stock_id_str = movement_data['stock_id']
try:
transformed_stock_id = transform_id(stock_id_str, tenant_uuid)
movement_data['stock_id'] = str(transformed_stock_id)
except ValueError:
# If stock_id doesn't exist or can't be transformed, set to None
movement_data['stock_id'] = None
if 'supplier_id' in movement_data and movement_data['supplier_id']:
supplier_id_str = movement_data['supplier_id']
try:
transformed_supplier_id = transform_id(supplier_id_str, tenant_uuid)
movement_data['supplier_id'] = str(transformed_supplier_id)
except ValueError:
movement_data['supplier_id'] = None
if 'created_by' in movement_data and movement_data['created_by']:
created_by_str = movement_data['created_by']
try:
transformed_created_by = transform_id(created_by_str, tenant_uuid)
movement_data['created_by'] = str(transformed_created_by)
except ValueError:
movement_data['created_by'] = None
# Remove original id and tenant_id
movement_data.pop('id', None)
movement_data.pop('tenant_id', None)
# Create stock movement
stock_movement = StockMovement(
id=str(transformed_id),
tenant_id=str(virtual_tenant_id),
**movement_data
)
db.add(stock_movement)
records_cloned += 1
# Note: Edge cases are now handled exclusively through JSON seed data
# The seed data files already contain comprehensive edge cases including:
# - Low stock items below reorder points
# - Items expiring soon
# - Freshly received stock
# - Waste movements for sustainability tracking
# This ensures standardization and single source of truth for demo data
logger.info(
"Edge cases handled by JSON seed data - no manual creation needed",
seed_data_edge_cases="low_stock, expiring_soon, fresh_stock, waste_movements"
)
await db.commit()
duration_ms = int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000)
logger.info(
"Inventory data cloned successfully",
virtual_tenant_id=virtual_tenant_id,
records_cloned=records_cloned,
duration_ms=duration_ms,
ingredients_cloned=len(seed_data.get('ingredients', [])),
stock_batches_cloned=len(seed_data.get('stock', [])),
stock_movements_cloned=len(seed_data.get('stock_movements', []))
)
return {
"service": "inventory",
"status": "completed",
"records_cloned": records_cloned,
"duration_ms": duration_ms,
"details": {
"ingredients": len(seed_data.get('ingredients', [])),
"stock": len(seed_data.get('stock', [])),
"virtual_tenant_id": str(virtual_tenant_id)
}
}
    except HTTPException:
        await db.rollback()
        raise
    except ValueError as e:
logger.error("Invalid UUID format", error=str(e), virtual_tenant_id=virtual_tenant_id)
raise HTTPException(status_code=400, detail=f"Invalid UUID: {str(e)}")
except Exception as e:
logger.error(
"Failed to clone inventory data",
error=str(e),
virtual_tenant_id=virtual_tenant_id,
exc_info=True
)
# Rollback on error
await db.rollback()
return {
"service": "inventory",
"status": "failed",
"records_cloned": 0,
"duration_ms": int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000),
"error": str(e)
}
@router.get("/clone/health")
async def clone_health_check():
"""
Health check for internal cloning endpoint
Used by orchestrator to verify service availability
"""
return {
"service": "inventory",
"clone_endpoint": "available",
"version": "2.0.0"
}
@router.delete("/tenant/{virtual_tenant_id}")
async def delete_demo_tenant_data(
virtual_tenant_id: UUID,
db: AsyncSession = Depends(get_db)
):
"""
Delete all demo data for a virtual tenant.
This endpoint is idempotent - safe to call multiple times.
"""
start_time = datetime.now(timezone.utc)
from app.models.inventory import StockMovement
records_deleted = {
"stock_movements": 0,
"stock": 0,
"ingredients": 0,
"total": 0
}
try:
# Delete in reverse dependency order
# 1. Delete stock movements (depends on stock and ingredients)
result = await db.execute(
delete(StockMovement)
.where(StockMovement.tenant_id == virtual_tenant_id)
)
records_deleted["stock_movements"] = result.rowcount
# 2. Delete stock batches (depends on ingredients)
result = await db.execute(
delete(Stock)
.where(Stock.tenant_id == virtual_tenant_id)
)
records_deleted["stock"] = result.rowcount
# 3. Delete ingredients
result = await db.execute(
delete(Ingredient)
.where(Ingredient.tenant_id == virtual_tenant_id)
)
records_deleted["ingredients"] = result.rowcount
records_deleted["total"] = records_deleted["stock_movements"] + records_deleted["stock"] + records_deleted["ingredients"]
await db.commit()
logger.info(
"demo_data_deleted",
service="inventory",
virtual_tenant_id=str(virtual_tenant_id),
records_deleted=records_deleted
)
return {
"service": "inventory",
"status": "deleted",
"virtual_tenant_id": str(virtual_tenant_id),
"records_deleted": records_deleted,
"duration_ms": int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000)
}
except Exception as e:
await db.rollback()
logger.error(
"demo_data_deletion_failed",
service="inventory",
virtual_tenant_id=str(virtual_tenant_id),
error=str(e)
)
raise HTTPException(
status_code=500,
detail=f"Failed to delete demo data: {str(e)}"
)

View File

@@ -0,0 +1,747 @@
# services/inventory/app/api/inventory_operations.py
"""
Inventory Operations API - Business operations for inventory management
"""
from typing import List, Optional, Dict, Any
from uuid import UUID, uuid4
from fastapi import APIRouter, Depends, HTTPException, Query, Path, status
from sqlalchemy.ext.asyncio import AsyncSession
from pydantic import BaseModel, Field
import structlog
from app.core.database import get_db
from app.services.inventory_service import InventoryService
from app.services.product_classifier import ProductClassifierService, get_product_classifier
from shared.auth.decorators import get_current_user_dep
from shared.auth.access_control import require_user_role
from shared.routing import RouteBuilder
logger = structlog.get_logger()
route_builder = RouteBuilder('inventory')
router = APIRouter(tags=["inventory-operations"])
def get_current_user_id(current_user: dict = Depends(get_current_user_dep)) -> UUID:
"""Extract user ID from current user context"""
user_id = current_user.get('user_id')
if not user_id:
if current_user.get('type') == 'service':
return None
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail="User ID not found in context"
)
try:
return UUID(user_id)
except (ValueError, TypeError):
return None
# ===== Stock Operations =====
@router.post(
route_builder.build_operations_route("consume-stock"),
response_model=dict
)
@require_user_role(['admin', 'owner', 'member'])
async def consume_stock(
tenant_id: UUID = Path(..., description="Tenant ID"),
ingredient_id: UUID = Query(..., description="Ingredient ID to consume"),
quantity: float = Query(..., gt=0, description="Quantity to consume"),
reference_number: Optional[str] = Query(None, description="Reference number"),
notes: Optional[str] = Query(None, description="Additional notes"),
fifo: bool = Query(True, description="Use FIFO method"),
current_user: dict = Depends(get_current_user_dep),
db: AsyncSession = Depends(get_db)
):
"""Consume stock for production"""
try:
user_id = get_current_user_id(current_user)
service = InventoryService()
consumed_items = await service.consume_stock(
ingredient_id, quantity, tenant_id, user_id, reference_number, notes, fifo
)
return {
"ingredient_id": str(ingredient_id),
"total_quantity_consumed": quantity,
"consumed_items": consumed_items,
"method": "FIFO" if fifo else "LIFO"
}
except ValueError as e:
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail=str(e)
)
except Exception as e:
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail="Failed to consume stock"
)
@router.get(
route_builder.build_operations_route("stock/expiring"),
response_model=List[dict]
)
async def get_expiring_stock(
tenant_id: UUID = Path(..., description="Tenant ID"),
days_ahead: int = Query(7, ge=1, le=365, description="Days ahead to check"),
current_user: dict = Depends(get_current_user_dep),
db: AsyncSession = Depends(get_db)
):
"""Get stock items expiring within specified days"""
try:
service = InventoryService()
expiring_items = await service.check_expiration_alerts(tenant_id, days_ahead)
return expiring_items
except Exception as e:
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail="Failed to get expiring stock"
)
@router.get(
route_builder.build_operations_route("stock/low-stock"),
response_model=List[dict]
)
async def get_low_stock(
tenant_id: UUID = Path(..., description="Tenant ID"),
current_user: dict = Depends(get_current_user_dep),
db: AsyncSession = Depends(get_db)
):
"""Get ingredients with low stock levels"""
try:
service = InventoryService()
low_stock_items = await service.check_low_stock_alerts(tenant_id)
return low_stock_items
except Exception as e:
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail="Failed to get low stock items"
)
@router.get(
route_builder.build_operations_route("stock/summary"),
response_model=dict
)
async def get_stock_summary(
tenant_id: UUID = Path(..., description="Tenant ID"),
current_user: dict = Depends(get_current_user_dep),
db: AsyncSession = Depends(get_db)
):
"""Get stock summary for tenant"""
try:
service = InventoryService()
summary = await service.get_inventory_summary(tenant_id)
return summary.dict()
except Exception as e:
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail="Failed to get stock summary"
)
# ===== Product Classification Operations =====
class ProductClassificationRequest(BaseModel):
"""Request for single product classification"""
product_name: str = Field(..., description="Product name to classify")
    sales_volume: Optional[float] = Field(None, description="Total sales volume for context")
sales_data: Dict[str, Any] = Field(default_factory=dict, description="Additional sales context")
class BatchClassificationRequest(BaseModel):
"""Request for batch product classification"""
products: List[ProductClassificationRequest] = Field(..., description="Products to classify")
class ProductSuggestionResponse(BaseModel):
"""Response with product classification suggestion"""
suggestion_id: str
original_name: str
suggested_name: str
product_type: str
category: str
unit_of_measure: str
confidence_score: float
estimated_shelf_life_days: Optional[int] = None
requires_refrigeration: bool = False
requires_freezing: bool = False
is_seasonal: bool = False
suggested_supplier: Optional[str] = None
notes: Optional[str] = None
class BusinessModelAnalysisResponse(BaseModel):
"""Response with business model analysis"""
model: str
confidence: float
ingredient_count: int
finished_product_count: int
ingredient_ratio: float
recommendations: List[str]
class BatchClassificationResponse(BaseModel):
"""Response for batch classification"""
suggestions: List[ProductSuggestionResponse]
business_model_analysis: BusinessModelAnalysisResponse
total_products: int
high_confidence_count: int
low_confidence_count: int
@router.post(
route_builder.build_operations_route("classify-product"),
response_model=ProductSuggestionResponse
)
async def classify_single_product(
request: ProductClassificationRequest,
tenant_id: UUID = Path(..., description="Tenant ID"),
current_user: Dict[str, Any] = Depends(get_current_user_dep),
classifier: ProductClassifierService = Depends(get_product_classifier)
):
"""Classify a single product for inventory creation"""
try:
suggestion = classifier.classify_product(
request.product_name,
request.sales_volume
)
response = ProductSuggestionResponse(
suggestion_id=str(uuid4()),
original_name=suggestion.original_name,
suggested_name=suggestion.suggested_name,
product_type=suggestion.product_type.value,
category=suggestion.category,
unit_of_measure=suggestion.unit_of_measure.value,
confidence_score=suggestion.confidence_score,
estimated_shelf_life_days=suggestion.estimated_shelf_life_days,
requires_refrigeration=suggestion.requires_refrigeration,
requires_freezing=suggestion.requires_freezing,
is_seasonal=suggestion.is_seasonal,
suggested_supplier=suggestion.suggested_supplier,
notes=suggestion.notes
)
logger.info("Classified single product",
product=request.product_name,
classification=suggestion.product_type.value,
confidence=suggestion.confidence_score,
tenant_id=tenant_id)
return response
except Exception as e:
logger.error("Failed to classify product",
error=str(e), product=request.product_name, tenant_id=tenant_id)
raise HTTPException(status_code=500, detail=f"Classification failed: {str(e)}")
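# Illustrative request/response for the single-product classification endpoint.
# Values are made-up examples; the concrete path is produced by
# route_builder.build_operations_route("classify-product").
#
#   POST .../operations/classify-product
#   {"product_name": "Sourdough loaf", "sales_volume": 120.0}
#
#   -> {"original_name": "Sourdough loaf", "suggested_name": "Sourdough Loaf",
#       "product_type": "finished_product", "unit_of_measure": "unit",
#       "confidence_score": 0.85, ...}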
@router.post(
route_builder.build_operations_route("classify-products-batch"),
response_model=BatchClassificationResponse
)
async def classify_products_batch(
request: BatchClassificationRequest,
tenant_id: UUID = Path(..., description="Tenant ID"),
current_user: Dict[str, Any] = Depends(get_current_user_dep),
classifier: ProductClassifierService = Depends(get_product_classifier)
):
"""Classify multiple products for onboarding automation"""
try:
if not request.products:
raise HTTPException(status_code=400, detail="No products provided for classification")
product_names = [p.product_name for p in request.products]
sales_volumes = {p.product_name: p.sales_volume for p in request.products if p.sales_volume}
suggestions = classifier.classify_products_batch(product_names, sales_volumes)
suggestion_responses = []
for suggestion in suggestions:
suggestion_responses.append(ProductSuggestionResponse(
suggestion_id=str(uuid4()),
original_name=suggestion.original_name,
suggested_name=suggestion.suggested_name,
product_type=suggestion.product_type.value,
category=suggestion.category,
unit_of_measure=suggestion.unit_of_measure.value,
confidence_score=suggestion.confidence_score,
estimated_shelf_life_days=suggestion.estimated_shelf_life_days,
requires_refrigeration=suggestion.requires_refrigeration,
requires_freezing=suggestion.requires_freezing,
is_seasonal=suggestion.is_seasonal,
suggested_supplier=suggestion.suggested_supplier,
notes=suggestion.notes
))
# Analyze business model
ingredient_count = sum(1 for s in suggestions if s.product_type.value == 'INGREDIENT')
finished_count = sum(1 for s in suggestions if s.product_type.value == 'FINISHED_PRODUCT')
        semi_finished_count = sum(
            1 for s in suggestions
            if 'semi' in s.suggested_name.lower()
            or 'frozen' in s.suggested_name.lower()
            or 'pre' in s.suggested_name.lower()
        )
total = len(suggestions)
ingredient_ratio = ingredient_count / total if total > 0 else 0
semi_finished_ratio = semi_finished_count / total if total > 0 else 0
if ingredient_ratio >= 0.7:
model = 'individual_bakery'
elif ingredient_ratio <= 0.2 and semi_finished_ratio >= 0.3:
model = 'central_baker_satellite'
elif ingredient_ratio <= 0.3:
model = 'retail_bakery'
else:
model = 'hybrid_bakery'
if model == 'individual_bakery':
confidence = min(ingredient_ratio * 1.2, 0.95)
elif model == 'central_baker_satellite':
confidence = min((semi_finished_ratio + (1 - ingredient_ratio)) / 2 * 1.2, 0.95)
else:
confidence = max(abs(ingredient_ratio - 0.5) * 2, 0.1)
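        # Worked example of the heuristic above (numbers are assumed, purely illustrative):
        # - 12 of 15 products classified as ingredients -> ingredient_ratio = 0.80 >= 0.7,
        #   so model = 'individual_bakery' and confidence = min(0.80 * 1.2, 0.95) = 0.95.
        # - 2 ingredients and 6 semi-finished items out of 15 -> ingredient_ratio ≈ 0.13,
        #   semi_finished_ratio = 0.40, so model = 'central_baker_satellite' and
        #   confidence = min((0.40 + 0.87) / 2 * 1.2, 0.95) ≈ 0.76.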
recommendations = {
'individual_bakery': [
'Set up raw ingredient inventory management',
'Configure recipe cost calculation and production planning',
'Enable supplier relationships for flour, yeast, sugar, etc.',
'Set up full production workflow with proofing and baking schedules',
'Enable waste tracking for overproduction'
],
'central_baker_satellite': [
'Configure central baker delivery schedules',
'Set up semi-finished product inventory (frozen dough, par-baked items)',
'Enable finish-baking workflow and timing optimization',
'Track freshness and shelf-life for received products',
'Focus on customer demand forecasting for final products'
],
'retail_bakery': [
'Set up finished product supplier relationships',
'Configure delivery schedule tracking',
'Enable freshness monitoring and expiration management',
'Focus on sales forecasting and customer preferences'
],
'hybrid_bakery': [
'Configure both ingredient and semi-finished product management',
'Set up flexible production workflows',
'Enable both supplier and central baker relationships',
'Configure multi-tier inventory categories'
]
}
business_model_analysis = BusinessModelAnalysisResponse(
model=model,
confidence=confidence,
ingredient_count=ingredient_count,
finished_product_count=finished_count,
ingredient_ratio=ingredient_ratio,
recommendations=recommendations.get(model, [])
)
high_confidence_count = sum(1 for s in suggestions if s.confidence_score >= 0.7)
low_confidence_count = sum(1 for s in suggestions if s.confidence_score < 0.6)
response = BatchClassificationResponse(
suggestions=suggestion_responses,
business_model_analysis=business_model_analysis,
total_products=len(suggestions),
high_confidence_count=high_confidence_count,
low_confidence_count=low_confidence_count
)
logger.info("Batch classification complete",
total_products=len(suggestions),
business_model=model,
high_confidence=high_confidence_count,
low_confidence=low_confidence_count,
tenant_id=tenant_id)
return response
    except HTTPException:
        raise
    except Exception as e:
        logger.error("Failed batch classification",
                    error=str(e), products_count=len(request.products), tenant_id=tenant_id)
        raise HTTPException(status_code=500, detail=f"Batch classification failed: {str(e)}")
class BatchProductResolutionRequest(BaseModel):
"""Request for batch product resolution or creation"""
products: List[Dict[str, Any]] = Field(..., description="Products to resolve or create")
class BatchProductResolutionResponse(BaseModel):
"""Response with product name to inventory ID mappings"""
product_mappings: Dict[str, str] = Field(..., description="Product name to inventory product ID mapping")
created_count: int = Field(..., description="Number of products created")
resolved_count: int = Field(..., description="Number of existing products resolved")
failed_count: int = Field(0, description="Number of products that failed")
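# Example payload shapes for the batch resolution endpoint (illustrative only; UUIDs
# are placeholders):
#
#   request:  {"products": [{"name": "Baguette"}, {"name": "Rye flour"}]}
#   response: {"product_mappings": {"Baguette": "<uuid>", "Rye flour": "<uuid>"},
#              "created_count": 1, "resolved_count": 1, "failed_count": 0}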
@router.post(
route_builder.build_operations_route("resolve-or-create-products-batch"),
response_model=BatchProductResolutionResponse
)
async def resolve_or_create_products_batch(
request: BatchProductResolutionRequest,
tenant_id: UUID = Path(..., description="Tenant ID"),
current_user: Dict[str, Any] = Depends(get_current_user_dep),
db: AsyncSession = Depends(get_db),
classifier: ProductClassifierService = Depends(get_product_classifier)
):
"""Resolve or create multiple products in a single optimized operation for sales import"""
try:
if not request.products:
raise HTTPException(status_code=400, detail="No products provided")
service = InventoryService()
product_mappings = {}
created_count = 0
resolved_count = 0
failed_count = 0
for product_data in request.products:
product_name = product_data.get('name', product_data.get('product_name', ''))
if not product_name:
failed_count += 1
continue
try:
existing = await service.search_ingredients_by_name(product_name, tenant_id, db)
if existing:
product_mappings[product_name] = str(existing.id)
resolved_count += 1
logger.debug("Resolved existing product", product=product_name, tenant_id=tenant_id)
else:
# Use the product classifier to determine the appropriate type
suggestion = classifier.classify_product(product_name)
category = product_data.get('category', suggestion.category if hasattr(suggestion, 'category') else 'general')
ingredient_data = {
'name': product_name,
'type': suggestion.product_type.value if hasattr(suggestion, 'product_type') else 'finished_product',
'unit': suggestion.unit_of_measure.value if hasattr(suggestion, 'unit_of_measure') else 'unit',
'current_stock': 0,
'reorder_point': 0,
'cost_per_unit': 0,
'category': category
}
created = await service.create_ingredient_fast(ingredient_data, tenant_id, db)
product_mappings[product_name] = str(created.id)
created_count += 1
logger.debug("Created new product", product=product_name,
product_type=ingredient_data['type'], tenant_id=tenant_id)
except Exception as e:
logger.warning("Failed to resolve/create product",
product=product_name, error=str(e), tenant_id=tenant_id)
failed_count += 1
continue
logger.info("Batch product resolution complete",
total=len(request.products),
created=created_count,
resolved=resolved_count,
failed=failed_count,
tenant_id=tenant_id)
return BatchProductResolutionResponse(
product_mappings=product_mappings,
created_count=created_count,
resolved_count=resolved_count,
failed_count=failed_count
)
    except HTTPException:
        raise
    except Exception as e:
        logger.error("Batch product resolution failed",
                    error=str(e), tenant_id=tenant_id)
        raise HTTPException(status_code=500, detail=f"Batch resolution failed: {str(e)}")
# ================================================================
# NEW: BATCH API ENDPOINTS FOR ORCHESTRATOR
# ================================================================
class BatchIngredientsRequest(BaseModel):
"""Request for batch ingredient fetching"""
ingredient_ids: List[UUID] = Field(..., description="List of ingredient IDs to fetch")
class BatchIngredientsResponse(BaseModel):
"""Response with ingredient data"""
ingredients: List[Dict[str, Any]] = Field(..., description="List of ingredient data")
found_count: int = Field(..., description="Number of ingredients found")
missing_ids: List[str] = Field(default_factory=list, description="IDs not found")
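# Example shapes for the batch ingredient fetch (illustrative; IDs and values are
# placeholders):
#
#   request:  {"ingredient_ids": ["<uuid-1>", "<uuid-2>"]}
#   response: {"ingredients": [{"id": "<uuid-1>", "name": "Flour", "current_stock": 25.0, ...}],
#              "found_count": 1, "missing_ids": ["<uuid-2>"]}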
@router.post(
route_builder.build_operations_route("ingredients/batch"),
response_model=BatchIngredientsResponse
)
async def get_ingredients_batch(
request: BatchIngredientsRequest,
tenant_id: UUID = Path(..., description="Tenant ID"),
current_user: Dict[str, Any] = Depends(get_current_user_dep),
db: AsyncSession = Depends(get_db)
):
"""
Fetch multiple ingredients in a single request (for Orchestrator).
This endpoint reduces N API calls to 1, improving performance when
the orchestrator needs ingredient data for production/procurement planning.
"""
try:
if not request.ingredient_ids:
return BatchIngredientsResponse(
ingredients=[],
found_count=0,
missing_ids=[]
)
service = InventoryService()
ingredients = []
found_ids = set()
for ingredient_id in request.ingredient_ids:
try:
ingredient = await service.get_ingredient_by_id(ingredient_id, tenant_id, db)
if ingredient:
ingredients.append({
'id': str(ingredient.id),
'name': ingredient.name,
'type': ingredient.type,
'unit': ingredient.unit,
'current_stock': float(ingredient.current_stock) if ingredient.current_stock else 0,
'reorder_point': float(ingredient.reorder_point) if ingredient.reorder_point else 0,
'cost_per_unit': float(ingredient.cost_per_unit) if ingredient.cost_per_unit else 0,
'category': ingredient.category,
'is_active': ingredient.is_active,
'shelf_life_days': ingredient.shelf_life_days
})
found_ids.add(str(ingredient_id))
except Exception as e:
logger.warning(
"Failed to fetch ingredient in batch",
ingredient_id=str(ingredient_id),
error=str(e)
)
continue
missing_ids = [str(id) for id in request.ingredient_ids if str(id) not in found_ids]
logger.info(
"Batch ingredient fetch complete",
requested=len(request.ingredient_ids),
found=len(ingredients),
missing=len(missing_ids),
tenant_id=str(tenant_id)
)
return BatchIngredientsResponse(
ingredients=ingredients,
found_count=len(ingredients),
missing_ids=missing_ids
)
except Exception as e:
logger.error(
"Batch ingredient fetch failed",
error=str(e),
tenant_id=str(tenant_id)
)
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail=f"Batch ingredient fetch failed: {str(e)}"
)
class BatchStockLevelsRequest(BaseModel):
"""Request for batch stock level fetching"""
ingredient_ids: List[UUID] = Field(..., description="List of ingredient IDs")
class BatchStockLevelsResponse(BaseModel):
"""Response with stock level data"""
stock_levels: Dict[str, float] = Field(..., description="Ingredient ID to stock level mapping")
found_count: int = Field(..., description="Number of stock levels found")
@router.post(
route_builder.build_operations_route("stock-levels/batch"),
response_model=BatchStockLevelsResponse
)
async def get_stock_levels_batch(
request: BatchStockLevelsRequest,
tenant_id: UUID = Path(..., description="Tenant ID"),
current_user: Dict[str, Any] = Depends(get_current_user_dep),
db: AsyncSession = Depends(get_db)
):
"""
Fetch stock levels for multiple ingredients in a single request.
Optimized endpoint for Orchestrator to quickly check inventory levels
without making individual API calls per ingredient.
"""
try:
if not request.ingredient_ids:
return BatchStockLevelsResponse(
stock_levels={},
found_count=0
)
service = InventoryService()
stock_levels = {}
for ingredient_id in request.ingredient_ids:
try:
ingredient = await service.get_ingredient_by_id(ingredient_id, tenant_id, db)
if ingredient:
stock_levels[str(ingredient_id)] = float(ingredient.current_stock) if ingredient.current_stock else 0.0
except Exception as e:
logger.warning(
"Failed to fetch stock level in batch",
ingredient_id=str(ingredient_id),
error=str(e)
)
continue
logger.info(
"Batch stock level fetch complete",
requested=len(request.ingredient_ids),
found=len(stock_levels),
tenant_id=str(tenant_id)
)
return BatchStockLevelsResponse(
stock_levels=stock_levels,
found_count=len(stock_levels)
)
except Exception as e:
logger.error(
"Batch stock level fetch failed",
error=str(e),
tenant_id=str(tenant_id)
)
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail=f"Batch stock level fetch failed: {str(e)}"
)
# ============================================================================
# Tenant Data Deletion Operations (Internal Service Only)
# ============================================================================
from shared.auth.access_control import service_only_access
from shared.services.tenant_deletion import TenantDataDeletionResult
from app.services.tenant_deletion_service import InventoryTenantDeletionService
@router.delete(
route_builder.build_base_route("tenant/{tenant_id}", include_tenant_prefix=False),
response_model=dict
)
@service_only_access
async def delete_tenant_data(
tenant_id: str = Path(..., description="Tenant ID to delete data for"),
current_user: dict = Depends(get_current_user_dep),
db: AsyncSession = Depends(get_db)
):
"""
Delete all inventory data for a tenant (Internal service only)
This endpoint is called by the orchestrator during tenant deletion.
It permanently deletes all inventory-related data.
**WARNING**: This operation is irreversible!
Returns:
Deletion summary with counts of deleted records
"""
try:
logger.info("inventory.tenant_deletion.api_called", tenant_id=tenant_id)
deletion_service = InventoryTenantDeletionService(db)
result = await deletion_service.safe_delete_tenant_data(tenant_id)
if not result.success:
raise HTTPException(
status_code=500,
detail=f"Tenant data deletion failed: {', '.join(result.errors)}"
)
return {
"message": "Tenant data deletion completed successfully",
"summary": result.to_dict()
}
except HTTPException:
raise
except Exception as e:
logger.error("inventory.tenant_deletion.api_error",
tenant_id=tenant_id,
error=str(e),
exc_info=True)
raise HTTPException(
status_code=500,
detail=f"Failed to delete tenant data: {str(e)}"
)
@router.get(
route_builder.build_base_route("tenant/{tenant_id}/deletion-preview", include_tenant_prefix=False),
response_model=dict
)
@service_only_access
async def preview_tenant_data_deletion(
tenant_id: str = Path(..., description="Tenant ID to preview deletion for"),
current_user: dict = Depends(get_current_user_dep),
db: AsyncSession = Depends(get_db)
):
"""
Preview what data would be deleted for a tenant (dry-run)
This endpoint shows counts of all data that would be deleted
without actually deleting anything.
Returns:
Preview with counts of records to be deleted
"""
try:
logger.info("inventory.tenant_deletion.preview_called", tenant_id=tenant_id)
deletion_service = InventoryTenantDeletionService(db)
preview_data = await deletion_service.get_tenant_data_preview(tenant_id)
result = TenantDataDeletionResult(tenant_id=tenant_id, service_name=deletion_service.service_name)
result.deleted_counts = preview_data
result.success = True
return {
"tenant_id": tenant_id,
"service": "inventory-service",
"data_counts": result.deleted_counts,
"total_items": sum(result.deleted_counts.values())
}
except HTTPException:
raise
except Exception as e:
logger.error("inventory.tenant_deletion.preview_error",
tenant_id=tenant_id,
error=str(e),
exc_info=True)
raise HTTPException(
status_code=500,
detail=f"Failed to preview tenant data deletion: {str(e)}"
)

View File

@@ -0,0 +1,413 @@
"""
ML Insights API Endpoints for Inventory Service
Provides endpoints to trigger ML insight generation for:
- Safety stock optimization
- Inventory level recommendations
- Demand pattern analysis
"""
from fastapi import APIRouter, Depends, HTTPException, Request
from pydantic import BaseModel, Field
from typing import Optional, List
from uuid import UUID
from datetime import datetime, timedelta
import structlog
import pandas as pd
from app.core.database import get_db
from sqlalchemy.ext.asyncio import AsyncSession
logger = structlog.get_logger()
router = APIRouter(
prefix="/api/v1/tenants/{tenant_id}/inventory/ml/insights",
tags=["ML Insights"]
)
# ================================================================
# REQUEST/RESPONSE SCHEMAS
# ================================================================
class SafetyStockOptimizationRequest(BaseModel):
"""Request schema for safety stock optimization"""
product_ids: Optional[List[str]] = Field(
None,
description="Specific product IDs to optimize. If None, optimizes all products"
)
lookback_days: int = Field(
90,
description="Days of historical demand to analyze",
ge=30,
le=365
)
min_history_days: int = Field(
30,
description="Minimum days of history required",
ge=7,
le=180
)
class SafetyStockOptimizationResponse(BaseModel):
"""Response schema for safety stock optimization"""
success: bool
message: str
tenant_id: str
products_optimized: int
total_insights_generated: int
total_insights_posted: int
total_cost_savings: float
insights_by_product: dict
errors: List[str] = []
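# Example request body for the optimization endpoint (illustrative; these are the
# documented defaults):
#
#   {"product_ids": null, "lookback_days": 90, "min_history_days": 30}
#
# With product_ids omitted, the endpoint analyzes up to 10 products (see the query
# limit below) to keep the synchronous request from timing out.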
# ================================================================
# API ENDPOINTS
# ================================================================
@router.post("/optimize-safety-stock", response_model=SafetyStockOptimizationResponse)
async def trigger_safety_stock_optimization(
tenant_id: str,
request_data: SafetyStockOptimizationRequest,
request: Request,
db: AsyncSession = Depends(get_db)
):
"""
Trigger safety stock optimization for inventory products.
This endpoint:
1. Fetches historical demand data for specified products
2. Runs the SafetyStockInsightsOrchestrator to optimize levels
3. Generates insights about safety stock recommendations
4. Posts insights to AI Insights Service
5. Publishes recommendation events to RabbitMQ
Args:
tenant_id: Tenant UUID
request_data: Optimization parameters
request: FastAPI request (for app state access)
db: Database session
Returns:
SafetyStockOptimizationResponse with optimization results
"""
logger.info(
"ML insights safety stock optimization requested",
tenant_id=tenant_id,
product_ids=request_data.product_ids,
lookback_days=request_data.lookback_days
)
try:
# Import ML orchestrator
from app.ml.safety_stock_insights_orchestrator import SafetyStockInsightsOrchestrator
from app.models.inventory import Ingredient
from sqlalchemy import select
# Get event publisher from app state (if available)
event_publisher = getattr(request.app.state, 'event_publisher', None) if hasattr(request, 'app') else None
# Initialize orchestrator
orchestrator = SafetyStockInsightsOrchestrator(
event_publisher=event_publisher
)
# Get products to optimize
if request_data.product_ids:
query = select(Ingredient).where(
Ingredient.tenant_id == UUID(tenant_id),
Ingredient.id.in_([UUID(pid) for pid in request_data.product_ids])
)
else:
query = select(Ingredient).where(
Ingredient.tenant_id == UUID(tenant_id)
).limit(10) # Limit to prevent timeout
result = await db.execute(query)
products = result.scalars().all()
if not products:
return SafetyStockOptimizationResponse(
success=False,
message="No products found for optimization",
tenant_id=tenant_id,
products_optimized=0,
total_insights_generated=0,
total_insights_posted=0,
total_cost_savings=0.0,
insights_by_product={},
errors=["No products found"]
)
# Calculate date range for demand history
end_date = datetime.utcnow()
start_date = end_date - timedelta(days=request_data.lookback_days)
# Process each product
total_insights_generated = 0
total_insights_posted = 0
total_cost_savings = 0.0
insights_by_product = {}
errors = []
for product in products:
try:
product_id = str(product.id)
logger.info(f"Optimizing safety stock for {product.name} ({product_id})")
# Fetch real sales/demand history from sales service
from shared.clients.sales_client import SalesServiceClient
from app.core.config import settings
sales_client = SalesServiceClient(settings)
try:
# Fetch sales data for this product
sales_data = await sales_client.get_sales_data(
tenant_id=tenant_id,
product_id=product_id,
start_date=start_date.strftime('%Y-%m-%d'),
end_date=end_date.strftime('%Y-%m-%d')
)
if not sales_data:
logger.warning(
f"No sales history for product {product_id}, skipping"
)
continue
demand_data = []
for sale in sales_data:
demand_data.append({
'date': pd.to_datetime(sale.get('date') or sale.get('sale_date')),
'quantity': float(sale.get('quantity', 0))
})
if not demand_data:
logger.warning(
f"No valid demand data for product {product_id}, skipping"
)
continue
demand_history = pd.DataFrame(demand_data)
# Aggregate by date if there are multiple sales per day
demand_history = demand_history.groupby('date').agg({
'quantity': 'sum'
}).reset_index()
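                    # Illustrative effect of the aggregation above (assumed numbers):
                    # two same-day rows (2024-03-01, 4.0) and (2024-03-01, 2.5) collapse
                    # into a single row (2024-03-01, 6.5), leaving one row per calendar day.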
if len(demand_history) < request_data.min_history_days:
logger.warning(
f"Insufficient demand history for product {product_id}: "
f"{len(demand_history)} days < {request_data.min_history_days} required"
)
continue
except Exception as e:
logger.error(
f"Error fetching sales data for product {product_id}: {e}",
exc_info=True
)
continue
# Get lead time from supplier if available
lead_time_days = 7 # Default fallback
if product.supplier_id:
try:
from shared.clients.suppliers_client import SuppliersClient
suppliers_client = SuppliersClient()
supplier_data = await suppliers_client.get_supplier_by_id(
tenant_id=str(tenant_id),
supplier_id=str(product.supplier_id)
)
if supplier_data and 'standard_lead_time' in supplier_data:
lead_time_days = supplier_data['standard_lead_time']
logger.debug(
f"Using supplier lead time for product {product_id}",
lead_time=lead_time_days,
supplier_id=str(product.supplier_id)
)
except Exception as e:
logger.warning(
f"Failed to fetch supplier lead time for product {product_id}, using default",
error=str(e),
supplier_id=str(product.supplier_id)
)
# Product characteristics
product_characteristics = {
'lead_time_days': lead_time_days,
'shelf_life_days': 30 if product.is_perishable else 365,
'perishable': product.is_perishable
}
# Run optimization
results = await orchestrator.optimize_and_post_insights(
tenant_id=tenant_id,
inventory_product_id=product_id,
demand_history=demand_history,
product_characteristics=product_characteristics,
min_history_days=request_data.min_history_days
)
# Track results
total_insights_generated += results['insights_generated']
total_insights_posted += results['insights_posted']
if results.get('cost_savings'):
total_cost_savings += results['cost_savings']
insights_by_product[product_id] = {
'product_name': product.name,
'insights_posted': results['insights_posted'],
'optimal_safety_stock': results.get('optimal_safety_stock'),
'cost_savings': results.get('cost_savings', 0.0)
}
logger.info(
f"Product {product_id} optimization complete",
insights_posted=results['insights_posted'],
cost_savings=results.get('cost_savings', 0)
)
except Exception as e:
                error_msg = f"Error optimizing product {product.id}: {str(e)}"
logger.error(error_msg, exc_info=True)
errors.append(error_msg)
# Close orchestrator
await orchestrator.close()
# Build response
response = SafetyStockOptimizationResponse(
success=total_insights_posted > 0,
message=f"Successfully optimized {len(products)} products, generated {total_insights_posted} insights",
tenant_id=tenant_id,
products_optimized=len(products),
total_insights_generated=total_insights_generated,
total_insights_posted=total_insights_posted,
total_cost_savings=round(total_cost_savings, 2),
insights_by_product=insights_by_product,
errors=errors
)
logger.info(
"ML insights safety stock optimization complete",
tenant_id=tenant_id,
total_insights=total_insights_posted,
total_savings=total_cost_savings
)
return response
except Exception as e:
logger.error(
"ML insights safety stock optimization failed",
tenant_id=tenant_id,
error=str(e),
exc_info=True
)
raise HTTPException(
status_code=500,
detail=f"Safety stock optimization failed: {str(e)}"
)
@router.get("/health")
async def ml_insights_health():
"""Health check for ML insights endpoints"""
return {
"status": "healthy",
"service": "inventory-ml-insights",
"endpoints": [
"POST /ml/insights/optimize-safety-stock"
]
}
# ================================================================
# INTERNAL ENDPOINTS (for demo-session service)
# ================================================================
# Create a separate router for internal endpoints to avoid the tenant prefix
internal_router = APIRouter(
tags=["ML Insights - Internal"]
)
@internal_router.post("/api/v1/tenants/{tenant_id}/inventory/internal/ml/generate-safety-stock-insights")
async def generate_safety_stock_insights_internal(
tenant_id: str,
request: Request,
db: AsyncSession = Depends(get_db)
):
"""
Internal endpoint to trigger safety stock insights generation for demo sessions.
This endpoint is called by the demo-session service after cloning data.
It uses the same ML logic as the public endpoint but with optimized defaults.
Security: Protected by x-internal-service header check.
Args:
tenant_id: The tenant UUID
request: FastAPI request object
db: Database session
Returns:
{
"insights_posted": int,
"tenant_id": str,
"status": str
}
"""
# Verify internal service header
if not request or request.headers.get("x-internal-service") not in ["demo-session", "internal"]:
logger.warning("Unauthorized internal API call", tenant_id=tenant_id)
raise HTTPException(
status_code=403,
detail="This endpoint is for internal service use only"
)
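    # Illustrative call (host and port are deployment-specific assumptions):
    #   curl -X POST \
    #     -H "x-internal-service: demo-session" \
    #     http://inventory-service/api/v1/tenants/<tenant_id>/inventory/internal/ml/generate-safety-stock-insights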
logger.info("Internal safety stock insights generation triggered", tenant_id=tenant_id)
try:
# Use the existing safety stock optimization logic with sensible defaults
request_data = SafetyStockOptimizationRequest(
product_ids=None, # Analyze all products
lookback_days=90, # 3 months of history
min_history_days=30 # Minimum 30 days required
)
# Call the existing safety stock optimization endpoint logic
result = await trigger_safety_stock_optimization(
tenant_id=tenant_id,
request_data=request_data,
request=request,
db=db
)
# Return simplified response for internal use
return {
"insights_posted": result.total_insights_posted,
"tenant_id": tenant_id,
"status": "success" if result.success else "failed",
"message": result.message,
"products_optimized": result.products_optimized,
"total_cost_savings": result.total_cost_savings
}
except Exception as e:
logger.error(
"Internal safety stock insights generation failed",
tenant_id=tenant_id,
error=str(e),
exc_info=True
)
raise HTTPException(
status_code=500,
detail=f"Internal safety stock insights generation failed: {str(e)}"
)

View File

@@ -0,0 +1,334 @@
# services/inventory/app/api/stock_entries.py
"""
Stock Entries API - ATOMIC CRUD operations on Stock model
"""
from typing import List, Optional
from uuid import UUID
from fastapi import APIRouter, Depends, HTTPException, Query, Path, status
from sqlalchemy.ext.asyncio import AsyncSession
import structlog
from app.core.database import get_db
from app.services.inventory_service import InventoryService
from app.schemas.inventory import (
StockCreate,
StockUpdate,
StockResponse,
StockMovementCreate,
StockMovementResponse,
BulkStockCreate,
BulkStockResponse
)
from shared.auth.decorators import get_current_user_dep
from shared.auth.access_control import require_user_role, admin_role_required
from shared.routing import RouteBuilder
logger = structlog.get_logger()
route_builder = RouteBuilder('inventory')
router = APIRouter(tags=["stock-entries"])
def get_current_user_id(current_user: dict = Depends(get_current_user_dep)) -> UUID:
"""Extract user ID from current user context"""
user_id = current_user.get('user_id')
if not user_id:
if current_user.get('type') == 'service':
return None
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail="User ID not found in context"
)
try:
return UUID(user_id)
except (ValueError, TypeError):
return None
@router.post(
route_builder.build_base_route("stock"),
response_model=StockResponse,
status_code=status.HTTP_201_CREATED
)
@require_user_role(['admin', 'owner', 'member'])
async def add_stock(
stock_data: StockCreate,
tenant_id: UUID = Path(..., description="Tenant ID"),
current_user: dict = Depends(get_current_user_dep),
db: AsyncSession = Depends(get_db)
):
"""Add new stock entry"""
try:
user_id = get_current_user_id(current_user)
service = InventoryService()
stock = await service.add_stock(stock_data, tenant_id, user_id)
return stock
except ValueError as e:
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail=str(e)
)
except Exception as e:
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail="Failed to add stock"
)
@router.post(
route_builder.build_base_route("stock/bulk"),
response_model=BulkStockResponse,
status_code=status.HTTP_201_CREATED
)
@require_user_role(['admin', 'owner', 'member'])
async def bulk_add_stock(
bulk_data: BulkStockCreate,
tenant_id: UUID = Path(..., description="Tenant ID"),
current_user: dict = Depends(get_current_user_dep),
db: AsyncSession = Depends(get_db)
):
"""Bulk add stock entries for efficient batch operations"""
try:
user_id = get_current_user_id(current_user)
service = InventoryService()
result = await service.bulk_add_stock(bulk_data, tenant_id, user_id)
return result
except ValueError as e:
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail=str(e)
)
except Exception as e:
logger.error("Failed to bulk add stock", error=str(e))
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail="Failed to bulk add stock"
)
@router.get(
route_builder.build_base_route("stock"),
response_model=List[StockResponse]
)
async def get_stock(
tenant_id: UUID = Path(..., description="Tenant ID"),
skip: int = Query(0, ge=0, description="Number of records to skip"),
limit: int = Query(100, ge=1, le=1000, description="Number of records to return"),
ingredient_id: Optional[UUID] = Query(None, description="Filter by ingredient"),
available_only: bool = Query(True, description="Show only available stock"),
current_user: dict = Depends(get_current_user_dep),
db: AsyncSession = Depends(get_db)
):
"""Get stock entries with filtering"""
try:
service = InventoryService()
stock_entries = await service.get_stock(
tenant_id, skip, limit, ingredient_id, available_only
)
return stock_entries
except Exception as e:
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail="Failed to get stock entries"
)
# ===== STOCK MOVEMENTS ROUTES (must come before stock/{stock_id} route) =====
@router.get(
route_builder.build_base_route("stock/movements"),
response_model=List[StockMovementResponse]
)
async def get_stock_movements(
tenant_id: UUID = Path(..., description="Tenant ID"),
skip: int = Query(0, ge=0, description="Number of records to skip"),
limit: int = Query(100, ge=1, le=1000, description="Number of records to return"),
ingredient_id: Optional[str] = Query(None, description="Filter by ingredient"),
movement_type: Optional[str] = Query(None, description="Filter by movement type"),
current_user: dict = Depends(get_current_user_dep),
db: AsyncSession = Depends(get_db)
):
"""Get stock movements with filtering"""
logger.info("Stock movements endpoint called",
tenant_id=str(tenant_id),
ingredient_id=ingredient_id,
skip=skip,
limit=limit,
movement_type=movement_type)
# Validate and convert ingredient_id if provided
ingredient_uuid = None
if ingredient_id:
try:
ingredient_uuid = UUID(ingredient_id)
logger.info("Ingredient ID validated", ingredient_id=str(ingredient_uuid))
except (ValueError, AttributeError) as e:
logger.error("Invalid ingredient_id format",
ingredient_id=ingredient_id,
error=str(e))
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail=f"Invalid ingredient_id format: {ingredient_id}. Must be a valid UUID."
)
try:
service = InventoryService()
movements = await service.get_stock_movements(
tenant_id, skip, limit, ingredient_uuid, movement_type
)
logger.info("Successfully retrieved stock movements",
count=len(movements),
tenant_id=str(tenant_id))
return movements
except ValueError as e:
logger.error("Validation error in stock movements",
error=str(e),
tenant_id=str(tenant_id),
ingredient_id=ingredient_id)
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail=str(e)
)
except Exception as e:
logger.error("Failed to get stock movements",
error=str(e),
error_type=type(e).__name__,
tenant_id=str(tenant_id),
ingredient_id=ingredient_id)
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail=f"Failed to get stock movements: {str(e)}"
)
@router.post(
route_builder.build_base_route("stock/movements"),
response_model=StockMovementResponse,
status_code=status.HTTP_201_CREATED
)
@require_user_role(['admin', 'owner', 'member'])
async def create_stock_movement(
movement_data: StockMovementCreate,
tenant_id: UUID = Path(..., description="Tenant ID"),
current_user: dict = Depends(get_current_user_dep),
db: AsyncSession = Depends(get_db)
):
"""Create stock movement record"""
try:
user_id = get_current_user_id(current_user)
service = InventoryService()
movement = await service.create_stock_movement(movement_data, tenant_id, user_id)
return movement
except ValueError as e:
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail=str(e)
)
except Exception as e:
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail="Failed to create stock movement"
)
# ===== STOCK DETAIL ROUTES (must come after stock/movements routes) =====
@router.get(
route_builder.build_resource_detail_route("stock", "stock_id"),
response_model=StockResponse
)
async def get_stock_entry(
stock_id: UUID = Path(..., description="Stock entry ID"),
tenant_id: UUID = Path(..., description="Tenant ID"),
current_user: dict = Depends(get_current_user_dep),
db: AsyncSession = Depends(get_db)
):
"""Get specific stock entry"""
try:
service = InventoryService()
stock = await service.get_stock_entry(stock_id, tenant_id)
if not stock:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
detail="Stock entry not found"
)
return stock
except HTTPException:
raise
except Exception as e:
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail="Failed to get stock entry"
)
@router.put(
route_builder.build_resource_detail_route("stock", "stock_id"),
response_model=StockResponse
)
@require_user_role(['admin', 'owner', 'member'])
async def update_stock(
stock_data: StockUpdate,
stock_id: UUID = Path(..., description="Stock entry ID"),
tenant_id: UUID = Path(..., description="Tenant ID"),
current_user: dict = Depends(get_current_user_dep),
db: AsyncSession = Depends(get_db)
):
"""Update stock entry"""
try:
service = InventoryService()
stock = await service.update_stock(stock_id, stock_data, tenant_id)
if not stock:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
detail="Stock entry not found"
)
return stock
except ValueError as e:
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail=str(e)
)
except HTTPException:
raise
except Exception as e:
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail="Failed to update stock entry"
)
@router.delete(
route_builder.build_resource_detail_route("stock", "stock_id"),
status_code=status.HTTP_204_NO_CONTENT
)
@admin_role_required
async def delete_stock(
stock_id: UUID = Path(..., description="Stock entry ID"),
tenant_id: UUID = Path(..., description="Tenant ID"),
current_user: dict = Depends(get_current_user_dep),
db: AsyncSession = Depends(get_db)
):
"""Delete stock entry"""
try:
service = InventoryService()
deleted = await service.delete_stock(stock_id, tenant_id)
if not deleted:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
detail="Stock entry not found"
)
return None
except HTTPException:
raise
except Exception as e:
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail="Failed to delete stock entry"
)

View File

@@ -0,0 +1,459 @@
"""
Stock Receipt API Endpoints
Handles delivery receipt confirmation with lot-level tracking.
Critical for food safety compliance - captures expiration dates per lot.
"""
from fastapi import APIRouter, HTTPException, Depends, status
from pydantic import BaseModel, Field, validator
from typing import List, Optional
from uuid import UUID
from datetime import datetime, date
from decimal import Decimal
import structlog
from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession
from app.models.stock_receipt import StockReceipt, StockReceiptLineItem, StockLot, ReceiptStatus
from app.models.inventory import Stock, StockMovement, StockMovementType
from shared.database.dependencies import get_db
from shared.security import get_current_user
logger = structlog.get_logger()
router = APIRouter(prefix="/stock-receipts", tags=["stock-receipts"])
# ============================================================
# Request/Response Models
# ============================================================
class LotInput(BaseModel):
"""Individual lot details within a line item"""
lot_number: Optional[str] = None
supplier_lot_number: Optional[str] = None
quantity: Decimal = Field(..., gt=0)
unit_of_measure: str
expiration_date: date = Field(..., description="Required for food safety")
best_before_date: Optional[date] = None
warehouse_location: Optional[str] = None
storage_zone: Optional[str] = None
quality_notes: Optional[str] = None
class LineItemInput(BaseModel):
"""Line item input for stock receipt"""
ingredient_id: UUID
ingredient_name: Optional[str] = None
po_line_id: Optional[UUID] = None
expected_quantity: Decimal
actual_quantity: Decimal
unit_of_measure: str
discrepancy_reason: Optional[str] = None
unit_cost: Optional[Decimal] = None
lots: List[LotInput] = Field(..., min_items=1, description="At least one lot required")
@validator('lots')
def validate_lot_totals(cls, lots, values):
"""Ensure lot quantities sum to actual quantity"""
if 'actual_quantity' not in values:
return lots
total_lot_qty = sum(lot.quantity for lot in lots)
actual_qty = values['actual_quantity']
        if abs(total_lot_qty - actual_qty) > Decimal('0.01'):  # Allow small rounding differences
raise ValueError(
f"Lot quantities ({total_lot_qty}) must sum to actual quantity ({actual_qty})"
)
return lots
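# The validator above tolerates rounding noise but rejects real mismatches. For example
# (illustrative numbers): actual_quantity=10.00 with lots of 6.00 + 4.00 passes, while
# lots of 6.00 + 3.50 raise a validation error because they fall 0.50 short.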
class CreateStockReceiptRequest(BaseModel):
"""Create draft stock receipt"""
tenant_id: UUID
po_id: UUID
po_number: Optional[str] = None
received_by_user_id: UUID
supplier_id: Optional[UUID] = None
supplier_name: Optional[str] = None
notes: Optional[str] = None
line_items: List[LineItemInput] = Field(..., min_items=1)
class UpdateStockReceiptRequest(BaseModel):
"""Update draft stock receipt"""
notes: Optional[str] = None
line_items: Optional[List[LineItemInput]] = None
class ConfirmStockReceiptRequest(BaseModel):
"""Confirm stock receipt and update inventory"""
confirmed_by_user_id: UUID
# ============================================================
# API Endpoints
# ============================================================
@router.post("/", status_code=status.HTTP_201_CREATED)
async def create_stock_receipt(
request: CreateStockReceiptRequest,
db: AsyncSession = Depends(get_db),
current_user: dict = Depends(get_current_user)
):
"""
Create a draft stock receipt from a delivery.
Workflow:
1. User clicks "Mark as Received" on delivery alert
2. This endpoint creates draft receipt
3. Frontend opens StockReceiptModal with draft data
4. User fills in lot details
5. User saves draft (PUT endpoint) or confirms (POST /confirm)
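    Illustrative request body (IDs and dates are placeholders, not real data):
        {
          "tenant_id": "<uuid>", "po_id": "<uuid>", "received_by_user_id": "<uuid>",
          "line_items": [{
            "ingredient_id": "<uuid>", "expected_quantity": 25, "actual_quantity": 24.5,
            "unit_of_measure": "kg", "discrepancy_reason": "damaged bag",
            "lots": [{"quantity": 24.5, "unit_of_measure": "kg",
                      "expiration_date": "2026-06-01"}]
          }]
        }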
"""
try:
# Create receipt
receipt = StockReceipt(
tenant_id=request.tenant_id,
po_id=request.po_id,
po_number=request.po_number,
received_at=datetime.utcnow(),
received_by_user_id=request.received_by_user_id,
status=ReceiptStatus.DRAFT,
supplier_id=request.supplier_id,
supplier_name=request.supplier_name,
notes=request.notes,
has_discrepancies=False
)
db.add(receipt)
await db.flush() # Get receipt ID
# Create line items and lots
for line_input in request.line_items:
has_discrepancy = abs(line_input.expected_quantity - line_input.actual_quantity) > Decimal('0.01')
if has_discrepancy:
receipt.has_discrepancies = True
line_item = StockReceiptLineItem(
tenant_id=request.tenant_id,
receipt_id=receipt.id,
ingredient_id=line_input.ingredient_id,
ingredient_name=line_input.ingredient_name,
po_line_id=line_input.po_line_id,
expected_quantity=line_input.expected_quantity,
actual_quantity=line_input.actual_quantity,
unit_of_measure=line_input.unit_of_measure,
has_discrepancy=has_discrepancy,
discrepancy_reason=line_input.discrepancy_reason,
unit_cost=line_input.unit_cost,
total_cost=line_input.unit_cost * line_input.actual_quantity if line_input.unit_cost else None
)
db.add(line_item)
await db.flush() # Get line item ID
# Create lots
for lot_input in line_input.lots:
lot = StockLot(
tenant_id=request.tenant_id,
line_item_id=line_item.id,
lot_number=lot_input.lot_number,
supplier_lot_number=lot_input.supplier_lot_number,
quantity=lot_input.quantity,
unit_of_measure=lot_input.unit_of_measure,
expiration_date=lot_input.expiration_date,
best_before_date=lot_input.best_before_date,
warehouse_location=lot_input.warehouse_location,
storage_zone=lot_input.storage_zone,
quality_notes=lot_input.quality_notes
)
db.add(lot)
await db.commit()
await db.refresh(receipt)
logger.info(
"Stock receipt created",
receipt_id=str(receipt.id),
po_id=str(request.po_id),
line_items=len(request.line_items),
tenant_id=str(request.tenant_id)
)
return receipt.to_dict()
except Exception as e:
await db.rollback()
logger.error(
"Failed to create stock receipt",
error=str(e),
po_id=str(request.po_id)
)
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail=f"Failed to create stock receipt: {str(e)}"
)
@router.get("/{receipt_id}")
async def get_stock_receipt(
receipt_id: UUID,
db: AsyncSession = Depends(get_db),
current_user: dict = Depends(get_current_user)
):
"""
Retrieve stock receipt with all line items and lots.
Used to resume editing a draft receipt.
"""
try:
stmt = select(StockReceipt).where(
StockReceipt.id == receipt_id,
StockReceipt.tenant_id == current_user['tenant_id']
)
result = await db.execute(stmt)
receipt = result.scalar_one_or_none()
if not receipt:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
detail="Stock receipt not found"
)
return receipt.to_dict()
except HTTPException:
raise
except Exception as e:
logger.error(
"Failed to retrieve stock receipt",
receipt_id=str(receipt_id),
error=str(e)
)
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail=f"Failed to retrieve stock receipt: {str(e)}"
)
@router.put("/{receipt_id}")
async def update_stock_receipt(
receipt_id: UUID,
request: UpdateStockReceiptRequest,
db: AsyncSession = Depends(get_db),
current_user: dict = Depends(get_current_user)
):
"""
Update draft stock receipt.
Allows user to save progress while filling in lot details.
"""
try:
stmt = select(StockReceipt).where(
StockReceipt.id == receipt_id,
StockReceipt.tenant_id == current_user['tenant_id'],
StockReceipt.status == ReceiptStatus.DRAFT
)
result = await db.execute(stmt)
receipt = result.scalar_one_or_none()
if not receipt:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
detail="Draft stock receipt not found"
)
# Update notes if provided
if request.notes is not None:
receipt.notes = request.notes
# Update line items if provided
if request.line_items:
# Delete existing line items (cascade deletes lots)
for line_item in receipt.line_items:
await db.delete(line_item)
# Create new line items
for line_input in request.line_items:
has_discrepancy = abs(line_input.expected_quantity - line_input.actual_quantity) > Decimal('0.01')
line_item = StockReceiptLineItem(
tenant_id=current_user['tenant_id'],
receipt_id=receipt.id,
ingredient_id=line_input.ingredient_id,
ingredient_name=line_input.ingredient_name,
po_line_id=line_input.po_line_id,
expected_quantity=line_input.expected_quantity,
actual_quantity=line_input.actual_quantity,
unit_of_measure=line_input.unit_of_measure,
has_discrepancy=has_discrepancy,
discrepancy_reason=line_input.discrepancy_reason,
unit_cost=line_input.unit_cost,
total_cost=line_input.unit_cost * line_input.actual_quantity if line_input.unit_cost else None
)
db.add(line_item)
await db.flush()
# Create lots
for lot_input in line_input.lots:
lot = StockLot(
tenant_id=current_user['tenant_id'],
line_item_id=line_item.id,
lot_number=lot_input.lot_number,
supplier_lot_number=lot_input.supplier_lot_number,
quantity=lot_input.quantity,
unit_of_measure=lot_input.unit_of_measure,
expiration_date=lot_input.expiration_date,
best_before_date=lot_input.best_before_date,
warehouse_location=lot_input.warehouse_location,
storage_zone=lot_input.storage_zone,
quality_notes=lot_input.quality_notes
)
db.add(lot)
await db.commit()
await db.refresh(receipt)
logger.info(
"Stock receipt updated",
receipt_id=str(receipt_id),
tenant_id=str(current_user['tenant_id'])
)
return receipt.to_dict()
except HTTPException:
raise
except Exception as e:
await db.rollback()
logger.error(
"Failed to update stock receipt",
receipt_id=str(receipt_id),
error=str(e)
)
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail=f"Failed to update stock receipt: {str(e)}"
)
@router.post("/{receipt_id}/confirm")
async def confirm_stock_receipt(
receipt_id: UUID,
request: ConfirmStockReceiptRequest,
db: AsyncSession = Depends(get_db),
current_user: dict = Depends(get_current_user)
):
"""
Confirm stock receipt and update inventory.
This finalizes the receipt:
1. Creates Stock records for each lot
2. Creates StockMovement records (PURCHASE type)
3. Marks receipt as CONFIRMED
4. Updates PO status to RECEIVED (via procurement service)
"""
try:
stmt = select(StockReceipt).where(
StockReceipt.id == receipt_id,
StockReceipt.tenant_id == current_user['tenant_id'],
StockReceipt.status == ReceiptStatus.DRAFT
)
result = await db.execute(stmt)
receipt = result.scalar_one_or_none()
if not receipt:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
detail="Draft stock receipt not found"
)
# Process each line item and its lots
for line_item in receipt.line_items:
for lot in line_item.lots:
# Create Stock record
stock = Stock(
tenant_id=current_user['tenant_id'],
ingredient_id=line_item.ingredient_id,
supplier_id=receipt.supplier_id,
batch_number=f"RCV-{receipt_id}-{lot.id}",
lot_number=lot.lot_number,
supplier_batch_ref=lot.supplier_lot_number,
production_stage='raw_ingredient',
current_quantity=float(lot.quantity),
reserved_quantity=0.0,
available_quantity=float(lot.quantity),
received_date=receipt.received_at,
expiration_date=datetime.combine(lot.expiration_date, datetime.min.time()),
best_before_date=datetime.combine(lot.best_before_date, datetime.min.time()) if lot.best_before_date else None,
unit_cost=line_item.unit_cost,
total_cost=line_item.unit_cost * lot.quantity if line_item.unit_cost else None,
storage_location=lot.warehouse_location,
warehouse_zone=lot.storage_zone,
is_available=True,
is_expired=False,
quality_status="good"
)
db.add(stock)
await db.flush()
# Link lot to stock
lot.stock_id = stock.id
# Create StockMovement record
movement = StockMovement(
tenant_id=current_user['tenant_id'],
ingredient_id=line_item.ingredient_id,
stock_id=stock.id,
movement_type=StockMovementType.PURCHASE,
quantity=float(lot.quantity),
unit_cost=line_item.unit_cost,
total_cost=line_item.unit_cost * lot.quantity if line_item.unit_cost else None,
quantity_before=0.0,
quantity_after=float(lot.quantity),
reference_number=receipt.po_number,
supplier_id=receipt.supplier_id,
notes=f"Stock receipt {receipt_id}",
movement_date=receipt.received_at
)
db.add(movement)
# Mark receipt as confirmed
receipt.status = ReceiptStatus.CONFIRMED
receipt.confirmed_at = datetime.utcnow()
await db.commit()
logger.info(
"Stock receipt confirmed",
receipt_id=str(receipt_id),
po_id=str(receipt.po_id),
tenant_id=str(current_user['tenant_id'])
)
return {
"status": "success",
"receipt_id": str(receipt_id),
"message": "Stock receipt confirmed and inventory updated"
}
except HTTPException:
raise
except Exception as e:
await db.rollback()
logger.error(
"Failed to confirm stock receipt",
receipt_id=str(receipt_id),
error=str(e)
)
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail=f"Failed to confirm stock receipt: {str(e)}"
)

View File

@@ -0,0 +1,398 @@
# ================================================================
# services/inventory/app/api/sustainability.py
# ================================================================
"""
Inventory Sustainability API - Microservices Architecture
Provides inventory-specific sustainability metrics (waste tracking, expiry alerts)
Following microservices principles: each service owns its domain data
"""
from datetime import datetime, timedelta
from typing import Optional
from uuid import UUID
from fastapi import APIRouter, Depends, HTTPException, Query, Path, status
from sqlalchemy.ext.asyncio import AsyncSession
import structlog
from shared.auth.decorators import get_current_user_dep
from app.core.database import get_db
from app.repositories.stock_movement_repository import StockMovementRepository
from app.repositories.stock_repository import StockRepository
logger = structlog.get_logger()
router = APIRouter(tags=["sustainability"])
# ===== INVENTORY SUSTAINABILITY ENDPOINTS =====
@router.get(
"/api/v1/tenants/{tenant_id}/inventory/sustainability/waste-metrics",
summary="Get Inventory Waste Metrics",
description="Get inventory-specific waste metrics from stock movements and expired items"
)
async def get_inventory_waste_metrics(
tenant_id: UUID = Path(..., description="Tenant ID"),
start_date: Optional[datetime] = Query(None, description="Start date for metrics (default: 30 days ago)"),
end_date: Optional[datetime] = Query(None, description="End date for metrics (default: now)"),
current_user: dict = Depends(get_current_user_dep),
db: AsyncSession = Depends(get_db)
):
"""
Get inventory waste metrics including:
- Waste from stock movements (expired, damaged, contaminated, spillage)
- Total waste quantity and cost
- Breakdown by waste reason
- Number of waste incidents
**Domain**: Inventory Service owns this data
**Use case**: Frontend aggregates with production service waste metrics
"""
try:
# Default to last 30 days
if not end_date:
end_date = datetime.now()
if not start_date:
start_date = end_date - timedelta(days=30)
# Get inventory waste from stock movements
stock_movement_repo = StockMovementRepository(db)
# Get waste movements using explicit date range
waste_movements = await stock_movement_repo.get_waste_movements(
tenant_id=tenant_id,
start_date=start_date,
end_date=end_date,
limit=1000
)
# Calculate period days
days_back = (end_date - start_date).days
# Calculate totals
total_waste_kg = 0.0
total_waste_cost_eur = 0.0
waste_by_reason = {
'expired': 0.0,
'damaged': 0.0,
'contaminated': 0.0,
'spillage': 0.0,
'other': 0.0
}
for movement in (waste_movements or []):
quantity = float(movement.quantity) if movement.quantity else 0.0
total_waste_kg += quantity
# Add to cost if available
if movement.total_cost:
total_waste_cost_eur += float(movement.total_cost)
# Categorize by reason
reason = movement.reason_code or 'other'
if reason in waste_by_reason:
waste_by_reason[reason] += quantity
else:
waste_by_reason['other'] += quantity
result = {
'inventory_waste_kg': round(total_waste_kg, 2),
'waste_cost_eur': round(total_waste_cost_eur, 2),
'waste_by_reason': {
key: round(val, 2) for key, val in waste_by_reason.items()
},
'waste_movements_count': len(waste_movements) if waste_movements else 0,
'period': {
'start_date': start_date.isoformat(),
'end_date': end_date.isoformat(),
'days': days_back
}
}
logger.info(
"Inventory waste metrics retrieved",
tenant_id=str(tenant_id),
waste_kg=result['inventory_waste_kg'],
movements=result['waste_movements_count']
)
return result
except Exception as e:
logger.error(
"Error getting inventory waste metrics",
tenant_id=str(tenant_id),
error=str(e)
)
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail=f"Failed to retrieve inventory waste metrics: {str(e)}"
)
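# Example response shape from the waste-metrics endpoint (illustrative values):
#   {"inventory_waste_kg": 12.4, "waste_cost_eur": 38.7,
#    "waste_by_reason": {"expired": 9.0, "damaged": 3.4, "contaminated": 0.0,
#                        "spillage": 0.0, "other": 0.0},
#    "waste_movements_count": 5,
#    "period": {"start_date": "...", "end_date": "...", "days": 30}}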
@router.get(
"/api/v1/tenants/{tenant_id}/inventory/sustainability/expiry-alerts",
summary="Get Expiry Alerts",
description="Get items at risk of expiring soon (waste prevention opportunities)"
)
async def get_expiry_alerts(
tenant_id: UUID = Path(..., description="Tenant ID"),
days_ahead: int = Query(7, ge=1, le=30, description="Days ahead to check for expiry"),
current_user: dict = Depends(get_current_user_dep),
db: AsyncSession = Depends(get_db)
):
"""
Get items at risk of expiring within the specified time window.
**Purpose**: Waste prevention and FIFO compliance
**Returns**:
- Items expiring soon
- Potential waste value
- Recommended actions
"""
try:
stock_repo = StockRepository(db)
# Get stock items expiring soon
expiring_soon = await stock_repo.get_expiring_stock(
tenant_id=tenant_id,
days_ahead=days_ahead
)
at_risk_items = []
total_at_risk_kg = 0.0
total_at_risk_value_eur = 0.0
for stock in (expiring_soon or []):
quantity = float(stock.quantity) if stock.quantity else 0.0
unit_cost = float(stock.unit_cost) if stock.unit_cost else 0.0
total_value = quantity * unit_cost
total_at_risk_kg += quantity
total_at_risk_value_eur += total_value
at_risk_items.append({
'stock_id': str(stock.id),
'ingredient_id': str(stock.ingredient_id),
'ingredient_name': stock.ingredient.name if stock.ingredient else 'Unknown',
'quantity': round(quantity, 2),
'unit': stock.unit,
'expiry_date': stock.expiry_date.isoformat() if stock.expiry_date else None,
'days_until_expiry': (stock.expiry_date - datetime.now()).days if stock.expiry_date else None,
'value_eur': round(total_value, 2),
'location': stock.location or 'unspecified'
})
result = {
'at_risk_items': at_risk_items,
'total_items': len(at_risk_items),
'total_at_risk_kg': round(total_at_risk_kg, 2),
'total_at_risk_value_eur': round(total_at_risk_value_eur, 2),
'alert_window_days': days_ahead,
'checked_at': datetime.now().isoformat()
}
logger.info(
"Expiry alerts retrieved",
tenant_id=str(tenant_id),
at_risk_items=result['total_items'],
at_risk_value=result['total_at_risk_value_eur']
)
return result
except Exception as e:
logger.error(
"Error getting expiry alerts",
tenant_id=str(tenant_id),
error=str(e)
)
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail=f"Failed to retrieve expiry alerts: {str(e)}"
)
@router.get(
"/api/v1/tenants/{tenant_id}/inventory/sustainability/waste-events",
summary="Get Waste Event Log",
description="Get detailed waste event history with reasons, costs, and timestamps"
)
async def get_waste_events(
tenant_id: UUID = Path(..., description="Tenant ID"),
limit: int = Query(50, ge=1, le=500, description="Maximum number of events to return"),
offset: int = Query(0, ge=0, description="Number of events to skip"),
start_date: Optional[datetime] = Query(None, description="Start date filter"),
end_date: Optional[datetime] = Query(None, description="End date filter"),
reason_code: Optional[str] = Query(None, description="Filter by reason code (expired, damaged, etc.)"),
current_user: dict = Depends(get_current_user_dep),
db: AsyncSession = Depends(get_db)
):
"""
Get detailed waste event log for trend analysis and auditing.
**Use cases**:
- Root cause analysis
- Waste trend identification
- Compliance auditing
- Process improvement
"""
try:
stock_movement_repo = StockMovementRepository(db)
# Default to last 90 days if no date range
if not end_date:
end_date = datetime.now()
if not start_date:
start_date = end_date - timedelta(days=90)
days_back = (end_date - start_date).days
# Get waste movements
waste_movements = await stock_movement_repo.get_waste_movements(
tenant_id=tenant_id,
days_back=days_back,
limit=limit + offset # Get extra for offset handling
)
# Filter by reason if specified
if reason_code and waste_movements:
waste_movements = [
m for m in waste_movements
if m.reason_code == reason_code
]
        # Apply pagination; counts reflect only the fetched window (limit + offset), not the full history
        total_count = len(waste_movements) if waste_movements else 0
        paginated_movements = (waste_movements or [])[offset:offset + limit]
# Format events
events = []
for movement in paginated_movements:
events.append({
'event_id': str(movement.id),
'ingredient_id': str(movement.ingredient_id),
'ingredient_name': movement.ingredient.name if movement.ingredient else 'Unknown',
'quantity': float(movement.quantity) if movement.quantity else 0.0,
'unit': movement.unit,
'reason_code': movement.reason_code,
'total_cost_eur': float(movement.total_cost) if movement.total_cost else 0.0,
'movement_date': movement.movement_date.isoformat() if movement.movement_date else None,
'notes': movement.notes or '',
'created_by': movement.created_by
})
result = {
'events': events,
'total_count': total_count,
'returned_count': len(events),
'offset': offset,
'limit': limit,
'period': {
'start_date': start_date.isoformat(),
'end_date': end_date.isoformat()
},
'filter': {
'reason_code': reason_code
}
}
logger.info(
"Waste events retrieved",
tenant_id=str(tenant_id),
total_events=total_count,
returned=len(events)
)
return result
except Exception as e:
logger.error(
"Error getting waste events",
tenant_id=str(tenant_id),
error=str(e)
)
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail=f"Failed to retrieve waste events: {str(e)}"
)
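# Illustrative paging pattern for the waste-event log above (client-side sketch; client,
# base_url, auth and process are placeholders). Because total_count reflects only the fetched
# window, returned_count is the safer signal to stop on:
#
#     params = {"limit": 50, "offset": 0, "reason_code": "expired"}
#     while True:
#         resp = await client.get(f"{base_url}/inventory/sustainability/waste-events",
#                                 params=params, headers=auth)
#         data = resp.json()
#         process(data["events"])
#         if data["returned_count"] < params["limit"]:
#             break
#         params["offset"] += params["limit"]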
@router.get(
"/api/v1/tenants/{tenant_id}/inventory/sustainability/summary",
summary="Get Inventory Sustainability Summary",
description="Get condensed inventory sustainability data for dashboard widgets"
)
async def get_inventory_sustainability_summary(
tenant_id: UUID = Path(..., description="Tenant ID"),
days: int = Query(30, ge=1, le=365, description="Number of days to analyze"),
current_user: dict = Depends(get_current_user_dep),
db: AsyncSession = Depends(get_db)
):
"""
Get summary of inventory sustainability metrics optimized for widgets.
**Returns**: Condensed version of waste metrics and expiry alerts
**Use case**: Dashboard widgets, quick overview cards
"""
try:
end_date = datetime.now()
start_date = end_date - timedelta(days=days)
# Get waste metrics
stock_movement_repo = StockMovementRepository(db)
waste_movements = await stock_movement_repo.get_waste_movements(
tenant_id=tenant_id,
days_back=days,
limit=1000
)
total_waste_kg = sum(
float(m.quantity) for m in (waste_movements or [])
if m.quantity
)
total_waste_cost = sum(
float(m.total_cost) for m in (waste_movements or [])
if m.total_cost
)
# Get expiry alerts
stock_repo = StockRepository(db)
expiring_soon = await stock_repo.get_expiring_stock(
tenant_id=tenant_id,
days_ahead=7
)
at_risk_count = len(expiring_soon) if expiring_soon else 0
result = {
'inventory_waste_kg': round(total_waste_kg, 2),
'waste_cost_eur': round(total_waste_cost, 2),
'waste_incidents': len(waste_movements) if waste_movements else 0,
'items_at_risk_expiry': at_risk_count,
'period_days': days,
'period': {
'start_date': start_date.isoformat(),
'end_date': end_date.isoformat()
}
}
logger.info(
"Inventory sustainability summary retrieved",
tenant_id=str(tenant_id),
waste_kg=result['inventory_waste_kg']
)
return result
except Exception as e:
logger.error(
"Error getting inventory sustainability summary",
tenant_id=str(tenant_id),
error=str(e)
)
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail=f"Failed to retrieve inventory sustainability summary: {str(e)}"
)
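# Illustrative dashboard-widget call for the summary endpoint above (httpx sketch; base_url,
# token and tenant_id values are placeholders, not defined in this service):
#
#     import httpx
#
#     async def fetch_inventory_sustainability_widget(base_url: str, token: str, tenant_id: str) -> dict:
#         url = f"{base_url}/api/v1/tenants/{tenant_id}/inventory/sustainability/summary"
#         async with httpx.AsyncClient() as client:
#             resp = await client.get(url, params={"days": 30},
#                                     headers={"Authorization": f"Bearer {token}"})
#             resp.raise_for_status()
#             return resp.json()  # inventory_waste_kg, waste_cost_eur, items_at_risk_expiry, ...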

View File

@@ -0,0 +1,240 @@
# services/inventory/app/api/temperature_logs.py
"""
Temperature Logs API - ATOMIC CRUD operations on TemperatureLog model
"""
from datetime import datetime
from typing import List, Optional
from uuid import UUID
from fastapi import APIRouter, Depends, HTTPException, Query, Path, status
from sqlalchemy import text
from sqlalchemy.ext.asyncio import AsyncSession
import structlog
from shared.auth.decorators import get_current_user_dep
from shared.auth.access_control import require_user_role
from shared.routing import RouteBuilder
from app.core.database import get_db
from app.services.food_safety_service import FoodSafetyService
from app.schemas.food_safety import (
TemperatureLogCreate,
TemperatureLogResponse,
BulkTemperatureLogCreate
)
logger = structlog.get_logger()
route_builder = RouteBuilder('inventory')
router = APIRouter(tags=["temperature-logs"])
async def get_food_safety_service() -> FoodSafetyService:
"""Get food safety service instance"""
return FoodSafetyService()
@router.post(
route_builder.build_base_route("food-safety/temperature"),
response_model=TemperatureLogResponse,
status_code=status.HTTP_201_CREATED
)
@require_user_role(['admin', 'owner', 'member'])
async def log_temperature(
temp_data: TemperatureLogCreate,
tenant_id: UUID = Path(...),
current_user: dict = Depends(get_current_user_dep),
food_safety_service: FoodSafetyService = Depends(get_food_safety_service),
db: AsyncSession = Depends(get_db)
):
"""Log a temperature reading"""
try:
temp_data.tenant_id = tenant_id
temp_log = await food_safety_service.log_temperature(
db,
temp_data,
user_id=UUID(current_user["user_id"])
)
logger.info("Temperature logged",
location=temp_data.storage_location,
temperature=temp_data.temperature_celsius)
return temp_log
except Exception as e:
logger.error("Error logging temperature", error=str(e))
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail="Failed to log temperature"
)
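# Illustrative request body for the temperature-log endpoint above. Only storage_location and
# temperature_celsius are referenced in this module; any further TemperatureLogCreate fields
# are assumptions about the schema and are not shown here:
#
#     payload = {
#         "storage_location": "walk-in-fridge-1",
#         "temperature_celsius": 3.5,
#     }
#     # POSTed to the route the builder is assumed to expand to:
#     # /api/v1/tenants/{tenant_id}/inventory/food-safety/temperature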
@router.post(
route_builder.build_base_route("food-safety/temperature/bulk"),
response_model=List[TemperatureLogResponse],
status_code=status.HTTP_201_CREATED
)
@require_user_role(['admin', 'owner', 'member'])
async def bulk_log_temperatures(
bulk_data: BulkTemperatureLogCreate,
tenant_id: UUID = Path(...),
current_user: dict = Depends(get_current_user_dep),
food_safety_service: FoodSafetyService = Depends(get_food_safety_service),
db: AsyncSession = Depends(get_db)
):
"""Bulk log temperature readings"""
try:
for reading in bulk_data.readings:
reading.tenant_id = tenant_id
temp_logs = await food_safety_service.bulk_log_temperatures(
db,
bulk_data.readings,
user_id=UUID(current_user["user_id"])
)
logger.info("Bulk temperature logging completed",
count=len(bulk_data.readings))
return temp_logs
except Exception as e:
logger.error("Error bulk logging temperatures", error=str(e))
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail="Failed to bulk log temperatures"
)
@router.get(
route_builder.build_base_route("food-safety/temperature"),
response_model=List[TemperatureLogResponse]
)
async def get_temperature_logs(
tenant_id: UUID = Path(...),
location: Optional[str] = Query(None, description="Filter by storage location"),
equipment_id: Optional[str] = Query(None, description="Filter by equipment ID"),
date_from: Optional[datetime] = Query(None, description="Start date for filtering"),
date_to: Optional[datetime] = Query(None, description="End date for filtering"),
violations_only: bool = Query(False, description="Show only temperature violations"),
skip: int = Query(0, ge=0, description="Number of records to skip"),
limit: int = Query(100, ge=1, le=1000, description="Number of records to return"),
current_user: dict = Depends(get_current_user_dep),
db: AsyncSession = Depends(get_db)
):
"""Get temperature logs with filtering"""
try:
where_conditions = ["tenant_id = :tenant_id"]
params = {"tenant_id": tenant_id}
if location:
where_conditions.append("storage_location ILIKE :location")
params["location"] = f"%{location}%"
if equipment_id:
where_conditions.append("equipment_id = :equipment_id")
params["equipment_id"] = equipment_id
if date_from:
where_conditions.append("recorded_at >= :date_from")
params["date_from"] = date_from
if date_to:
where_conditions.append("recorded_at <= :date_to")
params["date_to"] = date_to
if violations_only:
where_conditions.append("is_within_range = false")
where_clause = " AND ".join(where_conditions)
        # WHERE fragments are hardcoded above; only values are bound as parameters
        query = text(f"""
            SELECT * FROM temperature_logs
            WHERE {where_clause}
            ORDER BY recorded_at DESC
            LIMIT :limit OFFSET :skip
        """)
        params.update({"limit": limit, "skip": skip})
        result = await db.execute(query, params)
        logs = result.fetchall()
        return [
            TemperatureLogResponse(**dict(log._mapping))
            for log in logs
        ]
except Exception as e:
logger.error("Error getting temperature logs", error=str(e))
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail="Failed to retrieve temperature logs"
)
@router.get(
route_builder.build_resource_detail_route("food-safety/temperature", "log_id"),
response_model=TemperatureLogResponse
)
async def get_temperature_log(
log_id: UUID = Path(...),
tenant_id: UUID = Path(...),
current_user: dict = Depends(get_current_user_dep),
db: AsyncSession = Depends(get_db)
):
"""Get specific temperature log"""
try:
query = "SELECT * FROM temperature_logs WHERE id = :log_id AND tenant_id = :tenant_id"
result = await db.execute(query, {"log_id": log_id, "tenant_id": tenant_id})
log = result.fetchone()
if not log:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
detail="Temperature log not found"
)
        return TemperatureLogResponse(**dict(log._mapping))
except HTTPException:
raise
except Exception as e:
logger.error("Error getting temperature log", error=str(e))
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail="Failed to retrieve temperature log"
)
@router.delete(
route_builder.build_resource_detail_route("food-safety/temperature", "log_id"),
status_code=status.HTTP_204_NO_CONTENT
)
@require_user_role(['admin', 'owner'])
async def delete_temperature_log(
log_id: UUID = Path(...),
tenant_id: UUID = Path(...),
current_user: dict = Depends(get_current_user_dep),
db: AsyncSession = Depends(get_db)
):
"""Delete temperature log"""
try:
query = "DELETE FROM temperature_logs WHERE id = :log_id AND tenant_id = :tenant_id"
result = await db.execute(query, {"log_id": log_id, "tenant_id": tenant_id})
if result.rowcount == 0:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
detail="Temperature log not found"
)
await db.commit()
return None
except HTTPException:
raise
except Exception as e:
logger.error("Error deleting temperature log", error=str(e))
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail="Failed to delete temperature log"
)

View File

@@ -0,0 +1,222 @@
# services/inventory/app/api/transformations.py
"""
API endpoints for product transformations
Following standardized URL structure with role-based access control
"""
from typing import List, Optional
from uuid import UUID
from fastapi import APIRouter, Depends, HTTPException, Query, Path, status
from sqlalchemy.ext.asyncio import AsyncSession
import structlog
from app.core.database import get_db
from app.services.transformation_service import TransformationService
from app.schemas.inventory import (
ProductTransformationCreate,
ProductTransformationResponse
)
from app.models.inventory import ProductionStage
from shared.auth.decorators import get_current_user_dep
from shared.auth.access_control import require_user_role, admin_role_required
from shared.routing import RouteBuilder
logger = structlog.get_logger()
# Create route builder for consistent URL structure
route_builder = RouteBuilder('inventory')
router = APIRouter(tags=["transformations"])
# Helper function to extract user ID from user object
def get_current_user_id(current_user: dict = Depends(get_current_user_dep)) -> UUID:
"""Extract user ID from current user context"""
user_id = current_user.get('user_id')
if not user_id:
# Handle service tokens that don't have UUID user_ids
if current_user.get('type') == 'service':
return None
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail="User ID not found in context"
)
try:
return UUID(user_id)
except (ValueError, TypeError):
return None
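# Behaviour sketch for the helper above (token contents illustrative):
#     {"user_id": "6f1b2c3d-0000-4000-8000-000000000001"} -> UUID("6f1b2c3d-...")
#     {"type": "service"}                                  -> None (service token, no user UUID)
#     {}                                                   -> raises HTTP 401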
@router.post(
route_builder.build_base_route("transformations"),
response_model=ProductTransformationResponse,
status_code=status.HTTP_201_CREATED
)
@require_user_role(['admin', 'owner', 'member'])
async def create_transformation(
transformation_data: ProductTransformationCreate,
tenant_id: UUID = Path(..., description="Tenant ID"),
current_user: dict = Depends(get_current_user_dep),
db: AsyncSession = Depends(get_db)
):
"""Create a new product transformation (e.g., par-baked to fully baked)"""
try:
# Extract user ID - handle service tokens
user_id = get_current_user_id(current_user)
service = TransformationService()
transformation = await service.create_transformation(transformation_data, tenant_id, user_id)
return transformation
except ValueError as e:
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail=str(e)
)
except Exception as e:
logger.error("Failed to create transformation", error=str(e), tenant_id=tenant_id)
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail="Failed to create transformation"
)
@router.get(
route_builder.build_base_route("transformations"),
response_model=List[ProductTransformationResponse]
)
async def get_transformations(
tenant_id: UUID = Path(..., description="Tenant ID"),
skip: int = Query(0, ge=0, description="Number of records to skip"),
limit: int = Query(100, ge=1, le=1000, description="Number of records to return"),
ingredient_id: Optional[UUID] = Query(None, description="Filter by ingredient (source or target)"),
source_stage: Optional[ProductionStage] = Query(None, description="Filter by source production stage"),
target_stage: Optional[ProductionStage] = Query(None, description="Filter by target production stage"),
days_back: Optional[int] = Query(None, ge=1, le=365, description="Filter by days back from today"),
db: AsyncSession = Depends(get_db)
):
"""Get product transformations with filtering"""
try:
service = TransformationService()
transformations = await service.get_transformations(
tenant_id, skip, limit, ingredient_id, source_stage, target_stage, days_back
)
return transformations
except Exception as e:
logger.error("Failed to get transformations", error=str(e), tenant_id=tenant_id)
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail="Failed to get transformations"
)
@router.get(
route_builder.build_resource_detail_route("transformations", "transformation_id"),
response_model=ProductTransformationResponse
)
async def get_transformation(
transformation_id: UUID = Path(..., description="Transformation ID"),
tenant_id: UUID = Path(..., description="Tenant ID"),
db: AsyncSession = Depends(get_db)
):
"""Get specific transformation by ID"""
try:
service = TransformationService()
transformation = await service.get_transformation(transformation_id, tenant_id)
if not transformation:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
detail="Transformation not found"
)
return transformation
except HTTPException:
raise
except Exception as e:
logger.error("Failed to get transformation", error=str(e), transformation_id=transformation_id)
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail="Failed to get transformation"
)
@router.get(
route_builder.build_base_route("transformations/summary"),
response_model=dict
)
async def get_transformation_summary(
tenant_id: UUID = Path(..., description="Tenant ID"),
days_back: int = Query(30, ge=1, le=365, description="Days back for summary"),
db: AsyncSession = Depends(get_db)
):
"""Get transformation summary for dashboard"""
try:
service = TransformationService()
summary = await service.get_transformation_summary(tenant_id, days_back)
return summary
except Exception as e:
logger.error("Failed to get transformation summary", error=str(e), tenant_id=tenant_id)
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail="Failed to get transformation summary"
)
@router.post(
route_builder.build_operations_route("transformations/par-bake-to-fresh"),
response_model=dict
)
@require_user_role(['admin', 'owner', 'member'])
async def create_par_bake_transformation(
source_ingredient_id: UUID = Query(..., description="Par-baked ingredient ID"),
target_ingredient_id: UUID = Query(..., description="Fresh baked ingredient ID"),
quantity: float = Query(..., gt=0, description="Quantity to transform"),
target_batch_number: Optional[str] = Query(None, description="Target batch number"),
expiration_hours: int = Query(24, ge=1, le=72, description="Hours until expiration after baking"),
notes: Optional[str] = Query(None, description="Process notes"),
tenant_id: UUID = Path(..., description="Tenant ID"),
current_user: dict = Depends(get_current_user_dep),
db: AsyncSession = Depends(get_db)
):
"""Convenience endpoint for par-baked to fresh transformation"""
try:
# Extract user ID - handle service tokens
user_id = get_current_user_id(current_user)
# Create transformation data for par-baked to fully baked
transformation_data = ProductTransformationCreate(
source_ingredient_id=str(source_ingredient_id),
target_ingredient_id=str(target_ingredient_id),
source_stage=ProductionStage.PAR_BAKED,
target_stage=ProductionStage.FULLY_BAKED,
source_quantity=quantity,
target_quantity=quantity, # Assume 1:1 ratio for par-baked goods
expiration_calculation_method="days_from_transformation",
expiration_days_offset=max(1, expiration_hours // 24), # Convert hours to days, minimum 1 day
process_notes=notes,
target_batch_number=target_batch_number
)
service = TransformationService()
transformation = await service.create_transformation(transformation_data, tenant_id, user_id)
return {
"transformation_id": transformation.id,
"transformation_reference": transformation.transformation_reference,
"source_quantity": transformation.source_quantity,
"target_quantity": transformation.target_quantity,
"expiration_date": transformation.transformation_date,
"message": f"Successfully transformed {quantity} units from par-baked to fresh baked"
}
except ValueError as e:
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail=str(e)
)
except Exception as e:
logger.error("Failed to create par-bake transformation", error=str(e), tenant_id=tenant_id)
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail="Failed to create par-bake transformation"
)
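# Expiration-offset examples for the convenience endpoint above, using the
# max(1, expiration_hours // 24) conversion applied in the handler:
#     expiration_hours=12 -> expiration_days_offset=1  (clamped to the 1-day minimum)
#     expiration_hours=24 -> expiration_days_offset=1
#     expiration_hours=48 -> expiration_days_offset=2
#     expiration_hours=72 -> expiration_days_offset=3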