Improve the frontend and repository layer

This commit is contained in:
Urtzi Alfaro
2025-10-23 07:44:54 +02:00
parent 8d30172483
commit 07c33fa578
112 changed files with 14726 additions and 2733 deletions

View File

@@ -20,7 +20,6 @@ sys.path.insert(0, str(Path(__file__).parent.parent.parent.parent.parent))
from app.core.database import get_db
from app.models.inventory import Ingredient, Stock
from shared.utils.demo_dates import adjust_date_for_demo, BASE_REFERENCE_DATE
from shared.messaging.rabbitmq import RabbitMQClient
logger = structlog.get_logger()
router = APIRouter(prefix="/internal/demo", tags=["internal"])
@@ -254,44 +253,12 @@ async def clone_demo_data(
# Commit all changes
await db.commit()
# Generate inventory alerts with RabbitMQ publishing
rabbitmq_client = None
try:
from shared.utils.alert_generator import generate_inventory_alerts
# NOTE: Alert generation removed - alerts are now generated automatically by the
# inventory_alert_service which runs scheduled checks every 2-5 minutes.
# This eliminates duplicate alerts and provides a more realistic demo experience.
stats["alerts_generated"] = 0
# Initialize RabbitMQ client for alert publishing
rabbitmq_host = os.getenv("RABBITMQ_HOST", "rabbitmq-service")
rabbitmq_user = os.getenv("RABBITMQ_USER", "bakery")
rabbitmq_password = os.getenv("RABBITMQ_PASSWORD", "forecast123")
rabbitmq_port = os.getenv("RABBITMQ_PORT", "5672")
rabbitmq_vhost = os.getenv("RABBITMQ_VHOST", "/")
rabbitmq_url = f"amqp://{rabbitmq_user}:{rabbitmq_password}@{rabbitmq_host}:{rabbitmq_port}{rabbitmq_vhost}"
rabbitmq_client = RabbitMQClient(rabbitmq_url, service_name="inventory")
await rabbitmq_client.connect()
# Generate alerts and publish to RabbitMQ
alerts_count = await generate_inventory_alerts(
db,
virtual_uuid,
session_created_at,
rabbitmq_client=rabbitmq_client
)
stats["alerts_generated"] = alerts_count
await db.commit()
logger.info(f"Generated {alerts_count} inventory alerts", virtual_tenant_id=virtual_tenant_id)
except Exception as e:
logger.warning(f"Failed to generate alerts: {str(e)}", exc_info=True)
stats["alerts_generated"] = 0
finally:
# Clean up RabbitMQ connection
if rabbitmq_client:
try:
await rabbitmq_client.disconnect()
except Exception as cleanup_error:
logger.warning(f"Error disconnecting RabbitMQ: {cleanup_error}")
total_records = sum(stats.values())
total_records = stats["ingredients"] + stats["stock_batches"]
duration_ms = int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000)
logger.info(

View File

@@ -0,0 +1,374 @@
# ================================================================
# services/inventory/app/api/sustainability.py
# ================================================================
"""
Sustainability API endpoints for Environmental Impact & SDG Compliance
Following standardized URL structure: /api/v1/tenants/{tenant_id}/sustainability/{operation}
"""
from datetime import datetime, timedelta
from typing import Optional
from uuid import UUID
from fastapi import APIRouter, Depends, HTTPException, Query, Path, status
from fastapi.responses import JSONResponse
from sqlalchemy.ext.asyncio import AsyncSession
import structlog
from shared.auth.decorators import get_current_user_dep
from app.core.database import get_db
from app.services.sustainability_service import SustainabilityService
from app.schemas.sustainability import (
SustainabilityMetrics,
GrantReport,
SustainabilityWidgetData,
SustainabilityMetricsRequest,
GrantReportRequest
)
from shared.routing import RouteBuilder
logger = structlog.get_logger()
# Create route builder for consistent URL structure
route_builder = RouteBuilder('sustainability')
router = APIRouter(tags=["sustainability"])
# ===== Dependency Injection =====
async def get_sustainability_service() -> SustainabilityService:
    """Dependency provider: build a fresh SustainabilityService per request."""
    service = SustainabilityService()
    return service
# ===== SUSTAINABILITY ENDPOINTS =====
@router.get(
    "/api/v1/tenants/{tenant_id}/sustainability/metrics",
    response_model=SustainabilityMetrics,
    summary="Get Sustainability Metrics",
    description="Get comprehensive sustainability metrics including environmental impact, SDG compliance, and grant readiness"
)
async def get_sustainability_metrics(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    start_date: Optional[datetime] = Query(None, description="Start date for metrics (default: 30 days ago)"),
    end_date: Optional[datetime] = Query(None, description="End date for metrics (default: now)"),
    current_user: dict = Depends(get_current_user_dep),
    sustainability_service: SustainabilityService = Depends(get_sustainability_service),
    db: AsyncSession = Depends(get_db)
):
    """Return the tenant's full sustainability metrics payload.

    Covers food-waste metrics, environmental impact (CO2, water, land use),
    UN SDG 12.3 compliance tracking, AI-avoided waste, financial impact and
    grant-program eligibility. Intended for dashboards, grant applications,
    sustainability reporting and compliance verification.

    Raises a 500 HTTPException if the service layer fails.
    """
    try:
        result = await sustainability_service.get_sustainability_metrics(
            db=db,
            tenant_id=tenant_id,
            start_date=start_date,
            end_date=end_date
        )
        # Pull the headline reduction figure defensively for the audit log.
        reduction = (
            result.get('sdg_compliance', {})
            .get('sdg_12_3', {})
            .get('reduction_achieved', 0)
        )
        logger.info(
            "Sustainability metrics retrieved",
            tenant_id=str(tenant_id),
            user_id=current_user.get('user_id'),
            waste_reduction=reduction
        )
        return result
    except Exception as exc:
        logger.error(
            "Error getting sustainability metrics",
            tenant_id=str(tenant_id),
            error=str(exc)
        )
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Failed to retrieve sustainability metrics: {str(exc)}"
        )
@router.get(
    "/api/v1/tenants/{tenant_id}/sustainability/widget",
    response_model=SustainabilityWidgetData,
    summary="Get Sustainability Widget Data",
    description="Get simplified sustainability data optimized for dashboard widgets"
)
async def get_sustainability_widget_data(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    days: int = Query(30, ge=1, le=365, description="Number of days to analyze"),
    current_user: dict = Depends(get_current_user_dep),
    sustainability_service: SustainabilityService = Depends(get_sustainability_service),
    db: AsyncSession = Depends(get_db)
):
    """Return a compact sustainability summary for dashboard widgets.

    Fetches the full metrics payload for the trailing ``days`` window and
    projects it down to the handful of headline values a widget needs
    (waste totals, savings, SDG status/progress, grant readiness).

    Raises a 500 HTTPException if the service layer fails.
    """
    try:
        # NOTE(review): naive local time — presumably the service compares
        # against naive timestamps too; confirm before switching to UTC.
        window_end = datetime.now()
        window_start = window_end - timedelta(days=days)
        full_metrics = await sustainability_service.get_sustainability_metrics(
            db=db,
            tenant_id=tenant_id,
            start_date=window_start,
            end_date=window_end
        )
        env = full_metrics['environmental_impact']
        sdg = full_metrics['sdg_compliance']['sdg_12_3']
        widget_payload = {
            'total_waste_kg': full_metrics['waste_metrics']['total_waste_kg'],
            'waste_reduction_percentage': sdg['reduction_achieved'],
            'co2_saved_kg': env['co2_emissions']['kg'],
            'water_saved_liters': env['water_footprint']['liters'],
            'trees_equivalent': env['co2_emissions']['trees_to_offset'],
            'sdg_status': sdg['status'],
            'sdg_progress': sdg['progress_to_target'],
            'grant_programs_ready': len(full_metrics['grant_readiness']['recommended_applications']),
            'financial_savings_eur': full_metrics['financial_impact']['waste_cost_eur']
        }
        logger.info(
            "Widget data retrieved",
            tenant_id=str(tenant_id),
            user_id=current_user.get('user_id')
        )
        return widget_payload
    except Exception as exc:
        logger.error(
            "Error getting widget data",
            tenant_id=str(tenant_id),
            error=str(exc)
        )
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Failed to retrieve widget data: {str(exc)}"
        )
@router.post(
    "/api/v1/tenants/{tenant_id}/sustainability/export/grant-report",
    response_model=GrantReport,
    summary="Export Grant Application Report",
    description="Generate a comprehensive report formatted for grant applications"
)
async def export_grant_report(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    request: Optional[GrantReportRequest] = None,
    current_user: dict = Depends(get_current_user_dep),
    sustainability_service: SustainabilityService = Depends(get_sustainability_service),
    db: AsyncSession = Depends(get_db)
):
    """
    Generate a comprehensive grant application report.

    **Supported grant types:**
    - `general`: General sustainability report
    - `eu_horizon`: EU Horizon Europe format
    - `farm_to_fork`: EU Farm to Fork Strategy
    - `circular_economy`: Circular Economy grants
    - `un_sdg`: UN SDG certification

    **Export formats:**
    - `json`: JSON format (default)
    - `pdf`: PDF document (future)
    - `csv`: CSV export (future)

    Raises:
        HTTPException 501 for formats other than `json`.
        HTTPException 500 if report generation fails.
    """
    try:
        # Body is optional: fall back to the schema's defaults.
        if request is None:
            request = GrantReportRequest()
        report = await sustainability_service.export_grant_report(
            db=db,
            tenant_id=tenant_id,
            grant_type=request.grant_type,
            start_date=request.start_date,
            end_date=request.end_date
        )
        logger.info(
            "Grant report exported",
            tenant_id=str(tenant_id),
            grant_type=request.grant_type,
            user_id=current_user.get('user_id')
        )
        # For now, return JSON. In future, support PDF/CSV generation
        if request.format == 'json':
            return report
        else:
            # Future: Generate PDF or CSV
            raise HTTPException(
                status_code=status.HTTP_501_NOT_IMPLEMENTED,
                detail=f"Export format '{request.format}' not yet implemented. Use 'json' for now."
            )
    except HTTPException:
        # Bug fix: the intentional 501 above was previously caught by the
        # generic handler below and re-wrapped as a 500. Re-raise HTTP errors
        # unchanged so clients see the intended status code.
        raise
    except Exception as e:
        logger.error(
            "Error exporting grant report",
            tenant_id=str(tenant_id),
            error=str(e)
        )
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Failed to export grant report: {str(e)}"
        )
@router.get(
    "/api/v1/tenants/{tenant_id}/sustainability/sdg-compliance",
    summary="Get SDG 12.3 Compliance Status",
    description="Get detailed UN SDG 12.3 compliance status and progress"
)
async def get_sdg_compliance(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    current_user: dict = Depends(get_current_user_dep),
    sustainability_service: SustainabilityService = Depends(get_sustainability_service),
    db: AsyncSession = Depends(get_db)
):
    """Return detailed UN SDG 12.3 compliance information for the tenant.

    SDG 12.3 target: by 2030, halve per-capita food waste at retail and
    consumer levels and reduce losses along supply chains. The response
    bundles compliance status, the baseline period, certification readiness,
    improvement areas, plus the current waste and environmental figures.

    Raises a 500 HTTPException if the service layer fails.
    """
    try:
        full_metrics = await sustainability_service.get_sustainability_metrics(
            db=db,
            tenant_id=tenant_id
        )
        compliance = full_metrics['sdg_compliance']
        payload = {
            'sdg_12_3_compliance': compliance['sdg_12_3'],
            'baseline_period': compliance['baseline_period'],
            'certification_ready': compliance['certification_ready'],
            'improvement_areas': compliance['improvement_areas'],
            'current_waste': full_metrics['waste_metrics'],
            'environmental_impact': full_metrics['environmental_impact']
        }
        logger.info(
            "SDG compliance data retrieved",
            tenant_id=str(tenant_id),
            status=payload['sdg_12_3_compliance']['status']
        )
        return payload
    except Exception as exc:
        logger.error(
            "Error getting SDG compliance",
            tenant_id=str(tenant_id),
            error=str(exc)
        )
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Failed to retrieve SDG compliance data: {str(exc)}"
        )
@router.get(
    "/api/v1/tenants/{tenant_id}/sustainability/environmental-impact",
    summary="Get Environmental Impact",
    description="Get detailed environmental impact metrics"
)
async def get_environmental_impact(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    days: int = Query(30, ge=1, le=365, description="Number of days to analyze"),
    current_user: dict = Depends(get_current_user_dep),
    sustainability_service: SustainabilityService = Depends(get_sustainability_service),
    db: AsyncSession = Depends(get_db)
):
    """Return the environmental impact of food waste over a trailing window.

    Includes CO2 emissions, water footprint, land use, the impact avoided
    through predictions, and the financial impact — suitable for ESG and
    sustainability reporting or customer-facing material.

    Raises a 500 HTTPException if the service layer fails.
    """
    try:
        # NOTE(review): naive local time — presumably the service compares
        # against naive timestamps too; confirm before switching to UTC.
        window_end = datetime.now()
        window_start = window_end - timedelta(days=days)
        full_metrics = await sustainability_service.get_sustainability_metrics(
            db=db,
            tenant_id=tenant_id,
            start_date=window_start,
            end_date=window_end
        )
        payload = {
            'period': full_metrics['period'],
            'waste_metrics': full_metrics['waste_metrics'],
            'environmental_impact': full_metrics['environmental_impact'],
            'avoided_impact': full_metrics['avoided_waste']['environmental_impact_avoided'],
            'financial_impact': full_metrics['financial_impact']
        }
        logger.info(
            "Environmental impact data retrieved",
            tenant_id=str(tenant_id),
            co2_kg=payload['environmental_impact']['co2_emissions']['kg']
        )
        return payload
    except Exception as exc:
        logger.error(
            "Error getting environmental impact",
            tenant_id=str(tenant_id),
            error=str(exc)
        )
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Failed to retrieve environmental impact: {str(exc)}"
        )

View File

@@ -24,6 +24,7 @@ from app.api import (
food_safety_operations,
dashboard,
analytics,
sustainability,
internal_demo
)
@@ -103,7 +104,11 @@ class InventoryService(StandardFastAPIService):
"dashboard_analytics",
"business_model_detection",
"real_time_alerts",
"regulatory_reporting"
"regulatory_reporting",
"sustainability_tracking",
"sdg_compliance",
"environmental_impact",
"grant_reporting"
]
@@ -127,6 +132,7 @@ service.add_router(food_safety_alerts.router)
service.add_router(food_safety_operations.router)
service.add_router(dashboard.router)
service.add_router(analytics.router)
service.add_router(sustainability.router)
service.add_router(internal_demo.router)

View File

@@ -0,0 +1,464 @@
# services/inventory/app/repositories/dashboard_repository.py
"""
Dashboard Repository for complex dashboard queries
"""
from typing import List, Optional, Dict, Any
from uuid import UUID
from datetime import datetime
from decimal import Decimal
from sqlalchemy import text
from sqlalchemy.ext.asyncio import AsyncSession
import structlog
logger = structlog.get_logger()
class DashboardRepository:
    """Repository for dashboard-specific database queries.

    All methods are read-only aggregations scoped to a single tenant.
    Database errors are logged with tenant context and re-raised for the
    service layer to handle.
    """

    def __init__(self, session: AsyncSession):
        # AsyncSession is provided (and committed/closed) by the caller.
        self.session = session

    async def get_business_model_metrics(self, tenant_id: UUID) -> Dict[str, Any]:
        """Get ingredient metrics for business model detection.

        Returns counts of finished products vs raw ingredients, the number
        of distinct suppliers, and the average aggregated stock level.
        """
        try:
            query = text("""
                SELECT
                    COUNT(*) as total_ingredients,
                    COUNT(CASE WHEN product_type::text = 'finished_product' THEN 1 END) as finished_products,
                    COUNT(CASE WHEN product_type::text = 'ingredient' THEN 1 END) as raw_ingredients,
                    COUNT(DISTINCT st.supplier_id) as supplier_count,
                    AVG(CASE WHEN s.available_quantity IS NOT NULL THEN s.available_quantity ELSE 0 END) as avg_stock_level
                FROM ingredients i
                LEFT JOIN (
                    SELECT ingredient_id, SUM(available_quantity) as available_quantity
                    FROM stock WHERE tenant_id = :tenant_id GROUP BY ingredient_id
                ) s ON i.id = s.ingredient_id
                LEFT JOIN (
                    SELECT ingredient_id, supplier_id
                    FROM stock WHERE tenant_id = :tenant_id AND supplier_id IS NOT NULL
                    GROUP BY ingredient_id, supplier_id
                ) st ON i.id = st.ingredient_id
                WHERE i.tenant_id = :tenant_id AND i.is_active = true
            """)
            result = await self.session.execute(query, {"tenant_id": tenant_id})
            row = result.fetchone()
            if not row:
                return {
                    "total_ingredients": 0,
                    "finished_products": 0,
                    "raw_ingredients": 0,
                    "supplier_count": 0,
                    "avg_stock_level": 0
                }
            return {
                "total_ingredients": row.total_ingredients,
                "finished_products": row.finished_products,
                "raw_ingredients": row.raw_ingredients,
                "supplier_count": row.supplier_count,
                "avg_stock_level": float(row.avg_stock_level) if row.avg_stock_level else 0
            }
        except Exception as e:
            logger.error("Failed to get business model metrics", error=str(e), tenant_id=str(tenant_id))
            raise

    async def get_stock_by_category(self, tenant_id: UUID) -> Dict[str, Dict[str, Any]]:
        """Get stock breakdown (count and total value) keyed by category."""
        try:
            query = text("""
                SELECT
                    COALESCE(i.ingredient_category::text, i.product_category::text, 'other') as category,
                    COUNT(*) as count,
                    COALESCE(SUM(s.available_quantity * s.unit_cost), 0) as total_value
                FROM ingredients i
                LEFT JOIN (
                    SELECT ingredient_id, SUM(available_quantity) as available_quantity, AVG(unit_cost) as unit_cost
                    FROM stock WHERE tenant_id = :tenant_id GROUP BY ingredient_id
                ) s ON i.id = s.ingredient_id
                WHERE i.tenant_id = :tenant_id AND i.is_active = true
                GROUP BY category
            """)
            result = await self.session.execute(query, {"tenant_id": tenant_id})
            categories = {}
            for row in result.fetchall():
                categories[row.category] = {
                    "count": row.count,
                    "total_value": float(row.total_value)
                }
            return categories
        except Exception as e:
            logger.error("Failed to get stock by category", error=str(e), tenant_id=str(tenant_id))
            raise

    async def get_alerts_by_severity(self, tenant_id: UUID) -> Dict[str, int]:
        """Get active alerts breakdown by severity (all four levels always present)."""
        try:
            query = text("""
                SELECT severity, COUNT(*) as count
                FROM food_safety_alerts
                WHERE tenant_id = :tenant_id AND status = 'active'
                GROUP BY severity
            """)
            result = await self.session.execute(query, {"tenant_id": tenant_id})
            # Pre-seed known severities so absent levels report 0.
            alerts = {"critical": 0, "high": 0, "medium": 0, "low": 0}
            for row in result.fetchall():
                alerts[row.severity] = row.count
            return alerts
        except Exception as e:
            logger.error("Failed to get alerts by severity", error=str(e), tenant_id=str(tenant_id))
            raise

    async def get_movements_by_type(self, tenant_id: UUID, days: int = 7) -> Dict[str, int]:
        """Get stock movements breakdown by type for the last ``days`` days.

        Bug fix: the lookback window is now bound to the ``days`` parameter;
        it was previously hard-coded to 7 days regardless of the argument.
        """
        try:
            query = text("""
                SELECT sm.movement_type, COUNT(*) as count
                FROM stock_movements sm
                JOIN ingredients i ON sm.ingredient_id = i.id
                WHERE i.tenant_id = :tenant_id
                AND sm.movement_date > NOW() - (:days * INTERVAL '1 day')
                GROUP BY sm.movement_type
            """)
            result = await self.session.execute(query, {"tenant_id": tenant_id, "days": days})
            movements = {}
            for row in result.fetchall():
                movements[row.movement_type] = row.count
            return movements
        except Exception as e:
            logger.error("Failed to get movements by type", error=str(e), tenant_id=str(tenant_id))
            raise

    async def get_alert_trend(self, tenant_id: UUID, days: int = 30) -> List[Dict[str, Any]]:
        """Get daily alert counts (total and high-severity) over ``days`` days.

        ``days`` is now passed as a bound parameter instead of being
        interpolated into the SQL with an f-string.
        """
        try:
            query = text("""
                SELECT
                    DATE(created_at) as alert_date,
                    COUNT(*) as alert_count,
                    COUNT(CASE WHEN severity IN ('high', 'critical') THEN 1 END) as high_severity_count
                FROM food_safety_alerts
                WHERE tenant_id = :tenant_id
                AND created_at > NOW() - (:days * INTERVAL '1 day')
                GROUP BY DATE(created_at)
                ORDER BY alert_date
            """)
            result = await self.session.execute(query, {"tenant_id": tenant_id, "days": days})
            return [
                {
                    "date": row.alert_date.isoformat(),
                    "total_alerts": row.alert_count,
                    "high_severity_alerts": row.high_severity_count
                }
                for row in result.fetchall()
            ]
        except Exception as e:
            logger.error("Failed to get alert trend", error=str(e), tenant_id=str(tenant_id))
            raise

    async def get_recent_stock_movements(
        self,
        tenant_id: UUID,
        limit: int = 20
    ) -> List[Dict[str, Any]]:
        """Get the most recent stock movements as activity-feed entries."""
        try:
            query = text("""
                SELECT
                    'stock_movement' as activity_type,
                    CASE
                        WHEN movement_type = 'PURCHASE' THEN 'Stock added: ' || i.name || ' (' || sm.quantity || ' ' || i.unit_of_measure::text || ')'
                        WHEN movement_type = 'PRODUCTION_USE' THEN 'Stock consumed: ' || i.name || ' (' || sm.quantity || ' ' || i.unit_of_measure::text || ')'
                        WHEN movement_type = 'WASTE' THEN 'Stock wasted: ' || i.name || ' (' || sm.quantity || ' ' || i.unit_of_measure::text || ')'
                        WHEN movement_type = 'ADJUSTMENT' THEN 'Stock adjusted: ' || i.name || ' (' || sm.quantity || ' ' || i.unit_of_measure::text || ')'
                        ELSE 'Stock movement: ' || i.name
                    END as description,
                    sm.movement_date as timestamp,
                    sm.created_by as user_id,
                    CASE
                        WHEN movement_type = 'WASTE' THEN 'high'
                        WHEN movement_type = 'ADJUSTMENT' THEN 'medium'
                        ELSE 'low'
                    END as impact_level,
                    sm.id as entity_id,
                    'stock_movement' as entity_type
                FROM stock_movements sm
                JOIN ingredients i ON sm.ingredient_id = i.id
                WHERE i.tenant_id = :tenant_id
                ORDER BY sm.movement_date DESC
                LIMIT :limit
            """)
            result = await self.session.execute(query, {"tenant_id": tenant_id, "limit": limit})
            return [
                {
                    "activity_type": row.activity_type,
                    "description": row.description,
                    "timestamp": row.timestamp,
                    "user_id": row.user_id,
                    "impact_level": row.impact_level,
                    "entity_id": row.entity_id,
                    "entity_type": row.entity_type
                }
                for row in result.fetchall()
            ]
        except Exception as e:
            logger.error("Failed to get recent stock movements", error=str(e), tenant_id=str(tenant_id))
            raise

    async def get_recent_food_safety_alerts(
        self,
        tenant_id: UUID,
        limit: int = 20
    ) -> List[Dict[str, Any]]:
        """Get the most recent food safety alerts as activity-feed entries."""
        try:
            query = text("""
                SELECT
                    'food_safety_alert' as activity_type,
                    title as description,
                    created_at as timestamp,
                    created_by as user_id,
                    CASE
                        WHEN severity = 'critical' THEN 'high'
                        WHEN severity = 'high' THEN 'medium'
                        ELSE 'low'
                    END as impact_level,
                    id as entity_id,
                    'food_safety_alert' as entity_type
                FROM food_safety_alerts
                WHERE tenant_id = :tenant_id
                ORDER BY created_at DESC
                LIMIT :limit
            """)
            result = await self.session.execute(query, {"tenant_id": tenant_id, "limit": limit})
            return [
                {
                    "activity_type": row.activity_type,
                    "description": row.description,
                    "timestamp": row.timestamp,
                    "user_id": row.user_id,
                    "impact_level": row.impact_level,
                    "entity_id": row.entity_id,
                    "entity_type": row.entity_type
                }
                for row in result.fetchall()
            ]
        except Exception as e:
            logger.error("Failed to get recent food safety alerts", error=str(e), tenant_id=str(tenant_id))
            raise

    async def get_live_metrics(self, tenant_id: UUID) -> Dict[str, Any]:
        """Get real-time inventory metrics (stock status counts, value, expiry)."""
        try:
            query = text("""
                SELECT
                    COUNT(DISTINCT i.id) as total_ingredients,
                    COUNT(CASE WHEN s.available_quantity > i.low_stock_threshold THEN 1 END) as in_stock,
                    COUNT(CASE WHEN s.available_quantity <= i.low_stock_threshold THEN 1 END) as low_stock,
                    COUNT(CASE WHEN s.available_quantity = 0 THEN 1 END) as out_of_stock,
                    COALESCE(SUM(s.available_quantity * s.unit_cost), 0) as total_value,
                    COUNT(CASE WHEN s.expiration_date < NOW() THEN 1 END) as expired_items,
                    COUNT(CASE WHEN s.expiration_date BETWEEN NOW() AND NOW() + INTERVAL '7 days' THEN 1 END) as expiring_soon
                FROM ingredients i
                LEFT JOIN stock s ON i.id = s.ingredient_id AND s.is_available = true
                WHERE i.tenant_id = :tenant_id AND i.is_active = true
            """)
            result = await self.session.execute(query, {"tenant_id": tenant_id})
            metrics = result.fetchone()
            if not metrics:
                return {
                    "total_ingredients": 0,
                    "in_stock": 0,
                    "low_stock": 0,
                    "out_of_stock": 0,
                    "total_value": 0.0,
                    "expired_items": 0,
                    "expiring_soon": 0,
                    "last_updated": datetime.now().isoformat()
                }
            return {
                "total_ingredients": metrics.total_ingredients,
                "in_stock": metrics.in_stock,
                "low_stock": metrics.low_stock,
                "out_of_stock": metrics.out_of_stock,
                "total_value": float(metrics.total_value),
                "expired_items": metrics.expired_items,
                "expiring_soon": metrics.expiring_soon,
                "last_updated": datetime.now().isoformat()
            }
        except Exception as e:
            logger.error("Failed to get live metrics", error=str(e), tenant_id=str(tenant_id))
            raise

    async def get_stock_status_by_category(
        self,
        tenant_id: UUID
    ) -> List[Dict[str, Any]]:
        """Get stock status breakdown by category, ordered by total value."""
        try:
            query = text("""
                SELECT
                    COALESCE(i.ingredient_category::text, i.product_category::text, 'other') as category,
                    COUNT(DISTINCT i.id) as total_ingredients,
                    COUNT(CASE WHEN s.available_quantity > i.low_stock_threshold THEN 1 END) as in_stock,
                    COUNT(CASE WHEN s.available_quantity <= i.low_stock_threshold AND s.available_quantity > 0 THEN 1 END) as low_stock,
                    COUNT(CASE WHEN COALESCE(s.available_quantity, 0) = 0 THEN 1 END) as out_of_stock,
                    COALESCE(SUM(s.available_quantity * s.unit_cost), 0) as total_value
                FROM ingredients i
                LEFT JOIN (
                    SELECT
                        ingredient_id,
                        SUM(available_quantity) as available_quantity,
                        AVG(unit_cost) as unit_cost
                    FROM stock
                    WHERE tenant_id = :tenant_id AND is_available = true
                    GROUP BY ingredient_id
                ) s ON i.id = s.ingredient_id
                WHERE i.tenant_id = :tenant_id AND i.is_active = true
                GROUP BY category
                ORDER BY total_value DESC
            """)
            result = await self.session.execute(query, {"tenant_id": tenant_id})
            return [
                {
                    "category": row.category,
                    "total_ingredients": row.total_ingredients,
                    "in_stock": row.in_stock,
                    "low_stock": row.low_stock,
                    "out_of_stock": row.out_of_stock,
                    "total_value": float(row.total_value)
                }
                for row in result.fetchall()
            ]
        except Exception as e:
            logger.error("Failed to get stock status by category", error=str(e), tenant_id=str(tenant_id))
            raise

    async def get_alerts_summary(
        self,
        tenant_id: UUID,
        alert_types: Optional[List[str]] = None,
        severities: Optional[List[str]] = None,
        date_from: Optional[datetime] = None,
        date_to: Optional[datetime] = None
    ) -> List[Dict[str, Any]]:
        """Get active alerts summarized by type and severity, with optional filters.

        All filter values are passed as bound parameters; only the static
        WHERE fragments are assembled in Python, so the f-string below does
        not interpolate user input.
        """
        try:
            # Build query with filters
            where_conditions = ["tenant_id = :tenant_id", "status = 'active'"]
            params = {"tenant_id": tenant_id}
            if alert_types:
                where_conditions.append("alert_type = ANY(:alert_types)")
                params["alert_types"] = alert_types
            if severities:
                where_conditions.append("severity = ANY(:severities)")
                params["severities"] = severities
            if date_from:
                where_conditions.append("created_at >= :date_from")
                params["date_from"] = date_from
            if date_to:
                where_conditions.append("created_at <= :date_to")
                params["date_to"] = date_to
            where_clause = " AND ".join(where_conditions)
            query = text(f"""
                SELECT
                    alert_type,
                    severity,
                    COUNT(*) as count,
                    MIN(EXTRACT(EPOCH FROM (NOW() - created_at))/3600)::int as oldest_alert_age_hours,
                    AVG(CASE WHEN resolved_at IS NOT NULL
                        THEN EXTRACT(EPOCH FROM (resolved_at - created_at))/3600
                        ELSE NULL END)::int as avg_resolution_hours
                FROM food_safety_alerts
                WHERE {where_clause}
                GROUP BY alert_type, severity
                ORDER BY severity DESC, count DESC
            """)
            result = await self.session.execute(query, params)
            return [
                {
                    "alert_type": row.alert_type,
                    "severity": row.severity,
                    "count": row.count,
                    "oldest_alert_age_hours": row.oldest_alert_age_hours,
                    "average_resolution_time_hours": row.avg_resolution_hours
                }
                for row in result.fetchall()
            ]
        except Exception as e:
            logger.error("Failed to get alerts summary", error=str(e), tenant_id=str(tenant_id))
            raise

    async def get_ingredient_stock_levels(self, tenant_id: UUID) -> Dict[str, float]:
        """
        Get current stock levels for all ingredients.

        Args:
            tenant_id: Tenant UUID

        Returns:
            Dictionary mapping ingredient_id (as str) to current stock level
        """
        try:
            stock_query = text("""
                SELECT
                    i.id as ingredient_id,
                    COALESCE(SUM(s.available_quantity), 0) as current_stock
                FROM ingredients i
                LEFT JOIN stock s ON i.id = s.ingredient_id AND s.is_available = true
                WHERE i.tenant_id = :tenant_id AND i.is_active = true
                GROUP BY i.id
            """)
            result = await self.session.execute(stock_query, {"tenant_id": tenant_id})
            stock_levels = {}
            for row in result.fetchall():
                stock_levels[str(row.ingredient_id)] = float(row.current_stock)
            return stock_levels
        except Exception as e:
            logger.error("Failed to get ingredient stock levels", error=str(e), tenant_id=str(tenant_id))
            raise

View File

@@ -0,0 +1,279 @@
# services/inventory/app/repositories/food_safety_repository.py
"""
Food Safety Repository
Data access layer for food safety compliance and monitoring
"""
from typing import List, Optional, Dict, Any
from uuid import UUID
from datetime import datetime
from sqlalchemy import text, select
from sqlalchemy.ext.asyncio import AsyncSession
import structlog
from app.models.food_safety import (
FoodSafetyCompliance,
FoodSafetyAlert,
TemperatureLog,
ComplianceStatus
)
logger = structlog.get_logger()
class FoodSafetyRepository:
"""Repository for food safety data access"""
def __init__(self, session: AsyncSession):
self.session = session
# ===== COMPLIANCE METHODS =====
async def create_compliance(self, compliance: FoodSafetyCompliance) -> FoodSafetyCompliance:
"""
Create a new compliance record
Args:
compliance: FoodSafetyCompliance instance
Returns:
Created FoodSafetyCompliance instance
"""
self.session.add(compliance)
await self.session.flush()
await self.session.refresh(compliance)
return compliance
async def get_compliance_by_id(
self,
compliance_id: UUID,
tenant_id: UUID
) -> Optional[FoodSafetyCompliance]:
"""
Get compliance record by ID
Args:
compliance_id: Compliance record UUID
tenant_id: Tenant UUID for authorization
Returns:
FoodSafetyCompliance or None
"""
compliance = await self.session.get(FoodSafetyCompliance, compliance_id)
if compliance and compliance.tenant_id == tenant_id:
return compliance
return None
async def update_compliance(
self,
compliance: FoodSafetyCompliance
) -> FoodSafetyCompliance:
"""
Update compliance record
Args:
compliance: FoodSafetyCompliance instance with updates
Returns:
Updated FoodSafetyCompliance instance
"""
await self.session.flush()
await self.session.refresh(compliance)
return compliance
async def get_compliance_stats(self, tenant_id: UUID) -> Dict[str, int]:
"""
Get compliance statistics for dashboard
Args:
tenant_id: Tenant UUID
Returns:
Dictionary with compliance counts by status
"""
try:
query = text("""
SELECT
COUNT(*) as total,
COUNT(CASE WHEN compliance_status = 'COMPLIANT' THEN 1 END) as compliant,
COUNT(CASE WHEN compliance_status = 'NON_COMPLIANT' THEN 1 END) as non_compliant,
COUNT(CASE WHEN compliance_status = 'PENDING_REVIEW' THEN 1 END) as pending_review
FROM food_safety_compliance
WHERE tenant_id = :tenant_id AND is_active = true
""")
result = await self.session.execute(query, {"tenant_id": tenant_id})
row = result.fetchone()
if not row:
return {
"total": 0,
"compliant": 0,
"non_compliant": 0,
"pending_review": 0
}
return {
"total": row.total or 0,
"compliant": row.compliant or 0,
"non_compliant": row.non_compliant or 0,
"pending_review": row.pending_review or 0
}
except Exception as e:
logger.error("Failed to get compliance stats", error=str(e), tenant_id=str(tenant_id))
raise
# ===== TEMPERATURE MONITORING METHODS =====
async def get_temperature_stats(self, tenant_id: UUID) -> Dict[str, Any]:
"""
Get temperature monitoring statistics
Args:
tenant_id: Tenant UUID
Returns:
Dictionary with temperature monitoring stats
"""
try:
query = text("""
SELECT
COUNT(DISTINCT equipment_id) as sensors_online,
COUNT(CASE WHEN NOT is_within_range AND recorded_at > NOW() - INTERVAL '24 hours' THEN 1 END) as violations_24h
FROM temperature_logs
WHERE tenant_id = :tenant_id AND recorded_at > NOW() - INTERVAL '1 hour'
""")
result = await self.session.execute(query, {"tenant_id": tenant_id})
row = result.fetchone()
if not row:
return {
"sensors_online": 0,
"violations_24h": 0
}
return {
"sensors_online": row.sensors_online or 0,
"violations_24h": row.violations_24h or 0
}
except Exception as e:
logger.error("Failed to get temperature stats", error=str(e), tenant_id=str(tenant_id))
raise
# ===== EXPIRATION TRACKING METHODS =====
async def get_expiration_stats(self, tenant_id: UUID) -> Dict[str, int]:
"""
Get expiration tracking statistics
Args:
tenant_id: Tenant UUID
Returns:
Dictionary with expiration counts
"""
try:
query = text("""
SELECT
COUNT(CASE WHEN expiration_date::date = CURRENT_DATE THEN 1 END) as expiring_today,
COUNT(CASE WHEN expiration_date BETWEEN CURRENT_DATE AND CURRENT_DATE + INTERVAL '7 days' THEN 1 END) as expiring_week,
COUNT(CASE WHEN expiration_date < CURRENT_DATE AND is_available THEN 1 END) as expired_requiring_action
FROM stock s
JOIN ingredients i ON s.ingredient_id = i.id
WHERE i.tenant_id = :tenant_id AND s.is_available = true
""")
result = await self.session.execute(query, {"tenant_id": tenant_id})
row = result.fetchone()
if not row:
return {
"expiring_today": 0,
"expiring_week": 0,
"expired_requiring_action": 0
}
return {
"expiring_today": row.expiring_today or 0,
"expiring_week": row.expiring_week or 0,
"expired_requiring_action": row.expired_requiring_action or 0
}
except Exception as e:
logger.error("Failed to get expiration stats", error=str(e), tenant_id=str(tenant_id))
raise
# ===== ALERT METHODS =====
async def get_alert_stats(self, tenant_id: UUID) -> Dict[str, int]:
    """
    Summarise active food-safety alerts for a tenant.

    Args:
        tenant_id: Tenant UUID

    Returns:
        Dictionary with ``high_risk``, ``critical`` and
        ``regulatory_pending`` alert counts
    """
    try:
        alert_sql = text("""
            SELECT
                COUNT(CASE WHEN severity = 'high' OR severity = 'critical' THEN 1 END) as high_risk,
                COUNT(CASE WHEN severity = 'critical' THEN 1 END) as critical,
                COUNT(CASE WHEN regulatory_action_required = true AND resolved_at IS NULL THEN 1 END) as regulatory_pending
            FROM food_safety_alerts
            WHERE tenant_id = :tenant_id AND status = 'active'
        """)
        row = (await self.session.execute(alert_sql, {"tenant_id": tenant_id})).fetchone()
        # Fall back to zeros when no row comes back.
        counts = {"high_risk": 0, "critical": 0, "regulatory_pending": 0}
        if row:
            counts["high_risk"] = row.high_risk or 0
            counts["critical"] = row.critical or 0
            counts["regulatory_pending"] = row.regulatory_pending or 0
        return counts
    except Exception as exc:
        logger.error("Failed to get alert stats", error=str(exc), tenant_id=str(tenant_id))
        raise
# ===== VALIDATION METHODS =====
async def validate_ingredient_exists(
    self,
    ingredient_id: UUID,
    tenant_id: UUID
) -> bool:
    """
    Check whether an ingredient belongs to the given tenant.

    Args:
        ingredient_id: Ingredient UUID
        tenant_id: Tenant UUID

    Returns:
        True if ingredient exists, False otherwise
    """
    try:
        lookup_sql = text("""
            SELECT id
            FROM ingredients
            WHERE id = :ingredient_id AND tenant_id = :tenant_id
        """)
        params = {"ingredient_id": ingredient_id, "tenant_id": tenant_id}
        row = (await self.session.execute(lookup_sql, params)).fetchone()
        return row is not None
    except Exception as exc:
        logger.error("Failed to validate ingredient", error=str(exc))
        raise

View File

@@ -0,0 +1,301 @@
# services/inventory/app/repositories/inventory_alert_repository.py
"""
Inventory Alert Repository
Data access layer for inventory alert detection and analysis
"""
from typing import Any, Dict, List, Optional
from uuid import UUID
from sqlalchemy import text
from sqlalchemy.ext.asyncio import AsyncSession
import structlog
logger = structlog.get_logger()
class InventoryAlertRepository:
    """Repository for inventory alert data access"""

    def __init__(self, session: AsyncSession):
        # All queries run on this async session; transaction management
        # is left to the caller except where a method commits explicitly.
        self.session = session
async def get_stock_issues(self, tenant_id: UUID) -> List[Dict[str, Any]]:
    """
    Get stock level issues with CTE analysis
    Returns list of critical, low, and overstock situations
    """
    try:
        # One aggregated row per active ingredient: 'critical'/'low' are
        # driven by low_stock_threshold, 'overstock' by max_stock_level.
        # tomorrow_needed / avg_daily_usage are hard-coded placeholder
        # zeros and lead_time_days a fixed 7 — demand data is not wired in.
        # NOTE(review): a NULL low_stock_threshold makes every CASE test
        # NULL, so the row falls through to 'normal' — confirm thresholds
        # are always populated.
        query = text("""
            WITH stock_analysis AS (
                SELECT
                    i.id, i.name, i.tenant_id,
                    COALESCE(SUM(s.current_quantity), 0) as current_stock,
                    i.low_stock_threshold as minimum_stock,
                    i.max_stock_level as maximum_stock,
                    i.reorder_point,
                    0 as tomorrow_needed,
                    0 as avg_daily_usage,
                    7 as lead_time_days,
                    CASE
                        WHEN COALESCE(SUM(s.current_quantity), 0) < i.low_stock_threshold THEN 'critical'
                        WHEN COALESCE(SUM(s.current_quantity), 0) < i.low_stock_threshold * 1.2 THEN 'low'
                        WHEN i.max_stock_level IS NOT NULL AND COALESCE(SUM(s.current_quantity), 0) > i.max_stock_level THEN 'overstock'
                        ELSE 'normal'
                    END as status,
                    GREATEST(0, i.low_stock_threshold - COALESCE(SUM(s.current_quantity), 0)) as shortage_amount
                FROM ingredients i
                LEFT JOIN stock s ON s.ingredient_id = i.id AND s.is_available = true
                WHERE i.tenant_id = :tenant_id AND i.is_active = true
                GROUP BY i.id, i.name, i.tenant_id, i.low_stock_threshold, i.max_stock_level, i.reorder_point
            )
            SELECT * FROM stock_analysis WHERE status != 'normal'
            ORDER BY
                CASE status
                    WHEN 'critical' THEN 1
                    WHEN 'low' THEN 2
                    WHEN 'overstock' THEN 3
                END,
                shortage_amount DESC
        """)
        result = await self.session.execute(query, {"tenant_id": tenant_id})
        # Row._mapping gives a dict-like view of column -> value.
        return [dict(row._mapping) for row in result.fetchall()]
    except Exception as e:
        logger.error("Failed to get stock issues", error=str(e), tenant_id=str(tenant_id))
        raise
async def get_expiring_products(self, tenant_id: UUID, days_threshold: int = 7) -> List[Dict[str, Any]]:
    """
    Get products expiring soon or already expired.

    Args:
        tenant_id: Tenant UUID
        days_threshold: Look-ahead window in days (default 7)

    Returns:
        List of available stock batches inside the window, each with an
        urgency classification and total value, ordered by soonest
        expiration then highest value

    Raises:
        Exception: re-raised after logging on any database failure
    """
    try:
        # BUGFIX: bind parameters are not substituted inside quoted
        # literals, so INTERVAL ':days_threshold days' reached PostgreSQL
        # verbatim and failed to parse. make_interval() accepts the bound
        # integer directly.
        # NOTE(review): EXTRACT(DAY FROM ...) assumes expiration_date is a
        # timestamp (date - date would yield an integer) — confirm schema.
        query = text("""
            SELECT
                i.id as ingredient_id,
                i.name as ingredient_name,
                s.id as stock_id,
                s.batch_number,
                s.expiration_date,
                s.current_quantity,
                i.unit_of_measure,
                s.unit_cost,
                (s.current_quantity * s.unit_cost) as total_value,
                CASE
                    WHEN s.expiration_date < CURRENT_DATE THEN 'expired'
                    WHEN s.expiration_date <= CURRENT_DATE + INTERVAL '1 day' THEN 'expires_today'
                    WHEN s.expiration_date <= CURRENT_DATE + INTERVAL '3 days' THEN 'expires_soon'
                    ELSE 'warning'
                END as urgency,
                EXTRACT(DAY FROM (s.expiration_date - CURRENT_DATE)) as days_until_expiry
            FROM stock s
            JOIN ingredients i ON s.ingredient_id = i.id
            WHERE i.tenant_id = :tenant_id
            AND s.is_available = true
            AND s.expiration_date <= CURRENT_DATE + make_interval(days => :days_threshold)
            ORDER BY s.expiration_date ASC, total_value DESC
        """)
        result = await self.session.execute(query, {
            "tenant_id": tenant_id,
            "days_threshold": days_threshold
        })
        return [dict(row._mapping) for row in result.fetchall()]
    except Exception as e:
        logger.error("Failed to get expiring products", error=str(e), tenant_id=str(tenant_id))
        raise
async def get_temperature_breaches(self, tenant_id: UUID, hours_back: int = 24) -> List[Dict[str, Any]]:
    """
    Get temperature monitoring breaches that have not yet triggered alerts.

    Args:
        tenant_id: Tenant UUID
        hours_back: How many hours of history to scan (default 24)

    Returns:
        Out-of-range, un-alerted temperature log rows with their deviation
        from the nearest threshold, largest deviation first

    Raises:
        Exception: re-raised after logging on any database failure
    """
    try:
        # BUGFIX: bind parameters are not substituted inside quoted
        # literals, so INTERVAL ':hours_back hours' reached PostgreSQL
        # verbatim and failed to parse. make_interval() accepts the bound
        # integer directly.
        query = text("""
            SELECT
                tl.id,
                tl.equipment_id,
                tl.equipment_name,
                tl.storage_type,
                tl.temperature_celsius,
                tl.min_threshold,
                tl.max_threshold,
                tl.is_within_range,
                tl.recorded_at,
                tl.alert_triggered,
                EXTRACT(EPOCH FROM (NOW() - tl.recorded_at))/3600 as hours_ago,
                CASE
                    WHEN tl.temperature_celsius < tl.min_threshold
                        THEN tl.min_threshold - tl.temperature_celsius
                    WHEN tl.temperature_celsius > tl.max_threshold
                        THEN tl.temperature_celsius - tl.max_threshold
                    ELSE 0
                END as deviation
            FROM temperature_logs tl
            WHERE tl.tenant_id = :tenant_id
            AND tl.is_within_range = false
            AND tl.recorded_at > NOW() - make_interval(hours => :hours_back)
            AND tl.alert_triggered = false
            ORDER BY deviation DESC, tl.recorded_at DESC
        """)
        result = await self.session.execute(query, {
            "tenant_id": tenant_id,
            "hours_back": hours_back
        })
        return [dict(row._mapping) for row in result.fetchall()]
    except Exception as e:
        logger.error("Failed to get temperature breaches", error=str(e), tenant_id=str(tenant_id))
        raise
async def mark_temperature_alert_triggered(self, log_id: UUID) -> None:
    """
    Flag a temperature log row as having triggered an alert.

    Commits immediately rather than deferring to the caller.

    Args:
        log_id: Primary key of the temperature_logs row to flag
    """
    update_sql = text("""
        UPDATE temperature_logs
        SET alert_triggered = true
        WHERE id = :id
    """)
    try:
        await self.session.execute(update_sql, {"id": log_id})
        await self.session.commit()
    except Exception as exc:
        logger.error("Failed to mark temperature alert", error=str(exc), log_id=str(log_id))
        raise
async def get_waste_opportunities(self, tenant_id: UUID) -> List[Dict[str, Any]]:
    """
    Identify waste reduction opportunities.

    Aggregates WASTE stock movements over the last 30 days per ingredient
    and keeps only recurring (>= 3 incidents) or costly (> 50 total cost)
    cases, capped at the top 20 by cost.
    """
    try:
        query = text("""
            WITH waste_analysis AS (
                SELECT
                    i.id as ingredient_id,
                    i.name as ingredient_name,
                    i.ingredient_category,
                    COUNT(sm.id) as waste_incidents,
                    SUM(sm.quantity) as total_waste_quantity,
                    SUM(sm.total_cost) as total_waste_cost,
                    AVG(sm.quantity) as avg_waste_per_incident,
                    MAX(sm.movement_date) as last_waste_date
                FROM stock_movements sm
                JOIN ingredients i ON sm.ingredient_id = i.id
                WHERE i.tenant_id = :tenant_id
                AND sm.movement_type = 'WASTE'
                AND sm.movement_date > NOW() - INTERVAL '30 days'
                GROUP BY i.id, i.name, i.ingredient_category
                HAVING COUNT(sm.id) >= 3 OR SUM(sm.total_cost) > 50
            )
            SELECT * FROM waste_analysis
            ORDER BY total_waste_cost DESC, waste_incidents DESC
            LIMIT 20
        """)
        result = await self.session.execute(query, {"tenant_id": tenant_id})
        # Row._mapping gives a dict-like view of column -> value.
        return [dict(row._mapping) for row in result.fetchall()]
    except Exception as e:
        logger.error("Failed to get waste opportunities", error=str(e), tenant_id=str(tenant_id))
        raise
async def get_reorder_recommendations(self, tenant_id: UUID) -> List[Dict[str, Any]]:
    """
    Get ingredients that need reordering based on stock levels and usage.

    Daily usage is derived from PRODUCTION_USE movements over the last
    7 days; the recommendation covers at least 14 days of that usage or
    the ingredient's standard order quantity, whichever is larger.
    """
    try:
        # days_of_stock uses 999 as a "no measurable usage" sentinel.
        # NOTE(review): rows with a NULL reorder_point never satisfy
        # current_stock <= reorder_point and are silently excluded —
        # confirm that is intended.
        query = text("""
            WITH usage_analysis AS (
                SELECT
                    i.id,
                    i.name,
                    COALESCE(SUM(s.current_quantity), 0) as current_stock,
                    i.reorder_point,
                    i.low_stock_threshold,
                    COALESCE(SUM(sm.quantity) FILTER (WHERE sm.movement_date > NOW() - INTERVAL '7 days'), 0) / 7 as daily_usage,
                    i.preferred_supplier_id,
                    i.standard_order_quantity
                FROM ingredients i
                LEFT JOIN stock s ON s.ingredient_id = i.id AND s.is_available = true
                LEFT JOIN stock_movements sm ON sm.ingredient_id = i.id
                    AND sm.movement_type = 'PRODUCTION_USE'
                    AND sm.movement_date > NOW() - INTERVAL '7 days'
                WHERE i.tenant_id = :tenant_id
                AND i.is_active = true
                GROUP BY i.id, i.name, i.reorder_point, i.low_stock_threshold,
                    i.preferred_supplier_id, i.standard_order_quantity
            )
            SELECT *,
                CASE
                    WHEN daily_usage > 0 THEN FLOOR(current_stock / NULLIF(daily_usage, 0))
                    ELSE 999
                END as days_of_stock,
                GREATEST(
                    standard_order_quantity,
                    CEIL(daily_usage * 14)
                ) as recommended_order_quantity
            FROM usage_analysis
            WHERE current_stock <= reorder_point
            ORDER BY days_of_stock ASC, current_stock ASC
            LIMIT 50
        """)
        result = await self.session.execute(query, {"tenant_id": tenant_id})
        return [dict(row._mapping) for row in result.fetchall()]
    except Exception as e:
        logger.error("Failed to get reorder recommendations", error=str(e), tenant_id=str(tenant_id))
        raise
async def get_active_tenant_ids(self) -> List[UUID]:
    """
    Return the distinct tenant IDs that own at least one active ingredient.

    Returns:
        List of tenant UUIDs (normalised to ``uuid.UUID`` regardless of
        how the driver returns the column)
    """
    try:
        result = await self.session.execute(
            text("SELECT DISTINCT tenant_id FROM ingredients WHERE is_active = true")
        )
        # Drivers may hand back native UUIDs or strings; normalise both.
        return [
            raw if isinstance(raw, UUID) else UUID(str(raw))
            for raw in (row.tenant_id for row in result.fetchall())
        ]
    except Exception as exc:
        logger.error("Failed to get active tenant IDs", error=str(exc))
        raise
async def get_stock_after_order(self, ingredient_id: str, order_quantity: float) -> Optional[Dict[str, Any]]:
    """
    Project stock levels after a hypothetical order is consumed.

    Args:
        ingredient_id: Ingredient ID (string form of its UUID)
        order_quantity: Quantity the hypothetical order would consume

    Returns:
        Dict with current stock, minimum stock and the quantity remaining
        after the order, or None when the ingredient does not exist.
        (Annotation fixed: the original claimed a bare Dict but returned
        None on a miss.)

    Raises:
        Exception: re-raised after logging on any database failure
    """
    try:
        query = text("""
            SELECT i.id, i.name,
                COALESCE(SUM(s.current_quantity), 0) as current_stock,
                i.low_stock_threshold as minimum_stock,
                (COALESCE(SUM(s.current_quantity), 0) - :order_quantity) as remaining
            FROM ingredients i
            LEFT JOIN stock s ON s.ingredient_id = i.id AND s.is_available = true
            WHERE i.id = :ingredient_id
            GROUP BY i.id, i.name, i.low_stock_threshold
        """)
        result = await self.session.execute(query, {
            "ingredient_id": ingredient_id,
            "order_quantity": order_quantity
        })
        row = result.fetchone()
        # None signals "ingredient not found" to the caller.
        return dict(row._mapping) if row else None
    except Exception as e:
        logger.error("Failed to get stock after order", error=str(e), ingredient_id=ingredient_id)
        raise

View File

@@ -491,4 +491,49 @@ class StockMovementRepository(BaseRepository[StockMovement, StockMovementCreate,
tenant_id=str(tenant_id),
ingredient_id=str(ingredient_id),
stock_id=str(stock_id))
raise
async def get_inventory_waste_total(
    self,
    tenant_id: UUID,
    start_date: datetime,
    end_date: datetime
) -> float:
    """
    Total quantity wasted from inventory over a period, for
    sustainability reporting.

    Args:
        tenant_id: Tenant UUID
        start_date: Start date for period
        end_date: End date for period

    Returns:
        Total waste quantity (0.0 when no WASTE movements exist)
    """
    try:
        from sqlalchemy import text
        waste_sql = text("""
            SELECT COALESCE(SUM(sm.quantity), 0) as total_inventory_waste
            FROM stock_movements sm
            JOIN ingredients i ON sm.ingredient_id = i.id
            WHERE i.tenant_id = :tenant_id
            AND sm.movement_type = 'WASTE'
            AND sm.movement_date BETWEEN :start_date AND :end_date
        """)
        params = {
            'tenant_id': tenant_id,
            'start_date': start_date,
            'end_date': end_date
        }
        row = (await self.session.execute(waste_sql, params)).fetchone()
        return float(row.total_inventory_waste or 0)
    except Exception as exc:
        logger.error("Failed to get inventory waste total", error=str(exc), tenant_id=str(tenant_id))
        raise

View File

@@ -0,0 +1,206 @@
# ================================================================
# services/inventory/app/schemas/sustainability.py
# ================================================================
"""
Sustainability Schemas - Environmental Impact & SDG Compliance
"""
from datetime import datetime
from typing import Dict, Any, List, Optional
from decimal import Decimal
from pydantic import BaseModel, Field
class PeriodInfo(BaseModel):
    """Time period for metrics"""
    # NOTE(review): dates are plain strings, not datetimes — presumably
    # ISO-formatted by the producer; confirm before parsing.
    start_date: str
    end_date: str
    days: int
class WasteMetrics(BaseModel):
    """Waste tracking metrics; quantity fields are in kilograms."""
    total_waste_kg: float = Field(description="Total waste in kilograms")
    production_waste_kg: float = Field(description="Waste from production processes")
    expired_waste_kg: float = Field(description="Waste from expired inventory")
    waste_percentage: float = Field(description="Waste as percentage of total production")
    # Keys are waste reasons, values are quantities per reason.
    waste_by_reason: Dict[str, float] = Field(description="Breakdown by waste reason")
class CO2Emissions(BaseModel):
    """CO2 emission metrics in kg and tons, plus a tree-offset equivalent."""
    kg: float = Field(description="CO2 emissions in kilograms")
    tons: float = Field(description="CO2 emissions in tons")
    trees_to_offset: float = Field(description="Equivalent trees needed to offset emissions")
class WaterFootprint(BaseModel):
    """Water usage metrics in liters and cubic meters."""
    liters: float = Field(description="Water footprint in liters")
    cubic_meters: float = Field(description="Water footprint in cubic meters")
class LandUse(BaseModel):
    """Land use metrics in square meters and hectares."""
    square_meters: float = Field(description="Land use in square meters")
    hectares: float = Field(description="Land use in hectares")
class HumanEquivalents(BaseModel):
    """Human-relatable equivalents used to communicate environmental impact."""
    car_km_equivalent: float = Field(description="Equivalent kilometers driven by car")
    smartphone_charges: float = Field(description="Equivalent smartphone charges")
    showers_equivalent: float = Field(description="Equivalent showers taken")
    trees_planted: float = Field(description="Equivalent trees planted")
class EnvironmentalImpact(BaseModel):
    """Environmental impact of food waste, grouped by impact category."""
    co2_emissions: CO2Emissions
    water_footprint: WaterFootprint
    land_use: LandUse
    human_equivalents: HumanEquivalents
class SDG123Metrics(BaseModel):
    """UN SDG 12.3 specific metrics"""
    baseline_waste_percentage: float = Field(description="Baseline waste percentage")
    current_waste_percentage: float = Field(description="Current waste percentage")
    reduction_achieved: float = Field(description="Reduction achieved from baseline (%)")
    # Default mirrors SDG 12.3's 50% food-waste-reduction target.
    target_reduction: float = Field(description="Target reduction (50%)", default=50.0)
    progress_to_target: float = Field(description="Progress toward target (%)")
    status: str = Field(description="Status code: sdg_compliant, on_track, progressing, baseline")
    status_label: str = Field(description="Human-readable status")
    target_waste_percentage: float = Field(description="Target waste percentage to achieve")
class SDGCompliance(BaseModel):
    """SDG compliance assessment"""
    sdg_12_3: SDG123Metrics
    baseline_period: str = Field(description="Period used for baseline calculation")
    certification_ready: bool = Field(description="Ready for SDG certification")
    improvement_areas: List[str] = Field(description="Identified areas for improvement")
class EnvironmentalImpactAvoided(BaseModel):
    """Environmental impact avoided through AI-assisted production."""
    co2_kg: float = Field(description="CO2 emissions avoided (kg)")
    water_liters: float = Field(description="Water saved (liters)")
class AvoidedWaste(BaseModel):
    """Waste avoided through AI predictions"""
    waste_avoided_kg: float = Field(description="Waste avoided in kilograms")
    ai_assisted_batches: int = Field(description="Number of AI-assisted batches")
    environmental_impact_avoided: EnvironmentalImpactAvoided
    # Free-text description of how the avoided figures were derived.
    methodology: str = Field(description="Calculation methodology")
class FinancialImpact(BaseModel):
    """Financial impact of waste, all amounts in euros."""
    waste_cost_eur: float = Field(description="Cost of waste in euros")
    cost_per_kg: float = Field(description="Average cost per kg")
    potential_monthly_savings: float = Field(description="Potential monthly savings")
    annual_projection: float = Field(description="Annual cost projection")
class GrantProgramEligibility(BaseModel):
    """Eligibility for a specific grant program"""
    eligible: bool = Field(description="Whether eligible for this grant")
    confidence: str = Field(description="Confidence level: high, medium, low")
    requirements_met: bool = Field(description="Whether requirements are met")
class GrantReadiness(BaseModel):
    """Grant application readiness assessment"""
    overall_readiness_percentage: float = Field(description="Overall readiness percentage")
    # Keyed by grant-program identifier.
    grant_programs: Dict[str, GrantProgramEligibility] = Field(description="Eligibility by program")
    recommended_applications: List[str] = Field(description="Recommended grant programs to apply for")
class SustainabilityMetrics(BaseModel):
    """Complete sustainability metrics response (top-level API payload)."""
    period: PeriodInfo
    waste_metrics: WasteMetrics
    environmental_impact: EnvironmentalImpact
    sdg_compliance: SDGCompliance
    avoided_waste: AvoidedWaste
    financial_impact: FinancialImpact
    grant_readiness: GrantReadiness
class BaselineComparison(BaseModel):
    """Baseline comparison for grants"""
    # NOTE(review): units are not declared here — presumably waste
    # percentages, matching SDG123Metrics; confirm with the producer.
    baseline: float
    current: float
    improvement: float
class SupportingData(BaseModel):
    """Supporting data attached to grant applications."""
    baseline_comparison: BaselineComparison
    environmental_benefits: EnvironmentalImpact
    financial_benefits: FinancialImpact
class Certifications(BaseModel):
    """Certification status"""
    sdg_12_3_compliant: bool
    # Names of grant programs the tenant currently qualifies for.
    grant_programs_eligible: List[str]
class ExecutiveSummary(BaseModel):
    """Executive summary for grant reports"""
    total_waste_reduced_kg: float
    waste_reduction_percentage: float
    co2_emissions_avoided_kg: float
    financial_savings_eur: float
    # One of the SDG123Metrics status codes.
    sdg_compliance_status: str
class ReportMetadata(BaseModel):
    """Report metadata"""
    # NOTE(review): generated_at is a string — presumably an ISO
    # timestamp; confirm with the report generator.
    generated_at: str
    report_type: str
    period: PeriodInfo
    tenant_id: str
class GrantReport(BaseModel):
    """Complete grant application report bundling summary and details."""
    report_metadata: ReportMetadata
    executive_summary: ExecutiveSummary
    detailed_metrics: SustainabilityMetrics
    certifications: Certifications
    supporting_data: SupportingData
# Request schemas
class SustainabilityMetricsRequest(BaseModel):
    """Request for sustainability metrics; omitted dates mean the server default period."""
    start_date: Optional[datetime] = Field(None, description="Start date for metrics")
    end_date: Optional[datetime] = Field(None, description="End date for metrics")
class GrantReportRequest(BaseModel):
    """Request for grant report export"""
    grant_type: str = Field("general", description="Type of grant: general, eu_horizon, farm_to_fork, etc.")
    start_date: Optional[datetime] = Field(None, description="Start date for report")
    end_date: Optional[datetime] = Field(None, description="End date for report")
    # NOTE(review): only the formats listed here are advertised; actual
    # support depends on the exporting endpoint.
    format: str = Field("json", description="Export format: json, pdf, csv")
# Widget/Dashboard schemas
class SustainabilityWidgetData(BaseModel):
    """Simplified, flat subset of SustainabilityMetrics for dashboard widgets."""
    total_waste_kg: float
    waste_reduction_percentage: float
    co2_saved_kg: float
    water_saved_liters: float
    trees_equivalent: float
    sdg_status: str
    sdg_progress: float
    grant_programs_ready: int
    financial_savings_eur: float

View File

@@ -10,6 +10,7 @@ from decimal import Decimal
from typing import List, Optional, Dict, Any
from uuid import UUID
import structlog
from sqlalchemy import text
from app.core.config import settings
from app.services.inventory_service import InventoryService
@@ -17,6 +18,7 @@ from app.services.food_safety_service import FoodSafetyService
from app.repositories.ingredient_repository import IngredientRepository
from app.repositories.stock_repository import StockRepository
from app.repositories.stock_movement_repository import StockMovementRepository
from app.repositories.dashboard_repository import DashboardRepository
from app.schemas.dashboard import (
InventoryDashboardSummary,
BusinessModelInsights,
@@ -40,20 +42,23 @@ class DashboardService:
food_safety_service: FoodSafetyService,
ingredient_repository: Optional[IngredientRepository] = None,
stock_repository: Optional[StockRepository] = None,
stock_movement_repository: Optional[StockMovementRepository] = None
stock_movement_repository: Optional[StockMovementRepository] = None,
dashboard_repository: Optional[DashboardRepository] = None
):
self.inventory_service = inventory_service
self.food_safety_service = food_safety_service
self._ingredient_repository = ingredient_repository
self._stock_repository = stock_repository
self._stock_movement_repository = stock_movement_repository
self._dashboard_repository = dashboard_repository
def _get_repositories(self, db):
"""Get repository instances for the current database session"""
return {
'ingredient_repo': self._ingredient_repository or IngredientRepository(db),
'stock_repo': self._stock_repository or StockRepository(db),
'stock_movement_repo': self._stock_movement_repository or StockMovementRepository(db)
'stock_movement_repo': self._stock_movement_repository or StockMovementRepository(db),
'dashboard_repo': self._dashboard_repository or DashboardRepository(db)
}
async def get_inventory_dashboard_summary(
@@ -75,22 +80,26 @@ class DashboardService:
# Get business model insights
business_model = await self._detect_business_model(db, tenant_id)
# Get dashboard repository
repos = self._get_repositories(db)
dashboard_repo = repos['dashboard_repo']
# Get category breakdown
stock_by_category = await self._get_stock_by_category(db, tenant_id)
stock_by_category = await dashboard_repo.get_stock_by_category(tenant_id)
# Get alerts breakdown
alerts_by_severity = await self._get_alerts_by_severity(db, tenant_id)
alerts_by_severity = await dashboard_repo.get_alerts_by_severity(tenant_id)
# Get movements breakdown
movements_by_type = await self._get_movements_by_type(db, tenant_id)
movements_by_type = await dashboard_repo.get_movements_by_type(tenant_id)
# Get performance indicators
performance_metrics = await self._calculate_performance_indicators(db, tenant_id)
# Get trending data
stock_value_trend = await self._get_stock_value_trend(db, tenant_id, days=30)
alert_trend = await self._get_alert_trend(db, tenant_id, days=30)
alert_trend = await dashboard_repo.get_alert_trend(tenant_id, days=30)
# Recent activity
recent_activity = await self.get_recent_activity(db, tenant_id, limit=10)
@@ -200,26 +209,10 @@ class DashboardService:
ingredients = await repos['ingredient_repo'].get_ingredients_by_tenant(tenant_id, limit=1000)
stock_summary = await repos['stock_repo'].get_stock_summary_by_tenant(tenant_id)
# Get current stock levels for all ingredients using a direct query
# Get current stock levels for all ingredients using repository
ingredient_stock_levels = {}
try:
from sqlalchemy import text
# Query to get current stock for all ingredients
stock_query = text("""
SELECT
i.id as ingredient_id,
COALESCE(SUM(s.available_quantity), 0) as current_stock
FROM ingredients i
LEFT JOIN stock s ON i.id = s.ingredient_id AND s.is_available = true
WHERE i.tenant_id = :tenant_id AND i.is_active = true
GROUP BY i.id
""")
result = await db.execute(stock_query, {"tenant_id": tenant_id})
for row in result.fetchall():
ingredient_stock_levels[str(row.ingredient_id)] = float(row.current_stock)
ingredient_stock_levels = await dashboard_repo.get_ingredient_stock_levels(tenant_id)
except Exception as e:
logger.warning(f"Could not fetch current stock levels: {e}")
@@ -320,50 +313,29 @@ class DashboardService:
) -> List[StockStatusSummary]:
"""Get stock status breakdown by category"""
try:
query = """
SELECT
COALESCE(i.ingredient_category::text, i.product_category::text, 'other') as category,
COUNT(DISTINCT i.id) as total_ingredients,
COUNT(CASE WHEN s.available_quantity > i.low_stock_threshold THEN 1 END) as in_stock,
COUNT(CASE WHEN s.available_quantity <= i.low_stock_threshold AND s.available_quantity > 0 THEN 1 END) as low_stock,
COUNT(CASE WHEN COALESCE(s.available_quantity, 0) = 0 THEN 1 END) as out_of_stock,
COALESCE(SUM(s.available_quantity * s.unit_cost), 0) as total_value
FROM ingredients i
LEFT JOIN (
SELECT
ingredient_id,
SUM(available_quantity) as available_quantity,
AVG(unit_cost) as unit_cost
FROM stock
WHERE tenant_id = :tenant_id AND is_available = true
GROUP BY ingredient_id
) s ON i.id = s.ingredient_id
WHERE i.tenant_id = :tenant_id AND i.is_active = true
GROUP BY category
ORDER BY total_value DESC
"""
result = await db.execute(query, {"tenant_id": tenant_id})
rows = result.fetchall()
repos = self._get_repositories(db)
dashboard_repo = repos['dashboard_repo']
rows = await dashboard_repo.get_stock_status_by_category(tenant_id)
summaries = []
total_value = sum(row.total_value for row in rows)
total_value = sum(row["total_value"] for row in rows)
for row in rows:
percentage = (row.total_value / total_value * 100) if total_value > 0 else 0
percentage = (row["total_value"] / total_value * 100) if total_value > 0 else 0
summaries.append(StockStatusSummary(
category=row.category,
total_ingredients=row.total_ingredients,
in_stock=row.in_stock,
low_stock=row.low_stock,
out_of_stock=row.out_of_stock,
total_value=Decimal(str(row.total_value)),
category=row["category"],
total_ingredients=row["total_ingredients"],
in_stock=row["in_stock"],
low_stock=row["low_stock"],
out_of_stock=row["out_of_stock"],
total_value=Decimal(str(row["total_value"])),
percentage_of_total=Decimal(str(percentage))
))
return summaries
except Exception as e:
logger.error("Failed to get stock status by category", error=str(e))
raise
@@ -376,58 +348,30 @@ class DashboardService:
) -> List[AlertSummary]:
"""Get alerts summary by type and severity"""
try:
# Build query with filters
where_conditions = ["tenant_id = :tenant_id", "status = 'active'"]
params = {"tenant_id": tenant_id}
if filters:
if filters.alert_types:
where_conditions.append("alert_type = ANY(:alert_types)")
params["alert_types"] = filters.alert_types
if filters.severities:
where_conditions.append("severity = ANY(:severities)")
params["severities"] = filters.severities
if filters.date_from:
where_conditions.append("created_at >= :date_from")
params["date_from"] = filters.date_from
if filters.date_to:
where_conditions.append("created_at <= :date_to")
params["date_to"] = filters.date_to
where_clause = " AND ".join(where_conditions)
query = f"""
SELECT
alert_type,
severity,
COUNT(*) as count,
MIN(EXTRACT(EPOCH FROM (NOW() - created_at))/3600)::int as oldest_alert_age_hours,
AVG(CASE WHEN resolved_at IS NOT NULL
THEN EXTRACT(EPOCH FROM (resolved_at - created_at))/3600
ELSE NULL END)::int as avg_resolution_hours
FROM food_safety_alerts
WHERE {where_clause}
GROUP BY alert_type, severity
ORDER BY severity DESC, count DESC
"""
result = await db.execute(query, params)
rows = result.fetchall()
repos = self._get_repositories(db)
dashboard_repo = repos['dashboard_repo']
# Extract filter parameters
alert_types = filters.alert_types if filters else None
severities = filters.severities if filters else None
date_from = filters.date_from if filters else None
date_to = filters.date_to if filters else None
rows = await dashboard_repo.get_alerts_summary(
tenant_id, alert_types, severities, date_from, date_to
)
return [
AlertSummary(
alert_type=row.alert_type,
severity=row.severity,
count=row.count,
oldest_alert_age_hours=row.oldest_alert_age_hours,
average_resolution_time_hours=row.avg_resolution_hours
alert_type=row["alert_type"],
severity=row["severity"],
count=row["count"],
oldest_alert_age_hours=row["oldest_alert_age_hours"],
average_resolution_time_hours=row["average_resolution_time_hours"]
)
for row in rows
]
except Exception as e:
logger.error("Failed to get alerts summary", error=str(e))
raise
@@ -441,81 +385,39 @@ class DashboardService:
) -> List[RecentActivity]:
"""Get recent inventory activity"""
try:
repos = self._get_repositories(db)
dashboard_repo = repos['dashboard_repo']
activities = []
# Get recent stock movements
stock_query = """
SELECT
'stock_movement' as activity_type,
CASE
WHEN movement_type = 'PURCHASE' THEN 'Stock added: ' || i.name || ' (' || sm.quantity || ' ' || i.unit_of_measure::text || ')'
WHEN movement_type = 'PRODUCTION_USE' THEN 'Stock consumed: ' || i.name || ' (' || sm.quantity || ' ' || i.unit_of_measure::text || ')'
WHEN movement_type = 'WASTE' THEN 'Stock wasted: ' || i.name || ' (' || sm.quantity || ' ' || i.unit_of_measure::text || ')'
WHEN movement_type = 'ADJUSTMENT' THEN 'Stock adjusted: ' || i.name || ' (' || sm.quantity || ' ' || i.unit_of_measure::text || ')'
ELSE 'Stock movement: ' || i.name
END as description,
sm.movement_date as timestamp,
sm.created_by as user_id,
CASE
WHEN movement_type = 'WASTE' THEN 'high'
WHEN movement_type = 'ADJUSTMENT' THEN 'medium'
ELSE 'low'
END as impact_level,
sm.id as entity_id,
'stock_movement' as entity_type
FROM stock_movements sm
JOIN ingredients i ON sm.ingredient_id = i.id
WHERE i.tenant_id = :tenant_id
ORDER BY sm.movement_date DESC
LIMIT :limit
"""
result = await db.execute(stock_query, {"tenant_id": tenant_id, "limit": limit // 2})
for row in result.fetchall():
stock_movements = await dashboard_repo.get_recent_stock_movements(tenant_id, limit // 2)
for row in stock_movements:
activities.append(RecentActivity(
activity_type=row.activity_type,
description=row.description,
timestamp=row.timestamp,
impact_level=row.impact_level,
entity_id=row.entity_id,
entity_type=row.entity_type
activity_type=row["activity_type"],
description=row["description"],
timestamp=row["timestamp"],
impact_level=row["impact_level"],
entity_id=row["entity_id"],
entity_type=row["entity_type"]
))
# Get recent food safety alerts
alert_query = """
SELECT
'food_safety_alert' as activity_type,
title as description,
created_at as timestamp,
created_by as user_id,
CASE
WHEN severity = 'critical' THEN 'high'
WHEN severity = 'high' THEN 'medium'
ELSE 'low'
END as impact_level,
id as entity_id,
'food_safety_alert' as entity_type
FROM food_safety_alerts
WHERE tenant_id = :tenant_id
ORDER BY created_at DESC
LIMIT :limit
"""
result = await db.execute(alert_query, {"tenant_id": tenant_id, "limit": limit // 2})
for row in result.fetchall():
safety_alerts = await dashboard_repo.get_recent_food_safety_alerts(tenant_id, limit // 2)
for row in safety_alerts:
activities.append(RecentActivity(
activity_type=row.activity_type,
description=row.description,
timestamp=row.timestamp,
impact_level=row.impact_level,
entity_id=row.entity_id,
entity_type=row.entity_type
activity_type=row["activity_type"],
description=row["description"],
timestamp=row["timestamp"],
impact_level=row["impact_level"],
entity_id=row["entity_id"],
entity_type=row["entity_type"]
))
# Sort by timestamp and limit
activities.sort(key=lambda x: x.timestamp, reverse=True)
return activities[:limit]
except Exception as e:
logger.error("Failed to get recent activity", error=str(e))
raise
@@ -523,34 +425,11 @@ class DashboardService:
async def get_live_metrics(self, db, tenant_id: UUID) -> Dict[str, Any]:
"""Get real-time inventory metrics"""
try:
query = """
SELECT
COUNT(DISTINCT i.id) as total_ingredients,
COUNT(CASE WHEN s.available_quantity > i.low_stock_threshold THEN 1 END) as in_stock,
COUNT(CASE WHEN s.available_quantity <= i.low_stock_threshold THEN 1 END) as low_stock,
COUNT(CASE WHEN s.available_quantity = 0 THEN 1 END) as out_of_stock,
COALESCE(SUM(s.available_quantity * s.unit_cost), 0) as total_value,
COUNT(CASE WHEN s.expiration_date < NOW() THEN 1 END) as expired_items,
COUNT(CASE WHEN s.expiration_date BETWEEN NOW() AND NOW() + INTERVAL '7 days' THEN 1 END) as expiring_soon
FROM ingredients i
LEFT JOIN stock s ON i.id = s.ingredient_id AND s.is_available = true
WHERE i.tenant_id = :tenant_id AND i.is_active = true
"""
result = await db.execute(query, {"tenant_id": tenant_id})
metrics = result.fetchone()
return {
"total_ingredients": metrics.total_ingredients,
"in_stock": metrics.in_stock,
"low_stock": metrics.low_stock,
"out_of_stock": metrics.out_of_stock,
"total_value": float(metrics.total_value),
"expired_items": metrics.expired_items,
"expiring_soon": metrics.expiring_soon,
"last_updated": datetime.now().isoformat()
}
repos = self._get_repositories(db)
dashboard_repo = repos['dashboard_repo']
return await dashboard_repo.get_live_metrics(tenant_id)
except Exception as e:
logger.error("Failed to get live metrics", error=str(e))
raise
@@ -607,34 +486,16 @@ class DashboardService:
try:
if not settings.ENABLE_BUSINESS_MODEL_DETECTION:
return {"model": "unknown", "confidence": Decimal("0")}
repos = self._get_repositories(db)
dashboard_repo = repos['dashboard_repo']
# Get ingredient metrics
query = """
SELECT
COUNT(*) as total_ingredients,
COUNT(CASE WHEN product_type = 'finished_product' THEN 1 END) as finished_products,
COUNT(CASE WHEN product_type = 'ingredient' THEN 1 END) as raw_ingredients,
COUNT(DISTINCT st.supplier_id) as supplier_count,
AVG(CASE WHEN s.available_quantity IS NOT NULL THEN s.available_quantity ELSE 0 END) as avg_stock_level
FROM ingredients i
LEFT JOIN (
SELECT ingredient_id, SUM(available_quantity) as available_quantity
FROM stock WHERE tenant_id = :tenant_id GROUP BY ingredient_id
) s ON i.id = s.ingredient_id
LEFT JOIN (
SELECT ingredient_id, supplier_id
FROM stock WHERE tenant_id = :tenant_id AND supplier_id IS NOT NULL
GROUP BY ingredient_id, supplier_id
) st ON i.id = st.ingredient_id
WHERE i.tenant_id = :tenant_id AND i.is_active = true
"""
result = await db.execute(query, {"tenant_id": tenant_id})
metrics = result.fetchone()
metrics = await dashboard_repo.get_business_model_metrics(tenant_id)
# Business model detection logic
total_ingredients = metrics.total_ingredients
finished_ratio = metrics.finished_products / total_ingredients if total_ingredients > 0 else 0
total_ingredients = metrics["total_ingredients"]
finished_ratio = metrics["finished_products"] / total_ingredients if total_ingredients > 0 else 0
if total_ingredients >= settings.CENTRAL_BAKERY_THRESHOLD_INGREDIENTS:
if finished_ratio > 0.3: # More than 30% finished products
@@ -659,31 +520,11 @@ class DashboardService:
async def _get_stock_by_category(self, db, tenant_id: UUID) -> Dict[str, Any]:
"""Get stock breakdown by category"""
try:
query = """
SELECT
COALESCE(i.ingredient_category::text, i.product_category::text, 'other') as category,
COUNT(*) as count,
COALESCE(SUM(s.available_quantity * s.unit_cost), 0) as total_value
FROM ingredients i
LEFT JOIN (
SELECT ingredient_id, SUM(available_quantity) as available_quantity, AVG(unit_cost) as unit_cost
FROM stock WHERE tenant_id = :tenant_id GROUP BY ingredient_id
) s ON i.id = s.ingredient_id
WHERE i.tenant_id = :tenant_id AND i.is_active = true
GROUP BY category
"""
result = await db.execute(query, {"tenant_id": tenant_id})
categories = {}
for row in result.fetchall():
categories[row.category] = {
"count": row.count,
"total_value": float(row.total_value)
}
return categories
repos = self._get_repositories(db)
dashboard_repo = repos['dashboard_repo']
return await dashboard_repo.get_stock_by_category(tenant_id)
except Exception as e:
logger.error("Failed to get stock by category", error=str(e))
return {}
@@ -691,21 +532,11 @@ class DashboardService:
async def _get_alerts_by_severity(self, db, tenant_id: UUID) -> Dict[str, int]:
"""Get alerts breakdown by severity"""
try:
query = """
SELECT severity, COUNT(*) as count
FROM food_safety_alerts
WHERE tenant_id = :tenant_id AND status = 'active'
GROUP BY severity
"""
result = await db.execute(query, {"tenant_id": tenant_id})
alerts = {"critical": 0, "high": 0, "medium": 0, "low": 0}
for row in result.fetchall():
alerts[row.severity] = row.count
return alerts
repos = self._get_repositories(db)
dashboard_repo = repos['dashboard_repo']
return await dashboard_repo.get_alerts_by_severity(tenant_id)
except Exception as e:
logger.error("Failed to get alerts by severity", error=str(e))
return {"critical": 0, "high": 0, "medium": 0, "low": 0}
@@ -713,23 +544,11 @@ class DashboardService:
async def _get_movements_by_type(self, db, tenant_id: UUID) -> Dict[str, int]:
"""Get movements breakdown by type"""
try:
query = """
SELECT sm.movement_type, COUNT(*) as count
FROM stock_movements sm
JOIN ingredients i ON sm.ingredient_id = i.id
WHERE i.tenant_id = :tenant_id
AND sm.movement_date > NOW() - INTERVAL '7 days'
GROUP BY sm.movement_type
"""
result = await db.execute(query, {"tenant_id": tenant_id})
movements = {}
for row in result.fetchall():
movements[row.movement_type] = row.count
return movements
repos = self._get_repositories(db)
dashboard_repo = repos['dashboard_repo']
return await dashboard_repo.get_movements_by_type(tenant_id)
except Exception as e:
logger.error("Failed to get movements by type", error=str(e))
return {}
@@ -773,29 +592,11 @@ class DashboardService:
async def _get_alert_trend(self, db, tenant_id: UUID, days: int) -> List[Dict[str, Any]]:
"""Get alert trend over time"""
try:
query = """
SELECT
DATE(created_at) as alert_date,
COUNT(*) as alert_count,
COUNT(CASE WHEN severity IN ('high', 'critical') THEN 1 END) as high_severity_count
FROM food_safety_alerts
WHERE tenant_id = :tenant_id
AND created_at > NOW() - INTERVAL '%s days'
GROUP BY DATE(created_at)
ORDER BY alert_date
""" % days
result = await db.execute(query, {"tenant_id": tenant_id})
return [
{
"date": row.alert_date.isoformat(),
"total_alerts": row.alert_count,
"high_severity_alerts": row.high_severity_count
}
for row in result.fetchall()
]
repos = self._get_repositories(db)
dashboard_repo = repos['dashboard_repo']
return await dashboard_repo.get_alert_trend(tenant_id, days)
except Exception as e:
logger.error("Failed to get alert trend", error=str(e))
return []
@@ -870,26 +671,10 @@ class DashboardService:
# Get ingredients to analyze costs by category
ingredients = await repos['ingredient_repo'].get_ingredients_by_tenant(tenant_id, limit=1000)
# Get current stock levels for all ingredients using a direct query
# Get current stock levels for all ingredients using repository
ingredient_stock_levels = {}
try:
from sqlalchemy import text
# Query to get current stock for all ingredients
stock_query = text("""
SELECT
i.id as ingredient_id,
COALESCE(SUM(s.available_quantity), 0) as current_stock
FROM ingredients i
LEFT JOIN stock s ON i.id = s.ingredient_id AND s.is_available = true
WHERE i.tenant_id = :tenant_id AND i.is_active = true
GROUP BY i.id
""")
result = await db.execute(stock_query, {"tenant_id": tenant_id})
for row in result.fetchall():
ingredient_stock_levels[str(row.ingredient_id)] = float(row.current_stock)
ingredient_stock_levels = await repos['dashboard_repo'].get_ingredient_stock_levels(tenant_id)
except Exception as e:
logger.warning(f"Could not fetch current stock levels for cost analysis: {e}")

View File

@@ -16,13 +16,14 @@ from shared.database.transactions import transactional
from app.core.config import settings
from app.models.food_safety import (
FoodSafetyCompliance,
TemperatureLog,
FoodSafetyCompliance,
TemperatureLog,
FoodSafetyAlert,
FoodSafetyStandard,
ComplianceStatus,
FoodSafetyAlertType
)
from app.repositories.food_safety_repository import FoodSafetyRepository
from app.schemas.food_safety import (
FoodSafetyComplianceCreate,
FoodSafetyComplianceUpdate,
@@ -42,9 +43,13 @@ logger = structlog.get_logger()
class FoodSafetyService:
"""Service for food safety and compliance operations"""
def __init__(self):
pass
def _get_repository(self, db) -> FoodSafetyRepository:
"""Get repository instance for the current database session"""
return FoodSafetyRepository(db)
# ===== COMPLIANCE MANAGEMENT =====
@@ -90,9 +95,9 @@ class FoodSafetyService:
updated_by=user_id
)
db.add(compliance)
await db.flush()
await db.refresh(compliance)
# Create compliance record using repository
repo = self._get_repository(db)
compliance = await repo.create_compliance(compliance)
# Check for compliance alerts
await self._check_compliance_alerts(db, compliance)
@@ -117,9 +122,10 @@ class FoodSafetyService:
) -> Optional[FoodSafetyComplianceResponse]:
"""Update an existing compliance record"""
try:
# Get existing compliance record
compliance = await db.get(FoodSafetyCompliance, compliance_id)
if not compliance or compliance.tenant_id != tenant_id:
# Get existing compliance record using repository
repo = self._get_repository(db)
compliance = await repo.get_compliance_by_id(compliance_id, tenant_id)
if not compliance:
return None
# Update fields
@@ -132,9 +138,9 @@ class FoodSafetyService:
setattr(compliance, field, value)
compliance.updated_by = user_id
await db.flush()
await db.refresh(compliance)
# Update compliance record using repository
compliance = await repo.update_compliance(compliance)
# Check for compliance alerts after update
await self._check_compliance_alerts(db, compliance)
@@ -336,85 +342,44 @@ class FoodSafetyService:
) -> FoodSafetyDashboard:
"""Get food safety dashboard data"""
try:
# Get compliance overview
from sqlalchemy import text
compliance_query = text("""
SELECT
COUNT(*) as total,
COUNT(CASE WHEN compliance_status = 'COMPLIANT' THEN 1 END) as compliant,
COUNT(CASE WHEN compliance_status = 'NON_COMPLIANT' THEN 1 END) as non_compliant,
COUNT(CASE WHEN compliance_status = 'PENDING_REVIEW' THEN 1 END) as pending_review
FROM food_safety_compliance
WHERE tenant_id = :tenant_id AND is_active = true
""")
compliance_result = await db.execute(compliance_query, {"tenant_id": tenant_id})
compliance_stats = compliance_result.fetchone()
total_compliance = compliance_stats.total or 0
compliant_items = compliance_stats.compliant or 0
# Get repository instance
repo = self._get_repository(db)
# Get compliance overview using repository
compliance_stats = await repo.get_compliance_stats(tenant_id)
total_compliance = compliance_stats["total"]
compliant_items = compliance_stats["compliant"]
compliance_percentage = (compliant_items / total_compliance * 100) if total_compliance > 0 else 0
# Get temperature monitoring status
temp_query = text("""
SELECT
COUNT(DISTINCT equipment_id) as sensors_online,
COUNT(CASE WHEN NOT is_within_range AND recorded_at > NOW() - INTERVAL '24 hours' THEN 1 END) as violations_24h
FROM temperature_logs
WHERE tenant_id = :tenant_id AND recorded_at > NOW() - INTERVAL '1 hour'
""")
temp_result = await db.execute(temp_query, {"tenant_id": tenant_id})
temp_stats = temp_result.fetchone()
# Get expiration tracking
expiration_query = text("""
SELECT
COUNT(CASE WHEN expiration_date::date = CURRENT_DATE THEN 1 END) as expiring_today,
COUNT(CASE WHEN expiration_date BETWEEN CURRENT_DATE AND CURRENT_DATE + INTERVAL '7 days' THEN 1 END) as expiring_week,
COUNT(CASE WHEN expiration_date < CURRENT_DATE AND is_available THEN 1 END) as expired_requiring_action
FROM stock s
JOIN ingredients i ON s.ingredient_id = i.id
WHERE i.tenant_id = :tenant_id AND s.is_available = true
""")
expiration_result = await db.execute(expiration_query, {"tenant_id": tenant_id})
expiration_stats = expiration_result.fetchone()
# Get alert counts
alert_query = text("""
SELECT
COUNT(CASE WHEN severity = 'high' OR severity = 'critical' THEN 1 END) as high_risk,
COUNT(CASE WHEN severity = 'critical' THEN 1 END) as critical,
COUNT(CASE WHEN regulatory_action_required = true AND resolved_at IS NULL THEN 1 END) as regulatory_pending
FROM food_safety_alerts
WHERE tenant_id = :tenant_id AND status = 'active'
""")
alert_result = await db.execute(alert_query, {"tenant_id": tenant_id})
alert_stats = alert_result.fetchone()
# Get temperature monitoring status using repository
temp_stats = await repo.get_temperature_stats(tenant_id)
# Get expiration tracking using repository
expiration_stats = await repo.get_expiration_stats(tenant_id)
# Get alert counts using repository
alert_stats = await repo.get_alert_stats(tenant_id)
return FoodSafetyDashboard(
total_compliance_items=total_compliance,
compliant_items=compliant_items,
non_compliant_items=compliance_stats.non_compliant or 0,
pending_review_items=compliance_stats.pending_review or 0,
non_compliant_items=compliance_stats["non_compliant"],
pending_review_items=compliance_stats["pending_review"],
compliance_percentage=Decimal(str(compliance_percentage)),
temperature_sensors_online=temp_stats.sensors_online or 0,
temperature_sensors_total=temp_stats.sensors_online or 0, # Would need actual count
temperature_violations_24h=temp_stats.violations_24h or 0,
temperature_sensors_online=temp_stats["sensors_online"],
temperature_sensors_total=temp_stats["sensors_online"], # Would need actual count
temperature_violations_24h=temp_stats["violations_24h"],
current_temperature_status="normal", # Would need to calculate
items_expiring_today=expiration_stats.expiring_today or 0,
items_expiring_this_week=expiration_stats.expiring_week or 0,
expired_items_requiring_action=expiration_stats.expired_requiring_action or 0,
items_expiring_today=expiration_stats["expiring_today"],
items_expiring_this_week=expiration_stats["expiring_week"],
expired_items_requiring_action=expiration_stats["expired_requiring_action"],
upcoming_audits=0, # Would need to calculate
overdue_audits=0, # Would need to calculate
certifications_valid=compliant_items,
certifications_expiring_soon=0, # Would need to calculate
high_risk_items=alert_stats.high_risk or 0,
critical_alerts=alert_stats.critical or 0,
regulatory_notifications_pending=alert_stats.regulatory_pending or 0,
high_risk_items=alert_stats["high_risk"],
critical_alerts=alert_stats["critical"],
regulatory_notifications_pending=alert_stats["regulatory_pending"],
recent_safety_incidents=[] # Would need to get recent incidents
)
@@ -426,16 +391,14 @@ class FoodSafetyService:
async def _validate_compliance_data(self, db, compliance_data: FoodSafetyComplianceCreate):
"""Validate compliance data for business rules"""
# Check if ingredient exists
from sqlalchemy import text
ingredient_query = text("SELECT id FROM ingredients WHERE id = :ingredient_id AND tenant_id = :tenant_id")
result = await db.execute(ingredient_query, {
"ingredient_id": compliance_data.ingredient_id,
"tenant_id": compliance_data.tenant_id
})
if not result.fetchone():
# Check if ingredient exists using repository
repo = self._get_repository(db)
ingredient_exists = await repo.validate_ingredient_exists(
compliance_data.ingredient_id,
compliance_data.tenant_id
)
if not ingredient_exists:
raise ValueError("Ingredient not found")
# Validate standard

View File

@@ -18,6 +18,7 @@ from shared.alerts.base_service import BaseAlertService, AlertServiceMixin
from shared.alerts.templates import format_item_message
from app.repositories.stock_repository import StockRepository
from app.repositories.stock_movement_repository import StockMovementRepository
from app.repositories.inventory_alert_repository import InventoryAlertRepository
logger = structlog.get_logger()
@@ -90,54 +91,20 @@ class InventoryAlertService(BaseAlertService, AlertServiceMixin):
"""Batch check all stock levels for critical shortages (alerts)"""
try:
self._checks_performed += 1
query = """
WITH stock_analysis AS (
SELECT
i.id, i.name, i.tenant_id,
COALESCE(SUM(s.current_quantity), 0) as current_stock,
i.low_stock_threshold as minimum_stock,
i.max_stock_level as maximum_stock,
i.reorder_point,
0 as tomorrow_needed,
0 as avg_daily_usage,
7 as lead_time_days,
CASE
WHEN COALESCE(SUM(s.current_quantity), 0) < i.low_stock_threshold THEN 'critical'
WHEN COALESCE(SUM(s.current_quantity), 0) < i.low_stock_threshold * 1.2 THEN 'low'
WHEN i.max_stock_level IS NOT NULL AND COALESCE(SUM(s.current_quantity), 0) > i.max_stock_level THEN 'overstock'
ELSE 'normal'
END as status,
GREATEST(0, i.low_stock_threshold - COALESCE(SUM(s.current_quantity), 0)) as shortage_amount
FROM ingredients i
LEFT JOIN stock s ON s.ingredient_id = i.id AND s.is_available = true
WHERE i.tenant_id = :tenant_id AND i.is_active = true
GROUP BY i.id, i.name, i.tenant_id, i.low_stock_threshold, i.max_stock_level, i.reorder_point
)
SELECT * FROM stock_analysis WHERE status != 'normal'
ORDER BY
CASE status
WHEN 'critical' THEN 1
WHEN 'low' THEN 2
WHEN 'overstock' THEN 3
END,
shortage_amount DESC
"""
tenants = await self.get_active_tenants()
for tenant_id in tenants:
try:
# Add timeout to prevent hanging connections
# Add timeout to prevent hanging connections
async with asyncio.timeout(30): # 30 second timeout
async with self.db_manager.get_background_session() as session:
result = await session.execute(text(query), {"tenant_id": tenant_id})
issues = result.fetchall()
# Use repository for stock analysis
alert_repo = InventoryAlertRepository(session)
issues = await alert_repo.get_stock_issues(tenant_id)
for issue in issues:
# Convert SQLAlchemy Row to dictionary for easier access
issue_dict = dict(issue._mapping) if hasattr(issue, '_mapping') else dict(issue)
await self._process_stock_issue(tenant_id, issue_dict)
await self._process_stock_issue(tenant_id, issue)
except Exception as e:
logger.error("Error checking stock for tenant",
@@ -230,39 +197,24 @@ class InventoryAlertService(BaseAlertService, AlertServiceMixin):
"""Check for products approaching expiry (alerts)"""
try:
self._checks_performed += 1
query = """
SELECT
i.id, i.name, i.tenant_id,
s.id as stock_id, s.expiration_date, s.current_quantity,
EXTRACT(days FROM (s.expiration_date - CURRENT_DATE)) as days_to_expiry
FROM ingredients i
JOIN stock s ON s.ingredient_id = i.id
WHERE s.expiration_date <= CURRENT_DATE + INTERVAL '7 days'
AND s.current_quantity > 0
AND s.is_available = true
AND s.expiration_date IS NOT NULL
ORDER BY s.expiration_date ASC
"""
tenants = await self.get_active_tenants()
# Add timeout to prevent hanging connections
async with asyncio.timeout(30): # 30 second timeout
async with self.db_manager.get_background_session() as session:
result = await session.execute(text(query))
expiring_items = result.fetchall()
# Group by tenant
by_tenant = {}
for item in expiring_items:
# Convert SQLAlchemy Row to dictionary for easier access
item_dict = dict(item._mapping) if hasattr(item, '_mapping') else dict(item)
tenant_id = item_dict['tenant_id']
if tenant_id not in by_tenant:
by_tenant[tenant_id] = []
by_tenant[tenant_id].append(item_dict)
for tenant_id, items in by_tenant.items():
await self._process_expiring_items(tenant_id, items)
alert_repo = InventoryAlertRepository(session)
for tenant_id in tenants:
try:
# Get expiring products for this tenant
items = await alert_repo.get_expiring_products(tenant_id, days_threshold=7)
if items:
await self._process_expiring_items(tenant_id, items)
except Exception as e:
logger.error("Error checking expiring products for tenant",
tenant_id=str(tenant_id),
error=str(e))
except Exception as e:
logger.error("Expiry check failed", error=str(e))
@@ -334,31 +286,23 @@ class InventoryAlertService(BaseAlertService, AlertServiceMixin):
"""Check for temperature breaches (alerts)"""
try:
self._checks_performed += 1
query = """
SELECT
t.id, t.equipment_id as sensor_id, t.storage_location as location,
t.temperature_celsius as temperature,
t.target_temperature_max as max_threshold, t.tenant_id,
COALESCE(t.deviation_minutes, 0) as breach_duration_minutes
FROM temperature_logs t
WHERE t.temperature_celsius > COALESCE(t.target_temperature_max, 25)
AND NOT t.is_within_range
AND COALESCE(t.deviation_minutes, 0) >= 30 -- Only after 30 minutes
AND (t.recorded_at < NOW() - INTERVAL '15 minutes' OR t.alert_triggered = false) -- Avoid spam
ORDER BY t.temperature_celsius DESC, t.deviation_minutes DESC
"""
tenants = await self.get_active_tenants()
# Add timeout to prevent hanging connections
async with asyncio.timeout(30): # 30 second timeout
async with self.db_manager.get_background_session() as session:
result = await session.execute(text(query))
breaches = result.fetchall()
for breach in breaches:
# Convert SQLAlchemy Row to dictionary for easier access
breach_dict = dict(breach._mapping) if hasattr(breach, '_mapping') else dict(breach)
await self._process_temperature_breach(breach_dict)
alert_repo = InventoryAlertRepository(session)
for tenant_id in tenants:
try:
breaches = await alert_repo.get_temperature_breaches(tenant_id, hours_back=24)
for breach in breaches:
await self._process_temperature_breach(breach)
except Exception as e:
logger.error("Error checking temperature breaches for tenant",
tenant_id=str(tenant_id),
error=str(e))
except Exception as e:
logger.error("Temperature check failed", error=str(e))
@@ -405,10 +349,8 @@ class InventoryAlertService(BaseAlertService, AlertServiceMixin):
# Add timeout to prevent hanging connections
async with asyncio.timeout(10): # 10 second timeout for simple update
async with self.db_manager.get_background_session() as session:
await session.execute(
text("UPDATE temperature_logs SET alert_triggered = true WHERE id = :id"),
{"id": breach['id']}
)
alert_repo = InventoryAlertRepository(session)
await alert_repo.mark_temperature_alert_triggered(breach['id'])
except Exception as e:
logger.error("Error processing temperature breach",
@@ -458,20 +400,17 @@ class InventoryAlertService(BaseAlertService, AlertServiceMixin):
"""
tenants = await self.get_active_tenants()
for tenant_id in tenants:
try:
from sqlalchemy import text
# Add timeout to prevent hanging connections
async with asyncio.timeout(30): # 30 second timeout
async with self.db_manager.get_background_session() as session:
result = await session.execute(text(query), {"tenant_id": tenant_id})
recommendations = result.fetchall()
for rec in recommendations:
# Convert SQLAlchemy Row to dictionary for easier access
rec_dict = dict(rec._mapping) if hasattr(rec, '_mapping') else dict(rec)
await self._generate_stock_recommendation(tenant_id, rec_dict)
# Add timeout to prevent hanging connections
async with asyncio.timeout(30): # 30 second timeout
async with self.db_manager.get_background_session() as session:
alert_repo = InventoryAlertRepository(session)
for tenant_id in tenants:
try:
recommendations = await alert_repo.get_reorder_recommendations(tenant_id)
for rec in recommendations:
await self._generate_stock_recommendation(tenant_id, rec)
except Exception as e:
logger.error("Error generating recommendations for tenant",
@@ -559,20 +498,17 @@ class InventoryAlertService(BaseAlertService, AlertServiceMixin):
"""
tenants = await self.get_active_tenants()
for tenant_id in tenants:
try:
from sqlalchemy import text
# Add timeout to prevent hanging connections
async with asyncio.timeout(30): # 30 second timeout
async with self.db_manager.get_background_session() as session:
result = await session.execute(text(query), {"tenant_id": tenant_id})
waste_data = result.fetchall()
for waste in waste_data:
# Convert SQLAlchemy Row to dictionary for easier access
waste_dict = dict(waste._mapping) if hasattr(waste, '_mapping') else dict(waste)
await self._generate_waste_recommendation(tenant_id, waste_dict)
# Add timeout to prevent hanging connections
async with asyncio.timeout(30): # 30 second timeout
async with self.db_manager.get_background_session() as session:
alert_repo = InventoryAlertRepository(session)
for tenant_id in tenants:
try:
waste_data = await alert_repo.get_waste_opportunities(tenant_id)
for waste in waste_data:
await self._generate_waste_recommendation(tenant_id, waste)
except Exception as e:
logger.error("Error generating waste recommendations",
@@ -738,21 +674,11 @@ class InventoryAlertService(BaseAlertService, AlertServiceMixin):
async def get_active_tenants(self) -> List[UUID]:
"""Get list of active tenant IDs from ingredients table (inventory service specific)"""
try:
query = text("SELECT DISTINCT tenant_id FROM ingredients WHERE is_active = true")
# Add timeout to prevent hanging connections
async with asyncio.timeout(10): # 10 second timeout
async with self.db_manager.get_background_session() as session:
result = await session.execute(query)
# Handle PostgreSQL UUID objects properly
tenant_ids = []
for row in result.fetchall():
tenant_id = row.tenant_id
# Convert to UUID if it's not already
if isinstance(tenant_id, UUID):
tenant_ids.append(tenant_id)
else:
tenant_ids.append(UUID(str(tenant_id)))
return tenant_ids
alert_repo = InventoryAlertRepository(session)
return await alert_repo.get_active_tenant_ids()
except Exception as e:
logger.error("Error fetching active tenants from ingredients", error=str(e))
return []
@@ -760,27 +686,15 @@ class InventoryAlertService(BaseAlertService, AlertServiceMixin):
async def get_stock_after_order(self, ingredient_id: str, order_quantity: float) -> Optional[Dict[str, Any]]:
"""Get stock information after hypothetical order"""
try:
query = """
SELECT i.id, i.name,
COALESCE(SUM(s.current_quantity), 0) as current_stock,
i.low_stock_threshold as minimum_stock,
(COALESCE(SUM(s.current_quantity), 0) - :order_quantity) as remaining
FROM ingredients i
LEFT JOIN stock s ON s.ingredient_id = i.id AND s.is_available = true
WHERE i.id = :ingredient_id
GROUP BY i.id, i.name, i.low_stock_threshold
"""
# Add timeout to prevent hanging connections
async with asyncio.timeout(10): # 10 second timeout
async with self.db_manager.get_background_session() as session:
result = await session.execute(text(query), {"ingredient_id": ingredient_id, "order_quantity": order_quantity})
row = result.fetchone()
return dict(row) if row else None
alert_repo = InventoryAlertRepository(session)
return await alert_repo.get_stock_after_order(ingredient_id, order_quantity)
except Exception as e:
logger.error("Error getting stock after order",
ingredient_id=ingredient_id,
logger.error("Error getting stock after order",
ingredient_id=ingredient_id,
error=str(e))
return None

View File

@@ -0,0 +1,583 @@
# ================================================================
# services/inventory/app/services/sustainability_service.py
# ================================================================
"""
Sustainability Service - Environmental Impact & SDG Compliance Tracking
Aligned with UN SDG 12.3 and EU Farm to Fork Strategy
"""
from datetime import datetime, timedelta
from decimal import Decimal
from typing import Dict, Any, Optional, List
from uuid import UUID
import structlog
from sqlalchemy import text
from sqlalchemy.ext.asyncio import AsyncSession
from app.core.config import settings
from app.repositories.stock_movement_repository import StockMovementRepository
from shared.clients.production_client import create_production_client
logger = structlog.get_logger()
# Environmental Impact Constants (Research-based averages for bakery products)
class EnvironmentalConstants:
    """Research-based environmental impact factors for bakery production.

    Literature averages (EU Commission figures and water-footprint studies)
    used to translate kilograms of food waste into CO2, water, and land-use
    estimates for sustainability reporting.
    """

    # kg CO2-equivalent per kg of wasted food.
    # Source: EU Commission, average for baked goods.
    CO2_PER_KG_WASTE = 1.9

    # Water footprint in litres per kg of ingredient.
    WATER_FOOTPRINT = {
        'flour': 1827,    # Wheat flour
        'dairy': 1020,    # Average dairy products
        'eggs': 3265,     # Eggs
        'sugar': 1782,    # Sugar
        'yeast': 500,     # Estimated for yeast
        'fats': 1600,     # Butter/oils average
        'default': 1500,  # Conservative default
    }

    # Land use in square metres per kg of product.
    LAND_USE_PER_KG = 3.4

    # Average number of trees needed to offset one tonne of CO2.
    TREES_PER_TON_CO2 = 50

    # EU bakery industry baseline: 25% average waste.
    EU_BAKERY_BASELINE_WASTE = 0.25

    # UN SDG 12.3 target: 50% waste reduction by 2030.
    SDG_TARGET_REDUCTION = 0.50
class SustainabilityService:
    """Service for calculating environmental impact and SDG compliance"""
    def __init__(self):
        # No instance state is initialized here; the database session and
        # tenant identifier are passed explicitly to each method call.
        pass
async def get_sustainability_metrics(
    self,
    db: AsyncSession,
    tenant_id: UUID,
    start_date: Optional[datetime] = None,
    end_date: Optional[datetime] = None
) -> Dict[str, Any]:
    """Build the full sustainability report for a tenant.

    Aggregates waste data, environmental impact, SDG 12.3 compliance,
    AI-avoided waste, financial impact, and grant readiness for the given
    period (defaults to the trailing 30 days).

    Metrics are aligned with:
    - UN SDG 12.3 (Food waste reduction)
    - EU Farm to Fork Strategy
    - Green Deal objectives

    Raises:
        Exception: re-raised after logging if any sub-calculation fails.
    """
    try:
        # Default window: last 30 days ending now.
        if end_date is None:
            end_date = datetime.now()
        if start_date is None:
            start_date = end_date - timedelta(days=30)

        # Combined production + inventory waste figures.
        waste_data = await self._get_waste_data(db, tenant_id, start_date, end_date)

        # Derived metrics.
        environmental_impact = self._calculate_environmental_impact(waste_data)
        sdg_compliance = await self._calculate_sdg_compliance(
            db, tenant_id, waste_data, start_date, end_date
        )
        avoided_waste = await self._calculate_avoided_waste(
            db, tenant_id, start_date, end_date
        )
        financial_impact = self._calculate_financial_impact(waste_data)

        report = {
            'period': {
                'start_date': start_date.isoformat(),
                'end_date': end_date.isoformat(),
                'days': (end_date - start_date).days
            },
            'waste_metrics': {
                'total_waste_kg': waste_data['total_waste_kg'],
                'production_waste_kg': waste_data['production_waste_kg'],
                'expired_waste_kg': waste_data['expired_waste_kg'],
                'waste_percentage': waste_data['waste_percentage'],
                'waste_by_reason': waste_data['waste_by_reason']
            },
            'environmental_impact': environmental_impact,
            'sdg_compliance': sdg_compliance,
            'avoided_waste': avoided_waste,
            'financial_impact': financial_impact,
            'grant_readiness': self._assess_grant_readiness(sdg_compliance)
        }
        return report
    except Exception as e:
        logger.error("Failed to calculate sustainability metrics",
                     tenant_id=str(tenant_id), error=str(e))
        raise
async def _get_waste_data(
    self,
    db: AsyncSession,
    tenant_id: UUID,
    start_date: datetime,
    end_date: datetime
) -> Dict[str, Any]:
    """Aggregate waste data from the production service and local inventory.

    Returns a dict with total/production/expired waste (kg), the waste
    percentage relative to planned production, and a breakdown by reason.

    Raises:
        Exception: re-raised after logging if either data source fails.
    """
    try:
        # Production-side waste via HTTP call to the production service.
        production_waste_data = await self._get_production_waste_data(
            tenant_id, start_date, end_date
        )
        prod_data = production_waste_data if production_waste_data else {
            'total_production_waste': 0,
            'total_defects': 0,
            'total_planned': 0,
            'total_actual': 0
        }

        # Inventory-side waste (expired/damaged stock) via the repository;
        # this returns a single aggregated kg total.
        stock_movement_repo = StockMovementRepository(db)
        inventory_waste = await stock_movement_repo.get_inventory_waste_total(
            tenant_id=tenant_id,
            start_date=start_date,
            end_date=end_date
        )

        # Calculate totals.
        production_waste = float(prod_data.get('total_production_waste', 0) or 0)
        defect_waste = float(prod_data.get('total_defects', 0) or 0)
        total_waste = production_waste + defect_waste + inventory_waste
        total_production = float(prod_data.get('total_planned', 0) or 0)
        waste_percentage = (total_waste / total_production * 100) if total_production > 0 else 0

        # Categorize waste by reason (inventory split is an estimate).
        waste_by_reason = {
            'production_defects': defect_waste,
            'production_waste': production_waste - defect_waste,
            'expired_inventory': inventory_waste * 0.7,  # Estimate: 70% expires
            'damaged_inventory': inventory_waste * 0.3,  # Estimate: 30% damaged
        }

        return {
            'total_waste_kg': total_waste,
            'production_waste_kg': production_waste + defect_waste,
            'expired_waste_kg': inventory_waste,
            'waste_percentage': waste_percentage,
            'total_production_kg': total_production,
            'waste_by_reason': waste_by_reason,
            # BUG FIX: the previous code read `inv_data.waste_incidents`, but
            # `inv_data` was never defined (a leftover from the removed
            # raw-SQL version), so every call raised NameError. The aggregated
            # repository total does not expose an incident count, so report 0
            # until the repository provides one.
            'waste_incidents': 0,
        }
    except Exception as e:
        logger.error("Failed to get waste data", error=str(e))
        raise
async def _get_production_waste_data(
    self,
    tenant_id: UUID,
    start_date: datetime,
    end_date: datetime
) -> Optional[Dict[str, Any]]:
    """Fetch production waste analytics from the production service.

    Uses the shared production client (handles authentication and
    resilience). Never propagates errors: when the client fails or
    returns nothing, a zeroed metrics dict is returned so downstream
    calculations can continue.
    """
    # Fallback payload used for both "no data" and error paths.
    zero_metrics = {
        'total_production_waste': 0,
        'total_defects': 0,
        'total_planned': 0,
        'total_actual': 0
    }
    try:
        production_client = create_production_client(settings)
        data = await production_client.get_waste_analytics(
            str(tenant_id),
            start_date.isoformat(),
            end_date.isoformat()
        )
        if not data:
            # Client returned None — fall back to zeros.
            logger.warning(
                "Production waste analytics returned None, using zeros",
                tenant_id=str(tenant_id)
            )
            return dict(zero_metrics)
        logger.info(
            "Retrieved production waste data via production client",
            tenant_id=str(tenant_id),
            total_waste=data.get('total_production_waste', 0)
        )
        return data
    except Exception as e:
        logger.error(
            "Error calling production service for waste data via client",
            error=str(e),
            tenant_id=str(tenant_id)
        )
        # Zeros keep the sustainability flow alive on failure.
        return dict(zero_metrics)
def _calculate_environmental_impact(self, waste_data: Dict[str, Any]) -> Dict[str, Any]:
"""Calculate environmental impact of food waste"""
try:
total_waste_kg = waste_data['total_waste_kg']
# CO2 emissions
co2_emissions_kg = total_waste_kg * EnvironmentalConstants.CO2_PER_KG_WASTE
co2_emissions_tons = co2_emissions_kg / 1000
# Equivalent trees to offset
trees_equivalent = co2_emissions_tons * EnvironmentalConstants.TREES_PER_TON_CO2
# Water footprint (using average for bakery products)
water_liters = total_waste_kg * EnvironmentalConstants.WATER_FOOTPRINT['default']
# Land use
land_use_m2 = total_waste_kg * EnvironmentalConstants.LAND_USE_PER_KG
# Human-readable equivalents for marketing
equivalents = {
'car_km': co2_emissions_kg / 0.12, # Average car emits 120g CO2/km
'smartphone_charges': (co2_emissions_kg * 1000) / 8, # 8g CO2 per charge
'showers': water_liters / 65, # Average shower uses 65L
'trees_year_growth': trees_equivalent
}
return {
'co2_emissions': {
'kg': round(co2_emissions_kg, 2),
'tons': round(co2_emissions_tons, 4),
'trees_to_offset': round(trees_equivalent, 1)
},
'water_footprint': {
'liters': round(water_liters, 2),
'cubic_meters': round(water_liters / 1000, 2)
},
'land_use': {
'square_meters': round(land_use_m2, 2),
'hectares': round(land_use_m2 / 10000, 4)
},
'human_equivalents': {
'car_km_equivalent': round(equivalents['car_km'], 0),
'smartphone_charges': round(equivalents['smartphone_charges'], 0),
'showers_equivalent': round(equivalents['showers'], 0),
'trees_planted': round(equivalents['trees_year_growth'], 1)
}
}
except Exception as e:
logger.error("Failed to calculate environmental impact", error=str(e))
raise
async def _calculate_sdg_compliance(
self,
db: AsyncSession,
tenant_id: UUID,
waste_data: Dict[str, Any],
start_date: datetime,
end_date: datetime
) -> Dict[str, Any]:
"""
Calculate compliance with UN SDG 12.3
Target: Halve per capita global food waste by 2030
"""
try:
# Get baseline (first 90 days of operation or industry average)
baseline = await self._get_baseline_waste(db, tenant_id)
current_waste_percentage = waste_data['waste_percentage']
baseline_percentage = baseline.get('waste_percentage', EnvironmentalConstants.EU_BAKERY_BASELINE_WASTE * 100)
# Calculate reduction from baseline
if baseline_percentage > 0:
reduction_percentage = ((baseline_percentage - current_waste_percentage) / baseline_percentage) * 100
else:
reduction_percentage = 0
# SDG 12.3 target is 50% reduction
sdg_target = baseline_percentage * (1 - EnvironmentalConstants.SDG_TARGET_REDUCTION)
progress_to_target = (reduction_percentage / (EnvironmentalConstants.SDG_TARGET_REDUCTION * 100)) * 100
# Status assessment
if reduction_percentage >= 50:
status = 'sdg_compliant'
status_label = 'SDG 12.3 Compliant'
elif reduction_percentage >= 30:
status = 'on_track'
status_label = 'On Track to Compliance'
elif reduction_percentage >= 10:
status = 'progressing'
status_label = 'Making Progress'
else:
status = 'baseline'
status_label = 'Establishing Baseline'
return {
'sdg_12_3': {
'baseline_waste_percentage': round(baseline_percentage, 2),
'current_waste_percentage': round(current_waste_percentage, 2),
'reduction_achieved': round(reduction_percentage, 2),
'target_reduction': 50.0,
'progress_to_target': round(min(progress_to_target, 100), 1),
'status': status,
'status_label': status_label,
'target_waste_percentage': round(sdg_target, 2)
},
'baseline_period': baseline.get('period', 'industry_average'),
'certification_ready': reduction_percentage >= 50,
'improvement_areas': self._identify_improvement_areas(waste_data)
}
except Exception as e:
logger.error("Failed to calculate SDG compliance", error=str(e))
raise
async def _get_baseline_waste(
self,
db: AsyncSession,
tenant_id: UUID
) -> Dict[str, Any]:
"""Get baseline waste percentage from production service using shared client"""
try:
# Use the shared production client with proper authentication and resilience
production_client = create_production_client(settings)
baseline_data = await production_client.get_baseline(str(tenant_id))
if baseline_data and baseline_data.get('data_available', False):
# Production service has real baseline data
logger.info(
"Retrieved baseline from production service via client",
tenant_id=str(tenant_id),
baseline_percentage=baseline_data.get('waste_percentage', 0)
)
return {
'waste_percentage': baseline_data['waste_percentage'],
'period': baseline_data['period'].get('type', 'first_90_days'),
'total_production_kg': baseline_data.get('total_production_kg', 0),
'total_waste_kg': baseline_data.get('total_waste_kg', 0)
}
else:
# Production service doesn't have enough data yet
logger.info(
"Production service baseline not available, using industry average",
tenant_id=str(tenant_id)
)
return {
'waste_percentage': EnvironmentalConstants.EU_BAKERY_BASELINE_WASTE * 100,
'period': 'industry_average',
'note': 'Using EU bakery industry average of 25% as baseline'
}
except Exception as e:
logger.warning(
"Error calling production service for baseline via client, using industry average",
error=str(e),
tenant_id=str(tenant_id)
)
# Fallback to industry average
return {
'waste_percentage': EnvironmentalConstants.EU_BAKERY_BASELINE_WASTE * 100,
'period': 'industry_average',
'note': 'Using EU bakery industry average of 25% as baseline'
}
async def _calculate_avoided_waste(
self,
db: AsyncSession,
tenant_id: UUID,
start_date: datetime,
end_date: datetime
) -> Dict[str, Any]:
"""
Calculate waste avoided through AI predictions and smart planning
This is a KEY metric for marketing and grant applications
"""
try:
# Get AI-assisted batch data from production service
production_data = await self._get_production_waste_data(tenant_id, start_date, end_date)
# Extract data with AI batch tracking
total_planned = production_data.get('total_planned', 0) if production_data else 0
total_waste = production_data.get('total_production_waste', 0) if production_data else 0
ai_assisted_batches = production_data.get('ai_assisted_batches', 0) if production_data else 0
# Estimate waste avoided by comparing to industry average
if total_planned > 0:
# Industry average waste: 25%
# Current actual waste from production
industry_expected_waste = total_planned * EnvironmentalConstants.EU_BAKERY_BASELINE_WASTE
actual_waste = total_waste
estimated_avoided = max(0, industry_expected_waste - actual_waste)
# Calculate environmental impact of avoided waste
avoided_co2 = estimated_avoided * EnvironmentalConstants.CO2_PER_KG_WASTE
avoided_water = estimated_avoided * EnvironmentalConstants.WATER_FOOTPRINT['default']
return {
'waste_avoided_kg': round(estimated_avoided, 2),
'ai_assisted_batches': ai_assisted_batches,
'environmental_impact_avoided': {
'co2_kg': round(avoided_co2, 2),
'water_liters': round(avoided_water, 2)
},
'methodology': 'compared_to_industry_baseline'
}
else:
return {
'waste_avoided_kg': 0,
'ai_assisted_batches': 0,
'note': 'Insufficient data for avoided waste calculation'
}
except Exception as e:
logger.error("Failed to calculate avoided waste", error=str(e))
return {'waste_avoided_kg': 0, 'error': str(e)}
def _calculate_financial_impact(self, waste_data: Dict[str, Any]) -> Dict[str, Any]:
"""Calculate financial impact of food waste"""
# Average cost per kg of bakery products: €3.50
avg_cost_per_kg = 3.50
total_waste_kg = waste_data['total_waste_kg']
waste_cost = total_waste_kg * avg_cost_per_kg
# If waste was reduced by 30%, potential savings
potential_savings = waste_cost * 0.30
return {
'waste_cost_eur': round(waste_cost, 2),
'cost_per_kg': avg_cost_per_kg,
'potential_monthly_savings': round(potential_savings, 2),
'annual_projection': round(waste_cost * 12, 2)
}
def _identify_improvement_areas(self, waste_data: Dict[str, Any]) -> List[str]:
"""Identify areas for improvement based on waste data"""
areas = []
waste_by_reason = waste_data.get('waste_by_reason', {})
if waste_by_reason.get('production_defects', 0) > waste_data['total_waste_kg'] * 0.3:
areas.append('quality_control_in_production')
if waste_by_reason.get('expired_inventory', 0) > waste_data['total_waste_kg'] * 0.4:
areas.append('inventory_rotation_management')
if waste_data.get('waste_percentage', 0) > 20:
areas.append('demand_forecasting_accuracy')
if not areas:
areas.append('maintain_current_practices')
return areas
def _assess_grant_readiness(self, sdg_compliance: Dict[str, Any]) -> Dict[str, Any]:
"""Assess readiness for various grant programs"""
reduction = sdg_compliance['sdg_12_3']['reduction_achieved']
grants = {
'eu_horizon_europe': {
'eligible': reduction >= 30,
'confidence': 'high' if reduction >= 50 else 'medium' if reduction >= 30 else 'low',
'requirements_met': reduction >= 30
},
'eu_farm_to_fork': {
'eligible': reduction >= 20,
'confidence': 'high' if reduction >= 40 else 'medium' if reduction >= 20 else 'low',
'requirements_met': reduction >= 20
},
'national_circular_economy': {
'eligible': reduction >= 15,
'confidence': 'high' if reduction >= 25 else 'medium' if reduction >= 15 else 'low',
'requirements_met': reduction >= 15
},
'un_sdg_certified': {
'eligible': reduction >= 50,
'confidence': 'high' if reduction >= 50 else 'low',
'requirements_met': reduction >= 50
}
}
overall_readiness = sum(1 for g in grants.values() if g['eligible']) / len(grants) * 100
return {
'overall_readiness_percentage': round(overall_readiness, 1),
'grant_programs': grants,
'recommended_applications': [
name for name, details in grants.items() if details['eligible']
]
}
async def export_grant_report(
self,
db: AsyncSession,
tenant_id: UUID,
grant_type: str = 'general',
start_date: Optional[datetime] = None,
end_date: Optional[datetime] = None
) -> Dict[str, Any]:
"""
Generate export-ready report for grant applications
Formats data according to common grant application requirements
"""
try:
metrics = await self.get_sustainability_metrics(
db, tenant_id, start_date, end_date
)
# Format for grant applications
report = {
'report_metadata': {
'generated_at': datetime.now().isoformat(),
'report_type': grant_type,
'period': metrics['period'],
'tenant_id': str(tenant_id)
},
'executive_summary': {
'total_waste_reduced_kg': metrics['waste_metrics']['total_waste_kg'],
'waste_reduction_percentage': metrics['sdg_compliance']['sdg_12_3']['reduction_achieved'],
'co2_emissions_avoided_kg': metrics['environmental_impact']['co2_emissions']['kg'],
'financial_savings_eur': metrics['financial_impact']['waste_cost_eur'],
'sdg_compliance_status': metrics['sdg_compliance']['sdg_12_3']['status_label']
},
'detailed_metrics': metrics,
'certifications': {
'sdg_12_3_compliant': metrics['sdg_compliance']['certification_ready'],
'grant_programs_eligible': metrics['grant_readiness']['recommended_applications']
},
'supporting_data': {
'baseline_comparison': {
'baseline': metrics['sdg_compliance']['sdg_12_3']['baseline_waste_percentage'],
'current': metrics['sdg_compliance']['sdg_12_3']['current_waste_percentage'],
'improvement': metrics['sdg_compliance']['sdg_12_3']['reduction_achieved']
},
'environmental_benefits': metrics['environmental_impact'],
'financial_benefits': metrics['financial_impact']
}
}
return report
except Exception as e:
logger.error("Failed to generate grant report", error=str(e))
raise

View File

@@ -126,6 +126,27 @@ async def create_stock_batches_for_ingredient(
stocks = []
num_batches = random.randint(1, 2) # Reduced from 3-5 for faster demo loading
# Calculate target total stock for this ingredient
# Use 40-80% of max_stock_level to allow for realistic variation
# If max_stock_level is not set, use reorder_point * 3 as a reasonable target
if ingredient.max_stock_level:
target_total_stock = float(ingredient.max_stock_level) * random.uniform(0.4, 0.8)
else:
target_total_stock = float(ingredient.reorder_point or 50.0) * 3.0
# Distribute total stock across batches
batch_quantities = []
remaining = target_total_stock
for i in range(num_batches):
if i == num_batches - 1:
# Last batch gets whatever is remaining
batch_quantities.append(remaining)
else:
# Earlier batches get a random portion of remaining
portion = remaining * random.uniform(0.3, 0.7)
batch_quantities.append(portion)
remaining -= portion
for i in range(num_batches):
# Calculate expiration days offset
days_offset = calculate_expiration_distribution()
@@ -146,17 +167,11 @@ async def create_stock_batches_for_ingredient(
quality_status = "good"
is_available = True
# Generate quantities
if ingredient.unit_of_measure.value in ['kg', 'l']:
current_quantity = round(random.uniform(5.0, 50.0), 2)
reserved_quantity = round(random.uniform(0.0, current_quantity * 0.3), 2) if is_available else 0.0
elif ingredient.unit_of_measure.value in ['g', 'ml']:
current_quantity = round(random.uniform(500.0, 5000.0), 2)
reserved_quantity = round(random.uniform(0.0, current_quantity * 0.3), 2) if is_available else 0.0
else: # units, pieces, etc.
current_quantity = float(random.randint(10, 200))
reserved_quantity = float(random.randint(0, int(current_quantity * 0.3))) if is_available else 0.0
# Use pre-calculated batch quantity
current_quantity = round(batch_quantities[i], 2)
# Reserve 0-30% of current quantity if available
reserved_quantity = round(random.uniform(0.0, current_quantity * 0.3), 2) if is_available else 0.0
available_quantity = current_quantity - reserved_quantity
# Calculate costs with variation