Improve the frontend and repository layer
This commit is contained in:
@@ -10,6 +10,7 @@ from decimal import Decimal
|
||||
from typing import List, Optional, Dict, Any
|
||||
from uuid import UUID
|
||||
import structlog
|
||||
from sqlalchemy import text
|
||||
|
||||
from app.core.config import settings
|
||||
from app.services.inventory_service import InventoryService
|
||||
@@ -17,6 +18,7 @@ from app.services.food_safety_service import FoodSafetyService
|
||||
from app.repositories.ingredient_repository import IngredientRepository
|
||||
from app.repositories.stock_repository import StockRepository
|
||||
from app.repositories.stock_movement_repository import StockMovementRepository
|
||||
from app.repositories.dashboard_repository import DashboardRepository
|
||||
from app.schemas.dashboard import (
|
||||
InventoryDashboardSummary,
|
||||
BusinessModelInsights,
|
||||
@@ -40,20 +42,23 @@ class DashboardService:
|
||||
food_safety_service: FoodSafetyService,
|
||||
ingredient_repository: Optional[IngredientRepository] = None,
|
||||
stock_repository: Optional[StockRepository] = None,
|
||||
stock_movement_repository: Optional[StockMovementRepository] = None
|
||||
stock_movement_repository: Optional[StockMovementRepository] = None,
|
||||
dashboard_repository: Optional[DashboardRepository] = None
|
||||
):
|
||||
self.inventory_service = inventory_service
|
||||
self.food_safety_service = food_safety_service
|
||||
self._ingredient_repository = ingredient_repository
|
||||
self._stock_repository = stock_repository
|
||||
self._stock_movement_repository = stock_movement_repository
|
||||
self._dashboard_repository = dashboard_repository
|
||||
|
||||
def _get_repositories(self, db):
|
||||
"""Get repository instances for the current database session"""
|
||||
return {
|
||||
'ingredient_repo': self._ingredient_repository or IngredientRepository(db),
|
||||
'stock_repo': self._stock_repository or StockRepository(db),
|
||||
'stock_movement_repo': self._stock_movement_repository or StockMovementRepository(db)
|
||||
'stock_movement_repo': self._stock_movement_repository or StockMovementRepository(db),
|
||||
'dashboard_repo': self._dashboard_repository or DashboardRepository(db)
|
||||
}
|
||||
|
||||
async def get_inventory_dashboard_summary(
|
||||
@@ -75,22 +80,26 @@ class DashboardService:
|
||||
# Get business model insights
|
||||
business_model = await self._detect_business_model(db, tenant_id)
|
||||
|
||||
# Get dashboard repository
|
||||
repos = self._get_repositories(db)
|
||||
dashboard_repo = repos['dashboard_repo']
|
||||
|
||||
# Get category breakdown
|
||||
stock_by_category = await self._get_stock_by_category(db, tenant_id)
|
||||
|
||||
stock_by_category = await dashboard_repo.get_stock_by_category(tenant_id)
|
||||
|
||||
# Get alerts breakdown
|
||||
alerts_by_severity = await self._get_alerts_by_severity(db, tenant_id)
|
||||
|
||||
alerts_by_severity = await dashboard_repo.get_alerts_by_severity(tenant_id)
|
||||
|
||||
# Get movements breakdown
|
||||
movements_by_type = await self._get_movements_by_type(db, tenant_id)
|
||||
movements_by_type = await dashboard_repo.get_movements_by_type(tenant_id)
|
||||
|
||||
# Get performance indicators
|
||||
performance_metrics = await self._calculate_performance_indicators(db, tenant_id)
|
||||
|
||||
# Get trending data
|
||||
stock_value_trend = await self._get_stock_value_trend(db, tenant_id, days=30)
|
||||
alert_trend = await self._get_alert_trend(db, tenant_id, days=30)
|
||||
|
||||
alert_trend = await dashboard_repo.get_alert_trend(tenant_id, days=30)
|
||||
|
||||
# Recent activity
|
||||
recent_activity = await self.get_recent_activity(db, tenant_id, limit=10)
|
||||
|
||||
@@ -200,26 +209,10 @@ class DashboardService:
|
||||
ingredients = await repos['ingredient_repo'].get_ingredients_by_tenant(tenant_id, limit=1000)
|
||||
stock_summary = await repos['stock_repo'].get_stock_summary_by_tenant(tenant_id)
|
||||
|
||||
# Get current stock levels for all ingredients using a direct query
|
||||
# Get current stock levels for all ingredients using repository
|
||||
ingredient_stock_levels = {}
|
||||
try:
|
||||
from sqlalchemy import text
|
||||
|
||||
# Query to get current stock for all ingredients
|
||||
stock_query = text("""
|
||||
SELECT
|
||||
i.id as ingredient_id,
|
||||
COALESCE(SUM(s.available_quantity), 0) as current_stock
|
||||
FROM ingredients i
|
||||
LEFT JOIN stock s ON i.id = s.ingredient_id AND s.is_available = true
|
||||
WHERE i.tenant_id = :tenant_id AND i.is_active = true
|
||||
GROUP BY i.id
|
||||
""")
|
||||
|
||||
result = await db.execute(stock_query, {"tenant_id": tenant_id})
|
||||
for row in result.fetchall():
|
||||
ingredient_stock_levels[str(row.ingredient_id)] = float(row.current_stock)
|
||||
|
||||
ingredient_stock_levels = await dashboard_repo.get_ingredient_stock_levels(tenant_id)
|
||||
except Exception as e:
|
||||
logger.warning(f"Could not fetch current stock levels: {e}")
|
||||
|
||||
@@ -320,50 +313,29 @@ class DashboardService:
|
||||
) -> List[StockStatusSummary]:
|
||||
"""Get stock status breakdown by category"""
|
||||
try:
|
||||
query = """
|
||||
SELECT
|
||||
COALESCE(i.ingredient_category::text, i.product_category::text, 'other') as category,
|
||||
COUNT(DISTINCT i.id) as total_ingredients,
|
||||
COUNT(CASE WHEN s.available_quantity > i.low_stock_threshold THEN 1 END) as in_stock,
|
||||
COUNT(CASE WHEN s.available_quantity <= i.low_stock_threshold AND s.available_quantity > 0 THEN 1 END) as low_stock,
|
||||
COUNT(CASE WHEN COALESCE(s.available_quantity, 0) = 0 THEN 1 END) as out_of_stock,
|
||||
COALESCE(SUM(s.available_quantity * s.unit_cost), 0) as total_value
|
||||
FROM ingredients i
|
||||
LEFT JOIN (
|
||||
SELECT
|
||||
ingredient_id,
|
||||
SUM(available_quantity) as available_quantity,
|
||||
AVG(unit_cost) as unit_cost
|
||||
FROM stock
|
||||
WHERE tenant_id = :tenant_id AND is_available = true
|
||||
GROUP BY ingredient_id
|
||||
) s ON i.id = s.ingredient_id
|
||||
WHERE i.tenant_id = :tenant_id AND i.is_active = true
|
||||
GROUP BY category
|
||||
ORDER BY total_value DESC
|
||||
"""
|
||||
|
||||
result = await db.execute(query, {"tenant_id": tenant_id})
|
||||
rows = result.fetchall()
|
||||
|
||||
repos = self._get_repositories(db)
|
||||
dashboard_repo = repos['dashboard_repo']
|
||||
|
||||
rows = await dashboard_repo.get_stock_status_by_category(tenant_id)
|
||||
|
||||
summaries = []
|
||||
total_value = sum(row.total_value for row in rows)
|
||||
|
||||
total_value = sum(row["total_value"] for row in rows)
|
||||
|
||||
for row in rows:
|
||||
percentage = (row.total_value / total_value * 100) if total_value > 0 else 0
|
||||
|
||||
percentage = (row["total_value"] / total_value * 100) if total_value > 0 else 0
|
||||
|
||||
summaries.append(StockStatusSummary(
|
||||
category=row.category,
|
||||
total_ingredients=row.total_ingredients,
|
||||
in_stock=row.in_stock,
|
||||
low_stock=row.low_stock,
|
||||
out_of_stock=row.out_of_stock,
|
||||
total_value=Decimal(str(row.total_value)),
|
||||
category=row["category"],
|
||||
total_ingredients=row["total_ingredients"],
|
||||
in_stock=row["in_stock"],
|
||||
low_stock=row["low_stock"],
|
||||
out_of_stock=row["out_of_stock"],
|
||||
total_value=Decimal(str(row["total_value"])),
|
||||
percentage_of_total=Decimal(str(percentage))
|
||||
))
|
||||
|
||||
|
||||
return summaries
|
||||
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Failed to get stock status by category", error=str(e))
|
||||
raise
|
||||
@@ -376,58 +348,30 @@ class DashboardService:
|
||||
) -> List[AlertSummary]:
|
||||
"""Get alerts summary by type and severity"""
|
||||
try:
|
||||
# Build query with filters
|
||||
where_conditions = ["tenant_id = :tenant_id", "status = 'active'"]
|
||||
params = {"tenant_id": tenant_id}
|
||||
|
||||
if filters:
|
||||
if filters.alert_types:
|
||||
where_conditions.append("alert_type = ANY(:alert_types)")
|
||||
params["alert_types"] = filters.alert_types
|
||||
|
||||
if filters.severities:
|
||||
where_conditions.append("severity = ANY(:severities)")
|
||||
params["severities"] = filters.severities
|
||||
|
||||
if filters.date_from:
|
||||
where_conditions.append("created_at >= :date_from")
|
||||
params["date_from"] = filters.date_from
|
||||
|
||||
if filters.date_to:
|
||||
where_conditions.append("created_at <= :date_to")
|
||||
params["date_to"] = filters.date_to
|
||||
|
||||
where_clause = " AND ".join(where_conditions)
|
||||
|
||||
query = f"""
|
||||
SELECT
|
||||
alert_type,
|
||||
severity,
|
||||
COUNT(*) as count,
|
||||
MIN(EXTRACT(EPOCH FROM (NOW() - created_at))/3600)::int as oldest_alert_age_hours,
|
||||
AVG(CASE WHEN resolved_at IS NOT NULL
|
||||
THEN EXTRACT(EPOCH FROM (resolved_at - created_at))/3600
|
||||
ELSE NULL END)::int as avg_resolution_hours
|
||||
FROM food_safety_alerts
|
||||
WHERE {where_clause}
|
||||
GROUP BY alert_type, severity
|
||||
ORDER BY severity DESC, count DESC
|
||||
"""
|
||||
|
||||
result = await db.execute(query, params)
|
||||
rows = result.fetchall()
|
||||
|
||||
repos = self._get_repositories(db)
|
||||
dashboard_repo = repos['dashboard_repo']
|
||||
|
||||
# Extract filter parameters
|
||||
alert_types = filters.alert_types if filters else None
|
||||
severities = filters.severities if filters else None
|
||||
date_from = filters.date_from if filters else None
|
||||
date_to = filters.date_to if filters else None
|
||||
|
||||
rows = await dashboard_repo.get_alerts_summary(
|
||||
tenant_id, alert_types, severities, date_from, date_to
|
||||
)
|
||||
|
||||
return [
|
||||
AlertSummary(
|
||||
alert_type=row.alert_type,
|
||||
severity=row.severity,
|
||||
count=row.count,
|
||||
oldest_alert_age_hours=row.oldest_alert_age_hours,
|
||||
average_resolution_time_hours=row.avg_resolution_hours
|
||||
alert_type=row["alert_type"],
|
||||
severity=row["severity"],
|
||||
count=row["count"],
|
||||
oldest_alert_age_hours=row["oldest_alert_age_hours"],
|
||||
average_resolution_time_hours=row["average_resolution_time_hours"]
|
||||
)
|
||||
for row in rows
|
||||
]
|
||||
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Failed to get alerts summary", error=str(e))
|
||||
raise
|
||||
@@ -441,81 +385,39 @@ class DashboardService:
|
||||
) -> List[RecentActivity]:
|
||||
"""Get recent inventory activity"""
|
||||
try:
|
||||
repos = self._get_repositories(db)
|
||||
dashboard_repo = repos['dashboard_repo']
|
||||
|
||||
activities = []
|
||||
|
||||
|
||||
# Get recent stock movements
|
||||
stock_query = """
|
||||
SELECT
|
||||
'stock_movement' as activity_type,
|
||||
CASE
|
||||
WHEN movement_type = 'PURCHASE' THEN 'Stock added: ' || i.name || ' (' || sm.quantity || ' ' || i.unit_of_measure::text || ')'
|
||||
WHEN movement_type = 'PRODUCTION_USE' THEN 'Stock consumed: ' || i.name || ' (' || sm.quantity || ' ' || i.unit_of_measure::text || ')'
|
||||
WHEN movement_type = 'WASTE' THEN 'Stock wasted: ' || i.name || ' (' || sm.quantity || ' ' || i.unit_of_measure::text || ')'
|
||||
WHEN movement_type = 'ADJUSTMENT' THEN 'Stock adjusted: ' || i.name || ' (' || sm.quantity || ' ' || i.unit_of_measure::text || ')'
|
||||
ELSE 'Stock movement: ' || i.name
|
||||
END as description,
|
||||
sm.movement_date as timestamp,
|
||||
sm.created_by as user_id,
|
||||
CASE
|
||||
WHEN movement_type = 'WASTE' THEN 'high'
|
||||
WHEN movement_type = 'ADJUSTMENT' THEN 'medium'
|
||||
ELSE 'low'
|
||||
END as impact_level,
|
||||
sm.id as entity_id,
|
||||
'stock_movement' as entity_type
|
||||
FROM stock_movements sm
|
||||
JOIN ingredients i ON sm.ingredient_id = i.id
|
||||
WHERE i.tenant_id = :tenant_id
|
||||
ORDER BY sm.movement_date DESC
|
||||
LIMIT :limit
|
||||
"""
|
||||
|
||||
result = await db.execute(stock_query, {"tenant_id": tenant_id, "limit": limit // 2})
|
||||
for row in result.fetchall():
|
||||
stock_movements = await dashboard_repo.get_recent_stock_movements(tenant_id, limit // 2)
|
||||
for row in stock_movements:
|
||||
activities.append(RecentActivity(
|
||||
activity_type=row.activity_type,
|
||||
description=row.description,
|
||||
timestamp=row.timestamp,
|
||||
impact_level=row.impact_level,
|
||||
entity_id=row.entity_id,
|
||||
entity_type=row.entity_type
|
||||
activity_type=row["activity_type"],
|
||||
description=row["description"],
|
||||
timestamp=row["timestamp"],
|
||||
impact_level=row["impact_level"],
|
||||
entity_id=row["entity_id"],
|
||||
entity_type=row["entity_type"]
|
||||
))
|
||||
|
||||
|
||||
# Get recent food safety alerts
|
||||
alert_query = """
|
||||
SELECT
|
||||
'food_safety_alert' as activity_type,
|
||||
title as description,
|
||||
created_at as timestamp,
|
||||
created_by as user_id,
|
||||
CASE
|
||||
WHEN severity = 'critical' THEN 'high'
|
||||
WHEN severity = 'high' THEN 'medium'
|
||||
ELSE 'low'
|
||||
END as impact_level,
|
||||
id as entity_id,
|
||||
'food_safety_alert' as entity_type
|
||||
FROM food_safety_alerts
|
||||
WHERE tenant_id = :tenant_id
|
||||
ORDER BY created_at DESC
|
||||
LIMIT :limit
|
||||
"""
|
||||
|
||||
result = await db.execute(alert_query, {"tenant_id": tenant_id, "limit": limit // 2})
|
||||
for row in result.fetchall():
|
||||
safety_alerts = await dashboard_repo.get_recent_food_safety_alerts(tenant_id, limit // 2)
|
||||
for row in safety_alerts:
|
||||
activities.append(RecentActivity(
|
||||
activity_type=row.activity_type,
|
||||
description=row.description,
|
||||
timestamp=row.timestamp,
|
||||
impact_level=row.impact_level,
|
||||
entity_id=row.entity_id,
|
||||
entity_type=row.entity_type
|
||||
activity_type=row["activity_type"],
|
||||
description=row["description"],
|
||||
timestamp=row["timestamp"],
|
||||
impact_level=row["impact_level"],
|
||||
entity_id=row["entity_id"],
|
||||
entity_type=row["entity_type"]
|
||||
))
|
||||
|
||||
|
||||
# Sort by timestamp and limit
|
||||
activities.sort(key=lambda x: x.timestamp, reverse=True)
|
||||
return activities[:limit]
|
||||
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Failed to get recent activity", error=str(e))
|
||||
raise
|
||||
@@ -523,34 +425,11 @@ class DashboardService:
|
||||
async def get_live_metrics(self, db, tenant_id: UUID) -> Dict[str, Any]:
|
||||
"""Get real-time inventory metrics"""
|
||||
try:
|
||||
query = """
|
||||
SELECT
|
||||
COUNT(DISTINCT i.id) as total_ingredients,
|
||||
COUNT(CASE WHEN s.available_quantity > i.low_stock_threshold THEN 1 END) as in_stock,
|
||||
COUNT(CASE WHEN s.available_quantity <= i.low_stock_threshold THEN 1 END) as low_stock,
|
||||
COUNT(CASE WHEN s.available_quantity = 0 THEN 1 END) as out_of_stock,
|
||||
COALESCE(SUM(s.available_quantity * s.unit_cost), 0) as total_value,
|
||||
COUNT(CASE WHEN s.expiration_date < NOW() THEN 1 END) as expired_items,
|
||||
COUNT(CASE WHEN s.expiration_date BETWEEN NOW() AND NOW() + INTERVAL '7 days' THEN 1 END) as expiring_soon
|
||||
FROM ingredients i
|
||||
LEFT JOIN stock s ON i.id = s.ingredient_id AND s.is_available = true
|
||||
WHERE i.tenant_id = :tenant_id AND i.is_active = true
|
||||
"""
|
||||
|
||||
result = await db.execute(query, {"tenant_id": tenant_id})
|
||||
metrics = result.fetchone()
|
||||
|
||||
return {
|
||||
"total_ingredients": metrics.total_ingredients,
|
||||
"in_stock": metrics.in_stock,
|
||||
"low_stock": metrics.low_stock,
|
||||
"out_of_stock": metrics.out_of_stock,
|
||||
"total_value": float(metrics.total_value),
|
||||
"expired_items": metrics.expired_items,
|
||||
"expiring_soon": metrics.expiring_soon,
|
||||
"last_updated": datetime.now().isoformat()
|
||||
}
|
||||
|
||||
repos = self._get_repositories(db)
|
||||
dashboard_repo = repos['dashboard_repo']
|
||||
|
||||
return await dashboard_repo.get_live_metrics(tenant_id)
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Failed to get live metrics", error=str(e))
|
||||
raise
|
||||
@@ -607,34 +486,16 @@ class DashboardService:
|
||||
try:
|
||||
if not settings.ENABLE_BUSINESS_MODEL_DETECTION:
|
||||
return {"model": "unknown", "confidence": Decimal("0")}
|
||||
|
||||
|
||||
repos = self._get_repositories(db)
|
||||
dashboard_repo = repos['dashboard_repo']
|
||||
|
||||
# Get ingredient metrics
|
||||
query = """
|
||||
SELECT
|
||||
COUNT(*) as total_ingredients,
|
||||
COUNT(CASE WHEN product_type = 'finished_product' THEN 1 END) as finished_products,
|
||||
COUNT(CASE WHEN product_type = 'ingredient' THEN 1 END) as raw_ingredients,
|
||||
COUNT(DISTINCT st.supplier_id) as supplier_count,
|
||||
AVG(CASE WHEN s.available_quantity IS NOT NULL THEN s.available_quantity ELSE 0 END) as avg_stock_level
|
||||
FROM ingredients i
|
||||
LEFT JOIN (
|
||||
SELECT ingredient_id, SUM(available_quantity) as available_quantity
|
||||
FROM stock WHERE tenant_id = :tenant_id GROUP BY ingredient_id
|
||||
) s ON i.id = s.ingredient_id
|
||||
LEFT JOIN (
|
||||
SELECT ingredient_id, supplier_id
|
||||
FROM stock WHERE tenant_id = :tenant_id AND supplier_id IS NOT NULL
|
||||
GROUP BY ingredient_id, supplier_id
|
||||
) st ON i.id = st.ingredient_id
|
||||
WHERE i.tenant_id = :tenant_id AND i.is_active = true
|
||||
"""
|
||||
|
||||
result = await db.execute(query, {"tenant_id": tenant_id})
|
||||
metrics = result.fetchone()
|
||||
|
||||
metrics = await dashboard_repo.get_business_model_metrics(tenant_id)
|
||||
|
||||
# Business model detection logic
|
||||
total_ingredients = metrics.total_ingredients
|
||||
finished_ratio = metrics.finished_products / total_ingredients if total_ingredients > 0 else 0
|
||||
total_ingredients = metrics["total_ingredients"]
|
||||
finished_ratio = metrics["finished_products"] / total_ingredients if total_ingredients > 0 else 0
|
||||
|
||||
if total_ingredients >= settings.CENTRAL_BAKERY_THRESHOLD_INGREDIENTS:
|
||||
if finished_ratio > 0.3: # More than 30% finished products
|
||||
@@ -659,31 +520,11 @@ class DashboardService:
|
||||
async def _get_stock_by_category(self, db, tenant_id: UUID) -> Dict[str, Any]:
|
||||
"""Get stock breakdown by category"""
|
||||
try:
|
||||
query = """
|
||||
SELECT
|
||||
COALESCE(i.ingredient_category::text, i.product_category::text, 'other') as category,
|
||||
COUNT(*) as count,
|
||||
COALESCE(SUM(s.available_quantity * s.unit_cost), 0) as total_value
|
||||
FROM ingredients i
|
||||
LEFT JOIN (
|
||||
SELECT ingredient_id, SUM(available_quantity) as available_quantity, AVG(unit_cost) as unit_cost
|
||||
FROM stock WHERE tenant_id = :tenant_id GROUP BY ingredient_id
|
||||
) s ON i.id = s.ingredient_id
|
||||
WHERE i.tenant_id = :tenant_id AND i.is_active = true
|
||||
GROUP BY category
|
||||
"""
|
||||
|
||||
result = await db.execute(query, {"tenant_id": tenant_id})
|
||||
categories = {}
|
||||
|
||||
for row in result.fetchall():
|
||||
categories[row.category] = {
|
||||
"count": row.count,
|
||||
"total_value": float(row.total_value)
|
||||
}
|
||||
|
||||
return categories
|
||||
|
||||
repos = self._get_repositories(db)
|
||||
dashboard_repo = repos['dashboard_repo']
|
||||
|
||||
return await dashboard_repo.get_stock_by_category(tenant_id)
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Failed to get stock by category", error=str(e))
|
||||
return {}
|
||||
@@ -691,21 +532,11 @@ class DashboardService:
|
||||
async def _get_alerts_by_severity(self, db, tenant_id: UUID) -> Dict[str, int]:
|
||||
"""Get alerts breakdown by severity"""
|
||||
try:
|
||||
query = """
|
||||
SELECT severity, COUNT(*) as count
|
||||
FROM food_safety_alerts
|
||||
WHERE tenant_id = :tenant_id AND status = 'active'
|
||||
GROUP BY severity
|
||||
"""
|
||||
|
||||
result = await db.execute(query, {"tenant_id": tenant_id})
|
||||
alerts = {"critical": 0, "high": 0, "medium": 0, "low": 0}
|
||||
|
||||
for row in result.fetchall():
|
||||
alerts[row.severity] = row.count
|
||||
|
||||
return alerts
|
||||
|
||||
repos = self._get_repositories(db)
|
||||
dashboard_repo = repos['dashboard_repo']
|
||||
|
||||
return await dashboard_repo.get_alerts_by_severity(tenant_id)
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Failed to get alerts by severity", error=str(e))
|
||||
return {"critical": 0, "high": 0, "medium": 0, "low": 0}
|
||||
@@ -713,23 +544,11 @@ class DashboardService:
|
||||
async def _get_movements_by_type(self, db, tenant_id: UUID) -> Dict[str, int]:
|
||||
"""Get movements breakdown by type"""
|
||||
try:
|
||||
query = """
|
||||
SELECT sm.movement_type, COUNT(*) as count
|
||||
FROM stock_movements sm
|
||||
JOIN ingredients i ON sm.ingredient_id = i.id
|
||||
WHERE i.tenant_id = :tenant_id
|
||||
AND sm.movement_date > NOW() - INTERVAL '7 days'
|
||||
GROUP BY sm.movement_type
|
||||
"""
|
||||
|
||||
result = await db.execute(query, {"tenant_id": tenant_id})
|
||||
movements = {}
|
||||
|
||||
for row in result.fetchall():
|
||||
movements[row.movement_type] = row.count
|
||||
|
||||
return movements
|
||||
|
||||
repos = self._get_repositories(db)
|
||||
dashboard_repo = repos['dashboard_repo']
|
||||
|
||||
return await dashboard_repo.get_movements_by_type(tenant_id)
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Failed to get movements by type", error=str(e))
|
||||
return {}
|
||||
@@ -773,29 +592,11 @@ class DashboardService:
|
||||
async def _get_alert_trend(self, db, tenant_id: UUID, days: int) -> List[Dict[str, Any]]:
|
||||
"""Get alert trend over time"""
|
||||
try:
|
||||
query = """
|
||||
SELECT
|
||||
DATE(created_at) as alert_date,
|
||||
COUNT(*) as alert_count,
|
||||
COUNT(CASE WHEN severity IN ('high', 'critical') THEN 1 END) as high_severity_count
|
||||
FROM food_safety_alerts
|
||||
WHERE tenant_id = :tenant_id
|
||||
AND created_at > NOW() - INTERVAL '%s days'
|
||||
GROUP BY DATE(created_at)
|
||||
ORDER BY alert_date
|
||||
""" % days
|
||||
|
||||
result = await db.execute(query, {"tenant_id": tenant_id})
|
||||
|
||||
return [
|
||||
{
|
||||
"date": row.alert_date.isoformat(),
|
||||
"total_alerts": row.alert_count,
|
||||
"high_severity_alerts": row.high_severity_count
|
||||
}
|
||||
for row in result.fetchall()
|
||||
]
|
||||
|
||||
repos = self._get_repositories(db)
|
||||
dashboard_repo = repos['dashboard_repo']
|
||||
|
||||
return await dashboard_repo.get_alert_trend(tenant_id, days)
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Failed to get alert trend", error=str(e))
|
||||
return []
|
||||
@@ -870,26 +671,10 @@ class DashboardService:
|
||||
# Get ingredients to analyze costs by category
|
||||
ingredients = await repos['ingredient_repo'].get_ingredients_by_tenant(tenant_id, limit=1000)
|
||||
|
||||
# Get current stock levels for all ingredients using a direct query
|
||||
# Get current stock levels for all ingredients using repository
|
||||
ingredient_stock_levels = {}
|
||||
try:
|
||||
from sqlalchemy import text
|
||||
|
||||
# Query to get current stock for all ingredients
|
||||
stock_query = text("""
|
||||
SELECT
|
||||
i.id as ingredient_id,
|
||||
COALESCE(SUM(s.available_quantity), 0) as current_stock
|
||||
FROM ingredients i
|
||||
LEFT JOIN stock s ON i.id = s.ingredient_id AND s.is_available = true
|
||||
WHERE i.tenant_id = :tenant_id AND i.is_active = true
|
||||
GROUP BY i.id
|
||||
""")
|
||||
|
||||
result = await db.execute(stock_query, {"tenant_id": tenant_id})
|
||||
for row in result.fetchall():
|
||||
ingredient_stock_levels[str(row.ingredient_id)] = float(row.current_stock)
|
||||
|
||||
ingredient_stock_levels = await repos['dashboard_repo'].get_ingredient_stock_levels(tenant_id)
|
||||
except Exception as e:
|
||||
logger.warning(f"Could not fetch current stock levels for cost analysis: {e}")
|
||||
|
||||
|
||||
@@ -16,13 +16,14 @@ from shared.database.transactions import transactional
|
||||
|
||||
from app.core.config import settings
|
||||
from app.models.food_safety import (
|
||||
FoodSafetyCompliance,
|
||||
TemperatureLog,
|
||||
FoodSafetyCompliance,
|
||||
TemperatureLog,
|
||||
FoodSafetyAlert,
|
||||
FoodSafetyStandard,
|
||||
ComplianceStatus,
|
||||
FoodSafetyAlertType
|
||||
)
|
||||
from app.repositories.food_safety_repository import FoodSafetyRepository
|
||||
from app.schemas.food_safety import (
|
||||
FoodSafetyComplianceCreate,
|
||||
FoodSafetyComplianceUpdate,
|
||||
@@ -42,9 +43,13 @@ logger = structlog.get_logger()
|
||||
|
||||
class FoodSafetyService:
|
||||
"""Service for food safety and compliance operations"""
|
||||
|
||||
|
||||
def __init__(self):
|
||||
pass
|
||||
|
||||
def _get_repository(self, db) -> FoodSafetyRepository:
|
||||
"""Get repository instance for the current database session"""
|
||||
return FoodSafetyRepository(db)
|
||||
|
||||
# ===== COMPLIANCE MANAGEMENT =====
|
||||
|
||||
@@ -90,9 +95,9 @@ class FoodSafetyService:
|
||||
updated_by=user_id
|
||||
)
|
||||
|
||||
db.add(compliance)
|
||||
await db.flush()
|
||||
await db.refresh(compliance)
|
||||
# Create compliance record using repository
|
||||
repo = self._get_repository(db)
|
||||
compliance = await repo.create_compliance(compliance)
|
||||
|
||||
# Check for compliance alerts
|
||||
await self._check_compliance_alerts(db, compliance)
|
||||
@@ -117,9 +122,10 @@ class FoodSafetyService:
|
||||
) -> Optional[FoodSafetyComplianceResponse]:
|
||||
"""Update an existing compliance record"""
|
||||
try:
|
||||
# Get existing compliance record
|
||||
compliance = await db.get(FoodSafetyCompliance, compliance_id)
|
||||
if not compliance or compliance.tenant_id != tenant_id:
|
||||
# Get existing compliance record using repository
|
||||
repo = self._get_repository(db)
|
||||
compliance = await repo.get_compliance_by_id(compliance_id, tenant_id)
|
||||
if not compliance:
|
||||
return None
|
||||
|
||||
# Update fields
|
||||
@@ -132,9 +138,9 @@ class FoodSafetyService:
|
||||
setattr(compliance, field, value)
|
||||
|
||||
compliance.updated_by = user_id
|
||||
|
||||
await db.flush()
|
||||
await db.refresh(compliance)
|
||||
|
||||
# Update compliance record using repository
|
||||
compliance = await repo.update_compliance(compliance)
|
||||
|
||||
# Check for compliance alerts after update
|
||||
await self._check_compliance_alerts(db, compliance)
|
||||
@@ -336,85 +342,44 @@ class FoodSafetyService:
|
||||
) -> FoodSafetyDashboard:
|
||||
"""Get food safety dashboard data"""
|
||||
try:
|
||||
# Get compliance overview
|
||||
from sqlalchemy import text
|
||||
|
||||
compliance_query = text("""
|
||||
SELECT
|
||||
COUNT(*) as total,
|
||||
COUNT(CASE WHEN compliance_status = 'COMPLIANT' THEN 1 END) as compliant,
|
||||
COUNT(CASE WHEN compliance_status = 'NON_COMPLIANT' THEN 1 END) as non_compliant,
|
||||
COUNT(CASE WHEN compliance_status = 'PENDING_REVIEW' THEN 1 END) as pending_review
|
||||
FROM food_safety_compliance
|
||||
WHERE tenant_id = :tenant_id AND is_active = true
|
||||
""")
|
||||
|
||||
compliance_result = await db.execute(compliance_query, {"tenant_id": tenant_id})
|
||||
compliance_stats = compliance_result.fetchone()
|
||||
|
||||
total_compliance = compliance_stats.total or 0
|
||||
compliant_items = compliance_stats.compliant or 0
|
||||
# Get repository instance
|
||||
repo = self._get_repository(db)
|
||||
|
||||
# Get compliance overview using repository
|
||||
compliance_stats = await repo.get_compliance_stats(tenant_id)
|
||||
total_compliance = compliance_stats["total"]
|
||||
compliant_items = compliance_stats["compliant"]
|
||||
compliance_percentage = (compliant_items / total_compliance * 100) if total_compliance > 0 else 0
|
||||
|
||||
# Get temperature monitoring status
|
||||
temp_query = text("""
|
||||
SELECT
|
||||
COUNT(DISTINCT equipment_id) as sensors_online,
|
||||
COUNT(CASE WHEN NOT is_within_range AND recorded_at > NOW() - INTERVAL '24 hours' THEN 1 END) as violations_24h
|
||||
FROM temperature_logs
|
||||
WHERE tenant_id = :tenant_id AND recorded_at > NOW() - INTERVAL '1 hour'
|
||||
""")
|
||||
|
||||
temp_result = await db.execute(temp_query, {"tenant_id": tenant_id})
|
||||
temp_stats = temp_result.fetchone()
|
||||
|
||||
# Get expiration tracking
|
||||
expiration_query = text("""
|
||||
SELECT
|
||||
COUNT(CASE WHEN expiration_date::date = CURRENT_DATE THEN 1 END) as expiring_today,
|
||||
COUNT(CASE WHEN expiration_date BETWEEN CURRENT_DATE AND CURRENT_DATE + INTERVAL '7 days' THEN 1 END) as expiring_week,
|
||||
COUNT(CASE WHEN expiration_date < CURRENT_DATE AND is_available THEN 1 END) as expired_requiring_action
|
||||
FROM stock s
|
||||
JOIN ingredients i ON s.ingredient_id = i.id
|
||||
WHERE i.tenant_id = :tenant_id AND s.is_available = true
|
||||
""")
|
||||
|
||||
expiration_result = await db.execute(expiration_query, {"tenant_id": tenant_id})
|
||||
expiration_stats = expiration_result.fetchone()
|
||||
|
||||
# Get alert counts
|
||||
alert_query = text("""
|
||||
SELECT
|
||||
COUNT(CASE WHEN severity = 'high' OR severity = 'critical' THEN 1 END) as high_risk,
|
||||
COUNT(CASE WHEN severity = 'critical' THEN 1 END) as critical,
|
||||
COUNT(CASE WHEN regulatory_action_required = true AND resolved_at IS NULL THEN 1 END) as regulatory_pending
|
||||
FROM food_safety_alerts
|
||||
WHERE tenant_id = :tenant_id AND status = 'active'
|
||||
""")
|
||||
|
||||
alert_result = await db.execute(alert_query, {"tenant_id": tenant_id})
|
||||
alert_stats = alert_result.fetchone()
|
||||
|
||||
# Get temperature monitoring status using repository
|
||||
temp_stats = await repo.get_temperature_stats(tenant_id)
|
||||
|
||||
# Get expiration tracking using repository
|
||||
expiration_stats = await repo.get_expiration_stats(tenant_id)
|
||||
|
||||
# Get alert counts using repository
|
||||
alert_stats = await repo.get_alert_stats(tenant_id)
|
||||
|
||||
return FoodSafetyDashboard(
|
||||
total_compliance_items=total_compliance,
|
||||
compliant_items=compliant_items,
|
||||
non_compliant_items=compliance_stats.non_compliant or 0,
|
||||
pending_review_items=compliance_stats.pending_review or 0,
|
||||
non_compliant_items=compliance_stats["non_compliant"],
|
||||
pending_review_items=compliance_stats["pending_review"],
|
||||
compliance_percentage=Decimal(str(compliance_percentage)),
|
||||
temperature_sensors_online=temp_stats.sensors_online or 0,
|
||||
temperature_sensors_total=temp_stats.sensors_online or 0, # Would need actual count
|
||||
temperature_violations_24h=temp_stats.violations_24h or 0,
|
||||
temperature_sensors_online=temp_stats["sensors_online"],
|
||||
temperature_sensors_total=temp_stats["sensors_online"], # Would need actual count
|
||||
temperature_violations_24h=temp_stats["violations_24h"],
|
||||
current_temperature_status="normal", # Would need to calculate
|
||||
items_expiring_today=expiration_stats.expiring_today or 0,
|
||||
items_expiring_this_week=expiration_stats.expiring_week or 0,
|
||||
expired_items_requiring_action=expiration_stats.expired_requiring_action or 0,
|
||||
items_expiring_today=expiration_stats["expiring_today"],
|
||||
items_expiring_this_week=expiration_stats["expiring_week"],
|
||||
expired_items_requiring_action=expiration_stats["expired_requiring_action"],
|
||||
upcoming_audits=0, # Would need to calculate
|
||||
overdue_audits=0, # Would need to calculate
|
||||
certifications_valid=compliant_items,
|
||||
certifications_expiring_soon=0, # Would need to calculate
|
||||
high_risk_items=alert_stats.high_risk or 0,
|
||||
critical_alerts=alert_stats.critical or 0,
|
||||
regulatory_notifications_pending=alert_stats.regulatory_pending or 0,
|
||||
high_risk_items=alert_stats["high_risk"],
|
||||
critical_alerts=alert_stats["critical"],
|
||||
regulatory_notifications_pending=alert_stats["regulatory_pending"],
|
||||
recent_safety_incidents=[] # Would need to get recent incidents
|
||||
)
|
||||
|
||||
@@ -426,16 +391,14 @@ class FoodSafetyService:
|
||||
|
||||
async def _validate_compliance_data(self, db, compliance_data: FoodSafetyComplianceCreate):
|
||||
"""Validate compliance data for business rules"""
|
||||
# Check if ingredient exists
|
||||
from sqlalchemy import text
|
||||
|
||||
ingredient_query = text("SELECT id FROM ingredients WHERE id = :ingredient_id AND tenant_id = :tenant_id")
|
||||
result = await db.execute(ingredient_query, {
|
||||
"ingredient_id": compliance_data.ingredient_id,
|
||||
"tenant_id": compliance_data.tenant_id
|
||||
})
|
||||
|
||||
if not result.fetchone():
|
||||
# Check if ingredient exists using repository
|
||||
repo = self._get_repository(db)
|
||||
ingredient_exists = await repo.validate_ingredient_exists(
|
||||
compliance_data.ingredient_id,
|
||||
compliance_data.tenant_id
|
||||
)
|
||||
|
||||
if not ingredient_exists:
|
||||
raise ValueError("Ingredient not found")
|
||||
|
||||
# Validate standard
|
||||
|
||||
@@ -18,6 +18,7 @@ from shared.alerts.base_service import BaseAlertService, AlertServiceMixin
|
||||
from shared.alerts.templates import format_item_message
|
||||
from app.repositories.stock_repository import StockRepository
|
||||
from app.repositories.stock_movement_repository import StockMovementRepository
|
||||
from app.repositories.inventory_alert_repository import InventoryAlertRepository
|
||||
|
||||
logger = structlog.get_logger()
|
||||
|
||||
@@ -90,54 +91,20 @@ class InventoryAlertService(BaseAlertService, AlertServiceMixin):
|
||||
"""Batch check all stock levels for critical shortages (alerts)"""
|
||||
try:
|
||||
self._checks_performed += 1
|
||||
|
||||
query = """
|
||||
WITH stock_analysis AS (
|
||||
SELECT
|
||||
i.id, i.name, i.tenant_id,
|
||||
COALESCE(SUM(s.current_quantity), 0) as current_stock,
|
||||
i.low_stock_threshold as minimum_stock,
|
||||
i.max_stock_level as maximum_stock,
|
||||
i.reorder_point,
|
||||
0 as tomorrow_needed,
|
||||
0 as avg_daily_usage,
|
||||
7 as lead_time_days,
|
||||
CASE
|
||||
WHEN COALESCE(SUM(s.current_quantity), 0) < i.low_stock_threshold THEN 'critical'
|
||||
WHEN COALESCE(SUM(s.current_quantity), 0) < i.low_stock_threshold * 1.2 THEN 'low'
|
||||
WHEN i.max_stock_level IS NOT NULL AND COALESCE(SUM(s.current_quantity), 0) > i.max_stock_level THEN 'overstock'
|
||||
ELSE 'normal'
|
||||
END as status,
|
||||
GREATEST(0, i.low_stock_threshold - COALESCE(SUM(s.current_quantity), 0)) as shortage_amount
|
||||
FROM ingredients i
|
||||
LEFT JOIN stock s ON s.ingredient_id = i.id AND s.is_available = true
|
||||
WHERE i.tenant_id = :tenant_id AND i.is_active = true
|
||||
GROUP BY i.id, i.name, i.tenant_id, i.low_stock_threshold, i.max_stock_level, i.reorder_point
|
||||
)
|
||||
SELECT * FROM stock_analysis WHERE status != 'normal'
|
||||
ORDER BY
|
||||
CASE status
|
||||
WHEN 'critical' THEN 1
|
||||
WHEN 'low' THEN 2
|
||||
WHEN 'overstock' THEN 3
|
||||
END,
|
||||
shortage_amount DESC
|
||||
"""
|
||||
|
||||
|
||||
tenants = await self.get_active_tenants()
|
||||
|
||||
|
||||
for tenant_id in tenants:
|
||||
try:
|
||||
# Add timeout to prevent hanging connections
|
||||
# Add timeout to prevent hanging connections
|
||||
async with asyncio.timeout(30): # 30 second timeout
|
||||
async with self.db_manager.get_background_session() as session:
|
||||
result = await session.execute(text(query), {"tenant_id": tenant_id})
|
||||
issues = result.fetchall()
|
||||
|
||||
# Use repository for stock analysis
|
||||
alert_repo = InventoryAlertRepository(session)
|
||||
issues = await alert_repo.get_stock_issues(tenant_id)
|
||||
|
||||
for issue in issues:
|
||||
# Convert SQLAlchemy Row to dictionary for easier access
|
||||
issue_dict = dict(issue._mapping) if hasattr(issue, '_mapping') else dict(issue)
|
||||
await self._process_stock_issue(tenant_id, issue_dict)
|
||||
await self._process_stock_issue(tenant_id, issue)
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Error checking stock for tenant",
|
||||
@@ -230,39 +197,24 @@ class InventoryAlertService(BaseAlertService, AlertServiceMixin):
|
||||
"""Check for products approaching expiry (alerts)"""
|
||||
try:
|
||||
self._checks_performed += 1
|
||||
|
||||
query = """
|
||||
SELECT
|
||||
i.id, i.name, i.tenant_id,
|
||||
s.id as stock_id, s.expiration_date, s.current_quantity,
|
||||
EXTRACT(days FROM (s.expiration_date - CURRENT_DATE)) as days_to_expiry
|
||||
FROM ingredients i
|
||||
JOIN stock s ON s.ingredient_id = i.id
|
||||
WHERE s.expiration_date <= CURRENT_DATE + INTERVAL '7 days'
|
||||
AND s.current_quantity > 0
|
||||
AND s.is_available = true
|
||||
AND s.expiration_date IS NOT NULL
|
||||
ORDER BY s.expiration_date ASC
|
||||
"""
|
||||
|
||||
|
||||
tenants = await self.get_active_tenants()
|
||||
|
||||
# Add timeout to prevent hanging connections
|
||||
async with asyncio.timeout(30): # 30 second timeout
|
||||
async with self.db_manager.get_background_session() as session:
|
||||
result = await session.execute(text(query))
|
||||
expiring_items = result.fetchall()
|
||||
|
||||
# Group by tenant
|
||||
by_tenant = {}
|
||||
for item in expiring_items:
|
||||
# Convert SQLAlchemy Row to dictionary for easier access
|
||||
item_dict = dict(item._mapping) if hasattr(item, '_mapping') else dict(item)
|
||||
tenant_id = item_dict['tenant_id']
|
||||
if tenant_id not in by_tenant:
|
||||
by_tenant[tenant_id] = []
|
||||
by_tenant[tenant_id].append(item_dict)
|
||||
|
||||
for tenant_id, items in by_tenant.items():
|
||||
await self._process_expiring_items(tenant_id, items)
|
||||
alert_repo = InventoryAlertRepository(session)
|
||||
|
||||
for tenant_id in tenants:
|
||||
try:
|
||||
# Get expiring products for this tenant
|
||||
items = await alert_repo.get_expiring_products(tenant_id, days_threshold=7)
|
||||
if items:
|
||||
await self._process_expiring_items(tenant_id, items)
|
||||
except Exception as e:
|
||||
logger.error("Error checking expiring products for tenant",
|
||||
tenant_id=str(tenant_id),
|
||||
error=str(e))
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Expiry check failed", error=str(e))
|
||||
@@ -334,31 +286,23 @@ class InventoryAlertService(BaseAlertService, AlertServiceMixin):
|
||||
"""Check for temperature breaches (alerts)"""
|
||||
try:
|
||||
self._checks_performed += 1
|
||||
|
||||
query = """
|
||||
SELECT
|
||||
t.id, t.equipment_id as sensor_id, t.storage_location as location,
|
||||
t.temperature_celsius as temperature,
|
||||
t.target_temperature_max as max_threshold, t.tenant_id,
|
||||
COALESCE(t.deviation_minutes, 0) as breach_duration_minutes
|
||||
FROM temperature_logs t
|
||||
WHERE t.temperature_celsius > COALESCE(t.target_temperature_max, 25)
|
||||
AND NOT t.is_within_range
|
||||
AND COALESCE(t.deviation_minutes, 0) >= 30 -- Only after 30 minutes
|
||||
AND (t.recorded_at < NOW() - INTERVAL '15 minutes' OR t.alert_triggered = false) -- Avoid spam
|
||||
ORDER BY t.temperature_celsius DESC, t.deviation_minutes DESC
|
||||
"""
|
||||
|
||||
|
||||
tenants = await self.get_active_tenants()
|
||||
|
||||
# Add timeout to prevent hanging connections
|
||||
async with asyncio.timeout(30): # 30 second timeout
|
||||
async with self.db_manager.get_background_session() as session:
|
||||
result = await session.execute(text(query))
|
||||
breaches = result.fetchall()
|
||||
|
||||
for breach in breaches:
|
||||
# Convert SQLAlchemy Row to dictionary for easier access
|
||||
breach_dict = dict(breach._mapping) if hasattr(breach, '_mapping') else dict(breach)
|
||||
await self._process_temperature_breach(breach_dict)
|
||||
alert_repo = InventoryAlertRepository(session)
|
||||
|
||||
for tenant_id in tenants:
|
||||
try:
|
||||
breaches = await alert_repo.get_temperature_breaches(tenant_id, hours_back=24)
|
||||
for breach in breaches:
|
||||
await self._process_temperature_breach(breach)
|
||||
except Exception as e:
|
||||
logger.error("Error checking temperature breaches for tenant",
|
||||
tenant_id=str(tenant_id),
|
||||
error=str(e))
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Temperature check failed", error=str(e))
|
||||
@@ -405,10 +349,8 @@ class InventoryAlertService(BaseAlertService, AlertServiceMixin):
|
||||
# Add timeout to prevent hanging connections
|
||||
async with asyncio.timeout(10): # 10 second timeout for simple update
|
||||
async with self.db_manager.get_background_session() as session:
|
||||
await session.execute(
|
||||
text("UPDATE temperature_logs SET alert_triggered = true WHERE id = :id"),
|
||||
{"id": breach['id']}
|
||||
)
|
||||
alert_repo = InventoryAlertRepository(session)
|
||||
await alert_repo.mark_temperature_alert_triggered(breach['id'])
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Error processing temperature breach",
|
||||
@@ -458,20 +400,17 @@ class InventoryAlertService(BaseAlertService, AlertServiceMixin):
|
||||
"""
|
||||
|
||||
tenants = await self.get_active_tenants()
|
||||
|
||||
for tenant_id in tenants:
|
||||
try:
|
||||
from sqlalchemy import text
|
||||
# Add timeout to prevent hanging connections
|
||||
async with asyncio.timeout(30): # 30 second timeout
|
||||
async with self.db_manager.get_background_session() as session:
|
||||
result = await session.execute(text(query), {"tenant_id": tenant_id})
|
||||
recommendations = result.fetchall()
|
||||
|
||||
for rec in recommendations:
|
||||
# Convert SQLAlchemy Row to dictionary for easier access
|
||||
rec_dict = dict(rec._mapping) if hasattr(rec, '_mapping') else dict(rec)
|
||||
await self._generate_stock_recommendation(tenant_id, rec_dict)
|
||||
|
||||
# Add timeout to prevent hanging connections
|
||||
async with asyncio.timeout(30): # 30 second timeout
|
||||
async with self.db_manager.get_background_session() as session:
|
||||
alert_repo = InventoryAlertRepository(session)
|
||||
|
||||
for tenant_id in tenants:
|
||||
try:
|
||||
recommendations = await alert_repo.get_reorder_recommendations(tenant_id)
|
||||
for rec in recommendations:
|
||||
await self._generate_stock_recommendation(tenant_id, rec)
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Error generating recommendations for tenant",
|
||||
@@ -559,20 +498,17 @@ class InventoryAlertService(BaseAlertService, AlertServiceMixin):
|
||||
"""
|
||||
|
||||
tenants = await self.get_active_tenants()
|
||||
|
||||
for tenant_id in tenants:
|
||||
try:
|
||||
from sqlalchemy import text
|
||||
# Add timeout to prevent hanging connections
|
||||
async with asyncio.timeout(30): # 30 second timeout
|
||||
async with self.db_manager.get_background_session() as session:
|
||||
result = await session.execute(text(query), {"tenant_id": tenant_id})
|
||||
waste_data = result.fetchall()
|
||||
|
||||
for waste in waste_data:
|
||||
# Convert SQLAlchemy Row to dictionary for easier access
|
||||
waste_dict = dict(waste._mapping) if hasattr(waste, '_mapping') else dict(waste)
|
||||
await self._generate_waste_recommendation(tenant_id, waste_dict)
|
||||
|
||||
# Add timeout to prevent hanging connections
|
||||
async with asyncio.timeout(30): # 30 second timeout
|
||||
async with self.db_manager.get_background_session() as session:
|
||||
alert_repo = InventoryAlertRepository(session)
|
||||
|
||||
for tenant_id in tenants:
|
||||
try:
|
||||
waste_data = await alert_repo.get_waste_opportunities(tenant_id)
|
||||
for waste in waste_data:
|
||||
await self._generate_waste_recommendation(tenant_id, waste)
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Error generating waste recommendations",
|
||||
@@ -738,21 +674,11 @@ class InventoryAlertService(BaseAlertService, AlertServiceMixin):
|
||||
async def get_active_tenants(self) -> List[UUID]:
|
||||
"""Get list of active tenant IDs from ingredients table (inventory service specific)"""
|
||||
try:
|
||||
query = text("SELECT DISTINCT tenant_id FROM ingredients WHERE is_active = true")
|
||||
# Add timeout to prevent hanging connections
|
||||
async with asyncio.timeout(10): # 10 second timeout
|
||||
async with self.db_manager.get_background_session() as session:
|
||||
result = await session.execute(query)
|
||||
# Handle PostgreSQL UUID objects properly
|
||||
tenant_ids = []
|
||||
for row in result.fetchall():
|
||||
tenant_id = row.tenant_id
|
||||
# Convert to UUID if it's not already
|
||||
if isinstance(tenant_id, UUID):
|
||||
tenant_ids.append(tenant_id)
|
||||
else:
|
||||
tenant_ids.append(UUID(str(tenant_id)))
|
||||
return tenant_ids
|
||||
alert_repo = InventoryAlertRepository(session)
|
||||
return await alert_repo.get_active_tenant_ids()
|
||||
except Exception as e:
|
||||
logger.error("Error fetching active tenants from ingredients", error=str(e))
|
||||
return []
|
||||
@@ -760,27 +686,15 @@ class InventoryAlertService(BaseAlertService, AlertServiceMixin):
|
||||
async def get_stock_after_order(self, ingredient_id: str, order_quantity: float) -> Optional[Dict[str, Any]]:
|
||||
"""Get stock information after hypothetical order"""
|
||||
try:
|
||||
query = """
|
||||
SELECT i.id, i.name,
|
||||
COALESCE(SUM(s.current_quantity), 0) as current_stock,
|
||||
i.low_stock_threshold as minimum_stock,
|
||||
(COALESCE(SUM(s.current_quantity), 0) - :order_quantity) as remaining
|
||||
FROM ingredients i
|
||||
LEFT JOIN stock s ON s.ingredient_id = i.id AND s.is_available = true
|
||||
WHERE i.id = :ingredient_id
|
||||
GROUP BY i.id, i.name, i.low_stock_threshold
|
||||
"""
|
||||
|
||||
# Add timeout to prevent hanging connections
|
||||
async with asyncio.timeout(10): # 10 second timeout
|
||||
async with self.db_manager.get_background_session() as session:
|
||||
result = await session.execute(text(query), {"ingredient_id": ingredient_id, "order_quantity": order_quantity})
|
||||
row = result.fetchone()
|
||||
return dict(row) if row else None
|
||||
|
||||
alert_repo = InventoryAlertRepository(session)
|
||||
return await alert_repo.get_stock_after_order(ingredient_id, order_quantity)
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Error getting stock after order",
|
||||
ingredient_id=ingredient_id,
|
||||
logger.error("Error getting stock after order",
|
||||
ingredient_id=ingredient_id,
|
||||
error=str(e))
|
||||
return None
|
||||
|
||||
|
||||
583
services/inventory/app/services/sustainability_service.py
Normal file
583
services/inventory/app/services/sustainability_service.py
Normal file
@@ -0,0 +1,583 @@
|
||||
# ================================================================
|
||||
# services/inventory/app/services/sustainability_service.py
|
||||
# ================================================================
|
||||
"""
|
||||
Sustainability Service - Environmental Impact & SDG Compliance Tracking
|
||||
Aligned with UN SDG 12.3 and EU Farm to Fork Strategy
|
||||
"""
|
||||
|
||||
from datetime import datetime, timedelta
|
||||
from decimal import Decimal
|
||||
from typing import Dict, Any, Optional, List
|
||||
from uuid import UUID
|
||||
import structlog
|
||||
|
||||
from sqlalchemy import text
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from app.core.config import settings
|
||||
from app.repositories.stock_movement_repository import StockMovementRepository
|
||||
from shared.clients.production_client import create_production_client
|
||||
|
||||
logger = structlog.get_logger()
|
||||
|
||||
|
||||
# Environmental Impact Constants (Research-based averages for bakery products)
|
||||
class EnvironmentalConstants:
    """Environmental impact factors for bakery production.

    Static, research-based conversion factors used to translate kilograms
    of food waste into CO2, water, and land-use equivalents. All values
    are industry averages; confirm against current EU publications before
    using them in official reporting.
    """

    # CO2 equivalent per kg of food waste (kg CO2e/kg)
    # Source: EU Commission, average for baked goods
    CO2_PER_KG_WASTE = 1.9

    # Water footprint (liters per kg of ingredient)
    WATER_FOOTPRINT = {
        'flour': 1827,  # Wheat flour
        'dairy': 1020,  # Average dairy products
        'eggs': 3265,  # Eggs
        'sugar': 1782,  # Sugar
        'yeast': 500,  # Estimated for yeast
        'fats': 1600,  # Butter/oils average
        'default': 1500  # Conservative default used when the category is unknown
    }

    # Land use per kg (m² per kg)
    LAND_USE_PER_KG = 3.4

    # Average trees needed to offset 1 ton CO2
    TREES_PER_TON_CO2 = 50

    # EU bakery waste baseline (average industry waste %)
    EU_BAKERY_BASELINE_WASTE = 0.25  # 25% average

    # UN SDG 12.3 target: 50% reduction by 2030
    SDG_TARGET_REDUCTION = 0.50
|
||||
|
||||
|
||||
class SustainabilityService:
|
||||
"""Service for calculating environmental impact and SDG compliance"""
|
||||
|
||||
def __init__(self):
|
||||
pass
|
||||
|
||||
async def get_sustainability_metrics(
|
||||
self,
|
||||
db: AsyncSession,
|
||||
tenant_id: UUID,
|
||||
start_date: Optional[datetime] = None,
|
||||
end_date: Optional[datetime] = None
|
||||
) -> Dict[str, Any]:
|
||||
"""
|
||||
Get comprehensive sustainability metrics for a tenant
|
||||
|
||||
Returns metrics aligned with:
|
||||
- UN SDG 12.3 (Food waste reduction)
|
||||
- EU Farm to Fork Strategy
|
||||
- Green Deal objectives
|
||||
"""
|
||||
try:
|
||||
# Default to last 30 days if no date range provided
|
||||
if not end_date:
|
||||
end_date = datetime.now()
|
||||
if not start_date:
|
||||
start_date = end_date - timedelta(days=30)
|
||||
|
||||
# Get waste data from production and inventory
|
||||
waste_data = await self._get_waste_data(db, tenant_id, start_date, end_date)
|
||||
|
||||
# Calculate environmental impact
|
||||
environmental_impact = self._calculate_environmental_impact(waste_data)
|
||||
|
||||
# Calculate SDG compliance
|
||||
sdg_compliance = await self._calculate_sdg_compliance(
|
||||
db, tenant_id, waste_data, start_date, end_date
|
||||
)
|
||||
|
||||
# Calculate avoided waste (through AI predictions)
|
||||
avoided_waste = await self._calculate_avoided_waste(
|
||||
db, tenant_id, start_date, end_date
|
||||
)
|
||||
|
||||
# Calculate financial impact
|
||||
financial_impact = self._calculate_financial_impact(waste_data)
|
||||
|
||||
return {
|
||||
'period': {
|
||||
'start_date': start_date.isoformat(),
|
||||
'end_date': end_date.isoformat(),
|
||||
'days': (end_date - start_date).days
|
||||
},
|
||||
'waste_metrics': {
|
||||
'total_waste_kg': waste_data['total_waste_kg'],
|
||||
'production_waste_kg': waste_data['production_waste_kg'],
|
||||
'expired_waste_kg': waste_data['expired_waste_kg'],
|
||||
'waste_percentage': waste_data['waste_percentage'],
|
||||
'waste_by_reason': waste_data['waste_by_reason']
|
||||
},
|
||||
'environmental_impact': environmental_impact,
|
||||
'sdg_compliance': sdg_compliance,
|
||||
'avoided_waste': avoided_waste,
|
||||
'financial_impact': financial_impact,
|
||||
'grant_readiness': self._assess_grant_readiness(sdg_compliance)
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Failed to calculate sustainability metrics",
|
||||
tenant_id=str(tenant_id), error=str(e))
|
||||
raise
|
||||
|
||||
async def _get_waste_data(
|
||||
self,
|
||||
db: AsyncSession,
|
||||
tenant_id: UUID,
|
||||
start_date: datetime,
|
||||
end_date: datetime
|
||||
) -> Dict[str, Any]:
|
||||
"""Get waste data from production service and inventory"""
|
||||
try:
|
||||
# Get production waste data via HTTP call to production service
|
||||
production_waste_data = await self._get_production_waste_data(
|
||||
tenant_id, start_date, end_date
|
||||
)
|
||||
|
||||
prod_data = production_waste_data if production_waste_data else {
|
||||
'total_production_waste': 0,
|
||||
'total_defects': 0,
|
||||
'total_planned': 0,
|
||||
'total_actual': 0
|
||||
}
|
||||
|
||||
# Query inventory waste using repository
|
||||
stock_movement_repo = StockMovementRepository(db)
|
||||
inventory_waste = await stock_movement_repo.get_inventory_waste_total(
|
||||
tenant_id=tenant_id,
|
||||
start_date=start_date,
|
||||
end_date=end_date
|
||||
)
|
||||
|
||||
# Calculate totals
|
||||
production_waste = float(prod_data.get('total_production_waste', 0) or 0)
|
||||
defect_waste = float(prod_data.get('total_defects', 0) or 0)
|
||||
total_waste = production_waste + defect_waste + inventory_waste
|
||||
|
||||
total_production = float(prod_data.get('total_planned', 0) or 0)
|
||||
waste_percentage = (total_waste / total_production * 100) if total_production > 0 else 0
|
||||
|
||||
# Categorize waste by reason
|
||||
waste_by_reason = {
|
||||
'production_defects': defect_waste,
|
||||
'production_waste': production_waste - defect_waste,
|
||||
'expired_inventory': inventory_waste * 0.7, # Estimate: 70% expires
|
||||
'damaged_inventory': inventory_waste * 0.3, # Estimate: 30% damaged
|
||||
}
|
||||
|
||||
return {
|
||||
'total_waste_kg': total_waste,
|
||||
'production_waste_kg': production_waste + defect_waste,
|
||||
'expired_waste_kg': inventory_waste,
|
||||
'waste_percentage': waste_percentage,
|
||||
'total_production_kg': total_production,
|
||||
'waste_by_reason': waste_by_reason,
|
||||
'waste_incidents': int(inv_data.waste_incidents or 0)
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Failed to get waste data", error=str(e))
|
||||
raise
|
||||
|
||||
async def _get_production_waste_data(
|
||||
self,
|
||||
tenant_id: UUID,
|
||||
start_date: datetime,
|
||||
end_date: datetime
|
||||
) -> Optional[Dict[str, Any]]:
|
||||
"""Get production waste data from production service using shared client"""
|
||||
try:
|
||||
# Use the shared production client with proper authentication and resilience
|
||||
production_client = create_production_client(settings)
|
||||
|
||||
data = await production_client.get_waste_analytics(
|
||||
str(tenant_id),
|
||||
start_date.isoformat(),
|
||||
end_date.isoformat()
|
||||
)
|
||||
|
||||
if data:
|
||||
logger.info(
|
||||
"Retrieved production waste data via production client",
|
||||
tenant_id=str(tenant_id),
|
||||
total_waste=data.get('total_production_waste', 0)
|
||||
)
|
||||
return data
|
||||
else:
|
||||
# Client returned None, return zeros as fallback
|
||||
logger.warning(
|
||||
"Production waste analytics returned None, using zeros",
|
||||
tenant_id=str(tenant_id)
|
||||
)
|
||||
return {
|
||||
'total_production_waste': 0,
|
||||
'total_defects': 0,
|
||||
'total_planned': 0,
|
||||
'total_actual': 0
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
"Error calling production service for waste data via client",
|
||||
error=str(e),
|
||||
tenant_id=str(tenant_id)
|
||||
)
|
||||
# Return zeros on error to not break the flow
|
||||
return {
|
||||
'total_production_waste': 0,
|
||||
'total_defects': 0,
|
||||
'total_planned': 0,
|
||||
'total_actual': 0
|
||||
}
|
||||
|
||||
def _calculate_environmental_impact(self, waste_data: Dict[str, Any]) -> Dict[str, Any]:
|
||||
"""Calculate environmental impact of food waste"""
|
||||
try:
|
||||
total_waste_kg = waste_data['total_waste_kg']
|
||||
|
||||
# CO2 emissions
|
||||
co2_emissions_kg = total_waste_kg * EnvironmentalConstants.CO2_PER_KG_WASTE
|
||||
co2_emissions_tons = co2_emissions_kg / 1000
|
||||
|
||||
# Equivalent trees to offset
|
||||
trees_equivalent = co2_emissions_tons * EnvironmentalConstants.TREES_PER_TON_CO2
|
||||
|
||||
# Water footprint (using average for bakery products)
|
||||
water_liters = total_waste_kg * EnvironmentalConstants.WATER_FOOTPRINT['default']
|
||||
|
||||
# Land use
|
||||
land_use_m2 = total_waste_kg * EnvironmentalConstants.LAND_USE_PER_KG
|
||||
|
||||
# Human-readable equivalents for marketing
|
||||
equivalents = {
|
||||
'car_km': co2_emissions_kg / 0.12, # Average car emits 120g CO2/km
|
||||
'smartphone_charges': (co2_emissions_kg * 1000) / 8, # 8g CO2 per charge
|
||||
'showers': water_liters / 65, # Average shower uses 65L
|
||||
'trees_year_growth': trees_equivalent
|
||||
}
|
||||
|
||||
return {
|
||||
'co2_emissions': {
|
||||
'kg': round(co2_emissions_kg, 2),
|
||||
'tons': round(co2_emissions_tons, 4),
|
||||
'trees_to_offset': round(trees_equivalent, 1)
|
||||
},
|
||||
'water_footprint': {
|
||||
'liters': round(water_liters, 2),
|
||||
'cubic_meters': round(water_liters / 1000, 2)
|
||||
},
|
||||
'land_use': {
|
||||
'square_meters': round(land_use_m2, 2),
|
||||
'hectares': round(land_use_m2 / 10000, 4)
|
||||
},
|
||||
'human_equivalents': {
|
||||
'car_km_equivalent': round(equivalents['car_km'], 0),
|
||||
'smartphone_charges': round(equivalents['smartphone_charges'], 0),
|
||||
'showers_equivalent': round(equivalents['showers'], 0),
|
||||
'trees_planted': round(equivalents['trees_year_growth'], 1)
|
||||
}
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Failed to calculate environmental impact", error=str(e))
|
||||
raise
|
||||
|
||||
async def _calculate_sdg_compliance(
|
||||
self,
|
||||
db: AsyncSession,
|
||||
tenant_id: UUID,
|
||||
waste_data: Dict[str, Any],
|
||||
start_date: datetime,
|
||||
end_date: datetime
|
||||
) -> Dict[str, Any]:
|
||||
"""
|
||||
Calculate compliance with UN SDG 12.3
|
||||
Target: Halve per capita global food waste by 2030
|
||||
"""
|
||||
try:
|
||||
# Get baseline (first 90 days of operation or industry average)
|
||||
baseline = await self._get_baseline_waste(db, tenant_id)
|
||||
|
||||
current_waste_percentage = waste_data['waste_percentage']
|
||||
baseline_percentage = baseline.get('waste_percentage', EnvironmentalConstants.EU_BAKERY_BASELINE_WASTE * 100)
|
||||
|
||||
# Calculate reduction from baseline
|
||||
if baseline_percentage > 0:
|
||||
reduction_percentage = ((baseline_percentage - current_waste_percentage) / baseline_percentage) * 100
|
||||
else:
|
||||
reduction_percentage = 0
|
||||
|
||||
# SDG 12.3 target is 50% reduction
|
||||
sdg_target = baseline_percentage * (1 - EnvironmentalConstants.SDG_TARGET_REDUCTION)
|
||||
progress_to_target = (reduction_percentage / (EnvironmentalConstants.SDG_TARGET_REDUCTION * 100)) * 100
|
||||
|
||||
# Status assessment
|
||||
if reduction_percentage >= 50:
|
||||
status = 'sdg_compliant'
|
||||
status_label = 'SDG 12.3 Compliant'
|
||||
elif reduction_percentage >= 30:
|
||||
status = 'on_track'
|
||||
status_label = 'On Track to Compliance'
|
||||
elif reduction_percentage >= 10:
|
||||
status = 'progressing'
|
||||
status_label = 'Making Progress'
|
||||
else:
|
||||
status = 'baseline'
|
||||
status_label = 'Establishing Baseline'
|
||||
|
||||
return {
|
||||
'sdg_12_3': {
|
||||
'baseline_waste_percentage': round(baseline_percentage, 2),
|
||||
'current_waste_percentage': round(current_waste_percentage, 2),
|
||||
'reduction_achieved': round(reduction_percentage, 2),
|
||||
'target_reduction': 50.0,
|
||||
'progress_to_target': round(min(progress_to_target, 100), 1),
|
||||
'status': status,
|
||||
'status_label': status_label,
|
||||
'target_waste_percentage': round(sdg_target, 2)
|
||||
},
|
||||
'baseline_period': baseline.get('period', 'industry_average'),
|
||||
'certification_ready': reduction_percentage >= 50,
|
||||
'improvement_areas': self._identify_improvement_areas(waste_data)
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Failed to calculate SDG compliance", error=str(e))
|
||||
raise
|
||||
|
||||
async def _get_baseline_waste(
|
||||
self,
|
||||
db: AsyncSession,
|
||||
tenant_id: UUID
|
||||
) -> Dict[str, Any]:
|
||||
"""Get baseline waste percentage from production service using shared client"""
|
||||
try:
|
||||
# Use the shared production client with proper authentication and resilience
|
||||
production_client = create_production_client(settings)
|
||||
|
||||
baseline_data = await production_client.get_baseline(str(tenant_id))
|
||||
|
||||
if baseline_data and baseline_data.get('data_available', False):
|
||||
# Production service has real baseline data
|
||||
logger.info(
|
||||
"Retrieved baseline from production service via client",
|
||||
tenant_id=str(tenant_id),
|
||||
baseline_percentage=baseline_data.get('waste_percentage', 0)
|
||||
)
|
||||
return {
|
||||
'waste_percentage': baseline_data['waste_percentage'],
|
||||
'period': baseline_data['period'].get('type', 'first_90_days'),
|
||||
'total_production_kg': baseline_data.get('total_production_kg', 0),
|
||||
'total_waste_kg': baseline_data.get('total_waste_kg', 0)
|
||||
}
|
||||
else:
|
||||
# Production service doesn't have enough data yet
|
||||
logger.info(
|
||||
"Production service baseline not available, using industry average",
|
||||
tenant_id=str(tenant_id)
|
||||
)
|
||||
return {
|
||||
'waste_percentage': EnvironmentalConstants.EU_BAKERY_BASELINE_WASTE * 100,
|
||||
'period': 'industry_average',
|
||||
'note': 'Using EU bakery industry average of 25% as baseline'
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
logger.warning(
|
||||
"Error calling production service for baseline via client, using industry average",
|
||||
error=str(e),
|
||||
tenant_id=str(tenant_id)
|
||||
)
|
||||
|
||||
# Fallback to industry average
|
||||
return {
|
||||
'waste_percentage': EnvironmentalConstants.EU_BAKERY_BASELINE_WASTE * 100,
|
||||
'period': 'industry_average',
|
||||
'note': 'Using EU bakery industry average of 25% as baseline'
|
||||
}
|
||||
|
||||
async def _calculate_avoided_waste(
|
||||
self,
|
||||
db: AsyncSession,
|
||||
tenant_id: UUID,
|
||||
start_date: datetime,
|
||||
end_date: datetime
|
||||
) -> Dict[str, Any]:
|
||||
"""
|
||||
Calculate waste avoided through AI predictions and smart planning
|
||||
This is a KEY metric for marketing and grant applications
|
||||
"""
|
||||
try:
|
||||
# Get AI-assisted batch data from production service
|
||||
production_data = await self._get_production_waste_data(tenant_id, start_date, end_date)
|
||||
|
||||
# Extract data with AI batch tracking
|
||||
total_planned = production_data.get('total_planned', 0) if production_data else 0
|
||||
total_waste = production_data.get('total_production_waste', 0) if production_data else 0
|
||||
ai_assisted_batches = production_data.get('ai_assisted_batches', 0) if production_data else 0
|
||||
|
||||
# Estimate waste avoided by comparing to industry average
|
||||
if total_planned > 0:
|
||||
# Industry average waste: 25%
|
||||
# Current actual waste from production
|
||||
industry_expected_waste = total_planned * EnvironmentalConstants.EU_BAKERY_BASELINE_WASTE
|
||||
actual_waste = total_waste
|
||||
estimated_avoided = max(0, industry_expected_waste - actual_waste)
|
||||
|
||||
# Calculate environmental impact of avoided waste
|
||||
avoided_co2 = estimated_avoided * EnvironmentalConstants.CO2_PER_KG_WASTE
|
||||
avoided_water = estimated_avoided * EnvironmentalConstants.WATER_FOOTPRINT['default']
|
||||
|
||||
return {
|
||||
'waste_avoided_kg': round(estimated_avoided, 2),
|
||||
'ai_assisted_batches': ai_assisted_batches,
|
||||
'environmental_impact_avoided': {
|
||||
'co2_kg': round(avoided_co2, 2),
|
||||
'water_liters': round(avoided_water, 2)
|
||||
},
|
||||
'methodology': 'compared_to_industry_baseline'
|
||||
}
|
||||
else:
|
||||
return {
|
||||
'waste_avoided_kg': 0,
|
||||
'ai_assisted_batches': 0,
|
||||
'note': 'Insufficient data for avoided waste calculation'
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Failed to calculate avoided waste", error=str(e))
|
||||
return {'waste_avoided_kg': 0, 'error': str(e)}
|
||||
|
||||
def _calculate_financial_impact(self, waste_data: Dict[str, Any]) -> Dict[str, Any]:
|
||||
"""Calculate financial impact of food waste"""
|
||||
# Average cost per kg of bakery products: €3.50
|
||||
avg_cost_per_kg = 3.50
|
||||
|
||||
total_waste_kg = waste_data['total_waste_kg']
|
||||
waste_cost = total_waste_kg * avg_cost_per_kg
|
||||
|
||||
# If waste was reduced by 30%, potential savings
|
||||
potential_savings = waste_cost * 0.30
|
||||
|
||||
return {
|
||||
'waste_cost_eur': round(waste_cost, 2),
|
||||
'cost_per_kg': avg_cost_per_kg,
|
||||
'potential_monthly_savings': round(potential_savings, 2),
|
||||
'annual_projection': round(waste_cost * 12, 2)
|
||||
}
|
||||
|
||||
def _identify_improvement_areas(self, waste_data: Dict[str, Any]) -> List[str]:
|
||||
"""Identify areas for improvement based on waste data"""
|
||||
areas = []
|
||||
|
||||
waste_by_reason = waste_data.get('waste_by_reason', {})
|
||||
|
||||
if waste_by_reason.get('production_defects', 0) > waste_data['total_waste_kg'] * 0.3:
|
||||
areas.append('quality_control_in_production')
|
||||
|
||||
if waste_by_reason.get('expired_inventory', 0) > waste_data['total_waste_kg'] * 0.4:
|
||||
areas.append('inventory_rotation_management')
|
||||
|
||||
if waste_data.get('waste_percentage', 0) > 20:
|
||||
areas.append('demand_forecasting_accuracy')
|
||||
|
||||
if not areas:
|
||||
areas.append('maintain_current_practices')
|
||||
|
||||
return areas
|
||||
|
||||
def _assess_grant_readiness(self, sdg_compliance: Dict[str, Any]) -> Dict[str, Any]:
|
||||
"""Assess readiness for various grant programs"""
|
||||
reduction = sdg_compliance['sdg_12_3']['reduction_achieved']
|
||||
|
||||
grants = {
|
||||
'eu_horizon_europe': {
|
||||
'eligible': reduction >= 30,
|
||||
'confidence': 'high' if reduction >= 50 else 'medium' if reduction >= 30 else 'low',
|
||||
'requirements_met': reduction >= 30
|
||||
},
|
||||
'eu_farm_to_fork': {
|
||||
'eligible': reduction >= 20,
|
||||
'confidence': 'high' if reduction >= 40 else 'medium' if reduction >= 20 else 'low',
|
||||
'requirements_met': reduction >= 20
|
||||
},
|
||||
'national_circular_economy': {
|
||||
'eligible': reduction >= 15,
|
||||
'confidence': 'high' if reduction >= 25 else 'medium' if reduction >= 15 else 'low',
|
||||
'requirements_met': reduction >= 15
|
||||
},
|
||||
'un_sdg_certified': {
|
||||
'eligible': reduction >= 50,
|
||||
'confidence': 'high' if reduction >= 50 else 'low',
|
||||
'requirements_met': reduction >= 50
|
||||
}
|
||||
}
|
||||
|
||||
overall_readiness = sum(1 for g in grants.values() if g['eligible']) / len(grants) * 100
|
||||
|
||||
return {
|
||||
'overall_readiness_percentage': round(overall_readiness, 1),
|
||||
'grant_programs': grants,
|
||||
'recommended_applications': [
|
||||
name for name, details in grants.items() if details['eligible']
|
||||
]
|
||||
}
|
||||
|
||||
async def export_grant_report(
    self,
    db: AsyncSession,
    tenant_id: UUID,
    grant_type: str = 'general',
    start_date: Optional[datetime] = None,
    end_date: Optional[datetime] = None
) -> Dict[str, Any]:
    """
    Generate export-ready report for grant applications
    Formats data according to common grant application requirements
    """
    try:
        metrics = await self.get_sustainability_metrics(
            db, tenant_id, start_date, end_date
        )

        # Shortcut into the SDG 12.3 sub-dict used by several sections below.
        sdg = metrics['sdg_compliance']['sdg_12_3']

        # Header describing when and for whom the report was produced.
        metadata = {
            'generated_at': datetime.now().isoformat(),
            'report_type': grant_type,
            'period': metrics['period'],
            'tenant_id': str(tenant_id)
        }

        # Headline numbers grant reviewers look at first.
        summary = {
            'total_waste_reduced_kg': metrics['waste_metrics']['total_waste_kg'],
            'waste_reduction_percentage': sdg['reduction_achieved'],
            'co2_emissions_avoided_kg': metrics['environmental_impact']['co2_emissions']['kg'],
            'financial_savings_eur': metrics['financial_impact']['waste_cost_eur'],
            'sdg_compliance_status': sdg['status_label']
        }

        certifications = {
            'sdg_12_3_compliant': metrics['sdg_compliance']['certification_ready'],
            'grant_programs_eligible': metrics['grant_readiness']['recommended_applications']
        }

        # Evidence backing the executive summary figures.
        supporting = {
            'baseline_comparison': {
                'baseline': sdg['baseline_waste_percentage'],
                'current': sdg['current_waste_percentage'],
                'improvement': sdg['reduction_achieved']
            },
            'environmental_benefits': metrics['environmental_impact'],
            'financial_benefits': metrics['financial_impact']
        }

        return {
            'report_metadata': metadata,
            'executive_summary': summary,
            'detailed_metrics': metrics,
            'certifications': certifications,
            'supporting_data': supporting
        }

    except Exception as e:
        logger.error("Failed to generate grant report", error=str(e))
        raise
|
||||
Reference in New Issue
Block a user