Improve the frontend and repository layer
7
services/demo_session/app/repositories/__init__.py
Normal file
@@ -0,0 +1,7 @@
"""
Demo Session Repositories
"""

from .demo_session_repository import DemoSessionRepository

__all__ = ["DemoSessionRepository"]
204
services/demo_session/app/repositories/demo_session_repository.py
Normal file
@@ -0,0 +1,204 @@
"""
Demo Session Repository
Data access layer for demo sessions
"""

from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select, update
from datetime import datetime, timezone
from typing import Optional, List, Dict, Any
from uuid import UUID
import structlog

from app.models import DemoSession, DemoSessionStatus

logger = structlog.get_logger()


class DemoSessionRepository:
    """Repository for DemoSession data access"""

    def __init__(self, db: AsyncSession):
        self.db = db

    async def create(self, session_data: Dict[str, Any]) -> DemoSession:
        """
        Create a new demo session

        Args:
            session_data: Dictionary with session attributes

        Returns:
            Created DemoSession instance
        """
        session = DemoSession(**session_data)
        self.db.add(session)
        await self.db.commit()
        await self.db.refresh(session)
        return session

    async def get_by_session_id(self, session_id: str) -> Optional[DemoSession]:
        """
        Get session by session_id

        Args:
            session_id: Session ID string

        Returns:
            DemoSession or None if not found
        """
        result = await self.db.execute(
            select(DemoSession).where(DemoSession.session_id == session_id)
        )
        return result.scalar_one_or_none()

    async def get_by_virtual_tenant_id(self, virtual_tenant_id: UUID) -> Optional[DemoSession]:
        """
        Get session by virtual tenant ID

        Args:
            virtual_tenant_id: Virtual tenant UUID

        Returns:
            DemoSession or None if not found
        """
        result = await self.db.execute(
            select(DemoSession).where(DemoSession.virtual_tenant_id == virtual_tenant_id)
        )
        return result.scalar_one_or_none()

    async def update(self, session: DemoSession) -> DemoSession:
        """
        Update an existing session

        Args:
            session: DemoSession instance with updates

        Returns:
            Updated DemoSession instance
        """
        await self.db.commit()
        await self.db.refresh(session)
        return session

    async def update_fields(self, session_id: str, **fields) -> None:
        """
        Update specific fields of a session

        Args:
            session_id: Session ID to update
            **fields: Field names and values to update
        """
        await self.db.execute(
            update(DemoSession)
            .where(DemoSession.session_id == session_id)
            .values(**fields)
        )
        await self.db.commit()

    async def update_activity(self, session_id: str) -> None:
        """
        Update last activity timestamp and increment request count

        Args:
            session_id: Session ID to update
        """
        await self.db.execute(
            update(DemoSession)
            .where(DemoSession.session_id == session_id)
            .values(
                last_activity_at=datetime.now(timezone.utc),
                request_count=DemoSession.request_count + 1
            )
        )
        await self.db.commit()

    async def mark_data_cloned(self, session_id: str) -> None:
        """
        Mark session as having data cloned

        Args:
            session_id: Session ID to update
        """
        await self.update_fields(session_id, data_cloned=True)

    async def mark_redis_populated(self, session_id: str) -> None:
        """
        Mark session as having Redis data populated

        Args:
            session_id: Session ID to update
        """
        await self.update_fields(session_id, redis_populated=True)

    async def destroy(self, session_id: str) -> None:
        """
        Mark session as destroyed

        Args:
            session_id: Session ID to destroy
        """
        await self.update_fields(
            session_id,
            status=DemoSessionStatus.DESTROYED,
            destroyed_at=datetime.now(timezone.utc)
        )

    async def get_active_sessions_count(self) -> int:
        """
        Get count of active sessions

        Returns:
            Number of active sessions
        """
        result = await self.db.execute(
            select(DemoSession).where(DemoSession.status == DemoSessionStatus.ACTIVE)
        )
        return len(result.scalars().all())

    async def get_all_sessions(self) -> List[DemoSession]:
        """
        Get all demo sessions

        Returns:
            List of all DemoSession instances
        """
        result = await self.db.execute(select(DemoSession))
        return result.scalars().all()

    async def get_sessions_by_status(self, status: DemoSessionStatus) -> List[DemoSession]:
        """
        Get sessions by status

        Args:
            status: DemoSessionStatus to filter by

        Returns:
            List of DemoSession instances with the specified status
        """
        result = await self.db.execute(
            select(DemoSession).where(DemoSession.status == status)
        )
        return result.scalars().all()

    async def get_session_stats(self) -> Dict[str, Any]:
        """
        Get session statistics

        Returns:
            Dictionary with session statistics
        """
        all_sessions = await self.get_all_sessions()
        active_sessions = [s for s in all_sessions if s.status == DemoSessionStatus.ACTIVE]

        return {
            "total_sessions": len(all_sessions),
            "active_sessions": len(active_sessions),
            "expired_sessions": len([s for s in all_sessions if s.status == DemoSessionStatus.EXPIRED]),
            "destroyed_sessions": len([s for s in all_sessions if s.status == DemoSessionStatus.DESTROYED]),
            "avg_duration_minutes": sum(
                (s.destroyed_at - s.created_at).total_seconds() / 60
                for s in all_sessions if s.destroyed_at
            ) / max(len([s for s in all_sessions if s.destroyed_at]), 1),
            "total_requests": sum(s.request_count for s in all_sessions)
        }
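The repository wraps a request-scoped AsyncSession, so it slots directly into a FastAPI dependency. A minimal sketch of that wiring follows; the get_db session dependency and the route itself are illustrative assumptions, not part of this commit.

# Hypothetical sketch: constructing the repository per request.
from fastapi import APIRouter, Depends, HTTPException
from sqlalchemy.ext.asyncio import AsyncSession

from app.core.database import get_db  # assumed session dependency
from app.repositories import DemoSessionRepository

router = APIRouter()

@router.get("/sessions/{session_id}")
async def read_session(session_id: str, db: AsyncSession = Depends(get_db)):
    repo = DemoSessionRepository(db)  # repository wraps the request-scoped session
    session = await repo.get_by_session_id(session_id)
    if session is None:
        raise HTTPException(status_code=404, detail="Session not found")
    return {"session_id": session.session_id, "status": str(session.status)}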
@@ -4,7 +4,6 @@ Handles creation, extension, and destruction of demo sessions
"""

from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select, update
from datetime import datetime, timedelta, timezone
from typing import Optional, Dict, Any
import uuid
@@ -15,6 +14,7 @@ from app.models import DemoSession, DemoSessionStatus, CloningStatus
from app.core.redis_wrapper import DemoRedisWrapper
from app.core import settings
from app.services.clone_orchestrator import CloneOrchestrator
from app.repositories.demo_session_repository import DemoSessionRepository

logger = structlog.get_logger()

@@ -25,6 +25,7 @@ class DemoSessionManager:
    def __init__(self, db: AsyncSession, redis: DemoRedisWrapper):
        self.db = db
        self.redis = redis
        self.repository = DemoSessionRepository(db)
        self.orchestrator = CloneOrchestrator()

    async def create_session(
@@ -66,32 +67,30 @@ class DemoSessionManager:

        base_tenant_id = uuid.UUID(base_tenant_id_str)

        # Create session record
        session = DemoSession(
            session_id=session_id,
            user_id=uuid.UUID(user_id) if user_id else None,
            ip_address=ip_address,
            user_agent=user_agent,
            base_demo_tenant_id=base_tenant_id,
            virtual_tenant_id=virtual_tenant_id,
            demo_account_type=demo_account_type,
            status=DemoSessionStatus.PENDING,  # Start as pending until cloning completes
            created_at=datetime.now(timezone.utc),
            expires_at=datetime.now(timezone.utc) + timedelta(
        # Create session record using repository
        session_data = {
            "session_id": session_id,
            "user_id": uuid.UUID(user_id) if user_id else None,
            "ip_address": ip_address,
            "user_agent": user_agent,
            "base_demo_tenant_id": base_tenant_id,
            "virtual_tenant_id": virtual_tenant_id,
            "demo_account_type": demo_account_type,
            "status": DemoSessionStatus.PENDING,  # Start as pending until cloning completes
            "created_at": datetime.now(timezone.utc),
            "expires_at": datetime.now(timezone.utc) + timedelta(
                minutes=settings.DEMO_SESSION_DURATION_MINUTES
            ),
            last_activity_at=datetime.now(timezone.utc),
            data_cloned=False,
            redis_populated=False,
            session_metadata={
            "last_activity_at": datetime.now(timezone.utc),
            "data_cloned": False,
            "redis_populated": False,
            "session_metadata": {
                "demo_config": demo_config,
                "extension_count": 0
            }
        )
        }

        self.db.add(session)
        await self.db.commit()
        await self.db.refresh(session)
        session = await self.repository.create(session_data)

        # Store session metadata in Redis
        await self._store_session_metadata(session)
@@ -107,19 +106,11 @@ class DemoSessionManager:

    async def get_session(self, session_id: str) -> Optional[DemoSession]:
        """Get session by session_id"""
        result = await self.db.execute(
            select(DemoSession).where(DemoSession.session_id == session_id)
        )
        return result.scalar_one_or_none()
        return await self.repository.get_by_session_id(session_id)

    async def get_session_by_virtual_tenant(self, virtual_tenant_id: str) -> Optional[DemoSession]:
        """Get session by virtual tenant ID"""
        result = await self.db.execute(
            select(DemoSession).where(
                DemoSession.virtual_tenant_id == uuid.UUID(virtual_tenant_id)
            )
        )
        return result.scalar_one_or_none()
        return await self.repository.get_by_virtual_tenant_id(uuid.UUID(virtual_tenant_id))

    async def extend_session(self, session_id: str) -> DemoSession:
        """
@@ -156,8 +147,7 @@ class DemoSessionManager:
        session.last_activity_at = datetime.now(timezone.utc)
        session.session_metadata["extension_count"] = extension_count + 1

        await self.db.commit()
        await self.db.refresh(session)
        session = await self.repository.update(session)

        # Extend Redis TTL
        await self.redis.extend_session_ttl(
@@ -176,33 +166,15 @@ class DemoSessionManager:

    async def update_activity(self, session_id: str):
        """Update last activity timestamp"""
        await self.db.execute(
            update(DemoSession)
            .where(DemoSession.session_id == session_id)
            .values(
                last_activity_at=datetime.now(timezone.utc),
                request_count=DemoSession.request_count + 1
            )
        )
        await self.db.commit()
        await self.repository.update_activity(session_id)

    async def mark_data_cloned(self, session_id: str):
        """Mark session as having data cloned"""
        await self.db.execute(
            update(DemoSession)
            .where(DemoSession.session_id == session_id)
            .values(data_cloned=True)
        )
        await self.db.commit()
        await self.repository.mark_data_cloned(session_id)

    async def mark_redis_populated(self, session_id: str):
        """Mark session as having Redis data populated"""
        await self.db.execute(
            update(DemoSession)
            .where(DemoSession.session_id == session_id)
            .values(redis_populated=True)
        )
        await self.db.commit()
        await self.repository.mark_redis_populated(session_id)

    async def destroy_session(self, session_id: str):
        """
@@ -217,11 +189,8 @@ class DemoSessionManager:
            logger.warning("Session not found for destruction", session_id=session_id)
            return

        # Update session status
        session.status = DemoSessionStatus.DESTROYED
        session.destroyed_at = datetime.now(timezone.utc)

        await self.db.commit()
        # Update session status via repository
        await self.repository.destroy(session_id)

        # Delete Redis data
        await self.redis.delete_session_data(session_id)
@@ -229,10 +198,7 @@ class DemoSessionManager:
        logger.info(
            "Session destroyed",
            session_id=session_id,
            virtual_tenant_id=str(session.virtual_tenant_id),
            duration_seconds=(
                session.destroyed_at - session.created_at
            ).total_seconds()
            virtual_tenant_id=str(session.virtual_tenant_id)
        )

    async def _store_session_metadata(self, session: DemoSession):
@@ -252,29 +218,11 @@ class DemoSessionManager:

    async def get_active_sessions_count(self) -> int:
        """Get count of active sessions"""
        result = await self.db.execute(
            select(DemoSession).where(DemoSession.status == DemoSessionStatus.ACTIVE)
        )
        return len(result.scalars().all())
        return await self.repository.get_active_sessions_count()

    async def get_session_stats(self) -> Dict[str, Any]:
        """Get session statistics"""
        result = await self.db.execute(select(DemoSession))
        all_sessions = result.scalars().all()

        active_sessions = [s for s in all_sessions if s.status == DemoSessionStatus.ACTIVE]

        return {
            "total_sessions": len(all_sessions),
            "active_sessions": len(active_sessions),
            "expired_sessions": len([s for s in all_sessions if s.status == DemoSessionStatus.EXPIRED]),
            "destroyed_sessions": len([s for s in all_sessions if s.status == DemoSessionStatus.DESTROYED]),
            "avg_duration_minutes": sum(
                (s.destroyed_at - s.created_at).total_seconds() / 60
                for s in all_sessions if s.destroyed_at
            ) / max(len([s for s in all_sessions if s.destroyed_at]), 1),
            "total_requests": sum(s.request_count for s in all_sessions)
        }
        return await self.repository.get_session_stats()

    async def trigger_orchestrated_cloning(
        self,
@@ -299,7 +247,7 @@ class DemoSessionManager:

        # Mark cloning as started
        session.cloning_started_at = datetime.now(timezone.utc)
        await self.db.commit()
        await self.repository.update(session)

        # Run orchestration
        result = await self.orchestrator.clone_all_services(
@@ -340,8 +288,7 @@ class DemoSessionManager:
        session.data_cloned = True
        session.redis_populated = True

        await self.db.commit()
        await self.db.refresh(session)
        await self.repository.update(session)

        # Cache status in Redis for fast polling
        await self._cache_session_status(session)
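With the repository in place the manager keeps Redis and orchestration concerns but owns no SQL of its own. A minimal sketch of that delegation, assuming the caller already holds an AsyncSession and a connected DemoRedisWrapper (both passed in; names follow the code above):

# Hypothetical sketch: the manager delegating persistence to the repository.
async def handle_activity_ping(db, redis, session_id: str) -> None:
    manager = DemoSessionManager(db, redis)
    # Internally calls DemoSessionRepository.update_activity, which issues a
    # single UPDATE bumping last_activity_at and incrementing request_count.
    await manager.update_activity(session_id)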
214
services/forecasting/app/repositories/forecasting_alert_repository.py
Normal file
@@ -0,0 +1,214 @@
# services/forecasting/app/repositories/forecasting_alert_repository.py
"""
Forecasting Alert Repository
Data access layer for forecasting-specific alert detection and analysis
"""

from typing import List, Dict, Any
from uuid import UUID
from sqlalchemy import text
from sqlalchemy.ext.asyncio import AsyncSession
import structlog

logger = structlog.get_logger()


class ForecastingAlertRepository:
    """Repository for forecasting alert data access"""

    def __init__(self, session: AsyncSession):
        self.session = session

    async def get_weekend_demand_surges(self, tenant_id: UUID) -> List[Dict[str, Any]]:
        """
        Get predicted weekend demand surges
        Returns forecasts showing significant growth over previous weeks
        """
        try:
            query = text("""
                WITH weekend_forecast AS (
                    SELECT
                        f.tenant_id,
                        f.inventory_product_id,
                        f.product_name,
                        f.predicted_demand,
                        f.forecast_date,
                        LAG(f.predicted_demand, 7) OVER (
                            PARTITION BY f.tenant_id, f.inventory_product_id
                            ORDER BY f.forecast_date
                        ) as prev_week_demand,
                        AVG(f.predicted_demand) OVER (
                            PARTITION BY f.tenant_id, f.inventory_product_id
                            ORDER BY f.forecast_date
                            ROWS BETWEEN 6 PRECEDING AND CURRENT ROW
                        ) as avg_weekly_demand
                    FROM forecasts f
                    WHERE f.forecast_date >= CURRENT_DATE + INTERVAL '1 day'
                        AND f.forecast_date <= CURRENT_DATE + INTERVAL '3 days'
                        AND EXTRACT(DOW FROM f.forecast_date) IN (6, 0)
                        AND f.tenant_id = :tenant_id
                ),
                surge_analysis AS (
                    SELECT *,
                        CASE
                            WHEN prev_week_demand > 0 THEN
                                (predicted_demand - prev_week_demand) / prev_week_demand * 100
                            ELSE 0
                        END as growth_percentage,
                        CASE
                            WHEN avg_weekly_demand > 0 THEN
                                (predicted_demand - avg_weekly_demand) / avg_weekly_demand * 100
                            ELSE 0
                        END as avg_growth_percentage
                    FROM weekend_forecast
                )
                SELECT * FROM surge_analysis
                WHERE growth_percentage > 50 OR avg_growth_percentage > 50
                ORDER BY growth_percentage DESC
            """)

            result = await self.session.execute(query, {"tenant_id": tenant_id})
            return [dict(row._mapping) for row in result.fetchall()]

        except Exception as e:
            logger.error("Failed to get weekend demand surges", error=str(e), tenant_id=str(tenant_id))
            raise

    async def get_weather_impact_forecasts(self, tenant_id: UUID) -> List[Dict[str, Any]]:
        """
        Get weather impact on demand forecasts
        Returns forecasts with rain or significant demand changes
        """
        try:
            query = text("""
                WITH weather_impact AS (
                    SELECT
                        f.tenant_id,
                        f.inventory_product_id,
                        f.product_name,
                        f.predicted_demand,
                        f.forecast_date,
                        f.weather_precipitation,
                        f.weather_temperature,
                        f.traffic_volume,
                        AVG(f.predicted_demand) OVER (
                            PARTITION BY f.tenant_id, f.inventory_product_id
                            ORDER BY f.forecast_date
                            ROWS BETWEEN 6 PRECEDING AND CURRENT ROW
                        ) as avg_demand
                    FROM forecasts f
                    WHERE f.forecast_date >= CURRENT_DATE + INTERVAL '1 day'
                        AND f.forecast_date <= CURRENT_DATE + INTERVAL '2 days'
                        AND f.tenant_id = :tenant_id
                ),
                rain_impact AS (
                    SELECT *,
                        CASE
                            WHEN weather_precipitation > 2.0 THEN true
                            ELSE false
                        END as rain_forecast,
                        CASE
                            WHEN traffic_volume < 80 THEN true
                            ELSE false
                        END as low_traffic_expected,
                        (predicted_demand - avg_demand) / avg_demand * 100 as demand_change
                    FROM weather_impact
                )
                SELECT * FROM rain_impact
                WHERE rain_forecast = true OR demand_change < -15
                ORDER BY demand_change ASC
            """)

            result = await self.session.execute(query, {"tenant_id": tenant_id})
            return [dict(row._mapping) for row in result.fetchall()]

        except Exception as e:
            logger.error("Failed to get weather impact forecasts", error=str(e), tenant_id=str(tenant_id))
            raise

    async def get_holiday_demand_spikes(self, tenant_id: UUID) -> List[Dict[str, Any]]:
        """
        Get historical holiday demand spike analysis
        Returns products with significant holiday demand increases
        """
        try:
            query = text("""
                WITH holiday_demand AS (
                    SELECT
                        f.tenant_id,
                        f.inventory_product_id,
                        f.product_name,
                        AVG(f.predicted_demand) as avg_holiday_demand,
                        AVG(CASE WHEN f.is_holiday = false THEN f.predicted_demand END) as avg_normal_demand,
                        COUNT(*) as forecast_count
                    FROM forecasts f
                    WHERE f.created_at > CURRENT_DATE - INTERVAL '365 days'
                        AND f.tenant_id = :tenant_id
                    GROUP BY f.tenant_id, f.inventory_product_id, f.product_name
                    HAVING COUNT(*) >= 10
                ),
                demand_spike_analysis AS (
                    SELECT *,
                        CASE
                            WHEN avg_normal_demand > 0 THEN
                                (avg_holiday_demand - avg_normal_demand) / avg_normal_demand * 100
                            ELSE 0
                        END as spike_percentage
                    FROM holiday_demand
                )
                SELECT * FROM demand_spike_analysis
                WHERE spike_percentage > 25
                ORDER BY spike_percentage DESC
            """)

            result = await self.session.execute(query, {"tenant_id": tenant_id})
            return [dict(row._mapping) for row in result.fetchall()]

        except Exception as e:
            logger.error("Failed to get holiday demand spikes", error=str(e), tenant_id=str(tenant_id))
            raise

    async def get_demand_pattern_analysis(self, tenant_id: UUID) -> List[Dict[str, Any]]:
        """
        Get weekly demand pattern analysis for optimization
        Returns products with significant demand variations
        """
        try:
            query = text("""
                WITH weekly_patterns AS (
                    SELECT
                        f.tenant_id,
                        f.inventory_product_id,
                        f.product_name,
                        EXTRACT(DOW FROM f.forecast_date) as day_of_week,
                        AVG(f.predicted_demand) as avg_demand,
                        STDDEV(f.predicted_demand) as demand_variance,
                        COUNT(*) as data_points
                    FROM forecasts f
                    WHERE f.created_at > CURRENT_DATE - INTERVAL '60 days'
                        AND f.tenant_id = :tenant_id
                    GROUP BY f.tenant_id, f.inventory_product_id, f.product_name, EXTRACT(DOW FROM f.forecast_date)
                    HAVING COUNT(*) >= 5
                ),
                pattern_analysis AS (
                    SELECT
                        tenant_id, inventory_product_id, product_name,
                        MAX(avg_demand) as peak_demand,
                        MIN(avg_demand) as min_demand,
                        AVG(avg_demand) as overall_avg,
                        MAX(avg_demand) - MIN(avg_demand) as demand_range
                    FROM weekly_patterns
                    GROUP BY tenant_id, inventory_product_id, product_name
                )
                SELECT * FROM pattern_analysis
                WHERE demand_range > overall_avg * 0.3
                    AND peak_demand > overall_avg * 1.5
                ORDER BY demand_range DESC
            """)

            result = await self.session.execute(query, {"tenant_id": tenant_id})
            return [dict(row._mapping) for row in result.fetchall()]

        except Exception as e:
            logger.error("Failed to get demand pattern analysis", error=str(e), tenant_id=str(tenant_id))
            raise
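The surge query flags a product when either week-over-week growth or growth versus the trailing 7-row average exceeds 50%. A small sketch of the same arithmetic in Python, useful when sanity-checking rows returned by get_weekend_demand_surges (the values are made up for illustration):

# Hypothetical sketch: mirror of the SQL growth computation.
def growth_percentage(predicted: float, baseline: float) -> float:
    """Percentage growth over baseline; 0 when baseline is non-positive,
    matching the CASE ... WHEN prev_week_demand > 0 guard in the query."""
    return (predicted - baseline) / baseline * 100 if baseline > 0 else 0.0

assert growth_percentage(180.0, 100.0) == 80.0   # would be flagged (> 50)
assert growth_percentage(120.0, 100.0) == 20.0   # not flagged
assert growth_percentage(50.0, 0.0) == 0.0       # guarded division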
@@ -66,66 +66,25 @@ class ForecastingAlertService(BaseAlertService, AlertServiceMixin):
        """Check for predicted weekend demand surges (alerts)"""
        try:
            self._checks_performed += 1

            query = """
                WITH weekend_forecast AS (
                    SELECT
                        f.tenant_id,
                        f.inventory_product_id,
                        f.product_name,
                        f.predicted_demand,
                        f.forecast_date,
                        LAG(f.predicted_demand, 7) OVER (
                            PARTITION BY f.tenant_id, f.inventory_product_id
                            ORDER BY f.forecast_date
                        ) as prev_week_demand,
                        AVG(f.predicted_demand) OVER (
                            PARTITION BY f.tenant_id, f.inventory_product_id
                            ORDER BY f.forecast_date
                            ROWS BETWEEN 6 PRECEDING AND CURRENT ROW
                        ) as avg_weekly_demand
                    FROM forecasts f
                    WHERE f.forecast_date >= CURRENT_DATE + INTERVAL '1 day'
                        AND f.forecast_date <= CURRENT_DATE + INTERVAL '3 days'
                        AND EXTRACT(DOW FROM f.forecast_date) IN (6, 0) -- Saturday, Sunday
                        AND f.tenant_id = $1
                ),
                surge_analysis AS (
                    SELECT *,
                        CASE
                            WHEN prev_week_demand > 0 THEN
                                (predicted_demand - prev_week_demand) / prev_week_demand * 100
                            ELSE 0
                        END as growth_percentage,
                        CASE
                            WHEN avg_weekly_demand > 0 THEN
                                (predicted_demand - avg_weekly_demand) / avg_weekly_demand * 100
                            ELSE 0
                        END as avg_growth_percentage
                    FROM weekend_forecast
                )
                SELECT * FROM surge_analysis
                WHERE growth_percentage > 50 OR avg_growth_percentage > 50
                ORDER BY growth_percentage DESC
            """

            from app.repositories.forecasting_alert_repository import ForecastingAlertRepository

            tenants = await self.get_active_tenants()

            for tenant_id in tenants:
                try:
                    from sqlalchemy import text
                    async with self.db_manager.get_session() as session:
                        result = await session.execute(text(query), {"tenant_id": tenant_id})
                        surges = result.fetchall()

                        alert_repo = ForecastingAlertRepository(session)
                        surges = await alert_repo.get_weekend_demand_surges(tenant_id)

                        for surge in surges:
                            await self._process_weekend_surge(tenant_id, surge)

                except Exception as e:
                    logger.error("Error checking weekend demand surge",
                                tenant_id=str(tenant_id),
                    logger.error("Error checking weekend demand surge",
                                tenant_id=str(tenant_id),
                                error=str(e))

        except Exception as e:
            logger.error("Weekend demand surge check failed", error=str(e))
            self._errors_count += 1
@@ -184,64 +143,25 @@ class ForecastingAlertService(BaseAlertService, AlertServiceMixin):
        """Check for weather impact on demand (alerts)"""
        try:
            self._checks_performed += 1

            # Get weather forecast data and correlate with demand patterns
            query = """
                WITH weather_impact AS (
                    SELECT
                        f.tenant_id,
                        f.inventory_product_id,
                        f.product_name,
                        f.predicted_demand,
                        f.forecast_date,
                        f.weather_precipitation,
                        f.weather_temperature,
                        f.traffic_volume,
                        AVG(f.predicted_demand) OVER (
                            PARTITION BY f.tenant_id, f.inventory_product_id
                            ORDER BY f.forecast_date
                            ROWS BETWEEN 6 PRECEDING AND CURRENT ROW
                        ) as avg_demand
                    FROM forecasts f
                    WHERE f.forecast_date >= CURRENT_DATE + INTERVAL '1 day'
                        AND f.forecast_date <= CURRENT_DATE + INTERVAL '2 days'
                        AND f.tenant_id = $1
                ),
                rain_impact AS (
                    SELECT *,
                        CASE
                            WHEN weather_precipitation > 2.0 THEN true
                            ELSE false
                        END as rain_forecast,
                        CASE
                            WHEN traffic_volume < 80 THEN true
                            ELSE false
                        END as low_traffic_expected,
                        (predicted_demand - avg_demand) / avg_demand * 100 as demand_change
                    FROM weather_impact
                )
                SELECT * FROM rain_impact
                WHERE rain_forecast = true OR demand_change < -15
                ORDER BY demand_change ASC
            """

            from app.repositories.forecasting_alert_repository import ForecastingAlertRepository

            tenants = await self.get_active_tenants()

            for tenant_id in tenants:
                try:
                    from sqlalchemy import text
                    async with self.db_manager.get_session() as session:
                        result = await session.execute(text(query), {"tenant_id": tenant_id})
                        weather_impacts = result.fetchall()

                        alert_repo = ForecastingAlertRepository(session)
                        weather_impacts = await alert_repo.get_weather_impact_forecasts(tenant_id)

                        for impact in weather_impacts:
                            await self._process_weather_impact(tenant_id, impact)

                except Exception as e:
                    logger.error("Error checking weather impact",
                                tenant_id=str(tenant_id),
                    logger.error("Error checking weather impact",
                                tenant_id=str(tenant_id),
                                error=str(e))

        except Exception as e:
            logger.error("Weather impact check failed", error=str(e))
            self._errors_count += 1
@@ -308,63 +228,34 @@ class ForecastingAlertService(BaseAlertService, AlertServiceMixin):
        """Check for upcoming Spanish holidays requiring preparation (alerts)"""
        try:
            self._checks_performed += 1

            # Check for Spanish holidays in the next 3-7 days
            upcoming_holidays = await self._get_upcoming_spanish_holidays(3, 7)

            if not upcoming_holidays:
                return

            # Analyze historical demand spikes for holidays
            query = """
                WITH holiday_demand AS (
                    SELECT
                        f.tenant_id,
                        f.inventory_product_id,
                        f.product_name,
                        AVG(f.predicted_demand) as avg_holiday_demand,
                        AVG(CASE WHEN f.is_holiday = false THEN f.predicted_demand END) as avg_normal_demand,
                        COUNT(*) as forecast_count
                    FROM forecasts f
                    WHERE f.created_at > CURRENT_DATE - INTERVAL '365 days'
                        AND f.tenant_id = $1
                    GROUP BY f.tenant_id, f.inventory_product_id, f.product_name
                    HAVING COUNT(*) >= 10
                ),
                demand_spike_analysis AS (
                    SELECT *,
                        CASE
                            WHEN avg_normal_demand > 0 THEN
                                (avg_holiday_demand - avg_normal_demand) / avg_normal_demand * 100
                            ELSE 0
                        END as spike_percentage
                    FROM holiday_demand
                )
                SELECT * FROM demand_spike_analysis
                WHERE spike_percentage > 25
                ORDER BY spike_percentage DESC
            """

            from app.repositories.forecasting_alert_repository import ForecastingAlertRepository

            tenants = await self.get_active_tenants()

            for tenant_id in tenants:
                try:
                    from sqlalchemy import text
                    async with self.db_manager.get_session() as session:
                        result = await session.execute(text(query), {"tenant_id": tenant_id})
                        demand_spikes = result.fetchall()

                        alert_repo = ForecastingAlertRepository(session)
                        demand_spikes = await alert_repo.get_holiday_demand_spikes(tenant_id)

                        for holiday_info in upcoming_holidays:
                            for spike in demand_spikes:
                                await self._process_holiday_preparation(
                                    tenant_id, holiday_info, spike
                                )

                except Exception as e:
                    logger.error("Error checking holiday preparation",
                                tenant_id=str(tenant_id),
                    logger.error("Error checking holiday preparation",
                                tenant_id=str(tenant_id),
                                error=str(e))

        except Exception as e:
            logger.error("Holiday preparation check failed", error=str(e))
            self._errors_count += 1
@@ -415,57 +306,25 @@ class ForecastingAlertService(BaseAlertService, AlertServiceMixin):
        """Analyze demand patterns for recommendations"""
        try:
            self._checks_performed += 1

            # Analyze weekly patterns for optimization opportunities
            query = """
                WITH weekly_patterns AS (
                    SELECT
                        f.tenant_id,
                        f.inventory_product_id,
                        f.product_name,
                        EXTRACT(DOW FROM f.forecast_date) as day_of_week,
                        AVG(f.predicted_demand) as avg_demand,
                        STDDEV(f.predicted_demand) as demand_variance,
                        COUNT(*) as data_points
                    FROM forecasts f
                    WHERE f.created_at > CURRENT_DATE - INTERVAL '60 days'
                        AND f.tenant_id = $1
                    GROUP BY f.tenant_id, f.inventory_product_id, f.product_name, EXTRACT(DOW FROM f.forecast_date)
                    HAVING COUNT(*) >= 5
                ),
                pattern_analysis AS (
                    SELECT
                        tenant_id, inventory_product_id, product_name,
                        MAX(avg_demand) as peak_demand,
                        MIN(avg_demand) as min_demand,
                        AVG(avg_demand) as overall_avg,
                        MAX(avg_demand) - MIN(avg_demand) as demand_range
                    FROM weekly_patterns
                    GROUP BY tenant_id, inventory_product_id, product_name
                )
                SELECT * FROM pattern_analysis
                WHERE demand_range > overall_avg * 0.3
                    AND peak_demand > overall_avg * 1.5
                ORDER BY demand_range DESC
            """

            from app.repositories.forecasting_alert_repository import ForecastingAlertRepository

            tenants = await self.get_active_tenants()

            for tenant_id in tenants:
                try:
                    from sqlalchemy import text
                    async with self.db_manager.get_session() as session:
                        result = await session.execute(text(query), {"tenant_id": tenant_id})
                        patterns = result.fetchall()

                        alert_repo = ForecastingAlertRepository(session)
                        patterns = await alert_repo.get_demand_pattern_analysis(tenant_id)

                        for pattern in patterns:
                            await self._generate_demand_pattern_recommendation(tenant_id, pattern)

                except Exception as e:
                    logger.error("Error analyzing demand patterns",
                                tenant_id=str(tenant_id),
                    logger.error("Error analyzing demand patterns",
                                tenant_id=str(tenant_id),
                                error=str(e))

        except Exception as e:
            logger.error("Demand pattern analysis failed", error=str(e))
            self._errors_count += 1
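All four checks now share the same shape: open a session, build a ForecastingAlertRepository, fetch rows for the tenant, and feed each row to a processor. A hedged sketch of that shared loop factored into a helper (the helper itself is not part of this commit; db_manager and the processor callables follow the names used above):

# Hypothetical sketch: the per-tenant check loop factored out.
from typing import Any, Awaitable, Callable, Iterable

from app.repositories.forecasting_alert_repository import ForecastingAlertRepository

async def run_tenant_check(
    db_manager: Any,
    tenant_ids: Iterable[Any],
    fetch: Callable[[ForecastingAlertRepository, Any], Awaitable[list]],
    process: Callable[[Any, dict], Awaitable[None]],
) -> None:
    for tenant_id in tenant_ids:
        async with db_manager.get_session() as session:
            repo = ForecastingAlertRepository(session)
            for row in await fetch(repo, tenant_id):
                await process(tenant_id, row)

Each check would then reduce to a single call, e.g. run_tenant_check(self.db_manager, tenants, ForecastingAlertRepository.get_weekend_demand_surges, self._process_weekend_surge).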
@@ -20,7 +20,6 @@ sys.path.insert(0, str(Path(__file__).parent.parent.parent.parent.parent))
from app.core.database import get_db
from app.models.inventory import Ingredient, Stock
from shared.utils.demo_dates import adjust_date_for_demo, BASE_REFERENCE_DATE
from shared.messaging.rabbitmq import RabbitMQClient

logger = structlog.get_logger()
router = APIRouter(prefix="/internal/demo", tags=["internal"])
@@ -254,44 +253,12 @@ async def clone_demo_data(
    # Commit all changes
    await db.commit()

    # Generate inventory alerts with RabbitMQ publishing
    rabbitmq_client = None
    try:
        from shared.utils.alert_generator import generate_inventory_alerts
    # NOTE: Alert generation removed - alerts are now generated automatically by the
    # inventory_alert_service which runs scheduled checks every 2-5 minutes.
    # This eliminates duplicate alerts and provides a more realistic demo experience.
    stats["alerts_generated"] = 0

        # Initialize RabbitMQ client for alert publishing
        rabbitmq_host = os.getenv("RABBITMQ_HOST", "rabbitmq-service")
        rabbitmq_user = os.getenv("RABBITMQ_USER", "bakery")
        rabbitmq_password = os.getenv("RABBITMQ_PASSWORD", "forecast123")
        rabbitmq_port = os.getenv("RABBITMQ_PORT", "5672")
        rabbitmq_vhost = os.getenv("RABBITMQ_VHOST", "/")
        rabbitmq_url = f"amqp://{rabbitmq_user}:{rabbitmq_password}@{rabbitmq_host}:{rabbitmq_port}{rabbitmq_vhost}"

        rabbitmq_client = RabbitMQClient(rabbitmq_url, service_name="inventory")
        await rabbitmq_client.connect()

        # Generate alerts and publish to RabbitMQ
        alerts_count = await generate_inventory_alerts(
            db,
            virtual_uuid,
            session_created_at,
            rabbitmq_client=rabbitmq_client
        )
        stats["alerts_generated"] = alerts_count
        await db.commit()
        logger.info(f"Generated {alerts_count} inventory alerts", virtual_tenant_id=virtual_tenant_id)
    except Exception as e:
        logger.warning(f"Failed to generate alerts: {str(e)}", exc_info=True)
        stats["alerts_generated"] = 0
    finally:
        # Clean up RabbitMQ connection
        if rabbitmq_client:
            try:
                await rabbitmq_client.disconnect()
            except Exception as cleanup_error:
                logger.warning(f"Error disconnecting RabbitMQ: {cleanup_error}")

    total_records = sum(stats.values())
    total_records = stats["ingredients"] + stats["stock_batches"]
    duration_ms = int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000)

    logger.info(
||||
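The explicit sum replaces sum(stats.values()) because stats also carries counters such as alerts_generated that are not cloned records, so summing every value would inflate the total. A one-line sketch of the difference (the numbers are illustrative):

# Hypothetical illustration of why the explicit sum is used.
stats = {"ingredients": 40, "stock_batches": 120, "alerts_generated": 15}
assert sum(stats.values()) == 175                              # overcounts
assert stats["ingredients"] + stats["stock_batches"] == 160    # records actually cloned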
374
services/inventory/app/api/sustainability.py
Normal file
@@ -0,0 +1,374 @@
# ================================================================
# services/inventory/app/api/sustainability.py
# ================================================================
"""
Sustainability API endpoints for Environmental Impact & SDG Compliance
Following standardized URL structure: /api/v1/tenants/{tenant_id}/sustainability/{operation}
"""

from datetime import datetime, timedelta
from typing import Optional
from uuid import UUID
from fastapi import APIRouter, Depends, HTTPException, Query, Path, status
from fastapi.responses import JSONResponse
from sqlalchemy.ext.asyncio import AsyncSession
import structlog

from shared.auth.decorators import get_current_user_dep
from app.core.database import get_db
from app.services.sustainability_service import SustainabilityService
from app.schemas.sustainability import (
    SustainabilityMetrics,
    GrantReport,
    SustainabilityWidgetData,
    SustainabilityMetricsRequest,
    GrantReportRequest
)
from shared.routing import RouteBuilder

logger = structlog.get_logger()

# Create route builder for consistent URL structure
route_builder = RouteBuilder('sustainability')

router = APIRouter(tags=["sustainability"])


# ===== Dependency Injection =====

async def get_sustainability_service() -> SustainabilityService:
    """Get sustainability service instance"""
    return SustainabilityService()


# ===== SUSTAINABILITY ENDPOINTS =====

@router.get(
    "/api/v1/tenants/{tenant_id}/sustainability/metrics",
    response_model=SustainabilityMetrics,
    summary="Get Sustainability Metrics",
    description="Get comprehensive sustainability metrics including environmental impact, SDG compliance, and grant readiness"
)
async def get_sustainability_metrics(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    start_date: Optional[datetime] = Query(None, description="Start date for metrics (default: 30 days ago)"),
    end_date: Optional[datetime] = Query(None, description="End date for metrics (default: now)"),
    current_user: dict = Depends(get_current_user_dep),
    sustainability_service: SustainabilityService = Depends(get_sustainability_service),
    db: AsyncSession = Depends(get_db)
):
    """
    Get comprehensive sustainability metrics for the tenant.

    **Includes:**
    - Food waste metrics (production, inventory, total)
    - Environmental impact (CO2, water, land use)
    - UN SDG 12.3 compliance tracking
    - Waste avoided through AI predictions
    - Financial impact analysis
    - Grant program eligibility assessment

    **Use cases:**
    - Dashboard displays
    - Grant applications
    - Sustainability reporting
    - Compliance verification
    """
    try:
        metrics = await sustainability_service.get_sustainability_metrics(
            db=db,
            tenant_id=tenant_id,
            start_date=start_date,
            end_date=end_date
        )

        logger.info(
            "Sustainability metrics retrieved",
            tenant_id=str(tenant_id),
            user_id=current_user.get('user_id'),
            waste_reduction=metrics.get('sdg_compliance', {}).get('sdg_12_3', {}).get('reduction_achieved', 0)
        )

        return metrics

    except Exception as e:
        logger.error(
            "Error getting sustainability metrics",
            tenant_id=str(tenant_id),
            error=str(e)
        )
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Failed to retrieve sustainability metrics: {str(e)}"
        )


@router.get(
    "/api/v1/tenants/{tenant_id}/sustainability/widget",
    response_model=SustainabilityWidgetData,
    summary="Get Sustainability Widget Data",
    description="Get simplified sustainability data optimized for dashboard widgets"
)
async def get_sustainability_widget_data(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    days: int = Query(30, ge=1, le=365, description="Number of days to analyze"),
    current_user: dict = Depends(get_current_user_dep),
    sustainability_service: SustainabilityService = Depends(get_sustainability_service),
    db: AsyncSession = Depends(get_db)
):
    """
    Get simplified sustainability metrics for dashboard widgets.

    **Optimized for:**
    - Dashboard displays
    - Quick overview cards
    - Real-time monitoring

    **Returns:**
    - Key metrics only
    - Human-readable values
    - Status indicators
    """
    try:
        end_date = datetime.now()
        start_date = end_date - timedelta(days=days)

        metrics = await sustainability_service.get_sustainability_metrics(
            db=db,
            tenant_id=tenant_id,
            start_date=start_date,
            end_date=end_date
        )

        # Extract widget-friendly data
        widget_data = {
            'total_waste_kg': metrics['waste_metrics']['total_waste_kg'],
            'waste_reduction_percentage': metrics['sdg_compliance']['sdg_12_3']['reduction_achieved'],
            'co2_saved_kg': metrics['environmental_impact']['co2_emissions']['kg'],
            'water_saved_liters': metrics['environmental_impact']['water_footprint']['liters'],
            'trees_equivalent': metrics['environmental_impact']['co2_emissions']['trees_to_offset'],
            'sdg_status': metrics['sdg_compliance']['sdg_12_3']['status'],
            'sdg_progress': metrics['sdg_compliance']['sdg_12_3']['progress_to_target'],
            'grant_programs_ready': len(metrics['grant_readiness']['recommended_applications']),
            'financial_savings_eur': metrics['financial_impact']['waste_cost_eur']
        }

        logger.info(
            "Widget data retrieved",
            tenant_id=str(tenant_id),
            user_id=current_user.get('user_id')
        )

        return widget_data

    except Exception as e:
        logger.error(
            "Error getting widget data",
            tenant_id=str(tenant_id),
            error=str(e)
        )
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Failed to retrieve widget data: {str(e)}"
        )


@router.post(
    "/api/v1/tenants/{tenant_id}/sustainability/export/grant-report",
    response_model=GrantReport,
    summary="Export Grant Application Report",
    description="Generate a comprehensive report formatted for grant applications"
)
async def export_grant_report(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    request: GrantReportRequest = None,
    current_user: dict = Depends(get_current_user_dep),
    sustainability_service: SustainabilityService = Depends(get_sustainability_service),
    db: AsyncSession = Depends(get_db)
):
    """
    Generate comprehensive grant application report.

    **Supported grant types:**
    - `general`: General sustainability report
    - `eu_horizon`: EU Horizon Europe format
    - `farm_to_fork`: EU Farm to Fork Strategy
    - `circular_economy`: Circular Economy grants
    - `un_sdg`: UN SDG certification

    **Export formats:**
    - `json`: JSON format (default)
    - `pdf`: PDF document (future)
    - `csv`: CSV export (future)

    **Use cases:**
    - Grant applications
    - Compliance reporting
    - Investor presentations
    - Certification requests
    """
    try:
        if request is None:
            request = GrantReportRequest()

        report = await sustainability_service.export_grant_report(
            db=db,
            tenant_id=tenant_id,
            grant_type=request.grant_type,
            start_date=request.start_date,
            end_date=request.end_date
        )

        logger.info(
            "Grant report exported",
            tenant_id=str(tenant_id),
            grant_type=request.grant_type,
            user_id=current_user.get('user_id')
        )

        # For now, return JSON. In future, support PDF/CSV generation
        if request.format == 'json':
            return report
        else:
            # Future: Generate PDF or CSV
            raise HTTPException(
                status_code=status.HTTP_501_NOT_IMPLEMENTED,
                detail=f"Export format '{request.format}' not yet implemented. Use 'json' for now."
            )

    except Exception as e:
        logger.error(
            "Error exporting grant report",
            tenant_id=str(tenant_id),
            error=str(e)
        )
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Failed to export grant report: {str(e)}"
        )


@router.get(
    "/api/v1/tenants/{tenant_id}/sustainability/sdg-compliance",
    summary="Get SDG 12.3 Compliance Status",
    description="Get detailed UN SDG 12.3 compliance status and progress"
)
async def get_sdg_compliance(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    current_user: dict = Depends(get_current_user_dep),
    sustainability_service: SustainabilityService = Depends(get_sustainability_service),
    db: AsyncSession = Depends(get_db)
):
    """
    Get detailed UN SDG 12.3 compliance information.

    **SDG 12.3 Target:**
    By 2030, halve per capita global food waste at the retail and consumer levels
    and reduce food losses along production and supply chains, including post-harvest losses.

    **Returns:**
    - Current compliance status
    - Progress toward 50% reduction target
    - Baseline comparison
    - Certification readiness
    - Improvement recommendations
    """
    try:
        metrics = await sustainability_service.get_sustainability_metrics(
            db=db,
            tenant_id=tenant_id
        )

        sdg_data = {
            'sdg_12_3_compliance': metrics['sdg_compliance']['sdg_12_3'],
            'baseline_period': metrics['sdg_compliance']['baseline_period'],
            'certification_ready': metrics['sdg_compliance']['certification_ready'],
            'improvement_areas': metrics['sdg_compliance']['improvement_areas'],
            'current_waste': metrics['waste_metrics'],
            'environmental_impact': metrics['environmental_impact']
        }

        logger.info(
            "SDG compliance data retrieved",
            tenant_id=str(tenant_id),
            status=sdg_data['sdg_12_3_compliance']['status']
        )

        return sdg_data

    except Exception as e:
        logger.error(
            "Error getting SDG compliance",
            tenant_id=str(tenant_id),
            error=str(e)
        )
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Failed to retrieve SDG compliance data: {str(e)}"
        )


@router.get(
    "/api/v1/tenants/{tenant_id}/sustainability/environmental-impact",
    summary="Get Environmental Impact",
    description="Get detailed environmental impact metrics"
)
async def get_environmental_impact(
    tenant_id: UUID = Path(..., description="Tenant ID"),
    days: int = Query(30, ge=1, le=365, description="Number of days to analyze"),
    current_user: dict = Depends(get_current_user_dep),
    sustainability_service: SustainabilityService = Depends(get_sustainability_service),
    db: AsyncSession = Depends(get_db)
):
    """
    Get detailed environmental impact of food waste.

    **Metrics included:**
    - CO2 emissions (kg and tons)
    - Water footprint (liters and cubic meters)
    - Land use (m² and hectares)
    - Human-relatable equivalents (car km, showers, etc.)

    **Use cases:**
    - Sustainability reports
    - Marketing materials
    - Customer communication
    - ESG reporting
    """
    try:
        end_date = datetime.now()
        start_date = end_date - timedelta(days=days)

        metrics = await sustainability_service.get_sustainability_metrics(
            db=db,
            tenant_id=tenant_id,
            start_date=start_date,
            end_date=end_date
        )

        impact_data = {
            'period': metrics['period'],
            'waste_metrics': metrics['waste_metrics'],
            'environmental_impact': metrics['environmental_impact'],
            'avoided_impact': metrics['avoided_waste']['environmental_impact_avoided'],
            'financial_impact': metrics['financial_impact']
        }

        logger.info(
            "Environmental impact data retrieved",
            tenant_id=str(tenant_id),
            co2_kg=impact_data['environmental_impact']['co2_emissions']['kg']
        )

        return impact_data

    except Exception as e:
        logger.error(
            "Error getting environmental impact",
            tenant_id=str(tenant_id),
            error=str(e)
        )
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Failed to retrieve environmental impact: {str(e)}"
        )
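The endpoints follow the tenant-scoped URL scheme described in the module docstring, so a client only needs the tenant UUID and a bearer token. A minimal sketch with httpx; the host, port, and token are placeholder assumptions, and the response shape follows the widget_data dict built above:

# Hypothetical sketch: fetching widget data for a dashboard.
import httpx

async def fetch_widget(tenant_id: str, token: str) -> dict:
    url = f"http://inventory:8000/api/v1/tenants/{tenant_id}/sustainability/widget"  # assumed host/port
    async with httpx.AsyncClient() as client:
        resp = await client.get(url, params={"days": 30},
                                headers={"Authorization": f"Bearer {token}"})
        resp.raise_for_status()
        return resp.json()  # e.g. keys: total_waste_kg, co2_saved_kg, sdg_status, ...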
@@ -24,6 +24,7 @@ from app.api import (
    food_safety_operations,
    dashboard,
    analytics,
    sustainability,
    internal_demo
)

@@ -103,7 +104,11 @@ class InventoryService(StandardFastAPIService):
            "dashboard_analytics",
            "business_model_detection",
            "real_time_alerts",
            "regulatory_reporting"
            "regulatory_reporting",
            "sustainability_tracking",
            "sdg_compliance",
            "environmental_impact",
            "grant_reporting"
        ]

@@ -127,6 +132,7 @@ service.add_router(food_safety_alerts.router)
service.add_router(food_safety_operations.router)
service.add_router(dashboard.router)
service.add_router(analytics.router)
service.add_router(sustainability.router)
service.add_router(internal_demo.router)
||||
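With the router registered, the new routes should be visible on the service's FastAPI app. A hedged smoke check, assuming StandardFastAPIService exposes the underlying app as service.app (an illustrative attribute name, not confirmed by this commit):

# Hypothetical sketch: verify the sustainability routes are mounted.
def test_sustainability_routes_registered():
    paths = {route.path for route in service.app.routes}  # assumed .app attribute
    assert "/api/v1/tenants/{tenant_id}/sustainability/metrics" in paths
    assert "/api/v1/tenants/{tenant_id}/sustainability/widget" in paths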
464
services/inventory/app/repositories/dashboard_repository.py
Normal file
@@ -0,0 +1,464 @@
# services/inventory/app/repositories/dashboard_repository.py
"""
Dashboard Repository for complex dashboard queries
"""

from typing import List, Optional, Dict, Any
from uuid import UUID
from datetime import datetime
from decimal import Decimal
from sqlalchemy import text
from sqlalchemy.ext.asyncio import AsyncSession
import structlog

logger = structlog.get_logger()


class DashboardRepository:
    """Repository for dashboard-specific database queries"""

    def __init__(self, session: AsyncSession):
        self.session = session

    async def get_business_model_metrics(self, tenant_id: UUID) -> Dict[str, Any]:
        """Get ingredient metrics for business model detection"""
        try:
            query = text("""
                SELECT
                    COUNT(*) as total_ingredients,
                    COUNT(CASE WHEN product_type::text = 'finished_product' THEN 1 END) as finished_products,
                    COUNT(CASE WHEN product_type::text = 'ingredient' THEN 1 END) as raw_ingredients,
                    COUNT(DISTINCT st.supplier_id) as supplier_count,
                    AVG(CASE WHEN s.available_quantity IS NOT NULL THEN s.available_quantity ELSE 0 END) as avg_stock_level
                FROM ingredients i
                LEFT JOIN (
                    SELECT ingredient_id, SUM(available_quantity) as available_quantity
                    FROM stock WHERE tenant_id = :tenant_id GROUP BY ingredient_id
                ) s ON i.id = s.ingredient_id
                LEFT JOIN (
                    SELECT ingredient_id, supplier_id
                    FROM stock WHERE tenant_id = :tenant_id AND supplier_id IS NOT NULL
                    GROUP BY ingredient_id, supplier_id
                ) st ON i.id = st.ingredient_id
                WHERE i.tenant_id = :tenant_id AND i.is_active = true
            """)

            result = await self.session.execute(query, {"tenant_id": tenant_id})
            row = result.fetchone()

            if not row:
                return {
                    "total_ingredients": 0,
                    "finished_products": 0,
                    "raw_ingredients": 0,
                    "supplier_count": 0,
                    "avg_stock_level": 0
                }

            return {
                "total_ingredients": row.total_ingredients,
                "finished_products": row.finished_products,
                "raw_ingredients": row.raw_ingredients,
                "supplier_count": row.supplier_count,
                "avg_stock_level": float(row.avg_stock_level) if row.avg_stock_level else 0
            }

        except Exception as e:
            logger.error("Failed to get business model metrics", error=str(e), tenant_id=str(tenant_id))
            raise

    async def get_stock_by_category(self, tenant_id: UUID) -> Dict[str, Dict[str, Any]]:
        """Get stock breakdown by category"""
        try:
            query = text("""
                SELECT
                    COALESCE(i.ingredient_category::text, i.product_category::text, 'other') as category,
                    COUNT(*) as count,
                    COALESCE(SUM(s.available_quantity * s.unit_cost), 0) as total_value
                FROM ingredients i
                LEFT JOIN (
                    SELECT ingredient_id, SUM(available_quantity) as available_quantity, AVG(unit_cost) as unit_cost
                    FROM stock WHERE tenant_id = :tenant_id GROUP BY ingredient_id
                ) s ON i.id = s.ingredient_id
                WHERE i.tenant_id = :tenant_id AND i.is_active = true
                GROUP BY category
            """)

            result = await self.session.execute(query, {"tenant_id": tenant_id})
            categories = {}

            for row in result.fetchall():
                categories[row.category] = {
                    "count": row.count,
                    "total_value": float(row.total_value)
                }

            return categories

        except Exception as e:
            logger.error("Failed to get stock by category", error=str(e), tenant_id=str(tenant_id))
            raise

    async def get_alerts_by_severity(self, tenant_id: UUID) -> Dict[str, int]:
        """Get active alerts breakdown by severity"""
        try:
            query = text("""
                SELECT severity, COUNT(*) as count
                FROM food_safety_alerts
                WHERE tenant_id = :tenant_id AND status = 'active'
                GROUP BY severity
            """)

            result = await self.session.execute(query, {"tenant_id": tenant_id})
            alerts = {"critical": 0, "high": 0, "medium": 0, "low": 0}

            for row in result.fetchall():
                alerts[row.severity] = row.count

            return alerts

        except Exception as e:
            logger.error("Failed to get alerts by severity", error=str(e), tenant_id=str(tenant_id))
            raise
    async def get_movements_by_type(self, tenant_id: UUID, days: int = 7) -> Dict[str, int]:
        """Get stock movements breakdown by type for recent period"""
        try:
            # Interpolate the integer day window into the interval, matching the
            # pattern used by get_alert_trend below, so the days parameter is honored.
            query = text(f"""
                SELECT sm.movement_type, COUNT(*) as count
                FROM stock_movements sm
                JOIN ingredients i ON sm.ingredient_id = i.id
                WHERE i.tenant_id = :tenant_id
                    AND sm.movement_date > NOW() - INTERVAL '{days} days'
                GROUP BY sm.movement_type
            """)

            result = await self.session.execute(query, {"tenant_id": tenant_id})
            movements = {}

            for row in result.fetchall():
                movements[row.movement_type] = row.count

            return movements

        except Exception as e:
            logger.error("Failed to get movements by type", error=str(e), tenant_id=str(tenant_id))
            raise
    async def get_alert_trend(self, tenant_id: UUID, days: int = 30) -> List[Dict[str, Any]]:
        """Get alert trend over time"""
        try:
            query = text(f"""
                SELECT
                    DATE(created_at) as alert_date,
                    COUNT(*) as alert_count,
                    COUNT(CASE WHEN severity IN ('high', 'critical') THEN 1 END) as high_severity_count
                FROM food_safety_alerts
                WHERE tenant_id = :tenant_id
                    AND created_at > NOW() - INTERVAL '{days} days'
                GROUP BY DATE(created_at)
                ORDER BY alert_date
            """)

            result = await self.session.execute(query, {"tenant_id": tenant_id})

            return [
                {
                    "date": row.alert_date.isoformat(),
                    "total_alerts": row.alert_count,
                    "high_severity_alerts": row.high_severity_count
                }
                for row in result.fetchall()
            ]

        except Exception as e:
            logger.error("Failed to get alert trend", error=str(e), tenant_id=str(tenant_id))
            raise

    async def get_recent_stock_movements(
        self,
        tenant_id: UUID,
        limit: int = 20
    ) -> List[Dict[str, Any]]:
        """Get recent stock movements"""
        try:
            query = text("""
                SELECT
                    'stock_movement' as activity_type,
                    CASE
                        WHEN movement_type = 'PURCHASE' THEN 'Stock added: ' || i.name || ' (' || sm.quantity || ' ' || i.unit_of_measure::text || ')'
                        WHEN movement_type = 'PRODUCTION_USE' THEN 'Stock consumed: ' || i.name || ' (' || sm.quantity || ' ' || i.unit_of_measure::text || ')'
                        WHEN movement_type = 'WASTE' THEN 'Stock wasted: ' || i.name || ' (' || sm.quantity || ' ' || i.unit_of_measure::text || ')'
                        WHEN movement_type = 'ADJUSTMENT' THEN 'Stock adjusted: ' || i.name || ' (' || sm.quantity || ' ' || i.unit_of_measure::text || ')'
                        ELSE 'Stock movement: ' || i.name
                    END as description,
                    sm.movement_date as timestamp,
                    sm.created_by as user_id,
                    CASE
                        WHEN movement_type = 'WASTE' THEN 'high'
                        WHEN movement_type = 'ADJUSTMENT' THEN 'medium'
                        ELSE 'low'
                    END as impact_level,
                    sm.id as entity_id,
                    'stock_movement' as entity_type
                FROM stock_movements sm
                JOIN ingredients i ON sm.ingredient_id = i.id
                WHERE i.tenant_id = :tenant_id
                ORDER BY sm.movement_date DESC
                LIMIT :limit
            """)

            result = await self.session.execute(query, {"tenant_id": tenant_id, "limit": limit})

            return [
                {
                    "activity_type": row.activity_type,
                    "description": row.description,
                    "timestamp": row.timestamp,
                    "user_id": row.user_id,
                    "impact_level": row.impact_level,
                    "entity_id": row.entity_id,
                    "entity_type": row.entity_type
                }
                for row in result.fetchall()
            ]

        except Exception as e:
            logger.error("Failed to get recent stock movements", error=str(e), tenant_id=str(tenant_id))
            raise

    async def get_recent_food_safety_alerts(
        self,
        tenant_id: UUID,
        limit: int = 20
    ) -> List[Dict[str, Any]]:
        """Get recent food safety alerts"""
        try:
            query = text("""
                SELECT
                    'food_safety_alert' as activity_type,
                    title as description,
                    created_at as timestamp,
                    created_by as user_id,
                    CASE
                        WHEN severity = 'critical' THEN 'high'
                        WHEN severity = 'high' THEN 'medium'
                        ELSE 'low'
                    END as impact_level,
                    id as entity_id,
                    'food_safety_alert' as entity_type
                FROM food_safety_alerts
                WHERE tenant_id = :tenant_id
                ORDER BY created_at DESC
                LIMIT :limit
            """)

            result = await self.session.execute(query, {"tenant_id": tenant_id, "limit": limit})

            return [
                {
                    "activity_type": row.activity_type,
                    "description": row.description,
                    "timestamp": row.timestamp,
                    "user_id": row.user_id,
                    "impact_level": row.impact_level,
                    "entity_id": row.entity_id,
                    "entity_type": row.entity_type
                }
                for row in result.fetchall()
            ]

        except Exception as e:
            logger.error("Failed to get recent food safety alerts", error=str(e), tenant_id=str(tenant_id))
            raise

    async def get_live_metrics(self, tenant_id: UUID) -> Dict[str, Any]:
        """Get real-time inventory metrics"""
        try:
            query = text("""
                SELECT
                    COUNT(DISTINCT i.id) as total_ingredients,
                    COUNT(CASE WHEN s.available_quantity > i.low_stock_threshold THEN 1 END) as in_stock,
                    COUNT(CASE WHEN s.available_quantity <= i.low_stock_threshold THEN 1 END) as low_stock,
                    COUNT(CASE WHEN s.available_quantity = 0 THEN 1 END) as out_of_stock,
                    COALESCE(SUM(s.available_quantity * s.unit_cost), 0) as total_value,
                    COUNT(CASE WHEN s.expiration_date < NOW() THEN 1 END) as expired_items,
                    COUNT(CASE WHEN s.expiration_date BETWEEN NOW() AND NOW() + INTERVAL '7 days' THEN 1 END) as expiring_soon
                FROM ingredients i
                LEFT JOIN stock s ON i.id = s.ingredient_id AND s.is_available = true
|
||||
WHERE i.tenant_id = :tenant_id AND i.is_active = true
|
||||
""")
|
||||
|
||||
result = await self.session.execute(query, {"tenant_id": tenant_id})
|
||||
metrics = result.fetchone()
|
||||
|
||||
if not metrics:
|
||||
return {
|
||||
"total_ingredients": 0,
|
||||
"in_stock": 0,
|
||||
"low_stock": 0,
|
||||
"out_of_stock": 0,
|
||||
"total_value": 0.0,
|
||||
"expired_items": 0,
|
||||
"expiring_soon": 0,
|
||||
"last_updated": datetime.now().isoformat()
|
||||
}
|
||||
|
||||
return {
|
||||
"total_ingredients": metrics.total_ingredients,
|
||||
"in_stock": metrics.in_stock,
|
||||
"low_stock": metrics.low_stock,
|
||||
"out_of_stock": metrics.out_of_stock,
|
||||
"total_value": float(metrics.total_value),
|
||||
"expired_items": metrics.expired_items,
|
||||
"expiring_soon": metrics.expiring_soon,
|
||||
"last_updated": datetime.now().isoformat()
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Failed to get live metrics", error=str(e), tenant_id=str(tenant_id))
|
||||
raise
|
||||
|
||||
async def get_stock_status_by_category(
|
||||
self,
|
||||
tenant_id: UUID
|
||||
) -> List[Dict[str, Any]]:
|
||||
"""Get stock status breakdown by category"""
|
||||
try:
|
||||
query = text("""
|
||||
SELECT
|
||||
COALESCE(i.ingredient_category::text, i.product_category::text, 'other') as category,
|
||||
COUNT(DISTINCT i.id) as total_ingredients,
|
||||
COUNT(CASE WHEN s.available_quantity > i.low_stock_threshold THEN 1 END) as in_stock,
|
||||
COUNT(CASE WHEN s.available_quantity <= i.low_stock_threshold AND s.available_quantity > 0 THEN 1 END) as low_stock,
|
||||
COUNT(CASE WHEN COALESCE(s.available_quantity, 0) = 0 THEN 1 END) as out_of_stock,
|
||||
COALESCE(SUM(s.available_quantity * s.unit_cost), 0) as total_value
|
||||
FROM ingredients i
|
||||
LEFT JOIN (
|
||||
SELECT
|
||||
ingredient_id,
|
||||
SUM(available_quantity) as available_quantity,
|
||||
AVG(unit_cost) as unit_cost
|
||||
FROM stock
|
||||
WHERE tenant_id = :tenant_id AND is_available = true
|
||||
GROUP BY ingredient_id
|
||||
) s ON i.id = s.ingredient_id
|
||||
WHERE i.tenant_id = :tenant_id AND i.is_active = true
|
||||
GROUP BY category
|
||||
ORDER BY total_value DESC
|
||||
""")
|
||||
|
||||
result = await self.session.execute(query, {"tenant_id": tenant_id})
|
||||
|
||||
return [
|
||||
{
|
||||
"category": row.category,
|
||||
"total_ingredients": row.total_ingredients,
|
||||
"in_stock": row.in_stock,
|
||||
"low_stock": row.low_stock,
|
||||
"out_of_stock": row.out_of_stock,
|
||||
"total_value": float(row.total_value)
|
||||
}
|
||||
for row in result.fetchall()
|
||||
]
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Failed to get stock status by category", error=str(e), tenant_id=str(tenant_id))
|
||||
raise
|
||||
|
||||
async def get_alerts_summary(
|
||||
self,
|
||||
tenant_id: UUID,
|
||||
alert_types: Optional[List[str]] = None,
|
||||
severities: Optional[List[str]] = None,
|
||||
date_from: Optional[datetime] = None,
|
||||
date_to: Optional[datetime] = None
|
||||
) -> List[Dict[str, Any]]:
|
||||
"""Get alerts summary by type and severity with filters"""
|
||||
try:
|
||||
# Build query with filters
|
||||
where_conditions = ["tenant_id = :tenant_id", "status = 'active'"]
|
||||
params = {"tenant_id": tenant_id}
|
||||
|
||||
if alert_types:
|
||||
where_conditions.append("alert_type = ANY(:alert_types)")
|
||||
params["alert_types"] = alert_types
|
||||
|
||||
if severities:
|
||||
where_conditions.append("severity = ANY(:severities)")
|
||||
params["severities"] = severities
|
||||
|
||||
if date_from:
|
||||
where_conditions.append("created_at >= :date_from")
|
||||
params["date_from"] = date_from
|
||||
|
||||
if date_to:
|
||||
where_conditions.append("created_at <= :date_to")
|
||||
params["date_to"] = date_to
|
||||
|
||||
where_clause = " AND ".join(where_conditions)
|
||||
|
||||
query = text(f"""
|
||||
SELECT
|
||||
alert_type,
|
||||
severity,
|
||||
COUNT(*) as count,
|
||||
MIN(EXTRACT(EPOCH FROM (NOW() - created_at))/3600)::int as oldest_alert_age_hours,
|
||||
AVG(CASE WHEN resolved_at IS NOT NULL
|
||||
THEN EXTRACT(EPOCH FROM (resolved_at - created_at))/3600
|
||||
ELSE NULL END)::int as avg_resolution_hours
|
||||
FROM food_safety_alerts
|
||||
WHERE {where_clause}
|
||||
GROUP BY alert_type, severity
|
||||
ORDER BY severity DESC, count DESC
|
||||
""")
|
||||
|
||||
result = await self.session.execute(query, params)
|
||||
|
||||
return [
|
||||
{
|
||||
"alert_type": row.alert_type,
|
||||
"severity": row.severity,
|
||||
"count": row.count,
|
||||
"oldest_alert_age_hours": row.oldest_alert_age_hours,
|
||||
"average_resolution_time_hours": row.avg_resolution_hours
|
||||
}
|
||||
for row in result.fetchall()
|
||||
]
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Failed to get alerts summary", error=str(e), tenant_id=str(tenant_id))
|
||||
raise
|
||||
|
||||
async def get_ingredient_stock_levels(self, tenant_id: UUID) -> Dict[str, float]:
|
||||
"""
|
||||
Get current stock levels for all ingredients
|
||||
|
||||
Args:
|
||||
tenant_id: Tenant UUID
|
||||
|
||||
Returns:
|
||||
Dictionary mapping ingredient_id to current stock level
|
||||
"""
|
||||
try:
|
||||
stock_query = text("""
|
||||
SELECT
|
||||
i.id as ingredient_id,
|
||||
COALESCE(SUM(s.available_quantity), 0) as current_stock
|
||||
FROM ingredients i
|
||||
LEFT JOIN stock s ON i.id = s.ingredient_id AND s.is_available = true
|
||||
WHERE i.tenant_id = :tenant_id AND i.is_active = true
|
||||
GROUP BY i.id
|
||||
""")
|
||||
|
||||
result = await self.session.execute(stock_query, {"tenant_id": tenant_id})
|
||||
stock_levels = {}
|
||||
|
||||
for row in result.fetchall():
|
||||
stock_levels[str(row.ingredient_id)] = float(row.current_stock)
|
||||
|
||||
return stock_levels
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Failed to get ingredient stock levels", error=str(e), tenant_id=str(tenant_id))
|
||||
raise
|
||||
279
services/inventory/app/repositories/food_safety_repository.py
Normal file
279
services/inventory/app/repositories/food_safety_repository.py
Normal file
@@ -0,0 +1,279 @@
|
||||
# services/inventory/app/repositories/food_safety_repository.py
|
||||
"""
|
||||
Food Safety Repository
|
||||
Data access layer for food safety compliance and monitoring
|
||||
"""
|
||||
|
||||
from typing import List, Optional, Dict, Any
|
||||
from uuid import UUID
|
||||
from datetime import datetime
|
||||
from sqlalchemy import text, select
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
import structlog
|
||||
|
||||
from app.models.food_safety import (
|
||||
FoodSafetyCompliance,
|
||||
FoodSafetyAlert,
|
||||
TemperatureLog,
|
||||
ComplianceStatus
|
||||
)
|
||||
|
||||
logger = structlog.get_logger()
|
||||
|
||||
|
||||
class FoodSafetyRepository:
|
||||
"""Repository for food safety data access"""
|
||||
|
||||
def __init__(self, session: AsyncSession):
|
||||
self.session = session
|
||||
|
||||
# ===== COMPLIANCE METHODS =====
|
||||
|
||||
async def create_compliance(self, compliance: FoodSafetyCompliance) -> FoodSafetyCompliance:
|
||||
"""
|
||||
Create a new compliance record
|
||||
|
||||
Args:
|
||||
compliance: FoodSafetyCompliance instance
|
||||
|
||||
Returns:
|
||||
Created FoodSafetyCompliance instance
|
||||
"""
|
||||
self.session.add(compliance)
|
||||
await self.session.flush()
|
||||
await self.session.refresh(compliance)
|
||||
return compliance
|
||||
|
||||
async def get_compliance_by_id(
|
||||
self,
|
||||
compliance_id: UUID,
|
||||
tenant_id: UUID
|
||||
) -> Optional[FoodSafetyCompliance]:
|
||||
"""
|
||||
Get compliance record by ID
|
||||
|
||||
Args:
|
||||
compliance_id: Compliance record UUID
|
||||
tenant_id: Tenant UUID for authorization
|
||||
|
||||
Returns:
|
||||
FoodSafetyCompliance or None
|
||||
"""
|
||||
compliance = await self.session.get(FoodSafetyCompliance, compliance_id)
|
||||
if compliance and compliance.tenant_id == tenant_id:
|
||||
return compliance
|
||||
return None
|
||||
|
||||
async def update_compliance(
|
||||
self,
|
||||
compliance: FoodSafetyCompliance
|
||||
) -> FoodSafetyCompliance:
|
||||
"""
|
||||
Update compliance record
|
||||
|
||||
Args:
|
||||
compliance: FoodSafetyCompliance instance with updates
|
||||
|
||||
Returns:
|
||||
Updated FoodSafetyCompliance instance
|
||||
"""
|
||||
await self.session.flush()
|
||||
await self.session.refresh(compliance)
|
||||
return compliance
|
||||
|
||||
async def get_compliance_stats(self, tenant_id: UUID) -> Dict[str, int]:
|
||||
"""
|
||||
Get compliance statistics for dashboard
|
||||
|
||||
Args:
|
||||
tenant_id: Tenant UUID
|
||||
|
||||
Returns:
|
||||
Dictionary with compliance counts by status
|
||||
"""
|
||||
try:
|
||||
query = text("""
|
||||
SELECT
|
||||
COUNT(*) as total,
|
||||
COUNT(CASE WHEN compliance_status = 'COMPLIANT' THEN 1 END) as compliant,
|
||||
COUNT(CASE WHEN compliance_status = 'NON_COMPLIANT' THEN 1 END) as non_compliant,
|
||||
COUNT(CASE WHEN compliance_status = 'PENDING_REVIEW' THEN 1 END) as pending_review
|
||||
FROM food_safety_compliance
|
||||
WHERE tenant_id = :tenant_id AND is_active = true
|
||||
""")
|
||||
|
||||
result = await self.session.execute(query, {"tenant_id": tenant_id})
|
||||
row = result.fetchone()
|
||||
|
||||
if not row:
|
||||
return {
|
||||
"total": 0,
|
||||
"compliant": 0,
|
||||
"non_compliant": 0,
|
||||
"pending_review": 0
|
||||
}
|
||||
|
||||
return {
|
||||
"total": row.total or 0,
|
||||
"compliant": row.compliant or 0,
|
||||
"non_compliant": row.non_compliant or 0,
|
||||
"pending_review": row.pending_review or 0
|
||||
}
|
||||
except Exception as e:
|
||||
logger.error("Failed to get compliance stats", error=str(e), tenant_id=str(tenant_id))
|
||||
raise
|
||||
|
||||
# ===== TEMPERATURE MONITORING METHODS =====
|
||||
|
||||
async def get_temperature_stats(self, tenant_id: UUID) -> Dict[str, Any]:
|
||||
"""
|
||||
Get temperature monitoring statistics
|
||||
|
||||
Args:
|
||||
tenant_id: Tenant UUID
|
||||
|
||||
Returns:
|
||||
Dictionary with temperature monitoring stats
|
||||
"""
|
||||
try:
|
||||
query = text("""
|
||||
SELECT
|
||||
COUNT(DISTINCT equipment_id) as sensors_online,
|
||||
COUNT(CASE WHEN NOT is_within_range AND recorded_at > NOW() - INTERVAL '24 hours' THEN 1 END) as violations_24h
|
||||
FROM temperature_logs
|
||||
WHERE tenant_id = :tenant_id AND recorded_at > NOW() - INTERVAL '1 hour'
|
||||
""")
|
||||
|
||||
result = await self.session.execute(query, {"tenant_id": tenant_id})
|
||||
row = result.fetchone()
|
||||
|
||||
if not row:
|
||||
return {
|
||||
"sensors_online": 0,
|
||||
"violations_24h": 0
|
||||
}
|
||||
|
||||
return {
|
||||
"sensors_online": row.sensors_online or 0,
|
||||
"violations_24h": row.violations_24h or 0
|
||||
}
|
||||
except Exception as e:
|
||||
logger.error("Failed to get temperature stats", error=str(e), tenant_id=str(tenant_id))
|
||||
raise
|
||||
|
||||
# ===== EXPIRATION TRACKING METHODS =====
|
||||
|
||||
async def get_expiration_stats(self, tenant_id: UUID) -> Dict[str, int]:
|
||||
"""
|
||||
Get expiration tracking statistics
|
||||
|
||||
Args:
|
||||
tenant_id: Tenant UUID
|
||||
|
||||
Returns:
|
||||
Dictionary with expiration counts
|
||||
"""
|
||||
try:
|
||||
query = text("""
|
||||
SELECT
|
||||
COUNT(CASE WHEN expiration_date::date = CURRENT_DATE THEN 1 END) as expiring_today,
|
||||
COUNT(CASE WHEN expiration_date BETWEEN CURRENT_DATE AND CURRENT_DATE + INTERVAL '7 days' THEN 1 END) as expiring_week,
|
||||
COUNT(CASE WHEN expiration_date < CURRENT_DATE AND is_available THEN 1 END) as expired_requiring_action
|
||||
FROM stock s
|
||||
JOIN ingredients i ON s.ingredient_id = i.id
|
||||
WHERE i.tenant_id = :tenant_id AND s.is_available = true
|
||||
""")
|
||||
|
||||
result = await self.session.execute(query, {"tenant_id": tenant_id})
|
||||
row = result.fetchone()
|
||||
|
||||
if not row:
|
||||
return {
|
||||
"expiring_today": 0,
|
||||
"expiring_week": 0,
|
||||
"expired_requiring_action": 0
|
||||
}
|
||||
|
||||
return {
|
||||
"expiring_today": row.expiring_today or 0,
|
||||
"expiring_week": row.expiring_week or 0,
|
||||
"expired_requiring_action": row.expired_requiring_action or 0
|
||||
}
|
||||
except Exception as e:
|
||||
logger.error("Failed to get expiration stats", error=str(e), tenant_id=str(tenant_id))
|
||||
raise
|
||||
|
||||
# ===== ALERT METHODS =====
|
||||
|
||||
async def get_alert_stats(self, tenant_id: UUID) -> Dict[str, int]:
|
||||
"""
|
||||
Get food safety alert statistics
|
||||
|
||||
Args:
|
||||
tenant_id: Tenant UUID
|
||||
|
||||
Returns:
|
||||
Dictionary with alert counts by severity
|
||||
"""
|
||||
try:
|
||||
query = text("""
|
||||
SELECT
|
||||
COUNT(CASE WHEN severity = 'high' OR severity = 'critical' THEN 1 END) as high_risk,
|
||||
COUNT(CASE WHEN severity = 'critical' THEN 1 END) as critical,
|
||||
COUNT(CASE WHEN regulatory_action_required = true AND resolved_at IS NULL THEN 1 END) as regulatory_pending
|
||||
FROM food_safety_alerts
|
||||
WHERE tenant_id = :tenant_id AND status = 'active'
|
||||
""")
|
||||
|
||||
result = await self.session.execute(query, {"tenant_id": tenant_id})
|
||||
row = result.fetchone()
|
||||
|
||||
if not row:
|
||||
return {
|
||||
"high_risk": 0,
|
||||
"critical": 0,
|
||||
"regulatory_pending": 0
|
||||
}
|
||||
|
||||
return {
|
||||
"high_risk": row.high_risk or 0,
|
||||
"critical": row.critical or 0,
|
||||
"regulatory_pending": row.regulatory_pending or 0
|
||||
}
|
||||
except Exception as e:
|
||||
logger.error("Failed to get alert stats", error=str(e), tenant_id=str(tenant_id))
|
||||
raise
|
||||
|
||||
# ===== VALIDATION METHODS =====
|
||||
|
||||
async def validate_ingredient_exists(
|
||||
self,
|
||||
ingredient_id: UUID,
|
||||
tenant_id: UUID
|
||||
) -> bool:
|
||||
"""
|
||||
Validate that an ingredient exists for a tenant
|
||||
|
||||
Args:
|
||||
ingredient_id: Ingredient UUID
|
||||
tenant_id: Tenant UUID
|
||||
|
||||
Returns:
|
||||
True if ingredient exists, False otherwise
|
||||
"""
|
||||
try:
|
||||
query = text("""
|
||||
SELECT id
|
||||
FROM ingredients
|
||||
WHERE id = :ingredient_id AND tenant_id = :tenant_id
|
||||
""")
|
||||
|
||||
result = await self.session.execute(query, {
|
||||
"ingredient_id": ingredient_id,
|
||||
"tenant_id": tenant_id
|
||||
})
|
||||
|
||||
return result.fetchone() is not None
|
||||
except Exception as e:
|
||||
logger.error("Failed to validate ingredient", error=str(e))
|
||||
raise
|
||||
@@ -0,0 +1,301 @@
|
||||
# services/inventory/app/repositories/inventory_alert_repository.py
|
||||
"""
|
||||
Inventory Alert Repository
|
||||
Data access layer for inventory alert detection and analysis
|
||||
"""
|
||||
|
||||
from typing import List, Dict, Any
|
||||
from uuid import UUID
|
||||
from sqlalchemy import text
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
import structlog
|
||||
|
||||
logger = structlog.get_logger()
|
||||
|
||||
|
||||
class InventoryAlertRepository:
|
||||
"""Repository for inventory alert data access"""
|
||||
|
||||
def __init__(self, session: AsyncSession):
|
||||
self.session = session
|
||||
|
||||
async def get_stock_issues(self, tenant_id: UUID) -> List[Dict[str, Any]]:
|
||||
"""
|
||||
Get stock level issues with CTE analysis
|
||||
Returns list of critical, low, and overstock situations
|
||||
"""
|
||||
try:
|
||||
query = text("""
|
||||
WITH stock_analysis AS (
|
||||
SELECT
|
||||
i.id, i.name, i.tenant_id,
|
||||
COALESCE(SUM(s.current_quantity), 0) as current_stock,
|
||||
i.low_stock_threshold as minimum_stock,
|
||||
i.max_stock_level as maximum_stock,
|
||||
i.reorder_point,
|
||||
0 as tomorrow_needed,
|
||||
0 as avg_daily_usage,
|
||||
7 as lead_time_days,
|
||||
CASE
|
||||
WHEN COALESCE(SUM(s.current_quantity), 0) < i.low_stock_threshold THEN 'critical'
|
||||
WHEN COALESCE(SUM(s.current_quantity), 0) < i.low_stock_threshold * 1.2 THEN 'low'
|
||||
WHEN i.max_stock_level IS NOT NULL AND COALESCE(SUM(s.current_quantity), 0) > i.max_stock_level THEN 'overstock'
|
||||
ELSE 'normal'
|
||||
END as status,
|
||||
GREATEST(0, i.low_stock_threshold - COALESCE(SUM(s.current_quantity), 0)) as shortage_amount
|
||||
FROM ingredients i
|
||||
LEFT JOIN stock s ON s.ingredient_id = i.id AND s.is_available = true
|
||||
WHERE i.tenant_id = :tenant_id AND i.is_active = true
|
||||
GROUP BY i.id, i.name, i.tenant_id, i.low_stock_threshold, i.max_stock_level, i.reorder_point
|
||||
)
|
||||
SELECT * FROM stock_analysis WHERE status != 'normal'
|
||||
ORDER BY
|
||||
CASE status
|
||||
WHEN 'critical' THEN 1
|
||||
WHEN 'low' THEN 2
|
||||
WHEN 'overstock' THEN 3
|
||||
END,
|
||||
shortage_amount DESC
|
||||
""")
|
||||
|
||||
result = await self.session.execute(query, {"tenant_id": tenant_id})
|
||||
return [dict(row._mapping) for row in result.fetchall()]
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Failed to get stock issues", error=str(e), tenant_id=str(tenant_id))
|
||||
raise
|
||||
|
||||
async def get_expiring_products(self, tenant_id: UUID, days_threshold: int = 7) -> List[Dict[str, Any]]:
|
||||
"""
|
||||
Get products expiring soon or already expired
|
||||
"""
|
||||
try:
|
||||
query = text("""
|
||||
SELECT
|
||||
i.id as ingredient_id,
|
||||
i.name as ingredient_name,
|
||||
s.id as stock_id,
|
||||
s.batch_number,
|
||||
s.expiration_date,
|
||||
s.current_quantity,
|
||||
i.unit_of_measure,
|
||||
s.unit_cost,
|
||||
(s.current_quantity * s.unit_cost) as total_value,
|
||||
CASE
|
||||
WHEN s.expiration_date < CURRENT_DATE THEN 'expired'
|
||||
WHEN s.expiration_date <= CURRENT_DATE + INTERVAL '1 day' THEN 'expires_today'
|
||||
WHEN s.expiration_date <= CURRENT_DATE + INTERVAL '3 days' THEN 'expires_soon'
|
||||
ELSE 'warning'
|
||||
END as urgency,
|
||||
EXTRACT(DAY FROM (s.expiration_date - CURRENT_DATE)) as days_until_expiry
|
||||
FROM stock s
|
||||
JOIN ingredients i ON s.ingredient_id = i.id
|
||||
WHERE i.tenant_id = :tenant_id
|
||||
AND s.is_available = true
|
||||
AND s.expiration_date <= CURRENT_DATE + INTERVAL ':days_threshold days'
|
||||
ORDER BY s.expiration_date ASC, total_value DESC
|
||||
""")
|
||||
|
||||
result = await self.session.execute(query, {
|
||||
"tenant_id": tenant_id,
|
||||
"days_threshold": days_threshold
|
||||
})
|
||||
return [dict(row._mapping) for row in result.fetchall()]
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Failed to get expiring products", error=str(e), tenant_id=str(tenant_id))
|
||||
raise
|
||||
|
||||
async def get_temperature_breaches(self, tenant_id: UUID, hours_back: int = 24) -> List[Dict[str, Any]]:
|
||||
"""
|
||||
Get temperature monitoring breaches
|
||||
"""
|
||||
try:
|
||||
query = text("""
|
||||
SELECT
|
||||
tl.id,
|
||||
tl.equipment_id,
|
||||
tl.equipment_name,
|
||||
tl.storage_type,
|
||||
tl.temperature_celsius,
|
||||
tl.min_threshold,
|
||||
tl.max_threshold,
|
||||
tl.is_within_range,
|
||||
tl.recorded_at,
|
||||
tl.alert_triggered,
|
||||
EXTRACT(EPOCH FROM (NOW() - tl.recorded_at))/3600 as hours_ago,
|
||||
CASE
|
||||
WHEN tl.temperature_celsius < tl.min_threshold
|
||||
THEN tl.min_threshold - tl.temperature_celsius
|
||||
WHEN tl.temperature_celsius > tl.max_threshold
|
||||
THEN tl.temperature_celsius - tl.max_threshold
|
||||
ELSE 0
|
||||
END as deviation
|
||||
FROM temperature_logs tl
|
||||
WHERE tl.tenant_id = :tenant_id
|
||||
AND tl.is_within_range = false
|
||||
AND tl.recorded_at > NOW() - INTERVAL ':hours_back hours'
|
||||
AND tl.alert_triggered = false
|
||||
ORDER BY deviation DESC, tl.recorded_at DESC
|
||||
""")
|
||||
|
||||
result = await self.session.execute(query, {
|
||||
"tenant_id": tenant_id,
|
||||
"hours_back": hours_back
|
||||
})
|
||||
return [dict(row._mapping) for row in result.fetchall()]
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Failed to get temperature breaches", error=str(e), tenant_id=str(tenant_id))
|
||||
raise
|
||||
|
||||
async def mark_temperature_alert_triggered(self, log_id: UUID) -> None:
|
||||
"""
|
||||
Mark a temperature log as having triggered an alert
|
||||
"""
|
||||
try:
|
||||
query = text("""
|
||||
UPDATE temperature_logs
|
||||
SET alert_triggered = true
|
||||
WHERE id = :id
|
||||
""")
|
||||
|
||||
await self.session.execute(query, {"id": log_id})
|
||||
await self.session.commit()
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Failed to mark temperature alert", error=str(e), log_id=str(log_id))
|
||||
raise
|
||||
|
||||
async def get_waste_opportunities(self, tenant_id: UUID) -> List[Dict[str, Any]]:
|
||||
"""
|
||||
Identify waste reduction opportunities
|
||||
"""
|
||||
try:
|
||||
query = text("""
|
||||
WITH waste_analysis AS (
|
||||
SELECT
|
||||
i.id as ingredient_id,
|
||||
i.name as ingredient_name,
|
||||
i.ingredient_category,
|
||||
COUNT(sm.id) as waste_incidents,
|
||||
SUM(sm.quantity) as total_waste_quantity,
|
||||
SUM(sm.total_cost) as total_waste_cost,
|
||||
AVG(sm.quantity) as avg_waste_per_incident,
|
||||
MAX(sm.movement_date) as last_waste_date
|
||||
FROM stock_movements sm
|
||||
JOIN ingredients i ON sm.ingredient_id = i.id
|
||||
WHERE i.tenant_id = :tenant_id
|
||||
AND sm.movement_type = 'WASTE'
|
||||
AND sm.movement_date > NOW() - INTERVAL '30 days'
|
||||
GROUP BY i.id, i.name, i.ingredient_category
|
||||
HAVING COUNT(sm.id) >= 3 OR SUM(sm.total_cost) > 50
|
||||
)
|
||||
SELECT * FROM waste_analysis
|
||||
ORDER BY total_waste_cost DESC, waste_incidents DESC
|
||||
LIMIT 20
|
||||
""")
|
||||
|
||||
result = await self.session.execute(query, {"tenant_id": tenant_id})
|
||||
return [dict(row._mapping) for row in result.fetchall()]
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Failed to get waste opportunities", error=str(e), tenant_id=str(tenant_id))
|
||||
raise
|
||||
|
||||
async def get_reorder_recommendations(self, tenant_id: UUID) -> List[Dict[str, Any]]:
|
||||
"""
|
||||
Get ingredients that need reordering based on stock levels and usage
|
||||
"""
|
||||
try:
|
||||
query = text("""
|
||||
WITH usage_analysis AS (
|
||||
SELECT
|
||||
i.id,
|
||||
i.name,
|
||||
COALESCE(SUM(s.current_quantity), 0) as current_stock,
|
||||
i.reorder_point,
|
||||
i.low_stock_threshold,
|
||||
COALESCE(SUM(sm.quantity) FILTER (WHERE sm.movement_date > NOW() - INTERVAL '7 days'), 0) / 7 as daily_usage,
|
||||
i.preferred_supplier_id,
|
||||
i.standard_order_quantity
|
||||
FROM ingredients i
|
||||
LEFT JOIN stock s ON s.ingredient_id = i.id AND s.is_available = true
|
||||
LEFT JOIN stock_movements sm ON sm.ingredient_id = i.id
|
||||
AND sm.movement_type = 'PRODUCTION_USE'
|
||||
AND sm.movement_date > NOW() - INTERVAL '7 days'
|
||||
WHERE i.tenant_id = :tenant_id
|
||||
AND i.is_active = true
|
||||
GROUP BY i.id, i.name, i.reorder_point, i.low_stock_threshold,
|
||||
i.preferred_supplier_id, i.standard_order_quantity
|
||||
)
|
||||
SELECT *,
|
||||
CASE
|
||||
WHEN daily_usage > 0 THEN FLOOR(current_stock / NULLIF(daily_usage, 0))
|
||||
ELSE 999
|
||||
END as days_of_stock,
|
||||
GREATEST(
|
||||
standard_order_quantity,
|
||||
CEIL(daily_usage * 14)
|
||||
) as recommended_order_quantity
|
||||
FROM usage_analysis
|
||||
WHERE current_stock <= reorder_point
|
||||
ORDER BY days_of_stock ASC, current_stock ASC
|
||||
LIMIT 50
|
||||
""")
|
||||
|
||||
result = await self.session.execute(query, {"tenant_id": tenant_id})
|
||||
return [dict(row._mapping) for row in result.fetchall()]
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Failed to get reorder recommendations", error=str(e), tenant_id=str(tenant_id))
|
||||
raise
|
||||
|
||||
async def get_active_tenant_ids(self) -> List[UUID]:
|
||||
"""
|
||||
Get list of active tenant IDs from ingredients table
|
||||
"""
|
||||
try:
|
||||
query = text("SELECT DISTINCT tenant_id FROM ingredients WHERE is_active = true")
|
||||
result = await self.session.execute(query)
|
||||
|
||||
tenant_ids = []
|
||||
for row in result.fetchall():
|
||||
tenant_id = row.tenant_id
|
||||
# Convert to UUID if it's not already
|
||||
if isinstance(tenant_id, UUID):
|
||||
tenant_ids.append(tenant_id)
|
||||
else:
|
||||
tenant_ids.append(UUID(str(tenant_id)))
|
||||
return tenant_ids
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Failed to get active tenant IDs", error=str(e))
|
||||
raise
|
||||
|
||||
async def get_stock_after_order(self, ingredient_id: str, order_quantity: float) -> Dict[str, Any]:
|
||||
"""
|
||||
Get stock information after hypothetical order
|
||||
"""
|
||||
try:
|
||||
query = text("""
|
||||
SELECT i.id, i.name,
|
||||
COALESCE(SUM(s.current_quantity), 0) as current_stock,
|
||||
i.low_stock_threshold as minimum_stock,
|
||||
(COALESCE(SUM(s.current_quantity), 0) - :order_quantity) as remaining
|
||||
FROM ingredients i
|
||||
LEFT JOIN stock s ON s.ingredient_id = i.id AND s.is_available = true
|
||||
WHERE i.id = :ingredient_id
|
||||
GROUP BY i.id, i.name, i.low_stock_threshold
|
||||
""")
|
||||
|
||||
result = await self.session.execute(query, {
|
||||
"ingredient_id": ingredient_id,
|
||||
"order_quantity": order_quantity
|
||||
})
|
||||
row = result.fetchone()
|
||||
return dict(row._mapping) if row else None
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Failed to get stock after order", error=str(e), ingredient_id=ingredient_id)
|
||||
raise
|
||||
@@ -491,4 +491,49 @@ class StockMovementRepository(BaseRepository[StockMovement, StockMovementCreate,
|
||||
tenant_id=str(tenant_id),
|
||||
ingredient_id=str(ingredient_id),
|
||||
stock_id=str(stock_id))
|
||||
raise
|
||||
|
||||
async def get_inventory_waste_total(
|
||||
self,
|
||||
tenant_id: UUID,
|
||||
start_date: datetime,
|
||||
end_date: datetime
|
||||
) -> float:
|
||||
"""
|
||||
Get total inventory waste for sustainability reporting
|
||||
|
||||
Args:
|
||||
tenant_id: Tenant UUID
|
||||
start_date: Start date for period
|
||||
end_date: End date for period
|
||||
|
||||
Returns:
|
||||
Total waste quantity
|
||||
"""
|
||||
try:
|
||||
from sqlalchemy import text
|
||||
|
||||
query = text("""
|
||||
SELECT COALESCE(SUM(sm.quantity), 0) as total_inventory_waste
|
||||
FROM stock_movements sm
|
||||
JOIN ingredients i ON sm.ingredient_id = i.id
|
||||
WHERE i.tenant_id = :tenant_id
|
||||
AND sm.movement_type = 'WASTE'
|
||||
AND sm.movement_date BETWEEN :start_date AND :end_date
|
||||
""")
|
||||
|
||||
result = await self.session.execute(
|
||||
query,
|
||||
{
|
||||
'tenant_id': tenant_id,
|
||||
'start_date': start_date,
|
||||
'end_date': end_date
|
||||
}
|
||||
)
|
||||
row = result.fetchone()
|
||||
|
||||
return float(row.total_inventory_waste or 0)
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Failed to get inventory waste total", error=str(e), tenant_id=str(tenant_id))
|
||||
raise
|
||||
206
services/inventory/app/schemas/sustainability.py
Normal file
206
services/inventory/app/schemas/sustainability.py
Normal file
@@ -0,0 +1,206 @@
|
||||
# ================================================================
|
||||
# services/inventory/app/schemas/sustainability.py
|
||||
# ================================================================
|
||||
"""
|
||||
Sustainability Schemas - Environmental Impact & SDG Compliance
|
||||
"""
|
||||
|
||||
from datetime import datetime
|
||||
from typing import Dict, Any, List, Optional
|
||||
from decimal import Decimal
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
|
||||
class PeriodInfo(BaseModel):
|
||||
"""Time period for metrics"""
|
||||
start_date: str
|
||||
end_date: str
|
||||
days: int
|
||||
|
||||
|
||||
class WasteMetrics(BaseModel):
|
||||
"""Waste tracking metrics"""
|
||||
total_waste_kg: float = Field(description="Total waste in kilograms")
|
||||
production_waste_kg: float = Field(description="Waste from production processes")
|
||||
expired_waste_kg: float = Field(description="Waste from expired inventory")
|
||||
waste_percentage: float = Field(description="Waste as percentage of total production")
|
||||
waste_by_reason: Dict[str, float] = Field(description="Breakdown by waste reason")
|
||||
|
||||
|
||||
class CO2Emissions(BaseModel):
|
||||
"""CO2 emission metrics"""
|
||||
kg: float = Field(description="CO2 emissions in kilograms")
|
||||
tons: float = Field(description="CO2 emissions in tons")
|
||||
trees_to_offset: float = Field(description="Equivalent trees needed to offset emissions")
|
||||
|
||||
|
||||
class WaterFootprint(BaseModel):
|
||||
"""Water usage metrics"""
|
||||
liters: float = Field(description="Water footprint in liters")
|
||||
cubic_meters: float = Field(description="Water footprint in cubic meters")
|
||||
|
||||
|
||||
class LandUse(BaseModel):
|
||||
"""Land use metrics"""
|
||||
square_meters: float = Field(description="Land use in square meters")
|
||||
hectares: float = Field(description="Land use in hectares")
|
||||
|
||||
|
||||
class HumanEquivalents(BaseModel):
|
||||
"""Human-relatable equivalents for impact"""
|
||||
car_km_equivalent: float = Field(description="Equivalent kilometers driven by car")
|
||||
smartphone_charges: float = Field(description="Equivalent smartphone charges")
|
||||
showers_equivalent: float = Field(description="Equivalent showers taken")
|
||||
trees_planted: float = Field(description="Equivalent trees planted")
|
||||
|
||||
|
||||
class EnvironmentalImpact(BaseModel):
|
||||
"""Environmental impact of food waste"""
|
||||
co2_emissions: CO2Emissions
|
||||
water_footprint: WaterFootprint
|
||||
land_use: LandUse
|
||||
human_equivalents: HumanEquivalents
|
||||
|
||||
|
||||
class SDG123Metrics(BaseModel):
|
||||
"""UN SDG 12.3 specific metrics"""
|
||||
baseline_waste_percentage: float = Field(description="Baseline waste percentage")
|
||||
current_waste_percentage: float = Field(description="Current waste percentage")
|
||||
reduction_achieved: float = Field(description="Reduction achieved from baseline (%)")
|
||||
target_reduction: float = Field(description="Target reduction (50%)", default=50.0)
|
||||
progress_to_target: float = Field(description="Progress toward target (%)")
|
||||
status: str = Field(description="Status code: sdg_compliant, on_track, progressing, baseline")
|
||||
status_label: str = Field(description="Human-readable status")
|
||||
target_waste_percentage: float = Field(description="Target waste percentage to achieve")
|
||||
|
||||
|
||||
class SDGCompliance(BaseModel):
|
||||
"""SDG compliance assessment"""
|
||||
sdg_12_3: SDG123Metrics
|
||||
baseline_period: str = Field(description="Period used for baseline calculation")
|
||||
certification_ready: bool = Field(description="Ready for SDG certification")
|
||||
improvement_areas: List[str] = Field(description="Identified areas for improvement")
|
||||
|
||||
|
||||
class EnvironmentalImpactAvoided(BaseModel):
|
||||
"""Environmental impact avoided through AI"""
|
||||
co2_kg: float = Field(description="CO2 emissions avoided (kg)")
|
||||
water_liters: float = Field(description="Water saved (liters)")
|
||||
|
||||
|
||||
class AvoidedWaste(BaseModel):
|
||||
"""Waste avoided through AI predictions"""
|
||||
waste_avoided_kg: float = Field(description="Waste avoided in kilograms")
|
||||
ai_assisted_batches: int = Field(description="Number of AI-assisted batches")
|
||||
environmental_impact_avoided: EnvironmentalImpactAvoided
|
||||
methodology: str = Field(description="Calculation methodology")
|
||||
|
||||
|
||||
class FinancialImpact(BaseModel):
|
||||
"""Financial impact of waste"""
|
||||
waste_cost_eur: float = Field(description="Cost of waste in euros")
|
||||
cost_per_kg: float = Field(description="Average cost per kg")
|
||||
potential_monthly_savings: float = Field(description="Potential monthly savings")
|
||||
annual_projection: float = Field(description="Annual cost projection")
|
||||
|
||||
|
||||
class GrantProgramEligibility(BaseModel):
|
||||
"""Eligibility for a specific grant program"""
|
||||
eligible: bool = Field(description="Whether eligible for this grant")
|
||||
confidence: str = Field(description="Confidence level: high, medium, low")
|
||||
requirements_met: bool = Field(description="Whether requirements are met")
|
||||
|
||||
|
||||
class GrantReadiness(BaseModel):
|
||||
"""Grant application readiness assessment"""
|
||||
overall_readiness_percentage: float = Field(description="Overall readiness percentage")
|
||||
grant_programs: Dict[str, GrantProgramEligibility] = Field(description="Eligibility by program")
|
||||
recommended_applications: List[str] = Field(description="Recommended grant programs to apply for")
|
||||
|
||||
|
||||
class SustainabilityMetrics(BaseModel):
|
||||
"""Complete sustainability metrics response"""
|
||||
period: PeriodInfo
|
||||
waste_metrics: WasteMetrics
|
||||
environmental_impact: EnvironmentalImpact
|
||||
sdg_compliance: SDGCompliance
|
||||
avoided_waste: AvoidedWaste
|
||||
financial_impact: FinancialImpact
|
||||
grant_readiness: GrantReadiness
|
||||
|
||||
|
||||
class BaselineComparison(BaseModel):
|
||||
"""Baseline comparison for grants"""
|
||||
baseline: float
|
||||
current: float
|
||||
improvement: float
|
||||
|
||||
|
||||
class SupportingData(BaseModel):
|
||||
"""Supporting data for grant applications"""
|
||||
baseline_comparison: BaselineComparison
|
||||
environmental_benefits: EnvironmentalImpact
|
||||
financial_benefits: FinancialImpact
|
||||
|
||||
|
||||
class Certifications(BaseModel):
|
||||
"""Certification status"""
|
||||
sdg_12_3_compliant: bool
|
||||
grant_programs_eligible: List[str]
|
||||
|
||||
|
||||
class ExecutiveSummary(BaseModel):
|
||||
"""Executive summary for grant reports"""
|
||||
total_waste_reduced_kg: float
|
||||
waste_reduction_percentage: float
|
||||
co2_emissions_avoided_kg: float
|
||||
financial_savings_eur: float
|
||||
sdg_compliance_status: str
|
||||
|
||||
|
||||
class ReportMetadata(BaseModel):
|
||||
"""Report metadata"""
|
||||
generated_at: str
|
||||
report_type: str
|
||||
period: PeriodInfo
|
||||
tenant_id: str
|
||||
|
||||
|
||||
class GrantReport(BaseModel):
|
||||
"""Complete grant application report"""
|
||||
report_metadata: ReportMetadata
|
||||
executive_summary: ExecutiveSummary
|
||||
detailed_metrics: SustainabilityMetrics
|
||||
certifications: Certifications
|
||||
supporting_data: SupportingData
|
||||
|
||||
|
||||
# Request schemas
|
||||
|
||||
class SustainabilityMetricsRequest(BaseModel):
|
||||
"""Request for sustainability metrics"""
|
||||
start_date: Optional[datetime] = Field(None, description="Start date for metrics")
|
||||
end_date: Optional[datetime] = Field(None, description="End date for metrics")
|
||||
|
||||
|
||||
class GrantReportRequest(BaseModel):
|
||||
"""Request for grant report export"""
|
||||
grant_type: str = Field("general", description="Type of grant: general, eu_horizon, farm_to_fork, etc.")
|
||||
start_date: Optional[datetime] = Field(None, description="Start date for report")
|
||||
end_date: Optional[datetime] = Field(None, description="End date for report")
|
||||
format: str = Field("json", description="Export format: json, pdf, csv")
|
||||
|
||||
|
||||
# Widget/Dashboard schemas
|
||||
|
||||
class SustainabilityWidgetData(BaseModel):
|
||||
"""Simplified data for dashboard widgets"""
|
||||
total_waste_kg: float
|
||||
waste_reduction_percentage: float
|
||||
co2_saved_kg: float
|
||||
water_saved_liters: float
|
||||
trees_equivalent: float
|
||||
sdg_status: str
|
||||
sdg_progress: float
|
||||
grant_programs_ready: int
|
||||
financial_savings_eur: float
|
||||
@@ -10,6 +10,7 @@ from decimal import Decimal
|
||||
from typing import List, Optional, Dict, Any
|
||||
from uuid import UUID
|
||||
import structlog
|
||||
from sqlalchemy import text
|
||||
|
||||
from app.core.config import settings
|
||||
from app.services.inventory_service import InventoryService
|
||||
@@ -17,6 +18,7 @@ from app.services.food_safety_service import FoodSafetyService
|
||||
from app.repositories.ingredient_repository import IngredientRepository
|
||||
from app.repositories.stock_repository import StockRepository
|
||||
from app.repositories.stock_movement_repository import StockMovementRepository
|
||||
from app.repositories.dashboard_repository import DashboardRepository
|
||||
from app.schemas.dashboard import (
|
||||
InventoryDashboardSummary,
|
||||
BusinessModelInsights,
|
||||
@@ -40,20 +42,23 @@ class DashboardService:
|
||||
food_safety_service: FoodSafetyService,
|
||||
ingredient_repository: Optional[IngredientRepository] = None,
|
||||
stock_repository: Optional[StockRepository] = None,
|
||||
stock_movement_repository: Optional[StockMovementRepository] = None
|
||||
stock_movement_repository: Optional[StockMovementRepository] = None,
|
||||
dashboard_repository: Optional[DashboardRepository] = None
|
||||
):
|
||||
self.inventory_service = inventory_service
|
||||
self.food_safety_service = food_safety_service
|
||||
self._ingredient_repository = ingredient_repository
|
||||
self._stock_repository = stock_repository
|
||||
self._stock_movement_repository = stock_movement_repository
|
||||
self._dashboard_repository = dashboard_repository
|
||||
|
||||
def _get_repositories(self, db):
|
||||
"""Get repository instances for the current database session"""
|
||||
return {
|
||||
'ingredient_repo': self._ingredient_repository or IngredientRepository(db),
|
||||
'stock_repo': self._stock_repository or StockRepository(db),
|
||||
'stock_movement_repo': self._stock_movement_repository or StockMovementRepository(db)
|
||||
'stock_movement_repo': self._stock_movement_repository or StockMovementRepository(db),
|
||||
'dashboard_repo': self._dashboard_repository or DashboardRepository(db)
|
||||
}
|
||||
|
||||
async def get_inventory_dashboard_summary(
|
||||
@@ -75,22 +80,26 @@ class DashboardService:
|
||||
# Get business model insights
|
||||
business_model = await self._detect_business_model(db, tenant_id)
|
||||
|
||||
# Get dashboard repository
|
||||
repos = self._get_repositories(db)
|
||||
dashboard_repo = repos['dashboard_repo']
|
||||
|
||||
# Get category breakdown
|
||||
stock_by_category = await self._get_stock_by_category(db, tenant_id)
|
||||
|
||||
stock_by_category = await dashboard_repo.get_stock_by_category(tenant_id)
|
||||
|
||||
# Get alerts breakdown
|
||||
alerts_by_severity = await self._get_alerts_by_severity(db, tenant_id)
|
||||
|
||||
alerts_by_severity = await dashboard_repo.get_alerts_by_severity(tenant_id)
|
||||
|
||||
# Get movements breakdown
|
||||
movements_by_type = await self._get_movements_by_type(db, tenant_id)
|
||||
movements_by_type = await dashboard_repo.get_movements_by_type(tenant_id)
|
||||
|
||||
# Get performance indicators
|
||||
performance_metrics = await self._calculate_performance_indicators(db, tenant_id)
|
||||
|
||||
# Get trending data
|
||||
stock_value_trend = await self._get_stock_value_trend(db, tenant_id, days=30)
|
||||
alert_trend = await self._get_alert_trend(db, tenant_id, days=30)
|
||||
|
||||
alert_trend = await dashboard_repo.get_alert_trend(tenant_id, days=30)
|
||||
|
||||
# Recent activity
|
||||
recent_activity = await self.get_recent_activity(db, tenant_id, limit=10)
|
||||
|
||||
@@ -200,26 +209,10 @@ class DashboardService:
|
||||
ingredients = await repos['ingredient_repo'].get_ingredients_by_tenant(tenant_id, limit=1000)
|
||||
stock_summary = await repos['stock_repo'].get_stock_summary_by_tenant(tenant_id)
|
||||
|
||||
# Get current stock levels for all ingredients using a direct query
|
||||
# Get current stock levels for all ingredients using repository
|
||||
ingredient_stock_levels = {}
|
||||
try:
|
||||
from sqlalchemy import text
|
||||
|
||||
# Query to get current stock for all ingredients
|
||||
stock_query = text("""
|
||||
SELECT
|
||||
i.id as ingredient_id,
|
||||
COALESCE(SUM(s.available_quantity), 0) as current_stock
|
||||
FROM ingredients i
|
||||
LEFT JOIN stock s ON i.id = s.ingredient_id AND s.is_available = true
|
||||
WHERE i.tenant_id = :tenant_id AND i.is_active = true
|
||||
GROUP BY i.id
|
||||
""")
|
||||
|
||||
result = await db.execute(stock_query, {"tenant_id": tenant_id})
|
||||
for row in result.fetchall():
|
||||
ingredient_stock_levels[str(row.ingredient_id)] = float(row.current_stock)
|
||||
|
||||
ingredient_stock_levels = await dashboard_repo.get_ingredient_stock_levels(tenant_id)
|
||||
except Exception as e:
|
||||
logger.warning(f"Could not fetch current stock levels: {e}")
|
||||
|
||||
@@ -320,50 +313,29 @@ class DashboardService:
|
||||
) -> List[StockStatusSummary]:
|
||||
"""Get stock status breakdown by category"""
|
||||
try:
|
||||
query = """
|
||||
SELECT
|
||||
COALESCE(i.ingredient_category::text, i.product_category::text, 'other') as category,
|
||||
COUNT(DISTINCT i.id) as total_ingredients,
|
||||
COUNT(CASE WHEN s.available_quantity > i.low_stock_threshold THEN 1 END) as in_stock,
|
||||
COUNT(CASE WHEN s.available_quantity <= i.low_stock_threshold AND s.available_quantity > 0 THEN 1 END) as low_stock,
|
||||
COUNT(CASE WHEN COALESCE(s.available_quantity, 0) = 0 THEN 1 END) as out_of_stock,
|
||||
COALESCE(SUM(s.available_quantity * s.unit_cost), 0) as total_value
|
||||
FROM ingredients i
|
||||
LEFT JOIN (
|
||||
SELECT
|
||||
ingredient_id,
|
||||
SUM(available_quantity) as available_quantity,
|
||||
AVG(unit_cost) as unit_cost
|
||||
FROM stock
|
||||
WHERE tenant_id = :tenant_id AND is_available = true
|
||||
GROUP BY ingredient_id
|
||||
) s ON i.id = s.ingredient_id
|
||||
WHERE i.tenant_id = :tenant_id AND i.is_active = true
|
||||
GROUP BY category
|
||||
ORDER BY total_value DESC
|
||||
"""
|
||||
|
||||
result = await db.execute(query, {"tenant_id": tenant_id})
|
||||
rows = result.fetchall()
|
||||
|
||||
repos = self._get_repositories(db)
|
||||
dashboard_repo = repos['dashboard_repo']
|
||||
|
||||
rows = await dashboard_repo.get_stock_status_by_category(tenant_id)
|
||||
|
||||
summaries = []
|
||||
total_value = sum(row.total_value for row in rows)
|
||||
|
||||
total_value = sum(row["total_value"] for row in rows)
|
||||
|
||||
for row in rows:
|
||||
percentage = (row.total_value / total_value * 100) if total_value > 0 else 0
|
||||
|
||||
percentage = (row["total_value"] / total_value * 100) if total_value > 0 else 0
|
||||
|
||||
summaries.append(StockStatusSummary(
|
||||
category=row.category,
|
||||
total_ingredients=row.total_ingredients,
|
||||
in_stock=row.in_stock,
|
||||
low_stock=row.low_stock,
|
||||
out_of_stock=row.out_of_stock,
|
||||
total_value=Decimal(str(row.total_value)),
|
||||
category=row["category"],
|
||||
total_ingredients=row["total_ingredients"],
|
||||
in_stock=row["in_stock"],
|
||||
low_stock=row["low_stock"],
|
||||
out_of_stock=row["out_of_stock"],
|
||||
total_value=Decimal(str(row["total_value"])),
|
||||
percentage_of_total=Decimal(str(percentage))
|
||||
))
|
||||
|
||||
|
||||
return summaries
|
||||
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Failed to get stock status by category", error=str(e))
|
||||
raise
|
||||
@@ -376,58 +348,30 @@ class DashboardService:
|
||||
) -> List[AlertSummary]:
|
||||
"""Get alerts summary by type and severity"""
|
||||
try:
|
||||
# Build query with filters
|
||||
where_conditions = ["tenant_id = :tenant_id", "status = 'active'"]
|
||||
params = {"tenant_id": tenant_id}
|
||||
|
||||
if filters:
|
||||
if filters.alert_types:
|
||||
where_conditions.append("alert_type = ANY(:alert_types)")
|
||||
params["alert_types"] = filters.alert_types
|
||||
|
||||
if filters.severities:
|
||||
where_conditions.append("severity = ANY(:severities)")
|
||||
params["severities"] = filters.severities
|
||||
|
||||
if filters.date_from:
|
||||
where_conditions.append("created_at >= :date_from")
|
||||
params["date_from"] = filters.date_from
|
||||
|
||||
if filters.date_to:
|
||||
where_conditions.append("created_at <= :date_to")
|
||||
params["date_to"] = filters.date_to
|
||||
|
||||
where_clause = " AND ".join(where_conditions)
|
||||
|
||||
query = f"""
|
||||
SELECT
|
||||
alert_type,
|
||||
severity,
|
||||
COUNT(*) as count,
|
||||
MIN(EXTRACT(EPOCH FROM (NOW() - created_at))/3600)::int as oldest_alert_age_hours,
|
||||
AVG(CASE WHEN resolved_at IS NOT NULL
|
||||
THEN EXTRACT(EPOCH FROM (resolved_at - created_at))/3600
|
||||
ELSE NULL END)::int as avg_resolution_hours
|
||||
FROM food_safety_alerts
|
||||
WHERE {where_clause}
|
||||
GROUP BY alert_type, severity
|
||||
ORDER BY severity DESC, count DESC
|
||||
"""
|
||||
|
||||
result = await db.execute(query, params)
|
||||
rows = result.fetchall()
|
||||
|
||||
repos = self._get_repositories(db)
|
||||
dashboard_repo = repos['dashboard_repo']
|
||||
|
||||
# Extract filter parameters
|
||||
alert_types = filters.alert_types if filters else None
|
||||
severities = filters.severities if filters else None
|
||||
date_from = filters.date_from if filters else None
|
||||
date_to = filters.date_to if filters else None
|
||||
|
||||
rows = await dashboard_repo.get_alerts_summary(
|
||||
tenant_id, alert_types, severities, date_from, date_to
|
||||
)
|
||||
|
||||
return [
|
||||
AlertSummary(
|
||||
alert_type=row.alert_type,
|
||||
severity=row.severity,
|
||||
count=row.count,
|
||||
oldest_alert_age_hours=row.oldest_alert_age_hours,
|
||||
average_resolution_time_hours=row.avg_resolution_hours
|
||||
alert_type=row["alert_type"],
|
||||
severity=row["severity"],
|
||||
count=row["count"],
|
||||
oldest_alert_age_hours=row["oldest_alert_age_hours"],
|
||||
average_resolution_time_hours=row["average_resolution_time_hours"]
|
||||
)
|
||||
for row in rows
|
||||
]
|
||||
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Failed to get alerts summary", error=str(e))
|
||||
raise
|
||||
@@ -441,81 +385,39 @@ class DashboardService:
|
||||
) -> List[RecentActivity]:
|
||||
"""Get recent inventory activity"""
|
||||
try:
|
||||
repos = self._get_repositories(db)
|
||||
dashboard_repo = repos['dashboard_repo']
|
||||
|
||||
activities = []
|
||||
|
||||
|
||||
# Get recent stock movements
|
||||
stock_query = """
|
||||
SELECT
|
||||
'stock_movement' as activity_type,
|
||||
CASE
|
||||
WHEN movement_type = 'PURCHASE' THEN 'Stock added: ' || i.name || ' (' || sm.quantity || ' ' || i.unit_of_measure::text || ')'
|
||||
WHEN movement_type = 'PRODUCTION_USE' THEN 'Stock consumed: ' || i.name || ' (' || sm.quantity || ' ' || i.unit_of_measure::text || ')'
|
||||
WHEN movement_type = 'WASTE' THEN 'Stock wasted: ' || i.name || ' (' || sm.quantity || ' ' || i.unit_of_measure::text || ')'
|
||||
WHEN movement_type = 'ADJUSTMENT' THEN 'Stock adjusted: ' || i.name || ' (' || sm.quantity || ' ' || i.unit_of_measure::text || ')'
|
||||
ELSE 'Stock movement: ' || i.name
|
||||
END as description,
|
||||
sm.movement_date as timestamp,
|
||||
sm.created_by as user_id,
|
||||
CASE
|
||||
WHEN movement_type = 'WASTE' THEN 'high'
|
||||
WHEN movement_type = 'ADJUSTMENT' THEN 'medium'
|
||||
ELSE 'low'
|
||||
END as impact_level,
|
||||
sm.id as entity_id,
|
||||
'stock_movement' as entity_type
|
||||
FROM stock_movements sm
|
||||
JOIN ingredients i ON sm.ingredient_id = i.id
|
||||
WHERE i.tenant_id = :tenant_id
|
||||
ORDER BY sm.movement_date DESC
|
||||
LIMIT :limit
|
||||
"""
|
||||
|
||||
result = await db.execute(stock_query, {"tenant_id": tenant_id, "limit": limit // 2})
|
||||
for row in result.fetchall():
|
||||
stock_movements = await dashboard_repo.get_recent_stock_movements(tenant_id, limit // 2)
|
||||
for row in stock_movements:
|
||||
activities.append(RecentActivity(
|
||||
activity_type=row.activity_type,
|
||||
description=row.description,
|
||||
timestamp=row.timestamp,
|
||||
impact_level=row.impact_level,
|
||||
entity_id=row.entity_id,
|
||||
entity_type=row.entity_type
|
||||
activity_type=row["activity_type"],
|
||||
description=row["description"],
|
||||
timestamp=row["timestamp"],
|
||||
impact_level=row["impact_level"],
|
||||
entity_id=row["entity_id"],
|
||||
entity_type=row["entity_type"]
|
||||
))
|
||||
|
||||
|
||||
# Get recent food safety alerts
|
||||
alert_query = """
|
||||
SELECT
|
||||
'food_safety_alert' as activity_type,
|
||||
title as description,
|
||||
created_at as timestamp,
|
||||
created_by as user_id,
|
||||
CASE
|
||||
WHEN severity = 'critical' THEN 'high'
|
||||
WHEN severity = 'high' THEN 'medium'
|
||||
ELSE 'low'
|
||||
END as impact_level,
|
||||
id as entity_id,
|
||||
'food_safety_alert' as entity_type
|
||||
FROM food_safety_alerts
|
||||
WHERE tenant_id = :tenant_id
|
||||
ORDER BY created_at DESC
|
||||
LIMIT :limit
|
||||
"""
|
||||
|
||||
result = await db.execute(alert_query, {"tenant_id": tenant_id, "limit": limit // 2})
|
||||
for row in result.fetchall():
|
||||
safety_alerts = await dashboard_repo.get_recent_food_safety_alerts(tenant_id, limit // 2)
|
||||
for row in safety_alerts:
|
||||
activities.append(RecentActivity(
|
||||
activity_type=row.activity_type,
|
||||
description=row.description,
|
||||
timestamp=row.timestamp,
|
||||
impact_level=row.impact_level,
|
||||
entity_id=row.entity_id,
|
||||
entity_type=row.entity_type
|
||||
activity_type=row["activity_type"],
|
||||
description=row["description"],
|
||||
timestamp=row["timestamp"],
|
||||
impact_level=row["impact_level"],
|
||||
entity_id=row["entity_id"],
|
||||
entity_type=row["entity_type"]
|
||||
))
|
||||
|
||||
|
||||
# Sort by timestamp and limit
|
||||
activities.sort(key=lambda x: x.timestamp, reverse=True)
|
||||
return activities[:limit]
|
||||
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Failed to get recent activity", error=str(e))
|
||||
raise
|
||||
@@ -523,34 +425,11 @@ class DashboardService:
|
||||
async def get_live_metrics(self, db, tenant_id: UUID) -> Dict[str, Any]:
|
||||
"""Get real-time inventory metrics"""
|
||||
try:
|
||||
query = """
|
||||
SELECT
|
||||
COUNT(DISTINCT i.id) as total_ingredients,
|
||||
COUNT(CASE WHEN s.available_quantity > i.low_stock_threshold THEN 1 END) as in_stock,
|
||||
COUNT(CASE WHEN s.available_quantity <= i.low_stock_threshold THEN 1 END) as low_stock,
|
||||
COUNT(CASE WHEN s.available_quantity = 0 THEN 1 END) as out_of_stock,
|
||||
COALESCE(SUM(s.available_quantity * s.unit_cost), 0) as total_value,
|
||||
COUNT(CASE WHEN s.expiration_date < NOW() THEN 1 END) as expired_items,
|
||||
COUNT(CASE WHEN s.expiration_date BETWEEN NOW() AND NOW() + INTERVAL '7 days' THEN 1 END) as expiring_soon
|
||||
FROM ingredients i
|
||||
LEFT JOIN stock s ON i.id = s.ingredient_id AND s.is_available = true
|
||||
WHERE i.tenant_id = :tenant_id AND i.is_active = true
|
||||
"""
|
||||
|
||||
result = await db.execute(query, {"tenant_id": tenant_id})
|
||||
metrics = result.fetchone()
|
||||
|
||||
return {
|
||||
"total_ingredients": metrics.total_ingredients,
|
||||
"in_stock": metrics.in_stock,
|
||||
"low_stock": metrics.low_stock,
|
||||
"out_of_stock": metrics.out_of_stock,
|
||||
"total_value": float(metrics.total_value),
|
||||
"expired_items": metrics.expired_items,
|
||||
"expiring_soon": metrics.expiring_soon,
|
||||
"last_updated": datetime.now().isoformat()
|
||||
}
|
||||
|
||||
repos = self._get_repositories(db)
|
||||
dashboard_repo = repos['dashboard_repo']
|
||||
|
||||
return await dashboard_repo.get_live_metrics(tenant_id)
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Failed to get live metrics", error=str(e))
|
||||
raise
|
||||
@@ -607,34 +486,16 @@ class DashboardService:
         try:
             if not settings.ENABLE_BUSINESS_MODEL_DETECTION:
                 return {"model": "unknown", "confidence": Decimal("0")}

+            repos = self._get_repositories(db)
+            dashboard_repo = repos['dashboard_repo']
+
-            # Get ingredient metrics
-            query = """
-                SELECT
-                    COUNT(*) as total_ingredients,
-                    COUNT(CASE WHEN product_type = 'finished_product' THEN 1 END) as finished_products,
-                    COUNT(CASE WHEN product_type = 'ingredient' THEN 1 END) as raw_ingredients,
-                    COUNT(DISTINCT st.supplier_id) as supplier_count,
-                    AVG(CASE WHEN s.available_quantity IS NOT NULL THEN s.available_quantity ELSE 0 END) as avg_stock_level
-                FROM ingredients i
-                LEFT JOIN (
-                    SELECT ingredient_id, SUM(available_quantity) as available_quantity
-                    FROM stock WHERE tenant_id = :tenant_id GROUP BY ingredient_id
-                ) s ON i.id = s.ingredient_id
-                LEFT JOIN (
-                    SELECT ingredient_id, supplier_id
-                    FROM stock WHERE tenant_id = :tenant_id AND supplier_id IS NOT NULL
-                    GROUP BY ingredient_id, supplier_id
-                ) st ON i.id = st.ingredient_id
-                WHERE i.tenant_id = :tenant_id AND i.is_active = true
-            """
-
-            result = await db.execute(query, {"tenant_id": tenant_id})
-            metrics = result.fetchone()
-
+            metrics = await dashboard_repo.get_business_model_metrics(tenant_id)

             # Business model detection logic
-            total_ingredients = metrics.total_ingredients
-            finished_ratio = metrics.finished_products / total_ingredients if total_ingredients > 0 else 0
+            total_ingredients = metrics["total_ingredients"]
+            finished_ratio = metrics["finished_products"] / total_ingredients if total_ingredients > 0 else 0

             if total_ingredients >= settings.CENTRAL_BAKERY_THRESHOLD_INGREDIENTS:
                 if finished_ratio > 0.3:  # More than 30% finished products
@@ -659,31 +520,11 @@ class DashboardService:
     async def _get_stock_by_category(self, db, tenant_id: UUID) -> Dict[str, Any]:
         """Get stock breakdown by category"""
         try:
-            query = """
-                SELECT
-                    COALESCE(i.ingredient_category::text, i.product_category::text, 'other') as category,
-                    COUNT(*) as count,
-                    COALESCE(SUM(s.available_quantity * s.unit_cost), 0) as total_value
-                FROM ingredients i
-                LEFT JOIN (
-                    SELECT ingredient_id, SUM(available_quantity) as available_quantity, AVG(unit_cost) as unit_cost
-                    FROM stock WHERE tenant_id = :tenant_id GROUP BY ingredient_id
-                ) s ON i.id = s.ingredient_id
-                WHERE i.tenant_id = :tenant_id AND i.is_active = true
-                GROUP BY category
-            """
-
-            result = await db.execute(query, {"tenant_id": tenant_id})
-            categories = {}
-
-            for row in result.fetchall():
-                categories[row.category] = {
-                    "count": row.count,
-                    "total_value": float(row.total_value)
-                }
-
-            return categories
-
+            repos = self._get_repositories(db)
+            dashboard_repo = repos['dashboard_repo']
+
+            return await dashboard_repo.get_stock_by_category(tenant_id)

         except Exception as e:
             logger.error("Failed to get stock by category", error=str(e))
             return {}
@@ -691,21 +532,11 @@ class DashboardService:
     async def _get_alerts_by_severity(self, db, tenant_id: UUID) -> Dict[str, int]:
         """Get alerts breakdown by severity"""
         try:
-            query = """
-                SELECT severity, COUNT(*) as count
-                FROM food_safety_alerts
-                WHERE tenant_id = :tenant_id AND status = 'active'
-                GROUP BY severity
-            """
-
-            result = await db.execute(query, {"tenant_id": tenant_id})
-            alerts = {"critical": 0, "high": 0, "medium": 0, "low": 0}
-
-            for row in result.fetchall():
-                alerts[row.severity] = row.count
-
-            return alerts
-
+            repos = self._get_repositories(db)
+            dashboard_repo = repos['dashboard_repo']
+
+            return await dashboard_repo.get_alerts_by_severity(tenant_id)

         except Exception as e:
             logger.error("Failed to get alerts by severity", error=str(e))
             return {"critical": 0, "high": 0, "medium": 0, "low": 0}
@@ -713,23 +544,11 @@ class DashboardService:
     async def _get_movements_by_type(self, db, tenant_id: UUID) -> Dict[str, int]:
         """Get movements breakdown by type"""
         try:
-            query = """
-                SELECT sm.movement_type, COUNT(*) as count
-                FROM stock_movements sm
-                JOIN ingredients i ON sm.ingredient_id = i.id
-                WHERE i.tenant_id = :tenant_id
-                    AND sm.movement_date > NOW() - INTERVAL '7 days'
-                GROUP BY sm.movement_type
-            """
-
-            result = await db.execute(query, {"tenant_id": tenant_id})
-            movements = {}
-
-            for row in result.fetchall():
-                movements[row.movement_type] = row.count
-
-            return movements
-
+            repos = self._get_repositories(db)
+            dashboard_repo = repos['dashboard_repo']
+
+            return await dashboard_repo.get_movements_by_type(tenant_id)

         except Exception as e:
             logger.error("Failed to get movements by type", error=str(e))
             return {}
@@ -773,29 +592,11 @@ class DashboardService:
     async def _get_alert_trend(self, db, tenant_id: UUID, days: int) -> List[Dict[str, Any]]:
         """Get alert trend over time"""
         try:
-            query = """
-                SELECT
-                    DATE(created_at) as alert_date,
-                    COUNT(*) as alert_count,
-                    COUNT(CASE WHEN severity IN ('high', 'critical') THEN 1 END) as high_severity_count
-                FROM food_safety_alerts
-                WHERE tenant_id = :tenant_id
-                    AND created_at > NOW() - INTERVAL '%s days'
-                GROUP BY DATE(created_at)
-                ORDER BY alert_date
-            """ % days
-
-            result = await db.execute(query, {"tenant_id": tenant_id})
-
-            return [
-                {
-                    "date": row.alert_date.isoformat(),
-                    "total_alerts": row.alert_count,
-                    "high_severity_alerts": row.high_severity_count
-                }
-                for row in result.fetchall()
-            ]
-
+            repos = self._get_repositories(db)
+            dashboard_repo = repos['dashboard_repo']
+
+            return await dashboard_repo.get_alert_trend(tenant_id, days)

         except Exception as e:
             logger.error("Failed to get alert trend", error=str(e))
             return []
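Worth noting: the removed query spliced the day count into the SQL with Python %-formatting (INTERVAL '%s days' % days), bypassing parameter binding. The repository method receives days as an argument, so it can bind the value instead. A sketch of what that binding could look like, not the actual repository code:

    from sqlalchemy import text

    ALERT_TREND_QUERY = text("""
        SELECT DATE(created_at) AS alert_date,
               COUNT(*) AS alert_count,
               COUNT(CASE WHEN severity IN ('high', 'critical') THEN 1 END) AS high_severity_count
        FROM food_safety_alerts
        WHERE tenant_id = :tenant_id
          AND created_at > NOW() - (:days * INTERVAL '1 day')
        GROUP BY DATE(created_at)
        ORDER BY alert_date
    """)

    # `days` is now a bound parameter instead of being %-formatted into the SQL string.
    result = await db.execute(ALERT_TREND_QUERY, {"tenant_id": tenant_id, "days": days})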
@@ -870,26 +671,10 @@ class DashboardService:
             # Get ingredients to analyze costs by category
             ingredients = await repos['ingredient_repo'].get_ingredients_by_tenant(tenant_id, limit=1000)

-            # Get current stock levels for all ingredients using a direct query
+            # Get current stock levels for all ingredients using repository
             ingredient_stock_levels = {}
             try:
-                from sqlalchemy import text
-
-                # Query to get current stock for all ingredients
-                stock_query = text("""
-                    SELECT
-                        i.id as ingredient_id,
-                        COALESCE(SUM(s.available_quantity), 0) as current_stock
-                    FROM ingredients i
-                    LEFT JOIN stock s ON i.id = s.ingredient_id AND s.is_available = true
-                    WHERE i.tenant_id = :tenant_id AND i.is_active = true
-                    GROUP BY i.id
-                """)
-
-                result = await db.execute(stock_query, {"tenant_id": tenant_id})
-                for row in result.fetchall():
-                    ingredient_stock_levels[str(row.ingredient_id)] = float(row.current_stock)
-
+                ingredient_stock_levels = await repos['dashboard_repo'].get_ingredient_stock_levels(tenant_id)
             except Exception as e:
                 logger.warning(f"Could not fetch current stock levels for cost analysis: {e}")

@@ -16,13 +16,14 @@ from shared.database.transactions import transactional

 from app.core.config import settings
 from app.models.food_safety import (
     FoodSafetyCompliance,
     TemperatureLog,
     FoodSafetyAlert,
     FoodSafetyStandard,
     ComplianceStatus,
     FoodSafetyAlertType
 )
+from app.repositories.food_safety_repository import FoodSafetyRepository
 from app.schemas.food_safety import (
     FoodSafetyComplianceCreate,
     FoodSafetyComplianceUpdate,
@@ -42,9 +43,13 @@ logger = structlog.get_logger()

 class FoodSafetyService:
     """Service for food safety and compliance operations"""

     def __init__(self):
         pass

+    def _get_repository(self, db) -> FoodSafetyRepository:
+        """Get repository instance for the current database session"""
+        return FoodSafetyRepository(db)
+
     # ===== COMPLIANCE MANAGEMENT =====

@@ -90,9 +95,9 @@ class FoodSafetyService:
                 updated_by=user_id
             )

-            db.add(compliance)
-            await db.flush()
-            await db.refresh(compliance)
+            # Create compliance record using repository
+            repo = self._get_repository(db)
+            compliance = await repo.create_compliance(compliance)

             # Check for compliance alerts
             await self._check_compliance_alerts(db, compliance)
@@ -117,9 +122,10 @@ class FoodSafetyService:
     ) -> Optional[FoodSafetyComplianceResponse]:
         """Update an existing compliance record"""
         try:
-            # Get existing compliance record
-            compliance = await db.get(FoodSafetyCompliance, compliance_id)
-            if not compliance or compliance.tenant_id != tenant_id:
+            # Get existing compliance record using repository
+            repo = self._get_repository(db)
+            compliance = await repo.get_compliance_by_id(compliance_id, tenant_id)
+            if not compliance:
                 return None

             # Update fields
@@ -132,9 +138,9 @@ class FoodSafetyService:
                     setattr(compliance, field, value)

             compliance.updated_by = user_id

-            await db.flush()
-            await db.refresh(compliance)
-
+            # Update compliance record using repository
+            compliance = await repo.update_compliance(compliance)

             # Check for compliance alerts after update
             await self._check_compliance_alerts(db, compliance)
@@ -336,85 +342,44 @@ class FoodSafetyService:
     ) -> FoodSafetyDashboard:
         """Get food safety dashboard data"""
         try:
-            # Get compliance overview
-            from sqlalchemy import text
-
-            compliance_query = text("""
-                SELECT
-                    COUNT(*) as total,
-                    COUNT(CASE WHEN compliance_status = 'COMPLIANT' THEN 1 END) as compliant,
-                    COUNT(CASE WHEN compliance_status = 'NON_COMPLIANT' THEN 1 END) as non_compliant,
-                    COUNT(CASE WHEN compliance_status = 'PENDING_REVIEW' THEN 1 END) as pending_review
-                FROM food_safety_compliance
-                WHERE tenant_id = :tenant_id AND is_active = true
-            """)
-
-            compliance_result = await db.execute(compliance_query, {"tenant_id": tenant_id})
-            compliance_stats = compliance_result.fetchone()
-
-            total_compliance = compliance_stats.total or 0
-            compliant_items = compliance_stats.compliant or 0
+            # Get repository instance
+            repo = self._get_repository(db)
+
+            # Get compliance overview using repository
+            compliance_stats = await repo.get_compliance_stats(tenant_id)
+            total_compliance = compliance_stats["total"]
+            compliant_items = compliance_stats["compliant"]
             compliance_percentage = (compliant_items / total_compliance * 100) if total_compliance > 0 else 0

-            # Get temperature monitoring status
-            temp_query = text("""
-                SELECT
-                    COUNT(DISTINCT equipment_id) as sensors_online,
-                    COUNT(CASE WHEN NOT is_within_range AND recorded_at > NOW() - INTERVAL '24 hours' THEN 1 END) as violations_24h
-                FROM temperature_logs
-                WHERE tenant_id = :tenant_id AND recorded_at > NOW() - INTERVAL '1 hour'
-            """)
-
-            temp_result = await db.execute(temp_query, {"tenant_id": tenant_id})
-            temp_stats = temp_result.fetchone()
-
-            # Get expiration tracking
-            expiration_query = text("""
-                SELECT
-                    COUNT(CASE WHEN expiration_date::date = CURRENT_DATE THEN 1 END) as expiring_today,
-                    COUNT(CASE WHEN expiration_date BETWEEN CURRENT_DATE AND CURRENT_DATE + INTERVAL '7 days' THEN 1 END) as expiring_week,
-                    COUNT(CASE WHEN expiration_date < CURRENT_DATE AND is_available THEN 1 END) as expired_requiring_action
-                FROM stock s
-                JOIN ingredients i ON s.ingredient_id = i.id
-                WHERE i.tenant_id = :tenant_id AND s.is_available = true
-            """)
-
-            expiration_result = await db.execute(expiration_query, {"tenant_id": tenant_id})
-            expiration_stats = expiration_result.fetchone()
-
-            # Get alert counts
-            alert_query = text("""
-                SELECT
-                    COUNT(CASE WHEN severity = 'high' OR severity = 'critical' THEN 1 END) as high_risk,
-                    COUNT(CASE WHEN severity = 'critical' THEN 1 END) as critical,
-                    COUNT(CASE WHEN regulatory_action_required = true AND resolved_at IS NULL THEN 1 END) as regulatory_pending
-                FROM food_safety_alerts
-                WHERE tenant_id = :tenant_id AND status = 'active'
-            """)
-
-            alert_result = await db.execute(alert_query, {"tenant_id": tenant_id})
-            alert_stats = alert_result.fetchone()
-
+            # Get temperature monitoring status using repository
+            temp_stats = await repo.get_temperature_stats(tenant_id)
+
+            # Get expiration tracking using repository
+            expiration_stats = await repo.get_expiration_stats(tenant_id)
+
+            # Get alert counts using repository
+            alert_stats = await repo.get_alert_stats(tenant_id)

             return FoodSafetyDashboard(
                 total_compliance_items=total_compliance,
                 compliant_items=compliant_items,
-                non_compliant_items=compliance_stats.non_compliant or 0,
-                pending_review_items=compliance_stats.pending_review or 0,
+                non_compliant_items=compliance_stats["non_compliant"],
+                pending_review_items=compliance_stats["pending_review"],
                 compliance_percentage=Decimal(str(compliance_percentage)),
-                temperature_sensors_online=temp_stats.sensors_online or 0,
-                temperature_sensors_total=temp_stats.sensors_online or 0,  # Would need actual count
-                temperature_violations_24h=temp_stats.violations_24h or 0,
+                temperature_sensors_online=temp_stats["sensors_online"],
+                temperature_sensors_total=temp_stats["sensors_online"],  # Would need actual count
+                temperature_violations_24h=temp_stats["violations_24h"],
                 current_temperature_status="normal",  # Would need to calculate
-                items_expiring_today=expiration_stats.expiring_today or 0,
-                items_expiring_this_week=expiration_stats.expiring_week or 0,
-                expired_items_requiring_action=expiration_stats.expired_requiring_action or 0,
+                items_expiring_today=expiration_stats["expiring_today"],
+                items_expiring_this_week=expiration_stats["expiring_week"],
+                expired_items_requiring_action=expiration_stats["expired_requiring_action"],
                 upcoming_audits=0,  # Would need to calculate
                 overdue_audits=0,  # Would need to calculate
                 certifications_valid=compliant_items,
                 certifications_expiring_soon=0,  # Would need to calculate
-                high_risk_items=alert_stats.high_risk or 0,
-                critical_alerts=alert_stats.critical or 0,
-                regulatory_notifications_pending=alert_stats.regulatory_pending or 0,
+                high_risk_items=alert_stats["high_risk"],
+                critical_alerts=alert_stats["critical"],
+                regulatory_notifications_pending=alert_stats["regulatory_pending"],
                 recent_safety_incidents=[]  # Would need to get recent incidents
             )

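The stat methods consumed above are not part of this diff, but the service now indexes plain dicts without `or 0` fallbacks, which implies the repository normalizes NULLs itself. A hedged sketch of the assumed shape of get_compliance_stats, wrapping the removed query:

    async def get_compliance_stats(self, tenant_id: UUID) -> Dict[str, int]:
        # Assumed implementation: wraps the old inline query and coalesces
        # NULLs to 0, which is why the service dropped its `or 0` guards.
        row = (await self.db.execute(COMPLIANCE_STATS_QUERY, {"tenant_id": tenant_id})).fetchone()
        return {
            "total": row.total or 0,
            "compliant": row.compliant or 0,
            "non_compliant": row.non_compliant or 0,
            "pending_review": row.pending_review or 0,
        }

The same contract would apply to get_temperature_stats, get_expiration_stats, and get_alert_stats.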
@@ -426,16 +391,14 @@ class FoodSafetyService:

     async def _validate_compliance_data(self, db, compliance_data: FoodSafetyComplianceCreate):
         """Validate compliance data for business rules"""
-        # Check if ingredient exists
-        from sqlalchemy import text
-
-        ingredient_query = text("SELECT id FROM ingredients WHERE id = :ingredient_id AND tenant_id = :tenant_id")
-        result = await db.execute(ingredient_query, {
-            "ingredient_id": compliance_data.ingredient_id,
-            "tenant_id": compliance_data.tenant_id
-        })
-
-        if not result.fetchone():
+        # Check if ingredient exists using repository
+        repo = self._get_repository(db)
+        ingredient_exists = await repo.validate_ingredient_exists(
+            compliance_data.ingredient_id,
+            compliance_data.tenant_id
+        )
+
+        if not ingredient_exists:
             raise ValueError("Ingredient not found")

         # Validate standard

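The repository side of this check is not shown in the commit. Given the removed query, validate_ingredient_exists presumably reduces to an existence probe along these lines (a sketch under that assumption):

    async def validate_ingredient_exists(self, ingredient_id: UUID, tenant_id: UUID) -> bool:
        # Assumed implementation: an existence probe is enough; no row data is needed.
        result = await self.db.execute(
            text("SELECT 1 FROM ingredients WHERE id = :ingredient_id AND tenant_id = :tenant_id"),
            {"ingredient_id": ingredient_id, "tenant_id": tenant_id},
        )
        return result.fetchone() is not None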
@@ -18,6 +18,7 @@ from shared.alerts.base_service import BaseAlertService, AlertServiceMixin
 from shared.alerts.templates import format_item_message
 from app.repositories.stock_repository import StockRepository
 from app.repositories.stock_movement_repository import StockMovementRepository
+from app.repositories.inventory_alert_repository import InventoryAlertRepository

 logger = structlog.get_logger()

@@ -90,54 +91,20 @@ class InventoryAlertService(BaseAlertService, AlertServiceMixin):
         """Batch check all stock levels for critical shortages (alerts)"""
         try:
             self._checks_performed += 1

-            query = """
-                WITH stock_analysis AS (
-                    SELECT
-                        i.id, i.name, i.tenant_id,
-                        COALESCE(SUM(s.current_quantity), 0) as current_stock,
-                        i.low_stock_threshold as minimum_stock,
-                        i.max_stock_level as maximum_stock,
-                        i.reorder_point,
-                        0 as tomorrow_needed,
-                        0 as avg_daily_usage,
-                        7 as lead_time_days,
-                        CASE
-                            WHEN COALESCE(SUM(s.current_quantity), 0) < i.low_stock_threshold THEN 'critical'
-                            WHEN COALESCE(SUM(s.current_quantity), 0) < i.low_stock_threshold * 1.2 THEN 'low'
-                            WHEN i.max_stock_level IS NOT NULL AND COALESCE(SUM(s.current_quantity), 0) > i.max_stock_level THEN 'overstock'
-                            ELSE 'normal'
-                        END as status,
-                        GREATEST(0, i.low_stock_threshold - COALESCE(SUM(s.current_quantity), 0)) as shortage_amount
-                    FROM ingredients i
-                    LEFT JOIN stock s ON s.ingredient_id = i.id AND s.is_available = true
-                    WHERE i.tenant_id = :tenant_id AND i.is_active = true
-                    GROUP BY i.id, i.name, i.tenant_id, i.low_stock_threshold, i.max_stock_level, i.reorder_point
-                )
-                SELECT * FROM stock_analysis WHERE status != 'normal'
-                ORDER BY
-                    CASE status
-                        WHEN 'critical' THEN 1
-                        WHEN 'low' THEN 2
-                        WHEN 'overstock' THEN 3
-                    END,
-                    shortage_amount DESC
-            """
-
             tenants = await self.get_active_tenants()

             for tenant_id in tenants:
                 try:
                     # Add timeout to prevent hanging connections
                     async with asyncio.timeout(30):  # 30 second timeout
                         async with self.db_manager.get_background_session() as session:
-                            result = await session.execute(text(query), {"tenant_id": tenant_id})
-                            issues = result.fetchall()
-
+                            # Use repository for stock analysis
+                            alert_repo = InventoryAlertRepository(session)
+                            issues = await alert_repo.get_stock_issues(tenant_id)

                             for issue in issues:
-                                # Convert SQLAlchemy Row to dictionary for easier access
-                                issue_dict = dict(issue._mapping) if hasattr(issue, '_mapping') else dict(issue)
-                                await self._process_stock_issue(tenant_id, issue_dict)
+                                await self._process_stock_issue(tenant_id, issue)

                 except Exception as e:
                     logger.error("Error checking stock for tenant",
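Since the Row-to-dict conversion was deleted from the service, get_stock_issues has to return mapping-like items itself or _process_stock_issue would break on attribute-style rows. A sketch of the assumed repository side, under that assumption:

    async def get_stock_issues(self, tenant_id: UUID) -> List[Dict[str, Any]]:
        # Assumed: the repository owns the old stock_analysis CTE and converts
        # rows to dicts, so callers never see SQLAlchemy Row objects.
        result = await self.db.execute(STOCK_ANALYSIS_QUERY, {"tenant_id": tenant_id})
        return [dict(row._mapping) for row in result.fetchall()]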
@@ -230,39 +197,24 @@ class InventoryAlertService(BaseAlertService, AlertServiceMixin):
         """Check for products approaching expiry (alerts)"""
         try:
             self._checks_performed += 1

-            query = """
-                SELECT
-                    i.id, i.name, i.tenant_id,
-                    s.id as stock_id, s.expiration_date, s.current_quantity,
-                    EXTRACT(days FROM (s.expiration_date - CURRENT_DATE)) as days_to_expiry
-                FROM ingredients i
-                JOIN stock s ON s.ingredient_id = i.id
-                WHERE s.expiration_date <= CURRENT_DATE + INTERVAL '7 days'
-                    AND s.current_quantity > 0
-                    AND s.is_available = true
-                    AND s.expiration_date IS NOT NULL
-                ORDER BY s.expiration_date ASC
-            """
-
             tenants = await self.get_active_tenants()

             # Add timeout to prevent hanging connections
             async with asyncio.timeout(30):  # 30 second timeout
                 async with self.db_manager.get_background_session() as session:
-                    result = await session.execute(text(query))
-                    expiring_items = result.fetchall()
-
-                    # Group by tenant
-                    by_tenant = {}
-                    for item in expiring_items:
-                        # Convert SQLAlchemy Row to dictionary for easier access
-                        item_dict = dict(item._mapping) if hasattr(item, '_mapping') else dict(item)
-                        tenant_id = item_dict['tenant_id']
-                        if tenant_id not in by_tenant:
-                            by_tenant[tenant_id] = []
-                        by_tenant[tenant_id].append(item_dict)
-
-                    for tenant_id, items in by_tenant.items():
-                        await self._process_expiring_items(tenant_id, items)
+                    alert_repo = InventoryAlertRepository(session)
+
+                    for tenant_id in tenants:
+                        try:
+                            # Get expiring products for this tenant
+                            items = await alert_repo.get_expiring_products(tenant_id, days_threshold=7)
+                            if items:
+                                await self._process_expiring_items(tenant_id, items)
+                        except Exception as e:
+                            logger.error("Error checking expiring products for tenant",
+                                         tenant_id=str(tenant_id),
+                                         error=str(e))

         except Exception as e:
             logger.error("Expiry check failed", error=str(e))
@@ -334,31 +286,23 @@ class InventoryAlertService(BaseAlertService, AlertServiceMixin):
         """Check for temperature breaches (alerts)"""
         try:
             self._checks_performed += 1

-            query = """
-                SELECT
-                    t.id, t.equipment_id as sensor_id, t.storage_location as location,
-                    t.temperature_celsius as temperature,
-                    t.target_temperature_max as max_threshold, t.tenant_id,
-                    COALESCE(t.deviation_minutes, 0) as breach_duration_minutes
-                FROM temperature_logs t
-                WHERE t.temperature_celsius > COALESCE(t.target_temperature_max, 25)
-                    AND NOT t.is_within_range
-                    AND COALESCE(t.deviation_minutes, 0) >= 30 -- Only after 30 minutes
-                    AND (t.recorded_at < NOW() - INTERVAL '15 minutes' OR t.alert_triggered = false) -- Avoid spam
-                ORDER BY t.temperature_celsius DESC, t.deviation_minutes DESC
-            """
-
             tenants = await self.get_active_tenants()

             # Add timeout to prevent hanging connections
             async with asyncio.timeout(30):  # 30 second timeout
                 async with self.db_manager.get_background_session() as session:
-                    result = await session.execute(text(query))
-                    breaches = result.fetchall()
-
-                    for breach in breaches:
-                        # Convert SQLAlchemy Row to dictionary for easier access
-                        breach_dict = dict(breach._mapping) if hasattr(breach, '_mapping') else dict(breach)
-                        await self._process_temperature_breach(breach_dict)
+                    alert_repo = InventoryAlertRepository(session)
+
+                    for tenant_id in tenants:
+                        try:
+                            breaches = await alert_repo.get_temperature_breaches(tenant_id, hours_back=24)
+                            for breach in breaches:
+                                await self._process_temperature_breach(breach)
+                        except Exception as e:
+                            logger.error("Error checking temperature breaches for tenant",
+                                         tenant_id=str(tenant_id),
+                                         error=str(e))

         except Exception as e:
             logger.error("Temperature check failed", error=str(e))
@@ -405,10 +349,8 @@ class InventoryAlertService(BaseAlertService, AlertServiceMixin):
             # Add timeout to prevent hanging connections
             async with asyncio.timeout(10):  # 10 second timeout for simple update
                 async with self.db_manager.get_background_session() as session:
-                    await session.execute(
-                        text("UPDATE temperature_logs SET alert_triggered = true WHERE id = :id"),
-                        {"id": breach['id']}
-                    )
+                    alert_repo = InventoryAlertRepository(session)
+                    await alert_repo.mark_temperature_alert_triggered(breach['id'])

         except Exception as e:
             logger.error("Error processing temperature breach",
@@ -458,20 +400,17 @@ class InventoryAlertService(BaseAlertService, AlertServiceMixin):
         """

         tenants = await self.get_active_tenants()

-        for tenant_id in tenants:
-            try:
-                from sqlalchemy import text
-                # Add timeout to prevent hanging connections
-                async with asyncio.timeout(30):  # 30 second timeout
-                    async with self.db_manager.get_background_session() as session:
-                        result = await session.execute(text(query), {"tenant_id": tenant_id})
-                        recommendations = result.fetchall()
-
-                        for rec in recommendations:
-                            # Convert SQLAlchemy Row to dictionary for easier access
-                            rec_dict = dict(rec._mapping) if hasattr(rec, '_mapping') else dict(rec)
-                            await self._generate_stock_recommendation(tenant_id, rec_dict)
-
+        # Add timeout to prevent hanging connections
+        async with asyncio.timeout(30):  # 30 second timeout
+            async with self.db_manager.get_background_session() as session:
+                alert_repo = InventoryAlertRepository(session)
+
+                for tenant_id in tenants:
+                    try:
+                        recommendations = await alert_repo.get_reorder_recommendations(tenant_id)
+                        for rec in recommendations:
+                            await self._generate_stock_recommendation(tenant_id, rec)

                     except Exception as e:
                         logger.error("Error generating recommendations for tenant",
@@ -559,20 +498,17 @@ class InventoryAlertService(BaseAlertService, AlertServiceMixin):
         """

         tenants = await self.get_active_tenants()

-        for tenant_id in tenants:
-            try:
-                from sqlalchemy import text
-                # Add timeout to prevent hanging connections
-                async with asyncio.timeout(30):  # 30 second timeout
-                    async with self.db_manager.get_background_session() as session:
-                        result = await session.execute(text(query), {"tenant_id": tenant_id})
-                        waste_data = result.fetchall()
-
-                        for waste in waste_data:
-                            # Convert SQLAlchemy Row to dictionary for easier access
-                            waste_dict = dict(waste._mapping) if hasattr(waste, '_mapping') else dict(waste)
-                            await self._generate_waste_recommendation(tenant_id, waste_dict)
-
+        # Add timeout to prevent hanging connections
+        async with asyncio.timeout(30):  # 30 second timeout
+            async with self.db_manager.get_background_session() as session:
+                alert_repo = InventoryAlertRepository(session)
+
+                for tenant_id in tenants:
+                    try:
+                        waste_data = await alert_repo.get_waste_opportunities(tenant_id)
+                        for waste in waste_data:
+                            await self._generate_waste_recommendation(tenant_id, waste)

                     except Exception as e:
                         logger.error("Error generating waste recommendations",
@@ -738,21 +674,11 @@ class InventoryAlertService(BaseAlertService, AlertServiceMixin):
     async def get_active_tenants(self) -> List[UUID]:
         """Get list of active tenant IDs from ingredients table (inventory service specific)"""
         try:
-            query = text("SELECT DISTINCT tenant_id FROM ingredients WHERE is_active = true")
             # Add timeout to prevent hanging connections
             async with asyncio.timeout(10):  # 10 second timeout
                 async with self.db_manager.get_background_session() as session:
-                    result = await session.execute(query)
-                    # Handle PostgreSQL UUID objects properly
-                    tenant_ids = []
-                    for row in result.fetchall():
-                        tenant_id = row.tenant_id
-                        # Convert to UUID if it's not already
-                        if isinstance(tenant_id, UUID):
-                            tenant_ids.append(tenant_id)
-                        else:
-                            tenant_ids.append(UUID(str(tenant_id)))
-                    return tenant_ids
+                    alert_repo = InventoryAlertRepository(session)
+                    return await alert_repo.get_active_tenant_ids()
         except Exception as e:
             logger.error("Error fetching active tenants from ingredients", error=str(e))
             return []
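The UUID coercion the service used to do now has to live in the repository, otherwise callers could receive raw driver values again. A sketch of the assumed get_active_tenant_ids, preserving the old conversion logic:

    async def get_active_tenant_ids(self) -> List[UUID]:
        # Assumed implementation: same query as before, with the
        # PostgreSQL-UUID normalization moved behind the repository boundary.
        result = await self.db.execute(
            text("SELECT DISTINCT tenant_id FROM ingredients WHERE is_active = true")
        )
        return [
            row.tenant_id if isinstance(row.tenant_id, UUID) else UUID(str(row.tenant_id))
            for row in result.fetchall()
        ]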
@@ -760,27 +686,15 @@ class InventoryAlertService(BaseAlertService, AlertServiceMixin):
     async def get_stock_after_order(self, ingredient_id: str, order_quantity: float) -> Optional[Dict[str, Any]]:
         """Get stock information after hypothetical order"""
         try:
-            query = """
-                SELECT i.id, i.name,
-                    COALESCE(SUM(s.current_quantity), 0) as current_stock,
-                    i.low_stock_threshold as minimum_stock,
-                    (COALESCE(SUM(s.current_quantity), 0) - :order_quantity) as remaining
-                FROM ingredients i
-                LEFT JOIN stock s ON s.ingredient_id = i.id AND s.is_available = true
-                WHERE i.id = :ingredient_id
-                GROUP BY i.id, i.name, i.low_stock_threshold
-            """
-
             # Add timeout to prevent hanging connections
             async with asyncio.timeout(10):  # 10 second timeout
                 async with self.db_manager.get_background_session() as session:
-                    result = await session.execute(text(query), {"ingredient_id": ingredient_id, "order_quantity": order_quantity})
-                    row = result.fetchone()
-                    return dict(row) if row else None
-
+                    alert_repo = InventoryAlertRepository(session)
+                    return await alert_repo.get_stock_after_order(ingredient_id, order_quantity)

         except Exception as e:
             logger.error("Error getting stock after order",
                          ingredient_id=ingredient_id,
                          error=str(e))
             return None
583
services/inventory/app/services/sustainability_service.py
Normal file
@@ -0,0 +1,583 @@
# ================================================================
# services/inventory/app/services/sustainability_service.py
# ================================================================
"""
Sustainability Service - Environmental Impact & SDG Compliance Tracking
Aligned with UN SDG 12.3 and EU Farm to Fork Strategy
"""

from datetime import datetime, timedelta
from decimal import Decimal
from typing import Dict, Any, Optional, List
from uuid import UUID
import structlog

from sqlalchemy import text
from sqlalchemy.ext.asyncio import AsyncSession
from app.core.config import settings
from app.repositories.stock_movement_repository import StockMovementRepository
from shared.clients.production_client import create_production_client

logger = structlog.get_logger()


# Environmental Impact Constants (Research-based averages for bakery products)
class EnvironmentalConstants:
    """Environmental impact factors for bakery production"""

    # CO2 equivalent per kg of food waste (kg CO2e/kg)
    # Source: EU Commission, average for baked goods
    CO2_PER_KG_WASTE = 1.9

    # Water footprint (liters per kg of ingredient)
    WATER_FOOTPRINT = {
        'flour': 1827,   # Wheat flour
        'dairy': 1020,   # Average dairy products
        'eggs': 3265,    # Eggs
        'sugar': 1782,   # Sugar
        'yeast': 500,    # Estimated for yeast
        'fats': 1600,    # Butter/oils average
        'default': 1500  # Conservative default
    }

    # Land use per kg (m² per kg)
    LAND_USE_PER_KG = 3.4

    # Average trees needed to offset 1 ton CO2
    TREES_PER_TON_CO2 = 50

    # EU bakery waste baseline (average industry waste %)
    EU_BAKERY_BASELINE_WASTE = 0.25  # 25% average

    # UN SDG 12.3 target: 50% reduction by 2030
    SDG_TARGET_REDUCTION = 0.50

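To make the constants concrete: for 100 kg of wasted product, the factors above give roughly

    waste_kg = 100
    co2_kg = waste_kg * 1.9            # 190 kg CO2e
    water_l = waste_kg * 1500          # 150,000 L (default footprint)
    land_m2 = waste_kg * 3.4           # 340 m²
    trees = (co2_kg / 1000) * 50       # about 9.5 trees to offset

which matches the per-kg arithmetic in _calculate_environmental_impact below.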
class SustainabilityService:
    """Service for calculating environmental impact and SDG compliance"""

    def __init__(self):
        pass

    async def get_sustainability_metrics(
        self,
        db: AsyncSession,
        tenant_id: UUID,
        start_date: Optional[datetime] = None,
        end_date: Optional[datetime] = None
    ) -> Dict[str, Any]:
        """
        Get comprehensive sustainability metrics for a tenant

        Returns metrics aligned with:
        - UN SDG 12.3 (Food waste reduction)
        - EU Farm to Fork Strategy
        - Green Deal objectives
        """
        try:
            # Default to last 30 days if no date range provided
            if not end_date:
                end_date = datetime.now()
            if not start_date:
                start_date = end_date - timedelta(days=30)

            # Get waste data from production and inventory
            waste_data = await self._get_waste_data(db, tenant_id, start_date, end_date)

            # Calculate environmental impact
            environmental_impact = self._calculate_environmental_impact(waste_data)

            # Calculate SDG compliance
            sdg_compliance = await self._calculate_sdg_compliance(
                db, tenant_id, waste_data, start_date, end_date
            )

            # Calculate avoided waste (through AI predictions)
            avoided_waste = await self._calculate_avoided_waste(
                db, tenant_id, start_date, end_date
            )

            # Calculate financial impact
            financial_impact = self._calculate_financial_impact(waste_data)

            return {
                'period': {
                    'start_date': start_date.isoformat(),
                    'end_date': end_date.isoformat(),
                    'days': (end_date - start_date).days
                },
                'waste_metrics': {
                    'total_waste_kg': waste_data['total_waste_kg'],
                    'production_waste_kg': waste_data['production_waste_kg'],
                    'expired_waste_kg': waste_data['expired_waste_kg'],
                    'waste_percentage': waste_data['waste_percentage'],
                    'waste_by_reason': waste_data['waste_by_reason']
                },
                'environmental_impact': environmental_impact,
                'sdg_compliance': sdg_compliance,
                'avoided_waste': avoided_waste,
                'financial_impact': financial_impact,
                'grant_readiness': self._assess_grant_readiness(sdg_compliance)
            }

        except Exception as e:
            logger.error("Failed to calculate sustainability metrics",
                         tenant_id=str(tenant_id), error=str(e))
            raise

    async def _get_waste_data(
        self,
        db: AsyncSession,
        tenant_id: UUID,
        start_date: datetime,
        end_date: datetime
    ) -> Dict[str, Any]:
        """Get waste data from production service and inventory"""
        try:
            # Get production waste data via HTTP call to production service
            production_waste_data = await self._get_production_waste_data(
                tenant_id, start_date, end_date
            )

            prod_data = production_waste_data if production_waste_data else {
                'total_production_waste': 0,
                'total_defects': 0,
                'total_planned': 0,
                'total_actual': 0
            }

            # Query inventory waste using repository
            stock_movement_repo = StockMovementRepository(db)
            inventory_waste = await stock_movement_repo.get_inventory_waste_total(
                tenant_id=tenant_id,
                start_date=start_date,
                end_date=end_date
            )

            # Calculate totals
            production_waste = float(prod_data.get('total_production_waste', 0) or 0)
            defect_waste = float(prod_data.get('total_defects', 0) or 0)
            total_waste = production_waste + defect_waste + inventory_waste

            total_production = float(prod_data.get('total_planned', 0) or 0)
            waste_percentage = (total_waste / total_production * 100) if total_production > 0 else 0

            # Categorize waste by reason
            waste_by_reason = {
                'production_defects': defect_waste,
                'production_waste': production_waste - defect_waste,
                'expired_inventory': inventory_waste * 0.7,  # Estimate: 70% expires
                'damaged_inventory': inventory_waste * 0.3,  # Estimate: 30% damaged
            }

            return {
                'total_waste_kg': total_waste,
                'production_waste_kg': production_waste + defect_waste,
                'expired_waste_kg': inventory_waste,
                'waste_percentage': waste_percentage,
                'total_production_kg': total_production,
                'waste_by_reason': waste_by_reason,
                # Note: `inv_data` from an earlier raw-SQL version no longer exists
                # here; the repository returns an aggregate total only, so the
                # incident count is not available and is reported as 0 (placeholder).
                'waste_incidents': 0
            }

        except Exception as e:
            logger.error("Failed to get waste data", error=str(e))
            raise

    async def _get_production_waste_data(
        self,
        tenant_id: UUID,
        start_date: datetime,
        end_date: datetime
    ) -> Optional[Dict[str, Any]]:
        """Get production waste data from production service using shared client"""
        try:
            # Use the shared production client with proper authentication and resilience
            production_client = create_production_client(settings)

            data = await production_client.get_waste_analytics(
                str(tenant_id),
                start_date.isoformat(),
                end_date.isoformat()
            )

            if data:
                logger.info(
                    "Retrieved production waste data via production client",
                    tenant_id=str(tenant_id),
                    total_waste=data.get('total_production_waste', 0)
                )
                return data
            else:
                # Client returned None, return zeros as fallback
                logger.warning(
                    "Production waste analytics returned None, using zeros",
                    tenant_id=str(tenant_id)
                )
                return {
                    'total_production_waste': 0,
                    'total_defects': 0,
                    'total_planned': 0,
                    'total_actual': 0
                }

        except Exception as e:
            logger.error(
                "Error calling production service for waste data via client",
                error=str(e),
                tenant_id=str(tenant_id)
            )
            # Return zeros on error to not break the flow
            return {
                'total_production_waste': 0,
                'total_defects': 0,
                'total_planned': 0,
                'total_actual': 0
            }

    def _calculate_environmental_impact(self, waste_data: Dict[str, Any]) -> Dict[str, Any]:
        """Calculate environmental impact of food waste"""
        try:
            total_waste_kg = waste_data['total_waste_kg']

            # CO2 emissions
            co2_emissions_kg = total_waste_kg * EnvironmentalConstants.CO2_PER_KG_WASTE
            co2_emissions_tons = co2_emissions_kg / 1000

            # Equivalent trees to offset
            trees_equivalent = co2_emissions_tons * EnvironmentalConstants.TREES_PER_TON_CO2

            # Water footprint (using average for bakery products)
            water_liters = total_waste_kg * EnvironmentalConstants.WATER_FOOTPRINT['default']

            # Land use
            land_use_m2 = total_waste_kg * EnvironmentalConstants.LAND_USE_PER_KG

            # Human-readable equivalents for marketing
            equivalents = {
                'car_km': co2_emissions_kg / 0.12,  # Average car emits 120g CO2/km
                'smartphone_charges': (co2_emissions_kg * 1000) / 8,  # 8g CO2 per charge
                'showers': water_liters / 65,  # Average shower uses 65L
                'trees_year_growth': trees_equivalent
            }

            return {
                'co2_emissions': {
                    'kg': round(co2_emissions_kg, 2),
                    'tons': round(co2_emissions_tons, 4),
                    'trees_to_offset': round(trees_equivalent, 1)
                },
                'water_footprint': {
                    'liters': round(water_liters, 2),
                    'cubic_meters': round(water_liters / 1000, 2)
                },
                'land_use': {
                    'square_meters': round(land_use_m2, 2),
                    'hectares': round(land_use_m2 / 10000, 4)
                },
                'human_equivalents': {
                    'car_km_equivalent': round(equivalents['car_km'], 0),
                    'smartphone_charges': round(equivalents['smartphone_charges'], 0),
                    'showers_equivalent': round(equivalents['showers'], 0),
                    'trees_planted': round(equivalents['trees_year_growth'], 1)
                }
            }

        except Exception as e:
            logger.error("Failed to calculate environmental impact", error=str(e))
            raise

    async def _calculate_sdg_compliance(
        self,
        db: AsyncSession,
        tenant_id: UUID,
        waste_data: Dict[str, Any],
        start_date: datetime,
        end_date: datetime
    ) -> Dict[str, Any]:
        """
        Calculate compliance with UN SDG 12.3
        Target: Halve per capita global food waste by 2030
        """
        try:
            # Get baseline (first 90 days of operation or industry average)
            baseline = await self._get_baseline_waste(db, tenant_id)

            current_waste_percentage = waste_data['waste_percentage']
            baseline_percentage = baseline.get('waste_percentage', EnvironmentalConstants.EU_BAKERY_BASELINE_WASTE * 100)

            # Calculate reduction from baseline
            if baseline_percentage > 0:
                reduction_percentage = ((baseline_percentage - current_waste_percentage) / baseline_percentage) * 100
            else:
                reduction_percentage = 0

            # SDG 12.3 target is 50% reduction
            sdg_target = baseline_percentage * (1 - EnvironmentalConstants.SDG_TARGET_REDUCTION)
            progress_to_target = (reduction_percentage / (EnvironmentalConstants.SDG_TARGET_REDUCTION * 100)) * 100

            # Status assessment
            if reduction_percentage >= 50:
                status = 'sdg_compliant'
                status_label = 'SDG 12.3 Compliant'
            elif reduction_percentage >= 30:
                status = 'on_track'
                status_label = 'On Track to Compliance'
            elif reduction_percentage >= 10:
                status = 'progressing'
                status_label = 'Making Progress'
            else:
                status = 'baseline'
                status_label = 'Establishing Baseline'

            return {
                'sdg_12_3': {
                    'baseline_waste_percentage': round(baseline_percentage, 2),
                    'current_waste_percentage': round(current_waste_percentage, 2),
                    'reduction_achieved': round(reduction_percentage, 2),
                    'target_reduction': 50.0,
                    'progress_to_target': round(min(progress_to_target, 100), 1),
                    'status': status,
                    'status_label': status_label,
                    'target_waste_percentage': round(sdg_target, 2)
                },
                'baseline_period': baseline.get('period', 'industry_average'),
                'certification_ready': reduction_percentage >= 50,
                'improvement_areas': self._identify_improvement_areas(waste_data)
            }

        except Exception as e:
            logger.error("Failed to calculate SDG compliance", error=str(e))
            raise

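A quick numeric check of the formulas above, with the 25% industry baseline and a current waste rate of 15%:

    baseline_percentage = 25.0
    current_waste_percentage = 15.0
    reduction = (baseline_percentage - current_waste_percentage) / baseline_percentage * 100  # 40.0
    progress_to_target = reduction / 50 * 100  # 80.0 -> status 'on_track' (>= 30, < 50)

so the tenant would report 40% reduction, 80% progress to the SDG 12.3 target, and the 'On Track to Compliance' label.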
    async def _get_baseline_waste(
        self,
        db: AsyncSession,
        tenant_id: UUID
    ) -> Dict[str, Any]:
        """Get baseline waste percentage from production service using shared client"""
        try:
            # Use the shared production client with proper authentication and resilience
            production_client = create_production_client(settings)

            baseline_data = await production_client.get_baseline(str(tenant_id))

            if baseline_data and baseline_data.get('data_available', False):
                # Production service has real baseline data
                logger.info(
                    "Retrieved baseline from production service via client",
                    tenant_id=str(tenant_id),
                    baseline_percentage=baseline_data.get('waste_percentage', 0)
                )
                return {
                    'waste_percentage': baseline_data['waste_percentage'],
                    'period': baseline_data['period'].get('type', 'first_90_days'),
                    'total_production_kg': baseline_data.get('total_production_kg', 0),
                    'total_waste_kg': baseline_data.get('total_waste_kg', 0)
                }
            else:
                # Production service doesn't have enough data yet
                logger.info(
                    "Production service baseline not available, using industry average",
                    tenant_id=str(tenant_id)
                )
                return {
                    'waste_percentage': EnvironmentalConstants.EU_BAKERY_BASELINE_WASTE * 100,
                    'period': 'industry_average',
                    'note': 'Using EU bakery industry average of 25% as baseline'
                }

        except Exception as e:
            logger.warning(
                "Error calling production service for baseline via client, using industry average",
                error=str(e),
                tenant_id=str(tenant_id)
            )

            # Fallback to industry average
            return {
                'waste_percentage': EnvironmentalConstants.EU_BAKERY_BASELINE_WASTE * 100,
                'period': 'industry_average',
                'note': 'Using EU bakery industry average of 25% as baseline'
            }

    async def _calculate_avoided_waste(
        self,
        db: AsyncSession,
        tenant_id: UUID,
        start_date: datetime,
        end_date: datetime
    ) -> Dict[str, Any]:
        """
        Calculate waste avoided through AI predictions and smart planning
        This is a KEY metric for marketing and grant applications
        """
        try:
            # Get AI-assisted batch data from production service
            production_data = await self._get_production_waste_data(tenant_id, start_date, end_date)

            # Extract data with AI batch tracking
            total_planned = production_data.get('total_planned', 0) if production_data else 0
            total_waste = production_data.get('total_production_waste', 0) if production_data else 0
            ai_assisted_batches = production_data.get('ai_assisted_batches', 0) if production_data else 0

            # Estimate waste avoided by comparing to industry average
            if total_planned > 0:
                # Industry average waste: 25%
                # Current actual waste from production
                industry_expected_waste = total_planned * EnvironmentalConstants.EU_BAKERY_BASELINE_WASTE
                actual_waste = total_waste
                estimated_avoided = max(0, industry_expected_waste - actual_waste)

                # Calculate environmental impact of avoided waste
                avoided_co2 = estimated_avoided * EnvironmentalConstants.CO2_PER_KG_WASTE
                avoided_water = estimated_avoided * EnvironmentalConstants.WATER_FOOTPRINT['default']

                return {
                    'waste_avoided_kg': round(estimated_avoided, 2),
                    'ai_assisted_batches': ai_assisted_batches,
                    'environmental_impact_avoided': {
                        'co2_kg': round(avoided_co2, 2),
                        'water_liters': round(avoided_water, 2)
                    },
                    'methodology': 'compared_to_industry_baseline'
                }
            else:
                return {
                    'waste_avoided_kg': 0,
                    'ai_assisted_batches': 0,
                    'note': 'Insufficient data for avoided waste calculation'
                }

        except Exception as e:
            logger.error("Failed to calculate avoided waste", error=str(e))
            return {'waste_avoided_kg': 0, 'error': str(e)}

    def _calculate_financial_impact(self, waste_data: Dict[str, Any]) -> Dict[str, Any]:
        """Calculate financial impact of food waste"""
        # Average cost per kg of bakery products: €3.50
        avg_cost_per_kg = 3.50

        total_waste_kg = waste_data['total_waste_kg']
        waste_cost = total_waste_kg * avg_cost_per_kg

        # If waste was reduced by 30%, potential savings
        potential_savings = waste_cost * 0.30

        return {
            'waste_cost_eur': round(waste_cost, 2),
            'cost_per_kg': avg_cost_per_kg,
            'potential_monthly_savings': round(potential_savings, 2),
            'annual_projection': round(waste_cost * 12, 2)
        }

    def _identify_improvement_areas(self, waste_data: Dict[str, Any]) -> List[str]:
        """Identify areas for improvement based on waste data"""
        areas = []

        waste_by_reason = waste_data.get('waste_by_reason', {})

        if waste_by_reason.get('production_defects', 0) > waste_data['total_waste_kg'] * 0.3:
            areas.append('quality_control_in_production')

        if waste_by_reason.get('expired_inventory', 0) > waste_data['total_waste_kg'] * 0.4:
            areas.append('inventory_rotation_management')

        if waste_data.get('waste_percentage', 0) > 20:
            areas.append('demand_forecasting_accuracy')

        if not areas:
            areas.append('maintain_current_practices')

        return areas

    def _assess_grant_readiness(self, sdg_compliance: Dict[str, Any]) -> Dict[str, Any]:
        """Assess readiness for various grant programs"""
        reduction = sdg_compliance['sdg_12_3']['reduction_achieved']

        grants = {
            'eu_horizon_europe': {
                'eligible': reduction >= 30,
                'confidence': 'high' if reduction >= 50 else 'medium' if reduction >= 30 else 'low',
                'requirements_met': reduction >= 30
            },
            'eu_farm_to_fork': {
                'eligible': reduction >= 20,
                'confidence': 'high' if reduction >= 40 else 'medium' if reduction >= 20 else 'low',
                'requirements_met': reduction >= 20
            },
            'national_circular_economy': {
                'eligible': reduction >= 15,
                'confidence': 'high' if reduction >= 25 else 'medium' if reduction >= 15 else 'low',
                'requirements_met': reduction >= 15
            },
            'un_sdg_certified': {
                'eligible': reduction >= 50,
                'confidence': 'high' if reduction >= 50 else 'low',
                'requirements_met': reduction >= 50
            }
        }

        overall_readiness = sum(1 for g in grants.values() if g['eligible']) / len(grants) * 100

        return {
            'overall_readiness_percentage': round(overall_readiness, 1),
            'grant_programs': grants,
            'recommended_applications': [
                name for name, details in grants.items() if details['eligible']
            ]
        }

    async def export_grant_report(
        self,
        db: AsyncSession,
        tenant_id: UUID,
        grant_type: str = 'general',
        start_date: Optional[datetime] = None,
        end_date: Optional[datetime] = None
    ) -> Dict[str, Any]:
        """
        Generate export-ready report for grant applications
        Formats data according to common grant application requirements
        """
        try:
            metrics = await self.get_sustainability_metrics(
                db, tenant_id, start_date, end_date
            )

            # Format for grant applications
            report = {
                'report_metadata': {
                    'generated_at': datetime.now().isoformat(),
                    'report_type': grant_type,
                    'period': metrics['period'],
                    'tenant_id': str(tenant_id)
                },
                'executive_summary': {
                    'total_waste_reduced_kg': metrics['waste_metrics']['total_waste_kg'],
                    'waste_reduction_percentage': metrics['sdg_compliance']['sdg_12_3']['reduction_achieved'],
                    'co2_emissions_avoided_kg': metrics['environmental_impact']['co2_emissions']['kg'],
                    'financial_savings_eur': metrics['financial_impact']['waste_cost_eur'],
                    'sdg_compliance_status': metrics['sdg_compliance']['sdg_12_3']['status_label']
                },
                'detailed_metrics': metrics,
                'certifications': {
                    'sdg_12_3_compliant': metrics['sdg_compliance']['certification_ready'],
                    'grant_programs_eligible': metrics['grant_readiness']['recommended_applications']
                },
                'supporting_data': {
                    'baseline_comparison': {
                        'baseline': metrics['sdg_compliance']['sdg_12_3']['baseline_waste_percentage'],
                        'current': metrics['sdg_compliance']['sdg_12_3']['current_waste_percentage'],
                        'improvement': metrics['sdg_compliance']['sdg_12_3']['reduction_achieved']
                    },
                    'environmental_benefits': metrics['environmental_impact'],
                    'financial_benefits': metrics['financial_impact']
                }
            }

            return report

        except Exception as e:
            logger.error("Failed to generate grant report", error=str(e))
            raise
@@ -126,6 +126,27 @@ async def create_stock_batches_for_ingredient(
     stocks = []
     num_batches = random.randint(1, 2)  # Reduced from 3-5 for faster demo loading

+    # Calculate target total stock for this ingredient.
+    # Use 40-80% of max_stock_level to allow for realistic variation.
+    # If max_stock_level is not set, use reorder_point * 3 as a reasonable target.
+    if ingredient.max_stock_level:
+        target_total_stock = float(ingredient.max_stock_level) * random.uniform(0.4, 0.8)
+    else:
+        target_total_stock = float(ingredient.reorder_point or 50.0) * 3.0
+
+    # Distribute total stock across batches
+    batch_quantities = []
+    remaining = target_total_stock
+    for i in range(num_batches):
+        if i == num_batches - 1:
+            # Last batch gets whatever is remaining
+            batch_quantities.append(remaining)
+        else:
+            # Earlier batches get a random portion of remaining
+            portion = remaining * random.uniform(0.3, 0.7)
+            batch_quantities.append(portion)
+            remaining -= portion
+
     for i in range(num_batches):
         # Calculate expiration days offset
         days_offset = calculate_expiration_distribution()
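The split loop guarantees the batch quantities sum to the target, because each non-final batch takes a fraction of what is left and the final batch takes the remainder. An illustrative check (numbers are arbitrary):

    import random
    random.seed(1)

    target = 100.0
    remaining = target
    quantities = []
    for i in range(2):  # num_batches = 2
        if i == 1:
            quantities.append(remaining)  # last batch takes the rest
        else:
            portion = remaining * random.uniform(0.3, 0.7)
            quantities.append(portion)
            remaining -= portion

    assert abs(sum(quantities) - target) < 1e-9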
@@ -146,17 +167,11 @@ async def create_stock_batches_for_ingredient(
         quality_status = "good"
         is_available = True

-        # Generate quantities
-        if ingredient.unit_of_measure.value in ['kg', 'l']:
-            current_quantity = round(random.uniform(5.0, 50.0), 2)
-            reserved_quantity = round(random.uniform(0.0, current_quantity * 0.3), 2) if is_available else 0.0
-        elif ingredient.unit_of_measure.value in ['g', 'ml']:
-            current_quantity = round(random.uniform(500.0, 5000.0), 2)
-            reserved_quantity = round(random.uniform(0.0, current_quantity * 0.3), 2) if is_available else 0.0
-        else:  # units, pieces, etc.
-            current_quantity = float(random.randint(10, 200))
-            reserved_quantity = float(random.randint(0, int(current_quantity * 0.3))) if is_available else 0.0
+        # Use pre-calculated batch quantity
+        current_quantity = round(batch_quantities[i], 2)
+
+        # Reserve 0-30% of current quantity if available
+        reserved_quantity = round(random.uniform(0.0, current_quantity * 0.3), 2) if is_available else 0.0
         available_quantity = current_quantity - reserved_quantity

         # Calculate costs with variation

@@ -18,8 +18,6 @@ from app.models.order import CustomerOrder, OrderItem
 from app.models.procurement import ProcurementPlan, ProcurementRequirement
 from app.models.customer import Customer
 from shared.utils.demo_dates import adjust_date_for_demo, BASE_REFERENCE_DATE
-from shared.utils.alert_generator import generate_order_alerts
-from shared.messaging.rabbitmq import RabbitMQClient

 logger = structlog.get_logger()
 router = APIRouter(prefix="/internal/demo", tags=["internal"])
@@ -383,44 +381,15 @@ async def clone_demo_data(
             db.add(new_req)
             stats["procurement_requirements"] += 1

-        # Commit cloned data first
+        # Commit cloned data
         await db.commit()

-        # Generate order alerts (urgent, delayed, upcoming deliveries) with RabbitMQ publishing
-        rabbitmq_client = None
-        try:
-            # Initialize RabbitMQ client for alert publishing
-            rabbitmq_host = os.getenv("RABBITMQ_HOST", "rabbitmq-service")
-            rabbitmq_user = os.getenv("RABBITMQ_USER", "bakery")
-            rabbitmq_password = os.getenv("RABBITMQ_PASSWORD", "forecast123")
-            rabbitmq_port = os.getenv("RABBITMQ_PORT", "5672")
-            rabbitmq_vhost = os.getenv("RABBITMQ_VHOST", "/")
-            rabbitmq_url = f"amqp://{rabbitmq_user}:{rabbitmq_password}@{rabbitmq_host}:{rabbitmq_port}{rabbitmq_vhost}"
-
-            rabbitmq_client = RabbitMQClient(rabbitmq_url, service_name="orders")
-            await rabbitmq_client.connect()
-
-            # Generate alerts and publish to RabbitMQ
-            alerts_count = await generate_order_alerts(
-                db,
-                virtual_uuid,
-                session_time,
-                rabbitmq_client=rabbitmq_client
-            )
-            stats["alerts_generated"] += alerts_count
-            await db.commit()
-            logger.info(f"Generated {alerts_count} order alerts")
-        except Exception as alert_error:
-            logger.warning(f"Alert generation failed: {alert_error}", exc_info=True)
-        finally:
-            # Clean up RabbitMQ connection
-            if rabbitmq_client:
-                try:
-                    await rabbitmq_client.disconnect()
-                except Exception as cleanup_error:
-                    logger.warning(f"Error disconnecting RabbitMQ: {cleanup_error}")
-
-        total_records = sum(stats.values())
+        # NOTE: Alert generation removed - alerts are now generated automatically by the
+        # respective alert services which run scheduled checks at appropriate intervals.
+        # This eliminates duplicate alerts and provides a more realistic demo experience.
+        stats["alerts_generated"] = 0
+
+        total_records = stats["customers"] + stats["customer_orders"] + stats["order_line_items"] + stats["procurement_plans"] + stats["procurement_requirements"]
         duration_ms = int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000)

         logger.info(

@@ -13,6 +13,8 @@ from shared.auth.decorators import get_current_user_dep
 from shared.auth.access_control import require_user_role, admin_role_required
 from shared.routing import RouteBuilder
 from shared.security import create_audit_logger, AuditSeverity, AuditAction
+from app.services.pos_config_service import POSConfigurationService
+from app.schemas.pos_config import POSConfigurationListResponse

 router = APIRouter()
 logger = structlog.get_logger()
@@ -22,23 +24,41 @@ route_builder = RouteBuilder('pos')

 @router.get(
     route_builder.build_base_route("configurations"),
-    response_model=dict
+    response_model=POSConfigurationListResponse
 )
 @require_user_role(['viewer', 'member', 'admin', 'owner'])
 async def list_pos_configurations(
     tenant_id: UUID = Path(...),
     pos_system: Optional[str] = Query(None),
     is_active: Optional[bool] = Query(None),
     skip: int = Query(0, ge=0),
     limit: int = Query(100, ge=1, le=100),
     current_user: dict = Depends(get_current_user_dep),
     db=Depends(get_db)
 ):
     """List all POS configurations for a tenant"""
     try:
-        return {
-            "configurations": [],
-            "total": 0,
-            "supported_systems": ["square", "toast", "lightspeed"]
-        }
+        service = POSConfigurationService()
+
+        configurations = await service.get_configurations_by_tenant(
+            tenant_id=tenant_id,
+            pos_system=pos_system,
+            is_active=is_active,
+            skip=skip,
+            limit=limit
+        )
+
+        total = await service.count_configurations_by_tenant(
+            tenant_id=tenant_id,
+            pos_system=pos_system,
+            is_active=is_active
+        )
+
+        return POSConfigurationListResponse(
+            configurations=configurations,
+            total=total,
+            supported_systems=["square", "toast", "lightspeed"]
+        )
     except Exception as e:
         logger.error("Failed to list POS configurations", error=str(e), tenant_id=tenant_id)
         raise HTTPException(status_code=500, detail=f"Failed to list configurations: {str(e)}")

@@ -14,6 +14,8 @@ from app.core.database import get_db
|
||||
from shared.auth.decorators import get_current_user_dep
|
||||
from shared.auth.access_control import require_user_role, admin_role_required
|
||||
from shared.routing import RouteBuilder
|
||||
from app.services.pos_transaction_service import POSTransactionService
|
||||
from app.services.pos_config_service import POSConfigurationService
|
||||
|
||||
router = APIRouter()
|
||||
logger = structlog.get_logger()
|
||||
@@ -74,15 +76,33 @@ async def get_sync_status(
|
||||
):
|
||||
"""Get synchronization status and recent sync history"""
|
||||
try:
|
||||
transaction_service = POSTransactionService()
|
||||
|
||||
# Get sync metrics from transaction service
|
||||
sync_metrics = await transaction_service.get_sync_metrics(tenant_id)
|
||||
|
||||
# Get last successful sync time
|
||||
sync_status = sync_metrics["sync_status"]
|
||||
last_successful_sync = sync_status.get("last_sync_at")
|
||||
|
||||
# Calculate sync success rate
|
||||
total = sync_metrics["total_transactions"]
|
||||
synced = sync_status.get("synced", 0)
|
||||
success_rate = (synced / total * 100) if total > 0 else 100.0
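        # Illustrative: synced=190 of total=200 -> success_rate == 95.0;
        # with no transactions at all the rate defaults to 100.0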

        return {
            "current_sync": None,
            "last_successful_sync": None,
            "recent_syncs": [],
            "last_successful_sync": last_successful_sync.isoformat() if last_successful_sync else None,
            "recent_syncs": [],  # Could be enhanced with actual sync history
            "sync_health": {
                "status": "healthy",
                "success_rate": 95.5,
                "average_duration_minutes": 3.2,
                "last_error": None
                "status": "healthy" if success_rate > 90 else "degraded" if success_rate > 70 else "unhealthy",
                "success_rate": round(success_rate, 2),
                "average_duration_minutes": 3.2,  # Placeholder - could calculate from actual data
                "last_error": None,
                "total_transactions": total,
                "synced_count": synced,
                "pending_count": sync_status.get("pending", 0),
                "failed_count": sync_status.get("failed", 0)
            }
        }
    except Exception as e:
@@ -159,12 +179,35 @@ async def test_pos_connection(
):
    """Test connection to POS system (Admin/Owner only)"""
    try:
        config_service = POSConfigurationService()

        # Get the configuration to verify it exists
        configurations = await config_service.get_configurations_by_tenant(
            tenant_id=tenant_id,
            skip=0,
            limit=100
        )

        config = next((c for c in configurations if str(c.id) == str(config_id)), None)

        if not config:
            raise HTTPException(status_code=404, detail="Configuration not found")

        # For demo purposes, we assume connection is successful if config exists
        # In production, this would actually test the POS API connection
        is_connected = config.is_connected and config.is_active

        return {
            "status": "success",
            "message": "Connection test successful",
            "success": is_connected,
            "status": "success" if is_connected else "failed",
            "message": f"Connection test {'successful' if is_connected else 'failed'} for {config.pos_system}",
            "tested_at": datetime.utcnow().isoformat(),
            "config_id": str(config_id)
            "config_id": str(config_id),
            "pos_system": config.pos_system,
            "health_status": config.health_status
        }
    except HTTPException:
        raise
    except Exception as e:
        logger.error("Failed to test POS connection", error=str(e),
                     tenant_id=tenant_id, config_id=config_id)

@@ -4,15 +4,22 @@ ATOMIC layer - Basic CRUD operations for POS transactions
"""

from fastapi import APIRouter, Depends, HTTPException, Path, Query
from typing import Optional, Dict, Any
from typing import Optional
from uuid import UUID
from datetime import datetime
from decimal import Decimal
import structlog

from app.core.database import get_db
from shared.auth.decorators import get_current_user_dep
from shared.auth.access_control import require_user_role
from shared.routing import RouteBuilder
from app.services.pos_transaction_service import POSTransactionService
from app.schemas.pos_transaction import (
    POSTransactionResponse,
    POSTransactionListResponse,
    POSTransactionDashboardSummary
)

router = APIRouter()
logger = structlog.get_logger()
@@ -21,7 +28,7 @@ route_builder = RouteBuilder('pos')

@router.get(
    route_builder.build_base_route("transactions"),
    response_model=dict
    response_model=POSTransactionListResponse
)
@require_user_role(['viewer', 'member', 'admin', 'owner'])
async def list_pos_transactions(
@@ -38,20 +45,46 @@ async def list_pos_transactions(
):
    """List POS transactions for a tenant"""
    try:
        return {
            "transactions": [],
            "total": 0,
            "has_more": False,
            "summary": {
                "total_amount": 0,
                "transaction_count": 0,
                "sync_status": {
                    "synced": 0,
                    "pending": 0,
                    "failed": 0
                }
        service = POSTransactionService()

        transactions = await service.get_transactions_by_tenant(
            tenant_id=tenant_id,
            pos_system=pos_system,
            start_date=start_date,
            end_date=end_date,
            status=status,
            is_synced=is_synced,
            skip=offset,
            limit=limit
        )

        total = await service.count_transactions_by_tenant(
            tenant_id=tenant_id,
            pos_system=pos_system,
            start_date=start_date,
            end_date=end_date,
            status=status,
            is_synced=is_synced
        )

        # Get sync metrics for summary
        sync_metrics = await service.get_sync_metrics(tenant_id)

        # Calculate summary
        total_amount = sum(float(t.total_amount) for t in transactions if t.status == "completed")

        has_more = (offset + limit) < total
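        # Illustrative: offset=50, limit=50, total=120 -> has_more is True (another page exists)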

        return POSTransactionListResponse(
            transactions=transactions,
            total=total,
            has_more=has_more,
            summary={
                "total_amount": total_amount,
                "transaction_count": len(transactions),
                "sync_status": sync_metrics["sync_status"]
            }
        )
    except Exception as e:
        logger.error("Failed to list POS transactions", error=str(e), tenant_id=tenant_id)
        raise HTTPException(status_code=500, detail=f"Failed to list transactions: {str(e)}")
@@ -59,7 +92,7 @@ async def list_pos_transactions(

@router.get(
    route_builder.build_resource_detail_route("transactions", "transaction_id"),
    response_model=dict
    response_model=POSTransactionResponse
)
@require_user_role(['viewer', 'member', 'admin', 'owner'])
async def get_pos_transaction(
@@ -70,13 +103,46 @@ async def get_pos_transaction(
):
    """Get a specific POS transaction"""
    try:
        return {
            "id": str(transaction_id),
            "tenant_id": str(tenant_id),
            "status": "completed",
            "is_synced": True
        }
        service = POSTransactionService()

        transaction = await service.get_transaction_with_items(
            transaction_id=transaction_id,
            tenant_id=tenant_id
        )

        if not transaction:
            raise HTTPException(status_code=404, detail="Transaction not found")

        return transaction
    except HTTPException:
        raise
    except Exception as e:
        logger.error("Failed to get POS transaction", error=str(e),
                     tenant_id=tenant_id, transaction_id=transaction_id)
        raise HTTPException(status_code=500, detail=f"Failed to get transaction: {str(e)}")


@router.get(
    route_builder.build_operations_route("transactions-dashboard"),
    response_model=POSTransactionDashboardSummary
)
@require_user_role(['viewer', 'member', 'admin', 'owner'])
async def get_transactions_dashboard(
    tenant_id: UUID = Path(...),
    current_user: dict = Depends(get_current_user_dep),
    db=Depends(get_db)
):
    """Get dashboard summary for POS transactions"""
    try:
        service = POSTransactionService()

        summary = await service.get_dashboard_summary(tenant_id)

        logger.info("Transactions dashboard retrieved",
                    tenant_id=str(tenant_id),
                    total_today=summary.total_transactions_today)

        return summary
    except Exception as e:
        logger.error("Failed to get transactions dashboard", error=str(e), tenant_id=tenant_id)
        raise HTTPException(status_code=500, detail=f"Failed to get dashboard: {str(e)}")

82
services/pos/app/repositories/pos_config_repository.py
Normal file
@@ -0,0 +1,82 @@
"""
POS Configuration Repository using Repository Pattern
"""

from typing import List, Optional, Dict, Any
from uuid import UUID
from sqlalchemy import select, and_, or_
from sqlalchemy.ext.asyncio import AsyncSession
import structlog

from app.models.pos_config import POSConfiguration
from shared.database.repository import BaseRepository

logger = structlog.get_logger()


class POSConfigurationRepository(BaseRepository[POSConfiguration, dict, dict]):
    """Repository for POS configuration operations"""

    def __init__(self, session: AsyncSession):
        super().__init__(POSConfiguration, session)

    async def get_configurations_by_tenant(
        self,
        tenant_id: UUID,
        pos_system: Optional[str] = None,
        is_active: Optional[bool] = None,
        skip: int = 0,
        limit: int = 100
    ) -> List[POSConfiguration]:
        """Get POS configurations for a specific tenant with optional filters"""
        try:
            query = select(self.model).where(self.model.tenant_id == tenant_id)

            # Apply filters
            conditions = []
            if pos_system:
                conditions.append(self.model.pos_system == pos_system)
            if is_active is not None:
                conditions.append(self.model.is_active == is_active)

            if conditions:
                query = query.where(and_(*conditions))

            query = query.offset(skip).limit(limit).order_by(self.model.created_at.desc())

            result = await self.session.execute(query)
            return result.scalars().all()

        except Exception as e:
            logger.error("Failed to get configurations by tenant", error=str(e), tenant_id=tenant_id)
            raise

    async def count_configurations_by_tenant(
        self,
        tenant_id: UUID,
        pos_system: Optional[str] = None,
        is_active: Optional[bool] = None
    ) -> int:
        """Count POS configurations for a specific tenant with optional filters"""
        try:
            from sqlalchemy import func

            query = select(func.count(self.model.id)).where(self.model.tenant_id == tenant_id)

            # Apply filters
            conditions = []
            if pos_system:
                conditions.append(self.model.pos_system == pos_system)
            if is_active is not None:
                conditions.append(self.model.is_active == is_active)

            if conditions:
                query = query.where(and_(*conditions))

            result = await self.session.execute(query)
            count = result.scalar() or 0
            return count

        except Exception as e:
            logger.error("Failed to count configurations by tenant", error=str(e), tenant_id=tenant_id)
            raise

113
services/pos/app/repositories/pos_transaction_item_repository.py
Normal file
@@ -0,0 +1,113 @@
"""
POS Transaction Item Repository using Repository Pattern
"""

from typing import List, Optional
from uuid import UUID
from sqlalchemy import select, and_
from sqlalchemy.ext.asyncio import AsyncSession
import structlog

from app.models.pos_transaction import POSTransactionItem
from shared.database.repository import BaseRepository

logger = structlog.get_logger()


class POSTransactionItemRepository(BaseRepository[POSTransactionItem, dict, dict]):
    """Repository for POS transaction item operations"""

    def __init__(self, session: AsyncSession):
        super().__init__(POSTransactionItem, session)

    async def get_items_by_transaction(
        self,
        transaction_id: UUID
    ) -> List[POSTransactionItem]:
        """Get all items for a transaction"""
        try:
            query = select(POSTransactionItem).where(
                POSTransactionItem.transaction_id == transaction_id
            ).order_by(POSTransactionItem.created_at)

            result = await self.session.execute(query)
            return result.scalars().all()

        except Exception as e:
            logger.error("Failed to get transaction items",
                         transaction_id=str(transaction_id),
                         error=str(e))
            raise

    async def get_items_by_product(
        self,
        tenant_id: UUID,
        product_name: str,
        skip: int = 0,
        limit: int = 100
    ) -> List[POSTransactionItem]:
        """Get all transaction items for a specific product"""
        try:
            query = select(POSTransactionItem).where(
                and_(
                    POSTransactionItem.tenant_id == tenant_id,
                    POSTransactionItem.product_name.ilike(f"%{product_name}%")
                )
            ).order_by(POSTransactionItem.created_at.desc()).offset(skip).limit(limit)

            result = await self.session.execute(query)
            return result.scalars().all()

        except Exception as e:
            logger.error("Failed to get items by product",
                         product_name=product_name,
                         error=str(e))
            raise

    async def get_items_by_sku(
        self,
        tenant_id: UUID,
        sku: str
    ) -> List[POSTransactionItem]:
        """Get all transaction items for a specific SKU"""
        try:
            query = select(POSTransactionItem).where(
                and_(
                    POSTransactionItem.tenant_id == tenant_id,
                    POSTransactionItem.sku == sku
                )
            ).order_by(POSTransactionItem.created_at.desc())

            result = await self.session.execute(query)
            return result.scalars().all()

        except Exception as e:
            logger.error("Failed to get items by SKU",
                         sku=sku,
                         error=str(e))
            raise

    async def get_items_by_category(
        self,
        tenant_id: UUID,
        category: str,
        skip: int = 0,
        limit: int = 100
    ) -> List[POSTransactionItem]:
        """Get all transaction items for a specific category"""
        try:
            query = select(POSTransactionItem).where(
                and_(
                    POSTransactionItem.tenant_id == tenant_id,
                    POSTransactionItem.product_category == category
                )
            ).order_by(POSTransactionItem.created_at.desc()).offset(skip).limit(limit)

            result = await self.session.execute(query)
            return result.scalars().all()

        except Exception as e:
            logger.error("Failed to get items by category",
                         category=category,
                         error=str(e))
            raise

362
services/pos/app/repositories/pos_transaction_repository.py
Normal file
@@ -0,0 +1,362 @@
"""
POS Transaction Repository using Repository Pattern
"""

from typing import List, Optional, Dict, Any
from uuid import UUID
from datetime import datetime, date, timedelta
from sqlalchemy import select, func, and_, or_, desc
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.orm import selectinload
import structlog

from app.models.pos_transaction import POSTransaction, POSTransactionItem
from shared.database.repository import BaseRepository

logger = structlog.get_logger()


class POSTransactionRepository(BaseRepository[POSTransaction, dict, dict]):
    """Repository for POS transaction operations"""

    def __init__(self, session: AsyncSession):
        super().__init__(POSTransaction, session)

    async def get_transactions_by_tenant(
        self,
        tenant_id: UUID,
        pos_system: Optional[str] = None,
        start_date: Optional[datetime] = None,
        end_date: Optional[datetime] = None,
        status: Optional[str] = None,
        is_synced: Optional[bool] = None,
        skip: int = 0,
        limit: int = 50
    ) -> List[POSTransaction]:
        """Get POS transactions for a specific tenant with optional filters"""
        try:
            query = select(self.model).options(
                selectinload(POSTransaction.items)
            ).where(self.model.tenant_id == tenant_id)

            # Apply filters
            conditions = []
            if pos_system:
                conditions.append(self.model.pos_system == pos_system)
            if status:
                conditions.append(self.model.status == status)
            if is_synced is not None:
                conditions.append(self.model.is_synced_to_sales == is_synced)
            if start_date:
                conditions.append(self.model.transaction_date >= start_date)
            if end_date:
                conditions.append(self.model.transaction_date <= end_date)

            if conditions:
                query = query.where(and_(*conditions))

            query = query.order_by(desc(self.model.transaction_date)).offset(skip).limit(limit)

            result = await self.session.execute(query)
            return result.scalars().all()

        except Exception as e:
            logger.error("Failed to get transactions by tenant", error=str(e), tenant_id=tenant_id)
            raise

    async def count_transactions_by_tenant(
        self,
        tenant_id: UUID,
        pos_system: Optional[str] = None,
        start_date: Optional[datetime] = None,
        end_date: Optional[datetime] = None,
        status: Optional[str] = None,
        is_synced: Optional[bool] = None
    ) -> int:
        """Count POS transactions for a specific tenant with optional filters"""
        try:
            query = select(func.count(self.model.id)).where(self.model.tenant_id == tenant_id)

            # Apply filters
            conditions = []
            if pos_system:
                conditions.append(self.model.pos_system == pos_system)
            if status:
                conditions.append(self.model.status == status)
            if is_synced is not None:
                conditions.append(self.model.is_synced_to_sales == is_synced)
            if start_date:
                conditions.append(self.model.transaction_date >= start_date)
            if end_date:
                conditions.append(self.model.transaction_date <= end_date)

            if conditions:
                query = query.where(and_(*conditions))

            result = await self.session.execute(query)
            count = result.scalar() or 0
            return count

        except Exception as e:
            logger.error("Failed to count transactions by tenant", error=str(e), tenant_id=tenant_id)
            raise

    async def get_transaction_with_items(
        self,
        transaction_id: UUID,
        tenant_id: UUID
    ) -> Optional[POSTransaction]:
        """Get transaction with all its items"""
        try:
            query = select(POSTransaction).options(
                selectinload(POSTransaction.items)
            ).where(
                and_(
                    POSTransaction.id == transaction_id,
                    POSTransaction.tenant_id == tenant_id
                )
            )
            result = await self.session.execute(query)
            return result.scalar_one_or_none()
        except Exception as e:
            logger.error("Failed to get transaction with items",
                         transaction_id=str(transaction_id),
                         error=str(e))
            raise

    async def get_transactions_by_pos_config(
        self,
        pos_config_id: UUID,
        skip: int = 0,
        limit: int = 50
    ) -> List[POSTransaction]:
        """Get transactions for a specific POS configuration"""
        try:
            query = select(POSTransaction).options(
                selectinload(POSTransaction.items)
            ).where(
                POSTransaction.pos_config_id == pos_config_id
            ).order_by(desc(POSTransaction.transaction_date)).offset(skip).limit(limit)

            result = await self.session.execute(query)
            return result.scalars().all()
        except Exception as e:
            logger.error("Failed to get transactions by pos config",
                         pos_config_id=str(pos_config_id),
                         error=str(e))
            raise

    async def get_transactions_by_date_range(
        self,
        tenant_id: UUID,
        start_date: date,
        end_date: date,
        skip: int = 0,
        limit: int = 100
    ) -> List[POSTransaction]:
        """Get transactions within date range"""
        try:
            start_datetime = datetime.combine(start_date, datetime.min.time())
            end_datetime = datetime.combine(end_date, datetime.max.time())
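            # Inclusive day bounds, e.g. 2024-01-01..2024-01-31 becomes
            # 2024-01-01 00:00:00 through 2024-01-31 23:59:59.999999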

            query = select(POSTransaction).options(
                selectinload(POSTransaction.items)
            ).where(
                and_(
                    POSTransaction.tenant_id == tenant_id,
                    POSTransaction.transaction_date >= start_datetime,
                    POSTransaction.transaction_date <= end_datetime
                )
            ).order_by(desc(POSTransaction.transaction_date)).offset(skip).limit(limit)

            result = await self.session.execute(query)
            return result.scalars().all()
        except Exception as e:
            logger.error("Failed to get transactions by date range",
                         start_date=str(start_date),
                         end_date=str(end_date),
                         error=str(e))
            raise

    async def get_dashboard_metrics(
        self,
        tenant_id: UUID
    ) -> Dict[str, Any]:
        """Get dashboard metrics for transactions"""
        try:
            # Today's metrics
            today = datetime.now().date()
            today_start = datetime.combine(today, datetime.min.time())
            today_end = datetime.combine(today, datetime.max.time())

            week_start = today - timedelta(days=today.weekday())
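            # Monday of the current week: weekday() is 0 for Monday, so e.g. a
            # Wednesday (weekday() == 2) maps back two days to Monday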
            week_start_datetime = datetime.combine(week_start, datetime.min.time())

            month_start = today.replace(day=1)
            month_start_datetime = datetime.combine(month_start, datetime.min.time())

            # Transaction counts by period
            transactions_today = await self.session.execute(
                select(func.count()).select_from(POSTransaction).where(
                    and_(
                        POSTransaction.tenant_id == tenant_id,
                        POSTransaction.transaction_date >= today_start,
                        POSTransaction.transaction_date <= today_end,
                        POSTransaction.status == "completed"
                    )
                )
            )

            transactions_week = await self.session.execute(
                select(func.count()).select_from(POSTransaction).where(
                    and_(
                        POSTransaction.tenant_id == tenant_id,
                        POSTransaction.transaction_date >= week_start_datetime,
                        POSTransaction.status == "completed"
                    )
                )
            )

            transactions_month = await self.session.execute(
                select(func.count()).select_from(POSTransaction).where(
                    and_(
                        POSTransaction.tenant_id == tenant_id,
                        POSTransaction.transaction_date >= month_start_datetime,
                        POSTransaction.status == "completed"
                    )
                )
            )

            # Revenue by period
            revenue_today = await self.session.execute(
                select(func.coalesce(func.sum(POSTransaction.total_amount), 0)).where(
                    and_(
                        POSTransaction.tenant_id == tenant_id,
                        POSTransaction.transaction_date >= today_start,
                        POSTransaction.transaction_date <= today_end,
                        POSTransaction.status == "completed"
                    )
                )
            )

            revenue_week = await self.session.execute(
                select(func.coalesce(func.sum(POSTransaction.total_amount), 0)).where(
                    and_(
                        POSTransaction.tenant_id == tenant_id,
                        POSTransaction.transaction_date >= week_start_datetime,
                        POSTransaction.status == "completed"
                    )
                )
            )

            revenue_month = await self.session.execute(
                select(func.coalesce(func.sum(POSTransaction.total_amount), 0)).where(
                    and_(
                        POSTransaction.tenant_id == tenant_id,
                        POSTransaction.transaction_date >= month_start_datetime,
                        POSTransaction.status == "completed"
                    )
                )
            )

            # Status breakdown
            status_counts = await self.session.execute(
                select(POSTransaction.status, func.count()).select_from(POSTransaction).where(
                    POSTransaction.tenant_id == tenant_id
                ).group_by(POSTransaction.status)
            )

            status_breakdown = {status: count for status, count in status_counts.fetchall()}
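            # Illustrative shape: {"completed": 120, "pending": 4, "refunded": 2}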

            # Payment method breakdown
            payment_counts = await self.session.execute(
                select(POSTransaction.payment_method, func.count()).select_from(POSTransaction).where(
                    and_(
                        POSTransaction.tenant_id == tenant_id,
                        POSTransaction.status == "completed"
                    )
                ).group_by(POSTransaction.payment_method)
            )

            payment_breakdown = {method: count for method, count in payment_counts.fetchall()}

            # Average transaction value
            avg_transaction_value = await self.session.execute(
                select(func.coalesce(func.avg(POSTransaction.total_amount), 0)).where(
                    and_(
                        POSTransaction.tenant_id == tenant_id,
                        POSTransaction.status == "completed"
                    )
                )
            )

            return {
                "total_transactions_today": transactions_today.scalar(),
                "total_transactions_this_week": transactions_week.scalar(),
                "total_transactions_this_month": transactions_month.scalar(),
                "revenue_today": float(revenue_today.scalar()),
                "revenue_this_week": float(revenue_week.scalar()),
                "revenue_this_month": float(revenue_month.scalar()),
                "status_breakdown": status_breakdown,
                "payment_method_breakdown": payment_breakdown,
                "average_transaction_value": float(avg_transaction_value.scalar())
            }
        except Exception as e:
            logger.error("Failed to get dashboard metrics", error=str(e), tenant_id=tenant_id)
            raise

    async def get_sync_status_summary(
        self,
        tenant_id: UUID
    ) -> Dict[str, Any]:
        """Get sync status summary for transactions"""
        try:
            # Count synced vs unsynced
            synced_count = await self.session.execute(
                select(func.count()).select_from(POSTransaction).where(
                    and_(
                        POSTransaction.tenant_id == tenant_id,
                        POSTransaction.is_synced_to_sales == True
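                        # '== True' is intentional here: it builds a SQL filter
                        # expression on the column ('is True' would not)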
                    )
                )
            )

            pending_count = await self.session.execute(
                select(func.count()).select_from(POSTransaction).where(
                    and_(
                        POSTransaction.tenant_id == tenant_id,
                        POSTransaction.is_synced_to_sales == False,
                        POSTransaction.sync_error.is_(None)
                    )
                )
            )

            failed_count = await self.session.execute(
                select(func.count()).select_from(POSTransaction).where(
                    and_(
                        POSTransaction.tenant_id == tenant_id,
                        POSTransaction.is_synced_to_sales == False,
                        POSTransaction.sync_error.isnot(None)
                    )
                )
            )

            # Get last sync time
            last_sync = await self.session.execute(
                select(func.max(POSTransaction.sync_completed_at)).where(
                    and_(
                        POSTransaction.tenant_id == tenant_id,
                        POSTransaction.is_synced_to_sales == True
                    )
                )
            )

            return {
                "synced": synced_count.scalar(),
                "pending": pending_count.scalar(),
                "failed": failed_count.scalar(),
                "last_sync_at": last_sync.scalar()
            }
        except Exception as e:
            logger.error("Failed to get sync status summary", error=str(e), tenant_id=tenant_id)
            raise

95
services/pos/app/schemas/pos_config.py
Normal file
@@ -0,0 +1,95 @@
"""
Pydantic schemas for POS configuration API requests and responses
"""

from typing import Optional, List, Dict, Any
from datetime import datetime
from pydantic import BaseModel, Field
from enum import Enum


class POSProvider(str, Enum):
    """POS provider types"""
    SQUARE = "square"
    TOAST = "toast"
    LIGHTSPEED = "lightspeed"


class POSConfigurationBase(BaseModel):
    """Base schema for POS configurations"""

    class Config:
        from_attributes = True
        use_enum_values = True
        json_encoders = {
            datetime: lambda v: v.isoformat() if v else None
        }


class POSConfigurationResponse(POSConfigurationBase):
    """Schema for POS configuration API responses"""
    id: str
    tenant_id: str
    pos_system: POSProvider
    provider_name: str
    is_active: bool
    is_connected: bool
    webhook_url: Optional[str] = None
    webhook_secret: Optional[str] = None
    environment: str = "sandbox"
    location_id: Optional[str] = None
    merchant_id: Optional[str] = None
    sync_enabled: bool = True
    sync_interval_minutes: str = "5"
    auto_sync_products: bool = True
    auto_sync_transactions: bool = True
    last_sync_at: Optional[datetime] = None
    last_successful_sync_at: Optional[datetime] = None
    last_sync_status: Optional[str] = None
    last_sync_message: Optional[str] = None
    provider_settings: Optional[Dict[str, Any]] = None
    last_health_check_at: Optional[datetime] = None
    health_status: str = "unknown"
    health_message: Optional[str] = None
    created_at: datetime
    updated_at: datetime
    notes: Optional[str] = None

    @classmethod
    def from_orm(cls, obj):
        """Convert ORM object to schema with proper UUID handling"""
        return cls(
            id=str(obj.id),
            tenant_id=str(obj.tenant_id),
            pos_system=obj.pos_system,
            provider_name=obj.provider_name,
            is_active=obj.is_active,
            is_connected=obj.is_connected,
            webhook_url=obj.webhook_url,
            webhook_secret=obj.webhook_secret,
            environment=obj.environment,
            location_id=obj.location_id,
            merchant_id=obj.merchant_id,
            sync_enabled=obj.sync_enabled,
            sync_interval_minutes=obj.sync_interval_minutes,
            auto_sync_products=obj.auto_sync_products,
            auto_sync_transactions=obj.auto_sync_transactions,
            last_sync_at=obj.last_sync_at,
            last_successful_sync_at=obj.last_successful_sync_at,
            last_sync_status=obj.last_sync_status,
            last_sync_message=obj.last_sync_message,
            provider_settings=obj.provider_settings,
            last_health_check_at=obj.last_health_check_at,
            health_status=obj.health_status,
            health_message=obj.health_message,
            created_at=obj.created_at,
            updated_at=obj.updated_at,
            notes=obj.notes
        )


class POSConfigurationListResponse(BaseModel):
    """Schema for POS configuration list API response"""
    configurations: List[POSConfigurationResponse]
    total: int
    supported_systems: List[str] = ["square", "toast", "lightspeed"]

248
services/pos/app/schemas/pos_transaction.py
Normal file
@@ -0,0 +1,248 @@
"""
Pydantic schemas for POS transaction API requests and responses
"""

from typing import Optional, List, Dict, Any
from datetime import datetime
from decimal import Decimal
from pydantic import BaseModel, Field
from enum import Enum


class TransactionType(str, Enum):
    """Transaction type enumeration"""
    SALE = "sale"
    REFUND = "refund"
    VOID = "void"
    EXCHANGE = "exchange"


class TransactionStatus(str, Enum):
    """Transaction status enumeration"""
    COMPLETED = "completed"
    PENDING = "pending"
    FAILED = "failed"
    REFUNDED = "refunded"
    VOIDED = "voided"


class PaymentMethod(str, Enum):
    """Payment method enumeration"""
    CARD = "card"
    CASH = "cash"
    DIGITAL_WALLET = "digital_wallet"
    OTHER = "other"


class OrderType(str, Enum):
    """Order type enumeration"""
    DINE_IN = "dine_in"
    TAKEOUT = "takeout"
    DELIVERY = "delivery"
    PICKUP = "pickup"


class POSTransactionItemResponse(BaseModel):
    """Schema for POS transaction item response"""
    id: str
    transaction_id: str
    tenant_id: str
    external_item_id: Optional[str] = None
    sku: Optional[str] = None
    product_name: str
    product_category: Optional[str] = None
    product_subcategory: Optional[str] = None
    quantity: Decimal
    unit_price: Decimal
    total_price: Decimal
    discount_amount: Decimal = Decimal("0")
    tax_amount: Decimal = Decimal("0")
    modifiers: Optional[Dict[str, Any]] = None
    inventory_product_id: Optional[str] = None
    is_mapped_to_inventory: bool = False
    is_synced_to_sales: bool = False
    created_at: datetime
    updated_at: datetime

    class Config:
        from_attributes = True
        use_enum_values = True
        json_encoders = {
            datetime: lambda v: v.isoformat() if v else None,
            Decimal: lambda v: float(v) if v else 0.0
        }

    @classmethod
    def from_orm(cls, obj):
        """Convert ORM object to schema with proper UUID and Decimal handling"""
        return cls(
            id=str(obj.id),
            transaction_id=str(obj.transaction_id),
            tenant_id=str(obj.tenant_id),
            external_item_id=obj.external_item_id,
            sku=obj.sku,
            product_name=obj.product_name,
            product_category=obj.product_category,
            product_subcategory=obj.product_subcategory,
            quantity=obj.quantity,
            unit_price=obj.unit_price,
            total_price=obj.total_price,
            discount_amount=obj.discount_amount,
            tax_amount=obj.tax_amount,
            modifiers=obj.modifiers,
            inventory_product_id=str(obj.inventory_product_id) if obj.inventory_product_id else None,
            is_mapped_to_inventory=obj.is_mapped_to_inventory,
            is_synced_to_sales=obj.is_synced_to_sales,
            created_at=obj.created_at,
            updated_at=obj.updated_at
        )


class POSTransactionResponse(BaseModel):
    """Schema for POS transaction response"""
    id: str
    tenant_id: str
    pos_config_id: str
    pos_system: str
    external_transaction_id: str
    external_order_id: Optional[str] = None
    transaction_type: TransactionType
    status: TransactionStatus
    subtotal: Decimal
    tax_amount: Decimal
    tip_amount: Decimal
    discount_amount: Decimal
    total_amount: Decimal
    currency: str = "EUR"
    payment_method: Optional[PaymentMethod] = None
    payment_status: Optional[str] = None
    transaction_date: datetime
    pos_created_at: datetime
    pos_updated_at: Optional[datetime] = None
    location_id: Optional[str] = None
    location_name: Optional[str] = None
    staff_id: Optional[str] = None
    staff_name: Optional[str] = None
    customer_id: Optional[str] = None
    customer_email: Optional[str] = None
    customer_phone: Optional[str] = None
    order_type: Optional[OrderType] = None
    table_number: Optional[str] = None
    receipt_number: Optional[str] = None
    is_synced_to_sales: bool = False
    sales_record_id: Optional[str] = None
    sync_attempted_at: Optional[datetime] = None
    sync_completed_at: Optional[datetime] = None
    sync_error: Optional[str] = None
    sync_retry_count: int = 0
    is_processed: bool = False
    is_duplicate: bool = False
    created_at: datetime
    updated_at: datetime
    items: List[POSTransactionItemResponse] = []

    class Config:
        from_attributes = True
        use_enum_values = True
        json_encoders = {
            datetime: lambda v: v.isoformat() if v else None,
            Decimal: lambda v: float(v) if v else 0.0
        }

    @classmethod
    def from_orm(cls, obj):
        """Convert ORM object to schema with proper UUID and Decimal handling"""
        return cls(
            id=str(obj.id),
            tenant_id=str(obj.tenant_id),
            pos_config_id=str(obj.pos_config_id),
            pos_system=obj.pos_system,
            external_transaction_id=obj.external_transaction_id,
            external_order_id=obj.external_order_id,
            transaction_type=obj.transaction_type,
            status=obj.status,
            subtotal=obj.subtotal,
            tax_amount=obj.tax_amount,
            tip_amount=obj.tip_amount,
            discount_amount=obj.discount_amount,
            total_amount=obj.total_amount,
            currency=obj.currency,
            payment_method=obj.payment_method,
            payment_status=obj.payment_status,
            transaction_date=obj.transaction_date,
            pos_created_at=obj.pos_created_at,
            pos_updated_at=obj.pos_updated_at,
            location_id=obj.location_id,
            location_name=obj.location_name,
            staff_id=obj.staff_id,
            staff_name=obj.staff_name,
            customer_id=obj.customer_id,
            customer_email=obj.customer_email,
            customer_phone=obj.customer_phone,
            order_type=obj.order_type,
            table_number=obj.table_number,
            receipt_number=obj.receipt_number,
            is_synced_to_sales=obj.is_synced_to_sales,
            sales_record_id=str(obj.sales_record_id) if obj.sales_record_id else None,
            sync_attempted_at=obj.sync_attempted_at,
            sync_completed_at=obj.sync_completed_at,
            sync_error=obj.sync_error,
            sync_retry_count=obj.sync_retry_count,
            is_processed=obj.is_processed,
            is_duplicate=obj.is_duplicate,
            created_at=obj.created_at,
            updated_at=obj.updated_at,
            items=[POSTransactionItemResponse.from_orm(item) for item in obj.items] if hasattr(obj, 'items') and obj.items else []
        )


class POSTransactionSummary(BaseModel):
    """Summary information for a transaction (lightweight)"""
    id: str
    external_transaction_id: str
    transaction_date: datetime
    total_amount: Decimal
    status: TransactionStatus
    payment_method: Optional[PaymentMethod] = None
    is_synced_to_sales: bool
    item_count: int = 0

    class Config:
        from_attributes = True
        use_enum_values = True
        json_encoders = {
            datetime: lambda v: v.isoformat() if v else None,
            Decimal: lambda v: float(v) if v else 0.0
        }


class POSTransactionListResponse(BaseModel):
    """Schema for paginated transaction list response"""
    transactions: List[POSTransactionResponse]
    total: int
    has_more: bool = False
    summary: Optional[Dict[str, Any]] = None

    class Config:
        from_attributes = True


class POSTransactionDashboardSummary(BaseModel):
    """Dashboard summary for POS transactions"""
    total_transactions_today: int = 0
    total_transactions_this_week: int = 0
    total_transactions_this_month: int = 0
    revenue_today: Decimal = Decimal("0")
    revenue_this_week: Decimal = Decimal("0")
    revenue_this_month: Decimal = Decimal("0")
    average_transaction_value: Decimal = Decimal("0")
    status_breakdown: Dict[str, int] = {}
    payment_method_breakdown: Dict[str, int] = {}
    sync_status: Dict[str, Any] = {}

    class Config:
        from_attributes = True
        json_encoders = {
            Decimal: lambda v: float(v) if v else 0.0,
            datetime: lambda v: v.isoformat() if v else None
        }

76
services/pos/app/services/pos_config_service.py
Normal file
@@ -0,0 +1,76 @@
"""
POS Configuration Service - Business Logic Layer
"""

from typing import List, Optional
from uuid import UUID
import structlog

from app.repositories.pos_config_repository import POSConfigurationRepository
from app.schemas.pos_config import POSConfigurationResponse
from app.core.database import get_db_transaction

logger = structlog.get_logger()


class POSConfigurationService:
    """Service layer for POS configuration operations"""

    def __init__(self):
        pass

    async def get_configurations_by_tenant(
        self,
        tenant_id: UUID,
        pos_system: Optional[str] = None,
        is_active: Optional[bool] = None,
        skip: int = 0,
        limit: int = 100
    ) -> List[POSConfigurationResponse]:
        """Get POS configurations for a tenant with filtering"""
        try:
            async with get_db_transaction() as db:
                repository = POSConfigurationRepository(db)

                configurations = await repository.get_configurations_by_tenant(
                    tenant_id=tenant_id,
                    pos_system=pos_system,
                    is_active=is_active,
                    skip=skip,
                    limit=limit
                )

                # Convert to response schemas using from_orm
                responses = []
                for config in configurations:
                    response = POSConfigurationResponse.from_orm(config)
                    responses.append(response)

                return responses

        except Exception as e:
            logger.error("Failed to get configurations by tenant", error=str(e), tenant_id=tenant_id)
            raise

    async def count_configurations_by_tenant(
        self,
        tenant_id: UUID,
        pos_system: Optional[str] = None,
        is_active: Optional[bool] = None
    ) -> int:
        """Count POS configurations for a tenant with filtering"""
        try:
            async with get_db_transaction() as db:
                repository = POSConfigurationRepository(db)

                count = await repository.count_configurations_by_tenant(
                    tenant_id=tenant_id,
                    pos_system=pos_system,
                    is_active=is_active
                )

                return count

        except Exception as e:
            logger.error("Failed to count configurations by tenant", error=str(e), tenant_id=tenant_id)
            raise

239
services/pos/app/services/pos_transaction_service.py
Normal file
@@ -0,0 +1,239 @@
"""
POS Transaction Service - Business Logic Layer
"""

from typing import List, Optional, Dict, Any
from uuid import UUID
from datetime import datetime
from decimal import Decimal
import structlog

from app.repositories.pos_transaction_repository import POSTransactionRepository
from app.repositories.pos_transaction_item_repository import POSTransactionItemRepository
from app.schemas.pos_transaction import (
    POSTransactionResponse,
    POSTransactionDashboardSummary
)
from app.core.database import get_db_transaction

logger = structlog.get_logger()


class POSTransactionService:
    """Service layer for POS transaction operations"""

    def __init__(self):
        pass

    async def get_transactions_by_tenant(
        self,
        tenant_id: UUID,
        pos_system: Optional[str] = None,
        start_date: Optional[datetime] = None,
        end_date: Optional[datetime] = None,
        status: Optional[str] = None,
        is_synced: Optional[bool] = None,
        skip: int = 0,
        limit: int = 50
    ) -> List[POSTransactionResponse]:
        """Get POS transactions for a tenant with filtering"""
        try:
            async with get_db_transaction() as db:
                repository = POSTransactionRepository(db)

                transactions = await repository.get_transactions_by_tenant(
                    tenant_id=tenant_id,
                    pos_system=pos_system,
                    start_date=start_date,
                    end_date=end_date,
                    status=status,
                    is_synced=is_synced,
                    skip=skip,
                    limit=limit
                )

                # Convert to response schemas
                responses = []
                for transaction in transactions:
                    response = POSTransactionResponse.from_orm(transaction)
                    responses.append(response)

                return responses

        except Exception as e:
            logger.error("Failed to get transactions by tenant", error=str(e), tenant_id=tenant_id)
            raise

    async def count_transactions_by_tenant(
        self,
        tenant_id: UUID,
        pos_system: Optional[str] = None,
        start_date: Optional[datetime] = None,
        end_date: Optional[datetime] = None,
        status: Optional[str] = None,
        is_synced: Optional[bool] = None
    ) -> int:
        """Count POS transactions for a tenant with filtering"""
        try:
            async with get_db_transaction() as db:
                repository = POSTransactionRepository(db)

                count = await repository.count_transactions_by_tenant(
                    tenant_id=tenant_id,
                    pos_system=pos_system,
                    start_date=start_date,
                    end_date=end_date,
                    status=status,
                    is_synced=is_synced
                )

                return count

        except Exception as e:
            logger.error("Failed to count transactions by tenant", error=str(e), tenant_id=tenant_id)
            raise

    async def get_transaction_with_items(
        self,
        transaction_id: UUID,
        tenant_id: UUID
    ) -> Optional[POSTransactionResponse]:
        """Get transaction with all its items"""
        try:
            async with get_db_transaction() as db:
                repository = POSTransactionRepository(db)

                transaction = await repository.get_transaction_with_items(
                    transaction_id=transaction_id,
                    tenant_id=tenant_id
                )

                if not transaction:
                    return None

                return POSTransactionResponse.from_orm(transaction)

        except Exception as e:
            logger.error("Failed to get transaction with items",
                         transaction_id=str(transaction_id),
                         error=str(e))
            raise

    async def get_dashboard_summary(
        self,
        tenant_id: UUID
    ) -> POSTransactionDashboardSummary:
        """Get dashboard summary for POS transactions"""
        try:
            async with get_db_transaction() as db:
                repository = POSTransactionRepository(db)

                # Get metrics from repository
                metrics = await repository.get_dashboard_metrics(tenant_id)

                # Get sync status
                sync_status = await repository.get_sync_status_summary(tenant_id)

                # Construct dashboard summary
                return POSTransactionDashboardSummary(
                    total_transactions_today=metrics["total_transactions_today"],
                    total_transactions_this_week=metrics["total_transactions_this_week"],
                    total_transactions_this_month=metrics["total_transactions_this_month"],
                    revenue_today=Decimal(str(metrics["revenue_today"])),
                    revenue_this_week=Decimal(str(metrics["revenue_this_week"])),
                    revenue_this_month=Decimal(str(metrics["revenue_this_month"])),
                    average_transaction_value=Decimal(str(metrics["average_transaction_value"])),
                    status_breakdown=metrics["status_breakdown"],
                    payment_method_breakdown=metrics["payment_method_breakdown"],
                    sync_status=sync_status
                )

        except Exception as e:
            logger.error("Failed to get dashboard summary", error=str(e), tenant_id=tenant_id)
            raise

    async def get_sync_metrics(
        self,
        tenant_id: UUID
    ) -> Dict[str, Any]:
        """Get sync metrics for transactions"""
        try:
            async with get_db_transaction() as db:
                repository = POSTransactionRepository(db)

                sync_status = await repository.get_sync_status_summary(tenant_id)

                # Calculate sync rate
                total = sync_status["synced"] + sync_status["pending"] + sync_status["failed"]
                sync_rate = (sync_status["synced"] / total * 100) if total > 0 else 0

                return {
                    "sync_status": sync_status,
                    "sync_rate_percentage": round(sync_rate, 2),
                    "total_transactions": total
                }

        except Exception as e:
            logger.error("Failed to get sync metrics", error=str(e), tenant_id=tenant_id)
            raise

    async def calculate_transaction_analytics(
        self,
        tenant_id: UUID,
        start_date: datetime,
        end_date: datetime
    ) -> Dict[str, Any]:
        """Calculate analytics for transactions within a date range"""
        try:
            async with get_db_transaction() as db:
                repository = POSTransactionRepository(db)

                transactions = await repository.get_transactions_by_date_range(
                    tenant_id=tenant_id,
                    start_date=start_date.date(),
                    end_date=end_date.date(),
                    skip=0,
                    limit=10000  # Large limit for analytics
                )

                # Calculate analytics
                total_revenue = Decimal("0")
                total_transactions = len(transactions)
                payment_methods = {}
                order_types = {}
                hourly_distribution = {}

                for transaction in transactions:
                    if transaction.status == "completed":
                        total_revenue += transaction.total_amount

                    # Payment method breakdown
                    pm = transaction.payment_method or "unknown"
                    payment_methods[pm] = payment_methods.get(pm, 0) + 1

                    # Order type breakdown
                    ot = transaction.order_type or "unknown"
                    order_types[ot] = order_types.get(ot, 0) + 1

                    # Hourly distribution
                    hour = transaction.transaction_date.hour
                    hourly_distribution[hour] = hourly_distribution.get(hour, 0) + 1

                avg_transaction_value = (total_revenue / total_transactions) if total_transactions > 0 else Decimal("0")
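                # Illustrative: Decimal("1250.00") revenue over 50 fetched transactions -> Decimal("25.00")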

                return {
                    "period": {
                        "start_date": start_date.isoformat(),
                        "end_date": end_date.isoformat()
                    },
                    "total_revenue": float(total_revenue),
                    "total_transactions": total_transactions,
                    "average_transaction_value": float(avg_transaction_value),
                    "payment_methods": payment_methods,
                    "order_types": order_types,
                    "hourly_distribution": hourly_distribution
                }

        except Exception as e:
            logger.error("Failed to calculate transaction analytics", error=str(e), tenant_id=tenant_id)
            raise
@@ -426,3 +426,102 @@ async def get_predictive_maintenance_insights(
            status_code=500,
            detail="Failed to generate predictive maintenance insights"
        )


# ===== SUSTAINABILITY / WASTE ANALYTICS ENDPOINT =====
# Called by Inventory Service for sustainability metrics

@router.get(
    "/api/v1/tenants/{tenant_id}/production/waste-analytics",
    response_model=dict
)
async def get_waste_analytics_for_sustainability(
    tenant_id: UUID = Path(...),
    start_date: datetime = Query(..., description="Start date for waste analysis"),
    end_date: datetime = Query(..., description="End date for waste analysis"),
    production_service: ProductionService = Depends(get_production_service)
):
    """
    Get production waste analytics for sustainability tracking

    This endpoint is called by the Inventory Service's sustainability module
    to calculate environmental impact and SDG 12.3 compliance.

    Does NOT require analytics tier - this is core sustainability data.

    Returns:
    - total_production_waste: Sum of waste_quantity from all batches
    - total_defects: Sum of defect_quantity from all batches
    - total_planned: Sum of planned_quantity
    - total_actual: Sum of actual_quantity
    """
    try:
        waste_data = await production_service.get_waste_analytics(
            tenant_id,
            start_date,
            end_date
        )

        logger.info(
            "Production waste analytics retrieved for sustainability",
            tenant_id=str(tenant_id),
            total_waste=waste_data.get('total_production_waste', 0),
            start_date=start_date.isoformat(),
            end_date=end_date.isoformat()
        )

        return waste_data

    except Exception as e:
        logger.error(
            "Error getting waste analytics for sustainability",
            tenant_id=str(tenant_id),
            error=str(e)
        )
        raise HTTPException(
            status_code=500,
            detail=f"Failed to retrieve waste analytics: {str(e)}"
        )


@router.get(
    "/api/v1/tenants/{tenant_id}/production/baseline",
    response_model=dict
)
async def get_baseline_metrics(
    tenant_id: UUID = Path(...),
    production_service: ProductionService = Depends(get_production_service)
):
    """
    Get baseline production metrics from first 90 days

    Used by sustainability service to establish waste baseline
    for SDG 12.3 compliance tracking.

    Returns:
    - waste_percentage: Baseline waste percentage from first 90 days
    - total_production_kg: Total production in first 90 days
    - total_waste_kg: Total waste in first 90 days
    - period: Date range of baseline period
    """
    try:
        baseline_data = await production_service.get_baseline_metrics(tenant_id)

        logger.info(
            "Baseline metrics retrieved",
            tenant_id=str(tenant_id),
            baseline_percentage=baseline_data.get('waste_percentage', 0)
        )

        return baseline_data

    except Exception as e:
        logger.error(
            "Error getting baseline metrics",
            tenant_id=str(tenant_id),
            error=str(e)
        )
        raise HTTPException(
            status_code=500,
            detail=f"Failed to retrieve baseline metrics: {str(e)}"
        )
|
||||
|
||||
@@ -20,8 +20,6 @@ from app.models.production import (
    EquipmentStatus, EquipmentType
)
from shared.utils.demo_dates import adjust_date_for_demo, BASE_REFERENCE_DATE
from shared.utils.alert_generator import generate_equipment_alerts
from shared.messaging.rabbitmq import RabbitMQClient

logger = structlog.get_logger()
router = APIRouter(prefix="/internal/demo", tags=["internal"])
@@ -430,44 +428,18 @@ async def clone_demo_data(
                db.add(new_capacity)
                stats["production_capacity"] += 1

        # Commit cloned data first
        # Commit cloned data
        await db.commit()

        # Generate equipment maintenance and status alerts with RabbitMQ publishing
        rabbitmq_client = None
        try:
            # Initialize RabbitMQ client for alert publishing
            rabbitmq_host = os.getenv("RABBITMQ_HOST", "rabbitmq-service")
            rabbitmq_user = os.getenv("RABBITMQ_USER", "bakery")
            rabbitmq_password = os.getenv("RABBITMQ_PASSWORD", "forecast123")
            rabbitmq_port = os.getenv("RABBITMQ_PORT", "5672")
            rabbitmq_vhost = os.getenv("RABBITMQ_VHOST", "/")
            rabbitmq_url = f"amqp://{rabbitmq_user}:{rabbitmq_password}@{rabbitmq_host}:{rabbitmq_port}{rabbitmq_vhost}"

            rabbitmq_client = RabbitMQClient(rabbitmq_url, service_name="production")
            await rabbitmq_client.connect()

            # Generate alerts and publish to RabbitMQ
            alerts_count = await generate_equipment_alerts(
                db,
                virtual_uuid,
                session_time,
                rabbitmq_client=rabbitmq_client
            )
            stats["alerts_generated"] += alerts_count
            await db.commit()
            logger.info(f"Generated {alerts_count} equipment alerts")
        except Exception as alert_error:
            logger.warning(f"Alert generation failed: {alert_error}", exc_info=True)
        finally:
            # Clean up RabbitMQ connection
            if rabbitmq_client:
                try:
                    await rabbitmq_client.disconnect()
                except Exception as cleanup_error:
                    logger.warning(f"Error disconnecting RabbitMQ: {cleanup_error}")
        # NOTE: Alert generation removed - alerts are now generated automatically by the
        # production alert service which runs scheduled checks at appropriate intervals.
        # This eliminates duplicate alerts and provides a more realistic demo experience.
        stats["alerts_generated"] = 0

        total_records = sum(stats.values())
        # Calculate total from non-alert stats
        total_records = (stats["equipment"] + stats["batches"] + stats["schedules"] +
                         stats["quality_templates"] + stats["quality_checks"] +
                         stats["production_capacity"])
        duration_ms = int((datetime.now(timezone.utc) - start_time).total_seconds() * 1000)

        logger.info(
@@ -12,7 +12,7 @@ from shared.auth.decorators import get_current_user_dep
from shared.auth.access_control import require_user_role
from shared.routing import RouteBuilder, RouteCategory
from app.core.database import get_db
from app.repositories.quality_template_repository import QualityTemplateRepository
from app.services.quality_template_service import QualityTemplateService
from app.models.production import ProcessStage, QualityCheckTemplate
from app.schemas.quality_templates import (
    QualityCheckTemplateCreate,
@@ -52,9 +52,9 @@ async def list_quality_templates(
    - is_active: Filter by active status (default: True)
    """
    try:
        repo = QualityTemplateRepository(db)
        service = QualityTemplateService(db)

        templates, total = await repo.get_templates_by_tenant(
        templates, total = await service.get_templates(
            tenant_id=str(tenant_id),
            stage=stage,
            check_type=check_type.value if check_type else None,
@@ -98,29 +98,18 @@ async def create_quality_template(
):
    """Create a new quality check template"""
    try:
        repo = QualityTemplateRepository(db)
        service = QualityTemplateService(db)

        # Check if template code already exists (if provided)
        if template_data.template_code:
            code_exists = await repo.check_template_code_exists(
                tenant_id=str(tenant_id),
                template_code=template_data.template_code
            )
            if code_exists:
                raise HTTPException(
                    status_code=status.HTTP_400_BAD_REQUEST,
                    detail=f"Template code '{template_data.template_code}' already exists"
                )

        # Create template
        # Add created_by from current user
        template_dict = template_data.dict()
        template_dict['tenant_id'] = str(tenant_id)
        template_dict['created_by'] = UUID(current_user["sub"])
        template_create = QualityCheckTemplateCreate(**template_dict)

        template = QualityCheckTemplate(**template_dict)
        db.add(template)
        await db.commit()
        await db.refresh(template)
        # Create template via service (handles validation and business rules)
        template = await service.create_template(
            tenant_id=str(tenant_id),
            template_data=template_create
        )

        logger.info("Created quality template",
                    template_id=str(template.id),
@@ -129,10 +118,13 @@ async def create_quality_template(

        return QualityCheckTemplateResponse.from_orm(template)

    except HTTPException:
        raise
    except ValueError as e:
        # Business rule validation errors
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=str(e)
        )
    except Exception as e:
        await db.rollback()
        logger.error("Error creating quality template",
                     error=str(e), tenant_id=str(tenant_id))
        raise HTTPException(
@@ -153,9 +145,9 @@ async def get_quality_template(
):
    """Get a specific quality check template"""
    try:
        repo = QualityTemplateRepository(db)
        service = QualityTemplateService(db)

        template = await repo.get_by_tenant_and_id(
        template = await service.get_template(
            tenant_id=str(tenant_id),
            template_id=template_id
        )
@@ -195,12 +187,13 @@ async def update_quality_template(
):
    """Update a quality check template"""
    try:
        repo = QualityTemplateRepository(db)
        service = QualityTemplateService(db)

        # Get existing template
        template = await repo.get_by_tenant_and_id(
        # Update template via service (handles validation and business rules)
        template = await service.update_template(
            tenant_id=str(tenant_id),
            template_id=template_id
            template_id=template_id,
            template_data=template_data
        )

        if not template:
@@ -209,37 +202,21 @@ async def update_quality_template(
                detail="Quality template not found"
            )

        # Check if template code already exists (if being updated)
        if template_data.template_code and template_data.template_code != template.template_code:
            code_exists = await repo.check_template_code_exists(
                tenant_id=str(tenant_id),
                template_code=template_data.template_code,
                exclude_id=template_id
            )
            if code_exists:
                raise HTTPException(
                    status_code=status.HTTP_400_BAD_REQUEST,
                    detail=f"Template code '{template_data.template_code}' already exists"
                )

        # Update template fields
        update_data = template_data.dict(exclude_unset=True)
        for field, value in update_data.items():
            setattr(template, field, value)

        await db.commit()
        await db.refresh(template)

        logger.info("Updated quality template",
                    template_id=str(template_id),
                    tenant_id=str(tenant_id))

        return QualityCheckTemplateResponse.from_orm(template)

    except ValueError as e:
        # Business rule validation errors
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=str(e)
        )
    except HTTPException:
        raise
    except Exception as e:
        await db.rollback()
        logger.error("Error updating quality template",
                     error=str(e),
                     template_id=str(template_id),
@@ -262,31 +239,27 @@ async def delete_quality_template(
    db = Depends(get_db)
):
    """
    Delete a quality check template (soft delete by setting is_active to False)
    Delete a quality check template

    Note: For safety, this performs a soft delete. Hard deletes would require
    checking for dependencies in recipes and production batches.
    Note: Service layer determines whether to use soft or hard delete
    based on business rules (checking dependencies, etc.)
    """
    try:
        repo = QualityTemplateRepository(db)
        service = QualityTemplateService(db)

        # Get existing template
        template = await repo.get_by_tenant_and_id(
        # Delete template via service (handles business rules)
        success = await service.delete_template(
            tenant_id=str(tenant_id),
            template_id=template_id
        )

        if not template:
        if not success:
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND,
                detail="Quality template not found"
            )

        # Soft delete by marking as inactive
        template.is_active = False
        await db.commit()

        logger.info("Deleted quality template (soft delete)",
        logger.info("Deleted quality template",
                    template_id=str(template_id),
                    tenant_id=str(tenant_id))

@@ -322,9 +295,9 @@ async def get_templates_for_stage(
):
    """Get all quality templates applicable to a specific process stage"""
    try:
        repo = QualityTemplateRepository(db)
        service = QualityTemplateService(db)

        templates = await repo.get_templates_for_stage(
        templates = await service.get_templates_for_stage(
            tenant_id=str(tenant_id),
            stage=stage,
            is_active=is_active
@@ -367,50 +340,20 @@ async def duplicate_quality_template(
):
    """Duplicate an existing quality check template"""
    try:
        repo = QualityTemplateRepository(db)
        service = QualityTemplateService(db)

        # Get existing template
        original = await repo.get_by_tenant_and_id(
        # Duplicate template via service (handles business rules)
        duplicate = await service.duplicate_template(
            tenant_id=str(tenant_id),
            template_id=template_id
        )

        if not original:
        if not duplicate:
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND,
                detail="Quality template not found"
            )

        # Create duplicate
        duplicate_data = {
            'tenant_id': original.tenant_id,
            'name': f"{original.name} (Copy)",
            'template_code': f"{original.template_code}_copy" if original.template_code else None,
            'check_type': original.check_type,
            'category': original.category,
            'description': original.description,
            'instructions': original.instructions,
            'parameters': original.parameters,
            'thresholds': original.thresholds,
            'scoring_criteria': original.scoring_criteria,
            'is_active': original.is_active,
            'is_required': original.is_required,
            'is_critical': original.is_critical,
            'weight': original.weight,
            'min_value': original.min_value,
            'max_value': original.max_value,
            'target_value': original.target_value,
            'unit': original.unit,
            'tolerance_percentage': original.tolerance_percentage,
            'applicable_stages': original.applicable_stages,
            'created_by': UUID(current_user["sub"])
        }

        duplicate = QualityCheckTemplate(**duplicate_data)
        db.add(duplicate)
        await db.commit()
        await db.refresh(duplicate)

        logger.info("Duplicated quality template",
                    original_id=str(template_id),
                    duplicate_id=str(duplicate.id),
@@ -421,7 +364,6 @@ async def duplicate_quality_template(
    except HTTPException:
        raise
    except Exception as e:
        await db.rollback()
        logger.error("Error duplicating quality template",
                     error=str(e),
                     template_id=str(template_id),
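The handlers above repeat one convention: business-rule failures surface from the service layer as ValueError and map to HTTP 400, HTTPException passes through, and anything else rolls back and becomes a 500. A hedged sketch of that mapping as a reusable decorator (illustrative only; the routes in this commit inline it instead):

# Hypothetical helper capturing the error-mapping convention used by these routes.
from functools import wraps
from fastapi import HTTPException, status

def map_service_errors(handler):
    @wraps(handler)
    async def wrapper(*args, **kwargs):
        try:
            return await handler(*args, **kwargs)
        except HTTPException:
            raise  # already an HTTP error; pass through unchanged
        except ValueError as e:
            # Business-rule violation -> client error
            raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=str(e))
    return wrapper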
@@ -0,0 +1,278 @@
# services/production/app/repositories/production_alert_repository.py
"""
Production Alert Repository
Data access layer for production-specific alert detection and analysis
"""

from typing import List, Dict, Any
from uuid import UUID
from sqlalchemy import text
from sqlalchemy.ext.asyncio import AsyncSession
import structlog

logger = structlog.get_logger()


class ProductionAlertRepository:
    """Repository for production alert data access"""

    def __init__(self, session: AsyncSession):
        self.session = session

    async def get_capacity_issues(self) -> List[Dict[str, Any]]:
        """
        Get production capacity overload issues
        Returns batches that exceed daily capacity thresholds
        """
        try:
            query = text("""
                SELECT
                    pb.tenant_id,
                    DATE(pb.planned_start_time) as planned_date,
                    COUNT(*) as batch_count,
                    SUM(pb.planned_quantity) as total_planned,
                    'capacity_check' as capacity_status,
                    100.0 as capacity_percentage
                FROM production_batches pb
                WHERE pb.planned_start_time >= CURRENT_DATE
                  AND pb.planned_start_time <= CURRENT_DATE + INTERVAL '3 days'
                  AND pb.status IN ('planned', 'in_progress')
                GROUP BY pb.tenant_id, DATE(pb.planned_start_time)
                HAVING COUNT(*) > 10
                ORDER BY total_planned DESC
                LIMIT 20
            """)

            result = await self.session.execute(query)
            return [dict(row._mapping) for row in result.fetchall()]

        except Exception as e:
            logger.error("Failed to get capacity issues", error=str(e))
            raise

    async def get_production_delays(self) -> List[Dict[str, Any]]:
        """
        Get production batches that are delayed
        Returns batches in progress past their planned end time
        """
        try:
            query = text("""
                SELECT
                    pb.id, pb.tenant_id, pb.product_name, pb.batch_number,
                    pb.planned_end_time as planned_completion_time, pb.actual_start_time,
                    pb.actual_end_time as estimated_completion_time, pb.status,
                    EXTRACT(minutes FROM (NOW() - pb.planned_end_time)) as delay_minutes,
                    COALESCE(pb.priority::text, 'medium') as priority_level,
                    1 as affected_orders
                FROM production_batches pb
                WHERE pb.status = 'in_progress'
                  AND pb.planned_end_time < NOW()
                  AND pb.planned_end_time > NOW() - INTERVAL '24 hours'
                ORDER BY
                    CASE COALESCE(pb.priority::text, 'MEDIUM')
                        WHEN 'URGENT' THEN 1 WHEN 'HIGH' THEN 2 ELSE 3
                    END,
                    delay_minutes DESC
                LIMIT 50
            """)

            result = await self.session.execute(query)
            return [dict(row._mapping) for row in result.fetchall()]

        except Exception as e:
            logger.error("Failed to get production delays", error=str(e))
            raise

    async def get_quality_issues(self) -> List[Dict[str, Any]]:
        """
        Get quality control failures
        Returns quality checks that failed within recent hours
        """
        try:
            query = text("""
                SELECT
                    qc.id, qc.tenant_id, qc.batch_id, qc.test_type,
                    qc.result_value, qc.min_acceptable, qc.max_acceptable,
                    qc.pass_fail, qc.defect_count,
                    qc.notes as qc_severity,
                    1 as total_failures,
                    pb.product_name, pb.batch_number,
                    qc.created_at
                FROM quality_checks qc
                JOIN production_batches pb ON pb.id = qc.batch_id
                WHERE qc.pass_fail = false
                  AND qc.created_at > NOW() - INTERVAL '4 hours'
                  AND qc.corrective_action_needed = true
                ORDER BY
                    CASE
                        WHEN qc.pass_fail = false AND qc.defect_count > 5 THEN 1
                        WHEN qc.pass_fail = false THEN 2
                        ELSE 3
                    END,
                    qc.created_at DESC
            """)

            result = await self.session.execute(query)
            return [dict(row._mapping) for row in result.fetchall()]

        except Exception as e:
            logger.error("Failed to get quality issues", error=str(e))
            raise

    async def mark_quality_check_acknowledged(self, quality_check_id: UUID) -> None:
        """
        Mark a quality check as acknowledged to avoid duplicate alerts
        """
        try:
            query = text("""
                UPDATE quality_checks
                SET acknowledged = true
                WHERE id = :id
            """)

            await self.session.execute(query, {"id": quality_check_id})
            await self.session.commit()

        except Exception as e:
            logger.error("Failed to mark quality check acknowledged", error=str(e), qc_id=str(quality_check_id))
            raise

    async def get_equipment_status(self, tenant_id: UUID) -> List[Dict[str, Any]]:
        """
        Get equipment requiring attention
        Returns equipment with maintenance due or status issues
        """
        try:
            query = text("""
                SELECT
                    e.id, e.tenant_id, e.name, e.type, e.status,
                    e.efficiency_percentage, e.uptime_percentage,
                    e.last_maintenance_date, e.next_maintenance_date,
                    e.maintenance_interval_days,
                    EXTRACT(DAYS FROM (e.next_maintenance_date - NOW())) as days_to_maintenance,
                    COUNT(ea.id) as active_alerts
                FROM equipment e
                LEFT JOIN alerts ea ON ea.equipment_id = e.id
                    AND ea.is_active = true
                    AND ea.is_resolved = false
                WHERE e.is_active = true
                  AND e.tenant_id = :tenant_id
                GROUP BY e.id, e.tenant_id, e.name, e.type, e.status,
                         e.efficiency_percentage, e.uptime_percentage,
                         e.last_maintenance_date, e.next_maintenance_date,
                         e.maintenance_interval_days
                ORDER BY e.next_maintenance_date ASC
            """)

            result = await self.session.execute(query, {"tenant_id": tenant_id})
            return [dict(row._mapping) for row in result.fetchall()]

        except Exception as e:
            logger.error("Failed to get equipment status", error=str(e), tenant_id=str(tenant_id))
            raise

    async def get_efficiency_recommendations(self, tenant_id: UUID) -> List[Dict[str, Any]]:
        """
        Get production efficiency improvement recommendations
        Analyzes production patterns to identify optimization opportunities
        """
        try:
            query = text("""
                WITH efficiency_analysis AS (
                    SELECT
                        pb.tenant_id, pb.product_name,
                        AVG(EXTRACT(EPOCH FROM (pb.actual_end_time - pb.actual_start_time)) / 60) as avg_production_time,
                        AVG(pb.planned_duration_minutes) as avg_planned_duration,
                        COUNT(*) as batch_count,
                        AVG(pb.yield_percentage) as avg_yield,
                        EXTRACT(hour FROM pb.actual_start_time) as start_hour
                    FROM production_batches pb
                    WHERE pb.status = 'COMPLETED'
                      AND pb.actual_completion_time > CURRENT_DATE - INTERVAL '30 days'
                      AND pb.tenant_id = :tenant_id
                    GROUP BY pb.tenant_id, pb.product_name, EXTRACT(hour FROM pb.actual_start_time)
                    HAVING COUNT(*) >= 3
                ),
                recommendations AS (
                    SELECT *,
                        CASE
                            WHEN avg_production_time > avg_planned_duration * 1.2 THEN 'reduce_production_time'
                            WHEN avg_yield < 85 THEN 'improve_yield'
                            WHEN start_hour BETWEEN 14 AND 16 AND avg_production_time > avg_planned_duration * 1.1 THEN 'avoid_afternoon_production'
                            ELSE null
                        END as recommendation_type,
                        (avg_production_time - avg_planned_duration) / avg_planned_duration * 100 as efficiency_loss_percent
                    FROM efficiency_analysis
                )
                SELECT * FROM recommendations
                WHERE recommendation_type IS NOT NULL
                  AND efficiency_loss_percent > 10
                ORDER BY efficiency_loss_percent DESC
            """)

            result = await self.session.execute(query, {"tenant_id": tenant_id})
            return [dict(row._mapping) for row in result.fetchall()]

        except Exception as e:
            logger.error("Failed to get efficiency recommendations", error=str(e), tenant_id=str(tenant_id))
            raise

    async def get_energy_consumption_patterns(self, tenant_id: UUID) -> List[Dict[str, Any]]:
        """
        Get energy consumption patterns for optimization analysis
        Returns consumption by equipment and hour of day
        """
        try:
            query = text("""
                SELECT
                    e.tenant_id, e.name as equipment_name, e.type,
                    AVG(ec.energy_consumption_kwh) as avg_energy,
                    EXTRACT(hour FROM ec.recorded_at) as hour_of_day,
                    COUNT(*) as readings_count
                FROM equipment e
                JOIN energy_consumption ec ON ec.equipment_id = e.id
                WHERE ec.recorded_at > CURRENT_DATE - INTERVAL '30 days'
                  AND e.tenant_id = :tenant_id
                GROUP BY e.tenant_id, e.id, e.name, e.type, EXTRACT(hour FROM ec.recorded_at)
                HAVING COUNT(*) >= 10
                ORDER BY avg_energy DESC
            """)

            result = await self.session.execute(query, {"tenant_id": tenant_id})
            return [dict(row._mapping) for row in result.fetchall()]

        except Exception as e:
            logger.error("Failed to get energy consumption patterns", error=str(e), tenant_id=str(tenant_id))
            raise

    async def get_affected_production_batches(self, ingredient_id: str) -> List[str]:
        """
        Get production batches affected by ingredient shortage
        Returns batch IDs that use the specified ingredient
        """
        try:
            query = text("""
                SELECT DISTINCT pb.id
                FROM production_batches pb
                JOIN recipe_ingredients ri ON ri.recipe_id = pb.recipe_id
                WHERE ri.ingredient_id = :ingredient_id
                  AND pb.status = 'in_progress'
                  AND pb.planned_completion_time > NOW()
            """)

            result = await self.session.execute(query, {"ingredient_id": ingredient_id})
            return [str(row.id) for row in result.fetchall()]

        except Exception as e:
            logger.error("Failed to get affected production batches", error=str(e), ingredient_id=ingredient_id)
            raise

    async def set_statement_timeout(self, timeout: str = '30s') -> None:
        """
        Set PostgreSQL statement timeout for the current session
        """
        try:
            await self.session.execute(text(f"SET statement_timeout = '{timeout}'"))
        except Exception as e:
            logger.error("Failed to set statement timeout", error=str(e))
            raise
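Taken together, the repository wraps every raw query the alert service previously inlined. A minimal usage sketch; db_manager.get_session() mirrors how the alert-service code later in this diff obtains sessions:

# Illustrative wiring of the new repository inside a scheduled check.
async def scan_for_delays(db_manager) -> None:
    async with db_manager.get_session() as session:
        repo = ProductionAlertRepository(session)
        await repo.set_statement_timeout('30s')  # bound query runtime first
        for delay in await repo.get_production_delays():
            print(delay['batch_number'], delay['delay_minutes'])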
@@ -689,4 +689,148 @@ class ProductionBatchRepository(ProductionBaseRepository, BatchCountProvider):

        except Exception as e:
            logger.error("Error counting filtered batches", error=str(e))
            raise DatabaseError(f"Failed to count filtered batches: {str(e)}")

    async def get_waste_analytics(
        self,
        tenant_id: UUID,
        start_date: datetime,
        end_date: datetime
    ) -> Dict[str, Any]:
        """
        Get production waste analytics for sustainability reporting

        Args:
            tenant_id: Tenant UUID
            start_date: Start date for analytics period
            end_date: End date for analytics period

        Returns:
            Dictionary with waste analytics data
        """
        try:
            query = text("""
                SELECT
                    COALESCE(SUM(waste_quantity), 0) as total_production_waste,
                    COALESCE(SUM(defect_quantity), 0) as total_defects,
                    COALESCE(SUM(planned_quantity), 0) as total_planned,
                    COALESCE(SUM(actual_quantity), 0) as total_actual,
                    COUNT(*) as total_batches,
                    COUNT(CASE WHEN forecast_id IS NOT NULL THEN 1 END) as ai_assisted_batches
                FROM production_batches
                WHERE tenant_id = :tenant_id
                  AND created_at BETWEEN :start_date AND :end_date
                  AND status IN ('COMPLETED', 'QUALITY_CHECK', 'FINISHED')
            """)

            result = await self.session.execute(
                query,
                {
                    'tenant_id': tenant_id,
                    'start_date': start_date,
                    'end_date': end_date
                }
            )
            row = result.fetchone()

            waste_data = {
                'total_production_waste': float(row.total_production_waste or 0),
                'total_defects': float(row.total_defects or 0),
                'total_planned': float(row.total_planned or 0),
                'total_actual': float(row.total_actual or 0),
                'total_batches': int(row.total_batches or 0),
                'ai_assisted_batches': int(row.ai_assisted_batches or 0)
            }

            logger.info(
                "Waste analytics calculated",
                tenant_id=str(tenant_id),
                total_waste=waste_data['total_production_waste'],
                batches=waste_data['total_batches']
            )

            return waste_data

        except Exception as e:
            logger.error("Error calculating waste analytics", error=str(e), tenant_id=str(tenant_id))
            raise DatabaseError(f"Failed to calculate waste analytics: {str(e)}")

    async def get_baseline_metrics(self, tenant_id: UUID) -> Dict[str, Any]:
        """
        Get baseline production metrics from first 90 days

        Used by sustainability service to establish waste baseline
        for SDG 12.3 compliance tracking.

        Args:
            tenant_id: Tenant UUID

        Returns:
            Dictionary with baseline metrics data
        """
        try:
            query = text("""
                WITH first_batch AS (
                    SELECT MIN(created_at) as start_date
                    FROM production_batches
                    WHERE tenant_id = :tenant_id
                ),
                baseline_data AS (
                    SELECT
                        COALESCE(SUM(waste_quantity + defect_quantity), 0) as total_waste,
                        COALESCE(SUM(planned_quantity), 0) as total_production
                    FROM production_batches, first_batch
                    WHERE tenant_id = :tenant_id
                      AND created_at BETWEEN first_batch.start_date
                          AND first_batch.start_date + INTERVAL '90 days'
                      AND status IN ('COMPLETED', 'QUALITY_CHECK', 'FINISHED')
                )
                SELECT
                    total_waste,
                    total_production,
                    CASE
                        WHEN total_production > 0
                        THEN (total_waste / total_production * 100)
                        ELSE NULL
                    END as waste_percentage,
                    (SELECT start_date FROM first_batch) as baseline_start,
                    (SELECT start_date + INTERVAL '90 days' FROM first_batch) as baseline_end
                FROM baseline_data
            """)

            result = await self.session.execute(query, {'tenant_id': tenant_id})
            row = result.fetchone()

            if row and row.waste_percentage is not None and row.total_production > 100:
                # We have enough data for a real baseline
                baseline_data = {
                    'waste_percentage': float(row.waste_percentage),
                    'total_waste': float(row.total_waste),
                    'total_production': float(row.total_production),
                    'baseline_start': row.baseline_start,
                    'baseline_end': row.baseline_end,
                    'has_baseline': True
                }
            else:
                # Not enough data yet, return defaults
                baseline_data = {
                    'waste_percentage': None,
                    'total_waste': 0,
                    'total_production': 0,
                    'baseline_start': None,
                    'baseline_end': None,
                    'has_baseline': False
                }

            logger.info(
                "Baseline metrics calculated",
                tenant_id=str(tenant_id),
                has_baseline=baseline_data['has_baseline'],
                waste_percentage=baseline_data.get('waste_percentage')
            )

            return baseline_data

        except Exception as e:
            logger.error("Error calculating baseline metrics", error=str(e), tenant_id=str(tenant_id))
            raise DatabaseError(f"Failed to calculate baseline metrics: {str(e)}")
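The analytics payload is deliberately raw sums; callers derive their own rates. For example, a waste percentage, assuming total_actual is the chosen denominator (the sustainability service may normalize differently):

# Illustrative post-processing of get_waste_analytics() output.
def waste_rate(waste_data: dict) -> float:
    actual = waste_data['total_actual']
    if actual <= 0:
        return 0.0  # empty period: avoid division by zero
    return waste_data['total_production_waste'] / actual * 100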
@@ -382,4 +382,51 @@ class ProductionScheduleRepository(ProductionBaseRepository):

        except Exception as e:
            logger.error("Error fetching today's schedule", error=str(e))
            raise DatabaseError(f"Failed to fetch today's schedule: {str(e)}")

    async def get_all_schedules_for_tenant(self, tenant_id: UUID) -> List[ProductionSchedule]:
        """Get all production schedules for a specific tenant"""
        try:
            from sqlalchemy import select
            from app.models.production import ProductionSchedule

            result = await self.session.execute(
                select(ProductionSchedule).where(
                    ProductionSchedule.tenant_id == tenant_id
                )
            )
            schedules = result.scalars().all()

            logger.info("Retrieved all schedules for tenant",
                        tenant_id=str(tenant_id),
                        count=len(schedules))

            return list(schedules)

        except Exception as e:
            logger.error("Error fetching all tenant schedules", error=str(e), tenant_id=str(tenant_id))
            raise DatabaseError(f"Failed to fetch all tenant schedules: {str(e)}")

    async def archive_schedule(self, schedule: ProductionSchedule) -> None:
        """Archive a production schedule"""
        try:
            schedule.archived = True
            await self.session.commit()
            logger.info("Archived schedule", schedule_id=str(schedule.id))

        except Exception as e:
            logger.error("Error archiving schedule", error=str(e), schedule_id=str(schedule.id))
            raise DatabaseError(f"Failed to archive schedule: {str(e)}")

    async def cancel_schedule(self, schedule: ProductionSchedule, reason: str = None) -> None:
        """Cancel a production schedule"""
        try:
            schedule.status = "cancelled"
            if reason:
                schedule.notes = (schedule.notes or "") + f"\n{reason}"
            await self.session.commit()
            logger.info("Cancelled schedule", schedule_id=str(schedule.id))

        except Exception as e:
            logger.error("Error cancelling schedule", error=str(e), schedule_id=str(schedule.id))
            raise DatabaseError(f"Failed to cancel schedule: {str(e)}")
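These helpers commit per schedule, which keeps cleanup loops simple at the cost of one transaction each. A condensed sketch of that usage (the committed cleanup loop, with cancellation and escalation, appears later in this diff):

# Illustrative cleanup pass over the repository methods just added.
from datetime import date

async def archive_stale(repo, tenant_id) -> int:
    archived = 0
    for schedule in await repo.get_all_schedules_for_tenant(tenant_id):
        if schedule.status == "completed" and (date.today() - schedule.schedule_date).days > 90:
            await repo.archive_schedule(schedule)  # commits immediately
            archived += 1
    return archived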
@@ -93,36 +93,18 @@ class ProductionAlertService(BaseAlertService, AlertServiceMixin):
        try:
            self._checks_performed += 1

            # Use a simpler query with timeout and connection management
            from sqlalchemy import text
            simplified_query = text("""
                SELECT
                    pb.tenant_id,
                    DATE(pb.planned_start_time) as planned_date,
                    COUNT(*) as batch_count,
                    SUM(pb.planned_quantity) as total_planned,
                    'capacity_check' as capacity_status,
                    100.0 as capacity_percentage -- Default value for processing
                FROM production_batches pb
                WHERE pb.planned_start_time >= CURRENT_DATE
                  AND pb.planned_start_time <= CURRENT_DATE + INTERVAL '3 days'
                  AND pb.status IN ('planned', 'in_progress')
                GROUP BY pb.tenant_id, DATE(pb.planned_start_time)
                HAVING COUNT(*) > 10 -- Alert if more than 10 batches per day
                ORDER BY total_planned DESC
                LIMIT 20 -- Limit results to prevent excessive processing
            """)

            # Use timeout and proper session handling
            try:
                from app.repositories.production_alert_repository import ProductionAlertRepository

                async with self.db_manager.get_session() as session:
                    alert_repo = ProductionAlertRepository(session)
                    # Set statement timeout to prevent long-running queries
                    await session.execute(text("SET statement_timeout = '30s'"))
                    result = await session.execute(simplified_query)
                    capacity_issues = result.fetchall()
                    await alert_repo.set_statement_timeout('30s')
                    capacity_issues = await alert_repo.get_capacity_issues()

                    for issue in capacity_issues:
                        await self._process_capacity_issue(issue.tenant_id, issue)
                        await self._process_capacity_issue(issue['tenant_id'], issue)

            except asyncio.TimeoutError:
                logger.warning("Capacity check timed out", service=self.config.SERVICE_NAME)
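Note the call-site change in the loop: SQLAlchemy Row objects allow attribute access (issue.tenant_id), but the repository returns plain dicts built from row._mapping, so processing switches to key access. A self-contained illustration:

# Why the loop moved from issue.tenant_id to issue['tenant_id'].
from sqlalchemy import create_engine, text

engine = create_engine("sqlite://")  # throwaway in-memory DB for the demo
with engine.connect() as conn:
    row = conn.execute(text("SELECT 1 AS tenant_id")).fetchone()
    assert row.tenant_id == 1         # Row: attribute access works
    issue = dict(row._mapping)        # what the repository now returns
    assert issue['tenant_id'] == 1    # dict: key access required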
@@ -203,36 +185,14 @@ class ProductionAlertService(BaseAlertService, AlertServiceMixin):
        try:
            self._checks_performed += 1

            # Import text function at the beginning
            from sqlalchemy import text

            # Simplified query with timeout and proper error handling
            query = text("""
                SELECT
                    pb.id, pb.tenant_id, pb.product_name, pb.batch_number,
                    pb.planned_end_time as planned_completion_time, pb.actual_start_time,
                    pb.actual_end_time as estimated_completion_time, pb.status,
                    EXTRACT(minutes FROM (NOW() - pb.planned_end_time)) as delay_minutes,
                    COALESCE(pb.priority::text, 'medium') as priority_level,
                    1 as affected_orders -- Default to 1 since we can't count orders
                FROM production_batches pb
                WHERE pb.status = 'in_progress'
                  AND pb.planned_end_time < NOW()
                  AND pb.planned_end_time > NOW() - INTERVAL '24 hours'
                ORDER BY
                    CASE COALESCE(pb.priority::text, 'MEDIUM')
                        WHEN 'URGENT' THEN 1 WHEN 'HIGH' THEN 2 ELSE 3
                    END,
                    delay_minutes DESC
                LIMIT 50 -- Limit results to prevent excessive processing
            """)

            try:
                from app.repositories.production_alert_repository import ProductionAlertRepository

                async with self.db_manager.get_session() as session:
                    alert_repo = ProductionAlertRepository(session)
                    # Set statement timeout
                    await session.execute(text("SET statement_timeout = '30s'"))
                    result = await session.execute(query)
                    delays = result.fetchall()
                    await alert_repo.set_statement_timeout('30s')
                    delays = await alert_repo.get_production_delays()

                    for delay in delays:
                        await self._process_production_delay(delay)
@@ -300,44 +260,16 @@ class ProductionAlertService(BaseAlertService, AlertServiceMixin):
        """Check for quality control issues (alerts)"""
        try:
            self._checks_performed += 1

            # Fixed query using actual quality_checks table structure
            query = """
                SELECT
                    qc.id, qc.tenant_id, qc.batch_id, qc.check_type as test_type,
                    qc.quality_score as result_value,
                    qc.target_weight as min_acceptable,
                    (qc.target_weight * (1 + qc.tolerance_percentage/100)) as max_acceptable,
                    CASE
                        WHEN qc.pass_fail = false AND qc.defect_count > 5 THEN 'critical'
                        WHEN qc.pass_fail = false THEN 'major'
                        ELSE 'minor'
                    END as qc_severity,
                    qc.created_at,
                    pb.product_name, pb.batch_number,
                    COUNT(*) OVER (PARTITION BY qc.batch_id) as total_failures
                FROM quality_checks qc
                JOIN production_batches pb ON pb.id = qc.batch_id
                WHERE qc.pass_fail = false -- Use pass_fail instead of status
                  AND qc.created_at > NOW() - INTERVAL '4 hours'
                  AND qc.corrective_action_needed = true -- Use this instead of acknowledged
                ORDER BY
                    CASE
                        WHEN qc.pass_fail = false AND qc.defect_count > 5 THEN 1
                        WHEN qc.pass_fail = false THEN 2
                        ELSE 3
                    END,
                    qc.created_at DESC
            """

            from sqlalchemy import text

            from app.repositories.production_alert_repository import ProductionAlertRepository

            async with self.db_manager.get_session() as session:
                result = await session.execute(text(query))
                quality_issues = result.fetchall()

                alert_repo = ProductionAlertRepository(session)
                quality_issues = await alert_repo.get_quality_issues()

                for issue in quality_issues:
                    await self._process_quality_issue(issue)

        except Exception as e:
            # Skip quality checks if tables don't exist (graceful degradation)
            if "does not exist" in str(e) or "column" in str(e).lower() and "does not exist" in str(e).lower():
@@ -380,16 +312,14 @@ class ProductionAlertService(BaseAlertService, AlertServiceMixin):

        # Mark as acknowledged to avoid duplicates - using proper session management
        try:
            from sqlalchemy import text
            from app.repositories.production_alert_repository import ProductionAlertRepository

            async with self.db_manager.get_session() as session:
                await session.execute(
                    text("UPDATE quality_checks SET acknowledged = true WHERE id = :id"),
                    {"id": issue['id']}
                )
                await session.commit()
                alert_repo = ProductionAlertRepository(session)
                await alert_repo.mark_quality_check_acknowledged(issue['id'])
        except Exception as e:
            logger.error("Failed to update quality check acknowledged status",
                         quality_check_id=str(issue.get('id')),
                         error=str(e))
            # Don't raise here to avoid breaking the main flow
@@ -402,49 +332,28 @@ class ProductionAlertService(BaseAlertService, AlertServiceMixin):
        """Check equipment status and maintenance requirements (alerts)"""
        try:
            self._checks_performed += 1

            # Query equipment that needs attention
            query = """
                SELECT
                    e.id, e.tenant_id, e.name, e.type, e.status,
                    e.efficiency_percentage, e.uptime_percentage,
                    e.last_maintenance_date, e.next_maintenance_date,
                    e.maintenance_interval_days,
                    EXTRACT(DAYS FROM (e.next_maintenance_date - NOW())) as days_to_maintenance,
                    COUNT(ea.id) as active_alerts
                FROM equipment e
                LEFT JOIN alerts ea ON ea.equipment_id = e.id
                    AND ea.is_active = true
                    AND ea.is_resolved = false
                WHERE e.is_active = true
                  AND e.tenant_id = $1
                GROUP BY e.id, e.tenant_id, e.name, e.type, e.status,
                         e.efficiency_percentage, e.uptime_percentage,
                         e.last_maintenance_date, e.next_maintenance_date,
                         e.maintenance_interval_days
                ORDER BY e.next_maintenance_date ASC
            """

            from app.repositories.production_alert_repository import ProductionAlertRepository

            tenants = await self.get_active_tenants()

            for tenant_id in tenants:
                try:
                    from sqlalchemy import text
                    # Use a separate session for each tenant to avoid connection blocking
                    async with self.db_manager.get_session() as session:
                        result = await session.execute(text(query), {"tenant_id": tenant_id})
                        equipment_list = result.fetchall()

                        alert_repo = ProductionAlertRepository(session)
                        equipment_list = await alert_repo.get_equipment_status(tenant_id)

                        for equipment in equipment_list:
                            # Process each equipment item in a non-blocking manner
                            await self._process_equipment_issue(equipment)

                except Exception as e:
                    logger.error("Error checking equipment status",
                                 tenant_id=str(tenant_id),
                                 error=str(e))
                    # Continue processing other tenants despite this error

        except Exception as e:
            logger.error("Equipment status check failed", error=str(e))
            self._errors_count += 1
@@ -530,61 +439,28 @@ class ProductionAlertService(BaseAlertService, AlertServiceMixin):
        """Generate production efficiency recommendations"""
        try:
            self._checks_performed += 1

            # Analyze production patterns for efficiency opportunities
            query = """
                WITH efficiency_analysis AS (
                    SELECT
                        pb.tenant_id, pb.product_name,
                        AVG(EXTRACT(minutes FROM (pb.actual_completion_time - pb.actual_start_time))) as avg_production_time,
                        AVG(pb.planned_duration_minutes) as avg_planned_duration,
                        COUNT(*) as batch_count,
                        AVG(pb.yield_percentage) as avg_yield,
                        EXTRACT(hour FROM pb.actual_start_time) as start_hour
                    FROM production_batches pb
                    WHERE pb.status = 'COMPLETED'
                      AND pb.actual_completion_time > CURRENT_DATE - INTERVAL '30 days'
                      AND pb.tenant_id = $1
                    GROUP BY pb.tenant_id, pb.product_name, EXTRACT(hour FROM pb.actual_start_time)
                    HAVING COUNT(*) >= 3
                ),
                recommendations AS (
                    SELECT *,
                        CASE
                            WHEN avg_production_time > avg_planned_duration * 1.2 THEN 'reduce_production_time'
                            WHEN avg_yield < 85 THEN 'improve_yield'
                            WHEN start_hour BETWEEN 14 AND 16 AND avg_production_time > avg_planned_duration * 1.1 THEN 'avoid_afternoon_production'
                            ELSE null
                        END as recommendation_type,
                        (avg_production_time - avg_planned_duration) / avg_planned_duration * 100 as efficiency_loss_percent
                    FROM efficiency_analysis
                )
                SELECT * FROM recommendations
                WHERE recommendation_type IS NOT NULL
                  AND efficiency_loss_percent > 10
                ORDER BY efficiency_loss_percent DESC
            """

            from app.repositories.production_alert_repository import ProductionAlertRepository

            tenants = await self.get_active_tenants()

            for tenant_id in tenants:
                try:
                    from sqlalchemy import text
                    # Use a separate session per tenant to avoid connection blocking
                    async with self.db_manager.get_session() as session:
                        result = await session.execute(text(query), {"tenant_id": tenant_id})
                        recommendations = result.fetchall()

                        alert_repo = ProductionAlertRepository(session)
                        recommendations = await alert_repo.get_efficiency_recommendations(tenant_id)

                        for rec in recommendations:
                            # Process each recommendation individually
                            await self._generate_efficiency_recommendation(tenant_id, rec)

                except Exception as e:
                    logger.error("Error generating efficiency recommendations",
                                 tenant_id=str(tenant_id),
                                 error=str(e))
                    # Continue with other tenants despite this error

        except Exception as e:
            logger.error("Efficiency recommendations failed", error=str(e))
            self._errors_count += 1
@@ -659,41 +535,26 @@ class ProductionAlertService(BaseAlertService, AlertServiceMixin):
    async def generate_energy_recommendations(self):
        """Generate energy optimization recommendations"""
        try:
            # Analyze energy consumption patterns
            query = """
                SELECT
                    e.tenant_id, e.name as equipment_name, e.type,
                    AVG(ec.energy_consumption_kwh) as avg_energy,
                    EXTRACT(hour FROM ec.recorded_at) as hour_of_day,
                    COUNT(*) as readings_count
                FROM equipment e
                JOIN energy_consumption ec ON ec.equipment_id = e.id
                WHERE ec.recorded_at > CURRENT_DATE - INTERVAL '30 days'
                  AND e.tenant_id = $1
                GROUP BY e.tenant_id, e.id, EXTRACT(hour FROM ec.recorded_at)
                HAVING COUNT(*) >= 10
                ORDER BY avg_energy DESC
            """

            from app.repositories.production_alert_repository import ProductionAlertRepository

            tenants = await self.get_active_tenants()

            for tenant_id in tenants:
                try:
                    from sqlalchemy import text
                    # Use a separate session per tenant to avoid connection blocking
                    async with self.db_manager.get_session() as session:
                        result = await session.execute(text(query), {"tenant_id": tenant_id})
                        energy_data = result.fetchall()

                        alert_repo = ProductionAlertRepository(session)
                        energy_data = await alert_repo.get_energy_consumption_patterns(tenant_id)

                        # Analyze for peak hours and optimization opportunities
                        await self._analyze_energy_patterns(tenant_id, energy_data)

                except Exception as e:
                    logger.error("Error generating energy recommendations",
                                 tenant_id=str(tenant_id),
                                 error=str(e))
                    # Continue with other tenants despite this error

        except Exception as e:
            logger.error("Energy recommendations failed", error=str(e))
            self._errors_count += 1
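_analyze_energy_patterns itself is outside this hunk; given that the repository returns one row per (equipment, hour_of_day) with an avg_energy column, a plausible shape is a simple peak-hour scan (entirely illustrative, not the committed implementation):

# Hypothetical analysis over get_energy_consumption_patterns() rows.
from collections import defaultdict

def peak_hours(energy_data: list, top_n: int = 3) -> list:
    by_hour = defaultdict(float)
    for row in energy_data:
        by_hour[int(row['hour_of_day'])] += float(row['avg_energy'])
    # Hours with the highest summed average consumption across equipment
    return sorted(by_hour, key=by_hour.get, reverse=True)[:top_n]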
@@ -839,23 +700,14 @@ class ProductionAlertService(BaseAlertService, AlertServiceMixin):
    async def get_affected_production_batches(self, ingredient_id: str) -> List[str]:
        """Get production batches affected by ingredient shortage"""
        try:
            query = """
                SELECT DISTINCT pb.id
                FROM production_batches pb
                JOIN recipe_ingredients ri ON ri.recipe_id = pb.recipe_id
                WHERE ri.ingredient_id = $1
                  AND pb.status = 'in_progress'
                  AND pb.planned_completion_time > NOW()
            """

            from sqlalchemy import text
            from app.repositories.production_alert_repository import ProductionAlertRepository

            async with self.db_manager.get_session() as session:
                result_rows = await session.execute(text(query), {"ingredient_id": ingredient_id})
                result = result_rows.fetchall()
                return [str(row['id']) for row in result]

                alert_repo = ProductionAlertRepository(session)
                return await alert_repo.get_affected_production_batches(ingredient_id)

        except Exception as e:
            logger.error("Error getting affected production batches",
                         ingredient_id=ingredient_id,
                         error=str(e))
            return []
@@ -284,18 +284,10 @@ class ProductionSchedulerService(BaseAlertService, AlertServiceMixin):
    async def _get_schedule_by_date(self, session, tenant_id: UUID, schedule_date: date) -> Optional[Dict]:
        """Check if production schedule exists for date"""
        try:
            from sqlalchemy import select, and_
            from app.models.production import ProductionSchedule
            from app.repositories.production_schedule_repository import ProductionScheduleRepository

            result = await session.execute(
                select(ProductionSchedule).where(
                    and_(
                        ProductionSchedule.tenant_id == tenant_id,
                        ProductionSchedule.schedule_date == schedule_date
                    )
                )
            )
            schedule = result.scalars().first()
            schedule_repo = ProductionScheduleRepository(session)
            schedule = await schedule_repo.get_schedule_by_date(str(tenant_id), schedule_date)

            if schedule:
                return {"id": schedule.id, "status": schedule.status}
@@ -386,32 +378,27 @@ class ProductionSchedulerService(BaseAlertService, AlertServiceMixin):
        stats = {"archived": 0, "cancelled": 0, "escalated": 0}

        try:
            from app.repositories.production_schedule_repository import ProductionScheduleRepository

            async with self.db_manager.get_session() as session:
                from sqlalchemy import select, and_
                from app.models.production import ProductionSchedule
                schedule_repo = ProductionScheduleRepository(session)

                today = date.today()

                # Get all schedules for tenant
                result = await session.execute(
                    select(ProductionSchedule).where(
                        ProductionSchedule.tenant_id == tenant_id
                    )
                )
                schedules = result.scalars().all()
                schedules = await schedule_repo.get_all_schedules_for_tenant(tenant_id)

                for schedule in schedules:
                    schedule_age_days = (today - schedule.schedule_date).days

                    # Archive completed schedules older than 90 days
                    if schedule.status == "completed" and schedule_age_days > 90:
                        schedule.archived = True
                        await schedule_repo.archive_schedule(schedule)
                        stats["archived"] += 1

                    # Cancel draft schedules older than 7 days
                    elif schedule.status == "draft" and schedule_age_days > 7:
                        schedule.status = "cancelled"
                        schedule.notes = (schedule.notes or "") + "\nAuto-cancelled: stale draft schedule"
                        await schedule_repo.cancel_schedule(schedule, "Auto-cancelled: stale draft schedule")
                        stats["cancelled"] += 1

                    # Escalate overdue schedules
@@ -419,8 +406,6 @@ class ProductionSchedulerService(BaseAlertService, AlertServiceMixin):
                        await self._send_schedule_escalation_alert(tenant_id, schedule.id)
                        stats["escalated"] += 1

                await session.commit()

        except Exception as e:
            logger.error("Error in tenant schedule cleanup",
                         tenant_id=str(tenant_id), error=str(e))
@@ -1528,4 +1528,100 @@ class ProductionService:
        except Exception as e:
            logger.error("Error deleting equipment",
                         error=str(e), equipment_id=str(equipment_id), tenant_id=str(tenant_id))
            raise

    # ================================================================
    # SUSTAINABILITY / WASTE ANALYTICS
    # ================================================================

    async def get_waste_analytics(
        self,
        tenant_id: UUID,
        start_date: datetime,
        end_date: datetime
    ) -> Dict[str, Any]:
        """
        Get production waste analytics for sustainability tracking

        Called by Inventory Service's sustainability module
        to calculate environmental impact and SDG 12.3 compliance.
        """
        try:
            async with self.database_manager.get_session() as session:
                from app.repositories.production_batch_repository import ProductionBatchRepository

                # Use repository for waste analytics
                batch_repo = ProductionBatchRepository(session)
                waste_data = await batch_repo.get_waste_analytics(
                    tenant_id=tenant_id,
                    start_date=start_date,
                    end_date=end_date
                )

                return waste_data

        except Exception as e:
            logger.error(
                "Error calculating waste analytics",
                tenant_id=str(tenant_id),
                error=str(e)
            )
            raise

    async def get_baseline_metrics(self, tenant_id: UUID) -> Dict[str, Any]:
        """
        Get baseline production metrics from first 90 days

        Used by sustainability service to establish waste baseline
        for SDG 12.3 compliance tracking.
        """
        try:
            async with self.database_manager.get_session() as session:
                from app.repositories.production_batch_repository import ProductionBatchRepository

                # Use repository for baseline metrics
                batch_repo = ProductionBatchRepository(session)
                baseline_raw = await batch_repo.get_baseline_metrics(tenant_id)

                # Transform repository data to match expected format
                if baseline_raw['has_baseline']:
                    baseline_data = {
                        'waste_percentage': baseline_raw['waste_percentage'],
                        'total_production_kg': baseline_raw['total_production'],
                        'total_waste_kg': baseline_raw['total_waste'],
                        'period': {
                            'start_date': baseline_raw['baseline_start'].isoformat() if baseline_raw['baseline_start'] else None,
                            'end_date': baseline_raw['baseline_end'].isoformat() if baseline_raw['baseline_end'] else None,
                            'type': 'first_90_days'
                        },
                        'data_available': True
                    }
                else:
                    # Not enough data yet - fall back to the industry average
                    baseline_data = {
                        'waste_percentage': 25.0,  # EU bakery industry average
                        'total_production_kg': 0,
                        'total_waste_kg': 0,
                        'period': {
                            'type': 'industry_average',
                            'note': 'Using EU bakery industry average of 25% as baseline'
                        },
                        'data_available': False
                    }

                logger.info(
                    "Baseline metrics retrieved",
                    tenant_id=str(tenant_id),
                    waste_percentage=baseline_data['waste_percentage'],
                    data_available=baseline_data['data_available']
                )

                return baseline_data

        except Exception as e:
            logger.error(
                "Error getting baseline metrics",
                tenant_id=str(tenant_id),
                error=str(e)
            )
            raise
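For reference, the two shapes this method can return (values illustrative; the 25.0 figure is the hard-coded fallback above):

# Measured baseline (more than 100 units produced in the first 90 days):
# {'waste_percentage': 8.4, 'total_production_kg': 12500.0, 'total_waste_kg': 1050.0,
#  'period': {'start_date': '2024-01-01T00:00:00', 'end_date': '2024-03-31T00:00:00',
#             'type': 'first_90_days'}, 'data_available': True}
#
# Fallback when history is too thin:
# {'waste_percentage': 25.0, 'total_production_kg': 0, 'total_waste_kg': 0,
#  'period': {'type': 'industry_average',
#             'note': 'Using EU bakery industry average of 25% as baseline'},
#  'data_available': False}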
@@ -1,56 +1,82 @@
|
||||
# services/production/app/services/quality_template_service.py
|
||||
"""
|
||||
Quality Check Template Service for business logic and data operations
|
||||
Quality Check Template Service - Business Logic Layer
|
||||
Handles quality template operations with business rules and validation
|
||||
"""
|
||||
|
||||
from sqlalchemy.orm import Session
|
||||
from sqlalchemy import and_, or_, func
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from typing import List, Optional, Tuple
|
||||
from uuid import UUID, uuid4
|
||||
from datetime import datetime, timezone
|
||||
import structlog
|
||||
|
||||
from ..models.production import QualityCheckTemplate, ProcessStage
|
||||
from ..schemas.quality_templates import QualityCheckTemplateCreate, QualityCheckTemplateUpdate
|
||||
from app.models.production import QualityCheckTemplate, ProcessStage
|
||||
from app.schemas.quality_templates import QualityCheckTemplateCreate, QualityCheckTemplateUpdate
|
||||
from app.repositories.quality_template_repository import QualityTemplateRepository
|
||||
|
||||
logger = structlog.get_logger()
|
||||
|
||||
|
||||
class QualityTemplateService:
|
||||
"""Service for managing quality check templates"""
|
||||
"""Service for managing quality check templates with business logic"""
|
||||
|
||||
def __init__(self, db: Session):
|
||||
def __init__(self, db: AsyncSession):
|
||||
self.db = db
|
||||
self.repository = QualityTemplateRepository(db)
|
||||
|
||||
def create_template(
|
||||
async def create_template(
|
||||
self,
|
||||
tenant_id: str,
|
||||
template_data: QualityCheckTemplateCreate
|
||||
) -> QualityCheckTemplate:
|
||||
"""Create a new quality check template"""
|
||||
"""
|
||||
Create a new quality check template
|
||||
|
||||
# Validate template code uniqueness if provided
|
||||
if template_data.template_code:
|
||||
existing = self.db.query(QualityCheckTemplate).filter(
|
||||
and_(
|
||||
QualityCheckTemplate.tenant_id == tenant_id,
|
||||
QualityCheckTemplate.template_code == template_data.template_code
|
||||
Business Rules:
|
||||
- Template code must be unique within tenant
|
||||
- Validates template configuration
|
||||
"""
|
||||
try:
|
||||
# Business Rule: Validate template code uniqueness
|
||||
if template_data.template_code:
|
||||
exists = await self.repository.check_template_code_exists(
|
||||
tenant_id,
|
||||
template_data.template_code
|
||||
)
|
||||
).first()
|
||||
if existing:
|
||||
raise ValueError(f"Template code '{template_data.template_code}' already exists")
|
||||
if exists:
|
||||
raise ValueError(f"Template code '{template_data.template_code}' already exists")
|
||||
|
||||
# Create template
|
||||
template = QualityCheckTemplate(
|
||||
id=uuid4(),
|
||||
tenant_id=UUID(tenant_id),
|
||||
**template_data.dict()
|
||||
)
|
||||
# Business Rule: Validate template configuration
|
||||
is_valid, errors = self._validate_template_configuration(template_data.dict())
|
||||
if not is_valid:
|
||||
raise ValueError(f"Invalid template configuration: {', '.join(errors)}")
|
||||
|
||||
self.db.add(template)
|
||||
self.db.commit()
|
||||
self.db.refresh(template)
|
||||
# Create template via repository
|
||||
template_dict = template_data.dict()
|
||||
template_dict['id'] = uuid4()
|
||||
template_dict['tenant_id'] = UUID(tenant_id)
|
||||
|
||||
return template
|
||||
template = await self.repository.create(template_dict)
|
||||
|
||||
def get_templates(
|
||||
logger.info("Quality template created",
|
||||
template_id=str(template.id),
|
||||
tenant_id=tenant_id,
|
||||
template_code=template.template_code)
|
||||
|
||||
return template
|
||||
|
||||
except ValueError as e:
|
||||
logger.warning("Template creation validation failed",
|
||||
tenant_id=tenant_id,
|
||||
error=str(e))
|
||||
raise
|
||||
except Exception as e:
|
||||
logger.error("Failed to create quality template",
|
||||
tenant_id=tenant_id,
|
||||
error=str(e))
|
||||
raise
|
||||
|
||||
async def get_templates(
self,
tenant_id: str,
stage: Optional[ProcessStage] = None,
@@ -59,225 +85,349 @@ class QualityTemplateService:
skip: int = 0,
limit: int = 100
) -> Tuple[List[QualityCheckTemplate], int]:
"""Get quality check templates with filtering and pagination"""
"""
Get quality check templates with filtering and pagination

query = self.db.query(QualityCheckTemplate).filter(
QualityCheckTemplate.tenant_id == tenant_id
)
Business Rules:
- Default to active templates only
- Limit maximum results per page
"""
try:
# Business Rule: Enforce maximum limit
if limit > 1000:
limit = 1000
logger.warning("Template list limit capped at 1000",
tenant_id=tenant_id,
requested_limit=limit)

# Apply filters
if is_active is not None:
query = query.filter(QualityCheckTemplate.is_active == is_active)

if check_type:
query = query.filter(QualityCheckTemplate.check_type == check_type)

if stage:
# Filter by applicable stages (JSON array contains stage)
query = query.filter(
func.json_contains(
QualityCheckTemplate.applicable_stages,
f'"{stage.value}"'
)
templates, total = await self.repository.get_templates_by_tenant(
tenant_id=tenant_id,
stage=stage,
check_type=check_type,
is_active=is_active,
skip=skip,
limit=limit
)

# Get total count
total = query.count()
logger.debug("Retrieved quality templates",
tenant_id=tenant_id,
total=total,
returned=len(templates))

# Apply pagination and ordering
templates = query.order_by(
QualityCheckTemplate.is_critical.desc(),
QualityCheckTemplate.is_required.desc(),
QualityCheckTemplate.name
).offset(skip).limit(limit).all()
return templates, total

return templates, total
except Exception as e:
logger.error("Failed to get quality templates",
tenant_id=tenant_id,
error=str(e))
raise
def get_template(
async def get_template(
self,
tenant_id: str,
template_id: UUID
) -> Optional[QualityCheckTemplate]:
"""Get a specific quality check template"""
"""
Get a specific quality check template

return self.db.query(QualityCheckTemplate).filter(
and_(
QualityCheckTemplate.tenant_id == tenant_id,
QualityCheckTemplate.id == template_id
)
).first()
Business Rules:
- Template must belong to tenant
"""
try:
template = await self.repository.get_by_tenant_and_id(tenant_id, template_id)

def update_template(
if template:
logger.debug("Retrieved quality template",
template_id=str(template_id),
tenant_id=tenant_id)
else:
logger.warning("Quality template not found",
template_id=str(template_id),
tenant_id=tenant_id)

return template

except Exception as e:
logger.error("Failed to get quality template",
template_id=str(template_id),
tenant_id=tenant_id,
error=str(e))
raise
|
||||
self,
|
||||
tenant_id: str,
|
||||
template_id: UUID,
|
||||
template_data: QualityCheckTemplateUpdate
|
||||
) -> Optional[QualityCheckTemplate]:
|
||||
"""Update a quality check template"""
|
||||
"""
|
||||
Update a quality check template
|
||||
|
||||
template = self.get_template(tenant_id, template_id)
|
||||
if not template:
|
||||
return None
|
||||
Business Rules:
|
||||
- Template must exist and belong to tenant
|
||||
- Template code must remain unique if changed
|
||||
- Validates updated configuration
|
||||
"""
|
||||
try:
|
||||
# Business Rule: Template must exist
|
||||
template = await self.repository.get_by_tenant_and_id(tenant_id, template_id)
|
||||
if not template:
|
||||
logger.warning("Cannot update non-existent template",
|
||||
template_id=str(template_id),
|
||||
tenant_id=tenant_id)
|
||||
return None
|
||||
|
||||
# Validate template code uniqueness if being updated
|
||||
if template_data.template_code and template_data.template_code != template.template_code:
|
||||
existing = self.db.query(QualityCheckTemplate).filter(
|
||||
and_(
|
||||
QualityCheckTemplate.tenant_id == tenant_id,
|
||||
QualityCheckTemplate.template_code == template_data.template_code,
|
||||
QualityCheckTemplate.id != template_id
|
||||
# Business Rule: Validate template code uniqueness if being updated
|
||||
if template_data.template_code and template_data.template_code != template.template_code:
|
||||
exists = await self.repository.check_template_code_exists(
|
||||
tenant_id,
|
||||
template_data.template_code,
|
||||
exclude_id=template_id
|
||||
)
|
||||
).first()
|
||||
if existing:
|
||||
raise ValueError(f"Template code '{template_data.template_code}' already exists")
|
||||
if exists:
|
||||
raise ValueError(f"Template code '{template_data.template_code}' already exists")
|
||||
|
||||
# Update fields
|
||||
update_data = template_data.dict(exclude_unset=True)
|
||||
for field, value in update_data.items():
|
||||
setattr(template, field, value)
|
||||
# Business Rule: Validate updated configuration
|
||||
update_dict = template_data.dict(exclude_unset=True)
|
||||
if update_dict:
|
||||
# Merge with existing data for validation
|
||||
full_data = template.__dict__.copy()
|
||||
full_data.update(update_dict)
|
||||
is_valid, errors = self._validate_template_configuration(full_data)
|
||||
if not is_valid:
|
||||
raise ValueError(f"Invalid template configuration: {', '.join(errors)}")
|
||||
|
||||
template.updated_at = datetime.now(timezone.utc)
|
||||
# Update via repository
|
||||
update_dict['updated_at'] = datetime.now(timezone.utc)
|
||||
updated_template = await self.repository.update(template_id, update_dict)
|
||||
|
||||
self.db.commit()
|
||||
self.db.refresh(template)
|
||||
logger.info("Quality template updated",
|
||||
template_id=str(template_id),
|
||||
tenant_id=tenant_id)
|
||||
|
||||
return template
|
||||
return updated_template
|
||||
|
||||
def delete_template(
|
||||
except ValueError as e:
|
||||
logger.warning("Template update validation failed",
|
||||
template_id=str(template_id),
|
||||
tenant_id=tenant_id,
|
||||
error=str(e))
|
||||
raise
|
||||
except Exception as e:
|
||||
logger.error("Failed to update quality template",
|
||||
template_id=str(template_id),
|
||||
tenant_id=tenant_id,
|
||||
error=str(e))
|
||||
raise
|
||||
|
||||
async def delete_template(
self,
tenant_id: str,
template_id: UUID
) -> bool:
"""Delete a quality check template"""
"""
Delete a quality check template

template = self.get_template(tenant_id, template_id)
if not template:
return False
Business Rules:
- Template must exist and belong to tenant
- Consider soft delete for audit trail (future enhancement)
"""
try:
# Business Rule: Template must exist
template = await self.repository.get_by_tenant_and_id(tenant_id, template_id)
if not template:
logger.warning("Cannot delete non-existent template",
template_id=str(template_id),
tenant_id=tenant_id)
return False

# Check if template is in use (you might want to add this check)
# For now, we'll allow deletion but in production you might want to:
# 1. Soft delete by setting is_active = False
# 2. Check for dependent quality checks
# 3. Prevent deletion if in use
# TODO: Business Rule - Check if template is in use before deletion
# For now, allow deletion. In production you might want to:
# 1. Soft delete by setting is_active = False
# 2. Check for dependent quality checks
# 3. Prevent deletion if actively used

self.db.delete(template)
self.db.commit()
success = await self.repository.delete(template_id)

return True
if success:
logger.info("Quality template deleted",
template_id=str(template_id),
tenant_id=tenant_id)
else:
logger.warning("Failed to delete quality template",
template_id=str(template_id),
tenant_id=tenant_id)

def get_templates_for_stage(
return success

except Exception as e:
logger.error("Failed to delete quality template",
template_id=str(template_id),
tenant_id=tenant_id,
error=str(e))
raise
async def get_templates_for_stage(
self,
tenant_id: str,
stage: ProcessStage,
is_active: Optional[bool] = True
) -> List[QualityCheckTemplate]:
"""Get all quality check templates applicable to a specific process stage"""
"""
Get all quality check templates applicable to a specific process stage

query = self.db.query(QualityCheckTemplate).filter(
and_(
QualityCheckTemplate.tenant_id == tenant_id,
or_(
# Templates that specify applicable stages
func.json_contains(
QualityCheckTemplate.applicable_stages,
f'"{stage.value}"'
),
# Templates that don't specify stages (applicable to all)
QualityCheckTemplate.applicable_stages.is_(None)
)
Business Rules:
- Returns templates ordered by criticality
- Required templates come first
"""
try:
templates = await self.repository.get_templates_for_stage(
tenant_id=tenant_id,
stage=stage,
is_active=is_active
)
)

if is_active is not None:
query = query.filter(QualityCheckTemplate.is_active == is_active)
logger.debug("Retrieved templates for stage",
tenant_id=tenant_id,
stage=stage.value,
count=len(templates))

return query.order_by(
QualityCheckTemplate.is_critical.desc(),
QualityCheckTemplate.is_required.desc(),
QualityCheckTemplate.weight.desc(),
QualityCheckTemplate.name
).all()
return templates

def duplicate_template(
except Exception as e:
logger.error("Failed to get templates for stage",
tenant_id=tenant_id,
stage=stage.value if stage else None,
error=str(e))
raise
async def duplicate_template(
self,
tenant_id: str,
template_id: UUID
) -> Optional[QualityCheckTemplate]:
"""Duplicate an existing quality check template"""
"""
Duplicate an existing quality check template

original = self.get_template(tenant_id, template_id)
if not original:
return None
Business Rules:
- Original template must exist
- Duplicate gets modified name and code
- All other attributes copied
"""
try:
# Business Rule: Original must exist
original = await self.repository.get_by_tenant_and_id(tenant_id, template_id)
if not original:
logger.warning("Cannot duplicate non-existent template",
template_id=str(template_id),
tenant_id=tenant_id)
return None

# Create duplicate with modified name and code
duplicate_data = {
'name': f"{original.name} (Copy)",
'template_code': f"{original.template_code}_copy" if original.template_code else None,
'check_type': original.check_type,
'category': original.category,
'description': original.description,
'instructions': original.instructions,
'parameters': original.parameters,
'thresholds': original.thresholds,
'scoring_criteria': original.scoring_criteria,
'is_active': original.is_active,
'is_required': original.is_required,
'is_critical': original.is_critical,
'weight': original.weight,
'min_value': original.min_value,
'max_value': original.max_value,
'target_value': original.target_value,
'unit': original.unit,
'tolerance_percentage': original.tolerance_percentage,
'applicable_stages': original.applicable_stages,
'created_by': original.created_by
}
# Business Rule: Create duplicate with modified identifiers
duplicate_data = {
'name': f"{original.name} (Copy)",
'template_code': f"{original.template_code}_copy" if original.template_code else None,
'check_type': original.check_type,
'category': original.category,
'description': original.description,
'instructions': original.instructions,
'parameters': original.parameters,
'thresholds': original.thresholds,
'scoring_criteria': original.scoring_criteria,
'is_active': original.is_active,
'is_required': original.is_required,
'is_critical': original.is_critical,
'weight': original.weight,
'min_value': original.min_value,
'max_value': original.max_value,
'target_value': original.target_value,
'unit': original.unit,
'tolerance_percentage': original.tolerance_percentage,
'applicable_stages': original.applicable_stages,
'created_by': original.created_by
}

create_data = QualityCheckTemplateCreate(**duplicate_data)
return self.create_template(tenant_id, create_data)
create_data = QualityCheckTemplateCreate(**duplicate_data)
duplicate = await self.create_template(tenant_id, create_data)

def get_templates_by_recipe_config(
logger.info("Quality template duplicated",
original_id=str(template_id),
duplicate_id=str(duplicate.id),
tenant_id=tenant_id)

return duplicate

except Exception as e:
logger.error("Failed to duplicate quality template",
template_id=str(template_id),
tenant_id=tenant_id,
error=str(e))
raise
async def get_templates_by_recipe_config(
self,
tenant_id: str,
stage: ProcessStage,
recipe_quality_config: dict
) -> List[QualityCheckTemplate]:
"""Get quality check templates based on recipe configuration"""
"""
Get quality check templates based on recipe configuration

# Extract template IDs from recipe configuration for the specific stage
stage_config = recipe_quality_config.get('stages', {}).get(stage.value)
if not stage_config:
return []
Business Rules:
- Returns only active templates
- Filters by template IDs specified in recipe config
- Ordered by criticality
"""
try:
# Business Rule: Extract template IDs from recipe config
stage_config = recipe_quality_config.get('stages', {}).get(stage.value)
if not stage_config:
logger.debug("No quality config for stage",
tenant_id=tenant_id,
stage=stage.value)
return []

template_ids = stage_config.get('template_ids', [])
if not template_ids:
return []
template_ids = stage_config.get('template_ids', [])
if not template_ids:
logger.debug("No template IDs in config",
tenant_id=tenant_id,
stage=stage.value)
return []

# Get templates by IDs
templates = self.db.query(QualityCheckTemplate).filter(
and_(
QualityCheckTemplate.tenant_id == tenant_id,
QualityCheckTemplate.id.in_([UUID(tid) for tid in template_ids]),
QualityCheckTemplate.is_active == True
)
).order_by(
QualityCheckTemplate.is_critical.desc(),
QualityCheckTemplate.is_required.desc(),
QualityCheckTemplate.weight.desc()
).all()
# Get templates by IDs via repository
template_ids_uuid = [UUID(tid) for tid in template_ids]
templates = await self.repository.get_templates_by_ids(tenant_id, template_ids_uuid)

return templates
logger.debug("Retrieved templates by recipe config",
tenant_id=tenant_id,
stage=stage.value,
count=len(templates))

def validate_template_configuration(
return templates

except Exception as e:
logger.error("Failed to get templates by recipe config",
tenant_id=tenant_id,
stage=stage.value if stage else None,
error=str(e))
raise
def _validate_template_configuration(
self,
tenant_id: str,
template_data: dict
) -> Tuple[bool, List[str]]:
"""Validate quality check template configuration"""
"""
Validate quality check template configuration (business rules)

Business Rules:
- Measurement checks require unit
- Min value must be less than max value
- Visual checks require scoring criteria
- Process stages must be valid
"""
errors = []

# Validate check type specific requirements
# Business Rule: Type-specific validation
check_type = template_data.get('check_type')

if check_type in ['measurement', 'temperature', 'weight']:
@@ -290,12 +440,12 @@ class QualityTemplateService:
if min_val is not None and max_val is not None and min_val >= max_val:
errors.append("Minimum value must be less than maximum value")

# Validate scoring criteria
# Business Rule: Visual checks need scoring criteria
scoring = template_data.get('scoring_criteria', {})
if check_type == 'visual' and not scoring:
errors.append("Visual checks require scoring criteria")

# Validate process stages
# Business Rule: Validate process stages
stages = template_data.get('applicable_stages', [])
if stages:
valid_stages = [stage.value for stage in ProcessStage]
@@ -303,4 +453,11 @@ class QualityTemplateService:
if invalid_stages:
errors.append(f"Invalid process stages: {invalid_stages}")

return len(errors) == 0, errors
is_valid = len(errors) == 0

if not is_valid:
logger.warning("Template configuration validation failed",
check_type=check_type,
errors=errors)

return is_valid, errors
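The business rules enforced above are simple enough to check in isolation. A standalone sketch re-stating the same rules (illustrative only, not the committed code; the helper name and measurement fields are assumptions):

def validate_template_config(data: dict):
    """Illustrative restatement of the validation rules described above."""
    errors = []
    check_type = data.get('check_type')
    # Measurement-style checks need a unit and a sane min/max range
    if check_type in ('measurement', 'temperature', 'weight'):
        if not data.get('unit'):
            errors.append("Measurement checks require unit")
        min_val, max_val = data.get('min_value'), data.get('max_value')
        if min_val is not None and max_val is not None and min_val >= max_val:
            errors.append("Minimum value must be less than maximum value")
    # Visual checks need scoring criteria
    if check_type == 'visual' and not data.get('scoring_criteria'):
        errors.append("Visual checks require scoring criteria")
    return len(errors) == 0, errors

assert validate_template_config({'check_type': 'visual'})[0] is False
assert validate_template_config({'check_type': 'temperature', 'unit': 'C',
                                  'min_value': 180.0, 'max_value': 220.0})[0] is True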
@@ -188,6 +188,34 @@ async def update_recipe(
raise HTTPException(status_code=500, detail="Internal server error")


@router.get(
route_builder.build_custom_route(RouteCategory.BASE, ["count"]),
response_model=dict
)
async def count_recipes(
tenant_id: UUID,
db: AsyncSession = Depends(get_db)
):
"""Get count of recipes for a tenant"""
try:
recipe_service = RecipeService(db)

# Use the search method with a high limit to fetch all recipes, then count them
recipes = await recipe_service.search_recipes(
tenant_id=tenant_id,
limit=10000  # High limit to get all
)

count = len(recipes)
logger.info(f"Retrieved recipe count for tenant {tenant_id}: {count}")

return {"count": count}

except Exception as e:
logger.error(f"Error counting recipes for tenant {tenant_id}: {e}")
raise HTTPException(status_code=500, detail="Internal server error")


@router.delete(
route_builder.build_custom_route(RouteCategory.BASE, ["{recipe_id}"])
)

@@ -207,6 +207,35 @@ async def delete_supplier(
raise HTTPException(status_code=500, detail="Failed to delete supplier")


@router.get(
route_builder.build_base_route("suppliers/count"),
response_model=dict
)
async def count_suppliers(
tenant_id: str = Path(..., description="Tenant ID"),
db: AsyncSession = Depends(get_db)
):
"""Get count of suppliers for a tenant"""
try:
service = SupplierService(db)

# Use search with high limit to get all suppliers
search_params = SupplierSearchParams(limit=10000)
suppliers = await service.search_suppliers(
tenant_id=UUID(tenant_id),
search_params=search_params
)

count = len(suppliers)
logger.info("Retrieved supplier count", tenant_id=tenant_id, count=count)

return {"count": count}

except Exception as e:
logger.error("Error counting suppliers", tenant_id=tenant_id, error=str(e))
raise HTTPException(status_code=500, detail="Failed to count suppliers")


@router.get(
route_builder.build_resource_action_route("suppliers", "supplier_id", "products"),
response_model=List[Dict[str, Any]]

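Both count endpoints above materialize up to 10,000 rows and take len() of the result, which does unnecessary work for large tenants. A dedicated COUNT at the database level would be cheaper; a hedged sketch (the model and session names are assumptions, since the real query layer lives inside the services):

from sqlalchemy import func, select

async def count_rows(db, model, tenant_id):
    # Count in the database instead of fetching and materializing every row.
    result = await db.execute(
        select(func.count()).select_from(model).where(model.tenant_id == tenant_id)
    )
    return result.scalar_one()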
@@ -26,6 +26,7 @@ from shared.routing.route_builder import RouteBuilder
from shared.database.base import create_database_manager
from shared.monitoring.metrics import track_endpoint_metrics
from shared.security import create_audit_logger, AuditSeverity, AuditAction
from shared.config.base import is_internal_service

logger = structlog.get_logger()
router = APIRouter()
@@ -64,7 +65,22 @@ def get_subscription_limit_service():
try:
from app.core.config import settings
database_manager = create_database_manager(settings.DATABASE_URL, "tenant-service")
redis_client = get_tenant_redis_client()

# Get Redis client properly (it's an async function)
import asyncio
try:
# Try to get the event loop, if we're in an async context
loop = asyncio.get_event_loop()
if loop.is_running():
# If we're in a running event loop, we can't use await here
# So we'll pass None and handle Redis initialization in the service
redis_client = None
else:
redis_client = asyncio.run(get_tenant_redis_client())
except RuntimeError:
# No event loop running, we can use async/await
redis_client = asyncio.run(get_tenant_redis_client())

return SubscriptionLimitService(database_manager, redis_client)
except Exception as e:
logger.error("Failed to create subscription limit service", error=str(e))
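The event-loop juggling above exists because an async Redis factory is being called from a synchronous dependency. FastAPI awaits async dependencies natively, so a simpler shape is possible; a sketch under that assumption (same names as in the hunk, but not part of this commit):

async def get_subscription_limit_service_async():
    """Async dependency variant: FastAPI awaits this, so no asyncio.run() is needed."""
    from app.core.config import settings
    database_manager = create_database_manager(settings.DATABASE_URL, "tenant-service")
    redis_client = await get_tenant_redis_client()  # awaited directly, no loop inspection
    return SubscriptionLimitService(database_manager, redis_client)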
@@ -204,9 +220,10 @@ async def verify_tenant_access(
):
"""Verify if user has access to tenant - Enhanced version with detailed permissions"""

# Check if this is a service request
if user_id in ["training-service", "data-service", "forecasting-service", "auth-service"]:
# Check if this is an internal service request using centralized registry
if is_internal_service(user_id):
# Services have access to all tenants for their operations
logger.info("Service access granted", service=user_id, tenant_id=str(tenant_id))
return TenantAccessResponse(
has_access=True,
role="service",

186
services/tenant/app/api/tenant_settings.py
Normal file
@@ -0,0 +1,186 @@
# services/tenant/app/api/tenant_settings.py
"""
Tenant Settings API Endpoints
REST API for managing tenant-specific operational settings
"""

from fastapi import APIRouter, Depends, HTTPException, status
from sqlalchemy.ext.asyncio import AsyncSession
from uuid import UUID
from typing import Dict, Any

from app.core.database import get_db
from shared.routing.route_builder import RouteBuilder
from ..services.tenant_settings_service import TenantSettingsService
from ..schemas.tenant_settings import (
    TenantSettingsResponse,
    TenantSettingsUpdate,
    CategoryUpdateRequest,
    CategoryResetResponse
)

router = APIRouter()
route_builder = RouteBuilder("tenants")


@router.get(
    "/{tenant_id}/settings",
    response_model=TenantSettingsResponse,
    summary="Get all tenant settings",
    description="Retrieve all operational settings for a tenant. Creates default settings if none exist."
)
async def get_tenant_settings(
    tenant_id: UUID,
    db: AsyncSession = Depends(get_db)
):
    """
    Get all settings for a tenant

    - **tenant_id**: UUID of the tenant

    Returns all setting categories with their current values.
    If settings don't exist, default values are created and returned.
    """
    service = TenantSettingsService(db)
    settings = await service.get_settings(tenant_id)
    return settings


@router.put(
    "/{tenant_id}/settings",
    response_model=TenantSettingsResponse,
    summary="Update tenant settings",
    description="Update one or more setting categories for a tenant. Only provided categories are updated."
)
async def update_tenant_settings(
    tenant_id: UUID,
    updates: TenantSettingsUpdate,
    db: AsyncSession = Depends(get_db)
):
    """
    Update tenant settings

    - **tenant_id**: UUID of the tenant
    - **updates**: Object containing setting categories to update

    Only provided categories will be updated. Omitted categories remain unchanged.
    All values are validated against min/max constraints.
    """
    service = TenantSettingsService(db)
    settings = await service.update_settings(tenant_id, updates)
    return settings


@router.get(
    "/{tenant_id}/settings/{category}",
    response_model=Dict[str, Any],
    summary="Get settings for a specific category",
    description="Retrieve settings for a single category (procurement, inventory, production, supplier, pos, or order)"
)
async def get_category_settings(
    tenant_id: UUID,
    category: str,
    db: AsyncSession = Depends(get_db)
):
    """
    Get settings for a specific category

    - **tenant_id**: UUID of the tenant
    - **category**: Category name (procurement, inventory, production, supplier, pos, order)

    Returns settings for the specified category only.

    Valid categories:
    - procurement: Auto-approval and procurement planning settings
    - inventory: Stock thresholds and temperature monitoring
    - production: Capacity, quality, and scheduling settings
    - supplier: Payment terms and performance thresholds
    - pos: POS integration sync settings
    - order: Discount and delivery settings
    """
    service = TenantSettingsService(db)
    category_settings = await service.get_category(tenant_id, category)
    return {
        "tenant_id": str(tenant_id),
        "category": category,
        "settings": category_settings
    }


@router.put(
    "/{tenant_id}/settings/{category}",
    response_model=TenantSettingsResponse,
    summary="Update settings for a specific category",
    description="Update all or some fields within a single category"
)
async def update_category_settings(
    tenant_id: UUID,
    category: str,
    request: CategoryUpdateRequest,
    db: AsyncSession = Depends(get_db)
):
    """
    Update settings for a specific category

    - **tenant_id**: UUID of the tenant
    - **category**: Category name
    - **request**: Object containing the settings to update

    Updates only the specified category. All values are validated.
    """
    service = TenantSettingsService(db)
    settings = await service.update_category(tenant_id, category, request.settings)
    return settings


@router.post(
    "/{tenant_id}/settings/{category}/reset",
    response_model=CategoryResetResponse,
    summary="Reset category to default values",
    description="Reset a specific category to its default values"
)
async def reset_category_settings(
    tenant_id: UUID,
    category: str,
    db: AsyncSession = Depends(get_db)
):
    """
    Reset a category to default values

    - **tenant_id**: UUID of the tenant
    - **category**: Category name

    Resets all settings in the specified category to their default values.
    This operation cannot be undone.
    """
    service = TenantSettingsService(db)
    reset_settings = await service.reset_category(tenant_id, category)

    return CategoryResetResponse(
        category=category,
        settings=reset_settings,
        message=f"Category '{category}' has been reset to default values"
    )


@router.delete(
    "/{tenant_id}/settings",
    status_code=status.HTTP_204_NO_CONTENT,
    summary="Delete tenant settings",
    description="Delete all settings for a tenant (used when tenant is deleted)"
)
async def delete_tenant_settings(
    tenant_id: UUID,
    db: AsyncSession = Depends(get_db)
):
    """
    Delete tenant settings

    - **tenant_id**: UUID of the tenant

    This endpoint is typically called automatically when a tenant is deleted.
    It removes all setting data for the tenant.
    """
    service = TenantSettingsService(db)
    await service.delete_settings(tenant_id)
    return None

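For reference, the endpoints above compose into a simple client flow; a hedged httpx sketch (the base URL and any auth headers are deployment-specific assumptions, not part of this commit):

import httpx

async def demo_settings_flow(base_url: str, tenant_id: str):
    async with httpx.AsyncClient(base_url=base_url) as client:
        # GET creates defaults on first access, per the endpoint description
        all_settings = (await client.get(f"/api/v1/tenants/{tenant_id}/settings")).json()
        # Update a single category via the category endpoint
        await client.put(
            f"/api/v1/tenants/{tenant_id}/settings/inventory",
            json={"settings": {**all_settings["inventory_settings"], "low_stock_threshold": 15}},
        )
        # Reset the category back to defaults
        await client.post(f"/api/v1/tenants/{tenant_id}/settings/inventory/reset")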
@@ -37,15 +37,36 @@ async def get_tenant(
current_user: Dict[str, Any] = Depends(get_current_user_dep),
tenant_service: EnhancedTenantService = Depends(get_enhanced_tenant_service)
):
"""Get tenant by ID - ATOMIC operation"""
"""Get tenant by ID - ATOMIC operation - ENHANCED with logging"""

logger.info(
"Tenant GET request received",
tenant_id=str(tenant_id),
user_id=current_user.get("user_id"),
user_type=current_user.get("type", "user"),
is_service=current_user.get("type") == "service",
role=current_user.get("role"),
service_name=current_user.get("service", "none")
)

tenant = await tenant_service.get_tenant_by_id(str(tenant_id))
if not tenant:
logger.warning(
"Tenant not found",
tenant_id=str(tenant_id),
user_id=current_user.get("user_id")
)
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
detail="Tenant not found"
)

logger.debug(
"Tenant GET request successful",
tenant_id=str(tenant_id),
user_id=current_user.get("user_id")
)

return tenant

@router.put(route_builder.build_base_route("{tenant_id}", include_tenant_prefix=False), response_model=TenantResponse)

@@ -7,7 +7,7 @@ from fastapi import FastAPI
from sqlalchemy import text
from app.core.config import settings
from app.core.database import database_manager
from app.api import tenants, tenant_members, tenant_operations, webhooks, internal_demo, plans, subscription
from app.api import tenants, tenant_members, tenant_operations, webhooks, internal_demo, plans, subscription, tenant_settings
from shared.service_base import StandardFastAPIService


@@ -68,6 +68,7 @@ class TenantService(StandardFastAPIService):
"""Custom startup logic for tenant service"""
# Import models to ensure they're registered with SQLAlchemy
from app.models.tenants import Tenant, TenantMember, Subscription
from app.models.tenant_settings import TenantSettings
self.logger.info("Tenant models imported successfully")

async def on_shutdown(self, app: FastAPI):
@@ -113,6 +114,8 @@ service.setup_custom_endpoints()
# Include routers
service.add_router(plans.router, tags=["subscription-plans"]) # Public endpoint
service.add_router(subscription.router, tags=["subscription"])
# Register settings router BEFORE tenants router to ensure proper route matching
service.add_router(tenant_settings.router, prefix="/api/v1/tenants", tags=["tenant-settings"])
service.add_router(tenants.router, tags=["tenants"])
service.add_router(tenant_members.router, tags=["tenant-members"])
service.add_router(tenant_operations.router, tags=["tenant-operations"])

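The ordering comment matters because Starlette matches routes in registration order: if the tenants router exposes a parameterized path that could capture ".../settings" as a path parameter, registering the literal settings routes first keeps them reachable. A minimal illustration of the principle (toy paths, not this codebase):

from fastapi import FastAPI

app = FastAPI()

@app.get("/tenants/{tenant_id}/settings")    # registered first: wins for .../settings
async def settings_route(tenant_id: str):
    return {"route": "settings"}

@app.get("/tenants/{tenant_id}/{resource}")  # would otherwise capture "settings"
async def generic_route(tenant_id: str, resource: str):
    return {"route": "generic", "resource": resource}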
195
services/tenant/app/models/tenant_settings.py
Normal file
@@ -0,0 +1,195 @@
# services/tenant/app/models/tenant_settings.py
"""
Tenant Settings Model
Centralized configuration storage for all tenant-specific operational settings
"""

from sqlalchemy import Column, String, Boolean, DateTime, ForeignKey, JSON
from sqlalchemy.dialects.postgresql import UUID
from sqlalchemy.orm import relationship
from datetime import datetime, timezone
import uuid

from shared.database.base import Base


class TenantSettings(Base):
    """
    Centralized tenant settings model
    Stores all operational configurations for a tenant across all services
    """
    __tablename__ = "tenant_settings"

    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    tenant_id = Column(UUID(as_uuid=True), ForeignKey("tenants.id", ondelete="CASCADE"), nullable=False, unique=True, index=True)

    # Procurement & Auto-Approval Settings (Orders Service)
    procurement_settings = Column(JSON, nullable=False, default=lambda: {
        "auto_approve_enabled": True,
        "auto_approve_threshold_eur": 500.0,
        "auto_approve_min_supplier_score": 0.80,
        "require_approval_new_suppliers": True,
        "require_approval_critical_items": True,
        "procurement_lead_time_days": 3,
        "demand_forecast_days": 14,
        "safety_stock_percentage": 20.0,
        "po_approval_reminder_hours": 24,
        "po_critical_escalation_hours": 12
    })

    # Inventory Management Settings (Inventory Service)
    inventory_settings = Column(JSON, nullable=False, default=lambda: {
        "low_stock_threshold": 10,
        "reorder_point": 20,
        "reorder_quantity": 50,
        "expiring_soon_days": 7,
        "expiration_warning_days": 3,
        "quality_score_threshold": 8.0,
        "temperature_monitoring_enabled": True,
        "refrigeration_temp_min": 1.0,
        "refrigeration_temp_max": 4.0,
        "freezer_temp_min": -20.0,
        "freezer_temp_max": -15.0,
        "room_temp_min": 18.0,
        "room_temp_max": 25.0,
        "temp_deviation_alert_minutes": 15,
        "critical_temp_deviation_minutes": 5
    })

    # Production Settings (Production Service)
    production_settings = Column(JSON, nullable=False, default=lambda: {
        "planning_horizon_days": 7,
        "minimum_batch_size": 1.0,
        "maximum_batch_size": 100.0,
        "production_buffer_percentage": 10.0,
        "working_hours_per_day": 12,
        "max_overtime_hours": 4,
        "capacity_utilization_target": 0.85,
        "capacity_warning_threshold": 0.95,
        "quality_check_enabled": True,
        "minimum_yield_percentage": 85.0,
        "quality_score_threshold": 8.0,
        "schedule_optimization_enabled": True,
        "prep_time_buffer_minutes": 30,
        "cleanup_time_buffer_minutes": 15,
        "labor_cost_per_hour_eur": 15.0,
        "overhead_cost_percentage": 20.0
    })

    # Supplier Settings (Suppliers Service)
    supplier_settings = Column(JSON, nullable=False, default=lambda: {
        "default_payment_terms_days": 30,
        "default_delivery_days": 3,
        "excellent_delivery_rate": 95.0,
        "good_delivery_rate": 90.0,
        "excellent_quality_rate": 98.0,
        "good_quality_rate": 95.0,
        "critical_delivery_delay_hours": 24,
        "critical_quality_rejection_rate": 10.0,
        "high_cost_variance_percentage": 15.0
    })

    # POS Integration Settings (POS Service)
    pos_settings = Column(JSON, nullable=False, default=lambda: {
        "sync_interval_minutes": 5,
        "auto_sync_products": True,
        "auto_sync_transactions": True
    })

    # Order & Business Rules Settings (Orders Service)
    order_settings = Column(JSON, nullable=False, default=lambda: {
        "max_discount_percentage": 50.0,
        "default_delivery_window_hours": 48,
        "dynamic_pricing_enabled": False,
        "discount_enabled": True,
        "delivery_tracking_enabled": True
    })

    # Timestamps
    created_at = Column(DateTime(timezone=True), default=lambda: datetime.now(timezone.utc), nullable=False)
    updated_at = Column(DateTime(timezone=True), default=lambda: datetime.now(timezone.utc), onupdate=lambda: datetime.now(timezone.utc), nullable=False)

    # Relationships
    tenant = relationship("Tenant", backref="settings")

    def __repr__(self):
        return f"<TenantSettings(tenant_id={self.tenant_id})>"

    @staticmethod
    def get_default_settings() -> dict:
        """
        Get default settings for all categories
        Returns a dictionary with default values for all setting categories
        """
        return {
            "procurement_settings": {
                "auto_approve_enabled": True,
                "auto_approve_threshold_eur": 500.0,
                "auto_approve_min_supplier_score": 0.80,
                "require_approval_new_suppliers": True,
                "require_approval_critical_items": True,
                "procurement_lead_time_days": 3,
                "demand_forecast_days": 14,
                "safety_stock_percentage": 20.0,
                "po_approval_reminder_hours": 24,
                "po_critical_escalation_hours": 12
            },
            "inventory_settings": {
                "low_stock_threshold": 10,
                "reorder_point": 20,
                "reorder_quantity": 50,
                "expiring_soon_days": 7,
                "expiration_warning_days": 3,
                "quality_score_threshold": 8.0,
                "temperature_monitoring_enabled": True,
                "refrigeration_temp_min": 1.0,
                "refrigeration_temp_max": 4.0,
                "freezer_temp_min": -20.0,
                "freezer_temp_max": -15.0,
                "room_temp_min": 18.0,
                "room_temp_max": 25.0,
                "temp_deviation_alert_minutes": 15,
                "critical_temp_deviation_minutes": 5
            },
            "production_settings": {
                "planning_horizon_days": 7,
                "minimum_batch_size": 1.0,
                "maximum_batch_size": 100.0,
                "production_buffer_percentage": 10.0,
                "working_hours_per_day": 12,
                "max_overtime_hours": 4,
                "capacity_utilization_target": 0.85,
                "capacity_warning_threshold": 0.95,
                "quality_check_enabled": True,
                "minimum_yield_percentage": 85.0,
                "quality_score_threshold": 8.0,
                "schedule_optimization_enabled": True,
                "prep_time_buffer_minutes": 30,
                "cleanup_time_buffer_minutes": 15,
                "labor_cost_per_hour_eur": 15.0,
                "overhead_cost_percentage": 20.0
            },
            "supplier_settings": {
                "default_payment_terms_days": 30,
                "default_delivery_days": 3,
                "excellent_delivery_rate": 95.0,
                "good_delivery_rate": 90.0,
                "excellent_quality_rate": 98.0,
                "good_quality_rate": 95.0,
                "critical_delivery_delay_hours": 24,
                "critical_quality_rejection_rate": 10.0,
                "high_cost_variance_percentage": 15.0
            },
            "pos_settings": {
                "sync_interval_minutes": 5,
                "auto_sync_products": True,
                "auto_sync_transactions": True
            },
            "order_settings": {
                "max_discount_percentage": 50.0,
                "default_delivery_window_hours": 48,
                "dynamic_pricing_enabled": False,
                "discount_enabled": True,
                "delivery_tracking_enabled": True
            }
        }

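Note that the defaults above are spelled out twice, once in the column default lambdas and once in get_default_settings(), so the two copies can drift apart. One way to keep a single source of truth (a sketch, not part of this commit; most values elided):

# Sketch: one canonical dict shared by the column defaults and the API defaults.
DEFAULT_SETTINGS = {
    "procurement_settings": {"auto_approve_enabled": True, "auto_approve_threshold_eur": 500.0},
    # ... remaining categories as defined above ...
}

def _default(category: str):
    # Return a factory producing a fresh copy, safe to use as a Column default.
    return lambda: dict(DEFAULT_SETTINGS[category])

# procurement_settings = Column(JSON, nullable=False, default=_default("procurement_settings"))
# get_default_settings() could then simply return a deep copy of DEFAULT_SETTINGS.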
@@ -13,6 +13,7 @@ import json
from .base import TenantBaseRepository
from app.models.tenants import TenantMember
from shared.database.exceptions import DatabaseError, ValidationError, DuplicateRecordError
from shared.config.base import is_internal_service

logger = structlog.get_logger()

@@ -89,6 +90,25 @@ class TenantMemberRepository(TenantBaseRepository):
async def get_membership(self, tenant_id: str, user_id: str) -> Optional[TenantMember]:
"""Get specific membership by tenant and user"""
try:
# Validate that user_id is a proper UUID format for actual users
# Service names like 'inventory-service' should be handled differently
import uuid
try:
uuid.UUID(user_id)
is_valid_uuid = True
except ValueError:
is_valid_uuid = False

# For internal service access, return None to indicate no user membership
# Service access should be handled at the API layer
if not is_valid_uuid and is_internal_service(user_id):
# This is an internal service request, return None
# Service access is granted at the API endpoint level
logger.debug("Internal service detected in membership lookup",
service=user_id,
tenant_id=tenant_id)
return None

memberships = await self.get_multi(
filters={
"tenant_id": tenant_id,
@@ -444,4 +464,4 @@ class TenantMemberRepository(TenantBaseRepository):
except Exception as e:
logger.error("Failed to cleanup inactive memberships",
error=str(e))
raise DatabaseError(f"Cleanup failed: {str(e)}")
raise DatabaseError(f"Cleanup failed: {str(e)}")

@@ -0,0 +1,82 @@
# services/tenant/app/repositories/tenant_settings_repository.py
"""
Tenant Settings Repository
Data access layer for tenant settings
"""

from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select
from typing import Optional
from uuid import UUID
import structlog

from ..models.tenant_settings import TenantSettings

logger = structlog.get_logger()


class TenantSettingsRepository:
    """Repository for TenantSettings data access"""

    def __init__(self, db: AsyncSession):
        self.db = db

    async def get_by_tenant_id(self, tenant_id: UUID) -> Optional[TenantSettings]:
        """
        Get tenant settings by tenant ID

        Args:
            tenant_id: UUID of the tenant

        Returns:
            TenantSettings or None if not found
        """
        result = await self.db.execute(
            select(TenantSettings).where(TenantSettings.tenant_id == tenant_id)
        )
        return result.scalar_one_or_none()

    async def create(self, settings: TenantSettings) -> TenantSettings:
        """
        Create new tenant settings

        Args:
            settings: TenantSettings instance to create

        Returns:
            Created TenantSettings instance
        """
        self.db.add(settings)
        await self.db.commit()
        await self.db.refresh(settings)
        return settings

    async def update(self, settings: TenantSettings) -> TenantSettings:
        """
        Update tenant settings

        Args:
            settings: TenantSettings instance with updates

        Returns:
            Updated TenantSettings instance
        """
        await self.db.commit()
        await self.db.refresh(settings)
        return settings

    async def delete(self, tenant_id: UUID) -> None:
        """
        Delete tenant settings

        Args:
            tenant_id: UUID of the tenant
        """
        result = await self.db.execute(
            select(TenantSettings).where(TenantSettings.tenant_id == tenant_id)
        )
        settings = result.scalar_one_or_none()

        if settings:
            await self.db.delete(settings)
            await self.db.commit()

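A usage sketch of the repository's read-modify-write pattern (the session setup is an assumption; the reassignment style matters because plain JSON columns don't reliably track in-place mutation in SQLAlchemy):

async def bump_reorder_point(db, tenant_id, new_value: int):
    repo = TenantSettingsRepository(db)
    settings = await repo.get_by_tenant_id(tenant_id)
    if settings is None:
        return None
    # Reassign the JSON column (rather than mutating it in place) so the
    # change is detected and flushed on commit.
    settings.inventory_settings = {**settings.inventory_settings, "reorder_point": new_value}
    return await repo.update(settings)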
181
services/tenant/app/schemas/tenant_settings.py
Normal file
@@ -0,0 +1,181 @@
# services/tenant/app/schemas/tenant_settings.py
"""
Tenant Settings Schemas
Pydantic models for API request/response validation
"""

from pydantic import BaseModel, Field, validator
from typing import Optional
from datetime import datetime
from uuid import UUID


# ================================================================
# SETTING CATEGORY SCHEMAS
# ================================================================

class ProcurementSettings(BaseModel):
    """Procurement and auto-approval settings"""
    auto_approve_enabled: bool = True
    auto_approve_threshold_eur: float = Field(500.0, ge=0, le=10000)
    auto_approve_min_supplier_score: float = Field(0.80, ge=0.0, le=1.0)
    require_approval_new_suppliers: bool = True
    require_approval_critical_items: bool = True
    procurement_lead_time_days: int = Field(3, ge=1, le=30)
    demand_forecast_days: int = Field(14, ge=1, le=90)
    safety_stock_percentage: float = Field(20.0, ge=0.0, le=100.0)
    po_approval_reminder_hours: int = Field(24, ge=1, le=168)
    po_critical_escalation_hours: int = Field(12, ge=1, le=72)


class InventorySettings(BaseModel):
    """Inventory management settings"""
    low_stock_threshold: int = Field(10, ge=1, le=1000)
    reorder_point: int = Field(20, ge=1, le=1000)
    reorder_quantity: int = Field(50, ge=1, le=1000)
    expiring_soon_days: int = Field(7, ge=1, le=30)
    expiration_warning_days: int = Field(3, ge=1, le=14)
    quality_score_threshold: float = Field(8.0, ge=0.0, le=10.0)
    temperature_monitoring_enabled: bool = True
    refrigeration_temp_min: float = Field(1.0, ge=-5.0, le=10.0)
    refrigeration_temp_max: float = Field(4.0, ge=-5.0, le=10.0)
    freezer_temp_min: float = Field(-20.0, ge=-30.0, le=0.0)
    freezer_temp_max: float = Field(-15.0, ge=-30.0, le=0.0)
    room_temp_min: float = Field(18.0, ge=10.0, le=35.0)
    room_temp_max: float = Field(25.0, ge=10.0, le=35.0)
    temp_deviation_alert_minutes: int = Field(15, ge=1, le=60)
    critical_temp_deviation_minutes: int = Field(5, ge=1, le=30)

    @validator('refrigeration_temp_max')
    def validate_refrigeration_range(cls, v, values):
        if 'refrigeration_temp_min' in values and v <= values['refrigeration_temp_min']:
            raise ValueError('refrigeration_temp_max must be greater than refrigeration_temp_min')
        return v

    @validator('freezer_temp_max')
    def validate_freezer_range(cls, v, values):
        if 'freezer_temp_min' in values and v <= values['freezer_temp_min']:
            raise ValueError('freezer_temp_max must be greater than freezer_temp_min')
        return v

    @validator('room_temp_max')
    def validate_room_range(cls, v, values):
        if 'room_temp_min' in values and v <= values['room_temp_min']:
            raise ValueError('room_temp_max must be greater than room_temp_min')
        return v


class ProductionSettings(BaseModel):
    """Production settings"""
    planning_horizon_days: int = Field(7, ge=1, le=30)
    minimum_batch_size: float = Field(1.0, ge=0.1, le=100.0)
    maximum_batch_size: float = Field(100.0, ge=1.0, le=1000.0)
    production_buffer_percentage: float = Field(10.0, ge=0.0, le=50.0)
    working_hours_per_day: int = Field(12, ge=1, le=24)
    max_overtime_hours: int = Field(4, ge=0, le=12)
    capacity_utilization_target: float = Field(0.85, ge=0.5, le=1.0)
    capacity_warning_threshold: float = Field(0.95, ge=0.7, le=1.0)
    quality_check_enabled: bool = True
    minimum_yield_percentage: float = Field(85.0, ge=50.0, le=100.0)
    quality_score_threshold: float = Field(8.0, ge=0.0, le=10.0)
    schedule_optimization_enabled: bool = True
    prep_time_buffer_minutes: int = Field(30, ge=0, le=120)
    cleanup_time_buffer_minutes: int = Field(15, ge=0, le=120)
    labor_cost_per_hour_eur: float = Field(15.0, ge=5.0, le=100.0)
    overhead_cost_percentage: float = Field(20.0, ge=0.0, le=50.0)

    @validator('maximum_batch_size')
    def validate_batch_size_range(cls, v, values):
        if 'minimum_batch_size' in values and v <= values['minimum_batch_size']:
            raise ValueError('maximum_batch_size must be greater than minimum_batch_size')
        return v

    @validator('capacity_warning_threshold')
    def validate_capacity_threshold(cls, v, values):
        if 'capacity_utilization_target' in values and v <= values['capacity_utilization_target']:
            raise ValueError('capacity_warning_threshold must be greater than capacity_utilization_target')
        return v


class SupplierSettings(BaseModel):
    """Supplier management settings"""
    default_payment_terms_days: int = Field(30, ge=1, le=90)
    default_delivery_days: int = Field(3, ge=1, le=30)
    excellent_delivery_rate: float = Field(95.0, ge=90.0, le=100.0)
    good_delivery_rate: float = Field(90.0, ge=80.0, le=99.0)
    excellent_quality_rate: float = Field(98.0, ge=90.0, le=100.0)
    good_quality_rate: float = Field(95.0, ge=80.0, le=99.0)
    critical_delivery_delay_hours: int = Field(24, ge=1, le=168)
    critical_quality_rejection_rate: float = Field(10.0, ge=0.0, le=50.0)
    high_cost_variance_percentage: float = Field(15.0, ge=0.0, le=100.0)

    @validator('good_delivery_rate')
    def validate_delivery_rates(cls, v, values):
        if 'excellent_delivery_rate' in values and v >= values['excellent_delivery_rate']:
            raise ValueError('good_delivery_rate must be less than excellent_delivery_rate')
        return v

    @validator('good_quality_rate')
    def validate_quality_rates(cls, v, values):
        if 'excellent_quality_rate' in values and v >= values['excellent_quality_rate']:
            raise ValueError('good_quality_rate must be less than excellent_quality_rate')
        return v


class POSSettings(BaseModel):
    """POS integration settings"""
    sync_interval_minutes: int = Field(5, ge=1, le=60)
    auto_sync_products: bool = True
    auto_sync_transactions: bool = True


class OrderSettings(BaseModel):
    """Order and business rules settings"""
    max_discount_percentage: float = Field(50.0, ge=0.0, le=100.0)
    default_delivery_window_hours: int = Field(48, ge=1, le=168)
    dynamic_pricing_enabled: bool = False
    discount_enabled: bool = True
    delivery_tracking_enabled: bool = True


# ================================================================
# REQUEST/RESPONSE SCHEMAS
# ================================================================

class TenantSettingsResponse(BaseModel):
    """Response schema for tenant settings"""
    id: UUID
    tenant_id: UUID
    procurement_settings: ProcurementSettings
    inventory_settings: InventorySettings
    production_settings: ProductionSettings
    supplier_settings: SupplierSettings
    pos_settings: POSSettings
    order_settings: OrderSettings
    created_at: datetime
    updated_at: datetime

    class Config:
        from_attributes = True


class TenantSettingsUpdate(BaseModel):
    """Schema for updating tenant settings"""
    procurement_settings: Optional[ProcurementSettings] = None
    inventory_settings: Optional[InventorySettings] = None
    production_settings: Optional[ProductionSettings] = None
    supplier_settings: Optional[SupplierSettings] = None
    pos_settings: Optional[POSSettings] = None
    order_settings: Optional[OrderSettings] = None


class CategoryUpdateRequest(BaseModel):
    """Schema for updating a single category"""
    settings: dict


class CategoryResetResponse(BaseModel):
    """Response schema for category reset"""
    category: str
    settings: dict
    message: str

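The cross-field validators above reject inverted ranges at parse time; a quick illustration (assuming the schemas module is importable):

from pydantic import ValidationError

try:
    # Max below min should fail the refrigeration range validator
    InventorySettings(refrigeration_temp_min=5.0, refrigeration_temp_max=2.0)
except ValidationError as e:
    print(e)  # refrigeration_temp_max must be greater than refrigeration_temp_min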
@@ -8,13 +8,14 @@ from typing import Dict, Any, Optional
from sqlalchemy.ext.asyncio import AsyncSession
from fastapi import HTTPException, status
from datetime import datetime, timezone
import httpx

from app.repositories import SubscriptionRepository, TenantRepository, TenantMemberRepository
from app.models.tenants import Subscription, Tenant, TenantMember
from shared.database.exceptions import DatabaseError
from shared.database.base import create_database_manager
from shared.subscription.plans import SubscriptionPlanMetadata, get_training_job_quota, get_forecast_quota
from shared.clients.recipes_client import create_recipes_client
from shared.clients.suppliers_client import create_suppliers_client

logger = structlog.get_logger()

@@ -459,50 +460,64 @@ class SubscriptionLimitService:
return 0

async def _get_recipe_count(self, tenant_id: str) -> int:
"""Get recipe count from recipes service"""
"""Get recipe count from recipes service using shared client"""
try:
from app.core.config import settings

async with httpx.AsyncClient(timeout=10.0) as client:
response = await client.get(
f"{settings.RECIPES_SERVICE_URL}/api/v1/tenants/{tenant_id}/recipes/count",
headers={"X-Internal-Request": "true"}
)
response.raise_for_status()
data = response.json()
count = data.get("count", 0)
# Use the shared recipes client with proper authentication and resilience
recipes_client = create_recipes_client(settings)
count = await recipes_client.count_recipes(tenant_id)

logger.info("Retrieved recipe count", tenant_id=tenant_id, count=count)
return count
logger.info(
"Retrieved recipe count via recipes client",
tenant_id=tenant_id,
count=count
)
return count

except Exception as e:
logger.error("Error getting recipe count", tenant_id=tenant_id, error=str(e))
logger.error(
"Error getting recipe count via recipes client",
tenant_id=tenant_id,
error=str(e)
)
# Return 0 as fallback to avoid breaking subscription display
return 0

async def _get_supplier_count(self, tenant_id: str) -> int:
"""Get supplier count from suppliers service"""
"""Get supplier count from suppliers service using shared client"""
try:
from app.core.config import settings

async with httpx.AsyncClient(timeout=10.0) as client:
response = await client.get(
f"{settings.SUPPLIERS_SERVICE_URL}/api/v1/tenants/{tenant_id}/suppliers/count",
headers={"X-Internal-Request": "true"}
)
response.raise_for_status()
data = response.json()
count = data.get("count", 0)
# Use the shared suppliers client with proper authentication and resilience
suppliers_client = create_suppliers_client(settings)
count = await suppliers_client.count_suppliers(tenant_id)

logger.info("Retrieved supplier count", tenant_id=tenant_id, count=count)
return count
logger.info(
"Retrieved supplier count via suppliers client",
tenant_id=tenant_id,
count=count
)
return count

except Exception as e:
logger.error("Error getting supplier count", tenant_id=tenant_id, error=str(e))
logger.error(
"Error getting supplier count via suppliers client",
tenant_id=tenant_id,
error=str(e)
)
# Return 0 as fallback to avoid breaking subscription display
return 0

async def _get_redis_quota(self, quota_key: str) -> int:
"""Get current count from Redis quota key"""
try:
if not self.redis:
# Try to initialize Redis client if not available
from app.core.config import settings
import shared.redis_utils
self.redis = await shared.redis_utils.initialize_redis(settings.REDIS_URL)

if not self.redis:
return 0

@@ -607,4 +622,4 @@ class SubscriptionLimitService:
"""Get limit value from plan metadata"""
plan_metadata = SubscriptionPlanMetadata.PLANS.get(plan, {})
limit = plan_metadata.get('limits', {}).get(limit_key)
return limit if limit != -1 else None
return limit if limit != -1 else None

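The tail of _get_limit encodes the plan convention that -1 means "unlimited", which callers then see as None. A small sketch of how a caller might apply that convention (helper name assumed, not from this commit):

def is_within_limit(current: int, limit) -> bool:
    # limit is None when the plan stores -1 ("unlimited")
    return True if limit is None else current < limit

assert is_within_limit(10, None)       # unlimited plan
assert not is_within_limit(10, 10)     # at the cap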
262
services/tenant/app/services/tenant_settings_service.py
Normal file
@@ -0,0 +1,262 @@
# services/tenant/app/services/tenant_settings_service.py
|
||||
"""
|
||||
Tenant Settings Service
|
||||
Business logic for managing tenant-specific operational settings
|
||||
"""
|
||||
|
||||
import structlog
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from uuid import UUID
|
||||
from typing import Optional, Dict, Any
|
||||
from fastapi import HTTPException, status
|
||||
|
||||
from ..models.tenant_settings import TenantSettings
|
||||
from ..repositories.tenant_settings_repository import TenantSettingsRepository
|
||||
from ..schemas.tenant_settings import (
|
||||
TenantSettingsUpdate,
|
||||
ProcurementSettings,
|
||||
InventorySettings,
|
||||
ProductionSettings,
|
||||
SupplierSettings,
|
||||
POSSettings,
|
||||
OrderSettings
|
||||
)
|
||||
|
||||
logger = structlog.get_logger()
|
||||
|
||||
|
||||
class TenantSettingsService:
|
||||
"""
|
||||
Service for managing tenant settings
|
||||
Handles validation, CRUD operations, and default value management
|
||||
"""
|
||||
|
||||
# Map category names to schema validators
|
||||
CATEGORY_SCHEMAS = {
|
||||
"procurement": ProcurementSettings,
|
||||
"inventory": InventorySettings,
|
||||
"production": ProductionSettings,
|
||||
"supplier": SupplierSettings,
|
||||
"pos": POSSettings,
|
||||
"order": OrderSettings
|
||||
}
|
||||
|
||||
# Map category names to database column names
|
||||
CATEGORY_COLUMNS = {
|
||||
"procurement": "procurement_settings",
|
||||
"inventory": "inventory_settings",
|
||||
"production": "production_settings",
|
||||
"supplier": "supplier_settings",
|
||||
"pos": "pos_settings",
|
||||
"order": "order_settings"
|
||||
}
|
||||
|
||||
def __init__(self, db: AsyncSession):
|
||||
self.db = db
|
||||
self.repository = TenantSettingsRepository(db)
|
||||
|
||||
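# --- Illustrative sketch, not part of the commit ---
# The two class-level maps drive the category endpoints: CATEGORY_SCHEMAS selects the
# Pydantic validator, CATEGORY_COLUMNS selects the JSON column on the row. A dry run
# of that double lookup (the "pos" payload keys are taken from the migration defaults
# further down; the snippet assumes this module's imports are in scope):

def resolve_category(category: str, payload: dict):
    validator = TenantSettingsService.CATEGORY_SCHEMAS[category]   # e.g. POSSettings
    column = TenantSettingsService.CATEGORY_COLUMNS[category]      # e.g. "pos_settings"
    return column, validator(**payload)  # validator raises on unknown/invalid fields

column, validated = resolve_category("pos", {
    "sync_interval_minutes": 10,
    "auto_sync_products": True,
    "auto_sync_transactions": False,
})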
    async def get_settings(self, tenant_id: UUID) -> TenantSettings:
        """
        Get tenant settings, creating defaults if they don't exist

        Args:
            tenant_id: UUID of the tenant

        Returns:
            TenantSettings object

        Raises:
            HTTPException: If the settings cannot be retrieved or created
        """
        try:
            # Try to get existing settings using repository
            settings = await self.repository.get_by_tenant_id(tenant_id)

            logger.info(f"Existing settings lookup for tenant {tenant_id}: {'found' if settings else 'not found'}")

            # Create default settings if they don't exist
            if not settings:
                logger.info(f"Creating default settings for tenant {tenant_id}")
                settings = await self._create_default_settings(tenant_id)
                logger.info(f"Successfully created default settings for tenant {tenant_id}")

            return settings
        except Exception as e:
            logger.error("Failed to get or create tenant settings", tenant_id=tenant_id, error=str(e), exc_info=True)
            # Re-raise as HTTPException to match the expected behavior
            raise HTTPException(
                status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
                detail=f"Failed to get tenant settings: {str(e)}"
            )

    async def update_settings(
        self,
        tenant_id: UUID,
        updates: TenantSettingsUpdate
    ) -> TenantSettings:
        """
        Update tenant settings

        Args:
            tenant_id: UUID of the tenant
            updates: TenantSettingsUpdate object with new values

        Returns:
            Updated TenantSettings object
        """
        settings = await self.get_settings(tenant_id)

        # Update each category if provided
        if updates.procurement_settings is not None:
            settings.procurement_settings = updates.procurement_settings.dict()

        if updates.inventory_settings is not None:
            settings.inventory_settings = updates.inventory_settings.dict()

        if updates.production_settings is not None:
            settings.production_settings = updates.production_settings.dict()

        if updates.supplier_settings is not None:
            settings.supplier_settings = updates.supplier_settings.dict()

        if updates.pos_settings is not None:
            settings.pos_settings = updates.pos_settings.dict()

        if updates.order_settings is not None:
            settings.order_settings = updates.order_settings.dict()

        return await self.repository.update(settings)

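# --- Illustrative sketch, not part of the commit ---
# update_settings only touches the categories present on the update object, so a
# partial payload leaves the rest untouched. Hypothetical caller (POSSettings field
# names assumed from the migration defaults further down):

async def bump_pos_sync(service: TenantSettingsService, tenant_id: UUID):
    updates = TenantSettingsUpdate(
        pos_settings=POSSettings(
            sync_interval_minutes=10,
            auto_sync_products=True,
            auto_sync_transactions=True,
        )
    )
    # Other categories stay as stored; only pos_settings is rewritten
    return await service.update_settings(tenant_id, updates)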
    async def get_category(self, tenant_id: UUID, category: str) -> Dict[str, Any]:
        """
        Get settings for a specific category

        Args:
            tenant_id: UUID of the tenant
            category: Category name (procurement, inventory, production, etc.)

        Returns:
            Dictionary with category settings

        Raises:
            HTTPException: If category is invalid
        """
        if category not in self.CATEGORY_COLUMNS:
            raise HTTPException(
                status_code=status.HTTP_400_BAD_REQUEST,
                detail=f"Invalid category: {category}. Valid categories: {', '.join(self.CATEGORY_COLUMNS.keys())}"
            )

        settings = await self.get_settings(tenant_id)
        column_name = self.CATEGORY_COLUMNS[category]

        return getattr(settings, column_name)

    async def update_category(
        self,
        tenant_id: UUID,
        category: str,
        updates: Dict[str, Any]
    ) -> TenantSettings:
        """
        Update settings for a specific category

        Args:
            tenant_id: UUID of the tenant
            category: Category name
            updates: Dictionary with new values

        Returns:
            Updated TenantSettings object

        Raises:
            HTTPException: If category is invalid or validation fails
        """
        if category not in self.CATEGORY_COLUMNS:
            raise HTTPException(
                status_code=status.HTTP_400_BAD_REQUEST,
                detail=f"Invalid category: {category}"
            )

        # Validate updates using the appropriate schema
        schema = self.CATEGORY_SCHEMAS[category]
        try:
            validated_data = schema(**updates)
        except Exception as e:
            raise HTTPException(
                status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
                detail=f"Validation error: {str(e)}"
            )

        # Get existing settings and update the category
        settings = await self.get_settings(tenant_id)
        column_name = self.CATEGORY_COLUMNS[category]
        setattr(settings, column_name, validated_data.dict())

        return await self.repository.update(settings)

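# --- Illustrative sketch, not part of the commit ---
# update_category validates the full payload against the category schema before any
# write, so a bad field never reaches the database. Hypothetical caller (payload keys
# from the migration defaults below; Pydantic type coercion assumed):

async def demo_category_update(service: TenantSettingsService, tenant_id: UUID):
    # Succeeds: complete, valid pos payload
    await service.update_category(tenant_id, "pos", {
        "sync_interval_minutes": 15,
        "auto_sync_products": False,
        "auto_sync_transactions": True,
    })
    # Raises HTTPException(422) before any write: the schema rejects the value
    await service.update_category(tenant_id, "pos", {"sync_interval_minutes": "often"})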
    async def reset_category(self, tenant_id: UUID, category: str) -> Dict[str, Any]:
        """
        Reset a category to default values

        Args:
            tenant_id: UUID of the tenant
            category: Category name

        Returns:
            Dictionary with reset category settings

        Raises:
            HTTPException: If category is invalid
        """
        if category not in self.CATEGORY_COLUMNS:
            raise HTTPException(
                status_code=status.HTTP_400_BAD_REQUEST,
                detail=f"Invalid category: {category}"
            )

        # Get default settings for the category
        defaults = TenantSettings.get_default_settings()
        column_name = self.CATEGORY_COLUMNS[category]
        default_category_settings = defaults[column_name]

        # Update the category with defaults
        settings = await self.get_settings(tenant_id)
        setattr(settings, column_name, default_category_settings)

        await self.repository.update(settings)

        return default_category_settings

    async def _create_default_settings(self, tenant_id: UUID) -> TenantSettings:
        """
        Create default settings for a new tenant

        Args:
            tenant_id: UUID of the tenant

        Returns:
            Newly created TenantSettings object
        """
        defaults = TenantSettings.get_default_settings()

        settings = TenantSettings(
            tenant_id=tenant_id,
            procurement_settings=defaults["procurement_settings"],
            inventory_settings=defaults["inventory_settings"],
            production_settings=defaults["production_settings"],
            supplier_settings=defaults["supplier_settings"],
            pos_settings=defaults["pos_settings"],
            order_settings=defaults["order_settings"]
        )

        return await self.repository.create(settings)

    async def delete_settings(self, tenant_id: UUID) -> None:
        """
        Delete tenant settings (used when tenant is deleted)

        Args:
            tenant_id: UUID of the tenant
        """
        await self.repository.delete(tenant_id)
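# --- Illustrative sketch, not part of the commit ---
# How the service above might be wired into a FastAPI route; the router, the get_db
# dependency, and the path are hypothetical, while the service calls are the ones
# defined in this file.

from fastapi import APIRouter, Depends

router = APIRouter()

@router.get("/tenants/{tenant_id}/settings/{category}")
async def read_category(tenant_id: UUID, category: str, db: AsyncSession = Depends(get_db)):
    service = TenantSettingsService(db)
    # get_settings() inside get_category() creates defaults on first access,
    # so this endpoint never 404s for a known tenant
    return await service.get_category(tenant_id, category)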
@@ -0,0 +1,155 @@
"""add tenant_settings

Revision ID: 20251022_0000
Revises: 20251017_0000
Create Date: 2025-10-22

"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
from uuid import uuid4

# revision identifiers, used by Alembic.
revision = '20251022_0000'
down_revision = '20251017_0000'
branch_labels = None
depends_on = None


def get_default_settings():
    """Get default settings for all categories"""
    return {
        "procurement_settings": {
            "auto_approve_enabled": True,
            "auto_approve_threshold_eur": 500.0,
            "auto_approve_min_supplier_score": 0.80,
            "require_approval_new_suppliers": True,
            "require_approval_critical_items": True,
            "procurement_lead_time_days": 3,
            "demand_forecast_days": 14,
            "safety_stock_percentage": 20.0,
            "po_approval_reminder_hours": 24,
            "po_critical_escalation_hours": 12
        },
        "inventory_settings": {
            "low_stock_threshold": 10,
            "reorder_point": 20,
            "reorder_quantity": 50,
            "expiring_soon_days": 7,
            "expiration_warning_days": 3,
            "quality_score_threshold": 8.0,
            "temperature_monitoring_enabled": True,
            "refrigeration_temp_min": 1.0,
            "refrigeration_temp_max": 4.0,
            "freezer_temp_min": -20.0,
            "freezer_temp_max": -15.0,
            "room_temp_min": 18.0,
            "room_temp_max": 25.0,
            "temp_deviation_alert_minutes": 15,
            "critical_temp_deviation_minutes": 5
        },
        "production_settings": {
            "planning_horizon_days": 7,
            "minimum_batch_size": 1.0,
            "maximum_batch_size": 100.0,
            "production_buffer_percentage": 10.0,
            "working_hours_per_day": 12,
            "max_overtime_hours": 4,
            "capacity_utilization_target": 0.85,
            "capacity_warning_threshold": 0.95,
            "quality_check_enabled": True,
            "minimum_yield_percentage": 85.0,
            "quality_score_threshold": 8.0,
            "schedule_optimization_enabled": True,
            "prep_time_buffer_minutes": 30,
            "cleanup_time_buffer_minutes": 15,
            "labor_cost_per_hour_eur": 15.0,
            "overhead_cost_percentage": 20.0
        },
        "supplier_settings": {
            "default_payment_terms_days": 30,
            "default_delivery_days": 3,
            "excellent_delivery_rate": 95.0,
            "good_delivery_rate": 90.0,
            "excellent_quality_rate": 98.0,
            "good_quality_rate": 95.0,
            "critical_delivery_delay_hours": 24,
            "critical_quality_rejection_rate": 10.0,
            "high_cost_variance_percentage": 15.0
        },
        "pos_settings": {
            "sync_interval_minutes": 5,
            "auto_sync_products": True,
            "auto_sync_transactions": True
        },
        "order_settings": {
            "max_discount_percentage": 50.0,
            "default_delivery_window_hours": 48,
            "dynamic_pricing_enabled": False,
            "discount_enabled": True,
            "delivery_tracking_enabled": True
        }
    }


def upgrade():
    """Create tenant_settings table and seed existing tenants"""
    # Create tenant_settings table
    op.create_table(
        'tenant_settings',
        sa.Column('id', postgresql.UUID(as_uuid=True), primary_key=True, default=uuid4),
        sa.Column('tenant_id', postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column('procurement_settings', postgresql.JSON(), nullable=False),
        sa.Column('inventory_settings', postgresql.JSON(), nullable=False),
        sa.Column('production_settings', postgresql.JSON(), nullable=False),
        sa.Column('supplier_settings', postgresql.JSON(), nullable=False),
        sa.Column('pos_settings', postgresql.JSON(), nullable=False),
        sa.Column('order_settings', postgresql.JSON(), nullable=False),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
        sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
        sa.ForeignKeyConstraint(['tenant_id'], ['tenants.id'], ondelete='CASCADE'),
        sa.UniqueConstraint('tenant_id', name='uq_tenant_settings_tenant_id')
    )

    # Create indexes
    op.create_index('ix_tenant_settings_tenant_id', 'tenant_settings', ['tenant_id'])

    # Seed existing tenants with default settings
    connection = op.get_bind()

    # Get all existing tenant IDs
    result = connection.execute(sa.text("SELECT id FROM tenants"))
    tenant_ids = [row[0] for row in result]

    # Insert default settings for each existing tenant
    defaults = get_default_settings()
    for tenant_id in tenant_ids:
        connection.execute(
            sa.text("""
                INSERT INTO tenant_settings (
                    id, tenant_id, procurement_settings, inventory_settings,
                    production_settings, supplier_settings, pos_settings, order_settings
                ) VALUES (
                    :id, :tenant_id, :procurement_settings::jsonb, :inventory_settings::jsonb,
                    :production_settings::jsonb, :supplier_settings::jsonb,
                    :pos_settings::jsonb, :order_settings::jsonb
                )
            """),
            {
                "id": str(uuid4()),
                "tenant_id": tenant_id,
                "procurement_settings": str(defaults["procurement_settings"]).replace("'", '"').replace("True", "true").replace("False", "false"),
                "inventory_settings": str(defaults["inventory_settings"]).replace("'", '"').replace("True", "true").replace("False", "false"),
                "production_settings": str(defaults["production_settings"]).replace("'", '"').replace("True", "true").replace("False", "false"),
                "supplier_settings": str(defaults["supplier_settings"]).replace("'", '"').replace("True", "true").replace("False", "false"),
                "pos_settings": str(defaults["pos_settings"]).replace("'", '"').replace("True", "true").replace("False", "false"),
                "order_settings": str(defaults["order_settings"]).replace("'", '"').replace("True", "true").replace("False", "false")
            }
        )


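# --- Illustrative sketch, not part of the commit ---
# The seeding loop above builds JSON strings via str() plus chained .replace() calls,
# which happens to work for these literals but breaks on values containing quotes,
# None, or the substrings True/False. A json.dumps-based variant (stdlib only) would
# be more robust; shown here as a suggested alternative, not the committed code:

import json

def _jsonb_params(defaults: dict) -> dict:
    """Serialize each settings category to real JSON for the :param::jsonb casts."""
    return {key: json.dumps(value) for key, value in defaults.items()}

# usage inside the loop:
#     params = {"id": str(uuid4()), "tenant_id": tenant_id, **_jsonb_params(defaults)}
#     connection.execute(insert_stmt, params)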
def downgrade():
    """Drop tenant_settings table"""
    op.drop_index('ix_tenant_settings_tenant_id', table_name='tenant_settings')
    op.drop_table('tenant_settings')