"""
Production Alert Service - Simplified

Emits minimal events using EventPublisher.
All enrichment handled by alert_processor.
"""
|
|
|
|
import asyncio
|
|
from typing import List, Dict, Any, Optional
|
|
from uuid import UUID
|
|
from datetime import datetime
|
|
import structlog
|
|
|
|
from shared.messaging import UnifiedEventPublisher
|
|
|
|
logger = structlog.get_logger()
|
|
|
|
|
|
class ProductionAlertService:
    """Emit minimal production alert events through an EventPublisher.

    Enrichment of the emitted events is delegated to alert_processor;
    an optional database manager is held for delay checks.
    """

    def __init__(self, event_publisher: UnifiedEventPublisher, database_manager=None):
        # Public attribute names are part of the interface; keep them stable.
        self.database_manager = database_manager
        self.publisher = event_publisher
async def start(self):
|
|
"""Start the production alert service"""
|
|
logger.info("ProductionAlertService started")
|
|
# Add any initialization logic here if needed
|
|
|
|
async def stop(self):
|
|
"""Stop the production alert service"""
|
|
logger.info("ProductionAlertService stopped")
|
|
# Add any cleanup logic here if needed
|
|
|
|
async def emit_production_delay(
|
|
self,
|
|
tenant_id: UUID,
|
|
batch_id: UUID,
|
|
product_name: str,
|
|
batch_number: str,
|
|
delay_minutes: int,
|
|
affected_orders: int = 0,
|
|
customer_names: Optional[List[str]] = None
|
|
):
|
|
"""Emit production delay event"""
|
|
|
|
# Determine severity based on delay
|
|
if delay_minutes > 120:
|
|
severity = "urgent"
|
|
elif delay_minutes > 60:
|
|
severity = "high"
|
|
else:
|
|
severity = "medium"
|
|
|
|
metadata = {
|
|
"batch_id": str(batch_id),
|
|
"product_name": product_name,
|
|
"batch_number": batch_number,
|
|
"delay_minutes": delay_minutes,
|
|
"affected_orders": affected_orders
|
|
}
|
|
|
|
if customer_names:
|
|
metadata["customer_names"] = customer_names
|
|
|
|
await self.publisher.publish_alert(
|
|
event_type="production.production_delay",
|
|
tenant_id=tenant_id,
|
|
severity=severity,
|
|
data=metadata
|
|
)
|
|
|
|
logger.info(
|
|
"production_delay_emitted",
|
|
tenant_id=str(tenant_id),
|
|
batch_number=batch_number,
|
|
delay_minutes=delay_minutes
|
|
)
|
|
|
|
async def emit_equipment_failure(
|
|
self,
|
|
tenant_id: UUID,
|
|
equipment_id: UUID,
|
|
equipment_name: str,
|
|
equipment_type: str,
|
|
affected_batches: int = 0
|
|
):
|
|
"""Emit equipment failure event"""
|
|
|
|
metadata = {
|
|
"equipment_id": str(equipment_id),
|
|
"equipment_name": equipment_name,
|
|
"equipment_type": equipment_type,
|
|
"affected_batches": affected_batches
|
|
}
|
|
|
|
await self.publisher.publish_alert(
|
|
event_type="production.equipment_failure",
|
|
tenant_id=tenant_id,
|
|
severity="urgent",
|
|
data=metadata
|
|
)
|
|
|
|
logger.info(
|
|
"equipment_failure_emitted",
|
|
tenant_id=str(tenant_id),
|
|
equipment_name=equipment_name
|
|
)
|
|
|
|
async def emit_capacity_overload(
|
|
self,
|
|
tenant_id: UUID,
|
|
current_load_percent: float,
|
|
planned_batches: int,
|
|
available_capacity: int,
|
|
affected_date: str
|
|
):
|
|
"""Emit capacity overload warning"""
|
|
|
|
metadata = {
|
|
"current_load_percent": current_load_percent,
|
|
"planned_batches": planned_batches,
|
|
"available_capacity": available_capacity,
|
|
"affected_date": affected_date
|
|
}
|
|
|
|
# Determine severity based on overload
|
|
if current_load_percent > 120:
|
|
severity = "urgent"
|
|
elif current_load_percent > 100:
|
|
severity = "high"
|
|
else:
|
|
severity = "medium"
|
|
|
|
await self.publisher.publish_alert(
|
|
event_type="production.capacity_overload",
|
|
tenant_id=tenant_id,
|
|
severity=severity,
|
|
data=metadata
|
|
)
|
|
|
|
logger.info(
|
|
"capacity_overload_emitted",
|
|
tenant_id=str(tenant_id),
|
|
current_load_percent=current_load_percent
|
|
)
|
|
|
|
async def emit_quality_issue(
|
|
self,
|
|
tenant_id: UUID,
|
|
batch_id: UUID,
|
|
product_name: str,
|
|
batch_number: str,
|
|
issue_type: str,
|
|
issue_description: str,
|
|
affected_quantity: float
|
|
):
|
|
"""Emit quality issue alert"""
|
|
|
|
metadata = {
|
|
"batch_id": str(batch_id),
|
|
"product_name": product_name,
|
|
"batch_number": batch_number,
|
|
"issue_type": issue_type,
|
|
"issue_description": issue_description,
|
|
"affected_quantity": affected_quantity
|
|
}
|
|
|
|
await self.publisher.publish_alert(
|
|
event_type="production.quality_issue",
|
|
tenant_id=tenant_id,
|
|
severity="high",
|
|
data=metadata
|
|
)
|
|
|
|
logger.info(
|
|
"quality_issue_emitted",
|
|
tenant_id=str(tenant_id),
|
|
batch_number=batch_number,
|
|
issue_type=issue_type
|
|
)
|
|
|
|
async def emit_start_production_alert(
|
|
self,
|
|
tenant_id: UUID,
|
|
batch_id: UUID,
|
|
product_name: str,
|
|
batch_number: str,
|
|
reasoning_data: Optional[Dict[str, Any]] = None,
|
|
planned_start_time: Optional[str] = None
|
|
):
|
|
"""Emit start production alert when a new batch is created"""
|
|
|
|
metadata = {
|
|
"batch_id": str(batch_id),
|
|
"product_name": product_name,
|
|
"batch_number": batch_number,
|
|
"reasoning_data": reasoning_data
|
|
}
|
|
|
|
if planned_start_time:
|
|
metadata["planned_start_time"] = planned_start_time
|
|
|
|
await self.publisher.publish_alert(
|
|
event_type="production.start_production",
|
|
tenant_id=tenant_id,
|
|
severity="medium",
|
|
data=metadata
|
|
)
|
|
|
|
logger.info(
|
|
"start_production_alert_emitted",
|
|
tenant_id=str(tenant_id),
|
|
batch_number=batch_number,
|
|
reasoning_type=reasoning_data.get("type") if reasoning_data else None
|
|
)
|
|
|
|
async def emit_batch_start_delayed(
|
|
self,
|
|
tenant_id: UUID,
|
|
batch_id: UUID,
|
|
product_name: str,
|
|
batch_number: str,
|
|
scheduled_start: str,
|
|
delay_reason: Optional[str] = None
|
|
):
|
|
"""Emit batch start delay alert"""
|
|
|
|
metadata = {
|
|
"batch_id": str(batch_id),
|
|
"product_name": product_name,
|
|
"batch_number": batch_number,
|
|
"scheduled_start": scheduled_start
|
|
}
|
|
|
|
if delay_reason:
|
|
metadata["delay_reason"] = delay_reason
|
|
|
|
await self.publisher.publish_alert(
|
|
event_type="production.batch_start_delayed",
|
|
tenant_id=tenant_id,
|
|
severity="high",
|
|
data=metadata
|
|
)
|
|
|
|
logger.info(
|
|
"batch_start_delayed_emitted",
|
|
tenant_id=str(tenant_id),
|
|
batch_number=batch_number
|
|
)
|
|
|
|
async def emit_missing_ingredients(
|
|
self,
|
|
tenant_id: UUID,
|
|
batch_id: UUID,
|
|
product_name: str,
|
|
batch_number: str,
|
|
missing_ingredients: List[Dict[str, Any]]
|
|
):
|
|
"""Emit missing ingredients alert"""
|
|
|
|
metadata = {
|
|
"batch_id": str(batch_id),
|
|
"product_name": product_name,
|
|
"batch_number": batch_number,
|
|
"missing_ingredients": missing_ingredients,
|
|
"missing_count": len(missing_ingredients)
|
|
}
|
|
|
|
await self.publisher.publish_alert(
|
|
event_type="production.missing_ingredients",
|
|
tenant_id=tenant_id,
|
|
severity="urgent",
|
|
data=metadata
|
|
)
|
|
|
|
logger.info(
|
|
"missing_ingredients_emitted",
|
|
tenant_id=str(tenant_id),
|
|
batch_number=batch_number,
|
|
missing_count=len(missing_ingredients)
|
|
)
|
|
|
|
async def emit_equipment_maintenance_due(
|
|
self,
|
|
tenant_id: UUID,
|
|
equipment_id: UUID,
|
|
equipment_name: str,
|
|
equipment_type: str,
|
|
last_maintenance_date: Optional[str] = None,
|
|
days_overdue: Optional[int] = None
|
|
):
|
|
"""Emit equipment maintenance due alert"""
|
|
|
|
metadata = {
|
|
"equipment_id": str(equipment_id),
|
|
"equipment_name": equipment_name,
|
|
"equipment_type": equipment_type
|
|
}
|
|
|
|
if last_maintenance_date:
|
|
metadata["last_maintenance_date"] = last_maintenance_date
|
|
if days_overdue:
|
|
metadata["days_overdue"] = days_overdue
|
|
|
|
# Determine severity based on overdue days
|
|
if days_overdue and days_overdue > 30:
|
|
severity = "high"
|
|
else:
|
|
severity = "medium"
|
|
|
|
await self.publisher.publish_alert(
|
|
event_type="production.equipment_maintenance_due",
|
|
tenant_id=tenant_id,
|
|
severity=severity,
|
|
data=metadata
|
|
)
|
|
|
|
logger.info(
|
|
"equipment_maintenance_due_emitted",
|
|
tenant_id=str(tenant_id),
|
|
equipment_name=equipment_name
|
|
)
|
|
|
|
# Recommendation methods
|
|
|
|
async def emit_efficiency_recommendation(
|
|
self,
|
|
tenant_id: UUID,
|
|
recommendation_type: str,
|
|
description: str,
|
|
potential_improvement_percent: float,
|
|
affected_batches: Optional[int] = None
|
|
):
|
|
"""Emit production efficiency recommendation"""
|
|
|
|
metadata = {
|
|
"recommendation_type": recommendation_type,
|
|
"description": description,
|
|
"potential_improvement_percent": potential_improvement_percent
|
|
}
|
|
|
|
if affected_batches:
|
|
metadata["affected_batches"] = affected_batches
|
|
|
|
await self.publisher.publish_recommendation(
|
|
event_type="production.efficiency_recommendation",
|
|
tenant_id=tenant_id,
|
|
data=metadata
|
|
)
|
|
|
|
logger.info(
|
|
"efficiency_recommendation_emitted",
|
|
tenant_id=str(tenant_id),
|
|
recommendation_type=recommendation_type
|
|
)
|
|
|
|
async def emit_energy_optimization(
|
|
self,
|
|
tenant_id: UUID,
|
|
current_usage_kwh: float,
|
|
potential_savings_kwh: float,
|
|
potential_savings_eur: float,
|
|
optimization_suggestions: List[str]
|
|
):
|
|
"""Emit energy optimization recommendation"""
|
|
|
|
metadata = {
|
|
"current_usage_kwh": current_usage_kwh,
|
|
"potential_savings_kwh": potential_savings_kwh,
|
|
"potential_savings_eur": potential_savings_eur,
|
|
"optimization_suggestions": optimization_suggestions
|
|
}
|
|
|
|
await self.publisher.publish_recommendation(
|
|
event_type="production.energy_optimization",
|
|
tenant_id=tenant_id,
|
|
data=metadata
|
|
)
|
|
|
|
logger.info(
|
|
"energy_optimization_emitted",
|
|
tenant_id=str(tenant_id),
|
|
potential_savings_eur=potential_savings_eur
|
|
)
|
|
|
|
async def emit_batch_sequence_optimization(
|
|
self,
|
|
tenant_id: UUID,
|
|
current_sequence: List[str],
|
|
optimized_sequence: List[str],
|
|
estimated_time_savings_minutes: int
|
|
):
|
|
"""Emit batch sequence optimization recommendation"""
|
|
|
|
metadata = {
|
|
"current_sequence": current_sequence,
|
|
"optimized_sequence": optimized_sequence,
|
|
"estimated_time_savings_minutes": estimated_time_savings_minutes
|
|
}
|
|
|
|
await self.publisher.publish_recommendation(
|
|
event_type="production.batch_sequence_optimization",
|
|
tenant_id=tenant_id,
|
|
data=metadata
|
|
)
|
|
|
|
logger.info(
|
|
"batch_sequence_optimization_emitted",
|
|
tenant_id=str(tenant_id),
|
|
time_savings=estimated_time_savings_minutes
|
|
)
|