New alert system and control panel page
@@ -13,7 +13,6 @@ import structlog
from apscheduler.triggers.cron import CronTrigger
from shared.alerts.base_service import BaseAlertService, AlertServiceMixin
from shared.alerts.templates import format_item_message
logger = structlog.get_logger()
@@ -127,17 +126,13 @@ class ProductionAlertService(BaseAlertService, AlertServiceMixin):
percentage = issue['capacity_percentage']
if status == 'severe_overload':
template_data = self.format_spanish_message(
'order_overload',
percentage=int(percentage - 100)
)
await self.publish_item(tenant_id, {
'type': 'severe_capacity_overload',
'severity': 'urgent',
'title': template_data['title'],
'message': template_data['message'],
'actions': template_data['actions'],
'title': 'Raw Alert - Will be enriched',
'message': 'Raw Alert - Will be enriched',
'actions': [],
'metadata': {
'planned_date': issue['planned_date'].isoformat(),
'capacity_percentage': float(percentage),
@@ -228,20 +223,16 @@ class ProductionAlertService(BaseAlertService, AlertServiceMixin):
else:
severity = 'low'
template_data = self.format_spanish_message(
'production_delay',
batch_name=f"{delay['product_name']} #{delay['batch_number']}",
delay_minutes=int(delay_minutes)
)
await self.publish_item(delay['tenant_id'], {
'type': 'production_delay',
'severity': severity,
'title': template_data['title'],
'message': template_data['message'],
'actions': template_data['actions'],
'title': 'Raw Alert - Will be enriched',
'message': 'Raw Alert - Will be enriched',
'actions': [],
'metadata': {
'batch_id': str(delay['id']),
'batch_name': f"{delay['product_name']} #{delay['batch_number']}",
'product_name': delay['product_name'],
'batch_number': delay['batch_number'],
'delay_minutes': delay_minutes,
@@ -367,17 +358,13 @@ class ProductionAlertService(BaseAlertService, AlertServiceMixin):
days_to_maintenance = equipment.get('days_to_maintenance', 30)
if status == 'down':
template_data = self.format_spanish_message(
'equipment_failure',
equipment_name=equipment['name']
)
await self.publish_item(equipment['tenant_id'], {
'type': 'equipment_failure',
'severity': 'urgent',
'title': template_data['title'],
'message': template_data['message'],
'actions': template_data['actions'],
'title': 'Raw Alert - Will be enriched',
'message': 'Raw Alert - Will be enriched',
'actions': [],
'metadata': {
'equipment_id': str(equipment['id']),
'equipment_name': equipment['name'],
@@ -389,18 +376,13 @@ class ProductionAlertService(BaseAlertService, AlertServiceMixin):
elif status == 'maintenance' or (days_to_maintenance is not None and days_to_maintenance <= 3):
severity = 'high' if (days_to_maintenance is not None and days_to_maintenance <= 1) else 'medium'
template_data = self.format_spanish_message(
'maintenance_required',
equipment_name=equipment['name'],
days_until_maintenance=max(0, int(days_to_maintenance)) if days_to_maintenance is not None else 3
)
await self.publish_item(equipment['tenant_id'], {
'type': 'maintenance_required',
'severity': severity,
'title': template_data['title'],
'message': template_data['message'],
'actions': template_data['actions'],
'title': 'Raw Alert - Will be enriched',
'message': 'Raw Alert - Will be enriched',
'actions': [],
'metadata': {
'equipment_id': str(equipment['id']),
'equipment_name': equipment['name'],
@@ -412,18 +394,13 @@ class ProductionAlertService(BaseAlertService, AlertServiceMixin):
elif efficiency is not None and efficiency < 80:
severity = 'medium' if efficiency < 70 else 'low'
template_data = self.format_spanish_message(
'low_equipment_efficiency',
equipment_name=equipment['name'],
efficiency_percent=round(efficiency, 1)
)
await self.publish_item(equipment['tenant_id'], {
'type': 'low_equipment_efficiency',
'severity': severity,
'title': template_data['title'],
'message': template_data['message'],
'actions': template_data['actions'],
'title': 'Raw Alert - Will be enriched',
'message': 'Raw Alert - Will be enriched',
'actions': [],
'metadata': {
'equipment_id': str(equipment['id']),
'equipment_name': equipment['name'],
@@ -476,19 +453,15 @@ class ProductionAlertService(BaseAlertService, AlertServiceMixin):
efficiency_loss = rec['efficiency_loss_percent']
if rec_type == 'reduce_production_time':
template_data = self.format_spanish_message(
'production_efficiency',
suggested_time=f"{rec['start_hour']:02d}:00",
savings_percent=efficiency_loss
)
await self.publish_item(tenant_id, {
'type': 'production_efficiency',
'severity': 'medium',
'title': template_data['title'],
'message': template_data['message'],
'actions': template_data['actions'],
'title': 'Raw Alert - Will be enriched',
'message': 'Raw Alert - Will be enriched',
'actions': [],
'metadata': {
'suggested_time': f"{rec['start_hour']:02d}:00",
'product_name': rec['product_name'],
'avg_production_time': float(rec['avg_production_time']),
'avg_planned_duration': float(rec['avg_planned_duration']),
@@ -585,20 +558,17 @@ class ProductionAlertService(BaseAlertService, AlertServiceMixin):
peak_hour_record['avg_energy']) * 100
if potential_savings > 15: # More than 15% potential savings
template_data = self.format_spanish_message(
'energy_optimization',
start_time=f"{min_off_peak['hour_of_day']:02d}:00",
end_time=f"{min_off_peak['hour_of_day']+2:02d}:00",
savings_euros=potential_savings * 0.15 # Rough estimate
)
await self.publish_item(tenant_id, {
'type': 'energy_optimization',
'severity': 'low',
'title': template_data['title'],
'message': template_data['message'],
'actions': template_data['actions'],
'title': 'Raw Alert - Will be enriched',
'message': 'Raw Alert - Will be enriched',
'actions': [],
'metadata': {
'start_time': f"{min_off_peak['hour_of_day']:02d}:00",
'end_time': f"{min_off_peak['hour_of_day']+2:02d}:00",
'savings_euros': round(potential_savings * 0.15, 2),
'equipment_name': equipment,
'peak_hour': peak_hour_record['hour_of_day'],
'optimal_hour': min_off_peak['hour_of_day'],
@@ -629,20 +599,16 @@ class ProductionAlertService(BaseAlertService, AlertServiceMixin):
data = json.loads(payload)
tenant_id = UUID(data['tenant_id'])
template_data = self.format_spanish_message(
'production_delay',
batch_name=f"{data['product_name']} #{data.get('batch_number', 'N/A')}",
delay_minutes=data['delay_minutes']
)
await self.publish_item(tenant_id, {
'type': 'production_delay',
'severity': 'high',
'title': template_data['title'],
'message': template_data['message'],
'actions': template_data['actions'],
'title': 'Raw Alert - Will be enriched',
'message': 'Raw Alert - Will be enriched',
'actions': [],
'metadata': {
'batch_id': data['batch_id'],
'batch_name': f"{data['product_name']} #{data.get('batch_number', 'N/A')}",
'delay_minutes': data['delay_minutes'],
'trigger_source': 'database'
}
@@ -711,4 +677,84 @@ class ProductionAlertService(BaseAlertService, AlertServiceMixin):
logger.error("Error getting affected production batches",
ingredient_id=ingredient_id,
error=str(e))
return []
return []
async def emit_batch_start_alert(
self,
tenant_id: UUID,
batch_id: str,
batch_number: str,
product_name: str,
product_sku: str,
quantity_planned: float,
unit: str,
priority: str = "normal",
estimated_duration_minutes: Optional[int] = None,
scheduled_start_time: Optional[datetime] = None,
reasoning_data: Optional[Dict[str, Any]] = None
) -> None:
"""
|
||||
Emit action_needed alert when a production batch is ready to start.
|
||||
This appears in the Cola de Acciones (Action Queue) to prompt user to start the batch.
|
||||
|
||||
Args:
tenant_id: Tenant UUID
batch_id: Production batch UUID
batch_number: Human-readable batch number
product_name: Product name
product_sku: Product SKU
quantity_planned: Planned quantity
unit: Unit of measurement
priority: Batch priority (urgent, high, normal, low)
estimated_duration_minutes: Estimated production duration
scheduled_start_time: When batch is scheduled to start
reasoning_data: Structured reasoning from orchestrator (if auto-created)
"""
try:
# Determine severity based on priority and timing
if priority == 'urgent':
severity = 'urgent'
elif priority == 'high':
severity = 'high'
else:
severity = 'medium'
# Build alert metadata
metadata = {
'batch_id': str(batch_id),
'batch_number': batch_number,
'product_name': product_name,
'product_sku': product_sku,
'quantity_planned': float(quantity_planned),
'unit': unit,
'priority': priority,
'estimated_duration_minutes': estimated_duration_minutes,
'scheduled_start_time': scheduled_start_time.isoformat() if scheduled_start_time else None,
'reasoning_data': reasoning_data
}
await self.publish_item(tenant_id, {
'type': 'production_batch_start',
'type_class': 'action_needed',
'severity': severity,
'title': 'Raw Alert - Will be enriched',
'message': 'Raw Alert - Will be enriched',
'actions': ['start_production_batch', 'reschedule_batch', 'view_batch_details'],
'metadata': metadata
}, item_type='alert')
logger.info(
"Production batch start alert emitted",
batch_id=str(batch_id),
batch_number=batch_number,
product_name=product_name,
tenant_id=str(tenant_id)
)
except Exception as e:
logger.error(
"Failed to emit batch start alert",
batch_id=str(batch_id),
error=str(e),
exc_info=True
)
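A minimal usage sketch (not part of this commit) of how a batch-creation flow might emit this action-needed alert; the alert_service instance, the SKU, and the unit value are assumptions for illustration, and the call must run inside an async context:

from uuid import UUID

# assumes `alert_service` is an already-connected ProductionAlertService instance
await alert_service.emit_batch_start_alert(
    tenant_id=UUID("a1b2c3d4-e5f6-47a8-b9c0-d1e2f3a4b5c6"),  # DEMO_TENANT_SAN_PABLO
    batch_id="40000000-0000-0000-0000-0000000000a1",
    batch_number="BATCH-CHOCOLATE-CAKE-EVENING",
    product_name="Tarta de Chocolate Premium",
    product_sku="TCP-001",  # assumed SKU, not taken from the seed data
    quantity_planned=5.0,
    unit="kg",  # assumed unit
    priority="high",
    estimated_duration_minutes=180,
)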
@@ -0,0 +1,307 @@
"""
Production Notification Service
Emits informational notifications for production state changes:
- batch_state_changed: When batch transitions between states
- batch_completed: When batch production completes
- batch_started: When batch production begins
These are NOTIFICATIONS (not alerts) - informational state changes that don't require user action.
"""
import logging
from datetime import datetime, timezone
from typing import Optional, Dict, Any
from sqlalchemy.orm import Session
from shared.schemas.event_classification import RawEvent, EventClass, EventDomain
from shared.alerts.base_service import BaseAlertService
logger = logging.getLogger(__name__)
class ProductionNotificationService(BaseAlertService):
"""
Service for emitting production notifications (informational state changes).
"""
def __init__(self, rabbitmq_url: str = None):
super().__init__(service_name="production", rabbitmq_url=rabbitmq_url)
async def emit_batch_state_changed_notification(
self,
db: Session,
tenant_id: str,
batch_id: str,
product_sku: str,
product_name: str,
old_status: str,
new_status: str,
quantity: float,
unit: str,
assigned_to: Optional[str] = None,
) -> None:
"""
Emit notification when a production batch changes state.
Args:
db: Database session
tenant_id: Tenant ID
batch_id: Production batch ID
product_sku: Product SKU
product_name: Product name
old_status: Previous status (PENDING, IN_PROGRESS, COMPLETED, etc.)
new_status: New status
quantity: Batch quantity
unit: Unit of measurement
assigned_to: Assigned worker/station (optional)
"""
try:
# Build message based on state transition
transition_messages = {
("PENDING", "IN_PROGRESS"): f"Production started for {product_name}",
("IN_PROGRESS", "COMPLETED"): f"Production completed for {product_name}",
("IN_PROGRESS", "PAUSED"): f"Production paused for {product_name}",
("PAUSED", "IN_PROGRESS"): f"Production resumed for {product_name}",
("IN_PROGRESS", "FAILED"): f"Production failed for {product_name}",
}
message = transition_messages.get(
(old_status, new_status),
f"{product_name} status changed from {old_status} to {new_status}"
)
# Create notification event
event = RawEvent(
tenant_id=tenant_id,
event_class=EventClass.NOTIFICATION,
event_domain=EventDomain.PRODUCTION,
event_type="batch_state_changed",
title=f"Batch Status: {new_status}",
message=f"{message} ({quantity} {unit})",
service="production",
event_metadata={
"batch_id": batch_id,
"product_sku": product_sku,
"product_name": product_name,
"old_status": old_status,
"new_status": new_status,
"quantity": quantity,
"unit": unit,
"assigned_to": assigned_to,
"state_changed_at": datetime.now(timezone.utc).isoformat(),
},
timestamp=datetime.now(timezone.utc),
)
# Publish to RabbitMQ for processing
await self.publish_item(tenant_id, event.dict(), item_type="notification")
logger.info(
f"Batch state change notification emitted: {batch_id} ({old_status} → {new_status})",
extra={"tenant_id": tenant_id, "batch_id": batch_id}
)
except Exception as e:
logger.error(
f"Failed to emit batch state change notification: {e}",
extra={"tenant_id": tenant_id, "batch_id": batch_id},
exc_info=True,
)
async def emit_batch_completed_notification(
self,
db: Session,
tenant_id: str,
batch_id: str,
product_sku: str,
product_name: str,
quantity_produced: float,
unit: str,
production_duration_minutes: Optional[int] = None,
quality_score: Optional[float] = None,
) -> None:
"""
Emit notification when a production batch is completed.
Args:
db: Database session
tenant_id: Tenant ID
batch_id: Production batch ID
product_sku: Product SKU
product_name: Product name
quantity_produced: Quantity produced
unit: Unit of measurement
production_duration_minutes: Total production time (optional)
quality_score: Quality score (0-100, optional)
"""
try:
message = f"Produced {quantity_produced} {unit} of {product_name}"
if production_duration_minutes:
message += f" in {production_duration_minutes} minutes"
if quality_score:
message += f" (Quality: {quality_score:.1f}%)"
event = RawEvent(
tenant_id=tenant_id,
event_class=EventClass.NOTIFICATION,
event_domain=EventDomain.PRODUCTION,
event_type="batch_completed",
title=f"Batch Completed: {product_name}",
message=message,
service="production",
event_metadata={
"batch_id": batch_id,
"product_sku": product_sku,
"product_name": product_name,
"quantity_produced": quantity_produced,
"unit": unit,
"production_duration_minutes": production_duration_minutes,
"quality_score": quality_score,
"completed_at": datetime.now(timezone.utc).isoformat(),
},
timestamp=datetime.now(timezone.utc),
)
await self.publish_item(tenant_id, event.dict(), item_type="notification")
logger.info(
f"Batch completed notification emitted: {batch_id} ({quantity_produced} {unit})",
extra={"tenant_id": tenant_id, "batch_id": batch_id}
)
except Exception as e:
logger.error(
f"Failed to emit batch completed notification: {e}",
extra={"tenant_id": tenant_id, "batch_id": batch_id},
exc_info=True,
)
async def emit_batch_started_notification(
self,
db: Session,
tenant_id: str,
batch_id: str,
product_sku: str,
product_name: str,
quantity_planned: float,
unit: str,
estimated_duration_minutes: Optional[int] = None,
assigned_to: Optional[str] = None,
) -> None:
"""
Emit notification when a production batch is started.
Args:
db: Database session
tenant_id: Tenant ID
batch_id: Production batch ID
product_sku: Product SKU
product_name: Product name
quantity_planned: Planned quantity
unit: Unit of measurement
estimated_duration_minutes: Estimated duration (optional)
assigned_to: Assigned worker/station (optional)
"""
try:
message = f"Started production of {quantity_planned} {unit} of {product_name}"
if estimated_duration_minutes:
message += f" (Est. {estimated_duration_minutes} min)"
if assigned_to:
message += f" - Assigned to {assigned_to}"
event = RawEvent(
tenant_id=tenant_id,
event_class=EventClass.NOTIFICATION,
event_domain=EventDomain.PRODUCTION,
event_type="batch_started",
title=f"Batch Started: {product_name}",
message=message,
service="production",
event_metadata={
"batch_id": batch_id,
"product_sku": product_sku,
"product_name": product_name,
"quantity_planned": quantity_planned,
"unit": unit,
"estimated_duration_minutes": estimated_duration_minutes,
"assigned_to": assigned_to,
"started_at": datetime.now(timezone.utc).isoformat(),
},
timestamp=datetime.now(timezone.utc),
)
await self.publish_item(tenant_id, event.dict(), item_type="notification")
logger.info(
f"Batch started notification emitted: {batch_id}",
extra={"tenant_id": tenant_id, "batch_id": batch_id}
)
except Exception as e:
logger.error(
f"Failed to emit batch started notification: {e}",
extra={"tenant_id": tenant_id, "batch_id": batch_id},
exc_info=True,
)
async def emit_equipment_status_notification(
self,
db: Session,
tenant_id: str,
equipment_id: str,
equipment_name: str,
old_status: str,
new_status: str,
reason: Optional[str] = None,
) -> None:
"""
Emit notification when equipment status changes.
Args:
db: Database session
tenant_id: Tenant ID
equipment_id: Equipment ID
equipment_name: Equipment name
old_status: Previous status
new_status: New status
reason: Reason for status change (optional)
"""
try:
message = f"{equipment_name} status: {old_status} → {new_status}"
if reason:
message += f" - {reason}"
event = RawEvent(
tenant_id=tenant_id,
event_class=EventClass.NOTIFICATION,
event_domain=EventDomain.PRODUCTION,
event_type="equipment_status_changed",
title=f"Equipment Status: {equipment_name}",
message=message,
service="production",
event_metadata={
"equipment_id": equipment_id,
"equipment_name": equipment_name,
"old_status": old_status,
"new_status": new_status,
"reason": reason,
"status_changed_at": datetime.now(timezone.utc).isoformat(),
},
timestamp=datetime.now(timezone.utc),
)
await self.publish_item(tenant_id, event.dict(), item_type="notification")
logger.info(
f"Equipment status notification emitted: {equipment_name}",
extra={"tenant_id": tenant_id, "equipment_id": equipment_id}
)
except Exception as e:
logger.error(
f"Failed to emit equipment status notification: {e}",
extra={"tenant_id": tenant_id, "equipment_id": equipment_id},
exc_info=True,
)
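For orientation, a hedged usage sketch (not part of this commit) of how a batch status-update handler might emit one of these notifications; the RabbitMQ URL, the db session, the SKU, and the unit value are illustrative assumptions:

# assumes an async context, an active SQLAlchemy session `db`, and a reachable broker
notification_service = ProductionNotificationService(rabbitmq_url="amqp://guest:guest@rabbitmq:5672/")
await notification_service.emit_batch_state_changed_notification(
    db=db,
    tenant_id="a1b2c3d4-e5f6-47a8-b9c0-d1e2f3a4b5c6",
    batch_id="40000000-0000-0000-0000-0000000000a3",
    product_sku="BAG-001",  # assumed SKU
    product_name="Baguette Francesa Tradicional",
    old_status="PENDING",
    new_status="IN_PROGRESS",
    quantity=80.0,
    unit="units",  # assumed unit
)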
services/production/migrate_to_raw_alerts.py (new file, 250 lines)
@@ -0,0 +1,250 @@
#!/usr/bin/env python3
"""
Migration script to update production service to send raw alerts
"""
import re
# Read the file
with open('app/services/production_alert_service.py', 'r') as f:
content = f.read()
# Pattern 1: Replace order_overload template usage (line 130-147)
pattern1 = r''' template_data = self\.format_spanish_message\(
'order_overload',
percentage=int\(percentage - 100\)
\)
await self\.publish_item\(tenant_id, \{
'type': 'severe_capacity_overload',
'severity': 'urgent',
'title': template_data\['title'\],
'message': template_data\['message'\],
'actions': template_data\['actions'\],'''
replacement1 = ''' await self.publish_item(tenant_id, {
'type': 'severe_capacity_overload',
'severity': 'urgent',
'title': 'Raw Alert - Will be enriched',
'message': 'Raw Alert - Will be enriched',
'actions': [],'''
content = re.sub(pattern1, replacement1, content)  # patterns are regex-escaped, so re.sub (not str.replace) is needed
# Pattern 2: Replace production_delay template usage (line 231-242)
pattern2 = r''' template_data = self\.format_spanish_message\(
'production_delay',
batch_name=f"\{delay\['product_name'\]\} #\{delay\['batch_number'\]\}",
delay_minutes=int\(delay_minutes\)
\)
await self\.publish_item\(delay\['tenant_id'\], \{
'type': 'production_delay',
'severity': severity,
'title': template_data\['title'\],
'message': template_data\['message'\],
'actions': template_data\['actions'\],'''
replacement2 = ''' await self.publish_item(delay['tenant_id'], {
'type': 'production_delay',
'severity': severity,
'title': 'Raw Alert - Will be enriched',
'message': 'Raw Alert - Will be enriched',
'actions': [],'''
content = re.sub(pattern2, replacement2, content)
# Add batch_name to metadata for production_delay
content = content.replace(
''' 'metadata': {
'batch_id': str(delay['id']),
'product_name': delay['product_name'],
'batch_number': delay['batch_number'],
'delay_minutes': delay_minutes,''',
''' 'metadata': {
'batch_id': str(delay['id']),
'batch_name': f"{delay['product_name']} #{delay['batch_number']}",
'product_name': delay['product_name'],
'batch_number': delay['batch_number'],
'delay_minutes': delay_minutes,'''
)
# Pattern 3: Replace equipment_failure template usage (line 370-380)
pattern3 = r''' template_data = self\.format_spanish_message\(
'equipment_failure',
equipment_name=equipment\['name'\]
\)
await self\.publish_item\(equipment\['tenant_id'\], \{
'type': 'equipment_failure',
'severity': 'urgent',
'title': template_data\['title'\],
'message': template_data\['message'\],
'actions': template_data\['actions'\],'''
replacement3 = ''' await self.publish_item(equipment['tenant_id'], {
'type': 'equipment_failure',
'severity': 'urgent',
'title': 'Raw Alert - Will be enriched',
'message': 'Raw Alert - Will be enriched',
'actions': [],'''
content = re.sub(pattern3, replacement3, content)
# Pattern 4: Replace maintenance_required template usage (line 392-403)
pattern4 = r''' template_data = self\.format_spanish_message\(
'maintenance_required',
equipment_name=equipment\['name'\],
days_until_maintenance=max\(0, int\(days_to_maintenance\)\) if days_to_maintenance is not None else 3
\)
await self\.publish_item\(equipment\['tenant_id'\], \{
'type': 'maintenance_required',
'severity': severity,
'title': template_data\['title'\],
'message': template_data\['message'\],
'actions': template_data\['actions'\],'''
replacement4 = ''' await self.publish_item(equipment['tenant_id'], {
'type': 'maintenance_required',
'severity': severity,
'title': 'Raw Alert - Will be enriched',
'message': 'Raw Alert - Will be enriched',
'actions': [],'''
content = re.sub(pattern4, replacement4, content)
# Pattern 5: Replace low_equipment_efficiency template usage (line 415-426)
pattern5 = r''' template_data = self\.format_spanish_message\(
'low_equipment_efficiency',
equipment_name=equipment\['name'\],
efficiency_percent=round\(efficiency, 1\)
\)
await self\.publish_item\(equipment\['tenant_id'\], \{
'type': 'low_equipment_efficiency',
'severity': severity,
'title': template_data\['title'\],
'message': template_data\['message'\],
'actions': template_data\['actions'\],'''
replacement5 = ''' await self.publish_item(equipment['tenant_id'], {
'type': 'low_equipment_efficiency',
'severity': severity,
'title': 'Raw Alert - Will be enriched',
'message': 'Raw Alert - Will be enriched',
'actions': [],'''
content = re.sub(pattern5, replacement5, content)
# Pattern 6: Replace production_efficiency template usage (line 479-490)
pattern6 = r''' template_data = self\.format_spanish_message\(
'production_efficiency',
suggested_time=f"\{rec\['start_hour'\]:02d\}:00",
savings_percent=efficiency_loss
\)
await self\.publish_item\(tenant_id, \{
'type': 'production_efficiency',
'severity': 'medium',
'title': template_data\['title'\],
'message': template_data\['message'\],
'actions': template_data\['actions'\],'''
replacement6 = ''' await self.publish_item(tenant_id, {
'type': 'production_efficiency',
'severity': 'medium',
'title': 'Raw Alert - Will be enriched',
'message': 'Raw Alert - Will be enriched',
'actions': [],'''
content = re.sub(pattern6, replacement6, content)
# Add suggested_time to metadata for production_efficiency
content = content.replace(
''' 'metadata': {
'product_name': rec['product_name'],
'avg_production_time': float(rec['avg_production_time']),''',
''' 'metadata': {
'suggested_time': f"{rec['start_hour']:02d}:00",
'product_name': rec['product_name'],
'avg_production_time': float(rec['avg_production_time']),'''
)
# Pattern 7: Replace energy_optimization template usage (line 588-600)
pattern7 = r''' template_data = self\.format_spanish_message\(
'energy_optimization',
start_time=f"\{min_off_peak\['hour_of_day'\]:02d\}:00",
end_time=f"\{min_off_peak\['hour_of_day'\]\+2:02d\}:00",
savings_euros=potential_savings \* 0\.15 # Rough estimate
\)
await self\.publish_item\(tenant_id, \{
'type': 'energy_optimization',
'severity': 'low',
'title': template_data\['title'\],
'message': template_data\['message'\],
'actions': template_data\['actions'\],'''
replacement7 = ''' await self.publish_item(tenant_id, {
'type': 'energy_optimization',
'severity': 'low',
'title': 'Raw Alert - Will be enriched',
'message': 'Raw Alert - Will be enriched',
'actions': [],'''
content = re.sub(pattern7, replacement7, content)
# Add time and savings to metadata for energy_optimization
content = content.replace(
''' 'metadata': {
'equipment_name': equipment,
'peak_hour': peak_hour_record['hour_of_day'],''',
''' 'metadata': {
'start_time': f"{min_off_peak['hour_of_day']:02d}:00",
'end_time': f"{min_off_peak['hour_of_day']+2:02d}:00",
'savings_euros': round(potential_savings * 0.15, 2),
'equipment_name': equipment,
'peak_hour': peak_hour_record['hour_of_day'],'''
)
# Pattern 8: Replace production_delay in DB handler (line 632-643)
pattern8 = r''' template_data = self\.format_spanish_message\(
'production_delay',
batch_name=f"\{data\['product_name'\]\} #\{data\.get\('batch_number', 'N/A'\)\}",
delay_minutes=data\['delay_minutes'\]
\)
await self\.publish_item\(tenant_id, \{
'type': 'production_delay',
'severity': 'high',
'title': template_data\['title'\],
'message': template_data\['message'\],
'actions': template_data\['actions'\],'''
replacement8 = ''' await self.publish_item(tenant_id, {
'type': 'production_delay',
'severity': 'high',
'title': 'Raw Alert - Will be enriched',
'message': 'Raw Alert - Will be enriched',
'actions': [],'''
content = re.sub(pattern8, replacement8, content)
# Add batch_name to DB handler metadata
content = content.replace(
''' 'metadata': {
'batch_id': data['batch_id'],
'delay_minutes': data['delay_minutes'],''',
''' 'metadata': {
'batch_id': data['batch_id'],
'batch_name': f"{data['product_name']} #{data.get('batch_number', 'N/A')}",
'delay_minutes': data['delay_minutes'],'''
)
# Write the updated file
with open('app/services/production_alert_service.py', 'w') as f:
f.write(content)
print("Migration complete!")
print("Updated all template usages to send raw data for enrichment")
@@ -692,6 +692,105 @@
"production_notes": "Masa madre preparada ayer - Listo para horneado",
"quality_notes": null,
"equipment_used": ["50000000-0000-0000-0000-000000000001"]
},
{
"id": "40000000-0000-0000-0000-0000000000a1",
"batch_number": "BATCH-CHOCOLATE-CAKE-EVENING",
"product_id": "20000000-0000-0000-0000-000000000004",
"product_name": "Tarta de Chocolate Premium",
"recipe_id": "30000000-0000-0000-0000-000000000004",
"planned_start_offset_days": 0,
"planned_start_hour": 17,
"planned_start_minute": 0,
"planned_duration_minutes": 180,
"planned_quantity": 5.0,
"actual_quantity": null,
"status": "PENDING",
"priority": "HIGH",
"current_process_stage": null,
"yield_percentage": null,
"quality_score": null,
"waste_quantity": null,
"defect_quantity": null,
"waste_defect_type": null,
"estimated_cost": 380.00,
"actual_cost": null,
"labor_cost": null,
"material_cost": null,
"overhead_cost": null,
"station_id": "STATION-03",
"is_rush_order": false,
"is_special_recipe": true,
"is_ai_assisted": true,
"production_notes": "⚠️ DASHBOARD: Scheduled in 5 hours but missing 3kg dark chocolate (CHO-NEG-001) - Will trigger BATCH_AT_RISK alert",
"quality_notes": null,
"equipment_used": ["50000000-0000-0000-0000-000000000001"]
},
{
"id": "40000000-0000-0000-0000-0000000000a2",
"batch_number": "BATCH-CROISSANTS-TOMORROW",
"product_id": "20000000-0000-0000-0000-000000000002",
"product_name": "Croissant de Mantequilla Artesanal",
"recipe_id": "30000000-0000-0000-0000-000000000002",
"planned_start_offset_days": 1,
"planned_start_hour": 5,
"planned_start_minute": 0,
"planned_duration_minutes": 240,
"planned_quantity": 150.0,
"actual_quantity": null,
"status": "PENDING",
"priority": "HIGH",
"current_process_stage": null,
"yield_percentage": null,
"quality_score": null,
"waste_quantity": null,
"defect_quantity": null,
"waste_defect_type": null,
"estimated_cost": 420.00,
"actual_cost": null,
"labor_cost": null,
"material_cost": null,
"overhead_cost": null,
"station_id": "STATION-02",
"is_rush_order": false,
"is_special_recipe": false,
"is_ai_assisted": true,
"production_notes": "⚠️ DASHBOARD: Tomorrow morning batch - Depends on yeast (LEV-SEC-001) and butter (MAN-SAL-001) - Coordinates with PO approval escalation scenario",
"quality_notes": null,
"equipment_used": ["50000000-0000-0000-0000-000000000002", "50000000-0000-0000-0000-000000000001"]
},
{
"id": "40000000-0000-0000-0000-0000000000a3",
"batch_number": "BATCH-BAGUETTES-001",
"product_id": "20000000-0000-0000-0000-000000000001",
"product_name": "Baguette Francesa Tradicional",
"recipe_id": "30000000-0000-0000-0000-000000000001",
"planned_start_offset_days": 0,
"planned_start_hour": 14,
"planned_start_minute": 0,
"planned_duration_minutes": 165,
"planned_quantity": 80.0,
"actual_quantity": null,
"status": "PENDING",
"priority": "MEDIUM",
"current_process_stage": null,
"yield_percentage": null,
"quality_score": null,
"waste_quantity": null,
"defect_quantity": null,
"waste_defect_type": null,
"estimated_cost": 120.00,
"actual_cost": null,
"labor_cost": null,
"material_cost": null,
"overhead_cost": null,
"station_id": "STATION-01",
"is_rush_order": false,
"is_special_recipe": false,
"is_ai_assisted": true,
"production_notes": "⚠️ DASHBOARD: At risk due to flour (HAR-T55-001) running low - Will be affected if delivery is late",
"quality_notes": null,
"equipment_used": ["50000000-0000-0000-0000-000000000001"]
}
]
}
@@ -39,7 +39,8 @@ DEMO_TENANT_LA_ESPIGA = uuid.UUID("b2c3d4e5-f6a7-48b9-c0d1-e2f3a4b5c6d7") # Cen
# Base reference date for date calculations
# MUST match shared/utils/demo_dates.py for proper demo session cloning
# This fixed date allows demo sessions to adjust all dates relative to session creation time
BASE_REFERENCE_DATE = datetime(2025, 1, 15, 12, 0, 0, tzinfo=timezone.utc)
# IMPORTANT: Must match the actual dates in seed data (production batches start Jan 8, 2025)
BASE_REFERENCE_DATE = datetime(2025, 1, 8, 6, 0, 0, tzinfo=timezone.utc)
def load_batches_data():
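The comments above describe how demo sessions shift seed dates relative to this fixed reference. A rough sketch of that offset logic, assuming the cloning works roughly this way (the helper below is illustrative, not the actual shared/utils/demo_dates.py implementation):

from datetime import datetime, timezone

BASE_REFERENCE_DATE = datetime(2025, 1, 8, 6, 0, 0, tzinfo=timezone.utc)

def shift_to_session(seed_dt: datetime, session_created_at: datetime) -> datetime:
    # Each seeded timestamp keeps its distance from the reference date,
    # but is re-anchored to the moment the demo session was created.
    return seed_dt + (session_created_at - BASE_REFERENCE_DATE)

# Example: a batch seeded to start at 2025-01-08 17:00 UTC lands 11 hours
# after whatever time the demo session is spun up.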
@@ -25,6 +25,10 @@ import structlog
from app.models.production import Equipment, EquipmentType, EquipmentStatus
# Add shared path for demo utilities
sys.path.insert(0, str(Path(__file__).parent.parent.parent.parent))
from shared.utils.demo_dates import BASE_REFERENCE_DATE
# Configure logging
logger = structlog.get_logger()
@@ -32,9 +36,6 @@ logger = structlog.get_logger()
DEMO_TENANT_SAN_PABLO = uuid.UUID("a1b2c3d4-e5f6-47a8-b9c0-d1e2f3a4b5c6") # Individual bakery
DEMO_TENANT_LA_ESPIGA = uuid.UUID("b2c3d4e5-f6a7-48b9-c0d1-e2f3a4b5c6d7") # Central bakery
# Base reference date for date calculations
BASE_REFERENCE_DATE = datetime(2025, 1, 15, 12, 0, 0, tzinfo=timezone.utc)
def load_equipment_data():
"""Load equipment data from JSON file"""
@@ -25,6 +25,10 @@ import structlog
from app.models.production import QualityCheckTemplate
# Add shared path for demo utilities
sys.path.insert(0, str(Path(__file__).parent.parent.parent.parent))
from shared.utils.demo_dates import BASE_REFERENCE_DATE
# Configure logging
logger = structlog.get_logger()
@@ -35,9 +39,6 @@ DEMO_TENANT_LA_ESPIGA = uuid.UUID("b2c3d4e5-f6a7-48b9-c0d1-e2f3a4b5c6d7") # Cen
# System user ID (first admin user from auth service)
SYSTEM_USER_ID = uuid.UUID("50000000-0000-0000-0000-000000000004")
# Base reference date for date calculations
BASE_REFERENCE_DATE = datetime(2025, 1, 15, 12, 0, 0, tzinfo=timezone.utc)
def load_quality_templates_data():
"""Load quality templates data from JSON file"""