New alert system and control panel page

Urtzi Alfaro
2025-11-27 15:52:40 +01:00
parent 1a2f4602f3
commit e902419b6e
178 changed files with 20982 additions and 6944 deletions


@@ -0,0 +1,420 @@
"""
Delivery Tracking Service
Tracks purchase order deliveries and generates the appropriate alerts:
- DELIVERY_SCHEDULED: when a PO is approved and a delivery date is set
- DELIVERY_ARRIVING_SOON: 2 hours before the delivery window opens
- DELIVERY_OVERDUE: 30 minutes after the delivery window ends
- STOCK_RECEIPT_INCOMPLETE: when the delivery window has passed and the delivery has not been marked as received
Integrates with the procurement service to get PO details and expected delivery windows.
"""
import structlog
from datetime import datetime, timedelta, timezone
from typing import Dict, Any, Optional, List
from uuid import UUID
import httpx
from shared.schemas.alert_types import AlertTypeConstants
from shared.alerts.base_service import BaseAlertService
logger = structlog.get_logger()
class DeliveryTrackingService:
"""Tracks deliveries and generates lifecycle alerts"""
def __init__(self, config, db_manager, redis_client, rabbitmq_client):
self.config = config
self.db_manager = db_manager
self.redis = redis_client
self.rabbitmq = rabbitmq_client
self.alert_service = BaseAlertService(config)
self.http_client = httpx.AsyncClient(
timeout=30.0,
follow_redirects=True
)
async def check_expected_deliveries(self, tenant_id: UUID) -> Dict[str, int]:
"""
Check all expected deliveries for a tenant and generate appropriate alerts.
Called by a scheduled job (runs hourly).
Returns:
Dict with counts: {
'arriving_soon': int,
'overdue': int,
'receipt_incomplete': int
}
"""
logger.info("Checking expected deliveries", tenant_id=str(tenant_id))
counts = {
'arriving_soon': 0,
'overdue': 0,
'receipt_incomplete': 0
}
try:
# Get expected deliveries from procurement service
deliveries = await self._get_expected_deliveries(tenant_id)
now = datetime.now(timezone.utc)
for delivery in deliveries:
po_id = delivery.get('po_id')
po_number = delivery.get('po_number')
expected_date = delivery.get('expected_delivery_date')
delivery_window_hours = delivery.get('delivery_window_hours', 4) # Default 4h window
status = delivery.get('status')
if not expected_date:
continue
# Parse expected date
if isinstance(expected_date, str):
expected_date = datetime.fromisoformat(expected_date)
# Make timezone-aware
if expected_date.tzinfo is None:
expected_date = expected_date.replace(tzinfo=timezone.utc)
# Calculate delivery window
window_start = expected_date
window_end = expected_date + timedelta(hours=delivery_window_hours)
# Check if arriving soon (2 hours before window)
arriving_soon_time = window_start - timedelta(hours=2)
if arriving_soon_time <= now < window_start and status == 'approved':
if await self._send_arriving_soon_alert(tenant_id, delivery):
counts['arriving_soon'] += 1
# Check if overdue (30 min after window end)
overdue_time = window_end + timedelta(minutes=30)
if now >= overdue_time and status == 'approved':
if await self._send_overdue_alert(tenant_id, delivery):
counts['overdue'] += 1
# Check if receipt incomplete (delivery window passed, not marked received)
if now > window_end and status == 'approved':
if await self._send_receipt_incomplete_alert(tenant_id, delivery):
counts['receipt_incomplete'] += 1
logger.info(
"Delivery check completed",
tenant_id=str(tenant_id),
**counts
)
except Exception as e:
logger.error(
"Error checking deliveries",
tenant_id=str(tenant_id),
error=str(e)
)
return counts
async def _get_expected_deliveries(self, tenant_id: UUID) -> List[Dict[str, Any]]:
"""
Query procurement service for expected deliveries.
Returns:
List of delivery dicts with:
- po_id, po_number, expected_delivery_date
- supplier_id, supplier_name
- line_items (product list)
- status (approved, in_transit, received)
"""
try:
procurement_url = self.config.PROCUREMENT_SERVICE_URL
response = await self.http_client.get(
f"{procurement_url}/api/internal/expected-deliveries",
params={
"tenant_id": str(tenant_id),
"days_ahead": 1, # Check today + tomorrow
"include_overdue": True
},
headers={"X-Internal-Service": "orchestrator"}
)
if response.status_code == 200:
data = response.json()
return data.get('deliveries', [])
else:
logger.warning(
"Failed to get expected deliveries",
status_code=response.status_code,
tenant_id=str(tenant_id)
)
return []
except Exception as e:
logger.error(
"Error fetching expected deliveries",
tenant_id=str(tenant_id),
error=str(e)
)
return []
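# Illustrative response payload for the expected-deliveries endpoint (field names
# taken from the docstring above and from the alert builders below; the concrete
# values are placeholders, not real data):
#
# {
#   "deliveries": [
#     {
#       "po_id": "<uuid>",
#       "po_number": "PO-2025-0142",
#       "expected_delivery_date": "2025-11-28T08:00:00+00:00",
#       "delivery_window_hours": 4,
#       "status": "approved",
#       "supplier_id": "<uuid>",
#       "supplier_name": "Example Supplier S.L.",
#       "supplier_phone": "+34 600 000 000",
#       "line_items": [{"product_name": "Flour T55", "quantity": 25, "unit": "kg"}],
#       "total_amount": 480.0,
#       "affected_production_batches": []
#     }
#   ]
# }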
async def _send_arriving_soon_alert(
self,
tenant_id: UUID,
delivery: Dict[str, Any]
) -> bool:
"""
Send DELIVERY_ARRIVING_SOON alert (2h before delivery window).
This appears in the action queue with "Mark as Received" action.
"""
# Check if already sent
cache_key = f"delivery_alert:arriving:{tenant_id}:{delivery['po_id']}"
if await self.redis.exists(cache_key):
return False
po_number = delivery.get('po_number', 'N/A')
supplier_name = delivery.get('supplier_name', 'Supplier')
expected_date = delivery.get('expected_delivery_date')
line_items = delivery.get('line_items', [])
# Format product list
products = [item['product_name'] for item in line_items[:3]]
product_list = ", ".join(products)
if len(line_items) > 3:
product_list += f" (+{len(line_items) - 3} more)"
# Calculate time until arrival
if isinstance(expected_date, str):
expected_date = datetime.fromisoformat(expected_date)
if expected_date.tzinfo is None:
expected_date = expected_date.replace(tzinfo=timezone.utc)
hours_until = (expected_date - datetime.now(timezone.utc)).total_seconds() / 3600
alert_data = {
"tenant_id": str(tenant_id),
"alert_type": AlertTypeConstants.DELIVERY_ARRIVING_SOON,
"title": f"Delivery arriving soon: {supplier_name}",
"message": f"Purchase order {po_number} expected in ~{hours_until:.1f} hours. Products: {product_list}",
"service": "orchestrator",
"actions": ["mark_delivery_received", "call_supplier"],
"alert_metadata": {
"po_id": delivery['po_id'],
"po_number": po_number,
"supplier_id": delivery.get('supplier_id'),
"supplier_name": supplier_name,
"supplier_phone": delivery.get('supplier_phone'),
"expected_delivery_date": expected_date.isoformat(),
"line_items": line_items,
"hours_until_arrival": hours_until,
"confidence_score": 0.9
}
}
success = await self.alert_service.send_alert(alert_data)
if success:
# Cache for 24 hours to avoid duplicate alerts
await self.redis.set(cache_key, "1", ex=86400)
logger.info(
"Sent arriving soon alert",
po_number=po_number,
supplier=supplier_name
)
return success
async def _send_overdue_alert(
self,
tenant_id: UUID,
delivery: Dict[str, Any]
) -> bool:
"""
Send DELIVERY_OVERDUE alert (30min after expected window).
Critical priority - needs immediate action (call supplier).
"""
# Check if already sent
cache_key = f"delivery_alert:overdue:{tenant_id}:{delivery['po_id']}"
if await self.redis.exists(cache_key):
return False
po_number = delivery.get('po_number', 'N/A')
supplier_name = delivery.get('supplier_name', 'Supplier')
expected_date = delivery.get('expected_delivery_date')
# Calculate how late
if isinstance(expected_date, str):
expected_date = datetime.fromisoformat(expected_date)
if expected_date.tzinfo is None:
expected_date = expected_date.replace(tzinfo=timezone.utc)
hours_late = (datetime.now(timezone.utc) - expected_date).total_seconds() / 3600
alert_data = {
"tenant_id": str(tenant_id),
"alert_type": AlertTypeConstants.DELIVERY_OVERDUE,
"title": f"Delivery overdue: {supplier_name}",
"message": f"Purchase order {po_number} was expected {hours_late:.1f} hours ago. Contact supplier immediately.",
"service": "orchestrator",
"actions": ["call_supplier", "snooze", "report_issue"],
"alert_metadata": {
"po_id": delivery['po_id'],
"po_number": po_number,
"supplier_id": delivery.get('supplier_id'),
"supplier_name": supplier_name,
"supplier_phone": delivery.get('supplier_phone'),
"expected_delivery_date": expected_date.isoformat(),
"hours_late": hours_late,
"financial_impact": delivery.get('total_amount', 0), # Blocked capital
"affected_orders": len(delivery.get('affected_production_batches', [])),
"confidence_score": 1.0
}
}
success = await self.alert_service.send_alert(alert_data)
if success:
# Cache for 48 hours
await self.redis.set(cache_key, "1", ex=172800)
logger.warning(
"Sent overdue delivery alert",
po_number=po_number,
supplier=supplier_name,
hours_late=hours_late
)
return success
async def _send_receipt_incomplete_alert(
self,
tenant_id: UUID,
delivery: Dict[str, Any]
) -> bool:
"""
Send STOCK_RECEIPT_INCOMPLETE alert.
Delivery window has passed but stock not marked as received.
"""
# Check if already sent
cache_key = f"delivery_alert:receipt:{tenant_id}:{delivery['po_id']}"
if await self.redis.exists(cache_key):
return False
po_number = delivery.get('po_number', 'N/A')
supplier_name = delivery.get('supplier_name', 'Supplier')
alert_data = {
"tenant_id": str(tenant_id),
"alert_type": AlertTypeConstants.STOCK_RECEIPT_INCOMPLETE,
"title": f"Confirm stock receipt: {po_number}",
"message": f"Delivery from {supplier_name} should have arrived. Please confirm receipt and log lot details.",
"service": "orchestrator",
"actions": ["complete_stock_receipt", "report_missing"],
"alert_metadata": {
"po_id": delivery['po_id'],
"po_number": po_number,
"supplier_id": delivery.get('supplier_id'),
"supplier_name": supplier_name,
"expected_delivery_date": delivery.get('expected_delivery_date'),
"confidence_score": 0.8
}
}
success = await self.alert_service.send_alert(alert_data)
if success:
# Cache for 7 days
await self.redis.set(cache_key, "1", ex=604800)
logger.info(
"Sent receipt incomplete alert",
po_number=po_number
)
return success
async def mark_delivery_received(
self,
tenant_id: UUID,
po_id: UUID,
received_by_user_id: UUID
) -> Dict[str, Any]:
"""
Mark delivery as received and trigger stock receipt workflow.
This is called when the user clicks the "Mark as Received" action button.
Returns:
Dict with receipt_id and status
"""
try:
# Call inventory service to create draft stock receipt
inventory_url = self.config.INVENTORY_SERVICE_URL
response = await self.http_client.post(
f"{inventory_url}/api/inventory/stock-receipts",
json={
"tenant_id": str(tenant_id),
"po_id": str(po_id),
"received_by_user_id": str(received_by_user_id)
},
headers={"X-Internal-Service": "orchestrator"}
)
if response.status_code in [200, 201]:
receipt_data = response.json()
# Clear delivery alerts
await self._clear_delivery_alerts(tenant_id, po_id)
logger.info(
"Delivery marked as received",
po_id=str(po_id),
receipt_id=receipt_data.get('id')
)
return {
"status": "success",
"receipt_id": receipt_data.get('id'),
"message": "Stock receipt created. Please complete lot details."
}
else:
logger.error(
"Failed to create stock receipt",
status_code=response.status_code,
po_id=str(po_id)
)
return {
"status": "error",
"message": "Failed to create stock receipt"
}
except Exception as e:
logger.error(
"Error marking delivery received",
po_id=str(po_id),
error=str(e)
)
return {
"status": "error",
"message": str(e)
}
async def _clear_delivery_alerts(self, tenant_id: UUID, po_id: UUID):
"""Clear all delivery-related alerts for a PO once received"""
alert_types = [
"arriving",
"overdue",
"receipt"
]
for alert_type in alert_types:
cache_key = f"delivery_alert:{alert_type}:{tenant_id}:{po_id}"
await self.redis.delete(cache_key)
logger.debug("Cleared delivery alerts", po_id=str(po_id))
async def close(self):
"""Close HTTP client on shutdown"""
await self.http_client.aclose()
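# Minimal usage sketch (illustrative, not part of the service): how an hourly
# scheduled job could drive the tracker. `tenant_ids` and the shared clients are
# assumed to be provided by the job wiring; only check_expected_deliveries() and
# close() are real methods of this class.
async def run_hourly_delivery_check(tenant_ids, config, db_manager, redis_client, rabbitmq_client):
    service = DeliveryTrackingService(config, db_manager, redis_client, rabbitmq_client)
    try:
        for tenant_id in tenant_ids:
            counts = await service.check_expected_deliveries(tenant_id)
            logger.info("Hourly delivery check finished", tenant_id=str(tenant_id), **counts)
    finally:
        await service.close()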


@@ -0,0 +1,275 @@
"""
Orchestration Notification Service
Emits informational notifications for orchestration events:
- orchestration_run_started: When an orchestration run begins
- orchestration_run_completed: When an orchestration run finishes successfully
- action_created: When the orchestrator creates an action (PO, batch, adjustment)
These are NOTIFICATIONS (not alerts) - informational state changes that don't require user action.
"""
import logging
from datetime import datetime, timezone
from typing import Optional, Dict, Any, List
from sqlalchemy.orm import Session
from shared.schemas.event_classification import RawEvent, EventClass, EventDomain
from shared.alerts.base_service import BaseAlertService
logger = logging.getLogger(__name__)
class OrchestrationNotificationService(BaseAlertService):
"""
Service for emitting orchestration notifications (informational state changes).
"""
def __init__(self, rabbitmq_url: str = None):
super().__init__(service_name="orchestrator", rabbitmq_url=rabbitmq_url)
async def emit_orchestration_run_started_notification(
self,
db: Session,
tenant_id: str,
run_id: str,
run_type: str, # 'scheduled', 'manual', 'triggered'
scope: str, # 'full', 'inventory_only', 'production_only'
) -> None:
"""
Emit notification when an orchestration run starts.
Args:
db: Database session
tenant_id: Tenant ID
run_id: Orchestration run ID
run_type: Type of run
scope: Scope of run
"""
try:
event = RawEvent(
tenant_id=tenant_id,
event_class=EventClass.NOTIFICATION,
event_domain=EventDomain.OPERATIONS,
event_type="orchestration_run_started",
title="Orchestration Started",
message=f"AI orchestration run started ({run_type}, scope: {scope})",
service="orchestrator",
event_metadata={
"run_id": run_id,
"run_type": run_type,
"scope": scope,
"started_at": datetime.now(timezone.utc).isoformat(),
},
timestamp=datetime.now(timezone.utc),
)
await self.publish_item(tenant_id, event.dict(), item_type="notification")
logger.info(
f"Orchestration run started notification emitted: {run_id}",
extra={"tenant_id": tenant_id, "run_id": run_id}
)
except Exception as e:
logger.error(
f"Failed to emit orchestration run started notification: {e}",
extra={"tenant_id": tenant_id, "run_id": run_id},
exc_info=True,
)
async def emit_orchestration_run_completed_notification(
self,
db: Session,
tenant_id: str,
run_id: str,
duration_seconds: float,
actions_created: int,
actions_by_type: Dict[str, int], # e.g., {'purchase_order': 2, 'production_batch': 3}
status: str = "success",
) -> None:
"""
Emit notification when an orchestration run completes.
Args:
db: Database session
tenant_id: Tenant ID
run_id: Orchestration run ID
duration_seconds: Run duration
actions_created: Total actions created
actions_by_type: Breakdown of actions by type
status: Run status (success, partial, failed)
"""
try:
# Build message with action summary
if actions_created == 0:
message = "No actions needed"
else:
action_summary = ", ".join([f"{count} {action_type}" for action_type, count in actions_by_type.items()])
message = f"Created {actions_created} actions: {action_summary}"
message += f" ({duration_seconds:.1f}s)"
event = RawEvent(
tenant_id=tenant_id,
event_class=EventClass.NOTIFICATION,
event_domain=EventDomain.OPERATIONS,
event_type="orchestration_run_completed",
title=f"Orchestration Completed: {status.title()}",
message=message,
service="orchestrator",
event_metadata={
"run_id": run_id,
"status": status,
"duration_seconds": duration_seconds,
"actions_created": actions_created,
"actions_by_type": actions_by_type,
"completed_at": datetime.now(timezone.utc).isoformat(),
},
timestamp=datetime.now(timezone.utc),
)
await self.publish_item(tenant_id, event.dict(), item_type="notification")
logger.info(
f"Orchestration run completed notification emitted: {run_id} ({actions_created} actions)",
extra={"tenant_id": tenant_id, "run_id": run_id}
)
except Exception as e:
logger.error(
f"Failed to emit orchestration run completed notification: {e}",
extra={"tenant_id": tenant_id, "run_id": run_id},
exc_info=True,
)
async def emit_action_created_notification(
self,
db: Session,
tenant_id: str,
run_id: str,
action_id: str,
action_type: str, # 'purchase_order', 'production_batch', 'inventory_adjustment'
action_details: Dict[str, Any], # Type-specific details
reason: str,
estimated_impact: Optional[Dict[str, Any]] = None,
) -> None:
"""
Emit notification when the orchestrator creates an action.
Args:
db: Database session
tenant_id: Tenant ID
run_id: Orchestration run ID
action_id: Created action ID
action_type: Type of action
action_details: Action-specific details
reason: Reason for creating action
estimated_impact: Estimated impact (optional)
"""
try:
# Build title and message based on action type
if action_type == "purchase_order":
title = f"Purchase Order Created: {action_details.get('supplier_name', 'Unknown')}"
message = f"Ordered {action_details.get('items_count', 0)} items - {reason}"
elif action_type == "production_batch":
title = f"Production Batch Scheduled: {action_details.get('product_name', 'Unknown')}"
message = f"Scheduled {action_details.get('quantity', 0)} {action_details.get('unit', 'units')} - {reason}"
elif action_type == "inventory_adjustment":
title = f"Inventory Adjustment: {action_details.get('ingredient_name', 'Unknown')}"
message = f"Adjusted by {action_details.get('quantity', 0)} {action_details.get('unit', 'units')} - {reason}"
else:
title = f"Action Created: {action_type}"
message = reason
event = RawEvent(
tenant_id=tenant_id,
event_class=EventClass.NOTIFICATION,
event_domain=EventDomain.OPERATIONS,
event_type="action_created",
title=title,
message=message,
service="orchestrator",
event_metadata={
"run_id": run_id,
"action_id": action_id,
"action_type": action_type,
"action_details": action_details,
"reason": reason,
"estimated_impact": estimated_impact,
"created_at": datetime.now(timezone.utc).isoformat(),
},
timestamp=datetime.now(timezone.utc),
)
await self.publish_item(tenant_id, event.dict(), item_type="notification")
logger.info(
f"Action created notification emitted: {action_type} - {action_id}",
extra={"tenant_id": tenant_id, "action_id": action_id}
)
except Exception as e:
logger.error(
f"Failed to emit action created notification: {e}",
extra={"tenant_id": tenant_id, "action_id": action_id},
exc_info=True,
)
async def emit_action_completed_notification(
self,
db: Session,
tenant_id: str,
action_id: str,
action_type: str,
action_status: str, # 'approved', 'completed', 'rejected', 'cancelled'
completed_by: Optional[str] = None,
) -> None:
"""
Emit notification when an orchestrator action is completed/resolved.
Args:
db: Database session
tenant_id: Tenant ID
action_id: Action ID
action_type: Type of action
action_status: Final status
completed_by: Who completed it (optional)
"""
try:
message = f"{action_type.replace('_', ' ').title()}: {action_status}"
if completed_by:
message += f" by {completed_by}"
event = RawEvent(
tenant_id=tenant_id,
event_class=EventClass.NOTIFICATION,
event_domain=EventDomain.OPERATIONS,
event_type="action_completed",
title=f"Action {action_status.title()}",
message=message,
service="orchestrator",
event_metadata={
"action_id": action_id,
"action_type": action_type,
"action_status": action_status,
"completed_by": completed_by,
"completed_at": datetime.now(timezone.utc).isoformat(),
},
timestamp=datetime.now(timezone.utc),
)
await self.publish_item(tenant_id, event.dict(), item_type="notification")
logger.info(
f"Action completed notification emitted: {action_id} ({action_status})",
extra={"tenant_id": tenant_id, "action_id": action_id}
)
except Exception as e:
logger.error(
f"Failed to emit action completed notification: {e}",
extra={"tenant_id": tenant_id, "action_id": action_id},
exc_info=True,
)
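# Illustrative usage sketch (not part of the service): emitting a run-completed
# notification from the scheduler. The `db` session, `rabbitmq_url` and the run
# figures are assumed to come from the orchestrator run; the values are placeholders.
async def example_emit_run_completed(db, rabbitmq_url):
    notifier = OrchestrationNotificationService(rabbitmq_url=rabbitmq_url)
    await notifier.emit_orchestration_run_completed_notification(
        db=db,
        tenant_id="00000000-0000-0000-0000-000000000000",
        run_id="run-2025-11-27-0600",
        duration_seconds=42.7,
        actions_created=5,
        actions_by_type={"purchase_order": 2, "production_batch": 3},
        status="success",
    )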


@@ -352,6 +352,15 @@ class OrchestratorSchedulerService(BaseAlertService):
ai_insights_posted = saga_result.get('ai_insights_posted', 0)
ai_insights_errors = saga_result.get('ai_insights_errors', [])
# Generate reasoning metadata for the orchestrator context
reasoning_metadata = self._generate_reasoning_metadata(
forecast_data,
production_data,
procurement_data,
ai_insights_generated,
ai_insights_posted
)
await repo.update_run(run_id, {
'status': OrchestrationStatus.completed,
'completed_at': completed_at,
@@ -377,10 +386,107 @@ class OrchestratorSchedulerService(BaseAlertService):
'ai_insights_posted': ai_insights_posted,
'ai_insights_completed_at': completed_at,
'saga_steps_total': total_steps,
'saga_steps_completed': completed_steps,
'run_metadata': reasoning_metadata
})
await session.commit()
def _generate_reasoning_metadata(
self,
forecast_data: Dict[str, Any],
production_data: Dict[str, Any],
procurement_data: Dict[str, Any],
ai_insights_generated: int,
ai_insights_posted: int
) -> Dict[str, Any]:
"""
Generate reasoning metadata for an orchestration run, to be consumed by the alert processor.
This creates structured reasoning data that the alert processor can use to provide
context when showing AI reasoning to users.
"""
reasoning_metadata = {
'reasoning': {
'type': 'daily_orchestration_summary',
'timestamp': datetime.now(timezone.utc).isoformat(),
'summary': 'Daily orchestration run completed successfully',
'details': {}
},
'purchase_orders': [],
'production_batches': [],
'ai_insights': {
'generated': ai_insights_generated,
'posted': ai_insights_posted
}
}
# Add forecast reasoning
if forecast_data:
reasoning_metadata['reasoning']['details']['forecasting'] = {
'forecasts_created': forecast_data.get('forecasts_created', 0),
'method': 'automated_daily_forecast',
'reasoning': 'Generated forecasts based on historical patterns and seasonal trends'
}
# Add production reasoning
if production_data:
reasoning_metadata['reasoning']['details']['production'] = {
'batches_created': production_data.get('batches_created', 0),
'method': 'demand_based_scheduling',
'reasoning': 'Scheduled production batches based on forecasted demand and inventory levels'
}
# Add procurement reasoning
if procurement_data:
reasoning_metadata['reasoning']['details']['procurement'] = {
'requirements_created': procurement_data.get('requirements_created', 0),
'pos_created': procurement_data.get('pos_created', 0),
'method': 'automated_procurement',
'reasoning': 'Generated procurement plan based on production needs and inventory optimization'
}
# Add purchase order details with reasoning
if procurement_data and procurement_data.get('purchase_orders'):
for po in procurement_data['purchase_orders']:
po_reasoning = {
'id': po.get('id'),
'status': po.get('status', 'created'),
'delivery_date': po.get('delivery_date'),
'reasoning': {
'type': 'inventory_optimization',
'parameters': {
'trigger': 'low_stock_prediction',
'min_depletion_days': po.get('min_depletion_days', 3),
'quantity': po.get('quantity'),
'unit': po.get('unit'),
'supplier': po.get('supplier_name'),
'financial_impact_eur': po.get('estimated_savings_eur', 0)
}
}
}
reasoning_metadata['purchase_orders'].append(po_reasoning)
# Add production batch details with reasoning
if production_data and production_data.get('production_batches'):
for batch in production_data['production_batches']:
batch_reasoning = {
'id': batch.get('id'),
'status': batch.get('status', 'scheduled'),
'scheduled_date': batch.get('scheduled_date'),
'reasoning': {
'type': 'demand_forecasting',
'parameters': {
'trigger': 'forecasted_demand',
'forecasted_quantity': batch.get('forecasted_quantity'),
'product_name': batch.get('product_name'),
'financial_impact_eur': batch.get('estimated_revenue_eur', 0)
}
}
}
reasoning_metadata['production_batches'].append(batch_reasoning)
return reasoning_metadata
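# Illustrative shape of the returned reasoning_metadata (trimmed; assumes a run
# that produced one purchase order and one production batch):
# {
#   'reasoning': {'type': 'daily_orchestration_summary', 'timestamp': '<iso>',
#                 'summary': 'Daily orchestration run completed successfully',
#                 'details': {'forecasting': {...}, 'production': {...}, 'procurement': {...}}},
#   'purchase_orders': [{'id': '<po id>', 'status': 'created', 'delivery_date': '<date>',
#                        'reasoning': {'type': 'inventory_optimization', 'parameters': {...}}}],
#   'production_batches': [{'id': '<batch id>', 'status': 'scheduled', 'scheduled_date': '<date>',
#                           'reasoning': {'type': 'demand_forecasting', 'parameters': {...}}}],
#   'ai_insights': {'generated': 3, 'posted': 3}
# }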
async def _mark_orchestration_failed(self, run_id: uuid.UUID, error_message: str):
"""Mark orchestration run as failed"""
async with self.db_manager.get_session() as session: