Initial commit - production deployment
This commit is contained in:
1
services/alert_processor/app/enrichment/__init__.py
Normal file
1
services/alert_processor/app/enrichment/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
"""Enrichment components for alert processing."""
|
||||
156
services/alert_processor/app/enrichment/business_impact.py
Normal file
156
services/alert_processor/app/enrichment/business_impact.py
Normal file
@@ -0,0 +1,156 @@
|
||||
"""
|
||||
Business impact analyzer for alerts.
|
||||
|
||||
Calculates financial impact, affected orders, customer impact, and other
|
||||
business metrics from event metadata.
|
||||
"""
|
||||
|
||||
from typing import Dict, Any
|
||||
import structlog
|
||||
|
||||
logger = structlog.get_logger()
|
||||
|
||||
|
||||
class BusinessImpactAnalyzer:
    """Analyze business impact from event metadata."""

    def analyze(self, event_type: str, metadata: Dict[str, Any]) -> dict:
        """
        Analyze business impact for an event.

        Returns dict with:
        - financial_impact_eur: Direct financial cost
        - affected_orders: Number of orders impacted
        - affected_customers: List of customer names
        - production_delay_hours: Hours of production delay
        - estimated_revenue_loss_eur: Potential revenue loss
        - customer_impact: high/medium/low
        - waste_risk_kg: Potential waste in kg
        """
        # Neutral baseline; the matching category analyzer overwrites fields.
        result: Dict[str, Any] = {
            "financial_impact_eur": 0,
            "affected_orders": 0,
            "affected_customers": [],
            "production_delay_hours": 0,
            "estimated_revenue_loss_eur": 0,
            "customer_impact": "low",
            "waste_risk_kg": 0,
        }

        # Dispatch on substrings of the event type; first match wins.
        if "stock" in event_type or "shortage" in event_type:
            overrides = self._analyze_stock_impact(metadata)
        elif "production" in event_type or "delay" in event_type or "equipment" in event_type:
            overrides = self._analyze_production_impact(metadata)
        elif "po_" in event_type or "delivery" in event_type:
            overrides = self._analyze_procurement_impact(metadata)
        elif "quality" in event_type or "expired" in event_type:
            overrides = self._analyze_quality_impact(metadata)
        else:
            overrides = {}

        result.update(overrides)
        return result

    def _analyze_stock_impact(self, metadata: Dict[str, Any]) -> dict:
        """Analyze impact of stock-related alerts."""
        shortage = metadata.get("shortage_amount", 0)
        cost_per_unit = metadata.get("unit_cost", 5)  # Default €5/kg
        orders = metadata.get("affected_orders", 0)

        result = {
            "financial_impact_eur": float(shortage) * cost_per_unit,
            "affected_orders": orders,
        }

        # More disrupted orders -> stronger customer impact.
        if orders > 5:
            result["customer_impact"] = "high"
        elif orders > 2:
            result["customer_impact"] = "medium"

        # Rough revenue estimate at €50 per order.
        result["estimated_revenue_loss_eur"] = orders * 50

        return result

    def _analyze_production_impact(self, metadata: Dict[str, Any]) -> dict:
        """Analyze impact of production-related alerts."""
        minutes_late = metadata.get("delay_minutes", 0)
        delay_hours = round(minutes_late / 60, 1)
        orders = metadata.get("affected_orders", 0)

        result = {
            "production_delay_hours": delay_hours,
            "affected_orders": orders,
            "affected_customers": metadata.get("customer_names", []),
            # €100/hour operational cost for idle production.
            "financial_impact_eur": delay_hours * 100,
        }

        # Delay length drives the customer-impact rating.
        if minutes_late > 120:  # 2+ hours
            result["customer_impact"] = "high"
        elif minutes_late > 60:  # 1+ hours
            result["customer_impact"] = "medium"

        # Revenue loss only when orders are actually affected (€50/order).
        if orders > 0:
            result["estimated_revenue_loss_eur"] = orders * 50

        return result

    def _analyze_procurement_impact(self, metadata: Dict[str, Any]) -> dict:
        """Analyze impact of procurement-related alerts."""
        result: Dict[str, Any] = {}

        # Prefer potential_loss_eur from reasoning_data.parameters (what's at
        # risk); fall back to the PO amount when reasoning data is absent.
        loss = (
            metadata.get("reasoning_data", {})
            .get("parameters", {})
            .get("potential_loss_eur")
        )
        if loss is not None:
            result["financial_impact_eur"] = float(loss)
        else:
            fallback = metadata.get("po_amount", metadata.get("total_amount", 0))
            result["financial_impact_eur"] = float(fallback)

        # Days overdue drives customer impact.
        overdue = metadata.get("days_overdue", 0)
        if overdue > 3:
            result["customer_impact"] = "high"
        elif overdue > 1:
            result["customer_impact"] = "medium"

        return result

    def _analyze_quality_impact(self, metadata: Dict[str, Any]) -> dict:
        """Analyze impact of quality-related alerts (expired products)."""
        expired = metadata.get("expired_count", 0)

        result = {
            "financial_impact_eur": float(metadata.get("total_value", 0)),
            "waste_risk_kg": metadata.get("total_quantity_kg", 0),
        }

        if expired > 5:
            result["customer_impact"] = "high"
        elif expired > 2:
            result["customer_impact"] = "medium"

        return result
|
||||
244
services/alert_processor/app/enrichment/message_generator.py
Normal file
244
services/alert_processor/app/enrichment/message_generator.py
Normal file
@@ -0,0 +1,244 @@
|
||||
"""
|
||||
Message generator for creating i18n message keys and parameters.
|
||||
|
||||
Converts minimal event metadata into structured i18n format for frontend translation.
|
||||
"""
|
||||
|
||||
from typing import Dict, Any
|
||||
from datetime import datetime
|
||||
from app.utils.message_templates import ALERT_TEMPLATES, NOTIFICATION_TEMPLATES, RECOMMENDATION_TEMPLATES
|
||||
import structlog
|
||||
|
||||
logger = structlog.get_logger()
|
||||
|
||||
|
||||
class MessageGenerator:
    """Generates i18n message keys and parameters from event metadata."""

    def generate_message(self, event_type: str, metadata: Dict[str, Any], event_class: str = "alert") -> dict:
        """
        Generate i18n structure for frontend.

        Args:
            event_type: Alert/notification/recommendation type
            metadata: Event metadata dictionary
            event_class: One of: alert, notification, recommendation

        Returns:
            Dictionary with title_key, title_params, message_key, message_params
        """
        # Select appropriate template collection
        if event_class == "notification":
            templates = NOTIFICATION_TEMPLATES
        elif event_class == "recommendation":
            templates = RECOMMENDATION_TEMPLATES
        else:
            templates = ALERT_TEMPLATES

        template = templates.get(event_type)

        if not template:
            # Unknown event type: log and fall back to a generic message.
            logger.warning("no_template_found", event_type=event_type, event_class=event_class)
            return self._generate_fallback(event_type, metadata)

        # Build parameters from metadata
        title_params = self._build_params(template["title_params"], metadata)
        message_params = self._build_params(template["message_params"], metadata)

        # Select message variant based on context
        message_key = self._select_message_variant(
            template["message_variants"],
            metadata
        )

        return {
            "title_key": template["title_key"],
            "title_params": title_params,
            "message_key": message_key,
            "message_params": message_params
        }

    def _generate_fallback(self, event_type: str, metadata: Dict[str, Any]) -> dict:
        """Generate fallback message structure when template not found."""
        return {
            "title_key": "alerts.generic.title",
            "title_params": {},
            "message_key": "alerts.generic.message",
            "message_params": {
                "event_type": event_type,
                "metadata_summary": self._summarize_metadata(metadata)
            }
        }

    def _summarize_metadata(self, metadata: Dict[str, Any]) -> str:
        """Create human-readable summary of metadata (first 3 fields only)."""
        items = list(metadata.items())[:3]
        summary_parts = [f"{k}: {v}" for k, v in items]
        return ", ".join(summary_parts)

    def _build_params(self, param_mapping: dict, metadata: dict) -> dict:
        """
        Extract and transform parameters from metadata.

        param_mapping format: {"display_param_name": "metadata_key"}

        The transformation applied is chosen from the display name's suffix
        (_kg, _eur, _percentage, _date, _day_name, _datetime).
        NOTE(review): the float() conversions assume numeric metadata values;
        a non-numeric value raises ValueError -- confirm upstream guarantees.
        """
        params = {}

        for param_key, metadata_key in param_mapping.items():
            if metadata_key not in metadata:
                # Missing metadata keys are silently skipped.
                continue

            value = metadata[metadata_key]

            # Apply transformations based on parameter suffix
            if param_key.endswith("_kg"):
                value = round(float(value), 1)
            elif param_key.endswith("_eur"):
                value = round(float(value), 2)
            elif param_key.endswith("_percentage"):
                value = round(float(value), 1)
            elif param_key.endswith("_date"):
                value = self._format_date(value)
            elif param_key.endswith("_day_name"):
                value = self._format_day_name(value)
            elif param_key.endswith("_datetime"):
                value = self._format_datetime(value)

            params[param_key] = value

        return params

    def _select_message_variant(self, variants: dict, metadata: dict) -> str:
        """
        Select appropriate message variant based on metadata context.

        Checks for specific conditions in priority order; falls back to the
        "generic" variant (or the first declared variant) when nothing matches.
        """
        # Check for PO-related variants
        if "po_id" in metadata:
            if metadata.get("po_status") == "pending_approval":
                variant = variants.get("with_po_pending")
                if variant:
                    return variant
            else:
                variant = variants.get("with_po_created")
                if variant:
                    return variant

        # Check for time-based variants
        if "hours_until" in metadata:
            variant = variants.get("with_hours")
            if variant:
                return variant

        if "production_date" in metadata or "planned_date" in metadata:
            variant = variants.get("with_date")
            if variant:
                return variant

        # Check for customer-related variants
        if "customer_names" in metadata and metadata.get("customer_names"):
            variant = variants.get("with_customers")
            if variant:
                return variant

        # Check for order-related variants
        if "affected_orders" in metadata and metadata.get("affected_orders", 0) > 0:
            variant = variants.get("with_orders")
            if variant:
                return variant

        # Check for supplier contact variants
        if "supplier_contact" in metadata:
            variant = variants.get("with_supplier")
            if variant:
                return variant

        # Check for batch-related variants
        if "affected_batches" in metadata and metadata.get("affected_batches", 0) > 0:
            variant = variants.get("with_batches")
            if variant:
                return variant

        # Check for product names list variants
        if "product_names" in metadata and metadata.get("product_names"):
            variant = variants.get("with_names")
            if variant:
                return variant

        # Check for time duration variants
        if "hours_overdue" in metadata:
            variant = variants.get("with_hours")
            if variant:
                return variant

        if "days_overdue" in metadata:
            variant = variants.get("with_days")
            if variant:
                return variant

        # Default to generic variant
        return variants.get("generic", variants[list(variants.keys())[0]])

    def _format_date(self, date_value: Any) -> str:
        """
        Format date for display.

        Accepts:
        - ISO string: "2025-12-10"
        - datetime object
        - date object

        Returns: ISO format "YYYY-MM-DD"; unparseable strings pass through
        unchanged.
        """
        if isinstance(date_value, str):
            # Normalize a trailing "Z" so fromisoformat accepts UTC stamps.
            try:
                dt = datetime.fromisoformat(date_value.replace('Z', '+00:00'))
            except ValueError:
                # Not an ISO date -- return the raw string unchanged.
                return date_value
            return dt.date().isoformat()

        if isinstance(date_value, datetime):
            return date_value.date().isoformat()

        if hasattr(date_value, 'isoformat'):
            # Plain date objects (and anything else date-like).
            return date_value.isoformat()

        return str(date_value)

    def _format_day_name(self, date_value: Any) -> str:
        """
        Format day name with date.

        Example target: "miércoles 10 de diciembre"

        Note: Frontend will handle localization. For now, return the ISO
        date and let the frontend format the localized day name.
        """
        # Previously this re-parsed the date only to discard the result and
        # return iso_date on both paths; the re-parse was dead code.
        return self._format_date(date_value)

    def _format_datetime(self, datetime_value: Any) -> str:
        """
        Format datetime for display.

        Returns: ISO 8601 format with timezone (strings pass through as-is).
        """
        if isinstance(datetime_value, str):
            return datetime_value

        if isinstance(datetime_value, datetime):
            return datetime_value.isoformat()

        if hasattr(datetime_value, 'isoformat'):
            return datetime_value.isoformat()

        return str(datetime_value)
|
||||
165
services/alert_processor/app/enrichment/orchestrator_client.py
Normal file
165
services/alert_processor/app/enrichment/orchestrator_client.py
Normal file
@@ -0,0 +1,165 @@
|
||||
"""
|
||||
Orchestrator client for querying AI action context.
|
||||
|
||||
Queries the orchestrator service to determine if AI has already
|
||||
addressed the issue and what actions were taken.
|
||||
"""
|
||||
|
||||
from typing import Dict, Any, Optional
|
||||
import httpx
|
||||
import structlog
|
||||
from datetime import datetime, timedelta
|
||||
|
||||
logger = structlog.get_logger()
|
||||
|
||||
|
||||
class OrchestratorClient:
    """HTTP client for querying orchestrator service."""

    def __init__(self, base_url: str = "http://orchestrator-service:8000"):
        """
        Initialize orchestrator client.

        Args:
            base_url: Base URL of orchestrator service
        """
        self.base_url = base_url
        self.timeout = 10.0  # 10 second timeout

    async def get_context(
        self,
        tenant_id: str,
        event_type: str,
        metadata: Dict[str, Any]
    ) -> dict:
        """
        Query orchestrator for AI action context.

        Returns dict with:
        - already_addressed: Boolean - did AI handle this?
        - action_type: Type of action taken
        - action_id: ID of the action
        - action_summary: Human-readable summary
        - reasoning: AI reasoning for the action
        - confidence: Confidence score (0-1)
        - estimated_savings_eur: Estimated savings
        - prevented_issue: What issue was prevented
        - created_at: When action was created

        On timeout or any error the default ("not addressed") context is
        returned rather than raising.
        """
        context = {
            "already_addressed": False,
            "confidence": 0.8  # Default confidence
        }

        try:
            # Merge tenant id with the event-specific query parameters.
            request_params = {"tenant_id": tenant_id}
            request_params.update(self._build_query_params(event_type, metadata))

            async with httpx.AsyncClient(timeout=self.timeout) as client:
                response = await client.get(
                    f"{self.base_url}/api/internal/recent-actions",
                    params=request_params,
                    headers={"x-internal-service": "alert-intelligence"},
                )

                if response.status_code == 200:
                    context.update(self._parse_response(response.json(), event_type, metadata))
                elif response.status_code == 404:
                    # No recent actions found - that's okay
                    logger.debug("no_orchestrator_actions", tenant_id=tenant_id, event_type=event_type)
                else:
                    logger.warning(
                        "orchestrator_query_failed",
                        status_code=response.status_code,
                        tenant_id=tenant_id
                    )

        except httpx.TimeoutException:
            logger.warning("orchestrator_timeout", tenant_id=tenant_id, event_type=event_type)

        except Exception as e:
            logger.error("orchestrator_query_error", error=str(e), tenant_id=tenant_id)

        return context

    def _build_query_params(self, event_type: str, metadata: Dict[str, Any]) -> dict:
        """Build query parameters based on event type."""
        params: Dict[str, Any] = {}

        if "stock" in event_type or "shortage" in event_type:
            # Stock alerts: look for purchase-order actions on the ingredient.
            ingredient_id = metadata.get("ingredient_id")
            if ingredient_id:
                params["related_entity_type"] = "ingredient"
                params["related_entity_id"] = ingredient_id
                params["action_types"] = "purchase_order_created,purchase_order_approved"

        elif "production" in event_type or "delay" in event_type:
            # Production delays: look for batch adjustments.
            batch_id = metadata.get("batch_id")
            if batch_id:
                params["related_entity_type"] = "production_batch"
                params["related_entity_id"] = batch_id
                params["action_types"] = "production_adjusted,batch_rescheduled"

        elif "po_approval" in event_type:
            # PO approval: check whether it was already approved/rejected.
            po_id = metadata.get("po_id")
            if po_id:
                params["related_entity_type"] = "purchase_order"
                params["related_entity_id"] = po_id
                params["action_types"] = "purchase_order_approved,purchase_order_rejected"

        # Only consider recent actions (last 24 hours).
        params["since_hours"] = 24

        return params

    def _parse_response(
        self,
        data: dict,
        event_type: str,
        metadata: Dict[str, Any]
    ) -> dict:
        """Parse orchestrator response into context."""
        actions = (data or {}).get("actions") or []
        if not actions:
            return {"already_addressed": False}

        # The first entry is the most recent action.
        latest = actions[0]

        context = {
            "already_addressed": True,
            "action_type": latest.get("action_type"),
            "action_id": latest.get("id"),
            "action_summary": latest.get("summary", ""),
            "reasoning": latest.get("reasoning", {}),
            "confidence": latest.get("confidence", 0.8),
            "created_at": latest.get("created_at"),
            "action_status": latest.get("status", "completed"),
        }

        # Enrich with action-type specific fields.
        kind = latest.get("action_type")
        if kind == "purchase_order_created":
            context["estimated_savings_eur"] = latest.get("estimated_savings_eur", 0)
            context["prevented_issue"] = "stockout"
            if latest.get("delivery_date"):
                context["delivery_date"] = latest["delivery_date"]
        elif kind == "production_adjusted":
            context["prevented_issue"] = "production_delay"
            context["adjustment_type"] = latest.get("adjustment_type")

        return context
|
||||
256
services/alert_processor/app/enrichment/priority_scorer.py
Normal file
256
services/alert_processor/app/enrichment/priority_scorer.py
Normal file
@@ -0,0 +1,256 @@
|
||||
"""
|
||||
Multi-factor priority scoring for alerts.
|
||||
|
||||
Calculates priority score (0-100) based on:
|
||||
- Business impact (40%): Financial impact, affected orders, customer impact
|
||||
- Urgency (30%): Time until consequence, deadlines
|
||||
- User agency (20%): Can user fix it? External dependencies?
|
||||
- Confidence (10%): AI confidence in assessment
|
||||
|
||||
Also applies escalation boosts for age and deadline proximity.
|
||||
"""
|
||||
|
||||
from typing import Dict, Any
|
||||
import structlog
|
||||
|
||||
logger = structlog.get_logger()
|
||||
|
||||
|
||||
class PriorityScorer:
    """Calculate multi-factor priority score (0-100)."""

    # Weights for priority calculation
    BUSINESS_IMPACT_WEIGHT = 0.4
    URGENCY_WEIGHT = 0.3
    USER_AGENCY_WEIGHT = 0.2
    CONFIDENCE_WEIGHT = 0.1

    # Priority thresholds
    CRITICAL_THRESHOLD = 90
    IMPORTANT_THRESHOLD = 70
    STANDARD_THRESHOLD = 50

    def calculate_priority(
        self,
        business_impact: dict,
        urgency: dict,
        user_agency: dict,
        orchestrator_context: dict
    ) -> int:
        """
        Calculate weighted priority score.

        Args:
            business_impact: Business impact context
            urgency: Urgency context
            user_agency: User agency context
            orchestrator_context: AI orchestrator context

        Returns:
            Priority score (0-100)
        """
        # Score each dimension on a 0-100 scale.
        impact_score = self._score_business_impact(business_impact)
        urgency_score = self._score_urgency(urgency)
        agency_score = self._score_user_agency(user_agency)
        confidence_score = orchestrator_context.get("confidence", 0.8) * 100

        # Combine via the class weights, then add any escalation boost,
        # capping the final value at 100.
        weighted = (
            impact_score * self.BUSINESS_IMPACT_WEIGHT
            + urgency_score * self.URGENCY_WEIGHT
            + agency_score * self.USER_AGENCY_WEIGHT
            + confidence_score * self.CONFIDENCE_WEIGHT
        )
        escalation_boost = self._calculate_escalation_boost(urgency)
        score = int(min(100, weighted + escalation_boost))

        logger.debug(
            "priority_calculated",
            score=score,
            impact_score=impact_score,
            urgency_score=urgency_score,
            agency_score=agency_score,
            confidence_score=confidence_score,
            escalation_boost=escalation_boost
        )

        return score

    def _score_business_impact(self, impact: dict) -> int:
        """
        Score business impact (0-100).

        Starts at a base of 50 and adds points for financial impact,
        affected orders, customer impact level, production delay, and
        revenue at risk; capped at 100.
        """
        score = 50  # Base score

        # Financial impact in EUR (first matching tier applies).
        financial = impact.get("financial_impact_eur", 0)
        for floor, bonus in ((1000, 30), (500, 20), (100, 10)):
            if financial > floor:
                score += bonus
                break

        # Number of affected orders.
        orders = impact.get("affected_orders", 0)
        for floor, bonus in ((10, 15), (5, 10), (0, 5)):
            if orders > floor:
                score += bonus
                break

        # Customer impact level.
        score += {"high": 15, "medium": 5}.get(impact.get("customer_impact", "low"), 0)

        # Production delay hours.
        delay = impact.get("production_delay_hours", 0)
        for floor, bonus in ((4, 10), (2, 5)):
            if delay > floor:
                score += bonus
                break

        # Estimated revenue loss.
        revenue_loss = impact.get("estimated_revenue_loss_eur", 0)
        for floor, bonus in ((500, 10), (200, 5)):
            if revenue_loss > floor:
                score += bonus
                break

        return min(100, score)

    def _score_urgency(self, urgency: dict) -> int:
        """
        Score urgency (0-100).

        Considers time until consequence, whether the issue can wait until
        tomorrow, deadline presence, and peak-hour relevance.
        """
        score = 50  # Base score

        # Closer consequences earn larger boosts (first matching tier).
        hours_left = urgency.get("hours_until_consequence", 24)
        for ceiling, bonus in ((2, 40), (6, 30), (12, 20), (24, 10)):
            if hours_left < ceiling:
                score += bonus
                break

        # Cannot wait until tomorrow.
        if not urgency.get("can_wait_until_tomorrow", True):
            score += 10

        # Explicit deadline present.
        if urgency.get("deadline_utc"):
            score += 5

        # Relevant to peak production/demand hours.
        if urgency.get("peak_hour_relevant", False):
            score += 5

        return min(100, score)

    def _score_user_agency(self, agency: dict) -> int:
        """
        Score user agency (0-100).

        Higher when the user can fix the issue themselves; lower when the
        fix is blocked or depends on external parties.
        """
        score = 50  # Base score

        # +30 when the user can act directly, -20 when they cannot.
        score += 30 if agency.get("can_user_fix", False) else -20

        # External dependencies reduce agency.
        if agency.get("requires_external_party", False):
            score -= 10

        # Each blocker costs 5 points.
        score -= 5 * len(agency.get("blockers", []))

        # A known workaround restores a little agency.
        if agency.get("suggested_workaround"):
            score += 5

        return max(0, min(100, score))

    def _calculate_escalation_boost(self, urgency: dict) -> int:
        """
        Calculate escalation boost for pending alerts.

        Adds points for age (pending >48h / >72h) and deadline proximity
        (<6h / <24h); the combined boost is capped at +30.
        """
        boost = 0

        # Age-based escalation.
        pending = urgency.get("hours_pending", 0)
        if pending > 72:
            boost += 20
        elif pending > 48:
            boost += 10

        # Deadline proximity.
        remaining = urgency.get("hours_until_consequence", 24)
        if remaining < 6:
            boost += 30
        elif remaining < 24:
            boost += 15

        # Cap at +30.
        return min(30, boost)

    def get_priority_level(self, score: int) -> str:
        """
        Convert numeric score to priority level.

        - 90-100: critical
        - 70-89: important
        - 50-69: standard
        - 0-49: info
        """
        for threshold, level in (
            (self.CRITICAL_THRESHOLD, "critical"),
            (self.IMPORTANT_THRESHOLD, "important"),
            (self.STANDARD_THRESHOLD, "standard"),
        ):
            if score >= threshold:
                return level
        return "info"
|
||||
304
services/alert_processor/app/enrichment/smart_actions.py
Normal file
304
services/alert_processor/app/enrichment/smart_actions.py
Normal file
@@ -0,0 +1,304 @@
|
||||
"""
|
||||
Smart action generator for alerts.
|
||||
|
||||
Generates actionable buttons with deep links, phone numbers,
|
||||
and other interactive elements based on alert type and metadata.
|
||||
"""
|
||||
|
||||
from typing import Dict, Any, List
|
||||
import structlog
|
||||
|
||||
logger = structlog.get_logger()
|
||||
|
||||
|
||||
class SmartActionGenerator:
|
||||
"""Generate smart action buttons for alerts"""
|
||||
|
||||
def generate_actions(
    self,
    event_type: str,
    metadata: Dict[str, Any],
    orchestrator_context: dict
) -> List[dict]:
    """
    Generate smart actions for an event.

    Each action has:
    - action_type: Identifier for frontend handling
    - label_key: i18n key for button label
    - label_params: Parameters for label translation
    - variant: primary/secondary/danger/ghost
    - disabled: Boolean
    - disabled_reason_key: i18n key if disabled
    - consequence_key: i18n key for confirmation dialog
    - url: Deep link or tel: or mailto:
    - metadata: Additional data for action
    """
    # When the AI already handled the issue, the only action offered is a
    # link to review what it did.
    if orchestrator_context and orchestrator_context.get("already_addressed"):
        return [self._create_view_action(orchestrator_context)]

    actions: List[dict] = []

    # Type-specific actions; the first matching category wins.
    if "po_approval" in event_type:
        actions += self._create_po_approval_actions(metadata)
    elif "stock" in event_type or "shortage" in event_type:
        actions += self._create_stock_actions(metadata)
    elif "production" in event_type or "delay" in event_type:
        actions += self._create_production_actions(metadata)
    elif "equipment" in event_type:
        actions += self._create_equipment_actions(metadata)
    elif "delivery" in event_type or "overdue" in event_type:
        actions += self._create_delivery_actions(metadata)
    elif "temperature" in event_type:
        actions += self._create_temperature_actions(metadata)

    # Common actions are always appended last.
    actions += self._create_common_actions()

    return actions
|
||||
|
||||
def _create_view_action(self, orchestrator_context: dict) -> dict:
|
||||
"""Create action to view what AI did"""
|
||||
return {
|
||||
"action_type": "open_reasoning",
|
||||
"label_key": "actions.view_ai_action",
|
||||
"label_params": {},
|
||||
"variant": "primary",
|
||||
"disabled": False,
|
||||
"metadata": {
|
||||
"action_id": orchestrator_context.get("action_id"),
|
||||
"action_type": orchestrator_context.get("action_type")
|
||||
}
|
||||
}
|
||||
|
||||
def _create_po_approval_actions(self, metadata: Dict[str, Any]) -> List[dict]:
|
||||
"""Create actions for PO approval alerts"""
|
||||
po_id = metadata.get("po_id")
|
||||
po_amount = metadata.get("total_amount", metadata.get("po_amount", 0))
|
||||
|
||||
return [
|
||||
{
|
||||
"action_type": "approve_po",
|
||||
"label_key": "actions.approve_po",
|
||||
"label_params": {"amount": po_amount},
|
||||
"variant": "primary",
|
||||
"disabled": False,
|
||||
"consequence_key": "actions.approve_po_consequence",
|
||||
"url": f"/app/procurement/purchase-orders/{po_id}",
|
||||
"metadata": {"po_id": po_id, "amount": po_amount}
|
||||
},
|
||||
{
|
||||
"action_type": "reject_po",
|
||||
"label_key": "actions.reject_po",
|
||||
"label_params": {},
|
||||
"variant": "danger",
|
||||
"disabled": False,
|
||||
"consequence_key": "actions.reject_po_consequence",
|
||||
"url": f"/app/procurement/purchase-orders/{po_id}",
|
||||
"metadata": {"po_id": po_id}
|
||||
},
|
||||
{
|
||||
"action_type": "modify_po",
|
||||
"label_key": "actions.modify_po",
|
||||
"label_params": {},
|
||||
"variant": "secondary",
|
||||
"disabled": False,
|
||||
"url": f"/app/procurement/purchase-orders/{po_id}/edit",
|
||||
"metadata": {"po_id": po_id}
|
||||
}
|
||||
]
|
||||
|
||||
def _create_stock_actions(self, metadata: Dict[str, Any]) -> List[dict]:
|
||||
"""Create actions for stock-related alerts"""
|
||||
actions = []
|
||||
|
||||
# If supplier info available, add call button
|
||||
if metadata.get("supplier_contact"):
|
||||
actions.append({
|
||||
"action_type": "call_supplier",
|
||||
"label_key": "actions.call_supplier",
|
||||
"label_params": {
|
||||
"supplier": metadata.get("supplier_name", "Supplier"),
|
||||
"phone": metadata.get("supplier_contact")
|
||||
},
|
||||
"variant": "primary",
|
||||
"disabled": False,
|
||||
"url": f"tel:{metadata['supplier_contact']}",
|
||||
"metadata": {
|
||||
"supplier_name": metadata.get("supplier_name"),
|
||||
"phone": metadata.get("supplier_contact")
|
||||
}
|
||||
})
|
||||
|
||||
# If PO exists, add view PO button
|
||||
if metadata.get("po_id"):
|
||||
if metadata.get("po_status") == "pending_approval":
|
||||
actions.append({
|
||||
"action_type": "approve_po",
|
||||
"label_key": "actions.approve_po",
|
||||
"label_params": {"amount": metadata.get("po_amount", 0)},
|
||||
"variant": "primary",
|
||||
"disabled": False,
|
||||
"url": f"/app/procurement/purchase-orders/{metadata['po_id']}",
|
||||
"metadata": {"po_id": metadata["po_id"]}
|
||||
})
|
||||
else:
|
||||
actions.append({
|
||||
"action_type": "view_po",
|
||||
"label_key": "actions.view_po",
|
||||
"label_params": {"po_number": metadata.get("po_number", metadata["po_id"])},
|
||||
"variant": "secondary",
|
||||
"disabled": False,
|
||||
"url": f"/app/procurement/purchase-orders/{metadata['po_id']}",
|
||||
"metadata": {"po_id": metadata["po_id"]}
|
||||
})
|
||||
|
||||
# Add create PO button if no PO exists
|
||||
else:
|
||||
actions.append({
|
||||
"action_type": "create_po",
|
||||
"label_key": "actions.create_po",
|
||||
"label_params": {},
|
||||
"variant": "primary",
|
||||
"disabled": False,
|
||||
"url": f"/app/procurement/purchase-orders/new?ingredient_id={metadata.get('ingredient_id')}",
|
||||
"metadata": {"ingredient_id": metadata.get("ingredient_id")}
|
||||
})
|
||||
|
||||
return actions
|
||||
|
||||
def _create_production_actions(self, metadata: Dict[str, Any]) -> List[dict]:
|
||||
"""Create actions for production-related alerts"""
|
||||
actions = []
|
||||
|
||||
if metadata.get("batch_id"):
|
||||
actions.append({
|
||||
"action_type": "view_batch",
|
||||
"label_key": "actions.view_batch",
|
||||
"label_params": {"batch_number": metadata.get("batch_number", "")},
|
||||
"variant": "primary",
|
||||
"disabled": False,
|
||||
"url": f"/app/production/batches/{metadata['batch_id']}",
|
||||
"metadata": {"batch_id": metadata["batch_id"]}
|
||||
})
|
||||
|
||||
actions.append({
|
||||
"action_type": "adjust_production",
|
||||
"label_key": "actions.adjust_production",
|
||||
"label_params": {},
|
||||
"variant": "secondary",
|
||||
"disabled": False,
|
||||
"url": f"/app/production/batches/{metadata['batch_id']}/adjust",
|
||||
"metadata": {"batch_id": metadata["batch_id"]}
|
||||
})
|
||||
|
||||
return actions
|
||||
|
||||
def _create_equipment_actions(self, metadata: Dict[str, Any]) -> List[dict]:
|
||||
"""Create actions for equipment-related alerts"""
|
||||
return [
|
||||
{
|
||||
"action_type": "view_equipment",
|
||||
"label_key": "actions.view_equipment",
|
||||
"label_params": {"equipment_name": metadata.get("equipment_name", "")},
|
||||
"variant": "primary",
|
||||
"disabled": False,
|
||||
"url": f"/app/production/equipment/{metadata.get('equipment_id')}",
|
||||
"metadata": {"equipment_id": metadata.get("equipment_id")}
|
||||
},
|
||||
{
|
||||
"action_type": "schedule_maintenance",
|
||||
"label_key": "actions.schedule_maintenance",
|
||||
"label_params": {},
|
||||
"variant": "secondary",
|
||||
"disabled": False,
|
||||
"url": f"/app/production/equipment/{metadata.get('equipment_id')}/maintenance",
|
||||
"metadata": {"equipment_id": metadata.get("equipment_id")}
|
||||
}
|
||||
]
|
||||
|
||||
def _create_delivery_actions(self, metadata: Dict[str, Any]) -> List[dict]:
|
||||
"""Create actions for delivery-related alerts"""
|
||||
actions = []
|
||||
|
||||
if metadata.get("supplier_contact"):
|
||||
actions.append({
|
||||
"action_type": "call_supplier",
|
||||
"label_key": "actions.call_supplier",
|
||||
"label_params": {
|
||||
"supplier": metadata.get("supplier_name", "Supplier"),
|
||||
"phone": metadata.get("supplier_contact")
|
||||
},
|
||||
"variant": "primary",
|
||||
"disabled": False,
|
||||
"url": f"tel:{metadata['supplier_contact']}",
|
||||
"metadata": {
|
||||
"supplier_name": metadata.get("supplier_name"),
|
||||
"phone": metadata.get("supplier_contact")
|
||||
}
|
||||
})
|
||||
|
||||
if metadata.get("po_id"):
|
||||
actions.append({
|
||||
"action_type": "view_po",
|
||||
"label_key": "actions.view_po",
|
||||
"label_params": {"po_number": metadata.get("po_number", "")},
|
||||
"variant": "secondary",
|
||||
"disabled": False,
|
||||
"url": f"/app/procurement/purchase-orders/{metadata['po_id']}",
|
||||
"metadata": {"po_id": metadata["po_id"]}
|
||||
})
|
||||
|
||||
return actions
|
||||
|
||||
def _create_temperature_actions(self, metadata: Dict[str, Any]) -> List[dict]:
|
||||
"""Create actions for temperature breach alerts"""
|
||||
return [
|
||||
{
|
||||
"action_type": "view_sensor",
|
||||
"label_key": "actions.view_sensor",
|
||||
"label_params": {"location": metadata.get("location", "")},
|
||||
"variant": "primary",
|
||||
"disabled": False,
|
||||
"url": f"/app/inventory/sensors/{metadata.get('sensor_id')}",
|
||||
"metadata": {"sensor_id": metadata.get("sensor_id")}
|
||||
},
|
||||
{
|
||||
"action_type": "acknowledge_breach",
|
||||
"label_key": "actions.acknowledge_breach",
|
||||
"label_params": {},
|
||||
"variant": "secondary",
|
||||
"disabled": False,
|
||||
"metadata": {"sensor_id": metadata.get("sensor_id")}
|
||||
}
|
||||
]
|
||||
|
||||
def _create_common_actions(self) -> List[dict]:
|
||||
"""Create common actions available for all alerts"""
|
||||
return [
|
||||
{
|
||||
"action_type": "snooze",
|
||||
"label_key": "actions.snooze",
|
||||
"label_params": {"hours": 4},
|
||||
"variant": "ghost",
|
||||
"disabled": False,
|
||||
"metadata": {"snooze_hours": 4}
|
||||
},
|
||||
{
|
||||
"action_type": "dismiss",
|
||||
"label_key": "actions.dismiss",
|
||||
"label_params": {},
|
||||
"variant": "ghost",
|
||||
"disabled": False,
|
||||
"metadata": {}
|
||||
}
|
||||
]
|
||||
173
services/alert_processor/app/enrichment/urgency_analyzer.py
Normal file
173
services/alert_processor/app/enrichment/urgency_analyzer.py
Normal file
@@ -0,0 +1,173 @@
|
||||
"""
|
||||
Urgency analyzer for alerts.
|
||||
|
||||
Assesses time sensitivity, deadlines, and determines if action can wait.
|
||||
"""
|
||||
|
||||
from typing import Dict, Any
|
||||
from datetime import datetime, timedelta, timezone
|
||||
import structlog
|
||||
|
||||
logger = structlog.get_logger()
|
||||
|
||||
|
||||
class UrgencyAnalyzer:
    """Derive time-sensitivity signals for an alert from its event metadata."""

    def analyze(self, event_type: str, metadata: Dict[str, Any]) -> dict:
        """
        Analyze urgency for an event.

        Returns dict with:
        - hours_until_consequence: Time until impact occurs
        - can_wait_until_tomorrow: Boolean
        - deadline_utc: ISO datetime if deadline exists
        - peak_hour_relevant: Boolean
        - hours_pending: Age of alert
        """
        result = {
            "hours_until_consequence": 24,  # default horizon: one day
            "can_wait_until_tomorrow": True,
            "deadline_utc": None,
            "peak_hour_relevant": False,
            "hours_pending": 0,
        }

        # Refine by event type; first matching category wins.
        if "critical" in event_type or "urgent" in event_type:
            result["hours_until_consequence"] = 2
            result["can_wait_until_tomorrow"] = False
        elif "production" in event_type:
            result.update(self._analyze_production_urgency(metadata))
        elif "stock" in event_type or "shortage" in event_type:
            result.update(self._analyze_stock_urgency(metadata))
        elif "delivery" in event_type or "overdue" in event_type:
            result.update(self._analyze_delivery_urgency(metadata))
        elif "po_approval" in event_type:
            result.update(self._analyze_po_approval_urgency(metadata))

        # Explicit deadlines override the heuristics above; when several are
        # present, later fields win (same precedence as checking each in turn).
        for deadline_field in ("required_delivery_date", "production_date", "expected_date"):
            if deadline_field in metadata:
                result.update(self._calculate_deadline_urgency(metadata[deadline_field]))

        return result

    def _analyze_production_urgency(self, metadata: Dict[str, Any]) -> dict:
        """Urgency for production alerts, scaled by the current delay."""
        # Production is always peak-hour sensitive.
        out: dict = {"peak_hour_relevant": True}

        delay_minutes = metadata.get("delay_minutes", 0)
        if delay_minutes > 120:
            out["hours_until_consequence"] = 1
            out["can_wait_until_tomorrow"] = False
        elif delay_minutes > 60:
            out["hours_until_consequence"] = 4
            out["can_wait_until_tomorrow"] = False
        else:
            out["hours_until_consequence"] = 8

        return out

    def _analyze_stock_urgency(self, metadata: Dict[str, Any]) -> dict:
        """Urgency for stock alerts, from hours-until-needed or expiry days."""
        out: dict = {}

        if "hours_until" in metadata:
            hours = metadata["hours_until"]
            out["hours_until_consequence"] = hours
            out["can_wait_until_tomorrow"] = hours > 24
        elif "days_until_expiry" in metadata:
            days = metadata["days_until_expiry"]
            out["hours_until_consequence"] = days * 24
            if days <= 1:
                # Expiring within a day: must act today.
                out["can_wait_until_tomorrow"] = False

        return out

    def _analyze_delivery_urgency(self, metadata: Dict[str, Any]) -> dict:
        """Urgency for delivery alerts, scaled by how overdue they are."""
        out: dict = {}

        days_overdue = metadata.get("days_overdue", 0)
        if days_overdue > 3:
            out["hours_until_consequence"] = 2
            out["can_wait_until_tomorrow"] = False
        elif days_overdue > 1:
            out["hours_until_consequence"] = 8
            out["can_wait_until_tomorrow"] = False

        return out

    def _analyze_po_approval_urgency(self, metadata: Dict[str, Any]) -> dict:
        """
        Urgency for PO approval alerts.

        Uses the projected stockout time (``min_depletion_hours``) rather than
        the delivery date, since running out of stock is the real consequence.
        """
        out: dict = {}

        depletion_hours = (
            metadata.get("reasoning_data", {})
            .get("parameters", {})
            .get("min_depletion_hours")
        )

        if depletion_hours is not None:
            out["hours_until_consequence"] = max(0, round(depletion_hours, 1))
            out["can_wait_until_tomorrow"] = depletion_hours > 24

            # The deadline is the moment stock is projected to run out.
            runs_out_at = datetime.now(timezone.utc) + timedelta(hours=depletion_hours)
            out["deadline_utc"] = runs_out_at.isoformat()

            logger.info(
                "po_approval_urgency_calculated",
                min_depletion_hours=depletion_hours,
                stockout_deadline=out["deadline_utc"],
                can_wait=out["can_wait_until_tomorrow"]
            )

        return out

    def _calculate_deadline_urgency(self, deadline_str: str) -> dict:
        """Translate an explicit deadline into urgency fields; {} on parse failure."""
        try:
            deadline = (
                datetime.fromisoformat(deadline_str.replace('Z', '+00:00'))
                if isinstance(deadline_str, str)
                else deadline_str  # already a datetime-like value
            )

            hours_until = (deadline - datetime.now(timezone.utc)).total_seconds() / 3600

            return {
                "deadline_utc": deadline.isoformat(),
                "hours_until_consequence": max(0, round(hours_until, 1)),
                "can_wait_until_tomorrow": hours_until > 24,
            }
        except Exception as e:
            logger.warning("deadline_parse_failed", deadline=deadline_str, error=str(e))
            return {}
|
||||
116
services/alert_processor/app/enrichment/user_agency.py
Normal file
116
services/alert_processor/app/enrichment/user_agency.py
Normal file
@@ -0,0 +1,116 @@
|
||||
"""
|
||||
User agency analyzer for alerts.
|
||||
|
||||
Determines whether user can fix the issue, what blockers exist,
|
||||
and if external parties are required.
|
||||
"""
|
||||
|
||||
from typing import Dict, Any
|
||||
import structlog
|
||||
|
||||
logger = structlog.get_logger()
|
||||
|
||||
|
||||
class UserAgencyAnalyzer:
    """Analyze user's ability to act on alerts."""

    def analyze(
        self,
        event_type: str,
        metadata: Dict[str, Any],
        orchestrator_context: dict
    ) -> dict:
        """
        Analyze user agency for an event.

        Args:
            event_type: Dotted/underscored event name; matched by substring.
            metadata: Event metadata (supplier, equipment, PO fields).
            orchestrator_context: What the AI orchestrator already did;
                may be falsy.

        Returns dict with:
        - can_user_fix: Boolean - can user resolve this?
        - requires_external_party: Boolean
        - external_party_name: Name of required party
        - external_party_contact: Contact info
        - blockers: List of blocking factors
        - suggested_workaround: Optional workaround suggestion
        """

        agency = {
            "can_user_fix": True,
            "requires_external_party": False,
            "external_party_name": None,
            "external_party_contact": None,
            "blockers": [],
            "suggested_workaround": None
        }

        # If orchestrator already addressed it, user agency is low
        if orchestrator_context and orchestrator_context.get("already_addressed"):
            agency["can_user_fix"] = False
            agency["blockers"].append("ai_already_handled")
            return agency

        # Analyze based on event type
        if "po_approval" in event_type:
            agency["can_user_fix"] = True

        elif "delivery" in event_type or "supplier" in event_type:
            agency.update(self._analyze_supplier_agency(metadata))

        elif "equipment" in event_type:
            agency.update(self._analyze_equipment_agency(metadata))

        elif "stock" in event_type:
            agency.update(self._analyze_stock_agency(metadata, orchestrator_context))

        return agency

    def _analyze_supplier_agency(self, metadata: Dict[str, Any]) -> dict:
        """Analyze agency for supplier-related alerts.

        Returns a partial agency dict merged into the base dict by analyze().
        """
        agency = {
            "requires_external_party": True,
            "external_party_name": metadata.get("supplier_name"),
            "external_party_contact": metadata.get("supplier_contact")
        }

        # User can contact supplier but can't directly fix.
        # Bug fix: the original appended to agency["blockers"] without ever
        # initialising the key in this local dict, raising KeyError whenever
        # supplier contact info was missing.
        if not metadata.get("supplier_contact"):
            agency["blockers"] = ["no_supplier_contact"]

        return agency

    def _analyze_equipment_agency(self, metadata: Dict[str, Any]) -> dict:
        """Analyze agency for equipment-related alerts.

        Ovens and mixers need a technician; other equipment is assumed
        user-serviceable and yields no overrides.
        """
        agency: Dict[str, Any] = {}

        equipment_type = metadata.get("equipment_type", "")

        if "oven" in equipment_type.lower() or "mixer" in equipment_type.lower():
            agency["requires_external_party"] = True
            agency["external_party_name"] = "Maintenance Team"
            # Bug fix: assign the blockers list instead of appending to a
            # key that does not exist in this local dict (KeyError before).
            agency["blockers"] = ["requires_technician"]

        return agency

    def _analyze_stock_agency(
        self,
        metadata: Dict[str, Any],
        orchestrator_context: dict
    ) -> dict:
        """Analyze agency for stock-related alerts.

        A pending PO means the user can fix it by approving; a PO in any
        other state means waiting on the supplier; no PO means the user
        must create one (with the supplier's involvement).
        """
        agency: Dict[str, Any] = {}

        # If PO exists, user just needs to approve
        if metadata.get("po_id"):
            if metadata.get("po_status") == "pending_approval":
                agency["can_user_fix"] = True
                agency["suggested_workaround"] = "Approve pending PO"
            else:
                # Bug fix: assign blockers rather than appending to a
                # missing key (KeyError before).
                agency["blockers"] = ["waiting_for_delivery"]
                agency["requires_external_party"] = True
                agency["external_party_name"] = metadata.get("supplier_name")

        # If no PO, user needs to create one
        elif metadata.get("supplier_name"):
            agency["can_user_fix"] = True
            agency["requires_external_party"] = True
            agency["external_party_name"] = metadata.get("supplier_name")

        return agency
|
||||
Reference in New Issue
Block a user