New alert service

services/suppliers/app/consumers/alert_event_consumer.py (new file, 789 lines)
@@ -0,0 +1,789 @@
"""
Alert Event Consumer
Processes supplier alert events from RabbitMQ and sends notifications
Handles email and Slack notifications for critical alerts
"""
import json
import structlog
from typing import Dict, Any, Optional
from datetime import datetime
from uuid import UUID

from shared.messaging import RabbitMQClient
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select

logger = structlog.get_logger()


class AlertEventConsumer:
    """
    Consumes supplier alert events and sends notifications
    Handles email and Slack notifications for critical alerts
    """

    def __init__(self, db_session: AsyncSession):
        self.db_session = db_session
        self.notification_config = self._load_notification_config()

    def _load_notification_config(self) -> Dict[str, Any]:
        """
        Load notification configuration from environment

        Returns:
            Configuration dict with email/Slack settings
        """
        import os

        return {
            'enabled': os.getenv('ALERT_NOTIFICATION_ENABLED', 'true').lower() == 'true',
            'email': {
                'enabled': os.getenv('ALERT_EMAIL_ENABLED', 'true').lower() == 'true',
                'recipients': os.getenv('ALERT_EMAIL_RECIPIENTS', 'procurement@company.com').split(','),
                'from_address': os.getenv('ALERT_EMAIL_FROM', 'noreply@bakery-ia.com'),
                'smtp_host': os.getenv('SMTP_HOST', 'localhost'),
                'smtp_port': int(os.getenv('SMTP_PORT', '587')),
                'smtp_username': os.getenv('SMTP_USERNAME', ''),
                'smtp_password': os.getenv('SMTP_PASSWORD', ''),
                'use_tls': os.getenv('SMTP_USE_TLS', 'true').lower() == 'true'
            },
            'slack': {
                'enabled': os.getenv('ALERT_SLACK_ENABLED', 'false').lower() == 'true',
                'webhook_url': os.getenv('ALERT_SLACK_WEBHOOK_URL', ''),
                'channel': os.getenv('ALERT_SLACK_CHANNEL', '#procurement'),
                'username': os.getenv('ALERT_SLACK_USERNAME', 'Supplier Alert Bot')
            },
            'rate_limiting': {
                'enabled': os.getenv('ALERT_RATE_LIMITING_ENABLED', 'true').lower() == 'true',
                'max_per_hour': int(os.getenv('ALERT_MAX_PER_HOUR', '10')),
                'max_per_day': int(os.getenv('ALERT_MAX_PER_DAY', '50'))
            }
        }

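    # Illustrative environment configuration for this consumer. The variable
    # names mirror _load_notification_config above; the values are placeholders
    # only, not real endpoints or credentials:
    #
    #   ALERT_NOTIFICATION_ENABLED=true
    #   ALERT_EMAIL_ENABLED=true
    #   ALERT_EMAIL_RECIPIENTS=procurement@company.com,purchasing@company.com
    #   ALERT_EMAIL_FROM=noreply@bakery-ia.com
    #   SMTP_HOST=smtp.example.com
    #   SMTP_PORT=587
    #   SMTP_USERNAME=alerts
    #   SMTP_PASSWORD=change-me
    #   SMTP_USE_TLS=true
    #   ALERT_SLACK_ENABLED=true
    #   ALERT_SLACK_WEBHOOK_URL=https://hooks.slack.com/services/...
    #   ALERT_SLACK_CHANNEL=#procurement
    #   ALERT_RATE_LIMITING_ENABLED=true
    #   ALERT_MAX_PER_HOUR=10
    #   ALERT_MAX_PER_DAY=50
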
    async def consume_alert_events(
        self,
        rabbitmq_client: RabbitMQClient
    ):
        """
        Start consuming alert events from RabbitMQ
        """
        async def process_message(message):
            """Process a single alert event message"""
            try:
                async with message.process():
                    # Parse event data
                    event_data = json.loads(message.body.decode())
                    logger.info(
                        "Received alert event",
                        event_id=event_data.get('event_id'),
                        event_type=event_data.get('event_type'),
                        tenant_id=event_data.get('tenant_id')
                    )

                    # Process the event
                    await self.process_alert_event(event_data)

            except Exception as e:
                logger.error(
                    "Error processing alert event",
                    error=str(e),
                    exc_info=True
                )

        # Start consuming events
        await rabbitmq_client.consume_events(
            exchange_name="suppliers.events",
            queue_name="suppliers.alerts.notifications",
            routing_key="suppliers.alert.*",
            callback=process_message
        )

        logger.info("Started consuming alert events")

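    # Illustrative message body for a cost variance event. The field names
    # follow what process_alert_event and _handle_cost_variance_alert read
    # below; the concrete values are invented for the example:
    #
    #   {
    #       "event_id": "3f2b...",
    #       "event_type": "suppliers.alert.cost_variance",
    #       "tenant_id": "tenant-123",
    #       "data": {
    #           "alert_id": "a1b2...",
    #           "severity": "critical",
    #           "supplier_name": "Acme Flour Co",
    #           "ingredient_name": "T55 flour",
    #           "variance_percentage": 12.5,
    #           "old_price": 0.80,
    #           "new_price": 0.90,
    #           "recommendations": ["Review alternative suppliers"]
    #       }
    #   }
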
    async def process_alert_event(self, event_data: Dict[str, Any]) -> bool:
        """
        Process an alert event based on type

        Args:
            event_data: Full event payload from RabbitMQ

        Returns:
            bool: True if processed successfully
        """
        try:
            if not self.notification_config['enabled']:
                logger.info("Alert notifications disabled, skipping")
                return True

            event_type = event_data.get('event_type')
            data = event_data.get('data', {})
            tenant_id = event_data.get('tenant_id')

            if not tenant_id:
                logger.warning("Alert event missing tenant_id", event_data=event_data)
                return False

            # Route to appropriate handler
            if event_type == 'suppliers.alert.cost_variance':
                success = await self._handle_cost_variance_alert(tenant_id, data)
            elif event_type == 'suppliers.alert.quality':
                success = await self._handle_quality_alert(tenant_id, data)
            elif event_type == 'suppliers.alert.delivery':
                success = await self._handle_delivery_alert(tenant_id, data)
            else:
                logger.warning("Unknown alert event type", event_type=event_type)
                success = True  # Mark as processed to avoid retry

            if success:
                logger.info(
                    "Alert event processed successfully",
                    event_type=event_type,
                    tenant_id=tenant_id
                )
            else:
                logger.error(
                    "Alert event processing failed",
                    event_type=event_type,
                    tenant_id=tenant_id
                )

            return success

        except Exception as e:
            logger.error(
                "Error in process_alert_event",
                error=str(e),
                event_id=event_data.get('event_id'),
                exc_info=True
            )
            return False

    async def _handle_cost_variance_alert(
        self,
        tenant_id: str,
        data: Dict[str, Any]
    ) -> bool:
        """
        Handle cost variance alert notification

        Args:
            tenant_id: Tenant ID
            data: Alert data

        Returns:
            bool: True if handled successfully
        """
        try:
            alert_id = data.get('alert_id')
            severity = data.get('severity', 'warning')
            supplier_name = data.get('supplier_name', 'Unknown Supplier')
            ingredient_name = data.get('ingredient_name', 'Unknown Ingredient')
            variance_percentage = data.get('variance_percentage', 0)
            old_price = data.get('old_price', 0)
            new_price = data.get('new_price', 0)
            recommendations = data.get('recommendations', [])

            # Check rate limiting
            if not await self._check_rate_limit(tenant_id, 'cost_variance'):
                logger.warning(
                    "Rate limit exceeded for cost variance alerts",
                    tenant_id=tenant_id
                )
                return True  # Don't fail, just skip

            # Format notification message
            notification_data = {
                'alert_id': alert_id,
                'severity': severity,
                'supplier_name': supplier_name,
                'ingredient_name': ingredient_name,
                'variance_percentage': variance_percentage,
                'old_price': old_price,
                'new_price': new_price,
                'price_change': new_price - old_price,
                'recommendations': recommendations,
                'alert_url': self._generate_alert_url(tenant_id, alert_id)
            }

            # Send notifications based on severity
            notifications_sent = 0

            if severity in ['critical', 'warning']:
                # Send email for critical and warning alerts
                if await self._send_email_notification(
                    tenant_id,
                    'cost_variance',
                    notification_data
                ):
                    notifications_sent += 1

            if severity == 'critical':
                # Send Slack for critical alerts only
                if await self._send_slack_notification(
                    tenant_id,
                    'cost_variance',
                    notification_data
                ):
                    notifications_sent += 1

            # Record notification sent
            await self._record_notification(
                tenant_id=tenant_id,
                alert_id=alert_id,
                notification_type='cost_variance',
                channels_sent=notifications_sent
            )

            logger.info(
                "Cost variance alert notification sent",
                tenant_id=tenant_id,
                alert_id=alert_id,
                severity=severity,
                notifications_sent=notifications_sent
            )

            return True

        except Exception as e:
            logger.error(
                "Error handling cost variance alert",
                error=str(e),
                tenant_id=tenant_id,
                alert_id=data.get('alert_id'),
                exc_info=True
            )
            return False

    async def _handle_quality_alert(
        self,
        tenant_id: str,
        data: Dict[str, Any]
    ) -> bool:
        """
        Handle quality alert notification

        Args:
            tenant_id: Tenant ID
            data: Alert data

        Returns:
            bool: True if handled successfully
        """
        try:
            alert_id = data.get('alert_id')
            severity = data.get('severity', 'warning')
            supplier_name = data.get('supplier_name', 'Unknown Supplier')

            logger.info(
                "Processing quality alert",
                tenant_id=tenant_id,
                alert_id=alert_id,
                severity=severity,
                supplier=supplier_name
            )

            # Check rate limiting
            if not await self._check_rate_limit(tenant_id, 'quality'):
                return True

            # For now, just log quality alerts
            # In production, would implement email/Slack similar to cost variance
            return True

        except Exception as e:
            logger.error(
                "Error handling quality alert",
                error=str(e),
                tenant_id=tenant_id,
                exc_info=True
            )
            return False

    async def _handle_delivery_alert(
        self,
        tenant_id: str,
        data: Dict[str, Any]
    ) -> bool:
        """
        Handle delivery alert notification

        Args:
            tenant_id: Tenant ID
            data: Alert data

        Returns:
            bool: True if handled successfully
        """
        try:
            alert_id = data.get('alert_id')
            severity = data.get('severity', 'warning')
            supplier_name = data.get('supplier_name', 'Unknown Supplier')

            logger.info(
                "Processing delivery alert",
                tenant_id=tenant_id,
                alert_id=alert_id,
                severity=severity,
                supplier=supplier_name
            )

            # Check rate limiting
            if not await self._check_rate_limit(tenant_id, 'delivery'):
                return True

            # For now, just log delivery alerts
            # In production, would implement email/Slack similar to cost variance
            return True

        except Exception as e:
            logger.error(
                "Error handling delivery alert",
                error=str(e),
                tenant_id=tenant_id,
                exc_info=True
            )
            return False

    async def _check_rate_limit(
        self,
        tenant_id: str,
        alert_type: str
    ) -> bool:
        """
        Check if notification rate limit has been exceeded using Redis

        Args:
            tenant_id: Tenant ID
            alert_type: Type of alert

        Returns:
            bool: True if within rate limit, False if exceeded
        """
        try:
            if not self.notification_config['rate_limiting']['enabled']:
                return True

            # Redis-based rate limiting implementation
            try:
                import redis.asyncio as redis
                import os
                from datetime import datetime, timedelta

                # Connect to Redis (redis.asyncio.from_url returns the client
                # synchronously; the connection is established lazily)
                redis_url = os.getenv('REDIS_URL', 'redis://localhost:6379/0')
                redis_client = redis.from_url(redis_url, decode_responses=True)

                # Rate limit keys
                hour_key = f"alert_rate_limit:{tenant_id}:{alert_type}:hour:{datetime.utcnow().strftime('%Y%m%d%H')}"
                day_key = f"alert_rate_limit:{tenant_id}:{alert_type}:day:{datetime.utcnow().strftime('%Y%m%d')}"

                # Get current counts
                hour_count = await redis_client.get(hour_key)
                day_count = await redis_client.get(day_key)

                hour_count = int(hour_count) if hour_count else 0
                day_count = int(day_count) if day_count else 0

                # Check limits
                max_per_hour = self.notification_config['rate_limiting']['max_per_hour']
                max_per_day = self.notification_config['rate_limiting']['max_per_day']

                if hour_count >= max_per_hour:
                    logger.warning(
                        "Hourly rate limit exceeded",
                        tenant_id=tenant_id,
                        alert_type=alert_type,
                        count=hour_count,
                        limit=max_per_hour
                    )
                    await redis_client.close()
                    return False

                if day_count >= max_per_day:
                    logger.warning(
                        "Daily rate limit exceeded",
                        tenant_id=tenant_id,
                        alert_type=alert_type,
                        count=day_count,
                        limit=max_per_day
                    )
                    await redis_client.close()
                    return False

                # Increment counters
                pipe = redis_client.pipeline()
                pipe.incr(hour_key)
                pipe.expire(hour_key, 3600)  # 1 hour TTL
                pipe.incr(day_key)
                pipe.expire(day_key, 86400)  # 24 hour TTL
                await pipe.execute()

                await redis_client.close()

                logger.debug(
                    "Rate limit check passed",
                    tenant_id=tenant_id,
                    alert_type=alert_type,
                    hour_count=hour_count + 1,
                    day_count=day_count + 1
                )
                return True

            except ImportError:
                logger.warning("Redis not available, skipping rate limiting")
                return True

        except Exception as e:
            logger.error(
                "Error checking rate limit",
                error=str(e),
                tenant_id=tenant_id,
                exc_info=True
            )
            # On error, allow notification
            return True

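    # Rate limiting note: the Redis counters above implement a fixed-window
    # scheme. Keys are bucketed per hour and per day, for example
    #   alert_rate_limit:{tenant_id}:cost_variance:hour:2025010112
    #   alert_rate_limit:{tenant_id}:cost_variance:day:20250101
    # and expire via TTL, so counts reset at bucket boundaries rather than
    # sliding continuously.
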
    async def _send_email_notification(
        self,
        tenant_id: str,
        notification_type: str,
        data: Dict[str, Any]
    ) -> bool:
        """
        Send email notification

        Args:
            tenant_id: Tenant ID
            notification_type: Type of notification
            data: Notification data

        Returns:
            bool: True if sent successfully
        """
        try:
            if not self.notification_config['email']['enabled']:
                logger.debug("Email notifications disabled")
                return False

            import smtplib
            from email.mime.text import MIMEText
            from email.mime.multipart import MIMEMultipart

            # Build email content
            subject = self._format_email_subject(notification_type, data)
            body = self._format_email_body(notification_type, data)

            # Create message
            msg = MIMEMultipart('alternative')
            msg['Subject'] = subject
            msg['From'] = self.notification_config['email']['from_address']
            msg['To'] = ', '.join(self.notification_config['email']['recipients'])

            # Attach HTML body
            html_part = MIMEText(body, 'html')
            msg.attach(html_part)

            # Send email
            smtp_config = self.notification_config['email']
            with smtplib.SMTP(smtp_config['smtp_host'], smtp_config['smtp_port']) as server:
                if smtp_config['use_tls']:
                    server.starttls()

                if smtp_config['smtp_username'] and smtp_config['smtp_password']:
                    server.login(smtp_config['smtp_username'], smtp_config['smtp_password'])

                server.send_message(msg)

            logger.info(
                "Email notification sent",
                tenant_id=tenant_id,
                notification_type=notification_type,
                recipients=len(self.notification_config['email']['recipients'])
            )
            return True

        except Exception as e:
            logger.error(
                "Error sending email notification",
                error=str(e),
                tenant_id=tenant_id,
                notification_type=notification_type,
                exc_info=True
            )
            return False

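    # Note: smtplib.SMTP above is a blocking client, so the SMTP round trip
    # runs on the event loop. If that becomes a problem, the send could be
    # offloaded to a worker thread, e.g. (sketch only, requires `import asyncio`
    # and a hypothetical helper wrapping the smtplib block):
    #
    #   await asyncio.to_thread(self._send_smtp_blocking, msg)
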
    async def _send_slack_notification(
        self,
        tenant_id: str,
        notification_type: str,
        data: Dict[str, Any]
    ) -> bool:
        """
        Send Slack notification

        Args:
            tenant_id: Tenant ID
            notification_type: Type of notification
            data: Notification data

        Returns:
            bool: True if sent successfully
        """
        try:
            if not self.notification_config['slack']['enabled']:
                logger.debug("Slack notifications disabled")
                return False

            webhook_url = self.notification_config['slack']['webhook_url']
            if not webhook_url:
                logger.warning("Slack webhook URL not configured")
                return False

            import aiohttp

            # Format Slack message
            message = self._format_slack_message(notification_type, data)

            # Send to Slack
            async with aiohttp.ClientSession() as session:
                async with session.post(webhook_url, json=message) as response:
                    if response.status == 200:
                        logger.info(
                            "Slack notification sent",
                            tenant_id=tenant_id,
                            notification_type=notification_type
                        )
                        return True
                    else:
                        logger.error(
                            "Slack notification failed",
                            status=response.status,
                            response=await response.text()
                        )
                        return False

        except Exception as e:
            logger.error(
                "Error sending Slack notification",
                error=str(e),
                tenant_id=tenant_id,
                notification_type=notification_type,
                exc_info=True
            )
            return False

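    # Caveat: incoming webhooks created through newer Slack apps are tied to a
    # single channel and may ignore the "channel", "username" and "icon_emoji"
    # overrides set in _format_slack_message; legacy webhooks honour them. The
    # payload still posts either way.
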
    def _format_email_subject(
        self,
        notification_type: str,
        data: Dict[str, Any]
    ) -> str:
        """Format email subject line"""
        if notification_type == 'cost_variance':
            severity = data.get('severity', 'warning').upper()
            ingredient = data.get('ingredient_name', 'Unknown')
            variance = data.get('variance_percentage', 0)

            return f"[{severity}] Price Alert: {ingredient} (+{variance:.1f}%)"

        return f"Supplier Alert: {notification_type}"

    def _format_email_body(
        self,
        notification_type: str,
        data: Dict[str, Any]
    ) -> str:
        """Format email body (HTML)"""
        if notification_type == 'cost_variance':
            severity = data.get('severity', 'warning')
            severity_color = '#dc3545' if severity == 'critical' else '#ffc107'

            html = f"""
            <html>
            <head>
                <style>
                    body {{ font-family: Arial, sans-serif; }}
                    .alert-box {{
                        border-left: 4px solid {severity_color};
                        padding: 15px;
                        background-color: #f8f9fa;
                        margin: 20px 0;
                    }}
                    .metric {{
                        display: inline-block;
                        margin: 10px 20px 10px 0;
                    }}
                    .metric-label {{
                        color: #6c757d;
                        font-size: 12px;
                        text-transform: uppercase;
                    }}
                    .metric-value {{
                        font-size: 24px;
                        font-weight: bold;
                        color: #212529;
                    }}
                    .recommendations {{
                        background-color: #e7f3ff;
                        border: 1px solid #bee5eb;
                        padding: 15px;
                        margin: 20px 0;
                    }}
                    .btn {{
                        display: inline-block;
                        padding: 10px 20px;
                        background-color: #007bff;
                        color: white;
                        text-decoration: none;
                        border-radius: 4px;
                        margin-top: 15px;
                    }}
                </style>
            </head>
            <body>
                <h2>Cost Variance Alert</h2>

                <div class="alert-box">
                    <strong>{data.get('supplier_name')}</strong> - {data.get('ingredient_name')}
                    <br><br>

                    <div class="metric">
                        <div class="metric-label">Previous Price</div>
                        <div class="metric-value">${data.get('old_price', 0):.2f}</div>
                    </div>

                    <div class="metric">
                        <div class="metric-label">New Price</div>
                        <div class="metric-value">${data.get('new_price', 0):.2f}</div>
                    </div>

                    <div class="metric">
                        <div class="metric-label">Change</div>
                        <div class="metric-value" style="color: {severity_color};">
                            +{data.get('variance_percentage', 0):.1f}%
                        </div>
                    </div>
                </div>

                <div class="recommendations">
                    <strong>Recommended Actions:</strong>
                    <ul>
                        {''.join(f'<li>{rec}</li>' for rec in data.get('recommendations', []))}
                    </ul>
                </div>

                <a href="{data.get('alert_url', '#')}" class="btn">View Alert Details</a>

                <hr style="margin-top: 30px; border: none; border-top: 1px solid #dee2e6;">
                <p style="color: #6c757d; font-size: 12px;">
                    This is an automated notification from the Bakery IA Supplier Management System.
                </p>
            </body>
            </html>
            """
            return html

        return "<html><body><p>Alert notification</p></body></html>"

    def _format_slack_message(
        self,
        notification_type: str,
        data: Dict[str, Any]
    ) -> Dict[str, Any]:
        """Format Slack message payload"""
        if notification_type == 'cost_variance':
            severity = data.get('severity', 'warning')
            emoji = ':rotating_light:' if severity == 'critical' else ':warning:'
            color = 'danger' if severity == 'critical' else 'warning'

            message = {
                "username": self.notification_config['slack']['username'],
                "channel": self.notification_config['slack']['channel'],
                "icon_emoji": emoji,
                "attachments": [
                    {
                        "color": color,
                        "title": f"Cost Variance Alert - {data.get('supplier_name')}",
                        "fields": [
                            {
                                "title": "Ingredient",
                                "value": data.get('ingredient_name'),
                                "short": True
                            },
                            {
                                "title": "Price Change",
                                "value": f"+{data.get('variance_percentage', 0):.1f}%",
                                "short": True
                            },
                            {
                                "title": "Previous Price",
                                "value": f"${data.get('old_price', 0):.2f}",
                                "short": True
                            },
                            {
                                "title": "New Price",
                                "value": f"${data.get('new_price', 0):.2f}",
                                "short": True
                            }
                        ],
                        "text": "*Recommendations:*\n" + "\n".join(
                            f"• {rec}" for rec in data.get('recommendations', [])
                        ),
                        "footer": "Bakery IA Supplier Management",
                        "ts": int(datetime.utcnow().timestamp())
                    }
                ]
            }
            return message

        return {
            "username": self.notification_config['slack']['username'],
            "text": f"Alert: {notification_type}"
        }

    def _generate_alert_url(self, tenant_id: str, alert_id: str) -> str:
        """Generate URL to view alert in dashboard"""
        import os
        base_url = os.getenv('FRONTEND_BASE_URL', 'http://localhost:3000')
        return f"{base_url}/app/suppliers/alerts/{alert_id}"

    async def _record_notification(
        self,
        tenant_id: str,
        alert_id: str,
        notification_type: str,
        channels_sent: int
    ):
        """
        Record that notification was sent

        Args:
            tenant_id: Tenant ID
            alert_id: Alert ID
            notification_type: Type of notification
            channels_sent: Number of channels sent to
        """
        try:
            # In production, would store in database:
            # - notification_log table
            # - Used for rate limiting and audit trail

            logger.info(
                "Notification recorded",
                tenant_id=tenant_id,
                alert_id=alert_id,
                notification_type=notification_type,
                channels_sent=channels_sent
            )

        except Exception as e:
            logger.error(
                "Error recording notification",
                error=str(e),
                alert_id=alert_id
            )


# Factory function for creating consumer instance
def create_alert_event_consumer(db_session: AsyncSession) -> AlertEventConsumer:
    """Create alert event consumer instance"""
    return AlertEventConsumer(db_session)
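
# Example wiring at service startup. This is a sketch only: `get_session` and
# the way the RabbitMQClient connection is established are assumptions about
# the surrounding service, not something defined in this module.
#
#   async def start_alert_consumer(rabbitmq_client: RabbitMQClient):
#       async with get_session() as session:   # hypothetical session factory
#           consumer = create_alert_event_consumer(session)
#           await consumer.consume_alert_events(rabbitmq_client)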