New alert service

This commit is contained in:
Urtzi Alfaro
2025-12-05 20:07:01 +01:00
parent 1fe3a73549
commit 667e6e0404
393 changed files with 26002 additions and 61033 deletions

View File

@@ -98,18 +98,27 @@ class ForecastEventConsumer:
async def _get_parent_tenant_id(self, tenant_id: str) -> Optional[str]:
    """
    Get parent tenant ID for a child tenant using the tenant service.

    Returns the parent tenant's id, or None when the tenant has no parent
    (standalone or itself a parent) or when the lookup fails.
    """
    try:
        # Imported lazily so this module can load even if the shared
        # clients package is unavailable at import time.
        from shared.clients.tenant_client import TenantServiceClient
        from shared.config.base import get_settings

        config = get_settings()
        tenant_client = TenantServiceClient(config)

        # Ask the tenant service for this tenant's parent, if any.
        parent_tenant = await tenant_client.get_parent_tenant(tenant_id)
        if parent_tenant:
            parent_tenant_id = parent_tenant.get('id')
            logger.info(f"Found parent tenant {parent_tenant_id} for child tenant {tenant_id}")
            return parent_tenant_id

        logger.debug(f"No parent tenant found for tenant {tenant_id} (tenant may be standalone or parent)")
        return None
    except Exception as e:
        # Parent resolution is best-effort: a failure must not break
        # event consumption, so log and fall back to "no parent".
        logger.error(f"Error getting parent tenant ID for {tenant_id}: {e}")
        return None

View File

@@ -10,7 +10,6 @@ from fastapi import FastAPI
from sqlalchemy import text
from app.core.config import settings
from app.core.database import database_manager
from app.services.messaging import setup_messaging, cleanup_messaging
from app.services.forecasting_alert_service import ForecastingAlertService
from shared.service_base import StandardFastAPIService
@@ -49,6 +48,8 @@ class ForecastingService(StandardFastAPIService):
]
self.alert_service = None
self.rabbitmq_client = None
self.event_publisher = None
# Create custom checks for alert service
async def alert_service_check():
@@ -103,20 +104,38 @@ class ForecastingService(StandardFastAPIService):
)
async def _setup_messaging(self):
    """Setup messaging for forecasting service using unified messaging.

    Connects a RabbitMQ client and wraps it in a UnifiedEventPublisher;
    raises on failure so startup fails loudly rather than running
    without alerting capability.
    """
    # Imported here to keep the messaging dependency out of module import time.
    from shared.messaging import UnifiedEventPublisher, RabbitMQClient
    try:
        self.rabbitmq_client = RabbitMQClient(settings.RABBITMQ_URL, service_name="forecasting-service")
        await self.rabbitmq_client.connect()
        # The publisher carries the service identity on every event.
        self.event_publisher = UnifiedEventPublisher(self.rabbitmq_client, "forecasting-service")
        self.logger.info("Forecasting service unified messaging setup completed")
    except Exception as e:
        self.logger.error("Failed to setup forecasting unified messaging", error=str(e))
        raise
async def _cleanup_messaging(self):
    """Cleanup messaging for forecasting service.

    Best-effort: disconnects the RabbitMQ client if one was created and
    never raises, since this runs during shutdown.
    """
    try:
        if self.rabbitmq_client:
            await self.rabbitmq_client.disconnect()
        self.logger.info("Forecasting service messaging cleanup completed")
    except Exception as e:
        # Swallow errors deliberately: shutdown must proceed regardless.
        self.logger.error("Error during forecasting messaging cleanup", error=str(e))
async def on_startup(self, app: FastAPI):
    """Custom startup logic for forecasting service.

    Runs the base-class startup first, then initializes the alert
    service with the unified event publisher created by
    _setup_messaging. Degrades gracefully (logs an error, no alert
    service) if the publisher is missing.
    """
    await super().on_startup(app)
    # Initialize forecasting alert service with the unified EventPublisher
    if self.event_publisher:
        self.alert_service = ForecastingAlertService(self.event_publisher)
        await self.alert_service.start()
        self.logger.info("Forecasting alert service initialized")
    else:
        # _setup_messaging should have populated this; keep the service
        # running but make the degraded state visible in logs.
        self.logger.error("Event publisher not initialized, alert service unavailable")
async def on_shutdown(self, app: FastAPI):

View File

from .forecasting_service import ForecastingService, EnhancedForecastingService
from .prediction_service import PredictionService
from .model_client import ModelClient
from .data_client import DataClient

# Public API of the services package. The legacy messaging helpers
# (publish_forecast_generated, ForecastingStatusPublisher, ...) were
# removed in favor of the unified messaging publisher.
__all__ = [
    "ForecastingService",
    "EnhancedForecastingService",
    "PredictionService",
    "ModelClient",
    "DataClient",
]

View File

@@ -217,12 +217,44 @@ class EnterpriseForecastingService:
async def _fetch_sales_data(self, tenant_id: str, start_date: date, end_date: date) -> Dict[str, Any]:
    """
    Fetch sales data from the sales service and aggregate totals.

    Returns a summary dict with 'total_sales' (sum of quantity_sold),
    'date_range', 'tenant_id' and 'record_count'. On any failure the
    method does not raise: it returns zero totals plus an 'error' key.
    """
    try:
        # Lazy import keeps the client dependency out of module import time.
        from shared.clients.sales_client import SalesServiceClient
        from shared.config.base import get_settings

        config = get_settings()
        sales_client = SalesServiceClient(config, calling_service_name="forecasting")

        # Fetch daily-aggregated sales records for the date range.
        sales_data = await sales_client.get_sales_data(
            tenant_id=tenant_id,
            start_date=start_date.isoformat(),
            end_date=end_date.isoformat(),
            aggregation="daily"
        )

        # Sum units sold across all records; a missing field counts as 0.
        total_sales = sum(sale.get('quantity_sold', 0) for sale in sales_data) if sales_data else 0

        return {
            'total_sales': total_sales,
            'date_range': f"{start_date} to {end_date}",
            'tenant_id': tenant_id,
            'record_count': len(sales_data) if sales_data else 0
        }
    except Exception as e:
        logger.error(f"Failed to fetch sales data for tenant {tenant_id}: {e}")
        # Return an empty result on error so callers can continue.
        return {
            'total_sales': 0,
            'date_range': f"{start_date} to {end_date}",
            'tenant_id': tenant_id,
            'error': str(e)
        }

View File

@@ -1,7 +1,8 @@
# services/forecasting/app/services/forecasting_alert_service.py
"""
Forecasting-specific alert and recommendation detection service
Monitors demand patterns, weather impacts, and holiday preparations
Forecasting Alert Service - Simplified
Emits minimal events using EventPublisher.
All enrichment handled by alert_processor.
"""
import json
@@ -9,538 +10,330 @@ from typing import List, Dict, Any, Optional
from uuid import UUID
from datetime import datetime, timedelta
import structlog
from apscheduler.triggers.cron import CronTrigger
from shared.alerts.base_service import BaseAlertService, AlertServiceMixin
from shared.messaging import UnifiedEventPublisher
from app.clients.inventory_client import get_inventory_client
logger = structlog.get_logger()
class ForecastingAlertService(BaseAlertService, AlertServiceMixin):
"""Forecasting service alert and recommendation detection"""
def setup_scheduled_checks(self):
"""Forecasting-specific scheduled checks for alerts and recommendations"""
# Weekend demand surge analysis - every Friday at 3 PM
self.scheduler.add_job(
self.check_weekend_demand_surge,
CronTrigger(day_of_week=4, hour=15, minute=0), # Friday 3 PM
id='weekend_surge_check',
misfire_grace_time=3600,
max_instances=1
class ForecastingAlertService:
    """Simplified forecasting alert service using EventPublisher.

    Emits minimal alert/recommendation events; all enrichment is handled
    downstream by the alert processor.
    """

    # Annotation is a string to avoid evaluating the project type at class
    # creation time (keeps the module importable in isolation).
    def __init__(self, event_publisher: "UnifiedEventPublisher"):
        # Publisher used by every emit_* method.
        self.publisher = event_publisher

    async def start(self):
        """Start the forecasting alert service (currently logging only)."""
        logger.info("ForecastingAlertService started")

    async def stop(self):
        """Stop the forecasting alert service (currently logging only)."""
        logger.info("ForecastingAlertService stopped")

    async def health_check(self):
        """Return True when the event publisher is available."""
        # __init__ always sets the attribute, so the previous hasattr()
        # guard was redundant; a plain truthiness check is equivalent.
        return bool(self.publisher)
async def emit_demand_surge_weekend(
    self,
    tenant_id: UUID,
    product_name: str,
    inventory_product_id: str,
    predicted_demand: float,
    growth_percentage: float,
    forecast_date: str,
    weather_favorable: bool = False
):
    """Emit a weekend demand surge alert for a single product."""
    # Severity scales with the magnitude of the predicted growth.
    if growth_percentage > 100:
        severity = 'high'
    elif growth_percentage > 75:
        severity = 'medium'
    else:
        severity = 'low'

    metadata = {
        "product_name": product_name,
        "inventory_product_id": str(inventory_product_id),
        "predicted_demand": float(predicted_demand),
        "growth_percentage": float(growth_percentage),
        "forecast_date": forecast_date,
        "weather_favorable": weather_favorable
    }

    await self.publisher.publish_alert(
        event_type="forecasting.demand_surge_weekend",
        tenant_id=tenant_id,
        severity=severity,
        data=metadata
    )

    logger.info(
        "demand_surge_weekend_emitted",
        tenant_id=str(tenant_id),
        product_name=product_name,
        growth_percentage=growth_percentage
    )
# Holiday preparation analysis - daily at 9 AM
self.scheduler.add_job(
self.check_holiday_preparation,
CronTrigger(hour=9, minute=0),
id='holiday_prep_check',
misfire_grace_time=3600,
max_instances=1
async def emit_weather_impact_alert(
    self,
    tenant_id: UUID,
    forecast_date: str,
    precipitation: float,
    expected_demand_change: float,
    traffic_volume: int,
    weather_type: str = "general",
    product_name: Optional[str] = None
):
    """Emit a weather impact alert.

    Severity grows with the expected demand drop; triggers record which
    conditions (rain, demand forecast, cancellations) produced the alert.
    """
    # Larger expected demand drops are more severe.
    if expected_demand_change < -20:
        severity = 'high'
    elif expected_demand_change < -10:
        severity = 'medium'
    else:
        severity = 'low'

    metadata = {
        "forecast_date": forecast_date,
        "precipitation_mm": float(precipitation),
        "expected_demand_change": float(expected_demand_change),
        "traffic_volume": traffic_volume,
        "weather_type": weather_type
    }
    if product_name:
        metadata["product_name"] = product_name

    # Triggers describe which conditions produced the alert.
    triggers = ['weather_conditions', 'demand_forecast']
    if precipitation > 0:
        triggers.append('rain_forecast')
    if expected_demand_change < -15:
        triggers.append('outdoor_events_cancelled')
    metadata["triggers"] = triggers

    await self.publisher.publish_alert(
        event_type="forecasting.weather_impact_alert",
        tenant_id=tenant_id,
        severity=severity,
        data=metadata
    )

    logger.info(
        "weather_impact_alert_emitted",
        tenant_id=str(tenant_id),
        weather_type=weather_type,
        expected_demand_change=expected_demand_change
    )
logger.info("Forecasting alert schedules configured",
service=self.config.SERVICE_NAME)
async def check_weekend_demand_surge(self):
"""Check for predicted weekend demand surges (alerts)"""
try:
self._checks_performed += 1
from app.repositories.forecasting_alert_repository import ForecastingAlertRepository
async def emit_holiday_preparation(
    self,
    tenant_id: UUID,
    holiday_name: str,
    days_until_holiday: int,
    product_name: str,
    spike_percentage: float,
    avg_holiday_demand: float,
    avg_normal_demand: float,
    holiday_date: str
):
    """Emit a holiday preparation alert.

    Severity combines the historical demand spike magnitude with the
    remaining preparation time before the holiday.
    """
    if spike_percentage > 75 and days_until_holiday <= 3:
        severity = 'high'
    elif spike_percentage > 50 or days_until_holiday <= 3:
        severity = 'medium'
    else:
        severity = 'low'

    metadata = {
        "holiday_name": holiday_name,
        "days_until_holiday": days_until_holiday,
        "product_name": product_name,
        "spike_percentage": float(spike_percentage),
        "avg_holiday_demand": float(avg_holiday_demand),
        "avg_normal_demand": float(avg_normal_demand),
        "holiday_date": holiday_date
    }

    # Triggers record why this alert fired.
    triggers = [f'spanish_holiday_in_{days_until_holiday}_days']
    if spike_percentage > 25:
        triggers.append('historical_demand_spike')
    metadata["triggers"] = triggers

    await self.publisher.publish_alert(
        event_type="forecasting.holiday_preparation",
        tenant_id=tenant_id,
        severity=severity,
        data=metadata
    )

    logger.info(
        "holiday_preparation_emitted",
        tenant_id=str(tenant_id),
        holiday_name=holiday_name,
        spike_percentage=spike_percentage
    )
tenants = await self.get_active_tenants()
async def emit_demand_optimization_recommendation(
    self,
    tenant_id: UUID,
    product_name: str,
    optimization_potential: float,
    peak_demand: float,
    min_demand: float,
    demand_range: float
):
    """Emit a demand pattern optimization recommendation (advisory, not an alert)."""
    metadata = {
        "product_name": product_name,
        "optimization_potential": float(optimization_potential),
        "peak_demand": float(peak_demand),
        "min_demand": float(min_demand),
        "demand_range": float(demand_range)
    }

    # Recommendations use publish_recommendation (no severity field).
    await self.publisher.publish_recommendation(
        event_type="forecasting.demand_pattern_optimization",
        tenant_id=tenant_id,
        data=metadata
    )

    logger.info(
        "demand_pattern_optimization_emitted",
        tenant_id=str(tenant_id),
        product_name=product_name,
        optimization_potential=optimization_potential
    )
except Exception as e:
logger.error("Weather impact check failed", error=str(e))
self._errors_count += 1
async def _process_weather_impact(self, tenant_id: UUID, impact: Dict[str, Any]):
"""Process weather impact alert"""
try:
rain_forecast = impact['rain_forecast']
demand_change = impact['demand_change']
precipitation = impact['weather_precipitation'] or 0.0
if rain_forecast:
# Rain impact alert
triggers = ['rain_forecast']
if demand_change < -15:
triggers.append('outdoor_events_cancelled')
await self.publish_item(tenant_id, {
'type': 'weather_impact_alert',
'severity': 'low',
'title': '🌧️ Impacto climático previsto',
'message': '🌧️ Lluvia prevista: -20% tráfico peatonal esperado',
'actions': ['reduce_fresh_production', 'focus_comfort_products', 'delivery_promo'],
'triggers': triggers,
'metadata': {
'forecast_date': impact['forecast_date'].isoformat(),
'precipitation_mm': float(precipitation),
'expected_demand_change': float(demand_change),
'traffic_volume': impact.get('traffic_volume', 100),
'weather_type': 'rain'
}
}, item_type='alert')
elif demand_change < -20:
# General weather impact alert
product_name = await self._resolve_product_name(
tenant_id,
str(impact['inventory_product_id']),
impact.get('product_name')
)
await self.publish_item(tenant_id, {
'type': 'weather_impact_alert',
'severity': 'low',
'title': f'🌤️ Impacto climático: {product_name}',
'message': f'Condiciones climáticas pueden reducir demanda de {product_name} en {abs(demand_change):.0f}%',
'actions': ['adjust_production', 'focus_indoor_products', 'plan_promotions'],
'triggers': ['weather_conditions', 'demand_forecast_low'],
'metadata': {
'product_name': product_name,
'forecast_date': impact['forecast_date'].isoformat(),
'expected_demand_change': float(demand_change),
'temperature': impact.get('weather_temperature'),
'weather_type': 'general'
}
}, item_type='alert')
except Exception as e:
logger.error("Error processing weather impact",
product_name=impact.get('product_name'),
error=str(e))
async def check_holiday_preparation(self):
"""Check for upcoming Spanish holidays requiring preparation (alerts)"""
try:
self._checks_performed += 1
async def emit_demand_spike_detected(
    self,
    tenant_id: UUID,
    product_name: str,
    spike_percentage: float
):
    """Emit a demand spike detected event (typically triggered by a DB signal)."""
    # Severity scales with spike magnitude.
    if spike_percentage > 50:
        severity = 'high'
    elif spike_percentage > 20:
        severity = 'medium'
    else:
        severity = 'low'

    metadata = {
        "product_name": product_name,
        "spike_percentage": float(spike_percentage),
        "detection_source": "database"
    }

    await self.publisher.publish_alert(
        event_type="forecasting.demand_spike_detected",
        tenant_id=tenant_id,
        severity=severity,
        data=metadata
    )

    logger.info(
        "demand_spike_detected_emitted",
        tenant_id=str(tenant_id),
        product_name=product_name,
        spike_percentage=spike_percentage
    )
for tenant_id in tenants:
try:
async with self.db_manager.get_session() as session:
alert_repo = ForecastingAlertRepository(session)
demand_spikes = await alert_repo.get_holiday_demand_spikes(tenant_id)
async def emit_severe_weather_impact(
    self,
    tenant_id: UUID,
    weather_type: str,
    severity_level: str,
    duration_hours: int
):
    """Emit a severe weather impact event.

    Alert severity is derived from the reported weather severity level
    and the expected duration of the event.
    """
    if severity_level == 'critical' or duration_hours > 24:
        alert_severity = 'urgent'
    elif severity_level == 'high' or duration_hours > 12:
        alert_severity = 'high'
    else:
        alert_severity = 'medium'

    metadata = {
        "weather_type": weather_type,
        "severity_level": severity_level,
        "duration_hours": duration_hours
    }

    await self.publisher.publish_alert(
        event_type="forecasting.severe_weather_impact",
        tenant_id=tenant_id,
        severity=alert_severity,
        data=metadata
    )

    logger.info(
        "severe_weather_impact_emitted",
        tenant_id=str(tenant_id),
        weather_type=weather_type,
        severity_level=severity_level
    )
tenants = await self.get_active_tenants()
async def emit_unexpected_demand_spike(
    self,
    tenant_id: UUID,
    product_name: str,
    spike_percentage: float,
    current_sales: float,
    forecasted_sales: float
):
    """Emit an unexpected sales spike event (actual sales above forecast)."""
    # Severity scales with spike magnitude over the forecast.
    if spike_percentage > 75:
        severity = 'high'
    elif spike_percentage > 40:
        severity = 'medium'
    else:
        severity = 'low'

    metadata = {
        "product_name": product_name,
        "spike_percentage": float(spike_percentage),
        "current_sales": float(current_sales),
        "forecasted_sales": float(forecasted_sales)
    }

    await self.publisher.publish_alert(
        event_type="forecasting.unexpected_demand_spike",
        tenant_id=tenant_id,
        severity=severity,
        data=metadata
    )

    logger.info(
        "unexpected_demand_spike_emitted",
        tenant_id=str(tenant_id),
        product_name=product_name,
        spike_percentage=spike_percentage
    )
except Exception as e:
logger.error("Demand pattern analysis failed", error=str(e))
self._errors_count += 1
async def _generate_demand_pattern_recommendation(self, tenant_id: UUID, pattern: Dict[str, Any]):
"""Generate demand pattern optimization recommendation"""
# Publishes a 'demand_pattern_optimization' recommendation when the
# weekly demand variability for a product suggests room to optimize.
try:
# Respect per-tenant throttling before emitting anything.
if not self.should_send_recommendation(tenant_id, 'demand_optimization'):
return
demand_range = pattern['demand_range']
peak_demand = pattern['peak_demand']
overall_avg = pattern['overall_avg']
# Variability relative to average demand, as a percentage.
# NOTE(review): assumes overall_avg is non-zero — confirm the upstream query guarantees this.
optimization_potential = (demand_range / overall_avg) * 100
await self.publish_item(tenant_id, {
'type': 'demand_pattern_optimization',
'severity': 'medium',
'title': f'📊 Optimización de Patrones: {pattern["product_name"]}',
'message': f'Demanda de {pattern["product_name"]} varía {optimization_potential:.0f}% durante la semana. Oportunidad de optimización.',
'actions': ['Analizar patrones semanales', 'Ajustar producción diaria', 'Optimizar inventario', 'Planificar promociones'],
'metadata': {
'product_name': pattern['product_name'],
'optimization_potential': float(optimization_potential),
'peak_demand': float(peak_demand),
'min_demand': float(pattern['min_demand']),
'demand_range': float(demand_range),
'recommendation_type': 'demand_optimization'
}
}, item_type='recommendation')
except Exception as e:
# Best-effort: a single failed recommendation must not abort the sweep.
logger.error("Error generating demand pattern recommendation",
product_name=pattern.get('product_name'),
error=str(e))
# Helper methods
async def _resolve_product_name(self, tenant_id: UUID, inventory_product_id: str, fallback_name: Optional[str] = None) -> str:
"""
Resolve product name, with fallbacks for when inventory service is unavailable
"""
# If we already have a product name, use it
if fallback_name:
return fallback_name
# Try to get from inventory service
try:
inventory_client = get_inventory_client()
product_name = await inventory_client.get_product_name(str(tenant_id), inventory_product_id)
if product_name:
return product_name
except Exception as e:
logger.debug("Failed to resolve product name from inventory service",
inventory_product_id=inventory_product_id,
error=str(e))
# Fallback to generic name
return f"Product-{inventory_product_id[:8]}"
async def _check_favorable_weather(self, forecast_date: datetime) -> bool:
"""Simple weather favorability check"""
# In a real implementation, this would check actual weather APIs
# For now, return a simple heuristic based on season
month = forecast_date.month
return month in [4, 5, 6, 7, 8, 9] # Spring/Summer months
async def _get_upcoming_spanish_holidays(self, min_days: int, max_days: int) -> List[Dict[str, Any]]:
"""Get upcoming Spanish holidays within date range"""
today = datetime.now().date()
holidays = []
# Major Spanish holidays
spanish_holidays = [
{"name": "Año Nuevo", "month": 1, "day": 1},
{"name": "Reyes Magos", "month": 1, "day": 6},
{"name": "Día del Trabajador", "month": 5, "day": 1},
{"name": "Asunción", "month": 8, "day": 15},
{"name": "Fiesta Nacional", "month": 10, "day": 12},
{"name": "Todos los Santos", "month": 11, "day": 1},
{"name": "Constitución", "month": 12, "day": 6},
{"name": "Inmaculada", "month": 12, "day": 8},
{"name": "Navidad", "month": 12, "day": 25}
]
current_year = today.year
for holiday in spanish_holidays:
# Check current year
holiday_date = datetime(current_year, holiday["month"], holiday["day"]).date()
days_until = (holiday_date - today).days
if min_days <= days_until <= max_days:
holidays.append({
"name": holiday["name"],
"date": holiday_date,
"days_until": days_until
})
# Check next year if needed
if holiday_date < today:
next_year_date = datetime(current_year + 1, holiday["month"], holiday["day"]).date()
days_until = (next_year_date - today).days
if min_days <= days_until <= max_days:
holidays.append({
"name": holiday["name"],
"date": next_year_date,
"days_until": days_until
})
return holidays
async def register_db_listeners(self, conn):
"""Register forecasting-specific database listeners"""
# Subscribes to the 'forecasting_alerts' Postgres NOTIFY channel so
# database triggers can push alerts into handle_forecasting_db_alert.
try:
await conn.add_listener('forecasting_alerts', self.handle_forecasting_db_alert)
logger.info("Database listeners registered",
service=self.config.SERVICE_NAME)
except Exception as e:
# Registration failure is logged, not raised: the service keeps
# running with scheduled checks only.
logger.error("Failed to register database listeners",
service=self.config.SERVICE_NAME,
error=str(e))
async def handle_forecasting_db_alert(self, connection, pid, channel, payload):
"""Handle forecasting alert from database trigger"""
# Callback signature follows asyncpg's listener convention
# (connection, pid, channel, payload); payload is a JSON string.
try:
data = json.loads(payload)
tenant_id = UUID(data['tenant_id'])
# Currently only 'demand_spike' triggers are translated into alerts.
if data['alert_type'] == 'demand_spike':
await self.publish_item(tenant_id, {
'type': 'demand_spike_detected',
'severity': 'medium',
'title': f'📈 Pico de Demanda Detectado',
'message': f'Demanda inesperada de {data["product_name"]}: {data["spike_percentage"]:.0f}% sobre lo normal.',
'actions': ['Revisar inventario', 'Aumentar producción', 'Notificar equipo'],
'metadata': {
'product_name': data['product_name'],
'spike_percentage': data['spike_percentage'],
'trigger_source': 'database'
}
}, item_type='alert')
except Exception as e:
# Malformed payloads are logged and dropped, never re-raised.
logger.error("Error handling forecasting DB alert", error=str(e))
async def start_event_listener(self):
"""Listen for forecasting-affecting events"""
# Binds two queues on the 'bakery_events' exchange: severe weather
# changes and unexpected sales spikes, each with its own handler.
try:
# Subscribe to weather events that might affect forecasting
await self.rabbitmq_client.consume_events(
"bakery_events",
f"forecasting.weather.{self.config.SERVICE_NAME}",
"weather.severe_change",
self.handle_weather_event
)
# Subscribe to sales events that might trigger demand alerts
await self.rabbitmq_client.consume_events(
"bakery_events",
f"forecasting.sales.{self.config.SERVICE_NAME}",
"sales.unexpected_spike",
self.handle_sales_spike_event
)
logger.info("Event listeners started",
service=self.config.SERVICE_NAME)
except Exception as e:
# Failure to subscribe is logged; the service continues without
# event-driven alerts.
logger.error("Failed to start event listeners",
service=self.config.SERVICE_NAME,
error=str(e))
async def handle_weather_event(self, message):
"""Handle severe weather change event"""
# message.body is a JSON payload published on 'weather.severe_change'.
try:
weather_data = json.loads(message.body)
tenant_id = UUID(weather_data['tenant_id'])
# Only severe storms are escalated into a high-severity alert.
if weather_data['change_type'] == 'severe_storm':
await self.publish_item(tenant_id, {
'type': 'severe_weather_impact',
'severity': 'high',
'title': '⛈️ Impacto Climático Severo',
'message': f'Tormenta severa prevista: reducir producción de productos frescos y activar delivery.',
'actions': ['reduce_fresh_production', 'activate_delivery', 'secure_outdoor_displays'],
'metadata': {
'weather_type': weather_data['change_type'],
'severity_level': weather_data.get('severity', 'high'),
'duration_hours': weather_data.get('duration_hours', 0)
}
}, item_type='alert')
except Exception as e:
# Bad payloads are logged and dropped.
logger.error("Error handling weather event", error=str(e))
async def handle_sales_spike_event(self, message):
"""Handle unexpected sales spike event"""
# message.body is a JSON payload published on 'sales.unexpected_spike'.
try:
sales_data = json.loads(message.body)
tenant_id = UUID(sales_data['tenant_id'])
await self.publish_item(tenant_id, {
'type': 'unexpected_demand_spike',
'severity': 'medium',
'title': '📈 Pico de Ventas Inesperado',
'message': f'Ventas de {sales_data["product_name"]} {sales_data["spike_percentage"]:.0f}% sobre pronóstico.',
'actions': ['increase_production', 'check_inventory', 'update_forecast'],
'metadata': {
'product_name': sales_data['product_name'],
'spike_percentage': sales_data['spike_percentage'],
'current_sales': sales_data.get('current_sales', 0),
'forecasted_sales': sales_data.get('forecasted_sales', 0)
}
}, item_type='alert')
except Exception as e:
# Bad payloads are logged and dropped.
logger.error("Error handling sales spike event", error=str(e))
logger.info(
"unexpected_demand_spike_emitted",
tenant_id=str(tenant_id),
product_name=product_name,
spike_percentage=spike_percentage
)

View File

@@ -1,104 +1,76 @@
"""
Forecasting Recommendation Service
Forecasting Recommendation Service - Simplified
Emits RECOMMENDATIONS (not alerts) for demand forecasting insights:
- demand_surge_predicted: Upcoming demand spike
- weather_impact_forecast: Weather affecting demand
- holiday_preparation: Holiday demand prep
- seasonal_trend_insight: Seasonal pattern detected
- inventory_optimization_opportunity: Stock optimization suggestion
These are RECOMMENDATIONS - AI-generated suggestions that are advisory, not urgent.
Users can choose to act on them or dismiss them.
Emits minimal events using EventPublisher.
All enrichment handled by alert_processor.
"""
import logging
from datetime import datetime, timezone
from typing import Optional, Dict, Any, List
from sqlalchemy.orm import Session
from uuid import UUID
import structlog
from shared.schemas.event_classification import RawEvent, EventClass, EventDomain
from shared.alerts.base_service import BaseAlertService
from shared.messaging import UnifiedEventPublisher
logger = structlog.get_logger()
logger = logging.getLogger(__name__)
class ForecastingRecommendationService(BaseAlertService):
class ForecastingRecommendationService:
"""
Service for emitting forecasting recommendations (AI-generated suggestions).
Service for emitting forecasting recommendations using EventPublisher.
"""
def __init__(self, rabbitmq_url: str = None):
super().__init__(service_name="forecasting", rabbitmq_url=rabbitmq_url)
def __init__(self, event_publisher: UnifiedEventPublisher):
self.publisher = event_publisher
async def emit_demand_surge_recommendation(
self,
db: Session,
tenant_id: str,
tenant_id: UUID,
product_sku: str,
product_name: str,
predicted_demand: float,
normal_demand: float,
surge_percentage: float,
surge_date: datetime,
surge_date: str,
confidence_score: float,
reasoning: str,
) -> None:
"""
Emit RECOMMENDATION for predicted demand surge.
This is a RECOMMENDATION (not alert) - proactive suggestion to prepare.
"""
try:
message = f"{product_name} demand expected to surge by {surge_percentage:.0f}% on {surge_date.strftime('%A, %B %d')} (from {normal_demand:.0f} to {predicted_demand:.0f} units)"
metadata = {
"product_sku": product_sku,
"product_name": product_name,
"predicted_demand": float(predicted_demand),
"normal_demand": float(normal_demand),
"surge_percentage": float(surge_percentage),
"surge_date": surge_date,
"confidence_score": float(confidence_score),
"reasoning": reasoning,
"estimated_impact": {
"additional_revenue_eur": predicted_demand * 5, # Rough estimate
"stockout_risk": "high" if surge_percentage > 50 else "medium",
},
}
event = RawEvent(
tenant_id=tenant_id,
event_class=EventClass.RECOMMENDATION,
event_domain=EventDomain.DEMAND,
event_type="demand_surge_predicted",
title=f"Demand Surge: {product_name}",
message=message,
service="forecasting",
actions=["increase_production", "check_inventory", "view_forecast"],
event_metadata={
"product_sku": product_sku,
"product_name": product_name,
"predicted_demand": predicted_demand,
"normal_demand": normal_demand,
"surge_percentage": surge_percentage,
"surge_date": surge_date.isoformat(),
"confidence_score": confidence_score,
"reasoning": reasoning,
"estimated_impact": {
"additional_revenue_eur": predicted_demand * 5, # Rough estimate
"stockout_risk": "high" if surge_percentage > 50 else "medium",
},
},
timestamp=datetime.now(timezone.utc),
)
await self.publisher.publish_recommendation(
event_type="demand.demand_surge_predicted",
tenant_id=tenant_id,
data=metadata
)
await self.publish_item(tenant_id, event.dict(), item_type="recommendation")
logger.info(
f"Demand surge recommendation emitted: {product_name} (+{surge_percentage:.0f}%)",
extra={"tenant_id": tenant_id, "product_sku": product_sku}
)
except Exception as e:
logger.error(
f"Failed to emit demand surge recommendation: {e}",
extra={"tenant_id": tenant_id, "product_sku": product_sku},
exc_info=True,
)
logger.info(
"demand_surge_recommendation_emitted",
tenant_id=str(tenant_id),
product_name=product_name,
surge_percentage=surge_percentage
)
async def emit_weather_impact_recommendation(
self,
db: Session,
tenant_id: str,
tenant_id: UUID,
weather_event: str, # 'rain', 'snow', 'heatwave', etc.
forecast_date: datetime,
forecast_date: str,
affected_products: List[Dict[str, Any]],
impact_description: str,
confidence_score: float,
@@ -106,52 +78,31 @@ class ForecastingRecommendationService(BaseAlertService):
"""
Emit RECOMMENDATION for weather impact on demand.
"""
try:
products_summary = ", ".join([p['product_name'] for p in affected_products[:3]])
if len(affected_products) > 3:
products_summary += f" and {len(affected_products) - 3} more"
metadata = {
"weather_event": weather_event,
"forecast_date": forecast_date,
"affected_products": affected_products,
"impact_description": impact_description,
"confidence_score": float(confidence_score),
}
message = f"{weather_event.title()} forecast for {forecast_date.strftime('%A')} - {impact_description}"
await self.publisher.publish_recommendation(
event_type="demand.weather_impact_forecast",
tenant_id=tenant_id,
data=metadata
)
event = RawEvent(
tenant_id=tenant_id,
event_class=EventClass.RECOMMENDATION,
event_domain=EventDomain.DEMAND,
event_type="weather_impact_forecast",
title=f"Weather Impact: {weather_event.title()}",
message=message,
service="forecasting",
actions=["adjust_production", "view_affected_products"],
event_metadata={
"weather_event": weather_event,
"forecast_date": forecast_date.isoformat(),
"affected_products": affected_products,
"impact_description": impact_description,
"confidence_score": confidence_score,
},
timestamp=datetime.now(timezone.utc),
)
await self.publish_item(tenant_id, event.dict(), item_type="recommendation")
logger.info(
f"Weather impact recommendation emitted: {weather_event}",
extra={"tenant_id": tenant_id, "weather_event": weather_event}
)
except Exception as e:
logger.error(
f"Failed to emit weather impact recommendation: {e}",
extra={"tenant_id": tenant_id},
exc_info=True,
)
logger.info(
"weather_impact_recommendation_emitted",
tenant_id=str(tenant_id),
weather_event=weather_event
)
async def emit_holiday_preparation_recommendation(
self,
db: Session,
tenant_id: str,
tenant_id: UUID,
holiday_name: str,
holiday_date: datetime,
holiday_date: str,
days_until_holiday: int,
recommended_products: List[Dict[str, Any]],
preparation_tips: List[str],
@@ -159,47 +110,30 @@ class ForecastingRecommendationService(BaseAlertService):
"""
Emit RECOMMENDATION for holiday preparation.
"""
try:
message = f"{holiday_name} in {days_until_holiday} days - Prepare for increased demand"
metadata = {
"holiday_name": holiday_name,
"holiday_date": holiday_date,
"days_until_holiday": days_until_holiday,
"recommended_products": recommended_products,
"preparation_tips": preparation_tips,
"confidence_score": 0.9, # High confidence for known holidays
}
event = RawEvent(
tenant_id=tenant_id,
event_class=EventClass.RECOMMENDATION,
event_domain=EventDomain.DEMAND,
event_type="holiday_preparation",
title=f"Prepare for {holiday_name}",
message=message,
service="forecasting",
actions=["view_recommendations", "adjust_orders"],
event_metadata={
"holiday_name": holiday_name,
"holiday_date": holiday_date.isoformat(),
"days_until_holiday": days_until_holiday,
"recommended_products": recommended_products,
"preparation_tips": preparation_tips,
"confidence_score": 0.9, # High confidence for known holidays
},
timestamp=datetime.now(timezone.utc),
)
await self.publisher.publish_recommendation(
event_type="demand.holiday_preparation",
tenant_id=tenant_id,
data=metadata
)
await self.publish_item(tenant_id, event.dict(), item_type="recommendation")
logger.info(
f"Holiday preparation recommendation emitted: {holiday_name}",
extra={"tenant_id": tenant_id, "holiday": holiday_name}
)
except Exception as e:
logger.error(
f"Failed to emit holiday preparation recommendation: {e}",
extra={"tenant_id": tenant_id},
exc_info=True,
)
logger.info(
"holiday_preparation_recommendation_emitted",
tenant_id=str(tenant_id),
holiday=holiday_name
)
async def emit_seasonal_trend_recommendation(
self,
db: Session,
tenant_id: str,
tenant_id: UUID,
season: str, # 'spring', 'summer', 'fall', 'winter'
trend_type: str, # 'increasing', 'decreasing', 'stable'
affected_categories: List[str],
@@ -209,45 +143,30 @@ class ForecastingRecommendationService(BaseAlertService):
"""
Emit RECOMMENDATION for seasonal trend insight.
"""
try:
event = RawEvent(
tenant_id=tenant_id,
event_class=EventClass.RECOMMENDATION,
event_domain=EventDomain.DEMAND,
event_type="seasonal_trend_insight",
title=f"Seasonal Trend: {season.title()}",
message=f"{trend_description} - Affects: {', '.join(affected_categories)}",
service="forecasting",
actions=["view_details", "adjust_strategy"],
event_metadata={
"season": season,
"trend_type": trend_type,
"affected_categories": affected_categories,
"trend_description": trend_description,
"suggested_actions": suggested_actions,
"confidence_score": 0.85,
},
timestamp=datetime.now(timezone.utc),
)
metadata = {
"season": season,
"trend_type": trend_type,
"affected_categories": affected_categories,
"trend_description": trend_description,
"suggested_actions": suggested_actions,
"confidence_score": 0.85,
}
await self.publish_item(tenant_id, event.dict(), item_type="recommendation")
await self.publisher.publish_recommendation(
event_type="demand.seasonal_trend_insight",
tenant_id=tenant_id,
data=metadata
)
logger.info(
f"Seasonal trend recommendation emitted: {season}",
extra={"tenant_id": tenant_id, "season": season}
)
except Exception as e:
logger.error(
f"Failed to emit seasonal trend recommendation: {e}",
extra={"tenant_id": tenant_id},
exc_info=True,
)
logger.info(
"seasonal_trend_recommendation_emitted",
tenant_id=str(tenant_id),
season=season
)
async def emit_inventory_optimization_recommendation(
self,
db: Session,
tenant_id: str,
tenant_id: UUID,
ingredient_id: str,
ingredient_name: str,
current_stock: float,
@@ -259,62 +178,41 @@ class ForecastingRecommendationService(BaseAlertService):
"""
Emit RECOMMENDATION for inventory optimization.
"""
try:
if current_stock > optimal_stock:
action = "reduce"
difference = current_stock - optimal_stock
message = f"Consider reducing {ingredient_name} stock by {difference:.1f} {unit} - {reason}"
else:
action = "increase"
difference = optimal_stock - current_stock
message = f"Consider increasing {ingredient_name} stock by {difference:.1f} {unit} - {reason}"
difference = abs(current_stock - optimal_stock)
action = "reduce" if current_stock > optimal_stock else "increase"
estimated_impact = {}
if estimated_savings_eur:
estimated_impact["financial_savings_eur"] = estimated_savings_eur
estimated_impact = {}
if estimated_savings_eur:
estimated_impact["financial_savings_eur"] = estimated_savings_eur
event = RawEvent(
tenant_id=tenant_id,
event_class=EventClass.RECOMMENDATION,
event_domain=EventDomain.INVENTORY,
event_type="inventory_optimization_opportunity",
title=f"Optimize Stock: {ingredient_name}",
message=message,
service="forecasting",
actions=["adjust_stock", "view_analysis"],
event_metadata={
"ingredient_id": ingredient_id,
"ingredient_name": ingredient_name,
"current_stock": current_stock,
"optimal_stock": optimal_stock,
"difference": difference,
"action": action,
"unit": unit,
"reason": reason,
"estimated_impact": estimated_impact if estimated_impact else None,
"confidence_score": 0.75,
},
timestamp=datetime.now(timezone.utc),
)
metadata = {
"ingredient_id": ingredient_id,
"ingredient_name": ingredient_name,
"current_stock": float(current_stock),
"optimal_stock": float(optimal_stock),
"difference": float(difference),
"action": action,
"unit": unit,
"reason": reason,
"estimated_impact": estimated_impact if estimated_impact else None,
"confidence_score": 0.75,
}
await self.publish_item(tenant_id, event.dict(), item_type="recommendation")
await self.publisher.publish_recommendation(
event_type="inventory.inventory_optimization_opportunity",
tenant_id=tenant_id,
data=metadata
)
logger.info(
f"Inventory optimization recommendation emitted: {ingredient_name}",
extra={"tenant_id": tenant_id, "ingredient_id": ingredient_id}
)
except Exception as e:
logger.error(
f"Failed to emit inventory optimization recommendation: {e}",
extra={"tenant_id": tenant_id, "ingredient_id": ingredient_id},
exc_info=True,
)
logger.info(
"inventory_optimization_recommendation_emitted",
tenant_id=str(tenant_id),
ingredient_name=ingredient_name
)
async def emit_cost_reduction_recommendation(
self,
db: Session,
tenant_id: str,
tenant_id: UUID,
opportunity_type: str, # 'supplier_switch', 'bulk_purchase', 'seasonal_buying'
title: str,
description: str,
@@ -325,35 +223,24 @@ class ForecastingRecommendationService(BaseAlertService):
"""
Emit RECOMMENDATION for cost reduction opportunity.
"""
try:
event = RawEvent(
tenant_id=tenant_id,
event_class=EventClass.RECOMMENDATION,
event_domain=EventDomain.SUPPLY_CHAIN,
event_type="cost_reduction_suggestion",
title=title,
message=f"{description} - Potential savings: €{estimated_savings_eur:.2f}",
service="forecasting",
actions=suggested_actions,
event_metadata={
"opportunity_type": opportunity_type,
"estimated_savings_eur": estimated_savings_eur,
"details": details,
"confidence_score": 0.8,
},
timestamp=datetime.now(timezone.utc),
)
metadata = {
"opportunity_type": opportunity_type,
"title": title,
"description": description,
"estimated_savings_eur": float(estimated_savings_eur),
"suggested_actions": suggested_actions,
"details": details,
"confidence_score": 0.8,
}
await self.publish_item(tenant_id, event.dict(), item_type="recommendation")
await self.publisher.publish_recommendation(
event_type="supply_chain.cost_reduction_suggestion",
tenant_id=tenant_id,
data=metadata
)
logger.info(
f"Cost reduction recommendation emitted: {opportunity_type}",
extra={"tenant_id": tenant_id, "opportunity_type": opportunity_type}
)
except Exception as e:
logger.error(
f"Failed to emit cost reduction recommendation: {e}",
extra={"tenant_id": tenant_id},
exc_info=True,
)
logger.info(
"cost_reduction_recommendation_emitted",
tenant_id=str(tenant_id),
opportunity_type=opportunity_type
)

View File

@@ -1,196 +0,0 @@
# ================================================================
# services/forecasting/app/services/messaging.py
# ================================================================
"""
Messaging service for event publishing and consuming
"""
import structlog
import json
from typing import Dict, Any
import asyncio
import datetime
from shared.messaging.rabbitmq import RabbitMQClient
from shared.messaging.events import (
TrainingCompletedEvent,
DataImportedEvent,
ForecastGeneratedEvent,
)
from app.core.config import settings
logger = structlog.get_logger()
# Global messaging instance
# Module-level RabbitMQ client shared by all publish/consume helpers below.
# None until setup_messaging() has run; cleared again by cleanup_messaging().
rabbitmq_client = None
async def setup_messaging():
    """Create the global RabbitMQ client and register event consumers.

    Connects to RabbitMQ, then subscribes the module's message handlers to
    the training and data exchanges. Raises on failure so the service does
    not start without messaging.
    """
    global rabbitmq_client
    try:
        rabbitmq_client = RabbitMQClient(settings.RABBITMQ_URL, service_name="forecasting_service")
        await rabbitmq_client.connect()
        # Each subscription: (exchange, queue, routing key, aio-pika callback).
        # Model updates are assumed to arrive as part of training.completed
        # events; the weather routing key must match the actual event type.
        subscriptions = (
            ("training.events", "forecasting_model_update_queue",
             "training.completed", handle_model_updated_message),
            ("data.events", "forecasting_weather_update_queue",
             "data.weather.updated", handle_weather_updated_message),
        )
        for exchange, queue, routing_key, callback in subscriptions:
            await rabbitmq_client.consume_events(
                exchange_name=exchange,
                queue_name=queue,
                routing_key=routing_key,
                callback=callback,
            )
        logger.info("Messaging setup completed")
    except Exception as e:
        logger.error("Failed to setup messaging", error=str(e))
        raise
async def cleanup_messaging():
    """Disconnect the global RabbitMQ client, if one was ever created.

    Best-effort: any error during disconnect is logged, never raised.
    """
    global rabbitmq_client
    try:
        if rabbitmq_client:
            await rabbitmq_client.disconnect()
        logger.info("Messaging cleanup completed")
    except Exception as e:
        logger.error("Error during messaging cleanup", error=str(e))
async def publish_forecast_completed(data: Dict[str, Any]):
    """Publish a 'forecast.completed' event. No-op if messaging is not set up."""
    if not rabbitmq_client:
        return
    event = ForecastGeneratedEvent(
        service_name="forecasting_service",
        data=data,
        event_type="forecast.completed",
    )
    await rabbitmq_client.publish_forecast_event(
        event_type="completed",
        forecast_data=event.to_dict(),
    )
async def publish_batch_completed(data: Dict[str, Any]):
    """Publish a 'forecast.batch.completed' event. No-op if messaging is not set up."""
    if not rabbitmq_client:
        return
    event = ForecastGeneratedEvent(
        service_name="forecasting_service",
        data=data,
        event_type="forecast.batch.completed",
    )
    await rabbitmq_client.publish_forecast_event(
        event_type="batch.completed",
        forecast_data=event.to_dict(),
    )
# Event handler wrappers for aio_pika messages
async def handle_model_updated_message(message: Any):
    """aio-pika wrapper: ack, decode, and dispatch a model-updated event.

    The event payload is expected under the envelope's 'data' key.
    Decode and handler errors are logged so the message is still acked.
    """
    async with message.process():
        try:
            envelope = json.loads(message.body.decode())
            await handle_model_updated(envelope.get("data", {}))
        except json.JSONDecodeError as e:
            logger.error("Failed to decode model updated message JSON", error=str(e), body=message.body)
        except Exception as e:
            logger.error("Error processing model updated message", error=str(e), body=message.body)
async def handle_weather_updated_message(message: Any):
    """aio-pika wrapper: ack, decode, and dispatch a weather-updated event.

    The event payload is expected under the envelope's 'data' key.
    Decode and handler errors are logged so the message is still acked.
    """
    async with message.process():
        try:
            envelope = json.loads(message.body.decode())
            await handle_weather_updated(envelope.get("data", {}))
        except json.JSONDecodeError as e:
            logger.error("Failed to decode weather updated message JSON", error=str(e), body=message.body)
        except Exception as e:
            logger.error("Error processing weather updated message", error=str(e), body=message.body)
# Original Event handlers (now called from the message wrappers)
async def handle_model_updated(data: Dict[str, Any]):
    """Handle a model-updated event from the training service.

    Currently only logs the event; model-cache invalidation is handled
    by PredictionService.
    """
    try:
        logger.info(
            "Received model updated event",
            model_id=data.get("model_id"),
            tenant_id=data.get("tenant_id"),
        )
    except Exception as e:
        logger.error("Error handling model updated event", error=str(e))
async def handle_weather_updated(data: Dict[str, Any]):
    """Handle a weather-data-updated event.

    Currently only logs the event; could trigger re-forecasting if needed.
    """
    try:
        logger.info("Received weather updated event", date=data.get("date"))
    except Exception as e:
        logger.error("Error handling weather updated event", error=str(e))
async def publish_forecasts_deleted_event(tenant_id: str, deletion_stats: Dict[str, Any]):
    """Publish a tenant-forecasts-deleted event to the message queue.

    Args:
        tenant_id: Tenant whose forecasts were deleted.
        deletion_stats: Summary of what was removed.

    Best-effort: failures are logged, never raised to the caller.
    """
    try:
        # Guard like the other publishers in this module; previously a None
        # client raised AttributeError that was silently swallowed below.
        if not rabbitmq_client:
            return
        await rabbitmq_client.publish_event(
            exchange="forecasting_events",
            routing_key="forecasting.tenant.deleted",
            message={
                "event_type": "tenant_forecasts_deleted",
                "tenant_id": tenant_id,
                # BUG FIX: this module does `import datetime` (the module), so
                # the original `datetime.now(timezone.utc)` raised
                # AttributeError, and `timezone` was never imported at all.
                "timestamp": datetime.datetime.now(datetime.timezone.utc).isoformat(),
                "deletion_stats": deletion_stats
            }
        )
    except Exception as e:
        logger.error("Failed to publish forecasts deletion event", error=str(e))
# Additional publishing functions for compatibility
async def publish_forecast_generated(data: dict) -> bool:
    """Publish a 'forecast.generated' event.

    Args:
        data: Event payload to publish.

    Returns:
        True on success; False when the client is unavailable or publishing
        fails (errors are logged, never raised).
    """
    try:
        if not rabbitmq_client:
            # BUG FIX: previously fell through and implicitly returned None
            # despite the declared `-> bool` return type.
            return False
        await rabbitmq_client.publish_event(
            exchange="forecasting_events",
            routing_key="forecast.generated",
            message=data
        )
        return True
    except Exception as e:
        logger.error("Failed to publish forecast generated event", error=str(e))
        return False
async def publish_batch_forecast_completed(data: dict) -> bool:
    """Publish a 'forecast.batch.completed' event.

    Args:
        data: Event payload to publish.

    Returns:
        True on success; False when the client is unavailable or publishing
        fails (errors are logged, never raised).
    """
    try:
        if not rabbitmq_client:
            # BUG FIX: previously fell through and implicitly returned None
            # despite the declared `-> bool` return type.
            return False
        await rabbitmq_client.publish_event(
            exchange="forecasting_events",
            routing_key="forecast.batch.completed",
            message=data
        )
        return True
    except Exception as e:
        logger.error("Failed to publish batch forecast event", error=str(e))
        return False
# Publisher class for compatibility
class ForecastingStatusPublisher:
    """Publisher for forecasting status events (compatibility shim).

    Thin wrapper over the module-level ``rabbitmq_client``; kept for callers
    that expect a publisher object rather than free functions.
    """

    async def publish_status(self, status: str, data: dict) -> bool:
        """Publish a forecasting status event on 'forecast.status.<status>'.

        Args:
            status: Status segment appended to the routing key.
            data: Event payload to publish.

        Returns:
            True on success; False when the client is unavailable or
            publishing fails (errors are logged, never raised).
        """
        try:
            if not rabbitmq_client:
                # BUG FIX: previously fell through and implicitly returned
                # None despite the declared `-> bool` return type.
                return False
            await rabbitmq_client.publish_event(
                exchange="forecasting_events",
                routing_key=f"forecast.status.{status}",
                message=data
            )
            return True
        except Exception as e:
            logger.error(f"Failed to publish {status} status", error=str(e))
            return False

View File

@@ -324,15 +324,117 @@ class RetrainingTriggerService:
"outdated_models": 0
}
# TODO: Trigger retraining for outdated models
# Would need to get list of outdated products from training service
# Trigger retraining for outdated models
try:
from shared.clients.training_client import TrainingServiceClient
from shared.config.base import get_settings
from shared.messaging import get_rabbitmq_client
return {
"status": "analyzed",
"tenant_id": str(tenant_id),
"outdated_models": outdated_count,
"message": "Scheduled retraining analysis complete"
}
config = get_settings()
training_client = TrainingServiceClient(config, "forecasting")
# Get list of models that need retraining
outdated_models = await training_client.get_outdated_models(
tenant_id=str(tenant_id),
max_age_days=max_model_age_days,
min_accuracy=0.85, # Configurable threshold
min_new_data_points=1000 # Configurable threshold
)
if not outdated_models:
logger.info("No specific models returned for retraining", tenant_id=tenant_id)
return {
"status": "no_models_found",
"tenant_id": str(tenant_id),
"outdated_models": outdated_count
}
# Publish retraining events to RabbitMQ for each model
rabbitmq_client = get_rabbitmq_client()
triggered_models = []
if rabbitmq_client:
for model in outdated_models:
try:
import uuid as uuid_module
from datetime import datetime
retraining_event = {
"event_id": str(uuid_module.uuid4()),
"event_type": "training.retrain.requested",
"timestamp": datetime.utcnow().isoformat(),
"tenant_id": str(tenant_id),
"data": {
"model_id": model.get('id'),
"product_id": model.get('product_id'),
"model_type": model.get('model_type'),
"current_accuracy": model.get('accuracy'),
"model_age_days": model.get('age_days'),
"new_data_points": model.get('new_data_points', 0),
"trigger_reason": model.get('trigger_reason', 'scheduled_check'),
"priority": model.get('priority', 'normal'),
"requested_by": "system_scheduled_check"
}
}
await rabbitmq_client.publish_event(
exchange_name="training.events",
routing_key="training.retrain.requested",
event_data=retraining_event
)
triggered_models.append({
'model_id': model.get('id'),
'product_id': model.get('product_id'),
'event_id': retraining_event['event_id']
})
logger.info(
"Published retraining request",
model_id=model.get('id'),
product_id=model.get('product_id'),
event_id=retraining_event['event_id'],
trigger_reason=model.get('trigger_reason')
)
except Exception as publish_error:
logger.error(
"Failed to publish retraining event",
model_id=model.get('id'),
error=str(publish_error)
)
# Continue with other models even if one fails
else:
logger.warning(
"RabbitMQ client not available, cannot trigger retraining",
tenant_id=tenant_id
)
return {
"status": "retraining_triggered",
"tenant_id": str(tenant_id),
"outdated_models": outdated_count,
"triggered_count": len(triggered_models),
"triggered_models": triggered_models,
"message": f"Triggered retraining for {len(triggered_models)} models"
}
except Exception as trigger_error:
logger.error(
"Failed to trigger retraining",
tenant_id=tenant_id,
error=str(trigger_error),
exc_info=True
)
# Return analysis result even if triggering failed
return {
"status": "trigger_failed",
"tenant_id": str(tenant_id),
"outdated_models": outdated_count,
"error": str(trigger_error),
"message": "Analysis complete but failed to trigger retraining"
}
except Exception as e:
logger.error(