New alert system and control panel page
@@ -11,6 +11,11 @@ import uuid
from datetime import datetime, timezone, timedelta
from typing import Optional
import os
import sys
from pathlib import Path

sys.path.insert(0, str(Path(__file__).parent.parent.parent.parent))
from shared.utils.demo_dates import adjust_date_for_demo, BASE_REFERENCE_DATE

from app.core.database import get_db
from app.models.forecasts import Forecast, PredictionBatch
@@ -40,6 +45,7 @@ async def clone_demo_data(
    virtual_tenant_id: str,
    demo_account_type: str,
    session_id: Optional[str] = None,
    session_created_at: Optional[str] = None,
    db: AsyncSession = Depends(get_db),
    _: bool = Depends(verify_internal_api_key)
):
@@ -55,18 +61,35 @@ async def clone_demo_data(
        virtual_tenant_id: Target virtual tenant UUID
        demo_account_type: Type of demo account
        session_id: Originating session ID for tracing
        session_created_at: ISO timestamp when demo session was created (for date adjustment)

    Returns:
        Cloning status and record counts
    """
    start_time = datetime.now(timezone.utc)

    # Parse session_created_at or fall back to now
    if session_created_at:
        try:
            session_time = datetime.fromisoformat(session_created_at.replace('Z', '+00:00'))
        except (ValueError, AttributeError) as e:
            logger.warning(
                "Invalid session_created_at format, using current time",
                session_created_at=session_created_at,
                error=str(e)
            )
            session_time = datetime.now(timezone.utc)
    else:
        logger.warning("session_created_at not provided, using current time")
        session_time = datetime.now(timezone.utc)
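    # NOTE (illustrative, not part of this diff): the .replace('Z', '+00:00') above is
    # needed because datetime.fromisoformat() only accepts a trailing 'Z' from
    # Python 3.11 onwards; on 3.10 and earlier it raises ValueError. A minimal
    # self-contained illustration with a hypothetical timestamp:
    #
    #     from datetime import datetime
    #     raw = "2025-01-15T12:00:00Z"
    #     parsed = datetime.fromisoformat(raw.replace('Z', '+00:00'))
    #     print(parsed.isoformat())  # 2025-01-15T12:00:00+00:00, timezone-aware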

    logger.info(
        "Starting forecasting data cloning",
        base_tenant_id=base_tenant_id,
        virtual_tenant_id=virtual_tenant_id,
        demo_account_type=demo_account_type,
        session_id=session_id
        session_id=session_id,
        session_time=session_time.isoformat()
    )

    try:
@@ -92,23 +115,21 @@ async def clone_demo_data(
            base_tenant=str(base_uuid)
        )

        # Calculate date offset to make forecasts recent
        if base_forecasts:
            max_date = max(forecast.forecast_date for forecast in base_forecasts)
            today = datetime.now(timezone.utc)
            date_offset = today - max_date
        else:
            date_offset = timedelta(days=0)

        for forecast in base_forecasts:
            adjusted_forecast_date = adjust_date_for_demo(
                forecast.forecast_date,
                session_time,
                BASE_REFERENCE_DATE
            ) if forecast.forecast_date else None

            new_forecast = Forecast(
                id=uuid.uuid4(),
                tenant_id=virtual_uuid,
                inventory_product_id=forecast.inventory_product_id,  # Keep product reference
                product_name=forecast.product_name,
                location=forecast.location,
                forecast_date=forecast.forecast_date + date_offset,
                created_at=datetime.now(timezone.utc),
                forecast_date=adjusted_forecast_date,
                created_at=session_time,
                predicted_demand=forecast.predicted_demand,
                confidence_lower=forecast.confidence_lower,
                confidence_upper=forecast.confidence_upper,
@@ -143,12 +164,23 @@ async def clone_demo_data(
        )

        for batch in base_batches:
            adjusted_requested_at = adjust_date_for_demo(
                batch.requested_at,
                session_time,
                BASE_REFERENCE_DATE
            ) if batch.requested_at else None
            adjusted_completed_at = adjust_date_for_demo(
                batch.completed_at,
                session_time,
                BASE_REFERENCE_DATE
            ) if batch.completed_at else None

            new_batch = PredictionBatch(
                id=uuid.uuid4(),
                tenant_id=virtual_uuid,
                batch_name=batch.batch_name,
                requested_at=batch.requested_at + date_offset,
                completed_at=batch.completed_at + date_offset if batch.completed_at else None,
                requested_at=adjusted_requested_at,
                completed_at=adjusted_completed_at,
                status=batch.status,
                total_products=batch.total_products,
                completed_products=batch.completed_products,

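The shared.utils.demo_dates helper used in the hunks above is not part of this diff. A minimal sketch of the behavior the call sites assume (shift each seeded timestamp by the gap between the demo session's start time and the reference date the base data was seeded against; None is already guarded at the call sites) might look like this; only the name and argument order come from the calls above, the body is an assumption:

from datetime import datetime

def adjust_date_for_demo(original: datetime,
                         session_time: datetime,
                         base_reference: datetime) -> datetime:
    # A value seeded N hours after base_reference lands N hours after session_time.
    return original + (session_time - base_reference)

Under this reading, a session created exactly at the reference date would leave the seeded dates unchanged.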
@@ -12,7 +12,6 @@ import structlog
from apscheduler.triggers.cron import CronTrigger

from shared.alerts.base_service import BaseAlertService, AlertServiceMixin
from shared.alerts.templates import format_item_message
from app.clients.inventory_client import get_inventory_client

logger = structlog.get_logger()

@@ -0,0 +1,359 @@
"""
Forecasting Recommendation Service

Emits RECOMMENDATIONS (not alerts) for demand forecasting insights:
- demand_surge_predicted: Upcoming demand spike
- weather_impact_forecast: Weather affecting demand
- holiday_preparation: Holiday demand prep
- seasonal_trend_insight: Seasonal pattern detected
- inventory_optimization_opportunity: Stock optimization suggestion

These are RECOMMENDATIONS - AI-generated suggestions that are advisory, not urgent.
Users can choose to act on them or dismiss them.
"""

import logging
from datetime import datetime, timezone
from typing import Optional, Dict, Any, List
from sqlalchemy.orm import Session

from shared.schemas.event_classification import RawEvent, EventClass, EventDomain
from shared.alerts.base_service import BaseAlertService


logger = logging.getLogger(__name__)


class ForecastingRecommendationService(BaseAlertService):
    """
    Service for emitting forecasting recommendations (AI-generated suggestions).
    """

    def __init__(self, rabbitmq_url: str = None):
        super().__init__(service_name="forecasting", rabbitmq_url=rabbitmq_url)

    async def emit_demand_surge_recommendation(
        self,
        db: Session,
        tenant_id: str,
        product_sku: str,
        product_name: str,
        predicted_demand: float,
        normal_demand: float,
        surge_percentage: float,
        surge_date: datetime,
        confidence_score: float,
        reasoning: str,
    ) -> None:
        """
        Emit RECOMMENDATION for predicted demand surge.

        This is a RECOMMENDATION (not alert) - proactive suggestion to prepare.
        """
        try:
            message = f"{product_name} demand expected to surge by {surge_percentage:.0f}% on {surge_date.strftime('%A, %B %d')} (from {normal_demand:.0f} to {predicted_demand:.0f} units)"

            event = RawEvent(
                tenant_id=tenant_id,
                event_class=EventClass.RECOMMENDATION,
                event_domain=EventDomain.DEMAND,
                event_type="demand_surge_predicted",
                title=f"Demand Surge: {product_name}",
                message=message,
                service="forecasting",
                actions=["increase_production", "check_inventory", "view_forecast"],
                event_metadata={
                    "product_sku": product_sku,
                    "product_name": product_name,
                    "predicted_demand": predicted_demand,
                    "normal_demand": normal_demand,
                    "surge_percentage": surge_percentage,
                    "surge_date": surge_date.isoformat(),
                    "confidence_score": confidence_score,
                    "reasoning": reasoning,
                    "estimated_impact": {
                        "additional_revenue_eur": predicted_demand * 5,  # Rough estimate
                        "stockout_risk": "high" if surge_percentage > 50 else "medium",
                    },
                },
                timestamp=datetime.now(timezone.utc),
            )

            await self.publish_item(tenant_id, event.dict(), item_type="recommendation")

            logger.info(
                f"Demand surge recommendation emitted: {product_name} (+{surge_percentage:.0f}%)",
                extra={"tenant_id": tenant_id, "product_sku": product_sku}
            )

        except Exception as e:
            logger.error(
                f"Failed to emit demand surge recommendation: {e}",
                extra={"tenant_id": tenant_id, "product_sku": product_sku},
                exc_info=True,
            )

    async def emit_weather_impact_recommendation(
        self,
        db: Session,
        tenant_id: str,
        weather_event: str,  # 'rain', 'snow', 'heatwave', etc.
        forecast_date: datetime,
        affected_products: List[Dict[str, Any]],
        impact_description: str,
        confidence_score: float,
    ) -> None:
        """
        Emit RECOMMENDATION for weather impact on demand.
        """
        try:
            products_summary = ", ".join([p['product_name'] for p in affected_products[:3]])
            if len(affected_products) > 3:
                products_summary += f" and {len(affected_products) - 3} more"

            message = f"{weather_event.title()} forecast for {forecast_date.strftime('%A')} - {impact_description}"

            event = RawEvent(
                tenant_id=tenant_id,
                event_class=EventClass.RECOMMENDATION,
                event_domain=EventDomain.DEMAND,
                event_type="weather_impact_forecast",
                title=f"Weather Impact: {weather_event.title()}",
                message=message,
                service="forecasting",
                actions=["adjust_production", "view_affected_products"],
                event_metadata={
                    "weather_event": weather_event,
                    "forecast_date": forecast_date.isoformat(),
                    "affected_products": affected_products,
                    "impact_description": impact_description,
                    "confidence_score": confidence_score,
                },
                timestamp=datetime.now(timezone.utc),
            )

            await self.publish_item(tenant_id, event.dict(), item_type="recommendation")

            logger.info(
                f"Weather impact recommendation emitted: {weather_event}",
                extra={"tenant_id": tenant_id, "weather_event": weather_event}
            )

        except Exception as e:
            logger.error(
                f"Failed to emit weather impact recommendation: {e}",
                extra={"tenant_id": tenant_id},
                exc_info=True,
            )

    async def emit_holiday_preparation_recommendation(
        self,
        db: Session,
        tenant_id: str,
        holiday_name: str,
        holiday_date: datetime,
        days_until_holiday: int,
        recommended_products: List[Dict[str, Any]],
        preparation_tips: List[str],
    ) -> None:
        """
        Emit RECOMMENDATION for holiday preparation.
        """
        try:
            message = f"{holiday_name} in {days_until_holiday} days - Prepare for increased demand"

            event = RawEvent(
                tenant_id=tenant_id,
                event_class=EventClass.RECOMMENDATION,
                event_domain=EventDomain.DEMAND,
                event_type="holiday_preparation",
                title=f"Prepare for {holiday_name}",
                message=message,
                service="forecasting",
                actions=["view_recommendations", "adjust_orders"],
                event_metadata={
                    "holiday_name": holiday_name,
                    "holiday_date": holiday_date.isoformat(),
                    "days_until_holiday": days_until_holiday,
                    "recommended_products": recommended_products,
                    "preparation_tips": preparation_tips,
                    "confidence_score": 0.9,  # High confidence for known holidays
                },
                timestamp=datetime.now(timezone.utc),
            )

            await self.publish_item(tenant_id, event.dict(), item_type="recommendation")

            logger.info(
                f"Holiday preparation recommendation emitted: {holiday_name}",
                extra={"tenant_id": tenant_id, "holiday": holiday_name}
            )

        except Exception as e:
            logger.error(
                f"Failed to emit holiday preparation recommendation: {e}",
                extra={"tenant_id": tenant_id},
                exc_info=True,
            )

    async def emit_seasonal_trend_recommendation(
        self,
        db: Session,
        tenant_id: str,
        season: str,  # 'spring', 'summer', 'fall', 'winter'
        trend_type: str,  # 'increasing', 'decreasing', 'stable'
        affected_categories: List[str],
        trend_description: str,
        suggested_actions: List[str],
    ) -> None:
        """
        Emit RECOMMENDATION for seasonal trend insight.
        """
        try:
            event = RawEvent(
                tenant_id=tenant_id,
                event_class=EventClass.RECOMMENDATION,
                event_domain=EventDomain.DEMAND,
                event_type="seasonal_trend_insight",
                title=f"Seasonal Trend: {season.title()}",
                message=f"{trend_description} - Affects: {', '.join(affected_categories)}",
                service="forecasting",
                actions=["view_details", "adjust_strategy"],
                event_metadata={
                    "season": season,
                    "trend_type": trend_type,
                    "affected_categories": affected_categories,
                    "trend_description": trend_description,
                    "suggested_actions": suggested_actions,
                    "confidence_score": 0.85,
                },
                timestamp=datetime.now(timezone.utc),
            )

            await self.publish_item(tenant_id, event.dict(), item_type="recommendation")

            logger.info(
                f"Seasonal trend recommendation emitted: {season}",
                extra={"tenant_id": tenant_id, "season": season}
            )

        except Exception as e:
            logger.error(
                f"Failed to emit seasonal trend recommendation: {e}",
                extra={"tenant_id": tenant_id},
                exc_info=True,
            )

    async def emit_inventory_optimization_recommendation(
        self,
        db: Session,
        tenant_id: str,
        ingredient_id: str,
        ingredient_name: str,
        current_stock: float,
        optimal_stock: float,
        unit: str,
        reason: str,
        estimated_savings_eur: Optional[float] = None,
    ) -> None:
        """
        Emit RECOMMENDATION for inventory optimization.
        """
        try:
            if current_stock > optimal_stock:
                action = "reduce"
                difference = current_stock - optimal_stock
                message = f"Consider reducing {ingredient_name} stock by {difference:.1f} {unit} - {reason}"
            else:
                action = "increase"
                difference = optimal_stock - current_stock
                message = f"Consider increasing {ingredient_name} stock by {difference:.1f} {unit} - {reason}"

            estimated_impact = {}
            if estimated_savings_eur:
                estimated_impact["financial_savings_eur"] = estimated_savings_eur

            event = RawEvent(
                tenant_id=tenant_id,
                event_class=EventClass.RECOMMENDATION,
                event_domain=EventDomain.INVENTORY,
                event_type="inventory_optimization_opportunity",
                title=f"Optimize Stock: {ingredient_name}",
                message=message,
                service="forecasting",
                actions=["adjust_stock", "view_analysis"],
                event_metadata={
                    "ingredient_id": ingredient_id,
                    "ingredient_name": ingredient_name,
                    "current_stock": current_stock,
                    "optimal_stock": optimal_stock,
                    "difference": difference,
                    "action": action,
                    "unit": unit,
                    "reason": reason,
                    "estimated_impact": estimated_impact if estimated_impact else None,
                    "confidence_score": 0.75,
                },
                timestamp=datetime.now(timezone.utc),
            )

            await self.publish_item(tenant_id, event.dict(), item_type="recommendation")

            logger.info(
                f"Inventory optimization recommendation emitted: {ingredient_name}",
                extra={"tenant_id": tenant_id, "ingredient_id": ingredient_id}
            )

        except Exception as e:
            logger.error(
                f"Failed to emit inventory optimization recommendation: {e}",
                extra={"tenant_id": tenant_id, "ingredient_id": ingredient_id},
                exc_info=True,
            )

    async def emit_cost_reduction_recommendation(
        self,
        db: Session,
        tenant_id: str,
        opportunity_type: str,  # 'supplier_switch', 'bulk_purchase', 'seasonal_buying'
        title: str,
        description: str,
        estimated_savings_eur: float,
        suggested_actions: List[str],
        details: Dict[str, Any],
    ) -> None:
        """
        Emit RECOMMENDATION for cost reduction opportunity.
        """
        try:
            event = RawEvent(
                tenant_id=tenant_id,
                event_class=EventClass.RECOMMENDATION,
                event_domain=EventDomain.SUPPLY_CHAIN,
                event_type="cost_reduction_suggestion",
                title=title,
                message=f"{description} - Potential savings: €{estimated_savings_eur:.2f}",
                service="forecasting",
                actions=suggested_actions,
                event_metadata={
                    "opportunity_type": opportunity_type,
                    "estimated_savings_eur": estimated_savings_eur,
                    "details": details,
                    "confidence_score": 0.8,
                },
                timestamp=datetime.now(timezone.utc),
            )

            await self.publish_item(tenant_id, event.dict(), item_type="recommendation")

            logger.info(
                f"Cost reduction recommendation emitted: {opportunity_type}",
                extra={"tenant_id": tenant_id, "opportunity_type": opportunity_type}
            )

        except Exception as e:
            logger.error(
                f"Failed to emit cost reduction recommendation: {e}",
                extra={"tenant_id": tenant_id},
                exc_info=True,
            )
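For orientation rather than as part of this commit, a sketch of how a caller inside the forecasting service might wire up the new class, assuming the base service can publish with only the constructor arguments shown above. The module path, AMQP URL, SKU, product name, and numbers below are hypothetical; the db argument is accepted by the signature but not used in the method body shown above.

import asyncio
from datetime import datetime, timedelta, timezone

from app.services.recommendation_service import ForecastingRecommendationService  # assumed module path


async def emit_example_surge() -> None:
    service = ForecastingRecommendationService(rabbitmq_url="amqp://guest:guest@rabbitmq:5672/")  # hypothetical URL
    await service.emit_demand_surge_recommendation(
        db=None,  # accepted by the signature, unused in the body shown above
        tenant_id="a1b2c3d4-e5f6-47a8-b9c0-d1e2f3a4b5c6",
        product_sku="BAG-001",      # hypothetical SKU
        product_name="Baguette",    # hypothetical product
        predicted_demand=180.0,
        normal_demand=120.0,
        surge_percentage=50.0,
        surge_date=datetime.now(timezone.utc) + timedelta(days=2),
        confidence_score=0.82,
        reasoning="Weekend festival expected to lift foot traffic",
    )


if __name__ == "__main__":
    asyncio.run(emit_example_surge())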
@@ -26,16 +26,17 @@ import structlog

from app.models.forecasts import Forecast, PredictionBatch


# Add shared path for demo utilities
sys.path.insert(0, str(Path(__file__).parent.parent.parent.parent))
from shared.utils.demo_dates import BASE_REFERENCE_DATE

# Configure logging
logger = structlog.get_logger()

# Base demo tenant IDs
DEMO_TENANT_SAN_PABLO = uuid.UUID("a1b2c3d4-e5f6-47a8-b9c0-d1e2f3a4b5c6")  # Individual bakery
DEMO_TENANT_LA_ESPIGA = uuid.UUID("b2c3d4e5-f6a7-48b9-c0d1-e2f3a4b5c6d7")  # Central bakery

# Base reference date for date calculations
BASE_REFERENCE_DATE = datetime(2025, 1, 15, 12, 0, 0, tzinfo=timezone.utc)

# Day of week mapping
DAYS_OF_WEEK = {
    0: "lunes",
@@ -442,8 +443,25 @@ async def seed_all(db: AsyncSession):
    }


def validate_base_reference_date():
    """Ensure BASE_REFERENCE_DATE hasn't changed since last seed"""
    expected_date = datetime(2025, 1, 8, 6, 0, 0, tzinfo=timezone.utc)

    if BASE_REFERENCE_DATE != expected_date:
        logger.warning(
            "BASE_REFERENCE_DATE has changed! This may cause date inconsistencies.",
            current=BASE_REFERENCE_DATE.isoformat(),
            expected=expected_date.isoformat()
        )
        # Don't fail - just warn. Allow intentional changes.

    logger.info("BASE_REFERENCE_DATE validation", date=BASE_REFERENCE_DATE.isoformat())


async def main():
    """Main execution function"""
    validate_base_reference_date()  # Add this line

    # Get database URL from environment
    database_url = os.getenv("FORECASTING_DATABASE_URL")
    if not database_url: