Improve the frontend and repository layer

This commit is contained in:
Urtzi Alfaro
2025-10-23 07:44:54 +02:00
parent 8d30172483
commit 07c33fa578
112 changed files with 14726 additions and 2733 deletions


@@ -0,0 +1,214 @@
# services/forecasting/app/repositories/forecasting_alert_repository.py
"""
Forecasting Alert Repository
Data access layer for forecasting-specific alert detection and analysis
"""
from typing import List, Dict, Any
from uuid import UUID
from sqlalchemy import text
from sqlalchemy.ext.asyncio import AsyncSession
import structlog
logger = structlog.get_logger()


class ForecastingAlertRepository:
"""Repository for forecasting alert data access"""
def __init__(self, session: AsyncSession):
self.session = session
async def get_weekend_demand_surges(self, tenant_id: UUID) -> List[Dict[str, Any]]:
"""
Get predicted weekend demand surges
Returns forecasts showing significant growth over previous weeks
"""
try:
query = text("""
WITH weekend_forecast AS (
SELECT
f.tenant_id,
f.inventory_product_id,
f.product_name,
f.predicted_demand,
f.forecast_date,
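-- Same product's demand 7 forecast rows earlier (one week back, assuming one row per product per day)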
LAG(f.predicted_demand, 7) OVER (
PARTITION BY f.tenant_id, f.inventory_product_id
ORDER BY f.forecast_date
) as prev_week_demand,
AVG(f.predicted_demand) OVER (
PARTITION BY f.tenant_id, f.inventory_product_id
ORDER BY f.forecast_date
ROWS BETWEEN 6 PRECEDING AND CURRENT ROW
) as avg_weekly_demand
FROM forecasts f
WHERE f.forecast_date >= CURRENT_DATE + INTERVAL '1 day'
AND f.forecast_date <= CURRENT_DATE + INTERVAL '3 days'
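-- EXTRACT(DOW) in PostgreSQL: 0 = Sunday, 6 = Saturday, so only weekend days are kept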
AND EXTRACT(DOW FROM f.forecast_date) IN (6, 0)
AND f.tenant_id = :tenant_id
),
surge_analysis AS (
SELECT *,
CASE
WHEN prev_week_demand > 0 THEN
(predicted_demand - prev_week_demand) / prev_week_demand * 100
ELSE 0
END as growth_percentage,
CASE
WHEN avg_weekly_demand > 0 THEN
(predicted_demand - avg_weekly_demand) / avg_weekly_demand * 100
ELSE 0
END as avg_growth_percentage
FROM weekend_forecast
)
SELECT * FROM surge_analysis
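-- Surge threshold: at least 50% growth vs. the previous week or the trailing weekly average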
WHERE growth_percentage > 50 OR avg_growth_percentage > 50
ORDER BY growth_percentage DESC
""")
result = await self.session.execute(query, {"tenant_id": tenant_id})
return [dict(row._mapping) for row in result.fetchall()]
except Exception as e:
logger.error("Failed to get weekend demand surges", error=str(e), tenant_id=str(tenant_id))
raise
async def get_weather_impact_forecasts(self, tenant_id: UUID) -> List[Dict[str, Any]]:
"""
Get weather impact on demand forecasts
Returns forecasts with rain or significant demand changes
"""
try:
query = text("""
WITH weather_impact AS (
SELECT
f.tenant_id,
f.inventory_product_id,
f.product_name,
f.predicted_demand,
f.forecast_date,
f.weather_precipitation,
f.weather_temperature,
f.traffic_volume,
AVG(f.predicted_demand) OVER (
PARTITION BY f.tenant_id, f.inventory_product_id
ORDER BY f.forecast_date
ROWS BETWEEN 6 PRECEDING AND CURRENT ROW
) as avg_demand
FROM forecasts f
WHERE f.forecast_date >= CURRENT_DATE + INTERVAL '1 day'
AND f.forecast_date <= CURRENT_DATE + INTERVAL '2 days'
AND f.tenant_id = :tenant_id
),
rain_impact AS (
SELECT *,
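-- Classification thresholds: precipitation above 2.0 counts as a rain forecast,
-- traffic_volume below 80 as low expected traffic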
CASE
WHEN weather_precipitation > 2.0 THEN true
ELSE false
END as rain_forecast,
CASE
WHEN traffic_volume < 80 THEN true
ELSE false
END as low_traffic_expected,
CASE
WHEN avg_demand > 0 THEN
(predicted_demand - avg_demand) / avg_demand * 100
ELSE 0
END as demand_change
FROM weather_impact
)
SELECT * FROM rain_impact
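-- Keep rows where rain is forecast or demand drops at least 15% below the trailing average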
WHERE rain_forecast = true OR demand_change < -15
ORDER BY demand_change ASC
""")
result = await self.session.execute(query, {"tenant_id": tenant_id})
return [dict(row._mapping) for row in result.fetchall()]
except Exception as e:
logger.error("Failed to get weather impact forecasts", error=str(e), tenant_id=str(tenant_id))
raise
async def get_holiday_demand_spikes(self, tenant_id: UUID) -> List[Dict[str, Any]]:
"""
Get historical holiday demand spike analysis
Returns products with significant holiday demand increases
"""
try:
query = text("""
WITH holiday_demand AS (
SELECT
f.tenant_id,
f.inventory_product_id,
f.product_name,
-- Average demand on holiday vs. non-holiday forecast days
AVG(CASE WHEN f.is_holiday = true THEN f.predicted_demand END) as avg_holiday_demand,
AVG(CASE WHEN f.is_holiday = false THEN f.predicted_demand END) as avg_normal_demand,
COUNT(*) as forecast_count
FROM forecasts f
WHERE f.created_at > CURRENT_DATE - INTERVAL '365 days'
AND f.tenant_id = :tenant_id
GROUP BY f.tenant_id, f.inventory_product_id, f.product_name
HAVING COUNT(*) >= 10
),
demand_spike_analysis AS (
SELECT *,
CASE
WHEN avg_normal_demand > 0 THEN
(avg_holiday_demand - avg_normal_demand) / avg_normal_demand * 100
ELSE 0
END as spike_percentage
FROM holiday_demand
)
SELECT * FROM demand_spike_analysis
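-- Report only products whose holiday demand averages at least 25% above normal demand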
WHERE spike_percentage > 25
ORDER BY spike_percentage DESC
""")
result = await self.session.execute(query, {"tenant_id": tenant_id})
return [dict(row._mapping) for row in result.fetchall()]
except Exception as e:
logger.error("Failed to get holiday demand spikes", error=str(e), tenant_id=str(tenant_id))
raise
async def get_demand_pattern_analysis(self, tenant_id: UUID) -> List[Dict[str, Any]]:
"""
Get weekly demand pattern analysis for optimization
Returns products with significant demand variations
"""
try:
query = text("""
WITH weekly_patterns AS (
SELECT
f.tenant_id,
f.inventory_product_id,
f.product_name,
EXTRACT(DOW FROM f.forecast_date) as day_of_week,
AVG(f.predicted_demand) as avg_demand,
STDDEV(f.predicted_demand) as demand_stddev,
COUNT(*) as data_points
FROM forecasts f
WHERE f.created_at > CURRENT_DATE - INTERVAL '60 days'
AND f.tenant_id = :tenant_id
GROUP BY f.tenant_id, f.inventory_product_id, f.product_name, EXTRACT(DOW FROM f.forecast_date)
HAVING COUNT(*) >= 5
),
pattern_analysis AS (
SELECT
tenant_id, inventory_product_id, product_name,
MAX(avg_demand) as peak_demand,
MIN(avg_demand) as min_demand,
AVG(avg_demand) as overall_avg,
MAX(avg_demand) - MIN(avg_demand) as demand_range
FROM weekly_patterns
GROUP BY tenant_id, inventory_product_id, product_name
)
SELECT * FROM pattern_analysis
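-- Significant pattern: day-of-week range above 30% of the overall average with a peak at least 50% above it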
WHERE demand_range > overall_avg * 0.3
AND peak_demand > overall_avg * 1.5
ORDER BY demand_range DESC
""")
result = await self.session.execute(query, {"tenant_id": tenant_id})
return [dict(row._mapping) for row in result.fetchall()]
except Exception as e:
logger.error("Failed to get demand pattern analysis", error=str(e), tenant_id=str(tenant_id))
raise
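
A minimal usage sketch for the new repository, assuming an async SQLAlchemy engine for the forecasting database and a populated forecasts table; the connection URL, tenant id, and import path are placeholders and are not part of this commit:

# usage_sketch.py -- hypothetical example, not part of the commit
import asyncio
from uuid import UUID

from sqlalchemy.ext.asyncio import async_sessionmaker, create_async_engine

from app.repositories.forecasting_alert_repository import ForecastingAlertRepository


async def main() -> None:
    # Placeholder connection URL; real credentials come from service configuration
    engine = create_async_engine("postgresql+asyncpg://user:pass@localhost/forecasting")
    session_factory = async_sessionmaker(engine, expire_on_commit=False)

    async with session_factory() as session:
        repo = ForecastingAlertRepository(session)
        # Placeholder tenant id
        surges = await repo.get_weekend_demand_surges(UUID("00000000-0000-0000-0000-000000000000"))
        for row in surges:
            print(row["product_name"], row["growth_percentage"])

    await engine.dispose()


if __name__ == "__main__":
    asyncio.run(main())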