Add forecasting service

Commit by:
Urtzi Alfaro
2025-07-21 19:48:56 +02:00
parent 2d85dd3e9e
commit 0e7ca10a29
24 changed files with 2937 additions and 179 deletions

View File

@@ -0,0 +1,54 @@
# ================================================================
# services/forecasting/tests/conftest.py
# ================================================================
"""
Test configuration and fixtures for forecasting service
"""
import pytest
import asyncio
from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession, async_sessionmaker
from sqlalchemy.pool import StaticPool
from app.core.config import settings
from shared.database.base import Base
# Test database URL
TEST_DATABASE_URL = "sqlite+aiosqlite:///:memory:"
@pytest.fixture(scope="session")
def event_loop():
    """Provide one event loop shared across the whole test session."""
    policy = asyncio.get_event_loop_policy()
    loop = policy.new_event_loop()
    try:
        yield loop
    finally:
        # Always release the loop's resources when the session ends.
        loop.close()
@pytest.fixture
async def test_db():
    """Yield an AsyncSession bound to a fresh in-memory SQLite database.

    Tables from ``Base.metadata`` are created before the session is handed
    to the test; the engine is disposed afterwards so nothing leaks between
    tests.
    """
    engine = create_async_engine(
        TEST_DATABASE_URL,
        poolclass=StaticPool,
        connect_args={"check_same_thread": False},
        echo=False,
    )

    # Build the schema inside a single transactional connection.
    async with engine.begin() as conn:
        await conn.run_sync(Base.metadata.create_all)

    session_factory = async_sessionmaker(
        engine,
        class_=AsyncSession,
        expire_on_commit=False,
    )

    async with session_factory() as session:
        yield session

    # Tear down the engine once the consuming test has finished.
    await engine.dispose()

View File

@@ -0,0 +1,114 @@
# ================================================================
# Integration Tests: tests/integration/test_forecasting_flow.py
# ================================================================
"""
Integration tests for complete forecasting flow
"""
import pytest
import httpx
import asyncio
from datetime import date, timedelta
import json
class TestForecastingFlow:
    """End-to-end tests exercising the forecasting workflow via the gateway."""

    @pytest.mark.asyncio
    async def test_complete_forecast_flow(self):
        """Walk through model lookup, forecast creation, listing and alerts."""
        gateway = "http://localhost:8000"  # API Gateway

        # Fixed test identifiers.
        tenant_id = "test-tenant-123"
        product_name = "Pan Integral"
        location = "madrid_centro"

        async with httpx.AsyncClient() as client:
            # 1. Look up the latest trained model (response not asserted here;
            #    the forecast call below is the real availability check).
            await client.get(
                f"{gateway}/api/v1/training/models/latest",
                params={
                    "tenant_id": tenant_id,
                    "product_name": product_name,
                    "location": location,
                },
            )

            # 2. Request a single forecast for tomorrow.
            payload = {
                "tenant_id": tenant_id,
                "product_name": product_name,
                "location": location,
                "forecast_date": (date.today() + timedelta(days=1)).isoformat(),
                "business_type": "individual",
                "include_weather": True,
                "include_traffic": True,
                "confidence_level": 0.8,
            }
            created = await client.post(
                f"{gateway}/api/v1/forecasting/single",
                json=payload,
            )
            assert created.status_code == 200
            body = created.json()

            # The response must carry the forecast id plus the confidence band.
            assert "id" in body
            assert "predicted_demand" in body
            assert "confidence_lower" in body
            assert "confidence_upper" in body
            assert body["product_name"] == product_name

            # 3. The new forecast should appear in the per-location listing.
            listing = await client.get(
                f"{gateway}/api/v1/forecasting/list",
                params={"location": location},
            )
            assert listing.status_code == 200
            assert len(listing.json()) > 0

            # 4. The alerts endpoint must be reachable.
            alerts = await client.get(
                f"{gateway}/api/v1/forecasting/alerts"
            )
            assert alerts.status_code == 200

    @pytest.mark.asyncio
    async def test_batch_forecasting(self):
        """Submit a weekly batch request and validate the summary response."""
        gateway = "http://localhost:8000"

        request_body = {
            "tenant_id": "test-tenant-123",
            "batch_name": "Weekly Forecast Batch",
            "products": ["Pan Integral", "Croissant", "Café con Leche"],
            "location": "madrid_centro",
            "forecast_days": 7,
            "business_type": "individual",
            "include_weather": True,
            "include_traffic": True,
            "confidence_level": 0.8,
        }

        # Batch generation can be slow, so allow a generous timeout.
        async with httpx.AsyncClient(timeout=60.0) as client:
            response = await client.post(
                f"{gateway}/api/v1/forecasting/batch",
                json=request_body,
            )

        assert response.status_code == 200
        summary = response.json()
        assert "id" in summary
        assert summary["batch_name"] == "Weekly Forecast Batch"
        assert summary["total_products"] == 21  # 3 products * 7 days
        assert summary["status"] in ["completed", "partial"]

View File

@@ -0,0 +1,106 @@
# ================================================================
# Performance Tests: tests/performance/test_forecasting_performance.py
# ================================================================
"""
Performance tests for forecasting service
"""
import pytest
import httpx
import asyncio
import time
from concurrent.futures import ThreadPoolExecutor
import statistics
class TestForecastingPerformance:
    """Latency and throughput tests for the forecasting endpoints."""

    @pytest.mark.asyncio
    async def test_single_forecast_performance(self):
        """Measure sequential latency over 10 single-forecast requests."""
        gateway = "http://localhost:8000"

        payload = {
            "tenant_id": "perf-test-tenant",
            "product_name": "Pan Integral",
            "location": "madrid_centro",
            "forecast_date": "2024-01-17",
            "business_type": "individual",
            "confidence_level": 0.8,
        }

        durations = []
        async with httpx.AsyncClient() as client:
            for _ in range(10):
                started = time.time()
                response = await client.post(
                    f"{gateway}/api/v1/forecasting/single",
                    json=payload,
                )
                durations.append(time.time() - started)
                assert response.status_code == 200

        avg_time = statistics.mean(durations)
        p95_time = statistics.quantiles(durations, n=20)[18]  # 95th percentile

        # Latency budget: 2s average, 5s at the 95th percentile.
        assert avg_time < 2.0, f"Average response time {avg_time}s exceeds 2s"
        assert p95_time < 5.0, f"95th percentile {p95_time}s exceeds 5s"
        print(f"Average response time: {avg_time:.2f}s")
        print(f"95th percentile: {p95_time:.2f}s")

    @pytest.mark.asyncio
    async def test_concurrent_forecasts(self):
        """Fire 20 parallel forecast requests and check the success rate."""
        gateway = "http://localhost:8000"

        async def run_one(product_id):
            # One client per task keeps connections independent.
            body = {
                "tenant_id": "perf-test-tenant",
                "product_name": f"Product_{product_id}",
                "location": "madrid_centro",
                "forecast_date": "2024-01-17",
                "business_type": "individual",
            }
            async with httpx.AsyncClient() as client:
                started = time.time()
                response = await client.post(
                    f"{gateway}/api/v1/forecasting/single",
                    json=body,
                )
                elapsed = time.time() - started
            return {
                "status_code": response.status_code,
                "response_time": elapsed,
                "product_id": product_id,
            }

        # Launch 20 requests concurrently; exceptions become result entries.
        outcomes = await asyncio.gather(
            *(run_one(i) for i in range(20)), return_exceptions=True
        )

        succeeded = [
            r for r in outcomes if isinstance(r, dict) and r["status_code"] == 200
        ]
        success_rate = len(succeeded) / len(outcomes)
        assert success_rate >= 0.95, f"Success rate {success_rate} below 95%"

        if succeeded:
            avg_concurrent_time = statistics.mean(
                [r["response_time"] for r in succeeded]
            )
            assert avg_concurrent_time < 10.0, f"Average concurrent time {avg_concurrent_time}s exceeds 10s"
            print(f"Concurrent success rate: {success_rate:.2%}")
            print(f"Average concurrent response time: {avg_concurrent_time:.2f}s")

View File

@@ -0,0 +1,135 @@
# ================================================================
# services/forecasting/tests/test_forecasting.py
# ================================================================
"""
Tests for forecasting service
"""
import pytest
import asyncio
from datetime import date, datetime, timedelta
from unittest.mock import Mock, AsyncMock, patch
import uuid
from app.services.forecasting_service import ForecastingService
from app.schemas.forecasts import ForecastRequest, BusinessType
from app.models.forecasts import Forecast
class TestForecastingService:
    """Unit tests for ForecastingService with all collaborators mocked."""

    @pytest.fixture
    def forecasting_service(self):
        # A fresh service instance for every test.
        return ForecastingService()

    @pytest.fixture
    def sample_forecast_request(self):
        # A valid request for tomorrow's demand at a single location.
        return ForecastRequest(
            tenant_id=str(uuid.uuid4()),
            product_name="Pan Integral",
            location="madrid_centro",
            forecast_date=date.today() + timedelta(days=1),
            business_type=BusinessType.INDIVIDUAL,
            include_weather=True,
            include_traffic=True,
            confidence_level=0.8,
        )

    @pytest.mark.asyncio
    async def test_generate_forecast_success(self, forecasting_service, sample_forecast_request):
        """A forecast is produced when a trained model is available."""
        db_session = AsyncMock()

        # Stub out model lookup, feature prep, prediction and alerting.
        with patch.object(forecasting_service, '_get_latest_model') as model_mock, \
             patch.object(forecasting_service, '_prepare_forecast_features') as features_mock, \
             patch.object(forecasting_service.prediction_service, 'predict') as predict_mock, \
             patch.object(forecasting_service, '_check_and_create_alerts') as alerts_mock:

            model_mock.return_value = {
                "model_id": str(uuid.uuid4()),
                "version": "1.0.0",
                "algorithm": "prophet",
            }
            features_mock.return_value = {
                "date": "2024-01-16",
                "day_of_week": 1,
                "is_weekend": False,
                "is_holiday": False,
                "temperature": 15.0,
                "precipitation": 0.0,
            }
            predict_mock.return_value = {
                "demand": 85.5,
                "lower_bound": 70.2,
                "upper_bound": 100.8,
            }

            result = await forecasting_service.generate_forecast(
                sample_forecast_request, db_session
            )

            # The forecast mirrors the mocked prediction values.
            assert isinstance(result, Forecast)
            assert result.product_name == "Pan Integral"
            assert result.predicted_demand == 85.5
            assert result.confidence_lower == 70.2
            assert result.confidence_upper == 100.8

            # Every collaborator was invoked exactly once.
            model_mock.assert_called_once()
            features_mock.assert_called_once()
            predict_mock.assert_called_once()
            alerts_mock.assert_called_once()

    @pytest.mark.asyncio
    async def test_generate_forecast_no_model(self, forecasting_service, sample_forecast_request):
        """Generation fails fast with ValueError when no model exists."""
        db_session = AsyncMock()

        with patch.object(forecasting_service, '_get_latest_model') as model_mock:
            model_mock.return_value = None

            with pytest.raises(ValueError, match="No trained model found"):
                await forecasting_service.generate_forecast(sample_forecast_request, db_session)

    @pytest.mark.asyncio
    async def test_prepare_forecast_features(self, forecasting_service, sample_forecast_request):
        """Feature preparation merges calendar, weather and traffic data."""
        with patch.object(forecasting_service, '_is_spanish_holiday') as holiday_mock, \
             patch.object(forecasting_service, '_get_weather_forecast') as weather_mock, \
             patch.object(forecasting_service, '_get_traffic_forecast') as traffic_mock:

            holiday_mock.return_value = False
            weather_mock.return_value = {
                "temperature": 18.5,
                "precipitation": 0.0,
                "humidity": 65.0,
                "weather_description": "Clear",
            }
            traffic_mock.return_value = {
                "traffic_volume": 1200,
                "pedestrian_count": 850,
            }

            features = await forecasting_service._prepare_forecast_features(sample_forecast_request)

            # Calendar-derived fields are always present.
            assert "date" in features
            assert "day_of_week" in features
            assert "is_weekend" in features
            assert "is_holiday" in features
            assert features["business_type"] == "individual"
            # Mocked external data is passed straight through.
            assert features["temperature"] == 18.5
            assert features["traffic_volume"] == 1200