Refactor all main.py

Urtzi Alfaro
2025-09-29 13:13:12 +02:00
parent 4777e59e7a
commit befcc126b0
35 changed files with 2537 additions and 1993 deletions


@@ -7,67 +7,74 @@ Database configuration for forecasting service
 import structlog
 from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession, async_sessionmaker
-from sqlalchemy.pool import NullPool
 from sqlalchemy import text
+from typing import AsyncGenerator
 from app.core.config import settings
-from shared.database.base import Base
+from shared.database.base import Base, DatabaseManager
 logger = structlog.get_logger()
 # Create async engine
-engine = create_async_engine(
+async_engine = create_async_engine(
     settings.DATABASE_URL,
-    poolclass=NullPool,
     echo=settings.DEBUG,
-    future=True
+    pool_size=10,
+    max_overflow=20,
+    pool_pre_ping=True,
+    pool_recycle=3600
 )
-# Create session factory
+# Create async session factory
 AsyncSessionLocal = async_sessionmaker(
-    engine,
+    bind=async_engine,
     class_=AsyncSession,
-    expire_on_commit=False,
-    autoflush=False,
-    autocommit=False
+    expire_on_commit=False
 )
-class DatabaseManager:
-    """Database management operations"""
-    async def create_tables(self):
-        """Create database tables"""
-        async with engine.begin() as conn:
+async def get_db() -> AsyncGenerator[AsyncSession, None]:
+    """Get database session"""
+    async with AsyncSessionLocal() as session:
+        try:
+            yield session
+        except Exception as e:
+            await session.rollback()
+            logger.error("Database session error", error=str(e))
+            raise
+        finally:
+            await session.close()
+async def init_database():
+    """Initialize database tables"""
+    try:
+        async with async_engine.begin() as conn:
             # Import all models to ensure they are registered
             from app.models.forecast import ForecastBatch, Forecast
             from app.models.prediction import PredictionBatch, Prediction
             # Create all tables
             await conn.run_sync(Base.metadata.create_all)
-            logger.info("Forecasting database tables created successfully")
-    async def get_session(self) -> AsyncSession:
-        """Get database session"""
-        async with AsyncSessionLocal() as session:
-            try:
-                yield session
-                await session.commit()
-            except Exception as e:
-                await session.rollback()
-                logger.error(f"Database session error: {e}")
-                raise
-            finally:
-                await session.close()
-# Global database manager instance
-database_manager = DatabaseManager()
-async def get_db() -> AsyncSession:
-    """Database dependency"""
-    async for session in database_manager.get_session():
-        yield session
+        logger.info("Forecasting database initialized successfully")
+    except Exception as e:
+        logger.error("Failed to initialize forecasting database", error=str(e))
+        raise
 async def get_db_health() -> bool:
     """Check database health"""
     try:
-        async with AsyncSessionLocal() as session:
-            await session.execute(text("SELECT 1"))
-            return True
+        async with async_engine.begin() as conn:
+            await conn.execute(text("SELECT 1"))
+            return True
     except Exception as e:
-        logger.error(f"Database health check failed: {e}")
+        logger.error("Database health check failed", error=str(e))
         return False
+# Database manager instance for service_base compatibility
+database_manager = DatabaseManager(
+    database_url=settings.DATABASE_URL,
+    service_name="forecasting-service",
+    pool_size=10,
+    max_overflow=20,
+    echo=settings.DEBUG
+)
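
For reference, the refactored module exposes get_db as a request-scoped dependency and init_database for table creation. Below is a minimal usage sketch assuming a typical FastAPI wiring; the route path, the Forecast query, and the limit are illustrative and not part of this commit.

from fastapi import APIRouter, Depends
from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession
from app.core.database import get_db, init_database
from app.models.forecast import Forecast

router = APIRouter()

@router.get("/forecasts/latest")  # illustrative path, not defined in this commit
async def latest_forecasts(db: AsyncSession = Depends(get_db)):
    # get_db yields one AsyncSession per request and rolls back on error
    result = await db.execute(select(Forecast).limit(10))
    return result.scalars().all()

# init_database() would typically be awaited once during service startup,
# e.g. from an on_startup hook, before the first request is served.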


@@ -6,144 +6,141 @@ Forecasting Service Main Application
 Demand prediction and forecasting service for bakery operations
 """
 import structlog
-from contextlib import asynccontextmanager
-from fastapi import FastAPI, Request
-from fastapi.middleware.cors import CORSMiddleware
-from fastapi.responses import JSONResponse
+from fastapi import FastAPI
 from app.core.config import settings
-from app.core.database import database_manager, get_db_health
+from app.core.database import database_manager
 from app.api import forecasts, predictions
 from app.services.messaging import setup_messaging, cleanup_messaging
 from app.services.forecasting_alert_service import ForecastingAlertService
-from shared.monitoring.logging import setup_logging
-from shared.monitoring.metrics import MetricsCollector
+from shared.service_base import StandardFastAPIService
-# Setup structured logging
-setup_logging("forecasting-service", settings.LOG_LEVEL)
 logger = structlog.get_logger()
-# Initialize metrics collector
-metrics_collector = MetricsCollector("forecasting-service")
+class ForecastingService(StandardFastAPIService):
+    """Forecasting Service with standardized setup"""
-# Initialize alert service
-alert_service = None
+    def __init__(self):
+        # Define expected database tables for health checks
+        forecasting_expected_tables = [
+            'forecasts', 'prediction_batches', 'model_performance_metrics', 'prediction_cache'
+        ]
-@asynccontextmanager
-async def lifespan(app: FastAPI):
-    """Application lifespan manager for startup and shutdown events"""
-    # Startup
-    logger.info("Starting Forecasting Service", version="1.0.0")
-    try:
-        # Initialize database
-        logger.info("Initializing database connection")
-        await database_manager.create_tables()
-        logger.info("Database initialized successfully")
-        # Initialize messaging
-        logger.info("Setting up messaging")
+        self.alert_service = None
+        # Create custom checks for alert service
+        async def alert_service_check():
+            """Custom health check for forecasting alert service"""
+            return await self.alert_service.health_check() if self.alert_service else False
+        # Define custom metrics for forecasting service
+        forecasting_custom_metrics = {
+            "forecasts_generated_total": {
+                "type": "counter",
+                "description": "Total forecasts generated"
+            },
+            "predictions_served_total": {
+                "type": "counter",
+                "description": "Total predictions served"
+            },
+            "prediction_errors_total": {
+                "type": "counter",
+                "description": "Total prediction errors"
+            },
+            "forecast_processing_time_seconds": {
+                "type": "histogram",
+                "description": "Time to process forecast request"
+            },
+            "prediction_processing_time_seconds": {
+                "type": "histogram",
+                "description": "Time to process prediction request"
+            },
+            "model_cache_hits_total": {
+                "type": "counter",
+                "description": "Total model cache hits"
+            },
+            "model_cache_misses_total": {
+                "type": "counter",
+                "description": "Total model cache misses"
+            }
+        }
+        super().__init__(
+            service_name="forecasting-service",
+            app_name="Bakery Forecasting Service",
+            description="AI-powered demand prediction and forecasting service for bakery operations",
+            version="1.0.0",
+            log_level=settings.LOG_LEVEL,
+            cors_origins=settings.CORS_ORIGINS_LIST,
+            api_prefix="/api/v1",
+            database_manager=database_manager,
+            expected_tables=forecasting_expected_tables,
+            custom_health_checks={"alert_service": alert_service_check},
+            enable_messaging=True,
+            custom_metrics=forecasting_custom_metrics
+        )
+    async def _setup_messaging(self):
+        """Setup messaging for forecasting service"""
         await setup_messaging()
-        logger.info("Messaging initialized")
+        self.logger.info("Messaging initialized")
+    async def _cleanup_messaging(self):
+        """Cleanup messaging for forecasting service"""
+        await cleanup_messaging()
+    async def on_startup(self, app: FastAPI):
+        """Custom startup logic for forecasting service"""
         # Initialize forecasting alert service
-        logger.info("Setting up forecasting alert service")
-        global alert_service
-        alert_service = ForecastingAlertService(settings)
-        await alert_service.start()
-        logger.info("Forecasting alert service initialized")
-        # Register custom metrics
-        metrics_collector.register_counter("forecasts_generated_total", "Total forecasts generated")
-        metrics_collector.register_counter("predictions_served_total", "Total predictions served")
-        metrics_collector.register_counter("prediction_errors_total", "Total prediction errors") # ← MISSING REGISTRATION!
-        metrics_collector.register_histogram("forecast_processing_time_seconds", "Time to process forecast request")
-        metrics_collector.register_histogram("prediction_processing_time_seconds", "Time to process prediction request") # ← ADD MISSING METRIC!
-        metrics_collector.register_gauge("active_models_count", "Number of active models")
-        metrics_collector.register_counter("model_cache_hits_total", "Total model cache hits") # ← ADD USEFUL METRIC!
-        metrics_collector.register_counter("model_cache_misses_total", "Total model cache misses") # ← ADD USEFUL METRIC!
-        # Start metrics server
-        metrics_collector.start_metrics_server(8080)
-        logger.info("Forecasting Service started successfully")
-        yield
-    except Exception as e:
-        logger.error("Failed to start Forecasting Service", error=str(e))
-        raise
-    finally:
-        # Shutdown
-        logger.info("Shutting down Forecasting Service")
-        try:
-            # Cleanup alert service
-            if alert_service:
-                await alert_service.stop()
-                logger.info("Alert service cleanup completed")
-            await cleanup_messaging()
-            logger.info("Messaging cleanup completed")
-        except Exception as e:
-            logger.error("Error during messaging cleanup", error=str(e))
+        self.alert_service = ForecastingAlertService(settings)
+        await self.alert_service.start()
+        self.logger.info("Forecasting alert service initialized")
-# Create FastAPI app with lifespan
-app = FastAPI(
-    title="Bakery Forecasting Service",
-    description="AI-powered demand prediction and forecasting service for bakery operations",
-    version="1.0.0",
+    async def on_shutdown(self, app: FastAPI):
+        """Custom shutdown logic for forecasting service"""
+        # Cleanup alert service
+        if self.alert_service:
+            await self.alert_service.stop()
+            self.logger.info("Alert service cleanup completed")
+    def get_service_features(self):
+        """Return forecasting-specific features"""
+        return [
+            "demand_prediction",
+            "ai_forecasting",
+            "model_performance_tracking",
+            "prediction_caching",
+            "alert_notifications",
+            "messaging_integration"
+        ]
+    def setup_custom_endpoints(self):
+        """Setup custom endpoints for forecasting service"""
+        @self.app.get("/alert-metrics")
+        async def get_alert_metrics():
+            """Alert service metrics endpoint"""
+            if self.alert_service:
+                return self.alert_service.get_metrics()
+            return {"error": "Alert service not initialized"}
+# Create service instance
+service = ForecastingService()
+# Create FastAPI app with standardized setup
+app = service.create_app(
     docs_url="/docs",
-    redoc_url="/redoc",
-    lifespan=lifespan
+    redoc_url="/redoc"
 )
-# CORS middleware
-app.add_middleware(
-    CORSMiddleware,
-    allow_origins=settings.CORS_ORIGINS_LIST,
-    allow_credentials=True,
-    allow_methods=["*"],
-    allow_headers=["*"],
-)
+# Setup standard endpoints
+service.setup_standard_endpoints()
+# Setup custom endpoints
+service.setup_custom_endpoints()
 # Include API routers
-app.include_router(forecasts.router, prefix="/api/v1", tags=["forecasts"])
-app.include_router(predictions.router, prefix="/api/v1", tags=["predictions"])
-@app.get("/health")
-async def health_check():
-    """Health check endpoint"""
-    db_health = await get_db_health()
-    alert_health = await alert_service.health_check() if alert_service else {"status": "not_initialized"}
-    overall_health = db_health and alert_health.get("status") == "healthy"
-    return {
-        "status": "healthy" if overall_health else "unhealthy",
-        "service": "forecasting-service",
-        "version": "1.0.0",
-        "database": "connected" if db_health else "disconnected",
-        "alert_service": alert_health,
-        "timestamp": structlog.get_logger().info("Health check requested")
-    }
-@app.get("/metrics")
-async def get_metrics():
-    """Metrics endpoint for Prometheus"""
-    return metrics_collector.get_metrics()
-@app.get("/alert-metrics")
-async def get_alert_metrics():
-    """Alert service metrics endpoint"""
-    if alert_service:
-        return alert_service.get_metrics()
-    return {"error": "Alert service not initialized"}
+service.add_router(forecasts.router, tags=["forecasts"])
+service.add_router(predictions.router, tags=["predictions"])
 if __name__ == "__main__":
     import uvicorn
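
The remainder of the __main__ block is not shown in this hunk. A conventional entry point for such a service might look like the sketch below; the host and port values are assumptions, not taken from the commit.

if __name__ == "__main__":
    import uvicorn
    # Illustrative only: host and port are assumed defaults, not values from this diff
    uvicorn.run(app, host="0.0.0.0", port=8000)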