# ================================================================
# services/forecasting/app/core/database.py
# ================================================================
"""
Database configuration for forecasting service
"""

import structlog
from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession, async_sessionmaker
from sqlalchemy import text
from typing import AsyncGenerator

from app.core.config import settings
from shared.database.base import Base, DatabaseManager

logger = structlog.get_logger()

# Create async engine
async_engine = create_async_engine(
    settings.DATABASE_URL,
    echo=settings.DEBUG,
    pool_size=10,
    max_overflow=20,
    pool_pre_ping=True,   # validate connections before handing them out
    pool_recycle=3600     # recycle connections older than one hour
)

# Create async session factory
AsyncSessionLocal = async_sessionmaker(
    bind=async_engine,
    class_=AsyncSession,
    expire_on_commit=False  # keep ORM objects usable after commit (no implicit refresh)
)


async def get_db() -> AsyncGenerator[AsyncSession, None]:
    """Yield a database session, rolling back and logging on error."""
    async with AsyncSessionLocal() as session:
        try:
            yield session
        except Exception as e:
            await session.rollback()
            logger.error("Database session error", error=str(e))
            raise
        finally:
            await session.close()
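
# Example usage (illustrative sketch, not part of this module): get_db is an
# async-generator dependency, so assuming the service exposes FastAPI routes
# (not confirmed here), it could be wired in with Depends. The router, path,
# and ForecastBatch lookup below are assumptions for illustration only.
#
#     from fastapi import APIRouter, Depends
#
#     router = APIRouter()
#
#     @router.get("/forecast-batches/{batch_id}")
#     async def read_forecast_batch(batch_id: int, db: AsyncSession = Depends(get_db)):
#         return await db.get(ForecastBatch, batch_id)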


async def init_database():
    """Initialize database tables"""
    try:
        async with async_engine.begin() as conn:
            # Import all models to ensure they are registered with Base.metadata
            from app.models.forecast import ForecastBatch, Forecast
            from app.models.prediction import PredictionBatch, Prediction

            # Create all tables
            await conn.run_sync(Base.metadata.create_all)

        logger.info("Forecasting database initialized successfully")
    except Exception as e:
        logger.error("Failed to initialize forecasting database", error=str(e))
        raise
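
# Example startup wiring (illustrative sketch): init_database() is meant to run
# once at service startup. Assuming a FastAPI app object (not defined in this
# module), a lifespan handler could call it and dispose of the engine on shutdown:
#
#     from contextlib import asynccontextmanager
#     from fastapi import FastAPI
#
#     @asynccontextmanager
#     async def lifespan(app: FastAPI):
#         await init_database()
#         yield
#         await async_engine.dispose()
#
#     app = FastAPI(lifespan=lifespan)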


async def get_db_health() -> bool:
    """Check database health"""
    try:
        async with async_engine.begin() as conn:
            await conn.execute(text("SELECT 1"))
        return True
    except Exception as e:
        logger.error("Database health check failed", error=str(e))
        return False
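
# Example health-check wiring (illustrative sketch): a readiness endpoint could
# surface this check, e.g. on an assumed FastAPI router:
#
#     @router.get("/health")
#     async def health():
#         db_ok = await get_db_health()
#         return {"database": "ok" if db_ok else "unavailable"}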


async def get_connection_pool_stats() -> dict:
    """
    Get current connection pool statistics for monitoring.

    Returns:
        Dictionary with pool statistics including usage and capacity
    """
    try:
        pool = async_engine.pool

        # Get pool stats; the hardcoded capacity figures mirror the
        # pool_size=10 / max_overflow=20 passed to create_async_engine above
        stats = {
            "pool_size": pool.size(),
            "checked_in_connections": pool.checkedin(),
            "checked_out_connections": pool.checkedout(),
            "overflow_connections": pool.overflow(),
            "total_connections": pool.size() + pool.overflow(),
            "max_capacity": 10 + 20,  # pool_size + max_overflow
            "usage_percentage": round(((pool.size() + pool.overflow()) / 30) * 100, 2)
        }

        # Add health status
        if stats["usage_percentage"] > 90:
            stats["status"] = "critical"
            stats["message"] = "Connection pool near capacity"
        elif stats["usage_percentage"] > 80:
            stats["status"] = "warning"
            stats["message"] = "Connection pool usage high"
        else:
            stats["status"] = "healthy"
            stats["message"] = "Connection pool healthy"

        return stats
    except Exception as e:
        logger.error("Failed to get connection pool stats", error=str(e))
        return {
            "status": "error",
            "message": f"Failed to get pool stats: {str(e)}"
        }
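
# Example monitoring hook (illustrative sketch): the stats dict could be exposed
# on a metrics endpoint or checked periodically, e.g.:
#
#     stats = await get_connection_pool_stats()
#     if stats["status"] != "healthy":
#         logger.warning("Connection pool pressure", **stats)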


# Database manager instance for service_base compatibility
database_manager = DatabaseManager(
    database_url=settings.DATABASE_URL,
    service_name="forecasting-service",
    pool_size=10,
    max_overflow=20,
    echo=settings.DEBUG
)