# bakery-ia/services/ai_insights/app/main.py
"""Main FastAPI application for AI Insights Service."""
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
from contextlib import asynccontextmanager
import structlog
import os

from app.core.config import settings
from app.core.database import init_db, close_db
from app.api import insights

from shared.monitoring.logging import setup_logging
from shared.monitoring.metrics import MetricsCollector, add_metrics_middleware
from shared.monitoring.system_metrics import SystemMetricsCollector

# OpenTelemetry imports
from opentelemetry import trace
from opentelemetry.sdk.trace import TracerProvider
from opentelemetry.sdk.trace.export import BatchSpanProcessor
from opentelemetry.exporter.otlp.proto.grpc.trace_exporter import OTLPSpanExporter
from opentelemetry.instrumentation.fastapi import FastAPIInstrumentor
from opentelemetry.instrumentation.httpx import HTTPXClientInstrumentor
from opentelemetry.instrumentation.redis import RedisInstrumentor
from opentelemetry.instrumentation.sqlalchemy import SQLAlchemyInstrumentor
from opentelemetry.sdk.resources import Resource


# Configure OpenTelemetry tracing
def setup_tracing(service_name: str = "ai-insights"):
    """Initialize OpenTelemetry tracing with an OTLP exporter (SigNoz collector by default)."""
    resource = Resource.create({"service.name": service_name})
    otlp_exporter = OTLPSpanExporter(
        endpoint=os.getenv("OTEL_EXPORTER_OTLP_ENDPOINT", "http://signoz-otel-collector.bakery-ia.svc.cluster.local:4317"),
        insecure=True
    )

    provider = TracerProvider(resource=resource)
    processor = BatchSpanProcessor(otlp_exporter)
    provider.add_span_processor(processor)
    trace.set_tracer_provider(provider)
    return provider


# Initialize tracing
tracer_provider = setup_tracing("ai-insights")
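# Note: tracing is set up at import time so that the instrumentors registered further down
# (FastAPI, httpx, Redis, SQLAlchemy) pick up this global tracer provider by default.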
# Setup logging
setup_logging("ai-insights", getattr(settings, 'LOG_LEVEL', 'INFO'))
logger = structlog.get_logger()

# Setup OpenTelemetry logging export if enabled
logger.info(f"OTEL_LOGS_EXPORTER env var: {os.getenv('OTEL_LOGS_EXPORTER', 'not set')}")
if os.getenv("OTEL_LOGS_EXPORTER", "").lower() == "otlp":
    try:
        logger.info("Attempting to setup OpenTelemetry logging")
        from shared.monitoring.logs_exporter import setup_otel_logging
        result = setup_otel_logging("ai-insights", settings.SERVICE_VERSION)
        if result:
            logger.info("OpenTelemetry logs export enabled for ai-insights")
        else:
            logger.warning("OpenTelemetry logs export setup returned None")
    except Exception as e:
        logger.error(f"Failed to setup OpenTelemetry logs export: {e}", exc_info=True)
else:
    logger.info("OpenTelemetry logs export disabled - OTEL_LOGS_EXPORTER not set to otlp")


@asynccontextmanager
async def lifespan(app: FastAPI):
    """Lifespan event handler for startup and shutdown."""
    # Startup
    logger.info("Starting AI Insights Service", service=settings.SERVICE_NAME, version=settings.SERVICE_VERSION)
    await init_db()
    logger.info("Database initialized")

    # Initialize system metrics collection
    system_metrics = SystemMetricsCollector("ai-insights")
    logger.info("System metrics collection started")

    # Note: Metrics are exported via OpenTelemetry OTLP to SigNoz - no metrics server needed
    logger.info("Metrics export configured via OpenTelemetry OTLP")

    yield

    # Shutdown
    logger.info("Shutting down AI Insights Service")
    await close_db()
    logger.info("Database connections closed")


# Create FastAPI app
app = FastAPI(
    title="AI Insights Service",
    description="Intelligent insights and recommendations for bakery operations",
    version=settings.SERVICE_VERSION,
    lifespan=lifespan
)

# Instrument FastAPI with OpenTelemetry
FastAPIInstrumentor.instrument_app(app)
# Instrument httpx for outgoing requests
HTTPXClientInstrumentor().instrument()
# Instrument Redis
RedisInstrumentor().instrument()
# Instrument SQLAlchemy
SQLAlchemyInstrumentor().instrument()
# Initialize metrics collector
metrics_collector = MetricsCollector("ai-insights")
# Add metrics middleware to track HTTP requests
add_metrics_middleware(app, metrics_collector)
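# add_metrics_middleware is assumed to install an HTTP middleware that records request
# counts and latencies on metrics_collector, exported via OTLP like the rest of the metrics.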
# CORS middleware
app.add_middleware(
    CORSMiddleware,
    allow_origins=settings.ALLOWED_ORIGINS,
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

# Include routers
app.include_router(
    insights.router,
    prefix=settings.API_V1_PREFIX,
    tags=["insights"]
)
@app.get("/")
async def root():
"""Root endpoint."""
return {
"service": settings.SERVICE_NAME,
"version": settings.SERVICE_VERSION,
"status": "running"
}
@app.get("/health")
async def health_check():
"""Health check endpoint."""
return {
"status": "healthy",
"service": settings.SERVICE_NAME,
"version": settings.SERVICE_VERSION
}
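
# /health is presumably what the Kubernetes probes for this deployment point at; it returns
# a static payload and does not check database connectivity. For a locally running instance
# on the default port, e.g.: curl http://localhost:8000/health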
# Note: Metrics are exported via OpenTelemetry OTLP to SigNoz
# The /metrics endpoint is not needed as metrics are pushed automatically
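
# The block below is a local development entrypoint (note reload=True); in the cluster the
# service is presumably started via the container's command rather than this __main__ guard.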

if __name__ == "__main__":
    import uvicorn

    uvicorn.run(
        "app.main:app",
        host="0.0.0.0",
        port=8000,
        reload=True,
        log_level=settings.LOG_LEVEL.lower()
    )