"""Main FastAPI application for AI Insights Service."""

from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
from contextlib import asynccontextmanager
import structlog
import os

from app.core.config import settings
from app.core.database import init_db, close_db
from app.api import insights
from shared.monitoring.logging import setup_logging
from shared.monitoring.metrics import MetricsCollector, add_metrics_middleware
from shared.monitoring.system_metrics import SystemMetricsCollector

# OpenTelemetry imports
from opentelemetry import trace
from opentelemetry.sdk.trace import TracerProvider
from opentelemetry.sdk.trace.export import BatchSpanProcessor
from opentelemetry.exporter.otlp.proto.grpc.trace_exporter import OTLPSpanExporter
from opentelemetry.instrumentation.fastapi import FastAPIInstrumentor
from opentelemetry.instrumentation.httpx import HTTPXClientInstrumentor
from opentelemetry.instrumentation.redis import RedisInstrumentor
from opentelemetry.instrumentation.sqlalchemy import SQLAlchemyInstrumentor
from opentelemetry.sdk.resources import Resource

# Configure OpenTelemetry tracing
def setup_tracing(service_name: str = "ai-insights"):
    """Initialize OpenTelemetry tracing with an OTLP exporter (SigNoz collector by default)."""
    resource = Resource.create({"service.name": service_name})

    otlp_exporter = OTLPSpanExporter(
        endpoint=os.getenv("OTEL_EXPORTER_OTLP_ENDPOINT", "http://signoz-otel-collector.bakery-ia.svc.cluster.local:4317"),
        insecure=True
    )

    provider = TracerProvider(resource=resource)
    processor = BatchSpanProcessor(otlp_exporter)
    provider.add_span_processor(processor)
    trace.set_tracer_provider(provider)

    return provider


# Initialize tracing
tracer_provider = setup_tracing("ai-insights")

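# Illustrative sketch (not part of the original wiring): once the provider is
# registered globally, any module can create manual spans via the standard
# OpenTelemetry API; the span name "generate-insight" is a hypothetical placeholder:
#
#     tracer = trace.get_tracer(__name__)
#     with tracer.start_as_current_span("generate-insight"):
#         ...  # business logic to trace
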
# Setup logging
setup_logging("ai-insights", getattr(settings, 'LOG_LEVEL', 'INFO'))
logger = structlog.get_logger()

# Setup OpenTelemetry logging export if enabled
logger.info(f"OTEL_LOGS_EXPORTER env var: {os.getenv('OTEL_LOGS_EXPORTER', 'not set')}")
if os.getenv("OTEL_LOGS_EXPORTER", "").lower() == "otlp":
    try:
        logger.info("Attempting to setup OpenTelemetry logging")
        from shared.monitoring.logs_exporter import setup_otel_logging
        result = setup_otel_logging("ai-insights", settings.SERVICE_VERSION)
        if result:
            logger.info("OpenTelemetry logs export enabled for ai-insights")
        else:
            logger.warning("OpenTelemetry logs export setup returned None")
    except Exception as e:
        logger.error(f"Failed to setup OpenTelemetry logs export: {e}", exc_info=True)
else:
    logger.info("OpenTelemetry logs export disabled - OTEL_LOGS_EXPORTER not set to otlp")
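
# Operational note: log export is opt-in. Setting OTEL_LOGS_EXPORTER=otlp in the
# deployment environment (e.g. the Kubernetes manifest) activates the branch above;
# with any other value the service keeps only the logging configured by setup_logging().

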
@asynccontextmanager
async def lifespan(app: FastAPI):
    """Lifespan event handler for startup and shutdown."""
    # Startup
    logger.info("Starting AI Insights Service", service=settings.SERVICE_NAME, version=settings.SERVICE_VERSION)
    await init_db()
    logger.info("Database initialized")

    # Initialize system metrics collection
    system_metrics = SystemMetricsCollector("ai-insights")
    logger.info("System metrics collection started")

    # Note: Metrics are exported via OpenTelemetry OTLP to SigNoz - no metrics server needed
    logger.info("Metrics export configured via OpenTelemetry OTLP")

    yield

    # Shutdown
    logger.info("Shutting down AI Insights Service")
    await close_db()
    logger.info("Database connections closed")


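# Note: SystemMetricsCollector is assumed to start its collection work when it is
# constructed, since the instance created in lifespan() is never referenced again.
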
# Create FastAPI app
app = FastAPI(
    title="AI Insights Service",
    description="Intelligent insights and recommendations for bakery operations",
    version=settings.SERVICE_VERSION,
    lifespan=lifespan
)

# Instrument FastAPI with OpenTelemetry
FastAPIInstrumentor.instrument_app(app)

# Instrument httpx for outgoing requests
HTTPXClientInstrumentor().instrument()

# Instrument Redis
RedisInstrumentor().instrument()

# Instrument SQLAlchemy
SQLAlchemyInstrumentor().instrument()

# Initialize metrics collector
metrics_collector = MetricsCollector("ai-insights")

# Add metrics middleware to track HTTP requests
add_metrics_middleware(app, metrics_collector)
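
# Note: MetricsCollector and add_metrics_middleware come from the in-house
# shared.monitoring.metrics module; the middleware records per-request HTTP metrics,
# which are then pushed over OTLP rather than scraped (see the note above the
# __main__ block at the bottom of this file).
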
# CORS middleware
app.add_middleware(
    CORSMiddleware,
    allow_origins=settings.ALLOWED_ORIGINS,
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

# Include routers
app.include_router(
    insights.router,
    prefix=settings.API_V1_PREFIX,
    tags=["insights"]
)


@app.get("/")
|
|
async def root():
|
|
"""Root endpoint."""
|
|
return {
|
|
"service": settings.SERVICE_NAME,
|
|
"version": settings.SERVICE_VERSION,
|
|
"status": "running"
|
|
}
|
|
|
|
|
|
@app.get("/health")
|
|
async def health_check():
|
|
"""Health check endpoint."""
|
|
return {
|
|
"status": "healthy",
|
|
"service": settings.SERVICE_NAME,
|
|
"version": settings.SERVICE_VERSION
|
|
}
|
|
|
|
|
|
# Note: Metrics are exported via OpenTelemetry OTLP to SigNoz
# The /metrics endpoint is not needed as metrics are pushed automatically
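
# Local run sketch (assumes dependencies are installed and the database/collector
# endpoints in app.core.config are reachable); equivalent to the block below:
#
#     uvicorn app.main:app --host 0.0.0.0 --port 8000 --reload
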
if __name__ == "__main__":
    import uvicorn

    uvicorn.run(
        "app.main:app",
        host="0.0.0.0",
        port=8000,
        reload=True,
        log_level=getattr(settings, 'LOG_LEVEL', 'INFO').lower()
    )