"""Main FastAPI application for AI Insights Service."""
|
|
|
|
from fastapi import FastAPI, Response
|
|
from fastapi.middleware.cors import CORSMiddleware
|
|
from contextlib import asynccontextmanager
|
|
import structlog
|
|
import os
|
|
|
|
from app.core.config import settings
|
|
from app.core.database import init_db, close_db
|
|
from app.api import insights
|
|
from shared.monitoring.logging import setup_logging
|
|
from shared.monitoring.metrics import MetricsCollector, add_metrics_middleware
|
|
|
|
# OpenTelemetry imports
|
|
from opentelemetry import trace
|
|
from opentelemetry.sdk.trace import TracerProvider
|
|
from opentelemetry.sdk.trace.export import BatchSpanProcessor
|
|
from opentelemetry.exporter.otlp.proto.grpc.trace_exporter import OTLPSpanExporter
|
|
from opentelemetry.instrumentation.fastapi import FastAPIInstrumentor
|
|
from opentelemetry.instrumentation.httpx import HTTPXClientInstrumentor
|
|
from opentelemetry.instrumentation.redis import RedisInstrumentor
|
|
from opentelemetry.instrumentation.sqlalchemy import SQLAlchemyInstrumentor
|
|
from opentelemetry.sdk.resources import Resource
|
|
|
|
# Configure OpenTelemetry tracing
def setup_tracing(service_name: str = "ai-insights"):
    """Initialize OpenTelemetry tracing with an OTLP exporter for Jaeger."""
    resource = Resource.create({"service.name": service_name})

    otlp_exporter = OTLPSpanExporter(
        endpoint=os.getenv("OTEL_EXPORTER_OTLP_ENDPOINT", "http://otel-collector.monitoring.svc.cluster.local:4317"),
        insecure=True
    )

    provider = TracerProvider(resource=resource)
    processor = BatchSpanProcessor(otlp_exporter)
    provider.add_span_processor(processor)
    trace.set_tracer_provider(provider)

    return provider

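# Usage sketch (illustrative only, not wired into the service): handlers can add
# manual spans on top of the automatic FastAPI instrumentation below, e.g.
#
#     tracer = trace.get_tracer(__name__)
#     with tracer.start_as_current_span("generate-insight"):
#         ...  # the span name "generate-insight" is a made-up example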
# Initialize tracing
tracer_provider = setup_tracing("ai-insights")

# Setup logging
setup_logging("ai-insights", getattr(settings, 'LOG_LEVEL', 'INFO'))
logger = structlog.get_logger()


@asynccontextmanager
async def lifespan(app: FastAPI):
    """Lifespan event handler for startup and shutdown."""
    # Startup
    logger.info("Starting AI Insights Service", service=settings.SERVICE_NAME, version=settings.SERVICE_VERSION)
    await init_db()
    logger.info("Database initialized")

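    # Note: metrics_collector is created at module scope further down in this
    # file; that is fine here because the lifespan body only runs at server
    # startup, after the module has finished importing.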
    # Start metrics server
    metrics_collector.start_metrics_server(8080)
    logger.info("Metrics server started on port 8080")

    yield

    # Shutdown
    logger.info("Shutting down AI Insights Service")
    await close_db()
    logger.info("Database connections closed")


# Create FastAPI app
app = FastAPI(
    title="AI Insights Service",
    description="Intelligent insights and recommendations for bakery operations",
    version=settings.SERVICE_VERSION,
    lifespan=lifespan
)

# Instrument FastAPI with OpenTelemetry
FastAPIInstrumentor.instrument_app(app)

# Instrument httpx for outgoing requests
HTTPXClientInstrumentor().instrument()

# Instrument Redis
RedisInstrumentor().instrument()

# Instrument SQLAlchemy
SQLAlchemyInstrumentor().instrument()

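# Note (assumption): SQLAlchemyInstrumentor is called without an explicit
# engine, which instruments engines created after this call; init_db() is
# expected to create its engine during startup, so it should be covered.
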
# Initialize metrics collector
metrics_collector = MetricsCollector("ai-insights")

# Add metrics middleware to track HTTP requests
add_metrics_middleware(app, metrics_collector)

# CORS middleware
app.add_middleware(
    CORSMiddleware,
    allow_origins=settings.ALLOWED_ORIGINS,
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

# Include routers
app.include_router(
    insights.router,
    prefix=settings.API_V1_PREFIX,
    tags=["insights"]
)

@app.get("/")
async def root():
    """Root endpoint."""
    return {
        "service": settings.SERVICE_NAME,
        "version": settings.SERVICE_VERSION,
        "status": "running"
    }


@app.get("/health")
|
|
async def health_check():
|
|
"""Health check endpoint."""
|
|
return {
|
|
"status": "healthy",
|
|
"service": settings.SERVICE_NAME,
|
|
"version": settings.SERVICE_VERSION
|
|
}
|
|
|
|
|
|
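# Note: metrics are exposed in two places: the standalone metrics server started
# on port 8080 in the lifespan above, and this in-app /metrics route (assuming
# MetricsCollector serves the same registry in both cases).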
@app.get("/metrics")
|
|
async def metrics():
|
|
"""Prometheus metrics endpoint"""
|
|
return Response(
|
|
content=metrics_collector.get_metrics(),
|
|
media_type="text/plain; version=0.0.4; charset=utf-8"
|
|
)
|
|
|
|
|
|
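# Local development entrypoint: running this module directly starts uvicorn with
# auto-reload; in production the app is typically served by an external process
# manager rather than this block.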
if __name__ == "__main__":
|
|
import uvicorn
|
|
|
|
uvicorn.run(
|
|
"app.main:app",
|
|
host="0.0.0.0",
|
|
port=8000,
|
|
reload=True,
|
|
log_level=settings.LOG_LEVEL.lower()
|
|
)
|