"""
API Gateway - Central entry point for all microservices

Handles routing, authentication, rate limiting, and cross-cutting concerns
"""

import asyncio
import time
from typing import Dict, Any

import httpx
import redis.asyncio as aioredis
import structlog
from fastapi import FastAPI, Request, HTTPException, Depends, WebSocket, WebSocketDisconnect
from fastapi.middleware.cors import CORSMiddleware
from fastapi.responses import JSONResponse, StreamingResponse

from app.core.config import settings
from app.core.service_discovery import ServiceDiscovery
from app.middleware.auth import AuthMiddleware
from app.middleware.logging import LoggingMiddleware
from app.middleware.rate_limit import RateLimitMiddleware
from app.routes import auth, tenant, notification, nominatim, user
from shared.monitoring.logging import setup_logging
from shared.monitoring.metrics import MetricsCollector

# Setup logging
setup_logging("gateway", settings.LOG_LEVEL)
logger = structlog.get_logger()

# Create FastAPI app
app = FastAPI(
    title="Bakery Forecasting API Gateway",
    description="Central API Gateway for bakery forecasting microservices",
    version="1.0.0",
    docs_url="/docs",
    redoc_url="/redoc"
)

# Initialize metrics collector
metrics_collector = MetricsCollector("gateway")

# Service discovery
service_discovery = ServiceDiscovery()

# Redis client for SSE streaming
redis_client = None

# CORS middleware - Add first
app.add_middleware(
    CORSMiddleware,
    allow_origins=settings.CORS_ORIGINS_LIST,
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

# Custom middleware - Add in correct order (outer to inner)
app.add_middleware(LoggingMiddleware)
app.add_middleware(RateLimitMiddleware, calls_per_minute=300)
app.add_middleware(AuthMiddleware)
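# Note: each add_middleware call wraps the existing stack, so the middleware added
# last (AuthMiddleware) is the outermost layer and handles each request first,
# followed by rate limiting, logging, and finally CORS closest to the routes.
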
# Include routers
app.include_router(auth.router, prefix="/api/v1/auth", tags=["authentication"])
app.include_router(user.router, prefix="/api/v1/users", tags=["users"])
app.include_router(tenant.router, prefix="/api/v1/tenants", tags=["tenants"])
app.include_router(notification.router, prefix="/api/v1/notifications", tags=["notifications"])
app.include_router(nominatim.router, prefix="/api/v1/nominatim", tags=["location"])


@app.on_event("startup")
async def startup_event():
    """Application startup"""
    global redis_client

    logger.info("Starting API Gateway")

    # Connect to Redis for SSE streaming
    try:
        redis_client = aioredis.from_url(settings.REDIS_URL)
        logger.info("Connected to Redis for SSE streaming")
    except Exception as e:
        logger.error(f"Failed to connect to Redis: {e}")

    # Register gateway metrics
    metrics_collector.register_counter(
        "gateway_auth_requests_total",
        "Total authentication requests"
    )
    metrics_collector.register_counter(
        "gateway_auth_responses_total",
        "Total authentication responses"
    )
    metrics_collector.register_counter(
        "gateway_auth_errors_total",
        "Total authentication errors"
    )
    metrics_collector.register_histogram(
        "gateway_request_duration_seconds",
        "Request duration in seconds"
    )

    logger.info("Metrics registered successfully")

    metrics_collector.start_metrics_server(8080)

    logger.info("API Gateway started successfully")


@app.on_event("shutdown")
async def shutdown_event():
    """Application shutdown"""
    global redis_client

    logger.info("Shutting down API Gateway")

    # Close Redis connection
    if redis_client:
        await redis_client.close()

    # Clean up service discovery
    # await service_discovery.cleanup()

    logger.info("API Gateway shutdown complete")


@app.get("/health")
async def health_check():
    """Health check endpoint"""
    return {
        "status": "healthy",
        "service": "api-gateway",
        "version": "1.0.0",
        "timestamp": time.time()
    }


@app.get("/metrics")
async def metrics():
    """Metrics endpoint for monitoring"""
    return {"metrics": "enabled"}


# ================================================================
# SERVER-SENT EVENTS (SSE) ENDPOINT
# ================================================================

@app.get("/api/events")
async def events_stream(request: Request, token: str):
    """Server-Sent Events stream for real-time notifications"""
    global redis_client

    if not redis_client:
        raise HTTPException(status_code=503, detail="SSE service unavailable")

    # Extract tenant_id from JWT token (basic extraction - you might want proper JWT validation)
    try:
        import jwt
        import json as json_lib

        # Decode JWT without verification for tenant_id (in production, verify the token)
        payload = jwt.decode(token, options={"verify_signature": False})
        tenant_id = payload.get('tenant_id')
        user_id = payload.get('user_id')

        if not tenant_id:
            raise HTTPException(status_code=401, detail="Invalid token: missing tenant_id")

    except HTTPException:
        # Preserve the more specific 401 raised above instead of masking it below
        raise
    except Exception as e:
        logger.error(f"Token decode error: {e}")
        raise HTTPException(status_code=401, detail="Invalid token")
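
    # Note: the decode above skips signature verification. If the gateway shares
    # the signing key with the auth service (an assumption; the setting name below
    # is illustrative), a verified decode would look roughly like:
    #     payload = jwt.decode(token, settings.JWT_SECRET_KEY, algorithms=["HS256"])
    # which raises jwt.InvalidTokenError for tampered or expired tokens.
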
    logger.info(f"SSE connection established for tenant: {tenant_id}")

    async def event_generator():
        """Generate server-sent events from Redis pub/sub"""
        pubsub = None
        try:
            # Subscribe to tenant-specific alert channel
            pubsub = redis_client.pubsub()
            channel_name = f"alerts:{tenant_id}"
            await pubsub.subscribe(channel_name)

            # Send initial connection event
            yield f"event: connection\n"
            yield f"data: {json_lib.dumps({'type': 'connected', 'message': 'SSE connection established', 'timestamp': time.time()})}\n\n"

            heartbeat_counter = 0

            while True:
                # Check if client has disconnected
                if await request.is_disconnected():
                    logger.info(f"SSE client disconnected for tenant: {tenant_id}")
                    break

                try:
                    # Get message from Redis with timeout
                    message = await asyncio.wait_for(
                        pubsub.get_message(ignore_subscribe_messages=True), timeout=10.0
                    )

                    if message and message['type'] == 'message':
                        # Forward the alert/notification from Redis
                        alert_data = json_lib.loads(message['data'])

                        # Determine event type based on alert data
                        event_type = "notification"
                        if alert_data.get('item_type') == 'alert':
                            if alert_data.get('severity') in ['high', 'urgent']:
                                event_type = "inventory_alert"
                            else:
                                event_type = "notification"
                        elif alert_data.get('item_type') == 'recommendation':
                            event_type = "notification"

                        yield f"event: {event_type}\n"
                        yield f"data: {json_lib.dumps(alert_data)}\n\n"

                        logger.debug(f"SSE message sent to tenant {tenant_id}: {alert_data.get('title')}")

                except asyncio.TimeoutError:
                    # Send heartbeat every 10 timeouts (100 seconds)
                    heartbeat_counter += 1
                    if heartbeat_counter >= 10:
                        yield f"event: heartbeat\n"
                        yield f"data: {json_lib.dumps({'type': 'heartbeat', 'timestamp': time.time()})}\n\n"
                        heartbeat_counter = 0

        except asyncio.CancelledError:
            logger.info(f"SSE connection cancelled for tenant: {tenant_id}")
        except Exception as e:
            logger.error(f"SSE error for tenant {tenant_id}: {e}")
        finally:
            if pubsub:
                await pubsub.unsubscribe()
                await pubsub.close()
            logger.info(f"SSE connection closed for tenant: {tenant_id}")

    return StreamingResponse(
        event_generator(),
        media_type="text/event-stream",
        headers={
            "Cache-Control": "no-cache",
            "Connection": "keep-alive",
            "Access-Control-Allow-Origin": "*",
            "Access-Control-Allow-Headers": "Cache-Control",
        }
    )


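# ----------------------------------------------------------------
# Illustrative sketch only (not called by the gateway): how a producer might
# publish to the alerts:{tenant_id} channel that /api/events subscribes to.
# The stream above only inspects `item_type`, `severity`, and `title`; the
# remaining payload fields are assumptions for demonstration. Browsers would
# consume the stream with `new EventSource("/api/events?token=...")`.
# ----------------------------------------------------------------
async def _example_publish_alert(tenant_id: str) -> None:
    """Publish a sample alert to the tenant's pub/sub channel."""
    import json

    client = aioredis.from_url(settings.REDIS_URL)
    payload = {
        "item_type": "alert",        # emitted as "inventory_alert" when severity is high/urgent
        "severity": "high",
        "title": "Low stock: rye flour",
        "timestamp": time.time(),
    }
    await client.publish(f"alerts:{tenant_id}", json.dumps(payload))
    await client.close()

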
# ================================================================
# WEBSOCKET ROUTING FOR TRAINING SERVICE
# ================================================================

@app.websocket("/api/v1/ws/tenants/{tenant_id}/training/jobs/{job_id}/live")
async def websocket_training_progress(websocket: WebSocket, tenant_id: str, job_id: str):
    """WebSocket proxy that forwards connections directly to the training service"""
    await websocket.accept()

    # Get token from query params
    token = websocket.query_params.get("token")
    if not token:
        logger.warning(f"WebSocket connection rejected - missing token for job {job_id}")
        await websocket.close(code=1008, reason="Authentication token required")
        return

    logger.info(f"Proxying WebSocket connection to training service for job {job_id}, tenant {tenant_id}")

    # Build WebSocket URL to training service
    training_service_base = settings.TRAINING_SERVICE_URL.rstrip('/')
    training_ws_url = training_service_base.replace('http://', 'ws://').replace('https://', 'wss://')
    training_ws_url = f"{training_ws_url}/api/v1/ws/tenants/{tenant_id}/training/jobs/{job_id}/live?token={token}"

    try:
        # Connect to training service WebSocket
        import websockets

        async with websockets.connect(training_ws_url) as training_ws:
            logger.info(f"Connected to training service WebSocket for job {job_id}")

            async def forward_to_training():
                """Forward messages from frontend to training service"""
                try:
                    async for message in websocket.iter_text():
                        await training_ws.send(message)
                except Exception as e:
                    logger.error(f"Error forwarding to training service: {e}")

            async def forward_to_frontend():
                """Forward messages from training service to frontend"""
                try:
                    async for message in training_ws:
                        await websocket.send_text(message)
                except Exception as e:
                    logger.error(f"Error forwarding to frontend: {e}")

            # Run both forwarding tasks concurrently
            await asyncio.gather(
                forward_to_training(),
                forward_to_frontend(),
                return_exceptions=True
            )

    except Exception as e:
        logger.error(f"WebSocket proxy error for job {job_id}: {e}")
        try:
            await websocket.close(code=1011, reason="Training service connection failed")
        except Exception:
            pass
    finally:
        logger.info(f"WebSocket proxy closed for job {job_id}")


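# ----------------------------------------------------------------
# Illustrative sketch only (not called by the gateway): a minimal client for
# the training-progress proxy above. The gateway URL, tenant_id, job_id, and
# token are placeholders supplied by the caller.
# ----------------------------------------------------------------
async def _example_watch_training(gateway_ws_base: str, tenant_id: str, job_id: str, token: str) -> None:
    """Connect through the gateway proxy and print each forwarded message."""
    import websockets  # same library the proxy endpoint uses

    url = f"{gateway_ws_base}/api/v1/ws/tenants/{tenant_id}/training/jobs/{job_id}/live?token={token}"
    async with websockets.connect(url) as ws:
        async for message in ws:
            print(message)  # messages are forwarded verbatim from the training service

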
if __name__ == "__main__":
    import uvicorn

    uvicorn.run(app, host="0.0.0.0", port=8000)