Start integrating the onboarding flow with backend 3
@@ -7,9 +7,10 @@ import asyncio
import structlog
from fastapi import FastAPI, Request, HTTPException, Depends, WebSocket, WebSocketDisconnect
from fastapi.middleware.cors import CORSMiddleware
from fastapi.responses import JSONResponse
from fastapi.responses import JSONResponse, StreamingResponse
import httpx
import time
import redis.asyncio as aioredis
from typing import Dict, Any

from app.core.config import settings
@@ -40,6 +41,9 @@ metrics_collector = MetricsCollector("gateway")
# Service discovery
service_discovery = ServiceDiscovery()

# Redis client for SSE streaming
redis_client = None

# CORS middleware - Add first
app.add_middleware(
    CORSMiddleware,
@@ -64,8 +68,16 @@ app.include_router(nominatim.router, prefix="/api/v1/nominatim", tags=["location
@app.on_event("startup")
async def startup_event():
    """Application startup"""
    global redis_client

    logger.info("Starting API Gateway")

    # Connect to Redis for SSE streaming
    try:
        redis_client = aioredis.from_url(settings.REDIS_URL)
        logger.info("Connected to Redis for SSE streaming")
    except Exception as e:
        logger.error(f"Failed to connect to Redis: {e}")

    metrics_collector.register_counter(
        "gateway_auth_requests_total",
@@ -94,8 +106,14 @@ async def startup_event():
@app.on_event("shutdown")
async def shutdown_event():
    """Application shutdown"""
    global redis_client

    logger.info("Shutting down API Gateway")

    # Close Redis connection
    if redis_client:
        await redis_client.close()

    # Clean up service discovery
    # await service_discovery.cleanup()
@@ -116,6 +134,111 @@ async def metrics():
    """Metrics endpoint for monitoring"""
    return {"metrics": "enabled"}

# ================================================================
# SERVER-SENT EVENTS (SSE) ENDPOINT
# ================================================================

@app.get("/api/events")
async def events_stream(request: Request, token: str):
    """Server-Sent Events stream for real-time notifications"""
    global redis_client

    if not redis_client:
        raise HTTPException(status_code=503, detail="SSE service unavailable")

    # Extract tenant_id from JWT token (basic extraction - you might want proper JWT validation)
    try:
        import jwt
        import base64
        import json as json_lib

        # Decode JWT without verification for tenant_id (in production, verify the token)
        payload = jwt.decode(token, options={"verify_signature": False})
        tenant_id = payload.get('tenant_id')
        user_id = payload.get('user_id')

        if not tenant_id:
            raise HTTPException(status_code=401, detail="Invalid token: missing tenant_id")

    except Exception as e:
        logger.error(f"Token decode error: {e}")
        raise HTTPException(status_code=401, detail="Invalid token")

    logger.info(f"SSE connection established for tenant: {tenant_id}")

    async def event_generator():
        """Generate server-sent events from Redis pub/sub"""
        pubsub = None
        try:
            # Subscribe to tenant-specific alert channel
            pubsub = redis_client.pubsub()
            channel_name = f"alerts:{tenant_id}"
            await pubsub.subscribe(channel_name)

            # Send initial connection event
            yield f"event: connection\n"
            yield f"data: {json_lib.dumps({'type': 'connected', 'message': 'SSE connection established', 'timestamp': time.time()})}\n\n"

            heartbeat_counter = 0

            while True:
                # Check if client has disconnected
                if await request.is_disconnected():
                    logger.info(f"SSE client disconnected for tenant: {tenant_id}")
                    break

                try:
                    # Get message from Redis with timeout
                    message = await asyncio.wait_for(pubsub.get_message(ignore_subscribe_messages=True), timeout=10.0)

                    if message and message['type'] == 'message':
                        # Forward the alert/notification from Redis
                        alert_data = json_lib.loads(message['data'])

                        # Determine event type based on alert data
                        event_type = "notification"
                        if alert_data.get('item_type') == 'alert':
                            if alert_data.get('severity') in ['high', 'urgent']:
                                event_type = "inventory_alert"
                            else:
                                event_type = "notification"
                        elif alert_data.get('item_type') == 'recommendation':
                            event_type = "notification"

                        yield f"event: {event_type}\n"
                        yield f"data: {json_lib.dumps(alert_data)}\n\n"

                        logger.debug(f"SSE message sent to tenant {tenant_id}: {alert_data.get('title')}")

                except asyncio.TimeoutError:
                    # Send heartbeat every 10 timeouts (100 seconds)
                    heartbeat_counter += 1
                    if heartbeat_counter >= 10:
                        yield f"event: heartbeat\n"
                        yield f"data: {json_lib.dumps({'type': 'heartbeat', 'timestamp': time.time()})}\n\n"
                        heartbeat_counter = 0

        except asyncio.CancelledError:
            logger.info(f"SSE connection cancelled for tenant: {tenant_id}")
        except Exception as e:
            logger.error(f"SSE error for tenant {tenant_id}: {e}")
        finally:
            if pubsub:
                await pubsub.unsubscribe()
                await pubsub.close()
            logger.info(f"SSE connection closed for tenant: {tenant_id}")

    return StreamingResponse(
        event_generator(),
        media_type="text/event-stream",
        headers={
            "Cache-Control": "no-cache",
            "Connection": "keep-alive",
            "Access-Control-Allow-Origin": "*",
            "Access-Control-Allow-Headers": "Cache-Control",
        }
    )
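
For context on the contract above (not part of this diff): the endpoint only forwards whatever producers publish as JSON on the tenant-scoped alerts:{tenant_id} Redis channel, and maps it to an SSE event type from the item_type and severity fields. A minimal producer sketch under those assumptions; the payload shape beyond those keys and the Redis URL are invented for illustration:

import asyncio
import json
import time

import redis.asyncio as aioredis

REDIS_URL = "redis://localhost:6379/0"  # assumption: same value the gateway reads from settings.REDIS_URL

async def publish_alert(tenant_id: str) -> None:
    """Publish one alert to the channel the SSE endpoint subscribes to."""
    redis_client = aioredis.from_url(REDIS_URL)
    payload = {
        "item_type": "alert",           # inspected by the gateway to pick the event type
        "severity": "high",             # 'high'/'urgent' -> 'inventory_alert', otherwise 'notification'
        "title": "Stock below reorder point",
        "timestamp": time.time(),
    }
    # The SSE generator json-loads message['data'], so publish a JSON string
    await redis_client.publish(f"alerts:{tenant_id}", json.dumps(payload))
    await redis_client.close()

asyncio.run(publish_alert("tenant-123"))

A browser client could then subscribe with new EventSource('/api/events?token=<jwt>'), since the endpoint reads the JWT from the token query parameter.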

# ================================================================
# WEBSOCKET ROUTING FOR TRAINING SERVICE
# ================================================================

@@ -179,6 +179,32 @@ async def proxy_tenant_orders(request: Request, tenant_id: str = Path(...), path
    target_path = f"/api/v1/tenants/{tenant_id}/orders/{path}".rstrip("/")
    return await _proxy_to_orders_service(request, target_path, tenant_id=tenant_id)

# ================================================================
# TENANT-SCOPED SUPPLIER SERVICE ENDPOINTS
# ================================================================

@router.api_route("/{tenant_id}/suppliers/{path:path}", methods=["GET", "POST", "PUT", "DELETE", "OPTIONS"])
@router.api_route("/{tenant_id}/suppliers", methods=["GET", "POST", "PUT", "DELETE", "OPTIONS"])
async def proxy_tenant_suppliers(request: Request, tenant_id: str = Path(...), path: str = ""):
    """Proxy tenant supplier requests to suppliers service"""
    if path:
        target_path = f"/api/v1/tenants/{tenant_id}/suppliers/{path}".rstrip("/")
    else:
        target_path = f"/api/v1/tenants/{tenant_id}/suppliers"
    return await _proxy_to_suppliers_service(request, target_path, tenant_id=tenant_id)

@router.api_route("/{tenant_id}/purchase-orders{path:path}", methods=["GET", "POST", "PUT", "DELETE", "OPTIONS"])
async def proxy_tenant_purchase_orders(request: Request, tenant_id: str = Path(...), path: str = ""):
    """Proxy tenant purchase order requests to suppliers service"""
    target_path = f"/api/v1/tenants/{tenant_id}/purchase-orders{path}".rstrip("/")
    return await _proxy_to_suppliers_service(request, target_path, tenant_id=tenant_id)

@router.api_route("/{tenant_id}/deliveries{path:path}", methods=["GET", "POST", "PUT", "DELETE", "OPTIONS"])
async def proxy_tenant_deliveries(request: Request, tenant_id: str = Path(...), path: str = ""):
    """Proxy tenant delivery requests to suppliers service"""
    target_path = f"/api/v1/tenants/{tenant_id}/deliveries{path}".rstrip("/")
    return await _proxy_to_suppliers_service(request, target_path, tenant_id=tenant_id)

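One detail worth noting in the routes above: {path:path} is appended with no slash before it, so a single decorator matches both the bare collection (path is empty) and any sub-resource (path keeps its leading slash). A self-contained sketch, separate from the gateway code, that demonstrates this behaviour:

from fastapi import APIRouter, FastAPI, Path
from fastapi.testclient import TestClient

# Standalone illustration of the '{path:path}' pattern used above (not gateway code).
router = APIRouter()

@router.api_route("/{tenant_id}/purchase-orders{path:path}", methods=["GET"])
async def echo(tenant_id: str = Path(...), path: str = ""):
    # 'path' is '' for the bare collection and keeps its leading slash for sub-resources, e.g. '/42'
    return {"target_path": f"/api/v1/tenants/{tenant_id}/purchase-orders{path}".rstrip("/")}

app = FastAPI()
app.include_router(router)
client = TestClient(app)

print(client.get("/acme/purchase-orders").json())     # {'target_path': '/api/v1/tenants/acme/purchase-orders'}
print(client.get("/acme/purchase-orders/42").json())  # {'target_path': '/api/v1/tenants/acme/purchase-orders/42'}
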
# ================================================================
# PROXY HELPER FUNCTIONS
# ================================================================
@@ -219,6 +245,10 @@ async def _proxy_to_orders_service(request: Request, target_path: str, tenant_id
    """Proxy request to orders service"""
    return await _proxy_request(request, target_path, settings.ORDERS_SERVICE_URL, tenant_id=tenant_id)

async def _proxy_to_suppliers_service(request: Request, target_path: str, tenant_id: str = None):
    """Proxy request to suppliers service"""
    return await _proxy_request(request, target_path, settings.SUPPLIERS_SERVICE_URL, tenant_id=tenant_id)

async def _proxy_request(request: Request, target_path: str, service_url: str, tenant_id: str = None):
    """Generic proxy function with enhanced error handling"""

@@ -246,6 +276,15 @@ async def _proxy_request(request: Request, target_path: str, service_url: str, t
    if tenant_id:
        headers["X-Tenant-ID"] = tenant_id

    # Add user context headers if available
    if hasattr(request.state, 'user') and request.state.user:
        user = request.state.user
        headers["x-user-id"] = str(user.get('user_id', ''))
        headers["x-user-email"] = str(user.get('email', ''))
        headers["x-user-role"] = str(user.get('role', 'user'))
        headers["x-user-full-name"] = str(user.get('full_name', ''))
        headers["x-tenant-id"] = tenant_id or str(user.get('tenant_id', ''))

    # Get request body if present
    body = None
    if request.method in ["POST", "PUT", "PATCH"]:

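The hunk above cuts off before the forwarding itself, so the rest of _proxy_request is not shown in this diff. As a hedged sketch only (the helper name, timeout, and error handling below are assumptions, not the commit's code), the forwarding step with httpx, which this commit imports, might look like:

from typing import Optional

import httpx
from fastapi import Request
from fastapi.responses import JSONResponse

async def _forward_sketch(request: Request, service_url: str, target_path: str,
                          headers: dict, body: Optional[bytes]) -> JSONResponse:
    """Hypothetical forwarding step; not the actual _proxy_request body from this commit."""
    try:
        async with httpx.AsyncClient(timeout=30.0) as client:
            upstream = await client.request(
                method=request.method,
                url=f"{service_url}{target_path}",
                headers=headers,
                params=dict(request.query_params),
                content=body,
            )
        # Assumes downstream services reply with JSON
        return JSONResponse(status_code=upstream.status_code, content=upstream.json())
    except httpx.RequestError as exc:
        # Surface downstream connectivity failures as a gateway error
        return JSONResponse(status_code=502, content={"detail": f"Upstream error: {exc}"})
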
@@ -5,6 +5,7 @@ redis==5.0.1
pydantic==2.5.0
pydantic-settings==2.1.0
python-jose[cryptography]==3.3.0
PyJWT==2.8.0
python-multipart==0.0.6
prometheus-client==0.17.1
python-json-logger==2.0.4