Initial microservices setup from artifacts

Urtzi Alfaro
2025-07-17 13:09:24 +02:00
commit 347ff51bd7
200 changed files with 9559 additions and 0 deletions

0 gateway/app/__init__.py Normal file

52 gateway/app/core/config.py Normal file
@@ -0,0 +1,52 @@
"""
Gateway configuration
"""
import os
from typing import List, Dict
from pydantic_settings import BaseSettings
class Settings(BaseSettings):
"""Application settings"""
# Basic settings
APP_NAME: str = "Bakery Forecasting Gateway"
VERSION: str = "1.0.0"
DEBUG: bool = os.getenv("DEBUG", "False").lower() == "true"
LOG_LEVEL: str = os.getenv("LOG_LEVEL", "INFO")
# CORS settings
CORS_ORIGINS: List[str] = os.getenv("CORS_ORIGINS", "http://localhost:3000,http://localhost:3001").split(",")
# Service URLs
AUTH_SERVICE_URL: str = os.getenv("AUTH_SERVICE_URL", "http://auth-service:8000")
TRAINING_SERVICE_URL: str = os.getenv("TRAINING_SERVICE_URL", "http://training-service:8000")
FORECASTING_SERVICE_URL: str = os.getenv("FORECASTING_SERVICE_URL", "http://forecasting-service:8000")
DATA_SERVICE_URL: str = os.getenv("DATA_SERVICE_URL", "http://data-service:8000")
TENANT_SERVICE_URL: str = os.getenv("TENANT_SERVICE_URL", "http://tenant-service:8000")
NOTIFICATION_SERVICE_URL: str = os.getenv("NOTIFICATION_SERVICE_URL", "http://notification-service:8000")
# Redis settings
REDIS_URL: str = os.getenv("REDIS_URL", "redis://redis:6379/6")
# Rate limiting
RATE_LIMIT_REQUESTS: int = int(os.getenv("RATE_LIMIT_REQUESTS", "100"))
RATE_LIMIT_WINDOW: int = int(os.getenv("RATE_LIMIT_WINDOW", "60"))
# JWT settings
JWT_SECRET_KEY: str = os.getenv("JWT_SECRET_KEY", "your-secret-key-change-in-production")
JWT_ALGORITHM: str = os.getenv("JWT_ALGORITHM", "HS256")
@property
def SERVICES(self) -> Dict[str, str]:
"""Service registry"""
return {
"auth": self.AUTH_SERVICE_URL,
"training": self.TRAINING_SERVICE_URL,
"forecasting": self.FORECASTING_SERVICE_URL,
"data": self.DATA_SERVICE_URL,
"tenant": self.TENANT_SERVICE_URL,
"notification": self.NOTIFICATION_SERVICE_URL
}
settings = Settings()
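
Every value above can be overridden through the environment; the helper below is a hedged, illustrative sketch of resolving a downstream base URL from the SERVICES registry, assuming this module is importable as app.core.config as the gateway's other modules do.

# Hypothetical helper; relies only on the Settings.SERVICES property defined above.
from app.core.config import settings

def resolve_base_url(service_name: str) -> str:
    """Return the configured base URL for a known service, or raise KeyError."""
    try:
        return settings.SERVICES[service_name]
    except KeyError:
        raise KeyError(f"Unknown service: {service_name}") from None

# resolve_base_url("auth") -> "http://auth-service:8000" with the defaults above.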

122 gateway/app/core/service_discovery.py Normal file

@@ -0,0 +1,122 @@
"""
Service discovery for microservices
"""
import asyncio
import logging
from typing import Dict, List, Optional
import httpx
import redis.asyncio as redis
from datetime import datetime, timedelta
from app.core.config import settings
logger = logging.getLogger(__name__)
class ServiceDiscovery:
"""Service discovery and health checking"""
def __init__(self):
self.redis_client = redis.from_url(settings.REDIS_URL)
self.services = settings.SERVICES
self.health_check_interval = 30 # seconds
self.health_check_task = None
async def initialize(self):
"""Initialize service discovery"""
logger.info("Initializing service discovery")
# Start health check task
self.health_check_task = asyncio.create_task(self._health_check_loop())
# Initial health check
await self._check_all_services()
async def cleanup(self):
"""Cleanup service discovery"""
if self.health_check_task:
self.health_check_task.cancel()
try:
await self.health_check_task
except asyncio.CancelledError:
pass
await self.redis_client.close()
async def get_service_url(self, service_name: str) -> Optional[str]:
"""Get service URL"""
return self.services.get(service_name)
async def get_healthy_services(self) -> List[str]:
"""Get list of healthy services"""
healthy_services = []
for service_name in self.services:
is_healthy = await self._is_service_healthy(service_name)
if is_healthy:
healthy_services.append(service_name)
return healthy_services
async def _health_check_loop(self):
"""Continuous health check loop"""
while True:
try:
await self._check_all_services()
await asyncio.sleep(self.health_check_interval)
except asyncio.CancelledError:
break
except Exception as e:
logger.error(f"Health check error: {e}")
await asyncio.sleep(self.health_check_interval)
async def _check_all_services(self):
"""Check health of all services"""
for service_name, service_url in self.services.items():
try:
is_healthy = await self._check_service_health(service_url)
await self._update_service_health(service_name, is_healthy)
except Exception as e:
logger.error(f"Health check failed for {service_name}: {e}")
await self._update_service_health(service_name, False)
async def _check_service_health(self, service_url: str) -> bool:
"""Check individual service health"""
try:
async with httpx.AsyncClient(timeout=5.0) as client:
response = await client.get(f"{service_url}/health")
return response.status_code == 200
except Exception as e:
logger.warning(f"Service health check failed: {e}")
return False
async def _update_service_health(self, service_name: str, is_healthy: bool):
"""Update service health status in Redis"""
try:
key = f"service_health:{service_name}"
value = {
"healthy": is_healthy,
"last_check": datetime.utcnow().isoformat(),
"url": self.services[service_name]
}
await self.redis_client.hset(key, mapping=value)
await self.redis_client.expire(key, 300) # 5 minutes TTL
except Exception as e:
logger.error(f"Failed to update service health for {service_name}: {e}")
async def _is_service_healthy(self, service_name: str) -> bool:
"""Check if service is healthy from Redis cache"""
try:
key = f"service_health:{service_name}"
health_data = await self.redis_client.hgetall(key)
if not health_data:
return False
return health_data.get(b'healthy', b'false').decode() == 'True'
except Exception as e:
logger.error(f"Failed to check service health for {service_name}: {e}")
return False
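
A hedged sketch of how a gateway route might consult this registry before proxying; proxy_get is illustrative only and assumes the shared ServiceDiscovery instance created in app/main.py.

# Illustrative only: fail fast when the target service is unknown, then forward the call.
import httpx
from fastapi import HTTPException

async def proxy_get(discovery: "ServiceDiscovery", service_name: str, path: str) -> dict:
    base_url = await discovery.get_service_url(service_name)
    if base_url is None:
        raise HTTPException(status_code=502, detail=f"Unknown service: {service_name}")
    async with httpx.AsyncClient(timeout=10.0) as client:
        response = await client.get(f"{base_url}{path}")
        response.raise_for_status()
        return response.json()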

131 gateway/app/main.py Normal file

@@ -0,0 +1,131 @@
"""
API Gateway - Central entry point for all microservices
Handles routing, authentication, rate limiting, and cross-cutting concerns
"""
import asyncio
import logging
from fastapi import FastAPI, Request, HTTPException, Depends
from fastapi.middleware.cors import CORSMiddleware
from fastapi.responses import JSONResponse
import httpx
import time
from typing import Dict, Any
from app.core.config import settings
from app.core.service_discovery import ServiceDiscovery
from app.middleware.auth import auth_middleware
from app.middleware.logging import logging_middleware
from app.middleware.rate_limit import rate_limit_middleware
from app.routes import auth, training, forecasting, data, tenant, notification
from shared.monitoring.logging import setup_logging
from shared.monitoring.metrics import MetricsCollector
# Setup logging
setup_logging("gateway", settings.LOG_LEVEL)
logger = logging.getLogger(__name__)
# Create FastAPI app
app = FastAPI(
title="Bakery Forecasting API Gateway",
description="Central API Gateway for bakery forecasting microservices",
version="1.0.0",
docs_url="/docs",
redoc_url="/redoc"
)
# Initialize metrics collector
metrics_collector = MetricsCollector("gateway")
# Service discovery
service_discovery = ServiceDiscovery()
# CORS middleware
app.add_middleware(
CORSMiddleware,
allow_origins=settings.CORS_ORIGINS,
allow_credentials=True,
allow_methods=["*"],
allow_headers=["*"],
)
# Custom middleware (plain async functions, so register them via app.middleware("http");
# the middleware registered last is the outermost and runs first on each request)
app.middleware("http")(auth_middleware)
app.middleware("http")(logging_middleware)
app.middleware("http")(rate_limit_middleware)
# Include routers
app.include_router(auth.router, prefix="/api/v1/auth", tags=["authentication"])
app.include_router(training.router, prefix="/api/v1/training", tags=["training"])
app.include_router(forecasting.router, prefix="/api/v1/forecasting", tags=["forecasting"])
app.include_router(data.router, prefix="/api/v1/data", tags=["data"])
app.include_router(tenant.router, prefix="/api/v1/tenants", tags=["tenants"])
app.include_router(notification.router, prefix="/api/v1/notifications", tags=["notifications"])
@app.on_event("startup")
async def startup_event():
"""Application startup"""
logger.info("Starting API Gateway")
# Start metrics server
metrics_collector.start_metrics_server(8080)
# Initialize service discovery
await service_discovery.initialize()
logger.info("API Gateway started successfully")
@app.on_event("shutdown")
async def shutdown_event():
"""Application shutdown"""
logger.info("Shutting down API Gateway")
# Clean up service discovery
await service_discovery.cleanup()
logger.info("API Gateway shutdown complete")
@app.get("/health")
async def health_check():
"""Health check endpoint"""
healthy_services = await service_discovery.get_healthy_services()
return {
"status": "healthy",
"service": "gateway",
"version": "1.0.0",
"healthy_services": healthy_services,
"total_services": len(settings.SERVICES),
"timestamp": time.time()
}
@app.get("/metrics")
async def get_metrics():
"""Get basic metrics"""
return {
"service": "gateway",
"uptime": time.time() - app.state.start_time if hasattr(app.state, 'start_time') else 0,
"healthy_services": await service_discovery.get_healthy_services()
}
@app.exception_handler(HTTPException)
async def http_exception_handler(request: Request, exc: HTTPException):
"""Handle HTTP exceptions"""
logger.error(f"HTTP {exc.status_code}: {exc.detail}")
return JSONResponse(
status_code=exc.status_code,
content={"detail": exc.detail, "service": "gateway"}
)
@app.exception_handler(Exception)
async def general_exception_handler(request: Request, exc: Exception):
"""Handle general exceptions"""
logger.error(f"Unhandled exception: {exc}", exc_info=True)
return JSONResponse(
status_code=500,
content={"detail": "Internal server error", "service": "gateway"}
)
if __name__ == "__main__":
import uvicorn
uvicorn.run(app, host="0.0.0.0", port=8000)

101 gateway/app/middleware/auth.py Normal file

@@ -0,0 +1,101 @@
"""
Authentication middleware for gateway
"""
import logging
from fastapi import Request, HTTPException
from fastapi.responses import JSONResponse
import httpx
from typing import Optional
from app.core.config import settings
from shared.auth.jwt_handler import JWTHandler
logger = logging.getLogger(__name__)
# JWT handler
jwt_handler = JWTHandler(settings.JWT_SECRET_KEY, settings.JWT_ALGORITHM)
# Routes that don't require authentication
PUBLIC_ROUTES = [
"/health",
"/metrics",
"/docs",
"/redoc",
"/openapi.json",
"/api/v1/auth/login",
"/api/v1/auth/register",
"/api/v1/auth/refresh"
]
async def auth_middleware(request: Request, call_next):
"""Authentication middleware"""
# Check if route requires authentication
if _is_public_route(request.url.path):
return await call_next(request)
# Get token from header
token = _extract_token(request)
if not token:
return JSONResponse(
status_code=401,
content={"detail": "Authentication required"}
)
# Verify token
try:
# First try to verify token locally
payload = jwt_handler.verify_token(token)
if payload:
# Add user info to request state
request.state.user = payload
return await call_next(request)
else:
# Token invalid or expired, verify with auth service
user_info = await _verify_with_auth_service(token)
if user_info:
request.state.user = user_info
return await call_next(request)
else:
return JSONResponse(
status_code=401,
content={"detail": "Invalid or expired token"}
)
except Exception as e:
logger.error(f"Authentication error: {e}")
return JSONResponse(
status_code=401,
content={"detail": "Authentication failed"}
)
def _is_public_route(path: str) -> bool:
"""Check if route is public"""
return any(path.startswith(route) for route in PUBLIC_ROUTES)
def _extract_token(request: Request) -> Optional[str]:
"""Extract JWT token from request"""
auth_header = request.headers.get("Authorization")
if auth_header and auth_header.startswith("Bearer "):
return auth_header.split(" ")[1]
return None
async def _verify_with_auth_service(token: str) -> Optional[dict]:
"""Verify token with auth service"""
try:
async with httpx.AsyncClient(timeout=5.0) as client:
response = await client.post(
f"{settings.AUTH_SERVICE_URL}/verify",
headers={"Authorization": f"Bearer {token}"}
)
if response.status_code == 200:
return response.json()
else:
return None
except Exception as e:
logger.error(f"Auth service verification failed: {e}")
return None
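
Downstream handlers can read the verified payload from request.state; the route below is a hypothetical illustration (the /whoami path is not in PUBLIC_ROUTES, so the middleware would have populated request.state.user before it runs).

from fastapi import APIRouter, Request

router = APIRouter()

@router.get("/whoami")
async def whoami(request: Request):
    # Populated by auth_middleware for any non-public route
    user = getattr(request.state, "user", None)
    return {"authenticated": user is not None, "user": user}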

48 gateway/app/middleware/logging.py Normal file

@@ -0,0 +1,48 @@
"""
Logging middleware for gateway
"""
import logging
import time
from fastapi import Request
import json
logger = logging.getLogger(__name__)
async def logging_middleware(request: Request, call_next):
"""Logging middleware"""
start_time = time.time()
# Log request
logger.info(
f"Request: {request.method} {request.url.path}",
extra={
"method": request.method,
"url": request.url.path,
"query_params": str(request.query_params),
"client_host": request.client.host,
"user_agent": request.headers.get("user-agent", ""),
"request_id": getattr(request.state, 'request_id', None)
}
)
# Process request
response = await call_next(request)
# Calculate duration
duration = time.time() - start_time
# Log response
logger.info(
f"Response: {response.status_code} in {duration:.3f}s",
extra={
"status_code": response.status_code,
"duration": duration,
"method": request.method,
"url": request.url.path,
"request_id": getattr(request.state, 'request_id', None)
}
)
return response

85 gateway/app/middleware/rate_limit.py Normal file

@@ -0,0 +1,85 @@
"""
Rate limiting middleware for gateway
"""
import logging
from fastapi import Request, HTTPException
from fastapi.responses import JSONResponse
import redis.asyncio as redis
from datetime import datetime, timedelta
import hashlib
from app.core.config import settings
logger = logging.getLogger(__name__)
# Redis client for rate limiting
redis_client = redis.from_url(settings.REDIS_URL)
async def rate_limit_middleware(request: Request, call_next):
"""Rate limiting middleware"""
# Skip rate limiting for health checks
if request.url.path in ["/health", "/metrics"]:
return await call_next(request)
# Get client identifier (IP address or user ID)
client_id = _get_client_id(request)
# Check rate limit
if await _is_rate_limited(client_id):
return JSONResponse(
status_code=429,
content={
"detail": "Rate limit exceeded",
"retry_after": settings.RATE_LIMIT_WINDOW
}
)
# Process request
response = await call_next(request)
# Update rate limit counter
await _update_rate_limit(client_id)
return response
def _get_client_id(request: Request) -> str:
"""Get client identifier for rate limiting"""
# Use user ID if authenticated, otherwise use IP
if hasattr(request.state, 'user') and request.state.user:
return f"user:{request.state.user.get('user_id', 'unknown')}"
else:
# Hash IP address for privacy
ip = request.client.host
return f"ip:{hashlib.md5(ip.encode()).hexdigest()}"
async def _is_rate_limited(client_id: str) -> bool:
"""Check if client is rate limited"""
try:
key = f"rate_limit:{client_id}"
current_count = await redis_client.get(key)
if current_count is None:
return False
return int(current_count) >= settings.RATE_LIMIT_REQUESTS
except Exception as e:
logger.error(f"Rate limit check failed: {e}")
return False
async def _update_rate_limit(client_id: str):
"""Update rate limit counter"""
try:
key = f"rate_limit:{client_id}"
# Increment counter
current_count = await redis_client.incr(key)
# Set TTL on first request
if current_count == 1:
await redis_client.expire(key, settings.RATE_LIMIT_WINDOW)
except Exception as e:
logger.error(f"Rate limit update failed: {e}")

161 gateway/app/routes/auth.py Normal file

@@ -0,0 +1,161 @@
"""
Authentication routes for gateway
"""
from fastapi import APIRouter, Request, HTTPException
from fastapi.responses import JSONResponse
import httpx
import logging
from app.core.config import settings
from app.core.service_discovery import ServiceDiscovery
logger = logging.getLogger(__name__)
router = APIRouter()
service_discovery = ServiceDiscovery()
@router.post("/login")
async def login(request: Request):
"""Proxy login request to auth service"""
try:
body = await request.body()
async with httpx.AsyncClient(timeout=10.0) as client:
response = await client.post(
f"{settings.AUTH_SERVICE_URL}/login",
content=body,
headers={"Content-Type": "application/json"}
)
if response.status_code == 200:
return response.json()
else:
return JSONResponse(
status_code=response.status_code,
content=response.json()
)
except httpx.RequestError as e:
logger.error(f"Auth service unavailable: {e}")
raise HTTPException(
status_code=503,
detail="Authentication service unavailable"
)
except Exception as e:
logger.error(f"Login error: {e}")
raise HTTPException(status_code=500, detail="Internal server error")
@router.post("/register")
async def register(request: Request):
"""Proxy register request to auth service"""
try:
body = await request.body()
async with httpx.AsyncClient(timeout=10.0) as client:
response = await client.post(
f"{settings.AUTH_SERVICE_URL}/register",
content=body,
headers={"Content-Type": "application/json"}
)
return JSONResponse(
status_code=response.status_code,
content=response.json()
)
except httpx.RequestError as e:
logger.error(f"Auth service unavailable: {e}")
raise HTTPException(
status_code=503,
detail="Authentication service unavailable"
)
except Exception as e:
logger.error(f"Register error: {e}")
raise HTTPException(status_code=500, detail="Internal server error")
@router.post("/refresh")
async def refresh_token(request: Request):
"""Proxy refresh token request to auth service"""
try:
body = await request.body()
async with httpx.AsyncClient(timeout=10.0) as client:
response = await client.post(
f"{settings.AUTH_SERVICE_URL}/refresh",
content=body,
headers={"Content-Type": "application/json"}
)
return JSONResponse(
status_code=response.status_code,
content=response.json()
)
except httpx.RequestError as e:
logger.error(f"Auth service unavailable: {e}")
raise HTTPException(
status_code=503,
detail="Authentication service unavailable"
)
except Exception as e:
logger.error(f"Refresh token error: {e}")
raise HTTPException(status_code=500, detail="Internal server error")
@router.post("/verify")
async def verify_token(request: Request):
"""Proxy token verification to auth service"""
try:
auth_header = request.headers.get("Authorization")
if not auth_header:
raise HTTPException(status_code=401, detail="Authorization header required")
async with httpx.AsyncClient(timeout=5.0) as client:
response = await client.post(
f"{settings.AUTH_SERVICE_URL}/verify",
headers={"Authorization": auth_header}
)
return JSONResponse(
status_code=response.status_code,
content=response.json()
)
except httpx.RequestError as e:
logger.error(f"Auth service unavailable: {e}")
raise HTTPException(
status_code=503,
detail="Authentication service unavailable"
)
except Exception as e:
logger.error(f"Token verification error: {e}")
raise HTTPException(status_code=500, detail="Internal server error")
@router.post("/logout")
async def logout(request: Request):
"""Proxy logout request to auth service"""
try:
auth_header = request.headers.get("Authorization")
if not auth_header:
raise HTTPException(status_code=401, detail="Authorization header required")
async with httpx.AsyncClient(timeout=5.0) as client:
response = await client.post(
f"{settings.AUTH_SERVICE_URL}/logout",
headers={"Authorization": auth_header}
)
return JSONResponse(
status_code=response.status_code,
content=response.json()
)
except httpx.RequestError as e:
logger.error(f"Auth service unavailable: {e}")
raise HTTPException(
status_code=503,
detail="Authentication service unavailable"
)
except Exception as e:
logger.error(f"Logout error: {e}")
raise HTTPException(status_code=500, detail="Internal server error")

166 gateway/app/routes/training.py Normal file

@@ -0,0 +1,166 @@
"""
Training routes for gateway
"""
from fastapi import APIRouter, Request, HTTPException, Query
from fastapi.responses import JSONResponse
import httpx
import logging
from typing import Optional
from app.core.config import settings
logger = logging.getLogger(__name__)
router = APIRouter()
@router.post("/train")
async def start_training(request: Request):
"""Proxy training request to training service"""
try:
body = await request.body()
auth_header = request.headers.get("Authorization")
async with httpx.AsyncClient(timeout=30.0) as client:
response = await client.post(
f"{settings.TRAINING_SERVICE_URL}/train",
content=body,
headers={
"Content-Type": "application/json",
"Authorization": auth_header
}
)
return JSONResponse(
status_code=response.status_code,
content=response.json()
)
except httpx.RequestError as e:
logger.error(f"Training service unavailable: {e}")
raise HTTPException(
status_code=503,
detail="Training service unavailable"
)
except Exception as e:
logger.error(f"Training error: {e}")
raise HTTPException(status_code=500, detail="Internal server error")
@router.get("/status/{training_job_id}")
async def get_training_status(training_job_id: str, request: Request):
"""Get training job status"""
try:
auth_header = request.headers.get("Authorization")
async with httpx.AsyncClient(timeout=10.0) as client:
response = await client.get(
f"{settings.TRAINING_SERVICE_URL}/status/{training_job_id}",
headers={"Authorization": auth_header}
)
return JSONResponse(
status_code=response.status_code,
content=response.json()
)
except httpx.RequestError as e:
logger.error(f"Training service unavailable: {e}")
raise HTTPException(
status_code=503,
detail="Training service unavailable"
)
except Exception as e:
logger.error(f"Training status error: {e}")
raise HTTPException(status_code=500, detail="Internal server error")
@router.get("/models")
async def get_trained_models(request: Request):
"""Get trained models"""
try:
auth_header = request.headers.get("Authorization")
async with httpx.AsyncClient(timeout=10.0) as client:
response = await client.get(
f"{settings.TRAINING_SERVICE_URL}/models",
headers={"Authorization": auth_header}
)
return JSONResponse(
status_code=response.status_code,
content=response.json()
)
except httpx.RequestError as e:
logger.error(f"Training service unavailable: {e}")
raise HTTPException(
status_code=503,
detail="Training service unavailable"
)
except Exception as e:
logger.error(f"Get models error: {e}")
raise HTTPException(status_code=500, detail="Internal server error")
@router.get("/jobs")
async def get_training_jobs(
request: Request,
limit: Optional[int] = Query(10, ge=1, le=100),
offset: Optional[int] = Query(0, ge=0)
):
"""Get training jobs"""
try:
auth_header = request.headers.get("Authorization")
async with httpx.AsyncClient(timeout=10.0) as client:
response = await client.get(
f"{settings.TRAINING_SERVICE_URL}/jobs",
params={"limit": limit, "offset": offset},
headers={"Authorization": auth_header}
)
return JSONResponse(
status_code=response.status_code,
content=response.json()
)
except httpx.RequestError as e:
logger.error(f"Training service unavailable: {e}")
raise HTTPException(
status_code=503,
detail="Training service unavailable"
)
except Exception as e:
logger.error(f"Get training jobs error: {e}")
raise HTTPException(status_code=500, detail="Internal server error")

gateway/Dockerfile Normal file

FROM python:3.11-slim
WORKDIR /app
# Install system dependencies
RUN apt-get update && apt-get install -y \
gcc \
curl \
&& rm -rf /var/lib/apt/lists/*
# Copy requirements
COPY requirements.txt .
# Install Python dependencies
RUN pip install --no-cache-dir -r requirements.txt
# Copy application code
COPY . .
# Add shared libraries to Python path
ENV PYTHONPATH="/app:/app/shared:$PYTHONPATH"
# Expose port
EXPOSE 8000
# Health check
HEALTHCHECK --interval=30s --timeout=10s --start-period=5s --retries=3 \
CMD curl -f http://localhost:8000/health || exit 1
# Run application
CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000"]

13 gateway/requirements.txt Normal file

@@ -0,0 +1,13 @@
fastapi==0.104.1
uvicorn[standard]==0.24.0
httpx==0.25.2
redis==5.0.1
pydantic==2.5.0
pydantic-settings==2.1.0
python-jose[cryptography]==3.3.0
PyJWT==2.8.0
python-multipart==0.0.6
prometheus-client==0.17.1
python-json-logger==2.0.4
email-validator==2.0.0
aio-pika==9.3.0
pytz==2023.3

@@ -0,0 +1,41 @@
"""
Authentication decorators for FastAPI
"""
from functools import wraps
from fastapi import HTTPException, Depends
from fastapi.security import HTTPBearer
import httpx
import logging
logger = logging.getLogger(__name__)
security = HTTPBearer()
def verify_service_token(auth_service_url: str):
"""Verify service token with auth service"""
async def verify_token(token: str = Depends(security)):
try:
async with httpx.AsyncClient() as client:
response = await client.post(
f"{auth_service_url}/verify",
headers={"Authorization": f"Bearer {token.credentials}"}
)
if response.status_code == 200:
return response.json()
else:
raise HTTPException(
status_code=401,
detail="Invalid authentication credentials"
)
except httpx.RequestError as e:
logger.error(f"Auth service unavailable: {e}")
raise HTTPException(
status_code=503,
detail="Authentication service unavailable"
)
return verify_token
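
A hedged usage sketch wiring the returned dependency into a route; the router, path, and auth service URL are placeholders.

from fastapi import APIRouter, Depends

router = APIRouter()
verify_token = verify_service_token("http://auth-service:8000")

@router.get("/protected")
async def protected_route(user: dict = Depends(verify_token)):
    # Reached only when the auth service accepted the bearer token
    return {"user": user}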

58 shared/auth/jwt_handler.py Normal file

@@ -0,0 +1,58 @@
"""
Shared JWT Authentication Handler
Used across all microservices for consistent authentication
"""
import jwt
from datetime import datetime, timedelta
from typing import Optional, Dict, Any
import logging
logger = logging.getLogger(__name__)
class JWTHandler:
"""JWT token handling for microservices"""
def __init__(self, secret_key: str, algorithm: str = "HS256"):
self.secret_key = secret_key
self.algorithm = algorithm
def create_access_token(self, data: Dict[str, Any], expires_delta: Optional[timedelta] = None) -> str:
"""Create JWT access token"""
to_encode = data.copy()
if expires_delta:
expire = datetime.utcnow() + expires_delta
else:
expire = datetime.utcnow() + timedelta(minutes=30)
to_encode.update({"exp": expire, "type": "access"})
encoded_jwt = jwt.encode(to_encode, self.secret_key, algorithm=self.algorithm)
return encoded_jwt
def create_refresh_token(self, data: Dict[str, Any], expires_delta: Optional[timedelta] = None) -> str:
"""Create JWT refresh token"""
to_encode = data.copy()
if expires_delta:
expire = datetime.utcnow() + expires_delta
else:
expire = datetime.utcnow() + timedelta(days=7)
to_encode.update({"exp": expire, "type": "refresh"})
encoded_jwt = jwt.encode(to_encode, self.secret_key, algorithm=self.algorithm)
return encoded_jwt
def verify_token(self, token: str) -> Optional[Dict[str, Any]]:
"""Verify and decode JWT token"""
try:
payload = jwt.decode(token, self.secret_key, algorithms=[self.algorithm])
return payload
except jwt.ExpiredSignatureError:
logger.warning("Token has expired")
return None
except jwt.InvalidTokenError:
logger.warning("Invalid token")
return None
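
A short, illustrative round trip with the handler; the secret and claims are placeholders.

from datetime import timedelta

handler = JWTHandler(secret_key="change-me", algorithm="HS256")
access = handler.create_access_token(
    {"user_id": "123", "tenant_id": "bakery-1"},
    expires_delta=timedelta(minutes=15),
)
payload = handler.verify_token(access)  # dict with user_id/exp/type, or None if invalid
assert payload is not None and payload["type"] == "access"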

@@ -0,0 +1,56 @@
"""
Base database configuration for all microservices
"""
import os
from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine
from sqlalchemy.orm import sessionmaker, declarative_base
import logging
logger = logging.getLogger(__name__)
Base = declarative_base()
class DatabaseManager:
"""Database manager for microservices"""
def __init__(self, database_url: str):
self.database_url = database_url
self.async_engine = create_async_engine(
database_url,
echo=False,
pool_pre_ping=True,
pool_recycle=300,
pool_size=20,
max_overflow=30
)
self.async_session_local = sessionmaker(
self.async_engine,
class_=AsyncSession,
expire_on_commit=False
)
async def get_db(self):
"""Get database session"""
async with self.async_session_local() as session:
try:
yield session
except Exception as e:
logger.error(f"Database session error: {e}")
await session.rollback()
raise
finally:
await session.close()
async def create_tables(self):
"""Create database tables"""
async with self.async_engine.begin() as conn:
await conn.run_sync(Base.metadata.create_all)
async def drop_tables(self):
"""Drop database tables"""
async with self.async_engine.begin() as conn:
await conn.run_sync(Base.metadata.drop_all)
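
A hedged sketch of how a service might wire DatabaseManager into FastAPI; the asyncpg URL and route are placeholders.

from fastapi import Depends, FastAPI
from sqlalchemy.ext.asyncio import AsyncSession

db_manager = DatabaseManager("postgresql+asyncpg://user:pass@postgres:5432/bakery")
app = FastAPI()

@app.on_event("startup")
async def init_db():
    await db_manager.create_tables()

@app.get("/items")
async def list_items(session: AsyncSession = Depends(db_manager.get_db)):
    # Run queries against models declared on the shared Base here
    return {"ok": True}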

@@ -0,0 +1,73 @@
"""
Event definitions for microservices communication
"""
from dataclasses import dataclass, field
from datetime import datetime
from typing import Dict, Any, Optional
import uuid
@dataclass
class BaseEvent:
    """Base event class"""
    # All fields carry defaults so the subclasses below can override event_type
    # with a default value without breaking dataclass field ordering.
    event_id: str = ""
    event_type: str = ""
    service_name: str = ""
    timestamp: Optional[datetime] = None
    data: Dict[str, Any] = field(default_factory=dict)
    correlation_id: Optional[str] = None
    def __post_init__(self):
        if not self.event_id:
            self.event_id = str(uuid.uuid4())
        if not self.timestamp:
            self.timestamp = datetime.utcnow()
# Training Events
@dataclass
class TrainingStartedEvent(BaseEvent):
event_type: str = "training.started"
@dataclass
class TrainingCompletedEvent(BaseEvent):
event_type: str = "training.completed"
@dataclass
class TrainingFailedEvent(BaseEvent):
event_type: str = "training.failed"
# Forecasting Events
@dataclass
class ForecastGeneratedEvent(BaseEvent):
event_type: str = "forecast.generated"
@dataclass
class ForecastRequestedEvent(BaseEvent):
event_type: str = "forecast.requested"
# User Events
@dataclass
class UserRegisteredEvent(BaseEvent):
event_type: str = "user.registered"
@dataclass
class UserLoginEvent(BaseEvent):
event_type: str = "user.login"
# Tenant Events
@dataclass
class TenantCreatedEvent(BaseEvent):
event_type: str = "tenant.created"
@dataclass
class TenantUpdatedEvent(BaseEvent):
event_type: str = "tenant.updated"
# Notification Events
@dataclass
class NotificationSentEvent(BaseEvent):
event_type: str = "notification.sent"
@dataclass
class NotificationFailedEvent(BaseEvent):
event_type: str = "notification.failed"

@@ -0,0 +1,96 @@
"""
RabbitMQ messaging client for microservices
"""
import asyncio
import json
import logging
from typing import Dict, Any, Callable
import aio_pika
from aio_pika import connect_robust, Message, DeliveryMode
logger = logging.getLogger(__name__)
class RabbitMQClient:
"""RabbitMQ client for microservices communication"""
def __init__(self, connection_url: str):
self.connection_url = connection_url
self.connection = None
self.channel = None
async def connect(self):
"""Connect to RabbitMQ"""
try:
self.connection = await connect_robust(self.connection_url)
self.channel = await self.connection.channel()
logger.info("Connected to RabbitMQ")
except Exception as e:
logger.error(f"Failed to connect to RabbitMQ: {e}")
raise
async def disconnect(self):
"""Disconnect from RabbitMQ"""
if self.connection:
await self.connection.close()
logger.info("Disconnected from RabbitMQ")
async def publish_event(self, exchange_name: str, routing_key: str, event_data: Dict[str, Any]):
"""Publish event to RabbitMQ"""
try:
if not self.channel:
await self.connect()
# Declare exchange
exchange = await self.channel.declare_exchange(
exchange_name,
aio_pika.ExchangeType.TOPIC,
durable=True
)
# Create message
message = Message(
json.dumps(event_data).encode(),
delivery_mode=DeliveryMode.PERSISTENT,
content_type="application/json"
)
# Publish message
await exchange.publish(message, routing_key=routing_key)
logger.info(f"Published event to {exchange_name} with routing key {routing_key}")
except Exception as e:
logger.error(f"Failed to publish event: {e}")
raise
async def consume_events(self, exchange_name: str, queue_name: str, routing_key: str, callback: Callable):
"""Consume events from RabbitMQ"""
try:
if not self.channel:
await self.connect()
# Declare exchange
exchange = await self.channel.declare_exchange(
exchange_name,
aio_pika.ExchangeType.TOPIC,
durable=True
)
# Declare queue
queue = await self.channel.declare_queue(
queue_name,
durable=True
)
# Bind queue to exchange
await queue.bind(exchange, routing_key)
# Set up consumer
await queue.consume(callback)
logger.info(f"Started consuming events from {queue_name}")
except Exception as e:
logger.error(f"Failed to consume events: {e}")
raise
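
A hedged publish/consume sketch for the client above; the AMQP URL, exchange, queue, and routing key are placeholders.

import asyncio
import aio_pika

async def main() -> None:
    client = RabbitMQClient("amqp://guest:guest@rabbitmq:5672/")
    await client.connect()

    async def on_message(message: aio_pika.abc.AbstractIncomingMessage) -> None:
        async with message.process():  # ack on successful exit
            print("received:", message.body.decode())

    await client.consume_events("bakery.events", "gateway.training", "training.*", on_message)
    await client.publish_event("bakery.events", "training.completed", {"job_id": "job-42"})
    await asyncio.sleep(1)
    await client.disconnect()

# asyncio.run(main())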

77 shared/monitoring/logging.py Normal file

@@ -0,0 +1,77 @@
"""
Centralized logging configuration for microservices
"""
import logging
import logging.config
import os
from typing import Dict, Any
def setup_logging(service_name: str, log_level: str = "INFO") -> None:
"""Set up logging configuration for a microservice"""
config: Dict[str, Any] = {
"version": 1,
"disable_existing_loggers": False,
"formatters": {
"standard": {
"format": "%(asctime)s [%(levelname)s] %(name)s: %(message)s"
},
"detailed": {
"format": "%(asctime)s [%(levelname)s] %(name)s [%(filename)s:%(lineno)d] %(message)s"
},
"json": {
"()": "pythonjsonlogger.jsonlogger.JsonFormatter",
"format": "%(asctime)s %(name)s %(levelname)s %(message)s"
}
},
"handlers": {
"console": {
"class": "logging.StreamHandler",
"level": log_level,
"formatter": "standard",
"stream": "ext://sys.stdout"
},
"file": {
"class": "logging.FileHandler",
"level": log_level,
"formatter": "detailed",
"filename": f"/var/log/{service_name}.log",
"mode": "a"
            }
        },
"loggers": {
"": {
"handlers": ["console", "file"],
"level": log_level,
"propagate": False
},
"uvicorn": {
"handlers": ["console"],
"level": log_level,
"propagate": False
},
"uvicorn.access": {
"handlers": ["console"],
"level": log_level,
"propagate": False
}
}
}
    # Define and attach the logstash handler only in production, so dictConfig
    # does not require python-logstash (and a reachable Logstash) elsewhere
    if os.getenv("ENVIRONMENT") == "production":
        config["handlers"]["logstash"] = {
            "class": "logstash.TCPLogstashHandler",
            "host": os.getenv("LOGSTASH_HOST", "localhost"),
            "port": int(os.getenv("LOGSTASH_PORT", "5000")),
            "version": 1,
            "message_type": "logstash",
            "fqdn": False,
            "tags": [service_name]
        }
        config["loggers"][""]["handlers"].append("logstash")
logging.config.dictConfig(config)
logger = logging.getLogger(__name__)
logger.info(f"Logging configured for {service_name}")

112 shared/monitoring/metrics.py Normal file

@@ -0,0 +1,112 @@
"""
Metrics collection for microservices
"""
import time
import logging
from typing import Dict, Any
from prometheus_client import Counter, Histogram, Gauge, start_http_server
from functools import wraps
logger = logging.getLogger(__name__)
# Prometheus metrics
REQUEST_COUNT = Counter(
'http_requests_total',
'Total HTTP requests',
['method', 'endpoint', 'status_code', 'service']
)
REQUEST_DURATION = Histogram(
'http_request_duration_seconds',
'HTTP request duration in seconds',
['method', 'endpoint', 'service']
)
ACTIVE_CONNECTIONS = Gauge(
'active_connections',
'Active database connections',
['service']
)
TRAINING_JOBS = Counter(
'training_jobs_total',
'Total training jobs',
['status', 'service']
)
FORECASTS_GENERATED = Counter(
'forecasts_generated_total',
'Total forecasts generated',
['service']
)
class MetricsCollector:
"""Metrics collector for microservices"""
def __init__(self, service_name: str):
self.service_name = service_name
self.start_time = time.time()
def start_metrics_server(self, port: int = 8080):
"""Start Prometheus metrics server"""
try:
start_http_server(port)
logger.info(f"Metrics server started on port {port}")
except Exception as e:
logger.error(f"Failed to start metrics server: {e}")
def record_request(self, method: str, endpoint: str, status_code: int, duration: float):
"""Record HTTP request metrics"""
REQUEST_COUNT.labels(
method=method,
endpoint=endpoint,
status_code=status_code,
service=self.service_name
).inc()
REQUEST_DURATION.labels(
method=method,
endpoint=endpoint,
service=self.service_name
).observe(duration)
def record_training_job(self, status: str):
"""Record training job metrics"""
TRAINING_JOBS.labels(
status=status,
service=self.service_name
).inc()
def record_forecast_generated(self):
"""Record forecast generation metrics"""
FORECASTS_GENERATED.labels(
service=self.service_name
).inc()
def set_active_connections(self, count: int):
"""Set active database connections"""
ACTIVE_CONNECTIONS.labels(
service=self.service_name
).set(count)
def metrics_middleware(metrics_collector: MetricsCollector):
    """FastAPI HTTP middleware factory that records request metrics"""
    async def middleware(request, call_next):
        start_time = time.time()
        response = await call_next(request)  # call_next is a coroutine and must be awaited
        duration = time.time() - start_time
        metrics_collector.record_request(
            method=request.method,
            endpoint=request.url.path,
            status_code=response.status_code,
            duration=duration
        )
        return response
    return middleware
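
A hedged sketch of attaching the collector and middleware to a FastAPI app; the service name and port are placeholders.

from fastapi import FastAPI

app = FastAPI()
collector = MetricsCollector("example-service")
app.middleware("http")(metrics_middleware(collector))

@app.on_event("startup")
async def start_metrics():
    collector.start_metrics_server(8080)  # Prometheus scrapes this port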

@@ -0,0 +1,71 @@
"""
DateTime utilities for microservices
"""
from datetime import datetime, timezone, timedelta
from typing import Optional
import pytz
def utc_now() -> datetime:
"""Get current UTC datetime"""
return datetime.now(timezone.utc)
def madrid_now() -> datetime:
"""Get current Madrid datetime"""
madrid_tz = pytz.timezone('Europe/Madrid')
return datetime.now(madrid_tz)
def to_utc(dt: datetime) -> datetime:
"""Convert datetime to UTC"""
if dt.tzinfo is None:
dt = dt.replace(tzinfo=timezone.utc)
return dt.astimezone(timezone.utc)
def to_madrid(dt: datetime) -> datetime:
"""Convert datetime to Madrid timezone"""
madrid_tz = pytz.timezone('Europe/Madrid')
if dt.tzinfo is None:
dt = dt.replace(tzinfo=timezone.utc)
return dt.astimezone(madrid_tz)
def format_datetime(dt: datetime, format_str: str = "%Y-%m-%d %H:%M:%S") -> str:
"""Format datetime as string"""
return dt.strftime(format_str)
def parse_datetime(dt_str: str, format_str: str = "%Y-%m-%d %H:%M:%S") -> datetime:
"""Parse datetime from string"""
return datetime.strptime(dt_str, format_str)
def is_business_hours(dt: Optional[datetime] = None) -> bool:
"""Check if datetime is during business hours (9 AM - 6 PM Madrid time)"""
if dt is None:
dt = madrid_now()
if dt.tzinfo is None:
dt = dt.replace(tzinfo=timezone.utc)
madrid_dt = to_madrid(dt)
# Check if it's a weekday (Monday=0, Sunday=6)
if madrid_dt.weekday() >= 5: # Weekend
return False
# Check if it's business hours
return 9 <= madrid_dt.hour < 18
def next_business_day(dt: Optional[datetime] = None) -> datetime:
"""Get next business day"""
if dt is None:
dt = madrid_now()
if dt.tzinfo is None:
dt = dt.replace(tzinfo=timezone.utc)
madrid_dt = to_madrid(dt)
# Add days until we reach a weekday
while madrid_dt.weekday() >= 5: # Weekend
madrid_dt += timedelta(days=1)
# Set to 9 AM
return madrid_dt.replace(hour=9, minute=0, second=0, microsecond=0)
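
A small illustration of the business-hours helpers; the date is arbitrary.

from datetime import datetime, timezone

dt = datetime(2025, 7, 19, 10, 30, tzinfo=timezone.utc)  # a Saturday
print(is_business_hours(dt))              # False: weekend in Madrid
print(next_business_day(dt).isoformat())  # the following Monday at 09:00 Madrid time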

@@ -0,0 +1,67 @@
"""
Validation utilities for microservices
"""
import re
from typing import Any, Optional
from email_validator import validate_email, EmailNotValidError
def validate_spanish_phone(phone: str) -> bool:
"""Validate Spanish phone number"""
# Spanish phone pattern: +34 followed by 9 digits
pattern = r'^(\+34|0034|34)?[6-9]\d{8}$'
return bool(re.match(pattern, phone.replace(' ', '').replace('-', '')))
def validate_email_address(email: str) -> bool:
"""Validate email address"""
try:
validate_email(email)
return True
except EmailNotValidError:
return False
def validate_tenant_name(name: str) -> bool:
"""Validate tenant name"""
# Must be 2-50 characters, letters, numbers, spaces, hyphens, apostrophes
pattern = r"^[a-zA-ZÀ-ÿ0-9\s\-']{2,50}$"
return bool(re.match(pattern, name))
def validate_address(address: str) -> bool:
"""Validate address"""
# Must be 5-200 characters
return 5 <= len(address.strip()) <= 200
def validate_coordinates(latitude: float, longitude: float) -> bool:
"""Validate Madrid coordinates"""
# Madrid is roughly between these coordinates
madrid_bounds = {
'lat_min': 40.3,
'lat_max': 40.6,
'lon_min': -3.8,
'lon_max': -3.5
}
return (
madrid_bounds['lat_min'] <= latitude <= madrid_bounds['lat_max'] and
madrid_bounds['lon_min'] <= longitude <= madrid_bounds['lon_max']
)
def validate_product_name(name: str) -> bool:
"""Validate product name"""
# Must be 1-50 characters, letters, numbers, spaces
pattern = r"^[a-zA-ZÀ-ÿ0-9\s]{1,50}$"
return bool(re.match(pattern, name))
def validate_positive_number(value: Any) -> bool:
"""Validate positive number"""
try:
return float(value) > 0
except (ValueError, TypeError):
return False
def validate_non_negative_number(value: Any) -> bool:
"""Validate non-negative number"""
try:
return float(value) >= 0
except (ValueError, TypeError):
return False
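
A few illustrative calls; the inputs are made up.

print(validate_spanish_phone("+34 612 345 678"))    # True
print(validate_email_address("owner@example.com"))  # syntax check via email_validator
print(validate_coordinates(40.4168, -3.7038))       # True: central Madrid
print(validate_positive_number("3.5"))              # True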