Initial microservices setup from artifacts

This commit is contained in:
Urtzi Alfaro
2025-07-17 13:09:24 +02:00
commit 347ff51bd7
200 changed files with 9559 additions and 0 deletions

@@ -0,0 +1,33 @@
"""
Models API endpoints
"""
from fastapi import APIRouter, Depends, HTTPException, status
from sqlalchemy.ext.asyncio import AsyncSession
from typing import List
import logging
from app.core.database import get_db
from app.core.auth import verify_token
from app.schemas.training import TrainedModelResponse
from app.services.training_service import TrainingService
logger = logging.getLogger(__name__)
router = APIRouter()
training_service = TrainingService()
@router.get("/", response_model=List[TrainedModelResponse])
async def get_trained_models(
user_data: dict = Depends(verify_token),
db: AsyncSession = Depends(get_db)
):
"""Get trained models"""
try:
return await training_service.get_trained_models(user_data, db)
except Exception as e:
logger.error(f"Get trained models error: {e}")
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail="Failed to get trained models"
)

@@ -0,0 +1,77 @@
"""
Training API endpoints
"""
from fastapi import APIRouter, Depends, HTTPException, status, Query
from sqlalchemy.ext.asyncio import AsyncSession
from typing import List, Optional
import logging
from app.core.database import get_db
from app.core.auth import verify_token
from app.schemas.training import TrainingRequest, TrainingJobResponse, TrainedModelResponse
from app.services.training_service import TrainingService
logger = logging.getLogger(__name__)
router = APIRouter()
training_service = TrainingService()
@router.post("/train", response_model=TrainingJobResponse)
async def start_training(
request: TrainingRequest,
user_data: dict = Depends(verify_token),
db: AsyncSession = Depends(get_db)
):
"""Start training job"""
try:
return await training_service.start_training(request, user_data, db)
except ValueError as e:
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail=str(e)
)
except Exception as e:
logger.error(f"Training start error: {e}")
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail="Failed to start training"
)
@router.get("/status/{job_id}", response_model=TrainingJobResponse)
async def get_training_status(
job_id: str,
user_data: dict = Depends(verify_token),
db: AsyncSession = Depends(get_db)
):
"""Get training job status"""
try:
return await training_service.get_training_status(job_id, user_data, db)
except ValueError as e:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
detail=str(e)
)
except Exception as e:
logger.error(f"Get training status error: {e}")
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail="Failed to get training status"
)
@router.get("/jobs", response_model=List[TrainingJobResponse])
async def get_training_jobs(
limit: int = Query(10, ge=1, le=100),
offset: int = Query(0, ge=0),
user_data: dict = Depends(verify_token),
db: AsyncSession = Depends(get_db)
):
"""Get training jobs"""
try:
return await training_service.get_training_jobs(user_data, limit, offset, db)
except Exception as e:
logger.error(f"Get training jobs error: {e}")
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail="Failed to get training jobs"
)
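
For reference, a minimal client-side sketch against these endpoints might look like the following; the base URL, token placeholder, and payload values are illustrative assumptions, not part of this commit.

import asyncio
import httpx

async def run_training_demo():
    headers = {"Authorization": "Bearer <token>"}
    async with httpx.AsyncClient(base_url="http://training-service:8000") as client:
        # Start a training job
        job = (await client.post(
            "/training/train",
            json={"force_retrain": False, "training_days": 365},
            headers=headers,
        )).json()
        # Poll its status
        status = (await client.get(f"/training/status/{job['id']}", headers=headers)).json()
        print(status["status"], status["progress"])

asyncio.run(run_training_demo())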

@@ -0,0 +1,38 @@
"""
Authentication utilities for training service
"""
import httpx
from fastapi import HTTPException, status, Depends
from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials
import logging
from app.core.config import settings
logger = logging.getLogger(__name__)
security = HTTPBearer()
async def verify_token(token: HTTPAuthorizationCredentials = Depends(security)):
"""Verify token with auth service"""
try:
async with httpx.AsyncClient() as client:
response = await client.post(
f"{settings.AUTH_SERVICE_URL}/auth/verify",
headers={"Authorization": f"Bearer {token.credentials}"}
)
if response.status_code == 200:
return response.json()
else:
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail="Invalid authentication credentials"
)
except httpx.RequestError as e:
logger.error(f"Auth service unavailable: {e}")
raise HTTPException(
status_code=status.HTTP_503_SERVICE_UNAVAILABLE,
detail="Authentication service unavailable"
)

@@ -0,0 +1,44 @@
"""
Training service configuration
"""
import os
from pydantic_settings import BaseSettings  # pydantic v2: BaseSettings lives in pydantic-settings
class Settings(BaseSettings):
"""Application settings"""
# Basic settings
APP_NAME: str = "Training Service"
VERSION: str = "1.0.0"
DEBUG: bool = os.getenv("DEBUG", "False").lower() == "true"
LOG_LEVEL: str = os.getenv("LOG_LEVEL", "INFO")
# Database settings
DATABASE_URL: str = os.getenv("DATABASE_URL", "postgresql+asyncpg://training_user:training_pass123@training-db:5432/training_db")
# Redis settings
REDIS_URL: str = os.getenv("REDIS_URL", "redis://redis:6379/1")
# RabbitMQ settings
RABBITMQ_URL: str = os.getenv("RABBITMQ_URL", "amqp://bakery:forecast123@rabbitmq:5672/")
# Service URLs
AUTH_SERVICE_URL: str = os.getenv("AUTH_SERVICE_URL", "http://auth-service:8000")
DATA_SERVICE_URL: str = os.getenv("DATA_SERVICE_URL", "http://data-service:8000")
# ML Settings
MODEL_STORAGE_PATH: str = os.getenv("MODEL_STORAGE_PATH", "/app/models")
MAX_TRAINING_TIME_MINUTES: int = int(os.getenv("MAX_TRAINING_TIME_MINUTES", "30"))
MIN_TRAINING_DATA_DAYS: int = int(os.getenv("MIN_TRAINING_DATA_DAYS", "30"))
# Prophet Settings
PROPHET_SEASONALITY_MODE: str = os.getenv("PROPHET_SEASONALITY_MODE", "additive")
PROPHET_DAILY_SEASONALITY: bool = os.getenv("PROPHET_DAILY_SEASONALITY", "true").lower() == "true"
PROPHET_WEEKLY_SEASONALITY: bool = os.getenv("PROPHET_WEEKLY_SEASONALITY", "true").lower() == "true"
PROPHET_YEARLY_SEASONALITY: bool = os.getenv("PROPHET_YEARLY_SEASONALITY", "true").lower() == "true"
class Config:
env_file = ".env"
settings = Settings()

@@ -0,0 +1,12 @@
"""
Database configuration for training service
"""
from shared.database.base import DatabaseManager
from app.core.config import settings
# Initialize database manager
database_manager = DatabaseManager(settings.DATABASE_URL)
# Alias for convenience
get_db = database_manager.get_db

@@ -0,0 +1,81 @@
"""
Training Service
Handles ML model training for bakery demand forecasting
"""
import logging
from fastapi import FastAPI, BackgroundTasks
from fastapi.middleware.cors import CORSMiddleware
from app.core.config import settings
from app.core.database import database_manager
from app.api import training, models
from app.services.messaging import message_publisher
from shared.monitoring.logging import setup_logging
from shared.monitoring.metrics import MetricsCollector
# Setup logging
setup_logging("training-service", settings.LOG_LEVEL)
logger = logging.getLogger(__name__)
# Create FastAPI app
app = FastAPI(
title="Training Service",
description="ML model training service for bakery demand forecasting",
version="1.0.0"
)
# Initialize metrics collector
metrics_collector = MetricsCollector("training-service")
# CORS middleware
app.add_middleware(
CORSMiddleware,
allow_origins=["*"],
allow_credentials=True,
allow_methods=["*"],
allow_headers=["*"],
)
# Include routers
app.include_router(training.router, prefix="/training", tags=["training"])
app.include_router(models.router, prefix="/models", tags=["models"])
@app.on_event("startup")
async def startup_event():
"""Application startup"""
logger.info("Starting Training Service")
# Create database tables
await database_manager.create_tables()
# Initialize message publisher
await message_publisher.connect()
# Start metrics server
metrics_collector.start_metrics_server(8080)
logger.info("Training Service started successfully")
@app.on_event("shutdown")
async def shutdown_event():
"""Application shutdown"""
logger.info("Shutting down Training Service")
# Cleanup message publisher
await message_publisher.disconnect()
logger.info("Training Service shutdown complete")
@app.get("/health")
async def health_check():
"""Health check endpoint"""
return {
"status": "healthy",
"service": "training-service",
"version": "1.0.0"
}
if __name__ == "__main__":
import uvicorn
uvicorn.run(app, host="0.0.0.0", port=8000)

@@ -0,0 +1,174 @@
"""
ML Training implementation
"""
import asyncio
import logging
from typing import Dict, Any, List
import pandas as pd
from datetime import datetime
import joblib
import os
from prophet import Prophet
import numpy as np
from sklearn.metrics import mean_absolute_error, mean_squared_error, r2_score
from app.core.config import settings
logger = logging.getLogger(__name__)
class MLTrainer:
"""ML training implementation"""
def __init__(self):
self.model_storage_path = settings.MODEL_STORAGE_PATH
os.makedirs(self.model_storage_path, exist_ok=True)
async def train_models(self, training_data: Dict[str, Any], job_id: str, db) -> Dict[str, Any]:
"""Train models for all products"""
models_result = {}
# Get sales data
sales_data = training_data.get("sales_data", [])
external_data = training_data.get("external_data", {})
# Group by product
products_data = self._group_by_product(sales_data)
# Train model for each product
for product_name, product_sales in products_data.items():
try:
model_result = await self._train_product_model(
product_name,
product_sales,
external_data,
job_id
)
models_result[product_name] = model_result
except Exception as e:
logger.error(f"Failed to train model for {product_name}: {e}")
continue
return models_result
def _group_by_product(self, sales_data: List[Dict]) -> Dict[str, List[Dict]]:
"""Group sales data by product"""
products = {}
for sale in sales_data:
product_name = sale.get("product_name")
if product_name not in products:
products[product_name] = []
products[product_name].append(sale)
return products
async def _train_product_model(self, product_name: str, sales_data: List[Dict], external_data: Dict, job_id: str) -> Dict[str, Any]:
"""Train Prophet model for a single product"""
# Convert to DataFrame
df = pd.DataFrame(sales_data)
df['date'] = pd.to_datetime(df['date'])
# Aggregate daily sales
daily_sales = df.groupby('date')['quantity_sold'].sum().reset_index()
daily_sales.columns = ['ds', 'y']
# Add external features
daily_sales = self._add_external_features(daily_sales, external_data)
# Train Prophet model
model = Prophet(
seasonality_mode=settings.PROPHET_SEASONALITY_MODE,
daily_seasonality=settings.PROPHET_DAILY_SEASONALITY,
weekly_seasonality=settings.PROPHET_WEEKLY_SEASONALITY,
yearly_seasonality=settings.PROPHET_YEARLY_SEASONALITY
)
# Add regressors
model.add_regressor('temperature')
model.add_regressor('humidity')
model.add_regressor('precipitation')
model.add_regressor('traffic_volume')
# Fit model
model.fit(daily_sales)
# Save model
model_path = os.path.join(
self.model_storage_path,
f"{job_id}_{product_name}_prophet_model.pkl"
)
joblib.dump(model, model_path)
return {
"type": "prophet",
"path": model_path,
"training_samples": len(daily_sales),
"features": ["temperature", "humidity", "precipitation", "traffic_volume"],
"hyperparameters": {
"seasonality_mode": settings.PROPHET_SEASONALITY_MODE,
"daily_seasonality": settings.PROPHET_DAILY_SEASONALITY,
"weekly_seasonality": settings.PROPHET_WEEKLY_SEASONALITY,
"yearly_seasonality": settings.PROPHET_YEARLY_SEASONALITY
}
}
def _add_external_features(self, daily_sales: pd.DataFrame, external_data: Dict) -> pd.DataFrame:
"""Add external features to sales data"""
# Add weather data
weather_data = external_data.get("weather", [])
if weather_data:
weather_df = pd.DataFrame(weather_data)
weather_df['ds'] = pd.to_datetime(weather_df['date'])
daily_sales = daily_sales.merge(weather_df[['ds', 'temperature', 'humidity', 'precipitation']], on='ds', how='left')
# Add traffic data
traffic_data = external_data.get("traffic", [])
if traffic_data:
traffic_df = pd.DataFrame(traffic_data)
traffic_df['ds'] = pd.to_datetime(traffic_df['date'])
daily_sales = daily_sales.merge(traffic_df[['ds', 'traffic_volume']], on='ds', how='left')
        # Guarantee every regressor column exists (the merges above are
        # skipped when a source is missing), then fill remaining gaps
        for col in ['temperature', 'humidity', 'precipitation', 'traffic_volume']:
            if col not in daily_sales.columns:
                daily_sales[col] = 0.0
        daily_sales['temperature'] = daily_sales['temperature'].fillna(daily_sales['temperature'].mean())
        daily_sales['humidity'] = daily_sales['humidity'].fillna(daily_sales['humidity'].mean())
        daily_sales['precipitation'] = daily_sales['precipitation'].fillna(0)
        daily_sales['traffic_volume'] = daily_sales['traffic_volume'].fillna(daily_sales['traffic_volume'].mean())
        return daily_sales
async def validate_models(self, models_result: Dict[str, Any], db) -> Dict[str, Any]:
"""Validate trained models"""
validation_results = {}
for product_name, model_data in models_result.items():
try:
# Load model
model_path = model_data.get("path")
model = joblib.load(model_path)
# Mock validation for now (in production, you'd use actual validation data)
validation_results[product_name] = {
"mape": np.random.uniform(10, 25), # Mock MAPE between 10-25%
"rmse": np.random.uniform(8, 15), # Mock RMSE
"mae": np.random.uniform(5, 12), # Mock MAE
"r2_score": np.random.uniform(0.7, 0.9) # Mock R2 score
}
except Exception as e:
logger.error(f"Validation failed for {product_name}: {e}")
validation_results[product_name] = {
"mape": None,
"rmse": None,
"mae": None,
"r2_score": None,
"error": str(e)
}
return validation_results
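
Since the trainer registers four external regressors, any later forecasting step has to supply future values for all of them. A hedged sketch of loading a saved model and predicting, assuming a hypothetical job id and product name and made-up regressor values:

import joblib

# Path format matches MLTrainer._train_product_model; the ids are examples
model = joblib.load("/app/models/job123_Croissant_prophet_model.pkl")

# Prophet requires a future value for every registered regressor
future = model.make_future_dataframe(periods=7)
future["temperature"] = 22.0
future["humidity"] = 55.0
future["precipitation"] = 0.0
future["traffic_volume"] = 1200.0

forecast = model.predict(future)
print(forecast[["ds", "yhat", "yhat_lower", "yhat_upper"]].tail(7))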

@@ -0,0 +1,91 @@
"""
Training schemas
"""
from pydantic import BaseModel, Field, field_validator
from typing import Optional, Dict, Any, List
from datetime import datetime
from enum import Enum
class TrainingJobStatus(str, Enum):
"""Training job status enum"""
QUEUED = "queued"
RUNNING = "running"
COMPLETED = "completed"
FAILED = "failed"
CANCELLED = "cancelled"
class TrainingRequest(BaseModel):
"""Training request schema"""
tenant_id: Optional[str] = None # Will be set from auth
force_retrain: bool = Field(default=False, description="Force retrain even if recent models exist")
products: Optional[List[str]] = Field(default=None, description="Specific products to train, or None for all")
training_days: Optional[int] = Field(default=730, ge=30, le=1095, description="Number of days of historical data to use")
    @field_validator('training_days')
    @classmethod
    def validate_training_days(cls, v):
        if v is None:
            return v
        if v < 30:
            raise ValueError('Minimum training days is 30')
        if v > 1095:
            raise ValueError('Maximum training days is 1095 (3 years)')
        return v
class TrainingJobResponse(BaseModel):
"""Training job response schema"""
id: str
tenant_id: str
status: TrainingJobStatus
progress: int
current_step: Optional[str]
started_at: datetime
completed_at: Optional[datetime]
duration_seconds: Optional[int]
models_trained: Optional[Dict[str, Any]]
metrics: Optional[Dict[str, Any]]
error_message: Optional[str]
class Config:
from_attributes = True
class TrainedModelResponse(BaseModel):
"""Trained model response schema"""
id: str
product_name: str
model_type: str
model_version: str
mape: Optional[float]
rmse: Optional[float]
mae: Optional[float]
r2_score: Optional[float]
training_samples: Optional[int]
features_used: Optional[List[str]]
is_active: bool
created_at: datetime
last_used_at: Optional[datetime]
class Config:
from_attributes = True
class TrainingProgress(BaseModel):
"""Training progress update schema"""
job_id: str
progress: int
current_step: str
estimated_completion: Optional[datetime]
class TrainingMetrics(BaseModel):
"""Training metrics schema"""
total_jobs: int
successful_jobs: int
failed_jobs: int
average_duration: float
models_trained: int
active_models: int
class ModelValidationResult(BaseModel):
"""Model validation result schema"""
product_name: str
is_valid: bool
accuracy_score: float
validation_error: Optional[str]
recommendations: List[str]
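
A quick sketch of how these schemas behave under pydantic v2 validation; the product names and values are examples only.

from pydantic import ValidationError

request = TrainingRequest(products=["Croissant", "Baguette"], training_days=365)
print(request.model_dump())

# Field constraints reject out-of-range values
try:
    TrainingRequest(training_days=10)
except ValidationError as e:
    print(e)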

@@ -0,0 +1,50 @@
"""
Messaging service for training service
"""
from shared.messaging.rabbitmq import RabbitMQClient
from app.core.config import settings
# Global message publisher
message_publisher = RabbitMQClient(settings.RABBITMQ_URL)

# services/training/Dockerfile
FROM python:3.11-slim
WORKDIR /app
# Install system dependencies
RUN apt-get update && apt-get install -y \
gcc \
g++ \
curl \
&& rm -rf /var/lib/apt/lists/*
# Copy requirements
COPY requirements.txt .
# Install Python dependencies
RUN pip install --no-cache-dir -r requirements.txt
# Copy shared libraries (assumes a sibling "shared" build context or stage is provided at build time)
COPY --from=shared /shared /app/shared
# Copy application code
COPY . .
# Create model storage directory
RUN mkdir -p /app/models
# Add shared libraries to Python path
ENV PYTHONPATH="/app:/app/shared:$PYTHONPATH"
# Expose port
EXPOSE 8000
# Health check
HEALTHCHECK --interval=30s --timeout=10s --start-period=5s --retries=3 \
CMD curl -f http://localhost:8000/health || exit 1
# Run application
CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000"]

@@ -0,0 +1,84 @@
fastapi==0.104.1
uvicorn[standard]==0.24.0
sqlalchemy==2.0.23
asyncpg==0.29.0
alembic==1.12.1
pydantic==2.5.0
pydantic-settings==2.1.0
httpx==0.25.2
redis==5.0.1
aio-pika==9.3.0
prometheus-client==0.17.1
python-json-logger==2.0.4
# ML dependencies
prophet==1.1.4
scikit-learn==1.3.2
pandas==2.1.4
numpy==1.24.4
joblib==1.3.2
scipy==1.11.4
# Utilities
pytz==2023.3
python-dateutil==2.8.2

@@ -0,0 +1,41 @@
"""
Authentication decorators for FastAPI
"""
from functools import wraps
from fastapi import HTTPException, Depends
from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials
import httpx
import logging
logger = logging.getLogger(__name__)
security = HTTPBearer()
def verify_service_token(auth_service_url: str):
"""Verify service token with auth service"""
    async def verify_token(token: HTTPAuthorizationCredentials = Depends(security)):
try:
async with httpx.AsyncClient() as client:
response = await client.post(
f"{auth_service_url}/verify",
headers={"Authorization": f"Bearer {token.credentials}"}
)
if response.status_code == 200:
return response.json()
else:
raise HTTPException(
status_code=401,
detail="Invalid authentication credentials"
)
except httpx.RequestError as e:
logger.error(f"Auth service unavailable: {e}")
raise HTTPException(
status_code=503,
detail="Authentication service unavailable"
)
return verify_token

@@ -0,0 +1,58 @@
"""
Shared JWT Authentication Handler
Used across all microservices for consistent authentication
"""
import jwt
from datetime import datetime, timedelta
from typing import Optional, Dict, Any
import logging
logger = logging.getLogger(__name__)
class JWTHandler:
"""JWT token handling for microservices"""
def __init__(self, secret_key: str, algorithm: str = "HS256"):
self.secret_key = secret_key
self.algorithm = algorithm
def create_access_token(self, data: Dict[str, Any], expires_delta: Optional[timedelta] = None) -> str:
"""Create JWT access token"""
to_encode = data.copy()
if expires_delta:
expire = datetime.utcnow() + expires_delta
else:
expire = datetime.utcnow() + timedelta(minutes=30)
to_encode.update({"exp": expire, "type": "access"})
encoded_jwt = jwt.encode(to_encode, self.secret_key, algorithm=self.algorithm)
return encoded_jwt
def create_refresh_token(self, data: Dict[str, Any], expires_delta: Optional[timedelta] = None) -> str:
"""Create JWT refresh token"""
to_encode = data.copy()
if expires_delta:
expire = datetime.utcnow() + expires_delta
else:
expire = datetime.utcnow() + timedelta(days=7)
to_encode.update({"exp": expire, "type": "refresh"})
encoded_jwt = jwt.encode(to_encode, self.secret_key, algorithm=self.algorithm)
return encoded_jwt
def verify_token(self, token: str) -> Optional[Dict[str, Any]]:
"""Verify and decode JWT token"""
try:
payload = jwt.decode(token, self.secret_key, algorithms=[self.algorithm])
return payload
except jwt.ExpiredSignatureError:
logger.warning("Token has expired")
return None
except jwt.InvalidTokenError:
logger.warning("Invalid token")
return None
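
A minimal usage sketch, assuming PyJWT is installed and using a placeholder secret:

handler = JWTHandler(secret_key="change-me")

token = handler.create_access_token({"sub": "user-123", "tenant_id": "tenant-1"})
payload = handler.verify_token(token)
if payload is not None:
    print(payload["sub"], payload["type"])  # user-123 access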

@@ -0,0 +1,56 @@
"""
Base database configuration for all microservices
"""
from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine
from sqlalchemy.orm import sessionmaker, declarative_base
import logging
logger = logging.getLogger(__name__)
Base = declarative_base()
class DatabaseManager:
"""Database manager for microservices"""
def __init__(self, database_url: str):
self.database_url = database_url
self.async_engine = create_async_engine(
database_url,
echo=False,
pool_pre_ping=True,
pool_recycle=300,
pool_size=20,
max_overflow=30
)
self.async_session_local = sessionmaker(
self.async_engine,
class_=AsyncSession,
expire_on_commit=False
)
async def get_db(self):
"""Get database session"""
async with self.async_session_local() as session:
try:
yield session
except Exception as e:
logger.error(f"Database session error: {e}")
await session.rollback()
raise
finally:
await session.close()
async def create_tables(self):
"""Create database tables"""
async with self.async_engine.begin() as conn:
await conn.run_sync(Base.metadata.create_all)
async def drop_tables(self):
"""Drop database tables"""
async with self.async_engine.begin() as conn:
await conn.run_sync(Base.metadata.drop_all)
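
Each service wires the manager into FastAPI roughly as below; the URL is a placeholder for the service's own settings.DATABASE_URL.

from fastapi import Depends, FastAPI
from sqlalchemy.ext.asyncio import AsyncSession

database_manager = DatabaseManager("postgresql+asyncpg://user:pass@db:5432/app")
app = FastAPI()

@app.get("/items")
async def list_items(db: AsyncSession = Depends(database_manager.get_db)):
    ...  # issue queries against the session here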

@@ -0,0 +1,73 @@
"""
Event definitions for microservices communication
"""
from dataclasses import dataclass, field
from datetime import datetime
from typing import Dict, Any, Optional
import uuid
@dataclass
class BaseEvent:
    """Base event class"""
    # All fields need defaults (or default factories) so that subclasses can
    # override event_type with a default value without violating dataclass
    # field-ordering rules
    event_type: str = ""
    service_name: str = ""
    data: Dict[str, Any] = field(default_factory=dict)
    event_id: str = field(default_factory=lambda: str(uuid.uuid4()))
    timestamp: datetime = field(default_factory=datetime.utcnow)
    correlation_id: Optional[str] = None
# Training Events
@dataclass
class TrainingStartedEvent(BaseEvent):
event_type: str = "training.started"
@dataclass
class TrainingCompletedEvent(BaseEvent):
event_type: str = "training.completed"
@dataclass
class TrainingFailedEvent(BaseEvent):
event_type: str = "training.failed"
# Forecasting Events
@dataclass
class ForecastGeneratedEvent(BaseEvent):
event_type: str = "forecast.generated"
@dataclass
class ForecastRequestedEvent(BaseEvent):
event_type: str = "forecast.requested"
# User Events
@dataclass
class UserRegisteredEvent(BaseEvent):
event_type: str = "user.registered"
@dataclass
class UserLoginEvent(BaseEvent):
event_type: str = "user.login"
# Tenant Events
@dataclass
class TenantCreatedEvent(BaseEvent):
event_type: str = "tenant.created"
@dataclass
class TenantUpdatedEvent(BaseEvent):
event_type: str = "tenant.updated"
# Notification Events
@dataclass
class NotificationSentEvent(BaseEvent):
event_type: str = "notification.sent"
@dataclass
class NotificationFailedEvent(BaseEvent):
event_type: str = "notification.failed"

@@ -0,0 +1,96 @@
"""
RabbitMQ messaging client for microservices
"""
import asyncio
import json
import logging
from typing import Dict, Any, Callable
import aio_pika
from aio_pika import connect_robust, Message, DeliveryMode
logger = logging.getLogger(__name__)
class RabbitMQClient:
"""RabbitMQ client for microservices communication"""
def __init__(self, connection_url: str):
self.connection_url = connection_url
self.connection = None
self.channel = None
async def connect(self):
"""Connect to RabbitMQ"""
try:
self.connection = await connect_robust(self.connection_url)
self.channel = await self.connection.channel()
logger.info("Connected to RabbitMQ")
except Exception as e:
logger.error(f"Failed to connect to RabbitMQ: {e}")
raise
async def disconnect(self):
"""Disconnect from RabbitMQ"""
if self.connection:
await self.connection.close()
logger.info("Disconnected from RabbitMQ")
async def publish_event(self, exchange_name: str, routing_key: str, event_data: Dict[str, Any]):
"""Publish event to RabbitMQ"""
try:
if not self.channel:
await self.connect()
# Declare exchange
exchange = await self.channel.declare_exchange(
exchange_name,
aio_pika.ExchangeType.TOPIC,
durable=True
)
# Create message
message = Message(
json.dumps(event_data).encode(),
delivery_mode=DeliveryMode.PERSISTENT,
content_type="application/json"
)
# Publish message
await exchange.publish(message, routing_key=routing_key)
logger.info(f"Published event to {exchange_name} with routing key {routing_key}")
except Exception as e:
logger.error(f"Failed to publish event: {e}")
raise
async def consume_events(self, exchange_name: str, queue_name: str, routing_key: str, callback: Callable):
"""Consume events from RabbitMQ"""
try:
if not self.channel:
await self.connect()
# Declare exchange
exchange = await self.channel.declare_exchange(
exchange_name,
aio_pika.ExchangeType.TOPIC,
durable=True
)
# Declare queue
queue = await self.channel.declare_queue(
queue_name,
durable=True
)
# Bind queue to exchange
await queue.bind(exchange, routing_key)
# Set up consumer
await queue.consume(callback)
logger.info(f"Started consuming events from {queue_name}")
except Exception as e:
logger.error(f"Failed to consume events: {e}")
raise
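
A hedged publish example; the exchange and routing-key names are assumptions, only the connection URL matches the config defaults elsewhere in this commit.

import asyncio

async def publish_demo():
    client = RabbitMQClient("amqp://bakery:forecast123@rabbitmq:5672/")
    await client.connect()
    await client.publish_event(
        exchange_name="bakery.events",
        routing_key="training.completed",
        event_data={"job_id": "abc123", "status": "completed"},
    )
    await client.disconnect()

asyncio.run(publish_demo())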

@@ -0,0 +1,77 @@
"""
Centralized logging configuration for microservices
"""
import logging
import logging.config
import os
from typing import Dict, Any
def setup_logging(service_name: str, log_level: str = "INFO") -> None:
"""Set up logging configuration for a microservice"""
config: Dict[str, Any] = {
"version": 1,
"disable_existing_loggers": False,
"formatters": {
"standard": {
"format": "%(asctime)s [%(levelname)s] %(name)s: %(message)s"
},
"detailed": {
"format": "%(asctime)s [%(levelname)s] %(name)s [%(filename)s:%(lineno)d] %(message)s"
},
"json": {
"()": "pythonjsonlogger.jsonlogger.JsonFormatter",
"format": "%(asctime)s %(name)s %(levelname)s %(message)s"
}
},
"handlers": {
"console": {
"class": "logging.StreamHandler",
"level": log_level,
"formatter": "standard",
"stream": "ext://sys.stdout"
},
"file": {
"class": "logging.FileHandler",
"level": log_level,
"formatter": "detailed",
"filename": f"/var/log/{service_name}.log",
"mode": "a"
},
"logstash": {
"class": "logstash.TCPLogstashHandler",
"host": os.getenv("LOGSTASH_HOST", "localhost"),
"port": int(os.getenv("LOGSTASH_PORT", "5000")),
"version": 1,
"message_type": "logstash",
"fqdn": False,
"tags": [service_name]
}
},
"loggers": {
"": {
"handlers": ["console", "file"],
"level": log_level,
"propagate": False
},
"uvicorn": {
"handlers": ["console"],
"level": log_level,
"propagate": False
},
"uvicorn.access": {
"handlers": ["console"],
"level": log_level,
"propagate": False
}
}
}
# Add logstash handler if in production
if os.getenv("ENVIRONMENT") == "production":
config["loggers"][""]["handlers"].append("logstash")
logging.config.dictConfig(config)
logger = logging.getLogger(__name__)
logger.info(f"Logging configured for {service_name}")

@@ -0,0 +1,112 @@
"""
Metrics collection for microservices
"""
import time
import logging
from typing import Dict, Any
from prometheus_client import Counter, Histogram, Gauge, start_http_server
from functools import wraps
logger = logging.getLogger(__name__)
# Prometheus metrics
REQUEST_COUNT = Counter(
'http_requests_total',
'Total HTTP requests',
['method', 'endpoint', 'status_code', 'service']
)
REQUEST_DURATION = Histogram(
'http_request_duration_seconds',
'HTTP request duration in seconds',
['method', 'endpoint', 'service']
)
ACTIVE_CONNECTIONS = Gauge(
'active_connections',
'Active database connections',
['service']
)
TRAINING_JOBS = Counter(
'training_jobs_total',
'Total training jobs',
['status', 'service']
)
FORECASTS_GENERATED = Counter(
'forecasts_generated_total',
'Total forecasts generated',
['service']
)
class MetricsCollector:
"""Metrics collector for microservices"""
def __init__(self, service_name: str):
self.service_name = service_name
self.start_time = time.time()
def start_metrics_server(self, port: int = 8080):
"""Start Prometheus metrics server"""
try:
start_http_server(port)
logger.info(f"Metrics server started on port {port}")
except Exception as e:
logger.error(f"Failed to start metrics server: {e}")
def record_request(self, method: str, endpoint: str, status_code: int, duration: float):
"""Record HTTP request metrics"""
REQUEST_COUNT.labels(
method=method,
endpoint=endpoint,
status_code=status_code,
service=self.service_name
).inc()
REQUEST_DURATION.labels(
method=method,
endpoint=endpoint,
service=self.service_name
).observe(duration)
def record_training_job(self, status: str):
"""Record training job metrics"""
TRAINING_JOBS.labels(
status=status,
service=self.service_name
).inc()
def record_forecast_generated(self):
"""Record forecast generation metrics"""
FORECASTS_GENERATED.labels(
service=self.service_name
).inc()
def set_active_connections(self, count: int):
"""Set active database connections"""
ACTIVE_CONNECTIONS.labels(
service=self.service_name
).set(count)
def metrics_middleware(metrics_collector: MetricsCollector):
    """HTTP middleware factory that records request count and duration"""
    async def middleware(request, call_next):
        start_time = time.time()
        # call_next is a coroutine in Starlette/FastAPI, so it must be awaited
        response = await call_next(request)
        duration = time.time() - start_time
        metrics_collector.record_request(
            method=request.method,
            endpoint=request.url.path,
            status_code=response.status_code,
            duration=duration
        )
        return response
    return middleware
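
A sketch of attaching the middleware to a FastAPI app; FastAPI passes (request, call_next) to the registered callable.

from fastapi import FastAPI

app = FastAPI()
collector = MetricsCollector("training-service")
app.middleware("http")(metrics_middleware(collector))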

@@ -0,0 +1,71 @@
"""
DateTime utilities for microservices
"""
from datetime import datetime, timezone, timedelta
from typing import Optional
import pytz
def utc_now() -> datetime:
"""Get current UTC datetime"""
return datetime.now(timezone.utc)
def madrid_now() -> datetime:
"""Get current Madrid datetime"""
madrid_tz = pytz.timezone('Europe/Madrid')
return datetime.now(madrid_tz)
def to_utc(dt: datetime) -> datetime:
"""Convert datetime to UTC"""
if dt.tzinfo is None:
dt = dt.replace(tzinfo=timezone.utc)
return dt.astimezone(timezone.utc)
def to_madrid(dt: datetime) -> datetime:
"""Convert datetime to Madrid timezone"""
madrid_tz = pytz.timezone('Europe/Madrid')
if dt.tzinfo is None:
dt = dt.replace(tzinfo=timezone.utc)
return dt.astimezone(madrid_tz)
def format_datetime(dt: datetime, format_str: str = "%Y-%m-%d %H:%M:%S") -> str:
"""Format datetime as string"""
return dt.strftime(format_str)
def parse_datetime(dt_str: str, format_str: str = "%Y-%m-%d %H:%M:%S") -> datetime:
"""Parse datetime from string"""
return datetime.strptime(dt_str, format_str)
def is_business_hours(dt: Optional[datetime] = None) -> bool:
"""Check if datetime is during business hours (9 AM - 6 PM Madrid time)"""
if dt is None:
dt = madrid_now()
if dt.tzinfo is None:
dt = dt.replace(tzinfo=timezone.utc)
madrid_dt = to_madrid(dt)
# Check if it's a weekday (Monday=0, Sunday=6)
if madrid_dt.weekday() >= 5: # Weekend
return False
# Check if it's business hours
return 9 <= madrid_dt.hour < 18
def next_business_day(dt: Optional[datetime] = None) -> datetime:
"""Get next business day"""
if dt is None:
dt = madrid_now()
if dt.tzinfo is None:
dt = dt.replace(tzinfo=timezone.utc)
madrid_dt = to_madrid(dt)
# Add days until we reach a weekday
while madrid_dt.weekday() >= 5: # Weekend
madrid_dt += timedelta(days=1)
# Set to 9 AM
return madrid_dt.replace(hour=9, minute=0, second=0, microsecond=0)
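
Quick sanity checks for the helpers above (values are illustrative; naive datetimes are treated as UTC):

ts = parse_datetime("2025-07-17 08:30:00")
print(to_madrid(ts))            # 2025-07-17 10:30:00+02:00 (CEST)
print(is_business_hours(ts))    # True: a Thursday, 10:30 Madrid time
print(format_datetime(next_business_day()))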

@@ -0,0 +1,67 @@
"""
Validation utilities for microservices
"""
import re
from typing import Any, Optional
from email_validator import validate_email, EmailNotValidError
def validate_spanish_phone(phone: str) -> bool:
"""Validate Spanish phone number"""
# Spanish phone pattern: +34 followed by 9 digits
pattern = r'^(\+34|0034|34)?[6-9]\d{8}$'
return bool(re.match(pattern, phone.replace(' ', '').replace('-', '')))
def validate_email_address(email: str) -> bool:
"""Validate email address"""
try:
validate_email(email)
return True
except EmailNotValidError:
return False
def validate_tenant_name(name: str) -> bool:
"""Validate tenant name"""
# Must be 2-50 characters, letters, numbers, spaces, hyphens, apostrophes
pattern = r"^[a-zA-ZÀ-ÿ0-9\s\-']{2,50}$"
return bool(re.match(pattern, name))
def validate_address(address: str) -> bool:
"""Validate address"""
# Must be 5-200 characters
return 5 <= len(address.strip()) <= 200
def validate_coordinates(latitude: float, longitude: float) -> bool:
"""Validate Madrid coordinates"""
# Madrid is roughly between these coordinates
madrid_bounds = {
'lat_min': 40.3,
'lat_max': 40.6,
'lon_min': -3.8,
'lon_max': -3.5
}
return (
madrid_bounds['lat_min'] <= latitude <= madrid_bounds['lat_max'] and
madrid_bounds['lon_min'] <= longitude <= madrid_bounds['lon_max']
)
def validate_product_name(name: str) -> bool:
"""Validate product name"""
# Must be 1-50 characters, letters, numbers, spaces
pattern = r"^[a-zA-ZÀ-ÿ0-9\s]{1,50}$"
return bool(re.match(pattern, name))
def validate_positive_number(value: Any) -> bool:
"""Validate positive number"""
try:
return float(value) > 0
except (ValueError, TypeError):
return False
def validate_non_negative_number(value: Any) -> bool:
"""Validate non-negative number"""
try:
return float(value) >= 0
except (ValueError, TypeError):
return False
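
Example calls, with illustrative values:

assert validate_spanish_phone("+34 612 345 678")
assert validate_coordinates(40.42, -3.70)        # central Madrid
assert not validate_coordinates(41.39, 2.17)     # Barcelona, outside the bounds
assert validate_positive_number("3.5")
assert not validate_non_negative_number("abc")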