Fix dockerfile
@@ -162,9 +162,9 @@ services:

   # Authentication Service
   auth-service:
-    build:
-      context: ./services/auth
-      dockerfile: Dockerfile
+    build:
+      context: . # Build context is the project root
+      dockerfile: ./services/auth/Dockerfile
     container_name: bakery-auth-service
     environment:
       - DATABASE_URL=postgresql+asyncpg://auth_user:auth_pass123@auth-db:5432/auth_db
@@ -188,6 +188,7 @@ services:
       - bakery-network
     volumes:
       - ./services/auth:/app
+      - ./shared:/app/shared # Explicitly mount shared
     healthcheck:
       test: ["CMD", "curl", "-f", "http://localhost:8000/health"]
       interval: 30s
@@ -196,9 +197,9 @@ services:

   # Training Service
   training-service:
-    build:
-      context: ./services/training
-      dockerfile: Dockerfile
+    build:
+      context: . # Build context is the project root
+      dockerfile: ./services/training/Dockerfile
     container_name: bakery-training-service
     environment:
       - DATABASE_URL=postgresql+asyncpg://training_user:training_pass123@training-db:5432/training_db
@@ -223,6 +224,7 @@ services:
       - bakery-network
     volumes:
       - ./services/training:/app
+      - ./shared:/app/shared # Explicitly mount shared
     healthcheck:
       test: ["CMD", "curl", "-f", "http://localhost:8000/health"]
       interval: 30s
@@ -231,9 +233,9 @@ services:

   # Forecasting Service
   forecasting-service:
-    build:
-      context: ./services/forecasting
-      dockerfile: Dockerfile
+    build:
+      context: . # Build context is the project root
+      dockerfile: ./services/forecasting/Dockerfile
     container_name: bakery-forecasting-service
     environment:
       - DATABASE_URL=postgresql+asyncpg://forecasting_user:forecasting_pass123@forecasting-db:5432/forecasting_db
@@ -259,6 +261,7 @@ services:
       - bakery-network
     volumes:
       - ./services/forecasting:/app
+      - ./shared:/app/shared # Explicitly mount shared
     healthcheck:
       test: ["CMD", "curl", "-f", "http://localhost:8000/health"]
       interval: 30s
@@ -267,9 +270,9 @@ services:

   # Data Service
   data-service:
-    build:
-      context: ./services/data
-      dockerfile: Dockerfile
+    build:
+      context: . # Build context is the project root
+      dockerfile: ./services/data/Dockerfile
     container_name: bakery-data-service
     environment:
       - DATABASE_URL=postgresql+asyncpg://data_user:data_pass123@data-db:5432/data_db
@@ -295,6 +298,7 @@ services:
       - bakery-network
     volumes:
       - ./services/data:/app
+      - ./shared:/app/shared # Explicitly mount shared
     healthcheck:
       test: ["CMD", "curl", "-f", "http://localhost:8000/health"]
       interval: 30s
@@ -303,9 +307,9 @@ services:

   # Tenant Service
   tenant-service:
-    build:
-      context: ./services/tenant
-      dockerfile: Dockerfile
+    build:
+      context: . # Build context is the project root
+      dockerfile: ./services/tenant/Dockerfile
     container_name: bakery-tenant-service
     environment:
       - DATABASE_URL=postgresql+asyncpg://tenant_user:tenant_pass123@tenant-db:5432/tenant_db
@@ -329,6 +333,7 @@ services:
       - bakery-network
     volumes:
       - ./services/tenant:/app
+      - ./shared:/app/shared # Explicitly mount shared
     healthcheck:
       test: ["CMD", "curl", "-f", "http://localhost:8000/health"]
       interval: 30s
@@ -337,9 +342,9 @@ services:

   # Notification Service
   notification-service:
-    build:
-      context: ./services/notification
-      dockerfile: Dockerfile
+    build:
+      context: . # Build context is the project root
+      dockerfile: ./services/notification/Dockerfile
     container_name: bakery-notification-service
     environment:
       - DATABASE_URL=postgresql+asyncpg://notification_user:notification_pass123@notification-db:5432/notification_db
@@ -368,6 +373,7 @@ services:
       - bakery-network
     volumes:
       - ./services/notification:/app
+      - ./shared:/app/shared # Explicitly mount shared
     healthcheck:
       test: ["CMD", "curl", "-f", "http://localhost:8000/health"]
       interval: 30s
@@ -376,9 +382,9 @@ services:

   # API Gateway
   gateway:
-    build:
-      context: ./gateway
-      dockerfile: Dockerfile
+    build:
+      context: . # Build context is the project root
+      dockerfile: ./gateway/Dockerfile
     container_name: bakery-gateway
     environment:
       - REDIS_URL=redis://redis:6379/6
@@ -410,6 +416,7 @@ services:
       - bakery-network
     volumes:
       - ./gateway:/app
+      - ./shared:/app/shared # Explicitly mount shared
     healthcheck:
       test: ["CMD", "curl", "-f", "http://localhost:8000/health"]
       interval: 30s
@@ -418,7 +425,7 @@ services:

   # Dashboard Frontend
   dashboard:
-    build:
+    build:
       context: ./frontend
       dockerfile: Dockerfile.dev
     container_name: bakery-dashboard
@@ -493,7 +500,7 @@ services:
     volumes:
       - ./infrastructure/monitoring/logstash:/usr/share/logstash/pipeline
     ports:
-      - "5044:5000"
+      - "5044:5000"
     depends_on:
       - elasticsearch
     networks:
@@ -1,3 +1,9 @@
+# Add this stage at the top of each service Dockerfile
+FROM python:3.11-slim as shared
+WORKDIR /shared
+COPY shared/ /shared/
+
+# Then your main service stage
 FROM python:3.11-slim

 WORKDIR /app
@@ -9,13 +15,16 @@ RUN apt-get update && apt-get install -y \
     && rm -rf /var/lib/apt/lists/*

 # Copy requirements
-COPY requirements.txt .
+COPY gateway/requirements.txt .

 # Install Python dependencies
 RUN pip install --no-cache-dir -r requirements.txt

+# Copy shared libraries from the shared stage
+COPY --from=shared /shared /app/shared
+
 # Copy application code
-COPY . .
+COPY gateway/ .

 # Add shared libraries to Python path
 ENV PYTHONPATH="/app:/app/shared:$PYTHONPATH"
@@ -1,41 +0,0 @@
-"""
-Authentication decorators for FastAPI
-"""
-
-from functools import wraps
-from fastapi import HTTPException, Depends
-from fastapi.security import HTTPBearer
-import httpx
-import logging
-
-logger = logging.getLogger(__name__)
-
-security = HTTPBearer()
-
-def verify_service_token(auth_service_url: str):
-    """Verify service token with auth service"""
-
-    async def verify_token(token: str = Depends(security)):
-        try:
-            async with httpx.AsyncClient() as client:
-                response = await client.post(
-                    f"{auth_service_url}/verify",
-                    headers={"Authorization": f"Bearer {token.credentials}"}
-                )
-
-                if response.status_code == 200:
-                    return response.json()
-                else:
-                    raise HTTPException(
-                        status_code=401,
-                        detail="Invalid authentication credentials"
-                    )
-
-        except httpx.RequestError as e:
-            logger.error(f"Auth service unavailable: {e}")
-            raise HTTPException(
-                status_code=503,
-                detail="Authentication service unavailable"
-            )
-
-    return verify_token
@@ -1,58 +0,0 @@
-"""
-Shared JWT Authentication Handler
-Used across all microservices for consistent authentication
-"""
-
-from jose import jwt
-from datetime import datetime, timedelta
-from typing import Optional, Dict, Any
-import logging
-
-logger = logging.getLogger(__name__)
-
-class JWTHandler:
-    """JWT token handling for microservices"""
-
-    def __init__(self, secret_key: str, algorithm: str = "HS256"):
-        self.secret_key = secret_key
-        self.algorithm = algorithm
-
-    def create_access_token(self, data: Dict[str, Any], expires_delta: Optional[timedelta] = None) -> str:
-        """Create JWT access token"""
-        to_encode = data.copy()
-
-        if expires_delta:
-            expire = datetime.now(datetime.timezone.utc) + expires_delta
-        else:
-            expire = datetime.now(datetime.timezone.utc) + timedelta(minutes=30)
-
-        to_encode.update({"exp": expire, "type": "access"})
-
-        encoded_jwt = jwt.encode(to_encode, self.secret_key, algorithm=self.algorithm)
-        return encoded_jwt
-
-    def create_refresh_token(self, data: Dict[str, Any], expires_delta: Optional[timedelta] = None) -> str:
-        """Create JWT refresh token"""
-        to_encode = data.copy()
-
-        if expires_delta:
-            expire = datetime.now(datetime.timezone.utc) + expires_delta
-        else:
-            expire = datetime.now(datetime.timezone.utc) + timedelta(days=7)
-
-        to_encode.update({"exp": expire, "type": "refresh"})
-
-        encoded_jwt = jwt.encode(to_encode, self.secret_key, algorithm=self.algorithm)
-        return encoded_jwt
-
-    def verify_token(self, token: str) -> Optional[Dict[str, Any]]:
-        """Verify and decode JWT token"""
-        try:
-            payload = jwt.decode(token, self.secret_key, algorithms=[self.algorithm])
-            return payload
-        except jwt.ExpiredSignatureError:
-            logger.warning("Token has expired")
-            return None
-        except jwt.JWTError:
-            logger.warning("Invalid token")
-            return None
@@ -1,56 +0,0 @@
-"""
-Base database configuration for all microservices
-"""
-
-import os
-from sqlalchemy import create_engine
-from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine
-from sqlalchemy.orm import sessionmaker, declarative_base
-from sqlalchemy.pool import StaticPool
-import logging
-
-logger = logging.getLogger(__name__)
-
-Base = declarative_base()
-
-class DatabaseManager:
-    """Database manager for microservices"""
-
-    def __init__(self, database_url: str):
-        self.database_url = database_url
-        self.async_engine = create_async_engine(
-            database_url,
-            echo=False,
-            pool_pre_ping=True,
-            pool_recycle=300,
-            pool_size=20,
-            max_overflow=30
-        )
-
-        self.async_session_local = sessionmaker(
-            self.async_engine,
-            class_=AsyncSession,
-            expire_on_commit=False
-        )
-
-    async def get_db(self):
-        """Get database session"""
-        async with self.async_session_local() as session:
-            try:
-                yield session
-            except Exception as e:
-                logger.error(f"Database session error: {e}")
-                await session.rollback()
-                raise
-            finally:
-                await session.close()
-
-    async def create_tables(self):
-        """Create database tables"""
-        async with self.async_engine.begin() as conn:
-            await conn.run_sync(Base.metadata.create_all)
-
-    async def drop_tables(self):
-        """Drop database tables"""
-        async with self.async_engine.begin() as conn:
-            await conn.run_sync(Base.metadata.drop_all)
@@ -1,73 +0,0 @@
-"""
-Event definitions for microservices communication
-"""
-
-from dataclasses import dataclass
-from datetime import datetime
-from typing import Dict, Any, Optional
-import uuid
-
-@dataclass
-class BaseEvent:
-    """Base event class"""
-    event_id: str
-    event_type: str
-    service_name: str
-    timestamp: datetime
-    data: Dict[str, Any]
-    correlation_id: Optional[str] = None
-
-    def __post_init__(self):
-        if not self.event_id:
-            self.event_id = str(uuid.uuid4())
-        if not self.timestamp:
-            self.timestamp = datetime.now(datetime.timezone.utc)
-
-# Training Events
-@dataclass
-class TrainingStartedEvent(BaseEvent):
-    event_type: str = "training.started"
-
-@dataclass
-class TrainingCompletedEvent(BaseEvent):
-    event_type: str = "training.completed"
-
-@dataclass
-class TrainingFailedEvent(BaseEvent):
-    event_type: str = "training.failed"
-
-# Forecasting Events
-@dataclass
-class ForecastGeneratedEvent(BaseEvent):
-    event_type: str = "forecast.generated"
-
-@dataclass
-class ForecastRequestedEvent(BaseEvent):
-    event_type: str = "forecast.requested"
-
-# User Events
-@dataclass
-class UserRegisteredEvent(BaseEvent):
-    event_type: str = "user.registered"
-
-@dataclass
-class UserLoginEvent(BaseEvent):
-    event_type: str = "user.login"
-
-# Tenant Events
-@dataclass
-class TenantCreatedEvent(BaseEvent):
-    event_type: str = "tenant.created"
-
-@dataclass
-class TenantUpdatedEvent(BaseEvent):
-    event_type: str = "tenant.updated"
-
-# Notification Events
-@dataclass
-class NotificationSentEvent(BaseEvent):
-    event_type: str = "notification.sent"
-
-@dataclass
-class NotificationFailedEvent(BaseEvent):
-    event_type: str = "notification.failed"
@@ -1,96 +0,0 @@
-"""
-RabbitMQ messaging client for microservices
-"""
-
-import asyncio
-import json
-import logging
-from typing import Dict, Any, Callable
-import aio_pika
-from aio_pika import connect_robust, Message, DeliveryMode
-
-logger = logging.getLogger(__name__)
-
-class RabbitMQClient:
-    """RabbitMQ client for microservices communication"""
-
-    def __init__(self, connection_url: str):
-        self.connection_url = connection_url
-        self.connection = None
-        self.channel = None
-
-    async def connect(self):
-        """Connect to RabbitMQ"""
-        try:
-            self.connection = await connect_robust(self.connection_url)
-            self.channel = await self.connection.channel()
-            logger.info("Connected to RabbitMQ")
-        except Exception as e:
-            logger.error(f"Failed to connect to RabbitMQ: {e}")
-            raise
-
-    async def disconnect(self):
-        """Disconnect from RabbitMQ"""
-        if self.connection:
-            await self.connection.close()
-            logger.info("Disconnected from RabbitMQ")
-
-    async def publish_event(self, exchange_name: str, routing_key: str, event_data: Dict[str, Any]):
-        """Publish event to RabbitMQ"""
-        try:
-            if not self.channel:
-                await self.connect()
-
-            # Declare exchange
-            exchange = await self.channel.declare_exchange(
-                exchange_name,
-                aio_pika.ExchangeType.TOPIC,
-                durable=True
-            )
-
-            # Create message
-            message = Message(
-                json.dumps(event_data).encode(),
-                delivery_mode=DeliveryMode.PERSISTENT,
-                content_type="application/json"
-            )
-
-            # Publish message
-            await exchange.publish(message, routing_key=routing_key)
-
-            logger.info(f"Published event to {exchange_name} with routing key {routing_key}")
-
-        except Exception as e:
-            logger.error(f"Failed to publish event: {e}")
-            raise
-
-    async def consume_events(self, exchange_name: str, queue_name: str, routing_key: str, callback: Callable):
-        """Consume events from RabbitMQ"""
-        try:
-            if not self.channel:
-                await self.connect()
-
-            # Declare exchange
-            exchange = await self.channel.declare_exchange(
-                exchange_name,
-                aio_pika.ExchangeType.TOPIC,
-                durable=True
-            )
-
-            # Declare queue
-            queue = await self.channel.declare_queue(
-                queue_name,
-                durable=True
-            )
-
-            # Bind queue to exchange
-            await queue.bind(exchange, routing_key)
-
-            # Set up consumer
-            await queue.consume(callback)
-
-            logger.info(f"Started consuming events from {queue_name}")
-
-        except Exception as e:
-            logger.error(f"Failed to consume events: {e}")
-            raise
@@ -1,77 +0,0 @@
-"""
-Centralized logging configuration for microservices
-"""
-
-import logging
-import logging.config
-import os
-from typing import Dict, Any
-
-def setup_logging(service_name: str, log_level: str = "INFO") -> None:
-    """Set up logging configuration for a microservice"""
-
-    config: Dict[str, Any] = {
-        "version": 1,
-        "disable_existing_loggers": False,
-        "formatters": {
-            "standard": {
-                "format": "%(asctime)s [%(levelname)s] %(name)s: %(message)s"
-            },
-            "detailed": {
-                "format": "%(asctime)s [%(levelname)s] %(name)s [%(filename)s:%(lineno)d] %(message)s"
-            },
-            "json": {
-                "()": "pythonjsonlogger.jsonlogger.JsonFormatter",
-                "format": "%(asctime)s %(name)s %(levelname)s %(message)s"
-            }
-        },
-        "handlers": {
-            "console": {
-                "class": "logging.StreamHandler",
-                "level": log_level,
-                "formatter": "standard",
-                "stream": "ext://sys.stdout"
-            },
-            "file": {
-                "class": "logging.FileHandler",
-                "level": log_level,
-                "formatter": "detailed",
-                "filename": f"/var/log/{service_name}.log",
-                "mode": "a"
-            },
-            "logstash": {
-                "class": "logstash.TCPLogstashHandler",
-                "host": os.getenv("LOGSTASH_HOST", "localhost"),
-                "port": int(os.getenv("LOGSTASH_PORT", "5000")),
-                "version": 1,
-                "message_type": "logstash",
-                "fqdn": False,
-                "tags": [service_name]
-            }
-        },
-        "loggers": {
-            "": {
-                "handlers": ["console", "file"],
-                "level": log_level,
-                "propagate": False
-            },
-            "uvicorn": {
-                "handlers": ["console"],
-                "level": log_level,
-                "propagate": False
-            },
-            "uvicorn.access": {
-                "handlers": ["console"],
-                "level": log_level,
-                "propagate": False
-            }
-        }
-    }
-
-    # Add logstash handler if in production
-    if os.getenv("ENVIRONMENT") == "production":
-        config["loggers"][""]["handlers"].append("logstash")
-
-    logging.config.dictConfig(config)
-    logger = logging.getLogger(__name__)
-    logger.info(f"Logging configured for {service_name}")
@@ -1,112 +0,0 @@
-"""
-Metrics collection for microservices
-"""
-
-import time
-import logging
-from typing import Dict, Any
-from prometheus_client import Counter, Histogram, Gauge, start_http_server
-from functools import wraps
-
-logger = logging.getLogger(__name__)
-
-# Prometheus metrics
-REQUEST_COUNT = Counter(
-    'http_requests_total',
-    'Total HTTP requests',
-    ['method', 'endpoint', 'status_code', 'service']
-)
-
-REQUEST_DURATION = Histogram(
-    'http_request_duration_seconds',
-    'HTTP request duration in seconds',
-    ['method', 'endpoint', 'service']
-)
-
-ACTIVE_CONNECTIONS = Gauge(
-    'active_connections',
-    'Active database connections',
-    ['service']
-)
-
-TRAINING_JOBS = Counter(
-    'training_jobs_total',
-    'Total training jobs',
-    ['status', 'service']
-)
-
-FORECASTS_GENERATED = Counter(
-    'forecasts_generated_total',
-    'Total forecasts generated',
-    ['service']
-)
-
-class MetricsCollector:
-    """Metrics collector for microservices"""
-
-    def __init__(self, service_name: str):
-        self.service_name = service_name
-        self.start_time = time.time()
-
-    def start_metrics_server(self, port: int = 8080):
-        """Start Prometheus metrics server"""
-        try:
-            start_http_server(port)
-            logger.info(f"Metrics server started on port {port}")
-        except Exception as e:
-            logger.error(f"Failed to start metrics server: {e}")
-
-    def record_request(self, method: str, endpoint: str, status_code: int, duration: float):
-        """Record HTTP request metrics"""
-        REQUEST_COUNT.labels(
-            method=method,
-            endpoint=endpoint,
-            status_code=status_code,
-            service=self.service_name
-        ).inc()
-
-        REQUEST_DURATION.labels(
-            method=method,
-            endpoint=endpoint,
-            service=self.service_name
-        ).observe(duration)

-    def record_training_job(self, status: str):
-        """Record training job metrics"""
-        TRAINING_JOBS.labels(
-            status=status,
-            service=self.service_name
-        ).inc()
-
-    def record_forecast_generated(self):
-        """Record forecast generation metrics"""
-        FORECASTS_GENERATED.labels(
-            service=self.service_name
-        ).inc()
-
-    def set_active_connections(self, count: int):
-        """Set active database connections"""
-        ACTIVE_CONNECTIONS.labels(
-            service=self.service_name
-        ).set(count)
-
-def metrics_middleware(metrics_collector: MetricsCollector):
-    """Middleware to collect metrics"""
-
-    def middleware(request, call_next):
-        start_time = time.time()
-
-        response = call_next(request)
-
-        duration = time.time() - start_time
-
-        metrics_collector.record_request(
-            method=request.method,
-            endpoint=request.url.path,
-            status_code=response.status_code,
-            duration=duration
-        )
-
-        return response
-
-    return middleware
@@ -1,71 +0,0 @@
-"""
-DateTime utilities for microservices
-"""
-
-from datetime import datetime, timezone, timedelta
-from typing import Optional
-import pytz
-
-def utc_now() -> datetime:
-    """Get current UTC datetime"""
-    return datetime.now(timezone.utc)
-
-def madrid_now() -> datetime:
-    """Get current Madrid datetime"""
-    madrid_tz = pytz.timezone('Europe/Madrid')
-    return datetime.now(madrid_tz)
-
-def to_utc(dt: datetime) -> datetime:
-    """Convert datetime to UTC"""
-    if dt.tzinfo is None:
-        dt = dt.replace(tzinfo=timezone.utc)
-    return dt.astimezone(timezone.utc)
-
-def to_madrid(dt: datetime) -> datetime:
-    """Convert datetime to Madrid timezone"""
-    madrid_tz = pytz.timezone('Europe/Madrid')
-    if dt.tzinfo is None:
-        dt = dt.replace(tzinfo=timezone.utc)
-    return dt.astimezone(madrid_tz)
-
-def format_datetime(dt: datetime, format_str: str = "%Y-%m-%d %H:%M:%S") -> str:
-    """Format datetime as string"""
-    return dt.strftime(format_str)
-
-def parse_datetime(dt_str: str, format_str: str = "%Y-%m-%d %H:%M:%S") -> datetime:
-    """Parse datetime from string"""
-    return datetime.strptime(dt_str, format_str)
-
-def is_business_hours(dt: Optional[datetime] = None) -> bool:
-    """Check if datetime is during business hours (9 AM - 6 PM Madrid time)"""
-    if dt is None:
-        dt = madrid_now()
-
-    if dt.tzinfo is None:
-        dt = dt.replace(tzinfo=timezone.utc)
-
-    madrid_dt = to_madrid(dt)
-
-    # Check if it's a weekday (Monday=0, Sunday=6)
-    if madrid_dt.weekday() >= 5:  # Weekend
-        return False
-
-    # Check if it's business hours
-    return 9 <= madrid_dt.hour < 18
-
-def next_business_day(dt: Optional[datetime] = None) -> datetime:
-    """Get next business day"""
-    if dt is None:
-        dt = madrid_now()
-
-    if dt.tzinfo is None:
-        dt = dt.replace(tzinfo=timezone.utc)
-
-    madrid_dt = to_madrid(dt)
-
-    # Add days until we reach a weekday
-    while madrid_dt.weekday() >= 5:  # Weekend
-        madrid_dt += timedelta(days=1)
-
-    # Set to 9 AM
-    return madrid_dt.replace(hour=9, minute=0, second=0, microsecond=0)
@@ -1,67 +0,0 @@
-"""
-Validation utilities for microservices
-"""
-
-import re
-from typing import Any, Optional
-from email_validator import validate_email, EmailNotValidError
-
-def validate_spanish_phone(phone: str) -> bool:
-    """Validate Spanish phone number"""
-    # Spanish phone pattern: +34 followed by 9 digits
-    pattern = r'^(\+34|0034|34)?[6-9]\d{8}$'
-    return bool(re.match(pattern, phone.replace(' ', '').replace('-', '')))
-
-def validate_email_address(email: str) -> bool:
-    """Validate email address"""
-    try:
-        validate_email(email)
-        return True
-    except EmailNotValidError:
-        return False
-
-def validate_tenant_name(name: str) -> bool:
-    """Validate tenant name"""
-    # Must be 2-50 characters, letters, numbers, spaces, hyphens, apostrophes
-    pattern = r"^[a-zA-ZÀ-ÿ0-9\s\-']{2,50}$"
-    return bool(re.match(pattern, name))
-
-def validate_address(address: str) -> bool:
-    """Validate address"""
-    # Must be 5-200 characters
-    return 5 <= len(address.strip()) <= 200
-
-def validate_coordinates(latitude: float, longitude: float) -> bool:
-    """Validate Madrid coordinates"""
-    # Madrid is roughly between these coordinates
-    madrid_bounds = {
-        'lat_min': 40.3,
-        'lat_max': 40.6,
-        'lon_min': -3.8,
-        'lon_max': -3.5
-    }
-
-    return (
-        madrid_bounds['lat_min'] <= latitude <= madrid_bounds['lat_max'] and
-        madrid_bounds['lon_min'] <= longitude <= madrid_bounds['lon_max']
-    )
-
-def validate_product_name(name: str) -> bool:
-    """Validate product name"""
-    # Must be 1-50 characters, letters, numbers, spaces
-    pattern = r"^[a-zA-ZÀ-ÿ0-9\s]{1,50}$"
-    return bool(re.match(pattern, name))
-
-def validate_positive_number(value: Any) -> bool:
-    """Validate positive number"""
-    try:
-        return float(value) > 0
-    except (ValueError, TypeError):
-        return False
-
-def validate_non_negative_number(value: Any) -> bool:
-    """Validate non-negative number"""
-    try:
-        return float(value) >= 0
-    except (ValueError, TypeError):
-        return False
@@ -1,3 +1,9 @@
+# Add this stage at the top of each service Dockerfile
+FROM python:3.11-slim as shared
+WORKDIR /shared
+COPY shared/ /shared/
+
+# Then your main service stage
 FROM python:3.11-slim

 WORKDIR /app
@@ -9,16 +15,16 @@ RUN apt-get update && apt-get install -y \
    && rm -rf /var/lib/apt/lists/*

 # Copy requirements
-COPY requirements.txt .
+COPY services/auth/requirements.txt .

 # Install Python dependencies
 RUN pip install --no-cache-dir -r requirements.txt

-# Copy shared libraries
-COPY shared/ /app/shared/
+# Copy shared libraries from the shared stage
+COPY --from=shared /shared /app/shared

 # Copy application code
-COPY . .
+COPY services/auth/ .

 # Add shared libraries to Python path
 ENV PYTHONPATH="/app:/app/shared:$PYTHONPATH"
@@ -1,41 +0,0 @@
-"""
-Authentication decorators for FastAPI
-"""
-
-from functools import wraps
-from fastapi import HTTPException, Depends
-from fastapi.security import HTTPBearer
-import httpx
-import logging
-
-logger = logging.getLogger(__name__)
-
-security = HTTPBearer()
-
-def verify_service_token(auth_service_url: str):
-    """Verify service token with auth service"""
-
-    async def verify_token(token: str = Depends(security)):
-        try:
-            async with httpx.AsyncClient() as client:
-                response = await client.post(
-                    f"{auth_service_url}/verify",
-                    headers={"Authorization": f"Bearer {token.credentials}"}
-                )
-
-                if response.status_code == 200:
-                    return response.json()
-                else:
-                    raise HTTPException(
-                        status_code=401,
-                        detail="Invalid authentication credentials"
-                    )
-
-        except httpx.RequestError as e:
-            logger.error(f"Auth service unavailable: {e}")
-            raise HTTPException(
-                status_code=503,
-                detail="Authentication service unavailable"
-            )
-
-    return verify_token
@@ -1,58 +0,0 @@
-"""
-Shared JWT Authentication Handler
-Used across all microservices for consistent authentication
-"""
-
-from jose import jwt
-from datetime import datetime, timedelta
-from typing import Optional, Dict, Any
-import logging
-
-logger = logging.getLogger(__name__)
-
-class JWTHandler:
-    """JWT token handling for microservices"""
-
-    def __init__(self, secret_key: str, algorithm: str = "HS256"):
-        self.secret_key = secret_key
-        self.algorithm = algorithm
-
-    def create_access_token(self, data: Dict[str, Any], expires_delta: Optional[timedelta] = None) -> str:
-        """Create JWT access token"""
-        to_encode = data.copy()
-
-        if expires_delta:
-            expire = datetime.now(datetime.timezone.utc) + expires_delta
-        else:
-            expire = datetime.now(datetime.timezone.utc) + timedelta(minutes=30)
-
-        to_encode.update({"exp": expire, "type": "access"})
-
-        encoded_jwt = jwt.encode(to_encode, self.secret_key, algorithm=self.algorithm)
-        return encoded_jwt
-
-    def create_refresh_token(self, data: Dict[str, Any], expires_delta: Optional[timedelta] = None) -> str:
-        """Create JWT refresh token"""
-        to_encode = data.copy()
-
-        if expires_delta:
-            expire = datetime.now(datetime.timezone.utc) + expires_delta
-        else:
-            expire = datetime.now(datetime.timezone.utc) + timedelta(days=7)
-
-        to_encode.update({"exp": expire, "type": "refresh"})
-
-        encoded_jwt = jwt.encode(to_encode, self.secret_key, algorithm=self.algorithm)
-        return encoded_jwt
-
-    def verify_token(self, token: str) -> Optional[Dict[str, Any]]:
-        """Verify and decode JWT token"""
-        try:
-            payload = jwt.decode(token, self.secret_key, algorithms=[self.algorithm])
-            return payload
-        except jwt.ExpiredSignatureError:
-            logger.warning("Token has expired")
-            return None
-        except jwt.JWTError:
-            logger.warning("Invalid token")
-            return None
@@ -1,56 +0,0 @@
-"""
-Base database configuration for all microservices
-"""
-
-import os
-from sqlalchemy import create_engine
-from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine
-from sqlalchemy.orm import sessionmaker, declarative_base
-from sqlalchemy.pool import StaticPool
-import logging
-
-logger = logging.getLogger(__name__)
-
-Base = declarative_base()
-
-class DatabaseManager:
-    """Database manager for microservices"""
-
-    def __init__(self, database_url: str):
-        self.database_url = database_url
-        self.async_engine = create_async_engine(
-            database_url,
-            echo=False,
-            pool_pre_ping=True,
-            pool_recycle=300,
-            pool_size=20,
-            max_overflow=30
-        )
-
-        self.async_session_local = sessionmaker(
-            self.async_engine,
-            class_=AsyncSession,
-            expire_on_commit=False
-        )
-
-    async def get_db(self):
-        """Get database session"""
-        async with self.async_session_local() as session:
-            try:
-                yield session
-            except Exception as e:
-                logger.error(f"Database session error: {e}")
-                await session.rollback()
-                raise
-            finally:
-                await session.close()
-
-    async def create_tables(self):
-        """Create database tables"""
-        async with self.async_engine.begin() as conn:
-            await conn.run_sync(Base.metadata.create_all)
-
-    async def drop_tables(self):
-        """Drop database tables"""
-        async with self.async_engine.begin() as conn:
-            await conn.run_sync(Base.metadata.drop_all)
@@ -1,73 +0,0 @@
-"""
-Event definitions for microservices communication
-"""
-
-from dataclasses import dataclass
-from datetime import datetime
-from typing import Dict, Any, Optional
-import uuid
-
-@dataclass
-class BaseEvent:
-    """Base event class"""
-    event_id: str
-    event_type: str
-    service_name: str
-    timestamp: datetime
-    data: Dict[str, Any]
-    correlation_id: Optional[str] = None
-
-    def __post_init__(self):
-        if not self.event_id:
-            self.event_id = str(uuid.uuid4())
-        if not self.timestamp:
-            self.timestamp = datetime.now(datetime.timezone.utc)
-
-# Training Events
-@dataclass
-class TrainingStartedEvent(BaseEvent):
-    event_type: str = "training.started"
-
-@dataclass
-class TrainingCompletedEvent(BaseEvent):
-    event_type: str = "training.completed"
-
-@dataclass
-class TrainingFailedEvent(BaseEvent):
-    event_type: str = "training.failed"
-
-# Forecasting Events
-@dataclass
-class ForecastGeneratedEvent(BaseEvent):
-    event_type: str = "forecast.generated"
-
-@dataclass
-class ForecastRequestedEvent(BaseEvent):
-    event_type: str = "forecast.requested"
-
-# User Events
-@dataclass
-class UserRegisteredEvent(BaseEvent):
-    event_type: str = "user.registered"
-
-@dataclass
-class UserLoginEvent(BaseEvent):
-    event_type: str = "user.login"
-
-# Tenant Events
-@dataclass
-class TenantCreatedEvent(BaseEvent):
-    event_type: str = "tenant.created"
-
-@dataclass
-class TenantUpdatedEvent(BaseEvent):
-    event_type: str = "tenant.updated"
-
-# Notification Events
-@dataclass
-class NotificationSentEvent(BaseEvent):
-    event_type: str = "notification.sent"
-
-@dataclass
-class NotificationFailedEvent(BaseEvent):
-    event_type: str = "notification.failed"
@@ -1,96 +0,0 @@
-"""
-RabbitMQ messaging client for microservices
-"""
-
-import asyncio
-import json
-import logging
-from typing import Dict, Any, Callable
-import aio_pika
-from aio_pika import connect_robust, Message, DeliveryMode
-
-logger = logging.getLogger(__name__)
-
-class RabbitMQClient:
-    """RabbitMQ client for microservices communication"""
-
-    def __init__(self, connection_url: str):
-        self.connection_url = connection_url
-        self.connection = None
-        self.channel = None
-
-    async def connect(self):
-        """Connect to RabbitMQ"""
-        try:
-            self.connection = await connect_robust(self.connection_url)
-            self.channel = await self.connection.channel()
-            logger.info("Connected to RabbitMQ")
-        except Exception as e:
-            logger.error(f"Failed to connect to RabbitMQ: {e}")
-            raise
-
-    async def disconnect(self):
-        """Disconnect from RabbitMQ"""
-        if self.connection:
-            await self.connection.close()
-            logger.info("Disconnected from RabbitMQ")
-
-    async def publish_event(self, exchange_name: str, routing_key: str, event_data: Dict[str, Any]):
-        """Publish event to RabbitMQ"""
-        try:
-            if not self.channel:
-                await self.connect()
-
-            # Declare exchange
-            exchange = await self.channel.declare_exchange(
-                exchange_name,
-                aio_pika.ExchangeType.TOPIC,
-                durable=True
-            )
-
-            # Create message
-            message = Message(
-                json.dumps(event_data).encode(),
-                delivery_mode=DeliveryMode.PERSISTENT,
-                content_type="application/json"
-            )
-
-            # Publish message
-            await exchange.publish(message, routing_key=routing_key)
-
-            logger.info(f"Published event to {exchange_name} with routing key {routing_key}")
-
-        except Exception as e:
-            logger.error(f"Failed to publish event: {e}")
-            raise
-
-    async def consume_events(self, exchange_name: str, queue_name: str, routing_key: str, callback: Callable):
-        """Consume events from RabbitMQ"""
-        try:
-            if not self.channel:
-                await self.connect()
-
-            # Declare exchange
-            exchange = await self.channel.declare_exchange(
-                exchange_name,
-                aio_pika.ExchangeType.TOPIC,
-                durable=True
-            )
-
-            # Declare queue
-            queue = await self.channel.declare_queue(
-                queue_name,
-                durable=True
-            )
-
-            # Bind queue to exchange
-            await queue.bind(exchange, routing_key)
-
-            # Set up consumer
-            await queue.consume(callback)
-
-            logger.info(f"Started consuming events from {queue_name}")
-
-        except Exception as e:
-            logger.error(f"Failed to consume events: {e}")
-            raise
@@ -1,77 +0,0 @@
-"""
-Centralized logging configuration for microservices
-"""
-
-import logging
-import logging.config
-import os
-from typing import Dict, Any
-
-def setup_logging(service_name: str, log_level: str = "INFO") -> None:
-    """Set up logging configuration for a microservice"""
-
-    config: Dict[str, Any] = {
-        "version": 1,
-        "disable_existing_loggers": False,
-        "formatters": {
-            "standard": {
-                "format": "%(asctime)s [%(levelname)s] %(name)s: %(message)s"
-            },
-            "detailed": {
-                "format": "%(asctime)s [%(levelname)s] %(name)s [%(filename)s:%(lineno)d] %(message)s"
-            },
-            "json": {
-                "()": "pythonjsonlogger.jsonlogger.JsonFormatter",
-                "format": "%(asctime)s %(name)s %(levelname)s %(message)s"
-            }
-        },
-        "handlers": {
-            "console": {
-                "class": "logging.StreamHandler",
-                "level": log_level,
-                "formatter": "standard",
-                "stream": "ext://sys.stdout"
-            },
-            "file": {
-                "class": "logging.FileHandler",
-                "level": log_level,
-                "formatter": "detailed",
-                "filename": f"/var/log/{service_name}.log",
-                "mode": "a"
-            },
-            "logstash": {
-                "class": "logstash.TCPLogstashHandler",
-                "host": os.getenv("LOGSTASH_HOST", "localhost"),
-                "port": int(os.getenv("LOGSTASH_PORT", "5000")),
-                "version": 1,
-                "message_type": "logstash",
-                "fqdn": False,
-                "tags": [service_name]
-            }
-        },
-        "loggers": {
-            "": {
-                "handlers": ["console", "file"],
-                "level": log_level,
-                "propagate": False
-            },
-            "uvicorn": {
-                "handlers": ["console"],
-                "level": log_level,
-                "propagate": False
-            },
-            "uvicorn.access": {
-                "handlers": ["console"],
-                "level": log_level,
-                "propagate": False
-            }
-        }
-    }
-
-    # Add logstash handler if in production
-    if os.getenv("ENVIRONMENT") == "production":
-        config["loggers"][""]["handlers"].append("logstash")
-
-    logging.config.dictConfig(config)
-    logger = logging.getLogger(__name__)
-    logger.info(f"Logging configured for {service_name}")
@@ -1,112 +0,0 @@
-"""
-Metrics collection for microservices
-"""
-
-import time
-import logging
-from typing import Dict, Any
-from prometheus_client import Counter, Histogram, Gauge, start_http_server
-from functools import wraps
-
-logger = logging.getLogger(__name__)
-
-# Prometheus metrics
-REQUEST_COUNT = Counter(
-    'http_requests_total',
-    'Total HTTP requests',
-    ['method', 'endpoint', 'status_code', 'service']
-)
-
-REQUEST_DURATION = Histogram(
-    'http_request_duration_seconds',
-    'HTTP request duration in seconds',
-    ['method', 'endpoint', 'service']
-)
-
-ACTIVE_CONNECTIONS = Gauge(
-    'active_connections',
-    'Active database connections',
-    ['service']
-)
-
-TRAINING_JOBS = Counter(
-    'training_jobs_total',
-    'Total training jobs',
-    ['status', 'service']
-)
-
-FORECASTS_GENERATED = Counter(
-    'forecasts_generated_total',
-    'Total forecasts generated',
-    ['service']
-)
-
-class MetricsCollector:
-    """Metrics collector for microservices"""
-
-    def __init__(self, service_name: str):
-        self.service_name = service_name
-        self.start_time = time.time()
-
-    def start_metrics_server(self, port: int = 8080):
-        """Start Prometheus metrics server"""
-        try:
-            start_http_server(port)
-            logger.info(f"Metrics server started on port {port}")
-        except Exception as e:
-            logger.error(f"Failed to start metrics server: {e}")
-
-    def record_request(self, method: str, endpoint: str, status_code: int, duration: float):
-        """Record HTTP request metrics"""
-        REQUEST_COUNT.labels(
-            method=method,
-            endpoint=endpoint,
-            status_code=status_code,
-            service=self.service_name
-        ).inc()
-
-        REQUEST_DURATION.labels(
-            method=method,
-            endpoint=endpoint,
-            service=self.service_name
-        ).observe(duration)
-
-    def record_training_job(self, status: str):
-        """Record training job metrics"""
-        TRAINING_JOBS.labels(
-            status=status,
-            service=self.service_name
-        ).inc()
-
-    def record_forecast_generated(self):
-        """Record forecast generation metrics"""
-        FORECASTS_GENERATED.labels(
-            service=self.service_name
-        ).inc()
-
-    def set_active_connections(self, count: int):
-        """Set active database connections"""
-        ACTIVE_CONNECTIONS.labels(
-            service=self.service_name
-        ).set(count)
-
-def metrics_middleware(metrics_collector: MetricsCollector):
-    """Middleware to collect metrics"""
-
-    def middleware(request, call_next):
-        start_time = time.time()
-
-        response = call_next(request)
-
-        duration = time.time() - start_time
-
-        metrics_collector.record_request(
-            method=request.method,
-            endpoint=request.url.path,
-            status_code=response.status_code,
-            duration=duration
-        )
-
-        return response
-
-    return middleware
@@ -1,71 +0,0 @@
-"""
-DateTime utilities for microservices
-"""
-
-from datetime import datetime, timezone, timedelta
-from typing import Optional
-import pytz
-
-def utc_now() -> datetime:
-    """Get current UTC datetime"""
-    return datetime.now(timezone.utc)
-
-def madrid_now() -> datetime:
-    """Get current Madrid datetime"""
-    madrid_tz = pytz.timezone('Europe/Madrid')
-    return datetime.now(madrid_tz)
-
-def to_utc(dt: datetime) -> datetime:
-    """Convert datetime to UTC"""
-    if dt.tzinfo is None:
-        dt = dt.replace(tzinfo=timezone.utc)
-    return dt.astimezone(timezone.utc)
-
-def to_madrid(dt: datetime) -> datetime:
-    """Convert datetime to Madrid timezone"""
-    madrid_tz = pytz.timezone('Europe/Madrid')
-    if dt.tzinfo is None:
-        dt = dt.replace(tzinfo=timezone.utc)
-    return dt.astimezone(madrid_tz)
-
-def format_datetime(dt: datetime, format_str: str = "%Y-%m-%d %H:%M:%S") -> str:
-    """Format datetime as string"""
-    return dt.strftime(format_str)
-
-def parse_datetime(dt_str: str, format_str: str = "%Y-%m-%d %H:%M:%S") -> datetime:
-    """Parse datetime from string"""
-    return datetime.strptime(dt_str, format_str)
-
-def is_business_hours(dt: Optional[datetime] = None) -> bool:
-    """Check if datetime is during business hours (9 AM - 6 PM Madrid time)"""
-    if dt is None:
-        dt = madrid_now()
-
-    if dt.tzinfo is None:
-        dt = dt.replace(tzinfo=timezone.utc)
-
-    madrid_dt = to_madrid(dt)
-
-    # Check if it's a weekday (Monday=0, Sunday=6)
-    if madrid_dt.weekday() >= 5:  # Weekend
-        return False
-
-    # Check if it's business hours
-    return 9 <= madrid_dt.hour < 18
-
-def next_business_day(dt: Optional[datetime] = None) -> datetime:
-    """Get next business day"""
-    if dt is None:
-        dt = madrid_now()
-
-    if dt.tzinfo is None:
-        dt = dt.replace(tzinfo=timezone.utc)
-
-    madrid_dt = to_madrid(dt)
-
-    # Add days until we reach a weekday
-    while madrid_dt.weekday() >= 5:  # Weekend
-        madrid_dt += timedelta(days=1)
-
-    # Set to 9 AM
-    return madrid_dt.replace(hour=9, minute=0, second=0, microsecond=0)
@@ -1,67 +0,0 @@
-"""
-Validation utilities for microservices
-"""
-
-import re
-from typing import Any, Optional
-from email_validator import validate_email, EmailNotValidError
-
-def validate_spanish_phone(phone: str) -> bool:
-    """Validate Spanish phone number"""
-    # Spanish phone pattern: +34 followed by 9 digits
-    pattern = r'^(\+34|0034|34)?[6-9]\d{8}$'
-    return bool(re.match(pattern, phone.replace(' ', '').replace('-', '')))
-
-def validate_email_address(email: str) -> bool:
-    """Validate email address"""
-    try:
-        validate_email(email)
-        return True
-    except EmailNotValidError:
-        return False
-
-def validate_tenant_name(name: str) -> bool:
-    """Validate tenant name"""
-    # Must be 2-50 characters, letters, numbers, spaces, hyphens, apostrophes
-    pattern = r"^[a-zA-ZÀ-ÿ0-9\s\-']{2,50}$"
-    return bool(re.match(pattern, name))
-
-def validate_address(address: str) -> bool:
-    """Validate address"""
-    # Must be 5-200 characters
-    return 5 <= len(address.strip()) <= 200
-
-def validate_coordinates(latitude: float, longitude: float) -> bool:
-    """Validate Madrid coordinates"""
-    # Madrid is roughly between these coordinates
-    madrid_bounds = {
-        'lat_min': 40.3,
-        'lat_max': 40.6,
-        'lon_min': -3.8,
-        'lon_max': -3.5
-    }
-
-    return (
-        madrid_bounds['lat_min'] <= latitude <= madrid_bounds['lat_max'] and
-        madrid_bounds['lon_min'] <= longitude <= madrid_bounds['lon_max']
-    )
-
-def validate_product_name(name: str) -> bool:
-    """Validate product name"""
-    # Must be 1-50 characters, letters, numbers, spaces
-    pattern = r"^[a-zA-ZÀ-ÿ0-9\s]{1,50}$"
-    return bool(re.match(pattern, name))
-
-def validate_positive_number(value: Any) -> bool:
-    """Validate positive number"""
-    try:
-        return float(value) > 0
-    except (ValueError, TypeError):
-        return False
-
-def validate_non_negative_number(value: Any) -> bool:
-    """Validate non-negative number"""
-    try:
-        return float(value) >= 0
-    except (ValueError, TypeError):
-        return False
@@ -1,3 +1,9 @@
+# Add this stage at the top of each service Dockerfile
+FROM python:3.11-slim as shared
+WORKDIR /shared
+COPY shared/ /shared/
+
+# Then your main service stage
 FROM python:3.11-slim

 WORKDIR /app
@@ -9,13 +15,16 @@ RUN apt-get update && apt-get install -y \
    && rm -rf /var/lib/apt/lists/*

 # Copy requirements
-COPY requirements.txt .
+COPY services/data/requirements.txt .

 # Install Python dependencies
 RUN pip install --no-cache-dir -r requirements.txt

+# Copy shared libraries from the shared stage
+COPY --from=shared /shared /app/shared
+
 # Copy application code
-COPY . .
+COPY services/data/ .

 # Add shared libraries to Python path
 ENV PYTHONPATH="/app:/app/shared:$PYTHONPATH"
@@ -28,4 +37,4 @@ HEALTHCHECK --interval=30s --timeout=10s --start-period=5s --retries=3 \
     CMD curl -f http://localhost:8000/health || exit 1

 # Run application
-CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000"]
+CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000"]
@@ -1,41 +0,0 @@
|
||||
"""
|
||||
Authentication decorators for FastAPI
|
||||
"""
|
||||
|
||||
from functools import wraps
|
||||
from fastapi import HTTPException, Depends
|
||||
from fastapi.security import HTTPBearer
|
||||
import httpx
|
||||
import logging
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
security = HTTPBearer()
|
||||
|
||||
def verify_service_token(auth_service_url: str):
|
||||
"""Verify service token with auth service"""
|
||||
|
||||
async def verify_token(token: str = Depends(security)):
|
||||
try:
|
||||
async with httpx.AsyncClient() as client:
|
||||
response = await client.post(
|
||||
f"{auth_service_url}/verify",
|
||||
headers={"Authorization": f"Bearer {token.credentials}"}
|
||||
)
|
||||
|
||||
if response.status_code == 200:
|
||||
return response.json()
|
||||
else:
|
||||
raise HTTPException(
|
||||
status_code=401,
|
||||
detail="Invalid authentication credentials"
|
||||
)
|
||||
|
||||
except httpx.RequestError as e:
|
||||
logger.error(f"Auth service unavailable: {e}")
|
||||
raise HTTPException(
|
||||
status_code=503,
|
||||
detail="Authentication service unavailable"
|
||||
)
|
||||
|
||||
return verify_token
|
||||
@@ -1,58 +0,0 @@
|
||||
"""
|
||||
Shared JWT Authentication Handler
|
||||
Used across all microservices for consistent authentication
|
||||
"""
|
||||
|
||||
from jose import jwt
|
||||
from datetime import datetime, timedelta
|
||||
from typing import Optional, Dict, Any
|
||||
import logging
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
class JWTHandler:
|
||||
"""JWT token handling for microservices"""
|
||||
|
||||
def __init__(self, secret_key: str, algorithm: str = "HS256"):
|
||||
self.secret_key = secret_key
|
||||
self.algorithm = algorithm
|
||||
|
||||
def create_access_token(self, data: Dict[str, Any], expires_delta: Optional[timedelta] = None) -> str:
|
||||
"""Create JWT access token"""
|
||||
to_encode = data.copy()
|
||||
|
||||
if expires_delta:
|
||||
expire = datetime.now(datetime.timezone.utc) + expires_delta
|
||||
else:
|
||||
expire = datetime.now(datetime.timezone.utc) + timedelta(minutes=30)
|
||||
|
||||
to_encode.update({"exp": expire, "type": "access"})
|
||||
|
||||
encoded_jwt = jwt.encode(to_encode, self.secret_key, algorithm=self.algorithm)
|
||||
return encoded_jwt
|
||||
|
||||
def create_refresh_token(self, data: Dict[str, Any], expires_delta: Optional[timedelta] = None) -> str:
|
||||
"""Create JWT refresh token"""
|
||||
to_encode = data.copy()
|
||||
|
||||
if expires_delta:
|
||||
expire = datetime.now(datetime.timezone.utc) + expires_delta
|
||||
else:
|
||||
expire = datetime.now(datetime.timezone.utc) + timedelta(days=7)
|
||||
|
||||
to_encode.update({"exp": expire, "type": "refresh"})
|
||||
|
||||
encoded_jwt = jwt.encode(to_encode, self.secret_key, algorithm=self.algorithm)
|
||||
return encoded_jwt
|
||||
|
||||
def verify_token(self, token: str) -> Optional[Dict[str, Any]]:
|
||||
"""Verify and decode JWT token"""
|
||||
try:
|
||||
payload = jwt.decode(token, self.secret_key, algorithms=[self.algorithm])
|
||||
return payload
|
||||
except jwt.ExpiredSignatureError:
|
||||
logger.warning("Token has expired")
|
||||
return None
|
||||
except jwt.JWTError:
|
||||
logger.warning("Invalid token")
|
||||
return None
|
||||
@@ -1,56 +0,0 @@
|
||||
"""
|
||||
Base database configuration for all microservices
|
||||
"""
|
||||
|
||||
import os
|
||||
from sqlalchemy import create_engine
|
||||
from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine
|
||||
from sqlalchemy.orm import sessionmaker, declarative_base
|
||||
from sqlalchemy.pool import StaticPool
|
||||
import logging
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
Base = declarative_base()
|
||||
|
||||
class DatabaseManager:
|
||||
"""Database manager for microservices"""
|
||||
|
||||
def __init__(self, database_url: str):
|
||||
self.database_url = database_url
|
||||
self.async_engine = create_async_engine(
|
||||
database_url,
|
||||
echo=False,
|
||||
pool_pre_ping=True,
|
||||
pool_recycle=300,
|
||||
pool_size=20,
|
||||
max_overflow=30
|
||||
)
|
||||
|
||||
self.async_session_local = sessionmaker(
|
||||
self.async_engine,
|
||||
class_=AsyncSession,
|
||||
expire_on_commit=False
|
||||
)
|
||||
|
||||
async def get_db(self):
|
||||
"""Get database session"""
|
||||
async with self.async_session_local() as session:
|
||||
try:
|
||||
yield session
|
||||
except Exception as e:
|
||||
logger.error(f"Database session error: {e}")
|
||||
await session.rollback()
|
||||
raise
|
||||
finally:
|
||||
await session.close()
|
||||
|
||||
async def create_tables(self):
|
||||
"""Create database tables"""
|
||||
async with self.async_engine.begin() as conn:
|
||||
await conn.run_sync(Base.metadata.create_all)
|
||||
|
||||
async def drop_tables(self):
|
||||
"""Drop database tables"""
|
||||
async with self.async_engine.begin() as conn:
|
||||
await conn.run_sync(Base.metadata.drop_all)
|
||||
@@ -1,73 +0,0 @@
|
||||
"""
|
||||
Event definitions for microservices communication
|
||||
"""
|
||||
|
||||
from dataclasses import dataclass
|
||||
from datetime import datetime
|
||||
from typing import Dict, Any, Optional
|
||||
import uuid
|
||||
|
||||
@dataclass
|
||||
class BaseEvent:
|
||||
"""Base event class"""
|
||||
event_id: str
|
||||
event_type: str
|
||||
service_name: str
|
||||
timestamp: datetime
|
||||
data: Dict[str, Any]
|
||||
correlation_id: Optional[str] = None
|
||||
|
||||
def __post_init__(self):
|
||||
if not self.event_id:
|
||||
self.event_id = str(uuid.uuid4())
|
||||
if not self.timestamp:
|
||||
self.timestamp = datetime.now(datetime.timezone.utc)
|
||||
|
||||
# Training Events
|
||||
@dataclass
|
||||
class TrainingStartedEvent(BaseEvent):
|
||||
event_type: str = "training.started"
|
||||
|
||||
@dataclass
|
||||
class TrainingCompletedEvent(BaseEvent):
|
||||
event_type: str = "training.completed"
|
||||
|
||||
@dataclass
|
||||
class TrainingFailedEvent(BaseEvent):
|
||||
event_type: str = "training.failed"
|
||||
|
||||
# Forecasting Events
|
||||
@dataclass
|
||||
class ForecastGeneratedEvent(BaseEvent):
|
||||
event_type: str = "forecast.generated"
|
||||
|
||||
@dataclass
|
||||
class ForecastRequestedEvent(BaseEvent):
|
||||
event_type: str = "forecast.requested"
|
||||
|
||||
# User Events
|
||||
@dataclass
|
||||
class UserRegisteredEvent(BaseEvent):
|
||||
event_type: str = "user.registered"
|
||||
|
||||
@dataclass
|
||||
class UserLoginEvent(BaseEvent):
|
||||
event_type: str = "user.login"
|
||||
|
||||
# Tenant Events
|
||||
@dataclass
|
||||
class TenantCreatedEvent(BaseEvent):
|
||||
event_type: str = "tenant.created"
|
||||
|
||||
@dataclass
|
||||
class TenantUpdatedEvent(BaseEvent):
|
||||
event_type: str = "tenant.updated"
|
||||
|
||||
# Notification Events
|
||||
@dataclass
|
||||
class NotificationSentEvent(BaseEvent):
|
||||
event_type: str = "notification.sent"
|
||||
|
||||
@dataclass
|
||||
class NotificationFailedEvent(BaseEvent):
|
||||
event_type: str = "notification.failed"
|
||||
@@ -1,96 +0,0 @@
"""
RabbitMQ messaging client for microservices
"""

import asyncio
import json
import logging
from typing import Dict, Any, Callable
import aio_pika
from aio_pika import connect_robust, Message, DeliveryMode

logger = logging.getLogger(__name__)

class RabbitMQClient:
    """RabbitMQ client for microservices communication"""

    def __init__(self, connection_url: str):
        self.connection_url = connection_url
        self.connection = None
        self.channel = None

    async def connect(self):
        """Connect to RabbitMQ"""
        try:
            self.connection = await connect_robust(self.connection_url)
            self.channel = await self.connection.channel()
            logger.info("Connected to RabbitMQ")
        except Exception as e:
            logger.error(f"Failed to connect to RabbitMQ: {e}")
            raise

    async def disconnect(self):
        """Disconnect from RabbitMQ"""
        if self.connection:
            await self.connection.close()
            logger.info("Disconnected from RabbitMQ")

    async def publish_event(self, exchange_name: str, routing_key: str, event_data: Dict[str, Any]):
        """Publish event to RabbitMQ"""
        try:
            if not self.channel:
                await self.connect()

            # Declare exchange
            exchange = await self.channel.declare_exchange(
                exchange_name,
                aio_pika.ExchangeType.TOPIC,
                durable=True
            )

            # Create message
            message = Message(
                json.dumps(event_data).encode(),
                delivery_mode=DeliveryMode.PERSISTENT,
                content_type="application/json"
            )

            # Publish message
            await exchange.publish(message, routing_key=routing_key)

            logger.info(f"Published event to {exchange_name} with routing key {routing_key}")

        except Exception as e:
            logger.error(f"Failed to publish event: {e}")
            raise

    async def consume_events(self, exchange_name: str, queue_name: str, routing_key: str, callback: Callable):
        """Consume events from RabbitMQ"""
        try:
            if not self.channel:
                await self.connect()

            # Declare exchange
            exchange = await self.channel.declare_exchange(
                exchange_name,
                aio_pika.ExchangeType.TOPIC,
                durable=True
            )

            # Declare queue
            queue = await self.channel.declare_queue(
                queue_name,
                durable=True
            )

            # Bind queue to exchange
            await queue.bind(exchange, routing_key)

            # Set up consumer
            await queue.consume(callback)

            logger.info(f"Started consuming events from {queue_name}")

        except Exception as e:
            logger.error(f"Failed to consume events: {e}")
            raise
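A hedged sketch of how this client pairs with the corrected event dataclass above; the exchange name and broker URL are made-up examples, and the timestamp is converted because json.dumps cannot serialize datetime objects:

import asyncio
from dataclasses import asdict

async def main():
    client = RabbitMQClient("amqp://guest:guest@rabbitmq:5672/")
    await client.connect()
    event = TrainingStartedEvent(service_name="training-service",
                                 data={"job_id": "1234"})
    payload = asdict(event)
    payload["timestamp"] = payload["timestamp"].isoformat()  # JSON-safe
    # event_type doubles as the topic routing key
    await client.publish_event("bakery.events", event.event_type, payload)
    await client.disconnect()

asyncio.run(main())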
@@ -1,77 +0,0 @@
"""
Centralized logging configuration for microservices
"""

import logging
import logging.config
import os
from typing import Dict, Any

def setup_logging(service_name: str, log_level: str = "INFO") -> None:
    """Set up logging configuration for a microservice"""

    config: Dict[str, Any] = {
        "version": 1,
        "disable_existing_loggers": False,
        "formatters": {
            "standard": {
                "format": "%(asctime)s [%(levelname)s] %(name)s: %(message)s"
            },
            "detailed": {
                "format": "%(asctime)s [%(levelname)s] %(name)s [%(filename)s:%(lineno)d] %(message)s"
            },
            "json": {
                "()": "pythonjsonlogger.jsonlogger.JsonFormatter",
                "format": "%(asctime)s %(name)s %(levelname)s %(message)s"
            }
        },
        "handlers": {
            "console": {
                "class": "logging.StreamHandler",
                "level": log_level,
                "formatter": "standard",
                "stream": "ext://sys.stdout"
            },
            "file": {
                "class": "logging.FileHandler",
                "level": log_level,
                "formatter": "detailed",
                "filename": f"/var/log/{service_name}.log",
                "mode": "a"
            },
            "logstash": {
                "class": "logstash.TCPLogstashHandler",
                "host": os.getenv("LOGSTASH_HOST", "localhost"),
                "port": int(os.getenv("LOGSTASH_PORT", "5000")),
                "version": 1,
                "message_type": "logstash",
                "fqdn": False,
                "tags": [service_name]
            }
        },
        "loggers": {
            "": {
                "handlers": ["console", "file"],
                "level": log_level,
                "propagate": False
            },
            "uvicorn": {
                "handlers": ["console"],
                "level": log_level,
                "propagate": False
            },
            "uvicorn.access": {
                "handlers": ["console"],
                "level": log_level,
                "propagate": False
            }
        }
    }

    # Add logstash handler if in production
    if os.getenv("ENVIRONMENT") == "production":
        config["loggers"][""]["handlers"].append("logstash")

    logging.config.dictConfig(config)
    logger = logging.getLogger(__name__)
    logger.info(f"Logging configured for {service_name}")
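Two quirks here are worth knowing if this module is resurrected: logging.config.dictConfig instantiates every entry under "handlers" whether or not a logger references it, so the TCPLogstashHandler is created even outside production, and the "json" formatter is defined but never attached to any handler. A sketch that keeps the logstash handler only when it will actually be used, assuming the same config dict:

import os

def _tune_handlers(config: dict) -> None:
    # dictConfig builds every configured handler, referenced or not, so
    # drop the logstash entry entirely outside production instead of
    # merely leaving it unattached.
    if os.getenv("ENVIRONMENT") == "production":
        config["loggers"][""]["handlers"].append("logstash")
    else:
        config["handlers"].pop("logstash", None)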
@@ -1,112 +0,0 @@
"""
Metrics collection for microservices
"""

import time
import logging
from typing import Dict, Any
from prometheus_client import Counter, Histogram, Gauge, start_http_server
from functools import wraps

logger = logging.getLogger(__name__)

# Prometheus metrics
REQUEST_COUNT = Counter(
    'http_requests_total',
    'Total HTTP requests',
    ['method', 'endpoint', 'status_code', 'service']
)

REQUEST_DURATION = Histogram(
    'http_request_duration_seconds',
    'HTTP request duration in seconds',
    ['method', 'endpoint', 'service']
)

ACTIVE_CONNECTIONS = Gauge(
    'active_connections',
    'Active database connections',
    ['service']
)

TRAINING_JOBS = Counter(
    'training_jobs_total',
    'Total training jobs',
    ['status', 'service']
)

FORECASTS_GENERATED = Counter(
    'forecasts_generated_total',
    'Total forecasts generated',
    ['service']
)

class MetricsCollector:
    """Metrics collector for microservices"""

    def __init__(self, service_name: str):
        self.service_name = service_name
        self.start_time = time.time()

    def start_metrics_server(self, port: int = 8080):
        """Start Prometheus metrics server"""
        try:
            start_http_server(port)
            logger.info(f"Metrics server started on port {port}")
        except Exception as e:
            logger.error(f"Failed to start metrics server: {e}")

    def record_request(self, method: str, endpoint: str, status_code: int, duration: float):
        """Record HTTP request metrics"""
        REQUEST_COUNT.labels(
            method=method,
            endpoint=endpoint,
            status_code=status_code,
            service=self.service_name
        ).inc()

        REQUEST_DURATION.labels(
            method=method,
            endpoint=endpoint,
            service=self.service_name
        ).observe(duration)

    def record_training_job(self, status: str):
        """Record training job metrics"""
        TRAINING_JOBS.labels(
            status=status,
            service=self.service_name
        ).inc()

    def record_forecast_generated(self):
        """Record forecast generation metrics"""
        FORECASTS_GENERATED.labels(
            service=self.service_name
        ).inc()

    def set_active_connections(self, count: int):
        """Set active database connections"""
        ACTIVE_CONNECTIONS.labels(
            service=self.service_name
        ).set(count)

def metrics_middleware(metrics_collector: MetricsCollector):
    """Middleware to collect metrics"""

    def middleware(request, call_next):
        start_time = time.time()

        response = call_next(request)

        duration = time.time() - start_time

        metrics_collector.record_request(
            method=request.method,
            endpoint=request.url.path,
            status_code=response.status_code,
            duration=duration
        )

        return response

    return middleware
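Note that metrics_middleware is synchronous, but Starlette-style HTTP middleware receives a coroutine from call_next and must await it, so the function above would return an unawaited coroutine instead of a response. A minimal async variant, assuming a FastAPI app; the app and service names are illustrative:

import time

from fastapi import FastAPI, Request

app = FastAPI()
collector = MetricsCollector("forecasting-service")  # example service name

@app.middleware("http")
async def prometheus_middleware(request: Request, call_next):
    start = time.time()
    response = await call_next(request)  # call_next is a coroutine here
    collector.record_request(
        method=request.method,
        endpoint=request.url.path,
        status_code=response.status_code,
        duration=time.time() - start,
    )
    return response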
@@ -1,71 +0,0 @@
"""
DateTime utilities for microservices
"""

from datetime import datetime, timezone, timedelta
from typing import Optional
import pytz

def utc_now() -> datetime:
    """Get current UTC datetime"""
    return datetime.now(timezone.utc)

def madrid_now() -> datetime:
    """Get current Madrid datetime"""
    madrid_tz = pytz.timezone('Europe/Madrid')
    return datetime.now(madrid_tz)

def to_utc(dt: datetime) -> datetime:
    """Convert datetime to UTC"""
    if dt.tzinfo is None:
        dt = dt.replace(tzinfo=timezone.utc)
    return dt.astimezone(timezone.utc)

def to_madrid(dt: datetime) -> datetime:
    """Convert datetime to Madrid timezone"""
    madrid_tz = pytz.timezone('Europe/Madrid')
    if dt.tzinfo is None:
        dt = dt.replace(tzinfo=timezone.utc)
    return dt.astimezone(madrid_tz)

def format_datetime(dt: datetime, format_str: str = "%Y-%m-%d %H:%M:%S") -> str:
    """Format datetime as string"""
    return dt.strftime(format_str)

def parse_datetime(dt_str: str, format_str: str = "%Y-%m-%d %H:%M:%S") -> datetime:
    """Parse datetime from string"""
    return datetime.strptime(dt_str, format_str)

def is_business_hours(dt: Optional[datetime] = None) -> bool:
    """Check if datetime is during business hours (9 AM - 6 PM Madrid time)"""
    if dt is None:
        dt = madrid_now()

    if dt.tzinfo is None:
        dt = dt.replace(tzinfo=timezone.utc)

    madrid_dt = to_madrid(dt)

    # Check if it's a weekday (Monday=0, Sunday=6)
    if madrid_dt.weekday() >= 5:  # Weekend
        return False

    # Check if it's business hours
    return 9 <= madrid_dt.hour < 18

def next_business_day(dt: Optional[datetime] = None) -> datetime:
    """Get next business day"""
    if dt is None:
        dt = madrid_now()

    if dt.tzinfo is None:
        dt = dt.replace(tzinfo=timezone.utc)

    madrid_dt = to_madrid(dt)

    # Add days until we reach a weekday
    while madrid_dt.weekday() >= 5:  # Weekend
        madrid_dt += timedelta(days=1)

    # Set to 9 AM
    return madrid_dt.replace(hour=9, minute=0, second=0, microsecond=0)
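As written, next_business_day only advances when the input already falls on a weekend, so a Friday input comes back as Friday 09:00 rather than Monday, possibly in the past. If "next" is meant to always move forward at least one day, a sketch reusing the same helpers (the intent here is an assumption):

from datetime import datetime, timedelta
from typing import Optional

def next_business_day_strict(dt: Optional[datetime] = None) -> datetime:
    madrid_dt = to_madrid(dt) if dt is not None else madrid_now()
    madrid_dt += timedelta(days=1)      # always advance at least one day
    while madrid_dt.weekday() >= 5:     # then skip Saturday and Sunday
        madrid_dt += timedelta(days=1)
    # (pytz offsets can go stale across DST; tz.normalize() would fix that)
    return madrid_dt.replace(hour=9, minute=0, second=0, microsecond=0)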
@@ -1,67 +0,0 @@
"""
Validation utilities for microservices
"""

import re
from typing import Any, Optional
from email_validator import validate_email, EmailNotValidError

def validate_spanish_phone(phone: str) -> bool:
    """Validate Spanish phone number"""
    # Spanish phone pattern: +34 followed by 9 digits
    pattern = r'^(\+34|0034|34)?[6-9]\d{8}$'
    return bool(re.match(pattern, phone.replace(' ', '').replace('-', '')))

def validate_email_address(email: str) -> bool:
    """Validate email address"""
    try:
        validate_email(email)
        return True
    except EmailNotValidError:
        return False

def validate_tenant_name(name: str) -> bool:
    """Validate tenant name"""
    # Must be 2-50 characters, letters, numbers, spaces, hyphens, apostrophes
    pattern = r"^[a-zA-ZÀ-ÿ0-9\s\-']{2,50}$"
    return bool(re.match(pattern, name))

def validate_address(address: str) -> bool:
    """Validate address"""
    # Must be 5-200 characters
    return 5 <= len(address.strip()) <= 200

def validate_coordinates(latitude: float, longitude: float) -> bool:
    """Validate Madrid coordinates"""
    # Madrid is roughly between these coordinates
    madrid_bounds = {
        'lat_min': 40.3,
        'lat_max': 40.6,
        'lon_min': -3.8,
        'lon_max': -3.5
    }

    return (
        madrid_bounds['lat_min'] <= latitude <= madrid_bounds['lat_max'] and
        madrid_bounds['lon_min'] <= longitude <= madrid_bounds['lon_max']
    )

def validate_product_name(name: str) -> bool:
    """Validate product name"""
    # Must be 1-50 characters, letters, numbers, spaces
    pattern = r"^[a-zA-ZÀ-ÿ0-9\s]{1,50}$"
    return bool(re.match(pattern, name))

def validate_positive_number(value: Any) -> bool:
    """Validate positive number"""
    try:
        return float(value) > 0
    except (ValueError, TypeError):
        return False

def validate_non_negative_number(value: Any) -> bool:
    """Validate non-negative number"""
    try:
        return float(value) >= 0
    except (ValueError, TypeError):
        return False
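A few illustrative calls showing what the rules above accept and reject, assuming the functions are importable as-is:

# Mobile numbers start with 6 or 7, landlines with 8 or 9, per the
# [6-9] class; 5xx numbers fall outside it.
assert validate_spanish_phone("+34 612 345 678")
assert not validate_spanish_phone("+34 512 345 678")
# The coordinate box covers central Madrid only.
assert validate_coordinates(40.42, -3.70)       # Puerta del Sol area
assert not validate_coordinates(41.39, 2.17)    # Barcelona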
@@ -1,3 +1,9 @@
+# Add this stage at the top of each service Dockerfile
+FROM python:3.11-slim as shared
+WORKDIR /shared
+COPY shared/ /shared/
+
+# Then your main service stage
 FROM python:3.11-slim
 
 WORKDIR /app
@@ -9,13 +15,16 @@ RUN apt-get update && apt-get install -y \
     && rm -rf /var/lib/apt/lists/*
 
 # Copy requirements
-COPY requirements.txt .
+COPY services/forecasting/requirements.txt .
 
 # Install Python dependencies
 RUN pip install --no-cache-dir -r requirements.txt
 
+# Copy shared libraries from the shared stage
+COPY --from=shared /shared /app/shared
+
 # Copy application code
-COPY . .
+COPY services/forecasting/ .
 
 # Add shared libraries to Python path
 ENV PYTHONPATH="/app:/app/shared:$PYTHONPATH"
@@ -28,4 +37,4 @@ HEALTHCHECK --interval=30s --timeout=10s --start-period=5s --retries=3 \
     CMD curl -f http://localhost:8000/health || exit 1
 
 # Run application
-CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000"]
+CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000"]
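The same three-hunk change is applied to the notification and tenant Dockerfiles further down, with only the `services/<name>/` path differing. With the compose build context moved to the project root, the equivalent manual build is `docker build -f services/forecasting/Dockerfile -t bakery-forecasting-service .` run from the repository root, so that both `shared/` and `services/forecasting/` land inside the build context; the image tag here is illustrative.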
@@ -1,41 +0,0 @@
"""
Authentication decorators for FastAPI
"""

from functools import wraps
from fastapi import HTTPException, Depends
from fastapi.security import HTTPBearer
import httpx
import logging

logger = logging.getLogger(__name__)

security = HTTPBearer()

def verify_service_token(auth_service_url: str):
    """Verify service token with auth service"""

    async def verify_token(token: str = Depends(security)):
        try:
            async with httpx.AsyncClient() as client:
                response = await client.post(
                    f"{auth_service_url}/verify",
                    headers={"Authorization": f"Bearer {token.credentials}"}
                )

                if response.status_code == 200:
                    return response.json()
                else:
                    raise HTTPException(
                        status_code=401,
                        detail="Invalid authentication credentials"
                    )

        except httpx.RequestError as e:
            logger.error(f"Auth service unavailable: {e}")
            raise HTTPException(
                status_code=503,
                detail="Authentication service unavailable"
            )

    return verify_token
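A hedged sketch of protecting a route with the dependency factory above; the service URL, route path, and claim key are all illustrative, and the shape of the returned claims depends on what the auth service's /verify endpoint actually sends back:

from fastapi import Depends, FastAPI

app = FastAPI()
verify_token = verify_service_token("http://auth-service:8000")

@app.get("/forecasts")
async def list_forecasts(claims: dict = Depends(verify_token)):
    # claims is whatever JSON the auth service returned on a 200
    return {"tenant": claims.get("tenant_id")}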
@@ -1,58 +0,0 @@
"""
Shared JWT Authentication Handler
Used across all microservices for consistent authentication
"""

from jose import jwt
from datetime import datetime, timedelta
from typing import Optional, Dict, Any
import logging

logger = logging.getLogger(__name__)

class JWTHandler:
    """JWT token handling for microservices"""

    def __init__(self, secret_key: str, algorithm: str = "HS256"):
        self.secret_key = secret_key
        self.algorithm = algorithm

    def create_access_token(self, data: Dict[str, Any], expires_delta: Optional[timedelta] = None) -> str:
        """Create JWT access token"""
        to_encode = data.copy()

        if expires_delta:
            expire = datetime.now(datetime.timezone.utc) + expires_delta
        else:
            expire = datetime.now(datetime.timezone.utc) + timedelta(minutes=30)

        to_encode.update({"exp": expire, "type": "access"})

        encoded_jwt = jwt.encode(to_encode, self.secret_key, algorithm=self.algorithm)
        return encoded_jwt

    def create_refresh_token(self, data: Dict[str, Any], expires_delta: Optional[timedelta] = None) -> str:
        """Create JWT refresh token"""
        to_encode = data.copy()

        if expires_delta:
            expire = datetime.now(datetime.timezone.utc) + expires_delta
        else:
            expire = datetime.now(datetime.timezone.utc) + timedelta(days=7)

        to_encode.update({"exp": expire, "type": "refresh"})

        encoded_jwt = jwt.encode(to_encode, self.secret_key, algorithm=self.algorithm)
        return encoded_jwt

    def verify_token(self, token: str) -> Optional[Dict[str, Any]]:
        """Verify and decode JWT token"""
        try:
            payload = jwt.decode(token, self.secret_key, algorithms=[self.algorithm])
            return payload
        except jwt.ExpiredSignatureError:
            logger.warning("Token has expired")
            return None
        except jwt.JWTError:
            logger.warning("Invalid token")
            return None
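This handler has the same expiry bug as the events module: `datetime.now(datetime.timezone.utc)` raises AttributeError because `timezone` must be imported from the `datetime` module. The same computation, corrected; everything else in JWTHandler can stay as-is, and the helper name is of course an invention:

from datetime import datetime, timedelta, timezone
from typing import Optional

def _expiry(expires_delta: Optional[timedelta], default: timedelta) -> datetime:
    """Expiry timestamp used for the 'exp' claim."""
    return datetime.now(timezone.utc) + (expires_delta or default)

# inside create_access_token:
#     expire = _expiry(expires_delta, timedelta(minutes=30))
# inside create_refresh_token:
#     expire = _expiry(expires_delta, timedelta(days=7))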
@@ -1,56 +0,0 @@
"""
Base database configuration for all microservices
"""

import os
from sqlalchemy import create_engine
from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine
from sqlalchemy.orm import sessionmaker, declarative_base
from sqlalchemy.pool import StaticPool
import logging

logger = logging.getLogger(__name__)

Base = declarative_base()

class DatabaseManager:
    """Database manager for microservices"""

    def __init__(self, database_url: str):
        self.database_url = database_url
        self.async_engine = create_async_engine(
            database_url,
            echo=False,
            pool_pre_ping=True,
            pool_recycle=300,
            pool_size=20,
            max_overflow=30
        )

        self.async_session_local = sessionmaker(
            self.async_engine,
            class_=AsyncSession,
            expire_on_commit=False
        )

    async def get_db(self):
        """Get database session"""
        async with self.async_session_local() as session:
            try:
                yield session
            except Exception as e:
                logger.error(f"Database session error: {e}")
                await session.rollback()
                raise
            finally:
                await session.close()

    async def create_tables(self):
        """Create database tables"""
        async with self.async_engine.begin() as conn:
            await conn.run_sync(Base.metadata.create_all)

    async def drop_tables(self):
        """Drop database tables"""
        async with self.async_engine.begin() as conn:
            await conn.run_sync(Base.metadata.drop_all)
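Since get_db is an async generator, it plugs straight into FastAPI's dependency injection. A sketch of the wiring; DATABASE_URL mirrors the URLs set in the compose file, while the /health/db path is made up for illustration:

import os

from fastapi import Depends, FastAPI
from sqlalchemy import text
from sqlalchemy.ext.asyncio import AsyncSession

app = FastAPI()
db = DatabaseManager(os.environ["DATABASE_URL"])

@app.get("/health/db")
async def db_health(session: AsyncSession = Depends(db.get_db)):
    await session.execute(text("SELECT 1"))  # cheap connectivity probe
    return {"status": "ok"}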
@@ -1,3 +1,9 @@
+# Add this stage at the top of each service Dockerfile
+FROM python:3.11-slim as shared
+WORKDIR /shared
+COPY shared/ /shared/
+
+# Then your main service stage
 FROM python:3.11-slim
 
 WORKDIR /app
@@ -9,13 +15,16 @@ RUN apt-get update && apt-get install -y \
     && rm -rf /var/lib/apt/lists/*
 
 # Copy requirements
-COPY requirements.txt .
+COPY services/notification/requirements.txt .
 
 # Install Python dependencies
 RUN pip install --no-cache-dir -r requirements.txt
 
+# Copy shared libraries from the shared stage
+COPY --from=shared /shared /app/shared
+
 # Copy application code
-COPY . .
+COPY services/notification/ .
 
 # Add shared libraries to Python path
 ENV PYTHONPATH="/app:/app/shared:$PYTHONPATH"
@@ -28,4 +37,4 @@ HEALTHCHECK --interval=30s --timeout=10s --start-period=5s --retries=3 \
    CMD curl -f http://localhost:8000/health || exit 1
 
 # Run application
-CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000"]
+CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000"]
@@ -1,3 +1,9 @@
+# Add this stage at the top of each service Dockerfile
+FROM python:3.11-slim as shared
+WORKDIR /shared
+COPY shared/ /shared/
+
+# Then your main service stage
 FROM python:3.11-slim
 
 WORKDIR /app
@@ -9,13 +15,16 @@ RUN apt-get update && apt-get install -y \
     && rm -rf /var/lib/apt/lists/*
 
 # Copy requirements
-COPY requirements.txt .
+COPY services/tenant/requirements.txt .
 
 # Install Python dependencies
 RUN pip install --no-cache-dir -r requirements.txt
 
+# Copy shared libraries from the shared stage
+COPY --from=shared /shared /app/shared
+
 # Copy application code
-COPY . .
+COPY services/tenant/ .
 
 # Add shared libraries to Python path
 ENV PYTHONPATH="/app:/app/shared:$PYTHONPATH"
@@ -28,4 +37,4 @@ HEALTHCHECK --interval=30s --timeout=10s --start-period=5s --retries=3 \
    CMD curl -f http://localhost:8000/health || exit 1
 
 # Run application
-CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000"]
+CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000"]
@@ -1,41 +0,0 @@
"""
Authentication decorators for FastAPI
"""

from functools import wraps
from fastapi import HTTPException, Depends
from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials
import httpx
import logging

logger = logging.getLogger(__name__)

security = HTTPBearer()

def verify_service_token(auth_service_url: str):
    """Verify service token with auth service"""

    # HTTPBearer yields HTTPAuthorizationCredentials, not a plain str
    async def verify_token(token: HTTPAuthorizationCredentials = Depends(security)):
        try:
            async with httpx.AsyncClient() as client:
                response = await client.post(
                    f"{auth_service_url}/verify",
                    headers={"Authorization": f"Bearer {token.credentials}"}
                )

                if response.status_code == 200:
                    return response.json()
                else:
                    raise HTTPException(
                        status_code=401,
                        detail="Invalid authentication credentials"
                    )

        except httpx.RequestError as e:
            logger.error(f"Auth service unavailable: {e}")
            raise HTTPException(
                status_code=503,
                detail="Authentication service unavailable"
            )

    return verify_token
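A minimal sketch of attaching this dependency to a route; the auth service URL and the route path are illustrative:

# Hypothetical usage in a downstream FastAPI service
from fastapi import FastAPI, Depends

app = FastAPI()
verify_token = verify_service_token("http://auth-service:8000")

@app.get("/protected")
async def protected(user=Depends(verify_token)):
    return {"user": user}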
@@ -1,58 +0,0 @@
"""
Shared JWT Authentication Handler
Used across all microservices for consistent authentication
"""

from jose import jwt
from datetime import datetime, timedelta, timezone
from typing import Optional, Dict, Any
import logging

logger = logging.getLogger(__name__)

class JWTHandler:
    """JWT token handling for microservices"""

    def __init__(self, secret_key: str, algorithm: str = "HS256"):
        self.secret_key = secret_key
        self.algorithm = algorithm

    def create_access_token(self, data: Dict[str, Any], expires_delta: Optional[timedelta] = None) -> str:
        """Create JWT access token"""
        to_encode = data.copy()

        if expires_delta:
            expire = datetime.now(timezone.utc) + expires_delta
        else:
            expire = datetime.now(timezone.utc) + timedelta(minutes=30)

        to_encode.update({"exp": expire, "type": "access"})

        encoded_jwt = jwt.encode(to_encode, self.secret_key, algorithm=self.algorithm)
        return encoded_jwt

    def create_refresh_token(self, data: Dict[str, Any], expires_delta: Optional[timedelta] = None) -> str:
        """Create JWT refresh token"""
        to_encode = data.copy()

        if expires_delta:
            expire = datetime.now(timezone.utc) + expires_delta
        else:
            expire = datetime.now(timezone.utc) + timedelta(days=7)

        to_encode.update({"exp": expire, "type": "refresh"})

        encoded_jwt = jwt.encode(to_encode, self.secret_key, algorithm=self.algorithm)
        return encoded_jwt

    def verify_token(self, token: str) -> Optional[Dict[str, Any]]:
        """Verify and decode JWT token"""
        try:
            payload = jwt.decode(token, self.secret_key, algorithms=[self.algorithm])
            return payload
        except jwt.ExpiredSignatureError:
            logger.warning("Token has expired")
            return None
        except jwt.JWTError:
            logger.warning("Invalid token")
            return None
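A minimal usage sketch; the secret and claims are placeholders:

# Hypothetical usage
handler = JWTHandler(secret_key="change-me")
access = handler.create_access_token({"sub": "user-123"})
claims = handler.verify_token(access)  # dict with "sub", "exp", "type", or None if invalid/expired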
@@ -1,56 +0,0 @@
"""
Base database configuration for all microservices
"""

import os
from sqlalchemy import create_engine
from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine
from sqlalchemy.orm import sessionmaker, declarative_base
from sqlalchemy.pool import StaticPool
import logging

logger = logging.getLogger(__name__)

Base = declarative_base()

class DatabaseManager:
    """Database manager for microservices"""

    def __init__(self, database_url: str):
        self.database_url = database_url
        self.async_engine = create_async_engine(
            database_url,
            echo=False,
            pool_pre_ping=True,
            pool_recycle=300,
            pool_size=20,
            max_overflow=30
        )

        self.async_session_local = sessionmaker(
            self.async_engine,
            class_=AsyncSession,
            expire_on_commit=False
        )

    async def get_db(self):
        """Get database session"""
        async with self.async_session_local() as session:
            try:
                yield session
            except Exception as e:
                logger.error(f"Database session error: {e}")
                await session.rollback()
                raise
            finally:
                await session.close()

    async def create_tables(self):
        """Create database tables"""
        async with self.async_engine.begin() as conn:
            await conn.run_sync(Base.metadata.create_all)

    async def drop_tables(self):
        """Drop database tables"""
        async with self.async_engine.begin() as conn:
            await conn.run_sync(Base.metadata.drop_all)
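A minimal sketch of wiring the manager into a service; reading DATABASE_URL from the environment mirrors the compose configuration, and the dependency usage is illustrative:

# Hypothetical usage
import os

db_manager = DatabaseManager(os.environ["DATABASE_URL"])

# e.g. as a FastAPI dependency:
# async def list_tenants(db: AsyncSession = Depends(db_manager.get_db)): ...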
@@ -1,73 +0,0 @@
"""
Event definitions for microservices communication
"""

from dataclasses import dataclass, field
from datetime import datetime, timezone
from typing import Dict, Any, Optional
import uuid

@dataclass
class BaseEvent:
    """Base event class"""
    # All fields carry defaults so subclasses can override event_type with a
    # default value without hitting dataclass field-ordering errors
    event_id: str = ""
    event_type: str = ""
    service_name: str = ""
    timestamp: Optional[datetime] = None
    data: Dict[str, Any] = field(default_factory=dict)
    correlation_id: Optional[str] = None

    def __post_init__(self):
        if not self.event_id:
            self.event_id = str(uuid.uuid4())
        if not self.timestamp:
            self.timestamp = datetime.now(timezone.utc)

# Training Events
@dataclass
class TrainingStartedEvent(BaseEvent):
    event_type: str = "training.started"

@dataclass
class TrainingCompletedEvent(BaseEvent):
    event_type: str = "training.completed"

@dataclass
class TrainingFailedEvent(BaseEvent):
    event_type: str = "training.failed"

# Forecasting Events
@dataclass
class ForecastGeneratedEvent(BaseEvent):
    event_type: str = "forecast.generated"

@dataclass
class ForecastRequestedEvent(BaseEvent):
    event_type: str = "forecast.requested"

# User Events
@dataclass
class UserRegisteredEvent(BaseEvent):
    event_type: str = "user.registered"

@dataclass
class UserLoginEvent(BaseEvent):
    event_type: str = "user.login"

# Tenant Events
@dataclass
class TenantCreatedEvent(BaseEvent):
    event_type: str = "tenant.created"

@dataclass
class TenantUpdatedEvent(BaseEvent):
    event_type: str = "tenant.updated"

# Notification Events
@dataclass
class NotificationSentEvent(BaseEvent):
    event_type: str = "notification.sent"

@dataclass
class NotificationFailedEvent(BaseEvent):
    event_type: str = "notification.failed"
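A minimal sketch of constructing one of these events; the field values are illustrative:

# Hypothetical usage
event = TrainingStartedEvent(service_name="training-service", data={"job_id": "job-1"})
# event_id and timestamp are auto-filled by __post_init__
print(event.event_type)  # "training.started"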
@@ -1,96 +0,0 @@
"""
RabbitMQ messaging client for microservices
"""

import asyncio
import json
import logging
from typing import Dict, Any, Callable
import aio_pika
from aio_pika import connect_robust, Message, DeliveryMode

logger = logging.getLogger(__name__)

class RabbitMQClient:
    """RabbitMQ client for microservices communication"""

    def __init__(self, connection_url: str):
        self.connection_url = connection_url
        self.connection = None
        self.channel = None

    async def connect(self):
        """Connect to RabbitMQ"""
        try:
            self.connection = await connect_robust(self.connection_url)
            self.channel = await self.connection.channel()
            logger.info("Connected to RabbitMQ")
        except Exception as e:
            logger.error(f"Failed to connect to RabbitMQ: {e}")
            raise

    async def disconnect(self):
        """Disconnect from RabbitMQ"""
        if self.connection:
            await self.connection.close()
            logger.info("Disconnected from RabbitMQ")

    async def publish_event(self, exchange_name: str, routing_key: str, event_data: Dict[str, Any]):
        """Publish event to RabbitMQ"""
        try:
            if not self.channel:
                await self.connect()

            # Declare exchange
            exchange = await self.channel.declare_exchange(
                exchange_name,
                aio_pika.ExchangeType.TOPIC,
                durable=True
            )

            # Create message
            message = Message(
                json.dumps(event_data).encode(),
                delivery_mode=DeliveryMode.PERSISTENT,
                content_type="application/json"
            )

            # Publish message
            await exchange.publish(message, routing_key=routing_key)

            logger.info(f"Published event to {exchange_name} with routing key {routing_key}")

        except Exception as e:
            logger.error(f"Failed to publish event: {e}")
            raise

    async def consume_events(self, exchange_name: str, queue_name: str, routing_key: str, callback: Callable):
        """Consume events from RabbitMQ"""
        try:
            if not self.channel:
                await self.connect()

            # Declare exchange
            exchange = await self.channel.declare_exchange(
                exchange_name,
                aio_pika.ExchangeType.TOPIC,
                durable=True
            )

            # Declare queue
            queue = await self.channel.declare_queue(
                queue_name,
                durable=True
            )

            # Bind queue to exchange
            await queue.bind(exchange, routing_key)

            # Set up consumer
            await queue.consume(callback)

            logger.info(f"Started consuming events from {queue_name}")

        except Exception as e:
            logger.error(f"Failed to consume events: {e}")
            raise
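A minimal publish sketch; the AMQP URL, exchange name, and routing key are illustrative:

# Hypothetical usage
import asyncio

async def main():
    client = RabbitMQClient("amqp://guest:guest@rabbitmq:5672/")
    await client.connect()
    await client.publish_event(
        exchange_name="bakery.events",
        routing_key="training.started",
        event_data={"event_type": "training.started", "job_id": "job-1"},
    )
    await client.disconnect()

asyncio.run(main())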
@@ -1,77 +0,0 @@
"""
Centralized logging configuration for microservices
"""

import logging
import logging.config
import os
from typing import Dict, Any

def setup_logging(service_name: str, log_level: str = "INFO") -> None:
    """Set up logging configuration for a microservice"""

    config: Dict[str, Any] = {
        "version": 1,
        "disable_existing_loggers": False,
        "formatters": {
            "standard": {
                "format": "%(asctime)s [%(levelname)s] %(name)s: %(message)s"
            },
            "detailed": {
                "format": "%(asctime)s [%(levelname)s] %(name)s [%(filename)s:%(lineno)d] %(message)s"
            },
            "json": {
                "()": "pythonjsonlogger.jsonlogger.JsonFormatter",
                "format": "%(asctime)s %(name)s %(levelname)s %(message)s"
            }
        },
        "handlers": {
            "console": {
                "class": "logging.StreamHandler",
                "level": log_level,
                "formatter": "standard",
                "stream": "ext://sys.stdout"
            },
            "file": {
                "class": "logging.FileHandler",
                "level": log_level,
                "formatter": "detailed",
                "filename": f"/var/log/{service_name}.log",
                "mode": "a"
            },
            "logstash": {
                "class": "logstash.TCPLogstashHandler",
                "host": os.getenv("LOGSTASH_HOST", "localhost"),
                "port": int(os.getenv("LOGSTASH_PORT", "5000")),
                "version": 1,
                "message_type": "logstash",
                "fqdn": False,
                "tags": [service_name]
            }
        },
        "loggers": {
            "": {
                "handlers": ["console", "file"],
                "level": log_level,
                "propagate": False
            },
            "uvicorn": {
                "handlers": ["console"],
                "level": log_level,
                "propagate": False
            },
            "uvicorn.access": {
                "handlers": ["console"],
                "level": log_level,
                "propagate": False
            }
        }
    }

    # Add logstash handler if in production
    if os.getenv("ENVIRONMENT") == "production":
        config["loggers"][""]["handlers"].append("logstash")

    logging.config.dictConfig(config)
    logger = logging.getLogger(__name__)
    logger.info(f"Logging configured for {service_name}")
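A minimal startup sketch; reading the level from a LOG_LEVEL variable is an assumption about the services' configuration:

# Hypothetical usage at service startup
import os
setup_logging("tenant-service", log_level=os.getenv("LOG_LEVEL", "INFO"))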
@@ -1,112 +0,0 @@
"""
Metrics collection for microservices
"""

import time
import logging
from typing import Dict, Any
from prometheus_client import Counter, Histogram, Gauge, start_http_server
from functools import wraps

logger = logging.getLogger(__name__)

# Prometheus metrics
REQUEST_COUNT = Counter(
    'http_requests_total',
    'Total HTTP requests',
    ['method', 'endpoint', 'status_code', 'service']
)

REQUEST_DURATION = Histogram(
    'http_request_duration_seconds',
    'HTTP request duration in seconds',
    ['method', 'endpoint', 'service']
)

ACTIVE_CONNECTIONS = Gauge(
    'active_connections',
    'Active database connections',
    ['service']
)

TRAINING_JOBS = Counter(
    'training_jobs_total',
    'Total training jobs',
    ['status', 'service']
)

FORECASTS_GENERATED = Counter(
    'forecasts_generated_total',
    'Total forecasts generated',
    ['service']
)

class MetricsCollector:
    """Metrics collector for microservices"""

    def __init__(self, service_name: str):
        self.service_name = service_name
        self.start_time = time.time()

    def start_metrics_server(self, port: int = 8080):
        """Start Prometheus metrics server"""
        try:
            start_http_server(port)
            logger.info(f"Metrics server started on port {port}")
        except Exception as e:
            logger.error(f"Failed to start metrics server: {e}")

    def record_request(self, method: str, endpoint: str, status_code: int, duration: float):
        """Record HTTP request metrics"""
        REQUEST_COUNT.labels(
            method=method,
            endpoint=endpoint,
            status_code=status_code,
            service=self.service_name
        ).inc()

        REQUEST_DURATION.labels(
            method=method,
            endpoint=endpoint,
            service=self.service_name
        ).observe(duration)

    def record_training_job(self, status: str):
        """Record training job metrics"""
        TRAINING_JOBS.labels(
            status=status,
            service=self.service_name
        ).inc()

    def record_forecast_generated(self):
        """Record forecast generation metrics"""
        FORECASTS_GENERATED.labels(
            service=self.service_name
        ).inc()

    def set_active_connections(self, count: int):
        """Set active database connections"""
        ACTIVE_CONNECTIONS.labels(
            service=self.service_name
        ).set(count)

def metrics_middleware(metrics_collector: MetricsCollector):
    """Middleware to collect metrics"""

    # Must be async so call_next (a coroutine) can be awaited
    async def middleware(request, call_next):
        start_time = time.time()

        response = await call_next(request)

        duration = time.time() - start_time

        metrics_collector.record_request(
            method=request.method,
            endpoint=request.url.path,
            status_code=response.status_code,
            duration=duration
        )

        return response

    return middleware
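A minimal wiring sketch for a FastAPI app; the service name is illustrative (FastAPI's "http" middleware hook expects an async callable, which is why the inner middleware above is async):

# Hypothetical usage
from fastapi import FastAPI

app = FastAPI()
collector = MetricsCollector("tenant-service")
collector.start_metrics_server(port=8080)
app.middleware("http")(metrics_middleware(collector))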
@@ -1,71 +0,0 @@
"""
DateTime utilities for microservices
"""

from datetime import datetime, timezone, timedelta
from typing import Optional
import pytz

def utc_now() -> datetime:
    """Get current UTC datetime"""
    return datetime.now(timezone.utc)

def madrid_now() -> datetime:
    """Get current Madrid datetime"""
    madrid_tz = pytz.timezone('Europe/Madrid')
    return datetime.now(madrid_tz)

def to_utc(dt: datetime) -> datetime:
    """Convert datetime to UTC"""
    if dt.tzinfo is None:
        dt = dt.replace(tzinfo=timezone.utc)
    return dt.astimezone(timezone.utc)

def to_madrid(dt: datetime) -> datetime:
    """Convert datetime to Madrid timezone"""
    madrid_tz = pytz.timezone('Europe/Madrid')
    if dt.tzinfo is None:
        dt = dt.replace(tzinfo=timezone.utc)
    return dt.astimezone(madrid_tz)

def format_datetime(dt: datetime, format_str: str = "%Y-%m-%d %H:%M:%S") -> str:
    """Format datetime as string"""
    return dt.strftime(format_str)

def parse_datetime(dt_str: str, format_str: str = "%Y-%m-%d %H:%M:%S") -> datetime:
    """Parse datetime from string"""
    return datetime.strptime(dt_str, format_str)

def is_business_hours(dt: Optional[datetime] = None) -> bool:
    """Check if datetime is during business hours (9 AM - 6 PM Madrid time)"""
    if dt is None:
        dt = madrid_now()

    if dt.tzinfo is None:
        dt = dt.replace(tzinfo=timezone.utc)

    madrid_dt = to_madrid(dt)

    # Check if it's a weekday (Monday=0, Sunday=6)
    if madrid_dt.weekday() >= 5:  # Weekend
        return False

    # Check if it's business hours
    return 9 <= madrid_dt.hour < 18

def next_business_day(dt: Optional[datetime] = None) -> datetime:
    """Get next business day"""
    if dt is None:
        dt = madrid_now()

    if dt.tzinfo is None:
        dt = dt.replace(tzinfo=timezone.utc)

    madrid_dt = to_madrid(dt)

    # Add days until we reach a weekday
    while madrid_dt.weekday() >= 5:  # Weekend
        madrid_dt += timedelta(days=1)

    # Set to 9 AM
    return madrid_dt.replace(hour=9, minute=0, second=0, microsecond=0)
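A minimal sketch of how a scheduler might combine these helpers; the job functions are placeholders:

# Hypothetical usage
if is_business_hours():
    run_job_now()  # placeholder for the caller's own logic
else:
    scheduled_for = next_business_day()  # a weekday at 09:00 Madrid time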
@@ -1,67 +0,0 @@
"""
Validation utilities for microservices
"""

import re
from typing import Any, Optional
from email_validator import validate_email, EmailNotValidError

def validate_spanish_phone(phone: str) -> bool:
    """Validate Spanish phone number"""
    # Spanish phone pattern: +34 followed by 9 digits
    pattern = r'^(\+34|0034|34)?[6-9]\d{8}$'
    return bool(re.match(pattern, phone.replace(' ', '').replace('-', '')))

def validate_email_address(email: str) -> bool:
    """Validate email address"""
    try:
        validate_email(email)
        return True
    except EmailNotValidError:
        return False

def validate_tenant_name(name: str) -> bool:
    """Validate tenant name"""
    # Must be 2-50 characters, letters, numbers, spaces, hyphens, apostrophes
    pattern = r"^[a-zA-ZÀ-ÿ0-9\s\-']{2,50}$"
    return bool(re.match(pattern, name))

def validate_address(address: str) -> bool:
    """Validate address"""
    # Must be 5-200 characters
    return 5 <= len(address.strip()) <= 200

def validate_coordinates(latitude: float, longitude: float) -> bool:
    """Validate Madrid coordinates"""
    # Madrid is roughly between these coordinates
    madrid_bounds = {
        'lat_min': 40.3,
        'lat_max': 40.6,
        'lon_min': -3.8,
        'lon_max': -3.5
    }

    return (
        madrid_bounds['lat_min'] <= latitude <= madrid_bounds['lat_max'] and
        madrid_bounds['lon_min'] <= longitude <= madrid_bounds['lon_max']
    )

def validate_product_name(name: str) -> bool:
    """Validate product name"""
    # Must be 1-50 characters, letters, numbers, spaces
    pattern = r"^[a-zA-ZÀ-ÿ0-9\s]{1,50}$"
    return bool(re.match(pattern, name))

def validate_positive_number(value: Any) -> bool:
    """Validate positive number"""
    try:
        return float(value) > 0
    except (ValueError, TypeError):
        return False

def validate_non_negative_number(value: Any) -> bool:
    """Validate non-negative number"""
    try:
        return float(value) >= 0
    except (ValueError, TypeError):
        return False
@@ -1,4 +1,9 @@
# services/training/Dockerfile
# Add this stage at the top of each service Dockerfile
FROM python:3.11-slim as shared
WORKDIR /shared
COPY shared/ /shared/

# Then your main service stage
FROM python:3.11-slim

WORKDIR /app
@@ -6,24 +11,20 @@ WORKDIR /app
# Install system dependencies
RUN apt-get update && apt-get install -y \
    gcc \
    g++ \
    curl \
    && rm -rf /var/lib/apt/lists/*

# Copy requirements
COPY requirements.txt .
COPY services/training/requirements.txt .

# Install Python dependencies
RUN pip install --no-cache-dir -r requirements.txt

# Copy shared libraries
COPY shared/ /app/shared/
# Copy shared libraries from the shared stage
COPY --from=shared /shared /app/shared

# Copy application code
COPY . .

# Create model storage directory
RUN mkdir -p /app/models
COPY services/training/ .

# Add shared libraries to Python path
ENV PYTHONPATH="/app:/app/shared:$PYTHONPATH"
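With /app and /app/shared on PYTHONPATH, service code inside the container can import the shared modules shown in this diff. A sketch, assuming the shared tree is laid out as a shared/ package with auth and utils subpackages (the module paths are assumptions, not confirmed by the diff):

# Hypothetical imports from service code inside the container
from shared.auth.jwt_handler import JWTHandler
from shared.utils.validation import validate_email_address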
@@ -1,41 +0,0 @@
"""
Authentication decorators for FastAPI
"""

from functools import wraps
from fastapi import HTTPException, Depends
from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials
import httpx
import logging

logger = logging.getLogger(__name__)

security = HTTPBearer()

def verify_service_token(auth_service_url: str):
    """Verify service token with auth service"""

    # HTTPBearer yields HTTPAuthorizationCredentials, not a plain str
    async def verify_token(token: HTTPAuthorizationCredentials = Depends(security)):
        try:
            async with httpx.AsyncClient() as client:
                response = await client.post(
                    f"{auth_service_url}/verify",
                    headers={"Authorization": f"Bearer {token.credentials}"}
                )

                if response.status_code == 200:
                    return response.json()
                else:
                    raise HTTPException(
                        status_code=401,
                        detail="Invalid authentication credentials"
                    )

        except httpx.RequestError as e:
            logger.error(f"Auth service unavailable: {e}")
            raise HTTPException(
                status_code=503,
                detail="Authentication service unavailable"
            )

    return verify_token
@@ -1,58 +0,0 @@
"""
Shared JWT Authentication Handler
Used across all microservices for consistent authentication
"""

from jose import jwt
from datetime import datetime, timedelta, timezone
from typing import Optional, Dict, Any
import logging

logger = logging.getLogger(__name__)

class JWTHandler:
    """JWT token handling for microservices"""

    def __init__(self, secret_key: str, algorithm: str = "HS256"):
        self.secret_key = secret_key
        self.algorithm = algorithm

    def create_access_token(self, data: Dict[str, Any], expires_delta: Optional[timedelta] = None) -> str:
        """Create JWT access token"""
        to_encode = data.copy()

        if expires_delta:
            expire = datetime.now(timezone.utc) + expires_delta
        else:
            expire = datetime.now(timezone.utc) + timedelta(minutes=30)

        to_encode.update({"exp": expire, "type": "access"})

        encoded_jwt = jwt.encode(to_encode, self.secret_key, algorithm=self.algorithm)
        return encoded_jwt

    def create_refresh_token(self, data: Dict[str, Any], expires_delta: Optional[timedelta] = None) -> str:
        """Create JWT refresh token"""
        to_encode = data.copy()

        if expires_delta:
            expire = datetime.now(timezone.utc) + expires_delta
        else:
            expire = datetime.now(timezone.utc) + timedelta(days=7)

        to_encode.update({"exp": expire, "type": "refresh"})

        encoded_jwt = jwt.encode(to_encode, self.secret_key, algorithm=self.algorithm)
        return encoded_jwt

    def verify_token(self, token: str) -> Optional[Dict[str, Any]]:
        """Verify and decode JWT token"""
        try:
            payload = jwt.decode(token, self.secret_key, algorithms=[self.algorithm])
            return payload
        except jwt.ExpiredSignatureError:
            logger.warning("Token has expired")
            return None
        except jwt.JWTError:
            logger.warning("Invalid token")
            return None
@@ -1,56 +0,0 @@
"""
Base database configuration for all microservices
"""

import os
from sqlalchemy import create_engine
from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine
from sqlalchemy.orm import sessionmaker, declarative_base
from sqlalchemy.pool import StaticPool
import logging

logger = logging.getLogger(__name__)

Base = declarative_base()

class DatabaseManager:
    """Database manager for microservices"""

    def __init__(self, database_url: str):
        self.database_url = database_url
        self.async_engine = create_async_engine(
            database_url,
            echo=False,
            pool_pre_ping=True,
            pool_recycle=300,
            pool_size=20,
            max_overflow=30
        )

        self.async_session_local = sessionmaker(
            self.async_engine,
            class_=AsyncSession,
            expire_on_commit=False
        )

    async def get_db(self):
        """Get database session"""
        async with self.async_session_local() as session:
            try:
                yield session
            except Exception as e:
                logger.error(f"Database session error: {e}")
                await session.rollback()
                raise
            finally:
                await session.close()

    async def create_tables(self):
        """Create database tables"""
        async with self.async_engine.begin() as conn:
            await conn.run_sync(Base.metadata.create_all)

    async def drop_tables(self):
        """Drop database tables"""
        async with self.async_engine.begin() as conn:
            await conn.run_sync(Base.metadata.drop_all)
@@ -1,73 +0,0 @@
"""
Event definitions for microservices communication
"""

from dataclasses import dataclass, field
from datetime import datetime, timezone
from typing import Dict, Any, Optional
import uuid

@dataclass
class BaseEvent:
    """Base event class"""
    # All fields carry defaults so subclasses can override event_type with a
    # default value without hitting dataclass field-ordering errors
    event_id: str = ""
    event_type: str = ""
    service_name: str = ""
    timestamp: Optional[datetime] = None
    data: Dict[str, Any] = field(default_factory=dict)
    correlation_id: Optional[str] = None

    def __post_init__(self):
        if not self.event_id:
            self.event_id = str(uuid.uuid4())
        if not self.timestamp:
            self.timestamp = datetime.now(timezone.utc)

# Training Events
@dataclass
class TrainingStartedEvent(BaseEvent):
    event_type: str = "training.started"

@dataclass
class TrainingCompletedEvent(BaseEvent):
    event_type: str = "training.completed"

@dataclass
class TrainingFailedEvent(BaseEvent):
    event_type: str = "training.failed"

# Forecasting Events
@dataclass
class ForecastGeneratedEvent(BaseEvent):
    event_type: str = "forecast.generated"

@dataclass
class ForecastRequestedEvent(BaseEvent):
    event_type: str = "forecast.requested"

# User Events
@dataclass
class UserRegisteredEvent(BaseEvent):
    event_type: str = "user.registered"

@dataclass
class UserLoginEvent(BaseEvent):
    event_type: str = "user.login"

# Tenant Events
@dataclass
class TenantCreatedEvent(BaseEvent):
    event_type: str = "tenant.created"

@dataclass
class TenantUpdatedEvent(BaseEvent):
    event_type: str = "tenant.updated"

# Notification Events
@dataclass
class NotificationSentEvent(BaseEvent):
    event_type: str = "notification.sent"

@dataclass
class NotificationFailedEvent(BaseEvent):
    event_type: str = "notification.failed"
@@ -1,96 +0,0 @@
"""
RabbitMQ messaging client for microservices
"""

import asyncio
import json
import logging
from typing import Dict, Any, Callable
import aio_pika
from aio_pika import connect_robust, Message, DeliveryMode

logger = logging.getLogger(__name__)

class RabbitMQClient:
    """RabbitMQ client for microservices communication"""

    def __init__(self, connection_url: str):
        self.connection_url = connection_url
        self.connection = None
        self.channel = None

    async def connect(self):
        """Connect to RabbitMQ"""
        try:
            self.connection = await connect_robust(self.connection_url)
            self.channel = await self.connection.channel()
            logger.info("Connected to RabbitMQ")
        except Exception as e:
            logger.error(f"Failed to connect to RabbitMQ: {e}")
            raise

    async def disconnect(self):
        """Disconnect from RabbitMQ"""
        if self.connection:
            await self.connection.close()
            logger.info("Disconnected from RabbitMQ")

    async def publish_event(self, exchange_name: str, routing_key: str, event_data: Dict[str, Any]):
        """Publish event to RabbitMQ"""
        try:
            if not self.channel:
                await self.connect()

            # Declare exchange
            exchange = await self.channel.declare_exchange(
                exchange_name,
                aio_pika.ExchangeType.TOPIC,
                durable=True
            )

            # Create message
            message = Message(
                json.dumps(event_data).encode(),
                delivery_mode=DeliveryMode.PERSISTENT,
                content_type="application/json"
            )

            # Publish message
            await exchange.publish(message, routing_key=routing_key)

            logger.info(f"Published event to {exchange_name} with routing key {routing_key}")

        except Exception as e:
            logger.error(f"Failed to publish event: {e}")
            raise

    async def consume_events(self, exchange_name: str, queue_name: str, routing_key: str, callback: Callable):
        """Consume events from RabbitMQ"""
        try:
            if not self.channel:
                await self.connect()

            # Declare exchange
            exchange = await self.channel.declare_exchange(
                exchange_name,
                aio_pika.ExchangeType.TOPIC,
                durable=True
            )

            # Declare queue
            queue = await self.channel.declare_queue(
                queue_name,
                durable=True
            )

            # Bind queue to exchange
            await queue.bind(exchange, routing_key)

            # Set up consumer
            await queue.consume(callback)

            logger.info(f"Started consuming events from {queue_name}")

        except Exception as e:
            logger.error(f"Failed to consume events: {e}")
            raise
Some files were not shown because too many files have changed in this diff.