From 013d32d00f14aff50b5c04f4ac35b67bccfb3d92 Mon Sep 17 00:00:00 2001
From: Urtzi Alfaro
Date: Thu, 17 Jul 2025 15:55:23 +0200
Subject: [PATCH] Fix Dockerfiles: build from the project root and drop the
 per-service copies of shared/

Each service image is now built with the repository root as the Docker
build context, so the top-level shared/ directory can be copied into the
image through a dedicated build stage instead of being vendored into every
service. The stale per-service copies of shared/ are deleted, requirements
and application code are copied via paths relative to the root, and
docker-compose additionally mounts ./shared into each container for
development.
---
 docker-compose.yml | 53 +++++----
 gateway/Dockerfile | 13 +-
 gateway/shared/auth/__init__.py | 0
 gateway/shared/auth/decorators.py | 41 -------
 gateway/shared/auth/jwt_handler.py | 58 ---------
 gateway/shared/database/__init__.py | 0
 gateway/shared/database/base.py | 56 ---------
 gateway/shared/messaging/__init__.py | 0
 gateway/shared/messaging/events.py | 73 ------------
 gateway/shared/messaging/rabbitmq.py | 96 ---------------
 gateway/shared/monitoring/__init__.py | 0
 gateway/shared/monitoring/logging.py | 77 ------------
 gateway/shared/monitoring/metrics.py | 112 ------------------
 gateway/shared/utils/__init__.py | 0
 gateway/shared/utils/datetime_utils.py | 71 -----------
 gateway/shared/utils/validation.py | 67 -----------
 services/auth/Dockerfile | 14 ++-
 services/auth/shared/auth/__init__.py | 0
 services/auth/shared/auth/decorators.py | 41 -------
 services/auth/shared/auth/jwt_handler.py | 58 ---------
 services/auth/shared/database/__init__.py | 0
 services/auth/shared/database/base.py | 56 ---------
 services/auth/shared/messaging/__init__.py | 0
 services/auth/shared/messaging/events.py | 73 ------------
 services/auth/shared/messaging/rabbitmq.py | 96 ---------------
 services/auth/shared/monitoring/__init__.py | 0
 services/auth/shared/monitoring/logging.py | 77 ------------
 services/auth/shared/monitoring/metrics.py | 112 ------------------
 services/auth/shared/utils/__init__.py | 0
 services/auth/shared/utils/datetime_utils.py | 71 -----------
 services/auth/shared/utils/validation.py | 67 -----------
 services/data/Dockerfile | 15 ++-
 services/data/shared/auth/__init__.py | 0
 services/data/shared/auth/decorators.py | 41 -------
 services/data/shared/auth/jwt_handler.py | 58 ---------
 services/data/shared/database/__init__.py | 0
 services/data/shared/database/base.py | 56 ---------
 services/data/shared/messaging/__init__.py | 0
 services/data/shared/messaging/events.py | 73 ------------
 services/data/shared/messaging/rabbitmq.py | 96 ---------------
 services/data/shared/monitoring/__init__.py | 0
 services/data/shared/monitoring/logging.py | 77 ------------
 services/data/shared/monitoring/metrics.py | 112 ------------------
 services/data/shared/utils/__init__.py | 0
 services/data/shared/utils/datetime_utils.py | 71 -----------
 services/data/shared/utils/validation.py | 67 -----------
 services/forecasting/Dockerfile | 15 ++-
 services/forecasting/shared/auth/__init__.py | 0
 .../forecasting/shared/auth/decorators.py | 41 -------
 .../forecasting/shared/auth/jwt_handler.py | 58 ---------
 .../forecasting/shared/database/__init__.py | 0
 services/forecasting/shared/database/base.py | 56 ---------
 .../forecasting/shared/messaging/__init__.py | 0
 .../forecasting/shared/messaging/events.py | 73 ------------
 .../forecasting/shared/messaging/rabbitmq.py | 96 ---------------
 .../forecasting/shared/monitoring/__init__.py | 0
 .../forecasting/shared/monitoring/logging.py | 77 ------------
 .../forecasting/shared/monitoring/metrics.py | 112 ------------------
 services/forecasting/shared/utils/__init__.py | 0
 .../shared/utils/datetime_utils.py | 71 -----------
 .../forecasting/shared/utils/validation.py | 67 -----------
 services/notification/Dockerfile | 15 ++-
 services/notification/shared/auth/__init__.py | 0
 .../notification/shared/auth/decorators.py | 41 -------
 .../notification/shared/auth/jwt_handler.py | 58 ---------
 .../notification/shared/database/__init__.py | 0
 services/notification/shared/database/base.py | 56 ---------
 .../notification/shared/messaging/__init__.py | 0
 .../notification/shared/messaging/events.py | 73 ------------
 .../notification/shared/messaging/rabbitmq.py | 96 ---------------
 .../shared/monitoring/__init__.py | 0
 .../notification/shared/monitoring/logging.py | 77 ------------
 .../notification/shared/monitoring/metrics.py | 112 ------------------
 .../notification/shared/utils/__init__.py | 0
 .../shared/utils/datetime_utils.py | 71 -----------
 .../notification/shared/utils/validation.py | 67 -----------
 services/tenant/Dockerfile | 15 ++-
 services/tenant/shared/auth/__init__.py | 0
 services/tenant/shared/auth/decorators.py | 41 -------
 services/tenant/shared/auth/jwt_handler.py | 58 ---------
 services/tenant/shared/database/__init__.py | 0
 services/tenant/shared/database/base.py | 56 ---------
 services/tenant/shared/messaging/__init__.py | 0
 services/tenant/shared/messaging/events.py | 73 ------------
 services/tenant/shared/messaging/rabbitmq.py | 96 ---------------
 services/tenant/shared/monitoring/__init__.py | 0
 services/tenant/shared/monitoring/logging.py | 77 ------------
 services/tenant/shared/monitoring/metrics.py | 112 ------------------
 services/tenant/shared/utils/__init__.py | 0
 .../tenant/shared/utils/datetime_utils.py | 71 -----------
 services/tenant/shared/utils/validation.py | 67 -----------
 services/training/Dockerfile | 19 +--
 services/training/shared/auth/__init__.py | 0
 services/training/shared/auth/decorators.py | 41 -------
 services/training/shared/auth/jwt_handler.py | 58 ---------
 services/training/shared/database/__init__.py | 0
 services/training/shared/database/base.py | 56 ---------
 .../training/shared/messaging/__init__.py | 0
 services/training/shared/messaging/events.py | 73 ------------
 .../training/shared/messaging/rabbitmq.py | 96 ---------------
 .../training/shared/monitoring/__init__.py | 0
 .../training/shared/monitoring/logging.py | 77 ------------
 .../training/shared/monitoring/metrics.py | 112 ------------------
 services/training/shared/utils/__init__.py | 0
 .../training/shared/utils/datetime_utils.py | 71 -----------
 services/training/shared/utils/validation.py | 67 -----------
 106 files changed, 109 insertions(+), 4607 deletions(-)
 delete mode 100644 gateway/shared/auth/__init__.py
 delete mode 100644 gateway/shared/auth/decorators.py
 delete mode 100644 gateway/shared/auth/jwt_handler.py
 delete mode 100644 gateway/shared/database/__init__.py
 delete mode 100644 gateway/shared/database/base.py
 delete mode 100644 gateway/shared/messaging/__init__.py
 delete mode 100644 gateway/shared/messaging/events.py
 delete mode 100644 gateway/shared/messaging/rabbitmq.py
 delete mode 100644 gateway/shared/monitoring/__init__.py
 delete mode 100644 gateway/shared/monitoring/logging.py
 delete mode 100644 gateway/shared/monitoring/metrics.py
 delete mode 100644 gateway/shared/utils/__init__.py
 delete mode 100644 gateway/shared/utils/datetime_utils.py
 delete mode 100644 gateway/shared/utils/validation.py
 delete mode 100644 services/auth/shared/auth/__init__.py
 delete mode 100644 services/auth/shared/auth/decorators.py
 delete mode 100644 services/auth/shared/auth/jwt_handler.py
 delete mode 100644 services/auth/shared/database/__init__.py
 delete mode 100644 services/auth/shared/database/base.py
 delete mode 100644 services/auth/shared/messaging/__init__.py
 delete mode 100644 services/auth/shared/messaging/events.py
 delete mode 100644 services/auth/shared/messaging/rabbitmq.py
 delete mode 100644 services/auth/shared/monitoring/__init__.py
 delete mode 100644 services/auth/shared/monitoring/logging.py
 delete mode 100644 services/auth/shared/monitoring/metrics.py
 delete mode 100644 services/auth/shared/utils/__init__.py
 delete mode 100644 services/auth/shared/utils/datetime_utils.py
 delete mode 100644 services/auth/shared/utils/validation.py
 delete mode 100644 services/data/shared/auth/__init__.py
 delete mode 100644 services/data/shared/auth/decorators.py
 delete mode 100644 services/data/shared/auth/jwt_handler.py
 delete mode 100644 services/data/shared/database/__init__.py
 delete mode 100644 services/data/shared/database/base.py
 delete mode 100644 services/data/shared/messaging/__init__.py
 delete mode 100644 services/data/shared/messaging/events.py
 delete mode 100644 services/data/shared/messaging/rabbitmq.py
 delete mode 100644 services/data/shared/monitoring/__init__.py
 delete mode 100644 services/data/shared/monitoring/logging.py
 delete mode 100644 services/data/shared/monitoring/metrics.py
 delete mode 100644 services/data/shared/utils/__init__.py
 delete mode 100644 services/data/shared/utils/datetime_utils.py
 delete mode 100644 services/data/shared/utils/validation.py
 delete mode 100644 services/forecasting/shared/auth/__init__.py
 delete mode 100644 services/forecasting/shared/auth/decorators.py
 delete mode 100644 services/forecasting/shared/auth/jwt_handler.py
 delete mode 100644 services/forecasting/shared/database/__init__.py
 delete mode 100644 services/forecasting/shared/database/base.py
 delete mode 100644 services/forecasting/shared/messaging/__init__.py
 delete mode 100644 services/forecasting/shared/messaging/events.py
 delete mode 100644 services/forecasting/shared/messaging/rabbitmq.py
 delete mode 100644 services/forecasting/shared/monitoring/__init__.py
 delete mode 100644 services/forecasting/shared/monitoring/logging.py
 delete mode 100644 services/forecasting/shared/monitoring/metrics.py
 delete mode 100644 services/forecasting/shared/utils/__init__.py
 delete mode 100644 services/forecasting/shared/utils/datetime_utils.py
 delete mode 100644 services/forecasting/shared/utils/validation.py
 delete mode 100644 services/notification/shared/auth/__init__.py
 delete mode 100644 services/notification/shared/auth/decorators.py
 delete mode 100644 services/notification/shared/auth/jwt_handler.py
 delete mode 100644 services/notification/shared/database/__init__.py
 delete mode 100644 services/notification/shared/database/base.py
 delete mode 100644 services/notification/shared/messaging/__init__.py
 delete mode 100644 services/notification/shared/messaging/events.py
 delete mode 100644 services/notification/shared/messaging/rabbitmq.py
 delete mode 100644 services/notification/shared/monitoring/__init__.py
 delete mode 100644 services/notification/shared/monitoring/logging.py
 delete mode 100644 services/notification/shared/monitoring/metrics.py
 delete mode 100644 services/notification/shared/utils/__init__.py
 delete mode 100644 services/notification/shared/utils/datetime_utils.py
 delete mode 100644 services/notification/shared/utils/validation.py
 delete mode 100644 services/tenant/shared/auth/__init__.py
 delete mode 100644 services/tenant/shared/auth/decorators.py
 delete mode 100644 services/tenant/shared/auth/jwt_handler.py
 delete mode 100644 services/tenant/shared/database/__init__.py
 delete mode 100644 services/tenant/shared/database/base.py
 delete mode 100644 services/tenant/shared/messaging/__init__.py
 delete mode 100644 services/tenant/shared/messaging/events.py
 delete mode 100644 services/tenant/shared/messaging/rabbitmq.py
 delete mode 100644 services/tenant/shared/monitoring/__init__.py
 delete mode 100644 services/tenant/shared/monitoring/logging.py
 delete mode 100644 services/tenant/shared/monitoring/metrics.py
 delete mode 100644 services/tenant/shared/utils/__init__.py
 delete mode 100644 services/tenant/shared/utils/datetime_utils.py
 delete mode 100644 services/tenant/shared/utils/validation.py
 delete mode 100644 services/training/shared/auth/__init__.py
 delete mode 100644 services/training/shared/auth/decorators.py
 delete mode 100644 services/training/shared/auth/jwt_handler.py
 delete mode 100644 services/training/shared/database/__init__.py
 delete mode 100644 services/training/shared/database/base.py
 delete mode 100644 services/training/shared/messaging/__init__.py
 delete mode 100644 services/training/shared/messaging/events.py
 delete mode 100644 services/training/shared/messaging/rabbitmq.py
 delete mode 100644 services/training/shared/monitoring/__init__.py
 delete mode 100644 services/training/shared/monitoring/logging.py
 delete mode 100644 services/training/shared/monitoring/metrics.py
 delete mode 100644 services/training/shared/utils/__init__.py
 delete mode 100644 services/training/shared/utils/datetime_utils.py
 delete mode 100644 services/training/shared/utils/validation.py

diff --git a/docker-compose.yml b/docker-compose.yml
index 07ac0150..88657da3 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -162,9 +162,9 @@ services:
 
   # Authentication Service
   auth-service:
-    build:
-      context: ./services/auth
-      dockerfile: Dockerfile
+    build:
+      context: .  # Build context is the project root
+      dockerfile: ./services/auth/Dockerfile
     container_name: bakery-auth-service
     environment:
       - DATABASE_URL=postgresql+asyncpg://auth_user:auth_pass123@auth-db:5432/auth_db
@@ -188,6 +188,7 @@ services:
       - bakery-network
     volumes:
       - ./services/auth:/app
+      - ./shared:/app/shared  # Explicitly mount shared
     healthcheck:
       test: ["CMD", "curl", "-f", "http://localhost:8000/health"]
       interval: 30s
@@ -196,9 +197,9 @@ services:
 
   # Training Service
   training-service:
-    build:
-      context: ./services/training
-      dockerfile: Dockerfile
+    build:
+      context: .  # Build context is the project root
+      dockerfile: ./services/training/Dockerfile
     container_name: bakery-training-service
     environment:
       - DATABASE_URL=postgresql+asyncpg://training_user:training_pass123@training-db:5432/training_db
@@ -223,6 +224,7 @@ services:
       - bakery-network
     volumes:
       - ./services/training:/app
+      - ./shared:/app/shared  # Explicitly mount shared
     healthcheck:
       test: ["CMD", "curl", "-f", "http://localhost:8000/health"]
       interval: 30s
@@ -231,9 +233,9 @@ services:
 
   # Forecasting Service
   forecasting-service:
-    build:
-      context: ./services/forecasting
-      dockerfile: Dockerfile
+    build:
+      context: .  # Build context is the project root
+      dockerfile: ./services/forecasting/Dockerfile
     container_name: bakery-forecasting-service
     environment:
       - DATABASE_URL=postgresql+asyncpg://forecasting_user:forecasting_pass123@forecasting-db:5432/forecasting_db
@@ -259,6 +261,7 @@ services:
       - bakery-network
     volumes:
       - ./services/forecasting:/app
+      - ./shared:/app/shared  # Explicitly mount shared
     healthcheck:
       test: ["CMD", "curl", "-f", "http://localhost:8000/health"]
       interval: 30s
@@ -267,9 +270,9 @@ services:
 
   # Data Service
   data-service:
-    build:
-      context: ./services/data
-      dockerfile: Dockerfile
+    build:
+      context: .  # Build context is the project root
+      dockerfile: ./services/data/Dockerfile
     container_name: bakery-data-service
     environment:
       - DATABASE_URL=postgresql+asyncpg://data_user:data_pass123@data-db:5432/data_db
@@ -295,6 +298,7 @@ services:
       - bakery-network
     volumes:
       - ./services/data:/app
+      - ./shared:/app/shared  # Explicitly mount shared
     healthcheck:
       test: ["CMD", "curl", "-f", "http://localhost:8000/health"]
       interval: 30s
@@ -303,9 +307,9 @@ services:
 
   # Tenant Service
   tenant-service:
-    build:
-      context: ./services/tenant
-      dockerfile: Dockerfile
+    build:
+      context: .  # Build context is the project root
+      dockerfile: ./services/tenant/Dockerfile
     container_name: bakery-tenant-service
     environment:
       - DATABASE_URL=postgresql+asyncpg://tenant_user:tenant_pass123@tenant-db:5432/tenant_db
@@ -329,6 +333,7 @@ services:
       - bakery-network
     volumes:
       - ./services/tenant:/app
+      - ./shared:/app/shared  # Explicitly mount shared
     healthcheck:
       test: ["CMD", "curl", "-f", "http://localhost:8000/health"]
       interval: 30s
@@ -337,9 +342,9 @@ services:
 
   # Notification Service
   notification-service:
-    build:
-      context: ./services/notification
-      dockerfile: Dockerfile
+    build:
+      context: .  # Build context is the project root
+      dockerfile: ./services/notification/Dockerfile
     container_name: bakery-notification-service
     environment:
       - DATABASE_URL=postgresql+asyncpg://notification_user:notification_pass123@notification-db:5432/notification_db
@@ -368,6 +373,7 @@ services:
       - bakery-network
     volumes:
       - ./services/notification:/app
+      - ./shared:/app/shared  # Explicitly mount shared
     healthcheck:
       test: ["CMD", "curl", "-f", "http://localhost:8000/health"]
       interval: 30s
@@ -376,9 +382,9 @@ services:
 
   # API Gateway
   gateway:
-    build:
-      context: ./gateway
-      dockerfile: Dockerfile
+    build:
+      context: .  # Build context is the project root
+      dockerfile: ./gateway/Dockerfile
     container_name: bakery-gateway
     environment:
      - REDIS_URL=redis://redis:6379/6
@@ -410,6 +416,7 @@ services:
       - bakery-network
     volumes:
       - ./gateway:/app
+      - ./shared:/app/shared  # Explicitly mount shared
     healthcheck:
       test: ["CMD", "curl", "-f", "http://localhost:8000/health"]
       interval: 30s
@@ -418,7 +425,7 @@ services:
 
   # Dashboard Frontend
   dashboard:
-    build:
+    build:
       context: ./frontend
       dockerfile: Dockerfile.dev
     container_name: bakery-dashboard
@@ -493,7 +500,7 @@ services:
     volumes:
       - ./infrastructure/monitoring/logstash:/usr/share/logstash/pipeline
     ports:
-      - "5044:5000"
+      - "5044:5000"
     depends_on:
       - elasticsearch
     networks:
diff --git a/gateway/Dockerfile b/gateway/Dockerfile
index e0d8620f..f3ff5be5 100644
--- a/gateway/Dockerfile
+++ b/gateway/Dockerfile
@@ -1,3 +1,9 @@
+# Add this stage at the top of each service Dockerfile
+FROM python:3.11-slim as shared
+WORKDIR /shared
+COPY shared/ /shared/
+
+# Then your main service stage
 FROM python:3.11-slim
 
 WORKDIR /app
@@ -9,13 +15,16 @@ RUN apt-get update && apt-get install -y \
     && rm -rf /var/lib/apt/lists/*
 
 # Copy requirements
-COPY requirements.txt .
+COPY gateway/requirements.txt .
 
 # Install Python dependencies
 RUN pip install --no-cache-dir -r requirements.txt
 
+# Copy shared libraries from the shared stage
+COPY --from=shared /shared /app/shared
+
 # Copy application code
-COPY . .
+COPY gateway/ .
 
# Add shared libraries to Python path ENV PYTHONPATH="/app:/app/shared:$PYTHONPATH" diff --git a/gateway/shared/auth/__init__.py b/gateway/shared/auth/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/gateway/shared/auth/decorators.py b/gateway/shared/auth/decorators.py deleted file mode 100644 index 53095a15..00000000 --- a/gateway/shared/auth/decorators.py +++ /dev/null @@ -1,41 +0,0 @@ -""" -Authentication decorators for FastAPI -""" - -from functools import wraps -from fastapi import HTTPException, Depends -from fastapi.security import HTTPBearer -import httpx -import logging - -logger = logging.getLogger(__name__) - -security = HTTPBearer() - -def verify_service_token(auth_service_url: str): - """Verify service token with auth service""" - - async def verify_token(token: str = Depends(security)): - try: - async with httpx.AsyncClient() as client: - response = await client.post( - f"{auth_service_url}/verify", - headers={"Authorization": f"Bearer {token.credentials}"} - ) - - if response.status_code == 200: - return response.json() - else: - raise HTTPException( - status_code=401, - detail="Invalid authentication credentials" - ) - - except httpx.RequestError as e: - logger.error(f"Auth service unavailable: {e}") - raise HTTPException( - status_code=503, - detail="Authentication service unavailable" - ) - - return verify_token \ No newline at end of file diff --git a/gateway/shared/auth/jwt_handler.py b/gateway/shared/auth/jwt_handler.py deleted file mode 100644 index 2eae4724..00000000 --- a/gateway/shared/auth/jwt_handler.py +++ /dev/null @@ -1,58 +0,0 @@ -""" -Shared JWT Authentication Handler -Used across all microservices for consistent authentication -""" - -from jose import jwt -from datetime import datetime, timedelta -from typing import Optional, Dict, Any -import logging - -logger = logging.getLogger(__name__) - -class JWTHandler: - """JWT token handling for microservices""" - - def __init__(self, secret_key: str, algorithm: str = "HS256"): - self.secret_key = secret_key - self.algorithm = algorithm - - def create_access_token(self, data: Dict[str, Any], expires_delta: Optional[timedelta] = None) -> str: - """Create JWT access token""" - to_encode = data.copy() - - if expires_delta: - expire = datetime.now(datetime.timezone.utc) + expires_delta - else: - expire = datetime.now(datetime.timezone.utc) + timedelta(minutes=30) - - to_encode.update({"exp": expire, "type": "access"}) - - encoded_jwt = jwt.encode(to_encode, self.secret_key, algorithm=self.algorithm) - return encoded_jwt - - def create_refresh_token(self, data: Dict[str, Any], expires_delta: Optional[timedelta] = None) -> str: - """Create JWT refresh token""" - to_encode = data.copy() - - if expires_delta: - expire = datetime.now(datetime.timezone.utc) + expires_delta - else: - expire = datetime.now(datetime.timezone.utc) + timedelta(days=7) - - to_encode.update({"exp": expire, "type": "refresh"}) - - encoded_jwt = jwt.encode(to_encode, self.secret_key, algorithm=self.algorithm) - return encoded_jwt - - def verify_token(self, token: str) -> Optional[Dict[str, Any]]: - """Verify and decode JWT token""" - try: - payload = jwt.decode(token, self.secret_key, algorithms=[self.algorithm]) - return payload - except jwt.ExpiredSignatureError: - logger.warning("Token has expired") - return None - except jwt.JWTError: - logger.warning("Invalid token") - return None \ No newline at end of file diff --git a/gateway/shared/database/__init__.py b/gateway/shared/database/__init__.py deleted file mode 100644 
index e69de29b..00000000 diff --git a/gateway/shared/database/base.py b/gateway/shared/database/base.py deleted file mode 100644 index e5766716..00000000 --- a/gateway/shared/database/base.py +++ /dev/null @@ -1,56 +0,0 @@ -""" -Base database configuration for all microservices -""" - -import os -from sqlalchemy import create_engine -from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine -from sqlalchemy.orm import sessionmaker, declarative_base -from sqlalchemy.pool import StaticPool -import logging - -logger = logging.getLogger(__name__) - -Base = declarative_base() - -class DatabaseManager: - """Database manager for microservices""" - - def __init__(self, database_url: str): - self.database_url = database_url - self.async_engine = create_async_engine( - database_url, - echo=False, - pool_pre_ping=True, - pool_recycle=300, - pool_size=20, - max_overflow=30 - ) - - self.async_session_local = sessionmaker( - self.async_engine, - class_=AsyncSession, - expire_on_commit=False - ) - - async def get_db(self): - """Get database session""" - async with self.async_session_local() as session: - try: - yield session - except Exception as e: - logger.error(f"Database session error: {e}") - await session.rollback() - raise - finally: - await session.close() - - async def create_tables(self): - """Create database tables""" - async with self.async_engine.begin() as conn: - await conn.run_sync(Base.metadata.create_all) - - async def drop_tables(self): - """Drop database tables""" - async with self.async_engine.begin() as conn: - await conn.run_sync(Base.metadata.drop_all) \ No newline at end of file diff --git a/gateway/shared/messaging/__init__.py b/gateway/shared/messaging/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/gateway/shared/messaging/events.py b/gateway/shared/messaging/events.py deleted file mode 100644 index cf8f8162..00000000 --- a/gateway/shared/messaging/events.py +++ /dev/null @@ -1,73 +0,0 @@ -""" -Event definitions for microservices communication -""" - -from dataclasses import dataclass -from datetime import datetime -from typing import Dict, Any, Optional -import uuid - -@dataclass -class BaseEvent: - """Base event class""" - event_id: str - event_type: str - service_name: str - timestamp: datetime - data: Dict[str, Any] - correlation_id: Optional[str] = None - - def __post_init__(self): - if not self.event_id: - self.event_id = str(uuid.uuid4()) - if not self.timestamp: - self.timestamp = datetime.now(datetime.timezone.utc) - -# Training Events -@dataclass -class TrainingStartedEvent(BaseEvent): - event_type: str = "training.started" - -@dataclass -class TrainingCompletedEvent(BaseEvent): - event_type: str = "training.completed" - -@dataclass -class TrainingFailedEvent(BaseEvent): - event_type: str = "training.failed" - -# Forecasting Events -@dataclass -class ForecastGeneratedEvent(BaseEvent): - event_type: str = "forecast.generated" - -@dataclass -class ForecastRequestedEvent(BaseEvent): - event_type: str = "forecast.requested" - -# User Events -@dataclass -class UserRegisteredEvent(BaseEvent): - event_type: str = "user.registered" - -@dataclass -class UserLoginEvent(BaseEvent): - event_type: str = "user.login" - -# Tenant Events -@dataclass -class TenantCreatedEvent(BaseEvent): - event_type: str = "tenant.created" - -@dataclass -class TenantUpdatedEvent(BaseEvent): - event_type: str = "tenant.updated" - -# Notification Events -@dataclass -class NotificationSentEvent(BaseEvent): - event_type: str = "notification.sent" - -@dataclass 
-class NotificationFailedEvent(BaseEvent): - event_type: str = "notification.failed" \ No newline at end of file diff --git a/gateway/shared/messaging/rabbitmq.py b/gateway/shared/messaging/rabbitmq.py deleted file mode 100644 index 62d95cfb..00000000 --- a/gateway/shared/messaging/rabbitmq.py +++ /dev/null @@ -1,96 +0,0 @@ -""" -RabbitMQ messaging client for microservices -""" - -import asyncio -import json -import logging -from typing import Dict, Any, Callable -import aio_pika -from aio_pika import connect_robust, Message, DeliveryMode - -logger = logging.getLogger(__name__) - -class RabbitMQClient: - """RabbitMQ client for microservices communication""" - - def __init__(self, connection_url: str): - self.connection_url = connection_url - self.connection = None - self.channel = None - - async def connect(self): - """Connect to RabbitMQ""" - try: - self.connection = await connect_robust(self.connection_url) - self.channel = await self.connection.channel() - logger.info("Connected to RabbitMQ") - except Exception as e: - logger.error(f"Failed to connect to RabbitMQ: {e}") - raise - - async def disconnect(self): - """Disconnect from RabbitMQ""" - if self.connection: - await self.connection.close() - logger.info("Disconnected from RabbitMQ") - - async def publish_event(self, exchange_name: str, routing_key: str, event_data: Dict[str, Any]): - """Publish event to RabbitMQ""" - try: - if not self.channel: - await self.connect() - - # Declare exchange - exchange = await self.channel.declare_exchange( - exchange_name, - aio_pika.ExchangeType.TOPIC, - durable=True - ) - - # Create message - message = Message( - json.dumps(event_data).encode(), - delivery_mode=DeliveryMode.PERSISTENT, - content_type="application/json" - ) - - # Publish message - await exchange.publish(message, routing_key=routing_key) - - logger.info(f"Published event to {exchange_name} with routing key {routing_key}") - - except Exception as e: - logger.error(f"Failed to publish event: {e}") - raise - - async def consume_events(self, exchange_name: str, queue_name: str, routing_key: str, callback: Callable): - """Consume events from RabbitMQ""" - try: - if not self.channel: - await self.connect() - - # Declare exchange - exchange = await self.channel.declare_exchange( - exchange_name, - aio_pika.ExchangeType.TOPIC, - durable=True - ) - - # Declare queue - queue = await self.channel.declare_queue( - queue_name, - durable=True - ) - - # Bind queue to exchange - await queue.bind(exchange, routing_key) - - # Set up consumer - await queue.consume(callback) - - logger.info(f"Started consuming events from {queue_name}") - - except Exception as e: - logger.error(f"Failed to consume events: {e}") - raise \ No newline at end of file diff --git a/gateway/shared/monitoring/__init__.py b/gateway/shared/monitoring/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/gateway/shared/monitoring/logging.py b/gateway/shared/monitoring/logging.py deleted file mode 100644 index 0fde234d..00000000 --- a/gateway/shared/monitoring/logging.py +++ /dev/null @@ -1,77 +0,0 @@ -""" -Centralized logging configuration for microservices -""" - -import logging -import logging.config -import os -from typing import Dict, Any - -def setup_logging(service_name: str, log_level: str = "INFO") -> None: - """Set up logging configuration for a microservice""" - - config: Dict[str, Any] = { - "version": 1, - "disable_existing_loggers": False, - "formatters": { - "standard": { - "format": "%(asctime)s [%(levelname)s] %(name)s: %(message)s" - }, - 
"detailed": { - "format": "%(asctime)s [%(levelname)s] %(name)s [%(filename)s:%(lineno)d] %(message)s" - }, - "json": { - "()": "pythonjsonlogger.jsonlogger.JsonFormatter", - "format": "%(asctime)s %(name)s %(levelname)s %(message)s" - } - }, - "handlers": { - "console": { - "class": "logging.StreamHandler", - "level": log_level, - "formatter": "standard", - "stream": "ext://sys.stdout" - }, - "file": { - "class": "logging.FileHandler", - "level": log_level, - "formatter": "detailed", - "filename": f"/var/log/{service_name}.log", - "mode": "a" - }, - "logstash": { - "class": "logstash.TCPLogstashHandler", - "host": os.getenv("LOGSTASH_HOST", "localhost"), - "port": int(os.getenv("LOGSTASH_PORT", "5000")), - "version": 1, - "message_type": "logstash", - "fqdn": False, - "tags": [service_name] - } - }, - "loggers": { - "": { - "handlers": ["console", "file"], - "level": log_level, - "propagate": False - }, - "uvicorn": { - "handlers": ["console"], - "level": log_level, - "propagate": False - }, - "uvicorn.access": { - "handlers": ["console"], - "level": log_level, - "propagate": False - } - } - } - - # Add logstash handler if in production - if os.getenv("ENVIRONMENT") == "production": - config["loggers"][""]["handlers"].append("logstash") - - logging.config.dictConfig(config) - logger = logging.getLogger(__name__) - logger.info(f"Logging configured for {service_name}") \ No newline at end of file diff --git a/gateway/shared/monitoring/metrics.py b/gateway/shared/monitoring/metrics.py deleted file mode 100644 index a5e35223..00000000 --- a/gateway/shared/monitoring/metrics.py +++ /dev/null @@ -1,112 +0,0 @@ -""" -Metrics collection for microservices -""" - -import time -import logging -from typing import Dict, Any -from prometheus_client import Counter, Histogram, Gauge, start_http_server -from functools import wraps - -logger = logging.getLogger(__name__) - -# Prometheus metrics -REQUEST_COUNT = Counter( - 'http_requests_total', - 'Total HTTP requests', - ['method', 'endpoint', 'status_code', 'service'] -) - -REQUEST_DURATION = Histogram( - 'http_request_duration_seconds', - 'HTTP request duration in seconds', - ['method', 'endpoint', 'service'] -) - -ACTIVE_CONNECTIONS = Gauge( - 'active_connections', - 'Active database connections', - ['service'] -) - -TRAINING_JOBS = Counter( - 'training_jobs_total', - 'Total training jobs', - ['status', 'service'] -) - -FORECASTS_GENERATED = Counter( - 'forecasts_generated_total', - 'Total forecasts generated', - ['service'] -) - -class MetricsCollector: - """Metrics collector for microservices""" - - def __init__(self, service_name: str): - self.service_name = service_name - self.start_time = time.time() - - def start_metrics_server(self, port: int = 8080): - """Start Prometheus metrics server""" - try: - start_http_server(port) - logger.info(f"Metrics server started on port {port}") - except Exception as e: - logger.error(f"Failed to start metrics server: {e}") - - def record_request(self, method: str, endpoint: str, status_code: int, duration: float): - """Record HTTP request metrics""" - REQUEST_COUNT.labels( - method=method, - endpoint=endpoint, - status_code=status_code, - service=self.service_name - ).inc() - - REQUEST_DURATION.labels( - method=method, - endpoint=endpoint, - service=self.service_name - ).observe(duration) - - def record_training_job(self, status: str): - """Record training job metrics""" - TRAINING_JOBS.labels( - status=status, - service=self.service_name - ).inc() - - def record_forecast_generated(self): - """Record forecast 
generation metrics""" - FORECASTS_GENERATED.labels( - service=self.service_name - ).inc() - - def set_active_connections(self, count: int): - """Set active database connections""" - ACTIVE_CONNECTIONS.labels( - service=self.service_name - ).set(count) - -def metrics_middleware(metrics_collector: MetricsCollector): - """Middleware to collect metrics""" - - def middleware(request, call_next): - start_time = time.time() - - response = call_next(request) - - duration = time.time() - start_time - - metrics_collector.record_request( - method=request.method, - endpoint=request.url.path, - status_code=response.status_code, - duration=duration - ) - - return response - - return middleware \ No newline at end of file diff --git a/gateway/shared/utils/__init__.py b/gateway/shared/utils/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/gateway/shared/utils/datetime_utils.py b/gateway/shared/utils/datetime_utils.py deleted file mode 100644 index 3035001a..00000000 --- a/gateway/shared/utils/datetime_utils.py +++ /dev/null @@ -1,71 +0,0 @@ -""" -DateTime utilities for microservices -""" - -from datetime import datetime, timezone, timedelta -from typing import Optional -import pytz - -def utc_now() -> datetime: - """Get current UTC datetime""" - return datetime.now(timezone.utc) - -def madrid_now() -> datetime: - """Get current Madrid datetime""" - madrid_tz = pytz.timezone('Europe/Madrid') - return datetime.now(madrid_tz) - -def to_utc(dt: datetime) -> datetime: - """Convert datetime to UTC""" - if dt.tzinfo is None: - dt = dt.replace(tzinfo=timezone.utc) - return dt.astimezone(timezone.utc) - -def to_madrid(dt: datetime) -> datetime: - """Convert datetime to Madrid timezone""" - madrid_tz = pytz.timezone('Europe/Madrid') - if dt.tzinfo is None: - dt = dt.replace(tzinfo=timezone.utc) - return dt.astimezone(madrid_tz) - -def format_datetime(dt: datetime, format_str: str = "%Y-%m-%d %H:%M:%S") -> str: - """Format datetime as string""" - return dt.strftime(format_str) - -def parse_datetime(dt_str: str, format_str: str = "%Y-%m-%d %H:%M:%S") -> datetime: - """Parse datetime from string""" - return datetime.strptime(dt_str, format_str) - -def is_business_hours(dt: Optional[datetime] = None) -> bool: - """Check if datetime is during business hours (9 AM - 6 PM Madrid time)""" - if dt is None: - dt = madrid_now() - - if dt.tzinfo is None: - dt = dt.replace(tzinfo=timezone.utc) - - madrid_dt = to_madrid(dt) - - # Check if it's a weekday (Monday=0, Sunday=6) - if madrid_dt.weekday() >= 5: # Weekend - return False - - # Check if it's business hours - return 9 <= madrid_dt.hour < 18 - -def next_business_day(dt: Optional[datetime] = None) -> datetime: - """Get next business day""" - if dt is None: - dt = madrid_now() - - if dt.tzinfo is None: - dt = dt.replace(tzinfo=timezone.utc) - - madrid_dt = to_madrid(dt) - - # Add days until we reach a weekday - while madrid_dt.weekday() >= 5: # Weekend - madrid_dt += timedelta(days=1) - - # Set to 9 AM - return madrid_dt.replace(hour=9, minute=0, second=0, microsecond=0) \ No newline at end of file diff --git a/gateway/shared/utils/validation.py b/gateway/shared/utils/validation.py deleted file mode 100644 index c855b20c..00000000 --- a/gateway/shared/utils/validation.py +++ /dev/null @@ -1,67 +0,0 @@ -""" -Validation utilities for microservices -""" - -import re -from typing import Any, Optional -from email_validator import validate_email, EmailNotValidError - -def validate_spanish_phone(phone: str) -> bool: - """Validate Spanish phone number""" - # 
Spanish phone pattern: +34 followed by 9 digits - pattern = r'^(\+34|0034|34)?[6-9]\d{8}$' - return bool(re.match(pattern, phone.replace(' ', '').replace('-', ''))) - -def validate_email_address(email: str) -> bool: - """Validate email address""" - try: - validate_email(email) - return True - except EmailNotValidError: - return False - -def validate_tenant_name(name: str) -> bool: - """Validate tenant name""" - # Must be 2-50 characters, letters, numbers, spaces, hyphens, apostrophes - pattern = r"^[a-zA-ZÀ-ÿ0-9\s\-']{2,50}$" - return bool(re.match(pattern, name)) - -def validate_address(address: str) -> bool: - """Validate address""" - # Must be 5-200 characters - return 5 <= len(address.strip()) <= 200 - -def validate_coordinates(latitude: float, longitude: float) -> bool: - """Validate Madrid coordinates""" - # Madrid is roughly between these coordinates - madrid_bounds = { - 'lat_min': 40.3, - 'lat_max': 40.6, - 'lon_min': -3.8, - 'lon_max': -3.5 - } - - return ( - madrid_bounds['lat_min'] <= latitude <= madrid_bounds['lat_max'] and - madrid_bounds['lon_min'] <= longitude <= madrid_bounds['lon_max'] - ) - -def validate_product_name(name: str) -> bool: - """Validate product name""" - # Must be 1-50 characters, letters, numbers, spaces - pattern = r"^[a-zA-ZÀ-ÿ0-9\s]{1,50}$" - return bool(re.match(pattern, name)) - -def validate_positive_number(value: Any) -> bool: - """Validate positive number""" - try: - return float(value) > 0 - except (ValueError, TypeError): - return False - -def validate_non_negative_number(value: Any) -> bool: - """Validate non-negative number""" - try: - return float(value) >= 0 - except (ValueError, TypeError): - return False \ No newline at end of file diff --git a/services/auth/Dockerfile b/services/auth/Dockerfile index a0ad7a21..5d0a3b6a 100644 --- a/services/auth/Dockerfile +++ b/services/auth/Dockerfile @@ -1,3 +1,9 @@ +# Add this stage at the top of each service Dockerfile +FROM python:3.11-slim as shared +WORKDIR /shared +COPY shared/ /shared/ + +# Then your main service stage FROM python:3.11-slim WORKDIR /app @@ -9,16 +15,16 @@ RUN apt-get update && apt-get install -y \ && rm -rf /var/lib/apt/lists/* # Copy requirements -COPY requirements.txt . +COPY services/auth/requirements.txt . # Install Python dependencies RUN pip install --no-cache-dir -r requirements.txt -# Copy shared libraries -COPY shared/ /app/shared/ +# Copy shared libraries from the shared stage +COPY --from=shared /shared /app/shared # Copy application code -COPY . . +COPY services/auth/ . 
# Add shared libraries to Python path ENV PYTHONPATH="/app:/app/shared:$PYTHONPATH" diff --git a/services/auth/shared/auth/__init__.py b/services/auth/shared/auth/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/services/auth/shared/auth/decorators.py b/services/auth/shared/auth/decorators.py deleted file mode 100644 index 53095a15..00000000 --- a/services/auth/shared/auth/decorators.py +++ /dev/null @@ -1,41 +0,0 @@ -""" -Authentication decorators for FastAPI -""" - -from functools import wraps -from fastapi import HTTPException, Depends -from fastapi.security import HTTPBearer -import httpx -import logging - -logger = logging.getLogger(__name__) - -security = HTTPBearer() - -def verify_service_token(auth_service_url: str): - """Verify service token with auth service""" - - async def verify_token(token: str = Depends(security)): - try: - async with httpx.AsyncClient() as client: - response = await client.post( - f"{auth_service_url}/verify", - headers={"Authorization": f"Bearer {token.credentials}"} - ) - - if response.status_code == 200: - return response.json() - else: - raise HTTPException( - status_code=401, - detail="Invalid authentication credentials" - ) - - except httpx.RequestError as e: - logger.error(f"Auth service unavailable: {e}") - raise HTTPException( - status_code=503, - detail="Authentication service unavailable" - ) - - return verify_token \ No newline at end of file diff --git a/services/auth/shared/auth/jwt_handler.py b/services/auth/shared/auth/jwt_handler.py deleted file mode 100644 index 2eae4724..00000000 --- a/services/auth/shared/auth/jwt_handler.py +++ /dev/null @@ -1,58 +0,0 @@ -""" -Shared JWT Authentication Handler -Used across all microservices for consistent authentication -""" - -from jose import jwt -from datetime import datetime, timedelta -from typing import Optional, Dict, Any -import logging - -logger = logging.getLogger(__name__) - -class JWTHandler: - """JWT token handling for microservices""" - - def __init__(self, secret_key: str, algorithm: str = "HS256"): - self.secret_key = secret_key - self.algorithm = algorithm - - def create_access_token(self, data: Dict[str, Any], expires_delta: Optional[timedelta] = None) -> str: - """Create JWT access token""" - to_encode = data.copy() - - if expires_delta: - expire = datetime.now(datetime.timezone.utc) + expires_delta - else: - expire = datetime.now(datetime.timezone.utc) + timedelta(minutes=30) - - to_encode.update({"exp": expire, "type": "access"}) - - encoded_jwt = jwt.encode(to_encode, self.secret_key, algorithm=self.algorithm) - return encoded_jwt - - def create_refresh_token(self, data: Dict[str, Any], expires_delta: Optional[timedelta] = None) -> str: - """Create JWT refresh token""" - to_encode = data.copy() - - if expires_delta: - expire = datetime.now(datetime.timezone.utc) + expires_delta - else: - expire = datetime.now(datetime.timezone.utc) + timedelta(days=7) - - to_encode.update({"exp": expire, "type": "refresh"}) - - encoded_jwt = jwt.encode(to_encode, self.secret_key, algorithm=self.algorithm) - return encoded_jwt - - def verify_token(self, token: str) -> Optional[Dict[str, Any]]: - """Verify and decode JWT token""" - try: - payload = jwt.decode(token, self.secret_key, algorithms=[self.algorithm]) - return payload - except jwt.ExpiredSignatureError: - logger.warning("Token has expired") - return None - except jwt.JWTError: - logger.warning("Invalid token") - return None \ No newline at end of file diff --git a/services/auth/shared/database/__init__.py 
b/services/auth/shared/database/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/services/auth/shared/database/base.py b/services/auth/shared/database/base.py deleted file mode 100644 index e5766716..00000000 --- a/services/auth/shared/database/base.py +++ /dev/null @@ -1,56 +0,0 @@ -""" -Base database configuration for all microservices -""" - -import os -from sqlalchemy import create_engine -from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine -from sqlalchemy.orm import sessionmaker, declarative_base -from sqlalchemy.pool import StaticPool -import logging - -logger = logging.getLogger(__name__) - -Base = declarative_base() - -class DatabaseManager: - """Database manager for microservices""" - - def __init__(self, database_url: str): - self.database_url = database_url - self.async_engine = create_async_engine( - database_url, - echo=False, - pool_pre_ping=True, - pool_recycle=300, - pool_size=20, - max_overflow=30 - ) - - self.async_session_local = sessionmaker( - self.async_engine, - class_=AsyncSession, - expire_on_commit=False - ) - - async def get_db(self): - """Get database session""" - async with self.async_session_local() as session: - try: - yield session - except Exception as e: - logger.error(f"Database session error: {e}") - await session.rollback() - raise - finally: - await session.close() - - async def create_tables(self): - """Create database tables""" - async with self.async_engine.begin() as conn: - await conn.run_sync(Base.metadata.create_all) - - async def drop_tables(self): - """Drop database tables""" - async with self.async_engine.begin() as conn: - await conn.run_sync(Base.metadata.drop_all) \ No newline at end of file diff --git a/services/auth/shared/messaging/__init__.py b/services/auth/shared/messaging/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/services/auth/shared/messaging/events.py b/services/auth/shared/messaging/events.py deleted file mode 100644 index affa94a2..00000000 --- a/services/auth/shared/messaging/events.py +++ /dev/null @@ -1,73 +0,0 @@ -""" -Event definitions for microservices communication -""" - -from dataclasses import dataclass -from datetime import datetime -from typing import Dict, Any, Optional -import uuid - -@dataclass -class BaseEvent: - """Base event class""" - event_id: str - event_type: str - service_name: str - timestamp: datetime - data: Dict[str, Any] - correlation_id: Optional[str] = None - - def __post_init__(self): - if not self.event_id: - self.event_id = str(uuid.uuid4()) - if not self.timestamp: - self.timestamp = datetime.now(datetime.timezone.utc) - -# Training Events -@dataclass -class TrainingStartedEvent(BaseEvent): - event_type: str = "training.started" - -@dataclass -class TrainingCompletedEvent(BaseEvent): - event_type: str = "training.completed" - -@dataclass -class TrainingFailedEvent(BaseEvent): - event_type: str = "training.failed" - -# Forecasting Events -@dataclass -class ForecastGeneratedEvent(BaseEvent): - event_type: str = "forecast.generated" - -@dataclass -class ForecastRequestedEvent(BaseEvent): - event_type: str = "forecast.requested" - -# User Events -@dataclass -class UserRegisteredEvent(BaseEvent): - event_type: str = "user.registered" - -@dataclass -class UserLoginEvent(BaseEvent): - event_type: str = "user.login" - -# Tenant Events -@dataclass -class TenantCreatedEvent(BaseEvent): - event_type: str = "tenant.created" - -@dataclass -class TenantUpdatedEvent(BaseEvent): - event_type: str = "tenant.updated" - -# Notification 
Events -@dataclass -class NotificationSentEvent(BaseEvent): - event_type: str = "notification.sent" - -@dataclass -class NotificationFailedEvent(BaseEvent): - event_type: str = "notification.failed" \ No newline at end of file diff --git a/services/auth/shared/messaging/rabbitmq.py b/services/auth/shared/messaging/rabbitmq.py deleted file mode 100644 index 62d95cfb..00000000 --- a/services/auth/shared/messaging/rabbitmq.py +++ /dev/null @@ -1,96 +0,0 @@ -""" -RabbitMQ messaging client for microservices -""" - -import asyncio -import json -import logging -from typing import Dict, Any, Callable -import aio_pika -from aio_pika import connect_robust, Message, DeliveryMode - -logger = logging.getLogger(__name__) - -class RabbitMQClient: - """RabbitMQ client for microservices communication""" - - def __init__(self, connection_url: str): - self.connection_url = connection_url - self.connection = None - self.channel = None - - async def connect(self): - """Connect to RabbitMQ""" - try: - self.connection = await connect_robust(self.connection_url) - self.channel = await self.connection.channel() - logger.info("Connected to RabbitMQ") - except Exception as e: - logger.error(f"Failed to connect to RabbitMQ: {e}") - raise - - async def disconnect(self): - """Disconnect from RabbitMQ""" - if self.connection: - await self.connection.close() - logger.info("Disconnected from RabbitMQ") - - async def publish_event(self, exchange_name: str, routing_key: str, event_data: Dict[str, Any]): - """Publish event to RabbitMQ""" - try: - if not self.channel: - await self.connect() - - # Declare exchange - exchange = await self.channel.declare_exchange( - exchange_name, - aio_pika.ExchangeType.TOPIC, - durable=True - ) - - # Create message - message = Message( - json.dumps(event_data).encode(), - delivery_mode=DeliveryMode.PERSISTENT, - content_type="application/json" - ) - - # Publish message - await exchange.publish(message, routing_key=routing_key) - - logger.info(f"Published event to {exchange_name} with routing key {routing_key}") - - except Exception as e: - logger.error(f"Failed to publish event: {e}") - raise - - async def consume_events(self, exchange_name: str, queue_name: str, routing_key: str, callback: Callable): - """Consume events from RabbitMQ""" - try: - if not self.channel: - await self.connect() - - # Declare exchange - exchange = await self.channel.declare_exchange( - exchange_name, - aio_pika.ExchangeType.TOPIC, - durable=True - ) - - # Declare queue - queue = await self.channel.declare_queue( - queue_name, - durable=True - ) - - # Bind queue to exchange - await queue.bind(exchange, routing_key) - - # Set up consumer - await queue.consume(callback) - - logger.info(f"Started consuming events from {queue_name}") - - except Exception as e: - logger.error(f"Failed to consume events: {e}") - raise \ No newline at end of file diff --git a/services/auth/shared/monitoring/__init__.py b/services/auth/shared/monitoring/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/services/auth/shared/monitoring/logging.py b/services/auth/shared/monitoring/logging.py deleted file mode 100644 index 0fde234d..00000000 --- a/services/auth/shared/monitoring/logging.py +++ /dev/null @@ -1,77 +0,0 @@ -""" -Centralized logging configuration for microservices -""" - -import logging -import logging.config -import os -from typing import Dict, Any - -def setup_logging(service_name: str, log_level: str = "INFO") -> None: - """Set up logging configuration for a microservice""" - - config: Dict[str, Any] = { 
- "version": 1, - "disable_existing_loggers": False, - "formatters": { - "standard": { - "format": "%(asctime)s [%(levelname)s] %(name)s: %(message)s" - }, - "detailed": { - "format": "%(asctime)s [%(levelname)s] %(name)s [%(filename)s:%(lineno)d] %(message)s" - }, - "json": { - "()": "pythonjsonlogger.jsonlogger.JsonFormatter", - "format": "%(asctime)s %(name)s %(levelname)s %(message)s" - } - }, - "handlers": { - "console": { - "class": "logging.StreamHandler", - "level": log_level, - "formatter": "standard", - "stream": "ext://sys.stdout" - }, - "file": { - "class": "logging.FileHandler", - "level": log_level, - "formatter": "detailed", - "filename": f"/var/log/{service_name}.log", - "mode": "a" - }, - "logstash": { - "class": "logstash.TCPLogstashHandler", - "host": os.getenv("LOGSTASH_HOST", "localhost"), - "port": int(os.getenv("LOGSTASH_PORT", "5000")), - "version": 1, - "message_type": "logstash", - "fqdn": False, - "tags": [service_name] - } - }, - "loggers": { - "": { - "handlers": ["console", "file"], - "level": log_level, - "propagate": False - }, - "uvicorn": { - "handlers": ["console"], - "level": log_level, - "propagate": False - }, - "uvicorn.access": { - "handlers": ["console"], - "level": log_level, - "propagate": False - } - } - } - - # Add logstash handler if in production - if os.getenv("ENVIRONMENT") == "production": - config["loggers"][""]["handlers"].append("logstash") - - logging.config.dictConfig(config) - logger = logging.getLogger(__name__) - logger.info(f"Logging configured for {service_name}") \ No newline at end of file diff --git a/services/auth/shared/monitoring/metrics.py b/services/auth/shared/monitoring/metrics.py deleted file mode 100644 index a5e35223..00000000 --- a/services/auth/shared/monitoring/metrics.py +++ /dev/null @@ -1,112 +0,0 @@ -""" -Metrics collection for microservices -""" - -import time -import logging -from typing import Dict, Any -from prometheus_client import Counter, Histogram, Gauge, start_http_server -from functools import wraps - -logger = logging.getLogger(__name__) - -# Prometheus metrics -REQUEST_COUNT = Counter( - 'http_requests_total', - 'Total HTTP requests', - ['method', 'endpoint', 'status_code', 'service'] -) - -REQUEST_DURATION = Histogram( - 'http_request_duration_seconds', - 'HTTP request duration in seconds', - ['method', 'endpoint', 'service'] -) - -ACTIVE_CONNECTIONS = Gauge( - 'active_connections', - 'Active database connections', - ['service'] -) - -TRAINING_JOBS = Counter( - 'training_jobs_total', - 'Total training jobs', - ['status', 'service'] -) - -FORECASTS_GENERATED = Counter( - 'forecasts_generated_total', - 'Total forecasts generated', - ['service'] -) - -class MetricsCollector: - """Metrics collector for microservices""" - - def __init__(self, service_name: str): - self.service_name = service_name - self.start_time = time.time() - - def start_metrics_server(self, port: int = 8080): - """Start Prometheus metrics server""" - try: - start_http_server(port) - logger.info(f"Metrics server started on port {port}") - except Exception as e: - logger.error(f"Failed to start metrics server: {e}") - - def record_request(self, method: str, endpoint: str, status_code: int, duration: float): - """Record HTTP request metrics""" - REQUEST_COUNT.labels( - method=method, - endpoint=endpoint, - status_code=status_code, - service=self.service_name - ).inc() - - REQUEST_DURATION.labels( - method=method, - endpoint=endpoint, - service=self.service_name - ).observe(duration) - - def record_training_job(self, status: str): - 
"""Record training job metrics""" - TRAINING_JOBS.labels( - status=status, - service=self.service_name - ).inc() - - def record_forecast_generated(self): - """Record forecast generation metrics""" - FORECASTS_GENERATED.labels( - service=self.service_name - ).inc() - - def set_active_connections(self, count: int): - """Set active database connections""" - ACTIVE_CONNECTIONS.labels( - service=self.service_name - ).set(count) - -def metrics_middleware(metrics_collector: MetricsCollector): - """Middleware to collect metrics""" - - def middleware(request, call_next): - start_time = time.time() - - response = call_next(request) - - duration = time.time() - start_time - - metrics_collector.record_request( - method=request.method, - endpoint=request.url.path, - status_code=response.status_code, - duration=duration - ) - - return response - - return middleware \ No newline at end of file diff --git a/services/auth/shared/utils/__init__.py b/services/auth/shared/utils/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/services/auth/shared/utils/datetime_utils.py b/services/auth/shared/utils/datetime_utils.py deleted file mode 100644 index 3035001a..00000000 --- a/services/auth/shared/utils/datetime_utils.py +++ /dev/null @@ -1,71 +0,0 @@ -""" -DateTime utilities for microservices -""" - -from datetime import datetime, timezone, timedelta -from typing import Optional -import pytz - -def utc_now() -> datetime: - """Get current UTC datetime""" - return datetime.now(timezone.utc) - -def madrid_now() -> datetime: - """Get current Madrid datetime""" - madrid_tz = pytz.timezone('Europe/Madrid') - return datetime.now(madrid_tz) - -def to_utc(dt: datetime) -> datetime: - """Convert datetime to UTC""" - if dt.tzinfo is None: - dt = dt.replace(tzinfo=timezone.utc) - return dt.astimezone(timezone.utc) - -def to_madrid(dt: datetime) -> datetime: - """Convert datetime to Madrid timezone""" - madrid_tz = pytz.timezone('Europe/Madrid') - if dt.tzinfo is None: - dt = dt.replace(tzinfo=timezone.utc) - return dt.astimezone(madrid_tz) - -def format_datetime(dt: datetime, format_str: str = "%Y-%m-%d %H:%M:%S") -> str: - """Format datetime as string""" - return dt.strftime(format_str) - -def parse_datetime(dt_str: str, format_str: str = "%Y-%m-%d %H:%M:%S") -> datetime: - """Parse datetime from string""" - return datetime.strptime(dt_str, format_str) - -def is_business_hours(dt: Optional[datetime] = None) -> bool: - """Check if datetime is during business hours (9 AM - 6 PM Madrid time)""" - if dt is None: - dt = madrid_now() - - if dt.tzinfo is None: - dt = dt.replace(tzinfo=timezone.utc) - - madrid_dt = to_madrid(dt) - - # Check if it's a weekday (Monday=0, Sunday=6) - if madrid_dt.weekday() >= 5: # Weekend - return False - - # Check if it's business hours - return 9 <= madrid_dt.hour < 18 - -def next_business_day(dt: Optional[datetime] = None) -> datetime: - """Get next business day""" - if dt is None: - dt = madrid_now() - - if dt.tzinfo is None: - dt = dt.replace(tzinfo=timezone.utc) - - madrid_dt = to_madrid(dt) - - # Add days until we reach a weekday - while madrid_dt.weekday() >= 5: # Weekend - madrid_dt += timedelta(days=1) - - # Set to 9 AM - return madrid_dt.replace(hour=9, minute=0, second=0, microsecond=0) \ No newline at end of file diff --git a/services/auth/shared/utils/validation.py b/services/auth/shared/utils/validation.py deleted file mode 100644 index c855b20c..00000000 --- a/services/auth/shared/utils/validation.py +++ /dev/null @@ -1,67 +0,0 @@ -""" -Validation utilities for 
microservices -""" - -import re -from typing import Any, Optional -from email_validator import validate_email, EmailNotValidError - -def validate_spanish_phone(phone: str) -> bool: - """Validate Spanish phone number""" - # Spanish phone pattern: +34 followed by 9 digits - pattern = r'^(\+34|0034|34)?[6-9]\d{8}$' - return bool(re.match(pattern, phone.replace(' ', '').replace('-', ''))) - -def validate_email_address(email: str) -> bool: - """Validate email address""" - try: - validate_email(email) - return True - except EmailNotValidError: - return False - -def validate_tenant_name(name: str) -> bool: - """Validate tenant name""" - # Must be 2-50 characters, letters, numbers, spaces, hyphens, apostrophes - pattern = r"^[a-zA-ZÀ-ÿ0-9\s\-']{2,50}$" - return bool(re.match(pattern, name)) - -def validate_address(address: str) -> bool: - """Validate address""" - # Must be 5-200 characters - return 5 <= len(address.strip()) <= 200 - -def validate_coordinates(latitude: float, longitude: float) -> bool: - """Validate Madrid coordinates""" - # Madrid is roughly between these coordinates - madrid_bounds = { - 'lat_min': 40.3, - 'lat_max': 40.6, - 'lon_min': -3.8, - 'lon_max': -3.5 - } - - return ( - madrid_bounds['lat_min'] <= latitude <= madrid_bounds['lat_max'] and - madrid_bounds['lon_min'] <= longitude <= madrid_bounds['lon_max'] - ) - -def validate_product_name(name: str) -> bool: - """Validate product name""" - # Must be 1-50 characters, letters, numbers, spaces - pattern = r"^[a-zA-ZÀ-ÿ0-9\s]{1,50}$" - return bool(re.match(pattern, name)) - -def validate_positive_number(value: Any) -> bool: - """Validate positive number""" - try: - return float(value) > 0 - except (ValueError, TypeError): - return False - -def validate_non_negative_number(value: Any) -> bool: - """Validate non-negative number""" - try: - return float(value) >= 0 - except (ValueError, TypeError): - return False \ No newline at end of file diff --git a/services/data/Dockerfile b/services/data/Dockerfile index ad431c34..6f6cab04 100644 --- a/services/data/Dockerfile +++ b/services/data/Dockerfile @@ -1,3 +1,9 @@ +# Add this stage at the top of each service Dockerfile +FROM python:3.11-slim as shared +WORKDIR /shared +COPY shared/ /shared/ + +# Then your main service stage FROM python:3.11-slim WORKDIR /app @@ -9,13 +15,16 @@ RUN apt-get update && apt-get install -y \ && rm -rf /var/lib/apt/lists/* # Copy requirements -COPY requirements.txt . +COPY services/data/requirements.txt . # Install Python dependencies RUN pip install --no-cache-dir -r requirements.txt +# Copy shared libraries from the shared stage +COPY --from=shared /shared /app/shared + # Copy application code -COPY . . +COPY services/data/ . 
# Add shared libraries to Python path ENV PYTHONPATH="/app:/app/shared:$PYTHONPATH" @@ -28,4 +37,4 @@ HEALTHCHECK --interval=30s --timeout=10s --start-period=5s --retries=3 \ CMD curl -f http://localhost:8000/health || exit 1 # Run application -CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000"] +CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000"] \ No newline at end of file diff --git a/services/data/shared/auth/__init__.py b/services/data/shared/auth/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/services/data/shared/auth/decorators.py b/services/data/shared/auth/decorators.py deleted file mode 100644 index 53095a15..00000000 --- a/services/data/shared/auth/decorators.py +++ /dev/null @@ -1,41 +0,0 @@ -""" -Authentication decorators for FastAPI -""" - -from functools import wraps -from fastapi import HTTPException, Depends -from fastapi.security import HTTPBearer -import httpx -import logging - -logger = logging.getLogger(__name__) - -security = HTTPBearer() - -def verify_service_token(auth_service_url: str): - """Verify service token with auth service""" - - async def verify_token(token: str = Depends(security)): - try: - async with httpx.AsyncClient() as client: - response = await client.post( - f"{auth_service_url}/verify", - headers={"Authorization": f"Bearer {token.credentials}"} - ) - - if response.status_code == 200: - return response.json() - else: - raise HTTPException( - status_code=401, - detail="Invalid authentication credentials" - ) - - except httpx.RequestError as e: - logger.error(f"Auth service unavailable: {e}") - raise HTTPException( - status_code=503, - detail="Authentication service unavailable" - ) - - return verify_token \ No newline at end of file diff --git a/services/data/shared/auth/jwt_handler.py b/services/data/shared/auth/jwt_handler.py deleted file mode 100644 index 2eae4724..00000000 --- a/services/data/shared/auth/jwt_handler.py +++ /dev/null @@ -1,58 +0,0 @@ -""" -Shared JWT Authentication Handler -Used across all microservices for consistent authentication -""" - -from jose import jwt -from datetime import datetime, timedelta -from typing import Optional, Dict, Any -import logging - -logger = logging.getLogger(__name__) - -class JWTHandler: - """JWT token handling for microservices""" - - def __init__(self, secret_key: str, algorithm: str = "HS256"): - self.secret_key = secret_key - self.algorithm = algorithm - - def create_access_token(self, data: Dict[str, Any], expires_delta: Optional[timedelta] = None) -> str: - """Create JWT access token""" - to_encode = data.copy() - - if expires_delta: - expire = datetime.now(datetime.timezone.utc) + expires_delta - else: - expire = datetime.now(datetime.timezone.utc) + timedelta(minutes=30) - - to_encode.update({"exp": expire, "type": "access"}) - - encoded_jwt = jwt.encode(to_encode, self.secret_key, algorithm=self.algorithm) - return encoded_jwt - - def create_refresh_token(self, data: Dict[str, Any], expires_delta: Optional[timedelta] = None) -> str: - """Create JWT refresh token""" - to_encode = data.copy() - - if expires_delta: - expire = datetime.now(datetime.timezone.utc) + expires_delta - else: - expire = datetime.now(datetime.timezone.utc) + timedelta(days=7) - - to_encode.update({"exp": expire, "type": "refresh"}) - - encoded_jwt = jwt.encode(to_encode, self.secret_key, algorithm=self.algorithm) - return encoded_jwt - - def verify_token(self, token: str) -> Optional[Dict[str, Any]]: - """Verify and decode JWT token""" - try: - payload = 
jwt.decode(token, self.secret_key, algorithms=[self.algorithm]) - return payload - except jwt.ExpiredSignatureError: - logger.warning("Token has expired") - return None - except jwt.JWTError: - logger.warning("Invalid token") - return None \ No newline at end of file diff --git a/services/data/shared/database/__init__.py b/services/data/shared/database/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/services/data/shared/database/base.py b/services/data/shared/database/base.py deleted file mode 100644 index e5766716..00000000 --- a/services/data/shared/database/base.py +++ /dev/null @@ -1,56 +0,0 @@ -""" -Base database configuration for all microservices -""" - -import os -from sqlalchemy import create_engine -from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine -from sqlalchemy.orm import sessionmaker, declarative_base -from sqlalchemy.pool import StaticPool -import logging - -logger = logging.getLogger(__name__) - -Base = declarative_base() - -class DatabaseManager: - """Database manager for microservices""" - - def __init__(self, database_url: str): - self.database_url = database_url - self.async_engine = create_async_engine( - database_url, - echo=False, - pool_pre_ping=True, - pool_recycle=300, - pool_size=20, - max_overflow=30 - ) - - self.async_session_local = sessionmaker( - self.async_engine, - class_=AsyncSession, - expire_on_commit=False - ) - - async def get_db(self): - """Get database session""" - async with self.async_session_local() as session: - try: - yield session - except Exception as e: - logger.error(f"Database session error: {e}") - await session.rollback() - raise - finally: - await session.close() - - async def create_tables(self): - """Create database tables""" - async with self.async_engine.begin() as conn: - await conn.run_sync(Base.metadata.create_all) - - async def drop_tables(self): - """Drop database tables""" - async with self.async_engine.begin() as conn: - await conn.run_sync(Base.metadata.drop_all) \ No newline at end of file diff --git a/services/data/shared/messaging/__init__.py b/services/data/shared/messaging/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/services/data/shared/messaging/events.py b/services/data/shared/messaging/events.py deleted file mode 100644 index cf8f8162..00000000 --- a/services/data/shared/messaging/events.py +++ /dev/null @@ -1,73 +0,0 @@ -""" -Event definitions for microservices communication -""" - -from dataclasses import dataclass -from datetime import datetime -from typing import Dict, Any, Optional -import uuid - -@dataclass -class BaseEvent: - """Base event class""" - event_id: str - event_type: str - service_name: str - timestamp: datetime - data: Dict[str, Any] - correlation_id: Optional[str] = None - - def __post_init__(self): - if not self.event_id: - self.event_id = str(uuid.uuid4()) - if not self.timestamp: - self.timestamp = datetime.now(datetime.timezone.utc) - -# Training Events -@dataclass -class TrainingStartedEvent(BaseEvent): - event_type: str = "training.started" - -@dataclass -class TrainingCompletedEvent(BaseEvent): - event_type: str = "training.completed" - -@dataclass -class TrainingFailedEvent(BaseEvent): - event_type: str = "training.failed" - -# Forecasting Events -@dataclass -class ForecastGeneratedEvent(BaseEvent): - event_type: str = "forecast.generated" - -@dataclass -class ForecastRequestedEvent(BaseEvent): - event_type: str = "forecast.requested" - -# User Events -@dataclass -class UserRegisteredEvent(BaseEvent): - 
event_type: str = "user.registered" - -@dataclass -class UserLoginEvent(BaseEvent): - event_type: str = "user.login" - -# Tenant Events -@dataclass -class TenantCreatedEvent(BaseEvent): - event_type: str = "tenant.created" - -@dataclass -class TenantUpdatedEvent(BaseEvent): - event_type: str = "tenant.updated" - -# Notification Events -@dataclass -class NotificationSentEvent(BaseEvent): - event_type: str = "notification.sent" - -@dataclass -class NotificationFailedEvent(BaseEvent): - event_type: str = "notification.failed" \ No newline at end of file diff --git a/services/data/shared/messaging/rabbitmq.py b/services/data/shared/messaging/rabbitmq.py deleted file mode 100644 index 62d95cfb..00000000 --- a/services/data/shared/messaging/rabbitmq.py +++ /dev/null @@ -1,96 +0,0 @@ -""" -RabbitMQ messaging client for microservices -""" - -import asyncio -import json -import logging -from typing import Dict, Any, Callable -import aio_pika -from aio_pika import connect_robust, Message, DeliveryMode - -logger = logging.getLogger(__name__) - -class RabbitMQClient: - """RabbitMQ client for microservices communication""" - - def __init__(self, connection_url: str): - self.connection_url = connection_url - self.connection = None - self.channel = None - - async def connect(self): - """Connect to RabbitMQ""" - try: - self.connection = await connect_robust(self.connection_url) - self.channel = await self.connection.channel() - logger.info("Connected to RabbitMQ") - except Exception as e: - logger.error(f"Failed to connect to RabbitMQ: {e}") - raise - - async def disconnect(self): - """Disconnect from RabbitMQ""" - if self.connection: - await self.connection.close() - logger.info("Disconnected from RabbitMQ") - - async def publish_event(self, exchange_name: str, routing_key: str, event_data: Dict[str, Any]): - """Publish event to RabbitMQ""" - try: - if not self.channel: - await self.connect() - - # Declare exchange - exchange = await self.channel.declare_exchange( - exchange_name, - aio_pika.ExchangeType.TOPIC, - durable=True - ) - - # Create message - message = Message( - json.dumps(event_data).encode(), - delivery_mode=DeliveryMode.PERSISTENT, - content_type="application/json" - ) - - # Publish message - await exchange.publish(message, routing_key=routing_key) - - logger.info(f"Published event to {exchange_name} with routing key {routing_key}") - - except Exception as e: - logger.error(f"Failed to publish event: {e}") - raise - - async def consume_events(self, exchange_name: str, queue_name: str, routing_key: str, callback: Callable): - """Consume events from RabbitMQ""" - try: - if not self.channel: - await self.connect() - - # Declare exchange - exchange = await self.channel.declare_exchange( - exchange_name, - aio_pika.ExchangeType.TOPIC, - durable=True - ) - - # Declare queue - queue = await self.channel.declare_queue( - queue_name, - durable=True - ) - - # Bind queue to exchange - await queue.bind(exchange, routing_key) - - # Set up consumer - await queue.consume(callback) - - logger.info(f"Started consuming events from {queue_name}") - - except Exception as e: - logger.error(f"Failed to consume events: {e}") - raise \ No newline at end of file diff --git a/services/data/shared/monitoring/__init__.py b/services/data/shared/monitoring/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/services/data/shared/monitoring/logging.py b/services/data/shared/monitoring/logging.py deleted file mode 100644 index 0fde234d..00000000 --- a/services/data/shared/monitoring/logging.py +++ 
/dev/null @@ -1,77 +0,0 @@ -""" -Centralized logging configuration for microservices -""" - -import logging -import logging.config -import os -from typing import Dict, Any - -def setup_logging(service_name: str, log_level: str = "INFO") -> None: - """Set up logging configuration for a microservice""" - - config: Dict[str, Any] = { - "version": 1, - "disable_existing_loggers": False, - "formatters": { - "standard": { - "format": "%(asctime)s [%(levelname)s] %(name)s: %(message)s" - }, - "detailed": { - "format": "%(asctime)s [%(levelname)s] %(name)s [%(filename)s:%(lineno)d] %(message)s" - }, - "json": { - "()": "pythonjsonlogger.jsonlogger.JsonFormatter", - "format": "%(asctime)s %(name)s %(levelname)s %(message)s" - } - }, - "handlers": { - "console": { - "class": "logging.StreamHandler", - "level": log_level, - "formatter": "standard", - "stream": "ext://sys.stdout" - }, - "file": { - "class": "logging.FileHandler", - "level": log_level, - "formatter": "detailed", - "filename": f"/var/log/{service_name}.log", - "mode": "a" - }, - "logstash": { - "class": "logstash.TCPLogstashHandler", - "host": os.getenv("LOGSTASH_HOST", "localhost"), - "port": int(os.getenv("LOGSTASH_PORT", "5000")), - "version": 1, - "message_type": "logstash", - "fqdn": False, - "tags": [service_name] - } - }, - "loggers": { - "": { - "handlers": ["console", "file"], - "level": log_level, - "propagate": False - }, - "uvicorn": { - "handlers": ["console"], - "level": log_level, - "propagate": False - }, - "uvicorn.access": { - "handlers": ["console"], - "level": log_level, - "propagate": False - } - } - } - - # Add logstash handler if in production - if os.getenv("ENVIRONMENT") == "production": - config["loggers"][""]["handlers"].append("logstash") - - logging.config.dictConfig(config) - logger = logging.getLogger(__name__) - logger.info(f"Logging configured for {service_name}") \ No newline at end of file diff --git a/services/data/shared/monitoring/metrics.py b/services/data/shared/monitoring/metrics.py deleted file mode 100644 index a5e35223..00000000 --- a/services/data/shared/monitoring/metrics.py +++ /dev/null @@ -1,112 +0,0 @@ -""" -Metrics collection for microservices -""" - -import time -import logging -from typing import Dict, Any -from prometheus_client import Counter, Histogram, Gauge, start_http_server -from functools import wraps - -logger = logging.getLogger(__name__) - -# Prometheus metrics -REQUEST_COUNT = Counter( - 'http_requests_total', - 'Total HTTP requests', - ['method', 'endpoint', 'status_code', 'service'] -) - -REQUEST_DURATION = Histogram( - 'http_request_duration_seconds', - 'HTTP request duration in seconds', - ['method', 'endpoint', 'service'] -) - -ACTIVE_CONNECTIONS = Gauge( - 'active_connections', - 'Active database connections', - ['service'] -) - -TRAINING_JOBS = Counter( - 'training_jobs_total', - 'Total training jobs', - ['status', 'service'] -) - -FORECASTS_GENERATED = Counter( - 'forecasts_generated_total', - 'Total forecasts generated', - ['service'] -) - -class MetricsCollector: - """Metrics collector for microservices""" - - def __init__(self, service_name: str): - self.service_name = service_name - self.start_time = time.time() - - def start_metrics_server(self, port: int = 8080): - """Start Prometheus metrics server""" - try: - start_http_server(port) - logger.info(f"Metrics server started on port {port}") - except Exception as e: - logger.error(f"Failed to start metrics server: {e}") - - def record_request(self, method: str, endpoint: str, status_code: int, duration: float): 
- """Record HTTP request metrics""" - REQUEST_COUNT.labels( - method=method, - endpoint=endpoint, - status_code=status_code, - service=self.service_name - ).inc() - - REQUEST_DURATION.labels( - method=method, - endpoint=endpoint, - service=self.service_name - ).observe(duration) - - def record_training_job(self, status: str): - """Record training job metrics""" - TRAINING_JOBS.labels( - status=status, - service=self.service_name - ).inc() - - def record_forecast_generated(self): - """Record forecast generation metrics""" - FORECASTS_GENERATED.labels( - service=self.service_name - ).inc() - - def set_active_connections(self, count: int): - """Set active database connections""" - ACTIVE_CONNECTIONS.labels( - service=self.service_name - ).set(count) - -def metrics_middleware(metrics_collector: MetricsCollector): - """Middleware to collect metrics""" - - def middleware(request, call_next): - start_time = time.time() - - response = call_next(request) - - duration = time.time() - start_time - - metrics_collector.record_request( - method=request.method, - endpoint=request.url.path, - status_code=response.status_code, - duration=duration - ) - - return response - - return middleware \ No newline at end of file diff --git a/services/data/shared/utils/__init__.py b/services/data/shared/utils/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/services/data/shared/utils/datetime_utils.py b/services/data/shared/utils/datetime_utils.py deleted file mode 100644 index 3035001a..00000000 --- a/services/data/shared/utils/datetime_utils.py +++ /dev/null @@ -1,71 +0,0 @@ -""" -DateTime utilities for microservices -""" - -from datetime import datetime, timezone, timedelta -from typing import Optional -import pytz - -def utc_now() -> datetime: - """Get current UTC datetime""" - return datetime.now(timezone.utc) - -def madrid_now() -> datetime: - """Get current Madrid datetime""" - madrid_tz = pytz.timezone('Europe/Madrid') - return datetime.now(madrid_tz) - -def to_utc(dt: datetime) -> datetime: - """Convert datetime to UTC""" - if dt.tzinfo is None: - dt = dt.replace(tzinfo=timezone.utc) - return dt.astimezone(timezone.utc) - -def to_madrid(dt: datetime) -> datetime: - """Convert datetime to Madrid timezone""" - madrid_tz = pytz.timezone('Europe/Madrid') - if dt.tzinfo is None: - dt = dt.replace(tzinfo=timezone.utc) - return dt.astimezone(madrid_tz) - -def format_datetime(dt: datetime, format_str: str = "%Y-%m-%d %H:%M:%S") -> str: - """Format datetime as string""" - return dt.strftime(format_str) - -def parse_datetime(dt_str: str, format_str: str = "%Y-%m-%d %H:%M:%S") -> datetime: - """Parse datetime from string""" - return datetime.strptime(dt_str, format_str) - -def is_business_hours(dt: Optional[datetime] = None) -> bool: - """Check if datetime is during business hours (9 AM - 6 PM Madrid time)""" - if dt is None: - dt = madrid_now() - - if dt.tzinfo is None: - dt = dt.replace(tzinfo=timezone.utc) - - madrid_dt = to_madrid(dt) - - # Check if it's a weekday (Monday=0, Sunday=6) - if madrid_dt.weekday() >= 5: # Weekend - return False - - # Check if it's business hours - return 9 <= madrid_dt.hour < 18 - -def next_business_day(dt: Optional[datetime] = None) -> datetime: - """Get next business day""" - if dt is None: - dt = madrid_now() - - if dt.tzinfo is None: - dt = dt.replace(tzinfo=timezone.utc) - - madrid_dt = to_madrid(dt) - - # Add days until we reach a weekday - while madrid_dt.weekday() >= 5: # Weekend - madrid_dt += timedelta(days=1) - - # Set to 9 AM - return 
madrid_dt.replace(hour=9, minute=0, second=0, microsecond=0) \ No newline at end of file diff --git a/services/data/shared/utils/validation.py b/services/data/shared/utils/validation.py deleted file mode 100644 index c855b20c..00000000 --- a/services/data/shared/utils/validation.py +++ /dev/null @@ -1,67 +0,0 @@ -""" -Validation utilities for microservices -""" - -import re -from typing import Any, Optional -from email_validator import validate_email, EmailNotValidError - -def validate_spanish_phone(phone: str) -> bool: - """Validate Spanish phone number""" - # Spanish phone pattern: +34 followed by 9 digits - pattern = r'^(\+34|0034|34)?[6-9]\d{8}$' - return bool(re.match(pattern, phone.replace(' ', '').replace('-', ''))) - -def validate_email_address(email: str) -> bool: - """Validate email address""" - try: - validate_email(email) - return True - except EmailNotValidError: - return False - -def validate_tenant_name(name: str) -> bool: - """Validate tenant name""" - # Must be 2-50 characters, letters, numbers, spaces, hyphens, apostrophes - pattern = r"^[a-zA-ZÀ-ÿ0-9\s\-']{2,50}$" - return bool(re.match(pattern, name)) - -def validate_address(address: str) -> bool: - """Validate address""" - # Must be 5-200 characters - return 5 <= len(address.strip()) <= 200 - -def validate_coordinates(latitude: float, longitude: float) -> bool: - """Validate Madrid coordinates""" - # Madrid is roughly between these coordinates - madrid_bounds = { - 'lat_min': 40.3, - 'lat_max': 40.6, - 'lon_min': -3.8, - 'lon_max': -3.5 - } - - return ( - madrid_bounds['lat_min'] <= latitude <= madrid_bounds['lat_max'] and - madrid_bounds['lon_min'] <= longitude <= madrid_bounds['lon_max'] - ) - -def validate_product_name(name: str) -> bool: - """Validate product name""" - # Must be 1-50 characters, letters, numbers, spaces - pattern = r"^[a-zA-ZÀ-ÿ0-9\s]{1,50}$" - return bool(re.match(pattern, name)) - -def validate_positive_number(value: Any) -> bool: - """Validate positive number""" - try: - return float(value) > 0 - except (ValueError, TypeError): - return False - -def validate_non_negative_number(value: Any) -> bool: - """Validate non-negative number""" - try: - return float(value) >= 0 - except (ValueError, TypeError): - return False \ No newline at end of file diff --git a/services/forecasting/Dockerfile b/services/forecasting/Dockerfile index ad431c34..f0d7c7e9 100644 --- a/services/forecasting/Dockerfile +++ b/services/forecasting/Dockerfile @@ -1,3 +1,9 @@ +# Add this stage at the top of each service Dockerfile +FROM python:3.11-slim as shared +WORKDIR /shared +COPY shared/ /shared/ + +# Then your main service stage FROM python:3.11-slim WORKDIR /app @@ -9,13 +15,16 @@ RUN apt-get update && apt-get install -y \ && rm -rf /var/lib/apt/lists/* # Copy requirements -COPY requirements.txt . +COPY services/forecasting/requirements.txt . # Install Python dependencies RUN pip install --no-cache-dir -r requirements.txt +# Copy shared libraries from the shared stage +COPY --from=shared /shared /app/shared + # Copy application code -COPY . . +COPY services/forecasting/ . 
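One caveat for wiring up the shared metrics module: its metrics_middleware helper is written as a synchronous function, while Starlette/FastAPI expects HTTP middleware to be async and to await call_next. A hedged adaptation for the forecasting service, assuming the shared packages resolve as described for the data service:

    import time

    from fastapi import FastAPI, Request
    from shared.monitoring.metrics import MetricsCollector

    app = FastAPI()
    collector = MetricsCollector("forecasting")

    @app.middleware("http")
    async def collect_metrics(request: Request, call_next):
        start = time.time()
        response = await call_next(request)   # run the downstream handler
        collector.record_request(
            method=request.method,
            endpoint=request.url.path,
            status_code=response.status_code,
            duration=time.time() - start,
        )
        return response
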
# Add shared libraries to Python path ENV PYTHONPATH="/app:/app/shared:$PYTHONPATH" @@ -28,4 +37,4 @@ HEALTHCHECK --interval=30s --timeout=10s --start-period=5s --retries=3 \ CMD curl -f http://localhost:8000/health || exit 1 # Run application -CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000"] +CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000"] \ No newline at end of file diff --git a/services/forecasting/shared/auth/__init__.py b/services/forecasting/shared/auth/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/services/forecasting/shared/auth/decorators.py b/services/forecasting/shared/auth/decorators.py deleted file mode 100644 index 53095a15..00000000 --- a/services/forecasting/shared/auth/decorators.py +++ /dev/null @@ -1,41 +0,0 @@ -""" -Authentication decorators for FastAPI -""" - -from functools import wraps -from fastapi import HTTPException, Depends -from fastapi.security import HTTPBearer -import httpx -import logging - -logger = logging.getLogger(__name__) - -security = HTTPBearer() - -def verify_service_token(auth_service_url: str): - """Verify service token with auth service""" - - async def verify_token(token: str = Depends(security)): - try: - async with httpx.AsyncClient() as client: - response = await client.post( - f"{auth_service_url}/verify", - headers={"Authorization": f"Bearer {token.credentials}"} - ) - - if response.status_code == 200: - return response.json() - else: - raise HTTPException( - status_code=401, - detail="Invalid authentication credentials" - ) - - except httpx.RequestError as e: - logger.error(f"Auth service unavailable: {e}") - raise HTTPException( - status_code=503, - detail="Authentication service unavailable" - ) - - return verify_token \ No newline at end of file diff --git a/services/forecasting/shared/auth/jwt_handler.py b/services/forecasting/shared/auth/jwt_handler.py deleted file mode 100644 index 2eae4724..00000000 --- a/services/forecasting/shared/auth/jwt_handler.py +++ /dev/null @@ -1,58 +0,0 @@ -""" -Shared JWT Authentication Handler -Used across all microservices for consistent authentication -""" - -from jose import jwt -from datetime import datetime, timedelta -from typing import Optional, Dict, Any -import logging - -logger = logging.getLogger(__name__) - -class JWTHandler: - """JWT token handling for microservices""" - - def __init__(self, secret_key: str, algorithm: str = "HS256"): - self.secret_key = secret_key - self.algorithm = algorithm - - def create_access_token(self, data: Dict[str, Any], expires_delta: Optional[timedelta] = None) -> str: - """Create JWT access token""" - to_encode = data.copy() - - if expires_delta: - expire = datetime.now(datetime.timezone.utc) + expires_delta - else: - expire = datetime.now(datetime.timezone.utc) + timedelta(minutes=30) - - to_encode.update({"exp": expire, "type": "access"}) - - encoded_jwt = jwt.encode(to_encode, self.secret_key, algorithm=self.algorithm) - return encoded_jwt - - def create_refresh_token(self, data: Dict[str, Any], expires_delta: Optional[timedelta] = None) -> str: - """Create JWT refresh token""" - to_encode = data.copy() - - if expires_delta: - expire = datetime.now(datetime.timezone.utc) + expires_delta - else: - expire = datetime.now(datetime.timezone.utc) + timedelta(days=7) - - to_encode.update({"exp": expire, "type": "refresh"}) - - encoded_jwt = jwt.encode(to_encode, self.secret_key, algorithm=self.algorithm) - return encoded_jwt - - def verify_token(self, token: str) -> Optional[Dict[str, Any]]: 
- """Verify and decode JWT token""" - try: - payload = jwt.decode(token, self.secret_key, algorithms=[self.algorithm]) - return payload - except jwt.ExpiredSignatureError: - logger.warning("Token has expired") - return None - except jwt.JWTError: - logger.warning("Invalid token") - return None \ No newline at end of file diff --git a/services/forecasting/shared/database/__init__.py b/services/forecasting/shared/database/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/services/forecasting/shared/database/base.py b/services/forecasting/shared/database/base.py deleted file mode 100644 index e5766716..00000000 --- a/services/forecasting/shared/database/base.py +++ /dev/null @@ -1,56 +0,0 @@ -""" -Base database configuration for all microservices -""" - -import os -from sqlalchemy import create_engine -from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine -from sqlalchemy.orm import sessionmaker, declarative_base -from sqlalchemy.pool import StaticPool -import logging - -logger = logging.getLogger(__name__) - -Base = declarative_base() - -class DatabaseManager: - """Database manager for microservices""" - - def __init__(self, database_url: str): - self.database_url = database_url - self.async_engine = create_async_engine( - database_url, - echo=False, - pool_pre_ping=True, - pool_recycle=300, - pool_size=20, - max_overflow=30 - ) - - self.async_session_local = sessionmaker( - self.async_engine, - class_=AsyncSession, - expire_on_commit=False - ) - - async def get_db(self): - """Get database session""" - async with self.async_session_local() as session: - try: - yield session - except Exception as e: - logger.error(f"Database session error: {e}") - await session.rollback() - raise - finally: - await session.close() - - async def create_tables(self): - """Create database tables""" - async with self.async_engine.begin() as conn: - await conn.run_sync(Base.metadata.create_all) - - async def drop_tables(self): - """Drop database tables""" - async with self.async_engine.begin() as conn: - await conn.run_sync(Base.metadata.drop_all) \ No newline at end of file diff --git a/services/forecasting/shared/messaging/__init__.py b/services/forecasting/shared/messaging/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/services/forecasting/shared/messaging/events.py b/services/forecasting/shared/messaging/events.py deleted file mode 100644 index cf8f8162..00000000 --- a/services/forecasting/shared/messaging/events.py +++ /dev/null @@ -1,73 +0,0 @@ -""" -Event definitions for microservices communication -""" - -from dataclasses import dataclass -from datetime import datetime -from typing import Dict, Any, Optional -import uuid - -@dataclass -class BaseEvent: - """Base event class""" - event_id: str - event_type: str - service_name: str - timestamp: datetime - data: Dict[str, Any] - correlation_id: Optional[str] = None - - def __post_init__(self): - if not self.event_id: - self.event_id = str(uuid.uuid4()) - if not self.timestamp: - self.timestamp = datetime.now(datetime.timezone.utc) - -# Training Events -@dataclass -class TrainingStartedEvent(BaseEvent): - event_type: str = "training.started" - -@dataclass -class TrainingCompletedEvent(BaseEvent): - event_type: str = "training.completed" - -@dataclass -class TrainingFailedEvent(BaseEvent): - event_type: str = "training.failed" - -# Forecasting Events -@dataclass -class ForecastGeneratedEvent(BaseEvent): - event_type: str = "forecast.generated" - -@dataclass -class 
ForecastRequestedEvent(BaseEvent): - event_type: str = "forecast.requested" - -# User Events -@dataclass -class UserRegisteredEvent(BaseEvent): - event_type: str = "user.registered" - -@dataclass -class UserLoginEvent(BaseEvent): - event_type: str = "user.login" - -# Tenant Events -@dataclass -class TenantCreatedEvent(BaseEvent): - event_type: str = "tenant.created" - -@dataclass -class TenantUpdatedEvent(BaseEvent): - event_type: str = "tenant.updated" - -# Notification Events -@dataclass -class NotificationSentEvent(BaseEvent): - event_type: str = "notification.sent" - -@dataclass -class NotificationFailedEvent(BaseEvent): - event_type: str = "notification.failed" \ No newline at end of file diff --git a/services/forecasting/shared/messaging/rabbitmq.py b/services/forecasting/shared/messaging/rabbitmq.py deleted file mode 100644 index 62d95cfb..00000000 --- a/services/forecasting/shared/messaging/rabbitmq.py +++ /dev/null @@ -1,96 +0,0 @@ -""" -RabbitMQ messaging client for microservices -""" - -import asyncio -import json -import logging -from typing import Dict, Any, Callable -import aio_pika -from aio_pika import connect_robust, Message, DeliveryMode - -logger = logging.getLogger(__name__) - -class RabbitMQClient: - """RabbitMQ client for microservices communication""" - - def __init__(self, connection_url: str): - self.connection_url = connection_url - self.connection = None - self.channel = None - - async def connect(self): - """Connect to RabbitMQ""" - try: - self.connection = await connect_robust(self.connection_url) - self.channel = await self.connection.channel() - logger.info("Connected to RabbitMQ") - except Exception as e: - logger.error(f"Failed to connect to RabbitMQ: {e}") - raise - - async def disconnect(self): - """Disconnect from RabbitMQ""" - if self.connection: - await self.connection.close() - logger.info("Disconnected from RabbitMQ") - - async def publish_event(self, exchange_name: str, routing_key: str, event_data: Dict[str, Any]): - """Publish event to RabbitMQ""" - try: - if not self.channel: - await self.connect() - - # Declare exchange - exchange = await self.channel.declare_exchange( - exchange_name, - aio_pika.ExchangeType.TOPIC, - durable=True - ) - - # Create message - message = Message( - json.dumps(event_data).encode(), - delivery_mode=DeliveryMode.PERSISTENT, - content_type="application/json" - ) - - # Publish message - await exchange.publish(message, routing_key=routing_key) - - logger.info(f"Published event to {exchange_name} with routing key {routing_key}") - - except Exception as e: - logger.error(f"Failed to publish event: {e}") - raise - - async def consume_events(self, exchange_name: str, queue_name: str, routing_key: str, callback: Callable): - """Consume events from RabbitMQ""" - try: - if not self.channel: - await self.connect() - - # Declare exchange - exchange = await self.channel.declare_exchange( - exchange_name, - aio_pika.ExchangeType.TOPIC, - durable=True - ) - - # Declare queue - queue = await self.channel.declare_queue( - queue_name, - durable=True - ) - - # Bind queue to exchange - await queue.bind(exchange, routing_key) - - # Set up consumer - await queue.consume(callback) - - logger.info(f"Started consuming events from {queue_name}") - - except Exception as e: - logger.error(f"Failed to consume events: {e}") - raise \ No newline at end of file diff --git a/services/forecasting/shared/monitoring/__init__.py b/services/forecasting/shared/monitoring/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git 
a/services/forecasting/shared/monitoring/logging.py b/services/forecasting/shared/monitoring/logging.py deleted file mode 100644 index 0fde234d..00000000 --- a/services/forecasting/shared/monitoring/logging.py +++ /dev/null @@ -1,77 +0,0 @@ -""" -Centralized logging configuration for microservices -""" - -import logging -import logging.config -import os -from typing import Dict, Any - -def setup_logging(service_name: str, log_level: str = "INFO") -> None: - """Set up logging configuration for a microservice""" - - config: Dict[str, Any] = { - "version": 1, - "disable_existing_loggers": False, - "formatters": { - "standard": { - "format": "%(asctime)s [%(levelname)s] %(name)s: %(message)s" - }, - "detailed": { - "format": "%(asctime)s [%(levelname)s] %(name)s [%(filename)s:%(lineno)d] %(message)s" - }, - "json": { - "()": "pythonjsonlogger.jsonlogger.JsonFormatter", - "format": "%(asctime)s %(name)s %(levelname)s %(message)s" - } - }, - "handlers": { - "console": { - "class": "logging.StreamHandler", - "level": log_level, - "formatter": "standard", - "stream": "ext://sys.stdout" - }, - "file": { - "class": "logging.FileHandler", - "level": log_level, - "formatter": "detailed", - "filename": f"/var/log/{service_name}.log", - "mode": "a" - }, - "logstash": { - "class": "logstash.TCPLogstashHandler", - "host": os.getenv("LOGSTASH_HOST", "localhost"), - "port": int(os.getenv("LOGSTASH_PORT", "5000")), - "version": 1, - "message_type": "logstash", - "fqdn": False, - "tags": [service_name] - } - }, - "loggers": { - "": { - "handlers": ["console", "file"], - "level": log_level, - "propagate": False - }, - "uvicorn": { - "handlers": ["console"], - "level": log_level, - "propagate": False - }, - "uvicorn.access": { - "handlers": ["console"], - "level": log_level, - "propagate": False - } - } - } - - # Add logstash handler if in production - if os.getenv("ENVIRONMENT") == "production": - config["loggers"][""]["handlers"].append("logstash") - - logging.config.dictConfig(config) - logger = logging.getLogger(__name__) - logger.info(f"Logging configured for {service_name}") \ No newline at end of file diff --git a/services/forecasting/shared/monitoring/metrics.py b/services/forecasting/shared/monitoring/metrics.py deleted file mode 100644 index a5e35223..00000000 --- a/services/forecasting/shared/monitoring/metrics.py +++ /dev/null @@ -1,112 +0,0 @@ -""" -Metrics collection for microservices -""" - -import time -import logging -from typing import Dict, Any -from prometheus_client import Counter, Histogram, Gauge, start_http_server -from functools import wraps - -logger = logging.getLogger(__name__) - -# Prometheus metrics -REQUEST_COUNT = Counter( - 'http_requests_total', - 'Total HTTP requests', - ['method', 'endpoint', 'status_code', 'service'] -) - -REQUEST_DURATION = Histogram( - 'http_request_duration_seconds', - 'HTTP request duration in seconds', - ['method', 'endpoint', 'service'] -) - -ACTIVE_CONNECTIONS = Gauge( - 'active_connections', - 'Active database connections', - ['service'] -) - -TRAINING_JOBS = Counter( - 'training_jobs_total', - 'Total training jobs', - ['status', 'service'] -) - -FORECASTS_GENERATED = Counter( - 'forecasts_generated_total', - 'Total forecasts generated', - ['service'] -) - -class MetricsCollector: - """Metrics collector for microservices""" - - def __init__(self, service_name: str): - self.service_name = service_name - self.start_time = time.time() - - def start_metrics_server(self, port: int = 8080): - """Start Prometheus metrics server""" - try: - 
start_http_server(port) - logger.info(f"Metrics server started on port {port}") - except Exception as e: - logger.error(f"Failed to start metrics server: {e}") - - def record_request(self, method: str, endpoint: str, status_code: int, duration: float): - """Record HTTP request metrics""" - REQUEST_COUNT.labels( - method=method, - endpoint=endpoint, - status_code=status_code, - service=self.service_name - ).inc() - - REQUEST_DURATION.labels( - method=method, - endpoint=endpoint, - service=self.service_name - ).observe(duration) - - def record_training_job(self, status: str): - """Record training job metrics""" - TRAINING_JOBS.labels( - status=status, - service=self.service_name - ).inc() - - def record_forecast_generated(self): - """Record forecast generation metrics""" - FORECASTS_GENERATED.labels( - service=self.service_name - ).inc() - - def set_active_connections(self, count: int): - """Set active database connections""" - ACTIVE_CONNECTIONS.labels( - service=self.service_name - ).set(count) - -def metrics_middleware(metrics_collector: MetricsCollector): - """Middleware to collect metrics""" - - def middleware(request, call_next): - start_time = time.time() - - response = call_next(request) - - duration = time.time() - start_time - - metrics_collector.record_request( - method=request.method, - endpoint=request.url.path, - status_code=response.status_code, - duration=duration - ) - - return response - - return middleware \ No newline at end of file diff --git a/services/forecasting/shared/utils/__init__.py b/services/forecasting/shared/utils/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/services/forecasting/shared/utils/datetime_utils.py b/services/forecasting/shared/utils/datetime_utils.py deleted file mode 100644 index 3035001a..00000000 --- a/services/forecasting/shared/utils/datetime_utils.py +++ /dev/null @@ -1,71 +0,0 @@ -""" -DateTime utilities for microservices -""" - -from datetime import datetime, timezone, timedelta -from typing import Optional -import pytz - -def utc_now() -> datetime: - """Get current UTC datetime""" - return datetime.now(timezone.utc) - -def madrid_now() -> datetime: - """Get current Madrid datetime""" - madrid_tz = pytz.timezone('Europe/Madrid') - return datetime.now(madrid_tz) - -def to_utc(dt: datetime) -> datetime: - """Convert datetime to UTC""" - if dt.tzinfo is None: - dt = dt.replace(tzinfo=timezone.utc) - return dt.astimezone(timezone.utc) - -def to_madrid(dt: datetime) -> datetime: - """Convert datetime to Madrid timezone""" - madrid_tz = pytz.timezone('Europe/Madrid') - if dt.tzinfo is None: - dt = dt.replace(tzinfo=timezone.utc) - return dt.astimezone(madrid_tz) - -def format_datetime(dt: datetime, format_str: str = "%Y-%m-%d %H:%M:%S") -> str: - """Format datetime as string""" - return dt.strftime(format_str) - -def parse_datetime(dt_str: str, format_str: str = "%Y-%m-%d %H:%M:%S") -> datetime: - """Parse datetime from string""" - return datetime.strptime(dt_str, format_str) - -def is_business_hours(dt: Optional[datetime] = None) -> bool: - """Check if datetime is during business hours (9 AM - 6 PM Madrid time)""" - if dt is None: - dt = madrid_now() - - if dt.tzinfo is None: - dt = dt.replace(tzinfo=timezone.utc) - - madrid_dt = to_madrid(dt) - - # Check if it's a weekday (Monday=0, Sunday=6) - if madrid_dt.weekday() >= 5: # Weekend - return False - - # Check if it's business hours - return 9 <= madrid_dt.hour < 18 - -def next_business_day(dt: Optional[datetime] = None) -> datetime: - """Get next business day""" - 
if dt is None: - dt = madrid_now() - - if dt.tzinfo is None: - dt = dt.replace(tzinfo=timezone.utc) - - madrid_dt = to_madrid(dt) - - # Add days until we reach a weekday - while madrid_dt.weekday() >= 5: # Weekend - madrid_dt += timedelta(days=1) - - # Set to 9 AM - return madrid_dt.replace(hour=9, minute=0, second=0, microsecond=0) \ No newline at end of file diff --git a/services/forecasting/shared/utils/validation.py b/services/forecasting/shared/utils/validation.py deleted file mode 100644 index c855b20c..00000000 --- a/services/forecasting/shared/utils/validation.py +++ /dev/null @@ -1,67 +0,0 @@ -""" -Validation utilities for microservices -""" - -import re -from typing import Any, Optional -from email_validator import validate_email, EmailNotValidError - -def validate_spanish_phone(phone: str) -> bool: - """Validate Spanish phone number""" - # Spanish phone pattern: +34 followed by 9 digits - pattern = r'^(\+34|0034|34)?[6-9]\d{8}$' - return bool(re.match(pattern, phone.replace(' ', '').replace('-', ''))) - -def validate_email_address(email: str) -> bool: - """Validate email address""" - try: - validate_email(email) - return True - except EmailNotValidError: - return False - -def validate_tenant_name(name: str) -> bool: - """Validate tenant name""" - # Must be 2-50 characters, letters, numbers, spaces, hyphens, apostrophes - pattern = r"^[a-zA-ZÀ-ÿ0-9\s\-']{2,50}$" - return bool(re.match(pattern, name)) - -def validate_address(address: str) -> bool: - """Validate address""" - # Must be 5-200 characters - return 5 <= len(address.strip()) <= 200 - -def validate_coordinates(latitude: float, longitude: float) -> bool: - """Validate Madrid coordinates""" - # Madrid is roughly between these coordinates - madrid_bounds = { - 'lat_min': 40.3, - 'lat_max': 40.6, - 'lon_min': -3.8, - 'lon_max': -3.5 - } - - return ( - madrid_bounds['lat_min'] <= latitude <= madrid_bounds['lat_max'] and - madrid_bounds['lon_min'] <= longitude <= madrid_bounds['lon_max'] - ) - -def validate_product_name(name: str) -> bool: - """Validate product name""" - # Must be 1-50 characters, letters, numbers, spaces - pattern = r"^[a-zA-ZÀ-ÿ0-9\s]{1,50}$" - return bool(re.match(pattern, name)) - -def validate_positive_number(value: Any) -> bool: - """Validate positive number""" - try: - return float(value) > 0 - except (ValueError, TypeError): - return False - -def validate_non_negative_number(value: Any) -> bool: - """Validate non-negative number""" - try: - return float(value) >= 0 - except (ValueError, TypeError): - return False \ No newline at end of file diff --git a/services/notification/Dockerfile b/services/notification/Dockerfile index ad431c34..15c0f3d5 100644 --- a/services/notification/Dockerfile +++ b/services/notification/Dockerfile @@ -1,3 +1,9 @@ +# Add this stage at the top of each service Dockerfile +FROM python:3.11-slim as shared +WORKDIR /shared +COPY shared/ /shared/ + +# Then your main service stage FROM python:3.11-slim WORKDIR /app @@ -9,13 +15,16 @@ RUN apt-get update && apt-get install -y \ && rm -rf /var/lib/apt/lists/* # Copy requirements -COPY requirements.txt . +COPY services/notification/requirements.txt . # Install Python dependencies RUN pip install --no-cache-dir -r requirements.txt +# Copy shared libraries from the shared stage +COPY --from=shared /shared /app/shared + # Copy application code -COPY . . +COPY services/notification/ . 
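Two caveats apply when emitting events through the shared messaging code: the event dataclasses give event_type a default value ahead of the inherited non-default fields (service_name, timestamp, data), which the dataclasses module rejects at class-definition time, and BaseEvent.__post_init__ falls back to datetime.now(datetime.timezone.utc), which raises AttributeError because datetime here is the class, not the module. A sketch that sidesteps both by publishing a plain dict through RabbitMQClient; the broker URL and exchange name are illustrative, not values from this patch:

    import asyncio
    from datetime import datetime, timezone

    from shared.messaging.rabbitmq import RabbitMQClient

    async def publish_sent_notification() -> None:
        client = RabbitMQClient("amqp://guest:guest@rabbitmq:5672/")
        await client.connect()
        await client.publish_event(
            exchange_name="events",
            routing_key="notification.sent",
            event_data={
                "event_type": "notification.sent",
                "service_name": "notification",
                "timestamp": datetime.now(timezone.utc).isoformat(),
                "data": {"recipient": "user@example.com"},
            },
        )
        await client.disconnect()

    asyncio.run(publish_sent_notification())
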
# Add shared libraries to Python path ENV PYTHONPATH="/app:/app/shared:$PYTHONPATH" @@ -28,4 +37,4 @@ HEALTHCHECK --interval=30s --timeout=10s --start-period=5s --retries=3 \ CMD curl -f http://localhost:8000/health || exit 1 # Run application -CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000"] +CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000"] \ No newline at end of file diff --git a/services/notification/shared/auth/__init__.py b/services/notification/shared/auth/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/services/notification/shared/auth/decorators.py b/services/notification/shared/auth/decorators.py deleted file mode 100644 index 53095a15..00000000 --- a/services/notification/shared/auth/decorators.py +++ /dev/null @@ -1,41 +0,0 @@ -""" -Authentication decorators for FastAPI -""" - -from functools import wraps -from fastapi import HTTPException, Depends -from fastapi.security import HTTPBearer -import httpx -import logging - -logger = logging.getLogger(__name__) - -security = HTTPBearer() - -def verify_service_token(auth_service_url: str): - """Verify service token with auth service""" - - async def verify_token(token: str = Depends(security)): - try: - async with httpx.AsyncClient() as client: - response = await client.post( - f"{auth_service_url}/verify", - headers={"Authorization": f"Bearer {token.credentials}"} - ) - - if response.status_code == 200: - return response.json() - else: - raise HTTPException( - status_code=401, - detail="Invalid authentication credentials" - ) - - except httpx.RequestError as e: - logger.error(f"Auth service unavailable: {e}") - raise HTTPException( - status_code=503, - detail="Authentication service unavailable" - ) - - return verify_token \ No newline at end of file diff --git a/services/notification/shared/auth/jwt_handler.py b/services/notification/shared/auth/jwt_handler.py deleted file mode 100644 index 2eae4724..00000000 --- a/services/notification/shared/auth/jwt_handler.py +++ /dev/null @@ -1,58 +0,0 @@ -""" -Shared JWT Authentication Handler -Used across all microservices for consistent authentication -""" - -from jose import jwt -from datetime import datetime, timedelta -from typing import Optional, Dict, Any -import logging - -logger = logging.getLogger(__name__) - -class JWTHandler: - """JWT token handling for microservices""" - - def __init__(self, secret_key: str, algorithm: str = "HS256"): - self.secret_key = secret_key - self.algorithm = algorithm - - def create_access_token(self, data: Dict[str, Any], expires_delta: Optional[timedelta] = None) -> str: - """Create JWT access token""" - to_encode = data.copy() - - if expires_delta: - expire = datetime.now(datetime.timezone.utc) + expires_delta - else: - expire = datetime.now(datetime.timezone.utc) + timedelta(minutes=30) - - to_encode.update({"exp": expire, "type": "access"}) - - encoded_jwt = jwt.encode(to_encode, self.secret_key, algorithm=self.algorithm) - return encoded_jwt - - def create_refresh_token(self, data: Dict[str, Any], expires_delta: Optional[timedelta] = None) -> str: - """Create JWT refresh token""" - to_encode = data.copy() - - if expires_delta: - expire = datetime.now(datetime.timezone.utc) + expires_delta - else: - expire = datetime.now(datetime.timezone.utc) + timedelta(days=7) - - to_encode.update({"exp": expire, "type": "refresh"}) - - encoded_jwt = jwt.encode(to_encode, self.secret_key, algorithm=self.algorithm) - return encoded_jwt - - def verify_token(self, token: str) -> 
Optional[Dict[str, Any]]: - """Verify and decode JWT token""" - try: - payload = jwt.decode(token, self.secret_key, algorithms=[self.algorithm]) - return payload - except jwt.ExpiredSignatureError: - logger.warning("Token has expired") - return None - except jwt.JWTError: - logger.warning("Invalid token") - return None \ No newline at end of file diff --git a/services/notification/shared/database/__init__.py b/services/notification/shared/database/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/services/notification/shared/database/base.py b/services/notification/shared/database/base.py deleted file mode 100644 index e5766716..00000000 --- a/services/notification/shared/database/base.py +++ /dev/null @@ -1,56 +0,0 @@ -""" -Base database configuration for all microservices -""" - -import os -from sqlalchemy import create_engine -from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine -from sqlalchemy.orm import sessionmaker, declarative_base -from sqlalchemy.pool import StaticPool -import logging - -logger = logging.getLogger(__name__) - -Base = declarative_base() - -class DatabaseManager: - """Database manager for microservices""" - - def __init__(self, database_url: str): - self.database_url = database_url - self.async_engine = create_async_engine( - database_url, - echo=False, - pool_pre_ping=True, - pool_recycle=300, - pool_size=20, - max_overflow=30 - ) - - self.async_session_local = sessionmaker( - self.async_engine, - class_=AsyncSession, - expire_on_commit=False - ) - - async def get_db(self): - """Get database session""" - async with self.async_session_local() as session: - try: - yield session - except Exception as e: - logger.error(f"Database session error: {e}") - await session.rollback() - raise - finally: - await session.close() - - async def create_tables(self): - """Create database tables""" - async with self.async_engine.begin() as conn: - await conn.run_sync(Base.metadata.create_all) - - async def drop_tables(self): - """Drop database tables""" - async with self.async_engine.begin() as conn: - await conn.run_sync(Base.metadata.drop_all) \ No newline at end of file diff --git a/services/notification/shared/messaging/__init__.py b/services/notification/shared/messaging/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/services/notification/shared/messaging/events.py b/services/notification/shared/messaging/events.py deleted file mode 100644 index cf8f8162..00000000 --- a/services/notification/shared/messaging/events.py +++ /dev/null @@ -1,73 +0,0 @@ -""" -Event definitions for microservices communication -""" - -from dataclasses import dataclass -from datetime import datetime -from typing import Dict, Any, Optional -import uuid - -@dataclass -class BaseEvent: - """Base event class""" - event_id: str - event_type: str - service_name: str - timestamp: datetime - data: Dict[str, Any] - correlation_id: Optional[str] = None - - def __post_init__(self): - if not self.event_id: - self.event_id = str(uuid.uuid4()) - if not self.timestamp: - self.timestamp = datetime.now(datetime.timezone.utc) - -# Training Events -@dataclass -class TrainingStartedEvent(BaseEvent): - event_type: str = "training.started" - -@dataclass -class TrainingCompletedEvent(BaseEvent): - event_type: str = "training.completed" - -@dataclass -class TrainingFailedEvent(BaseEvent): - event_type: str = "training.failed" - -# Forecasting Events -@dataclass -class ForecastGeneratedEvent(BaseEvent): - event_type: str = "forecast.generated" - -@dataclass 
-class ForecastRequestedEvent(BaseEvent): - event_type: str = "forecast.requested" - -# User Events -@dataclass -class UserRegisteredEvent(BaseEvent): - event_type: str = "user.registered" - -@dataclass -class UserLoginEvent(BaseEvent): - event_type: str = "user.login" - -# Tenant Events -@dataclass -class TenantCreatedEvent(BaseEvent): - event_type: str = "tenant.created" - -@dataclass -class TenantUpdatedEvent(BaseEvent): - event_type: str = "tenant.updated" - -# Notification Events -@dataclass -class NotificationSentEvent(BaseEvent): - event_type: str = "notification.sent" - -@dataclass -class NotificationFailedEvent(BaseEvent): - event_type: str = "notification.failed" \ No newline at end of file diff --git a/services/notification/shared/messaging/rabbitmq.py b/services/notification/shared/messaging/rabbitmq.py deleted file mode 100644 index 62d95cfb..00000000 --- a/services/notification/shared/messaging/rabbitmq.py +++ /dev/null @@ -1,96 +0,0 @@ -""" -RabbitMQ messaging client for microservices -""" - -import asyncio -import json -import logging -from typing import Dict, Any, Callable -import aio_pika -from aio_pika import connect_robust, Message, DeliveryMode - -logger = logging.getLogger(__name__) - -class RabbitMQClient: - """RabbitMQ client for microservices communication""" - - def __init__(self, connection_url: str): - self.connection_url = connection_url - self.connection = None - self.channel = None - - async def connect(self): - """Connect to RabbitMQ""" - try: - self.connection = await connect_robust(self.connection_url) - self.channel = await self.connection.channel() - logger.info("Connected to RabbitMQ") - except Exception as e: - logger.error(f"Failed to connect to RabbitMQ: {e}") - raise - - async def disconnect(self): - """Disconnect from RabbitMQ""" - if self.connection: - await self.connection.close() - logger.info("Disconnected from RabbitMQ") - - async def publish_event(self, exchange_name: str, routing_key: str, event_data: Dict[str, Any]): - """Publish event to RabbitMQ""" - try: - if not self.channel: - await self.connect() - - # Declare exchange - exchange = await self.channel.declare_exchange( - exchange_name, - aio_pika.ExchangeType.TOPIC, - durable=True - ) - - # Create message - message = Message( - json.dumps(event_data).encode(), - delivery_mode=DeliveryMode.PERSISTENT, - content_type="application/json" - ) - - # Publish message - await exchange.publish(message, routing_key=routing_key) - - logger.info(f"Published event to {exchange_name} with routing key {routing_key}") - - except Exception as e: - logger.error(f"Failed to publish event: {e}") - raise - - async def consume_events(self, exchange_name: str, queue_name: str, routing_key: str, callback: Callable): - """Consume events from RabbitMQ""" - try: - if not self.channel: - await self.connect() - - # Declare exchange - exchange = await self.channel.declare_exchange( - exchange_name, - aio_pika.ExchangeType.TOPIC, - durable=True - ) - - # Declare queue - queue = await self.channel.declare_queue( - queue_name, - durable=True - ) - - # Bind queue to exchange - await queue.bind(exchange, routing_key) - - # Set up consumer - await queue.consume(callback) - - logger.info(f"Started consuming events from {queue_name}") - - except Exception as e: - logger.error(f"Failed to consume events: {e}") - raise \ No newline at end of file diff --git a/services/notification/shared/monitoring/__init__.py b/services/notification/shared/monitoring/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git 
a/services/notification/shared/monitoring/logging.py b/services/notification/shared/monitoring/logging.py deleted file mode 100644 index 0fde234d..00000000 --- a/services/notification/shared/monitoring/logging.py +++ /dev/null @@ -1,77 +0,0 @@ -""" -Centralized logging configuration for microservices -""" - -import logging -import logging.config -import os -from typing import Dict, Any - -def setup_logging(service_name: str, log_level: str = "INFO") -> None: - """Set up logging configuration for a microservice""" - - config: Dict[str, Any] = { - "version": 1, - "disable_existing_loggers": False, - "formatters": { - "standard": { - "format": "%(asctime)s [%(levelname)s] %(name)s: %(message)s" - }, - "detailed": { - "format": "%(asctime)s [%(levelname)s] %(name)s [%(filename)s:%(lineno)d] %(message)s" - }, - "json": { - "()": "pythonjsonlogger.jsonlogger.JsonFormatter", - "format": "%(asctime)s %(name)s %(levelname)s %(message)s" - } - }, - "handlers": { - "console": { - "class": "logging.StreamHandler", - "level": log_level, - "formatter": "standard", - "stream": "ext://sys.stdout" - }, - "file": { - "class": "logging.FileHandler", - "level": log_level, - "formatter": "detailed", - "filename": f"/var/log/{service_name}.log", - "mode": "a" - }, - "logstash": { - "class": "logstash.TCPLogstashHandler", - "host": os.getenv("LOGSTASH_HOST", "localhost"), - "port": int(os.getenv("LOGSTASH_PORT", "5000")), - "version": 1, - "message_type": "logstash", - "fqdn": False, - "tags": [service_name] - } - }, - "loggers": { - "": { - "handlers": ["console", "file"], - "level": log_level, - "propagate": False - }, - "uvicorn": { - "handlers": ["console"], - "level": log_level, - "propagate": False - }, - "uvicorn.access": { - "handlers": ["console"], - "level": log_level, - "propagate": False - } - } - } - - # Add logstash handler if in production - if os.getenv("ENVIRONMENT") == "production": - config["loggers"][""]["handlers"].append("logstash") - - logging.config.dictConfig(config) - logger = logging.getLogger(__name__) - logger.info(f"Logging configured for {service_name}") \ No newline at end of file diff --git a/services/notification/shared/monitoring/metrics.py b/services/notification/shared/monitoring/metrics.py deleted file mode 100644 index a5e35223..00000000 --- a/services/notification/shared/monitoring/metrics.py +++ /dev/null @@ -1,112 +0,0 @@ -""" -Metrics collection for microservices -""" - -import time -import logging -from typing import Dict, Any -from prometheus_client import Counter, Histogram, Gauge, start_http_server -from functools import wraps - -logger = logging.getLogger(__name__) - -# Prometheus metrics -REQUEST_COUNT = Counter( - 'http_requests_total', - 'Total HTTP requests', - ['method', 'endpoint', 'status_code', 'service'] -) - -REQUEST_DURATION = Histogram( - 'http_request_duration_seconds', - 'HTTP request duration in seconds', - ['method', 'endpoint', 'service'] -) - -ACTIVE_CONNECTIONS = Gauge( - 'active_connections', - 'Active database connections', - ['service'] -) - -TRAINING_JOBS = Counter( - 'training_jobs_total', - 'Total training jobs', - ['status', 'service'] -) - -FORECASTS_GENERATED = Counter( - 'forecasts_generated_total', - 'Total forecasts generated', - ['service'] -) - -class MetricsCollector: - """Metrics collector for microservices""" - - def __init__(self, service_name: str): - self.service_name = service_name - self.start_time = time.time() - - def start_metrics_server(self, port: int = 8080): - """Start Prometheus metrics server""" - try: - 
start_http_server(port) - logger.info(f"Metrics server started on port {port}") - except Exception as e: - logger.error(f"Failed to start metrics server: {e}") - - def record_request(self, method: str, endpoint: str, status_code: int, duration: float): - """Record HTTP request metrics""" - REQUEST_COUNT.labels( - method=method, - endpoint=endpoint, - status_code=status_code, - service=self.service_name - ).inc() - - REQUEST_DURATION.labels( - method=method, - endpoint=endpoint, - service=self.service_name - ).observe(duration) - - def record_training_job(self, status: str): - """Record training job metrics""" - TRAINING_JOBS.labels( - status=status, - service=self.service_name - ).inc() - - def record_forecast_generated(self): - """Record forecast generation metrics""" - FORECASTS_GENERATED.labels( - service=self.service_name - ).inc() - - def set_active_connections(self, count: int): - """Set active database connections""" - ACTIVE_CONNECTIONS.labels( - service=self.service_name - ).set(count) - -def metrics_middleware(metrics_collector: MetricsCollector): - """Middleware to collect metrics""" - - def middleware(request, call_next): - start_time = time.time() - - response = call_next(request) - - duration = time.time() - start_time - - metrics_collector.record_request( - method=request.method, - endpoint=request.url.path, - status_code=response.status_code, - duration=duration - ) - - return response - - return middleware \ No newline at end of file diff --git a/services/notification/shared/utils/__init__.py b/services/notification/shared/utils/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/services/notification/shared/utils/datetime_utils.py b/services/notification/shared/utils/datetime_utils.py deleted file mode 100644 index 3035001a..00000000 --- a/services/notification/shared/utils/datetime_utils.py +++ /dev/null @@ -1,71 +0,0 @@ -""" -DateTime utilities for microservices -""" - -from datetime import datetime, timezone, timedelta -from typing import Optional -import pytz - -def utc_now() -> datetime: - """Get current UTC datetime""" - return datetime.now(timezone.utc) - -def madrid_now() -> datetime: - """Get current Madrid datetime""" - madrid_tz = pytz.timezone('Europe/Madrid') - return datetime.now(madrid_tz) - -def to_utc(dt: datetime) -> datetime: - """Convert datetime to UTC""" - if dt.tzinfo is None: - dt = dt.replace(tzinfo=timezone.utc) - return dt.astimezone(timezone.utc) - -def to_madrid(dt: datetime) -> datetime: - """Convert datetime to Madrid timezone""" - madrid_tz = pytz.timezone('Europe/Madrid') - if dt.tzinfo is None: - dt = dt.replace(tzinfo=timezone.utc) - return dt.astimezone(madrid_tz) - -def format_datetime(dt: datetime, format_str: str = "%Y-%m-%d %H:%M:%S") -> str: - """Format datetime as string""" - return dt.strftime(format_str) - -def parse_datetime(dt_str: str, format_str: str = "%Y-%m-%d %H:%M:%S") -> datetime: - """Parse datetime from string""" - return datetime.strptime(dt_str, format_str) - -def is_business_hours(dt: Optional[datetime] = None) -> bool: - """Check if datetime is during business hours (9 AM - 6 PM Madrid time)""" - if dt is None: - dt = madrid_now() - - if dt.tzinfo is None: - dt = dt.replace(tzinfo=timezone.utc) - - madrid_dt = to_madrid(dt) - - # Check if it's a weekday (Monday=0, Sunday=6) - if madrid_dt.weekday() >= 5: # Weekend - return False - - # Check if it's business hours - return 9 <= madrid_dt.hour < 18 - -def next_business_day(dt: Optional[datetime] = None) -> datetime: - """Get next business 
day""" - if dt is None: - dt = madrid_now() - - if dt.tzinfo is None: - dt = dt.replace(tzinfo=timezone.utc) - - madrid_dt = to_madrid(dt) - - # Add days until we reach a weekday - while madrid_dt.weekday() >= 5: # Weekend - madrid_dt += timedelta(days=1) - - # Set to 9 AM - return madrid_dt.replace(hour=9, minute=0, second=0, microsecond=0) \ No newline at end of file diff --git a/services/notification/shared/utils/validation.py b/services/notification/shared/utils/validation.py deleted file mode 100644 index c855b20c..00000000 --- a/services/notification/shared/utils/validation.py +++ /dev/null @@ -1,67 +0,0 @@ -""" -Validation utilities for microservices -""" - -import re -from typing import Any, Optional -from email_validator import validate_email, EmailNotValidError - -def validate_spanish_phone(phone: str) -> bool: - """Validate Spanish phone number""" - # Spanish phone pattern: +34 followed by 9 digits - pattern = r'^(\+34|0034|34)?[6-9]\d{8}$' - return bool(re.match(pattern, phone.replace(' ', '').replace('-', ''))) - -def validate_email_address(email: str) -> bool: - """Validate email address""" - try: - validate_email(email) - return True - except EmailNotValidError: - return False - -def validate_tenant_name(name: str) -> bool: - """Validate tenant name""" - # Must be 2-50 characters, letters, numbers, spaces, hyphens, apostrophes - pattern = r"^[a-zA-ZÀ-ÿ0-9\s\-']{2,50}$" - return bool(re.match(pattern, name)) - -def validate_address(address: str) -> bool: - """Validate address""" - # Must be 5-200 characters - return 5 <= len(address.strip()) <= 200 - -def validate_coordinates(latitude: float, longitude: float) -> bool: - """Validate Madrid coordinates""" - # Madrid is roughly between these coordinates - madrid_bounds = { - 'lat_min': 40.3, - 'lat_max': 40.6, - 'lon_min': -3.8, - 'lon_max': -3.5 - } - - return ( - madrid_bounds['lat_min'] <= latitude <= madrid_bounds['lat_max'] and - madrid_bounds['lon_min'] <= longitude <= madrid_bounds['lon_max'] - ) - -def validate_product_name(name: str) -> bool: - """Validate product name""" - # Must be 1-50 characters, letters, numbers, spaces - pattern = r"^[a-zA-ZÀ-ÿ0-9\s]{1,50}$" - return bool(re.match(pattern, name)) - -def validate_positive_number(value: Any) -> bool: - """Validate positive number""" - try: - return float(value) > 0 - except (ValueError, TypeError): - return False - -def validate_non_negative_number(value: Any) -> bool: - """Validate non-negative number""" - try: - return float(value) >= 0 - except (ValueError, TypeError): - return False \ No newline at end of file diff --git a/services/tenant/Dockerfile b/services/tenant/Dockerfile index ad431c34..a445f70c 100644 --- a/services/tenant/Dockerfile +++ b/services/tenant/Dockerfile @@ -1,3 +1,9 @@ +# Add this stage at the top of each service Dockerfile +FROM python:3.11-slim as shared +WORKDIR /shared +COPY shared/ /shared/ + +# Then your main service stage FROM python:3.11-slim WORKDIR /app @@ -9,13 +15,16 @@ RUN apt-get update && apt-get install -y \ && rm -rf /var/lib/apt/lists/* # Copy requirements -COPY requirements.txt . +COPY services/tenant/requirements.txt . # Install Python dependencies RUN pip install --no-cache-dir -r requirements.txt +# Copy shared libraries from the shared stage +COPY --from=shared /shared /app/shared + # Copy application code -COPY . . +COPY services/tenant/ . 
# Add shared libraries to Python path ENV PYTHONPATH="/app:/app/shared:$PYTHONPATH" @@ -28,4 +37,4 @@ HEALTHCHECK --interval=30s --timeout=10s --start-period=5s --retries=3 \ CMD curl -f http://localhost:8000/health || exit 1 # Run application -CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000"] +CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000"] \ No newline at end of file diff --git a/services/tenant/shared/auth/__init__.py b/services/tenant/shared/auth/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/services/tenant/shared/auth/decorators.py b/services/tenant/shared/auth/decorators.py deleted file mode 100644 index 53095a15..00000000 --- a/services/tenant/shared/auth/decorators.py +++ /dev/null @@ -1,41 +0,0 @@ -""" -Authentication decorators for FastAPI -""" - -from functools import wraps -from fastapi import HTTPException, Depends -from fastapi.security import HTTPBearer -import httpx -import logging - -logger = logging.getLogger(__name__) - -security = HTTPBearer() - -def verify_service_token(auth_service_url: str): - """Verify service token with auth service""" - - async def verify_token(token: str = Depends(security)): - try: - async with httpx.AsyncClient() as client: - response = await client.post( - f"{auth_service_url}/verify", - headers={"Authorization": f"Bearer {token.credentials}"} - ) - - if response.status_code == 200: - return response.json() - else: - raise HTTPException( - status_code=401, - detail="Invalid authentication credentials" - ) - - except httpx.RequestError as e: - logger.error(f"Auth service unavailable: {e}") - raise HTTPException( - status_code=503, - detail="Authentication service unavailable" - ) - - return verify_token \ No newline at end of file diff --git a/services/tenant/shared/auth/jwt_handler.py b/services/tenant/shared/auth/jwt_handler.py deleted file mode 100644 index 2eae4724..00000000 --- a/services/tenant/shared/auth/jwt_handler.py +++ /dev/null @@ -1,58 +0,0 @@ -""" -Shared JWT Authentication Handler -Used across all microservices for consistent authentication -""" - -from jose import jwt -from datetime import datetime, timedelta -from typing import Optional, Dict, Any -import logging - -logger = logging.getLogger(__name__) - -class JWTHandler: - """JWT token handling for microservices""" - - def __init__(self, secret_key: str, algorithm: str = "HS256"): - self.secret_key = secret_key - self.algorithm = algorithm - - def create_access_token(self, data: Dict[str, Any], expires_delta: Optional[timedelta] = None) -> str: - """Create JWT access token""" - to_encode = data.copy() - - if expires_delta: - expire = datetime.now(datetime.timezone.utc) + expires_delta - else: - expire = datetime.now(datetime.timezone.utc) + timedelta(minutes=30) - - to_encode.update({"exp": expire, "type": "access"}) - - encoded_jwt = jwt.encode(to_encode, self.secret_key, algorithm=self.algorithm) - return encoded_jwt - - def create_refresh_token(self, data: Dict[str, Any], expires_delta: Optional[timedelta] = None) -> str: - """Create JWT refresh token""" - to_encode = data.copy() - - if expires_delta: - expire = datetime.now(datetime.timezone.utc) + expires_delta - else: - expire = datetime.now(datetime.timezone.utc) + timedelta(days=7) - - to_encode.update({"exp": expire, "type": "refresh"}) - - encoded_jwt = jwt.encode(to_encode, self.secret_key, algorithm=self.algorithm) - return encoded_jwt - - def verify_token(self, token: str) -> Optional[Dict[str, Any]]: - """Verify and decode JWT token""" - 
try: - payload = jwt.decode(token, self.secret_key, algorithms=[self.algorithm]) - return payload - except jwt.ExpiredSignatureError: - logger.warning("Token has expired") - return None - except jwt.JWTError: - logger.warning("Invalid token") - return None \ No newline at end of file diff --git a/services/tenant/shared/database/__init__.py b/services/tenant/shared/database/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/services/tenant/shared/database/base.py b/services/tenant/shared/database/base.py deleted file mode 100644 index e5766716..00000000 --- a/services/tenant/shared/database/base.py +++ /dev/null @@ -1,56 +0,0 @@ -""" -Base database configuration for all microservices -""" - -import os -from sqlalchemy import create_engine -from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine -from sqlalchemy.orm import sessionmaker, declarative_base -from sqlalchemy.pool import StaticPool -import logging - -logger = logging.getLogger(__name__) - -Base = declarative_base() - -class DatabaseManager: - """Database manager for microservices""" - - def __init__(self, database_url: str): - self.database_url = database_url - self.async_engine = create_async_engine( - database_url, - echo=False, - pool_pre_ping=True, - pool_recycle=300, - pool_size=20, - max_overflow=30 - ) - - self.async_session_local = sessionmaker( - self.async_engine, - class_=AsyncSession, - expire_on_commit=False - ) - - async def get_db(self): - """Get database session""" - async with self.async_session_local() as session: - try: - yield session - except Exception as e: - logger.error(f"Database session error: {e}") - await session.rollback() - raise - finally: - await session.close() - - async def create_tables(self): - """Create database tables""" - async with self.async_engine.begin() as conn: - await conn.run_sync(Base.metadata.create_all) - - async def drop_tables(self): - """Drop database tables""" - async with self.async_engine.begin() as conn: - await conn.run_sync(Base.metadata.drop_all) \ No newline at end of file diff --git a/services/tenant/shared/messaging/__init__.py b/services/tenant/shared/messaging/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/services/tenant/shared/messaging/events.py b/services/tenant/shared/messaging/events.py deleted file mode 100644 index cf8f8162..00000000 --- a/services/tenant/shared/messaging/events.py +++ /dev/null @@ -1,73 +0,0 @@ -""" -Event definitions for microservices communication -""" - -from dataclasses import dataclass -from datetime import datetime -from typing import Dict, Any, Optional -import uuid - -@dataclass -class BaseEvent: - """Base event class""" - event_id: str - event_type: str - service_name: str - timestamp: datetime - data: Dict[str, Any] - correlation_id: Optional[str] = None - - def __post_init__(self): - if not self.event_id: - self.event_id = str(uuid.uuid4()) - if not self.timestamp: - self.timestamp = datetime.now(datetime.timezone.utc) - -# Training Events -@dataclass -class TrainingStartedEvent(BaseEvent): - event_type: str = "training.started" - -@dataclass -class TrainingCompletedEvent(BaseEvent): - event_type: str = "training.completed" - -@dataclass -class TrainingFailedEvent(BaseEvent): - event_type: str = "training.failed" - -# Forecasting Events -@dataclass -class ForecastGeneratedEvent(BaseEvent): - event_type: str = "forecast.generated" - -@dataclass -class ForecastRequestedEvent(BaseEvent): - event_type: str = "forecast.requested" - -# User Events -@dataclass -class 
UserRegisteredEvent(BaseEvent): - event_type: str = "user.registered" - -@dataclass -class UserLoginEvent(BaseEvent): - event_type: str = "user.login" - -# Tenant Events -@dataclass -class TenantCreatedEvent(BaseEvent): - event_type: str = "tenant.created" - -@dataclass -class TenantUpdatedEvent(BaseEvent): - event_type: str = "tenant.updated" - -# Notification Events -@dataclass -class NotificationSentEvent(BaseEvent): - event_type: str = "notification.sent" - -@dataclass -class NotificationFailedEvent(BaseEvent): - event_type: str = "notification.failed" \ No newline at end of file diff --git a/services/tenant/shared/messaging/rabbitmq.py b/services/tenant/shared/messaging/rabbitmq.py deleted file mode 100644 index 62d95cfb..00000000 --- a/services/tenant/shared/messaging/rabbitmq.py +++ /dev/null @@ -1,96 +0,0 @@ -""" -RabbitMQ messaging client for microservices -""" - -import asyncio -import json -import logging -from typing import Dict, Any, Callable -import aio_pika -from aio_pika import connect_robust, Message, DeliveryMode - -logger = logging.getLogger(__name__) - -class RabbitMQClient: - """RabbitMQ client for microservices communication""" - - def __init__(self, connection_url: str): - self.connection_url = connection_url - self.connection = None - self.channel = None - - async def connect(self): - """Connect to RabbitMQ""" - try: - self.connection = await connect_robust(self.connection_url) - self.channel = await self.connection.channel() - logger.info("Connected to RabbitMQ") - except Exception as e: - logger.error(f"Failed to connect to RabbitMQ: {e}") - raise - - async def disconnect(self): - """Disconnect from RabbitMQ""" - if self.connection: - await self.connection.close() - logger.info("Disconnected from RabbitMQ") - - async def publish_event(self, exchange_name: str, routing_key: str, event_data: Dict[str, Any]): - """Publish event to RabbitMQ""" - try: - if not self.channel: - await self.connect() - - # Declare exchange - exchange = await self.channel.declare_exchange( - exchange_name, - aio_pika.ExchangeType.TOPIC, - durable=True - ) - - # Create message - message = Message( - json.dumps(event_data).encode(), - delivery_mode=DeliveryMode.PERSISTENT, - content_type="application/json" - ) - - # Publish message - await exchange.publish(message, routing_key=routing_key) - - logger.info(f"Published event to {exchange_name} with routing key {routing_key}") - - except Exception as e: - logger.error(f"Failed to publish event: {e}") - raise - - async def consume_events(self, exchange_name: str, queue_name: str, routing_key: str, callback: Callable): - """Consume events from RabbitMQ""" - try: - if not self.channel: - await self.connect() - - # Declare exchange - exchange = await self.channel.declare_exchange( - exchange_name, - aio_pika.ExchangeType.TOPIC, - durable=True - ) - - # Declare queue - queue = await self.channel.declare_queue( - queue_name, - durable=True - ) - - # Bind queue to exchange - await queue.bind(exchange, routing_key) - - # Set up consumer - await queue.consume(callback) - - logger.info(f"Started consuming events from {queue_name}") - - except Exception as e: - logger.error(f"Failed to consume events: {e}") - raise \ No newline at end of file diff --git a/services/tenant/shared/monitoring/__init__.py b/services/tenant/shared/monitoring/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/services/tenant/shared/monitoring/logging.py b/services/tenant/shared/monitoring/logging.py deleted file mode 100644 index 0fde234d..00000000 --- 
a/services/tenant/shared/monitoring/logging.py +++ /dev/null @@ -1,77 +0,0 @@ -""" -Centralized logging configuration for microservices -""" - -import logging -import logging.config -import os -from typing import Dict, Any - -def setup_logging(service_name: str, log_level: str = "INFO") -> None: - """Set up logging configuration for a microservice""" - - config: Dict[str, Any] = { - "version": 1, - "disable_existing_loggers": False, - "formatters": { - "standard": { - "format": "%(asctime)s [%(levelname)s] %(name)s: %(message)s" - }, - "detailed": { - "format": "%(asctime)s [%(levelname)s] %(name)s [%(filename)s:%(lineno)d] %(message)s" - }, - "json": { - "()": "pythonjsonlogger.jsonlogger.JsonFormatter", - "format": "%(asctime)s %(name)s %(levelname)s %(message)s" - } - }, - "handlers": { - "console": { - "class": "logging.StreamHandler", - "level": log_level, - "formatter": "standard", - "stream": "ext://sys.stdout" - }, - "file": { - "class": "logging.FileHandler", - "level": log_level, - "formatter": "detailed", - "filename": f"/var/log/{service_name}.log", - "mode": "a" - }, - "logstash": { - "class": "logstash.TCPLogstashHandler", - "host": os.getenv("LOGSTASH_HOST", "localhost"), - "port": int(os.getenv("LOGSTASH_PORT", "5000")), - "version": 1, - "message_type": "logstash", - "fqdn": False, - "tags": [service_name] - } - }, - "loggers": { - "": { - "handlers": ["console", "file"], - "level": log_level, - "propagate": False - }, - "uvicorn": { - "handlers": ["console"], - "level": log_level, - "propagate": False - }, - "uvicorn.access": { - "handlers": ["console"], - "level": log_level, - "propagate": False - } - } - } - - # Add logstash handler if in production - if os.getenv("ENVIRONMENT") == "production": - config["loggers"][""]["handlers"].append("logstash") - - logging.config.dictConfig(config) - logger = logging.getLogger(__name__) - logger.info(f"Logging configured for {service_name}") \ No newline at end of file diff --git a/services/tenant/shared/monitoring/metrics.py b/services/tenant/shared/monitoring/metrics.py deleted file mode 100644 index a5e35223..00000000 --- a/services/tenant/shared/monitoring/metrics.py +++ /dev/null @@ -1,112 +0,0 @@ -""" -Metrics collection for microservices -""" - -import time -import logging -from typing import Dict, Any -from prometheus_client import Counter, Histogram, Gauge, start_http_server -from functools import wraps - -logger = logging.getLogger(__name__) - -# Prometheus metrics -REQUEST_COUNT = Counter( - 'http_requests_total', - 'Total HTTP requests', - ['method', 'endpoint', 'status_code', 'service'] -) - -REQUEST_DURATION = Histogram( - 'http_request_duration_seconds', - 'HTTP request duration in seconds', - ['method', 'endpoint', 'service'] -) - -ACTIVE_CONNECTIONS = Gauge( - 'active_connections', - 'Active database connections', - ['service'] -) - -TRAINING_JOBS = Counter( - 'training_jobs_total', - 'Total training jobs', - ['status', 'service'] -) - -FORECASTS_GENERATED = Counter( - 'forecasts_generated_total', - 'Total forecasts generated', - ['service'] -) - -class MetricsCollector: - """Metrics collector for microservices""" - - def __init__(self, service_name: str): - self.service_name = service_name - self.start_time = time.time() - - def start_metrics_server(self, port: int = 8080): - """Start Prometheus metrics server""" - try: - start_http_server(port) - logger.info(f"Metrics server started on port {port}") - except Exception as e: - logger.error(f"Failed to start metrics server: {e}") - - def record_request(self, 
method: str, endpoint: str, status_code: int, duration: float): - """Record HTTP request metrics""" - REQUEST_COUNT.labels( - method=method, - endpoint=endpoint, - status_code=status_code, - service=self.service_name - ).inc() - - REQUEST_DURATION.labels( - method=method, - endpoint=endpoint, - service=self.service_name - ).observe(duration) - - def record_training_job(self, status: str): - """Record training job metrics""" - TRAINING_JOBS.labels( - status=status, - service=self.service_name - ).inc() - - def record_forecast_generated(self): - """Record forecast generation metrics""" - FORECASTS_GENERATED.labels( - service=self.service_name - ).inc() - - def set_active_connections(self, count: int): - """Set active database connections""" - ACTIVE_CONNECTIONS.labels( - service=self.service_name - ).set(count) - -def metrics_middleware(metrics_collector: MetricsCollector): - """Middleware to collect metrics""" - - def middleware(request, call_next): - start_time = time.time() - - response = call_next(request) - - duration = time.time() - start_time - - metrics_collector.record_request( - method=request.method, - endpoint=request.url.path, - status_code=response.status_code, - duration=duration - ) - - return response - - return middleware \ No newline at end of file diff --git a/services/tenant/shared/utils/__init__.py b/services/tenant/shared/utils/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/services/tenant/shared/utils/datetime_utils.py b/services/tenant/shared/utils/datetime_utils.py deleted file mode 100644 index 3035001a..00000000 --- a/services/tenant/shared/utils/datetime_utils.py +++ /dev/null @@ -1,71 +0,0 @@ -""" -DateTime utilities for microservices -""" - -from datetime import datetime, timezone, timedelta -from typing import Optional -import pytz - -def utc_now() -> datetime: - """Get current UTC datetime""" - return datetime.now(timezone.utc) - -def madrid_now() -> datetime: - """Get current Madrid datetime""" - madrid_tz = pytz.timezone('Europe/Madrid') - return datetime.now(madrid_tz) - -def to_utc(dt: datetime) -> datetime: - """Convert datetime to UTC""" - if dt.tzinfo is None: - dt = dt.replace(tzinfo=timezone.utc) - return dt.astimezone(timezone.utc) - -def to_madrid(dt: datetime) -> datetime: - """Convert datetime to Madrid timezone""" - madrid_tz = pytz.timezone('Europe/Madrid') - if dt.tzinfo is None: - dt = dt.replace(tzinfo=timezone.utc) - return dt.astimezone(madrid_tz) - -def format_datetime(dt: datetime, format_str: str = "%Y-%m-%d %H:%M:%S") -> str: - """Format datetime as string""" - return dt.strftime(format_str) - -def parse_datetime(dt_str: str, format_str: str = "%Y-%m-%d %H:%M:%S") -> datetime: - """Parse datetime from string""" - return datetime.strptime(dt_str, format_str) - -def is_business_hours(dt: Optional[datetime] = None) -> bool: - """Check if datetime is during business hours (9 AM - 6 PM Madrid time)""" - if dt is None: - dt = madrid_now() - - if dt.tzinfo is None: - dt = dt.replace(tzinfo=timezone.utc) - - madrid_dt = to_madrid(dt) - - # Check if it's a weekday (Monday=0, Sunday=6) - if madrid_dt.weekday() >= 5: # Weekend - return False - - # Check if it's business hours - return 9 <= madrid_dt.hour < 18 - -def next_business_day(dt: Optional[datetime] = None) -> datetime: - """Get next business day""" - if dt is None: - dt = madrid_now() - - if dt.tzinfo is None: - dt = dt.replace(tzinfo=timezone.utc) - - madrid_dt = to_madrid(dt) - - # Add days until we reach a weekday - while madrid_dt.weekday() >= 5: # Weekend 
- madrid_dt += timedelta(days=1) - - # Set to 9 AM - return madrid_dt.replace(hour=9, minute=0, second=0, microsecond=0) \ No newline at end of file diff --git a/services/tenant/shared/utils/validation.py b/services/tenant/shared/utils/validation.py deleted file mode 100644 index c855b20c..00000000 --- a/services/tenant/shared/utils/validation.py +++ /dev/null @@ -1,67 +0,0 @@ -""" -Validation utilities for microservices -""" - -import re -from typing import Any, Optional -from email_validator import validate_email, EmailNotValidError - -def validate_spanish_phone(phone: str) -> bool: - """Validate Spanish phone number""" - # Spanish phone pattern: +34 followed by 9 digits - pattern = r'^(\+34|0034|34)?[6-9]\d{8}$' - return bool(re.match(pattern, phone.replace(' ', '').replace('-', ''))) - -def validate_email_address(email: str) -> bool: - """Validate email address""" - try: - validate_email(email) - return True - except EmailNotValidError: - return False - -def validate_tenant_name(name: str) -> bool: - """Validate tenant name""" - # Must be 2-50 characters, letters, numbers, spaces, hyphens, apostrophes - pattern = r"^[a-zA-ZÀ-ÿ0-9\s\-']{2,50}$" - return bool(re.match(pattern, name)) - -def validate_address(address: str) -> bool: - """Validate address""" - # Must be 5-200 characters - return 5 <= len(address.strip()) <= 200 - -def validate_coordinates(latitude: float, longitude: float) -> bool: - """Validate Madrid coordinates""" - # Madrid is roughly between these coordinates - madrid_bounds = { - 'lat_min': 40.3, - 'lat_max': 40.6, - 'lon_min': -3.8, - 'lon_max': -3.5 - } - - return ( - madrid_bounds['lat_min'] <= latitude <= madrid_bounds['lat_max'] and - madrid_bounds['lon_min'] <= longitude <= madrid_bounds['lon_max'] - ) - -def validate_product_name(name: str) -> bool: - """Validate product name""" - # Must be 1-50 characters, letters, numbers, spaces - pattern = r"^[a-zA-ZÀ-ÿ0-9\s]{1,50}$" - return bool(re.match(pattern, name)) - -def validate_positive_number(value: Any) -> bool: - """Validate positive number""" - try: - return float(value) > 0 - except (ValueError, TypeError): - return False - -def validate_non_negative_number(value: Any) -> bool: - """Validate non-negative number""" - try: - return float(value) >= 0 - except (ValueError, TypeError): - return False \ No newline at end of file diff --git a/services/training/Dockerfile b/services/training/Dockerfile index 0759c4a1..8cd9dac4 100644 --- a/services/training/Dockerfile +++ b/services/training/Dockerfile @@ -1,4 +1,9 @@ -# services/training/Dockerfile +# Build stage that holds the shared libraries +FROM python:3.11-slim AS shared +WORKDIR /shared +COPY shared/ /shared/ + +# Main service stage FROM python:3.11-slim WORKDIR /app @@ -6,24 +11,20 @@ WORKDIR /app # Install system dependencies RUN apt-get update && apt-get install -y \ gcc \ - g++ \ curl \ && rm -rf /var/lib/apt/lists/* # Copy requirements -COPY requirements.txt . +COPY services/training/requirements.txt . # Install Python dependencies RUN pip install --no-cache-dir -r requirements.txt -# Copy shared libraries -COPY shared/ /app/shared/ +# Copy shared libraries from the shared stage +COPY --from=shared /shared /app/shared # Copy application code -COPY . . - -# Create model storage directory -RUN mkdir -p /app/models +COPY services/training/ .
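Note: this hunk also drops the baked-in `RUN mkdir -p /app/models`, so if the training service still writes models under /app/models, the directory now has to be created at runtime. A minimal sketch of such a startup hook; both the MODEL_DIR variable name and the default path are assumptions, not taken from the patch:

    import os
    from pathlib import Path

    # Recreate the model storage directory the Dockerfile no longer provides.
    model_dir = Path(os.getenv("MODEL_DIR", "/app/models"))  # names assumed
    model_dir.mkdir(parents=True, exist_ok=True)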
# Add shared libraries to Python path ENV PYTHONPATH="/app:/app/shared:$PYTHONPATH" diff --git a/services/training/shared/auth/__init__.py b/services/training/shared/auth/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/services/training/shared/auth/decorators.py b/services/training/shared/auth/decorators.py deleted file mode 100644 index 53095a15..00000000 --- a/services/training/shared/auth/decorators.py +++ /dev/null @@ -1,41 +0,0 @@ -""" -Authentication decorators for FastAPI -""" - -from functools import wraps -from fastapi import HTTPException, Depends -from fastapi.security import HTTPBearer -import httpx -import logging - -logger = logging.getLogger(__name__) - -security = HTTPBearer() - -def verify_service_token(auth_service_url: str): - """Verify service token with auth service""" - - async def verify_token(token: str = Depends(security)): - try: - async with httpx.AsyncClient() as client: - response = await client.post( - f"{auth_service_url}/verify", - headers={"Authorization": f"Bearer {token.credentials}"} - ) - - if response.status_code == 200: - return response.json() - else: - raise HTTPException( - status_code=401, - detail="Invalid authentication credentials" - ) - - except httpx.RequestError as e: - logger.error(f"Auth service unavailable: {e}") - raise HTTPException( - status_code=503, - detail="Authentication service unavailable" - ) - - return verify_token \ No newline at end of file diff --git a/services/training/shared/auth/jwt_handler.py b/services/training/shared/auth/jwt_handler.py deleted file mode 100644 index 2eae4724..00000000 --- a/services/training/shared/auth/jwt_handler.py +++ /dev/null @@ -1,58 +0,0 @@ -""" -Shared JWT Authentication Handler -Used across all microservices for consistent authentication -""" - -from jose import jwt -from datetime import datetime, timedelta -from typing import Optional, Dict, Any -import logging - -logger = logging.getLogger(__name__) - -class JWTHandler: - """JWT token handling for microservices""" - - def __init__(self, secret_key: str, algorithm: str = "HS256"): - self.secret_key = secret_key - self.algorithm = algorithm - - def create_access_token(self, data: Dict[str, Any], expires_delta: Optional[timedelta] = None) -> str: - """Create JWT access token""" - to_encode = data.copy() - - if expires_delta: - expire = datetime.now(datetime.timezone.utc) + expires_delta - else: - expire = datetime.now(datetime.timezone.utc) + timedelta(minutes=30) - - to_encode.update({"exp": expire, "type": "access"}) - - encoded_jwt = jwt.encode(to_encode, self.secret_key, algorithm=self.algorithm) - return encoded_jwt - - def create_refresh_token(self, data: Dict[str, Any], expires_delta: Optional[timedelta] = None) -> str: - """Create JWT refresh token""" - to_encode = data.copy() - - if expires_delta: - expire = datetime.now(datetime.timezone.utc) + expires_delta - else: - expire = datetime.now(datetime.timezone.utc) + timedelta(days=7) - - to_encode.update({"exp": expire, "type": "refresh"}) - - encoded_jwt = jwt.encode(to_encode, self.secret_key, algorithm=self.algorithm) - return encoded_jwt - - def verify_token(self, token: str) -> Optional[Dict[str, Any]]: - """Verify and decode JWT token""" - try: - payload = jwt.decode(token, self.secret_key, algorithms=[self.algorithm]) - return payload - except jwt.ExpiredSignatureError: - logger.warning("Token has expired") - return None - except jwt.JWTError: - logger.warning("Invalid token") - return None \ No newline at end of file diff --git 
a/services/training/shared/database/__init__.py b/services/training/shared/database/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/services/training/shared/database/base.py b/services/training/shared/database/base.py deleted file mode 100644 index e5766716..00000000 --- a/services/training/shared/database/base.py +++ /dev/null @@ -1,56 +0,0 @@ -""" -Base database configuration for all microservices -""" - -import os -from sqlalchemy import create_engine -from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine -from sqlalchemy.orm import sessionmaker, declarative_base -from sqlalchemy.pool import StaticPool -import logging - -logger = logging.getLogger(__name__) - -Base = declarative_base() - -class DatabaseManager: - """Database manager for microservices""" - - def __init__(self, database_url: str): - self.database_url = database_url - self.async_engine = create_async_engine( - database_url, - echo=False, - pool_pre_ping=True, - pool_recycle=300, - pool_size=20, - max_overflow=30 - ) - - self.async_session_local = sessionmaker( - self.async_engine, - class_=AsyncSession, - expire_on_commit=False - ) - - async def get_db(self): - """Get database session""" - async with self.async_session_local() as session: - try: - yield session - except Exception as e: - logger.error(f"Database session error: {e}") - await session.rollback() - raise - finally: - await session.close() - - async def create_tables(self): - """Create database tables""" - async with self.async_engine.begin() as conn: - await conn.run_sync(Base.metadata.create_all) - - async def drop_tables(self): - """Drop database tables""" - async with self.async_engine.begin() as conn: - await conn.run_sync(Base.metadata.drop_all) \ No newline at end of file diff --git a/services/training/shared/messaging/__init__.py b/services/training/shared/messaging/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/services/training/shared/messaging/events.py b/services/training/shared/messaging/events.py deleted file mode 100644 index cf8f8162..00000000 --- a/services/training/shared/messaging/events.py +++ /dev/null @@ -1,73 +0,0 @@ -""" -Event definitions for microservices communication -""" - -from dataclasses import dataclass -from datetime import datetime -from typing import Dict, Any, Optional -import uuid - -@dataclass -class BaseEvent: - """Base event class""" - event_id: str - event_type: str - service_name: str - timestamp: datetime - data: Dict[str, Any] - correlation_id: Optional[str] = None - - def __post_init__(self): - if not self.event_id: - self.event_id = str(uuid.uuid4()) - if not self.timestamp: - self.timestamp = datetime.now(datetime.timezone.utc) - -# Training Events -@dataclass -class TrainingStartedEvent(BaseEvent): - event_type: str = "training.started" - -@dataclass -class TrainingCompletedEvent(BaseEvent): - event_type: str = "training.completed" - -@dataclass -class TrainingFailedEvent(BaseEvent): - event_type: str = "training.failed" - -# Forecasting Events -@dataclass -class ForecastGeneratedEvent(BaseEvent): - event_type: str = "forecast.generated" - -@dataclass -class ForecastRequestedEvent(BaseEvent): - event_type: str = "forecast.requested" - -# User Events -@dataclass -class UserRegisteredEvent(BaseEvent): - event_type: str = "user.registered" - -@dataclass -class UserLoginEvent(BaseEvent): - event_type: str = "user.login" - -# Tenant Events -@dataclass -class TenantCreatedEvent(BaseEvent): - event_type: str = "tenant.created" - -@dataclass -class 
TenantUpdatedEvent(BaseEvent): - event_type: str = "tenant.updated" - -# Notification Events -@dataclass -class NotificationSentEvent(BaseEvent): - event_type: str = "notification.sent" - -@dataclass -class NotificationFailedEvent(BaseEvent): - event_type: str = "notification.failed" \ No newline at end of file diff --git a/services/training/shared/messaging/rabbitmq.py b/services/training/shared/messaging/rabbitmq.py deleted file mode 100644 index 62d95cfb..00000000 --- a/services/training/shared/messaging/rabbitmq.py +++ /dev/null @@ -1,96 +0,0 @@ -""" -RabbitMQ messaging client for microservices -""" - -import asyncio -import json -import logging -from typing import Dict, Any, Callable -import aio_pika -from aio_pika import connect_robust, Message, DeliveryMode - -logger = logging.getLogger(__name__) - -class RabbitMQClient: - """RabbitMQ client for microservices communication""" - - def __init__(self, connection_url: str): - self.connection_url = connection_url - self.connection = None - self.channel = None - - async def connect(self): - """Connect to RabbitMQ""" - try: - self.connection = await connect_robust(self.connection_url) - self.channel = await self.connection.channel() - logger.info("Connected to RabbitMQ") - except Exception as e: - logger.error(f"Failed to connect to RabbitMQ: {e}") - raise - - async def disconnect(self): - """Disconnect from RabbitMQ""" - if self.connection: - await self.connection.close() - logger.info("Disconnected from RabbitMQ") - - async def publish_event(self, exchange_name: str, routing_key: str, event_data: Dict[str, Any]): - """Publish event to RabbitMQ""" - try: - if not self.channel: - await self.connect() - - # Declare exchange - exchange = await self.channel.declare_exchange( - exchange_name, - aio_pika.ExchangeType.TOPIC, - durable=True - ) - - # Create message - message = Message( - json.dumps(event_data).encode(), - delivery_mode=DeliveryMode.PERSISTENT, - content_type="application/json" - ) - - # Publish message - await exchange.publish(message, routing_key=routing_key) - - logger.info(f"Published event to {exchange_name} with routing key {routing_key}") - - except Exception as e: - logger.error(f"Failed to publish event: {e}") - raise - - async def consume_events(self, exchange_name: str, queue_name: str, routing_key: str, callback: Callable): - """Consume events from RabbitMQ""" - try: - if not self.channel: - await self.connect() - - # Declare exchange - exchange = await self.channel.declare_exchange( - exchange_name, - aio_pika.ExchangeType.TOPIC, - durable=True - ) - - # Declare queue - queue = await self.channel.declare_queue( - queue_name, - durable=True - ) - - # Bind queue to exchange - await queue.bind(exchange, routing_key) - - # Set up consumer - await queue.consume(callback) - - logger.info(f"Started consuming events from {queue_name}") - - except Exception as e: - logger.error(f"Failed to consume events: {e}") - raise \ No newline at end of file diff --git a/services/training/shared/monitoring/__init__.py b/services/training/shared/monitoring/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/services/training/shared/monitoring/logging.py b/services/training/shared/monitoring/logging.py deleted file mode 100644 index 0fde234d..00000000 --- a/services/training/shared/monitoring/logging.py +++ /dev/null @@ -1,77 +0,0 @@ -""" -Centralized logging configuration for microservices -""" - -import logging -import logging.config -import os -from typing import Dict, Any - -def setup_logging(service_name: str, 
log_level: str = "INFO") -> None: - """Set up logging configuration for a microservice""" - - config: Dict[str, Any] = { - "version": 1, - "disable_existing_loggers": False, - "formatters": { - "standard": { - "format": "%(asctime)s [%(levelname)s] %(name)s: %(message)s" - }, - "detailed": { - "format": "%(asctime)s [%(levelname)s] %(name)s [%(filename)s:%(lineno)d] %(message)s" - }, - "json": { - "()": "pythonjsonlogger.jsonlogger.JsonFormatter", - "format": "%(asctime)s %(name)s %(levelname)s %(message)s" - } - }, - "handlers": { - "console": { - "class": "logging.StreamHandler", - "level": log_level, - "formatter": "standard", - "stream": "ext://sys.stdout" - }, - "file": { - "class": "logging.FileHandler", - "level": log_level, - "formatter": "detailed", - "filename": f"/var/log/{service_name}.log", - "mode": "a" - }, - "logstash": { - "class": "logstash.TCPLogstashHandler", - "host": os.getenv("LOGSTASH_HOST", "localhost"), - "port": int(os.getenv("LOGSTASH_PORT", "5000")), - "version": 1, - "message_type": "logstash", - "fqdn": False, - "tags": [service_name] - } - }, - "loggers": { - "": { - "handlers": ["console", "file"], - "level": log_level, - "propagate": False - }, - "uvicorn": { - "handlers": ["console"], - "level": log_level, - "propagate": False - }, - "uvicorn.access": { - "handlers": ["console"], - "level": log_level, - "propagate": False - } - } - } - - # Add logstash handler if in production - if os.getenv("ENVIRONMENT") == "production": - config["loggers"][""]["handlers"].append("logstash") - - logging.config.dictConfig(config) - logger = logging.getLogger(__name__) - logger.info(f"Logging configured for {service_name}") \ No newline at end of file diff --git a/services/training/shared/monitoring/metrics.py b/services/training/shared/monitoring/metrics.py deleted file mode 100644 index a5e35223..00000000 --- a/services/training/shared/monitoring/metrics.py +++ /dev/null @@ -1,112 +0,0 @@ -""" -Metrics collection for microservices -""" - -import time -import logging -from typing import Dict, Any -from prometheus_client import Counter, Histogram, Gauge, start_http_server -from functools import wraps - -logger = logging.getLogger(__name__) - -# Prometheus metrics -REQUEST_COUNT = Counter( - 'http_requests_total', - 'Total HTTP requests', - ['method', 'endpoint', 'status_code', 'service'] -) - -REQUEST_DURATION = Histogram( - 'http_request_duration_seconds', - 'HTTP request duration in seconds', - ['method', 'endpoint', 'service'] -) - -ACTIVE_CONNECTIONS = Gauge( - 'active_connections', - 'Active database connections', - ['service'] -) - -TRAINING_JOBS = Counter( - 'training_jobs_total', - 'Total training jobs', - ['status', 'service'] -) - -FORECASTS_GENERATED = Counter( - 'forecasts_generated_total', - 'Total forecasts generated', - ['service'] -) - -class MetricsCollector: - """Metrics collector for microservices""" - - def __init__(self, service_name: str): - self.service_name = service_name - self.start_time = time.time() - - def start_metrics_server(self, port: int = 8080): - """Start Prometheus metrics server""" - try: - start_http_server(port) - logger.info(f"Metrics server started on port {port}") - except Exception as e: - logger.error(f"Failed to start metrics server: {e}") - - def record_request(self, method: str, endpoint: str, status_code: int, duration: float): - """Record HTTP request metrics""" - REQUEST_COUNT.labels( - method=method, - endpoint=endpoint, - status_code=status_code, - service=self.service_name - ).inc() - - REQUEST_DURATION.labels( - 
method=method, - endpoint=endpoint, - service=self.service_name - ).observe(duration) - - def record_training_job(self, status: str): - """Record training job metrics""" - TRAINING_JOBS.labels( - status=status, - service=self.service_name - ).inc() - - def record_forecast_generated(self): - """Record forecast generation metrics""" - FORECASTS_GENERATED.labels( - service=self.service_name - ).inc() - - def set_active_connections(self, count: int): - """Set active database connections""" - ACTIVE_CONNECTIONS.labels( - service=self.service_name - ).set(count) - -def metrics_middleware(metrics_collector: MetricsCollector): - """Middleware to collect metrics""" - - def middleware(request, call_next): - start_time = time.time() - - response = call_next(request) - - duration = time.time() - start_time - - metrics_collector.record_request( - method=request.method, - endpoint=request.url.path, - status_code=response.status_code, - duration=duration - ) - - return response - - return middleware \ No newline at end of file diff --git a/services/training/shared/utils/__init__.py b/services/training/shared/utils/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/services/training/shared/utils/datetime_utils.py b/services/training/shared/utils/datetime_utils.py deleted file mode 100644 index 3035001a..00000000 --- a/services/training/shared/utils/datetime_utils.py +++ /dev/null @@ -1,71 +0,0 @@ -""" -DateTime utilities for microservices -""" - -from datetime import datetime, timezone, timedelta -from typing import Optional -import pytz - -def utc_now() -> datetime: - """Get current UTC datetime""" - return datetime.now(timezone.utc) - -def madrid_now() -> datetime: - """Get current Madrid datetime""" - madrid_tz = pytz.timezone('Europe/Madrid') - return datetime.now(madrid_tz) - -def to_utc(dt: datetime) -> datetime: - """Convert datetime to UTC""" - if dt.tzinfo is None: - dt = dt.replace(tzinfo=timezone.utc) - return dt.astimezone(timezone.utc) - -def to_madrid(dt: datetime) -> datetime: - """Convert datetime to Madrid timezone""" - madrid_tz = pytz.timezone('Europe/Madrid') - if dt.tzinfo is None: - dt = dt.replace(tzinfo=timezone.utc) - return dt.astimezone(madrid_tz) - -def format_datetime(dt: datetime, format_str: str = "%Y-%m-%d %H:%M:%S") -> str: - """Format datetime as string""" - return dt.strftime(format_str) - -def parse_datetime(dt_str: str, format_str: str = "%Y-%m-%d %H:%M:%S") -> datetime: - """Parse datetime from string""" - return datetime.strptime(dt_str, format_str) - -def is_business_hours(dt: Optional[datetime] = None) -> bool: - """Check if datetime is during business hours (9 AM - 6 PM Madrid time)""" - if dt is None: - dt = madrid_now() - - if dt.tzinfo is None: - dt = dt.replace(tzinfo=timezone.utc) - - madrid_dt = to_madrid(dt) - - # Check if it's a weekday (Monday=0, Sunday=6) - if madrid_dt.weekday() >= 5: # Weekend - return False - - # Check if it's business hours - return 9 <= madrid_dt.hour < 18 - -def next_business_day(dt: Optional[datetime] = None) -> datetime: - """Get next business day""" - if dt is None: - dt = madrid_now() - - if dt.tzinfo is None: - dt = dt.replace(tzinfo=timezone.utc) - - madrid_dt = to_madrid(dt) - - # Add days until we reach a weekday - while madrid_dt.weekday() >= 5: # Weekend - madrid_dt += timedelta(days=1) - - # Set to 9 AM - return madrid_dt.replace(hour=9, minute=0, second=0, microsecond=0) \ No newline at end of file diff --git a/services/training/shared/utils/validation.py 
b/services/training/shared/utils/validation.py deleted file mode 100644 index c855b20c..00000000 --- a/services/training/shared/utils/validation.py +++ /dev/null @@ -1,67 +0,0 @@ -""" -Validation utilities for microservices -""" - -import re -from typing import Any, Optional -from email_validator import validate_email, EmailNotValidError - -def validate_spanish_phone(phone: str) -> bool: - """Validate Spanish phone number""" - # Spanish phone pattern: +34 followed by 9 digits - pattern = r'^(\+34|0034|34)?[6-9]\d{8}$' - return bool(re.match(pattern, phone.replace(' ', '').replace('-', ''))) - -def validate_email_address(email: str) -> bool: - """Validate email address""" - try: - validate_email(email) - return True - except EmailNotValidError: - return False - -def validate_tenant_name(name: str) -> bool: - """Validate tenant name""" - # Must be 2-50 characters, letters, numbers, spaces, hyphens, apostrophes - pattern = r"^[a-zA-ZÀ-ÿ0-9\s\-']{2,50}$" - return bool(re.match(pattern, name)) - -def validate_address(address: str) -> bool: - """Validate address""" - # Must be 5-200 characters - return 5 <= len(address.strip()) <= 200 - -def validate_coordinates(latitude: float, longitude: float) -> bool: - """Validate Madrid coordinates""" - # Madrid is roughly between these coordinates - madrid_bounds = { - 'lat_min': 40.3, - 'lat_max': 40.6, - 'lon_min': -3.8, - 'lon_max': -3.5 - } - - return ( - madrid_bounds['lat_min'] <= latitude <= madrid_bounds['lat_max'] and - madrid_bounds['lon_min'] <= longitude <= madrid_bounds['lon_max'] - ) - -def validate_product_name(name: str) -> bool: - """Validate product name""" - # Must be 1-50 characters, letters, numbers, spaces - pattern = r"^[a-zA-ZÀ-ÿ0-9\s]{1,50}$" - return bool(re.match(pattern, name)) - -def validate_positive_number(value: Any) -> bool: - """Validate positive number""" - try: - return float(value) > 0 - except (ValueError, TypeError): - return False - -def validate_non_negative_number(value: Any) -> bool: - """Validate non-negative number""" - try: - return float(value) >= 0 - except (ValueError, TypeError): - return False \ No newline at end of file
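Note: every deleted copy of jwt_handler.py (and the BaseEvent.__post_init__ in events.py) computes timestamps with `datetime.now(datetime.timezone.utc)`. Since `datetime` there is the class imported from the `datetime` module, it has no `timezone` attribute, and the call raises AttributeError at token-creation time; if the surviving copy under the top-level shared/ tree matches these duplicates, it carries the same bug. The working form imports `timezone` alongside `datetime`:

    from datetime import datetime, timedelta, timezone

    # Aware UTC expiry, which is what the deleted code intended:
    expire = datetime.now(timezone.utc) + timedelta(minutes=30)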
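Note: the deleted metrics_middleware returns a plain synchronous function, but these services run under uvicorn/FastAPI, whose HTTP middleware hook must await call_next. A minimal async port, assuming FastAPI and assuming the canonical MetricsCollector under shared/ keeps the record_request signature shown in the deleted code (the import path and service name are illustrative):

    import time
    from fastapi import FastAPI, Request
    from shared.monitoring.metrics import MetricsCollector  # import path assumed

    app = FastAPI()
    collector = MetricsCollector("tenant")

    @app.middleware("http")
    async def collect_metrics(request: Request, call_next):
        # Same bookkeeping as the shared metrics_middleware, made awaitable.
        start = time.time()
        response = await call_next(request)
        collector.record_request(request.method, request.url.path,
                                 response.status_code, time.time() - start)
        return response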