From abc8b68ab48b1dbaeda72456eb81fbf290fd338d Mon Sep 17 00:00:00 2001 From: Urtzi Alfaro Date: Sat, 19 Jul 2025 17:49:03 +0200 Subject: [PATCH] Improve auth flow --- .env.sample | 123 +++ docker-compose.yml | 874 ++++++++++-------- gateway/app/middleware/auth.py | 257 +++-- .../grafana/dashboards/dashboard.yml | 11 + .../grafana/datasources/prometheus.yml | 9 + .../monitoring/prometheus/prometheus.yml | 60 +- services/auth/app/schemas/auth.py | 80 +- services/auth/requirements.txt | 2 + services/tenant/app/api/tenants.py | 167 ++++ services/tenant/app/main.py | 47 +- services/tenant/app/models/tenants.py | 73 ++ services/tenant/app/schemas/tenants.py | 83 ++ services/tenant/app/services/messaging.py | 41 + .../tenant/app/services/tenant_service.py | 0 shared/auth/decorators.py | 99 +- shared/auth/jwt_handler.py | 83 +- 16 files changed, 1437 insertions(+), 572 deletions(-) create mode 100644 .env.sample create mode 100644 infrastructure/monitoring/grafana/dashboards/dashboard.yml create mode 100644 infrastructure/monitoring/grafana/datasources/prometheus.yml create mode 100644 services/tenant/app/api/tenants.py create mode 100644 services/tenant/app/models/tenants.py create mode 100644 services/tenant/app/schemas/tenants.py create mode 100644 services/tenant/app/services/messaging.py create mode 100644 services/tenant/app/services/tenant_service.py diff --git a/.env.sample b/.env.sample new file mode 100644 index 00000000..92ff1297 --- /dev/null +++ b/.env.sample @@ -0,0 +1,123 @@ +# .env.example - Environment Variables Template +# Copy to .env and update values + +# ================================================================ +# JWT CONFIGURATION (CRITICAL - CHANGE IN PRODUCTION!) +# ================================================================ +JWT_SECRET_KEY=your-super-secret-jwt-key-change-in-production-minimum-32-characters-required + +# ================================================================ +# EXTERNAL API KEYS +# ================================================================ + +# AEMET (Spanish Weather Service) API Key +# Get from: https://opendata.aemet.es/centrodedescargas/altaUsuario +AEMET_API_KEY=your-aemet-api-key-here + +# Madrid Open Data API Key (Optional) +# Get from: https://datos.madrid.es/portal/site/egob/ +MADRID_OPENDATA_API_KEY=your-madrid-opendata-key-here + +# ================================================================ +# EMAIL CONFIGURATION (For notifications) +# ================================================================ + +# Gmail SMTP Configuration (recommended) +SMTP_HOST=smtp.gmail.com +SMTP_PORT=587 +SMTP_USER=your-email@gmail.com +SMTP_PASSWORD=your-gmail-app-specific-password + +# Alternative: SendGrid +# SMTP_HOST=smtp.sendgrid.net +# SMTP_PORT=587 +# SMTP_USER=apikey +# SMTP_PASSWORD=your-sendgrid-api-key + +# ================================================================ +# WHATSAPP CONFIGURATION (Twilio) +# ================================================================ + +# Twilio WhatsApp Configuration +# Get from: https://www.twilio.com/console +WHATSAPP_ACCOUNT_SID=your-twilio-account-sid +WHATSAPP_AUTH_TOKEN=your-twilio-auth-token +WHATSAPP_FROM_NUMBER=whatsapp:+14155238886 + +# ================================================================ +# DATABASE CONFIGURATION (Auto-configured in Docker) +# ================================================================ + +# These are set automatically in docker-compose.yml +# Only change if using external databases + +# 
AUTH_DATABASE_URL=postgresql+asyncpg://auth_user:auth_pass123@auth-db:5432/auth_db +# TENANT_DATABASE_URL=postgresql+asyncpg://tenant_user:tenant_pass123@tenant-db:5432/tenant_db +# TRAINING_DATABASE_URL=postgresql+asyncpg://training_user:training_pass123@training-db:5432/training_db +# FORECASTING_DATABASE_URL=postgresql+asyncpg://forecasting_user:forecasting_pass123@forecasting-db:5432/forecasting_db +# DATA_DATABASE_URL=postgresql+asyncpg://data_user:data_pass123@data-db:5432/data_db +# NOTIFICATION_DATABASE_URL=postgresql+asyncpg://notification_user:notification_pass123@notification-db:5432/notification_db + +# ================================================================ +# REDIS CONFIGURATION (Auto-configured in Docker) +# ================================================================ + +# REDIS_URL=redis://:redis_pass123@redis:6379 + +# ================================================================ +# RABBITMQ CONFIGURATION (Auto-configured in Docker) +# ================================================================ + +# RABBITMQ_URL=amqp://bakery:forecast123@rabbitmq:5672/ + +# ================================================================ +# CORS CONFIGURATION +# ================================================================ + +# Allowed origins for CORS (comma-separated) +CORS_ORIGINS=http://localhost:3000,http://localhost:3001,https://yourdomain.com + +# ================================================================ +# ML/AI CONFIGURATION +# ================================================================ + +# Model storage configuration +MODEL_STORAGE_PATH=/app/models +MAX_TRAINING_TIME_MINUTES=30 +MIN_TRAINING_DATA_DAYS=30 +PROPHET_SEASONALITY_MODE=additive + +# Prediction caching +PREDICTION_CACHE_TTL_HOURS=6 + +# ================================================================ +# SECURITY CONFIGURATION +# ================================================================ + +# Password requirements +PASSWORD_MIN_LENGTH=8 +MAX_LOGIN_ATTEMPTS=5 +LOCKOUT_DURATION_MINUTES=30 + +# Rate limiting +RATE_LIMIT_CALLS_PER_MINUTE=60 +RATE_LIMIT_BURST=10 + +# Session configuration +JWT_ACCESS_TOKEN_EXPIRE_MINUTES=30 +JWT_REFRESH_TOKEN_EXPIRE_DAYS=7 + +# ================================================================ +# MONITORING CONFIGURATION +# ================================================================ + +# Log levels: DEBUG, INFO, WARNING, ERROR, CRITICAL +LOG_LEVEL=INFO + +# Service versions +SERVICE_VERSION=1.0.0 + +# Data retention +DATA_RETENTION_DAYS=365 +WEATHER_CACHE_TTL_HOURS=1 +TRAFFIC_CACHE_TTL_HOURS=1 \ No newline at end of file diff --git a/docker-compose.yml b/docker-compose.yml index ac4c3a18..b8d7bab7 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,18 +1,56 @@ -# docker-compose.yml - Development Environment +# ================================================================ +# UPDATED DOCKER COMPOSE - PROPER AUTHENTICATION ARCHITECTURE +# ================================================================ + +networks: + bakery-network: + driver: bridge + +volumes: + postgres_auth_data: + postgres_training_data: + postgres_forecasting_data: + postgres_data_data: + postgres_tenant_data: + postgres_notification_data: + redis_data: + rabbitmq_data: + prometheus_data: + grafana_data: services: - # Message Broker - rabbitmq: - image: rabbitmq:3-management-alpine - container_name: bakery-rabbitmq - hostname: rabbitmq + # ================================================================ + # INFRASTRUCTURE SERVICES + # 
================================================================ + + # Redis - For caching, sessions, and rate limiting + redis: + image: redis:7-alpine + container_name: bakery-redis + command: redis-server --appendonly yes --requirepass redis_pass123 ports: - - "5672:5672" - - "15672:15672" + - "6379:6379" + volumes: + - redis_data:/data + networks: + - bakery-network + healthcheck: + test: ["CMD", "redis-cli", "--raw", "incr", "ping"] + interval: 30s + timeout: 10s + retries: 3 + + # RabbitMQ - Message broker + rabbitmq: + image: rabbitmq:3-management + container_name: bakery-rabbitmq environment: - RABBITMQ_DEFAULT_USER=bakery - RABBITMQ_DEFAULT_PASS=forecast123 - RABBITMQ_DEFAULT_VHOST=/ + ports: + - "5672:5672" + - "15672:15672" volumes: - rabbitmq_data:/var/lib/rabbitmq networks: @@ -23,158 +61,208 @@ services: timeout: 10s retries: 3 - # Cache & Session Store - redis: - image: redis:7-alpine - container_name: bakery-redis - ports: - - "6379:6379" - volumes: - - redis_data:/data - networks: - - bakery-network - command: redis-server --appendonly yes - healthcheck: - test: ["CMD", "redis-cli", "ping"] - interval: 30s - timeout: 10s - retries: 3 + # ================================================================ + # DATABASE SERVICES + # ================================================================ - # Auth Service Database + # Auth Database auth-db: - image: postgres:15-alpine + image: postgres:15 container_name: bakery-auth-db environment: - POSTGRES_DB=auth_db - POSTGRES_USER=auth_user - POSTGRES_PASSWORD=auth_pass123 volumes: - - auth_db_data:/var/lib/postgresql/data - ports: - - "5432:5432" + - postgres_auth_data:/var/lib/postgresql/data networks: - bakery-network healthcheck: test: ["CMD-SHELL", "pg_isready -U auth_user -d auth_db"] - interval: 30s - timeout: 10s - retries: 3 + interval: 10s + timeout: 5s + retries: 5 - # Training Service Database - training-db: - image: postgres:15-alpine - container_name: bakery-training-db - environment: - - POSTGRES_DB=training_db - - POSTGRES_USER=training_user - - POSTGRES_PASSWORD=training_pass123 - volumes: - - training_db_data:/var/lib/postgresql/data - ports: - - "5433:5432" - networks: - - bakery-network - healthcheck: - test: ["CMD-SHELL", "pg_isready -U training_user -d training_db"] - interval: 30s - timeout: 10s - retries: 3 - - # Forecasting Service Database - forecasting-db: - image: postgres:15-alpine - container_name: bakery-forecasting-db - environment: - - POSTGRES_DB=forecasting_db - - POSTGRES_USER=forecasting_user - - POSTGRES_PASSWORD=forecasting_pass123 - volumes: - - forecasting_db_data:/var/lib/postgresql/data - ports: - - "5434:5432" - networks: - - bakery-network - healthcheck: - test: ["CMD-SHELL", "pg_isready -U forecasting_user -d forecasting_db"] - interval: 30s - timeout: 10s - retries: 3 - - # Data Service Database - data-db: - image: postgres:15-alpine - container_name: bakery-data-db - environment: - - POSTGRES_DB=data_db - - POSTGRES_USER=data_user - - POSTGRES_PASSWORD=data_pass123 - volumes: - - data_db_data:/var/lib/postgresql/data - ports: - - "5435:5432" - networks: - - bakery-network - healthcheck: - test: ["CMD-SHELL", "pg_isready -U data_user -d data_db"] - interval: 30s - timeout: 10s - retries: 3 - - # Tenant Service Database + # Tenant Database tenant-db: - image: postgres:15-alpine + image: postgres:15 container_name: bakery-tenant-db environment: - POSTGRES_DB=tenant_db - POSTGRES_USER=tenant_user - POSTGRES_PASSWORD=tenant_pass123 volumes: - - tenant_db_data:/var/lib/postgresql/data - 
ports: - - "5436:5432" + - postgres_tenant_data:/var/lib/postgresql/data networks: - bakery-network healthcheck: test: ["CMD-SHELL", "pg_isready -U tenant_user -d tenant_db"] - interval: 30s - timeout: 10s - retries: 3 + interval: 10s + timeout: 5s + retries: 5 - # Notification Service Database + # Training Database + training-db: + image: postgres:15 + container_name: bakery-training-db + environment: + - POSTGRES_DB=training_db + - POSTGRES_USER=training_user + - POSTGRES_PASSWORD=training_pass123 + volumes: + - postgres_training_data:/var/lib/postgresql/data + networks: + - bakery-network + healthcheck: + test: ["CMD-SHELL", "pg_isready -U training_user -d training_db"] + interval: 10s + timeout: 5s + retries: 5 + + # Forecasting Database + forecasting-db: + image: postgres:15 + container_name: bakery-forecasting-db + environment: + - POSTGRES_DB=forecasting_db + - POSTGRES_USER=forecasting_user + - POSTGRES_PASSWORD=forecasting_pass123 + volumes: + - postgres_forecasting_data:/var/lib/postgresql/data + networks: + - bakery-network + healthcheck: + test: ["CMD-SHELL", "pg_isready -U forecasting_user -d forecasting_db"] + interval: 10s + timeout: 5s + retries: 5 + + # Data Database + data-db: + image: postgres:15 + container_name: bakery-data-db + environment: + - POSTGRES_DB=data_db + - POSTGRES_USER=data_user + - POSTGRES_PASSWORD=data_pass123 + volumes: + - postgres_data_data:/var/lib/postgresql/data + networks: + - bakery-network + healthcheck: + test: ["CMD-SHELL", "pg_isready -U data_user -d data_db"] + interval: 10s + timeout: 5s + retries: 5 + + # Notification Database notification-db: - image: postgres:15-alpine + image: postgres:15 container_name: bakery-notification-db environment: - POSTGRES_DB=notification_db - POSTGRES_USER=notification_user - POSTGRES_PASSWORD=notification_pass123 volumes: - - notification_db_data:/var/lib/postgresql/data - ports: - - "5437:5432" + - postgres_notification_data:/var/lib/postgresql/data networks: - bakery-network healthcheck: test: ["CMD-SHELL", "pg_isready -U notification_user -d notification_db"] + interval: 10s + timeout: 5s + retries: 5 + + # ================================================================ + # MICROSERVICES + # ================================================================ + + # API Gateway - Enhanced with Redis caching + gateway: + build: + context: . 
+ dockerfile: ./gateway/Dockerfile + container_name: bakery-gateway + environment: + # Service Discovery + - AUTH_SERVICE_URL=http://auth-service:8000 + - TENANT_SERVICE_URL=http://tenant-service:8000 + - TRAINING_SERVICE_URL=http://training-service:8000 + - FORECASTING_SERVICE_URL=http://forecasting-service:8000 + - DATA_SERVICE_URL=http://data-service:8000 + - NOTIFICATION_SERVICE_URL=http://notification-service:8000 + + # Authentication & Caching + - JWT_SECRET_KEY=${JWT_SECRET_KEY:-your-super-secret-jwt-key-change-in-production} + - JWT_ALGORITHM=HS256 + - REDIS_URL=redis://:redis_pass123@redis:6379/0 + + # CORS Configuration + - CORS_ORIGINS=http://localhost:3000,http://localhost:3001,https://panaderia.vercel.app + + # Service Configuration + - SERVICE_NAME=gateway + - SERVICE_VERSION=1.0.0 + - LOG_LEVEL=INFO + + # Rate Limiting + - RATE_LIMIT_CALLS_PER_MINUTE=60 + - RATE_LIMIT_BURST=10 + + ports: + - "8000:8000" + depends_on: + redis: + condition: service_healthy + rabbitmq: + condition: service_healthy + auth-service: + condition: service_healthy + tenant-service: + condition: service_healthy + networks: + - bakery-network + volumes: + - ./gateway:/app + - ./shared:/app/shared + healthcheck: + test: ["CMD", "curl", "-f", "http://localhost:8000/health"] interval: 30s timeout: 10s retries: 3 - # Authentication Service + # Auth Service - Enhanced with proper JWT handling auth-service: build: - context: . # Build context is the project root + context: . dockerfile: ./services/auth/Dockerfile container_name: bakery-auth-service environment: + # Database - DATABASE_URL=postgresql+asyncpg://auth_user:auth_pass123@auth-db:5432/auth_db - - REDIS_URL=redis://redis:6379/0 - - JWT_SECRET_KEY=your-super-secret-jwt-key-change-in-production + + # Redis for sessions and rate limiting + - REDIS_URL=redis://:redis_pass123@redis:6379/1 + + # Message Queue + - RABBITMQ_URL=amqp://bakery:forecast123@rabbitmq:5672/ + + # JWT Configuration + - JWT_SECRET_KEY=${JWT_SECRET_KEY:-your-super-secret-jwt-key-change-in-production} + - JWT_ALGORITHM=HS256 - JWT_ACCESS_TOKEN_EXPIRE_MINUTES=30 - JWT_REFRESH_TOKEN_EXPIRE_DAYS=7 - - RABBITMQ_URL=amqp://bakery:forecast123@rabbitmq:5672/ + + # Security Configuration + - PASSWORD_MIN_LENGTH=8 + - MAX_LOGIN_ATTEMPTS=5 + - LOCKOUT_DURATION_MINUTES=30 + + # Service Configuration - SERVICE_NAME=auth-service - SERVICE_VERSION=1.0.0 + - LOG_LEVEL=INFO + ports: - "8001:8000" depends_on: @@ -188,136 +276,41 @@ services: - bakery-network volumes: - ./services/auth:/app - - ./shared:/app/shared # Explicitly mount shared + - ./shared:/app/shared healthcheck: test: ["CMD", "curl", "-f", "http://localhost:8000/health"] interval: 30s timeout: 10s retries: 3 - # Training Service - training-service: - build: - context: . 
# Build context is the project root - dockerfile: ./services/training/Dockerfile - container_name: bakery-training-service - environment: - - DATABASE_URL=postgresql+asyncpg://training_user:training_pass123@training-db:5432/training_db - - REDIS_URL=redis://redis:6379/1 - - RABBITMQ_URL=amqp://bakery:forecast123@rabbitmq:5672/ - - AUTH_SERVICE_URL=http://auth-service:8000 - - DATA_SERVICE_URL=http://data-service:8000 - - SERVICE_NAME=training-service - - SERVICE_VERSION=1.0.0 - ports: - - "8002:8000" - depends_on: - training-db: - condition: service_healthy - redis: - condition: service_healthy - rabbitmq: - condition: service_healthy - auth-service: - condition: service_healthy - networks: - - bakery-network - volumes: - - ./services/training:/app - - ./shared:/app/shared # Explicitly mount shared - healthcheck: - test: ["CMD", "curl", "-f", "http://localhost:8000/health"] - interval: 30s - timeout: 10s - retries: 3 - - # Forecasting Service - forecasting-service: - build: - context: . # Build context is the project root - dockerfile: ./services/forecasting/Dockerfile - container_name: bakery-forecasting-service - environment: - - DATABASE_URL=postgresql+asyncpg://forecasting_user:forecasting_pass123@forecasting-db:5432/forecasting_db - - REDIS_URL=redis://redis:6379/2 - - RABBITMQ_URL=amqp://bakery:forecast123@rabbitmq:5672/ - - AUTH_SERVICE_URL=http://auth-service:8000 - - TRAINING_SERVICE_URL=http://training-service:8000 - - DATA_SERVICE_URL=http://data-service:8000 - - SERVICE_NAME=forecasting-service - - SERVICE_VERSION=1.0.0 - ports: - - "8003:8000" - depends_on: - forecasting-db: - condition: service_healthy - redis: - condition: service_healthy - rabbitmq: - condition: service_healthy - auth-service: - condition: service_healthy - networks: - - bakery-network - volumes: - - ./services/forecasting:/app - - ./shared:/app/shared # Explicitly mount shared - healthcheck: - test: ["CMD", "curl", "-f", "http://localhost:8000/health"] - interval: 30s - timeout: 10s - retries: 3 - - # Data Service - data-service: - build: - context: . # Build context is the project root - dockerfile: ./services/data/Dockerfile - container_name: bakery-data-service - environment: - - DATABASE_URL=postgresql+asyncpg://data_user:data_pass123@data-db:5432/data_db - - REDIS_URL=redis://redis:6379/3 - - RABBITMQ_URL=amqp://bakery:forecast123@rabbitmq:5672/ - - AUTH_SERVICE_URL=http://auth-service:8000 - - AEMET_API_KEY=eyJhbGciOiJIUzI1NiJ9.eyJzdWIiOiJ1YWxmYXJvQGdtYWlsLmNvbSIsImp0aSI6ImRjZWVmNTEwLTdmYzEtNGMxNy1hODZkLWQ4NzdlZDc5ZDllNyIsImlzcyI6IkFFTUVUIiwiaWF0IjoxNzUyODMwMDg3LCJ1c2VySWQiOiJkY2VlZjUxMC03ZmMxLTRjMTctYTg2ZC1kODc3ZWQ3OWQ5ZTciLCJyb2xlIjoiIn0.C047gaiEhWhH4ItDgkHSwg8HzKTzw87TOPRTRf8j-2w - - MADRID_OPENDATA_API_KEY=your-madrid-opendata-key-here - - SERVICE_NAME=data-service - - SERVICE_VERSION=1.0.0 - ports: - - "8004:8000" - depends_on: - data-db: - condition: service_healthy - redis: - condition: service_healthy - rabbitmq: - condition: service_healthy - auth-service: - condition: service_healthy - networks: - - bakery-network - volumes: - - ./services/data:/app - - ./shared:/app/shared # Explicitly mount shared - healthcheck: - test: ["CMD", "curl", "-f", "http://localhost:8000/health"] - interval: 30s - timeout: 10s - retries: 3 - - # Tenant Service + # Tenant Service - New enhanced service tenant-service: build: - context: . # Build context is the project root + context: . 
dockerfile: ./services/tenant/Dockerfile container_name: bakery-tenant-service environment: + # Database - DATABASE_URL=postgresql+asyncpg://tenant_user:tenant_pass123@tenant-db:5432/tenant_db - - REDIS_URL=redis://redis:6379/4 + + # Redis for caching + - REDIS_URL=redis://:redis_pass123@redis:6379/2 + + # Message Queue - RABBITMQ_URL=amqp://bakery:forecast123@rabbitmq:5672/ + + # Service Discovery - AUTH_SERVICE_URL=http://auth-service:8000 + + # JWT Configuration (for token verification) + - JWT_SECRET_KEY=${JWT_SECRET_KEY:-your-super-secret-jwt-key-change-in-production} + - JWT_ALGORITHM=HS256 + + # Service Configuration - SERVICE_NAME=tenant-service - SERVICE_VERSION=1.0.0 + - LOG_LEVEL=INFO + ports: - "8005:8000" depends_on: @@ -333,31 +326,234 @@ services: - bakery-network volumes: - ./services/tenant:/app - - ./shared:/app/shared # Explicitly mount shared + - ./shared:/app/shared healthcheck: test: ["CMD", "curl", "-f", "http://localhost:8000/health"] interval: 30s timeout: 10s retries: 3 - # Notification Service + # Training Service - Enhanced with tenant isolation + training-service: + build: + context: . + dockerfile: ./services/training/Dockerfile + container_name: bakery-training-service + environment: + # Database + - DATABASE_URL=postgresql+asyncpg://training_user:training_pass123@training-db:5432/training_db + + # Redis for job queuing and caching + - REDIS_URL=redis://:redis_pass123@redis:6379/3 + + # Message Queue + - RABBITMQ_URL=amqp://bakery:forecast123@rabbitmq:5672/ + + # Service Discovery + - AUTH_SERVICE_URL=http://auth-service:8000 + - DATA_SERVICE_URL=http://data-service:8000 + - TENANT_SERVICE_URL=http://tenant-service:8000 + + # JWT Configuration + - JWT_SECRET_KEY=${JWT_SECRET_KEY:-your-super-secret-jwt-key-change-in-production} + - JWT_ALGORITHM=HS256 + + # ML Configuration + - MODEL_STORAGE_PATH=/app/models + - MAX_TRAINING_TIME_MINUTES=30 + - MIN_TRAINING_DATA_DAYS=30 + - PROPHET_SEASONALITY_MODE=additive + + # Service Configuration + - SERVICE_NAME=training-service + - SERVICE_VERSION=1.0.0 + - LOG_LEVEL=INFO + + ports: + - "8002:8000" + depends_on: + training-db: + condition: service_healthy + redis: + condition: service_healthy + rabbitmq: + condition: service_healthy + auth-service: + condition: service_healthy + data-service: + condition: service_healthy + networks: + - bakery-network + volumes: + - ./services/training:/app + - ./shared:/app/shared + - ./models:/app/models # Persistent model storage + healthcheck: + test: ["CMD", "curl", "-f", "http://localhost:8000/health"] + interval: 30s + timeout: 10s + retries: 3 + + # Forecasting Service - Enhanced with proper auth + forecasting-service: + build: + context: . 
+ dockerfile: ./services/forecasting/Dockerfile + container_name: bakery-forecasting-service + environment: + # Database + - DATABASE_URL=postgresql+asyncpg://forecasting_user:forecasting_pass123@forecasting-db:5432/forecasting_db + + # Redis for caching predictions + - REDIS_URL=redis://:redis_pass123@redis:6379/4 + + # Message Queue + - RABBITMQ_URL=amqp://bakery:forecast123@rabbitmq:5672/ + + # Service Discovery + - AUTH_SERVICE_URL=http://auth-service:8000 + - TRAINING_SERVICE_URL=http://training-service:8000 + - DATA_SERVICE_URL=http://data-service:8000 + + # JWT Configuration + - JWT_SECRET_KEY=${JWT_SECRET_KEY:-your-super-secret-jwt-key-change-in-production} + - JWT_ALGORITHM=HS256 + + # ML Configuration + - MODEL_STORAGE_PATH=/app/models + - PREDICTION_CACHE_TTL_HOURS=6 + + # Service Configuration + - SERVICE_NAME=forecasting-service + - SERVICE_VERSION=1.0.0 + - LOG_LEVEL=INFO + + ports: + - "8003:8000" + depends_on: + forecasting-db: + condition: service_healthy + redis: + condition: service_healthy + rabbitmq: + condition: service_healthy + auth-service: + condition: service_healthy + training-service: + condition: service_healthy + networks: + - bakery-network + volumes: + - ./services/forecasting:/app + - ./shared:/app/shared + - ./models:/app/models # Shared model storage + healthcheck: + test: ["CMD", "curl", "-f", "http://localhost:8000/health"] + interval: 30s + timeout: 10s + retries: 3 + + # Data Service - Enhanced with external API integration + data-service: + build: + context: . + dockerfile: ./services/data/Dockerfile + container_name: bakery-data-service + environment: + # Database + - DATABASE_URL=postgresql+asyncpg://data_user:data_pass123@data-db:5432/data_db + + # Redis for API caching + - REDIS_URL=redis://:redis_pass123@redis:6379/5 + + # Message Queue + - RABBITMQ_URL=amqp://bakery:forecast123@rabbitmq:5672/ + + # Service Discovery + - AUTH_SERVICE_URL=http://auth-service:8000 + - TENANT_SERVICE_URL=http://tenant-service:8000 + + # External API Keys + - AEMET_API_KEY=${AEMET_API_KEY:-eyJhbGciOiJIUzI1NiJ9.eyJzdWIiOiJ1YWxmYXJvQGdtYWlsLmNvbSIsImp0aSI6ImRjZWVmNTEwLTdmYzEtNGMxNy1hODZkLWQ4NzdlZDc5ZDllNyIsImlzcyI6IkFFTUVUIiwiaWF0IjoxNzUyODMwMDg3LCJ1c2VySWQiOiJkY2VlZjUxMC03ZmMxLTRjMTctYTg2ZC1kODc3ZWQ3OWQ5ZTciLCJyb2xlIjoiIn0.C047gaiEhWhH4ItDgkHSwg8HzKTzw87TOPRTRf8j-2w} + - MADRID_OPENDATA_API_KEY=${MADRID_OPENDATA_API_KEY:-your-madrid-opendata-key} + + # JWT Configuration + - JWT_SECRET_KEY=${JWT_SECRET_KEY:-your-super-secret-jwt-key-change-in-production} + - JWT_ALGORITHM=HS256 + + # Data Configuration + - WEATHER_CACHE_TTL_HOURS=1 + - TRAFFIC_CACHE_TTL_HOURS=1 + - DATA_RETENTION_DAYS=365 + + # Service Configuration + - SERVICE_NAME=data-service + - SERVICE_VERSION=1.0.0 + - LOG_LEVEL=INFO + + ports: + - "8004:8000" + depends_on: + data-db: + condition: service_healthy + redis: + condition: service_healthy + rabbitmq: + condition: service_healthy + auth-service: + condition: service_healthy + networks: + - bakery-network + volumes: + - ./services/data:/app + - ./shared:/app/shared + healthcheck: + test: ["CMD", "curl", "-f", "http://localhost:8000/health"] + interval: 30s + timeout: 10s + retries: 3 + + # Notification Service - Enhanced with WhatsApp and Email notification-service: build: - context: . # Build context is the project root + context: . 
dockerfile: ./services/notification/Dockerfile container_name: bakery-notification-service environment: + # Database - DATABASE_URL=postgresql+asyncpg://notification_user:notification_pass123@notification-db:5432/notification_db - - REDIS_URL=redis://redis:6379/5 + + # Redis for queue management + - REDIS_URL=redis://:redis_pass123@redis:6379/6 + + # Message Queue - RABBITMQ_URL=amqp://bakery:forecast123@rabbitmq:5672/ + + # Service Discovery - AUTH_SERVICE_URL=http://auth-service:8000 - - SMTP_HOST=smtp.gmail.com - - SMTP_PORT=587 - - SMTP_USER=your-email@gmail.com - - SMTP_PASSWORD=your-email-password - - WHATSAPP_API_KEY=your-whatsapp-api-key + - TENANT_SERVICE_URL=http://tenant-service:8000 + + # Email Configuration + - SMTP_HOST=${SMTP_HOST:-smtp.gmail.com} + - SMTP_PORT=${SMTP_PORT:-587} + - SMTP_USER=${SMTP_USER:-your-email@gmail.com} + - SMTP_PASSWORD=${SMTP_PASSWORD:-your-app-password} + - SMTP_FROM_NAME=Bakery Forecast + + # WhatsApp Configuration (Twilio) + - WHATSAPP_ACCOUNT_SID=${WHATSAPP_ACCOUNT_SID:-your-twilio-sid} + - WHATSAPP_AUTH_TOKEN=${WHATSAPP_AUTH_TOKEN:-your-twilio-token} + - WHATSAPP_FROM_NUMBER=${WHATSAPP_FROM_NUMBER:-whatsapp:+14155238886} + + # JWT Configuration + - JWT_SECRET_KEY=${JWT_SECRET_KEY:-your-super-secret-jwt-key-change-in-production} + - JWT_ALGORITHM=HS256 + + # Service Configuration - SERVICE_NAME=notification-service - SERVICE_VERSION=1.0.0 + - LOG_LEVEL=INFO + ports: - "8006:8000" depends_on: @@ -373,66 +569,75 @@ services: - bakery-network volumes: - ./services/notification:/app - - ./shared:/app/shared # Explicitly mount shared + - ./shared:/app/shared healthcheck: test: ["CMD", "curl", "-f", "http://localhost:8000/health"] interval: 30s timeout: 10s retries: 3 - # API Gateway - gateway: - build: - context: . 
# Build context is the project root - dockerfile: ./gateway/Dockerfile - container_name: bakery-gateway - environment: - - REDIS_URL=redis://redis:6379/6 - - AUTH_SERVICE_URL=http://auth-service:8000 - - TRAINING_SERVICE_URL=http://training-service:8000 - - FORECASTING_SERVICE_URL=http://forecasting-service:8000 - - DATA_SERVICE_URL=http://data-service:8000 - - TENANT_SERVICE_URL=http://tenant-service:8000 - - NOTIFICATION_SERVICE_URL=http://notification-service:8000 - - CORS_ORIGINS=http://localhost:3000,http://localhost:3001 - - SERVICE_NAME=gateway - - SERVICE_VERSION=1.0.0 + # ================================================================ + # MONITORING SERVICES + # ================================================================ + + # Prometheus - Metrics collection + prometheus: + image: prom/prometheus:latest + container_name: bakery-prometheus + command: + - '--config.file=/etc/prometheus/prometheus.yml' + - '--storage.tsdb.path=/prometheus' + - '--web.console.libraries=/etc/prometheus/console_libraries' + - '--web.console.templates=/etc/prometheus/consoles' + - '--storage.tsdb.retention.time=200h' + - '--web.enable-lifecycle' ports: - - "8000:8000" - depends_on: - auth-service: - condition: service_healthy - training-service: - condition: service_healthy - forecasting-service: - condition: service_healthy - data-service: - condition: service_healthy - tenant-service: - condition: service_healthy - notification-service: - condition: service_healthy + - "9090:9090" + volumes: + - ./infrastructure/monitoring/prometheus/prometheus.yml:/etc/prometheus/prometheus.yml + - prometheus_data:/prometheus networks: - bakery-network - volumes: - - ./gateway:/app - - ./shared:/app/shared # Explicitly mount shared - healthcheck: - test: ["CMD", "curl", "-f", "http://localhost:8000/health"] - interval: 30s - timeout: 10s - retries: 3 + depends_on: + - gateway + - auth-service + - tenant-service + - training-service + - forecasting-service + - data-service + - notification-service - # Dashboard Frontend + # Grafana - Metrics visualization + grafana: + image: grafana/grafana:latest + container_name: bakery-grafana + environment: + - GF_SECURITY_ADMIN_PASSWORD=admin123 + - GF_USERS_ALLOW_SIGN_UP=false + ports: + - "3002:3000" + volumes: + - grafana_data:/var/lib/grafana + - ./infrastructure/monitoring/grafana/dashboards:/etc/grafana/provisioning/dashboards + - ./infrastructure/monitoring/grafana/datasources:/etc/grafana/provisioning/datasources + networks: + - bakery-network + depends_on: + - prometheus + + # ================================================================ + # FRONTEND SERVICES (Optional for development) + # ================================================================ + + # React Dashboard dashboard: build: context: ./frontend dockerfile: Dockerfile.dev - container_name: bakery-dashboard + container_name: bakery-frontend environment: - - REACT_APP_API_URL=http://localhost:8000 - - REACT_APP_WS_URL=ws://localhost:8000 - - CHOKIDAR_USEPOLLING=true + - REACT_APP_API_URL=http://localhost:8000/api/v1 + - REACT_APP_WS_URL=ws://localhost:8000/ws ports: - "3000:3000" depends_on: @@ -442,95 +647,28 @@ services: volumes: - ./frontend:/app - /app/node_modules + command: npm start +# ================================================================ +# ENVIRONMENT VARIABLES TEMPLATE +# ================================================================ - - # Monitoring - Prometheus - prometheus: - image: prom/prometheus:latest - container_name: bakery-prometheus - ports: - - "9090:9090" - 
volumes: - - ./infrastructure/monitoring/prometheus:/etc/prometheus - - prometheus_data:/prometheus - command: - - '--config.file=/etc/prometheus/prometheus.yml' - - '--storage.tsdb.path=/prometheus' - - '--web.console.libraries=/usr/share/prometheus/console_libraries' - - '--web.console.templates=/usr/share/prometheus/consoles' - - '--web.enable-lifecycle' - networks: - - bakery-network - - # Monitoring - Grafana - grafana: - image: grafana/grafana:latest - container_name: bakery-grafana - ports: - - "3002:3000" - environment: - - GF_SECURITY_ADMIN_PASSWORD=admin123 - volumes: - - grafana_data:/var/lib/grafana - - ./infrastructure/monitoring/grafana:/etc/grafana/provisioning - depends_on: - - prometheus - networks: - - bakery-network - - # Log Aggregation - ELK Stack - elasticsearch: - image: elasticsearch:8.8.0 - container_name: bakery-elasticsearch - environment: - - discovery.type=single-node - - xpack.security.enabled=false - - "ES_JAVA_OPTS=-Xms512m -Xmx512m" - ports: - - "9200:9200" - volumes: - - elasticsearch_data:/usr/share/elasticsearch/data - networks: - - bakery-network - - logstash: - image: logstash:8.8.0 - container_name: bakery-logstash - volumes: - - ./infrastructure/monitoring/logstash:/usr/share/logstash/pipeline - ports: - - "5044:5000" - depends_on: - - elasticsearch - networks: - - bakery-network - - kibana: - image: kibana:8.8.0 - container_name: bakery-kibana - environment: - - ELASTICSEARCH_HOSTS=http://elasticsearch:9200 - ports: - - "5601:5601" - depends_on: - - elasticsearch - networks: - - bakery-network - -volumes: - rabbitmq_data: - redis_data: - auth_db_data: - training_db_data: - forecasting_db_data: - data_db_data: - tenant_db_data: - notification_db_data: - prometheus_data: - grafana_data: - elasticsearch_data: - -networks: - bakery-network: - driver: bridge \ No newline at end of file +# Create a .env file with these variables: +# +# # JWT Secret (CHANGE IN PRODUCTION!) 
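+# #   A strong random value of at least 32 characters is expected here;
+# #   one common way to generate it (an example, not mandated by this project):
+# #     openssl rand -hex 32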
+# JWT_SECRET_KEY=your-super-secret-jwt-key-change-in-production-min-32-chars +# +# # External API Keys +# AEMET_API_KEY=your-aemet-api-key-here +# MADRID_OPENDATA_API_KEY=your-madrid-opendata-key-here +# +# # Email Configuration (Gmail example) +# SMTP_HOST=smtp.gmail.com +# SMTP_PORT=587 +# SMTP_USER=your-email@gmail.com +# SMTP_PASSWORD=your-app-specific-password +# +# # WhatsApp/Twilio Configuration +# WHATSAPP_ACCOUNT_SID=your-twilio-account-sid +# WHATSAPP_AUTH_TOKEN=your-twilio-auth-token +# WHATSAPP_FROM_NUMBER=whatsapp:+14155238886 \ No newline at end of file diff --git a/gateway/app/middleware/auth.py b/gateway/app/middleware/auth.py index 6ada8b6c..ffaae85c 100644 --- a/gateway/app/middleware/auth.py +++ b/gateway/app/middleware/auth.py @@ -1,18 +1,24 @@ -import logging -from fastapi import Request +# gateway/app/middleware/auth.py - IMPROVED VERSION +""" +Enhanced Authentication Middleware for API Gateway +Implements proper token validation and tenant context extraction +""" + +import structlog +from fastapi import Request, HTTPException from fastapi.responses import JSONResponse from starlette.middleware.base import BaseHTTPMiddleware from starlette.responses import Response import httpx -from typing import Optional -import json +from typing import Optional, Dict, Any +import asyncio from app.core.config import settings from shared.auth.jwt_handler import JWTHandler -logger = logging.getLogger(__name__) +logger = structlog.get_logger() -# JWT handler +# JWT handler for local token validation jwt_handler = JWTHandler(settings.JWT_SECRET_KEY, settings.JWT_ALGORITHM) # Routes that don't require authentication @@ -25,98 +31,227 @@ PUBLIC_ROUTES = [ "/api/v1/auth/login", "/api/v1/auth/register", "/api/v1/auth/refresh", - "/api/v1/auth/verify" # ✅ Add verify to public routes + "/api/v1/auth/verify" ] class AuthMiddleware(BaseHTTPMiddleware): - """Authentication middleware with better error handling""" + """ + Enhanced Authentication Middleware following microservices best practices - async def dispatch(self, request: Request, call_next) -> Response: - """Process request with authentication""" + Responsibilities: + 1. Token validation (local first, then auth service) + 2. User context injection + 3. Tenant context extraction (per request) + 4. Rate limiting enforcement + 5. 
Request routing decisions + """ + + def __init__(self, app, redis_client=None): + super().__init__(app) + self.redis_client = redis_client # For caching and rate limiting - # Check if route requires authentication + async def dispatch(self, request: Request, call_next) -> Response: + """Process request with enhanced authentication""" + + # Skip authentication for public routes if self._is_public_route(request.url.path): return await call_next(request) - # Get token from header + # Extract and validate JWT token token = self._extract_token(request) if not token: - logger.warning(f"Missing token for {request.url.path}") + logger.warning(f"Missing token for protected route: {request.url.path}") return JSONResponse( status_code=401, content={"detail": "Authentication required"} ) - # Verify token - try: - # First try to verify token locally - payload = jwt_handler.verify_token(token) - - if payload: - # Validate required fields - required_fields = ["user_id", "email", "tenant_id"] - missing_fields = [field for field in required_fields if field not in payload] - - if missing_fields: - logger.warning(f"Token missing required fields: {missing_fields}") - return JSONResponse( - status_code=401, - content={"detail": f"Invalid token: missing {missing_fields}"} - ) - - # Add user info to request state - request.state.user = payload - logger.debug(f"Authenticated user: {payload.get('email')} (tenant: {payload.get('tenant_id')})") - return await call_next(request) - else: - # Token invalid or expired, try auth service verification - logger.info("Local token verification failed, trying auth service") - user_info = await self._verify_with_auth_service(token) - if user_info: - request.state.user = user_info - return await call_next(request) - else: - logger.warning("Auth service verification also failed") - return JSONResponse( - status_code=401, - content={"detail": "Invalid or expired token"} - ) - - except Exception as e: - logger.error(f"Authentication error: {e}") + # Verify token and get user context + user_context = await self._verify_token(token) + if not user_context: + logger.warning(f"Invalid token for route: {request.url.path}") return JSONResponse( status_code=401, - content={"detail": "Authentication failed"} + content={"detail": "Invalid or expired token"} ) + + # Extract tenant context from request (not from JWT) + tenant_id = self._extract_tenant_from_request(request) + + # Verify user has access to tenant (if tenant_id provided) + if tenant_id: + has_access = await self._verify_tenant_access(user_context["user_id"], tenant_id) + if not has_access: + logger.warning(f"User {user_context['email']} denied access to tenant {tenant_id}") + return JSONResponse( + status_code=403, + content={"detail": "Access denied to tenant"} + ) + request.state.tenant_id = tenant_id + + # Inject user context into request + request.state.user = user_context + request.state.authenticated = True + + # Add user context to forwarded requests + self._inject_auth_headers(request, user_context, tenant_id) + + logger.debug(f"Authenticated request: {user_context['email']} -> {request.url.path}") + + return await call_next(request) def _is_public_route(self, path: str) -> bool: - """Check if route is public""" + """Check if route requires authentication""" return any(path.startswith(route) for route in PUBLIC_ROUTES) def _extract_token(self, request: Request) -> Optional[str]: - """Extract JWT token from request""" + """Extract JWT token from Authorization header""" auth_header = request.headers.get("Authorization") if 
auth_header and auth_header.startswith("Bearer "): return auth_header.split(" ")[1] return None - async def _verify_with_auth_service(self, token: str) -> Optional[dict]: + def _extract_tenant_from_request(self, request: Request) -> Optional[str]: + """ + Extract tenant ID from request (NOT from JWT token) + + Priority order: + 1. X-Tenant-ID header + 2. tenant_id query parameter + 3. tenant_id in request path + """ + # Method 1: Header + tenant_id = request.headers.get("X-Tenant-ID") + if tenant_id: + return tenant_id + + # Method 2: Query parameter + tenant_id = request.query_params.get("tenant_id") + if tenant_id: + return tenant_id + + # Method 3: Path parameter (extract from URLs like /api/v1/tenants/{tenant_id}/...) + path_parts = request.url.path.split("/") + if "tenants" in path_parts: + try: + tenant_index = path_parts.index("tenants") + if tenant_index + 1 < len(path_parts): + return path_parts[tenant_index + 1] + except (ValueError, IndexError): + pass + + return None + + async def _verify_token(self, token: str) -> Optional[Dict[str, Any]]: + """ + Verify JWT token with fallback strategy: + 1. Local validation (fast) + 2. Auth service validation (authoritative) + 3. Cache valid tokens to reduce auth service calls + """ + + # Step 1: Try local JWT validation first (fast) + try: + payload = jwt_handler.verify_token(token) + if payload and self._validate_token_payload(payload): + logger.debug("Token validated locally") + return payload + except Exception as e: + logger.debug(f"Local token validation failed: {e}") + + # Step 2: Check cache for recently validated tokens + if self.redis_client: + try: + cached_user = await self._get_cached_user(token) + if cached_user: + logger.debug("Token found in cache") + return cached_user + except Exception as e: + logger.warning(f"Cache lookup failed: {e}") + + # Step 3: Verify with auth service (authoritative) + try: + user_context = await self._verify_with_auth_service(token) + if user_context: + # Cache successful validation + if self.redis_client: + await self._cache_user(token, user_context) + logger.debug("Token validated by auth service") + return user_context + except Exception as e: + logger.error(f"Auth service validation failed: {e}") + + return None + + def _validate_token_payload(self, payload: Dict[str, Any]) -> bool: + """Validate JWT payload has required fields""" + required_fields = ["user_id", "email", "exp"] + return all(field in payload for field in required_fields) + + async def _verify_with_auth_service(self, token: str) -> Optional[Dict[str, Any]]: """Verify token with auth service""" try: - async with httpx.AsyncClient(timeout=5.0) as client: + async with httpx.AsyncClient(timeout=3.0) as client: response = await client.post( f"{settings.AUTH_SERVICE_URL}/api/v1/auth/verify", headers={"Authorization": f"Bearer {token}"} ) if response.status_code == 200: - user_info = response.json() - logger.debug(f"Auth service verification successful: {user_info.get('email')}") - return user_info + return response.json() else: - logger.warning(f"Auth service verification failed: {response.status_code}") + logger.warning(f"Auth service returned {response.status_code}") return None + except asyncio.TimeoutError: + logger.error("Auth service timeout") + return None except Exception as e: - logger.error(f"Auth service verification failed: {e}") - return None \ No newline at end of file + logger.error(f"Auth service error: {e}") + return None + + async def _verify_tenant_access(self, user_id: str, tenant_id: str) -> bool: + """Verify user 
has access to specific tenant""" + try: + async with httpx.AsyncClient(timeout=3.0) as client: + response = await client.get( + f"{settings.TENANT_SERVICE_URL}/api/v1/tenants/{tenant_id}/access/{user_id}" + ) + return response.status_code == 200 + except Exception as e: + logger.error(f"Tenant access verification failed: {e}") + return False + + async def _get_cached_user(self, token: str) -> Optional[Dict[str, Any]]: + """Get user context from cache""" + if not self.redis_client: + return None + + cache_key = f"auth:token:{hash(token)}" + cached_data = await self.redis_client.get(cache_key) + if cached_data: + import json + return json.loads(cached_data) + return None + + async def _cache_user(self, token: str, user_context: Dict[str, Any], ttl: int = 300): + """Cache user context for 5 minutes""" + if not self.redis_client: + return + + cache_key = f"auth:token:{hash(token)}" + import json + await self.redis_client.setex(cache_key, ttl, json.dumps(user_context)) + + def _inject_auth_headers(self, request: Request, user_context: Dict[str, Any], tenant_id: Optional[str]): + """Inject authentication context into forwarded requests""" + # Add user context headers for downstream services + if hasattr(request, "headers"): + # Create mutable headers + headers = dict(request.headers) + headers["X-User-ID"] = user_context["user_id"] + headers["X-User-Email"] = user_context["email"] + if tenant_id: + headers["X-Tenant-ID"] = tenant_id + # Update request headers + request.scope["headers"] = [(k.lower().encode(), v.encode()) for k, v in headers.items()] diff --git a/infrastructure/monitoring/grafana/dashboards/dashboard.yml b/infrastructure/monitoring/grafana/dashboards/dashboard.yml new file mode 100644 index 00000000..cc47a55f --- /dev/null +++ b/infrastructure/monitoring/grafana/dashboards/dashboard.yml @@ -0,0 +1,11 @@ +apiVersion: 1 + +providers: + - name: 'Bakery Forecasting' + orgId: 1 + folder: '' + type: file + disableDeletion: false + updateIntervalSeconds: 10 + options: + path: /etc/grafana/provisioning/dashboards \ No newline at end of file diff --git a/infrastructure/monitoring/grafana/datasources/prometheus.yml b/infrastructure/monitoring/grafana/datasources/prometheus.yml new file mode 100644 index 00000000..f88db84c --- /dev/null +++ b/infrastructure/monitoring/grafana/datasources/prometheus.yml @@ -0,0 +1,9 @@ +apiVersion: 1 + +datasources: + - name: Prometheus + type: prometheus + access: proxy + url: http://prometheus:9090 + isDefault: true + editable: true \ No newline at end of file diff --git a/infrastructure/monitoring/prometheus/prometheus.yml b/infrastructure/monitoring/prometheus/prometheus.yml index 58cddaad..b699b724 100644 --- a/infrastructure/monitoring/prometheus/prometheus.yml +++ b/infrastructure/monitoring/prometheus/prometheus.yml @@ -1,31 +1,59 @@ +--- global: scrape_interval: 15s + evaluation_interval: 15s + +rule_files: + - "alerts.yml" scrape_configs: - job_name: 'gateway' static_configs: - - targets: ['gateway:8080'] + - targets: ['gateway:8000'] + metrics_path: '/metrics' + scrape_interval: 30s - job_name: 'auth-service' static_configs: - - targets: ['auth-service:8080'] - - - job_name: 'training-service' - static_configs: - - targets: ['training-service:8080'] - - - job_name: 'forecasting-service' - static_configs: - - targets: ['forecasting-service:8080'] - - - job_name: 'data-service' - static_configs: - - targets: ['data-service:8080'] + - targets: ['auth-service:8000'] + metrics_path: '/metrics' + scrape_interval: 30s - job_name: 'tenant-service' 
static_configs: - - targets: ['tenant-service:8080'] + - targets: ['tenant-service:8000'] + metrics_path: '/metrics' + scrape_interval: 30s + + - job_name: 'training-service' + static_configs: + - targets: ['training-service:8000'] + metrics_path: '/metrics' + scrape_interval: 30s + + - job_name: 'forecasting-service' + static_configs: + - targets: ['forecasting-service:8000'] + metrics_path: '/metrics' + scrape_interval: 30s + + - job_name: 'data-service' + static_configs: + - targets: ['data-service:8000'] + metrics_path: '/metrics' + scrape_interval: 30s - job_name: 'notification-service' static_configs: - - targets: ['notification-service:8080'] + - targets: ['notification-service:8000'] + metrics_path: '/metrics' + scrape_interval: 30s + + - job_name: 'redis' + static_configs: + - targets: ['redis:6379'] + + - job_name: 'rabbitmq' + static_configs: + - targets: ['rabbitmq:15692'] + diff --git a/services/auth/app/schemas/auth.py b/services/auth/app/schemas/auth.py index fdbdbf81..3d9d4351 100644 --- a/services/auth/app/schemas/auth.py +++ b/services/auth/app/schemas/auth.py @@ -1,35 +1,31 @@ +# services/auth/app/schemas/auth.py """ -Authentication schemas +Authentication schemas """ from pydantic import BaseModel, EmailStr, Field, validator from typing import Optional from datetime import datetime -from app.core.config import settings -from shared.utils.validation import validate_spanish_phone - class UserRegistration(BaseModel): """User registration schema""" email: EmailStr - password: str = Field(..., min_length=settings.PASSWORD_MIN_LENGTH) + password: str = Field(..., min_length=8) full_name: str = Field(..., min_length=2, max_length=100) phone: Optional[str] = None language: str = Field(default="es", pattern="^(es|en)$") @validator('password') def validate_password(cls, v): - """Validate password strength""" - from app.core.security import security_manager - if not security_manager.validate_password(v): - raise ValueError('Password does not meet security requirements') - return v - - @validator('phone') - def validate_phone(cls, v): - """Validate phone number""" - if v and not validate_spanish_phone(v): - raise ValueError('Invalid Spanish phone number') + """Basic password validation""" + if len(v) < 8: + raise ValueError('Password must be at least 8 characters') + if not any(c.isupper() for c in v): + raise ValueError('Password must contain uppercase letter') + if not any(c.islower() for c in v): + raise ValueError('Password must contain lowercase letter') + if not any(c.isdigit() for c in v): + raise ValueError('Password must contain number') return v class UserLogin(BaseModel): @@ -55,55 +51,29 @@ class UserResponse(BaseModel): full_name: str is_active: bool is_verified: bool - tenant_id: Optional[str] - role: str phone: Optional[str] language: str - timezone: str - created_at: Optional[datetime] + created_at: datetime last_login: Optional[datetime] + class Config: + from_attributes = True + class PasswordChangeRequest(BaseModel): """Password change request schema""" current_password: str - new_password: str = Field(..., min_length=settings.PASSWORD_MIN_LENGTH) + new_password: str = Field(..., min_length=8) @validator('new_password') def validate_new_password(cls, v): """Validate new password strength""" - from app.core.security import security_manager - if not security_manager.validate_password(v): - raise ValueError('New password does not meet security requirements') + if len(v) < 8: + raise ValueError('Password must be at least 8 characters') return v -class 
PasswordResetRequest(BaseModel): - """Password reset request schema""" - email: EmailStr - -class PasswordResetConfirm(BaseModel): - """Password reset confirmation schema""" - token: str - new_password: str = Field(..., min_length=settings.PASSWORD_MIN_LENGTH) - - @validator('new_password') - def validate_new_password(cls, v): - """Validate new password strength""" - from app.core.security import security_manager - if not security_manager.validate_password(v): - raise ValueError('New password does not meet security requirements') - return v - -class UserUpdate(BaseModel): - """User update schema""" - full_name: Optional[str] = Field(None, min_length=2, max_length=100) - phone: Optional[str] = None - language: Optional[str] = Field(None, pattern="^(es|en)$") - timezone: Optional[str] = None - tenant_id: Optional[str] = None - - @validator('phone') - def validate_phone(cls, v): - """Validate phone number""" - if v and not validate_spanish_phone(v): - raise ValueError('Invalid Spanish phone number') - return v \ No newline at end of file +class TokenVerificationResponse(BaseModel): + """Token verification response for other services""" + user_id: str + email: str + is_active: bool + expires_at: datetime \ No newline at end of file diff --git a/services/auth/requirements.txt b/services/auth/requirements.txt index a1c0cd55..7e3a17cd 100644 --- a/services/auth/requirements.txt +++ b/services/auth/requirements.txt @@ -17,3 +17,5 @@ python-json-logger==2.0.4 pytz==2023.3 python-logstash==0.4.8 structlog==23.2.0 +python-dotenv==1.0.0 + \ No newline at end of file diff --git a/services/tenant/app/api/tenants.py b/services/tenant/app/api/tenants.py new file mode 100644 index 00000000..a7de8909 --- /dev/null +++ b/services/tenant/app/api/tenants.py @@ -0,0 +1,167 @@ +# services/tenant/app/api/tenants.py +""" +Tenant API endpoints +""" + +from fastapi import APIRouter, Depends, HTTPException, status, Request +from sqlalchemy.ext.asyncio import AsyncSession +from typing import List +import structlog + +from app.core.database import get_db +from app.schemas.tenants import ( + BakeryRegistration, TenantResponse, TenantAccessResponse, + TenantUpdate, TenantMemberResponse +) +from app.services.tenant_service import TenantService +from shared.auth.decorators import require_authentication, get_current_user, get_current_tenant_id + +logger = structlog.get_logger() +router = APIRouter() + +@router.post("/bakeries", response_model=TenantResponse) +@require_authentication +async def register_bakery( + bakery_data: BakeryRegistration, + request: Request, + db: AsyncSession = Depends(get_db) +): + """Register a new bakery/tenant""" + user = get_current_user(request) + + try: + result = await TenantService.create_bakery(bakery_data, user["user_id"], db) + logger.info(f"Bakery registered: {bakery_data.name} by {user['email']}") + return result + + except Exception as e: + logger.error(f"Bakery registration failed: {e}") + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + detail="Bakery registration failed" + ) + +@router.get("/tenants/{tenant_id}/access/{user_id}", response_model=TenantAccessResponse) +async def verify_tenant_access( + tenant_id: str, + user_id: str, + db: AsyncSession = Depends(get_db) +): + """Verify if user has access to tenant - Called by Gateway""" + + try: + access_info = await TenantService.verify_user_access(user_id, tenant_id, db) + return access_info + + except Exception as e: + logger.error(f"Access verification failed: {e}") + raise HTTPException( + 
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + detail="Access verification failed" + ) + +@router.get("/users/{user_id}/tenants", response_model=List[TenantResponse]) +@require_authentication +async def get_user_tenants( + user_id: str, + request: Request, + db: AsyncSession = Depends(get_db) +): + """Get all tenants accessible by user""" + current_user = get_current_user(request) + + # Users can only see their own tenants + if current_user["user_id"] != user_id: + raise HTTPException( + status_code=status.HTTP_403_FORBIDDEN, + detail="Access denied" + ) + + try: + tenants = await TenantService.get_user_tenants(user_id, db) + return tenants + + except Exception as e: + logger.error(f"Failed to get user tenants: {e}") + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + detail="Failed to retrieve tenants" + ) + +@router.get("/tenants/{tenant_id}", response_model=TenantResponse) +@require_authentication +async def get_tenant( + tenant_id: str, + request: Request, + db: AsyncSession = Depends(get_db) +): + """Get tenant details""" + user = get_current_user(request) + + # Verify user has access to tenant + access = await TenantService.verify_user_access(user["user_id"], tenant_id, db) + if not access.has_access: + raise HTTPException( + status_code=status.HTTP_403_FORBIDDEN, + detail="Access denied to tenant" + ) + + tenant = await TenantService.get_tenant_by_id(tenant_id, db) + if not tenant: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail="Tenant not found" + ) + + return tenant + +@router.put("/tenants/{tenant_id}", response_model=TenantResponse) +@require_authentication +async def update_tenant( + tenant_id: str, + update_data: TenantUpdate, + request: Request, + db: AsyncSession = Depends(get_db) +): + """Update tenant information""" + user = get_current_user(request) + + try: + result = await TenantService.update_tenant(tenant_id, update_data, user["user_id"], db) + return result + + except HTTPException: + raise + except Exception as e: + logger.error(f"Tenant update failed: {e}") + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + detail="Tenant update failed" + ) + +@router.post("/tenants/{tenant_id}/members", response_model=TenantMemberResponse) +@require_authentication +async def add_team_member( + tenant_id: str, + user_id: str, + role: str, + request: Request, + db: AsyncSession = Depends(get_db) +): + """Add a team member to tenant""" + current_user = get_current_user(request) + + try: + result = await TenantService.add_team_member( + tenant_id, user_id, role, current_user["user_id"], db + ) + return result + + except HTTPException: + raise + except Exception as e: + logger.error(f"Add team member failed: {e}") + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + detail="Failed to add team member" + ) \ No newline at end of file diff --git a/services/tenant/app/main.py b/services/tenant/app/main.py index 653ac7be..5de394f2 100644 --- a/services/tenant/app/main.py +++ b/services/tenant/app/main.py @@ -1,5 +1,6 @@ +# services/tenant/app/main.py """ -uLutenant Service +Tenant Service FastAPI application """ import structlog @@ -7,23 +8,27 @@ from fastapi import FastAPI from fastapi.middleware.cors import CORSMiddleware from app.core.config import settings -from app.core.database import database_manager +from app.core.database import engine +from app.api import tenants from shared.monitoring.logging import setup_logging from shared.monitoring.metrics import MetricsCollector # 
Setup logging -setup_logging("tenant-service", "INFO") +setup_logging("tenant-service", settings.LOG_LEVEL) logger = structlog.get_logger() # Create FastAPI app app = FastAPI( - title="uLutenant Service", - description="uLutenant service for bakery forecasting", - version="1.0.0" + title="Tenant Management Service", + description="Multi-tenant bakery management service", + version="1.0.0", + docs_url="/docs", + redoc_url="/redoc" ) -# Initialize metrics collector -metrics_collector = MetricsCollector("tenant-service") +# Initialize metrics +metrics_collector = MetricsCollector("tenant_service") +app.state.metrics_collector = metrics_collector # CORS middleware app.add_middleware( @@ -34,18 +39,19 @@ app.add_middleware( allow_headers=["*"], ) +# Include routers +app.include_router(tenants.router, prefix="/api/v1", tags=["tenants"]) + @app.on_event("startup") async def startup_event(): - """Application startup""" - logger.info("Starting uLutenant Service") + """Initialize service on startup""" + logger.info("Starting Tenant Service...") - # Create database tables - await database_manager.create_tables() - - # Start metrics server - metrics_collector.start_metrics_server(8080) - - logger.info("uLutenant Service started successfully") +@app.on_event("shutdown") +async def shutdown_event(): + """Cleanup on shutdown""" + logger.info("Shutting down Tenant Service...") + await engine.dispose() @app.get("/health") async def health_check(): @@ -56,6 +62,11 @@ async def health_check(): "version": "1.0.0" } +@app.get("/metrics") +async def metrics(): + """Prometheus metrics endpoint""" + return metrics_collector.generate_latest() + if __name__ == "__main__": import uvicorn - uvicorn.run(app, host="0.0.0.0", port=8000) + uvicorn.run(app, host="0.0.0.0", port=8000) \ No newline at end of file diff --git a/services/tenant/app/models/tenants.py b/services/tenant/app/models/tenants.py new file mode 100644 index 00000000..89a8f199 --- /dev/null +++ b/services/tenant/app/models/tenants.py @@ -0,0 +1,73 @@ +# services/tenant/app/models/tenants.py +""" +Tenant models for bakery management +""" + +from sqlalchemy import Column, String, Boolean, DateTime, Float, ForeignKey, Text +from sqlalchemy.dialects.postgresql import UUID +from sqlalchemy.orm import relationship +from datetime import datetime +import uuid + +from shared.database.base import Base + +class Tenant(Base): + """Tenant/Bakery model""" + __tablename__ = "tenants" + + id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4) + name = Column(String(200), nullable=False) + subdomain = Column(String(100), unique=True) + business_type = Column(String(100), default="bakery") + + # Location info + address = Column(Text, nullable=False) + city = Column(String(100), default="Madrid") + postal_code = Column(String(10), nullable=False) + latitude = Column(Float) + longitude = Column(Float) + + # Contact info + phone = Column(String(20)) + email = Column(String(255)) + + # Status + is_active = Column(Boolean, default=True) + subscription_tier = Column(String(50), default="basic") + + # ML status + model_trained = Column(Boolean, default=False) + last_training_date = Column(DateTime) + + # Ownership + owner_id = Column(UUID(as_uuid=True), nullable=False, index=True) + + # Timestamps + created_at = Column(DateTime, default=datetime.utcnow) + updated_at = Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow) + + def __repr__(self): + return f"" + +class TenantMember(Base): + """Tenant membership model for team access""" + __tablename__ 
= "tenant_members" + + id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4) + tenant_id = Column(UUID(as_uuid=True), ForeignKey("tenants.id", ondelete="CASCADE"), nullable=False) + user_id = Column(UUID(as_uuid=True), nullable=False, index=True) + + # Role and permissions + role = Column(String(50), default="member") # owner, admin, member, viewer + permissions = Column(Text) # JSON string of permissions + + # Status + is_active = Column(Boolean, default=True) + invited_by = Column(UUID(as_uuid=True)) + invited_at = Column(DateTime, default=datetime.utcnow) + joined_at = Column(DateTime) + + created_at = Column(DateTime, default=datetime.utcnow) + + def __repr__(self): + return f"" \ No newline at end of file diff --git a/services/tenant/app/schemas/tenants.py b/services/tenant/app/schemas/tenants.py new file mode 100644 index 00000000..592f4f70 --- /dev/null +++ b/services/tenant/app/schemas/tenants.py @@ -0,0 +1,83 @@ +# services/tenant/app/schemas/tenants.py +""" +Tenant schemas +""" + +from pydantic import BaseModel, Field, validator +from typing import Optional, List, Dict, Any +from datetime import datetime +import re + +class BakeryRegistration(BaseModel): + """Bakery registration schema""" + name: str = Field(..., min_length=2, max_length=200) + address: str = Field(..., min_length=10, max_length=500) + city: str = Field(default="Madrid", max_length=100) + postal_code: str = Field(..., pattern=r"^\d{5}$") + phone: str = Field(..., min_length=9, max_length=20) + business_type: str = Field(default="bakery") + + @validator('phone') + def validate_spanish_phone(cls, v): + """Validate Spanish phone number""" + # Remove spaces and common separators + phone = re.sub(r'[\s\-\(\)]', '', v) + + # Spanish mobile: +34 6/7/8/9 + 8 digits + # Spanish landline: +34 9 + 8 digits + patterns = [ + r'^(\+34|0034|34)?[6789]\d{8}$', # Mobile + r'^(\+34|0034|34)?9\d{8}$', # Landline + ] + + if not any(re.match(pattern, phone) for pattern in patterns): + raise ValueError('Invalid Spanish phone number') + return v + + @validator('business_type') + def validate_business_type(cls, v): + valid_types = ['bakery', 'coffee_shop', 'pastry_shop', 'restaurant'] + if v not in valid_types: + raise ValueError(f'Business type must be one of: {valid_types}') + return v + +class TenantResponse(BaseModel): + """Tenant response schema""" + id: str + name: str + subdomain: Optional[str] + business_type: str + address: str + city: str + postal_code: str + phone: Optional[str] + is_active: bool + subscription_tier: str + model_trained: bool + last_training_date: Optional[datetime] + owner_id: str + created_at: datetime + + class Config: + from_attributes = True + +class TenantAccessResponse(BaseModel): + """Tenant access verification response""" + has_access: bool + role: str + permissions: List[str] + +class TenantMemberResponse(BaseModel): + """Tenant member response""" + id: str + user_id: str + role: str + is_active: bool + joined_at: Optional[datetime] + +class TenantUpdate(BaseModel): + """Tenant update schema""" + name: Optional[str] = Field(None, min_length=2, max_length=200) + address: Optional[str] = Field(None, min_length=10, max_length=500) + phone: Optional[str] = None + business_type: Optional[str] = None \ No newline at end of file diff --git a/services/tenant/app/services/messaging.py b/services/tenant/app/services/messaging.py new file mode 100644 index 00000000..84b0f49e --- /dev/null +++ b/services/tenant/app/services/messaging.py @@ -0,0 +1,41 @@ +# 
+"""
+Tenant service messaging for event publishing
+"""
+
+from datetime import datetime
+import structlog
+from shared.messaging.rabbitmq import RabbitMQPublisher
+
+logger = structlog.get_logger()
+
+async def publish_tenant_created(tenant_id: str, owner_id: str, tenant_name: str):
+    """Publish tenant created event"""
+    try:
+        publisher = RabbitMQPublisher()
+        await publisher.publish_event(
+            "tenant.created",
+            {
+                "tenant_id": tenant_id,
+                "owner_id": owner_id,
+                "tenant_name": tenant_name,
+                "timestamp": datetime.utcnow().isoformat()
+            }
+        )
+    except Exception as e:
+        logger.error(f"Failed to publish tenant.created event: {e}")
+
+async def publish_member_added(tenant_id: str, user_id: str, role: str):
+    """Publish member added event"""
+    try:
+        publisher = RabbitMQPublisher()
+        await publisher.publish_event(
+            "tenant.member.added",
+            {
+                "tenant_id": tenant_id,
+                "user_id": user_id,
+                "role": role,
+                "timestamp": datetime.utcnow().isoformat()
+            }
+        )
+    except Exception as e:
+        logger.error(f"Failed to publish tenant.member.added event: {e}")
\ No newline at end of file
diff --git a/services/tenant/app/services/tenant_service.py b/services/tenant/app/services/tenant_service.py
new file mode 100644
index 00000000..e69de29b
diff --git a/shared/auth/decorators.py b/shared/auth/decorators.py
index 53095a15..5080531e 100644
--- a/shared/auth/decorators.py
+++ b/shared/auth/decorators.py
@@ -1,41 +1,76 @@
+# shared/auth/decorators.py - NEW FILE
 """
-Authentication decorators for FastAPI
+Authentication decorators for microservices
 """
 
 from functools import wraps
-from fastapi import HTTPException, Depends
-from fastapi.security import HTTPBearer
-import httpx
-import logging
+from fastapi import HTTPException, status, Request
+from typing import Callable, Optional
 
-logger = logging.getLogger(__name__)
-
-security = HTTPBearer()
-
-def verify_service_token(auth_service_url: str):
-    """Verify service token with auth service"""
+def require_authentication(func: Callable) -> Callable:
+    """Decorator to require authentication - assumes gateway has validated token"""
 
-    async def verify_token(token: str = Depends(security)):
-        try:
-            async with httpx.AsyncClient() as client:
-                response = await client.post(
-                    f"{auth_service_url}/verify",
-                    headers={"Authorization": f"Bearer {token.credentials}"}
-                )
-
-                if response.status_code == 200:
-                    return response.json()
-                else:
-                    raise HTTPException(
-                        status_code=401,
-                        detail="Invalid authentication credentials"
-                    )
-
-        except httpx.RequestError as e:
-            logger.error(f"Auth service unavailable: {e}")
+    @wraps(func)
+    async def wrapper(*args, **kwargs):
+        # Find request object in arguments
+        request = None
+        for arg in args:
+            if isinstance(arg, Request):
+                request = arg
+                break
+
+        if not request:
+            # Check kwargs
+            request = kwargs.get('request')
+
+        if not request:
             raise HTTPException(
-                status_code=503,
-                detail="Authentication service unavailable"
+                status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
+                detail="Request object not found"
             )
+
+        # Check if user context exists (set by gateway)
+        if not hasattr(request.state, 'user') or not request.state.user:
+            raise HTTPException(
+                status_code=status.HTTP_401_UNAUTHORIZED,
+                detail="Authentication required"
+            )
+
+        return await func(*args, **kwargs)
 
-    return verify_token
\ No newline at end of file
+    return wrapper
+
+def require_tenant_access(func: Callable) -> Callable:
+    """Decorator to require tenant access"""
+
+    @wraps(func)
+    async def wrapper(*args, **kwargs):
+        # Find request object
+        
request = None + for arg in args: + if isinstance(arg, Request): + request = arg + break + + if not request or not hasattr(request.state, 'tenant_id'): + raise HTTPException( + status_code=status.HTTP_403_FORBIDDEN, + detail="Tenant access required" + ) + + return await func(*args, **kwargs) + + return wrapper + +def get_current_user(request: Request) -> dict: + """Get current user from request state""" + if not hasattr(request.state, 'user'): + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail="User not authenticated" + ) + return request.state.user + +def get_current_tenant_id(request: Request) -> Optional[str]: + """Get current tenant ID from request state""" + return getattr(request.state, 'tenant_id', None) diff --git a/shared/auth/jwt_handler.py b/shared/auth/jwt_handler.py index ba6c9017..e97be37e 100644 --- a/shared/auth/jwt_handler.py +++ b/shared/auth/jwt_handler.py @@ -1,58 +1,97 @@ +# shared/auth/jwt_handler.py - IMPROVED VERSION """ -Shared JWT Authentication Handler -Used across all microservices for consistent authentication +Enhanced JWT Handler with proper token structure """ -from jose import jwt +from jose import jwt, JWTError from datetime import datetime, timedelta, timezone from typing import Optional, Dict, Any -import logging +import structlog -logger = logging.getLogger(__name__) +logger = structlog.get_logger() class JWTHandler: - """JWT token handling for microservices""" + """Enhanced JWT token handling""" def __init__(self, secret_key: str, algorithm: str = "HS256"): self.secret_key = secret_key self.algorithm = algorithm - def create_access_token(self, data: Dict[str, Any], expires_delta: Optional[timedelta] = None) -> str: - """Create JWT access token""" - to_encode = data.copy() + def create_access_token(self, user_data: Dict[str, Any], expires_delta: Optional[timedelta] = None) -> str: + """ + Create JWT access token WITHOUT tenant_id + Tenant context is determined per request, not stored in token + """ + to_encode = { + "sub": user_data["user_id"], # Standard JWT subject + "user_id": user_data["user_id"], + "email": user_data["email"], + "type": "access" + } if expires_delta: - expire = datetime.now(timezone.utc) + expires_delta + expire = datetime.now(timezone.utc) + expires_delta else: - expire = datetime.now(timezone.utc) + timedelta(minutes=30) + expire = datetime.now(timezone.utc) + timedelta(minutes=30) - to_encode.update({"exp": expire, "type": "access"}) + to_encode.update({ + "exp": expire, + "iat": datetime.now(timezone.utc) + }) encoded_jwt = jwt.encode(to_encode, self.secret_key, algorithm=self.algorithm) + logger.debug(f"Created access token for user {user_data['email']}") return encoded_jwt - def create_refresh_token(self, data: Dict[str, Any], expires_delta: Optional[timedelta] = None) -> str: + def create_refresh_token(self, user_data: Dict[str, Any], expires_delta: Optional[timedelta] = None) -> str: """Create JWT refresh token""" - to_encode = data.copy() + to_encode = { + "sub": user_data["user_id"], + "user_id": user_data["user_id"], + "email": user_data["email"], + "type": "refresh" + } if expires_delta: - expire = datetime.now(timezone.utc) + expires_delta + expire = datetime.now(timezone.utc) + expires_delta else: - expire = datetime.now(timezone.utc) + timedelta(days=7) + expire = datetime.now(timezone.utc) + timedelta(days=7) - to_encode.update({"exp": expire, "type": "refresh"}) + to_encode.update({ + "exp": expire, + "iat": datetime.now(timezone.utc) + }) encoded_jwt = jwt.encode(to_encode, 
self.secret_key, algorithm=self.algorithm) return encoded_jwt def verify_token(self, token: str) -> Optional[Dict[str, Any]]: - """Verify and decode JWT token""" + """Verify and decode JWT token with comprehensive validation""" try: payload = jwt.decode(token, self.secret_key, algorithms=[self.algorithm]) + + # Validate required fields + required_fields = ["user_id", "email", "exp", "type"] + if not all(field in payload for field in required_fields): + logger.warning(f"Token missing required fields: {required_fields}") + return None + + # Validate token type + if payload.get("type") not in ["access", "refresh"]: + logger.warning(f"Invalid token type: {payload.get('type')}") + return None + + # Check expiration (jose handles this, but double-check) + exp = payload.get("exp") + if exp and datetime.fromtimestamp(exp, tz=timezone.utc) < datetime.now(timezone.utc): + logger.warning("Token has expired") + return None + return payload - except jwt.ExpiredSignatureError: - logger.warning("Token has expired") + + except JWTError as e: + logger.warning(f"JWT validation failed: {e}") return None - except jwt.JWTError: - logger.warning("Invalid token") + except Exception as e: + logger.error(f"Unexpected error validating token: {e}") return None \ No newline at end of file
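
A quick validation sketch for the new BakeryRegistration schema may help reviewers; the import path and sample values below are assumptions for illustration, not part of the patch.

# Validation sketch for BakeryRegistration (illustrative; import path assumes running inside the tenant service).
from pydantic import ValidationError

from app.schemas.tenants import BakeryRegistration

ok = BakeryRegistration(
    name="Panadería Sol",
    address="Calle Mayor 12, 28013 Madrid",
    postal_code="28013",
    phone="+34 612 345 678",  # separators are stripped, then matched against the mobile/landline patterns
)

try:
    BakeryRegistration(
        name="Panadería Sol",
        address="Calle Mayor 12, 28013 Madrid",
        postal_code="28013",
        phone="+1 555 010 0000",  # not a Spanish number, so validate_spanish_phone raises
    )
except ValidationError as exc:
    print(exc.errors()[0]["msg"])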
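
The shared decorators assume the API gateway has already validated the JWT and attached the user context to request.state. A minimal downstream route using them might look like the sketch below; the route path and response shape are illustrative assumptions.

# Sketch of a downstream route protected by the shared decorators (illustrative).
from fastapi import FastAPI, Request

from shared.auth.decorators import get_current_tenant_id, get_current_user, require_authentication

app = FastAPI()

@app.get("/api/v1/whoami")
@require_authentication
async def whoami(request: Request):
    # request.state.user is expected to be populated by the gateway middleware
    user = get_current_user(request)
    return {
        "user_id": user["user_id"],
        "email": user["email"],
        "tenant_id": get_current_tenant_id(request),  # None when no tenant context was resolved
    }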
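
The enhanced JWTHandler issues identity-only tokens and verifies their structure; a minimal exercise of the API defined in this patch follows, with the secret key and user values as placeholders.

# Usage sketch for JWTHandler; the secret key and user data are placeholder values.
from datetime import timedelta

from shared.auth.jwt_handler import JWTHandler

handler = JWTHandler(secret_key="change-me-to-a-secret-of-at-least-32-characters")
user_data = {"user_id": "11111111-2222-3333-4444-555555555555", "email": "owner@example.com"}

# Tokens carry user identity only; tenant context is resolved per request at the gateway.
access_token = handler.create_access_token(user_data, expires_delta=timedelta(minutes=30))
refresh_token = handler.create_refresh_token(user_data)

payload = handler.verify_token(access_token)
assert payload is not None and payload["type"] == "access"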