From 347ff51bd7a573ee6c541d188116a850c48328ea Mon Sep 17 00:00:00 2001
From: Urtzi Alfaro
Date: Thu, 17 Jul 2025 13:09:24 +0200
Subject: [PATCH] Initial microservices setup from artifacts

---
 .gitignore                                    | 112 +++
 docker-compose.yml                            | 547 +++++++++++
 gateway/app/__init__.py                       |   0
 gateway/app/core/__init__.py                  |   0
 gateway/app/core/config.py                    |  52 ++
 gateway/app/core/service_discovery.py         | 122 +++
 gateway/app/main.py                           | 131 +++
 gateway/app/middleware/__init__.py            |   0
 gateway/app/middleware/auth.py                | 101 ++
 gateway/app/middleware/logging.py             |  48 +
 gateway/app/middleware/rate_limit.py          |  85 ++
 gateway/app/routes/__init__.py                |   0
 gateway/app/routes/auth.py                    | 161 ++++
 gateway/app/routes/training.py                | 166 ++++
 gateway/requirements.txt                      |  13 +
 gateway/shared/auth/__init__.py               |   0
 gateway/shared/auth/decorators.py             |  41 +
 gateway/shared/auth/jwt_handler.py            |  58 ++
 gateway/shared/database/__init__.py           |   0
 gateway/shared/database/base.py               |  56 ++
 gateway/shared/messaging/__init__.py          |   0
 gateway/shared/messaging/events.py            |  73 ++
 gateway/shared/messaging/rabbitmq.py          |  96 ++
 gateway/shared/monitoring/__init__.py         |   0
 gateway/shared/monitoring/logging.py          |  77 ++
 gateway/shared/monitoring/metrics.py          | 112 +++
 gateway/shared/utils/__init__.py              |   0
 gateway/shared/utils/datetime_utils.py        |  71 ++
 gateway/shared/utils/validation.py            |  67 ++
 .../monitoring/prometheus/prometheus.yml      |  31 +
 scripts/deploy.sh                             |  18 +
 scripts/setup.sh                              | 879 ++++++++++++++++++
 scripts/test.sh                               |  15 +
 services/auth/app/__init__.py                 |   0
 services/auth/app/api/__init__.py             |   0
 services/auth/app/api/auth.py                 | 124 +++
 services/auth/app/core/__init__.py            |   0
 services/auth/app/core/config.py              |  47 +
 services/auth/app/core/database.py            |  12 +
 services/auth/app/core/security.py            | 153 +++
 services/auth/app/main.py                     |  83 ++
 services/auth/app/schemas/__init__.py         |   0
 services/auth/app/schemas/auth.py             | 108 +++
 services/auth/app/services/__init__.py        |   0
 services/auth/app/services/messaging.py       |  46 +
 services/auth/requirements.txt                |  17 +
 services/auth/shared/auth/__init__.py         |   0
 services/auth/shared/auth/decorators.py       |  41 +
 services/auth/shared/auth/jwt_handler.py      |  58 ++
 services/auth/shared/database/__init__.py     |   0
 services/auth/shared/database/base.py         |  56 ++
 services/auth/shared/messaging/__init__.py    |   0
 services/auth/shared/messaging/events.py      |  73 ++
 services/auth/shared/messaging/rabbitmq.py    |  96 ++
 services/auth/shared/monitoring/__init__.py   |   0
 services/auth/shared/monitoring/logging.py    |  77 ++
 services/auth/shared/monitoring/metrics.py    | 112 +++
 services/auth/shared/utils/__init__.py        |   0
 services/auth/shared/utils/datetime_utils.py  |  71 ++
 services/auth/shared/utils/validation.py      |  67 ++
 services/data/Dockerfile                      |  31 +
 services/data/app/__init__.py                 |   0
 services/data/app/api/__init__.py             |   0
 services/data/app/core/__init__.py            |   0
 services/data/app/core/config.py              |  32 +
 services/data/app/core/database.py            |  12 +
 services/data/app/main.py                     |  61 ++
 services/data/app/schemas/__init__.py         |   0
 services/data/app/services/__init__.py        |   0
 services/data/requirements.txt                |  13 +
 services/data/shared/auth/__init__.py         |   0
 services/data/shared/auth/decorators.py       |  41 +
 services/data/shared/auth/jwt_handler.py      |  58 ++
 services/data/shared/database/__init__.py     |   0
 services/data/shared/database/base.py         |  56 ++
 services/data/shared/messaging/__init__.py    |   0
 services/data/shared/messaging/events.py      |  73 ++
 services/data/shared/messaging/rabbitmq.py    |  96 ++
 services/data/shared/monitoring/__init__.py   |   0
 services/data/shared/monitoring/logging.py    |  77 ++
 services/data/shared/monitoring/metrics.py    | 112 +++
 services/data/shared/utils/__init__.py        |   0
 services/data/shared/utils/datetime_utils.py  |  71 ++
 services/data/shared/utils/validation.py      |  67 ++
 services/forecasting/Dockerfile               |  31 +
 services/forecasting/app/__init__.py          |   0
 services/forecasting/app/api/__init__.py      |   0
 services/forecasting/app/core/__init__.py     |   0
 services/forecasting/app/core/config.py       |  32 +
 services/forecasting/app/core/database.py     |  12 +
 services/forecasting/app/main.py              |  61 ++
 services/forecasting/app/schemas/__init__.py  |   0
 services/forecasting/app/services/__init__.py |   0
 services/forecasting/requirements.txt         |  13 +
 services/forecasting/shared/auth/__init__.py  |   0
 .../forecasting/shared/auth/decorators.py     |  41 +
 .../forecasting/shared/auth/jwt_handler.py    |  58 ++
 .../forecasting/shared/database/__init__.py   |   0
 services/forecasting/shared/database/base.py  |  56 ++
 .../forecasting/shared/messaging/__init__.py  |   0
 .../forecasting/shared/messaging/events.py    |  73 ++
 .../forecasting/shared/messaging/rabbitmq.py  |  96 ++
 .../forecasting/shared/monitoring/__init__.py |   0
 .../forecasting/shared/monitoring/logging.py  |  77 ++
 .../forecasting/shared/monitoring/metrics.py  | 112 +++
 services/forecasting/shared/utils/__init__.py |   0
 .../shared/utils/datetime_utils.py            |  71 ++
 .../forecasting/shared/utils/validation.py    |  67 ++
 services/notification/Dockerfile              |  31 +
 services/notification/app/__init__.py         |   0
 services/notification/app/api/__init__.py     |   0
 services/notification/app/core/__init__.py    |   0
 services/notification/app/core/config.py      |  32 +
 services/notification/app/core/database.py    |  12 +
 services/notification/app/main.py             |  61 ++
 services/notification/app/schemas/__init__.py |   0
 .../notification/app/services/__init__.py     |   0
 services/notification/requirements.txt        |  13 +
 services/notification/shared/auth/__init__.py |   0
 .../notification/shared/auth/decorators.py    |  41 +
 .../notification/shared/auth/jwt_handler.py   |  58 ++
 .../notification/shared/database/__init__.py  |   0
 services/notification/shared/database/base.py |  56 ++
 .../notification/shared/messaging/__init__.py |   0
 .../notification/shared/messaging/events.py   |  73 ++
 .../notification/shared/messaging/rabbitmq.py |  96 ++
 .../shared/monitoring/__init__.py             |   0
 .../notification/shared/monitoring/logging.py |  77 ++
 .../notification/shared/monitoring/metrics.py | 112 +++
 .../notification/shared/utils/__init__.py     |   0
 .../shared/utils/datetime_utils.py            |  71 ++
 .../notification/shared/utils/validation.py   |  67 ++
 services/tenant/Dockerfile                    |  31 +
 services/tenant/app/__init__.py               |   0
 services/tenant/app/api/__init__.py           |   0
 services/tenant/app/core/__init__.py          |   0
 services/tenant/app/core/config.py            |  32 +
 services/tenant/app/core/database.py          |  12 +
 services/tenant/app/main.py                   |  61 ++
 services/tenant/app/schemas/__init__.py       |   0
 services/tenant/app/services/__init__.py      |   0
 services/tenant/requirements.txt              |  13 +
 services/tenant/shared/auth/__init__.py       |   0
 services/tenant/shared/auth/decorators.py     |  41 +
 services/tenant/shared/auth/jwt_handler.py    |  58 ++
 services/tenant/shared/database/__init__.py   |   0
 services/tenant/shared/database/base.py       |  56 ++
 services/tenant/shared/messaging/__init__.py  |   0
 services/tenant/shared/messaging/events.py    |  73 ++
 services/tenant/shared/messaging/rabbitmq.py  |  96 ++
 services/tenant/shared/monitoring/__init__.py |   0
 services/tenant/shared/monitoring/logging.py  |  77 ++
 services/tenant/shared/monitoring/metrics.py  | 112 +++
 services/tenant/shared/utils/__init__.py      |   0
 .../tenant/shared/utils/datetime_utils.py     |  71 ++
 services/tenant/shared/utils/validation.py    |  67 ++
 services/training/app/__init__.py             |   0
 services/training/app/api/__init__.py         |   0
 services/training/app/api/models.py           |  33 +
 services/training/app/api/training.py         |  77 ++
 services/training/app/core/__init__.py        |   0
 services/training/app/core/auth.py            |  38 +
 services/training/app/core/config.py          |  44 +
 services/training/app/core/database.py        |  12 +
 services/training/app/main.py                 |  81 ++
 services/training/app/ml/__init__.py          |   0
 services/training/app/ml/trainer.py           | 174 ++++
 services/training/app/schemas/__init__.py     |   0
 services/training/app/schemas/training.py     |  91 ++
 services/training/app/services/__init__.py    |   0
 services/training/app/services/messaging.py   |  50 +
 services/training/requirements.txt            |  84 ++
 services/training/shared/auth/__init__.py     |   0
 services/training/shared/auth/decorators.py   |  41 +
 services/training/shared/auth/jwt_handler.py  |  58 ++
 services/training/shared/database/__init__.py |   0
 services/training/shared/database/base.py     |  56 ++
 .../training/shared/messaging/__init__.py     |   0
 services/training/shared/messaging/events.py  |  73 ++
 .../training/shared/messaging/rabbitmq.py     |  96 ++
 .../training/shared/monitoring/__init__.py    |   0
 .../training/shared/monitoring/logging.py     |  77 ++
 .../training/shared/monitoring/metrics.py     | 112 +++
 services/training/shared/utils/__init__.py    |   0
 .../training/shared/utils/datetime_utils.py   |  71 ++
 services/training/shared/utils/validation.py  |  67 ++
 shared/auth/__init__.py                       |   0
 shared/auth/decorators.py                     |  41 +
 shared/auth/jwt_handler.py                    |  58 ++
 shared/database/__init__.py                   |   0
 shared/database/base.py                       |  56 ++
 shared/messaging/__init__.py                  |   0
 shared/messaging/events.py                    |  73 ++
 shared/messaging/rabbitmq.py                  |  96 ++
 shared/monitoring/__init__.py                 |   0
 shared/monitoring/logging.py                  |  77 ++
 shared/monitoring/metrics.py                  | 112 +++
 shared/utils/__init__.py                      |   0
 shared/utils/datetime_utils.py                |  71 ++
 shared/utils/validation.py                    |  67 ++
 200 files changed, 9559 insertions(+)
 create mode 100644 .gitignore
 create mode 100644 docker-compose.yml
 create mode 100644 gateway/app/__init__.py
 create mode 100644 gateway/app/core/__init__.py
 create mode 100644 gateway/app/core/config.py
 create mode 100644 gateway/app/core/service_discovery.py
 create mode 100644 gateway/app/main.py
 create mode 100644 gateway/app/middleware/__init__.py
 create mode 100644 gateway/app/middleware/auth.py
 create mode 100644 gateway/app/middleware/logging.py
 create mode 100644 gateway/app/middleware/rate_limit.py
 create mode 100644 gateway/app/routes/__init__.py
 create mode 100644 gateway/app/routes/auth.py
 create mode 100644 gateway/app/routes/training.py
 create mode 100644 gateway/requirements.txt
 create mode 100644 gateway/shared/auth/__init__.py
 create mode 100644 gateway/shared/auth/decorators.py
 create mode 100644 gateway/shared/auth/jwt_handler.py
 create mode 100644 gateway/shared/database/__init__.py
 create mode 100644 gateway/shared/database/base.py
 create mode 100644 gateway/shared/messaging/__init__.py
 create mode 100644 gateway/shared/messaging/events.py
 create mode 100644 gateway/shared/messaging/rabbitmq.py
 create mode 100644 gateway/shared/monitoring/__init__.py
 create mode 100644 gateway/shared/monitoring/logging.py
 create mode 100644 gateway/shared/monitoring/metrics.py
 create mode 100644 gateway/shared/utils/__init__.py
 create mode 100644 gateway/shared/utils/datetime_utils.py
 create mode 100644 gateway/shared/utils/validation.py
 create mode 100644 infrastructure/monitoring/prometheus/prometheus.yml
 create mode 100755 scripts/deploy.sh
 create mode 100755 scripts/setup.sh
 create mode 100755 scripts/test.sh
 create mode 100644 services/auth/app/__init__.py
 create mode 100644 services/auth/app/api/__init__.py
 create mode 100644 services/auth/app/api/auth.py
 create mode 100644 services/auth/app/core/__init__.py
 create mode 100644 services/auth/app/core/config.py
 create mode 100644 services/auth/app/core/database.py
 create mode 100644 services/auth/app/core/security.py
 create mode 100644 services/auth/app/main.py
 create mode 100644 services/auth/app/schemas/__init__.py
 create mode 100644 services/auth/app/schemas/auth.py
 create mode 100644 services/auth/app/services/__init__.py
 create mode 100644 services/auth/app/services/messaging.py
 create mode 100644 services/auth/requirements.txt
 create mode 100644 services/auth/shared/auth/__init__.py
 create mode 100644 services/auth/shared/auth/decorators.py
 create mode 100644 services/auth/shared/auth/jwt_handler.py
 create mode 100644 services/auth/shared/database/__init__.py
 create mode 100644 services/auth/shared/database/base.py
 create mode 100644 services/auth/shared/messaging/__init__.py
 create mode 100644 services/auth/shared/messaging/events.py
 create mode 100644 services/auth/shared/messaging/rabbitmq.py
 create mode 100644 services/auth/shared/monitoring/__init__.py
 create mode 100644 services/auth/shared/monitoring/logging.py
 create mode 100644 services/auth/shared/monitoring/metrics.py
 create mode 100644 services/auth/shared/utils/__init__.py
 create mode 100644 services/auth/shared/utils/datetime_utils.py
 create mode 100644 services/auth/shared/utils/validation.py
 create mode 100644 services/data/Dockerfile
 create mode 100644 services/data/app/__init__.py
 create mode 100644 services/data/app/api/__init__.py
 create mode 100644 services/data/app/core/__init__.py
 create mode 100644 services/data/app/core/config.py
 create mode 100644 services/data/app/core/database.py
 create mode 100644 services/data/app/main.py
 create mode 100644 services/data/app/schemas/__init__.py
 create mode 100644 services/data/app/services/__init__.py
 create mode 100644 services/data/requirements.txt
 create mode 100644 services/data/shared/auth/__init__.py
 create mode 100644 services/data/shared/auth/decorators.py
 create mode 100644 services/data/shared/auth/jwt_handler.py
 create mode 100644 services/data/shared/database/__init__.py
 create mode 100644 services/data/shared/database/base.py
 create mode 100644 services/data/shared/messaging/__init__.py
 create mode 100644 services/data/shared/messaging/events.py
 create mode 100644 services/data/shared/messaging/rabbitmq.py
 create mode 100644 services/data/shared/monitoring/__init__.py
 create mode 100644 services/data/shared/monitoring/logging.py
 create mode 100644 services/data/shared/monitoring/metrics.py
 create mode 100644 services/data/shared/utils/__init__.py
 create mode 100644 services/data/shared/utils/datetime_utils.py
 create mode 100644 services/data/shared/utils/validation.py
 create mode 100644 services/forecasting/Dockerfile
 create mode 100644 services/forecasting/app/__init__.py
 create mode 100644 services/forecasting/app/api/__init__.py
 create mode 100644 services/forecasting/app/core/__init__.py
 create mode 100644 services/forecasting/app/core/config.py
 create mode 100644 services/forecasting/app/core/database.py
 create mode 100644 services/forecasting/app/main.py
 create mode 100644 services/forecasting/app/schemas/__init__.py
 create mode 100644 services/forecasting/app/services/__init__.py
 create mode 100644 services/forecasting/requirements.txt
 create mode 100644 services/forecasting/shared/auth/__init__.py
 create mode 100644 services/forecasting/shared/auth/decorators.py
 create mode 100644 services/forecasting/shared/auth/jwt_handler.py
 create mode 100644 services/forecasting/shared/database/__init__.py
 create mode 100644 services/forecasting/shared/database/base.py
 create mode 100644 services/forecasting/shared/messaging/__init__.py
 create mode 100644 services/forecasting/shared/messaging/events.py
 create mode 100644 services/forecasting/shared/messaging/rabbitmq.py
 create mode 100644 services/forecasting/shared/monitoring/__init__.py
 create mode 100644 services/forecasting/shared/monitoring/logging.py
 create mode 100644 services/forecasting/shared/monitoring/metrics.py
 create mode 100644 services/forecasting/shared/utils/__init__.py
 create mode 100644 services/forecasting/shared/utils/datetime_utils.py
 create mode 100644 services/forecasting/shared/utils/validation.py
 create mode 100644 services/notification/Dockerfile
 create mode 100644 services/notification/app/__init__.py
 create mode 100644 services/notification/app/api/__init__.py
 create mode 100644 services/notification/app/core/__init__.py
 create mode 100644 services/notification/app/core/config.py
 create mode 100644 services/notification/app/core/database.py
 create mode 100644 services/notification/app/main.py
 create mode 100644 services/notification/app/schemas/__init__.py
 create mode 100644 services/notification/app/services/__init__.py
 create mode 100644 services/notification/requirements.txt
 create mode 100644 services/notification/shared/auth/__init__.py
 create mode 100644 services/notification/shared/auth/decorators.py
 create mode 100644 services/notification/shared/auth/jwt_handler.py
 create mode 100644 services/notification/shared/database/__init__.py
 create mode 100644 services/notification/shared/database/base.py
 create mode 100644 services/notification/shared/messaging/__init__.py
 create mode 100644 services/notification/shared/messaging/events.py
 create mode 100644 services/notification/shared/messaging/rabbitmq.py
 create mode 100644 services/notification/shared/monitoring/__init__.py
 create mode 100644 services/notification/shared/monitoring/logging.py
 create mode 100644 services/notification/shared/monitoring/metrics.py
 create mode 100644 services/notification/shared/utils/__init__.py
 create mode 100644 services/notification/shared/utils/datetime_utils.py
 create mode 100644 services/notification/shared/utils/validation.py
 create mode 100644 services/tenant/Dockerfile
 create mode 100644 services/tenant/app/__init__.py
 create mode 100644 services/tenant/app/api/__init__.py
 create mode 100644 services/tenant/app/core/__init__.py
 create mode 100644 services/tenant/app/core/config.py
 create mode 100644 services/tenant/app/core/database.py
 create mode 100644 services/tenant/app/main.py
 create mode 100644 services/tenant/app/schemas/__init__.py
 create mode 100644 services/tenant/app/services/__init__.py
 create mode 100644 services/tenant/requirements.txt
 create mode 100644 services/tenant/shared/auth/__init__.py
 create mode 100644 services/tenant/shared/auth/decorators.py
 create mode 100644 services/tenant/shared/auth/jwt_handler.py
 create mode 100644 services/tenant/shared/database/__init__.py
 create mode 100644 services/tenant/shared/database/base.py
 create mode 100644 services/tenant/shared/messaging/__init__.py
 create mode 100644 services/tenant/shared/messaging/events.py
 create mode 100644 services/tenant/shared/messaging/rabbitmq.py
 create mode 100644 services/tenant/shared/monitoring/__init__.py
 create mode 100644 services/tenant/shared/monitoring/logging.py
 create mode 100644 services/tenant/shared/monitoring/metrics.py
 create mode 100644 services/tenant/shared/utils/__init__.py
 create mode 100644 services/tenant/shared/utils/datetime_utils.py
 create mode 100644 services/tenant/shared/utils/validation.py
 create mode 100644 services/training/app/__init__.py
 create mode 100644 services/training/app/api/__init__.py
 create mode 100644 services/training/app/api/models.py
 create mode 100644 services/training/app/api/training.py
 create mode 100644 services/training/app/core/__init__.py
 create mode 100644 services/training/app/core/auth.py
 create mode 100644 services/training/app/core/config.py
 create mode 100644 services/training/app/core/database.py
 create mode 100644 services/training/app/main.py
 create mode 100644 services/training/app/ml/__init__.py
 create mode 100644 services/training/app/ml/trainer.py
 create mode 100644 services/training/app/schemas/__init__.py
 create mode 100644 services/training/app/schemas/training.py
 create mode 100644 services/training/app/services/__init__.py
 create mode 100644 services/training/app/services/messaging.py
 create mode 100644 services/training/requirements.txt
 create mode 100644 services/training/shared/auth/__init__.py
 create mode 100644 services/training/shared/auth/decorators.py
 create mode 100644 services/training/shared/auth/jwt_handler.py
 create mode 100644 services/training/shared/database/__init__.py
 create mode 100644 services/training/shared/database/base.py
 create mode 100644 services/training/shared/messaging/__init__.py
 create mode 100644 services/training/shared/messaging/events.py
 create mode 100644 services/training/shared/messaging/rabbitmq.py
 create mode 100644 services/training/shared/monitoring/__init__.py
 create mode 100644 services/training/shared/monitoring/logging.py
 create mode 100644 services/training/shared/monitoring/metrics.py
 create mode 100644 services/training/shared/utils/__init__.py
 create mode 100644 services/training/shared/utils/datetime_utils.py
 create mode 100644 services/training/shared/utils/validation.py
 create mode 100644 shared/auth/__init__.py
 create mode 100644 shared/auth/decorators.py
 create mode 100644 shared/auth/jwt_handler.py
 create mode 100644 shared/database/__init__.py
 create mode 100644 shared/database/base.py
 create mode 100644 shared/messaging/__init__.py
 create mode 100644 shared/messaging/events.py
 create mode 100644 shared/messaging/rabbitmq.py
 create mode 100644 shared/monitoring/__init__.py
 create mode 100644 shared/monitoring/logging.py
 create mode 100644 shared/monitoring/metrics.py
 create mode 100644 shared/utils/__init__.py
 create mode 100644 shared/utils/datetime_utils.py
 create mode 100644 shared/utils/validation.py

diff --git a/.gitignore b/.gitignore
new file mode 100644
index 00000000..dfba4255
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,112 @@
+# Environment
+.env
+.env.local
+.env.development.local
+.env.test.local
+.env.production.local
+
+# Python
+__pycache__/
+*.py[cod]
+*$py.class
+*.so
+.Python
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+MANIFEST
+.pytest_cache/
+.coverage
+.coverage.*
+htmlcov/
+.tox/
+.nox/
+.hypothesis/
+.mypy_cache/
+.dmypy.json
+dmypy.json
+.pyre/
+
+# Virtual Environment
+venv/
+ENV/
+env/
+.venv
+
+# Node
+node_modules/
+npm-debug.log*
+yarn-debug.log*
+yarn-error.log*
+.pnpm-debug.log*
+.npm
+.eslintcache
+.next
+out/
+build/
+dist/
+
+# IDE
+.vscode/
+.idea/
+*.swp
+*.swo
+*~
+.DS_Store
+
+# Logs
+logs/
+*.log
+
+# Database
+*.db
+*.sqlite
+*.sqlite3
+
+# ML Models
+*.pkl
+*.joblib
+*.h5
+models/
+
+# Data
+data/external/
+data/processed/
+*.csv
+*.xlsx
+
+# Docker
+.docker/
+
+# Infrastructure
+*.tfstate
+*.tfstate.backup
+.terraform/
+.terraform.lock.hcl
+
+# Kubernetes
+kubeconfig
+*.yaml.bak
+
+# Monitoring
+prometheus_data/
+grafana_data/
+elasticsearch_data/
+
+# Artifacts (from Claude)
+*_service.py
+*_libraries.py
+*.md
+setup_scripts.sh

diff --git a/docker-compose.yml b/docker-compose.yml
new file mode 100644
index 00000000..2312131d
--- /dev/null
+++ b/docker-compose.yml
@@ -0,0 +1,547 @@
+# docker-compose.yml - Development Environment
+version: '3.8'
+
+services:
+  # Message Broker
+  rabbitmq:
+    image: rabbitmq:3-management-alpine
+    container_name: bakery-rabbitmq
+    hostname: rabbitmq
+    ports:
+      - "5672:5672"
+      - "15672:15672"
+    environment:
+      - RABBITMQ_DEFAULT_USER=bakery
+      - RABBITMQ_DEFAULT_PASS=forecast123
+      - RABBITMQ_DEFAULT_VHOST=/
+    volumes:
+      - rabbitmq_data:/var/lib/rabbitmq
+    networks:
+      - bakery-network
+    healthcheck:
+      test: ["CMD", "rabbitmq-diagnostics", "ping"]
+      interval: 30s
+      timeout: 10s
+      retries: 3
+
+  # Cache & Session Store
+  redis:
+    image: redis:7-alpine
+    container_name: bakery-redis
+    ports:
+      - "6379:6379"
+    volumes:
+      - redis_data:/data
+    networks:
+      - bakery-network
+    command: redis-server --appendonly yes
+    healthcheck:
+      test: ["CMD", "redis-cli", "ping"]
+      interval: 30s
+      timeout: 10s
+      retries: 3
+
+  # Auth Service Database
+  auth-db:
+    image: postgres:15-alpine
+    container_name: bakery-auth-db
+    environment:
+      - POSTGRES_DB=auth_db
+      - POSTGRES_USER=auth_user
+      - POSTGRES_PASSWORD=auth_pass123
+    volumes:
+      - auth_db_data:/var/lib/postgresql/data
+    ports:
+      - "5432:5432"
+    networks:
+      - bakery-network
+    healthcheck:
+      test: ["CMD-SHELL", "pg_isready -U auth_user -d auth_db"]
+      interval: 30s
+      timeout: 10s
+      retries: 3
+
+  # Training Service Database
+  training-db:
+    image: postgres:15-alpine
+    container_name: bakery-training-db
+    environment:
+      - POSTGRES_DB=training_db
+      - POSTGRES_USER=training_user
+      - POSTGRES_PASSWORD=training_pass123
+    volumes:
+      - training_db_data:/var/lib/postgresql/data
+    ports:
+      - "5433:5432"
+    networks:
+      - bakery-network
+    healthcheck:
+      test: ["CMD-SHELL", "pg_isready -U training_user -d training_db"]
+      interval: 30s
+      timeout: 10s
+      retries: 3
+
+  # Forecasting Service Database
+  forecasting-db:
+    image: postgres:15-alpine
+    container_name: bakery-forecasting-db
+    environment:
+      - POSTGRES_DB=forecasting_db
+      - POSTGRES_USER=forecasting_user
+      - POSTGRES_PASSWORD=forecasting_pass123
+    volumes:
+      - forecasting_db_data:/var/lib/postgresql/data
+    ports:
+      - "5434:5432"
+    networks:
+      - bakery-network
+    healthcheck:
+      test: ["CMD-SHELL", "pg_isready -U forecasting_user -d forecasting_db"]
+      interval: 30s
+      timeout: 10s
+      retries: 3
+
+  # Data Service Database
+  data-db:
+    image: postgres:15-alpine
+    container_name: bakery-data-db
+    environment:
+      - POSTGRES_DB=data_db
+      - POSTGRES_USER=data_user
+      - POSTGRES_PASSWORD=data_pass123
+    volumes:
+      - data_db_data:/var/lib/postgresql/data
+    ports:
+      - "5435:5432"
+    networks:
+      - bakery-network
+    healthcheck:
+      test: ["CMD-SHELL", "pg_isready -U data_user -d data_db"]
+      interval: 30s
+      timeout: 10s
+      retries: 3
+
+  # Tenant Service Database
+  tenant-db:
+    image: postgres:15-alpine
+    container_name: bakery-tenant-db
+    environment:
+      - POSTGRES_DB=tenant_db
+      - POSTGRES_USER=tenant_user
+      - POSTGRES_PASSWORD=tenant_pass123
+    volumes:
+      - tenant_db_data:/var/lib/postgresql/data
+    ports:
+      - "5436:5432"
+    networks:
+      - bakery-network
+    healthcheck:
+      test: ["CMD-SHELL", "pg_isready -U tenant_user -d tenant_db"]
+      interval: 30s
+      timeout: 10s
+      retries: 3
+
+  # Notification Service Database
+  notification-db:
+    image: postgres:15-alpine
+    container_name: bakery-notification-db
+    environment:
+      - POSTGRES_DB=notification_db
+      - POSTGRES_USER=notification_user
+      - POSTGRES_PASSWORD=notification_pass123
+    volumes:
+      - notification_db_data:/var/lib/postgresql/data
+    ports:
+      - "5437:5432"
+    networks:
+      - bakery-network
+    healthcheck:
+      test: ["CMD-SHELL", "pg_isready -U notification_user -d notification_db"]
+      interval: 30s
+      timeout: 10s
+      retries: 3
+
+  # Authentication Service
+  auth-service:
+    build:
+      context: ./services/auth
+      dockerfile: Dockerfile
+    container_name: bakery-auth-service
+    environment:
+      - DATABASE_URL=postgresql+asyncpg://auth_user:auth_pass123@auth-db:5432/auth_db
+      - REDIS_URL=redis://redis:6379/0
+      - JWT_SECRET_KEY=your-super-secret-jwt-key-change-in-production
+      - JWT_ACCESS_TOKEN_EXPIRE_MINUTES=30
+      - JWT_REFRESH_TOKEN_EXPIRE_DAYS=7
+      - RABBITMQ_URL=amqp://bakery:forecast123@rabbitmq:5672/
+      - SERVICE_NAME=auth-service
+      - SERVICE_VERSION=1.0.0
+    ports:
+      - "8001:8000"
+    depends_on:
+      auth-db:
+        condition: service_healthy
+      redis:
+        condition: service_healthy
+      rabbitmq:
+        condition: service_healthy
+    networks:
+      - bakery-network
+    volumes:
+      - ./services/auth:/app
+    healthcheck:
+      test: ["CMD", "curl", "-f", "http://localhost:8000/health"]
+      interval: 30s
+      timeout: 10s
+      retries: 3
+
+  # Training Service
+  training-service:
+    build:
+      context: ./services/training
+      dockerfile: Dockerfile
+    container_name: bakery-training-service
+    environment:
+      - DATABASE_URL=postgresql+asyncpg://training_user:training_pass123@training-db:5432/training_db
+      - REDIS_URL=redis://redis:6379/1
+      - RABBITMQ_URL=amqp://bakery:forecast123@rabbitmq:5672/
+      - AUTH_SERVICE_URL=http://auth-service:8000
+      - DATA_SERVICE_URL=http://data-service:8000
+      - SERVICE_NAME=training-service
+      - SERVICE_VERSION=1.0.0
+    ports:
+      - "8002:8000"
+    depends_on:
+      training-db:
+        condition: service_healthy
+      redis:
+        condition: service_healthy
+      rabbitmq:
+        condition: service_healthy
+      auth-service:
+        condition: service_healthy
+    networks:
+      - bakery-network
+    volumes:
+      - ./services/training:/app
+    healthcheck:
+      test: ["CMD", "curl", "-f", "http://localhost:8000/health"]
+      interval: 30s
+      timeout: 10s
+      retries: 3
+
+  # Forecasting Service
+  forecasting-service:
+    build:
+      context: ./services/forecasting
+      dockerfile: Dockerfile
+    container_name: bakery-forecasting-service
+    environment:
+      - DATABASE_URL=postgresql+asyncpg://forecasting_user:forecasting_pass123@forecasting-db:5432/forecasting_db
+      - REDIS_URL=redis://redis:6379/2
+      - RABBITMQ_URL=amqp://bakery:forecast123@rabbitmq:5672/
+      - AUTH_SERVICE_URL=http://auth-service:8000
+      - TRAINING_SERVICE_URL=http://training-service:8000
+      - DATA_SERVICE_URL=http://data-service:8000
+      - SERVICE_NAME=forecasting-service
+      - SERVICE_VERSION=1.0.0
+    ports:
+      - "8003:8000"
+    depends_on:
+      forecasting-db:
+        condition: service_healthy
+      redis:
+        condition: service_healthy
+      rabbitmq:
+        condition: service_healthy
+      auth-service:
+        condition: service_healthy
+    networks:
+      - bakery-network
+    volumes:
+      - ./services/forecasting:/app
+    healthcheck:
+      test: ["CMD", "curl", "-f", "http://localhost:8000/health"]
+      interval: 30s
+      timeout: 10s
+      retries: 3
+
+  # Data Service
+  data-service:
+    build:
+      context: ./services/data
+      dockerfile: Dockerfile
+    container_name: bakery-data-service
+    environment:
+      - DATABASE_URL=postgresql+asyncpg://data_user:data_pass123@data-db:5432/data_db
+      - REDIS_URL=redis://redis:6379/3
+      - RABBITMQ_URL=amqp://bakery:forecast123@rabbitmq:5672/
+      - AUTH_SERVICE_URL=http://auth-service:8000
+      - AEMET_API_KEY=your-aemet-api-key-here
+      - MADRID_OPENDATA_API_KEY=your-madrid-opendata-key-here
+      - SERVICE_NAME=data-service
+      - SERVICE_VERSION=1.0.0
+    ports:
+      - "8004:8000"
+    depends_on:
+      data-db:
+        condition: service_healthy
+      redis:
+        condition: service_healthy
+      rabbitmq:
+        condition: service_healthy
+      auth-service:
+        condition: service_healthy
+    networks:
+      - bakery-network
+    volumes:
+      - ./services/data:/app
+    healthcheck:
+      test: ["CMD", "curl", "-f", "http://localhost:8000/health"]
+      interval: 30s
+      timeout: 10s
+      retries: 3
+
+  # Tenant Service
+  tenant-service:
+    build:
+      context: ./services/tenant
+      dockerfile: Dockerfile
+    container_name: bakery-tenant-service
+    environment:
+      - DATABASE_URL=postgresql+asyncpg://tenant_user:tenant_pass123@tenant-db:5432/tenant_db
+      - REDIS_URL=redis://redis:6379/4
+      - RABBITMQ_URL=amqp://bakery:forecast123@rabbitmq:5672/
+      - AUTH_SERVICE_URL=http://auth-service:8000
+      - SERVICE_NAME=tenant-service
+      - SERVICE_VERSION=1.0.0
+    ports:
+      - "8005:8000"
+    depends_on:
+      tenant-db:
+        condition: service_healthy
+      redis:
+        condition: service_healthy
+      rabbitmq:
+        condition: service_healthy
+      auth-service:
+        condition: service_healthy
+    networks:
+      - bakery-network
+    volumes:
+      - ./services/tenant:/app
+    healthcheck:
+      test: ["CMD", "curl", "-f", "http://localhost:8000/health"]
+      interval: 30s
+      timeout: 10s
+      retries: 3
+
+  # Notification Service
+  notification-service:
+    build:
+      context: ./services/notification
+      dockerfile: Dockerfile
+    container_name: bakery-notification-service
+    environment:
+      - DATABASE_URL=postgresql+asyncpg://notification_user:notification_pass123@notification-db:5432/notification_db
+      - REDIS_URL=redis://redis:6379/5
+      - RABBITMQ_URL=amqp://bakery:forecast123@rabbitmq:5672/
+      - AUTH_SERVICE_URL=http://auth-service:8000
+      - SMTP_HOST=smtp.gmail.com
+      - SMTP_PORT=587
+      - SMTP_USER=your-email@gmail.com
+      - SMTP_PASSWORD=your-email-password
+      - WHATSAPP_API_KEY=your-whatsapp-api-key
+      - SERVICE_NAME=notification-service
+      - SERVICE_VERSION=1.0.0
+    ports:
+      - "8006:8000"
+    depends_on:
+      notification-db:
+        condition: service_healthy
+      redis:
+        condition: service_healthy
+      rabbitmq:
+        condition: service_healthy
+      auth-service:
+        condition: service_healthy
+    networks:
+      - bakery-network
+    volumes:
+      - ./services/notification:/app
+    healthcheck:
+      test: ["CMD", "curl", "-f", "http://localhost:8000/health"]
+      interval: 30s
+      timeout: 10s
+      retries: 3
+
+  # API Gateway
+  gateway:
+    build:
+      context: ./gateway
+      dockerfile: Dockerfile
+    container_name: bakery-gateway
+    environment:
+      - REDIS_URL=redis://redis:6379/6
+      - AUTH_SERVICE_URL=http://auth-service:8000
+      - TRAINING_SERVICE_URL=http://training-service:8000
+      - FORECASTING_SERVICE_URL=http://forecasting-service:8000
+      - DATA_SERVICE_URL=http://data-service:8000
+      - TENANT_SERVICE_URL=http://tenant-service:8000
+      - NOTIFICATION_SERVICE_URL=http://notification-service:8000
+      - CORS_ORIGINS=http://localhost:3000,http://localhost:3001
+      - SERVICE_NAME=gateway
+      - SERVICE_VERSION=1.0.0
+    ports:
+      - "8000:8000"
+    depends_on:
+      auth-service:
+        condition: service_healthy
+      training-service:
+        condition: service_healthy
+      forecasting-service:
+        condition: service_healthy
+      data-service:
+        condition: service_healthy
+      tenant-service:
+        condition: service_healthy
+      notification-service:
+        condition: service_healthy
+    networks:
+      - bakery-network
+    volumes:
+      - ./gateway:/app
+    healthcheck:
+      test: ["CMD", "curl", "-f", "http://localhost:8000/health"]
+      interval: 30s
+      timeout: 10s
+      retries: 3
+
+  # Dashboard Frontend
+  dashboard:
+    build:
+      context: ./frontend/dashboard
+      dockerfile: Dockerfile
+    container_name: bakery-dashboard
+    environment:
+      - REACT_APP_API_URL=http://localhost:8000
+      - REACT_APP_WS_URL=ws://localhost:8000
+      - CHOKIDAR_USEPOLLING=true
+    ports:
+      - "3000:3000"
+    depends_on:
+      - gateway
+    networks:
+      - bakery-network
+    volumes:
+      - ./frontend/dashboard:/app
+      - /app/node_modules
+
+  # Marketing Site
+  marketing:
+    build:
+      context: ./frontend/marketing
+      dockerfile: Dockerfile
+    container_name: bakery-marketing
+    environment:
+      - NEXT_PUBLIC_API_URL=http://localhost:8000
+      - NEXT_PUBLIC_SITE_URL=http://localhost:3001
+    ports:
+      - "3001:3000"
+    depends_on:
+      - gateway
+    networks:
+      - bakery-network
+    volumes:
+      - ./frontend/marketing:/app
+      - /app/node_modules
+
+  # Monitoring - Prometheus
+  prometheus:
+    image: prom/prometheus:latest
+    container_name: bakery-prometheus
+    ports:
+      - "9090:9090"
+    volumes:
+      - ./infrastructure/monitoring/prometheus:/etc/prometheus
+      - prometheus_data:/prometheus
+    command:
+      - '--config.file=/etc/prometheus/prometheus.yml'
+      - '--storage.tsdb.path=/prometheus'
+      - '--web.console.libraries=/usr/share/prometheus/console_libraries'
+      - '--web.console.templates=/usr/share/prometheus/consoles'
+      - '--web.enable-lifecycle'
+    networks:
+      - bakery-network
+
+  # Monitoring - Grafana
+  grafana:
+    image: grafana/grafana:latest
+    container_name: bakery-grafana
+    ports:
+      - "3002:3000"
+    environment:
+      - GF_SECURITY_ADMIN_PASSWORD=admin123
+    volumes:
+      - grafana_data:/var/lib/grafana
+      - ./infrastructure/monitoring/grafana:/etc/grafana/provisioning
+    depends_on:
+      - prometheus
+    networks:
+      - bakery-network
+
+  # Log Aggregation - ELK Stack
+  elasticsearch:
+    image: elasticsearch:8.8.0
+    container_name: bakery-elasticsearch
+    environment:
+      - discovery.type=single-node
+      - xpack.security.enabled=false
+      - "ES_JAVA_OPTS=-Xms512m -Xmx512m"
+    ports:
+      - "9200:9200"
+    volumes:
+      - elasticsearch_data:/usr/share/elasticsearch/data
+    networks:
+      - bakery-network
+
+  logstash:
+    image: logstash:8.8.0
+    container_name: bakery-logstash
+    volumes:
+      - ./infrastructure/monitoring/logstash:/usr/share/logstash/pipeline
+    ports:
+      - "5000:5000"
+    depends_on:
+      - elasticsearch
+    networks:
+      - bakery-network
+
+  kibana:
+    image: kibana:8.8.0
+    container_name: bakery-kibana
+    environment:
+      - ELASTICSEARCH_HOSTS=http://elasticsearch:9200
+    ports:
+      - "5601:5601"
+    depends_on:
+      - elasticsearch
+    networks:
+      - bakery-network
+
+volumes:
+  rabbitmq_data:
+  redis_data:
+  auth_db_data:
+  training_db_data:
+  forecasting_db_data:
+  data_db_data:
+  tenant_db_data:
+  notification_db_data:
+  prometheus_data:
+  grafana_data:
+  elasticsearch_data:
+
+networks:
+  bakery-network:
+    driver: bridge
\ No newline at end of file

diff --git a/gateway/app/__init__.py b/gateway/app/__init__.py
new file mode 100644
index 00000000..e69de29b

diff --git a/gateway/app/core/__init__.py b/gateway/app/core/__init__.py
new file mode 100644
index 00000000..e69de29b

diff --git a/gateway/app/core/config.py b/gateway/app/core/config.py
new file mode 100644
index 00000000..3af037ff
--- /dev/null
+++ b/gateway/app/core/config.py
@@ -0,0 +1,52 @@
+"""
+Gateway configuration
+"""
+
+import os
+from typing import List, Dict
+from pydantic_settings import BaseSettings
+
+class Settings(BaseSettings):
+    """Application settings"""
+
+    # Basic settings
+    APP_NAME: str = "Bakery Forecasting Gateway"
+    VERSION: str = "1.0.0"
+    DEBUG: bool = os.getenv("DEBUG", "False").lower() == "true"
+    LOG_LEVEL: str = os.getenv("LOG_LEVEL", "INFO")
+
+    # CORS settings
+    CORS_ORIGINS: List[str] = os.getenv("CORS_ORIGINS", "http://localhost:3000,http://localhost:3001").split(",")
+
+    # Service URLs
+    AUTH_SERVICE_URL: str = os.getenv("AUTH_SERVICE_URL", "http://auth-service:8000")
+    TRAINING_SERVICE_URL: str = os.getenv("TRAINING_SERVICE_URL", "http://training-service:8000")
+    FORECASTING_SERVICE_URL: str = os.getenv("FORECASTING_SERVICE_URL", "http://forecasting-service:8000")
+    DATA_SERVICE_URL: str = os.getenv("DATA_SERVICE_URL", "http://data-service:8000")
+    TENANT_SERVICE_URL: str = os.getenv("TENANT_SERVICE_URL", "http://tenant-service:8000")
+    NOTIFICATION_SERVICE_URL: str = os.getenv("NOTIFICATION_SERVICE_URL", "http://notification-service:8000")
+
+    # Redis settings
+    REDIS_URL: str = os.getenv("REDIS_URL", "redis://redis:6379/6")
+
+    # Rate limiting
+    RATE_LIMIT_REQUESTS: int = int(os.getenv("RATE_LIMIT_REQUESTS", "100"))
+    RATE_LIMIT_WINDOW: int = int(os.getenv("RATE_LIMIT_WINDOW", "60"))
+
+    # JWT settings
+    JWT_SECRET_KEY: str = os.getenv("JWT_SECRET_KEY", "your-secret-key-change-in-production")
+    JWT_ALGORITHM: str = os.getenv("JWT_ALGORITHM", "HS256")
+
+    @property
+    def SERVICES(self) -> Dict[str, str]:
+        """Service registry"""
+        return {
+            "auth": self.AUTH_SERVICE_URL,
+            "training": self.TRAINING_SERVICE_URL,
+            "forecasting": self.FORECASTING_SERVICE_URL,
+            "data": self.DATA_SERVICE_URL,
+            "tenant": self.TENANT_SERVICE_URL,
+            "notification": self.NOTIFICATION_SERVICE_URL
+        }
+
+settings = Settings()
\ No newline at end of file

diff --git a/gateway/app/core/service_discovery.py b/gateway/app/core/service_discovery.py
new file mode 100644
index 00000000..3bce2bfd
--- /dev/null
+++ b/gateway/app/core/service_discovery.py
@@ -0,0 +1,122 @@
+"""
+Service discovery for microservices
+"""
+
+import asyncio
+import logging
+from typing import Dict, List, Optional
+import httpx
+import redis.asyncio as redis
+from datetime import datetime, timedelta
+
+from app.core.config import settings
+
+logger = logging.getLogger(__name__)
+
+class ServiceDiscovery:
+    """Service discovery and health checking"""
+
+    def __init__(self):
+        self.redis_client = redis.from_url(settings.REDIS_URL)
+        self.services = settings.SERVICES
+        self.health_check_interval = 30  # seconds
+        self.health_check_task = None
+
+    async def initialize(self):
+        """Initialize service discovery"""
+        logger.info("Initializing service discovery")
+
+        # Start health check task
+        self.health_check_task = asyncio.create_task(self._health_check_loop())
+
+        # Initial health check
+        await self._check_all_services()
+
+    async def cleanup(self):
+        """Cleanup service discovery"""
+        if self.health_check_task:
+            self.health_check_task.cancel()
+            try:
+                await self.health_check_task
+            except asyncio.CancelledError:
+                pass
+
+        await self.redis_client.close()
+
+    async def get_service_url(self, service_name: str) -> Optional[str]:
+        """Get service URL"""
+        return self.services.get(service_name)
+
+    async def get_healthy_services(self) -> List[str]:
+        """Get list of healthy services"""
+        healthy_services = []
+
+        for service_name in self.services:
+            is_healthy = await self._is_service_healthy(service_name)
+            if is_healthy:
+                healthy_services.append(service_name)
+
+        return healthy_services
+
+    async def _health_check_loop(self):
+        """Continuous health check loop"""
+        while True:
+            try:
+                await self._check_all_services()
+                await asyncio.sleep(self.health_check_interval)
+            except asyncio.CancelledError:
+                break
+            except Exception as e:
+                logger.error(f"Health check error: {e}")
+                await asyncio.sleep(self.health_check_interval)
+
+    async def _check_all_services(self):
+        """Check health of all services"""
+        for service_name, service_url in self.services.items():
+            try:
+                is_healthy = await self._check_service_health(service_url)
+                await self._update_service_health(service_name, is_healthy)
+            except Exception as e:
+                logger.error(f"Health check failed for {service_name}: {e}")
+                await self._update_service_health(service_name, False)
+
+    async def _check_service_health(self, service_url: str) -> bool:
+        """Check individual service health"""
+        try:
+            async with httpx.AsyncClient(timeout=5.0) as client:
+                response = await client.get(f"{service_url}/health")
+                return response.status_code == 200
+        except Exception as e:
+            logger.warning(f"Service health check failed: {e}")
+            return False
+
+    async def _update_service_health(self, service_name: str, is_healthy: bool):
+        """Update service health status in Redis"""
+        try:
+            key = f"service_health:{service_name}"
+            value = {
+                "healthy": str(is_healthy),
+                "last_check": datetime.utcnow().isoformat(),
+                "url": self.services[service_name]
+            }
+
+            await self.redis_client.hset(key, mapping=value)
+            await self.redis_client.expire(key, 300)  # 5 minutes TTL
+
+        except Exception as e:
+            logger.error(f"Failed to update service health for {service_name}: {e}")
+
+    async def _is_service_healthy(self, service_name: str) -> bool:
+        """Check if service is healthy from Redis cache"""
+        try:
+            key = f"service_health:{service_name}"
+            health_data = await self.redis_client.hgetall(key)
+
+            if not health_data:
+                return False
+
+            return health_data.get(b'healthy', b'false').decode() == 'True'
+
+        except Exception as e:
+            logger.error(f"Failed to check service health for {service_name}: {e}")
+            return False
\ No newline at end of file

diff --git a/gateway/app/main.py b/gateway/app/main.py
new file mode 100644
index 00000000..eb2e4b6a
--- /dev/null
+++ b/gateway/app/main.py
@@ -0,0 +1,131 @@
+"""
+API Gateway - Central entry point for all microservices
+Handles routing, authentication, rate limiting, and cross-cutting concerns
+"""
+
+import asyncio
+import logging
+from fastapi import FastAPI, Request, HTTPException, Depends
+from fastapi.middleware.cors import CORSMiddleware
+from fastapi.responses import JSONResponse
+import httpx
+import time
+from typing import Dict, Any
+
+from app.core.config import settings
+from app.core.service_discovery import ServiceDiscovery
+from app.middleware.auth import auth_middleware
+from app.middleware.logging import logging_middleware
+from app.middleware.rate_limit import rate_limit_middleware
+from app.routes import auth, training, forecasting, data, tenant, notification
+from shared.monitoring.logging import setup_logging
+from shared.monitoring.metrics import MetricsCollector
+
+# Setup logging
+setup_logging("gateway", settings.LOG_LEVEL)
+logger = logging.getLogger(__name__)
+
+# Create FastAPI app
+app = FastAPI(
+    title="Bakery Forecasting API Gateway",
+    description="Central API Gateway for bakery forecasting microservices",
+    version="1.0.0",
+    docs_url="/docs",
+    redoc_url="/redoc"
+)
+
+# Initialize metrics collector
+metrics_collector = MetricsCollector("gateway")
+
+# Service discovery
+service_discovery = ServiceDiscovery()
+
+# CORS middleware
+app.add_middleware(
+    CORSMiddleware,
+    allow_origins=settings.CORS_ORIGINS,
+    allow_credentials=True,
+    allow_methods=["*"],
+    allow_headers=["*"],
+)
+
+# Custom middleware (function-based, so register as HTTP middleware)
+app.middleware("http")(auth_middleware)
+app.middleware("http")(logging_middleware)
+app.middleware("http")(rate_limit_middleware)
+
+# Include routers
+app.include_router(auth.router, prefix="/api/v1/auth", tags=["authentication"])
+app.include_router(training.router, prefix="/api/v1/training", tags=["training"])
+app.include_router(forecasting.router, prefix="/api/v1/forecasting", tags=["forecasting"])
+app.include_router(data.router, prefix="/api/v1/data", tags=["data"])
+app.include_router(tenant.router, prefix="/api/v1/tenants", tags=["tenants"])
+app.include_router(notification.router, prefix="/api/v1/notifications", tags=["notifications"])
+
+@app.on_event("startup")
+async def startup_event():
+    """Application startup"""
+    logger.info("Starting API Gateway")
+
+    # Start metrics server
+    metrics_collector.start_metrics_server(8080)
+
+    # Initialize service discovery
+    await service_discovery.initialize()
+
+    logger.info("API Gateway started successfully")
+
+@app.on_event("shutdown")
+async def shutdown_event():
+    """Application shutdown"""
+    logger.info("Shutting down API Gateway")
+
+    # Clean up service discovery
+    await service_discovery.cleanup()
+
+    logger.info("API Gateway shutdown complete")
+
+@app.get("/health")
+async def health_check():
+    """Health check endpoint"""
+    healthy_services = await service_discovery.get_healthy_services()
+
+    return {
+        "status": "healthy",
+        "service": "gateway",
+        "version": "1.0.0",
+        "healthy_services": healthy_services,
+        "total_services": len(settings.SERVICES),
+        "timestamp": time.time()
+    }
+
+@app.get("/metrics")
+async def get_metrics():
+    """Get basic metrics"""
+    return {
+        "service": "gateway",
+        "uptime": time.time() - app.state.start_time if hasattr(app.state, 'start_time') else 0,
+        "healthy_services": await service_discovery.get_healthy_services()
+    }
+
+@app.exception_handler(HTTPException)
+async def http_exception_handler(request: Request, exc: HTTPException):
+    """Handle HTTP exceptions"""
+    logger.error(f"HTTP {exc.status_code}: {exc.detail}")
+    return JSONResponse(
+        status_code=exc.status_code,
+        content={"detail": exc.detail, "service": "gateway"}
+    )
+
+@app.exception_handler(Exception)
+async def general_exception_handler(request: Request, exc: Exception):
+    """Handle general exceptions"""
+    logger.error(f"Unhandled exception: {exc}", exc_info=True)
+    return JSONResponse(
+        status_code=500,
+        content={"detail": "Internal server error", "service": "gateway"}
+    )
+
+if __name__ == "__main__":
+    import uvicorn
+    uvicorn.run(app, host="0.0.0.0", port=8000)
\ No newline at end of file

diff --git a/gateway/app/middleware/__init__.py b/gateway/app/middleware/__init__.py
new file mode 100644
index 00000000..e69de29b

diff --git a/gateway/app/middleware/auth.py b/gateway/app/middleware/auth.py
new file mode 100644
index 00000000..ef45fd35
--- /dev/null
+++ b/gateway/app/middleware/auth.py
@@ -0,0 +1,101 @@
+"""
+Authentication middleware for gateway
+"""
+
+import logging
+from fastapi import Request, HTTPException
+from fastapi.responses import JSONResponse
+import httpx
+from typing import Optional
+
+from app.core.config import settings
+from shared.auth.jwt_handler import JWTHandler
+
+logger = logging.getLogger(__name__)
+
+# JWT handler
+jwt_handler = JWTHandler(settings.JWT_SECRET_KEY, settings.JWT_ALGORITHM)
+
+# Routes that don't require authentication
+PUBLIC_ROUTES = [
+    "/health",
+    "/metrics",
+    "/docs",
+    "/redoc",
+    "/openapi.json",
+    "/api/v1/auth/login",
+    "/api/v1/auth/register",
+    "/api/v1/auth/refresh"
+]
+
+async def auth_middleware(request: Request, call_next):
+    """Authentication middleware"""
+
+    # Check if route requires authentication
+    if _is_public_route(request.url.path):
+        return await call_next(request)
+
+    # Get token from header
+    token = _extract_token(request)
+    if not token:
+        return JSONResponse(
+            status_code=401,
+            content={"detail": "Authentication required"}
+        )
+
+    # Verify token
+    try:
+        # First try to verify token locally
+        payload = jwt_handler.verify_token(token)
+
+        if payload:
+            # Add user info to request state
+            request.state.user = payload
+            return await call_next(request)
+        else:
+            # Token invalid or expired, verify with auth service
+            user_info = await _verify_with_auth_service(token)
+            if user_info:
+                request.state.user = user_info
+                return await call_next(request)
+            else:
+                return JSONResponse(
+                    status_code=401,
+                    content={"detail": "Invalid or expired token"}
+                )
+
+    except Exception as e:
+        logger.error(f"Authentication error: {e}")
+        return JSONResponse(
+            status_code=401,
+            content={"detail": "Authentication failed"}
+        )
+
+def _is_public_route(path: str) -> bool:
+    """Check if route is public"""
+    return any(path.startswith(route) for route in PUBLIC_ROUTES)
+
+def _extract_token(request: Request) -> Optional[str]:
+    """Extract JWT token from request"""
+    auth_header = request.headers.get("Authorization")
+    if auth_header and auth_header.startswith("Bearer "):
+        return auth_header.split(" ")[1]
+    return None
+
+async def _verify_with_auth_service(token: str) -> Optional[dict]:
+    """Verify token with auth service"""
+    try:
+        async with httpx.AsyncClient(timeout=5.0) as client:
+            response = await client.post(
+                f"{settings.AUTH_SERVICE_URL}/verify",
+                headers={"Authorization": f"Bearer {token}"}
+            )
+
+            if response.status_code == 200:
+                return response.json()
+            else:
+                return None
+
+    except Exception as e:
+        logger.error(f"Auth service verification failed: {e}")
+        return None
\ No newline at end of file

diff --git a/gateway/app/middleware/logging.py b/gateway/app/middleware/logging.py
new file mode 100644
index 00000000..ea565b56
--- /dev/null
+++ b/gateway/app/middleware/logging.py
@@ -0,0 +1,48 @@
+"""
+Logging middleware for gateway
+"""
+
+import logging
+import time
+from fastapi import Request
+import json
+
+logger = logging.getLogger(__name__)
+
+async def logging_middleware(request: Request, call_next):
+    """Logging middleware"""
+
+    start_time = time.time()
+
+    # Log request
+    logger.info(
+        f"Request: {request.method} {request.url.path}",
+        extra={
+            "method": request.method,
+            "url": request.url.path,
+            "query_params": str(request.query_params),
+            "client_host": request.client.host,
+            "user_agent": request.headers.get("user-agent", ""),
+            "request_id": getattr(request.state, 'request_id', None)
+        }
+    )
+
+    # Process request
+    response = await call_next(request)
+
+    # Calculate duration
+    duration = time.time() - start_time
+
+    # Log response
+    logger.info(
+        f"Response: {response.status_code} in {duration:.3f}s",
+        extra={
+            "status_code": response.status_code,
+            "duration": duration,
+            "method": request.method,
+            "url": request.url.path,
+            "request_id": getattr(request.state, 'request_id', None)
+        }
+    )
+
+    return response
\ No newline at end of file

diff --git a/gateway/app/middleware/rate_limit.py b/gateway/app/middleware/rate_limit.py
new file mode 100644
index 00000000..84cdc49f
--- /dev/null
+++ b/gateway/app/middleware/rate_limit.py
@@ -0,0 +1,85 @@
+"""
+Rate limiting middleware for gateway
+"""
+
+import logging
+from fastapi import Request, HTTPException
+from fastapi.responses import JSONResponse
+import redis.asyncio as redis
+from datetime import datetime, timedelta
+import hashlib
+
+from app.core.config import settings
+
+logger = logging.getLogger(__name__)
+
+# Redis client for rate limiting
+redis_client = redis.from_url(settings.REDIS_URL)
+
+async def rate_limit_middleware(request: Request, call_next):
+    """Rate limiting middleware"""
+
+    # Skip rate limiting for health checks
+    if request.url.path in ["/health", "/metrics"]:
+        return await call_next(request)
+
+    # Get client identifier (IP address or user ID)
+    client_id = _get_client_id(request)
+
+    # Check rate limit
+    if await _is_rate_limited(client_id):
+        return JSONResponse(
+            status_code=429,
+            content={
+                "detail": "Rate limit exceeded",
+                "retry_after": settings.RATE_LIMIT_WINDOW
+            }
+        )
+
+    # Process request
+    response = await call_next(request)
+
+    # Update rate limit counter
+    await _update_rate_limit(client_id)
+
+    return response
+
+def _get_client_id(request: Request) -> str:
+    """Get client identifier for rate limiting"""
+    # Use user ID if authenticated, otherwise use IP
+    if hasattr(request.state, 'user') and request.state.user:
+        return f"user:{request.state.user.get('user_id', 'unknown')}"
+    else:
+        # Hash IP address for privacy
+        ip = request.client.host
+        return f"ip:{hashlib.md5(ip.encode()).hexdigest()}"
+
+async def _is_rate_limited(client_id: str) -> bool:
+    """Check if client is rate limited"""
+    try:
+        key = f"rate_limit:{client_id}"
+        current_count = await redis_client.get(key)
+
+        if current_count is None:
+            return False
+
+        return int(current_count) >= settings.RATE_LIMIT_REQUESTS
+
+    except Exception as e:
+        logger.error(f"Rate limit check failed: {e}")
+        return False
+
+async def _update_rate_limit(client_id: str):
+    """Update rate limit counter"""
+    try:
+        key = f"rate_limit:{client_id}"
+
+        # Increment counter
+        current_count = await redis_client.incr(key)
+
+        # Set TTL on first request
+        if current_count == 1:
+            await redis_client.expire(key, settings.RATE_LIMIT_WINDOW)
+
+    except Exception as e:
+        logger.error(f"Rate limit update failed: {e}")
\ No newline at end of file

diff --git a/gateway/app/routes/__init__.py b/gateway/app/routes/__init__.py
new file mode 100644
index 00000000..e69de29b

diff --git a/gateway/app/routes/auth.py b/gateway/app/routes/auth.py
new file mode 100644
index 00000000..3a0972ca
--- /dev/null
+++ b/gateway/app/routes/auth.py
@@ -0,0 +1,161 @@
+"""
+Authentication routes for gateway
+"""
+
+from fastapi import APIRouter, Request, HTTPException
+from fastapi.responses import JSONResponse
+import httpx
+import logging
+
+from app.core.config import settings
+from app.core.service_discovery import ServiceDiscovery
+
+logger = logging.getLogger(__name__)
+router = APIRouter()
+
+service_discovery = ServiceDiscovery()
+
+@router.post("/login")
+async def login(request: Request):
+    """Proxy login request to auth service"""
+    try:
+        body = await request.body()
+
+        async with httpx.AsyncClient(timeout=10.0) as client:
+            response = await client.post(
+                f"{settings.AUTH_SERVICE_URL}/login",
+                content=body,
+                headers={"Content-Type": "application/json"}
+            )
+
+        if response.status_code == 200:
+            return response.json()
+        else:
+            return JSONResponse(
+                status_code=response.status_code,
+                content=response.json()
+            )
+
+    except httpx.RequestError as e:
+        logger.error(f"Auth service unavailable: {e}")
+        raise HTTPException(
+            status_code=503,
+            detail="Authentication service unavailable"
+        )
+    except Exception as e:
+        logger.error(f"Login error: {e}")
+        raise HTTPException(status_code=500, detail="Internal server error")
+
+@router.post("/register")
+async def register(request: Request):
+    """Proxy register request to auth service"""
+    try:
+        body = await request.body()
+
+        async with httpx.AsyncClient(timeout=10.0) as client:
+            response = await client.post(
+                f"{settings.AUTH_SERVICE_URL}/register",
+                content=body,
+                headers={"Content-Type": "application/json"}
+            )
+
+        return JSONResponse(
+            status_code=response.status_code,
+            content=response.json()
+        )
+
+    except httpx.RequestError as e:
+        logger.error(f"Auth service unavailable: {e}")
+        raise HTTPException(
+            status_code=503,
+            detail="Authentication service unavailable"
+        )
+    except Exception as e:
+        logger.error(f"Register error: {e}")
+        raise HTTPException(status_code=500, detail="Internal server error")
+
+@router.post("/refresh")
+async def refresh_token(request: Request):
+    """Proxy refresh token request to auth service"""
+    try:
+        body = await request.body()
+
+        async with httpx.AsyncClient(timeout=10.0) as client:
+            response = await client.post(
+                f"{settings.AUTH_SERVICE_URL}/refresh",
+                content=body,
+                headers={"Content-Type": "application/json"}
+            )
+
+        return JSONResponse(
+            status_code=response.status_code,
+            content=response.json()
+        )
+
+    except httpx.RequestError as e:
+        logger.error(f"Auth service unavailable: {e}")
+        raise HTTPException(
+            status_code=503,
+            detail="Authentication service unavailable"
+        )
+    except Exception as e:
+        logger.error(f"Refresh token error: {e}")
+        raise HTTPException(status_code=500, detail="Internal server error")
+
+@router.post("/verify")
+async def verify_token(request: Request):
+    """Proxy token verification to auth service"""
+    try:
+        auth_header = request.headers.get("Authorization")
+        if not auth_header:
+            raise HTTPException(status_code=401, detail="Authorization header required")
+
+        async with httpx.AsyncClient(timeout=5.0) as client:
+            response = await client.post(
+                f"{settings.AUTH_SERVICE_URL}/verify",
+                headers={"Authorization": auth_header}
+            )
+
+        return JSONResponse(
+            status_code=response.status_code,
+            content=response.json()
+        )
+
+    except httpx.RequestError as e:
+        logger.error(f"Auth service unavailable: {e}")
+        raise HTTPException(
+            status_code=503,
+            detail="Authentication service unavailable"
+        )
+    except Exception as e:
+        logger.error(f"Token verification error: {e}")
+        raise HTTPException(status_code=500, detail="Internal server error")
+
+@router.post("/logout")
+async def logout(request: Request):
+    """Proxy logout request to auth service"""
+    try:
+        auth_header = request.headers.get("Authorization")
+        if not auth_header:
+            raise HTTPException(status_code=401, detail="Authorization header required")
+
+        async with httpx.AsyncClient(timeout=5.0) as client:
+            response = await client.post(
+                f"{settings.AUTH_SERVICE_URL}/logout",
+                headers={"Authorization": auth_header}
+            )
+
+        return JSONResponse(
+            status_code=response.status_code,
+            content=response.json()
+        )
+
+    except httpx.RequestError as e:
+        logger.error(f"Auth service unavailable: {e}")
+        raise HTTPException(
+            status_code=503,
+            detail="Authentication service unavailable"
+        )
+    except Exception as e:
+        logger.error(f"Logout error: {e}")
+        raise HTTPException(status_code=500, detail="Internal server error")
\ No newline at end of file

diff --git a/gateway/app/routes/training.py b/gateway/app/routes/training.py
new file mode 100644
index 00000000..6757627f
--- /dev/null
+++ b/gateway/app/routes/training.py
@@ -0,0 +1,166 @@
+"""
+Training routes for gateway
+"""
+
+from fastapi import APIRouter, Request, HTTPException, Query
+from fastapi.responses import JSONResponse
+import httpx
+import logging
+from typing import Optional
+
+from app.core.config import settings
+
+logger = logging.getLogger(__name__)
+router = APIRouter()
+
+@router.post("/train")
+async def start_training(request: Request):
+    """Proxy training request to training service"""
+    try:
+        body = await request.body()
+        auth_header = request.headers.get("Authorization")
+
+        async with httpx.AsyncClient(timeout=30.0) as client:
+            response = await client.post(
+                f"{settings.TRAINING_SERVICE_URL}/train",
+                content=body,
+                headers={
+                    "Content-Type": "application/json",
+                    "Authorization": auth_header
+                }
+            )
+
+        return JSONResponse(
+            status_code=response.status_code,
+            content=response.json()
+        )
+
+    except httpx.RequestError as e:
+        logger.error(f"Training service unavailable: {e}")
+        raise HTTPException(
+            status_code=503,
+            detail="Training service unavailable"
+        )
+    except Exception as e:
+        logger.error(f"Training error: {e}")
+        raise HTTPException(status_code=500, detail="Internal server error")
+
+@router.get("/status/{training_job_id}")
+async def get_training_status(training_job_id: str, request: Request):
+    """Get training job status"""
+    try:
+        auth_header = request.headers.get("Authorization")
+
+        async with httpx.AsyncClient(timeout=10.0) as client:
+            response = await client.get(
+                f"{settings.TRAINING_SERVICE_URL}/status/{training_job_id}",
+                headers={"Authorization": auth_header}
+            )
+
+        return JSONResponse(
+            status_code=response.status_code,
+            content=response.json()
+        )
+
+    except httpx.RequestError as e:
+        logger.error(f"Training service unavailable: {e}")
+        raise HTTPException(
+            status_code=503,
+            detail="Training service unavailable"
+        )
+    except Exception as e:
+        logger.error(f"Training status error: {e}")
+        raise HTTPException(status_code=500, detail="Internal server error")
+
+@router.get("/models")
+async def get_trained_models(request: Request):
+    """Get trained models"""
+    try:
+        auth_header = request.headers.get("Authorization")
+
+        async with httpx.AsyncClient(timeout=10.0) as client:
+            response = await client.get(
+                f"{settings.TRAINING_SERVICE_URL}/models",
+                headers={"Authorization": auth_header}
+            )
+
+        return JSONResponse(
+            status_code=response.status_code,
+            content=response.json()
+        )
+
+    except httpx.RequestError as e:
+        logger.error(f"Training service unavailable: {e}")
+        raise HTTPException(
+            status_code=503,
+            detail="Training service unavailable"
+        )
+    except Exception as e:
+        logger.error(f"Get models error: {e}")
+        raise HTTPException(status_code=500, detail="Internal server error")
+
+@router.get("/jobs")
+async def get_training_jobs(
+    request: Request,
+    limit: Optional[int] = Query(10, ge=1, le=100),
+    offset: Optional[int] = Query(0, ge=0)
+):
+    """Get training jobs"""
+    try:
+        auth_header = request.headers.get("Authorization")
+
+        async with httpx.AsyncClient(timeout=10.0) as client:
+            response = await client.get(
+                f"{settings.TRAINING_SERVICE_URL}/jobs",
+                params={"limit": limit, "offset": offset},
+                headers={"Authorization": auth_header}
diff --git a/gateway/requirements.txt b/gateway/requirements.txt
new file mode 100644
index 00000000..28fa51e9
--- /dev/null
+++ b/gateway/requirements.txt
@@ -0,0 +1,13 @@
+fastapi==0.104.1
+uvicorn[standard]==0.24.0
+httpx==0.25.2
+redis==5.0.1
+pydantic==2.5.0
+pydantic-settings==2.1.0
+python-jose[cryptography]==3.3.0
+python-multipart==0.0.6
+prometheus-client==0.17.1
+python-json-logger==2.0.4
+email-validator==2.0.0
+aio-pika==9.3.0
+pytz==2023.3
\ No newline at end of file
diff --git a/gateway/shared/auth/__init__.py b/gateway/shared/auth/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/gateway/shared/auth/decorators.py b/gateway/shared/auth/decorators.py
new file mode 100644
index 00000000..53095a15
--- /dev/null
+++ b/gateway/shared/auth/decorators.py
@@ -0,0 +1,41 @@
+"""
+Authentication decorators for FastAPI
+"""
+
+from fastapi import HTTPException, Depends
+from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials
+import httpx
+import logging
+
+logger = logging.getLogger(__name__)
+
+security = HTTPBearer()
+
+def verify_service_token(auth_service_url: str):
+    """Verify service token with auth service"""
+
+    async def verify_token(token: HTTPAuthorizationCredentials = Depends(security)):
+        try:
+            async with httpx.AsyncClient() as client:
+                response = await client.post(
+                    f"{auth_service_url}/verify",
+                    headers={"Authorization": f"Bearer {token.credentials}"}
+                )
+
+                if response.status_code == 200:
+                    return response.json()
+                else:
+                    raise HTTPException(
+                        status_code=401,
+                        detail="Invalid authentication credentials"
+                    )
+
+        except httpx.RequestError as e:
+            logger.error(f"Auth service unavailable: {e}")
+            raise HTTPException(
+                status_code=503,
+                detail="Authentication service unavailable"
+            )
+
+    return verify_token
\ No newline at end of file
diff --git a/gateway/shared/auth/jwt_handler.py b/gateway/shared/auth/jwt_handler.py
new file mode 100644
index 00000000..8e7643c7
--- /dev/null
+++ b/gateway/shared/auth/jwt_handler.py
@@ -0,0 +1,58 @@
+"""
+Shared JWT Authentication Handler
+Used across all microservices for consistent authentication
+"""
+
+# python-jose is the JWT backend pinned in requirements.txt
+from jose import jwt
+from jose.exceptions import ExpiredSignatureError, JWTError
+from datetime import datetime, timedelta
+from typing import Optional, Dict, Any
+import logging
+
+logger = logging.getLogger(__name__)
+
+class JWTHandler:
+    """JWT token handling for microservices"""
+
+    def __init__(self, secret_key: str, algorithm: str = "HS256"):
+        self.secret_key = secret_key
+        self.algorithm = algorithm
+
+    def create_access_token(self, data: Dict[str, Any], expires_delta: Optional[timedelta] = None) -> str:
+        """Create JWT access token"""
+        to_encode = data.copy()
+
+        if expires_delta:
+            expire = datetime.utcnow() + expires_delta
+        else:
+            expire = datetime.utcnow() + timedelta(minutes=30)
+
+        to_encode.update({"exp": expire, "type": "access"})
+
+        encoded_jwt = jwt.encode(to_encode, self.secret_key, algorithm=self.algorithm)
+        return encoded_jwt
+
+    def create_refresh_token(self, data: Dict[str, Any], expires_delta: Optional[timedelta] = None) -> str:
+        """Create JWT refresh token"""
+        to_encode = data.copy()
+
+        if expires_delta:
+            expire = datetime.utcnow() + expires_delta
+        else:
+            expire = datetime.utcnow() + timedelta(days=7)
+
+        to_encode.update({"exp": expire, "type": "refresh"})
+
+        encoded_jwt = jwt.encode(to_encode, self.secret_key, algorithm=self.algorithm)
+        return encoded_jwt
+
+    def verify_token(self, token: str) -> Optional[Dict[str, Any]]:
+        """Verify and decode JWT token"""
+        try:
+            payload = jwt.decode(token, self.secret_key, algorithms=[self.algorithm])
+            return payload
+        except ExpiredSignatureError:
+            logger.warning("Token has expired")
+            return None
+        except JWTError:
+            logger.warning("Invalid token")
+            return None
\ No newline at end of file
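A quick sketch of the handler's round trip; the secret is a demo value only:

    from datetime import timedelta
    from shared.auth.jwt_handler import JWTHandler

    handler = JWTHandler(secret_key="change-me", algorithm="HS256")  # demo secret only

    token = handler.create_access_token(
        {"sub": "user-123", "tenant_id": "bakery-1"},
        expires_delta=timedelta(minutes=15),
    )

    payload = handler.verify_token(token)  # dict with exp/type claims, or None on failure
    assert payload is not None and payload["type"] == "access"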
= "HS256"): + self.secret_key = secret_key + self.algorithm = algorithm + + def create_access_token(self, data: Dict[str, Any], expires_delta: Optional[timedelta] = None) -> str: + """Create JWT access token""" + to_encode = data.copy() + + if expires_delta: + expire = datetime.utcnow() + expires_delta + else: + expire = datetime.utcnow() + timedelta(minutes=30) + + to_encode.update({"exp": expire, "type": "access"}) + + encoded_jwt = jwt.encode(to_encode, self.secret_key, algorithm=self.algorithm) + return encoded_jwt + + def create_refresh_token(self, data: Dict[str, Any], expires_delta: Optional[timedelta] = None) -> str: + """Create JWT refresh token""" + to_encode = data.copy() + + if expires_delta: + expire = datetime.utcnow() + expires_delta + else: + expire = datetime.utcnow() + timedelta(days=7) + + to_encode.update({"exp": expire, "type": "refresh"}) + + encoded_jwt = jwt.encode(to_encode, self.secret_key, algorithm=self.algorithm) + return encoded_jwt + + def verify_token(self, token: str) -> Optional[Dict[str, Any]]: + """Verify and decode JWT token""" + try: + payload = jwt.decode(token, self.secret_key, algorithms=[self.algorithm]) + return payload + except jwt.ExpiredSignatureError: + logger.warning("Token has expired") + return None + except jwt.InvalidTokenError: + logger.warning("Invalid token") + return None \ No newline at end of file diff --git a/gateway/shared/database/__init__.py b/gateway/shared/database/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/gateway/shared/database/base.py b/gateway/shared/database/base.py new file mode 100644 index 00000000..e5766716 --- /dev/null +++ b/gateway/shared/database/base.py @@ -0,0 +1,56 @@ +""" +Base database configuration for all microservices +""" + +import os +from sqlalchemy import create_engine +from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine +from sqlalchemy.orm import sessionmaker, declarative_base +from sqlalchemy.pool import StaticPool +import logging + +logger = logging.getLogger(__name__) + +Base = declarative_base() + +class DatabaseManager: + """Database manager for microservices""" + + def __init__(self, database_url: str): + self.database_url = database_url + self.async_engine = create_async_engine( + database_url, + echo=False, + pool_pre_ping=True, + pool_recycle=300, + pool_size=20, + max_overflow=30 + ) + + self.async_session_local = sessionmaker( + self.async_engine, + class_=AsyncSession, + expire_on_commit=False + ) + + async def get_db(self): + """Get database session""" + async with self.async_session_local() as session: + try: + yield session + except Exception as e: + logger.error(f"Database session error: {e}") + await session.rollback() + raise + finally: + await session.close() + + async def create_tables(self): + """Create database tables""" + async with self.async_engine.begin() as conn: + await conn.run_sync(Base.metadata.create_all) + + async def drop_tables(self): + """Drop database tables""" + async with self.async_engine.begin() as conn: + await conn.run_sync(Base.metadata.drop_all) \ No newline at end of file diff --git a/gateway/shared/messaging/__init__.py b/gateway/shared/messaging/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/gateway/shared/messaging/events.py b/gateway/shared/messaging/events.py new file mode 100644 index 00000000..812e972d --- /dev/null +++ b/gateway/shared/messaging/events.py @@ -0,0 +1,73 @@ +""" +Event definitions for microservices communication +""" + +from dataclasses import dataclass +from 
diff --git a/gateway/shared/messaging/__init__.py b/gateway/shared/messaging/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/gateway/shared/messaging/events.py b/gateway/shared/messaging/events.py
new file mode 100644
index 00000000..812e972d
--- /dev/null
+++ b/gateway/shared/messaging/events.py
@@ -0,0 +1,73 @@
+"""
+Event definitions for microservices communication
+"""
+
+from dataclasses import dataclass, field
+from datetime import datetime
+from typing import Dict, Any, Optional
+import uuid
+
+@dataclass
+class BaseEvent:
+    """Base event class"""
+    # Every field needs a default: the subclasses below override event_type
+    # with a default value, and a dataclass may not declare a non-default
+    # field after a default one. Missing values are filled in __post_init__.
+    event_id: str = ""
+    event_type: str = ""
+    service_name: str = ""
+    timestamp: Optional[datetime] = None
+    data: Dict[str, Any] = field(default_factory=dict)
+    correlation_id: Optional[str] = None
+
+    def __post_init__(self):
+        if not self.event_id:
+            self.event_id = str(uuid.uuid4())
+        if not self.timestamp:
+            self.timestamp = datetime.utcnow()
+
+# Training Events
+@dataclass
+class TrainingStartedEvent(BaseEvent):
+    event_type: str = "training.started"
+
+@dataclass
+class TrainingCompletedEvent(BaseEvent):
+    event_type: str = "training.completed"
+
+@dataclass
+class TrainingFailedEvent(BaseEvent):
+    event_type: str = "training.failed"
+
+# Forecasting Events
+@dataclass
+class ForecastGeneratedEvent(BaseEvent):
+    event_type: str = "forecast.generated"
+
+@dataclass
+class ForecastRequestedEvent(BaseEvent):
+    event_type: str = "forecast.requested"
+
+# User Events
+@dataclass
+class UserRegisteredEvent(BaseEvent):
+    event_type: str = "user.registered"
+
+@dataclass
+class UserLoginEvent(BaseEvent):
+    event_type: str = "user.login"
+
+# Tenant Events
+@dataclass
+class TenantCreatedEvent(BaseEvent):
+    event_type: str = "tenant.created"
+
+@dataclass
+class TenantUpdatedEvent(BaseEvent):
+    event_type: str = "tenant.updated"
+
+# Notification Events
+@dataclass
+class NotificationSentEvent(BaseEvent):
+    event_type: str = "notification.sent"
+
+@dataclass
+class NotificationFailedEvent(BaseEvent):
+    event_type: str = "notification.failed"
\ No newline at end of file
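With the defaults above, constructing an event needs only the caller-specific fields; the id and timestamp are filled in automatically:

    from shared.messaging.events import TrainingStartedEvent

    event = TrainingStartedEvent(
        service_name="training-service",
        data={"training_job_id": "job-42", "tenant_id": "bakery-1"},
    )
    # event_id and timestamp are populated by __post_init__
    print(event.event_type)  # "training.started"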
diff --git a/gateway/shared/messaging/rabbitmq.py b/gateway/shared/messaging/rabbitmq.py
new file mode 100644
index 00000000..62d95cfb
--- /dev/null
+++ b/gateway/shared/messaging/rabbitmq.py
@@ -0,0 +1,96 @@
+"""
+RabbitMQ messaging client for microservices
+"""
+
+import json
+import logging
+from typing import Dict, Any, Callable
+import aio_pika
+from aio_pika import connect_robust, Message, DeliveryMode
+
+logger = logging.getLogger(__name__)
+
+class RabbitMQClient:
+    """RabbitMQ client for microservices communication"""
+
+    def __init__(self, connection_url: str):
+        self.connection_url = connection_url
+        self.connection = None
+        self.channel = None
+
+    async def connect(self):
+        """Connect to RabbitMQ"""
+        try:
+            self.connection = await connect_robust(self.connection_url)
+            self.channel = await self.connection.channel()
+            logger.info("Connected to RabbitMQ")
+        except Exception as e:
+            logger.error(f"Failed to connect to RabbitMQ: {e}")
+            raise
+
+    async def disconnect(self):
+        """Disconnect from RabbitMQ"""
+        if self.connection:
+            await self.connection.close()
+            logger.info("Disconnected from RabbitMQ")
+
+    async def publish_event(self, exchange_name: str, routing_key: str, event_data: Dict[str, Any]):
+        """Publish event to RabbitMQ"""
+        try:
+            if not self.channel:
+                await self.connect()
+
+            # Declare exchange
+            exchange = await self.channel.declare_exchange(
+                exchange_name,
+                aio_pika.ExchangeType.TOPIC,
+                durable=True
+            )
+
+            # Create message
+            message = Message(
+                json.dumps(event_data).encode(),
+                delivery_mode=DeliveryMode.PERSISTENT,
+                content_type="application/json"
+            )
+
+            # Publish message
+            await exchange.publish(message, routing_key=routing_key)
+
+            logger.info(f"Published event to {exchange_name} with routing key {routing_key}")
+
+        except Exception as e:
+            logger.error(f"Failed to publish event: {e}")
+            raise
+
+    async def consume_events(self, exchange_name: str, queue_name: str, routing_key: str, callback: Callable):
+        """Consume events from RabbitMQ"""
+        try:
+            if not self.channel:
+                await self.connect()
+
+            # Declare exchange
+            exchange = await self.channel.declare_exchange(
+                exchange_name,
+                aio_pika.ExchangeType.TOPIC,
+                durable=True
+            )
+
+            # Declare queue
+            queue = await self.channel.declare_queue(
+                queue_name,
+                durable=True
+            )
+
+            # Bind queue to exchange
+            await queue.bind(exchange, routing_key)
+
+            # Set up consumer
+            await queue.consume(callback)
+
+            logger.info(f"Started consuming events from {queue_name}")
+
+        except Exception as e:
+            logger.error(f"Failed to consume events: {e}")
+            raise
\ No newline at end of file
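A publish sketch combining the client with the event dataclasses. The exchange name "bakery.events" is an assumption, and the datetime field must be serialized before json.dumps:

    import asyncio
    from dataclasses import asdict
    from shared.messaging.rabbitmq import RabbitMQClient
    from shared.messaging.events import TrainingStartedEvent

    async def main():
        client = RabbitMQClient("amqp://bakery:forecast123@localhost:5672/")  # URL as in .env
        await client.connect()

        event = TrainingStartedEvent(service_name="training-service", data={"job": "42"})
        payload = asdict(event)
        payload["timestamp"] = payload["timestamp"].isoformat()  # datetime is not JSON-serializable

        # "bakery.events" is an assumed exchange name for this sketch
        await client.publish_event("bakery.events", event.event_type, payload)
        await client.disconnect()

    asyncio.run(main())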
diff --git a/gateway/shared/monitoring/__init__.py b/gateway/shared/monitoring/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/gateway/shared/monitoring/logging.py b/gateway/shared/monitoring/logging.py
new file mode 100644
index 00000000..0fde234d
--- /dev/null
+++ b/gateway/shared/monitoring/logging.py
@@ -0,0 +1,77 @@
+"""
+Centralized logging configuration for microservices
+"""
+
+import logging
+import logging.config
+import os
+from typing import Dict, Any
+
+def setup_logging(service_name: str, log_level: str = "INFO") -> None:
+    """Set up logging configuration for a microservice"""
+
+    config: Dict[str, Any] = {
+        "version": 1,
+        "disable_existing_loggers": False,
+        "formatters": {
+            "standard": {
+                "format": "%(asctime)s [%(levelname)s] %(name)s: %(message)s"
+            },
+            "detailed": {
+                "format": "%(asctime)s [%(levelname)s] %(name)s [%(filename)s:%(lineno)d] %(message)s"
+            },
+            "json": {
+                "()": "pythonjsonlogger.jsonlogger.JsonFormatter",
+                "format": "%(asctime)s %(name)s %(levelname)s %(message)s"
+            }
+        },
+        "handlers": {
+            "console": {
+                "class": "logging.StreamHandler",
+                "level": log_level,
+                "formatter": "standard",
+                "stream": "ext://sys.stdout"
+            },
+            "file": {
+                "class": "logging.FileHandler",
+                "level": log_level,
+                "formatter": "detailed",
+                "filename": f"/var/log/{service_name}.log",
+                "mode": "a"
+            },
+            "logstash": {
+                "class": "logstash.TCPLogstashHandler",
+                "host": os.getenv("LOGSTASH_HOST", "localhost"),
+                "port": int(os.getenv("LOGSTASH_PORT", "5000")),
+                "version": 1,
+                "message_type": "logstash",
+                "fqdn": False,
+                "tags": [service_name]
+            }
+        },
+        "loggers": {
+            "": {
+                "handlers": ["console", "file"],
+                "level": log_level,
+                "propagate": False
+            },
+            "uvicorn": {
+                "handlers": ["console"],
+                "level": log_level,
+                "propagate": False
+            },
+            "uvicorn.access": {
+                "handlers": ["console"],
+                "level": log_level,
+                "propagate": False
+            }
+        }
+    }
+
+    # Add logstash handler if in production
+    if os.getenv("ENVIRONMENT") == "production":
+        config["loggers"][""]["handlers"].append("logstash")
+
+    logging.config.dictConfig(config)
+    logger = logging.getLogger(__name__)
+    logger.info(f"Logging configured for {service_name}")
\ No newline at end of file
diff --git a/gateway/shared/monitoring/metrics.py b/gateway/shared/monitoring/metrics.py
new file mode 100644
index 00000000..a5e35223
--- /dev/null
+++ b/gateway/shared/monitoring/metrics.py
@@ -0,0 +1,112 @@
+"""
+Metrics collection for microservices
+"""
+
+import time
+import logging
+from prometheus_client import Counter, Histogram, Gauge, start_http_server
+
+logger = logging.getLogger(__name__)
+
+# Prometheus metrics
+REQUEST_COUNT = Counter(
+    'http_requests_total',
+    'Total HTTP requests',
+    ['method', 'endpoint', 'status_code', 'service']
+)
+
+REQUEST_DURATION = Histogram(
+    'http_request_duration_seconds',
+    'HTTP request duration in seconds',
+    ['method', 'endpoint', 'service']
+)
+
+ACTIVE_CONNECTIONS = Gauge(
+    'active_connections',
+    'Active database connections',
+    ['service']
+)
+
+TRAINING_JOBS = Counter(
+    'training_jobs_total',
+    'Total training jobs',
+    ['status', 'service']
+)
+
+FORECASTS_GENERATED = Counter(
+    'forecasts_generated_total',
+    'Total forecasts generated',
+    ['service']
+)
+
+class MetricsCollector:
+    """Metrics collector for microservices"""
+
+    def __init__(self, service_name: str):
+        self.service_name = service_name
+        self.start_time = time.time()
+
+    def start_metrics_server(self, port: int = 8080):
+        """Start Prometheus metrics server"""
+        try:
+            start_http_server(port)
+            logger.info(f"Metrics server started on port {port}")
+        except Exception as e:
+            logger.error(f"Failed to start metrics server: {e}")
+
+    def record_request(self, method: str, endpoint: str, status_code: int, duration: float):
+        """Record HTTP request metrics"""
+        REQUEST_COUNT.labels(
+            method=method,
+            endpoint=endpoint,
+            status_code=status_code,
+            service=self.service_name
+        ).inc()
+
+        REQUEST_DURATION.labels(
+            method=method,
+            endpoint=endpoint,
+            service=self.service_name
+        ).observe(duration)
+
+    def record_training_job(self, status: str):
+        """Record training job metrics"""
+        TRAINING_JOBS.labels(
+            status=status,
+            service=self.service_name
+        ).inc()
+
+    def record_forecast_generated(self):
+        """Record forecast generation metrics"""
+        FORECASTS_GENERATED.labels(
+            service=self.service_name
+        ).inc()
+
+    def set_active_connections(self, count: int):
+        """Set active database connections"""
+        ACTIVE_CONNECTIONS.labels(
+            service=self.service_name
+        ).set(count)
+
+def metrics_middleware(metrics_collector: MetricsCollector):
+    """Middleware to collect metrics"""
+
+    async def middleware(request, call_next):
+        start_time = time.time()
+
+        # call_next is awaitable in Starlette/FastAPI HTTP middleware
+        response = await call_next(request)
+
+        duration = time.time() - start_time
+
+        metrics_collector.record_request(
+            method=request.method,
+            endpoint=request.url.path,
+            status_code=response.status_code,
+            duration=duration
+        )
+
+        return response
+
+    return middleware
\ No newline at end of file
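A minimal sketch of wiring the collector and middleware into a FastAPI app; the service name is illustrative, and port 8080 matches the Prometheus scrape targets below:

    from fastapi import FastAPI
    from shared.monitoring.metrics import MetricsCollector, metrics_middleware

    app = FastAPI()
    collector = MetricsCollector("gateway")
    collector.start_metrics_server(8080)  # scraped on :8080 per prometheus.yml

    # register the async closure as an HTTP middleware
    app.middleware("http")(metrics_middleware(collector))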
string""" + return datetime.strptime(dt_str, format_str) + +def is_business_hours(dt: Optional[datetime] = None) -> bool: + """Check if datetime is during business hours (9 AM - 6 PM Madrid time)""" + if dt is None: + dt = madrid_now() + + if dt.tzinfo is None: + dt = dt.replace(tzinfo=timezone.utc) + + madrid_dt = to_madrid(dt) + + # Check if it's a weekday (Monday=0, Sunday=6) + if madrid_dt.weekday() >= 5: # Weekend + return False + + # Check if it's business hours + return 9 <= madrid_dt.hour < 18 + +def next_business_day(dt: Optional[datetime] = None) -> datetime: + """Get next business day""" + if dt is None: + dt = madrid_now() + + if dt.tzinfo is None: + dt = dt.replace(tzinfo=timezone.utc) + + madrid_dt = to_madrid(dt) + + # Add days until we reach a weekday + while madrid_dt.weekday() >= 5: # Weekend + madrid_dt += timedelta(days=1) + + # Set to 9 AM + return madrid_dt.replace(hour=9, minute=0, second=0, microsecond=0) \ No newline at end of file diff --git a/gateway/shared/utils/validation.py b/gateway/shared/utils/validation.py new file mode 100644 index 00000000..c855b20c --- /dev/null +++ b/gateway/shared/utils/validation.py @@ -0,0 +1,67 @@ +""" +Validation utilities for microservices +""" + +import re +from typing import Any, Optional +from email_validator import validate_email, EmailNotValidError + +def validate_spanish_phone(phone: str) -> bool: + """Validate Spanish phone number""" + # Spanish phone pattern: +34 followed by 9 digits + pattern = r'^(\+34|0034|34)?[6-9]\d{8}$' + return bool(re.match(pattern, phone.replace(' ', '').replace('-', ''))) + +def validate_email_address(email: str) -> bool: + """Validate email address""" + try: + validate_email(email) + return True + except EmailNotValidError: + return False + +def validate_tenant_name(name: str) -> bool: + """Validate tenant name""" + # Must be 2-50 characters, letters, numbers, spaces, hyphens, apostrophes + pattern = r"^[a-zA-ZÀ-ÿ0-9\s\-']{2,50}$" + return bool(re.match(pattern, name)) + +def validate_address(address: str) -> bool: + """Validate address""" + # Must be 5-200 characters + return 5 <= len(address.strip()) <= 200 + +def validate_coordinates(latitude: float, longitude: float) -> bool: + """Validate Madrid coordinates""" + # Madrid is roughly between these coordinates + madrid_bounds = { + 'lat_min': 40.3, + 'lat_max': 40.6, + 'lon_min': -3.8, + 'lon_max': -3.5 + } + + return ( + madrid_bounds['lat_min'] <= latitude <= madrid_bounds['lat_max'] and + madrid_bounds['lon_min'] <= longitude <= madrid_bounds['lon_max'] + ) + +def validate_product_name(name: str) -> bool: + """Validate product name""" + # Must be 1-50 characters, letters, numbers, spaces + pattern = r"^[a-zA-ZÀ-ÿ0-9\s]{1,50}$" + return bool(re.match(pattern, name)) + +def validate_positive_number(value: Any) -> bool: + """Validate positive number""" + try: + return float(value) > 0 + except (ValueError, TypeError): + return False + +def validate_non_negative_number(value: Any) -> bool: + """Validate non-negative number""" + try: + return float(value) >= 0 + except (ValueError, TypeError): + return False \ No newline at end of file diff --git a/infrastructure/monitoring/prometheus/prometheus.yml b/infrastructure/monitoring/prometheus/prometheus.yml new file mode 100644 index 00000000..58cddaad --- /dev/null +++ b/infrastructure/monitoring/prometheus/prometheus.yml @@ -0,0 +1,31 @@ +global: + scrape_interval: 15s + +scrape_configs: + - job_name: 'gateway' + static_configs: + - targets: ['gateway:8080'] + + - job_name: 'auth-service' + 
diff --git a/infrastructure/monitoring/prometheus/prometheus.yml b/infrastructure/monitoring/prometheus/prometheus.yml
new file mode 100644
index 00000000..58cddaad
--- /dev/null
+++ b/infrastructure/monitoring/prometheus/prometheus.yml
@@ -0,0 +1,31 @@
+global:
+  scrape_interval: 15s
+
+scrape_configs:
+  - job_name: 'gateway'
+    static_configs:
+      - targets: ['gateway:8080']
+
+  - job_name: 'auth-service'
+    static_configs:
+      - targets: ['auth-service:8080']
+
+  - job_name: 'training-service'
+    static_configs:
+      - targets: ['training-service:8080']
+
+  - job_name: 'forecasting-service'
+    static_configs:
+      - targets: ['forecasting-service:8080']
+
+  - job_name: 'data-service'
+    static_configs:
+      - targets: ['data-service:8080']
+
+  - job_name: 'tenant-service'
+    static_configs:
+      - targets: ['tenant-service:8080']
+
+  - job_name: 'notification-service'
+    static_configs:
+      - targets: ['notification-service:8080']
diff --git a/scripts/deploy.sh b/scripts/deploy.sh
new file mode 100755
index 00000000..64a0cc27
--- /dev/null
+++ b/scripts/deploy.sh
@@ -0,0 +1,18 @@
+#!/bin/bash
+
+echo "🚀 Deploying Bakery Forecasting Platform..."
+
+# Build and deploy all services
+docker-compose build
+docker-compose up -d
+
+echo "Waiting for services to be healthy..."
+sleep 30
+
+# Check service health
+echo "Checking service health..."
+curl -f http://localhost:8000/health || echo "Gateway health check failed"
+
+echo "✅ Deployment completed"
+echo "Gateway: http://localhost:8000"
+echo "API Docs: http://localhost:8000/docs"
diff --git a/scripts/setup.sh b/scripts/setup.sh
new file mode 100755
index 00000000..004f38b8
--- /dev/null
+++ b/scripts/setup.sh
@@ -0,0 +1,879 @@
+#!/bin/bash
+
+# scripts/setup.sh
+# Intelligent Setup Script - Extract artifacts and create microservices structure
+
+set -e
+
+echo "🚀 Setting up Bakery Forecasting Microservices Platform"
+echo "========================================================"
+
+# Colors for output
+RED='\033[0;31m'
+GREEN='\033[0;32m'
+YELLOW='\033[1;33m'
+BLUE='\033[0;34m'
+NC='\033[0m' # No Color
+
+print_step() {
+    echo -e "${BLUE}➤${NC} $1"
+}
+
+print_success() {
+    echo -e "${GREEN}✓${NC} $1"
+}
+
+print_warning() {
+    echo -e "${YELLOW}⚠${NC} $1"
+}
+
+print_error() {
+    echo -e "${RED}✗${NC} $1"
+}
+
+# Check prerequisites
+print_step "Checking prerequisites..."
+
+command -v docker >/dev/null 2>&1 || {
+    print_error "Docker is required but not installed. Please install Docker first."
+    exit 1
+}
+
+command -v docker-compose >/dev/null 2>&1 || {
+    print_error "Docker Compose is required but not installed. Please install Docker Compose first."
+    exit 1
+}
+
+print_success "Prerequisites check passed"
+
+# Function to extract files from artifact files
+extract_artifact_files() {
+    local artifact_file="$1"
+    local description="$2"
+
+    print_step "Processing $description..."
+
+    if [ ! -f "$artifact_file" ]; then
+        print_warning "Artifact file $artifact_file not found, skipping..."
+        return
+    fi
+
+    # Read the artifact file and extract individual files
+    local current_file=""
+    local current_content=""
+    local in_file=false
+
+    while IFS= read -r line; do
+        # Check if line starts with a file path (contains .py, .yml, .md, .sh, etc.)
+ if [[ "$line" =~ ^#[[:space:]]*(.*\.(py|yml|yaml|md|sh|txt|js|json|html|css|Dockerfile|requirements\.txt))$ ]]; then + # Save previous file if we were processing one + if [ "$in_file" = true ] && [ -n "$current_file" ]; then + # Create directory if it doesn't exist + local dir=$(dirname "$current_file") + mkdir -p "$dir" + + # Write content to file + echo "$current_content" > "$current_file" + print_success "Created: $current_file" + fi + + # Start new file + current_file=$(echo "$line" | sed 's/^#[[:space:]]*//') + current_content="" + in_file=true + + elif [ "$in_file" = true ]; then + # Add line to current file content + if [ -n "$current_content" ]; then + current_content="$current_content\n$line" + else + current_content="$line" + fi + fi + done < "$artifact_file" + + # Save the last file + if [ "$in_file" = true ] && [ -n "$current_file" ]; then + local dir=$(dirname "$current_file") + mkdir -p "$dir" + echo -e "$current_content" > "$current_file" + print_success "Created: $current_file" + fi +} + +# Function to extract Python files with multiple file markers +extract_python_artifact() { + local artifact_file="$1" + local description="$2" + + print_step "Processing $description..." + + if [ ! -f "$artifact_file" ]; then + print_warning "Artifact file $artifact_file not found, skipping..." + return + fi + + # Use Python to parse the multi-file artifact + python3 << EOF +import re +import os + +def extract_files(filename): + with open('$artifact_file', 'r') as f: + content = f.read() + + # Split by file markers (lines starting with # and containing file paths) + files = {} + current_file = None + current_content = [] + + for line in content.split('\n'): + # Check for file path markers + if re.match(r'^#\s+\S+\.(py|yml|yaml|txt|sh|json|html|css|js|Dockerfile)', line): + # Save previous file + if current_file and current_content: + files[current_file] = '\n'.join(current_content) + + # Start new file + current_file = re.sub(r'^#\s+', '', line) + current_content = [] + elif current_file: + current_content.append(line) + + # Save last file + if current_file and current_content: + files[current_file] = '\n'.join(current_content) + + # Write files + for filepath, file_content in files.items(): + # Clean up the content (remove leading/trailing quotes if present) + file_content = file_content.strip() + if file_content.startswith('"""') and file_content.endswith('"""'): + file_content = file_content[3:-3] + elif file_content.startswith("'''") and file_content.endswith("'''"): + file_content = file_content[3:-3] + + # Create directory + os.makedirs(os.path.dirname(filepath) if os.path.dirname(filepath) else '.', exist_ok=True) + + # Write file + with open(filepath, 'w') as f: + f.write(file_content) + + print(f"✓ Created: {filepath}") + +extract_files('$artifact_file') +EOF +} + +# Create base project structure first +print_step "Creating base project structure..." 
+
+# Create main directories
+mkdir -p {gateway,services/{auth,training,forecasting,data,tenant,notification},shared,frontend/{dashboard,marketing},infrastructure,deployment,tests,docs,scripts}
+
+# Create subdirectories for each service
+for service in auth training forecasting data tenant notification; do
+    mkdir -p services/$service/{app/{core,models,schemas,services,api,ml},migrations/versions,tests}
+    touch services/$service/app/__init__.py
+    touch services/$service/app/core/__init__.py
+    touch services/$service/app/models/__init__.py
+    touch services/$service/app/schemas/__init__.py
+    touch services/$service/app/services/__init__.py
+    touch services/$service/app/api/__init__.py
+    if [ "$service" = "training" ]; then
+        touch services/$service/app/ml/__init__.py
+    fi
+done
+
+# Create gateway structure
+mkdir -p gateway/{app/{core,middleware,routes},tests}
+touch gateway/app/__init__.py
+touch gateway/app/core/__init__.py
+touch gateway/app/middleware/__init__.py
+touch gateway/app/routes/__init__.py
+
+# Create shared library structure
+mkdir -p shared/{auth,database,messaging,monitoring,utils}
+for lib in auth database messaging monitoring utils; do
+    touch shared/$lib/__init__.py
+done
+
+# Create infrastructure directories
+mkdir -p infrastructure/{docker,kubernetes,terraform,monitoring}/{base,dev,staging,production}
+mkdir -p infrastructure/monitoring/{prometheus,grafana,logstash}
+
+print_success "Base project structure created"
+
+# Extract files from artifacts
+print_step "Extracting files from artifacts..."
+
+# Process shared libraries
+if [ -f "shared_libraries.py" ]; then
+    extract_python_artifact "shared_libraries.py" "Shared Libraries"
+fi
+
+# Process gateway service
+if [ -f "gateway_service.py" ]; then
+    extract_python_artifact "gateway_service.py" "Gateway Service"
+fi
+
+# Process auth service
+if [ -f "auth_service.py" ]; then
+    extract_python_artifact "auth_service.py" "Authentication Service"
+fi
+
+# Process training service
+if [ -f "training_service.py" ]; then
+    extract_python_artifact "training_service.py" "Training Service"
+fi
+
+
+print_step "Creating missing service files..."
+
+# Create remaining service files that might not be in artifacts
+for service in forecasting data tenant notification; do
+    service_dir="services/$service"
+
+    # Create main.py if it doesn't exist
-f "$service_dir/app/main.py" ]; then + cat > "$service_dir/app/main.py" << EOF +""" +$(echo $service | sed 's/.*/\L&/; s/[a-z]*/\u&/g') Service +""" + +import logging +from fastapi import FastAPI +from fastapi.middleware.cors import CORSMiddleware + +from app.core.config import settings +from app.core.database import database_manager +from shared.monitoring.logging import setup_logging +from shared.monitoring.metrics import MetricsCollector + +# Setup logging +setup_logging("$service-service", "INFO") +logger = logging.getLogger(__name__) + +# Create FastAPI app +app = FastAPI( + title="$(echo $service | sed 's/.*/\L&/; s/[a-z]*/\u&/g') Service", + description="$(echo $service | sed 's/.*/\L&/; s/[a-z]*/\u&/g') service for bakery forecasting", + version="1.0.0" +) + +# Initialize metrics collector +metrics_collector = MetricsCollector("$service-service") + +# CORS middleware +app.add_middleware( + CORSMiddleware, + allow_origins=["*"], + allow_credentials=True, + allow_methods=["*"], + allow_headers=["*"], +) + +@app.on_event("startup") +async def startup_event(): + """Application startup""" + logger.info("Starting $(echo $service | sed 's/.*/\L&/; s/[a-z]*/\u&/g') Service") + + # Create database tables + await database_manager.create_tables() + + # Start metrics server + metrics_collector.start_metrics_server(8080) + + logger.info("$(echo $service | sed 's/.*/\L&/; s/[a-z]*/\u&/g') Service started successfully") + +@app.get("/health") +async def health_check(): + """Health check endpoint""" + return { + "status": "healthy", + "service": "$service-service", + "version": "1.0.0" + } + +if __name__ == "__main__": + import uvicorn + uvicorn.run(app, host="0.0.0.0", port=8000) +EOF + print_success "Created: $service_dir/app/main.py" + fi + + # Create config.py if it doesn't exist + if [ ! -f "$service_dir/app/core/config.py" ]; then + cat > "$service_dir/app/core/config.py" << EOF +""" +$(echo $service | sed 's/.*/\L&/; s/[a-z]*/\u&/g') service configuration +""" + +import os +from pydantic import BaseSettings + +class Settings(BaseSettings): + """Application settings""" + + # Basic settings + APP_NAME: str = "$(echo $service | sed 's/.*/\L&/; s/[a-z]*/\u&/g') Service" + VERSION: str = "1.0.0" + DEBUG: bool = os.getenv("DEBUG", "False").lower() == "true" + LOG_LEVEL: str = os.getenv("LOG_LEVEL", "INFO") + + # Database settings + DATABASE_URL: str = os.getenv("DATABASE_URL", "postgresql+asyncpg://${service}_user:${service}_pass123@${service}-db:5432/${service}_db") + + # Redis settings + REDIS_URL: str = os.getenv("REDIS_URL", "redis://redis:6379/0") + + # RabbitMQ settings + RABBITMQ_URL: str = os.getenv("RABBITMQ_URL", "amqp://bakery:forecast123@rabbitmq:5672/") + + # Service URLs + AUTH_SERVICE_URL: str = os.getenv("AUTH_SERVICE_URL", "http://auth-service:8000") + + class Config: + env_file = ".env" + +settings = Settings() +EOF + print_success "Created: $service_dir/app/core/config.py" + fi + + # Create database.py if it doesn't exist + if [ ! -f "$service_dir/app/core/database.py" ]; then + cat > "$service_dir/app/core/database.py" << EOF +""" +Database configuration for $service service +""" + +from shared.database.base import DatabaseManager +from app.core.config import settings + +# Initialize database manager +database_manager = DatabaseManager(settings.DATABASE_URL) + +# Alias for convenience +get_db = database_manager.get_db +EOF + print_success "Created: $service_dir/app/core/database.py" + fi + + # Create requirements.txt if it doesn't exist + if [ ! 
-f "$service_dir/requirements.txt" ]; then + cat > "$service_dir/requirements.txt" << 'EOF' +fastapi==0.104.1 +uvicorn[standard]==0.24.0 +sqlalchemy==2.0.23 +asyncpg==0.29.0 +alembic==1.12.1 +pydantic==2.5.0 +pydantic-settings==2.1.0 +httpx==0.25.2 +redis==5.0.1 +aio-pika==9.3.0 +prometheus-client==0.17.1 +python-json-logger==2.0.4 +pytz==2023.3 +EOF + print_success "Created: $service_dir/requirements.txt" + fi + + # Create Dockerfile if it doesn't exist + if [ ! -f "$service_dir/Dockerfile" ]; then + cat > "$service_dir/Dockerfile" << 'EOF' +FROM python:3.11-slim + +WORKDIR /app + +# Install system dependencies +RUN apt-get update && apt-get install -y \ + gcc \ + curl \ + && rm -rf /var/lib/apt/lists/* + +# Copy requirements +COPY requirements.txt . + +# Install Python dependencies +RUN pip install --no-cache-dir -r requirements.txt + +# Copy application code +COPY . . + +# Add shared libraries to Python path +ENV PYTHONPATH="/app:/app/shared:$PYTHONPATH" + +# Expose port +EXPOSE 8000 + +# Health check +HEALTHCHECK --interval=30s --timeout=10s --start-period=5s --retries=3 \ + CMD curl -f http://localhost:8000/health || exit 1 + +# Run application +CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000"] +EOF + print_success "Created: $service_dir/Dockerfile" + fi +done + +# Create .env file +print_step "Creating environment configuration..." + +if [ ! -f ".env" ]; then + cat > .env << 'EOF' +# Environment +ENVIRONMENT=development +DEBUG=true +LOG_LEVEL=INFO + +# Database URLs +AUTH_DB_URL=postgresql+asyncpg://auth_user:auth_pass123@auth-db:5432/auth_db +TRAINING_DB_URL=postgresql+asyncpg://training_user:training_pass123@training-db:5432/training_db +FORECASTING_DB_URL=postgresql+asyncpg://forecasting_user:forecasting_pass123@forecasting-db:5432/forecasting_db +DATA_DB_URL=postgresql+asyncpg://data_user:data_pass123@data-db:5432/data_db +TENANT_DB_URL=postgresql+asyncpg://tenant_user:tenant_pass123@tenant-db:5432/tenant_db +NOTIFICATION_DB_URL=postgresql+asyncpg://notification_user:notification_pass123@notification-db:5432/notification_db + +# Redis +REDIS_URL=redis://redis:6379 + +# RabbitMQ +RABBITMQ_URL=amqp://bakery:forecast123@rabbitmq:5672/ + +# JWT +JWT_SECRET_KEY=your-super-secret-jwt-key-change-in-production-please +JWT_ALGORITHM=HS256 +JWT_ACCESS_TOKEN_EXPIRE_MINUTES=30 +JWT_REFRESH_TOKEN_EXPIRE_DAYS=7 + +# External APIs +AEMET_API_KEY=your-aemet-api-key-here +MADRID_OPENDATA_API_KEY=your-madrid-opendata-key-here + +# CORS +CORS_ORIGINS=http://localhost:3000,http://localhost:3001 + +# Email +SMTP_HOST=smtp.gmail.com +SMTP_PORT=587 +SMTP_USER=your-email@gmail.com +SMTP_PASSWORD=your-email-password + +# WhatsApp +WHATSAPP_API_KEY=your-whatsapp-api-key-here + +# Monitoring +PROMETHEUS_URL=http://prometheus:9090 +GRAFANA_URL=http://grafana:3000 +EOF + print_success "Environment configuration created" +fi + +# Create monitoring configuration +print_step "Creating monitoring configuration..." + +if [ ! 
-f "infrastructure/monitoring/prometheus/prometheus.yml" ]; then + cat > infrastructure/monitoring/prometheus/prometheus.yml << 'EOF' +global: + scrape_interval: 15s + +scrape_configs: + - job_name: 'gateway' + static_configs: + - targets: ['gateway:8080'] + + - job_name: 'auth-service' + static_configs: + - targets: ['auth-service:8080'] + + - job_name: 'training-service' + static_configs: + - targets: ['training-service:8080'] + + - job_name: 'forecasting-service' + static_configs: + - targets: ['forecasting-service:8080'] + + - job_name: 'data-service' + static_configs: + - targets: ['data-service:8080'] + + - job_name: 'tenant-service' + static_configs: + - targets: ['tenant-service:8080'] + + - job_name: 'notification-service' + static_configs: + - targets: ['notification-service:8080'] +EOF + print_success "Prometheus configuration created" +fi + +# Create utility scripts +print_step "Creating utility scripts..." + +# Create test script +cat > scripts/test.sh << 'EOF' +#!/bin/bash + +echo "🧪 Running tests for all services..." + +# Run tests for each service +for service in auth training forecasting data tenant notification; do + echo "Testing $service service..." + if docker-compose ps | grep -q "${service}-service.*Up"; then + docker-compose exec -T ${service}-service python -m pytest tests/ -v || echo "Tests failed for $service" + else + echo "Service $service is not running, skipping tests" + fi +done + +echo "✅ Test run completed" +EOF + +# Create deploy script +cat > scripts/deploy.sh << 'EOF' +#!/bin/bash + +echo "🚀 Deploying Bakery Forecasting Platform..." + +# Build and deploy all services +docker-compose build +docker-compose up -d + +echo "Waiting for services to be healthy..." +sleep 30 + +# Check service health +echo "Checking service health..." +curl -f http://localhost:8000/health || echo "Gateway health check failed" + +echo "✅ Deployment completed" +echo "Gateway: http://localhost:8000" +echo "API Docs: http://localhost:8000/docs" +EOF + +# Make scripts executable +chmod +x scripts/*.sh + +print_success "Utility scripts created" + +# Create .gitignore +print_step "Creating .gitignore..." + +if [ ! -f ".gitignore" ]; then + cat > .gitignore << 'EOF' +# Environment +.env +.env.local +.env.development.local +.env.test.local +.env.production.local + +# Python +__pycache__/ +*.py[cod] +*$py.class +*.so +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST +.pytest_cache/ +.coverage +.coverage.* +htmlcov/ +.tox/ +.nox/ +.hypothesis/ +.mypy_cache/ +.dmyp.json +dmyp.json +.pyre/ + +# Virtual Environment +venv/ +ENV/ +env/ +.venv + +# Node +node_modules/ +npm-debug.log* +yarn-debug.log* +yarn-error.log* +.pnpm-debug.log* +.npm +.eslintcache +.next +out/ +build/ +dist/ + +# IDE +.vscode/ +.idea/ +*.swp +*.swo +*~ +.DS_Store + +# Logs +logs/ +*.log + +# Database +*.db +*.sqlite +*.sqlite3 + +# ML Models +*.pkl +*.joblib +*.h5 +models/ + +# Data +data/external/ +data/processed/ +*.csv +*.xlsx + +# Docker +.docker/ + +# Infrastructure +*.tfstate +*.tfstate.backup +.terraform/ +.terraform.lock.hcl + +# Kubernetes +kubeconfig +*.yaml.bak + +# Monitoring +prometheus_data/ +grafana_data/ +elasticsearch_data/ + +# Artifacts (from Claude) +*_service.py +*_libraries.py +*.md +setup_scripts.sh +EOF + print_success ".gitignore created" +fi + +# Create README +print_step "Creating documentation..." + +if [ ! 
-f "README.md" ]; then + cat > README.md << 'EOF' +# Bakery Forecasting Platform - Microservices + +## Overview +AI-powered demand forecasting platform for bakeries in Madrid, Spain using microservices architecture. + +## Architecture +- **API Gateway**: Central entry point for all client requests +- **Auth Service**: User authentication and authorization +- **Training Service**: ML model training for demand forecasting +- **Forecasting Service**: Generate predictions using trained models +- **Data Service**: External data integration (weather, traffic, events) +- **Tenant Service**: Multi-tenant management +- **Notification Service**: Email and WhatsApp notifications + +## Quick Start + +### Prerequisites +- Docker and Docker Compose +- Python 3.11+ +- Node.js 18+ + +### Setup +```bash +# Run setup script (this script!) +./scripts/setup.sh + +# Start services +docker-compose up -d + +# Check service health +curl http://localhost:8000/health +``` + +### Services +- **Gateway**: http://localhost:8000 +- **API Docs**: http://localhost:8000/docs +- **Grafana**: http://localhost:3002 +- **Prometheus**: http://localhost:9090 +- **RabbitMQ Management**: http://localhost:15672 + +### Development + +#### Running Tests +```bash +./scripts/test.sh +``` + +#### Building Services +```bash +docker-compose build +``` + +#### Viewing Logs +```bash +# All services +docker-compose logs -f + +# Specific service +docker-compose logs -f auth-service +``` + +#### Service URLs (Development) +- Gateway: http://localhost:8000 +- Auth Service: http://localhost:8001 +- Training Service: http://localhost:8002 +- Forecasting Service: http://localhost:8003 +- Data Service: http://localhost:8004 +- Tenant Service: http://localhost:8005 +- Notification Service: http://localhost:8006 + +## Environment Variables + +Copy `.env.example` to `.env` and update the following: + +```bash +# External API Keys +AEMET_API_KEY=your-aemet-api-key +MADRID_OPENDATA_API_KEY=your-madrid-opendata-key + +# Email Configuration +SMTP_USER=your-email@gmail.com +SMTP_PASSWORD=your-email-password + +# WhatsApp API +WHATSAPP_API_KEY=your-whatsapp-api-key + +# JWT Secret (change in production!) +JWT_SECRET_KEY=your-super-secret-jwt-key +``` + +## Troubleshooting + +### Services won't start +```bash +# Check if ports are available +docker-compose ps +netstat -tulpn | grep :8000 + +# Restart services +docker-compose down +docker-compose up -d +``` + +### Database connection issues +```bash +# Check database containers +docker-compose logs auth-db +docker-compose logs training-db + +# Reset databases +docker-compose down -v +docker-compose up -d +``` + +### Service communication issues +```bash +# Check service health +curl http://localhost:8000/health +curl http://localhost:8001/health +curl http://localhost:8002/health + +# Check RabbitMQ +open http://localhost:15672 +# User: bakery, Password: forecast123 +``` + +## Next Steps + +1. **Configure External APIs**: Add your AEMET and Madrid Open Data API keys +2. **Test Authentication**: Register a user and test login +3. **Upload Sales Data**: Import historical sales data +4. **Train Models**: Start your first training job +5. **Generate Forecasts**: Create demand predictions + +## License +MIT License +EOF + print_success "Documentation created" +fi + +# Final steps +print_step "Final setup steps..." 
+
+# Copy shared libraries to each service (for Docker builds)
+for service in auth training forecasting data tenant notification; do
+    if [ -d "shared" ]; then
+        cp -r shared services/$service/ 2>/dev/null || true
+    fi
+done
+
+# Copy shared libraries to gateway
+if [ -d "shared" ]; then
+    cp -r shared gateway/ 2>/dev/null || true
+fi
+
+# Initialize Git repository if not exists
+if [ ! -d ".git" ]; then
+    git init
+    git add .
+    git commit -m "Initial microservices setup from artifacts"
+    print_success "Git repository initialized"
+fi
+
+echo
+echo "🎉 Setup completed successfully!"
+echo "==============================================="
+echo
+echo "Next steps:"
+echo "1. Update .env with your actual API keys"
+echo "2. Start services: docker-compose up -d"
+echo "3. Check health: curl http://localhost:8000/health"
+echo "4. View API docs: http://localhost:8000/docs"
+echo "5. Monitor services: http://localhost:3002 (Grafana)"
+echo
+echo "Services will be available at:"
+echo "- Gateway: http://localhost:8000"
+echo "- Auth Service: http://localhost:8001"
+echo "- Training Service: http://localhost:8002"
+echo "- Monitoring: http://localhost:3002"
+echo "- RabbitMQ: http://localhost:15672"
+echo
+echo "Artifact files processed:"
+# || true keeps `set -e` from aborting when an artifact file is absent
+[ -f "shared_libraries.py" ] && echo "✓ shared_libraries.py" || true
+[ -f "gateway_service.py" ] && echo "✓ gateway_service.py" || true
+[ -f "auth_service.py" ] && echo "✓ auth_service.py" || true
+[ -f "training_service.py" ] && echo "✓ training_service.py" || true
+[ -f "docker-compose.yml" ] && echo "✓ docker-compose.yml" || true
+echo
+echo "Happy coding! 🚀"
\ No newline at end of file
diff --git a/scripts/test.sh b/scripts/test.sh
new file mode 100755
index 00000000..313bc9a6
--- /dev/null
+++ b/scripts/test.sh
@@ -0,0 +1,15 @@
+#!/bin/bash
+
+echo "🧪 Running tests for all services..."
+
+# Run tests for each service
+for service in auth training forecasting data tenant notification; do
+    echo "Testing $service service..."
+    if docker-compose ps | grep -q "${service}-service.*Up"; then
+        docker-compose exec -T ${service}-service python -m pytest tests/ -v || echo "Tests failed for $service"
+    else
+        echo "Service $service is not running, skipping tests"
+    fi
+done
+
+echo "✅ Test run completed"
diff --git a/services/auth/app/__init__.py b/services/auth/app/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/services/auth/app/api/__init__.py b/services/auth/app/api/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/services/auth/app/api/auth.py b/services/auth/app/api/auth.py
new file mode 100644
index 00000000..a143d8dd
--- /dev/null
+++ b/services/auth/app/api/auth.py
@@ -0,0 +1,124 @@
+"""
+Authentication API routes
+"""
+
+from fastapi import APIRouter, Depends, HTTPException, status, Request
+from sqlalchemy.ext.asyncio import AsyncSession
+import logging
+
+from app.core.database import get_db
+from app.schemas.auth import UserRegistration, UserLogin, TokenResponse, RefreshTokenRequest, UserResponse
+from app.services.auth_service import AuthService
+
+logger = logging.getLogger(__name__)
+router = APIRouter()
+
+@router.post("/register", response_model=UserResponse)
+async def register(
+    user_data: UserRegistration,
+    db: AsyncSession = Depends(get_db)
+):
+    """Register a new user"""
+    try:
+        return await AuthService.register_user(user_data, db)
+    except HTTPException:
+        raise
+    except Exception as e:
+        logger.error(f"Registration error: {e}")
+        raise HTTPException(
+            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
+            detail="Registration failed"
+        )
+
+@router.post("/login", response_model=TokenResponse)
+async def login(
+    login_data: UserLogin,
+    request: Request,
+    db: AsyncSession = Depends(get_db)
+):
+    """User login"""
+    try:
+        ip_address = request.client.host
+        user_agent = request.headers.get("user-agent", "")
+
+        return await AuthService.login_user(login_data, db, ip_address, user_agent)
+    except HTTPException:
+        raise
+    except Exception as e:
+        logger.error(f"Login error: {e}")
+        raise HTTPException(
+            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
+            detail="Login failed"
+        )
+
+@router.post("/refresh", response_model=TokenResponse)
+async def refresh_token(
+    refresh_data: RefreshTokenRequest,
+    db: AsyncSession = Depends(get_db)
+):
+    """Refresh access token"""
+    try:
+        return await AuthService.refresh_token(refresh_data.refresh_token, db)
+    except HTTPException:
+        raise
+    except Exception as e:
+        logger.error(f"Token refresh error: {e}")
+        raise HTTPException(
+            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
+            detail="Token refresh failed"
+        )
+
+@router.post("/verify")
+async def verify_token(
+    request: Request,
+    db: AsyncSession = Depends(get_db)
+):
+    """Verify access token"""
+    try:
+        auth_header = request.headers.get("Authorization")
+        if not auth_header or not auth_header.startswith("Bearer "):
+            raise HTTPException(
+                status_code=status.HTTP_401_UNAUTHORIZED,
+                detail="Authorization header required"
+            )
+
+        token = auth_header.split(" ")[1]
+        return await AuthService.verify_token(token, db)
+    except HTTPException:
+        raise
+    except Exception as e:
+        logger.error(f"Token verification error: {e}")
+        raise HTTPException(
+            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
+            detail="Token verification failed"
+        )
+
+@router.post("/logout")
+async def logout(
+    request: Request,
+    db: AsyncSession = Depends(get_db)
+):
+    """User logout"""
+    try:
+        auth_header = request.headers.get("Authorization")
+        if not auth_header or not auth_header.startswith("Bearer "):
+            raise HTTPException(
+                status_code=status.HTTP_401_UNAUTHORIZED,
+                detail="Authorization header required"
+            )
+
+        token = auth_header.split(" ")[1]
+        user_data = await AuthService.verify_token(token, db)
+
+        await AuthService.logout_user(user_data["user_id"], db)
+
+        return {"message": "Logged out successfully"}
+    except HTTPException:
+        raise
+    except Exception as e:
+        logger.error(f"Logout error: {e}")
+        raise HTTPException(
+            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
+            detail="Logout failed"
+        )
\ No newline at end of file
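A minimal end-to-end sketch against these routes; the port follows the development URLs in the README, and the payload fields mirror the UserRegistration schema below (the demo password satisfies the configured policy):

    import asyncio
    import httpx

    async def main():
        async with httpx.AsyncClient(base_url="http://localhost:8001") as client:
            await client.post("/auth/register", json={
                "email": "owner@example.com",
                "password": "Str0ngPassw0rd",
                "full_name": "Bakery Owner",
            })
            resp = await client.post("/auth/login", json={
                "email": "owner@example.com",
                "password": "Str0ngPassw0rd",
            })
            tokens = resp.json()
            verify = await client.post(
                "/auth/verify",
                headers={"Authorization": f"Bearer {tokens['access_token']}"},
            )
            print(verify.json())

    asyncio.run(main())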
diff --git a/services/auth/app/core/__init__.py b/services/auth/app/core/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/services/auth/app/core/config.py b/services/auth/app/core/config.py
new file mode 100644
index 00000000..15ea41a7
--- /dev/null
+++ b/services/auth/app/core/config.py
@@ -0,0 +1,47 @@
+"""
+Authentication service configuration
+"""
+
+import os
+# BaseSettings lives in the pydantic-settings package under Pydantic v2
+from pydantic_settings import BaseSettings
+
+class Settings(BaseSettings):
+    """Application settings"""
+
+    # Basic settings
+    APP_NAME: str = "Authentication Service"
+    VERSION: str = "1.0.0"
+    DEBUG: bool = os.getenv("DEBUG", "False").lower() == "true"
+    LOG_LEVEL: str = os.getenv("LOG_LEVEL", "INFO")
+
+    # Database settings
+    DATABASE_URL: str = os.getenv("DATABASE_URL", "postgresql+asyncpg://auth_user:auth_pass123@auth-db:5432/auth_db")
+
+    # Redis settings
+    REDIS_URL: str = os.getenv("REDIS_URL", "redis://redis:6379/0")
+
+    # JWT settings
+    JWT_SECRET_KEY: str = os.getenv("JWT_SECRET_KEY", "your-super-secret-jwt-key")
+    JWT_ALGORITHM: str = os.getenv("JWT_ALGORITHM", "HS256")
+    JWT_ACCESS_TOKEN_EXPIRE_MINUTES: int = int(os.getenv("JWT_ACCESS_TOKEN_EXPIRE_MINUTES", "30"))
+    JWT_REFRESH_TOKEN_EXPIRE_DAYS: int = int(os.getenv("JWT_REFRESH_TOKEN_EXPIRE_DAYS", "7"))
+
+    # Password settings
+    PASSWORD_MIN_LENGTH: int = 8
+    PASSWORD_REQUIRE_UPPERCASE: bool = True
+    PASSWORD_REQUIRE_LOWERCASE: bool = True
+    PASSWORD_REQUIRE_NUMBERS: bool = True
+    PASSWORD_REQUIRE_SYMBOLS: bool = False
+
+    # Security settings
+    BCRYPT_ROUNDS: int = 12
+    MAX_LOGIN_ATTEMPTS: int = 5
+    LOCKOUT_DURATION_MINUTES: int = 30
+
+    # RabbitMQ settings
+    RABBITMQ_URL: str = os.getenv("RABBITMQ_URL", "amqp://bakery:forecast123@rabbitmq:5672/")
+
+    class Config:
+        env_file = ".env"
+
+settings = Settings()
\ No newline at end of file
diff --git a/services/auth/app/core/database.py b/services/auth/app/core/database.py
new file mode 100644
index 00000000..c463b652
--- /dev/null
+++ b/services/auth/app/core/database.py
@@ -0,0 +1,12 @@
+"""
+Database configuration for auth service
+"""
+
+from shared.database.base import DatabaseManager
+from app.core.config import settings
+
+# Initialize database manager
+database_manager = DatabaseManager(settings.DATABASE_URL)
+
+# Alias for convenience
+get_db = database_manager.get_db
\ No newline at end of file
diff --git a/services/auth/app/core/security.py b/services/auth/app/core/security.py
new file mode 100644
index 00000000..3ab157e7
--- /dev/null
+++ b/services/auth/app/core/security.py
@@ -0,0 +1,153 @@
+"""
+Security utilities for authentication service
+"""
+
+import bcrypt
+import re
+from datetime import timedelta
+from typing import Optional, Dict, Any
+import redis.asyncio as redis
+import logging
+
+from app.core.config import settings
+from shared.auth.jwt_handler import JWTHandler
+
+logger = logging.getLogger(__name__)
+
+# Initialize JWT handler
+jwt_handler = JWTHandler(settings.JWT_SECRET_KEY, settings.JWT_ALGORITHM)
+
+# Redis client for session management
+redis_client = redis.from_url(settings.REDIS_URL)
+
+class SecurityManager:
+    """Security utilities for authentication"""
+
+    @staticmethod
+    def hash_password(password: str) -> str:
+        """Hash password using bcrypt"""
+        salt = bcrypt.gensalt(rounds=settings.BCRYPT_ROUNDS)
+        return bcrypt.hashpw(password.encode('utf-8'), salt).decode('utf-8')
+
+    @staticmethod
+    def verify_password(password: str, hashed_password: str) -> bool:
+        """Verify password against hash"""
+        return bcrypt.checkpw(password.encode('utf-8'), hashed_password.encode('utf-8'))
+
+    @staticmethod
+    def validate_password(password: str) -> bool:
+        """Validate password strength"""
+        if len(password) < settings.PASSWORD_MIN_LENGTH:
+            return False
+
+        if settings.PASSWORD_REQUIRE_UPPERCASE and not re.search(r'[A-Z]', password):
+            return False
+
+        if settings.PASSWORD_REQUIRE_LOWERCASE and not re.search(r'[a-z]', password):
+            return False
+
+        if settings.PASSWORD_REQUIRE_NUMBERS and not re.search(r'\d', password):
+            return False
+
+        if settings.PASSWORD_REQUIRE_SYMBOLS and not re.search(r'[!@#$%^&*(),.?":{}|<>]', password):
+            return False
+
+        return True
+
+    @staticmethod
+    def create_access_token(user_data: Dict[str, Any]) -> str:
+        """Create JWT access token"""
+        expires_delta = timedelta(minutes=settings.JWT_ACCESS_TOKEN_EXPIRE_MINUTES)
+        return jwt_handler.create_access_token(user_data, expires_delta)
+
+    @staticmethod
+    def create_refresh_token(user_data: Dict[str, Any]) -> str:
+        """Create JWT refresh token"""
+        expires_delta = timedelta(days=settings.JWT_REFRESH_TOKEN_EXPIRE_DAYS)
+        return jwt_handler.create_refresh_token(user_data, expires_delta)
+
+    @staticmethod
+    def verify_token(token: str) -> Optional[Dict[str, Any]]:
+        """Verify JWT token"""
+        return jwt_handler.verify_token(token)
+
+    @staticmethod
+    async def check_login_attempts(email: str) -> bool:
+        """Check if user has exceeded login attempts"""
+        try:
+            key = f"login_attempts:{email}"
+            attempts = await redis_client.get(key)
+
+            if attempts is None:
+                return True
+
+            return int(attempts) < settings.MAX_LOGIN_ATTEMPTS
+
+        except Exception as e:
+            logger.error(f"Error checking login attempts: {e}")
+            return True
+
+    @staticmethod
+    async def increment_login_attempts(email: str):
+        """Increment login attempts counter"""
+        try:
+            key = f"login_attempts:{email}"
+            current_attempts = await redis_client.incr(key)
+
+            # Set TTL on first attempt
+            if current_attempts == 1:
+                await redis_client.expire(key, settings.LOCKOUT_DURATION_MINUTES * 60)
+
+        except Exception as e:
+            logger.error(f"Error incrementing login attempts: {e}")
+
+    @staticmethod
+    async def clear_login_attempts(email: str):
+        """Clear login attempts counter"""
+        try:
+            key = f"login_attempts:{email}"
+            await redis_client.delete(key)
+
+        except Exception as e:
+            logger.error(f"Error clearing login attempts: {e}")
+
+    @staticmethod
+    async def store_refresh_token(user_id: str, refresh_token: str):
+        """Store refresh token in Redis"""
+        try:
+            key = f"refresh_token:{user_id}"
+            expires_seconds = settings.JWT_REFRESH_TOKEN_EXPIRE_DAYS * 24 * 3600
+            await redis_client.setex(key, expires_seconds, refresh_token)
+
+        except Exception as e:
+            logger.error(f"Error storing refresh token: {e}")
+
+    @staticmethod
+    async def verify_refresh_token(user_id: str, refresh_token: str) -> bool:
+        """Verify refresh token"""
+        try:
+            key = f"refresh_token:{user_id}"
+            stored_token = await redis_client.get(key)
+
+            if stored_token is None:
+                return False
+
+            return stored_token.decode() == refresh_token
+
+        except Exception as e:
+            logger.error(f"Error verifying refresh token: {e}")
+            return False
+
+    @staticmethod
+    async def revoke_refresh_token(user_id: str):
+        """Revoke refresh token"""
+        try:
+            key = f"refresh_token:{user_id}"
+            await redis_client.delete(key)
+
+        except Exception as e:
+            logger.error(f"Error revoking refresh token: {e}")
+
+# Global security manager instance
+security_manager = SecurityManager()
\ No newline at end of file
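A short usage sketch, assuming the settings module resolves and a Redis instance is reachable for the lockout helpers:

    import asyncio
    from app.core.security import security_manager

    # Password handling is synchronous
    hashed = security_manager.hash_password("Str0ngPassw0rd")
    assert security_manager.verify_password("Str0ngPassw0rd", hashed)
    assert security_manager.validate_password("Str0ngPassw0rd")

    # Lockout bookkeeping is async (backed by Redis)
    async def lockout_demo():
        email = "owner@example.com"
        if await security_manager.check_login_attempts(email):
            await security_manager.increment_login_attempts(email)  # on a failed login
        await security_manager.clear_login_attempts(email)           # on a successful login

    asyncio.run(lockout_demo())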
f"refresh_token:{user_id}" + stored_token = await redis_client.get(key) + + if stored_token is None: + return False + + return stored_token.decode() == refresh_token + + except Exception as e: + logger.error(f"Error verifying refresh token: {e}") + return False + + @staticmethod + async def revoke_refresh_token(user_id: str): + """Revoke refresh token""" + try: + key = f"refresh_token:{user_id}" + await redis_client.delete(key) + + except Exception as e: + logger.error(f"Error revoking refresh token: {e}") + +# Global security manager instance +security_manager = SecurityManager() \ No newline at end of file diff --git a/services/auth/app/main.py b/services/auth/app/main.py new file mode 100644 index 00000000..56e0eabf --- /dev/null +++ b/services/auth/app/main.py @@ -0,0 +1,83 @@ +""" +Authentication Service +Handles user authentication, registration, and token management +""" + +import logging +from datetime import timedelta +from fastapi import FastAPI, Depends, HTTPException, status +from fastapi.middleware.cors import CORSMiddleware +from fastapi.security import HTTPBearer + +from app.core.config import settings +from app.core.database import database_manager +from app.api import auth, users +from app.services.messaging import message_publisher +from shared.monitoring.logging import setup_logging +from shared.monitoring.metrics import MetricsCollector + +# Setup logging +setup_logging("auth-service", settings.LOG_LEVEL) +logger = logging.getLogger(__name__) + +# Create FastAPI app +app = FastAPI( + title="Authentication Service", + description="User authentication and authorization service", + version="1.0.0" +) + +# Initialize metrics collector +metrics_collector = MetricsCollector("auth-service") + +# CORS middleware +app.add_middleware( + CORSMiddleware, + allow_origins=["*"], + allow_credentials=True, + allow_methods=["*"], + allow_headers=["*"], +) + +# Include routers +app.include_router(auth.router, prefix="/auth", tags=["authentication"]) +app.include_router(users.router, prefix="/users", tags=["users"]) + +@app.on_event("startup") +async def startup_event(): + """Application startup""" + logger.info("Starting Authentication Service") + + # Create database tables + await database_manager.create_tables() + + # Initialize message publisher + await message_publisher.connect() + + # Start metrics server + metrics_collector.start_metrics_server(8080) + + logger.info("Authentication Service started successfully") + +@app.on_event("shutdown") +async def shutdown_event(): + """Application shutdown""" + logger.info("Shutting down Authentication Service") + + # Cleanup message publisher + await message_publisher.disconnect() + + logger.info("Authentication Service shutdown complete") + +@app.get("/health") +async def health_check(): + """Health check endpoint""" + return { + "status": "healthy", + "service": "auth-service", + "version": "1.0.0" + } + +if __name__ == "__main__": + import uvicorn + uvicorn.run(app, host="0.0.0.0", port=8000) \ No newline at end of file diff --git a/services/auth/app/schemas/__init__.py b/services/auth/app/schemas/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/services/auth/app/schemas/auth.py b/services/auth/app/schemas/auth.py new file mode 100644 index 00000000..45daf77b --- /dev/null +++ b/services/auth/app/schemas/auth.py @@ -0,0 +1,108 @@ +""" +Authentication schemas +""" + +from pydantic import BaseModel, EmailStr, Field, validator +from typing import Optional +from datetime import datetime + +from app.core.config import 
settings +from shared.utils.validation import validate_spanish_phone + +class UserRegistration(BaseModel): + """User registration schema""" + email: EmailStr + password: str = Field(..., min_length=settings.PASSWORD_MIN_LENGTH) + full_name: str = Field(..., min_length=2, max_length=100) + phone: Optional[str] = None + language: str = Field(default="es", regex="^(es|en)$") + + @validator('password') + def validate_password(cls, v): + """Validate password strength""" + from app.core.security import security_manager + if not security_manager.validate_password(v): + raise ValueError('Password does not meet security requirements') + return v + + @validator('phone') + def validate_phone(cls, v): + """Validate phone number""" + if v and not validate_spanish_phone(v): + raise ValueError('Invalid Spanish phone number') + return v + +class UserLogin(BaseModel): + """User login schema""" + email: EmailStr + password: str + +class TokenResponse(BaseModel): + """Token response schema""" + access_token: str + refresh_token: str + token_type: str = "bearer" + expires_in: int + +class RefreshTokenRequest(BaseModel): + """Refresh token request schema""" + refresh_token: str + +class UserResponse(BaseModel): + """User response schema""" + id: str + email: str + full_name: str + is_active: bool + is_verified: bool + tenant_id: Optional[str] + role: str + phone: Optional[str] + language: str + timezone: str + created_at: Optional[datetime] + last_login: Optional[datetime] + +class PasswordChangeRequest(BaseModel): + """Password change request schema""" + current_password: str + new_password: str = Field(..., min_length=settings.PASSWORD_MIN_LENGTH) + + @validator('new_password') + def validate_new_password(cls, v): + """Validate new password strength""" + from app.core.security import security_manager + if not security_manager.validate_password(v): + raise ValueError('New password does not meet security requirements') + return v + +class PasswordResetRequest(BaseModel): + """Password reset request schema""" + email: EmailStr + +class PasswordResetConfirm(BaseModel): + """Password reset confirmation schema""" + token: str + new_password: str = Field(..., min_length=settings.PASSWORD_MIN_LENGTH) + + @validator('new_password') + def validate_new_password(cls, v): + """Validate new password strength""" + from app.core.security import security_manager + if not security_manager.validate_password(v): + raise ValueError('New password does not meet security requirements') + return v + +class UserUpdate(BaseModel): + """User update schema""" + full_name: Optional[str] = Field(None, min_length=2, max_length=100) + phone: Optional[str] = None + language: Optional[str] = Field(None, regex="^(es|en)$") + timezone: Optional[str] = None + + @validator('phone') + def validate_phone(cls, v): + """Validate phone number""" + if v and not validate_spanish_phone(v): + raise ValueError('Invalid Spanish phone number') + return v \ No newline at end of file diff --git a/services/auth/app/services/__init__.py b/services/auth/app/services/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/services/auth/app/services/messaging.py b/services/auth/app/services/messaging.py new file mode 100644 index 00000000..ee49ddb4 --- /dev/null +++ b/services/auth/app/services/messaging.py @@ -0,0 +1,46 @@ +""" +Messaging service for auth service +""" + +from shared.messaging.rabbitmq import RabbitMQClient +from app.core.config import settings + +# Global message publisher +message_publisher = RabbitMQClient(settings.RABBITMQ_URL) + 
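As a quick usage sketch (illustrative only: the exchange name, the helper name, and the payload shape are assumptions, since this patch includes no caller of message_publisher), a registration handler in the auth service could publish one of the shared events like so:

    # hypothetical caller inside the auth service
    from dataclasses import asdict
    from shared.messaging.events import UserRegisteredEvent
    from app.services.messaging import message_publisher

    async def announce_registration(user_id: str, email: str) -> None:
        # event_id and timestamp are filled in by BaseEvent.__post_init__
        event = UserRegisteredEvent(
            service_name="auth-service",
            data={"user_id": user_id, "email": email},
        )
        payload = asdict(event)
        payload["timestamp"] = event.timestamp.isoformat()  # make it JSON-serializable
        await message_publisher.publish_event(
            exchange_name="auth.events",      # assumed exchange name
            routing_key=event.event_type,     # "user.registered"
            event_data=payload,
        )

message_publisher.connect() must have run first (main.py does this on startup); publish_event also lazily connects if the channel is missing.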
diff --git a/services/auth/Dockerfile b/services/auth/Dockerfile
new file mode 100644
--- /dev/null
+++ b/services/auth/Dockerfile
@@ -0,0 +1,31 @@
+FROM python:3.11-slim
+
+WORKDIR /app
+
+# Install system dependencies
+RUN apt-get update && apt-get install -y \
+    gcc \
+    curl \
+    && rm -rf /var/lib/apt/lists/*
+
+# Copy requirements
+COPY requirements.txt .
+
+# Install Python dependencies
+RUN pip install --no-cache-dir -r requirements.txt
+
+# Copy application code (includes the vendored shared/ package, so no
+# separate build stage is needed for it)
+COPY . .
+
+# Add shared libraries to Python path
+ENV PYTHONPATH="/app:/app/shared:$PYTHONPATH"
+
+# Expose port
+EXPOSE 8000
+
+# Health check
+HEALTHCHECK --interval=30s --timeout=10s --start-period=5s --retries=3 \
+    CMD curl -f http://localhost:8000/health || exit 1
+
+# Run application
+CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000"]
\ No newline at end of file
diff --git a/services/auth/requirements.txt b/services/auth/requirements.txt
new file mode 100644
index 00000000..483951aa
--- /dev/null
+++ b/services/auth/requirements.txt
@@ -0,0 +1,17 @@
+fastapi==0.104.1
+uvicorn[standard]==0.24.0
+sqlalchemy==2.0.23
+asyncpg==0.29.0
+alembic==1.12.1
+pydantic==2.5.0
+pydantic-settings==2.1.0
+python-jose[cryptography]==3.3.0
+passlib[bcrypt]==1.7.4
+bcrypt==4.0.1
+python-multipart==0.0.6
+redis==5.0.1
+aio-pika==9.3.0
+email-validator==2.0.0
+prometheus-client==0.17.1
+python-json-logger==2.0.4
+pytz==2023.3
\ No newline at end of file
diff --git a/services/auth/shared/auth/__init__.py b/services/auth/shared/auth/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/services/auth/shared/auth/decorators.py b/services/auth/shared/auth/decorators.py
new file mode 100644
index 00000000..53095a15
--- /dev/null
+++ b/services/auth/shared/auth/decorators.py
@@ -0,0 +1,41 @@
+"""
+Authentication decorators for FastAPI
+"""
+
+from functools import wraps
+from fastapi import HTTPException, Depends
+from fastapi.security import HTTPBearer
+import httpx
+import logging
+
+logger = logging.getLogger(__name__)
+
+security = HTTPBearer()
+
+def verify_service_token(auth_service_url: str):
+    """Verify service token with auth service"""
+
+    async def verify_token(token: str = Depends(security)):
+        try:
+            async with httpx.AsyncClient() as client:
+                response = await client.post(
+                    f"{auth_service_url}/verify",
+                    headers={"Authorization": f"Bearer {token.credentials}"}
+                )
+
+                if response.status_code == 200:
+                    return response.json()
+                else:
+                    raise HTTPException(
+                        status_code=401,
+                        detail="Invalid authentication credentials"
+                    )
+
+        except httpx.RequestError as e:
+            logger.error(f"Auth service unavailable: {e}")
+            raise HTTPException(
+                status_code=503,
+                detail="Authentication service unavailable"
+            )
+
+    return verify_token
\ No newline at end of file
diff --git a/services/auth/shared/auth/jwt_handler.py b/services/auth/shared/auth/jwt_handler.py
new file mode 100644
index 00000000..8e7643c7
--- /dev/null
+++ b/services/auth/shared/auth/jwt_handler.py
@@ -0,0 +1,58 @@
+"""
+Shared JWT Authentication Handler
+Used across all microservices for consistent authentication
+"""
+
+from jose import jwt  # python-jose is the JWT library pinned in requirements.txt
+from datetime import datetime, timedelta
+from typing import Optional, Dict, Any
+import logging
+
+logger = logging.getLogger(__name__)
+
+class JWTHandler:
+    """JWT token handling for microservices"""
+
+    def __init__(self, secret_key: str, algorithm: str = "HS256"):
+        self.secret_key = secret_key
+        self.algorithm = algorithm
+
+    def create_access_token(self, data: Dict[str, Any], expires_delta: Optional[timedelta] = None) -> str:
+        """Create JWT access token"""
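+        # work on a copy so the caller's claims dict is not mutated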
+        to_encode = data.copy()
+
+        if expires_delta:
+            expire = datetime.utcnow() + expires_delta
+        else:
+            expire = datetime.utcnow() + timedelta(minutes=30)
+
+        to_encode.update({"exp": expire, "type": "access"})
+
+        encoded_jwt = jwt.encode(to_encode, self.secret_key, algorithm=self.algorithm)
+        return encoded_jwt
+
+    def create_refresh_token(self, data: Dict[str, Any], expires_delta: Optional[timedelta] = None) -> str:
+        """Create JWT refresh token"""
+        to_encode = data.copy()
+
+        if expires_delta:
+            expire = datetime.utcnow() + expires_delta
+        else:
+            expire = datetime.utcnow() + timedelta(days=7)
+
+        to_encode.update({"exp": expire, "type": "refresh"})
+
+        encoded_jwt = jwt.encode(to_encode, self.secret_key, algorithm=self.algorithm)
+        return encoded_jwt
+
+    def verify_token(self, token: str) -> Optional[Dict[str, Any]]:
+        """Verify and decode JWT token"""
+        try:
+            payload = jwt.decode(token, self.secret_key, algorithms=[self.algorithm])
+            return payload
+        except jwt.ExpiredSignatureError:
+            logger.warning("Token has expired")
+            return None
+        except jwt.JWTError:
+            logger.warning("Invalid token")
+            return None
\ No newline at end of file
diff --git a/services/auth/shared/database/__init__.py b/services/auth/shared/database/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/services/auth/shared/database/base.py b/services/auth/shared/database/base.py
new file mode 100644
index 00000000..e5766716
--- /dev/null
+++ b/services/auth/shared/database/base.py
@@ -0,0 +1,56 @@
+"""
+Base database configuration for all microservices
+"""
+
+import os
+from sqlalchemy import create_engine
+from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine
+from sqlalchemy.orm import sessionmaker, declarative_base
+from sqlalchemy.pool import StaticPool
+import logging
+
+logger = logging.getLogger(__name__)
+
+Base = declarative_base()
+
+class DatabaseManager:
+    """Database manager for microservices"""
+
+    def __init__(self, database_url: str):
+        self.database_url = database_url
+        self.async_engine = create_async_engine(
+            database_url,
+            echo=False,
+            pool_pre_ping=True,
+            pool_recycle=300,
+            pool_size=20,
+            max_overflow=30
+        )
+
+        self.async_session_local = sessionmaker(
+            self.async_engine,
+            class_=AsyncSession,
+            expire_on_commit=False
+        )
+
+    async def get_db(self):
+        """Get database session"""
+        async with self.async_session_local() as session:
+            try:
+                yield session
+            except Exception as e:
+                logger.error(f"Database session error: {e}")
+                await session.rollback()
+                raise
+            finally:
+                await session.close()
+
+    async def create_tables(self):
+        """Create database tables"""
+        async with self.async_engine.begin() as conn:
+            await conn.run_sync(Base.metadata.create_all)
+
+    async def drop_tables(self):
+        """Drop database tables"""
+        async with self.async_engine.begin() as conn:
+            await conn.run_sync(Base.metadata.drop_all)
\ No newline at end of file
diff --git a/services/auth/shared/messaging/__init__.py b/services/auth/shared/messaging/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/services/auth/shared/messaging/events.py b/services/auth/shared/messaging/events.py
new file mode 100644
index 00000000..812e972d
--- /dev/null
+++ b/services/auth/shared/messaging/events.py
@@ -0,0 +1,75 @@
+"""
+Event definitions for microservices communication
+"""
+
+from dataclasses import dataclass, field
+from datetime import datetime
+from typing import Dict, Any, Optional
+import uuid
+
+@dataclass
+class BaseEvent:
+    """Base event class"""
+    # every field has a default so that subclasses can override event_type
+    # with a default value without violating dataclass field-ordering rules
+    event_id: str = ""
+    event_type: str = ""
+    service_name: str = ""
+    timestamp: Optional[datetime] = None
+    data: Dict[str, Any] = field(default_factory=dict)
+    correlation_id: Optional[str] = None
+
+    def __post_init__(self):
+        if not self.event_id:
+            self.event_id = str(uuid.uuid4())
+        if not self.timestamp:
+            self.timestamp = datetime.utcnow()
+
+# Training Events
+@dataclass
+class TrainingStartedEvent(BaseEvent):
+    event_type: str = "training.started"
+
+@dataclass
+class TrainingCompletedEvent(BaseEvent):
+    event_type: str = "training.completed"
+
+@dataclass
+class TrainingFailedEvent(BaseEvent):
+    event_type: str = "training.failed"
+
+# Forecasting Events
+@dataclass
+class ForecastGeneratedEvent(BaseEvent):
+    event_type: str = "forecast.generated"
+
+@dataclass
+class ForecastRequestedEvent(BaseEvent):
+    event_type: str = "forecast.requested"
+
+# User Events
+@dataclass
+class UserRegisteredEvent(BaseEvent):
+    event_type: str = "user.registered"
+
+@dataclass
+class UserLoginEvent(BaseEvent):
+    event_type: str = "user.login"
+
+# Tenant Events
+@dataclass
+class TenantCreatedEvent(BaseEvent):
+    event_type: str = "tenant.created"
+
+@dataclass
+class TenantUpdatedEvent(BaseEvent):
+    event_type: str = "tenant.updated"
+
+# Notification Events
+@dataclass
+class NotificationSentEvent(BaseEvent):
+    event_type: str = "notification.sent"
+
+@dataclass
+class NotificationFailedEvent(BaseEvent):
+    event_type: str = "notification.failed"
\ No newline at end of file
diff --git a/services/auth/shared/messaging/rabbitmq.py b/services/auth/shared/messaging/rabbitmq.py
new file mode 100644
index 00000000..62d95cfb
--- /dev/null
+++ b/services/auth/shared/messaging/rabbitmq.py
@@ -0,0 +1,96 @@
+"""
+RabbitMQ messaging client for microservices
+"""
+
+import asyncio
+import json
+import logging
+from typing import Dict, Any, Callable
+import aio_pika
+from aio_pika import connect_robust, Message, DeliveryMode
+
+logger = logging.getLogger(__name__)
+
+class RabbitMQClient:
+    """RabbitMQ client for microservices communication"""
+
+    def __init__(self, connection_url: str):
+        self.connection_url = connection_url
+        self.connection = None
+        self.channel = None
+
+    async def connect(self):
+        """Connect to RabbitMQ"""
+        try:
+            self.connection = await connect_robust(self.connection_url)
+            self.channel = await self.connection.channel()
+            logger.info("Connected to RabbitMQ")
+        except Exception as e:
+            logger.error(f"Failed to connect to RabbitMQ: {e}")
+            raise
+
+    async def disconnect(self):
+        """Disconnect from RabbitMQ"""
+        if self.connection:
+            await self.connection.close()
+            logger.info("Disconnected from RabbitMQ")
+
+    async def publish_event(self, exchange_name: str, routing_key: str, event_data: Dict[str, Any]):
+        """Publish event to RabbitMQ"""
+        try:
+            if not self.channel:
+                await self.connect()
+
+            # Declare exchange
+            exchange = await self.channel.declare_exchange(
+                exchange_name,
+                aio_pika.ExchangeType.TOPIC,
+                durable=True
+            )
+
+            # Create message
+            message = Message(
+                json.dumps(event_data).encode(),
+                delivery_mode=DeliveryMode.PERSISTENT,
+                content_type="application/json"
+            )
+
+            # Publish message
+            await exchange.publish(message, routing_key=routing_key)
+
+            logger.info(f"Published event to {exchange_name} with routing key {routing_key}")
+
+        except Exception as e:
+            logger.error(f"Failed to publish event: {e}")
+            raise
+
+    async def consume_events(self, exchange_name: str, queue_name: str, routing_key: str, callback: Callable):
+        """Consume events from RabbitMQ"""
+        try:
+            if not self.channel:
+                await self.connect()
+
+            # Declare exchange
+            exchange = await
self.channel.declare_exchange( + exchange_name, + aio_pika.ExchangeType.TOPIC, + durable=True + ) + + # Declare queue + queue = await self.channel.declare_queue( + queue_name, + durable=True + ) + + # Bind queue to exchange + await queue.bind(exchange, routing_key) + + # Set up consumer + await queue.consume(callback) + + logger.info(f"Started consuming events from {queue_name}") + + except Exception as e: + logger.error(f"Failed to consume events: {e}") + raise \ No newline at end of file diff --git a/services/auth/shared/monitoring/__init__.py b/services/auth/shared/monitoring/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/services/auth/shared/monitoring/logging.py b/services/auth/shared/monitoring/logging.py new file mode 100644 index 00000000..0fde234d --- /dev/null +++ b/services/auth/shared/monitoring/logging.py @@ -0,0 +1,77 @@ +""" +Centralized logging configuration for microservices +""" + +import logging +import logging.config +import os +from typing import Dict, Any + +def setup_logging(service_name: str, log_level: str = "INFO") -> None: + """Set up logging configuration for a microservice""" + + config: Dict[str, Any] = { + "version": 1, + "disable_existing_loggers": False, + "formatters": { + "standard": { + "format": "%(asctime)s [%(levelname)s] %(name)s: %(message)s" + }, + "detailed": { + "format": "%(asctime)s [%(levelname)s] %(name)s [%(filename)s:%(lineno)d] %(message)s" + }, + "json": { + "()": "pythonjsonlogger.jsonlogger.JsonFormatter", + "format": "%(asctime)s %(name)s %(levelname)s %(message)s" + } + }, + "handlers": { + "console": { + "class": "logging.StreamHandler", + "level": log_level, + "formatter": "standard", + "stream": "ext://sys.stdout" + }, + "file": { + "class": "logging.FileHandler", + "level": log_level, + "formatter": "detailed", + "filename": f"/var/log/{service_name}.log", + "mode": "a" + }, + "logstash": { + "class": "logstash.TCPLogstashHandler", + "host": os.getenv("LOGSTASH_HOST", "localhost"), + "port": int(os.getenv("LOGSTASH_PORT", "5000")), + "version": 1, + "message_type": "logstash", + "fqdn": False, + "tags": [service_name] + } + }, + "loggers": { + "": { + "handlers": ["console", "file"], + "level": log_level, + "propagate": False + }, + "uvicorn": { + "handlers": ["console"], + "level": log_level, + "propagate": False + }, + "uvicorn.access": { + "handlers": ["console"], + "level": log_level, + "propagate": False + } + } + } + + # Add logstash handler if in production + if os.getenv("ENVIRONMENT") == "production": + config["loggers"][""]["handlers"].append("logstash") + + logging.config.dictConfig(config) + logger = logging.getLogger(__name__) + logger.info(f"Logging configured for {service_name}") \ No newline at end of file diff --git a/services/auth/shared/monitoring/metrics.py b/services/auth/shared/monitoring/metrics.py new file mode 100644 index 00000000..a5e35223 --- /dev/null +++ b/services/auth/shared/monitoring/metrics.py @@ -0,0 +1,112 @@ +""" +Metrics collection for microservices +""" + +import time +import logging +from typing import Dict, Any +from prometheus_client import Counter, Histogram, Gauge, start_http_server +from functools import wraps + +logger = logging.getLogger(__name__) + +# Prometheus metrics +REQUEST_COUNT = Counter( + 'http_requests_total', + 'Total HTTP requests', + ['method', 'endpoint', 'status_code', 'service'] +) + +REQUEST_DURATION = Histogram( + 'http_request_duration_seconds', + 'HTTP request duration in seconds', + ['method', 'endpoint', 'service'] +) + 
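+# NOTE: these collectors are declared at module level, so every
+# MetricsCollector instance in the process shares them; the "service"
+# label is what keeps each service's series distinguishable.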
+ACTIVE_CONNECTIONS = Gauge( + 'active_connections', + 'Active database connections', + ['service'] +) + +TRAINING_JOBS = Counter( + 'training_jobs_total', + 'Total training jobs', + ['status', 'service'] +) + +FORECASTS_GENERATED = Counter( + 'forecasts_generated_total', + 'Total forecasts generated', + ['service'] +) + +class MetricsCollector: + """Metrics collector for microservices""" + + def __init__(self, service_name: str): + self.service_name = service_name + self.start_time = time.time() + + def start_metrics_server(self, port: int = 8080): + """Start Prometheus metrics server""" + try: + start_http_server(port) + logger.info(f"Metrics server started on port {port}") + except Exception as e: + logger.error(f"Failed to start metrics server: {e}") + + def record_request(self, method: str, endpoint: str, status_code: int, duration: float): + """Record HTTP request metrics""" + REQUEST_COUNT.labels( + method=method, + endpoint=endpoint, + status_code=status_code, + service=self.service_name + ).inc() + + REQUEST_DURATION.labels( + method=method, + endpoint=endpoint, + service=self.service_name + ).observe(duration) + + def record_training_job(self, status: str): + """Record training job metrics""" + TRAINING_JOBS.labels( + status=status, + service=self.service_name + ).inc() + + def record_forecast_generated(self): + """Record forecast generation metrics""" + FORECASTS_GENERATED.labels( + service=self.service_name + ).inc() + + def set_active_connections(self, count: int): + """Set active database connections""" + ACTIVE_CONNECTIONS.labels( + service=self.service_name + ).set(count) + +def metrics_middleware(metrics_collector: MetricsCollector): + """Middleware to collect metrics""" + + def middleware(request, call_next): + start_time = time.time() + + response = call_next(request) + + duration = time.time() - start_time + + metrics_collector.record_request( + method=request.method, + endpoint=request.url.path, + status_code=response.status_code, + duration=duration + ) + + return response + + return middleware \ No newline at end of file diff --git a/services/auth/shared/utils/__init__.py b/services/auth/shared/utils/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/services/auth/shared/utils/datetime_utils.py b/services/auth/shared/utils/datetime_utils.py new file mode 100644 index 00000000..3035001a --- /dev/null +++ b/services/auth/shared/utils/datetime_utils.py @@ -0,0 +1,71 @@ +""" +DateTime utilities for microservices +""" + +from datetime import datetime, timezone, timedelta +from typing import Optional +import pytz + +def utc_now() -> datetime: + """Get current UTC datetime""" + return datetime.now(timezone.utc) + +def madrid_now() -> datetime: + """Get current Madrid datetime""" + madrid_tz = pytz.timezone('Europe/Madrid') + return datetime.now(madrid_tz) + +def to_utc(dt: datetime) -> datetime: + """Convert datetime to UTC""" + if dt.tzinfo is None: + dt = dt.replace(tzinfo=timezone.utc) + return dt.astimezone(timezone.utc) + +def to_madrid(dt: datetime) -> datetime: + """Convert datetime to Madrid timezone""" + madrid_tz = pytz.timezone('Europe/Madrid') + if dt.tzinfo is None: + dt = dt.replace(tzinfo=timezone.utc) + return dt.astimezone(madrid_tz) + +def format_datetime(dt: datetime, format_str: str = "%Y-%m-%d %H:%M:%S") -> str: + """Format datetime as string""" + return dt.strftime(format_str) + +def parse_datetime(dt_str: str, format_str: str = "%Y-%m-%d %H:%M:%S") -> datetime: + """Parse datetime from string""" + return datetime.strptime(dt_str, 
format_str) + +def is_business_hours(dt: Optional[datetime] = None) -> bool: + """Check if datetime is during business hours (9 AM - 6 PM Madrid time)""" + if dt is None: + dt = madrid_now() + + if dt.tzinfo is None: + dt = dt.replace(tzinfo=timezone.utc) + + madrid_dt = to_madrid(dt) + + # Check if it's a weekday (Monday=0, Sunday=6) + if madrid_dt.weekday() >= 5: # Weekend + return False + + # Check if it's business hours + return 9 <= madrid_dt.hour < 18 + +def next_business_day(dt: Optional[datetime] = None) -> datetime: + """Get next business day""" + if dt is None: + dt = madrid_now() + + if dt.tzinfo is None: + dt = dt.replace(tzinfo=timezone.utc) + + madrid_dt = to_madrid(dt) + + # Add days until we reach a weekday + while madrid_dt.weekday() >= 5: # Weekend + madrid_dt += timedelta(days=1) + + # Set to 9 AM + return madrid_dt.replace(hour=9, minute=0, second=0, microsecond=0) \ No newline at end of file diff --git a/services/auth/shared/utils/validation.py b/services/auth/shared/utils/validation.py new file mode 100644 index 00000000..c855b20c --- /dev/null +++ b/services/auth/shared/utils/validation.py @@ -0,0 +1,67 @@ +""" +Validation utilities for microservices +""" + +import re +from typing import Any, Optional +from email_validator import validate_email, EmailNotValidError + +def validate_spanish_phone(phone: str) -> bool: + """Validate Spanish phone number""" + # Spanish phone pattern: +34 followed by 9 digits + pattern = r'^(\+34|0034|34)?[6-9]\d{8}$' + return bool(re.match(pattern, phone.replace(' ', '').replace('-', ''))) + +def validate_email_address(email: str) -> bool: + """Validate email address""" + try: + validate_email(email) + return True + except EmailNotValidError: + return False + +def validate_tenant_name(name: str) -> bool: + """Validate tenant name""" + # Must be 2-50 characters, letters, numbers, spaces, hyphens, apostrophes + pattern = r"^[a-zA-ZÀ-ÿ0-9\s\-']{2,50}$" + return bool(re.match(pattern, name)) + +def validate_address(address: str) -> bool: + """Validate address""" + # Must be 5-200 characters + return 5 <= len(address.strip()) <= 200 + +def validate_coordinates(latitude: float, longitude: float) -> bool: + """Validate Madrid coordinates""" + # Madrid is roughly between these coordinates + madrid_bounds = { + 'lat_min': 40.3, + 'lat_max': 40.6, + 'lon_min': -3.8, + 'lon_max': -3.5 + } + + return ( + madrid_bounds['lat_min'] <= latitude <= madrid_bounds['lat_max'] and + madrid_bounds['lon_min'] <= longitude <= madrid_bounds['lon_max'] + ) + +def validate_product_name(name: str) -> bool: + """Validate product name""" + # Must be 1-50 characters, letters, numbers, spaces + pattern = r"^[a-zA-ZÀ-ÿ0-9\s]{1,50}$" + return bool(re.match(pattern, name)) + +def validate_positive_number(value: Any) -> bool: + """Validate positive number""" + try: + return float(value) > 0 + except (ValueError, TypeError): + return False + +def validate_non_negative_number(value: Any) -> bool: + """Validate non-negative number""" + try: + return float(value) >= 0 + except (ValueError, TypeError): + return False \ No newline at end of file diff --git a/services/data/Dockerfile b/services/data/Dockerfile new file mode 100644 index 00000000..ad431c34 --- /dev/null +++ b/services/data/Dockerfile @@ -0,0 +1,31 @@ +FROM python:3.11-slim + +WORKDIR /app + +# Install system dependencies +RUN apt-get update && apt-get install -y \ + gcc \ + curl \ + && rm -rf /var/lib/apt/lists/* + +# Copy requirements +COPY requirements.txt . 
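+# (requirements.txt is copied before the application source so the pip
+# install layer below stays cached when only application code changes)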
+
+# Install Python dependencies
+RUN pip install --no-cache-dir -r requirements.txt
+
+# Copy application code
+COPY . .
+
+# Add shared libraries to Python path
+ENV PYTHONPATH="/app:/app/shared:$PYTHONPATH"
+
+# Expose port
+EXPOSE 8000
+
+# Health check
+HEALTHCHECK --interval=30s --timeout=10s --start-period=5s --retries=3 \
+    CMD curl -f http://localhost:8000/health || exit 1
+
+# Run application
+CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000"]
diff --git a/services/data/app/__init__.py b/services/data/app/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/services/data/app/api/__init__.py b/services/data/app/api/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/services/data/app/core/__init__.py b/services/data/app/core/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/services/data/app/core/config.py b/services/data/app/core/config.py
new file mode 100644
index 00000000..53af1fd2
--- /dev/null
+++ b/services/data/app/core/config.py
@@ -0,0 +1,32 @@
+"""
+uLudata service configuration
+"""
+
+import os
+from pydantic_settings import BaseSettings
+
+class Settings(BaseSettings):
+    """Application settings"""
+
+    # Basic settings
+    APP_NAME: str = "uLudata Service"
+    VERSION: str = "1.0.0"
+    DEBUG: bool = os.getenv("DEBUG", "False").lower() == "true"
+    LOG_LEVEL: str = os.getenv("LOG_LEVEL", "INFO")
+
+    # Database settings
+    DATABASE_URL: str = os.getenv("DATABASE_URL", "postgresql+asyncpg://data_user:data_pass123@data-db:5432/data_db")
+
+    # Redis settings
+    REDIS_URL: str = os.getenv("REDIS_URL", "redis://redis:6379/0")
+
+    # RabbitMQ settings
+    RABBITMQ_URL: str = os.getenv("RABBITMQ_URL", "amqp://bakery:forecast123@rabbitmq:5672/")
+
+    # Service URLs
+    AUTH_SERVICE_URL: str = os.getenv("AUTH_SERVICE_URL", "http://auth-service:8000")
+
+    class Config:
+        env_file = ".env"
+
+settings = Settings()
diff --git a/services/data/app/core/database.py b/services/data/app/core/database.py
new file mode 100644
index 00000000..07002bf6
--- /dev/null
+++ b/services/data/app/core/database.py
@@ -0,0 +1,12 @@
+"""
+Database configuration for data service
+"""
+
+from shared.database.base import DatabaseManager
+from app.core.config import settings
+
+# Initialize database manager
+database_manager = DatabaseManager(settings.DATABASE_URL)
+
+# Alias for convenience
+get_db = database_manager.get_db
diff --git a/services/data/app/main.py b/services/data/app/main.py
new file mode 100644
index 00000000..7187a0f7
--- /dev/null
+++ b/services/data/app/main.py
@@ -0,0 +1,61 @@
+"""
+uLudata Service
+"""
+
+import logging
+from fastapi import FastAPI
+from fastapi.middleware.cors import CORSMiddleware
+
+from app.core.config import settings
+from app.core.database import database_manager
+from shared.monitoring.logging import setup_logging
+from shared.monitoring.metrics import MetricsCollector
+
+# Setup logging
+setup_logging("data-service", settings.LOG_LEVEL)
+logger = logging.getLogger(__name__)
+
+# Create FastAPI app
+app = FastAPI(
+    title="uLudata Service",
+    description="uLudata service for bakery forecasting",
+    version="1.0.0"
+)
+
+# Initialize metrics collector
+metrics_collector = MetricsCollector("data-service")
+
+# CORS middleware
+app.add_middleware(
+    CORSMiddleware,
+    allow_origins=["*"],
+    allow_credentials=True,
+    allow_methods=["*"],
+    allow_headers=["*"],
+)
+
+@app.on_event("startup")
+async def startup_event():
+    """Application startup"""
+    logger.info("Starting uLudata Service")
+
+    # Create database tables
+    await
database_manager.create_tables() + + # Start metrics server + metrics_collector.start_metrics_server(8080) + + logger.info("uLudata Service started successfully") + +@app.get("/health") +async def health_check(): + """Health check endpoint""" + return { + "status": "healthy", + "service": "data-service", + "version": "1.0.0" + } + +if __name__ == "__main__": + import uvicorn + uvicorn.run(app, host="0.0.0.0", port=8000) diff --git a/services/data/app/schemas/__init__.py b/services/data/app/schemas/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/services/data/app/services/__init__.py b/services/data/app/services/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/services/data/requirements.txt b/services/data/requirements.txt new file mode 100644 index 00000000..affefed0 --- /dev/null +++ b/services/data/requirements.txt @@ -0,0 +1,13 @@ +fastapi==0.104.1 +uvicorn[standard]==0.24.0 +sqlalchemy==2.0.23 +asyncpg==0.29.0 +alembic==1.12.1 +pydantic==2.5.0 +pydantic-settings==2.1.0 +httpx==0.25.2 +redis==5.0.1 +aio-pika==9.3.0 +prometheus-client==0.17.1 +python-json-logger==2.0.4 +pytz==2023.3 diff --git a/services/data/shared/auth/__init__.py b/services/data/shared/auth/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/services/data/shared/auth/decorators.py b/services/data/shared/auth/decorators.py new file mode 100644 index 00000000..53095a15 --- /dev/null +++ b/services/data/shared/auth/decorators.py @@ -0,0 +1,41 @@ +""" +Authentication decorators for FastAPI +""" + +from functools import wraps +from fastapi import HTTPException, Depends +from fastapi.security import HTTPBearer +import httpx +import logging + +logger = logging.getLogger(__name__) + +security = HTTPBearer() + +def verify_service_token(auth_service_url: str): + """Verify service token with auth service""" + + async def verify_token(token: str = Depends(security)): + try: + async with httpx.AsyncClient() as client: + response = await client.post( + f"{auth_service_url}/verify", + headers={"Authorization": f"Bearer {token.credentials}"} + ) + + if response.status_code == 200: + return response.json() + else: + raise HTTPException( + status_code=401, + detail="Invalid authentication credentials" + ) + + except httpx.RequestError as e: + logger.error(f"Auth service unavailable: {e}") + raise HTTPException( + status_code=503, + detail="Authentication service unavailable" + ) + + return verify_token \ No newline at end of file diff --git a/services/data/shared/auth/jwt_handler.py b/services/data/shared/auth/jwt_handler.py new file mode 100644 index 00000000..8e7643c7 --- /dev/null +++ b/services/data/shared/auth/jwt_handler.py @@ -0,0 +1,58 @@ +""" +Shared JWT Authentication Handler +Used across all microservices for consistent authentication +""" + +import jwt +from datetime import datetime, timedelta +from typing import Optional, Dict, Any +import logging + +logger = logging.getLogger(__name__) + +class JWTHandler: + """JWT token handling for microservices""" + + def __init__(self, secret_key: str, algorithm: str = "HS256"): + self.secret_key = secret_key + self.algorithm = algorithm + + def create_access_token(self, data: Dict[str, Any], expires_delta: Optional[timedelta] = None) -> str: + """Create JWT access token""" + to_encode = data.copy() + + if expires_delta: + expire = datetime.utcnow() + expires_delta + else: + expire = datetime.utcnow() + timedelta(minutes=30) + + to_encode.update({"exp": expire, "type": "access"}) + + encoded_jwt = jwt.encode(to_encode, 
self.secret_key, algorithm=self.algorithm) + return encoded_jwt + + def create_refresh_token(self, data: Dict[str, Any], expires_delta: Optional[timedelta] = None) -> str: + """Create JWT refresh token""" + to_encode = data.copy() + + if expires_delta: + expire = datetime.utcnow() + expires_delta + else: + expire = datetime.utcnow() + timedelta(days=7) + + to_encode.update({"exp": expire, "type": "refresh"}) + + encoded_jwt = jwt.encode(to_encode, self.secret_key, algorithm=self.algorithm) + return encoded_jwt + + def verify_token(self, token: str) -> Optional[Dict[str, Any]]: + """Verify and decode JWT token""" + try: + payload = jwt.decode(token, self.secret_key, algorithms=[self.algorithm]) + return payload + except jwt.ExpiredSignatureError: + logger.warning("Token has expired") + return None + except jwt.InvalidTokenError: + logger.warning("Invalid token") + return None \ No newline at end of file diff --git a/services/data/shared/database/__init__.py b/services/data/shared/database/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/services/data/shared/database/base.py b/services/data/shared/database/base.py new file mode 100644 index 00000000..e5766716 --- /dev/null +++ b/services/data/shared/database/base.py @@ -0,0 +1,56 @@ +""" +Base database configuration for all microservices +""" + +import os +from sqlalchemy import create_engine +from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine +from sqlalchemy.orm import sessionmaker, declarative_base +from sqlalchemy.pool import StaticPool +import logging + +logger = logging.getLogger(__name__) + +Base = declarative_base() + +class DatabaseManager: + """Database manager for microservices""" + + def __init__(self, database_url: str): + self.database_url = database_url + self.async_engine = create_async_engine( + database_url, + echo=False, + pool_pre_ping=True, + pool_recycle=300, + pool_size=20, + max_overflow=30 + ) + + self.async_session_local = sessionmaker( + self.async_engine, + class_=AsyncSession, + expire_on_commit=False + ) + + async def get_db(self): + """Get database session""" + async with self.async_session_local() as session: + try: + yield session + except Exception as e: + logger.error(f"Database session error: {e}") + await session.rollback() + raise + finally: + await session.close() + + async def create_tables(self): + """Create database tables""" + async with self.async_engine.begin() as conn: + await conn.run_sync(Base.metadata.create_all) + + async def drop_tables(self): + """Drop database tables""" + async with self.async_engine.begin() as conn: + await conn.run_sync(Base.metadata.drop_all) \ No newline at end of file diff --git a/services/data/shared/messaging/__init__.py b/services/data/shared/messaging/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/services/data/shared/messaging/events.py b/services/data/shared/messaging/events.py new file mode 100644 index 00000000..812e972d --- /dev/null +++ b/services/data/shared/messaging/events.py @@ -0,0 +1,73 @@ +""" +Event definitions for microservices communication +""" + +from dataclasses import dataclass +from datetime import datetime +from typing import Dict, Any, Optional +import uuid + +@dataclass +class BaseEvent: + """Base event class""" + event_id: str + event_type: str + service_name: str + timestamp: datetime + data: Dict[str, Any] + correlation_id: Optional[str] = None + + def __post_init__(self): + if not self.event_id: + self.event_id = str(uuid.uuid4()) + if not self.timestamp: + self.timestamp = 
datetime.utcnow() + +# Training Events +@dataclass +class TrainingStartedEvent(BaseEvent): + event_type: str = "training.started" + +@dataclass +class TrainingCompletedEvent(BaseEvent): + event_type: str = "training.completed" + +@dataclass +class TrainingFailedEvent(BaseEvent): + event_type: str = "training.failed" + +# Forecasting Events +@dataclass +class ForecastGeneratedEvent(BaseEvent): + event_type: str = "forecast.generated" + +@dataclass +class ForecastRequestedEvent(BaseEvent): + event_type: str = "forecast.requested" + +# User Events +@dataclass +class UserRegisteredEvent(BaseEvent): + event_type: str = "user.registered" + +@dataclass +class UserLoginEvent(BaseEvent): + event_type: str = "user.login" + +# Tenant Events +@dataclass +class TenantCreatedEvent(BaseEvent): + event_type: str = "tenant.created" + +@dataclass +class TenantUpdatedEvent(BaseEvent): + event_type: str = "tenant.updated" + +# Notification Events +@dataclass +class NotificationSentEvent(BaseEvent): + event_type: str = "notification.sent" + +@dataclass +class NotificationFailedEvent(BaseEvent): + event_type: str = "notification.failed" \ No newline at end of file diff --git a/services/data/shared/messaging/rabbitmq.py b/services/data/shared/messaging/rabbitmq.py new file mode 100644 index 00000000..62d95cfb --- /dev/null +++ b/services/data/shared/messaging/rabbitmq.py @@ -0,0 +1,96 @@ +""" +RabbitMQ messaging client for microservices +""" + +import asyncio +import json +import logging +from typing import Dict, Any, Callable +import aio_pika +from aio_pika import connect_robust, Message, DeliveryMode + +logger = logging.getLogger(__name__) + +class RabbitMQClient: + """RabbitMQ client for microservices communication""" + + def __init__(self, connection_url: str): + self.connection_url = connection_url + self.connection = None + self.channel = None + + async def connect(self): + """Connect to RabbitMQ""" + try: + self.connection = await connect_robust(self.connection_url) + self.channel = await self.connection.channel() + logger.info("Connected to RabbitMQ") + except Exception as e: + logger.error(f"Failed to connect to RabbitMQ: {e}") + raise + + async def disconnect(self): + """Disconnect from RabbitMQ""" + if self.connection: + await self.connection.close() + logger.info("Disconnected from RabbitMQ") + + async def publish_event(self, exchange_name: str, routing_key: str, event_data: Dict[str, Any]): + """Publish event to RabbitMQ""" + try: + if not self.channel: + await self.connect() + + # Declare exchange + exchange = await self.channel.declare_exchange( + exchange_name, + aio_pika.ExchangeType.TOPIC, + durable=True + ) + + # Create message + message = Message( + json.dumps(event_data).encode(), + delivery_mode=DeliveryMode.PERSISTENT, + content_type="application/json" + ) + + # Publish message + await exchange.publish(message, routing_key=routing_key) + + logger.info(f"Published event to {exchange_name} with routing key {routing_key}") + + except Exception as e: + logger.error(f"Failed to publish event: {e}") + raise + + async def consume_events(self, exchange_name: str, queue_name: str, routing_key: str, callback: Callable): + """Consume events from RabbitMQ""" + try: + if not self.channel: + await self.connect() + + # Declare exchange + exchange = await self.channel.declare_exchange( + exchange_name, + aio_pika.ExchangeType.TOPIC, + durable=True + ) + + # Declare queue + queue = await self.channel.declare_queue( + queue_name, + durable=True + ) + + # Bind queue to exchange + await queue.bind(exchange, 
routing_key) + + # Set up consumer + await queue.consume(callback) + + logger.info(f"Started consuming events from {queue_name}") + + except Exception as e: + logger.error(f"Failed to consume events: {e}") + raise \ No newline at end of file diff --git a/services/data/shared/monitoring/__init__.py b/services/data/shared/monitoring/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/services/data/shared/monitoring/logging.py b/services/data/shared/monitoring/logging.py new file mode 100644 index 00000000..0fde234d --- /dev/null +++ b/services/data/shared/monitoring/logging.py @@ -0,0 +1,77 @@ +""" +Centralized logging configuration for microservices +""" + +import logging +import logging.config +import os +from typing import Dict, Any + +def setup_logging(service_name: str, log_level: str = "INFO") -> None: + """Set up logging configuration for a microservice""" + + config: Dict[str, Any] = { + "version": 1, + "disable_existing_loggers": False, + "formatters": { + "standard": { + "format": "%(asctime)s [%(levelname)s] %(name)s: %(message)s" + }, + "detailed": { + "format": "%(asctime)s [%(levelname)s] %(name)s [%(filename)s:%(lineno)d] %(message)s" + }, + "json": { + "()": "pythonjsonlogger.jsonlogger.JsonFormatter", + "format": "%(asctime)s %(name)s %(levelname)s %(message)s" + } + }, + "handlers": { + "console": { + "class": "logging.StreamHandler", + "level": log_level, + "formatter": "standard", + "stream": "ext://sys.stdout" + }, + "file": { + "class": "logging.FileHandler", + "level": log_level, + "formatter": "detailed", + "filename": f"/var/log/{service_name}.log", + "mode": "a" + }, + "logstash": { + "class": "logstash.TCPLogstashHandler", + "host": os.getenv("LOGSTASH_HOST", "localhost"), + "port": int(os.getenv("LOGSTASH_PORT", "5000")), + "version": 1, + "message_type": "logstash", + "fqdn": False, + "tags": [service_name] + } + }, + "loggers": { + "": { + "handlers": ["console", "file"], + "level": log_level, + "propagate": False + }, + "uvicorn": { + "handlers": ["console"], + "level": log_level, + "propagate": False + }, + "uvicorn.access": { + "handlers": ["console"], + "level": log_level, + "propagate": False + } + } + } + + # Add logstash handler if in production + if os.getenv("ENVIRONMENT") == "production": + config["loggers"][""]["handlers"].append("logstash") + + logging.config.dictConfig(config) + logger = logging.getLogger(__name__) + logger.info(f"Logging configured for {service_name}") \ No newline at end of file diff --git a/services/data/shared/monitoring/metrics.py b/services/data/shared/monitoring/metrics.py new file mode 100644 index 00000000..a5e35223 --- /dev/null +++ b/services/data/shared/monitoring/metrics.py @@ -0,0 +1,112 @@ +""" +Metrics collection for microservices +""" + +import time +import logging +from typing import Dict, Any +from prometheus_client import Counter, Histogram, Gauge, start_http_server +from functools import wraps + +logger = logging.getLogger(__name__) + +# Prometheus metrics +REQUEST_COUNT = Counter( + 'http_requests_total', + 'Total HTTP requests', + ['method', 'endpoint', 'status_code', 'service'] +) + +REQUEST_DURATION = Histogram( + 'http_request_duration_seconds', + 'HTTP request duration in seconds', + ['method', 'endpoint', 'service'] +) + +ACTIVE_CONNECTIONS = Gauge( + 'active_connections', + 'Active database connections', + ['service'] +) + +TRAINING_JOBS = Counter( + 'training_jobs_total', + 'Total training jobs', + ['status', 'service'] +) + +FORECASTS_GENERATED = Counter( + 
'forecasts_generated_total', + 'Total forecasts generated', + ['service'] +) + +class MetricsCollector: + """Metrics collector for microservices""" + + def __init__(self, service_name: str): + self.service_name = service_name + self.start_time = time.time() + + def start_metrics_server(self, port: int = 8080): + """Start Prometheus metrics server""" + try: + start_http_server(port) + logger.info(f"Metrics server started on port {port}") + except Exception as e: + logger.error(f"Failed to start metrics server: {e}") + + def record_request(self, method: str, endpoint: str, status_code: int, duration: float): + """Record HTTP request metrics""" + REQUEST_COUNT.labels( + method=method, + endpoint=endpoint, + status_code=status_code, + service=self.service_name + ).inc() + + REQUEST_DURATION.labels( + method=method, + endpoint=endpoint, + service=self.service_name + ).observe(duration) + + def record_training_job(self, status: str): + """Record training job metrics""" + TRAINING_JOBS.labels( + status=status, + service=self.service_name + ).inc() + + def record_forecast_generated(self): + """Record forecast generation metrics""" + FORECASTS_GENERATED.labels( + service=self.service_name + ).inc() + + def set_active_connections(self, count: int): + """Set active database connections""" + ACTIVE_CONNECTIONS.labels( + service=self.service_name + ).set(count) + +def metrics_middleware(metrics_collector: MetricsCollector): + """Middleware to collect metrics""" + + def middleware(request, call_next): + start_time = time.time() + + response = call_next(request) + + duration = time.time() - start_time + + metrics_collector.record_request( + method=request.method, + endpoint=request.url.path, + status_code=response.status_code, + duration=duration + ) + + return response + + return middleware \ No newline at end of file diff --git a/services/data/shared/utils/__init__.py b/services/data/shared/utils/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/services/data/shared/utils/datetime_utils.py b/services/data/shared/utils/datetime_utils.py new file mode 100644 index 00000000..3035001a --- /dev/null +++ b/services/data/shared/utils/datetime_utils.py @@ -0,0 +1,71 @@ +""" +DateTime utilities for microservices +""" + +from datetime import datetime, timezone, timedelta +from typing import Optional +import pytz + +def utc_now() -> datetime: + """Get current UTC datetime""" + return datetime.now(timezone.utc) + +def madrid_now() -> datetime: + """Get current Madrid datetime""" + madrid_tz = pytz.timezone('Europe/Madrid') + return datetime.now(madrid_tz) + +def to_utc(dt: datetime) -> datetime: + """Convert datetime to UTC""" + if dt.tzinfo is None: + dt = dt.replace(tzinfo=timezone.utc) + return dt.astimezone(timezone.utc) + +def to_madrid(dt: datetime) -> datetime: + """Convert datetime to Madrid timezone""" + madrid_tz = pytz.timezone('Europe/Madrid') + if dt.tzinfo is None: + dt = dt.replace(tzinfo=timezone.utc) + return dt.astimezone(madrid_tz) + +def format_datetime(dt: datetime, format_str: str = "%Y-%m-%d %H:%M:%S") -> str: + """Format datetime as string""" + return dt.strftime(format_str) + +def parse_datetime(dt_str: str, format_str: str = "%Y-%m-%d %H:%M:%S") -> datetime: + """Parse datetime from string""" + return datetime.strptime(dt_str, format_str) + +def is_business_hours(dt: Optional[datetime] = None) -> bool: + """Check if datetime is during business hours (9 AM - 6 PM Madrid time)""" + if dt is None: + dt = madrid_now() + + if dt.tzinfo is None: + dt = 
dt.replace(tzinfo=timezone.utc) + + madrid_dt = to_madrid(dt) + + # Check if it's a weekday (Monday=0, Sunday=6) + if madrid_dt.weekday() >= 5: # Weekend + return False + + # Check if it's business hours + return 9 <= madrid_dt.hour < 18 + +def next_business_day(dt: Optional[datetime] = None) -> datetime: + """Get next business day""" + if dt is None: + dt = madrid_now() + + if dt.tzinfo is None: + dt = dt.replace(tzinfo=timezone.utc) + + madrid_dt = to_madrid(dt) + + # Add days until we reach a weekday + while madrid_dt.weekday() >= 5: # Weekend + madrid_dt += timedelta(days=1) + + # Set to 9 AM + return madrid_dt.replace(hour=9, minute=0, second=0, microsecond=0) \ No newline at end of file diff --git a/services/data/shared/utils/validation.py b/services/data/shared/utils/validation.py new file mode 100644 index 00000000..c855b20c --- /dev/null +++ b/services/data/shared/utils/validation.py @@ -0,0 +1,67 @@ +""" +Validation utilities for microservices +""" + +import re +from typing import Any, Optional +from email_validator import validate_email, EmailNotValidError + +def validate_spanish_phone(phone: str) -> bool: + """Validate Spanish phone number""" + # Spanish phone pattern: +34 followed by 9 digits + pattern = r'^(\+34|0034|34)?[6-9]\d{8}$' + return bool(re.match(pattern, phone.replace(' ', '').replace('-', ''))) + +def validate_email_address(email: str) -> bool: + """Validate email address""" + try: + validate_email(email) + return True + except EmailNotValidError: + return False + +def validate_tenant_name(name: str) -> bool: + """Validate tenant name""" + # Must be 2-50 characters, letters, numbers, spaces, hyphens, apostrophes + pattern = r"^[a-zA-ZÀ-ÿ0-9\s\-']{2,50}$" + return bool(re.match(pattern, name)) + +def validate_address(address: str) -> bool: + """Validate address""" + # Must be 5-200 characters + return 5 <= len(address.strip()) <= 200 + +def validate_coordinates(latitude: float, longitude: float) -> bool: + """Validate Madrid coordinates""" + # Madrid is roughly between these coordinates + madrid_bounds = { + 'lat_min': 40.3, + 'lat_max': 40.6, + 'lon_min': -3.8, + 'lon_max': -3.5 + } + + return ( + madrid_bounds['lat_min'] <= latitude <= madrid_bounds['lat_max'] and + madrid_bounds['lon_min'] <= longitude <= madrid_bounds['lon_max'] + ) + +def validate_product_name(name: str) -> bool: + """Validate product name""" + # Must be 1-50 characters, letters, numbers, spaces + pattern = r"^[a-zA-ZÀ-ÿ0-9\s]{1,50}$" + return bool(re.match(pattern, name)) + +def validate_positive_number(value: Any) -> bool: + """Validate positive number""" + try: + return float(value) > 0 + except (ValueError, TypeError): + return False + +def validate_non_negative_number(value: Any) -> bool: + """Validate non-negative number""" + try: + return float(value) >= 0 + except (ValueError, TypeError): + return False \ No newline at end of file diff --git a/services/forecasting/Dockerfile b/services/forecasting/Dockerfile new file mode 100644 index 00000000..ad431c34 --- /dev/null +++ b/services/forecasting/Dockerfile @@ -0,0 +1,31 @@ +FROM python:3.11-slim + +WORKDIR /app + +# Install system dependencies +RUN apt-get update && apt-get install -y \ + gcc \ + curl \ + && rm -rf /var/lib/apt/lists/* + +# Copy requirements +COPY requirements.txt . + +# Install Python dependencies +RUN pip install --no-cache-dir -r requirements.txt + +# Copy application code +COPY . . 
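+# (the service's vendored shared/ package is included in the copy above;
+# the PYTHONPATH entry below makes it importable as a top-level package)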
+
+# Add shared libraries to Python path
+ENV PYTHONPATH="/app:/app/shared:$PYTHONPATH"
+
+# Expose port
+EXPOSE 8000
+
+# Health check
+HEALTHCHECK --interval=30s --timeout=10s --start-period=5s --retries=3 \
+    CMD curl -f http://localhost:8000/health || exit 1
+
+# Run application
+CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000"]
diff --git a/services/forecasting/app/__init__.py b/services/forecasting/app/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/services/forecasting/app/api/__init__.py b/services/forecasting/app/api/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/services/forecasting/app/core/__init__.py b/services/forecasting/app/core/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/services/forecasting/app/core/config.py b/services/forecasting/app/core/config.py
new file mode 100644
index 00000000..cf822034
--- /dev/null
+++ b/services/forecasting/app/core/config.py
@@ -0,0 +1,32 @@
+"""
+uLuforecasting service configuration
+"""
+
+import os
+from pydantic_settings import BaseSettings
+
+class Settings(BaseSettings):
+    """Application settings"""
+
+    # Basic settings
+    APP_NAME: str = "uLuforecasting Service"
+    VERSION: str = "1.0.0"
+    DEBUG: bool = os.getenv("DEBUG", "False").lower() == "true"
+    LOG_LEVEL: str = os.getenv("LOG_LEVEL", "INFO")
+
+    # Database settings
+    DATABASE_URL: str = os.getenv("DATABASE_URL", "postgresql+asyncpg://forecasting_user:forecasting_pass123@forecasting-db:5432/forecasting_db")
+
+    # Redis settings
+    REDIS_URL: str = os.getenv("REDIS_URL", "redis://redis:6379/0")
+
+    # RabbitMQ settings
+    RABBITMQ_URL: str = os.getenv("RABBITMQ_URL", "amqp://bakery:forecast123@rabbitmq:5672/")
+
+    # Service URLs
+    AUTH_SERVICE_URL: str = os.getenv("AUTH_SERVICE_URL", "http://auth-service:8000")
+
+    class Config:
+        env_file = ".env"
+
+settings = Settings()
diff --git a/services/forecasting/app/core/database.py b/services/forecasting/app/core/database.py
new file mode 100644
index 00000000..fc4bb1c1
--- /dev/null
+++ b/services/forecasting/app/core/database.py
@@ -0,0 +1,12 @@
+"""
+Database configuration for forecasting service
+"""
+
+from shared.database.base import DatabaseManager
+from app.core.config import settings
+
+# Initialize database manager
+database_manager = DatabaseManager(settings.DATABASE_URL)
+
+# Alias for convenience
+get_db = database_manager.get_db
diff --git a/services/forecasting/app/main.py b/services/forecasting/app/main.py
new file mode 100644
index 00000000..e5bf3dad
--- /dev/null
+++ b/services/forecasting/app/main.py
@@ -0,0 +1,61 @@
+"""
+uLuforecasting Service
+"""
+
+import logging
+from fastapi import FastAPI
+from fastapi.middleware.cors import CORSMiddleware
+
+from app.core.config import settings
+from app.core.database import database_manager
+from shared.monitoring.logging import setup_logging
+from shared.monitoring.metrics import MetricsCollector
+
+# Setup logging
+setup_logging("forecasting-service", settings.LOG_LEVEL)
+logger = logging.getLogger(__name__)
+
+# Create FastAPI app
+app = FastAPI(
+    title="uLuforecasting Service",
+    description="uLuforecasting service for bakery forecasting",
+    version="1.0.0"
+)
+
+# Initialize metrics collector
+metrics_collector = MetricsCollector("forecasting-service")
+
+# CORS middleware
+app.add_middleware(
+    CORSMiddleware,
+    allow_origins=["*"],
+    allow_credentials=True,
+    allow_methods=["*"],
+    allow_headers=["*"],
+)
+
+@app.on_event("startup")
+async def startup_event():
+    """Application startup"""
logger.info("Starting uLuforecasting Service") + + # Create database tables + await database_manager.create_tables() + + # Start metrics server + metrics_collector.start_metrics_server(8080) + + logger.info("uLuforecasting Service started successfully") + +@app.get("/health") +async def health_check(): + """Health check endpoint""" + return { + "status": "healthy", + "service": "forecasting-service", + "version": "1.0.0" + } + +if __name__ == "__main__": + import uvicorn + uvicorn.run(app, host="0.0.0.0", port=8000) diff --git a/services/forecasting/app/schemas/__init__.py b/services/forecasting/app/schemas/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/services/forecasting/app/services/__init__.py b/services/forecasting/app/services/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/services/forecasting/requirements.txt b/services/forecasting/requirements.txt new file mode 100644 index 00000000..affefed0 --- /dev/null +++ b/services/forecasting/requirements.txt @@ -0,0 +1,13 @@ +fastapi==0.104.1 +uvicorn[standard]==0.24.0 +sqlalchemy==2.0.23 +asyncpg==0.29.0 +alembic==1.12.1 +pydantic==2.5.0 +pydantic-settings==2.1.0 +httpx==0.25.2 +redis==5.0.1 +aio-pika==9.3.0 +prometheus-client==0.17.1 +python-json-logger==2.0.4 +pytz==2023.3 diff --git a/services/forecasting/shared/auth/__init__.py b/services/forecasting/shared/auth/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/services/forecasting/shared/auth/decorators.py b/services/forecasting/shared/auth/decorators.py new file mode 100644 index 00000000..53095a15 --- /dev/null +++ b/services/forecasting/shared/auth/decorators.py @@ -0,0 +1,41 @@ +""" +Authentication decorators for FastAPI +""" + +from functools import wraps +from fastapi import HTTPException, Depends +from fastapi.security import HTTPBearer +import httpx +import logging + +logger = logging.getLogger(__name__) + +security = HTTPBearer() + +def verify_service_token(auth_service_url: str): + """Verify service token with auth service""" + + async def verify_token(token: str = Depends(security)): + try: + async with httpx.AsyncClient() as client: + response = await client.post( + f"{auth_service_url}/verify", + headers={"Authorization": f"Bearer {token.credentials}"} + ) + + if response.status_code == 200: + return response.json() + else: + raise HTTPException( + status_code=401, + detail="Invalid authentication credentials" + ) + + except httpx.RequestError as e: + logger.error(f"Auth service unavailable: {e}") + raise HTTPException( + status_code=503, + detail="Authentication service unavailable" + ) + + return verify_token \ No newline at end of file diff --git a/services/forecasting/shared/auth/jwt_handler.py b/services/forecasting/shared/auth/jwt_handler.py new file mode 100644 index 00000000..8e7643c7 --- /dev/null +++ b/services/forecasting/shared/auth/jwt_handler.py @@ -0,0 +1,58 @@ +""" +Shared JWT Authentication Handler +Used across all microservices for consistent authentication +""" + +import jwt +from datetime import datetime, timedelta +from typing import Optional, Dict, Any +import logging + +logger = logging.getLogger(__name__) + +class JWTHandler: + """JWT token handling for microservices""" + + def __init__(self, secret_key: str, algorithm: str = "HS256"): + self.secret_key = secret_key + self.algorithm = algorithm + + def create_access_token(self, data: Dict[str, Any], expires_delta: Optional[timedelta] = None) -> str: + """Create JWT access token""" + to_encode = data.copy() + + if expires_delta: + 
expire = datetime.utcnow() + expires_delta + else: + expire = datetime.utcnow() + timedelta(minutes=30) + + to_encode.update({"exp": expire, "type": "access"}) + + encoded_jwt = jwt.encode(to_encode, self.secret_key, algorithm=self.algorithm) + return encoded_jwt + + def create_refresh_token(self, data: Dict[str, Any], expires_delta: Optional[timedelta] = None) -> str: + """Create JWT refresh token""" + to_encode = data.copy() + + if expires_delta: + expire = datetime.utcnow() + expires_delta + else: + expire = datetime.utcnow() + timedelta(days=7) + + to_encode.update({"exp": expire, "type": "refresh"}) + + encoded_jwt = jwt.encode(to_encode, self.secret_key, algorithm=self.algorithm) + return encoded_jwt + + def verify_token(self, token: str) -> Optional[Dict[str, Any]]: + """Verify and decode JWT token""" + try: + payload = jwt.decode(token, self.secret_key, algorithms=[self.algorithm]) + return payload + except jwt.ExpiredSignatureError: + logger.warning("Token has expired") + return None + except jwt.InvalidTokenError: + logger.warning("Invalid token") + return None \ No newline at end of file diff --git a/services/forecasting/shared/database/__init__.py b/services/forecasting/shared/database/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/services/forecasting/shared/database/base.py b/services/forecasting/shared/database/base.py new file mode 100644 index 00000000..e5766716 --- /dev/null +++ b/services/forecasting/shared/database/base.py @@ -0,0 +1,56 @@ +""" +Base database configuration for all microservices +""" + +import os +from sqlalchemy import create_engine +from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine +from sqlalchemy.orm import sessionmaker, declarative_base +from sqlalchemy.pool import StaticPool +import logging + +logger = logging.getLogger(__name__) + +Base = declarative_base() + +class DatabaseManager: + """Database manager for microservices""" + + def __init__(self, database_url: str): + self.database_url = database_url + self.async_engine = create_async_engine( + database_url, + echo=False, + pool_pre_ping=True, + pool_recycle=300, + pool_size=20, + max_overflow=30 + ) + + self.async_session_local = sessionmaker( + self.async_engine, + class_=AsyncSession, + expire_on_commit=False + ) + + async def get_db(self): + """Get database session""" + async with self.async_session_local() as session: + try: + yield session + except Exception as e: + logger.error(f"Database session error: {e}") + await session.rollback() + raise + finally: + await session.close() + + async def create_tables(self): + """Create database tables""" + async with self.async_engine.begin() as conn: + await conn.run_sync(Base.metadata.create_all) + + async def drop_tables(self): + """Drop database tables""" + async with self.async_engine.begin() as conn: + await conn.run_sync(Base.metadata.drop_all) \ No newline at end of file diff --git a/services/forecasting/shared/messaging/__init__.py b/services/forecasting/shared/messaging/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/services/forecasting/shared/messaging/events.py b/services/forecasting/shared/messaging/events.py new file mode 100644 index 00000000..812e972d --- /dev/null +++ b/services/forecasting/shared/messaging/events.py @@ -0,0 +1,73 @@ +""" +Event definitions for microservices communication +""" + +from dataclasses import dataclass +from datetime import datetime +from typing import Dict, Any, Optional +import uuid + +@dataclass +class BaseEvent: + """Base event class""" 
+    event_id: str = ""
+    event_type: str = ""
+    service_name: str = ""
+    timestamp: Optional[datetime] = None
+    data: Optional[Dict[str, Any]] = None
+    correlation_id: Optional[str] = None
+
+    def __post_init__(self):
+        if not self.event_id:
+            self.event_id = str(uuid.uuid4())
+        self.timestamp = self.timestamp or datetime.utcnow()
+        self.data = {} if self.data is None else self.data
+
+# Training Events
+@dataclass
+class TrainingStartedEvent(BaseEvent):
+    event_type: str = "training.started"
+
+@dataclass
+class TrainingCompletedEvent(BaseEvent):
+    event_type: str = "training.completed"
+
+@dataclass
+class TrainingFailedEvent(BaseEvent):
+    event_type: str = "training.failed"
+
+# Forecasting Events
+@dataclass
+class ForecastGeneratedEvent(BaseEvent):
+    event_type: str = "forecast.generated"
+
+@dataclass
+class ForecastRequestedEvent(BaseEvent):
+    event_type: str = "forecast.requested"
+
+# User Events
+@dataclass
+class UserRegisteredEvent(BaseEvent):
+    event_type: str = "user.registered"
+
+@dataclass
+class UserLoginEvent(BaseEvent):
+    event_type: str = "user.login"
+
+# Tenant Events
+@dataclass
+class TenantCreatedEvent(BaseEvent):
+    event_type: str = "tenant.created"
+
+@dataclass
+class TenantUpdatedEvent(BaseEvent):
+    event_type: str = "tenant.updated"
+
+# Notification Events
+@dataclass
+class NotificationSentEvent(BaseEvent):
+    event_type: str = "notification.sent"
+
+@dataclass
+class NotificationFailedEvent(BaseEvent):
+    event_type: str = "notification.failed"
\ No newline at end of file
diff --git a/services/forecasting/shared/messaging/rabbitmq.py b/services/forecasting/shared/messaging/rabbitmq.py
new file mode 100644
index 00000000..62d95cfb
--- /dev/null
+++ b/services/forecasting/shared/messaging/rabbitmq.py
@@ -0,0 +1,96 @@
+"""
+RabbitMQ messaging client for microservices
+"""
+
+import asyncio
+import json
+import logging
+from typing import Dict, Any, Callable
+import aio_pika
+from aio_pika import connect_robust, Message, DeliveryMode
+
+logger = logging.getLogger(__name__)
+
+class RabbitMQClient:
+    """RabbitMQ client for microservices communication"""
+
+    def __init__(self, connection_url: str):
+        self.connection_url = connection_url
+        self.connection = None
+        self.channel = None
+
+    async def connect(self):
+        """Connect to RabbitMQ"""
+        try:
+            self.connection = await connect_robust(self.connection_url)
+            self.channel = await self.connection.channel()
+            logger.info("Connected to RabbitMQ")
+        except Exception as e:
+            logger.error(f"Failed to connect to RabbitMQ: {e}")
+            raise
+
+    async def disconnect(self):
+        """Disconnect from RabbitMQ"""
+        if self.connection:
+            await self.connection.close()
+            logger.info("Disconnected from RabbitMQ")
+
+    async def publish_event(self, exchange_name: str, routing_key: str, event_data: Dict[str, Any]):
+        """Publish event to RabbitMQ"""
+        try:
+            if not self.channel:
+                await self.connect()
+
+            # Declare exchange
+            exchange = await self.channel.declare_exchange(
+                exchange_name,
+                aio_pika.ExchangeType.TOPIC,
+                durable=True
+            )
+
+            # Create message
+            message = Message(
+                json.dumps(event_data).encode(),
+                delivery_mode=DeliveryMode.PERSISTENT,
+                content_type="application/json"
+            )
+
+            # Publish message
+            await exchange.publish(message, routing_key=routing_key)
+
+            logger.info(f"Published event to {exchange_name} with routing key {routing_key}")
+
+        except Exception as e:
+            logger.error(f"Failed to publish event: {e}")
+            raise
+
+    async def consume_events(self, exchange_name: str, queue_name: str, routing_key: str, callback: Callable):
+        """Consume events from RabbitMQ"""
+        try:
+            if not self.channel:
+                await self.connect()
+
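+            # declare_exchange/declare_queue below are idempotent: RabbitMQ
+            # simply re-uses an existing exchange or queue when it was
+            # declared with the same parameters.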
# Declare exchange + exchange = await self.channel.declare_exchange( + exchange_name, + aio_pika.ExchangeType.TOPIC, + durable=True + ) + + # Declare queue + queue = await self.channel.declare_queue( + queue_name, + durable=True + ) + + # Bind queue to exchange + await queue.bind(exchange, routing_key) + + # Set up consumer + await queue.consume(callback) + + logger.info(f"Started consuming events from {queue_name}") + + except Exception as e: + logger.error(f"Failed to consume events: {e}") + raise \ No newline at end of file diff --git a/services/forecasting/shared/monitoring/__init__.py b/services/forecasting/shared/monitoring/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/services/forecasting/shared/monitoring/logging.py b/services/forecasting/shared/monitoring/logging.py new file mode 100644 index 00000000..0fde234d --- /dev/null +++ b/services/forecasting/shared/monitoring/logging.py @@ -0,0 +1,77 @@ +""" +Centralized logging configuration for microservices +""" + +import logging +import logging.config +import os +from typing import Dict, Any + +def setup_logging(service_name: str, log_level: str = "INFO") -> None: + """Set up logging configuration for a microservice""" + + config: Dict[str, Any] = { + "version": 1, + "disable_existing_loggers": False, + "formatters": { + "standard": { + "format": "%(asctime)s [%(levelname)s] %(name)s: %(message)s" + }, + "detailed": { + "format": "%(asctime)s [%(levelname)s] %(name)s [%(filename)s:%(lineno)d] %(message)s" + }, + "json": { + "()": "pythonjsonlogger.jsonlogger.JsonFormatter", + "format": "%(asctime)s %(name)s %(levelname)s %(message)s" + } + }, + "handlers": { + "console": { + "class": "logging.StreamHandler", + "level": log_level, + "formatter": "standard", + "stream": "ext://sys.stdout" + }, + "file": { + "class": "logging.FileHandler", + "level": log_level, + "formatter": "detailed", + "filename": f"/var/log/{service_name}.log", + "mode": "a" + }, + "logstash": { + "class": "logstash.TCPLogstashHandler", + "host": os.getenv("LOGSTASH_HOST", "localhost"), + "port": int(os.getenv("LOGSTASH_PORT", "5000")), + "version": 1, + "message_type": "logstash", + "fqdn": False, + "tags": [service_name] + } + }, + "loggers": { + "": { + "handlers": ["console", "file"], + "level": log_level, + "propagate": False + }, + "uvicorn": { + "handlers": ["console"], + "level": log_level, + "propagate": False + }, + "uvicorn.access": { + "handlers": ["console"], + "level": log_level, + "propagate": False + } + } + } + + # Add logstash handler if in production + if os.getenv("ENVIRONMENT") == "production": + config["loggers"][""]["handlers"].append("logstash") + + logging.config.dictConfig(config) + logger = logging.getLogger(__name__) + logger.info(f"Logging configured for {service_name}") \ No newline at end of file diff --git a/services/forecasting/shared/monitoring/metrics.py b/services/forecasting/shared/monitoring/metrics.py new file mode 100644 index 00000000..a5e35223 --- /dev/null +++ b/services/forecasting/shared/monitoring/metrics.py @@ -0,0 +1,112 @@ +""" +Metrics collection for microservices +""" + +import time +import logging +from typing import Dict, Any +from prometheus_client import Counter, Histogram, Gauge, start_http_server +from functools import wraps + +logger = logging.getLogger(__name__) + +# Prometheus metrics +REQUEST_COUNT = Counter( + 'http_requests_total', + 'Total HTTP requests', + ['method', 'endpoint', 'status_code', 'service'] +) + +REQUEST_DURATION = Histogram( + 'http_request_duration_seconds', + 
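+    # default prometheus_client Histogram buckets are used here; pass
+    # buckets=(...) when a different latency resolution is needed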
'HTTP request duration in seconds',
+    ['method', 'endpoint', 'service']
+)
+
+ACTIVE_CONNECTIONS = Gauge(
+    'active_connections',
+    'Active database connections',
+    ['service']
+)
+
+TRAINING_JOBS = Counter(
+    'training_jobs_total',
+    'Total training jobs',
+    ['status', 'service']
+)
+
+FORECASTS_GENERATED = Counter(
+    'forecasts_generated_total',
+    'Total forecasts generated',
+    ['service']
+)
+
+class MetricsCollector:
+    """Metrics collector for microservices"""
+
+    def __init__(self, service_name: str):
+        self.service_name = service_name
+        self.start_time = time.time()
+
+    def start_metrics_server(self, port: int = 8080):
+        """Start Prometheus metrics server"""
+        try:
+            start_http_server(port)
+            logger.info(f"Metrics server started on port {port}")
+        except Exception as e:
+            logger.error(f"Failed to start metrics server: {e}")
+
+    def record_request(self, method: str, endpoint: str, status_code: int, duration: float):
+        """Record HTTP request metrics"""
+        REQUEST_COUNT.labels(
+            method=method,
+            endpoint=endpoint,
+            status_code=status_code,
+            service=self.service_name
+        ).inc()
+
+        REQUEST_DURATION.labels(
+            method=method,
+            endpoint=endpoint,
+            service=self.service_name
+        ).observe(duration)
+
+    def record_training_job(self, status: str):
+        """Record training job metrics"""
+        TRAINING_JOBS.labels(
+            status=status,
+            service=self.service_name
+        ).inc()
+
+    def record_forecast_generated(self):
+        """Record forecast generation metrics"""
+        FORECASTS_GENERATED.labels(
+            service=self.service_name
+        ).inc()
+
+    def set_active_connections(self, count: int):
+        """Set active database connections"""
+        ACTIVE_CONNECTIONS.labels(
+            service=self.service_name
+        ).set(count)
+
+def metrics_middleware(metrics_collector: MetricsCollector):
+    """Middleware to collect metrics"""
+
+    async def middleware(request, call_next):
+        start_time = time.time()
+
+        response = await call_next(request)
+
+        duration = time.time() - start_time
+
+        metrics_collector.record_request(
+            method=request.method,
+            endpoint=request.url.path,
+            status_code=response.status_code,
+            duration=duration
+        )
+
+        return response
+
+    return middleware
\ No newline at end of file
diff --git a/services/forecasting/shared/utils/__init__.py b/services/forecasting/shared/utils/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/services/forecasting/shared/utils/datetime_utils.py b/services/forecasting/shared/utils/datetime_utils.py
new file mode 100644
index 00000000..3035001a
--- /dev/null
+++ b/services/forecasting/shared/utils/datetime_utils.py
@@ -0,0 +1,71 @@
+"""
+DateTime utilities for microservices
+"""
+
+from datetime import datetime, timezone, timedelta
+from typing import Optional
+import pytz
+
+def utc_now() -> datetime:
+    """Get current UTC datetime"""
+    return datetime.now(timezone.utc)
+
+def madrid_now() -> datetime:
+    """Get current Madrid datetime"""
+    madrid_tz = pytz.timezone('Europe/Madrid')
+    return datetime.now(madrid_tz)
+
+def to_utc(dt: datetime) -> datetime:
+    """Convert datetime to UTC"""
+    if dt.tzinfo is None:
+        dt = dt.replace(tzinfo=timezone.utc)
+    return dt.astimezone(timezone.utc)
+
+def to_madrid(dt: datetime) -> datetime:
+    """Convert datetime to Madrid timezone"""
+    madrid_tz = pytz.timezone('Europe/Madrid')
+    if dt.tzinfo is None:
+        dt = dt.replace(tzinfo=timezone.utc)
+    return dt.astimezone(madrid_tz)
+
+def format_datetime(dt: datetime, format_str: str = "%Y-%m-%d %H:%M:%S") -> str:
+    """Format datetime as string"""
+    return dt.strftime(format_str)
+
+def parse_datetime(dt_str: str, format_str:
str = "%Y-%m-%d %H:%M:%S") -> datetime: + """Parse datetime from string""" + return datetime.strptime(dt_str, format_str) + +def is_business_hours(dt: Optional[datetime] = None) -> bool: + """Check if datetime is during business hours (9 AM - 6 PM Madrid time)""" + if dt is None: + dt = madrid_now() + + if dt.tzinfo is None: + dt = dt.replace(tzinfo=timezone.utc) + + madrid_dt = to_madrid(dt) + + # Check if it's a weekday (Monday=0, Sunday=6) + if madrid_dt.weekday() >= 5: # Weekend + return False + + # Check if it's business hours + return 9 <= madrid_dt.hour < 18 + +def next_business_day(dt: Optional[datetime] = None) -> datetime: + """Get next business day""" + if dt is None: + dt = madrid_now() + + if dt.tzinfo is None: + dt = dt.replace(tzinfo=timezone.utc) + + madrid_dt = to_madrid(dt) + + # Add days until we reach a weekday + while madrid_dt.weekday() >= 5: # Weekend + madrid_dt += timedelta(days=1) + + # Set to 9 AM + return madrid_dt.replace(hour=9, minute=0, second=0, microsecond=0) \ No newline at end of file diff --git a/services/forecasting/shared/utils/validation.py b/services/forecasting/shared/utils/validation.py new file mode 100644 index 00000000..c855b20c --- /dev/null +++ b/services/forecasting/shared/utils/validation.py @@ -0,0 +1,67 @@ +""" +Validation utilities for microservices +""" + +import re +from typing import Any, Optional +from email_validator import validate_email, EmailNotValidError + +def validate_spanish_phone(phone: str) -> bool: + """Validate Spanish phone number""" + # Spanish phone pattern: +34 followed by 9 digits + pattern = r'^(\+34|0034|34)?[6-9]\d{8}$' + return bool(re.match(pattern, phone.replace(' ', '').replace('-', ''))) + +def validate_email_address(email: str) -> bool: + """Validate email address""" + try: + validate_email(email) + return True + except EmailNotValidError: + return False + +def validate_tenant_name(name: str) -> bool: + """Validate tenant name""" + # Must be 2-50 characters, letters, numbers, spaces, hyphens, apostrophes + pattern = r"^[a-zA-ZÀ-ÿ0-9\s\-']{2,50}$" + return bool(re.match(pattern, name)) + +def validate_address(address: str) -> bool: + """Validate address""" + # Must be 5-200 characters + return 5 <= len(address.strip()) <= 200 + +def validate_coordinates(latitude: float, longitude: float) -> bool: + """Validate Madrid coordinates""" + # Madrid is roughly between these coordinates + madrid_bounds = { + 'lat_min': 40.3, + 'lat_max': 40.6, + 'lon_min': -3.8, + 'lon_max': -3.5 + } + + return ( + madrid_bounds['lat_min'] <= latitude <= madrid_bounds['lat_max'] and + madrid_bounds['lon_min'] <= longitude <= madrid_bounds['lon_max'] + ) + +def validate_product_name(name: str) -> bool: + """Validate product name""" + # Must be 1-50 characters, letters, numbers, spaces + pattern = r"^[a-zA-ZÀ-ÿ0-9\s]{1,50}$" + return bool(re.match(pattern, name)) + +def validate_positive_number(value: Any) -> bool: + """Validate positive number""" + try: + return float(value) > 0 + except (ValueError, TypeError): + return False + +def validate_non_negative_number(value: Any) -> bool: + """Validate non-negative number""" + try: + return float(value) >= 0 + except (ValueError, TypeError): + return False \ No newline at end of file diff --git a/services/notification/Dockerfile b/services/notification/Dockerfile new file mode 100644 index 00000000..ad431c34 --- /dev/null +++ b/services/notification/Dockerfile @@ -0,0 +1,31 @@ +FROM python:3.11-slim + +WORKDIR /app + +# Install system dependencies +RUN apt-get update && apt-get install -y \ 
+    gcc \
+    curl \
+    && rm -rf /var/lib/apt/lists/*
+
+# Copy requirements
+COPY requirements.txt .
+
+# Install Python dependencies
+RUN pip install --no-cache-dir -r requirements.txt
+
+# Copy application code
+COPY . .
+
+# Add shared libraries to Python path
+ENV PYTHONPATH="/app:/app/shared:$PYTHONPATH"
+
+# Expose port
+EXPOSE 8000
+
+# Health check
+HEALTHCHECK --interval=30s --timeout=10s --start-period=5s --retries=3 \
+    CMD curl -f http://localhost:8000/health || exit 1
+
+# Run application
+CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000"]
diff --git a/services/notification/app/__init__.py b/services/notification/app/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/services/notification/app/api/__init__.py b/services/notification/app/api/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/services/notification/app/core/__init__.py b/services/notification/app/core/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/services/notification/app/core/config.py b/services/notification/app/core/config.py
new file mode 100644
index 00000000..5deb4c54
--- /dev/null
+++ b/services/notification/app/core/config.py
@@ -0,0 +1,32 @@
+"""
+Notification service configuration
+"""
+
+import os
+from pydantic_settings import BaseSettings
+
+class Settings(BaseSettings):
+    """Application settings"""
+
+    # Basic settings
+    APP_NAME: str = "Notification Service"
+    VERSION: str = "1.0.0"
+    DEBUG: bool = os.getenv("DEBUG", "False").lower() == "true"
+    LOG_LEVEL: str = os.getenv("LOG_LEVEL", "INFO")
+
+    # Database settings
+    DATABASE_URL: str = os.getenv("DATABASE_URL", "postgresql+asyncpg://notification_user:notification_pass123@notification-db:5432/notification_db")
+
+    # Redis settings
+    REDIS_URL: str = os.getenv("REDIS_URL", "redis://redis:6379/0")
+
+    # RabbitMQ settings
+    RABBITMQ_URL: str = os.getenv("RABBITMQ_URL", "amqp://bakery:forecast123@rabbitmq:5672/")
+
+    # Service URLs
+    AUTH_SERVICE_URL: str = os.getenv("AUTH_SERVICE_URL", "http://auth-service:8000")
+
+    class Config:
+        env_file = ".env"
+
+settings = Settings()
diff --git a/services/notification/app/core/database.py b/services/notification/app/core/database.py
new file mode 100644
index 00000000..ef8ad931
--- /dev/null
+++ b/services/notification/app/core/database.py
@@ -0,0 +1,12 @@
+"""
+Database configuration for notification service
+"""
+
+from shared.database.base import DatabaseManager
+from app.core.config import settings
+
+# Initialize database manager
+database_manager = DatabaseManager(settings.DATABASE_URL)
+
+# Alias for convenience
+get_db = database_manager.get_db
diff --git a/services/notification/app/main.py b/services/notification/app/main.py
new file mode 100644
index 00000000..b044e007
--- /dev/null
+++ b/services/notification/app/main.py
@@ -0,0 +1,61 @@
+"""
+Notification Service
+"""
+
+import logging
+from fastapi import FastAPI
+from fastapi.middleware.cors import CORSMiddleware
+
+from app.core.config import settings
+from app.core.database import database_manager
+from shared.monitoring.logging import setup_logging
+from shared.monitoring.metrics import MetricsCollector
+
+# Setup logging
+setup_logging("notification-service", "INFO")
+logger = logging.getLogger(__name__)
+
+# Create FastAPI app
+app = FastAPI(
+    title="Notification Service",
+    description="Notification service for bakery forecasting",
+    version="1.0.0"
+)
+
+# Initialize metrics collector
+metrics_collector = MetricsCollector("notification-service")
+
+# CORS middleware
+app.add_middleware(
+    CORSMiddleware,
+    allow_origins=["*"],
+    allow_credentials=True,
+    allow_methods=["*"],
+    allow_headers=["*"],
+)
+
+@app.on_event("startup")
+async def startup_event():
+    """Application startup"""
+    logger.info("Starting Notification Service")
+
+    # Create database tables
+    await database_manager.create_tables()
+
+    # Start metrics server
+    metrics_collector.start_metrics_server(8080)
+
+    logger.info("Notification Service started successfully")
+
+@app.get("/health")
+async def health_check():
+    """Health check endpoint"""
+    return {
+        "status": "healthy",
+        "service": "notification-service",
+        "version": "1.0.0"
+    }
+
+if __name__ == "__main__":
+    import uvicorn
+    uvicorn.run(app, host="0.0.0.0", port=8000)
diff --git a/services/notification/app/schemas/__init__.py b/services/notification/app/schemas/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/services/notification/app/services/__init__.py b/services/notification/app/services/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/services/notification/requirements.txt b/services/notification/requirements.txt
new file mode 100644
index 00000000..affefed0
--- /dev/null
+++ b/services/notification/requirements.txt
@@ -0,0 +1,16 @@
+fastapi==0.104.1
+uvicorn[standard]==0.24.0
+sqlalchemy==2.0.23
+asyncpg==0.29.0
+alembic==1.12.1
+pydantic==2.5.0
+pydantic-settings==2.1.0
+httpx==0.25.2
+redis==5.0.1
+aio-pika==9.3.0
+prometheus-client==0.17.1
+python-json-logger==2.0.4
+pytz==2023.3
+PyJWT==2.8.0
+email-validator==2.1.0
+python-logstash==0.4.8
diff --git a/services/notification/shared/auth/__init__.py b/services/notification/shared/auth/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/services/notification/shared/auth/decorators.py b/services/notification/shared/auth/decorators.py
new file mode 100644
index 00000000..53095a15
--- /dev/null
+++ b/services/notification/shared/auth/decorators.py
@@ -0,0 +1,41 @@
+"""
+Authentication decorators for FastAPI
+"""
+
+from functools import wraps
+from fastapi import HTTPException, Depends
+from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials
+import httpx
+import logging
+
+logger = logging.getLogger(__name__)
+
+security = HTTPBearer()
+
+def verify_service_token(auth_service_url: str):
+    """Verify service token with auth service"""
+
+    async def verify_token(token: HTTPAuthorizationCredentials = Depends(security)):
+        try:
+            async with httpx.AsyncClient() as client:
+                response = await client.post(
+                    f"{auth_service_url}/verify",
+                    headers={"Authorization": f"Bearer {token.credentials}"}
+                )
+
+                if response.status_code == 200:
+                    return response.json()
+                else:
+                    raise HTTPException(
+                        status_code=401,
+                        detail="Invalid authentication credentials"
+                    )
+
+        except httpx.RequestError as e:
+            logger.error(f"Auth service unavailable: {e}")
+            raise HTTPException(
+                status_code=503,
+                detail="Authentication service unavailable"
+            )
+
+    return verify_token
\ No newline at end of file
diff --git a/services/notification/shared/auth/jwt_handler.py b/services/notification/shared/auth/jwt_handler.py
new file mode 100644
index 00000000..8e7643c7
--- /dev/null
+++ b/services/notification/shared/auth/jwt_handler.py
@@ -0,0 +1,58 @@
+"""
+Shared JWT Authentication Handler
+Used across all microservices for consistent authentication
+"""
+
+import jwt
+from datetime import datetime, timedelta
+from typing import Optional, Dict, Any
+import logging
+
+logger = logging.getLogger(__name__)
+
+class JWTHandler:
+    """JWT token handling for microservices"""
+
+    def __init__(self, secret_key: str, algorithm: str =
"HS256"): + self.secret_key = secret_key + self.algorithm = algorithm + + def create_access_token(self, data: Dict[str, Any], expires_delta: Optional[timedelta] = None) -> str: + """Create JWT access token""" + to_encode = data.copy() + + if expires_delta: + expire = datetime.utcnow() + expires_delta + else: + expire = datetime.utcnow() + timedelta(minutes=30) + + to_encode.update({"exp": expire, "type": "access"}) + + encoded_jwt = jwt.encode(to_encode, self.secret_key, algorithm=self.algorithm) + return encoded_jwt + + def create_refresh_token(self, data: Dict[str, Any], expires_delta: Optional[timedelta] = None) -> str: + """Create JWT refresh token""" + to_encode = data.copy() + + if expires_delta: + expire = datetime.utcnow() + expires_delta + else: + expire = datetime.utcnow() + timedelta(days=7) + + to_encode.update({"exp": expire, "type": "refresh"}) + + encoded_jwt = jwt.encode(to_encode, self.secret_key, algorithm=self.algorithm) + return encoded_jwt + + def verify_token(self, token: str) -> Optional[Dict[str, Any]]: + """Verify and decode JWT token""" + try: + payload = jwt.decode(token, self.secret_key, algorithms=[self.algorithm]) + return payload + except jwt.ExpiredSignatureError: + logger.warning("Token has expired") + return None + except jwt.InvalidTokenError: + logger.warning("Invalid token") + return None \ No newline at end of file diff --git a/services/notification/shared/database/__init__.py b/services/notification/shared/database/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/services/notification/shared/database/base.py b/services/notification/shared/database/base.py new file mode 100644 index 00000000..e5766716 --- /dev/null +++ b/services/notification/shared/database/base.py @@ -0,0 +1,56 @@ +""" +Base database configuration for all microservices +""" + +import os +from sqlalchemy import create_engine +from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine +from sqlalchemy.orm import sessionmaker, declarative_base +from sqlalchemy.pool import StaticPool +import logging + +logger = logging.getLogger(__name__) + +Base = declarative_base() + +class DatabaseManager: + """Database manager for microservices""" + + def __init__(self, database_url: str): + self.database_url = database_url + self.async_engine = create_async_engine( + database_url, + echo=False, + pool_pre_ping=True, + pool_recycle=300, + pool_size=20, + max_overflow=30 + ) + + self.async_session_local = sessionmaker( + self.async_engine, + class_=AsyncSession, + expire_on_commit=False + ) + + async def get_db(self): + """Get database session""" + async with self.async_session_local() as session: + try: + yield session + except Exception as e: + logger.error(f"Database session error: {e}") + await session.rollback() + raise + finally: + await session.close() + + async def create_tables(self): + """Create database tables""" + async with self.async_engine.begin() as conn: + await conn.run_sync(Base.metadata.create_all) + + async def drop_tables(self): + """Drop database tables""" + async with self.async_engine.begin() as conn: + await conn.run_sync(Base.metadata.drop_all) \ No newline at end of file diff --git a/services/notification/shared/messaging/__init__.py b/services/notification/shared/messaging/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/services/notification/shared/messaging/events.py b/services/notification/shared/messaging/events.py new file mode 100644 index 00000000..812e972d --- /dev/null +++ 
b/services/notification/shared/messaging/events.py
@@ -0,0 +1,73 @@
+"""
+Event definitions for microservices communication
+"""
+
+from dataclasses import dataclass
+from datetime import datetime
+from typing import Dict, Any, Optional
+import uuid
+
+@dataclass
+class BaseEvent:
+    """Base event class"""
+    event_id: str = ""
+    event_type: str = ""
+    service_name: str = ""
+    timestamp: Optional[datetime] = None
+    data: Optional[Dict[str, Any]] = None
+    correlation_id: Optional[str] = None
+
+    def __post_init__(self):
+        if not self.event_id:
+            self.event_id = str(uuid.uuid4())
+        self.timestamp = self.timestamp or datetime.utcnow()
+        self.data = {} if self.data is None else self.data
+
+# Training Events
+@dataclass
+class TrainingStartedEvent(BaseEvent):
+    event_type: str = "training.started"
+
+@dataclass
+class TrainingCompletedEvent(BaseEvent):
+    event_type: str = "training.completed"
+
+@dataclass
+class TrainingFailedEvent(BaseEvent):
+    event_type: str = "training.failed"
+
+# Forecasting Events
+@dataclass
+class ForecastGeneratedEvent(BaseEvent):
+    event_type: str = "forecast.generated"
+
+@dataclass
+class ForecastRequestedEvent(BaseEvent):
+    event_type: str = "forecast.requested"
+
+# User Events
+@dataclass
+class UserRegisteredEvent(BaseEvent):
+    event_type: str = "user.registered"
+
+@dataclass
+class UserLoginEvent(BaseEvent):
+    event_type: str = "user.login"
+
+# Tenant Events
+@dataclass
+class TenantCreatedEvent(BaseEvent):
+    event_type: str = "tenant.created"
+
+@dataclass
+class TenantUpdatedEvent(BaseEvent):
+    event_type: str = "tenant.updated"
+
+# Notification Events
+@dataclass
+class NotificationSentEvent(BaseEvent):
+    event_type: str = "notification.sent"
+
+@dataclass
+class NotificationFailedEvent(BaseEvent):
+    event_type: str = "notification.failed"
\ No newline at end of file
diff --git a/services/notification/shared/messaging/rabbitmq.py b/services/notification/shared/messaging/rabbitmq.py
new file mode 100644
index 00000000..62d95cfb
--- /dev/null
+++ b/services/notification/shared/messaging/rabbitmq.py
@@ -0,0 +1,96 @@
+"""
+RabbitMQ messaging client for microservices
+"""
+
+import asyncio
+import json
+import logging
+from typing import Dict, Any, Callable
+import aio_pika
+from aio_pika import connect_robust, Message, DeliveryMode
+
+logger = logging.getLogger(__name__)
+
+class RabbitMQClient:
+    """RabbitMQ client for microservices communication"""
+
+    def __init__(self, connection_url: str):
+        self.connection_url = connection_url
+        self.connection = None
+        self.channel = None
+
+    async def connect(self):
+        """Connect to RabbitMQ"""
+        try:
+            self.connection = await connect_robust(self.connection_url)
+            self.channel = await self.connection.channel()
+            logger.info("Connected to RabbitMQ")
+        except Exception as e:
+            logger.error(f"Failed to connect to RabbitMQ: {e}")
+            raise
+
+    async def disconnect(self):
+        """Disconnect from RabbitMQ"""
+        if self.connection:
+            await self.connection.close()
+            logger.info("Disconnected from RabbitMQ")
+
+    async def publish_event(self, exchange_name: str, routing_key: str, event_data: Dict[str, Any]):
+        """Publish event to RabbitMQ"""
+        try:
+            if not self.channel:
+                await self.connect()
+
+            # Declare exchange
+            exchange = await self.channel.declare_exchange(
+                exchange_name,
+                aio_pika.ExchangeType.TOPIC,
+                durable=True
+            )
+
+            # Create message
+            message = Message(
+                json.dumps(event_data).encode(),
+                delivery_mode=DeliveryMode.PERSISTENT,
+                content_type="application/json"
+            )
+
+            # Publish message
+            await exchange.publish(message, routing_key=routing_key)
+
+            logger.info(f"Published event to {exchange_name} with
routing key {routing_key}") + + except Exception as e: + logger.error(f"Failed to publish event: {e}") + raise + + async def consume_events(self, exchange_name: str, queue_name: str, routing_key: str, callback: Callable): + """Consume events from RabbitMQ""" + try: + if not self.channel: + await self.connect() + + # Declare exchange + exchange = await self.channel.declare_exchange( + exchange_name, + aio_pika.ExchangeType.TOPIC, + durable=True + ) + + # Declare queue + queue = await self.channel.declare_queue( + queue_name, + durable=True + ) + + # Bind queue to exchange + await queue.bind(exchange, routing_key) + + # Set up consumer + await queue.consume(callback) + + logger.info(f"Started consuming events from {queue_name}") + + except Exception as e: + logger.error(f"Failed to consume events: {e}") + raise \ No newline at end of file diff --git a/services/notification/shared/monitoring/__init__.py b/services/notification/shared/monitoring/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/services/notification/shared/monitoring/logging.py b/services/notification/shared/monitoring/logging.py new file mode 100644 index 00000000..0fde234d --- /dev/null +++ b/services/notification/shared/monitoring/logging.py @@ -0,0 +1,77 @@ +""" +Centralized logging configuration for microservices +""" + +import logging +import logging.config +import os +from typing import Dict, Any + +def setup_logging(service_name: str, log_level: str = "INFO") -> None: + """Set up logging configuration for a microservice""" + + config: Dict[str, Any] = { + "version": 1, + "disable_existing_loggers": False, + "formatters": { + "standard": { + "format": "%(asctime)s [%(levelname)s] %(name)s: %(message)s" + }, + "detailed": { + "format": "%(asctime)s [%(levelname)s] %(name)s [%(filename)s:%(lineno)d] %(message)s" + }, + "json": { + "()": "pythonjsonlogger.jsonlogger.JsonFormatter", + "format": "%(asctime)s %(name)s %(levelname)s %(message)s" + } + }, + "handlers": { + "console": { + "class": "logging.StreamHandler", + "level": log_level, + "formatter": "standard", + "stream": "ext://sys.stdout" + }, + "file": { + "class": "logging.FileHandler", + "level": log_level, + "formatter": "detailed", + "filename": f"/var/log/{service_name}.log", + "mode": "a" + }, + "logstash": { + "class": "logstash.TCPLogstashHandler", + "host": os.getenv("LOGSTASH_HOST", "localhost"), + "port": int(os.getenv("LOGSTASH_PORT", "5000")), + "version": 1, + "message_type": "logstash", + "fqdn": False, + "tags": [service_name] + } + }, + "loggers": { + "": { + "handlers": ["console", "file"], + "level": log_level, + "propagate": False + }, + "uvicorn": { + "handlers": ["console"], + "level": log_level, + "propagate": False + }, + "uvicorn.access": { + "handlers": ["console"], + "level": log_level, + "propagate": False + } + } + } + + # Add logstash handler if in production + if os.getenv("ENVIRONMENT") == "production": + config["loggers"][""]["handlers"].append("logstash") + + logging.config.dictConfig(config) + logger = logging.getLogger(__name__) + logger.info(f"Logging configured for {service_name}") \ No newline at end of file diff --git a/services/notification/shared/monitoring/metrics.py b/services/notification/shared/monitoring/metrics.py new file mode 100644 index 00000000..a5e35223 --- /dev/null +++ b/services/notification/shared/monitoring/metrics.py @@ -0,0 +1,112 @@ +""" +Metrics collection for microservices +""" + +import time +import logging +from typing import Dict, Any +from prometheus_client import Counter, 
Histogram, Gauge, start_http_server
+from functools import wraps
+
+logger = logging.getLogger(__name__)
+
+# Prometheus metrics
+REQUEST_COUNT = Counter(
+    'http_requests_total',
+    'Total HTTP requests',
+    ['method', 'endpoint', 'status_code', 'service']
+)
+
+REQUEST_DURATION = Histogram(
+    'http_request_duration_seconds',
+    'HTTP request duration in seconds',
+    ['method', 'endpoint', 'service']
+)
+
+ACTIVE_CONNECTIONS = Gauge(
+    'active_connections',
+    'Active database connections',
+    ['service']
+)
+
+TRAINING_JOBS = Counter(
+    'training_jobs_total',
+    'Total training jobs',
+    ['status', 'service']
+)
+
+FORECASTS_GENERATED = Counter(
+    'forecasts_generated_total',
+    'Total forecasts generated',
+    ['service']
+)
+
+class MetricsCollector:
+    """Metrics collector for microservices"""
+
+    def __init__(self, service_name: str):
+        self.service_name = service_name
+        self.start_time = time.time()
+
+    def start_metrics_server(self, port: int = 8080):
+        """Start Prometheus metrics server"""
+        try:
+            start_http_server(port)
+            logger.info(f"Metrics server started on port {port}")
+        except Exception as e:
+            logger.error(f"Failed to start metrics server: {e}")
+
+    def record_request(self, method: str, endpoint: str, status_code: int, duration: float):
+        """Record HTTP request metrics"""
+        REQUEST_COUNT.labels(
+            method=method,
+            endpoint=endpoint,
+            status_code=status_code,
+            service=self.service_name
+        ).inc()
+
+        REQUEST_DURATION.labels(
+            method=method,
+            endpoint=endpoint,
+            service=self.service_name
+        ).observe(duration)
+
+    def record_training_job(self, status: str):
+        """Record training job metrics"""
+        TRAINING_JOBS.labels(
+            status=status,
+            service=self.service_name
+        ).inc()
+
+    def record_forecast_generated(self):
+        """Record forecast generation metrics"""
+        FORECASTS_GENERATED.labels(
+            service=self.service_name
+        ).inc()
+
+    def set_active_connections(self, count: int):
+        """Set active database connections"""
+        ACTIVE_CONNECTIONS.labels(
+            service=self.service_name
+        ).set(count)
+
+def metrics_middleware(metrics_collector: MetricsCollector):
+    """Middleware to collect metrics"""
+
+    async def middleware(request, call_next):
+        start_time = time.time()
+
+        response = await call_next(request)
+
+        duration = time.time() - start_time
+
+        metrics_collector.record_request(
+            method=request.method,
+            endpoint=request.url.path,
+            status_code=response.status_code,
+            duration=duration
+        )
+
+        return response
+
+    return middleware
\ No newline at end of file
diff --git a/services/notification/shared/utils/__init__.py b/services/notification/shared/utils/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/services/notification/shared/utils/datetime_utils.py b/services/notification/shared/utils/datetime_utils.py
new file mode 100644
index 00000000..3035001a
--- /dev/null
+++ b/services/notification/shared/utils/datetime_utils.py
@@ -0,0 +1,71 @@
+"""
+DateTime utilities for microservices
+"""
+
+from datetime import datetime, timezone, timedelta
+from typing import Optional
+import pytz
+
+def utc_now() -> datetime:
+    """Get current UTC datetime"""
+    return datetime.now(timezone.utc)
+
+def madrid_now() -> datetime:
+    """Get current Madrid datetime"""
+    madrid_tz = pytz.timezone('Europe/Madrid')
+    return datetime.now(madrid_tz)
+
+def to_utc(dt: datetime) -> datetime:
+    """Convert datetime to UTC"""
+    if dt.tzinfo is None:
+        dt = dt.replace(tzinfo=timezone.utc)
+    return dt.astimezone(timezone.utc)
+
+def to_madrid(dt: datetime) -> datetime:
+    """Convert datetime to Madrid timezone"""
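+    # naive datetimes are interpreted as UTC before conversion; pass an
+    # aware datetime to avoid that assumption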
madrid_tz = pytz.timezone('Europe/Madrid') + if dt.tzinfo is None: + dt = dt.replace(tzinfo=timezone.utc) + return dt.astimezone(madrid_tz) + +def format_datetime(dt: datetime, format_str: str = "%Y-%m-%d %H:%M:%S") -> str: + """Format datetime as string""" + return dt.strftime(format_str) + +def parse_datetime(dt_str: str, format_str: str = "%Y-%m-%d %H:%M:%S") -> datetime: + """Parse datetime from string""" + return datetime.strptime(dt_str, format_str) + +def is_business_hours(dt: Optional[datetime] = None) -> bool: + """Check if datetime is during business hours (9 AM - 6 PM Madrid time)""" + if dt is None: + dt = madrid_now() + + if dt.tzinfo is None: + dt = dt.replace(tzinfo=timezone.utc) + + madrid_dt = to_madrid(dt) + + # Check if it's a weekday (Monday=0, Sunday=6) + if madrid_dt.weekday() >= 5: # Weekend + return False + + # Check if it's business hours + return 9 <= madrid_dt.hour < 18 + +def next_business_day(dt: Optional[datetime] = None) -> datetime: + """Get next business day""" + if dt is None: + dt = madrid_now() + + if dt.tzinfo is None: + dt = dt.replace(tzinfo=timezone.utc) + + madrid_dt = to_madrid(dt) + + # Add days until we reach a weekday + while madrid_dt.weekday() >= 5: # Weekend + madrid_dt += timedelta(days=1) + + # Set to 9 AM + return madrid_dt.replace(hour=9, minute=0, second=0, microsecond=0) \ No newline at end of file diff --git a/services/notification/shared/utils/validation.py b/services/notification/shared/utils/validation.py new file mode 100644 index 00000000..c855b20c --- /dev/null +++ b/services/notification/shared/utils/validation.py @@ -0,0 +1,67 @@ +""" +Validation utilities for microservices +""" + +import re +from typing import Any, Optional +from email_validator import validate_email, EmailNotValidError + +def validate_spanish_phone(phone: str) -> bool: + """Validate Spanish phone number""" + # Spanish phone pattern: +34 followed by 9 digits + pattern = r'^(\+34|0034|34)?[6-9]\d{8}$' + return bool(re.match(pattern, phone.replace(' ', '').replace('-', ''))) + +def validate_email_address(email: str) -> bool: + """Validate email address""" + try: + validate_email(email) + return True + except EmailNotValidError: + return False + +def validate_tenant_name(name: str) -> bool: + """Validate tenant name""" + # Must be 2-50 characters, letters, numbers, spaces, hyphens, apostrophes + pattern = r"^[a-zA-ZÀ-ÿ0-9\s\-']{2,50}$" + return bool(re.match(pattern, name)) + +def validate_address(address: str) -> bool: + """Validate address""" + # Must be 5-200 characters + return 5 <= len(address.strip()) <= 200 + +def validate_coordinates(latitude: float, longitude: float) -> bool: + """Validate Madrid coordinates""" + # Madrid is roughly between these coordinates + madrid_bounds = { + 'lat_min': 40.3, + 'lat_max': 40.6, + 'lon_min': -3.8, + 'lon_max': -3.5 + } + + return ( + madrid_bounds['lat_min'] <= latitude <= madrid_bounds['lat_max'] and + madrid_bounds['lon_min'] <= longitude <= madrid_bounds['lon_max'] + ) + +def validate_product_name(name: str) -> bool: + """Validate product name""" + # Must be 1-50 characters, letters, numbers, spaces + pattern = r"^[a-zA-ZÀ-ÿ0-9\s]{1,50}$" + return bool(re.match(pattern, name)) + +def validate_positive_number(value: Any) -> bool: + """Validate positive number""" + try: + return float(value) > 0 + except (ValueError, TypeError): + return False + +def validate_non_negative_number(value: Any) -> bool: + """Validate non-negative number""" + try: + return float(value) >= 0 + except (ValueError, TypeError): + return False 
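+
+# Illustrative checks (assumed usage):
+#   validate_spanish_phone("+34 612 345 678")  -> True
+#   validate_coordinates(40.42, -3.70)         -> True  (central Madrid)
+#   validate_positive_number("0")              -> False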
\ No newline at end of file
diff --git a/services/tenant/Dockerfile b/services/tenant/Dockerfile
new file mode 100644
index 00000000..ad431c34
--- /dev/null
+++ b/services/tenant/Dockerfile
@@ -0,0 +1,31 @@
+FROM python:3.11-slim
+
+WORKDIR /app
+
+# Install system dependencies
+RUN apt-get update && apt-get install -y \
+    gcc \
+    curl \
+    && rm -rf /var/lib/apt/lists/*
+
+# Copy requirements
+COPY requirements.txt .
+
+# Install Python dependencies
+RUN pip install --no-cache-dir -r requirements.txt
+
+# Copy application code
+COPY . .
+
+# Add shared libraries to Python path
+ENV PYTHONPATH="/app:/app/shared:$PYTHONPATH"
+
+# Expose port
+EXPOSE 8000
+
+# Health check
+HEALTHCHECK --interval=30s --timeout=10s --start-period=5s --retries=3 \
+    CMD curl -f http://localhost:8000/health || exit 1
+
+# Run application
+CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000"]
diff --git a/services/tenant/app/__init__.py b/services/tenant/app/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/services/tenant/app/api/__init__.py b/services/tenant/app/api/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/services/tenant/app/core/__init__.py b/services/tenant/app/core/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/services/tenant/app/core/config.py b/services/tenant/app/core/config.py
new file mode 100644
index 00000000..8c0a8e88
--- /dev/null
+++ b/services/tenant/app/core/config.py
@@ -0,0 +1,32 @@
+"""
+Tenant service configuration
+"""
+
+import os
+from pydantic_settings import BaseSettings
+
+class Settings(BaseSettings):
+    """Application settings"""
+
+    # Basic settings
+    APP_NAME: str = "Tenant Service"
+    VERSION: str = "1.0.0"
+    DEBUG: bool = os.getenv("DEBUG", "False").lower() == "true"
+    LOG_LEVEL: str = os.getenv("LOG_LEVEL", "INFO")
+
+    # Database settings
+    DATABASE_URL: str = os.getenv("DATABASE_URL", "postgresql+asyncpg://tenant_user:tenant_pass123@tenant-db:5432/tenant_db")
+
+    # Redis settings
+    REDIS_URL: str = os.getenv("REDIS_URL", "redis://redis:6379/0")
+
+    # RabbitMQ settings
+    RABBITMQ_URL: str = os.getenv("RABBITMQ_URL", "amqp://bakery:forecast123@rabbitmq:5672/")
+
+    # Service URLs
+    AUTH_SERVICE_URL: str = os.getenv("AUTH_SERVICE_URL", "http://auth-service:8000")
+
+    class Config:
+        env_file = ".env"
+
+settings = Settings()
diff --git a/services/tenant/app/core/database.py b/services/tenant/app/core/database.py
new file mode 100644
index 00000000..2206dde6
--- /dev/null
+++ b/services/tenant/app/core/database.py
@@ -0,0 +1,12 @@
+"""
+Database configuration for tenant service
+"""
+
+from shared.database.base import DatabaseManager
+from app.core.config import settings
+
+# Initialize database manager
+database_manager = DatabaseManager(settings.DATABASE_URL)
+
+# Alias for convenience
+get_db = database_manager.get_db
diff --git a/services/tenant/app/main.py b/services/tenant/app/main.py
new file mode 100644
index 00000000..0a517fe7
--- /dev/null
+++ b/services/tenant/app/main.py
@@ -0,0 +1,61 @@
+"""
+Tenant Service
+"""
+
+import logging
+from fastapi import FastAPI
+from fastapi.middleware.cors import CORSMiddleware
+
+from app.core.config import settings
+from app.core.database import database_manager
+from shared.monitoring.logging import setup_logging
+from shared.monitoring.metrics import MetricsCollector
+
+# Setup logging
+setup_logging("tenant-service", "INFO")
+logger = logging.getLogger(__name__)
+
+# Create FastAPI app
+app = FastAPI(
+    title="Tenant Service",
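+    # interactive API docs are served at /docs and /redoc by default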
description="uLutenant service for bakery forecasting", + version="1.0.0" +) + +# Initialize metrics collector +metrics_collector = MetricsCollector("tenant-service") + +# CORS middleware +app.add_middleware( + CORSMiddleware, + allow_origins=["*"], + allow_credentials=True, + allow_methods=["*"], + allow_headers=["*"], +) + +@app.on_event("startup") +async def startup_event(): + """Application startup""" + logger.info("Starting uLutenant Service") + + # Create database tables + await database_manager.create_tables() + + # Start metrics server + metrics_collector.start_metrics_server(8080) + + logger.info("uLutenant Service started successfully") + +@app.get("/health") +async def health_check(): + """Health check endpoint""" + return { + "status": "healthy", + "service": "tenant-service", + "version": "1.0.0" + } + +if __name__ == "__main__": + import uvicorn + uvicorn.run(app, host="0.0.0.0", port=8000) diff --git a/services/tenant/app/schemas/__init__.py b/services/tenant/app/schemas/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/services/tenant/app/services/__init__.py b/services/tenant/app/services/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/services/tenant/requirements.txt b/services/tenant/requirements.txt new file mode 100644 index 00000000..affefed0 --- /dev/null +++ b/services/tenant/requirements.txt @@ -0,0 +1,13 @@ +fastapi==0.104.1 +uvicorn[standard]==0.24.0 +sqlalchemy==2.0.23 +asyncpg==0.29.0 +alembic==1.12.1 +pydantic==2.5.0 +pydantic-settings==2.1.0 +httpx==0.25.2 +redis==5.0.1 +aio-pika==9.3.0 +prometheus-client==0.17.1 +python-json-logger==2.0.4 +pytz==2023.3 diff --git a/services/tenant/shared/auth/__init__.py b/services/tenant/shared/auth/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/services/tenant/shared/auth/decorators.py b/services/tenant/shared/auth/decorators.py new file mode 100644 index 00000000..53095a15 --- /dev/null +++ b/services/tenant/shared/auth/decorators.py @@ -0,0 +1,41 @@ +""" +Authentication decorators for FastAPI +""" + +from functools import wraps +from fastapi import HTTPException, Depends +from fastapi.security import HTTPBearer +import httpx +import logging + +logger = logging.getLogger(__name__) + +security = HTTPBearer() + +def verify_service_token(auth_service_url: str): + """Verify service token with auth service""" + + async def verify_token(token: str = Depends(security)): + try: + async with httpx.AsyncClient() as client: + response = await client.post( + f"{auth_service_url}/verify", + headers={"Authorization": f"Bearer {token.credentials}"} + ) + + if response.status_code == 200: + return response.json() + else: + raise HTTPException( + status_code=401, + detail="Invalid authentication credentials" + ) + + except httpx.RequestError as e: + logger.error(f"Auth service unavailable: {e}") + raise HTTPException( + status_code=503, + detail="Authentication service unavailable" + ) + + return verify_token \ No newline at end of file diff --git a/services/tenant/shared/auth/jwt_handler.py b/services/tenant/shared/auth/jwt_handler.py new file mode 100644 index 00000000..8e7643c7 --- /dev/null +++ b/services/tenant/shared/auth/jwt_handler.py @@ -0,0 +1,58 @@ +""" +Shared JWT Authentication Handler +Used across all microservices for consistent authentication +""" + +import jwt +from datetime import datetime, timedelta +from typing import Optional, Dict, Any +import logging + +logger = logging.getLogger(__name__) + +class JWTHandler: + """JWT token handling for 
microservices""" + + def __init__(self, secret_key: str, algorithm: str = "HS256"): + self.secret_key = secret_key + self.algorithm = algorithm + + def create_access_token(self, data: Dict[str, Any], expires_delta: Optional[timedelta] = None) -> str: + """Create JWT access token""" + to_encode = data.copy() + + if expires_delta: + expire = datetime.utcnow() + expires_delta + else: + expire = datetime.utcnow() + timedelta(minutes=30) + + to_encode.update({"exp": expire, "type": "access"}) + + encoded_jwt = jwt.encode(to_encode, self.secret_key, algorithm=self.algorithm) + return encoded_jwt + + def create_refresh_token(self, data: Dict[str, Any], expires_delta: Optional[timedelta] = None) -> str: + """Create JWT refresh token""" + to_encode = data.copy() + + if expires_delta: + expire = datetime.utcnow() + expires_delta + else: + expire = datetime.utcnow() + timedelta(days=7) + + to_encode.update({"exp": expire, "type": "refresh"}) + + encoded_jwt = jwt.encode(to_encode, self.secret_key, algorithm=self.algorithm) + return encoded_jwt + + def verify_token(self, token: str) -> Optional[Dict[str, Any]]: + """Verify and decode JWT token""" + try: + payload = jwt.decode(token, self.secret_key, algorithms=[self.algorithm]) + return payload + except jwt.ExpiredSignatureError: + logger.warning("Token has expired") + return None + except jwt.InvalidTokenError: + logger.warning("Invalid token") + return None \ No newline at end of file diff --git a/services/tenant/shared/database/__init__.py b/services/tenant/shared/database/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/services/tenant/shared/database/base.py b/services/tenant/shared/database/base.py new file mode 100644 index 00000000..e5766716 --- /dev/null +++ b/services/tenant/shared/database/base.py @@ -0,0 +1,56 @@ +""" +Base database configuration for all microservices +""" + +import os +from sqlalchemy import create_engine +from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine +from sqlalchemy.orm import sessionmaker, declarative_base +from sqlalchemy.pool import StaticPool +import logging + +logger = logging.getLogger(__name__) + +Base = declarative_base() + +class DatabaseManager: + """Database manager for microservices""" + + def __init__(self, database_url: str): + self.database_url = database_url + self.async_engine = create_async_engine( + database_url, + echo=False, + pool_pre_ping=True, + pool_recycle=300, + pool_size=20, + max_overflow=30 + ) + + self.async_session_local = sessionmaker( + self.async_engine, + class_=AsyncSession, + expire_on_commit=False + ) + + async def get_db(self): + """Get database session""" + async with self.async_session_local() as session: + try: + yield session + except Exception as e: + logger.error(f"Database session error: {e}") + await session.rollback() + raise + finally: + await session.close() + + async def create_tables(self): + """Create database tables""" + async with self.async_engine.begin() as conn: + await conn.run_sync(Base.metadata.create_all) + + async def drop_tables(self): + """Drop database tables""" + async with self.async_engine.begin() as conn: + await conn.run_sync(Base.metadata.drop_all) \ No newline at end of file diff --git a/services/tenant/shared/messaging/__init__.py b/services/tenant/shared/messaging/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/services/tenant/shared/messaging/events.py b/services/tenant/shared/messaging/events.py new file mode 100644 index 00000000..812e972d --- /dev/null +++ 
b/services/tenant/shared/messaging/events.py
@@ -0,0 +1,73 @@
+"""
+Event definitions for microservices communication
+"""
+
+from dataclasses import dataclass
+from datetime import datetime
+from typing import Dict, Any, Optional
+import uuid
+
+@dataclass
+class BaseEvent:
+    """Base event class"""
+    event_id: str = ""
+    event_type: str = ""
+    service_name: str = ""
+    timestamp: Optional[datetime] = None
+    data: Optional[Dict[str, Any]] = None
+    correlation_id: Optional[str] = None
+
+    def __post_init__(self):
+        if not self.event_id:
+            self.event_id = str(uuid.uuid4())
+        self.timestamp = self.timestamp or datetime.utcnow()
+        self.data = {} if self.data is None else self.data
+
+# Training Events
+@dataclass
+class TrainingStartedEvent(BaseEvent):
+    event_type: str = "training.started"
+
+@dataclass
+class TrainingCompletedEvent(BaseEvent):
+    event_type: str = "training.completed"
+
+@dataclass
+class TrainingFailedEvent(BaseEvent):
+    event_type: str = "training.failed"
+
+# Forecasting Events
+@dataclass
+class ForecastGeneratedEvent(BaseEvent):
+    event_type: str = "forecast.generated"
+
+@dataclass
+class ForecastRequestedEvent(BaseEvent):
+    event_type: str = "forecast.requested"
+
+# User Events
+@dataclass
+class UserRegisteredEvent(BaseEvent):
+    event_type: str = "user.registered"
+
+@dataclass
+class UserLoginEvent(BaseEvent):
+    event_type: str = "user.login"
+
+# Tenant Events
+@dataclass
+class TenantCreatedEvent(BaseEvent):
+    event_type: str = "tenant.created"
+
+@dataclass
+class TenantUpdatedEvent(BaseEvent):
+    event_type: str = "tenant.updated"
+
+# Notification Events
+@dataclass
+class NotificationSentEvent(BaseEvent):
+    event_type: str = "notification.sent"
+
+@dataclass
+class NotificationFailedEvent(BaseEvent):
+    event_type: str = "notification.failed"
\ No newline at end of file
diff --git a/services/tenant/shared/messaging/rabbitmq.py b/services/tenant/shared/messaging/rabbitmq.py
new file mode 100644
index 00000000..62d95cfb
--- /dev/null
+++ b/services/tenant/shared/messaging/rabbitmq.py
@@ -0,0 +1,96 @@
+"""
+RabbitMQ messaging client for microservices
+"""
+
+import asyncio
+import json
+import logging
+from typing import Dict, Any, Callable
+import aio_pika
+from aio_pika import connect_robust, Message, DeliveryMode
+
+logger = logging.getLogger(__name__)
+
+class RabbitMQClient:
+    """RabbitMQ client for microservices communication"""
+
+    def __init__(self, connection_url: str):
+        self.connection_url = connection_url
+        self.connection = None
+        self.channel = None
+
+    async def connect(self):
+        """Connect to RabbitMQ"""
+        try:
+            self.connection = await connect_robust(self.connection_url)
+            self.channel = await self.connection.channel()
+            logger.info("Connected to RabbitMQ")
+        except Exception as e:
+            logger.error(f"Failed to connect to RabbitMQ: {e}")
+            raise
+
+    async def disconnect(self):
+        """Disconnect from RabbitMQ"""
+        if self.connection:
+            await self.connection.close()
+            logger.info("Disconnected from RabbitMQ")
+
+    async def publish_event(self, exchange_name: str, routing_key: str, event_data: Dict[str, Any]):
+        """Publish event to RabbitMQ"""
+        try:
+            if not self.channel:
+                await self.connect()
+
+            # Declare exchange
+            exchange = await self.channel.declare_exchange(
+                exchange_name,
+                aio_pika.ExchangeType.TOPIC,
+                durable=True
+            )
+
+            # Create message
+            message = Message(
+                json.dumps(event_data).encode(),
+                delivery_mode=DeliveryMode.PERSISTENT,
+                content_type="application/json"
+            )
+
+            # Publish message
+            await exchange.publish(message, routing_key=routing_key)
+
+            logger.info(f"Published event to {exchange_name} with routing key {routing_key}")
+
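+            # PERSISTENT delivery only survives a broker restart when the
+            # message ends up in a durable queue bound to this durable exchange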
+ except Exception as e: + logger.error(f"Failed to publish event: {e}") + raise + + async def consume_events(self, exchange_name: str, queue_name: str, routing_key: str, callback: Callable): + """Consume events from RabbitMQ""" + try: + if not self.channel: + await self.connect() + + # Declare exchange + exchange = await self.channel.declare_exchange( + exchange_name, + aio_pika.ExchangeType.TOPIC, + durable=True + ) + + # Declare queue + queue = await self.channel.declare_queue( + queue_name, + durable=True + ) + + # Bind queue to exchange + await queue.bind(exchange, routing_key) + + # Set up consumer + await queue.consume(callback) + + logger.info(f"Started consuming events from {queue_name}") + + except Exception as e: + logger.error(f"Failed to consume events: {e}") + raise \ No newline at end of file diff --git a/services/tenant/shared/monitoring/__init__.py b/services/tenant/shared/monitoring/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/services/tenant/shared/monitoring/logging.py b/services/tenant/shared/monitoring/logging.py new file mode 100644 index 00000000..0fde234d --- /dev/null +++ b/services/tenant/shared/monitoring/logging.py @@ -0,0 +1,77 @@ +""" +Centralized logging configuration for microservices +""" + +import logging +import logging.config +import os +from typing import Dict, Any + +def setup_logging(service_name: str, log_level: str = "INFO") -> None: + """Set up logging configuration for a microservice""" + + config: Dict[str, Any] = { + "version": 1, + "disable_existing_loggers": False, + "formatters": { + "standard": { + "format": "%(asctime)s [%(levelname)s] %(name)s: %(message)s" + }, + "detailed": { + "format": "%(asctime)s [%(levelname)s] %(name)s [%(filename)s:%(lineno)d] %(message)s" + }, + "json": { + "()": "pythonjsonlogger.jsonlogger.JsonFormatter", + "format": "%(asctime)s %(name)s %(levelname)s %(message)s" + } + }, + "handlers": { + "console": { + "class": "logging.StreamHandler", + "level": log_level, + "formatter": "standard", + "stream": "ext://sys.stdout" + }, + "file": { + "class": "logging.FileHandler", + "level": log_level, + "formatter": "detailed", + "filename": f"/var/log/{service_name}.log", + "mode": "a" + }, + "logstash": { + "class": "logstash.TCPLogstashHandler", + "host": os.getenv("LOGSTASH_HOST", "localhost"), + "port": int(os.getenv("LOGSTASH_PORT", "5000")), + "version": 1, + "message_type": "logstash", + "fqdn": False, + "tags": [service_name] + } + }, + "loggers": { + "": { + "handlers": ["console", "file"], + "level": log_level, + "propagate": False + }, + "uvicorn": { + "handlers": ["console"], + "level": log_level, + "propagate": False + }, + "uvicorn.access": { + "handlers": ["console"], + "level": log_level, + "propagate": False + } + } + } + + # Add logstash handler if in production + if os.getenv("ENVIRONMENT") == "production": + config["loggers"][""]["handlers"].append("logstash") + + logging.config.dictConfig(config) + logger = logging.getLogger(__name__) + logger.info(f"Logging configured for {service_name}") \ No newline at end of file diff --git a/services/tenant/shared/monitoring/metrics.py b/services/tenant/shared/monitoring/metrics.py new file mode 100644 index 00000000..a5e35223 --- /dev/null +++ b/services/tenant/shared/monitoring/metrics.py @@ -0,0 +1,112 @@ +""" +Metrics collection for microservices +""" + +import time +import logging +from typing import Dict, Any +from prometheus_client import Counter, Histogram, Gauge, start_http_server +from functools import wraps + +logger = 
logging.getLogger(__name__)
+
+# Prometheus metrics
+REQUEST_COUNT = Counter(
+    'http_requests_total',
+    'Total HTTP requests',
+    ['method', 'endpoint', 'status_code', 'service']
+)
+
+REQUEST_DURATION = Histogram(
+    'http_request_duration_seconds',
+    'HTTP request duration in seconds',
+    ['method', 'endpoint', 'service']
+)
+
+ACTIVE_CONNECTIONS = Gauge(
+    'active_connections',
+    'Active database connections',
+    ['service']
+)
+
+TRAINING_JOBS = Counter(
+    'training_jobs_total',
+    'Total training jobs',
+    ['status', 'service']
+)
+
+FORECASTS_GENERATED = Counter(
+    'forecasts_generated_total',
+    'Total forecasts generated',
+    ['service']
+)
+
+class MetricsCollector:
+    """Metrics collector for microservices"""
+
+    def __init__(self, service_name: str):
+        self.service_name = service_name
+        self.start_time = time.time()
+
+    def start_metrics_server(self, port: int = 8080):
+        """Start Prometheus metrics server"""
+        try:
+            start_http_server(port)
+            logger.info(f"Metrics server started on port {port}")
+        except Exception as e:
+            logger.error(f"Failed to start metrics server: {e}")
+
+    def record_request(self, method: str, endpoint: str, status_code: int, duration: float):
+        """Record HTTP request metrics"""
+        REQUEST_COUNT.labels(
+            method=method,
+            endpoint=endpoint,
+            status_code=status_code,
+            service=self.service_name
+        ).inc()
+
+        REQUEST_DURATION.labels(
+            method=method,
+            endpoint=endpoint,
+            service=self.service_name
+        ).observe(duration)
+
+    def record_training_job(self, status: str):
+        """Record training job metrics"""
+        TRAINING_JOBS.labels(
+            status=status,
+            service=self.service_name
+        ).inc()
+
+    def record_forecast_generated(self):
+        """Record forecast generation metrics"""
+        FORECASTS_GENERATED.labels(
+            service=self.service_name
+        ).inc()
+
+    def set_active_connections(self, count: int):
+        """Set active database connections"""
+        ACTIVE_CONNECTIONS.labels(
+            service=self.service_name
+        ).set(count)
+
+def metrics_middleware(metrics_collector: MetricsCollector):
+    """Middleware to collect metrics"""
+
+    async def middleware(request, call_next):
+        start_time = time.time()
+
+        response = await call_next(request)
+
+        duration = time.time() - start_time
+
+        metrics_collector.record_request(
+            method=request.method,
+            endpoint=request.url.path,
+            status_code=response.status_code,
+            duration=duration
+        )
+
+        return response
+
+    return middleware
\ No newline at end of file
diff --git a/services/tenant/shared/utils/__init__.py b/services/tenant/shared/utils/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/services/tenant/shared/utils/datetime_utils.py b/services/tenant/shared/utils/datetime_utils.py
new file mode 100644
index 00000000..3035001a
--- /dev/null
+++ b/services/tenant/shared/utils/datetime_utils.py
@@ -0,0 +1,71 @@
+"""
+DateTime utilities for microservices
+"""
+
+from datetime import datetime, timezone, timedelta
+from typing import Optional
+import pytz
+
+def utc_now() -> datetime:
+    """Get current UTC datetime"""
+    return datetime.now(timezone.utc)
+
+def madrid_now() -> datetime:
+    """Get current Madrid datetime"""
+    madrid_tz = pytz.timezone('Europe/Madrid')
+    return datetime.now(madrid_tz)
+
+def to_utc(dt: datetime) -> datetime:
+    """Convert datetime to UTC"""
+    if dt.tzinfo is None:
+        dt = dt.replace(tzinfo=timezone.utc)
+    return dt.astimezone(timezone.utc)
+
+def to_madrid(dt: datetime) -> datetime:
+    """Convert datetime to Madrid timezone"""
+    madrid_tz = pytz.timezone('Europe/Madrid')
+    if dt.tzinfo is None:
+        dt = dt.replace(tzinfo=timezone.utc)
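+    # replace() is safe for UTC (no DST); use tz.localize() when attaching
+    # a pytz zone other than UTC directly to a naive datetime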
return dt.astimezone(madrid_tz) + +def format_datetime(dt: datetime, format_str: str = "%Y-%m-%d %H:%M:%S") -> str: + """Format datetime as string""" + return dt.strftime(format_str) + +def parse_datetime(dt_str: str, format_str: str = "%Y-%m-%d %H:%M:%S") -> datetime: + """Parse datetime from string""" + return datetime.strptime(dt_str, format_str) + +def is_business_hours(dt: Optional[datetime] = None) -> bool: + """Check if datetime is during business hours (9 AM - 6 PM Madrid time)""" + if dt is None: + dt = madrid_now() + + if dt.tzinfo is None: + dt = dt.replace(tzinfo=timezone.utc) + + madrid_dt = to_madrid(dt) + + # Check if it's a weekday (Monday=0, Sunday=6) + if madrid_dt.weekday() >= 5: # Weekend + return False + + # Check if it's business hours + return 9 <= madrid_dt.hour < 18 + +def next_business_day(dt: Optional[datetime] = None) -> datetime: + """Get next business day""" + if dt is None: + dt = madrid_now() + + if dt.tzinfo is None: + dt = dt.replace(tzinfo=timezone.utc) + + madrid_dt = to_madrid(dt) + + # Add days until we reach a weekday + while madrid_dt.weekday() >= 5: # Weekend + madrid_dt += timedelta(days=1) + + # Set to 9 AM + return madrid_dt.replace(hour=9, minute=0, second=0, microsecond=0) \ No newline at end of file diff --git a/services/tenant/shared/utils/validation.py b/services/tenant/shared/utils/validation.py new file mode 100644 index 00000000..c855b20c --- /dev/null +++ b/services/tenant/shared/utils/validation.py @@ -0,0 +1,67 @@ +""" +Validation utilities for microservices +""" + +import re +from typing import Any, Optional +from email_validator import validate_email, EmailNotValidError + +def validate_spanish_phone(phone: str) -> bool: + """Validate Spanish phone number""" + # Spanish phone pattern: +34 followed by 9 digits + pattern = r'^(\+34|0034|34)?[6-9]\d{8}$' + return bool(re.match(pattern, phone.replace(' ', '').replace('-', ''))) + +def validate_email_address(email: str) -> bool: + """Validate email address""" + try: + validate_email(email) + return True + except EmailNotValidError: + return False + +def validate_tenant_name(name: str) -> bool: + """Validate tenant name""" + # Must be 2-50 characters, letters, numbers, spaces, hyphens, apostrophes + pattern = r"^[a-zA-ZÀ-ÿ0-9\s\-']{2,50}$" + return bool(re.match(pattern, name)) + +def validate_address(address: str) -> bool: + """Validate address""" + # Must be 5-200 characters + return 5 <= len(address.strip()) <= 200 + +def validate_coordinates(latitude: float, longitude: float) -> bool: + """Validate Madrid coordinates""" + # Madrid is roughly between these coordinates + madrid_bounds = { + 'lat_min': 40.3, + 'lat_max': 40.6, + 'lon_min': -3.8, + 'lon_max': -3.5 + } + + return ( + madrid_bounds['lat_min'] <= latitude <= madrid_bounds['lat_max'] and + madrid_bounds['lon_min'] <= longitude <= madrid_bounds['lon_max'] + ) + +def validate_product_name(name: str) -> bool: + """Validate product name""" + # Must be 1-50 characters, letters, numbers, spaces + pattern = r"^[a-zA-ZÀ-ÿ0-9\s]{1,50}$" + return bool(re.match(pattern, name)) + +def validate_positive_number(value: Any) -> bool: + """Validate positive number""" + try: + return float(value) > 0 + except (ValueError, TypeError): + return False + +def validate_non_negative_number(value: Any) -> bool: + """Validate non-negative number""" + try: + return float(value) >= 0 + except (ValueError, TypeError): + return False \ No newline at end of file diff --git a/services/training/app/__init__.py b/services/training/app/__init__.py new file mode 
100644 index 00000000..e69de29b diff --git a/services/training/app/api/__init__.py b/services/training/app/api/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/services/training/app/api/models.py b/services/training/app/api/models.py new file mode 100644 index 00000000..dc378f92 --- /dev/null +++ b/services/training/app/api/models.py @@ -0,0 +1,33 @@ +""" +Models API endpoints +""" + +from fastapi import APIRouter, Depends, HTTPException, status +from sqlalchemy.ext.asyncio import AsyncSession +from typing import List +import logging + +from app.core.database import get_db +from app.core.auth import verify_token +from app.schemas.training import TrainedModelResponse +from app.services.training_service import TrainingService + +logger = logging.getLogger(__name__) +router = APIRouter() + +training_service = TrainingService() + +@router.get("/", response_model=List[TrainedModelResponse]) +async def get_trained_models( + user_data: dict = Depends(verify_token), + db: AsyncSession = Depends(get_db) +): + """Get trained models""" + try: + return await training_service.get_trained_models(user_data, db) + except Exception as e: + logger.error(f"Get trained models error: {e}") + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + detail="Failed to get trained models" + ) \ No newline at end of file diff --git a/services/training/app/api/training.py b/services/training/app/api/training.py new file mode 100644 index 00000000..a9fc161b --- /dev/null +++ b/services/training/app/api/training.py @@ -0,0 +1,77 @@ +""" +Training API endpoints +""" + +from fastapi import APIRouter, Depends, HTTPException, status, Query +from sqlalchemy.ext.asyncio import AsyncSession +from typing import List, Optional +import logging + +from app.core.database import get_db +from app.core.auth import verify_token +from app.schemas.training import TrainingRequest, TrainingJobResponse, TrainedModelResponse +from app.services.training_service import TrainingService + +logger = logging.getLogger(__name__) +router = APIRouter() + +training_service = TrainingService() + +@router.post("/train", response_model=TrainingJobResponse) +async def start_training( + request: TrainingRequest, + user_data: dict = Depends(verify_token), + db: AsyncSession = Depends(get_db) +): + """Start training job""" + try: + return await training_service.start_training(request, user_data, db) + except ValueError as e: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=str(e) + ) + except Exception as e: + logger.error(f"Training start error: {e}") + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + detail="Failed to start training" + ) + +@router.get("/status/{job_id}", response_model=TrainingJobResponse) +async def get_training_status( + job_id: str, + user_data: dict = Depends(verify_token), + db: AsyncSession = Depends(get_db) +): + """Get training job status""" + try: + return await training_service.get_training_status(job_id, user_data, db) + except ValueError as e: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail=str(e) + ) + except Exception as e: + logger.error(f"Get training status error: {e}") + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + detail="Failed to get training status" + ) + +@router.get("/jobs", response_model=List[TrainingJobResponse]) +async def get_training_jobs( + limit: int = Query(10, ge=1, le=100), + offset: int = Query(0, ge=0), + user_data: dict = Depends(verify_token), + db: 
AsyncSession = Depends(get_db) +): + """Get training jobs""" + try: + return await training_service.get_training_jobs(user_data, limit, offset, db) + except Exception as e: + logger.error(f"Get training jobs error: {e}") + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + detail="Failed to get training jobs" + ) \ No newline at end of file diff --git a/services/training/app/core/__init__.py b/services/training/app/core/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/services/training/app/core/auth.py b/services/training/app/core/auth.py new file mode 100644 index 00000000..0d8cacc7 --- /dev/null +++ b/services/training/app/core/auth.py @@ -0,0 +1,38 @@ +""" +Authentication utilities for training service +""" + +import httpx +from fastapi import HTTPException, status, Depends +from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials +import logging + +from app.core.config import settings + +logger = logging.getLogger(__name__) + +security = HTTPBearer() + +async def verify_token(token: HTTPAuthorizationCredentials = Depends(security)): + """Verify token with auth service""" + try: + async with httpx.AsyncClient() as client: + response = await client.post( + f"{settings.AUTH_SERVICE_URL}/auth/verify", + headers={"Authorization": f"Bearer {token.credentials}"} + ) + + if response.status_code == 200: + return response.json() + else: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail="Invalid authentication credentials" + ) + + except httpx.RequestError as e: + logger.error(f"Auth service unavailable: {e}") + raise HTTPException( + status_code=status.HTTP_503_SERVICE_UNAVAILABLE, + detail="Authentication service unavailable" + ) \ No newline at end of file diff --git a/services/training/app/core/config.py b/services/training/app/core/config.py new file mode 100644 index 00000000..a2aa684c --- /dev/null +++ b/services/training/app/core/config.py @@ -0,0 +1,44 @@ +""" +Training service configuration +""" + +import os +from pydantic_settings import BaseSettings + +class Settings(BaseSettings): + """Application settings""" + + # Basic settings + APP_NAME: str = "Training Service" + VERSION: str = "1.0.0" + DEBUG: bool = os.getenv("DEBUG", "False").lower() == "true" + LOG_LEVEL: str = os.getenv("LOG_LEVEL", "INFO") + + # Database settings + DATABASE_URL: str = os.getenv("DATABASE_URL", "postgresql+asyncpg://training_user:training_pass123@training-db:5432/training_db") + + # Redis settings + REDIS_URL: str = os.getenv("REDIS_URL", "redis://redis:6379/1") + + # RabbitMQ settings + RABBITMQ_URL: str = os.getenv("RABBITMQ_URL", "amqp://bakery:forecast123@rabbitmq:5672/") + + # Service URLs + AUTH_SERVICE_URL: str = os.getenv("AUTH_SERVICE_URL", "http://auth-service:8000") + DATA_SERVICE_URL: str = os.getenv("DATA_SERVICE_URL", "http://data-service:8000") + + # ML Settings + MODEL_STORAGE_PATH: str = os.getenv("MODEL_STORAGE_PATH", "/app/models") + MAX_TRAINING_TIME_MINUTES: int = int(os.getenv("MAX_TRAINING_TIME_MINUTES", "30")) + MIN_TRAINING_DATA_DAYS: int = int(os.getenv("MIN_TRAINING_DATA_DAYS", "30")) + + # Prophet Settings + PROPHET_SEASONALITY_MODE: str = os.getenv("PROPHET_SEASONALITY_MODE", "additive") + PROPHET_DAILY_SEASONALITY: bool = os.getenv("PROPHET_DAILY_SEASONALITY", "true").lower() == "true" + PROPHET_WEEKLY_SEASONALITY: bool = os.getenv("PROPHET_WEEKLY_SEASONALITY", "true").lower() == "true" + PROPHET_YEARLY_SEASONALITY: bool = os.getenv("PROPHET_YEARLY_SEASONALITY", "true").lower() == "true" + + class Config: + env_file = ".env" + +settings = Settings() \ No
newline at end of file diff --git a/services/training/app/core/database.py b/services/training/app/core/database.py new file mode 100644 index 00000000..08191d62 --- /dev/null +++ b/services/training/app/core/database.py @@ -0,0 +1,12 @@ +""" +Database configuration for training service +""" + +from shared.database.base import DatabaseManager +from app.core.config import settings + +# Initialize database manager +database_manager = DatabaseManager(settings.DATABASE_URL) + +# Alias for convenience +get_db = database_manager.get_db \ No newline at end of file diff --git a/services/training/app/main.py b/services/training/app/main.py new file mode 100644 index 00000000..ed628550 --- /dev/null +++ b/services/training/app/main.py @@ -0,0 +1,81 @@ +""" +Training Service +Handles ML model training for bakery demand forecasting +""" + +import logging +from fastapi import FastAPI, BackgroundTasks +from fastapi.middleware.cors import CORSMiddleware + +from app.core.config import settings +from app.core.database import database_manager +from app.api import training, models +from app.services.messaging import message_publisher +from shared.monitoring.logging import setup_logging +from shared.monitoring.metrics import MetricsCollector + +# Setup logging +setup_logging("training-service", settings.LOG_LEVEL) +logger = logging.getLogger(__name__) + +# Create FastAPI app +app = FastAPI( + title="Training Service", + description="ML model training service for bakery demand forecasting", + version="1.0.0" +) + +# Initialize metrics collector +metrics_collector = MetricsCollector("training-service") + +# CORS middleware +app.add_middleware( + CORSMiddleware, + allow_origins=["*"], + allow_credentials=True, + allow_methods=["*"], + allow_headers=["*"], +) + +# Include routers +app.include_router(training.router, prefix="/training", tags=["training"]) +app.include_router(models.router, prefix="/models", tags=["models"]) + +@app.on_event("startup") +async def startup_event(): + """Application startup""" + logger.info("Starting Training Service") + + # Create database tables + await database_manager.create_tables() + + # Initialize message publisher + await message_publisher.connect() + + # Start metrics server + metrics_collector.start_metrics_server(8080) + + logger.info("Training Service started successfully") + +@app.on_event("shutdown") +async def shutdown_event(): + """Application shutdown""" + logger.info("Shutting down Training Service") + + # Cleanup message publisher + await message_publisher.disconnect() + + logger.info("Training Service shutdown complete") + +@app.get("/health") +async def health_check(): + """Health check endpoint""" + return { + "status": "healthy", + "service": "training-service", + "version": "1.0.0" + } + +if __name__ == "__main__": + import uvicorn + uvicorn.run(app, host="0.0.0.0", port=8000) \ No newline at end of file diff --git a/services/training/app/ml/__init__.py b/services/training/app/ml/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/services/training/app/ml/trainer.py b/services/training/app/ml/trainer.py new file mode 100644 index 00000000..50ea5664 --- /dev/null +++ b/services/training/app/ml/trainer.py @@ -0,0 +1,174 @@ +""" +ML Training implementation +""" + +import asyncio +import logging +from typing import Dict, Any, List +import pandas as pd +from datetime import datetime +import joblib +import os +from prophet import Prophet +import numpy as np +from sklearn.metrics import mean_absolute_error, mean_squared_error, r2_score + +from 
app.core.config import settings + +logger = logging.getLogger(__name__) + +class MLTrainer: + """ML training implementation""" + + def __init__(self): + self.model_storage_path = settings.MODEL_STORAGE_PATH + os.makedirs(self.model_storage_path, exist_ok=True) + + async def train_models(self, training_data: Dict[str, Any], job_id: str, db) -> Dict[str, Any]: + """Train models for all products""" + + models_result = {} + + # Get sales data + sales_data = training_data.get("sales_data", []) + external_data = training_data.get("external_data", {}) + + # Group by product + products_data = self._group_by_product(sales_data) + + # Train model for each product + for product_name, product_sales in products_data.items(): + try: + model_result = await self._train_product_model( + product_name, + product_sales, + external_data, + job_id + ) + models_result[product_name] = model_result + + except Exception as e: + logger.error(f"Failed to train model for {product_name}: {e}") + continue + + return models_result + + def _group_by_product(self, sales_data: List[Dict]) -> Dict[str, List[Dict]]: + """Group sales data by product""" + + products = {} + for sale in sales_data: + product_name = sale.get("product_name") + if product_name not in products: + products[product_name] = [] + products[product_name].append(sale) + + return products + + async def _train_product_model(self, product_name: str, sales_data: List[Dict], external_data: Dict, job_id: str) -> Dict[str, Any]: + """Train Prophet model for a single product""" + + # Convert to DataFrame + df = pd.DataFrame(sales_data) + df['date'] = pd.to_datetime(df['date']) + + # Aggregate daily sales + daily_sales = df.groupby('date')['quantity_sold'].sum().reset_index() + daily_sales.columns = ['ds', 'y'] + + # Add external features + daily_sales = self._add_external_features(daily_sales, external_data) + + # Train Prophet model + model = Prophet( + seasonality_mode=settings.PROPHET_SEASONALITY_MODE, + daily_seasonality=settings.PROPHET_DAILY_SEASONALITY, + weekly_seasonality=settings.PROPHET_WEEKLY_SEASONALITY, + yearly_seasonality=settings.PROPHET_YEARLY_SEASONALITY + ) + + # Add regressors + model.add_regressor('temperature') + model.add_regressor('humidity') + model.add_regressor('precipitation') + model.add_regressor('traffic_volume') + + # Fit model + model.fit(daily_sales) + + # Save model + model_path = os.path.join( + self.model_storage_path, + f"{job_id}_{product_name}_prophet_model.pkl" + ) + + joblib.dump(model, model_path) + + return { + "type": "prophet", + "path": model_path, + "training_samples": len(daily_sales), + "features": ["temperature", "humidity", "precipitation", "traffic_volume"], + "hyperparameters": { + "seasonality_mode": settings.PROPHET_SEASONALITY_MODE, + "daily_seasonality": settings.PROPHET_DAILY_SEASONALITY, + "weekly_seasonality": settings.PROPHET_WEEKLY_SEASONALITY, + "yearly_seasonality": settings.PROPHET_YEARLY_SEASONALITY + } + } + + def _add_external_features(self, daily_sales: pd.DataFrame, external_data: Dict) -> pd.DataFrame: + """Add external features to sales data""" + + # Add weather data + weather_data = external_data.get("weather", []) + if weather_data: + weather_df = pd.DataFrame(weather_data) + weather_df['ds'] = pd.to_datetime(weather_df['date']) + daily_sales = daily_sales.merge(weather_df[['ds', 'temperature', 'humidity', 'precipitation']], on='ds', how='left') + + # Add traffic data + traffic_data = external_data.get("traffic", []) + if traffic_data: + traffic_df = pd.DataFrame(traffic_data) + 
traffic_df['ds'] = pd.to_datetime(traffic_df['date']) + daily_sales = daily_sales.merge(traffic_df[['ds', 'traffic_volume']], on='ds', how='left') + + # Fill missing values; create the column with an assumed neutral default when a source returned no data at all + daily_sales['temperature'] = daily_sales['temperature'].fillna(daily_sales['temperature'].mean()) if 'temperature' in daily_sales else 20.0 + daily_sales['humidity'] = daily_sales['humidity'].fillna(daily_sales['humidity'].mean()) if 'humidity' in daily_sales else 60.0 + daily_sales['precipitation'] = daily_sales['precipitation'].fillna(0) if 'precipitation' in daily_sales else 0.0 + daily_sales['traffic_volume'] = daily_sales['traffic_volume'].fillna(daily_sales['traffic_volume'].mean()) if 'traffic_volume' in daily_sales else 0.0 + + return daily_sales + + async def validate_models(self, models_result: Dict[str, Any], db) -> Dict[str, Any]: + """Validate trained models""" + + validation_results = {} + + for product_name, model_data in models_result.items(): + try: + # Load model + model_path = model_data.get("path") + model = joblib.load(model_path) + + # Mock validation for now (in production, you'd use actual validation data) + validation_results[product_name] = { + "mape": np.random.uniform(10, 25), # Mock MAPE between 10-25% + "rmse": np.random.uniform(8, 15), # Mock RMSE + "mae": np.random.uniform(5, 12), # Mock MAE + "r2_score": np.random.uniform(0.7, 0.9) # Mock R2 score + } + + except Exception as e: + logger.error(f"Validation failed for {product_name}: {e}") + validation_results[product_name] = { + "mape": None, + "rmse": None, + "mae": None, + "r2_score": None, + "error": str(e) + } + + return validation_results \ No newline at end of file diff --git a/services/training/app/schemas/__init__.py b/services/training/app/schemas/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/services/training/app/schemas/training.py b/services/training/app/schemas/training.py new file mode 100644 index 00000000..2027bba2 --- /dev/null +++ b/services/training/app/schemas/training.py @@ -0,0 +1,91 @@ +""" +Training schemas +""" + +from pydantic import BaseModel, Field, field_validator +from typing import Optional, Dict, Any, List +from datetime import datetime +from enum import Enum + +class TrainingJobStatus(str, Enum): + """Training job status enum""" + QUEUED = "queued" + RUNNING = "running" + COMPLETED = "completed" + FAILED = "failed" + CANCELLED = "cancelled" + +class TrainingRequest(BaseModel): + """Training request schema""" + tenant_id: Optional[str] = None # Will be set from auth + force_retrain: bool = Field(default=False, description="Force retrain even if recent models exist") + products: Optional[List[str]] = Field(default=None, description="Specific products to train, or None for all") + training_days: Optional[int] = Field(default=730, ge=30, le=1095, description="Number of days of historical data to use") + + @field_validator('training_days') + def validate_training_days(cls, v): + if v < 30: + raise ValueError('Minimum training days is 30') + if v > 1095: + raise ValueError('Maximum training days is 1095 (3 years)') + return v + +class TrainingJobResponse(BaseModel): + """Training job response schema""" + id: str + tenant_id: str + status: TrainingJobStatus + progress: int + current_step: Optional[str] + started_at: datetime + completed_at: Optional[datetime] + duration_seconds: Optional[int] + models_trained: Optional[Dict[str, Any]] + metrics: Optional[Dict[str, Any]] + error_message: Optional[str] + + class Config: + from_attributes = True + +class TrainedModelResponse(BaseModel): + """Trained model response schema""" + id: str + product_name: str + model_type: str + model_version: str + mape: Optional[float] + rmse: Optional[float] + mae: Optional[float] + 
r2_score: Optional[float] + training_samples: Optional[int] + features_used: Optional[List[str]] + is_active: bool + created_at: datetime + last_used_at: Optional[datetime] + + class Config: + from_attributes = True + +class TrainingProgress(BaseModel): + """Training progress update schema""" + job_id: str + progress: int + current_step: str + estimated_completion: Optional[datetime] + +class TrainingMetrics(BaseModel): + """Training metrics schema""" + total_jobs: int + successful_jobs: int + failed_jobs: int + average_duration: float + models_trained: int + active_models: int + +class ModelValidationResult(BaseModel): + """Model validation result schema""" + product_name: str + is_valid: bool + accuracy_score: float + validation_error: Optional[str] + recommendations: List[str] \ No newline at end of file diff --git a/services/training/app/services/__init__.py b/services/training/app/services/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/services/training/app/services/messaging.py b/services/training/app/services/messaging.py new file mode 100644 index 00000000..d1ea6180 --- /dev/null +++ b/services/training/app/services/messaging.py @@ -0,0 +1,9 @@ +""" +Messaging service for training service +""" + +from shared.messaging.rabbitmq import RabbitMQClient +from app.core.config import settings + +# Global message publisher +message_publisher = RabbitMQClient(settings.RABBITMQ_URL) \ No newline at end of file diff --git a/services/training/Dockerfile b/services/training/Dockerfile new file mode 100644 --- /dev/null +++ b/services/training/Dockerfile @@ -0,0 +1,38 @@ +FROM python:3.11-slim + +WORKDIR /app + +# Install system dependencies +RUN apt-get update && apt-get install -y \ + gcc \ + g++ \ + curl \ + && rm -rf /var/lib/apt/lists/* + +# Copy requirements +COPY requirements.txt . + +# Install Python dependencies +RUN pip install --no-cache-dir -r requirements.txt + +# Copy shared libraries +COPY --from=shared /shared /app/shared + +# Copy application code +COPY . . + +# Create model storage directory +RUN mkdir -p /app/models + +# Add shared libraries to Python path +ENV PYTHONPATH="/app:/app/shared:$PYTHONPATH" + +# Expose port +EXPOSE 8000 + +# Health check +HEALTHCHECK --interval=30s --timeout=10s --start-period=5s --retries=3 \ + CMD curl -f http://localhost:8000/health || exit 1 + +# Run application +CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000"] \ No newline at end of file diff --git a/services/training/requirements.txt b/services/training/requirements.txt new file mode 100644 index 00000000..9db6ad4f --- /dev/null +++ b/services/training/requirements.txt @@ -0,0 +1,28 @@ +fastapi==0.104.1 +uvicorn[standard]==0.24.0 +sqlalchemy==2.0.23 +asyncpg==0.29.0 +alembic==1.12.1 +pydantic==2.5.0 +pydantic-settings==2.1.0 +httpx==0.25.2 +redis==5.0.1 +aio-pika==9.3.0 +prometheus-client==0.17.1 +python-json-logger==2.0.4 + +# ML dependencies +prophet==1.1.4 +scikit-learn==1.3.2 +pandas==2.1.4 +numpy==1.24.4 +joblib==1.3.2 +scipy==1.11.4 + +# Utilities +pytz==2023.3 +python-dateutil==2.8.2 + +# Shared library dependencies (assumed versions; needed by shared/monitoring/logging.py and shared/utils/validation.py) +python-logstash==0.4.8 +email-validator==2.1.0 \ No newline at end of file diff --git a/services/training/shared/auth/__init__.py b/services/training/shared/auth/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/services/training/shared/auth/decorators.py b/services/training/shared/auth/decorators.py new file mode 100644 index 00000000..53095a15 --- /dev/null +++ b/services/training/shared/auth/decorators.py @@ -0,0 +1,41 @@ +""" +Authentication decorators for FastAPI +""" + +from functools import wraps +from fastapi import HTTPException, Depends +from fastapi.security import HTTPBearer +import httpx +import logging + +logger = logging.getLogger(__name__) + +security = HTTPBearer() + +def verify_service_token(auth_service_url: str): + """Verify service token with auth service""" + + async def verify_token(token: str = Depends(security)): + try: + async with httpx.AsyncClient() as client: 
response = await client.post( + f"{auth_service_url}/verify", + headers={"Authorization": f"Bearer {token.credentials}"} + ) + + if response.status_code == 200: + return response.json() + else: + raise HTTPException( + status_code=401, + detail="Invalid authentication credentials" + ) + + except httpx.RequestError as e: + logger.error(f"Auth service unavailable: {e}") + raise HTTPException( + status_code=503, + detail="Authentication service unavailable" + ) + + return verify_token \ No newline at end of file diff --git a/services/training/shared/auth/jwt_handler.py b/services/training/shared/auth/jwt_handler.py new file mode 100644 index 00000000..8e7643c7 --- /dev/null +++ b/services/training/shared/auth/jwt_handler.py @@ -0,0 +1,58 @@ +""" +Shared JWT Authentication Handler +Used across all microservices for consistent authentication +""" + +import jwt +from datetime import datetime, timedelta +from typing import Optional, Dict, Any +import logging + +logger = logging.getLogger(__name__) + +class JWTHandler: + """JWT token handling for microservices""" + + def __init__(self, secret_key: str, algorithm: str = "HS256"): + self.secret_key = secret_key + self.algorithm = algorithm + + def create_access_token(self, data: Dict[str, Any], expires_delta: Optional[timedelta] = None) -> str: + """Create JWT access token""" + to_encode = data.copy() + + if expires_delta: + expire = datetime.utcnow() + expires_delta + else: + expire = datetime.utcnow() + timedelta(minutes=30) + + to_encode.update({"exp": expire, "type": "access"}) + + encoded_jwt = jwt.encode(to_encode, self.secret_key, algorithm=self.algorithm) + return encoded_jwt + + def create_refresh_token(self, data: Dict[str, Any], expires_delta: Optional[timedelta] = None) -> str: + """Create JWT refresh token""" + to_encode = data.copy() + + if expires_delta: + expire = datetime.utcnow() + expires_delta + else: + expire = datetime.utcnow() + timedelta(days=7) + + to_encode.update({"exp": expire, "type": "refresh"}) + + encoded_jwt = jwt.encode(to_encode, self.secret_key, algorithm=self.algorithm) + return encoded_jwt + + def verify_token(self, token: str) -> Optional[Dict[str, Any]]: + """Verify and decode JWT token""" + try: + payload = jwt.decode(token, self.secret_key, algorithms=[self.algorithm]) + return payload + except jwt.ExpiredSignatureError: + logger.warning("Token has expired") + return None + except jwt.InvalidTokenError: + logger.warning("Invalid token") + return None \ No newline at end of file diff --git a/services/training/shared/database/__init__.py b/services/training/shared/database/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/services/training/shared/database/base.py b/services/training/shared/database/base.py new file mode 100644 index 00000000..e5766716 --- /dev/null +++ b/services/training/shared/database/base.py @@ -0,0 +1,56 @@ +""" +Base database configuration for all microservices +""" + +import os +from sqlalchemy import create_engine +from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine +from sqlalchemy.orm import sessionmaker, declarative_base +from sqlalchemy.pool import StaticPool +import logging + +logger = logging.getLogger(__name__) + +Base = declarative_base() + +class DatabaseManager: + """Database manager for microservices""" + + def __init__(self, database_url: str): + self.database_url = database_url + self.async_engine = create_async_engine( + database_url, + echo=False, + pool_pre_ping=True, + pool_recycle=300, + pool_size=20, + max_overflow=30 + ) + + 
self.async_session_local = sessionmaker( + self.async_engine, + class_=AsyncSession, + expire_on_commit=False + ) + + async def get_db(self): + """Get database session""" + async with self.async_session_local() as session: + try: + yield session + except Exception as e: + logger.error(f"Database session error: {e}") + await session.rollback() + raise + finally: + await session.close() + + async def create_tables(self): + """Create database tables""" + async with self.async_engine.begin() as conn: + await conn.run_sync(Base.metadata.create_all) + + async def drop_tables(self): + """Drop database tables""" + async with self.async_engine.begin() as conn: + await conn.run_sync(Base.metadata.drop_all) \ No newline at end of file diff --git a/services/training/shared/messaging/__init__.py b/services/training/shared/messaging/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/services/training/shared/messaging/events.py b/services/training/shared/messaging/events.py new file mode 100644 index 00000000..812e972d --- /dev/null +++ b/services/training/shared/messaging/events.py @@ -0,0 +1,73 @@ +""" +Event definitions for microservices communication +""" + +from dataclasses import dataclass, field +from datetime import datetime +from typing import Dict, Any, Optional +import uuid + +@dataclass +class BaseEvent: + """Base event class; every field defaults so subclasses may override event_type""" + event_id: str = "" + event_type: str = "" + service_name: str = "" + timestamp: Optional[datetime] = None + data: Dict[str, Any] = field(default_factory=dict) + correlation_id: Optional[str] = None + + def __post_init__(self): + if not self.event_id: + self.event_id = str(uuid.uuid4()) + if not self.timestamp: + self.timestamp = datetime.utcnow() + +# Training Events +@dataclass +class TrainingStartedEvent(BaseEvent): + event_type: str = "training.started" + +@dataclass +class TrainingCompletedEvent(BaseEvent): + event_type: str = "training.completed" + +@dataclass +class TrainingFailedEvent(BaseEvent): + event_type: str = "training.failed" + +# Forecasting Events +@dataclass +class ForecastGeneratedEvent(BaseEvent): + event_type: str = "forecast.generated" + +@dataclass +class ForecastRequestedEvent(BaseEvent): + event_type: str = "forecast.requested" + +# User Events +@dataclass +class UserRegisteredEvent(BaseEvent): + event_type: str = "user.registered" + +@dataclass +class UserLoginEvent(BaseEvent): + event_type: str = "user.login" + +# Tenant Events +@dataclass +class TenantCreatedEvent(BaseEvent): + event_type: str = "tenant.created" + +@dataclass +class TenantUpdatedEvent(BaseEvent): + event_type: str = "tenant.updated" + +# Notification Events +@dataclass +class NotificationSentEvent(BaseEvent): + event_type: str = "notification.sent" + +@dataclass +class NotificationFailedEvent(BaseEvent): + event_type: str = "notification.failed" \ No newline at end of file diff --git a/services/training/shared/messaging/rabbitmq.py b/services/training/shared/messaging/rabbitmq.py new file mode 100644 index 00000000..62d95cfb --- /dev/null +++ b/services/training/shared/messaging/rabbitmq.py @@ -0,0 +1,96 @@ +""" +RabbitMQ messaging client for microservices +""" + +import asyncio +import json +import logging +from typing import Dict, Any, Callable +import aio_pika +from aio_pika import connect_robust, Message, DeliveryMode + +logger = logging.getLogger(__name__) + +class RabbitMQClient: + """RabbitMQ client for microservices communication""" + + def __init__(self, connection_url: str): + self.connection_url = connection_url + self.connection = None + self.channel = None + + async def connect(self): + """Connect to 
RabbitMQ""" + try: + self.connection = await connect_robust(self.connection_url) + self.channel = await self.connection.channel() + logger.info("Connected to RabbitMQ") + except Exception as e: + logger.error(f"Failed to connect to RabbitMQ: {e}") + raise + + async def disconnect(self): + """Disconnect from RabbitMQ""" + if self.connection: + await self.connection.close() + logger.info("Disconnected from RabbitMQ") + + async def publish_event(self, exchange_name: str, routing_key: str, event_data: Dict[str, Any]): + """Publish event to RabbitMQ""" + try: + if not self.channel: + await self.connect() + + # Declare exchange + exchange = await self.channel.declare_exchange( + exchange_name, + aio_pika.ExchangeType.TOPIC, + durable=True + ) + + # Create message + message = Message( + json.dumps(event_data).encode(), + delivery_mode=DeliveryMode.PERSISTENT, + content_type="application/json" + ) + + # Publish message + await exchange.publish(message, routing_key=routing_key) + + logger.info(f"Published event to {exchange_name} with routing key {routing_key}") + + except Exception as e: + logger.error(f"Failed to publish event: {e}") + raise + + async def consume_events(self, exchange_name: str, queue_name: str, routing_key: str, callback: Callable): + """Consume events from RabbitMQ""" + try: + if not self.channel: + await self.connect() + + # Declare exchange + exchange = await self.channel.declare_exchange( + exchange_name, + aio_pika.ExchangeType.TOPIC, + durable=True + ) + + # Declare queue + queue = await self.channel.declare_queue( + queue_name, + durable=True + ) + + # Bind queue to exchange + await queue.bind(exchange, routing_key) + + # Set up consumer + await queue.consume(callback) + + logger.info(f"Started consuming events from {queue_name}") + + except Exception as e: + logger.error(f"Failed to consume events: {e}") + raise \ No newline at end of file diff --git a/services/training/shared/monitoring/__init__.py b/services/training/shared/monitoring/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/services/training/shared/monitoring/logging.py b/services/training/shared/monitoring/logging.py new file mode 100644 index 00000000..0fde234d --- /dev/null +++ b/services/training/shared/monitoring/logging.py @@ -0,0 +1,77 @@ +""" +Centralized logging configuration for microservices +""" + +import logging +import logging.config +import os +from typing import Dict, Any + +def setup_logging(service_name: str, log_level: str = "INFO") -> None: + """Set up logging configuration for a microservice""" + + config: Dict[str, Any] = { + "version": 1, + "disable_existing_loggers": False, + "formatters": { + "standard": { + "format": "%(asctime)s [%(levelname)s] %(name)s: %(message)s" + }, + "detailed": { + "format": "%(asctime)s [%(levelname)s] %(name)s [%(filename)s:%(lineno)d] %(message)s" + }, + "json": { + "()": "pythonjsonlogger.jsonlogger.JsonFormatter", + "format": "%(asctime)s %(name)s %(levelname)s %(message)s" + } + }, + "handlers": { + "console": { + "class": "logging.StreamHandler", + "level": log_level, + "formatter": "standard", + "stream": "ext://sys.stdout" + }, + "file": { + "class": "logging.FileHandler", + "level": log_level, + "formatter": "detailed", + "filename": f"/var/log/{service_name}.log", + "mode": "a" + }, + "logstash": { + "class": "logstash.TCPLogstashHandler", + "host": os.getenv("LOGSTASH_HOST", "localhost"), + "port": int(os.getenv("LOGSTASH_PORT", "5000")), + "version": 1, + "message_type": "logstash", + "fqdn": False, + "tags": [service_name] 
+ } + }, + "loggers": { + "": { + "handlers": ["console", "file"], + "level": log_level, + "propagate": False + }, + "uvicorn": { + "handlers": ["console"], + "level": log_level, + "propagate": False + }, + "uvicorn.access": { + "handlers": ["console"], + "level": log_level, + "propagate": False + } + } + } + + # Add logstash handler if in production + if os.getenv("ENVIRONMENT") == "production": + config["loggers"][""]["handlers"].append("logstash") + + logging.config.dictConfig(config) + logger = logging.getLogger(__name__) + logger.info(f"Logging configured for {service_name}") \ No newline at end of file diff --git a/services/training/shared/monitoring/metrics.py b/services/training/shared/monitoring/metrics.py new file mode 100644 index 00000000..a5e35223 --- /dev/null +++ b/services/training/shared/monitoring/metrics.py @@ -0,0 +1,112 @@ +""" +Metrics collection for microservices +""" + +import time +import logging +from typing import Dict, Any +from prometheus_client import Counter, Histogram, Gauge, start_http_server +from functools import wraps + +logger = logging.getLogger(__name__) + +# Prometheus metrics +REQUEST_COUNT = Counter( + 'http_requests_total', + 'Total HTTP requests', + ['method', 'endpoint', 'status_code', 'service'] +) + +REQUEST_DURATION = Histogram( + 'http_request_duration_seconds', + 'HTTP request duration in seconds', + ['method', 'endpoint', 'service'] +) + +ACTIVE_CONNECTIONS = Gauge( + 'active_connections', + 'Active database connections', + ['service'] +) + +TRAINING_JOBS = Counter( + 'training_jobs_total', + 'Total training jobs', + ['status', 'service'] +) + +FORECASTS_GENERATED = Counter( + 'forecasts_generated_total', + 'Total forecasts generated', + ['service'] +) + +class MetricsCollector: + """Metrics collector for microservices""" + + def __init__(self, service_name: str): + self.service_name = service_name + self.start_time = time.time() + + def start_metrics_server(self, port: int = 8080): + """Start Prometheus metrics server""" + try: + start_http_server(port) + logger.info(f"Metrics server started on port {port}") + except Exception as e: + logger.error(f"Failed to start metrics server: {e}") + + def record_request(self, method: str, endpoint: str, status_code: int, duration: float): + """Record HTTP request metrics""" + REQUEST_COUNT.labels( + method=method, + endpoint=endpoint, + status_code=status_code, + service=self.service_name + ).inc() + + REQUEST_DURATION.labels( + method=method, + endpoint=endpoint, + service=self.service_name + ).observe(duration) + + def record_training_job(self, status: str): + """Record training job metrics""" + TRAINING_JOBS.labels( + status=status, + service=self.service_name + ).inc() + + def record_forecast_generated(self): + """Record forecast generation metrics""" + FORECASTS_GENERATED.labels( + service=self.service_name + ).inc() + + def set_active_connections(self, count: int): + """Set active database connections""" + ACTIVE_CONNECTIONS.labels( + service=self.service_name + ).set(count) + +def metrics_middleware(metrics_collector: MetricsCollector): + """Middleware to collect metrics (async, matching the FastAPI/Starlette http-middleware signature)""" + + async def middleware(request, call_next): + start_time = time.time() + + response = await call_next(request) + + duration = time.time() - start_time + + metrics_collector.record_request( + method=request.method, + endpoint=request.url.path, + status_code=response.status_code, + duration=duration + ) + + return response + + return middleware \ No newline at end of file diff --git a/services/training/shared/utils/__init__.py 
b/services/training/shared/utils/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/services/training/shared/utils/datetime_utils.py b/services/training/shared/utils/datetime_utils.py new file mode 100644 index 00000000..3035001a --- /dev/null +++ b/services/training/shared/utils/datetime_utils.py @@ -0,0 +1,71 @@ +""" +DateTime utilities for microservices +""" + +from datetime import datetime, timezone, timedelta +from typing import Optional +import pytz + +def utc_now() -> datetime: + """Get current UTC datetime""" + return datetime.now(timezone.utc) + +def madrid_now() -> datetime: + """Get current Madrid datetime""" + madrid_tz = pytz.timezone('Europe/Madrid') + return datetime.now(madrid_tz) + +def to_utc(dt: datetime) -> datetime: + """Convert datetime to UTC""" + if dt.tzinfo is None: + dt = dt.replace(tzinfo=timezone.utc) + return dt.astimezone(timezone.utc) + +def to_madrid(dt: datetime) -> datetime: + """Convert datetime to Madrid timezone""" + madrid_tz = pytz.timezone('Europe/Madrid') + if dt.tzinfo is None: + dt = dt.replace(tzinfo=timezone.utc) + return dt.astimezone(madrid_tz) + +def format_datetime(dt: datetime, format_str: str = "%Y-%m-%d %H:%M:%S") -> str: + """Format datetime as string""" + return dt.strftime(format_str) + +def parse_datetime(dt_str: str, format_str: str = "%Y-%m-%d %H:%M:%S") -> datetime: + """Parse datetime from string""" + return datetime.strptime(dt_str, format_str) + +def is_business_hours(dt: Optional[datetime] = None) -> bool: + """Check if datetime is during business hours (9 AM - 6 PM Madrid time)""" + if dt is None: + dt = madrid_now() + + if dt.tzinfo is None: + dt = dt.replace(tzinfo=timezone.utc) + + madrid_dt = to_madrid(dt) + + # Check if it's a weekday (Monday=0, Sunday=6) + if madrid_dt.weekday() >= 5: # Weekend + return False + + # Check if it's business hours + return 9 <= madrid_dt.hour < 18 + +def next_business_day(dt: Optional[datetime] = None) -> datetime: + """Get next business day""" + if dt is None: + dt = madrid_now() + + if dt.tzinfo is None: + dt = dt.replace(tzinfo=timezone.utc) + + madrid_dt = to_madrid(dt) + + # Add days until we reach a weekday + while madrid_dt.weekday() >= 5: # Weekend + madrid_dt += timedelta(days=1) + + # Set to 9 AM + return madrid_dt.replace(hour=9, minute=0, second=0, microsecond=0) \ No newline at end of file diff --git a/services/training/shared/utils/validation.py b/services/training/shared/utils/validation.py new file mode 100644 index 00000000..c855b20c --- /dev/null +++ b/services/training/shared/utils/validation.py @@ -0,0 +1,67 @@ +""" +Validation utilities for microservices +""" + +import re +from typing import Any, Optional +from email_validator import validate_email, EmailNotValidError + +def validate_spanish_phone(phone: str) -> bool: + """Validate Spanish phone number""" + # Spanish phone pattern: +34 followed by 9 digits + pattern = r'^(\+34|0034|34)?[6-9]\d{8}$' + return bool(re.match(pattern, phone.replace(' ', '').replace('-', ''))) + +def validate_email_address(email: str) -> bool: + """Validate email address""" + try: + validate_email(email) + return True + except EmailNotValidError: + return False + +def validate_tenant_name(name: str) -> bool: + """Validate tenant name""" + # Must be 2-50 characters, letters, numbers, spaces, hyphens, apostrophes + pattern = r"^[a-zA-ZÀ-ÿ0-9\s\-']{2,50}$" + return bool(re.match(pattern, name)) + +def validate_address(address: str) -> bool: + """Validate address""" + # Must be 5-200 characters + return 5 <= 
len(address.strip()) <= 200 + +def validate_coordinates(latitude: float, longitude: float) -> bool: + """Validate Madrid coordinates""" + # Madrid is roughly between these coordinates + madrid_bounds = { + 'lat_min': 40.3, + 'lat_max': 40.6, + 'lon_min': -3.8, + 'lon_max': -3.5 + } + + return ( + madrid_bounds['lat_min'] <= latitude <= madrid_bounds['lat_max'] and + madrid_bounds['lon_min'] <= longitude <= madrid_bounds['lon_max'] + ) + +def validate_product_name(name: str) -> bool: + """Validate product name""" + # Must be 1-50 characters, letters, numbers, spaces + pattern = r"^[a-zA-ZÀ-ÿ0-9\s]{1,50}$" + return bool(re.match(pattern, name)) + +def validate_positive_number(value: Any) -> bool: + """Validate positive number""" + try: + return float(value) > 0 + except (ValueError, TypeError): + return False + +def validate_non_negative_number(value: Any) -> bool: + """Validate non-negative number""" + try: + return float(value) >= 0 + except (ValueError, TypeError): + return False \ No newline at end of file diff --git a/shared/auth/__init__.py b/shared/auth/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/shared/auth/decorators.py b/shared/auth/decorators.py new file mode 100644 index 00000000..53095a15 --- /dev/null +++ b/shared/auth/decorators.py @@ -0,0 +1,41 @@ +""" +Authentication decorators for FastAPI +""" + +from functools import wraps +from fastapi import HTTPException, Depends +from fastapi.security import HTTPBearer +import httpx +import logging + +logger = logging.getLogger(__name__) + +security = HTTPBearer() + +def verify_service_token(auth_service_url: str): + """Verify service token with auth service""" + + async def verify_token(token: str = Depends(security)): + try: + async with httpx.AsyncClient() as client: + response = await client.post( + f"{auth_service_url}/verify", + headers={"Authorization": f"Bearer {token.credentials}"} + ) + + if response.status_code == 200: + return response.json() + else: + raise HTTPException( + status_code=401, + detail="Invalid authentication credentials" + ) + + except httpx.RequestError as e: + logger.error(f"Auth service unavailable: {e}") + raise HTTPException( + status_code=503, + detail="Authentication service unavailable" + ) + + return verify_token \ No newline at end of file diff --git a/shared/auth/jwt_handler.py b/shared/auth/jwt_handler.py new file mode 100644 index 00000000..8e7643c7 --- /dev/null +++ b/shared/auth/jwt_handler.py @@ -0,0 +1,58 @@ +""" +Shared JWT Authentication Handler +Used across all microservices for consistent authentication +""" + +import jwt +from datetime import datetime, timedelta +from typing import Optional, Dict, Any +import logging + +logger = logging.getLogger(__name__) + +class JWTHandler: + """JWT token handling for microservices""" + + def __init__(self, secret_key: str, algorithm: str = "HS256"): + self.secret_key = secret_key + self.algorithm = algorithm + + def create_access_token(self, data: Dict[str, Any], expires_delta: Optional[timedelta] = None) -> str: + """Create JWT access token""" + to_encode = data.copy() + + if expires_delta: + expire = datetime.utcnow() + expires_delta + else: + expire = datetime.utcnow() + timedelta(minutes=30) + + to_encode.update({"exp": expire, "type": "access"}) + + encoded_jwt = jwt.encode(to_encode, self.secret_key, algorithm=self.algorithm) + return encoded_jwt + + def create_refresh_token(self, data: Dict[str, Any], expires_delta: Optional[timedelta] = None) -> str: + """Create JWT refresh token""" + to_encode = data.copy() + + if 
expires_delta: + expire = datetime.utcnow() + expires_delta + else: + expire = datetime.utcnow() + timedelta(days=7) + + to_encode.update({"exp": expire, "type": "refresh"}) + + encoded_jwt = jwt.encode(to_encode, self.secret_key, algorithm=self.algorithm) + return encoded_jwt + + def verify_token(self, token: str) -> Optional[Dict[str, Any]]: + """Verify and decode JWT token""" + try: + payload = jwt.decode(token, self.secret_key, algorithms=[self.algorithm]) + return payload + except jwt.ExpiredSignatureError: + logger.warning("Token has expired") + return None + except jwt.InvalidTokenError: + logger.warning("Invalid token") + return None \ No newline at end of file diff --git a/shared/database/__init__.py b/shared/database/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/shared/database/base.py b/shared/database/base.py new file mode 100644 index 00000000..e5766716 --- /dev/null +++ b/shared/database/base.py @@ -0,0 +1,56 @@ +""" +Base database configuration for all microservices +""" + +import os +from sqlalchemy import create_engine +from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine +from sqlalchemy.orm import sessionmaker, declarative_base +from sqlalchemy.pool import StaticPool +import logging + +logger = logging.getLogger(__name__) + +Base = declarative_base() + +class DatabaseManager: + """Database manager for microservices""" + + def __init__(self, database_url: str): + self.database_url = database_url + self.async_engine = create_async_engine( + database_url, + echo=False, + pool_pre_ping=True, + pool_recycle=300, + pool_size=20, + max_overflow=30 + ) + + self.async_session_local = sessionmaker( + self.async_engine, + class_=AsyncSession, + expire_on_commit=False + ) + + async def get_db(self): + """Get database session""" + async with self.async_session_local() as session: + try: + yield session + except Exception as e: + logger.error(f"Database session error: {e}") + await session.rollback() + raise + finally: + await session.close() + + async def create_tables(self): + """Create database tables""" + async with self.async_engine.begin() as conn: + await conn.run_sync(Base.metadata.create_all) + + async def drop_tables(self): + """Drop database tables""" + async with self.async_engine.begin() as conn: + await conn.run_sync(Base.metadata.drop_all) \ No newline at end of file diff --git a/shared/messaging/__init__.py b/shared/messaging/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/shared/messaging/events.py b/shared/messaging/events.py new file mode 100644 index 00000000..812e972d --- /dev/null +++ b/shared/messaging/events.py @@ -0,0 +1,73 @@ +""" +Event definitions for microservices communication +""" + +from dataclasses import dataclass, field +from datetime import datetime +from typing import Dict, Any, Optional +import uuid + +@dataclass +class BaseEvent: + """Base event class; every field defaults so subclasses may override event_type""" + event_id: str = "" + event_type: str = "" + service_name: str = "" + timestamp: Optional[datetime] = None + data: Dict[str, Any] = field(default_factory=dict) + correlation_id: Optional[str] = None + + def __post_init__(self): + if not self.event_id: + self.event_id = str(uuid.uuid4()) + if not self.timestamp: + self.timestamp = datetime.utcnow() + +# Training Events +@dataclass +class TrainingStartedEvent(BaseEvent): + event_type: str = "training.started" + +@dataclass +class TrainingCompletedEvent(BaseEvent): + event_type: str = "training.completed" + +@dataclass +class TrainingFailedEvent(BaseEvent): + event_type: str = "training.failed" + +# Forecasting Events +@dataclass +class 
ForecastGeneratedEvent(BaseEvent): + event_type: str = "forecast.generated" + +@dataclass +class ForecastRequestedEvent(BaseEvent): + event_type: str = "forecast.requested" + +# User Events +@dataclass +class UserRegisteredEvent(BaseEvent): + event_type: str = "user.registered" + +@dataclass +class UserLoginEvent(BaseEvent): + event_type: str = "user.login" + +# Tenant Events +@dataclass +class TenantCreatedEvent(BaseEvent): + event_type: str = "tenant.created" + +@dataclass +class TenantUpdatedEvent(BaseEvent): + event_type: str = "tenant.updated" + +# Notification Events +@dataclass +class NotificationSentEvent(BaseEvent): + event_type: str = "notification.sent" + +@dataclass +class NotificationFailedEvent(BaseEvent): + event_type: str = "notification.failed" \ No newline at end of file diff --git a/shared/messaging/rabbitmq.py b/shared/messaging/rabbitmq.py new file mode 100644 index 00000000..62d95cfb --- /dev/null +++ b/shared/messaging/rabbitmq.py @@ -0,0 +1,96 @@ +""" +RabbitMQ messaging client for microservices +""" + +import asyncio +import json +import logging +from typing import Dict, Any, Callable +import aio_pika +from aio_pika import connect_robust, Message, DeliveryMode + +logger = logging.getLogger(__name__) + +class RabbitMQClient: + """RabbitMQ client for microservices communication""" + + def __init__(self, connection_url: str): + self.connection_url = connection_url + self.connection = None + self.channel = None + + async def connect(self): + """Connect to RabbitMQ""" + try: + self.connection = await connect_robust(self.connection_url) + self.channel = await self.connection.channel() + logger.info("Connected to RabbitMQ") + except Exception as e: + logger.error(f"Failed to connect to RabbitMQ: {e}") + raise + + async def disconnect(self): + """Disconnect from RabbitMQ""" + if self.connection: + await self.connection.close() + logger.info("Disconnected from RabbitMQ") + + async def publish_event(self, exchange_name: str, routing_key: str, event_data: Dict[str, Any]): + """Publish event to RabbitMQ""" + try: + if not self.channel: + await self.connect() + + # Declare exchange + exchange = await self.channel.declare_exchange( + exchange_name, + aio_pika.ExchangeType.TOPIC, + durable=True + ) + + # Create message + message = Message( + json.dumps(event_data).encode(), + delivery_mode=DeliveryMode.PERSISTENT, + content_type="application/json" + ) + + # Publish message + await exchange.publish(message, routing_key=routing_key) + + logger.info(f"Published event to {exchange_name} with routing key {routing_key}") + + except Exception as e: + logger.error(f"Failed to publish event: {e}") + raise + + async def consume_events(self, exchange_name: str, queue_name: str, routing_key: str, callback: Callable): + """Consume events from RabbitMQ""" + try: + if not self.channel: + await self.connect() + + # Declare exchange + exchange = await self.channel.declare_exchange( + exchange_name, + aio_pika.ExchangeType.TOPIC, + durable=True + ) + + # Declare queue + queue = await self.channel.declare_queue( + queue_name, + durable=True + ) + + # Bind queue to exchange + await queue.bind(exchange, routing_key) + + # Set up consumer + await queue.consume(callback) + + logger.info(f"Started consuming events from {queue_name}") + + except Exception as e: + logger.error(f"Failed to consume events: {e}") + raise \ No newline at end of file diff --git a/shared/monitoring/__init__.py b/shared/monitoring/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/shared/monitoring/logging.py 
b/shared/monitoring/logging.py new file mode 100644 index 00000000..0fde234d --- /dev/null +++ b/shared/monitoring/logging.py @@ -0,0 +1,77 @@ +""" +Centralized logging configuration for microservices +""" + +import logging +import logging.config +import os +from typing import Dict, Any + +def setup_logging(service_name: str, log_level: str = "INFO") -> None: + """Set up logging configuration for a microservice""" + + config: Dict[str, Any] = { + "version": 1, + "disable_existing_loggers": False, + "formatters": { + "standard": { + "format": "%(asctime)s [%(levelname)s] %(name)s: %(message)s" + }, + "detailed": { + "format": "%(asctime)s [%(levelname)s] %(name)s [%(filename)s:%(lineno)d] %(message)s" + }, + "json": { + "()": "pythonjsonlogger.jsonlogger.JsonFormatter", + "format": "%(asctime)s %(name)s %(levelname)s %(message)s" + } + }, + "handlers": { + "console": { + "class": "logging.StreamHandler", + "level": log_level, + "formatter": "standard", + "stream": "ext://sys.stdout" + }, + "file": { + "class": "logging.FileHandler", + "level": log_level, + "formatter": "detailed", + "filename": f"/var/log/{service_name}.log", + "mode": "a" + }, + "logstash": { + "class": "logstash.TCPLogstashHandler", + "host": os.getenv("LOGSTASH_HOST", "localhost"), + "port": int(os.getenv("LOGSTASH_PORT", "5000")), + "version": 1, + "message_type": "logstash", + "fqdn": False, + "tags": [service_name] + } + }, + "loggers": { + "": { + "handlers": ["console", "file"], + "level": log_level, + "propagate": False + }, + "uvicorn": { + "handlers": ["console"], + "level": log_level, + "propagate": False + }, + "uvicorn.access": { + "handlers": ["console"], + "level": log_level, + "propagate": False + } + } + } + + # Add logstash handler if in production + if os.getenv("ENVIRONMENT") == "production": + config["loggers"][""]["handlers"].append("logstash") + + logging.config.dictConfig(config) + logger = logging.getLogger(__name__) + logger.info(f"Logging configured for {service_name}") \ No newline at end of file diff --git a/shared/monitoring/metrics.py b/shared/monitoring/metrics.py new file mode 100644 index 00000000..a5e35223 --- /dev/null +++ b/shared/monitoring/metrics.py @@ -0,0 +1,112 @@ +""" +Metrics collection for microservices +""" + +import time +import logging +from typing import Dict, Any +from prometheus_client import Counter, Histogram, Gauge, start_http_server +from functools import wraps + +logger = logging.getLogger(__name__) + +# Prometheus metrics +REQUEST_COUNT = Counter( + 'http_requests_total', + 'Total HTTP requests', + ['method', 'endpoint', 'status_code', 'service'] +) + +REQUEST_DURATION = Histogram( + 'http_request_duration_seconds', + 'HTTP request duration in seconds', + ['method', 'endpoint', 'service'] +) + +ACTIVE_CONNECTIONS = Gauge( + 'active_connections', + 'Active database connections', + ['service'] +) + +TRAINING_JOBS = Counter( + 'training_jobs_total', + 'Total training jobs', + ['status', 'service'] +) + +FORECASTS_GENERATED = Counter( + 'forecasts_generated_total', + 'Total forecasts generated', + ['service'] +) + +class MetricsCollector: + """Metrics collector for microservices""" + + def __init__(self, service_name: str): + self.service_name = service_name + self.start_time = time.time() + + def start_metrics_server(self, port: int = 8080): + """Start Prometheus metrics server""" + try: + start_http_server(port) + logger.info(f"Metrics server started on port {port}") + except Exception as e: + logger.error(f"Failed to start metrics server: {e}") + + def 
diff --git a/shared/monitoring/metrics.py b/shared/monitoring/metrics.py
new file mode 100644
index 00000000..a5e35223
--- /dev/null
+++ b/shared/monitoring/metrics.py
@@ -0,0 +1,112 @@
+"""
+Metrics collection for microservices
+"""
+
+import time
+import logging
+from typing import Dict, Any
+from prometheus_client import Counter, Histogram, Gauge, start_http_server
+from functools import wraps
+
+logger = logging.getLogger(__name__)
+
+# Prometheus metrics
+REQUEST_COUNT = Counter(
+    'http_requests_total',
+    'Total HTTP requests',
+    ['method', 'endpoint', 'status_code', 'service']
+)
+
+REQUEST_DURATION = Histogram(
+    'http_request_duration_seconds',
+    'HTTP request duration in seconds',
+    ['method', 'endpoint', 'service']
+)
+
+ACTIVE_CONNECTIONS = Gauge(
+    'active_connections',
+    'Active database connections',
+    ['service']
+)
+
+TRAINING_JOBS = Counter(
+    'training_jobs_total',
+    'Total training jobs',
+    ['status', 'service']
+)
+
+FORECASTS_GENERATED = Counter(
+    'forecasts_generated_total',
+    'Total forecasts generated',
+    ['service']
+)
+
+class MetricsCollector:
+    """Metrics collector for microservices"""
+
+    def __init__(self, service_name: str):
+        self.service_name = service_name
+        self.start_time = time.time()
+
+    def start_metrics_server(self, port: int = 8080):
+        """Start Prometheus metrics server"""
+        try:
+            start_http_server(port)
+            logger.info(f"Metrics server started on port {port}")
+        except Exception as e:
+            logger.error(f"Failed to start metrics server: {e}")
+
+    def record_request(self, method: str, endpoint: str, status_code: int, duration: float):
+        """Record HTTP request metrics"""
+        REQUEST_COUNT.labels(
+            method=method,
+            endpoint=endpoint,
+            status_code=status_code,
+            service=self.service_name
+        ).inc()
+
+        REQUEST_DURATION.labels(
+            method=method,
+            endpoint=endpoint,
+            service=self.service_name
+        ).observe(duration)
+
+    def record_training_job(self, status: str):
+        """Record training job metrics"""
+        TRAINING_JOBS.labels(
+            status=status,
+            service=self.service_name
+        ).inc()
+
+    def record_forecast_generated(self):
+        """Record forecast generation metrics"""
+        FORECASTS_GENERATED.labels(
+            service=self.service_name
+        ).inc()
+
+    def set_active_connections(self, count: int):
+        """Set active database connections"""
+        ACTIVE_CONNECTIONS.labels(
+            service=self.service_name
+        ).set(count)
+
+def metrics_middleware(metrics_collector: MetricsCollector):
+    """Middleware to collect metrics"""
+
+    # Starlette/FastAPI HTTP middleware must be an async callable that
+    # awaits the downstream handler
+    async def middleware(request, call_next):
+        start_time = time.time()
+
+        response = await call_next(request)
+
+        duration = time.time() - start_time
+
+        metrics_collector.record_request(
+            method=request.method,
+            endpoint=request.url.path,
+            status_code=response.status_code,
+            duration=duration
+        )
+
+        return response
+
+    return middleware
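+
+# Example wiring (sketch; the service name, port, and FastAPI `app` object
+# are illustrative, not defined in this module):
+#
+#   collector = MetricsCollector("gateway")
+#   collector.start_metrics_server(port=8080)   # serves /metrics
+#   app.middleware("http")(metrics_middleware(collector))
\ No newline at end of file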
diff --git a/shared/utils/__init__.py b/shared/utils/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/shared/utils/datetime_utils.py b/shared/utils/datetime_utils.py
new file mode 100644
index 00000000..3035001a
--- /dev/null
+++ b/shared/utils/datetime_utils.py
@@ -0,0 +1,72 @@
+"""
+DateTime utilities for microservices
+"""
+
+from datetime import datetime, timezone, timedelta
+from typing import Optional
+import pytz
+
+def utc_now() -> datetime:
+    """Get current UTC datetime"""
+    return datetime.now(timezone.utc)
+
+def madrid_now() -> datetime:
+    """Get current Madrid datetime"""
+    madrid_tz = pytz.timezone('Europe/Madrid')
+    return datetime.now(madrid_tz)
+
+def to_utc(dt: datetime) -> datetime:
+    """Convert datetime to UTC (naive input is assumed to be UTC)"""
+    if dt.tzinfo is None:
+        dt = dt.replace(tzinfo=timezone.utc)
+    return dt.astimezone(timezone.utc)
+
+def to_madrid(dt: datetime) -> datetime:
+    """Convert datetime to Madrid timezone (naive input is assumed to be UTC)"""
+    madrid_tz = pytz.timezone('Europe/Madrid')
+    if dt.tzinfo is None:
+        dt = dt.replace(tzinfo=timezone.utc)
+    return dt.astimezone(madrid_tz)
+
+def format_datetime(dt: datetime, format_str: str = "%Y-%m-%d %H:%M:%S") -> str:
+    """Format datetime as string"""
+    return dt.strftime(format_str)
+
+def parse_datetime(dt_str: str, format_str: str = "%Y-%m-%d %H:%M:%S") -> datetime:
+    """Parse datetime from string"""
+    return datetime.strptime(dt_str, format_str)
+
+def is_business_hours(dt: Optional[datetime] = None) -> bool:
+    """Check if datetime is during business hours (9 AM - 6 PM Madrid time)"""
+    if dt is None:
+        dt = madrid_now()
+
+    if dt.tzinfo is None:
+        dt = dt.replace(tzinfo=timezone.utc)
+
+    madrid_dt = to_madrid(dt)
+
+    # Check if it's a weekday (Monday=0, Sunday=6)
+    if madrid_dt.weekday() >= 5:  # Weekend
+        return False
+
+    # Check if it's business hours
+    return 9 <= madrid_dt.hour < 18
+
+def next_business_day(dt: Optional[datetime] = None) -> datetime:
+    """Get the next business day at 9 AM Madrid time"""
+    if dt is None:
+        dt = madrid_now()
+
+    if dt.tzinfo is None:
+        dt = dt.replace(tzinfo=timezone.utc)
+
+    madrid_dt = to_madrid(dt)
+
+    # Always advance at least one day, then skip over the weekend;
+    # without the initial step this would return the current day
+    madrid_dt += timedelta(days=1)
+    while madrid_dt.weekday() >= 5:  # Weekend
+        madrid_dt += timedelta(days=1)
+
+    # Set to 9 AM
+    return madrid_dt.replace(hour=9, minute=0, second=0, microsecond=0)
\ No newline at end of file
diff --git a/shared/utils/validation.py b/shared/utils/validation.py
new file mode 100644
index 00000000..c855b20c
--- /dev/null
+++ b/shared/utils/validation.py
@@ -0,0 +1,67 @@
+"""
+Validation utilities for microservices
+"""
+
+import re
+from typing import Any, Optional
+from email_validator import validate_email, EmailNotValidError
+
+def validate_spanish_phone(phone: str) -> bool:
+    """Validate Spanish phone number"""
+    # Spanish phone pattern: optional +34/0034/34 prefix followed by 9 digits
+    pattern = r'^(\+34|0034|34)?[6-9]\d{8}$'
+    return bool(re.match(pattern, phone.replace(' ', '').replace('-', '')))
+
+def validate_email_address(email: str) -> bool:
+    """Validate email address"""
+    try:
+        validate_email(email)
+        return True
+    except EmailNotValidError:
+        return False
+
+def validate_tenant_name(name: str) -> bool:
+    """Validate tenant name"""
+    # Must be 2-50 characters: letters, numbers, spaces, hyphens, apostrophes
+    pattern = r"^[a-zA-ZÀ-ÿ0-9\s\-']{2,50}$"
+    return bool(re.match(pattern, name))
+
+def validate_address(address: str) -> bool:
+    """Validate address"""
+    # Must be 5-200 characters
+    return 5 <= len(address.strip()) <= 200
+
+def validate_coordinates(latitude: float, longitude: float) -> bool:
+    """Validate Madrid coordinates"""
+    # Madrid is roughly between these coordinates
+    madrid_bounds = {
+        'lat_min': 40.3,
+        'lat_max': 40.6,
+        'lon_min': -3.8,
+        'lon_max': -3.5
+    }
+
+    return (
+        madrid_bounds['lat_min'] <= latitude <= madrid_bounds['lat_max'] and
+        madrid_bounds['lon_min'] <= longitude <= madrid_bounds['lon_max']
+    )
+
+def validate_product_name(name: str) -> bool:
+    """Validate product name"""
+    # Must be 1-50 characters: letters, numbers, spaces
+    pattern = r"^[a-zA-ZÀ-ÿ0-9\s]{1,50}$"
+    return bool(re.match(pattern, name))
+
+def validate_positive_number(value: Any) -> bool:
+    """Validate positive number"""
+    try:
+        return float(value) > 0
+    except (ValueError, TypeError):
+        return False
+
+def validate_non_negative_number(value: Any) -> bool:
+    """Validate non-negative number"""
+    try:
+        return float(value) >= 0
+    except (ValueError, TypeError):
+        return False
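+
+# Example usage (sketch; sample values are illustrative):
+#
+#   validate_spanish_phone("+34 612 345 678")   # True
+#   validate_coordinates(40.42, -3.70)          # True (central Madrid)
+#   validate_positive_number("3.5")             # True
+#   validate_email_address("user@example")      # False (domain lacks a period)
\ No newline at end of file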