Initial microservices setup from artifacts

.gitignore (vendored, Normal file, 112 lines)
@@ -0,0 +1,112 @@
# Environment
.env
.env.local
.env.development.local
.env.test.local
.env.production.local

# Python
__pycache__/
*.py[cod]
*$py.class
*.so
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
.pytest_cache/
.coverage
.coverage.*
htmlcov/
.tox/
.nox/
.hypothesis/
.mypy_cache/
.dmypy.json
dmypy.json
.pyre/

# Virtual Environment
venv/
ENV/
env/
.venv

# Node
node_modules/
npm-debug.log*
yarn-debug.log*
yarn-error.log*
.pnpm-debug.log*
.npm
.eslintcache
.next
out/
build/
dist/

# IDE
.vscode/
.idea/
*.swp
*.swo
*~
.DS_Store

# Logs
logs/
*.log

# Database
*.db
*.sqlite
*.sqlite3

# ML Models
*.pkl
*.joblib
*.h5
models/

# Data
data/external/
data/processed/
*.csv
*.xlsx

# Docker
.docker/

# Infrastructure
*.tfstate
*.tfstate.backup
.terraform/
.terraform.lock.hcl

# Kubernetes
kubeconfig
*.yaml.bak

# Monitoring
prometheus_data/
grafana_data/
elasticsearch_data/

# Artifacts (from Claude)
*_service.py
*_libraries.py
*.md
setup_scripts.sh

docker-compose.yml (Normal file, 547 lines)
@@ -0,0 +1,547 @@
# docker-compose.yml - Development Environment
version: '3.8'

services:
  # Message Broker
  rabbitmq:
    image: rabbitmq:3-management-alpine
    container_name: bakery-rabbitmq
    hostname: rabbitmq
    ports:
      - "5672:5672"
      - "15672:15672"
    environment:
      - RABBITMQ_DEFAULT_USER=bakery
      - RABBITMQ_DEFAULT_PASS=forecast123
      - RABBITMQ_DEFAULT_VHOST=/
    volumes:
      - rabbitmq_data:/var/lib/rabbitmq
    networks:
      - bakery-network
    healthcheck:
      test: ["CMD", "rabbitmq-diagnostics", "ping"]
      interval: 30s
      timeout: 10s
      retries: 3

  # Cache & Session Store
  redis:
    image: redis:7-alpine
    container_name: bakery-redis
    ports:
      - "6379:6379"
    volumes:
      - redis_data:/data
    networks:
      - bakery-network
    command: redis-server --appendonly yes
    healthcheck:
      test: ["CMD", "redis-cli", "ping"]
      interval: 30s
      timeout: 10s
      retries: 3

  # Auth Service Database
  auth-db:
    image: postgres:15-alpine
    container_name: bakery-auth-db
    environment:
      - POSTGRES_DB=auth_db
      - POSTGRES_USER=auth_user
      - POSTGRES_PASSWORD=auth_pass123
    volumes:
      - auth_db_data:/var/lib/postgresql/data
    ports:
      - "5432:5432"
    networks:
      - bakery-network
    healthcheck:
      test: ["CMD-SHELL", "pg_isready -U auth_user -d auth_db"]
      interval: 30s
      timeout: 10s
      retries: 3

  # Training Service Database
  training-db:
    image: postgres:15-alpine
    container_name: bakery-training-db
    environment:
      - POSTGRES_DB=training_db
      - POSTGRES_USER=training_user
      - POSTGRES_PASSWORD=training_pass123
    volumes:
      - training_db_data:/var/lib/postgresql/data
    ports:
      - "5433:5432"
    networks:
      - bakery-network
    healthcheck:
      test: ["CMD-SHELL", "pg_isready -U training_user -d training_db"]
      interval: 30s
      timeout: 10s
      retries: 3

  # Forecasting Service Database
  forecasting-db:
    image: postgres:15-alpine
    container_name: bakery-forecasting-db
    environment:
      - POSTGRES_DB=forecasting_db
      - POSTGRES_USER=forecasting_user
      - POSTGRES_PASSWORD=forecasting_pass123
    volumes:
      - forecasting_db_data:/var/lib/postgresql/data
    ports:
      - "5434:5432"
    networks:
      - bakery-network
    healthcheck:
      test: ["CMD-SHELL", "pg_isready -U forecasting_user -d forecasting_db"]
      interval: 30s
      timeout: 10s
      retries: 3

  # Data Service Database
  data-db:
    image: postgres:15-alpine
    container_name: bakery-data-db
    environment:
      - POSTGRES_DB=data_db
      - POSTGRES_USER=data_user
      - POSTGRES_PASSWORD=data_pass123
    volumes:
      - data_db_data:/var/lib/postgresql/data
    ports:
      - "5435:5432"
    networks:
      - bakery-network
    healthcheck:
      test: ["CMD-SHELL", "pg_isready -U data_user -d data_db"]
      interval: 30s
      timeout: 10s
      retries: 3

  # Tenant Service Database
  tenant-db:
    image: postgres:15-alpine
    container_name: bakery-tenant-db
    environment:
      - POSTGRES_DB=tenant_db
      - POSTGRES_USER=tenant_user
      - POSTGRES_PASSWORD=tenant_pass123
    volumes:
      - tenant_db_data:/var/lib/postgresql/data
    ports:
      - "5436:5432"
    networks:
      - bakery-network
    healthcheck:
      test: ["CMD-SHELL", "pg_isready -U tenant_user -d tenant_db"]
      interval: 30s
      timeout: 10s
      retries: 3

  # Notification Service Database
  notification-db:
    image: postgres:15-alpine
    container_name: bakery-notification-db
    environment:
      - POSTGRES_DB=notification_db
      - POSTGRES_USER=notification_user
      - POSTGRES_PASSWORD=notification_pass123
    volumes:
      - notification_db_data:/var/lib/postgresql/data
    ports:
      - "5437:5432"
    networks:
      - bakery-network
    healthcheck:
      test: ["CMD-SHELL", "pg_isready -U notification_user -d notification_db"]
      interval: 30s
      timeout: 10s
      retries: 3

  # Authentication Service
  auth-service:
    build:
      context: ./services/auth
      dockerfile: Dockerfile
    container_name: bakery-auth-service
    environment:
      - DATABASE_URL=postgresql+asyncpg://auth_user:auth_pass123@auth-db:5432/auth_db
      - REDIS_URL=redis://redis:6379/0
      - JWT_SECRET_KEY=your-super-secret-jwt-key-change-in-production
      - JWT_ACCESS_TOKEN_EXPIRE_MINUTES=30
      - JWT_REFRESH_TOKEN_EXPIRE_DAYS=7
      - RABBITMQ_URL=amqp://bakery:forecast123@rabbitmq:5672/
      - SERVICE_NAME=auth-service
      - SERVICE_VERSION=1.0.0
    ports:
      - "8001:8000"
    depends_on:
      auth-db:
        condition: service_healthy
      redis:
        condition: service_healthy
      rabbitmq:
        condition: service_healthy
    networks:
      - bakery-network
    volumes:
      - ./services/auth:/app
    healthcheck:
      test: ["CMD", "curl", "-f", "http://localhost:8000/health"]
      interval: 30s
      timeout: 10s
      retries: 3

  # Training Service
  training-service:
    build:
      context: ./services/training
      dockerfile: Dockerfile
    container_name: bakery-training-service
    environment:
      - DATABASE_URL=postgresql+asyncpg://training_user:training_pass123@training-db:5432/training_db
      - REDIS_URL=redis://redis:6379/1
      - RABBITMQ_URL=amqp://bakery:forecast123@rabbitmq:5672/
      - AUTH_SERVICE_URL=http://auth-service:8000
      - DATA_SERVICE_URL=http://data-service:8000
      - SERVICE_NAME=training-service
      - SERVICE_VERSION=1.0.0
    ports:
      - "8002:8000"
    depends_on:
      training-db:
        condition: service_healthy
      redis:
        condition: service_healthy
      rabbitmq:
        condition: service_healthy
      auth-service:
        condition: service_healthy
    networks:
      - bakery-network
    volumes:
      - ./services/training:/app
    healthcheck:
      test: ["CMD", "curl", "-f", "http://localhost:8000/health"]
      interval: 30s
      timeout: 10s
      retries: 3

  # Forecasting Service
  forecasting-service:
    build:
      context: ./services/forecasting
      dockerfile: Dockerfile
    container_name: bakery-forecasting-service
    environment:
      - DATABASE_URL=postgresql+asyncpg://forecasting_user:forecasting_pass123@forecasting-db:5432/forecasting_db
      - REDIS_URL=redis://redis:6379/2
      - RABBITMQ_URL=amqp://bakery:forecast123@rabbitmq:5672/
      - AUTH_SERVICE_URL=http://auth-service:8000
      - TRAINING_SERVICE_URL=http://training-service:8000
      - DATA_SERVICE_URL=http://data-service:8000
      - SERVICE_NAME=forecasting-service
      - SERVICE_VERSION=1.0.0
    ports:
      - "8003:8000"
    depends_on:
      forecasting-db:
        condition: service_healthy
      redis:
        condition: service_healthy
      rabbitmq:
        condition: service_healthy
      auth-service:
        condition: service_healthy
    networks:
      - bakery-network
    volumes:
      - ./services/forecasting:/app
    healthcheck:
      test: ["CMD", "curl", "-f", "http://localhost:8000/health"]
      interval: 30s
      timeout: 10s
      retries: 3

  # Data Service
  data-service:
    build:
      context: ./services/data
      dockerfile: Dockerfile
    container_name: bakery-data-service
    environment:
      - DATABASE_URL=postgresql+asyncpg://data_user:data_pass123@data-db:5432/data_db
      - REDIS_URL=redis://redis:6379/3
      - RABBITMQ_URL=amqp://bakery:forecast123@rabbitmq:5672/
      - AUTH_SERVICE_URL=http://auth-service:8000
      - AEMET_API_KEY=your-aemet-api-key-here
      - MADRID_OPENDATA_API_KEY=your-madrid-opendata-key-here
      - SERVICE_NAME=data-service
      - SERVICE_VERSION=1.0.0
    ports:
      - "8004:8000"
    depends_on:
      data-db:
        condition: service_healthy
      redis:
        condition: service_healthy
      rabbitmq:
        condition: service_healthy
      auth-service:
        condition: service_healthy
    networks:
      - bakery-network
    volumes:
      - ./services/data:/app
    healthcheck:
      test: ["CMD", "curl", "-f", "http://localhost:8000/health"]
      interval: 30s
      timeout: 10s
      retries: 3

  # Tenant Service
  tenant-service:
    build:
      context: ./services/tenant
      dockerfile: Dockerfile
    container_name: bakery-tenant-service
    environment:
      - DATABASE_URL=postgresql+asyncpg://tenant_user:tenant_pass123@tenant-db:5432/tenant_db
      - REDIS_URL=redis://redis:6379/4
      - RABBITMQ_URL=amqp://bakery:forecast123@rabbitmq:5672/
      - AUTH_SERVICE_URL=http://auth-service:8000
      - SERVICE_NAME=tenant-service
      - SERVICE_VERSION=1.0.0
    ports:
      - "8005:8000"
    depends_on:
      tenant-db:
        condition: service_healthy
      redis:
        condition: service_healthy
      rabbitmq:
        condition: service_healthy
      auth-service:
        condition: service_healthy
    networks:
      - bakery-network
    volumes:
      - ./services/tenant:/app
    healthcheck:
      test: ["CMD", "curl", "-f", "http://localhost:8000/health"]
      interval: 30s
      timeout: 10s
      retries: 3

  # Notification Service
  notification-service:
    build:
      context: ./services/notification
      dockerfile: Dockerfile
    container_name: bakery-notification-service
    environment:
      - DATABASE_URL=postgresql+asyncpg://notification_user:notification_pass123@notification-db:5432/notification_db
      - REDIS_URL=redis://redis:6379/5
      - RABBITMQ_URL=amqp://bakery:forecast123@rabbitmq:5672/
      - AUTH_SERVICE_URL=http://auth-service:8000
      - SMTP_HOST=smtp.gmail.com
      - SMTP_PORT=587
      - SMTP_USER=your-email@gmail.com
      - SMTP_PASSWORD=your-email-password
      - WHATSAPP_API_KEY=your-whatsapp-api-key
      - SERVICE_NAME=notification-service
      - SERVICE_VERSION=1.0.0
    ports:
      - "8006:8000"
    depends_on:
      notification-db:
        condition: service_healthy
      redis:
        condition: service_healthy
      rabbitmq:
        condition: service_healthy
      auth-service:
        condition: service_healthy
    networks:
      - bakery-network
    volumes:
      - ./services/notification:/app
    healthcheck:
      test: ["CMD", "curl", "-f", "http://localhost:8000/health"]
      interval: 30s
      timeout: 10s
      retries: 3

  # API Gateway
  gateway:
    build:
      context: ./gateway
      dockerfile: Dockerfile
    container_name: bakery-gateway
    environment:
      - REDIS_URL=redis://redis:6379/6
      - AUTH_SERVICE_URL=http://auth-service:8000
      - TRAINING_SERVICE_URL=http://training-service:8000
      - FORECASTING_SERVICE_URL=http://forecasting-service:8000
      - DATA_SERVICE_URL=http://data-service:8000
      - TENANT_SERVICE_URL=http://tenant-service:8000
      - NOTIFICATION_SERVICE_URL=http://notification-service:8000
      - CORS_ORIGINS=http://localhost:3000,http://localhost:3001
      - SERVICE_NAME=gateway
      - SERVICE_VERSION=1.0.0
    ports:
      - "8000:8000"
    depends_on:
      auth-service:
        condition: service_healthy
      training-service:
        condition: service_healthy
      forecasting-service:
        condition: service_healthy
      data-service:
        condition: service_healthy
      tenant-service:
        condition: service_healthy
      notification-service:
        condition: service_healthy
    networks:
      - bakery-network
    volumes:
      - ./gateway:/app
    healthcheck:
      test: ["CMD", "curl", "-f", "http://localhost:8000/health"]
      interval: 30s
      timeout: 10s
      retries: 3

  # Dashboard Frontend
  dashboard:
    build:
      context: ./frontend/dashboard
      dockerfile: Dockerfile
    container_name: bakery-dashboard
    environment:
      - REACT_APP_API_URL=http://localhost:8000
      - REACT_APP_WS_URL=ws://localhost:8000
      - CHOKIDAR_USEPOLLING=true
    ports:
      - "3000:3000"
    depends_on:
      - gateway
    networks:
      - bakery-network
    volumes:
      - ./frontend/dashboard:/app
      - /app/node_modules

  # Marketing Site
  marketing:
    build:
      context: ./frontend/marketing
      dockerfile: Dockerfile
    container_name: bakery-marketing
    environment:
      - NEXT_PUBLIC_API_URL=http://localhost:8000
      - NEXT_PUBLIC_SITE_URL=http://localhost:3001
    ports:
      - "3001:3000"
    depends_on:
      - gateway
    networks:
      - bakery-network
    volumes:
      - ./frontend/marketing:/app
      - /app/node_modules

  # Monitoring - Prometheus
  prometheus:
    image: prom/prometheus:latest
    container_name: bakery-prometheus
    ports:
      - "9090:9090"
    volumes:
      - ./infrastructure/monitoring/prometheus:/etc/prometheus
      - prometheus_data:/prometheus
    command:
      - '--config.file=/etc/prometheus/prometheus.yml'
      - '--storage.tsdb.path=/prometheus'
      - '--web.console.libraries=/usr/share/prometheus/console_libraries'
      - '--web.console.templates=/usr/share/prometheus/consoles'
      - '--web.enable-lifecycle'
    networks:
      - bakery-network

  # Monitoring - Grafana
  grafana:
    image: grafana/grafana:latest
    container_name: bakery-grafana
    ports:
      - "3002:3000"
    environment:
      - GF_SECURITY_ADMIN_PASSWORD=admin123
    volumes:
      - grafana_data:/var/lib/grafana
      - ./infrastructure/monitoring/grafana:/etc/grafana/provisioning
    depends_on:
      - prometheus
    networks:
      - bakery-network

  # Log Aggregation - ELK Stack
  elasticsearch:
    image: elasticsearch:8.8.0
    container_name: bakery-elasticsearch
    environment:
      - discovery.type=single-node
      - xpack.security.enabled=false
      - "ES_JAVA_OPTS=-Xms512m -Xmx512m"
    ports:
      - "9200:9200"
    volumes:
      - elasticsearch_data:/usr/share/elasticsearch/data
    networks:
      - bakery-network

  logstash:
    image: logstash:8.8.0
    container_name: bakery-logstash
    volumes:
      - ./infrastructure/monitoring/logstash:/usr/share/logstash/pipeline
    ports:
      - "5000:5000"
    depends_on:
      - elasticsearch
    networks:
      - bakery-network

  kibana:
    image: kibana:8.8.0
    container_name: bakery-kibana
    environment:
      - ELASTICSEARCH_HOSTS=http://elasticsearch:9200
    ports:
      - "5601:5601"
    depends_on:
      - elasticsearch
    networks:
      - bakery-network

volumes:
  rabbitmq_data:
  redis_data:
  auth_db_data:
  training_db_data:
  forecasting_db_data:
  data_db_data:
  tenant_db_data:
  notification_db_data:
  prometheus_data:
  grafana_data:
  elasticsearch_data:

networks:
  bakery-network:
    driver: bridge

gateway/app/__init__.py (Normal file, 0 lines)

gateway/app/core/__init__.py (Normal file, 0 lines)

gateway/app/core/config.py (Normal file, 52 lines)
@@ -0,0 +1,52 @@
"""
Gateway configuration
"""

import os
from typing import List, Dict
from pydantic_settings import BaseSettings  # pydantic v2 (pinned below): BaseSettings lives in pydantic-settings


class Settings(BaseSettings):
    """Application settings"""

    # Basic settings
    APP_NAME: str = "Bakery Forecasting Gateway"
    VERSION: str = "1.0.0"
    DEBUG: bool = os.getenv("DEBUG", "False").lower() == "true"
    LOG_LEVEL: str = os.getenv("LOG_LEVEL", "INFO")

    # CORS settings
    CORS_ORIGINS: List[str] = os.getenv("CORS_ORIGINS", "http://localhost:3000,http://localhost:3001").split(",")

    # Service URLs
    AUTH_SERVICE_URL: str = os.getenv("AUTH_SERVICE_URL", "http://auth-service:8000")
    TRAINING_SERVICE_URL: str = os.getenv("TRAINING_SERVICE_URL", "http://training-service:8000")
    FORECASTING_SERVICE_URL: str = os.getenv("FORECASTING_SERVICE_URL", "http://forecasting-service:8000")
    DATA_SERVICE_URL: str = os.getenv("DATA_SERVICE_URL", "http://data-service:8000")
    TENANT_SERVICE_URL: str = os.getenv("TENANT_SERVICE_URL", "http://tenant-service:8000")
    NOTIFICATION_SERVICE_URL: str = os.getenv("NOTIFICATION_SERVICE_URL", "http://notification-service:8000")

    # Redis settings
    REDIS_URL: str = os.getenv("REDIS_URL", "redis://redis:6379/6")

    # Rate limiting
    RATE_LIMIT_REQUESTS: int = int(os.getenv("RATE_LIMIT_REQUESTS", "100"))
    RATE_LIMIT_WINDOW: int = int(os.getenv("RATE_LIMIT_WINDOW", "60"))

    # JWT settings
    JWT_SECRET_KEY: str = os.getenv("JWT_SECRET_KEY", "your-secret-key-change-in-production")
    JWT_ALGORITHM: str = os.getenv("JWT_ALGORITHM", "HS256")

    @property
    def SERVICES(self) -> Dict[str, str]:
        """Service registry"""
        return {
            "auth": self.AUTH_SERVICE_URL,
            "training": self.TRAINING_SERVICE_URL,
            "forecasting": self.FORECASTING_SERVICE_URL,
            "data": self.DATA_SERVICE_URL,
            "tenant": self.TENANT_SERVICE_URL,
            "notification": self.NOTIFICATION_SERVICE_URL
        }


settings = Settings()
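
A quick sketch of how these settings resolve. Because the defaults call os.getenv in the class body, they are evaluated when the module is first imported, so environment variables must be set beforehand (the URL here is illustrative):

import os
os.environ["AUTH_SERVICE_URL"] = "http://localhost:8001"

from app.core.config import Settings  # import after the env var is set

print(Settings().SERVICES["auth"])  # -> http://localhost:8001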

gateway/app/core/service_discovery.py (Normal file, 122 lines)
@@ -0,0 +1,122 @@
"""
Service discovery for microservices
"""

import asyncio
import logging
from typing import Dict, List, Optional
import httpx
import redis.asyncio as redis
from datetime import datetime, timedelta

from app.core.config import settings

logger = logging.getLogger(__name__)


class ServiceDiscovery:
    """Service discovery and health checking"""

    def __init__(self):
        self.redis_client = redis.from_url(settings.REDIS_URL)
        self.services = settings.SERVICES
        self.health_check_interval = 30  # seconds
        self.health_check_task = None

    async def initialize(self):
        """Initialize service discovery"""
        logger.info("Initializing service discovery")

        # Start health check task
        self.health_check_task = asyncio.create_task(self._health_check_loop())

        # Initial health check
        await self._check_all_services()

    async def cleanup(self):
        """Cleanup service discovery"""
        if self.health_check_task:
            self.health_check_task.cancel()
            try:
                await self.health_check_task
            except asyncio.CancelledError:
                pass

        await self.redis_client.close()

    async def get_service_url(self, service_name: str) -> Optional[str]:
        """Get service URL"""
        return self.services.get(service_name)

    async def get_healthy_services(self) -> List[str]:
        """Get list of healthy services"""
        healthy_services = []

        for service_name in self.services:
            is_healthy = await self._is_service_healthy(service_name)
            if is_healthy:
                healthy_services.append(service_name)

        return healthy_services

    async def _health_check_loop(self):
        """Continuous health check loop"""
        while True:
            try:
                await self._check_all_services()
                await asyncio.sleep(self.health_check_interval)
            except asyncio.CancelledError:
                break
            except Exception as e:
                logger.error(f"Health check error: {e}")
                await asyncio.sleep(self.health_check_interval)

    async def _check_all_services(self):
        """Check health of all services"""
        for service_name, service_url in self.services.items():
            try:
                is_healthy = await self._check_service_health(service_url)
                await self._update_service_health(service_name, is_healthy)
            except Exception as e:
                logger.error(f"Health check failed for {service_name}: {e}")
                await self._update_service_health(service_name, False)

    async def _check_service_health(self, service_url: str) -> bool:
        """Check individual service health"""
        try:
            async with httpx.AsyncClient(timeout=5.0) as client:
                response = await client.get(f"{service_url}/health")
                return response.status_code == 200
        except Exception as e:
            logger.warning(f"Service health check failed: {e}")
            return False

    async def _update_service_health(self, service_name: str, is_healthy: bool):
        """Update service health status in Redis"""
        try:
            key = f"service_health:{service_name}"
            value = {
                # Redis hashes cannot store Python bools; str() yields 'True'/'False',
                # matching the comparison in _is_service_healthy below
                "healthy": str(is_healthy),
                "last_check": datetime.utcnow().isoformat(),
                "url": self.services[service_name]
            }

            await self.redis_client.hset(key, mapping=value)
            await self.redis_client.expire(key, 300)  # 5 minutes TTL

        except Exception as e:
            logger.error(f"Failed to update service health for {service_name}: {e}")

    async def _is_service_healthy(self, service_name: str) -> bool:
        """Check if service is healthy from Redis cache"""
        try:
            key = f"service_health:{service_name}"
            health_data = await self.redis_client.hgetall(key)

            if not health_data:
                return False

            return health_data.get(b'healthy', b'False').decode() == 'True'

        except Exception as e:
            logger.error(f"Failed to check service health for {service_name}: {e}")
            return False
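
A minimal sketch of driving ServiceDiscovery outside the gateway process, assuming Redis is reachable at settings.REDIS_URL and the services expose GET /health as configured in docker-compose.yml:

import asyncio
from app.core.service_discovery import ServiceDiscovery

async def main():
    sd = ServiceDiscovery()
    await sd.initialize()                   # kicks off the 30 s health-check loop
    print(await sd.get_healthy_services())  # e.g. ['auth', 'data']
    await sd.cleanup()                      # cancels the loop, closes Redis

asyncio.run(main())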

gateway/app/main.py (Normal file, 131 lines)
@@ -0,0 +1,131 @@
"""
API Gateway - Central entry point for all microservices
Handles routing, authentication, rate limiting, and cross-cutting concerns
"""

import asyncio
import logging
from fastapi import FastAPI, Request, HTTPException, Depends
from fastapi.middleware.cors import CORSMiddleware
from fastapi.responses import JSONResponse
import httpx
import time
from typing import Dict, Any

from app.core.config import settings
from app.core.service_discovery import ServiceDiscovery
from app.middleware.auth import auth_middleware
from app.middleware.logging import logging_middleware
from app.middleware.rate_limit import rate_limit_middleware
from app.routes import auth, training, forecasting, data, tenant, notification
from shared.monitoring.logging import setup_logging
from shared.monitoring.metrics import MetricsCollector

# Setup logging
setup_logging("gateway", settings.LOG_LEVEL)
logger = logging.getLogger(__name__)

# Create FastAPI app
app = FastAPI(
    title="Bakery Forecasting API Gateway",
    description="Central API Gateway for bakery forecasting microservices",
    version="1.0.0",
    docs_url="/docs",
    redoc_url="/redoc"
)

# Initialize metrics collector
metrics_collector = MetricsCollector("gateway")

# Service discovery
service_discovery = ServiceDiscovery()

# CORS middleware
app.add_middleware(
    CORSMiddleware,
    allow_origins=settings.CORS_ORIGINS,
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

# Custom middleware: these are plain async functions, so they must be
# registered with app.middleware("http"); add_middleware expects a class
app.middleware("http")(auth_middleware)
app.middleware("http")(logging_middleware)
app.middleware("http")(rate_limit_middleware)

# Include routers
app.include_router(auth.router, prefix="/api/v1/auth", tags=["authentication"])
app.include_router(training.router, prefix="/api/v1/training", tags=["training"])
app.include_router(forecasting.router, prefix="/api/v1/forecasting", tags=["forecasting"])
app.include_router(data.router, prefix="/api/v1/data", tags=["data"])
app.include_router(tenant.router, prefix="/api/v1/tenants", tags=["tenants"])
app.include_router(notification.router, prefix="/api/v1/notifications", tags=["notifications"])


@app.on_event("startup")
async def startup_event():
    """Application startup"""
    logger.info("Starting API Gateway")

    # Record start time so the /metrics uptime calculation has a value
    app.state.start_time = time.time()

    # Start metrics server
    metrics_collector.start_metrics_server(8080)

    # Initialize service discovery
    await service_discovery.initialize()

    logger.info("API Gateway started successfully")


@app.on_event("shutdown")
async def shutdown_event():
    """Application shutdown"""
    logger.info("Shutting down API Gateway")

    # Clean up service discovery
    await service_discovery.cleanup()

    logger.info("API Gateway shutdown complete")


@app.get("/health")
async def health_check():
    """Health check endpoint"""
    healthy_services = await service_discovery.get_healthy_services()

    return {
        "status": "healthy",
        "service": "gateway",
        "version": "1.0.0",
        "healthy_services": healthy_services,
        "total_services": len(settings.SERVICES),
        "timestamp": time.time()
    }


@app.get("/metrics")
async def get_metrics():
    """Get basic metrics"""
    return {
        "service": "gateway",
        "uptime": time.time() - app.state.start_time if hasattr(app.state, 'start_time') else 0,
        "healthy_services": await service_discovery.get_healthy_services()
    }


@app.exception_handler(HTTPException)
async def http_exception_handler(request: Request, exc: HTTPException):
    """Handle HTTP exceptions"""
    logger.error(f"HTTP {exc.status_code}: {exc.detail}")
    return JSONResponse(
        status_code=exc.status_code,
        content={"detail": exc.detail, "service": "gateway"}
    )


@app.exception_handler(Exception)
async def general_exception_handler(request: Request, exc: Exception):
    """Handle general exceptions"""
    logger.error(f"Unhandled exception: {exc}", exc_info=True)
    return JSONResponse(
        status_code=500,
        content={"detail": "Internal server error", "service": "gateway"}
    )


if __name__ == "__main__":
    import uvicorn
    uvicorn.run(app, host="0.0.0.0", port=8000)
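
Once the stack is up, a quick smoke test of the gateway's two public endpoints, a sketch assuming the 8000 port mapping from docker-compose.yml:

import httpx

print(httpx.get("http://localhost:8000/health").json())   # status + healthy_services
print(httpx.get("http://localhost:8000/metrics").json())  # uptime + healthy_services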

gateway/app/middleware/__init__.py (Normal file, 0 lines)

gateway/app/middleware/auth.py (Normal file, 101 lines)
@@ -0,0 +1,101 @@
"""
Authentication middleware for gateway
"""

import logging
from fastapi import Request, HTTPException
from fastapi.responses import JSONResponse
import httpx
from typing import Optional

from app.core.config import settings
from shared.auth.jwt_handler import JWTHandler

logger = logging.getLogger(__name__)

# JWT handler
jwt_handler = JWTHandler(settings.JWT_SECRET_KEY, settings.JWT_ALGORITHM)

# Routes that don't require authentication
PUBLIC_ROUTES = [
    "/health",
    "/metrics",
    "/docs",
    "/redoc",
    "/openapi.json",
    "/api/v1/auth/login",
    "/api/v1/auth/register",
    "/api/v1/auth/refresh"
]


async def auth_middleware(request: Request, call_next):
    """Authentication middleware"""

    # Check if route requires authentication
    if _is_public_route(request.url.path):
        return await call_next(request)

    # Get token from header
    token = _extract_token(request)
    if not token:
        return JSONResponse(
            status_code=401,
            content={"detail": "Authentication required"}
        )

    # Verify token
    try:
        # First try to verify token locally
        payload = jwt_handler.verify_token(token)

        if payload:
            # Add user info to request state
            request.state.user = payload
            return await call_next(request)
        else:
            # Token invalid or expired, verify with auth service
            user_info = await _verify_with_auth_service(token)
            if user_info:
                request.state.user = user_info
                return await call_next(request)
            else:
                return JSONResponse(
                    status_code=401,
                    content={"detail": "Invalid or expired token"}
                )

    except Exception as e:
        logger.error(f"Authentication error: {e}")
        return JSONResponse(
            status_code=401,
            content={"detail": "Authentication failed"}
        )


def _is_public_route(path: str) -> bool:
    """Check if route is public"""
    return any(path.startswith(route) for route in PUBLIC_ROUTES)


def _extract_token(request: Request) -> Optional[str]:
    """Extract JWT token from request"""
    auth_header = request.headers.get("Authorization")
    if auth_header and auth_header.startswith("Bearer "):
        return auth_header.split(" ")[1]
    return None


async def _verify_with_auth_service(token: str) -> Optional[dict]:
    """Verify token with auth service"""
    try:
        async with httpx.AsyncClient(timeout=5.0) as client:
            response = await client.post(
                f"{settings.AUTH_SERVICE_URL}/verify",
                headers={"Authorization": f"Bearer {token}"}
            )

            if response.status_code == 200:
                return response.json()
            else:
                return None

    except Exception as e:
        logger.error(f"Auth service verification failed: {e}")
        return None

gateway/app/middleware/logging.py (Normal file, 48 lines)
@@ -0,0 +1,48 @@
"""
Logging middleware for gateway
"""

import logging
import time
from fastapi import Request
import json

logger = logging.getLogger(__name__)


async def logging_middleware(request: Request, call_next):
    """Logging middleware"""

    start_time = time.time()

    # Log request
    logger.info(
        f"Request: {request.method} {request.url.path}",
        extra={
            "method": request.method,
            "url": request.url.path,
            "query_params": str(request.query_params),
            "client_host": request.client.host,
            "user_agent": request.headers.get("user-agent", ""),
            "request_id": getattr(request.state, 'request_id', None)
        }
    )

    # Process request
    response = await call_next(request)

    # Calculate duration
    duration = time.time() - start_time

    # Log response
    logger.info(
        f"Response: {response.status_code} in {duration:.3f}s",
        extra={
            "status_code": response.status_code,
            "duration": duration,
            "method": request.method,
            "url": request.url.path,
            "request_id": getattr(request.state, 'request_id', None)
        }
    )

    return response
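
The extra fields only show up in output if the handler's formatter emits them; a sketch of the intended JSON shape, assuming the shared setup_logging() installs python-json-logger (which is pinned in gateway/requirements.txt):

import logging
from pythonjsonlogger import jsonlogger

handler = logging.StreamHandler()
handler.setFormatter(jsonlogger.JsonFormatter())  # merges extra={} into the JSON record
log = logging.getLogger("demo")
log.addHandler(handler)
log.setLevel(logging.INFO)

log.info("Request: GET /health", extra={"method": "GET", "url": "/health"})
# -> {"message": "Request: GET /health", "method": "GET", "url": "/health"}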

gateway/app/middleware/rate_limit.py (Normal file, 85 lines)
@@ -0,0 +1,85 @@
"""
Rate limiting middleware for gateway
"""

import logging
from fastapi import Request, HTTPException
from fastapi.responses import JSONResponse
import redis.asyncio as redis
from datetime import datetime, timedelta
import hashlib

from app.core.config import settings

logger = logging.getLogger(__name__)

# Redis client for rate limiting
redis_client = redis.from_url(settings.REDIS_URL)


async def rate_limit_middleware(request: Request, call_next):
    """Rate limiting middleware"""

    # Skip rate limiting for health checks
    if request.url.path in ["/health", "/metrics"]:
        return await call_next(request)

    # Get client identifier (IP address or user ID)
    client_id = _get_client_id(request)

    # Check rate limit
    if await _is_rate_limited(client_id):
        return JSONResponse(
            status_code=429,
            content={
                "detail": "Rate limit exceeded",
                "retry_after": settings.RATE_LIMIT_WINDOW
            }
        )

    # Process request
    response = await call_next(request)

    # Update rate limit counter
    await _update_rate_limit(client_id)

    return response


def _get_client_id(request: Request) -> str:
    """Get client identifier for rate limiting"""
    # Use user ID if authenticated, otherwise use IP
    if hasattr(request.state, 'user') and request.state.user:
        return f"user:{request.state.user.get('user_id', 'unknown')}"
    else:
        # Hash IP address for privacy
        ip = request.client.host
        return f"ip:{hashlib.md5(ip.encode()).hexdigest()}"


async def _is_rate_limited(client_id: str) -> bool:
    """Check if client is rate limited"""
    try:
        key = f"rate_limit:{client_id}"
        current_count = await redis_client.get(key)

        if current_count is None:
            return False

        return int(current_count) >= settings.RATE_LIMIT_REQUESTS

    except Exception as e:
        logger.error(f"Rate limit check failed: {e}")
        return False


async def _update_rate_limit(client_id: str):
    """Update rate limit counter"""
    try:
        key = f"rate_limit:{client_id}"

        # Increment counter
        current_count = await redis_client.incr(key)

        # Set TTL on first request
        if current_count == 1:
            await redis_client.expire(key, settings.RATE_LIMIT_WINDOW)

    except Exception as e:
        logger.error(f"Rate limit update failed: {e}")
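
The middleware implements a fixed-window counter: INCR the per-client key, set the TTL on the first hit, and reject once the count reaches the limit. A standalone sketch of the same pattern against a local Redis (key name and limits mirror the gateway defaults):

import asyncio
import redis.asyncio as redis

async def allow(r: redis.Redis, client_id: str, limit: int = 100, window: int = 60) -> bool:
    key = f"rate_limit:{client_id}"
    count = await r.incr(key)          # atomic increment
    if count == 1:
        await r.expire(key, window)    # window starts at the first request
    return count <= limit

async def main():
    r = redis.from_url("redis://localhost:6379/6")
    print(await allow(r, "ip:demo"))   # True until the 101st call in the window
    await r.close()

asyncio.run(main())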

gateway/app/routes/__init__.py (Normal file, 0 lines)

gateway/app/routes/auth.py (Normal file, 161 lines)
@@ -0,0 +1,161 @@
"""
Authentication routes for gateway
"""

from fastapi import APIRouter, Request, HTTPException
from fastapi.responses import JSONResponse
import httpx
import logging

from app.core.config import settings
from app.core.service_discovery import ServiceDiscovery

logger = logging.getLogger(__name__)
router = APIRouter()

service_discovery = ServiceDiscovery()


@router.post("/login")
async def login(request: Request):
    """Proxy login request to auth service"""
    try:
        body = await request.body()

        async with httpx.AsyncClient(timeout=10.0) as client:
            response = await client.post(
                f"{settings.AUTH_SERVICE_URL}/login",
                content=body,
                headers={"Content-Type": "application/json"}
            )

            if response.status_code == 200:
                return response.json()
            else:
                return JSONResponse(
                    status_code=response.status_code,
                    content=response.json()
                )

    except httpx.RequestError as e:
        logger.error(f"Auth service unavailable: {e}")
        raise HTTPException(
            status_code=503,
            detail="Authentication service unavailable"
        )
    except Exception as e:
        logger.error(f"Login error: {e}")
        raise HTTPException(status_code=500, detail="Internal server error")


@router.post("/register")
async def register(request: Request):
    """Proxy register request to auth service"""
    try:
        body = await request.body()

        async with httpx.AsyncClient(timeout=10.0) as client:
            response = await client.post(
                f"{settings.AUTH_SERVICE_URL}/register",
                content=body,
                headers={"Content-Type": "application/json"}
            )

            return JSONResponse(
                status_code=response.status_code,
                content=response.json()
            )

    except httpx.RequestError as e:
        logger.error(f"Auth service unavailable: {e}")
        raise HTTPException(
            status_code=503,
            detail="Authentication service unavailable"
        )
    except Exception as e:
        logger.error(f"Register error: {e}")
        raise HTTPException(status_code=500, detail="Internal server error")


@router.post("/refresh")
async def refresh_token(request: Request):
    """Proxy refresh token request to auth service"""
    try:
        body = await request.body()

        async with httpx.AsyncClient(timeout=10.0) as client:
            response = await client.post(
                f"{settings.AUTH_SERVICE_URL}/refresh",
                content=body,
                headers={"Content-Type": "application/json"}
            )

            return JSONResponse(
                status_code=response.status_code,
                content=response.json()
            )

    except httpx.RequestError as e:
        logger.error(f"Auth service unavailable: {e}")
        raise HTTPException(
            status_code=503,
            detail="Authentication service unavailable"
        )
    except Exception as e:
        logger.error(f"Refresh token error: {e}")
        raise HTTPException(status_code=500, detail="Internal server error")


@router.post("/verify")
async def verify_token(request: Request):
    """Proxy token verification to auth service"""
    try:
        auth_header = request.headers.get("Authorization")
        if not auth_header:
            raise HTTPException(status_code=401, detail="Authorization header required")

        async with httpx.AsyncClient(timeout=5.0) as client:
            response = await client.post(
                f"{settings.AUTH_SERVICE_URL}/verify",
                headers={"Authorization": auth_header}
            )

            return JSONResponse(
                status_code=response.status_code,
                content=response.json()
            )

    except httpx.RequestError as e:
        logger.error(f"Auth service unavailable: {e}")
        raise HTTPException(
            status_code=503,
            detail="Authentication service unavailable"
        )
    except HTTPException:
        # Re-raise the explicit 401 above instead of masking it as a 500
        raise
    except Exception as e:
        logger.error(f"Token verification error: {e}")
        raise HTTPException(status_code=500, detail="Internal server error")


@router.post("/logout")
async def logout(request: Request):
    """Proxy logout request to auth service"""
    try:
        auth_header = request.headers.get("Authorization")
        if not auth_header:
            raise HTTPException(status_code=401, detail="Authorization header required")

        async with httpx.AsyncClient(timeout=5.0) as client:
            response = await client.post(
                f"{settings.AUTH_SERVICE_URL}/logout",
                headers={"Authorization": auth_header}
            )

            return JSONResponse(
                status_code=response.status_code,
                content=response.json()
            )

    except httpx.RequestError as e:
        logger.error(f"Auth service unavailable: {e}")
        raise HTTPException(
            status_code=503,
            detail="Authentication service unavailable"
        )
    except HTTPException:
        # Re-raise the explicit 401 above instead of masking it as a 500
        raise
    except Exception as e:
        logger.error(f"Logout error: {e}")
        raise HTTPException(status_code=500, detail="Internal server error")
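
The five handlers above repeat one proxy shape: read the body or auth header, forward to the auth service, and mirror the upstream status. A hypothetical shared helper (not part of this commit) that would collapse the repetition:

import httpx
from fastapi import HTTPException, Request
from fastapi.responses import JSONResponse

async def proxy_post(base_url: str, path: str, request: Request, timeout: float = 10.0) -> JSONResponse:
    """Forward a POST (body plus Authorization header, if present) upstream."""
    headers = {"Content-Type": "application/json"}
    if auth := request.headers.get("Authorization"):
        headers["Authorization"] = auth
    try:
        async with httpx.AsyncClient(timeout=timeout) as client:
            resp = await client.post(f"{base_url}{path}", content=await request.body(), headers=headers)
        return JSONResponse(status_code=resp.status_code, content=resp.json())
    except httpx.RequestError:
        raise HTTPException(status_code=503, detail="Upstream service unavailable")

# e.g. inside login(): return await proxy_post(settings.AUTH_SERVICE_URL, "/login", request)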

gateway/app/routes/training.py (Normal file, 166 lines)
@@ -0,0 +1,166 @@
"""
Training routes for gateway
"""

from fastapi import APIRouter, Request, HTTPException, Query
from fastapi.responses import JSONResponse
import httpx
import logging
from typing import Optional

from app.core.config import settings

logger = logging.getLogger(__name__)
router = APIRouter()


@router.post("/train")
async def start_training(request: Request):
    """Proxy training request to training service"""
    try:
        body = await request.body()
        auth_header = request.headers.get("Authorization")

        async with httpx.AsyncClient(timeout=30.0) as client:
            response = await client.post(
                f"{settings.TRAINING_SERVICE_URL}/train",
                content=body,
                headers={
                    "Content-Type": "application/json",
                    "Authorization": auth_header
                }
            )

            return JSONResponse(
                status_code=response.status_code,
                content=response.json()
            )

    except httpx.RequestError as e:
        logger.error(f"Training service unavailable: {e}")
        raise HTTPException(
            status_code=503,
            detail="Training service unavailable"
        )
    except Exception as e:
        logger.error(f"Training error: {e}")
        raise HTTPException(status_code=500, detail="Internal server error")


@router.get("/status/{training_job_id}")
async def get_training_status(training_job_id: str, request: Request):
    """Get training job status"""
    try:
        auth_header = request.headers.get("Authorization")

        async with httpx.AsyncClient(timeout=10.0) as client:
            response = await client.get(
                f"{settings.TRAINING_SERVICE_URL}/status/{training_job_id}",
                headers={"Authorization": auth_header}
            )

            return JSONResponse(
                status_code=response.status_code,
                content=response.json()
            )

    except httpx.RequestError as e:
        logger.error(f"Training service unavailable: {e}")
        raise HTTPException(
            status_code=503,
            detail="Training service unavailable"
        )
    except Exception as e:
        logger.error(f"Training status error: {e}")
        raise HTTPException(status_code=500, detail="Internal server error")


@router.get("/models")
async def get_trained_models(request: Request):
    """Get trained models"""
    try:
        auth_header = request.headers.get("Authorization")

        async with httpx.AsyncClient(timeout=10.0) as client:
            response = await client.get(
                f"{settings.TRAINING_SERVICE_URL}/models",
                headers={"Authorization": auth_header}
            )

            return JSONResponse(
                status_code=response.status_code,
                content=response.json()
            )

    except httpx.RequestError as e:
        logger.error(f"Training service unavailable: {e}")
        raise HTTPException(
            status_code=503,
            detail="Training service unavailable"
        )
    except Exception as e:
        logger.error(f"Get models error: {e}")
        raise HTTPException(status_code=500, detail="Internal server error")


@router.get("/jobs")
async def get_training_jobs(
    request: Request,
    limit: Optional[int] = Query(10, ge=1, le=100),
    offset: Optional[int] = Query(0, ge=0)
):
    """Get training jobs"""
    try:
        auth_header = request.headers.get("Authorization")

        async with httpx.AsyncClient(timeout=10.0) as client:
            response = await client.get(
                f"{settings.TRAINING_SERVICE_URL}/jobs",
                params={"limit": limit, "offset": offset},
                headers={"Authorization": auth_header}
            )

            return JSONResponse(
                status_code=response.status_code,
                content=response.json()
            )

    except httpx.RequestError as e:
        logger.error(f"Training service unavailable: {e}")
        raise HTTPException(
            status_code=503,
            detail="Training service unavailable"
        )
    except Exception as e:
        logger.error(f"Get training jobs error: {e}")
        raise HTTPException(status_code=500, detail="Internal server error")


# gateway/Dockerfile
FROM python:3.11-slim

WORKDIR /app

# Install system dependencies
RUN apt-get update && apt-get install -y \
    gcc \
    curl \
    && rm -rf /var/lib/apt/lists/*

# Copy requirements
COPY requirements.txt .

# Install Python dependencies
RUN pip install --no-cache-dir -r requirements.txt

# Copy application code
COPY . .

# Add shared libraries to Python path
ENV PYTHONPATH="/app:/app/shared:$PYTHONPATH"

# Expose port
EXPOSE 8000

# Health check
HEALTHCHECK --interval=30s --timeout=10s --start-period=5s --retries=3 \
    CMD curl -f http://localhost:8000/health || exit 1

# Run application
CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000"]

gateway/requirements.txt (Normal file, 13 lines)
@@ -0,0 +1,13 @@
fastapi==0.104.1
uvicorn[standard]==0.24.0
httpx==0.25.2
redis==5.0.1
pydantic==2.5.0
pydantic-settings==2.1.0
python-jose[cryptography]==3.3.0
python-multipart==0.0.6
prometheus-client==0.17.1
python-json-logger==2.0.4
email-validator==2.0.0
aio-pika==9.3.0
pytz==2023.3

gateway/shared/auth/__init__.py (Normal file, 0 lines)

gateway/shared/auth/decorators.py (Normal file, 41 lines)
@@ -0,0 +1,41 @@
"""
Authentication decorators for FastAPI
"""

from functools import wraps
from fastapi import HTTPException, Depends
from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials
import httpx
import logging

logger = logging.getLogger(__name__)

security = HTTPBearer()


def verify_service_token(auth_service_url: str):
    """Verify service token with auth service"""

    # HTTPBearer yields HTTPAuthorizationCredentials (hence token.credentials below)
    async def verify_token(token: HTTPAuthorizationCredentials = Depends(security)):
        try:
            async with httpx.AsyncClient() as client:
                response = await client.post(
                    f"{auth_service_url}/verify",
                    headers={"Authorization": f"Bearer {token.credentials}"}
                )

                if response.status_code == 200:
                    return response.json()
                else:
                    raise HTTPException(
                        status_code=401,
                        detail="Invalid authentication credentials"
                    )

        except httpx.RequestError as e:
            logger.error(f"Auth service unavailable: {e}")
            raise HTTPException(
                status_code=503,
                detail="Authentication service unavailable"
            )

    return verify_token
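
A sketch of wiring the returned dependency into a downstream service's route; the URL is the compose-internal auth address, and the route path is illustrative:

from fastapi import FastAPI, Depends
from shared.auth.decorators import verify_service_token

app = FastAPI()
require_user = verify_service_token("http://auth-service:8000")

@app.get("/models")
async def list_models(user: dict = Depends(require_user)):
    return {"requested_by": user}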

gateway/shared/auth/jwt_handler.py (Normal file, 58 lines)
@@ -0,0 +1,58 @@
"""
Shared JWT Authentication Handler
Used across all microservices for consistent authentication
"""

# PyJWT-style API; note gateway/requirements.txt pins python-jose instead,
# so either install PyJWT or switch to "from jose import jwt"
import jwt
from datetime import datetime, timedelta
from typing import Optional, Dict, Any
import logging

logger = logging.getLogger(__name__)

class JWTHandler:
    """JWT token handling for microservices"""

    def __init__(self, secret_key: str, algorithm: str = "HS256"):
        self.secret_key = secret_key
        self.algorithm = algorithm

    def create_access_token(self, data: Dict[str, Any], expires_delta: Optional[timedelta] = None) -> str:
        """Create JWT access token"""
        to_encode = data.copy()

        if expires_delta:
            expire = datetime.utcnow() + expires_delta
        else:
            expire = datetime.utcnow() + timedelta(minutes=30)

        to_encode.update({"exp": expire, "type": "access"})

        encoded_jwt = jwt.encode(to_encode, self.secret_key, algorithm=self.algorithm)
        return encoded_jwt

    def create_refresh_token(self, data: Dict[str, Any], expires_delta: Optional[timedelta] = None) -> str:
        """Create JWT refresh token"""
        to_encode = data.copy()

        if expires_delta:
            expire = datetime.utcnow() + expires_delta
        else:
            expire = datetime.utcnow() + timedelta(days=7)

        to_encode.update({"exp": expire, "type": "refresh"})

        encoded_jwt = jwt.encode(to_encode, self.secret_key, algorithm=self.algorithm)
        return encoded_jwt

    def verify_token(self, token: str) -> Optional[Dict[str, Any]]:
        """Verify and decode JWT token"""
        try:
            payload = jwt.decode(token, self.secret_key, algorithms=[self.algorithm])
            return payload
        except jwt.ExpiredSignatureError:
            logger.warning("Token has expired")
            return None
        except jwt.InvalidTokenError:
            logger.warning("Invalid token")
            return None
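
A quick round-trip sketch of the handler above, assuming PyJWT is installed and `shared/` is on PYTHONPATH:

```python
from datetime import timedelta
from shared.auth.jwt_handler import JWTHandler  # assumed import path

handler = JWTHandler(secret_key="dev-only-secret")
access = handler.create_access_token({"sub": "user-123"}, expires_delta=timedelta(minutes=5))

payload = handler.verify_token(access)
assert payload is not None and payload["type"] == "access"
# invalid or expired tokens come back as None rather than raising
assert handler.verify_token("not-a-token") is None
```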
0
gateway/shared/database/__init__.py
Normal file
56
gateway/shared/database/base.py
Normal file
@@ -0,0 +1,56 @@
"""
Base database configuration for all microservices
"""

import os
from sqlalchemy import create_engine
from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine
from sqlalchemy.orm import sessionmaker, declarative_base
from sqlalchemy.pool import StaticPool
import logging

logger = logging.getLogger(__name__)

Base = declarative_base()

class DatabaseManager:
    """Database manager for microservices"""

    def __init__(self, database_url: str):
        self.database_url = database_url
        self.async_engine = create_async_engine(
            database_url,
            echo=False,
            pool_pre_ping=True,
            pool_recycle=300,
            pool_size=20,
            max_overflow=30
        )

        self.async_session_local = sessionmaker(
            self.async_engine,
            class_=AsyncSession,
            expire_on_commit=False
        )

    async def get_db(self):
        """Get database session"""
        async with self.async_session_local() as session:
            try:
                yield session
            except Exception as e:
                logger.error(f"Database session error: {e}")
                await session.rollback()
                raise
            finally:
                await session.close()

    async def create_tables(self):
        """Create database tables"""
        async with self.async_engine.begin() as conn:
            await conn.run_sync(Base.metadata.create_all)

    async def drop_tables(self):
        """Drop database tables"""
        async with self.async_engine.begin() as conn:
            await conn.run_sync(Base.metadata.drop_all)
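
A sketch of wiring DatabaseManager into a FastAPI app; the connection string mirrors the defaults used elsewhere in this commit, and the import path is an assumption:

```python
from fastapi import FastAPI, Depends
from sqlalchemy.ext.asyncio import AsyncSession
from shared.database.base import DatabaseManager  # assumed import path

db = DatabaseManager("postgresql+asyncpg://auth_user:auth_pass123@localhost:5432/auth_db")
app = FastAPI()

@app.on_event("startup")
async def create_schema() -> None:
    await db.create_tables()  # creates tables for any models registered on Base

@app.get("/ping-db")
async def ping_db(session: AsyncSession = Depends(db.get_db)):
    # the generator dependency yields a session and rolls back on error
    return {"db": "ok"}
```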
0
gateway/shared/messaging/__init__.py
Normal file
73
gateway/shared/messaging/events.py
Normal file
@@ -0,0 +1,73 @@
"""
Event definitions for microservices communication
"""

from dataclasses import dataclass, field
from datetime import datetime
from typing import Dict, Any, Optional
import uuid

@dataclass
class BaseEvent:
    """Base event class"""
    # All fields carry defaults so the subclasses below, which override the
    # defaulted event_type field, remain valid dataclasses (otherwise Python
    # raises "non-default argument follows default argument" at import time).
    # Missing event_id and timestamp values are filled in by __post_init__.
    event_id: str = ""
    event_type: str = "base"
    service_name: str = ""
    timestamp: Optional[datetime] = None
    data: Dict[str, Any] = field(default_factory=dict)
    correlation_id: Optional[str] = None

    def __post_init__(self):
        if not self.event_id:
            self.event_id = str(uuid.uuid4())
        if not self.timestamp:
            self.timestamp = datetime.utcnow()

# Training Events
@dataclass
class TrainingStartedEvent(BaseEvent):
    event_type: str = "training.started"

@dataclass
class TrainingCompletedEvent(BaseEvent):
    event_type: str = "training.completed"

@dataclass
class TrainingFailedEvent(BaseEvent):
    event_type: str = "training.failed"

# Forecasting Events
@dataclass
class ForecastGeneratedEvent(BaseEvent):
    event_type: str = "forecast.generated"

@dataclass
class ForecastRequestedEvent(BaseEvent):
    event_type: str = "forecast.requested"

# User Events
@dataclass
class UserRegisteredEvent(BaseEvent):
    event_type: str = "user.registered"

@dataclass
class UserLoginEvent(BaseEvent):
    event_type: str = "user.login"

# Tenant Events
@dataclass
class TenantCreatedEvent(BaseEvent):
    event_type: str = "tenant.created"

@dataclass
class TenantUpdatedEvent(BaseEvent):
    event_type: str = "tenant.updated"

# Notification Events
@dataclass
class NotificationSentEvent(BaseEvent):
    event_type: str = "notification.sent"

@dataclass
class NotificationFailedEvent(BaseEvent):
    event_type: str = "notification.failed"
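
With the defaulted fields above, events can be built with keyword arguments only; `event_id` and `timestamp` are generated automatically. A small sketch (import path assumed):

```python
from dataclasses import asdict
from shared.messaging.events import TrainingStartedEvent  # assumed import path

event = TrainingStartedEvent(
    service_name="training-service",
    data={"job_id": "job-42", "tenant": "bakery-sol"},  # illustrative payload
)
print(event.event_type)           # "training.started"
print(asdict(event)["event_id"])  # auto-generated UUID string
```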
96
gateway/shared/messaging/rabbitmq.py
Normal file
@@ -0,0 +1,96 @@
"""
RabbitMQ messaging client for microservices
"""

import asyncio
import json
import logging
from typing import Dict, Any, Callable
import aio_pika
from aio_pika import connect_robust, Message, DeliveryMode

logger = logging.getLogger(__name__)

class RabbitMQClient:
    """RabbitMQ client for microservices communication"""

    def __init__(self, connection_url: str):
        self.connection_url = connection_url
        self.connection = None
        self.channel = None

    async def connect(self):
        """Connect to RabbitMQ"""
        try:
            self.connection = await connect_robust(self.connection_url)
            self.channel = await self.connection.channel()
            logger.info("Connected to RabbitMQ")
        except Exception as e:
            logger.error(f"Failed to connect to RabbitMQ: {e}")
            raise

    async def disconnect(self):
        """Disconnect from RabbitMQ"""
        if self.connection:
            await self.connection.close()
            logger.info("Disconnected from RabbitMQ")

    async def publish_event(self, exchange_name: str, routing_key: str, event_data: Dict[str, Any]):
        """Publish event to RabbitMQ"""
        try:
            if not self.channel:
                await self.connect()

            # Declare exchange
            exchange = await self.channel.declare_exchange(
                exchange_name,
                aio_pika.ExchangeType.TOPIC,
                durable=True
            )

            # Create message
            message = Message(
                json.dumps(event_data).encode(),
                delivery_mode=DeliveryMode.PERSISTENT,
                content_type="application/json"
            )

            # Publish message
            await exchange.publish(message, routing_key=routing_key)

            logger.info(f"Published event to {exchange_name} with routing key {routing_key}")

        except Exception as e:
            logger.error(f"Failed to publish event: {e}")
            raise

    async def consume_events(self, exchange_name: str, queue_name: str, routing_key: str, callback: Callable):
        """Consume events from RabbitMQ"""
        try:
            if not self.channel:
                await self.connect()

            # Declare exchange
            exchange = await self.channel.declare_exchange(
                exchange_name,
                aio_pika.ExchangeType.TOPIC,
                durable=True
            )

            # Declare queue
            queue = await self.channel.declare_queue(
                queue_name,
                durable=True
            )

            # Bind queue to exchange
            await queue.bind(exchange, routing_key)

            # Set up consumer
            await queue.consume(callback)

            logger.info(f"Started consuming events from {queue_name}")

        except Exception as e:
            logger.error(f"Failed to consume events: {e}")
            raise
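
A publish-side sketch for the client above; broker credentials mirror the docker-compose defaults, and the exchange name is an assumption:

```python
import asyncio
from shared.messaging.rabbitmq import RabbitMQClient  # assumed import path

async def main() -> None:
    client = RabbitMQClient("amqp://bakery:forecast123@localhost:5672/")
    # publish_event connects lazily if no channel is open yet
    await client.publish_event(
        exchange_name="bakery.events",   # illustrative exchange name
        routing_key="training.started",
        event_data={"job_id": "job-42"},
    )
    await client.disconnect()

asyncio.run(main())
```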
0
gateway/shared/monitoring/__init__.py
Normal file
77
gateway/shared/monitoring/logging.py
Normal file
@@ -0,0 +1,77 @@
"""
Centralized logging configuration for microservices
"""

import logging
import logging.config
import os
from typing import Dict, Any

def setup_logging(service_name: str, log_level: str = "INFO") -> None:
    """Set up logging configuration for a microservice"""

    config: Dict[str, Any] = {
        "version": 1,
        "disable_existing_loggers": False,
        "formatters": {
            "standard": {
                "format": "%(asctime)s [%(levelname)s] %(name)s: %(message)s"
            },
            "detailed": {
                "format": "%(asctime)s [%(levelname)s] %(name)s [%(filename)s:%(lineno)d] %(message)s"
            },
            "json": {
                "()": "pythonjsonlogger.jsonlogger.JsonFormatter",
                "format": "%(asctime)s %(name)s %(levelname)s %(message)s"
            }
        },
        "handlers": {
            "console": {
                "class": "logging.StreamHandler",
                "level": log_level,
                "formatter": "standard",
                "stream": "ext://sys.stdout"
            },
            "file": {
                "class": "logging.FileHandler",
                "level": log_level,
                "formatter": "detailed",
                "filename": f"/var/log/{service_name}.log",
                "mode": "a"
            }
        },
        "loggers": {
            "": {
                "handlers": ["console", "file"],
                "level": log_level,
                "propagate": False
            },
            "uvicorn": {
                "handlers": ["console"],
                "level": log_level,
                "propagate": False
            },
            "uvicorn.access": {
                "handlers": ["console"],
                "level": log_level,
                "propagate": False
            }
        }
    }

    # Add the logstash handler only in production: dictConfig instantiates
    # every configured handler eagerly, and TCPLogstashHandler needs the
    # python-logstash package (not pinned in the requirements files) plus a
    # reachable Logstash endpoint.
    if os.getenv("ENVIRONMENT") == "production":
        config["handlers"]["logstash"] = {
            "class": "logstash.TCPLogstashHandler",
            "host": os.getenv("LOGSTASH_HOST", "localhost"),
            "port": int(os.getenv("LOGSTASH_PORT", "5000")),
            "version": 1,
            "message_type": "logstash",
            "fqdn": False,
            "tags": [service_name]
        }
        config["loggers"][""]["handlers"].append("logstash")

    logging.config.dictConfig(config)
    logger = logging.getLogger(__name__)
    logger.info(f"Logging configured for {service_name}")
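
A minimal call sketch, assuming the import path below; note the file handler targets /var/log/<service>.log, which is writable inside the service containers but often not on a dev host:

```python
import logging
from shared.monitoring.logging import setup_logging  # assumed import path

setup_logging("forecasting-service", log_level="DEBUG")
logging.getLogger(__name__).info("logging wired up")
```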
112
gateway/shared/monitoring/metrics.py
Normal file
@@ -0,0 +1,112 @@
"""
Metrics collection for microservices
"""

import time
import logging
from typing import Dict, Any
from prometheus_client import Counter, Histogram, Gauge, start_http_server
from functools import wraps

logger = logging.getLogger(__name__)

# Prometheus metrics
REQUEST_COUNT = Counter(
    'http_requests_total',
    'Total HTTP requests',
    ['method', 'endpoint', 'status_code', 'service']
)

REQUEST_DURATION = Histogram(
    'http_request_duration_seconds',
    'HTTP request duration in seconds',
    ['method', 'endpoint', 'service']
)

ACTIVE_CONNECTIONS = Gauge(
    'active_connections',
    'Active database connections',
    ['service']
)

TRAINING_JOBS = Counter(
    'training_jobs_total',
    'Total training jobs',
    ['status', 'service']
)

FORECASTS_GENERATED = Counter(
    'forecasts_generated_total',
    'Total forecasts generated',
    ['service']
)

class MetricsCollector:
    """Metrics collector for microservices"""

    def __init__(self, service_name: str):
        self.service_name = service_name
        self.start_time = time.time()

    def start_metrics_server(self, port: int = 8080):
        """Start Prometheus metrics server"""
        try:
            start_http_server(port)
            logger.info(f"Metrics server started on port {port}")
        except Exception as e:
            logger.error(f"Failed to start metrics server: {e}")

    def record_request(self, method: str, endpoint: str, status_code: int, duration: float):
        """Record HTTP request metrics"""
        REQUEST_COUNT.labels(
            method=method,
            endpoint=endpoint,
            status_code=status_code,
            service=self.service_name
        ).inc()

        REQUEST_DURATION.labels(
            method=method,
            endpoint=endpoint,
            service=self.service_name
        ).observe(duration)

    def record_training_job(self, status: str):
        """Record training job metrics"""
        TRAINING_JOBS.labels(
            status=status,
            service=self.service_name
        ).inc()

    def record_forecast_generated(self):
        """Record forecast generation metrics"""
        FORECASTS_GENERATED.labels(
            service=self.service_name
        ).inc()

    def set_active_connections(self, count: int):
        """Set active database connections"""
        ACTIVE_CONNECTIONS.labels(
            service=self.service_name
        ).set(count)

def metrics_middleware(metrics_collector: MetricsCollector):
    """Middleware to collect metrics"""

    # Starlette HTTP middleware must be async and await call_next,
    # otherwise the response is never produced
    async def middleware(request, call_next):
        start_time = time.time()

        response = await call_next(request)

        duration = time.time() - start_time

        metrics_collector.record_request(
            method=request.method,
            endpoint=request.url.path,
            status_code=response.status_code,
            duration=duration
        )

        return response

    return middleware
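
A sketch of plugging the collector and middleware into FastAPI; port 8080 matches the Prometheus scrape targets later in this commit, and the import path is an assumption:

```python
from fastapi import FastAPI
from shared.monitoring.metrics import MetricsCollector, metrics_middleware  # assumed import path

app = FastAPI()
collector = MetricsCollector("gateway")

# register the HTTP middleware function returned by the factory
app.middleware("http")(metrics_middleware(collector))

@app.on_event("startup")
async def start_metrics() -> None:
    collector.start_metrics_server(8080)  # Prometheus scrapes <service>:8080
```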
0
gateway/shared/utils/__init__.py
Normal file
71
gateway/shared/utils/datetime_utils.py
Normal file
@@ -0,0 +1,71 @@
"""
DateTime utilities for microservices
"""

from datetime import datetime, timezone, timedelta
from typing import Optional
import pytz

def utc_now() -> datetime:
    """Get current UTC datetime"""
    return datetime.now(timezone.utc)

def madrid_now() -> datetime:
    """Get current Madrid datetime"""
    madrid_tz = pytz.timezone('Europe/Madrid')
    return datetime.now(madrid_tz)

def to_utc(dt: datetime) -> datetime:
    """Convert datetime to UTC"""
    if dt.tzinfo is None:
        dt = dt.replace(tzinfo=timezone.utc)
    return dt.astimezone(timezone.utc)

def to_madrid(dt: datetime) -> datetime:
    """Convert datetime to Madrid timezone"""
    madrid_tz = pytz.timezone('Europe/Madrid')
    if dt.tzinfo is None:
        dt = dt.replace(tzinfo=timezone.utc)
    return dt.astimezone(madrid_tz)

def format_datetime(dt: datetime, format_str: str = "%Y-%m-%d %H:%M:%S") -> str:
    """Format datetime as string"""
    return dt.strftime(format_str)

def parse_datetime(dt_str: str, format_str: str = "%Y-%m-%d %H:%M:%S") -> datetime:
    """Parse datetime from string"""
    return datetime.strptime(dt_str, format_str)

def is_business_hours(dt: Optional[datetime] = None) -> bool:
    """Check if datetime is during business hours (9 AM - 6 PM Madrid time)"""
    if dt is None:
        dt = madrid_now()

    if dt.tzinfo is None:
        dt = dt.replace(tzinfo=timezone.utc)

    madrid_dt = to_madrid(dt)

    # Check if it's a weekday (Monday=0, Sunday=6)
    if madrid_dt.weekday() >= 5:  # Weekend
        return False

    # Check if it's business hours
    return 9 <= madrid_dt.hour < 18

def next_business_day(dt: Optional[datetime] = None) -> datetime:
    """Get next business day"""
    if dt is None:
        dt = madrid_now()

    if dt.tzinfo is None:
        dt = dt.replace(tzinfo=timezone.utc)

    madrid_dt = to_madrid(dt)

    # Add days until we reach a weekday
    while madrid_dt.weekday() >= 5:  # Weekend
        madrid_dt += timedelta(days=1)

    # Set to 9 AM
    return madrid_dt.replace(hour=9, minute=0, second=0, microsecond=0)
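
A few illustrative calls (import path assumed); note that next_business_day only skips weekends, so on a weekday it returns the same date at 09:00 Madrid time:

```python
from shared.utils.datetime_utils import (  # assumed import path
    madrid_now, is_business_hours, next_business_day, format_datetime,
)

now = madrid_now()
print(format_datetime(now))    # e.g. "2024-01-15 10:30:00"
print(is_business_hours(now))  # True on weekdays between 09:00 and 17:59
print(next_business_day(now))  # same or next weekday at 09:00 Madrid time
```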
67
gateway/shared/utils/validation.py
Normal file
@@ -0,0 +1,67 @@
"""
Validation utilities for microservices
"""

import re
from typing import Any, Optional
from email_validator import validate_email, EmailNotValidError

def validate_spanish_phone(phone: str) -> bool:
    """Validate Spanish phone number"""
    # Spanish mobile/landline pattern: optional +34/0034/34 prefix, then 9 digits starting with 6-9
    pattern = r'^(\+34|0034|34)?[6-9]\d{8}$'
    return bool(re.match(pattern, phone.replace(' ', '').replace('-', '')))

def validate_email_address(email: str) -> bool:
    """Validate email address"""
    try:
        validate_email(email)
        return True
    except EmailNotValidError:
        return False

def validate_tenant_name(name: str) -> bool:
    """Validate tenant name"""
    # Must be 2-50 characters, letters, numbers, spaces, hyphens, apostrophes
    pattern = r"^[a-zA-ZÀ-ÿ0-9\s\-']{2,50}$"
    return bool(re.match(pattern, name))

def validate_address(address: str) -> bool:
    """Validate address"""
    # Must be 5-200 characters
    return 5 <= len(address.strip()) <= 200

def validate_coordinates(latitude: float, longitude: float) -> bool:
    """Validate Madrid coordinates"""
    # Madrid is roughly between these coordinates
    madrid_bounds = {
        'lat_min': 40.3,
        'lat_max': 40.6,
        'lon_min': -3.8,
        'lon_max': -3.5
    }

    return (
        madrid_bounds['lat_min'] <= latitude <= madrid_bounds['lat_max'] and
        madrid_bounds['lon_min'] <= longitude <= madrid_bounds['lon_max']
    )

def validate_product_name(name: str) -> bool:
    """Validate product name"""
    # Must be 1-50 characters, letters, numbers, spaces
    pattern = r"^[a-zA-ZÀ-ÿ0-9\s]{1,50}$"
    return bool(re.match(pattern, name))

def validate_positive_number(value: Any) -> bool:
    """Validate positive number"""
    try:
        return float(value) > 0
    except (ValueError, TypeError):
        return False

def validate_non_negative_number(value: Any) -> bool:
    """Validate non-negative number"""
    try:
        return float(value) >= 0
    except (ValueError, TypeError):
        return False
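
A handful of illustrative checks against these validators (import path assumed):

```python
from shared.utils.validation import (  # assumed import path
    validate_spanish_phone, validate_email_address, validate_coordinates,
)

assert validate_spanish_phone("+34 612 345 678")      # spaces/hyphens are stripped first
assert not validate_spanish_phone("+34 112 345 678")  # must start with 6-9 after the prefix
assert validate_email_address("owner@bakery.es")
assert validate_coordinates(40.4168, -3.7038)         # Puerta del Sol, inside the Madrid bounds
```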
31
infrastructure/monitoring/prometheus/prometheus.yml
Normal file
@@ -0,0 +1,31 @@
global:
  scrape_interval: 15s

scrape_configs:
  - job_name: 'gateway'
    static_configs:
      - targets: ['gateway:8080']

  - job_name: 'auth-service'
    static_configs:
      - targets: ['auth-service:8080']

  - job_name: 'training-service'
    static_configs:
      - targets: ['training-service:8080']

  - job_name: 'forecasting-service'
    static_configs:
      - targets: ['forecasting-service:8080']

  - job_name: 'data-service'
    static_configs:
      - targets: ['data-service:8080']

  - job_name: 'tenant-service'
    static_configs:
      - targets: ['tenant-service:8080']

  - job_name: 'notification-service'
    static_configs:
      - targets: ['notification-service:8080']
18
scripts/deploy.sh
Executable file
@@ -0,0 +1,18 @@
#!/bin/bash

echo "🚀 Deploying Bakery Forecasting Platform..."

# Build and deploy all services
docker-compose build
docker-compose up -d

echo "Waiting for services to be healthy..."
sleep 30

# Check service health
echo "Checking service health..."
curl -f http://localhost:8000/health || echo "Gateway health check failed"

echo "✅ Deployment completed"
echo "Gateway: http://localhost:8000"
echo "API Docs: http://localhost:8000/docs"
879
scripts/setup.sh
Executable file
@@ -0,0 +1,879 @@
#!/bin/bash

# scripts/setup.sh
# Intelligent Setup Script - Extract artifacts and create microservices structure

set -e

echo "🚀 Setting up Bakery Forecasting Microservices Platform"
echo "========================================================"

# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m' # No Color

print_step() {
    echo -e "${BLUE}➤${NC} $1"
}

print_success() {
    echo -e "${GREEN}✓${NC} $1"
}

print_warning() {
    echo -e "${YELLOW}⚠${NC} $1"
}

print_error() {
    echo -e "${RED}✗${NC} $1"
}

# Check prerequisites
print_step "Checking prerequisites..."

command -v docker >/dev/null 2>&1 || {
    print_error "Docker is required but not installed. Please install Docker first."
    exit 1
}

command -v docker-compose >/dev/null 2>&1 || {
    print_error "Docker Compose is required but not installed. Please install Docker Compose first."
    exit 1
}

print_success "Prerequisites check passed"

# Function to extract files from artifact files
extract_artifact_files() {
    local artifact_file="$1"
    local description="$2"

    print_step "Processing $description..."

    if [ ! -f "$artifact_file" ]; then
        print_warning "Artifact file $artifact_file not found, skipping..."
        return
    fi

    # Read the artifact file and extract individual files
    local current_file=""
    local current_content=""
    local in_file=false

    while IFS= read -r line; do
        # Check if line starts with a file path (contains .py, .yml, .md, .sh, etc.)
        if [[ "$line" =~ ^#[[:space:]]*(.*\.(py|yml|yaml|md|sh|txt|js|json|html|css|Dockerfile|requirements\.txt))$ ]]; then
            # Save previous file if we were processing one
            if [ "$in_file" = true ] && [ -n "$current_file" ]; then
                # Create directory if it doesn't exist
                local dir=$(dirname "$current_file")
                mkdir -p "$dir"

                # Write content to file (-e expands the \n separators accumulated below,
                # matching the final-file write at the end of this function)
                echo -e "$current_content" > "$current_file"
                print_success "Created: $current_file"
            fi

            # Start new file
            current_file=$(echo "$line" | sed 's/^#[[:space:]]*//')
            current_content=""
            in_file=true

        elif [ "$in_file" = true ]; then
            # Add line to current file content
            if [ -n "$current_content" ]; then
                current_content="$current_content\n$line"
            else
                current_content="$line"
            fi
        fi
    done < "$artifact_file"

    # Save the last file
    if [ "$in_file" = true ] && [ -n "$current_file" ]; then
        local dir=$(dirname "$current_file")
        mkdir -p "$dir"
        echo -e "$current_content" > "$current_file"
        print_success "Created: $current_file"
    fi
}

# Function to extract Python files with multiple file markers
extract_python_artifact() {
    local artifact_file="$1"
    local description="$2"

    print_step "Processing $description..."

    if [ ! -f "$artifact_file" ]; then
        print_warning "Artifact file $artifact_file not found, skipping..."
        return
    fi

    # Use Python to parse the multi-file artifact
    python3 << EOF
import re
import os

def extract_files(filename):
    with open(filename, 'r') as f:
        content = f.read()

    # Split by file markers (lines starting with # and containing file paths)
    files = {}
    current_file = None
    current_content = []

    for line in content.split('\n'):
        # Check for file path markers
        if re.match(r'^#\s+\S+\.(py|yml|yaml|txt|sh|json|html|css|js|Dockerfile)', line):
            # Save previous file
            if current_file and current_content:
                files[current_file] = '\n'.join(current_content)

            # Start new file
            current_file = re.sub(r'^#\s+', '', line)
            current_content = []
        elif current_file:
            current_content.append(line)

    # Save last file
    if current_file and current_content:
        files[current_file] = '\n'.join(current_content)

    # Write files
    for filepath, file_content in files.items():
        # Clean up the content (remove leading/trailing quotes if present)
        file_content = file_content.strip()
        if file_content.startswith('"""') and file_content.endswith('"""'):
            file_content = file_content[3:-3]
        elif file_content.startswith("'''") and file_content.endswith("'''"):
            file_content = file_content[3:-3]

        # Create directory
        os.makedirs(os.path.dirname(filepath) if os.path.dirname(filepath) else '.', exist_ok=True)

        # Write file
        with open(filepath, 'w') as f:
            f.write(file_content)

        print(f"✓ Created: {filepath}")

extract_files('$artifact_file')
EOF
}

# Create base project structure first
print_step "Creating base project structure..."

# Create main directories
mkdir -p {gateway,services/{auth,training,forecasting,data,tenant,notification},shared,frontend/{dashboard,marketing},infrastructure,deployment,tests,docs,scripts}

# Create subdirectories for each service
for service in auth training forecasting data tenant notification; do
    mkdir -p services/$service/{app/{core,models,schemas,services,api,ml},migrations/versions,tests}
    touch services/$service/app/__init__.py
    touch services/$service/app/core/__init__.py
    touch services/$service/app/models/__init__.py
    touch services/$service/app/schemas/__init__.py
    touch services/$service/app/services/__init__.py
    touch services/$service/app/api/__init__.py
    if [ "$service" = "training" ]; then
        touch services/$service/app/ml/__init__.py
    fi
done

# Create gateway structure
mkdir -p gateway/{app/{core,middleware,routes},tests}
touch gateway/app/__init__.py
touch gateway/app/core/__init__.py
touch gateway/app/middleware/__init__.py
touch gateway/app/routes/__init__.py

# Create shared library structure
mkdir -p shared/{auth,database,messaging,monitoring,utils}
for lib in auth database messaging monitoring utils; do
    touch shared/$lib/__init__.py
done

# Create infrastructure directories
mkdir -p infrastructure/{docker,kubernetes,terraform,monitoring}/{base,dev,staging,production}
mkdir -p infrastructure/monitoring/{prometheus,grafana,logstash}

print_success "Base project structure created"

# Extract files from artifacts
print_step "Extracting files from artifacts..."

# Process shared libraries
if [ -f "shared_libraries.py" ]; then
    extract_python_artifact "shared_libraries.py" "Shared Libraries"
fi

# Process gateway service
if [ -f "gateway_service.py" ]; then
    extract_python_artifact "gateway_service.py" "Gateway Service"
fi

# Process auth service
if [ -f "auth_service.py" ]; then
    extract_python_artifact "auth_service.py" "Authentication Service"
fi

# Process training service
if [ -f "training_service.py" ]; then
    extract_python_artifact "training_service.py" "Training Service"
fi

print_step "Creating missing service files..."

# Create remaining service files that might not be in artifacts
for service in forecasting data tenant notification; do
    service_dir="services/$service"

    # Create main.py if it doesn't exist
    if [ ! -f "$service_dir/app/main.py" ]; then
        cat > "$service_dir/app/main.py" << EOF
"""
$(echo $service | sed 's/.*/\L&/; s/[a-z]*/\u&/g') Service
"""

import logging
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware

from app.core.config import settings
from app.core.database import database_manager
from shared.monitoring.logging import setup_logging
from shared.monitoring.metrics import MetricsCollector

# Setup logging
setup_logging("$service-service", "INFO")
logger = logging.getLogger(__name__)

# Create FastAPI app
app = FastAPI(
    title="$(echo $service | sed 's/.*/\L&/; s/[a-z]*/\u&/g') Service",
    description="$(echo $service | sed 's/.*/\L&/; s/[a-z]*/\u&/g') service for bakery forecasting",
    version="1.0.0"
)

# Initialize metrics collector
metrics_collector = MetricsCollector("$service-service")

# CORS middleware
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

@app.on_event("startup")
async def startup_event():
    """Application startup"""
    logger.info("Starting $(echo $service | sed 's/.*/\L&/; s/[a-z]*/\u&/g') Service")

    # Create database tables
    await database_manager.create_tables()

    # Start metrics server
    metrics_collector.start_metrics_server(8080)

    logger.info("$(echo $service | sed 's/.*/\L&/; s/[a-z]*/\u&/g') Service started successfully")

@app.get("/health")
async def health_check():
    """Health check endpoint"""
    return {
        "status": "healthy",
        "service": "$service-service",
        "version": "1.0.0"
    }

if __name__ == "__main__":
    import uvicorn
    uvicorn.run(app, host="0.0.0.0", port=8000)
EOF
        print_success "Created: $service_dir/app/main.py"
    fi

    # Create config.py if it doesn't exist
    if [ ! -f "$service_dir/app/core/config.py" ]; then
        cat > "$service_dir/app/core/config.py" << EOF
"""
$(echo $service | sed 's/.*/\L&/; s/[a-z]*/\u&/g') service configuration
"""

import os
# pydantic 2.x is pinned in requirements.txt, so BaseSettings lives in pydantic-settings
from pydantic_settings import BaseSettings

class Settings(BaseSettings):
    """Application settings"""

    # Basic settings
    APP_NAME: str = "$(echo $service | sed 's/.*/\L&/; s/[a-z]*/\u&/g') Service"
    VERSION: str = "1.0.0"
    DEBUG: bool = os.getenv("DEBUG", "False").lower() == "true"
    LOG_LEVEL: str = os.getenv("LOG_LEVEL", "INFO")

    # Database settings
    DATABASE_URL: str = os.getenv("DATABASE_URL", "postgresql+asyncpg://${service}_user:${service}_pass123@${service}-db:5432/${service}_db")

    # Redis settings
    REDIS_URL: str = os.getenv("REDIS_URL", "redis://redis:6379/0")

    # RabbitMQ settings
    RABBITMQ_URL: str = os.getenv("RABBITMQ_URL", "amqp://bakery:forecast123@rabbitmq:5672/")

    # Service URLs
    AUTH_SERVICE_URL: str = os.getenv("AUTH_SERVICE_URL", "http://auth-service:8000")

    class Config:
        env_file = ".env"

settings = Settings()
EOF
        print_success "Created: $service_dir/app/core/config.py"
    fi

    # Create database.py if it doesn't exist
    if [ ! -f "$service_dir/app/core/database.py" ]; then
        cat > "$service_dir/app/core/database.py" << EOF
"""
Database configuration for $service service
"""

from shared.database.base import DatabaseManager
from app.core.config import settings

# Initialize database manager
database_manager = DatabaseManager(settings.DATABASE_URL)

# Alias for convenience
get_db = database_manager.get_db
EOF
        print_success "Created: $service_dir/app/core/database.py"
    fi

    # Create requirements.txt if it doesn't exist
    if [ ! -f "$service_dir/requirements.txt" ]; then
        cat > "$service_dir/requirements.txt" << 'EOF'
fastapi==0.104.1
uvicorn[standard]==0.24.0
sqlalchemy==2.0.23
asyncpg==0.29.0
alembic==1.12.1
pydantic==2.5.0
pydantic-settings==2.1.0
httpx==0.25.2
redis==5.0.1
aio-pika==9.3.0
prometheus-client==0.17.1
python-json-logger==2.0.4
pytz==2023.3
EOF
        print_success "Created: $service_dir/requirements.txt"
    fi

    # Create Dockerfile if it doesn't exist
    if [ ! -f "$service_dir/Dockerfile" ]; then
        cat > "$service_dir/Dockerfile" << 'EOF'
FROM python:3.11-slim

WORKDIR /app

# Install system dependencies
RUN apt-get update && apt-get install -y \
    gcc \
    curl \
    && rm -rf /var/lib/apt/lists/*

# Copy requirements
COPY requirements.txt .

# Install Python dependencies
RUN pip install --no-cache-dir -r requirements.txt

# Copy application code
COPY . .

# Add shared libraries to Python path
ENV PYTHONPATH="/app:/app/shared:$PYTHONPATH"

# Expose port
EXPOSE 8000

# Health check
HEALTHCHECK --interval=30s --timeout=10s --start-period=5s --retries=3 \
    CMD curl -f http://localhost:8000/health || exit 1

# Run application
CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000"]
EOF
        print_success "Created: $service_dir/Dockerfile"
    fi
done

# Create .env file
print_step "Creating environment configuration..."

if [ ! -f ".env" ]; then
    cat > .env << 'EOF'
# Environment
ENVIRONMENT=development
DEBUG=true
LOG_LEVEL=INFO

# Database URLs
AUTH_DB_URL=postgresql+asyncpg://auth_user:auth_pass123@auth-db:5432/auth_db
TRAINING_DB_URL=postgresql+asyncpg://training_user:training_pass123@training-db:5432/training_db
FORECASTING_DB_URL=postgresql+asyncpg://forecasting_user:forecasting_pass123@forecasting-db:5432/forecasting_db
DATA_DB_URL=postgresql+asyncpg://data_user:data_pass123@data-db:5432/data_db
TENANT_DB_URL=postgresql+asyncpg://tenant_user:tenant_pass123@tenant-db:5432/tenant_db
NOTIFICATION_DB_URL=postgresql+asyncpg://notification_user:notification_pass123@notification-db:5432/notification_db

# Redis
REDIS_URL=redis://redis:6379

# RabbitMQ
RABBITMQ_URL=amqp://bakery:forecast123@rabbitmq:5672/

# JWT
JWT_SECRET_KEY=your-super-secret-jwt-key-change-in-production-please
JWT_ALGORITHM=HS256
JWT_ACCESS_TOKEN_EXPIRE_MINUTES=30
JWT_REFRESH_TOKEN_EXPIRE_DAYS=7

# External APIs
AEMET_API_KEY=your-aemet-api-key-here
MADRID_OPENDATA_API_KEY=your-madrid-opendata-key-here

# CORS
CORS_ORIGINS=http://localhost:3000,http://localhost:3001

# Email
SMTP_HOST=smtp.gmail.com
SMTP_PORT=587
SMTP_USER=your-email@gmail.com
SMTP_PASSWORD=your-email-password

# WhatsApp
WHATSAPP_API_KEY=your-whatsapp-api-key-here

# Monitoring
PROMETHEUS_URL=http://prometheus:9090
GRAFANA_URL=http://grafana:3000
EOF
    print_success "Environment configuration created"
fi

# Create monitoring configuration
print_step "Creating monitoring configuration..."

if [ ! -f "infrastructure/monitoring/prometheus/prometheus.yml" ]; then
    cat > infrastructure/monitoring/prometheus/prometheus.yml << 'EOF'
global:
  scrape_interval: 15s

scrape_configs:
  - job_name: 'gateway'
    static_configs:
      - targets: ['gateway:8080']

  - job_name: 'auth-service'
    static_configs:
      - targets: ['auth-service:8080']

  - job_name: 'training-service'
    static_configs:
      - targets: ['training-service:8080']

  - job_name: 'forecasting-service'
    static_configs:
      - targets: ['forecasting-service:8080']

  - job_name: 'data-service'
    static_configs:
      - targets: ['data-service:8080']

  - job_name: 'tenant-service'
    static_configs:
      - targets: ['tenant-service:8080']

  - job_name: 'notification-service'
    static_configs:
      - targets: ['notification-service:8080']
EOF
    print_success "Prometheus configuration created"
fi

# Create utility scripts
print_step "Creating utility scripts..."

# Create test script
cat > scripts/test.sh << 'EOF'
#!/bin/bash

echo "🧪 Running tests for all services..."

# Run tests for each service
for service in auth training forecasting data tenant notification; do
    echo "Testing $service service..."
    if docker-compose ps | grep -q "${service}-service.*Up"; then
        docker-compose exec -T ${service}-service python -m pytest tests/ -v || echo "Tests failed for $service"
    else
        echo "Service $service is not running, skipping tests"
    fi
done

echo "✅ Test run completed"
EOF

# Create deploy script
cat > scripts/deploy.sh << 'EOF'
#!/bin/bash

echo "🚀 Deploying Bakery Forecasting Platform..."

# Build and deploy all services
docker-compose build
docker-compose up -d

echo "Waiting for services to be healthy..."
sleep 30

# Check service health
echo "Checking service health..."
curl -f http://localhost:8000/health || echo "Gateway health check failed"

echo "✅ Deployment completed"
echo "Gateway: http://localhost:8000"
echo "API Docs: http://localhost:8000/docs"
EOF

# Make scripts executable
chmod +x scripts/*.sh

print_success "Utility scripts created"

# Create .gitignore
print_step "Creating .gitignore..."

if [ ! -f ".gitignore" ]; then
    cat > .gitignore << 'EOF'
# Environment
.env
.env.local
.env.development.local
.env.test.local
.env.production.local

# Python
__pycache__/
*.py[cod]
*$py.class
*.so
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
.pytest_cache/
.coverage
.coverage.*
htmlcov/
.tox/
.nox/
.hypothesis/
.mypy_cache/
.dmypy.json
dmypy.json
.pyre/

# Virtual Environment
venv/
ENV/
env/
.venv

# Node
node_modules/
npm-debug.log*
yarn-debug.log*
yarn-error.log*
.pnpm-debug.log*
.npm
.eslintcache
.next
out/
build/
dist/

# IDE
.vscode/
.idea/
*.swp
*.swo
*~
.DS_Store

# Logs
logs/
*.log

# Database
*.db
*.sqlite
*.sqlite3

# ML Models
*.pkl
*.joblib
*.h5
models/

# Data
data/external/
data/processed/
*.csv
*.xlsx

# Docker
.docker/

# Infrastructure
*.tfstate
*.tfstate.backup
.terraform/
.terraform.lock.hcl

# Kubernetes
kubeconfig
*.yaml.bak

# Monitoring
prometheus_data/
grafana_data/
elasticsearch_data/

# Artifacts (from Claude)
*_service.py
*_libraries.py
*.md
setup_scripts.sh
EOF
    print_success ".gitignore created"
fi

# Create README
print_step "Creating documentation..."

if [ ! -f "README.md" ]; then
    cat > README.md << 'EOF'
# Bakery Forecasting Platform - Microservices

## Overview
AI-powered demand forecasting platform for bakeries in Madrid, Spain, built on a microservices architecture.

## Architecture
- **API Gateway**: Central entry point for all client requests
- **Auth Service**: User authentication and authorization
- **Training Service**: ML model training for demand forecasting
- **Forecasting Service**: Generate predictions using trained models
- **Data Service**: External data integration (weather, traffic, events)
- **Tenant Service**: Multi-tenant management
- **Notification Service**: Email and WhatsApp notifications

## Quick Start

### Prerequisites
- Docker and Docker Compose
- Python 3.11+
- Node.js 18+

### Setup
```bash
# Run setup script (this script!)
./scripts/setup.sh

# Start services
docker-compose up -d

# Check service health
curl http://localhost:8000/health
```

### Services
- **Gateway**: http://localhost:8000
- **API Docs**: http://localhost:8000/docs
- **Grafana**: http://localhost:3002
- **Prometheus**: http://localhost:9090
- **RabbitMQ Management**: http://localhost:15672

### Development

#### Running Tests
```bash
./scripts/test.sh
```

#### Building Services
```bash
docker-compose build
```

#### Viewing Logs
```bash
# All services
docker-compose logs -f

# Specific service
docker-compose logs -f auth-service
```

#### Service URLs (Development)
- Gateway: http://localhost:8000
- Auth Service: http://localhost:8001
- Training Service: http://localhost:8002
- Forecasting Service: http://localhost:8003
- Data Service: http://localhost:8004
- Tenant Service: http://localhost:8005
- Notification Service: http://localhost:8006

## Environment Variables

The setup script creates a starter `.env`; update the following values:

```bash
# External API Keys
AEMET_API_KEY=your-aemet-api-key
MADRID_OPENDATA_API_KEY=your-madrid-opendata-key

# Email Configuration
SMTP_USER=your-email@gmail.com
SMTP_PASSWORD=your-email-password

# WhatsApp API
WHATSAPP_API_KEY=your-whatsapp-api-key

# JWT Secret (change in production!)
JWT_SECRET_KEY=your-super-secret-jwt-key
```

## Troubleshooting

### Services won't start
```bash
# Check if ports are available
docker-compose ps
netstat -tulpn | grep :8000

# Restart services
docker-compose down
docker-compose up -d
```

### Database connection issues
```bash
# Check database containers
docker-compose logs auth-db
docker-compose logs training-db

# Reset databases
docker-compose down -v
docker-compose up -d
```

### Service communication issues
```bash
# Check service health
curl http://localhost:8000/health
curl http://localhost:8001/health
curl http://localhost:8002/health

# Check RabbitMQ
open http://localhost:15672
# User: bakery, Password: forecast123
```

## Next Steps

1. **Configure External APIs**: Add your AEMET and Madrid Open Data API keys
2. **Test Authentication**: Register a user and test login
3. **Upload Sales Data**: Import historical sales data
4. **Train Models**: Start your first training job
5. **Generate Forecasts**: Create demand predictions

## License
MIT License
EOF
    print_success "Documentation created"
fi

# Final steps
print_step "Final setup steps..."

# Copy shared libraries to each service (for Docker builds)
for service in auth training forecasting data tenant notification; do
    if [ -d "shared" ]; then
        cp -r shared services/$service/ 2>/dev/null || true
    fi
done

# Copy shared libraries to gateway
if [ -d "shared" ]; then
    cp -r shared gateway/ 2>/dev/null || true
fi

# Initialize Git repository if not exists
if [ ! -d ".git" ]; then
    git init
    git add .
    git commit -m "Initial microservices setup from artifacts"
    print_success "Git repository initialized"
fi

echo
echo "🎉 Setup completed successfully!"
echo "==============================================="
echo
echo "Next steps:"
echo "1. Update .env with your actual API keys"
echo "2. Start services: docker-compose up -d"
echo "3. Check health: curl http://localhost:8000/health"
echo "4. View API docs: http://localhost:8000/docs"
echo "5. Monitor services: http://localhost:3002 (Grafana)"
echo
echo "Services will be available at:"
echo "- Gateway: http://localhost:8000"
echo "- Auth Service: http://localhost:8001"
echo "- Training Service: http://localhost:8002"
echo "- Monitoring: http://localhost:3002"
echo "- RabbitMQ: http://localhost:15672"
echo
echo "Artifact files processed:"
# "|| true" keeps set -e from aborting the script when an artifact is absent
[ -f "shared_libraries.py" ] && echo "✓ shared_libraries.py" || true
[ -f "gateway_service.py" ] && echo "✓ gateway_service.py" || true
[ -f "auth_service.py" ] && echo "✓ auth_service.py" || true
[ -f "training_service.py" ] && echo "✓ training_service.py" || true
[ -f "docker-compose.yml" ] && echo "✓ docker-compose.yml" || true
echo
echo "Happy coding! 🚀"
15
scripts/test.sh
Executable file
@@ -0,0 +1,15 @@
#!/bin/bash

echo "🧪 Running tests for all services..."

# Run tests for each service
for service in auth training forecasting data tenant notification; do
    echo "Testing $service service..."
    if docker-compose ps | grep -q "${service}-service.*Up"; then
        docker-compose exec -T ${service}-service python -m pytest tests/ -v || echo "Tests failed for $service"
    else
        echo "Service $service is not running, skipping tests"
    fi
done

echo "✅ Test run completed"
0
services/auth/app/__init__.py
Normal file
0
services/auth/app/api/__init__.py
Normal file
124
services/auth/app/api/auth.py
Normal file
@@ -0,0 +1,124 @@
"""
Authentication API routes
"""

from fastapi import APIRouter, Depends, HTTPException, status, Request
from sqlalchemy.ext.asyncio import AsyncSession
import logging

from app.core.database import get_db
from app.schemas.auth import UserRegistration, UserLogin, TokenResponse, RefreshTokenRequest, UserResponse
from app.services.auth_service import AuthService
from app.core.security import security_manager

logger = logging.getLogger(__name__)
router = APIRouter()

@router.post("/register", response_model=UserResponse)
async def register(
    user_data: UserRegistration,
    db: AsyncSession = Depends(get_db)
):
    """Register a new user"""
    try:
        return await AuthService.register_user(user_data, db)
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Registration error: {e}")
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Registration failed"
        )

@router.post("/login", response_model=TokenResponse)
async def login(
    login_data: UserLogin,
    request: Request,
    db: AsyncSession = Depends(get_db)
):
    """User login"""
    try:
        ip_address = request.client.host
        user_agent = request.headers.get("user-agent", "")

        return await AuthService.login_user(login_data, db, ip_address, user_agent)
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Login error: {e}")
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Login failed"
        )

@router.post("/refresh", response_model=TokenResponse)
async def refresh_token(
    refresh_data: RefreshTokenRequest,
    db: AsyncSession = Depends(get_db)
):
    """Refresh access token"""
    try:
        return await AuthService.refresh_token(refresh_data.refresh_token, db)
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Token refresh error: {e}")
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Token refresh failed"
        )

@router.post("/verify")
async def verify_token(
    request: Request,
    db: AsyncSession = Depends(get_db)
):
    """Verify access token"""
    try:
        auth_header = request.headers.get("Authorization")
        if not auth_header or not auth_header.startswith("Bearer "):
            raise HTTPException(
                status_code=status.HTTP_401_UNAUTHORIZED,
                detail="Authorization header required"
            )

        token = auth_header.split(" ")[1]
        return await AuthService.verify_token(token, db)
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Token verification error: {e}")
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Token verification failed"
        )

@router.post("/logout")
async def logout(
    request: Request,
    db: AsyncSession = Depends(get_db)
):
    """User logout"""
    try:
        auth_header = request.headers.get("Authorization")
        if not auth_header or not auth_header.startswith("Bearer "):
            raise HTTPException(
                status_code=status.HTTP_401_UNAUTHORIZED,
                detail="Authorization header required"
            )

        token = auth_header.split(" ")[1]
        user_data = await AuthService.verify_token(token, db)

        await AuthService.logout_user(user_data["user_id"], db)

        return {"message": "Logged out successfully"}
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Logout error: {e}")
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Logout failed"
        )
0
services/auth/app/core/__init__.py
Normal file
0
services/auth/app/core/__init__.py
Normal file
47
services/auth/app/core/config.py
Normal file
47
services/auth/app/core/config.py
Normal file
@@ -0,0 +1,47 @@
"""
Authentication service configuration
"""

import os
from pydantic_settings import BaseSettings  # BaseSettings moved out of pydantic in v2; pydantic-settings is pinned in requirements.txt


class Settings(BaseSettings):
    """Application settings"""

    # Basic settings
    APP_NAME: str = "Authentication Service"
    VERSION: str = "1.0.0"
    DEBUG: bool = os.getenv("DEBUG", "False").lower() == "true"
    LOG_LEVEL: str = os.getenv("LOG_LEVEL", "INFO")

    # Database settings
    DATABASE_URL: str = os.getenv("DATABASE_URL", "postgresql+asyncpg://auth_user:auth_pass123@auth-db:5432/auth_db")

    # Redis settings
    REDIS_URL: str = os.getenv("REDIS_URL", "redis://redis:6379/0")

    # JWT settings
    JWT_SECRET_KEY: str = os.getenv("JWT_SECRET_KEY", "your-super-secret-jwt-key")
    JWT_ALGORITHM: str = os.getenv("JWT_ALGORITHM", "HS256")
    JWT_ACCESS_TOKEN_EXPIRE_MINUTES: int = int(os.getenv("JWT_ACCESS_TOKEN_EXPIRE_MINUTES", "30"))
    JWT_REFRESH_TOKEN_EXPIRE_DAYS: int = int(os.getenv("JWT_REFRESH_TOKEN_EXPIRE_DAYS", "7"))

    # Password settings
    PASSWORD_MIN_LENGTH: int = 8
    PASSWORD_REQUIRE_UPPERCASE: bool = True
    PASSWORD_REQUIRE_LOWERCASE: bool = True
    PASSWORD_REQUIRE_NUMBERS: bool = True
    PASSWORD_REQUIRE_SYMBOLS: bool = False

    # Security settings
    BCRYPT_ROUNDS: int = 12
    MAX_LOGIN_ATTEMPTS: int = 5
    LOCKOUT_DURATION_MINUTES: int = 30

    # RabbitMQ settings
    RABBITMQ_URL: str = os.getenv("RABBITMQ_URL", "amqp://bakery:forecast123@rabbitmq:5672/")

    class Config:
        env_file = ".env"


settings = Settings()
12
services/auth/app/core/database.py
Normal file
12
services/auth/app/core/database.py
Normal file
@@ -0,0 +1,12 @@
"""
Database configuration for auth service
"""

from shared.database.base import DatabaseManager
from app.core.config import settings

# Initialize database manager
database_manager = DatabaseManager(settings.DATABASE_URL)

# Alias for convenience
get_db = database_manager.get_db
153
services/auth/app/core/security.py
Normal file
153
services/auth/app/core/security.py
Normal file
@@ -0,0 +1,153 @@
"""
Security utilities for authentication service
"""

import bcrypt
import re
from datetime import datetime, timedelta
from typing import Optional, Dict, Any
import redis.asyncio as redis
from fastapi import HTTPException, status
import logging

from app.core.config import settings
from shared.auth.jwt_handler import JWTHandler

logger = logging.getLogger(__name__)

# Initialize JWT handler
jwt_handler = JWTHandler(settings.JWT_SECRET_KEY, settings.JWT_ALGORITHM)

# Redis client for session management
redis_client = redis.from_url(settings.REDIS_URL)


class SecurityManager:
    """Security utilities for authentication"""

    @staticmethod
    def hash_password(password: str) -> str:
        """Hash password using bcrypt"""
        salt = bcrypt.gensalt(rounds=settings.BCRYPT_ROUNDS)
        return bcrypt.hashpw(password.encode('utf-8'), salt).decode('utf-8')

    @staticmethod
    def verify_password(password: str, hashed_password: str) -> bool:
        """Verify password against hash"""
        return bcrypt.checkpw(password.encode('utf-8'), hashed_password.encode('utf-8'))

    @staticmethod
    def validate_password(password: str) -> bool:
        """Validate password strength"""
        if len(password) < settings.PASSWORD_MIN_LENGTH:
            return False

        if settings.PASSWORD_REQUIRE_UPPERCASE and not re.search(r'[A-Z]', password):
            return False

        if settings.PASSWORD_REQUIRE_LOWERCASE and not re.search(r'[a-z]', password):
            return False

        if settings.PASSWORD_REQUIRE_NUMBERS and not re.search(r'\d', password):
            return False

        if settings.PASSWORD_REQUIRE_SYMBOLS and not re.search(r'[!@#$%^&*(),.?":{}|<>]', password):
            return False

        return True

    @staticmethod
    def create_access_token(user_data: Dict[str, Any]) -> str:
        """Create JWT access token"""
        expires_delta = timedelta(minutes=settings.JWT_ACCESS_TOKEN_EXPIRE_MINUTES)
        return jwt_handler.create_access_token(user_data, expires_delta)

    @staticmethod
    def create_refresh_token(user_data: Dict[str, Any]) -> str:
        """Create JWT refresh token"""
        expires_delta = timedelta(days=settings.JWT_REFRESH_TOKEN_EXPIRE_DAYS)
        return jwt_handler.create_refresh_token(user_data, expires_delta)

    @staticmethod
    def verify_token(token: str) -> Optional[Dict[str, Any]]:
        """Verify JWT token"""
        return jwt_handler.verify_token(token)

    @staticmethod
    async def check_login_attempts(email: str) -> bool:
        """Check if user has exceeded login attempts"""
        try:
            key = f"login_attempts:{email}"
            attempts = await redis_client.get(key)

            if attempts is None:
                return True

            return int(attempts) < settings.MAX_LOGIN_ATTEMPTS

        except Exception as e:
            logger.error(f"Error checking login attempts: {e}")
            return True

    @staticmethod
    async def increment_login_attempts(email: str):
        """Increment login attempts counter"""
        try:
            key = f"login_attempts:{email}"
            current_attempts = await redis_client.incr(key)

            # Set TTL on first attempt
            if current_attempts == 1:
                await redis_client.expire(key, settings.LOCKOUT_DURATION_MINUTES * 60)

        except Exception as e:
            logger.error(f"Error incrementing login attempts: {e}")

    @staticmethod
    async def clear_login_attempts(email: str):
        """Clear login attempts counter"""
        try:
            key = f"login_attempts:{email}"
            await redis_client.delete(key)

        except Exception as e:
            logger.error(f"Error clearing login attempts: {e}")

    @staticmethod
    async def store_refresh_token(user_id: str, refresh_token: str):
        """Store refresh token in Redis"""
        try:
            key = f"refresh_token:{user_id}"
            expires_seconds = settings.JWT_REFRESH_TOKEN_EXPIRE_DAYS * 24 * 3600
            await redis_client.setex(key, expires_seconds, refresh_token)

        except Exception as e:
            logger.error(f"Error storing refresh token: {e}")

    @staticmethod
    async def verify_refresh_token(user_id: str, refresh_token: str) -> bool:
        """Verify refresh token"""
        try:
            key = f"refresh_token:{user_id}"
            stored_token = await redis_client.get(key)

            if stored_token is None:
                return False

            return stored_token.decode() == refresh_token

        except Exception as e:
            logger.error(f"Error verifying refresh token: {e}")
            return False

    @staticmethod
    async def revoke_refresh_token(user_id: str):
        """Revoke refresh token"""
        try:
            key = f"refresh_token:{user_id}"
            await redis_client.delete(key)

        except Exception as e:
            logger.error(f"Error revoking refresh token: {e}")


# Global security manager instance
security_manager = SecurityManager()
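A minimal usage sketch of the login-attempt flow SecurityManager supports (illustrative only, not part of this commit; the stored password hash is a hypothetical stand-in for a database lookup):

# Illustrative sketch; "stored_hash" is a hypothetical stand-in for a DB lookup.
from app.core.security import security_manager

async def attempt_login(email: str, password: str, stored_hash: str) -> bool:
    # Refuse early if the account is locked out by the Redis attempt counter
    if not await security_manager.check_login_attempts(email):
        return False

    if security_manager.verify_password(password, stored_hash):
        await security_manager.clear_login_attempts(email)  # reset counter on success
        return True

    await security_manager.increment_login_attempts(email)  # count the failure
    return False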
83
services/auth/app/main.py
Normal file
83
services/auth/app/main.py
Normal file
@@ -0,0 +1,83 @@
"""
Authentication Service
Handles user authentication, registration, and token management
"""

import logging
from datetime import timedelta
from fastapi import FastAPI, Depends, HTTPException, status
from fastapi.middleware.cors import CORSMiddleware
from fastapi.security import HTTPBearer

from app.core.config import settings
from app.core.database import database_manager
from app.api import auth, users
from app.services.messaging import message_publisher
from shared.monitoring.logging import setup_logging
from shared.monitoring.metrics import MetricsCollector

# Setup logging
setup_logging("auth-service", settings.LOG_LEVEL)
logger = logging.getLogger(__name__)

# Create FastAPI app
app = FastAPI(
    title="Authentication Service",
    description="User authentication and authorization service",
    version="1.0.0"
)

# Initialize metrics collector
metrics_collector = MetricsCollector("auth-service")

# CORS middleware
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

# Include routers
app.include_router(auth.router, prefix="/auth", tags=["authentication"])
app.include_router(users.router, prefix="/users", tags=["users"])


@app.on_event("startup")
async def startup_event():
    """Application startup"""
    logger.info("Starting Authentication Service")

    # Create database tables
    await database_manager.create_tables()

    # Initialize message publisher
    await message_publisher.connect()

    # Start metrics server
    metrics_collector.start_metrics_server(8080)

    logger.info("Authentication Service started successfully")


@app.on_event("shutdown")
async def shutdown_event():
    """Application shutdown"""
    logger.info("Shutting down Authentication Service")

    # Cleanup message publisher
    await message_publisher.disconnect()

    logger.info("Authentication Service shutdown complete")


@app.get("/health")
async def health_check():
    """Health check endpoint"""
    return {
        "status": "healthy",
        "service": "auth-service",
        "version": "1.0.0"
    }


if __name__ == "__main__":
    import uvicorn
    uvicorn.run(app, host="0.0.0.0", port=8000)
0
services/auth/app/schemas/__init__.py
Normal file
0
services/auth/app/schemas/__init__.py
Normal file
108
services/auth/app/schemas/auth.py
Normal file
108
services/auth/app/schemas/auth.py
Normal file
@@ -0,0 +1,108 @@
"""
Authentication schemas
"""

from pydantic import BaseModel, EmailStr, Field, validator  # "validator" is the deprecated v1-style shim pydantic 2.x still ships
from typing import Optional
from datetime import datetime

from app.core.config import settings
from shared.utils.validation import validate_spanish_phone


class UserRegistration(BaseModel):
    """User registration schema"""
    email: EmailStr
    password: str = Field(..., min_length=settings.PASSWORD_MIN_LENGTH)
    full_name: str = Field(..., min_length=2, max_length=100)
    phone: Optional[str] = None
    language: str = Field(default="es", pattern="^(es|en)$")  # pydantic 2.x renamed Field(regex=...) to pattern=

    @validator('password')
    def validate_password(cls, v):
        """Validate password strength"""
        from app.core.security import security_manager
        if not security_manager.validate_password(v):
            raise ValueError('Password does not meet security requirements')
        return v

    @validator('phone')
    def validate_phone(cls, v):
        """Validate phone number"""
        if v and not validate_spanish_phone(v):
            raise ValueError('Invalid Spanish phone number')
        return v


class UserLogin(BaseModel):
    """User login schema"""
    email: EmailStr
    password: str


class TokenResponse(BaseModel):
    """Token response schema"""
    access_token: str
    refresh_token: str
    token_type: str = "bearer"
    expires_in: int


class RefreshTokenRequest(BaseModel):
    """Refresh token request schema"""
    refresh_token: str


class UserResponse(BaseModel):
    """User response schema"""
    id: str
    email: str
    full_name: str
    is_active: bool
    is_verified: bool
    tenant_id: Optional[str]
    role: str
    phone: Optional[str]
    language: str
    timezone: str
    created_at: Optional[datetime]
    last_login: Optional[datetime]


class PasswordChangeRequest(BaseModel):
    """Password change request schema"""
    current_password: str
    new_password: str = Field(..., min_length=settings.PASSWORD_MIN_LENGTH)

    @validator('new_password')
    def validate_new_password(cls, v):
        """Validate new password strength"""
        from app.core.security import security_manager
        if not security_manager.validate_password(v):
            raise ValueError('New password does not meet security requirements')
        return v


class PasswordResetRequest(BaseModel):
    """Password reset request schema"""
    email: EmailStr


class PasswordResetConfirm(BaseModel):
    """Password reset confirmation schema"""
    token: str
    new_password: str = Field(..., min_length=settings.PASSWORD_MIN_LENGTH)

    @validator('new_password')
    def validate_new_password(cls, v):
        """Validate new password strength"""
        from app.core.security import security_manager
        if not security_manager.validate_password(v):
            raise ValueError('New password does not meet security requirements')
        return v


class UserUpdate(BaseModel):
    """User update schema"""
    full_name: Optional[str] = Field(None, min_length=2, max_length=100)
    phone: Optional[str] = None
    language: Optional[str] = Field(None, pattern="^(es|en)$")
    timezone: Optional[str] = None

    @validator('phone')
    def validate_phone(cls, v):
        """Validate phone number"""
        if v and not validate_spanish_phone(v):
            raise ValueError('Invalid Spanish phone number')
        return v
0
services/auth/app/services/__init__.py
Normal file
0
services/auth/app/services/__init__.py
Normal file
46
services/auth/app/services/messaging.py
Normal file
46
services/auth/app/services/messaging.py
Normal file
@@ -0,0 +1,46 @@
"""
Messaging service for auth service
"""

from shared.messaging.rabbitmq import RabbitMQClient
from app.core.config import settings

# Global message publisher
message_publisher = RabbitMQClient(settings.RABBITMQ_URL)


# NOTE: the Dockerfile below was concatenated into this module when the
# artifact files were split; it belongs in services/auth/Dockerfile.
# services/auth/Dockerfile
FROM python:3.11-slim

WORKDIR /app

# Install system dependencies
RUN apt-get update && apt-get install -y \
    gcc \
    curl \
    && rm -rf /var/lib/apt/lists/*

# Copy requirements
COPY requirements.txt .

# Install Python dependencies
RUN pip install --no-cache-dir -r requirements.txt

# Copy shared libraries
COPY --from=shared /shared /app/shared

# Copy application code
COPY . .

# Add shared libraries to Python path
ENV PYTHONPATH="/app:/app/shared:$PYTHONPATH"

# Expose port
EXPOSE 8000

# Health check
HEALTHCHECK --interval=30s --timeout=10s --start-period=5s --retries=3 \
    CMD curl -f http://localhost:8000/health || exit 1

# Run application
CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000"]
17
services/auth/requirements.txt
Normal file
17
services/auth/requirements.txt
Normal file
@@ -0,0 +1,17 @@
fastapi==0.104.1
uvicorn[standard]==0.24.0
sqlalchemy==2.0.23
asyncpg==0.29.0
alembic==1.12.1
pydantic==2.5.0
pydantic-settings==2.1.0
python-jose[cryptography]==3.3.0
passlib[bcrypt]==1.7.4
bcrypt==4.0.1
python-multipart==0.0.6
redis==5.0.1
aio-pika==9.3.0
email-validator==2.0.0
prometheus-client==0.17.1
python-json-logger==2.0.4
pytz==2023.3
0
services/auth/shared/auth/__init__.py
Normal file
0
services/auth/shared/auth/__init__.py
Normal file
41
services/auth/shared/auth/decorators.py
Normal file
41
services/auth/shared/auth/decorators.py
Normal file
@@ -0,0 +1,41 @@
"""
Authentication decorators for FastAPI
"""

from functools import wraps
from fastapi import HTTPException, Depends
from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials
import httpx
import logging

logger = logging.getLogger(__name__)

security = HTTPBearer()


def verify_service_token(auth_service_url: str):
    """Verify service token with auth service"""

    # HTTPBearer yields HTTPAuthorizationCredentials, not a bare string
    async def verify_token(token: HTTPAuthorizationCredentials = Depends(security)):
        try:
            async with httpx.AsyncClient() as client:
                response = await client.post(
                    f"{auth_service_url}/verify",
                    headers={"Authorization": f"Bearer {token.credentials}"}
                )

                if response.status_code == 200:
                    return response.json()
                else:
                    raise HTTPException(
                        status_code=401,
                        detail="Invalid authentication credentials"
                    )

        except httpx.RequestError as e:
            logger.error(f"Auth service unavailable: {e}")
            raise HTTPException(
                status_code=503,
                detail="Authentication service unavailable"
            )

    return verify_token
58
services/auth/shared/auth/jwt_handler.py
Normal file
58
services/auth/shared/auth/jwt_handler.py
Normal file
@@ -0,0 +1,58 @@
"""
Shared JWT Authentication Handler
Used across all microservices for consistent authentication
"""

import jwt  # NOTE: this is the PyJWT API; requirements.txt pins python-jose instead, so either add PyJWT or port to "from jose import jwt" (exception names differ)
from datetime import datetime, timedelta
from typing import Optional, Dict, Any
import logging

logger = logging.getLogger(__name__)


class JWTHandler:
    """JWT token handling for microservices"""

    def __init__(self, secret_key: str, algorithm: str = "HS256"):
        self.secret_key = secret_key
        self.algorithm = algorithm

    def create_access_token(self, data: Dict[str, Any], expires_delta: Optional[timedelta] = None) -> str:
        """Create JWT access token"""
        to_encode = data.copy()

        if expires_delta:
            expire = datetime.utcnow() + expires_delta
        else:
            expire = datetime.utcnow() + timedelta(minutes=30)

        to_encode.update({"exp": expire, "type": "access"})

        encoded_jwt = jwt.encode(to_encode, self.secret_key, algorithm=self.algorithm)
        return encoded_jwt

    def create_refresh_token(self, data: Dict[str, Any], expires_delta: Optional[timedelta] = None) -> str:
        """Create JWT refresh token"""
        to_encode = data.copy()

        if expires_delta:
            expire = datetime.utcnow() + expires_delta
        else:
            expire = datetime.utcnow() + timedelta(days=7)

        to_encode.update({"exp": expire, "type": "refresh"})

        encoded_jwt = jwt.encode(to_encode, self.secret_key, algorithm=self.algorithm)
        return encoded_jwt

    def verify_token(self, token: str) -> Optional[Dict[str, Any]]:
        """Verify and decode JWT token"""
        try:
            payload = jwt.decode(token, self.secret_key, algorithms=[self.algorithm])
            return payload
        except jwt.ExpiredSignatureError:
            logger.warning("Token has expired")
            return None
        except jwt.InvalidTokenError:
            logger.warning("Invalid token")
            return None
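A short illustrative sketch (not part of this commit) of how JWTHandler round-trips a token; the secret below is a placeholder, never a real key:

# Illustrative only; assumes PyJWT is installed (see the import note above).
from shared.auth.jwt_handler import JWTHandler

handler = JWTHandler("example-secret", "HS256")
token = handler.create_access_token({"user_id": "42"})  # embeds exp and type=access
claims = handler.verify_token(token)                    # dict on success, None if expired/invalid
assert claims is not None and claims["user_id"] == "42"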
0
services/auth/shared/database/__init__.py
Normal file
0
services/auth/shared/database/__init__.py
Normal file
56
services/auth/shared/database/base.py
Normal file
56
services/auth/shared/database/base.py
Normal file
@@ -0,0 +1,56 @@
"""
Base database configuration for all microservices
"""

import os
from sqlalchemy import create_engine
from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine
from sqlalchemy.orm import sessionmaker, declarative_base
from sqlalchemy.pool import StaticPool
import logging

logger = logging.getLogger(__name__)

Base = declarative_base()


class DatabaseManager:
    """Database manager for microservices"""

    def __init__(self, database_url: str):
        self.database_url = database_url
        self.async_engine = create_async_engine(
            database_url,
            echo=False,
            pool_pre_ping=True,
            pool_recycle=300,
            pool_size=20,
            max_overflow=30
        )

        self.async_session_local = sessionmaker(
            self.async_engine,
            class_=AsyncSession,
            expire_on_commit=False
        )

    async def get_db(self):
        """Get database session"""
        async with self.async_session_local() as session:
            try:
                yield session
            except Exception as e:
                logger.error(f"Database session error: {e}")
                await session.rollback()
                raise
            finally:
                await session.close()

    async def create_tables(self):
        """Create database tables"""
        async with self.async_engine.begin() as conn:
            await conn.run_sync(Base.metadata.create_all)

    async def drop_tables(self):
        """Drop database tables"""
        async with self.async_engine.begin() as conn:
            await conn.run_sync(Base.metadata.drop_all)
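An illustrative sketch (not part of this commit) of wiring DatabaseManager.get_db as a FastAPI dependency; the DSN and route are hypothetical:

# Illustrative only; the connection string and endpoint are placeholders.
from fastapi import FastAPI, Depends
from sqlalchemy.ext.asyncio import AsyncSession
from shared.database.base import DatabaseManager

manager = DatabaseManager("postgresql+asyncpg://user:pass@localhost:5432/example_db")
app = FastAPI()

@app.get("/ping-db")
async def ping_db(db: AsyncSession = Depends(manager.get_db)):
    # the session is opened per request and closed by get_db's finally block
    return {"ok": db is not None}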
0
services/auth/shared/messaging/__init__.py
Normal file
0
services/auth/shared/messaging/__init__.py
Normal file
73
services/auth/shared/messaging/events.py
Normal file
73
services/auth/shared/messaging/events.py
Normal file
@@ -0,0 +1,73 @@
"""
Event definitions for microservices communication
"""

from dataclasses import dataclass, field
from datetime import datetime
from typing import Dict, Any, Optional
import uuid


@dataclass
class BaseEvent:
    """Base event class"""
    # Every field needs a default: the subclasses below override event_type
    # with a default value, and dataclasses forbid non-default fields after it.
    event_id: str = ""
    event_type: str = ""
    service_name: str = ""
    timestamp: Optional[datetime] = None
    data: Dict[str, Any] = field(default_factory=dict)
    correlation_id: Optional[str] = None

    def __post_init__(self):
        if not self.event_id:
            self.event_id = str(uuid.uuid4())
        if not self.timestamp:
            self.timestamp = datetime.utcnow()


# Training Events
@dataclass
class TrainingStartedEvent(BaseEvent):
    event_type: str = "training.started"


@dataclass
class TrainingCompletedEvent(BaseEvent):
    event_type: str = "training.completed"


@dataclass
class TrainingFailedEvent(BaseEvent):
    event_type: str = "training.failed"


# Forecasting Events
@dataclass
class ForecastGeneratedEvent(BaseEvent):
    event_type: str = "forecast.generated"


@dataclass
class ForecastRequestedEvent(BaseEvent):
    event_type: str = "forecast.requested"


# User Events
@dataclass
class UserRegisteredEvent(BaseEvent):
    event_type: str = "user.registered"


@dataclass
class UserLoginEvent(BaseEvent):
    event_type: str = "user.login"


# Tenant Events
@dataclass
class TenantCreatedEvent(BaseEvent):
    event_type: str = "tenant.created"


@dataclass
class TenantUpdatedEvent(BaseEvent):
    event_type: str = "tenant.updated"


# Notification Events
@dataclass
class NotificationSentEvent(BaseEvent):
    event_type: str = "notification.sent"


@dataclass
class NotificationFailedEvent(BaseEvent):
    event_type: str = "notification.failed"
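An illustrative sketch (not part of this commit) of constructing an event; __post_init__ backfills the id and timestamp:

# Illustrative only.
from shared.messaging.events import UserRegisteredEvent

event = UserRegisteredEvent(
    service_name="auth-service",
    data={"user_id": "42", "email": "user@example.com"},
)
# event_id and timestamp were filled in by __post_init__
assert event.event_type == "user.registered" and event.event_id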
96
services/auth/shared/messaging/rabbitmq.py
Normal file
96
services/auth/shared/messaging/rabbitmq.py
Normal file
@@ -0,0 +1,96 @@
"""
RabbitMQ messaging client for microservices
"""

import asyncio
import json
import logging
from typing import Dict, Any, Callable
import aio_pika
from aio_pika import connect_robust, Message, DeliveryMode

logger = logging.getLogger(__name__)


class RabbitMQClient:
    """RabbitMQ client for microservices communication"""

    def __init__(self, connection_url: str):
        self.connection_url = connection_url
        self.connection = None
        self.channel = None

    async def connect(self):
        """Connect to RabbitMQ"""
        try:
            self.connection = await connect_robust(self.connection_url)
            self.channel = await self.connection.channel()
            logger.info("Connected to RabbitMQ")
        except Exception as e:
            logger.error(f"Failed to connect to RabbitMQ: {e}")
            raise

    async def disconnect(self):
        """Disconnect from RabbitMQ"""
        if self.connection:
            await self.connection.close()
            logger.info("Disconnected from RabbitMQ")

    async def publish_event(self, exchange_name: str, routing_key: str, event_data: Dict[str, Any]):
        """Publish event to RabbitMQ"""
        try:
            if not self.channel:
                await self.connect()

            # Declare exchange
            exchange = await self.channel.declare_exchange(
                exchange_name,
                aio_pika.ExchangeType.TOPIC,
                durable=True
            )

            # Create message
            message = Message(
                json.dumps(event_data).encode(),
                delivery_mode=DeliveryMode.PERSISTENT,
                content_type="application/json"
            )

            # Publish message
            await exchange.publish(message, routing_key=routing_key)

            logger.info(f"Published event to {exchange_name} with routing key {routing_key}")

        except Exception as e:
            logger.error(f"Failed to publish event: {e}")
            raise

    async def consume_events(self, exchange_name: str, queue_name: str, routing_key: str, callback: Callable):
        """Consume events from RabbitMQ"""
        try:
            if not self.channel:
                await self.connect()

            # Declare exchange
            exchange = await self.channel.declare_exchange(
                exchange_name,
                aio_pika.ExchangeType.TOPIC,
                durable=True
            )

            # Declare queue
            queue = await self.channel.declare_queue(
                queue_name,
                durable=True
            )

            # Bind queue to exchange
            await queue.bind(exchange, routing_key)

            # Set up consumer
            await queue.consume(callback)

            logger.info(f"Started consuming events from {queue_name}")

        except Exception as e:
            logger.error(f"Failed to consume events: {e}")
            raise
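An illustrative sketch (not part of this commit) of publishing through RabbitMQClient; the broker URL, exchange, and routing key are placeholders:

# Illustrative only; assumes a broker is reachable at the placeholder URL.
import asyncio
from shared.messaging.rabbitmq import RabbitMQClient

async def main():
    client = RabbitMQClient("amqp://guest:guest@localhost:5672/")
    await client.connect()
    # publish_event declares a durable topic exchange, then sends a persistent JSON message
    await client.publish_event("bakery.events", "user.registered", {"user_id": "42"})
    await client.disconnect()

asyncio.run(main())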
0
services/auth/shared/monitoring/__init__.py
Normal file
0
services/auth/shared/monitoring/__init__.py
Normal file
77
services/auth/shared/monitoring/logging.py
Normal file
77
services/auth/shared/monitoring/logging.py
Normal file
@@ -0,0 +1,77 @@
"""
Centralized logging configuration for microservices
"""

import logging
import logging.config
import os
from typing import Dict, Any


def setup_logging(service_name: str, log_level: str = "INFO") -> None:
    """Set up logging configuration for a microservice"""

    config: Dict[str, Any] = {
        "version": 1,
        "disable_existing_loggers": False,
        "formatters": {
            "standard": {
                "format": "%(asctime)s [%(levelname)s] %(name)s: %(message)s"
            },
            "detailed": {
                "format": "%(asctime)s [%(levelname)s] %(name)s [%(filename)s:%(lineno)d] %(message)s"
            },
            "json": {
                "()": "pythonjsonlogger.jsonlogger.JsonFormatter",
                "format": "%(asctime)s %(name)s %(levelname)s %(message)s"
            }
        },
        "handlers": {
            "console": {
                "class": "logging.StreamHandler",
                "level": log_level,
                "formatter": "standard",
                "stream": "ext://sys.stdout"
            },
            "file": {
                "class": "logging.FileHandler",
                "level": log_level,
                "formatter": "detailed",
                "filename": f"/var/log/{service_name}.log",
                "mode": "a"
            },
            # NOTE: dictConfig instantiates every configured handler, so this
            # requires the python-logstash package (not in requirements.txt)
            # even though it is only attached to the root logger in production.
            "logstash": {
                "class": "logstash.TCPLogstashHandler",
                "host": os.getenv("LOGSTASH_HOST", "localhost"),
                "port": int(os.getenv("LOGSTASH_PORT", "5000")),
                "version": 1,
                "message_type": "logstash",
                "fqdn": False,
                "tags": [service_name]
            }
        },
        "loggers": {
            "": {
                "handlers": ["console", "file"],
                "level": log_level,
                "propagate": False
            },
            "uvicorn": {
                "handlers": ["console"],
                "level": log_level,
                "propagate": False
            },
            "uvicorn.access": {
                "handlers": ["console"],
                "level": log_level,
                "propagate": False
            }
        }
    }

    # Add logstash handler if in production
    if os.getenv("ENVIRONMENT") == "production":
        config["loggers"][""]["handlers"].append("logstash")

    logging.config.dictConfig(config)
    logger = logging.getLogger(__name__)
    logger.info(f"Logging configured for {service_name}")
112
services/auth/shared/monitoring/metrics.py
Normal file
112
services/auth/shared/monitoring/metrics.py
Normal file
@@ -0,0 +1,112 @@
"""
Metrics collection for microservices
"""

import time
import logging
from typing import Dict, Any
from prometheus_client import Counter, Histogram, Gauge, start_http_server
from functools import wraps

logger = logging.getLogger(__name__)

# Prometheus metrics
REQUEST_COUNT = Counter(
    'http_requests_total',
    'Total HTTP requests',
    ['method', 'endpoint', 'status_code', 'service']
)

REQUEST_DURATION = Histogram(
    'http_request_duration_seconds',
    'HTTP request duration in seconds',
    ['method', 'endpoint', 'service']
)

ACTIVE_CONNECTIONS = Gauge(
    'active_connections',
    'Active database connections',
    ['service']
)

TRAINING_JOBS = Counter(
    'training_jobs_total',
    'Total training jobs',
    ['status', 'service']
)

FORECASTS_GENERATED = Counter(
    'forecasts_generated_total',
    'Total forecasts generated',
    ['service']
)


class MetricsCollector:
    """Metrics collector for microservices"""

    def __init__(self, service_name: str):
        self.service_name = service_name
        self.start_time = time.time()

    def start_metrics_server(self, port: int = 8080):
        """Start Prometheus metrics server"""
        try:
            start_http_server(port)
            logger.info(f"Metrics server started on port {port}")
        except Exception as e:
            logger.error(f"Failed to start metrics server: {e}")

    def record_request(self, method: str, endpoint: str, status_code: int, duration: float):
        """Record HTTP request metrics"""
        REQUEST_COUNT.labels(
            method=method,
            endpoint=endpoint,
            status_code=status_code,
            service=self.service_name
        ).inc()

        REQUEST_DURATION.labels(
            method=method,
            endpoint=endpoint,
            service=self.service_name
        ).observe(duration)

    def record_training_job(self, status: str):
        """Record training job metrics"""
        TRAINING_JOBS.labels(
            status=status,
            service=self.service_name
        ).inc()

    def record_forecast_generated(self):
        """Record forecast generation metrics"""
        FORECASTS_GENERATED.labels(
            service=self.service_name
        ).inc()

    def set_active_connections(self, count: int):
        """Set active database connections"""
        ACTIVE_CONNECTIONS.labels(
            service=self.service_name
        ).set(count)


def metrics_middleware(metrics_collector: MetricsCollector):
    """Middleware to collect metrics"""

    def middleware(request, call_next):
        start_time = time.time()

        response = call_next(request)

        duration = time.time() - start_time

        metrics_collector.record_request(
            method=request.method,
            endpoint=request.url.path,
            status_code=response.status_code,
            duration=duration
        )

        return response

    return middleware
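An illustrative sketch (not part of this commit): the middleware factory above is synchronous, so wiring it into FastAPI would need an async adaptation along these lines:

# Illustrative only; adapts the sync factory above to FastAPI's async middleware hook.
import time
from fastapi import FastAPI, Request
from shared.monitoring.metrics import MetricsCollector

app = FastAPI()
collector = MetricsCollector("example-service")

@app.middleware("http")
async def collect_metrics(request: Request, call_next):
    start = time.time()
    response = await call_next(request)
    # record count and latency for this request, labeled by method/path/status
    collector.record_request(
        method=request.method,
        endpoint=request.url.path,
        status_code=response.status_code,
        duration=time.time() - start,
    )
    return response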
0
services/auth/shared/utils/__init__.py
Normal file
0
services/auth/shared/utils/__init__.py
Normal file
71
services/auth/shared/utils/datetime_utils.py
Normal file
71
services/auth/shared/utils/datetime_utils.py
Normal file
@@ -0,0 +1,71 @@
"""
DateTime utilities for microservices
"""

from datetime import datetime, timezone, timedelta
from typing import Optional
import pytz


def utc_now() -> datetime:
    """Get current UTC datetime"""
    return datetime.now(timezone.utc)


def madrid_now() -> datetime:
    """Get current Madrid datetime"""
    madrid_tz = pytz.timezone('Europe/Madrid')
    return datetime.now(madrid_tz)


def to_utc(dt: datetime) -> datetime:
    """Convert datetime to UTC"""
    if dt.tzinfo is None:
        dt = dt.replace(tzinfo=timezone.utc)
    return dt.astimezone(timezone.utc)


def to_madrid(dt: datetime) -> datetime:
    """Convert datetime to Madrid timezone"""
    madrid_tz = pytz.timezone('Europe/Madrid')
    if dt.tzinfo is None:
        dt = dt.replace(tzinfo=timezone.utc)
    return dt.astimezone(madrid_tz)


def format_datetime(dt: datetime, format_str: str = "%Y-%m-%d %H:%M:%S") -> str:
    """Format datetime as string"""
    return dt.strftime(format_str)


def parse_datetime(dt_str: str, format_str: str = "%Y-%m-%d %H:%M:%S") -> datetime:
    """Parse datetime from string"""
    return datetime.strptime(dt_str, format_str)


def is_business_hours(dt: Optional[datetime] = None) -> bool:
    """Check if datetime is during business hours (9 AM - 6 PM Madrid time)"""
    if dt is None:
        dt = madrid_now()

    if dt.tzinfo is None:
        dt = dt.replace(tzinfo=timezone.utc)

    madrid_dt = to_madrid(dt)

    # Check if it's a weekday (Monday=0, Sunday=6)
    if madrid_dt.weekday() >= 5:  # Weekend
        return False

    # Check if it's business hours
    return 9 <= madrid_dt.hour < 18


def next_business_day(dt: Optional[datetime] = None) -> datetime:
    """Get next business day"""
    if dt is None:
        dt = madrid_now()

    if dt.tzinfo is None:
        dt = dt.replace(tzinfo=timezone.utc)

    madrid_dt = to_madrid(dt)

    # Add days until we reach a weekday
    while madrid_dt.weekday() >= 5:  # Weekend
        madrid_dt += timedelta(days=1)

    # Set to 9 AM
    return madrid_dt.replace(hour=9, minute=0, second=0, microsecond=0)
67
services/auth/shared/utils/validation.py
Normal file
67
services/auth/shared/utils/validation.py
Normal file
@@ -0,0 +1,67 @@
"""
Validation utilities for microservices
"""

import re
from typing import Any, Optional
from email_validator import validate_email, EmailNotValidError


def validate_spanish_phone(phone: str) -> bool:
    """Validate Spanish phone number"""
    # Optional +34/0034/34 prefix, then 9 digits starting with 6-9
    pattern = r'^(\+34|0034|34)?[6-9]\d{8}$'
    return bool(re.match(pattern, phone.replace(' ', '').replace('-', '')))


def validate_email_address(email: str) -> bool:
    """Validate email address"""
    try:
        validate_email(email)
        return True
    except EmailNotValidError:
        return False


def validate_tenant_name(name: str) -> bool:
    """Validate tenant name"""
    # Must be 2-50 characters: letters, numbers, spaces, hyphens, apostrophes
    pattern = r"^[a-zA-ZÀ-ÿ0-9\s\-']{2,50}$"
    return bool(re.match(pattern, name))


def validate_address(address: str) -> bool:
    """Validate address"""
    # Must be 5-200 characters
    return 5 <= len(address.strip()) <= 200


def validate_coordinates(latitude: float, longitude: float) -> bool:
    """Validate Madrid coordinates"""
    # Madrid is roughly between these coordinates
    madrid_bounds = {
        'lat_min': 40.3,
        'lat_max': 40.6,
        'lon_min': -3.8,
        'lon_max': -3.5
    }

    return (
        madrid_bounds['lat_min'] <= latitude <= madrid_bounds['lat_max'] and
        madrid_bounds['lon_min'] <= longitude <= madrid_bounds['lon_max']
    )


def validate_product_name(name: str) -> bool:
    """Validate product name"""
    # Must be 1-50 characters: letters, numbers, spaces
    pattern = r"^[a-zA-ZÀ-ÿ0-9\s]{1,50}$"
    return bool(re.match(pattern, name))


def validate_positive_number(value: Any) -> bool:
    """Validate positive number"""
    try:
        return float(value) > 0
    except (ValueError, TypeError):
        return False


def validate_non_negative_number(value: Any) -> bool:
    """Validate non-negative number"""
    try:
        return float(value) >= 0
    except (ValueError, TypeError):
        return False
31
services/data/Dockerfile
Normal file
31
services/data/Dockerfile
Normal file
@@ -0,0 +1,31 @@
FROM python:3.11-slim

WORKDIR /app

# Install system dependencies
RUN apt-get update && apt-get install -y \
    gcc \
    curl \
    && rm -rf /var/lib/apt/lists/*

# Copy requirements
COPY requirements.txt .

# Install Python dependencies
RUN pip install --no-cache-dir -r requirements.txt

# Copy application code
COPY . .

# Add shared libraries to Python path
ENV PYTHONPATH="/app:/app/shared:$PYTHONPATH"

# Expose port
EXPOSE 8000

# Health check
HEALTHCHECK --interval=30s --timeout=10s --start-period=5s --retries=3 \
    CMD curl -f http://localhost:8000/health || exit 1

# Run application
CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000"]
0
services/data/app/__init__.py
Normal file
0
services/data/app/__init__.py
Normal file
0
services/data/app/api/__init__.py
Normal file
0
services/data/app/api/__init__.py
Normal file
0
services/data/app/core/__init__.py
Normal file
0
services/data/app/core/__init__.py
Normal file
32
services/data/app/core/config.py
Normal file
32
services/data/app/core/config.py
Normal file
@@ -0,0 +1,32 @@
"""
Data service configuration
"""

import os
from pydantic_settings import BaseSettings  # BaseSettings moved out of pydantic in v2; pydantic-settings is pinned in requirements.txt


class Settings(BaseSettings):
    """Application settings"""

    # Basic settings
    APP_NAME: str = "Data Service"
    VERSION: str = "1.0.0"
    DEBUG: bool = os.getenv("DEBUG", "False").lower() == "true"
    LOG_LEVEL: str = os.getenv("LOG_LEVEL", "INFO")

    # Database settings
    DATABASE_URL: str = os.getenv("DATABASE_URL", "postgresql+asyncpg://data_user:data_pass123@data-db:5432/data_db")

    # Redis settings
    REDIS_URL: str = os.getenv("REDIS_URL", "redis://redis:6379/0")

    # RabbitMQ settings
    RABBITMQ_URL: str = os.getenv("RABBITMQ_URL", "amqp://bakery:forecast123@rabbitmq:5672/")

    # Service URLs
    AUTH_SERVICE_URL: str = os.getenv("AUTH_SERVICE_URL", "http://auth-service:8000")

    class Config:
        env_file = ".env"


settings = Settings()
12
services/data/app/core/database.py
Normal file
12
services/data/app/core/database.py
Normal file
@@ -0,0 +1,12 @@
"""
Database configuration for data service
"""

from shared.database.base import DatabaseManager
from app.core.config import settings

# Initialize database manager
database_manager = DatabaseManager(settings.DATABASE_URL)

# Alias for convenience
get_db = database_manager.get_db
61
services/data/app/main.py
Normal file
61
services/data/app/main.py
Normal file
@@ -0,0 +1,61 @@
"""
Data Service
"""

import logging
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware

from app.core.config import settings
from app.core.database import database_manager
from shared.monitoring.logging import setup_logging
from shared.monitoring.metrics import MetricsCollector

# Setup logging
setup_logging("data-service", "INFO")
logger = logging.getLogger(__name__)

# Create FastAPI app
app = FastAPI(
    title="Data Service",
    description="Data service for bakery forecasting",
    version="1.0.0"
)

# Initialize metrics collector
metrics_collector = MetricsCollector("data-service")

# CORS middleware
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)


@app.on_event("startup")
async def startup_event():
    """Application startup"""
    logger.info("Starting Data Service")

    # Create database tables
    await database_manager.create_tables()

    # Start metrics server
    metrics_collector.start_metrics_server(8080)

    logger.info("Data Service started successfully")


@app.get("/health")
async def health_check():
    """Health check endpoint"""
    return {
        "status": "healthy",
        "service": "data-service",
        "version": "1.0.0"
    }


if __name__ == "__main__":
    import uvicorn
    uvicorn.run(app, host="0.0.0.0", port=8000)
0
services/data/app/schemas/__init__.py
Normal file
0
services/data/app/schemas/__init__.py
Normal file
0
services/data/app/services/__init__.py
Normal file
0
services/data/app/services/__init__.py
Normal file
13
services/data/requirements.txt
Normal file
13
services/data/requirements.txt
Normal file
@@ -0,0 +1,13 @@
fastapi==0.104.1
uvicorn[standard]==0.24.0
sqlalchemy==2.0.23
asyncpg==0.29.0
alembic==1.12.1
pydantic==2.5.0
pydantic-settings==2.1.0
httpx==0.25.2
redis==5.0.1
aio-pika==9.3.0
prometheus-client==0.17.1
python-json-logger==2.0.4
pytz==2023.3
0
services/data/shared/auth/__init__.py
Normal file
0
services/data/shared/auth/__init__.py
Normal file
41
services/data/shared/auth/decorators.py
Normal file
41
services/data/shared/auth/decorators.py
Normal file
@@ -0,0 +1,41 @@
"""
Authentication decorators for FastAPI
"""

from functools import wraps
from fastapi import HTTPException, Depends
from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials
import httpx
import logging

logger = logging.getLogger(__name__)

security = HTTPBearer()


def verify_service_token(auth_service_url: str):
    """Verify service token with auth service"""

    # HTTPBearer yields HTTPAuthorizationCredentials, not a bare string
    async def verify_token(token: HTTPAuthorizationCredentials = Depends(security)):
        try:
            async with httpx.AsyncClient() as client:
                response = await client.post(
                    f"{auth_service_url}/verify",
                    headers={"Authorization": f"Bearer {token.credentials}"}
                )

                if response.status_code == 200:
                    return response.json()
                else:
                    raise HTTPException(
                        status_code=401,
                        detail="Invalid authentication credentials"
                    )

        except httpx.RequestError as e:
            logger.error(f"Auth service unavailable: {e}")
            raise HTTPException(
                status_code=503,
                detail="Authentication service unavailable"
            )

    return verify_token
58
services/data/shared/auth/jwt_handler.py
Normal file
58
services/data/shared/auth/jwt_handler.py
Normal file
@@ -0,0 +1,58 @@
"""
Shared JWT Authentication Handler
Used across all microservices for consistent authentication
"""

import jwt  # NOTE: this is the PyJWT API; requirements.txt pins python-jose instead, so either add PyJWT or port to "from jose import jwt" (exception names differ)
from datetime import datetime, timedelta
from typing import Optional, Dict, Any
import logging

logger = logging.getLogger(__name__)


class JWTHandler:
    """JWT token handling for microservices"""

    def __init__(self, secret_key: str, algorithm: str = "HS256"):
        self.secret_key = secret_key
        self.algorithm = algorithm

    def create_access_token(self, data: Dict[str, Any], expires_delta: Optional[timedelta] = None) -> str:
        """Create JWT access token"""
        to_encode = data.copy()

        if expires_delta:
            expire = datetime.utcnow() + expires_delta
        else:
            expire = datetime.utcnow() + timedelta(minutes=30)

        to_encode.update({"exp": expire, "type": "access"})

        encoded_jwt = jwt.encode(to_encode, self.secret_key, algorithm=self.algorithm)
        return encoded_jwt

    def create_refresh_token(self, data: Dict[str, Any], expires_delta: Optional[timedelta] = None) -> str:
        """Create JWT refresh token"""
        to_encode = data.copy()

        if expires_delta:
            expire = datetime.utcnow() + expires_delta
        else:
            expire = datetime.utcnow() + timedelta(days=7)

        to_encode.update({"exp": expire, "type": "refresh"})

        encoded_jwt = jwt.encode(to_encode, self.secret_key, algorithm=self.algorithm)
        return encoded_jwt

    def verify_token(self, token: str) -> Optional[Dict[str, Any]]:
        """Verify and decode JWT token"""
        try:
            payload = jwt.decode(token, self.secret_key, algorithms=[self.algorithm])
            return payload
        except jwt.ExpiredSignatureError:
            logger.warning("Token has expired")
            return None
        except jwt.InvalidTokenError:
            logger.warning("Invalid token")
            return None
0
services/data/shared/database/__init__.py
Normal file
0
services/data/shared/database/__init__.py
Normal file
56
services/data/shared/database/base.py
Normal file
56
services/data/shared/database/base.py
Normal file
@@ -0,0 +1,56 @@
"""
Base database configuration for all microservices
"""

import logging

from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine
from sqlalchemy.orm import sessionmaker, declarative_base

logger = logging.getLogger(__name__)

Base = declarative_base()


class DatabaseManager:
    """Database manager for microservices"""

    def __init__(self, database_url: str):
        self.database_url = database_url
        self.async_engine = create_async_engine(
            database_url,
            echo=False,
            pool_pre_ping=True,
            pool_recycle=300,
            pool_size=20,
            max_overflow=30
        )

        self.async_session_local = sessionmaker(
            self.async_engine,
            class_=AsyncSession,
            expire_on_commit=False
        )

    async def get_db(self):
        """Get database session"""
        async with self.async_session_local() as session:
            try:
                yield session
            except Exception as e:
                logger.error(f"Database session error: {e}")
                await session.rollback()
                raise
            finally:
                await session.close()

    async def create_tables(self):
        """Create database tables"""
        async with self.async_engine.begin() as conn:
            await conn.run_sync(Base.metadata.create_all)

    async def drop_tables(self):
        """Drop database tables"""
        async with self.async_engine.begin() as conn:
            await conn.run_sync(Base.metadata.drop_all)
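As a sketch of how a service would consume this manager, assuming the connection URL and route path below (both illustrative, not from the commit):

# Hedged sketch: using DatabaseManager.get_db as a FastAPI dependency.
from fastapi import APIRouter, Depends
from sqlalchemy import text
from sqlalchemy.ext.asyncio import AsyncSession

manager = DatabaseManager("postgresql+asyncpg://user:pass@localhost:5432/db")  # illustrative URL
router = APIRouter()

@router.get("/db-ping")
async def db_ping(session: AsyncSession = Depends(manager.get_db)):
    # Cheap round-trip that exercises the pooled connection.
    result = await session.execute(text("SELECT 1"))
    return {"db": result.scalar_one()}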
0
services/data/shared/messaging/__init__.py
Normal file
73
services/data/shared/messaging/events.py
Normal file
@@ -0,0 +1,73 @@
"""
Event definitions for microservices communication
"""

import uuid
from dataclasses import dataclass, field
from datetime import datetime
from typing import Dict, Any, Optional


@dataclass
class BaseEvent:
    """Base event class"""
    # Every field needs a default: subclasses override event_type with a
    # default value, and dataclasses forbid non-default fields after it.
    event_id: str = ""
    event_type: str = ""
    service_name: str = ""
    timestamp: Optional[datetime] = None
    data: Dict[str, Any] = field(default_factory=dict)
    correlation_id: Optional[str] = None

    def __post_init__(self):
        if not self.event_id:
            self.event_id = str(uuid.uuid4())
        if not self.timestamp:
            self.timestamp = datetime.utcnow()


# Training Events
@dataclass
class TrainingStartedEvent(BaseEvent):
    event_type: str = "training.started"


@dataclass
class TrainingCompletedEvent(BaseEvent):
    event_type: str = "training.completed"


@dataclass
class TrainingFailedEvent(BaseEvent):
    event_type: str = "training.failed"


# Forecasting Events
@dataclass
class ForecastGeneratedEvent(BaseEvent):
    event_type: str = "forecast.generated"


@dataclass
class ForecastRequestedEvent(BaseEvent):
    event_type: str = "forecast.requested"


# User Events
@dataclass
class UserRegisteredEvent(BaseEvent):
    event_type: str = "user.registered"


@dataclass
class UserLoginEvent(BaseEvent):
    event_type: str = "user.login"


# Tenant Events
@dataclass
class TenantCreatedEvent(BaseEvent):
    event_type: str = "tenant.created"


@dataclass
class TenantUpdatedEvent(BaseEvent):
    event_type: str = "tenant.updated"


# Notification Events
@dataclass
class NotificationSentEvent(BaseEvent):
    event_type: str = "notification.sent"


@dataclass
class NotificationFailedEvent(BaseEvent):
    event_type: str = "notification.failed"
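A short sketch of constructing one of these events and flattening it into a broker-ready payload; the tenant and product values are illustrative:

# Hedged sketch: building an event payload for RabbitMQClient.publish_event.
from dataclasses import asdict

event = ForecastRequestedEvent(
    service_name="forecasting-service",
    data={"tenant_id": "tenant-1", "product": "croissant"},  # illustrative fields
)

payload = asdict(event)
payload["timestamp"] = event.timestamp.isoformat()  # make the datetime JSON-safe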
96
services/data/shared/messaging/rabbitmq.py
Normal file
@@ -0,0 +1,96 @@
"""
RabbitMQ messaging client for microservices
"""

import json
import logging
from typing import Dict, Any, Callable

import aio_pika
from aio_pika import connect_robust, Message, DeliveryMode

logger = logging.getLogger(__name__)


class RabbitMQClient:
    """RabbitMQ client for microservices communication"""

    def __init__(self, connection_url: str):
        self.connection_url = connection_url
        self.connection = None
        self.channel = None

    async def connect(self):
        """Connect to RabbitMQ"""
        try:
            self.connection = await connect_robust(self.connection_url)
            self.channel = await self.connection.channel()
            logger.info("Connected to RabbitMQ")
        except Exception as e:
            logger.error(f"Failed to connect to RabbitMQ: {e}")
            raise

    async def disconnect(self):
        """Disconnect from RabbitMQ"""
        if self.connection:
            await self.connection.close()
            logger.info("Disconnected from RabbitMQ")

    async def publish_event(self, exchange_name: str, routing_key: str, event_data: Dict[str, Any]):
        """Publish event to RabbitMQ"""
        try:
            if not self.channel:
                await self.connect()

            # Declare exchange
            exchange = await self.channel.declare_exchange(
                exchange_name,
                aio_pika.ExchangeType.TOPIC,
                durable=True
            )

            # Create message
            message = Message(
                json.dumps(event_data).encode(),
                delivery_mode=DeliveryMode.PERSISTENT,
                content_type="application/json"
            )

            # Publish message
            await exchange.publish(message, routing_key=routing_key)

            logger.info(f"Published event to {exchange_name} with routing key {routing_key}")

        except Exception as e:
            logger.error(f"Failed to publish event: {e}")
            raise

    async def consume_events(self, exchange_name: str, queue_name: str, routing_key: str, callback: Callable):
        """Consume events from RabbitMQ"""
        try:
            if not self.channel:
                await self.connect()

            # Declare exchange
            exchange = await self.channel.declare_exchange(
                exchange_name,
                aio_pika.ExchangeType.TOPIC,
                durable=True
            )

            # Declare queue and bind it to the exchange
            queue = await self.channel.declare_queue(
                queue_name,
                durable=True
            )
            await queue.bind(exchange, routing_key)

            # Set up consumer
            await queue.consume(callback)

            logger.info(f"Started consuming events from {queue_name}")

        except Exception as e:
            logger.error(f"Failed to consume events: {e}")
            raise
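A consumer-side sketch to go with the client; the exchange, queue, and routing-key names are illustrative, not defined anywhere in this commit:

# Hedged sketch: a callback compatible with consume_events.
import asyncio
import json

import aio_pika


async def handle_event(message: aio_pika.IncomingMessage):
    async with message.process():  # ack on success, reject on exception
        event = json.loads(message.body)
        print(f"received {event.get('event_type')}")


async def main():
    client = RabbitMQClient("amqp://bakery:forecast123@localhost:5672/")
    await client.connect()
    await client.consume_events("bakery.events", "forecasting.training", "training.*", handle_event)
    await asyncio.Future()  # keep the consumer task alive

# asyncio.run(main())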
0
services/data/shared/monitoring/__init__.py
Normal file
77
services/data/shared/monitoring/logging.py
Normal file
@@ -0,0 +1,77 @@
"""
Centralized logging configuration for microservices
"""

import logging
import logging.config
import os
from typing import Dict, Any


def setup_logging(service_name: str, log_level: str = "INFO") -> None:
    """Set up logging configuration for a microservice"""

    config: Dict[str, Any] = {
        "version": 1,
        "disable_existing_loggers": False,
        "formatters": {
            "standard": {
                "format": "%(asctime)s [%(levelname)s] %(name)s: %(message)s"
            },
            "detailed": {
                "format": "%(asctime)s [%(levelname)s] %(name)s [%(filename)s:%(lineno)d] %(message)s"
            },
            "json": {
                "()": "pythonjsonlogger.jsonlogger.JsonFormatter",
                "format": "%(asctime)s %(name)s %(levelname)s %(message)s"
            }
        },
        "handlers": {
            "console": {
                "class": "logging.StreamHandler",
                "level": log_level,
                "formatter": "standard",
                "stream": "ext://sys.stdout"
            },
            "file": {
                "class": "logging.FileHandler",
                "level": log_level,
                "formatter": "detailed",
                "filename": f"/var/log/{service_name}.log",
                "mode": "a"
            }
        },
        "loggers": {
            "": {
                "handlers": ["console", "file"],
                "level": log_level,
                "propagate": False
            },
            "uvicorn": {
                "handlers": ["console"],
                "level": log_level,
                "propagate": False
            },
            "uvicorn.access": {
                "handlers": ["console"],
                "level": log_level,
                "propagate": False
            }
        }
    }

    # Define and attach the logstash handler only in production: dictConfig
    # instantiates every handler it is given, so declaring it unconditionally
    # would fail wherever python-logstash is not installed.
    if os.getenv("ENVIRONMENT") == "production":
        config["handlers"]["logstash"] = {
            "class": "logstash.TCPLogstashHandler",
            "host": os.getenv("LOGSTASH_HOST", "localhost"),
            "port": int(os.getenv("LOGSTASH_PORT", "5000")),
            "version": 1,
            "message_type": "logstash",
            "fqdn": False,
            "tags": [service_name]
        }
        config["loggers"][""]["handlers"].append("logstash")

    logging.config.dictConfig(config)
    logger = logging.getLogger(__name__)
    logger.info(f"Logging configured for {service_name}")
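Calling this once at process start is enough; a minimal sketch, with an illustrative service name:

# Hedged sketch: initialize logging once at service startup.
import logging
import os

setup_logging("training-service", log_level=os.getenv("LOG_LEVEL", "INFO"))
logging.getLogger("training-service").info("service booted")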
112
services/data/shared/monitoring/metrics.py
Normal file
@@ -0,0 +1,112 @@
"""
Metrics collection for microservices
"""

import logging
import time

from prometheus_client import Counter, Histogram, Gauge, start_http_server

logger = logging.getLogger(__name__)

# Prometheus metrics
REQUEST_COUNT = Counter(
    'http_requests_total',
    'Total HTTP requests',
    ['method', 'endpoint', 'status_code', 'service']
)

REQUEST_DURATION = Histogram(
    'http_request_duration_seconds',
    'HTTP request duration in seconds',
    ['method', 'endpoint', 'service']
)

ACTIVE_CONNECTIONS = Gauge(
    'active_connections',
    'Active database connections',
    ['service']
)

TRAINING_JOBS = Counter(
    'training_jobs_total',
    'Total training jobs',
    ['status', 'service']
)

FORECASTS_GENERATED = Counter(
    'forecasts_generated_total',
    'Total forecasts generated',
    ['service']
)


class MetricsCollector:
    """Metrics collector for microservices"""

    def __init__(self, service_name: str):
        self.service_name = service_name
        self.start_time = time.time()

    def start_metrics_server(self, port: int = 8080):
        """Start Prometheus metrics server"""
        try:
            start_http_server(port)
            logger.info(f"Metrics server started on port {port}")
        except Exception as e:
            logger.error(f"Failed to start metrics server: {e}")

    def record_request(self, method: str, endpoint: str, status_code: int, duration: float):
        """Record HTTP request metrics"""
        REQUEST_COUNT.labels(
            method=method,
            endpoint=endpoint,
            status_code=status_code,
            service=self.service_name
        ).inc()

        REQUEST_DURATION.labels(
            method=method,
            endpoint=endpoint,
            service=self.service_name
        ).observe(duration)

    def record_training_job(self, status: str):
        """Record training job metrics"""
        TRAINING_JOBS.labels(status=status, service=self.service_name).inc()

    def record_forecast_generated(self):
        """Record forecast generation metrics"""
        FORECASTS_GENERATED.labels(service=self.service_name).inc()

    def set_active_connections(self, count: int):
        """Set active database connections"""
        ACTIVE_CONNECTIONS.labels(service=self.service_name).set(count)


def metrics_middleware(metrics_collector: MetricsCollector):
    """Middleware factory that records request metrics"""

    async def middleware(request, call_next):
        start_time = time.time()

        # Starlette/FastAPI HTTP middleware receives an async call_next,
        # so the downstream call must be awaited.
        response = await call_next(request)

        duration = time.time() - start_time

        metrics_collector.record_request(
            method=request.method,
            endpoint=request.url.path,
            status_code=response.status_code,
            duration=duration
        )

        return response

    return middleware
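A wiring sketch for the middleware factory; the app and collector names are illustrative:

# Hedged sketch: registering the middleware on a FastAPI app.
from fastapi import FastAPI

app = FastAPI()
collector = MetricsCollector("forecasting-service")
app.middleware("http")(metrics_middleware(collector))
collector.start_metrics_server(8080)  # exposes /metrics on a separate port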
0
services/data/shared/utils/__init__.py
Normal file
71
services/data/shared/utils/datetime_utils.py
Normal file
@@ -0,0 +1,71 @@
"""
DateTime utilities for microservices
"""

from datetime import datetime, timezone, timedelta
from typing import Optional

import pytz


def utc_now() -> datetime:
    """Get current UTC datetime"""
    return datetime.now(timezone.utc)


def madrid_now() -> datetime:
    """Get current Madrid datetime"""
    madrid_tz = pytz.timezone('Europe/Madrid')
    return datetime.now(madrid_tz)


def to_utc(dt: datetime) -> datetime:
    """Convert datetime to UTC (naive datetimes are assumed to be UTC)"""
    if dt.tzinfo is None:
        dt = dt.replace(tzinfo=timezone.utc)
    return dt.astimezone(timezone.utc)


def to_madrid(dt: datetime) -> datetime:
    """Convert datetime to Madrid timezone (naive datetimes are assumed to be UTC)"""
    madrid_tz = pytz.timezone('Europe/Madrid')
    if dt.tzinfo is None:
        dt = dt.replace(tzinfo=timezone.utc)
    return dt.astimezone(madrid_tz)


def format_datetime(dt: datetime, format_str: str = "%Y-%m-%d %H:%M:%S") -> str:
    """Format datetime as string"""
    return dt.strftime(format_str)


def parse_datetime(dt_str: str, format_str: str = "%Y-%m-%d %H:%M:%S") -> datetime:
    """Parse datetime from string"""
    return datetime.strptime(dt_str, format_str)


def is_business_hours(dt: Optional[datetime] = None) -> bool:
    """Check if datetime is during business hours (9 AM - 6 PM Madrid time, weekdays)"""
    if dt is None:
        dt = madrid_now()

    if dt.tzinfo is None:
        dt = dt.replace(tzinfo=timezone.utc)

    madrid_dt = to_madrid(dt)

    # Monday=0 ... Sunday=6; weekends are never business hours
    if madrid_dt.weekday() >= 5:
        return False

    return 9 <= madrid_dt.hour < 18


def next_business_day(dt: Optional[datetime] = None) -> datetime:
    """Get the next business day at 9 AM Madrid time (the same day if it is already a weekday)"""
    if dt is None:
        dt = madrid_now()

    if dt.tzinfo is None:
        dt = dt.replace(tzinfo=timezone.utc)

    madrid_dt = to_madrid(dt)

    # Advance past the weekend if necessary
    while madrid_dt.weekday() >= 5:
        madrid_dt += timedelta(days=1)

    # Normalize to 9 AM
    return madrid_dt.replace(hour=9, minute=0, second=0, microsecond=0)
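A quick sketch exercising the helpers together; the timestamp is illustrative:

# Hedged sketch: exercising the datetime helpers (values illustrative).
stamp = parse_datetime("2024-03-15 14:30:00")        # naive, assumed UTC
print(format_datetime(to_madrid(stamp)))             # "2024-03-15 15:30:00" (CET, UTC+1)
print(is_business_hours(stamp))                      # True: Friday afternoon in Madrid
print(format_datetime(next_business_day(stamp)))     # same Friday, normalized to 09:00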
67
services/data/shared/utils/validation.py
Normal file
@@ -0,0 +1,67 @@
"""
Validation utilities for microservices
"""

import re
from typing import Any

from email_validator import validate_email, EmailNotValidError


def validate_spanish_phone(phone: str) -> bool:
    """Validate Spanish phone number"""
    # Optional +34/0034/34 prefix followed by 9 digits starting with 6-9
    pattern = r'^(\+34|0034|34)?[6-9]\d{8}$'
    return bool(re.match(pattern, phone.replace(' ', '').replace('-', '')))


def validate_email_address(email: str) -> bool:
    """Validate email address"""
    try:
        validate_email(email)
        return True
    except EmailNotValidError:
        return False


def validate_tenant_name(name: str) -> bool:
    """Validate tenant name"""
    # 2-50 characters: letters (including accented), numbers, spaces, hyphens, apostrophes
    pattern = r"^[a-zA-ZÀ-ÿ0-9\s\-']{2,50}$"
    return bool(re.match(pattern, name))


def validate_address(address: str) -> bool:
    """Validate address"""
    # Must be 5-200 characters after trimming whitespace
    return 5 <= len(address.strip()) <= 200


def validate_coordinates(latitude: float, longitude: float) -> bool:
    """Validate Madrid coordinates"""
    # Madrid lies roughly inside this bounding box
    madrid_bounds = {
        'lat_min': 40.3,
        'lat_max': 40.6,
        'lon_min': -3.8,
        'lon_max': -3.5
    }

    return (
        madrid_bounds['lat_min'] <= latitude <= madrid_bounds['lat_max'] and
        madrid_bounds['lon_min'] <= longitude <= madrid_bounds['lon_max']
    )


def validate_product_name(name: str) -> bool:
    """Validate product name"""
    # 1-50 characters: letters (including accented), numbers, spaces
    pattern = r"^[a-zA-ZÀ-ÿ0-9\s]{1,50}$"
    return bool(re.match(pattern, name))


def validate_positive_number(value: Any) -> bool:
    """Validate positive number"""
    try:
        return float(value) > 0
    except (ValueError, TypeError):
        return False


def validate_non_negative_number(value: Any) -> bool:
    """Validate non-negative number"""
    try:
        return float(value) >= 0
    except (ValueError, TypeError):
        return False
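A sketch of validating an incoming tenant payload with these helpers; the payload fields and values are illustrative:

# Hedged sketch: checking a hypothetical tenant record.
tenant = {
    "name": "Panadería San Ginés",
    "email": "owner@example.com",
    "phone": "+34 612 345 678",
    "latitude": 40.4168,
    "longitude": -3.7038,
}

errors = []
if not validate_tenant_name(tenant["name"]):
    errors.append("name")
if not validate_email_address(tenant["email"]):
    errors.append("email")
if not validate_spanish_phone(tenant["phone"]):
    errors.append("phone")
if not validate_coordinates(tenant["latitude"], tenant["longitude"]):
    errors.append("coordinates")
print(errors or "valid")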
31
services/forecasting/Dockerfile
Normal file
@@ -0,0 +1,31 @@
FROM python:3.11-slim

WORKDIR /app

# Install system dependencies
RUN apt-get update && apt-get install -y \
    gcc \
    curl \
    && rm -rf /var/lib/apt/lists/*

# Copy requirements
COPY requirements.txt .

# Install Python dependencies
RUN pip install --no-cache-dir -r requirements.txt

# Copy application code
COPY . .

# Add shared libraries to Python path
ENV PYTHONPATH="/app:/app/shared:$PYTHONPATH"

# Expose port
EXPOSE 8000

# Health check
HEALTHCHECK --interval=30s --timeout=10s --start-period=5s --retries=3 \
    CMD curl -f http://localhost:8000/health || exit 1

# Run application
CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000"]
0
services/forecasting/app/__init__.py
Normal file
0
services/forecasting/app/api/__init__.py
Normal file
0
services/forecasting/app/core/__init__.py
Normal file
32
services/forecasting/app/core/config.py
Normal file
@@ -0,0 +1,32 @@
"""
Forecasting service configuration
"""

import os

# BaseSettings lives in the pydantic-settings package as of pydantic v2
# (both are pinned in requirements.txt).
from pydantic_settings import BaseSettings


class Settings(BaseSettings):
    """Application settings"""

    # Basic settings
    APP_NAME: str = "Forecasting Service"
    VERSION: str = "1.0.0"
    DEBUG: bool = os.getenv("DEBUG", "False").lower() == "true"
    LOG_LEVEL: str = os.getenv("LOG_LEVEL", "INFO")

    # Database settings
    DATABASE_URL: str = os.getenv("DATABASE_URL", "postgresql+asyncpg://forecasting_user:forecasting_pass123@forecasting-db:5432/forecasting_db")

    # Redis settings
    REDIS_URL: str = os.getenv("REDIS_URL", "redis://redis:6379/0")

    # RabbitMQ settings
    RABBITMQ_URL: str = os.getenv("RABBITMQ_URL", "amqp://bakery:forecast123@rabbitmq:5672/")

    # Service URLs
    AUTH_SERVICE_URL: str = os.getenv("AUTH_SERVICE_URL", "http://auth-service:8000")

    class Config:
        env_file = ".env"


settings = Settings()
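A tiny sketch of the override behavior, assuming the variable is set before Settings is instantiated:

# Hedged sketch: environment variables override defaults when Settings is built.
import os

os.environ["LOG_LEVEL"] = "DEBUG"
print(Settings().LOG_LEVEL)  # -> "DEBUG"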
12
services/forecasting/app/core/database.py
Normal file
@@ -0,0 +1,12 @@
"""
Database configuration for forecasting service
"""

from shared.database.base import DatabaseManager
from app.core.config import settings

# Initialize database manager
database_manager = DatabaseManager(settings.DATABASE_URL)

# Alias for convenience
get_db = database_manager.get_db
61
services/forecasting/app/main.py
Normal file
@@ -0,0 +1,61 @@
"""
Forecasting Service
"""

import logging

from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware

from app.core.config import settings
from app.core.database import database_manager
from shared.monitoring.logging import setup_logging
from shared.monitoring.metrics import MetricsCollector

# Setup logging
setup_logging("forecasting-service", "INFO")
logger = logging.getLogger(__name__)

# Create FastAPI app
app = FastAPI(
    title="Forecasting Service",
    description="Demand forecasting service for the bakery platform",
    version="1.0.0"
)

# Initialize metrics collector
metrics_collector = MetricsCollector("forecasting-service")

# CORS middleware
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)


@app.on_event("startup")
async def startup_event():
    """Application startup"""
    logger.info("Starting Forecasting Service")

    # Create database tables
    await database_manager.create_tables()

    # Start metrics server
    metrics_collector.start_metrics_server(8080)

    logger.info("Forecasting Service started successfully")


@app.get("/health")
async def health_check():
    """Health check endpoint"""
    return {
        "status": "healthy",
        "service": "forecasting-service",
        "version": "1.0.0"
    }


if __name__ == "__main__":
    import uvicorn
    uvicorn.run(app, host="0.0.0.0", port=8000)
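A sketch of exercising the health endpoint in-process, assuming the module's import-time side effects (logging setup, settings) succeed in your environment; httpx is already pinned in requirements.txt:

# Hedged sketch: calling /health through an ASGI transport, no server needed.
import asyncio

import httpx
from app.main import app


async def check_health():
    transport = httpx.ASGITransport(app=app)
    async with httpx.AsyncClient(transport=transport, base_url="http://test") as client:
        response = await client.get("/health")
        assert response.json()["status"] == "healthy"

# asyncio.run(check_health())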
0
services/forecasting/app/schemas/__init__.py
Normal file
0
services/forecasting/app/services/__init__.py
Normal file
13
services/forecasting/requirements.txt
Normal file
@@ -0,0 +1,13 @@
fastapi==0.104.1
uvicorn[standard]==0.24.0
sqlalchemy==2.0.23
asyncpg==0.29.0
alembic==1.12.1
pydantic==2.5.0
pydantic-settings==2.1.0
httpx==0.25.2
redis==5.0.1
aio-pika==9.3.0
prometheus-client==0.17.1
python-json-logger==2.0.4
pytz==2023.3
0
services/forecasting/shared/auth/__init__.py
Normal file
41
services/forecasting/shared/auth/decorators.py
Normal file
@@ -0,0 +1,41 @@
"""
Authentication decorators for FastAPI
"""

import logging

import httpx
from fastapi import HTTPException, Depends
from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials

logger = logging.getLogger(__name__)

security = HTTPBearer()


def verify_service_token(auth_service_url: str):
    """Build a dependency that verifies a bearer token with the auth service"""

    async def verify_token(token: HTTPAuthorizationCredentials = Depends(security)):
        try:
            async with httpx.AsyncClient() as client:
                response = await client.post(
                    f"{auth_service_url}/verify",
                    headers={"Authorization": f"Bearer {token.credentials}"}
                )

                if response.status_code == 200:
                    return response.json()
                else:
                    raise HTTPException(
                        status_code=401,
                        detail="Invalid authentication credentials"
                    )

        except httpx.RequestError as e:
            logger.error(f"Auth service unavailable: {e}")
            raise HTTPException(
                status_code=503,
                detail="Authentication service unavailable"
            )

    return verify_token
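A sketch of guarding a route with this verifier; the route path is illustrative, while the auth service URL matches the default in config.py:

# Hedged sketch: protecting an endpoint with the token-verification dependency.
from fastapi import APIRouter, Depends

router = APIRouter()
require_auth = verify_service_token("http://auth-service:8000")


@router.get("/forecasts")
async def list_forecasts(current_user: dict = Depends(require_auth)):
    # current_user is whatever JSON the auth service's /verify endpoint returns.
    return {"requested_by": current_user}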
58
services/forecasting/shared/auth/jwt_handler.py
Normal file
@@ -0,0 +1,58 @@
"""
Shared JWT Authentication Handler
Used across all microservices for consistent authentication
"""

import logging
from datetime import datetime, timedelta
from typing import Optional, Dict, Any

import jwt

logger = logging.getLogger(__name__)


class JWTHandler:
    """JWT token handling for microservices"""

    def __init__(self, secret_key: str, algorithm: str = "HS256"):
        self.secret_key = secret_key
        self.algorithm = algorithm

    def create_access_token(self, data: Dict[str, Any], expires_delta: Optional[timedelta] = None) -> str:
        """Create JWT access token"""
        to_encode = data.copy()

        if expires_delta:
            expire = datetime.utcnow() + expires_delta
        else:
            expire = datetime.utcnow() + timedelta(minutes=30)

        to_encode.update({"exp": expire, "type": "access"})

        encoded_jwt = jwt.encode(to_encode, self.secret_key, algorithm=self.algorithm)
        return encoded_jwt

    def create_refresh_token(self, data: Dict[str, Any], expires_delta: Optional[timedelta] = None) -> str:
        """Create JWT refresh token"""
        to_encode = data.copy()

        if expires_delta:
            expire = datetime.utcnow() + expires_delta
        else:
            expire = datetime.utcnow() + timedelta(days=7)

        to_encode.update({"exp": expire, "type": "refresh"})

        encoded_jwt = jwt.encode(to_encode, self.secret_key, algorithm=self.algorithm)
        return encoded_jwt

    def verify_token(self, token: str) -> Optional[Dict[str, Any]]:
        """Verify and decode JWT token"""
        try:
            payload = jwt.decode(token, self.secret_key, algorithms=[self.algorithm])
            return payload
        except jwt.ExpiredSignatureError:
            logger.warning("Token has expired")
            return None
        except jwt.InvalidTokenError:
            logger.warning("Invalid token")
            return None
0
services/forecasting/shared/database/__init__.py
Normal file
56
services/forecasting/shared/database/base.py
Normal file
@@ -0,0 +1,56 @@
"""
Base database configuration for all microservices
"""

import logging

from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine
from sqlalchemy.orm import sessionmaker, declarative_base

logger = logging.getLogger(__name__)

Base = declarative_base()


class DatabaseManager:
    """Database manager for microservices"""

    def __init__(self, database_url: str):
        self.database_url = database_url
        self.async_engine = create_async_engine(
            database_url,
            echo=False,
            pool_pre_ping=True,
            pool_recycle=300,
            pool_size=20,
            max_overflow=30
        )

        self.async_session_local = sessionmaker(
            self.async_engine,
            class_=AsyncSession,
            expire_on_commit=False
        )

    async def get_db(self):
        """Get database session"""
        async with self.async_session_local() as session:
            try:
                yield session
            except Exception as e:
                logger.error(f"Database session error: {e}")
                await session.rollback()
                raise
            finally:
                await session.close()

    async def create_tables(self):
        """Create database tables"""
        async with self.async_engine.begin() as conn:
            await conn.run_sync(Base.metadata.create_all)

    async def drop_tables(self):
        """Drop database tables"""
        async with self.async_engine.begin() as conn:
            await conn.run_sync(Base.metadata.drop_all)
0
services/forecasting/shared/messaging/__init__.py
Normal file
Some files were not shown because too many files have changed in this diff.