Initial microservices setup from artifacts

Urtzi Alfaro
2025-07-17 13:09:24 +02:00
commit 347ff51bd7
200 changed files with 9559 additions and 0 deletions

scripts/deploy.sh Executable file

@@ -0,0 +1,18 @@
#!/bin/bash
echo "🚀 Deploying Bakery Forecasting Platform..."
# Build and deploy all services
docker-compose build
docker-compose up -d
echo "Waiting for services to be healthy..."
sleep 30
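# Note: the fixed sleep is a guess; polling the gateway is more robust
# (sketch, assumes the same health endpoint checked below):
#   until curl -sf http://localhost:8000/health >/dev/null; do sleep 2; done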
# Check service health
echo "Checking service health..."
curl -f http://localhost:8000/health || echo "Gateway health check failed"
echo "✅ Deployment completed"
echo "Gateway: http://localhost:8000"
echo "API Docs: http://localhost:8000/docs"

scripts/setup.sh Executable file

@@ -0,0 +1,879 @@
#!/bin/bash
# scripts/setup.sh
# Intelligent Setup Script - Extract artifacts and create microservices structure
set -e
echo "🚀 Setting up Bakery Forecasting Microservices Platform"
echo "========================================================"
# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m' # No Color
print_step() {
echo -e "${BLUE}==>${NC} $1"
}
print_success() {
echo -e "${GREEN}[OK]${NC} $1"
}
print_warning() {
echo -e "${YELLOW}[WARN]${NC} $1"
}
print_error() {
echo -e "${RED}[ERROR]${NC} $1"
}
# Check prerequisites
print_step "Checking prerequisites..."
command -v docker >/dev/null 2>&1 || {
print_error "Docker is required but not installed. Please install Docker first."
exit 1
}
command -v docker-compose >/dev/null 2>&1 || {
print_error "Docker Compose is required but not installed. Please install Docker Compose first."
exit 1
}
print_success "Prerequisites check passed"
# Function to extract files from artifact files
extract_artifact_files() {
local artifact_file="$1"
local description="$2"
print_step "Processing $description..."
if [ ! -f "$artifact_file" ]; then
print_warning "Artifact file $artifact_file not found, skipping..."
return
fi
# Read the artifact file and extract individual files
local current_file=""
local current_content=""
local in_file=false
while IFS= read -r line; do
# Check if line is a file path marker (ends in .py, .yml, .md, .sh, etc., or Dockerfile)
if [[ "$line" =~ ^#[[:space:]]*(.*\.(py|yml|yaml|md|sh|txt|js|json|html|css)|.*Dockerfile)$ ]]; then
# Save previous file if we were processing one
if [ "$in_file" = true ] && [ -n "$current_file" ]; then
# Create directory if it doesn't exist
local dir=$(dirname "$current_file")
mkdir -p "$dir"
# Write content to file (-e interprets the \n separators added below)
echo -e "$current_content" > "$current_file"
print_success "Created: $current_file"
fi
# Start new file
current_file=$(echo "$line" | sed 's/^#[[:space:]]*//')
current_content=""
in_file=true
elif [ "$in_file" = true ]; then
# Add line to current file content
if [ -n "$current_content" ]; then
current_content="$current_content\n$line"
else
current_content="$line"
fi
fi
done < "$artifact_file"
# Save the last file
if [ "$in_file" = true ] && [ -n "$current_file" ]; then
local dir=$(dirname "$current_file")
mkdir -p "$dir"
echo -e "$current_content" > "$current_file"
print_success "Created: $current_file"
fi
}
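# The parser above assumes a multi-file artifact laid out as comment markers
# followed by file content, e.g. (illustrative paths, not actual files):
#   # services/auth/app/main.py
#   ...file content...
#   # services/auth/requirements.txt
#   ...file content...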
# Function to extract Python files with multiple file markers
extract_python_artifact() {
local artifact_file="$1"
local description="$2"
print_step "Processing $description..."
if [ ! -f "$artifact_file" ]; then
print_warning "Artifact file $artifact_file not found, skipping..."
return
fi
# Use Python to parse the multi-file artifact
python3 << EOF
import re
import os

def extract_files(filename):
    with open(filename, 'r') as f:
        content = f.read()
    # Split by file markers (lines starting with # and containing file paths)
    files = {}
    current_file = None
    current_content = []
    for line in content.split('\n'):
        # Check for file path markers (known extensions or a Dockerfile)
        if re.match(r'^#\s+\S*(\.(py|yml|yaml|txt|sh|json|html|css|js)|Dockerfile)', line):
            # Save previous file
            if current_file and current_content:
                files[current_file] = '\n'.join(current_content)
            # Start new file
            current_file = re.sub(r'^#\s+', '', line)
            current_content = []
        elif current_file:
            current_content.append(line)
    # Save last file
    if current_file and current_content:
        files[current_file] = '\n'.join(current_content)
    # Write files
    for filepath, file_content in files.items():
        # Clean up the content (remove leading/trailing triple quotes if present)
        file_content = file_content.strip()
        if file_content.startswith('"""') and file_content.endswith('"""'):
            file_content = file_content[3:-3]
        elif file_content.startswith("'''") and file_content.endswith("'''"):
            file_content = file_content[3:-3]
        # Create the target directory
        os.makedirs(os.path.dirname(filepath) or '.', exist_ok=True)
        # Write the extracted file
        with open(filepath, 'w') as f:
            f.write(file_content)
        print(f"✓ Created: {filepath}")

extract_files('$artifact_file')
EOF
}
# Create base project structure first
print_step "Creating base project structure..."
# Create main directories
mkdir -p {gateway,services/{auth,training,forecasting,data,tenant,notification},shared,frontend/{dashboard,marketing},infrastructure,deployment,tests,docs,scripts}
# Create subdirectories for each service
for service in auth training forecasting data tenant notification; do
mkdir -p services/$service/{app/{core,models,schemas,services,api,ml},migrations/versions,tests}
touch services/$service/app/__init__.py
touch services/$service/app/core/__init__.py
touch services/$service/app/models/__init__.py
touch services/$service/app/schemas/__init__.py
touch services/$service/app/services/__init__.py
touch services/$service/app/api/__init__.py
if [ "$service" = "training" ]; then
touch services/$service/app/ml/__init__.py
fi
done
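# Resulting layout per service (from the calls above):
#   services/<name>/app/{core,models,schemas,services,api,ml}/
#   services/<name>/migrations/versions/
#   services/<name>/tests/
# Each app package gets an __init__.py; app/ml gets one only for training.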
# Create gateway structure
mkdir -p gateway/{app/{core,middleware,routes},tests}
touch gateway/app/__init__.py
touch gateway/app/core/__init__.py
touch gateway/app/middleware/__init__.py
touch gateway/app/routes/__init__.py
# Create shared library structure
mkdir -p shared/{auth,database,messaging,monitoring,utils}
for lib in auth database messaging monitoring utils; do
touch shared/$lib/__init__.py
done
# Create infrastructure directories
mkdir -p infrastructure/{docker,kubernetes,terraform,monitoring}/{base,dev,staging,production}
mkdir -p infrastructure/monitoring/{prometheus,grafana,logstash}
print_success "Base project structure created"
# Extract files from artifacts
print_step "Extracting files from artifacts..."
# Process shared libraries
if [ -f "shared_libraries.py" ]; then
extract_python_artifact "shared_libraries.py" "Shared Libraries"
fi
# Process gateway service
if [ -f "gateway_service.py" ]; then
extract_python_artifact "gateway_service.py" "Gateway Service"
fi
# Process auth service
if [ -f "auth_service.py" ]; then
extract_python_artifact "auth_service.py" "Authentication Service"
fi
# Process training service
if [ -f "training_service.py" ]; then
extract_python_artifact "training_service.py" "Training Service"
fi
print_step "Creating missing service files..."
# Create remaining service files that might not be in artifacts
for service in forecasting data tenant notification; do
service_dir="services/$service"
# Create main.py if it doesn't exist
if [ ! -f "$service_dir/app/main.py" ]; then
cat > "$service_dir/app/main.py" << EOF
"""
$(echo $service | sed 's/.*/\L&/; s/[a-z]*/\u&/g') Service
"""
import logging
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
from app.core.config import settings
from app.core.database import database_manager
from shared.monitoring.logging import setup_logging
from shared.monitoring.metrics import MetricsCollector
# Setup logging
setup_logging("$service-service", "INFO")
logger = logging.getLogger(__name__)
# Create FastAPI app
app = FastAPI(
title="$(echo $service | sed 's/.*/\L&/; s/[a-z]*/\u&/g') Service",
description="$(echo $service | sed 's/.*/\L&/; s/[a-z]*/\u&/g') service for bakery forecasting",
version="1.0.0"
)
# Initialize metrics collector
metrics_collector = MetricsCollector("$service-service")
# CORS middleware
app.add_middleware(
CORSMiddleware,
allow_origins=["*"],
allow_credentials=True,
allow_methods=["*"],
allow_headers=["*"],
)
@app.on_event("startup")
async def startup_event():
"""Application startup"""
logger.info("Starting $(echo $service | sed 's/.*/\L&/; s/[a-z]*/\u&/g') Service")
# Create database tables
await database_manager.create_tables()
# Start metrics server
metrics_collector.start_metrics_server(8080)
logger.info("$(echo $service | sed 's/.*/\L&/; s/[a-z]*/\u&/g') Service started successfully")
@app.get("/health")
async def health_check():
"""Health check endpoint"""
return {
"status": "healthy",
"service": "$service-service",
"version": "1.0.0"
}
if __name__ == "__main__":
import uvicorn
uvicorn.run(app, host="0.0.0.0", port=8000)
EOF
print_success "Created: $service_dir/app/main.py"
fi
# Create config.py if it doesn't exist
if [ ! -f "$service_dir/app/core/config.py" ]; then
cat > "$service_dir/app/core/config.py" << EOF
"""
$(echo $service | sed 's/.*/\L&/; s/[a-z]*/\u&/g') service configuration
"""
import os
# Pydantic v2 moved BaseSettings into the pydantic-settings package (pinned in requirements.txt)
from pydantic_settings import BaseSettings
class Settings(BaseSettings):
"""Application settings"""
# Basic settings
APP_NAME: str = "$(echo $service | sed 's/.*/\L&/; s/[a-z]*/\u&/g') Service"
VERSION: str = "1.0.0"
DEBUG: bool = os.getenv("DEBUG", "False").lower() == "true"
LOG_LEVEL: str = os.getenv("LOG_LEVEL", "INFO")
# Database settings
DATABASE_URL: str = os.getenv("DATABASE_URL", "postgresql+asyncpg://${service}_user:${service}_pass123@${service}-db:5432/${service}_db")
# Redis settings
REDIS_URL: str = os.getenv("REDIS_URL", "redis://redis:6379/0")
# RabbitMQ settings
RABBITMQ_URL: str = os.getenv("RABBITMQ_URL", "amqp://bakery:forecast123@rabbitmq:5672/")
# Service URLs
AUTH_SERVICE_URL: str = os.getenv("AUTH_SERVICE_URL", "http://auth-service:8000")
class Config:
env_file = ".env"
settings = Settings()
EOF
print_success "Created: $service_dir/app/core/config.py"
fi
# Create database.py if it doesn't exist
if [ ! -f "$service_dir/app/core/database.py" ]; then
cat > "$service_dir/app/core/database.py" << EOF
"""
Database configuration for $service service
"""
from shared.database.base import DatabaseManager
from app.core.config import settings
# Initialize database manager
database_manager = DatabaseManager(settings.DATABASE_URL)
# Alias for convenience
get_db = database_manager.get_db
EOF
print_success "Created: $service_dir/app/core/database.py"
fi
# Create requirements.txt if it doesn't exist
if [ ! -f "$service_dir/requirements.txt" ]; then
cat > "$service_dir/requirements.txt" << 'EOF'
fastapi==0.104.1
uvicorn[standard]==0.24.0
sqlalchemy==2.0.23
asyncpg==0.29.0
alembic==1.12.1
pydantic==2.5.0
pydantic-settings==2.1.0
httpx==0.25.2
redis==5.0.1
aio-pika==9.3.0
prometheus-client==0.17.1
python-json-logger==2.0.4
pytz==2023.3
EOF
print_success "Created: $service_dir/requirements.txt"
fi
# Create Dockerfile if it doesn't exist
if [ ! -f "$service_dir/Dockerfile" ]; then
cat > "$service_dir/Dockerfile" << 'EOF'
FROM python:3.11-slim
WORKDIR /app
# Install system dependencies
RUN apt-get update && apt-get install -y \
gcc \
curl \
&& rm -rf /var/lib/apt/lists/*
# Copy requirements
COPY requirements.txt .
# Install Python dependencies
RUN pip install --no-cache-dir -r requirements.txt
# Copy application code
COPY . .
# Add shared libraries to Python path
ENV PYTHONPATH="/app:/app/shared:$PYTHONPATH"
# Expose port
EXPOSE 8000
# Health check
HEALTHCHECK --interval=30s --timeout=10s --start-period=5s --retries=3 \
CMD curl -f http://localhost:8000/health || exit 1
# Run application
CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000"]
EOF
print_success "Created: $service_dir/Dockerfile"
fi
done
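# The generated Dockerfiles are normally built via docker-compose, but a single
# service can also be built and run by hand (sketch; the 8003 host port follows
# the development port mapping listed in the README below):
#   docker build -t bakery/forecasting-service services/forecasting
#   docker run -p 8003:8000 bakery/forecasting-service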
# Create .env file
print_step "Creating environment configuration..."
if [ ! -f ".env" ]; then
cat > .env << 'EOF'
# Environment
ENVIRONMENT=development
DEBUG=true
LOG_LEVEL=INFO
# Database URLs
AUTH_DB_URL=postgresql+asyncpg://auth_user:auth_pass123@auth-db:5432/auth_db
TRAINING_DB_URL=postgresql+asyncpg://training_user:training_pass123@training-db:5432/training_db
FORECASTING_DB_URL=postgresql+asyncpg://forecasting_user:forecasting_pass123@forecasting-db:5432/forecasting_db
DATA_DB_URL=postgresql+asyncpg://data_user:data_pass123@data-db:5432/data_db
TENANT_DB_URL=postgresql+asyncpg://tenant_user:tenant_pass123@tenant-db:5432/tenant_db
NOTIFICATION_DB_URL=postgresql+asyncpg://notification_user:notification_pass123@notification-db:5432/notification_db
# Redis
REDIS_URL=redis://redis:6379
# RabbitMQ
RABBITMQ_URL=amqp://bakery:forecast123@rabbitmq:5672/
# JWT
JWT_SECRET_KEY=your-super-secret-jwt-key-change-in-production-please
JWT_ALGORITHM=HS256
JWT_ACCESS_TOKEN_EXPIRE_MINUTES=30
JWT_REFRESH_TOKEN_EXPIRE_DAYS=7
# External APIs
AEMET_API_KEY=your-aemet-api-key-here
MADRID_OPENDATA_API_KEY=your-madrid-opendata-key-here
# CORS
CORS_ORIGINS=http://localhost:3000,http://localhost:3001
# Email
SMTP_HOST=smtp.gmail.com
SMTP_PORT=587
SMTP_USER=your-email@gmail.com
SMTP_PASSWORD=your-email-password
# WhatsApp
WHATSAPP_API_KEY=your-whatsapp-api-key-here
# Monitoring
PROMETHEUS_URL=http://prometheus:9090
GRAFANA_URL=http://grafana:3000
EOF
print_success "Environment configuration created"
fi
# Create monitoring configuration
print_step "Creating monitoring configuration..."
if [ ! -f "infrastructure/monitoring/prometheus/prometheus.yml" ]; then
cat > infrastructure/monitoring/prometheus/prometheus.yml << 'EOF'
global:
scrape_interval: 15s
scrape_configs:
- job_name: 'gateway'
static_configs:
- targets: ['gateway:8080']
- job_name: 'auth-service'
static_configs:
- targets: ['auth-service:8080']
- job_name: 'training-service'
static_configs:
- targets: ['training-service:8080']
- job_name: 'forecasting-service'
static_configs:
- targets: ['forecasting-service:8080']
- job_name: 'data-service'
static_configs:
- targets: ['data-service:8080']
- job_name: 'tenant-service'
static_configs:
- targets: ['tenant-service:8080']
- job_name: 'notification-service'
static_configs:
- targets: ['notification-service:8080']
EOF
print_success "Prometheus configuration created"
fi
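# Every scrape target uses port 8080, matching the metrics endpoint each
# service starts via metrics_collector.start_metrics_server(8080) on startup.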
# Create utility scripts
print_step "Creating utility scripts..."
# Create test script
cat > scripts/test.sh << 'EOF'
#!/bin/bash
echo "🧪 Running tests for all services..."
# Run tests for each service
for service in auth training forecasting data tenant notification; do
echo "Testing $service service..."
if docker-compose ps | grep -q "${service}-service.*Up"; then
docker-compose exec -T ${service}-service python -m pytest tests/ -v || echo "Tests failed for $service"
else
echo "Service $service is not running, skipping tests"
fi
done
echo "✅ Test run completed"
EOF
# Create deploy script
cat > scripts/deploy.sh << 'EOF'
#!/bin/bash
echo "🚀 Deploying Bakery Forecasting Platform..."
# Build and deploy all services
docker-compose build
docker-compose up -d
echo "Waiting for services to be healthy..."
sleep 30
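# Note: the fixed sleep is a guess; polling the gateway is more robust
# (sketch, assumes the same health endpoint checked below):
#   until curl -sf http://localhost:8000/health >/dev/null; do sleep 2; done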
# Check service health
echo "Checking service health..."
curl -f http://localhost:8000/health || echo "Gateway health check failed"
echo "✅ Deployment completed"
echo "Gateway: http://localhost:8000"
echo "API Docs: http://localhost:8000/docs"
EOF
# Make scripts executable
chmod +x scripts/*.sh
print_success "Utility scripts created"
# Create .gitignore
print_step "Creating .gitignore..."
if [ ! -f ".gitignore" ]; then
cat > .gitignore << 'EOF'
# Environment
.env
.env.local
.env.development.local
.env.test.local
.env.production.local
# Python
__pycache__/
*.py[cod]
*$py.class
*.so
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
.pytest_cache/
.coverage
.coverage.*
htmlcov/
.tox/
.nox/
.hypothesis/
.mypy_cache/
.dmypy.json
dmypy.json
.pyre/
# Virtual Environment
venv/
ENV/
env/
.venv
# Node
node_modules/
npm-debug.log*
yarn-debug.log*
yarn-error.log*
.pnpm-debug.log*
.npm
.eslintcache
.next
out/
build/
dist/
# IDE
.vscode/
.idea/
*.swp
*.swo
*~
.DS_Store
# Logs
logs/
*.log
# Database
*.db
*.sqlite
*.sqlite3
# ML Models
*.pkl
*.joblib
*.h5
models/
# Data
data/external/
data/processed/
*.csv
*.xlsx
# Docker
.docker/
# Infrastructure
*.tfstate
*.tfstate.backup
.terraform/
.terraform.lock.hcl
# Kubernetes
kubeconfig
*.yaml.bak
# Monitoring
prometheus_data/
grafana_data/
elasticsearch_data/
# Artifacts (from Claude)
*_service.py
*_libraries.py
*.md
!README.md
setup_scripts.sh
EOF
print_success ".gitignore created"
fi
# Create README
print_step "Creating documentation..."
if [ ! -f "README.md" ]; then
cat > README.md << 'EOF'
# Bakery Forecasting Platform - Microservices
## Overview
AI-powered demand forecasting platform for bakeries in Madrid, Spain using microservices architecture.
## Architecture
- **API Gateway**: Central entry point for all client requests
- **Auth Service**: User authentication and authorization
- **Training Service**: ML model training for demand forecasting
- **Forecasting Service**: Generate predictions using trained models
- **Data Service**: External data integration (weather, traffic, events)
- **Tenant Service**: Multi-tenant management
- **Notification Service**: Email and WhatsApp notifications
## Quick Start
### Prerequisites
- Docker and Docker Compose
- Python 3.11+
- Node.js 18+
### Setup
```bash
# Run setup script (this script!)
./scripts/setup.sh
# Start services
docker-compose up -d
# Check service health
curl http://localhost:8000/health
```
### Services
- **Gateway**: http://localhost:8000
- **API Docs**: http://localhost:8000/docs
- **Grafana**: http://localhost:3002
- **Prometheus**: http://localhost:9090
- **RabbitMQ Management**: http://localhost:15672
### Development
#### Running Tests
```bash
./scripts/test.sh
```
#### Building Services
```bash
docker-compose build
```
#### Viewing Logs
```bash
# All services
docker-compose logs -f
# Specific service
docker-compose logs -f auth-service
```
#### Service URLs (Development)
- Gateway: http://localhost:8000
- Auth Service: http://localhost:8001
- Training Service: http://localhost:8002
- Forecasting Service: http://localhost:8003
- Data Service: http://localhost:8004
- Tenant Service: http://localhost:8005
- Notification Service: http://localhost:8006
## Environment Variables
The setup script creates a `.env` file with defaults; update the following:
```bash
# External API Keys
AEMET_API_KEY=your-aemet-api-key
MADRID_OPENDATA_API_KEY=your-madrid-opendata-key
# Email Configuration
SMTP_USER=your-email@gmail.com
SMTP_PASSWORD=your-email-password
# WhatsApp API
WHATSAPP_API_KEY=your-whatsapp-api-key
# JWT Secret (change in production!)
JWT_SECRET_KEY=your-super-secret-jwt-key
```
## Troubleshooting
### Services won't start
```bash
# Check if ports are available
docker-compose ps
netstat -tulpn | grep :8000
# Restart services
docker-compose down
docker-compose up -d
```
### Database connection issues
```bash
# Check database containers
docker-compose logs auth-db
docker-compose logs training-db
# Reset databases
docker-compose down -v
docker-compose up -d
```
### Service communication issues
```bash
# Check service health
curl http://localhost:8000/health
curl http://localhost:8001/health
curl http://localhost:8002/health
# Check RabbitMQ
open http://localhost:15672
# User: bakery, Password: forecast123
```
## Next Steps
1. **Configure External APIs**: Add your AEMET and Madrid Open Data API keys
2. **Test Authentication**: Register a user and test login
3. **Upload Sales Data**: Import historical sales data
4. **Train Models**: Start your first training job
5. **Generate Forecasts**: Create demand predictions
## License
MIT License
EOF
print_success "Documentation created"
fi
# Final steps
print_step "Final setup steps..."
# Copy shared libraries to each service (for Docker builds)
for service in auth training forecasting data tenant notification; do
if [ -d "shared" ]; then
cp -r shared services/$service/ 2>/dev/null || true
fi
done
# Copy shared libraries to gateway
if [ -d "shared" ]; then
cp -r shared gateway/ 2>/dev/null || true
fi
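# The copies are needed because each service's Docker build context is its own
# directory, so the image cannot COPY files from outside it. An alternative
# would be a single compose build context rooted at the repository top level.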
# Initialize Git repository if not exists
if [ ! -d ".git" ]; then
git init
git add .
git commit -m "Initial microservices setup from artifacts"
print_success "Git repository initialized"
fi
echo
echo "🎉 Setup completed successfully!"
echo "==============================================="
echo
echo "Next steps:"
echo "1. Update .env with your actual API keys"
echo "2. Start services: docker-compose up -d"
echo "3. Check health: curl http://localhost:8000/health"
echo "4. View API docs: http://localhost:8000/docs"
echo "5. Monitor services: http://localhost:3002 (Grafana)"
echo
echo "Services will be available at:"
echo "- Gateway: http://localhost:8000"
echo "- Auth Service: http://localhost:8001"
echo "- Training Service: http://localhost:8002"
echo "- Monitoring: http://localhost:3002"
echo "- RabbitMQ: http://localhost:15672"
echo
echo "Artifact files processed:"
[ -f "shared_libraries.py" ] && echo "✓ shared_libraries.py"
[ -f "gateway_service.py" ] && echo "✓ gateway_service.py"
[ -f "auth_service.py" ] && echo "✓ auth_service.py"
[ -f "training_service.py" ] && echo "✓ training_service.py"
[ -f "docker-compose.yml" ] && echo "✓ docker-compose.yml"
echo
echo "Happy coding! 🚀"

scripts/test.sh Executable file

@@ -0,0 +1,15 @@
#!/bin/bash
echo "🧪 Running tests for all services..."
# Run tests for each service
for service in auth training forecasting data tenant notification; do
echo "Testing $service service..."
if docker-compose ps | grep -q "${service}-service.*Up"; then
docker-compose exec -T ${service}-service python -m pytest tests/ -v || echo "Tests failed for $service"
else
echo "Service $service is not running, skipping tests"
fi
done
echo "✅ Test run completed"