Improve docker config

Urtzi Alfaro
2025-07-20 02:16:51 +02:00
parent 9a67f3d175
commit 1c730c3c81
27 changed files with 2598 additions and 1161 deletions

scripts/docker-setup.sh (new executable file, 214 lines added)

@@ -0,0 +1,214 @@
#!/bin/bash
# ================================================================
# FIXED SETUP SCRIPT
# scripts/docker-setup.sh
# ================================================================
# Fixed setup script with proper error handling
set -e
ENVIRONMENT=${1:-development}
PROFILES=${2:-"development,frontend"}
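# Usage:
#   ./scripts/docker-setup.sh [environment] [profiles]
#   environment: development (default), production, testing, or anything else for custom profiles
#   profiles:    comma-separated compose profiles (default "development,frontend"),
#                only used by the development and custom-profile paths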
# Colors for output
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
RED='\033[0;31m'
NC='\033[0m'
# Logging functions
print_step() {
    echo -e "${GREEN}[STEP]${NC} $1"
}
print_warning() {
    echo -e "${YELLOW}[WARNING]${NC} $1"
}
print_error() {
    echo -e "${RED}[ERROR]${NC} $1"
}
print_step "Setting up Bakery Forecasting Platform"
echo "Environment: $ENVIRONMENT"
echo "Profiles: $PROFILES"
# Check if .env file exists
if [ ! -f ".env" ]; then
    print_error ".env file not found!"
    echo "Please create a .env file based on the provided template."
    echo "Run: cp .env.example .env"
    exit 1
fi
# Validate critical environment variables
print_step "Validating environment variables..."
# Source the .env file to check variables
set -a # automatically export all variables
source .env
set +a
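# Note: this relies on .env containing plain KEY=value lines (no spaces around
# '='); values with spaces or special characters must be quoted.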
# Check critical variables
critical_vars=(
    "IMAGE_TAG"
    "AUTH_DB_NAME"
    "AUTH_DB_USER"
    "AUTH_DB_PASSWORD"
    "REDIS_PASSWORD"
    "RABBITMQ_USER"
    "RABBITMQ_PASSWORD"
    "GATEWAY_PORT"
    "AUTH_SERVICE_PORT"
)
missing_vars=()
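# ${!var} is bash indirect expansion: it expands to the value of the variable
# whose name is stored in var, so each critical variable can be checked by name.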
for var in "${critical_vars[@]}"; do
    if [ -z "${!var}" ]; then
        missing_vars+=("$var")
    fi
done
if [ ${#missing_vars[@]} -gt 0 ]; then
    print_error "Missing required environment variables:"
    printf '%s\n' "${missing_vars[@]}"
    exit 1
fi
print_step "Environment variables validated successfully"
# Create necessary directories
print_step "Creating necessary directories..."
mkdir -p infrastructure/{redis,rabbitmq,postgres/init-scripts,monitoring/{prometheus/rules,grafana/{dashboards,datasources}},pgadmin}
mkdir -p backups logs models templates/{email,whatsapp}
mkdir -p shared/{config,auth,database,messaging,monitoring,utils}
# Create basic monitoring configs if they don't exist
if [ ! -f "infrastructure/monitoring/prometheus/prometheus.yml" ]; then
    print_step "Creating basic Prometheus configuration..."
    cat > infrastructure/monitoring/prometheus/prometheus.yml << 'EOF'
global:
  scrape_interval: 15s
scrape_configs:
  - job_name: 'gateway'
    static_configs:
      - targets: ['gateway:8000']
  - job_name: 'auth-service'
    static_configs:
      - targets: ['auth-service:8000']
  - job_name: 'training-service'
    static_configs:
      - targets: ['training-service:8000']
  - job_name: 'forecasting-service'
    static_configs:
      - targets: ['forecasting-service:8000']
  - job_name: 'data-service'
    static_configs:
      - targets: ['data-service:8000']
  - job_name: 'tenant-service'
    static_configs:
      - targets: ['tenant-service:8000']
  - job_name: 'notification-service'
    static_configs:
      - targets: ['notification-service:8000']
EOF
fi
# Set proper permissions
chmod 644 infrastructure/monitoring/prometheus/prometheus.yml 2>/dev/null || true
# Stop any existing containers
print_step "Stopping existing containers..."
docker-compose down --remove-orphans 2>/dev/null || true
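# --remove-orphans also cleans up containers for services that are no longer in
# the compose file; the trailing "|| true" keeps set -e from aborting when
# nothing is running yet.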
# Build and start services based on environment
case $ENVIRONMENT in
    "development")
        print_step "Starting development environment..."
        IFS=',' read -ra PROFILE_ARRAY <<< "$PROFILES"
        PROFILE_ARGS=""
        for profile in "${PROFILE_ARRAY[@]}"; do
            PROFILE_ARGS="$PROFILE_ARGS --profile $profile"
        done
        # Build first to catch any build errors
        print_step "Building services..."
        docker-compose $PROFILE_ARGS build
        # Then start
        print_step "Starting services..."
        docker-compose $PROFILE_ARGS up -d
        ;;
    "production")
        print_step "Starting production environment..."
        docker-compose -f docker-compose.yml -f docker-compose.prod.yml --profile production --profile monitoring up -d --build
        ;;
    "testing")
        print_step "Starting testing environment..."
        docker-compose -f docker-compose.yml -f docker-compose.test.yml up -d --build
        ;;
    *)
        print_step "Starting with custom profiles: $PROFILES"
        IFS=',' read -ra PROFILE_ARRAY <<< "$PROFILES"
        PROFILE_ARGS=""
        for profile in "${PROFILE_ARRAY[@]}"; do
            PROFILE_ARGS="$PROFILE_ARGS --profile $profile"
        done
        docker-compose $PROFILE_ARGS build
        docker-compose $PROFILE_ARGS up -d
        ;;
esac
# Wait a moment for services to start
print_step "Waiting for services to start..."
sleep 10
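# Ten seconds may not be enough for every container to become healthy, so the
# check below only warns instead of failing the script.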
# Check service status
print_step "Checking service status..."
if command -v curl &> /dev/null; then
    # Check if gateway is responding
    if curl -f -s "http://localhost:${GATEWAY_PORT}/health" > /dev/null 2>&1; then
        echo "✅ Gateway is responding"
    else
        echo "⚠️ Gateway is not yet responding (this is normal during first startup)"
    fi
else
    echo "⚠️ curl not found - skipping health check"
fi
print_step "Setup completed!"
echo ""
echo "================================================================"
echo -e "${GREEN}SERVICES AVAILABLE${NC}"
echo "================================================================"
echo "- Gateway: http://localhost:${GATEWAY_PORT}"
echo "- API Docs: http://localhost:${GATEWAY_PORT}/docs"
echo "- Dashboard: http://localhost:${DASHBOARD_PORT} (if frontend profile enabled)"
echo "- Grafana: http://localhost:${GRAFANA_PORT} (${GRAFANA_ADMIN_USER}/${GRAFANA_ADMIN_PASSWORD})"
echo "- pgAdmin: http://localhost:${PGADMIN_PORT} (${PGADMIN_EMAIL}/${PGADMIN_PASSWORD})"
echo "- RabbitMQ: http://localhost:${RABBITMQ_MANAGEMENT_PORT} (${RABBITMQ_USER}/${RABBITMQ_PASSWORD})"
echo "- Redis Commander: http://localhost:${REDIS_COMMANDER_PORT} (${REDIS_COMMANDER_USER}/${REDIS_COMMANDER_PASSWORD})"
echo ""
echo "================================================================"
echo -e "${GREEN}NEXT STEPS${NC}"
echo "================================================================"
echo "1. Check service health:"
echo " ./scripts/docker-health-check.sh"
echo ""
echo "2. View logs:"
echo " docker-compose logs -f"
echo ""
echo "3. Check specific service:"
echo " docker-compose logs -f auth-service"
echo ""
echo "If you see any errors, check the logs for more details."