Initial commit - production deployment
0
shared/config/__init__.py
Executable file
537
shared/config/base.py
Executable file
@@ -0,0 +1,537 @@
# shared/config/base.py
"""
Base configuration for all microservices
Provides common settings and patterns
"""

import os
from typing import List, Dict, Optional, Any, Set
from pydantic_settings import BaseSettings
from pydantic import validator, Field


# ================================================================
# INTERNAL SERVICE REGISTRY
# ================================================================

# Central registry of all internal microservices that should have
# automatic access to tenant resources without user membership.
# Service names should match the naming convention used in JWT tokens.
INTERNAL_SERVICES: Set[str] = {
    # Core services
    "auth-service",
    "tenant-service",
    "gateway",  # API Gateway
    "gateway-service",  # Alternative name for gateway

    # Business logic services
    "inventory-service",
    "production-service",
    "recipes-service",
    "suppliers-service",
    "pos-service",
    "orders-service",
    "sales-service",
    "procurement-service",

    # ML and analytics services
    "training-service",
    "forecasting-service",
    "ai-insights-service",

    # Orchestration services
    "orchestrator-service",

    # Support services
    "notification-service",
    "alert-service",
    "alert-processor-service",
    "alert-processor",  # Alternative name (from k8s service name)
    "demo-session-service",
    "demo-service",  # Alternative name for demo session service
    "external-service",

    # Enterprise services
    "distribution-service",
}


def is_internal_service(service_identifier: str) -> bool:
    """
    Check if a service identifier represents an internal service.

    Args:
        service_identifier: Service name (e.g., 'production-service')

    Returns:
        bool: True if the identifier is a recognized internal service
    """
    return service_identifier in INTERNAL_SERVICES
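

# --- Editor's note: illustrative usage sketch, not part of the committed file. ---
# Shows how a caller might combine is_internal_service() with a decoded JWT
# claim; the claim layout and the decision taken here are assumptions for
# illustration only.
if __name__ == "__main__":
    example_claims = {"sub": "forecasting-service", "tenant_id": "tenant-123"}
    if is_internal_service(example_claims["sub"]):
        print(f"{example_claims['sub']}: internal caller, tenant membership check skipped")
    else:
        print(f"{example_claims['sub']}: external caller, tenant membership required")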


class BaseServiceSettings(BaseSettings):
    """
    Base configuration class for all microservices
    Provides common settings and validation patterns
    """

    # ================================================================
    # CORE SERVICE SETTINGS
    # ================================================================

    # Application Identity
    APP_NAME: str = "Bakery Service"
    SERVICE_NAME: str = "base-service"
    VERSION: str = "1.0.0"
    DESCRIPTION: str = "Base microservice for bakery platform"

    # Environment & Debugging
    ENVIRONMENT: str = os.getenv("ENVIRONMENT", "development")
    DEBUG: bool = os.getenv("DEBUG", "false").lower() == "true"
    LOG_LEVEL: str = os.getenv("LOG_LEVEL", "INFO")

    # Service Discovery & Health
    SERVICE_HOST: str = os.getenv("SERVICE_HOST", "0.0.0.0")
    SERVICE_PORT: int = int(os.getenv("SERVICE_PORT", "8000"))
    HEALTH_CHECK_ENABLED: bool = True
    METRICS_ENABLED: bool = True

    # ================================================================
    # DATABASE CONFIGURATION
    # ================================================================

    # Note: DATABASE_URL is defined as a property in each service-specific config
    # to construct the URL from secure environment variables

    # Database connection settings
    DB_POOL_SIZE: int = int(os.getenv("DB_POOL_SIZE", "10"))
    DB_MAX_OVERFLOW: int = int(os.getenv("DB_MAX_OVERFLOW", "20"))
    DB_POOL_TIMEOUT: int = int(os.getenv("DB_POOL_TIMEOUT", "30"))
    DB_POOL_RECYCLE: int = int(os.getenv("DB_POOL_RECYCLE", "3600"))
    DB_POOL_PRE_PING: bool = os.getenv("DB_POOL_PRE_PING", "true").lower() == "true"
    DB_ECHO: bool = os.getenv("DB_ECHO", "false").lower() == "true"

    # ================================================================
    # REDIS CONFIGURATION
    # ================================================================

    @property
    def REDIS_URL(self) -> str:
        """Build Redis URL from secure components with TLS support"""
        # Try complete URL first (for backward compatibility)
        complete_url = os.getenv("REDIS_URL")
        if complete_url:
            # Upgrade to TLS if not already
            if complete_url.startswith("redis://") and "tls" not in complete_url.lower():
                complete_url = complete_url.replace("redis://", "rediss://", 1)
            return complete_url

        # Build from components (secure approach with TLS)
        password = os.getenv("REDIS_PASSWORD", "")
        host = os.getenv("REDIS_HOST", "redis-service")
        port = os.getenv("REDIS_PORT", "6379")
        use_tls = os.getenv("REDIS_TLS_ENABLED", "true").lower() == "true"

        # Use rediss:// for TLS, redis:// for non-TLS
        protocol = "rediss" if use_tls else "redis"

        # DEBUG: log connection parameters without printing the credential itself
        import sys
        print(
            f"[DEBUG REDIS_URL] password_set={bool(password)}, host={host}, port={port}, tls={use_tls}",
            file=sys.stderr,
        )

        if password:
            url = f"{protocol}://:{password}@{host}:{port}"
            if use_tls:
                # Use ssl_cert_reqs=none for self-signed certs in internal cluster
                # Still encrypted, just skips cert validation
                url += "?ssl_cert_reqs=none"
            print(
                f"[DEBUG REDIS_URL] Returning URL with auth and TLS: {url.replace(password, '***')}",
                file=sys.stderr,
            )
            return url

        url = f"{protocol}://{host}:{port}"
        if use_tls:
            # Use ssl_cert_reqs=none for self-signed certs in internal cluster
            url += "?ssl_cert_reqs=none"
        print(f"[DEBUG REDIS_URL] Returning URL without auth: {url}", file=sys.stderr)
        return url

    REDIS_DB: int = int(os.getenv("REDIS_DB", "0"))
    REDIS_MAX_CONNECTIONS: int = int(os.getenv("REDIS_MAX_CONNECTIONS", "50"))
    REDIS_RETRY_ON_TIMEOUT: bool = True
    REDIS_SOCKET_KEEPALIVE: bool = True
    REDIS_SOCKET_KEEPALIVE_OPTIONS: Dict[str, int] = {
        "TCP_KEEPIDLE": 1,
        "TCP_KEEPINTVL": 3,
        "TCP_KEEPCNT": 5,
    }

    @property
    def REDIS_URL_WITH_DB(self) -> str:
        """Get Redis URL with database number"""
        base_url = self.REDIS_URL
        # Keep any query string (e.g. ssl_cert_reqs) after the /<db> path segment
        if "?" in base_url:
            base, query = base_url.split("?", 1)
            return f"{base.rstrip('/')}/{self.REDIS_DB}?{query}"
        return f"{base_url.rstrip('/')}/{self.REDIS_DB}"

    # ================================================================
    # RABBITMQ CONFIGURATION
    # ================================================================

    @property
    def RABBITMQ_URL(self) -> str:
        """Build RabbitMQ URL from secure components"""
        # Try complete URL first (for backward compatibility)
        complete_url = os.getenv("RABBITMQ_URL")
        if complete_url:
            return complete_url

        # Build from components (secure approach)
        user = os.getenv("RABBITMQ_USER", "bakery")
        password = os.getenv("RABBITMQ_PASSWORD", "forecast123")
        host = os.getenv("RABBITMQ_HOST", "rabbitmq-service")
        port = os.getenv("RABBITMQ_PORT", "5672")
        vhost = os.getenv("RABBITMQ_VHOST", "/")

        return f"amqp://{user}:{password}@{host}:{port}{vhost}"

    RABBITMQ_EXCHANGE: str = os.getenv("RABBITMQ_EXCHANGE", "bakery_events")
    RABBITMQ_QUEUE_PREFIX: str = os.getenv("RABBITMQ_QUEUE_PREFIX", "bakery")
    RABBITMQ_RETRY_ATTEMPTS: int = int(os.getenv("RABBITMQ_RETRY_ATTEMPTS", "3"))
    RABBITMQ_RETRY_DELAY: int = int(os.getenv("RABBITMQ_RETRY_DELAY", "5"))
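
    # Editor's note (worked example, values are the defaults above): with no
    # RABBITMQ_URL set, RABBITMQ_URL is assembled from components as
    #   "amqp://bakery:forecast123@rabbitmq-service:5672/"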

    # ================================================================
    # AUTHENTICATION & SECURITY
    # ================================================================

    # JWT Configuration
    # ✅ FIXED: Use production JWT secret key to match auth service
    # Must be same across all services for inter-service communication
    JWT_SECRET_KEY: str = os.getenv("JWT_SECRET_KEY", "usMHw9kQCQoyrc7wPmMi3bClr0lTY9wvzZmcTbADvL0=")
    JWT_ALGORITHM: str = os.getenv("JWT_ALGORITHM", "HS256")
    JWT_ACCESS_TOKEN_EXPIRE_MINUTES: int = int(os.getenv("JWT_ACCESS_TOKEN_EXPIRE_MINUTES", "30"))
    JWT_REFRESH_TOKEN_EXPIRE_DAYS: int = int(os.getenv("JWT_REFRESH_TOKEN_EXPIRE_DAYS", "7"))

    # Password Requirements
    PASSWORD_MIN_LENGTH: int = int(os.getenv("PASSWORD_MIN_LENGTH", "8"))
    PASSWORD_REQUIRE_UPPERCASE: bool = os.getenv("PASSWORD_REQUIRE_UPPERCASE", "true").lower() == "true"
    PASSWORD_REQUIRE_LOWERCASE: bool = os.getenv("PASSWORD_REQUIRE_LOWERCASE", "true").lower() == "true"
    PASSWORD_REQUIRE_NUMBERS: bool = os.getenv("PASSWORD_REQUIRE_NUMBERS", "true").lower() == "true"
    PASSWORD_REQUIRE_SYMBOLS: bool = os.getenv("PASSWORD_REQUIRE_SYMBOLS", "false").lower() == "true"

    # Security Settings
    BCRYPT_ROUNDS: int = int(os.getenv("BCRYPT_ROUNDS", "12"))
    MAX_LOGIN_ATTEMPTS: int = int(os.getenv("MAX_LOGIN_ATTEMPTS", "5"))
    LOCKOUT_DURATION_MINUTES: int = int(os.getenv("LOCKOUT_DURATION_MINUTES", "30"))

    # ================================================================
    # INTER-SERVICE COMMUNICATION
    # ================================================================

    # Service URLs (can be overridden by environment variables)
    GATEWAY_URL: str = os.getenv("GATEWAY_URL", "http://gateway-service:8000")
    AUTH_SERVICE_URL: str = os.getenv("AUTH_SERVICE_URL", "http://auth-service:8000")
    TRAINING_SERVICE_URL: str = os.getenv("TRAINING_SERVICE_URL", "http://training-service:8000")
    FORECASTING_SERVICE_URL: str = os.getenv("FORECASTING_SERVICE_URL", "http://forecasting-service:8000")
    SALES_SERVICE_URL: str = os.getenv("SALES_SERVICE_URL", "http://sales-service:8000")
    EXTERNAL_SERVICE_URL: str = os.getenv("EXTERNAL_SERVICE_URL", "http://external-service:8000")
    TENANT_SERVICE_URL: str = os.getenv("TENANT_SERVICE_URL", "http://tenant-service:8000")
    INVENTORY_SERVICE_URL: str = os.getenv("INVENTORY_SERVICE_URL", "http://inventory-service:8000")
    NOTIFICATION_SERVICE_URL: str = os.getenv("NOTIFICATION_SERVICE_URL", "http://notification-service:8000")
    PRODUCTION_SERVICE_URL: str = os.getenv("PRODUCTION_SERVICE_URL", "http://bakery-production-service:8000")
    ORDERS_SERVICE_URL: str = os.getenv("ORDERS_SERVICE_URL", "http://bakery-orders-service:8000")
    SUPPLIERS_SERVICE_URL: str = os.getenv("SUPPLIERS_SERVICE_URL", "http://bakery-suppliers-service:8000")
    RECIPES_SERVICE_URL: str = os.getenv("RECIPES_SERVICE_URL", "http://recipes-service:8000")
    POS_SERVICE_URL: str = os.getenv("POS_SERVICE_URL", "http://pos-service:8000")
    NOMINATIM_SERVICE_URL: str = os.getenv("NOMINATIM_SERVICE_URL", "http://nominatim:8080")
    DEMO_SESSION_SERVICE_URL: str = os.getenv("DEMO_SESSION_SERVICE_URL", "http://demo-session-service:8000")
    ALERT_PROCESSOR_SERVICE_URL: str = os.getenv("ALERT_PROCESSOR_SERVICE_URL", "http://alert-processor:8000")
    PROCUREMENT_SERVICE_URL: str = os.getenv("PROCUREMENT_SERVICE_URL", "http://procurement-service:8000")
    ORCHESTRATOR_SERVICE_URL: str = os.getenv("ORCHESTRATOR_SERVICE_URL", "http://orchestrator-service:8000")
    AI_INSIGHTS_SERVICE_URL: str = os.getenv("AI_INSIGHTS_SERVICE_URL", "http://ai-insights-service:8000")
    DISTRIBUTION_SERVICE_URL: str = os.getenv("DISTRIBUTION_SERVICE_URL", "http://distribution-service:8000")

    # HTTP Client Settings
    HTTP_TIMEOUT: int = int(os.getenv("HTTP_TIMEOUT", "30"))
    HTTP_RETRIES: int = int(os.getenv("HTTP_RETRIES", "3"))
    HTTP_RETRY_DELAY: float = float(os.getenv("HTTP_RETRY_DELAY", "1.0"))

    # ================================================================
    # CORS & API CONFIGURATION
    # ================================================================

    CORS_ORIGINS: str = os.getenv("CORS_ORIGINS", "http://localhost:3000,http://localhost:3001")
    CORS_ALLOW_CREDENTIALS: bool = os.getenv("CORS_ALLOW_CREDENTIALS", "true").lower() == "true"
    CORS_ALLOW_METHODS: List[str] = ["GET", "POST", "PUT", "DELETE", "PATCH", "OPTIONS"]
    CORS_ALLOW_HEADERS: List[str] = ["*"]

    @property
    def CORS_ORIGINS_LIST(self) -> List[str]:
        """Get CORS origins as list"""
        return [origin.strip() for origin in self.CORS_ORIGINS.split(",") if origin.strip()]

    # Rate Limiting
    RATE_LIMIT_ENABLED: bool = os.getenv("RATE_LIMIT_ENABLED", "true").lower() == "true"
    RATE_LIMIT_REQUESTS: int = int(os.getenv("RATE_LIMIT_REQUESTS", "100"))
    RATE_LIMIT_WINDOW: int = int(os.getenv("RATE_LIMIT_WINDOW", "60"))
    RATE_LIMIT_BURST: int = int(os.getenv("RATE_LIMIT_BURST", "10"))

    # API Documentation
    API_DOCS_ENABLED: bool = os.getenv("API_DOCS_ENABLED", "true").lower() == "true"
    API_DOCS_URL: str = "/docs"
    API_REDOC_URL: str = "/redoc"
    API_OPENAPI_URL: str = "/openapi.json"

    # ================================================================
    # EXTERNAL APIS & INTEGRATIONS
    # ================================================================

    # Weather API (AEMET - Spanish Weather Service)
    AEMET_API_KEY: str = os.getenv("AEMET_API_KEY", "")
    AEMET_BASE_URL: str = "https://opendata.aemet.es/opendata"
    AEMET_TIMEOUT: int = int(os.getenv("AEMET_TIMEOUT", "30"))

    # Madrid Open Data
    MADRID_OPENDATA_API_KEY: str = os.getenv("MADRID_OPENDATA_API_KEY", "")
    MADRID_OPENDATA_BASE_URL: str = "https://datos.madrid.es"
    MADRID_OPENDATA_TIMEOUT: int = int(os.getenv("MADRID_OPENDATA_TIMEOUT", "30"))

    # Email Configuration
    SMTP_HOST: str = os.getenv("SMTP_HOST", "smtp.gmail.com")
    SMTP_PORT: int = int(os.getenv("SMTP_PORT", "587"))
    SMTP_USER: str = os.getenv("SMTP_USER", "")
    SMTP_PASSWORD: str = os.getenv("SMTP_PASSWORD", "")
    SMTP_TLS: bool = os.getenv("SMTP_TLS", "true").lower() == "true"
    SMTP_SSL: bool = os.getenv("SMTP_SSL", "false").lower() == "true"

    # WhatsApp API
    WHATSAPP_API_KEY: str = os.getenv("WHATSAPP_API_KEY", "")
    WHATSAPP_BASE_URL: str = os.getenv("WHATSAPP_BASE_URL", "https://api.twilio.com")
    WHATSAPP_FROM_NUMBER: str = os.getenv("WHATSAPP_FROM_NUMBER", "")

    # Stripe Payment Configuration
    STRIPE_PUBLISHABLE_KEY: str = os.getenv("STRIPE_PUBLISHABLE_KEY", "")
    STRIPE_SECRET_KEY: str = os.getenv("STRIPE_SECRET_KEY", "")
    STRIPE_WEBHOOK_SECRET: str = os.getenv("STRIPE_WEBHOOK_SECRET", "")
    STRIPE_API_VERSION: str = os.getenv("STRIPE_API_VERSION", "")  # Empty = use SDK default

    # ================================================================
    # ML & AI CONFIGURATION
    # ================================================================

    # Model Storage Backend (MinIO is the primary storage)
    MODEL_STORAGE_BACKEND: str = os.getenv("MODEL_STORAGE_BACKEND", "minio")

    # Training Configuration
    MAX_TRAINING_TIME_MINUTES: int = int(os.getenv("MAX_TRAINING_TIME_MINUTES", "30"))
    MIN_TRAINING_DATA_DAYS: int = int(os.getenv("MIN_TRAINING_DATA_DAYS", "30"))
    TRAINING_BATCH_SIZE: int = int(os.getenv("TRAINING_BATCH_SIZE", "1000"))

    # Prophet Configuration
    PROPHET_SEASONALITY_MODE: str = os.getenv("PROPHET_SEASONALITY_MODE", "additive")
    PROPHET_CHANGEPOINT_PRIOR_SCALE: float = float(os.getenv("PROPHET_CHANGEPOINT_PRIOR_SCALE", "0.05"))
    PROPHET_SEASONALITY_PRIOR_SCALE: float = float(os.getenv("PROPHET_SEASONALITY_PRIOR_SCALE", "10.0"))

    # Prediction Caching
    PREDICTION_CACHE_TTL_HOURS: int = int(os.getenv("PREDICTION_CACHE_TTL_HOURS", "6"))
    WEATHER_CACHE_TTL_HOURS: int = int(os.getenv("WEATHER_CACHE_TTL_HOURS", "1"))
    TRAFFIC_CACHE_TTL_HOURS: int = int(os.getenv("TRAFFIC_CACHE_TTL_HOURS", "1"))

    # ================================================================
    # MONITORING & OBSERVABILITY
    # ================================================================

    # Logging Configuration
    LOG_FORMAT: str = os.getenv("LOG_FORMAT", "json")  # json, text
    LOG_FILE_ENABLED: bool = os.getenv("LOG_FILE_ENABLED", "false").lower() == "true"
    LOG_FILE_PATH: str = os.getenv("LOG_FILE_PATH", "/app/logs")
    LOG_ROTATION_SIZE: str = os.getenv("LOG_ROTATION_SIZE", "100MB")
    LOG_RETENTION_DAYS: int = int(os.getenv("LOG_RETENTION_DAYS", "30"))

    # Metrics & Monitoring
    PROMETHEUS_ENABLED: bool = os.getenv("PROMETHEUS_ENABLED", "true").lower() == "true"
    PROMETHEUS_PORT: int = int(os.getenv("PROMETHEUS_PORT", "9090"))
    PROMETHEUS_PATH: str = "/metrics"

    # Tracing
    JAEGER_ENABLED: bool = os.getenv("JAEGER_ENABLED", "false").lower() == "true"
    JAEGER_AGENT_HOST: str = os.getenv("JAEGER_AGENT_HOST", "localhost")
    JAEGER_AGENT_PORT: int = int(os.getenv("JAEGER_AGENT_PORT", "6831"))

    # Health Checks
    HEALTH_CHECK_TIMEOUT: int = int(os.getenv("HEALTH_CHECK_TIMEOUT", "30"))
    HEALTH_CHECK_INTERVAL: int = int(os.getenv("HEALTH_CHECK_INTERVAL", "30"))

    # ================================================================
    # DATA RETENTION & CLEANUP
    # ================================================================

    DATA_RETENTION_DAYS: int = int(os.getenv("DATA_RETENTION_DAYS", "365"))
    # NOTE: LOG_RETENTION_DAYS is also declared in the logging section above with a
    # default of 30; this later declaration (default 90) is the one that takes effect.
    LOG_RETENTION_DAYS: int = int(os.getenv("LOG_RETENTION_DAYS", "90"))
    METRIC_RETENTION_DAYS: int = int(os.getenv("METRIC_RETENTION_DAYS", "90"))
    TEMP_FILE_CLEANUP_HOURS: int = int(os.getenv("TEMP_FILE_CLEANUP_HOURS", "24"))

    # ================================================================
    # BUSINESS RULES & CONSTRAINTS
    # ================================================================

    # Forecasting Business Rules
    MAX_FORECAST_DAYS: int = int(os.getenv("MAX_FORECAST_DAYS", "30"))
    MIN_HISTORICAL_DAYS: int = int(os.getenv("MIN_HISTORICAL_DAYS", "60"))
    CONFIDENCE_THRESHOLD: float = float(os.getenv("CONFIDENCE_THRESHOLD", "0.8"))

    # Spanish Business Context
    TIMEZONE: str = os.getenv("TIMEZONE", "Europe/Madrid")
    LOCALE: str = os.getenv("LOCALE", "es_ES.UTF-8")
    CURRENCY: str = os.getenv("CURRENCY", "EUR")

    # Business Hours (24-hour format)
    BUSINESS_HOUR_START: int = int(os.getenv("BUSINESS_HOUR_START", "7"))
    BUSINESS_HOUR_END: int = int(os.getenv("BUSINESS_HOUR_END", "20"))

    # Spanish Holidays & Seasonal Adjustments
    ENABLE_SPANISH_HOLIDAYS: bool = os.getenv("ENABLE_SPANISH_HOLIDAYS", "true").lower() == "true"
    ENABLE_MADRID_HOLIDAYS: bool = os.getenv("ENABLE_MADRID_HOLIDAYS", "true").lower() == "true"
    SCHOOL_CALENDAR_ENABLED: bool = os.getenv("SCHOOL_CALENDAR_ENABLED", "true").lower() == "true"

    # ================================================================
    # PROCUREMENT AUTOMATION
    # ================================================================

    # NOTE: Tenant-specific procurement settings (auto-approval thresholds, supplier scores,
    # approval rules, lead times, forecast days, etc.) have been moved to TenantSettings.
    # Services should fetch these using TenantSettingsClient from shared/utils/tenant_settings_client.py

    # System-level procurement settings (apply to all tenants):
    AUTO_CREATE_POS_FROM_PLAN: bool = os.getenv("AUTO_CREATE_POS_FROM_PLAN", "true").lower() == "true"
    PROCUREMENT_TEST_MODE: bool = os.getenv("PROCUREMENT_TEST_MODE", "false").lower() == "true"
    SEND_AUTO_APPROVAL_SUMMARY: bool = os.getenv("SEND_AUTO_APPROVAL_SUMMARY", "true").lower() == "true"
    AUTO_APPROVAL_SUMMARY_TIME_HOUR: int = int(os.getenv("AUTO_APPROVAL_SUMMARY_TIME_HOUR", "18"))

    # ================================================================
    # DEVELOPMENT & TESTING
    # ================================================================

    # Testing Configuration
    TESTING: bool = os.getenv("TESTING", "false").lower() == "true"
    TEST_DATABASE_URL: str = os.getenv("TEST_DATABASE_URL", "")
    MOCK_EXTERNAL_APIS: bool = os.getenv("MOCK_EXTERNAL_APIS", "false").lower() == "true"

    # Development Features
    AUTO_RELOAD: bool = os.getenv("AUTO_RELOAD", "false").lower() == "true"
    PROFILING_ENABLED: bool = os.getenv("PROFILING_ENABLED", "false").lower() == "true"

    # ================================================================
    # VALIDATORS
    # ================================================================

    @validator('JWT_SECRET_KEY')
    def validate_jwt_secret(cls, v):
        if v == "change-this-in-production" and os.getenv("ENVIRONMENT") == "production":
            raise ValueError("JWT_SECRET_KEY must be changed in production")
        if len(v) < 32:
            raise ValueError("JWT_SECRET_KEY must be at least 32 characters long")
        return v

    @validator('LOG_LEVEL')
    def validate_log_level(cls, v):
        valid_levels = ['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL']
        if v.upper() not in valid_levels:
            raise ValueError(f"LOG_LEVEL must be one of: {valid_levels}")
        return v.upper()

    @validator('ENVIRONMENT')
    def validate_environment(cls, v):
        valid_envs = ['development', 'staging', 'production', 'testing']
        if v.lower() not in valid_envs:
            raise ValueError(f"ENVIRONMENT must be one of: {valid_envs}")
        return v.lower()
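
    # Editor's note (worked examples of the validators above):
    #   LOG_LEVEL="debug"      -> normalized to "DEBUG"
    #   LOG_LEVEL="verbose"    -> ValueError (not in valid_levels)
    #   ENVIRONMENT="Staging"  -> normalized to "staging"
    #   JWT_SECRET_KEY shorter than 32 characters -> ValueError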

    # ================================================================
    # COMPUTED PROPERTIES
    # ================================================================

    @property
    def IS_PRODUCTION(self) -> bool:
        """Check if running in production"""
        return self.ENVIRONMENT == "production"

    @property
    def IS_DEVELOPMENT(self) -> bool:
        """Check if running in development"""
        return self.ENVIRONMENT == "development"

    @property
    def IS_TESTING(self) -> bool:
        """Check if running tests"""
        return self.TESTING or self.ENVIRONMENT == "testing"

    @property
    def SERVICE_REGISTRY(self) -> Dict[str, str]:
        """Get all service URLs"""
        return {
            "gateway": self.GATEWAY_URL,
            "auth": self.AUTH_SERVICE_URL,
            "training": self.TRAINING_SERVICE_URL,
            "forecasting": self.FORECASTING_SERVICE_URL,
            "sales": self.SALES_SERVICE_URL,
            "external": self.EXTERNAL_SERVICE_URL,
            "tenant": self.TENANT_SERVICE_URL,
            "inventory": self.INVENTORY_SERVICE_URL,
            "notification": self.NOTIFICATION_SERVICE_URL,
            "production": self.PRODUCTION_SERVICE_URL,
            "orders": self.ORDERS_SERVICE_URL,
            "suppliers": self.SUPPLIERS_SERVICE_URL,
            "recipes": self.RECIPES_SERVICE_URL,
        }

    @property
    def DATABASE_CONFIG(self) -> Dict[str, Any]:
        """Get database configuration for SQLAlchemy"""
        return {
            "url": self.DATABASE_URL,
            "pool_size": self.DB_POOL_SIZE,
            "max_overflow": self.DB_MAX_OVERFLOW,
            "pool_timeout": self.DB_POOL_TIMEOUT,
            "pool_recycle": self.DB_POOL_RECYCLE,
            "pool_pre_ping": self.DB_POOL_PRE_PING,
            "echo": self.DB_ECHO,
        }

    @property
    def REDIS_CONFIG(self) -> Dict[str, Any]:
        """Get Redis configuration"""
        return {
            "url": self.REDIS_URL_WITH_DB,
            "max_connections": self.REDIS_MAX_CONNECTIONS,
            "retry_on_timeout": self.REDIS_RETRY_ON_TIMEOUT,
            "socket_keepalive": self.REDIS_SOCKET_KEEPALIVE,
            "socket_keepalive_options": self.REDIS_SOCKET_KEEPALIVE_OPTIONS,
        }
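
    # Editor's note (illustrative, not confirmed by this commit): these dicts are
    # shaped so they can be unpacked into client constructors, e.g.
    #   sqlalchemy.ext.asyncio.create_async_engine(**settings.DATABASE_CONFIG)
    #   redis.asyncio.from_url(settings.REDIS_URL_WITH_DB,
    #                          max_connections=settings.REDIS_MAX_CONNECTIONS)
    # Whether the services wire them up exactly this way is an assumption.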

    # ================================================================
    # CONFIGURATION LOADING
    # ================================================================

    class Config:
        env_file = ".env"
        env_file_encoding = 'utf-8'
        case_sensitive = True

    def __init__(self, **kwargs):
        super().__init__(**kwargs)

        # Validate critical settings in production
        if self.IS_PRODUCTION:
            self._validate_production_settings()

    def _validate_production_settings(self):
        """Validate production-specific settings"""
        critical_settings = [
            'JWT_SECRET_KEY',
            'DATABASE_URL',
            'REDIS_URL',
            'RABBITMQ_URL'
        ]

        for setting in critical_settings:
            value = getattr(self, setting)
            if not value or 'change' in value.lower() or 'default' in value.lower():
                raise ValueError(f"{setting} must be properly configured for production")
70
shared/config/environments.py
Executable file
@@ -0,0 +1,70 @@
# ================================================================
# ENVIRONMENT-SPECIFIC CONFIGURATIONS
# shared/config/environments.py
# ================================================================

"""
Environment-specific configuration overrides
"""

from typing import Dict, Any

DEVELOPMENT_OVERRIDES: Dict[str, Any] = {
    "DEBUG": True,
    "LOG_LEVEL": "DEBUG",
    "DB_ECHO": True,
    "API_DOCS_ENABLED": True,
    "CORS_ORIGINS": "http://localhost:3000,http://localhost:3001,http://127.0.0.1:3000",
    "MOCK_EXTERNAL_APIS": True,
    "AUTO_RELOAD": True,
}

STAGING_OVERRIDES: Dict[str, Any] = {
    "DEBUG": False,
    "LOG_LEVEL": "INFO",
    "DB_ECHO": False,
    "API_DOCS_ENABLED": True,
    "MOCK_EXTERNAL_APIS": False,
    "AUTO_RELOAD": False,
}

PRODUCTION_OVERRIDES: Dict[str, Any] = {
    "DEBUG": False,
    "LOG_LEVEL": "WARNING",
    "DB_ECHO": False,
    "API_DOCS_ENABLED": False,
    "MOCK_EXTERNAL_APIS": False,
    "AUTO_RELOAD": False,
    "PROFILING_ENABLED": False,
    "RATE_LIMIT_ENABLED": True,
}

TESTING_OVERRIDES: Dict[str, Any] = {
    "TESTING": True,
    "DEBUG": True,
    "LOG_LEVEL": "DEBUG",
    "DATABASE_URL": "postgresql+asyncpg://test_user:test_pass@test-db:5432/test_db",
    "REDIS_URL": "redis://test-redis:6379",
    "MOCK_EXTERNAL_APIS": True,
    "EMAIL_VERIFICATION_REQUIRED": False,
    "RATE_LIMIT_ENABLED": False,
}


def get_environment_overrides(environment: str) -> Dict[str, Any]:
    """
    Get configuration overrides for a specific environment

    Args:
        environment: Environment name (development, staging, production, testing)

    Returns:
        Dict: Configuration overrides
    """
    overrides = {
        "development": DEVELOPMENT_OVERRIDES,
        "staging": STAGING_OVERRIDES,
        "production": PRODUCTION_OVERRIDES,
        "testing": TESTING_OVERRIDES,
    }

    return overrides.get(environment.lower(), {})
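

# --- Editor's note: illustrative usage sketch, not part of the committed file. ---
# How a service might look up its overrides at startup; how the overrides are
# then merged into a settings instance is not shown in this commit, so that
# step is left out here.
if __name__ == "__main__":
    import os
    env = os.getenv("ENVIRONMENT", "development")
    overrides = get_environment_overrides(env)
    print(f"{env}: LOG_LEVEL override = {overrides.get('LOG_LEVEL')}")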
49
shared/config/feature_flags.py
Executable file
@@ -0,0 +1,49 @@
"""
|
||||
Feature flags for enterprise tier functionality
|
||||
"""
|
||||
|
||||
import os
|
||||
from typing import Dict, Any
|
||||
|
||||
|
||||
class FeatureFlags:
|
||||
"""Enterprise feature flags configuration"""
|
||||
|
||||
# Main enterprise tier feature flag
|
||||
ENABLE_ENTERPRISE_TIER = os.getenv("ENABLE_ENTERPRISE_TIER", "true").lower() == "true"
|
||||
|
||||
# Internal transfer feature flag
|
||||
ENABLE_INTERNAL_TRANSFERS = os.getenv("ENABLE_INTERNAL_TRANSFERS", "true").lower() == "true"
|
||||
|
||||
# Distribution service feature flag
|
||||
ENABLE_DISTRIBUTION_SERVICE = os.getenv("ENABLE_DISTRIBUTION_SERVICE", "true").lower() == "true"
|
||||
|
||||
# Network dashboard feature flag
|
||||
ENABLE_NETWORK_DASHBOARD = os.getenv("ENABLE_NETWORK_DASHBOARD", "true").lower() == "true"
|
||||
|
||||
# Child tenant management feature flag
|
||||
ENABLE_CHILD_TENANT_MANAGEMENT = os.getenv("ENABLE_CHILD_TENANT_MANAGEMENT", "true").lower() == "true"
|
||||
|
||||
# Aggregated forecasting feature flag
|
||||
ENABLE_AGGREGATED_FORECASTING = os.getenv("ENABLE_AGGREGATED_FORECASTING", "true").lower() == "true"
|
||||
|
||||
@classmethod
|
||||
def get_all_flags(cls) -> Dict[str, Any]:
|
||||
"""Get all feature flags as a dictionary"""
|
||||
return {
|
||||
'ENABLE_ENTERPRISE_TIER': cls.ENABLE_ENTERPRISE_TIER,
|
||||
'ENABLE_INTERNAL_TRANSFERS': cls.ENABLE_INTERNAL_TRANSFERS,
|
||||
'ENABLE_DISTRIBUTION_SERVICE': cls.ENABLE_DISTRIBUTION_SERVICE,
|
||||
'ENABLE_NETWORK_DASHBOARD': cls.ENABLE_NETWORK_DASHBOARD,
|
||||
'ENABLE_CHILD_TENANT_MANAGEMENT': cls.ENABLE_CHILD_TENANT_MANAGEMENT,
|
||||
'ENABLE_AGGREGATED_FORECASTING': cls.ENABLE_AGGREGATED_FORECASTING,
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def is_enabled(cls, flag_name: str) -> bool:
|
||||
"""Check if a specific feature flag is enabled"""
|
||||
return getattr(cls, flag_name, False)
|
||||
|
||||
|
||||
# Export the feature flags
|
||||
__all__ = ["FeatureFlags"]
216
shared/config/rabbitmq_config.py
Executable file
@@ -0,0 +1,216 @@
# shared/config/rabbitmq_config.py
"""
RabbitMQ configuration for the event system

Supports three event classes through a unified topic exchange:
- ALERT: Actionable events requiring user decision
- NOTIFICATION: Informational state changes
- RECOMMENDATION: AI-generated suggestions

Routing key pattern: {event_class}.{event_domain}.{severity}
Examples:
- alert.inventory.urgent
- notification.production.info
- recommendation.demand.medium
"""

RABBITMQ_CONFIG = {
    "exchanges": {
        "events": {
            "name": "events.exchange",
            "type": "topic",
            "durable": True,
            "auto_delete": False
        },
        # Legacy exchange for backward compatibility during migration
        "alerts": {
            "name": "alerts.exchange",
            "type": "topic",
            "durable": True,
            "auto_delete": False
        },
        "dead_letter": {
            "name": "dlx.exchange",
            "type": "direct",
            "durable": True,
            "auto_delete": False
        }
    },
    "queues": {
        "event_processing": {
            "name": "event.processing.queue",
            "durable": True,
            "arguments": {
                "x-message-ttl": 3600000,  # 1 hour TTL
                "x-max-length": 10000,  # Max 10k messages
                "x-overflow": "reject-publish",
                "x-dead-letter-exchange": "dlx.exchange",
                "x-dead-letter-routing-key": "failed.events"
            }
        },
        # Legacy queue for backward compatibility
        "alert_processing": {
            "name": "alert.processing.queue",
            "durable": True,
            "arguments": {
                "x-message-ttl": 3600000,
                "x-max-length": 10000,
                "x-overflow": "reject-publish",
                "x-dead-letter-exchange": "dlx.exchange",
                "x-dead-letter-routing-key": "failed.items"
            }
        },
        "dead_letter": {
            "name": "event.dead_letter.queue",
            "durable": True,
            "arguments": {
                "x-message-ttl": 86400000  # 24 hours for dead letters
            }
        }
    },
    "bindings": [
        # New event architecture bindings
        {
            "queue": "event.processing.queue",
            "exchange": "events.exchange",
            "routing_key": "*.*.*"  # event_class.event_domain.severity
        },
        # Legacy bindings for backward compatibility
        {
            "queue": "alert.processing.queue",
            "exchange": "alerts.exchange",
            "routing_key": "*.*.*"  # alert/recommendation.severity.service
        },
        {
            "queue": "event.dead_letter.queue",
            "exchange": "dlx.exchange",
            "routing_key": "failed.events"
        },
        {
            "queue": "event.dead_letter.queue",
            "exchange": "dlx.exchange",
            "routing_key": "failed.items"  # Legacy
        }
    ],
    "routing_patterns": {
        # New event architecture patterns
        # event_class.event_domain.severity
        "alert_inventory": "alert.inventory.*",
        "alert_production": "alert.production.*",
        "alert_supply_chain": "alert.supply_chain.*",
        "notification_inventory": "notification.inventory.*",
        "notification_production": "notification.production.*",
        "notification_operations": "notification.operations.*",
        "recommendation_all": "recommendation.*.*",

        # By severity
        "all_urgent": "*.*.urgent",
        "all_high": "*.*.high",
        "all_medium": "*.*.medium",
        "all_low": "*.*.low",

        # By event class
        "all_alerts": "alert.*.*",
        "all_notifications": "notification.*.*",
        "all_recommendations": "recommendation.*.*",

        # By domain
        "inventory_all": "*.inventory.*",
        "production_all": "*.production.*",
        "supply_chain_all": "*.supply_chain.*",
        "demand_all": "*.demand.*",
        "operations_all": "*.operations.*",

        # Legacy patterns (for backward compatibility)
        "legacy_alert": "alert.{severity}.{service}",
        "legacy_recommendation": "recommendation.{severity}.{service}",
    }
}
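

# --- Editor's note: illustrative sketch, not part of the committed file. ---
# One way to declare this topology with a pika-style channel object; the actual
# client library and connection handling used by the services are not shown in
# this commit, so treat the calls below as an assumption.
def declare_topology_example(channel) -> None:
    """Declare the exchanges, queues and bindings described in RABBITMQ_CONFIG."""
    for exchange in RABBITMQ_CONFIG["exchanges"].values():
        channel.exchange_declare(
            exchange=exchange["name"],
            exchange_type=exchange["type"],
            durable=exchange["durable"],
            auto_delete=exchange["auto_delete"],
        )
    for queue in RABBITMQ_CONFIG["queues"].values():
        channel.queue_declare(
            queue=queue["name"],
            durable=queue["durable"],
            arguments=queue.get("arguments", {}),
        )
    for binding in RABBITMQ_CONFIG["bindings"]:
        channel.queue_bind(
            queue=binding["queue"],
            exchange=binding["exchange"],
            routing_key=binding["routing_key"],
        )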


def get_routing_key(event_class: str, event_domain: str, severity: str) -> str:
    """
    Generate routing key for event publishing.

    New pattern: {event_class}.{event_domain}.{severity}

    Args:
        event_class: 'alert', 'notification', or 'recommendation'
        event_domain: 'inventory', 'production', 'supply_chain', 'demand', 'operations'
        severity: 'urgent', 'high', 'medium', 'low'

    Returns:
        Routing key string

    Examples:
        >>> get_routing_key('alert', 'inventory', 'urgent')
        'alert.inventory.urgent'
        >>> get_routing_key('notification', 'production', 'info')
        'notification.production.info'
    """
    return f"{event_class}.{event_domain}.{severity}"


def get_legacy_routing_key(item_type: str, severity: str, service: str) -> str:
    """
    Generate legacy routing key for backward compatibility.

    Legacy pattern: {item_type}.{severity}.{service}

    TODO: Remove after migration is complete.
    """
    return f"{item_type}.{severity}.{service}"


def get_binding_patterns(
    event_classes: list = None,
    event_domains: list = None,
    severities: list = None
) -> list:
    """
    Generate binding patterns for selective consumption.

    Args:
        event_classes: List of event classes to bind (default: all)
        event_domains: List of event domains to bind (default: all)
        severities: List of severities to bind (default: all)

    Returns:
        List of routing key patterns

    Examples:
        >>> get_binding_patterns(['alert'], ['inventory'], ['urgent', 'high'])
        ['alert.inventory.urgent', 'alert.inventory.high']
    """
    patterns = []

    event_classes = event_classes or ["alert", "notification", "recommendation"]
    event_domains = event_domains or ["inventory", "production", "supply_chain", "demand", "operations"]
    severities = severities or ["urgent", "high", "medium", "low"]

    for event_class in event_classes:
        for event_domain in event_domains:
            for severity in severities:
                patterns.append(f"{event_class}.{event_domain}.{severity}")

    return patterns


def priority_score_to_severity(priority_score: int) -> str:
    """
    Convert priority score (0-100) to severity level.

    Args:
        priority_score: Priority score (0-100)

    Returns:
        Severity level: 'urgent', 'high', 'medium', or 'low'
    """
    if priority_score >= 90:
        return "urgent"
    elif priority_score >= 70:
        return "high"
    elif priority_score >= 50:
        return "medium"
    else:
        return "low"
83
shared/config/utils.py
Executable file
@@ -0,0 +1,83 @@
# ================================================================
# SHARED CONFIGURATION UTILITIES
# shared/config/utils.py
# ================================================================

"""
Configuration utilities and helpers
"""

from typing import Dict, Any, Type
from shared.config.base import BaseServiceSettings

# NOTE: the service-specific settings classes referenced below (GatewaySettings,
# AuthSettings, TrainingSettings, ForecastingSettings, DataSettings,
# TenantSettings, NotificationSettings) are not imported in this file as
# committed; they must be imported from their respective service config modules
# (paths not shown in this commit) before this registry can be used.

# Service settings registry
SERVICE_SETTINGS: Dict[str, Type[BaseServiceSettings]] = {
    "gateway": GatewaySettings,
    "auth-service": AuthSettings,
    "training-service": TrainingSettings,
    "forecasting-service": ForecastingSettings,
    "data-service": DataSettings,
    "tenant-service": TenantSettings,
    "notification-service": NotificationSettings,
}


def get_settings_for_service(service_name: str) -> BaseServiceSettings:
    """
    Get settings instance for a specific service

    Args:
        service_name: Name of the service

    Returns:
        BaseServiceSettings: Configured settings instance

    Raises:
        ValueError: If service name is not recognized
    """
    if service_name not in SERVICE_SETTINGS:
        raise ValueError(f"Unknown service: {service_name}. Available: {list(SERVICE_SETTINGS.keys())}")

    settings_class = SERVICE_SETTINGS[service_name]
    return settings_class()


def validate_all_service_configs() -> Dict[str, Any]:
    """
    Validate configuration for all services

    Returns:
        Dict: Validation results for each service
    """
    results = {}

    for service_name, settings_class in SERVICE_SETTINGS.items():
        try:
            settings = settings_class()
            results[service_name] = {
                "status": "valid",
                "config": {
                    "app_name": settings.APP_NAME,
                    "version": settings.VERSION,
                    "environment": settings.ENVIRONMENT,
                    "database_configured": bool(settings.DATABASE_URL),
                    "redis_configured": bool(settings.REDIS_URL),
                    "debug_mode": settings.DEBUG,
                }
            }
        except Exception as e:
            results[service_name] = {
                "status": "error",
                "error": str(e)
            }

    return results


def get_service_urls() -> Dict[str, str]:
    """
    Get all service URLs from any service configuration

    Returns:
        Dict: Service name to URL mapping
    """
    # Use auth service settings as reference (all services have same URLs)
    settings = AuthSettings()
    return settings.SERVICE_REGISTRY
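

# --- Editor's note: illustrative usage sketch, not part of the committed file. ---
# How a deployment check might use the helpers above once the service-specific
# settings classes are importable (see the NOTE next to SERVICE_SETTINGS).
if __name__ == "__main__":
    report = validate_all_service_configs()
    for name, result in report.items():
        print(f"{name}: {result['status']}")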