# services/alert_processor/app/config.py
"""
Alert Processor Service Configuration
"""

import os
from typing import List

from shared.config.base import BaseServiceSettings


class AlertProcessorConfig(BaseServiceSettings):
    """Configuration for Alert Processor Service"""

    SERVICE_NAME: str = "alert-processor"
    APP_NAME: str = "Alert Processor Service"
    DESCRIPTION: str = "Central alert and recommendation processor"

    # Database configuration (secure approach: build the URL from components)
    @property
    def DATABASE_URL(self) -> str:
        """Build the database URL from its components."""
        # Prefer a complete URL if one is provided (backward compatibility).
        complete_url = os.getenv("ALERT_PROCESSOR_DATABASE_URL")
        if complete_url:
            return complete_url

        # Otherwise assemble the URL from individual components. The defaults
        # below are local-development fallbacks only; real deployments should
        # set these environment variables explicitly.
        user = os.getenv("ALERT_PROCESSOR_DB_USER", "alert_processor_user")
        password = os.getenv("ALERT_PROCESSOR_DB_PASSWORD", "alert_processor_pass123")
        host = os.getenv("ALERT_PROCESSOR_DB_HOST", "localhost")
        port = os.getenv("ALERT_PROCESSOR_DB_PORT", "5432")
        name = os.getenv("ALERT_PROCESSOR_DB_NAME", "alert_processor_db")

        return f"postgresql+asyncpg://{user}:{password}@{host}:{port}/{name}"

    # Use dedicated Redis DB for alert processing
    REDIS_DB: int = int(os.getenv("ALERT_PROCESSOR_REDIS_DB", "6"))

    # Alert processing configuration
    BATCH_SIZE: int = int(os.getenv("ALERT_BATCH_SIZE", "10"))
    PROCESSING_TIMEOUT: int = int(os.getenv("ALERT_PROCESSING_TIMEOUT", "30"))

    # Deduplication settings
    ALERT_DEDUPLICATION_WINDOW_MINUTES: int = int(os.getenv("ALERT_DEDUPLICATION_WINDOW_MINUTES", "15"))
    RECOMMENDATION_DEDUPLICATION_WINDOW_MINUTES: int = int(os.getenv("RECOMMENDATION_DEDUPLICATION_WINDOW_MINUTES", "60"))

    # Alert severity channel mappings (hardcoded for now to avoid config parsing issues)
    @property
    def urgent_channels(self) -> List[str]:
        return ["whatsapp", "email", "push", "dashboard"]

    @property
    def high_channels(self) -> List[str]:
        return ["whatsapp", "email", "dashboard"]

    @property
    def medium_channels(self) -> List[str]:
        return ["email", "dashboard"]

    @property
    def low_channels(self) -> List[str]:
        return ["dashboard"]

    # ============================================================
    # ENRICHMENT CONFIGURATION (NEW)
    # ============================================================

    # Priority scoring weights (defaults sum to 1.0)
    BUSINESS_IMPACT_WEIGHT: float = float(os.getenv("BUSINESS_IMPACT_WEIGHT", "0.4"))
    URGENCY_WEIGHT: float = float(os.getenv("URGENCY_WEIGHT", "0.3"))
    USER_AGENCY_WEIGHT: float = float(os.getenv("USER_AGENCY_WEIGHT", "0.2"))
    CONFIDENCE_WEIGHT: float = float(os.getenv("CONFIDENCE_WEIGHT", "0.1"))

    # Priority thresholds
    CRITICAL_THRESHOLD: int = int(os.getenv("CRITICAL_THRESHOLD", "90"))
    IMPORTANT_THRESHOLD: int = int(os.getenv("IMPORTANT_THRESHOLD", "70"))
    STANDARD_THRESHOLD: int = int(os.getenv("STANDARD_THRESHOLD", "50"))

    # Timing intelligence (hour values on a 24-hour clock)
    TIMING_INTELLIGENCE_ENABLED: bool = os.getenv("TIMING_INTELLIGENCE_ENABLED", "true").lower() == "true"
    BATCH_LOW_PRIORITY_ALERTS: bool = os.getenv("BATCH_LOW_PRIORITY_ALERTS", "true").lower() == "true"
    BUSINESS_HOURS_START: int = int(os.getenv("BUSINESS_HOURS_START", "6"))
    BUSINESS_HOURS_END: int = int(os.getenv("BUSINESS_HOURS_END", "22"))
    PEAK_HOURS_START: int = int(os.getenv("PEAK_HOURS_START", "7"))
    PEAK_HOURS_END: int = int(os.getenv("PEAK_HOURS_END", "11"))
    PEAK_HOURS_EVENING_START: int = int(os.getenv("PEAK_HOURS_EVENING_START", "17"))
    PEAK_HOURS_EVENING_END: int = int(os.getenv("PEAK_HOURS_EVENING_END", "19"))

    # Grouping
    GROUPING_TIME_WINDOW_MINUTES: int = int(os.getenv("GROUPING_TIME_WINDOW_MINUTES", "15"))
    MAX_ALERTS_PER_GROUP: int = int(os.getenv("MAX_ALERTS_PER_GROUP", "5"))

    # Email digest
    EMAIL_DIGEST_ENABLED: bool = os.getenv("EMAIL_DIGEST_ENABLED", "true").lower() == "true"
    DIGEST_SEND_TIME: str = os.getenv("DIGEST_SEND_TIME", "18:00")
    # Hour component of DIGEST_SEND_TIME, pre-parsed for convenience ("18:00" -> 18)
    DIGEST_SEND_TIME_HOUR: int = int(os.getenv("DIGEST_SEND_TIME", "18:00").split(":")[0])
    DIGEST_MIN_ALERTS: int = int(os.getenv("DIGEST_MIN_ALERTS", "5"))

    # Alert grouping
    ALERT_GROUPING_ENABLED: bool = os.getenv("ALERT_GROUPING_ENABLED", "true").lower() == "true"
    MIN_ALERTS_FOR_GROUPING: int = int(os.getenv("MIN_ALERTS_FOR_GROUPING", "3"))

    # Trend detection
    TREND_DETECTION_ENABLED: bool = os.getenv("TREND_DETECTION_ENABLED", "true").lower() == "true"
    TREND_LOOKBACK_DAYS: int = int(os.getenv("TREND_LOOKBACK_DAYS", "7"))
    TREND_SIGNIFICANCE_THRESHOLD: float = float(os.getenv("TREND_SIGNIFICANCE_THRESHOLD", "0.15"))

    # Context enrichment
    ENRICHMENT_TIMEOUT_SECONDS: int = int(os.getenv("ENRICHMENT_TIMEOUT_SECONDS", "10"))
    ORCHESTRATOR_CONTEXT_CACHE_TTL: int = int(os.getenv("ORCHESTRATOR_CONTEXT_CACHE_TTL", "300"))

    # Peak hours (aliases for enrichment services)
    EVENING_PEAK_START: int = int(os.getenv("PEAK_HOURS_EVENING_START", "17"))
    EVENING_PEAK_END: int = int(os.getenv("PEAK_HOURS_EVENING_END", "19"))

    # Service URLs for enrichment
    ORCHESTRATOR_SERVICE_URL: str = os.getenv("ORCHESTRATOR_SERVICE_URL", "http://orchestrator-service:8000")
    INVENTORY_SERVICE_URL: str = os.getenv("INVENTORY_SERVICE_URL", "http://inventory-service:8000")
    PRODUCTION_SERVICE_URL: str = os.getenv("PRODUCTION_SERVICE_URL", "http://production-service:8000")
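

# ---------------------------------------------------------------------------
# Illustrative usage sketch (not part of the original module). It assumes
# BaseServiceSettings behaves like a pydantic-style settings base that can be
# instantiated without arguments; adjust if the shared base needs parameters.
# ---------------------------------------------------------------------------
if __name__ == "__main__":
    config = AlertProcessorConfig()
    # Sanity-check a few values resolved from the environment (or defaults).
    print(config.SERVICE_NAME)       # "alert-processor"
    print(config.DATABASE_URL)       # assembled from ALERT_PROCESSOR_DB_* vars
    print(config.urgent_channels)    # ["whatsapp", "email", "push", "dashboard"]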