New enterprise feature

Urtzi Alfaro
2025-11-30 09:12:40 +01:00
parent f9d0eec6ec
commit 972db02f6d
176 changed files with 19741 additions and 1361 deletions

View File

@@ -23,12 +23,11 @@ sys.path.insert(0, str(Path(__file__).parent.parent.parent.parent))
 from shared.utils.demo_dates import adjust_date_for_demo, BASE_REFERENCE_DATE
 from shared.database.base import create_database_manager
+from app.core.config import settings
 logger = structlog.get_logger()
 router = APIRouter(prefix="/internal/demo", tags=["internal"])
-# Internal API key for service-to-service auth
-INTERNAL_API_KEY = os.getenv("INTERNAL_API_KEY", "dev-internal-key-change-in-production")
 # Database manager for this module
 config = AlertProcessorConfig()
 db_manager = create_database_manager(config.DATABASE_URL, "alert-processor-internal-demo")
@@ -40,13 +39,12 @@ async def get_db():
         yield session
 # Base demo tenant IDs
-DEMO_TENANT_SAN_PABLO = "a1b2c3d4-e5f6-47a8-b9c0-d1e2f3a4b5c6"
-DEMO_TENANT_LA_ESPIGA = "b2c3d4e5-f6a7-48b9-c0d1-e2f3a4b5c6d7"
+DEMO_TENANT_PROFESSIONAL = "a1b2c3d4-e5f6-47a8-b9c0-d1e2f3a4b5c6"
 def verify_internal_api_key(x_internal_api_key: Optional[str] = Header(None)):
     """Verify internal API key for service-to-service communication"""
-    if x_internal_api_key != INTERNAL_API_KEY:
+    if x_internal_api_key != settings.INTERNAL_API_KEY:
         logger.warning("Unauthorized internal API access attempted")
         raise HTTPException(status_code=403, detail="Invalid internal API key")
     return True
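
A minimal sketch of the calling side of this dependency, assuming a hypothetical /internal/demo/seed route behind verify_internal_api_key (the route path and service host are illustrative, not taken from this commit). FastAPI maps the x_internal_api_key parameter to the X-Internal-API-Key header:

import asyncio
import os

import httpx

async def trigger_demo_seed() -> None:
    # The key must match settings.INTERNAL_API_KEY on the alert processor side
    headers = {"X-Internal-API-Key": os.environ["INTERNAL_API_KEY"]}
    # Hypothetical host and route; any route protected by verify_internal_api_key works the same way
    async with httpx.AsyncClient(base_url="http://alert-processor:8000") as client:
        resp = await client.post("/internal/demo/seed", headers=headers)
        resp.raise_for_status()  # a 403 here means the key did not match

if __name__ == "__main__":
    asyncio.run(trigger_demo_seed())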

View File

@@ -0,0 +1,132 @@
# ================================================================
# services/alert_processor/app/core/config.py
# ================================================================
"""
Alert Processor Service Configuration
"""
import os
from pydantic import Field
from shared.config.base import BaseServiceSettings
class AlertProcessorSettings(BaseServiceSettings):
"""Alert Processor service specific settings"""
# Service Identity
APP_NAME: str = "Alert Processor Service"
SERVICE_NAME: str = "alert-processor-service"
VERSION: str = "1.0.0"
DESCRIPTION: str = "Central alert and recommendation processor"
# Database configuration (secure approach - build from components)
@property
def DATABASE_URL(self) -> str:
"""Build database URL from secure components"""
# Try complete URL first (for backward compatibility)
complete_url = os.getenv("ALERT_PROCESSOR_DATABASE_URL")
if complete_url:
return complete_url
# Build from components (secure approach)
user = os.getenv("ALERT_PROCESSOR_DB_USER", "alert_processor_user")
password = os.getenv("ALERT_PROCESSOR_DB_PASSWORD", "alert_processor_pass123")  # dev-only default; override in production
host = os.getenv("ALERT_PROCESSOR_DB_HOST", "localhost")
port = os.getenv("ALERT_PROCESSOR_DB_PORT", "5432")
name = os.getenv("ALERT_PROCESSOR_DB_NAME", "alert_processor_db")
return f"postgresql+asyncpg://{user}:{password}@{host}:{port}/{name}"
# Use dedicated Redis DB for alert processing
REDIS_DB: int = int(os.getenv("ALERT_PROCESSOR_REDIS_DB", "6"))
# Alert processing configuration
BATCH_SIZE: int = int(os.getenv("ALERT_BATCH_SIZE", "10"))
PROCESSING_TIMEOUT: int = int(os.getenv("ALERT_PROCESSING_TIMEOUT", "30"))
# Deduplication settings
ALERT_DEDUPLICATION_WINDOW_MINUTES: int = int(os.getenv("ALERT_DEDUPLICATION_WINDOW_MINUTES", "15"))
RECOMMENDATION_DEDUPLICATION_WINDOW_MINUTES: int = int(os.getenv("RECOMMENDATION_DEDUPLICATION_WINDOW_MINUTES", "60"))
# Alert severity channel mappings (hardcoded for now to avoid config parsing issues)
@property
def urgent_channels(self) -> list[str]:
return ["whatsapp", "email", "push", "dashboard"]
@property
def high_channels(self) -> list[str]:
return ["whatsapp", "email", "dashboard"]
@property
def medium_channels(self) -> list[str]:
return ["email", "dashboard"]
@property
def low_channels(self) -> list[str]:
return ["dashboard"]
# ============================================================
# ENRICHMENT CONFIGURATION (NEW)
# ============================================================
# Priority scoring weights
BUSINESS_IMPACT_WEIGHT: float = float(os.getenv("BUSINESS_IMPACT_WEIGHT", "0.4"))
URGENCY_WEIGHT: float = float(os.getenv("URGENCY_WEIGHT", "0.3"))
USER_AGENCY_WEIGHT: float = float(os.getenv("USER_AGENCY_WEIGHT", "0.2"))
CONFIDENCE_WEIGHT: float = float(os.getenv("CONFIDENCE_WEIGHT", "0.1"))
# Priority thresholds
CRITICAL_THRESHOLD: int = int(os.getenv("CRITICAL_THRESHOLD", "90"))
IMPORTANT_THRESHOLD: int = int(os.getenv("IMPORTANT_THRESHOLD", "70"))
STANDARD_THRESHOLD: int = int(os.getenv("STANDARD_THRESHOLD", "50"))
# Timing intelligence
TIMING_INTELLIGENCE_ENABLED: bool = os.getenv("TIMING_INTELLIGENCE_ENABLED", "true").lower() == "true"
BATCH_LOW_PRIORITY_ALERTS: bool = os.getenv("BATCH_LOW_PRIORITY_ALERTS", "true").lower() == "true"
BUSINESS_HOURS_START: int = int(os.getenv("BUSINESS_HOURS_START", "6"))
BUSINESS_HOURS_END: int = int(os.getenv("BUSINESS_HOURS_END", "22"))
PEAK_HOURS_START: int = int(os.getenv("PEAK_HOURS_START", "7"))
PEAK_HOURS_END: int = int(os.getenv("PEAK_HOURS_END", "11"))
PEAK_HOURS_EVENING_START: int = int(os.getenv("PEAK_HOURS_EVENING_START", "17"))
PEAK_HOURS_EVENING_END: int = int(os.getenv("PEAK_HOURS_EVENING_END", "19"))
# Grouping
GROUPING_TIME_WINDOW_MINUTES: int = int(os.getenv("GROUPING_TIME_WINDOW_MINUTES", "15"))
MAX_ALERTS_PER_GROUP: int = int(os.getenv("MAX_ALERTS_PER_GROUP", "5"))
# Email digest
EMAIL_DIGEST_ENABLED: bool = os.getenv("EMAIL_DIGEST_ENABLED", "true").lower() == "true"
DIGEST_SEND_TIME: str = os.getenv("DIGEST_SEND_TIME", "18:00")
DIGEST_SEND_TIME_HOUR: int = int(os.getenv("DIGEST_SEND_TIME", "18:00").split(":")[0])  # hour component of DIGEST_SEND_TIME
DIGEST_MIN_ALERTS: int = int(os.getenv("DIGEST_MIN_ALERTS", "5"))
# Alert grouping
ALERT_GROUPING_ENABLED: bool = os.getenv("ALERT_GROUPING_ENABLED", "true").lower() == "true"
MIN_ALERTS_FOR_GROUPING: int = int(os.getenv("MIN_ALERTS_FOR_GROUPING", "3"))
# Trend detection
TREND_DETECTION_ENABLED: bool = os.getenv("TREND_DETECTION_ENABLED", "true").lower() == "true"
TREND_LOOKBACK_DAYS: int = int(os.getenv("TREND_LOOKBACK_DAYS", "7"))
TREND_SIGNIFICANCE_THRESHOLD: float = float(os.getenv("TREND_SIGNIFICANCE_THRESHOLD", "0.15"))
# Context enrichment
ENRICHMENT_TIMEOUT_SECONDS: int = int(os.getenv("ENRICHMENT_TIMEOUT_SECONDS", "10"))
ORCHESTRATOR_CONTEXT_CACHE_TTL: int = int(os.getenv("ORCHESTRATOR_CONTEXT_CACHE_TTL", "300"))
# Peak hours (aliases for enrichment services)
EVENING_PEAK_START: int = int(os.getenv("PEAK_HOURS_EVENING_START", "17"))
EVENING_PEAK_END: int = int(os.getenv("PEAK_HOURS_EVENING_END", "19"))
# Service URLs for enrichment
ORCHESTRATOR_SERVICE_URL: str = os.getenv("ORCHESTRATOR_SERVICE_URL", "http://orchestrator-service:8000")
INVENTORY_SERVICE_URL: str = os.getenv("INVENTORY_SERVICE_URL", "http://inventory-service:8000")
PRODUCTION_SERVICE_URL: str = os.getenv("PRODUCTION_SERVICE_URL", "http://production-service:8000")
# Global settings instance
settings = AlertProcessorSettings()
def get_settings():
"""Get the global settings instance"""
return settings
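
The four enrichment weights above sum to 1.0, so a component-weighted score maps naturally onto the CRITICAL/IMPORTANT/STANDARD thresholds. A minimal sketch of how such a scorer could combine them; the component inputs, function name, and level labels are illustrative assumptions, not the service's actual enrichment code:

from app.core.config import settings

def score_alert(business_impact: float, urgency: float,
                user_agency: float, confidence: float) -> tuple[int, str]:
    """Combine 0-100 component scores into a priority score and level (illustrative)."""
    score = round(
        business_impact * settings.BUSINESS_IMPACT_WEIGHT  # 0.4 by default
        + urgency * settings.URGENCY_WEIGHT                # 0.3
        + user_agency * settings.USER_AGENCY_WEIGHT        # 0.2
        + confidence * settings.CONFIDENCE_WEIGHT          # 0.1
    )
    if score >= settings.CRITICAL_THRESHOLD:       # default 90
        level = "critical"
    elif score >= settings.IMPORTANT_THRESHOLD:    # default 70
        level = "important"
    elif score >= settings.STANDARD_THRESHOLD:     # default 50
        level = "standard"
    else:
        level = "informational"
    return score, level

# With the defaults: score_alert(90, 80, 70, 80) -> 0.4*90 + 0.3*80 + 0.2*70 + 0.1*80 = 82 -> "important"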

View File

@@ -43,8 +43,8 @@ logger = structlog.get_logger()
 # Demo tenant IDs (match those from other services)
 DEMO_TENANT_IDS = [
-    uuid.UUID("a1b2c3d4-e5f6-47a8-b9c0-d1e2f3a4b5c6"), # San Pablo
-    uuid.UUID("b2c3d4e5-f6a7-48b9-c0d1-e2f3a4b5c6d7")  # La Espiga
+    uuid.UUID("a1b2c3d4-e5f6-47a8-b9c0-d1e2f3a4b5c6"), # Professional
+    uuid.UUID("b2c3d4e5-f6a7-48b9-c0d1-e2f3a4b5c6d7")
 ]
 # System user ID for AI actions

View File

@@ -0,0 +1,228 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Demo Retail Alerts Seeding Script for Alert Processor Service
Creates stockout and low-stock alerts for child retail outlets
Usage:
python /app/scripts/demo/seed_demo_alerts_retail.py
Environment Variables Required:
ALERTS_DATABASE_URL - PostgreSQL connection string (falls back to ALERT_PROCESSOR_DATABASE_URL, then DATABASE_URL)
"""
import asyncio
import uuid
import sys
import os
import random
from datetime import datetime, timezone, timedelta
from pathlib import Path
# Add app to path
sys.path.insert(0, str(Path(__file__).parent.parent.parent))
# Add shared to path
sys.path.insert(0, str(Path(__file__).parent.parent.parent.parent.parent))
from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine
from sqlalchemy.orm import sessionmaker
import structlog
from shared.utils.demo_dates import BASE_REFERENCE_DATE
from app.models import Alert, AlertStatus, PriorityLevel, AlertTypeClass
structlog.configure(
processors=[
structlog.stdlib.add_log_level,
structlog.processors.TimeStamper(fmt="iso"),
structlog.dev.ConsoleRenderer()
]
)
logger = structlog.get_logger()
# Fixed Demo Tenant IDs
DEMO_TENANT_CHILD_1 = uuid.UUID("d4e5f6a7-b8c9-40d1-e2f3-a4b5c6d7e8f9")
DEMO_TENANT_CHILD_2 = uuid.UUID("e5f6a7b8-c9d0-41e2-f3a4-b5c6d7e8f9a0")
DEMO_TENANT_CHILD_3 = uuid.UUID("f6a7b8c9-d0e1-42f3-a4b5-c6d7e8f9a0b1")
# Product IDs
PRODUCT_IDS = {
"PRO-BAG-001": "20000000-0000-0000-0000-000000000001",
"PRO-CRO-001": "20000000-0000-0000-0000-000000000002",
"PRO-PUE-001": "20000000-0000-0000-0000-000000000003",
"PRO-NAP-001": "20000000-0000-0000-0000-000000000004",
}
RETAIL_TENANTS = [
(DEMO_TENANT_CHILD_1, "Madrid Centro"),
(DEMO_TENANT_CHILD_2, "Barcelona Gràcia"),
(DEMO_TENANT_CHILD_3, "Valencia Ruzafa")
]
ALERT_SCENARIOS = [
{
"alert_type": "low_stock",
"title": "Stock bajo detectado",
"message_template": "Stock bajo de {product} en {location}. Unidades restantes: {units}",
"priority_score": 75,
"priority_level": PriorityLevel.IMPORTANT,
"type_class": AlertTypeClass.ACTION_NEEDED,
"financial_impact": 150.0
},
{
"alert_type": "stockout_risk",
"title": "Riesgo de quiebre de stock",
"message_template": "Riesgo de quiebre de stock para {product} en {location}. Reposición urgente necesaria",
"priority_score": 85,
"priority_level": PriorityLevel.IMPORTANT,
"type_class": AlertTypeClass.ESCALATION,
"financial_impact": 300.0
},
{
"alert_type": "expiring_soon",
"title": "Productos próximos a vencer",
"message_template": "Productos {product} próximos a vencer en {location}. Validar calidad antes de venta",
"priority_score": 65,
"priority_level": PriorityLevel.STANDARD,
"type_class": AlertTypeClass.TREND_WARNING,
"financial_impact": 80.0
}
]
async def seed_alerts_for_retail_tenant(db: AsyncSession, tenant_id: uuid.UUID, tenant_name: str):
"""Seed alerts for a retail tenant"""
logger.info(f"Seeding alerts for: {tenant_name}", tenant_id=str(tenant_id))
created = 0
# Create 2-3 alerts per retail outlet
for i in range(random.randint(2, 3)):
scenario = random.choice(ALERT_SCENARIOS)
# Pick a random product
sku = random.choice(list(PRODUCT_IDS.keys()))
base_product_id = uuid.UUID(PRODUCT_IDS[sku])
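# Derive a tenant-scoped product ID by XOR-ing the tenant UUID with the base
# product UUID; deterministic per tenant/product pair, and reversible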
tenant_int = int(tenant_id.hex, 16)
product_id = uuid.UUID(int=tenant_int ^ int(base_product_id.hex, 16))
# Random status - most are active, some acknowledged
status = AlertStatus.ACKNOWLEDGED if random.random() < 0.3 else AlertStatus.ACTIVE
# Generate message from template
message = scenario["message_template"].format(
product=sku,
location=tenant_name,
units=random.randint(5, 15)
)
alert = Alert(
id=uuid.uuid4(),
tenant_id=tenant_id,
item_type="alert",
event_domain="inventory",
alert_type=scenario["alert_type"],
service="inventory",
title=scenario["title"],
message=message,
type_class=scenario["type_class"],
status=status,
priority_score=scenario["priority_score"],
priority_level=scenario["priority_level"],
orchestrator_context={
"product_id": str(product_id),
"product_sku": sku,
"location": tenant_name,
"created_by": "inventory_monitoring_system"
},
business_impact={
"financial_impact": scenario["financial_impact"],
"currency": "EUR",
"units_affected": random.randint(10, 50),
"impact_description": f"Impacto estimado: €{scenario['financial_impact']:.2f}"
},
urgency_context={
"time_until_consequence": f"{random.randint(2, 12)} horas",
"consequence": "Pérdida de ventas o desperdicio de producto",
"detection_time": (BASE_REFERENCE_DATE - timedelta(hours=random.randint(1, 24))).isoformat()
},
user_agency={
"user_can_fix": True,
"requires_supplier": scenario["alert_type"] == "stockout_risk",
"suggested_actions": [
"Revisar stock físico",
"Contactar con Obrador para reposición urgente" if scenario["alert_type"] == "stockout_risk" else "Ajustar pedido próximo"
]
},
trend_context=None,
smart_actions=[
{
"action_type": "restock",
"description": "Contactar con Obrador para reposición" if scenario["alert_type"] == "stockout_risk" else "Incluir en próximo pedido",
"priority": "high" if scenario["alert_type"] == "stockout_risk" else "medium"
}
],
ai_reasoning_summary=f"Sistema detectó {scenario['alert_type']} para {sku} basado en niveles actuales de inventario",
confidence_score=0.85,
timing_decision="send_now",
placement=["dashboard", "notification_panel"] if scenario["type_class"] == AlertTypeClass.ESCALATION else ["dashboard"],
alert_metadata={
"product_sku": sku,
"detection_method": "automated_monitoring",
"threshold_triggered": "min_stock_level"
},
created_at=BASE_REFERENCE_DATE - timedelta(hours=random.randint(1, 24)),
updated_at=BASE_REFERENCE_DATE
)
db.add(alert)
created += 1
await db.commit()
logger.info(f"Created {created} alerts for {tenant_name}")
return {"tenant_id": str(tenant_id), "alerts_created": created}
async def seed_all(db: AsyncSession):
"""Seed all retail alerts"""
logger.info("=" * 80)
logger.info("🚨 Starting Demo Retail Alerts Seeding")
logger.info("=" * 80)
results = []
for tenant_id, tenant_name in RETAIL_TENANTS:
result = await seed_alerts_for_retail_tenant(db, tenant_id, f"{tenant_name} (Retail)")
results.append(result)
total = sum(r["alerts_created"] for r in results)
logger.info(f"✅ Total alerts created: {total}")
return {"total_alerts": total, "results": results}
async def main():
database_url = os.getenv("ALERTS_DATABASE_URL") or os.getenv("ALERT_PROCESSOR_DATABASE_URL") or os.getenv("DATABASE_URL")
if not database_url:
logger.error("❌ No database URL set (ALERTS_DATABASE_URL, ALERT_PROCESSOR_DATABASE_URL or DATABASE_URL)")
return 1
if database_url.startswith("postgresql://"):
database_url = database_url.replace("postgresql://", "postgresql+asyncpg://", 1)
engine = create_async_engine(database_url, echo=False, pool_pre_ping=True)
async_session = sessionmaker(engine, class_=AsyncSession, expire_on_commit=False)
try:
async with async_session() as session:
await seed_all(session)
logger.info("🎉 Retail alerts seed completed!")
return 0
except Exception as e:
logger.error(f"❌ Seed failed: {e}", exc_info=True)
return 1
finally:
await engine.dispose()
if __name__ == "__main__":
exit_code = asyncio.run(main())
sys.exit(exit_code)
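
The XOR derivation in seed_alerts_for_retail_tenant gives each retail outlet stable, tenant-specific product IDs without extra bookkeeping. A standalone check reusing IDs from the script above (illustrative verification only, not part of the commit):

import uuid

tenant_id = uuid.UUID("d4e5f6a7-b8c9-40d1-e2f3-a4b5c6d7e8f9")        # Madrid Centro
base_product_id = uuid.UUID("20000000-0000-0000-0000-000000000001")  # PRO-BAG-001

# Same expression as the seeding script
derived = uuid.UUID(int=int(tenant_id.hex, 16) ^ int(base_product_id.hex, 16))

# XOR is self-inverse: masking with the tenant again recovers the base product ID
recovered = uuid.UUID(int=tenant_id.int ^ derived.int)
assert recovered == base_product_id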