New alert system and control panel page
@@ -6,6 +6,9 @@ COPY shared/ /shared/
# Then your main service stage
FROM python:3.11-slim

# Create non-root user for security
RUN groupadd -r appgroup && useradd -r -g appgroup appuser

WORKDIR /app

# Install system dependencies
@@ -26,9 +29,15 @@ COPY --from=shared /shared /app/shared
# Copy application code
COPY gateway/ .

# Change ownership to non-root user
RUN chown -R appuser:appgroup /app

# Add shared libraries to Python path
ENV PYTHONPATH="/app:/app/shared:${PYTHONPATH:-}"

# Switch to non-root user
USER appuser

# Expose port
EXPOSE 8000

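Because ENV places both /app and /app/shared on the interpreter's search path, gateway code can import the shared libraries by their top-level package names without installing them. A minimal sketch, where common_models is a hypothetical package under /app/shared used only for illustration:

# Runs inside the container, where PYTHONPATH="/app:/app/shared:..."
import sys

# PYTHONPATH entries appear in sys.path
print([p for p in sys.path if p.startswith("/app")])  # ['/app', '/app/shared', ...]

# "common_models" is hypothetical; it would resolve from
# /app/shared/common_models/ copied in from the shared build stage.
from common_models import events
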
@@ -144,18 +144,226 @@ async def metrics():
    """Metrics endpoint for monitoring"""
    return {"metrics": "enabled"}

# ================================================================
# SERVER-SENT EVENTS (SSE) HELPER FUNCTIONS
# ================================================================

def _get_subscription_channels(tenant_id: str, channel_filters: list) -> list:
    """
    Determine which Redis channels to subscribe to based on filters.

    Args:
        tenant_id: Tenant identifier
        channel_filters: List of channel patterns (e.g., ["inventory.alerts", "*.notifications"])

    Returns:
        List of full channel names to subscribe to

    Examples:
        >>> _get_subscription_channels("abc", ["inventory.alerts"])
        ["tenant:abc:inventory.alerts"]

        >>> _get_subscription_channels("abc", ["*.alerts"])
        ["tenant:abc:inventory.alerts", "tenant:abc:production.alerts", ...]

        >>> _get_subscription_channels("abc", [])
        ["tenant:abc:inventory.alerts", "tenant:abc:inventory.notifications", ...]
    """
    all_domains = ["inventory", "production", "supply_chain", "demand", "operations"]
    all_classes = ["alerts", "notifications"]

    channels = []

    if not channel_filters:
        # Subscribe to ALL channels (backward compatible)
        for domain in all_domains:
            for event_class in all_classes:
                channels.append(f"tenant:{tenant_id}:{domain}.{event_class}")
        # Also subscribe to recommendations (tenant-wide)
        channels.append(f"tenant:{tenant_id}:recommendations")
        # Also subscribe to legacy channel for backward compatibility
        channels.append(f"alerts:{tenant_id}")
        return channels

    # Parse filters and expand wildcards
    for filter_pattern in channel_filters:
        if filter_pattern == "*.*":
            # All channels
            for domain in all_domains:
                for event_class in all_classes:
                    channels.append(f"tenant:{tenant_id}:{domain}.{event_class}")
            channels.append(f"tenant:{tenant_id}:recommendations")

        elif filter_pattern.endswith(".*"):
            # Domain wildcard (e.g., "inventory.*")
            domain = filter_pattern.split(".")[0]
            for event_class in all_classes:
                channels.append(f"tenant:{tenant_id}:{domain}.{event_class}")

        elif filter_pattern.startswith("*."):
            # Class wildcard (e.g., "*.alerts")
            event_class = filter_pattern.split(".")[1]
            if event_class == "recommendations":
                channels.append(f"tenant:{tenant_id}:recommendations")
            else:
                for domain in all_domains:
                    channels.append(f"tenant:{tenant_id}:{domain}.{event_class}")

        elif filter_pattern == "recommendations":
            # Recommendations channel
            channels.append(f"tenant:{tenant_id}:recommendations")

        else:
            # Specific channel (e.g., "inventory.alerts")
            channels.append(f"tenant:{tenant_id}:{filter_pattern}")

    return list(set(channels))  # Remove duplicates

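Because overlapping filters (say, "inventory.*" alongside "inventory.alerts") would otherwise produce duplicate subscriptions, the final set() pass matters; note it also discards ordering, so callers should not rely on channel order. A quick illustrative call:

# Overlapping patterns collapse to unique channels; order is arbitrary.
chans = _get_subscription_channels("abc", ["inventory.*", "inventory.alerts"])
assert set(chans) == {
    "tenant:abc:inventory.alerts",
    "tenant:abc:inventory.notifications",
}
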
async def _load_initial_state(redis_client, tenant_id: str, channel_filters: list) -> list:
    """
    Load initial state from Redis cache based on channel filters.

    Args:
        redis_client: Redis client
        tenant_id: Tenant identifier
        channel_filters: List of channel patterns

    Returns:
        List of initial events
    """
    initial_events = []

    try:
        if not channel_filters:
            # Load from legacy cache if no filters (backward compat)
            legacy_cache_key = f"active_alerts:{tenant_id}"
            cached_data = await redis_client.get(legacy_cache_key)
            if cached_data:
                return json.loads(cached_data)

            # Also try loading from new domain-specific caches
            all_domains = ["inventory", "production", "supply_chain", "demand", "operations"]
            all_classes = ["alerts", "notifications"]

            for domain in all_domains:
                for event_class in all_classes:
                    cache_key = f"active_events:{tenant_id}:{domain}.{event_class}"
                    cached_data = await redis_client.get(cache_key)
                    if cached_data:
                        events = json.loads(cached_data)
                        initial_events.extend(events)

            # Load recommendations
            recommendations_cache_key = f"active_events:{tenant_id}:recommendations"
            cached_data = await redis_client.get(recommendations_cache_key)
            if cached_data:
                initial_events.extend(json.loads(cached_data))

            return initial_events

        # Load based on specific filters
        for filter_pattern in channel_filters:
            # Extract domain and class from filter
            if "." in filter_pattern:
                parts = filter_pattern.split(".")
                domain = parts[0] if parts[0] != "*" else None
                event_class = parts[1] if len(parts) > 1 and parts[1] != "*" else None

                if domain and event_class:
                    # Specific cache (e.g., "inventory.alerts")
                    cache_key = f"active_events:{tenant_id}:{domain}.{event_class}"
                    cached_data = await redis_client.get(cache_key)
                    if cached_data:
                        initial_events.extend(json.loads(cached_data))

                elif domain and not event_class:
                    # Domain wildcard (e.g., "inventory.*")
                    for ec in ["alerts", "notifications"]:
                        cache_key = f"active_events:{tenant_id}:{domain}.{ec}"
                        cached_data = await redis_client.get(cache_key)
                        if cached_data:
                            initial_events.extend(json.loads(cached_data))

                elif not domain and event_class:
                    # Class wildcard (e.g., "*.alerts")
                    all_domains = ["inventory", "production", "supply_chain", "demand", "operations"]
                    for d in all_domains:
                        cache_key = f"active_events:{tenant_id}:{d}.{event_class}"
                        cached_data = await redis_client.get(cache_key)
                        if cached_data:
                            initial_events.extend(json.loads(cached_data))

            elif filter_pattern == "recommendations":
                cache_key = f"active_events:{tenant_id}:recommendations"
                cached_data = await redis_client.get(cache_key)
                if cached_data:
                    initial_events.extend(json.loads(cached_data))

        return initial_events

    except Exception as e:
        logger.error(f"Error loading initial state for tenant {tenant_id}: {e}")
        return []

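Since the loader only touches redis_client.get, it can be exercised without a live Redis; a minimal sketch using AsyncMock, where the tenant id and cached payload are hypothetical:

import asyncio
import json
from unittest.mock import AsyncMock

async def _demo_load():
    redis_mock = AsyncMock()
    # Only the "inventory.alerts" cache key returns data; every other key misses.
    redis_mock.get.side_effect = lambda key: (
        json.dumps([{"title": "Low stock", "event_class": "alert"}])
        if key == "active_events:abc:inventory.alerts"
        else None
    )
    events = await _load_initial_state(redis_mock, "abc", ["inventory.alerts"])
    print(events)  # [{'title': 'Low stock', 'event_class': 'alert'}]

asyncio.run(_demo_load())
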
def _determine_event_type(event_data: dict) -> str:
    """
    Determine SSE event type from event data.

    Args:
        event_data: Event data dictionary

    Returns:
        SSE event type: 'alert', 'notification', or 'recommendation'
    """
    # New event architecture uses 'event_class'
    if 'event_class' in event_data:
        return event_data['event_class']  # 'alert', 'notification', or 'recommendation'

    # Legacy format uses 'item_type'
    if 'item_type' in event_data:
        if event_data['item_type'] == 'recommendation':
            return 'recommendation'
        else:
            return 'alert'

    # Default to 'alert' for backward compatibility
    return 'alert'

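A few illustrative calls covering the new format, the legacy format, and the fallback:

assert _determine_event_type({"event_class": "notification"}) == "notification"
assert _determine_event_type({"item_type": "recommendation"}) == "recommendation"
assert _determine_event_type({"item_type": "alert"}) == "alert"
assert _determine_event_type({}) == "alert"  # default for unrecognized payloads
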
# ================================================================
# SERVER-SENT EVENTS (SSE) ENDPOINT
# ================================================================

@app.get("/api/events")
async def events_stream(
    request: Request,
    tenant_id: str,
    channels: str = None  # Comma-separated channel filters (e.g., "inventory.alerts,production.notifications")
):
    """
    Server-Sent Events stream for real-time notifications with multi-channel support.

    Authentication is handled by auth middleware via query param token.
    User context is available in request.state.user (injected by middleware).
    Tenant ID is provided by the frontend as a query parameter.

    Query Parameters:
        tenant_id: Tenant identifier (required)
        channels: Comma-separated channel filters (optional)
            Examples:
            - "inventory.alerts,production.notifications" - Specific channels
            - "*.alerts" - All alert channels
            - "inventory.*" - All inventory events
            - None - All channels (default, backward compatible)

    New channel pattern: tenant:{tenant_id}:{domain}.{class}
    Examples:
        - tenant:abc:inventory.alerts
        - tenant:abc:production.notifications
        - tenant:abc:recommendations

    Legacy channel (backward compat): alerts:{tenant_id}
    """
    global redis_client

@@ -171,73 +379,74 @@ async def events_stream(request: Request, tenant_id: str):
    if not tenant_id:
        raise HTTPException(status_code=400, detail="tenant_id query parameter is required")

    # Parse channel filters
    channel_filters = []
    if channels:
        channel_filters = [c.strip() for c in channels.split(',') if c.strip()]

    logger.info(f"SSE connection request for user {email}, tenant {tenant_id}, channels: {channel_filters or 'all'}")

    async def event_generator():
        """Generate server-sent events from Redis pub/sub with multi-channel support"""
        pubsub = None
        try:
            pubsub = redis_client.pubsub()

            # Determine which channels to subscribe to
            subscription_channels = _get_subscription_channels(tenant_id, channel_filters)

            # Subscribe to all determined channels
            if subscription_channels:
                await pubsub.subscribe(*subscription_channels)
                logger.info(f"Subscribed to {len(subscription_channels)} channels for tenant {tenant_id}")
            else:
                # Fallback to legacy channel if no channels specified
                legacy_channel = f"alerts:{tenant_id}"
                await pubsub.subscribe(legacy_channel)
                logger.info(f"Subscribed to legacy channel: {legacy_channel}")

            # Send initial connection event
            yield f"event: connection\n"
            yield f"data: {json.dumps({'type': 'connected', 'message': 'SSE connection established', 'channels': subscription_channels or ['all'], 'timestamp': time.time()})}\n\n"

            # Fetch and send initial state from cache (domain-specific or legacy)
            initial_events = await _load_initial_state(redis_client, tenant_id, channel_filters)
            if initial_events:
                logger.info(f"Sending {len(initial_events)} initial events to tenant {tenant_id}")
                yield f"event: initial_state\n"
                yield f"data: {json.dumps(initial_events)}\n\n"
            else:
                # Send empty initial state for compatibility
                yield f"event: initial_state\n"
                yield f"data: {json.dumps([])}\n\n"

            heartbeat_counter = 0

            while True:
                # Check if client has disconnected
                if await request.is_disconnected():
                    logger.info(f"SSE client disconnected for tenant: {tenant_id}")
                    break

                try:
                    # Get message from Redis with timeout
                    message = await asyncio.wait_for(pubsub.get_message(ignore_subscribe_messages=True), timeout=10.0)

                    if message and message['type'] == 'message':
                        # Forward the event from Redis
                        event_data = json.loads(message['data'])

                        # Determine event type for SSE
                        event_type = _determine_event_type(event_data)

                        # Add channel metadata for frontend routing
                        event_data['_channel'] = message['channel'].decode('utf-8') if isinstance(message['channel'], bytes) else message['channel']

                        yield f"event: {event_type}\n"
                        yield f"data: {json.dumps(event_data)}\n\n"

                        logger.debug(f"SSE event sent to tenant {tenant_id}: {event_type} - {event_data.get('title')}")

                except asyncio.TimeoutError:
                    # Send heartbeat every 10 timeouts (100 seconds)
                    heartbeat_counter += 1
@@ -245,17 +454,17 @@ async def events_stream(request: Request, tenant_id: str):
                    yield f"event: heartbeat\n"
                    yield f"data: {json.dumps({'type': 'heartbeat', 'timestamp': time.time()})}\n\n"
                    heartbeat_counter = 0

        except asyncio.CancelledError:
            logger.info(f"SSE connection cancelled for tenant: {tenant_id}")
        except Exception as e:
            logger.error(f"SSE error for tenant {tenant_id}: {e}", exc_info=True)
        finally:
            if pubsub:
                await pubsub.unsubscribe()
                await pubsub.close()
            logger.info(f"SSE connection closed for tenant: {tenant_id}")

    return StreamingResponse(
        event_generator(),
        media_type="text/event-stream",

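For reference, a minimal client sketch that consumes this stream with channel filters. httpx is an assumption (any SSE-capable client works), and the base URL and token parameter mirror the docstring above rather than a confirmed deployment:

import asyncio
import httpx

async def consume(tenant_id: str, channels: str):
    # Hypothetical base URL and auth token; adjust to your deployment.
    params = {"tenant_id": tenant_id, "channels": channels, "token": "<jwt>"}
    async with httpx.AsyncClient(timeout=None) as client:
        async with client.stream("GET", "http://localhost:8000/api/events", params=params) as resp:
            async for line in resp.aiter_lines():
                if line.startswith("event:") or line.startswith("data:"):
                    print(line)

asyncio.run(consume("abc", "inventory.alerts,*.notifications"))
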
@@ -247,6 +247,13 @@ async def proxy_tenant_notifications(request: Request, tenant_id: str = Path(...
# ================================================================
# TENANT-SCOPED ALERT ANALYTICS ENDPOINTS (Must come BEFORE inventory alerts)
# ================================================================

# Exact match for /alerts endpoint (without additional path)
@router.api_route("/{tenant_id}/alerts", methods=["GET", "POST", "PUT", "DELETE", "OPTIONS"])
async def proxy_tenant_alerts_list(request: Request, tenant_id: str = Path(...)):
    """Proxy tenant alerts list requests to alert processor service"""
    target_path = f"/api/v1/tenants/{tenant_id}/alerts"
    return await _proxy_to_alert_processor_service(request, target_path, tenant_id=tenant_id)

@router.api_route("/{tenant_id}/alerts/{path:path}", methods=["GET", "POST", "PUT", "DELETE", "OPTIONS"])
async def proxy_tenant_alert_analytics(request: Request, tenant_id: str = Path(...), path: str = ""):
    """Proxy tenant alert analytics requests to alert processor service"""
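The helper _proxy_to_alert_processor_service is referenced here but not shown in this hunk. A minimal sketch of what such a pass-through typically looks like, assuming httpx and an ALERT_PROCESSOR_URL setting; the names and details are assumptions, not the actual implementation:

import httpx
from fastapi import Request, Response

ALERT_PROCESSOR_URL = "http://alert-processor:8000"  # assumed service address

async def _proxy_to_alert_processor_service(request: Request, target_path: str, tenant_id: str = None):
    # Forward method, headers, query string, and body to the downstream service.
    async with httpx.AsyncClient() as client:
        upstream = await client.request(
            request.method,
            f"{ALERT_PROCESSOR_URL}{target_path}",
            params=dict(request.query_params),
            headers={k: v for k, v in request.headers.items() if k.lower() != "host"},
            content=await request.body(),
        )
    return Response(
        content=upstream.content,
        status_code=upstream.status_code,
        media_type=upstream.headers.get("content-type"),
    )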