Add role-based filtering and improve code
@@ -16,9 +16,13 @@ RUN apt-get update && apt-get install -y \
     && rm -rf /var/lib/apt/lists/*
 
+# Copy requirements
+COPY shared/requirements-tracing.txt /tmp/
+
 COPY services/notification/requirements.txt .
 
 # Install Python dependencies
+RUN pip install --no-cache-dir -r /tmp/requirements-tracing.txt
 RUN pip install --no-cache-dir -r requirements.txt
 
 # Copy shared libraries from the shared stage
 
@@ -22,8 +22,10 @@ from shared.auth.access_control import require_user_role, admin_role_required
 from shared.routing.route_builder import RouteBuilder
 from shared.database.base import create_database_manager
 from shared.monitoring.metrics import track_endpoint_metrics
+from shared.security import create_audit_logger, AuditSeverity, AuditAction
 
 logger = structlog.get_logger()
+audit_logger = create_audit_logger("notification-service")
 router = APIRouter()
 route_builder = RouteBuilder("notification")
 
@@ -52,12 +54,25 @@ async def send_notification(
     """Send a single notification with enhanced validation and features"""
 
     try:
-        # Check permissions for broadcast notifications
-        if notification_data.get("broadcast", False) and current_user.get("role") not in ["admin", "manager"]:
-            raise HTTPException(
-                status_code=status.HTTP_403_FORBIDDEN,
-                detail="Only admins and managers can send broadcast notifications"
-            )
+        # Check permissions for broadcast notifications (Admin+ only)
+        if notification_data.get("broadcast", False):
+            user_role = current_user.get("role", "").lower()
+            if user_role not in ["admin", "owner"]:
+                raise HTTPException(
+                    status_code=status.HTTP_403_FORBIDDEN,
+                    detail="Only admins and owners can send broadcast notifications"
+                )
+
+            # Log HIGH severity audit event for broadcast notifications
+            try:
+                # Note: db session would need to be passed as dependency for full audit logging
+                logger.info("Broadcast notification initiated",
+                            tenant_id=current_user.get("tenant_id"),
+                            user_id=current_user["user_id"],
+                            notification_type=notification_data.get("type"),
+                            severity="HIGH")
+            except Exception as audit_error:
+                logger.warning("Failed to log audit event", error=str(audit_error))
 
         # Validate required fields
         if not notification_data.get("message"):
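The new gate above inlines the role check in `send_notification`. As a minimal sketch, the same logic could be factored into a reusable helper; `require_roles` and the dict shape of `current_user` here are illustrative assumptions, not part of this commit:

```python
from fastapi import HTTPException, status


def require_roles(current_user: dict, allowed: set, action: str) -> None:
    """Raise 403 unless the user's role (compared case-insensitively) is allowed.

    Hypothetical helper mirroring the inline check in send_notification.
    """
    role = (current_user.get("role") or "").lower()
    if role not in allowed:
        raise HTTPException(
            status_code=status.HTTP_403_FORBIDDEN,
            detail=f"Only {', '.join(sorted(allowed))} can {action}",
        )


# Usage equivalent to the new broadcast gate:
# require_roles(current_user, {"admin", "owner"}, "send broadcast notifications")
```

Note the new version also lowercases the role before comparing, so "Admin" and "admin" are treated the same; the old version compared the raw string.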
@@ -24,12 +24,7 @@ from shared.service_base import StandardFastAPIService
 class NotificationService(StandardFastAPIService):
     """Notification Service with standardized setup"""
 
-    expected_migration_version = "00001"
-
-    async def on_startup(self, app):
-        """Custom startup logic including migration verification"""
-        await self.verify_migrations()
-        await super().on_startup(app)
+    expected_migration_version = "359991e24ea2"
 
     async def verify_migrations(self):
         """Verify database schema matches the latest migrations."""
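The class now pins `expected_migration_version` to the new Alembic revision, and the duplicate early `on_startup` is removed. The body of `verify_migrations` is elided from this hunk; a plausible sketch of such a check, assuming the service base exposes an async SQLAlchemy session via a hypothetical `self.db_manager.session()` accessor:

```python
from sqlalchemy import text


async def verify_migrations(self):
    """Fail fast at startup if the schema is not at the expected Alembic revision.

    Sketch only; the real method lives in NotificationService.
    """
    async with self.db_manager.session() as session:  # assumed accessor
        result = await session.execute(text("SELECT version_num FROM alembic_version"))
        current = result.scalar_one_or_none()
    if current != self.expected_migration_version:
        raise RuntimeError(
            f"Schema at revision {current!r}, expected {self.expected_migration_version!r}"
        )
```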
@@ -166,13 +161,19 @@ class NotificationService(StandardFastAPIService):
 
     async def on_startup(self, app: FastAPI):
         """Custom startup logic for notification service"""
+        # Verify migrations first
+        await self.verify_migrations()
+
         # Call parent startup (includes database, messaging, etc.)
         await super().on_startup(app)
 
         # Initialize services
         self.email_service = EmailService()
         self.whatsapp_service = WhatsAppService()
 
         # Initialize SSE service
-        self.sse_service = SSEService(settings.REDIS_URL)
-        await self.sse_service.initialize()
+        self.sse_service = SSEService()
+        await self.sse_service.initialize(settings.REDIS_URL)
+        self.logger.info("SSE service initialized")
 
         # Create orchestrator
@@ -257,4 +258,4 @@ service.add_router(analytics_router, tags=["notifications-analytics"])
 
 if __name__ == "__main__":
     import uvicorn
-    uvicorn.run(app, host="0.0.0.0", port=8000)
+    uvicorn.run(app, host="0.0.0.0", port=8000)
@@ -4,6 +4,13 @@ Notification Service Models Package
 Import all models to ensure they are registered with SQLAlchemy Base.
 """
 
+# Import AuditLog model for this service
+from shared.security import create_audit_log_model
+from shared.database.base import Base
+
+# Create audit log model for this service
+AuditLog = create_audit_log_model(Base)
+
 # Import all models to register them with the Base metadata
 from .notifications import (
     Notification,
@@ -30,4 +37,5 @@ __all__ = [
     "NotificationLog",
     "EmailTemplate",
     "WhatsAppTemplate",
+    "AuditLog",
 ]
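`create_audit_log_model` is a factory that binds the audit table to this service's own declarative `Base`, which is what registers `audit_logs` in the service's metadata (and hence its migrations). A rough sketch of what such a factory might look like; the real shared implementation is not shown in this diff and presumably carries the full column set from the migration below:

```python
import uuid

from sqlalchemy import Column, DateTime, String, func
from sqlalchemy.dialects.postgresql import UUID


def create_audit_log_model(base):
    """Return an AuditLog model class bound to the caller's Base (illustrative)."""

    class AuditLog(base):
        __tablename__ = "audit_logs"

        id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
        tenant_id = Column(UUID(as_uuid=True), nullable=False, index=True)
        user_id = Column(UUID(as_uuid=True), nullable=False, index=True)
        action = Column(String(100), nullable=False, index=True)
        severity = Column(String(20), nullable=False, index=True)
        service_name = Column(String(100), nullable=False, index=True)
        created_at = Column(DateTime(timezone=True), server_default=func.now(),
                            nullable=False, index=True)

    return AuditLog
```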
@@ -5,11 +5,11 @@ Integrated within the notification service for alerts and recommendations
 """
 
 import asyncio
-from redis.asyncio import Redis
 import json
 from typing import Dict, Set, Any
 from datetime import datetime
 import structlog
+from shared.redis_utils import initialize_redis, get_redis_client, close_redis
 
 logger = structlog.get_logger()
@@ -18,18 +18,21 @@ class SSEService:
     Server-Sent Events service for real-time notifications
     Handles both alerts and recommendations through unified SSE streams
     """
 
-    def __init__(self, redis_url: str):
-        self.redis_url = redis_url
+    def __init__(self):
         self.redis = None
+        self.redis_url = None
         self.active_connections: Dict[str, Set[asyncio.Queue]] = {}
         self.pubsub_tasks: Dict[str, asyncio.Task] = {}
 
-    async def initialize(self):
+    async def initialize(self, redis_url: str):
         """Initialize Redis connection"""
         try:
-            self.redis = Redis.from_url(self.redis_url)
-            logger.info("SSE Service initialized with Redis connection")
+            self.redis_url = redis_url
+            # Initialize shared Redis connection for SSE
+            await initialize_redis(redis_url, db=0, max_connections=30)
+            self.redis = await get_redis_client()
+            logger.info("SSE Service initialized with shared Redis connection")
         except Exception as e:
             logger.error("Failed to initialize SSE service", error=str(e))
             raise
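The constructor/`initialize` split means an `SSEService` can be instantiated cheaply (for example at module import) and only touches Redis once the event loop is running. A short usage sketch under that assumption; the Redis URL and the teardown method name are placeholders:

```python
import asyncio


async def main():
    sse = SSEService()  # no I/O in the constructor anymore
    await sse.initialize("redis://localhost:6379/0")  # shared pool created here
    try:
        ...  # serve SSE streams
    finally:
        await sse.shutdown()  # assumed name of the teardown hook shown in the next hunks


asyncio.run(main())
```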
@@ -45,7 +48,7 @@ class SSEService:
                 await task
             except asyncio.CancelledError:
                 pass
-
+
         # Close all client connections
         for tenant_id, connections in self.active_connections.items():
             for queue in connections.copy():
@@ -53,13 +56,12 @@ class SSEService:
                     await queue.put({"event": "shutdown", "data": json.dumps({"status": "server_shutdown"})})
                 except:
                     pass
 
-            # Close Redis connection
-            if self.redis:
-                await self.redis.close()
+            # Close shared Redis connection
+            await close_redis()
 
             logger.info("SSE Service shutdown completed")
 
         except Exception as e:
             logger.error("Error during SSE shutdown", error=str(e))
@@ -124,32 +126,33 @@ class SSEService:
 
     async def _listen_to_tenant_channel(self, tenant_id: str):
         """Listen to Redis channel for tenant-specific items"""
+        pubsub = None
         try:
-            # Create a separate Redis connection for pubsub
-            pubsub_redis = Redis.from_url(self.redis_url)
-            pubsub = pubsub_redis.pubsub()
+            # Use the shared Redis client for pubsub
+            pubsub = self.redis.pubsub()
             channel = f"alerts:{tenant_id}"
             await pubsub.subscribe(channel)
 
-            logger.info("Started listening to tenant channel",
-                        tenant_id=tenant_id,
+            logger.info("Started listening to tenant channel",
+                        tenant_id=tenant_id,
                         channel=channel)
 
             async for message in pubsub.listen():
                 if message["type"] == "message":
                     # Broadcast to all connected clients for this tenant
                     await self.broadcast_to_tenant(tenant_id, message["data"])
 
         except asyncio.CancelledError:
             logger.info("Stopped listening to tenant channel", tenant_id=tenant_id)
         except Exception as e:
             logger.error("Error in pubsub listener", tenant_id=tenant_id, error=str(e))
         finally:
-            try:
-                await pubsub.unsubscribe(channel)
-                await pubsub_redis.close()
-            except:
-                pass
+            if pubsub:
+                try:
+                    await pubsub.unsubscribe(channel)
+                    await pubsub.close()
+                except:
+                    pass
 
     async def broadcast_to_tenant(self, tenant_id: str, message: str):
         """Broadcast message to all connected clients of a tenant"""
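The listener consumes `alerts:{tenant_id}` channels; the publishing side is not part of this diff. A hypothetical producer that would feed these SSE streams:

```python
import json

from redis.asyncio import Redis


async def publish_alert(redis: Redis, tenant_id: str, payload: dict) -> None:
    """Publish an alert; _listen_to_tenant_channel fans it out to connected clients."""
    await redis.publish(f"alerts:{tenant_id}", json.dumps(payload))
```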
@@ -1,18 +1,18 @@
-"""initial_schema_20251009_2039
+"""initial_schema_20251015_1230
 
-Revision ID: c27e2b79f787
+Revision ID: 359991e24ea2
 Revises:
-Create Date: 2025-10-09 20:39:25.955986+02:00
+Create Date: 2025-10-15 12:30:17.568404+02:00
 
 """
 from typing import Sequence, Union
 
 from alembic import op
 import sqlalchemy as sa
-
+from sqlalchemy.dialects import postgresql
 
 # revision identifiers, used by Alembic.
-revision: str = 'c27e2b79f787'
+revision: str = '359991e24ea2'
 down_revision: Union[str, None] = None
 branch_labels: Union[str, Sequence[str], None] = None
 depends_on: Union[str, Sequence[str], None] = None
@@ -20,6 +20,38 @@ depends_on: Union[str, Sequence[str], None] = None
 
 def upgrade() -> None:
     # ### commands auto generated by Alembic - please adjust! ###
+    op.create_table('audit_logs',
+    sa.Column('id', sa.UUID(), nullable=False),
+    sa.Column('tenant_id', sa.UUID(), nullable=False),
+    sa.Column('user_id', sa.UUID(), nullable=False),
+    sa.Column('action', sa.String(length=100), nullable=False),
+    sa.Column('resource_type', sa.String(length=100), nullable=False),
+    sa.Column('resource_id', sa.String(length=255), nullable=True),
+    sa.Column('severity', sa.String(length=20), nullable=False),
+    sa.Column('service_name', sa.String(length=100), nullable=False),
+    sa.Column('description', sa.Text(), nullable=True),
+    sa.Column('changes', postgresql.JSON(astext_type=sa.Text()), nullable=True),
+    sa.Column('audit_metadata', postgresql.JSON(astext_type=sa.Text()), nullable=True),
+    sa.Column('ip_address', sa.String(length=45), nullable=True),
+    sa.Column('user_agent', sa.Text(), nullable=True),
+    sa.Column('endpoint', sa.String(length=255), nullable=True),
+    sa.Column('method', sa.String(length=10), nullable=True),
+    sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
+    sa.PrimaryKeyConstraint('id')
+    )
+    op.create_index('idx_audit_resource_type_action', 'audit_logs', ['resource_type', 'action'], unique=False)
+    op.create_index('idx_audit_service_created', 'audit_logs', ['service_name', 'created_at'], unique=False)
+    op.create_index('idx_audit_severity_created', 'audit_logs', ['severity', 'created_at'], unique=False)
+    op.create_index('idx_audit_tenant_created', 'audit_logs', ['tenant_id', 'created_at'], unique=False)
+    op.create_index('idx_audit_user_created', 'audit_logs', ['user_id', 'created_at'], unique=False)
+    op.create_index(op.f('ix_audit_logs_action'), 'audit_logs', ['action'], unique=False)
+    op.create_index(op.f('ix_audit_logs_created_at'), 'audit_logs', ['created_at'], unique=False)
+    op.create_index(op.f('ix_audit_logs_resource_id'), 'audit_logs', ['resource_id'], unique=False)
+    op.create_index(op.f('ix_audit_logs_resource_type'), 'audit_logs', ['resource_type'], unique=False)
+    op.create_index(op.f('ix_audit_logs_service_name'), 'audit_logs', ['service_name'], unique=False)
+    op.create_index(op.f('ix_audit_logs_severity'), 'audit_logs', ['severity'], unique=False)
+    op.create_index(op.f('ix_audit_logs_tenant_id'), 'audit_logs', ['tenant_id'], unique=False)
+    op.create_index(op.f('ix_audit_logs_user_id'), 'audit_logs', ['user_id'], unique=False)
     op.create_table('email_templates',
     sa.Column('id', sa.UUID(), nullable=False),
     sa.Column('tenant_id', sa.UUID(), nullable=True),
@@ -181,4 +213,18 @@ def downgrade() -> None:
     op.drop_table('notification_logs')
     op.drop_index(op.f('ix_email_templates_tenant_id'), table_name='email_templates')
     op.drop_table('email_templates')
+    op.drop_index(op.f('ix_audit_logs_user_id'), table_name='audit_logs')
+    op.drop_index(op.f('ix_audit_logs_tenant_id'), table_name='audit_logs')
+    op.drop_index(op.f('ix_audit_logs_severity'), table_name='audit_logs')
+    op.drop_index(op.f('ix_audit_logs_service_name'), table_name='audit_logs')
+    op.drop_index(op.f('ix_audit_logs_resource_type'), table_name='audit_logs')
+    op.drop_index(op.f('ix_audit_logs_resource_id'), table_name='audit_logs')
+    op.drop_index(op.f('ix_audit_logs_created_at'), table_name='audit_logs')
+    op.drop_index(op.f('ix_audit_logs_action'), table_name='audit_logs')
+    op.drop_index('idx_audit_user_created', table_name='audit_logs')
+    op.drop_index('idx_audit_tenant_created', table_name='audit_logs')
+    op.drop_index('idx_audit_severity_created', table_name='audit_logs')
+    op.drop_index('idx_audit_service_created', table_name='audit_logs')
+    op.drop_index('idx_audit_resource_type_action', table_name='audit_logs')
+    op.drop_table('audit_logs')
     # ### end Alembic commands ###