Add role-based filtering and improve code

Urtzi Alfaro
2025-10-15 16:12:49 +02:00
parent 96ad5c6692
commit 8f9e9a7edc
158 changed files with 11033 additions and 1544 deletions

View File

@@ -16,9 +16,13 @@ RUN apt-get update && apt-get install -y \
&& rm -rf /var/lib/apt/lists/*
# Copy requirements
COPY shared/requirements-tracing.txt /tmp/
COPY services/auth/requirements.txt .
# Install Python dependencies
RUN pip install --no-cache-dir -r /tmp/requirements-tracing.txt
RUN pip install --no-cache-dir -r requirements.txt
# Copy shared libraries from the shared stage

View File

@@ -25,11 +25,15 @@ from shared.auth.decorators import (
    require_admin_role_dep
)
from shared.routing import RouteBuilder
from shared.security import create_audit_logger, AuditSeverity, AuditAction
logger = structlog.get_logger()
router = APIRouter(tags=["users"])
route_builder = RouteBuilder('auth')
# Initialize audit logger
audit_logger = create_audit_logger("auth-service")
@router.get(route_builder.build_base_route("me", include_tenant_prefix=False), response_model=UserResponse)
async def get_current_user_info(
@@ -184,14 +188,32 @@ async def delete_admin_user(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=f"Admin user {user_id} not found"
        )
    # Log audit event for user deletion
    try:
        # Get tenant_id from current_user or use a placeholder for system-level operations
        tenant_id_str = current_user.get("tenant_id", "00000000-0000-0000-0000-000000000000")
        await audit_logger.log_deletion(
            db_session=db,
            tenant_id=tenant_id_str,
            user_id=current_user["user_id"],
            resource_type="user",
            resource_id=user_id,
            resource_data=user_info,
            description=f"Admin {current_user.get('email', current_user['user_id'])} initiated deletion of user {user_info.get('email', user_id)}",
            endpoint="/delete/{user_id}",
            method="DELETE"
        )
    except Exception as audit_error:
        logger.warning("Failed to log audit event", error=str(audit_error))
    # Start deletion as background task for better performance
    background_tasks.add_task(
        execute_admin_user_deletion,
        user_id=user_id,
        requesting_user_id=current_user["user_id"]
    )
    return {
        "success": True,
        "message": f"Admin user deletion for {user_id} has been initiated",

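For context, a rough sketch of what a log_deletion helper along these lines might do internally, with the AuditLog columns taken from the audit_logs migration further down. The field mapping, the literal action and severity values, and the session handling are illustrative assumptions, not the actual shared.security implementation.

from datetime import datetime, timezone
from uuid import uuid4

async def log_deletion_sketch(db_session, tenant_id, user_id, resource_type,
                              resource_id, resource_data, description,
                              endpoint=None, method=None):
    # AuditLog is the model exported by the service's models package (see the
    # models hunk below); its exact import path is not shown in this diff.
    entry = AuditLog(
        id=uuid4(),
        tenant_id=tenant_id,
        user_id=user_id,
        action="delete",              # stand-in for the matching AuditAction value
        resource_type=resource_type,
        resource_id=str(resource_id),
        severity="warning",           # stand-in for the matching AuditSeverity value
        service_name="auth-service",
        description=description,
        changes={"deleted": resource_data},
        endpoint=endpoint,
        method=method,
        created_at=datetime.now(timezone.utc),
    )
    db_session.add(entry)
    await db_session.commit()

Because the router wraps the call in try/except, a failure to write the audit row only logs a warning and never blocks the actual deletion.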
View File

@@ -8,7 +8,7 @@ import re
import hashlib
from datetime import datetime, timedelta, timezone
from typing import Optional, Dict, Any, List
import redis.asyncio as redis
from shared.redis_utils import get_redis_client
from fastapi import HTTPException, status
import structlog
from passlib.context import CryptContext
@@ -24,8 +24,7 @@ pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto")
# Initialize JWT handler with SAME configuration as gateway
jwt_handler = JWTHandler(settings.JWT_SECRET_KEY, settings.JWT_ALGORITHM)
# Redis client for session management
redis_client = redis.from_url(settings.REDIS_URL)
# Note: Redis client is now accessed via get_redis_client() from shared.redis_utils
class SecurityManager:
"""Security utilities for authentication - FIXED VERSION"""

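The swap from a module-level redis.from_url() call to get_redis_client() suggests a shared, lazily created client. A minimal sketch of that pattern follows, assuming a synchronous accessor and that the connection URL comes from the environment; both are assumptions, and the real shared.redis_utils may read the service settings object or handle pooling differently.

import os

import redis.asyncio as redis

_redis_client: redis.Redis | None = None

def get_redis_client() -> redis.Redis:
    """Return a process-wide async Redis client, creating it on first use."""
    global _redis_client
    if _redis_client is None:
        # Reading REDIS_URL from the environment is an assumption; the real helper
        # likely pulls it from the service's settings instead.
        _redis_client = redis.from_url(os.environ["REDIS_URL"])
    return _redis_client

A single shared client lets every module reuse one connection pool instead of each calling redis.from_url() against the same instance, which is what the replaced module-level client did here.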
View File

@@ -3,6 +3,13 @@
Models export for auth service
"""
# Import AuditLog model for this service
from shared.security import create_audit_log_model
from shared.database.base import Base
# Create audit log model for this service
AuditLog = create_audit_log_model(Base)
from .users import User
from .tokens import RefreshToken, LoginAttempt
from .onboarding import UserOnboardingProgress, UserOnboardingSummary
@@ -13,4 +20,5 @@ __all__ = [
'LoginAttempt',
'UserOnboardingProgress',
'UserOnboardingSummary',
"AuditLog",
]
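For orientation, a rough sketch of the declarative model that create_audit_log_model(Base) presumably returns. Column names, types, and composite indexes mirror the audit_logs table created in the migration below; the factory shape, defaults, and server_default are illustrative assumptions, not the actual shared.security code.

import uuid
from sqlalchemy import UUID, Column, DateTime, Index, String, Text, func
from sqlalchemy.dialects.postgresql import JSON

def create_audit_log_model_sketch(Base):
    """Hypothetical stand-in for shared.security.create_audit_log_model."""
    class AuditLog(Base):
        __tablename__ = "audit_logs"
        __table_args__ = (
            Index("idx_audit_tenant_created", "tenant_id", "created_at"),
            Index("idx_audit_user_created", "user_id", "created_at"),
            Index("idx_audit_severity_created", "severity", "created_at"),
            Index("idx_audit_service_created", "service_name", "created_at"),
            Index("idx_audit_resource_type_action", "resource_type", "action"),
        )

        id = Column(UUID(), primary_key=True, default=uuid.uuid4)
        tenant_id = Column(UUID(), nullable=False, index=True)
        user_id = Column(UUID(), nullable=False, index=True)
        action = Column(String(100), nullable=False, index=True)
        resource_type = Column(String(100), nullable=False, index=True)
        resource_id = Column(String(255), index=True)
        severity = Column(String(20), nullable=False, index=True)
        service_name = Column(String(100), nullable=False, index=True)
        description = Column(Text)
        changes = Column(JSON)
        audit_metadata = Column(JSON)
        ip_address = Column(String(45))
        user_agent = Column(Text)
        endpoint = Column(String(255))
        method = Column(String(10))
        created_at = Column(DateTime(timezone=True), nullable=False,
                            server_default=func.now(), index=True)

    return AuditLog

Passing each service's own Base presumably keeps the table registered in that service's metadata, which would explain why the model is created here rather than imported ready-made.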

View File

@@ -1,18 +1,18 @@
"""initial_schema_20251009_2038
"""initial_schema_20251015_1229
Revision ID: 105797cd9710
Revision ID: 13327ad46a4d
Revises:
Create Date: 2025-10-09 20:38:43.315537+02:00
Create Date: 2025-10-15 12:29:13.886996+02:00
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
-revision: str = '105797cd9710'
+revision: str = '13327ad46a4d'
down_revision: Union[str, None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
@@ -20,6 +20,38 @@ depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('audit_logs',
    sa.Column('id', sa.UUID(), nullable=False),
    sa.Column('tenant_id', sa.UUID(), nullable=False),
    sa.Column('user_id', sa.UUID(), nullable=False),
    sa.Column('action', sa.String(length=100), nullable=False),
    sa.Column('resource_type', sa.String(length=100), nullable=False),
    sa.Column('resource_id', sa.String(length=255), nullable=True),
    sa.Column('severity', sa.String(length=20), nullable=False),
    sa.Column('service_name', sa.String(length=100), nullable=False),
    sa.Column('description', sa.Text(), nullable=True),
    sa.Column('changes', postgresql.JSON(astext_type=sa.Text()), nullable=True),
    sa.Column('audit_metadata', postgresql.JSON(astext_type=sa.Text()), nullable=True),
    sa.Column('ip_address', sa.String(length=45), nullable=True),
    sa.Column('user_agent', sa.Text(), nullable=True),
    sa.Column('endpoint', sa.String(length=255), nullable=True),
    sa.Column('method', sa.String(length=10), nullable=True),
    sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index('idx_audit_resource_type_action', 'audit_logs', ['resource_type', 'action'], unique=False)
    op.create_index('idx_audit_service_created', 'audit_logs', ['service_name', 'created_at'], unique=False)
    op.create_index('idx_audit_severity_created', 'audit_logs', ['severity', 'created_at'], unique=False)
    op.create_index('idx_audit_tenant_created', 'audit_logs', ['tenant_id', 'created_at'], unique=False)
    op.create_index('idx_audit_user_created', 'audit_logs', ['user_id', 'created_at'], unique=False)
    op.create_index(op.f('ix_audit_logs_action'), 'audit_logs', ['action'], unique=False)
    op.create_index(op.f('ix_audit_logs_created_at'), 'audit_logs', ['created_at'], unique=False)
    op.create_index(op.f('ix_audit_logs_resource_id'), 'audit_logs', ['resource_id'], unique=False)
    op.create_index(op.f('ix_audit_logs_resource_type'), 'audit_logs', ['resource_type'], unique=False)
    op.create_index(op.f('ix_audit_logs_service_name'), 'audit_logs', ['service_name'], unique=False)
    op.create_index(op.f('ix_audit_logs_severity'), 'audit_logs', ['severity'], unique=False)
    op.create_index(op.f('ix_audit_logs_tenant_id'), 'audit_logs', ['tenant_id'], unique=False)
    op.create_index(op.f('ix_audit_logs_user_id'), 'audit_logs', ['user_id'], unique=False)
    op.create_table('login_attempts',
    sa.Column('id', sa.UUID(), nullable=False),
    sa.Column('email', sa.String(length=255), nullable=False),
@@ -111,4 +143,18 @@ def downgrade() -> None:
    op.drop_table('refresh_tokens')
    op.drop_index(op.f('ix_login_attempts_email'), table_name='login_attempts')
    op.drop_table('login_attempts')
    op.drop_index(op.f('ix_audit_logs_user_id'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_tenant_id'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_severity'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_service_name'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_resource_type'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_resource_id'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_created_at'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_action'), table_name='audit_logs')
    op.drop_index('idx_audit_user_created', table_name='audit_logs')
    op.drop_index('idx_audit_tenant_created', table_name='audit_logs')
    op.drop_index('idx_audit_severity_created', table_name='audit_logs')
    op.drop_index('idx_audit_service_created', table_name='audit_logs')
    op.drop_index('idx_audit_resource_type_action', table_name='audit_logs')
    op.drop_table('audit_logs')
    # ### end Alembic commands ###
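For illustration, the kind of query the idx_audit_tenant_created composite index appears designed to serve: a tenant's recent audit events in reverse chronological order. The AuditLog import and session handling are assumed; only the index layout comes from the migration above.

from sqlalchemy import select

async def recent_tenant_audit_events(db_session, tenant_id, since, limit=50):
    # AuditLog is the model exported by the auth service's models package.
    stmt = (
        select(AuditLog)
        .where(AuditLog.tenant_id == tenant_id,
               AuditLog.created_at >= since)
        .order_by(AuditLog.created_at.desc())
        .limit(limit)
    )
    result = await db_session.execute(stmt)
    return result.scalars().all()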