Add role-based filtering and improve code

This commit is contained in:
Urtzi Alfaro
2025-10-15 16:12:49 +02:00
parent 96ad5c6692
commit 8f9e9a7edc
158 changed files with 11033 additions and 1544 deletions


@@ -1,81 +0,0 @@
"""Add cloning status tracking
Revision ID: 002
Revises: 001
Create Date: 2025-01-10
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers
revision = '002'
down_revision = 'a1b2c3d4e5f6' # References the actual initial schema revision
branch_labels = None
depends_on = None
def upgrade():
"""Add new status values and cloning tracking fields"""
# Add new columns for cloning progress
op.add_column('demo_sessions', sa.Column('cloning_started_at', sa.DateTime(timezone=True), nullable=True))
op.add_column('demo_sessions', sa.Column('cloning_completed_at', sa.DateTime(timezone=True), nullable=True))
op.add_column('demo_sessions', sa.Column('total_records_cloned', sa.Integer(), server_default='0', nullable=False))
op.add_column('demo_sessions', sa.Column('cloning_progress', postgresql.JSONB(astext_type=sa.Text()), server_default='{}', nullable=False))
# Update the status enum to include new values
# PostgreSQL doesn't support IF NOT EXISTS for enum values in older versions
# We need to check if values exist before adding them
from sqlalchemy import text
conn = op.get_bind()
# Check and add each enum value if it doesn't exist
enum_values_to_add = ['pending', 'ready', 'failed', 'partial']
for value in enum_values_to_add:
# Check if the enum value already exists
result = conn.execute(text("""
SELECT EXISTS (
SELECT 1 FROM pg_enum
WHERE enumlabel = :value
AND enumtypid = (
SELECT oid FROM pg_type WHERE typname = 'demosessionstatus'
)
);
"""), {"value": value})
exists = result.scalar()
if not exists:
# Add the enum value
# Note: on PostgreSQL < 12, ALTER TYPE ... ADD VALUE cannot run inside a
# transaction block; newer versions allow it, but the new value cannot be
# used until the transaction commits (see the autocommit sketch below)
conn.execute(text(f"ALTER TYPE demosessionstatus ADD VALUE '{value}'"))
# Update existing sessions: active → ready
op.execute("""
UPDATE demo_sessions
SET status = 'ready'
WHERE status = 'active' AND data_cloned = true;
""")
def downgrade():
"""Remove cloning status tracking"""
# Remove new columns
op.drop_column('demo_sessions', 'cloning_progress')
op.drop_column('demo_sessions', 'total_records_cloned')
op.drop_column('demo_sessions', 'cloning_completed_at')
op.drop_column('demo_sessions', 'cloning_started_at')
# Note: Cannot easily remove enum values in PostgreSQL
# Migration down would require recreating the enum type
op.execute("""
UPDATE demo_sessions
SET status = 'active'
WHERE status IN ('ready', 'pending', 'failed', 'partial');
""")


@@ -1,64 +0,0 @@
"""initial_schema
Revision ID: a1b2c3d4e5f6
Revises:
Create Date: 2025-10-02 17:45:00.000000+02:00
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision: str = 'a1b2c3d4e5f6'
down_revision: Union[str, None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
# Create demo_sessions table
op.create_table('demo_sessions',
sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
sa.Column('session_id', sa.String(length=100), nullable=False),
sa.Column('user_id', postgresql.UUID(as_uuid=True), nullable=True),
sa.Column('ip_address', sa.String(length=45), nullable=True),
sa.Column('user_agent', sa.String(length=500), nullable=True),
sa.Column('base_demo_tenant_id', postgresql.UUID(as_uuid=True), nullable=False),
sa.Column('virtual_tenant_id', postgresql.UUID(as_uuid=True), nullable=False),
sa.Column('demo_account_type', sa.String(length=50), nullable=False),
sa.Column('status', sa.Enum('active', 'expired', 'destroyed', name='demosessionstatus'), nullable=True),
sa.Column('created_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('expires_at', sa.DateTime(timezone=True), nullable=False),
sa.Column('last_activity_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('destroyed_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('request_count', sa.Integer(), nullable=True),
sa.Column('data_cloned', sa.Boolean(), nullable=True),
sa.Column('redis_populated', sa.Boolean(), nullable=True),
sa.Column('session_metadata', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('session_id')
)
# Create indexes
op.create_index(op.f('ix_demo_sessions_session_id'), 'demo_sessions', ['session_id'], unique=False)
op.create_index(op.f('ix_demo_sessions_base_demo_tenant_id'), 'demo_sessions', ['base_demo_tenant_id'], unique=False)
op.create_index(op.f('ix_demo_sessions_virtual_tenant_id'), 'demo_sessions', ['virtual_tenant_id'], unique=False)
op.create_index(op.f('ix_demo_sessions_status'), 'demo_sessions', ['status'], unique=False)
op.create_index(op.f('ix_demo_sessions_created_at'), 'demo_sessions', ['created_at'], unique=False)
op.create_index(op.f('ix_demo_sessions_expires_at'), 'demo_sessions', ['expires_at'], unique=False)
def downgrade() -> None:
# Drop indexes
op.drop_index(op.f('ix_demo_sessions_expires_at'), table_name='demo_sessions')
op.drop_index(op.f('ix_demo_sessions_created_at'), table_name='demo_sessions')
op.drop_index(op.f('ix_demo_sessions_status'), table_name='demo_sessions')
op.drop_index(op.f('ix_demo_sessions_virtual_tenant_id'), table_name='demo_sessions')
op.drop_index(op.f('ix_demo_sessions_base_demo_tenant_id'), table_name='demo_sessions')
op.drop_index(op.f('ix_demo_sessions_session_id'), table_name='demo_sessions')
# Drop table (note: op.drop_table does not drop the enum type; see the sketch below)
op.drop_table('demo_sessions')
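One caveat on the downgrade above: op.drop_table() only emits DROP TABLE, so the demosessionstatus enum type is left behind in the database. A small sketch of an explicit cleanup that could follow the table drop, assuming no other table still uses the type:

import sqlalchemy as sa
from alembic import op

def drop_demo_session_status_enum():
    """Drop the demosessionstatus type once the table that used it is gone."""
    # checkfirst=True turns this into a no-op if the type does not exist.
    sa.Enum(name='demosessionstatus').drop(op.get_bind(), checkfirst=True)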


@@ -0,0 +1,109 @@
"""initial_schema_20251015_1231
Revision ID: de5ec23ee752
Revises:
Create Date: 2025-10-15 10:31:12.539158
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = 'de5ec23ee752'
down_revision = None
branch_labels = None
depends_on = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('audit_logs',
sa.Column('id', sa.UUID(), nullable=False),
sa.Column('tenant_id', sa.UUID(), nullable=False),
sa.Column('user_id', sa.UUID(), nullable=False),
sa.Column('action', sa.String(length=100), nullable=False),
sa.Column('resource_type', sa.String(length=100), nullable=False),
sa.Column('resource_id', sa.String(length=255), nullable=True),
sa.Column('severity', sa.String(length=20), nullable=False),
sa.Column('service_name', sa.String(length=100), nullable=False),
sa.Column('description', sa.Text(), nullable=True),
sa.Column('changes', postgresql.JSON(astext_type=sa.Text()), nullable=True),
sa.Column('audit_metadata', postgresql.JSON(astext_type=sa.Text()), nullable=True),
sa.Column('ip_address', sa.String(length=45), nullable=True),
sa.Column('user_agent', sa.Text(), nullable=True),
sa.Column('endpoint', sa.String(length=255), nullable=True),
sa.Column('method', sa.String(length=10), nullable=True),
sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
sa.PrimaryKeyConstraint('id')
)
op.create_index('idx_audit_resource_type_action', 'audit_logs', ['resource_type', 'action'], unique=False)
op.create_index('idx_audit_service_created', 'audit_logs', ['service_name', 'created_at'], unique=False)
op.create_index('idx_audit_severity_created', 'audit_logs', ['severity', 'created_at'], unique=False)
op.create_index('idx_audit_tenant_created', 'audit_logs', ['tenant_id', 'created_at'], unique=False)
op.create_index('idx_audit_user_created', 'audit_logs', ['user_id', 'created_at'], unique=False)
op.create_index(op.f('ix_audit_logs_action'), 'audit_logs', ['action'], unique=False)
op.create_index(op.f('ix_audit_logs_created_at'), 'audit_logs', ['created_at'], unique=False)
op.create_index(op.f('ix_audit_logs_resource_id'), 'audit_logs', ['resource_id'], unique=False)
op.create_index(op.f('ix_audit_logs_resource_type'), 'audit_logs', ['resource_type'], unique=False)
op.create_index(op.f('ix_audit_logs_service_name'), 'audit_logs', ['service_name'], unique=False)
op.create_index(op.f('ix_audit_logs_severity'), 'audit_logs', ['severity'], unique=False)
op.create_index(op.f('ix_audit_logs_tenant_id'), 'audit_logs', ['tenant_id'], unique=False)
op.create_index(op.f('ix_audit_logs_user_id'), 'audit_logs', ['user_id'], unique=False)
op.create_table('demo_sessions',
sa.Column('id', sa.UUID(), nullable=False),
sa.Column('session_id', sa.String(length=100), nullable=False),
sa.Column('user_id', sa.UUID(), nullable=True),
sa.Column('ip_address', sa.String(length=45), nullable=True),
sa.Column('user_agent', sa.String(length=500), nullable=True),
sa.Column('base_demo_tenant_id', sa.UUID(), nullable=False),
sa.Column('virtual_tenant_id', sa.UUID(), nullable=False),
sa.Column('demo_account_type', sa.String(length=50), nullable=False),
sa.Column('status', sa.Enum('pending', 'ready', 'failed', 'partial', 'active', 'expired', 'destroyed', name='demosessionstatus'), nullable=True),
sa.Column('created_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('expires_at', sa.DateTime(timezone=True), nullable=False),
sa.Column('last_activity_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('destroyed_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('cloning_started_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('cloning_completed_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('total_records_cloned', sa.Integer(), nullable=True),
sa.Column('cloning_progress', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
sa.Column('request_count', sa.Integer(), nullable=True),
sa.Column('data_cloned', sa.Boolean(), nullable=True),
sa.Column('redis_populated', sa.Boolean(), nullable=True),
sa.Column('session_metadata', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_demo_sessions_base_demo_tenant_id'), 'demo_sessions', ['base_demo_tenant_id'], unique=False)
op.create_index(op.f('ix_demo_sessions_created_at'), 'demo_sessions', ['created_at'], unique=False)
op.create_index(op.f('ix_demo_sessions_expires_at'), 'demo_sessions', ['expires_at'], unique=False)
op.create_index(op.f('ix_demo_sessions_session_id'), 'demo_sessions', ['session_id'], unique=True)
op.create_index(op.f('ix_demo_sessions_status'), 'demo_sessions', ['status'], unique=False)
op.create_index(op.f('ix_demo_sessions_virtual_tenant_id'), 'demo_sessions', ['virtual_tenant_id'], unique=False)
# ### end Alembic commands ###
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.drop_index(op.f('ix_demo_sessions_virtual_tenant_id'), table_name='demo_sessions')
op.drop_index(op.f('ix_demo_sessions_status'), table_name='demo_sessions')
op.drop_index(op.f('ix_demo_sessions_session_id'), table_name='demo_sessions')
op.drop_index(op.f('ix_demo_sessions_expires_at'), table_name='demo_sessions')
op.drop_index(op.f('ix_demo_sessions_created_at'), table_name='demo_sessions')
op.drop_index(op.f('ix_demo_sessions_base_demo_tenant_id'), table_name='demo_sessions')
op.drop_table('demo_sessions')
op.drop_index(op.f('ix_audit_logs_user_id'), table_name='audit_logs')
op.drop_index(op.f('ix_audit_logs_tenant_id'), table_name='audit_logs')
op.drop_index(op.f('ix_audit_logs_severity'), table_name='audit_logs')
op.drop_index(op.f('ix_audit_logs_service_name'), table_name='audit_logs')
op.drop_index(op.f('ix_audit_logs_resource_type'), table_name='audit_logs')
op.drop_index(op.f('ix_audit_logs_resource_id'), table_name='audit_logs')
op.drop_index(op.f('ix_audit_logs_created_at'), table_name='audit_logs')
op.drop_index(op.f('ix_audit_logs_action'), table_name='audit_logs')
op.drop_index('idx_audit_user_created', table_name='audit_logs')
op.drop_index('idx_audit_tenant_created', table_name='audit_logs')
op.drop_index('idx_audit_severity_created', table_name='audit_logs')
op.drop_index('idx_audit_service_created', table_name='audit_logs')
op.drop_index('idx_audit_resource_type_action', table_name='audit_logs')
op.drop_table('audit_logs')
# ### end Alembic commands ###
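Because this commit replaces the previous revision chain (a1b2c3d4e5f6 → 002) with the single revision de5ec23ee752, a database already migrated under the old chain carries an alembic_version entry that no longer resolves. A hedged sketch of realigning such an environment with Alembic's command API, assuming the existing schema already matches the consolidated revision and that alembic.ini sits at the project root:

from alembic import command
from alembic.config import Config

# Path to the Alembic config is an assumption; adjust to the project layout.
cfg = Config("alembic.ini")

# purge=True clears any stale rows in alembic_version before stamping the
# database at the new consolidated revision, without running the migration.
command.stamp(cfg, "de5ec23ee752", purge=True)

Fresh environments simply run the new migration from scratch with alembic upgrade head.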