New alert service
@@ -1,4 +1,4 @@
-"""Alembic environment configuration for alert-processor service"""
+"""Alembic environment configuration for alert_processor service"""
 
 import asyncio
 import os
@@ -20,7 +20,6 @@ if shared_path not in sys.path:
     sys.path.insert(0, shared_path)
 
 try:
-    from app.config import AlertProcessorConfig
     from shared.database.base import Base
 
     # Import all models to ensure they are registered with Base.metadata
@@ -35,12 +34,12 @@ except ImportError as e:
 config = context.config
 
 # Determine service name from file path
-service_name = "alert-processor"
-service_name_upper = "ALERT_PROCESSOR"
+service_name = os.path.basename(os.path.dirname(os.path.dirname(__file__)))
+service_name_upper = service_name.upper().replace('-', '_')
 
 # Set database URL from environment variables with multiple fallback strategies
 database_url = (
-    os.getenv('ALERT_PROCESSOR_DATABASE_URL') or # Service-specific
+    os.getenv(f'{service_name_upper}_DATABASE_URL') or # Service-specific
     os.getenv('DATABASE_URL') # Generic fallback
 )
 
@@ -56,18 +55,18 @@ if not database_url:
     if all([postgres_host, postgres_db, postgres_user, postgres_password]):
         database_url = f"postgresql+asyncpg://{postgres_user}:{postgres_password}@{postgres_host}:{postgres_port}/{postgres_db}"
     else:
-        # Try service-specific environment variables (alert-processor specific pattern)
-        db_host = os.getenv('ALERT_PROCESSOR_DB_HOST', 'alert-processor-db-service')
-        db_port = os.getenv('ALERT_PROCESSOR_DB_PORT', '5432')
-        db_name = os.getenv('ALERT_PROCESSOR_DB_NAME', 'alert_processor_db')
-        db_user = os.getenv('ALERT_PROCESSOR_DB_USER', 'alert_processor_user')
-        db_password = os.getenv('ALERT_PROCESSOR_DB_PASSWORD')
+        # Try service-specific environment variables
+        db_host = os.getenv(f'{service_name_upper}_DB_HOST', f'{service_name}-db-service')
+        db_port = os.getenv(f'{service_name_upper}_DB_PORT', '5432')
+        db_name = os.getenv(f'{service_name_upper}_DB_NAME', f'{service_name.replace("-", "_")}_db')
+        db_user = os.getenv(f'{service_name_upper}_DB_USER', f'{service_name.replace("-", "_")}_user')
+        db_password = os.getenv(f'{service_name_upper}_DB_PASSWORD')
 
         if db_password:
             database_url = f"postgresql+asyncpg://{db_user}:{db_password}@{db_host}:{db_port}/{db_name}"
 
 if not database_url:
-    error_msg = "ERROR: No database URL configured for alert-processor service"
+    error_msg = f"ERROR: No database URL configured for {service_name} service"
     print(error_msg)
     raise Exception(error_msg)
 
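The rewritten environment file (Alembic's conventional env.py) derives everything from the migration directory name, so the same file can be copied into any service unchanged. A minimal standalone sketch of the resulting lookup order; the resolve_database_url helper and the example values below are illustrative, not part of the commit, and the POSTGRES_* component branch is omitted for brevity:

import os

def resolve_database_url(service_name):
    """Sketch of env.py's fallback chain: full URL first, then components."""
    upper = service_name.upper().replace('-', '_')
    # Tier 1: explicit URLs (service-specific, then generic)
    url = os.getenv(f'{upper}_DATABASE_URL') or os.getenv('DATABASE_URL')
    if url:
        return url
    # Tier 2: build the URL from service-specific component variables
    snake = service_name.replace('-', '_')
    host = os.getenv(f'{upper}_DB_HOST', f'{service_name}-db-service')
    port = os.getenv(f'{upper}_DB_PORT', '5432')
    name = os.getenv(f'{upper}_DB_NAME', f'{snake}_db')
    user = os.getenv(f'{upper}_DB_USER', f'{snake}_user')
    password = os.getenv(f'{upper}_DB_PASSWORD')
    if password:
        return f"postgresql+asyncpg://{user}:{password}@{host}:{port}/{name}"
    return None  # env.py raises at this point

os.environ['ALERT_PROCESSOR_DB_PASSWORD'] = 's3cret'  # illustrative value only
print(resolve_database_url('alert-processor'))
# postgresql+asyncpg://alert_processor_user:s3cret@alert-processor-db-service:5432/alert_processor_db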
@@ -1,275 +0,0 @@
-"""Unified initial schema for alert-processor service
-
-Revision ID: 20251125_unified_initial_schema
-Revises:
-Create Date: 2025-11-25
-
-This is a unified migration that includes:
-- All enum types (alertstatus, prioritylevel, alerttypeclass)
-- Alerts table with full enrichment capabilities
-- Alert interactions table for user engagement tracking
-- Audit logs table for compliance and debugging
-- All enhancements from incremental migrations:
-  - event_domain column
-  - action_created_at, superseded_by_action_id, hidden_from_ui columns
-"""
-from typing import Sequence, Union
-
-from alembic import op
-import sqlalchemy as sa
-from sqlalchemy.dialects import postgresql
-
-# revision identifiers, used by Alembic.
-revision: str = '20251125_unified_initial_schema'
-down_revision: Union[str, None] = None
-branch_labels: Union[str, Sequence[str], None] = None
-depends_on: Union[str, Sequence[str], None] = None
-
-
-def upgrade() -> None:
-    # ============================================================
-    # Create Enum Types
-    # ============================================================
-    op.execute("""
-        CREATE TYPE alertstatus AS ENUM (
-            'active',
-            'resolved',
-            'acknowledged',
-            'ignored',
-            'in_progress',
-            'dismissed'
-        );
-    """)
-
-    op.execute("""
-        CREATE TYPE prioritylevel AS ENUM (
-            'critical',
-            'important',
-            'standard',
-            'info'
-        );
-    """)
-
-    op.execute("""
-        CREATE TYPE alerttypeclass AS ENUM (
-            'action_needed',
-            'prevented_issue',
-            'trend_warning',
-            'escalation',
-            'information'
-        );
-    """)
-
-    # ============================================================
-    # Create Alerts Table
-    # ============================================================
-    op.create_table('alerts',
-        # Core alert fields
-        sa.Column('id', sa.UUID(), nullable=False),
-        sa.Column('tenant_id', sa.UUID(), nullable=False),
-        sa.Column('item_type', sa.String(length=50), nullable=False),
-        sa.Column('event_domain', sa.String(50), nullable=True), # Added from 20251125_add_event_domain_column
-        sa.Column('alert_type', sa.String(length=100), nullable=False),
-        sa.Column('status', postgresql.ENUM('active', 'resolved', 'acknowledged', 'ignored', 'in_progress', 'dismissed', name='alertstatus', create_type=False), nullable=False),
-        sa.Column('service', sa.String(length=100), nullable=False),
-        sa.Column('title', sa.String(length=500), nullable=False), # Increased from 255 to match model
-        sa.Column('message', sa.Text(), nullable=False),
-        sa.Column('alert_metadata', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
-
-        # Priority scoring fields
-        sa.Column('priority_score', sa.Integer(), nullable=False),
-        sa.Column('priority_level', postgresql.ENUM('critical', 'important', 'standard', 'info', name='prioritylevel', create_type=False), nullable=False),
-
-        # Alert classification
-        sa.Column('type_class', postgresql.ENUM('action_needed', 'prevented_issue', 'trend_warning', 'escalation', 'information', name='alerttypeclass', create_type=False), nullable=False),
-
-        # Context enrichment (JSONB)
-        sa.Column('orchestrator_context', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
-        sa.Column('business_impact', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
-        sa.Column('urgency_context', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
-        sa.Column('user_agency', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
-        sa.Column('trend_context', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
-
-        # Smart actions
-        sa.Column('smart_actions', postgresql.JSONB(astext_type=sa.Text()), nullable=False),
-
-        # AI reasoning
-        sa.Column('ai_reasoning_summary', sa.Text(), nullable=True),
-        sa.Column('confidence_score', sa.Float(), nullable=False, server_default='0.8'),
-
-        # Timing intelligence
-        sa.Column('timing_decision', sa.String(50), nullable=False, server_default='send_now'),
-        sa.Column('scheduled_send_time', sa.DateTime(timezone=True), nullable=True),
-
-        # Placement hints for frontend
-        sa.Column('placement', postgresql.JSONB(astext_type=sa.Text()), nullable=False),
-
-        # Escalation & chaining (Added from 20251123_add_alert_enhancements)
-        sa.Column('action_created_at', sa.DateTime(timezone=True), nullable=True),
-        sa.Column('superseded_by_action_id', postgresql.UUID(as_uuid=True), nullable=True),
-        sa.Column('hidden_from_ui', sa.Boolean(), nullable=False, server_default='false'),
-
-        # Timestamps
-        sa.Column('created_at', sa.DateTime(timezone=True), nullable=True),
-        sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
-        sa.Column('resolved_at', sa.DateTime(timezone=True), nullable=True),
-
-        sa.PrimaryKeyConstraint('id'),
-
-        # Constraints
-        sa.CheckConstraint('priority_score >= 0 AND priority_score <= 100', name='chk_priority_score_range')
-    )
-
-    # ============================================================
-    # Create Indexes for Alerts Table
-    # ============================================================
-    op.create_index(op.f('ix_alerts_created_at'), 'alerts', ['created_at'], unique=False)
-    op.create_index(op.f('ix_alerts_status'), 'alerts', ['status'], unique=False)
-    op.create_index(op.f('ix_alerts_tenant_id'), 'alerts', ['tenant_id'], unique=False)
-
-    # Enrichment indexes
-    op.create_index(
-        'idx_alerts_priority_score',
-        'alerts',
-        ['tenant_id', 'priority_score', 'created_at'],
-        postgresql_using='btree'
-    )
-    op.create_index(
-        'idx_alerts_type_class',
-        'alerts',
-        ['tenant_id', 'type_class', 'status'],
-        postgresql_using='btree'
-    )
-    op.create_index(
-        'idx_alerts_priority_level',
-        'alerts',
-        ['priority_level', 'status'],
-        postgresql_using='btree'
-    )
-    op.create_index(
-        'idx_alerts_timing',
-        'alerts',
-        ['timing_decision', 'scheduled_send_time'],
-        postgresql_using='btree',
-        postgresql_where=sa.text("timing_decision != 'send_now'")
-    )
-
-    # Domain index (from 20251125_add_event_domain_column)
-    op.create_index('idx_alerts_domain', 'alerts', ['tenant_id', 'event_domain', 'status'], unique=False)
-
-    # Escalation indexes (from 20251123_add_alert_enhancements)
-    op.create_index('idx_alerts_action_created', 'alerts', ['tenant_id', 'action_created_at'], unique=False)
-    op.create_index('idx_alerts_superseded', 'alerts', ['superseded_by_action_id'], unique=False)
-    op.create_index('idx_alerts_hidden', 'alerts', ['tenant_id', 'hidden_from_ui', 'status'], unique=False)
-
-    # ============================================================
-    # Create Alert Interactions Table
-    # ============================================================
-    op.create_table('alert_interactions',
-        sa.Column('id', sa.UUID(), nullable=False),
-        sa.Column('tenant_id', sa.UUID(), nullable=False),
-        sa.Column('alert_id', sa.UUID(), nullable=False),
-        sa.Column('user_id', sa.UUID(), nullable=False),
-        sa.Column('interaction_type', sa.String(length=50), nullable=False),
-        sa.Column('interacted_at', sa.DateTime(timezone=True), nullable=False),
-        sa.Column('response_time_seconds', sa.Integer(), nullable=True),
-        sa.Column('interaction_metadata', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
-        sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
-        sa.ForeignKeyConstraint(['alert_id'], ['alerts.id'], ondelete='CASCADE'),
-        sa.PrimaryKeyConstraint('id')
-    )
-
-    # Create indexes for alert_interactions
-    op.create_index('idx_alert_interactions_tenant_alert', 'alert_interactions', ['tenant_id', 'alert_id'], unique=False)
-    op.create_index('idx_alert_interactions_user', 'alert_interactions', ['user_id'], unique=False)
-    op.create_index('idx_alert_interactions_time', 'alert_interactions', ['interacted_at'], unique=False)
-    op.create_index('idx_alert_interactions_type', 'alert_interactions', ['interaction_type'], unique=False)
-    op.create_index('idx_alert_interactions_tenant_time', 'alert_interactions', ['tenant_id', 'interacted_at'], unique=False)
-
-    # ============================================================
-    # Create Audit Logs Table
-    # ============================================================
-    op.create_table('audit_logs',
-        sa.Column('id', sa.UUID(), nullable=False),
-        sa.Column('tenant_id', sa.UUID(), nullable=False),
-        sa.Column('user_id', sa.UUID(), nullable=False),
-        sa.Column('action', sa.String(length=100), nullable=False),
-        sa.Column('resource_type', sa.String(length=100), nullable=False),
-        sa.Column('resource_id', sa.String(length=255), nullable=True),
-        sa.Column('severity', sa.String(length=20), nullable=False),
-        sa.Column('service_name', sa.String(length=100), nullable=False),
-        sa.Column('description', sa.Text(), nullable=True),
-        sa.Column('changes', postgresql.JSON(astext_type=sa.Text()), nullable=True),
-        sa.Column('audit_metadata', postgresql.JSON(astext_type=sa.Text()), nullable=True),
-        sa.Column('ip_address', sa.String(length=45), nullable=True),
-        sa.Column('user_agent', sa.Text(), nullable=True),
-        sa.Column('endpoint', sa.String(length=255), nullable=True),
-        sa.Column('method', sa.String(length=10), nullable=True),
-        sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
-        sa.PrimaryKeyConstraint('id')
-    )
-
-    # Create indexes for audit_logs
-    op.create_index('idx_audit_resource_type_action', 'audit_logs', ['resource_type', 'action'], unique=False)
-    op.create_index('idx_audit_service_created', 'audit_logs', ['service_name', 'created_at'], unique=False)
-    op.create_index('idx_audit_severity_created', 'audit_logs', ['severity', 'created_at'], unique=False)
-    op.create_index('idx_audit_tenant_created', 'audit_logs', ['tenant_id', 'created_at'], unique=False)
-    op.create_index('idx_audit_user_created', 'audit_logs', ['user_id', 'created_at'], unique=False)
-    op.create_index(op.f('ix_audit_logs_action'), 'audit_logs', ['action'], unique=False)
-    op.create_index(op.f('ix_audit_logs_created_at'), 'audit_logs', ['created_at'], unique=False)
-    op.create_index(op.f('ix_audit_logs_resource_id'), 'audit_logs', ['resource_id'], unique=False)
-    op.create_index(op.f('ix_audit_logs_resource_type'), 'audit_logs', ['resource_type'], unique=False)
-    op.create_index(op.f('ix_audit_logs_service_name'), 'audit_logs', ['service_name'], unique=False)
-    op.create_index(op.f('ix_audit_logs_severity'), 'audit_logs', ['severity'], unique=False)
-    op.create_index(op.f('ix_audit_logs_tenant_id'), 'audit_logs', ['tenant_id'], unique=False)
-    op.create_index(op.f('ix_audit_logs_user_id'), 'audit_logs', ['user_id'], unique=False)
-
-    # Remove server defaults after table creation (for new inserts)
-    op.alter_column('alerts', 'confidence_score', server_default=None)
-    op.alter_column('alerts', 'timing_decision', server_default=None)
-    op.alter_column('alerts', 'hidden_from_ui', server_default=None)
-
-
-def downgrade() -> None:
-    # Drop audit_logs table and indexes
-    op.drop_index(op.f('ix_audit_logs_user_id'), table_name='audit_logs')
-    op.drop_index(op.f('ix_audit_logs_tenant_id'), table_name='audit_logs')
-    op.drop_index(op.f('ix_audit_logs_severity'), table_name='audit_logs')
-    op.drop_index(op.f('ix_audit_logs_service_name'), table_name='audit_logs')
-    op.drop_index(op.f('ix_audit_logs_resource_type'), table_name='audit_logs')
-    op.drop_index(op.f('ix_audit_logs_resource_id'), table_name='audit_logs')
-    op.drop_index(op.f('ix_audit_logs_created_at'), table_name='audit_logs')
-    op.drop_index(op.f('ix_audit_logs_action'), table_name='audit_logs')
-    op.drop_index('idx_audit_user_created', table_name='audit_logs')
-    op.drop_index('idx_audit_tenant_created', table_name='audit_logs')
-    op.drop_index('idx_audit_severity_created', table_name='audit_logs')
-    op.drop_index('idx_audit_service_created', table_name='audit_logs')
-    op.drop_index('idx_audit_resource_type_action', table_name='audit_logs')
-    op.drop_table('audit_logs')
-
-    # Drop alert_interactions table and indexes
-    op.drop_index('idx_alert_interactions_tenant_time', table_name='alert_interactions')
-    op.drop_index('idx_alert_interactions_type', table_name='alert_interactions')
-    op.drop_index('idx_alert_interactions_time', table_name='alert_interactions')
-    op.drop_index('idx_alert_interactions_user', table_name='alert_interactions')
-    op.drop_index('idx_alert_interactions_tenant_alert', table_name='alert_interactions')
-    op.drop_table('alert_interactions')
-
-    # Drop alerts table and indexes
-    op.drop_index('idx_alerts_hidden', table_name='alerts')
-    op.drop_index('idx_alerts_superseded', table_name='alerts')
-    op.drop_index('idx_alerts_action_created', table_name='alerts')
-    op.drop_index('idx_alerts_domain', table_name='alerts')
-    op.drop_index('idx_alerts_timing', table_name='alerts')
-    op.drop_index('idx_alerts_priority_level', table_name='alerts')
-    op.drop_index('idx_alerts_type_class', table_name='alerts')
-    op.drop_index('idx_alerts_priority_score', table_name='alerts')
-    op.drop_index(op.f('ix_alerts_tenant_id'), table_name='alerts')
-    op.drop_index(op.f('ix_alerts_status'), table_name='alerts')
-    op.drop_index(op.f('ix_alerts_created_at'), table_name='alerts')
-    op.drop_table('alerts')
-
-    # Drop enum types
-    op.execute('DROP TYPE IF EXISTS alerttypeclass;')
-    op.execute('DROP TYPE IF EXISTS prioritylevel;')
-    op.execute('DROP TYPE IF EXISTS alertstatus;')
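One pattern from the deleted revision worth noting: the enum types are created once with raw DDL, and every column that uses them passes create_type=False so SQLAlchemy does not emit a second CREATE TYPE when the table DDL runs. A minimal sketch of the same pattern, with an invented type and table name:

from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql

def upgrade() -> None:
    # Create the type exactly once, up front
    op.execute("CREATE TYPE ticketstate AS ENUM ('open', 'closed');")
    op.create_table(
        'tickets',
        sa.Column('id', sa.Integer(), primary_key=True),
        # create_type=False: reference the existing type instead of
        # letting SQLAlchemy try to create it again
        sa.Column('state', postgresql.ENUM('open', 'closed', name='ticketstate', create_type=False), nullable=False),
    )

def downgrade() -> None:
    op.drop_table('tickets')
    op.execute('DROP TYPE IF EXISTS ticketstate;')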
@@ -0,0 +1,97 @@
+"""
+Clean unified events table schema.
+
+Revision ID: 20251205_unified
+Revises:
+Create Date: 2025-12-05
+"""
+
+from alembic import op
+import sqlalchemy as sa
+from sqlalchemy.dialects import postgresql
+
+# revision identifiers
+revision = '20251205_unified'
+down_revision = None
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    """
+    Create unified events table with JSONB enrichment contexts.
+    """
+
+    # Create events table
+    op.create_table(
+        'events',
+
+        # Core fields
+        sa.Column('id', postgresql.UUID(as_uuid=True), primary_key=True),
+        sa.Column('tenant_id', postgresql.UUID(as_uuid=True), nullable=False),
+        sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
+        sa.Column('updated_at', sa.DateTime(timezone=True), nullable=False),
+
+        # Classification
+        sa.Column('event_class', sa.String(50), nullable=False),
+        sa.Column('event_domain', sa.String(50), nullable=False),
+        sa.Column('event_type', sa.String(100), nullable=False),
+        sa.Column('service', sa.String(50), nullable=False),
+
+        # i18n content (NO hardcoded title/message)
+        sa.Column('i18n_title_key', sa.String(200), nullable=False),
+        sa.Column('i18n_title_params', postgresql.JSONB, nullable=False, server_default=sa.text("'{}'::jsonb")),
+        sa.Column('i18n_message_key', sa.String(200), nullable=False),
+        sa.Column('i18n_message_params', postgresql.JSONB, nullable=False, server_default=sa.text("'{}'::jsonb")),
+
+        # Priority
+        sa.Column('priority_score', sa.Integer, nullable=False, server_default='50'),
+        sa.Column('priority_level', sa.String(20), nullable=False),
+        sa.Column('type_class', sa.String(50), nullable=False),
+
+        # Enrichment contexts (JSONB)
+        sa.Column('orchestrator_context', postgresql.JSONB, nullable=True),
+        sa.Column('business_impact', postgresql.JSONB, nullable=True),
+        sa.Column('urgency', postgresql.JSONB, nullable=True),
+        sa.Column('user_agency', postgresql.JSONB, nullable=True),
+        sa.Column('trend_context', postgresql.JSONB, nullable=True),
+
+        # Smart actions
+        sa.Column('smart_actions', postgresql.JSONB, nullable=False, server_default=sa.text("'[]'::jsonb")),
+
+        # AI reasoning
+        sa.Column('ai_reasoning_summary_key', sa.String(200), nullable=True),
+        sa.Column('ai_reasoning_summary_params', postgresql.JSONB, nullable=True),
+        sa.Column('ai_reasoning_details', postgresql.JSONB, nullable=True),
+        sa.Column('confidence_score', sa.Float, nullable=True),
+
+        # Entity references
+        sa.Column('entity_links', postgresql.JSONB, nullable=False, server_default=sa.text("'{}'::jsonb")),
+
+        # Status
+        sa.Column('status', sa.String(20), nullable=False, server_default='active'),
+        sa.Column('resolved_at', sa.DateTime(timezone=True), nullable=True),
+        sa.Column('acknowledged_at', sa.DateTime(timezone=True), nullable=True),
+
+        # Metadata
+        sa.Column('event_metadata', postgresql.JSONB, nullable=False, server_default=sa.text("'{}'::jsonb"))
+    )
+
+    # Create indexes for efficient queries (matching SQLAlchemy model)
+    op.create_index('idx_events_tenant_status', 'events', ['tenant_id', 'status'])
+    op.create_index('idx_events_tenant_priority', 'events', ['tenant_id', 'priority_score'])
+    op.create_index('idx_events_tenant_class', 'events', ['tenant_id', 'event_class'])
+    op.create_index('idx_events_tenant_created', 'events', ['tenant_id', 'created_at'])
+    op.create_index('idx_events_type_class_status', 'events', ['type_class', 'status'])
+
+
+def downgrade():
+    """
+    Drop events table and all indexes.
+    """
+    op.drop_index('idx_events_type_class_status', 'events')
+    op.drop_index('idx_events_tenant_created', 'events')
+    op.drop_index('idx_events_tenant_class', 'events')
+    op.drop_index('idx_events_tenant_priority', 'events')
+    op.drop_index('idx_events_tenant_status', 'events')
+    op.drop_table('events')
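Unlike the deleted schema, the new events table stores no rendered text: i18n_title_key and i18n_message_key name entries in a message catalog, and the matching *_params JSONB columns carry the substitutions, so display strings are produced per locale at read time. A rough rendering sketch; the catalog, key, and params below are invented for illustration and are not part of the commit:

# Hypothetical message catalog; real entries would live in translation files.
CATALOG = {
    'events.invoice_overdue.title': '{count} invoices overdue for {customer}',
}

def render(key: str, params: dict) -> str:
    # Resolve the stored key against the catalog and interpolate the params
    return CATALOG[key].format(**params)

print(render('events.invoice_overdue.title', {'count': 3, 'customer': 'Acme'}))
# -> 3 invoices overdue for Acme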