Refactor services alembic

Author: Urtzi Alfaro
Date:   2025-09-29 19:16:34 +02:00
Parent: befcc126b0
Commit: 2712a60a2a
68 changed files with 2659 additions and 2511 deletions

View File

@@ -1,93 +0,0 @@
-# A generic, single database configuration.
-[alembic]
-# path to migration scripts
-script_location = .
-# template used to generate migration files
-# file_template = %%(rev)s_%%(slug)s
-# sys.path path, will be prepended to sys.path if present.
-# defaults to the current working directory.
-prepend_sys_path = .
-# timezone to use when rendering the date within the migration file
-# as well as the filename.
-# If specified, requires the python-dateutil library that can be
-# installed by adding `alembic[tz]` to the pip requirements
-# string value is passed to dateutil.tz.gettz()
-# leave blank for localtime
-# timezone =
-# max length of characters to apply to the
-# "slug" field
-# truncate_slug_length = 40
-# set to 'true' to run the environment during
-# the 'revision' command, regardless of autogenerate
-# revision_environment = false
-# set to 'true' to allow .pyc and .pyo files without
-# a source .py file to be detected as revisions in the
-# versions/ directory
-# sourceless = false
-# version number format
-# Uses Alembic datetime format
-version_num_format = %%(year)d%%(month).2d%%(day).2d_%%(hour).2d%%(minute).2d_%%(second).2d
-# version path separator
-version_path_separator = /
-# the output encoding used when revision files
-# are written from script.py.mako
-# output_encoding = utf-8
-sqlalchemy.url = postgresql+asyncpg://alert_processor_user:alert_processor_pass123@alert-processor-db:5432/alert_processor_db
-[post_write_hooks]
-# post_write_hooks defines scripts or Python functions that are run
-# on newly generated revision scripts. See the documentation for further
-# detail and examples
-# format using "black" - use the console_scripts runner, against the "black" entrypoint
-# hooks = black
-# black.type = console_scripts
-# black.entrypoint = black
-# black.options = -l 79 REVISION_SCRIPT_FILENAME
-# Logging configuration
-[loggers]
-keys = root,sqlalchemy,alembic
-[handlers]
-keys = console
-[formatters]
-keys = generic
-[logger_root]
-level = WARN
-handlers = console
-qualname =
-[logger_sqlalchemy]
-level = WARN
-handlers =
-qualname = sqlalchemy.engine
-[logger_alembic]
-level = INFO
-handlers =
-qualname = alembic
-[handler_console]
-class = StreamHandler
-args = (sys.stderr,)
-level = NOTSET
-formatter = generic
-[formatter_generic]
-format = %(levelname)-5.5s [%(name)s] %(message)s
-datefmt = %H:%M:%S
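Note: the deleted alembic.ini above hard-codes the asyncpg DSN in sqlalchemy.url. A minimal sketch of driving Alembic programmatically so the URL can come from the environment instead; the DATABASE_URL variable name and ini path here are assumptions, not taken from this commit:

```python
import os

from alembic import command
from alembic.config import Config


def upgrade_to_head(ini_path: str = "alembic.ini") -> None:
    """Run the equivalent of `alembic upgrade head` from Python."""
    cfg = Config(ini_path)
    # Override the hard-coded sqlalchemy.url when the environment provides one.
    url = os.getenv("DATABASE_URL")  # assumed variable name
    if url:
        cfg.set_main_option("sqlalchemy.url", url)
    command.upgrade(cfg, "head")


if __name__ == "__main__":
    upgrade_to_head()
```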

View File

@@ -1,62 +1,54 @@
"""
Alembic environment configuration for Alert Processor Service
"""
"""Alembic environment configuration for alert-processor service"""
import asyncio
from logging.config import fileConfig
import logging
import os
import sys
from pathlib import Path
from logging.config import fileConfig
from sqlalchemy import pool
from sqlalchemy.engine import Connection
from sqlalchemy.ext.asyncio import async_engine_from_config
from alembic import context
# Add the app directory to the path
sys.path.insert(0, str(Path(__file__).parent.parent))
# Add the service directory to the Python path
service_path = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
if service_path not in sys.path:
sys.path.insert(0, service_path)
# Import models to ensure they're registered
from app.models.alerts import * # noqa
from shared.database.base import Base
# Add shared modules to path
shared_path = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "..", "shared"))
if shared_path not in sys.path:
sys.path.insert(0, shared_path)
# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
try:
from app.core.config import settings
from shared.database.base import Base
# Import all models to ensure they are registered with Base.metadata
from app.models import * # Import all models
except ImportError as e:
print(f"Import error in migrations env.py: {e}")
print(f"Current Python path: {sys.path}")
raise
# this is the Alembic Config object
config = context.config
# Interpret the config file for Python logging.
# This line sets up loggers basically.
# Set database URL from settings if not already set
database_url = os.getenv('DATABASE_URL') or getattr(settings, 'DATABASE_URL', None)
if database_url:
config.set_main_option("sqlalchemy.url", database_url)
# Interpret the config file for Python logging
if config.config_file_name is not None:
fileConfig(config.config_file_name)
# Set the SQLAlchemy URL from environment variable if available
database_url = os.getenv('ALERT_PROCESSOR_DATABASE_URL')
if database_url:
config.set_main_option('sqlalchemy.url', database_url)
# add your model's MetaData object here
# for 'autogenerate' support
# Set target metadata
target_metadata = Base.metadata
# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.
def run_migrations_offline() -> None:
"""Run migrations in 'offline' mode.
This configures the context with just a URL
and not an Engine, though an Engine is acceptable
here as well. By skipping the Engine creation
we don't even need a DBAPI to be available.
Calls to context.execute() here emit the given string to the
script output.
"""
"""Run migrations in 'offline' mode."""
url = config.get_main_option("sqlalchemy.url")
context.configure(
url=url,
@@ -70,9 +62,7 @@ def run_migrations_offline() -> None:
     with context.begin_transaction():
         context.run_migrations()
 def do_run_migrations(connection: Connection) -> None:
-    """Run migrations with database connection"""
     context.configure(
         connection=connection,
         target_metadata=target_metadata,
@@ -83,9 +73,8 @@ def do_run_migrations(connection: Connection) -> None:
     with context.begin_transaction():
         context.run_migrations()
 async def run_async_migrations() -> None:
-    """Run migrations in async mode"""
+    """Run migrations in 'online' mode."""
     connectable = async_engine_from_config(
         config.get_section(config.config_ini_section, {}),
         prefix="sqlalchemy.",
@@ -97,13 +86,11 @@ async def run_async_migrations() -> None:
     await connectable.dispose()
 def run_migrations_online() -> None:
-    """Run migrations in 'online' mode."""
     asyncio.run(run_async_migrations())
 if context.is_offline_mode():
     run_migrations_offline()
 else:
-    run_migrations_online()
\ No newline at end of file
+    run_migrations_online()
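For context, the refactored env.py follows Alembic's standard asyncio template: build an AsyncEngine from the [alembic] section's sqlalchemy.* keys, then bridge into Alembic's synchronous migration context with run_sync(). A minimal sketch of that pattern (the connection handling is elided by the hunks above, so this part is reconstructed from the stock template, not the commit):

```python
import asyncio

from sqlalchemy import pool
from sqlalchemy.engine import Connection
from sqlalchemy.ext.asyncio import async_engine_from_config
from alembic import context  # only available inside an Alembic env.py


def do_run_migrations(connection: Connection) -> None:
    # Alembic's migration context is synchronous; run_sync() below hands it
    # a sync-style Connection backed by the async engine.
    context.configure(connection=connection, target_metadata=None)
    with context.begin_transaction():
        context.run_migrations()


async def run_async_migrations() -> None:
    # Build an AsyncEngine from the ini section, e.g. sqlalchemy.url.
    connectable = async_engine_from_config(
        context.config.get_section(context.config.config_ini_section, {}),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )
    async with connectable.connect() as connection:
        await connection.run_sync(do_run_migrations)
    await connectable.dispose()


if not context.is_offline_mode():
    asyncio.run(run_async_migrations())
```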

View File

@@ -5,15 +5,17 @@ Revises: ${down_revision | comma,n}
 Create Date: ${create_date}
 """
+from typing import Sequence, Union
 from alembic import op
 import sqlalchemy as sa
 ${imports if imports else ""}
 # revision identifiers, used by Alembic.
-revision = ${repr(up_revision)}
-down_revision = ${repr(down_revision)}
-branch_labels = ${repr(branch_labels)}
-depends_on = ${repr(depends_on)}
+revision: str = ${repr(up_revision)}
+down_revision: Union[str, None] = ${repr(down_revision)}
+branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
+depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}
 def upgrade() -> None:
@@ -21,4 +23,4 @@ def upgrade() -> None:
     ${upgrades if upgrades else "pass"}
 def downgrade() -> None:
-    ${downgrades if downgrades else "pass"}
\ No newline at end of file
+    ${downgrades if downgrades else "pass"}

View File

@@ -0,0 +1,29 @@
"""Initial alert-processor service tables
Revision ID: 001_initial_alert_processor
Revises:
Create Date: 2024-01-01 12:00:00.000000
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision: str = '001_initial_alert_processor'
down_revision: Union[str, None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
# TODO: Add table creation statements for alert_processor service
# This is a placeholder migration - replace with actual table definitions
pass
def downgrade() -> None:
# TODO: Add table drop statements for alert_processor service
pass
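One way the TODO above could be filled in, reusing a trimmed subset of the alerts schema from the deleted migration in the next file; this is a sketch, not part of the commit:

```python
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql


def upgrade() -> None:
    # Recreate the alerts table (columns trimmed for brevity).
    op.create_table(
        'alerts',
        sa.Column('id', postgresql.UUID(as_uuid=True), primary_key=True),
        sa.Column('tenant_id', postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column('title', sa.String(length=255), nullable=False),
        sa.Column('message', sa.Text(), nullable=False),
        sa.Column('created_at', sa.DateTime(), nullable=False),
    )
    op.create_index('ix_alerts_tenant_id', 'alerts', ['tenant_id'])


def downgrade() -> None:
    op.drop_index('ix_alerts_tenant_id', 'alerts')
    op.drop_table('alerts')
```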

View File

@@ -1,64 +0,0 @@
"""Initial alerts table
Revision ID: 001
Revises:
Create Date: 2025-09-18 23:17:00
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = '001'
down_revision = None
branch_labels = None
depends_on = None
def upgrade() -> None:
# Create enum types
alert_status_enum = postgresql.ENUM('active', 'resolved', 'acknowledged', 'ignored', name='alertstatus')
alert_severity_enum = postgresql.ENUM('low', 'medium', 'high', 'urgent', name='alertseverity')
alert_status_enum.create(op.get_bind())
alert_severity_enum.create(op.get_bind())
# Create alerts table
op.create_table('alerts',
sa.Column('id', postgresql.UUID(as_uuid=True), primary_key=True),
sa.Column('tenant_id', postgresql.UUID(as_uuid=True), nullable=False),
sa.Column('item_type', sa.String(length=50), nullable=False),
sa.Column('alert_type', sa.String(length=100), nullable=False),
sa.Column('severity', alert_severity_enum, nullable=False),
sa.Column('status', alert_status_enum, nullable=False),
sa.Column('service', sa.String(length=100), nullable=False),
sa.Column('title', sa.String(length=255), nullable=False),
sa.Column('message', sa.Text(), nullable=False),
sa.Column('actions', sa.JSON(), nullable=True),
sa.Column('metadata', sa.JSON(), nullable=True),
sa.Column('created_at', sa.DateTime(), nullable=False),
sa.Column('updated_at', sa.DateTime(), nullable=False),
sa.Column('resolved_at', sa.DateTime(), nullable=True),
)
# Create indexes
op.create_index('ix_alerts_tenant_id', 'alerts', ['tenant_id'])
op.create_index('ix_alerts_severity', 'alerts', ['severity'])
op.create_index('ix_alerts_status', 'alerts', ['status'])
op.create_index('ix_alerts_created_at', 'alerts', ['created_at'])
def downgrade() -> None:
# Drop indexes
op.drop_index('ix_alerts_created_at', 'alerts')
op.drop_index('ix_alerts_status', 'alerts')
op.drop_index('ix_alerts_severity', 'alerts')
op.drop_index('ix_alerts_tenant_id', 'alerts')
# Drop table
op.drop_table('alerts')
# Drop enum types
op.execute('DROP TYPE alertseverity')
op.execute('DROP TYPE alertstatus')
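Aside: the deleted downgrade drops the enum types with raw SQL. An equivalent that reuses the ENUM objects and tolerates repeated runs would look like this (a sketch, not from this commit):

```python
from alembic import op
from sqlalchemy.dialects import postgresql


def drop_alert_enums() -> None:
    bind = op.get_bind()
    # checkfirst=True emits DROP TYPE only if the type still exists,
    # so a partially applied downgrade can be re-run safely.
    postgresql.ENUM(name='alertseverity').drop(bind, checkfirst=True)
    postgresql.ENUM(name='alertstatus').drop(bind, checkfirst=True)
```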