Fix DB issue 2s

Urtzi Alfaro
2025-09-30 21:58:10 +02:00
parent 147893015e
commit 7cc4b957a5
77 changed files with 4385 additions and 1211 deletions

View File

@@ -1,3 +1,10 @@
# Alert Processor Dockerfile
# Add this stage at the top of each service Dockerfile
FROM python:3.11-slim AS shared
WORKDIR /shared
COPY shared/ /shared/
# Then your main service stage
FROM python:3.11-slim
WORKDIR /app
@@ -5,29 +12,27 @@ WORKDIR /app
# Install system dependencies
RUN apt-get update && apt-get install -y \
    gcc \
    curl \
    && rm -rf /var/lib/apt/lists/*
# Copy requirements and install dependencies
# Copy requirements
COPY services/alert_processor/requirements.txt .
# Install Python dependencies
RUN pip install --no-cache-dir -r requirements.txt
# Copy shared libraries
COPY shared/ /app/shared/
# Copy shared libraries from the shared stage
COPY --from=shared /shared /app/shared
# Copy application code
COPY services/alert_processor/app/ /app/app/
# Copy migrations and alembic config
COPY services/alert_processor/migrations/ /app/migrations/
COPY services/alert_processor/alembic.ini /app/alembic.ini
COPY services/alert_processor/ .
# Copy scripts directory
COPY scripts/ /app/scripts/
# Create non-root user
RUN useradd -m -u 1000 appuser && chown -R appuser:appuser /app
USER appuser
# Add shared libraries to Python path
ENV PYTHONPATH="/app:/app/shared:${PYTHONPATH:-}"
EXPOSE 8000
CMD ["python", "-m", "app.main"]
# Run application (worker service, not a web API)
CMD ["python", "-m", "app.main"]

View File

@@ -21,7 +21,7 @@ if shared_path not in sys.path:
    sys.path.insert(0, shared_path)
try:
    from app.core.config import settings
    from app.config import AlertProcessorConfig
    from shared.database.base import Base
    # Import all models to ensure they are registered with Base.metadata
@@ -36,7 +36,8 @@ except ImportError as e:
config = context.config
# Set database URL from environment variables or settings
database_url = os.getenv('DATABASE_URL')
# Try service-specific DATABASE_URL first, then fall back to generic
database_url = os.getenv('ALERT_PROCESSOR_DATABASE_URL') or os.getenv('DATABASE_URL')
# If DATABASE_URL is not set, construct from individual components
if not database_url:
@@ -49,11 +50,22 @@ if not database_url:
    if all([postgres_host, postgres_db, postgres_user, postgres_password]):
        database_url = f"postgresql+asyncpg://{postgres_user}:{postgres_password}@{postgres_host}:{postgres_port}/{postgres_db}"
    else:
        # Fallback to settings
        database_url = getattr(settings, 'DATABASE_URL', None)
        # As a last resort, construct the database URL manually from individual environment variables
        # that are likely to be set in the Kubernetes environment
        db_user = os.getenv("ALERT_PROCESSOR_DB_USER", "alert_processor_user")
        db_password = os.getenv("ALERT_PROCESSOR_DB_PASSWORD", "alert_processor_pass123")
        db_host = os.getenv("ALERT_PROCESSOR_DB_HOST", "alert-processor-db-service")
        db_port = os.getenv("ALERT_PROCESSOR_DB_PORT", "5432")
        db_name = os.getenv("ALERT_PROCESSOR_DB_NAME", "alert_processor_db")
        database_url = f"postgresql+asyncpg://{db_user}:{db_password}@{db_host}:{db_port}/{db_name}"
if database_url:
    print(f"Using database URL: {database_url}")
    config.set_main_option("sqlalchemy.url", database_url)
else:
    print("ERROR: No database URL configured!")
    raise Exception("No database URL found after all fallback methods")
# Interpret the config file for Python logging
if config.config_file_name is not None:
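
Spread across the hunks above, the new lookup order in env.py is: service-specific ALERT_PROCESSOR_DATABASE_URL, generic DATABASE_URL, a URL built from the POSTGRES_* components, and finally the ALERT_PROCESSOR_DB_* values with in-cluster defaults. A condensed sketch of that chain (the POSTGRES_* environment variable names are assumed, since the lines that read them are not shown in this hunk):

import os

def resolve_database_url() -> str:
    """Mirrors the fallback chain used in migrations/env.py."""
    # 1) Explicit URLs, service-specific first
    url = os.getenv("ALERT_PROCESSOR_DATABASE_URL") or os.getenv("DATABASE_URL")
    if url:
        return url
    # 2) Generic POSTGRES_* components (variable names assumed)
    host = os.getenv("POSTGRES_HOST")
    port = os.getenv("POSTGRES_PORT", "5432")
    db = os.getenv("POSTGRES_DB")
    user = os.getenv("POSTGRES_USER")
    password = os.getenv("POSTGRES_PASSWORD")
    if all([host, db, user, password]):
        return f"postgresql+asyncpg://{user}:{password}@{host}:{port}/{db}"
    # 3) Last resort: ALERT_PROCESSOR_DB_* values with in-cluster defaults
    user = os.getenv("ALERT_PROCESSOR_DB_USER", "alert_processor_user")
    password = os.getenv("ALERT_PROCESSOR_DB_PASSWORD", "alert_processor_pass123")
    host = os.getenv("ALERT_PROCESSOR_DB_HOST", "alert-processor-db-service")
    port = os.getenv("ALERT_PROCESSOR_DB_PORT", "5432")
    name = os.getenv("ALERT_PROCESSOR_DB_NAME", "alert_processor_db")
    return f"postgresql+asyncpg://{user}:{password}@{host}:{port}/{name}"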

View File

@@ -0,0 +1,53 @@
"""Initial schema for alert processor
Revision ID: 00000001
Revises:
Create Date: 2025-09-30 18:00:00.000000
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = '000001'
down_revision = None
branch_labels = None
depends_on = None
def upgrade() -> None:
    # Create alerts table (the ENUM types are created automatically along with the table)
    op.create_table('alerts',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('tenant_id', sa.UUID(), nullable=False),
        sa.Column('item_type', sa.String(length=50), nullable=False),
        sa.Column('alert_type', sa.String(length=100), nullable=False),
        sa.Column('severity', sa.Enum('LOW', 'MEDIUM', 'HIGH', 'URGENT', name='alertseverity'), nullable=False),
        sa.Column('status', sa.Enum('ACTIVE', 'RESOLVED', 'ACKNOWLEDGED', 'IGNORED', name='alertstatus'), nullable=False),
        sa.Column('service', sa.String(length=100), nullable=False),
        sa.Column('title', sa.String(length=255), nullable=False),
        sa.Column('message', sa.Text(), nullable=False),
        sa.Column('actions', postgresql.JSON(astext_type=sa.Text()), nullable=True),
        sa.Column('alert_metadata', postgresql.JSON(astext_type=sa.Text()), nullable=True),
        sa.Column('created_at', sa.DateTime(), nullable=True),
        sa.Column('updated_at', sa.DateTime(), nullable=True),
        sa.Column('resolved_at', sa.DateTime(), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_alerts_tenant_id'), 'alerts', ['tenant_id'], unique=False)
    op.create_index(op.f('ix_alerts_severity'), 'alerts', ['severity'], unique=False)
    op.create_index(op.f('ix_alerts_status'), 'alerts', ['status'], unique=False)
    op.create_index(op.f('ix_alerts_created_at'), 'alerts', ['created_at'], unique=False)

def downgrade() -> None:
    op.drop_index(op.f('ix_alerts_created_at'), table_name='alerts')
    op.drop_index(op.f('ix_alerts_status'), table_name='alerts')
    op.drop_index(op.f('ix_alerts_severity'), table_name='alerts')
    op.drop_index(op.f('ix_alerts_tenant_id'), table_name='alerts')
    op.drop_table('alerts')
    # Drop the ENUM types explicitly; op.drop_table('alerts') only knows the table name and does not remove them
    sa.Enum(name='alertseverity').drop(op.get_bind(), checkfirst=True)
    sa.Enum(name='alertstatus').drop(op.get_bind(), checkfirst=True)
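
With the migrations directory and alembic.ini now copied into the image, the schema can be applied at container start. A sketch of a runner (hypothetical wrapper, not part of the commit; it assumes the /app paths used in the Dockerfile above and that env.py handles the asyncpg driver it configures):

# run_migrations.py (hypothetical)
from alembic import command
from alembic.config import Config

def upgrade_to_head(ini_path: str = "/app/alembic.ini") -> None:
    """Apply all pending Alembic revisions, including the initial alerts schema."""
    cfg = Config(ini_path)
    # Point Alembic at the migrations directory copied into the image.
    cfg.set_main_option("script_location", "/app/migrations")
    command.upgrade(cfg, "head")

if __name__ == "__main__":
    upgrade_to_head()

Running `alembic upgrade head` from /app has the same effect when alembic.ini already points at the migrations directory.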

View File

@@ -4,9 +4,11 @@ aio-pika==9.3.1
redis==5.0.1
asyncpg==0.29.0
sqlalchemy==2.0.23
alembic==1.12.1
psycopg2-binary==2.9.9
structlog==23.2.0
prometheus-client==0.19.0
pydantic-settings==2.1.0
pydantic==2.5.2
httpx==0.25.2
python-jose[cryptography]==3.3.0
python-jose[cryptography]==3.3.0