Improve AI logic
This commit is contained in:
67
services/ai_insights/migrations/env.py
Normal file
67
services/ai_insights/migrations/env.py
Normal file
@@ -0,0 +1,67 @@
|
||||
"""Alembic environment configuration."""

from logging.config import fileConfig

from sqlalchemy import engine_from_config, pool

from alembic import context

import os

import sys

# Add parent directory to path for imports
# (the service package `app` lives one level above migrations/).
sys.path.insert(0, os.path.realpath(os.path.join(os.path.dirname(__file__), '..')))

from app.core.config import settings
from app.core.database import Base
# Star-import pulls every model module in so each table registers
# itself on Base.metadata before autogenerate inspects it.
from app.models import * # Import all models

# this is the Alembic Config object
# (provides access to the values within alembic.ini)
config = context.config

# Interpret the config file for Python logging
if config.config_file_name is not None:
    fileConfig(config.config_file_name)

# Set sqlalchemy.url from settings
# Replace asyncpg with psycopg2 for synchronous Alembic migrations
# (Alembic runs synchronously; the async driver prefix would fail here).
db_url = settings.DATABASE_URL.replace('postgresql+asyncpg://', 'postgresql://')
config.set_main_option('sqlalchemy.url', db_url)

# Add your model's MetaData object here for 'autogenerate' support
target_metadata = Base.metadata
|
||||
|
||||
|
||||
def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode.

    No DBAPI connection is made: the configured URL is used only to
    pick the dialect, and the migration DDL is emitted as literal SQL
    to the script output.
    """
    context.configure(
        url=config.get_main_option("sqlalchemy.url"),
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )

    with context.begin_transaction():
        context.run_migrations()
|
||||
|
||||
|
||||
def run_migrations_online() -> None:
    """Run migrations in 'online' mode.

    Builds an Engine from the [alembic] ini section and applies the
    migrations over a live connection. NullPool is used so no
    connections linger once the migration run finishes.
    """
    ini_section = config.get_section(config.config_ini_section, {})
    engine = engine_from_config(
        ini_section,
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )

    with engine.connect() as connection:
        context.configure(
            connection=connection,
            target_metadata=target_metadata,
        )
        with context.begin_transaction():
            context.run_migrations()
|
||||
|
||||
|
||||
# Entry point: Alembic selects offline mode for `alembic upgrade --sql`
# (SQL script generation); otherwise migrate over a live connection.
if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
|
||||
26
services/ai_insights/migrations/script.py.mako
Normal file
26
services/ai_insights/migrations/script.py.mako
Normal file
@@ -0,0 +1,26 @@
|
||||
## Mako template used by `alembic revision` to generate new migration
## files. Lines starting with `##` are Mako comments and are NOT
## rendered into the generated migration.
"""${message}

Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa
${imports if imports else ""}

# revision identifiers, used by Alembic.
revision: str = ${repr(up_revision)}
down_revision: Union[str, None] = ${repr(down_revision)}
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}


def upgrade() -> None:
    ${upgrades if upgrades else "pass"}


def downgrade() -> None:
    ${downgrades if downgrades else "pass"}
|
||||
@@ -0,0 +1,111 @@
|
||||
"""Initial schema for AI Insights Service

Revision ID: 001
Revises:
Create Date: 2025-11-02 14:30:00.000000

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects.postgresql import UUID, JSONB

# revision identifiers, used by Alembic.
revision: str = '001'
down_revision: Union[str, None] = None  # base revision: nothing precedes it
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
|
||||
|
||||
|
||||
def upgrade() -> None:
    """Create the initial AI Insights schema.

    Tables created (PostgreSQL-specific types: UUID, JSONB):
      * ai_insights          -- per-tenant insight records
      * insight_feedback     -- outcome tracking for applied insights
      * insight_correlations -- parent/child links between insights

    NOTE(review): index names here ('idx_status', 'idx_created_at', ...)
    are schema-global in PostgreSQL; presumably each service owns its own
    database/schema so they cannot collide -- verify before sharing a schema.
    """
    # Create ai_insights table
    op.create_table(
        'ai_insights',
        # IDs are supplied by the application (no server-side default).
        sa.Column('id', UUID(as_uuid=True), primary_key=True),
        sa.Column('tenant_id', UUID(as_uuid=True), nullable=False),
        sa.Column('type', sa.String(50), nullable=False),
        sa.Column('priority', sa.String(20), nullable=False),
        sa.Column('category', sa.String(50), nullable=False),
        sa.Column('title', sa.String(255), nullable=False),
        sa.Column('description', sa.Text, nullable=False),
        sa.Column('impact_type', sa.String(50)),
        sa.Column('impact_value', sa.DECIMAL(10, 2)),
        sa.Column('impact_unit', sa.String(20)),
        # 0-100 score, enforced by check_confidence_range below.
        sa.Column('confidence', sa.Integer, nullable=False),
        sa.Column('metrics_json', JSONB),
        sa.Column('actionable', sa.Boolean, nullable=False, server_default='true'),
        sa.Column('recommendation_actions', JSONB),
        sa.Column('status', sa.String(20), nullable=False, server_default='new'),
        sa.Column('source_service', sa.String(50)),
        sa.Column('source_data_id', sa.String(100)),
        sa.Column('created_at', sa.TIMESTAMP(timezone=True), server_default=sa.func.now(), nullable=False),
        # FIX: dropped the original onupdate=sa.func.now() -- `onupdate` is a
        # SQLAlchemy ORM-level (Python-side) hook and emits no DDL from a
        # migration, so no trigger was ever created. updated_at must be
        # maintained by the application (or by a trigger in a later migration).
        sa.Column('updated_at', sa.TIMESTAMP(timezone=True), server_default=sa.func.now(), nullable=False),
        sa.Column('applied_at', sa.TIMESTAMP(timezone=True)),
        sa.Column('expired_at', sa.TIMESTAMP(timezone=True)),
        sa.CheckConstraint('confidence >= 0 AND confidence <= 100', name='check_confidence_range')
    )

    # Create indexes for ai_insights
    op.create_index('idx_tenant_id', 'ai_insights', ['tenant_id'])
    op.create_index('idx_type', 'ai_insights', ['type'])
    op.create_index('idx_priority', 'ai_insights', ['priority'])
    op.create_index('idx_category', 'ai_insights', ['category'])
    op.create_index('idx_confidence', 'ai_insights', ['confidence'])
    op.create_index('idx_status', 'ai_insights', ['status'])
    op.create_index('idx_actionable', 'ai_insights', ['actionable'])
    op.create_index('idx_created_at', 'ai_insights', ['created_at'])
    # Composite indexes for the common dashboard query shapes.
    op.create_index('idx_tenant_status_category', 'ai_insights', ['tenant_id', 'status', 'category'])
    op.create_index('idx_tenant_created_confidence', 'ai_insights', ['tenant_id', 'created_at', 'confidence'])
    op.create_index('idx_actionable_status', 'ai_insights', ['actionable', 'status'])

    # Create insight_feedback table
    op.create_table(
        'insight_feedback',
        sa.Column('id', UUID(as_uuid=True), primary_key=True),
        sa.Column('insight_id', UUID(as_uuid=True), nullable=False),
        sa.Column('action_taken', sa.String(100)),
        sa.Column('result_data', JSONB),
        sa.Column('success', sa.Boolean, nullable=False),
        sa.Column('error_message', sa.Text),
        # Expected vs. actual impact, for measuring prediction accuracy.
        sa.Column('expected_impact_value', sa.DECIMAL(10, 2)),
        sa.Column('actual_impact_value', sa.DECIMAL(10, 2)),
        sa.Column('variance_percentage', sa.DECIMAL(5, 2)),
        sa.Column('applied_by', sa.String(100)),
        sa.Column('created_at', sa.TIMESTAMP(timezone=True), server_default=sa.func.now(), nullable=False),
        # Feedback rows are meaningless without their insight: cascade delete.
        sa.ForeignKeyConstraint(['insight_id'], ['ai_insights.id'], ondelete='CASCADE')
    )

    # Create indexes for insight_feedback
    op.create_index('idx_feedback_insight_id', 'insight_feedback', ['insight_id'])
    op.create_index('idx_feedback_success', 'insight_feedback', ['success'])
    op.create_index('idx_feedback_created_at', 'insight_feedback', ['created_at'])
    op.create_index('idx_insight_success', 'insight_feedback', ['insight_id', 'success'])
    op.create_index('idx_created_success', 'insight_feedback', ['created_at', 'success'])

    # Create insight_correlations table
    op.create_table(
        'insight_correlations',
        sa.Column('id', UUID(as_uuid=True), primary_key=True),
        sa.Column('parent_insight_id', UUID(as_uuid=True), nullable=False),
        sa.Column('child_insight_id', UUID(as_uuid=True), nullable=False),
        sa.Column('correlation_type', sa.String(50), nullable=False),
        # DECIMAL(3, 2): strength in the -9.99..9.99 range (e.g. 0.00-1.00).
        sa.Column('correlation_strength', sa.DECIMAL(3, 2), nullable=False),
        sa.Column('combined_confidence', sa.Integer),
        sa.Column('created_at', sa.TIMESTAMP(timezone=True), server_default=sa.func.now(), nullable=False),
        # Both ends cascade: removing either insight removes the link.
        sa.ForeignKeyConstraint(['parent_insight_id'], ['ai_insights.id'], ondelete='CASCADE'),
        sa.ForeignKeyConstraint(['child_insight_id'], ['ai_insights.id'], ondelete='CASCADE')
    )

    # Create indexes for insight_correlations
    op.create_index('idx_corr_parent', 'insight_correlations', ['parent_insight_id'])
    op.create_index('idx_corr_child', 'insight_correlations', ['child_insight_id'])
    op.create_index('idx_corr_type', 'insight_correlations', ['correlation_type'])
    op.create_index('idx_corr_created_at', 'insight_correlations', ['created_at'])
    op.create_index('idx_parent_child', 'insight_correlations', ['parent_insight_id', 'child_insight_id'])
|
||||
def downgrade() -> None:
    """Drop all AI Insights tables created by this revision.

    Drop order is the reverse of creation: the two child tables hold
    foreign keys into ai_insights, so they must be removed first.
    Indexes are dropped implicitly with each table.
    """
    op.drop_table('insight_correlations')
    op.drop_table('insight_feedback')
    op.drop_table('ai_insights')
|
||||
Reference in New Issue
Block a user