Initial commit - production deployment
services/forecasting/migrations/env.py (Normal file, 141 lines added)
@@ -0,0 +1,141 @@
"""Alembic environment configuration for forecasting service"""

import asyncio
import os
import sys
from logging.config import fileConfig
from sqlalchemy import pool
from sqlalchemy.engine import Connection
from sqlalchemy.ext.asyncio import async_engine_from_config
from alembic import context

# Add the service directory to the Python path
service_path = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
if service_path not in sys.path:
    sys.path.insert(0, service_path)

# Add shared modules to path
shared_path = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "..", "shared"))
if shared_path not in sys.path:
    sys.path.insert(0, shared_path)

try:
    from app.core.config import settings
    from shared.database.base import Base

    # Import all models to ensure they are registered with Base.metadata
    from app.models import *  # noqa: F401, F403

except ImportError as e:
    print(f"Import error in migrations env.py: {e}")
    print(f"Current Python path: {sys.path}")
    raise

# this is the Alembic Config object
config = context.config

# Determine service name from file path
service_name = os.path.basename(os.path.dirname(os.path.dirname(__file__)))
service_name_upper = service_name.upper().replace('-', '_')

# Set database URL from environment variables with multiple fallback strategies
database_url = (
    os.getenv(f'{service_name_upper}_DATABASE_URL') or  # Service-specific
    os.getenv('DATABASE_URL')  # Generic fallback
)

# If DATABASE_URL is not set, construct from individual components
if not database_url:
    # Try generic PostgreSQL environment variables first
    postgres_host = os.getenv('POSTGRES_HOST')
    postgres_port = os.getenv('POSTGRES_PORT', '5432')
    postgres_db = os.getenv('POSTGRES_DB')
    postgres_user = os.getenv('POSTGRES_USER')
    postgres_password = os.getenv('POSTGRES_PASSWORD')

    if all([postgres_host, postgres_db, postgres_user, postgres_password]):
        database_url = f"postgresql+asyncpg://{postgres_user}:{postgres_password}@{postgres_host}:{postgres_port}/{postgres_db}"
    else:
        # Try service-specific environment variables
        db_host = os.getenv(f'{service_name_upper}_DB_HOST', f'{service_name}-db-service')
        db_port = os.getenv(f'{service_name_upper}_DB_PORT', '5432')
        db_name = os.getenv(f'{service_name_upper}_DB_NAME', f'{service_name.replace("-", "_")}_db')
        db_user = os.getenv(f'{service_name_upper}_DB_USER', f'{service_name.replace("-", "_")}_user')
        db_password = os.getenv(f'{service_name_upper}_DB_PASSWORD')

        if db_password:
            database_url = f"postgresql+asyncpg://{db_user}:{db_password}@{db_host}:{db_port}/{db_name}"
        else:
            # Final fallback: try to get from settings object
            try:
                database_url = getattr(settings, 'DATABASE_URL', None)
            except Exception:
                pass

if not database_url:
    error_msg = f"ERROR: No database URL configured for {service_name} service"
    print(error_msg)
    raise Exception(error_msg)

config.set_main_option("sqlalchemy.url", database_url)

# Interpret the config file for Python logging
if config.config_file_name is not None:
    fileConfig(config.config_file_name)

# Set target metadata
target_metadata = Base.metadata


def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode."""
    url = config.get_main_option("sqlalchemy.url")
    context.configure(
        url=url,
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
        compare_type=True,
        compare_server_default=True,
    )

    with context.begin_transaction():
        context.run_migrations()


def do_run_migrations(connection: Connection) -> None:
    """Execute migrations with the given connection."""
    context.configure(
        connection=connection,
        target_metadata=target_metadata,
        compare_type=True,
        compare_server_default=True,
    )

    with context.begin_transaction():
        context.run_migrations()


async def run_async_migrations() -> None:
    """Run migrations in 'online' mode with async support."""
    connectable = async_engine_from_config(
        config.get_section(config.config_ini_section, {}),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )

    async with connectable.connect() as connection:
        await connection.run_sync(do_run_migrations)

    await connectable.dispose()


def run_migrations_online() -> None:
    """Run migrations in 'online' mode."""
    asyncio.run(run_async_migrations())


if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
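For quick reference, the lookup above gives operators five ways to point migrations at a database, checked in order. A condensed sketch of that chain for this service (directory name forecasting, hence the derived FORECASTING_ prefix; the helper function is illustrative only, not part of the codebase):

import os

def resolve_forecasting_db_url():
    # Mirrors env.py's fallback chain for the forecasting service (sketch).
    return (
        os.getenv('FORECASTING_DATABASE_URL')   # 1. service-specific full URL
        or os.getenv('DATABASE_URL')            # 2. generic full URL
        # 3. POSTGRES_HOST/PORT/DB/USER/PASSWORD assembled into a URL,
        # 4. FORECASTING_DB_* parts (default host 'forecasting-db-service'),
        # 5. settings.DATABASE_URL - all exactly as implemented above.
    )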
services/forecasting/migrations/script.py.mako (Normal file, 26 lines added)
@@ -0,0 +1,26 @@
"""${message}

Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa
${imports if imports else ""}

# revision identifiers, used by Alembic.
revision: str = ${repr(up_revision)}
down_revision: Union[str, None] = ${repr(down_revision)}
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}


def upgrade() -> None:
    ${upgrades if upgrades else "pass"}


def downgrade() -> None:
    ${downgrades if downgrades else "pass"}
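Each revision file below is an instance of this template, produced by alembic revision (with --autogenerate, Alembic also fills in the upgrade/downgrade bodies from model metadata). Note that the files appear in repository listing order, not application order; the chain recorded in their down_revision fields is 301bc59f6dfb (initial_schema) -> 00002 (add_validation_runs_table) -> 00003 (add_sales_data_updates_table).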
@@ -0,0 +1,174 @@
"""initial_schema_20251015_1230

Revision ID: 301bc59f6dfb
Revises:
Create Date: 2025-10-15 12:30:42.311369+02:00

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql

# revision identifiers, used by Alembic.
revision: str = '301bc59f6dfb'
down_revision: Union[str, None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('audit_logs',
    sa.Column('id', sa.UUID(), nullable=False),
    sa.Column('tenant_id', sa.UUID(), nullable=False),
    sa.Column('user_id', sa.UUID(), nullable=False),
    sa.Column('action', sa.String(length=100), nullable=False),
    sa.Column('resource_type', sa.String(length=100), nullable=False),
    sa.Column('resource_id', sa.String(length=255), nullable=True),
    sa.Column('severity', sa.String(length=20), nullable=False),
    sa.Column('service_name', sa.String(length=100), nullable=False),
    sa.Column('description', sa.Text(), nullable=True),
    sa.Column('changes', postgresql.JSON(astext_type=sa.Text()), nullable=True),
    sa.Column('audit_metadata', postgresql.JSON(astext_type=sa.Text()), nullable=True),
    sa.Column('ip_address', sa.String(length=45), nullable=True),
    sa.Column('user_agent', sa.Text(), nullable=True),
    sa.Column('endpoint', sa.String(length=255), nullable=True),
    sa.Column('method', sa.String(length=10), nullable=True),
    sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index('idx_audit_resource_type_action', 'audit_logs', ['resource_type', 'action'], unique=False)
    op.create_index('idx_audit_service_created', 'audit_logs', ['service_name', 'created_at'], unique=False)
    op.create_index('idx_audit_severity_created', 'audit_logs', ['severity', 'created_at'], unique=False)
    op.create_index('idx_audit_tenant_created', 'audit_logs', ['tenant_id', 'created_at'], unique=False)
    op.create_index('idx_audit_user_created', 'audit_logs', ['user_id', 'created_at'], unique=False)
    op.create_index(op.f('ix_audit_logs_action'), 'audit_logs', ['action'], unique=False)
    op.create_index(op.f('ix_audit_logs_created_at'), 'audit_logs', ['created_at'], unique=False)
    op.create_index(op.f('ix_audit_logs_resource_id'), 'audit_logs', ['resource_id'], unique=False)
    op.create_index(op.f('ix_audit_logs_resource_type'), 'audit_logs', ['resource_type'], unique=False)
    op.create_index(op.f('ix_audit_logs_service_name'), 'audit_logs', ['service_name'], unique=False)
    op.create_index(op.f('ix_audit_logs_severity'), 'audit_logs', ['severity'], unique=False)
    op.create_index(op.f('ix_audit_logs_tenant_id'), 'audit_logs', ['tenant_id'], unique=False)
    op.create_index(op.f('ix_audit_logs_user_id'), 'audit_logs', ['user_id'], unique=False)
    op.create_table('forecasts',
    sa.Column('id', sa.UUID(), nullable=False),
    sa.Column('tenant_id', sa.UUID(), nullable=False),
    sa.Column('inventory_product_id', sa.UUID(), nullable=False),
    sa.Column('product_name', sa.String(length=255), nullable=True),
    sa.Column('location', sa.String(length=255), nullable=False),
    sa.Column('forecast_date', sa.DateTime(timezone=True), nullable=False),
    sa.Column('created_at', sa.DateTime(timezone=True), nullable=True),
    sa.Column('predicted_demand', sa.Float(), nullable=False),
    sa.Column('confidence_lower', sa.Float(), nullable=False),
    sa.Column('confidence_upper', sa.Float(), nullable=False),
    sa.Column('confidence_level', sa.Float(), nullable=True),
    sa.Column('model_id', sa.String(length=255), nullable=False),
    sa.Column('model_version', sa.String(length=50), nullable=False),
    sa.Column('algorithm', sa.String(length=50), nullable=True),
    sa.Column('business_type', sa.String(length=50), nullable=True),
    sa.Column('day_of_week', sa.Integer(), nullable=False),
    sa.Column('is_holiday', sa.Boolean(), nullable=True),
    sa.Column('is_weekend', sa.Boolean(), nullable=True),
    sa.Column('weather_temperature', sa.Float(), nullable=True),
    sa.Column('weather_precipitation', sa.Float(), nullable=True),
    sa.Column('weather_description', sa.String(length=100), nullable=True),
    sa.Column('traffic_volume', sa.Integer(), nullable=True),
    sa.Column('processing_time_ms', sa.Integer(), nullable=True),
    sa.Column('features_used', sa.JSON(), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_forecasts_forecast_date'), 'forecasts', ['forecast_date'], unique=False)
    op.create_index(op.f('ix_forecasts_inventory_product_id'), 'forecasts', ['inventory_product_id'], unique=False)
    op.create_index(op.f('ix_forecasts_location'), 'forecasts', ['location'], unique=False)
    op.create_index(op.f('ix_forecasts_product_name'), 'forecasts', ['product_name'], unique=False)
    op.create_index(op.f('ix_forecasts_tenant_id'), 'forecasts', ['tenant_id'], unique=False)
    op.create_table('model_performance_metrics',
    sa.Column('id', sa.UUID(), nullable=False),
    sa.Column('model_id', sa.UUID(), nullable=False),
    sa.Column('tenant_id', sa.UUID(), nullable=False),
    sa.Column('inventory_product_id', sa.UUID(), nullable=False),
    sa.Column('mae', sa.Float(), nullable=True),
    sa.Column('mape', sa.Float(), nullable=True),
    sa.Column('rmse', sa.Float(), nullable=True),
    sa.Column('accuracy_score', sa.Float(), nullable=True),
    sa.Column('evaluation_date', sa.DateTime(timezone=True), nullable=False),
    sa.Column('evaluation_period_start', sa.DateTime(timezone=True), nullable=True),
    sa.Column('evaluation_period_end', sa.DateTime(timezone=True), nullable=True),
    sa.Column('sample_size', sa.Integer(), nullable=True),
    sa.Column('created_at', sa.DateTime(timezone=True), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_model_performance_metrics_model_id'), 'model_performance_metrics', ['model_id'], unique=False)
    op.create_index(op.f('ix_model_performance_metrics_tenant_id'), 'model_performance_metrics', ['tenant_id'], unique=False)
    op.create_table('prediction_batches',
    sa.Column('id', sa.UUID(), nullable=False),
    sa.Column('tenant_id', sa.UUID(), nullable=False),
    sa.Column('batch_name', sa.String(length=255), nullable=False),
    sa.Column('requested_at', sa.DateTime(timezone=True), nullable=True),
    sa.Column('completed_at', sa.DateTime(timezone=True), nullable=True),
    sa.Column('status', sa.String(length=50), nullable=True),
    sa.Column('total_products', sa.Integer(), nullable=True),
    sa.Column('completed_products', sa.Integer(), nullable=True),
    sa.Column('failed_products', sa.Integer(), nullable=True),
    sa.Column('forecast_days', sa.Integer(), nullable=True),
    sa.Column('business_type', sa.String(length=50), nullable=True),
    sa.Column('error_message', sa.Text(), nullable=True),
    sa.Column('processing_time_ms', sa.Integer(), nullable=True),
    sa.Column('cancelled_by', sa.String(), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_prediction_batches_tenant_id'), 'prediction_batches', ['tenant_id'], unique=False)
    op.create_table('prediction_cache',
    sa.Column('id', sa.UUID(), nullable=False),
    sa.Column('cache_key', sa.String(length=255), nullable=False),
    sa.Column('tenant_id', sa.UUID(), nullable=False),
    sa.Column('inventory_product_id', sa.UUID(), nullable=False),
    sa.Column('location', sa.String(length=255), nullable=False),
    sa.Column('forecast_date', sa.DateTime(timezone=True), nullable=False),
    sa.Column('predicted_demand', sa.Float(), nullable=False),
    sa.Column('confidence_lower', sa.Float(), nullable=False),
    sa.Column('confidence_upper', sa.Float(), nullable=False),
    sa.Column('model_id', sa.UUID(), nullable=False),
    sa.Column('created_at', sa.DateTime(timezone=True), nullable=True),
    sa.Column('expires_at', sa.DateTime(timezone=True), nullable=False),
    sa.Column('hit_count', sa.Integer(), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_prediction_cache_cache_key'), 'prediction_cache', ['cache_key'], unique=True)
    op.create_index(op.f('ix_prediction_cache_tenant_id'), 'prediction_cache', ['tenant_id'], unique=False)
    # ### end Alembic commands ###


def downgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_index(op.f('ix_prediction_cache_tenant_id'), table_name='prediction_cache')
    op.drop_index(op.f('ix_prediction_cache_cache_key'), table_name='prediction_cache')
    op.drop_table('prediction_cache')
    op.drop_index(op.f('ix_prediction_batches_tenant_id'), table_name='prediction_batches')
    op.drop_table('prediction_batches')
    op.drop_index(op.f('ix_model_performance_metrics_tenant_id'), table_name='model_performance_metrics')
    op.drop_index(op.f('ix_model_performance_metrics_model_id'), table_name='model_performance_metrics')
    op.drop_table('model_performance_metrics')
    op.drop_index(op.f('ix_forecasts_tenant_id'), table_name='forecasts')
    op.drop_index(op.f('ix_forecasts_product_name'), table_name='forecasts')
    op.drop_index(op.f('ix_forecasts_location'), table_name='forecasts')
    op.drop_index(op.f('ix_forecasts_inventory_product_id'), table_name='forecasts')
    op.drop_index(op.f('ix_forecasts_forecast_date'), table_name='forecasts')
    op.drop_table('forecasts')
    op.drop_index(op.f('ix_audit_logs_user_id'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_tenant_id'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_severity'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_service_name'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_resource_type'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_resource_id'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_created_at'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_action'), table_name='audit_logs')
    op.drop_index('idx_audit_user_created', table_name='audit_logs')
    op.drop_index('idx_audit_tenant_created', table_name='audit_logs')
    op.drop_index('idx_audit_severity_created', table_name='audit_logs')
    op.drop_index('idx_audit_service_created', table_name='audit_logs')
    op.drop_index('idx_audit_resource_type_action', table_name='audit_logs')
    op.drop_table('audit_logs')
    # ### end Alembic commands ###
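The same env.py drives Alembic's Python API as well as the CLI, which is handy for smoke-testing that a revision's downgrade really reverses its upgrade. A minimal sketch (the ini path is hypothetical; it assumes an alembic.ini pointing at this migrations directory and the database variables described above):

from alembic import command
from alembic.config import Config

# Load the service's Alembic configuration; env.py then resolves the
# database URL from the environment, exactly as during a CLI run.
cfg = Config("services/forecasting/alembic.ini")  # hypothetical location

command.upgrade(cfg, "301bc59f6dfb")  # apply the initial schema
command.downgrade(cfg, "base")        # drop every table it created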
@@ -0,0 +1,91 @@
"""add_sales_data_updates_table

Revision ID: 00003
Revises: 00002
Create Date: 2025-11-17 17:00:00.000000

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql

# revision identifiers, used by Alembic.
revision: str = '00003'
down_revision: Union[str, None] = '00002'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    # Create sales_data_updates table
    op.create_table(
        'sales_data_updates',
        sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column('tenant_id', postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column('update_date_start', sa.Date(), nullable=False),
        sa.Column('update_date_end', sa.Date(), nullable=False),
        sa.Column('created_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('update_source', sa.String(length=100), nullable=True),
        sa.Column('records_affected', sa.Integer(), nullable=True),
        sa.Column('validation_status', sa.String(length=50), nullable=True),
        sa.Column('validation_run_id', postgresql.UUID(as_uuid=True), nullable=True),
        sa.Column('validated_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('validation_error', sa.String(length=500), nullable=True),
        sa.Column('requires_validation', sa.Boolean(), nullable=True),
        sa.Column('import_job_id', sa.String(length=255), nullable=True),
        sa.Column('notes', sa.String(length=500), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )

    # Create indexes for sales_data_updates
    op.create_index(
        'ix_sales_data_updates_tenant_id',
        'sales_data_updates',
        ['tenant_id'],
        unique=False
    )
    op.create_index(
        'ix_sales_data_updates_update_date_start',
        'sales_data_updates',
        ['update_date_start'],
        unique=False
    )
    op.create_index(
        'ix_sales_data_updates_update_date_end',
        'sales_data_updates',
        ['update_date_end'],
        unique=False
    )
    op.create_index(
        'ix_sales_updates_tenant_status',
        'sales_data_updates',
        ['tenant_id', 'validation_status', 'created_at'],
        unique=False
    )
    op.create_index(
        'ix_sales_updates_date_range',
        'sales_data_updates',
        ['tenant_id', 'update_date_start', 'update_date_end'],
        unique=False
    )
    op.create_index(
        'ix_sales_updates_validation_status',
        'sales_data_updates',
        ['validation_status'],
        unique=False
    )


def downgrade() -> None:
    # Drop indexes
    op.drop_index('ix_sales_updates_validation_status', table_name='sales_data_updates')
    op.drop_index('ix_sales_updates_date_range', table_name='sales_data_updates')
    op.drop_index('ix_sales_updates_tenant_status', table_name='sales_data_updates')
    op.drop_index('ix_sales_data_updates_update_date_end', table_name='sales_data_updates')
    op.drop_index('ix_sales_data_updates_update_date_start', table_name='sales_data_updates')
    op.drop_index('ix_sales_data_updates_tenant_id', table_name='sales_data_updates')

    # Drop table
    op.drop_table('sales_data_updates')
@@ -0,0 +1,89 @@
"""add_validation_runs_table

Revision ID: 00002
Revises: 301bc59f6dfb
Create Date: 2025-11-17 16:30:00.000000

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql

# revision identifiers, used by Alembic.
revision: str = '00002'
down_revision: Union[str, None] = '301bc59f6dfb'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    # Create validation_runs table
    op.create_table(
        'validation_runs',
        sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column('tenant_id', postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column('orchestration_run_id', postgresql.UUID(as_uuid=True), nullable=True),
        sa.Column('validation_start_date', sa.DateTime(timezone=True), nullable=False),
        sa.Column('validation_end_date', sa.DateTime(timezone=True), nullable=False),
        sa.Column('started_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('completed_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('duration_seconds', sa.Float(), nullable=True),
        sa.Column('status', sa.String(length=50), nullable=True),
        sa.Column('total_forecasts_evaluated', sa.Integer(), nullable=True),
        sa.Column('forecasts_with_actuals', sa.Integer(), nullable=True),
        sa.Column('forecasts_without_actuals', sa.Integer(), nullable=True),
        sa.Column('overall_mae', sa.Float(), nullable=True),
        sa.Column('overall_mape', sa.Float(), nullable=True),
        sa.Column('overall_rmse', sa.Float(), nullable=True),
        sa.Column('overall_r2_score', sa.Float(), nullable=True),
        sa.Column('overall_accuracy_percentage', sa.Float(), nullable=True),
        sa.Column('total_predicted_demand', sa.Float(), nullable=True),
        sa.Column('total_actual_demand', sa.Float(), nullable=True),
        sa.Column('metrics_by_product', postgresql.JSON(astext_type=sa.Text()), nullable=True),
        sa.Column('metrics_by_location', postgresql.JSON(astext_type=sa.Text()), nullable=True),
        sa.Column('metrics_records_created', sa.Integer(), nullable=True),
        sa.Column('error_message', sa.Text(), nullable=True),
        sa.Column('error_details', postgresql.JSON(astext_type=sa.Text()), nullable=True),
        sa.Column('triggered_by', sa.String(length=100), nullable=True),
        sa.Column('execution_mode', sa.String(length=50), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )

    # Create indexes for validation_runs
    op.create_index(
        'ix_validation_runs_tenant_id',
        'validation_runs',
        ['tenant_id'],
        unique=False
    )
    op.create_index(
        'ix_validation_runs_tenant_created',
        'validation_runs',
        ['tenant_id', 'started_at'],
        unique=False
    )
    op.create_index(
        'ix_validation_runs_status',
        'validation_runs',
        ['status', 'started_at'],
        unique=False
    )
    op.create_index(
        'ix_validation_runs_orchestration',
        'validation_runs',
        ['orchestration_run_id'],
        unique=False
    )


def downgrade() -> None:
    # Drop indexes
    op.drop_index('ix_validation_runs_orchestration', table_name='validation_runs')
    op.drop_index('ix_validation_runs_status', table_name='validation_runs')
    op.drop_index('ix_validation_runs_tenant_created', table_name='validation_runs')
    op.drop_index('ix_validation_runs_tenant_id', table_name='validation_runs')

    # Drop table
    op.drop_table('validation_runs')