Refactor services alembic
@@ -1,63 +1,54 @@
# A generic, single database configuration.

# ================================================================
# services/suppliers/alembic.ini - Alembic Configuration
# ================================================================
[alembic]
# path to migration scripts
script_location = migrations

# template used to generate migration files
# file_template = %%(rev)s_%%(slug)s
# template used to generate migration file names
file_template = %%(year)d%%(month).2d%%(day).2d_%%(hour).2d%%(minute).2d_%%(rev)s_%%(slug)s
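# Example: a revision 'ae1027a6acf' generated on 2024-01-01 at 12:00 with
# message "add suppliers" is written as 20240101_1200_ae1027a6acf_add_suppliers.py
# (revision id and slug illustrative; Alembic appends the .py extension itself).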

# sys.path path, will be prepended to sys.path if present.
# defaults to the current working directory.
prepend_sys_path = .

# timezone to use when rendering the date within the migration file
# as well as the filename.
# If specified, requires the python-dateutil library that can be
# installed by adding `alembic[tz]` to the pip requirements
# string value is passed to dateutil.tz.gettz()
# leave blank for localtime
# timezone =
timezone = Europe/Madrid

# max length of characters to apply to the
# "slug" field
# truncate_slug_length = 40
truncate_slug_length = 40

# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false
revision_environment = false

# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false
sourceless = false

# version number format
# Uses Alembic datetime format
version_num_format = %%(year)d%%(month).2d%%(day).2d_%%(hour).2d%%(minute).2d_%%(second).2d
# version of a migration file's filename format
version_num_format = %s

# version name format
version_path_separator = /
# version path separator
version_path_separator = os

# set to 'true' to search source files recursively
# in each "version_locations" directory
recursive_version_locations = false

# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8

sqlalchemy.url = postgresql+asyncpg://suppliers_user:suppliers_pass123@suppliers-db:5432/suppliers_db
output_encoding = utf-8

# Database URL - will be overridden by environment variable or settings
sqlalchemy.url = postgresql+asyncpg://suppliers_user:password@suppliers-db-service:5432/suppliers_db

[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
# on newly generated revision scripts. See the documentation for further
# detail and examples
# on newly generated revision scripts.

# format using "black" - use the console_scripts runner, against the "black" entrypoint
# hooks = black
# black.type = console_scripts
# black.entrypoint = black
# black.options = -l 79 REVISION_SCRIPT_FILENAME

# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic

@@ -90,4 +81,4 @@ formatter = generic

[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S
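These [loggers]/[formatter_generic] sections are ordinary logging.config.fileConfig input; env.py consumes them with the equivalent of this minimal sketch (assuming the ini above is on disk as alembic.ini):

from logging.config import fileConfig
import logging

fileConfig("alembic.ini")  # wires up the root, sqlalchemy, and alembic loggers
logging.getLogger("alembic").info("logging configured from alembic.ini")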
@@ -1,62 +1,54 @@
"""
Alembic environment configuration for Suppliers Service
"""
"""Alembic environment configuration for suppliers service"""

import asyncio
from logging.config import fileConfig
import logging
import os
import sys
from pathlib import Path

from sqlalchemy import pool
from sqlalchemy.engine import Connection
from sqlalchemy.ext.asyncio import async_engine_from_config

from alembic import context

# Add the app directory to the path
sys.path.insert(0, str(Path(__file__).parent.parent))
# Add the service directory to the Python path
service_path = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
if service_path not in sys.path:
    sys.path.insert(0, service_path)

# Import models to ensure they're registered
from app.models.suppliers import *  # noqa
from shared.database.base import Base
# Add shared modules to path
shared_path = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "..", "shared"))
if shared_path not in sys.path:
    sys.path.insert(0, shared_path)

# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
try:
    from app.core.config import settings
    from shared.database.base import Base

    # Import all models to ensure they are registered with Base.metadata
    from app.models import *  # Import all models

except ImportError as e:
    print(f"Import error in migrations env.py: {e}")
    print(f"Current Python path: {sys.path}")
    raise
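
# Note: the wildcard model imports above are what make autogenerate work.
# Alembic diffs the database against Base.metadata, which is only populated
# as a side effect of importing the model modules; a model module missed
# here shows up as spurious DROP statements in autogenerated revisions.
# Sanity check once the imports have run:
#   assert Base.metadata.tables  # non-empty when models are registered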

# this is the Alembic Config object
config = context.config

# Interpret the config file for Python logging.
# This line sets up loggers basically.
# Set database URL from settings if not already set
database_url = os.getenv('DATABASE_URL') or getattr(settings, 'DATABASE_URL', None)
if database_url:
    config.set_main_option("sqlalchemy.url", database_url)

# Interpret the config file for Python logging
if config.config_file_name is not None:
    fileConfig(config.config_file_name)

# Set the SQLAlchemy URL from environment variable if available
database_url = os.getenv('SUPPLIERS_DATABASE_URL')
if database_url:
    config.set_main_option('sqlalchemy.url', database_url)
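
# Note: set_main_option simply overwrites the ini value, so whichever
# override runs last wins; with both blocks present, SUPPLIERS_DATABASE_URL
# takes precedence over DATABASE_URL / settings.DATABASE_URL. For example:
#   export SUPPLIERS_DATABASE_URL=postgresql+asyncpg://user:secret@localhost:5432/suppliers_db
#   alembic upgrade head
# (URL and credentials illustrative.)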

# add your model's MetaData object here
# for 'autogenerate' support
# Set target metadata
target_metadata = Base.metadata

# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.


def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode.

    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well. By skipping the Engine creation
    we don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the
    script output.

    """
    """Run migrations in 'offline' mode."""
    url = config.get_main_option("sqlalchemy.url")
    context.configure(
        url=url,
@@ -70,9 +62,7 @@ def run_migrations_offline() -> None:
    with context.begin_transaction():
        context.run_migrations()
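
# Offline mode is what `alembic upgrade head --sql` uses: instead of
# connecting, Alembic renders each migration's DDL to stdout, which is
# handy for DBA review or CI runners without database access.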


def do_run_migrations(connection: Connection) -> None:
    """Run migrations with database connection"""
    context.configure(
        connection=connection,
        target_metadata=target_metadata,
@@ -83,9 +73,8 @@ def do_run_migrations(connection: Connection) -> None:
    with context.begin_transaction():
        context.run_migrations()


async def run_async_migrations() -> None:
    """Run migrations in async mode"""
    """Run migrations in 'online' mode."""
    connectable = async_engine_from_config(
        config.get_section(config.config_ini_section, {}),
        prefix="sqlalchemy.",
@@ -97,13 +86,11 @@ async def run_async_migrations() -> None:

    await connectable.dispose()


def run_migrations_online() -> None:
    """Run migrations in 'online' mode."""
    asyncio.run(run_async_migrations())


if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
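
The hunk elides the middle of run_async_migrations; in the stock Alembic async template that body typically looks like the sketch below (a reconstruction under that assumption, not the commit's exact code):

async def run_async_migrations() -> None:
    connectable = async_engine_from_config(
        config.get_section(config.config_ini_section, {}),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )
    # the sync Alembic context cannot run on an async engine directly,
    # so the sync helper is bridged in via run_sync()
    async with connectable.connect() as connection:
        await connection.run_sync(do_run_migrations)
    await connectable.dispose()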

@@ -5,15 +5,17 @@ Revises: ${down_revision | comma,n}
Create Date: ${create_date}

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa
${imports if imports else ""}

# revision identifiers, used by Alembic.
revision = ${repr(up_revision)}
down_revision = ${repr(down_revision)}
branch_labels = ${repr(branch_labels)}
depends_on = ${repr(depends_on)}
revision: str = ${repr(up_revision)}
down_revision: Union[str, None] = ${repr(down_revision)}
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}


def upgrade() -> None:
@@ -21,4 +23,4 @@ def upgrade() -> None:


def downgrade() -> None:
    ${downgrades if downgrades else "pass"}
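
When Alembic renders this template, each ${repr(...)} substitutes a Python literal, so a generated revision file begins roughly like this (identifiers illustrative):

revision: str = 'ae1027a6acf'
down_revision: Union[str, None] = '001_initial_suppliers'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None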

@@ -0,0 +1,29 @@
"""Initial suppliers service tables

Revision ID: 001_initial_suppliers
Revises:
Create Date: 2024-01-01 12:00:00.000000

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql

# revision identifiers, used by Alembic.
revision: str = '001_initial_suppliers'
down_revision: Union[str, None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    # TODO: Add table creation statements for suppliers service
    # This is a placeholder migration - replace with actual table definitions
    pass


def downgrade() -> None:
    # TODO: Add table drop statements for suppliers service
    pass
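
Once real tables exist, the placeholder would be filled in along these lines (a hypothetical suppliers table for illustration, not the service's actual schema):

def upgrade() -> None:
    op.create_table(
        'suppliers',
        sa.Column('id', postgresql.UUID(as_uuid=True), primary_key=True),
        sa.Column('tenant_id', postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column('name', sa.String(length=255), nullable=False),
        sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
    )
    op.create_index('ix_suppliers_tenant', 'suppliers', ['tenant_id'])


def downgrade() -> None:
    op.drop_index('ix_suppliers_tenant', table_name='suppliers')
    op.drop_table('suppliers')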
@@ -1,151 +0,0 @@
"""Standardize product references to inventory_product_id

Revision ID: 001_standardize_product_references
Revises:
Create Date: 2025-01-15 12:00:00.000000

This migration standardizes product references across the suppliers service by:
1. Renaming ingredient_id columns to inventory_product_id
2. Removing redundant product_name columns where UUID references exist
3. Updating indexes to match new column names

"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects.postgresql import UUID


# revision identifiers
revision = '001_standardize_product_references'
down_revision = None
branch_labels = None
depends_on = None


def upgrade():
    """Apply the changes to standardize product references"""

    # 1. Update supplier_price_lists table
    print("Updating supplier_price_lists table...")

    # Rename ingredient_id to inventory_product_id
    op.alter_column('supplier_price_lists', 'ingredient_id',
                    new_column_name='inventory_product_id')

    # Drop the product_name column (redundant with UUID reference)
    op.drop_column('supplier_price_lists', 'product_name')

    # Update index name
    op.drop_index('ix_price_lists_ingredient')
    op.create_index('ix_price_lists_inventory_product', 'supplier_price_lists',
                    ['inventory_product_id'])

    # 2. Update purchase_order_items table
    print("Updating purchase_order_items table...")

    # Rename ingredient_id to inventory_product_id
    op.alter_column('purchase_order_items', 'ingredient_id',
                    new_column_name='inventory_product_id')

    # Drop the product_name column (redundant with UUID reference)
    op.drop_column('purchase_order_items', 'product_name')

    # Update index name
    op.drop_index('ix_po_items_ingredient')
    op.create_index('ix_po_items_inventory_product', 'purchase_order_items',
                    ['inventory_product_id'])

    # 3. Update delivery_items table
    print("Updating delivery_items table...")

    # Rename ingredient_id to inventory_product_id
    op.alter_column('delivery_items', 'ingredient_id',
                    new_column_name='inventory_product_id')

    # Drop the product_name column (redundant with UUID reference)
    op.drop_column('delivery_items', 'product_name')

    # Update index name
    op.drop_index('ix_delivery_items_ingredient')
    op.create_index('ix_delivery_items_inventory_product', 'delivery_items',
                    ['inventory_product_id'])

    print("Migration completed successfully!")


def downgrade():
    """Revert the changes (for rollback purposes)"""

    print("Rolling back product reference standardization...")

    # 1. Revert delivery_items table
    print("Reverting delivery_items table...")

    # Revert index name
    op.drop_index('ix_delivery_items_inventory_product')
    op.create_index('ix_delivery_items_ingredient', 'delivery_items',
                    ['inventory_product_id'])  # Will rename back to ingredient_id below

    # Add back product_name column (will be empty initially)
    op.add_column('delivery_items',
                  sa.Column('product_name', sa.String(255), nullable=False,
                            server_default='Unknown Product'))

    # Rename inventory_product_id back to ingredient_id
    op.alter_column('delivery_items', 'inventory_product_id',
                    new_column_name='ingredient_id')

    # Update index to use ingredient_id
    op.drop_index('ix_delivery_items_ingredient')
    op.create_index('ix_delivery_items_ingredient', 'delivery_items',
                    ['ingredient_id'])

    # 2. Revert purchase_order_items table
    print("Reverting purchase_order_items table...")

    # Revert index name
    op.drop_index('ix_po_items_inventory_product')
    op.create_index('ix_po_items_ingredient', 'purchase_order_items',
                    ['inventory_product_id'])  # Will rename back to ingredient_id below

    # Add back product_name column (will be empty initially)
    op.add_column('purchase_order_items',
                  sa.Column('product_name', sa.String(255), nullable=False,
                            server_default='Unknown Product'))

    # Rename inventory_product_id back to ingredient_id
    op.alter_column('purchase_order_items', 'inventory_product_id',
                    new_column_name='ingredient_id')

    # Update index to use ingredient_id
    op.drop_index('ix_po_items_ingredient')
    op.create_index('ix_po_items_ingredient', 'purchase_order_items',
                    ['ingredient_id'])

    # 3. Revert supplier_price_lists table
    print("Reverting supplier_price_lists table...")

    # Revert index name
    op.drop_index('ix_price_lists_inventory_product')
    op.create_index('ix_price_lists_ingredient', 'supplier_price_lists',
                    ['inventory_product_id'])  # Will rename back to ingredient_id below

    # Add back product_name column (will be empty initially)
    op.add_column('supplier_price_lists',
                  sa.Column('product_name', sa.String(255), nullable=False,
                            server_default='Unknown Product'))

    # Rename inventory_product_id back to ingredient_id
    op.alter_column('supplier_price_lists', 'inventory_product_id',
                    new_column_name='ingredient_id')

    # Update index to use ingredient_id
    op.drop_index('ix_price_lists_ingredient')
    op.create_index('ix_price_lists_ingredient', 'supplier_price_lists',
                    ['ingredient_id'])

    print("Rollback completed successfully!")
@@ -1,285 +0,0 @@
"""add performance tracking tables

Revision ID: 002
Revises: 001
Create Date: 2024-12-19 12:00:00.000000

"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql

# revision identifiers, used by Alembic.
revision = '002'
down_revision = '001'
branch_labels = None
depends_on = None


def upgrade():
    # Create performance metric type enum
    performance_metric_type = postgresql.ENUM(
        'DELIVERY_PERFORMANCE', 'QUALITY_SCORE', 'PRICE_COMPETITIVENESS',
        'COMMUNICATION_RATING', 'ORDER_ACCURACY', 'RESPONSE_TIME',
        'COMPLIANCE_SCORE', 'FINANCIAL_STABILITY',
        name='performancemetrictype'
    )
    performance_metric_type.create(op.get_bind())

    # Create performance period enum
    performance_period = postgresql.ENUM(
        'DAILY', 'WEEKLY', 'MONTHLY', 'QUARTERLY', 'YEARLY',
        name='performanceperiod'
    )
    performance_period.create(op.get_bind())

    # Create alert severity enum
    alert_severity = postgresql.ENUM(
        'CRITICAL', 'HIGH', 'MEDIUM', 'LOW', 'INFO',
        name='alertseverity'
    )
    alert_severity.create(op.get_bind())

    # Create alert type enum
    alert_type = postgresql.ENUM(
        'POOR_QUALITY', 'LATE_DELIVERY', 'PRICE_INCREASE', 'LOW_PERFORMANCE',
        'CONTRACT_EXPIRY', 'COMPLIANCE_ISSUE', 'FINANCIAL_RISK',
        'COMMUNICATION_ISSUE', 'CAPACITY_CONSTRAINT', 'CERTIFICATION_EXPIRY',
        name='alerttype'
    )
    alert_type.create(op.get_bind())

    # Create alert status enum
    alert_status = postgresql.ENUM(
        'ACTIVE', 'ACKNOWLEDGED', 'IN_PROGRESS', 'RESOLVED', 'DISMISSED',
        name='alertstatus'
    )
    alert_status.create(op.get_bind())
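
    # Note: explicit ENUM.create() calls like these fail if the type already
    # exists (e.g. after a partially applied run); SQLAlchemy's checkfirst
    # flag makes them idempotent:
    #   alert_status.create(op.get_bind(), checkfirst=True)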
|
||||
|
||||
# Create supplier performance metrics table
|
||||
op.create_table('supplier_performance_metrics',
|
||||
sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
|
||||
sa.Column('tenant_id', postgresql.UUID(as_uuid=True), nullable=False),
|
||||
sa.Column('supplier_id', postgresql.UUID(as_uuid=True), nullable=False),
|
||||
sa.Column('metric_type', performance_metric_type, nullable=False),
|
||||
sa.Column('period', performance_period, nullable=False),
|
||||
sa.Column('period_start', sa.DateTime(timezone=True), nullable=False),
|
||||
sa.Column('period_end', sa.DateTime(timezone=True), nullable=False),
|
||||
sa.Column('metric_value', sa.Float(), nullable=False),
|
||||
sa.Column('target_value', sa.Float(), nullable=True),
|
||||
sa.Column('previous_value', sa.Float(), nullable=True),
|
||||
sa.Column('total_orders', sa.Integer(), nullable=False, default=0),
|
||||
sa.Column('total_deliveries', sa.Integer(), nullable=False, default=0),
|
||||
sa.Column('on_time_deliveries', sa.Integer(), nullable=False, default=0),
|
||||
sa.Column('late_deliveries', sa.Integer(), nullable=False, default=0),
|
||||
sa.Column('quality_issues', sa.Integer(), nullable=False, default=0),
|
||||
sa.Column('total_amount', sa.Numeric(precision=12, scale=2), nullable=False, default=0.0),
|
||||
sa.Column('metrics_data', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
|
||||
sa.Column('trend_direction', sa.String(length=20), nullable=True),
|
||||
sa.Column('trend_percentage', sa.Float(), nullable=True),
|
||||
sa.Column('notes', sa.Text(), nullable=True),
|
||||
sa.Column('external_factors', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
|
||||
sa.Column('calculated_at', sa.DateTime(timezone=True), nullable=False),
|
||||
sa.Column('calculated_by', postgresql.UUID(as_uuid=True), nullable=True),
|
||||
sa.ForeignKeyConstraint(['supplier_id'], ['suppliers.id'], ),
|
||||
sa.PrimaryKeyConstraint('id')
|
||||
)
|
||||
|
||||
# Create indexes for performance metrics
|
||||
op.create_index('ix_performance_metrics_tenant_supplier', 'supplier_performance_metrics', ['tenant_id', 'supplier_id'])
|
||||
op.create_index('ix_performance_metrics_type_period', 'supplier_performance_metrics', ['metric_type', 'period'])
|
||||
op.create_index('ix_performance_metrics_period_dates', 'supplier_performance_metrics', ['period_start', 'period_end'])
|
||||
op.create_index('ix_performance_metrics_value', 'supplier_performance_metrics', ['metric_value'])
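
    # Note: default=0 / default=0.0 on these sa.Column definitions is a
    # Python-side default only; it does not emit a DEFAULT clause in the DDL.
    # A database-level default would need server_default='0' instead.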

    # Create supplier alerts table
    op.create_table('supplier_alerts',
        sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column('tenant_id', postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column('supplier_id', postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column('alert_type', alert_type, nullable=False),
        sa.Column('severity', alert_severity, nullable=False),
        sa.Column('status', alert_status, nullable=False, default='ACTIVE'),
        sa.Column('title', sa.String(length=255), nullable=False),
        sa.Column('message', sa.Text(), nullable=False),
        sa.Column('description', sa.Text(), nullable=True),
        sa.Column('trigger_value', sa.Float(), nullable=True),
        sa.Column('threshold_value', sa.Float(), nullable=True),
        sa.Column('metric_type', performance_metric_type, nullable=True),
        sa.Column('purchase_order_id', postgresql.UUID(as_uuid=True), nullable=True),
        sa.Column('delivery_id', postgresql.UUID(as_uuid=True), nullable=True),
        sa.Column('performance_metric_id', postgresql.UUID(as_uuid=True), nullable=True),
        sa.Column('triggered_at', sa.DateTime(timezone=True), nullable=False),
        sa.Column('acknowledged_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('acknowledged_by', postgresql.UUID(as_uuid=True), nullable=True),
        sa.Column('resolved_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('resolved_by', postgresql.UUID(as_uuid=True), nullable=True),
        sa.Column('recommended_actions', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
        sa.Column('actions_taken', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
        sa.Column('resolution_notes', sa.Text(), nullable=True),
        sa.Column('auto_resolve', sa.Boolean(), nullable=False, default=False),
        sa.Column('auto_resolve_condition', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
        sa.Column('escalated', sa.Boolean(), nullable=False, default=False),
        sa.Column('escalated_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('escalated_to', postgresql.UUID(as_uuid=True), nullable=True),
        sa.Column('notification_sent', sa.Boolean(), nullable=False, default=False),
        sa.Column('notification_sent_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('notification_recipients', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
        sa.Column('priority_score', sa.Integer(), nullable=False, default=50),
        sa.Column('business_impact', sa.String(length=50), nullable=True),
        sa.Column('tags', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
        sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
        sa.Column('updated_at', sa.DateTime(timezone=True), nullable=False),
        sa.Column('created_by', postgresql.UUID(as_uuid=True), nullable=True),
        sa.ForeignKeyConstraint(['performance_metric_id'], ['supplier_performance_metrics.id']),
        sa.ForeignKeyConstraint(['supplier_id'], ['suppliers.id']),
        sa.PrimaryKeyConstraint('id')
    )

    # Create indexes for alerts
    op.create_index('ix_supplier_alerts_tenant_supplier', 'supplier_alerts', ['tenant_id', 'supplier_id'])
    op.create_index('ix_supplier_alerts_type_severity', 'supplier_alerts', ['alert_type', 'severity'])
    op.create_index('ix_supplier_alerts_status_triggered', 'supplier_alerts', ['status', 'triggered_at'])
    op.create_index('ix_supplier_alerts_metric_type', 'supplier_alerts', ['metric_type'])
    op.create_index('ix_supplier_alerts_priority', 'supplier_alerts', ['priority_score'])
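
    # A possible refinement (not part of this migration): a PostgreSQL
    # partial index covering only open alerts, e.g.
    #   op.create_index('ix_supplier_alerts_active', 'supplier_alerts',
    #                   ['triggered_at'],
    #                   postgresql_where=sa.text("status = 'ACTIVE'"))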

    # Create supplier scorecards table
    op.create_table('supplier_scorecards',
        sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column('tenant_id', postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column('supplier_id', postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column('scorecard_name', sa.String(length=255), nullable=False),
        sa.Column('period', performance_period, nullable=False),
        sa.Column('period_start', sa.DateTime(timezone=True), nullable=False),
        sa.Column('period_end', sa.DateTime(timezone=True), nullable=False),
        sa.Column('overall_score', sa.Float(), nullable=False),
        sa.Column('quality_score', sa.Float(), nullable=False),
        sa.Column('delivery_score', sa.Float(), nullable=False),
        sa.Column('cost_score', sa.Float(), nullable=False),
        sa.Column('service_score', sa.Float(), nullable=False),
        sa.Column('overall_rank', sa.Integer(), nullable=True),
        sa.Column('category_rank', sa.Integer(), nullable=True),
        sa.Column('total_suppliers_evaluated', sa.Integer(), nullable=True),
        sa.Column('on_time_delivery_rate', sa.Float(), nullable=False),
        sa.Column('quality_rejection_rate', sa.Float(), nullable=False),
        sa.Column('order_accuracy_rate', sa.Float(), nullable=False),
        sa.Column('response_time_hours', sa.Float(), nullable=False),
        sa.Column('cost_variance_percentage', sa.Float(), nullable=False),
        sa.Column('total_orders_processed', sa.Integer(), nullable=False, default=0),
        sa.Column('total_amount_processed', sa.Numeric(precision=12, scale=2), nullable=False, default=0.0),
        sa.Column('average_order_value', sa.Numeric(precision=10, scale=2), nullable=False, default=0.0),
        sa.Column('cost_savings_achieved', sa.Numeric(precision=10, scale=2), nullable=False, default=0.0),
        sa.Column('score_trend', sa.String(length=20), nullable=True),
        sa.Column('score_change_percentage', sa.Float(), nullable=True),
        sa.Column('strengths', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
        sa.Column('improvement_areas', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
        sa.Column('recommended_actions', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
        sa.Column('is_final', sa.Boolean(), nullable=False, default=False),
        sa.Column('approved_by', postgresql.UUID(as_uuid=True), nullable=True),
        sa.Column('approved_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('notes', sa.Text(), nullable=True),
        sa.Column('attachments', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
        sa.Column('generated_at', sa.DateTime(timezone=True), nullable=False),
        sa.Column('generated_by', postgresql.UUID(as_uuid=True), nullable=False),
        sa.ForeignKeyConstraint(['supplier_id'], ['suppliers.id']),
        sa.PrimaryKeyConstraint('id')
    )

    # Create indexes for scorecards
    op.create_index('ix_scorecards_tenant_supplier', 'supplier_scorecards', ['tenant_id', 'supplier_id'])
    op.create_index('ix_scorecards_period_dates', 'supplier_scorecards', ['period_start', 'period_end'])
    op.create_index('ix_scorecards_overall_score', 'supplier_scorecards', ['overall_score'])
    op.create_index('ix_scorecards_period', 'supplier_scorecards', ['period'])
    op.create_index('ix_scorecards_final', 'supplier_scorecards', ['is_final'])

    # Create supplier benchmarks table
    op.create_table('supplier_benchmarks',
        sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column('tenant_id', postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column('benchmark_name', sa.String(length=255), nullable=False),
        sa.Column('benchmark_type', sa.String(length=50), nullable=False),
        sa.Column('supplier_category', sa.String(length=100), nullable=True),
        sa.Column('metric_type', performance_metric_type, nullable=False),
        sa.Column('excellent_threshold', sa.Float(), nullable=False),
        sa.Column('good_threshold', sa.Float(), nullable=False),
        sa.Column('acceptable_threshold', sa.Float(), nullable=False),
        sa.Column('poor_threshold', sa.Float(), nullable=False),
        sa.Column('data_source', sa.String(length=255), nullable=True),
        sa.Column('sample_size', sa.Integer(), nullable=True),
        sa.Column('confidence_level', sa.Float(), nullable=True),
        sa.Column('effective_date', sa.DateTime(timezone=True), nullable=False),
        sa.Column('expiry_date', sa.DateTime(timezone=True), nullable=True),
        sa.Column('is_active', sa.Boolean(), nullable=False, default=True),
        sa.Column('description', sa.Text(), nullable=True),
        sa.Column('methodology', sa.Text(), nullable=True),
        sa.Column('notes', sa.Text(), nullable=True),
        sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
        sa.Column('updated_at', sa.DateTime(timezone=True), nullable=False),
        sa.Column('created_by', postgresql.UUID(as_uuid=True), nullable=False),
        sa.PrimaryKeyConstraint('id')
    )

    # Create indexes for benchmarks
    op.create_index('ix_benchmarks_tenant_type', 'supplier_benchmarks', ['tenant_id', 'benchmark_type'])
    op.create_index('ix_benchmarks_metric_type', 'supplier_benchmarks', ['metric_type'])
    op.create_index('ix_benchmarks_category', 'supplier_benchmarks', ['supplier_category'])
    op.create_index('ix_benchmarks_active', 'supplier_benchmarks', ['is_active'])

    # Create alert rules table
    op.create_table('alert_rules',
        sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column('tenant_id', postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column('rule_name', sa.String(length=255), nullable=False),
        sa.Column('rule_description', sa.Text(), nullable=True),
        sa.Column('is_active', sa.Boolean(), nullable=False, default=True),
        sa.Column('alert_type', alert_type, nullable=False),
        sa.Column('severity', alert_severity, nullable=False),
        sa.Column('metric_type', performance_metric_type, nullable=True),
        sa.Column('trigger_condition', sa.String(length=50), nullable=False),
        sa.Column('threshold_value', sa.Float(), nullable=False),
        sa.Column('consecutive_violations', sa.Integer(), nullable=False, default=1),
        sa.Column('supplier_categories', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
        sa.Column('supplier_ids', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
        sa.Column('exclude_suppliers', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
        sa.Column('evaluation_period', performance_period, nullable=False),
        sa.Column('time_window_hours', sa.Integer(), nullable=True),
        sa.Column('business_hours_only', sa.Boolean(), nullable=False, default=False),
        sa.Column('auto_resolve', sa.Boolean(), nullable=False, default=False),
        sa.Column('auto_resolve_threshold', sa.Float(), nullable=True),
        sa.Column('auto_resolve_duration_hours', sa.Integer(), nullable=True),
        sa.Column('notification_enabled', sa.Boolean(), nullable=False, default=True),
        sa.Column('notification_recipients', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
        sa.Column('escalation_minutes', sa.Integer(), nullable=True),
        sa.Column('escalation_recipients', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
        sa.Column('recommended_actions', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
        sa.Column('auto_actions', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
        sa.Column('priority', sa.Integer(), nullable=False, default=50),
        sa.Column('tags', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
        sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
        sa.Column('updated_at', sa.DateTime(timezone=True), nullable=False),
        sa.Column('created_by', postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column('last_triggered', sa.DateTime(timezone=True), nullable=True),
        sa.Column('trigger_count', sa.Integer(), nullable=False, default=0),
        sa.PrimaryKeyConstraint('id')
    )

    # Create indexes for alert rules
    op.create_index('ix_alert_rules_tenant_active', 'alert_rules', ['tenant_id', 'is_active'])
    op.create_index('ix_alert_rules_type_severity', 'alert_rules', ['alert_type', 'severity'])
    op.create_index('ix_alert_rules_metric_type', 'alert_rules', ['metric_type'])
    op.create_index('ix_alert_rules_priority', 'alert_rules', ['priority'])


def downgrade():
    # Drop all tables and indexes
    op.drop_table('alert_rules')
    op.drop_table('supplier_benchmarks')
    op.drop_table('supplier_scorecards')
    op.drop_table('supplier_alerts')
    op.drop_table('supplier_performance_metrics')

    # Drop enums
    op.execute('DROP TYPE IF EXISTS alertstatus')
    op.execute('DROP TYPE IF EXISTS alerttype')
    op.execute('DROP TYPE IF EXISTS alertseverity')
    op.execute('DROP TYPE IF EXISTS performanceperiod')
    op.execute('DROP TYPE IF EXISTS performancemetrictype')
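
    # The raw DROP TYPE statements above work; the typed equivalent keeps
    # downgrade symmetrical with upgrade() (a sketch, same enum names):
    #   for name in ('alertstatus', 'alerttype', 'alertseverity',
    #                'performanceperiod', 'performancemetrictype'):
    #       postgresql.ENUM(name=name).drop(op.get_bind(), checkfirst=True)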