bakery-ia/services/orchestrator/migrations/versions/001_initial_schema.py

"""Initial orchestration schema
Revision ID: 001_initial_schema
Revises:
Create Date: 2025-11-05 00:00:00.000000
This is the consolidated initial schema for the orchestration service.
It includes all tables, enums, indexes, and constraints needed for the
orchestration_runs table and related functionality.
Tables:
- orchestration_runs: Main audit trail for orchestration executions
Enums:
- orchestrationstatus: Status values for orchestration runs
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql

# revision identifiers, used by Alembic.
revision = '001_initial_schema'
down_revision = None
branch_labels = None
depends_on = None
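
# Typical usage (standard Alembic CLI; assumes the usual alembic.ini/env.py
# setup for this service, which is not shown here):
#   alembic upgrade head      # apply this revision
#   alembic downgrade base    # drop everything created below
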
def upgrade():
    """Create initial orchestration schema"""
    # ================================================================
    # Create Enums
    # ================================================================
    # Create PostgreSQL enum type for orchestration status
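    # create_type=False keeps SQLAlchemy from emitting CREATE TYPE implicitly
    # when the table below is created; the explicit create() call with
    # checkfirst=True issues it once, and only if the type does not already exist.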
    orchestrationstatus_enum = postgresql.ENUM(
        'pending',
        'running',
        'completed',
        'partial_success',
        'failed',
        'cancelled',
        name='orchestrationstatus',
        create_type=False
    )
    orchestrationstatus_enum.create(op.get_bind(), checkfirst=True)

    # ================================================================
    # Create Tables
    # ================================================================
    # Create orchestration_runs table
    op.create_table(
        'orchestration_runs',
        # Primary identification
        sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column('run_number', sa.String(length=50), nullable=False),
        # Run details
        sa.Column('tenant_id', postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column('status', orchestrationstatus_enum, nullable=False, server_default='pending'),
        sa.Column('run_type', sa.String(length=50), nullable=False, server_default=sa.text("'scheduled'::character varying")),
        sa.Column('priority', sa.String(length=20), nullable=False, server_default=sa.text("'normal'::character varying")),
        # Timing
        sa.Column('started_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
        sa.Column('completed_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('duration_seconds', sa.Integer(), nullable=True),
        # Forecasting step tracking
        sa.Column('forecasting_started_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('forecasting_completed_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('forecasting_status', sa.String(length=20), nullable=True),
        sa.Column('forecasting_error', sa.Text(), nullable=True),
        # Production step tracking
        sa.Column('production_started_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('production_completed_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('production_status', sa.String(length=20), nullable=True),
        sa.Column('production_error', sa.Text(), nullable=True),
        # Procurement step tracking
        sa.Column('procurement_started_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('procurement_completed_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('procurement_status', sa.String(length=20), nullable=True),
        sa.Column('procurement_error', sa.Text(), nullable=True),
        # Notification step tracking
        sa.Column('notification_started_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('notification_completed_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('notification_status', sa.String(length=20), nullable=True),
        sa.Column('notification_error', sa.Text(), nullable=True),
        # AI Insights step tracking
        sa.Column('ai_insights_started_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('ai_insights_completed_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('ai_insights_status', sa.String(length=20), nullable=True),
        sa.Column('ai_insights_error', sa.Text(), nullable=True),
        sa.Column('ai_insights_generated', sa.Integer(), nullable=False, server_default=sa.text('0')),
        sa.Column('ai_insights_posted', sa.Integer(), nullable=False, server_default=sa.text('0')),
        # Results summary
        sa.Column('forecasts_generated', sa.Integer(), nullable=False, server_default=sa.text('0')),
        sa.Column('production_batches_created', sa.Integer(), nullable=False, server_default=sa.text('0')),
        sa.Column('procurement_plans_created', sa.Integer(), nullable=False, server_default=sa.text('0')),
        sa.Column('purchase_orders_created', sa.Integer(), nullable=False, server_default=sa.text('0')),
        sa.Column('notifications_sent', sa.Integer(), nullable=False, server_default=sa.text('0')),
        # Forecast data passed between services
        sa.Column('forecast_data', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
        # Error handling
        sa.Column('retry_count', sa.Integer(), nullable=False, server_default=sa.text('0')),
        sa.Column('max_retries_reached', sa.Boolean(), nullable=False, server_default=sa.text('false')),
        sa.Column('error_message', sa.Text(), nullable=True),
        sa.Column('error_details', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
        # External references
        sa.Column('forecast_id', postgresql.UUID(as_uuid=True), nullable=True),
        sa.Column('production_schedule_id', postgresql.UUID(as_uuid=True), nullable=True),
        sa.Column('procurement_plan_id', postgresql.UUID(as_uuid=True), nullable=True),
        # Saga tracking
        sa.Column('saga_steps_total', sa.Integer(), nullable=False, server_default=sa.text('0')),
        sa.Column('saga_steps_completed', sa.Integer(), nullable=False, server_default=sa.text('0')),
        # Audit fields
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
        sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), onupdate=sa.text('now()'), nullable=False),
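        # Note: onupdate is a client-side SQLAlchemy default and is not rendered
        # into the DDL; keeping updated_at current for plain SQL updates would
        # require a database trigger, which this migration does not create.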
        sa.Column('triggered_by', sa.String(length=100), nullable=True),
        # Performance metrics
        sa.Column('fulfillment_rate', sa.Integer(), nullable=True),
        sa.Column('on_time_delivery_rate', sa.Integer(), nullable=True),
        sa.Column('cost_accuracy', sa.Integer(), nullable=True),
        sa.Column('quality_score', sa.Integer(), nullable=True),
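        # These rate/score columns are plain integers, presumably whole-number
        # percentages (0-100); the unit is not enforced at the schema level.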
        # Metadata
        sa.Column('run_metadata', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
        # Constraints
        sa.PrimaryKeyConstraint('id', name=op.f('pk_orchestration_runs'))
    )

    # ================================================================
    # Create Indexes
    # ================================================================
    # Primary lookup indexes
    op.create_index('ix_orchestration_runs_run_number', 'orchestration_runs', ['run_number'], unique=True)
    op.create_index('ix_orchestration_runs_tenant_id', 'orchestration_runs', ['tenant_id'], unique=False)
    op.create_index('ix_orchestration_runs_status', 'orchestration_runs', ['status'], unique=False)
    # Temporal indexes
    op.create_index('ix_orchestration_runs_started_at', 'orchestration_runs', ['started_at'], unique=False)
    op.create_index('ix_orchestration_runs_completed_at', 'orchestration_runs', ['completed_at'], unique=False)
    # Classification indexes
    op.create_index('ix_orchestration_runs_run_type', 'orchestration_runs', ['run_type'], unique=False)
    op.create_index('ix_orchestration_runs_trigger', 'orchestration_runs', ['triggered_by'], unique=False)
    # Composite indexes for common queries
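    # Intended for tenant-scoped listings, e.g. filtering a tenant's runs by
    # status or run_type, or ordering them by started_at; the exact query
    # patterns served are an assumption, not documented in this migration.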
    op.create_index('ix_orchestration_runs_tenant_status', 'orchestration_runs', ['tenant_id', 'status'], unique=False)
    op.create_index('ix_orchestration_runs_tenant_type', 'orchestration_runs', ['tenant_id', 'run_type'], unique=False)
    op.create_index('ix_orchestration_runs_tenant_started', 'orchestration_runs', ['tenant_id', 'started_at'], unique=False)
    op.create_index('ix_orchestration_runs_status_started', 'orchestration_runs', ['status', 'started_at'], unique=False)
    # Performance metric indexes
    op.create_index('ix_orchestration_runs_fulfillment_rate', 'orchestration_runs', ['fulfillment_rate'], unique=False)
    op.create_index('ix_orchestration_runs_on_time_delivery_rate', 'orchestration_runs', ['on_time_delivery_rate'], unique=False)
    op.create_index('ix_orchestration_runs_cost_accuracy', 'orchestration_runs', ['cost_accuracy'], unique=False)
    op.create_index('ix_orchestration_runs_quality_score', 'orchestration_runs', ['quality_score'], unique=False)


def downgrade():
    """Drop orchestration schema"""
    # Drop indexes
    op.drop_index('ix_orchestration_runs_quality_score', table_name='orchestration_runs')
    op.drop_index('ix_orchestration_runs_cost_accuracy', table_name='orchestration_runs')
    op.drop_index('ix_orchestration_runs_on_time_delivery_rate', table_name='orchestration_runs')
    op.drop_index('ix_orchestration_runs_fulfillment_rate', table_name='orchestration_runs')
    op.drop_index('ix_orchestration_runs_status_started', table_name='orchestration_runs')
    op.drop_index('ix_orchestration_runs_tenant_started', table_name='orchestration_runs')
    op.drop_index('ix_orchestration_runs_tenant_type', table_name='orchestration_runs')
    op.drop_index('ix_orchestration_runs_tenant_status', table_name='orchestration_runs')
    op.drop_index('ix_orchestration_runs_trigger', table_name='orchestration_runs')
    op.drop_index('ix_orchestration_runs_run_type', table_name='orchestration_runs')
    op.drop_index('ix_orchestration_runs_completed_at', table_name='orchestration_runs')
    op.drop_index('ix_orchestration_runs_started_at', table_name='orchestration_runs')
    op.drop_index('ix_orchestration_runs_status', table_name='orchestration_runs')
    op.drop_index('ix_orchestration_runs_tenant_id', table_name='orchestration_runs')
    op.drop_index('ix_orchestration_runs_run_number', table_name='orchestration_runs')
    # Drop table
    op.drop_table('orchestration_runs')
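    # The enum type can only be dropped after the table, since the status
    # column depends on it; IF EXISTS keeps the downgrade idempotent.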
    # Drop enum type
    op.execute("DROP TYPE IF EXISTS orchestrationstatus")