Fix Alembic issue
@@ -1,78 +0,0 @@
"""Initial schema for training service

Revision ID: 0001
Revises:
Create Date: 2025-09-30 18:00:00.0000

"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql

# revision identifiers, used by Alembic.
revision = '00001'
down_revision = None
branch_labels = None
depends_on = None


def upgrade() -> None:
    op.create_table('training_jobs',
    sa.Column('id', sa.UUID(), nullable=False),
    sa.Column('tenant_id', sa.UUID(), nullable=False),
    sa.Column('model_id', sa.UUID(), nullable=False),
    sa.Column('job_name', sa.String(255), nullable=False),
    sa.Column('job_type', sa.String(100), nullable=False),
    sa.Column('status', sa.String(50), nullable=True),
    sa.Column('progress', sa.Float(), nullable=True),
    sa.Column('parameters', postgresql.JSON(astext_type=sa.Text()), nullable=True),
    sa.Column('metrics', postgresql.JSON(astext_type=sa.Text()), nullable=True),
    sa.Column('training_data_path', sa.String(500), nullable=True),
    sa.Column('model_path', sa.String(500), nullable=True),
    sa.Column('started_at', sa.DateTime(), nullable=True),
    sa.Column('completed_at', sa.DateTime(), nullable=True),
    sa.Column('created_at', sa.DateTime(), server_default=sa.text('now()'), nullable=True),
    sa.Column('updated_at', sa.DateTime(), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_training_jobs_tenant_id'), 'training_jobs', ['tenant_id'], unique=False)
    op.create_index(op.f('ix_training_jobs_model_id'), 'training_jobs', ['model_id'], unique=False)
    op.create_index(op.f('ix_training_jobs_status'), 'training_jobs', ['status'], unique=False)
    op.create_index(op.f('ix_training_jobs_job_type'), 'training_jobs', ['job_type'], unique=False)

    op.create_table('ml_models',
    sa.Column('id', sa.UUID(), nullable=False),
    sa.Column('tenant_id', sa.UUID(), nullable=False),
    sa.Column('name', sa.String(255), nullable=False),
    sa.Column('version', sa.String(50), nullable=False),
    sa.Column('model_type', sa.String(100), nullable=False),
    sa.Column('description', sa.Text(), nullable=True),
    sa.Column('status', sa.String(50), nullable=True),
    sa.Column('accuracy', sa.Float(), nullable=True),
    sa.Column('f1_score', sa.Float(), nullable=True),
    sa.Column('precision', sa.Float(), nullable=True),
    sa.Column('recall', sa.Float(), nullable=True),
    sa.Column('model_path', sa.String(500), nullable=True),
    sa.Column('hyperparameters', postgresql.JSON(astext_type=sa.Text()), nullable=True),
    sa.Column('training_data_info', postgresql.JSON(astext_type=sa.Text()), nullable=True),
    sa.Column('created_at', sa.DateTime(), server_default=sa.text('now()'), nullable=True),
    sa.Column('updated_at', sa.DateTime(), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_ml_models_tenant_id'), 'ml_models', ['tenant_id'], unique=False)
    op.create_index(op.f('ix_ml_models_name'), 'ml_models', ['name'], unique=False)
    op.create_index(op.f('ix_ml_models_version'), 'ml_models', ['version'], unique=False)
    op.create_index(op.f('ix_ml_models_status'), 'ml_models', ['status'], unique=False)


def downgrade() -> None:
    op.drop_index(op.f('ix_ml_models_status'), table_name='ml_models')
    op.drop_index(op.f('ix_ml_models_version'), table_name='ml_models')
    op.drop_index(op.f('ix_ml_models_name'), table_name='ml_models')
    op.drop_index(op.f('ix_ml_models_tenant_id'), table_name='ml_models')
    op.drop_table('ml_models')
    op.drop_index(op.f('ix_training_jobs_job_type'), table_name='training_jobs')
    op.drop_index(op.f('ix_training_jobs_status'), table_name='training_jobs')
    op.drop_index(op.f('ix_training_jobs_model_id'), table_name='training_jobs')
    op.drop_index(op.f('ix_training_jobs_tenant_id'), table_name='training_jobs')
    op.drop_table('training_jobs')
@@ -0,0 +1,159 @@
"""initial_schema_20251001_1118

Revision ID: 121e47ff97c4
Revises:
Create Date: 2025-10-01 11:18:37.223786+02:00

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision: str = '121e47ff97c4'
down_revision: Union[str, None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('model_artifacts',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('model_id', sa.String(length=255), nullable=False),
    sa.Column('tenant_id', sa.UUID(), nullable=False),
    sa.Column('artifact_type', sa.String(length=50), nullable=False),
    sa.Column('file_path', sa.String(length=1000), nullable=False),
    sa.Column('file_size_bytes', sa.Integer(), nullable=True),
    sa.Column('checksum', sa.String(length=255), nullable=True),
    sa.Column('storage_location', sa.String(length=100), nullable=False),
    sa.Column('compression', sa.String(length=50), nullable=True),
    sa.Column('created_at', sa.DateTime(timezone=True), nullable=True),
    sa.Column('expires_at', sa.DateTime(timezone=True), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_model_artifacts_id'), 'model_artifacts', ['id'], unique=False)
    op.create_index(op.f('ix_model_artifacts_model_id'), 'model_artifacts', ['model_id'], unique=False)
    op.create_index(op.f('ix_model_artifacts_tenant_id'), 'model_artifacts', ['tenant_id'], unique=False)
    op.create_table('model_performance_metrics',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('model_id', sa.String(length=255), nullable=False),
    sa.Column('tenant_id', sa.UUID(), nullable=False),
    sa.Column('inventory_product_id', sa.UUID(), nullable=False),
    sa.Column('mae', sa.Float(), nullable=True),
    sa.Column('mse', sa.Float(), nullable=True),
    sa.Column('rmse', sa.Float(), nullable=True),
    sa.Column('mape', sa.Float(), nullable=True),
    sa.Column('r2_score', sa.Float(), nullable=True),
    sa.Column('accuracy_percentage', sa.Float(), nullable=True),
    sa.Column('prediction_confidence', sa.Float(), nullable=True),
    sa.Column('evaluation_period_start', sa.DateTime(), nullable=True),
    sa.Column('evaluation_period_end', sa.DateTime(), nullable=True),
    sa.Column('evaluation_samples', sa.Integer(), nullable=True),
    sa.Column('measured_at', sa.DateTime(timezone=True), nullable=True),
    sa.Column('created_at', sa.DateTime(timezone=True), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_model_performance_metrics_id'), 'model_performance_metrics', ['id'], unique=False)
    op.create_index(op.f('ix_model_performance_metrics_inventory_product_id'), 'model_performance_metrics', ['inventory_product_id'], unique=False)
    op.create_index(op.f('ix_model_performance_metrics_model_id'), 'model_performance_metrics', ['model_id'], unique=False)
    op.create_index(op.f('ix_model_performance_metrics_tenant_id'), 'model_performance_metrics', ['tenant_id'], unique=False)
    op.create_table('model_training_logs',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('job_id', sa.String(length=255), nullable=False),
    sa.Column('tenant_id', sa.UUID(), nullable=False),
    sa.Column('status', sa.String(length=50), nullable=False),
    sa.Column('progress', sa.Integer(), nullable=True),
    sa.Column('current_step', sa.String(length=500), nullable=True),
    sa.Column('start_time', sa.DateTime(timezone=True), nullable=True),
    sa.Column('end_time', sa.DateTime(timezone=True), nullable=True),
    sa.Column('config', sa.JSON(), nullable=True),
    sa.Column('results', sa.JSON(), nullable=True),
    sa.Column('error_message', sa.Text(), nullable=True),
    sa.Column('created_at', sa.DateTime(timezone=True), nullable=True),
    sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_model_training_logs_id'), 'model_training_logs', ['id'], unique=False)
    op.create_index(op.f('ix_model_training_logs_job_id'), 'model_training_logs', ['job_id'], unique=True)
    op.create_index(op.f('ix_model_training_logs_tenant_id'), 'model_training_logs', ['tenant_id'], unique=False)
    op.create_table('trained_models',
    sa.Column('id', sa.UUID(), nullable=False),
    sa.Column('tenant_id', sa.UUID(), nullable=False),
    sa.Column('inventory_product_id', sa.UUID(), nullable=False),
    sa.Column('model_type', sa.String(), nullable=True),
    sa.Column('model_version', sa.String(), nullable=True),
    sa.Column('job_id', sa.String(), nullable=False),
    sa.Column('model_path', sa.String(), nullable=False),
    sa.Column('metadata_path', sa.String(), nullable=True),
    sa.Column('mape', sa.Float(), nullable=True),
    sa.Column('mae', sa.Float(), nullable=True),
    sa.Column('rmse', sa.Float(), nullable=True),
    sa.Column('r2_score', sa.Float(), nullable=True),
    sa.Column('training_samples', sa.Integer(), nullable=True),
    sa.Column('hyperparameters', sa.JSON(), nullable=True),
    sa.Column('features_used', sa.JSON(), nullable=True),
    sa.Column('normalization_params', sa.JSON(), nullable=True),
    sa.Column('is_active', sa.Boolean(), nullable=True),
    sa.Column('is_production', sa.Boolean(), nullable=True),
    sa.Column('created_at', sa.DateTime(timezone=True), nullable=True),
    sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
    sa.Column('last_used_at', sa.DateTime(timezone=True), nullable=True),
    sa.Column('training_start_date', sa.DateTime(timezone=True), nullable=True),
    sa.Column('training_end_date', sa.DateTime(timezone=True), nullable=True),
    sa.Column('data_quality_score', sa.Float(), nullable=True),
    sa.Column('notes', sa.Text(), nullable=True),
    sa.Column('created_by', sa.String(), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_trained_models_inventory_product_id'), 'trained_models', ['inventory_product_id'], unique=False)
    op.create_index(op.f('ix_trained_models_tenant_id'), 'trained_models', ['tenant_id'], unique=False)
    op.create_table('training_job_queue',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('job_id', sa.String(length=255), nullable=False),
    sa.Column('tenant_id', sa.UUID(), nullable=False),
    sa.Column('job_type', sa.String(length=50), nullable=False),
    sa.Column('priority', sa.Integer(), nullable=True),
    sa.Column('config', sa.JSON(), nullable=True),
    sa.Column('scheduled_at', sa.DateTime(), nullable=True),
    sa.Column('started_at', sa.DateTime(), nullable=True),
    sa.Column('estimated_duration_minutes', sa.Integer(), nullable=True),
    sa.Column('status', sa.String(length=50), nullable=False),
    sa.Column('retry_count', sa.Integer(), nullable=True),
    sa.Column('max_retries', sa.Integer(), nullable=True),
    sa.Column('created_at', sa.DateTime(timezone=True), nullable=True),
    sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
    sa.Column('cancelled_by', sa.String(), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_training_job_queue_id'), 'training_job_queue', ['id'], unique=False)
    op.create_index(op.f('ix_training_job_queue_job_id'), 'training_job_queue', ['job_id'], unique=True)
    op.create_index(op.f('ix_training_job_queue_tenant_id'), 'training_job_queue', ['tenant_id'], unique=False)
    # ### end Alembic commands ###


def downgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_index(op.f('ix_training_job_queue_tenant_id'), table_name='training_job_queue')
    op.drop_index(op.f('ix_training_job_queue_job_id'), table_name='training_job_queue')
    op.drop_index(op.f('ix_training_job_queue_id'), table_name='training_job_queue')
    op.drop_table('training_job_queue')
    op.drop_index(op.f('ix_trained_models_tenant_id'), table_name='trained_models')
    op.drop_index(op.f('ix_trained_models_inventory_product_id'), table_name='trained_models')
    op.drop_table('trained_models')
    op.drop_index(op.f('ix_model_training_logs_tenant_id'), table_name='model_training_logs')
    op.drop_index(op.f('ix_model_training_logs_job_id'), table_name='model_training_logs')
    op.drop_index(op.f('ix_model_training_logs_id'), table_name='model_training_logs')
    op.drop_table('model_training_logs')
    op.drop_index(op.f('ix_model_performance_metrics_tenant_id'), table_name='model_performance_metrics')
    op.drop_index(op.f('ix_model_performance_metrics_model_id'), table_name='model_performance_metrics')
    op.drop_index(op.f('ix_model_performance_metrics_inventory_product_id'), table_name='model_performance_metrics')
    op.drop_index(op.f('ix_model_performance_metrics_id'), table_name='model_performance_metrics')
    op.drop_table('model_performance_metrics')
    op.drop_index(op.f('ix_model_artifacts_tenant_id'), table_name='model_artifacts')
    op.drop_index(op.f('ix_model_artifacts_model_id'), table_name='model_artifacts')
    op.drop_index(op.f('ix_model_artifacts_id'), table_name='model_artifacts')
    op.drop_table('model_artifacts')
    # ### end Alembic commands ###
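
A quick way to sanity-check the new baseline is to round-trip it against a throwaway database. The snippet below is a minimal sketch (not part of the commit) using Alembic's programmatic API; the alembic.ini path and the DATABASE_URL environment variable are assumptions about this service's layout, while the revision ID 121e47ff97c4 comes from the migration above.

# Hypothetical smoke test: apply and revert the new baseline revision.
import os

from alembic import command
from alembic.config import Config

# Assumes an alembic.ini in the service root and a scratch Postgres database
# reachable via the (assumed) DATABASE_URL environment variable.
cfg = Config("alembic.ini")
cfg.set_main_option("sqlalchemy.url", os.environ["DATABASE_URL"])

command.upgrade(cfg, "121e47ff97c4")   # create all five tables and their indexes
command.downgrade(cfg, "base")         # drop them again in reverse order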