Fix Alembic issue
@@ -1,7 +1,6 @@
"""Alembic environment configuration for production service"""

import asyncio
import logging
import os
import sys
from logging.config import fileConfig
@@ -25,7 +24,7 @@ try:
    from shared.database.base import Base

    # Import all models to ensure they are registered with Base.metadata
    from app.models import *  # Import all models
    from app.models import *  # noqa: F401, F403

except ImportError as e:
    print(f"Import error in migrations env.py: {e}")
@@ -35,12 +34,19 @@ except ImportError as e:
# this is the Alembic Config object
config = context.config

# Set database URL from environment variables or settings
# Try service-specific DATABASE_URL first, then fall back to generic
database_url = os.getenv('PRODUCTION_DATABASE_URL') or os.getenv('DATABASE_URL')
# Determine service name from file path
service_name = os.path.basename(os.path.dirname(os.path.dirname(__file__)))
service_name_upper = service_name.upper().replace('-', '_')

# Set database URL from environment variables with multiple fallback strategies
database_url = (
    os.getenv(f'{service_name_upper}_DATABASE_URL') or  # Service-specific
    os.getenv('DATABASE_URL')  # Generic fallback
)

# If DATABASE_URL is not set, construct from individual components
if not database_url:
    # Try generic PostgreSQL environment variables first
    postgres_host = os.getenv('POSTGRES_HOST')
    postgres_port = os.getenv('POSTGRES_PORT', '5432')
    postgres_db = os.getenv('POSTGRES_DB')
@@ -50,11 +56,28 @@ if not database_url:
    if all([postgres_host, postgres_db, postgres_user, postgres_password]):
        database_url = f"postgresql+asyncpg://{postgres_user}:{postgres_password}@{postgres_host}:{postgres_port}/{postgres_db}"
    else:
        # Fallback to settings
        database_url = getattr(settings, 'DATABASE_URL', None)
        # Try service-specific environment variables
        db_host = os.getenv(f'{service_name_upper}_DB_HOST', f'{service_name}-db-service')
        db_port = os.getenv(f'{service_name_upper}_DB_PORT', '5432')
        db_name = os.getenv(f'{service_name_upper}_DB_NAME', f'{service_name.replace("-", "_")}_db')
        db_user = os.getenv(f'{service_name_upper}_DB_USER', f'{service_name.replace("-", "_")}_user')
        db_password = os.getenv(f'{service_name_upper}_DB_PASSWORD')

if database_url:
    config.set_main_option("sqlalchemy.url", database_url)
        if db_password:
            database_url = f"postgresql+asyncpg://{db_user}:{db_password}@{db_host}:{db_port}/{db_name}"
        else:
            # Final fallback: try to get from settings object
            try:
                database_url = getattr(settings, 'DATABASE_URL', None)
            except Exception:
                pass

if not database_url:
    error_msg = f"ERROR: No database URL configured for {service_name} service"
    print(error_msg)
    raise Exception(error_msg)

config.set_main_option("sqlalchemy.url", database_url)

# Interpret the config file for Python logging
if config.config_file_name is not None:
@@ -63,6 +86,7 @@ if config.config_file_name is not None:
# Set target metadata
target_metadata = Base.metadata


def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode."""
    url = config.get_main_option("sqlalchemy.url")
@@ -78,7 +102,9 @@ def run_migrations_offline() -> None:
    with context.begin_transaction():
        context.run_migrations()


def do_run_migrations(connection: Connection) -> None:
    """Execute migrations with the given connection."""
    context.configure(
        connection=connection,
        target_metadata=target_metadata,
@@ -89,8 +115,9 @@ def do_run_migrations(connection: Connection) -> None:
    with context.begin_transaction():
        context.run_migrations()


async def run_async_migrations() -> None:
    """Run migrations in 'online' mode."""
    """Run migrations in 'online' mode with async support."""
    connectable = async_engine_from_config(
        config.get_section(config.config_ini_section, {}),
        prefix="sqlalchemy.",
@@ -102,10 +129,12 @@ async def run_async_migrations() -> None:

    await connectable.dispose()


def run_migrations_online() -> None:
    """Run migrations in 'online' mode."""
    asyncio.run(run_async_migrations())


if context.is_offline_mode():
    run_migrations_offline()
else:
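Reviewer note: the resolution order introduced above can be read as a single standalone helper. The sketch below only restates that precedence (service-specific *_DATABASE_URL, generic DATABASE_URL, generic POSTGRES_* parts, service-specific *_DB_* parts, then a settings object); the function name resolve_database_url, its parameters, and the POSTGRES_USER / POSTGRES_PASSWORD variable names are illustrative assumptions, not part of this commit.

import os
from typing import Optional

def resolve_database_url(service_name: str, settings=None) -> Optional[str]:
    """Hypothetical helper mirroring the precedence used in env.py above."""
    prefix = service_name.upper().replace('-', '_')

    # 1) Service-specific URL, then generic URL
    url = os.getenv(f'{prefix}_DATABASE_URL') or os.getenv('DATABASE_URL')
    if url:
        return url

    # 2) Generic POSTGRES_* components (user/password variable names assumed)
    host = os.getenv('POSTGRES_HOST')
    port = os.getenv('POSTGRES_PORT', '5432')
    db = os.getenv('POSTGRES_DB')
    user = os.getenv('POSTGRES_USER')
    password = os.getenv('POSTGRES_PASSWORD')
    if all([host, db, user, password]):
        return f"postgresql+asyncpg://{user}:{password}@{host}:{port}/{db}"

    # 3) Service-specific DB_* components, with the same defaults as above
    db_host = os.getenv(f'{prefix}_DB_HOST', f'{service_name}-db-service')
    db_port = os.getenv(f'{prefix}_DB_PORT', '5432')
    db_name = os.getenv(f'{prefix}_DB_NAME', f'{service_name.replace("-", "_")}_db')
    db_user = os.getenv(f'{prefix}_DB_USER', f'{service_name.replace("-", "_")}_user')
    db_password = os.getenv(f'{prefix}_DB_PASSWORD')
    if db_password:
        return f"postgresql+asyncpg://{db_user}:{db_password}@{db_host}:{db_port}/{db_name}"

    # 4) Final fallback: a settings object, if one is available
    return getattr(settings, 'DATABASE_URL', None) if settings else None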
@@ -1,293 +0,0 @@
"""Initial schema for production service

Revision ID: 00001
Revises:
Create Date: 2025-09-30 18:00:00.0000

"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
from sqlalchemy import Enum

# revision identifiers, used by Alembic.
revision = '00001'
down_revision = None
branch_labels = None
depends_on = None


def upgrade() -> None:
    # Create production_batches table (ENUMs will be created automatically)
    op.create_table('production_batches',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('tenant_id', sa.UUID(), nullable=False),
        sa.Column('batch_number', sa.String(50), nullable=False),
        sa.Column('product_id', sa.UUID(), nullable=False),
        sa.Column('product_name', sa.String(255), nullable=False),
        sa.Column('recipe_id', sa.UUID(), nullable=True),
        sa.Column('planned_start_time', sa.DateTime(timezone=True), nullable=False),
        sa.Column('planned_end_time', sa.DateTime(timezone=True), nullable=False),
        sa.Column('planned_quantity', sa.Float(), nullable=False),
        sa.Column('planned_duration_minutes', sa.Integer(), nullable=False),
        sa.Column('actual_start_time', sa.DateTime(timezone=True), nullable=True),
        sa.Column('actual_end_time', sa.DateTime(timezone=True), nullable=True),
        sa.Column('actual_quantity', sa.Float(), nullable=True),
        sa.Column('actual_duration_minutes', sa.Integer(), nullable=True),
        sa.Column('status', sa.Enum('PENDING', 'IN_PROGRESS', 'COMPLETED', 'CANCELLED', 'ON_HOLD', 'QUALITY_CHECK', 'FAILED', name='productionstatus'), nullable=False),
        sa.Column('priority', sa.Enum('LOW', 'MEDIUM', 'HIGH', 'URGENT', name='productionpriority'), nullable=False),
        sa.Column('current_process_stage', sa.Enum('mixing', 'proofing', 'shaping', 'baking', 'cooling', 'packaging', 'finishing', name='processstage'), nullable=True),
        sa.Column('process_stage_history', postgresql.JSON(astext_type=sa.Text()), nullable=True),
        sa.Column('pending_quality_checks', postgresql.JSON(astext_type=sa.Text()), nullable=True),
        sa.Column('completed_quality_checks', postgresql.JSON(astext_type=sa.Text()), nullable=True),
        sa.Column('estimated_cost', sa.Float(), nullable=True),
        sa.Column('actual_cost', sa.Float(), nullable=True),
        sa.Column('labor_cost', sa.Float(), nullable=True),
        sa.Column('material_cost', sa.Float(), nullable=True),
        sa.Column('overhead_cost', sa.Float(), nullable=True),
        sa.Column('yield_percentage', sa.Float(), nullable=True),
        sa.Column('quality_score', sa.Float(), nullable=True),
        sa.Column('waste_quantity', sa.Float(), nullable=True),
        sa.Column('defect_quantity', sa.Float(), nullable=True),
        sa.Column('equipment_used', postgresql.JSON(astext_type=sa.Text()), nullable=True),
        sa.Column('staff_assigned', postgresql.JSON(astext_type=sa.Text()), nullable=True),
        sa.Column('station_id', sa.String(50), nullable=True),
        sa.Column('order_id', sa.UUID(), nullable=True),
        sa.Column('forecast_id', sa.UUID(), nullable=True),
        sa.Column('is_rush_order', sa.Boolean(), nullable=True),
        sa.Column('is_special_recipe', sa.Boolean(), nullable=True),
        sa.Column('production_notes', sa.Text(), nullable=True),
        sa.Column('quality_notes', sa.Text(), nullable=True),
        sa.Column('delay_reason', sa.String(255), nullable=True),
        sa.Column('cancellation_reason', sa.String(255), nullable=True),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('timezone(\'utc\', now())'), nullable=True),
        sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('timezone(\'utc\', now())'), nullable=True),
        sa.Column('completed_at', sa.DateTime(timezone=True), nullable=True),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('batch_number')
    )
    op.create_index(op.f('ix_production_batches_tenant_id'), 'production_batches', ['tenant_id'], unique=False)
    op.create_index(op.f('ix_production_batches_batch_number'), 'production_batches', ['batch_number'], unique=False)
    op.create_index(op.f('ix_production_batches_product_id'), 'production_batches', ['product_id'], unique=False)
    op.create_index(op.f('ix_production_batches_status'), 'production_batches', ['status'], unique=False)
    op.create_index(op.f('ix_production_batches_current_process_stage'), 'production_batches', ['current_process_stage'], unique=False)

    # Create production_schedules table
    op.create_table('production_schedules',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('tenant_id', sa.UUID(), nullable=False),
        sa.Column('schedule_date', sa.DateTime(timezone=True), nullable=False),
        sa.Column('shift_start', sa.DateTime(timezone=True), nullable=False),
        sa.Column('shift_end', sa.DateTime(timezone=True), nullable=False),
        sa.Column('total_capacity_hours', sa.Float(), nullable=False),
        sa.Column('planned_capacity_hours', sa.Float(), nullable=False),
        sa.Column('actual_capacity_hours', sa.Float(), nullable=True),
        sa.Column('overtime_hours', sa.Float(), nullable=True),
        sa.Column('staff_count', sa.Integer(), nullable=False),
        sa.Column('equipment_capacity', postgresql.JSON(astext_type=sa.Text()), nullable=True),
        sa.Column('station_assignments', postgresql.JSON(astext_type=sa.Text()), nullable=True),
        sa.Column('total_batches_planned', sa.Integer(), nullable=True),
        sa.Column('total_batches_completed', sa.Integer(), nullable=True),
        sa.Column('total_quantity_planned', sa.Float(), nullable=True),
        sa.Column('total_quantity_produced', sa.Float(), nullable=True),
        sa.Column('is_finalized', sa.Boolean(), nullable=True),
        sa.Column('is_active', sa.Boolean(), nullable=True),
        sa.Column('efficiency_percentage', sa.Float(), nullable=True),
        sa.Column('utilization_percentage', sa.Float(), nullable=True),
        sa.Column('on_time_completion_rate', sa.Float(), nullable=True),
        sa.Column('schedule_notes', sa.Text(), nullable=True),
        sa.Column('schedule_adjustments', postgresql.JSON(astext_type=sa.Text()), nullable=True),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('timezone(\'utc\', now())'), nullable=True),
        sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('timezone(\'utc\', now())'), nullable=True),
        sa.Column('finalized_at', sa.DateTime(timezone=True), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_production_schedules_tenant_id'), 'production_schedules', ['tenant_id'], unique=False)
    op.create_index(op.f('ix_production_schedules_schedule_date'), 'production_schedules', ['schedule_date'], unique=False)

    # Create production_capacity table
    op.create_table('production_capacity',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('tenant_id', sa.UUID(), nullable=False),
        sa.Column('resource_type', sa.String(50), nullable=False),
        sa.Column('resource_id', sa.String(100), nullable=False),
        sa.Column('resource_name', sa.String(255), nullable=False),
        sa.Column('date', sa.DateTime(timezone=True), nullable=False),
        sa.Column('start_time', sa.DateTime(timezone=True), nullable=False),
        sa.Column('end_time', sa.DateTime(timezone=True), nullable=False),
        sa.Column('total_capacity_units', sa.Float(), nullable=False),
        sa.Column('allocated_capacity_units', sa.Float(), nullable=True),
        sa.Column('remaining_capacity_units', sa.Float(), nullable=False),
        sa.Column('is_available', sa.Boolean(), nullable=True),
        sa.Column('is_maintenance', sa.Boolean(), nullable=True),
        sa.Column('is_reserved', sa.Boolean(), nullable=True),
        sa.Column('equipment_type', sa.String(100), nullable=True),
        sa.Column('max_batch_size', sa.Float(), nullable=True),
        sa.Column('min_batch_size', sa.Float(), nullable=True),
        sa.Column('setup_time_minutes', sa.Integer(), nullable=True),
        sa.Column('cleanup_time_minutes', sa.Integer(), nullable=True),
        sa.Column('efficiency_rating', sa.Float(), nullable=True),
        sa.Column('maintenance_status', sa.String(50), nullable=True),
        sa.Column('last_maintenance_date', sa.DateTime(timezone=True), nullable=True),
        sa.Column('notes', sa.Text(), nullable=True),
        sa.Column('restrictions', postgresql.JSON(astext_type=sa.Text()), nullable=True),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('timezone(\'utc\', now())'), nullable=True),
        sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('timezone(\'utc\', now())'), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_production_capacity_tenant_id'), 'production_capacity', ['tenant_id'], unique=False)
    op.create_index(op.f('ix_production_capacity_date'), 'production_capacity', ['date'], unique=False)

    # Create quality_check_templates table
    op.create_table('quality_check_templates',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('tenant_id', sa.UUID(), nullable=False),
        sa.Column('name', sa.String(255), nullable=False),
        sa.Column('template_code', sa.String(100), nullable=True),
        sa.Column('check_type', sa.String(50), nullable=False),
        sa.Column('category', sa.String(100), nullable=True),
        sa.Column('description', sa.Text(), nullable=True),
        sa.Column('instructions', sa.Text(), nullable=True),
        sa.Column('parameters', postgresql.JSON(astext_type=sa.Text()), nullable=True),
        sa.Column('thresholds', postgresql.JSON(astext_type=sa.Text()), nullable=True),
        sa.Column('scoring_criteria', postgresql.JSON(astext_type=sa.Text()), nullable=True),
        sa.Column('is_active', sa.Boolean(), nullable=True),
        sa.Column('is_required', sa.Boolean(), nullable=True),
        sa.Column('is_critical', sa.Boolean(), nullable=True),
        sa.Column('weight', sa.Float(), nullable=True),
        sa.Column('min_value', sa.Float(), nullable=True),
        sa.Column('max_value', sa.Float(), nullable=True),
        sa.Column('target_value', sa.Float(), nullable=True),
        sa.Column('unit', sa.String(20), nullable=True),
        sa.Column('tolerance_percentage', sa.Float(), nullable=True),
        sa.Column('applicable_stages', postgresql.JSON(astext_type=sa.Text()), nullable=True),
        sa.Column('created_by', sa.UUID(), nullable=False),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('timezone(\'utc\', now())'), nullable=True),
        sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('timezone(\'utc\', now())'), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_quality_check_templates_tenant_id'), 'quality_check_templates', ['tenant_id'], unique=False)
    op.create_index(op.f('ix_quality_check_templates_template_code'), 'quality_check_templates', ['template_code'], unique=False)

    # Create quality_checks table
    op.create_table('quality_checks',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('tenant_id', sa.UUID(), nullable=False),
        sa.Column('batch_id', sa.UUID(), nullable=False),
        sa.Column('template_id', sa.UUID(), nullable=True),
        sa.Column('check_type', sa.String(50), nullable=False),
        sa.Column('process_stage', sa.Enum('mixing', 'proofing', 'shaping', 'baking', 'cooling', 'packaging', 'finishing', name='processstage'), nullable=True),
        sa.Column('check_time', sa.DateTime(timezone=True), nullable=False),
        sa.Column('checker_id', sa.String(100), nullable=True),
        sa.Column('quality_score', sa.Float(), nullable=False),
        sa.Column('pass_fail', sa.Boolean(), nullable=False),
        sa.Column('defect_count', sa.Integer(), nullable=True),
        sa.Column('defect_types', postgresql.JSON(astext_type=sa.Text()), nullable=True),
        sa.Column('measured_weight', sa.Float(), nullable=True),
        sa.Column('measured_temperature', sa.Float(), nullable=True),
        sa.Column('measured_moisture', sa.Float(), nullable=True),
        sa.Column('measured_dimensions', postgresql.JSON(astext_type=sa.Text()), nullable=True),
        sa.Column('stage_specific_data', postgresql.JSON(astext_type=sa.Text()), nullable=True),
        sa.Column('target_weight', sa.Float(), nullable=True),
        sa.Column('target_temperature', sa.Float(), nullable=True),
        sa.Column('target_moisture', sa.Float(), nullable=True),
        sa.Column('tolerance_percentage', sa.Float(), nullable=True),
        sa.Column('within_tolerance', sa.Boolean(), nullable=True),
        sa.Column('corrective_action_needed', sa.Boolean(), nullable=True),
        sa.Column('corrective_actions', postgresql.JSON(astext_type=sa.Text()), nullable=True),
        sa.Column('template_results', postgresql.JSON(astext_type=sa.Text()), nullable=True),
        sa.Column('criteria_scores', postgresql.JSON(astext_type=sa.Text()), nullable=True),
        sa.Column('check_notes', sa.Text(), nullable=True),
        sa.Column('photos_urls', postgresql.JSON(astext_type=sa.Text()), nullable=True),
        sa.Column('certificate_url', sa.String(500), nullable=True),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('timezone(\'utc\', now())'), nullable=True),
        sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('timezone(\'utc\', now())'), nullable=True),
        sa.ForeignKeyConstraint(['batch_id'], ['production_batches.id'], ),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_quality_checks_tenant_id'), 'quality_checks', ['tenant_id'], unique=False)
    op.create_index(op.f('ix_quality_checks_batch_id'), 'quality_checks', ['batch_id'], unique=False)
    op.create_index(op.f('ix_quality_checks_template_id'), 'quality_checks', ['template_id'], unique=False)
    op.create_index(op.f('ix_quality_checks_process_stage'), 'quality_checks', ['process_stage'], unique=False)

    # Create equipment table
    op.create_table('equipment',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('tenant_id', sa.UUID(), nullable=False),
        sa.Column('name', sa.String(255), nullable=False),
        sa.Column('type', sa.Enum('oven', 'mixer', 'proofer', 'freezer', 'packaging', 'other', name='equipmenttype'), nullable=False),
        sa.Column('model', sa.String(100), nullable=True),
        sa.Column('serial_number', sa.String(100), nullable=True),
        sa.Column('location', sa.String(255), nullable=True),
        sa.Column('status', sa.Enum('operational', 'maintenance', 'down', 'warning', name='equipmentstatus'), nullable=True),
        sa.Column('install_date', sa.DateTime(timezone=True), nullable=True),
        sa.Column('last_maintenance_date', sa.DateTime(timezone=True), nullable=True),
        sa.Column('next_maintenance_date', sa.DateTime(timezone=True), nullable=True),
        sa.Column('maintenance_interval_days', sa.Integer(), nullable=True),
        sa.Column('efficiency_percentage', sa.Float(), nullable=True),
        sa.Column('uptime_percentage', sa.Float(), nullable=True),
        sa.Column('energy_usage_kwh', sa.Float(), nullable=True),
        sa.Column('power_kw', sa.Float(), nullable=True),
        sa.Column('capacity', sa.Float(), nullable=True),
        sa.Column('weight_kg', sa.Float(), nullable=True),
        sa.Column('current_temperature', sa.Float(), nullable=True),
        sa.Column('target_temperature', sa.Float(), nullable=True),
        sa.Column('is_active', sa.Boolean(), nullable=True),
        sa.Column('notes', sa.Text(), nullable=True),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('timezone(\'utc\', now())'), nullable=True),
        sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('timezone(\'utc\', now())'), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_equipment_tenant_id'), 'equipment', ['tenant_id'], unique=False)


def downgrade() -> None:
    # Drop equipment table
    op.drop_index(op.f('ix_equipment_tenant_id'), table_name='equipment')
    op.drop_table('equipment')

    # Drop quality_checks table
    op.drop_index(op.f('ix_quality_checks_process_stage'), table_name='quality_checks')
    op.drop_index(op.f('ix_quality_checks_template_id'), table_name='quality_checks')
    op.drop_index(op.f('ix_quality_checks_batch_id'), table_name='quality_checks')
    op.drop_index(op.f('ix_quality_checks_tenant_id'), table_name='quality_checks')
    op.drop_table('quality_checks')

    # Drop quality_check_templates table
    op.drop_index(op.f('ix_quality_check_templates_template_code'), table_name='quality_check_templates')
    op.drop_index(op.f('ix_quality_check_templates_tenant_id'), table_name='quality_check_templates')
    op.drop_table('quality_check_templates')

    # Drop production_capacity table
    op.drop_index(op.f('ix_production_capacity_date'), table_name='production_capacity')
    op.drop_index(op.f('ix_production_capacity_tenant_id'), table_name='production_capacity')
    op.drop_table('production_capacity')

    # Drop production_schedules table
    op.drop_index(op.f('ix_production_schedules_schedule_date'), table_name='production_schedules')
    op.drop_index(op.f('ix_production_schedules_tenant_id'), table_name='production_schedules')
    op.drop_table('production_schedules')

    # Drop production_batches table
    op.drop_index(op.f('ix_production_batches_current_process_stage'), table_name='production_batches')
    op.drop_index(op.f('ix_production_batches_status'), table_name='production_batches')
    op.drop_index(op.f('ix_production_batches_product_id'), table_name='production_batches')
    op.drop_index(op.f('ix_production_batches_batch_number'), table_name='production_batches')
    op.drop_index(op.f('ix_production_batches_tenant_id'), table_name='production_batches')
    op.drop_table('production_batches')

    # Drop enums
    process_stage_enum = Enum(name='processstage')
    process_stage_enum.drop(op.get_bind(), checkfirst=True)

    equipment_type_enum = Enum(name='equipmenttype')
    equipment_type_enum.drop(op.get_bind(), checkfirst=True)

    equipment_status_enum = Enum(name='equipmentstatus')
    equipment_status_enum.drop(op.get_bind(), checkfirst=True)

    production_priority_enum = Enum(name='productionpriority')
    production_priority_enum.drop(op.get_bind(), checkfirst=True)

    production_status_enum = Enum(name='productionstatus')
    production_status_enum.drop(op.get_bind(), checkfirst=True)
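Reviewer note: the deleted revision above drops its named PostgreSQL ENUM types explicitly on downgrade, while the regenerated revision that follows only drops tables, so the types would be left behind after a downgrade. If that cleanup is still wanted, a minimal sketch under that assumption (reusing the enum names from the deleted file) could be appended to the new downgrade():

import sqlalchemy as sa
from alembic import op


def drop_named_enums() -> None:
    """Hypothetical helper: remove the ENUM types once the tables that used them are gone."""
    bind = op.get_bind()
    for enum_name in ('processstage', 'equipmenttype', 'equipmentstatus',
                      'productionpriority', 'productionstatus'):
        sa.Enum(name=enum_name).drop(bind, checkfirst=True)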
@@ -0,0 +1,258 @@
"""initial_schema_20251001_1119

Revision ID: 2fe9ab08dd7b
Revises:
Create Date: 2025-10-01 11:19:59.233402+02:00

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision: str = '2fe9ab08dd7b'
down_revision: Union[str, None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('equipment',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('tenant_id', sa.UUID(), nullable=False),
        sa.Column('name', sa.String(length=255), nullable=False),
        sa.Column('type', sa.Enum('OVEN', 'MIXER', 'PROOFER', 'FREEZER', 'PACKAGING', 'OTHER', name='equipmenttype'), nullable=False),
        sa.Column('model', sa.String(length=100), nullable=True),
        sa.Column('serial_number', sa.String(length=100), nullable=True),
        sa.Column('location', sa.String(length=255), nullable=True),
        sa.Column('status', sa.Enum('OPERATIONAL', 'MAINTENANCE', 'DOWN', 'WARNING', name='equipmentstatus'), nullable=False),
        sa.Column('install_date', sa.DateTime(timezone=True), nullable=True),
        sa.Column('last_maintenance_date', sa.DateTime(timezone=True), nullable=True),
        sa.Column('next_maintenance_date', sa.DateTime(timezone=True), nullable=True),
        sa.Column('maintenance_interval_days', sa.Integer(), nullable=True),
        sa.Column('efficiency_percentage', sa.Float(), nullable=True),
        sa.Column('uptime_percentage', sa.Float(), nullable=True),
        sa.Column('energy_usage_kwh', sa.Float(), nullable=True),
        sa.Column('power_kw', sa.Float(), nullable=True),
        sa.Column('capacity', sa.Float(), nullable=True),
        sa.Column('weight_kg', sa.Float(), nullable=True),
        sa.Column('current_temperature', sa.Float(), nullable=True),
        sa.Column('target_temperature', sa.Float(), nullable=True),
        sa.Column('is_active', sa.Boolean(), nullable=True),
        sa.Column('notes', sa.Text(), nullable=True),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True),
        sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_equipment_tenant_id'), 'equipment', ['tenant_id'], unique=False)
    op.create_table('production_batches',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('tenant_id', sa.UUID(), nullable=False),
        sa.Column('batch_number', sa.String(length=50), nullable=False),
        sa.Column('product_id', sa.UUID(), nullable=False),
        sa.Column('product_name', sa.String(length=255), nullable=False),
        sa.Column('recipe_id', sa.UUID(), nullable=True),
        sa.Column('planned_start_time', sa.DateTime(timezone=True), nullable=False),
        sa.Column('planned_end_time', sa.DateTime(timezone=True), nullable=False),
        sa.Column('planned_quantity', sa.Float(), nullable=False),
        sa.Column('planned_duration_minutes', sa.Integer(), nullable=False),
        sa.Column('actual_start_time', sa.DateTime(timezone=True), nullable=True),
        sa.Column('actual_end_time', sa.DateTime(timezone=True), nullable=True),
        sa.Column('actual_quantity', sa.Float(), nullable=True),
        sa.Column('actual_duration_minutes', sa.Integer(), nullable=True),
        sa.Column('status', sa.Enum('PENDING', 'IN_PROGRESS', 'COMPLETED', 'CANCELLED', 'ON_HOLD', 'QUALITY_CHECK', 'FAILED', name='productionstatus'), nullable=False),
        sa.Column('priority', sa.Enum('LOW', 'MEDIUM', 'HIGH', 'URGENT', name='productionpriority'), nullable=False),
        sa.Column('current_process_stage', sa.Enum('MIXING', 'PROOFING', 'SHAPING', 'BAKING', 'COOLING', 'PACKAGING', 'FINISHING', name='processstage'), nullable=True),
        sa.Column('process_stage_history', sa.JSON(), nullable=True),
        sa.Column('pending_quality_checks', sa.JSON(), nullable=True),
        sa.Column('completed_quality_checks', sa.JSON(), nullable=True),
        sa.Column('estimated_cost', sa.Float(), nullable=True),
        sa.Column('actual_cost', sa.Float(), nullable=True),
        sa.Column('labor_cost', sa.Float(), nullable=True),
        sa.Column('material_cost', sa.Float(), nullable=True),
        sa.Column('overhead_cost', sa.Float(), nullable=True),
        sa.Column('yield_percentage', sa.Float(), nullable=True),
        sa.Column('quality_score', sa.Float(), nullable=True),
        sa.Column('waste_quantity', sa.Float(), nullable=True),
        sa.Column('defect_quantity', sa.Float(), nullable=True),
        sa.Column('equipment_used', sa.JSON(), nullable=True),
        sa.Column('staff_assigned', sa.JSON(), nullable=True),
        sa.Column('station_id', sa.String(length=50), nullable=True),
        sa.Column('order_id', sa.UUID(), nullable=True),
        sa.Column('forecast_id', sa.UUID(), nullable=True),
        sa.Column('is_rush_order', sa.Boolean(), nullable=True),
        sa.Column('is_special_recipe', sa.Boolean(), nullable=True),
        sa.Column('production_notes', sa.Text(), nullable=True),
        sa.Column('quality_notes', sa.Text(), nullable=True),
        sa.Column('delay_reason', sa.String(length=255), nullable=True),
        sa.Column('cancellation_reason', sa.String(length=255), nullable=True),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True),
        sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True),
        sa.Column('completed_at', sa.DateTime(timezone=True), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_production_batches_batch_number'), 'production_batches', ['batch_number'], unique=True)
    op.create_index(op.f('ix_production_batches_current_process_stage'), 'production_batches', ['current_process_stage'], unique=False)
    op.create_index(op.f('ix_production_batches_product_id'), 'production_batches', ['product_id'], unique=False)
    op.create_index(op.f('ix_production_batches_status'), 'production_batches', ['status'], unique=False)
    op.create_index(op.f('ix_production_batches_tenant_id'), 'production_batches', ['tenant_id'], unique=False)
    op.create_table('production_capacity',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('tenant_id', sa.UUID(), nullable=False),
        sa.Column('resource_type', sa.String(length=50), nullable=False),
        sa.Column('resource_id', sa.String(length=100), nullable=False),
        sa.Column('resource_name', sa.String(length=255), nullable=False),
        sa.Column('date', sa.DateTime(timezone=True), nullable=False),
        sa.Column('start_time', sa.DateTime(timezone=True), nullable=False),
        sa.Column('end_time', sa.DateTime(timezone=True), nullable=False),
        sa.Column('total_capacity_units', sa.Float(), nullable=False),
        sa.Column('allocated_capacity_units', sa.Float(), nullable=False),
        sa.Column('remaining_capacity_units', sa.Float(), nullable=False),
        sa.Column('is_available', sa.Boolean(), nullable=True),
        sa.Column('is_maintenance', sa.Boolean(), nullable=True),
        sa.Column('is_reserved', sa.Boolean(), nullable=True),
        sa.Column('equipment_type', sa.String(length=100), nullable=True),
        sa.Column('max_batch_size', sa.Float(), nullable=True),
        sa.Column('min_batch_size', sa.Float(), nullable=True),
        sa.Column('setup_time_minutes', sa.Integer(), nullable=True),
        sa.Column('cleanup_time_minutes', sa.Integer(), nullable=True),
        sa.Column('efficiency_rating', sa.Float(), nullable=True),
        sa.Column('maintenance_status', sa.String(length=50), nullable=True),
        sa.Column('last_maintenance_date', sa.DateTime(timezone=True), nullable=True),
        sa.Column('notes', sa.Text(), nullable=True),
        sa.Column('restrictions', sa.JSON(), nullable=True),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True),
        sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_production_capacity_date'), 'production_capacity', ['date'], unique=False)
    op.create_index(op.f('ix_production_capacity_tenant_id'), 'production_capacity', ['tenant_id'], unique=False)
    op.create_table('production_schedules',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('tenant_id', sa.UUID(), nullable=False),
        sa.Column('schedule_date', sa.DateTime(timezone=True), nullable=False),
        sa.Column('shift_start', sa.DateTime(timezone=True), nullable=False),
        sa.Column('shift_end', sa.DateTime(timezone=True), nullable=False),
        sa.Column('total_capacity_hours', sa.Float(), nullable=False),
        sa.Column('planned_capacity_hours', sa.Float(), nullable=False),
        sa.Column('actual_capacity_hours', sa.Float(), nullable=True),
        sa.Column('overtime_hours', sa.Float(), nullable=True),
        sa.Column('staff_count', sa.Integer(), nullable=False),
        sa.Column('equipment_capacity', sa.JSON(), nullable=True),
        sa.Column('station_assignments', sa.JSON(), nullable=True),
        sa.Column('total_batches_planned', sa.Integer(), nullable=False),
        sa.Column('total_batches_completed', sa.Integer(), nullable=True),
        sa.Column('total_quantity_planned', sa.Float(), nullable=False),
        sa.Column('total_quantity_produced', sa.Float(), nullable=True),
        sa.Column('is_finalized', sa.Boolean(), nullable=True),
        sa.Column('is_active', sa.Boolean(), nullable=True),
        sa.Column('efficiency_percentage', sa.Float(), nullable=True),
        sa.Column('utilization_percentage', sa.Float(), nullable=True),
        sa.Column('on_time_completion_rate', sa.Float(), nullable=True),
        sa.Column('schedule_notes', sa.Text(), nullable=True),
        sa.Column('schedule_adjustments', sa.JSON(), nullable=True),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True),
        sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True),
        sa.Column('finalized_at', sa.DateTime(timezone=True), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_production_schedules_schedule_date'), 'production_schedules', ['schedule_date'], unique=False)
    op.create_index(op.f('ix_production_schedules_tenant_id'), 'production_schedules', ['tenant_id'], unique=False)
    op.create_table('quality_check_templates',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('tenant_id', sa.UUID(), nullable=False),
        sa.Column('name', sa.String(length=255), nullable=False),
        sa.Column('template_code', sa.String(length=100), nullable=True),
        sa.Column('check_type', sa.String(length=50), nullable=False),
        sa.Column('category', sa.String(length=100), nullable=True),
        sa.Column('description', sa.Text(), nullable=True),
        sa.Column('instructions', sa.Text(), nullable=True),
        sa.Column('parameters', sa.JSON(), nullable=True),
        sa.Column('thresholds', sa.JSON(), nullable=True),
        sa.Column('scoring_criteria', sa.JSON(), nullable=True),
        sa.Column('is_active', sa.Boolean(), nullable=True),
        sa.Column('is_required', sa.Boolean(), nullable=True),
        sa.Column('is_critical', sa.Boolean(), nullable=True),
        sa.Column('weight', sa.Float(), nullable=True),
        sa.Column('min_value', sa.Float(), nullable=True),
        sa.Column('max_value', sa.Float(), nullable=True),
        sa.Column('target_value', sa.Float(), nullable=True),
        sa.Column('unit', sa.String(length=20), nullable=True),
        sa.Column('tolerance_percentage', sa.Float(), nullable=True),
        sa.Column('applicable_stages', sa.JSON(), nullable=True),
        sa.Column('created_by', sa.UUID(), nullable=False),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True),
        sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_quality_check_templates_template_code'), 'quality_check_templates', ['template_code'], unique=False)
    op.create_index(op.f('ix_quality_check_templates_tenant_id'), 'quality_check_templates', ['tenant_id'], unique=False)
    op.create_table('quality_checks',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('tenant_id', sa.UUID(), nullable=False),
        sa.Column('batch_id', sa.UUID(), nullable=False),
        sa.Column('template_id', sa.UUID(), nullable=True),
        sa.Column('check_type', sa.String(length=50), nullable=False),
        sa.Column('process_stage', sa.Enum('MIXING', 'PROOFING', 'SHAPING', 'BAKING', 'COOLING', 'PACKAGING', 'FINISHING', name='processstage'), nullable=True),
        sa.Column('check_time', sa.DateTime(timezone=True), nullable=False),
        sa.Column('checker_id', sa.String(length=100), nullable=True),
        sa.Column('quality_score', sa.Float(), nullable=False),
        sa.Column('pass_fail', sa.Boolean(), nullable=False),
        sa.Column('defect_count', sa.Integer(), nullable=False),
        sa.Column('defect_types', sa.JSON(), nullable=True),
        sa.Column('measured_weight', sa.Float(), nullable=True),
        sa.Column('measured_temperature', sa.Float(), nullable=True),
        sa.Column('measured_moisture', sa.Float(), nullable=True),
        sa.Column('measured_dimensions', sa.JSON(), nullable=True),
        sa.Column('stage_specific_data', sa.JSON(), nullable=True),
        sa.Column('target_weight', sa.Float(), nullable=True),
        sa.Column('target_temperature', sa.Float(), nullable=True),
        sa.Column('target_moisture', sa.Float(), nullable=True),
        sa.Column('tolerance_percentage', sa.Float(), nullable=True),
        sa.Column('within_tolerance', sa.Boolean(), nullable=True),
        sa.Column('corrective_action_needed', sa.Boolean(), nullable=True),
        sa.Column('corrective_actions', sa.JSON(), nullable=True),
        sa.Column('template_results', sa.JSON(), nullable=True),
        sa.Column('criteria_scores', sa.JSON(), nullable=True),
        sa.Column('check_notes', sa.Text(), nullable=True),
        sa.Column('photos_urls', sa.JSON(), nullable=True),
        sa.Column('certificate_url', sa.String(length=500), nullable=True),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True),
        sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_quality_checks_batch_id'), 'quality_checks', ['batch_id'], unique=False)
    op.create_index(op.f('ix_quality_checks_process_stage'), 'quality_checks', ['process_stage'], unique=False)
    op.create_index(op.f('ix_quality_checks_template_id'), 'quality_checks', ['template_id'], unique=False)
    op.create_index(op.f('ix_quality_checks_tenant_id'), 'quality_checks', ['tenant_id'], unique=False)
    # ### end Alembic commands ###


def downgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_index(op.f('ix_quality_checks_tenant_id'), table_name='quality_checks')
    op.drop_index(op.f('ix_quality_checks_template_id'), table_name='quality_checks')
    op.drop_index(op.f('ix_quality_checks_process_stage'), table_name='quality_checks')
    op.drop_index(op.f('ix_quality_checks_batch_id'), table_name='quality_checks')
    op.drop_table('quality_checks')
    op.drop_index(op.f('ix_quality_check_templates_tenant_id'), table_name='quality_check_templates')
    op.drop_index(op.f('ix_quality_check_templates_template_code'), table_name='quality_check_templates')
    op.drop_table('quality_check_templates')
    op.drop_index(op.f('ix_production_schedules_tenant_id'), table_name='production_schedules')
    op.drop_index(op.f('ix_production_schedules_schedule_date'), table_name='production_schedules')
    op.drop_table('production_schedules')
    op.drop_index(op.f('ix_production_capacity_tenant_id'), table_name='production_capacity')
    op.drop_index(op.f('ix_production_capacity_date'), table_name='production_capacity')
    op.drop_table('production_capacity')
    op.drop_index(op.f('ix_production_batches_tenant_id'), table_name='production_batches')
    op.drop_index(op.f('ix_production_batches_status'), table_name='production_batches')
    op.drop_index(op.f('ix_production_batches_product_id'), table_name='production_batches')
    op.drop_index(op.f('ix_production_batches_current_process_stage'), table_name='production_batches')
    op.drop_index(op.f('ix_production_batches_batch_number'), table_name='production_batches')
    op.drop_table('production_batches')
    op.drop_index(op.f('ix_equipment_tenant_id'), table_name='equipment')
    op.drop_table('equipment')
    # ### end Alembic commands ###
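Reviewer note: to sanity-check the regenerated baseline locally, it can be applied and rolled back through Alembic's command API. The config path and URL below are placeholders rather than values from this commit, and with the env.py above the environment variables normally take precedence over an inline URL.

from alembic import command
from alembic.config import Config

# Placeholder path and URL; adjust to the service's actual alembic.ini and database.
cfg = Config("alembic.ini")
cfg.set_main_option("sqlalchemy.url",
                    "postgresql+asyncpg://user:password@localhost:5432/production_db")

command.upgrade(cfg, "head")         # apply revision 2fe9ab08dd7b
command.current(cfg, verbose=True)   # confirm the database is at head
command.downgrade(cfg, "base")       # optionally exercise the downgrade path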