Refactor services alembic

This commit is contained in:
Urtzi Alfaro
2025-09-29 19:16:34 +02:00
parent befcc126b0
commit 2712a60a2a
68 changed files with 2659 additions and 2511 deletions

View File

@@ -0,0 +1,84 @@
# ================================================================
# services/inventory/alembic.ini - Alembic Configuration
# ================================================================
[alembic]
# path to migration scripts
script_location = migrations
# template used to generate migration file names
file_template = %%(year)d%%(month).2d%%(day).2d_%%(hour).2d%%(minute).2d_%%(rev)s_%%(slug)s
# sys.path path, will be prepended to sys.path if present.
prepend_sys_path = .
# timezone to use when rendering the date within the migration file
# as well as the filename.
timezone = Europe/Madrid
# max length of characters to apply to the
# "slug" field
truncate_slug_length = 40
# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
revision_environment = false
# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
sourceless = false
# version of a migration file's filename format
version_num_format = %s
# version path separator
version_path_separator = os
# set to 'true' to search source files recursively
# in each "version_locations" directory
recursive_version_locations = false
# the output encoding used when revision files
# are written from script.py.mako
output_encoding = utf-8
# Database URL - will be overridden by environment variable or settings
sqlalchemy.url = postgresql+asyncpg://inventory_user:password@inventory-db-service:5432/inventory_db
[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
# on newly generated revision scripts.
[loggers]
keys = root,sqlalchemy,alembic
[handlers]
keys = console
[formatters]
keys = generic
[logger_root]
level = WARN
handlers = console
qualname =
[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine
[logger_alembic]
level = INFO
handlers =
qualname = alembic
[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic
[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S
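
The sqlalchemy.url above is only a fallback; env.py (below) overrides it from the environment at runtime. As a minimal sketch, the same migrations can be driven programmatically through Alembic's command API — the ini path and URL here are assumptions for illustration, not part of this commit:

import os
from alembic import command
from alembic.config import Config

# Hypothetical path; point this at the alembic.ini shown above.
cfg = Config("services/inventory/alembic.ini")
# env.py prefers DATABASE_URL over the ini fallback, so this wins if set.
os.environ["DATABASE_URL"] = "postgresql+asyncpg://inventory_user:password@localhost:5432/inventory_db"
command.upgrade(cfg, "head")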

View File

@@ -1,62 +1,54 @@
"""
Alembic environment configuration for Inventory Service
"""
"""Alembic environment configuration for inventory service"""
import asyncio
from logging.config import fileConfig
import logging
import os
import sys
from pathlib import Path
from sqlalchemy import pool
from sqlalchemy.engine import Connection
from sqlalchemy.ext.asyncio import async_engine_from_config
from alembic import context
-# Add the app directory to the path
-sys.path.insert(0, str(Path(__file__).parent.parent))
+# Add the service directory to the Python path
+service_path = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
+if service_path not in sys.path:
+    sys.path.insert(0, service_path)
-# Import models to ensure they're registered
-from app.models.inventory import *  # noqa
-from shared.database.base import Base
+# Add shared modules to path
+shared_path = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "..", "shared"))
+if shared_path not in sys.path:
+    sys.path.insert(0, shared_path)
-# this is the Alembic Config object, which provides
-# access to the values within the .ini file in use.
+try:
+    from app.core.config import settings
+    from shared.database.base import Base
+    # Import all models to ensure they are registered with Base.metadata
+    from app.models import *  # Import all models
+except ImportError as e:
+    print(f"Import error in migrations env.py: {e}")
+    print(f"Current Python path: {sys.path}")
+    raise
+# this is the Alembic Config object
config = context.config
-# Interpret the config file for Python logging.
-# This line sets up loggers basically.
+# Set database URL from settings if not already set
+database_url = os.getenv('DATABASE_URL') or getattr(settings, 'DATABASE_URL', None)
+if database_url:
+    config.set_main_option("sqlalchemy.url", database_url)
+# Interpret the config file for Python logging
if config.config_file_name is not None:
    fileConfig(config.config_file_name)
-# Set the SQLAlchemy URL from environment variable if available
-database_url = os.getenv('INVENTORY_DATABASE_URL')
-if database_url:
-    config.set_main_option('sqlalchemy.url', database_url)
-# add your model's MetaData object here
-# for 'autogenerate' support
+# Set target metadata
target_metadata = Base.metadata
-# other values from the config, defined by the needs of env.py,
-# can be acquired:
-# my_important_option = config.get_main_option("my_important_option")
-# ... etc.
def run_migrations_offline() -> None:
-    """Run migrations in 'offline' mode.
-    This configures the context with just a URL
-    and not an Engine, though an Engine is acceptable
-    here as well. By skipping the Engine creation
-    we don't even need a DBAPI to be available.
-    Calls to context.execute() here emit the given string to the
-    script output.
-    """
+    """Run migrations in 'offline' mode."""
url = config.get_main_option("sqlalchemy.url")
context.configure(
url=url,
@@ -70,9 +62,7 @@ def run_migrations_offline() -> None:
with context.begin_transaction():
context.run_migrations()
def do_run_migrations(connection: Connection) -> None:
"""Run migrations with database connection"""
context.configure(
connection=connection,
target_metadata=target_metadata,
@@ -83,9 +73,8 @@ def do_run_migrations(connection: Connection) -> None:
with context.begin_transaction():
context.run_migrations()
async def run_async_migrations() -> None:
-    """Run migrations in async mode"""
+    """Run migrations in 'online' mode."""
connectable = async_engine_from_config(
config.get_section(config.config_ini_section, {}),
prefix="sqlalchemy.",
@@ -97,13 +86,11 @@ async def run_async_migrations() -> None:
await connectable.dispose()
def run_migrations_online() -> None:
"""Run migrations in 'online' mode."""
asyncio.run(run_async_migrations())
if context.is_offline_mode():
run_migrations_offline()
else:
run_migrations_online()
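
A hedged sketch of generating a new revision against this environment — autogenerate compares Base.metadata (imported above) against the live schema, and the async engine is handled by run_async_migrations. The ini path and message are assumptions:

from alembic import command
from alembic.config import Config

cfg = Config("services/inventory/alembic.ini")  # assumed location
# Hypothetical message; the new file is named per file_template in alembic.ini.
command.revision(cfg, message="add supplier table", autogenerate=True)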

View File

@@ -5,15 +5,17 @@ Revises: ${down_revision | comma,n}
Create Date: ${create_date}
"""
+from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}
# revision identifiers, used by Alembic.
-revision = ${repr(up_revision)}
-down_revision = ${repr(down_revision)}
-branch_labels = ${repr(branch_labels)}
-depends_on = ${repr(depends_on)}
+revision: str = ${repr(up_revision)}
+down_revision: Union[str, None] = ${repr(down_revision)}
+branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
+depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}
def upgrade() -> None:
@@ -21,4 +23,4 @@ def upgrade() -> None:
def downgrade() -> None:
${downgrades if downgrades else "pass"}
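
For illustration, a revision rendered from this template would begin roughly as follows (revision IDs and message are hypothetical):

"""add supplier table
Revision ID: a1b2c3d4e5f6
Revises: 001_initial_inventory
Create Date: 2025-09-29 19:20:00.000000
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa

# revision identifiers, used by Alembic.
revision: str = 'a1b2c3d4e5f6'
down_revision: Union[str, None] = '001_initial_inventory'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None

def upgrade() -> None:
    pass

def downgrade() -> None:
    pass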

View File

@@ -1,223 +1,28 @@
"""Initial inventory tables
"""Initial inventory service tables
Revision ID: 001
Revises:
Create Date: 2025-01-15 10:00:00.000000
Revision ID: 001_initial_inventory
Create Date: 2024-01-01 12:00:00.000000
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = '001'
down_revision = None
branch_labels = None
depends_on = None
revision: str = '001_initial_inventory'
down_revision: Union[str, None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
# Create enum types
op.execute("""
CREATE TYPE unitofmeasure AS ENUM (
'kg', 'g', 'l', 'ml', 'units', 'pcs', 'pkg', 'bags', 'boxes'
);
""")
op.execute("""
CREATE TYPE ingredientcategory AS ENUM (
'flour', 'yeast', 'dairy', 'eggs', 'sugar', 'fats', 'salt',
'spices', 'additives', 'packaging', 'cleaning', 'other'
);
""")
op.execute("""
CREATE TYPE stockmovementtype AS ENUM (
'purchase', 'production_use', 'adjustment', 'waste',
'transfer', 'return', 'initial_stock'
);
""")
# Create ingredients table
op.create_table(
'ingredients',
sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
sa.Column('tenant_id', postgresql.UUID(as_uuid=True), nullable=False),
sa.Column('name', sa.String(255), nullable=False),
sa.Column('sku', sa.String(100), nullable=True),
sa.Column('barcode', sa.String(50), nullable=True),
sa.Column('category', sa.Enum('flour', 'yeast', 'dairy', 'eggs', 'sugar', 'fats', 'salt', 'spices', 'additives', 'packaging', 'cleaning', 'other', name='ingredientcategory'), nullable=False),
sa.Column('subcategory', sa.String(100), nullable=True),
sa.Column('description', sa.Text(), nullable=True),
sa.Column('brand', sa.String(100), nullable=True),
sa.Column('unit_of_measure', sa.Enum('kg', 'g', 'l', 'ml', 'units', 'pcs', 'pkg', 'bags', 'boxes', name='unitofmeasure'), nullable=False),
sa.Column('package_size', sa.Float(), nullable=True),
sa.Column('average_cost', sa.Numeric(10, 2), nullable=True),
sa.Column('last_purchase_price', sa.Numeric(10, 2), nullable=True),
sa.Column('standard_cost', sa.Numeric(10, 2), nullable=True),
sa.Column('low_stock_threshold', sa.Float(), nullable=False, server_default='10.0'),
sa.Column('reorder_point', sa.Float(), nullable=False, server_default='20.0'),
sa.Column('reorder_quantity', sa.Float(), nullable=False, server_default='50.0'),
sa.Column('max_stock_level', sa.Float(), nullable=True),
sa.Column('requires_refrigeration', sa.Boolean(), nullable=True, server_default='false'),
sa.Column('requires_freezing', sa.Boolean(), nullable=True, server_default='false'),
sa.Column('storage_temperature_min', sa.Float(), nullable=True),
sa.Column('storage_temperature_max', sa.Float(), nullable=True),
sa.Column('storage_humidity_max', sa.Float(), nullable=True),
sa.Column('shelf_life_days', sa.Integer(), nullable=True),
sa.Column('storage_instructions', sa.Text(), nullable=True),
sa.Column('is_active', sa.Boolean(), nullable=True, server_default='true'),
sa.Column('is_perishable', sa.Boolean(), nullable=True, server_default='false'),
sa.Column('allergen_info', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
sa.Column('created_by', postgresql.UUID(as_uuid=True), nullable=True),
sa.PrimaryKeyConstraint('id')
)
# Create stock table
op.create_table(
'stock',
sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
sa.Column('tenant_id', postgresql.UUID(as_uuid=True), nullable=False),
sa.Column('ingredient_id', postgresql.UUID(as_uuid=True), nullable=False),
sa.Column('batch_number', sa.String(100), nullable=True),
sa.Column('lot_number', sa.String(100), nullable=True),
sa.Column('supplier_batch_ref', sa.String(100), nullable=True),
sa.Column('current_quantity', sa.Float(), nullable=False, server_default='0.0'),
sa.Column('reserved_quantity', sa.Float(), nullable=False, server_default='0.0'),
sa.Column('available_quantity', sa.Float(), nullable=False, server_default='0.0'),
sa.Column('received_date', sa.DateTime(timezone=True), nullable=True),
sa.Column('expiration_date', sa.DateTime(timezone=True), nullable=True),
sa.Column('best_before_date', sa.DateTime(timezone=True), nullable=True),
sa.Column('unit_cost', sa.Numeric(10, 2), nullable=True),
sa.Column('total_cost', sa.Numeric(10, 2), nullable=True),
sa.Column('storage_location', sa.String(100), nullable=True),
sa.Column('warehouse_zone', sa.String(50), nullable=True),
sa.Column('shelf_position', sa.String(50), nullable=True),
sa.Column('is_available', sa.Boolean(), nullable=True, server_default='true'),
sa.Column('is_expired', sa.Boolean(), nullable=True, server_default='false'),
sa.Column('quality_status', sa.String(20), nullable=True, server_default='good'),
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
sa.ForeignKeyConstraint(['ingredient_id'], ['ingredients.id'], ),
sa.PrimaryKeyConstraint('id')
)
# Create stock_movements table
op.create_table(
'stock_movements',
sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
sa.Column('tenant_id', postgresql.UUID(as_uuid=True), nullable=False),
sa.Column('ingredient_id', postgresql.UUID(as_uuid=True), nullable=False),
sa.Column('stock_id', postgresql.UUID(as_uuid=True), nullable=True),
sa.Column('movement_type', sa.Enum('purchase', 'production_use', 'adjustment', 'waste', 'transfer', 'return', 'initial_stock', name='stockmovementtype'), nullable=False),
sa.Column('quantity', sa.Float(), nullable=False),
sa.Column('unit_cost', sa.Numeric(10, 2), nullable=True),
sa.Column('total_cost', sa.Numeric(10, 2), nullable=True),
sa.Column('quantity_before', sa.Float(), nullable=True),
sa.Column('quantity_after', sa.Float(), nullable=True),
sa.Column('reference_number', sa.String(100), nullable=True),
sa.Column('supplier_id', postgresql.UUID(as_uuid=True), nullable=True),
sa.Column('notes', sa.Text(), nullable=True),
sa.Column('reason_code', sa.String(50), nullable=True),
sa.Column('movement_date', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
sa.Column('created_by', postgresql.UUID(as_uuid=True), nullable=True),
sa.ForeignKeyConstraint(['ingredient_id'], ['ingredients.id'], ),
sa.ForeignKeyConstraint(['stock_id'], ['stock.id'], ),
sa.PrimaryKeyConstraint('id')
)
# Create stock_alerts table
op.create_table(
'stock_alerts',
sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
sa.Column('tenant_id', postgresql.UUID(as_uuid=True), nullable=False),
sa.Column('ingredient_id', postgresql.UUID(as_uuid=True), nullable=False),
sa.Column('stock_id', postgresql.UUID(as_uuid=True), nullable=True),
sa.Column('alert_type', sa.String(50), nullable=False),
sa.Column('severity', sa.String(20), nullable=False, server_default='medium'),
sa.Column('title', sa.String(255), nullable=False),
sa.Column('message', sa.Text(), nullable=False),
sa.Column('current_quantity', sa.Float(), nullable=True),
sa.Column('threshold_value', sa.Float(), nullable=True),
sa.Column('expiration_date', sa.DateTime(timezone=True), nullable=True),
sa.Column('is_active', sa.Boolean(), nullable=True, server_default='true'),
sa.Column('is_acknowledged', sa.Boolean(), nullable=True, server_default='false'),
sa.Column('acknowledged_by', postgresql.UUID(as_uuid=True), nullable=True),
sa.Column('acknowledged_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('is_resolved', sa.Boolean(), nullable=True, server_default='false'),
sa.Column('resolved_by', postgresql.UUID(as_uuid=True), nullable=True),
sa.Column('resolved_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('resolution_notes', sa.Text(), nullable=True),
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
sa.ForeignKeyConstraint(['ingredient_id'], ['ingredients.id'], ),
sa.ForeignKeyConstraint(['stock_id'], ['stock.id'], ),
sa.PrimaryKeyConstraint('id')
)
# Create indexes for ingredients table
op.create_index('idx_ingredients_tenant_name', 'ingredients', ['tenant_id', 'name'], unique=True)
op.create_index('idx_ingredients_tenant_sku', 'ingredients', ['tenant_id', 'sku'])
op.create_index('idx_ingredients_barcode', 'ingredients', ['barcode'])
op.create_index('idx_ingredients_category', 'ingredients', ['tenant_id', 'category', 'is_active'])
op.create_index('idx_ingredients_stock_levels', 'ingredients', ['tenant_id', 'low_stock_threshold', 'reorder_point'])
# Create indexes for stock table
op.create_index('idx_stock_tenant_ingredient', 'stock', ['tenant_id', 'ingredient_id'])
op.create_index('idx_stock_expiration', 'stock', ['tenant_id', 'expiration_date', 'is_available'])
op.create_index('idx_stock_batch', 'stock', ['tenant_id', 'batch_number'])
op.create_index('idx_stock_low_levels', 'stock', ['tenant_id', 'current_quantity', 'is_available'])
op.create_index('idx_stock_quality', 'stock', ['tenant_id', 'quality_status', 'is_available'])
# Create indexes for stock_movements table
op.create_index('idx_movements_tenant_date', 'stock_movements', ['tenant_id', 'movement_date'])
op.create_index('idx_movements_tenant_ingredient', 'stock_movements', ['tenant_id', 'ingredient_id', 'movement_date'])
op.create_index('idx_movements_type', 'stock_movements', ['tenant_id', 'movement_type', 'movement_date'])
op.create_index('idx_movements_reference', 'stock_movements', ['reference_number'])
op.create_index('idx_movements_supplier', 'stock_movements', ['supplier_id', 'movement_date'])
# Create indexes for stock_alerts table
op.create_index('idx_alerts_tenant_active', 'stock_alerts', ['tenant_id', 'is_active', 'created_at'])
op.create_index('idx_alerts_type_severity', 'stock_alerts', ['alert_type', 'severity', 'is_active'])
op.create_index('idx_alerts_ingredient', 'stock_alerts', ['ingredient_id', 'is_active'])
op.create_index('idx_alerts_unresolved', 'stock_alerts', ['tenant_id', 'is_resolved', 'is_active'])
+    # TODO: Add table creation statements for inventory service
+    # This is a placeholder migration - replace with actual table definitions
+    pass
def downgrade() -> None:
# Drop indexes
op.drop_index('idx_alerts_unresolved', table_name='stock_alerts')
op.drop_index('idx_alerts_ingredient', table_name='stock_alerts')
op.drop_index('idx_alerts_type_severity', table_name='stock_alerts')
op.drop_index('idx_alerts_tenant_active', table_name='stock_alerts')
op.drop_index('idx_movements_supplier', table_name='stock_movements')
op.drop_index('idx_movements_reference', table_name='stock_movements')
op.drop_index('idx_movements_type', table_name='stock_movements')
op.drop_index('idx_movements_tenant_ingredient', table_name='stock_movements')
op.drop_index('idx_movements_tenant_date', table_name='stock_movements')
op.drop_index('idx_stock_quality', table_name='stock')
op.drop_index('idx_stock_low_levels', table_name='stock')
op.drop_index('idx_stock_batch', table_name='stock')
op.drop_index('idx_stock_expiration', table_name='stock')
op.drop_index('idx_stock_tenant_ingredient', table_name='stock')
op.drop_index('idx_ingredients_stock_levels', table_name='ingredients')
op.drop_index('idx_ingredients_category', table_name='ingredients')
op.drop_index('idx_ingredients_barcode', table_name='ingredients')
op.drop_index('idx_ingredients_tenant_sku', table_name='ingredients')
op.drop_index('idx_ingredients_tenant_name', table_name='ingredients')
# Drop tables
op.drop_table('stock_alerts')
op.drop_table('stock_movements')
op.drop_table('stock')
op.drop_table('ingredients')
# Drop enum types
op.execute("DROP TYPE stockmovementtype;")
op.execute("DROP TYPE ingredientcategory;")
op.execute("DROP TYPE unitofmeasure;")
+    # TODO: Add table drop statements for inventory service
+    pass
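
The indexes above are consistently tenant-first, matching the service's query pattern: filter by tenant_id, then the range or status columns. A sketch of an expiring-stock lookup that idx_stock_expiration can serve, using lightweight table constructs with names taken from the DDL above:

from datetime import datetime, timedelta, timezone
import sqlalchemy as sa

stock = sa.table(
    "stock",
    sa.column("tenant_id"),
    sa.column("expiration_date"),
    sa.column("is_available"),
    sa.column("current_quantity"),
)

def expiring_stock_query(tenant_id, within_days: int = 7):
    # Predicates follow the index column order: tenant_id, expiration_date, is_available.
    cutoff = datetime.now(timezone.utc) + timedelta(days=within_days)
    return (
        sa.select(stock.c.current_quantity)
        .where(stock.c.tenant_id == tenant_id)
        .where(stock.c.expiration_date <= cutoff)
        .where(stock.c.is_available.is_(True))
    )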

View File

@@ -1,95 +0,0 @@
"""Add finished products support
Revision ID: 002
Revises: 001
Create Date: 2025-01-15 10:30:00.000000
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = '002'
down_revision = '001'
branch_labels = None
depends_on = None
def upgrade() -> None:
# Create new enum types for finished products
op.execute("""
CREATE TYPE producttype AS ENUM (
'ingredient', 'finished_product'
);
""")
op.execute("""
CREATE TYPE productcategory AS ENUM (
'bread', 'croissants', 'pastries', 'cakes', 'cookies',
'muffins', 'sandwiches', 'seasonal', 'beverages', 'other_products'
);
""")
# Add new columns to ingredients table
op.add_column('ingredients', sa.Column('product_type',
sa.Enum('ingredient', 'finished_product', name='producttype'),
nullable=False, server_default='ingredient'))
op.add_column('ingredients', sa.Column('product_category',
sa.Enum('bread', 'croissants', 'pastries', 'cakes', 'cookies', 'muffins', 'sandwiches', 'seasonal', 'beverages', 'other_products', name='productcategory'),
nullable=True))
# Rename existing category column to ingredient_category
op.alter_column('ingredients', 'category', new_column_name='ingredient_category')
# Add finished product specific columns
op.add_column('ingredients', sa.Column('supplier_name', sa.String(200), nullable=True))
op.add_column('ingredients', sa.Column('display_life_hours', sa.Integer(), nullable=True))
op.add_column('ingredients', sa.Column('best_before_hours', sa.Integer(), nullable=True))
op.add_column('ingredients', sa.Column('central_baker_product_code', sa.String(100), nullable=True))
op.add_column('ingredients', sa.Column('delivery_days', sa.String(20), nullable=True))
op.add_column('ingredients', sa.Column('minimum_order_quantity', sa.Float(), nullable=True))
op.add_column('ingredients', sa.Column('pack_size', sa.Integer(), nullable=True))
op.add_column('ingredients', sa.Column('nutritional_info', postgresql.JSONB(astext_type=sa.Text()), nullable=True))
# Update existing indexes and create new ones
op.drop_index('idx_ingredients_category', table_name='ingredients')
# Create new indexes for enhanced functionality
op.create_index('idx_ingredients_product_type', 'ingredients', ['tenant_id', 'product_type', 'is_active'])
op.create_index('idx_ingredients_ingredient_category', 'ingredients', ['tenant_id', 'ingredient_category', 'is_active'])
op.create_index('idx_ingredients_product_category', 'ingredients', ['tenant_id', 'product_category', 'is_active'])
op.create_index('idx_ingredients_central_baker', 'ingredients', ['tenant_id', 'supplier_name', 'product_type'])
def downgrade() -> None:
# Drop new indexes
op.drop_index('idx_ingredients_central_baker', table_name='ingredients')
op.drop_index('idx_ingredients_product_category', table_name='ingredients')
op.drop_index('idx_ingredients_ingredient_category', table_name='ingredients')
op.drop_index('idx_ingredients_product_type', table_name='ingredients')
# Remove finished product specific columns
op.drop_column('ingredients', 'nutritional_info')
op.drop_column('ingredients', 'pack_size')
op.drop_column('ingredients', 'minimum_order_quantity')
op.drop_column('ingredients', 'delivery_days')
op.drop_column('ingredients', 'central_baker_product_code')
op.drop_column('ingredients', 'best_before_hours')
op.drop_column('ingredients', 'display_life_hours')
op.drop_column('ingredients', 'supplier_name')
# Remove new columns
op.drop_column('ingredients', 'product_category')
op.drop_column('ingredients', 'product_type')
# Rename ingredient_category back to category
op.alter_column('ingredients', 'ingredient_category', new_column_name='category')
# Recreate original category index
op.create_index('idx_ingredients_category', 'ingredients', ['tenant_id', 'category', 'is_active'])
# Drop new enum types
op.execute("DROP TYPE productcategory;")
op.execute("DROP TYPE producttype;")

View File

@@ -1,114 +0,0 @@
"""Add production stage enum and columns
Revision ID: 003
Revises: 002
Create Date: 2025-01-17 15:30:00.000000
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = '003'
down_revision = '002'
branch_labels = None
depends_on = None
def upgrade() -> None:
# Create ProductionStage enum type
op.execute("""
CREATE TYPE productionstage AS ENUM (
'raw_ingredient', 'par_baked', 'fully_baked',
'prepared_dough', 'frozen_product'
);
""")
# Add production_stage column to stock table
op.add_column('stock', sa.Column('production_stage',
sa.Enum('raw_ingredient', 'par_baked', 'fully_baked', 'prepared_dough', 'frozen_product', name='productionstage'),
nullable=False, server_default='raw_ingredient'))
# Add transformation_reference column to stock table
op.add_column('stock', sa.Column('transformation_reference', sa.String(100), nullable=True))
# Add stage-specific expiration tracking columns
op.add_column('stock', sa.Column('original_expiration_date', sa.DateTime(timezone=True), nullable=True))
op.add_column('stock', sa.Column('transformation_date', sa.DateTime(timezone=True), nullable=True))
op.add_column('stock', sa.Column('final_expiration_date', sa.DateTime(timezone=True), nullable=True))
# Create product_transformations table
op.create_table(
'product_transformations',
sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
sa.Column('tenant_id', postgresql.UUID(as_uuid=True), nullable=False),
sa.Column('transformation_reference', sa.String(100), nullable=False),
sa.Column('source_ingredient_id', postgresql.UUID(as_uuid=True), nullable=False),
sa.Column('target_ingredient_id', postgresql.UUID(as_uuid=True), nullable=False),
sa.Column('source_stage', sa.Enum('raw_ingredient', 'par_baked', 'fully_baked', 'prepared_dough', 'frozen_product', name='productionstage'), nullable=False),
sa.Column('target_stage', sa.Enum('raw_ingredient', 'par_baked', 'fully_baked', 'prepared_dough', 'frozen_product', name='productionstage'), nullable=False),
sa.Column('source_quantity', sa.Float(), nullable=False),
sa.Column('target_quantity', sa.Float(), nullable=False),
sa.Column('conversion_ratio', sa.Float(), nullable=False, server_default='1.0'),
sa.Column('expiration_calculation_method', sa.String(50), nullable=False, server_default='days_from_transformation'),
sa.Column('expiration_days_offset', sa.Integer(), nullable=True),
sa.Column('transformation_date', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
sa.Column('process_notes', sa.Text(), nullable=True),
sa.Column('performed_by', postgresql.UUID(as_uuid=True), nullable=True),
sa.Column('source_batch_numbers', sa.Text(), nullable=True),
sa.Column('target_batch_number', sa.String(100), nullable=True),
sa.Column('is_completed', sa.Boolean(), nullable=True, server_default='true'),
sa.Column('is_reversed', sa.Boolean(), nullable=True, server_default='false'),
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
sa.Column('created_by', postgresql.UUID(as_uuid=True), nullable=True),
sa.ForeignKeyConstraint(['source_ingredient_id'], ['ingredients.id'], ),
sa.ForeignKeyConstraint(['target_ingredient_id'], ['ingredients.id'], ),
sa.PrimaryKeyConstraint('id')
)
# Add new indexes for enhanced functionality
op.create_index('idx_stock_production_stage', 'stock', ['tenant_id', 'production_stage', 'is_available'])
op.create_index('idx_stock_transformation', 'stock', ['tenant_id', 'transformation_reference'])
op.create_index('idx_stock_final_expiration', 'stock', ['tenant_id', 'final_expiration_date', 'is_available'])
# Create indexes for product_transformations table
op.create_index('idx_transformations_tenant_date', 'product_transformations', ['tenant_id', 'transformation_date'])
op.create_index('idx_transformations_reference', 'product_transformations', ['transformation_reference'])
op.create_index('idx_transformations_source', 'product_transformations', ['tenant_id', 'source_ingredient_id'])
op.create_index('idx_transformations_target', 'product_transformations', ['tenant_id', 'target_ingredient_id'])
op.create_index('idx_transformations_stages', 'product_transformations', ['source_stage', 'target_stage'])
# Update existing stockmovementtype enum to include TRANSFORMATION
op.execute("ALTER TYPE stockmovementtype ADD VALUE 'transformation';")
def downgrade() -> None:
# Drop indexes for product_transformations
op.drop_index('idx_transformations_stages', table_name='product_transformations')
op.drop_index('idx_transformations_target', table_name='product_transformations')
op.drop_index('idx_transformations_source', table_name='product_transformations')
op.drop_index('idx_transformations_reference', table_name='product_transformations')
op.drop_index('idx_transformations_tenant_date', table_name='product_transformations')
# Drop new stock indexes
op.drop_index('idx_stock_final_expiration', table_name='stock')
op.drop_index('idx_stock_transformation', table_name='stock')
op.drop_index('idx_stock_production_stage', table_name='stock')
# Drop product_transformations table
op.drop_table('product_transformations')
# Remove new columns from stock table
op.drop_column('stock', 'final_expiration_date')
op.drop_column('stock', 'transformation_date')
op.drop_column('stock', 'original_expiration_date')
op.drop_column('stock', 'transformation_reference')
op.drop_column('stock', 'production_stage')
# Drop ProductionStage enum type
op.execute("DROP TYPE productionstage;")
# Note: Cannot easily remove 'transformation' from existing enum in PostgreSQL
# This would require recreating the enum and updating all references
# For now, we leave the enum value as it won't cause issues
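
Note that ALTER TYPE ... ADD VALUE cannot run inside a transaction block on PostgreSQL versions before 12, and Alembic normally runs each migration in one. If older servers must be supported, a hedged sketch using Alembic's autocommit_block (available since Alembic 1.2):

from alembic import op

def upgrade() -> None:
    # ... table and column changes as above ...
    with op.get_context().autocommit_block():
        op.execute("ALTER TYPE stockmovementtype ADD VALUE 'transformation';")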

View File

@@ -1,104 +0,0 @@
"""Move storage configuration from ingredient to batch level
Revision ID: 004_move_storage_config_to_batch
Revises: 003
Create Date: 2025-01-17 10:00:00.000000
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '004_move_storage_config_to_batch'
down_revision = '003'
branch_labels = None
depends_on = None
def upgrade():
"""Move storage configuration from ingredients to stock batches"""
# Add batch-specific storage columns to stock table
op.add_column('stock', sa.Column('requires_refrigeration', sa.Boolean(), server_default='false'))
op.add_column('stock', sa.Column('requires_freezing', sa.Boolean(), server_default='false'))
op.add_column('stock', sa.Column('storage_temperature_min', sa.Float(), nullable=True))
op.add_column('stock', sa.Column('storage_temperature_max', sa.Float(), nullable=True))
op.add_column('stock', sa.Column('storage_humidity_max', sa.Float(), nullable=True))
op.add_column('stock', sa.Column('shelf_life_days', sa.Integer(), nullable=True))
op.add_column('stock', sa.Column('storage_instructions', sa.Text(), nullable=True))
# Migrate existing data from ingredients to stock batches
# This will copy the ingredient-level storage config to all existing stock batches
op.execute("""
UPDATE stock
SET
requires_refrigeration = i.requires_refrigeration,
requires_freezing = i.requires_freezing,
storage_temperature_min = i.storage_temperature_min,
storage_temperature_max = i.storage_temperature_max,
storage_humidity_max = i.storage_humidity_max,
shelf_life_days = i.shelf_life_days,
storage_instructions = i.storage_instructions
FROM ingredients i
WHERE stock.ingredient_id = i.id
""")
# Remove storage configuration columns from the ingredients table;
# shelf_life_days stays on ingredients as the ingredient-level default
op.drop_column('ingredients', 'requires_refrigeration')
op.drop_column('ingredients', 'requires_freezing')
op.drop_column('ingredients', 'storage_temperature_min')
op.drop_column('ingredients', 'storage_temperature_max')
op.drop_column('ingredients', 'storage_humidity_max')
op.drop_column('ingredients', 'storage_instructions')
def downgrade():
"""Revert storage configuration back to ingredient level"""
# Add storage configuration columns back to ingredients table
op.add_column('ingredients', sa.Column('requires_refrigeration', sa.Boolean(), server_default='false'))
op.add_column('ingredients', sa.Column('requires_freezing', sa.Boolean(), server_default='false'))
op.add_column('ingredients', sa.Column('storage_temperature_min', sa.Float(), nullable=True))
op.add_column('ingredients', sa.Column('storage_temperature_max', sa.Float(), nullable=True))
op.add_column('ingredients', sa.Column('storage_humidity_max', sa.Float(), nullable=True))
op.add_column('ingredients', sa.Column('storage_instructions', sa.Text(), nullable=True))
# Migrate data back from stock to ingredients (use most common values per ingredient)
op.execute("""
UPDATE ingredients
SET
requires_refrigeration = COALESCE(
(SELECT bool_or(s.requires_refrigeration) FROM stock s WHERE s.ingredient_id = ingredients.id),
false
),
requires_freezing = COALESCE(
(SELECT bool_or(s.requires_freezing) FROM stock s WHERE s.ingredient_id = ingredients.id),
false
),
storage_temperature_min = (
SELECT MIN(s.storage_temperature_min) FROM stock s WHERE s.ingredient_id = ingredients.id
),
storage_temperature_max = (
SELECT MAX(s.storage_temperature_max) FROM stock s WHERE s.ingredient_id = ingredients.id
),
storage_humidity_max = (
SELECT MAX(s.storage_humidity_max) FROM stock s WHERE s.ingredient_id = ingredients.id
),
storage_instructions = (
SELECT s.storage_instructions FROM stock s
WHERE s.ingredient_id = ingredients.id
AND s.storage_instructions IS NOT NULL
LIMIT 1
)
""")
# Remove batch-specific storage columns from stock table
op.drop_column('stock', 'requires_refrigeration')
op.drop_column('stock', 'requires_freezing')
op.drop_column('stock', 'storage_temperature_min')
op.drop_column('stock', 'storage_temperature_max')
op.drop_column('stock', 'storage_humidity_max')
op.drop_column('stock', 'shelf_life_days')
op.drop_column('stock', 'storage_instructions')
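
The booleans added here rely on server defaults to populate existing rows (a Python-side default= applies only to new ORM inserts and would leave old rows NULL). Where NOT NULL is eventually wanted, the usual pattern is add-nullable, backfill, then constrain; a sketch with a hypothetical helper:

import sqlalchemy as sa
from alembic import op

def add_boolean_with_backfill(table: str, column: str) -> None:
    # Add as nullable first so the ALTER is cheap, then backfill and constrain.
    op.add_column(table, sa.Column(column, sa.Boolean(), nullable=True))
    op.execute(f"UPDATE {table} SET {column} = false WHERE {column} IS NULL")
    op.alter_column(table, column, nullable=False, server_default=sa.text("false"))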