Create new services: inventory, recipes, suppliers
services/inventory/migrations/alembic.ini (new file, 93 lines)
@@ -0,0 +1,93 @@
# A generic, single database configuration.

[alembic]
# path to migration scripts
script_location = migrations

# template used to generate migration files
# file_template = %%(rev)s_%%(slug)s

# sys.path path, will be prepended to sys.path if present.
# defaults to the current working directory.
prepend_sys_path = .

# timezone to use when rendering the date within the migration file
# as well as the filename.
# If specified, requires the python-dateutil library that can be
# installed by adding `alembic[tz]` to the pip requirements
# string value is passed to dateutil.tz.gettz()
# leave blank for localtime
# timezone =

# max length of characters to apply to the
# "slug" field
# truncate_slug_length = 40

# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false

# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false

# version number format
# Uses Alembic datetime format
version_num_format = %%(year)d%%(month).2d%%(day).2d_%%(hour).2d%%(minute).2d_%%(second).2d

# version path separator
version_path_separator = /

# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8

sqlalchemy.url = postgresql+asyncpg://inventory_user:inventory_pass123@inventory-db:5432/inventory_db


[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
# on newly generated revision scripts. See the documentation for further
# detail and examples

# format using "black" - use the console_scripts runner, against the "black" entrypoint
# hooks = black
# black.type = console_scripts
# black.entrypoint = black
# black.options = -l 79 REVISION_SCRIPT_FILENAME

# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic

[handlers]
keys = console

[formatters]
keys = generic

[logger_root]
level = WARN
handlers = console
qualname =

[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine

[logger_alembic]
level = INFO
handlers =
qualname = alembic

[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic

[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S
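Note: env.py (below) overrides sqlalchemy.url from the INVENTORY_DATABASE_URL environment variable, so the credentials hardcoded above act only as a fallback. As a minimal sketch (not part of the commit), the migrations could be applied programmatically through Alembic's command API; the working directory, URL, and importability of the service's dependencies are assumptions here:

import os
from alembic import command
from alembic.config import Config

# Assumes the current directory is services/inventory, so that
# script_location = migrations in alembic.ini resolves correctly.
os.environ["INVENTORY_DATABASE_URL"] = "postgresql+asyncpg://inventory_user:inventory_pass123@localhost:5432/inventory_db"
cfg = Config("migrations/alembic.ini")
command.upgrade(cfg, "head")  # apply all pending revisions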
services/inventory/migrations/env.py (new file, 109 lines)
@@ -0,0 +1,109 @@
"""
Alembic environment configuration for Inventory Service
"""

import asyncio
from logging.config import fileConfig
import os
import sys
from pathlib import Path

from sqlalchemy import pool
from sqlalchemy.engine import Connection
from sqlalchemy.ext.asyncio import async_engine_from_config

from alembic import context

# Add the app directory to the path
sys.path.insert(0, str(Path(__file__).parent.parent))

# Import models to ensure they're registered
from app.models.inventory import *  # noqa
from shared.database.base import Base

# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

# Interpret the config file for Python logging.
# This line sets up loggers basically.
if config.config_file_name is not None:
    fileConfig(config.config_file_name)

# Set the SQLAlchemy URL from environment variable if available
database_url = os.getenv('INVENTORY_DATABASE_URL')
if database_url:
    config.set_main_option('sqlalchemy.url', database_url)

# add your model's MetaData object here
# for 'autogenerate' support
target_metadata = Base.metadata

# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.


def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode.

    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well. By skipping the Engine creation
    we don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the
    script output.
    """
    url = config.get_main_option("sqlalchemy.url")
    context.configure(
        url=url,
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
        compare_type=True,
        compare_server_default=True,
    )

    with context.begin_transaction():
        context.run_migrations()


def do_run_migrations(connection: Connection) -> None:
    """Run migrations with a database connection."""
    context.configure(
        connection=connection,
        target_metadata=target_metadata,
        compare_type=True,
        compare_server_default=True,
    )

    with context.begin_transaction():
        context.run_migrations()


async def run_async_migrations() -> None:
    """Run migrations in async mode."""
    connectable = async_engine_from_config(
        config.get_section(config.config_ini_section, {}),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )

    async with connectable.connect() as connection:
        await connection.run_sync(do_run_migrations)

    await connectable.dispose()


def run_migrations_online() -> None:
    """Run migrations in 'online' mode."""
    asyncio.run(run_async_migrations())


if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
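A quick way to verify the wiring above is to read back the schema's current revision. This sketch (an illustration, not part of the commit) reuses the same run_sync bridge that env.py relies on for do_run_migrations:

import asyncio
import os

from alembic.runtime.migration import MigrationContext
from sqlalchemy.ext.asyncio import create_async_engine


async def current_revision(url: str):
    engine = create_async_engine(url)
    try:
        async with engine.connect() as conn:
            # run_sync hands a synchronous Connection to the callback,
            # exactly as env.py does for do_run_migrations.
            return await conn.run_sync(
                lambda sync_conn: MigrationContext.configure(sync_conn).get_current_revision()
            )
    finally:
        await engine.dispose()


# prints e.g. '002' once both migrations below have been applied
print(asyncio.run(current_revision(os.environ["INVENTORY_DATABASE_URL"])))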
services/inventory/migrations/script.py.mako (new file, 24 lines)
@@ -0,0 +1,24 @@
"""${message}

Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}

"""
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}

# revision identifiers, used by Alembic.
revision = ${repr(up_revision)}
down_revision = ${repr(down_revision)}
branch_labels = ${repr(branch_labels)}
depends_on = ${repr(depends_on)}


def upgrade() -> None:
    ${upgrades if upgrades else "pass"}


def downgrade() -> None:
    ${downgrades if downgrades else "pass"}
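Revision files such as the two below are rendered from this template. As a hedged sketch of generating a new one against the models imported in env.py (the message text and use of autogenerate are illustrative):

from alembic import command
from alembic.config import Config

cfg = Config("migrations/alembic.ini")  # assumed cwd: services/inventory
# Renders script.py.mako into migrations/versions/, filling upgrade()
# and downgrade() from the diff between Base.metadata and the live schema.
command.revision(cfg, message="describe your change here", autogenerate=True)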
@@ -0,0 +1,223 @@
"""Initial inventory tables

Revision ID: 001
Revises:
Create Date: 2025-01-15 10:00:00.000000

"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql

# revision identifiers, used by Alembic.
revision = '001'
down_revision = None
branch_labels = None
depends_on = None


def upgrade() -> None:
    # Create enum types
    op.execute("""
        CREATE TYPE unitofmeasure AS ENUM (
            'kg', 'g', 'l', 'ml', 'units', 'pcs', 'pkg', 'bags', 'boxes'
        );
    """)

    op.execute("""
        CREATE TYPE ingredientcategory AS ENUM (
            'flour', 'yeast', 'dairy', 'eggs', 'sugar', 'fats', 'salt',
            'spices', 'additives', 'packaging', 'cleaning', 'other'
        );
    """)

    op.execute("""
        CREATE TYPE stockmovementtype AS ENUM (
            'purchase', 'production_use', 'adjustment', 'waste',
            'transfer', 'return', 'initial_stock'
        );
    """)

    # Create ingredients table. Enum columns are declared with
    # create_type=False so they reuse the types created above; a plain
    # sa.Enum would emit a second CREATE TYPE and fail.
    op.create_table(
        'ingredients',
        sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column('tenant_id', postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column('name', sa.String(255), nullable=False),
        sa.Column('sku', sa.String(100), nullable=True),
        sa.Column('barcode', sa.String(50), nullable=True),
        sa.Column('category', postgresql.ENUM('flour', 'yeast', 'dairy', 'eggs', 'sugar', 'fats', 'salt', 'spices', 'additives', 'packaging', 'cleaning', 'other', name='ingredientcategory', create_type=False), nullable=False),
        sa.Column('subcategory', sa.String(100), nullable=True),
        sa.Column('description', sa.Text(), nullable=True),
        sa.Column('brand', sa.String(100), nullable=True),
        sa.Column('unit_of_measure', postgresql.ENUM('kg', 'g', 'l', 'ml', 'units', 'pcs', 'pkg', 'bags', 'boxes', name='unitofmeasure', create_type=False), nullable=False),
        sa.Column('package_size', sa.Float(), nullable=True),
        sa.Column('average_cost', sa.Numeric(10, 2), nullable=True),
        sa.Column('last_purchase_price', sa.Numeric(10, 2), nullable=True),
        sa.Column('standard_cost', sa.Numeric(10, 2), nullable=True),
        sa.Column('low_stock_threshold', sa.Float(), nullable=False, server_default='10.0'),
        sa.Column('reorder_point', sa.Float(), nullable=False, server_default='20.0'),
        sa.Column('reorder_quantity', sa.Float(), nullable=False, server_default='50.0'),
        sa.Column('max_stock_level', sa.Float(), nullable=True),
        sa.Column('requires_refrigeration', sa.Boolean(), nullable=True, server_default='false'),
        sa.Column('requires_freezing', sa.Boolean(), nullable=True, server_default='false'),
        sa.Column('storage_temperature_min', sa.Float(), nullable=True),
        sa.Column('storage_temperature_max', sa.Float(), nullable=True),
        sa.Column('storage_humidity_max', sa.Float(), nullable=True),
        sa.Column('shelf_life_days', sa.Integer(), nullable=True),
        sa.Column('storage_instructions', sa.Text(), nullable=True),
        sa.Column('is_active', sa.Boolean(), nullable=True, server_default='true'),
        sa.Column('is_perishable', sa.Boolean(), nullable=True, server_default='false'),
        sa.Column('allergen_info', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
        sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
        sa.Column('created_by', postgresql.UUID(as_uuid=True), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )

    # Create stock table
    op.create_table(
        'stock',
        sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column('tenant_id', postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column('ingredient_id', postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column('batch_number', sa.String(100), nullable=True),
        sa.Column('lot_number', sa.String(100), nullable=True),
        sa.Column('supplier_batch_ref', sa.String(100), nullable=True),
        sa.Column('current_quantity', sa.Float(), nullable=False, server_default='0.0'),
        sa.Column('reserved_quantity', sa.Float(), nullable=False, server_default='0.0'),
        sa.Column('available_quantity', sa.Float(), nullable=False, server_default='0.0'),
        sa.Column('received_date', sa.DateTime(timezone=True), nullable=True),
        sa.Column('expiration_date', sa.DateTime(timezone=True), nullable=True),
        sa.Column('best_before_date', sa.DateTime(timezone=True), nullable=True),
        sa.Column('unit_cost', sa.Numeric(10, 2), nullable=True),
        sa.Column('total_cost', sa.Numeric(10, 2), nullable=True),
        sa.Column('storage_location', sa.String(100), nullable=True),
        sa.Column('warehouse_zone', sa.String(50), nullable=True),
        sa.Column('shelf_position', sa.String(50), nullable=True),
        sa.Column('is_available', sa.Boolean(), nullable=True, server_default='true'),
        sa.Column('is_expired', sa.Boolean(), nullable=True, server_default='false'),
        sa.Column('quality_status', sa.String(20), nullable=True, server_default='good'),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
        sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
        sa.ForeignKeyConstraint(['ingredient_id'], ['ingredients.id']),
        sa.PrimaryKeyConstraint('id')
    )

    # Create stock_movements table
    op.create_table(
        'stock_movements',
        sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column('tenant_id', postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column('ingredient_id', postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column('stock_id', postgresql.UUID(as_uuid=True), nullable=True),
        sa.Column('movement_type', postgresql.ENUM('purchase', 'production_use', 'adjustment', 'waste', 'transfer', 'return', 'initial_stock', name='stockmovementtype', create_type=False), nullable=False),
        sa.Column('quantity', sa.Float(), nullable=False),
        sa.Column('unit_cost', sa.Numeric(10, 2), nullable=True),
        sa.Column('total_cost', sa.Numeric(10, 2), nullable=True),
        sa.Column('quantity_before', sa.Float(), nullable=True),
        sa.Column('quantity_after', sa.Float(), nullable=True),
        sa.Column('reference_number', sa.String(100), nullable=True),
        sa.Column('supplier_id', postgresql.UUID(as_uuid=True), nullable=True),
        sa.Column('notes', sa.Text(), nullable=True),
        sa.Column('reason_code', sa.String(50), nullable=True),
        sa.Column('movement_date', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
        sa.Column('created_by', postgresql.UUID(as_uuid=True), nullable=True),
        sa.ForeignKeyConstraint(['ingredient_id'], ['ingredients.id']),
        sa.ForeignKeyConstraint(['stock_id'], ['stock.id']),
        sa.PrimaryKeyConstraint('id')
    )

    # Create stock_alerts table
    op.create_table(
        'stock_alerts',
        sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column('tenant_id', postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column('ingredient_id', postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column('stock_id', postgresql.UUID(as_uuid=True), nullable=True),
        sa.Column('alert_type', sa.String(50), nullable=False),
        sa.Column('severity', sa.String(20), nullable=False, server_default='medium'),
        sa.Column('title', sa.String(255), nullable=False),
        sa.Column('message', sa.Text(), nullable=False),
        sa.Column('current_quantity', sa.Float(), nullable=True),
        sa.Column('threshold_value', sa.Float(), nullable=True),
        sa.Column('expiration_date', sa.DateTime(timezone=True), nullable=True),
        sa.Column('is_active', sa.Boolean(), nullable=True, server_default='true'),
        sa.Column('is_acknowledged', sa.Boolean(), nullable=True, server_default='false'),
        sa.Column('acknowledged_by', postgresql.UUID(as_uuid=True), nullable=True),
        sa.Column('acknowledged_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('is_resolved', sa.Boolean(), nullable=True, server_default='false'),
        sa.Column('resolved_by', postgresql.UUID(as_uuid=True), nullable=True),
        sa.Column('resolved_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('resolution_notes', sa.Text(), nullable=True),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
        sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
        sa.ForeignKeyConstraint(['ingredient_id'], ['ingredients.id']),
        sa.ForeignKeyConstraint(['stock_id'], ['stock.id']),
        sa.PrimaryKeyConstraint('id')
    )

    # Create indexes for ingredients table
    op.create_index('idx_ingredients_tenant_name', 'ingredients', ['tenant_id', 'name'], unique=True)
    op.create_index('idx_ingredients_tenant_sku', 'ingredients', ['tenant_id', 'sku'])
    op.create_index('idx_ingredients_barcode', 'ingredients', ['barcode'])
    op.create_index('idx_ingredients_category', 'ingredients', ['tenant_id', 'category', 'is_active'])
    op.create_index('idx_ingredients_stock_levels', 'ingredients', ['tenant_id', 'low_stock_threshold', 'reorder_point'])

    # Create indexes for stock table
    op.create_index('idx_stock_tenant_ingredient', 'stock', ['tenant_id', 'ingredient_id'])
    op.create_index('idx_stock_expiration', 'stock', ['tenant_id', 'expiration_date', 'is_available'])
    op.create_index('idx_stock_batch', 'stock', ['tenant_id', 'batch_number'])
    op.create_index('idx_stock_low_levels', 'stock', ['tenant_id', 'current_quantity', 'is_available'])
    op.create_index('idx_stock_quality', 'stock', ['tenant_id', 'quality_status', 'is_available'])

    # Create indexes for stock_movements table
    op.create_index('idx_movements_tenant_date', 'stock_movements', ['tenant_id', 'movement_date'])
    op.create_index('idx_movements_tenant_ingredient', 'stock_movements', ['tenant_id', 'ingredient_id', 'movement_date'])
    op.create_index('idx_movements_type', 'stock_movements', ['tenant_id', 'movement_type', 'movement_date'])
    op.create_index('idx_movements_reference', 'stock_movements', ['reference_number'])
    op.create_index('idx_movements_supplier', 'stock_movements', ['supplier_id', 'movement_date'])

    # Create indexes for stock_alerts table
    op.create_index('idx_alerts_tenant_active', 'stock_alerts', ['tenant_id', 'is_active', 'created_at'])
    op.create_index('idx_alerts_type_severity', 'stock_alerts', ['alert_type', 'severity', 'is_active'])
    op.create_index('idx_alerts_ingredient', 'stock_alerts', ['ingredient_id', 'is_active'])
    op.create_index('idx_alerts_unresolved', 'stock_alerts', ['tenant_id', 'is_resolved', 'is_active'])


def downgrade() -> None:
    # Drop indexes
    op.drop_index('idx_alerts_unresolved', table_name='stock_alerts')
    op.drop_index('idx_alerts_ingredient', table_name='stock_alerts')
    op.drop_index('idx_alerts_type_severity', table_name='stock_alerts')
    op.drop_index('idx_alerts_tenant_active', table_name='stock_alerts')

    op.drop_index('idx_movements_supplier', table_name='stock_movements')
    op.drop_index('idx_movements_reference', table_name='stock_movements')
    op.drop_index('idx_movements_type', table_name='stock_movements')
    op.drop_index('idx_movements_tenant_ingredient', table_name='stock_movements')
    op.drop_index('idx_movements_tenant_date', table_name='stock_movements')

    op.drop_index('idx_stock_quality', table_name='stock')
    op.drop_index('idx_stock_low_levels', table_name='stock')
    op.drop_index('idx_stock_batch', table_name='stock')
    op.drop_index('idx_stock_expiration', table_name='stock')
    op.drop_index('idx_stock_tenant_ingredient', table_name='stock')

    op.drop_index('idx_ingredients_stock_levels', table_name='ingredients')
    op.drop_index('idx_ingredients_category', table_name='ingredients')
    op.drop_index('idx_ingredients_barcode', table_name='ingredients')
    op.drop_index('idx_ingredients_tenant_sku', table_name='ingredients')
    op.drop_index('idx_ingredients_tenant_name', table_name='ingredients')

    # Drop tables
    op.drop_table('stock_alerts')
    op.drop_table('stock_movements')
    op.drop_table('stock')
    op.drop_table('ingredients')

    # Drop enum types
    op.execute("DROP TYPE stockmovementtype;")
    op.execute("DROP TYPE ingredientcategory;")
    op.execute("DROP TYPE unitofmeasure;")
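The composite indexes above are shaped for tenant-scoped lookups. As an illustration (the table handle and threshold parameter are declared ad hoc here, not taken from the app's models), this is the kind of query idx_stock_low_levels (tenant_id, current_quantity, is_available) is built to serve, in SQLAlchemy 2.x style:

import uuid

import sqlalchemy as sa

# Lightweight handle for the stock table created above
stock = sa.table(
    'stock',
    sa.column('tenant_id'),
    sa.column('ingredient_id'),
    sa.column('current_quantity'),
    sa.column('is_available'),
)


def low_stock_query(tenant_id: uuid.UUID, threshold: float) -> sa.Select:
    # Predicates cover exactly the index's columns
    # (tenant_id, current_quantity, is_available).
    return sa.select(stock.c.ingredient_id, stock.c.current_quantity).where(
        stock.c.tenant_id == tenant_id,
        stock.c.is_available.is_(True),
        stock.c.current_quantity < threshold,
    )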
@@ -0,0 +1,95 @@
"""Add finished products support

Revision ID: 002
Revises: 001
Create Date: 2025-01-15 10:30:00.000000

"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql

# revision identifiers, used by Alembic.
revision = '002'
down_revision = '001'
branch_labels = None
depends_on = None


def upgrade() -> None:
    # Create new enum types for finished products
    op.execute("""
        CREATE TYPE producttype AS ENUM (
            'ingredient', 'finished_product'
        );
    """)

    op.execute("""
        CREATE TYPE productcategory AS ENUM (
            'bread', 'croissants', 'pastries', 'cakes', 'cookies',
            'muffins', 'sandwiches', 'seasonal', 'beverages', 'other_products'
        );
    """)

    # Add new columns to ingredients table. create_type=False reuses the
    # types created above instead of emitting a duplicate CREATE TYPE.
    op.add_column('ingredients', sa.Column(
        'product_type',
        postgresql.ENUM('ingredient', 'finished_product', name='producttype', create_type=False),
        nullable=False, server_default='ingredient'))

    op.add_column('ingredients', sa.Column(
        'product_category',
        postgresql.ENUM('bread', 'croissants', 'pastries', 'cakes', 'cookies', 'muffins', 'sandwiches', 'seasonal', 'beverages', 'other_products', name='productcategory', create_type=False),
        nullable=True))

    # Rename existing category column to ingredient_category
    op.alter_column('ingredients', 'category', new_column_name='ingredient_category')

    # Add finished product specific columns
    op.add_column('ingredients', sa.Column('supplier_name', sa.String(200), nullable=True))
    op.add_column('ingredients', sa.Column('display_life_hours', sa.Integer(), nullable=True))
    op.add_column('ingredients', sa.Column('best_before_hours', sa.Integer(), nullable=True))
    op.add_column('ingredients', sa.Column('central_baker_product_code', sa.String(100), nullable=True))
    op.add_column('ingredients', sa.Column('delivery_days', sa.String(20), nullable=True))
    op.add_column('ingredients', sa.Column('minimum_order_quantity', sa.Float(), nullable=True))
    op.add_column('ingredients', sa.Column('pack_size', sa.Integer(), nullable=True))
    op.add_column('ingredients', sa.Column('nutritional_info', postgresql.JSONB(astext_type=sa.Text()), nullable=True))

    # Update existing indexes and create new ones
    op.drop_index('idx_ingredients_category', table_name='ingredients')

    # Create new indexes for enhanced functionality
    op.create_index('idx_ingredients_product_type', 'ingredients', ['tenant_id', 'product_type', 'is_active'])
    op.create_index('idx_ingredients_ingredient_category', 'ingredients', ['tenant_id', 'ingredient_category', 'is_active'])
    op.create_index('idx_ingredients_product_category', 'ingredients', ['tenant_id', 'product_category', 'is_active'])
    op.create_index('idx_ingredients_central_baker', 'ingredients', ['tenant_id', 'supplier_name', 'product_type'])


def downgrade() -> None:
    # Drop new indexes
    op.drop_index('idx_ingredients_central_baker', table_name='ingredients')
    op.drop_index('idx_ingredients_product_category', table_name='ingredients')
    op.drop_index('idx_ingredients_ingredient_category', table_name='ingredients')
    op.drop_index('idx_ingredients_product_type', table_name='ingredients')

    # Remove finished product specific columns
    op.drop_column('ingredients', 'nutritional_info')
    op.drop_column('ingredients', 'pack_size')
    op.drop_column('ingredients', 'minimum_order_quantity')
    op.drop_column('ingredients', 'delivery_days')
    op.drop_column('ingredients', 'central_baker_product_code')
    op.drop_column('ingredients', 'best_before_hours')
    op.drop_column('ingredients', 'display_life_hours')
    op.drop_column('ingredients', 'supplier_name')

    # Remove new columns
    op.drop_column('ingredients', 'product_category')
    op.drop_column('ingredients', 'product_type')

    # Rename ingredient_category back to category
    op.alter_column('ingredients', 'ingredient_category', new_column_name='category')

    # Recreate original category index
    op.create_index('idx_ingredients_category', 'ingredients', ['tenant_id', 'category', 'is_active'])

    # Drop new enum types
    op.execute("DROP TYPE productcategory;")
    op.execute("DROP TYPE producttype;")
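If productcategory later needs another value, PostgreSQL enums are extended in place rather than recreated. A hypothetical revision 003 (the 'gluten_free' value and the revision ids are invented for illustration) could look like:

"""Add gluten_free product category (hypothetical example)"""
from alembic import op

revision = '003'
down_revision = '002'
branch_labels = None
depends_on = None


def upgrade() -> None:
    # ALTER TYPE ... ADD VALUE cannot run inside a transaction block on
    # older PostgreSQL versions, hence the autocommit block.
    with op.get_context().autocommit_block():
        op.execute("ALTER TYPE productcategory ADD VALUE IF NOT EXISTS 'gluten_free'")


def downgrade() -> None:
    # PostgreSQL has no ALTER TYPE ... DROP VALUE; removing an enum value
    # would require rebuilding the type, so this is left irreversible.
    pass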