Initial commit - production deployment

commit c23d00dd92
Date: 2026-01-21 17:17:16 +01:00
2289 changed files with 638440 additions and 0 deletions


@@ -0,0 +1,49 @@
-- ================================================================
-- Performance Indexes Migration for Inventory Service
-- Created: 2025-11-15
-- Purpose: Add indexes to improve dashboard query performance
-- ================================================================

-- Index for ingredients by tenant and ingredient_category
-- Used in: get_stock_by_category, get_business_model_metrics
CREATE INDEX IF NOT EXISTS idx_ingredients_tenant_ing_category
ON ingredients(tenant_id, ingredient_category)
WHERE is_active = true;

-- Index for ingredients by tenant and product_category
-- Used in: get_stock_status_by_category
CREATE INDEX IF NOT EXISTS idx_ingredients_tenant_prod_category
ON ingredients(tenant_id, product_category)
WHERE is_active = true;

-- Index for stock by tenant, ingredient, and availability
-- Used in: get_stock_summary_by_tenant, get_ingredient_stock_levels
CREATE INDEX IF NOT EXISTS idx_stock_tenant_ingredient_available
ON stock(tenant_id, ingredient_id, is_available);

-- Index for food_safety_alerts by tenant, status, and severity
-- Used in: get_alerts_by_severity, get_food_safety_dashboard
CREATE INDEX IF NOT EXISTS idx_alerts_tenant_status_severity
ON food_safety_alerts(tenant_id, status, severity);

-- Index for stock_movements by tenant and movement_date (descending)
-- Used in: get_movements_by_type, get_recent_activity
CREATE INDEX IF NOT EXISTS idx_movements_tenant_date
ON stock_movements(tenant_id, movement_date DESC);

-- Index for stock by tenant and ingredient with quantity
-- Used in: get_live_metrics with stock level calculations
CREATE INDEX IF NOT EXISTS idx_stock_tenant_quantity
ON stock(tenant_id, ingredient_id, current_quantity)
WHERE is_available = true;

-- Index for ingredients by tenant and creation date
-- Used in: get_recent_activity for recently added ingredients
CREATE INDEX IF NOT EXISTS idx_ingredients_tenant_created
ON ingredients(tenant_id, created_at DESC)
WHERE is_active = true;

-- Composite index for stock movements by type and date
-- Used in: get_movements_by_type with date filtering
CREATE INDEX IF NOT EXISTS idx_movements_type_date
ON stock_movements(tenant_id, movement_type, movement_date DESC);
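
To confirm these indexes are actually picked up by the dashboard queries, one option is to run EXPLAIN against a representative query after applying the migration. The snippet below is a minimal standalone sketch, assuming an asyncpg-backed SQLAlchemy engine; the connection URL, helper name, and tenant value are illustrative only and not part of the migration.

import asyncio
import uuid

from sqlalchemy import text
from sqlalchemy.ext.asyncio import create_async_engine


async def explain_category_query(database_url: str, tenant_id: uuid.UUID) -> None:
    # Representative dashboard query that idx_ingredients_tenant_ing_category targets.
    engine = create_async_engine(database_url)
    query = text("""
        EXPLAIN
        SELECT ingredient_category, COUNT(*)
        FROM ingredients
        WHERE tenant_id = :tenant_id AND is_active = true
        GROUP BY ingredient_category
    """)
    async with engine.connect() as conn:
        result = await conn.execute(query, {"tenant_id": tenant_id})
        for row in result:
            # On a sufficiently large table the plan should reference the partial index.
            print(row[0])
    await engine.dispose()


# Illustrative invocation; substitute a real URL and tenant id.
# asyncio.run(explain_category_query(
#     "postgresql+asyncpg://user:password@localhost:5432/inventory", uuid.uuid4()))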


@@ -0,0 +1,103 @@
#!/usr/bin/env python3
"""
Script to apply performance indexes to the inventory database
Usage: python apply_indexes.py
"""
import asyncio
import os
import sys
from pathlib import Path
# Add parent directory to path to import app modules
sys.path.insert(0, str(Path(__file__).parent.parent))
from sqlalchemy import text
from app.core.database import database_manager
import structlog
logger = structlog.get_logger()


async def apply_indexes():
    """Apply performance indexes from SQL file"""
    try:
        # Read the SQL file
        sql_file = Path(__file__).parent / "001_add_performance_indexes.sql"
        with open(sql_file, 'r') as f:
            sql_content = f.read()

        logger.info("Applying performance indexes...")

        # Drop comment-only lines first, then split by semicolons.
        # (Filtering out whole chunks that start with '--' would discard every
        # statement, because each CREATE INDEX is preceded by a comment.)
        sql_without_comments = "\n".join(
            line for line in sql_content.splitlines()
            if not line.strip().startswith('--')
        )
        statements = [s.strip() for s in sql_without_comments.split(';') if s.strip()]

        async with database_manager.get_session() as session:
            for statement in statements:
                if statement:
                    logger.info(f"Executing: {statement[:100]}...")
                    await session.execute(text(statement))
            await session.commit()

        logger.info(f"Successfully applied {len(statements)} index statements")
        return True

    except Exception as e:
        logger.error(f"Failed to apply indexes: {e}", exc_info=True)
        return False


async def verify_indexes():
    """Verify that indexes were created"""
    try:
        logger.info("Verifying indexes...")

        verify_query = """
            SELECT
                schemaname,
                tablename,
                indexname,
                indexdef
            FROM pg_indexes
            WHERE indexname LIKE 'idx_%'
              AND schemaname = 'public'
            ORDER BY tablename, indexname;
        """

        async with database_manager.get_session() as session:
            result = await session.execute(text(verify_query))
            indexes = result.fetchall()

            logger.info(f"Found {len(indexes)} indexes:")
            for idx in indexes:
                logger.info(f"  {idx.tablename}.{idx.indexname}")

        return True

    except Exception as e:
        logger.error(f"Failed to verify indexes: {e}", exc_info=True)
        return False


async def main():
    """Main entry point"""
    logger.info("Starting index migration...")

    # Apply indexes
    success = await apply_indexes()

    if success:
        # Verify indexes
        await verify_indexes()
        logger.info("Index migration completed successfully")
    else:
        logger.error("Index migration failed")
        sys.exit(1)

    # Close database connections
    await database_manager.close_connections()


if __name__ == "__main__":
    asyncio.run(main())
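
As a quick, self-contained check of the comment-stripping split used above, the sketch below (independent of the script) shows that comment lines are removed before the split, so each CREATE INDEX survives as its own executable statement:

# Standalone illustration of the comment-stripping split.
sample_sql = """
-- Index for ingredients by tenant and ingredient_category
CREATE INDEX IF NOT EXISTS idx_ingredients_tenant_ing_category
ON ingredients(tenant_id, ingredient_category)
WHERE is_active = true;

-- Index for stock by tenant, ingredient, and availability
CREATE INDEX IF NOT EXISTS idx_stock_tenant_ingredient_available
ON stock(tenant_id, ingredient_id, is_available);
"""

without_comments = "\n".join(
    line for line in sample_sql.splitlines()
    if not line.strip().startswith("--")
)
statements = [s.strip() for s in without_comments.split(";") if s.strip()]

# Two statements remain, each beginning with CREATE INDEX.
assert len(statements) == 2
assert all(s.startswith("CREATE INDEX") for s in statements)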


@@ -0,0 +1,141 @@
"""Alembic environment configuration for inventory service"""
import asyncio
import os
import sys
from logging.config import fileConfig
from sqlalchemy import pool
from sqlalchemy.engine import Connection
from sqlalchemy.ext.asyncio import async_engine_from_config
from alembic import context
# Add the service directory to the Python path
service_path = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
if service_path not in sys.path:
sys.path.insert(0, service_path)
# Add shared modules to path
shared_path = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "..", "shared"))
if shared_path not in sys.path:
sys.path.insert(0, shared_path)
try:
from app.core.config import settings
from shared.database.base import Base
# Import all models to ensure they are registered with Base.metadata
from app.models import * # noqa: F401, F403
except ImportError as e:
print(f"Import error in migrations env.py: {e}")
print(f"Current Python path: {sys.path}")
raise
# this is the Alembic Config object
config = context.config
# Determine service name from file path
service_name = os.path.basename(os.path.dirname(os.path.dirname(__file__)))
service_name_upper = service_name.upper().replace('-', '_')
# Set database URL from environment variables with multiple fallback strategies
database_url = (
os.getenv(f'{service_name_upper}_DATABASE_URL') or # Service-specific
os.getenv('DATABASE_URL') # Generic fallback
)
# If DATABASE_URL is not set, construct from individual components
if not database_url:
# Try generic PostgreSQL environment variables first
postgres_host = os.getenv('POSTGRES_HOST')
postgres_port = os.getenv('POSTGRES_PORT', '5432')
postgres_db = os.getenv('POSTGRES_DB')
postgres_user = os.getenv('POSTGRES_USER')
postgres_password = os.getenv('POSTGRES_PASSWORD')
if all([postgres_host, postgres_db, postgres_user, postgres_password]):
database_url = f"postgresql+asyncpg://{postgres_user}:{postgres_password}@{postgres_host}:{postgres_port}/{postgres_db}"
else:
# Try service-specific environment variables
db_host = os.getenv(f'{service_name_upper}_DB_HOST', f'{service_name}-db-service')
db_port = os.getenv(f'{service_name_upper}_DB_PORT', '5432')
db_name = os.getenv(f'{service_name_upper}_DB_NAME', f'{service_name.replace("-", "_")}_db')
db_user = os.getenv(f'{service_name_upper}_DB_USER', f'{service_name.replace("-", "_")}_user')
db_password = os.getenv(f'{service_name_upper}_DB_PASSWORD')
if db_password:
database_url = f"postgresql+asyncpg://{db_user}:{db_password}@{db_host}:{db_port}/{db_name}"
else:
# Final fallback: try to get from settings object
try:
database_url = getattr(settings, 'DATABASE_URL', None)
except Exception:
pass
if not database_url:
error_msg = f"ERROR: No database URL configured for {service_name} service"
print(error_msg)
raise Exception(error_msg)
config.set_main_option("sqlalchemy.url", database_url)
# Interpret the config file for Python logging
if config.config_file_name is not None:
fileConfig(config.config_file_name)
# Set target metadata
target_metadata = Base.metadata
def run_migrations_offline() -> None:
"""Run migrations in 'offline' mode."""
url = config.get_main_option("sqlalchemy.url")
context.configure(
url=url,
target_metadata=target_metadata,
literal_binds=True,
dialect_opts={"paramstyle": "named"},
compare_type=True,
compare_server_default=True,
)
with context.begin_transaction():
context.run_migrations()
def do_run_migrations(connection: Connection) -> None:
"""Execute migrations with the given connection."""
context.configure(
connection=connection,
target_metadata=target_metadata,
compare_type=True,
compare_server_default=True,
)
with context.begin_transaction():
context.run_migrations()
async def run_async_migrations() -> None:
"""Run migrations in 'online' mode with async support."""
connectable = async_engine_from_config(
config.get_section(config.config_ini_section, {}),
prefix="sqlalchemy.",
poolclass=pool.NullPool,
)
async with connectable.connect() as connection:
await connection.run_sync(do_run_migrations)
await connectable.dispose()
def run_migrations_online() -> None:
"""Run migrations in 'online' mode."""
asyncio.run(run_async_migrations())
if context.is_offline_mode():
run_migrations_offline()
else:
run_migrations_online()
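
The fallback chain above can be hard to follow inline. The standalone sketch below (a hypothetical helper, not part of env.py) expresses the same precedence — service-specific URL, then DATABASE_URL, then generic POSTGRES_* parts, then service-specific parts — against a plain dict so it can be exercised without touching os.environ:

from typing import Mapping, Optional


def resolve_database_url(env: Mapping[str, str], service: str = "inventory") -> Optional[str]:
    """Mirror env.py's URL precedence using an explicit mapping instead of os.environ."""
    prefix = service.upper().replace("-", "_")

    url = env.get(f"{prefix}_DATABASE_URL") or env.get("DATABASE_URL")
    if url:
        return url

    host, db = env.get("POSTGRES_HOST"), env.get("POSTGRES_DB")
    user, password = env.get("POSTGRES_USER"), env.get("POSTGRES_PASSWORD")
    if all([host, db, user, password]):
        port = env.get("POSTGRES_PORT", "5432")
        return f"postgresql+asyncpg://{user}:{password}@{host}:{port}/{db}"

    db_password = env.get(f"{prefix}_DB_PASSWORD")
    if db_password:
        db_host = env.get(f"{prefix}_DB_HOST", f"{service}-db-service")
        db_port = env.get(f"{prefix}_DB_PORT", "5432")
        db_name = env.get(f"{prefix}_DB_NAME", f"{service.replace('-', '_')}_db")
        db_user = env.get(f"{prefix}_DB_USER", f"{service.replace('-', '_')}_user")
        return f"postgresql+asyncpg://{db_user}:{db_password}@{db_host}:{db_port}/{db_name}"

    return None


# Example: the generic DATABASE_URL wins when no service-specific URL is set.
assert resolve_database_url(
    {"DATABASE_URL": "postgresql+asyncpg://u:p@db:5432/inv"}
) == "postgresql+asyncpg://u:p@db:5432/inv"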


@@ -0,0 +1,26 @@
"""${message}
Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}
# revision identifiers, used by Alembic.
revision: str = ${repr(up_revision)}
down_revision: Union[str, None] = ${repr(down_revision)}
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}
def upgrade() -> None:
${upgrades if upgrades else "pass"}
def downgrade() -> None:
${downgrades if downgrades else "pass"}
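
For reference, a revision file generated from this template would look roughly like the following. The revision identifier and the column change are purely illustrative; only the "Revises" pointer to the unified initial schema is taken from this repository.

"""add example column

Revision ID: abc123def456
Revises: 20251123_unified_initial_schema
Create Date: 2025-11-24 10:00:00.000000

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa

# revision identifiers, used by Alembic.
revision: str = 'abc123def456'
down_revision: Union[str, None] = '20251123_unified_initial_schema'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    op.add_column('ingredients', sa.Column('example_note', sa.Text(), nullable=True))


def downgrade() -> None:
    op.drop_column('ingredients', 'example_note')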


@@ -0,0 +1,631 @@
"""Unified initial schema for inventory service
Revision ID: 20251123_unified_initial_schema
Revises:
Create Date: 2025-11-23
This unified migration combines all previous inventory migrations:
- Initial schema (e7fcea67bf4e)
- Local production support (add_local_production_support)
- Stock management fields nullable (20251108_1200_make_stock_fields_nullable)
- Stock receipts (20251123_add_stock_receipts)
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision: str = '20251123_unified_initial_schema'
down_revision: Union[str, None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
# Create audit_logs table
op.create_table('audit_logs',
sa.Column('id', sa.UUID(), nullable=False),
sa.Column('tenant_id', sa.UUID(), nullable=False),
sa.Column('user_id', sa.UUID(), nullable=False),
sa.Column('action', sa.String(length=100), nullable=False),
sa.Column('resource_type', sa.String(length=100), nullable=False),
sa.Column('resource_id', sa.String(length=255), nullable=True),
sa.Column('severity', sa.String(length=20), nullable=False),
sa.Column('service_name', sa.String(length=100), nullable=False),
sa.Column('description', sa.Text(), nullable=True),
sa.Column('changes', postgresql.JSON(astext_type=sa.Text()), nullable=True),
sa.Column('audit_metadata', postgresql.JSON(astext_type=sa.Text()), nullable=True),
sa.Column('ip_address', sa.String(length=45), nullable=True),
sa.Column('user_agent', sa.Text(), nullable=True),
sa.Column('endpoint', sa.String(length=255), nullable=True),
sa.Column('method', sa.String(length=10), nullable=True),
sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
sa.PrimaryKeyConstraint('id')
)
op.create_index('idx_audit_resource_type_action', 'audit_logs', ['resource_type', 'action'], unique=False)
op.create_index('idx_audit_service_created', 'audit_logs', ['service_name', 'created_at'], unique=False)
op.create_index('idx_audit_severity_created', 'audit_logs', ['severity', 'created_at'], unique=False)
op.create_index('idx_audit_tenant_created', 'audit_logs', ['tenant_id', 'created_at'], unique=False)
op.create_index('idx_audit_user_created', 'audit_logs', ['user_id', 'created_at'], unique=False)
op.create_index(op.f('ix_audit_logs_action'), 'audit_logs', ['action'], unique=False)
op.create_index(op.f('ix_audit_logs_created_at'), 'audit_logs', ['created_at'], unique=False)
op.create_index(op.f('ix_audit_logs_resource_id'), 'audit_logs', ['resource_id'], unique=False)
op.create_index(op.f('ix_audit_logs_resource_type'), 'audit_logs', ['resource_type'], unique=False)
op.create_index(op.f('ix_audit_logs_service_name'), 'audit_logs', ['service_name'], unique=False)
op.create_index(op.f('ix_audit_logs_severity'), 'audit_logs', ['severity'], unique=False)
op.create_index(op.f('ix_audit_logs_tenant_id'), 'audit_logs', ['tenant_id'], unique=False)
op.create_index(op.f('ix_audit_logs_user_id'), 'audit_logs', ['user_id'], unique=False)
# Create ingredients table with all fields including local production support and nullable stock fields
op.create_table('ingredients',
sa.Column('id', sa.UUID(), nullable=False),
sa.Column('tenant_id', sa.UUID(), nullable=False),
sa.Column('name', sa.String(length=255), nullable=False),
sa.Column('sku', sa.String(length=100), nullable=True),
sa.Column('barcode', sa.String(length=50), nullable=True),
sa.Column('product_type', sa.Enum('INGREDIENT', 'FINISHED_PRODUCT', name='producttype'), nullable=False),
sa.Column('ingredient_category', sa.Enum('FLOUR', 'YEAST', 'DAIRY', 'EGGS', 'SUGAR', 'FATS', 'SALT', 'SPICES', 'ADDITIVES', 'PACKAGING', 'CLEANING', 'OTHER', name='ingredientcategory'), nullable=True),
sa.Column('product_category', sa.Enum('BREAD', 'CROISSANTS', 'PASTRIES', 'CAKES', 'COOKIES', 'MUFFINS', 'SANDWICHES', 'SEASONAL', 'BEVERAGES', 'OTHER_PRODUCTS', name='productcategory'), nullable=True),
sa.Column('subcategory', sa.String(length=100), nullable=True),
sa.Column('description', sa.Text(), nullable=True),
sa.Column('brand', sa.String(length=100), nullable=True),
sa.Column('unit_of_measure', sa.Enum('KILOGRAMS', 'GRAMS', 'LITERS', 'MILLILITERS', 'UNITS', 'PIECES', 'PACKAGES', 'BAGS', 'BOXES', name='unitofmeasure'), nullable=False),
sa.Column('package_size', sa.Float(), nullable=True),
sa.Column('average_cost', sa.Numeric(precision=10, scale=2), nullable=True),
sa.Column('last_purchase_price', sa.Numeric(precision=10, scale=2), nullable=True),
sa.Column('standard_cost', sa.Numeric(precision=10, scale=2), nullable=True),
# Stock management fields - made nullable for simplified onboarding
sa.Column('low_stock_threshold', sa.Float(), nullable=True),
sa.Column('reorder_point', sa.Float(), nullable=True),
sa.Column('reorder_quantity', sa.Float(), nullable=True),
sa.Column('max_stock_level', sa.Float(), nullable=True),
sa.Column('shelf_life_days', sa.Integer(), nullable=True),
sa.Column('display_life_hours', sa.Integer(), nullable=True),
sa.Column('best_before_hours', sa.Integer(), nullable=True),
sa.Column('storage_instructions', sa.Text(), nullable=True),
sa.Column('central_baker_product_code', sa.String(length=100), nullable=True),
sa.Column('delivery_days', sa.String(length=20), nullable=True),
sa.Column('minimum_order_quantity', sa.Float(), nullable=True),
sa.Column('pack_size', sa.Integer(), nullable=True),
sa.Column('is_active', sa.Boolean(), nullable=True),
sa.Column('is_perishable', sa.Boolean(), nullable=True),
sa.Column('allergen_info', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
sa.Column('nutritional_info', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
# Local production support fields
sa.Column('produced_locally', sa.Boolean(), nullable=False, server_default='false'),
sa.Column('recipe_id', postgresql.UUID(as_uuid=True), nullable=True),
# Audit fields
sa.Column('created_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('created_by', sa.UUID(), nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.create_index('idx_ingredients_barcode', 'ingredients', ['barcode'], unique=False)
op.create_index('idx_ingredients_ingredient_category', 'ingredients', ['tenant_id', 'ingredient_category', 'is_active'], unique=False)
op.create_index('idx_ingredients_product_category', 'ingredients', ['tenant_id', 'product_category', 'is_active'], unique=False)
op.create_index('idx_ingredients_product_type', 'ingredients', ['tenant_id', 'product_type', 'is_active'], unique=False)
op.create_index('idx_ingredients_stock_levels', 'ingredients', ['tenant_id', 'low_stock_threshold', 'reorder_point'], unique=False)
op.create_index('idx_ingredients_tenant_name', 'ingredients', ['tenant_id', 'name'], unique=True)
op.create_index('idx_ingredients_tenant_sku', 'ingredients', ['tenant_id', 'sku'], unique=False)
op.create_index(op.f('ix_ingredients_barcode'), 'ingredients', ['barcode'], unique=False)
op.create_index(op.f('ix_ingredients_ingredient_category'), 'ingredients', ['ingredient_category'], unique=False)
op.create_index(op.f('ix_ingredients_name'), 'ingredients', ['name'], unique=False)
op.create_index(op.f('ix_ingredients_product_category'), 'ingredients', ['product_category'], unique=False)
op.create_index(op.f('ix_ingredients_product_type'), 'ingredients', ['product_type'], unique=False)
op.create_index(op.f('ix_ingredients_sku'), 'ingredients', ['sku'], unique=False)
op.create_index(op.f('ix_ingredients_tenant_id'), 'ingredients', ['tenant_id'], unique=False)
op.create_index('ix_ingredients_produced_locally', 'ingredients', ['produced_locally'], unique=False)
op.create_index('ix_ingredients_recipe_id', 'ingredients', ['recipe_id'], unique=False)
# Create temperature_logs table
op.create_table('temperature_logs',
sa.Column('id', sa.UUID(), nullable=False),
sa.Column('tenant_id', sa.UUID(), nullable=False),
sa.Column('storage_location', sa.String(length=100), nullable=False),
sa.Column('warehouse_zone', sa.String(length=50), nullable=True),
sa.Column('equipment_id', sa.String(length=100), nullable=True),
sa.Column('temperature_celsius', sa.Float(), nullable=False),
sa.Column('humidity_percentage', sa.Float(), nullable=True),
sa.Column('target_temperature_min', sa.Float(), nullable=True),
sa.Column('target_temperature_max', sa.Float(), nullable=True),
sa.Column('is_within_range', sa.Boolean(), nullable=False),
sa.Column('alert_triggered', sa.Boolean(), nullable=False),
sa.Column('deviation_minutes', sa.Integer(), nullable=True),
sa.Column('measurement_method', sa.String(length=50), nullable=False),
sa.Column('device_id', sa.String(length=100), nullable=True),
sa.Column('calibration_date', sa.DateTime(timezone=True), nullable=True),
sa.Column('recorded_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
sa.Column('recorded_by', sa.UUID(), nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_temperature_logs_recorded_at'), 'temperature_logs', ['recorded_at'], unique=False)
op.create_index(op.f('ix_temperature_logs_storage_location'), 'temperature_logs', ['storage_location'], unique=False)
op.create_index(op.f('ix_temperature_logs_tenant_id'), 'temperature_logs', ['tenant_id'], unique=False)
# Create food_safety_compliance table
op.create_table('food_safety_compliance',
sa.Column('id', sa.UUID(), nullable=False),
sa.Column('tenant_id', sa.UUID(), nullable=False),
sa.Column('ingredient_id', sa.UUID(), nullable=False),
sa.Column('standard', sa.Enum('HACCP', 'FDA', 'USDA', 'FSMA', 'SQF', 'BRC', 'IFS', 'ISO22000', 'ORGANIC', 'NON_GMO', 'ALLERGEN_FREE', 'KOSHER', 'HALAL', name='foodsafetystandard'), nullable=False),
sa.Column('compliance_status', sa.Enum('COMPLIANT', 'NON_COMPLIANT', 'PENDING_REVIEW', 'EXPIRED', 'WARNING', name='compliancestatus'), nullable=False),
sa.Column('certification_number', sa.String(length=100), nullable=True),
sa.Column('certifying_body', sa.String(length=200), nullable=True),
sa.Column('certification_date', sa.DateTime(timezone=True), nullable=True),
sa.Column('expiration_date', sa.DateTime(timezone=True), nullable=True),
sa.Column('requirements', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
sa.Column('compliance_notes', sa.Text(), nullable=True),
sa.Column('documentation_url', sa.String(length=500), nullable=True),
sa.Column('last_audit_date', sa.DateTime(timezone=True), nullable=True),
sa.Column('next_audit_date', sa.DateTime(timezone=True), nullable=True),
sa.Column('auditor_name', sa.String(length=200), nullable=True),
sa.Column('audit_score', sa.Float(), nullable=True),
sa.Column('risk_level', sa.String(length=20), nullable=False),
sa.Column('risk_factors', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
sa.Column('mitigation_measures', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
sa.Column('is_active', sa.Boolean(), nullable=False),
sa.Column('requires_monitoring', sa.Boolean(), nullable=False),
sa.Column('monitoring_frequency_days', sa.Integer(), nullable=True),
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
sa.Column('created_by', sa.UUID(), nullable=True),
sa.Column('updated_by', sa.UUID(), nullable=True),
sa.ForeignKeyConstraint(['ingredient_id'], ['ingredients.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_food_safety_compliance_expiration_date'), 'food_safety_compliance', ['expiration_date'], unique=False)
op.create_index(op.f('ix_food_safety_compliance_ingredient_id'), 'food_safety_compliance', ['ingredient_id'], unique=False)
op.create_index(op.f('ix_food_safety_compliance_next_audit_date'), 'food_safety_compliance', ['next_audit_date'], unique=False)
op.create_index(op.f('ix_food_safety_compliance_standard'), 'food_safety_compliance', ['standard'], unique=False)
op.create_index(op.f('ix_food_safety_compliance_tenant_id'), 'food_safety_compliance', ['tenant_id'], unique=False)
# Create product_transformations table
op.create_table('product_transformations',
sa.Column('id', sa.UUID(), nullable=False),
sa.Column('tenant_id', sa.UUID(), nullable=False),
sa.Column('transformation_reference', sa.String(length=100), nullable=False),
sa.Column('source_ingredient_id', sa.UUID(), nullable=False),
sa.Column('target_ingredient_id', sa.UUID(), nullable=False),
sa.Column('source_stage', sa.Enum('raw_ingredient', 'par_baked', 'fully_baked', 'prepared_dough', 'frozen_product', name='productionstage'), nullable=False),
sa.Column('target_stage', sa.Enum('raw_ingredient', 'par_baked', 'fully_baked', 'prepared_dough', 'frozen_product', name='productionstage'), nullable=False),
sa.Column('source_quantity', sa.Float(), nullable=False),
sa.Column('target_quantity', sa.Float(), nullable=False),
sa.Column('conversion_ratio', sa.Float(), nullable=False),
sa.Column('expiration_calculation_method', sa.String(length=50), nullable=False),
sa.Column('expiration_days_offset', sa.Integer(), nullable=True),
sa.Column('transformation_date', sa.DateTime(timezone=True), nullable=False),
sa.Column('process_notes', sa.Text(), nullable=True),
sa.Column('performed_by', sa.UUID(), nullable=True),
sa.Column('source_batch_numbers', sa.Text(), nullable=True),
sa.Column('target_batch_number', sa.String(length=100), nullable=True),
sa.Column('is_completed', sa.Boolean(), nullable=True),
sa.Column('is_reversed', sa.Boolean(), nullable=True),
sa.Column('created_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('created_by', sa.UUID(), nullable=True),
sa.ForeignKeyConstraint(['source_ingredient_id'], ['ingredients.id'], ),
sa.ForeignKeyConstraint(['target_ingredient_id'], ['ingredients.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_index('idx_transformations_reference', 'product_transformations', ['transformation_reference'], unique=False)
op.create_index('idx_transformations_source', 'product_transformations', ['tenant_id', 'source_ingredient_id'], unique=False)
op.create_index('idx_transformations_stages', 'product_transformations', ['source_stage', 'target_stage'], unique=False)
op.create_index('idx_transformations_target', 'product_transformations', ['tenant_id', 'target_ingredient_id'], unique=False)
op.create_index('idx_transformations_tenant_date', 'product_transformations', ['tenant_id', 'transformation_date'], unique=False)
op.create_index(op.f('ix_product_transformations_tenant_id'), 'product_transformations', ['tenant_id'], unique=False)
op.create_index(op.f('ix_product_transformations_transformation_reference'), 'product_transformations', ['transformation_reference'], unique=False)
# Create stock table
op.create_table('stock',
sa.Column('id', sa.UUID(), nullable=False),
sa.Column('tenant_id', sa.UUID(), nullable=False),
sa.Column('ingredient_id', sa.UUID(), nullable=False),
sa.Column('supplier_id', sa.UUID(), nullable=True),
sa.Column('batch_number', sa.String(length=100), nullable=True),
sa.Column('lot_number', sa.String(length=100), nullable=True),
sa.Column('supplier_batch_ref', sa.String(length=100), nullable=True),
sa.Column('production_stage', sa.Enum('raw_ingredient', 'par_baked', 'fully_baked', 'prepared_dough', 'frozen_product', name='productionstage'), nullable=False),
sa.Column('transformation_reference', sa.String(length=100), nullable=True),
sa.Column('current_quantity', sa.Float(), nullable=False),
sa.Column('reserved_quantity', sa.Float(), nullable=False),
sa.Column('available_quantity', sa.Float(), nullable=False),
sa.Column('received_date', sa.DateTime(timezone=True), nullable=True),
sa.Column('expiration_date', sa.DateTime(timezone=True), nullable=True),
sa.Column('best_before_date', sa.DateTime(timezone=True), nullable=True),
sa.Column('original_expiration_date', sa.DateTime(timezone=True), nullable=True),
sa.Column('transformation_date', sa.DateTime(timezone=True), nullable=True),
sa.Column('final_expiration_date', sa.DateTime(timezone=True), nullable=True),
sa.Column('unit_cost', sa.Numeric(precision=10, scale=2), nullable=True),
sa.Column('total_cost', sa.Numeric(precision=10, scale=2), nullable=True),
sa.Column('storage_location', sa.String(length=100), nullable=True),
sa.Column('warehouse_zone', sa.String(length=50), nullable=True),
sa.Column('shelf_position', sa.String(length=50), nullable=True),
sa.Column('requires_refrigeration', sa.Boolean(), nullable=True),
sa.Column('requires_freezing', sa.Boolean(), nullable=True),
sa.Column('storage_temperature_min', sa.Float(), nullable=True),
sa.Column('storage_temperature_max', sa.Float(), nullable=True),
sa.Column('storage_humidity_max', sa.Float(), nullable=True),
sa.Column('shelf_life_days', sa.Integer(), nullable=True),
sa.Column('storage_instructions', sa.Text(), nullable=True),
sa.Column('is_available', sa.Boolean(), nullable=True),
sa.Column('is_expired', sa.Boolean(), nullable=True),
sa.Column('quality_status', sa.String(length=20), nullable=True),
sa.Column('created_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
sa.ForeignKeyConstraint(['ingredient_id'], ['ingredients.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_index('idx_stock_batch', 'stock', ['tenant_id', 'batch_number'], unique=False)
op.create_index('idx_stock_expiration', 'stock', ['tenant_id', 'expiration_date', 'is_available'], unique=False)
op.create_index('idx_stock_final_expiration', 'stock', ['tenant_id', 'final_expiration_date', 'is_available'], unique=False)
op.create_index('idx_stock_low_levels', 'stock', ['tenant_id', 'current_quantity', 'is_available'], unique=False)
op.create_index('idx_stock_production_stage', 'stock', ['tenant_id', 'production_stage', 'is_available'], unique=False)
op.create_index('idx_stock_quality', 'stock', ['tenant_id', 'quality_status', 'is_available'], unique=False)
op.create_index('idx_stock_tenant_ingredient', 'stock', ['tenant_id', 'ingredient_id'], unique=False)
op.create_index('idx_stock_transformation', 'stock', ['tenant_id', 'transformation_reference'], unique=False)
op.create_index(op.f('ix_stock_batch_number'), 'stock', ['batch_number'], unique=False)
op.create_index(op.f('ix_stock_expiration_date'), 'stock', ['expiration_date'], unique=False)
op.create_index(op.f('ix_stock_ingredient_id'), 'stock', ['ingredient_id'], unique=False)
op.create_index(op.f('ix_stock_is_expired'), 'stock', ['is_expired'], unique=False)
op.create_index(op.f('ix_stock_lot_number'), 'stock', ['lot_number'], unique=False)
op.create_index(op.f('ix_stock_production_stage'), 'stock', ['production_stage'], unique=False)
op.create_index(op.f('ix_stock_supplier_id'), 'stock', ['supplier_id'], unique=False)
op.create_index(op.f('ix_stock_tenant_id'), 'stock', ['tenant_id'], unique=False)
op.create_index(op.f('ix_stock_transformation_reference'), 'stock', ['transformation_reference'], unique=False)
# Create stock_receipts table (lot-level tracking)
# Note: The Enum will automatically create the type on first use
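# op.drop_table() does not remove that type, which is why downgrade() below issues an explicit DROP TYPE receiptstatus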
op.create_table(
'stock_receipts',
sa.Column('id', postgresql.UUID(as_uuid=True), primary_key=True),
sa.Column('tenant_id', postgresql.UUID(as_uuid=True), nullable=False),
sa.Column('po_id', postgresql.UUID(as_uuid=True), nullable=False),
sa.Column('po_number', sa.String(100), nullable=True),
sa.Column('received_at', sa.DateTime(timezone=True), nullable=False),
sa.Column('received_by_user_id', postgresql.UUID(as_uuid=True), nullable=True),
sa.Column('status', sa.Enum('draft', 'confirmed', 'cancelled', name='receiptstatus', create_type=True), nullable=False, server_default='draft'),
sa.Column('supplier_id', postgresql.UUID(as_uuid=True), nullable=True),
sa.Column('supplier_name', sa.String(255), nullable=True),
sa.Column('notes', sa.Text, nullable=True),
sa.Column('has_discrepancies', sa.Boolean, nullable=False, server_default='false'),
sa.Column('created_at', sa.DateTime(timezone=True), nullable=False, server_default=sa.text('now()')),
sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('confirmed_at', sa.DateTime(timezone=True), nullable=True),
)
op.create_index('idx_stock_receipts_tenant_status', 'stock_receipts', ['tenant_id', 'status'])
op.create_index('idx_stock_receipts_po', 'stock_receipts', ['po_id'])
op.create_index('idx_stock_receipts_received_at', 'stock_receipts', ['tenant_id', 'received_at'])
op.create_index('idx_stock_receipts_supplier', 'stock_receipts', ['supplier_id', 'received_at'])
# Create stock_receipt_line_items table
op.create_table(
'stock_receipt_line_items',
sa.Column('id', postgresql.UUID(as_uuid=True), primary_key=True),
sa.Column('tenant_id', postgresql.UUID(as_uuid=True), nullable=False),
sa.Column('receipt_id', postgresql.UUID(as_uuid=True), nullable=False),
sa.Column('ingredient_id', postgresql.UUID(as_uuid=True), nullable=False),
sa.Column('ingredient_name', sa.String(255), nullable=True),
sa.Column('po_line_id', postgresql.UUID(as_uuid=True), nullable=True),
sa.Column('expected_quantity', sa.Numeric(10, 2), nullable=False),
sa.Column('actual_quantity', sa.Numeric(10, 2), nullable=False),
sa.Column('unit_of_measure', sa.String(20), nullable=False),
sa.Column('has_discrepancy', sa.Boolean, nullable=False, server_default='false'),
sa.Column('discrepancy_reason', sa.Text, nullable=True),
sa.Column('unit_cost', sa.Numeric(10, 2), nullable=True),
sa.Column('total_cost', sa.Numeric(10, 2), nullable=True),
sa.Column('created_at', sa.DateTime(timezone=True), nullable=False, server_default=sa.text('now()')),
sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
sa.ForeignKeyConstraint(['receipt_id'], ['stock_receipts.id'], ondelete='CASCADE'),
)
op.create_index('idx_line_items_receipt', 'stock_receipt_line_items', ['receipt_id'])
op.create_index('idx_line_items_ingredient', 'stock_receipt_line_items', ['ingredient_id'])
op.create_index('idx_line_items_discrepancy', 'stock_receipt_line_items', ['tenant_id', 'has_discrepancy'])
# Create stock_lots table
op.create_table(
'stock_lots',
sa.Column('id', postgresql.UUID(as_uuid=True), primary_key=True),
sa.Column('tenant_id', postgresql.UUID(as_uuid=True), nullable=False),
sa.Column('line_item_id', postgresql.UUID(as_uuid=True), nullable=False),
sa.Column('stock_id', postgresql.UUID(as_uuid=True), nullable=True),
sa.Column('lot_number', sa.String(100), nullable=True),
sa.Column('supplier_lot_number', sa.String(100), nullable=True),
sa.Column('quantity', sa.Numeric(10, 2), nullable=False),
sa.Column('unit_of_measure', sa.String(20), nullable=False),
sa.Column('expiration_date', sa.Date, nullable=False),
sa.Column('best_before_date', sa.Date, nullable=True),
sa.Column('warehouse_location', sa.String(100), nullable=True),
sa.Column('storage_zone', sa.String(50), nullable=True),
sa.Column('quality_notes', sa.Text, nullable=True),
sa.Column('created_at', sa.DateTime(timezone=True), nullable=False, server_default=sa.text('now()')),
sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
sa.ForeignKeyConstraint(['line_item_id'], ['stock_receipt_line_items.id'], ondelete='CASCADE'),
)
op.create_index('idx_lots_line_item', 'stock_lots', ['line_item_id'])
op.create_index('idx_lots_stock', 'stock_lots', ['stock_id'])
op.create_index('idx_lots_expiration', 'stock_lots', ['tenant_id', 'expiration_date'])
op.create_index('idx_lots_lot_number', 'stock_lots', ['tenant_id', 'lot_number'])
# Create food_safety_alerts table
op.create_table('food_safety_alerts',
sa.Column('id', sa.UUID(), nullable=False),
sa.Column('tenant_id', sa.UUID(), nullable=False),
sa.Column('alert_code', sa.String(length=50), nullable=False),
sa.Column('alert_type', sa.Enum('TEMPERATURE_VIOLATION', 'EXPIRATION_WARNING', 'EXPIRED_PRODUCT', 'CONTAMINATION_RISK', 'ALLERGEN_CROSS_CONTAMINATION', 'STORAGE_VIOLATION', 'QUALITY_DEGRADATION', 'RECALL_NOTICE', 'CERTIFICATION_EXPIRY', 'SUPPLIER_COMPLIANCE_ISSUE', name='foodsafetyalerttype'), nullable=False),
sa.Column('severity', sa.String(length=20), nullable=False),
sa.Column('risk_level', sa.String(length=20), nullable=False),
sa.Column('source_entity_type', sa.String(length=50), nullable=False),
sa.Column('source_entity_id', sa.UUID(), nullable=False),
sa.Column('ingredient_id', sa.UUID(), nullable=True),
sa.Column('stock_id', sa.UUID(), nullable=True),
sa.Column('title', sa.String(length=200), nullable=False),
sa.Column('description', sa.Text(), nullable=False),
sa.Column('detailed_message', sa.Text(), nullable=True),
sa.Column('regulatory_requirement', sa.String(length=100), nullable=True),
sa.Column('compliance_standard', sa.Enum('HACCP', 'FDA', 'USDA', 'FSMA', 'SQF', 'BRC', 'IFS', 'ISO22000', 'ORGANIC', 'NON_GMO', 'ALLERGEN_FREE', 'KOSHER', 'HALAL', name='foodsafetystandard'), nullable=True),
sa.Column('regulatory_action_required', sa.Boolean(), nullable=False),
sa.Column('trigger_condition', sa.String(length=200), nullable=True),
sa.Column('threshold_value', sa.Numeric(precision=15, scale=4), nullable=True),
sa.Column('actual_value', sa.Numeric(precision=15, scale=4), nullable=True),
sa.Column('alert_data', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
sa.Column('environmental_factors', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
sa.Column('affected_products', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
sa.Column('public_health_risk', sa.Boolean(), nullable=False),
sa.Column('business_impact', sa.Text(), nullable=True),
sa.Column('estimated_loss', sa.Numeric(precision=12, scale=2), nullable=True),
sa.Column('status', sa.String(length=50), nullable=False),
sa.Column('alert_state', sa.String(length=50), nullable=False),
sa.Column('immediate_actions_taken', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
sa.Column('investigation_notes', sa.Text(), nullable=True),
sa.Column('resolution_action', sa.String(length=200), nullable=True),
sa.Column('resolution_notes', sa.Text(), nullable=True),
sa.Column('corrective_actions', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
sa.Column('preventive_measures', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
sa.Column('first_occurred_at', sa.DateTime(timezone=True), nullable=False),
sa.Column('last_occurred_at', sa.DateTime(timezone=True), nullable=False),
sa.Column('acknowledged_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('resolved_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('escalation_deadline', sa.DateTime(timezone=True), nullable=True),
sa.Column('occurrence_count', sa.Integer(), nullable=False),
sa.Column('is_recurring', sa.Boolean(), nullable=False),
sa.Column('recurrence_pattern', sa.String(length=100), nullable=True),
sa.Column('assigned_to', sa.UUID(), nullable=True),
sa.Column('assigned_role', sa.String(length=50), nullable=True),
sa.Column('escalated_to', sa.UUID(), nullable=True),
sa.Column('escalation_level', sa.Integer(), nullable=False),
sa.Column('notification_sent', sa.Boolean(), nullable=False),
sa.Column('notification_methods', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
sa.Column('notification_recipients', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
sa.Column('regulatory_notification_required', sa.Boolean(), nullable=False),
sa.Column('regulatory_notification_sent', sa.Boolean(), nullable=False),
sa.Column('documentation', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
sa.Column('audit_trail', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
sa.Column('external_reference', sa.String(length=100), nullable=True),
sa.Column('detection_time', sa.DateTime(timezone=True), nullable=True),
sa.Column('response_time_minutes', sa.Integer(), nullable=True),
sa.Column('resolution_time_minutes', sa.Integer(), nullable=True),
sa.Column('alert_accuracy', sa.Boolean(), nullable=True),
sa.Column('false_positive', sa.Boolean(), nullable=False),
sa.Column('feedback_notes', sa.Text(), nullable=True),
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
sa.Column('created_by', sa.UUID(), nullable=True),
sa.Column('updated_by', sa.UUID(), nullable=True),
sa.ForeignKeyConstraint(['ingredient_id'], ['ingredients.id'], ),
sa.ForeignKeyConstraint(['stock_id'], ['stock.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_food_safety_alerts_alert_code'), 'food_safety_alerts', ['alert_code'], unique=False)
op.create_index(op.f('ix_food_safety_alerts_alert_type'), 'food_safety_alerts', ['alert_type'], unique=False)
op.create_index(op.f('ix_food_safety_alerts_first_occurred_at'), 'food_safety_alerts', ['first_occurred_at'], unique=False)
op.create_index(op.f('ix_food_safety_alerts_ingredient_id'), 'food_safety_alerts', ['ingredient_id'], unique=False)
op.create_index(op.f('ix_food_safety_alerts_severity'), 'food_safety_alerts', ['severity'], unique=False)
op.create_index(op.f('ix_food_safety_alerts_source_entity_id'), 'food_safety_alerts', ['source_entity_id'], unique=False)
op.create_index(op.f('ix_food_safety_alerts_status'), 'food_safety_alerts', ['status'], unique=False)
op.create_index(op.f('ix_food_safety_alerts_stock_id'), 'food_safety_alerts', ['stock_id'], unique=False)
op.create_index(op.f('ix_food_safety_alerts_tenant_id'), 'food_safety_alerts', ['tenant_id'], unique=False)
# Create stock_alerts table
op.create_table('stock_alerts',
sa.Column('id', sa.UUID(), nullable=False),
sa.Column('tenant_id', sa.UUID(), nullable=False),
sa.Column('ingredient_id', sa.UUID(), nullable=False),
sa.Column('stock_id', sa.UUID(), nullable=True),
sa.Column('alert_type', sa.String(length=50), nullable=False),
sa.Column('severity', sa.String(length=20), nullable=False),
sa.Column('title', sa.String(length=255), nullable=False),
sa.Column('message', sa.Text(), nullable=False),
sa.Column('current_quantity', sa.Float(), nullable=True),
sa.Column('threshold_value', sa.Float(), nullable=True),
sa.Column('expiration_date', sa.DateTime(timezone=True), nullable=True),
sa.Column('is_active', sa.Boolean(), nullable=True),
sa.Column('is_acknowledged', sa.Boolean(), nullable=True),
sa.Column('acknowledged_by', sa.UUID(), nullable=True),
sa.Column('acknowledged_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('is_resolved', sa.Boolean(), nullable=True),
sa.Column('resolved_by', sa.UUID(), nullable=True),
sa.Column('resolved_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('resolution_notes', sa.Text(), nullable=True),
sa.Column('created_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
sa.ForeignKeyConstraint(['ingredient_id'], ['ingredients.id'], ),
sa.ForeignKeyConstraint(['stock_id'], ['stock.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_index('idx_alerts_ingredient', 'stock_alerts', ['ingredient_id', 'is_active'], unique=False)
op.create_index('idx_alerts_tenant_active', 'stock_alerts', ['tenant_id', 'is_active', 'created_at'], unique=False)
op.create_index('idx_alerts_type_severity', 'stock_alerts', ['alert_type', 'severity', 'is_active'], unique=False)
op.create_index('idx_alerts_unresolved', 'stock_alerts', ['tenant_id', 'is_resolved', 'is_active'], unique=False)
op.create_index(op.f('ix_stock_alerts_alert_type'), 'stock_alerts', ['alert_type'], unique=False)
op.create_index(op.f('ix_stock_alerts_ingredient_id'), 'stock_alerts', ['ingredient_id'], unique=False)
op.create_index(op.f('ix_stock_alerts_stock_id'), 'stock_alerts', ['stock_id'], unique=False)
op.create_index(op.f('ix_stock_alerts_tenant_id'), 'stock_alerts', ['tenant_id'], unique=False)
# Create stock_movements table
op.create_table('stock_movements',
sa.Column('id', sa.UUID(), nullable=False),
sa.Column('tenant_id', sa.UUID(), nullable=False),
sa.Column('ingredient_id', sa.UUID(), nullable=False),
sa.Column('stock_id', sa.UUID(), nullable=True),
sa.Column('movement_type', sa.Enum('PURCHASE', 'PRODUCTION_USE', 'TRANSFORMATION', 'ADJUSTMENT', 'WASTE', 'TRANSFER', 'RETURN', 'INITIAL_STOCK', name='stockmovementtype'), nullable=False),
sa.Column('quantity', sa.Float(), nullable=False),
sa.Column('unit_cost', sa.Numeric(precision=10, scale=2), nullable=True),
sa.Column('total_cost', sa.Numeric(precision=10, scale=2), nullable=True),
sa.Column('quantity_before', sa.Float(), nullable=True),
sa.Column('quantity_after', sa.Float(), nullable=True),
sa.Column('reference_number', sa.String(length=100), nullable=True),
sa.Column('supplier_id', sa.UUID(), nullable=True),
sa.Column('notes', sa.Text(), nullable=True),
sa.Column('reason_code', sa.String(length=50), nullable=True),
sa.Column('movement_date', sa.DateTime(timezone=True), nullable=False),
sa.Column('created_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('created_by', sa.UUID(), nullable=True),
sa.ForeignKeyConstraint(['ingredient_id'], ['ingredients.id'], ),
sa.ForeignKeyConstraint(['stock_id'], ['stock.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_index('idx_movements_reference', 'stock_movements', ['reference_number'], unique=False)
op.create_index('idx_movements_supplier', 'stock_movements', ['supplier_id', 'movement_date'], unique=False)
op.create_index('idx_movements_tenant_date', 'stock_movements', ['tenant_id', 'movement_date'], unique=False)
op.create_index('idx_movements_tenant_ingredient', 'stock_movements', ['tenant_id', 'ingredient_id', 'movement_date'], unique=False)
op.create_index('idx_movements_type', 'stock_movements', ['tenant_id', 'movement_type', 'movement_date'], unique=False)
op.create_index(op.f('ix_stock_movements_ingredient_id'), 'stock_movements', ['ingredient_id'], unique=False)
op.create_index(op.f('ix_stock_movements_movement_date'), 'stock_movements', ['movement_date'], unique=False)
op.create_index(op.f('ix_stock_movements_movement_type'), 'stock_movements', ['movement_type'], unique=False)
op.create_index(op.f('ix_stock_movements_reference_number'), 'stock_movements', ['reference_number'], unique=False)
op.create_index(op.f('ix_stock_movements_stock_id'), 'stock_movements', ['stock_id'], unique=False)
op.create_index(op.f('ix_stock_movements_supplier_id'), 'stock_movements', ['supplier_id'], unique=False)
op.create_index(op.f('ix_stock_movements_tenant_id'), 'stock_movements', ['tenant_id'], unique=False)
def downgrade() -> None:
# Drop all tables in reverse order
op.drop_index(op.f('ix_stock_movements_tenant_id'), table_name='stock_movements')
op.drop_index(op.f('ix_stock_movements_supplier_id'), table_name='stock_movements')
op.drop_index(op.f('ix_stock_movements_stock_id'), table_name='stock_movements')
op.drop_index(op.f('ix_stock_movements_reference_number'), table_name='stock_movements')
op.drop_index(op.f('ix_stock_movements_movement_type'), table_name='stock_movements')
op.drop_index(op.f('ix_stock_movements_movement_date'), table_name='stock_movements')
op.drop_index(op.f('ix_stock_movements_ingredient_id'), table_name='stock_movements')
op.drop_index('idx_movements_type', table_name='stock_movements')
op.drop_index('idx_movements_tenant_ingredient', table_name='stock_movements')
op.drop_index('idx_movements_tenant_date', table_name='stock_movements')
op.drop_index('idx_movements_supplier', table_name='stock_movements')
op.drop_index('idx_movements_reference', table_name='stock_movements')
op.drop_table('stock_movements')
op.drop_index(op.f('ix_stock_alerts_tenant_id'), table_name='stock_alerts')
op.drop_index(op.f('ix_stock_alerts_stock_id'), table_name='stock_alerts')
op.drop_index(op.f('ix_stock_alerts_ingredient_id'), table_name='stock_alerts')
op.drop_index(op.f('ix_stock_alerts_alert_type'), table_name='stock_alerts')
op.drop_index('idx_alerts_unresolved', table_name='stock_alerts')
op.drop_index('idx_alerts_type_severity', table_name='stock_alerts')
op.drop_index('idx_alerts_tenant_active', table_name='stock_alerts')
op.drop_index('idx_alerts_ingredient', table_name='stock_alerts')
op.drop_table('stock_alerts')
op.drop_index(op.f('ix_food_safety_alerts_tenant_id'), table_name='food_safety_alerts')
op.drop_index(op.f('ix_food_safety_alerts_stock_id'), table_name='food_safety_alerts')
op.drop_index(op.f('ix_food_safety_alerts_status'), table_name='food_safety_alerts')
op.drop_index(op.f('ix_food_safety_alerts_source_entity_id'), table_name='food_safety_alerts')
op.drop_index(op.f('ix_food_safety_alerts_severity'), table_name='food_safety_alerts')
op.drop_index(op.f('ix_food_safety_alerts_ingredient_id'), table_name='food_safety_alerts')
op.drop_index(op.f('ix_food_safety_alerts_first_occurred_at'), table_name='food_safety_alerts')
op.drop_index(op.f('ix_food_safety_alerts_alert_type'), table_name='food_safety_alerts')
op.drop_index(op.f('ix_food_safety_alerts_alert_code'), table_name='food_safety_alerts')
op.drop_table('food_safety_alerts')
# Drop stock receipts tables
op.drop_index('idx_lots_lot_number', table_name='stock_lots')
op.drop_index('idx_lots_expiration', table_name='stock_lots')
op.drop_index('idx_lots_stock', table_name='stock_lots')
op.drop_index('idx_lots_line_item', table_name='stock_lots')
op.drop_table('stock_lots')
op.drop_index('idx_line_items_discrepancy', table_name='stock_receipt_line_items')
op.drop_index('idx_line_items_ingredient', table_name='stock_receipt_line_items')
op.drop_index('idx_line_items_receipt', table_name='stock_receipt_line_items')
op.drop_table('stock_receipt_line_items')
op.drop_index('idx_stock_receipts_supplier', table_name='stock_receipts')
op.drop_index('idx_stock_receipts_received_at', table_name='stock_receipts')
op.drop_index('idx_stock_receipts_po', table_name='stock_receipts')
op.drop_index('idx_stock_receipts_tenant_status', table_name='stock_receipts')
op.drop_table('stock_receipts')
op.execute('DROP TYPE receiptstatus')
# Continue with other tables
op.drop_index(op.f('ix_stock_transformation_reference'), table_name='stock')
op.drop_index(op.f('ix_stock_tenant_id'), table_name='stock')
op.drop_index(op.f('ix_stock_supplier_id'), table_name='stock')
op.drop_index(op.f('ix_stock_production_stage'), table_name='stock')
op.drop_index(op.f('ix_stock_lot_number'), table_name='stock')
op.drop_index(op.f('ix_stock_is_expired'), table_name='stock')
op.drop_index(op.f('ix_stock_ingredient_id'), table_name='stock')
op.drop_index(op.f('ix_stock_expiration_date'), table_name='stock')
op.drop_index(op.f('ix_stock_batch_number'), table_name='stock')
op.drop_index('idx_stock_transformation', table_name='stock')
op.drop_index('idx_stock_tenant_ingredient', table_name='stock')
op.drop_index('idx_stock_quality', table_name='stock')
op.drop_index('idx_stock_production_stage', table_name='stock')
op.drop_index('idx_stock_low_levels', table_name='stock')
op.drop_index('idx_stock_final_expiration', table_name='stock')
op.drop_index('idx_stock_expiration', table_name='stock')
op.drop_index('idx_stock_batch', table_name='stock')
op.drop_table('stock')
op.drop_index(op.f('ix_product_transformations_transformation_reference'), table_name='product_transformations')
op.drop_index(op.f('ix_product_transformations_tenant_id'), table_name='product_transformations')
op.drop_index('idx_transformations_tenant_date', table_name='product_transformations')
op.drop_index('idx_transformations_target', table_name='product_transformations')
op.drop_index('idx_transformations_stages', table_name='product_transformations')
op.drop_index('idx_transformations_source', table_name='product_transformations')
op.drop_index('idx_transformations_reference', table_name='product_transformations')
op.drop_table('product_transformations')
op.drop_index(op.f('ix_food_safety_compliance_tenant_id'), table_name='food_safety_compliance')
op.drop_index(op.f('ix_food_safety_compliance_standard'), table_name='food_safety_compliance')
op.drop_index(op.f('ix_food_safety_compliance_next_audit_date'), table_name='food_safety_compliance')
op.drop_index(op.f('ix_food_safety_compliance_ingredient_id'), table_name='food_safety_compliance')
op.drop_index(op.f('ix_food_safety_compliance_expiration_date'), table_name='food_safety_compliance')
op.drop_table('food_safety_compliance')
op.drop_index(op.f('ix_temperature_logs_tenant_id'), table_name='temperature_logs')
op.drop_index(op.f('ix_temperature_logs_storage_location'), table_name='temperature_logs')
op.drop_index(op.f('ix_temperature_logs_recorded_at'), table_name='temperature_logs')
op.drop_table('temperature_logs')
op.drop_index('ix_ingredients_recipe_id', table_name='ingredients')
op.drop_index('ix_ingredients_produced_locally', table_name='ingredients')
op.drop_index(op.f('ix_ingredients_tenant_id'), table_name='ingredients')
op.drop_index(op.f('ix_ingredients_sku'), table_name='ingredients')
op.drop_index(op.f('ix_ingredients_product_type'), table_name='ingredients')
op.drop_index(op.f('ix_ingredients_product_category'), table_name='ingredients')
op.drop_index(op.f('ix_ingredients_name'), table_name='ingredients')
op.drop_index(op.f('ix_ingredients_ingredient_category'), table_name='ingredients')
op.drop_index(op.f('ix_ingredients_barcode'), table_name='ingredients')
op.drop_index('idx_ingredients_tenant_sku', table_name='ingredients')
op.drop_index('idx_ingredients_tenant_name', table_name='ingredients')
op.drop_index('idx_ingredients_stock_levels', table_name='ingredients')
op.drop_index('idx_ingredients_product_type', table_name='ingredients')
op.drop_index('idx_ingredients_product_category', table_name='ingredients')
op.drop_index('idx_ingredients_ingredient_category', table_name='ingredients')
op.drop_index('idx_ingredients_barcode', table_name='ingredients')
op.drop_table('ingredients')
op.drop_index(op.f('ix_audit_logs_user_id'), table_name='audit_logs')
op.drop_index(op.f('ix_audit_logs_tenant_id'), table_name='audit_logs')
op.drop_index(op.f('ix_audit_logs_severity'), table_name='audit_logs')
op.drop_index(op.f('ix_audit_logs_service_name'), table_name='audit_logs')
op.drop_index(op.f('ix_audit_logs_resource_type'), table_name='audit_logs')
op.drop_index(op.f('ix_audit_logs_resource_id'), table_name='audit_logs')
op.drop_index(op.f('ix_audit_logs_created_at'), table_name='audit_logs')
op.drop_index(op.f('ix_audit_logs_action'), table_name='audit_logs')
op.drop_index('idx_audit_user_created', table_name='audit_logs')
op.drop_index('idx_audit_tenant_created', table_name='audit_logs')
op.drop_index('idx_audit_severity_created', table_name='audit_logs')
op.drop_index('idx_audit_service_created', table_name='audit_logs')
op.drop_index('idx_audit_resource_type_action', table_name='audit_logs')
op.drop_table('audit_logs')
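
For completeness, applying this unified revision programmatically looks roughly like the sketch below, using Alembic's Python API. The alembic.ini location and the connection URL are illustrative assumptions; in practice the env.py above resolves the URL from the environment.

# Minimal sketch: apply the unified schema with Alembic's Python API.
import os

from alembic import command
from alembic.config import Config

os.environ.setdefault(
    "DATABASE_URL",
    "postgresql+asyncpg://inventory_user:change-me@localhost:5432/inventory_db",
)

cfg = Config("alembic.ini")
command.upgrade(cfg, "head")        # runs upgrade() above
# command.downgrade(cfg, "base")    # would run downgrade() and remove the schema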