Refactor services alembic
@@ -1,183 +0,0 @@
#!/usr/bin/env python3
"""
Script to add sample recipes for testing
"""
import asyncio
import os
import sys
from datetime import datetime
from decimal import Decimal

# Add the app directory to Python path
sys.path.append(os.path.join(os.path.dirname(__file__), 'app'))

from core.database import get_db_session
from repositories.recipe_repository import RecipeRepository
from schemas.recipes import RecipeCreate, RecipeIngredientCreate

# Sample tenant ID - you should replace this with a real tenant ID from your system
SAMPLE_TENANT_ID = "946206b3-7446-436b-b29d-f265b28d9ff5"

# Sample finished product IDs - you should replace these with real product IDs from your system
SAMPLE_PRODUCT_IDS = [
    "550e8400-e29b-41d4-a716-446655440001",  # Pan Integral
    "550e8400-e29b-41d4-a716-446655440002",  # Croissant
    "550e8400-e29b-41d4-a716-446655440003",  # Tarta de Manzana
    "550e8400-e29b-41d4-a716-446655440004",  # Magdalenas
]

# Sample ingredient IDs - you should replace these with real ingredient IDs from your system
SAMPLE_INGREDIENT_IDS = [
    "660e8400-e29b-41d4-a716-446655440001",  # Harina integral
    "660e8400-e29b-41d4-a716-446655440002",  # Agua
    "660e8400-e29b-41d4-a716-446655440003",  # Levadura
    "660e8400-e29b-41d4-a716-446655440004",  # Sal
    "660e8400-e29b-41d4-a716-446655440005",  # Harina de fuerza
    "660e8400-e29b-41d4-a716-446655440006",  # Mantequilla
    "660e8400-e29b-41d4-a716-446655440007",  # Leche
    "660e8400-e29b-41d4-a716-446655440008",  # Azúcar
    "660e8400-e29b-41d4-a716-446655440009",  # Manzanas
    "660e8400-e29b-41d4-a716-446655440010",  # Huevos
    "660e8400-e29b-41d4-a716-446655440011",  # Limón
    "660e8400-e29b-41d4-a716-446655440012",  # Canela
]


async def add_sample_recipes():
    """Add sample recipes to the database"""
    async with get_db_session() as session:
        recipe_repo = RecipeRepository(session)

        sample_recipes = [
            {
                "name": "Pan de Molde Integral",
                "recipe_code": "PAN001",
                "finished_product_id": SAMPLE_PRODUCT_IDS[0],
                "description": "Pan integral artesanal con semillas, perfecto para desayunos saludables.",
                "category": "bread",
                "difficulty_level": 2,
                "yield_quantity": 1,
                "yield_unit": "units",
                "prep_time_minutes": 120,
                "cook_time_minutes": 35,
                "total_time_minutes": 155,
                "is_signature_item": False,
                "target_margin_percentage": Decimal("40.0"),
                "ingredients": [
                    {"ingredient_id": SAMPLE_INGREDIENT_IDS[0], "quantity": 500, "unit": "g", "is_optional": False, "ingredient_order": 1},
                    {"ingredient_id": SAMPLE_INGREDIENT_IDS[1], "quantity": 300, "unit": "ml", "is_optional": False, "ingredient_order": 2},
                    {"ingredient_id": SAMPLE_INGREDIENT_IDS[2], "quantity": 10, "unit": "g", "is_optional": False, "ingredient_order": 3},
                    {"ingredient_id": SAMPLE_INGREDIENT_IDS[3], "quantity": 8, "unit": "g", "is_optional": False, "ingredient_order": 4},
                ]
            },
            {
                "name": "Croissants de Mantequilla",
                "recipe_code": "CRO001",
                "finished_product_id": SAMPLE_PRODUCT_IDS[1],
                "description": "Croissants franceses tradicionales con laminado de mantequilla.",
                "category": "pastry",
                "difficulty_level": 3,
                "yield_quantity": 12,
                "yield_unit": "units",
                "prep_time_minutes": 480,
                "cook_time_minutes": 20,
                "total_time_minutes": 500,
                "is_signature_item": True,
                "target_margin_percentage": Decimal("52.8"),
                "ingredients": [
                    {"ingredient_id": SAMPLE_INGREDIENT_IDS[4], "quantity": 500, "unit": "g", "is_optional": False, "ingredient_order": 1},
                    {"ingredient_id": SAMPLE_INGREDIENT_IDS[5], "quantity": 250, "unit": "g", "is_optional": False, "ingredient_order": 2},
                    {"ingredient_id": SAMPLE_INGREDIENT_IDS[6], "quantity": 150, "unit": "ml", "is_optional": False, "ingredient_order": 3},
                    {"ingredient_id": SAMPLE_INGREDIENT_IDS[7], "quantity": 50, "unit": "g", "is_optional": False, "ingredient_order": 4},
                ]
            },
            {
                "name": "Tarta de Manzana",
                "recipe_code": "TAR001",
                "finished_product_id": SAMPLE_PRODUCT_IDS[2],
                "description": "Tarta casera de manzana con canela y masa quebrada.",
                "category": "cake",
                "difficulty_level": 1,
                "yield_quantity": 8,
                "yield_unit": "portions",
                "prep_time_minutes": 45,
                "cook_time_minutes": 40,
                "total_time_minutes": 85,
                "is_signature_item": False,
                "target_margin_percentage": Decimal("65.0"),
                "ingredients": [
                    {"ingredient_id": SAMPLE_INGREDIENT_IDS[8], "quantity": 1000, "unit": "g", "is_optional": False, "ingredient_order": 1},
                    {"ingredient_id": SAMPLE_INGREDIENT_IDS[0], "quantity": 250, "unit": "g", "is_optional": False, "ingredient_order": 2},
                    {"ingredient_id": SAMPLE_INGREDIENT_IDS[5], "quantity": 125, "unit": "g", "is_optional": False, "ingredient_order": 3},
                    {"ingredient_id": SAMPLE_INGREDIENT_IDS[7], "quantity": 100, "unit": "g", "is_optional": False, "ingredient_order": 4},
                    {"ingredient_id": SAMPLE_INGREDIENT_IDS[11], "quantity": 5, "unit": "g", "is_optional": True, "ingredient_order": 5},
                ]
            },
            {
                "name": "Magdalenas de Limón",
                "recipe_code": "MAG001",
                "finished_product_id": SAMPLE_PRODUCT_IDS[3],
                "description": "Magdalenas suaves y esponjosas con ralladura de limón.",
                "category": "pastry",
                "difficulty_level": 1,
                "yield_quantity": 12,
                "yield_unit": "units",
                "prep_time_minutes": 20,
                "cook_time_minutes": 25,
                "total_time_minutes": 45,
                "is_signature_item": False,
                "target_margin_percentage": Decimal("57.8"),
                "ingredients": [
                    {"ingredient_id": SAMPLE_INGREDIENT_IDS[0], "quantity": 200, "unit": "g", "is_optional": False, "ingredient_order": 1},
                    {"ingredient_id": SAMPLE_INGREDIENT_IDS[9], "quantity": 3, "unit": "units", "is_optional": False, "ingredient_order": 2},
                    {"ingredient_id": SAMPLE_INGREDIENT_IDS[7], "quantity": 150, "unit": "g", "is_optional": False, "ingredient_order": 3},
                    {"ingredient_id": SAMPLE_INGREDIENT_IDS[10], "quantity": 2, "unit": "units", "is_optional": False, "ingredient_order": 4},
                ]
            }
        ]

        for recipe_data in sample_recipes:
            try:
                # Prepare ingredients
                ingredients = [
                    RecipeIngredientCreate(**ing_data)
                    for ing_data in recipe_data.pop("ingredients")
                ]

                # Create recipe
                recipe_create = RecipeCreate(
                    **recipe_data,
                    ingredients=ingredients
                )

                # Check if recipe already exists
                existing_recipes = await recipe_repo.search_recipes(
                    tenant_id=SAMPLE_TENANT_ID,
                    search_term=recipe_data["name"]
                )

                recipe_exists = any(
                    recipe.name == recipe_data["name"]
                    for recipe in existing_recipes
                )

                if not recipe_exists:
                    recipe = await recipe_repo.create_recipe(
                        tenant_id=SAMPLE_TENANT_ID,
                        recipe_data=recipe_create
                    )
                    print(f"✅ Created recipe: {recipe.name}")
                else:
                    print(f"⏭️ Recipe already exists: {recipe_data['name']}")

            except Exception as e:
                print(f"❌ Error creating recipe {recipe_data['name']}: {e}")

        await session.commit()
        print("\n🎉 Sample recipes setup completed!")


if __name__ == "__main__":
    print("🧁 Adding sample recipes to database...")
    print(f"📍 Tenant ID: {SAMPLE_TENANT_ID}")
    print("=" * 50)

    asyncio.run(add_sample_recipes())
services/recipes/alembic.ini (new file, 84 lines)
@@ -0,0 +1,84 @@
# ================================================================
# services/recipes/alembic.ini - Alembic Configuration
# ================================================================
[alembic]
# path to migration scripts
script_location = migrations

# template used to generate migration file names
file_template = %%(year)d%%(month).2d%%(day).2d_%%(hour).2d%%(minute).2d_%%(rev)s_%%(slug)s

# sys.path path, will be prepended to sys.path if present.
prepend_sys_path = .

# timezone to use when rendering the date within the migration file
# as well as the filename.
timezone = Europe/Madrid

# max length of characters to apply to the
# "slug" field
truncate_slug_length = 40

# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
revision_environment = false

# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
sourceless = false

# version of a migration file's filename format
version_num_format = %s

# version path separator
version_path_separator = os

# set to 'true' to search source files recursively
# in each "version_locations" directory
recursive_version_locations = false

# the output encoding used when revision files
# are written from script.py.mako
output_encoding = utf-8

# Database URL - will be overridden by environment variable or settings
sqlalchemy.url = postgresql+asyncpg://recipes_user:password@recipes-db-service:5432/recipes_db

[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
# on newly generated revision scripts.

[loggers]
keys = root,sqlalchemy,alembic

[handlers]
keys = console

[formatters]
keys = generic

[logger_root]
level = WARN
handlers = console
qualname =

[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine

[logger_alembic]
level = INFO
handlers =
qualname = alembic

[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic

[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S
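For context (not part of this commit): besides the alembic CLI, a configuration file like the one above can be driven from Python through Alembic's command API. A minimal sketch, assuming the file lives at the committed path and that, as in env.py further down, a DATABASE_URL environment variable should win over the placeholder URL in the ini; the helper name is hypothetical.

import os

from alembic import command
from alembic.config import Config


def upgrade_recipes_db(ini_path: str = "services/recipes/alembic.ini") -> None:
    cfg = Config(ini_path)
    # Prefer the runtime database URL over the placeholder committed in the ini
    database_url = os.getenv("DATABASE_URL")
    if database_url:
        cfg.set_main_option("sqlalchemy.url", database_url)
    command.upgrade(cfg, "head")


if __name__ == "__main__":
    upgrade_recipes_db()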
@@ -1,41 +0,0 @@
"""Add quality check configuration to recipes

Revision ID: 004
Revises: 003
Create Date: 2024-01-15 10:00:00.000000

"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql

# revision identifiers, used by Alembic.
revision = '004'
down_revision = '003'
branch_labels = None
depends_on = None


def upgrade():
    """Upgrade database schema to add quality check configuration"""

    # Add quality_check_configuration column to recipes table
    op.add_column('recipes', sa.Column('quality_check_configuration', postgresql.JSONB, nullable=True))

    # Create index for better performance on quality configuration queries
    op.create_index(
        'ix_recipes_quality_check_configuration',
        'recipes',
        ['quality_check_configuration'],
        postgresql_using='gin'
    )


def downgrade():
    """Downgrade database schema"""

    # Drop index
    op.drop_index('ix_recipes_quality_check_configuration')

    # Remove quality_check_configuration column
    op.drop_column('recipes', 'quality_check_configuration')
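For reference: the GIN index created by this (now deleted) migration is what makes JSONB containment filters cheap. A hypothetical query sketch, not from the commit; the table shape is reduced to the relevant columns and the key name inside the JSON document is illustrative. SQLAlchemy's JSONB contains() renders the Postgres @> operator, which this kind of GIN index can serve.

import sqlalchemy as sa
from sqlalchemy.dialects import postgresql

metadata = sa.MetaData()
recipes = sa.Table(
    "recipes",
    metadata,
    sa.Column("id", postgresql.UUID(as_uuid=True), primary_key=True),
    sa.Column("quality_check_configuration", postgresql.JSONB, nullable=True),
)

# Find recipes whose quality configuration contains a given key/value pair;
# Postgres can answer this from the GIN index instead of a sequential scan.
stmt = sa.select(recipes.c.id).where(
    recipes.c.quality_check_configuration.contains({"requires_visual_check": True})
)
print(stmt)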
@@ -1,94 +0,0 @@
# A generic, single database configuration.

[alembic]
# path to migration scripts
script_location = .

# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
# Uncomment the line below if you want the files to be prepended with date and time
# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s

# sys.path path, will be prepended to sys.path if present.
# defaults to the current working directory.
prepend_sys_path = .

# timezone to use when rendering the date within the migration file
# as well as the filename.
# If specified, requires the python-dateutil library that can be
# installed by adding `alembic[tz]` to the pip requirements
# string value is passed to dateutil.tz.gettz()
# leave blank for localtime
# timezone =

# max length of characters to apply to the
# "slug" field
# truncate_slug_length = 40

# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false

# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false

# version number format string
# version_num_format = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d

# version number path regex
# version_path_separator = :
# version_path_separator = os  # Use os.pathsep. Default configuration used when version_path_separator is not provided

# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8

sqlalchemy.url = postgresql://recipes_user:recipes_pass@localhost:5432/recipes_db


[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
# on newly generated revision scripts. See the documentation for further
# detail and examples

# format using "black" - use the console_scripts runner, against the "black" entrypoint
# hooks = black
# black.type = console_scripts
# black.entrypoint = black
# black.options = -l 79 REVISION_SCRIPT_FILENAME

# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic

[handlers]
keys = console

[formatters]
keys = generic

[logger_root]
level = WARN
handlers = console
qualname =

[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine

[logger_alembic]
level = INFO
handlers =
qualname = alembic

[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic

[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S
@@ -1,71 +1,96 @@
 """Alembic environment configuration for recipes service"""
 
+import asyncio
 import logging
 import os
 import sys
 from logging.config import fileConfig
 
-from sqlalchemy import engine_from_config
 from sqlalchemy import pool
 
+from sqlalchemy.engine import Connection
+from sqlalchemy.ext.asyncio import async_engine_from_config
 from alembic import context
 
-# Add the parent directory to the path so we can import our modules
-sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))
+# Add the service directory to the Python path
+service_path = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
+if service_path not in sys.path:
+    sys.path.insert(0, service_path)
 
-# Import the Base class and all models
-from shared.database.base import Base
-from app.models.recipes import *
+# Add shared modules to path
+shared_path = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "..", "shared"))
+if shared_path not in sys.path:
+    sys.path.insert(0, shared_path)
 
-# this is the Alembic Config object, which provides
-# access to the values within the .ini file in use.
+try:
+    from app.core.config import settings
+    from shared.database.base import Base
+
+    # Import all models to ensure they are registered with Base.metadata
+    from app.models import *  # Import all models
+
+except ImportError as e:
+    print(f"Import error in migrations env.py: {e}")
+    print(f"Current Python path: {sys.path}")
+    raise
+
+# this is the Alembic Config object
 config = context.config
 
-# Interpret the config file for Python logging.
-# This line sets up loggers basically.
+# Set database URL from settings if not already set
+database_url = os.getenv('DATABASE_URL') or getattr(settings, 'DATABASE_URL', None)
+if database_url:
+    config.set_main_option("sqlalchemy.url", database_url)
+
+# Interpret the config file for Python logging
 if config.config_file_name is not None:
     fileConfig(config.config_file_name)
 
-# Set the target metadata
+# Set target metadata
 target_metadata = Base.metadata
 
-def get_database_url():
-    """Get database URL from environment or config"""
-    return os.getenv('RECIPES_DATABASE_URL', config.get_main_option("sqlalchemy.url"))
 
 def run_migrations_offline() -> None:
     """Run migrations in 'offline' mode."""
-    url = get_database_url()
+    url = config.get_main_option("sqlalchemy.url")
     context.configure(
         url=url,
         target_metadata=target_metadata,
         literal_binds=True,
         dialect_opts={"paramstyle": "named"},
         compare_type=True,
         compare_server_default=True,
     )
 
     with context.begin_transaction():
         context.run_migrations()
 
+def do_run_migrations(connection: Connection) -> None:
+    context.configure(
+        connection=connection,
+        target_metadata=target_metadata,
+        compare_type=True,
+        compare_server_default=True,
+    )
 
-def run_migrations_online() -> None:
+    with context.begin_transaction():
+        context.run_migrations()
+
+async def run_async_migrations() -> None:
     """Run migrations in 'online' mode."""
-    configuration = config.get_section(config.config_ini_section)
-    configuration["sqlalchemy.url"] = get_database_url()
-
-    connectable = engine_from_config(
-        configuration,
+    connectable = async_engine_from_config(
+        config.get_section(config.config_ini_section, {}),
         prefix="sqlalchemy.",
         poolclass=pool.NullPool,
     )
 
-    with connectable.connect() as connection:
-        context.configure(
-            connection=connection,
-            target_metadata=target_metadata
-        )
+    async with connectable.connect() as connection:
+        await connection.run_sync(do_run_migrations)
 
-        with context.begin_transaction():
-            context.run_migrations()
+    await connectable.dispose()
+
+def run_migrations_online() -> None:
+    """Run migrations in 'online' mode."""
+    asyncio.run(run_async_migrations())
 
 if context.is_offline_mode():
     run_migrations_offline()
 else:
     run_migrations_online()
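The key move in the new env.py is the run_sync bridge: the async engine's connection hands a plain synchronous Connection to Alembic's (synchronous) migration runner. A minimal standalone sketch of the same pattern, not part of the commit; it assumes asyncpg is installed and a reachable database at the illustrative URL below.

import asyncio

from sqlalchemy import inspect
from sqlalchemy.ext.asyncio import create_async_engine


async def show_tables() -> None:
    # Illustrative URL; same shape as the one in alembic.ini above
    engine = create_async_engine(
        "postgresql+asyncpg://recipes_user:password@localhost:5432/recipes_db"
    )
    async with engine.connect() as connection:
        # run_sync passes the underlying sync Connection to a regular callable,
        # which is exactly how do_run_migrations is driven in env.py
        tables = await connection.run_sync(
            lambda sync_conn: inspect(sync_conn).get_table_names()
        )
        print(tables)
    await engine.dispose()


asyncio.run(show_tables())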
@@ -5,15 +5,17 @@ Revises: ${down_revision | comma,n}
 Create Date: ${create_date}
 
 """
+from typing import Sequence, Union
+
 from alembic import op
 import sqlalchemy as sa
 ${imports if imports else ""}
 
 # revision identifiers, used by Alembic.
-revision = ${repr(up_revision)}
-down_revision = ${repr(down_revision)}
-branch_labels = ${repr(branch_labels)}
-depends_on = ${repr(depends_on)}
+revision: str = ${repr(up_revision)}
+down_revision: Union[str, None] = ${repr(down_revision)}
+branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
+depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}
 
 
 def upgrade() -> None:
@@ -21,4 +23,4 @@ def upgrade() -> None:
 
 
 def downgrade() -> None:
-    ${downgrades if downgrades else "pass"}
+    ${downgrades if downgrades else "pass"}
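New revision files are rendered from this script.py.mako template whenever a revision is generated, either from the CLI or through the Python API. A sketch, not from the commit, using the committed ini path; the message and autogenerate flag are illustrative. With the file_template and Europe/Madrid timezone configured in alembic.ini above, the resulting file name carries a date-time prefix ahead of the revision id and slug.

from alembic import command
from alembic.config import Config

cfg = Config("services/recipes/alembic.ini")
# Renders script.py.mako into migrations/versions/ with the typed
# revision/down_revision header shown above
command.revision(cfg, message="add notes column", autogenerate=True)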
@@ -1,240 +1,29 @@
-"""Initial recipe management tables
+"""Initial recipes service tables
 
-Revision ID: 001_initial_recipe_tables
-Revises:
-Create Date: 2024-01-15 10:00:00.000000
+Revision ID: 001_initial_recipes
+Revises:
+Create Date: 2024-01-01 12:00:00.000000
 
 """
+from typing import Sequence, Union
+
 from alembic import op
 import sqlalchemy as sa
 from sqlalchemy.dialects import postgresql
 
 # revision identifiers, used by Alembic.
-revision = '001_initial_recipe_tables'
-down_revision = None
-branch_labels = None
-depends_on = None
+revision: str = '001_initial_recipes'
+down_revision: Union[str, None] = None
+branch_labels: Union[str, Sequence[str], None] = None
+depends_on: Union[str, Sequence[str], None] = None
 
 
 def upgrade() -> None:
-    # Create enum types
-    op.execute("CREATE TYPE recipestatus AS ENUM ('draft', 'active', 'testing', 'archived', 'discontinued')")
-    op.execute("CREATE TYPE productionstatus AS ENUM ('planned', 'in_progress', 'completed', 'failed', 'cancelled')")
-    op.execute("CREATE TYPE measurementunit AS ENUM ('g', 'kg', 'ml', 'l', 'cups', 'tbsp', 'tsp', 'units', 'pieces', '%')")
-    op.execute("CREATE TYPE productionpriority AS ENUM ('low', 'normal', 'high', 'urgent')")
-
-    # Create recipes table
-    op.create_table(
-        'recipes',
-        sa.Column('id', postgresql.UUID(as_uuid=True), primary_key=True),
-        sa.Column('tenant_id', postgresql.UUID(as_uuid=True), nullable=False, index=True),
-        sa.Column('name', sa.String(255), nullable=False, index=True),
-        sa.Column('recipe_code', sa.String(100), nullable=True, index=True),
-        sa.Column('version', sa.String(20), nullable=False, default='1.0'),
-        sa.Column('finished_product_id', postgresql.UUID(as_uuid=True), nullable=False, index=True),
-        sa.Column('description', sa.Text, nullable=True),
-        sa.Column('category', sa.String(100), nullable=True, index=True),
-        sa.Column('cuisine_type', sa.String(100), nullable=True),
-        sa.Column('difficulty_level', sa.Integer, nullable=False, default=1),
-        sa.Column('yield_quantity', sa.Float, nullable=False),
-        sa.Column('yield_unit', sa.Enum('g', 'kg', 'ml', 'l', 'cups', 'tbsp', 'tsp', 'units', 'pieces', '%', name='measurementunit'), nullable=False),
-        sa.Column('prep_time_minutes', sa.Integer, nullable=True),
-        sa.Column('cook_time_minutes', sa.Integer, nullable=True),
-        sa.Column('total_time_minutes', sa.Integer, nullable=True),
-        sa.Column('rest_time_minutes', sa.Integer, nullable=True),
-        sa.Column('estimated_cost_per_unit', sa.Numeric(10, 2), nullable=True),
-        sa.Column('last_calculated_cost', sa.Numeric(10, 2), nullable=True),
-        sa.Column('cost_calculation_date', sa.DateTime(timezone=True), nullable=True),
-        sa.Column('target_margin_percentage', sa.Float, nullable=True),
-        sa.Column('suggested_selling_price', sa.Numeric(10, 2), nullable=True),
-        sa.Column('instructions', postgresql.JSONB, nullable=True),
-        sa.Column('preparation_notes', sa.Text, nullable=True),
-        sa.Column('storage_instructions', sa.Text, nullable=True),
-        sa.Column('quality_standards', sa.Text, nullable=True),
-        sa.Column('serves_count', sa.Integer, nullable=True),
-        sa.Column('nutritional_info', postgresql.JSONB, nullable=True),
-        sa.Column('allergen_info', postgresql.JSONB, nullable=True),
-        sa.Column('dietary_tags', postgresql.JSONB, nullable=True),
-        sa.Column('batch_size_multiplier', sa.Float, nullable=False, default=1.0),
-        sa.Column('minimum_batch_size', sa.Float, nullable=True),
-        sa.Column('maximum_batch_size', sa.Float, nullable=True),
-        sa.Column('optimal_production_temperature', sa.Float, nullable=True),
-        sa.Column('optimal_humidity', sa.Float, nullable=True),
-        sa.Column('quality_check_points', postgresql.JSONB, nullable=True),
-        sa.Column('common_issues', postgresql.JSONB, nullable=True),
-        sa.Column('status', sa.Enum('draft', 'active', 'testing', 'archived', 'discontinued', name='recipestatus'), nullable=False, default='draft', index=True),
-        sa.Column('is_seasonal', sa.Boolean, default=False),
-        sa.Column('season_start_month', sa.Integer, nullable=True),
-        sa.Column('season_end_month', sa.Integer, nullable=True),
-        sa.Column('is_signature_item', sa.Boolean, default=False),
-        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.func.now()),
-        sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.func.now(), onupdate=sa.func.now()),
-        sa.Column('created_by', postgresql.UUID(as_uuid=True), nullable=True),
-        sa.Column('updated_by', postgresql.UUID(as_uuid=True), nullable=True),
-    )
-
-    # Create recipe ingredients table
-    op.create_table(
-        'recipe_ingredients',
-        sa.Column('id', postgresql.UUID(as_uuid=True), primary_key=True),
-        sa.Column('tenant_id', postgresql.UUID(as_uuid=True), nullable=False, index=True),
-        sa.Column('recipe_id', postgresql.UUID(as_uuid=True), sa.ForeignKey('recipes.id', ondelete='CASCADE'), nullable=False, index=True),
-        sa.Column('ingredient_id', postgresql.UUID(as_uuid=True), nullable=False, index=True),
-        sa.Column('quantity', sa.Float, nullable=False),
-        sa.Column('unit', sa.Enum('g', 'kg', 'ml', 'l', 'cups', 'tbsp', 'tsp', 'units', 'pieces', '%', name='measurementunit'), nullable=False),
-        sa.Column('quantity_in_base_unit', sa.Float, nullable=True),
-        sa.Column('alternative_quantity', sa.Float, nullable=True),
-        sa.Column('alternative_unit', sa.Enum('g', 'kg', 'ml', 'l', 'cups', 'tbsp', 'tsp', 'units', 'pieces', '%', name='measurementunit'), nullable=True),
-        sa.Column('preparation_method', sa.String(255), nullable=True),
-        sa.Column('ingredient_notes', sa.Text, nullable=True),
-        sa.Column('is_optional', sa.Boolean, default=False),
-        sa.Column('ingredient_order', sa.Integer, nullable=False, default=1),
-        sa.Column('ingredient_group', sa.String(100), nullable=True),
-        sa.Column('substitution_options', postgresql.JSONB, nullable=True),
-        sa.Column('substitution_ratio', sa.Float, nullable=True),
-        sa.Column('unit_cost', sa.Numeric(10, 2), nullable=True),
-        sa.Column('total_cost', sa.Numeric(10, 2), nullable=True),
-        sa.Column('cost_updated_at', sa.DateTime(timezone=True), nullable=True),
-    )
-
-    # Create production batches table
-    op.create_table(
-        'production_batches',
-        sa.Column('id', postgresql.UUID(as_uuid=True), primary_key=True),
-        sa.Column('tenant_id', postgresql.UUID(as_uuid=True), nullable=False, index=True),
-        sa.Column('recipe_id', postgresql.UUID(as_uuid=True), sa.ForeignKey('recipes.id'), nullable=False, index=True),
-        sa.Column('batch_number', sa.String(100), nullable=False, index=True),
-        sa.Column('production_date', sa.DateTime(timezone=True), nullable=False, index=True),
-        sa.Column('planned_start_time', sa.DateTime(timezone=True), nullable=True),
-        sa.Column('actual_start_time', sa.DateTime(timezone=True), nullable=True),
-        sa.Column('planned_end_time', sa.DateTime(timezone=True), nullable=True),
-        sa.Column('actual_end_time', sa.DateTime(timezone=True), nullable=True),
-        sa.Column('planned_quantity', sa.Float, nullable=False),
-        sa.Column('actual_quantity', sa.Float, nullable=True),
-        sa.Column('yield_percentage', sa.Float, nullable=True),
-        sa.Column('batch_size_multiplier', sa.Float, nullable=False, default=1.0),
-        sa.Column('status', sa.Enum('planned', 'in_progress', 'completed', 'failed', 'cancelled', name='productionstatus'), nullable=False, default='planned', index=True),
-        sa.Column('priority', sa.Enum('low', 'normal', 'high', 'urgent', name='productionpriority'), nullable=False, default='normal'),
-        sa.Column('assigned_staff', postgresql.JSONB, nullable=True),
-        sa.Column('production_notes', sa.Text, nullable=True),
-        sa.Column('quality_score', sa.Float, nullable=True),
-        sa.Column('quality_notes', sa.Text, nullable=True),
-        sa.Column('defect_rate', sa.Float, nullable=True),
-        sa.Column('rework_required', sa.Boolean, default=False),
-        sa.Column('planned_material_cost', sa.Numeric(10, 2), nullable=True),
-        sa.Column('actual_material_cost', sa.Numeric(10, 2), nullable=True),
-        sa.Column('labor_cost', sa.Numeric(10, 2), nullable=True),
-        sa.Column('overhead_cost', sa.Numeric(10, 2), nullable=True),
-        sa.Column('total_production_cost', sa.Numeric(10, 2), nullable=True),
-        sa.Column('cost_per_unit', sa.Numeric(10, 2), nullable=True),
-        sa.Column('production_temperature', sa.Float, nullable=True),
-        sa.Column('production_humidity', sa.Float, nullable=True),
-        sa.Column('oven_temperature', sa.Float, nullable=True),
-        sa.Column('baking_time_minutes', sa.Integer, nullable=True),
-        sa.Column('waste_quantity', sa.Float, nullable=False, default=0.0),
-        sa.Column('waste_reason', sa.String(255), nullable=True),
-        sa.Column('efficiency_percentage', sa.Float, nullable=True),
-        sa.Column('customer_order_reference', sa.String(100), nullable=True),
-        sa.Column('pre_order_quantity', sa.Float, nullable=True),
-        sa.Column('shelf_quantity', sa.Float, nullable=True),
-        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.func.now()),
-        sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.func.now(), onupdate=sa.func.now()),
-        sa.Column('created_by', postgresql.UUID(as_uuid=True), nullable=True),
-        sa.Column('completed_by', postgresql.UUID(as_uuid=True), nullable=True),
-    )
-
-    # Create production ingredient consumption table
-    op.create_table(
-        'production_ingredient_consumption',
-        sa.Column('id', postgresql.UUID(as_uuid=True), primary_key=True),
-        sa.Column('tenant_id', postgresql.UUID(as_uuid=True), nullable=False, index=True),
-        sa.Column('production_batch_id', postgresql.UUID(as_uuid=True), sa.ForeignKey('production_batches.id', ondelete='CASCADE'), nullable=False, index=True),
-        sa.Column('recipe_ingredient_id', postgresql.UUID(as_uuid=True), sa.ForeignKey('recipe_ingredients.id'), nullable=False, index=True),
-        sa.Column('ingredient_id', postgresql.UUID(as_uuid=True), nullable=False, index=True),
-        sa.Column('stock_id', postgresql.UUID(as_uuid=True), nullable=True, index=True),
-        sa.Column('planned_quantity', sa.Float, nullable=False),
-        sa.Column('actual_quantity', sa.Float, nullable=False),
-        sa.Column('unit', sa.Enum('g', 'kg', 'ml', 'l', 'cups', 'tbsp', 'tsp', 'units', 'pieces', '%', name='measurementunit'), nullable=False),
-        sa.Column('variance_quantity', sa.Float, nullable=True),
-        sa.Column('variance_percentage', sa.Float, nullable=True),
-        sa.Column('unit_cost', sa.Numeric(10, 2), nullable=True),
-        sa.Column('total_cost', sa.Numeric(10, 2), nullable=True),
-        sa.Column('consumption_time', sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
-        sa.Column('consumption_notes', sa.Text, nullable=True),
-        sa.Column('staff_member', postgresql.UUID(as_uuid=True), nullable=True),
-        sa.Column('ingredient_condition', sa.String(50), nullable=True),
-        sa.Column('quality_impact', sa.String(255), nullable=True),
-        sa.Column('substitution_used', sa.Boolean, default=False),
-        sa.Column('substitution_details', sa.Text, nullable=True),
-    )
-
-    # Create production schedules table
-    op.create_table(
-        'production_schedules',
-        sa.Column('id', postgresql.UUID(as_uuid=True), primary_key=True),
-        sa.Column('tenant_id', postgresql.UUID(as_uuid=True), nullable=False, index=True),
-        sa.Column('schedule_date', sa.DateTime(timezone=True), nullable=False, index=True),
-        sa.Column('schedule_name', sa.String(255), nullable=True),
-        sa.Column('total_planned_batches', sa.Integer, nullable=False, default=0),
-        sa.Column('total_planned_items', sa.Float, nullable=False, default=0.0),
-        sa.Column('estimated_production_hours', sa.Float, nullable=True),
-        sa.Column('estimated_material_cost', sa.Numeric(10, 2), nullable=True),
-        sa.Column('is_published', sa.Boolean, default=False),
-        sa.Column('is_completed', sa.Boolean, default=False),
-        sa.Column('completion_percentage', sa.Float, nullable=True),
-        sa.Column('available_staff_hours', sa.Float, nullable=True),
-        sa.Column('oven_capacity_hours', sa.Float, nullable=True),
-        sa.Column('production_capacity_limit', sa.Float, nullable=True),
-        sa.Column('schedule_notes', sa.Text, nullable=True),
-        sa.Column('preparation_instructions', sa.Text, nullable=True),
-        sa.Column('special_requirements', postgresql.JSONB, nullable=True),
-        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.func.now()),
-        sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.func.now(), onupdate=sa.func.now()),
-        sa.Column('created_by', postgresql.UUID(as_uuid=True), nullable=True),
-        sa.Column('published_by', postgresql.UUID(as_uuid=True), nullable=True),
-        sa.Column('published_at', sa.DateTime(timezone=True), nullable=True),
-    )
-
-    # Create indexes
-    op.create_index('idx_recipes_tenant_name', 'recipes', ['tenant_id', 'name'])
-    op.create_index('idx_recipes_tenant_product', 'recipes', ['tenant_id', 'finished_product_id'])
-    op.create_index('idx_recipes_status', 'recipes', ['tenant_id', 'status'])
-    op.create_index('idx_recipes_category', 'recipes', ['tenant_id', 'category', 'status'])
-    op.create_index('idx_recipes_seasonal', 'recipes', ['tenant_id', 'is_seasonal', 'season_start_month', 'season_end_month'])
-    op.create_index('idx_recipes_signature', 'recipes', ['tenant_id', 'is_signature_item', 'status'])
-
-    op.create_index('idx_recipe_ingredients_recipe', 'recipe_ingredients', ['recipe_id', 'ingredient_order'])
-    op.create_index('idx_recipe_ingredients_ingredient', 'recipe_ingredients', ['ingredient_id'])
-    op.create_index('idx_recipe_ingredients_tenant', 'recipe_ingredients', ['tenant_id', 'recipe_id'])
-    op.create_index('idx_recipe_ingredients_group', 'recipe_ingredients', ['recipe_id', 'ingredient_group', 'ingredient_order'])
-
-    op.create_index('idx_production_batches_tenant_date', 'production_batches', ['tenant_id', 'production_date'])
-    op.create_index('idx_production_batches_recipe', 'production_batches', ['recipe_id', 'production_date'])
-    op.create_index('idx_production_batches_status', 'production_batches', ['tenant_id', 'status', 'production_date'])
-    op.create_index('idx_production_batches_batch_number', 'production_batches', ['tenant_id', 'batch_number'])
-    op.create_index('idx_production_batches_priority', 'production_batches', ['tenant_id', 'priority', 'planned_start_time'])
-
-    op.create_index('idx_consumption_batch', 'production_ingredient_consumption', ['production_batch_id'])
-    op.create_index('idx_consumption_ingredient', 'production_ingredient_consumption', ['ingredient_id', 'consumption_time'])
-    op.create_index('idx_consumption_tenant', 'production_ingredient_consumption', ['tenant_id', 'consumption_time'])
-    op.create_index('idx_consumption_recipe_ingredient', 'production_ingredient_consumption', ['recipe_ingredient_id'])
-    op.create_index('idx_consumption_stock', 'production_ingredient_consumption', ['stock_id'])
-
-    op.create_index('idx_production_schedules_tenant_date', 'production_schedules', ['tenant_id', 'schedule_date'])
-    op.create_index('idx_production_schedules_published', 'production_schedules', ['tenant_id', 'is_published', 'schedule_date'])
-    op.create_index('idx_production_schedules_completed', 'production_schedules', ['tenant_id', 'is_completed', 'schedule_date'])
+    # TODO: Add table creation statements for recipes service
+    # This is a placeholder migration - replace with actual table definitions
+    pass
 
 
 def downgrade() -> None:
-    # Drop all tables
-    op.drop_table('production_schedules')
-    op.drop_table('production_ingredient_consumption')
-    op.drop_table('production_batches')
-    op.drop_table('recipe_ingredients')
-    op.drop_table('recipes')
-
-    # Drop enum types
-    op.execute("DROP TYPE IF EXISTS productionpriority")
-    op.execute("DROP TYPE IF EXISTS measurementunit")
-    op.execute("DROP TYPE IF EXISTS productionstatus")
-    op.execute("DROP TYPE IF EXISTS recipestatus")
+    # TODO: Add table drop statements for recipes service
+    pass