Create new services: inventory, recipes, suppliers

Urtzi Alfaro
2025-08-13 17:39:35 +02:00
parent fbe7470ad9
commit 16b8a9d50c
151 changed files with 35799 additions and 857 deletions


@@ -0,0 +1,94 @@
# A generic, single database configuration.

[alembic]
# path to migration scripts
script_location = .

# template used to generate migration file names; the default value is %%(rev)s_%%(slug)s
# Uncomment the line below if you want the files to be prepended with date and time
# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s

# sys.path path, will be prepended to sys.path if present.
# defaults to the current working directory.
prepend_sys_path = .

# timezone to use when rendering the date within the migration file
# as well as the filename.
# If specified, requires the python-dateutil library that can be
# installed by adding `alembic[tz]` to the pip requirements
# string value is passed to dateutil.tz.gettz()
# leave blank for localtime
# timezone =

# max length of characters to apply to the "slug" field
# truncate_slug_length = 40

# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false

# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false

# version number format string
# version_num_format = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d

# version path separator; used to split version_locations
# version_path_separator = :
# version_path_separator = os # Use os.pathsep. Default configuration used when version_path_separator is not provided

# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8
sqlalchemy.url = postgresql://recipes_user:recipes_pass@localhost:5432/recipes_db

[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
# on newly generated revision scripts. See the documentation for further
# detail and examples

# format using "black" - use the console_scripts runner, against the "black" entrypoint
# hooks = black
# black.type = console_scripts
# black.entrypoint = black
# black.options = -l 79 REVISION_SCRIPT_FILENAME

# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic

[handlers]
keys = console

[formatters]
keys = generic

[logger_root]
level = WARN
handlers = console
qualname =

[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine

[logger_alembic]
level = INFO
handlers =
qualname = alembic

[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic

[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S
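
Note that sqlalchemy.url above is only the fallback; env.py (next file) overrides it from the environment. As a quick sanity check, migrations against this configuration can also be driven programmatically through Alembic's command API — a minimal sketch, assuming this alembic.ini sits in the current working directory:

from alembic import command
from alembic.config import Config

alembic_cfg = Config("alembic.ini")   # the configuration shown above
command.upgrade(alembic_cfg, "head")  # same effect as `alembic upgrade head`
command.downgrade(alembic_cfg, "-1")  # roll back the most recent revision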


@@ -0,0 +1,71 @@
import os
import sys
from logging.config import fileConfig

from sqlalchemy import engine_from_config
from sqlalchemy import pool

from alembic import context

# Add the parent directory to the path so we can import our modules
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))

# Import the Base class and all models so their tables are registered
# on Base.metadata
from shared.database.base import Base
from app.models.recipes import *

# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

# Interpret the config file for Python logging.
# This line sets up loggers basically.
if config.config_file_name is not None:
    fileConfig(config.config_file_name)

# Set the target metadata
target_metadata = Base.metadata


def get_database_url():
    """Get database URL from environment or config"""
    return os.getenv('RECIPES_DATABASE_URL', config.get_main_option("sqlalchemy.url"))


def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode."""
    url = get_database_url()
    context.configure(
        url=url,
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )

    with context.begin_transaction():
        context.run_migrations()


def run_migrations_online() -> None:
    """Run migrations in 'online' mode."""
    configuration = config.get_section(config.config_ini_section)
    configuration["sqlalchemy.url"] = get_database_url()

    connectable = engine_from_config(
        configuration,
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )

    with connectable.connect() as connection:
        context.configure(
            connection=connection,
            target_metadata=target_metadata,
        )

        with context.begin_transaction():
            context.run_migrations()


if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
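
Because get_database_url() checks RECIPES_DATABASE_URL first, the same migration scripts can target any environment without editing alembic.ini. An illustrative override (the URL value here is a placeholder, not part of this commit):

import os

from alembic import command
from alembic.config import Config

# Point the recipes migrations at a different database for this process only.
os.environ["RECIPES_DATABASE_URL"] = "postgresql://recipes_user:recipes_pass@db:5432/recipes_db"
command.upgrade(Config("alembic.ini"), "head")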


@@ -0,0 +1,24 @@
"""${message}
Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}
"""
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}
# revision identifiers, used by Alembic.
revision = ${repr(up_revision)}
down_revision = ${repr(down_revision)}
branch_labels = ${repr(branch_labels)}
depends_on = ${repr(depends_on)}
def upgrade() -> None:
${upgrades if upgrades else "pass"}
def downgrade() -> None:
${downgrades if downgrades else "pass"}
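
For reference, a revision generated from this template renders to an ordinary module like the sketch below; the revision ID and the added column are hypothetical, not part of this commit:

"""add internal notes to recipes

Revision ID: 3ad4c9810e5f
Revises: 001_initial_recipe_tables
Create Date: 2025-08-13 17:40:00.000000

"""
from alembic import op
import sqlalchemy as sa

# revision identifiers, used by Alembic.
revision = '3ad4c9810e5f'
down_revision = '001_initial_recipe_tables'
branch_labels = None
depends_on = None


def upgrade() -> None:
    op.add_column('recipes', sa.Column('internal_notes', sa.Text, nullable=True))


def downgrade() -> None:
    op.drop_column('recipes', 'internal_notes')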


@@ -0,0 +1,240 @@
"""Initial recipe management tables
Revision ID: 001_initial_recipe_tables
Revises:
Create Date: 2024-01-15 10:00:00.000000
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = '001_initial_recipe_tables'
down_revision = None
branch_labels = None
depends_on = None
def upgrade() -> None:
    # Create enum types
    op.execute("CREATE TYPE recipestatus AS ENUM ('draft', 'active', 'testing', 'archived', 'discontinued')")
    op.execute("CREATE TYPE productionstatus AS ENUM ('planned', 'in_progress', 'completed', 'failed', 'cancelled')")
    op.execute("CREATE TYPE measurementunit AS ENUM ('g', 'kg', 'ml', 'l', 'cups', 'tbsp', 'tsp', 'units', 'pieces', '%')")
    op.execute("CREATE TYPE productionpriority AS ENUM ('low', 'normal', 'high', 'urgent')")

    # Reuse the types created above; create_type=False keeps op.create_table
    # from emitting a second CREATE TYPE for types that already exist.
    recipestatus = postgresql.ENUM('draft', 'active', 'testing', 'archived', 'discontinued', name='recipestatus', create_type=False)
    productionstatus = postgresql.ENUM('planned', 'in_progress', 'completed', 'failed', 'cancelled', name='productionstatus', create_type=False)
    measurementunit = postgresql.ENUM('g', 'kg', 'ml', 'l', 'cups', 'tbsp', 'tsp', 'units', 'pieces', '%', name='measurementunit', create_type=False)
    productionpriority = postgresql.ENUM('low', 'normal', 'high', 'urgent', name='productionpriority', create_type=False)

    # Create recipes table
    op.create_table(
        'recipes',
        sa.Column('id', postgresql.UUID(as_uuid=True), primary_key=True),
        sa.Column('tenant_id', postgresql.UUID(as_uuid=True), nullable=False, index=True),
        sa.Column('name', sa.String(255), nullable=False, index=True),
        sa.Column('recipe_code', sa.String(100), nullable=True, index=True),
        sa.Column('version', sa.String(20), nullable=False, default='1.0'),
        sa.Column('finished_product_id', postgresql.UUID(as_uuid=True), nullable=False, index=True),
        sa.Column('description', sa.Text, nullable=True),
        sa.Column('category', sa.String(100), nullable=True, index=True),
        sa.Column('cuisine_type', sa.String(100), nullable=True),
        sa.Column('difficulty_level', sa.Integer, nullable=False, default=1),
        sa.Column('yield_quantity', sa.Float, nullable=False),
        sa.Column('yield_unit', measurementunit, nullable=False),
        sa.Column('prep_time_minutes', sa.Integer, nullable=True),
        sa.Column('cook_time_minutes', sa.Integer, nullable=True),
        sa.Column('total_time_minutes', sa.Integer, nullable=True),
        sa.Column('rest_time_minutes', sa.Integer, nullable=True),
        sa.Column('estimated_cost_per_unit', sa.Numeric(10, 2), nullable=True),
        sa.Column('last_calculated_cost', sa.Numeric(10, 2), nullable=True),
        sa.Column('cost_calculation_date', sa.DateTime(timezone=True), nullable=True),
        sa.Column('target_margin_percentage', sa.Float, nullable=True),
        sa.Column('suggested_selling_price', sa.Numeric(10, 2), nullable=True),
        sa.Column('instructions', postgresql.JSONB, nullable=True),
        sa.Column('preparation_notes', sa.Text, nullable=True),
        sa.Column('storage_instructions', sa.Text, nullable=True),
        sa.Column('quality_standards', sa.Text, nullable=True),
        sa.Column('serves_count', sa.Integer, nullable=True),
        sa.Column('nutritional_info', postgresql.JSONB, nullable=True),
        sa.Column('allergen_info', postgresql.JSONB, nullable=True),
        sa.Column('dietary_tags', postgresql.JSONB, nullable=True),
        sa.Column('batch_size_multiplier', sa.Float, nullable=False, default=1.0),
        sa.Column('minimum_batch_size', sa.Float, nullable=True),
        sa.Column('maximum_batch_size', sa.Float, nullable=True),
        sa.Column('optimal_production_temperature', sa.Float, nullable=True),
        sa.Column('optimal_humidity', sa.Float, nullable=True),
        sa.Column('quality_check_points', postgresql.JSONB, nullable=True),
        sa.Column('common_issues', postgresql.JSONB, nullable=True),
        sa.Column('status', recipestatus, nullable=False, default='draft', index=True),
        sa.Column('is_seasonal', sa.Boolean, default=False),
        sa.Column('season_start_month', sa.Integer, nullable=True),
        sa.Column('season_end_month', sa.Integer, nullable=True),
        sa.Column('is_signature_item', sa.Boolean, default=False),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.func.now()),
        sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.func.now(), onupdate=sa.func.now()),
        sa.Column('created_by', postgresql.UUID(as_uuid=True), nullable=True),
        sa.Column('updated_by', postgresql.UUID(as_uuid=True), nullable=True),
    )

    # Create recipe ingredients table
    op.create_table(
        'recipe_ingredients',
        sa.Column('id', postgresql.UUID(as_uuid=True), primary_key=True),
        sa.Column('tenant_id', postgresql.UUID(as_uuid=True), nullable=False, index=True),
        sa.Column('recipe_id', postgresql.UUID(as_uuid=True), sa.ForeignKey('recipes.id', ondelete='CASCADE'), nullable=False, index=True),
        sa.Column('ingredient_id', postgresql.UUID(as_uuid=True), nullable=False, index=True),
        sa.Column('quantity', sa.Float, nullable=False),
        sa.Column('unit', measurementunit, nullable=False),
        sa.Column('quantity_in_base_unit', sa.Float, nullable=True),
        sa.Column('alternative_quantity', sa.Float, nullable=True),
        sa.Column('alternative_unit', measurementunit, nullable=True),
        sa.Column('preparation_method', sa.String(255), nullable=True),
        sa.Column('ingredient_notes', sa.Text, nullable=True),
        sa.Column('is_optional', sa.Boolean, default=False),
        sa.Column('ingredient_order', sa.Integer, nullable=False, default=1),
        sa.Column('ingredient_group', sa.String(100), nullable=True),
        sa.Column('substitution_options', postgresql.JSONB, nullable=True),
        sa.Column('substitution_ratio', sa.Float, nullable=True),
        sa.Column('unit_cost', sa.Numeric(10, 2), nullable=True),
        sa.Column('total_cost', sa.Numeric(10, 2), nullable=True),
        sa.Column('cost_updated_at', sa.DateTime(timezone=True), nullable=True),
    )

    # Create production batches table
    op.create_table(
        'production_batches',
        sa.Column('id', postgresql.UUID(as_uuid=True), primary_key=True),
        sa.Column('tenant_id', postgresql.UUID(as_uuid=True), nullable=False, index=True),
        sa.Column('recipe_id', postgresql.UUID(as_uuid=True), sa.ForeignKey('recipes.id'), nullable=False, index=True),
        sa.Column('batch_number', sa.String(100), nullable=False, index=True),
        sa.Column('production_date', sa.DateTime(timezone=True), nullable=False, index=True),
        sa.Column('planned_start_time', sa.DateTime(timezone=True), nullable=True),
        sa.Column('actual_start_time', sa.DateTime(timezone=True), nullable=True),
        sa.Column('planned_end_time', sa.DateTime(timezone=True), nullable=True),
        sa.Column('actual_end_time', sa.DateTime(timezone=True), nullable=True),
        sa.Column('planned_quantity', sa.Float, nullable=False),
        sa.Column('actual_quantity', sa.Float, nullable=True),
        sa.Column('yield_percentage', sa.Float, nullable=True),
        sa.Column('batch_size_multiplier', sa.Float, nullable=False, default=1.0),
        sa.Column('status', productionstatus, nullable=False, default='planned', index=True),
        sa.Column('priority', productionpriority, nullable=False, default='normal'),
        sa.Column('assigned_staff', postgresql.JSONB, nullable=True),
        sa.Column('production_notes', sa.Text, nullable=True),
        sa.Column('quality_score', sa.Float, nullable=True),
        sa.Column('quality_notes', sa.Text, nullable=True),
        sa.Column('defect_rate', sa.Float, nullable=True),
        sa.Column('rework_required', sa.Boolean, default=False),
        sa.Column('planned_material_cost', sa.Numeric(10, 2), nullable=True),
        sa.Column('actual_material_cost', sa.Numeric(10, 2), nullable=True),
        sa.Column('labor_cost', sa.Numeric(10, 2), nullable=True),
        sa.Column('overhead_cost', sa.Numeric(10, 2), nullable=True),
        sa.Column('total_production_cost', sa.Numeric(10, 2), nullable=True),
        sa.Column('cost_per_unit', sa.Numeric(10, 2), nullable=True),
        sa.Column('production_temperature', sa.Float, nullable=True),
        sa.Column('production_humidity', sa.Float, nullable=True),
        sa.Column('oven_temperature', sa.Float, nullable=True),
        sa.Column('baking_time_minutes', sa.Integer, nullable=True),
        sa.Column('waste_quantity', sa.Float, nullable=False, default=0.0),
        sa.Column('waste_reason', sa.String(255), nullable=True),
        sa.Column('efficiency_percentage', sa.Float, nullable=True),
        sa.Column('customer_order_reference', sa.String(100), nullable=True),
        sa.Column('pre_order_quantity', sa.Float, nullable=True),
        sa.Column('shelf_quantity', sa.Float, nullable=True),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.func.now()),
        sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.func.now(), onupdate=sa.func.now()),
        sa.Column('created_by', postgresql.UUID(as_uuid=True), nullable=True),
        sa.Column('completed_by', postgresql.UUID(as_uuid=True), nullable=True),
    )

    # Create production ingredient consumption table
    op.create_table(
        'production_ingredient_consumption',
        sa.Column('id', postgresql.UUID(as_uuid=True), primary_key=True),
        sa.Column('tenant_id', postgresql.UUID(as_uuid=True), nullable=False, index=True),
        sa.Column('production_batch_id', postgresql.UUID(as_uuid=True), sa.ForeignKey('production_batches.id', ondelete='CASCADE'), nullable=False, index=True),
        sa.Column('recipe_ingredient_id', postgresql.UUID(as_uuid=True), sa.ForeignKey('recipe_ingredients.id'), nullable=False, index=True),
        sa.Column('ingredient_id', postgresql.UUID(as_uuid=True), nullable=False, index=True),
        sa.Column('stock_id', postgresql.UUID(as_uuid=True), nullable=True, index=True),
        sa.Column('planned_quantity', sa.Float, nullable=False),
        sa.Column('actual_quantity', sa.Float, nullable=False),
        sa.Column('unit', measurementunit, nullable=False),
        sa.Column('variance_quantity', sa.Float, nullable=True),
        sa.Column('variance_percentage', sa.Float, nullable=True),
        sa.Column('unit_cost', sa.Numeric(10, 2), nullable=True),
        sa.Column('total_cost', sa.Numeric(10, 2), nullable=True),
        sa.Column('consumption_time', sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
        sa.Column('consumption_notes', sa.Text, nullable=True),
        sa.Column('staff_member', postgresql.UUID(as_uuid=True), nullable=True),
        sa.Column('ingredient_condition', sa.String(50), nullable=True),
        sa.Column('quality_impact', sa.String(255), nullable=True),
        sa.Column('substitution_used', sa.Boolean, default=False),
        sa.Column('substitution_details', sa.Text, nullable=True),
    )

    # Create production schedules table
    op.create_table(
        'production_schedules',
        sa.Column('id', postgresql.UUID(as_uuid=True), primary_key=True),
        sa.Column('tenant_id', postgresql.UUID(as_uuid=True), nullable=False, index=True),
        sa.Column('schedule_date', sa.DateTime(timezone=True), nullable=False, index=True),
        sa.Column('schedule_name', sa.String(255), nullable=True),
        sa.Column('total_planned_batches', sa.Integer, nullable=False, default=0),
        sa.Column('total_planned_items', sa.Float, nullable=False, default=0.0),
        sa.Column('estimated_production_hours', sa.Float, nullable=True),
        sa.Column('estimated_material_cost', sa.Numeric(10, 2), nullable=True),
        sa.Column('is_published', sa.Boolean, default=False),
        sa.Column('is_completed', sa.Boolean, default=False),
        sa.Column('completion_percentage', sa.Float, nullable=True),
        sa.Column('available_staff_hours', sa.Float, nullable=True),
        sa.Column('oven_capacity_hours', sa.Float, nullable=True),
        sa.Column('production_capacity_limit', sa.Float, nullable=True),
        sa.Column('schedule_notes', sa.Text, nullable=True),
        sa.Column('preparation_instructions', sa.Text, nullable=True),
        sa.Column('special_requirements', postgresql.JSONB, nullable=True),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.func.now()),
        sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.func.now(), onupdate=sa.func.now()),
        sa.Column('created_by', postgresql.UUID(as_uuid=True), nullable=True),
        sa.Column('published_by', postgresql.UUID(as_uuid=True), nullable=True),
        sa.Column('published_at', sa.DateTime(timezone=True), nullable=True),
    )

    # Create indexes
    op.create_index('idx_recipes_tenant_name', 'recipes', ['tenant_id', 'name'])
    op.create_index('idx_recipes_tenant_product', 'recipes', ['tenant_id', 'finished_product_id'])
    op.create_index('idx_recipes_status', 'recipes', ['tenant_id', 'status'])
    op.create_index('idx_recipes_category', 'recipes', ['tenant_id', 'category', 'status'])
    op.create_index('idx_recipes_seasonal', 'recipes', ['tenant_id', 'is_seasonal', 'season_start_month', 'season_end_month'])
    op.create_index('idx_recipes_signature', 'recipes', ['tenant_id', 'is_signature_item', 'status'])

    op.create_index('idx_recipe_ingredients_recipe', 'recipe_ingredients', ['recipe_id', 'ingredient_order'])
    op.create_index('idx_recipe_ingredients_ingredient', 'recipe_ingredients', ['ingredient_id'])
    op.create_index('idx_recipe_ingredients_tenant', 'recipe_ingredients', ['tenant_id', 'recipe_id'])
    op.create_index('idx_recipe_ingredients_group', 'recipe_ingredients', ['recipe_id', 'ingredient_group', 'ingredient_order'])

    op.create_index('idx_production_batches_tenant_date', 'production_batches', ['tenant_id', 'production_date'])
    op.create_index('idx_production_batches_recipe', 'production_batches', ['recipe_id', 'production_date'])
    op.create_index('idx_production_batches_status', 'production_batches', ['tenant_id', 'status', 'production_date'])
    op.create_index('idx_production_batches_batch_number', 'production_batches', ['tenant_id', 'batch_number'])
    op.create_index('idx_production_batches_priority', 'production_batches', ['tenant_id', 'priority', 'planned_start_time'])

    op.create_index('idx_consumption_batch', 'production_ingredient_consumption', ['production_batch_id'])
    op.create_index('idx_consumption_ingredient', 'production_ingredient_consumption', ['ingredient_id', 'consumption_time'])
    op.create_index('idx_consumption_tenant', 'production_ingredient_consumption', ['tenant_id', 'consumption_time'])
    op.create_index('idx_consumption_recipe_ingredient', 'production_ingredient_consumption', ['recipe_ingredient_id'])
    op.create_index('idx_consumption_stock', 'production_ingredient_consumption', ['stock_id'])

    op.create_index('idx_production_schedules_tenant_date', 'production_schedules', ['tenant_id', 'schedule_date'])
    op.create_index('idx_production_schedules_published', 'production_schedules', ['tenant_id', 'is_published', 'schedule_date'])
    op.create_index('idx_production_schedules_completed', 'production_schedules', ['tenant_id', 'is_completed', 'schedule_date'])


def downgrade() -> None:
    # Drop all tables
    op.drop_table('production_schedules')
    op.drop_table('production_ingredient_consumption')
    op.drop_table('production_batches')
    op.drop_table('recipe_ingredients')
    op.drop_table('recipes')

    # Drop enum types
    op.execute("DROP TYPE IF EXISTS productionpriority")
    op.execute("DROP TYPE IF EXISTS measurementunit")
    op.execute("DROP TYPE IF EXISTS productionstatus")
    op.execute("DROP TYPE IF EXISTS recipestatus")